From 288fb7604c8c1e1ad66c82292915b54c04a5b1e5 Mon Sep 17 00:00:00 2001 From: Artiprocher Date: Tue, 4 Nov 2025 10:59:29 +0800 Subject: [PATCH] diffsynth 2.0 prototype --- .gitignore | 167 + README.md | 520 +- README_zh.md | 535 - apps/gradio/DiffSynth_Studio.py | 252 - apps/gradio/entity_level_control.py | 390 - apps/gradio/qwen_image_eligen.py | 382 - apps/streamlit/DiffSynth_Studio.py | 15 - apps/streamlit/pages/1_Image_Creator.py | 362 - apps/streamlit/pages/2_Video_Creator.py | 197 - diffsynth/__init__.py | 7 +- diffsynth/configs/__init__.py | 2 + diffsynth/configs/model_config.py | 857 - diffsynth/configs/model_configs.py | 29 + .../configs/vram_management_module_maps.py | 24 + diffsynth/controlnets/__init__.py | 2 - diffsynth/controlnets/controlnet_unit.py | 91 - diffsynth/core/__init__.py | 5 + diffsynth/core/attention/__init__.py | 1 + diffsynth/core/attention/attention.py | 121 + diffsynth/core/data/__init__.py | 1 + .../data/operators.py} | 119 +- diffsynth/core/data/unified_dataset.py | 112 + diffsynth/core/gradient/__init__.py | 1 + .../gradient/gradient_checkpoint.py} | 0 diffsynth/core/loader/__init__.py | 3 + diffsynth/core/loader/config.py | 77 + diffsynth/core/loader/file.py | 106 + diffsynth/core/loader/model.py | 71 + diffsynth/core/vram/__init__.py | 2 + diffsynth/core/vram/disk_map.py | 90 + diffsynth/core/vram/initialization.py | 21 + diffsynth/core/vram/layers.py | 418 + diffsynth/data/__init__.py | 1 - diffsynth/data/simple_text_image.py | 41 - diffsynth/data/video.py | 217 - diffsynth/diffusion/__init__.py | 6 + diffsynth/diffusion/base_pipeline.py | 414 + .../{schedulers => diffusion}/flow_match.py | 1 - diffsynth/diffusion/logger.py | 43 + diffsynth/diffusion/loss.py | 29 + diffsynth/diffusion/parsers.py | 62 + diffsynth/diffusion/runner.py | 71 + diffsynth/diffusion/training_module.py | 186 + .../distributed/xdit_context_parallel.py | 131 - diffsynth/extensions/ESRGAN/__init__.py | 137 - diffsynth/extensions/FastBlend/__init__.py | 63 - 
diffsynth/extensions/FastBlend/api.py | 397 - .../extensions/FastBlend/cupy_kernels.py | 119 - diffsynth/extensions/FastBlend/data.py | 146 - diffsynth/extensions/FastBlend/patch_match.py | 299 - .../extensions/FastBlend/runners/__init__.py | 4 - .../extensions/FastBlend/runners/accurate.py | 35 - .../extensions/FastBlend/runners/balanced.py | 46 - .../extensions/FastBlend/runners/fast.py | 141 - .../FastBlend/runners/interpolation.py | 121 - .../ImageQualityMetric/BLIP/__init__.py | 1 - .../ImageQualityMetric/BLIP/blip.py | 77 - .../ImageQualityMetric/BLIP/blip_pretrain.py | 44 - .../extensions/ImageQualityMetric/BLIP/med.py | 947 - .../extensions/ImageQualityMetric/BLIP/vit.py | 301 - .../extensions/ImageQualityMetric/__init__.py | 148 - .../ImageQualityMetric/aesthetic.py | 148 - .../extensions/ImageQualityMetric/clip.py | 97 - .../extensions/ImageQualityMetric/config.py | 23 - .../extensions/ImageQualityMetric/hps.py | 118 - .../ImageQualityMetric/imagereward.py | 212 - .../extensions/ImageQualityMetric/mps.py | 129 - .../ImageQualityMetric/open_clip/__init__.py | 14 - .../open_clip/coca_model.py | 458 - .../ImageQualityMetric/open_clip/constants.py | 2 - .../ImageQualityMetric/open_clip/factory.py | 433 - .../open_clip/hf_configs.py | 45 - .../ImageQualityMetric/open_clip/hf_model.py | 176 - .../ImageQualityMetric/open_clip/loss.py | 270 - .../ImageQualityMetric/open_clip/model.py | 461 - .../open_clip/model_configs/ViT-H-14.json | 17 - .../open_clip/modified_resnet.py | 181 - .../ImageQualityMetric/open_clip/openai.py | 144 - .../open_clip/pretrained.py | 376 - .../open_clip/push_to_hf_hub.py | 243 - .../open_clip/timm_model.py | 127 - .../ImageQualityMetric/open_clip/tokenizer.py | 211 - .../ImageQualityMetric/open_clip/transform.py | 216 - .../open_clip/transformer.py | 727 - .../ImageQualityMetric/open_clip/utils.py | 60 - .../ImageQualityMetric/open_clip/version.py | 1 - .../ImageQualityMetric/pickscore.py | 112 - 
.../ImageQualityMetric/trainer/__init__.py | 1 - .../trainer/models/__init__.py | 3 - .../trainer/models/base_model.py | 7 - .../trainer/models/clip_model.py | 146 - .../trainer/models/cross_modeling.py | 292 - diffsynth/extensions/RIFE/__init__.py | 242 - diffsynth/lora/__init__.py | 45 - diffsynth/lora/flux_lora.py | 324 - diffsynth/models/__init__.py | 1 - diffsynth/models/attention.py | 89 - diffsynth/models/cog_dit.py | 408 - diffsynth/models/cog_vae.py | 518 - diffsynth/models/downloader.py | 111 - diffsynth/models/flux_controlnet.py | 331 - diffsynth/models/flux_dit.py | 748 - diffsynth/models/flux_infiniteyou.py | 129 - diffsynth/models/flux_ipadapter.py | 94 - diffsynth/models/flux_lora_encoder.py | 111 - diffsynth/models/flux_text_encoder.py | 32 - diffsynth/models/flux_vae.py | 303 - diffsynth/models/flux_value_control.py | 60 - diffsynth/models/general_modules.py | 139 + diffsynth/models/hunyuan_dit.py | 451 - diffsynth/models/hunyuan_dit_text_encoder.py | 163 - diffsynth/models/hunyuan_video_dit.py | 920 - .../models/hunyuan_video_text_encoder.py | 68 - diffsynth/models/hunyuan_video_vae_decoder.py | 507 - diffsynth/models/hunyuan_video_vae_encoder.py | 307 - diffsynth/models/kolors_text_encoder.py | 1551 - diffsynth/models/longcat_video_dit.py | 901 - diffsynth/models/lora.py | 402 - diffsynth/models/model_loader.py | 89 + diffsynth/models/model_manager.py | 467 - diffsynth/models/nexus_gen.py | 161 - diffsynth/models/nexus_gen_ar_model.py | 1143 - diffsynth/models/nexus_gen_projector.py | 417 - diffsynth/models/omnigen.py | 803 - diffsynth/models/qwen_image_controlnet.py | 20 +- diffsynth/models/qwen_image_dit.py | 16 +- diffsynth/models/qwen_image_text_encoder.py | 70 +- diffsynth/models/qwen_image_vae.py | 13 - diffsynth/models/qwenvl.py | 168 - diffsynth/models/sd3_dit.py | 567 - diffsynth/models/sd3_text_encoder.py | 1120 - diffsynth/models/sd3_vae_decoder.py | 81 - diffsynth/models/sd3_vae_encoder.py | 95 - diffsynth/models/sd_controlnet.py | 
589 - diffsynth/models/sd_ipadapter.py | 57 - diffsynth/models/sd_motion.py | 199 - diffsynth/models/sd_text_encoder.py | 321 - diffsynth/models/sd_unet.py | 1108 - diffsynth/models/sd_vae_decoder.py | 336 - diffsynth/models/sd_vae_encoder.py | 282 - diffsynth/models/sdxl_controlnet.py | 318 - diffsynth/models/sdxl_ipadapter.py | 122 - diffsynth/models/sdxl_motion.py | 104 - diffsynth/models/sdxl_text_encoder.py | 759 - diffsynth/models/sdxl_unet.py | 1901 - diffsynth/models/sdxl_vae_decoder.py | 24 - diffsynth/models/sdxl_vae_encoder.py | 24 - diffsynth/models/step1x_connector.py | 683 - diffsynth/models/stepvideo_dit.py | 940 - diffsynth/models/stepvideo_text_encoder.py | 553 - diffsynth/models/stepvideo_vae.py | 1132 - diffsynth/models/svd_image_encoder.py | 505 - diffsynth/models/svd_unet.py | 2014 - diffsynth/models/svd_vae_decoder.py | 578 - diffsynth/models/svd_vae_encoder.py | 139 - diffsynth/models/tiler.py | 234 - diffsynth/models/utils.py | 182 - diffsynth/models/wan_video_animate_adapter.py | 670 - .../models/wan_video_camera_controller.py | 206 - diffsynth/models/wan_video_dit.py | 746 - diffsynth/models/wan_video_dit_s2v.py | 625 - diffsynth/models/wan_video_image_encoder.py | 902 - .../models/wan_video_motion_controller.py | 44 - diffsynth/models/wan_video_text_encoder.py | 269 - diffsynth/models/wan_video_vace.py | 113 - diffsynth/models/wan_video_vae.py | 1382 - diffsynth/models/wav2vec.py | 204 - diffsynth/pipelines/__init__.py | 15 - diffsynth/pipelines/base.py | 127 - diffsynth/pipelines/cog_video.py | 135 - diffsynth/pipelines/dancer.py | 236 - diffsynth/pipelines/flux_image.py | 823 - diffsynth/pipelines/flux_image_new.py | 1310 - diffsynth/pipelines/hunyuan_image.py | 288 - diffsynth/pipelines/hunyuan_video.py | 395 - diffsynth/pipelines/omnigen_image.py | 289 - diffsynth/pipelines/pipeline_runner.py | 105 - diffsynth/pipelines/qwen_image.py | 359 +- diffsynth/pipelines/sd3_image.py | 147 - diffsynth/pipelines/sd_image.py | 191 - 
diffsynth/pipelines/sd_video.py | 269 - diffsynth/pipelines/sdxl_image.py | 226 - diffsynth/pipelines/sdxl_video.py | 226 - diffsynth/pipelines/step_video.py | 209 - diffsynth/pipelines/svd_video.py | 300 - diffsynth/pipelines/wan_video.py | 626 - diffsynth/pipelines/wan_video_new.py | 1637 - diffsynth/processors/FastBlend.py | 142 - diffsynth/processors/PILEditor.py | 28 - diffsynth/processors/RIFE.py | 77 - diffsynth/processors/base.py | 6 - diffsynth/processors/sequencial_processor.py | 41 - diffsynth/prompters/__init__.py | 12 - diffsynth/prompters/base_prompter.py | 70 - diffsynth/prompters/cog_prompter.py | 46 - diffsynth/prompters/flux_prompter.py | 74 - diffsynth/prompters/hunyuan_dit_prompter.py | 69 - diffsynth/prompters/hunyuan_video_prompter.py | 275 - diffsynth/prompters/kolors_prompter.py | 354 - diffsynth/prompters/omnigen_prompter.py | 356 - diffsynth/prompters/omost.py | 323 - diffsynth/prompters/prompt_refiners.py | 130 - diffsynth/prompters/sd3_prompter.py | 93 - diffsynth/prompters/sd_prompter.py | 73 - diffsynth/prompters/sdxl_prompter.py | 61 - diffsynth/prompters/stepvideo_prompter.py | 56 - diffsynth/prompters/wan_prompter.py | 109 - diffsynth/schedulers/__init__.py | 3 - diffsynth/schedulers/continuous_ode.py | 59 - diffsynth/schedulers/ddim.py | 105 - .../cog/tokenizer/added_tokens.json | 102 - .../cog/tokenizer/special_tokens_map.json | 125 - .../cog/tokenizer/spiece.model | Bin 791656 -> 0 bytes .../cog/tokenizer/tokenizer_config.json | 940 - .../flux/tokenizer_1/merges.txt | 48895 - .../flux/tokenizer_1/special_tokens_map.json | 30 - .../flux/tokenizer_1/tokenizer_config.json | 30 - .../flux/tokenizer_1/vocab.json | 49410 - .../flux/tokenizer_2/special_tokens_map.json | 125 - .../flux/tokenizer_2/spiece.model | Bin 791656 -> 0 bytes .../flux/tokenizer_2/tokenizer.json | 129428 -- .../flux/tokenizer_2/tokenizer_config.json | 940 - .../tokenizer/special_tokens_map.json | 7 - .../tokenizer/tokenizer_config.json | 16 - 
.../hunyuan_dit/tokenizer/vocab.txt | 47020 - .../hunyuan_dit/tokenizer/vocab_org.txt | 21128 - .../hunyuan_dit/tokenizer_t5/config.json | 28 - .../tokenizer_t5/special_tokens_map.json | 1 - .../hunyuan_dit/tokenizer_t5/spiece.model | Bin 4309802 -> 0 bytes .../tokenizer_t5/tokenizer_config.json | 1 - .../hunyuan_video/tokenizer_1/merges.txt | 48895 - .../tokenizer_1/special_tokens_map.json | 30 - .../tokenizer_1/tokenizer_config.json | 30 - .../hunyuan_video/tokenizer_1/vocab.json | 49410 - .../tokenizer_2/preprocessor_config.json | 45 - .../tokenizer_2/special_tokens_map.json | 30 - .../hunyuan_video/tokenizer_2/tokenizer.json | 1251020 -------------- .../tokenizer_2/tokenizer_config.json | 2095 - .../kolors/tokenizer/tokenizer.model | Bin 1018370 -> 0 bytes .../kolors/tokenizer/tokenizer_config.json | 12 - .../kolors/tokenizer/vocab.txt | Bin 1018370 -> 0 bytes .../stable_diffusion/tokenizer/merges.txt | 48895 - .../tokenizer/special_tokens_map.json | 24 - .../tokenizer/tokenizer_config.json | 34 - .../stable_diffusion/tokenizer/vocab.json | 49410 - .../stable_diffusion_3/tokenizer_1/merges.txt | 48895 - .../tokenizer_1/special_tokens_map.json | 30 - .../tokenizer_1/tokenizer_config.json | 30 - .../stable_diffusion_3/tokenizer_1/vocab.json | 49410 - .../stable_diffusion_3/tokenizer_2/merges.txt | 48895 - .../tokenizer_2/special_tokens_map.json | 30 - .../tokenizer_2/tokenizer_config.json | 38 - .../stable_diffusion_3/tokenizer_2/vocab.json | 49410 - .../tokenizer_3/special_tokens_map.json | 125 - .../tokenizer_3/spiece.model | Bin 791656 -> 0 bytes .../tokenizer_3/tokenizer.json | 129428 -- .../tokenizer_3/tokenizer_config.json | 940 - .../tokenizer_2/merges.txt | 40213 - .../tokenizer_2/special_tokens_map.json | 24 - .../tokenizer_2/tokenizer_config.json | 38 - .../tokenizer_2/vocab.json | 49411 - diffsynth/trainers/__init__.py | 0 diffsynth/trainers/text_to_image.py | 318 - diffsynth/trainers/utils.py | 694 - diffsynth/utils/__init__.py | 275 - 
diffsynth/utils/controlnet/__init__.py | 2 + .../controlnet/annotator.py} | 0 .../utils/controlnet/controlnet_input.py | 13 + diffsynth/utils/lora/__init__.py | 1 + diffsynth/utils/lora/general.py | 62 + .../state_dict_converters}/__init__.py | 0 .../qwen_image_text_encoder.py | 10 + diffsynth/vram_management/__init__.py | 2 - diffsynth/vram_management/layers.py | 213 - docs/API_Reference/Environment_Variables.md | 35 + docs/API_Reference/core/attention.md | 73 + docs/API_Reference/core/data.md | 3 + .../API_Reference/core/gradient.md | 0 .../API_Reference/core/loader.md | 0 .../API_Reference/core/vram.md | 0 docs/Developer_Guide/Building_a_Pipeline.md | 243 + .../Developer_Guide/Integrating_Your_Model.md | 152 + .../Training_Diffusion_Models.md | 66 + .../__init__.py => docs/Overview.md | 0 examples/ArtAug/README.md | 43 - examples/ArtAug/artaug_flux.py | 14 - examples/CogVideoX/README.md | 39 - examples/CogVideoX/cogvideo_text_to_video.py | 73 - examples/ControlNet/README.md | 91 - examples/ControlNet/flux_controlnet.py | 299 - .../flux_controlnet_quantization.py | 447 - examples/Diffutoon/Diffutoon.ipynb | 512 - examples/Diffutoon/README.md | 21 - examples/Diffutoon/diffutoon_toon_shading.py | 100 - ...utoon_toon_shading_with_editing_signals.py | 204 - examples/Diffutoon/sd_toon_shading.py | 65 - examples/EntityControl/README.md | 90 - examples/EntityControl/entity_control.py | 83 - .../EntityControl/entity_control_ipadapter.py | 46 - examples/EntityControl/entity_inpaint.py | 45 - examples/EntityControl/entity_transfer.py | 84 - .../EntityControl/styled_entity_control.py | 90 - examples/EntityControl/utils.py | 59 - examples/ExVideo/ExVideo_cogvideox_test.py | 21 - examples/ExVideo/ExVideo_ema.py | 64 - examples/ExVideo/ExVideo_svd_test.py | 114 - examples/ExVideo/ExVideo_svd_train.py | 364 - examples/ExVideo/README.md | 89 - examples/HunyuanVideo/README.md | 33 - examples/HunyuanVideo/hunyuanvideo_24G.py | 42 - examples/HunyuanVideo/hunyuanvideo_6G.py | 52 - 
examples/HunyuanVideo/hunyuanvideo_80G.py | 45 - examples/HunyuanVideo/hunyuanvideo_i2v_24G.py | 43 - examples/HunyuanVideo/hunyuanvideo_i2v_80G.py | 45 - examples/HunyuanVideo/hunyuanvideo_v2v_6G.py | 55 - examples/InfiniteYou/README.md | 7 - examples/InfiniteYou/infiniteyou.py | 58 - examples/Ip-Adapter/README.md | 44 - examples/Ip-Adapter/flux_ipadapter.py | 38 - examples/Ip-Adapter/sd_ipadapter.py | 38 - examples/Ip-Adapter/sdxl_ipadapter.py | 61 - .../sdxl_ipadapter_multi_reference.py | 34 - examples/TeaCache/README.md | 34 - examples/TeaCache/flux_teacache.py | 15 - examples/TeaCache/hunyuanvideo_teacache.py | 42 - examples/diffsynth/README.md | 7 - examples/diffsynth/sd_video_rerender.py | 64 - examples/flux/README.md | 395 - examples/flux/README_zh.md | 396 - examples/flux/acceleration/teacache.py | 24 - .../flux/model_inference/FLEX.2-preview.py | 50 - .../model_inference/FLUX.1-Kontext-dev.py | 54 - .../flux/model_inference/FLUX.1-Krea-dev.py | 27 - .../model_inference/FLUX.1-dev-AttriCtrl.py | 19 - .../FLUX.1-dev-Controlnet-Inpainting-Beta.py | 37 - .../FLUX.1-dev-Controlnet-Union-alpha.py | 40 - .../FLUX.1-dev-Controlnet-Upscaler.py | 33 - .../flux/model_inference/FLUX.1-dev-EliGen.py | 147 - .../model_inference/FLUX.1-dev-IP-Adapter.py | 24 - .../model_inference/FLUX.1-dev-InfiniteYou.py | 59 - .../FLUX.1-dev-LoRA-Encoder.py | 40 - .../model_inference/FLUX.1-dev-LoRA-Fusion.py | 29 - examples/flux/model_inference/FLUX.1-dev.py | 26 - .../flux/model_inference/Nexus-Gen-Editing.py | 37 - .../model_inference/Nexus-Gen-Generation.py | 32 - examples/flux/model_inference/Step1X-Edit.py | 32 - .../FLEX.2-preview.py | 51 - .../FLUX.1-Kontext-dev.py | 55 - .../FLUX.1-Krea-dev.py | 28 - .../FLUX.1-dev-AttriCtrl.py | 20 - .../FLUX.1-dev-Controlnet-Inpainting-Beta.py | 38 - .../FLUX.1-dev-Controlnet-Union-alpha.py | 41 - .../FLUX.1-dev-Controlnet-Upscaler.py | 34 - .../FLUX.1-dev-EliGen.py | 148 - .../FLUX.1-dev-IP-Adapter.py | 25 - .../FLUX.1-dev-InfiniteYou.py | 
60 - .../FLUX.1-dev-LoRA-Encoder.py | 41 - .../FLUX.1-dev-LoRAFusion.py | 35 - .../model_inference_low_vram/FLUX.1-dev.py | 27 - .../Nexus-Gen-Editing.py | 38 - .../Nexus-Gen-Generation.py | 33 - .../model_inference_low_vram/Step1X-Edit.py | 33 - .../model_training/full/FLEX.2-preview.sh | 12 - .../model_training/full/FLUX.1-Kontext-dev.sh | 14 - .../model_training/full/FLUX.1-Krea-dev.sh | 12 - .../full/FLUX.1-dev-AttriCtrl.sh | 14 - .../FLUX.1-dev-Controlnet-Inpainting-Beta.sh | 14 - .../full/FLUX.1-dev-Controlnet-Union-alpha.sh | 14 - .../full/FLUX.1-dev-Controlnet-Upscaler.sh | 14 - .../full/FLUX.1-dev-IP-Adapter.sh | 14 - .../full/FLUX.1-dev-InfiniteYou.sh | 14 - .../full/FLUX.1-dev-LoRA-Encoder.sh | 14 - .../flux/model_training/full/FLUX.1-dev.sh | 12 - .../flux/model_training/full/Nexus-Gen.sh | 14 - .../flux/model_training/full/Step1X-Edit.sh | 14 - .../full/accelerate_config.yaml | 22 - .../full/accelerate_config_zero2offload.yaml | 22 - .../model_training/lora/FLEX.2-preview.sh | 15 - .../model_training/lora/FLUX.1-Kontext-dev.sh | 17 - .../model_training/lora/FLUX.1-Krea-dev.sh | 15 - .../lora/FLUX.1-dev-AttriCtrl.sh | 17 - .../FLUX.1-dev-Controlnet-Inpainting-Beta.sh | 17 - .../lora/FLUX.1-dev-Controlnet-Union-alpha.sh | 17 - .../lora/FLUX.1-dev-Controlnet-Upscaler.sh | 17 - .../model_training/lora/FLUX.1-dev-EliGen.sh | 17 - .../lora/FLUX.1-dev-IP-Adapter.sh | 17 - .../lora/FLUX.1-dev-InfiniteYou.sh | 17 - .../flux/model_training/lora/FLUX.1-dev.sh | 15 - .../flux/model_training/lora/Nexus-Gen.sh | 17 - .../flux/model_training/lora/Step1X-Edit.sh | 17 - examples/flux/model_training/train.py | 120 - .../validate_full/FLEX.2-preview.py | 20 - .../validate_full/FLUX.1-Kontext-dev.py | 26 - .../validate_full/FLUX.1-Krea-dev.py | 20 - .../validate_full/FLUX.1-dev-AttriCtrl.py | 21 - .../FLUX.1-dev-Controlnet-Inpainting-Beta.py | 31 - .../FLUX.1-dev-Controlnet-Union-alpha.py | 31 - .../FLUX.1-dev-Controlnet-Upscaler.py | 30 - 
.../validate_full/FLUX.1-dev-IP-Adapter.py | 28 - .../validate_full/FLUX.1-dev-InfiniteYou.py | 33 - .../validate_full/FLUX.1-dev-LoRA-Encoder.py | 25 - .../validate_full/FLUX.1-dev.py | 20 - .../model_training/validate_full/Nexus-Gen.py | 28 - .../validate_full/Step1X-Edit.py | 25 - .../validate_lora/FLEX.2-preview.py | 18 - .../validate_lora/FLUX.1-Kontext-dev.py | 24 - .../validate_lora/FLUX.1-Krea-dev.py | 18 - .../validate_lora/FLUX.1-dev-AttriCtrl.py | 19 - .../FLUX.1-dev-Controlnet-Inpainting-Beta.py | 29 - .../FLUX.1-dev-Controlnet-Union-alpha.py | 29 - .../FLUX.1-dev-Controlnet-Upscaler.py | 28 - .../validate_lora/FLUX.1-dev-EliGen.py | 33 - .../validate_lora/FLUX.1-dev-IP-Adapter.py | 26 - .../validate_lora/FLUX.1-dev-InfiniteYou.py | 28 - .../validate_lora/FLUX.1-dev.py | 18 - .../model_training/validate_lora/Nexus-Gen.py | 26 - .../validate_lora/Step1X-Edit.py | 23 - examples/image_quality_metric/README.md | 15 - .../image_quality_evaluation.py | 23 - examples/image_synthesis/README.md | 95 - .../image_synthesis/flex_text_to_image.py | 49 - .../image_synthesis/flux_text_to_image.py | 41 - .../flux_text_to_image_low_vram.py | 51 - .../hunyuan_dit_text_to_image.py | 42 - .../image_synthesis/kolors_text_to_image.py | 38 - .../kolors_with_sdxl_models.py | 68 - .../image_synthesis/omnigen_text_to_image.py | 25 - .../image_synthesis/sd35_text_to_image.py | 28 - examples/image_synthesis/sd3_text_to_image.py | 33 - examples/image_synthesis/sd_text_to_image.py | 75 - .../image_synthesis/sdxl_text_to_image.py | 34 - examples/image_synthesis/sdxl_turbo.py | 31 - examples/prompt_magic/README.md | 34 - examples/prompt_magic/bf_prompt_refining.py | 30 - .../prompt_magic/omost_flux_text_to_image.py | 35 - examples/prompt_magic/qwen_prompt_refining.py | 27 - examples/qwen_image/README.md | 404 - examples/qwen_image/README_zh.md | 404 - .../accelerate/Qwen-Image-FP8-offload.py | 18 - .../qwen_image/accelerate/Qwen-Image-FP8.py | 51 - 
.../model_inference/Qwen-Image-Edit-2509.py | 6 + .../model_inference/Qwen-Image-EliGen.py | 21 - .../Qwen-Image-In-Context-Control-Union.py | 2 +- .../Qwen-Image-Blockwise-ControlNet-Canny.py | 20 +- .../Qwen-Image-Blockwise-ControlNet-Depth.py | 20 +- ...Qwen-Image-Blockwise-ControlNet-Inpaint.py | 20 +- .../Qwen-Image-Distill-DMD2.py | 35 + .../Qwen-Image-Distill-Full.py | 18 +- .../Qwen-Image-Distill-LoRA.py | 21 +- .../Qwen-Image-Edit-2509.py | 25 +- .../Qwen-Image-Edit-Lowres-Fix.py | 22 +- .../Qwen-Image-Edit.py | 31 +- .../Qwen-Image-EliGen-Poster.py | 20 +- .../Qwen-Image-EliGen-V2.py | 21 +- .../Qwen-Image-EliGen.py | 41 +- .../Qwen-Image-In-Context-Control-Union.py | 20 +- .../model_inference_low_vram/Qwen-Image.py | 18 +- .../differential_training/Qwen-Image-LoRA.sh | 39 + .../special/fp8_training/Qwen-Image-LoRA.sh | 17 + .../special/fp8_training/validate.py | 18 + .../low_vram_training/Qwen-Image-LoRA.sh | 38 + .../split_training/Qwen-Image-LoRA.sh} | 32 +- .../special/split_training/validate.py | 18 + examples/qwen_image/model_training/train.py | 118 +- examples/qwen_image/test/run.py | 64 + examples/step1x/step1x.py | 35 - examples/stepvideo/README.md | 19 - examples/stepvideo/stepvideo_text_to_video.py | 50 - .../stepvideo_text_to_video_low_vram.py | 54 - .../stepvideo_text_to_video_quantized.py | 53 - examples/train/README.md | 569 - examples/train/flux/train_flux_lora.py | 124 - .../hunyuan_dit/train_hunyuan_dit_lora.py | 70 - examples/train/kolors/train_kolors_lora.py | 86 - .../train/stable_diffusion/train_sd_lora.py | 65 - .../stable_diffusion_3/train_sd3_lora.py | 83 - .../stable_diffusion_xl/train_sdxl_lora.py | 65 - examples/video_synthesis/README.md | 7 - examples/video_synthesis/sd_text_to_video.py | 39 - .../video_synthesis/sdxl_text_to_video.py | 28 - examples/video_synthesis/svd_text_to_video.py | 36 - examples/vram_management/README.md | 3 - .../vram_management/flux_text_to_image.py | 25 - examples/wanvideo/README.md | 470 - 
examples/wanvideo/README_zh.md | 473 - examples/wanvideo/acceleration/teacache.py | 27 - .../acceleration/unified_sequence_parallel.py | 27 - .../wanvideo/model_inference/LongCat-Video.py | 35 - .../Wan2.1-1.3b-speedcontrol-v1.py | 34 - .../model_inference/Wan2.1-FLF2V-14B-720P.py | 36 - .../Wan2.1-Fun-1.3B-Control.py | 34 - .../model_inference/Wan2.1-Fun-1.3B-InP.py | 36 - .../model_inference/Wan2.1-Fun-14B-Control.py | 34 - .../model_inference/Wan2.1-Fun-14B-InP.py | 36 - .../Wan2.1-Fun-V1.1-1.3B-Control-Camera.py | 44 - .../Wan2.1-Fun-V1.1-1.3B-Control.py | 36 - .../Wan2.1-Fun-V1.1-1.3B-InP.py | 36 - .../Wan2.1-Fun-V1.1-14B-Control-Camera.py | 44 - .../Wan2.1-Fun-V1.1-14B-Control.py | 36 - .../Wan2.1-Fun-V1.1-14B-InP.py | 36 - .../model_inference/Wan2.1-I2V-14B-480P.py | 34 - .../model_inference/Wan2.1-I2V-14B-720P.py | 35 - .../model_inference/Wan2.1-T2V-1.3B.py | 34 - .../model_inference/Wan2.1-T2V-14B.py | 24 - .../Wan2.1-VACE-1.3B-Preview.py | 52 - .../model_inference/Wan2.1-VACE-1.3B.py | 53 - .../model_inference/Wan2.1-VACE-14B.py | 54 - .../model_inference/Wan2.2-Animate-14B.py | 62 - .../Wan2.2-Fun-A14B-Control-Camera.py | 43 - .../Wan2.2-Fun-A14B-Control.py | 35 - .../model_inference/Wan2.2-Fun-A14B-InP.py | 35 - .../model_inference/Wan2.2-I2V-A14B.py | 33 - .../model_inference/Wan2.2-S2V-14B.py | 69 - .../Wan2.2-S2V-14B_multi_clips.py | 116 - .../model_inference/Wan2.2-T2V-A14B.py | 24 - .../model_inference/Wan2.2-TI2V-5B.py | 43 - .../model_inference/Wan2.2-VACE-Fun-A14B.py | 54 - .../model_inference/krea-realtime-video.py | 25 - .../model_training/full/LongCat-Video.sh | 12 - .../full/Wan2.1-1.3b-speedcontrol-v1.sh | 13 - .../full/Wan2.1-FLF2V-14B-720P.sh | 13 - .../full/Wan2.1-Fun-1.3B-Control.sh | 14 - .../full/Wan2.1-Fun-1.3B-InP.sh | 13 - .../full/Wan2.1-Fun-14B-Control.sh | 14 - .../model_training/full/Wan2.1-Fun-14B-InP.sh | 13 - .../Wan2.1-Fun-V1.1-1.3B-Control-Camera.sh | 13 - .../full/Wan2.1-Fun-V1.1-1.3B-Control.sh | 14 - 
.../full/Wan2.1-Fun-V1.1-1.3B-InP.sh | 13 - .../Wan2.1-Fun-V1.1-14B-Control-Camera.sh | 13 - .../full/Wan2.1-Fun-V1.1-14B-Control.sh | 14 - .../full/Wan2.1-Fun-V1.1-14B-InP.sh | 13 - .../full/Wan2.1-I2V-14B-480P.sh | 13 - .../full/Wan2.1-I2V-14B-720P.sh | 15 - .../model_training/full/Wan2.1-T2V-1.3B.sh | 12 - .../model_training/full/Wan2.1-T2V-14B.sh | 12 - .../full/Wan2.1-VACE-1.3B-Preview.sh | 16 - .../model_training/full/Wan2.1-VACE-1.3B.sh | 16 - .../model_training/full/Wan2.1-VACE-14B.sh | 16 - .../model_training/full/Wan2.2-Animate-14B.sh | 16 - .../full/Wan2.2-Fun-A14B-Control-Camera.sh | 35 - .../full/Wan2.2-Fun-A14B-Control.sh | 35 - .../full/Wan2.2-Fun-A14B-InP.sh | 33 - .../model_training/full/Wan2.2-I2V-A14B.sh | 37 - .../model_training/full/Wan2.2-S2V-14B.sh | 17 - .../model_training/full/Wan2.2-T2V-A14B.sh | 33 - .../model_training/full/Wan2.2-TI2V-5B.sh | 14 - .../full/Wan2.2-VACE-Fun-A14B.sh | 40 - .../full/accelerate_config_14B.yaml | 22 - .../full/krea-realtime-video.sh | 12 - .../wanvideo/model_training/full/run_test.py | 38 - .../model_training/lora/LongCat-Video.sh | 14 - .../lora/Wan2.1-1.3b-speedcontrol-v1.sh | 15 - .../lora/Wan2.1-FLF2V-14B-720P.sh | 15 - .../lora/Wan2.1-Fun-1.3B-Control.sh | 16 - .../lora/Wan2.1-Fun-1.3B-InP.sh | 15 - .../lora/Wan2.1-Fun-14B-Control.sh | 16 - .../model_training/lora/Wan2.1-Fun-14B-InP.sh | 15 - .../Wan2.1-Fun-V1.1-1.3B-Control-Camera.sh | 15 - .../lora/Wan2.1-Fun-V1.1-1.3B-Control.sh | 16 - .../lora/Wan2.1-Fun-V1.1-1.3B-InP.sh | 15 - .../Wan2.1-Fun-V1.1-14B-Control-Camera.sh | 15 - .../lora/Wan2.1-Fun-V1.1-14B-Control.sh | 16 - .../lora/Wan2.1-Fun-V1.1-14B-InP.sh | 15 - .../lora/Wan2.1-I2V-14B-480P.sh | 15 - .../lora/Wan2.1-I2V-14B-720P.sh | 17 - .../model_training/lora/Wan2.1-T2V-1.3B.sh | 14 - .../model_training/lora/Wan2.1-T2V-14B.sh | 14 - .../lora/Wan2.1-VACE-1.3B-Preview.sh | 17 - .../model_training/lora/Wan2.1-VACE-1.3B.sh | 17 - .../model_training/lora/Wan2.1-VACE-14B.sh | 18 - 
.../model_training/lora/Wan2.2-Animate-14B.sh | 20 - .../lora/Wan2.2-Fun-A14B-Control-Camera.sh | 39 - .../lora/Wan2.2-Fun-A14B-Control.sh | 39 - .../lora/Wan2.2-Fun-A14B-InP.sh | 37 - .../model_training/lora/Wan2.2-I2V-A14B.sh | 39 - .../model_training/lora/Wan2.2-S2V-14B.sh | 19 - .../model_training/lora/Wan2.2-T2V-A14B.sh | 38 - .../model_training/lora/Wan2.2-TI2V-5B.sh | 16 - .../lora/Wan2.2-VACE-Fun-A14B.sh | 43 - .../lora/krea-realtime-video.sh | 14 - .../wanvideo/model_training/lora/run_test.py | 25 - examples/wanvideo/model_training/train.py | 136 - .../validate_full/LongCat-Video.py | 25 - .../Wan2.1-1.3b-speedcontrol-v1.py | 28 - .../validate_full/Wan2.1-FLF2V-14B-720P.py | 33 - .../validate_full/Wan2.1-Fun-1.3B-Control.py | 32 - .../validate_full/Wan2.1-Fun-1.3B-InP.py | 31 - .../validate_full/Wan2.1-Fun-14B-Control.py | 32 - .../validate_full/Wan2.1-Fun-14B-InP.py | 31 - .../Wan2.1-Fun-V1.1-1.3B-Control-Camera.py | 32 - .../Wan2.1-Fun-V1.1-1.3B-Control.py | 33 - .../validate_full/Wan2.1-Fun-V1.1-1.3B-InP.py | 31 - .../Wan2.1-Fun-V1.1-14B-Control-Camera.py | 32 - .../Wan2.1-Fun-V1.1-14B-Control.py | 33 - .../validate_full/Wan2.1-Fun-V1.1-14B-InP.py | 31 - .../validate_full/Wan2.1-I2V-14B-480P.py | 30 - .../validate_full/Wan2.1-I2V-14B-720P.py | 31 - .../validate_full/Wan2.1-T2V-1.3B.py | 25 - .../validate_full/Wan2.1-T2V-14B.py | 25 - .../validate_full/Wan2.1-VACE-1.3B-Preview.py | 30 - .../validate_full/Wan2.1-VACE-1.3B.py | 30 - .../validate_full/Wan2.1-VACE-14B.py | 30 - .../validate_full/Wan2.2-Animate-14B.py | 33 - .../Wan2.2-Fun-A14B-Control-Camera.py | 34 - .../validate_full/Wan2.2-Fun-A14B-Control.py | 35 - .../validate_full/Wan2.2-Fun-A14B-InP.py | 32 - .../validate_full/Wan2.2-I2V-A14B.py | 33 - .../validate_full/Wan2.2-S2V-14B.py | 53 - .../validate_full/Wan2.2-T2V-A14B.py | 28 - .../validate_full/Wan2.2-TI2V-5B.py | 30 - .../validate_full/Wan2.2-VACE-Fun-A14B.py | 33 - .../validate_full/krea-realtime-video.py | 28 - 
.../model_training/validate_full/run_test.py | 25 - .../validate_lora/LongCat-Video.py | 24 - .../Wan2.1-1.3b-speedcontrol-v1.py | 27 - .../validate_lora/Wan2.1-FLF2V-14B-720P.py | 32 - .../validate_lora/Wan2.1-Fun-1.3B-Control.py | 31 - .../validate_lora/Wan2.1-Fun-1.3B-InP.py | 30 - .../validate_lora/Wan2.1-Fun-14B-Control.py | 31 - .../validate_lora/Wan2.1-Fun-14B-InP.py | 30 - .../Wan2.1-Fun-V1.1-1.3B-Control-Camera.py | 31 - .../Wan2.1-Fun-V1.1-1.3B-Control.py | 32 - .../validate_lora/Wan2.1-Fun-V1.1-1.3B-InP.py | 30 - .../Wan2.1-Fun-V1.1-14B-Control-Camera.py | 31 - .../Wan2.1-Fun-V1.1-14B-Control.py | 32 - .../validate_lora/Wan2.1-Fun-V1.1-14B-InP.py | 30 - .../validate_lora/Wan2.1-I2V-14B-480P.py | 29 - .../validate_lora/Wan2.1-I2V-14B-720P.py | 30 - .../validate_lora/Wan2.1-T2V-1.3B.py | 24 - .../validate_lora/Wan2.1-T2V-14B.py | 24 - .../validate_lora/Wan2.1-VACE-1.3B-Preview.py | 29 - .../validate_lora/Wan2.1-VACE-1.3B.py | 29 - .../validate_lora/Wan2.1-VACE-14B.py | 29 - .../validate_lora/Wan2.2-Animate-14B.py | 32 - .../Wan2.2-Fun-A14B-Control-Camera.py | 32 - .../validate_lora/Wan2.2-Fun-A14B-Control.py | 33 - .../validate_lora/Wan2.2-Fun-A14B-InP.py | 31 - .../validate_lora/Wan2.2-I2V-A14B.py | 31 - .../validate_lora/Wan2.2-S2V-14B.py | 51 - .../validate_lora/Wan2.2-T2V-A14B.py | 28 - .../validate_lora/Wan2.2-TI2V-5B.py | 29 - .../validate_lora/Wan2.2-VACE-Fun-A14B.py | 31 - .../validate_lora/krea-realtime-video.py | 28 - .../model_training/validate_lora/run_test.py | 25 - .../Put AnimateDiff ckpt files here.txt | 0 .../Put ControlNet annotators here.txt | 0 .../Put BeautifulPrompt models here.txt | 0 .../Put ControlNet pth files here.txt | 0 .../Put Stable Diffusion checkpoints here.txt | 0 .../Put Hunyuan DiT checkpoints here.txt | 0 .../Put IP-Adapter checkpoints here.txt | 0 models/RIFE/Put RIFE models here.txt | 0 models/kolors/Put Kolors checkpoints here.txt | 1 - models/lora/Put lora files here.txt | 0 .../Put Stable Diffusion checkpoints 
here.txt | 0 ...ut Stable Diffusion 3 checkpoints here.txt | 0 ...t Stable Diffusion XL checkpoints here.txt | 0 ...le Diffusion XL Turbo checkpoints here.txt | 0 ...table Video Diffusion checkpoints here.txt | 0 .../Put Textual Inversion files here.txt | 0 .../translator/Put translator models here.txt | 0 pyproject.toml | 38 + requirements.txt | 15 - setup.py | 30 - 664 files changed, 3581 insertions(+), 2237905 deletions(-) create mode 100644 .gitignore delete mode 100644 README_zh.md delete mode 100644 apps/gradio/DiffSynth_Studio.py delete mode 100644 apps/gradio/entity_level_control.py delete mode 100644 apps/gradio/qwen_image_eligen.py delete mode 100644 apps/streamlit/DiffSynth_Studio.py delete mode 100644 apps/streamlit/pages/1_Image_Creator.py delete mode 100644 apps/streamlit/pages/2_Video_Creator.py delete mode 100644 diffsynth/configs/model_config.py create mode 100644 diffsynth/configs/model_configs.py create mode 100644 diffsynth/configs/vram_management_module_maps.py delete mode 100644 diffsynth/controlnets/__init__.py delete mode 100644 diffsynth/controlnets/controlnet_unit.py create mode 100644 diffsynth/core/__init__.py create mode 100644 diffsynth/core/attention/__init__.py create mode 100644 diffsynth/core/attention/attention.py create mode 100644 diffsynth/core/data/__init__.py rename diffsynth/{trainers/unified_dataset.py => core/data/operators.py} (59%) create mode 100644 diffsynth/core/data/unified_dataset.py create mode 100644 diffsynth/core/gradient/__init__.py rename diffsynth/{vram_management/gradient_checkpointing.py => core/gradient/gradient_checkpoint.py} (100%) create mode 100644 diffsynth/core/loader/__init__.py create mode 100644 diffsynth/core/loader/config.py create mode 100644 diffsynth/core/loader/file.py create mode 100644 diffsynth/core/loader/model.py create mode 100644 diffsynth/core/vram/__init__.py create mode 100644 diffsynth/core/vram/disk_map.py create mode 100644 diffsynth/core/vram/initialization.py create mode 
100644 diffsynth/core/vram/layers.py delete mode 100644 diffsynth/data/__init__.py delete mode 100644 diffsynth/data/simple_text_image.py delete mode 100644 diffsynth/data/video.py create mode 100644 diffsynth/diffusion/__init__.py create mode 100644 diffsynth/diffusion/base_pipeline.py rename diffsynth/{schedulers => diffusion}/flow_match.py (99%) create mode 100644 diffsynth/diffusion/logger.py create mode 100644 diffsynth/diffusion/loss.py create mode 100644 diffsynth/diffusion/parsers.py create mode 100644 diffsynth/diffusion/runner.py create mode 100644 diffsynth/diffusion/training_module.py delete mode 100644 diffsynth/distributed/xdit_context_parallel.py delete mode 100644 diffsynth/extensions/ESRGAN/__init__.py delete mode 100644 diffsynth/extensions/FastBlend/__init__.py delete mode 100644 diffsynth/extensions/FastBlend/api.py delete mode 100644 diffsynth/extensions/FastBlend/cupy_kernels.py delete mode 100644 diffsynth/extensions/FastBlend/data.py delete mode 100644 diffsynth/extensions/FastBlend/patch_match.py delete mode 100644 diffsynth/extensions/FastBlend/runners/__init__.py delete mode 100644 diffsynth/extensions/FastBlend/runners/accurate.py delete mode 100644 diffsynth/extensions/FastBlend/runners/balanced.py delete mode 100644 diffsynth/extensions/FastBlend/runners/fast.py delete mode 100644 diffsynth/extensions/FastBlend/runners/interpolation.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/BLIP/__init__.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/BLIP/blip.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/BLIP/blip_pretrain.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/BLIP/med.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/BLIP/vit.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/__init__.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/aesthetic.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/clip.py delete mode 100644 
diffsynth/extensions/ImageQualityMetric/config.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/hps.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/imagereward.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/mps.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/__init__.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/coca_model.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/constants.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/factory.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/hf_configs.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/hf_model.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/loss.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/model.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/model_configs/ViT-H-14.json delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/modified_resnet.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/openai.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/pretrained.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/push_to_hf_hub.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/timm_model.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/tokenizer.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/transform.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/transformer.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/utils.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/open_clip/version.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/pickscore.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/trainer/__init__.py delete mode 100644 
diffsynth/extensions/ImageQualityMetric/trainer/models/__init__.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/trainer/models/base_model.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/trainer/models/clip_model.py delete mode 100644 diffsynth/extensions/ImageQualityMetric/trainer/models/cross_modeling.py delete mode 100644 diffsynth/extensions/RIFE/__init__.py delete mode 100644 diffsynth/lora/__init__.py delete mode 100644 diffsynth/lora/flux_lora.py delete mode 100644 diffsynth/models/__init__.py delete mode 100644 diffsynth/models/attention.py delete mode 100644 diffsynth/models/cog_dit.py delete mode 100644 diffsynth/models/cog_vae.py delete mode 100644 diffsynth/models/downloader.py delete mode 100644 diffsynth/models/flux_controlnet.py delete mode 100644 diffsynth/models/flux_dit.py delete mode 100644 diffsynth/models/flux_infiniteyou.py delete mode 100644 diffsynth/models/flux_ipadapter.py delete mode 100644 diffsynth/models/flux_lora_encoder.py delete mode 100644 diffsynth/models/flux_text_encoder.py delete mode 100644 diffsynth/models/flux_vae.py delete mode 100644 diffsynth/models/flux_value_control.py create mode 100644 diffsynth/models/general_modules.py delete mode 100644 diffsynth/models/hunyuan_dit.py delete mode 100644 diffsynth/models/hunyuan_dit_text_encoder.py delete mode 100644 diffsynth/models/hunyuan_video_dit.py delete mode 100644 diffsynth/models/hunyuan_video_text_encoder.py delete mode 100644 diffsynth/models/hunyuan_video_vae_decoder.py delete mode 100644 diffsynth/models/hunyuan_video_vae_encoder.py delete mode 100644 diffsynth/models/kolors_text_encoder.py delete mode 100644 diffsynth/models/longcat_video_dit.py delete mode 100644 diffsynth/models/lora.py create mode 100644 diffsynth/models/model_loader.py delete mode 100644 diffsynth/models/model_manager.py delete mode 100644 diffsynth/models/nexus_gen.py delete mode 100644 diffsynth/models/nexus_gen_ar_model.py delete mode 100644 
diffsynth/models/nexus_gen_projector.py delete mode 100644 diffsynth/models/omnigen.py delete mode 100644 diffsynth/models/qwenvl.py delete mode 100644 diffsynth/models/sd3_dit.py delete mode 100644 diffsynth/models/sd3_text_encoder.py delete mode 100644 diffsynth/models/sd3_vae_decoder.py delete mode 100644 diffsynth/models/sd3_vae_encoder.py delete mode 100644 diffsynth/models/sd_controlnet.py delete mode 100644 diffsynth/models/sd_ipadapter.py delete mode 100644 diffsynth/models/sd_motion.py delete mode 100644 diffsynth/models/sd_text_encoder.py delete mode 100644 diffsynth/models/sd_unet.py delete mode 100644 diffsynth/models/sd_vae_decoder.py delete mode 100644 diffsynth/models/sd_vae_encoder.py delete mode 100644 diffsynth/models/sdxl_controlnet.py delete mode 100644 diffsynth/models/sdxl_ipadapter.py delete mode 100644 diffsynth/models/sdxl_motion.py delete mode 100644 diffsynth/models/sdxl_text_encoder.py delete mode 100644 diffsynth/models/sdxl_unet.py delete mode 100644 diffsynth/models/sdxl_vae_decoder.py delete mode 100644 diffsynth/models/sdxl_vae_encoder.py delete mode 100644 diffsynth/models/step1x_connector.py delete mode 100644 diffsynth/models/stepvideo_dit.py delete mode 100644 diffsynth/models/stepvideo_text_encoder.py delete mode 100644 diffsynth/models/stepvideo_vae.py delete mode 100644 diffsynth/models/svd_image_encoder.py delete mode 100644 diffsynth/models/svd_unet.py delete mode 100644 diffsynth/models/svd_vae_decoder.py delete mode 100644 diffsynth/models/svd_vae_encoder.py delete mode 100644 diffsynth/models/tiler.py delete mode 100644 diffsynth/models/utils.py delete mode 100644 diffsynth/models/wan_video_animate_adapter.py delete mode 100644 diffsynth/models/wan_video_camera_controller.py delete mode 100644 diffsynth/models/wan_video_dit.py delete mode 100644 diffsynth/models/wan_video_dit_s2v.py delete mode 100644 diffsynth/models/wan_video_image_encoder.py delete mode 100644 diffsynth/models/wan_video_motion_controller.py delete 
mode 100644 diffsynth/models/wan_video_text_encoder.py delete mode 100644 diffsynth/models/wan_video_vace.py delete mode 100644 diffsynth/models/wan_video_vae.py delete mode 100644 diffsynth/models/wav2vec.py delete mode 100644 diffsynth/pipelines/__init__.py delete mode 100644 diffsynth/pipelines/base.py delete mode 100644 diffsynth/pipelines/cog_video.py delete mode 100644 diffsynth/pipelines/dancer.py delete mode 100644 diffsynth/pipelines/flux_image.py delete mode 100644 diffsynth/pipelines/flux_image_new.py delete mode 100644 diffsynth/pipelines/hunyuan_image.py delete mode 100644 diffsynth/pipelines/hunyuan_video.py delete mode 100644 diffsynth/pipelines/omnigen_image.py delete mode 100644 diffsynth/pipelines/pipeline_runner.py delete mode 100644 diffsynth/pipelines/sd3_image.py delete mode 100644 diffsynth/pipelines/sd_image.py delete mode 100644 diffsynth/pipelines/sd_video.py delete mode 100644 diffsynth/pipelines/sdxl_image.py delete mode 100644 diffsynth/pipelines/sdxl_video.py delete mode 100644 diffsynth/pipelines/step_video.py delete mode 100644 diffsynth/pipelines/svd_video.py delete mode 100644 diffsynth/pipelines/wan_video.py delete mode 100644 diffsynth/pipelines/wan_video_new.py delete mode 100644 diffsynth/processors/FastBlend.py delete mode 100644 diffsynth/processors/PILEditor.py delete mode 100644 diffsynth/processors/RIFE.py delete mode 100644 diffsynth/processors/base.py delete mode 100644 diffsynth/processors/sequencial_processor.py delete mode 100644 diffsynth/prompters/__init__.py delete mode 100644 diffsynth/prompters/base_prompter.py delete mode 100644 diffsynth/prompters/cog_prompter.py delete mode 100644 diffsynth/prompters/flux_prompter.py delete mode 100644 diffsynth/prompters/hunyuan_dit_prompter.py delete mode 100644 diffsynth/prompters/hunyuan_video_prompter.py delete mode 100644 diffsynth/prompters/kolors_prompter.py delete mode 100644 diffsynth/prompters/omnigen_prompter.py delete mode 100644 diffsynth/prompters/omost.py 
delete mode 100644 diffsynth/prompters/prompt_refiners.py delete mode 100644 diffsynth/prompters/sd3_prompter.py delete mode 100644 diffsynth/prompters/sd_prompter.py delete mode 100644 diffsynth/prompters/sdxl_prompter.py delete mode 100644 diffsynth/prompters/stepvideo_prompter.py delete mode 100644 diffsynth/prompters/wan_prompter.py delete mode 100644 diffsynth/schedulers/__init__.py delete mode 100644 diffsynth/schedulers/continuous_ode.py delete mode 100644 diffsynth/schedulers/ddim.py delete mode 100644 diffsynth/tokenizer_configs/cog/tokenizer/added_tokens.json delete mode 100644 diffsynth/tokenizer_configs/cog/tokenizer/special_tokens_map.json delete mode 100644 diffsynth/tokenizer_configs/cog/tokenizer/spiece.model delete mode 100644 diffsynth/tokenizer_configs/cog/tokenizer/tokenizer_config.json delete mode 100644 diffsynth/tokenizer_configs/flux/tokenizer_1/merges.txt delete mode 100644 diffsynth/tokenizer_configs/flux/tokenizer_1/special_tokens_map.json delete mode 100644 diffsynth/tokenizer_configs/flux/tokenizer_1/tokenizer_config.json delete mode 100644 diffsynth/tokenizer_configs/flux/tokenizer_1/vocab.json delete mode 100644 diffsynth/tokenizer_configs/flux/tokenizer_2/special_tokens_map.json delete mode 100644 diffsynth/tokenizer_configs/flux/tokenizer_2/spiece.model delete mode 100644 diffsynth/tokenizer_configs/flux/tokenizer_2/tokenizer.json delete mode 100644 diffsynth/tokenizer_configs/flux/tokenizer_2/tokenizer_config.json delete mode 100644 diffsynth/tokenizer_configs/hunyuan_dit/tokenizer/special_tokens_map.json delete mode 100644 diffsynth/tokenizer_configs/hunyuan_dit/tokenizer/tokenizer_config.json delete mode 100644 diffsynth/tokenizer_configs/hunyuan_dit/tokenizer/vocab.txt delete mode 100644 diffsynth/tokenizer_configs/hunyuan_dit/tokenizer/vocab_org.txt delete mode 100644 diffsynth/tokenizer_configs/hunyuan_dit/tokenizer_t5/config.json delete mode 100644 diffsynth/tokenizer_configs/hunyuan_dit/tokenizer_t5/special_tokens_map.json 
delete mode 100644 diffsynth/tokenizer_configs/hunyuan_dit/tokenizer_t5/spiece.model delete mode 100644 diffsynth/tokenizer_configs/hunyuan_dit/tokenizer_t5/tokenizer_config.json delete mode 100644 diffsynth/tokenizer_configs/hunyuan_video/tokenizer_1/merges.txt delete mode 100644 diffsynth/tokenizer_configs/hunyuan_video/tokenizer_1/special_tokens_map.json delete mode 100644 diffsynth/tokenizer_configs/hunyuan_video/tokenizer_1/tokenizer_config.json delete mode 100644 diffsynth/tokenizer_configs/hunyuan_video/tokenizer_1/vocab.json delete mode 100644 diffsynth/tokenizer_configs/hunyuan_video/tokenizer_2/preprocessor_config.json delete mode 100644 diffsynth/tokenizer_configs/hunyuan_video/tokenizer_2/special_tokens_map.json delete mode 100644 diffsynth/tokenizer_configs/hunyuan_video/tokenizer_2/tokenizer.json delete mode 100644 diffsynth/tokenizer_configs/hunyuan_video/tokenizer_2/tokenizer_config.json delete mode 100644 diffsynth/tokenizer_configs/kolors/tokenizer/tokenizer.model delete mode 100644 diffsynth/tokenizer_configs/kolors/tokenizer/tokenizer_config.json delete mode 100644 diffsynth/tokenizer_configs/kolors/tokenizer/vocab.txt delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion/tokenizer/merges.txt delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion/tokenizer/special_tokens_map.json delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion/tokenizer/tokenizer_config.json delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion/tokenizer/vocab.json delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion_3/tokenizer_1/merges.txt delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion_3/tokenizer_1/special_tokens_map.json delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion_3/tokenizer_1/tokenizer_config.json delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion_3/tokenizer_1/vocab.json delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion_3/tokenizer_2/merges.txt delete 
mode 100644 diffsynth/tokenizer_configs/stable_diffusion_3/tokenizer_2/special_tokens_map.json delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion_3/tokenizer_2/tokenizer_config.json delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion_3/tokenizer_2/vocab.json delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion_3/tokenizer_3/special_tokens_map.json delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion_3/tokenizer_3/spiece.model delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion_3/tokenizer_3/tokenizer.json delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion_3/tokenizer_3/tokenizer_config.json delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion_xl/tokenizer_2/merges.txt delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion_xl/tokenizer_2/special_tokens_map.json delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion_xl/tokenizer_2/tokenizer_config.json delete mode 100644 diffsynth/tokenizer_configs/stable_diffusion_xl/tokenizer_2/vocab.json delete mode 100644 diffsynth/trainers/__init__.py delete mode 100644 diffsynth/trainers/text_to_image.py delete mode 100644 diffsynth/trainers/utils.py delete mode 100644 diffsynth/utils/__init__.py create mode 100644 diffsynth/utils/controlnet/__init__.py rename diffsynth/{controlnets/processors.py => utils/controlnet/annotator.py} (100%) create mode 100644 diffsynth/utils/controlnet/controlnet_input.py create mode 100644 diffsynth/utils/lora/__init__.py create mode 100644 diffsynth/utils/lora/general.py rename diffsynth/{distributed => utils/state_dict_converters}/__init__.py (100%) create mode 100644 diffsynth/utils/state_dict_converters/qwen_image_text_encoder.py delete mode 100644 diffsynth/vram_management/__init__.py delete mode 100644 diffsynth/vram_management/layers.py create mode 100644 docs/API_Reference/Environment_Variables.md create mode 100644 docs/API_Reference/core/attention.md create mode 100644 
docs/API_Reference/core/data.md rename diffsynth/extensions/ImageQualityMetric/open_clip/generation_utils.py => docs/API_Reference/core/gradient.md (100%) rename diffsynth/extensions/__init__.py => docs/API_Reference/core/loader.md (100%) rename diffsynth/processors/__init__.py => docs/API_Reference/core/vram.md (100%) create mode 100644 docs/Developer_Guide/Building_a_Pipeline.md create mode 100644 docs/Developer_Guide/Integrating_Your_Model.md create mode 100644 docs/Developer_Guide/Training_Diffusion_Models.md rename diffsynth/tokenizer_configs/__init__.py => docs/Overview.md (100%) delete mode 100644 examples/ArtAug/README.md delete mode 100644 examples/ArtAug/artaug_flux.py delete mode 100644 examples/CogVideoX/README.md delete mode 100644 examples/CogVideoX/cogvideo_text_to_video.py delete mode 100644 examples/ControlNet/README.md delete mode 100644 examples/ControlNet/flux_controlnet.py delete mode 100644 examples/ControlNet/flux_controlnet_quantization.py delete mode 100644 examples/Diffutoon/Diffutoon.ipynb delete mode 100644 examples/Diffutoon/README.md delete mode 100644 examples/Diffutoon/diffutoon_toon_shading.py delete mode 100644 examples/Diffutoon/diffutoon_toon_shading_with_editing_signals.py delete mode 100644 examples/Diffutoon/sd_toon_shading.py delete mode 100644 examples/EntityControl/README.md delete mode 100644 examples/EntityControl/entity_control.py delete mode 100644 examples/EntityControl/entity_control_ipadapter.py delete mode 100644 examples/EntityControl/entity_inpaint.py delete mode 100644 examples/EntityControl/entity_transfer.py delete mode 100644 examples/EntityControl/styled_entity_control.py delete mode 100644 examples/EntityControl/utils.py delete mode 100644 examples/ExVideo/ExVideo_cogvideox_test.py delete mode 100644 examples/ExVideo/ExVideo_ema.py delete mode 100644 examples/ExVideo/ExVideo_svd_test.py delete mode 100644 examples/ExVideo/ExVideo_svd_train.py delete mode 100644 examples/ExVideo/README.md delete mode 100644 
examples/HunyuanVideo/README.md delete mode 100644 examples/HunyuanVideo/hunyuanvideo_24G.py delete mode 100644 examples/HunyuanVideo/hunyuanvideo_6G.py delete mode 100644 examples/HunyuanVideo/hunyuanvideo_80G.py delete mode 100644 examples/HunyuanVideo/hunyuanvideo_i2v_24G.py delete mode 100644 examples/HunyuanVideo/hunyuanvideo_i2v_80G.py delete mode 100644 examples/HunyuanVideo/hunyuanvideo_v2v_6G.py delete mode 100644 examples/InfiniteYou/README.md delete mode 100644 examples/InfiniteYou/infiniteyou.py delete mode 100644 examples/Ip-Adapter/README.md delete mode 100644 examples/Ip-Adapter/flux_ipadapter.py delete mode 100644 examples/Ip-Adapter/sd_ipadapter.py delete mode 100644 examples/Ip-Adapter/sdxl_ipadapter.py delete mode 100644 examples/Ip-Adapter/sdxl_ipadapter_multi_reference.py delete mode 100644 examples/TeaCache/README.md delete mode 100644 examples/TeaCache/flux_teacache.py delete mode 100644 examples/TeaCache/hunyuanvideo_teacache.py delete mode 100644 examples/diffsynth/README.md delete mode 100644 examples/diffsynth/sd_video_rerender.py delete mode 100644 examples/flux/README.md delete mode 100644 examples/flux/README_zh.md delete mode 100644 examples/flux/acceleration/teacache.py delete mode 100644 examples/flux/model_inference/FLEX.2-preview.py delete mode 100644 examples/flux/model_inference/FLUX.1-Kontext-dev.py delete mode 100644 examples/flux/model_inference/FLUX.1-Krea-dev.py delete mode 100644 examples/flux/model_inference/FLUX.1-dev-AttriCtrl.py delete mode 100644 examples/flux/model_inference/FLUX.1-dev-Controlnet-Inpainting-Beta.py delete mode 100644 examples/flux/model_inference/FLUX.1-dev-Controlnet-Union-alpha.py delete mode 100644 examples/flux/model_inference/FLUX.1-dev-Controlnet-Upscaler.py delete mode 100644 examples/flux/model_inference/FLUX.1-dev-EliGen.py delete mode 100644 examples/flux/model_inference/FLUX.1-dev-IP-Adapter.py delete mode 100644 examples/flux/model_inference/FLUX.1-dev-InfiniteYou.py delete mode 100644 
examples/flux/model_inference/FLUX.1-dev-LoRA-Encoder.py delete mode 100644 examples/flux/model_inference/FLUX.1-dev-LoRA-Fusion.py delete mode 100644 examples/flux/model_inference/FLUX.1-dev.py delete mode 100644 examples/flux/model_inference/Nexus-Gen-Editing.py delete mode 100644 examples/flux/model_inference/Nexus-Gen-Generation.py delete mode 100644 examples/flux/model_inference/Step1X-Edit.py delete mode 100644 examples/flux/model_inference_low_vram/FLEX.2-preview.py delete mode 100644 examples/flux/model_inference_low_vram/FLUX.1-Kontext-dev.py delete mode 100644 examples/flux/model_inference_low_vram/FLUX.1-Krea-dev.py delete mode 100644 examples/flux/model_inference_low_vram/FLUX.1-dev-AttriCtrl.py delete mode 100644 examples/flux/model_inference_low_vram/FLUX.1-dev-Controlnet-Inpainting-Beta.py delete mode 100644 examples/flux/model_inference_low_vram/FLUX.1-dev-Controlnet-Union-alpha.py delete mode 100644 examples/flux/model_inference_low_vram/FLUX.1-dev-Controlnet-Upscaler.py delete mode 100644 examples/flux/model_inference_low_vram/FLUX.1-dev-EliGen.py delete mode 100644 examples/flux/model_inference_low_vram/FLUX.1-dev-IP-Adapter.py delete mode 100644 examples/flux/model_inference_low_vram/FLUX.1-dev-InfiniteYou.py delete mode 100644 examples/flux/model_inference_low_vram/FLUX.1-dev-LoRA-Encoder.py delete mode 100644 examples/flux/model_inference_low_vram/FLUX.1-dev-LoRAFusion.py delete mode 100644 examples/flux/model_inference_low_vram/FLUX.1-dev.py delete mode 100644 examples/flux/model_inference_low_vram/Nexus-Gen-Editing.py delete mode 100644 examples/flux/model_inference_low_vram/Nexus-Gen-Generation.py delete mode 100644 examples/flux/model_inference_low_vram/Step1X-Edit.py delete mode 100644 examples/flux/model_training/full/FLEX.2-preview.sh delete mode 100644 examples/flux/model_training/full/FLUX.1-Kontext-dev.sh delete mode 100644 examples/flux/model_training/full/FLUX.1-Krea-dev.sh delete mode 100644 
examples/flux/model_training/full/FLUX.1-dev-AttriCtrl.sh delete mode 100644 examples/flux/model_training/full/FLUX.1-dev-Controlnet-Inpainting-Beta.sh delete mode 100644 examples/flux/model_training/full/FLUX.1-dev-Controlnet-Union-alpha.sh delete mode 100644 examples/flux/model_training/full/FLUX.1-dev-Controlnet-Upscaler.sh delete mode 100644 examples/flux/model_training/full/FLUX.1-dev-IP-Adapter.sh delete mode 100644 examples/flux/model_training/full/FLUX.1-dev-InfiniteYou.sh delete mode 100644 examples/flux/model_training/full/FLUX.1-dev-LoRA-Encoder.sh delete mode 100644 examples/flux/model_training/full/FLUX.1-dev.sh delete mode 100644 examples/flux/model_training/full/Nexus-Gen.sh delete mode 100644 examples/flux/model_training/full/Step1X-Edit.sh delete mode 100644 examples/flux/model_training/full/accelerate_config.yaml delete mode 100644 examples/flux/model_training/full/accelerate_config_zero2offload.yaml delete mode 100644 examples/flux/model_training/lora/FLEX.2-preview.sh delete mode 100644 examples/flux/model_training/lora/FLUX.1-Kontext-dev.sh delete mode 100644 examples/flux/model_training/lora/FLUX.1-Krea-dev.sh delete mode 100644 examples/flux/model_training/lora/FLUX.1-dev-AttriCtrl.sh delete mode 100644 examples/flux/model_training/lora/FLUX.1-dev-Controlnet-Inpainting-Beta.sh delete mode 100644 examples/flux/model_training/lora/FLUX.1-dev-Controlnet-Union-alpha.sh delete mode 100644 examples/flux/model_training/lora/FLUX.1-dev-Controlnet-Upscaler.sh delete mode 100644 examples/flux/model_training/lora/FLUX.1-dev-EliGen.sh delete mode 100644 examples/flux/model_training/lora/FLUX.1-dev-IP-Adapter.sh delete mode 100644 examples/flux/model_training/lora/FLUX.1-dev-InfiniteYou.sh delete mode 100644 examples/flux/model_training/lora/FLUX.1-dev.sh delete mode 100644 examples/flux/model_training/lora/Nexus-Gen.sh delete mode 100644 examples/flux/model_training/lora/Step1X-Edit.sh delete mode 100644 examples/flux/model_training/train.py delete mode 
100644 examples/flux/model_training/validate_full/FLEX.2-preview.py delete mode 100644 examples/flux/model_training/validate_full/FLUX.1-Kontext-dev.py delete mode 100644 examples/flux/model_training/validate_full/FLUX.1-Krea-dev.py delete mode 100644 examples/flux/model_training/validate_full/FLUX.1-dev-AttriCtrl.py delete mode 100644 examples/flux/model_training/validate_full/FLUX.1-dev-Controlnet-Inpainting-Beta.py delete mode 100644 examples/flux/model_training/validate_full/FLUX.1-dev-Controlnet-Union-alpha.py delete mode 100644 examples/flux/model_training/validate_full/FLUX.1-dev-Controlnet-Upscaler.py delete mode 100644 examples/flux/model_training/validate_full/FLUX.1-dev-IP-Adapter.py delete mode 100644 examples/flux/model_training/validate_full/FLUX.1-dev-InfiniteYou.py delete mode 100644 examples/flux/model_training/validate_full/FLUX.1-dev-LoRA-Encoder.py delete mode 100644 examples/flux/model_training/validate_full/FLUX.1-dev.py delete mode 100644 examples/flux/model_training/validate_full/Nexus-Gen.py delete mode 100644 examples/flux/model_training/validate_full/Step1X-Edit.py delete mode 100644 examples/flux/model_training/validate_lora/FLEX.2-preview.py delete mode 100644 examples/flux/model_training/validate_lora/FLUX.1-Kontext-dev.py delete mode 100644 examples/flux/model_training/validate_lora/FLUX.1-Krea-dev.py delete mode 100644 examples/flux/model_training/validate_lora/FLUX.1-dev-AttriCtrl.py delete mode 100644 examples/flux/model_training/validate_lora/FLUX.1-dev-Controlnet-Inpainting-Beta.py delete mode 100644 examples/flux/model_training/validate_lora/FLUX.1-dev-Controlnet-Union-alpha.py delete mode 100644 examples/flux/model_training/validate_lora/FLUX.1-dev-Controlnet-Upscaler.py delete mode 100644 examples/flux/model_training/validate_lora/FLUX.1-dev-EliGen.py delete mode 100644 examples/flux/model_training/validate_lora/FLUX.1-dev-IP-Adapter.py delete mode 100644 examples/flux/model_training/validate_lora/FLUX.1-dev-InfiniteYou.py 
delete mode 100644 examples/flux/model_training/validate_lora/FLUX.1-dev.py delete mode 100644 examples/flux/model_training/validate_lora/Nexus-Gen.py delete mode 100644 examples/flux/model_training/validate_lora/Step1X-Edit.py delete mode 100644 examples/image_quality_metric/README.md delete mode 100644 examples/image_quality_metric/image_quality_evaluation.py delete mode 100644 examples/image_synthesis/README.md delete mode 100644 examples/image_synthesis/flex_text_to_image.py delete mode 100644 examples/image_synthesis/flux_text_to_image.py delete mode 100644 examples/image_synthesis/flux_text_to_image_low_vram.py delete mode 100644 examples/image_synthesis/hunyuan_dit_text_to_image.py delete mode 100644 examples/image_synthesis/kolors_text_to_image.py delete mode 100644 examples/image_synthesis/kolors_with_sdxl_models.py delete mode 100644 examples/image_synthesis/omnigen_text_to_image.py delete mode 100644 examples/image_synthesis/sd35_text_to_image.py delete mode 100644 examples/image_synthesis/sd3_text_to_image.py delete mode 100644 examples/image_synthesis/sd_text_to_image.py delete mode 100644 examples/image_synthesis/sdxl_text_to_image.py delete mode 100644 examples/image_synthesis/sdxl_turbo.py delete mode 100644 examples/prompt_magic/README.md delete mode 100644 examples/prompt_magic/bf_prompt_refining.py delete mode 100644 examples/prompt_magic/omost_flux_text_to_image.py delete mode 100644 examples/prompt_magic/qwen_prompt_refining.py delete mode 100644 examples/qwen_image/README.md delete mode 100644 examples/qwen_image/README_zh.md delete mode 100644 examples/qwen_image/accelerate/Qwen-Image-FP8-offload.py delete mode 100644 examples/qwen_image/accelerate/Qwen-Image-FP8.py create mode 100644 examples/qwen_image/model_inference_low_vram/Qwen-Image-Distill-DMD2.py create mode 100644 examples/qwen_image/model_training/special/differential_training/Qwen-Image-LoRA.sh create mode 100644 
examples/qwen_image/model_training/special/fp8_training/Qwen-Image-LoRA.sh create mode 100644 examples/qwen_image/model_training/special/fp8_training/validate.py create mode 100644 examples/qwen_image/model_training/special/low_vram_training/Qwen-Image-LoRA.sh rename examples/qwen_image/model_training/{lora/Qwen-Image-Splited.sh => special/split_training/Qwen-Image-LoRA.sh} (63%) create mode 100644 examples/qwen_image/model_training/special/split_training/validate.py create mode 100644 examples/qwen_image/test/run.py delete mode 100644 examples/step1x/step1x.py delete mode 100644 examples/stepvideo/README.md delete mode 100644 examples/stepvideo/stepvideo_text_to_video.py delete mode 100644 examples/stepvideo/stepvideo_text_to_video_low_vram.py delete mode 100644 examples/stepvideo/stepvideo_text_to_video_quantized.py delete mode 100644 examples/train/README.md delete mode 100644 examples/train/flux/train_flux_lora.py delete mode 100644 examples/train/hunyuan_dit/train_hunyuan_dit_lora.py delete mode 100644 examples/train/kolors/train_kolors_lora.py delete mode 100644 examples/train/stable_diffusion/train_sd_lora.py delete mode 100644 examples/train/stable_diffusion_3/train_sd3_lora.py delete mode 100644 examples/train/stable_diffusion_xl/train_sdxl_lora.py delete mode 100644 examples/video_synthesis/README.md delete mode 100644 examples/video_synthesis/sd_text_to_video.py delete mode 100644 examples/video_synthesis/sdxl_text_to_video.py delete mode 100644 examples/video_synthesis/svd_text_to_video.py delete mode 100644 examples/vram_management/README.md delete mode 100644 examples/vram_management/flux_text_to_image.py delete mode 100644 examples/wanvideo/README.md delete mode 100644 examples/wanvideo/README_zh.md delete mode 100644 examples/wanvideo/acceleration/teacache.py delete mode 100644 examples/wanvideo/acceleration/unified_sequence_parallel.py delete mode 100644 examples/wanvideo/model_inference/LongCat-Video.py delete mode 100644 
examples/wanvideo/model_inference/Wan2.1-1.3b-speedcontrol-v1.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-FLF2V-14B-720P.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-Fun-1.3B-Control.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-Fun-1.3B-InP.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-Fun-14B-Control.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-Fun-14B-InP.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-1.3B-Control-Camera.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-1.3B-Control.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-1.3B-InP.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-14B-Control-Camera.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-14B-Control.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-14B-InP.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-I2V-14B-480P.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-I2V-14B-720P.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-T2V-1.3B.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-T2V-14B.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-VACE-1.3B-Preview.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-VACE-1.3B.py delete mode 100644 examples/wanvideo/model_inference/Wan2.1-VACE-14B.py delete mode 100644 examples/wanvideo/model_inference/Wan2.2-Animate-14B.py delete mode 100644 examples/wanvideo/model_inference/Wan2.2-Fun-A14B-Control-Camera.py delete mode 100644 examples/wanvideo/model_inference/Wan2.2-Fun-A14B-Control.py delete mode 100644 examples/wanvideo/model_inference/Wan2.2-Fun-A14B-InP.py delete mode 100644 examples/wanvideo/model_inference/Wan2.2-I2V-A14B.py delete mode 100644 examples/wanvideo/model_inference/Wan2.2-S2V-14B.py delete mode 100644 
examples/wanvideo/model_inference/Wan2.2-S2V-14B_multi_clips.py delete mode 100644 examples/wanvideo/model_inference/Wan2.2-T2V-A14B.py delete mode 100644 examples/wanvideo/model_inference/Wan2.2-TI2V-5B.py delete mode 100644 examples/wanvideo/model_inference/Wan2.2-VACE-Fun-A14B.py delete mode 100644 examples/wanvideo/model_inference/krea-realtime-video.py delete mode 100644 examples/wanvideo/model_training/full/LongCat-Video.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-1.3b-speedcontrol-v1.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-FLF2V-14B-720P.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-Fun-1.3B-Control.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-Fun-1.3B-InP.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-Fun-14B-Control.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-Fun-14B-InP.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-1.3B-Control-Camera.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-1.3B-Control.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-1.3B-InP.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-14B-Control-Camera.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-14B-Control.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-14B-InP.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-I2V-14B-480P.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-I2V-14B-720P.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-T2V-1.3B.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-T2V-14B.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-VACE-1.3B-Preview.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.1-VACE-1.3B.sh delete mode 100644 
examples/wanvideo/model_training/full/Wan2.1-VACE-14B.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.2-Animate-14B.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.2-Fun-A14B-Control-Camera.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.2-Fun-A14B-Control.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.2-Fun-A14B-InP.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.2-I2V-A14B.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.2-S2V-14B.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.2-T2V-A14B.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.2-TI2V-5B.sh delete mode 100644 examples/wanvideo/model_training/full/Wan2.2-VACE-Fun-A14B.sh delete mode 100644 examples/wanvideo/model_training/full/accelerate_config_14B.yaml delete mode 100644 examples/wanvideo/model_training/full/krea-realtime-video.sh delete mode 100644 examples/wanvideo/model_training/full/run_test.py delete mode 100644 examples/wanvideo/model_training/lora/LongCat-Video.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-1.3b-speedcontrol-v1.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-FLF2V-14B-720P.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-Fun-1.3B-Control.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-Fun-1.3B-InP.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-Fun-14B-Control.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-Fun-14B-InP.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-1.3B-Control-Camera.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-1.3B-Control.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-1.3B-InP.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-14B-Control-Camera.sh delete mode 100644 
examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-14B-Control.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-14B-InP.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-I2V-14B-480P.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-I2V-14B-720P.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-T2V-1.3B.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-T2V-14B.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-VACE-1.3B-Preview.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-VACE-1.3B.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.1-VACE-14B.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.2-Animate-14B.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.2-Fun-A14B-Control-Camera.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.2-Fun-A14B-Control.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.2-Fun-A14B-InP.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.2-I2V-A14B.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.2-S2V-14B.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.2-T2V-A14B.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.2-TI2V-5B.sh delete mode 100644 examples/wanvideo/model_training/lora/Wan2.2-VACE-Fun-A14B.sh delete mode 100644 examples/wanvideo/model_training/lora/krea-realtime-video.sh delete mode 100644 examples/wanvideo/model_training/lora/run_test.py delete mode 100644 examples/wanvideo/model_training/train.py delete mode 100644 examples/wanvideo/model_training/validate_full/LongCat-Video.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-1.3b-speedcontrol-v1.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-FLF2V-14B-720P.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-Fun-1.3B-Control.py delete 
mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-Fun-1.3B-InP.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-Fun-14B-Control.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-Fun-14B-InP.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-1.3B-Control-Camera.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-1.3B-Control.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-1.3B-InP.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-14B-Control-Camera.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-14B-Control.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-14B-InP.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-I2V-14B-480P.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-I2V-14B-720P.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-T2V-1.3B.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-T2V-14B.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-VACE-1.3B-Preview.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-VACE-1.3B.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.1-VACE-14B.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.2-Animate-14B.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.2-Fun-A14B-Control-Camera.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.2-Fun-A14B-Control.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.2-Fun-A14B-InP.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.2-I2V-A14B.py delete mode 100644 
examples/wanvideo/model_training/validate_full/Wan2.2-S2V-14B.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.2-T2V-A14B.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.2-TI2V-5B.py delete mode 100644 examples/wanvideo/model_training/validate_full/Wan2.2-VACE-Fun-A14B.py delete mode 100644 examples/wanvideo/model_training/validate_full/krea-realtime-video.py delete mode 100644 examples/wanvideo/model_training/validate_full/run_test.py delete mode 100644 examples/wanvideo/model_training/validate_lora/LongCat-Video.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-1.3b-speedcontrol-v1.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-FLF2V-14B-720P.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-1.3B-Control.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-1.3B-InP.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-14B-Control.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-14B-InP.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-1.3B-Control-Camera.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-1.3B-Control.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-1.3B-InP.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-14B-Control-Camera.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-14B-Control.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-14B-InP.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-I2V-14B-480P.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-I2V-14B-720P.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-T2V-1.3B.py delete mode 100644 
examples/wanvideo/model_training/validate_lora/Wan2.1-T2V-14B.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-VACE-1.3B-Preview.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-VACE-1.3B.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.1-VACE-14B.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.2-Animate-14B.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.2-Fun-A14B-Control-Camera.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.2-Fun-A14B-Control.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.2-Fun-A14B-InP.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.2-I2V-A14B.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.2-S2V-14B.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.2-T2V-A14B.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.2-TI2V-5B.py delete mode 100644 examples/wanvideo/model_training/validate_lora/Wan2.2-VACE-Fun-A14B.py delete mode 100644 examples/wanvideo/model_training/validate_lora/krea-realtime-video.py delete mode 100644 examples/wanvideo/model_training/validate_lora/run_test.py delete mode 100644 models/AnimateDiff/Put AnimateDiff ckpt files here.txt delete mode 100644 models/Annotators/Put ControlNet annotators here.txt delete mode 100644 models/BeautifulPrompt/Put BeautifulPrompt models here.txt delete mode 100644 models/ControlNet/Put ControlNet pth files here.txt delete mode 100644 models/FLUX/Put Stable Diffusion checkpoints here.txt delete mode 100644 models/HunyuanDiT/Put Hunyuan DiT checkpoints here.txt delete mode 100644 models/IpAdapter/Put IP-Adapter checkpoints here.txt delete mode 100644 models/RIFE/Put RIFE models here.txt delete mode 100644 models/kolors/Put Kolors checkpoints here.txt delete mode 100644 models/lora/Put lora files here.txt delete mode 
100644 models/stable_diffusion/Put Stable Diffusion checkpoints here.txt delete mode 100644 models/stable_diffusion_3/Put Stable Diffusion 3 checkpoints here.txt delete mode 100644 models/stable_diffusion_xl/Put Stable Diffusion XL checkpoints here.txt delete mode 100644 models/stable_diffusion_xl_turbo/Put Stable Diffusion XL Turbo checkpoints here.txt delete mode 100644 models/stable_video_diffusion/Put Stable Video Diffusion checkpoints here.txt delete mode 100644 models/textual_inversion/Put Textual Inversion files here.txt delete mode 100644 models/translator/Put translator models here.txt create mode 100644 pyproject.toml delete mode 100644 requirements.txt delete mode 100644 setup.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..830557b --- /dev/null +++ b/.gitignore @@ -0,0 +1,167 @@ +*.pkl +*.safetensors +*.pth +*.ckpt +*.pt +*.bin + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ \ No newline at end of file diff --git a/README.md b/README.md index 9d85ead..30404ce 100644 --- a/README.md +++ b/README.md @@ -1,519 +1 @@ -# DiffSynth-Studio - - modelscope%2FDiffSynth-Studio | Trendshift

- -[![PyPI](https://img.shields.io/pypi/v/DiffSynth)](https://pypi.org/project/DiffSynth/) -[![license](https://img.shields.io/github/license/modelscope/DiffSynth-Studio.svg)](https://github.com/modelscope/DiffSynth-Studio/blob/master/LICENSE) -[![open issues](https://isitmaintained.com/badge/open/modelscope/DiffSynth-Studio.svg)](https://github.com/modelscope/DiffSynth-Studio/issues) -[![GitHub pull-requests](https://img.shields.io/github/issues-pr/modelscope/DiffSynth-Studio.svg)](https://GitHub.com/modelscope/DiffSynth-Studio/pull/) -[![GitHub latest commit](https://badgen.net/github/last-commit/modelscope/DiffSynth-Studio)](https://GitHub.com/modelscope/DiffSynth-Studio/commit/) - -[切换到中文](./README_zh.md) - -## Introduction - -Welcome to the magic world of Diffusion models! DiffSynth-Studio is an open-source Diffusion model engine developed and maintained by [ModelScope](https://www.modelscope.cn/) team. We aim to foster technical innovation through framework development, bring together the power of the open-source community, and explore the limits of generative models! - -DiffSynth currently includes two open-source projects: -* [DiffSynth-Studio](https://github.com/modelscope/DiffSynth-Studio): Focused on aggressive technical exploration, for academia, providing support for more cutting-edge model capabilities. -* [DiffSynth-Engine](https://github.com/modelscope/DiffSynth-Engine): Focused on stable model deployment, for industry, offering higher computing performance and more stable features. - -[DiffSynth-Studio](https://github.com/modelscope/DiffSynth-Studio) and [DiffSynth-Engine](https://github.com/modelscope/DiffSynth-Engine) are the core projects behind ModelScope [AIGC zone](https://modelscope.cn/aigc/home), offering powerful AI content generation abilities. Come and try our carefully designed features and start your AI creation journey! 
- -## Installation - -Install from source (recommended): - -``` -git clone https://github.com/modelscope/DiffSynth-Studio.git -cd DiffSynth-Studio -pip install -e . -``` - -
-Other installation methods - -Install from PyPI (version updates may be delayed; for latest features, install from source) - -``` -pip install diffsynth -``` - -If you meet problems during installation, they might be caused by upstream dependencies. Please check the docs of these packages: - -* [torch](https://pytorch.org/get-started/locally/) -* [sentencepiece](https://github.com/google/sentencepiece) -* [cmake](https://cmake.org) -* [cupy](https://docs.cupy.dev/en/stable/install.html) - -
- -## Basic Framework - -DiffSynth-Studio redesigns the inference and training pipelines for mainstream Diffusion models (including FLUX, Wan, etc.), enabling efficient memory management and flexible model training. - -### Qwen-Image Series (🔥New Model) - -Details: [./examples/qwen_image/](./examples/qwen_image/) - -![Image](https://github.com/user-attachments/assets/738078d8-8749-4a53-a046-571861541924) - -
- -Quick Start - -```python -from diffsynth.pipelines.qwen_image import QwenImagePipeline, ModelConfig -from PIL import Image -import torch - -pipe = QwenImagePipeline.from_pretrained( - torch_dtype=torch.bfloat16, - device="cuda", - model_configs=[ - ModelConfig(model_id="Qwen/Qwen-Image", origin_file_pattern="transformer/diffusion_pytorch_model*.safetensors"), - ModelConfig(model_id="Qwen/Qwen-Image", origin_file_pattern="text_encoder/model*.safetensors"), - ModelConfig(model_id="Qwen/Qwen-Image", origin_file_pattern="vae/diffusion_pytorch_model.safetensors"), - ], - tokenizer_config=ModelConfig(model_id="Qwen/Qwen-Image", origin_file_pattern="tokenizer/"), -) -prompt = "A detailed portrait of a girl underwater, wearing a blue flowing dress, hair gently floating, clear light and shadow, surrounded by bubbles, calm expression, fine details, dreamy and beautiful." -image = pipe( - prompt, seed=0, num_inference_steps=40, - # edit_image=Image.open("xxx.jpg").resize((1328, 1328)) # For Qwen-Image-Edit -) -image.save("image.jpg") -``` - -
- -
- -Model Overview - -|Model ID|Inference|Low VRAM Inference|Full Training|Validation after Full Training|LoRA Training|Validation after LoRA Training| -|-|-|-|-|-|-|-| -|[Qwen/Qwen-Image](https://www.modelscope.cn/models/Qwen/Qwen-Image)|[code](./examples/qwen_image/model_inference/Qwen-Image.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image.py)|[code](./examples/qwen_image/model_training/full/Qwen-Image.sh)|[code](./examples/qwen_image/model_training/validate_full/Qwen-Image.py)|[code](./examples/qwen_image/model_training/lora/Qwen-Image.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image.py)| -|[Qwen/Qwen-Image-Edit](https://www.modelscope.cn/models/Qwen/Qwen-Image-Edit)|[code](./examples/qwen_image/model_inference/Qwen-Image-Edit.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-Edit.py)|[code](./examples/qwen_image/model_training/full/Qwen-Image-Edit.sh)|[code](./examples/qwen_image/model_training/validate_full/Qwen-Image-Edit.py)|[code](./examples/qwen_image/model_training/lora/Qwen-Image-Edit.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-Edit.py)| -|[Qwen/Qwen-Image-Edit-2509](https://www.modelscope.cn/models/Qwen/Qwen-Image-Edit-2509)|[code](./examples/qwen_image/model_inference/Qwen-Image-Edit-2509.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-Edit-2509.py)|[code](./examples/qwen_image/model_training/full/Qwen-Image-Edit-2509.sh)|[code](./examples/qwen_image/model_training/validate_full/Qwen-Image-Edit-2509.py)|[code](./examples/qwen_image/model_training/lora/Qwen-Image-Edit-2509.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-Edit-2509.py)| 
-|[DiffSynth-Studio/Qwen-Image-EliGen-V2](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-EliGen-V2)|[code](./examples/qwen_image/model_inference/Qwen-Image-EliGen-V2.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-EliGen-V2.py)|-|-|[code](./examples/qwen_image/model_training/lora/Qwen-Image-EliGen.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-EliGen.py)| -|[DiffSynth-Studio/Qwen-Image-EliGen-Poster](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-EliGen-Poster)|[code](./examples/qwen_image/model_inference/Qwen-Image-EliGen-Poster.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-EliGen-Poster.py)|-|-|[code](./examples/qwen_image/model_training/lora/Qwen-Image-EliGen-Poster.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-EliGen-Poster.py)| -|[DiffSynth-Studio/Qwen-Image-Distill-Full](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Distill-Full)|[code](./examples/qwen_image/model_inference/Qwen-Image-Distill-Full.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-Distill-Full.py)|[code](./examples/qwen_image/model_training/full/Qwen-Image-Distill-Full.sh)|[code](./examples/qwen_image/model_training/validate_full/Qwen-Image-Distill-Full.py)|[code](./examples/qwen_image/model_training/lora/Qwen-Image-Distill-Full.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-Distill-Full.py)| -|[DiffSynth-Studio/Qwen-Image-Distill-LoRA](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Distill-LoRA)|[code](./examples/qwen_image/model_inference/Qwen-Image-Distill-LoRA.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-Distill-LoRA.py)|-|-|[code](./examples/qwen_image/model_training/lora/Qwen-Image-Distill-LoRA.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-Distill-LoRA.py)| 
-|[DiffSynth-Studio/Qwen-Image-EliGen](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-EliGen)|[code](./examples/qwen_image/model_inference/Qwen-Image-EliGen.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-EliGen.py)|-|-|[code](./examples/qwen_image/model_training/lora/Qwen-Image-EliGen.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-EliGen.py)| -|[DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Canny](https://modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Canny)|[code](./examples/qwen_image/model_inference/Qwen-Image-Blockwise-ControlNet-Canny.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-Blockwise-ControlNet-Canny.py)|[code](./examples/qwen_image/model_training/full/Qwen-Image-Blockwise-ControlNet-Canny.sh)|[code](./examples/qwen_image/model_training/validate_full/Qwen-Image-Blockwise-ControlNet-Canny.py)|[code](./examples/qwen_image/model_training/lora/Qwen-Image-Blockwise-ControlNet-Canny.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-Blockwise-ControlNet-Canny.py)| -|[DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Depth](https://modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Depth)|[code](./examples/qwen_image/model_inference/Qwen-Image-Blockwise-ControlNet-Depth.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-Blockwise-ControlNet-Depth.py)|[code](./examples/qwen_image/model_training/full/Qwen-Image-Blockwise-ControlNet-Depth.sh)|[code](./examples/qwen_image/model_training/validate_full/Qwen-Image-Blockwise-ControlNet-Depth.py)|[code](./examples/qwen_image/model_training/lora/Qwen-Image-Blockwise-ControlNet-Depth.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-Blockwise-ControlNet-Depth.py)| 
-|[DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Inpaint](https://modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Inpaint)|[code](./examples/qwen_image/model_inference/Qwen-Image-Blockwise-ControlNet-Inpaint.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-Blockwise-ControlNet-Inpaint.py)|[code](./examples/qwen_image/model_training/full/Qwen-Image-Blockwise-ControlNet-Inpaint.sh)|[code](./examples/qwen_image/model_training/validate_full/Qwen-Image-Blockwise-ControlNet-Inpaint.py)|[code](./examples/qwen_image/model_training/lora/Qwen-Image-Blockwise-ControlNet-Inpaint.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-Blockwise-ControlNet-Inpaint.py)| -|[DiffSynth-Studio/Qwen-Image-In-Context-Control-Union](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-In-Context-Control-Union)|[code](./examples/qwen_image/model_inference/Qwen-Image-In-Context-Control-Union.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-In-Context-Control-Union.py)|-|-|[code](./examples/qwen_image/model_training/lora/Qwen-Image-In-Context-Control-Union.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-In-Context-Control-Union.py)| -|[DiffSynth-Studio/Qwen-Image-Edit-Lowres-Fix](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Edit-Lowres-Fix)|[code](./examples/qwen_image/model_inference/Qwen-Image-Edit-Lowres-Fix.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-Edit-Lowres-Fix.py)|-|-|-|-| - -
- -### FLUX Series - -Detail page: [./examples/flux/](./examples/flux/) - -![Image](https://github.com/user-attachments/assets/c01258e2-f251-441a-aa1e-ebb22f02594d) - -
- -Quick Start - -```python -import torch -from diffsynth.pipelines.flux_image_new import FluxImagePipeline, ModelConfig - -pipe = FluxImagePipeline.from_pretrained( - torch_dtype=torch.bfloat16, - device="cuda", - model_configs=[ - ModelConfig(model_id="black-forest-labs/FLUX.1-dev", origin_file_pattern="flux1-dev.safetensors"), - ModelConfig(model_id="black-forest-labs/FLUX.1-dev", origin_file_pattern="text_encoder/model.safetensors"), - ModelConfig(model_id="black-forest-labs/FLUX.1-dev", origin_file_pattern="text_encoder_2/"), - ModelConfig(model_id="black-forest-labs/FLUX.1-dev", origin_file_pattern="ae.safetensors"), - ], -) - -image = pipe(prompt="a cat", seed=0) -image.save("image.jpg") -``` - -
- -
- -Model Overview - -| Model ID | Extra Parameters | Inference | Low VRAM Inference | Full Training | Validate After Full Training | LoRA Training | Validate After LoRA Training | -|-|-|-|-|-|-|-|-| -|[FLUX.1-dev](https://www.modelscope.cn/models/black-forest-labs/FLUX.1-dev)||[code](./examples/flux/model_inference/FLUX.1-dev.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-dev.py)|[code](./examples/flux/model_training/full/FLUX.1-dev.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-dev.py)|[code](./examples/flux/model_training/lora/FLUX.1-dev.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-dev.py)| -|[FLUX.1-Krea-dev](https://www.modelscope.cn/models/black-forest-labs/FLUX.1-Krea-dev)||[code](./examples/flux/model_inference/FLUX.1-Krea-dev.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-Krea-dev.py)|[code](./examples/flux/model_training/full/FLUX.1-Krea-dev.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-Krea-dev.py)|[code](./examples/flux/model_training/lora/FLUX.1-Krea-dev.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-Krea-dev.py)| -|[FLUX.1-Kontext-dev](https://www.modelscope.cn/models/black-forest-labs/FLUX.1-Kontext-dev)|`kontext_images`|[code](./examples/flux/model_inference/FLUX.1-Kontext-dev.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-Kontext-dev.py)|[code](./examples/flux/model_training/full/FLUX.1-Kontext-dev.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-Kontext-dev.py)|[code](./examples/flux/model_training/lora/FLUX.1-Kontext-dev.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-Kontext-dev.py)| 
-|[FLUX.1-dev-Controlnet-Inpainting-Beta](https://www.modelscope.cn/models/alimama-creative/FLUX.1-dev-Controlnet-Inpainting-Beta)|`controlnet_inputs`|[code](./examples/flux/model_inference/FLUX.1-dev-Controlnet-Inpainting-Beta.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-dev-Controlnet-Inpainting-Beta.py)|[code](./examples/flux/model_training/full/FLUX.1-dev-Controlnet-Inpainting-Beta.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-dev-Controlnet-Inpainting-Beta.py)|[code](./examples/flux/model_training/lora/FLUX.1-dev-Controlnet-Inpainting-Beta.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-dev-Controlnet-Inpainting-Beta.py)| -|[FLUX.1-dev-Controlnet-Union-alpha](https://www.modelscope.cn/models/InstantX/FLUX.1-dev-Controlnet-Union-alpha)|`controlnet_inputs`|[code](./examples/flux/model_inference/FLUX.1-dev-Controlnet-Union-alpha.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-dev-Controlnet-Union-alpha.py)|[code](./examples/flux/model_training/full/FLUX.1-dev-Controlnet-Union-alpha.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-dev-Controlnet-Union-alpha.py)|[code](./examples/flux/model_training/lora/FLUX.1-dev-Controlnet-Union-alpha.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-dev-Controlnet-Union-alpha.py)| -|[FLUX.1-dev-Controlnet-Upscaler](https://www.modelscope.cn/models/jasperai/Flux.1-dev-Controlnet-Upscaler)|`controlnet_inputs`|[code](./examples/flux/model_inference/FLUX.1-dev-Controlnet-Upscaler.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-dev-Controlnet-Upscaler.py)|[code](./examples/flux/model_training/full/FLUX.1-dev-Controlnet-Upscaler.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-dev-Controlnet-Upscaler.py)|[code](./examples/flux/model_training/lora/FLUX.1-dev-Controlnet-Upscaler.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-dev-Controlnet-Upscaler.py)| 
-|[FLUX.1-dev-IP-Adapter](https://www.modelscope.cn/models/InstantX/FLUX.1-dev-IP-Adapter)|`ipadapter_images`, `ipadapter_scale`|[code](./examples/flux/model_inference/FLUX.1-dev-IP-Adapter.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-dev-IP-Adapter.py)|[code](./examples/flux/model_training/full/FLUX.1-dev-IP-Adapter.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-dev-IP-Adapter.py)|[code](./examples/flux/model_training/lora/FLUX.1-dev-IP-Adapter.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-dev-IP-Adapter.py)| -|[FLUX.1-dev-InfiniteYou](https://www.modelscope.cn/models/ByteDance/InfiniteYou)|`infinityou_id_image`, `infinityou_guidance`, `controlnet_inputs`|[code](./examples/flux/model_inference/FLUX.1-dev-InfiniteYou.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-dev-InfiniteYou.py)|[code](./examples/flux/model_training/full/FLUX.1-dev-InfiniteYou.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-dev-InfiniteYou.py)|[code](./examples/flux/model_training/lora/FLUX.1-dev-InfiniteYou.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-dev-InfiniteYou.py)| -|[FLUX.1-dev-EliGen](https://www.modelscope.cn/models/DiffSynth-Studio/Eligen)|`eligen_entity_prompts`, `eligen_entity_masks`, `eligen_enable_on_negative`, `eligen_enable_inpaint`|[code](./examples/flux/model_inference/FLUX.1-dev-EliGen.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-dev-EliGen.py)|-|-|[code](./examples/flux/model_training/lora/FLUX.1-dev-EliGen.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-dev-EliGen.py)| -|[FLUX.1-dev-LoRA-Encoder](https://www.modelscope.cn/models/DiffSynth-Studio/LoRA-Encoder-FLUX.1-Dev)|`lora_encoder_inputs`, 
`lora_encoder_scale`|[code](./examples/flux/model_inference/FLUX.1-dev-LoRA-Encoder.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-dev-LoRA-Encoder.py)|[code](./examples/flux/model_training/full/FLUX.1-dev-LoRA-Encoder.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-dev-LoRA-Encoder.py)|-|-| -|[FLUX.1-dev-LoRA-Fusion-Preview](https://modelscope.cn/models/DiffSynth-Studio/LoRAFusion-preview-FLUX.1-dev)||[code](./examples/flux/model_inference/FLUX.1-dev-LoRA-Fusion.py)|-|-|-|-|-| -|[Step1X-Edit](https://www.modelscope.cn/models/stepfun-ai/Step1X-Edit)|`step1x_reference_image`|[code](./examples/flux/model_inference/Step1X-Edit.py)|[code](./examples/flux/model_inference_low_vram/Step1X-Edit.py)|[code](./examples/flux/model_training/full/Step1X-Edit.sh)|[code](./examples/flux/model_training/validate_full/Step1X-Edit.py)|[code](./examples/flux/model_training/lora/Step1X-Edit.sh)|[code](./examples/flux/model_training/validate_lora/Step1X-Edit.py)| -|[FLEX.2-preview](https://www.modelscope.cn/models/ostris/Flex.2-preview)|`flex_inpaint_image`, `flex_inpaint_mask`, `flex_control_image`, `flex_control_strength`, `flex_control_stop`|[code](./examples/flux/model_inference/FLEX.2-preview.py)|[code](./examples/flux/model_inference_low_vram/FLEX.2-preview.py)|[code](./examples/flux/model_training/full/FLEX.2-preview.sh)|[code](./examples/flux/model_training/validate_full/FLEX.2-preview.py)|[code](./examples/flux/model_training/lora/FLEX.2-preview.sh)|[code](./examples/flux/model_training/validate_lora/FLEX.2-preview.py)| 
-|[Nexus-Gen](https://www.modelscope.cn/models/DiffSynth-Studio/Nexus-GenV2)|`nexus_gen_reference_image`|[code](./examples/flux/model_inference/Nexus-Gen-Editing.py)|[code](./examples/flux/model_inference_low_vram/Nexus-Gen-Editing.py)|[code](./examples/flux/model_training/full/Nexus-Gen.sh)|[code](./examples/flux/model_training/validate_full/Nexus-Gen.py)|[code](./examples/flux/model_training/lora/Nexus-Gen.sh)|[code](./examples/flux/model_training/validate_lora/Nexus-Gen.py)| - -
- - - -### Wan Series - -Detail page: [./examples/wanvideo/](./examples/wanvideo/) - -https://github.com/user-attachments/assets/1d66ae74-3b02-40a9-acc3-ea95fc039314 - -
- -Quick Start - -```python -import torch -from diffsynth import save_video -from diffsynth.pipelines.wan_video_new import WanVideoPipeline, ModelConfig - -pipe = WanVideoPipeline.from_pretrained( - torch_dtype=torch.bfloat16, - device="cuda", - model_configs=[ - ModelConfig(model_id="Wan-AI/Wan2.1-T2V-1.3B", origin_file_pattern="diffusion_pytorch_model*.safetensors", offload_device="cpu"), - ModelConfig(model_id="Wan-AI/Wan2.1-T2V-1.3B", origin_file_pattern="models_t5_umt5-xxl-enc-bf16.pth", offload_device="cpu"), - ModelConfig(model_id="Wan-AI/Wan2.1-T2V-1.3B", origin_file_pattern="Wan2.1_VAE.pth", offload_device="cpu"), - ], -) -pipe.enable_vram_management() - -video = pipe( - prompt="A documentary photography style scene: a lively puppy rapidly running on green grass. The puppy has brown-yellow fur, upright ears, and looks focused and joyful. Sunlight shines on its body, making the fur appear soft and shiny. The background is an open field with occasional wildflowers, and faint blue sky and clouds in the distance. Strong sense of perspective captures the motion of the puppy and the vitality of the surrounding grass. Mid-shot side-moving view.", - negative_prompt="Bright colors, overexposed, static, blurry details, subtitles, style, artwork, image, still, overall gray, worst quality, low quality, JPEG compression artifacts, ugly, deformed, extra fingers, poorly drawn hands, poorly drawn face, malformed limbs, fused fingers, still frame, messy background, three legs, crowded background people, walking backwards", - seed=0, tiled=True, -) -save_video(video, "video1.mp4", fps=15, quality=5) -``` - -
- -
- -Model Overview - -| Model ID | Extra Parameters | Inference | Full Training | Validate After Full Training | LoRA Training | Validate After LoRA Training | -|-|-|-|-|-|-|-| -|[Wan-AI/Wan2.2-Animate-14B](https://www.modelscope.cn/models/Wan-AI/Wan2.2-Animate-14B)|`input_image`, `animate_pose_video`, `animate_face_video`, `animate_inpaint_video`, `animate_mask_video`|[code](./examples/wanvideo/model_inference/Wan2.2-Animate-14B.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-Animate-14B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-Animate-14B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-Animate-14B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-Animate-14B.py)| -|[Wan-AI/Wan2.2-S2V-14B](https://www.modelscope.cn/models/Wan-AI/Wan2.2-S2V-14B)|`input_image`, `input_audio`, `audio_sample_rate`, `s2v_pose_video`|[code](./examples/wanvideo/model_inference/Wan2.2-S2V-14B_multi_clips.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-S2V-14B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-S2V-14B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-S2V-14B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-S2V-14B.py)| -|[Wan-AI/Wan2.2-I2V-A14B](https://modelscope.cn/models/Wan-AI/Wan2.2-I2V-A14B)|`input_image`|[code](./examples/wanvideo/model_inference/Wan2.2-I2V-A14B.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-I2V-A14B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-I2V-A14B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-I2V-A14B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-I2V-A14B.py)| 
-|[Wan-AI/Wan2.2-T2V-A14B](https://modelscope.cn/models/Wan-AI/Wan2.2-T2V-A14B)||[code](./examples/wanvideo/model_inference/Wan2.2-T2V-A14B.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-T2V-A14B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-T2V-A14B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-T2V-A14B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-T2V-A14B.py)| -|[Wan-AI/Wan2.2-TI2V-5B](https://modelscope.cn/models/Wan-AI/Wan2.2-TI2V-5B)|`input_image`|[code](./examples/wanvideo/model_inference/Wan2.2-TI2V-5B.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-TI2V-5B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-TI2V-5B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-TI2V-5B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-TI2V-5B.py)| -|[Wan-AI/Wan2.2-VACE-Fun-A14B](https://www.modelscope.cn/models/PAI/Wan2.2-VACE-Fun-A14B)|`vace_control_video`, `vace_reference_image`|[code](./examples/wanvideo/model_inference/Wan2.2-VACE-Fun-A14B.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-VACE-Fun-A14B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-VACE-Fun-A14B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-VACE-Fun-A14B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-VACE-Fun-A14B.py)| -|[PAI/Wan2.2-Fun-A14B-InP](https://modelscope.cn/models/PAI/Wan2.2-Fun-A14B-InP)|`input_image`, `end_image`|[code](./examples/wanvideo/model_inference/Wan2.2-Fun-A14B-InP.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-Fun-A14B-InP.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-Fun-A14B-InP.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-Fun-A14B-InP.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-Fun-A14B-InP.py)| -|[PAI/Wan2.2-Fun-A14B-Control](https://modelscope.cn/models/PAI/Wan2.2-Fun-A14B-Control)|`control_video`, 
`reference_image`|[code](./examples/wanvideo/model_inference/Wan2.2-Fun-A14B-Control.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-Fun-A14B-Control.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-Fun-A14B-Control.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-Fun-A14B-Control.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-Fun-A14B-Control.py)| -|[PAI/Wan2.2-Fun-A14B-Control-Camera](https://modelscope.cn/models/PAI/Wan2.2-Fun-A14B-Control-Camera)|`control_camera_video`, `input_image`|[code](./examples/wanvideo/model_inference/Wan2.2-Fun-A14B-Control-Camera.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-Fun-A14B-Control-Camera.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-Fun-A14B-Control-Camera.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-Fun-A14B-Control-Camera.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-Fun-A14B-Control-Camera.py)| -|[Wan-AI/Wan2.1-T2V-1.3B](https://modelscope.cn/models/Wan-AI/Wan2.1-T2V-1.3B)||[code](./examples/wanvideo/model_inference/Wan2.1-T2V-1.3B.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-T2V-1.3B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-T2V-1.3B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-T2V-1.3B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-T2V-1.3B.py)| -|[Wan-AI/Wan2.1-T2V-14B](https://modelscope.cn/models/Wan-AI/Wan2.1-T2V-14B)||[code](./examples/wanvideo/model_inference/Wan2.1-T2V-14B.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-T2V-14B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-T2V-14B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-T2V-14B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-T2V-14B.py)| 
-|[Wan-AI/Wan2.1-I2V-14B-480P](https://modelscope.cn/models/Wan-AI/Wan2.1-I2V-14B-480P)|`input_image`|[code](./examples/wanvideo/model_inference/Wan2.1-I2V-14B-480P.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-I2V-14B-480P.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-I2V-14B-480P.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-I2V-14B-480P.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-I2V-14B-480P.py)| -|[Wan-AI/Wan2.1-I2V-14B-720P](https://modelscope.cn/models/Wan-AI/Wan2.1-I2V-14B-720P)|`input_image`|[code](./examples/wanvideo/model_inference/Wan2.1-I2V-14B-720P.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-I2V-14B-720P.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-I2V-14B-720P.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-I2V-14B-720P.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-I2V-14B-720P.py)| -|[Wan-AI/Wan2.1-FLF2V-14B-720P](https://modelscope.cn/models/Wan-AI/Wan2.1-FLF2V-14B-720P)|`input_image`, `end_image`|[code](./examples/wanvideo/model_inference/Wan2.1-FLF2V-14B-720P.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-FLF2V-14B-720P.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-FLF2V-14B-720P.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-FLF2V-14B-720P.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-FLF2V-14B-720P.py)| -|[PAI/Wan2.1-Fun-1.3B-InP](https://modelscope.cn/models/PAI/Wan2.1-Fun-1.3B-InP)|`input_image`, `end_image`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-1.3B-InP.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-1.3B-InP.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-1.3B-InP.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-1.3B-InP.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-1.3B-InP.py)| 
-|[PAI/Wan2.1-Fun-1.3B-Control](https://modelscope.cn/models/PAI/Wan2.1-Fun-1.3B-Control)|`control_video`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-1.3B-Control.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-1.3B-Control.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-1.3B-Control.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-1.3B-Control.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-1.3B-Control.py)| -|[PAI/Wan2.1-Fun-14B-InP](https://modelscope.cn/models/PAI/Wan2.1-Fun-14B-InP)|`input_image`, `end_image`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-14B-InP.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-14B-InP.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-14B-InP.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-14B-InP.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-14B-InP.py)| -|[PAI/Wan2.1-Fun-14B-Control](https://modelscope.cn/models/PAI/Wan2.1-Fun-14B-Control)|`control_video`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-14B-Control.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-14B-Control.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-14B-Control.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-14B-Control.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-14B-Control.py)| -|[PAI/Wan2.1-Fun-V1.1-1.3B-Control](https://modelscope.cn/models/PAI/Wan2.1-Fun-V1.1-1.3B-Control)|`control_video`, 
`reference_image`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-1.3B-Control.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-1.3B-Control.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-1.3B-Control.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-1.3B-Control.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-1.3B-Control.py)| -|[PAI/Wan2.1-Fun-V1.1-14B-Control](https://modelscope.cn/models/PAI/Wan2.1-Fun-V1.1-14B-Control)|`control_video`, `reference_image`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-14B-Control.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-14B-Control.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-14B-Control.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-14B-Control.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-14B-Control.py)| -|[PAI/Wan2.1-Fun-V1.1-1.3B-InP](https://modelscope.cn/models/PAI/Wan2.1-Fun-V1.1-1.3B-InP)|`input_image`, `end_image`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-1.3B-InP.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-1.3B-InP.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-1.3B-InP.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-1.3B-InP.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-1.3B-InP.py)| -|[PAI/Wan2.1-Fun-V1.1-14B-InP](https://modelscope.cn/models/PAI/Wan2.1-Fun-V1.1-14B-InP)|`input_image`, 
`end_image`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-14B-InP.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-14B-InP.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-14B-InP.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-14B-InP.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-14B-InP.py)| -|[PAI/Wan2.1-Fun-V1.1-1.3B-Control-Camera](https://modelscope.cn/models/PAI/Wan2.1-Fun-V1.1-1.3B-Control-Camera)|`control_camera_video`, `input_image`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-1.3B-Control-Camera.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-1.3B-Control-Camera.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-1.3B-Control-Camera.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-1.3B-Control-Camera.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-1.3B-Control-Camera.py)| -|[PAI/Wan2.1-Fun-V1.1-14B-Control-Camera](https://modelscope.cn/models/PAI/Wan2.1-Fun-V1.1-14B-Control-Camera)|`control_camera_video`, `input_image`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-14B-Control-Camera.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-14B-Control-Camera.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-14B-Control-Camera.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-14B-Control-Camera.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-14B-Control-Camera.py)| -|[iic/VACE-Wan2.1-1.3B-Preview](https://modelscope.cn/models/iic/VACE-Wan2.1-1.3B-Preview)|`vace_control_video`, 
`vace_reference_image`|[code](./examples/wanvideo/model_inference/Wan2.1-VACE-1.3B-Preview.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-VACE-1.3B-Preview.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-VACE-1.3B-Preview.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-VACE-1.3B-Preview.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-VACE-1.3B-Preview.py)| -|[Wan-AI/Wan2.1-VACE-1.3B](https://modelscope.cn/models/Wan-AI/Wan2.1-VACE-1.3B)|`vace_control_video`, `vace_reference_image`|[code](./examples/wanvideo/model_inference/Wan2.1-VACE-1.3B.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-VACE-1.3B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-VACE-1.3B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-VACE-1.3B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-VACE-1.3B.py)| -|[Wan-AI/Wan2.1-VACE-14B](https://modelscope.cn/models/Wan-AI/Wan2.1-VACE-14B)|`vace_control_video`, `vace_reference_image`|[code](./examples/wanvideo/model_inference/Wan2.1-VACE-14B.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-VACE-14B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-VACE-14B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-VACE-14B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-VACE-14B.py)| -|[DiffSynth-Studio/Wan2.1-1.3b-speedcontrol-v1](https://modelscope.cn/models/DiffSynth-Studio/Wan2.1-1.3b-speedcontrol-v1)|`motion_bucket_id`|[code](./examples/wanvideo/model_inference/Wan2.1-1.3b-speedcontrol-v1.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-1.3b-speedcontrol-v1.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-1.3b-speedcontrol-v1.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-1.3b-speedcontrol-v1.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-1.3b-speedcontrol-v1.py)| 
-|[krea/krea-realtime-video](https://www.modelscope.cn/models/krea/krea-realtime-video)||[code](./examples/wanvideo/model_inference/krea-realtime-video.py)|[code](./examples/wanvideo/model_training/full/krea-realtime-video.sh)|[code](./examples/wanvideo/model_training/validate_full/krea-realtime-video.py)|[code](./examples/wanvideo/model_training/lora/krea-realtime-video.sh)|[code](./examples/wanvideo/model_training/validate_lora/krea-realtime-video.py)| -|[meituan-longcat/LongCat-Video](https://www.modelscope.cn/models/meituan-longcat/LongCat-Video)|`longcat_video`|[code](./examples/wanvideo/model_inference/LongCat-Video.py)|[code](./examples/wanvideo/model_training/full/LongCat-Video.sh)|[code](./examples/wanvideo/model_training/validate_full/LongCat-Video.py)|[code](./examples/wanvideo/model_training/lora/LongCat-Video.sh)|[code](./examples/wanvideo/model_training/validate_lora/LongCat-Video.py)| - -
- -### More Models - - - -
-Image Generation Models - -Detail page: [./examples/image_synthesis/](./examples/image_synthesis/) - -|FLUX|Stable Diffusion 3| -|-|-| -|![image_1024_cfg](https://github.com/user-attachments/assets/984561e9-553d-4952-9443-79ce144f379f)|![image_1024](https://github.com/modelscope/DiffSynth-Studio/assets/35051019/4df346db-6f91-420a-b4c1-26e205376098)| - -|Kolors|Hunyuan-DiT| -|-|-| -|![image_1024](https://github.com/modelscope/DiffSynth-Studio/assets/35051019/53ef6f41-da11-4701-8665-9f64392607bf)|![image_1024](https://github.com/modelscope/DiffSynth-Studio/assets/35051019/60b022c8-df3f-4541-95ab-bf39f2fa8bb5)| - -|Stable Diffusion|Stable Diffusion XL| -|-|-| -|![1024](https://github.com/Artiprocher/DiffSynth-Studio/assets/35051019/6fc84611-8da6-4a1f-8fee-9a34eba3b4a5)|![1024](https://github.com/Artiprocher/DiffSynth-Studio/assets/35051019/67687748-e738-438c-aee5-96096f09ac90)| - -
- - - -
-Video Generation Models - -- HunyuanVideo: [./examples/HunyuanVideo/](./examples/HunyuanVideo/) - -https://github.com/user-attachments/assets/48dd24bb-0cc6-40d2-88c3-10feed3267e9 - -- StepVideo: [./examples/stepvideo/](./examples/stepvideo/) - -https://github.com/user-attachments/assets/5954fdaa-a3cf-45a3-bd35-886e3cc4581b - -- CogVideoX: [./examples/CogVideoX/](./examples/CogVideoX/) - -https://github.com/user-attachments/assets/26b044c1-4a60-44a4-842f-627ff289d006 - -
- - - -
-Image Quality Assessment Models - -We have integrated a series of image quality assessment models. These models can be used for evaluating image generation models, alignment training, and similar tasks. - -Detail page: [./examples/image_quality_metric/](./examples/image_quality_metric/) - -* [ImageReward](https://github.com/THUDM/ImageReward) -* [Aesthetic](https://github.com/christophschuhmann/improved-aesthetic-predictor) -* [PickScore](https://github.com/yuvalkirstain/pickscore) -* [CLIP](https://github.com/openai/CLIP) -* [HPSv2](https://github.com/tgxs002/HPSv2) -* [HPSv2.1](https://github.com/tgxs002/HPSv2) -* [MPS](https://github.com/Kwai-Kolors/MPS) - -
- - - -## Innovative Achievements - -DiffSynth-Studio is not just an engineering model framework, but also a platform for incubating innovative results. - -
-Nexus-Gen: Unified Architecture for Image Understanding, Generation, and Editing - -- Detail page: https://github.com/modelscope/Nexus-Gen -- Paper: [Nexus-Gen: Unified Image Understanding, Generation, and Editing via Prefilled Autoregression in Shared Embedding Space](https://arxiv.org/pdf/2504.21356) -- Model: [ModelScope](https://www.modelscope.cn/models/DiffSynth-Studio/Nexus-GenV2), [HuggingFace](https://huggingface.co/modelscope/Nexus-GenV2) -- Dataset: [ModelScope Dataset](https://www.modelscope.cn/datasets/DiffSynth-Studio/Nexus-Gen-Training-Dataset) -- Online Demo: [ModelScope Nexus-Gen Studio](https://www.modelscope.cn/studios/DiffSynth-Studio/Nexus-Gen) - -![](https://github.com/modelscope/Nexus-Gen/raw/main/assets/illustrations/gen_edit.jpg) - -
- -
-ArtAug: Aesthetic Enhancement for Image Generation Models - -- Detail page: [./examples/ArtAug/](./examples/ArtAug/) -- Paper: [ArtAug: Enhancing Text-to-Image Generation through Synthesis-Understanding Interaction](https://arxiv.org/abs/2412.12888) -- Model: [ModelScope](https://www.modelscope.cn/models/DiffSynth-Studio/ArtAug-lora-FLUX.1dev-v1), [HuggingFace](https://huggingface.co/ECNU-CILab/ArtAug-lora-FLUX.1dev-v1) -- Online Demo: [ModelScope AIGC Tab](https://www.modelscope.cn/aigc/imageGeneration?tab=advanced&versionId=7228&modelType=LoRA&sdVersion=FLUX_1&modelUrl=modelscope%3A%2F%2FDiffSynth-Studio%2FArtAug-lora-FLUX.1dev-v1%3Frevision%3Dv1.0) - -|FLUX.1-dev|FLUX.1-dev + ArtAug LoRA| -|-|-| -|![image_1_base](https://github.com/user-attachments/assets/e1d5c505-b423-45fe-be01-25c2758f5417)|![image_1_enhance](https://github.com/user-attachments/assets/335908e3-d0bd-41c2-9d99-d10528a2d719)| - -
- -
-EliGen: Precise Image Region Control - -- Detail page: [./examples/EntityControl/](./examples/EntityControl/) -- Paper: [EliGen: Entity-Level Controlled Image Generation with Regional Attention](https://arxiv.org/abs/2501.01097) -- Model: [ModelScope](https://www.modelscope.cn/models/DiffSynth-Studio/Eligen), [HuggingFace](https://huggingface.co/modelscope/EliGen) -- Online Demo: [ModelScope EliGen Studio](https://www.modelscope.cn/studios/DiffSynth-Studio/EliGen) -- Dataset: [EliGen Train Set](https://www.modelscope.cn/datasets/DiffSynth-Studio/EliGenTrainSet) - -|Entity Control Mask|Generated Image| -|-|-| -|![eligen_example_2_mask_0](https://github.com/user-attachments/assets/1c6d9445-5022-4d91-ad2e-dc05321883d1)|![eligen_example_2_0](https://github.com/user-attachments/assets/86739945-cb07-4a49-b3b3-3bb65c90d14f)| - -
- -
-ExVideo: Extended Training for Video Generation Models - -- Project Page: [Project Page](https://ecnu-cilab.github.io/ExVideoProjectPage/) -- Paper: [ExVideo: Extending Video Diffusion Models via Parameter-Efficient Post-Tuning](https://arxiv.org/abs/2406.14130) -- Code Example: [./examples/ExVideo/](./examples/ExVideo/) -- Model: [ModelScope](https://modelscope.cn/models/ECNU-CILab/ExVideo-SVD-128f-v1), [HuggingFace](https://huggingface.co/ECNU-CILab/ExVideo-SVD-128f-v1) - -https://github.com/modelscope/DiffSynth-Studio/assets/35051019/d97f6aa9-8064-4b5b-9d49-ed6001bb9acc - -
- -
-Diffutoon: High-Resolution Anime-Style Video Rendering - -- Project Page: [Project Page](https://ecnu-cilab.github.io/DiffutoonProjectPage/) -- Paper: [Diffutoon: High-Resolution Editable Toon Shading via Diffusion Models](https://arxiv.org/abs/2401.16224) -- Code Example: [./examples/Diffutoon/](./examples/Diffutoon/) - -https://github.com/Artiprocher/DiffSynth-Studio/assets/35051019/b54c05c5-d747-4709-be5e-b39af82404dd - -
- -
-DiffSynth: The Initial Version of This Project - -- Project Page: [Project Page](https://ecnu-cilab.github.io/DiffSynth.github.io/) -- Paper: [DiffSynth: Latent In-Iteration Deflickering for Realistic Video Synthesis](https://arxiv.org/abs/2308.03463) -- Code Example: [./examples/diffsynth/](./examples/diffsynth/) - -https://github.com/Artiprocher/DiffSynth-Studio/assets/35051019/59fb2f7b-8de0-4481-b79f-0c3a7361a1ea - -
- - - -## Update History - -- **October 30, 2025**: We support [meituan-longcat/LongCat-Video](https://www.modelscope.cn/models/meituan-longcat/LongCat-Video) model, which enables text-to-video, image-to-video, and video continuation capabilities. This model adopts Wan's framework for both inference and training in this project. - -- **October 27, 2025**: We support [krea/krea-realtime-video](https://www.modelscope.cn/models/krea/krea-realtime-video) model, further expanding Wan's ecosystem. - -- **September 23, 2025** [DiffSynth-Studio/Qwen-Image-EliGen-Poster](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-EliGen-Poster) is released! This model is jointly developed and open-sourced by us and the Taobao Design Team. The model is built upon Qwen-Image, specifically designed for e-commerce poster scenarios, and supports precise partition layout control. Please refer to [our example code](./examples/qwen_image/model_inference/Qwen-Image-EliGen-Poster.py). - -- **September 9, 2025**: Our training framework now supports multiple training modes and has been adapted for Qwen-Image. In addition to the standard SFT training mode, Direct Distill is now also supported; please refer to [our example code](./examples/qwen_image/model_training/lora/Qwen-Image-Distill-LoRA.sh). This feature is experimental, and we will continue to improve it to support comprehensive model training capabilities. - -- **August 28, 2025** We support Wan2.2-S2V, an audio-driven cinematic video generation model open-sourced by Alibaba. See [./examples/wanvideo/](./examples/wanvideo/). - -- **August 21, 2025**: [DiffSynth-Studio/Qwen-Image-EliGen-V2](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-EliGen-V2) is released! 
Compared to the V1 version, the training dataset has been updated to the [Qwen-Image-Self-Generated-Dataset](https://www.modelscope.cn/datasets/DiffSynth-Studio/Qwen-Image-Self-Generated-Dataset), enabling generated images to better align with the inherent image distribution and style of Qwen-Image. Please refer to [our sample code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-EliGen-V2.py). - -- **August 21, 2025**: We open-sourced the [DiffSynth-Studio/Qwen-Image-In-Context-Control-Union](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-In-Context-Control-Union) structure control LoRA model. Following "In Context" routine, it supports various types of structural control conditions, including canny, depth, lineart, softedge, normal, and openpose. Please refer to [our sample code](./examples/qwen_image/model_inference/Qwen-Image-In-Context-Control-Union.py). - -- **August 20, 2025** We open-sourced [DiffSynth-Studio/Qwen-Image-Edit-Lowres-Fix](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Edit-Lowres-Fix), which improves the editing performance of Qwen-Image-Edit on low-resolution image inputs. Please refer to [our example code](./examples/qwen_image/model_inference/Qwen-Image-Edit-Lowres-Fix.py). - -- **August 19, 2025** 🔥 Qwen-Image-Edit is now open source. Welcome the new member to the image editing model family! - -- **August 18, 2025** We trained and open-sourced the Inpaint ControlNet model for Qwen-Image, [DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Inpaint](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Inpaint), which adopts a lightweight architectural design. Please refer to [our sample code](./examples/qwen_image/model_inference/Qwen-Image-Blockwise-ControlNet-Inpaint.py). - -- **August 15, 2025** We open-sourced the [Qwen-Image-Self-Generated-Dataset](https://www.modelscope.cn/datasets/DiffSynth-Studio/Qwen-Image-Self-Generated-Dataset). 
This is an image dataset generated using the Qwen-Image model, with a total of 160,000 `1024 x 1024` images. It includes the general, English text rendering, and Chinese text rendering subsets. We provide caption, entity, and control image annotations for each image. Developers can use this dataset to train models such as ControlNet and EliGen for the Qwen-Image model. We aim to promote technological development through open-source contributions! - -- **August 13, 2025** We trained and open-sourced the ControlNet model for Qwen-Image, [DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Depth](https://modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Depth), which adopts a lightweight architectural design. Please refer to [our sample code](./examples/qwen_image/model_inference/Qwen-Image-Blockwise-ControlNet-Depth.py). - -- **August 12, 2025** We trained and open-sourced the ControlNet model for Qwen-Image, [DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Canny](https://modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Canny), which adopts a lightweight architectural design. Please refer to [our sample code](./examples/qwen_image/model_inference/Qwen-Image-Blockwise-ControlNet-Canny.py). - -- **August 11, 2025** We released another distilled acceleration model for Qwen-Image, [DiffSynth-Studio/Qwen-Image-Distill-LoRA](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Distill-LoRA). It uses the same training process as [DiffSynth-Studio/Qwen-Image-Distill-Full](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Distill-Full), but the model structure is changed to LoRA. This makes it work better with other open-source models. - -- **August 7, 2025** We open-sourced the entity control LoRA of Qwen-Image, [DiffSynth-Studio/Qwen-Image-EliGen](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-EliGen). Qwen-Image-EliGen is able to achieve entity-level controlled text-to-image generation. 
See the [paper](https://arxiv.org/abs/2501.01097) for technical details. Training dataset: [EliGenTrainSet](https://www.modelscope.cn/datasets/DiffSynth-Studio/EliGenTrainSet). - -- **August 5, 2025** We open-sourced the distilled acceleration model of Qwen-Image, [DiffSynth-Studio/Qwen-Image-Distill-Full](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Distill-Full), achieving approximately 5x speedup. - -- **August 4, 2025** 🔥 Qwen-Image is now open source. Welcome the new member to the image generation model family! - -- **August 1, 2025** [FLUX.1-Krea-dev](https://www.modelscope.cn/models/black-forest-labs/FLUX.1-Krea-dev) with a focus on aesthetic photography is comprehensively supported, including low-GPU-memory layer-by-layer offload, LoRA training and full training. See [./examples/flux/](./examples/flux/). - -- **July 28, 2025** With the open-sourcing of Wan 2.2, we immediately provided comprehensive support, including low-GPU-memory layer-by-layer offload, FP8 quantization, sequence parallelism, LoRA training, full training. See [./examples/wanvideo/](./examples/wanvideo/). - -- **July 11, 2025** We propose Nexus-Gen, a unified model that synergizes the language reasoning capabilities of LLMs with the image synthesis power of diffusion models. This framework enables seamless image understanding, generation, and editing tasks. - - Paper: [Nexus-Gen: Unified Image Understanding, Generation, and Editing via Prefilled Autoregression in Shared Embedding Space](https://arxiv.org/pdf/2504.21356) - - Github Repo: https://github.com/modelscope/Nexus-Gen - - Model: [ModelScope](https://www.modelscope.cn/models/DiffSynth-Studio/Nexus-GenV2), [HuggingFace](https://huggingface.co/modelscope/Nexus-GenV2) - - Training Dataset: [ModelScope Dataset](https://www.modelscope.cn/datasets/DiffSynth-Studio/Nexus-Gen-Training-Dataset) - - Online Demo: [ModelScope Nexus-Gen Studio](https://www.modelscope.cn/studios/DiffSynth-Studio/Nexus-Gen) - -
-More - -- **June 15, 2025** ModelScope's official evaluation framework, [EvalScope](https://github.com/modelscope/evalscope), now supports text-to-image generation evaluation. Try it with the [Best Practices](https://evalscope.readthedocs.io/zh-cn/latest/best_practice/t2i_eval.html) guide. - -- **March 25, 2025** Our new open-source project, [DiffSynth-Engine](https://github.com/modelscope/DiffSynth-Engine), is now open-sourced! Focused on stable model deployment. Geared towards industry. Offers better engineering support, higher computational performance, and more stable functionality. - -- **March 31, 2025** We support InfiniteYou, an identity preserving method for FLUX. Please refer to [./examples/InfiniteYou/](./examples/InfiniteYou/) for more details. - -- **March 13, 2025** We support HunyuanVideo-I2V, the image-to-video generation version of HunyuanVideo open-sourced by Tencent. Please refer to [./examples/HunyuanVideo/](./examples/HunyuanVideo/) for more details. - -- **February 25, 2025** We support Wan-Video, a collection of SOTA video synthesis models open-sourced by Alibaba. See [./examples/wanvideo/](./examples/wanvideo/). - -- **February 17, 2025** We support [StepVideo](https://modelscope.cn/models/stepfun-ai/stepvideo-t2v/summary)! State-of-the-art video synthesis model! See [./examples/stepvideo](./examples/stepvideo/). - -- **December 31, 2024** We propose EliGen, a novel framework for precise entity-level controlled text-to-image generation, complemented by an inpainting fusion pipeline to extend its capabilities to image inpainting tasks. EliGen seamlessly integrates with existing community models, such as IP-Adapter and In-Context LoRA, enhancing its versatility. For more details, see [./examples/EntityControl](./examples/EntityControl/). 
- - Paper: [EliGen: Entity-Level Controlled Image Generation with Regional Attention](https://arxiv.org/abs/2501.01097) - - Model: [ModelScope](https://www.modelscope.cn/models/DiffSynth-Studio/Eligen), [HuggingFace](https://huggingface.co/modelscope/EliGen) - - Online Demo: [ModelScope EliGen Studio](https://www.modelscope.cn/studios/DiffSynth-Studio/EliGen) - - Training Dataset: [EliGen Train Set](https://www.modelscope.cn/datasets/DiffSynth-Studio/EliGenTrainSet) - -- **December 19, 2024** We implement advanced VRAM management for HunyuanVideo, making it possible to generate videos at a resolution of 129x720x1280 using 24GB of VRAM, or at 129x512x384 resolution with just 6GB of VRAM. Please refer to [./examples/HunyuanVideo/](./examples/HunyuanVideo/) for more details. - -- **December 18, 2024** We propose ArtAug, an approach designed to improve text-to-image synthesis models through synthesis-understanding interactions. We have trained an ArtAug enhancement module for FLUX.1-dev in the format of LoRA. This model integrates the aesthetic understanding of Qwen2-VL-72B into FLUX.1-dev, leading to an improvement in the quality of generated images. - - Paper: https://arxiv.org/abs/2412.12888 - - Examples: https://github.com/modelscope/DiffSynth-Studio/tree/main/examples/ArtAug - - Model: [ModelScope](https://www.modelscope.cn/models/DiffSynth-Studio/ArtAug-lora-FLUX.1dev-v1), [HuggingFace](https://huggingface.co/ECNU-CILab/ArtAug-lora-FLUX.1dev-v1) - - Demo: [ModelScope](https://modelscope.cn/aigc/imageGeneration?tab=advanced&versionId=7228&modelType=LoRA&sdVersion=FLUX_1&modelUrl=modelscope%3A%2F%2FDiffSynth-Studio%2FArtAug-lora-FLUX.1dev-v1%3Frevision%3Dv1.0), HuggingFace (Coming soon) - -- **October 25, 2024** We provide extensive FLUX ControlNet support. This project supports many different ControlNet models that can be freely combined, even if their structures differ. 
Additionally, ControlNet models are compatible with high-resolution refinement and partition control techniques, enabling very powerful controllable image generation. See [`./examples/ControlNet/`](./examples/ControlNet/). - -- **October 8, 2024.** We release the extended LoRA based on CogVideoX-5B and ExVideo. You can download this model from [ModelScope](https://modelscope.cn/models/ECNU-CILab/ExVideo-CogVideoX-LoRA-129f-v1) or [HuggingFace](https://huggingface.co/ECNU-CILab/ExVideo-CogVideoX-LoRA-129f-v1). - -- **August 22, 2024.** CogVideoX-5B is supported in this project. See [here](/examples/video_synthesis/). We provide several interesting features for this text-to-video model, including - - Text to video - - Video editing - - Self-upscaling - - Video interpolation - -- **August 22, 2024.** We have implemented an interesting painter that supports all text-to-image models. Now you can create stunning images using the painter, with assistance from AI! - - Use it in our [WebUI](#usage-in-webui). - -- **August 21, 2024.** FLUX is supported in DiffSynth-Studio. - - Enable CFG and highres-fix to improve visual quality. See [here](/examples/image_synthesis/README.md) - - LoRA, ControlNet, and additional models will be available soon. - -- **June 21, 2024.** We propose ExVideo, a post-tuning technique aimed at enhancing the capability of video generation models. We have extended Stable Video Diffusion to achieve the generation of long videos up to 128 frames. - - [Project Page](https://ecnu-cilab.github.io/ExVideoProjectPage/) - - Source code is released in this repo. See [`examples/ExVideo`](./examples/ExVideo/). - - Models are released on [HuggingFace](https://huggingface.co/ECNU-CILab/ExVideo-SVD-128f-v1) and [ModelScope](https://modelscope.cn/models/ECNU-CILab/ExVideo-SVD-128f-v1). - - Technical report is released on [arXiv](https://arxiv.org/abs/2406.14130). - - You can try ExVideo in this [Demo](https://huggingface.co/spaces/modelscope/ExVideo-SVD-128f-v1)! 
- -- **June 13, 2024.** DiffSynth Studio is transferred to ModelScope. The developers have transitioned from "I" to "we". Of course, I will still participate in development and maintenance. - -- **Jan 29, 2024.** We propose Diffutoon, a fantastic solution for toon shading. - - [Project Page](https://ecnu-cilab.github.io/DiffutoonProjectPage/) - - The source codes are released in this project. - - The technical report (IJCAI 2024) is released on [arXiv](https://arxiv.org/abs/2401.16224). - -- **Dec 8, 2023.** We decide to develop a new Project, aiming to release the potential of diffusion models, especially in video synthesis. The development of this project is started. - -- **Nov 15, 2023.** We propose FastBlend, a powerful video deflickering algorithm. - - The sd-webui extension is released on [GitHub](https://github.com/Artiprocher/sd-webui-fastblend). - - Demo videos are shown on Bilibili, including three tasks. - - [Video deflickering](https://www.bilibili.com/video/BV1d94y1W7PE) - - [Video interpolation](https://www.bilibili.com/video/BV1Lw411m71p) - - [Image-driven video rendering](https://www.bilibili.com/video/BV1RB4y1Z7LF) - - The technical report is released on [arXiv](https://arxiv.org/abs/2311.09265). - - An unofficial ComfyUI extension developed by other users is released on [GitHub](https://github.com/AInseven/ComfyUI-fastblend). - -- **Oct 1, 2023.** We release an early version of this project, namely FastSDXL. A try for building a diffusion engine. - - The source codes are released on [GitHub](https://github.com/Artiprocher/FastSDXL). - - FastSDXL includes a trainable OLSS scheduler for efficiency improvement. - - The original repo of OLSS is [here](https://github.com/alibaba/EasyNLP/tree/master/diffusion/olss_scheduler). - - The technical report (CIKM 2023) is released on [arXiv](https://arxiv.org/abs/2305.14677). - - A demo video is shown on [Bilibili](https://www.bilibili.com/video/BV1w8411y7uj). 
- - Since OLSS requires additional training, we don't implement it in this project. - -- **Aug 29, 2023.** We propose DiffSynth, a video synthesis framework. - - [Project Page](https://ecnu-cilab.github.io/DiffSynth.github.io/). - - The source codes are released in [EasyNLP](https://github.com/alibaba/EasyNLP/tree/master/diffusion/DiffSynth). - - The technical report (ECML PKDD 2024) is released on [arXiv](https://arxiv.org/abs/2308.03463). - -
\ No newline at end of file +TODO \ No newline at end of file diff --git a/README_zh.md b/README_zh.md deleted file mode 100644 index d6dc241..0000000 --- a/README_zh.md +++ /dev/null @@ -1,535 +0,0 @@ -# DiffSynth-Studio - - modelscope%2FDiffSynth-Studio | Trendshift

- -[![PyPI](https://img.shields.io/pypi/v/DiffSynth)](https://pypi.org/project/DiffSynth/) -[![license](https://img.shields.io/github/license/modelscope/DiffSynth-Studio.svg)](https://github.com/modelscope/DiffSynth-Studio/blob/master/LICENSE) -[![open issues](https://isitmaintained.com/badge/open/modelscope/DiffSynth-Studio.svg)](https://github.com/modelscope/DiffSynth-Studio/issues) -[![GitHub pull-requests](https://img.shields.io/github/issues-pr/modelscope/DiffSynth-Studio.svg)](https://GitHub.com/modelscope/DiffSynth-Studio/pull/) -[![GitHub latest commit](https://badgen.net/github/last-commit/modelscope/DiffSynth-Studio)](https://GitHub.com/modelscope/DiffSynth-Studio/commit/) - -[Switch to English](./README.md) - -## 简介 - -欢迎来到 Diffusion 模型的魔法世界!DiffSynth-Studio 是由[魔搭社区](https://www.modelscope.cn/)团队开发和维护的开源 Diffusion 模型引擎。我们期望以框架建设孵化技术创新,凝聚开源社区的力量,探索生成式模型技术的边界! - -DiffSynth 目前包括两个开源项目: -* [DiffSynth-Studio](https://github.com/modelscope/DiffSynth-Studio): 聚焦于激进的技术探索,面向学术界,提供更前沿的模型能力支持。 -* [DiffSynth-Engine](https://github.com/modelscope/DiffSynth-Engine): 聚焦于稳定的模型部署,面向工业界,提供更高的计算性能与更稳定的功能。 - -[DiffSynth-Studio](https://github.com/modelscope/DiffSynth-Studio) 与 [DiffSynth-Engine](https://github.com/modelscope/DiffSynth-Engine) 作为魔搭社区 [AIGC 专区](https://modelscope.cn/aigc/home) 的核心技术支撑,提供了强大的AI生成内容能力。欢迎体验我们精心打造的产品化功能,开启您的AI创作之旅! - -## 安装 - -从源码安装(推荐): - -``` -git clone https://github.com/modelscope/DiffSynth-Studio.git -cd DiffSynth-Studio -pip install -e . -``` - -
-其他安装方式 - -从 pypi 安装(存在版本更新延迟,如需使用最新功能,请从源码安装) - -``` -pip install diffsynth -``` - -如果在安装过程中遇到问题,可能是由上游依赖包导致的,请参考这些包的文档: - -* [torch](https://pytorch.org/get-started/locally/) -* [sentencepiece](https://github.com/google/sentencepiece) -* [cmake](https://cmake.org) -* [cupy](https://docs.cupy.dev/en/stable/install.html) - -
- - - -## 基础框架 - -DiffSynth-Studio 为主流 Diffusion 模型(包括 FLUX、Wan 等)重新设计了推理和训练流水线,能够实现高效的显存管理、灵活的模型训练。 - -### Qwen-Image 系列 (🔥新模型) - -详细页面:[./examples/qwen_image/](./examples/qwen_image/) - -![Image](https://github.com/user-attachments/assets/738078d8-8749-4a53-a046-571861541924) - -
- -快速开始 - -```python -from diffsynth.pipelines.qwen_image import QwenImagePipeline, ModelConfig -from PIL import Image -import torch - -pipe = QwenImagePipeline.from_pretrained( - torch_dtype=torch.bfloat16, - device="cuda", - model_configs=[ - ModelConfig(model_id="Qwen/Qwen-Image", origin_file_pattern="transformer/diffusion_pytorch_model*.safetensors"), - ModelConfig(model_id="Qwen/Qwen-Image", origin_file_pattern="text_encoder/model*.safetensors"), - ModelConfig(model_id="Qwen/Qwen-Image", origin_file_pattern="vae/diffusion_pytorch_model.safetensors"), - ], - tokenizer_config=ModelConfig(model_id="Qwen/Qwen-Image", origin_file_pattern="tokenizer/"), -) -prompt = "精致肖像,水下少女,蓝裙飘逸,发丝轻扬,光影透澈,气泡环绕,面容恬静,细节精致,梦幻唯美。" -image = pipe( - prompt, seed=0, num_inference_steps=40, - # edit_image=Image.open("xxx.jpg").resize((1328, 1328)) # For Qwen-Image-Edit -) -image.save("image.jpg") -``` - -
- -
- -模型总览 - -|模型 ID|推理|低显存推理|全量训练|全量训练后验证|LoRA 训练|LoRA 训练后验证| -|-|-|-|-|-|-|-| -|[Qwen/Qwen-Image](https://www.modelscope.cn/models/Qwen/Qwen-Image)|[code](./examples/qwen_image/model_inference/Qwen-Image.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image.py)|[code](./examples/qwen_image/model_training/full/Qwen-Image.sh)|[code](./examples/qwen_image/model_training/validate_full/Qwen-Image.py)|[code](./examples/qwen_image/model_training/lora/Qwen-Image.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image.py)| -|[Qwen/Qwen-Image-Edit](https://www.modelscope.cn/models/Qwen/Qwen-Image-Edit)|[code](./examples/qwen_image/model_inference/Qwen-Image-Edit.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-Edit.py)|[code](./examples/qwen_image/model_training/full/Qwen-Image-Edit.sh)|[code](./examples/qwen_image/model_training/validate_full/Qwen-Image-Edit.py)|[code](./examples/qwen_image/model_training/lora/Qwen-Image-Edit.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-Edit.py)| -|[Qwen/Qwen-Image-Edit-2509](https://www.modelscope.cn/models/Qwen/Qwen-Image-Edit-2509)|[code](./examples/qwen_image/model_inference/Qwen-Image-Edit-2509.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-Edit-2509.py)|[code](./examples/qwen_image/model_training/full/Qwen-Image-Edit-2509.sh)|[code](./examples/qwen_image/model_training/validate_full/Qwen-Image-Edit-2509.py)|[code](./examples/qwen_image/model_training/lora/Qwen-Image-Edit-2509.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-Edit-2509.py)| 
-|[DiffSynth-Studio/Qwen-Image-EliGen-V2](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-EliGen-V2)|[code](./examples/qwen_image/model_inference/Qwen-Image-EliGen-V2.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-EliGen-V2.py)|-|-|[code](./examples/qwen_image/model_training/lora/Qwen-Image-EliGen.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-EliGen.py)| -|[DiffSynth-Studio/Qwen-Image-EliGen-Poster](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-EliGen-Poster)|[code](./examples/qwen_image/model_inference/Qwen-Image-EliGen-Poster.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-EliGen-Poster.py)|-|-|[code](./examples/qwen_image/model_training/lora/Qwen-Image-EliGen-Poster.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-EliGen-Poster.py)| -|[DiffSynth-Studio/Qwen-Image-Distill-Full](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Distill-Full)|[code](./examples/qwen_image/model_inference/Qwen-Image-Distill-Full.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-Distill-Full.py)|[code](./examples/qwen_image/model_training/full/Qwen-Image-Distill-Full.sh)|[code](./examples/qwen_image/model_training/validate_full/Qwen-Image-Distill-Full.py)|[code](./examples/qwen_image/model_training/lora/Qwen-Image-Distill-Full.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-Distill-Full.py)| -|[DiffSynth-Studio/Qwen-Image-Distill-LoRA](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Distill-LoRA)|[code](./examples/qwen_image/model_inference/Qwen-Image-Distill-LoRA.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-Distill-LoRA.py)|-|-|[code](./examples/qwen_image/model_training/lora/Qwen-Image-Distill-LoRA.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-Distill-LoRA.py)| 
-|[DiffSynth-Studio/Qwen-Image-EliGen](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-EliGen)|[code](./examples/qwen_image/model_inference/Qwen-Image-EliGen.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-EliGen.py)|-|-|[code](./examples/qwen_image/model_training/lora/Qwen-Image-EliGen.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-EliGen.py)| -|[DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Canny](https://modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Canny)|[code](./examples/qwen_image/model_inference/Qwen-Image-Blockwise-ControlNet-Canny.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-Blockwise-ControlNet-Canny.py)|[code](./examples/qwen_image/model_training/full/Qwen-Image-Blockwise-ControlNet-Canny.sh)|[code](./examples/qwen_image/model_training/validate_full/Qwen-Image-Blockwise-ControlNet-Canny.py)|[code](./examples/qwen_image/model_training/lora/Qwen-Image-Blockwise-ControlNet-Canny.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-Blockwise-ControlNet-Canny.py)| -|[DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Depth](https://modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Depth)|[code](./examples/qwen_image/model_inference/Qwen-Image-Blockwise-ControlNet-Depth.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-Blockwise-ControlNet-Depth.py)|[code](./examples/qwen_image/model_training/full/Qwen-Image-Blockwise-ControlNet-Depth.sh)|[code](./examples/qwen_image/model_training/validate_full/Qwen-Image-Blockwise-ControlNet-Depth.py)|[code](./examples/qwen_image/model_training/lora/Qwen-Image-Blockwise-ControlNet-Depth.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-Blockwise-ControlNet-Depth.py)| 
-|[DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Inpaint](https://modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Inpaint)|[code](./examples/qwen_image/model_inference/Qwen-Image-Blockwise-ControlNet-Inpaint.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-Blockwise-ControlNet-Inpaint.py)|[code](./examples/qwen_image/model_training/full/Qwen-Image-Blockwise-ControlNet-Inpaint.sh)|[code](./examples/qwen_image/model_training/validate_full/Qwen-Image-Blockwise-ControlNet-Inpaint.py)|[code](./examples/qwen_image/model_training/lora/Qwen-Image-Blockwise-ControlNet-Inpaint.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-Blockwise-ControlNet-Inpaint.py)| -|[DiffSynth-Studio/Qwen-Image-In-Context-Control-Union](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-In-Context-Control-Union)|[code](./examples/qwen_image/model_inference/Qwen-Image-In-Context-Control-Union.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-In-Context-Control-Union.py)|-|-|[code](./examples/qwen_image/model_training/lora/Qwen-Image-In-Context-Control-Union.sh)|[code](./examples/qwen_image/model_training/validate_lora/Qwen-Image-In-Context-Control-Union.py)| -|[DiffSynth-Studio/Qwen-Image-Edit-Lowres-Fix](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Edit-Lowres-Fix)|[code](./examples/qwen_image/model_inference/Qwen-Image-Edit-Lowres-Fix.py)|[code](./examples/qwen_image/model_inference_low_vram/Qwen-Image-Edit-Lowres-Fix.py)|-|-|-|-| - -
- -### FLUX 系列 - -详细页面:[./examples/flux/](./examples/flux/) - -![Image](https://github.com/user-attachments/assets/c01258e2-f251-441a-aa1e-ebb22f02594d) - -
- -快速开始 - -```python -import torch -from diffsynth.pipelines.flux_image_new import FluxImagePipeline, ModelConfig - -pipe = FluxImagePipeline.from_pretrained( - torch_dtype=torch.bfloat16, - device="cuda", - model_configs=[ - ModelConfig(model_id="black-forest-labs/FLUX.1-dev", origin_file_pattern="flux1-dev.safetensors"), - ModelConfig(model_id="black-forest-labs/FLUX.1-dev", origin_file_pattern="text_encoder/model.safetensors"), - ModelConfig(model_id="black-forest-labs/FLUX.1-dev", origin_file_pattern="text_encoder_2/"), - ModelConfig(model_id="black-forest-labs/FLUX.1-dev", origin_file_pattern="ae.safetensors"), - ], -) - -image = pipe(prompt="a cat", seed=0) -image.save("image.jpg") -``` - -
- -
- -模型总览 - -|模型 ID|额外参数|推理|低显存推理|全量训练|全量训练后验证|LoRA 训练|LoRA 训练后验证| -|-|-|-|-|-|-|-|-| -|[FLUX.1-dev](https://www.modelscope.cn/models/black-forest-labs/FLUX.1-dev)||[code](./examples/flux/model_inference/FLUX.1-dev.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-dev.py)|[code](./examples/flux/model_training/full/FLUX.1-dev.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-dev.py)|[code](./examples/flux/model_training/lora/FLUX.1-dev.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-dev.py)| -|[FLUX.1-Krea-dev](https://www.modelscope.cn/models/black-forest-labs/FLUX.1-Krea-dev)||[code](./examples/flux/model_inference/FLUX.1-Krea-dev.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-Krea-dev.py)|[code](./examples/flux/model_training/full/FLUX.1-Krea-dev.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-Krea-dev.py)|[code](./examples/flux/model_training/lora/FLUX.1-Krea-dev.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-Krea-dev.py)| -|[FLUX.1-Kontext-dev](https://www.modelscope.cn/models/black-forest-labs/FLUX.1-Kontext-dev)|`kontext_images`|[code](./examples/flux/model_inference/FLUX.1-Kontext-dev.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-Kontext-dev.py)|[code](./examples/flux/model_training/full/FLUX.1-Kontext-dev.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-Kontext-dev.py)|[code](./examples/flux/model_training/lora/FLUX.1-Kontext-dev.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-Kontext-dev.py)| 
-|[FLUX.1-dev-Controlnet-Inpainting-Beta](https://www.modelscope.cn/models/alimama-creative/FLUX.1-dev-Controlnet-Inpainting-Beta)|`controlnet_inputs`|[code](./examples/flux/model_inference/FLUX.1-dev-Controlnet-Inpainting-Beta.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-dev-Controlnet-Inpainting-Beta.py)|[code](./examples/flux/model_training/full/FLUX.1-dev-Controlnet-Inpainting-Beta.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-dev-Controlnet-Inpainting-Beta.py)|[code](./examples/flux/model_training/lora/FLUX.1-dev-Controlnet-Inpainting-Beta.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-dev-Controlnet-Inpainting-Beta.py)| -|[FLUX.1-dev-Controlnet-Union-alpha](https://www.modelscope.cn/models/InstantX/FLUX.1-dev-Controlnet-Union-alpha)|`controlnet_inputs`|[code](./examples/flux/model_inference/FLUX.1-dev-Controlnet-Union-alpha.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-dev-Controlnet-Union-alpha.py)|[code](./examples/flux/model_training/full/FLUX.1-dev-Controlnet-Union-alpha.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-dev-Controlnet-Union-alpha.py)|[code](./examples/flux/model_training/lora/FLUX.1-dev-Controlnet-Union-alpha.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-dev-Controlnet-Union-alpha.py)| -|[FLUX.1-dev-Controlnet-Upscaler](https://www.modelscope.cn/models/jasperai/Flux.1-dev-Controlnet-Upscaler)|`controlnet_inputs`|[code](./examples/flux/model_inference/FLUX.1-dev-Controlnet-Upscaler.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-dev-Controlnet-Upscaler.py)|[code](./examples/flux/model_training/full/FLUX.1-dev-Controlnet-Upscaler.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-dev-Controlnet-Upscaler.py)|[code](./examples/flux/model_training/lora/FLUX.1-dev-Controlnet-Upscaler.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-dev-Controlnet-Upscaler.py)| 
-|[FLUX.1-dev-IP-Adapter](https://www.modelscope.cn/models/InstantX/FLUX.1-dev-IP-Adapter)|`ipadapter_images`, `ipadapter_scale`|[code](./examples/flux/model_inference/FLUX.1-dev-IP-Adapter.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-dev-IP-Adapter.py)|[code](./examples/flux/model_training/full/FLUX.1-dev-IP-Adapter.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-dev-IP-Adapter.py)|[code](./examples/flux/model_training/lora/FLUX.1-dev-IP-Adapter.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-dev-IP-Adapter.py)| -|[FLUX.1-dev-InfiniteYou](https://www.modelscope.cn/models/ByteDance/InfiniteYou)|`infinityou_id_image`, `infinityou_guidance`, `controlnet_inputs`|[code](./examples/flux/model_inference/FLUX.1-dev-InfiniteYou.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-dev-InfiniteYou.py)|[code](./examples/flux/model_training/full/FLUX.1-dev-InfiniteYou.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-dev-InfiniteYou.py)|[code](./examples/flux/model_training/lora/FLUX.1-dev-InfiniteYou.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-dev-InfiniteYou.py)| -|[FLUX.1-dev-EliGen](https://www.modelscope.cn/models/DiffSynth-Studio/Eligen)|`eligen_entity_prompts`, `eligen_entity_masks`, `eligen_enable_on_negative`, `eligen_enable_inpaint`|[code](./examples/flux/model_inference/FLUX.1-dev-EliGen.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-dev-EliGen.py)|-|-|[code](./examples/flux/model_training/lora/FLUX.1-dev-EliGen.sh)|[code](./examples/flux/model_training/validate_lora/FLUX.1-dev-EliGen.py)| -|[FLUX.1-dev-LoRA-Encoder](https://www.modelscope.cn/models/DiffSynth-Studio/LoRA-Encoder-FLUX.1-Dev)|`lora_encoder_inputs`, 
`lora_encoder_scale`|[code](./examples/flux/model_inference/FLUX.1-dev-LoRA-Encoder.py)|[code](./examples/flux/model_inference_low_vram/FLUX.1-dev-LoRA-Encoder.py)|[code](./examples/flux/model_training/full/FLUX.1-dev-LoRA-Encoder.sh)|[code](./examples/flux/model_training/validate_full/FLUX.1-dev-LoRA-Encoder.py)|-|-| -|[FLUX.1-dev-LoRA-Fusion-Preview](https://modelscope.cn/models/DiffSynth-Studio/LoRAFusion-preview-FLUX.1-dev)||[code](./examples/flux/model_inference/FLUX.1-dev-LoRA-Fusion.py)|-|-|-|-|-| -|[Step1X-Edit](https://www.modelscope.cn/models/stepfun-ai/Step1X-Edit)|`step1x_reference_image`|[code](./examples/flux/model_inference/Step1X-Edit.py)|[code](./examples/flux/model_inference_low_vram/Step1X-Edit.py)|[code](./examples/flux/model_training/full/Step1X-Edit.sh)|[code](./examples/flux/model_training/validate_full/Step1X-Edit.py)|[code](./examples/flux/model_training/lora/Step1X-Edit.sh)|[code](./examples/flux/model_training/validate_lora/Step1X-Edit.py)| -|[FLEX.2-preview](https://www.modelscope.cn/models/ostris/Flex.2-preview)|`flex_inpaint_image`, `flex_inpaint_mask`, `flex_control_image`, `flex_control_strength`, `flex_control_stop`|[code](./examples/flux/model_inference/FLEX.2-preview.py)|[code](./examples/flux/model_inference_low_vram/FLEX.2-preview.py)|[code](./examples/flux/model_training/full/FLEX.2-preview.sh)|[code](./examples/flux/model_training/validate_full/FLEX.2-preview.py)|[code](./examples/flux/model_training/lora/FLEX.2-preview.sh)|[code](./examples/flux/model_training/validate_lora/FLEX.2-preview.py)| 
-|[Nexus-Gen](https://www.modelscope.cn/models/DiffSynth-Studio/Nexus-GenV2)|`nexus_gen_reference_image`|[code](./examples/flux/model_inference/Nexus-Gen-Editing.py)|[code](./examples/flux/model_inference_low_vram/Nexus-Gen-Editing.py)|[code](./examples/flux/model_training/full/Nexus-Gen.sh)|[code](./examples/flux/model_training/validate_full/Nexus-Gen.py)|[code](./examples/flux/model_training/lora/Nexus-Gen.sh)|[code](./examples/flux/model_training/validate_lora/Nexus-Gen.py)| - -
- -### Wan 系列 - -详细页面:[./examples/wanvideo/](./examples/wanvideo/) - -https://github.com/user-attachments/assets/1d66ae74-3b02-40a9-acc3-ea95fc039314 - -
- -快速开始 - -```python -import torch -from diffsynth import save_video -from diffsynth.pipelines.wan_video_new import WanVideoPipeline, ModelConfig - -pipe = WanVideoPipeline.from_pretrained( - torch_dtype=torch.bfloat16, - device="cuda", - model_configs=[ - ModelConfig(model_id="Wan-AI/Wan2.1-T2V-1.3B", origin_file_pattern="diffusion_pytorch_model*.safetensors", offload_device="cpu"), - ModelConfig(model_id="Wan-AI/Wan2.1-T2V-1.3B", origin_file_pattern="models_t5_umt5-xxl-enc-bf16.pth", offload_device="cpu"), - ModelConfig(model_id="Wan-AI/Wan2.1-T2V-1.3B", origin_file_pattern="Wan2.1_VAE.pth", offload_device="cpu"), - ], -) -pipe.enable_vram_management() - -video = pipe( - prompt="纪实摄影风格画面,一只活泼的小狗在绿茵茵的草地上迅速奔跑。小狗毛色棕黄,两只耳朵立起,神情专注而欢快。阳光洒在它身上,使得毛发看上去格外柔软而闪亮。背景是一片开阔的草地,偶尔点缀着几朵野花,远处隐约可见蓝天和几片白云。透视感鲜明,捕捉小狗奔跑时的动感和四周草地的生机。中景侧面移动视角。", - negative_prompt="色调艳丽,过曝,静态,细节模糊不清,字幕,风格,作品,画作,画面,静止,整体发灰,最差质量,低质量,JPEG压缩残留,丑陋的,残缺的,多余的手指,画得不好的手部,画得不好的脸部,畸形的,毁容的,形态畸形的肢体,手指融合,静止不动的画面,杂乱的背景,三条腿,背景人很多,倒着走", - seed=0, tiled=True, -) -save_video(video, "video1.mp4", fps=15, quality=5) -``` - -
- -
- -模型总览 - -|模型 ID|额外参数|推理|全量训练|全量训练后验证|LoRA 训练|LoRA 训练后验证| -|-|-|-|-|-|-|-| -|[Wan-AI/Wan2.2-Animate-14B](https://www.modelscope.cn/models/Wan-AI/Wan2.2-Animate-14B)|`input_image`, `animate_pose_video`, `animate_face_video`, `animate_inpaint_video`, `animate_mask_video`|[code](./examples/wanvideo/model_inference/Wan2.2-Animate-14B.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-Animate-14B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-Animate-14B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-Animate-14B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-Animate-14B.py)| -|[Wan-AI/Wan2.2-S2V-14B](https://www.modelscope.cn/models/Wan-AI/Wan2.2-S2V-14B)|`input_image`, `input_audio`, `audio_sample_rate`, `s2v_pose_video`|[code](./examples/wanvideo/model_inference/Wan2.2-S2V-14B_multi_clips.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-S2V-14B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-S2V-14B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-S2V-14B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-S2V-14B.py)| -|[Wan-AI/Wan2.2-I2V-A14B](https://modelscope.cn/models/Wan-AI/Wan2.2-I2V-A14B)|`input_image`|[code](./examples/wanvideo/model_inference/Wan2.2-I2V-A14B.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-I2V-A14B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-I2V-A14B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-I2V-A14B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-I2V-A14B.py)| 
-|[Wan-AI/Wan2.2-T2V-A14B](https://modelscope.cn/models/Wan-AI/Wan2.2-T2V-A14B)||[code](./examples/wanvideo/model_inference/Wan2.2-T2V-A14B.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-T2V-A14B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-T2V-A14B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-T2V-A14B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-T2V-A14B.py)| -|[Wan-AI/Wan2.2-TI2V-5B](https://modelscope.cn/models/Wan-AI/Wan2.2-TI2V-5B)|`input_image`|[code](./examples/wanvideo/model_inference/Wan2.2-TI2V-5B.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-TI2V-5B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-TI2V-5B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-TI2V-5B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-TI2V-5B.py)| -|[Wan-AI/Wan2.2-VACE-Fun-A14B](https://www.modelscope.cn/models/PAI/Wan2.2-VACE-Fun-A14B)|`vace_control_video`, `vace_reference_image`|[code](./examples/wanvideo/model_inference/Wan2.2-VACE-Fun-A14B.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-VACE-Fun-A14B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-VACE-Fun-A14B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-VACE-Fun-A14B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-VACE-Fun-A14B.py)| -|[PAI/Wan2.2-Fun-A14B-InP](https://modelscope.cn/models/PAI/Wan2.2-Fun-A14B-InP)|`input_image`, `end_image`|[code](./examples/wanvideo/model_inference/Wan2.2-Fun-A14B-InP.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-Fun-A14B-InP.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-Fun-A14B-InP.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-Fun-A14B-InP.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-Fun-A14B-InP.py)| -|[PAI/Wan2.2-Fun-A14B-Control](https://modelscope.cn/models/PAI/Wan2.2-Fun-A14B-Control)|`control_video`, 
`reference_image`|[code](./examples/wanvideo/model_inference/Wan2.2-Fun-A14B-Control.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-Fun-A14B-Control.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-Fun-A14B-Control.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-Fun-A14B-Control.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-Fun-A14B-Control.py)| -|[PAI/Wan2.2-Fun-A14B-Control-Camera](https://modelscope.cn/models/PAI/Wan2.2-Fun-A14B-Control-Camera)|`control_camera_video`, `input_image`|[code](./examples/wanvideo/model_inference/Wan2.2-Fun-A14B-Control-Camera.py)|[code](./examples/wanvideo/model_training/full/Wan2.2-Fun-A14B-Control-Camera.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.2-Fun-A14B-Control-Camera.py)|[code](./examples/wanvideo/model_training/lora/Wan2.2-Fun-A14B-Control-Camera.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.2-Fun-A14B-Control-Camera.py)| -|[Wan-AI/Wan2.1-T2V-1.3B](https://modelscope.cn/models/Wan-AI/Wan2.1-T2V-1.3B)||[code](./examples/wanvideo/model_inference/Wan2.1-T2V-1.3B.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-T2V-1.3B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-T2V-1.3B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-T2V-1.3B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-T2V-1.3B.py)| -|[Wan-AI/Wan2.1-T2V-14B](https://modelscope.cn/models/Wan-AI/Wan2.1-T2V-14B)||[code](./examples/wanvideo/model_inference/Wan2.1-T2V-14B.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-T2V-14B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-T2V-14B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-T2V-14B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-T2V-14B.py)| 
-|[Wan-AI/Wan2.1-I2V-14B-480P](https://modelscope.cn/models/Wan-AI/Wan2.1-I2V-14B-480P)|`input_image`|[code](./examples/wanvideo/model_inference/Wan2.1-I2V-14B-480P.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-I2V-14B-480P.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-I2V-14B-480P.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-I2V-14B-480P.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-I2V-14B-480P.py)| -|[Wan-AI/Wan2.1-I2V-14B-720P](https://modelscope.cn/models/Wan-AI/Wan2.1-I2V-14B-720P)|`input_image`|[code](./examples/wanvideo/model_inference/Wan2.1-I2V-14B-720P.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-I2V-14B-720P.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-I2V-14B-720P.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-I2V-14B-720P.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-I2V-14B-720P.py)| -|[Wan-AI/Wan2.1-FLF2V-14B-720P](https://modelscope.cn/models/Wan-AI/Wan2.1-FLF2V-14B-720P)|`input_image`, `end_image`|[code](./examples/wanvideo/model_inference/Wan2.1-FLF2V-14B-720P.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-FLF2V-14B-720P.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-FLF2V-14B-720P.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-FLF2V-14B-720P.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-FLF2V-14B-720P.py)| -|[PAI/Wan2.1-Fun-1.3B-InP](https://modelscope.cn/models/PAI/Wan2.1-Fun-1.3B-InP)|`input_image`, `end_image`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-1.3B-InP.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-1.3B-InP.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-1.3B-InP.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-1.3B-InP.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-1.3B-InP.py)| 
-|[PAI/Wan2.1-Fun-1.3B-Control](https://modelscope.cn/models/PAI/Wan2.1-Fun-1.3B-Control)|`control_video`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-1.3B-Control.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-1.3B-Control.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-1.3B-Control.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-1.3B-Control.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-1.3B-Control.py)| -|[PAI/Wan2.1-Fun-14B-InP](https://modelscope.cn/models/PAI/Wan2.1-Fun-14B-InP)|`input_image`, `end_image`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-14B-InP.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-14B-InP.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-14B-InP.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-14B-InP.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-14B-InP.py)| -|[PAI/Wan2.1-Fun-14B-Control](https://modelscope.cn/models/PAI/Wan2.1-Fun-14B-Control)|`control_video`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-14B-Control.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-14B-Control.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-14B-Control.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-14B-Control.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-14B-Control.py)| -|[PAI/Wan2.1-Fun-V1.1-1.3B-Control](https://modelscope.cn/models/PAI/Wan2.1-Fun-V1.1-1.3B-Control)|`control_video`, 
`reference_image`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-1.3B-Control.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-1.3B-Control.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-1.3B-Control.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-1.3B-Control.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-1.3B-Control.py)| -|[PAI/Wan2.1-Fun-V1.1-14B-Control](https://modelscope.cn/models/PAI/Wan2.1-Fun-V1.1-14B-Control)|`control_video`, `reference_image`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-14B-Control.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-14B-Control.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-14B-Control.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-14B-Control.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-14B-Control.py)| -|[PAI/Wan2.1-Fun-V1.1-1.3B-InP](https://modelscope.cn/models/PAI/Wan2.1-Fun-V1.1-1.3B-InP)|`input_image`, `end_image`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-1.3B-InP.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-1.3B-InP.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-1.3B-InP.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-1.3B-InP.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-1.3B-InP.py)| -|[PAI/Wan2.1-Fun-V1.1-14B-InP](https://modelscope.cn/models/PAI/Wan2.1-Fun-V1.1-14B-InP)|`input_image`, 
`end_image`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-14B-InP.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-14B-InP.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-14B-InP.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-14B-InP.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-14B-InP.py)| -|[PAI/Wan2.1-Fun-V1.1-1.3B-Control-Camera](https://modelscope.cn/models/PAI/Wan2.1-Fun-V1.1-1.3B-Control-Camera)|`control_camera_video`, `input_image`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-1.3B-Control-Camera.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-1.3B-Control-Camera.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-1.3B-Control-Camera.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-1.3B-Control-Camera.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-1.3B-Control-Camera.py)| -|[PAI/Wan2.1-Fun-V1.1-14B-Control-Camera](https://modelscope.cn/models/PAI/Wan2.1-Fun-V1.1-14B-Control-Camera)|`control_camera_video`, `input_image`|[code](./examples/wanvideo/model_inference/Wan2.1-Fun-V1.1-14B-Control-Camera.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-Fun-V1.1-14B-Control-Camera.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-Fun-V1.1-14B-Control-Camera.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-Fun-V1.1-14B-Control-Camera.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-Fun-V1.1-14B-Control-Camera.py)| -|[iic/VACE-Wan2.1-1.3B-Preview](https://modelscope.cn/models/iic/VACE-Wan2.1-1.3B-Preview)|`vace_control_video`, 
`vace_reference_image`|[code](./examples/wanvideo/model_inference/Wan2.1-VACE-1.3B-Preview.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-VACE-1.3B-Preview.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-VACE-1.3B-Preview.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-VACE-1.3B-Preview.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-VACE-1.3B-Preview.py)| -|[Wan-AI/Wan2.1-VACE-1.3B](https://modelscope.cn/models/Wan-AI/Wan2.1-VACE-1.3B)|`vace_control_video`, `vace_reference_image`|[code](./examples/wanvideo/model_inference/Wan2.1-VACE-1.3B.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-VACE-1.3B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-VACE-1.3B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-VACE-1.3B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-VACE-1.3B.py)| -|[Wan-AI/Wan2.1-VACE-14B](https://modelscope.cn/models/Wan-AI/Wan2.1-VACE-14B)|`vace_control_video`, `vace_reference_image`|[code](./examples/wanvideo/model_inference/Wan2.1-VACE-14B.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-VACE-14B.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-VACE-14B.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-VACE-14B.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-VACE-14B.py)| -|[DiffSynth-Studio/Wan2.1-1.3b-speedcontrol-v1](https://modelscope.cn/models/DiffSynth-Studio/Wan2.1-1.3b-speedcontrol-v1)|`motion_bucket_id`|[code](./examples/wanvideo/model_inference/Wan2.1-1.3b-speedcontrol-v1.py)|[code](./examples/wanvideo/model_training/full/Wan2.1-1.3b-speedcontrol-v1.sh)|[code](./examples/wanvideo/model_training/validate_full/Wan2.1-1.3b-speedcontrol-v1.py)|[code](./examples/wanvideo/model_training/lora/Wan2.1-1.3b-speedcontrol-v1.sh)|[code](./examples/wanvideo/model_training/validate_lora/Wan2.1-1.3b-speedcontrol-v1.py)| 
-|[krea/krea-realtime-video](https://www.modelscope.cn/models/krea/krea-realtime-video)||[code](./examples/wanvideo/model_inference/krea-realtime-video.py)|[code](./examples/wanvideo/model_training/full/krea-realtime-video.sh)|[code](./examples/wanvideo/model_training/validate_full/krea-realtime-video.py)|[code](./examples/wanvideo/model_training/lora/krea-realtime-video.sh)|[code](./examples/wanvideo/model_training/validate_lora/krea-realtime-video.py)| -|[meituan-longcat/LongCat-Video](https://www.modelscope.cn/models/meituan-longcat/LongCat-Video)|`longcat_video`|[code](./examples/wanvideo/model_inference/LongCat-Video.py)|[code](./examples/wanvideo/model_training/full/LongCat-Video.sh)|[code](./examples/wanvideo/model_training/validate_full/LongCat-Video.py)|[code](./examples/wanvideo/model_training/lora/LongCat-Video.sh)|[code](./examples/wanvideo/model_training/validate_lora/LongCat-Video.py)| - -
- - - -### 更多模型 - - - -
-图像生成模型 - -详细页面:[./examples/image_synthesis/](./examples/image_synthesis/) - -|FLUX|Stable Diffusion 3| -|-|-| -|![image_1024_cfg](https://github.com/user-attachments/assets/984561e9-553d-4952-9443-79ce144f379f)|![image_1024](https://github.com/modelscope/DiffSynth-Studio/assets/35051019/4df346db-6f91-420a-b4c1-26e205376098)| - -|Kolors|Hunyuan-DiT| -|-|-| -|![image_1024](https://github.com/modelscope/DiffSynth-Studio/assets/35051019/53ef6f41-da11-4701-8665-9f64392607bf)|![image_1024](https://github.com/modelscope/DiffSynth-Studio/assets/35051019/60b022c8-df3f-4541-95ab-bf39f2fa8bb5)| - -|Stable Diffusion|Stable Diffusion XL| -|-|-| -|![1024](https://github.com/Artiprocher/DiffSynth-Studio/assets/35051019/6fc84611-8da6-4a1f-8fee-9a34eba3b4a5)|![1024](https://github.com/Artiprocher/DiffSynth-Studio/assets/35051019/67687748-e738-438c-aee5-96096f09ac90)| - -
- - - -
-视频生成模型 - -- HunyuanVideo:[./examples/HunyuanVideo/](./examples/HunyuanVideo/) - -https://github.com/user-attachments/assets/48dd24bb-0cc6-40d2-88c3-10feed3267e9 - -- StepVideo:[./examples/stepvideo/](./examples/stepvideo/) - -https://github.com/user-attachments/assets/5954fdaa-a3cf-45a3-bd35-886e3cc4581b - -- CogVideoX:[./examples/CogVideoX/](./examples/CogVideoX/) - -https://github.com/user-attachments/assets/26b044c1-4a60-44a4-842f-627ff289d006 - -
- - - -
-图像质量评估模型 - -我们集成了一系列图像质量评估模型,这些模型可以用于图像生成模型的评测、对齐训练等场景中。 - -详细页面:[./examples/image_quality_metric/](./examples/image_quality_metric/) - -* [ImageReward](https://github.com/THUDM/ImageReward) -* [Aesthetic](https://github.com/christophschuhmann/improved-aesthetic-predictor) -* [PickScore](https://github.com/yuvalkirstain/pickscore) -* [CLIP](https://github.com/openai/CLIP) -* [HPSv2](https://github.com/tgxs002/HPSv2) -* [HPSv2.1](https://github.com/tgxs002/HPSv2) -* [MPS](https://github.com/Kwai-Kolors/MPS) - -
- - - -## 创新成果 - -DiffSynth-Studio 不仅仅是一个工程化的模型框架,更是创新成果的孵化器。 - -
-Nexus-Gen: 统一架构的图像理解、生成、编辑 - -- 详细页面:https://github.com/modelscope/Nexus-Gen -- 论文:[Nexus-Gen: Unified Image Understanding, Generation, and Editing via Prefilled Autoregression in Shared Embedding Space](https://arxiv.org/pdf/2504.21356) -- 模型:[ModelScope](https://www.modelscope.cn/models/DiffSynth-Studio/Nexus-GenV2), [HuggingFace](https://huggingface.co/modelscope/Nexus-GenV2) -- 数据集:[ModelScope Dataset](https://www.modelscope.cn/datasets/DiffSynth-Studio/Nexus-Gen-Training-Dataset) -- 在线体验:[ModelScope Nexus-Gen Studio](https://www.modelscope.cn/studios/DiffSynth-Studio/Nexus-Gen) - -![](https://github.com/modelscope/Nexus-Gen/raw/main/assets/illustrations/gen_edit.jpg) - -
- - - -
-ArtAug: 图像生成模型的美学提升 - -- 详细页面:[./examples/ArtAug/](./examples/ArtAug/) -- 论文:[ArtAug: Enhancing Text-to-Image Generation through Synthesis-Understanding Interaction](https://arxiv.org/abs/2412.12888) -- 模型:[ModelScope](https://www.modelscope.cn/models/DiffSynth-Studio/ArtAug-lora-FLUX.1dev-v1), [HuggingFace](https://huggingface.co/ECNU-CILab/ArtAug-lora-FLUX.1dev-v1) -- 在线体验:[ModelScope AIGC Tab](https://www.modelscope.cn/aigc/imageGeneration?tab=advanced&versionId=7228&modelType=LoRA&sdVersion=FLUX_1&modelUrl=modelscope%3A%2F%2FDiffSynth-Studio%2FArtAug-lora-FLUX.1dev-v1%3Frevision%3Dv1.0) - -|FLUX.1-dev|FLUX.1-dev + ArtAug LoRA| -|-|-| -|![image_1_base](https://github.com/user-attachments/assets/e1d5c505-b423-45fe-be01-25c2758f5417)|![image_1_enhance](https://github.com/user-attachments/assets/335908e3-d0bd-41c2-9d99-d10528a2d719)| - -
- - - -
- -EliGen: 精准的图像分区控制 - -- 详细页面:[./examples/EntityControl/](./examples/EntityControl/) -- 论文:[EliGen: Entity-Level Controlled Image Generation with Regional Attention](https://arxiv.org/abs/2501.01097) -- 模型:[ModelScope](https://www.modelscope.cn/models/DiffSynth-Studio/Eligen), [HuggingFace](https://huggingface.co/modelscope/EliGen) -- 在线体验:[ModelScope EliGen Studio](https://www.modelscope.cn/studios/DiffSynth-Studio/EliGen) -- 数据集:[EliGen Train Set](https://www.modelscope.cn/datasets/DiffSynth-Studio/EliGenTrainSet) - -|实体控制区域|生成图像| -|-|-| -|![eligen_example_2_mask_0](https://github.com/user-attachments/assets/1c6d9445-5022-4d91-ad2e-dc05321883d1)|![eligen_example_2_0](https://github.com/user-attachments/assets/86739945-cb07-4a49-b3b3-3bb65c90d14f)| - -
- - - -
- -ExVideo: 视频生成模型的扩展训练 - -- 项目页面:[Project Page](https://ecnu-cilab.github.io/ExVideoProjectPage/) -- 论文:[ExVideo: Extending Video Diffusion Models via Parameter-Efficient Post-Tuning](https://arxiv.org/abs/2406.14130) -- 代码样例:[./examples/ExVideo/](./examples/ExVideo/) -- 模型:[ModelScope](https://modelscope.cn/models/ECNU-CILab/ExVideo-SVD-128f-v1), [HuggingFace](https://huggingface.co/ECNU-CILab/ExVideo-SVD-128f-v1) - -https://github.com/modelscope/DiffSynth-Studio/assets/35051019/d97f6aa9-8064-4b5b-9d49-ed6001bb9acc - -
- - - -
- -Diffutoon: 高分辨率动漫风格视频渲染 - -- 项目页面:[Project Page](https://ecnu-cilab.github.io/DiffutoonProjectPage/) -- 论文:[Diffutoon: High-Resolution Editable Toon Shading via Diffusion Models](https://arxiv.org/abs/2401.16224) -- 代码样例:[./examples/Diffutoon/](./examples/Diffutoon/) - -https://github.com/Artiprocher/DiffSynth-Studio/assets/35051019/b54c05c5-d747-4709-be5e-b39af82404dd - -
- - - -
- -DiffSynth: 本项目的初代版本 - -- 项目页面:[Project Page](https://ecnu-cilab.github.io/DiffSynth.github.io/) -- 论文:[DiffSynth: Latent In-Iteration Deflickering for Realistic Video Synthesis](https://arxiv.org/abs/2308.03463) -- 代码样例:[./examples/diffsynth/](./examples/diffsynth/) - -https://github.com/Artiprocher/DiffSynth-Studio/assets/35051019/59fb2f7b-8de0-4481-b79f-0c3a7361a1ea - -
- - - -## 更新历史 - -- **2025年10月30日** 支持了 [meituan-longcat/LongCat-Video](https://www.modelscope.cn/models/meituan-longcat/LongCat-Video) 模型,该模型支持文生视频、图生视频、视频续写。这个模型在本项目中沿用 Wan 的框架进行推理和训练。 - -- **2025年10月27日** 支持了 [krea/krea-realtime-video](https://www.modelscope.cn/models/krea/krea-realtime-video) 模型,Wan 模型生态再添一员。 - -- **2025年9月23日** [DiffSynth-Studio/Qwen-Image-EliGen-Poster](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-EliGen-Poster) 发布!本模型由我们与淘天体验设计团队联合研发并开源。模型基于 Qwen-Image 构建,专为电商海报场景设计,支持精确的分区布局控制。 请参考[我们的示例代码](./examples/qwen_image/model_inference/Qwen-Image-EliGen-Poster.py)。 - -- **2025年9月9日** 我们的训练框架支持了多种训练模式,目前已适配 Qwen-Image,除标准 SFT 训练模式外,已支持 Direct Distill,请参考[我们的示例代码](./examples/qwen_image/model_training/lora/Qwen-Image-Distill-LoRA.sh)。这项功能是实验性的,我们将会继续完善以支持更全面的模型训练功能。 - -- **2025年8月28日** 我们支持了Wan2.2-S2V,一个音频驱动的电影级视频生成模型。请参见[./examples/wanvideo/](./examples/wanvideo/)。 - -- **2025年8月21日** [DiffSynth-Studio/Qwen-Image-EliGen-V2](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-EliGen-V2) 发布!相比于 V1 版本,训练数据集变为 [Qwen-Image-Self-Generated-Dataset](https://www.modelscope.cn/datasets/DiffSynth-Studio/Qwen-Image-Self-Generated-Dataset),因此,生成的图像更符合 Qwen-Image 本身的图像分布和风格。 请参考[我们的示例代码](./examples/qwen_image/model_inference_low_vram/Qwen-Image-EliGen-V2.py)。 - -- **2025年8月21日** 我们开源了 [DiffSynth-Studio/Qwen-Image-In-Context-Control-Union](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-In-Context-Control-Union) 结构控制 LoRA 模型,采用 In Context 的技术路线,支持多种类别的结构控制条件,包括 canny, depth, lineart, softedge, normal, openpose。 请参考[我们的示例代码](./examples/qwen_image/model_inference/Qwen-Image-In-Context-Control-Union.py)。 - -- **2025年8月20日** 我们开源了 [DiffSynth-Studio/Qwen-Image-Edit-Lowres-Fix](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Edit-Lowres-Fix) 模型,提升了 Qwen-Image-Edit 对低分辨率图像输入的编辑效果。请参考[我们的示例代码](./examples/qwen_image/model_inference/Qwen-Image-Edit-Lowres-Fix.py) - -- **2025年8月19日** 🔥 Qwen-Image-Edit 开源,欢迎图像编辑模型新成员! 
- -- **2025年8月18日** 我们训练并开源了 Qwen-Image 的图像重绘 ControlNet 模型 [DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Inpaint](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Inpaint),模型结构采用了轻量化的设计,请参考[我们的示例代码](./examples/qwen_image/model_inference/Qwen-Image-Blockwise-ControlNet-Inpaint.py)。 - -- **2025年8月15日** 我们开源了 [Qwen-Image-Self-Generated-Dataset](https://www.modelscope.cn/datasets/DiffSynth-Studio/Qwen-Image-Self-Generated-Dataset) 数据集。这是一个使用 Qwen-Image 模型生成的图像数据集,共包含 160,000 张`1024 x 1024`图像。它包括通用、英文文本渲染和中文文本渲染子集。我们为每张图像提供了图像描述、实体和结构控制图像的标注。开发者可以使用这个数据集来训练 Qwen-Image 模型的 ControlNet 和 EliGen 等模型,我们旨在通过开源推动技术发展! - -- **2025年8月13日** 我们训练并开源了 Qwen-Image 的 ControlNet 模型 [DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Depth](https://modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Depth),模型结构采用了轻量化的设计,请参考[我们的示例代码](./examples/qwen_image/model_inference/Qwen-Image-Blockwise-ControlNet-Depth.py)。 - -- **2025年8月12日** 我们训练并开源了 Qwen-Image 的 ControlNet 模型 [DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Canny](https://modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Blockwise-ControlNet-Canny),模型结构采用了轻量化的设计,请参考[我们的示例代码](./examples/qwen_image/model_inference/Qwen-Image-Blockwise-ControlNet-Canny.py)。 - -- **2025年8月11日** 我们开源了 Qwen-Image 的蒸馏加速模型 [DiffSynth-Studio/Qwen-Image-Distill-LoRA](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Distill-LoRA),沿用了与 [DiffSynth-Studio/Qwen-Image-Distill-Full](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Distill-Full) 相同的训练流程,但模型结构修改为了 LoRA,因此能够更好地与其他开源生态模型兼容。 - -- **2025年8月7日** 我们开源了 Qwen-Image 的实体控制 LoRA 模型 [DiffSynth-Studio/Qwen-Image-EliGen](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-EliGen)。Qwen-Image-EliGen 能够实现实体级可控的文生图。技术细节请参见[论文](https://arxiv.org/abs/2501.01097)。训练数据集:[EliGenTrainSet](https://www.modelscope.cn/datasets/DiffSynth-Studio/EliGenTrainSet)。 - -- **2025年8月5日** 我们开源了 Qwen-Image 的蒸馏加速模型 
[DiffSynth-Studio/Qwen-Image-Distill-Full](https://www.modelscope.cn/models/DiffSynth-Studio/Qwen-Image-Distill-Full),实现了约 5 倍加速。 - -- **2025年8月4日** 🔥 Qwen-Image 开源,欢迎图像生成模型家族新成员! - -- **2025年8月1日** [FLUX.1-Krea-dev](https://www.modelscope.cn/models/black-forest-labs/FLUX.1-Krea-dev) 开源,这是一个专注于美学摄影的文生图模型。我们第一时间提供了全方位支持,包括低显存逐层 offload、LoRA 训练、全量训练。详细信息请参考 [./examples/flux/](./examples/flux/)。 - -- **2025年7月28日** Wan 2.2 开源,我们第一时间提供了全方位支持,包括低显存逐层 offload、FP8 量化、序列并行、LoRA 训练、全量训练。详细信息请参考 [./examples/wanvideo/](./examples/wanvideo/)。 - -- **2025年7月11日** 我们提出 Nexus-Gen,一个将大语言模型(LLM)的语言推理能力与扩散模型的图像生成能力相结合的统一框架。该框架支持无缝的图像理解、生成和编辑任务。 - - 论文: [Nexus-Gen: Unified Image Understanding, Generation, and Editing via Prefilled Autoregression in Shared Embedding Space](https://arxiv.org/pdf/2504.21356) - - Github 仓库: https://github.com/modelscope/Nexus-Gen - - 模型: [ModelScope](https://www.modelscope.cn/models/DiffSynth-Studio/Nexus-GenV2), [HuggingFace](https://huggingface.co/modelscope/Nexus-GenV2) - - 训练数据集: [ModelScope Dataset](https://www.modelscope.cn/datasets/DiffSynth-Studio/Nexus-Gen-Training-Dataset) - - 在线体验: [ModelScope Nexus-Gen Studio](https://www.modelscope.cn/studios/DiffSynth-Studio/Nexus-Gen) - -
-更多 - -- **2025年6月15日** ModelScope 官方评测框架 [EvalScope](https://github.com/modelscope/evalscope) 现已支持文生图生成评测。请参考[最佳实践](https://evalscope.readthedocs.io/zh-cn/latest/best_practice/t2i_eval.html)指南进行尝试。 - -- **2025年3月25日** 我们的新开源项目 [DiffSynth-Engine](https://github.com/modelscope/DiffSynth-Engine) 现已开源!专注于稳定的模型部署,面向工业界,提供更好的工程支持、更高的计算性能和更稳定的功能。 - -- **2025年3月31日** 我们支持 InfiniteYou,一种用于 FLUX 的人脸特征保留方法。更多细节请参考 [./examples/InfiniteYou/](./examples/InfiniteYou/)。 - -- **2025年3月13日** 我们支持 HunyuanVideo-I2V,即腾讯开源的 HunyuanVideo 的图像到视频生成版本。更多细节请参考 [./examples/HunyuanVideo/](./examples/HunyuanVideo/)。 - -- **2025年2月25日** 我们支持 Wan-Video,这是阿里巴巴开源的一系列最先进的视频合成模型。详见 [./examples/wanvideo/](./examples/wanvideo/)。 - -- **2025年2月17日** 我们支持 [StepVideo](https://modelscope.cn/models/stepfun-ai/stepvideo-t2v/summary)!先进的视频合成模型!详见 [./examples/stepvideo](./examples/stepvideo/)。 - -- **2024年12月31日** 我们提出 EliGen,一种用于精确实体级别控制的文本到图像生成的新框架,并辅以修复融合管道,将其能力扩展到图像修复任务。EliGen 可以无缝集成现有的社区模型,如 IP-Adapter 和 In-Context LoRA,提升其通用性。更多详情,请见 [./examples/EntityControl](./examples/EntityControl/)。 - - 论文: [EliGen: Entity-Level Controlled Image Generation with Regional Attention](https://arxiv.org/abs/2501.01097) - - 模型: [ModelScope](https://www.modelscope.cn/models/DiffSynth-Studio/Eligen), [HuggingFace](https://huggingface.co/modelscope/EliGen) - - 在线体验: [ModelScope EliGen Studio](https://www.modelscope.cn/studios/DiffSynth-Studio/EliGen) - - 训练数据集: [EliGen Train Set](https://www.modelscope.cn/datasets/DiffSynth-Studio/EliGenTrainSet) - -- **2024年12月19日** 我们为 HunyuanVideo 实现了高级显存管理,使得在 24GB 显存下可以生成分辨率为 129x720x1280 的视频,或在仅 6GB 显存下生成分辨率为 129x512x384 的视频。更多细节请参考 [./examples/HunyuanVideo/](./examples/HunyuanVideo/)。 - -- **2024年12月18日** 我们提出 ArtAug,一种通过合成-理解交互来改进文生图模型的方法。我们以 LoRA 格式为 FLUX.1-dev 训练了一个 ArtAug 增强模块。该模型将 Qwen2-VL-72B 的美学理解融入 FLUX.1-dev,从而提升了生成图像的质量。 - - 论文: https://arxiv.org/abs/2412.12888 - - 示例: https://github.com/modelscope/DiffSynth-Studio/tree/main/examples/ArtAug - - 模型: 
[ModelScope](https://www.modelscope.cn/models/DiffSynth-Studio/ArtAug-lora-FLUX.1dev-v1), [HuggingFace](https://huggingface.co/ECNU-CILab/ArtAug-lora-FLUX.1dev-v1) - - 演示: [ModelScope](https://modelscope.cn/aigc/imageGeneration?tab=advanced&versionId=7228&modelType=LoRA&sdVersion=FLUX_1&modelUrl=modelscope%3A%2F%2FDiffSynth-Studio%2FArtAug-lora-FLUX.1dev-v1%3Frevision%3Dv1.0), HuggingFace (即将上线) - -- **2024年10月25日** 我们提供了广泛的 FLUX ControlNet 支持。该项目支持许多不同的 ControlNet 模型,并且可以自由组合,即使它们的结构不同。此外,ControlNet 模型兼容高分辨率优化和分区控制技术,能够实现非常强大的可控图像生成。详见 [`./examples/ControlNet/`](./examples/ControlNet/)。 - -- **2024年10月8日** 我们发布了基于 CogVideoX-5B 和 ExVideo 的扩展 LoRA。您可以从 [ModelScope](https://modelscope.cn/models/ECNU-CILab/ExVideo-CogVideoX-LoRA-129f-v1) 或 [HuggingFace](https://huggingface.co/ECNU-CILab/ExVideo-CogVideoX-LoRA-129f-v1) 下载此模型。 - -- **2024年8月22日** 本项目现已支持 CogVideoX-5B。详见 [此处](/examples/video_synthesis/)。我们为这个文生视频模型提供了几个有趣的功能,包括: - - 文本到视频 - - 视频编辑 - - 自我超分 - - 视频插帧 - -- **2024年8月22日** 我们实现了一个有趣的画笔功能,支持所有文生图模型。现在,您可以在 AI 的辅助下使用画笔创作惊艳的图像了! - - 在我们的 [WebUI](#usage-in-webui) 中使用它。 - -- **2024年8月21日** DiffSynth-Studio 现已支持 FLUX。 - - 启用 CFG 和高分辨率修复以提升视觉质量。详见 [此处](/examples/image_synthesis/README.md) - - LoRA、ControlNet 和其他附加模型将很快推出。 - -- **2024年6月21日** 我们提出 ExVideo,一种旨在增强视频生成模型能力的后训练微调技术。我们将 Stable Video Diffusion 进行了扩展,实现了长达 128 帧的长视频生成。 - - [项目页面](https://ecnu-cilab.github.io/ExVideoProjectPage/) - - 源代码已在此仓库中发布。详见 [`examples/ExVideo`](./examples/ExVideo/)。 - - 模型已发布于 [HuggingFace](https://huggingface.co/ECNU-CILab/ExVideo-SVD-128f-v1) 和 [ModelScope](https://modelscope.cn/models/ECNU-CILab/ExVideo-SVD-128f-v1)。 - - 技术报告已发布于 [arXiv](https://arxiv.org/abs/2406.14130)。 - - 您可以在此 [演示](https://huggingface.co/spaces/modelscope/ExVideo-SVD-128f-v1) 中试用 ExVideo! 
- -- **2024年6月13日** DiffSynth Studio 已迁移至 ModelScope。开发团队也从“我”转变为“我们”。当然,我仍会参与后续的开发和维护工作。 - -- **2024年1月29日** 我们提出 Diffutoon,这是一个出色的卡通着色解决方案。 - - [项目页面](https://ecnu-cilab.github.io/DiffutoonProjectPage/) - - 源代码已在此项目中发布。 - - 技术报告(IJCAI 2024)已发布于 [arXiv](https://arxiv.org/abs/2401.16224)。 - -- **2023年12月8日** 我们决定启动一个新项目,旨在释放扩散模型的潜力,尤其是在视频合成方面。该项目的开发工作正式开始。 - -- **2023年11月15日** 我们提出 FastBlend,一种强大的视频去闪烁算法。 - - sd-webui 扩展已发布于 [GitHub](https://github.com/Artiprocher/sd-webui-fastblend)。 - - 演示视频已在 Bilibili 上展示,包含三个任务: - - [视频去闪烁](https://www.bilibili.com/video/BV1d94y1W7PE) - - [视频插帧](https://www.bilibili.com/video/BV1Lw411m71p) - - [图像驱动的视频渲染](https://www.bilibili.com/video/BV1RB4y1Z7LF) - - 技术报告已发布于 [arXiv](https://arxiv.org/abs/2311.09265)。 - - 其他用户开发的非官方 ComfyUI 扩展已发布于 [GitHub](https://github.com/AInseven/ComfyUI-fastblend)。 - -- **2023年10月1日** 我们发布了该项目的早期版本,名为 FastSDXL。这是构建一个扩散引擎的初步尝试。 - - 源代码已发布于 [GitHub](https://github.com/Artiprocher/FastSDXL)。 - - FastSDXL 包含一个可训练的 OLSS 调度器,以提高效率。 - - OLSS 的原始仓库位于 [此处](https://github.com/alibaba/EasyNLP/tree/master/diffusion/olss_scheduler)。 - - 技术报告(CIKM 2023)已发布于 [arXiv](https://arxiv.org/abs/2305.14677)。 - - 演示视频已发布于 [Bilibili](https://www.bilibili.com/video/BV1w8411y7uj)。 - - 由于 OLSS 需要额外训练,我们未在本项目中实现它。 - -- **2023年8月29日** 我们提出 DiffSynth,一个视频合成框架。 - - [项目页面](https://ecnu-cilab.github.io/DiffSynth.github.io/)。 - - 源代码已发布在 [EasyNLP](https://github.com/alibaba/EasyNLP/tree/master/diffusion/DiffSynth)。 - - 技术报告(ECML PKDD 2024)已发布于 [arXiv](https://arxiv.org/abs/2308.03463)。 - -
diff --git a/apps/gradio/DiffSynth_Studio.py b/apps/gradio/DiffSynth_Studio.py deleted file mode 100644 index d265492..0000000 --- a/apps/gradio/DiffSynth_Studio.py +++ /dev/null @@ -1,252 +0,0 @@ -import gradio as gr -from diffsynth import ModelManager, SDImagePipeline, SDXLImagePipeline, SD3ImagePipeline, HunyuanDiTImagePipeline, FluxImagePipeline -import os, torch -from PIL import Image -import numpy as np - - -config = { - "model_config": { - "Stable Diffusion": { - "model_folder": "models/stable_diffusion", - "pipeline_class": SDImagePipeline, - "default_parameters": { - "cfg_scale": 7.0, - "height": 512, - "width": 512, - } - }, - "Stable Diffusion XL": { - "model_folder": "models/stable_diffusion_xl", - "pipeline_class": SDXLImagePipeline, - "default_parameters": { - "cfg_scale": 7.0, - } - }, - "Stable Diffusion 3": { - "model_folder": "models/stable_diffusion_3", - "pipeline_class": SD3ImagePipeline, - "default_parameters": { - "cfg_scale": 7.0, - } - }, - "Stable Diffusion XL Turbo": { - "model_folder": "models/stable_diffusion_xl_turbo", - "pipeline_class": SDXLImagePipeline, - "default_parameters": { - "negative_prompt": "", - "cfg_scale": 1.0, - "num_inference_steps": 1, - "height": 512, - "width": 512, - } - }, - "Kolors": { - "model_folder": "models/kolors", - "pipeline_class": SDXLImagePipeline, - "default_parameters": { - "cfg_scale": 7.0, - } - }, - "HunyuanDiT": { - "model_folder": "models/HunyuanDiT", - "pipeline_class": HunyuanDiTImagePipeline, - "default_parameters": { - "cfg_scale": 7.0, - } - }, - "FLUX": { - "model_folder": "models/FLUX", - "pipeline_class": FluxImagePipeline, - "default_parameters": { - "cfg_scale": 1.0, - } - } - }, - "max_num_painter_layers": 8, - "max_num_model_cache": 1, -} - - -def load_model_list(model_type): - if model_type is None: - return [] - folder = config["model_config"][model_type]["model_folder"] - file_list = [i for i in os.listdir(folder) if i.endswith(".safetensors")] - if model_type in ["HunyuanDiT", 
"Kolors", "FLUX"]: - file_list += [i for i in os.listdir(folder) if os.path.isdir(os.path.join(folder, i))] - file_list = sorted(file_list) - return file_list - - -def load_model(model_type, model_path): - global model_dict - model_key = f"{model_type}:{model_path}" - if model_key in model_dict: - return model_dict[model_key] - model_path = os.path.join(config["model_config"][model_type]["model_folder"], model_path) - model_manager = ModelManager() - if model_type == "HunyuanDiT": - model_manager.load_models([ - os.path.join(model_path, "clip_text_encoder/pytorch_model.bin"), - os.path.join(model_path, "mt5/pytorch_model.bin"), - os.path.join(model_path, "model/pytorch_model_ema.pt"), - os.path.join(model_path, "sdxl-vae-fp16-fix/diffusion_pytorch_model.bin"), - ]) - elif model_type == "Kolors": - model_manager.load_models([ - os.path.join(model_path, "text_encoder"), - os.path.join(model_path, "unet/diffusion_pytorch_model.safetensors"), - os.path.join(model_path, "vae/diffusion_pytorch_model.safetensors"), - ]) - elif model_type == "FLUX": - model_manager.torch_dtype = torch.bfloat16 - file_list = [ - os.path.join(model_path, "text_encoder/model.safetensors"), - os.path.join(model_path, "text_encoder_2"), - ] - for file_name in os.listdir(model_path): - if file_name.endswith(".safetensors"): - file_list.append(os.path.join(model_path, file_name)) - model_manager.load_models(file_list) - else: - model_manager.load_model(model_path) - pipe = config["model_config"][model_type]["pipeline_class"].from_model_manager(model_manager) - while len(model_dict) + 1 > config["max_num_model_cache"]: - key = next(iter(model_dict.keys())) - model_manager_to_release, _ = model_dict[key] - model_manager_to_release.to("cpu") - del model_dict[key] - torch.cuda.empty_cache() - model_dict[model_key] = model_manager, pipe - return model_manager, pipe - - -model_dict = {} - -with gr.Blocks() as app: - gr.Markdown("# DiffSynth-Studio Painter") - with gr.Row(): - with gr.Column(scale=382, 
min_width=100): - - with gr.Accordion(label="Model"): - model_type = gr.Dropdown(choices=[i for i in config["model_config"]], label="Model type") - model_path = gr.Dropdown(choices=[], interactive=True, label="Model path") - - @gr.on(inputs=model_type, outputs=model_path, triggers=model_type.change) - def model_type_to_model_path(model_type): - return gr.Dropdown(choices=load_model_list(model_type)) - - with gr.Accordion(label="Prompt"): - prompt = gr.Textbox(label="Prompt", lines=3) - negative_prompt = gr.Textbox(label="Negative prompt", lines=1) - cfg_scale = gr.Slider(minimum=1.0, maximum=10.0, value=7.0, step=0.1, interactive=True, label="Classifier-free guidance scale") - embedded_guidance = gr.Slider(minimum=0.0, maximum=10.0, value=0.0, step=0.1, interactive=True, label="Embedded guidance scale (only for FLUX)") - - with gr.Accordion(label="Image"): - num_inference_steps = gr.Slider(minimum=1, maximum=100, value=20, step=1, interactive=True, label="Inference steps") - height = gr.Slider(minimum=64, maximum=2048, value=1024, step=64, interactive=True, label="Height") - width = gr.Slider(minimum=64, maximum=2048, value=1024, step=64, interactive=True, label="Width") - with gr.Column(): - use_fixed_seed = gr.Checkbox(value=True, interactive=False, label="Use fixed seed") - seed = gr.Number(minimum=0, maximum=10**9, value=0, interactive=True, label="Random seed", show_label=False) - - @gr.on( - inputs=[model_type, model_path, prompt, negative_prompt, cfg_scale, embedded_guidance, num_inference_steps, height, width], - outputs=[prompt, negative_prompt, cfg_scale, embedded_guidance, num_inference_steps, height, width], - triggers=model_path.change - ) - def model_path_to_default_params(model_type, model_path, prompt, negative_prompt, cfg_scale, embedded_guidance, num_inference_steps, height, width): - load_model(model_type, model_path) - cfg_scale = config["model_config"][model_type]["default_parameters"].get("cfg_scale", cfg_scale) - embedded_guidance = 
config["model_config"][model_type]["default_parameters"].get("embedded_guidance", embedded_guidance) - num_inference_steps = config["model_config"][model_type]["default_parameters"].get("num_inference_steps", num_inference_steps) - height = config["model_config"][model_type]["default_parameters"].get("height", height) - width = config["model_config"][model_type]["default_parameters"].get("width", width) - return prompt, negative_prompt, cfg_scale, embedded_guidance, num_inference_steps, height, width - - - with gr.Column(scale=618, min_width=100): - with gr.Accordion(label="Painter"): - enable_local_prompt_list = [] - local_prompt_list = [] - mask_scale_list = [] - canvas_list = [] - for painter_layer_id in range(config["max_num_painter_layers"]): - with gr.Tab(label=f"Layer {painter_layer_id}"): - enable_local_prompt = gr.Checkbox(label="Enable", value=False, key=f"enable_local_prompt_{painter_layer_id}") - local_prompt = gr.Textbox(label="Local prompt", key=f"local_prompt_{painter_layer_id}") - mask_scale = gr.Slider(minimum=0.0, maximum=5.0, value=1.0, step=0.1, interactive=True, label="Mask scale", key=f"mask_scale_{painter_layer_id}") - canvas = gr.ImageEditor(canvas_size=(512, 1), sources=None, layers=False, interactive=True, image_mode="RGBA", - brush=gr.Brush(default_size=100, default_color="#000000", colors=["#000000"]), - label="Painter", key=f"canvas_{painter_layer_id}") - @gr.on(inputs=[height, width, canvas], outputs=canvas, triggers=[height.change, width.change, canvas.clear, enable_local_prompt.change], show_progress="hidden") - def resize_canvas(height, width, canvas): - h, w = canvas["background"].shape[:2] - if h != height or width != w: - return np.ones((height, width, 3), dtype=np.uint8) * 255 - else: - return canvas - - enable_local_prompt_list.append(enable_local_prompt) - local_prompt_list.append(local_prompt) - mask_scale_list.append(mask_scale) - canvas_list.append(canvas) - with gr.Accordion(label="Results"): - run_button = 
gr.Button(value="Generate", variant="primary") - output_image = gr.Image(sources=None, show_label=False, interactive=False, type="pil") - with gr.Row(): - with gr.Column(): - output_to_painter_button = gr.Button(value="Set as painter's background") - with gr.Column(): - output_to_input_button = gr.Button(value="Set as input image") - painter_background = gr.State(None) - input_background = gr.State(None) - @gr.on( - inputs=[model_type, model_path, prompt, negative_prompt, cfg_scale, embedded_guidance, num_inference_steps, height, width, seed] + enable_local_prompt_list + local_prompt_list + mask_scale_list + canvas_list, - outputs=[output_image], - triggers=run_button.click - ) - def generate_image(model_type, model_path, prompt, negative_prompt, cfg_scale, embedded_guidance, num_inference_steps, height, width, seed, *args, progress=gr.Progress()): - _, pipe = load_model(model_type, model_path) - input_params = { - "prompt": prompt, - "negative_prompt": negative_prompt, - "cfg_scale": cfg_scale, - "num_inference_steps": num_inference_steps, - "height": height, - "width": width, - "progress_bar_cmd": progress.tqdm, - } - if isinstance(pipe, FluxImagePipeline): - input_params["embedded_guidance"] = embedded_guidance - enable_local_prompt_list, local_prompt_list, mask_scale_list, canvas_list = ( - args[0 * config["max_num_painter_layers"]: 1 * config["max_num_painter_layers"]], - args[1 * config["max_num_painter_layers"]: 2 * config["max_num_painter_layers"]], - args[2 * config["max_num_painter_layers"]: 3 * config["max_num_painter_layers"]], - args[3 * config["max_num_painter_layers"]: 4 * config["max_num_painter_layers"]] - ) - local_prompts, masks, mask_scales = [], [], [] - for enable_local_prompt, local_prompt, mask_scale, canvas in zip( - enable_local_prompt_list, local_prompt_list, mask_scale_list, canvas_list - ): - if enable_local_prompt: - local_prompts.append(local_prompt) - masks.append(Image.fromarray(canvas["layers"][0][:, :, -1]).convert("RGB")) - 
mask_scales.append(mask_scale) - input_params.update({ - "local_prompts": local_prompts, - "masks": masks, - "mask_scales": mask_scales, - }) - torch.manual_seed(seed) - image = pipe(**input_params) - return image - - @gr.on(inputs=[output_image] + canvas_list, outputs=canvas_list, triggers=output_to_painter_button.click) - def send_output_to_painter_background(output_image, *canvas_list): - for canvas in canvas_list: - h, w = canvas["background"].shape[:2] - canvas["background"] = output_image.resize((w, h)) - return tuple(canvas_list) -app.launch() diff --git a/apps/gradio/entity_level_control.py b/apps/gradio/entity_level_control.py deleted file mode 100644 index 58f4722..0000000 --- a/apps/gradio/entity_level_control.py +++ /dev/null @@ -1,390 +0,0 @@ -import os -import torch -import numpy as np -from PIL import Image, ImageDraw, ImageFont -import random -import json -import gradio as gr -from diffsynth import ModelManager, FluxImagePipeline, download_customized_models -from modelscope import dataset_snapshot_download - - -dataset_snapshot_download(dataset_id="DiffSynth-Studio/examples_in_diffsynth", local_dir="./", allow_file_pattern=f"data/examples/eligen/entity_control/*") -example_json = 'data/examples/eligen/entity_control/ui_examples.json' -with open(example_json, 'r') as f: - examples = json.load(f)['examples'] - -for idx in range(len(examples)): - example_id = examples[idx]['example_id'] - entity_prompts = examples[idx]['local_prompt_list'] - examples[idx]['mask_lists'] = [Image.open(f"data/examples/eligen/entity_control/example_{example_id}/{i}.png").convert('RGB') for i in range(len(entity_prompts))] - -def create_canvas_data(background, masks): - if background.shape[-1] == 3: - background = np.dstack([background, np.full(background.shape[:2], 255, dtype=np.uint8)]) - layers = [] - for mask in masks: - if mask is not None: - mask_single_channel = mask if mask.ndim == 2 else mask[..., 0] - layer = np.zeros((mask_single_channel.shape[0], 
mask_single_channel.shape[1], 4), dtype=np.uint8) - layer[..., -1] = mask_single_channel - layers.append(layer) - else: - layers.append(np.zeros_like(background)) - - composite = background.copy() - for layer in layers: - if layer.size > 0: - composite = np.where(layer[..., -1:] > 0, layer, composite) - return { - "background": background, - "layers": layers, - "composite": composite, - } - -def load_example(load_example_button): - example_idx = int(load_example_button.split()[-1]) - 1 - example = examples[example_idx] - result = [ - 50, - example["global_prompt"], - example["negative_prompt"], - example["seed"], - *example["local_prompt_list"], - ] - num_entities = len(example["local_prompt_list"]) - result += [""] * (config["max_num_painter_layers"] - num_entities) - masks = [] - for mask in example["mask_lists"]: - mask_single_channel = np.array(mask.convert("L")) - masks.append(mask_single_channel) - for _ in range(config["max_num_painter_layers"] - len(masks)): - blank_mask = np.zeros_like(masks[0]) if masks else np.zeros((512, 512), dtype=np.uint8) - masks.append(blank_mask) - background = np.ones((masks[0].shape[0], masks[0].shape[1], 4), dtype=np.uint8) * 255 - canvas_data_list = [] - for mask in masks: - canvas_data = create_canvas_data(background, [mask]) - canvas_data_list.append(canvas_data) - result.extend(canvas_data_list) - return result - -def save_mask_prompts(masks, mask_prompts, global_prompt, seed=0, random_dir='0000000'): - save_dir = os.path.join('workdirs/tmp_mask', random_dir) - print(f'save to {save_dir}') - os.makedirs(save_dir, exist_ok=True) - for i, mask in enumerate(masks): - save_path = os.path.join(save_dir, f'{i}.png') - mask.save(save_path) - sample = { - "global_prompt": global_prompt, - "mask_prompts": mask_prompts, - "seed": seed, - } - with open(os.path.join(save_dir, f"prompts.json"), 'w') as f: - json.dump(sample, f, indent=4) - -def visualize_masks(image, masks, mask_prompts, font_size=35, use_random_colors=False): - # 
Create a blank image for overlays - overlay = Image.new('RGBA', image.size, (0, 0, 0, 0)) - colors = [ - (165, 238, 173, 80), - (76, 102, 221, 80), - (221, 160, 77, 80), - (204, 93, 71, 80), - (145, 187, 149, 80), - (134, 141, 172, 80), - (157, 137, 109, 80), - (153, 104, 95, 80), - (165, 238, 173, 80), - (76, 102, 221, 80), - (221, 160, 77, 80), - (204, 93, 71, 80), - (145, 187, 149, 80), - (134, 141, 172, 80), - (157, 137, 109, 80), - (153, 104, 95, 80), - ] - # Generate random colors for each mask - if use_random_colors: - colors = [(random.randint(0, 255), random.randint(0, 255), random.randint(0, 255), 80) for _ in range(len(masks))] - # Font settings - try: - font = ImageFont.truetype("arial", font_size) # Adjust as needed - except IOError: - font = ImageFont.load_default(font_size) - # Overlay each mask onto the overlay image - for mask, mask_prompt, color in zip(masks, mask_prompts, colors): - if mask is None: - continue - # Convert mask to RGBA mode - mask_rgba = mask.convert('RGBA') - mask_data = mask_rgba.getdata() - new_data = [(color if item[:3] == (255, 255, 255) else (0, 0, 0, 0)) for item in mask_data] - mask_rgba.putdata(new_data) - # Draw the mask prompt text on the mask - draw = ImageDraw.Draw(mask_rgba) - mask_bbox = mask.getbbox() # Get the bounding box of the mask - if mask_bbox is None: - continue - text_position = (mask_bbox[0] + 10, mask_bbox[1] + 10) # Adjust text position based on mask position - draw.text(text_position, mask_prompt, fill=(255, 255, 255, 255), font=font) - # Alpha composite the overlay with this mask - overlay = Image.alpha_composite(overlay, mask_rgba) - # Composite the overlay onto the original image - result = Image.alpha_composite(image.convert('RGBA'), overlay) - return result - -config = { - "model_config": { - "FLUX": { - "model_folder": "models/FLUX", - "pipeline_class": FluxImagePipeline, - "default_parameters": { - "cfg_scale": 3.0, - "embedded_guidance": 3.5, - "num_inference_steps": 30, - } - }, - }, - 
"max_num_painter_layers": 8, - "max_num_model_cache": 1, -} - -model_dict = {} - -def load_model(model_type='FLUX', model_path='FLUX.1-dev'): - global model_dict - model_key = f"{model_type}:{model_path}" - if model_key in model_dict: - return model_dict[model_key] - model_path = os.path.join(config["model_config"][model_type]["model_folder"], model_path) - model_manager = ModelManager(torch_dtype=torch.bfloat16, device="cuda", model_id_list=["FLUX.1-dev"]) - model_manager.load_lora( - download_customized_models( - model_id="DiffSynth-Studio/Eligen", - origin_file_path="model_bf16.safetensors", - local_dir="models/lora/entity_control", - ), - lora_alpha=1, - ) - pipe = config["model_config"][model_type]["pipeline_class"].from_model_manager(model_manager) - model_dict[model_key] = model_manager, pipe - return model_manager, pipe - - -with gr.Blocks() as app: - gr.Markdown( - """## EliGen: Entity-Level Controllable Text-to-Image Model - 1. On the left, input the **global prompt** for the overall image, such as "a person stands by the river." - 2. On the right, input the **local prompt** for each entity, such as "person," and draw the corresponding mask in the **Entity Mask Painter**. Generally, solid rectangular masks yield better results. - 3. Click the **Generate** button to create the image. By selecting different **random seeds**, you can generate diverse images. - 4. **You can directly click the "Load Example" button on any sample at the bottom to load example inputs.** - """ - ) - - loading_status = gr.Textbox(label="Loading Model...", value="Loading model... 
Please wait...", visible=True) - main_interface = gr.Column(visible=False) - - def initialize_model(): - try: - load_model() - return { - loading_status: gr.update(value="Model loaded successfully!", visible=False), - main_interface: gr.update(visible=True), - } - except Exception as e: - print(f'Failed to load model with error: {e}') - return { - loading_status: gr.update(value=f"Failed to load model: {str(e)}", visible=True), - main_interface: gr.update(visible=True), - } - - app.load(initialize_model, inputs=None, outputs=[loading_status, main_interface]) - - with main_interface: - with gr.Row(): - local_prompt_list = [] - canvas_list = [] - random_mask_dir = gr.State(f'{random.randint(0, 1000000):08d}') - with gr.Column(scale=382, min_width=100): - model_type = gr.State('FLUX') - model_path = gr.State('FLUX.1-dev') - with gr.Accordion(label="Global prompt"): - prompt = gr.Textbox(label="Global Prompt", lines=3) - negative_prompt = gr.Textbox(label="Negative prompt", value="worst quality, low quality, monochrome, zombie, interlocked fingers, Aissist, cleavage, nsfw, blur,", lines=3) - with gr.Accordion(label="Inference Options", open=True): - seed = gr.Number(minimum=0, maximum=10**9, value=42, interactive=True, label="Random seed", show_label=True) - num_inference_steps = gr.Slider(minimum=1, maximum=100, value=30, step=1, interactive=True, label="Inference steps") - cfg_scale = gr.Slider(minimum=2.0, maximum=10.0, value=3.0, step=0.1, interactive=True, label="Classifier-free guidance scale") - embedded_guidance = gr.Slider(minimum=0.0, maximum=10.0, value=3.5, step=0.1, interactive=True, label="Embedded guidance scale") - height = gr.Slider(minimum=64, maximum=2048, value=1024, step=64, interactive=True, label="Height") - width = gr.Slider(minimum=64, maximum=2048, value=1024, step=64, interactive=True, label="Width") - with gr.Accordion(label="Inpaint Input Image", open=False): - input_image = gr.Image(sources=None, show_label=False, interactive=True, 
type="pil") - background_weight = gr.Slider(minimum=0.0, maximum=1000., value=0., step=1, interactive=False, label="background_weight", visible=False) - - with gr.Column(): - reset_input_button = gr.Button(value="Reset Inpaint Input") - send_input_to_painter = gr.Button(value="Set as painter's background") - @gr.on(inputs=[input_image], outputs=[input_image], triggers=reset_input_button.click) - def reset_input_image(input_image): - return None - - with gr.Column(scale=618, min_width=100): - with gr.Accordion(label="Entity Painter"): - for painter_layer_id in range(config["max_num_painter_layers"]): - with gr.Tab(label=f"Entity {painter_layer_id}"): - local_prompt = gr.Textbox(label="Local prompt", key=f"local_prompt_{painter_layer_id}") - canvas = gr.ImageEditor( - canvas_size=(512, 512), - sources=None, - layers=False, - interactive=True, - image_mode="RGBA", - brush=gr.Brush( - default_size=50, - default_color="#000000", - colors=["#000000"], - ), - label="Entity Mask Painter", - key=f"canvas_{painter_layer_id}", - width=width, - height=height, - ) - @gr.on(inputs=[height, width, canvas], outputs=canvas, triggers=[height.change, width.change, canvas.clear], show_progress="hidden") - def resize_canvas(height, width, canvas): - h, w = canvas["background"].shape[:2] - if h != height or width != w: - return np.ones((height, width, 3), dtype=np.uint8) * 255 - else: - return canvas - local_prompt_list.append(local_prompt) - canvas_list.append(canvas) - with gr.Accordion(label="Results"): - run_button = gr.Button(value="Generate", variant="primary") - output_image = gr.Image(sources=None, show_label=False, interactive=False, type="pil") - with gr.Row(): - with gr.Column(): - output_to_painter_button = gr.Button(value="Set as painter's background") - with gr.Column(): - return_with_mask = gr.Checkbox(value=False, interactive=True, label="show result with mask painting") - output_to_input_button = gr.Button(value="Set as input image", visible=False, interactive=False) - 
real_output = gr.State(None) - mask_out = gr.State(None) - - @gr.on( - inputs=[model_type, model_path, prompt, negative_prompt, cfg_scale, embedded_guidance, num_inference_steps, height, width, return_with_mask, seed, input_image, background_weight, random_mask_dir] + local_prompt_list + canvas_list, - outputs=[output_image, real_output, mask_out], - triggers=run_button.click - ) - def generate_image(model_type, model_path, prompt, negative_prompt, cfg_scale, embedded_guidance, num_inference_steps, height, width, return_with_mask, seed, input_image, background_weight, random_mask_dir, *args, progress=gr.Progress()): - _, pipe = load_model(model_type, model_path) - input_params = { - "prompt": prompt, - "negative_prompt": negative_prompt, - "cfg_scale": cfg_scale, - "num_inference_steps": num_inference_steps, - "height": height, - "width": width, - "progress_bar_cmd": progress.tqdm, - } - if isinstance(pipe, FluxImagePipeline): - input_params["embedded_guidance"] = embedded_guidance - if input_image is not None: - input_params["input_image"] = input_image.resize((width, height)).convert("RGB") - input_params["enable_eligen_inpaint"] = True - - local_prompt_list, canvas_list = ( - args[0 * config["max_num_painter_layers"]: 1 * config["max_num_painter_layers"]], - args[1 * config["max_num_painter_layers"]: 2 * config["max_num_painter_layers"]], - ) - local_prompts, masks = [], [] - for local_prompt, canvas in zip(local_prompt_list, canvas_list): - if isinstance(local_prompt, str) and len(local_prompt) > 0: - local_prompts.append(local_prompt) - masks.append(Image.fromarray(canvas["layers"][0][:, :, -1]).convert("RGB")) - entity_masks = None if len(masks) == 0 else masks - entity_prompts = None if len(local_prompts) == 0 else local_prompts - input_params.update({ - "eligen_entity_prompts": entity_prompts, - "eligen_entity_masks": entity_masks, - }) - torch.manual_seed(seed) - # save_mask_prompts(masks, local_prompts, prompt, seed, random_mask_dir) - image = 
pipe(**input_params) - masks = [mask.resize(image.size) for mask in masks] - image_with_mask = visualize_masks(image, masks, local_prompts) - - real_output = gr.State(image) - mask_out = gr.State(image_with_mask) - - if return_with_mask: - return image_with_mask, real_output, mask_out - return image, real_output, mask_out - - @gr.on(inputs=[input_image] + canvas_list, outputs=canvas_list, triggers=send_input_to_painter.click) - def send_input_to_painter_background(input_image, *canvas_list): - if input_image is None: - return tuple(canvas_list) - for canvas in canvas_list: - h, w = canvas["background"].shape[:2] - canvas["background"] = input_image.resize((w, h)) - return tuple(canvas_list) - @gr.on(inputs=[real_output] + canvas_list, outputs=canvas_list, triggers=output_to_painter_button.click) - def send_output_to_painter_background(real_output, *canvas_list): - if real_output is None: - return tuple(canvas_list) - for canvas in canvas_list: - h, w = canvas["background"].shape[:2] - canvas["background"] = real_output.value.resize((w, h)) - return tuple(canvas_list) - @gr.on(inputs=[return_with_mask, real_output, mask_out], outputs=[output_image], triggers=[return_with_mask.change], show_progress="hidden") - def show_output(return_with_mask, real_output, mask_out): - if return_with_mask: - return mask_out.value - else: - return real_output.value - @gr.on(inputs=[real_output], outputs=[input_image], triggers=output_to_input_button.click) - def send_output_to_pipe_input(real_output): - return real_output.value - - with gr.Column(): - gr.Markdown("## Examples") - for i in range(0, len(examples), 2): - with gr.Row(): - if i < len(examples): - example = examples[i] - with gr.Column(): - example_image = gr.Image( - value=f"data/examples/eligen/entity_control/example_{example['example_id']}/example_image.png", - label=example["description"], - interactive=False, - width=1024, - height=512 - ) - load_example_button = gr.Button(value=f"Load Example 
{example['example_id']}") - load_example_button.click( - load_example, - inputs=[load_example_button], - outputs=[num_inference_steps, prompt, negative_prompt, seed] + local_prompt_list + canvas_list - ) - - if i + 1 < len(examples): - example = examples[i + 1] - with gr.Column(): - example_image = gr.Image( - value=f"data/examples/eligen/entity_control/example_{example['example_id']}/example_image.png", - label=example["description"], - interactive=False, - width=1024, - height=512 - ) - load_example_button = gr.Button(value=f"Load Example {example['example_id']}") - load_example_button.click( - load_example, - inputs=[load_example_button], - outputs=[num_inference_steps, prompt, negative_prompt, seed] + local_prompt_list + canvas_list - ) -app.config["show_progress"] = "hidden" -app.launch() diff --git a/apps/gradio/qwen_image_eligen.py b/apps/gradio/qwen_image_eligen.py deleted file mode 100644 index c224f01..0000000 --- a/apps/gradio/qwen_image_eligen.py +++ /dev/null @@ -1,382 +0,0 @@ -import os -import torch -import numpy as np -from PIL import Image, ImageDraw, ImageFont -import random -import json -import gradio as gr -from diffsynth.pipelines.qwen_image import QwenImagePipeline, ModelConfig -from modelscope import dataset_snapshot_download, snapshot_download - -# pip install pydantic==2.10.6 -# pip install gradio==5.4.0 - -snapshot_download("DiffSynth-Studio/Qwen-Image-EliGen", local_dir="models/DiffSynth-Studio/Qwen-Image-EliGen", allow_file_pattern="model.safetensors") - -dataset_snapshot_download(dataset_id="DiffSynth-Studio/examples_in_diffsynth", local_dir="./", allow_file_pattern=f"data/examples/eligen/qwen-image/*") -example_json = 'data/examples/eligen/qwen-image/ui_examples.json' -with open(example_json, 'r') as f: - examples = json.load(f)['examples'] - -for idx in range(len(examples)): - example_id = examples[idx]['example_id'] - entity_prompts = examples[idx]['local_prompt_list'] - examples[idx]['mask_lists'] = 
[Image.open(f"data/examples/eligen/qwen-image/example_{example_id}/{i}.png").convert('RGB') for i in range(len(entity_prompts))] - -def create_canvas_data(background, masks): - if background.shape[-1] == 3: - background = np.dstack([background, np.full(background.shape[:2], 255, dtype=np.uint8)]) - layers = [] - for mask in masks: - if mask is not None: - mask_single_channel = mask if mask.ndim == 2 else mask[..., 0] - layer = np.zeros((mask_single_channel.shape[0], mask_single_channel.shape[1], 4), dtype=np.uint8) - layer[..., -1] = mask_single_channel - layers.append(layer) - else: - layers.append(np.zeros_like(background)) - - composite = background.copy() - for layer in layers: - if layer.size > 0: - composite = np.where(layer[..., -1:] > 0, layer, composite) - return { - "background": background, - "layers": layers, - "composite": composite, - } - -def load_example(load_example_button): - example_idx = int(load_example_button.split()[-1]) - 1 - example = examples[example_idx] - result = [ - 50, - example["global_prompt"], - example["negative_prompt"], - example["seed"], - *example["local_prompt_list"], - ] - num_entities = len(example["local_prompt_list"]) - result += [""] * (config["max_num_painter_layers"] - num_entities) - masks = [] - for mask in example["mask_lists"]: - mask_single_channel = np.array(mask.convert("L")) - masks.append(mask_single_channel) - for _ in range(config["max_num_painter_layers"] - len(masks)): - blank_mask = np.zeros_like(masks[0]) if masks else np.zeros((512, 512), dtype=np.uint8) - masks.append(blank_mask) - background = np.ones((masks[0].shape[0], masks[0].shape[1], 4), dtype=np.uint8) * 255 - canvas_data_list = [] - for mask in masks: - canvas_data = create_canvas_data(background, [mask]) - canvas_data_list.append(canvas_data) - result.extend(canvas_data_list) - return result - -def save_mask_prompts(masks, mask_prompts, global_prompt, seed=0, random_dir='0000000'): - save_dir = os.path.join('workdirs/tmp_mask', random_dir) - 
print(f'save to {save_dir}') - os.makedirs(save_dir, exist_ok=True) - for i, mask in enumerate(masks): - save_path = os.path.join(save_dir, f'{i}.png') - mask.save(save_path) - sample = { - "global_prompt": global_prompt, - "mask_prompts": mask_prompts, - "seed": seed, - } - with open(os.path.join(save_dir, f"prompts.json"), 'w', encoding='utf-8') as f: - json.dump(sample, f, ensure_ascii=False, indent=4) - -def visualize_masks(image, masks, mask_prompts, font_size=35, use_random_colors=False): - # Create a blank image for overlays - overlay = Image.new('RGBA', image.size, (0, 0, 0, 0)) - colors = [ - (165, 238, 173, 80), - (76, 102, 221, 80), - (221, 160, 77, 80), - (204, 93, 71, 80), - (145, 187, 149, 80), - (134, 141, 172, 80), - (157, 137, 109, 80), - (153, 104, 95, 80), - (165, 238, 173, 80), - (76, 102, 221, 80), - (221, 160, 77, 80), - (204, 93, 71, 80), - (145, 187, 149, 80), - (134, 141, 172, 80), - (157, 137, 109, 80), - (153, 104, 95, 80), - ] - # Generate random colors for each mask - if use_random_colors: - colors = [(random.randint(0, 255), random.randint(0, 255), random.randint(0, 255), 80) for _ in range(len(masks))] - # Font settings - try: - font = ImageFont.truetype("wqy-zenhei.ttc", font_size) # Adjust as needed - except IOError: - font = ImageFont.load_default(font_size) - # Overlay each mask onto the overlay image - for mask, mask_prompt, color in zip(masks, mask_prompts, colors): - if mask is None: - continue - # Convert mask to RGBA mode - mask_rgba = mask.convert('RGBA') - mask_data = mask_rgba.getdata() - new_data = [(color if item[:3] == (255, 255, 255) else (0, 0, 0, 0)) for item in mask_data] - mask_rgba.putdata(new_data) - # Draw the mask prompt text on the mask - draw = ImageDraw.Draw(mask_rgba) - mask_bbox = mask.getbbox() # Get the bounding box of the mask - if mask_bbox is None: - continue - text_position = (mask_bbox[0] + 10, mask_bbox[1] + 10) # Adjust text position based on mask position - draw.text(text_position, mask_prompt, 
fill=(255, 255, 255, 255), font=font) - # Alpha composite the overlay with this mask - overlay = Image.alpha_composite(overlay, mask_rgba) - # Composite the overlay onto the original image - result = Image.alpha_composite(image.convert('RGBA'), overlay) - return result - -config = { - "max_num_painter_layers": 8, - "max_num_model_cache": 1, -} - -model_dict = {} - -def load_model(model_type='qwen-image'): - global model_dict - model_key = f"{model_type}" - if model_key in model_dict: - return model_dict[model_key] - pipe = QwenImagePipeline.from_pretrained( - torch_dtype=torch.bfloat16, - device="cuda", - model_configs=[ - ModelConfig(model_id="Qwen/Qwen-Image", origin_file_pattern="transformer/diffusion_pytorch_model*.safetensors"), - ModelConfig(model_id="Qwen/Qwen-Image", origin_file_pattern="text_encoder/model*.safetensors"), - ModelConfig(model_id="Qwen/Qwen-Image", origin_file_pattern="vae/diffusion_pytorch_model.safetensors"), - ], - tokenizer_config=ModelConfig(model_id="Qwen/Qwen-Image", origin_file_pattern="tokenizer/"), - ) - pipe.load_lora(pipe.dit, "models/DiffSynth-Studio/Qwen-Image-EliGen/model.safetensors") - model_dict[model_key] = pipe - return pipe - -load_model('qwen-image') - -with gr.Blocks() as app: - gr.Markdown( - """## EliGen: Entity-Level Controllable Text-to-Image Model - 1. On the left, input the **global prompt** for the overall image, such as "a person stands by the river." - 2. On the right, input the **local prompt** for each entity, such as "person," and draw the corresponding mask in the **Entity Mask Painter**. Generally, solid rectangular masks yield better results. - 3. Click the **Generate** button to create the image. By selecting different **random seeds**, you can generate diverse images. - 4. **You can directly click the "Load Example" button on any sample at the bottom to load example inputs.** - """ - ) - - loading_status = gr.Textbox(label="Loading Model...", value="Loading model... 
Please wait...", visible=True) - main_interface = gr.Column(visible=False) - - def initialize_model(): - try: - load_model('qwen-image') - return { - loading_status: gr.update(value="Model loaded successfully!", visible=False), - main_interface: gr.update(visible=True), - } - except Exception as e: - print(f'Failed to load model with error: {e}') - return { - loading_status: gr.update(value=f"Failed to load model: {str(e)}", visible=True), - main_interface: gr.update(visible=True), - } - - app.load(initialize_model, inputs=None, outputs=[loading_status, main_interface]) - - with main_interface: - with gr.Row(): - local_prompt_list = [] - canvas_list = [] - random_mask_dir = gr.State(f'{random.randint(0, 1000000):08d}') - with gr.Column(scale=382, min_width=100): - model_type = gr.State('qwen-image') - with gr.Accordion(label="Global prompt"): - prompt = gr.Textbox(label="Global Prompt", lines=3) - negative_prompt = gr.Textbox(label="Negative prompt", value="", lines=3) - with gr.Accordion(label="Inference Options", open=True): - seed = gr.Number(minimum=0, maximum=10**9, value=42, interactive=True, label="Random seed", show_label=True) - num_inference_steps = gr.Slider(minimum=1, maximum=100, value=30, step=1, interactive=True, label="Inference steps") - cfg_scale = gr.Slider(minimum=2.0, maximum=10.0, value=4.0, step=0.1, interactive=True, label="Classifier-free guidance scale") - height = gr.Slider(minimum=64, maximum=2048, value=1024, step=64, interactive=True, label="Height") - width = gr.Slider(minimum=64, maximum=2048, value=1024, step=64, interactive=True, label="Width") - with gr.Accordion(label="Inpaint Input Image", open=False, visible=False): - input_image = gr.Image(sources=None, show_label=False, interactive=True, type="pil") - background_weight = gr.Slider(minimum=0.0, maximum=1000., value=0., step=1, interactive=False, label="background_weight", visible=False) - - with gr.Column(): - reset_input_button = gr.Button(value="Reset Inpaint Input") - 
send_input_to_painter = gr.Button(value="Set as painter's background") - @gr.on(inputs=[input_image], outputs=[input_image], triggers=reset_input_button.click) - def reset_input_image(input_image): - return None - - with gr.Column(scale=618, min_width=100): - with gr.Accordion(label="Entity Painter"): - for painter_layer_id in range(config["max_num_painter_layers"]): - with gr.Tab(label=f"Entity {painter_layer_id}"): - local_prompt = gr.Textbox(label="Local prompt", key=f"local_prompt_{painter_layer_id}") - canvas = gr.ImageEditor( - canvas_size=(1024, 1024), - sources=None, - layers=False, - interactive=True, - image_mode="RGBA", - brush=gr.Brush( - default_size=50, - default_color="#000000", - colors=["#000000"], - ), - label="Entity Mask Painter", - key=f"canvas_{painter_layer_id}", - width=width, - height=height, - ) - @gr.on(inputs=[height, width, canvas], outputs=canvas, triggers=[height.change, width.change, canvas.clear], show_progress="hidden") - def resize_canvas(height, width, canvas): - if canvas is None or canvas["background"] is None: - return np.ones((height, width, 3), dtype=np.uint8) * 255 - h, w = canvas["background"].shape[:2] - if h != height or width != w: - return np.ones((height, width, 3), dtype=np.uint8) * 255 - else: - return canvas - local_prompt_list.append(local_prompt) - canvas_list.append(canvas) - with gr.Accordion(label="Results"): - run_button = gr.Button(value="Generate", variant="primary") - output_image = gr.Image(sources=None, show_label=False, interactive=False, type="pil") - with gr.Row(): - with gr.Column(): - output_to_painter_button = gr.Button(value="Set as painter's background") - with gr.Column(): - return_with_mask = gr.Checkbox(value=False, interactive=True, label="show result with mask painting") - output_to_input_button = gr.Button(value="Set as input image", visible=False, interactive=False) - real_output = gr.State(None) - mask_out = gr.State(None) - - @gr.on( - inputs=[model_type, prompt, negative_prompt, 
cfg_scale, num_inference_steps, height, width, return_with_mask, seed, input_image, background_weight, random_mask_dir] + local_prompt_list + canvas_list, - outputs=[output_image, real_output, mask_out], - triggers=run_button.click - ) - def generate_image(model_type, prompt, negative_prompt, cfg_scale, num_inference_steps, height, width, return_with_mask, seed, input_image, background_weight, random_mask_dir, *args, progress=gr.Progress()): - pipe = load_model(model_type) - input_params = { - "prompt": prompt, - "negative_prompt": negative_prompt, - "cfg_scale": cfg_scale, - "num_inference_steps": num_inference_steps, - "height": height, - "width": width, - "progress_bar_cmd": progress.tqdm, - } - # if input_image is not None: - # input_params["input_image"] = input_image.resize((width, height)).convert("RGB") - # input_params["enable_eligen_inpaint"] = True - - local_prompt_list, canvas_list = ( - args[0 * config["max_num_painter_layers"]: 1 * config["max_num_painter_layers"]], - args[1 * config["max_num_painter_layers"]: 2 * config["max_num_painter_layers"]], - ) - local_prompts, masks = [], [] - for local_prompt, canvas in zip(local_prompt_list, canvas_list): - if isinstance(local_prompt, str) and len(local_prompt) > 0: - local_prompts.append(local_prompt) - masks.append(Image.fromarray(canvas["layers"][0][:, :, -1]).convert("RGB")) - entity_prompts = None if len(local_prompts) == 0 else local_prompts - entity_masks = None if len(masks) == 0 or entity_prompts is None else masks - input_params.update({ - "eligen_entity_prompts": entity_prompts, - "eligen_entity_masks": entity_masks, - }) - torch.manual_seed(seed) - save_mask_prompts(masks, local_prompts, prompt, seed, random_mask_dir) - image = pipe(**input_params) - masks = [mask.resize(image.size) for mask in masks] - image_with_mask = visualize_masks(image, masks, local_prompts) - - real_output = gr.State(image) - mask_out = gr.State(image_with_mask) - - if return_with_mask: - return image_with_mask, 
real_output, mask_out - return image, real_output, mask_out - - @gr.on(inputs=[input_image] + canvas_list, outputs=canvas_list, triggers=send_input_to_painter.click) - def send_input_to_painter_background(input_image, *canvas_list): - if input_image is None: - return tuple(canvas_list) - for canvas in canvas_list: - h, w = canvas["background"].shape[:2] - canvas["background"] = input_image.resize((w, h)) - return tuple(canvas_list) - @gr.on(inputs=[real_output] + canvas_list, outputs=canvas_list, triggers=output_to_painter_button.click) - def send_output_to_painter_background(real_output, *canvas_list): - if real_output is None: - return tuple(canvas_list) - for canvas in canvas_list: - h, w = canvas["background"].shape[:2] - canvas["background"] = real_output.value.resize((w, h)) - return tuple(canvas_list) - @gr.on(inputs=[return_with_mask, real_output, mask_out], outputs=[output_image], triggers=[return_with_mask.change], show_progress="hidden") - def show_output(return_with_mask, real_output, mask_out): - if return_with_mask: - return mask_out.value - else: - return real_output.value - @gr.on(inputs=[real_output], outputs=[input_image], triggers=output_to_input_button.click) - def send_output_to_pipe_input(real_output): - return real_output.value - - with gr.Column(): - gr.Markdown("## Examples") - for i in range(0, len(examples), 2): - with gr.Row(): - if i < len(examples): - example = examples[i] - with gr.Column(): - example_image = gr.Image( - value=f"data/examples/eligen/qwen-image/example_{example['example_id']}/example_image.png", - label=example["description"], - interactive=False, - width=1024, - height=512 - ) - load_example_button = gr.Button(value=f"Load Example {example['example_id']}") - load_example_button.click( - load_example, - inputs=[load_example_button], - outputs=[num_inference_steps, prompt, negative_prompt, seed] + local_prompt_list + canvas_list - ) - - if i + 1 < len(examples): - example = examples[i + 1] - with gr.Column(): - 
example_image = gr.Image( - value=f"data/examples/eligen/qwen-image/example_{example['example_id']}/example_image.png", - label=example["description"], - interactive=False, - width=1024, - height=512 - ) - load_example_button = gr.Button(value=f"Load Example {example['example_id']}") - load_example_button.click( - load_example, - inputs=[load_example_button], - outputs=[num_inference_steps, prompt, negative_prompt, seed] + local_prompt_list + canvas_list - ) -app.config["show_progress"] = "hidden" -app.launch(share=False) diff --git a/apps/streamlit/DiffSynth_Studio.py b/apps/streamlit/DiffSynth_Studio.py deleted file mode 100644 index cfd3856..0000000 --- a/apps/streamlit/DiffSynth_Studio.py +++ /dev/null @@ -1,15 +0,0 @@ -# Set web page format -import streamlit as st -st.set_page_config(layout="wide") -# Disable virtual VRAM on windows system -import torch -torch.cuda.set_per_process_memory_fraction(0.999, 0) - - -st.markdown(""" -# DiffSynth Studio - -[Source Code](https://github.com/Artiprocher/DiffSynth-Studio) - -Welcome to DiffSynth Studio. 
-""") diff --git a/apps/streamlit/pages/1_Image_Creator.py b/apps/streamlit/pages/1_Image_Creator.py deleted file mode 100644 index 732d219..0000000 --- a/apps/streamlit/pages/1_Image_Creator.py +++ /dev/null @@ -1,362 +0,0 @@ -import torch, os, io, json, time -import numpy as np -from PIL import Image -import streamlit as st -st.set_page_config(layout="wide") -from streamlit_drawable_canvas import st_canvas -from diffsynth.models import ModelManager -from diffsynth.pipelines import SDImagePipeline, SDXLImagePipeline, SD3ImagePipeline, HunyuanDiTImagePipeline, FluxImagePipeline -from diffsynth.data.video import crop_and_resize - - -config = { - "Stable Diffusion": { - "model_folder": "models/stable_diffusion", - "pipeline_class": SDImagePipeline, - "fixed_parameters": {} - }, - "Stable Diffusion XL": { - "model_folder": "models/stable_diffusion_xl", - "pipeline_class": SDXLImagePipeline, - "fixed_parameters": {} - }, - "Stable Diffusion 3": { - "model_folder": "models/stable_diffusion_3", - "pipeline_class": SD3ImagePipeline, - "fixed_parameters": {} - }, - "Stable Diffusion XL Turbo": { - "model_folder": "models/stable_diffusion_xl_turbo", - "pipeline_class": SDXLImagePipeline, - "fixed_parameters": { - "negative_prompt": "", - "cfg_scale": 1.0, - "num_inference_steps": 1, - "height": 512, - "width": 512, - } - }, - "Kolors": { - "model_folder": "models/kolors", - "pipeline_class": SDXLImagePipeline, - "fixed_parameters": {} - }, - "HunyuanDiT": { - "model_folder": "models/HunyuanDiT", - "pipeline_class": HunyuanDiTImagePipeline, - "fixed_parameters": { - "height": 1024, - "width": 1024, - } - }, - "FLUX": { - "model_folder": "models/FLUX", - "pipeline_class": FluxImagePipeline, - "fixed_parameters": { - "cfg_scale": 1.0, - } - } -} - - -def load_model_list(model_type): - folder = config[model_type]["model_folder"] - file_list = [i for i in os.listdir(folder) if i.endswith(".safetensors")] - if model_type in ["HunyuanDiT", "Kolors", "FLUX"]: - file_list += [i for 
i in os.listdir(folder) if os.path.isdir(os.path.join(folder, i))] - file_list = sorted(file_list) - return file_list - - -def release_model(): - if "model_manager" in st.session_state: - st.session_state["model_manager"].to("cpu") - del st.session_state["loaded_model_path"] - del st.session_state["model_manager"] - del st.session_state["pipeline"] - torch.cuda.empty_cache() - - -def load_model(model_type, model_path): - model_manager = ModelManager() - if model_type == "HunyuanDiT": - model_manager.load_models([ - os.path.join(model_path, "clip_text_encoder/pytorch_model.bin"), - os.path.join(model_path, "mt5/pytorch_model.bin"), - os.path.join(model_path, "model/pytorch_model_ema.pt"), - os.path.join(model_path, "sdxl-vae-fp16-fix/diffusion_pytorch_model.bin"), - ]) - elif model_type == "Kolors": - model_manager.load_models([ - os.path.join(model_path, "text_encoder"), - os.path.join(model_path, "unet/diffusion_pytorch_model.safetensors"), - os.path.join(model_path, "vae/diffusion_pytorch_model.safetensors"), - ]) - elif model_type == "FLUX": - model_manager.torch_dtype = torch.bfloat16 - file_list = [ - os.path.join(model_path, "text_encoder/model.safetensors"), - os.path.join(model_path, "text_encoder_2"), - ] - for file_name in os.listdir(model_path): - if file_name.endswith(".safetensors"): - file_list.append(os.path.join(model_path, file_name)) - model_manager.load_models(file_list) - else: - model_manager.load_model(model_path) - pipeline = config[model_type]["pipeline_class"].from_model_manager(model_manager) - st.session_state.loaded_model_path = model_path - st.session_state.model_manager = model_manager - st.session_state.pipeline = pipeline - return model_manager, pipeline - - -def use_output_image_as_input(update=True): - # Search for input image - output_image_id = 0 - selected_output_image = None - while True: - if f"use_output_as_input_{output_image_id}" not in st.session_state: - break - if 
st.session_state[f"use_output_as_input_{output_image_id}"]: - selected_output_image = st.session_state["output_images"][output_image_id] - break - output_image_id += 1 - if update and selected_output_image is not None: - st.session_state["input_image"] = selected_output_image - return selected_output_image is not None - - -def apply_stroke_to_image(stroke_image, image): - image = np.array(image.convert("RGB")).astype(np.float32) - height, width, _ = image.shape - - stroke_image = np.array(Image.fromarray(stroke_image).resize((width, height))).astype(np.float32) - weight = stroke_image[:, :, -1:] / 255 - stroke_image = stroke_image[:, :, :-1] - - image = stroke_image * weight + image * (1 - weight) - image = np.clip(image, 0, 255).astype(np.uint8) - image = Image.fromarray(image) - return image - - -@st.cache_data -def image2bits(image): - image_byte = io.BytesIO() - image.save(image_byte, format="PNG") - image_byte = image_byte.getvalue() - return image_byte - - -def show_output_image(image): - st.image(image, use_column_width="always") - st.button("Use it as input image", key=f"use_output_as_input_{image_id}") - st.download_button("Download", data=image2bits(image), file_name="image.png", mime="image/png", key=f"download_output_{image_id}") - - -column_input, column_output = st.columns(2) -with st.sidebar: - # Select a model - with st.expander("Model", expanded=True): - model_type = st.selectbox("Model type", [model_type_ for model_type_ in config]) - fixed_parameters = config[model_type]["fixed_parameters"] - model_path_list = ["None"] + load_model_list(model_type) - model_path = st.selectbox("Model path", model_path_list) - - # Load the model - if model_path == "None": - # No models are selected. Release VRAM. - st.markdown("No models are selected.") - release_model() - else: - # A model is selected. 
- model_path = os.path.join(config[model_type]["model_folder"], model_path) - if st.session_state.get("loaded_model_path", "") != model_path: - # The loaded model is not the selected model. Reload it. - st.markdown(f"Loading model at {model_path}.") - st.markdown("Please wait a moment...") - release_model() - model_manager, pipeline = load_model(model_type, model_path) - st.markdown("Done.") - else: - # The loaded model is not the selected model. Fetch it from `st.session_state`. - st.markdown(f"Loading model at {model_path}.") - st.markdown("Please wait a moment...") - model_manager, pipeline = st.session_state.model_manager, st.session_state.pipeline - st.markdown("Done.") - - # Show parameters - with st.expander("Prompt", expanded=True): - prompt = st.text_area("Positive prompt") - if "negative_prompt" in fixed_parameters: - negative_prompt = fixed_parameters["negative_prompt"] - else: - negative_prompt = st.text_area("Negative prompt") - if "cfg_scale" in fixed_parameters: - cfg_scale = fixed_parameters["cfg_scale"] - else: - cfg_scale = st.slider("Classifier-free guidance scale", min_value=1.0, max_value=10.0, value=7.5) - with st.expander("Image", expanded=True): - if "num_inference_steps" in fixed_parameters: - num_inference_steps = fixed_parameters["num_inference_steps"] - else: - num_inference_steps = st.slider("Inference steps", min_value=1, max_value=100, value=20) - if "height" in fixed_parameters: - height = fixed_parameters["height"] - else: - height = st.select_slider("Height", options=[256, 512, 768, 1024, 2048], value=512) - if "width" in fixed_parameters: - width = fixed_parameters["width"] - else: - width = st.select_slider("Width", options=[256, 512, 768, 1024, 2048], value=512) - num_images = st.number_input("Number of images", value=2) - use_fixed_seed = st.checkbox("Use fixed seed", value=False) - if use_fixed_seed: - seed = st.number_input("Random seed", min_value=0, max_value=10**9, step=1, value=0) - - # Other fixed parameters - 
denoising_strength = 1.0 - repetition = 1 - - -# Show input image -with column_input: - with st.expander("Input image (Optional)", expanded=True): - with st.container(border=True): - column_white_board, column_upload_image = st.columns([1, 2]) - with column_white_board: - create_white_board = st.button("Create white board") - delete_input_image = st.button("Delete input image") - with column_upload_image: - upload_image = st.file_uploader("Upload image", type=["png", "jpg"], key="upload_image") - - if upload_image is not None: - st.session_state["input_image"] = crop_and_resize(Image.open(upload_image), height, width) - elif create_white_board: - st.session_state["input_image"] = Image.fromarray(np.ones((height, width, 3), dtype=np.uint8) * 255) - else: - use_output_image_as_input() - - if delete_input_image and "input_image" in st.session_state: - del st.session_state.input_image - if delete_input_image and "upload_image" in st.session_state: - del st.session_state.upload_image - - input_image = st.session_state.get("input_image", None) - if input_image is not None: - with st.container(border=True): - column_drawing_mode, column_color_1, column_color_2 = st.columns([4, 1, 1]) - with column_drawing_mode: - drawing_mode = st.radio("Drawing tool", ["transform", "freedraw", "line", "rect"], horizontal=True, index=1) - with column_color_1: - stroke_color = st.color_picker("Stroke color") - with column_color_2: - fill_color = st.color_picker("Fill color") - stroke_width = st.slider("Stroke width", min_value=1, max_value=50, value=10) - with st.container(border=True): - denoising_strength = st.slider("Denoising strength", min_value=0.0, max_value=1.0, value=0.7) - repetition = st.slider("Repetition", min_value=1, max_value=8, value=1) - with st.container(border=True): - input_width, input_height = input_image.size - canvas_result = st_canvas( - fill_color=fill_color, - stroke_width=stroke_width, - stroke_color=stroke_color, - background_color="rgba(255, 255, 255, 0)", - 
background_image=input_image, - update_streamlit=True, - height=int(512 / input_width * input_height), - width=512, - drawing_mode=drawing_mode, - key="canvas" - ) - - num_painter_layer = st.number_input("Number of painter layers", min_value=0, max_value=10, step=1, value=0) - local_prompts, masks, mask_scales = [], [], [] - white_board = Image.fromarray(np.ones((512, 512, 3), dtype=np.uint8) * 255) - painter_layers_json_data = [] - for painter_tab_id in range(num_painter_layer): - with st.expander(f"Painter layer {painter_tab_id}", expanded=True): - enable_local_prompt = st.checkbox(f"Enable prompt {painter_tab_id}", value=True) - local_prompt = st.text_area(f"Prompt {painter_tab_id}") - mask_scale = st.slider(f"Mask scale {painter_tab_id}", min_value=0.0, max_value=3.0, value=1.0) - stroke_width = st.slider(f"Stroke width {painter_tab_id}", min_value=1, max_value=300, value=100) - canvas_result_local = st_canvas( - fill_color="#000000", - stroke_width=stroke_width, - stroke_color="#000000", - background_color="rgba(255, 255, 255, 0)", - background_image=white_board, - update_streamlit=True, - height=512, - width=512, - drawing_mode="freedraw", - key=f"canvas_{painter_tab_id}" - ) - if canvas_result_local.json_data is not None: - painter_layers_json_data.append(canvas_result_local.json_data.copy()) - painter_layers_json_data[-1]["prompt"] = local_prompt - if enable_local_prompt: - local_prompts.append(local_prompt) - if canvas_result_local.image_data is not None: - mask = apply_stroke_to_image(canvas_result_local.image_data, white_board) - else: - mask = white_board - mask = Image.fromarray(255 - np.array(mask)) - masks.append(mask) - mask_scales.append(mask_scale) - save_painter_layers = st.button("Save painter layers") - if save_painter_layers: - os.makedirs("data/painter_layers", exist_ok=True) - json_file_path = f"data/painter_layers/{time.time_ns()}.json" - with open(json_file_path, "w") as f: - json.dump(painter_layers_json_data, f, indent=4) - 
st.markdown(f"Painter layers are saved in {json_file_path}.") - - -with column_output: - run_button = st.button("Generate image", type="primary") - auto_update = st.checkbox("Auto update", value=False) - num_image_columns = st.slider("Columns", min_value=1, max_value=8, value=2) - image_columns = st.columns(num_image_columns) - - # Run - if (run_button or auto_update) and model_path != "None": - - if input_image is not None: - input_image = input_image.resize((width, height)) - if canvas_result.image_data is not None: - input_image = apply_stroke_to_image(canvas_result.image_data, input_image) - - output_images = [] - for image_id in range(num_images * repetition): - if use_fixed_seed: - torch.manual_seed(seed + image_id) - else: - torch.manual_seed(np.random.randint(0, 10**9)) - if image_id >= num_images: - input_image = output_images[image_id - num_images] - with image_columns[image_id % num_image_columns]: - progress_bar_st = st.progress(0.0) - image = pipeline( - prompt, negative_prompt=negative_prompt, - local_prompts=local_prompts, masks=masks, mask_scales=mask_scales, - cfg_scale=cfg_scale, num_inference_steps=num_inference_steps, - height=height, width=width, - input_image=input_image, denoising_strength=denoising_strength, - progress_bar_st=progress_bar_st - ) - output_images.append(image) - progress_bar_st.progress(1.0) - show_output_image(image) - st.session_state["output_images"] = output_images - - elif "output_images" in st.session_state: - for image_id in range(len(st.session_state.output_images)): - with image_columns[image_id % num_image_columns]: - image = st.session_state.output_images[image_id] - progress_bar = st.progress(1.0) - show_output_image(image) - if "upload_image" in st.session_state and use_output_image_as_input(update=False): - st.markdown("If you want to use an output image as input image, please delete the uploaded image manually.") diff --git a/apps/streamlit/pages/2_Video_Creator.py b/apps/streamlit/pages/2_Video_Creator.py 
deleted file mode 100644 index 8748072..0000000 --- a/apps/streamlit/pages/2_Video_Creator.py +++ /dev/null @@ -1,197 +0,0 @@ -import streamlit as st -st.set_page_config(layout="wide") -from diffsynth import SDVideoPipelineRunner -import os -import numpy as np - - -def load_model_list(folder): - file_list = os.listdir(folder) - file_list = [i for i in file_list if i.endswith(".safetensors") or i.endswith(".pth") or i.endswith(".ckpt")] - file_list = sorted(file_list) - return file_list - - -def match_processor_id(model_name, supported_processor_id_list): - sorted_processor_id = [i[1] for i in sorted([(-len(i), i) for i in supported_processor_id_list])] - for processor_id in sorted_processor_id: - if processor_id in model_name: - return supported_processor_id_list.index(processor_id) + 1 - return 0 - - -config = { - "models": { - "model_list": [], - "textual_inversion_folder": "models/textual_inversion", - "device": "cuda", - "lora_alphas": [], - "controlnet_units": [] - }, - "data": { - "input_frames": None, - "controlnet_frames": [], - "output_folder": "output", - "fps": 60 - }, - "pipeline": { - "seed": 0, - "pipeline_inputs": {} - } -} - - -with st.expander("Model", expanded=True): - stable_diffusion_ckpt = st.selectbox("Stable Diffusion", ["None"] + load_model_list("models/stable_diffusion")) - if stable_diffusion_ckpt != "None": - config["models"]["model_list"].append(os.path.join("models/stable_diffusion", stable_diffusion_ckpt)) - animatediff_ckpt = st.selectbox("AnimateDiff", ["None"] + load_model_list("models/AnimateDiff")) - if animatediff_ckpt != "None": - config["models"]["model_list"].append(os.path.join("models/AnimateDiff", animatediff_ckpt)) - column_lora, column_lora_alpha = st.columns([2, 1]) - with column_lora: - sd_lora_ckpt = st.selectbox("LoRA", ["None"] + load_model_list("models/lora")) - with column_lora_alpha: - lora_alpha = st.slider("LoRA Alpha", min_value=-4.0, max_value=4.0, value=1.0, step=0.1) - if sd_lora_ckpt != "None": - 
config["models"]["model_list"].append(os.path.join("models/lora", sd_lora_ckpt)) - config["models"]["lora_alphas"].append(lora_alpha) - - -with st.expander("Data", expanded=True): - with st.container(border=True): - input_video = st.text_input("Input Video File Path (e.g., data/your_video.mp4)", value="") - column_height, column_width, column_start_frame_index, column_end_frame_index = st.columns([2, 2, 1, 1]) - with column_height: - height = st.select_slider("Height", options=[256, 512, 768, 1024, 1536, 2048], value=1024) - with column_width: - width = st.select_slider("Width", options=[256, 512, 768, 1024, 1536, 2048], value=1024) - with column_start_frame_index: - start_frame_id = st.number_input("Start Frame id", value=0) - with column_end_frame_index: - end_frame_id = st.number_input("End Frame id", value=16) - if input_video != "": - config["data"]["input_frames"] = { - "video_file": input_video, - "image_folder": None, - "height": height, - "width": width, - "start_frame_id": start_frame_id, - "end_frame_id": end_frame_id - } - with st.container(border=True): - output_video = st.text_input("Output Video File Path (e.g., data/a_folder_to_save_something)", value="output") - fps = st.number_input("FPS", value=60) - config["data"]["output_folder"] = output_video - config["data"]["fps"] = fps - - -with st.expander("ControlNet Units", expanded=True): - supported_processor_id_list = ["canny", "depth", "softedge", "lineart", "lineart_anime", "openpose", "tile"] - controlnet_units = st.tabs(["ControlNet Unit 0", "ControlNet Unit 1", "ControlNet Unit 2"]) - for controlnet_id in range(len(controlnet_units)): - with controlnet_units[controlnet_id]: - controlnet_ckpt = st.selectbox("ControlNet", ["None"] + load_model_list("models/ControlNet"), - key=f"controlnet_ckpt_{controlnet_id}") - processor_id = st.selectbox("Processor", ["None"] + supported_processor_id_list, - index=match_processor_id(controlnet_ckpt, supported_processor_id_list), - disabled=controlnet_ckpt == 
"None", key=f"processor_id_{controlnet_id}") - controlnet_scale = st.slider("Scale", min_value=0.0, max_value=1.0, step=0.01, value=0.5, - disabled=controlnet_ckpt == "None", key=f"controlnet_scale_{controlnet_id}") - use_input_video_as_controlnet_input = st.checkbox("Use input video as ControlNet input", value=True, - disabled=controlnet_ckpt == "None", - key=f"use_input_video_as_controlnet_input_{controlnet_id}") - if not use_input_video_as_controlnet_input: - controlnet_input_video = st.text_input("ControlNet Input Video File Path", value="", - disabled=controlnet_ckpt == "None", key=f"controlnet_input_video_{controlnet_id}") - column_height, column_width, column_start_frame_index, column_end_frame_index = st.columns([2, 2, 1, 1]) - with column_height: - height = st.select_slider("Height", options=[256, 512, 768, 1024, 1536, 2048], value=1024, - disabled=controlnet_ckpt == "None", key=f"controlnet_height_{controlnet_id}") - with column_width: - width = st.select_slider("Width", options=[256, 512, 768, 1024, 1536, 2048], value=1024, - disabled=controlnet_ckpt == "None", key=f"controlnet_width_{controlnet_id}") - with column_start_frame_index: - start_frame_id = st.number_input("Start Frame id", value=0, - disabled=controlnet_ckpt == "None", key=f"controlnet_start_frame_id_{controlnet_id}") - with column_end_frame_index: - end_frame_id = st.number_input("End Frame id", value=16, - disabled=controlnet_ckpt == "None", key=f"controlnet_end_frame_id_{controlnet_id}") - if input_video != "": - config["data"]["input_video"] = { - "video_file": input_video, - "image_folder": None, - "height": height, - "width": width, - "start_frame_id": start_frame_id, - "end_frame_id": end_frame_id - } - if controlnet_ckpt != "None": - config["models"]["model_list"].append(os.path.join("models/ControlNet", controlnet_ckpt)) - config["models"]["controlnet_units"].append({ - "processor_id": processor_id, - "model_path": os.path.join("models/ControlNet", controlnet_ckpt), - "scale": 
controlnet_scale, - }) - if use_input_video_as_controlnet_input: - config["data"]["controlnet_frames"].append(config["data"]["input_frames"]) - else: - config["data"]["controlnet_frames"].append({ - "video_file": input_video, - "image_folder": None, - "height": height, - "width": width, - "start_frame_id": start_frame_id, - "end_frame_id": end_frame_id - }) - - -with st.container(border=True): - with st.expander("Seed", expanded=True): - use_fixed_seed = st.checkbox("Use fixed seed", value=False) - if use_fixed_seed: - seed = st.number_input("Random seed", min_value=0, max_value=10**9, step=1, value=0) - else: - seed = np.random.randint(0, 10**9) - with st.expander("Textual Guidance", expanded=True): - prompt = st.text_area("Positive prompt") - negative_prompt = st.text_area("Negative prompt") - column_cfg_scale, column_clip_skip = st.columns(2) - with column_cfg_scale: - cfg_scale = st.slider("Classifier-free guidance scale", min_value=1.0, max_value=10.0, value=7.0) - with column_clip_skip: - clip_skip = st.slider("Clip Skip", min_value=1, max_value=4, value=1) - with st.expander("Denoising", expanded=True): - column_num_inference_steps, column_denoising_strength = st.columns(2) - with column_num_inference_steps: - num_inference_steps = st.slider("Inference steps", min_value=1, max_value=100, value=10) - with column_denoising_strength: - denoising_strength = st.slider("Denoising strength", min_value=0.0, max_value=1.0, value=1.0) - with st.expander("Efficiency", expanded=False): - animatediff_batch_size = st.slider("Animatediff batch size (sliding window size)", min_value=1, max_value=32, value=16, step=1) - animatediff_stride = st.slider("Animatediff stride", - min_value=1, - max_value=max(2, animatediff_batch_size), - value=max(1, animatediff_batch_size // 2), - step=1) - unet_batch_size = st.slider("UNet batch size", min_value=1, max_value=32, value=1, step=1) - controlnet_batch_size = st.slider("ControlNet batch size", min_value=1, max_value=32, value=1, 
step=1) - cross_frame_attention = st.checkbox("Enable Cross-Frame Attention", value=False) - config["pipeline"]["seed"] = seed - config["pipeline"]["pipeline_inputs"] = { - "prompt": prompt, - "negative_prompt": negative_prompt, - "cfg_scale": cfg_scale, - "clip_skip": clip_skip, - "denoising_strength": denoising_strength, - "num_inference_steps": num_inference_steps, - "animatediff_batch_size": animatediff_batch_size, - "animatediff_stride": animatediff_stride, - "unet_batch_size": unet_batch_size, - "controlnet_batch_size": controlnet_batch_size, - "cross_frame_attention": cross_frame_attention, - } - -run_button = st.button("☢️Run☢️", type="primary") -if run_button: - SDVideoPipelineRunner(in_streamlit=True).run(config) diff --git a/diffsynth/__init__.py b/diffsynth/__init__.py index ae0a45c..bb67a43 100644 --- a/diffsynth/__init__.py +++ b/diffsynth/__init__.py @@ -1,6 +1 @@ -from .data import * -from .models import * -from .prompters import * -from .schedulers import * -from .pipelines import * -from .controlnets import * +from .core import * diff --git a/diffsynth/configs/__init__.py b/diffsynth/configs/__init__.py index e69de29..144a822 100644 --- a/diffsynth/configs/__init__.py +++ b/diffsynth/configs/__init__.py @@ -0,0 +1,2 @@ +from .model_configs import MODEL_CONFIGS +from .vram_management_module_maps import VRAM_MANAGEMENT_MODULE_MAPS diff --git a/diffsynth/configs/model_config.py b/diffsynth/configs/model_config.py deleted file mode 100644 index 47e26e0..0000000 --- a/diffsynth/configs/model_config.py +++ /dev/null @@ -1,857 +0,0 @@ -from typing_extensions import Literal, TypeAlias - -from ..models.sd_text_encoder import SDTextEncoder -from ..models.sd_unet import SDUNet -from ..models.sd_vae_encoder import SDVAEEncoder -from ..models.sd_vae_decoder import SDVAEDecoder - -from ..models.sdxl_text_encoder import SDXLTextEncoder, SDXLTextEncoder2 -from ..models.sdxl_unet import SDXLUNet -from ..models.sdxl_vae_decoder import SDXLVAEDecoder -from 
..models.sdxl_vae_encoder import SDXLVAEEncoder - -from ..models.sd3_text_encoder import SD3TextEncoder1, SD3TextEncoder2, SD3TextEncoder3 -from ..models.sd3_dit import SD3DiT -from ..models.sd3_vae_decoder import SD3VAEDecoder -from ..models.sd3_vae_encoder import SD3VAEEncoder - -from ..models.sd_controlnet import SDControlNet -from ..models.sdxl_controlnet import SDXLControlNetUnion - -from ..models.sd_motion import SDMotionModel -from ..models.sdxl_motion import SDXLMotionModel - -from ..models.svd_image_encoder import SVDImageEncoder -from ..models.svd_unet import SVDUNet -from ..models.svd_vae_decoder import SVDVAEDecoder -from ..models.svd_vae_encoder import SVDVAEEncoder - -from ..models.sd_ipadapter import SDIpAdapter, IpAdapterCLIPImageEmbedder -from ..models.sdxl_ipadapter import SDXLIpAdapter, IpAdapterXLCLIPImageEmbedder - -from ..models.hunyuan_dit_text_encoder import HunyuanDiTCLIPTextEncoder, HunyuanDiTT5TextEncoder -from ..models.hunyuan_dit import HunyuanDiT - -from ..models.flux_dit import FluxDiT -from ..models.flux_text_encoder import FluxTextEncoder2 -from ..models.flux_vae import FluxVAEEncoder, FluxVAEDecoder -from ..models.flux_controlnet import FluxControlNet -from ..models.flux_ipadapter import FluxIpAdapter -from ..models.flux_infiniteyou import InfiniteYouImageProjector - -from ..models.cog_vae import CogVAEEncoder, CogVAEDecoder -from ..models.cog_dit import CogDiT - -from ..models.omnigen import OmniGenTransformer - -from ..models.hunyuan_video_vae_decoder import HunyuanVideoVAEDecoder -from ..models.hunyuan_video_vae_encoder import HunyuanVideoVAEEncoder - -from ..extensions.RIFE import IFNet -from ..extensions.ESRGAN import RRDBNet - -from ..models.hunyuan_video_dit import HunyuanVideoDiT - -from ..models.stepvideo_vae import StepVideoVAE -from ..models.stepvideo_dit import StepVideoModel - -from ..models.wan_video_dit import WanModel -from ..models.wan_video_dit_s2v import WanS2VModel -from ..models.wan_video_text_encoder import 
WanTextEncoder -from ..models.wan_video_image_encoder import WanImageEncoder -from ..models.wan_video_vae import WanVideoVAE, WanVideoVAE38 -from ..models.wan_video_motion_controller import WanMotionControllerModel -from ..models.wan_video_vace import VaceWanModel -from ..models.wav2vec import WanS2VAudioEncoder -from ..models.wan_video_animate_adapter import WanAnimateAdapter - -from ..models.step1x_connector import Qwen2Connector - -from ..models.flux_value_control import SingleValueEncoder - -from ..lora.flux_lora import FluxLoraPatcher -from ..models.flux_lora_encoder import FluxLoRAEncoder - -from ..models.nexus_gen_projector import NexusGenAdapter, NexusGenImageEmbeddingMerger -from ..models.nexus_gen import NexusGenAutoregressiveModel - -from ..models.qwen_image_dit import QwenImageDiT -from ..models.qwen_image_text_encoder import QwenImageTextEncoder -from ..models.qwen_image_vae import QwenImageVAE -from ..models.qwen_image_controlnet import QwenImageBlockWiseControlNet - -from ..models.longcat_video_dit import LongCatVideoTransformer3DModel - -model_loader_configs = [ - # These configs are provided for detecting model type automatically. 
- # The format is (state_dict_keys_hash, state_dict_keys_hash_with_shape, model_names, model_classes, model_resource) - (None, "091b0e30e77c76626b3ba62acdf95343", ["sd_controlnet"], [SDControlNet], "civitai"), - (None, "4a6c8306a27d916dea81263c8c88f450", ["hunyuan_dit_clip_text_encoder"], [HunyuanDiTCLIPTextEncoder], "civitai"), - (None, "f4aec400fe394297961218c768004521", ["hunyuan_dit"], [HunyuanDiT], "civitai"), - (None, "9e6e58043a5a2e332803ed42f6ee7181", ["hunyuan_dit_t5_text_encoder"], [HunyuanDiTT5TextEncoder], "civitai"), - (None, "13115dd45a6e1c39860f91ab073b8a78", ["sdxl_vae_encoder", "sdxl_vae_decoder"], [SDXLVAEEncoder, SDXLVAEDecoder], "diffusers"), - (None, "d78aa6797382a6d455362358a3295ea9", ["sd_ipadapter_clip_image_encoder"], [IpAdapterCLIPImageEmbedder], "diffusers"), - (None, "e291636cc15e803186b47404262ef812", ["sd_ipadapter"], [SDIpAdapter], "civitai"), - (None, "399c81f2f8de8d1843d0127a00f3c224", ["sdxl_ipadapter_clip_image_encoder"], [IpAdapterXLCLIPImageEmbedder], "diffusers"), - (None, "a64eac9aa0db4b9602213bc0131281c7", ["sdxl_ipadapter"], [SDXLIpAdapter], "civitai"), - (None, "52817e4fdd89df154f02749ca6f692ac", ["sdxl_unet"], [SDXLUNet], "diffusers"), - (None, "03343c606f16d834d6411d0902b53636", ["sd_text_encoder", "sd_unet", "sd_vae_decoder", "sd_vae_encoder"], [SDTextEncoder, SDUNet, SDVAEDecoder, SDVAEEncoder], "civitai"), - (None, "d4ba77a7ece070679b4a987f58f201e9", ["sd_text_encoder"], [SDTextEncoder], "civitai"), - (None, "d0c89e55c5a57cf3981def0cb1c9e65a", ["sd_vae_decoder", "sd_vae_encoder"], [SDVAEDecoder, SDVAEEncoder], "civitai"), - (None, "3926bf373b39a67eeafd7901478a47a7", ["sd_unet"], [SDUNet], "civitai"), - (None, "1e0c39ec176b9007c05f76d52b554a4d", ["sd3_text_encoder_1", "sd3_text_encoder_2", "sd3_dit", "sd3_vae_encoder", "sd3_vae_decoder"], [SD3TextEncoder1, SD3TextEncoder2, SD3DiT, SD3VAEEncoder, SD3VAEDecoder], "civitai"), - (None, "d9e0290829ba8d98e28e1a2b1407db4a", ["sd3_text_encoder_1", "sd3_text_encoder_2", 
"sd3_text_encoder_3", "sd3_dit", "sd3_vae_encoder", "sd3_vae_decoder"], [SD3TextEncoder1, SD3TextEncoder2, SD3TextEncoder3, SD3DiT, SD3VAEEncoder, SD3VAEDecoder], "civitai"), - (None, "5072d0b24e406b49507abe861cf97691", ["sd3_text_encoder_3"], [SD3TextEncoder3], "civitai"), - (None, "4cf64a799d04260df438c6f33c9a047e", ["sdxl_text_encoder", "sdxl_text_encoder_2", "sdxl_unet", "sdxl_vae_decoder", "sdxl_vae_encoder"], [SDXLTextEncoder, SDXLTextEncoder2, SDXLUNet, SDXLVAEDecoder, SDXLVAEEncoder], "civitai"), - (None, "d9b008a867c498ab12ad24042eff8e3f", ["sdxl_text_encoder", "sdxl_text_encoder_2", "sdxl_unet", "sdxl_vae_decoder", "sdxl_vae_encoder"], [SDXLTextEncoder, SDXLTextEncoder2, SDXLUNet, SDXLVAEDecoder, SDXLVAEEncoder], "civitai"), # SDXL-Turbo - (None, "025bb7452e531a3853d951d77c63f032", ["sdxl_text_encoder", "sdxl_text_encoder_2"], [SDXLTextEncoder, SDXLTextEncoder2], "civitai"), - (None, "298997b403a4245c04102c9f36aac348", ["sdxl_unet"], [SDXLUNet], "civitai"), - (None, "2a07abce74b4bdc696b76254ab474da6", ["svd_image_encoder", "svd_unet", "svd_vae_decoder", "svd_vae_encoder"], [SVDImageEncoder, SVDUNet, SVDVAEDecoder, SVDVAEEncoder], "civitai"), - (None, "c96a285a6888465f87de22a984d049fb", ["sd_motion_modules"], [SDMotionModel], "civitai"), - (None, "72907b92caed19bdb2adb89aa4063fe2", ["sdxl_motion_modules"], [SDXLMotionModel], "civitai"), - (None, "31d2d9614fba60511fc9bf2604aa01f7", ["sdxl_controlnet"], [SDXLControlNetUnion], "diffusers"), - (None, "94eefa3dac9cec93cb1ebaf1747d7b78", ["sd3_text_encoder_1"], [SD3TextEncoder1], "diffusers"), - (None, "1aafa3cc91716fb6b300cc1cd51b85a3", ["flux_vae_encoder", "flux_vae_decoder"], [FluxVAEEncoder, FluxVAEDecoder], "diffusers"), - (None, "21ea55f476dfc4fd135587abb59dfe5d", ["flux_vae_encoder", "flux_vae_decoder"], [FluxVAEEncoder, FluxVAEDecoder], "civitai"), - (None, "a29710fea6dddb0314663ee823598e50", ["flux_dit"], [FluxDiT], "civitai"), - (None, "57b02550baab820169365b3ee3afa2c9", ["flux_dit"], [FluxDiT], 
"civitai"), - (None, "3394f306c4cbf04334b712bf5aaed95f", ["flux_dit"], [FluxDiT], "civitai"), - (None, "023f054d918a84ccf503481fd1e3379e", ["flux_dit"], [FluxDiT], "civitai"), - (None, "d02f41c13549fa5093d3521f62a5570a", ["flux_dit"], [FluxDiT], "civitai"), - (None, "605c56eab23e9e2af863ad8f0813a25d", ["flux_dit"], [FluxDiT], "diffusers"), - (None, "0629116fce1472503a66992f96f3eb1a", ["flux_value_controller"], [SingleValueEncoder], "civitai"), - (None, "280189ee084bca10f70907bf6ce1649d", ["cog_vae_encoder", "cog_vae_decoder"], [CogVAEEncoder, CogVAEDecoder], "diffusers"), - (None, "9b9313d104ac4df27991352fec013fd4", ["rife"], [IFNet], "civitai"), - (None, "6b7116078c4170bfbeaedc8fe71f6649", ["esrgan"], [RRDBNet], "civitai"), - (None, "61cbcbc7ac11f169c5949223efa960d1", ["omnigen_transformer"], [OmniGenTransformer], "diffusers"), - (None, "78d18b9101345ff695f312e7e62538c0", ["flux_controlnet"], [FluxControlNet], "diffusers"), - (None, "b001c89139b5f053c715fe772362dd2a", ["flux_controlnet"], [FluxControlNet], "diffusers"), - (None, "52357cb26250681367488a8954c271e8", ["flux_controlnet"], [FluxControlNet], "diffusers"), - (None, "0cfd1740758423a2a854d67c136d1e8c", ["flux_controlnet"], [FluxControlNet], "diffusers"), - (None, "7f9583eb8ba86642abb9a21a4b2c9e16", ["flux_controlnet"], [FluxControlNet], "diffusers"), - (None, "43ad5aaa27dd4ee01b832ed16773fa52", ["flux_controlnet"], [FluxControlNet], "diffusers"), - (None, "c07c0f04f5ff55e86b4e937c7a40d481", ["infiniteyou_image_projector"], [InfiniteYouImageProjector], "diffusers"), - (None, "4daaa66cc656a8fe369908693dad0a35", ["flux_ipadapter"], [FluxIpAdapter], "diffusers"), - (None, "51aed3d27d482fceb5e0739b03060e8f", ["sd3_dit", "sd3_vae_encoder", "sd3_vae_decoder"], [SD3DiT, SD3VAEEncoder, SD3VAEDecoder], "civitai"), - (None, "98cc34ccc5b54ae0e56bdea8688dcd5a", ["sd3_text_encoder_2"], [SD3TextEncoder2], "civitai"), - (None, "77ff18050dbc23f50382e45d51a779fe", ["sd3_dit", "sd3_vae_encoder", "sd3_vae_decoder"], [SD3DiT, 
SD3VAEEncoder, SD3VAEDecoder], "civitai"), - (None, "5da81baee73198a7c19e6d2fe8b5148e", ["sd3_text_encoder_1"], [SD3TextEncoder1], "diffusers"), - (None, "aeb82dce778a03dcb4d726cb03f3c43f", ["hunyuan_video_vae_decoder", "hunyuan_video_vae_encoder"], [HunyuanVideoVAEDecoder, HunyuanVideoVAEEncoder], "diffusers"), - (None, "b9588f02e78f5ccafc9d7c0294e46308", ["hunyuan_video_dit"], [HunyuanVideoDiT], "civitai"), - (None, "84ef4bd4757f60e906b54aa6a7815dc6", ["hunyuan_video_dit"], [HunyuanVideoDiT], "civitai"), - (None, "68beaf8429b7c11aa8ca05b1bd0058bd", ["stepvideo_vae"], [StepVideoVAE], "civitai"), - (None, "5c0216a2132b082c10cb7a0e0377e681", ["stepvideo_dit"], [StepVideoModel], "civitai"), - (None, "9269f8db9040a9d860eaca435be61814", ["wan_video_dit"], [WanModel], "civitai"), - (None, "aafcfd9672c3a2456dc46e1cb6e52c70", ["wan_video_dit"], [WanModel], "civitai"), - (None, "6bfcfb3b342cb286ce886889d519a77e", ["wan_video_dit"], [WanModel], "civitai"), - (None, "6d6ccde6845b95ad9114ab993d917893", ["wan_video_dit"], [WanModel], "civitai"), - (None, "349723183fc063b2bfc10bb2835cf677", ["wan_video_dit"], [WanModel], "civitai"), - (None, "efa44cddf936c70abd0ea28b6cbe946c", ["wan_video_dit"], [WanModel], "civitai"), - (None, "3ef3b1f8e1dab83d5b71fd7b617f859f", ["wan_video_dit"], [WanModel], "civitai"), - (None, "70ddad9d3a133785da5ea371aae09504", ["wan_video_dit"], [WanModel], "civitai"), - (None, "26bde73488a92e64cc20b0a7485b9e5b", ["wan_video_dit"], [WanModel], "civitai"), - (None, "ac6a5aa74f4a0aab6f64eb9a72f19901", ["wan_video_dit"], [WanModel], "civitai"), - (None, "b61c605c2adbd23124d152ed28e049ae", ["wan_video_dit"], [WanModel], "civitai"), - (None, "1f5ab7703c6fc803fdded85ff040c316", ["wan_video_dit"], [WanModel], "civitai"), - (None, "5b013604280dd715f8457c6ed6d6a626", ["wan_video_dit"], [WanModel], "civitai"), - (None, "2267d489f0ceb9f21836532952852ee5", ["wan_video_dit"], [WanModel], "civitai"), - (None, "5ec04e02b42d2580483ad69f4e76346a", ["wan_video_dit"], 
[WanModel], "civitai"), - (None, "47dbeab5e560db3180adf51dc0232fb1", ["wan_video_dit"], [WanModel], "civitai"), - (None, "a61453409b67cd3246cf0c3bebad47ba", ["wan_video_dit", "wan_video_vace"], [WanModel, VaceWanModel], "civitai"), - (None, "7a513e1f257a861512b1afd387a8ecd9", ["wan_video_dit", "wan_video_vace"], [WanModel, VaceWanModel], "civitai"), - (None, "cb104773c6c2cb6df4f9529ad5c60d0b", ["wan_video_dit"], [WanModel], "diffusers"), - (None, "966cffdcc52f9c46c391768b27637614", ["wan_video_dit"], [WanS2VModel], "civitai"), - (None, "8b27900f680d7251ce44e2dc8ae1ffef", ["wan_video_dit"], [LongCatVideoTransformer3DModel], "civitai"), - (None, "9c8818c2cbea55eca56c7b447df170da", ["wan_video_text_encoder"], [WanTextEncoder], "civitai"), - (None, "5941c53e207d62f20f9025686193c40b", ["wan_video_image_encoder"], [WanImageEncoder], "civitai"), - (None, "1378ea763357eea97acdef78e65d6d96", ["wan_video_vae"], [WanVideoVAE], "civitai"), - (None, "ccc42284ea13e1ad04693284c7a09be6", ["wan_video_vae"], [WanVideoVAE], "civitai"), - (None, "e1de6c02cdac79f8b739f4d3698cd216", ["wan_video_vae"], [WanVideoVAE38], "civitai"), - (None, "dbd5ec76bbf977983f972c151d545389", ["wan_video_motion_controller"], [WanMotionControllerModel], "civitai"), - (None, "d30fb9e02b1dbf4e509142f05cf7dd50", ["flux_dit", "step1x_connector"], [FluxDiT, Qwen2Connector], "civitai"), - (None, "30143afb2dea73d1ac580e0787628f8c", ["flux_lora_patcher"], [FluxLoraPatcher], "civitai"), - (None, "77c2e4dd2440269eb33bfaa0d004f6ab", ["flux_lora_encoder"], [FluxLoRAEncoder], "civitai"), - (None, "3e6c61b0f9471135fc9c6d6a98e98b6d", ["flux_dit", "nexus_gen_generation_adapter"], [FluxDiT, NexusGenAdapter], "civitai"), - (None, "63c969fd37cce769a90aa781fbff5f81", ["flux_dit", "nexus_gen_editing_adapter"], [FluxDiT, NexusGenImageEmbeddingMerger], "civitai"), - (None, "2bd19e845116e4f875a0a048e27fc219", ["nexus_gen_llm"], [NexusGenAutoregressiveModel], "civitai"), - (None, "0319a1cb19835fb510907dd3367c95ff", 
["qwen_image_dit"], [QwenImageDiT], "civitai"), - (None, "8004730443f55db63092006dd9f7110e", ["qwen_image_text_encoder"], [QwenImageTextEncoder], "diffusers"), - (None, "ed4ea5824d55ec3107b09815e318123a", ["qwen_image_vae"], [QwenImageVAE], "diffusers"), - (None, "073bce9cf969e317e5662cd570c3e79c", ["qwen_image_blockwise_controlnet"], [QwenImageBlockWiseControlNet], "civitai"), - (None, "a9e54e480a628f0b956a688a81c33bab", ["qwen_image_blockwise_controlnet"], [QwenImageBlockWiseControlNet], "civitai"), - (None, "06be60f3a4526586d8431cd038a71486", ["wans2v_audio_encoder"], [WanS2VAudioEncoder], "civitai"), - (None, "31fa352acb8a1b1d33cd8764273d80a2", ["wan_video_dit", "wan_video_animate_adapter"], [WanModel, WanAnimateAdapter], "civitai"), -] -huggingface_model_loader_configs = [ - # These configs are provided for detecting model type automatically. - # The format is (architecture_in_huggingface_config, huggingface_lib, model_name, redirected_architecture) - ("ChatGLMModel", "diffsynth.models.kolors_text_encoder", "kolors_text_encoder", None), - ("MarianMTModel", "transformers.models.marian.modeling_marian", "translator", None), - ("BloomForCausalLM", "transformers.models.bloom.modeling_bloom", "beautiful_prompt", None), - ("Qwen2ForCausalLM", "transformers.models.qwen2.modeling_qwen2", "qwen_prompt", None), - # ("LlamaForCausalLM", "transformers.models.llama.modeling_llama", "omost_prompt", None), - ("T5EncoderModel", "diffsynth.models.flux_text_encoder", "flux_text_encoder_2", "FluxTextEncoder2"), - ("CogVideoXTransformer3DModel", "diffsynth.models.cog_dit", "cog_dit", "CogDiT"), - ("SiglipModel", "transformers.models.siglip.modeling_siglip", "siglip_vision_model", "SiglipVisionModel"), - ("LlamaForCausalLM", "diffsynth.models.hunyuan_video_text_encoder", "hunyuan_video_text_encoder_2", "HunyuanVideoLLMEncoder"), - ("LlavaForConditionalGeneration", "diffsynth.models.hunyuan_video_text_encoder", "hunyuan_video_text_encoder_2", "HunyuanVideoMLLMEncoder"), - 
("Step1Model", "diffsynth.models.stepvideo_text_encoder", "stepvideo_text_encoder_2", "STEP1TextEncoder"), - ("Qwen2_5_VLForConditionalGeneration", "diffsynth.models.qwenvl", "qwenvl", "Qwen25VL_7b_Embedder"), -] -patch_model_loader_configs = [ - # These configs are provided for detecting model type automatically. - # The format is (state_dict_keys_hash_with_shape, model_name, model_class, extra_kwargs) - ("9a4ab6869ac9b7d6e31f9854e397c867", ["svd_unet"], [SVDUNet], {"add_positional_conv": 128}), -] - -preset_models_on_huggingface = { - "HunyuanDiT": [ - ("Tencent-Hunyuan/HunyuanDiT", "t2i/clip_text_encoder/pytorch_model.bin", "models/HunyuanDiT/t2i/clip_text_encoder"), - ("Tencent-Hunyuan/HunyuanDiT", "t2i/mt5/pytorch_model.bin", "models/HunyuanDiT/t2i/mt5"), - ("Tencent-Hunyuan/HunyuanDiT", "t2i/model/pytorch_model_ema.pt", "models/HunyuanDiT/t2i/model"), - ("Tencent-Hunyuan/HunyuanDiT", "t2i/sdxl-vae-fp16-fix/diffusion_pytorch_model.bin", "models/HunyuanDiT/t2i/sdxl-vae-fp16-fix"), - ], - "stable-video-diffusion-img2vid-xt": [ - ("stabilityai/stable-video-diffusion-img2vid-xt", "svd_xt.safetensors", "models/stable_video_diffusion"), - ], - "ExVideo-SVD-128f-v1": [ - ("ECNU-CILab/ExVideo-SVD-128f-v1", "model.fp16.safetensors", "models/stable_video_diffusion"), - ], - # Stable Diffusion - "StableDiffusion_v15": [ - ("benjamin-paine/stable-diffusion-v1-5", "v1-5-pruned-emaonly.safetensors", "models/stable_diffusion"), - ], - "DreamShaper_8": [ - ("Yntec/Dreamshaper8", "dreamshaper_8.safetensors", "models/stable_diffusion"), - ], - # Textual Inversion - "TextualInversion_VeryBadImageNegative_v1.3": [ - ("gemasai/verybadimagenegative_v1.3", "verybadimagenegative_v1.3.pt", "models/textual_inversion"), - ], - # Stable Diffusion XL - "StableDiffusionXL_v1": [ - ("stabilityai/stable-diffusion-xl-base-1.0", "sd_xl_base_1.0.safetensors", "models/stable_diffusion_xl"), - ], - "BluePencilXL_v200": [ - ("frankjoshua/bluePencilXL_v200", "bluePencilXL_v200.safetensors", 
"models/stable_diffusion_xl"), - ], - "StableDiffusionXL_Turbo": [ - ("stabilityai/sdxl-turbo", "sd_xl_turbo_1.0_fp16.safetensors", "models/stable_diffusion_xl_turbo"), - ], - # Stable Diffusion 3 - "StableDiffusion3": [ - ("stabilityai/stable-diffusion-3-medium", "sd3_medium_incl_clips_t5xxlfp16.safetensors", "models/stable_diffusion_3"), - ], - "StableDiffusion3_without_T5": [ - ("stabilityai/stable-diffusion-3-medium", "sd3_medium_incl_clips.safetensors", "models/stable_diffusion_3"), - ], - # ControlNet - "ControlNet_v11f1p_sd15_depth": [ - ("lllyasviel/ControlNet-v1-1", "control_v11f1p_sd15_depth.pth", "models/ControlNet"), - ("lllyasviel/Annotators", "dpt_hybrid-midas-501f0c75.pt", "models/Annotators") - ], - "ControlNet_v11p_sd15_softedge": [ - ("lllyasviel/ControlNet-v1-1", "control_v11p_sd15_softedge.pth", "models/ControlNet"), - ("lllyasviel/Annotators", "ControlNetHED.pth", "models/Annotators") - ], - "ControlNet_v11f1e_sd15_tile": [ - ("lllyasviel/ControlNet-v1-1", "control_v11f1e_sd15_tile.pth", "models/ControlNet") - ], - "ControlNet_v11p_sd15_lineart": [ - ("lllyasviel/ControlNet-v1-1", "control_v11p_sd15_lineart.pth", "models/ControlNet"), - ("lllyasviel/Annotators", "sk_model.pth", "models/Annotators"), - ("lllyasviel/Annotators", "sk_model2.pth", "models/Annotators") - ], - "ControlNet_union_sdxl_promax": [ - ("xinsir/controlnet-union-sdxl-1.0", "diffusion_pytorch_model_promax.safetensors", "models/ControlNet/controlnet_union"), - ("lllyasviel/Annotators", "dpt_hybrid-midas-501f0c75.pt", "models/Annotators") - ], - # AnimateDiff - "AnimateDiff_v2": [ - ("guoyww/animatediff", "mm_sd_v15_v2.ckpt", "models/AnimateDiff"), - ], - "AnimateDiff_xl_beta": [ - ("guoyww/animatediff", "mm_sdxl_v10_beta.ckpt", "models/AnimateDiff"), - ], - - # Qwen Prompt - "QwenPrompt": [ - ("Qwen/Qwen2-1.5B-Instruct", "config.json", "models/QwenPrompt/qwen2-1.5b-instruct"), - ("Qwen/Qwen2-1.5B-Instruct", "generation_config.json", "models/QwenPrompt/qwen2-1.5b-instruct"), - 
("Qwen/Qwen2-1.5B-Instruct", "model.safetensors", "models/QwenPrompt/qwen2-1.5b-instruct"), - ("Qwen/Qwen2-1.5B-Instruct", "special_tokens_map.json", "models/QwenPrompt/qwen2-1.5b-instruct"), - ("Qwen/Qwen2-1.5B-Instruct", "tokenizer.json", "models/QwenPrompt/qwen2-1.5b-instruct"), - ("Qwen/Qwen2-1.5B-Instruct", "tokenizer_config.json", "models/QwenPrompt/qwen2-1.5b-instruct"), - ("Qwen/Qwen2-1.5B-Instruct", "merges.txt", "models/QwenPrompt/qwen2-1.5b-instruct"), - ("Qwen/Qwen2-1.5B-Instruct", "vocab.json", "models/QwenPrompt/qwen2-1.5b-instruct"), - ], - # Beautiful Prompt - "BeautifulPrompt": [ - ("alibaba-pai/pai-bloom-1b1-text2prompt-sd", "config.json", "models/BeautifulPrompt/pai-bloom-1b1-text2prompt-sd"), - ("alibaba-pai/pai-bloom-1b1-text2prompt-sd", "generation_config.json", "models/BeautifulPrompt/pai-bloom-1b1-text2prompt-sd"), - ("alibaba-pai/pai-bloom-1b1-text2prompt-sd", "model.safetensors", "models/BeautifulPrompt/pai-bloom-1b1-text2prompt-sd"), - ("alibaba-pai/pai-bloom-1b1-text2prompt-sd", "special_tokens_map.json", "models/BeautifulPrompt/pai-bloom-1b1-text2prompt-sd"), - ("alibaba-pai/pai-bloom-1b1-text2prompt-sd", "tokenizer.json", "models/BeautifulPrompt/pai-bloom-1b1-text2prompt-sd"), - ("alibaba-pai/pai-bloom-1b1-text2prompt-sd", "tokenizer_config.json", "models/BeautifulPrompt/pai-bloom-1b1-text2prompt-sd"), - ], - # Omost prompt - "OmostPrompt":[ - ("lllyasviel/omost-llama-3-8b-4bits", "model-00001-of-00002.safetensors", "models/OmostPrompt/omost-llama-3-8b-4bits"), - ("lllyasviel/omost-llama-3-8b-4bits", "model-00002-of-00002.safetensors", "models/OmostPrompt/omost-llama-3-8b-4bits"), - ("lllyasviel/omost-llama-3-8b-4bits", "tokenizer.json", "models/OmostPrompt/omost-llama-3-8b-4bits"), - ("lllyasviel/omost-llama-3-8b-4bits", "tokenizer_config.json", "models/OmostPrompt/omost-llama-3-8b-4bits"), - ("lllyasviel/omost-llama-3-8b-4bits", "config.json", "models/OmostPrompt/omost-llama-3-8b-4bits"), - ("lllyasviel/omost-llama-3-8b-4bits", 
"generation_config.json", "models/OmostPrompt/omost-llama-3-8b-4bits"), - ("lllyasviel/omost-llama-3-8b-4bits", "model.safetensors.index.json", "models/OmostPrompt/omost-llama-3-8b-4bits"), - ("lllyasviel/omost-llama-3-8b-4bits", "special_tokens_map.json", "models/OmostPrompt/omost-llama-3-8b-4bits"), - ], - # Translator - "opus-mt-zh-en": [ - ("Helsinki-NLP/opus-mt-zh-en", "config.json", "models/translator/opus-mt-zh-en"), - ("Helsinki-NLP/opus-mt-zh-en", "generation_config.json", "models/translator/opus-mt-zh-en"), - ("Helsinki-NLP/opus-mt-zh-en", "metadata.json", "models/translator/opus-mt-zh-en"), - ("Helsinki-NLP/opus-mt-zh-en", "pytorch_model.bin", "models/translator/opus-mt-zh-en"), - ("Helsinki-NLP/opus-mt-zh-en", "source.spm", "models/translator/opus-mt-zh-en"), - ("Helsinki-NLP/opus-mt-zh-en", "target.spm", "models/translator/opus-mt-zh-en"), - ("Helsinki-NLP/opus-mt-zh-en", "tokenizer_config.json", "models/translator/opus-mt-zh-en"), - ("Helsinki-NLP/opus-mt-zh-en", "vocab.json", "models/translator/opus-mt-zh-en"), - ], - # IP-Adapter - "IP-Adapter-SD": [ - ("h94/IP-Adapter", "models/image_encoder/model.safetensors", "models/IpAdapter/stable_diffusion/image_encoder"), - ("h94/IP-Adapter", "models/ip-adapter_sd15.bin", "models/IpAdapter/stable_diffusion"), - ], - "IP-Adapter-SDXL": [ - ("h94/IP-Adapter", "sdxl_models/image_encoder/model.safetensors", "models/IpAdapter/stable_diffusion_xl/image_encoder"), - ("h94/IP-Adapter", "sdxl_models/ip-adapter_sdxl.bin", "models/IpAdapter/stable_diffusion_xl"), - ], - "SDXL-vae-fp16-fix": [ - ("madebyollin/sdxl-vae-fp16-fix", "diffusion_pytorch_model.safetensors", "models/sdxl-vae-fp16-fix") - ], - # Kolors - "Kolors": [ - ("Kwai-Kolors/Kolors", "text_encoder/config.json", "models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", "text_encoder/pytorch_model.bin.index.json", "models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", "text_encoder/pytorch_model-00001-of-00007.bin", 
"models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", "text_encoder/pytorch_model-00002-of-00007.bin", "models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", "text_encoder/pytorch_model-00003-of-00007.bin", "models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", "text_encoder/pytorch_model-00004-of-00007.bin", "models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", "text_encoder/pytorch_model-00005-of-00007.bin", "models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", "text_encoder/pytorch_model-00006-of-00007.bin", "models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", "text_encoder/pytorch_model-00007-of-00007.bin", "models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", "unet/diffusion_pytorch_model.safetensors", "models/kolors/Kolors/unet"), - ("Kwai-Kolors/Kolors", "vae/diffusion_pytorch_model.safetensors", "models/kolors/Kolors/vae"), - ], - # FLUX - "FLUX.1-dev": [ - ("black-forest-labs/FLUX.1-dev", "text_encoder/model.safetensors", "models/FLUX/FLUX.1-dev/text_encoder"), - ("black-forest-labs/FLUX.1-dev", "text_encoder_2/config.json", "models/FLUX/FLUX.1-dev/text_encoder_2"), - ("black-forest-labs/FLUX.1-dev", "text_encoder_2/model-00001-of-00002.safetensors", "models/FLUX/FLUX.1-dev/text_encoder_2"), - ("black-forest-labs/FLUX.1-dev", "text_encoder_2/model-00002-of-00002.safetensors", "models/FLUX/FLUX.1-dev/text_encoder_2"), - ("black-forest-labs/FLUX.1-dev", "text_encoder_2/model.safetensors.index.json", "models/FLUX/FLUX.1-dev/text_encoder_2"), - ("black-forest-labs/FLUX.1-dev", "ae.safetensors", "models/FLUX/FLUX.1-dev"), - ("black-forest-labs/FLUX.1-dev", "flux1-dev.safetensors", "models/FLUX/FLUX.1-dev"), - ], - "InstantX/FLUX.1-dev-IP-Adapter": { - "file_list": [ - ("InstantX/FLUX.1-dev-IP-Adapter", "ip-adapter.bin", "models/IpAdapter/InstantX/FLUX.1-dev-IP-Adapter"), - ("google/siglip-so400m-patch14-384", "model.safetensors", "models/IpAdapter/InstantX/FLUX.1-dev-IP-Adapter/image_encoder"), - 
("google/siglip-so400m-patch14-384", "config.json", "models/IpAdapter/InstantX/FLUX.1-dev-IP-Adapter/image_encoder"), - ], - "load_path": [ - "models/IpAdapter/InstantX/FLUX.1-dev-IP-Adapter/ip-adapter.bin", - "models/IpAdapter/InstantX/FLUX.1-dev-IP-Adapter/image_encoder", - ], - }, - # RIFE - "RIFE": [ - ("AlexWortega/RIFE", "flownet.pkl", "models/RIFE"), - ], - # CogVideo - "CogVideoX-5B": [ - ("THUDM/CogVideoX-5b", "text_encoder/config.json", "models/CogVideo/CogVideoX-5b/text_encoder"), - ("THUDM/CogVideoX-5b", "text_encoder/model.safetensors.index.json", "models/CogVideo/CogVideoX-5b/text_encoder"), - ("THUDM/CogVideoX-5b", "text_encoder/model-00001-of-00002.safetensors", "models/CogVideo/CogVideoX-5b/text_encoder"), - ("THUDM/CogVideoX-5b", "text_encoder/model-00002-of-00002.safetensors", "models/CogVideo/CogVideoX-5b/text_encoder"), - ("THUDM/CogVideoX-5b", "transformer/config.json", "models/CogVideo/CogVideoX-5b/transformer"), - ("THUDM/CogVideoX-5b", "transformer/diffusion_pytorch_model.safetensors.index.json", "models/CogVideo/CogVideoX-5b/transformer"), - ("THUDM/CogVideoX-5b", "transformer/diffusion_pytorch_model-00001-of-00002.safetensors", "models/CogVideo/CogVideoX-5b/transformer"), - ("THUDM/CogVideoX-5b", "transformer/diffusion_pytorch_model-00002-of-00002.safetensors", "models/CogVideo/CogVideoX-5b/transformer"), - ("THUDM/CogVideoX-5b", "vae/diffusion_pytorch_model.safetensors", "models/CogVideo/CogVideoX-5b/vae"), - ], - # Stable Diffusion 3.5 - "StableDiffusion3.5-large": [ - ("stabilityai/stable-diffusion-3.5-large", "sd3.5_large.safetensors", "models/stable_diffusion_3"), - ("stabilityai/stable-diffusion-3.5-large", "text_encoders/clip_l.safetensors", "models/stable_diffusion_3/text_encoders"), - ("stabilityai/stable-diffusion-3.5-large", "text_encoders/clip_g.safetensors", "models/stable_diffusion_3/text_encoders"), - ("stabilityai/stable-diffusion-3.5-large", "text_encoders/t5xxl_fp16.safetensors", 
"models/stable_diffusion_3/text_encoders"), - ], -} -preset_models_on_modelscope = { - # Hunyuan DiT - "HunyuanDiT": [ - ("modelscope/HunyuanDiT", "t2i/clip_text_encoder/pytorch_model.bin", "models/HunyuanDiT/t2i/clip_text_encoder"), - ("modelscope/HunyuanDiT", "t2i/mt5/pytorch_model.bin", "models/HunyuanDiT/t2i/mt5"), - ("modelscope/HunyuanDiT", "t2i/model/pytorch_model_ema.pt", "models/HunyuanDiT/t2i/model"), - ("modelscope/HunyuanDiT", "t2i/sdxl-vae-fp16-fix/diffusion_pytorch_model.bin", "models/HunyuanDiT/t2i/sdxl-vae-fp16-fix"), - ], - # Stable Video Diffusion - "stable-video-diffusion-img2vid-xt": [ - ("AI-ModelScope/stable-video-diffusion-img2vid-xt", "svd_xt.safetensors", "models/stable_video_diffusion"), - ], - # ExVideo - "ExVideo-SVD-128f-v1": [ - ("ECNU-CILab/ExVideo-SVD-128f-v1", "model.fp16.safetensors", "models/stable_video_diffusion"), - ], - "ExVideo-CogVideoX-LoRA-129f-v1": [ - ("ECNU-CILab/ExVideo-CogVideoX-LoRA-129f-v1", "ExVideo-CogVideoX-LoRA-129f-v1.safetensors", "models/lora"), - ], - # Stable Diffusion - "StableDiffusion_v15": [ - ("AI-ModelScope/stable-diffusion-v1-5", "v1-5-pruned-emaonly.safetensors", "models/stable_diffusion"), - ], - "DreamShaper_8": [ - ("sd_lora/dreamshaper_8", "dreamshaper_8.safetensors", "models/stable_diffusion"), - ], - "AingDiffusion_v12": [ - ("sd_lora/aingdiffusion_v12", "aingdiffusion_v12.safetensors", "models/stable_diffusion"), - ], - "Flat2DAnimerge_v45Sharp": [ - ("sd_lora/Flat-2D-Animerge", "flat2DAnimerge_v45Sharp.safetensors", "models/stable_diffusion"), - ], - # Textual Inversion - "TextualInversion_VeryBadImageNegative_v1.3": [ - ("sd_lora/verybadimagenegative_v1.3", "verybadimagenegative_v1.3.pt", "models/textual_inversion"), - ], - # Stable Diffusion XL - "StableDiffusionXL_v1": [ - ("AI-ModelScope/stable-diffusion-xl-base-1.0", "sd_xl_base_1.0.safetensors", "models/stable_diffusion_xl"), - ], - "BluePencilXL_v200": [ - ("sd_lora/bluePencilXL_v200", "bluePencilXL_v200.safetensors", 
"models/stable_diffusion_xl"), - ], - "StableDiffusionXL_Turbo": [ - ("AI-ModelScope/sdxl-turbo", "sd_xl_turbo_1.0_fp16.safetensors", "models/stable_diffusion_xl_turbo"), - ], - "SDXL_lora_zyd232_ChineseInkStyle_SDXL_v1_0": [ - ("sd_lora/zyd232_ChineseInkStyle_SDXL_v1_0", "zyd232_ChineseInkStyle_SDXL_v1_0.safetensors", "models/lora"), - ], - # Stable Diffusion 3 - "StableDiffusion3": [ - ("AI-ModelScope/stable-diffusion-3-medium", "sd3_medium_incl_clips_t5xxlfp16.safetensors", "models/stable_diffusion_3"), - ], - "StableDiffusion3_without_T5": [ - ("AI-ModelScope/stable-diffusion-3-medium", "sd3_medium_incl_clips.safetensors", "models/stable_diffusion_3"), - ], - # ControlNet - "ControlNet_v11f1p_sd15_depth": [ - ("AI-ModelScope/ControlNet-v1-1", "control_v11f1p_sd15_depth.pth", "models/ControlNet"), - ("sd_lora/Annotators", "dpt_hybrid-midas-501f0c75.pt", "models/Annotators") - ], - "ControlNet_v11p_sd15_softedge": [ - ("AI-ModelScope/ControlNet-v1-1", "control_v11p_sd15_softedge.pth", "models/ControlNet"), - ("sd_lora/Annotators", "ControlNetHED.pth", "models/Annotators") - ], - "ControlNet_v11f1e_sd15_tile": [ - ("AI-ModelScope/ControlNet-v1-1", "control_v11f1e_sd15_tile.pth", "models/ControlNet") - ], - "ControlNet_v11p_sd15_lineart": [ - ("AI-ModelScope/ControlNet-v1-1", "control_v11p_sd15_lineart.pth", "models/ControlNet"), - ("sd_lora/Annotators", "sk_model.pth", "models/Annotators"), - ("sd_lora/Annotators", "sk_model2.pth", "models/Annotators") - ], - "ControlNet_union_sdxl_promax": [ - ("AI-ModelScope/controlnet-union-sdxl-1.0", "diffusion_pytorch_model_promax.safetensors", "models/ControlNet/controlnet_union"), - ("sd_lora/Annotators", "dpt_hybrid-midas-501f0c75.pt", "models/Annotators") - ], - "Annotators:Depth": [ - ("sd_lora/Annotators", "dpt_hybrid-midas-501f0c75.pt", "models/Annotators"), - ], - "Annotators:Softedge": [ - ("sd_lora/Annotators", "ControlNetHED.pth", "models/Annotators"), - ], - "Annotators:Lineart": [ - ("sd_lora/Annotators", 
"sk_model.pth", "models/Annotators"), - ("sd_lora/Annotators", "sk_model2.pth", "models/Annotators"), - ], - "Annotators:Normal": [ - ("sd_lora/Annotators", "scannet.pt", "models/Annotators"), - ], - "Annotators:Openpose": [ - ("sd_lora/Annotators", "body_pose_model.pth", "models/Annotators"), - ("sd_lora/Annotators", "facenet.pth", "models/Annotators"), - ("sd_lora/Annotators", "hand_pose_model.pth", "models/Annotators"), - ], - # AnimateDiff - "AnimateDiff_v2": [ - ("Shanghai_AI_Laboratory/animatediff", "mm_sd_v15_v2.ckpt", "models/AnimateDiff"), - ], - "AnimateDiff_xl_beta": [ - ("Shanghai_AI_Laboratory/animatediff", "mm_sdxl_v10_beta.ckpt", "models/AnimateDiff"), - ], - # RIFE - "RIFE": [ - ("Damo_XR_Lab/cv_rife_video-frame-interpolation", "flownet.pkl", "models/RIFE"), - ], - # Qwen Prompt - "QwenPrompt": { - "file_list": [ - ("qwen/Qwen2-1.5B-Instruct", "config.json", "models/QwenPrompt/qwen2-1.5b-instruct"), - ("qwen/Qwen2-1.5B-Instruct", "generation_config.json", "models/QwenPrompt/qwen2-1.5b-instruct"), - ("qwen/Qwen2-1.5B-Instruct", "model.safetensors", "models/QwenPrompt/qwen2-1.5b-instruct"), - ("qwen/Qwen2-1.5B-Instruct", "special_tokens_map.json", "models/QwenPrompt/qwen2-1.5b-instruct"), - ("qwen/Qwen2-1.5B-Instruct", "tokenizer.json", "models/QwenPrompt/qwen2-1.5b-instruct"), - ("qwen/Qwen2-1.5B-Instruct", "tokenizer_config.json", "models/QwenPrompt/qwen2-1.5b-instruct"), - ("qwen/Qwen2-1.5B-Instruct", "merges.txt", "models/QwenPrompt/qwen2-1.5b-instruct"), - ("qwen/Qwen2-1.5B-Instruct", "vocab.json", "models/QwenPrompt/qwen2-1.5b-instruct"), - ], - "load_path": [ - "models/QwenPrompt/qwen2-1.5b-instruct", - ], - }, - # Beautiful Prompt - "BeautifulPrompt": { - "file_list": [ - ("AI-ModelScope/pai-bloom-1b1-text2prompt-sd", "config.json", "models/BeautifulPrompt/pai-bloom-1b1-text2prompt-sd"), - ("AI-ModelScope/pai-bloom-1b1-text2prompt-sd", "generation_config.json", "models/BeautifulPrompt/pai-bloom-1b1-text2prompt-sd"), - 
("AI-ModelScope/pai-bloom-1b1-text2prompt-sd", "model.safetensors", "models/BeautifulPrompt/pai-bloom-1b1-text2prompt-sd"), - ("AI-ModelScope/pai-bloom-1b1-text2prompt-sd", "special_tokens_map.json", "models/BeautifulPrompt/pai-bloom-1b1-text2prompt-sd"), - ("AI-ModelScope/pai-bloom-1b1-text2prompt-sd", "tokenizer.json", "models/BeautifulPrompt/pai-bloom-1b1-text2prompt-sd"), - ("AI-ModelScope/pai-bloom-1b1-text2prompt-sd", "tokenizer_config.json", "models/BeautifulPrompt/pai-bloom-1b1-text2prompt-sd"), - ], - "load_path": [ - "models/BeautifulPrompt/pai-bloom-1b1-text2prompt-sd", - ], - }, - # Omost prompt - "OmostPrompt": { - "file_list": [ - ("Omost/omost-llama-3-8b-4bits", "model-00001-of-00002.safetensors", "models/OmostPrompt/omost-llama-3-8b-4bits"), - ("Omost/omost-llama-3-8b-4bits", "model-00002-of-00002.safetensors", "models/OmostPrompt/omost-llama-3-8b-4bits"), - ("Omost/omost-llama-3-8b-4bits", "tokenizer.json", "models/OmostPrompt/omost-llama-3-8b-4bits"), - ("Omost/omost-llama-3-8b-4bits", "tokenizer_config.json", "models/OmostPrompt/omost-llama-3-8b-4bits"), - ("Omost/omost-llama-3-8b-4bits", "config.json", "models/OmostPrompt/omost-llama-3-8b-4bits"), - ("Omost/omost-llama-3-8b-4bits", "generation_config.json", "models/OmostPrompt/omost-llama-3-8b-4bits"), - ("Omost/omost-llama-3-8b-4bits", "model.safetensors.index.json", "models/OmostPrompt/omost-llama-3-8b-4bits"), - ("Omost/omost-llama-3-8b-4bits", "special_tokens_map.json", "models/OmostPrompt/omost-llama-3-8b-4bits"), - ], - "load_path": [ - "models/OmostPrompt/omost-llama-3-8b-4bits", - ], - }, - # Translator - "opus-mt-zh-en": { - "file_list": [ - ("moxying/opus-mt-zh-en", "config.json", "models/translator/opus-mt-zh-en"), - ("moxying/opus-mt-zh-en", "generation_config.json", "models/translator/opus-mt-zh-en"), - ("moxying/opus-mt-zh-en", "metadata.json", "models/translator/opus-mt-zh-en"), - ("moxying/opus-mt-zh-en", "pytorch_model.bin", "models/translator/opus-mt-zh-en"), - 
("moxying/opus-mt-zh-en", "source.spm", "models/translator/opus-mt-zh-en"), - ("moxying/opus-mt-zh-en", "target.spm", "models/translator/opus-mt-zh-en"), - ("moxying/opus-mt-zh-en", "tokenizer_config.json", "models/translator/opus-mt-zh-en"), - ("moxying/opus-mt-zh-en", "vocab.json", "models/translator/opus-mt-zh-en"), - ], - "load_path": [ - "models/translator/opus-mt-zh-en", - ], - }, - # IP-Adapter - "IP-Adapter-SD": [ - ("AI-ModelScope/IP-Adapter", "models/image_encoder/model.safetensors", "models/IpAdapter/stable_diffusion/image_encoder"), - ("AI-ModelScope/IP-Adapter", "models/ip-adapter_sd15.bin", "models/IpAdapter/stable_diffusion"), - ], - "IP-Adapter-SDXL": [ - ("AI-ModelScope/IP-Adapter", "sdxl_models/image_encoder/model.safetensors", "models/IpAdapter/stable_diffusion_xl/image_encoder"), - ("AI-ModelScope/IP-Adapter", "sdxl_models/ip-adapter_sdxl.bin", "models/IpAdapter/stable_diffusion_xl"), - ], - # Kolors - "Kolors": { - "file_list": [ - ("Kwai-Kolors/Kolors", "text_encoder/config.json", "models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", "text_encoder/pytorch_model.bin.index.json", "models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", "text_encoder/pytorch_model-00001-of-00007.bin", "models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", "text_encoder/pytorch_model-00002-of-00007.bin", "models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", "text_encoder/pytorch_model-00003-of-00007.bin", "models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", "text_encoder/pytorch_model-00004-of-00007.bin", "models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", "text_encoder/pytorch_model-00005-of-00007.bin", "models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", "text_encoder/pytorch_model-00006-of-00007.bin", "models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", "text_encoder/pytorch_model-00007-of-00007.bin", "models/kolors/Kolors/text_encoder"), - ("Kwai-Kolors/Kolors", 
"unet/diffusion_pytorch_model.safetensors", "models/kolors/Kolors/unet"), - ("Kwai-Kolors/Kolors", "vae/diffusion_pytorch_model.safetensors", "models/kolors/Kolors/vae"), - ], - "load_path": [ - "models/kolors/Kolors/text_encoder", - "models/kolors/Kolors/unet/diffusion_pytorch_model.safetensors", - "models/kolors/Kolors/vae/diffusion_pytorch_model.safetensors", - ], - }, - "SDXL-vae-fp16-fix": [ - ("AI-ModelScope/sdxl-vae-fp16-fix", "diffusion_pytorch_model.safetensors", "models/sdxl-vae-fp16-fix") - ], - # FLUX - "FLUX.1-dev": { - "file_list": [ - ("AI-ModelScope/FLUX.1-dev", "text_encoder/model.safetensors", "models/FLUX/FLUX.1-dev/text_encoder"), - ("AI-ModelScope/FLUX.1-dev", "text_encoder_2/config.json", "models/FLUX/FLUX.1-dev/text_encoder_2"), - ("AI-ModelScope/FLUX.1-dev", "text_encoder_2/model-00001-of-00002.safetensors", "models/FLUX/FLUX.1-dev/text_encoder_2"), - ("AI-ModelScope/FLUX.1-dev", "text_encoder_2/model-00002-of-00002.safetensors", "models/FLUX/FLUX.1-dev/text_encoder_2"), - ("AI-ModelScope/FLUX.1-dev", "text_encoder_2/model.safetensors.index.json", "models/FLUX/FLUX.1-dev/text_encoder_2"), - ("AI-ModelScope/FLUX.1-dev", "ae.safetensors", "models/FLUX/FLUX.1-dev"), - ("AI-ModelScope/FLUX.1-dev", "flux1-dev.safetensors", "models/FLUX/FLUX.1-dev"), - ], - "load_path": [ - "models/FLUX/FLUX.1-dev/text_encoder/model.safetensors", - "models/FLUX/FLUX.1-dev/text_encoder_2", - "models/FLUX/FLUX.1-dev/ae.safetensors", - "models/FLUX/FLUX.1-dev/flux1-dev.safetensors" - ], - }, - "FLUX.1-schnell": { - "file_list": [ - ("AI-ModelScope/FLUX.1-dev", "text_encoder/model.safetensors", "models/FLUX/FLUX.1-dev/text_encoder"), - ("AI-ModelScope/FLUX.1-dev", "text_encoder_2/config.json", "models/FLUX/FLUX.1-dev/text_encoder_2"), - ("AI-ModelScope/FLUX.1-dev", "text_encoder_2/model-00001-of-00002.safetensors", "models/FLUX/FLUX.1-dev/text_encoder_2"), - ("AI-ModelScope/FLUX.1-dev", "text_encoder_2/model-00002-of-00002.safetensors", 
"models/FLUX/FLUX.1-dev/text_encoder_2"), - ("AI-ModelScope/FLUX.1-dev", "text_encoder_2/model.safetensors.index.json", "models/FLUX/FLUX.1-dev/text_encoder_2"), - ("AI-ModelScope/FLUX.1-dev", "ae.safetensors", "models/FLUX/FLUX.1-dev"), - ("AI-ModelScope/FLUX.1-schnell", "flux1-schnell.safetensors", "models/FLUX/FLUX.1-schnell"), - ], - "load_path": [ - "models/FLUX/FLUX.1-dev/text_encoder/model.safetensors", - "models/FLUX/FLUX.1-dev/text_encoder_2", - "models/FLUX/FLUX.1-dev/ae.safetensors", - "models/FLUX/FLUX.1-schnell/flux1-schnell.safetensors" - ], - }, - "InstantX/FLUX.1-dev-Controlnet-Union-alpha": [ - ("InstantX/FLUX.1-dev-Controlnet-Union-alpha", "diffusion_pytorch_model.safetensors", "models/ControlNet/InstantX/FLUX.1-dev-Controlnet-Union-alpha"), - ], - "jasperai/Flux.1-dev-Controlnet-Depth": [ - ("jasperai/Flux.1-dev-Controlnet-Depth", "diffusion_pytorch_model.safetensors", "models/ControlNet/jasperai/Flux.1-dev-Controlnet-Depth"), - ], - "jasperai/Flux.1-dev-Controlnet-Surface-Normals": [ - ("jasperai/Flux.1-dev-Controlnet-Surface-Normals", "diffusion_pytorch_model.safetensors", "models/ControlNet/jasperai/Flux.1-dev-Controlnet-Surface-Normals"), - ], - "jasperai/Flux.1-dev-Controlnet-Upscaler": [ - ("jasperai/Flux.1-dev-Controlnet-Upscaler", "diffusion_pytorch_model.safetensors", "models/ControlNet/jasperai/Flux.1-dev-Controlnet-Upscaler"), - ], - "alimama-creative/FLUX.1-dev-Controlnet-Inpainting-Alpha": [ - ("alimama-creative/FLUX.1-dev-Controlnet-Inpainting-Alpha", "diffusion_pytorch_model.safetensors", "models/ControlNet/alimama-creative/FLUX.1-dev-Controlnet-Inpainting-Alpha"), - ], - "alimama-creative/FLUX.1-dev-Controlnet-Inpainting-Beta": [ - ("alimama-creative/FLUX.1-dev-Controlnet-Inpainting-Beta", "diffusion_pytorch_model.safetensors", "models/ControlNet/alimama-creative/FLUX.1-dev-Controlnet-Inpainting-Beta"), - ], - "Shakker-Labs/FLUX.1-dev-ControlNet-Depth": [ - ("Shakker-Labs/FLUX.1-dev-ControlNet-Depth", 
"diffusion_pytorch_model.safetensors", "models/ControlNet/Shakker-Labs/FLUX.1-dev-ControlNet-Depth"), - ], - "Shakker-Labs/FLUX.1-dev-ControlNet-Union-Pro": [ - ("Shakker-Labs/FLUX.1-dev-ControlNet-Union-Pro", "diffusion_pytorch_model.safetensors", "models/ControlNet/Shakker-Labs/FLUX.1-dev-ControlNet-Union-Pro"), - ], - "InstantX/FLUX.1-dev-IP-Adapter": { - "file_list": [ - ("InstantX/FLUX.1-dev-IP-Adapter", "ip-adapter.bin", "models/IpAdapter/InstantX/FLUX.1-dev-IP-Adapter"), - ("AI-ModelScope/siglip-so400m-patch14-384", "model.safetensors", "models/IpAdapter/InstantX/FLUX.1-dev-IP-Adapter/image_encoder"), - ("AI-ModelScope/siglip-so400m-patch14-384", "config.json", "models/IpAdapter/InstantX/FLUX.1-dev-IP-Adapter/image_encoder"), - ], - "load_path": [ - "models/IpAdapter/InstantX/FLUX.1-dev-IP-Adapter/ip-adapter.bin", - "models/IpAdapter/InstantX/FLUX.1-dev-IP-Adapter/image_encoder", - ], - }, - "InfiniteYou":{ - "file_list":[ - ("ByteDance/InfiniteYou", "infu_flux_v1.0/aes_stage2/InfuseNetModel/diffusion_pytorch_model-00001-of-00002.safetensors", "models/InfiniteYou/InfuseNetModel"), - ("ByteDance/InfiniteYou", "infu_flux_v1.0/aes_stage2/InfuseNetModel/diffusion_pytorch_model-00002-of-00002.safetensors", "models/InfiniteYou/InfuseNetModel"), - ("ByteDance/InfiniteYou", "infu_flux_v1.0/aes_stage2/image_proj_model.bin", "models/InfiniteYou"), - ("ByteDance/InfiniteYou", "supports/insightface/models/antelopev2/1k3d68.onnx", "models/InfiniteYou/insightface/models/antelopev2"), - ("ByteDance/InfiniteYou", "supports/insightface/models/antelopev2/2d106det.onnx", "models/InfiniteYou/insightface/models/antelopev2"), - ("ByteDance/InfiniteYou", "supports/insightface/models/antelopev2/genderage.onnx", "models/InfiniteYou/insightface/models/antelopev2"), - ("ByteDance/InfiniteYou", "supports/insightface/models/antelopev2/glintr100.onnx", "models/InfiniteYou/insightface/models/antelopev2"), - ("ByteDance/InfiniteYou", 
"supports/insightface/models/antelopev2/scrfd_10g_bnkps.onnx", "models/InfiniteYou/insightface/models/antelopev2"), - ], - "load_path":[ - [ - "models/InfiniteYou/InfuseNetModel/diffusion_pytorch_model-00001-of-00002.safetensors", - "models/InfiniteYou/InfuseNetModel/diffusion_pytorch_model-00002-of-00002.safetensors" - ], - "models/InfiniteYou/image_proj_model.bin", - ], - }, - # ESRGAN - "ESRGAN_x4": [ - ("AI-ModelScope/Real-ESRGAN", "RealESRGAN_x4.pth", "models/ESRGAN"), - ], - # RIFE - "RIFE": [ - ("AI-ModelScope/RIFE", "flownet.pkl", "models/RIFE"), - ], - # Omnigen - "OmniGen-v1": { - "file_list": [ - ("BAAI/OmniGen-v1", "vae/diffusion_pytorch_model.safetensors", "models/OmniGen/OmniGen-v1/vae"), - ("BAAI/OmniGen-v1", "model.safetensors", "models/OmniGen/OmniGen-v1"), - ("BAAI/OmniGen-v1", "config.json", "models/OmniGen/OmniGen-v1"), - ("BAAI/OmniGen-v1", "special_tokens_map.json", "models/OmniGen/OmniGen-v1"), - ("BAAI/OmniGen-v1", "tokenizer_config.json", "models/OmniGen/OmniGen-v1"), - ("BAAI/OmniGen-v1", "tokenizer.json", "models/OmniGen/OmniGen-v1"), - ], - "load_path": [ - "models/OmniGen/OmniGen-v1/vae/diffusion_pytorch_model.safetensors", - "models/OmniGen/OmniGen-v1/model.safetensors", - ] - }, - # CogVideo - "CogVideoX-5B": { - "file_list": [ - ("ZhipuAI/CogVideoX-5b", "text_encoder/config.json", "models/CogVideo/CogVideoX-5b/text_encoder"), - ("ZhipuAI/CogVideoX-5b", "text_encoder/model.safetensors.index.json", "models/CogVideo/CogVideoX-5b/text_encoder"), - ("ZhipuAI/CogVideoX-5b", "text_encoder/model-00001-of-00002.safetensors", "models/CogVideo/CogVideoX-5b/text_encoder"), - ("ZhipuAI/CogVideoX-5b", "text_encoder/model-00002-of-00002.safetensors", "models/CogVideo/CogVideoX-5b/text_encoder"), - ("ZhipuAI/CogVideoX-5b", "transformer/config.json", "models/CogVideo/CogVideoX-5b/transformer"), - ("ZhipuAI/CogVideoX-5b", "transformer/diffusion_pytorch_model.safetensors.index.json", "models/CogVideo/CogVideoX-5b/transformer"), - 
("ZhipuAI/CogVideoX-5b", "transformer/diffusion_pytorch_model-00001-of-00002.safetensors", "models/CogVideo/CogVideoX-5b/transformer"), - ("ZhipuAI/CogVideoX-5b", "transformer/diffusion_pytorch_model-00002-of-00002.safetensors", "models/CogVideo/CogVideoX-5b/transformer"), - ("ZhipuAI/CogVideoX-5b", "vae/diffusion_pytorch_model.safetensors", "models/CogVideo/CogVideoX-5b/vae"), - ], - "load_path": [ - "models/CogVideo/CogVideoX-5b/text_encoder", - "models/CogVideo/CogVideoX-5b/transformer", - "models/CogVideo/CogVideoX-5b/vae/diffusion_pytorch_model.safetensors", - ], - }, - # Stable Diffusion 3.5 - "StableDiffusion3.5-large": [ - ("AI-ModelScope/stable-diffusion-3.5-large", "sd3.5_large.safetensors", "models/stable_diffusion_3"), - ("AI-ModelScope/stable-diffusion-3.5-large", "text_encoders/clip_l.safetensors", "models/stable_diffusion_3/text_encoders"), - ("AI-ModelScope/stable-diffusion-3.5-large", "text_encoders/clip_g.safetensors", "models/stable_diffusion_3/text_encoders"), - ("AI-ModelScope/stable-diffusion-3.5-large", "text_encoders/t5xxl_fp16.safetensors", "models/stable_diffusion_3/text_encoders"), - ], - "StableDiffusion3.5-medium": [ - ("AI-ModelScope/stable-diffusion-3.5-medium", "sd3.5_medium.safetensors", "models/stable_diffusion_3"), - ("AI-ModelScope/stable-diffusion-3.5-large", "text_encoders/clip_l.safetensors", "models/stable_diffusion_3/text_encoders"), - ("AI-ModelScope/stable-diffusion-3.5-large", "text_encoders/clip_g.safetensors", "models/stable_diffusion_3/text_encoders"), - ("AI-ModelScope/stable-diffusion-3.5-large", "text_encoders/t5xxl_fp16.safetensors", "models/stable_diffusion_3/text_encoders"), - ], - "StableDiffusion3.5-large-turbo": [ - ("AI-ModelScope/stable-diffusion-3.5-large-turbo", "sd3.5_large_turbo.safetensors", "models/stable_diffusion_3"), - ("AI-ModelScope/stable-diffusion-3.5-large", "text_encoders/clip_l.safetensors", "models/stable_diffusion_3/text_encoders"), - ("AI-ModelScope/stable-diffusion-3.5-large", 
"text_encoders/clip_g.safetensors", "models/stable_diffusion_3/text_encoders"), - ("AI-ModelScope/stable-diffusion-3.5-large", "text_encoders/t5xxl_fp16.safetensors", "models/stable_diffusion_3/text_encoders"), - ], - "HunyuanVideo":{ - "file_list": [ - ("AI-ModelScope/clip-vit-large-patch14", "model.safetensors", "models/HunyuanVideo/text_encoder"), - ("DiffSynth-Studio/HunyuanVideo_MLLM_text_encoder", "model-00001-of-00004.safetensors", "models/HunyuanVideo/text_encoder_2"), - ("DiffSynth-Studio/HunyuanVideo_MLLM_text_encoder", "model-00002-of-00004.safetensors", "models/HunyuanVideo/text_encoder_2"), - ("DiffSynth-Studio/HunyuanVideo_MLLM_text_encoder", "model-00003-of-00004.safetensors", "models/HunyuanVideo/text_encoder_2"), - ("DiffSynth-Studio/HunyuanVideo_MLLM_text_encoder", "model-00004-of-00004.safetensors", "models/HunyuanVideo/text_encoder_2"), - ("DiffSynth-Studio/HunyuanVideo_MLLM_text_encoder", "config.json", "models/HunyuanVideo/text_encoder_2"), - ("DiffSynth-Studio/HunyuanVideo_MLLM_text_encoder", "model.safetensors.index.json", "models/HunyuanVideo/text_encoder_2"), - ("AI-ModelScope/HunyuanVideo", "hunyuan-video-t2v-720p/vae/pytorch_model.pt", "models/HunyuanVideo/vae"), - ("AI-ModelScope/HunyuanVideo", "hunyuan-video-t2v-720p/transformers/mp_rank_00_model_states.pt", "models/HunyuanVideo/transformers") - ], - "load_path": [ - "models/HunyuanVideo/text_encoder/model.safetensors", - "models/HunyuanVideo/text_encoder_2", - "models/HunyuanVideo/vae/pytorch_model.pt", - "models/HunyuanVideo/transformers/mp_rank_00_model_states.pt" - ], - }, - "HunyuanVideoI2V":{ - "file_list": [ - ("AI-ModelScope/clip-vit-large-patch14", "model.safetensors", "models/HunyuanVideoI2V/text_encoder"), - ("AI-ModelScope/llava-llama-3-8b-v1_1-transformers", "model-00001-of-00004.safetensors", "models/HunyuanVideoI2V/text_encoder_2"), - ("AI-ModelScope/llava-llama-3-8b-v1_1-transformers", "model-00002-of-00004.safetensors", "models/HunyuanVideoI2V/text_encoder_2"), - 
("AI-ModelScope/llava-llama-3-8b-v1_1-transformers", "model-00003-of-00004.safetensors", "models/HunyuanVideoI2V/text_encoder_2"), - ("AI-ModelScope/llava-llama-3-8b-v1_1-transformers", "model-00004-of-00004.safetensors", "models/HunyuanVideoI2V/text_encoder_2"), - ("AI-ModelScope/llava-llama-3-8b-v1_1-transformers", "config.json", "models/HunyuanVideoI2V/text_encoder_2"), - ("AI-ModelScope/llava-llama-3-8b-v1_1-transformers", "model.safetensors.index.json", "models/HunyuanVideoI2V/text_encoder_2"), - ("AI-ModelScope/HunyuanVideo-I2V", "hunyuan-video-i2v-720p/vae/pytorch_model.pt", "models/HunyuanVideoI2V/vae"), - ("AI-ModelScope/HunyuanVideo-I2V", "hunyuan-video-i2v-720p/transformers/mp_rank_00_model_states.pt", "models/HunyuanVideoI2V/transformers") - ], - "load_path": [ - "models/HunyuanVideoI2V/text_encoder/model.safetensors", - "models/HunyuanVideoI2V/text_encoder_2", - "models/HunyuanVideoI2V/vae/pytorch_model.pt", - "models/HunyuanVideoI2V/transformers/mp_rank_00_model_states.pt" - ], - }, - "HunyuanVideo-fp8":{ - "file_list": [ - ("AI-ModelScope/clip-vit-large-patch14", "model.safetensors", "models/HunyuanVideo/text_encoder"), - ("DiffSynth-Studio/HunyuanVideo_MLLM_text_encoder", "model-00001-of-00004.safetensors", "models/HunyuanVideo/text_encoder_2"), - ("DiffSynth-Studio/HunyuanVideo_MLLM_text_encoder", "model-00002-of-00004.safetensors", "models/HunyuanVideo/text_encoder_2"), - ("DiffSynth-Studio/HunyuanVideo_MLLM_text_encoder", "model-00003-of-00004.safetensors", "models/HunyuanVideo/text_encoder_2"), - ("DiffSynth-Studio/HunyuanVideo_MLLM_text_encoder", "model-00004-of-00004.safetensors", "models/HunyuanVideo/text_encoder_2"), - ("DiffSynth-Studio/HunyuanVideo_MLLM_text_encoder", "config.json", "models/HunyuanVideo/text_encoder_2"), - ("DiffSynth-Studio/HunyuanVideo_MLLM_text_encoder", "model.safetensors.index.json", "models/HunyuanVideo/text_encoder_2"), - ("AI-ModelScope/HunyuanVideo", "hunyuan-video-t2v-720p/vae/pytorch_model.pt", 
"models/HunyuanVideo/vae"), - ("DiffSynth-Studio/HunyuanVideo-safetensors", "model.fp8.safetensors", "models/HunyuanVideo/transformers") - ], - "load_path": [ - "models/HunyuanVideo/text_encoder/model.safetensors", - "models/HunyuanVideo/text_encoder_2", - "models/HunyuanVideo/vae/pytorch_model.pt", - "models/HunyuanVideo/transformers/model.fp8.safetensors" - ], - }, -} -Preset_model_id: TypeAlias = Literal[ - "HunyuanDiT", - "stable-video-diffusion-img2vid-xt", - "ExVideo-SVD-128f-v1", - "ExVideo-CogVideoX-LoRA-129f-v1", - "StableDiffusion_v15", - "DreamShaper_8", - "AingDiffusion_v12", - "Flat2DAnimerge_v45Sharp", - "TextualInversion_VeryBadImageNegative_v1.3", - "StableDiffusionXL_v1", - "BluePencilXL_v200", - "StableDiffusionXL_Turbo", - "ControlNet_v11f1p_sd15_depth", - "ControlNet_v11p_sd15_softedge", - "ControlNet_v11f1e_sd15_tile", - "ControlNet_v11p_sd15_lineart", - "AnimateDiff_v2", - "AnimateDiff_xl_beta", - "RIFE", - "BeautifulPrompt", - "opus-mt-zh-en", - "IP-Adapter-SD", - "IP-Adapter-SDXL", - "StableDiffusion3", - "StableDiffusion3_without_T5", - "Kolors", - "SDXL-vae-fp16-fix", - "ControlNet_union_sdxl_promax", - "FLUX.1-dev", - "FLUX.1-schnell", - "InstantX/FLUX.1-dev-Controlnet-Union-alpha", - "jasperai/Flux.1-dev-Controlnet-Depth", - "jasperai/Flux.1-dev-Controlnet-Surface-Normals", - "jasperai/Flux.1-dev-Controlnet-Upscaler", - "alimama-creative/FLUX.1-dev-Controlnet-Inpainting-Alpha", - "alimama-creative/FLUX.1-dev-Controlnet-Inpainting-Beta", - "Shakker-Labs/FLUX.1-dev-ControlNet-Depth", - "Shakker-Labs/FLUX.1-dev-ControlNet-Union-Pro", - "InstantX/FLUX.1-dev-IP-Adapter", - "InfiniteYou", - "SDXL_lora_zyd232_ChineseInkStyle_SDXL_v1_0", - "QwenPrompt", - "OmostPrompt", - "ESRGAN_x4", - "RIFE", - "OmniGen-v1", - "CogVideoX-5B", - "Annotators:Depth", - "Annotators:Softedge", - "Annotators:Lineart", - "Annotators:Normal", - "Annotators:Openpose", - "StableDiffusion3.5-large", - "StableDiffusion3.5-medium", - "HunyuanVideo", - "HunyuanVideo-fp8", - 
"HunyuanVideoI2V", -] diff --git a/diffsynth/configs/model_configs.py b/diffsynth/configs/model_configs.py new file mode 100644 index 0000000..bda3ba7 --- /dev/null +++ b/diffsynth/configs/model_configs.py @@ -0,0 +1,29 @@ +MODEL_CONFIGS = [ + { + "model_hash": "0319a1cb19835fb510907dd3367c95ff", + "model_name": "qwen_image_dit", + "model_class": "diffsynth.models.qwen_image_dit.QwenImageDiT", + }, + { + "model_hash": "8004730443f55db63092006dd9f7110e", + "model_name": "qwen_image_text_encoder", + "model_class": "diffsynth.models.qwen_image_text_encoder.QwenImageTextEncoder", + "state_dict_converter": "diffsynth.utils.state_dict_converters.qwen_image_text_encoder.QwenImageTextEncoderStateDictConverter", + }, + { + "model_hash": "ed4ea5824d55ec3107b09815e318123a", + "model_name": "qwen_image_vae", + "model_class": "diffsynth.models.qwen_image_vae.QwenImageVAE", + }, + { + "model_hash": "073bce9cf969e317e5662cd570c3e79c", + "model_name": "qwen_image_blockwise_controlnet", + "model_class": "diffsynth.models.qwen_image_controlnet.QwenImageBlockWiseControlNet", + }, + { + "model_hash": "a9e54e480a628f0b956a688a81c33bab", + "model_name": "qwen_image_blockwise_controlnet", + "model_class": "diffsynth.models.qwen_image_controlnet.QwenImageBlockWiseControlNet", + "extra_kwargs": {"additional_in_dim": 4} + }, +] diff --git a/diffsynth/configs/vram_management_module_maps.py b/diffsynth/configs/vram_management_module_maps.py new file mode 100644 index 0000000..50c85d6 --- /dev/null +++ b/diffsynth/configs/vram_management_module_maps.py @@ -0,0 +1,24 @@ +VRAM_MANAGEMENT_MODULE_MAPS = { + "diffsynth.models.qwen_image_dit.QwenImageDiT": { + "diffsynth.models.qwen_image_dit.RMSNorm": "diffsynth.core.vram.layers.AutoWrappedModule", + "torch.nn.Linear": "diffsynth.core.vram.layers.AutoWrappedLinear", + }, + "diffsynth.models.qwen_image_text_encoder.QwenImageTextEncoder": { + "torch.nn.Linear": "diffsynth.core.vram.layers.AutoWrappedLinear", + "torch.nn.Embedding": 
"diffsynth.core.vram.layers.AutoWrappedModule", + "transformers.models.qwen2_5_vl.modeling_qwen2_5_vl.Qwen2_5_VLRotaryEmbedding": "diffsynth.core.vram.layers.AutoWrappedModule", + "transformers.models.qwen2_5_vl.modeling_qwen2_5_vl.Qwen2RMSNorm": "diffsynth.core.vram.layers.AutoWrappedModule", + "transformers.models.qwen2_5_vl.modeling_qwen2_5_vl.Qwen2_5_VisionPatchEmbed": "diffsynth.core.vram.layers.AutoWrappedModule", + "transformers.models.qwen2_5_vl.modeling_qwen2_5_vl.Qwen2_5_VisionRotaryEmbedding": "diffsynth.core.vram.layers.AutoWrappedModule", + }, + "diffsynth.models.qwen_image_vae.QwenImageVAE": { + "torch.nn.Linear": "diffsynth.core.vram.layers.AutoWrappedLinear", + "torch.nn.Conv3d": "diffsynth.core.vram.layers.AutoWrappedModule", + "torch.nn.Conv2d": "diffsynth.core.vram.layers.AutoWrappedModule", + "diffsynth.models.qwen_image_vae.QwenImageRMS_norm": "diffsynth.core.vram.layers.AutoWrappedModule", + }, + "diffsynth.models.qwen_image_controlnet.BlockWiseControlBlock": { + "diffsynth.models.qwen_image_dit.RMSNorm": "diffsynth.core.vram.layers.AutoWrappedModule", + "torch.nn.Linear": "diffsynth.core.vram.layers.AutoWrappedLinear", + }, +} diff --git a/diffsynth/controlnets/__init__.py b/diffsynth/controlnets/__init__.py deleted file mode 100644 index a3e15ad..0000000 --- a/diffsynth/controlnets/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .controlnet_unit import ControlNetConfigUnit, ControlNetUnit, MultiControlNetManager, FluxMultiControlNetManager -from .processors import Annotator diff --git a/diffsynth/controlnets/controlnet_unit.py b/diffsynth/controlnets/controlnet_unit.py deleted file mode 100644 index fdb4829..0000000 --- a/diffsynth/controlnets/controlnet_unit.py +++ /dev/null @@ -1,91 +0,0 @@ -import torch -import numpy as np -from .processors import Processor_id - - -class ControlNetConfigUnit: - def __init__(self, processor_id: Processor_id, model_path, scale=1.0, skip_processor=False): - self.processor_id = processor_id - self.model_path 
= model_path - self.scale = scale - self.skip_processor = skip_processor - - -class ControlNetUnit: - def __init__(self, processor, model, scale=1.0): - self.processor = processor - self.model = model - self.scale = scale - - -class MultiControlNetManager: - def __init__(self, controlnet_units=[]): - self.processors = [unit.processor for unit in controlnet_units] - self.models = [unit.model for unit in controlnet_units] - self.scales = [unit.scale for unit in controlnet_units] - - def cpu(self): - for model in self.models: - model.cpu() - - def to(self, device): - for model in self.models: - model.to(device) - for processor in self.processors: - processor.to(device) - - def process_image(self, image, processor_id=None): - if processor_id is None: - processed_image = [processor(image) for processor in self.processors] - else: - processed_image = [self.processors[processor_id](image)] - processed_image = torch.concat([ - torch.Tensor(np.array(image_, dtype=np.float32) / 255).permute(2, 0, 1).unsqueeze(0) - for image_ in processed_image - ], dim=0) - return processed_image - - def __call__( - self, - sample, timestep, encoder_hidden_states, conditionings, - tiled=False, tile_size=64, tile_stride=32, **kwargs - ): - res_stack = None - for processor, conditioning, model, scale in zip(self.processors, conditionings, self.models, self.scales): - res_stack_ = model( - sample, timestep, encoder_hidden_states, conditioning, **kwargs, - tiled=tiled, tile_size=tile_size, tile_stride=tile_stride, - processor_id=processor.processor_id - ) - res_stack_ = [res * scale for res in res_stack_] - if res_stack is None: - res_stack = res_stack_ - else: - res_stack = [i + j for i, j in zip(res_stack, res_stack_)] - return res_stack - - -class FluxMultiControlNetManager(MultiControlNetManager): - def __init__(self, controlnet_units=[]): - super().__init__(controlnet_units=controlnet_units) - - def process_image(self, image, processor_id=None): - if processor_id is None: - processed_image 
= [processor(image) for processor in self.processors] - else: - processed_image = [self.processors[processor_id](image)] - return processed_image - - def __call__(self, conditionings, **kwargs): - res_stack, single_res_stack = None, None - for processor, conditioning, model, scale in zip(self.processors, conditionings, self.models, self.scales): - res_stack_, single_res_stack_ = model(controlnet_conditioning=conditioning, processor_id=processor.processor_id, **kwargs) - res_stack_ = [res * scale for res in res_stack_] - single_res_stack_ = [res * scale for res in single_res_stack_] - if res_stack is None: - res_stack = res_stack_ - single_res_stack = single_res_stack_ - else: - res_stack = [i + j for i, j in zip(res_stack, res_stack_)] - single_res_stack = [i + j for i, j in zip(single_res_stack, single_res_stack_)] - return res_stack, single_res_stack diff --git a/diffsynth/core/__init__.py b/diffsynth/core/__init__.py new file mode 100644 index 0000000..72e501f --- /dev/null +++ b/diffsynth/core/__init__.py @@ -0,0 +1,5 @@ +from .attention import * +from .data import * +from .gradient import * +from .loader import * +from .vram import * diff --git a/diffsynth/core/attention/__init__.py b/diffsynth/core/attention/__init__.py new file mode 100644 index 0000000..45cf8a4 --- /dev/null +++ b/diffsynth/core/attention/__init__.py @@ -0,0 +1 @@ +from .attention import attention_forward diff --git a/diffsynth/core/attention/attention.py b/diffsynth/core/attention/attention.py new file mode 100644 index 0000000..15b55a4 --- /dev/null +++ b/diffsynth/core/attention/attention.py @@ -0,0 +1,121 @@ +import torch, os +from einops import rearrange + + +try: + import flash_attn_interface + FLASH_ATTN_3_AVAILABLE = True +except ModuleNotFoundError: + FLASH_ATTN_3_AVAILABLE = False + +try: + import flash_attn + FLASH_ATTN_2_AVAILABLE = True +except ModuleNotFoundError: + FLASH_ATTN_2_AVAILABLE = False + +try: + from sageattention import sageattn + SAGE_ATTN_AVAILABLE = True +except 
ModuleNotFoundError: + SAGE_ATTN_AVAILABLE = False + +try: + import xformers.ops as xops + XFORMERS_AVAILABLE = True +except ModuleNotFoundError: + XFORMERS_AVAILABLE = False + + +def initialize_attention_priority(): + if os.environ.get('DIFFSYNTH_ATTENTION_IMPLEMENTATION') is not None: + return os.environ.get('DIFFSYNTH_ATTENTION_IMPLEMENTATION').lower() + elif FLASH_ATTN_3_AVAILABLE: + return "flash_attention_3" + elif FLASH_ATTN_2_AVAILABLE: + return "flash_attention_2" + elif SAGE_ATTN_AVAILABLE: + return "sage_attention" + elif XFORMERS_AVAILABLE: + return "xformers" + else: + return "torch" + + +ATTENTION_IMPLEMENTATION = initialize_attention_priority() + + +def rearrange_qkv(q: torch.Tensor, k: torch.Tensor, v: torch.Tensor, q_pattern="b n s d", k_pattern="b n s d", v_pattern="b n s d", required_in_pattern="b n s d", dims=None): + dims = {} if dims is None else dims + if q_pattern != required_in_pattern: + q = rearrange(q, f"{q_pattern} -> {required_in_pattern}", **dims) + if k_pattern != required_in_pattern: + k = rearrange(k, f"{k_pattern} -> {required_in_pattern}", **dims) + if v_pattern != required_in_pattern: + v = rearrange(v, f"{q_pattern} -> {required_in_pattern}", **dims) + return q, k, v + + +def rearrange_out(out: torch.Tensor, out_pattern="b n s d", required_out_pattern="b n s d", dims=None): + dims = {} if dims is None else dims + if out_pattern != required_out_pattern: + out = rearrange(out, f"{required_out_pattern} -> {out_pattern}", **dims) + return out + + +def torch_sdpa(q: torch.Tensor, k: torch.Tensor, v: torch.Tensor, q_pattern="b n s d", k_pattern="b n s d", v_pattern="b n s d", out_pattern="b n s d", dims=None, attn_mask=None, scale=None): + required_in_pattern, required_out_pattern= "b n s d", "b n s d" + q, k, v = rearrange_qkv(q, k, v, q_pattern, k_pattern, v_pattern, required_in_pattern, dims) + out = torch.nn.functional.scaled_dot_product_attention(q, k, v, attn_mask, scale=scale) + out = rearrange_out(out, out_pattern, 
required_out_pattern, dims) + return out + + +def flash_attention_3(q: torch.Tensor, k: torch.Tensor, v: torch.Tensor, q_pattern="b n s d", k_pattern="b n s d", v_pattern="b n s d", out_pattern="b n s d", dims=None, scale=None): + required_in_pattern, required_out_pattern= "b s n d", "b s n d" + q, k, v = rearrange_qkv(q, k, v, q_pattern, k_pattern, v_pattern, required_in_pattern, dims) + out = flash_attn_interface.flash_attn_func(q, k, v, softmax_scale=scale) + if isinstance(out, tuple): + out = out[0] + out = rearrange_out(out, out_pattern, required_out_pattern, dims) + return out + + +def flash_attention_2(q: torch.Tensor, k: torch.Tensor, v: torch.Tensor, q_pattern="b n s d", k_pattern="b n s d", v_pattern="b n s d", out_pattern="b n s d", dims=None, scale=None): + required_in_pattern, required_out_pattern= "b s n d", "b s n d" + q, k, v = rearrange_qkv(q, k, v, q_pattern, k_pattern, v_pattern, required_in_pattern, dims) + out = flash_attn.flash_attn_func(q, k, v, softmax_scale=scale) + out = rearrange_out(out, out_pattern, required_out_pattern, dims) + return out + + +def sage_attention(q: torch.Tensor, k: torch.Tensor, v: torch.Tensor, q_pattern="b n s d", k_pattern="b n s d", v_pattern="b n s d", out_pattern="b n s d", dims=None, scale=None): + required_in_pattern, required_out_pattern= "b n s d", "b n s d" + q, k, v = rearrange_qkv(q, k, v, q_pattern, k_pattern, v_pattern, required_in_pattern, dims) + out = sageattn(q, k, v, sm_scale=scale) + out = rearrange_out(out, out_pattern, required_out_pattern, dims) + return out + + +def xformers_attention(q: torch.Tensor, k: torch.Tensor, v: torch.Tensor, q_pattern="b n s d", k_pattern="b n s d", v_pattern="b n s d", out_pattern="b n s d", dims=None, scale=None): + required_in_pattern, required_out_pattern= "b s n d", "b s n d" + q, k, v = rearrange_qkv(q, k, v, q_pattern, k_pattern, v_pattern, required_in_pattern, dims) + out = xops.memory_efficient_attention(q, k, v, scale=scale) + out = rearrange_out(out, 
out_pattern, required_out_pattern, dims) + return out + + +def attention_forward(q: torch.Tensor, k: torch.Tensor, v: torch.Tensor, q_pattern="b n s d", k_pattern="b n s d", v_pattern="b n s d", out_pattern="b n s d", dims=None, attn_mask=None, scale=None, compatibility_mode=False): + if compatibility_mode or (attn_mask is not None): + return torch_sdpa(q, k, v, q_pattern, k_pattern, v_pattern, out_pattern, dims, attn_mask=attn_mask, scale=scale) + else: + if ATTENTION_IMPLEMENTATION == "flash_attention_3": + return flash_attention_3(q, k, v, q_pattern, k_pattern, v_pattern, out_pattern, dims, scale=scale) + elif ATTENTION_IMPLEMENTATION == "flash_attention_2": + return flash_attention_2(q, k, v, q_pattern, k_pattern, v_pattern, out_pattern, dims, scale=scale) + elif ATTENTION_IMPLEMENTATION == "sage_attention": + return sage_attention(q, k, v, q_pattern, k_pattern, v_pattern, out_pattern, dims, scale=scale) + elif ATTENTION_IMPLEMENTATION == "xformers": + return xformers_attention(q, k, v, q_pattern, k_pattern, v_pattern, out_pattern, dims, scale=scale) + else: + return torch_sdpa(q, k, v, q_pattern, k_pattern, v_pattern, out_pattern, dims, scale=scale) diff --git a/diffsynth/core/data/__init__.py b/diffsynth/core/data/__init__.py new file mode 100644 index 0000000..d494a27 --- /dev/null +++ b/diffsynth/core/data/__init__.py @@ -0,0 +1 @@ +from .unified_dataset import UnifiedDataset diff --git a/diffsynth/trainers/unified_dataset.py b/diffsynth/core/data/operators.py similarity index 59% rename from diffsynth/trainers/unified_dataset.py rename to diffsynth/core/data/operators.py index c98a160..1b8d9a3 100644 --- a/diffsynth/trainers/unified_dataset.py +++ b/diffsynth/core/data/operators.py @@ -1,4 +1,4 @@ -import torch, torchvision, imageio, os, json, pandas +import torch, torchvision, imageio, os import imageio.v3 as iio from PIL import Image @@ -225,120 +225,3 @@ class ToAbsolutePath(DataProcessingOperator): def __call__(self, data): return 
os.path.join(self.base_path, data) -class LoadAudio(DataProcessingOperator): - def __init__(self, sr=16000): - self.sr = sr - def __call__(self, data: str): - import librosa - input_audio, sample_rate = librosa.load(data, sr=self.sr) - return input_audio - - -class UnifiedDataset(torch.utils.data.Dataset): - def __init__( - self, - base_path=None, metadata_path=None, - repeat=1, - data_file_keys=tuple(), - main_data_operator=lambda x: x, - special_operator_map=None, - ): - self.base_path = base_path - self.metadata_path = metadata_path - self.repeat = repeat - self.data_file_keys = data_file_keys - self.main_data_operator = main_data_operator - self.cached_data_operator = LoadTorchPickle() - self.special_operator_map = {} if special_operator_map is None else special_operator_map - self.data = [] - self.cached_data = [] - self.load_from_cache = metadata_path is None - self.load_metadata(metadata_path) - - @staticmethod - def default_image_operator( - base_path="", - max_pixels=1920*1080, height=None, width=None, - height_division_factor=16, width_division_factor=16, - ): - return RouteByType(operator_map=[ - (str, ToAbsolutePath(base_path) >> LoadImage() >> ImageCropAndResize(height, width, max_pixels, height_division_factor, width_division_factor)), - (list, SequencialProcess(ToAbsolutePath(base_path) >> LoadImage() >> ImageCropAndResize(height, width, max_pixels, height_division_factor, width_division_factor))), - ]) - - @staticmethod - def default_video_operator( - base_path="", - max_pixels=1920*1080, height=None, width=None, - height_division_factor=16, width_division_factor=16, - num_frames=81, time_division_factor=4, time_division_remainder=1, - ): - return RouteByType(operator_map=[ - (str, ToAbsolutePath(base_path) >> RouteByExtensionName(operator_map=[ - (("jpg", "jpeg", "png", "webp"), LoadImage() >> ImageCropAndResize(height, width, max_pixels, height_division_factor, width_division_factor) >> ToList()), - (("gif",), LoadGIF( - num_frames, 
time_division_factor, time_division_remainder, - frame_processor=ImageCropAndResize(height, width, max_pixels, height_division_factor, width_division_factor), - )), - (("mp4", "avi", "mov", "wmv", "mkv", "flv", "webm"), LoadVideo( - num_frames, time_division_factor, time_division_remainder, - frame_processor=ImageCropAndResize(height, width, max_pixels, height_division_factor, width_division_factor), - )), - ])), - ]) - - def search_for_cached_data_files(self, path): - for file_name in os.listdir(path): - subpath = os.path.join(path, file_name) - if os.path.isdir(subpath): - self.search_for_cached_data_files(subpath) - elif subpath.endswith(".pth"): - self.cached_data.append(subpath) - - def load_metadata(self, metadata_path): - if metadata_path is None: - print("No metadata_path. Searching for cached data files.") - self.search_for_cached_data_files(self.base_path) - print(f"{len(self.cached_data)} cached data files found.") - elif metadata_path.endswith(".json"): - with open(metadata_path, "r") as f: - metadata = json.load(f) - self.data = metadata - elif metadata_path.endswith(".jsonl"): - metadata = [] - with open(metadata_path, 'r') as f: - for line in f: - metadata.append(json.loads(line.strip())) - self.data = metadata - else: - metadata = pandas.read_csv(metadata_path) - self.data = [metadata.iloc[i].to_dict() for i in range(len(metadata))] - - def __getitem__(self, data_id): - if self.load_from_cache: - data = self.cached_data[data_id % len(self.cached_data)] - data = self.cached_data_operator(data) - else: - data = self.data[data_id % len(self.data)].copy() - for key in self.data_file_keys: - if key in data: - if key in self.special_operator_map: - data[key] = self.special_operator_map[key](data[key]) - elif key in self.data_file_keys: - data[key] = self.main_data_operator(data[key]) - return data - - def __len__(self): - if self.load_from_cache: - return len(self.cached_data) * self.repeat - else: - return len(self.data) * self.repeat - - def 
check_data_equal(self, data1, data2): - # Debug only - if len(data1) != len(data2): - return False - for k in data1: - if data1[k] != data2[k]: - return False - return True diff --git a/diffsynth/core/data/unified_dataset.py b/diffsynth/core/data/unified_dataset.py new file mode 100644 index 0000000..074208c --- /dev/null +++ b/diffsynth/core/data/unified_dataset.py @@ -0,0 +1,112 @@ +from .operators import * +import torch, json, pandas + + +class UnifiedDataset(torch.utils.data.Dataset): + def __init__( + self, + base_path=None, metadata_path=None, + repeat=1, + data_file_keys=tuple(), + main_data_operator=lambda x: x, + special_operator_map=None, + ): + self.base_path = base_path + self.metadata_path = metadata_path + self.repeat = repeat + self.data_file_keys = data_file_keys + self.main_data_operator = main_data_operator + self.cached_data_operator = LoadTorchPickle() + self.special_operator_map = {} if special_operator_map is None else special_operator_map + self.data = [] + self.cached_data = [] + self.load_from_cache = metadata_path is None + self.load_metadata(metadata_path) + + @staticmethod + def default_image_operator( + base_path="", + max_pixels=1920*1080, height=None, width=None, + height_division_factor=16, width_division_factor=16, + ): + return RouteByType(operator_map=[ + (str, ToAbsolutePath(base_path) >> LoadImage() >> ImageCropAndResize(height, width, max_pixels, height_division_factor, width_division_factor)), + (list, SequencialProcess(ToAbsolutePath(base_path) >> LoadImage() >> ImageCropAndResize(height, width, max_pixels, height_division_factor, width_division_factor))), + ]) + + @staticmethod + def default_video_operator( + base_path="", + max_pixels=1920*1080, height=None, width=None, + height_division_factor=16, width_division_factor=16, + num_frames=81, time_division_factor=4, time_division_remainder=1, + ): + return RouteByType(operator_map=[ + (str, ToAbsolutePath(base_path) >> RouteByExtensionName(operator_map=[ + (("jpg", "jpeg", 
"png", "webp"), LoadImage() >> ImageCropAndResize(height, width, max_pixels, height_division_factor, width_division_factor) >> ToList()), + (("gif",), LoadGIF( + num_frames, time_division_factor, time_division_remainder, + frame_processor=ImageCropAndResize(height, width, max_pixels, height_division_factor, width_division_factor), + )), + (("mp4", "avi", "mov", "wmv", "mkv", "flv", "webm"), LoadVideo( + num_frames, time_division_factor, time_division_remainder, + frame_processor=ImageCropAndResize(height, width, max_pixels, height_division_factor, width_division_factor), + )), + ])), + ]) + + def search_for_cached_data_files(self, path): + for file_name in os.listdir(path): + subpath = os.path.join(path, file_name) + if os.path.isdir(subpath): + self.search_for_cached_data_files(subpath) + elif subpath.endswith(".pth"): + self.cached_data.append(subpath) + + def load_metadata(self, metadata_path): + if metadata_path is None: + print("No metadata_path. Searching for cached data files.") + self.search_for_cached_data_files(self.base_path) + print(f"{len(self.cached_data)} cached data files found.") + elif metadata_path.endswith(".json"): + with open(metadata_path, "r") as f: + metadata = json.load(f) + self.data = metadata + elif metadata_path.endswith(".jsonl"): + metadata = [] + with open(metadata_path, 'r') as f: + for line in f: + metadata.append(json.loads(line.strip())) + self.data = metadata + else: + metadata = pandas.read_csv(metadata_path) + self.data = [metadata.iloc[i].to_dict() for i in range(len(metadata))] + + def __getitem__(self, data_id): + if self.load_from_cache: + data = self.cached_data[data_id % len(self.cached_data)] + data = self.cached_data_operator(data) + else: + data = self.data[data_id % len(self.data)].copy() + for key in self.data_file_keys: + if key in data: + if key in self.special_operator_map: + data[key] = self.special_operator_map[key](data[key]) + elif key in self.data_file_keys: + data[key] = self.main_data_operator(data[key]) 
@dataclass
class ModelConfig:
    """Describes where a model's weights come from and how they are placed across devices.

    Either `path` (local file(s)) or `model_id` (+ `origin_file_pattern`)
    must be provided. The `*_device` / `*_dtype` fields are optional
    fine-grained placement settings consumed by the VRAM-management layer.
    """

    # Local weight file(s); when set, downloading is skipped entirely.
    path: Union[str, list[str]] = None
    # Remote repository id (e.g. "org/model"), used when `path` is None.
    model_id: str = None
    # Pattern(s) selecting which files of the repo to fetch.
    origin_file_pattern: Union[str, list[str]] = None
    download_resource: str = "ModelScope"
    # Root directory for downloaded models (defaults to "./models").
    local_model_path: str = None
    # Tri-state: None means "consult the DIFFSYNTH_SKIP_DOWNLOAD env var".
    skip_download: bool = None
    offload_device: Optional[Union[str, torch.device]] = None
    offload_dtype: Optional[torch.dtype] = None
    onload_device: Optional[Union[str, torch.device]] = None
    onload_dtype: Optional[torch.dtype] = None
    preparing_device: Optional[Union[str, torch.device]] = None
    preparing_dtype: Optional[torch.dtype] = None
    computation_device: Optional[Union[str, torch.device]] = None
    computation_dtype: Optional[torch.dtype] = None

    def check_input(self):
        """Raise ValueError if neither a local path nor a model id is given."""
        if self.path is None and self.model_id is None:
            raise ValueError(f"""No valid model files. Please use `ModelConfig(path="xxx")` or `ModelConfig(model_id="xxx/yyy", origin_file_pattern="zzz")`. `skip_download=True` only supports the first one.""")

    def download(self):
        """Fetch matching files from ModelScope, skipping already-downloaded ones."""
        # NOTE: `glob.glob(..., root_dir=...)` requires Python 3.10+.
        downloaded_files = glob.glob(self.origin_file_pattern, root_dir=os.path.join(self.local_model_path, self.model_id))
        snapshot_download(
            self.model_id,
            local_dir=os.path.join(self.local_model_path, self.model_id),
            allow_file_pattern=self.origin_file_pattern,
            ignore_file_pattern=downloaded_files,  # do not re-download existing files
            local_files_only=False,
        )

    def require_downloading(self):
        """Return True when the weights must be fetched from the remote hub."""
        if self.path is not None:
            return False
        if self.skip_download is None:
            env = os.environ.get("DIFFSYNTH_SKIP_DOWNLOAD")
            if env is not None:
                # An unrecognized value leaves `skip_download` as None,
                # which is treated as "do download" below.
                if env in ("True", "true"):
                    self.skip_download = True
                elif env in ("False", "false"):
                    self.skip_download = False
            else:
                self.skip_download = False
        return not self.skip_download

    def reset_local_model_path(self):
        """Resolve the model cache root (the env var overrides the field)."""
        if os.environ.get("DIFFSYNTH_MODEL_BASE_PATH") is not None:
            self.local_model_path = os.environ.get("DIFFSYNTH_MODEL_BASE_PATH")
        elif self.local_model_path is None:
            self.local_model_path = "./models"

    def download_if_necessary(self):
        """Ensure `self.path` points at local files, downloading when required.

        Fix: `path` is now resolved from the local cache even when downloading
        is skipped (e.g. `skip_download=True` with files already on disk);
        previously a skipped download could leave `path` unresolved.
        """
        self.check_input()
        self.reset_local_model_path()
        if self.require_downloading():
            self.download()
        if self.path is None:
            self.path = glob.glob(os.path.join(self.local_model_path, self.model_id, self.origin_file_pattern))
            # A single match collapses to a plain string for convenience.
            if isinstance(self.path, list) and len(self.path) == 1:
                self.path = self.path[0]

    def vram_config(self):
        """Bundle the placement fields into the dict expected by `enable_vram_management`."""
        return {
            "offload_device": self.offload_device,
            "offload_dtype": self.offload_dtype,
            "onload_device": self.onload_device,
            "onload_dtype": self.onload_dtype,
            "preparing_device": self.preparing_device,
            "preparing_dtype": self.preparing_dtype,
            "computation_device": self.computation_device,
            "computation_dtype": self.computation_dtype,
        }
import torch, hashlib


def load_state_dict(file_path, torch_dtype=None, device="cpu"):
    """Load one checkpoint file (or a list of files) into a single state dict.

    Args:
        file_path: path or list of paths (.safetensors or torch-pickle).
        torch_dtype: optional dtype every tensor is cast to.
        device: map location for the loaded tensors.
    """
    if isinstance(file_path, list):
        state_dict = {}
        for single_path in file_path:
            state_dict.update(load_state_dict(single_path, torch_dtype, device))
        return state_dict
    if file_path.endswith(".safetensors"):
        return load_state_dict_from_safetensors(file_path, torch_dtype=torch_dtype, device=device)
    return load_state_dict_from_bin(file_path, torch_dtype=torch_dtype, device=device)


def load_state_dict_from_safetensors(file_path, torch_dtype=None, device="cpu"):
    """Load a .safetensors file, optionally casting tensors to `torch_dtype`."""
    # Deferred import: keeps this module usable when only binary checkpoints
    # are loaded and `safetensors` is not installed.
    from safetensors import safe_open
    state_dict = {}
    with safe_open(file_path, framework="pt", device=str(device)) as f:
        for k in f.keys():
            state_dict[k] = f.get_tensor(k)
            if torch_dtype is not None:
                state_dict[k] = state_dict[k].to(torch_dtype)
    return state_dict


def load_state_dict_from_bin(file_path, torch_dtype=None, device="cpu"):
    """Load a torch-pickle checkpoint; non-tensor entries are left untouched."""
    state_dict = torch.load(file_path, map_location=device, weights_only=True)
    if torch_dtype is not None:
        for key in state_dict:
            if isinstance(state_dict[key], torch.Tensor):
                state_dict[key] = state_dict[key].to(torch_dtype)
    return state_dict


def convert_state_dict_keys_to_single_str(state_dict, with_shape=True):
    """Serialize the (sorted) key set of a state dict into one canonical string.

    NOTE: the exact format — including the extra bare-key entry next to each
    "key:shape" entry — is part of the hashing contract. Do not change it,
    or previously computed model-file hashes will no longer match.
    """
    keys = []
    for key, value in state_dict.items():
        if isinstance(key, str):
            if isinstance(value, torch.Tensor):
                if with_shape:
                    shape = "_".join(map(str, list(value.shape)))
                    keys.append(key + ":" + shape)
                keys.append(key)
            elif isinstance(value, dict):
                keys.append(key + "|" + convert_state_dict_keys_to_single_str(value, with_shape=with_shape))
    keys.sort()
    return ",".join(keys)


def hash_state_dict_keys(state_dict, with_shape=True):
    """MD5 fingerprint of a state dict's key/shape structure."""
    keys_str = convert_state_dict_keys_to_single_str(state_dict, with_shape=with_shape)
    return hashlib.md5(keys_str.encode(encoding="UTF-8")).hexdigest()


def load_keys_dict(file_path):
    """Load only key -> shape mappings (no tensor data) from file(s)."""
    if isinstance(file_path, list):
        keys_dict = {}
        for single_path in file_path:
            keys_dict.update(load_keys_dict(single_path))
        return keys_dict
    if file_path.endswith(".safetensors"):
        return load_keys_dict_from_safetensors(file_path)
    return load_keys_dict_from_bin(file_path)


def load_keys_dict_from_safetensors(file_path):
    """Read shapes from a .safetensors header without loading tensor data."""
    from safetensors import safe_open  # deferred; see load_state_dict_from_safetensors
    keys_dict = {}
    with safe_open(file_path, framework="pt", device="cpu") as f:
        for k in f.keys():
            keys_dict[k] = f.get_slice(k).get_shape()
    return keys_dict


def load_keys_dict_from_bin(file_path):
    """Read shapes from a torch-pickle file (requires a full load)."""
    state_dict = load_state_dict_from_bin(file_path)
    return {key: list(value.shape) for key, value in state_dict.items()}


def convert_keys_dict_to_single_str(state_dict, with_shape=True):
    """Serialize a key->shape dict in the SAME format as
    `convert_state_dict_keys_to_single_str`, so file and state-dict hashes agree.

    Fix: nested dicts are now recursed with THIS function (values are shape
    lists) instead of the tensor-based converter, which silently dropped
    nested shape entries and made `hash_model_file` disagree with
    `hash_state_dict_keys` for nested structures. Flat dicts are unaffected.
    """
    keys = []
    for key, value in state_dict.items():
        if isinstance(key, str):
            if isinstance(value, dict):
                keys.append(key + "|" + convert_keys_dict_to_single_str(value, with_shape=with_shape))
            else:
                if with_shape:
                    shape = "_".join(map(str, list(value)))
                    keys.append(key + ":" + shape)
                keys.append(key)
    keys.sort()
    return ",".join(keys)


def hash_model_file(path, with_shape=True):
    """MD5 fingerprint of a model file's key/shape structure.

    Matches `hash_state_dict_keys` for the same weights without keeping all
    tensors in memory (safetensors shapes are read from the header).
    """
    keys_dict = load_keys_dict(path)
    keys_str = convert_keys_dict_to_single_str(keys_dict, with_shape=with_shape)
    return hashlib.md5(keys_str.encode(encoding="UTF-8")).hexdigest()
from ..vram.initialization import skip_model_initialization
from ..vram.disk_map import DiskMap
from ..vram.layers import enable_vram_management
from .file import load_state_dict
import torch


def load_model(model_class, path, config=None, torch_dtype=torch.bfloat16, device="cpu", state_dict_converter=None, use_disk_map=False, module_map=None, vram_config=None, vram_limit=None):
    """Instantiate `model_class` and load its weights from `path`.

    Args:
        model_class: architecture to instantiate (constructed with **config).
        path: checkpoint file or list of files.
        state_dict_converter: optional callable converting the raw state dict
            into the format the model expects (some files use complex layouts).
        use_disk_map: read parameters lazily from disk instead of loading the
            whole file — useful when one file bundles several models.
        module_map / vram_config / vram_limit: when `module_map` is given,
            the model is wrapped for fine-grained VRAM management.
    """
    config = {} if config is None else config
    # `skip_model_initialization` skips the random initialization of model
    # parameters, speeding up loading and avoiding excessive memory usage.
    with skip_model_initialization():
        model = model_class(**config)
    if module_map is not None:
        # VRAM-managed path: use the first non-disk tier as the device for
        # reading parameters from the checkpoint.
        devices = [vram_config["offload_device"], vram_config["onload_device"], vram_config["preparing_device"], vram_config["computation_device"]]
        load_device = [d for d in devices if d != "disk"][0]
        disk_map = DiskMap(path, load_device, state_dict_converter=state_dict_converter)
        if vram_config["offload_device"] != "disk":
            # Materialize the weights once; otherwise disk stays the backing store.
            state_dict = {key: disk_map[key].to(vram_config["offload_dtype"]) for key in disk_map}
            model.load_state_dict(state_dict, assign=True)
        model = enable_vram_management(model, module_map, vram_config=vram_config, disk_map=disk_map, vram_limit=vram_limit)
    else:
        # `DiskMap` can load only the parameters of a single model from a
        # file that contains several, avoiding loading everything.
        if use_disk_map:
            state_dict = DiskMap(path, device)
        else:
            state_dict = load_state_dict(path, torch_dtype, device)
        if state_dict_converter is not None:
            state_dict = state_dict_converter(state_dict)
        else:
            # Materialize into a plain dict (DiskMap is lazy).
            state_dict = {key: state_dict[key] for key in state_dict}
        model.load_state_dict(state_dict, assign=True)
        # Some models (e.g. from Transformers) override `to()`, so call it
        # explicitly rather than relying on parameter assignment alone.
        model = model.to(dtype=torch_dtype, device=device)
    if hasattr(model, "eval"):
        model = model.eval()
    return model


def load_model_with_disk_offload(model_class, path, config=None, torch_dtype=torch.bfloat16, device="cpu", state_dict_converter=None, module_map=None, vram_limit=80):
    """Instantiate `model_class` with all weights kept on disk.

    Parameters are streamed from the checkpoint (FP8 staging dtype,
    `torch_dtype` for computation) only when a layer actually runs.

    Generalization: `vram_limit` (GB) was a hard-coded 80; it is now a
    keyword parameter with the same default, so existing callers are
    unaffected.
    """
    if isinstance(path, str):
        path = [path]
    config = {} if config is None else config
    with skip_model_initialization():
        model = model_class(**config)
    if hasattr(model, "eval"):
        model = model.eval()
    disk_map = DiskMap(path, device, state_dict_converter=state_dict_converter)
    vram_config = {
        "offload_dtype": "disk",
        "offload_device": "disk",
        "onload_dtype": "disk",
        "onload_device": "disk",
        "preparing_dtype": torch.float8_e4m3fn,
        "preparing_device": device,
        "computation_dtype": torch_dtype,
        "computation_device": device,
    }
    enable_vram_management(model, module_map, vram_config=vram_config, disk_map=disk_map, vram_limit=vram_limit)
    return model
import torch, os
from contextlib import contextmanager


class SafetensorsCompatibleTensor:
    """Adapter giving a plain tensor the `get_shape()` interface of a safetensors slice."""

    def __init__(self, tensor):
        self.tensor = tensor

    def get_shape(self):
        return list(self.tensor.shape)


class SafetensorsCompatibleBinaryLoader:
    """Exposes a torch-pickle checkpoint through the safetensors file API.

    Unlike safetensors, the whole file is loaded into memory up front.
    """

    def __init__(self, path, device):
        print("Detected non-safetensors files, which may cause slower loading. It's recommended to convert it to a safetensors file.")
        self.state_dict = torch.load(path, weights_only=True, map_location=device)

    def keys(self):
        return self.state_dict.keys()

    def get_tensor(self, name):
        return self.state_dict[name]

    def get_slice(self, name):
        return SafetensorsCompatibleTensor(self.state_dict[name])


class DiskMap:
    """Lazy, dict-like view over one or more checkpoint files.

    Parameters are read from disk on access. A running count of loaded
    elements triggers a file re-open (`flush_files`) once `buffer_size`
    elements have been served, releasing safetensors mmap memory.
    """

    def __init__(self, path, device, torch_dtype=None, state_dict_converter=None, buffer_size=10**9):
        self.path = path if isinstance(path, list) else [path]
        self.device = device
        self.torch_dtype = torch_dtype
        # Environment variable overrides the default buffer size.
        if os.environ.get("DIFFSYNTH_DISK_MAP_BUFFER_SIZE") is not None:
            self.buffer_size = int(os.environ.get("DIFFSYNTH_DISK_MAP_BUFFER_SIZE"))
        else:
            self.buffer_size = buffer_size
        self.files = []
        self.flush_files()
        # Map each parameter name to the file it lives in.
        self.name_map = {}
        for file_id, file in enumerate(self.files):
            for name in file.keys():
                self.name_map[name] = file_id
        self.rename_dict = self.fetch_rename_dict(state_dict_converter)

    def flush_files(self):
        """(Re-)open the backing files and reset the served-elements counter.

        Binary (non-safetensors) loaders are kept as-is on re-flush: their
        contents already live in memory, so re-opening would gain nothing.
        """
        if len(self.files) == 0:
            for path in self.path:
                if path.endswith(".safetensors"):
                    # Deferred import: only needed for safetensors files, so
                    # binary-only usage works without the package installed.
                    from safetensors import safe_open
                    self.files.append(safe_open(path, framework="pt", device=str(self.device)))
                else:
                    self.files.append(SafetensorsCompatibleBinaryLoader(path, device=self.device))
        else:
            for i, path in enumerate(self.path):
                if path.endswith(".safetensors"):
                    from safetensors import safe_open
                    self.files[i] = safe_open(path, framework="pt", device=str(self.device))
        self.num_params = 0

    def __getitem__(self, name):
        if self.rename_dict is not None: name = self.rename_dict[name]
        file_id = self.name_map[name]
        param = self.files[file_id].get_tensor(name)
        if self.torch_dtype is not None:
            param = param.to(self.torch_dtype)
        # Re-open files periodically so mmap'd memory does not accumulate.
        self.num_params += param.numel()
        if self.num_params > self.buffer_size:
            self.flush_files()
        return param

    def fetch_rename_dict(self, state_dict_converter):
        """Build converted-name -> on-disk-name mapping via the converter.

        The converter is fed a name->name dict, so it must operate on keys
        only — converters that inspect tensor values are not supported here.
        """
        if state_dict_converter is None:
            return None
        state_dict = {}
        for file in self.files:
            for name in file.keys():
                state_dict[name] = name
        return state_dict_converter(state_dict)

    def __iter__(self):
        if self.rename_dict is not None:
            return self.rename_dict.__iter__()
        else:
            return self.name_map.__iter__()

    def __contains__(self, x):
        if self.rename_dict is not None:
            return x in self.rename_dict
        else:
            return x in self.name_map


@contextmanager
def skip_model_initialization(device=torch.device("meta")):
    """Create parameters on `device` (default: meta) instead of materializing
    them, skipping random initialization during model construction.

    Implemented by temporarily monkey-patching
    `torch.nn.Module.register_parameter`; the original is always restored.
    """

    def register_empty_parameter(module, name, param):
        old_register_parameter(module, name, param)
        if param is not None:
            param_cls = type(module._parameters[name])
            kwargs = module._parameters[name].__dict__
            kwargs["requires_grad"] = param.requires_grad
            module._parameters[name] = param_cls(module._parameters[name].to(device), **kwargs)

    old_register_parameter = torch.nn.Module.register_parameter
    torch.nn.Module.register_parameter = register_empty_parameter
    try:
        yield
    finally:
        torch.nn.Module.register_parameter = old_register_parameter
class AutoTorchModule(torch.nn.Module):
    """Base for VRAM-managed wrappers.

    Tracks where the wrapped weights currently live (`state`: 0 = offloaded,
    1 = onloaded, 2 = prepared) and moves them between the configured
    device/dtype tiers on demand. Unset tiers default to the computation tier.
    """

    def __init__(
        self,
        offload_dtype: torch.dtype = None,
        offload_device: Union[str, torch.device] = None,
        onload_dtype: torch.dtype = None,
        onload_device: Union[str, torch.device] = None,
        preparing_dtype: torch.dtype = None,
        preparing_device: Union[str, torch.device] = None,
        computation_dtype: torch.dtype = None,
        computation_device: Union[str, torch.device] = None,
        vram_limit: float = None,
    ):
        super().__init__()
        self.set_dtype_and_device(
            offload_dtype, offload_device,
            onload_dtype, onload_device,
            preparing_dtype, preparing_device,
            computation_dtype, computation_device,
            vram_limit,
        )
        self.state = 0
        self.name = ""

    def set_dtype_and_device(
        self,
        offload_dtype: torch.dtype = None,
        offload_device: Union[str, torch.device] = None,
        onload_dtype: torch.dtype = None,
        onload_device: Union[str, torch.device] = None,
        preparing_dtype: torch.dtype = None,
        preparing_device: Union[str, torch.device] = None,
        computation_dtype: torch.dtype = None,
        computation_device: Union[str, torch.device] = None,
        vram_limit: float = None,
    ):
        """Fill every tier, defaulting unset entries to the computation tier."""
        self.offload_dtype = offload_dtype or computation_dtype
        # BUG FIX: the devices previously defaulted to `computation_dtype`
        # (a dtype) instead of `computation_device`.
        self.offload_device = offload_device or computation_device
        self.onload_dtype = onload_dtype or computation_dtype
        self.onload_device = onload_device or computation_device
        self.preparing_dtype = preparing_dtype or computation_dtype
        self.preparing_device = preparing_device or computation_device
        self.computation_dtype = computation_dtype
        self.computation_device = computation_device
        self.vram_limit = vram_limit

    def cast_to(self, weight, dtype, device):
        """Copy `weight` into a freshly allocated tensor of the given dtype/device."""
        casted = torch.empty_like(weight, dtype=dtype, device=device)
        casted.copy_(weight)
        return casted

    def check_free_vram(self):
        """True when current GPU usage is below `vram_limit` (in GB)."""
        free_bytes, total_bytes = torch.cuda.mem_get_info(self.computation_device)
        used_memory = (total_bytes - free_bytes) / (1024**3)
        return used_memory < self.vram_limit

    def offload(self):
        if self.state != 0:
            self.to(dtype=self.offload_dtype, device=self.offload_device)
            self.state = 0

    def onload(self):
        if self.state != 1:
            self.to(dtype=self.onload_dtype, device=self.onload_device)
            self.state = 1

    def param_name(self, name):
        """Qualified parameter name (prefixed with this module's name, if any)."""
        if self.name == "":
            return name
        else:
            return self.name + "." + name


class AutoWrappedModule(AutoTorchModule):
    """Wraps an arbitrary submodule and migrates it between device tiers.

    When `offload_dtype == "disk"` the checkpoint file (via `disk_map`) acts
    as the backing store and parameters are re-read from disk on demand.
    """

    def __init__(
        self,
        module: torch.nn.Module,
        offload_dtype: torch.dtype = None,
        offload_device: Union[str, torch.device] = None,
        onload_dtype: torch.dtype = None,
        onload_device: Union[str, torch.device] = None,
        preparing_dtype: torch.dtype = None,
        preparing_device: Union[str, torch.device] = None,
        computation_dtype: torch.dtype = None,
        computation_device: Union[str, torch.device] = None,
        vram_limit: float = None,
        name: str = "",
        disk_map: "DiskMap" = None,  # string annotation: no hard dependency at class-creation time
        **kwargs
    ):
        super().__init__(
            offload_dtype, offload_device,
            onload_dtype, onload_device,
            preparing_dtype, preparing_device,
            computation_dtype, computation_device,
            vram_limit,
        )
        self.module = module
        if offload_dtype == "disk":
            self.name = name
            self.disk_map = disk_map
            self.required_params = [param_name for param_name, _ in self.module.named_parameters()]
            self.disk_offload = True
        else:
            self.disk_offload = False

    def load_from_disk(self, torch_dtype, device, copy_module=False):
        """Rebuild the wrapped module's parameters from the checkpoint.

        With `copy_module=True` a deep copy is populated (used for the
        temporary computation copy), leaving `self.module` untouched.
        """
        if copy_module:
            module = copy.deepcopy(self.module)
        else:
            module = self.module
        state_dict = {}
        for param_name in self.required_params:
            param = self.disk_map[self.param_name(param_name)]
            param = param.to(dtype=torch_dtype, device=device)
            state_dict[param_name] = param
        module.load_state_dict(state_dict, assign=True)
        module.to(dtype=torch_dtype, device=device)
        return module

    def offload_to_disk(self, model: torch.nn.Module):
        """Drop a module's parameters to "meta" (disk is the backing store).

        A module owning buffers cannot be moved to "meta" wholesale, because
        buffers are not restored from the state dict on reload; in that case
        we recurse into the children and leave the buffers where they are.
        """
        if any(True for _ in model.buffers()):
            for child in model.children():
                self.offload_to_disk(child)
        else:
            model.to("meta")

    def offload(self):
        # offload / onload / preparing -> offload
        if self.state != 0:
            if self.disk_offload:
                self.offload_to_disk(self.module)
            else:
                self.to(dtype=self.offload_dtype, device=self.offload_device)
            self.state = 0

    def onload(self):
        # offload / onload / preparing -> onload
        if self.state < 1:
            if self.disk_offload and self.onload_device != "disk" and self.offload_device == "disk":
                self.load_from_disk(self.onload_dtype, self.onload_device)
            else:
                self.to(dtype=self.onload_dtype, device=self.onload_device)
            self.state = 1

    def preparing(self):
        # onload / preparing -> preparing
        if self.state != 2:
            if self.disk_offload and self.preparing_device != "disk" and self.onload_device == "disk":
                self.load_from_disk(self.preparing_dtype, self.preparing_device)
            else:
                self.to(dtype=self.preparing_dtype, device=self.preparing_device)
            self.state = 2

    def computation(self):
        # onload / preparing -> computation (temporary copy; `self.module`
        # keeps its current tier unless it already matches the target).
        if self.state == 2:
            torch_dtype, device = self.preparing_dtype, self.preparing_device
        else:
            torch_dtype, device = self.onload_dtype, self.onload_device
        if torch_dtype == self.computation_dtype and device == self.computation_device:
            module = self.module
        elif self.disk_offload and device == "disk":
            module = self.load_from_disk(self.computation_dtype, self.computation_device, copy_module=True)
        else:
            module = copy.deepcopy(self.module).to(dtype=self.computation_dtype, device=self.computation_device)
        return module

    def forward(self, *args, **kwargs):
        # Opportunistically promote to the preparing tier while VRAM allows.
        if self.state == 1 and (self.vram_limit is None or self.check_free_vram()):
            self.preparing()
        module = self.computation()
        return module(*args, **kwargs)

    def __getattr__(self, name):
        # Delegate unknown attributes to the wrapped module so the wrapper
        # stays transparent to callers.
        if name in self.__dict__ or name == "module":
            return super().__getattr__(name)
        else:
            return getattr(self.module, name)
class AutoWrappedLinear(torch.nn.Linear, AutoTorchModule):
    """VRAM-managed drop-in replacement for `torch.nn.Linear`, with optional
    disk offload, FP8 computation, and stacked LoRA adapters."""

    def __init__(
        self,
        module: torch.nn.Linear,
        offload_dtype: torch.dtype = None,
        offload_device: Union[str, torch.device] = None,
        onload_dtype: torch.dtype = None,
        onload_device: Union[str, torch.device] = None,
        preparing_dtype: torch.dtype = None,
        preparing_device: Union[str, torch.device] = None,
        computation_dtype: torch.dtype = None,
        computation_device: Union[str, torch.device] = None,
        vram_limit: float = None,
        name: str = "",
        disk_map: "DiskMap" = None,  # string annotation: no hard dependency at class-creation time
        **kwargs
    ):
        # Build the Linear skeleton without materializing parameters; the
        # real tensors are taken over from `module` just below.
        with skip_model_initialization():
            super().__init__(
                in_features=module.in_features,
                out_features=module.out_features,
                bias=module.bias is not None,
            )
        self.set_dtype_and_device(
            offload_dtype, offload_device,
            onload_dtype, onload_device,
            preparing_dtype, preparing_device,
            computation_dtype, computation_device,
            vram_limit,
        )
        self.weight = module.weight
        self.bias = module.bias
        self.state = 0
        self.name = name
        self.lora_A_weights = []
        self.lora_B_weights = []
        self.lora_merger = None
        self.enable_fp8 = computation_dtype in [torch.float8_e4m3fn, torch.float8_e4m3fnuz]

        if offload_dtype == "disk":
            self.disk_map = disk_map
            self.disk_offload = True
        else:
            self.disk_offload = False

    def fp8_linear(
        self,
        input: torch.Tensor,
        weight: torch.Tensor,
        bias: torch.Tensor = None,
    ) -> torch.Tensor:
        """Row-scaled FP8 matmul via `torch._scaled_mm`; result in the input dtype."""
        device = input.device
        origin_dtype = input.dtype
        origin_shape = input.shape
        input = input.reshape(-1, origin_shape[-1])

        x_max = torch.max(torch.abs(input), dim=-1, keepdim=True).values
        fp8_max = 448.0
        # For float8_e4m3fnuz, the maximum representable value is half of that of e4m3fn.
        # To avoid overflow and ensure numerical compatibility during FP8 computation,
        # we scale down the input by 2.0 in advance.
        # This scaling will be compensated later during the final result scaling.
        if self.computation_dtype == torch.float8_e4m3fnuz:
            fp8_max = fp8_max / 2.0
        scale_a = torch.clamp(x_max / fp8_max, min=1.0).float().to(device=device)
        scale_b = torch.ones((weight.shape[0], 1)).to(device=device)
        input = input / (scale_a + 1e-8)
        input = input.to(self.computation_dtype)
        weight = weight.to(self.computation_dtype)
        # BUG FIX: `_scaled_mm` accepts bias=None; the previous unconditional
        # cast crashed for layers constructed without a bias.
        if bias is not None:
            bias = bias.to(torch.bfloat16)

        result = torch._scaled_mm(
            input,
            weight.T,
            scale_a=scale_a,
            scale_b=scale_b.T,
            bias=bias,
            out_dtype=origin_dtype,
        )
        new_shape = origin_shape[:-1] + result.shape[-1:]
        result = result.reshape(new_shape)
        return result

    def load_from_disk(self, torch_dtype, device, assign=True):
        """Read weight/bias from the checkpoint; optionally assign them to self."""
        weight = self.disk_map[self.name + ".weight"].to(dtype=torch_dtype, device=device)
        bias = None if self.bias is None else self.disk_map[self.name + ".bias"].to(dtype=torch_dtype, device=device)
        if assign:
            state_dict = {"weight": weight}
            if bias is not None: state_dict["bias"] = bias
            self.load_state_dict(state_dict, assign=True)
        return weight, bias

    def offload(self):
        # offload / onload / preparing -> offload
        if self.state != 0:
            if self.disk_offload:
                self.to("meta")
            else:
                self.to(dtype=self.offload_dtype, device=self.offload_device)
            self.state = 0

    def onload(self):
        # offload / onload / preparing -> onload
        if self.state < 1:
            if self.disk_offload and self.onload_device != "disk" and self.offload_device == "disk":
                self.load_from_disk(self.onload_dtype, self.onload_device)
            else:
                self.to(dtype=self.onload_dtype, device=self.onload_device)
            self.state = 1

    def preparing(self):
        # onload / preparing -> preparing
        if self.state != 2:
            if self.disk_offload and self.preparing_device != "disk" and self.onload_device == "disk":
                self.load_from_disk(self.preparing_dtype, self.preparing_device)
            else:
                self.to(dtype=self.preparing_dtype, device=self.preparing_device)
            self.state = 2

    def computation(self):
        # onload / preparing -> computation (temporary tensors; the stored
        # weight/bias keep their current tier unless they already match).
        if self.state == 2:
            torch_dtype, device = self.preparing_dtype, self.preparing_device
        else:
            torch_dtype, device = self.onload_dtype, self.onload_device
        if torch_dtype == self.computation_dtype and device == self.computation_device:
            weight, bias = self.weight, self.bias
        elif self.disk_offload and device == "disk":
            weight, bias = self.load_from_disk(self.computation_dtype, self.computation_device, assign=False)
        else:
            weight = self.cast_to(self.weight, self.computation_dtype, self.computation_device)
            bias = None if self.bias is None else self.cast_to(self.bias, self.computation_dtype, self.computation_device)
        return weight, bias

    def linear_forward(self, x, weight, bias):
        if self.enable_fp8:
            out = self.fp8_linear(x, weight, bias)
        else:
            out = torch.nn.functional.linear(x, weight, bias)
        return out

    def lora_forward(self, x, out):
        """Add the stacked LoRA contributions, optionally via `lora_merger`."""
        if self.lora_merger is None:
            for lora_A, lora_B in zip(self.lora_A_weights, self.lora_B_weights):
                out = out + x @ lora_A.T @ lora_B.T
        else:
            lora_output = []
            for lora_A, lora_B in zip(self.lora_A_weights, self.lora_B_weights):
                lora_output.append(x @ lora_A.T @ lora_B.T)
            lora_output = torch.stack(lora_output)
            out = self.lora_merger(out, lora_output)
        return out

    def forward(self, x, *args, **kwargs):
        # Opportunistically promote to the preparing tier while VRAM allows.
        if self.state == 1 and (self.vram_limit is None or self.check_free_vram()):
            self.preparing()
        weight, bias = self.computation()
        out = self.linear_forward(x, weight, bias)
        if len(self.lora_A_weights) > 0:
            out = self.lora_forward(x, out)
        return out
def enable_vram_management_recursively(model: torch.nn.Module, module_map: dict, vram_config: dict, vram_limit=None, name_prefix="", disk_map=None, **kwargs):
    """Walk the module tree, replacing each child whose type appears in
    `module_map` with its VRAM-managed wrapper; recurse into non-matching
    children. `name_prefix` accumulates the dotted parameter path."""
    for child_name, child in model.named_children():
        full_name = child_name if not name_prefix else name_prefix + "." + child_name
        wrapper_cls = next(
            (target for source, target in module_map.items() if isinstance(child, source)),
            None,
        )
        if wrapper_cls is not None:
            wrapped = wrapper_cls(child, **vram_config, vram_limit=vram_limit, name=full_name, disk_map=disk_map, **kwargs)
            setattr(model, child_name, wrapped)
        else:
            enable_vram_management_recursively(child, module_map, vram_config, vram_limit=vram_limit, name_prefix=full_name, disk_map=disk_map, **kwargs)


def fill_vram_config(model, vram_config):
    """Collapse the onload/preparing tiers onto the computation tier, warning
    once if that changes anything. Returns a new dict; the input is untouched."""
    merged = vram_config.copy()
    for stage in ("onload", "preparing"):
        merged[stage + "_dtype"] = vram_config["computation_dtype"]
        merged[stage + "_device"] = vram_config["computation_device"]
    if any(vram_config[key] != merged[key] for key in vram_config):
        print(f"No fine-grained VRAM configuration is provided for {model.__class__.__name__}. [`onload`, `preparing`, `computation`] will be the same state. `vram_config` is set to {merged}")
    return merged


def enable_vram_management(model: torch.nn.Module, module_map: dict, vram_config: dict, vram_limit=None, disk_map=None, **kwargs):
    """Wrap `model` (or its matching submodules) for VRAM management.

    If the top-level model itself matches `module_map`, it is wrapped as a
    whole with a uniform (filled) configuration; otherwise the tree is
    walked recursively. Sets `vram_management_enabled` so pipelines can
    detect that management is active."""
    top_level_wrapper = next(
        (target for source, target in module_map.items() if isinstance(model, source)),
        None,
    )
    if top_level_wrapper is not None:
        # No fine-grained configuration: manage the entire model uniformly.
        uniform_config = fill_vram_config(model, vram_config)
        model = top_level_wrapper(model, **uniform_config, vram_limit=vram_limit, disk_map=disk_map, **kwargs)
    else:
        enable_vram_management_recursively(model, module_map, vram_config, vram_limit=vram_limit, disk_map=disk_map, **kwargs)
    model.vram_management_enabled = True
    return model


def reset_vram_config(model: torch.nn.Module, vram_config: dict, vram_limit=None):
    """Push a new tier configuration onto every managed submodule and, if a
    disk map is in use, retarget it at the first non-disk device and re-open
    its files."""
    active_disk_map = None
    for submodule in model.modules():
        if not isinstance(submodule, AutoTorchModule):
            continue
        submodule.set_dtype_and_device(**vram_config, vram_limit=vram_limit)
        if getattr(submodule, "disk_map", None) is not None:
            active_disk_map = submodule.disk_map
    if active_disk_map is not None:
        candidates = [
            vram_config[key]
            for key in ("offload_device", "onload_device", "preparing_device", "computation_device")
            if vram_config[key] != "disk"
        ]
        active_disk_map.device = candidates[0]
        active_disk_map.flush_files()
x: x), - transforms.ToTensor(), - transforms.Normalize([0.5], [0.5]), - ] - ) - - - def __getitem__(self, index): - data_id = torch.randint(0, len(self.path), (1,))[0] - data_id = (data_id + index) % len(self.path) # For fixed seed. - text = self.text[data_id] - image = Image.open(self.path[data_id]).convert("RGB") - target_height, target_width = self.height, self.width - width, height = image.size - scale = max(target_width / width, target_height / height) - shape = [round(height*scale),round(width*scale)] - image = torchvision.transforms.functional.resize(image,shape,interpolation=transforms.InterpolationMode.BILINEAR) - image = self.image_processor(image) - return {"text": text, "image": image} - - - def __len__(self): - return self.steps_per_epoch diff --git a/diffsynth/data/video.py b/diffsynth/data/video.py deleted file mode 100644 index c6b9daa..0000000 --- a/diffsynth/data/video.py +++ /dev/null @@ -1,217 +0,0 @@ -import imageio, os -import numpy as np -from PIL import Image -from tqdm import tqdm -import subprocess -import shutil - - -class LowMemoryVideo: - def __init__(self, file_name): - self.reader = imageio.get_reader(file_name) - - def __len__(self): - return self.reader.count_frames() - - def __getitem__(self, item): - return Image.fromarray(np.array(self.reader.get_data(item))).convert("RGB") - - def __del__(self): - self.reader.close() - - -def split_file_name(file_name): - result = [] - number = -1 - for i in file_name: - if ord(i)>=ord("0") and ord(i)<=ord("9"): - if number == -1: - number = 0 - number = number*10 + ord(i) - ord("0") - else: - if number != -1: - result.append(number) - number = -1 - result.append(i) - if number != -1: - result.append(number) - result = tuple(result) - return result - - -def search_for_images(folder): - file_list = [i for i in os.listdir(folder) if i.endswith(".jpg") or i.endswith(".png")] - file_list = [(split_file_name(file_name), file_name) for file_name in file_list] - file_list = [i[1] for i in 
class LowMemoryImageFolder:
    """Lazily serves frames from a folder of image files as RGB PIL images."""

    def __init__(self, folder, file_list=None):
        # Discover image files when no explicit list is supplied.
        if file_list is None:
            self.file_list = search_for_images(folder)
        else:
            self.file_list = [os.path.join(folder, name) for name in file_list]

    def __len__(self):
        return len(self.file_list)

    def __getitem__(self, item):
        return Image.open(self.file_list[item]).convert("RGB")

    def __del__(self):
        pass


def crop_and_resize(image, height, width):
    """Center-crop `image` to the (height, width) aspect ratio, then resize to it."""
    pixels = np.array(image)
    src_height, src_width, _ = pixels.shape
    if src_height / src_width < height / width:
        # Source is relatively wider than the target: crop columns.
        cropped_width = int(src_height / height * width)
        offset = (src_width - cropped_width) // 2
        pixels = pixels[:, offset: offset + cropped_width]
    else:
        # Source is relatively taller than the target: crop rows.
        cropped_height = int(src_width / width * height)
        offset = (src_height - cropped_height) // 2
        pixels = pixels[offset: offset + cropped_height, :]
    return Image.fromarray(pixels).resize((width, height))


class VideoData:
    """Uniform, lazily-loaded frame accessor over a video file or an image folder.

    Frames are center-cropped and resized to a fixed (height, width) when one
    is set; otherwise they are returned at their native size.
    """

    def __init__(self, video_file=None, image_folder=None, height=None, width=None, **kwargs):
        if video_file is not None:
            self.data_type = "video"
            self.data = LowMemoryVideo(video_file, **kwargs)
        elif image_folder is not None:
            self.data_type = "images"
            self.data = LowMemoryImageFolder(image_folder, **kwargs)
        else:
            raise ValueError("Cannot open video or image folder")
        self.length = None
        self.set_shape(height, width)

    def raw_data(self):
        """Materialize every frame into a list of PIL images."""
        return [self[frame_id] for frame_id in range(len(self))]

    def set_length(self, length):
        # Override the reported frame count (None = defer to the backing store).
        self.length = length

    def set_shape(self, height, width):
        self.height = height
        self.width = width

    def __len__(self):
        return len(self.data) if self.length is None else self.length

    def shape(self):
        """Return (height, width), probing the first frame if no shape was set."""
        if self.height is not None and self.width is not None:
            return self.height, self.width
        # NOTE(review): frames are PIL images, which expose .size rather than
        # .shape — this fallback path looks broken; confirm before relying on it.
        height, width, _ = self[0].shape
        return height, width

    def __getitem__(self, item):
        frame = self.data[item]
        width, height = frame.size
        if self.height is not None and self.width is not None:
            if (self.height, self.width) != (height, width):
                frame = crop_and_resize(frame, self.height, self.width)
        return frame

    def __del__(self):
        pass

    def save_images(self, folder):
        """Write every frame to `folder` as numbered PNG files."""
        os.makedirs(folder, exist_ok=True)
        for frame_id in tqdm(range(len(self)), desc="Saving images"):
            self[frame_id].save(os.path.join(folder, f"{frame_id}.png"))


def save_video(frames, save_path, fps, quality=9, ffmpeg_params=None):
    """Encode `frames` (PIL images or arrays) into a video file at `fps`."""
    writer = imageio.get_writer(save_path, fps=fps, quality=quality, ffmpeg_params=ffmpeg_params)
    for frame in tqdm(frames, desc="Saving video"):
        writer.append_data(np.array(frame))
    writer.close()


def save_frames(frames, save_path):
    """Write `frames` into `save_path` as numbered PNG files."""
    os.makedirs(save_path, exist_ok=True)
    for frame_id, frame in enumerate(tqdm(frames, desc="Saving images")):
        frame.save(os.path.join(save_path, f"{frame_id}.png"))


def merge_video_audio(video_path: str, audio_path: str):
    """Mux `audio_path` into `video_path` in place using ffmpeg.

    The output duration is the shorter of the two streams. On failure the
    temporary file is removed and the original video is left untouched.
    """
    # TODO: may need a in-python implementation to avoid subprocess dependency
    if not os.path.exists(video_path):
        raise FileNotFoundError(f"video file {video_path} does not exist")
    if not os.path.exists(audio_path):
        raise FileNotFoundError(f"audio file {audio_path} does not exist")

    base, ext = os.path.splitext(video_path)
    temp_output = f"{base}_temp{ext}"

    try:
        command = [
            'ffmpeg', '-y',          # overwrite
            '-i', video_path,
            '-i', audio_path,
            '-c:v', 'copy',          # copy video stream
            '-c:a', 'aac',           # use AAC audio encoder
            '-b:a', '192k',          # set audio bitrate (optional)
            '-map', '0:v:0',         # select the first video stream
            '-map', '1:a:0',         # select the first audio stream
            '-shortest',             # choose the shortest duration
            temp_output,
        ]
        result = subprocess.run(
            command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        if result.returncode != 0:
            error_msg = f"FFmpeg execute failed: {result.stderr}"
            print(error_msg)
            raise RuntimeError(error_msg)
        shutil.move(temp_output, video_path)
        print(f"Merge completed, saved to {video_path}")
    except Exception as e:
        # Best-effort: clean up the temp file and report, but do not re-raise.
        if os.path.exists(temp_output):
            os.remove(temp_output)
        print(f"merge_video_audio failed with error: {e}")


def save_video_with_audio(frames, save_path, audio_path, fps=16, quality=9, ffmpeg_params=None):
    """Encode `frames` to `save_path`, then mux in the audio track from `audio_path`."""
    save_video(frames, save_path, fps, quality, ffmpeg_params)
    merge_video_audio(save_path, audio_path)
from PIL import Image
import torch
import numpy as np
from einops import repeat, reduce
from typing import Union
from ..core import AutoTorchModule, AutoWrappedLinear, load_state_dict, ModelConfig
from ..utils.lora import GeneralLoRALoader
from ..models.model_loader import ModelPool
from ..utils.controlnet import ControlNetInput


class PipelineUnit:
    """One node of a diffusion pipeline's computation graph.

    A unit declares which named variables it reads (`input_params`, plus the
    CFG-specific `input_params_posi` / `input_params_nega` mappings) and which
    it writes (`output_params`), so the pipeline can schedule and split units.
    """

    def __init__(
        self,
        seperate_cfg: bool = False,
        take_over: bool = False,
        input_params: tuple[str] = None,
        output_params: tuple[str] = None,
        input_params_posi: dict[str, str] = None,
        input_params_nega: dict[str, str] = None,
        onload_model_names: tuple[str] = None
    ):
        self.seperate_cfg = seperate_cfg
        self.take_over = take_over
        self.input_params = input_params
        self.output_params = output_params
        self.input_params_posi = input_params_posi
        self.input_params_nega = input_params_nega
        self.onload_model_names = onload_model_names

    def fetch_input_params(self):
        """Return the sorted, de-duplicated names of every variable this unit reads."""
        params = []
        if self.input_params is not None:
            params.extend(self.input_params)
        if self.input_params_posi is not None:
            params.extend(self.input_params_posi.values())
        if self.input_params_nega is not None:
            params.extend(self.input_params_nega.values())
        return sorted(set(params))

    def fetch_output_params(self):
        """Return the names of every variable this unit writes."""
        return list(self.output_params) if self.output_params is not None else []

    def process(self, pipe, **kwargs) -> dict:
        # Overridden by concrete units; returns a dict of produced variables.
        return {}

    def post_process(self, pipe, **kwargs) -> dict:
        return {}


class BasePipeline(torch.nn.Module):
    """Common infrastructure for diffusion pipelines: image/video tensor
    conversion, shape checks, VRAM on/offloading, noise generation and LoRA."""

    def __init__(
        self,
        device="cuda", torch_dtype=torch.float16,
        height_division_factor=64, width_division_factor=64,
        time_division_factor=None, time_division_remainder=None,
    ):
        super().__init__()
        # The device and torch_dtype is used for the storage of intermediate variables, not models.
        self.device = device
        self.torch_dtype = torch_dtype
        # The following parameters are used for shape check.
        self.height_division_factor = height_division_factor
        self.width_division_factor = width_division_factor
        self.time_division_factor = time_division_factor
        self.time_division_remainder = time_division_remainder
        # VRAM management
        self.vram_management_enabled = False
        # Pipeline Unit Runner
        self.unit_runner = PipelineUnitRunner()
        # LoRA Loader (stored as a class; instantiated on demand in load_lora)
        self.lora_loader = GeneralLoRALoader

    def to(self, *args, **kwargs):
        """Mirror torch's `.to` while also tracking the intermediate-variable
        device/dtype on the pipeline itself."""
        device, dtype, non_blocking, convert_to_format = torch._C._nn._parse_to(*args, **kwargs)
        if device is not None:
            self.device = device
        if dtype is not None:
            self.torch_dtype = dtype
        super().to(*args, **kwargs)
        return self

    def check_resize_height_width(self, height, width, num_frames=None):
        """Round height/width (and optionally num_frames) up to the nearest
        values accepted by the model, warning when an adjustment is made."""
        if height % self.height_division_factor != 0:
            height = (height + self.height_division_factor - 1) // self.height_division_factor * self.height_division_factor
            print(f"height % {self.height_division_factor} != 0. We round it up to {height}.")
        if width % self.width_division_factor != 0:
            width = (width + self.width_division_factor - 1) // self.width_division_factor * self.width_division_factor
            print(f"width % {self.width_division_factor} != 0. We round it up to {width}.")
        if num_frames is None:
            return height, width
        # Frame counts must satisfy num_frames % factor == remainder.
        if num_frames % self.time_division_factor != self.time_division_remainder:
            num_frames = (num_frames + self.time_division_factor - 1) // self.time_division_factor * self.time_division_factor + self.time_division_remainder
            print(f"num_frames % {self.time_division_factor} != {self.time_division_remainder}. We round it up to {num_frames}.")
        return height, width, num_frames

    def preprocess_image(self, image, torch_dtype=None, device=None, pattern="B C H W", min_value=-1, max_value=1):
        """Transform a PIL.Image into a torch.Tensor scaled to [min_value, max_value]."""
        image = torch.Tensor(np.array(image, dtype=np.float32))
        image = image.to(dtype=torch_dtype or self.torch_dtype, device=device or self.device)
        image = image * ((max_value - min_value) / 255) + min_value
        image = repeat(image, f"H W C -> {pattern}", **({"B": 1} if "B" in pattern else {}))
        return image

    def preprocess_video(self, video, torch_dtype=None, device=None, pattern="B C T H W", min_value=-1, max_value=1):
        """Transform a list of PIL.Image into a stacked torch.Tensor."""
        video = [self.preprocess_image(image, torch_dtype=torch_dtype, device=device, min_value=min_value, max_value=max_value) for image in video]
        # Stack along the axis where "T" sits in the pattern.
        video = torch.stack(video, dim=pattern.index("T") // 2)
        return video

    def vae_output_to_image(self, vae_output, pattern="B C H W", min_value=-1, max_value=1):
        """Transform a torch.Tensor back into a PIL.Image."""
        if pattern != "H W C":
            vae_output = reduce(vae_output, f"{pattern} -> H W C", reduction="mean")
        image = ((vae_output - min_value) * (255 / (max_value - min_value))).clip(0, 255)
        image = image.to(device="cpu", dtype=torch.uint8)
        image = Image.fromarray(image.numpy())
        return image

    def vae_output_to_video(self, vae_output, pattern="B C T H W", min_value=-1, max_value=1):
        """Transform a torch.Tensor back into a list of PIL.Image frames."""
        if pattern != "T H W C":
            vae_output = reduce(vae_output, f"{pattern} -> T H W C", reduction="mean")
        video = [self.vae_output_to_image(image, pattern="H W C", min_value=min_value, max_value=max_value) for image in vae_output]
        return video

    def load_models_to_device(self, model_names):
        """Offload every managed model not in `model_names`, then onload the
        ones that are. No-op unless VRAM management is enabled."""
        if not self.vram_management_enabled:
            return
        # offload models
        for name, model in self.named_children():
            if name not in model_names:
                if hasattr(model, "vram_management_enabled") and model.vram_management_enabled:
                    if hasattr(model, "offload"):
                        model.offload()
                    else:
                        for module in model.modules():
                            if hasattr(module, "offload"):
                                module.offload()
        torch.cuda.empty_cache()
        # onload models
        for name, model in self.named_children():
            if name in model_names:
                if hasattr(model, "vram_management_enabled") and model.vram_management_enabled:
                    if hasattr(model, "onload"):
                        model.onload()
                    else:
                        for module in model.modules():
                            if hasattr(module, "onload"):
                                module.onload()

    def generate_noise(self, shape, seed=None, rand_device="cpu", rand_torch_dtype=torch.float32, device=None, torch_dtype=None):
        """Initialize Gaussian noise, optionally with a fixed seed for reproducibility."""
        generator = None if seed is None else torch.Generator(rand_device).manual_seed(seed)
        noise = torch.randn(shape, generator=generator, device=rand_device, dtype=rand_torch_dtype)
        noise = noise.to(dtype=torch_dtype or self.torch_dtype, device=device or self.device)
        return noise

    def get_vram(self):
        """Return total VRAM of the pipeline device in GiB."""
        return torch.cuda.mem_get_info(self.device)[1] / (1024 ** 3)

    def freeze_except(self, model_names):
        """Put the named children in train mode with gradients; freeze the rest."""
        for name, model in self.named_children():
            if name in model_names:
                model.train()
                model.requires_grad_(True)
            else:
                model.eval()
                model.requires_grad_(False)

    def blend_with_mask(self, base, addition, mask):
        """Linear blend: `addition` where mask is 1, `base` where mask is 0."""
        return base * (1 - mask) + addition * mask

    def step(self, scheduler, latents, progress_id, noise_pred, input_latents=None, inpaint_mask=None, **kwargs):
        """Advance the sampler one step; in inpainting mode, force masked
        regions toward the prediction implied by `input_latents`."""
        timestep = scheduler.timesteps[progress_id]
        if inpaint_mask is not None:
            noise_pred_expected = scheduler.return_to_timestep(scheduler.timesteps[progress_id], latents, input_latents)
            noise_pred = self.blend_with_mask(noise_pred_expected, noise_pred, inpaint_mask)
        latents_next = scheduler.step(noise_pred, timestep, latents)
        return latents_next

    def split_pipeline_units(self, model_names: list[str]):
        """Partition self.units into (model-related, unrelated) unit lists."""
        return PipelineUnitGraph().split_pipeline_units(self.units, model_names)

    def flush_vram_management_device(self, device):
        """Pin every VRAM-managed module's stage devices to a single device."""
        for module in self.modules():
            if isinstance(module, AutoTorchModule):
                module.offload_device = device
                module.onload_device = device
                module.preparing_device = device
                module.computation_device = device

    def load_lora(
        self,
        module: torch.nn.Module,
        lora_config: Union[ModelConfig, str] = None,
        alpha=1,
        hotload=False,
        state_dict=None,
    ):
        """Load a LoRA into `module`, either hot-patched onto AutoWrappedLinear
        layers (requires VRAM management) or fused into the base weights.

        Raises ValueError when `hotload` is requested without VRAM management.
        """
        if state_dict is None:
            if isinstance(lora_config, str):
                lora = load_state_dict(lora_config, torch_dtype=self.torch_dtype, device=self.device)
            else:
                lora_config.download_if_necessary()
                lora = load_state_dict(lora_config.path, torch_dtype=self.torch_dtype, device=self.device)
        else:
            lora = state_dict
        lora_loader = self.lora_loader(torch_dtype=self.torch_dtype, device=self.device)
        lora = lora_loader.convert_state_dict(lora)
        if hotload:
            if not (hasattr(module, "vram_management_enabled") and getattr(module, "vram_management_enabled")):
                raise ValueError("VRAM Management is not enabled. LoRA hotloading is not supported.")
            updated_num = 0
            # Fixed: the loop variable previously shadowed the `module` parameter.
            for name, submodule in module.named_modules():
                if isinstance(submodule, AutoWrappedLinear):
                    lora_a_name = f'{name}.lora_A.weight'
                    lora_b_name = f'{name}.lora_B.weight'
                    if lora_a_name in lora and lora_b_name in lora:
                        updated_num += 1
                        # alpha is folded into the A matrix so B stays unscaled.
                        submodule.lora_A_weights.append(lora[lora_a_name] * alpha)
                        submodule.lora_B_weights.append(lora[lora_b_name])
            print(f"{updated_num} tensors are patched by LoRA. You can use `pipe.clear_lora()` to clear all LoRA layers.")
        else:
            lora_loader.fuse_lora_to_base_model(module, lora, alpha=alpha)

    def clear_lora(self):
        """Remove every hot-loaded LoRA weight pair from AutoWrappedLinear layers."""
        cleared_num = 0
        for name, module in self.named_modules():
            if isinstance(module, AutoWrappedLinear):
                if hasattr(module, "lora_A_weights"):
                    if len(module.lora_A_weights) > 0:
                        cleared_num += 1
                    module.lora_A_weights.clear()
                if hasattr(module, "lora_B_weights"):
                    module.lora_B_weights.clear()
        print(f"{cleared_num} LoRA layers are cleared.")

    def download_and_load_models(self, model_configs: list[ModelConfig] = None, vram_limit: float = None):
        """Download (if needed) and load each configured model into a ModelPool.

        Fixed: the default was a mutable list literal; `None` now stands in for
        "no configs" with identical behavior for callers.
        """
        if model_configs is None:
            model_configs = []
        model_pool = ModelPool()
        for model_config in model_configs:
            model_config.download_if_necessary()
            vram_config = model_config.vram_config()
            # Fall back to the pipeline's dtype/device when unspecified.
            vram_config["computation_dtype"] = vram_config["computation_dtype"] or self.torch_dtype
            vram_config["computation_device"] = vram_config["computation_device"] or self.device
            model_pool.auto_load_model(
                model_config.path,
                vram_config=vram_config,
                vram_limit=vram_limit,
            )
        return model_pool

    def check_vram_management_state(self):
        """Return True if any child module has VRAM management enabled."""
        for module in self.children():
            if getattr(module, "vram_management_enabled", False):
                return True
        return False


class PipelineUnitGraph:
    """Dependency analysis over a list of PipelineUnits, used to split the
    pipeline into the sub-graph touching given models and the remainder."""

    def __init__(self):
        pass

    def build_edges(self, units: list[PipelineUnit]):
        # Establish dependencies between units
        # to search for subsequent related computation units.
        last_compute_unit_id = {}
        edges = []
        for unit_id, unit in enumerate(units):
            for input_param in unit.fetch_input_params():
                if input_param in last_compute_unit_id:
                    edges.append((last_compute_unit_id[input_param], unit_id))
            for output_param in unit.fetch_output_params():
                last_compute_unit_id[output_param] = unit_id
        return edges

    def build_chains(self, units: list[PipelineUnit]):
        # Establish updating chains for each variable
        # to track their computation process.
        params = sum([unit.fetch_input_params() + unit.fetch_output_params() for unit in units], [])
        params = sorted(list(set(params)))
        chains = {param: [] for param in params}
        for unit_id, unit in enumerate(units):
            for param in unit.fetch_output_params():
                chains[param].append(unit_id)
        return chains

    def search_direct_unit_ids(self, units: list[PipelineUnit], model_names: list[str]):
        # Search for units that directly participate in the model's computation.
        related_unit_ids = []
        for unit_id, unit in enumerate(units):
            for model_name in model_names:
                if unit.onload_model_names is not None and model_name in unit.onload_model_names:
                    related_unit_ids.append(unit_id)
                    break
        return related_unit_ids

    def search_related_unit_ids(self, edges, start_unit_ids, direction="target"):
        # Transitive closure along `edges`, forward ("target") or backward ("source").
        related_unit_ids = [unit_id for unit_id in start_unit_ids]
        while True:
            neighbors = []
            for source, target in edges:
                if direction == "target" and source in related_unit_ids and target not in related_unit_ids:
                    neighbors.append(target)
                elif direction == "source" and source not in related_unit_ids and target in related_unit_ids:
                    neighbors.append(source)
            neighbors = sorted(list(set(neighbors)))
            if len(neighbors) == 0:
                break
            related_unit_ids.extend(neighbors)
        related_unit_ids = sorted(list(set(related_unit_ids)))
        return related_unit_ids

    def search_updating_unit_ids(self, units: list[PipelineUnit], chains, related_unit_ids):
        # If the input parameters of this subgraph are updated outside the subgraph,
        # search for the units where these updates occur.
        first_compute_unit_id = {}
        for unit_id in related_unit_ids:
            for param in units[unit_id].fetch_input_params():
                if param not in first_compute_unit_id:
                    first_compute_unit_id[param] = unit_id
        updating_unit_ids = []
        for param in first_compute_unit_id:
            unit_id = first_compute_unit_id[param]
            chain = chains[param]
            if unit_id in chain and chain.index(unit_id) != len(chain) - 1:
                for unit_id_ in chain[chain.index(unit_id) + 1:]:
                    if unit_id_ not in related_unit_ids:
                        updating_unit_ids.append(unit_id_)
        related_unit_ids.extend(updating_unit_ids)
        related_unit_ids = sorted(list(set(related_unit_ids)))
        return related_unit_ids

    def split_pipeline_units(self, units: list[PipelineUnit], model_names: list[str]):
        """Split `units` into (related, unrelated) lists, where "related" is the
        fixed point of forward-reachable and externally-updating units."""
        related_unit_ids = self.search_direct_unit_ids(units, model_names)
        edges = self.build_edges(units)
        chains = self.build_chains(units)
        while True:
            num_related_unit_ids = len(related_unit_ids)
            related_unit_ids = self.search_related_unit_ids(edges, related_unit_ids, "target")
            related_unit_ids = self.search_updating_unit_ids(units, chains, related_unit_ids)
            if len(related_unit_ids) == num_related_unit_ids:
                break
        related_units = [units[i] for i in related_unit_ids]
        unrelated_units = [units[i] for i in range(len(units)) if i not in related_unit_ids]
        return related_units, unrelated_units


class PipelineUnitRunner:
    """Executes a PipelineUnit against shared / positive / negative input dicts."""

    def __init__(self):
        pass

    def __call__(self, unit: PipelineUnit, pipe: BasePipeline, inputs_shared: dict, inputs_posi: dict, inputs_nega: dict) -> tuple[dict, dict]:
        if unit.take_over:
            # Let the pipeline unit take over this function.
            inputs_shared, inputs_posi, inputs_nega = unit.process(pipe, inputs_shared=inputs_shared, inputs_posi=inputs_posi, inputs_nega=inputs_nega)
        elif unit.seperate_cfg:
            # Positive side
            processor_inputs = {name: inputs_posi.get(name_) for name, name_ in unit.input_params_posi.items()}
            if unit.input_params is not None:
                for name in unit.input_params:
                    processor_inputs[name] = inputs_shared.get(name)
            processor_outputs = unit.process(pipe, **processor_inputs)
            inputs_posi.update(processor_outputs)
            # Negative side
            if inputs_shared["cfg_scale"] != 1:
                processor_inputs = {name: inputs_nega.get(name_) for name, name_ in unit.input_params_nega.items()}
                if unit.input_params is not None:
                    for name in unit.input_params:
                        processor_inputs[name] = inputs_shared.get(name)
                processor_outputs = unit.process(pipe, **processor_inputs)
                inputs_nega.update(processor_outputs)
            else:
                # CFG disabled: reuse the positive outputs to skip a model pass.
                inputs_nega.update(processor_outputs)
        else:
            processor_inputs = {name: inputs_shared.get(name) for name in unit.input_params}
            processor_outputs = unit.process(pipe, **processor_inputs)
            inputs_shared.update(processor_outputs)
        return inputs_shared, inputs_posi, inputs_nega
import os, torch
from accelerate import Accelerator


class ModelLogger:
    """Counts training steps and writes trainable-parameter checkpoints."""

    def __init__(self, output_path, remove_prefix_in_ckpt=None, state_dict_converter=lambda x:x):
        self.output_path = output_path
        self.remove_prefix_in_ckpt = remove_prefix_in_ckpt
        self.state_dict_converter = state_dict_converter
        self.num_steps = 0

    def on_step_end(self, accelerator: Accelerator, model: torch.nn.Module, save_steps=None):
        """Bump the step counter and checkpoint every `save_steps` steps."""
        self.num_steps += 1
        if save_steps is not None and self.num_steps % save_steps == 0:
            self.save_model(accelerator, model, f"step-{self.num_steps}.safetensors")

    def on_epoch_end(self, accelerator: Accelerator, model: torch.nn.Module, epoch_id):
        """Checkpoint at an epoch boundary (same path as save_model, epoch-named)."""
        self.save_model(accelerator, model, f"epoch-{epoch_id}.safetensors")

    def on_training_end(self, accelerator: Accelerator, model: torch.nn.Module, save_steps=None):
        """Final flush; skipped when the last step already produced a checkpoint."""
        if save_steps is not None and self.num_steps % save_steps != 0:
            self.save_model(accelerator, model, f"step-{self.num_steps}.safetensors")

    def save_model(self, accelerator: Accelerator, model: torch.nn.Module, file_name):
        """Gather, filter and write the trainable state dict on the main process."""
        accelerator.wait_for_everyone()
        if accelerator.is_main_process:
            state_dict = accelerator.get_state_dict(model)
            state_dict = accelerator.unwrap_model(model).export_trainable_state_dict(
                state_dict, remove_prefix=self.remove_prefix_in_ckpt)
            state_dict = self.state_dict_converter(state_dict)
            os.makedirs(self.output_path, exist_ok=True)
            accelerator.save(state_dict, os.path.join(self.output_path, file_name), safe_serialization=True)


# ---- diffsynth/diffusion/loss.py ----
from .base_pipeline import BasePipeline


def FlowMatchSFTLoss(pipe: BasePipeline, **inputs):
    """Flow-matching SFT loss at a randomly drawn training timestep."""
    timestep_id = torch.randint(0, pipe.scheduler.num_train_timesteps, (1,))
    timestep = pipe.scheduler.timesteps[timestep_id].to(dtype=pipe.torch_dtype, device=pipe.device)

    noise = torch.randn_like(inputs["input_latents"])
    inputs["latents"] = pipe.scheduler.add_noise(inputs["input_latents"], noise, timestep)
    target = pipe.scheduler.training_target(inputs["input_latents"], noise, timestep)

    models = {name: getattr(pipe, name) for name in pipe.in_iteration_models}
    prediction = pipe.model_fn(**models, **inputs, timestep=timestep)

    # Timestep-dependent weighting on top of plain MSE.
    loss = torch.nn.functional.mse_loss(prediction.float(), target.float())
    return loss * pipe.scheduler.training_weight(timestep)


def DirectDistillLoss(pipe: BasePipeline, **inputs):
    """Distillation loss: run the full sampling loop, then match reference latents."""
    pipe.scheduler.set_timesteps(inputs["num_inference_steps"])
    models = {name: getattr(pipe, name) for name in pipe.in_iteration_models}
    for progress_id, timestep in enumerate(pipe.scheduler.timesteps):
        timestep = timestep.unsqueeze(0).to(dtype=pipe.torch_dtype, device=pipe.device)
        noise_pred = pipe.model_fn(**models, **inputs, timestep=timestep, progress_id=progress_id)
        inputs["latents"] = pipe.step(pipe.scheduler, progress_id=progress_id, noise_pred=noise_pred, **inputs)
    return torch.nn.functional.mse_loss(inputs["latents"].float(), inputs["input_latents"].float())
import argparse


def add_dataset_base_config(parser: argparse.ArgumentParser):
    """Dataset location and loading options."""
    add = parser.add_argument
    add("--dataset_base_path", type=str, default="", required=True, help="Base path of the dataset.")
    add("--dataset_metadata_path", type=str, default=None, help="Path to the metadata file of the dataset.")
    add("--dataset_repeat", type=int, default=1, help="Number of times to repeat the dataset per epoch.")
    add("--dataset_num_workers", type=int, default=0, help="Number of workers for data loading.")
    add("--data_file_keys", type=str, default="image", help="Data file keys in the metadata. Comma-separated.")
    return parser


def add_image_size_config(parser: argparse.ArgumentParser):
    """Static vs. dynamic image resolution options."""
    add = parser.add_argument
    add("--max_pixels", type=int, default=1024*1024, help="Maximum number of pixels per frame, used for dynamic resolution..")
    add("--height", type=int, default=None, help="Height of images. Leave `height` and `width` empty to enable dynamic resolution.")
    add("--width", type=int, default=None, help="Width of images. Leave `height` and `width` empty to enable dynamic resolution.")
    return parser


def add_model_config(parser: argparse.ArgumentParser):
    """Model source and precision options."""
    add = parser.add_argument
    add("--model_paths", type=str, default=None, help="Paths to load models. In JSON format.")
    add("--model_id_with_origin_paths", type=str, default=None, help="Model ID with origin paths, e.g., Wan-AI/Wan2.1-T2V-1.3B:diffusion_pytorch_model*.safetensors. Comma-separated.")
    add("--extra_inputs", default=None, help="Additional model inputs, comma-separated.")
    add("--fp8_models", default=None, help="Models with FP8 precision, comma-separated.")
    return parser


def add_training_config(parser: argparse.ArgumentParser):
    """Core training hyperparameters."""
    add = parser.add_argument
    add("--learning_rate", type=float, default=1e-4, help="Learning rate.")
    add("--num_epochs", type=int, default=1, help="Number of epochs.")
    add("--trainable_models", type=str, default=None, help="Models to train, e.g., dit, vae, text_encoder.")
    add("--find_unused_parameters", default=False, action="store_true", help="Whether to find unused parameters in DDP.")
    add("--weight_decay", type=float, default=0.01, help="Weight decay.")
    add("--task", type=str, default="sft", required=False, help="Task type.")
    return parser


def add_output_config(parser: argparse.ArgumentParser):
    """Checkpoint output options."""
    add = parser.add_argument
    add("--output_path", type=str, default="./models", help="Output save path.")
    add("--remove_prefix_in_ckpt", type=str, default="pipe.dit.", help="Remove prefix in ckpt.")
    add("--save_steps", type=int, default=None, help="Number of checkpoint saving invervals. If None, checkpoints will be saved every epoch.")
    return parser


def add_lora_config(parser: argparse.ArgumentParser):
    """LoRA placement and checkpoint options."""
    add = parser.add_argument
    add("--lora_base_model", type=str, default=None, help="Which model LoRA is added to.")
    add("--lora_target_modules", type=str, default="q,k,v,o,ffn.0,ffn.2", help="Which layers LoRA is added to.")
    add("--lora_rank", type=int, default=32, help="Rank of LoRA.")
    add("--lora_checkpoint", type=str, default=None, help="Path to the LoRA checkpoint. If provided, LoRA will be loaded from this checkpoint.")
    add("--preset_lora_path", type=str, default=None, help="Path to the preset LoRA checkpoint. If provided, this LoRA will be fused to the base model.")
    add("--preset_lora_model", type=str, default=None, help="Which model the preset LoRA is fused to.")
    return parser


def add_gradient_config(parser: argparse.ArgumentParser):
    """Gradient checkpointing and accumulation options."""
    add = parser.add_argument
    add("--use_gradient_checkpointing", default=False, action="store_true", help="Whether to use gradient checkpointing.")
    add("--use_gradient_checkpointing_offload", default=False, action="store_true", help="Whether to offload gradient checkpointing to CPU memory.")
    add("--gradient_accumulation_steps", type=int, default=1, help="Gradient accumulation steps.")
    return parser


def add_general_config(parser: argparse.ArgumentParser):
    """Attach the standard option groups (image-size options are NOT included)."""
    for attach in (
        add_dataset_base_config,
        add_model_config,
        add_training_config,
        add_output_config,
        add_lora_config,
        add_gradient_config,
    ):
        parser = attach(parser)
    return parser
def launch_training_task(
    accelerator: Accelerator,
    dataset: torch.utils.data.Dataset,
    model: DiffusionTrainingModule,
    model_logger: ModelLogger,
    learning_rate: float = 1e-5,
    weight_decay: float = 1e-2,
    num_workers: int = 8,
    save_steps: int = None,
    num_epochs: int = 1,
    args = None,
):
    """Standard training loop: AdamW + constant LR, driven by `accelerator`.

    When `args` is provided, its fields override the keyword hyperparameters.
    Checkpoints go through `model_logger` (per-step when `save_steps` is set,
    otherwise per-epoch, plus a final flush).
    """
    if args is not None:
        learning_rate = args.learning_rate
        weight_decay = args.weight_decay
        num_workers = args.dataset_num_workers
        save_steps = args.save_steps
        num_epochs = args.num_epochs

    optimizer = torch.optim.AdamW(model.trainable_modules(), lr=learning_rate, weight_decay=weight_decay)
    lr_scheduler = torch.optim.lr_scheduler.ConstantLR(optimizer)
    # One sample per process; the collate_fn unwraps the singleton batch.
    dataloader = torch.utils.data.DataLoader(dataset, shuffle=True, collate_fn=lambda x: x[0], num_workers=num_workers)

    model, optimizer, dataloader, lr_scheduler = accelerator.prepare(model, optimizer, dataloader, lr_scheduler)

    for epoch_id in range(num_epochs):
        for data in tqdm(dataloader):
            with accelerator.accumulate(model):
                optimizer.zero_grad()
                # Cached datasets already hold preprocessed model inputs.
                loss = model({}, inputs=data) if dataset.load_from_cache else model(data)
                accelerator.backward(loss)
                optimizer.step()
                model_logger.on_step_end(accelerator, model, save_steps)
        lr_scheduler.step()
        if save_steps is None:
            model_logger.on_epoch_end(accelerator, model, epoch_id)
    model_logger.on_training_end(accelerator, model, save_steps)


def launch_data_process_task(
    accelerator: Accelerator,
    dataset: torch.utils.data.Dataset,
    model: DiffusionTrainingModule,
    model_logger: ModelLogger,
    num_workers: int = 8,
    args = None,
):
    """Run the model over the dataset once and cache its outputs as .pth files."""
    if args is not None:
        num_workers = args.dataset_num_workers

    dataloader = torch.utils.data.DataLoader(dataset, shuffle=False, collate_fn=lambda x: x[0], num_workers=num_workers)
    model, dataloader = accelerator.prepare(model, dataloader)

    for data_id, data in enumerate(tqdm(dataloader)):
        with accelerator.accumulate(model):
            with torch.no_grad():
                # One output folder per process to avoid write collisions.
                folder = os.path.join(model_logger.output_path, str(accelerator.process_index))
                os.makedirs(folder, exist_ok=True)
                save_path = os.path.join(folder, f"{data_id}.pth")
                torch.save(model(data), save_path)
"lora_A.default.weight").replace("lora_B.weight", "lora_B.default.weight") + new_state_dict[new_key] = value + elif "lora_A.default.weight" in key or "lora_B.default.weight" in key: + new_state_dict[key] = value + return new_state_dict + + + def export_trainable_state_dict(self, state_dict, remove_prefix=None): + trainable_param_names = self.trainable_param_names() + state_dict = {name: param for name, param in state_dict.items() if name in trainable_param_names} + if remove_prefix is not None: + state_dict_ = {} + for name, param in state_dict.items(): + if name.startswith(remove_prefix): + name = name[len(remove_prefix):] + state_dict_[name] = param + state_dict = state_dict_ + return state_dict + + + def transfer_data_to_device(self, data, device, torch_float_dtype=None): + if data is None: + return data + elif isinstance(data, torch.Tensor): + data = data.to(device) + if torch_float_dtype is not None and data.dtype in [torch.float, torch.float16, torch.bfloat16]: + data = data.to(torch_float_dtype) + return data + elif isinstance(data, tuple): + data = tuple(self.transfer_data_to_device(x, device, torch_float_dtype) for x in data) + return data + elif isinstance(data, list): + data = list(self.transfer_data_to_device(x, device, torch_float_dtype) for x in data) + return data + elif isinstance(data, dict): + data = {i: self.transfer_data_to_device(data[i], device, torch_float_dtype) for i in data} + return data + else: + return data + + + def parse_model_configs(self, model_paths, model_id_with_origin_paths, fp8_models=None, device="cpu"): + fp8_models = [] if fp8_models is None else fp8_models.split(",") + fp8_config = { + # To accommodate multi-GPU training, + # the model will be temporarily stored in CPU memory. 
+ "offload_dtype": torch.float8_e4m3fn, + "offload_device": device, + "onload_dtype": torch.float8_e4m3fn, + "onload_device": device, + "preparing_dtype": torch.float8_e4m3fn, + "preparing_device": device, + "computation_dtype": torch.bfloat16, + "computation_device": device, + } + model_configs = [] + if model_paths is not None: + model_paths = json.loads(model_paths) + for path in model_paths: + vram_config = fp8_config if path in fp8_models else {} + model_configs.append(ModelConfig(path=path, **vram_config)) + if model_id_with_origin_paths is not None: + model_id_with_origin_paths = model_id_with_origin_paths.split(",") + for model_id_with_origin_path in model_id_with_origin_paths: + model_id, origin_file_pattern = model_id_with_origin_path.split(":") + vram_config = fp8_config if model_id_with_origin_path in fp8_models else {} + model_configs.append(ModelConfig(model_id=model_id, origin_file_pattern=origin_file_pattern, **vram_config)) + return model_configs + + + def switch_pipe_to_training_mode( + self, + pipe, + trainable_models, + lora_base_model, lora_target_modules, lora_rank, lora_checkpoint=None, + preset_lora_path=None, preset_lora_model=None, + ): + # Scheduler + pipe.scheduler.set_timesteps(1000, training=True) + + # Freeze untrainable models + pipe.freeze_except([] if trainable_models is None else trainable_models.split(",")) + + # Preset LoRA + if preset_lora_path is not None: + pipe.load_lora(getattr(pipe, preset_lora_model), preset_lora_path) + + # FP8 + # FP8 relies on a model-specific memory management scheme. + # It is delegated to the subclass. + + # Add LoRA to the base models + if lora_base_model is not None: + if (not hasattr(pipe, lora_base_model)) or getattr(pipe, lora_base_model) is None: + print(f"No {lora_base_model} models in the pipeline. We cannot patch LoRA on the model. 
If this occurs during the data processing stage, it is normal.") + return + model = self.add_lora_to_model( + getattr(pipe, lora_base_model), + target_modules=lora_target_modules.split(","), + lora_rank=lora_rank, + upcast_dtype=pipe.torch_dtype, + ) + if lora_checkpoint is not None: + state_dict = load_state_dict(lora_checkpoint) + state_dict = self.mapping_lora_state_dict(state_dict) + load_result = model.load_state_dict(state_dict, strict=False) + print(f"LoRA checkpoint loaded: {lora_checkpoint}, total {len(state_dict)} keys") + if len(load_result[1]) > 0: + print(f"Warning, LoRA key mismatch! Unexpected keys in LoRA checkpoint: {load_result[1]}") + setattr(pipe, lora_base_model, model) + + + def split_pipeline_units(self, task, pipe, trainable_models=None, lora_base_model=None): + models_require_backward = [] + if trainable_models is not None: + models_require_backward += trainable_models.split(",") + if lora_base_model is not None: + models_require_backward += [lora_base_model] + if task.endswith(":data_process"): + _, pipe.units = pipe.split_pipeline_units(models_require_backward) + elif task.endswith(":train"): + pipe.units, _ = pipe.split_pipeline_units(models_require_backward) + return pipe + + def parse_extra_inputs(self, data, extra_inputs, inputs_shared): + controlnet_keys_map = ( + ("blockwise_controlnet_", "blockwise_controlnet_inputs",), + ("controlnet_", "controlnet_inputs"), + ) + controlnet_inputs = {} + for extra_input in extra_inputs: + for prefix, name in controlnet_keys_map: + if extra_input.startswith(prefix): + if name not in controlnet_inputs: + controlnet_inputs[name] = {} + controlnet_inputs[name][extra_input.replace(prefix, "")] = data[extra_input] + break + else: + inputs_shared[extra_input] = data[extra_input] + for name, params in controlnet_inputs.items(): + inputs_shared[name] = [ControlNetInput(**params)] + return inputs_shared diff --git a/diffsynth/distributed/xdit_context_parallel.py 
b/diffsynth/distributed/xdit_context_parallel.py deleted file mode 100644 index 4887e2f..0000000 --- a/diffsynth/distributed/xdit_context_parallel.py +++ /dev/null @@ -1,131 +0,0 @@ -import torch -from typing import Optional -from einops import rearrange -from xfuser.core.distributed import (get_sequence_parallel_rank, - get_sequence_parallel_world_size, - get_sp_group) -from xfuser.core.long_ctx_attention import xFuserLongContextAttention - -def sinusoidal_embedding_1d(dim, position): - sinusoid = torch.outer(position.type(torch.float64), torch.pow( - 10000, -torch.arange(dim//2, dtype=torch.float64, device=position.device).div(dim//2))) - x = torch.cat([torch.cos(sinusoid), torch.sin(sinusoid)], dim=1) - return x.to(position.dtype) - -def pad_freqs(original_tensor, target_len): - seq_len, s1, s2 = original_tensor.shape - pad_size = target_len - seq_len - padding_tensor = torch.ones( - pad_size, - s1, - s2, - dtype=original_tensor.dtype, - device=original_tensor.device) - padded_tensor = torch.cat([original_tensor, padding_tensor], dim=0) - return padded_tensor - -def rope_apply(x, freqs, num_heads): - x = rearrange(x, "b s (n d) -> b s n d", n=num_heads) - s_per_rank = x.shape[1] - - x_out = torch.view_as_complex(x.to(torch.float64).reshape( - x.shape[0], x.shape[1], x.shape[2], -1, 2)) - - sp_size = get_sequence_parallel_world_size() - sp_rank = get_sequence_parallel_rank() - freqs = pad_freqs(freqs, s_per_rank * sp_size) - freqs_rank = freqs[(sp_rank * s_per_rank):((sp_rank + 1) * s_per_rank), :, :] - - x_out = torch.view_as_real(x_out * freqs_rank).flatten(2) - return x_out.to(x.dtype) - -def usp_dit_forward(self, - x: torch.Tensor, - timestep: torch.Tensor, - context: torch.Tensor, - clip_feature: Optional[torch.Tensor] = None, - y: Optional[torch.Tensor] = None, - use_gradient_checkpointing: bool = False, - use_gradient_checkpointing_offload: bool = False, - **kwargs, - ): - t = self.time_embedding( - sinusoidal_embedding_1d(self.freq_dim, timestep)) - t_mod 
= self.time_projection(t).unflatten(1, (6, self.dim)) - context = self.text_embedding(context) - - if self.has_image_input: - x = torch.cat([x, y], dim=1) # (b, c_x + c_y, f, h, w) - clip_embdding = self.img_emb(clip_feature) - context = torch.cat([clip_embdding, context], dim=1) - - x, (f, h, w) = self.patchify(x) - - freqs = torch.cat([ - self.freqs[0][:f].view(f, 1, 1, -1).expand(f, h, w, -1), - self.freqs[1][:h].view(1, h, 1, -1).expand(f, h, w, -1), - self.freqs[2][:w].view(1, 1, w, -1).expand(f, h, w, -1) - ], dim=-1).reshape(f * h * w, 1, -1).to(x.device) - - def create_custom_forward(module): - def custom_forward(*inputs): - return module(*inputs) - return custom_forward - - # Context Parallel - chunks = torch.chunk(x, get_sequence_parallel_world_size(), dim=1) - pad_shape = chunks[0].shape[1] - chunks[-1].shape[1] - chunks = [torch.nn.functional.pad(chunk, (0, 0, 0, chunks[0].shape[1]-chunk.shape[1]), value=0) for chunk in chunks] - x = chunks[get_sequence_parallel_rank()] - - for block in self.blocks: - if self.training and use_gradient_checkpointing: - if use_gradient_checkpointing_offload: - with torch.autograd.graph.save_on_cpu(): - x = torch.utils.checkpoint.checkpoint( - create_custom_forward(block), - x, context, t_mod, freqs, - use_reentrant=False, - ) - else: - x = torch.utils.checkpoint.checkpoint( - create_custom_forward(block), - x, context, t_mod, freqs, - use_reentrant=False, - ) - else: - x = block(x, context, t_mod, freqs) - - x = self.head(x, t) - - # Context Parallel - x = get_sp_group().all_gather(x, dim=1) - x = x[:, :-pad_shape] if pad_shape > 0 else x - - # unpatchify - x = self.unpatchify(x, (f, h, w)) - return x - - -def usp_attn_forward(self, x, freqs): - q = self.norm_q(self.q(x)) - k = self.norm_k(self.k(x)) - v = self.v(x) - - q = rope_apply(q, freqs, self.num_heads) - k = rope_apply(k, freqs, self.num_heads) - q = rearrange(q, "b s (n d) -> b s n d", n=self.num_heads) - k = rearrange(k, "b s (n d) -> b s n d", n=self.num_heads) 
- v = rearrange(v, "b s (n d) -> b s n d", n=self.num_heads) - - x = xFuserLongContextAttention()( - None, - query=q, - key=k, - value=v, - ) - x = x.flatten(2) - - del q, k, v - torch.cuda.empty_cache() - return self.o(x) \ No newline at end of file diff --git a/diffsynth/extensions/ESRGAN/__init__.py b/diffsynth/extensions/ESRGAN/__init__.py deleted file mode 100644 index 94aff4c..0000000 --- a/diffsynth/extensions/ESRGAN/__init__.py +++ /dev/null @@ -1,137 +0,0 @@ -import torch -from einops import repeat -from PIL import Image -import numpy as np - - -class ResidualDenseBlock(torch.nn.Module): - - def __init__(self, num_feat=64, num_grow_ch=32): - super(ResidualDenseBlock, self).__init__() - self.conv1 = torch.nn.Conv2d(num_feat, num_grow_ch, 3, 1, 1) - self.conv2 = torch.nn.Conv2d(num_feat + num_grow_ch, num_grow_ch, 3, 1, 1) - self.conv3 = torch.nn.Conv2d(num_feat + 2 * num_grow_ch, num_grow_ch, 3, 1, 1) - self.conv4 = torch.nn.Conv2d(num_feat + 3 * num_grow_ch, num_grow_ch, 3, 1, 1) - self.conv5 = torch.nn.Conv2d(num_feat + 4 * num_grow_ch, num_feat, 3, 1, 1) - self.lrelu = torch.nn.LeakyReLU(negative_slope=0.2, inplace=True) - - def forward(self, x): - x1 = self.lrelu(self.conv1(x)) - x2 = self.lrelu(self.conv2(torch.cat((x, x1), 1))) - x3 = self.lrelu(self.conv3(torch.cat((x, x1, x2), 1))) - x4 = self.lrelu(self.conv4(torch.cat((x, x1, x2, x3), 1))) - x5 = self.conv5(torch.cat((x, x1, x2, x3, x4), 1)) - return x5 * 0.2 + x - - -class RRDB(torch.nn.Module): - - def __init__(self, num_feat, num_grow_ch=32): - super(RRDB, self).__init__() - self.rdb1 = ResidualDenseBlock(num_feat, num_grow_ch) - self.rdb2 = ResidualDenseBlock(num_feat, num_grow_ch) - self.rdb3 = ResidualDenseBlock(num_feat, num_grow_ch) - - def forward(self, x): - out = self.rdb1(x) - out = self.rdb2(out) - out = self.rdb3(out) - return out * 0.2 + x - - -class RRDBNet(torch.nn.Module): - - def __init__(self, num_in_ch=3, num_out_ch=3, num_feat=64, num_block=23, num_grow_ch=32, **kwargs): - 
super(RRDBNet, self).__init__() - self.conv_first = torch.nn.Conv2d(num_in_ch, num_feat, 3, 1, 1) - self.body = torch.torch.nn.Sequential(*[RRDB(num_feat=num_feat, num_grow_ch=num_grow_ch) for _ in range(num_block)]) - self.conv_body = torch.nn.Conv2d(num_feat, num_feat, 3, 1, 1) - # upsample - self.conv_up1 = torch.nn.Conv2d(num_feat, num_feat, 3, 1, 1) - self.conv_up2 = torch.nn.Conv2d(num_feat, num_feat, 3, 1, 1) - self.conv_hr = torch.nn.Conv2d(num_feat, num_feat, 3, 1, 1) - self.conv_last = torch.nn.Conv2d(num_feat, num_out_ch, 3, 1, 1) - self.lrelu = torch.nn.LeakyReLU(negative_slope=0.2, inplace=True) - - def forward(self, x): - feat = x - feat = self.conv_first(feat) - body_feat = self.conv_body(self.body(feat)) - feat = feat + body_feat - # upsample - feat = repeat(feat, "B C H W -> B C (H 2) (W 2)") - feat = self.lrelu(self.conv_up1(feat)) - feat = repeat(feat, "B C H W -> B C (H 2) (W 2)") - feat = self.lrelu(self.conv_up2(feat)) - out = self.conv_last(self.lrelu(self.conv_hr(feat))) - return out - - @staticmethod - def state_dict_converter(): - return RRDBNetStateDictConverter() - - -class RRDBNetStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - return state_dict, {"upcast_to_float32": True} - - def from_civitai(self, state_dict): - return state_dict, {"upcast_to_float32": True} - - -class ESRGAN(torch.nn.Module): - def __init__(self, model): - super().__init__() - self.model = model - - @staticmethod - def from_model_manager(model_manager): - return ESRGAN(model_manager.fetch_model("esrgan")) - - def process_image(self, image): - image = torch.Tensor(np.array(image, dtype=np.float32) / 255).permute(2, 0, 1) - return image - - def process_images(self, images): - images = [self.process_image(image) for image in images] - images = torch.stack(images) - return images - - def decode_images(self, images): - images = (images.permute(0, 2, 3, 1) * 255).clip(0, 255).numpy().astype(np.uint8) - images = 
[Image.fromarray(image) for image in images] - return images - - @torch.no_grad() - def upscale(self, images, batch_size=4, progress_bar=lambda x:x): - if not isinstance(images, list): - images = [images] - is_single_image = True - else: - is_single_image = False - - # Preprocess - input_tensor = self.process_images(images) - - # Interpolate - output_tensor = [] - for batch_id in progress_bar(range(0, input_tensor.shape[0], batch_size)): - batch_id_ = min(batch_id + batch_size, input_tensor.shape[0]) - batch_input_tensor = input_tensor[batch_id: batch_id_] - batch_input_tensor = batch_input_tensor.to( - device=self.model.conv_first.weight.device, - dtype=self.model.conv_first.weight.dtype) - batch_output_tensor = self.model(batch_input_tensor) - output_tensor.append(batch_output_tensor.cpu()) - - # Output - output_tensor = torch.concat(output_tensor, dim=0) - - # To images - output_images = self.decode_images(output_tensor) - if is_single_image: - output_images = output_images[0] - return output_images diff --git a/diffsynth/extensions/FastBlend/__init__.py b/diffsynth/extensions/FastBlend/__init__.py deleted file mode 100644 index 2bf812c..0000000 --- a/diffsynth/extensions/FastBlend/__init__.py +++ /dev/null @@ -1,63 +0,0 @@ -from .runners.fast import TableManager, PyramidPatchMatcher -from PIL import Image -import numpy as np -import cupy as cp - - -class FastBlendSmoother: - def __init__(self): - self.batch_size = 8 - self.window_size = 64 - self.ebsynth_config = { - "minimum_patch_size": 5, - "threads_per_block": 8, - "num_iter": 5, - "gpu_id": 0, - "guide_weight": 10.0, - "initialize": "identity", - "tracking_window_size": 0, - } - - @staticmethod - def from_model_manager(model_manager): - # TODO: fetch GPU ID from model_manager - return FastBlendSmoother() - - def run(self, frames_guide, frames_style, batch_size, window_size, ebsynth_config): - frames_guide = [np.array(frame) for frame in frames_guide] - frames_style = [np.array(frame) for frame in 
frames_style] - table_manager = TableManager() - patch_match_engine = PyramidPatchMatcher( - image_height=frames_style[0].shape[0], - image_width=frames_style[0].shape[1], - channel=3, - **ebsynth_config - ) - # left part - table_l = table_manager.build_remapping_table(frames_guide, frames_style, patch_match_engine, batch_size, desc="FastBlend Step 1/4") - table_l = table_manager.remapping_table_to_blending_table(table_l) - table_l = table_manager.process_window_sum(frames_guide, table_l, patch_match_engine, window_size, batch_size, desc="FastBlend Step 2/4") - # right part - table_r = table_manager.build_remapping_table(frames_guide[::-1], frames_style[::-1], patch_match_engine, batch_size, desc="FastBlend Step 3/4") - table_r = table_manager.remapping_table_to_blending_table(table_r) - table_r = table_manager.process_window_sum(frames_guide[::-1], table_r, patch_match_engine, window_size, batch_size, desc="FastBlend Step 4/4")[::-1] - # merge - frames = [] - for (frame_l, weight_l), frame_m, (frame_r, weight_r) in zip(table_l, frames_style, table_r): - weight_m = -1 - weight = weight_l + weight_m + weight_r - frame = frame_l * (weight_l / weight) + frame_m * (weight_m / weight) + frame_r * (weight_r / weight) - frames.append(frame) - frames = [Image.fromarray(frame.clip(0, 255).astype("uint8")) for frame in frames] - return frames - - def __call__(self, rendered_frames, original_frames=None, **kwargs): - frames = self.run( - original_frames, rendered_frames, - self.batch_size, self.window_size, self.ebsynth_config - ) - mempool = cp.get_default_memory_pool() - pinned_mempool = cp.get_default_pinned_memory_pool() - mempool.free_all_blocks() - pinned_mempool.free_all_blocks() - return frames \ No newline at end of file diff --git a/diffsynth/extensions/FastBlend/api.py b/diffsynth/extensions/FastBlend/api.py deleted file mode 100644 index 2db2433..0000000 --- a/diffsynth/extensions/FastBlend/api.py +++ /dev/null @@ -1,397 +0,0 @@ -from .runners import 
AccurateModeRunner, FastModeRunner, BalancedModeRunner, InterpolationModeRunner, InterpolationModeSingleFrameRunner -from .data import VideoData, get_video_fps, save_video, search_for_images -import os -import gradio as gr - - -def check_input_for_blending(video_guide, video_guide_folder, video_style, video_style_folder): - frames_guide = VideoData(video_guide, video_guide_folder) - frames_style = VideoData(video_style, video_style_folder) - message = "" - if len(frames_guide) < len(frames_style): - message += f"The number of frames mismatches. Only the first {len(frames_guide)} frames of style video will be used.\n" - frames_style.set_length(len(frames_guide)) - elif len(frames_guide) > len(frames_style): - message += f"The number of frames mismatches. Only the first {len(frames_style)} frames of guide video will be used.\n" - frames_guide.set_length(len(frames_style)) - height_guide, width_guide = frames_guide.shape() - height_style, width_style = frames_style.shape() - if height_guide != height_style or width_guide != width_style: - message += f"The shape of frames mismatches. 
The frames in style video will be resized to (height: {height_guide}, width: {width_guide})\n" - frames_style.set_shape(height_guide, width_guide) - return frames_guide, frames_style, message - - -def smooth_video( - video_guide, - video_guide_folder, - video_style, - video_style_folder, - mode, - window_size, - batch_size, - tracking_window_size, - output_path, - fps, - minimum_patch_size, - num_iter, - guide_weight, - initialize, - progress = None, -): - # input - frames_guide, frames_style, message = check_input_for_blending(video_guide, video_guide_folder, video_style, video_style_folder) - if len(message) > 0: - print(message) - # output - if output_path == "": - if video_style is None: - output_path = os.path.join(video_style_folder, "output") - else: - output_path = os.path.join(os.path.split(video_style)[0], "output") - os.makedirs(output_path, exist_ok=True) - print("No valid output_path. Your video will be saved here:", output_path) - elif not os.path.exists(output_path): - os.makedirs(output_path, exist_ok=True) - print("Your video will be saved here:", output_path) - frames_path = os.path.join(output_path, "frames") - video_path = os.path.join(output_path, "video.mp4") - os.makedirs(frames_path, exist_ok=True) - # process - if mode == "Fast" or mode == "Balanced": - tracking_window_size = 0 - ebsynth_config = { - "minimum_patch_size": minimum_patch_size, - "threads_per_block": 8, - "num_iter": num_iter, - "gpu_id": 0, - "guide_weight": guide_weight, - "initialize": initialize, - "tracking_window_size": tracking_window_size, - } - if mode == "Fast": - FastModeRunner().run(frames_guide, frames_style, batch_size=batch_size, window_size=window_size, ebsynth_config=ebsynth_config, save_path=frames_path) - elif mode == "Balanced": - BalancedModeRunner().run(frames_guide, frames_style, batch_size=batch_size, window_size=window_size, ebsynth_config=ebsynth_config, save_path=frames_path) - elif mode == "Accurate": - AccurateModeRunner().run(frames_guide, 
frames_style, batch_size=batch_size, window_size=window_size, ebsynth_config=ebsynth_config, save_path=frames_path) - # output - try: - fps = int(fps) - except: - fps = get_video_fps(video_style) if video_style is not None else 30 - print("Fps:", fps) - print("Saving video...") - video_path = save_video(frames_path, video_path, num_frames=len(frames_style), fps=fps) - print("Success!") - print("Your frames are here:", frames_path) - print("Your video is here:", video_path) - return output_path, fps, video_path - - -class KeyFrameMatcher: - def __init__(self): - pass - - def extract_number_from_filename(self, file_name): - result = [] - number = -1 - for i in file_name: - if ord(i)>=ord("0") and ord(i)<=ord("9"): - if number == -1: - number = 0 - number = number*10 + ord(i) - ord("0") - else: - if number != -1: - result.append(number) - number = -1 - if number != -1: - result.append(number) - result = tuple(result) - return result - - def extract_number_from_filenames(self, file_names): - numbers = [self.extract_number_from_filename(file_name) for file_name in file_names] - min_length = min(len(i) for i in numbers) - for i in range(min_length-1, -1, -1): - if len(set(number[i] for number in numbers))==len(file_names): - return [number[i] for number in numbers] - return list(range(len(file_names))) - - def match_using_filename(self, file_names_a, file_names_b): - file_names_b_set = set(file_names_b) - matched_file_name = [] - for file_name in file_names_a: - if file_name not in file_names_b_set: - matched_file_name.append(None) - else: - matched_file_name.append(file_name) - return matched_file_name - - def match_using_numbers(self, file_names_a, file_names_b): - numbers_a = self.extract_number_from_filenames(file_names_a) - numbers_b = self.extract_number_from_filenames(file_names_b) - numbers_b_dict = {number: file_name for number, file_name in zip(numbers_b, file_names_b)} - matched_file_name = [] - for number in numbers_a: - if number in numbers_b_dict: - 
matched_file_name.append(numbers_b_dict[number]) - else: - matched_file_name.append(None) - return matched_file_name - - def match_filenames(self, file_names_a, file_names_b): - matched_file_name = self.match_using_filename(file_names_a, file_names_b) - if sum([i is not None for i in matched_file_name]) > 0: - return matched_file_name - matched_file_name = self.match_using_numbers(file_names_a, file_names_b) - return matched_file_name - - -def detect_frames(frames_path, keyframes_path): - if not os.path.exists(frames_path) and not os.path.exists(keyframes_path): - return "Please input the directory of guide video and rendered frames" - elif not os.path.exists(frames_path): - return "Please input the directory of guide video" - elif not os.path.exists(keyframes_path): - return "Please input the directory of rendered frames" - frames = [os.path.split(i)[-1] for i in search_for_images(frames_path)] - keyframes = [os.path.split(i)[-1] for i in search_for_images(keyframes_path)] - if len(frames)==0: - return f"No images detected in {frames_path}" - if len(keyframes)==0: - return f"No images detected in {keyframes_path}" - matched_keyframes = KeyFrameMatcher().match_filenames(frames, keyframes) - max_filename_length = max([len(i) for i in frames]) - if sum([i is not None for i in matched_keyframes])==0: - message = "" - for frame, matched_keyframe in zip(frames, matched_keyframes): - message += frame + " " * (max_filename_length - len(frame) + 1) - message += "--> No matched keyframes\n" - else: - message = "" - for frame, matched_keyframe in zip(frames, matched_keyframes): - message += frame + " " * (max_filename_length - len(frame) + 1) - if matched_keyframe is None: - message += "--> [to be rendered]\n" - else: - message += f"--> {matched_keyframe}\n" - return message - - -def check_input_for_interpolating(frames_path, keyframes_path): - # search for images - frames = [os.path.split(i)[-1] for i in search_for_images(frames_path)] - keyframes = [os.path.split(i)[-1] 
for i in search_for_images(keyframes_path)] - # match frames - matched_keyframes = KeyFrameMatcher().match_filenames(frames, keyframes) - file_list = [file_name for file_name in matched_keyframes if file_name is not None] - index_style = [i for i, file_name in enumerate(matched_keyframes) if file_name is not None] - frames_guide = VideoData(None, frames_path) - frames_style = VideoData(None, keyframes_path, file_list=file_list) - # match shape - message = "" - height_guide, width_guide = frames_guide.shape() - height_style, width_style = frames_style.shape() - if height_guide != height_style or width_guide != width_style: - message += f"The shape of frames mismatches. The rendered keyframes will be resized to (height: {height_guide}, width: {width_guide})\n" - frames_style.set_shape(height_guide, width_guide) - return frames_guide, frames_style, index_style, message - - -def interpolate_video( - frames_path, - keyframes_path, - output_path, - fps, - batch_size, - tracking_window_size, - minimum_patch_size, - num_iter, - guide_weight, - initialize, - progress = None, -): - # input - frames_guide, frames_style, index_style, message = check_input_for_interpolating(frames_path, keyframes_path) - if len(message) > 0: - print(message) - # output - if output_path == "": - output_path = os.path.join(keyframes_path, "output") - os.makedirs(output_path, exist_ok=True) - print("No valid output_path. 
Your video will be saved here:", output_path) - elif not os.path.exists(output_path): - os.makedirs(output_path, exist_ok=True) - print("Your video will be saved here:", output_path) - output_frames_path = os.path.join(output_path, "frames") - output_video_path = os.path.join(output_path, "video.mp4") - os.makedirs(output_frames_path, exist_ok=True) - # process - ebsynth_config = { - "minimum_patch_size": minimum_patch_size, - "threads_per_block": 8, - "num_iter": num_iter, - "gpu_id": 0, - "guide_weight": guide_weight, - "initialize": initialize, - "tracking_window_size": tracking_window_size - } - if len(index_style)==1: - InterpolationModeSingleFrameRunner().run(frames_guide, frames_style, index_style, batch_size=batch_size, ebsynth_config=ebsynth_config, save_path=output_frames_path) - else: - InterpolationModeRunner().run(frames_guide, frames_style, index_style, batch_size=batch_size, ebsynth_config=ebsynth_config, save_path=output_frames_path) - try: - fps = int(fps) - except: - fps = 30 - print("Fps:", fps) - print("Saving video...") - video_path = save_video(output_frames_path, output_video_path, num_frames=len(frames_guide), fps=fps) - print("Success!") - print("Your frames are here:", output_frames_path) - print("Your video is here:", video_path) - return output_path, fps, video_path - - -def on_ui_tabs(): - with gr.Blocks(analytics_enabled=False) as ui_component: - with gr.Tab("Blend"): - gr.Markdown(""" -# Blend - -Given a guide video and a style video, this algorithm will make the style video fluent according to the motion features of the guide video. Click [here](https://github.com/Artiprocher/sd-webui-fastblend/assets/35051019/208d902d-6aba-48d7-b7d5-cd120ebd306d) to see the example. Note that this extension doesn't support long videos. Please use short videos (e.g., several seconds). The algorithm is mainly designed for 512*512 resolution. Please use a larger `Minimum patch size` for higher resolution. 
- """) - with gr.Row(): - with gr.Column(): - with gr.Tab("Guide video"): - video_guide = gr.Video(label="Guide video") - with gr.Tab("Guide video (images format)"): - video_guide_folder = gr.Textbox(label="Guide video (images format)", value="") - with gr.Column(): - with gr.Tab("Style video"): - video_style = gr.Video(label="Style video") - with gr.Tab("Style video (images format)"): - video_style_folder = gr.Textbox(label="Style video (images format)", value="") - with gr.Column(): - output_path = gr.Textbox(label="Output directory", value="", placeholder="Leave empty to use the directory of style video") - fps = gr.Textbox(label="Fps", value="", placeholder="Leave empty to use the default fps") - video_output = gr.Video(label="Output video", interactive=False, show_share_button=True) - btn = gr.Button(value="Blend") - with gr.Row(): - with gr.Column(): - gr.Markdown("# Settings") - mode = gr.Radio(["Fast", "Balanced", "Accurate"], label="Inference mode", value="Fast", interactive=True) - window_size = gr.Slider(label="Sliding window size", value=15, minimum=1, maximum=1000, step=1, interactive=True) - batch_size = gr.Slider(label="Batch size", value=8, minimum=1, maximum=128, step=1, interactive=True) - tracking_window_size = gr.Slider(label="Tracking window size (only for accurate mode)", value=0, minimum=0, maximum=10, step=1, interactive=True) - gr.Markdown("## Advanced Settings") - minimum_patch_size = gr.Slider(label="Minimum patch size (odd number)", value=5, minimum=5, maximum=99, step=2, interactive=True) - num_iter = gr.Slider(label="Number of iterations", value=5, minimum=1, maximum=10, step=1, interactive=True) - guide_weight = gr.Slider(label="Guide weight", value=10.0, minimum=0.0, maximum=100.0, step=0.1, interactive=True) - initialize = gr.Radio(["identity", "random"], label="NNF initialization", value="identity", interactive=True) - with gr.Column(): - gr.Markdown(""" -# Reference - -* Output directory: the directory to save the video. 
-* Inference mode - -|Mode|Time|Memory|Quality|Frame by frame output|Description| -|-|-|-|-|-|-| -|Fast|■|■■■|■■|No|Blend the frames using a tree-like data structure, which requires much RAM but is fast.| -|Balanced|■■|■|■■|Yes|Blend the frames naively.| -|Accurate|■■■|■|■■■|Yes|Blend the frames and align them together for higher video quality. When [batch size] >= [sliding window size] * 2 + 1, the performance is the best.| - -* Sliding window size: our algorithm will blend the frames in a sliding windows. If the size is n, each frame will be blended with the last n frames and the next n frames. A large sliding window can make the video fluent but sometimes smoggy. -* Batch size: a larger batch size makes the program faster but requires more VRAM. -* Tracking window size (only for accurate mode): The size of window in which our algorithm tracks moving objects. Empirically, 1 is enough. -* Advanced settings - * Minimum patch size (odd number): the minimum patch size used for patch matching. (Default: 5) - * Number of iterations: the number of iterations of patch matching. (Default: 5) - * Guide weight: a parameter that determines how much motion feature applied to the style video. (Default: 10) - * NNF initialization: how to initialize the NNF (Nearest Neighbor Field). (Default: identity) - """) - btn.click( - smooth_video, - inputs=[ - video_guide, - video_guide_folder, - video_style, - video_style_folder, - mode, - window_size, - batch_size, - tracking_window_size, - output_path, - fps, - minimum_patch_size, - num_iter, - guide_weight, - initialize - ], - outputs=[output_path, fps, video_output] - ) - with gr.Tab("Interpolate"): - gr.Markdown(""" -# Interpolate - -Given a guide video and some rendered keyframes, this algorithm will render the remaining frames. Click [here](https://github.com/Artiprocher/sd-webui-fastblend/assets/35051019/3490c5b4-8f67-478f-86de-f9adc2ace16a) to see the example. 
The algorithm is experimental and is only tested for 512*512 resolution. - """) - with gr.Row(): - with gr.Column(): - with gr.Row(): - with gr.Column(): - video_guide_folder_ = gr.Textbox(label="Guide video (images format)", value="") - with gr.Column(): - rendered_keyframes_ = gr.Textbox(label="Rendered keyframes (images format)", value="") - with gr.Row(): - detected_frames = gr.Textbox(label="Detected frames", value="Please input the directory of guide video and rendered frames", lines=9, max_lines=9, interactive=False) - video_guide_folder_.change(detect_frames, inputs=[video_guide_folder_, rendered_keyframes_], outputs=detected_frames) - rendered_keyframes_.change(detect_frames, inputs=[video_guide_folder_, rendered_keyframes_], outputs=detected_frames) - with gr.Column(): - output_path_ = gr.Textbox(label="Output directory", value="", placeholder="Leave empty to use the directory of rendered keyframes") - fps_ = gr.Textbox(label="Fps", value="", placeholder="Leave empty to use the default fps") - video_output_ = gr.Video(label="Output video", interactive=False, show_share_button=True) - btn_ = gr.Button(value="Interpolate") - with gr.Row(): - with gr.Column(): - gr.Markdown("# Settings") - batch_size_ = gr.Slider(label="Batch size", value=8, minimum=1, maximum=128, step=1, interactive=True) - tracking_window_size_ = gr.Slider(label="Tracking window size", value=0, minimum=0, maximum=10, step=1, interactive=True) - gr.Markdown("## Advanced Settings") - minimum_patch_size_ = gr.Slider(label="Minimum patch size (odd number, larger is better)", value=15, minimum=5, maximum=99, step=2, interactive=True) - num_iter_ = gr.Slider(label="Number of iterations", value=5, minimum=1, maximum=10, step=1, interactive=True) - guide_weight_ = gr.Slider(label="Guide weight", value=10.0, minimum=0.0, maximum=100.0, step=0.1, interactive=True) - initialize_ = gr.Radio(["identity", "random"], label="NNF initialization", value="identity", interactive=True) - with gr.Column(): - 
gr.Markdown(""" -# Reference - -* Output directory: the directory to save the video. -* Batch size: a larger batch size makes the program faster but requires more VRAM. -* Tracking window size (only for accurate mode): The size of window in which our algorithm tracks moving objects. Empirically, 1 is enough. -* Advanced settings - * Minimum patch size (odd number): the minimum patch size used for patch matching. **This parameter should be larger than that in blending. (Default: 15)** - * Number of iterations: the number of iterations of patch matching. (Default: 5) - * Guide weight: a parameter that determines how much motion feature applied to the style video. (Default: 10) - * NNF initialization: how to initialize the NNF (Nearest Neighbor Field). (Default: identity) - """) - btn_.click( - interpolate_video, - inputs=[ - video_guide_folder_, - rendered_keyframes_, - output_path_, - fps_, - batch_size_, - tracking_window_size_, - minimum_patch_size_, - num_iter_, - guide_weight_, - initialize_, - ], - outputs=[output_path_, fps_, video_output_] - ) - - return [(ui_component, "FastBlend", "FastBlend_ui")] diff --git a/diffsynth/extensions/FastBlend/cupy_kernels.py b/diffsynth/extensions/FastBlend/cupy_kernels.py deleted file mode 100644 index 70e2790..0000000 --- a/diffsynth/extensions/FastBlend/cupy_kernels.py +++ /dev/null @@ -1,119 +0,0 @@ -import cupy as cp - -remapping_kernel = cp.RawKernel(r''' -extern "C" __global__ -void remap( - const int height, - const int width, - const int channel, - const int patch_size, - const int pad_size, - const float* source_style, - const int* nnf, - float* target_style -) { - const int r = (patch_size - 1) / 2; - const int x = blockDim.x * blockIdx.x + threadIdx.x; - const int y = blockDim.y * blockIdx.y + threadIdx.y; - if (x >= height or y >= width) return; - const int z = blockIdx.z * (height + pad_size * 2) * (width + pad_size * 2) * channel; - const int pid = (x + pad_size) * (width + pad_size * 2) + (y + pad_size); - 
const int min_px = x < r ? -x : -r; - const int max_px = x + r > height - 1 ? height - 1 - x : r; - const int min_py = y < r ? -y : -r; - const int max_py = y + r > width - 1 ? width - 1 - y : r; - int num = 0; - for (int px = min_px; px <= max_px; px++){ - for (int py = min_py; py <= max_py; py++){ - const int nid = (x + px) * width + y + py; - const int x_ = nnf[blockIdx.z * height * width * 2 + nid*2 + 0] - px; - const int y_ = nnf[blockIdx.z * height * width * 2 + nid*2 + 1] - py; - if (x_ < 0 or y_ < 0 or x_ >= height or y_ >= width)continue; - const int pid_ = (x_ + pad_size) * (width + pad_size * 2) + (y_ + pad_size); - num++; - for (int c = 0; c < channel; c++){ - target_style[z + pid * channel + c] += source_style[z + pid_ * channel + c]; - } - } - } - for (int c = 0; c < channel; c++){ - target_style[z + pid * channel + c] /= num; - } -} -''', 'remap') - - -patch_error_kernel = cp.RawKernel(r''' -extern "C" __global__ -void patch_error( - const int height, - const int width, - const int channel, - const int patch_size, - const int pad_size, - const float* source, - const int* nnf, - const float* target, - float* error -) { - const int r = (patch_size - 1) / 2; - const int x = blockDim.x * blockIdx.x + threadIdx.x; - const int y = blockDim.y * blockIdx.y + threadIdx.y; - const int z = blockIdx.z * (height + pad_size * 2) * (width + pad_size * 2) * channel; - if (x >= height or y >= width) return; - const int x_ = nnf[blockIdx.z * height * width * 2 + (x * width + y)*2 + 0]; - const int y_ = nnf[blockIdx.z * height * width * 2 + (x * width + y)*2 + 1]; - float e = 0; - for (int px = -r; px <= r; px++){ - for (int py = -r; py <= r; py++){ - const int pid = (x + pad_size + px) * (width + pad_size * 2) + y + pad_size + py; - const int pid_ = (x_ + pad_size + px) * (width + pad_size * 2) + y_ + pad_size + py; - for (int c = 0; c < channel; c++){ - const float diff = target[z + pid * channel + c] - source[z + pid_ * channel + c]; - e += diff * diff; - } - } - } 
- error[blockIdx.z * height * width + x * width + y] = e; -} -''', 'patch_error') - - -pairwise_patch_error_kernel = cp.RawKernel(r''' -extern "C" __global__ -void pairwise_patch_error( - const int height, - const int width, - const int channel, - const int patch_size, - const int pad_size, - const float* source_a, - const int* nnf_a, - const float* source_b, - const int* nnf_b, - float* error -) { - const int r = (patch_size - 1) / 2; - const int x = blockDim.x * blockIdx.x + threadIdx.x; - const int y = blockDim.y * blockIdx.y + threadIdx.y; - const int z = blockIdx.z * (height + pad_size * 2) * (width + pad_size * 2) * channel; - if (x >= height or y >= width) return; - const int z_nnf = blockIdx.z * height * width * 2 + (x * width + y) * 2; - const int x_a = nnf_a[z_nnf + 0]; - const int y_a = nnf_a[z_nnf + 1]; - const int x_b = nnf_b[z_nnf + 0]; - const int y_b = nnf_b[z_nnf + 1]; - float e = 0; - for (int px = -r; px <= r; px++){ - for (int py = -r; py <= r; py++){ - const int pid_a = (x_a + pad_size + px) * (width + pad_size * 2) + y_a + pad_size + py; - const int pid_b = (x_b + pad_size + px) * (width + pad_size * 2) + y_b + pad_size + py; - for (int c = 0; c < channel; c++){ - const float diff = source_a[z + pid_a * channel + c] - source_b[z + pid_b * channel + c]; - e += diff * diff; - } - } - } - error[blockIdx.z * height * width + x * width + y] = e; -} -''', 'pairwise_patch_error') diff --git a/diffsynth/extensions/FastBlend/data.py b/diffsynth/extensions/FastBlend/data.py deleted file mode 100644 index dcaddd7..0000000 --- a/diffsynth/extensions/FastBlend/data.py +++ /dev/null @@ -1,146 +0,0 @@ -import imageio, os -import numpy as np -from PIL import Image - - -def read_video(file_name): - reader = imageio.get_reader(file_name) - video = [] - for frame in reader: - frame = np.array(frame) - video.append(frame) - reader.close() - return video - - -def get_video_fps(file_name): - reader = imageio.get_reader(file_name) - fps = 
reader.get_meta_data()["fps"] - reader.close() - return fps - - -def save_video(frames_path, video_path, num_frames, fps): - writer = imageio.get_writer(video_path, fps=fps, quality=9) - for i in range(num_frames): - frame = np.array(Image.open(os.path.join(frames_path, "%05d.png" % i))) - writer.append_data(frame) - writer.close() - return video_path - - -class LowMemoryVideo: - def __init__(self, file_name): - self.reader = imageio.get_reader(file_name) - - def __len__(self): - return self.reader.count_frames() - - def __getitem__(self, item): - return np.array(self.reader.get_data(item)) - - def __del__(self): - self.reader.close() - - -def split_file_name(file_name): - result = [] - number = -1 - for i in file_name: - if ord(i)>=ord("0") and ord(i)<=ord("9"): - if number == -1: - number = 0 - number = number*10 + ord(i) - ord("0") - else: - if number != -1: - result.append(number) - number = -1 - result.append(i) - if number != -1: - result.append(number) - result = tuple(result) - return result - - -def search_for_images(folder): - file_list = [i for i in os.listdir(folder) if i.endswith(".jpg") or i.endswith(".png")] - file_list = [(split_file_name(file_name), file_name) for file_name in file_list] - file_list = [i[1] for i in sorted(file_list)] - file_list = [os.path.join(folder, i) for i in file_list] - return file_list - - -def read_images(folder): - file_list = search_for_images(folder) - frames = [np.array(Image.open(i)) for i in file_list] - return frames - - -class LowMemoryImageFolder: - def __init__(self, folder, file_list=None): - if file_list is None: - self.file_list = search_for_images(folder) - else: - self.file_list = [os.path.join(folder, file_name) for file_name in file_list] - - def __len__(self): - return len(self.file_list) - - def __getitem__(self, item): - return np.array(Image.open(self.file_list[item])) - - def __del__(self): - pass - - -class VideoData: - def __init__(self, video_file, image_folder, **kwargs): - if video_file is not 
None: - self.data_type = "video" - self.data = LowMemoryVideo(video_file, **kwargs) - elif image_folder is not None: - self.data_type = "images" - self.data = LowMemoryImageFolder(image_folder, **kwargs) - else: - raise ValueError("Cannot open video or image folder") - self.length = None - self.height = None - self.width = None - - def raw_data(self): - frames = [] - for i in range(self.__len__()): - frames.append(self.__getitem__(i)) - return frames - - def set_length(self, length): - self.length = length - - def set_shape(self, height, width): - self.height = height - self.width = width - - def __len__(self): - if self.length is None: - return len(self.data) - else: - return self.length - - def shape(self): - if self.height is not None and self.width is not None: - return self.height, self.width - else: - height, width, _ = self.__getitem__(0).shape - return height, width - - def __getitem__(self, item): - frame = self.data.__getitem__(item) - height, width, _ = frame.shape - if self.height is not None and self.width is not None: - if self.height != height or self.width != width: - frame = Image.fromarray(frame).resize((self.width, self.height)) - frame = np.array(frame) - return frame - - def __del__(self): - pass diff --git a/diffsynth/extensions/FastBlend/patch_match.py b/diffsynth/extensions/FastBlend/patch_match.py deleted file mode 100644 index 8ba6003..0000000 --- a/diffsynth/extensions/FastBlend/patch_match.py +++ /dev/null @@ -1,299 +0,0 @@ -from .cupy_kernels import remapping_kernel, patch_error_kernel, pairwise_patch_error_kernel -import numpy as np -import cupy as cp -import cv2 -import torch -import torch.nn.functional as F - -class PatchMatcher: - def __init__( - self, height, width, channel, minimum_patch_size, - threads_per_block=8, num_iter=5, gpu_id=0, guide_weight=10.0, - random_search_steps=3, random_search_range=4, - use_mean_target_style=False, use_pairwise_patch_error=False, - tracking_window_size=0 - ): - self.height = height - self.width 
= width - self.channel = channel - self.minimum_patch_size = minimum_patch_size - self.threads_per_block = threads_per_block - self.num_iter = num_iter - self.gpu_id = gpu_id - self.guide_weight = guide_weight - self.random_search_steps = random_search_steps - self.random_search_range = random_search_range - self.use_mean_target_style = use_mean_target_style - self.use_pairwise_patch_error = use_pairwise_patch_error - self.tracking_window_size = tracking_window_size - - self.patch_size_list = [minimum_patch_size + i*2 for i in range(num_iter)][::-1] - self.pad_size = self.patch_size_list[0] // 2 - self.grid = ( - (height + threads_per_block - 1) // threads_per_block, - (width + threads_per_block - 1) // threads_per_block - ) - self.block = (threads_per_block, threads_per_block) - - def pad_image(self, image): - return cp.pad(image, ((0, 0), (self.pad_size, self.pad_size), (self.pad_size, self.pad_size), (0, 0))) - - def unpad_image(self, image): - return image[:, self.pad_size: -self.pad_size, self.pad_size: -self.pad_size, :] - - def apply_nnf_to_image(self, nnf, source): - batch_size = source.shape[0] - target = cp.zeros((batch_size, self.height + self.pad_size * 2, self.width + self.pad_size * 2, self.channel), dtype=cp.float32) - remapping_kernel( - self.grid + (batch_size,), - self.block, - (self.height, self.width, self.channel, self.patch_size, self.pad_size, source, nnf, target) - ) - return target - - def get_patch_error(self, source, nnf, target): - batch_size = source.shape[0] - error = cp.zeros((batch_size, self.height, self.width), dtype=cp.float32) - patch_error_kernel( - self.grid + (batch_size,), - self.block, - (self.height, self.width, self.channel, self.patch_size, self.pad_size, source, nnf, target, error) - ) - return error - - def get_pairwise_patch_error(self, source, nnf): - batch_size = source.shape[0]//2 - error = cp.zeros((batch_size, self.height, self.width), dtype=cp.float32) - source_a, nnf_a = source[0::2].copy(), nnf[0::2].copy() - 
source_b, nnf_b = source[1::2].copy(), nnf[1::2].copy() - pairwise_patch_error_kernel( - self.grid + (batch_size,), - self.block, - (self.height, self.width, self.channel, self.patch_size, self.pad_size, source_a, nnf_a, source_b, nnf_b, error) - ) - error = error.repeat(2, axis=0) - return error - - def get_error(self, source_guide, target_guide, source_style, target_style, nnf): - error_guide = self.get_patch_error(source_guide, nnf, target_guide) - if self.use_mean_target_style: - target_style = self.apply_nnf_to_image(nnf, source_style) - target_style = target_style.mean(axis=0, keepdims=True) - target_style = target_style.repeat(source_guide.shape[0], axis=0) - if self.use_pairwise_patch_error: - error_style = self.get_pairwise_patch_error(source_style, nnf) - else: - error_style = self.get_patch_error(source_style, nnf, target_style) - error = error_guide * self.guide_weight + error_style - return error - - def clamp_bound(self, nnf): - nnf[:,:,:,0] = cp.clip(nnf[:,:,:,0], 0, self.height-1) - nnf[:,:,:,1] = cp.clip(nnf[:,:,:,1], 0, self.width-1) - return nnf - - def random_step(self, nnf, r): - batch_size = nnf.shape[0] - step = cp.random.randint(-r, r+1, size=(batch_size, self.height, self.width, 2), dtype=cp.int32) - upd_nnf = self.clamp_bound(nnf + step) - return upd_nnf - - def neighboor_step(self, nnf, d): - if d==0: - upd_nnf = cp.concatenate([nnf[:, :1, :], nnf[:, :-1, :]], axis=1) - upd_nnf[:, :, :, 0] += 1 - elif d==1: - upd_nnf = cp.concatenate([nnf[:, :, :1], nnf[:, :, :-1]], axis=2) - upd_nnf[:, :, :, 1] += 1 - elif d==2: - upd_nnf = cp.concatenate([nnf[:, 1:, :], nnf[:, -1:, :]], axis=1) - upd_nnf[:, :, :, 0] -= 1 - elif d==3: - upd_nnf = cp.concatenate([nnf[:, :, 1:], nnf[:, :, -1:]], axis=2) - upd_nnf[:, :, :, 1] -= 1 - upd_nnf = self.clamp_bound(upd_nnf) - return upd_nnf - - def shift_nnf(self, nnf, d): - if d>0: - d = min(nnf.shape[0], d) - upd_nnf = cp.concatenate([nnf[d:]] + [nnf[-1:]] * d, axis=0) - else: - d = max(-nnf.shape[0], d) - 
upd_nnf = cp.concatenate([nnf[:1]] * (-d) + [nnf[:d]], axis=0) - return upd_nnf - - def track_step(self, nnf, d): - if self.use_pairwise_patch_error: - upd_nnf = cp.zeros_like(nnf) - upd_nnf[0::2] = self.shift_nnf(nnf[0::2], d) - upd_nnf[1::2] = self.shift_nnf(nnf[1::2], d) - else: - upd_nnf = self.shift_nnf(nnf, d) - return upd_nnf - - def C(self, n, m): - # not used - c = 1 - for i in range(1, n+1): - c *= i - for i in range(1, m+1): - c //= i - for i in range(1, n-m+1): - c //= i - return c - - def bezier_step(self, nnf, r): - # not used - n = r * 2 - 1 - upd_nnf = cp.zeros(shape=nnf.shape, dtype=cp.float32) - for i, d in enumerate(list(range(-r, 0)) + list(range(1, r+1))): - if d>0: - ctl_nnf = cp.concatenate([nnf[d:]] + [nnf[-1:]] * d, axis=0) - elif d<0: - ctl_nnf = cp.concatenate([nnf[:1]] * (-d) + [nnf[:d]], axis=0) - upd_nnf += ctl_nnf * (self.C(n, i) / 2**n) - upd_nnf = self.clamp_bound(upd_nnf).astype(nnf.dtype) - return upd_nnf - - def update(self, source_guide, target_guide, source_style, target_style, nnf, err, upd_nnf): - upd_err = self.get_error(source_guide, target_guide, source_style, target_style, upd_nnf) - upd_idx = (upd_err < err) - nnf[upd_idx] = upd_nnf[upd_idx] - err[upd_idx] = upd_err[upd_idx] - return nnf, err - - def propagation(self, source_guide, target_guide, source_style, target_style, nnf, err): - for d in cp.random.permutation(4): - upd_nnf = self.neighboor_step(nnf, d) - nnf, err = self.update(source_guide, target_guide, source_style, target_style, nnf, err, upd_nnf) - return nnf, err - - def random_search(self, source_guide, target_guide, source_style, target_style, nnf, err): - for i in range(self.random_search_steps): - upd_nnf = self.random_step(nnf, self.random_search_range) - nnf, err = self.update(source_guide, target_guide, source_style, target_style, nnf, err, upd_nnf) - return nnf, err - - def track(self, source_guide, target_guide, source_style, target_style, nnf, err): - for d in range(1, self.tracking_window_size + 
1): - upd_nnf = self.track_step(nnf, d) - nnf, err = self.update(source_guide, target_guide, source_style, target_style, nnf, err, upd_nnf) - upd_nnf = self.track_step(nnf, -d) - nnf, err = self.update(source_guide, target_guide, source_style, target_style, nnf, err, upd_nnf) - return nnf, err - - def iteration(self, source_guide, target_guide, source_style, target_style, nnf, err): - nnf, err = self.propagation(source_guide, target_guide, source_style, target_style, nnf, err) - nnf, err = self.random_search(source_guide, target_guide, source_style, target_style, nnf, err) - nnf, err = self.track(source_guide, target_guide, source_style, target_style, nnf, err) - return nnf, err - - def estimate_nnf(self, source_guide, target_guide, source_style, nnf): - with cp.cuda.Device(self.gpu_id): - source_guide = self.pad_image(source_guide) - target_guide = self.pad_image(target_guide) - source_style = self.pad_image(source_style) - for it in range(self.num_iter): - self.patch_size = self.patch_size_list[it] - target_style = self.apply_nnf_to_image(nnf, source_style) - err = self.get_error(source_guide, target_guide, source_style, target_style, nnf) - nnf, err = self.iteration(source_guide, target_guide, source_style, target_style, nnf, err) - target_style = self.unpad_image(self.apply_nnf_to_image(nnf, source_style)) - return nnf, target_style - - -class PyramidPatchMatcher: - def __init__( - self, image_height, image_width, channel, minimum_patch_size, - threads_per_block=8, num_iter=5, gpu_id=0, guide_weight=10.0, - use_mean_target_style=False, use_pairwise_patch_error=False, - tracking_window_size=0, - initialize="identity" - ): - maximum_patch_size = minimum_patch_size + (num_iter - 1) * 2 - self.pyramid_level = int(np.log2(min(image_height, image_width) / maximum_patch_size)) - self.pyramid_heights = [] - self.pyramid_widths = [] - self.patch_matchers = [] - self.minimum_patch_size = minimum_patch_size - self.num_iter = num_iter - self.gpu_id = gpu_id - 
self.initialize = initialize - for level in range(self.pyramid_level): - height = image_height//(2**(self.pyramid_level - 1 - level)) - width = image_width//(2**(self.pyramid_level - 1 - level)) - self.pyramid_heights.append(height) - self.pyramid_widths.append(width) - self.patch_matchers.append(PatchMatcher( - height, width, channel, minimum_patch_size=minimum_patch_size, - threads_per_block=threads_per_block, num_iter=num_iter, gpu_id=gpu_id, guide_weight=guide_weight, - use_mean_target_style=use_mean_target_style, use_pairwise_patch_error=use_pairwise_patch_error, - tracking_window_size=tracking_window_size - )) - - def resample_image(self, images, level): - height, width = self.pyramid_heights[level], self.pyramid_widths[level] - images_torch = torch.as_tensor(images, device='cuda', dtype=torch.float32) - images_torch = images_torch.permute(0, 3, 1, 2) - images_resample = F.interpolate(images_torch, size=(height, width), mode='area', align_corners=None) - images_resample = images_resample.permute(0, 2, 3, 1).contiguous() - return cp.asarray(images_resample) - - def initialize_nnf(self, batch_size): - if self.initialize == "random": - height, width = self.pyramid_heights[0], self.pyramid_widths[0] - nnf = cp.stack([ - cp.random.randint(0, height, (batch_size, height, width), dtype=cp.int32), - cp.random.randint(0, width, (batch_size, height, width), dtype=cp.int32) - ], axis=3) - elif self.initialize == "identity": - height, width = self.pyramid_heights[0], self.pyramid_widths[0] - nnf = cp.stack([ - cp.repeat(cp.arange(height), width).reshape(height, width), - cp.tile(cp.arange(width), height).reshape(height, width) - ], axis=2) - nnf = cp.stack([nnf] * batch_size) - else: - raise NotImplementedError() - return nnf - - def update_nnf(self, nnf, level): - # upscale - nnf = nnf.repeat(2, axis=1).repeat(2, axis=2) * 2 - nnf[:, 1::2, :, 0] += 1 - nnf[:, :, 1::2, 1] += 1 - # check if scale is 2 - height, width = self.pyramid_heights[level], 
self.pyramid_widths[level] - if height != nnf.shape[0] * 2 or width != nnf.shape[1] * 2: - nnf_torch = torch.as_tensor(nnf, device='cuda', dtype=torch.float32) - nnf_torch = nnf_torch.permute(0, 3, 1, 2) - nnf_resized = F.interpolate(nnf_torch, size=(height, width), mode='bilinear', align_corners=False) - nnf_resized = nnf_resized.permute(0, 2, 3, 1) - nnf = cp.asarray(nnf_resized).astype(cp.int32) - nnf = self.patch_matchers[level].clamp_bound(nnf) - return nnf - - def apply_nnf_to_image(self, nnf, image): - with cp.cuda.Device(self.gpu_id): - image = self.patch_matchers[-1].pad_image(image) - image = self.patch_matchers[-1].apply_nnf_to_image(nnf, image) - return image - - def estimate_nnf(self, source_guide, target_guide, source_style): - with cp.cuda.Device(self.gpu_id): - if not isinstance(source_guide, cp.ndarray): - source_guide = cp.array(source_guide, dtype=cp.float32) - if not isinstance(target_guide, cp.ndarray): - target_guide = cp.array(target_guide, dtype=cp.float32) - if not isinstance(source_style, cp.ndarray): - source_style = cp.array(source_style, dtype=cp.float32) - for level in range(self.pyramid_level): - nnf = self.initialize_nnf(source_guide.shape[0]) if level==0 else self.update_nnf(nnf, level) - source_guide_ = self.resample_image(source_guide, level) - target_guide_ = self.resample_image(target_guide, level) - source_style_ = self.resample_image(source_style, level) - nnf, target_style = self.patch_matchers[level].estimate_nnf( - source_guide_, target_guide_, source_style_, nnf - ) - return nnf.get(), target_style.get() diff --git a/diffsynth/extensions/FastBlend/runners/__init__.py b/diffsynth/extensions/FastBlend/runners/__init__.py deleted file mode 100644 index 0783827..0000000 --- a/diffsynth/extensions/FastBlend/runners/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .accurate import AccurateModeRunner -from .fast import FastModeRunner -from .balanced import BalancedModeRunner -from .interpolation import InterpolationModeRunner, 
InterpolationModeSingleFrameRunner diff --git a/diffsynth/extensions/FastBlend/runners/accurate.py b/diffsynth/extensions/FastBlend/runners/accurate.py deleted file mode 100644 index 2e4a47f..0000000 --- a/diffsynth/extensions/FastBlend/runners/accurate.py +++ /dev/null @@ -1,35 +0,0 @@ -from ..patch_match import PyramidPatchMatcher -import os -import numpy as np -from PIL import Image -from tqdm import tqdm - - -class AccurateModeRunner: - def __init__(self): - pass - - def run(self, frames_guide, frames_style, batch_size, window_size, ebsynth_config, desc="Accurate Mode", save_path=None): - patch_match_engine = PyramidPatchMatcher( - image_height=frames_style[0].shape[0], - image_width=frames_style[0].shape[1], - channel=3, - use_mean_target_style=True, - **ebsynth_config - ) - # run - n = len(frames_style) - for target in tqdm(range(n), desc=desc): - l, r = max(target - window_size, 0), min(target + window_size + 1, n) - remapped_frames = [] - for i in range(l, r, batch_size): - j = min(i + batch_size, r) - source_guide = np.stack([frames_guide[source] for source in range(i, j)]) - target_guide = np.stack([frames_guide[target]] * (j - i)) - source_style = np.stack([frames_style[source] for source in range(i, j)]) - _, target_style = patch_match_engine.estimate_nnf(source_guide, target_guide, source_style) - remapped_frames.append(target_style) - frame = np.concatenate(remapped_frames, axis=0).mean(axis=0) - frame = frame.clip(0, 255).astype("uint8") - if save_path is not None: - Image.fromarray(frame).save(os.path.join(save_path, "%05d.png" % target)) \ No newline at end of file diff --git a/diffsynth/extensions/FastBlend/runners/balanced.py b/diffsynth/extensions/FastBlend/runners/balanced.py deleted file mode 100644 index 1c9a2bb..0000000 --- a/diffsynth/extensions/FastBlend/runners/balanced.py +++ /dev/null @@ -1,46 +0,0 @@ -from ..patch_match import PyramidPatchMatcher -import os -import numpy as np -from PIL import Image -from tqdm import tqdm - - -class 
BalancedModeRunner: - def __init__(self): - pass - - def run(self, frames_guide, frames_style, batch_size, window_size, ebsynth_config, desc="Balanced Mode", save_path=None): - patch_match_engine = PyramidPatchMatcher( - image_height=frames_style[0].shape[0], - image_width=frames_style[0].shape[1], - channel=3, - **ebsynth_config - ) - # tasks - n = len(frames_style) - tasks = [] - for target in range(n): - for source in range(target - window_size, target + window_size + 1): - if source >= 0 and source < n and source != target: - tasks.append((source, target)) - # run - frames = [(None, 1) for i in range(n)] - for batch_id in tqdm(range(0, len(tasks), batch_size), desc=desc): - tasks_batch = tasks[batch_id: min(batch_id+batch_size, len(tasks))] - source_guide = np.stack([frames_guide[source] for source, target in tasks_batch]) - target_guide = np.stack([frames_guide[target] for source, target in tasks_batch]) - source_style = np.stack([frames_style[source] for source, target in tasks_batch]) - _, target_style = patch_match_engine.estimate_nnf(source_guide, target_guide, source_style) - for (source, target), result in zip(tasks_batch, target_style): - frame, weight = frames[target] - if frame is None: - frame = frames_style[target] - frames[target] = ( - frame * (weight / (weight + 1)) + result / (weight + 1), - weight + 1 - ) - if weight + 1 == min(n, target + window_size + 1) - max(0, target - window_size): - frame = frame.clip(0, 255).astype("uint8") - if save_path is not None: - Image.fromarray(frame).save(os.path.join(save_path, "%05d.png" % target)) - frames[target] = (None, 1) diff --git a/diffsynth/extensions/FastBlend/runners/fast.py b/diffsynth/extensions/FastBlend/runners/fast.py deleted file mode 100644 index 2ba5731..0000000 --- a/diffsynth/extensions/FastBlend/runners/fast.py +++ /dev/null @@ -1,141 +0,0 @@ -from ..patch_match import PyramidPatchMatcher -import functools, os -import numpy as np -from PIL import Image -from tqdm import tqdm - - -class 
TableManager: - def __init__(self): - pass - - def task_list(self, n): - tasks = [] - max_level = 1 - while (1<=n: - break - meta_data = { - "source": i, - "target": j, - "level": level + 1 - } - tasks.append(meta_data) - tasks.sort(key=functools.cmp_to_key(lambda u, v: u["level"]-v["level"])) - return tasks - - def build_remapping_table(self, frames_guide, frames_style, patch_match_engine, batch_size, desc=""): - n = len(frames_guide) - tasks = self.task_list(n) - remapping_table = [[(frames_style[i], 1)] for i in range(n)] - for batch_id in tqdm(range(0, len(tasks), batch_size), desc=desc): - tasks_batch = tasks[batch_id: min(batch_id+batch_size, len(tasks))] - source_guide = np.stack([frames_guide[task["source"]] for task in tasks_batch]) - target_guide = np.stack([frames_guide[task["target"]] for task in tasks_batch]) - source_style = np.stack([frames_style[task["source"]] for task in tasks_batch]) - _, target_style = patch_match_engine.estimate_nnf(source_guide, target_guide, source_style) - for task, result in zip(tasks_batch, target_style): - target, level = task["target"], task["level"] - if len(remapping_table[target])==level: - remapping_table[target].append((result, 1)) - else: - frame, weight = remapping_table[target][level] - remapping_table[target][level] = ( - frame * (weight / (weight + 1)) + result / (weight + 1), - weight + 1 - ) - return remapping_table - - def remapping_table_to_blending_table(self, table): - for i in range(len(table)): - for j in range(1, len(table[i])): - frame_1, weight_1 = table[i][j-1] - frame_2, weight_2 = table[i][j] - frame = (frame_1 + frame_2) / 2 - weight = weight_1 + weight_2 - table[i][j] = (frame, weight) - return table - - def tree_query(self, leftbound, rightbound): - node_list = [] - node_index = rightbound - while node_index>=leftbound: - node_level = 0 - while (1<=leftbound: - node_level += 1 - node_list.append((node_index, node_level)) - node_index -= 1<0: - tasks = [] - for m in range(index_style[0]): - 
tasks.append((index_style[0], m, index_style[0])) - task_group.append(tasks) - # middle frames - for l, r in zip(index_style[:-1], index_style[1:]): - tasks = [] - for m in range(l, r): - tasks.append((l, m, r)) - task_group.append(tasks) - # last frame - tasks = [] - for m in range(index_style[-1], n): - tasks.append((index_style[-1], m, index_style[-1])) - task_group.append(tasks) - return task_group - - def run(self, frames_guide, frames_style, index_style, batch_size, ebsynth_config, save_path=None): - patch_match_engine = PyramidPatchMatcher( - image_height=frames_style[0].shape[0], - image_width=frames_style[0].shape[1], - channel=3, - use_mean_target_style=False, - use_pairwise_patch_error=True, - **ebsynth_config - ) - # task - index_dict = self.get_index_dict(index_style) - task_group = self.get_task_group(index_style, len(frames_guide)) - # run - for tasks in task_group: - index_start, index_end = min([i[1] for i in tasks]), max([i[1] for i in tasks]) - for batch_id in tqdm(range(0, len(tasks), batch_size), desc=f"Rendering frames {index_start}...{index_end}"): - tasks_batch = tasks[batch_id: min(batch_id+batch_size, len(tasks))] - source_guide, target_guide, source_style = [], [], [] - for l, m, r in tasks_batch: - # l -> m - source_guide.append(frames_guide[l]) - target_guide.append(frames_guide[m]) - source_style.append(frames_style[index_dict[l]]) - # r -> m - source_guide.append(frames_guide[r]) - target_guide.append(frames_guide[m]) - source_style.append(frames_style[index_dict[r]]) - source_guide = np.stack(source_guide) - target_guide = np.stack(target_guide) - source_style = np.stack(source_style) - _, target_style = patch_match_engine.estimate_nnf(source_guide, target_guide, source_style) - if save_path is not None: - for frame_l, frame_r, (l, m, r) in zip(target_style[0::2], target_style[1::2], tasks_batch): - weight_l, weight_r = self.get_weight(l, m, r) - frame = frame_l * weight_l + frame_r * weight_r - frame = frame.clip(0, 
255).astype("uint8") - Image.fromarray(frame).save(os.path.join(save_path, "%05d.png" % m)) - - -class InterpolationModeSingleFrameRunner: - def __init__(self): - pass - - def run(self, frames_guide, frames_style, index_style, batch_size, ebsynth_config, save_path=None): - # check input - tracking_window_size = ebsynth_config["tracking_window_size"] - if tracking_window_size * 2 >= batch_size: - raise ValueError("batch_size should be larger than track_window_size * 2") - frame_style = frames_style[0] - frame_guide = frames_guide[index_style[0]] - patch_match_engine = PyramidPatchMatcher( - image_height=frame_style.shape[0], - image_width=frame_style.shape[1], - channel=3, - **ebsynth_config - ) - # run - frame_id, n = 0, len(frames_guide) - for i in tqdm(range(0, n, batch_size - tracking_window_size * 2), desc=f"Rendering frames 0...{n}"): - if i + batch_size > n: - l, r = max(n - batch_size, 0), n - else: - l, r = i, i + batch_size - source_guide = np.stack([frame_guide] * (r-l)) - target_guide = np.stack([frames_guide[i] for i in range(l, r)]) - source_style = np.stack([frame_style] * (r-l)) - _, target_style = patch_match_engine.estimate_nnf(source_guide, target_guide, source_style) - for i, frame in zip(range(l, r), target_style): - if i==frame_id: - frame = frame.clip(0, 255).astype("uint8") - Image.fromarray(frame).save(os.path.join(save_path, "%05d.png" % frame_id)) - frame_id += 1 - if r < n and r-frame_id <= tracking_window_size: - break diff --git a/diffsynth/extensions/ImageQualityMetric/BLIP/__init__.py b/diffsynth/extensions/ImageQualityMetric/BLIP/__init__.py deleted file mode 100644 index 885dcf8..0000000 --- a/diffsynth/extensions/ImageQualityMetric/BLIP/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .blip_pretrain import * diff --git a/diffsynth/extensions/ImageQualityMetric/BLIP/blip.py b/diffsynth/extensions/ImageQualityMetric/BLIP/blip.py deleted file mode 100644 index 6b24c3c..0000000 --- a/diffsynth/extensions/ImageQualityMetric/BLIP/blip.py 
+++ /dev/null @@ -1,77 +0,0 @@ -''' - * Adapted from BLIP (https://github.com/salesforce/BLIP) -''' - -import warnings -warnings.filterwarnings("ignore") - -import torch -import os -from urllib.parse import urlparse -from timm.models.hub import download_cached_file -from transformers import BertTokenizer -from .vit import VisionTransformer, interpolate_pos_embed - - -def default_bert(): - current_dir = os.path.dirname(os.path.abspath(__file__)) - project_root = os.path.abspath(os.path.join(current_dir, '../../../../')) - model_path = os.path.join(project_root, 'models', 'QualityMetric') - return os.path.join(model_path, "bert-base-uncased") - - -def init_tokenizer(bert_model_path): - tokenizer = BertTokenizer.from_pretrained(bert_model_path) - tokenizer.add_special_tokens({'bos_token':'[DEC]'}) - tokenizer.add_special_tokens({'additional_special_tokens':['[ENC]']}) - tokenizer.enc_token_id = tokenizer.additional_special_tokens_ids[0] - return tokenizer - - -def create_vit(vit, image_size, use_grad_checkpointing=False, ckpt_layer=0, drop_path_rate=0): - - assert vit in ['base', 'large'], "vit parameter must be base or large" - if vit=='base': - vision_width = 768 - visual_encoder = VisionTransformer(img_size=image_size, patch_size=16, embed_dim=vision_width, depth=12, - num_heads=12, use_grad_checkpointing=use_grad_checkpointing, ckpt_layer=ckpt_layer, - drop_path_rate=0 or drop_path_rate - ) - elif vit=='large': - vision_width = 1024 - visual_encoder = VisionTransformer(img_size=image_size, patch_size=16, embed_dim=vision_width, depth=24, - num_heads=16, use_grad_checkpointing=use_grad_checkpointing, ckpt_layer=ckpt_layer, - drop_path_rate=0.1 or drop_path_rate - ) - return visual_encoder, vision_width - - -def is_url(url_or_filename): - parsed = urlparse(url_or_filename) - return parsed.scheme in ("http", "https") - -def load_checkpoint(model,url_or_filename): - if is_url(url_or_filename): - cached_file = download_cached_file(url_or_filename, check_hash=False, 
progress=True) - checkpoint = torch.load(cached_file, map_location='cpu') - elif os.path.isfile(url_or_filename): - checkpoint = torch.load(url_or_filename, map_location='cpu') - else: - raise RuntimeError('checkpoint url or path is invalid') - - state_dict = checkpoint['model'] - - state_dict['visual_encoder.pos_embed'] = interpolate_pos_embed(state_dict['visual_encoder.pos_embed'],model.visual_encoder) - if 'visual_encoder_m.pos_embed' in model.state_dict().keys(): - state_dict['visual_encoder_m.pos_embed'] = interpolate_pos_embed(state_dict['visual_encoder_m.pos_embed'], - model.visual_encoder_m) - for key in model.state_dict().keys(): - if key in state_dict.keys(): - if state_dict[key].shape!=model.state_dict()[key].shape: - print(key, ": ", state_dict[key].shape, ', ', model.state_dict()[key].shape) - del state_dict[key] - - msg = model.load_state_dict(state_dict,strict=False) - print('load checkpoint from %s'%url_or_filename) - return model,msg - diff --git a/diffsynth/extensions/ImageQualityMetric/BLIP/blip_pretrain.py b/diffsynth/extensions/ImageQualityMetric/BLIP/blip_pretrain.py deleted file mode 100644 index ba711e2..0000000 --- a/diffsynth/extensions/ImageQualityMetric/BLIP/blip_pretrain.py +++ /dev/null @@ -1,44 +0,0 @@ -''' - * Adapted from BLIP (https://github.com/salesforce/BLIP) -''' - -import transformers -transformers.logging.set_verbosity_error() - -from torch import nn -import os -from .med import BertConfig, BertModel -from .blip import create_vit, init_tokenizer - -class BLIP_Pretrain(nn.Module): - def __init__(self, - med_config = "med_config.json", - image_size = 224, - vit = 'base', - vit_grad_ckpt = False, - vit_ckpt_layer = 0, - embed_dim = 256, - queue_size = 57600, - momentum = 0.995, - bert_model_path = "" - ): - """ - Args: - med_config (str): path for the mixture of encoder-decoder model's configuration file - image_size (int): input image size - vit (str): model size of vision transformer - """ - super().__init__() - - 
self.visual_encoder, vision_width = create_vit(vit,image_size, vit_grad_ckpt, vit_ckpt_layer, 0) - - self.tokenizer = init_tokenizer(bert_model_path) - encoder_config = BertConfig.from_json_file(med_config) - encoder_config.encoder_width = vision_width - self.text_encoder = BertModel(config=encoder_config, add_pooling_layer=False) - - text_width = self.text_encoder.config.hidden_size - - self.vision_proj = nn.Linear(vision_width, embed_dim) - self.text_proj = nn.Linear(text_width, embed_dim) - diff --git a/diffsynth/extensions/ImageQualityMetric/BLIP/med.py b/diffsynth/extensions/ImageQualityMetric/BLIP/med.py deleted file mode 100644 index 5905a34..0000000 --- a/diffsynth/extensions/ImageQualityMetric/BLIP/med.py +++ /dev/null @@ -1,947 +0,0 @@ -''' - * Adapted from BLIP (https://github.com/salesforce/BLIP) - * Based on huggingface code base - * https://github.com/huggingface/transformers/blob/v4.15.0/src/transformers/models/bert -''' - -import math -from typing import Tuple - -import torch -from torch import Tensor, device, nn -import torch.utils.checkpoint -from torch import nn -from torch.nn import CrossEntropyLoss - -from transformers.activations import ACT2FN -from transformers.file_utils import ( - ModelOutput, -) -from transformers.modeling_outputs import ( - BaseModelOutputWithPastAndCrossAttentions, - BaseModelOutputWithPoolingAndCrossAttentions, - CausalLMOutputWithCrossAttentions, - MaskedLMOutput, - MultipleChoiceModelOutput, - NextSentencePredictorOutput, - QuestionAnsweringModelOutput, - SequenceClassifierOutput, - TokenClassifierOutput, -) -from transformers.modeling_utils import ( - PreTrainedModel, - apply_chunking_to_forward, - find_pruneable_heads_and_indices, - prune_linear_layer, -) -from transformers.utils import logging -from transformers.models.bert.configuration_bert import BertConfig - - -logger = logging.get_logger(__name__) - - -class BertEmbeddings(nn.Module): - """Construct the embeddings from word and position embeddings.""" - - def 
__init__(self, config): - super().__init__() - self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id) - self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size) - - # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load - # any TensorFlow checkpoint file - self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) - self.dropout = nn.Dropout(config.hidden_dropout_prob) - - # position_ids (1, len position emb) is contiguous in memory and exported when serialized - self.register_buffer("position_ids", torch.arange(config.max_position_embeddings).expand((1, -1))) - self.position_embedding_type = getattr(config, "position_embedding_type", "absolute") - - self.config = config - - def forward( - self, input_ids=None, position_ids=None, inputs_embeds=None, past_key_values_length=0 - ): - if input_ids is not None: - input_shape = input_ids.size() - else: - input_shape = inputs_embeds.size()[:-1] - - seq_length = input_shape[1] - - if position_ids is None: - position_ids = self.position_ids[:, past_key_values_length : seq_length + past_key_values_length] - - if inputs_embeds is None: - inputs_embeds = self.word_embeddings(input_ids) - - embeddings = inputs_embeds - - if self.position_embedding_type == "absolute": - position_embeddings = self.position_embeddings(position_ids) - embeddings += position_embeddings - embeddings = self.LayerNorm(embeddings) - embeddings = self.dropout(embeddings) - return embeddings - - -class BertSelfAttention(nn.Module): - def __init__(self, config, is_cross_attention): - super().__init__() - self.config = config - if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"): - raise ValueError( - "The hidden size (%d) is not a multiple of the number of attention " - "heads (%d)" % (config.hidden_size, config.num_attention_heads) - ) - - 
self.num_attention_heads = config.num_attention_heads - self.attention_head_size = int(config.hidden_size / config.num_attention_heads) - self.all_head_size = self.num_attention_heads * self.attention_head_size - - self.query = nn.Linear(config.hidden_size, self.all_head_size) - if is_cross_attention: - self.key = nn.Linear(config.encoder_width, self.all_head_size) - self.value = nn.Linear(config.encoder_width, self.all_head_size) - else: - self.key = nn.Linear(config.hidden_size, self.all_head_size) - self.value = nn.Linear(config.hidden_size, self.all_head_size) - - self.dropout = nn.Dropout(config.attention_probs_dropout_prob) - self.position_embedding_type = getattr(config, "position_embedding_type", "absolute") - if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query": - self.max_position_embeddings = config.max_position_embeddings - self.distance_embedding = nn.Embedding(2 * config.max_position_embeddings - 1, self.attention_head_size) - self.save_attention = False - - def save_attn_gradients(self, attn_gradients): - self.attn_gradients = attn_gradients - - def get_attn_gradients(self): - return self.attn_gradients - - def save_attention_map(self, attention_map): - self.attention_map = attention_map - - def get_attention_map(self): - return self.attention_map - - def transpose_for_scores(self, x): - new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size) - x = x.view(*new_x_shape) - return x.permute(0, 2, 1, 3) - - def forward( - self, - hidden_states, - attention_mask=None, - head_mask=None, - encoder_hidden_states=None, - encoder_attention_mask=None, - past_key_value=None, - output_attentions=False, - ): - mixed_query_layer = self.query(hidden_states) - - # If this is instantiated as a cross-attention module, the keys - # and values come from an encoder; the attention mask needs to be - # such that the encoder's padding tokens are not attended to. 
- is_cross_attention = encoder_hidden_states is not None - - if is_cross_attention: - key_layer = self.transpose_for_scores(self.key(encoder_hidden_states)) - value_layer = self.transpose_for_scores(self.value(encoder_hidden_states)) - attention_mask = encoder_attention_mask - elif past_key_value is not None: - key_layer = self.transpose_for_scores(self.key(hidden_states)) - value_layer = self.transpose_for_scores(self.value(hidden_states)) - key_layer = torch.cat([past_key_value[0], key_layer], dim=2) - value_layer = torch.cat([past_key_value[1], value_layer], dim=2) - else: - key_layer = self.transpose_for_scores(self.key(hidden_states)) - value_layer = self.transpose_for_scores(self.value(hidden_states)) - - query_layer = self.transpose_for_scores(mixed_query_layer) - - past_key_value = (key_layer, value_layer) - - # Take the dot product between "query" and "key" to get the raw attention scores. - attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2)) - - if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query": - seq_length = hidden_states.size()[1] - position_ids_l = torch.arange(seq_length, dtype=torch.long, device=hidden_states.device).view(-1, 1) - position_ids_r = torch.arange(seq_length, dtype=torch.long, device=hidden_states.device).view(1, -1) - distance = position_ids_l - position_ids_r - positional_embedding = self.distance_embedding(distance + self.max_position_embeddings - 1) - positional_embedding = positional_embedding.to(dtype=query_layer.dtype) # fp16 compatibility - - if self.position_embedding_type == "relative_key": - relative_position_scores = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding) - attention_scores = attention_scores + relative_position_scores - elif self.position_embedding_type == "relative_key_query": - relative_position_scores_query = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding) - relative_position_scores_key = 
torch.einsum("bhrd,lrd->bhlr", key_layer, positional_embedding) - attention_scores = attention_scores + relative_position_scores_query + relative_position_scores_key - - attention_scores = attention_scores / math.sqrt(self.attention_head_size) - if attention_mask is not None: - # Apply the attention mask is (precomputed for all layers in BertModel forward() function) - attention_scores = attention_scores + attention_mask - - # Normalize the attention scores to probabilities. - attention_probs = nn.Softmax(dim=-1)(attention_scores) - - if is_cross_attention and self.save_attention: - self.save_attention_map(attention_probs) - attention_probs.register_hook(self.save_attn_gradients) - - # This is actually dropping out entire tokens to attend to, which might - # seem a bit unusual, but is taken from the original Transformer paper. - attention_probs_dropped = self.dropout(attention_probs) - - # Mask heads if we want to - if head_mask is not None: - attention_probs_dropped = attention_probs_dropped * head_mask - - context_layer = torch.matmul(attention_probs_dropped, value_layer) - - context_layer = context_layer.permute(0, 2, 1, 3).contiguous() - new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,) - context_layer = context_layer.view(*new_context_layer_shape) - - outputs = (context_layer, attention_probs) if output_attentions else (context_layer,) - - outputs = outputs + (past_key_value,) - return outputs - - -class BertSelfOutput(nn.Module): - def __init__(self, config): - super().__init__() - self.dense = nn.Linear(config.hidden_size, config.hidden_size) - self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) - self.dropout = nn.Dropout(config.hidden_dropout_prob) - - def forward(self, hidden_states, input_tensor): - hidden_states = self.dense(hidden_states) - hidden_states = self.dropout(hidden_states) - hidden_states = self.LayerNorm(hidden_states + input_tensor) - return hidden_states - - -class 
BertAttention(nn.Module): - def __init__(self, config, is_cross_attention=False): - super().__init__() - self.self = BertSelfAttention(config, is_cross_attention) - self.output = BertSelfOutput(config) - self.pruned_heads = set() - - def prune_heads(self, heads): - if len(heads) == 0: - return - heads, index = find_pruneable_heads_and_indices( - heads, self.self.num_attention_heads, self.self.attention_head_size, self.pruned_heads - ) - - # Prune linear layers - self.self.query = prune_linear_layer(self.self.query, index) - self.self.key = prune_linear_layer(self.self.key, index) - self.self.value = prune_linear_layer(self.self.value, index) - self.output.dense = prune_linear_layer(self.output.dense, index, dim=1) - - # Update hyper params and store pruned heads - self.self.num_attention_heads = self.self.num_attention_heads - len(heads) - self.self.all_head_size = self.self.attention_head_size * self.self.num_attention_heads - self.pruned_heads = self.pruned_heads.union(heads) - - def forward( - self, - hidden_states, - attention_mask=None, - head_mask=None, - encoder_hidden_states=None, - encoder_attention_mask=None, - past_key_value=None, - output_attentions=False, - ): - self_outputs = self.self( - hidden_states, - attention_mask, - head_mask, - encoder_hidden_states, - encoder_attention_mask, - past_key_value, - output_attentions, - ) - attention_output = self.output(self_outputs[0], hidden_states) - outputs = (attention_output,) + self_outputs[1:] # add attentions if we output them - return outputs - - -class BertIntermediate(nn.Module): - def __init__(self, config): - super().__init__() - self.dense = nn.Linear(config.hidden_size, config.intermediate_size) - if isinstance(config.hidden_act, str): - self.intermediate_act_fn = ACT2FN[config.hidden_act] - else: - self.intermediate_act_fn = config.hidden_act - - def forward(self, hidden_states): - hidden_states = self.dense(hidden_states) - hidden_states = self.intermediate_act_fn(hidden_states) - return 
hidden_states - - -class BertOutput(nn.Module): - def __init__(self, config): - super().__init__() - self.dense = nn.Linear(config.intermediate_size, config.hidden_size) - self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) - self.dropout = nn.Dropout(config.hidden_dropout_prob) - - def forward(self, hidden_states, input_tensor): - hidden_states = self.dense(hidden_states) - hidden_states = self.dropout(hidden_states) - hidden_states = self.LayerNorm(hidden_states + input_tensor) - return hidden_states - - -class BertLayer(nn.Module): - def __init__(self, config, layer_num): - super().__init__() - self.config = config - self.chunk_size_feed_forward = config.chunk_size_feed_forward - self.seq_len_dim = 1 - self.attention = BertAttention(config) - self.layer_num = layer_num - if self.config.add_cross_attention: - self.crossattention = BertAttention(config, is_cross_attention=self.config.add_cross_attention) - self.intermediate = BertIntermediate(config) - self.output = BertOutput(config) - - def forward( - self, - hidden_states, - attention_mask=None, - head_mask=None, - encoder_hidden_states=None, - encoder_attention_mask=None, - past_key_value=None, - output_attentions=False, - mode=None, - ): - # decoder uni-directional self-attention cached key/values tuple is at positions 1,2 - self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None - self_attention_outputs = self.attention( - hidden_states, - attention_mask, - head_mask, - output_attentions=output_attentions, - past_key_value=self_attn_past_key_value, - ) - attention_output = self_attention_outputs[0] - - outputs = self_attention_outputs[1:-1] - present_key_value = self_attention_outputs[-1] - - if mode=='multimodal': - assert encoder_hidden_states is not None, "encoder_hidden_states must be given for cross-attention layers" - - cross_attention_outputs = self.crossattention( - attention_output, - attention_mask, - head_mask, - encoder_hidden_states, - 
encoder_attention_mask, - output_attentions=output_attentions, - ) - attention_output = cross_attention_outputs[0] - outputs = outputs + cross_attention_outputs[1:-1] # add cross attentions if we output attention weights - layer_output = apply_chunking_to_forward( - self.feed_forward_chunk, self.chunk_size_feed_forward, self.seq_len_dim, attention_output - ) - outputs = (layer_output,) + outputs - - outputs = outputs + (present_key_value,) - - return outputs - - def feed_forward_chunk(self, attention_output): - intermediate_output = self.intermediate(attention_output) - layer_output = self.output(intermediate_output, attention_output) - return layer_output - - -class BertEncoder(nn.Module): - def __init__(self, config): - super().__init__() - self.config = config - self.layer = nn.ModuleList([BertLayer(config,i) for i in range(config.num_hidden_layers)]) - self.gradient_checkpointing = False - - def forward( - self, - hidden_states, - attention_mask=None, - head_mask=None, - encoder_hidden_states=None, - encoder_attention_mask=None, - past_key_values=None, - use_cache=None, - output_attentions=False, - output_hidden_states=False, - return_dict=True, - mode='multimodal', - ): - all_hidden_states = () if output_hidden_states else None - all_self_attentions = () if output_attentions else None - all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None - - next_decoder_cache = () if use_cache else None - - for i in range(self.config.num_hidden_layers): - layer_module = self.layer[i] - if output_hidden_states: - all_hidden_states = all_hidden_states + (hidden_states,) - - layer_head_mask = head_mask[i] if head_mask is not None else None - past_key_value = past_key_values[i] if past_key_values is not None else None - - if self.gradient_checkpointing and self.training: - - if use_cache: - logger.warning( - "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." 
- ) - use_cache = False - - def create_custom_forward(module): - def custom_forward(*inputs): - return module(*inputs, past_key_value, output_attentions) - - return custom_forward - - layer_outputs = torch.utils.checkpoint.checkpoint( - create_custom_forward(layer_module), - hidden_states, - attention_mask, - layer_head_mask, - encoder_hidden_states, - encoder_attention_mask, - mode=mode, - ) - else: - layer_outputs = layer_module( - hidden_states, - attention_mask, - layer_head_mask, - encoder_hidden_states, - encoder_attention_mask, - past_key_value, - output_attentions, - mode=mode, - ) - - hidden_states = layer_outputs[0] - if use_cache: - next_decoder_cache += (layer_outputs[-1],) - if output_attentions: - all_self_attentions = all_self_attentions + (layer_outputs[1],) - - if output_hidden_states: - all_hidden_states = all_hidden_states + (hidden_states,) - - if not return_dict: - return tuple( - v - for v in [ - hidden_states, - next_decoder_cache, - all_hidden_states, - all_self_attentions, - all_cross_attentions, - ] - if v is not None - ) - return BaseModelOutputWithPastAndCrossAttentions( - last_hidden_state=hidden_states, - past_key_values=next_decoder_cache, - hidden_states=all_hidden_states, - attentions=all_self_attentions, - cross_attentions=all_cross_attentions, - ) - - -class BertPooler(nn.Module): - def __init__(self, config): - super().__init__() - self.dense = nn.Linear(config.hidden_size, config.hidden_size) - self.activation = nn.Tanh() - - def forward(self, hidden_states): - # We "pool" the model by simply taking the hidden state corresponding - # to the first token. 
- first_token_tensor = hidden_states[:, 0] - pooled_output = self.dense(first_token_tensor) - pooled_output = self.activation(pooled_output) - return pooled_output - - -class BertPredictionHeadTransform(nn.Module): - def __init__(self, config): - super().__init__() - self.dense = nn.Linear(config.hidden_size, config.hidden_size) - if isinstance(config.hidden_act, str): - self.transform_act_fn = ACT2FN[config.hidden_act] - else: - self.transform_act_fn = config.hidden_act - self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) - - def forward(self, hidden_states): - hidden_states = self.dense(hidden_states) - hidden_states = self.transform_act_fn(hidden_states) - hidden_states = self.LayerNorm(hidden_states) - return hidden_states - - -class BertLMPredictionHead(nn.Module): - def __init__(self, config): - super().__init__() - self.transform = BertPredictionHeadTransform(config) - - # The output weights are the same as the input embeddings, but there is - # an output-only bias for each token. - self.decoder = nn.Linear(config.hidden_size, config.vocab_size, bias=False) - - self.bias = nn.Parameter(torch.zeros(config.vocab_size)) - - # Need a link between the two variables so that the bias is correctly resized with `resize_token_embeddings` - self.decoder.bias = self.bias - - def forward(self, hidden_states): - hidden_states = self.transform(hidden_states) - hidden_states = self.decoder(hidden_states) - return hidden_states - - -class BertOnlyMLMHead(nn.Module): - def __init__(self, config): - super().__init__() - self.predictions = BertLMPredictionHead(config) - - def forward(self, sequence_output): - prediction_scores = self.predictions(sequence_output) - return prediction_scores - - -class BertPreTrainedModel(PreTrainedModel): - """ - An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained - models. 
- """ - - config_class = BertConfig - base_model_prefix = "bert" - _keys_to_ignore_on_load_missing = [r"position_ids"] - - def _init_weights(self, module): - """ Initialize the weights """ - if isinstance(module, (nn.Linear, nn.Embedding)): - # Slightly different from the TF version which uses truncated_normal for initialization - # cf https://github.com/pytorch/pytorch/pull/5617 - module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) - elif isinstance(module, nn.LayerNorm): - module.bias.data.zero_() - module.weight.data.fill_(1.0) - if isinstance(module, nn.Linear) and module.bias is not None: - module.bias.data.zero_() - - -class BertModel(BertPreTrainedModel): - """ - The model can behave as an encoder (with only self-attention) as well as a decoder, in which case a layer of - cross-attention is added between the self-attention layers, following the architecture described in `Attention is - all you need `__ by Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit, - Llion Jones, Aidan N. Gomez, Lukasz Kaiser and Illia Polosukhin. - argument and :obj:`add_cross_attention` set to :obj:`True`; an :obj:`encoder_hidden_states` is then expected as an - input to the forward pass. - """ - - def __init__(self, config, add_pooling_layer=True): - super().__init__(config) - self.config = config - - self.embeddings = BertEmbeddings(config) - - self.encoder = BertEncoder(config) - - self.pooler = BertPooler(config) if add_pooling_layer else None - - self.init_weights() - - - def get_input_embeddings(self): - return self.embeddings.word_embeddings - - def set_input_embeddings(self, value): - self.embeddings.word_embeddings = value - - def _prune_heads(self, heads_to_prune): - """ - Prunes heads of the model. 
heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base - class PreTrainedModel - """ - for layer, heads in heads_to_prune.items(): - self.encoder.layer[layer].attention.prune_heads(heads) - - - def get_extended_attention_mask(self, attention_mask: Tensor, input_shape: Tuple[int], device: device, is_decoder: bool) -> Tensor: - """ - Makes broadcastable attention and causal masks so that future and masked tokens are ignored. - - Arguments: - attention_mask (:obj:`torch.Tensor`): - Mask with ones indicating tokens to attend to, zeros for tokens to ignore. - input_shape (:obj:`Tuple[int]`): - The shape of the input to the model. - device: (:obj:`torch.device`): - The device of the input to the model. - - Returns: - :obj:`torch.Tensor` The extended attention mask, with a the same dtype as :obj:`attention_mask.dtype`. - """ - # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length] - # ourselves in which case we just need to make it broadcastable to all heads. 
- if attention_mask.dim() == 3: - extended_attention_mask = attention_mask[:, None, :, :] - elif attention_mask.dim() == 2: - # Provided a padding mask of dimensions [batch_size, seq_length] - # - if the model is a decoder, apply a causal mask in addition to the padding mask - # - if the model is an encoder, make the mask broadcastable to [batch_size, num_heads, seq_length, seq_length] - if is_decoder: - batch_size, seq_length = input_shape - - seq_ids = torch.arange(seq_length, device=device) - causal_mask = seq_ids[None, None, :].repeat(batch_size, seq_length, 1) <= seq_ids[None, :, None] - # in case past_key_values are used we need to add a prefix ones mask to the causal mask - # causal and attention masks must have same type with pytorch version < 1.3 - causal_mask = causal_mask.to(attention_mask.dtype) - - if causal_mask.shape[1] < attention_mask.shape[1]: - prefix_seq_len = attention_mask.shape[1] - causal_mask.shape[1] - causal_mask = torch.cat( - [ - torch.ones((batch_size, seq_length, prefix_seq_len), device=device, dtype=causal_mask.dtype), - causal_mask, - ], - axis=-1, - ) - - extended_attention_mask = causal_mask[:, None, :, :] * attention_mask[:, None, None, :] - else: - extended_attention_mask = attention_mask[:, None, None, :] - else: - raise ValueError( - "Wrong shape for input_ids (shape {}) or attention_mask (shape {})".format( - input_shape, attention_mask.shape - ) - ) - - # Since attention_mask is 1.0 for positions we want to attend and 0.0 for - # masked positions, this operation will create a tensor which is 0.0 for - # positions we want to attend and -10000.0 for masked positions. - # Since we are adding it to the raw scores before the softmax, this is - # effectively the same as removing these entirely. 
- extended_attention_mask = extended_attention_mask.to(dtype=self.dtype) # fp16 compatibility - extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0 - return extended_attention_mask - - def forward( - self, - input_ids=None, - attention_mask=None, - position_ids=None, - head_mask=None, - inputs_embeds=None, - encoder_embeds=None, - encoder_hidden_states=None, - encoder_attention_mask=None, - past_key_values=None, - use_cache=None, - output_attentions=None, - output_hidden_states=None, - return_dict=None, - is_decoder=False, - mode='multimodal', - ): - r""" - encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): - Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if - the model is configured as a decoder. - encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): - Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in - the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``: - - 1 for tokens that are **not masked**, - - 0 for tokens that are **masked**. - past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): - Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding. - If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids` - (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)` - instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`. 
- use_cache (:obj:`bool`, `optional`): - If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up - decoding (see :obj:`past_key_values`). - """ - output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions - output_hidden_states = ( - output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states - ) - return_dict = return_dict if return_dict is not None else self.config.use_return_dict - - if is_decoder: - use_cache = use_cache if use_cache is not None else self.config.use_cache - else: - use_cache = False - - if input_ids is not None and inputs_embeds is not None: - raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time") - elif input_ids is not None: - input_shape = input_ids.size() - batch_size, seq_length = input_shape - device = input_ids.device - elif inputs_embeds is not None: - input_shape = inputs_embeds.size()[:-1] - batch_size, seq_length = input_shape - device = inputs_embeds.device - elif encoder_embeds is not None: - input_shape = encoder_embeds.size()[:-1] - batch_size, seq_length = input_shape - device = encoder_embeds.device - else: - raise ValueError("You have to specify either input_ids or inputs_embeds or encoder_embeds") - - # past_key_values_length - past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0 - - if attention_mask is None: - attention_mask = torch.ones(((batch_size, seq_length + past_key_values_length)), device=device) - - # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length] - # ourselves in which case we just need to make it broadcastable to all heads. 
- extended_attention_mask: torch.Tensor = self.get_extended_attention_mask(attention_mask, input_shape, - device, is_decoder) - - # If a 2D or 3D attention mask is provided for the cross-attention - # we need to make broadcastable to [batch_size, num_heads, seq_length, seq_length] - if encoder_hidden_states is not None: - if type(encoder_hidden_states) == list: - encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states[0].size() - else: - encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size() - encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length) - - if type(encoder_attention_mask) == list: - encoder_extended_attention_mask = [self.invert_attention_mask(mask) for mask in encoder_attention_mask] - elif encoder_attention_mask is None: - encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device) - encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) - else: - encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) - else: - encoder_extended_attention_mask = None - - # Prepare head mask if needed - # 1.0 in head_mask indicate we keep the head - # attention_probs has shape bsz x n_heads x N x N - # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads] - # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length] - head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) - - if encoder_embeds is None: - embedding_output = self.embeddings( - input_ids=input_ids, - position_ids=position_ids, - inputs_embeds=inputs_embeds, - past_key_values_length=past_key_values_length, - ) - else: - embedding_output = encoder_embeds - - encoder_outputs = self.encoder( - embedding_output, - attention_mask=extended_attention_mask, - head_mask=head_mask, - encoder_hidden_states=encoder_hidden_states, - encoder_attention_mask=encoder_extended_attention_mask, - 
past_key_values=past_key_values, - use_cache=use_cache, - output_attentions=output_attentions, - output_hidden_states=output_hidden_states, - return_dict=return_dict, - mode=mode, - ) - sequence_output = encoder_outputs[0] - pooled_output = self.pooler(sequence_output) if self.pooler is not None else None - - if not return_dict: - return (sequence_output, pooled_output) + encoder_outputs[1:] - - return BaseModelOutputWithPoolingAndCrossAttentions( - last_hidden_state=sequence_output, - pooler_output=pooled_output, - past_key_values=encoder_outputs.past_key_values, - hidden_states=encoder_outputs.hidden_states, - attentions=encoder_outputs.attentions, - cross_attentions=encoder_outputs.cross_attentions, - ) - - - -class BertLMHeadModel(BertPreTrainedModel): - - _keys_to_ignore_on_load_unexpected = [r"pooler"] - _keys_to_ignore_on_load_missing = [r"position_ids", r"predictions.decoder.bias"] - - def __init__(self, config): - super().__init__(config) - - self.bert = BertModel(config, add_pooling_layer=False) - self.cls = BertOnlyMLMHead(config) - - self.init_weights() - - def get_output_embeddings(self): - return self.cls.predictions.decoder - - def set_output_embeddings(self, new_embeddings): - self.cls.predictions.decoder = new_embeddings - - def forward( - self, - input_ids=None, - attention_mask=None, - position_ids=None, - head_mask=None, - inputs_embeds=None, - encoder_hidden_states=None, - encoder_attention_mask=None, - labels=None, - past_key_values=None, - use_cache=None, - output_attentions=None, - output_hidden_states=None, - return_dict=None, - return_logits=False, - is_decoder=True, - reduction='mean', - mode='multimodal', - ): - r""" - encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): - Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if - the model is configured as a decoder. 
- encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): - Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in - the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``: - - 1 for tokens that are **not masked**, - - 0 for tokens that are **masked**. - labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): - Labels for computing the left-to-right language modeling loss (next word prediction). Indices should be in - ``[-100, 0, ..., config.vocab_size]`` (see ``input_ids`` docstring) Tokens with indices set to ``-100`` are - ignored (masked), the loss is only computed for the tokens with labels n ``[0, ..., config.vocab_size]`` - past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): - Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding. - If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids` - (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)` - instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`. - use_cache (:obj:`bool`, `optional`): - If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up - decoding (see :obj:`past_key_values`). 
- Returns: - Example:: - >>> from transformers import BertTokenizer, BertLMHeadModel, BertConfig - >>> import torch - >>> tokenizer = BertTokenizer.from_pretrained('bert-base-cased') - >>> config = BertConfig.from_pretrained("bert-base-cased") - >>> model = BertLMHeadModel.from_pretrained('bert-base-cased', config=config) - >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt") - >>> outputs = model(**inputs) - >>> prediction_logits = outputs.logits - """ - return_dict = return_dict if return_dict is not None else self.config.use_return_dict - if labels is not None: - use_cache = False - - outputs = self.bert( - input_ids, - attention_mask=attention_mask, - position_ids=position_ids, - head_mask=head_mask, - inputs_embeds=inputs_embeds, - encoder_hidden_states=encoder_hidden_states, - encoder_attention_mask=encoder_attention_mask, - past_key_values=past_key_values, - use_cache=use_cache, - output_attentions=output_attentions, - output_hidden_states=output_hidden_states, - return_dict=return_dict, - is_decoder=is_decoder, - mode=mode, - ) - - sequence_output = outputs[0] - prediction_scores = self.cls(sequence_output) - - if return_logits: - return prediction_scores[:, :-1, :].contiguous() - - lm_loss = None - if labels is not None: - # we are doing next-token prediction; shift prediction scores and input ids by one - shifted_prediction_scores = prediction_scores[:, :-1, :].contiguous() - labels = labels[:, 1:].contiguous() - loss_fct = CrossEntropyLoss(reduction=reduction, label_smoothing=0.1) - lm_loss = loss_fct(shifted_prediction_scores.view(-1, self.config.vocab_size), labels.view(-1)) - if reduction=='none': - lm_loss = lm_loss.view(prediction_scores.size(0),-1).sum(1) - - if not return_dict: - output = (prediction_scores,) + outputs[2:] - return ((lm_loss,) + output) if lm_loss is not None else output - - return CausalLMOutputWithCrossAttentions( - loss=lm_loss, - logits=prediction_scores, - past_key_values=outputs.past_key_values, - 
hidden_states=outputs.hidden_states, - attentions=outputs.attentions, - cross_attentions=outputs.cross_attentions, - ) - - def prepare_inputs_for_generation(self, input_ids, past=None, attention_mask=None, **model_kwargs): - input_shape = input_ids.shape - # if model is used as a decoder in encoder-decoder model, the decoder attention mask is created on the fly - if attention_mask is None: - attention_mask = input_ids.new_ones(input_shape) - - # cut decoder_input_ids if past is used - if past is not None: - input_ids = input_ids[:, -1:] - - return { - "input_ids": input_ids, - "attention_mask": attention_mask, - "past_key_values": past, - "encoder_hidden_states": model_kwargs.get("encoder_hidden_states", None), - "encoder_attention_mask": model_kwargs.get("encoder_attention_mask", None), - "is_decoder": True, - } - - def _reorder_cache(self, past, beam_idx): - reordered_past = () - for layer_past in past: - reordered_past += (tuple(past_state.index_select(0, beam_idx) for past_state in layer_past),) - return reordered_past diff --git a/diffsynth/extensions/ImageQualityMetric/BLIP/vit.py b/diffsynth/extensions/ImageQualityMetric/BLIP/vit.py deleted file mode 100644 index cef7b65..0000000 --- a/diffsynth/extensions/ImageQualityMetric/BLIP/vit.py +++ /dev/null @@ -1,301 +0,0 @@ -''' - * Adapted from BLIP (https://github.com/salesforce/BLIP) - * Based on timm code base - * https://github.com/rwightman/pytorch-image-models/tree/master/timm -''' - -import torch -import torch.nn as nn -import torch.nn.functional as F -from functools import partial - -from timm.models.vision_transformer import _cfg, PatchEmbed -from timm.models.registry import register_model -from timm.models.layers import trunc_normal_, DropPath -from timm.models.helpers import named_apply, adapt_input_conv - -# from fairscale.nn.checkpoint.checkpoint_activations import checkpoint_wrapper - -class Mlp(nn.Module): - """ MLP as used in Vision Transformer, MLP-Mixer and related networks - """ - def 
__init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): - super().__init__() - out_features = out_features or in_features - hidden_features = hidden_features or in_features - self.fc1 = nn.Linear(in_features, hidden_features) - self.act = act_layer() - self.fc2 = nn.Linear(hidden_features, out_features) - self.drop = nn.Dropout(drop) - - def forward(self, x): - x = self.fc1(x) - x = self.act(x) - x = self.drop(x) - x = self.fc2(x) - x = self.drop(x) - return x - - -class Attention(nn.Module): - def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.): - super().__init__() - self.num_heads = num_heads - head_dim = dim // num_heads - # NOTE scale factor was wrong in my original version, can set manually to be compat with prev weights - self.scale = qk_scale or head_dim ** -0.5 - self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) - self.attn_drop = nn.Dropout(attn_drop) - self.proj = nn.Linear(dim, dim) - self.proj_drop = nn.Dropout(proj_drop) - self.attn_gradients = None - self.attention_map = None - - def save_attn_gradients(self, attn_gradients): - self.attn_gradients = attn_gradients - - def get_attn_gradients(self): - return self.attn_gradients - - def save_attention_map(self, attention_map): - self.attention_map = attention_map - - def get_attention_map(self): - return self.attention_map - - def forward(self, x, register_hook=False): - B, N, C = x.shape - qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) - q, k, v = qkv[0], qkv[1], qkv[2] # make torchscript happy (cannot use tensor as tuple) - - attn = (q @ k.transpose(-2, -1)) * self.scale - attn = attn.softmax(dim=-1) - attn = self.attn_drop(attn) - - if register_hook: - self.save_attention_map(attn) - attn.register_hook(self.save_attn_gradients) - - x = (attn @ v).transpose(1, 2).reshape(B, N, C) - x = self.proj(x) - x = self.proj_drop(x) - return x - - -class Block(nn.Module): - - def 
__init__(self, dim, num_heads, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop=0., attn_drop=0., - drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm, use_grad_checkpointing=False): - super().__init__() - self.norm1 = norm_layer(dim) - self.attn = Attention( - dim, num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=drop) - # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here - self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() - self.norm2 = norm_layer(dim) - mlp_hidden_dim = int(dim * mlp_ratio) - self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) - - # if use_grad_checkpointing: - # self.attn = checkpoint_wrapper(self.attn) - # self.mlp = checkpoint_wrapper(self.mlp) - - def forward(self, x, register_hook=False): - x = x + self.drop_path(self.attn(self.norm1(x), register_hook=register_hook)) - x = x + self.drop_path(self.mlp(self.norm2(x))) - return x - - -class VisionTransformer(nn.Module): - """ Vision Transformer - A PyTorch impl of : `An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale` - - https://arxiv.org/abs/2010.11929 - """ - def __init__(self, img_size=224, patch_size=16, in_chans=3, num_classes=1000, embed_dim=768, depth=12, - num_heads=12, mlp_ratio=4., qkv_bias=True, qk_scale=None, representation_size=None, - drop_rate=0., attn_drop_rate=0., drop_path_rate=0., norm_layer=None, - use_grad_checkpointing=False, ckpt_layer=0): - """ - Args: - img_size (int, tuple): input image size - patch_size (int, tuple): patch size - in_chans (int): number of input channels - num_classes (int): number of classes for classification head - embed_dim (int): embedding dimension - depth (int): depth of transformer - num_heads (int): number of attention heads - mlp_ratio (int): ratio of mlp hidden dim to embedding dim - qkv_bias (bool): enable bias for qkv if True - qk_scale (float): override default 
qk scale of head_dim ** -0.5 if set - representation_size (Optional[int]): enable and set representation layer (pre-logits) to this value if set - drop_rate (float): dropout rate - attn_drop_rate (float): attention dropout rate - drop_path_rate (float): stochastic depth rate - norm_layer: (nn.Module): normalization layer - """ - super().__init__() - self.num_features = self.embed_dim = embed_dim # num_features for consistency with other models - norm_layer = norm_layer or partial(nn.LayerNorm, eps=1e-6) - - self.patch_embed = PatchEmbed( - img_size=img_size, patch_size=patch_size, in_chans=in_chans, embed_dim=embed_dim) - - num_patches = self.patch_embed.num_patches - - self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim)) - self.pos_embed = nn.Parameter(torch.zeros(1, num_patches + 1, embed_dim)) - self.pos_drop = nn.Dropout(p=drop_rate) - - dpr = [x.item() for x in torch.linspace(0, drop_path_rate, depth)] # stochastic depth decay rule - self.blocks = nn.ModuleList([ - Block( - dim=embed_dim, num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, qk_scale=qk_scale, - drop=drop_rate, attn_drop=attn_drop_rate, drop_path=dpr[i], norm_layer=norm_layer, - use_grad_checkpointing=(use_grad_checkpointing and i>=depth-ckpt_layer) - ) - for i in range(depth)]) - self.norm = norm_layer(embed_dim) - - trunc_normal_(self.pos_embed, std=.02) - trunc_normal_(self.cls_token, std=.02) - self.apply(self._init_weights) - - def _init_weights(self, m): - if isinstance(m, nn.Linear): - trunc_normal_(m.weight, std=.02) - if isinstance(m, nn.Linear) and m.bias is not None: - nn.init.constant_(m.bias, 0) - elif isinstance(m, nn.LayerNorm): - nn.init.constant_(m.bias, 0) - nn.init.constant_(m.weight, 1.0) - - @torch.jit.ignore - def no_weight_decay(self): - return {'pos_embed', 'cls_token'} - - def forward(self, x, register_blk=-1): - B = x.shape[0] - x = self.patch_embed(x) - - cls_tokens = self.cls_token.expand(B, -1, -1) # stole cls_tokens impl from Phil Wang, thanks - x = 
torch.cat((cls_tokens, x), dim=1) - - x = x + self.pos_embed[:,:x.size(1),:] - x = self.pos_drop(x) - - for i,blk in enumerate(self.blocks): - x = blk(x, register_blk==i) - x = self.norm(x) - - return x - - @torch.jit.ignore() - def load_pretrained(self, checkpoint_path, prefix=''): - _load_weights(self, checkpoint_path, prefix) - - -@torch.no_grad() -def _load_weights(model: VisionTransformer, checkpoint_path: str, prefix: str = ''): - """ Load weights from .npz checkpoints for official Google Brain Flax implementation - """ - import numpy as np - - def _n2p(w, t=True): - if w.ndim == 4 and w.shape[0] == w.shape[1] == w.shape[2] == 1: - w = w.flatten() - if t: - if w.ndim == 4: - w = w.transpose([3, 2, 0, 1]) - elif w.ndim == 3: - w = w.transpose([2, 0, 1]) - elif w.ndim == 2: - w = w.transpose([1, 0]) - return torch.from_numpy(w) - - w = np.load(checkpoint_path) - if not prefix and 'opt/target/embedding/kernel' in w: - prefix = 'opt/target/' - - if hasattr(model.patch_embed, 'backbone'): - # hybrid - backbone = model.patch_embed.backbone - stem_only = not hasattr(backbone, 'stem') - stem = backbone if stem_only else backbone.stem - stem.conv.weight.copy_(adapt_input_conv(stem.conv.weight.shape[1], _n2p(w[f'{prefix}conv_root/kernel']))) - stem.norm.weight.copy_(_n2p(w[f'{prefix}gn_root/scale'])) - stem.norm.bias.copy_(_n2p(w[f'{prefix}gn_root/bias'])) - if not stem_only: - for i, stage in enumerate(backbone.stages): - for j, block in enumerate(stage.blocks): - bp = f'{prefix}block{i + 1}/unit{j + 1}/' - for r in range(3): - getattr(block, f'conv{r + 1}').weight.copy_(_n2p(w[f'{bp}conv{r + 1}/kernel'])) - getattr(block, f'norm{r + 1}').weight.copy_(_n2p(w[f'{bp}gn{r + 1}/scale'])) - getattr(block, f'norm{r + 1}').bias.copy_(_n2p(w[f'{bp}gn{r + 1}/bias'])) - if block.downsample is not None: - block.downsample.conv.weight.copy_(_n2p(w[f'{bp}conv_proj/kernel'])) - block.downsample.norm.weight.copy_(_n2p(w[f'{bp}gn_proj/scale'])) - 
block.downsample.norm.bias.copy_(_n2p(w[f'{bp}gn_proj/bias'])) - embed_conv_w = _n2p(w[f'{prefix}embedding/kernel']) - else: - embed_conv_w = adapt_input_conv( - model.patch_embed.proj.weight.shape[1], _n2p(w[f'{prefix}embedding/kernel'])) - model.patch_embed.proj.weight.copy_(embed_conv_w) - model.patch_embed.proj.bias.copy_(_n2p(w[f'{prefix}embedding/bias'])) - model.cls_token.copy_(_n2p(w[f'{prefix}cls'], t=False)) - pos_embed_w = _n2p(w[f'{prefix}Transformer/posembed_input/pos_embedding'], t=False) - if pos_embed_w.shape != model.pos_embed.shape: - pos_embed_w = resize_pos_embed( # resize pos embedding when different size from pretrained weights - pos_embed_w, model.pos_embed, getattr(model, 'num_tokens', 1), model.patch_embed.grid_size) - model.pos_embed.copy_(pos_embed_w) - model.norm.weight.copy_(_n2p(w[f'{prefix}Transformer/encoder_norm/scale'])) - model.norm.bias.copy_(_n2p(w[f'{prefix}Transformer/encoder_norm/bias'])) -# if isinstance(model.head, nn.Linear) and model.head.bias.shape[0] == w[f'{prefix}head/bias'].shape[-1]: -# model.head.weight.copy_(_n2p(w[f'{prefix}head/kernel'])) -# model.head.bias.copy_(_n2p(w[f'{prefix}head/bias'])) -# if isinstance(getattr(model.pre_logits, 'fc', None), nn.Linear) and f'{prefix}pre_logits/bias' in w: -# model.pre_logits.fc.weight.copy_(_n2p(w[f'{prefix}pre_logits/kernel'])) -# model.pre_logits.fc.bias.copy_(_n2p(w[f'{prefix}pre_logits/bias'])) - for i, block in enumerate(model.blocks.children()): - block_prefix = f'{prefix}Transformer/encoderblock_{i}/' - mha_prefix = block_prefix + 'MultiHeadDotProductAttention_1/' - block.norm1.weight.copy_(_n2p(w[f'{block_prefix}LayerNorm_0/scale'])) - block.norm1.bias.copy_(_n2p(w[f'{block_prefix}LayerNorm_0/bias'])) - block.attn.qkv.weight.copy_(torch.cat([ - _n2p(w[f'{mha_prefix}{n}/kernel'], t=False).flatten(1).T for n in ('query', 'key', 'value')])) - block.attn.qkv.bias.copy_(torch.cat([ - _n2p(w[f'{mha_prefix}{n}/bias'], t=False).reshape(-1) for n in ('query', 'key', 
'value')])) - block.attn.proj.weight.copy_(_n2p(w[f'{mha_prefix}out/kernel']).flatten(1)) - block.attn.proj.bias.copy_(_n2p(w[f'{mha_prefix}out/bias'])) - for r in range(2): - getattr(block.mlp, f'fc{r + 1}').weight.copy_(_n2p(w[f'{block_prefix}MlpBlock_3/Dense_{r}/kernel'])) - getattr(block.mlp, f'fc{r + 1}').bias.copy_(_n2p(w[f'{block_prefix}MlpBlock_3/Dense_{r}/bias'])) - block.norm2.weight.copy_(_n2p(w[f'{block_prefix}LayerNorm_2/scale'])) - block.norm2.bias.copy_(_n2p(w[f'{block_prefix}LayerNorm_2/bias'])) - - -def interpolate_pos_embed(pos_embed_checkpoint, visual_encoder): - # interpolate position embedding - embedding_size = pos_embed_checkpoint.shape[-1] - num_patches = visual_encoder.patch_embed.num_patches - num_extra_tokens = visual_encoder.pos_embed.shape[-2] - num_patches - # height (== width) for the checkpoint position embedding - orig_size = int((pos_embed_checkpoint.shape[-2] - num_extra_tokens) ** 0.5) - # height (== width) for the new position embedding - new_size = int(num_patches ** 0.5) - - if orig_size!=new_size: - # class_token and dist_token are kept unchanged - extra_tokens = pos_embed_checkpoint[:, :num_extra_tokens] - # only the position tokens are interpolated - pos_tokens = pos_embed_checkpoint[:, num_extra_tokens:] - pos_tokens = pos_tokens.reshape(-1, orig_size, orig_size, embedding_size).permute(0, 3, 1, 2) - pos_tokens = torch.nn.functional.interpolate( - pos_tokens, size=(new_size, new_size), mode='bicubic', align_corners=False) - pos_tokens = pos_tokens.permute(0, 2, 3, 1).flatten(1, 2) - new_pos_embed = torch.cat((extra_tokens, pos_tokens), dim=1) - print('reshape position embedding from %d to %d'%(orig_size ** 2,new_size ** 2)) - - return new_pos_embed - else: - return pos_embed_checkpoint \ No newline at end of file diff --git a/diffsynth/extensions/ImageQualityMetric/__init__.py b/diffsynth/extensions/ImageQualityMetric/__init__.py deleted file mode 100644 index fcfb7c0..0000000 --- 
a/diffsynth/extensions/ImageQualityMetric/__init__.py +++ /dev/null @@ -1,148 +0,0 @@ -from modelscope import snapshot_download -from typing_extensions import Literal, TypeAlias -import os -from diffsynth.extensions.ImageQualityMetric.aesthetic import AestheticScore -from diffsynth.extensions.ImageQualityMetric.imagereward import ImageRewardScore -from diffsynth.extensions.ImageQualityMetric.pickscore import PickScore -from diffsynth.extensions.ImageQualityMetric.clip import CLIPScore -from diffsynth.extensions.ImageQualityMetric.hps import HPScore_v2 -from diffsynth.extensions.ImageQualityMetric.mps import MPScore - - -preference_model_id: TypeAlias = Literal[ - "ImageReward", - "Aesthetic", - "PickScore", - "CLIP", - "HPSv2", - "HPSv2.1", - "MPS", -] -model_dict = { - "ImageReward": { - "model_id": "DiffSynth-Studio/QualityMetric_reward_pretrained", - "allow_file_pattern": [ - "ImageReward/ImageReward.safetensors", - "ImageReward/med_config.json", - "bert-base-uncased/config.json", - "bert-base-uncased/model.safetensors", - "bert-base-uncased/tokenizer.json", - "bert-base-uncased/tokenizer_config.json", - "bert-base-uncased/vocab.txt", - ], - "load_path": { - "imagereward": "ImageReward/ImageReward.safetensors", - "med_config": "ImageReward/med_config.json", - "bert_model_path": "bert-base-uncased", - }, - "model_class": ImageRewardScore - }, - "Aesthetic": { - "model_id": "DiffSynth-Studio/QualityMetric_reward_pretrained", - "allow_file_pattern": [ - "aesthetic-predictor/sac+logos+ava1-l14-linearMSE.safetensors", - "clip-vit-large-patch14/config.json", - "clip-vit-large-patch14/merges.txt", - "clip-vit-large-patch14/model.safetensors", - "clip-vit-large-patch14/preprocessor_config.json", - "clip-vit-large-patch14/special_tokens_map.json", - "clip-vit-large-patch14/tokenizer.json", - "clip-vit-large-patch14/tokenizer_config.json", - "clip-vit-large-patch14/vocab.json", - ], - "load_path": { - "aesthetic_predictor": 
"aesthetic-predictor/sac+logos+ava1-l14-linearMSE.safetensors", - "clip-large": "clip-vit-large-patch14", - }, - "model_class": AestheticScore - }, - "PickScore": { - "model_id": "DiffSynth-Studio/QualityMetric_reward_pretrained", - "allow_file_pattern": [ - "PickScore_v1/*", - "CLIP-ViT-H-14-laion2B-s32B-b79K/config.json", - "CLIP-ViT-H-14-laion2B-s32B-b79K/merges.txt", - "CLIP-ViT-H-14-laion2B-s32B-b79K/preprocessor_config.json", - "CLIP-ViT-H-14-laion2B-s32B-b79K/special_tokens_map.json", - "CLIP-ViT-H-14-laion2B-s32B-b79K/tokenizer.json", - "CLIP-ViT-H-14-laion2B-s32B-b79K/tokenizer_config.json", - "CLIP-ViT-H-14-laion2B-s32B-b79K/vocab.json", - ], - "load_path": { - "pickscore": "PickScore_v1", - "clip": "CLIP-ViT-H-14-laion2B-s32B-b79K", - }, - "model_class": PickScore - }, - "CLIP": { - "model_id": "DiffSynth-Studio/QualityMetric_reward_pretrained", - "allow_file_pattern": [ - "CLIP-ViT-H-14-laion2B-s32B-b79K/open_clip_pytorch_model.bin", - "bpe_simple_vocab_16e6.txt.gz", - ], - "load_path": { - "open_clip": "CLIP-ViT-H-14-laion2B-s32B-b79K/open_clip_pytorch_model.bin", - "open_clip_bpe": "bpe_simple_vocab_16e6.txt.gz", - }, - "model_class": CLIPScore - }, - "HPSv2": { - "model_id": "DiffSynth-Studio/QualityMetric_reward_pretrained", - "allow_file_pattern": [ - "HPS_v2/HPS_v2_compressed.safetensors", - "bpe_simple_vocab_16e6.txt.gz", - ], - "load_path": { - "hpsv2": "HPS_v2/HPS_v2_compressed.safetensors", - "open_clip_bpe": "bpe_simple_vocab_16e6.txt.gz", - }, - "model_class": HPScore_v2, - "extra_kwargs": {"model_version": "v2"} - }, - "HPSv2.1": { - "model_id": "DiffSynth-Studio/QualityMetric_reward_pretrained", - "allow_file_pattern": [ - "HPS_v2/HPS_v2.1_compressed.safetensors", - "bpe_simple_vocab_16e6.txt.gz", - ], - "load_path": { - "hpsv2.1": "HPS_v2/HPS_v2.1_compressed.safetensors", - "open_clip_bpe": "bpe_simple_vocab_16e6.txt.gz", - }, - "model_class": HPScore_v2, - "extra_kwargs": {"model_version": "v21"} - }, - "MPS": { - "model_id": 
"DiffSynth-Studio/QualityMetric_reward_pretrained", - "allow_file_pattern": [ - "MPS_overall_checkpoint/MPS_overall_checkpoint_diffsynth.safetensors", - "CLIP-ViT-H-14-laion2B-s32B-b79K/config.json", - "CLIP-ViT-H-14-laion2B-s32B-b79K/merges.txt", - "CLIP-ViT-H-14-laion2B-s32B-b79K/preprocessor_config.json", - "CLIP-ViT-H-14-laion2B-s32B-b79K/special_tokens_map.json", - "CLIP-ViT-H-14-laion2B-s32B-b79K/tokenizer.json", - "CLIP-ViT-H-14-laion2B-s32B-b79K/tokenizer_config.json", - "CLIP-ViT-H-14-laion2B-s32B-b79K/vocab.json", - ], - "load_path": { - "mps": "MPS_overall_checkpoint/MPS_overall_checkpoint_diffsynth.safetensors", - "clip": "CLIP-ViT-H-14-laion2B-s32B-b79K", - }, - "model_class": MPScore - }, -} - - -def download_preference_model(model_name: preference_model_id, cache_dir="models"): - metadata = model_dict[model_name] - snapshot_download(model_id=metadata["model_id"], allow_file_pattern=metadata["allow_file_pattern"], cache_dir=cache_dir) - load_path = metadata["load_path"] - load_path = {key: os.path.join(cache_dir, metadata["model_id"], path) for key, path in load_path.items()} - return load_path - - -def load_preference_model(model_name: preference_model_id, device = "cuda", path = None): - model_class = model_dict[model_name]["model_class"] - extra_kwargs = model_dict[model_name].get("extra_kwargs", {}) - preference_model = model_class(device=device, path=path, **extra_kwargs) - return preference_model diff --git a/diffsynth/extensions/ImageQualityMetric/aesthetic.py b/diffsynth/extensions/ImageQualityMetric/aesthetic.py deleted file mode 100644 index 13da98a..0000000 --- a/diffsynth/extensions/ImageQualityMetric/aesthetic.py +++ /dev/null @@ -1,148 +0,0 @@ -from typing import List, Optional -from PIL import Image -import torch -from transformers import AutoProcessor, AutoModel -from safetensors.torch import load_file -import os -from typing import Union, List -from .config import MODEL_PATHS - -class MLP(torch.nn.Module): - def __init__(self, 
input_size: int, xcol: str = "emb", ycol: str = "avg_rating"): - super().__init__() - self.input_size = input_size - self.xcol = xcol - self.ycol = ycol - self.layers = torch.nn.Sequential( - torch.nn.Linear(self.input_size, 1024), - #torch.nn.ReLU(), - torch.nn.Dropout(0.2), - torch.nn.Linear(1024, 128), - #torch.nn.ReLU(), - torch.nn.Dropout(0.2), - torch.nn.Linear(128, 64), - #torch.nn.ReLU(), - torch.nn.Dropout(0.1), - torch.nn.Linear(64, 16), - #torch.nn.ReLU(), - torch.nn.Linear(16, 1), - ) - - def forward(self, x: torch.Tensor) -> torch.Tensor: - return self.layers(x) - - def training_step(self, batch: dict, batch_idx: int) -> torch.Tensor: - x = batch[self.xcol] - y = batch[self.ycol].reshape(-1, 1) - x_hat = self.layers(x) - loss = torch.nn.functional.mse_loss(x_hat, y) - return loss - - def validation_step(self, batch: dict, batch_idx: int) -> torch.Tensor: - x = batch[self.xcol] - y = batch[self.ycol].reshape(-1, 1) - x_hat = self.layers(x) - loss = torch.nn.functional.mse_loss(x_hat, y) - return loss - - def configure_optimizers(self) -> torch.optim.Optimizer: - return torch.optim.Adam(self.parameters(), lr=1e-3) - - -class AestheticScore(torch.nn.Module): - def __init__(self, device: torch.device, path: str = MODEL_PATHS): - super().__init__() - self.device = device - self.aes_model_path = path.get("aesthetic_predictor") - # Load the MLP model - self.model = MLP(768) - try: - if self.aes_model_path.endswith(".safetensors"): - state_dict = load_file(self.aes_model_path) - else: - state_dict = torch.load(self.aes_model_path) - self.model.load_state_dict(state_dict) - except Exception as e: - raise ValueError(f"Error loading model weights from {self.aes_model_path}: {e}") - - self.model.to(device) - self.model.eval() - - # Load the CLIP model and processor - clip_model_name = path.get('clip-large') - self.model2 = AutoModel.from_pretrained(clip_model_name).eval().to(device) - self.processor = AutoProcessor.from_pretrained(clip_model_name) - - def 
_calculate_score(self, image: torch.Tensor) -> float: - """Calculate the aesthetic score for a single image. - - Args: - image (torch.Tensor): The processed image tensor. - - Returns: - float: The aesthetic score. - """ - with torch.no_grad(): - # Get image embeddings - image_embs = self.model2.get_image_features(image) - image_embs = image_embs / torch.norm(image_embs, dim=-1, keepdim=True) - - # Compute score - score = self.model(image_embs).cpu().flatten().item() - - return score - - @torch.no_grad() - def score(self, images: Union[str, List[str], Image.Image, List[Image.Image]], prompt: str = "") -> List[float]: - """Score the images based on their aesthetic quality. - - Args: - images (Union[str, List[str], Image.Image, List[Image.Image]]): Path(s) to the image(s) or PIL image(s). - - Returns: - List[float]: List of scores for the images. - """ - try: - if isinstance(images, (str, Image.Image)): - # Single image - if isinstance(images, str): - pil_image = Image.open(images) - else: - pil_image = images - - # Prepare image inputs - image_inputs = self.processor( - images=pil_image, - padding=True, - truncation=True, - max_length=77, - return_tensors="pt", - ).to(self.device) - - return [self._calculate_score(image_inputs["pixel_values"])] - elif isinstance(images, list): - # Multiple images - scores = [] - for one_image in images: - if isinstance(one_image, str): - pil_image = Image.open(one_image) - elif isinstance(one_image, Image.Image): - pil_image = one_image - else: - raise TypeError("The type of parameter images is illegal.") - - # Prepare image inputs - image_inputs = self.processor( - images=pil_image, - padding=True, - truncation=True, - max_length=77, - return_tensors="pt", - ).to(self.device) - - scores.append(self._calculate_score(image_inputs["pixel_values"])) - return scores - else: - raise TypeError("The type of parameter images is illegal.") - except Exception as e: - raise RuntimeError(f"Error in scoring images: {e}") diff --git 
a/diffsynth/extensions/ImageQualityMetric/clip.py b/diffsynth/extensions/ImageQualityMetric/clip.py deleted file mode 100644 index f70941e..0000000 --- a/diffsynth/extensions/ImageQualityMetric/clip.py +++ /dev/null @@ -1,97 +0,0 @@ -from typing import List, Union -from PIL import Image -import torch -from .open_clip import create_model_and_transforms, get_tokenizer -from .config import MODEL_PATHS - -class CLIPScore(torch.nn.Module): - def __init__(self, device: torch.device, path: str = MODEL_PATHS): - super().__init__() - """Initialize the CLIPScore with a model and tokenizer. - - Args: - device (torch.device): The device to load the model on. - """ - self.device = device - - # Create model and transforms - self.model, _, self.preprocess_val = create_model_and_transforms( - "ViT-H-14", - # "laion2B-s32B-b79K", - pretrained=path.get("open_clip"), - precision="amp", - device=device, - jit=False, - force_quick_gelu=False, - force_custom_text=False, - force_patch_dropout=False, - force_image_size=None, - pretrained_image=False, - image_mean=None, - image_std=None, - light_augmentation=True, - aug_cfg={}, - output_dict=True, - with_score_predictor=False, - with_region_predictor=False, - ) - - # Initialize tokenizer - self.tokenizer = get_tokenizer("ViT-H-14", path["open_clip_bpe"]) - self.model = self.model.to(device) - self.model.eval() - - def _calculate_score(self, image: torch.Tensor, prompt: str) -> float: - """Calculate the CLIP score for a single image and prompt. - - Args: - image (torch.Tensor): The processed image tensor. - prompt (str): The prompt text. - - Returns: - float: The CLIP score. 
- """ - with torch.no_grad(): - # Process the prompt - text = self.tokenizer([prompt]).to(device=self.device, non_blocking=True) - - # Calculate the CLIP score - outputs = self.model(image, text) - image_features, text_features = outputs["image_features"], outputs["text_features"] - logits_per_image = image_features @ text_features.T - clip_score = torch.diagonal(logits_per_image).cpu().numpy() - - return clip_score[0].item() - - @torch.no_grad() - def score(self, images: Union[str, List[str], Image.Image, List[Image.Image]], prompt: str) -> List[float]: - """Score the images based on the prompt. - - Args: - images (Union[str, List[str], Image.Image, List[Image.Image]]): Path(s) to the image(s) or PIL image(s). - prompt (str): The prompt text. - - Returns: - List[float]: List of CLIP scores for the images. - """ - if isinstance(images, (str, Image.Image)): - # Single image - if isinstance(images, str): - image = self.preprocess_val(Image.open(images)).unsqueeze(0).to(device=self.device, non_blocking=True) - else: - image = self.preprocess_val(images).unsqueeze(0).to(device=self.device, non_blocking=True) - return [self._calculate_score(image, prompt)] - elif isinstance(images, list): - # Multiple images - scores = [] - for one_images in images: - if isinstance(one_images, str): - image = self.preprocess_val(Image.open(one_images)).unsqueeze(0).to(device=self.device, non_blocking=True) - elif isinstance(one_images, Image.Image): - image = self.preprocess_val(one_images).unsqueeze(0).to(device=self.device, non_blocking=True) - else: - raise TypeError("The type of parameter images is illegal.") - scores.append(self._calculate_score(image, prompt)) - return scores - else: - raise TypeError("The type of parameter images is illegal.") diff --git a/diffsynth/extensions/ImageQualityMetric/config.py b/diffsynth/extensions/ImageQualityMetric/config.py deleted file mode 100644 index 60faadc..0000000 --- a/diffsynth/extensions/ImageQualityMetric/config.py +++ /dev/null @@ 
-1,23 +0,0 @@ -import os - -current_dir = os.path.dirname(os.path.abspath(__file__)) -project_root = os.path.abspath(os.path.join(current_dir, '../../../')) -model_path = os.path.join(project_root, 'models', 'QualityMetric') - - -def get_model_path(model_name): - return os.path.join(model_path, model_name) - - -MODEL_PATHS = { - "aesthetic_predictor": get_model_path("aesthetic-predictor/sac+logos+ava1-l14-linearMSE.safetensors"), - "open_clip": get_model_path("CLIP-ViT-H-14-laion2B-s32B-b79K/open_clip_pytorch_model.bin"), - "hpsv2": get_model_path("HPS_v2/HPS_v2_compressed.safetensors"), - "hpsv2.1": get_model_path("HPS_v2/HPS_v2.1_compressed.safetensors"), - "imagereward": get_model_path("ImageReward/ImageReward.safetensors"), - "med_config": get_model_path("ImageReward/med_config.json"), - "clip": get_model_path("CLIP-ViT-H-14-laion2B-s32B-b79K"), - "clip-large": get_model_path("clip-vit-large-patch14"), - "mps": get_model_path("MPS_overall_checkpoint/MPS_overall_checkpoint_diffsynth.safetensors"), - "pickscore": get_model_path("PickScore_v1") -} \ No newline at end of file diff --git a/diffsynth/extensions/ImageQualityMetric/hps.py b/diffsynth/extensions/ImageQualityMetric/hps.py deleted file mode 100644 index a4b266b..0000000 --- a/diffsynth/extensions/ImageQualityMetric/hps.py +++ /dev/null @@ -1,118 +0,0 @@ -from typing import List, Union -from PIL import Image -import torch -from .open_clip import create_model_and_transforms, get_tokenizer -from safetensors.torch import load_file -import os -from .config import MODEL_PATHS - -class HPScore_v2(torch.nn.Module): - def __init__(self, device: torch.device, path: str = MODEL_PATHS, model_version: str = "v2"): - super().__init__() - """Initialize the Selector with a model and tokenizer. - - Args: - device (torch.device): The device to load the model on. - model_version (str): The version of the model to load. Supports "v2" or "v21". Default is "v2". 
- """ - self.device = device - - if model_version == "v2": - safetensors_path = path.get("hpsv2") - elif model_version == "v21": - safetensors_path = path.get("hpsv2.1") - else: - raise ValueError(f"Unsupported model version: {model_version}. Choose 'v2' or 'v21'.") - - # Create model and transforms - model, _, self.preprocess_val = create_model_and_transforms( - "ViT-H-14", - # "laion2B-s32B-b79K", - pretrained=path.get("open_clip"), - precision="amp", - device=device, - jit=False, - force_quick_gelu=False, - force_custom_text=False, - force_patch_dropout=False, - force_image_size=None, - pretrained_image=False, - image_mean=None, - image_std=None, - light_augmentation=True, - aug_cfg={}, - output_dict=True, - with_score_predictor=False, - with_region_predictor=False, - ) - - # Load model weights - try: - state_dict = load_file(safetensors_path) - model.load_state_dict(state_dict) - except Exception as e: - raise ValueError(f"Error loading model weights from {safetensors_path}: {e}") - - # Initialize tokenizer and model - self.tokenizer = get_tokenizer("ViT-H-14", path["open_clip_bpe"]) - model = model.to(device) - model.eval() - self.model = model - - def _calculate_score(self, image: torch.Tensor, prompt: str) -> float: - """Calculate the HPS score for a single image and prompt. - - Args: - image (torch.Tensor): The processed image tensor. - prompt (str): The prompt text. - - Returns: - float: The HPS score. 
- """ - with torch.no_grad(): - # Process the prompt - text = self.tokenizer([prompt]).to(device=self.device, non_blocking=True) - - # Calculate the HPS score - outputs = self.model(image, text) - image_features, text_features = outputs["image_features"], outputs["text_features"] - logits_per_image = image_features @ text_features.T - hps_score = torch.diagonal(logits_per_image).cpu().numpy() - - return hps_score[0].item() - - @torch.no_grad() - def score(self, images: Union[str, List[str], Image.Image, List[Image.Image]], prompt: str) -> List[float]: - """Score the images based on the prompt. - - Args: - images (Union[str, List[str], Image.Image, List[Image.Image]]): Path(s) to the image(s) or PIL image(s). - prompt (str): The prompt text. - - Returns: - List[float]: List of HPS scores for the images. - """ - try: - if isinstance(images, (str, Image.Image)): - # Single image - if isinstance(images, str): - image = self.preprocess_val(Image.open(images)).unsqueeze(0).to(device=self.device, non_blocking=True) - else: - image = self.preprocess_val(images).unsqueeze(0).to(device=self.device, non_blocking=True) - return [self._calculate_score(image, prompt)] - elif isinstance(images, list): - # Multiple images - scores = [] - for one_images in images: - if isinstance(one_images, str): - image = self.preprocess_val(Image.open(one_images)).unsqueeze(0).to(device=self.device, non_blocking=True) - elif isinstance(one_images, Image.Image): - image = self.preprocess_val(one_images).unsqueeze(0).to(device=self.device, non_blocking=True) - else: - raise TypeError("The type of parameter images is illegal.") - scores.append(self._calculate_score(image, prompt)) - return scores - else: - raise TypeError("The type of parameter images is illegal.") - except Exception as e: - raise RuntimeError(f"Error in scoring images: {e}") diff --git a/diffsynth/extensions/ImageQualityMetric/imagereward.py b/diffsynth/extensions/ImageQualityMetric/imagereward.py deleted file mode 100644 index 
2760790..0000000 --- a/diffsynth/extensions/ImageQualityMetric/imagereward.py +++ /dev/null @@ -1,212 +0,0 @@ -import os -import torch -from PIL import Image -from typing import List, Union -from torchvision.transforms import Compose, Resize, CenterCrop, ToTensor, Normalize -from .BLIP.blip_pretrain import BLIP_Pretrain -from torchvision.transforms import InterpolationMode -from safetensors.torch import load_file -from .config import MODEL_PATHS -BICUBIC = InterpolationMode.BICUBIC - -def _convert_image_to_rgb(image): - return image.convert("RGB") - -def _transform(n_px): - return Compose([ - Resize(n_px, interpolation=BICUBIC), - CenterCrop(n_px), - _convert_image_to_rgb, - ToTensor(), - Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711)), - ]) - -class MLP(torch.nn.Module): - def __init__(self, input_size): - super().__init__() - self.input_size = input_size - - self.layers = torch.nn.Sequential( - torch.nn.Linear(self.input_size, 1024), - #nn.ReLU(), - torch.nn.Dropout(0.2), - torch.nn.Linear(1024, 128), - #nn.ReLU(), - torch.nn.Dropout(0.2), - torch.nn.Linear(128, 64), - #nn.ReLU(), - torch.nn.Dropout(0.1), - torch.nn.Linear(64, 16), - #nn.ReLU(), - torch.nn.Linear(16, 1) - ) - - # initial MLP param - for name, param in self.layers.named_parameters(): - if 'weight' in name: - torch.nn.init.normal_(param, mean=0.0, std=1.0/(self.input_size+1)) - if 'bias' in name: - torch.nn.init.constant_(param, val=0) - - def forward(self, input): - return self.layers(input) - -class ImageReward(torch.nn.Module): - def __init__(self, med_config, device='cpu', bert_model_path=""): - super().__init__() - self.device = device - - self.blip = BLIP_Pretrain(image_size=224, vit='large', med_config=med_config, bert_model_path=bert_model_path) - self.preprocess = _transform(224) - self.mlp = MLP(768) - - self.mean = 0.16717362830052426 - self.std = 1.0333394966054072 - - def score_grad(self, prompt_ids, prompt_attention_mask, image): - """Calculate the 
score with gradient for a single image and prompt. - - Args: - prompt_ids (torch.Tensor): Tokenized prompt IDs. - prompt_attention_mask (torch.Tensor): Attention mask for the prompt. - image (torch.Tensor): The processed image tensor. - - Returns: - torch.Tensor: The reward score. - """ - image_embeds = self.blip.visual_encoder(image) - image_atts = torch.ones(image_embeds.size()[:-1], dtype=torch.long).to(self.device) - text_output = self.blip.text_encoder( - prompt_ids, - attention_mask=prompt_attention_mask, - encoder_hidden_states=image_embeds, - encoder_attention_mask=image_atts, - return_dict=True, - ) - txt_features = text_output.last_hidden_state[:, 0, :] - rewards = self.mlp(txt_features) - rewards = (rewards - self.mean) / self.std - return rewards - - def score(self, images: Union[str, List[str], Image.Image, List[Image.Image]], prompt: str = "") -> List[float]: - """Score the images based on the prompt. - - Args: - prompt (str): The prompt text. - images (Union[str, List[str], Image.Image, List[Image.Image]]): Path(s) to the image(s) or PIL image(s). - - Returns: - List[float]: List of scores for the images. 
- """ - if isinstance(images, (str, Image.Image)): - # Single image - if isinstance(images, str): - pil_image = Image.open(images) - else: - pil_image = images - image = self.preprocess(pil_image).unsqueeze(0).to(self.device) - return [self._calculate_score(prompt, image).item()] - elif isinstance(images, list): - # Multiple images - scores = [] - for one_image in images: - if isinstance(one_image, str): - pil_image = Image.open(one_image) - elif isinstance(one_image, Image.Image): - pil_image = one_image - else: - raise TypeError("The type of parameter images is illegal.") - image = self.preprocess(pil_image).unsqueeze(0).to(self.device) - scores.append(self._calculate_score(prompt, image).item()) - return scores - else: - raise TypeError("The type of parameter images is illegal.") - - def _calculate_score(self, prompt: str, image: torch.Tensor) -> torch.Tensor: - """Calculate the score for a single image and prompt. - - Args: - prompt (str): The prompt text. - image (torch.Tensor): The processed image tensor. - - Returns: - torch.Tensor: The reward score. - """ - text_input = self.blip.tokenizer(prompt, padding='max_length', truncation=True, max_length=35, return_tensors="pt").to(self.device) - image_embeds = self.blip.visual_encoder(image) - image_atts = torch.ones(image_embeds.size()[:-1], dtype=torch.long).to(self.device) - text_output = self.blip.text_encoder( - text_input.input_ids, - attention_mask=text_input.attention_mask, - encoder_hidden_states=image_embeds, - encoder_attention_mask=image_atts, - return_dict=True, - ) - txt_features = text_output.last_hidden_state[:, 0, :].float() - rewards = self.mlp(txt_features) - rewards = (rewards - self.mean) / self.std - return rewards - - def inference_rank(self, prompt: str, generations_list: List[Union[str, Image.Image]]) -> tuple: - """Rank the images based on the prompt. - - Args: - prompt (str): The prompt text. - generations_list (List[Union[str, Image.Image]]): List of image paths or PIL images. 
- - Returns: - tuple: (indices, rewards) where indices are the ranks and rewards are the scores. - """ - text_input = self.blip.tokenizer(prompt, padding='max_length', truncation=True, max_length=35, return_tensors="pt").to(self.device) - txt_set = [] - for generation in generations_list: - if isinstance(generation, str): - pil_image = Image.open(generation) - elif isinstance(generation, Image.Image): - pil_image = generation - else: - raise TypeError("The type of parameter generations_list is illegal.") - image = self.preprocess(pil_image).unsqueeze(0).to(self.device) - image_embeds = self.blip.visual_encoder(image) - image_atts = torch.ones(image_embeds.size()[:-1], dtype=torch.long).to(self.device) - text_output = self.blip.text_encoder( - text_input.input_ids, - attention_mask=text_input.attention_mask, - encoder_hidden_states=image_embeds, - encoder_attention_mask=image_atts, - return_dict=True, - ) - txt_set.append(text_output.last_hidden_state[:, 0, :]) - txt_features = torch.cat(txt_set, 0).float() - rewards = self.mlp(txt_features) - rewards = (rewards - self.mean) / self.std - rewards = torch.squeeze(rewards) - _, rank = torch.sort(rewards, dim=0, descending=True) - _, indices = torch.sort(rank, dim=0) - indices = indices + 1 - return indices.detach().cpu().numpy().tolist(), rewards.detach().cpu().numpy().tolist() - - -class ImageRewardScore(torch.nn.Module): - def __init__(self, device: Union[str, torch.device], path: str = MODEL_PATHS): - super().__init__() - self.device = device if isinstance(device, torch.device) else torch.device(device) - model_path = path.get("imagereward") - med_config = path.get("med_config") - state_dict = load_file(model_path) - self.model = ImageReward(device=self.device, med_config=med_config, bert_model_path=path.get("bert_model_path")).to(self.device) - self.model.load_state_dict(state_dict, strict=False) - self.model.eval() - - @torch.no_grad() - def score(self, images: Union[str, List[str], Image.Image, 
List[Image.Image]], prompt: str) -> List[float]: - """Score the images based on the prompt. - - Args: - images (Union[str, List[str], Image.Image, List[Image.Image]]): Path(s) to the image(s) or PIL image(s). - prompt (str): The prompt text. - - Returns: - List[float]: List of scores for the images. - """ - return self.model.score(images, prompt) diff --git a/diffsynth/extensions/ImageQualityMetric/mps.py b/diffsynth/extensions/ImageQualityMetric/mps.py deleted file mode 100644 index d15aad4..0000000 --- a/diffsynth/extensions/ImageQualityMetric/mps.py +++ /dev/null @@ -1,129 +0,0 @@ -import numpy as np -import torch -from PIL import Image -from io import BytesIO -from tqdm.auto import tqdm -from transformers import CLIPFeatureExtractor, CLIPImageProcessor -from transformers import CLIPConfig -from dataclasses import dataclass -from transformers import CLIPModel as HFCLIPModel -from safetensors.torch import load_file -from torch import nn, einsum - -from .trainer.models.base_model import BaseModelConfig - -from transformers import CLIPConfig -from transformers import AutoProcessor, AutoModel, AutoTokenizer -from typing import Any, Optional, Tuple, Union, List -import torch - -from .trainer.models.cross_modeling import Cross_model -from .trainer.models import clip_model -import torch.nn.functional as F -import gc -import json -from .config import MODEL_PATHS - -class MPScore(torch.nn.Module): - def __init__(self, device: Union[str, torch.device], path: str = MODEL_PATHS, condition: str = 'overall'): - super().__init__() - """Initialize the MPSModel with a processor, tokenizer, and model. - - Args: - device (Union[str, torch.device]): The device to load the model on. 
- """ - self.device = device - processor_name_or_path = path.get("clip") - self.image_processor = CLIPImageProcessor.from_pretrained(processor_name_or_path) - self.tokenizer = AutoTokenizer.from_pretrained(processor_name_or_path, trust_remote_code=True) - self.model = clip_model.CLIPModel(processor_name_or_path, config_file=True) - state_dict = load_file(path.get("mps")) - self.model.load_state_dict(state_dict, strict=False) - self.model.to(device) - self.condition = condition - - def _calculate_score(self, image: torch.Tensor, prompt: str) -> float: - """Calculate the reward score for a single image and prompt. - - Args: - image (torch.Tensor): The processed image tensor. - prompt (str): The prompt text. - - Returns: - float: The reward score. - """ - def _tokenize(caption): - input_ids = self.tokenizer( - caption, - max_length=self.tokenizer.model_max_length, - padding="max_length", - truncation=True, - return_tensors="pt" - ).input_ids - return input_ids - - text_input = _tokenize(prompt).to(self.device) - if self.condition == 'overall': - condition_prompt = 'light, color, clarity, tone, style, ambiance, artistry, shape, face, hair, hands, limbs, structure, instance, texture, quantity, attributes, position, number, location, word, things' - elif self.condition == 'aesthetics': - condition_prompt = 'light, color, clarity, tone, style, ambiance, artistry' - elif self.condition == 'quality': - condition_prompt = 'shape, face, hair, hands, limbs, structure, instance, texture' - elif self.condition == 'semantic': - condition_prompt = 'quantity, attributes, position, number, location' - else: - raise ValueError( - f"Unsupported condition: {self.condition}. 
Choose 'overall', 'aesthetics', 'quality', or 'semantic'.") - condition_batch = _tokenize(condition_prompt).repeat(text_input.shape[0], 1).to(self.device) - - with torch.no_grad(): - text_f, text_features = self.model.model.get_text_features(text_input) - - image_f = self.model.model.get_image_features(image.half()) - condition_f, _ = self.model.model.get_text_features(condition_batch) - - sim_text_condition = einsum('b i d, b j d -> b j i', text_f, condition_f) - sim_text_condition = torch.max(sim_text_condition, dim=1, keepdim=True)[0] - sim_text_condition = sim_text_condition / sim_text_condition.max() - mask = torch.where(sim_text_condition > 0.3, 0, float('-inf')) - mask = mask.repeat(1, image_f.shape[1], 1) - image_features = self.model.cross_model(image_f, text_f, mask.half())[:, 0, :] - - image_features = image_features / image_features.norm(dim=-1, keepdim=True) - text_features = text_features / text_features.norm(dim=-1, keepdim=True) - image_score = self.model.logit_scale.exp() * text_features @ image_features.T - - return image_score[0].cpu().numpy().item() - - @torch.no_grad() - def score(self, images: Union[str, List[str], Image.Image, List[Image.Image]], prompt: str) -> List[float]: - """Score the images based on the prompt. - - Args: - images (Union[str, List[str], Image.Image, List[Image.Image]]): Path(s) to the image(s) or PIL image(s). - prompt (str): The prompt text. - - Returns: - List[float]: List of reward scores for the images. 
- """ - if isinstance(images, (str, Image.Image)): - # Single image - if isinstance(images, str): - image = self.image_processor(Image.open(images), return_tensors="pt")["pixel_values"].to(self.device) - else: - image = self.image_processor(images, return_tensors="pt")["pixel_values"].to(self.device) - return [self._calculate_score(image, prompt)] - elif isinstance(images, list): - # Multiple images - scores = [] - for one_images in images: - if isinstance(one_images, str): - image = self.image_processor(Image.open(one_images), return_tensors="pt")["pixel_values"].to(self.device) - elif isinstance(one_images, Image.Image): - image = self.image_processor(one_images, return_tensors="pt")["pixel_values"].to(self.device) - else: - raise TypeError("The type of parameter images is illegal.") - scores.append(self._calculate_score(image, prompt)) - return scores - else: - raise TypeError("The type of parameter images is illegal.") diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/__init__.py b/diffsynth/extensions/ImageQualityMetric/open_clip/__init__.py deleted file mode 100644 index 1560db0..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -from .coca_model import CoCa -from .constants import OPENAI_DATASET_MEAN, OPENAI_DATASET_STD -from .factory import create_model, create_model_and_transforms, create_model_from_pretrained, get_tokenizer, create_loss -from .factory import list_models, add_model_config, get_model_config, load_checkpoint -from .loss import ClipLoss, DistillClipLoss, CoCaLoss -from .model import CLIP, CustomTextCLIP, CLIPTextCfg, CLIPVisionCfg, \ - convert_weights_to_lp, convert_weights_to_fp16, trace_model, get_cast_dtype -from .openai import load_openai_model, list_openai_models -from .pretrained import list_pretrained, list_pretrained_models_by_tag, list_pretrained_tags_by_model, \ - get_pretrained_url, download_pretrained_from_url, is_pretrained_cfg, get_pretrained_cfg, 
download_pretrained -from .push_to_hf_hub import push_pretrained_to_hf_hub, push_to_hf_hub -from .tokenizer import SimpleTokenizer -from .transform import image_transform, AugmentationCfg -from .utils import freeze_batch_norm_2d diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/coca_model.py b/diffsynth/extensions/ImageQualityMetric/open_clip/coca_model.py deleted file mode 100644 index 039453a..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/coca_model.py +++ /dev/null @@ -1,458 +0,0 @@ -from typing import Optional - -import torch -from torch import nn -from torch.nn import functional as F -import numpy as np -from dataclasses import dataclass - -from .transformer import ( - LayerNormFp32, - LayerNorm, - QuickGELU, - MultimodalTransformer, -) -from .model import CLIPTextCfg, CLIPVisionCfg, _build_vision_tower, _build_text_tower - -try: - from transformers import ( - BeamSearchScorer, - LogitsProcessorList, - TopPLogitsWarper, - TopKLogitsWarper, - RepetitionPenaltyLogitsProcessor, - MinLengthLogitsProcessor, - MaxLengthCriteria, - StoppingCriteriaList - ) - - GENERATION_TYPES = { - "top_k": TopKLogitsWarper, - "top_p": TopPLogitsWarper, - "beam_search": "beam_search" - } - _has_transformers = True -except ImportError as e: - GENERATION_TYPES = { - "top_k": None, - "top_p": None, - "beam_search": "beam_search" - } - _has_transformers = False - - -@dataclass -class MultimodalCfg(CLIPTextCfg): - mlp_ratio: int = 4 - dim_head: int = 64 - heads: int = 8 - n_queries: int = 256 - attn_pooler_heads: int = 8 - - -def _build_text_decoder_tower( - embed_dim, - multimodal_cfg, - quick_gelu: bool = False, - cast_dtype: Optional[torch.dtype] = None, -): - multimodal_cfg = MultimodalCfg(**multimodal_cfg) if isinstance(multimodal_cfg, dict) else multimodal_cfg - act_layer = QuickGELU if quick_gelu else nn.GELU - norm_layer = ( - LayerNormFp32 if cast_dtype in (torch.float16, torch.bfloat16) else LayerNorm - ) - - decoder = MultimodalTransformer( - 
context_length=multimodal_cfg.context_length, - width=multimodal_cfg.width, - heads=multimodal_cfg.heads, - layers=multimodal_cfg.layers, - ls_init_value=multimodal_cfg.ls_init_value, - output_dim=embed_dim, - act_layer=act_layer, - norm_layer=norm_layer, - ) - - return decoder - - -class CoCa(nn.Module): - def __init__( - self, - embed_dim, - multimodal_cfg: MultimodalCfg, - text_cfg: CLIPTextCfg, - vision_cfg: CLIPVisionCfg, - quick_gelu: bool = False, - cast_dtype: Optional[torch.dtype] = None, - pad_id: int = 0, - ): - super().__init__() - multimodal_cfg = MultimodalCfg(**multimodal_cfg) if isinstance(multimodal_cfg, dict) else multimodal_cfg - text_cfg = CLIPTextCfg(**text_cfg) if isinstance(text_cfg, dict) else text_cfg - vision_cfg = CLIPVisionCfg(**vision_cfg) if isinstance(vision_cfg, dict) else vision_cfg - - self.text = _build_text_tower( - embed_dim=embed_dim, - text_cfg=text_cfg, - quick_gelu=quick_gelu, - cast_dtype=cast_dtype, - ) - - vocab_size = ( - text_cfg.vocab_size # for hf models - if hasattr(text_cfg, "hf_model_name") and text_cfg.hf_model_name is not None - else text_cfg.vocab_size - ) - - self.visual = _build_vision_tower( - embed_dim=embed_dim, - vision_cfg=vision_cfg, - quick_gelu=quick_gelu, - cast_dtype=cast_dtype, - ) - - self.text_decoder = _build_text_decoder_tower( - vocab_size, - multimodal_cfg=multimodal_cfg, - quick_gelu=quick_gelu, - cast_dtype=cast_dtype, - ) - - self.logit_scale = nn.Parameter(torch.ones([]) * np.log(1 / 0.07)) - self.pad_id = pad_id - - @torch.jit.ignore - def set_grad_checkpointing(self, enable=True): - self.visual.set_grad_checkpointing(enable) - self.text.set_grad_checkpointing(enable) - self.text_decoder.set_grad_checkpointing(enable) - - def _encode_image(self, images, normalize=True): - image_latent, tokens_embs = self.visual(images) - image_latent = F.normalize(image_latent, dim=-1) if normalize else image_latent - return image_latent, tokens_embs - - def _encode_text(self, text, normalize=True, 
embed_cls=True): - text = text[:, :-1] if embed_cls else text # make space for CLS token - text_latent, token_emb = self.text(text) - text_latent = F.normalize(text_latent, dim=-1) if normalize else text_latent - return text_latent, token_emb - - def encode_image(self, images, normalize=True): - image_latent, _ = self._encode_image(images, normalize=normalize) - return image_latent - - def encode_text(self, text, normalize=True, embed_cls=True): - text_latent, _ = self._encode_text(text, normalize=normalize, embed_cls=embed_cls) - return text_latent - - def forward(self, image, text, embed_cls=True, image_latent=None, image_embs=None): - text_latent, token_embs = self._encode_text(text, embed_cls=embed_cls) - if image_latent is None or image_embs is None: - image_latent, image_embs = self._encode_image(image) - - # TODO: add assertion to avoid bugs? - labels = text[:, -token_embs.shape[1]:] - - logits = self.text_decoder(image_embs, token_embs) - return { - "image_features": image_latent, - "text_features": text_latent, - "logits": logits, - "labels": labels, - "logit_scale": self.logit_scale.exp() - } - - def generate( - self, - image, - text=None, - seq_len=30, - max_seq_len=77, - temperature=1., - generation_type="beam_search", - top_p=0.1, # keep tokens in the 1 - top_p quantile - top_k=1, # keeps the top_k most probable tokens - pad_token_id=None, - eos_token_id=None, - sot_token_id=None, - num_beams=6, - num_beam_groups=3, - min_seq_len=5, - stopping_criteria=None, - repetition_penalty=1.0, - fixed_output_length=False # if True output.shape == (batch_size, seq_len) - ): - # taking many ideas and components from HuggingFace GenerationMixin - # https://huggingface.co/docs/transformers/main/en/main_classes/text_generation - assert _has_transformers, "Please install transformers for generate functionality. `pip install transformers`." 
- assert seq_len > min_seq_len, "seq_len must be larger than min_seq_len" - - with torch.no_grad(): - sot_token_id = 49406 if sot_token_id is None else sot_token_id - eos_token_id = 49407 if eos_token_id is None else eos_token_id - pad_token_id = self.pad_id if pad_token_id is None else pad_token_id - logit_processor = LogitsProcessorList( - [ - MinLengthLogitsProcessor(min_seq_len, eos_token_id), - RepetitionPenaltyLogitsProcessor(repetition_penalty), - ] - ) - - if stopping_criteria is None: - stopping_criteria = [MaxLengthCriteria(max_length=seq_len)] - - stopping_criteria = StoppingCriteriaList( - stopping_criteria - ) - - device = image.device - - if generation_type == "beam_search": - output = self._generate_beamsearch( - image_inputs = image, - pad_token_id=pad_token_id, - eos_token_id=eos_token_id, - sot_token_id=sot_token_id, - num_beams=num_beams, - num_beam_groups=num_beam_groups, - min_seq_len=min_seq_len, - stopping_criteria=stopping_criteria, - logit_processor=logit_processor, - ) - if fixed_output_length and output.shape[1] < seq_len: - return torch.cat( - (output, torch.ones(output.shape[0], seq_len-output.shape[1], device=device, dtype=output.dtype) * self.pad_id), - dim=1 - ) - return output - - elif generation_type == "top_p": - logit_warper = GENERATION_TYPES[generation_type](top_p) - elif generation_type == "top_k": - logit_warper = GENERATION_TYPES[generation_type](top_k) - else: - raise ValueError( - f"generation_type has to be one of " - f"{'| ' + ' | '.join(list(GENERATION_TYPES.keys())) + ' |'}." 
- ) - - image_latent, image_embs = self._encode_image(image) - - if text is None: - text = torch.ones((image.shape[0], 1), device=device, dtype=torch.long) * sot_token_id - - was_training = self.training - num_dims = len(text.shape) - - if num_dims == 1: - text = text[None, :] - - cur_len = text.shape[1] - self.eval() - out = text - - while True: - x = out[:, -max_seq_len:] - cur_len = x.shape[1] - logits = self(image, x, image_latent=image_latent, image_embs=image_embs, embed_cls=False)["logits"][:, -1] - mask = (out[:, -1] == eos_token_id) | (out[:, -1] == pad_token_id) - sample = torch.ones((out.shape[0], 1), device=device, dtype=torch.long) * pad_token_id - - if mask.all(): - if not fixed_output_length: - break - else: - logits = logits[~mask, :] - filtered_logits = logit_processor(x[~mask, :], logits) - filtered_logits = logit_warper(x[~mask, :], filtered_logits) - probs = F.softmax(filtered_logits / temperature, dim=-1) - - if (cur_len + 1 == seq_len): - sample[~mask, :] = torch.ones((sum(~mask), 1), device=device, dtype=torch.long) * eos_token_id - else: - sample[~mask, :] = torch.multinomial(probs, 1) - - out = torch.cat((out, sample), dim=-1) - - cur_len += 1 - - if stopping_criteria(out, None): - break - - if num_dims == 1: - out = out.squeeze(0) - - self.train(was_training) - return out - - def _generate_beamsearch( - self, - image_inputs, - pad_token_id=None, - eos_token_id=None, - sot_token_id=None, - num_beams=6, - num_beam_groups=3, - min_seq_len=5, - stopping_criteria=None, - logit_processor=None, - logit_warper=None, - ): - device = image_inputs.device - batch_size = image_inputs.shape[0] - image_inputs = torch.repeat_interleave(image_inputs, num_beams, dim=0) - image_latent, image_embs = self._encode_image(image_inputs) - - input_ids = torch.ones((batch_size * num_beams, 1), device=device, dtype=torch.long) - input_ids = input_ids * sot_token_id - beam_scorer = BeamSearchScorer( - batch_size=batch_size, - num_beams=num_beams, - device=device, - 
num_beam_groups=num_beam_groups, - ) - # instantiate logits processors - logits_processor = ( - LogitsProcessorList([MinLengthLogitsProcessor(min_seq_len, eos_token_id=eos_token_id)]) - if logit_processor is None - else logit_processor - ) - - batch_size = len(beam_scorer._beam_hyps) - num_beams = beam_scorer.num_beams - num_beam_groups = beam_scorer.num_beam_groups - num_sub_beams = num_beams // num_beam_groups - batch_beam_size, cur_len = input_ids.shape - beam_indices = None - - if num_beams * batch_size != batch_beam_size: - raise ValueError( - f"Batch dimension of `input_ids` should be {num_beams * batch_size}, but is {batch_beam_size}." - ) - - beam_scores = torch.full((batch_size, num_beams), -1e9, dtype=torch.float, device=device) - # initialise score of first beam of each group with 0 and the rest with 1e-9. This ensures that the beams in - # the same group don't produce same tokens everytime. - beam_scores[:, ::num_sub_beams] = 0 - beam_scores = beam_scores.view((batch_size * num_beams,)) - - while True: - - # predicted tokens in cur_len step - current_tokens = torch.zeros(batch_size * num_beams, dtype=input_ids.dtype, device=device) - - # indices which will form the beams in the next time step - reordering_indices = torch.zeros(batch_size * num_beams, dtype=torch.long, device=device) - - # do one decoder step on all beams of all sentences in batch - model_inputs = prepare_inputs_for_generation(input_ids=input_ids, image_inputs=image_inputs) - outputs = self( - model_inputs['images'], - model_inputs['text'], - embed_cls=False, - image_latent=image_latent, - image_embs=image_embs - ) - - for beam_group_idx in range(num_beam_groups): - group_start_idx = beam_group_idx * num_sub_beams - group_end_idx = min(group_start_idx + num_sub_beams, num_beams) - group_size = group_end_idx - group_start_idx - - # indices of beams of current group among all sentences in batch - batch_group_indices = [] - - for batch_idx in range(batch_size): - batch_group_indices.extend( 
- [batch_idx * num_beams + idx for idx in range(group_start_idx, group_end_idx)] - ) - group_input_ids = input_ids[batch_group_indices] - - # select outputs of beams of currentg group only - next_token_logits = outputs['logits'][batch_group_indices, -1, :] - vocab_size = next_token_logits.shape[-1] - - next_token_scores_processed = logits_processor( - group_input_ids, next_token_logits, current_tokens=current_tokens, beam_group_idx=beam_group_idx - ) - next_token_scores = next_token_scores_processed + beam_scores[batch_group_indices].unsqueeze(-1) - next_token_scores = next_token_scores.expand_as(next_token_scores_processed) - - # reshape for beam search - next_token_scores = next_token_scores.view(batch_size, group_size * vocab_size) - - next_token_scores, next_tokens = torch.topk( - next_token_scores, 2 * group_size, dim=1, largest=True, sorted=True - ) - - next_indices = torch.div(next_tokens, vocab_size, rounding_mode="floor") - next_tokens = next_tokens % vocab_size - - # stateless - process_beam_indices = sum(beam_indices, ()) if beam_indices is not None else None - beam_outputs = beam_scorer.process( - group_input_ids, - next_token_scores, - next_tokens, - next_indices, - pad_token_id=pad_token_id, - eos_token_id=eos_token_id, - beam_indices=process_beam_indices, - ) - beam_scores[batch_group_indices] = beam_outputs["next_beam_scores"] - beam_next_tokens = beam_outputs["next_beam_tokens"] - beam_idx = beam_outputs["next_beam_indices"] - - input_ids[batch_group_indices] = group_input_ids[beam_idx] - group_input_ids = torch.cat([group_input_ids[beam_idx, :], beam_next_tokens.unsqueeze(-1)], dim=-1) - current_tokens[batch_group_indices] = group_input_ids[:, -1] - - # (beam_idx // group_size) -> batch_idx - # (beam_idx % group_size) -> offset of idx inside the group - reordering_indices[batch_group_indices] = ( - num_beams * torch.div(beam_idx, group_size, rounding_mode="floor") + group_start_idx + (beam_idx % group_size) - ) - - input_ids = 
torch.cat([input_ids, current_tokens.unsqueeze(-1)], dim=-1) - - # increase cur_len - cur_len = cur_len + 1 - if beam_scorer.is_done or stopping_criteria(input_ids, None): - break - - final_beam_indices = sum(beam_indices, ()) if beam_indices is not None else None - sequence_outputs = beam_scorer.finalize( - input_ids, - beam_scores, - next_tokens, - next_indices, - pad_token_id=pad_token_id, - eos_token_id=eos_token_id, - max_length=stopping_criteria.max_length, - beam_indices=final_beam_indices, - ) - return sequence_outputs['sequences'] - - -def prepare_inputs_for_generation(input_ids, image_inputs, past=None, **kwargs): - if past: - input_ids = input_ids[:, -1].unsqueeze(-1) - - attention_mask = kwargs.get("attention_mask", None) - position_ids = kwargs.get("position_ids", None) - - if attention_mask is not None and position_ids is None: - # create position_ids on the fly for batch generation - position_ids = attention_mask.long().cumsum(-1) - 1 - position_ids.masked_fill_(attention_mask == 0, 1) - else: - position_ids = None - return { - "text": input_ids, - "images": image_inputs, - "past_key_values": past, - "position_ids": position_ids, - "attention_mask": attention_mask, - } diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/constants.py b/diffsynth/extensions/ImageQualityMetric/open_clip/constants.py deleted file mode 100644 index a670bb3..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/constants.py +++ /dev/null @@ -1,2 +0,0 @@ -OPENAI_DATASET_MEAN = (0.48145466, 0.4578275, 0.40821073) -OPENAI_DATASET_STD = (0.26862954, 0.26130258, 0.27577711) diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/factory.py b/diffsynth/extensions/ImageQualityMetric/open_clip/factory.py deleted file mode 100644 index 5bd51a1..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/factory.py +++ /dev/null @@ -1,433 +0,0 @@ -import json -import logging -import os -import pathlib -import re -from copy import deepcopy -from 
pathlib import Path -# from turtle import forward -from typing import Any, Dict, Optional, Tuple, Union - -import torch - -from .constants import OPENAI_DATASET_MEAN, OPENAI_DATASET_STD -from .model import CLIP, CustomTextCLIP, convert_weights_to_lp, convert_to_custom_text_state_dict,\ - resize_pos_embed, get_cast_dtype -from .coca_model import CoCa -from .loss import ClipLoss, DistillClipLoss, CoCaLoss -from .openai import load_openai_model -from .pretrained import is_pretrained_cfg, get_pretrained_cfg, download_pretrained, list_pretrained_tags_by_model, download_pretrained_from_hf -from .transform import image_transform, AugmentationCfg -from .tokenizer import HFTokenizer, SimpleTokenizer - - -HF_HUB_PREFIX = 'hf-hub:' -_MODEL_CONFIG_PATHS = [Path(__file__).parent / f"model_configs/"] -_MODEL_CONFIGS = {} # directory (model_name: config) of model architecture configs - - -def _natural_key(string_): - return [int(s) if s.isdigit() else s for s in re.split(r'(\d+)', string_.lower())] - - -def _rescan_model_configs(): - global _MODEL_CONFIGS - - config_ext = ('.json',) - config_files = [] - for config_path in _MODEL_CONFIG_PATHS: - if config_path.is_file() and config_path.suffix in config_ext: - config_files.append(config_path) - elif config_path.is_dir(): - for ext in config_ext: - config_files.extend(config_path.glob(f'*{ext}')) - - for cf in config_files: - with open(cf, 'r') as f: - model_cfg = json.load(f) - if all(a in model_cfg for a in ('embed_dim', 'vision_cfg', 'text_cfg')): - _MODEL_CONFIGS[cf.stem] = model_cfg - - _MODEL_CONFIGS = {k: v for k, v in sorted(_MODEL_CONFIGS.items(), key=lambda x: _natural_key(x[0]))} - - -_rescan_model_configs() # initial populate of model config registry - - -def list_models(): - """ enumerate available model architectures based on config files """ - return list(_MODEL_CONFIGS.keys()) - - -def add_model_config(path): - """ add model config path or file and update registry """ - if not isinstance(path, Path): - path = 
Path(path) - _MODEL_CONFIG_PATHS.append(path) - _rescan_model_configs() - - -def get_model_config(model_name): - if model_name in _MODEL_CONFIGS: - return deepcopy(_MODEL_CONFIGS[model_name]) - else: - return None - - -def get_tokenizer(model_name, open_clip_bpe_path=None): - if model_name.startswith(HF_HUB_PREFIX): - tokenizer = HFTokenizer(model_name[len(HF_HUB_PREFIX):]) - else: - config = get_model_config(model_name) - tokenizer = HFTokenizer( - config['text_cfg']['hf_tokenizer_name']) if 'hf_tokenizer_name' in config['text_cfg'] else SimpleTokenizer(open_clip_bpe_path) - return tokenizer - - -def load_state_dict(checkpoint_path: str, map_location='cpu'): - checkpoint = torch.load(checkpoint_path, map_location=map_location) - if isinstance(checkpoint, dict) and 'state_dict' in checkpoint: - state_dict = checkpoint['state_dict'] - else: - state_dict = checkpoint - if next(iter(state_dict.items()))[0].startswith('module'): - state_dict = {k[7:]: v for k, v in state_dict.items()} - return state_dict - - -def load_checkpoint(model, checkpoint_path, strict=True): - state_dict = load_state_dict(checkpoint_path) - # detect old format and make compatible with new format - if 'positional_embedding' in state_dict and not hasattr(model, 'positional_embedding'): - state_dict = convert_to_custom_text_state_dict(state_dict) - resize_pos_embed(state_dict, model) - incompatible_keys = model.load_state_dict(state_dict, strict=strict) - return incompatible_keys - - -def create_model( - model_name: str, - pretrained: Optional[str] = None, - precision: str = 'fp32', - device: Union[str, torch.device] = 'cpu', - jit: bool = False, - force_quick_gelu: bool = False, - force_custom_text: bool = False, - force_patch_dropout: Optional[float] = None, - force_image_size: Optional[Union[int, Tuple[int, int]]] = None, - pretrained_image: bool = False, - pretrained_hf: bool = True, - cache_dir: Optional[str] = None, - output_dict: Optional[bool] = None, - require_pretrained: bool = False, 
-): - has_hf_hub_prefix = model_name.startswith(HF_HUB_PREFIX) - if has_hf_hub_prefix: - model_id = model_name[len(HF_HUB_PREFIX):] - checkpoint_path = download_pretrained_from_hf(model_id, cache_dir=cache_dir) - config_path = download_pretrained_from_hf(model_id, filename='open_clip_config.json', cache_dir=cache_dir) - - with open(config_path, 'r', encoding='utf-8') as f: - config = json.load(f) - pretrained_cfg = config['preprocess_cfg'] - model_cfg = config['model_cfg'] - else: - model_name = model_name.replace('/', '-') # for callers using old naming with / in ViT names - checkpoint_path = None - pretrained_cfg = {} - model_cfg = None - - if isinstance(device, str): - device = torch.device(device) - - if pretrained and pretrained.lower() == 'openai': - logging.info(f'Loading pretrained {model_name} from OpenAI.') - model = load_openai_model( - model_name, - precision=precision, - device=device, - jit=jit, - cache_dir=cache_dir, - ) - - # to always output dict even if it is clip - if output_dict and hasattr(model, "output_dict"): - model.output_dict = True - else: - model_cfg = model_cfg or get_model_config(model_name) - if model_cfg is not None: - logging.info(f'Loaded {model_name} model config.') - else: - logging.error(f'Model config for {model_name} not found; available models {list_models()}.') - raise RuntimeError(f'Model config for {model_name} not found.') - - if force_quick_gelu: - # override for use of QuickGELU on non-OpenAI transformer models - model_cfg["quick_gelu"] = True - - if force_patch_dropout is not None: - # override the default patch dropout value - model_cfg["vision_cfg"]["patch_dropout"] = force_patch_dropout - - if force_image_size is not None: - # override model config's image size - model_cfg["vision_cfg"]["image_size"] = force_image_size - - if pretrained_image: - if 'timm_model_name' in model_cfg.get('vision_cfg', {}): - # pretrained weight loading for timm models set via vision_cfg - model_cfg['vision_cfg']['timm_model_pretrained'] 
= True - else: - assert False, 'pretrained image towers currently only supported for timm models' - - cast_dtype = get_cast_dtype(precision) - is_hf_model = 'hf_model_name' in model_cfg.get('text_cfg', {}) - custom_text = model_cfg.pop('custom_text', False) or force_custom_text or is_hf_model - - if custom_text: - if is_hf_model: - model_cfg['text_cfg']['hf_model_pretrained'] = pretrained_hf - if "coca" in model_name: - model = CoCa(**model_cfg, cast_dtype=cast_dtype) - else: - model = CustomTextCLIP(**model_cfg, cast_dtype=cast_dtype) - else: - model = CLIP(**model_cfg, cast_dtype=cast_dtype) - - pretrained_loaded = False - if pretrained: - checkpoint_path = '' - pretrained_cfg = get_pretrained_cfg(model_name, pretrained) - if pretrained_cfg: - checkpoint_path = download_pretrained(pretrained_cfg, cache_dir=cache_dir) - elif os.path.exists(pretrained): - checkpoint_path = pretrained - - if checkpoint_path: - logging.info(f'Loading pretrained {model_name} weights ({pretrained}).') - load_checkpoint(model, checkpoint_path) - else: - error_str = ( - f'Pretrained weights ({pretrained}) not found for model {model_name}.' 
- f'Available pretrained tags ({list_pretrained_tags_by_model(model_name)}.') - logging.warning(error_str) - raise RuntimeError(error_str) - pretrained_loaded = True - elif has_hf_hub_prefix: - logging.info(f'Loading pretrained {model_name} weights ({pretrained}).') - load_checkpoint(model, checkpoint_path) - pretrained_loaded = True - - if require_pretrained and not pretrained_loaded: - # callers of create_model_from_pretrained always expect pretrained weights - raise RuntimeError( - f'Pretrained weights were required for (model: {model_name}, pretrained: {pretrained}) but not loaded.') - - model.to(device=device) - if precision in ("fp16", "bf16"): - convert_weights_to_lp(model, dtype=torch.bfloat16 if precision == 'bf16' else torch.float16) - - # set image / mean metadata from pretrained_cfg if available, or use default - model.visual.image_mean = pretrained_cfg.get('mean', None) or OPENAI_DATASET_MEAN - model.visual.image_std = pretrained_cfg.get('std', None) or OPENAI_DATASET_STD - - # to always output dict even if it is clip - if output_dict and hasattr(model, "output_dict"): - model.output_dict = True - - if jit: - model = torch.jit.script(model) - - return model - - -def create_loss(args): - if args.distill: - return DistillClipLoss( - local_loss=args.local_loss, - gather_with_grad=args.gather_with_grad, - cache_labels=True, - rank=args.rank, - world_size=args.world_size, - use_horovod=args.horovod, - ) - elif "coca" in args.model.lower(): - return CoCaLoss( - caption_loss_weight=args.coca_caption_loss_weight, - clip_loss_weight=args.coca_contrastive_loss_weight, - local_loss=args.local_loss, - gather_with_grad=args.gather_with_grad, - cache_labels=True, - rank=args.rank, - world_size=args.world_size, - use_horovod=args.horovod, - ) - return ClipLoss( - local_loss=args.local_loss, - gather_with_grad=args.gather_with_grad, - cache_labels=True, - rank=args.rank, - world_size=args.world_size, - use_horovod=args.horovod, - ) - -class MLP(torch.nn.Module): - def 
__init__(self, input_size): - super().__init__() - self.input_size = input_size - self.layers = torch.nn.Sequential( - torch.nn.Linear(self.input_size, 1024), - torch.nn.Dropout(0.2), - torch.nn.Linear(1024, 128), - torch.nn.Dropout(0.2), - torch.nn.Linear(128, 64), - torch.nn.Dropout(0.1), - torch.nn.Linear(64, 16), - torch.nn.Linear(16, 1) - ) - - def forward(self, x): - return self.layers(x) - -# class semantic_head(torch.nn.Module): -# def __init__(self, input_size): -# super().__init__() -# self.input_size = input_size # for ViT-L-14 is 1024 -# self.seg_head = torch.nn.Sequential( -# torch.nn.Linear(input_size, 128), -# torch.nn.Dropout(0.2), -# torch.nn.Linear(128, 64), -# torch.nn.Dropout(0.1), -# torch.nn.Linear(64, 16), -# torch.nn.Linear(16, 1), -# ) -# self.sigmoid = torch.nn.Sigmoid() - -# def forward(self, x): -# return self.sigmoid(self.seg_head(x)) - -def create_model_and_transforms( - model_name: str, - pretrained: Optional[str] = None, - precision: str = 'fp32', - device: Union[str, torch.device] = 'cpu', - jit: bool = False, - force_quick_gelu: bool = False, - force_custom_text: bool = False, - force_patch_dropout: Optional[float] = None, - force_image_size: Optional[Union[int, Tuple[int, int]]] = None, - pretrained_image: bool = False, - pretrained_hf: bool = True, - image_mean: Optional[Tuple[float, ...]] = None, - image_std: Optional[Tuple[float, ...]] = None, - aug_cfg: Optional[Union[Dict[str, Any], AugmentationCfg]] = None, - cache_dir: Optional[str] = None, - light_augmentation = False, - output_dict: Optional[bool] = None, - with_score_predictor: bool = False, - with_region_predictor: bool = False -): - model = create_model( - model_name, - pretrained, - precision=precision, - device=device, - jit=jit, - force_quick_gelu=force_quick_gelu, - force_custom_text=force_custom_text, - force_patch_dropout=force_patch_dropout, - force_image_size=force_image_size, - pretrained_image=pretrained_image, - pretrained_hf=pretrained_hf, - 
cache_dir=cache_dir, - output_dict=output_dict, - ) - - image_mean = image_mean or getattr(model.visual, 'image_mean', None) - image_std = image_std or getattr(model.visual, 'image_std', None) - - if with_score_predictor: - model.score_predictor = MLP(model.visual.proj.size(1)).to(device=device, dtype=model.visual.proj.dtype) - - if with_region_predictor: - # model.region_predictor = semantic_head(model.visual.proj.size(1)).to(device=device, dtype=model.visual.proj.dtype) - model.region_predictor = torch.nn.Linear(model.visual.proj.size(0), 1).to(device=device, dtype=model.visual.proj.dtype) - # preprocess_train = image_transform_region( - # model.visual.image_size, - # is_train=True, - # mean=image_mean, - # std=image_std - # ) - # preprocess_val = image_transform_region( - # model.visual.image_size, - # is_train=False, - # mean=image_mean, - # std=image_std - # ) - - if light_augmentation: - preprocess_val = image_transform( - model.visual.image_size, - is_train=False, - mean=image_mean, - std=image_std, - resize_longest_max=True, - ) - preprocess_train = preprocess_val - else: - preprocess_train = image_transform( - model.visual.image_size, - is_train=True, - mean=image_mean, - std=image_std - ) - preprocess_val = image_transform( - model.visual.image_size, - is_train=False, - mean=image_mean, - std=image_std - ) - - return model, preprocess_train, preprocess_val - - -def create_model_from_pretrained( - model_name: str, - pretrained: Optional[str] = None, - precision: str = 'fp32', - device: Union[str, torch.device] = 'cpu', - jit: bool = False, - force_quick_gelu: bool = False, - force_custom_text: bool = False, - force_image_size: Optional[Union[int, Tuple[int, int]]] = None, - return_transform: bool = True, - image_mean: Optional[Tuple[float, ...]] = None, - image_std: Optional[Tuple[float, ...]] = None, - cache_dir: Optional[str] = None, -): - model = create_model( - model_name, - pretrained, - precision=precision, - device=device, - jit=jit, - 
force_quick_gelu=force_quick_gelu, - force_custom_text=force_custom_text, - force_image_size=force_image_size, - cache_dir=cache_dir, - require_pretrained=True, - ) - - if not return_transform: - return model - - image_mean = image_mean or getattr(model.visual, 'image_mean', None) - image_std = image_std or getattr(model.visual, 'image_std', None) - preprocess = image_transform( - model.visual.image_size, - is_train=False, - mean=image_mean, - std=image_std, - ) - - return model, preprocess diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/hf_configs.py b/diffsynth/extensions/ImageQualityMetric/open_clip/hf_configs.py deleted file mode 100644 index e236222..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/hf_configs.py +++ /dev/null @@ -1,45 +0,0 @@ -# HF architecture dict: -arch_dict = { - # https://huggingface.co/docs/transformers/model_doc/roberta#roberta - "roberta": { - "config_names": { - "context_length": "max_position_embeddings", - "vocab_size": "vocab_size", - "width": "hidden_size", - "heads": "num_attention_heads", - "layers": "num_hidden_layers", - "layer_attr": "layer", - "token_embeddings_attr": "embeddings" - }, - "pooler": "mean_pooler", - }, - # https://huggingface.co/docs/transformers/model_doc/xlm-roberta#transformers.XLMRobertaConfig - "xlm-roberta": { - "config_names": { - "context_length": "max_position_embeddings", - "vocab_size": "vocab_size", - "width": "hidden_size", - "heads": "num_attention_heads", - "layers": "num_hidden_layers", - "layer_attr": "layer", - "token_embeddings_attr": "embeddings" - }, - "pooler": "mean_pooler", - }, - # https://huggingface.co/docs/transformers/model_doc/mt5#mt5 - "mt5": { - "config_names": { - # unlimited seqlen - # https://github.com/google-research/text-to-text-transfer-transformer/issues/273 - # https://github.com/huggingface/transformers/blob/v4.24.0/src/transformers/models/t5/modeling_t5.py#L374 - "context_length": "", - "vocab_size": "vocab_size", - "width": "d_model", - 
"heads": "num_heads", - "layers": "num_layers", - "layer_attr": "block", - "token_embeddings_attr": "embed_tokens" - }, - "pooler": "mean_pooler", - }, -} diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/hf_model.py b/diffsynth/extensions/ImageQualityMetric/open_clip/hf_model.py deleted file mode 100644 index fbccc81..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/hf_model.py +++ /dev/null @@ -1,176 +0,0 @@ -""" huggingface model adapter - -Wraps HuggingFace transformers (https://github.com/huggingface/transformers) models for use as a text tower in CLIP model. -""" - -import re - -import torch -import torch.nn as nn -from torch import TensorType - -try: - import transformers - from transformers import AutoModel, AutoTokenizer, AutoConfig, PretrainedConfig - from transformers.modeling_outputs import BaseModelOutput, BaseModelOutputWithPooling, \ - BaseModelOutputWithPoolingAndCrossAttentions -except ImportError as e: - transformers = None - - - class BaseModelOutput: - pass - - - class PretrainedConfig: - pass - -from .hf_configs import arch_dict - - -# utils -def _camel2snake(s): - return re.sub(r'(? 
torch.Tensor: - # calculated ground-truth and cache if enabled - if self.prev_num_logits != num_logits or device not in self.labels: - labels = torch.arange(num_logits, device=device, dtype=torch.long) - if self.world_size > 1 and self.local_loss: - labels = labels + num_logits * self.rank - if self.cache_labels: - self.labels[device] = labels - self.prev_num_logits = num_logits - else: - labels = self.labels[device] - return labels - - def get_logits(self, image_features, text_features, logit_scale): - if self.world_size > 1: - all_image_features, all_text_features = gather_features( - image_features, text_features, - self.local_loss, self.gather_with_grad, self.rank, self.world_size, self.use_horovod) - - if self.local_loss: - logits_per_image = logit_scale * image_features @ all_text_features.T - logits_per_text = logit_scale * text_features @ all_image_features.T - else: - logits_per_image = logit_scale * all_image_features @ all_text_features.T - logits_per_text = logits_per_image.T - else: - logits_per_image = logit_scale * image_features @ text_features.T - logits_per_text = logit_scale * text_features @ image_features.T - - return logits_per_image, logits_per_text - - def forward(self, image_features, text_features, logit_scale, output_dict=False): - device = image_features.device - logits_per_image, logits_per_text = self.get_logits(image_features, text_features, logit_scale) - - labels = self.get_ground_truth(device, logits_per_image.shape[0]) - - total_loss = ( - F.cross_entropy(logits_per_image, labels) + - F.cross_entropy(logits_per_text, labels) - ) / 2 - return total_loss - -class PreferenceLoss(nn.Module): - - def forward(self, logits_per_image, num_images, labels): - - paired_logits_list = [logit[:,i] for i, logit in enumerate(logits_per_image.split(num_images.tolist()))] - paired_logits = pad_sequence(paired_logits_list, batch_first=True, padding_value=-999) - - ce_loss = F.cross_entropy(paired_logits, labels) - return ce_loss - -class 
HPSLoss(nn.Module): - - def forward(self, text_logits, labels): - - device = text_logits.device - text_0_logits, text_1_logits = text_logits.chunk(2, dim=-1) - label_0, label_1 = labels.chunk(2, dim=-1) - - index = torch.arange(text_0_logits.shape[0], device=device, dtype=torch.long) - text_0_logits = text_0_logits[index, index] - text_1_logits = text_1_logits[index, index] - text_logits = torch.stack([text_0_logits, text_1_logits], dim=-1) - text_0_labels = torch.zeros(text_logits.shape[0], device=device, dtype=torch.long) - text_1_labels = text_0_labels + 1 - - text_0_loss = torch.nn.functional.cross_entropy(text_logits, text_0_labels, reduction="none") - text_1_loss = torch.nn.functional.cross_entropy(text_logits, text_1_labels, reduction="none") - - text_loss = label_0 * text_0_loss + label_1 * text_1_loss - - # absolute_example_weight = 1 / num_per_prompt - # denominator = absolute_example_weight.sum() - # weight_per_example = absolute_example_weight / denominator - # text_loss *= weight_per_example - - text_loss = text_loss.sum() - return text_loss - -class RankingLoss(nn.Module): - - def forward(self, logits_per_image, num_images, labels, margin = 1.0): - paired_logits_list = [logit[:,i] for i, logit in enumerate(logits_per_image.split(num_images.tolist()))] - label_list = [label for label in labels.split(num_images.tolist())] - # ranked_logits = [torch.index_select(paired_logits_list[i], 0, rank) for i, rank in enumerate(label_list)] - - paired_logits = pad_sequence(paired_logits_list, batch_first=True, padding_value=-1) - padded_labels = pad_sequence(label_list, batch_first=True, padding_value=10) - - # regulized_logits = torch.log(torch.sigmoid(paired_logits)) - - diff = paired_logits.unsqueeze(1) - paired_logits.unsqueeze(2) - # diff = paired_logits.unsqueeze(1) - paired_logits.unsqueeze(2) - # diff_label = torch.clamp(padded_labels.unsqueeze(1) - padded_labels.unsqueeze(2), min=-1, max=1) - diff_label = - (padded_labels.unsqueeze(1) - 
padded_labels.unsqueeze(2)) - mask = torch.triu(torch.ones(diff.shape[1], diff.shape[1]), diagonal=1).bool().detach() - - loss = torch.clamp(margin - torch.mul(diff[:, ~mask],diff_label[:,~mask]), min=0).mean() - return loss - -class CoCaLoss(ClipLoss): - def __init__( - self, - caption_loss_weight, - clip_loss_weight, - pad_id=0, # pad_token for open_clip custom tokenizer - local_loss=False, - gather_with_grad=False, - cache_labels=False, - rank=0, - world_size=1, - use_horovod=False, - ): - super().__init__( - local_loss=local_loss, - gather_with_grad=gather_with_grad, - cache_labels=cache_labels, - rank=rank, - world_size=world_size, - use_horovod=use_horovod - ) - - self.clip_loss_weight = clip_loss_weight - self.caption_loss_weight = caption_loss_weight - self.caption_loss = nn.CrossEntropyLoss(ignore_index=pad_id) - - def forward(self, image_features, text_features, logits, labels, logit_scale, output_dict=False): - clip_loss = super().forward(image_features, text_features, logit_scale) - clip_loss = self.clip_loss_weight * clip_loss - - caption_loss = self.caption_loss( - logits.permute(0, 2, 1), - labels, - ) - caption_loss = caption_loss * self.caption_loss_weight - - if output_dict: - return {"contrastive_loss": clip_loss, "caption_loss": caption_loss} - - return clip_loss, caption_loss - - -class DistillClipLoss(ClipLoss): - - def dist_loss(self, teacher_logits, student_logits): - return -(teacher_logits.softmax(dim=1) * student_logits.log_softmax(dim=1)).sum(dim=1).mean(dim=0) - - def forward( - self, - image_features, - text_features, - logit_scale, - dist_image_features, - dist_text_features, - dist_logit_scale, - output_dict=False, - ): - logits_per_image, logits_per_text = \ - self.get_logits(image_features, text_features, logit_scale) - - dist_logits_per_image, dist_logits_per_text = \ - self.get_logits(dist_image_features, dist_text_features, dist_logit_scale) - - labels = self.get_ground_truth(image_features.device, logits_per_image.shape[0]) - - 
contrastive_loss = ( - F.cross_entropy(logits_per_image, labels) + - F.cross_entropy(logits_per_text, labels) - ) / 2 - - distill_loss = ( - self.dist_loss(dist_logits_per_image, logits_per_image) + - self.dist_loss(dist_logits_per_text, logits_per_text) - ) / 2 - - if output_dict: - return {"contrastive_loss": contrastive_loss, "distill_loss": distill_loss} - - return contrastive_loss, distill_loss diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/model.py b/diffsynth/extensions/ImageQualityMetric/open_clip/model.py deleted file mode 100644 index e347c42..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/model.py +++ /dev/null @@ -1,461 +0,0 @@ -""" CLIP Model - -Adapted from https://github.com/openai/CLIP. Originally MIT License, Copyright (c) 2021 OpenAI. -""" -from dataclasses import dataclass -import logging -import math -from typing import Optional, Tuple, Union - -import numpy as np -import torch -import torch.nn.functional as F -from torch import nn -from torch.utils.checkpoint import checkpoint - -from .hf_model import HFTextEncoder -from .modified_resnet import ModifiedResNet -from .timm_model import TimmModel -from .transformer import LayerNormFp32, LayerNorm, QuickGELU, Attention, VisionTransformer, TextTransformer -from .utils import to_2tuple - - -@dataclass -class CLIPVisionCfg: - layers: Union[Tuple[int, int, int, int], int] = 12 - width: int = 768 - head_width: int = 64 - mlp_ratio: float = 4.0 - patch_size: int = 16 - image_size: Union[Tuple[int, int], int] = 224 - ls_init_value: Optional[float] = None # layer scale initial value - patch_dropout: float = 0. 
# what fraction of patches to dropout during training (0 would mean disabled and no patches dropped) - 0.5 to 0.75 recommended in the paper for optimal results - input_patchnorm: bool = False # whether to use dual patchnorm - would only apply the input layernorm on each patch, as post-layernorm already exist in original clip vit design - global_average_pool: bool = False # whether to global average pool the last embedding layer, instead of using CLS token (https://arxiv.org/abs/2205.01580) - attentional_pool: bool = False # whether to use attentional pooler in the last embedding layer - n_queries: int = 256 # n_queries for attentional pooler - attn_pooler_heads: int = 8 # n heads for attentional_pooling - timm_model_name: str = None # a valid model name overrides layers, width, patch_size - timm_model_pretrained: bool = False # use (imagenet) pretrained weights for named model - timm_pool: str = 'avg' # feature pooling for timm model ('abs_attn', 'rot_attn', 'avg', '') - timm_proj: str = 'linear' # linear projection for timm model output ('linear', 'mlp', '') - timm_proj_bias: bool = False # enable bias final projection - timm_drop: float = 0. 
# head dropout - timm_drop_path: Optional[float] = None # backbone stochastic depth - output_tokens: bool = False - - -@dataclass -class CLIPTextCfg: - context_length: int = 77 - vocab_size: int = 49408 - width: int = 512 - heads: int = 8 - layers: int = 12 - ls_init_value: Optional[float] = None # layer scale initial value - hf_model_name: str = None - hf_tokenizer_name: str = None - hf_model_pretrained: bool = True - proj: str = 'mlp' - pooler_type: str = 'mean_pooler' - embed_cls: bool = False - pad_id: int = 0 - output_tokens: bool = False - - -def get_cast_dtype(precision: str): - cast_dtype = None - if precision == 'bf16': - cast_dtype = torch.bfloat16 - elif precision == 'fp16': - cast_dtype = torch.float16 - return cast_dtype - - -def _build_vision_tower( - embed_dim: int, - vision_cfg: CLIPVisionCfg, - quick_gelu: bool = False, - cast_dtype: Optional[torch.dtype] = None -): - if isinstance(vision_cfg, dict): - vision_cfg = CLIPVisionCfg(**vision_cfg) - - # OpenAI models are pretrained w/ QuickGELU but native nn.GELU is both faster and more - # memory efficient in recent PyTorch releases (>= 1.10). - # NOTE: timm models always use native GELU regardless of quick_gelu flag. 
- act_layer = QuickGELU if quick_gelu else nn.GELU - - if vision_cfg.timm_model_name: - visual = TimmModel( - vision_cfg.timm_model_name, - pretrained=vision_cfg.timm_model_pretrained, - pool=vision_cfg.timm_pool, - proj=vision_cfg.timm_proj, - proj_bias=vision_cfg.timm_proj_bias, - drop=vision_cfg.timm_drop, - drop_path=vision_cfg.timm_drop_path, - embed_dim=embed_dim, - image_size=vision_cfg.image_size, - ) - act_layer = nn.GELU # so that text transformer doesn't use QuickGELU w/ timm models - elif isinstance(vision_cfg.layers, (tuple, list)): - vision_heads = vision_cfg.width * 32 // vision_cfg.head_width - visual = ModifiedResNet( - layers=vision_cfg.layers, - output_dim=embed_dim, - heads=vision_heads, - image_size=vision_cfg.image_size, - width=vision_cfg.width, - ) - else: - vision_heads = vision_cfg.width // vision_cfg.head_width - norm_layer = LayerNormFp32 if cast_dtype in (torch.float16, torch.bfloat16) else LayerNorm - visual = VisionTransformer( - image_size=vision_cfg.image_size, - patch_size=vision_cfg.patch_size, - width=vision_cfg.width, - layers=vision_cfg.layers, - heads=vision_heads, - mlp_ratio=vision_cfg.mlp_ratio, - ls_init_value=vision_cfg.ls_init_value, - patch_dropout=vision_cfg.patch_dropout, - input_patchnorm=vision_cfg.input_patchnorm, - global_average_pool=vision_cfg.global_average_pool, - attentional_pool=vision_cfg.attentional_pool, - n_queries=vision_cfg.n_queries, - attn_pooler_heads=vision_cfg.attn_pooler_heads, - output_tokens=vision_cfg.output_tokens, - output_dim=embed_dim, - act_layer=act_layer, - norm_layer=norm_layer, - ) - - return visual - - -def _build_text_tower( - embed_dim: int, - text_cfg: CLIPTextCfg, - quick_gelu: bool = False, - cast_dtype: Optional[torch.dtype] = None, -): - if isinstance(text_cfg, dict): - text_cfg = CLIPTextCfg(**text_cfg) - - if text_cfg.hf_model_name: - text = HFTextEncoder( - text_cfg.hf_model_name, - output_dim=embed_dim, - proj=text_cfg.proj, - pooler_type=text_cfg.pooler_type, - 
pretrained=text_cfg.hf_model_pretrained, - output_tokens=text_cfg.output_tokens, - ) - else: - act_layer = QuickGELU if quick_gelu else nn.GELU - norm_layer = LayerNormFp32 if cast_dtype in (torch.float16, torch.bfloat16) else LayerNorm - - text = TextTransformer( - context_length=text_cfg.context_length, - vocab_size=text_cfg.vocab_size, - width=text_cfg.width, - heads=text_cfg.heads, - layers=text_cfg.layers, - ls_init_value=text_cfg.ls_init_value, - output_dim=embed_dim, - embed_cls=text_cfg.embed_cls, - output_tokens=text_cfg.output_tokens, - pad_id=text_cfg.pad_id, - act_layer=act_layer, - norm_layer=norm_layer, - ) - return text - - -class CLIP(nn.Module): - output_dict: torch.jit.Final[bool] - - def __init__( - self, - embed_dim: int, - vision_cfg: CLIPVisionCfg, - text_cfg: CLIPTextCfg, - quick_gelu: bool = False, - cast_dtype: Optional[torch.dtype] = None, - output_dict: bool = False, - ): - super().__init__() - self.output_dict = output_dict - self.visual = _build_vision_tower(embed_dim, vision_cfg, quick_gelu, cast_dtype) - - text = _build_text_tower(embed_dim, text_cfg, quick_gelu, cast_dtype) - self.transformer = text.transformer - self.vocab_size = text.vocab_size - self.token_embedding = text.token_embedding - self.positional_embedding = text.positional_embedding - self.ln_final = text.ln_final - self.text_projection = text.text_projection - self.register_buffer('attn_mask', text.attn_mask, persistent=False) - - self.logit_scale = nn.Parameter(torch.ones([]) * np.log(1 / 0.07)) - - def lock_image_tower(self, unlocked_groups=0, freeze_bn_stats=False): - # lock image tower as per LiT - https://arxiv.org/abs/2111.07991 - self.visual.lock(unlocked_groups=unlocked_groups, freeze_bn_stats=freeze_bn_stats) - - def lock_text_tower(self, unlocked_layers: int = 0, freeze_layer_norm: bool = True): - locked_layers = [] - locked_layers.append(self.token_embedding) - self.positional_embedding.requires_grad = False - if unlocked_layers > 0: - 
locked_layers.append(self.transformer.resblocks[:-unlocked_layers]) - else: - locked_layers.append(self.transformer) - locked_layers.append(self.ln_final) - self.text_projection.requires_grad = False - - # freeze layers - for module in locked_layers: - for n, p in module.named_parameters(): - p.requires_grad = (not freeze_layer_norm) if "LayerNorm" in n.split(".") else False - - @torch.jit.ignore - def set_grad_checkpointing(self, enable=True): - self.visual.set_grad_checkpointing(enable) - self.transformer.grad_checkpointing = enable - - def encode_image(self, image, normalize: bool = False): - features = self.visual(image) - return F.normalize(features, dim=-1) if normalize else features - - def encode_text(self, text, normalize: bool = False): - cast_dtype = self.transformer.get_cast_dtype() - - x = self.token_embedding(text).to(cast_dtype) # [batch_size, n_ctx, d_model] - - x = x + self.positional_embedding.to(cast_dtype) - x = x.permute(1, 0, 2) # NLD -> LND - x = self.transformer(x, attn_mask=self.attn_mask) - x = x.permute(1, 0, 2) # LND -> NLD - x = self.ln_final(x) # [batch_size, n_ctx, transformer.width] - # take features from the eot embedding (eot_token is the highest number in each sequence) - x = x[torch.arange(x.shape[0]), text.argmax(dim=-1)] @ self.text_projection - return F.normalize(x, dim=-1) if normalize else x - - def forward(self, image, text): - image_features = self.encode_image(image, normalize=True) - text_features = self.encode_text(text, normalize=True) - if self.output_dict: - return { - "image_features": image_features, - "text_features": text_features, - "logit_scale": self.logit_scale.exp() - } - return image_features, text_features, self.logit_scale.exp() - - -class CustomTextCLIP(nn.Module): - output_dict: torch.jit.Final[bool] - - def __init__( - self, - embed_dim: int, - vision_cfg: CLIPVisionCfg, - text_cfg: CLIPTextCfg, - quick_gelu: bool = False, - cast_dtype: Optional[torch.dtype] = None, - output_dict: bool = False, - ): - 
super().__init__() - self.output_dict = output_dict - self.visual = _build_vision_tower(embed_dim, vision_cfg, quick_gelu, cast_dtype) - self.text = _build_text_tower(embed_dim, text_cfg, quick_gelu, cast_dtype) - self.logit_scale = nn.Parameter(torch.ones([]) * np.log(1 / 0.07)) - - def lock_image_tower(self, unlocked_groups=0, freeze_bn_stats=False): - # lock image tower as per LiT - https://arxiv.org/abs/2111.07991 - self.visual.lock(unlocked_groups=unlocked_groups, freeze_bn_stats=freeze_bn_stats) - - def lock_text_tower(self, unlocked_layers: int = 0, freeze_layer_norm: bool = True): - self.text.lock(unlocked_layers, freeze_layer_norm) - - @torch.jit.ignore - def set_grad_checkpointing(self, enable=True): - self.visual.set_grad_checkpointing(enable) - self.text.set_grad_checkpointing(enable) - - def encode_image(self, image, normalize: bool = False): - features = self.visual(image) - return F.normalize(features, dim=-1) if normalize else features - - def encode_text(self, text, normalize: bool = False): - features = self.text(text) - return F.normalize(features, dim=-1) if normalize else features - - def forward(self, image, text): - image_features = self.encode_image(image, normalize=True) - text_features = self.encode_text(text, normalize=True) - if self.output_dict: - return { - "image_features": image_features, - "text_features": text_features, - "logit_scale": self.logit_scale.exp() - } - return image_features, text_features, self.logit_scale.exp() - - -def convert_weights_to_lp(model: nn.Module, dtype=torch.float16): - """Convert applicable model parameters to low-precision (bf16 or fp16)""" - - def _convert_weights(l): - if isinstance(l, (nn.Conv1d, nn.Conv2d, nn.Linear)): - l.weight.data = l.weight.data.to(dtype) - if l.bias is not None: - l.bias.data = l.bias.data.to(dtype) - - if isinstance(l, (nn.MultiheadAttention, Attention)): - for attr in [*[f"{s}_proj_weight" for s in ["in", "q", "k", "v"]], "in_proj_bias", "bias_k", "bias_v"]: - tensor = 
getattr(l, attr) - if tensor is not None: - tensor.data = tensor.data.to(dtype) - - for name in ["text_projection", "proj"]: - if hasattr(l, name): - attr = getattr(l, name) - if attr is not None: - attr.data = attr.data.to(dtype) - - model.apply(_convert_weights) - - -convert_weights_to_fp16 = convert_weights_to_lp # backwards compat - - -# used to maintain checkpoint compatibility -def convert_to_custom_text_state_dict(state_dict: dict): - if 'text_projection' in state_dict: - # old format state_dict, move text tower -> .text - new_state_dict = {} - for k, v in state_dict.items(): - if any(k.startswith(p) for p in ( - 'text_projection', - 'positional_embedding', - 'token_embedding', - 'transformer', - 'ln_final', - )): - k = 'text.' + k - new_state_dict[k] = v - return new_state_dict - return state_dict - - -def build_model_from_openai_state_dict( - state_dict: dict, - quick_gelu=True, - cast_dtype=torch.float16, -): - vit = "visual.proj" in state_dict - - if vit: - vision_width = state_dict["visual.conv1.weight"].shape[0] - vision_layers = len( - [k for k in state_dict.keys() if k.startswith("visual.") and k.endswith(".attn.in_proj_weight")]) - vision_patch_size = state_dict["visual.conv1.weight"].shape[-1] - grid_size = round((state_dict["visual.positional_embedding"].shape[0] - 1) ** 0.5) - image_size = vision_patch_size * grid_size - else: - counts: list = [ - len(set(k.split(".")[2] for k in state_dict if k.startswith(f"visual.layer{b}"))) for b in [1, 2, 3, 4]] - vision_layers = tuple(counts) - vision_width = state_dict["visual.layer1.0.conv1.weight"].shape[0] - output_width = round((state_dict["visual.attnpool.positional_embedding"].shape[0] - 1) ** 0.5) - vision_patch_size = None - assert output_width ** 2 + 1 == state_dict["visual.attnpool.positional_embedding"].shape[0] - image_size = output_width * 32 - - embed_dim = state_dict["text_projection"].shape[1] - context_length = state_dict["positional_embedding"].shape[0] - vocab_size = 
state_dict["token_embedding.weight"].shape[0] - transformer_width = state_dict["ln_final.weight"].shape[0] - transformer_heads = transformer_width // 64 - transformer_layers = len(set(k.split(".")[2] for k in state_dict if k.startswith(f"transformer.resblocks"))) - - vision_cfg = CLIPVisionCfg( - layers=vision_layers, - width=vision_width, - patch_size=vision_patch_size, - image_size=image_size, - ) - text_cfg = CLIPTextCfg( - context_length=context_length, - vocab_size=vocab_size, - width=transformer_width, - heads=transformer_heads, - layers=transformer_layers, - ) - model = CLIP( - embed_dim, - vision_cfg=vision_cfg, - text_cfg=text_cfg, - quick_gelu=quick_gelu, # OpenAI models were trained with QuickGELU - cast_dtype=cast_dtype, - ) - - for key in ["input_resolution", "context_length", "vocab_size"]: - state_dict.pop(key, None) - - convert_weights_to_fp16(model) # OpenAI state dicts are partially converted to float16 - model.load_state_dict(state_dict) - return model.eval() - - -def trace_model(model, batch_size=256, device=torch.device('cpu')): - model.eval() - image_size = model.visual.image_size - example_images = torch.ones((batch_size, 3, image_size, image_size), device=device) - example_text = torch.zeros((batch_size, model.context_length), dtype=torch.int, device=device) - model = torch.jit.trace_module( - model, - inputs=dict( - forward=(example_images, example_text), - encode_text=(example_text,), - encode_image=(example_images,) - )) - model.visual.image_size = image_size - return model - - -def resize_pos_embed(state_dict, model, interpolation: str = 'bicubic', antialias: bool = True): - # Rescale the grid of position embeddings when loading from state_dict - old_pos_embed = state_dict.get('visual.positional_embedding', None) - if old_pos_embed is None or not hasattr(model.visual, 'grid_size'): - return - grid_size = to_2tuple(model.visual.grid_size) - extra_tokens = 1 # FIXME detect different token configs (ie no class token, or more) - new_seq_len 
= grid_size[0] * grid_size[1] + extra_tokens - if new_seq_len == old_pos_embed.shape[0]: - return - - if extra_tokens: - pos_emb_tok, pos_emb_img = old_pos_embed[:extra_tokens], old_pos_embed[extra_tokens:] - else: - pos_emb_tok, pos_emb_img = None, old_pos_embed - old_grid_size = to_2tuple(int(math.sqrt(len(pos_emb_img)))) - - logging.info('Resizing position embedding grid-size from %s to %s', old_grid_size, grid_size) - pos_emb_img = pos_emb_img.reshape(1, old_grid_size[0], old_grid_size[1], -1).permute(0, 3, 1, 2) - pos_emb_img = F.interpolate( - pos_emb_img, - size=grid_size, - mode=interpolation, - antialias=antialias, - align_corners=False, - ) - pos_emb_img = pos_emb_img.permute(0, 2, 3, 1).reshape(1, grid_size[0] * grid_size[1], -1)[0] - if pos_emb_tok is not None: - new_pos_embed = torch.cat([pos_emb_tok, pos_emb_img], dim=0) - else: - new_pos_embed = pos_emb_img - state_dict['visual.positional_embedding'] = new_pos_embed diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/model_configs/ViT-H-14.json b/diffsynth/extensions/ImageQualityMetric/open_clip/model_configs/ViT-H-14.json deleted file mode 100644 index 3e3a7e9..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/model_configs/ViT-H-14.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "embed_dim": 1024, - "vision_cfg": { - "image_size": 224, - "layers": 32, - "width": 1280, - "head_width": 80, - "patch_size": 14 - }, - "text_cfg": { - "context_length": 77, - "vocab_size": 49408, - "width": 1024, - "heads": 16, - "layers": 24 - } -} \ No newline at end of file diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/modified_resnet.py b/diffsynth/extensions/ImageQualityMetric/open_clip/modified_resnet.py deleted file mode 100644 index 6a8d3ae..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/modified_resnet.py +++ /dev/null @@ -1,181 +0,0 @@ -from collections import OrderedDict - -import torch -from torch import nn -from torch.nn import functional as F - -from .utils 
import freeze_batch_norm_2d - - -class Bottleneck(nn.Module): - expansion = 4 - - def __init__(self, inplanes, planes, stride=1): - super().__init__() - - # all conv layers have stride 1. an avgpool is performed after the second convolution when stride > 1 - self.conv1 = nn.Conv2d(inplanes, planes, 1, bias=False) - self.bn1 = nn.BatchNorm2d(planes) - self.act1 = nn.ReLU(inplace=True) - - self.conv2 = nn.Conv2d(planes, planes, 3, padding=1, bias=False) - self.bn2 = nn.BatchNorm2d(planes) - self.act2 = nn.ReLU(inplace=True) - - self.avgpool = nn.AvgPool2d(stride) if stride > 1 else nn.Identity() - - self.conv3 = nn.Conv2d(planes, planes * self.expansion, 1, bias=False) - self.bn3 = nn.BatchNorm2d(planes * self.expansion) - self.act3 = nn.ReLU(inplace=True) - - self.downsample = None - self.stride = stride - - if stride > 1 or inplanes != planes * Bottleneck.expansion: - # downsampling layer is prepended with an avgpool, and the subsequent convolution has stride 1 - self.downsample = nn.Sequential(OrderedDict([ - ("-1", nn.AvgPool2d(stride)), - ("0", nn.Conv2d(inplanes, planes * self.expansion, 1, stride=1, bias=False)), - ("1", nn.BatchNorm2d(planes * self.expansion)) - ])) - - def forward(self, x: torch.Tensor): - identity = x - - out = self.act1(self.bn1(self.conv1(x))) - out = self.act2(self.bn2(self.conv2(out))) - out = self.avgpool(out) - out = self.bn3(self.conv3(out)) - - if self.downsample is not None: - identity = self.downsample(x) - - out += identity - out = self.act3(out) - return out - - -class AttentionPool2d(nn.Module): - def __init__(self, spacial_dim: int, embed_dim: int, num_heads: int, output_dim: int = None): - super().__init__() - self.positional_embedding = nn.Parameter(torch.randn(spacial_dim ** 2 + 1, embed_dim) / embed_dim ** 0.5) - self.k_proj = nn.Linear(embed_dim, embed_dim) - self.q_proj = nn.Linear(embed_dim, embed_dim) - self.v_proj = nn.Linear(embed_dim, embed_dim) - self.c_proj = nn.Linear(embed_dim, output_dim or embed_dim) - 
self.num_heads = num_heads - - def forward(self, x): - x = x.reshape(x.shape[0], x.shape[1], x.shape[2] * x.shape[3]).permute(2, 0, 1) # NCHW -> (HW)NC - x = torch.cat([x.mean(dim=0, keepdim=True), x], dim=0) # (HW+1)NC - x = x + self.positional_embedding[:, None, :].to(x.dtype) # (HW+1)NC - x, _ = F.multi_head_attention_forward( - query=x, key=x, value=x, - embed_dim_to_check=x.shape[-1], - num_heads=self.num_heads, - q_proj_weight=self.q_proj.weight, - k_proj_weight=self.k_proj.weight, - v_proj_weight=self.v_proj.weight, - in_proj_weight=None, - in_proj_bias=torch.cat([self.q_proj.bias, self.k_proj.bias, self.v_proj.bias]), - bias_k=None, - bias_v=None, - add_zero_attn=False, - dropout_p=0., - out_proj_weight=self.c_proj.weight, - out_proj_bias=self.c_proj.bias, - use_separate_proj_weight=True, - training=self.training, - need_weights=False - ) - - return x[0] - - -class ModifiedResNet(nn.Module): - """ - A ResNet class that is similar to torchvision's but contains the following changes: - - There are now 3 "stem" convolutions as opposed to 1, with an average pool instead of a max pool. 
- - Performs anti-aliasing strided convolutions, where an avgpool is prepended to convolutions with stride > 1 - - The final pooling layer is a QKV attention instead of an average pool - """ - - def __init__(self, layers, output_dim, heads, image_size=224, width=64): - super().__init__() - self.output_dim = output_dim - self.image_size = image_size - - # the 3-layer stem - self.conv1 = nn.Conv2d(3, width // 2, kernel_size=3, stride=2, padding=1, bias=False) - self.bn1 = nn.BatchNorm2d(width // 2) - self.act1 = nn.ReLU(inplace=True) - self.conv2 = nn.Conv2d(width // 2, width // 2, kernel_size=3, padding=1, bias=False) - self.bn2 = nn.BatchNorm2d(width // 2) - self.act2 = nn.ReLU(inplace=True) - self.conv3 = nn.Conv2d(width // 2, width, kernel_size=3, padding=1, bias=False) - self.bn3 = nn.BatchNorm2d(width) - self.act3 = nn.ReLU(inplace=True) - self.avgpool = nn.AvgPool2d(2) - - # residual layers - self._inplanes = width # this is a *mutable* variable used during construction - self.layer1 = self._make_layer(width, layers[0]) - self.layer2 = self._make_layer(width * 2, layers[1], stride=2) - self.layer3 = self._make_layer(width * 4, layers[2], stride=2) - self.layer4 = self._make_layer(width * 8, layers[3], stride=2) - - embed_dim = width * 32 # the ResNet feature dimension - self.attnpool = AttentionPool2d(image_size // 32, embed_dim, heads, output_dim) - - self.init_parameters() - - def _make_layer(self, planes, blocks, stride=1): - layers = [Bottleneck(self._inplanes, planes, stride)] - - self._inplanes = planes * Bottleneck.expansion - for _ in range(1, blocks): - layers.append(Bottleneck(self._inplanes, planes)) - - return nn.Sequential(*layers) - - def init_parameters(self): - if self.attnpool is not None: - std = self.attnpool.c_proj.in_features ** -0.5 - nn.init.normal_(self.attnpool.q_proj.weight, std=std) - nn.init.normal_(self.attnpool.k_proj.weight, std=std) - nn.init.normal_(self.attnpool.v_proj.weight, std=std) - 
nn.init.normal_(self.attnpool.c_proj.weight, std=std) - - for resnet_block in [self.layer1, self.layer2, self.layer3, self.layer4]: - for name, param in resnet_block.named_parameters(): - if name.endswith("bn3.weight"): - nn.init.zeros_(param) - - def lock(self, unlocked_groups=0, freeze_bn_stats=False): - assert unlocked_groups == 0, 'partial locking not currently supported for this model' - for param in self.parameters(): - param.requires_grad = False - if freeze_bn_stats: - freeze_batch_norm_2d(self) - - @torch.jit.ignore - def set_grad_checkpointing(self, enable=True): - # FIXME support for non-transformer - pass - - def stem(self, x): - x = self.act1(self.bn1(self.conv1(x))) - x = self.act2(self.bn2(self.conv2(x))) - x = self.act3(self.bn3(self.conv3(x))) - x = self.avgpool(x) - return x - - def forward(self, x): - x = self.stem(x) - x = self.layer1(x) - x = self.layer2(x) - x = self.layer3(x) - x = self.layer4(x) - x = self.attnpool(x) - - return x diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/openai.py b/diffsynth/extensions/ImageQualityMetric/open_clip/openai.py deleted file mode 100644 index cc4e13e..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/openai.py +++ /dev/null @@ -1,144 +0,0 @@ -""" OpenAI pretrained model functions - -Adapted from https://github.com/openai/CLIP. Originally MIT License, Copyright (c) 2021 OpenAI. 
-""" - -import os -import warnings -from typing import List, Optional, Union - -import torch - -from .model import build_model_from_openai_state_dict, convert_weights_to_lp, get_cast_dtype -from .pretrained import get_pretrained_url, list_pretrained_models_by_tag, download_pretrained_from_url - -__all__ = ["list_openai_models", "load_openai_model"] - - -def list_openai_models() -> List[str]: - """Returns the names of available CLIP models""" - return list_pretrained_models_by_tag('openai') - - -def load_openai_model( - name: str, - precision: Optional[str] = None, - device: Optional[Union[str, torch.device]] = None, - jit: bool = True, - cache_dir: Optional[str] = None, -): - """Load a CLIP model - - Parameters - ---------- - name : str - A model name listed by `clip.available_models()`, or the path to a model checkpoint containing the state_dict - precision: str - Model precision, if None defaults to 'fp32' if device == 'cpu' else 'fp16'. - device : Union[str, torch.device] - The device to put the loaded model - jit : bool - Whether to load the optimized JIT model (default) or more hackable non-JIT model. 
- cache_dir : Optional[str] - The directory to cache the downloaded model weights - - Returns - ------- - model : torch.nn.Module - The CLIP model - preprocess : Callable[[PIL.Image], torch.Tensor] - A torchvision transform that converts a PIL image into a tensor that the returned model can take as its input - """ - if device is None: - device = "cuda" if torch.cuda.is_available() else "cpu" - if precision is None: - precision = 'fp32' if device == 'cpu' else 'fp16' - - if get_pretrained_url(name, 'openai'): - model_path = download_pretrained_from_url(get_pretrained_url(name, 'openai'), cache_dir=cache_dir) - elif os.path.isfile(name): - model_path = name - else: - raise RuntimeError(f"Model {name} not found; available models = {list_openai_models()}") - - try: - # loading JIT archive - model = torch.jit.load(model_path, map_location=device if jit else "cpu").eval() - state_dict = None - except RuntimeError: - # loading saved state dict - if jit: - warnings.warn(f"File {model_path} is not a JIT archive. 
Loading as a state dict instead") - jit = False - state_dict = torch.load(model_path, map_location="cpu") - - if not jit: - # Build a non-jit model from the OpenAI jitted model state dict - cast_dtype = get_cast_dtype(precision) - try: - model = build_model_from_openai_state_dict(state_dict or model.state_dict(), cast_dtype=cast_dtype) - except KeyError: - sd = {k[7:]: v for k, v in state_dict["state_dict"].items()} - model = build_model_from_openai_state_dict(sd, cast_dtype=cast_dtype) - - # model from OpenAI state dict is in manually cast fp16 mode, must be converted for AMP/fp32/bf16 use - model = model.to(device) - if precision.startswith('amp') or precision == 'fp32': - model.float() - elif precision == 'bf16': - convert_weights_to_lp(model, dtype=torch.bfloat16) - - return model - - # patch the device names - device_holder = torch.jit.trace(lambda: torch.ones([]).to(torch.device(device)), example_inputs=[]) - device_node = [n for n in device_holder.graph.findAllNodes("prim::Constant") if "Device" in repr(n)][-1] - - def patch_device(module): - try: - graphs = [module.graph] if hasattr(module, "graph") else [] - except RuntimeError: - graphs = [] - - if hasattr(module, "forward1"): - graphs.append(module.forward1.graph) - - for graph in graphs: - for node in graph.findAllNodes("prim::Constant"): - if "value" in node.attributeNames() and str(node["value"]).startswith("cuda"): - node.copyAttributes(device_node) - - model.apply(patch_device) - patch_device(model.encode_image) - patch_device(model.encode_text) - - # patch dtype to float32 (typically for CPU) - if precision == 'fp32': - float_holder = torch.jit.trace(lambda: torch.ones([]).float(), example_inputs=[]) - float_input = list(float_holder.graph.findNode("aten::to").inputs())[1] - float_node = float_input.node() - - def patch_float(module): - try: - graphs = [module.graph] if hasattr(module, "graph") else [] - except RuntimeError: - graphs = [] - - if hasattr(module, "forward1"): - 
graphs.append(module.forward1.graph) - - for graph in graphs: - for node in graph.findAllNodes("aten::to"): - inputs = list(node.inputs()) - for i in [1, 2]: # dtype can be the second or third argument to aten::to() - if inputs[i].node()["value"] == 5: - inputs[i].node().copyAttributes(float_node) - - model.apply(patch_float) - patch_float(model.encode_image) - patch_float(model.encode_text) - model.float() - - # ensure image_size attr available at consistent location for both jit and non-jit - model.visual.image_size = model.input_resolution.item() - return model diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/pretrained.py b/diffsynth/extensions/ImageQualityMetric/open_clip/pretrained.py deleted file mode 100644 index 87e7e52..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/pretrained.py +++ /dev/null @@ -1,376 +0,0 @@ -import hashlib -import os -import urllib -import warnings -from functools import partial -from typing import Dict, Union - -from tqdm import tqdm - -from .version import __version__ - -try: - from huggingface_hub import hf_hub_download - hf_hub_download = partial(hf_hub_download, library_name="open_clip", library_version=__version__) - _has_hf_hub = True -except ImportError: - hf_hub_download = None - _has_hf_hub = False - - -def _pcfg(url='', hf_hub='', mean=None, std=None): - return dict( - url=url, - hf_hub=hf_hub, - mean=mean, - std=std, - ) - - -_RN50 = dict( - openai=_pcfg( - "https://openaipublic.azureedge.net/clip/models/afeb0e10f9e5a86da6080e35cf09123aca3b358a0c3e3b6c78a7b63bc04b6762/RN50.pt"), - yfcc15m=_pcfg( - "https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn50-quickgelu-yfcc15m-455df137.pt"), - cc12m=_pcfg( - "https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn50-quickgelu-cc12m-f000538c.pt"), -) - -_RN50_quickgelu = dict( - openai=_pcfg( - 
"https://openaipublic.azureedge.net/clip/models/afeb0e10f9e5a86da6080e35cf09123aca3b358a0c3e3b6c78a7b63bc04b6762/RN50.pt"), - yfcc15m=_pcfg( - "https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn50-quickgelu-yfcc15m-455df137.pt"), - cc12m=_pcfg( - "https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn50-quickgelu-cc12m-f000538c.pt"), -) - -_RN101 = dict( - openai=_pcfg( - "https://openaipublic.azureedge.net/clip/models/8fa8567bab74a42d41c5915025a8e4538c3bdbe8804a470a72f30b0d94fab599/RN101.pt"), - yfcc15m=_pcfg( - "https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn101-quickgelu-yfcc15m-3e04b30e.pt"), -) - -_RN101_quickgelu = dict( - openai=_pcfg( - "https://openaipublic.azureedge.net/clip/models/8fa8567bab74a42d41c5915025a8e4538c3bdbe8804a470a72f30b0d94fab599/RN101.pt"), - yfcc15m=_pcfg( - "https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn101-quickgelu-yfcc15m-3e04b30e.pt"), -) - -_RN50x4 = dict( - openai=_pcfg( - "https://openaipublic.azureedge.net/clip/models/7e526bd135e493cef0776de27d5f42653e6b4c8bf9e0f653bb11773263205fdd/RN50x4.pt"), -) - -_RN50x16 = dict( - openai=_pcfg( - "https://openaipublic.azureedge.net/clip/models/52378b407f34354e150460fe41077663dd5b39c54cd0bfd2b27167a4a06ec9aa/RN50x16.pt"), -) - -_RN50x64 = dict( - openai=_pcfg( - "https://openaipublic.azureedge.net/clip/models/be1cfb55d75a9666199fb2206c106743da0f6468c9d327f3e0d0a543a9919d9c/RN50x64.pt"), -) - -_VITB32 = dict( - openai=_pcfg( - "https://openaipublic.azureedge.net/clip/models/40d365715913c9da98579312b702a82c18be219cc2a73407c4526f58eba950af/ViT-B-32.pt"), - laion400m_e31=_pcfg( - "https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_e31-d867053b.pt"), - laion400m_e32=_pcfg( - "https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_e32-46683a32.pt"), - laion2b_e16=_pcfg( - 
"https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-laion2b_e16-af8dbd0c.pth"), - laion2b_s34b_b79k=_pcfg(hf_hub='laion/CLIP-ViT-B-32-laion2B-s34B-b79K/') -) - -_VITB32_quickgelu = dict( - openai=_pcfg( - "https://openaipublic.azureedge.net/clip/models/40d365715913c9da98579312b702a82c18be219cc2a73407c4526f58eba950af/ViT-B-32.pt"), - laion400m_e31=_pcfg( - "https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_e31-d867053b.pt"), - laion400m_e32=_pcfg( - "https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_e32-46683a32.pt"), -) - -_VITB16 = dict( - openai=_pcfg( - "https://openaipublic.azureedge.net/clip/models/5806e77cd80f8b59890b7e101eabd078d9fb84e6937f9e85e4ecb61988df416f/ViT-B-16.pt"), - laion400m_e31=_pcfg( - "https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_16-laion400m_e31-00efa78f.pt"), - laion400m_e32=_pcfg( - "https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_16-laion400m_e32-55e67d44.pt"), - # laion400m_32k=_pcfg( - # url="", - # mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)), - # laion400m_64k=_pcfg( - # url="", - # mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)), - laion2b_s34b_b88k=_pcfg(hf_hub='laion/CLIP-ViT-B-16-laion2B-s34B-b88K/'), -) - -_VITB16_PLUS_240 = dict( - laion400m_e31=_pcfg( - "https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_16_plus_240-laion400m_e31-8fb26589.pt"), - laion400m_e32=_pcfg( - "https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_16_plus_240-laion400m_e32-699c4b84.pt"), -) - -_VITL14 = dict( - openai=_pcfg( - "https://openaipublic.azureedge.net/clip/models/b8cca3fd41ae0c99ba7e8951adf17d267cdb84cd88be6f7c2e0eca1737a03836/ViT-L-14.pt"), - laion400m_e31=_pcfg( - "https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_l_14-laion400m_e31-69988bb6.pt"), - 
laion400m_e32=_pcfg( - "https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_l_14-laion400m_e32-3d133497.pt"), - laion2b_s32b_b82k=_pcfg( - hf_hub='laion/CLIP-ViT-L-14-laion2B-s32B-b82K/', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)), -) - -_VITL14_336 = dict( - openai=_pcfg( - "https://openaipublic.azureedge.net/clip/models/3035c92b350959924f9f00213499208652fc7ea050643e8b385c2dac08641f02/ViT-L-14-336px.pt"), -) - -_VITH14 = dict( - laion2b_s32b_b79k=_pcfg(hf_hub='laion/CLIP-ViT-H-14-laion2B-s32B-b79K/'), -) - -_VITg14 = dict( - laion2b_s12b_b42k=_pcfg(hf_hub='laion/CLIP-ViT-g-14-laion2B-s12B-b42K/'), - laion2b_s34b_b88k=_pcfg(hf_hub='laion/CLIP-ViT-g-14-laion2B-s34B-b88K/'), -) - -_VITbigG14 = dict( - laion2b_s39b_b160k=_pcfg(hf_hub='laion/CLIP-ViT-bigG-14-laion2B-39B-b160k/'), -) - -_robertaViTB32 = dict( - laion2b_s12b_b32k=_pcfg(hf_hub='laion/CLIP-ViT-B-32-roberta-base-laion2B-s12B-b32k/'), -) - -_xlmRobertaBaseViTB32 = dict( - laion5b_s13b_b90k=_pcfg(hf_hub='laion/CLIP-ViT-B-32-xlm-roberta-base-laion5B-s13B-b90k/'), -) - -_xlmRobertaLargeFrozenViTH14 = dict( - frozen_laion5b_s13b_b90k=_pcfg(hf_hub='laion/CLIP-ViT-H-14-frozen-xlm-roberta-large-laion5B-s13B-b90k/'), -) - -_convnext_base = dict( - laion400m_s13b_b51k=_pcfg(hf_hub='laion/CLIP-convnext_base-laion400M-s13B-b51K/'), -) - -_convnext_base_w = dict( - laion2b_s13b_b82k=_pcfg(hf_hub='laion/CLIP-convnext_base_w-laion2B-s13B-b82K/'), - laion2b_s13b_b82k_augreg=_pcfg(hf_hub='laion/CLIP-convnext_base_w-laion2B-s13B-b82K-augreg/'), - laion_aesthetic_s13b_b82k=_pcfg(hf_hub='laion/CLIP-convnext_base_w-laion_aesthetic-s13B-b82K/'), -) - -_convnext_base_w_320 = dict( - laion_aesthetic_s13b_b82k=_pcfg(hf_hub='laion/CLIP-convnext_base_w_320-laion_aesthetic-s13B-b82K/'), - laion_aesthetic_s13b_b82k_augreg=_pcfg(hf_hub='laion/CLIP-convnext_base_w_320-laion_aesthetic-s13B-b82K-augreg/'), -) - -_convnext_large_d = dict( - 
laion2b_s26b_b102k_augreg=_pcfg(hf_hub='laion/CLIP-convnext_large_d.laion2B-s26B-b102K-augreg/'), -) - -_convnext_large_d_320 = dict( - laion2b_s29b_b131k_ft=_pcfg(hf_hub='laion/CLIP-convnext_large_d_320.laion2B-s29B-b131K-ft/'), - laion2b_s29b_b131k_ft_soup=_pcfg(hf_hub='laion/CLIP-convnext_large_d_320.laion2B-s29B-b131K-ft-soup/'), -) - -_convnext_xxlarge = dict( - laion2b_s34b_b82k_augreg=_pcfg(hf_hub='laion/CLIP-convnext_xxlarge-laion2B-s34B-b82K-augreg/'), - laion2b_s34b_b82k_augreg_rewind=_pcfg(hf_hub='laion/CLIP-convnext_xxlarge-laion2B-s34B-b82K-augreg-rewind/'), - laion2b_s34b_b82k_augreg_soup=_pcfg(hf_hub='laion/CLIP-convnext_xxlarge-laion2B-s34B-b82K-augreg-soup/'), -) - -_coca_VITB32 = dict( - laion2b_s13b_b90k=_pcfg(hf_hub='laion/CoCa-ViT-B-32-laion2B-s13B-b90k/'), - mscoco_finetuned_laion2b_s13b_b90k=_pcfg(hf_hub='laion/mscoco_finetuned_CoCa-ViT-B-32-laion2B-s13B-b90k/') -) - -_coca_VITL14 = dict( - laion2b_s13b_b90k=_pcfg(hf_hub='laion/CoCa-ViT-L-14-laion2B-s13B-b90k/'), - mscoco_finetuned_laion2b_s13b_b90k=_pcfg(hf_hub='laion/mscoco_finetuned_CoCa-ViT-L-14-laion2B-s13B-b90k/') -) - - -_PRETRAINED = { - "RN50": _RN50, - "RN50-quickgelu": _RN50_quickgelu, - "RN101": _RN101, - "RN101-quickgelu": _RN101_quickgelu, - "RN50x4": _RN50x4, - "RN50x16": _RN50x16, - "RN50x64": _RN50x64, - "ViT-B-32": _VITB32, - "ViT-B-32-quickgelu": _VITB32_quickgelu, - "ViT-B-16": _VITB16, - "ViT-B-16-plus-240": _VITB16_PLUS_240, - "ViT-L-14": _VITL14, - "ViT-L-14-336": _VITL14_336, - "ViT-H-14": _VITH14, - "ViT-g-14": _VITg14, - "ViT-bigG-14": _VITbigG14, - "roberta-ViT-B-32": _robertaViTB32, - "xlm-roberta-base-ViT-B-32": _xlmRobertaBaseViTB32, - "xlm-roberta-large-ViT-H-14": _xlmRobertaLargeFrozenViTH14, - "convnext_base": _convnext_base, - "convnext_base_w": _convnext_base_w, - "convnext_base_w_320": _convnext_base_w_320, - "convnext_large_d": _convnext_large_d, - "convnext_large_d_320": _convnext_large_d_320, - "convnext_xxlarge": _convnext_xxlarge, - "coca_ViT-B-32": 
_coca_VITB32, - "coca_ViT-L-14": _coca_VITL14, -} - - -def _clean_tag(tag: str): - # normalize pretrained tags - return tag.lower().replace('-', '_') - - -def list_pretrained(as_str: bool = False): - """ returns list of pretrained models - Returns a tuple (model_name, pretrain_tag) by default or 'name:tag' if as_str == True - """ - return [':'.join([k, t]) if as_str else (k, t) for k in _PRETRAINED.keys() for t in _PRETRAINED[k].keys()] - - -def list_pretrained_models_by_tag(tag: str): - """ return all models having the specified pretrain tag """ - models = [] - tag = _clean_tag(tag) - for k in _PRETRAINED.keys(): - if tag in _PRETRAINED[k]: - models.append(k) - return models - - -def list_pretrained_tags_by_model(model: str): - """ return all pretrain tags for the specified model architecture """ - tags = [] - if model in _PRETRAINED: - tags.extend(_PRETRAINED[model].keys()) - return tags - - -def is_pretrained_cfg(model: str, tag: str): - if model not in _PRETRAINED: - return False - return _clean_tag(tag) in _PRETRAINED[model] - - -def get_pretrained_cfg(model: str, tag: str): - if model not in _PRETRAINED: - return {} - model_pretrained = _PRETRAINED[model] - return model_pretrained.get(_clean_tag(tag), {}) - - -def get_pretrained_url(model: str, tag: str): - cfg = get_pretrained_cfg(model, _clean_tag(tag)) - return cfg.get('url', '') - - -def download_pretrained_from_url( - url: str, - cache_dir: Union[str, None] = None, -): - if not cache_dir: - cache_dir = os.path.expanduser("~/.cache/clip") - os.makedirs(cache_dir, exist_ok=True) - filename = os.path.basename(url) - - if 'openaipublic' in url: - expected_sha256 = url.split("/")[-2] - elif 'mlfoundations' in url: - expected_sha256 = os.path.splitext(filename)[0].split("-")[-1] - else: - expected_sha256 = '' - - download_target = os.path.join(cache_dir, filename) - - if os.path.exists(download_target) and not os.path.isfile(download_target): - raise RuntimeError(f"{download_target} exists and is not a regular 
file") - - if os.path.isfile(download_target): - if expected_sha256: - if hashlib.sha256(open(download_target, "rb").read()).hexdigest().startswith(expected_sha256): - return download_target - else: - warnings.warn(f"{download_target} exists, but the SHA256 checksum does not match; re-downloading the file") - else: - return download_target - - with urllib.request.urlopen(url) as source, open(download_target, "wb") as output: - with tqdm(total=int(source.headers.get("Content-Length")), ncols=80, unit='iB', unit_scale=True) as loop: - while True: - buffer = source.read(8192) - if not buffer: - break - - output.write(buffer) - loop.update(len(buffer)) - - if expected_sha256 and not hashlib.sha256(open(download_target, "rb").read()).hexdigest().startswith(expected_sha256): - raise RuntimeError(f"Model has been downloaded but the SHA256 checksum does not not match") - - return download_target - - -def has_hf_hub(necessary=False): - if not _has_hf_hub and necessary: - # if no HF Hub module installed, and it is necessary to continue, raise error - raise RuntimeError( - 'Hugging Face hub model specified but package not installed. 
Run `pip install huggingface_hub`.') - return _has_hf_hub - - -def download_pretrained_from_hf( - model_id: str, - filename: str = 'open_clip_pytorch_model.bin', - revision=None, - cache_dir: Union[str, None] = None, -): - has_hf_hub(True) - cached_file = hf_hub_download(model_id, filename, revision=revision, cache_dir=cache_dir) - return cached_file - - -def download_pretrained( - cfg: Dict, - force_hf_hub: bool = False, - cache_dir: Union[str, None] = None, -): - target = '' - if not cfg: - return target - - download_url = cfg.get('url', '') - download_hf_hub = cfg.get('hf_hub', '') - if download_hf_hub and force_hf_hub: - # use HF hub even if url exists - download_url = '' - - if download_url: - target = download_pretrained_from_url(download_url, cache_dir=cache_dir) - elif download_hf_hub: - has_hf_hub(True) - # we assume the hf_hub entries in pretrained config combine model_id + filename in - # 'org/model_name/filename.pt' form. To specify just the model id w/o filename and - # use 'open_clip_pytorch_model.bin' default, there must be a trailing slash 'org/model_name/'. 
- model_id, filename = os.path.split(download_hf_hub) - if filename: - target = download_pretrained_from_hf(model_id, filename=filename, cache_dir=cache_dir) - else: - target = download_pretrained_from_hf(model_id, cache_dir=cache_dir) - - return target diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/push_to_hf_hub.py b/diffsynth/extensions/ImageQualityMetric/open_clip/push_to_hf_hub.py deleted file mode 100644 index 23c0631..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/push_to_hf_hub.py +++ /dev/null @@ -1,243 +0,0 @@ -import argparse -import json -from pathlib import Path -from tempfile import TemporaryDirectory -from typing import Optional, Tuple - -import torch - -try: - from huggingface_hub import ( - create_repo, - get_hf_file_metadata, - hf_hub_download, - hf_hub_url, - repo_type_and_id_from_hf_id, - upload_folder, - ) - from huggingface_hub.utils import EntryNotFoundError - _has_hf_hub = True -except ImportError: - _has_hf_hub = False - -from .factory import create_model_from_pretrained, get_model_config, get_tokenizer -from .tokenizer import HFTokenizer - - -def save_config_for_hf( - model, - config_path: str, - model_config: Optional[dict] -): - preprocess_cfg = { - 'mean': model.visual.image_mean, - 'std': model.visual.image_std, - } - hf_config = { - 'model_cfg': model_config, - 'preprocess_cfg': preprocess_cfg, - } - - with config_path.open('w') as f: - json.dump(hf_config, f, indent=2) - - -def save_for_hf( - model, - tokenizer: HFTokenizer, - model_config: dict, - save_directory: str, - weights_filename='open_clip_pytorch_model.bin', - config_filename='open_clip_config.json', -): - save_directory = Path(save_directory) - save_directory.mkdir(exist_ok=True, parents=True) - - weights_path = save_directory / weights_filename - torch.save(model.state_dict(), weights_path) - - tokenizer.save_pretrained(save_directory) - - config_path = save_directory / config_filename - save_config_for_hf(model, config_path, 
model_config=model_config) - - -def push_to_hf_hub( - model, - tokenizer, - model_config: Optional[dict], - repo_id: str, - commit_message: str = 'Add model', - token: Optional[str] = None, - revision: Optional[str] = None, - private: bool = False, - create_pr: bool = False, - model_card: Optional[dict] = None, -): - if not isinstance(tokenizer, HFTokenizer): - # default CLIP tokenizers use https://huggingface.co/openai/clip-vit-large-patch14 - tokenizer = HFTokenizer('openai/clip-vit-large-patch14') - - # Create repo if it doesn't exist yet - repo_url = create_repo(repo_id, token=token, private=private, exist_ok=True) - - # Infer complete repo_id from repo_url - # Can be different from the input `repo_id` if repo_owner was implicit - _, repo_owner, repo_name = repo_type_and_id_from_hf_id(repo_url) - repo_id = f"{repo_owner}/{repo_name}" - - # Check if README file already exist in repo - try: - get_hf_file_metadata(hf_hub_url(repo_id=repo_id, filename="README.md", revision=revision)) - has_readme = True - except EntryNotFoundError: - has_readme = False - - # Dump model and push to Hub - with TemporaryDirectory() as tmpdir: - # Save model weights and config. 
- save_for_hf( - model, - tokenizer=tokenizer, - model_config=model_config, - save_directory=tmpdir, - ) - - # Add readme if it does not exist - if not has_readme: - model_card = model_card or {} - model_name = repo_id.split('/')[-1] - readme_path = Path(tmpdir) / "README.md" - readme_text = generate_readme(model_card, model_name) - readme_path.write_text(readme_text) - - # Upload model and return - return upload_folder( - repo_id=repo_id, - folder_path=tmpdir, - revision=revision, - create_pr=create_pr, - commit_message=commit_message, - ) - - -def push_pretrained_to_hf_hub( - model_name, - pretrained: str, - repo_id: str, - image_mean: Optional[Tuple[float, ...]] = None, - image_std: Optional[Tuple[float, ...]] = None, - commit_message: str = 'Add model', - token: Optional[str] = None, - revision: Optional[str] = None, - private: bool = False, - create_pr: bool = False, - model_card: Optional[dict] = None, -): - model, preprocess_eval = create_model_from_pretrained( - model_name, - pretrained=pretrained, - image_mean=image_mean, - image_std=image_std, - ) - - model_config = get_model_config(model_name) - assert model_config - - tokenizer = get_tokenizer(model_name) - - push_to_hf_hub( - model=model, - tokenizer=tokenizer, - model_config=model_config, - repo_id=repo_id, - commit_message=commit_message, - token=token, - revision=revision, - private=private, - create_pr=create_pr, - model_card=model_card, - ) - - -def generate_readme(model_card: dict, model_name: str): - readme_text = "---\n" - readme_text += "tags:\n- zero-shot-image-classification\n- clip\n" - readme_text += "library_tag: open_clip\n" - readme_text += f"license: {model_card.get('license', 'mit')}\n" - if 'details' in model_card and 'Dataset' in model_card['details']: - readme_text += 'datasets:\n' - readme_text += f"- {model_card['details']['Dataset'].lower()}\n" - readme_text += "---\n" - readme_text += f"# Model card for {model_name}\n" - if 'description' in model_card: - readme_text += 
f"\n{model_card['description']}\n" - if 'details' in model_card: - readme_text += f"\n## Model Details\n" - for k, v in model_card['details'].items(): - if isinstance(v, (list, tuple)): - readme_text += f"- **{k}:**\n" - for vi in v: - readme_text += f" - {vi}\n" - elif isinstance(v, dict): - readme_text += f"- **{k}:**\n" - for ki, vi in v.items(): - readme_text += f" - {ki}: {vi}\n" - else: - readme_text += f"- **{k}:** {v}\n" - if 'usage' in model_card: - readme_text += f"\n## Model Usage\n" - readme_text += model_card['usage'] - readme_text += '\n' - - if 'comparison' in model_card: - readme_text += f"\n## Model Comparison\n" - readme_text += model_card['comparison'] - readme_text += '\n' - - if 'citation' in model_card: - readme_text += f"\n## Citation\n" - if not isinstance(model_card['citation'], (list, tuple)): - citations = [model_card['citation']] - else: - citations = model_card['citation'] - for c in citations: - readme_text += f"```bibtex\n{c}\n```\n" - - return readme_text - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="Push to Hugging Face Hub") - parser.add_argument( - "--model", type=str, help="Name of the model to use.", - ) - parser.add_argument( - "--pretrained", type=str, - help="Use a pretrained CLIP model weights with the specified tag or file path.", - ) - parser.add_argument( - "--repo-id", type=str, - help="Destination HF Hub repo-id ie 'organization/model_id'.", - ) - parser.add_argument( - '--image-mean', type=float, nargs='+', default=None, metavar='MEAN', - help='Override default image mean value of dataset') - parser.add_argument( - '--image-std', type=float, nargs='+', default=None, metavar='STD', - help='Override default image std deviation of of dataset') - args = parser.parse_args() - - print(f'Saving model {args.model} with pretrained weights {args.pretrained} to Hugging Face Hub at {args.repo_id}') - - # FIXME add support to pass model_card json / template from file via cmd line - - 
push_pretrained_to_hf_hub( - args.model, - args.pretrained, - args.repo_id, - image_mean=args.image_mean, # override image mean/std if trained w/ non defaults - image_std=args.image_std, - ) - - print(f'{args.model} saved.') diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/timm_model.py b/diffsynth/extensions/ImageQualityMetric/open_clip/timm_model.py deleted file mode 100644 index dc71a69..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/timm_model.py +++ /dev/null @@ -1,127 +0,0 @@ -""" timm model adapter - -Wraps timm (https://github.com/rwightman/pytorch-image-models) models for use as a vision tower in CLIP model. -""" -import logging -from collections import OrderedDict - -import torch -import torch.nn as nn - -try: - import timm - from timm.models.layers import Mlp, to_2tuple - try: - # old timm imports < 0.8.1 - from timm.models.layers.attention_pool2d import RotAttentionPool2d - from timm.models.layers.attention_pool2d import AttentionPool2d as AbsAttentionPool2d - except ImportError: - # new timm imports >= 0.8.1 - from timm.layers import RotAttentionPool2d - from timm.layers import AttentionPool2d as AbsAttentionPool2d -except ImportError: - timm = None - -from .utils import freeze_batch_norm_2d - - -class TimmModel(nn.Module): - """ timm model adapter - # FIXME this adapter is a work in progress, may change in ways that break weight compat - """ - - def __init__( - self, - model_name, - embed_dim, - image_size=224, - pool='avg', - proj='linear', - proj_bias=False, - drop=0., - drop_path=None, - pretrained=False, - ): - super().__init__() - if timm is None: - raise RuntimeError("Please `pip install timm` to use timm models.") - - self.image_size = to_2tuple(image_size) - timm_kwargs = {} - if drop_path is not None: - timm_kwargs['drop_path_rate'] = drop_path - self.trunk = timm.create_model(model_name, pretrained=pretrained, **timm_kwargs) - feat_size = self.trunk.default_cfg.get('pool_size', None) - feature_ndim = 1 if not 
feat_size else 2 - if pool in ('abs_attn', 'rot_attn'): - assert feature_ndim == 2 - # if attn pooling used, remove both classifier and default pool - self.trunk.reset_classifier(0, global_pool='') - else: - # reset global pool if pool config set, otherwise leave as network default - reset_kwargs = dict(global_pool=pool) if pool else {} - self.trunk.reset_classifier(0, **reset_kwargs) - prev_chs = self.trunk.num_features - - head_layers = OrderedDict() - if pool == 'abs_attn': - head_layers['pool'] = AbsAttentionPool2d(prev_chs, feat_size=feat_size, out_features=embed_dim) - prev_chs = embed_dim - elif pool == 'rot_attn': - head_layers['pool'] = RotAttentionPool2d(prev_chs, out_features=embed_dim) - prev_chs = embed_dim - else: - assert proj, 'projection layer needed if non-attention pooling is used.' - - # NOTE attention pool ends with a projection layer, so proj should usually be set to '' if such pooling is used - if proj == 'linear': - head_layers['drop'] = nn.Dropout(drop) - head_layers['proj'] = nn.Linear(prev_chs, embed_dim, bias=proj_bias) - elif proj == 'mlp': - head_layers['mlp'] = Mlp(prev_chs, 2 * embed_dim, embed_dim, drop=(drop, 0), bias=(True, proj_bias)) - - self.head = nn.Sequential(head_layers) - - def lock(self, unlocked_groups=0, freeze_bn_stats=False): - """ lock modules - Args: - unlocked_groups (int): leave last n layer groups unlocked (default: 0) - """ - if not unlocked_groups: - # lock full model - for param in self.trunk.parameters(): - param.requires_grad = False - if freeze_bn_stats: - freeze_batch_norm_2d(self.trunk) - else: - # NOTE: partial freeze requires latest timm (master) branch and is subject to change - try: - # FIXME import here until API stable and in an official release - from timm.models.helpers import group_parameters, group_modules - except ImportError: - raise RuntimeError( - 'Please install latest timm `pip install git+https://github.com/rwightman/pytorch-image-models`') - matcher = self.trunk.group_matcher() - gparams 
= group_parameters(self.trunk, matcher) - max_layer_id = max(gparams.keys()) - max_layer_id = max_layer_id - unlocked_groups - for group_idx in range(max_layer_id + 1): - group = gparams[group_idx] - for param in group: - self.trunk.get_parameter(param).requires_grad = False - if freeze_bn_stats: - gmodules = group_modules(self.trunk, matcher, reverse=True) - gmodules = {k for k, v in gmodules.items() if v <= max_layer_id} - freeze_batch_norm_2d(self.trunk, gmodules) - - @torch.jit.ignore - def set_grad_checkpointing(self, enable=True): - try: - self.trunk.set_grad_checkpointing(enable) - except Exception as e: - logging.warning('grad checkpointing not supported for this timm image tower, continuing without...') - - def forward(self, x): - x = self.trunk(x) - x = self.head(x) - return x diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/tokenizer.py b/diffsynth/extensions/ImageQualityMetric/open_clip/tokenizer.py deleted file mode 100644 index 22ec488..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/tokenizer.py +++ /dev/null @@ -1,211 +0,0 @@ -""" CLIP tokenizer - -Copied from https://github.com/openai/CLIP. Originally MIT License, Copyright (c) 2021 OpenAI. -""" -import gzip -import html -import os -from functools import lru_cache -from typing import Union, List - -import ftfy -import regex as re -import torch - -# https://stackoverflow.com/q/62691279 -import os -os.environ["TOKENIZERS_PARALLELISM"] = "false" - - -@lru_cache() -def default_bpe(): - current_dir = os.path.dirname(os.path.abspath(__file__)) - project_root = os.path.abspath(os.path.join(current_dir, '../../../../')) - quality_metric_path = os.path.join(project_root, 'models', 'QualityMetric') - return os.path.join(quality_metric_path, "bpe_simple_vocab_16e6.txt.gz") - - -@lru_cache() -def bytes_to_unicode(): - """ - Returns list of utf-8 byte and a corresponding list of unicode strings. - The reversible bpe codes work on unicode strings. 
- This means you need a large # of unicode characters in your vocab if you want to avoid UNKs. - When you're at something like a 10B token dataset you end up needing around 5K for decent coverage. - This is a significant percentage of your normal, say, 32K bpe vocab. - To avoid that, we want lookup tables between utf-8 bytes and unicode strings. - And avoids mapping to whitespace/control characters the bpe code barfs on. - """ - bs = list(range(ord("!"), ord("~")+1))+list(range(ord("¡"), ord("¬")+1))+list(range(ord("®"), ord("ÿ")+1)) - cs = bs[:] - n = 0 - for b in range(2**8): - if b not in bs: - bs.append(b) - cs.append(2**8+n) - n += 1 - cs = [chr(n) for n in cs] - return dict(zip(bs, cs)) - - -def get_pairs(word): - """Return set of symbol pairs in a word. - Word is represented as tuple of symbols (symbols being variable-length strings). - """ - pairs = set() - prev_char = word[0] - for char in word[1:]: - pairs.add((prev_char, char)) - prev_char = char - return pairs - - -def basic_clean(text): - text = ftfy.fix_text(text) - text = html.unescape(html.unescape(text)) - return text.strip() - - -def whitespace_clean(text): - text = re.sub(r'\s+', ' ', text) - text = text.strip() - return text - - -class SimpleTokenizer(object): - def __init__(self, bpe_path: str = default_bpe(), special_tokens=None): - self.byte_encoder = bytes_to_unicode() - self.byte_decoder = {v: k for k, v in self.byte_encoder.items()} - merges = gzip.open(bpe_path).read().decode("utf-8").split('\n') - merges = merges[1:49152-256-2+1] - merges = [tuple(merge.split()) for merge in merges] - vocab = list(bytes_to_unicode().values()) - vocab = vocab + [v+'' for v in vocab] - for merge in merges: - vocab.append(''.join(merge)) - if not special_tokens: - special_tokens = ['', ''] - else: - special_tokens = ['', ''] + special_tokens - vocab.extend(special_tokens) - self.encoder = dict(zip(vocab, range(len(vocab)))) - self.decoder = {v: k for k, v in self.encoder.items()} - self.bpe_ranks = 
dict(zip(merges, range(len(merges)))) - self.cache = {t:t for t in special_tokens} - special = "|".join(special_tokens) - self.pat = re.compile(special + r"""|'s|'t|'re|'ve|'m|'ll|'d|[\p{L}]+|[\p{N}]|[^\s\p{L}\p{N}]+""", re.IGNORECASE) - - self.vocab_size = len(self.encoder) - self.all_special_ids = [self.encoder[t] for t in special_tokens] - - def bpe(self, token): - if token in self.cache: - return self.cache[token] - word = tuple(token[:-1]) + ( token[-1] + '',) - pairs = get_pairs(word) - - if not pairs: - return token+'' - - while True: - bigram = min(pairs, key = lambda pair: self.bpe_ranks.get(pair, float('inf'))) - if bigram not in self.bpe_ranks: - break - first, second = bigram - new_word = [] - i = 0 - while i < len(word): - try: - j = word.index(first, i) - new_word.extend(word[i:j]) - i = j - except: - new_word.extend(word[i:]) - break - - if word[i] == first and i < len(word)-1 and word[i+1] == second: - new_word.append(first+second) - i += 2 - else: - new_word.append(word[i]) - i += 1 - new_word = tuple(new_word) - word = new_word - if len(word) == 1: - break - else: - pairs = get_pairs(word) - word = ' '.join(word) - self.cache[token] = word - return word - - def encode(self, text): - bpe_tokens = [] - text = whitespace_clean(basic_clean(text)).lower() - for token in re.findall(self.pat, text): - token = ''.join(self.byte_encoder[b] for b in token.encode('utf-8')) - bpe_tokens.extend(self.encoder[bpe_token] for bpe_token in self.bpe(token).split(' ')) - return bpe_tokens - - def decode(self, tokens): - text = ''.join([self.decoder[token] for token in tokens]) - text = bytearray([self.byte_decoder[c] for c in text]).decode('utf-8', errors="replace").replace('', ' ') - return text - - def __call__(self, texts: Union[str, List[str]], context_length: int = 77) -> torch.LongTensor: - """ - Returns the tokenized representation of given input string(s) - - Parameters - ---------- - texts : Union[str, List[str]] - An input string or a list of input strings 
to tokenize - context_length : int - The context length to use; all CLIP models use 77 as the context length - - Returns - ------- - A two-dimensional tensor containing the resulting tokens, shape = [number of input strings, context_length] - """ - if isinstance(texts, str): - texts = [texts] - - sot_token = self.encoder[""] - eot_token = self.encoder[""] - all_tokens = [[sot_token] + self.encode(text) + [eot_token] for text in texts] - result = torch.zeros(len(all_tokens), context_length, dtype=torch.long) - - for i, tokens in enumerate(all_tokens): - if len(tokens) > context_length: - tokens = tokens[:context_length] # Truncate - tokens[-1] = eot_token - result[i, :len(tokens)] = torch.tensor(tokens) - - return result - - - -class HFTokenizer: - """HuggingFace tokenizer wrapper""" - - def __init__(self, tokenizer_name: str): - from transformers import AutoTokenizer - self.tokenizer = AutoTokenizer.from_pretrained(tokenizer_name) - - def save_pretrained(self, dest): - self.tokenizer.save_pretrained(dest) - - def __call__(self, texts: Union[str, List[str]], context_length: int = 77) -> torch.Tensor: - # same cleaning as for default tokenizer, except lowercasing - # adding lower (for case-sensitive tokenizers) will make it more robust but less sensitive to nuance - if isinstance(texts, str): - texts = [texts] - texts = [whitespace_clean(basic_clean(text)) for text in texts] - input_ids = self.tokenizer( - texts, - return_tensors='pt', - max_length=context_length, - padding='max_length', - truncation=True, - ).input_ids - return input_ids diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/transform.py b/diffsynth/extensions/ImageQualityMetric/open_clip/transform.py deleted file mode 100644 index fe4e21f..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/transform.py +++ /dev/null @@ -1,216 +0,0 @@ -import warnings -from dataclasses import dataclass, asdict -from typing import Any, Dict, Optional, Sequence, Tuple, Union - -import torch 
-import torch.nn as nn -import torchvision.transforms.functional as F -from functools import partial -from torchvision.transforms import Normalize, Compose, RandomResizedCrop, InterpolationMode, ToTensor, Resize, \ - CenterCrop - -from .constants import OPENAI_DATASET_MEAN, OPENAI_DATASET_STD - - -@dataclass -class AugmentationCfg: - scale: Tuple[float, float] = (0.9, 1.0) - ratio: Optional[Tuple[float, float]] = None - color_jitter: Optional[Union[float, Tuple[float, float, float]]] = None - interpolation: Optional[str] = None - re_prob: Optional[float] = None - re_count: Optional[int] = None - use_timm: bool = False - - -class ResizeMaxSize(nn.Module): - - def __init__(self, max_size, interpolation=InterpolationMode.BICUBIC, fn='max', fill=0): - super().__init__() - if not isinstance(max_size, int): - raise TypeError(f"Size should be int. Got {type(max_size)}") - self.max_size = max_size - self.interpolation = interpolation - self.fn = min if fn == 'min' else min - self.fill = fill - - def forward(self, img): - if isinstance(img, torch.Tensor): - height, width = img.shape[1:] - else: - width, height = img.size - scale = self.max_size / float(max(height, width)) - if scale != 1.0: - new_size = tuple(round(dim * scale) for dim in (height, width)) - img = F.resize(img, new_size, self.interpolation) - pad_h = self.max_size - new_size[0] - pad_w = self.max_size - new_size[1] - img = F.pad(img, padding=[pad_w//2, pad_h//2, pad_w - pad_w//2, pad_h - pad_h//2], fill=self.fill) - return img - - -def _convert_to_rgb_or_rgba(image): - if image.mode == 'RGBA': - return image - else: - return image.convert('RGB') - -# def transform_and_split(merged, transform_fn, normalize_fn): -# transformed = transform_fn(merged) -# crop_img, crop_label = torch.split(transformed, [3,1], dim=0) - -# # crop_img = _convert_to_rgb(crop_img) -# crop_img = normalize_fn(ToTensor()(crop_img)) -# return crop_img, crop_label - -class MaskAwareNormalize(nn.Module): - def __init__(self, mean, std): - 
super().__init__() - self.normalize = Normalize(mean=mean, std=std) - - def forward(self, tensor): - if tensor.shape[0] == 4: - return torch.cat([self.normalize(tensor[:3]), tensor[3:]], dim=0) - else: - return self.normalize(tensor) - -def image_transform( - image_size: int, - is_train: bool, - mean: Optional[Tuple[float, ...]] = None, - std: Optional[Tuple[float, ...]] = None, - resize_longest_max: bool = False, - fill_color: int = 0, - aug_cfg: Optional[Union[Dict[str, Any], AugmentationCfg]] = None, -): - mean = mean or OPENAI_DATASET_MEAN - if not isinstance(mean, (list, tuple)): - mean = (mean,) * 3 - - std = std or OPENAI_DATASET_STD - if not isinstance(std, (list, tuple)): - std = (std,) * 3 - - if isinstance(image_size, (list, tuple)) and image_size[0] == image_size[1]: - # for square size, pass size as int so that Resize() uses aspect preserving shortest edge - image_size = image_size[0] - - if isinstance(aug_cfg, dict): - aug_cfg = AugmentationCfg(**aug_cfg) - else: - aug_cfg = aug_cfg or AugmentationCfg() - normalize = MaskAwareNormalize(mean=mean, std=std) - if is_train: - aug_cfg_dict = {k: v for k, v in asdict(aug_cfg).items() if v is not None} - use_timm = aug_cfg_dict.pop('use_timm', False) - if use_timm: - assert False, "not tested for augmentation with mask" - from timm.data import create_transform # timm can still be optional - if isinstance(image_size, (tuple, list)): - assert len(image_size) >= 2 - input_size = (3,) + image_size[-2:] - else: - input_size = (3, image_size, image_size) - # by default, timm aug randomly alternates bicubic & bilinear for better robustness at inference time - aug_cfg_dict.setdefault('interpolation', 'random') - aug_cfg_dict.setdefault('color_jitter', None) # disable by default - train_transform = create_transform( - input_size=input_size, - is_training=True, - hflip=0., - mean=mean, - std=std, - re_mode='pixel', - **aug_cfg_dict, - ) - else: - train_transform = Compose([ - _convert_to_rgb_or_rgba, - ToTensor(), - 
RandomResizedCrop( - image_size, - scale=aug_cfg_dict.pop('scale'), - interpolation=InterpolationMode.BICUBIC, - ), - normalize, - ]) - if aug_cfg_dict: - warnings.warn(f'Unused augmentation cfg items, specify `use_timm` to use ({list(aug_cfg_dict.keys())}).') - return train_transform - else: - transforms = [ - _convert_to_rgb_or_rgba, - ToTensor(), - ] - if resize_longest_max: - transforms.extend([ - ResizeMaxSize(image_size, fill=fill_color) - ]) - else: - transforms.extend([ - Resize(image_size, interpolation=InterpolationMode.BICUBIC), - CenterCrop(image_size), - ]) - transforms.extend([ - normalize, - ]) - return Compose(transforms) - - -# def image_transform_region( -# image_size: int, -# is_train: bool, -# mean: Optional[Tuple[float, ...]] = None, -# std: Optional[Tuple[float, ...]] = None, -# resize_longest_max: bool = False, -# fill_color: int = 0, -# aug_cfg: Optional[Union[Dict[str, Any], AugmentationCfg]] = None, -# ): -# mean = mean or OPENAI_DATASET_MEAN -# if not isinstance(mean, (list, tuple)): -# mean = (mean,) * 3 - -# std = std or OPENAI_DATASET_STD -# if not isinstance(std, (list, tuple)): -# std = (std,) * 3 - -# if isinstance(image_size, (list, tuple)) and image_size[0] == image_size[1]: -# # for square size, pass size as int so that Resize() uses aspect preserving shortest edge -# image_size = image_size[0] - -# if isinstance(aug_cfg, dict): -# aug_cfg = AugmentationCfg(**aug_cfg) -# else: -# aug_cfg = aug_cfg or AugmentationCfg() -# normalize = Normalize(mean=mean, std=std) -# if is_train: -# aug_cfg_dict = {k: v for k, v in asdict(aug_cfg).items() if v is not None} - -# transform = Compose([ -# RandomResizedCrop( -# image_size, -# scale=aug_cfg_dict.pop('scale'), -# interpolation=InterpolationMode.BICUBIC, -# ), -# ]) -# train_transform = Compose([ -# partial(transform_and_split, transform_fn=transform,normalize_fn=normalize) -# ]) -# return train_transform -# else: -# if resize_longest_max: -# transform = [ -# ResizeMaxSize(image_size, 
fill=fill_color) -# ] -# val_transform = Compose([ -# partial(transform_and_split, transform_fn=transform,normalize_fn=normalize), -# ]) -# else: -# transform = [ -# Resize(image_size, interpolation=InterpolationMode.BICUBIC), -# CenterCrop(image_size), -# ] -# val_transform = Compose([ -# partial(transform_and_split, transform_fn=transform,normalize_fn=normalize), -# ]) -# return val_transform \ No newline at end of file diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/transformer.py b/diffsynth/extensions/ImageQualityMetric/open_clip/transformer.py deleted file mode 100644 index 7465c1b..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/transformer.py +++ /dev/null @@ -1,727 +0,0 @@ -from collections import OrderedDict -import math -from typing import Callable, Optional, Sequence, Tuple - -import torch -from torch import nn -from torch.nn import functional as F -from torch.utils.checkpoint import checkpoint - -from .utils import to_2tuple - - -class LayerNormFp32(nn.LayerNorm): - """Subclass torch's LayerNorm to handle fp16 (by casting to float32 and back).""" - - def forward(self, x: torch.Tensor): - orig_type = x.dtype - x = F.layer_norm(x.to(torch.float32), self.normalized_shape, self.weight, self.bias, self.eps) - return x.to(orig_type) - - -class LayerNorm(nn.LayerNorm): - """Subclass torch's LayerNorm (with cast back to input dtype).""" - - def forward(self, x: torch.Tensor): - orig_type = x.dtype - x = F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps) - return x.to(orig_type) - - -class QuickGELU(nn.Module): - # NOTE This is slower than nn.GELU or nn.SiLU and uses more GPU memory - def forward(self, x: torch.Tensor): - return x * torch.sigmoid(1.702 * x) - - -class LayerScale(nn.Module): - def __init__(self, dim, init_values=1e-5, inplace=False): - super().__init__() - self.inplace = inplace - self.gamma = nn.Parameter(init_values * torch.ones(dim)) - - def forward(self, x): - return x.mul_(self.gamma) if 
self.inplace else x * self.gamma - - -class PatchDropout(nn.Module): - """ - https://arxiv.org/abs/2212.00794 - """ - - def __init__(self, prob, exclude_first_token=True): - super().__init__() - assert 0 <= prob < 1. - self.prob = prob - self.exclude_first_token = exclude_first_token # exclude CLS token - - def forward(self, x): - if not self.training or self.prob == 0.: - return x - - if self.exclude_first_token: - cls_tokens, x = x[:, :1], x[:, 1:] - else: - cls_tokens = torch.jit.annotate(torch.Tensor, x[:, :1]) - - batch = x.size()[0] - num_tokens = x.size()[1] - - batch_indices = torch.arange(batch) - batch_indices = batch_indices[..., None] - - keep_prob = 1 - self.prob - num_patches_keep = max(1, int(num_tokens * keep_prob)) - - rand = torch.randn(batch, num_tokens) - patch_indices_keep = rand.topk(num_patches_keep, dim=-1).indices - - x = x[batch_indices, patch_indices_keep] - - if self.exclude_first_token: - x = torch.cat((cls_tokens, x), dim=1) - - return x - - -class Attention(nn.Module): - def __init__( - self, - dim, - num_heads=8, - qkv_bias=True, - scaled_cosine=False, - scale_heads=False, - logit_scale_max=math.log(1. / 0.01), - attn_drop=0., - proj_drop=0. 
- ): - super().__init__() - self.scaled_cosine = scaled_cosine - self.scale_heads = scale_heads - assert dim % num_heads == 0, 'dim should be divisible by num_heads' - self.num_heads = num_heads - self.head_dim = dim // num_heads - self.scale = self.head_dim ** -0.5 - self.logit_scale_max = logit_scale_max - - # keeping in_proj in this form (instead of nn.Linear) to match weight scheme of original - self.in_proj_weight = nn.Parameter(torch.randn((dim * 3, dim)) * self.scale) - if qkv_bias: - self.in_proj_bias = nn.Parameter(torch.zeros(dim * 3)) - else: - self.in_proj_bias = None - - if self.scaled_cosine: - self.logit_scale = nn.Parameter(torch.log(10 * torch.ones((num_heads, 1, 1)))) - else: - self.logit_scale = None - self.attn_drop = nn.Dropout(attn_drop) - if self.scale_heads: - self.head_scale = nn.Parameter(torch.ones((num_heads, 1, 1))) - else: - self.head_scale = None - self.out_proj = nn.Linear(dim, dim) - self.out_drop = nn.Dropout(proj_drop) - - def forward(self, x, attn_mask: Optional[torch.Tensor] = None): - L, N, C = x.shape - q, k, v = F.linear(x, self.in_proj_weight, self.in_proj_bias).chunk(3, dim=-1) - q = q.contiguous().view(L, N * self.num_heads, -1).transpose(0, 1) - k = k.contiguous().view(L, N * self.num_heads, -1).transpose(0, 1) - v = v.contiguous().view(L, N * self.num_heads, -1).transpose(0, 1) - - if self.logit_scale is not None: - attn = torch.bmm(F.normalize(q, dim=-1), F.normalize(k, dim=-1).transpose(-1, -2)) - logit_scale = torch.clamp(self.logit_scale, max=self.logit_scale_max).exp() - attn = attn.view(N, self.num_heads, L, L) * logit_scale - attn = attn.view(-1, L, L) - else: - q = q * self.scale - attn = torch.bmm(q, k.transpose(-1, -2)) - - if attn_mask is not None: - if attn_mask.dtype == torch.bool: - new_attn_mask = torch.zeros_like(attn_mask, dtype=q.dtype) - new_attn_mask.masked_fill_(attn_mask, float("-inf")) - attn_mask = new_attn_mask - attn += attn_mask - - attn = attn.softmax(dim=-1) - attn = self.attn_drop(attn) - - 
x = torch.bmm(attn, v) - if self.head_scale is not None: - x = x.view(N, self.num_heads, L, C) * self.head_scale - x = x.view(-1, L, C) - x = x.transpose(0, 1).reshape(L, N, C) - x = self.out_proj(x) - x = self.out_drop(x) - return x - - -class AttentionalPooler(nn.Module): - def __init__( - self, - d_model: int, - context_dim: int, - n_head: int = 8, - n_queries: int = 256, - norm_layer: Callable = LayerNorm - ): - super().__init__() - self.query = nn.Parameter(torch.randn(n_queries, d_model)) - self.attn = nn.MultiheadAttention(d_model, n_head, kdim=context_dim, vdim=context_dim) - self.ln_q = norm_layer(d_model) - self.ln_k = norm_layer(context_dim) - - def forward(self, x: torch.Tensor): - x = self.ln_k(x).permute(1, 0, 2) # NLD -> LND - N = x.shape[1] - q = self.ln_q(self.query) - out = self.attn(self._repeat(q, N), x, x, need_weights=False)[0] - return out.permute(1, 0, 2) # LND -> NLD - - def _repeat(self, query, N: int): - return query.unsqueeze(1).repeat(1, N, 1) - - -class ResidualAttentionBlock(nn.Module): - def __init__( - self, - d_model: int, - n_head: int, - mlp_ratio: float = 4.0, - ls_init_value: float = None, - act_layer: Callable = nn.GELU, - norm_layer: Callable = LayerNorm, - is_cross_attention: bool = False, - ): - super().__init__() - - self.ln_1 = norm_layer(d_model) - self.attn = nn.MultiheadAttention(d_model, n_head) - self.ls_1 = LayerScale(d_model, ls_init_value) if ls_init_value is not None else nn.Identity() - if is_cross_attention: - self.ln_1_kv = norm_layer(d_model) - - self.ln_2 = norm_layer(d_model) - mlp_width = int(d_model * mlp_ratio) - self.mlp = nn.Sequential(OrderedDict([ - ("c_fc", nn.Linear(d_model, mlp_width)), - ("gelu", act_layer()), - ("c_proj", nn.Linear(mlp_width, d_model)) - ])) - self.ls_2 = LayerScale(d_model, ls_init_value) if ls_init_value is not None else nn.Identity() - - def attention( - self, - q_x: torch.Tensor, - k_x: Optional[torch.Tensor] = None, - v_x: Optional[torch.Tensor] = None, - attn_mask: 
Optional[torch.Tensor] = None, - ): - k_x = k_x if k_x is not None else q_x - v_x = v_x if v_x is not None else q_x - - attn_mask = attn_mask.to(q_x.dtype) if attn_mask is not None else None - return self.attn( - q_x, k_x, v_x, need_weights=False, attn_mask=attn_mask - )[0] - - def forward( - self, - q_x: torch.Tensor, - k_x: Optional[torch.Tensor] = None, - v_x: Optional[torch.Tensor] = None, - attn_mask: Optional[torch.Tensor] = None, - ): - k_x = self.ln_1_kv(k_x) if hasattr(self, "ln_1_kv") and k_x is not None else None - v_x = self.ln_1_kv(v_x) if hasattr(self, "ln_1_kv") and v_x is not None else None - - x = q_x + self.ls_1(self.attention(q_x=self.ln_1(q_x), k_x=k_x, v_x=v_x, attn_mask=attn_mask)) - x = x + self.ls_2(self.mlp(self.ln_2(x))) - return x - - -class CustomResidualAttentionBlock(nn.Module): - def __init__( - self, - d_model: int, - n_head: int, - mlp_ratio: float = 4.0, - ls_init_value: float = None, - act_layer: Callable = nn.GELU, - norm_layer: Callable = LayerNorm, - scale_cosine_attn: bool = False, - scale_heads: bool = False, - scale_attn: bool = False, - scale_fc: bool = False, - ): - super().__init__() - - self.ln_1 = norm_layer(d_model) - self.attn = Attention( - d_model, n_head, - scaled_cosine=scale_cosine_attn, - scale_heads=scale_heads, - ) - self.ln_attn = norm_layer(d_model) if scale_attn else nn.Identity() - self.ls_1 = LayerScale(d_model, ls_init_value) if ls_init_value is not None else nn.Identity() - - self.ln_2 = norm_layer(d_model) - mlp_width = int(d_model * mlp_ratio) - self.mlp = nn.Sequential(OrderedDict([ - ("c_fc", nn.Linear(d_model, mlp_width)), - ('ln', norm_layer(mlp_width) if scale_fc else nn.Identity()), - ("gelu", act_layer()), - ("c_proj", nn.Linear(mlp_width, d_model)) - ])) - self.ls_2 = LayerScale(d_model, ls_init_value) if ls_init_value is not None else nn.Identity() - - def forward(self, x: torch.Tensor, attn_mask: Optional[torch.Tensor] = None): - x = x + self.ls_1(self.ln_attn(self.attn(self.ln_1(x), 
attn_mask=attn_mask))) - x = x + self.ls_2(self.mlp(self.ln_2(x))) - return x - - -class Transformer(nn.Module): - def __init__( - self, - width: int, - layers: int, - heads: int, - mlp_ratio: float = 4.0, - ls_init_value: float = None, - act_layer: Callable = nn.GELU, - norm_layer: Callable = LayerNorm, - ): - super().__init__() - self.width = width - self.layers = layers - self.grad_checkpointing = False - - self.resblocks = nn.ModuleList([ - ResidualAttentionBlock( - width, heads, mlp_ratio, ls_init_value=ls_init_value, act_layer=act_layer, norm_layer=norm_layer) - for _ in range(layers) - ]) - - def get_cast_dtype(self) -> torch.dtype: - return self.resblocks[0].mlp.c_fc.weight.dtype - - def forward(self, x: torch.Tensor, attn_mask: Optional[torch.Tensor] = None): - for r in self.resblocks: - if self.grad_checkpointing and not torch.jit.is_scripting(): - # TODO: handle kwargs https://github.com/pytorch/pytorch/issues/79887#issuecomment-1161758372 - x = checkpoint(r, x, None, None, attn_mask) - else: - x = r(x, attn_mask=attn_mask) - return x - - -class VisionTransformer(nn.Module): - output_tokens: torch.jit.Final[bool] - - def __init__( - self, - image_size: int, - patch_size: int, - width: int, - layers: int, - heads: int, - mlp_ratio: float, - ls_init_value: float = None, - global_average_pool: bool = False, - attentional_pool: bool = False, - n_queries: int = 256, - attn_pooler_heads: int = 8, - output_dim: int = 512, - patch_dropout: float = 0., - input_patchnorm: bool = False, - act_layer: Callable = nn.GELU, - norm_layer: Callable = LayerNorm, - output_tokens: bool = False - ): - super().__init__() - self.output_tokens = output_tokens - image_height, image_width = self.image_size = to_2tuple(image_size) - patch_height, patch_width = self.patch_size = to_2tuple(patch_size) - self.grid_size = (image_height // patch_height, image_width // patch_width) - self.output_dim = output_dim - - # whether to layernorm each patch, as done in dual patchnorm paper - 
https://arxiv.org/abs/2302.01327v1 - self.input_patchnorm = input_patchnorm - - if input_patchnorm: - patch_input_dim = patch_height * patch_width * 3 - self.patchnorm_pre_ln = LayerNorm(patch_input_dim) - self.conv1 = nn.Linear(patch_input_dim, width) - else: - self.patchnorm_pre_ln = nn.Identity() - self.conv1 = nn.Conv2d(in_channels=3, out_channels=width, kernel_size=patch_size, stride=patch_size, bias=False) - - # class embeddings and positional embeddings - scale = width ** -0.5 - self.class_embedding = nn.Parameter(scale * torch.randn(width)) - self.positional_embedding = nn.Parameter(scale * torch.randn(self.grid_size[0] * self.grid_size[1] + 1, width)) - - # setting a patch_dropout of 0. would mean it is disabled and this function would be the identity fn - self.patch_dropout = PatchDropout(patch_dropout) if patch_dropout > 0. else nn.Identity() - - self.ln_pre = norm_layer(width) - self.transformer = Transformer( - width, - layers, - heads, - mlp_ratio, - ls_init_value=ls_init_value, - act_layer=act_layer, - norm_layer=norm_layer, - ) - - self.global_average_pool = global_average_pool - if attentional_pool: - self.attn_pool = AttentionalPooler(output_dim, width, n_head=attn_pooler_heads, n_queries=n_queries) - self.ln_post = norm_layer(output_dim) - self.proj = nn.Parameter(scale * torch.randn(output_dim, output_dim)) - else: - self.attn_pool = None - self.ln_post = norm_layer(width) - self.proj = nn.Parameter(scale * torch.randn(width, output_dim)) - - self.init_parameters() - - def lock(self, unlocked_groups=0, freeze_bn_stats=False): - for param in self.parameters(): - param.requires_grad = False - - if unlocked_groups != 0: - groups = [ - [ - self.conv1, - self.class_embedding, - self.positional_embedding, - self.ln_pre, - ], - *self.transformer.resblocks[:-1], - [ - self.transformer.resblocks[-1], - self.ln_post, - ], - self.proj, - ] - - def _unlock(x): - if isinstance(x, Sequence): - for g in x: - _unlock(g) - else: - if isinstance(x, 
torch.nn.Parameter): - x.requires_grad = True - else: - for p in x.parameters(): - p.requires_grad = True - - _unlock(groups[-unlocked_groups:]) - - def init_parameters(self): - # FIXME OpenAI CLIP did not define an init for the VisualTransformer - # TODO experiment if default PyTorch init, below, or alternate init is best. - - # nn.init.normal_(self.class_embedding, std=self.scale) - # nn.init.normal_(self.positional_embedding, std=self.scale) - # - # proj_std = (self.transformer.width ** -0.5) * ((2 * self.transformer.layers) ** -0.5) - # attn_std = self.transformer.width ** -0.5 - # fc_std = (2 * self.transformer.width) ** -0.5 - # for block in self.transformer.resblocks: - # nn.init.normal_(block.attn.in_proj_weight, std=attn_std) - # nn.init.normal_(block.attn.out_proj.weight, std=proj_std) - # nn.init.normal_(block.mlp.c_fc.weight, std=fc_std) - # nn.init.normal_(block.mlp.c_proj.weight, std=proj_std) - # - # if self.text_projection is not None: - # nn.init.normal_(self.text_projection, std=self.scale) - pass - - @torch.jit.ignore - def set_grad_checkpointing(self, enable=True): - self.transformer.grad_checkpointing = enable - - def _global_pool(self, x: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: - if self.global_average_pool: - return x.mean(dim=1), x - else: - return x[:, 0], x[:, 1:] - - def forward(self, x: torch.Tensor, skip_pool: bool = False): - - # to patches - whether to use dual patchnorm - https://arxiv.org/abs/2302.01327v1 - if self.input_patchnorm: - # einops - rearrange(x, 'b c (h p1) (w p2) -> b (h w) (c p1 p2)') - x = x.reshape(x.shape[0], x.shape[1], self.grid_size[0], self.patch_size[0], self.grid_size[1], self.patch_size[1]) - x = x.permute(0, 2, 4, 1, 3, 5) - x = x.reshape(x.shape[0], self.grid_size[0] * self.grid_size[1], -1) - x = self.patchnorm_pre_ln(x) - x = self.conv1(x) - else: - x = self.conv1(x) # shape = [*, width, grid, grid] - x = x.reshape(x.shape[0], x.shape[1], -1) # shape = [*, width, grid ** 2] - x = x.permute(0, 
2, 1) # shape = [*, grid ** 2, width] - - # class embeddings and positional embeddings - x = torch.cat( - [self.class_embedding.to(x.dtype) + torch.zeros(x.shape[0], 1, x.shape[-1], dtype=x.dtype, device=x.device), - x], dim=1) # shape = [*, grid ** 2 + 1, width] - x = x + self.positional_embedding.to(x.dtype) - - # a patch_dropout of 0. would mean it is disabled and this function would do nothing but return what was passed in - x = self.patch_dropout(x) - x = self.ln_pre(x) - - x = x.permute(1, 0, 2) # NLD -> LND - x = self.transformer(x) - x = x.permute(1, 0, 2) # LND -> NLD - - if skip_pool: - return x - - if self.attn_pool is not None: - x = self.attn_pool(x) - x = self.ln_post(x) - pooled, tokens = self._global_pool(x) - else: - pooled, tokens = self._global_pool(x) - pooled = self.ln_post(pooled) - - if self.proj is not None: - pooled = pooled @ self.proj - - if self.output_tokens: - return pooled, tokens - - return pooled - - -class TextTransformer(nn.Module): - output_tokens: torch.jit.Final[bool] - - def __init__( - self, - context_length: int = 77, - vocab_size: int = 49408, - width: int = 512, - heads: int = 8, - layers: int = 12, - ls_init_value: float = None, - output_dim: int = 512, - act_layer: Callable = nn.GELU, - norm_layer: Callable = LayerNorm, - embed_cls: bool = False, - pad_id: int = 0, - output_tokens: bool = False, - ): - super().__init__() - self.output_tokens = output_tokens - self.num_pos = self.context_length = context_length - self.vocab_size = vocab_size - self.width = width - self.output_dim = output_dim - self.heads = heads - self.pad_id = pad_id - - self.text_projection = nn.Parameter(torch.empty(width, output_dim)) - - if embed_cls: - self.cls_emb = nn.Parameter(torch.empty(width)) - self.num_pos += 1 - else: - self.cls_emb = None - - self.token_embedding = nn.Embedding(vocab_size, width) - self.positional_embedding = nn.Parameter(torch.empty(self.num_pos, width)) - self.transformer = Transformer( - width=width, - layers=layers, - 
heads=heads, - ls_init_value=ls_init_value, - act_layer=act_layer, - norm_layer=norm_layer, - ) - self.ln_final = norm_layer(width) - - self.register_buffer('attn_mask', self.build_attention_mask(), persistent=False) - - self.init_parameters() - - def init_parameters(self): - nn.init.normal_(self.token_embedding.weight, std=0.02) - nn.init.normal_(self.positional_embedding, std=0.01) - if self.cls_emb is not None: - nn.init.normal_(self.cls_emb, std=0.01) - - proj_std = (self.transformer.width ** -0.5) * ((2 * self.transformer.layers) ** -0.5) - attn_std = self.transformer.width ** -0.5 - fc_std = (2 * self.transformer.width) ** -0.5 - for block in self.transformer.resblocks: - nn.init.normal_(block.attn.in_proj_weight, std=attn_std) - nn.init.normal_(block.attn.out_proj.weight, std=proj_std) - nn.init.normal_(block.mlp.c_fc.weight, std=fc_std) - nn.init.normal_(block.mlp.c_proj.weight, std=proj_std) - - if self.text_projection is not None: - nn.init.normal_(self.text_projection, std=self.transformer.width ** -0.5) - - @torch.jit.ignore - def set_grad_checkpointing(self, enable=True): - self.transformer.grad_checkpointing = enable - - def build_attention_mask(self): - # lazily create causal attention mask, with full attention between the tokens - # pytorch uses additive attention mask; fill with -inf - mask = torch.empty(self.num_pos, self.num_pos) - mask.fill_(float("-inf")) - mask.triu_(1) # zero out the lower diagonal - return mask - - def build_cls_mask(self, text, cast_dtype: torch.dtype): - cls_mask = (text != self.pad_id).unsqueeze(1) - cls_mask = F.pad(cls_mask, (1, 0, cls_mask.shape[2], 0), value=1.0) - additive_mask = torch.empty(cls_mask.shape, dtype=cast_dtype, device=cls_mask.device) - additive_mask.fill_(0) - additive_mask.masked_fill_(~cls_mask, float("-inf")) - additive_mask = torch.repeat_interleave(additive_mask, self.heads, 0) - return additive_mask - - def _repeat(self, t, N: int): - return t.reshape(1, 1, -1).repeat(N, 1, 1) - - def 
forward(self, text): - cast_dtype = self.transformer.get_cast_dtype() - seq_len = text.shape[1] - - x = self.token_embedding(text).to(cast_dtype) # [batch_size, n_ctx, d_model] - attn_mask = self.attn_mask - if self.cls_emb is not None: - seq_len += 1 - x = torch.cat([x, self._repeat(self.cls_emb, x.shape[0])], dim=1) - cls_mask = self.build_cls_mask(text, cast_dtype) - attn_mask = attn_mask[None, :seq_len, :seq_len] + cls_mask[:, :seq_len, :seq_len] - - x = x + self.positional_embedding[:seq_len].to(cast_dtype) - x = x.permute(1, 0, 2) # NLD -> LND - x = self.transformer(x, attn_mask=attn_mask) - x = x.permute(1, 0, 2) # LND -> NLD - - # x.shape = [batch_size, n_ctx, transformer.width] - # take features from the eot embedding (eot_token is the highest number in each sequence) - if self.cls_emb is not None: - pooled, tokens = x[:, -1], x[:, :-1] - pooled = self.ln_final(pooled) - else: - x = self.ln_final(x) - pooled, tokens = x[torch.arange(x.shape[0]), text.argmax(dim=-1)], x - - if self.text_projection is not None: - pooled = pooled @ self.text_projection - - if self.output_tokens: - return pooled, tokens - - return pooled - - -class MultimodalTransformer(Transformer): - def __init__( - self, - width: int, - layers: int, - heads: int, - context_length: int = 77, - mlp_ratio: float = 4.0, - ls_init_value: float = None, - act_layer: Callable = nn.GELU, - norm_layer: Callable = LayerNorm, - output_dim: int = 512, - ): - - super().__init__( - width=width, - layers=layers, - heads=heads, - mlp_ratio=mlp_ratio, - ls_init_value=ls_init_value, - act_layer=act_layer, - norm_layer=norm_layer, - ) - self.context_length = context_length - self.cross_attn = nn.ModuleList([ - ResidualAttentionBlock( - width, - heads, - mlp_ratio, - ls_init_value=ls_init_value, - act_layer=act_layer, - norm_layer=norm_layer, - is_cross_attention=True, - ) - for _ in range(layers) - ]) - - self.register_buffer('attn_mask', self.build_attention_mask(), persistent=False) - - self.ln_final = 
norm_layer(width) - self.text_projection = nn.Parameter(torch.empty(width, output_dim)) - - def init_parameters(self): - proj_std = (self.transformer.width ** -0.5) * ((2 * self.transformer.layers) ** -0.5) - attn_std = self.transformer.width ** -0.5 - fc_std = (2 * self.transformer.width) ** -0.5 - for block in self.transformer.resblocks: - nn.init.normal_(block.attn.in_proj_weight, std=attn_std) - nn.init.normal_(block.attn.out_proj.weight, std=proj_std) - nn.init.normal_(block.mlp.c_fc.weight, std=fc_std) - nn.init.normal_(block.mlp.c_proj.weight, std=proj_std) - for block in self.transformer.cross_attn: - nn.init.normal_(block.attn.in_proj_weight, std=attn_std) - nn.init.normal_(block.attn.out_proj.weight, std=proj_std) - nn.init.normal_(block.mlp.c_fc.weight, std=fc_std) - nn.init.normal_(block.mlp.c_proj.weight, std=proj_std) - - if self.text_projection is not None: - nn.init.normal_(self.text_projection, std=self.transformer.width ** -0.5) - - def build_attention_mask(self): - # lazily create causal attention mask, with full attention between the tokens - # pytorch uses additive attention mask; fill with -inf - mask = torch.empty(self.context_length, self.context_length) - mask.fill_(float("-inf")) - mask.triu_(1) # zero out the lower diagonal - return mask - - def forward(self, image_embs, text_embs): - text_embs = text_embs.permute(1, 0, 2) # NLD -> LNDsq - image_embs = image_embs.permute(1, 0, 2) # NLD -> LND - seq_len = text_embs.shape[0] - - for resblock, cross_attn in zip(self.resblocks, self.cross_attn): - if self.grad_checkpointing and not torch.jit.is_scripting(): - # TODO: handle kwargs https://github.com/pytorch/pytorch/issues/79887#issuecomment-1161758372 - text_embs = checkpoint(resblock, text_embs, None, None, self.attn_mask[:seq_len, :seq_len]) - text_embs = checkpoint(cross_attn, text_embs, image_embs, image_embs, None) - else: - text_embs = resblock(text_embs, attn_mask=self.attn_mask[:seq_len, :seq_len]) - text_embs = cross_attn(text_embs, 
k_x=image_embs, v_x=image_embs) - - x = text_embs.permute(1, 0, 2) # LND -> NLD - x = self.ln_final(x) - - if self.text_projection is not None: - x = x @ self.text_projection - - return x - - @torch.jit.ignore - def set_grad_checkpointing(self, enable=True): - self.grad_checkpointing = enable diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/utils.py b/diffsynth/extensions/ImageQualityMetric/open_clip/utils.py deleted file mode 100644 index 51e80c5..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/utils.py +++ /dev/null @@ -1,60 +0,0 @@ -from itertools import repeat -import collections.abc - -from torch import nn as nn -from torchvision.ops.misc import FrozenBatchNorm2d - - -def freeze_batch_norm_2d(module, module_match={}, name=''): - """ - Converts all `BatchNorm2d` and `SyncBatchNorm` layers of provided module into `FrozenBatchNorm2d`. If `module` is - itself an instance of either `BatchNorm2d` or `SyncBatchNorm`, it is converted into `FrozenBatchNorm2d` and - returned. Otherwise, the module is walked recursively and submodules are converted in place. - - Args: - module (torch.nn.Module): Any PyTorch module. 
- module_match (dict): Dictionary of full module names to freeze (all if empty) - name (str): Full module name (prefix) - - Returns: - torch.nn.Module: Resulting module - - Inspired by https://github.com/pytorch/pytorch/blob/a5895f85be0f10212791145bfedc0261d364f103/torch/nn/modules/batchnorm.py#L762 - """ - res = module - is_match = True - if module_match: - is_match = name in module_match - if is_match and isinstance(module, (nn.modules.batchnorm.BatchNorm2d, nn.modules.batchnorm.SyncBatchNorm)): - res = FrozenBatchNorm2d(module.num_features) - res.num_features = module.num_features - res.affine = module.affine - if module.affine: - res.weight.data = module.weight.data.clone().detach() - res.bias.data = module.bias.data.clone().detach() - res.running_mean.data = module.running_mean.data - res.running_var.data = module.running_var.data - res.eps = module.eps - else: - for child_name, child in module.named_children(): - full_child_name = '.'.join([name, child_name]) if name else child_name - new_child = freeze_batch_norm_2d(child, module_match, full_child_name) - if new_child is not child: - res.add_module(child_name, new_child) - return res - - -# From PyTorch internals -def _ntuple(n): - def parse(x): - if isinstance(x, collections.abc.Iterable): - return x - return tuple(repeat(x, n)) - return parse - - -to_1tuple = _ntuple(1) -to_2tuple = _ntuple(2) -to_3tuple = _ntuple(3) -to_4tuple = _ntuple(4) -to_ntuple = lambda n, x: _ntuple(n)(x) diff --git a/diffsynth/extensions/ImageQualityMetric/open_clip/version.py b/diffsynth/extensions/ImageQualityMetric/open_clip/version.py deleted file mode 100644 index 48aa744..0000000 --- a/diffsynth/extensions/ImageQualityMetric/open_clip/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = '2.16.0' diff --git a/diffsynth/extensions/ImageQualityMetric/pickscore.py b/diffsynth/extensions/ImageQualityMetric/pickscore.py deleted file mode 100644 index 7370e09..0000000 --- a/diffsynth/extensions/ImageQualityMetric/pickscore.py +++ 
/dev/null @@ -1,112 +0,0 @@ -import torch -from PIL import Image -from transformers import AutoProcessor, AutoModel -from typing import List, Union -import os -from .config import MODEL_PATHS - -class PickScore(torch.nn.Module): - def __init__(self, device: Union[str, torch.device], path: str = MODEL_PATHS): - super().__init__() - """Initialize the Selector with a processor and model. - - Args: - device (Union[str, torch.device]): The device to load the model on. - """ - self.device = device if isinstance(device, torch.device) else torch.device(device) - processor_name_or_path = path.get("clip") - model_pretrained_name_or_path = path.get("pickscore") - self.processor = AutoProcessor.from_pretrained(processor_name_or_path) - self.model = AutoModel.from_pretrained(model_pretrained_name_or_path).eval().to(self.device) - - def _calculate_score(self, image: torch.Tensor, prompt: str, softmax: bool = False) -> float: - """Calculate the score for a single image and prompt. - - Args: - image (torch.Tensor): The processed image tensor. - prompt (str): The prompt text. - softmax (bool): Whether to apply softmax to the scores. - - Returns: - float: The score for the image. 
- """ - with torch.no_grad(): - # Prepare text inputs - text_inputs = self.processor( - text=prompt, - padding=True, - truncation=True, - max_length=77, - return_tensors="pt", - ).to(self.device) - - # Embed images and text - image_embs = self.model.get_image_features(pixel_values=image) - image_embs = image_embs / torch.norm(image_embs, dim=-1, keepdim=True) - text_embs = self.model.get_text_features(**text_inputs) - text_embs = text_embs / torch.norm(text_embs, dim=-1, keepdim=True) - - # Compute score - score = (text_embs @ image_embs.T)[0] - if softmax: - # Apply logit scale and softmax - score = torch.softmax(self.model.logit_scale.exp() * score, dim=-1) - - return score.cpu().item() - - @torch.no_grad() - def score(self, images: Union[str, List[str], Image.Image, List[Image.Image]], prompt: str, softmax: bool = False) -> List[float]: - """Score the images based on the prompt. - - Args: - images (Union[str, List[str], Image.Image, List[Image.Image]]): Path(s) to the image(s) or PIL image(s). - prompt (str): The prompt text. - softmax (bool): Whether to apply softmax to the scores. - - Returns: - List[float]: List of scores for the images. 
- """ - try: - if isinstance(images, (str, Image.Image)): - # Single image - if isinstance(images, str): - pil_image = Image.open(images) - else: - pil_image = images - - # Prepare image inputs - image_inputs = self.processor( - images=pil_image, - padding=True, - truncation=True, - max_length=77, - return_tensors="pt", - ).to(self.device) - - return [self._calculate_score(image_inputs["pixel_values"], prompt, softmax)] - elif isinstance(images, list): - # Multiple images - scores = [] - for one_image in images: - if isinstance(one_image, str): - pil_image = Image.open(one_image) - elif isinstance(one_image, Image.Image): - pil_image = one_image - else: - raise TypeError("The type of parameter images is illegal.") - - # Prepare image inputs - image_inputs = self.processor( - images=pil_image, - padding=True, - truncation=True, - max_length=77, - return_tensors="pt", - ).to(self.device) - - scores.append(self._calculate_score(image_inputs["pixel_values"], prompt, softmax)) - return scores - else: - raise TypeError("The type of parameter images is illegal.") - except Exception as e: - raise RuntimeError(f"Error in scoring images: {e}") diff --git a/diffsynth/extensions/ImageQualityMetric/trainer/__init__.py b/diffsynth/extensions/ImageQualityMetric/trainer/__init__.py deleted file mode 100644 index cf4f59d..0000000 --- a/diffsynth/extensions/ImageQualityMetric/trainer/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .models import * \ No newline at end of file diff --git a/diffsynth/extensions/ImageQualityMetric/trainer/models/__init__.py b/diffsynth/extensions/ImageQualityMetric/trainer/models/__init__.py deleted file mode 100644 index c4e2b69..0000000 --- a/diffsynth/extensions/ImageQualityMetric/trainer/models/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .base_model import * -from .clip_model import * -from .cross_modeling import * \ No newline at end of file diff --git a/diffsynth/extensions/ImageQualityMetric/trainer/models/base_model.py 
b/diffsynth/extensions/ImageQualityMetric/trainer/models/base_model.py deleted file mode 100644 index 8f28caf..0000000 --- a/diffsynth/extensions/ImageQualityMetric/trainer/models/base_model.py +++ /dev/null @@ -1,7 +0,0 @@ -from dataclasses import dataclass - - - -@dataclass -class BaseModelConfig: - pass diff --git a/diffsynth/extensions/ImageQualityMetric/trainer/models/clip_model.py b/diffsynth/extensions/ImageQualityMetric/trainer/models/clip_model.py deleted file mode 100644 index 0a1b370..0000000 --- a/diffsynth/extensions/ImageQualityMetric/trainer/models/clip_model.py +++ /dev/null @@ -1,146 +0,0 @@ -from dataclasses import dataclass -from transformers import CLIPModel as HFCLIPModel -from transformers import AutoTokenizer - -from torch import nn, einsum - -from .base_model import BaseModelConfig - -from transformers import CLIPConfig -from typing import Any, Optional, Tuple, Union -import torch - -from .cross_modeling import Cross_model - -import json, os - -class XCLIPModel(HFCLIPModel): - def __init__(self, config: CLIPConfig): - super().__init__(config) - - def get_text_features( - self, - input_ids: Optional[torch.Tensor] = None, - attention_mask: Optional[torch.Tensor] = None, - position_ids: Optional[torch.Tensor] = None, - output_attentions: Optional[bool] = None, - output_hidden_states: Optional[bool] = None, - return_dict: Optional[bool] = None, - ) -> torch.FloatTensor: - - # Use CLIP model's config for some fields (if specified) instead of those of vision & text components. 
- output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions - output_hidden_states = ( - output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states - ) - return_dict = return_dict if return_dict is not None else self.config.use_return_dict - - text_outputs = self.text_model( - input_ids=input_ids, - attention_mask=attention_mask, - position_ids=position_ids, - output_attentions=output_attentions, - output_hidden_states=output_hidden_states, - return_dict=return_dict, - ) - - # pooled_output = text_outputs[1] - # text_features = self.text_projection(pooled_output) - last_hidden_state = text_outputs[0] - text_features = self.text_projection(last_hidden_state) - - pooled_output = text_outputs[1] - text_features_EOS = self.text_projection(pooled_output) - - - # del last_hidden_state, text_outputs - # gc.collect() - - return text_features, text_features_EOS - - def get_image_features( - self, - pixel_values: Optional[torch.FloatTensor] = None, - output_attentions: Optional[bool] = None, - output_hidden_states: Optional[bool] = None, - return_dict: Optional[bool] = None, - ) -> torch.FloatTensor: - - # Use CLIP model's config for some fields (if specified) instead of those of vision & text components. 
- output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions - output_hidden_states = ( - output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states - ) - return_dict = return_dict if return_dict is not None else self.config.use_return_dict - - vision_outputs = self.vision_model( - pixel_values=pixel_values, - output_attentions=output_attentions, - output_hidden_states=output_hidden_states, - return_dict=return_dict, - ) - - # pooled_output = vision_outputs[1] # pooled_output - # image_features = self.visual_projection(pooled_output) - last_hidden_state = vision_outputs[0] - image_features = self.visual_projection(last_hidden_state) - - return image_features - - - -@dataclass -class ClipModelConfig(BaseModelConfig): - _target_: str = "diffsynth.extensions.QualityMetric.trainer.models.clip_model.CLIPModel" - pretrained_model_name_or_path: str ="checkpoints/clip-vit-base-patch32" - - -class CLIPModel(nn.Module): - def __init__(self, ckpt, config_file=False): - super().__init__() - if config_file is None: - self.model = XCLIPModel.from_pretrained(ckpt) - else: - with open(os.path.join(ckpt, "config.json"), "r", encoding="utf-8") as f: - config = json.load(f) - config = CLIPConfig(**config) - self.model = XCLIPModel._from_config(config) - self.cross_model = Cross_model(dim=1024, layer_num=4, heads=16) - - def get_text_features(self, *args, **kwargs): - return self.model.get_text_features(*args, **kwargs) - - def get_image_features(self, *args, **kwargs): - return self.model.get_image_features(*args, **kwargs) - - def forward(self, text_inputs=None, image_inputs=None, condition_inputs=None): - outputs = () - - text_f, text_EOS = self.model.get_text_features(text_inputs) # B*77*1024 - outputs += text_EOS, - - image_f = self.model.get_image_features(image_inputs.half()) # 2B*257*1024 - condition_f, _ = self.model.get_text_features(condition_inputs) # B*5*1024 - - sim_text_condition = 
einsum('b i d, b j d -> b j i', text_f, condition_f) - sim_text_condition = torch.max(sim_text_condition, dim=1, keepdim=True)[0] - sim_text_condition = sim_text_condition / sim_text_condition.max() - mask = torch.where(sim_text_condition > 0.01, 0, float('-inf')) # B*1*77 - - mask = mask.repeat(1,image_f.shape[1],1) # B*257*77 - bc = int(image_f.shape[0]/2) - - sim0 = self.cross_model(image_f[:bc,:,:], text_f,mask.half()) - sim1 = self.cross_model(image_f[bc:,:,:], text_f,mask.half()) - outputs += sim0[:,0,:], - outputs += sim1[:,0,:], - - return outputs - - @property - def logit_scale(self): - return self.model.logit_scale - - def save(self, path): - self.model.save_pretrained(path) - diff --git a/diffsynth/extensions/ImageQualityMetric/trainer/models/cross_modeling.py b/diffsynth/extensions/ImageQualityMetric/trainer/models/cross_modeling.py deleted file mode 100644 index 938f1b7..0000000 --- a/diffsynth/extensions/ImageQualityMetric/trainer/models/cross_modeling.py +++ /dev/null @@ -1,292 +0,0 @@ -import torch -from torch import einsum, nn -import torch.nn.functional as F -from einops import rearrange, repeat - -# helper functions - -def exists(val): - return val is not None - -def default(val, d): - return val if exists(val) else d - -# normalization -# they use layernorm without bias, something that pytorch does not offer - - -class LayerNorm(nn.Module): - def __init__(self, dim): - super().__init__() - self.weight = nn.Parameter(torch.ones(dim)) - self.register_buffer("bias", torch.zeros(dim)) - - def forward(self, x): - return F.layer_norm(x, x.shape[-1:], self.weight, self.bias) - -# residual - - -class Residual(nn.Module): - def __init__(self, fn): - super().__init__() - self.fn = fn - - def forward(self, x, *args, **kwargs): - return self.fn(x, *args, **kwargs) + x - - -# rotary positional embedding -# https://arxiv.org/abs/2104.09864 - - -class RotaryEmbedding(nn.Module): - def __init__(self, dim): - super().__init__() - inv_freq = 1.0 / (10000 ** 
(torch.arange(0, dim, 2).float() / dim)) - self.register_buffer("inv_freq", inv_freq) - - def forward(self, max_seq_len, *, device): - seq = torch.arange(max_seq_len, device=device, dtype=self.inv_freq.dtype) - freqs = einsum("i , j -> i j", seq, self.inv_freq) - return torch.cat((freqs, freqs), dim=-1) - - -def rotate_half(x): - x = rearrange(x, "... (j d) -> ... j d", j=2) - x1, x2 = x.unbind(dim=-2) - return torch.cat((-x2, x1), dim=-1) - - -def apply_rotary_pos_emb(pos, t): - return (t * pos.cos()) + (rotate_half(t) * pos.sin()) - - -# classic Noam Shazeer paper, except here they use SwiGLU instead of the more popular GEGLU for gating the feedforward -# https://arxiv.org/abs/2002.05202 - - -class SwiGLU(nn.Module): - def forward(self, x): - x, gate = x.chunk(2, dim=-1) - return F.silu(gate) * x - - -# parallel attention and feedforward with residual -# discovered by Wang et al + EleutherAI from GPT-J fame - -class ParallelTransformerBlock(nn.Module): - def __init__(self, dim, dim_head=64, heads=8, ff_mult=4): - super().__init__() - self.norm = LayerNorm(dim) - - attn_inner_dim = dim_head * heads - ff_inner_dim = dim * ff_mult - self.fused_dims = (attn_inner_dim, dim_head, dim_head, (ff_inner_dim * 2)) - - self.heads = heads - self.scale = dim_head**-0.5 - self.rotary_emb = RotaryEmbedding(dim_head) - - self.fused_attn_ff_proj = nn.Linear(dim, sum(self.fused_dims), bias=False) - self.attn_out = nn.Linear(attn_inner_dim, dim, bias=False) - - self.ff_out = nn.Sequential( - SwiGLU(), - nn.Linear(ff_inner_dim, dim, bias=False) - ) - - self.register_buffer("pos_emb", None, persistent=False) - - - def get_rotary_embedding(self, n, device): - if self.pos_emb is not None and self.pos_emb.shape[-2] >= n: - return self.pos_emb[:n] - - pos_emb = self.rotary_emb(n, device=device) - self.register_buffer("pos_emb", pos_emb, persistent=False) - return pos_emb - - def forward(self, x, attn_mask=None): - """ - einstein notation - b - batch - h - heads - n, i, j - sequence length 
(base sequence length, source, target) - d - feature dimension - """ - - n, device, h = x.shape[1], x.device, self.heads - - # pre layernorm - - x = self.norm(x) - - # attention queries, keys, values, and feedforward inner - - q, k, v, ff = self.fused_attn_ff_proj(x).split(self.fused_dims, dim=-1) - - # split heads - # they use multi-query single-key-value attention, yet another Noam Shazeer paper - # they found no performance loss past a certain scale, and more efficient decoding obviously - # https://arxiv.org/abs/1911.02150 - - q = rearrange(q, "b n (h d) -> b h n d", h=h) - - # rotary embeddings - - positions = self.get_rotary_embedding(n, device) - q, k = map(lambda t: apply_rotary_pos_emb(positions, t), (q, k)) - - # scale - - q = q * self.scale - - # similarity - - sim = einsum("b h i d, b j d -> b h i j", q, k) - - - # extra attention mask - for masking out attention from text CLS token to padding - - if exists(attn_mask): - attn_mask = rearrange(attn_mask, 'b i j -> b 1 i j') - sim = sim.masked_fill(~attn_mask, -torch.finfo(sim.dtype).max) - - # attention - - sim = sim - sim.amax(dim=-1, keepdim=True).detach() - attn = sim.softmax(dim=-1) - - # aggregate values - - out = einsum("b h i j, b j d -> b h i d", attn, v) - - # merge heads - - out = rearrange(out, "b h n d -> b n (h d)") - return self.attn_out(out) + self.ff_out(ff) - -# cross attention - using multi-query + one-headed key / values as in PaLM w/ optional parallel feedforward - -class CrossAttention(nn.Module): - def __init__( - self, - dim, - *, - context_dim=None, - dim_head=64, - heads=12, - parallel_ff=False, - ff_mult=4, - norm_context=False - ): - super().__init__() - self.heads = heads - self.scale = dim_head ** -0.5 - inner_dim = heads * dim_head - context_dim = default(context_dim, dim) - - self.norm = LayerNorm(dim) - self.context_norm = LayerNorm(context_dim) if norm_context else nn.Identity() - - self.to_q = nn.Linear(dim, inner_dim, bias=False) - self.to_kv = nn.Linear(context_dim, 
class Cross_model(nn.Module):
    """Stack of `layer_num` (cross-attention, self-attention) layer pairs.

    Each layer first injects information from `context_tokens` into the
    query stream via residual multi-query cross-attention (with a parallel
    feedforward), then refines the queries with a residual PaLM-style
    parallel transformer block.
    """

    def __init__(
        self,
        dim=512,
        layer_num=4,
        dim_head=64,
        heads=8,
        ff_mult=4
    ):
        super().__init__()

        layer_pairs = []
        for _ in range(layer_num):
            cross = Residual(CrossAttention(
                dim=dim, dim_head=dim_head, heads=heads,
                parallel_ff=True, ff_mult=ff_mult))
            self_ff = Residual(ParallelTransformerBlock(
                dim=dim, dim_head=dim_head, heads=heads, ff_mult=ff_mult))
            layer_pairs.append(nn.ModuleList([cross, self_ff]))
        self.layers = nn.ModuleList(layer_pairs)

    def forward(self, query_tokens, context_tokens, mask):
        tokens = query_tokens
        for cross_attn, self_attn_ff in self.layers:
            tokens = cross_attn(tokens, context_tokens, mask)
            tokens = self_attn_ff(tokens)
        return tokens
# ---------------------------------------------------------------------------
# diffsynth/extensions/RIFE/__init__.py — RIFE frame interpolation (IFNet)
# ---------------------------------------------------------------------------
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from PIL import Image


# Cache of backward-warp sampling grids, keyed by (device, flow size).
# BUG FIX: this dict used to be created as a local variable inside `warp`,
# so the `if k not in backwarp_tenGrid` test was always true and the cache
# never cached anything; it must persist at module level (as in upstream RIFE).
backwarp_tenGrid = {}


def warp(tenInput, tenFlow, device):
    """Backward-warp `tenInput` (N,C,H,W) by optical flow `tenFlow` (N,2,H,W).

    The pixel-space flow is normalized to the [-1, 1] range expected by
    `grid_sample`, added to an identity sampling grid, and used for bilinear
    sampling with border padding.
    """
    k = (str(tenFlow.device), str(tenFlow.size()))
    if k not in backwarp_tenGrid:
        tenHorizontal = torch.linspace(-1.0, 1.0, tenFlow.shape[3], device=device).view(
            1, 1, 1, tenFlow.shape[3]).expand(tenFlow.shape[0], -1, tenFlow.shape[2], -1)
        tenVertical = torch.linspace(-1.0, 1.0, tenFlow.shape[2], device=device).view(
            1, 1, tenFlow.shape[2], 1).expand(tenFlow.shape[0], -1, -1, tenFlow.shape[3])
        backwarp_tenGrid[k] = torch.cat(
            [tenHorizontal, tenVertical], 1).to(device)

    # Convert pixel displacements to normalized grid coordinates.
    tenFlow = torch.cat([tenFlow[:, 0:1, :, :] / ((tenInput.shape[3] - 1.0) / 2.0),
                         tenFlow[:, 1:2, :, :] / ((tenInput.shape[2] - 1.0) / 2.0)], 1)

    g = (backwarp_tenGrid[k] + tenFlow).permute(0, 2, 3, 1)
    return torch.nn.functional.grid_sample(input=tenInput, grid=g, mode='bilinear', padding_mode='border', align_corners=True)


def conv(in_planes, out_planes, kernel_size=3, stride=1, padding=1, dilation=1):
    """Conv2d + PReLU building block used throughout IFNet."""
    return nn.Sequential(
        nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride,
                  padding=padding, dilation=dilation, bias=True),
        nn.PReLU(out_planes)
    )


class IFBlock(nn.Module):
    """One coarse-to-fine stage of IFNet.

    Refines the optical flow (4 channels: bidirectional) and a 1-channel
    blending mask at 1/scale resolution, then upsamples the refinement back
    to the input resolution.
    """

    def __init__(self, in_planes, c=64):
        super(IFBlock, self).__init__()
        # Two stride-2 convs: feature extraction at 1/4 of the working scale.
        self.conv0 = nn.Sequential(conv(in_planes, c//2, 3, 2, 1), conv(c//2, c, 3, 2, 1))
        self.convblock0 = nn.Sequential(conv(c, c), conv(c, c))
        self.convblock1 = nn.Sequential(conv(c, c), conv(c, c))
        self.convblock2 = nn.Sequential(conv(c, c), conv(c, c))
        self.convblock3 = nn.Sequential(conv(c, c), conv(c, c))
        # Transposed convs restore resolution: 4 flow channels / 1 mask channel.
        self.conv1 = nn.Sequential(nn.ConvTranspose2d(c, c//2, 4, 2, 1), nn.PReLU(c//2), nn.ConvTranspose2d(c//2, 4, 4, 2, 1))
        self.conv2 = nn.Sequential(nn.ConvTranspose2d(c, c//2, 4, 2, 1), nn.PReLU(c//2), nn.ConvTranspose2d(c//2, 1, 4, 2, 1))

    def forward(self, x, flow, scale=1):
        """Return (flow, mask) refined at 1/scale resolution.

        Flow values are divided by `scale` on the way down and multiplied
        back on the way up so they stay in input-resolution pixel units.
        """
        x = F.interpolate(x, scale_factor=1. / scale, mode="bilinear", align_corners=False, recompute_scale_factor=False)
        flow = F.interpolate(flow, scale_factor=1. / scale, mode="bilinear", align_corners=False, recompute_scale_factor=False) * 1. / scale
        feat = self.conv0(torch.cat((x, flow), 1))
        # Residual conv blocks.
        feat = self.convblock0(feat) + feat
        feat = self.convblock1(feat) + feat
        feat = self.convblock2(feat) + feat
        feat = self.convblock3(feat) + feat
        flow = self.conv1(feat)
        mask = self.conv2(feat)
        flow = F.interpolate(flow, scale_factor=scale, mode="bilinear", align_corners=False, recompute_scale_factor=False) * scale
        mask = F.interpolate(mask, scale_factor=scale, mode="bilinear", align_corners=False, recompute_scale_factor=False)
        return flow, mask
class IFNet(nn.Module):
    """Coarse-to-fine intermediate-flow network (RIFE).

    Three IFBlocks refine bidirectional flow and a blending mask at
    progressively finer scales; the final interpolated frame is a
    mask-weighted blend of the two warped inputs.
    """

    def __init__(self, **kwargs):
        super(IFNet, self).__init__()
        self.block0 = IFBlock(7+4, c=90)
        self.block1 = IFBlock(7+4, c=90)
        self.block2 = IFBlock(7+4, c=90)
        self.block_tea = IFBlock(10+4, c=90)

    def forward(self, x, scale_list=[4, 2, 1], training=False):
        """Return (flow_list, final_mask, merged) for input pairs stacked
        along channels.

        NOTE(review): when `training` is True, img0/img1 below are undefined
        (the teacher path was removed upstream) — only inference is supported.
        """
        if training == False:
            channel = x.shape[1] // 2
            img0 = x[:, :channel]
            img1 = x[:, channel:]
        flow_list = []
        merged = []
        mask_list = []
        warped_img0 = img0
        warped_img1 = img1
        # Start from zero flow / zero mask with the right shape.
        flow = (x[:, :4]).detach() * 0
        mask = (x[:, :1]).detach() * 0
        block = [self.block0, self.block1, self.block2]
        for i in range(3):
            # Refine in both directions; f1/m1 are computed on swapped inputs,
            # so their flow channels are swapped back before averaging.
            f0, m0 = block[i](torch.cat((warped_img0[:, :3], warped_img1[:, :3], mask), 1), flow, scale=scale_list[i])
            f1, m1 = block[i](torch.cat((warped_img1[:, :3], warped_img0[:, :3], -mask), 1), torch.cat((flow[:, 2:4], flow[:, :2]), 1), scale=scale_list[i])
            flow = flow + (f0 + torch.cat((f1[:, 2:4], f1[:, :2]), 1)) / 2
            mask = mask + (m0 + (-m1)) / 2
            mask_list.append(mask)
            flow_list.append(flow)
            warped_img0 = warp(img0, flow[:, :2], device=x.device)
            warped_img1 = warp(img1, flow[:, 2:4], device=x.device)
            merged.append((warped_img0, warped_img1))
        '''
        c0 = self.contextnet(img0, flow[:, :2])
        c1 = self.contextnet(img1, flow[:, 2:4])
        tmp = self.unet(img0, img1, warped_img0, warped_img1, mask, flow, c0, c1)
        res = tmp[:, 1:4] * 2 - 1
        '''
        for i in range(3):
            mask_list[i] = torch.sigmoid(mask_list[i])
            merged[i] = merged[i][0] * mask_list[i] + merged[i][1] * (1 - mask_list[i])
        return flow_list, mask_list[2], merged

    @staticmethod
    def state_dict_converter():
        return IFNetStateDictConverter()


class IFNetStateDictConverter:
    """Strips the `module.` prefix left by DataParallel checkpoints."""

    def __init__(self):
        pass

    def from_diffusers(self, state_dict):
        state_dict_ = {k.replace("module.", ""): v for k, v in state_dict.items()}
        return state_dict_

    def from_civitai(self, state_dict):
        return self.from_diffusers(state_dict), {"upcast_to_float32": True}


class RIFEInterpolater:
    """Doubles the frame rate of an image sequence with RIFE."""

    def __init__(self, model, device="cuda"):
        self.model = model
        self.device = device
        # IFNet only supports float32, not float16.
        self.torch_dtype = torch.float32

    @staticmethod
    def from_model_manager(model_manager):
        return RIFEInterpolater(model_manager.fetch_model("rife"), device=model_manager.device)

    def process_image(self, image):
        """Convert a PIL image to a BGR float tensor in [0, 1], rounding the
        resolution up to a multiple of 32 (required by IFNet's strided path)."""
        width, height = image.size
        if width % 32 != 0 or height % 32 != 0:
            # BUG FIX: the round-up was missing the trailing `* 32`, which
            # resized images down to ~1/32 of their size instead of padding
            # them up to the next multiple of 32.
            width = (width + 31) // 32 * 32
            height = (height + 31) // 32 * 32
            image = image.resize((width, height))
        image = torch.Tensor(np.array(image, dtype=np.float32)[:, :, [2,1,0]] / 255).permute(2, 0, 1)
        return image

    def process_images(self, images):
        """Stack a list of PIL images into one (N, 3, H, W) tensor."""
        images = [self.process_image(image) for image in images]
        images = torch.stack(images)
        return images

    def decode_images(self, images):
        """Convert a (N, 3, H, W) BGR tensor in [0, 1] back to PIL images."""
        images = (images[:, [2,1,0]].permute(0, 2, 3, 1) * 255).clip(0, 255).numpy().astype(np.uint8)
        images = [Image.fromarray(image) for image in images]
        return images

    def add_interpolated_images(self, images, interpolated_images):
        """Interleave originals with interpolated frames: a, ab, b, bc, c, ..."""
        output_images = []
        for image, interpolated_image in zip(images, interpolated_images):
            output_images.append(image)
            output_images.append(interpolated_image)
        output_images.append(images[-1])
        return output_images

    @torch.no_grad()
    def interpolate_(self, images, scale=1.0):
        """Single-pass interpolation of consecutive frame pairs (no batching)."""
        input_tensor = self.process_images(images)
        input_tensor = torch.cat((input_tensor[:-1], input_tensor[1:]), dim=1)
        input_tensor = input_tensor.to(device=self.device, dtype=self.torch_dtype)
        flow, mask, merged = self.model(input_tensor, [4/scale, 2/scale, 1/scale])
        output_images = self.decode_images(merged[2].cpu())
        if output_images[0].size != images[0].size:
            output_images = [image.resize(images[0].size) for image in output_images]
        return output_images

    @torch.no_grad()
    def interpolate(self, images, scale=1.0, batch_size=4, num_iter=1, progress_bar=lambda x:x):
        """Interpolate `images`, doubling the frame count `num_iter` times."""
        # Preprocess
        processed_images = self.process_images(images)

        for _ in range(num_iter):
            # Input: each row is a consecutive frame pair stacked on channels.
            input_tensor = torch.cat((processed_images[:-1], processed_images[1:]), dim=1)

            # Interpolate in batches to bound memory usage.
            output_tensor = []
            for batch_id in progress_bar(range(0, input_tensor.shape[0], batch_size)):
                batch_id_ = min(batch_id + batch_size, input_tensor.shape[0])
                batch_input_tensor = input_tensor[batch_id: batch_id_]
                batch_input_tensor = batch_input_tensor.to(device=self.device, dtype=self.torch_dtype)
                flow, mask, merged = self.model(batch_input_tensor, [4/scale, 2/scale, 1/scale])
                output_tensor.append(merged[2].cpu())

            # Output: interleave originals with the new in-between frames.
            output_tensor = torch.concat(output_tensor, dim=0).clip(0, 1)
            processed_images = self.add_interpolated_images(processed_images, output_tensor)
            processed_images = torch.stack(processed_images)

        # To images; undo the multiple-of-32 padding if it was applied.
        output_images = self.decode_images(processed_images)
        if output_images[0].size != images[0].size:
            output_images = [image.resize(images[0].size) for image in output_images]
        return output_images
class RIFESmoother(RIFEInterpolater):
    """Temporal smoother: blends each frame with a RIFE interpolation of its
    neighbors to reduce flicker in rendered video frames."""

    def __init__(self, model, device="cuda"):
        super(RIFESmoother, self).__init__(model, device=device)

    @staticmethod
    def from_model_manager(model_manager):
        # BUG FIX: this previously constructed a RIFEInterpolater, which has
        # no __call__, so the returned object could not be used as a smoother.
        return RIFESmoother(model_manager.fetch_model("rife"), device=model_manager.device)

    def process_tensors(self, input_tensor, scale=1.0, batch_size=4):
        """Run IFNet over `input_tensor` in batches; return the merged frames."""
        output_tensor = []
        for batch_id in range(0, input_tensor.shape[0], batch_size):
            batch_id_ = min(batch_id + batch_size, input_tensor.shape[0])
            batch_input_tensor = input_tensor[batch_id: batch_id_]
            batch_input_tensor = batch_input_tensor.to(device=self.device, dtype=self.torch_dtype)
            flow, mask, merged = self.model(batch_input_tensor, [4/scale, 2/scale, 1/scale])
            output_tensor.append(merged[2].cpu())
        output_tensor = torch.concat(output_tensor, dim=0)
        return output_tensor

    @torch.no_grad()
    def __call__(self, rendered_frames, scale=1.0, batch_size=4, num_iter=1, **kwargs):
        """Smooth `rendered_frames` (list of PIL images); returns new frames.

        The first and last frames are left untouched.
        """
        # Preprocess
        processed_images = self.process_images(rendered_frames)

        for _ in range(num_iter):
            # Interpolate between frames i-1 and i+1 ...
            input_tensor = torch.cat((processed_images[:-2], processed_images[2:]), dim=1)
            output_tensor = self.process_tensors(input_tensor, scale=scale, batch_size=batch_size)

            # ... then blend the interpolation with the original middle frame.
            input_tensor = torch.cat((processed_images[1:-1], output_tensor), dim=1)
            output_tensor = self.process_tensors(input_tensor, scale=scale, batch_size=batch_size)

            # Write the smoothed frames back in place.
            processed_images[1:-1] = output_tensor

        # To images; undo the multiple-of-32 padding if it was applied.
        output_images = self.decode_images(processed_images)
        if output_images[0].size != rendered_frames[0].size:
            output_images = [image.resize(rendered_frames[0].size) for image in output_images]
        return output_images
# ---------------------------------------------------------------------------
# diffsynth/lora/__init__.py
# ---------------------------------------------------------------------------
import torch


class GeneralLoRALoader:
    """Fuses LoRA deltas (W += alpha * B @ A) into a model's weights in place."""

    def __init__(self, device="cpu", torch_dtype=torch.float32):
        self.device = device
        self.torch_dtype = torch_dtype

    def get_name_dict(self, lora_state_dict):
        """Map each target module name to its (lora_B key, lora_A key) pair.

        Key normalization: drop an optional adapter tag after "lora_B"
        (e.g. "default"), the "lora_B" segment itself, an optional leading
        "diffusion_model" prefix, and the trailing "weight" segment.
        """
        lora_name_dict = {}
        for key in lora_state_dict:
            if ".lora_B." not in key:
                continue
            parts = key.split(".")
            b_pos = parts.index("lora_B")
            if len(parts) > b_pos + 2:
                # Remove the adapter tag that follows "lora_B".
                parts.pop(b_pos + 1)
            parts.pop(b_pos)
            if parts[0] == "diffusion_model":
                parts.pop(0)
            parts.pop(-1)
            target_name = ".".join(parts)
            lora_name_dict[target_name] = (key, key.replace(".lora_B.", ".lora_A."))
        return lora_name_dict

    def load(self, model: torch.nn.Module, state_dict_lora, alpha=1.0):
        """Apply every matching LoRA pair in `state_dict_lora` to `model`."""
        lora_name_dict = self.get_name_dict(state_dict_lora)
        updated_num = 0
        for name, module in model.named_modules():
            if name not in lora_name_dict:
                continue
            key_b, key_a = lora_name_dict[name]
            weight_up = state_dict_lora[key_b].to(device=self.device, dtype=self.torch_dtype)
            weight_down = state_dict_lora[key_a].to(device=self.device, dtype=self.torch_dtype)
            if len(weight_up.shape) == 4:
                # 1x1-conv LoRA: collapse to matrices, multiply, restore shape.
                weight_lora = alpha * torch.mm(
                    weight_up.squeeze(3).squeeze(2),
                    weight_down.squeeze(3).squeeze(2),
                ).unsqueeze(2).unsqueeze(3)
            else:
                weight_lora = alpha * torch.mm(weight_up, weight_down)
            state_dict = module.state_dict()
            state_dict["weight"] = state_dict["weight"].to(device=self.device, dtype=self.torch_dtype) + weight_lora
            module.load_state_dict(state_dict)
            updated_num += 1
        print(f"{updated_num} tensors are updated by LoRA.")
# ---------------------------------------------------------------------------
# diffsynth/lora/flux_lora.py
# ---------------------------------------------------------------------------
import torch, math
from . import GeneralLoRALoader
from ..utils import ModelConfig
from ..models.utils import load_state_dict
from typing import Union


class FluxLoRALoader(GeneralLoRALoader):
    """LoRA loader for FLUX: converts diffusers- or civitai-style LoRA key
    names to this project's naming scheme (including merging separate
    q/k/v LoRAs into fused qkv tensors) before applying them."""

    def __init__(self, device="cpu", torch_dtype=torch.float32):
        super().__init__(device=device, torch_dtype=torch_dtype)

        # Key-rename table for diffusers-style FLUX LoRAs; "blockid" is a
        # placeholder substituted with the concrete block index.
        self.diffusers_rename_dict = {
            "transformer.single_transformer_blocks.blockid.attn.to_k.lora_A.weight":"single_blocks.blockid.a_to_k.lora_A.default.weight",
            "transformer.single_transformer_blocks.blockid.attn.to_k.lora_B.weight":"single_blocks.blockid.a_to_k.lora_B.default.weight",
            "transformer.single_transformer_blocks.blockid.attn.to_q.lora_A.weight":"single_blocks.blockid.a_to_q.lora_A.default.weight",
            "transformer.single_transformer_blocks.blockid.attn.to_q.lora_B.weight":"single_blocks.blockid.a_to_q.lora_B.default.weight",
            "transformer.single_transformer_blocks.blockid.attn.to_v.lora_A.weight":"single_blocks.blockid.a_to_v.lora_A.default.weight",
            "transformer.single_transformer_blocks.blockid.attn.to_v.lora_B.weight":"single_blocks.blockid.a_to_v.lora_B.default.weight",
            "transformer.single_transformer_blocks.blockid.norm.linear.lora_A.weight":"single_blocks.blockid.norm.linear.lora_A.default.weight",
            "transformer.single_transformer_blocks.blockid.norm.linear.lora_B.weight":"single_blocks.blockid.norm.linear.lora_B.default.weight",
            "transformer.single_transformer_blocks.blockid.proj_mlp.lora_A.weight":"single_blocks.blockid.proj_in_besides_attn.lora_A.default.weight",
            "transformer.single_transformer_blocks.blockid.proj_mlp.lora_B.weight":"single_blocks.blockid.proj_in_besides_attn.lora_B.default.weight",
            "transformer.single_transformer_blocks.blockid.proj_out.lora_A.weight":"single_blocks.blockid.proj_out.lora_A.default.weight",
            "transformer.single_transformer_blocks.blockid.proj_out.lora_B.weight":"single_blocks.blockid.proj_out.lora_B.default.weight",
            "transformer.transformer_blocks.blockid.attn.add_k_proj.lora_A.weight":"blocks.blockid.attn.b_to_k.lora_A.default.weight",
            "transformer.transformer_blocks.blockid.attn.add_k_proj.lora_B.weight":"blocks.blockid.attn.b_to_k.lora_B.default.weight",
            "transformer.transformer_blocks.blockid.attn.add_q_proj.lora_A.weight":"blocks.blockid.attn.b_to_q.lora_A.default.weight",
            "transformer.transformer_blocks.blockid.attn.add_q_proj.lora_B.weight":"blocks.blockid.attn.b_to_q.lora_B.default.weight",
            "transformer.transformer_blocks.blockid.attn.add_v_proj.lora_A.weight":"blocks.blockid.attn.b_to_v.lora_A.default.weight",
            "transformer.transformer_blocks.blockid.attn.add_v_proj.lora_B.weight":"blocks.blockid.attn.b_to_v.lora_B.default.weight",
            "transformer.transformer_blocks.blockid.attn.to_add_out.lora_A.weight":"blocks.blockid.attn.b_to_out.lora_A.default.weight",
            "transformer.transformer_blocks.blockid.attn.to_add_out.lora_B.weight":"blocks.blockid.attn.b_to_out.lora_B.default.weight",
            "transformer.transformer_blocks.blockid.attn.to_k.lora_A.weight":"blocks.blockid.attn.a_to_k.lora_A.default.weight",
            "transformer.transformer_blocks.blockid.attn.to_k.lora_B.weight":"blocks.blockid.attn.a_to_k.lora_B.default.weight",
            "transformer.transformer_blocks.blockid.attn.to_out.0.lora_A.weight":"blocks.blockid.attn.a_to_out.lora_A.default.weight",
            "transformer.transformer_blocks.blockid.attn.to_out.0.lora_B.weight":"blocks.blockid.attn.a_to_out.lora_B.default.weight",
            "transformer.transformer_blocks.blockid.attn.to_q.lora_A.weight":"blocks.blockid.attn.a_to_q.lora_A.default.weight",
            "transformer.transformer_blocks.blockid.attn.to_q.lora_B.weight":"blocks.blockid.attn.a_to_q.lora_B.default.weight",
            "transformer.transformer_blocks.blockid.attn.to_v.lora_A.weight":"blocks.blockid.attn.a_to_v.lora_A.default.weight",
            "transformer.transformer_blocks.blockid.attn.to_v.lora_B.weight":"blocks.blockid.attn.a_to_v.lora_B.default.weight",
            "transformer.transformer_blocks.blockid.ff.net.0.proj.lora_A.weight":"blocks.blockid.ff_a.0.lora_A.default.weight",
            "transformer.transformer_blocks.blockid.ff.net.0.proj.lora_B.weight":"blocks.blockid.ff_a.0.lora_B.default.weight",
            "transformer.transformer_blocks.blockid.ff.net.2.lora_A.weight":"blocks.blockid.ff_a.2.lora_A.default.weight",
            "transformer.transformer_blocks.blockid.ff.net.2.lora_B.weight":"blocks.blockid.ff_a.2.lora_B.default.weight",
            "transformer.transformer_blocks.blockid.ff_context.net.0.proj.lora_A.weight":"blocks.blockid.ff_b.0.lora_A.default.weight",
            "transformer.transformer_blocks.blockid.ff_context.net.0.proj.lora_B.weight":"blocks.blockid.ff_b.0.lora_B.default.weight",
            "transformer.transformer_blocks.blockid.ff_context.net.2.lora_A.weight":"blocks.blockid.ff_b.2.lora_A.default.weight",
            "transformer.transformer_blocks.blockid.ff_context.net.2.lora_B.weight":"blocks.blockid.ff_b.2.lora_B.default.weight",
            "transformer.transformer_blocks.blockid.norm1.linear.lora_A.weight":"blocks.blockid.norm1_a.linear.lora_A.default.weight",
            "transformer.transformer_blocks.blockid.norm1.linear.lora_B.weight":"blocks.blockid.norm1_a.linear.lora_B.default.weight",
            "transformer.transformer_blocks.blockid.norm1_context.linear.lora_A.weight":"blocks.blockid.norm1_b.linear.lora_A.default.weight",
            "transformer.transformer_blocks.blockid.norm1_context.linear.lora_B.weight":"blocks.blockid.norm1_b.linear.lora_B.default.weight",
        }

        # Key-rename table for civitai/kohya-style FLUX LoRAs (fused qkv keys).
        self.civitai_rename_dict = {
            "lora_unet_double_blocks_blockid_img_mod_lin.lora_down.weight": "blocks.blockid.norm1_a.linear.lora_A.default.weight",
            "lora_unet_double_blocks_blockid_img_mod_lin.lora_up.weight": "blocks.blockid.norm1_a.linear.lora_B.default.weight",
            "lora_unet_double_blocks_blockid_txt_mod_lin.lora_down.weight": "blocks.blockid.norm1_b.linear.lora_A.default.weight",
            "lora_unet_double_blocks_blockid_txt_mod_lin.lora_up.weight": "blocks.blockid.norm1_b.linear.lora_B.default.weight",
            "lora_unet_double_blocks_blockid_img_attn_qkv.lora_down.weight": "blocks.blockid.attn.a_to_qkv.lora_A.default.weight",
            "lora_unet_double_blocks_blockid_img_attn_qkv.lora_up.weight": "blocks.blockid.attn.a_to_qkv.lora_B.default.weight",
            "lora_unet_double_blocks_blockid_txt_attn_qkv.lora_down.weight": "blocks.blockid.attn.b_to_qkv.lora_A.default.weight",
            "lora_unet_double_blocks_blockid_txt_attn_qkv.lora_up.weight": "blocks.blockid.attn.b_to_qkv.lora_B.default.weight",
            "lora_unet_double_blocks_blockid_img_attn_proj.lora_down.weight": "blocks.blockid.attn.a_to_out.lora_A.default.weight",
            "lora_unet_double_blocks_blockid_img_attn_proj.lora_up.weight": "blocks.blockid.attn.a_to_out.lora_B.default.weight",
            "lora_unet_double_blocks_blockid_txt_attn_proj.lora_down.weight": "blocks.blockid.attn.b_to_out.lora_A.default.weight",
            "lora_unet_double_blocks_blockid_txt_attn_proj.lora_up.weight": "blocks.blockid.attn.b_to_out.lora_B.default.weight",
            "lora_unet_double_blocks_blockid_img_mlp_0.lora_down.weight": "blocks.blockid.ff_a.0.lora_A.default.weight",
            "lora_unet_double_blocks_blockid_img_mlp_0.lora_up.weight": "blocks.blockid.ff_a.0.lora_B.default.weight",
            "lora_unet_double_blocks_blockid_img_mlp_2.lora_down.weight": "blocks.blockid.ff_a.2.lora_A.default.weight",
            "lora_unet_double_blocks_blockid_img_mlp_2.lora_up.weight": "blocks.blockid.ff_a.2.lora_B.default.weight",
            "lora_unet_double_blocks_blockid_txt_mlp_0.lora_down.weight": "blocks.blockid.ff_b.0.lora_A.default.weight",
            "lora_unet_double_blocks_blockid_txt_mlp_0.lora_up.weight": "blocks.blockid.ff_b.0.lora_B.default.weight",
            "lora_unet_double_blocks_blockid_txt_mlp_2.lora_down.weight": "blocks.blockid.ff_b.2.lora_A.default.weight",
            "lora_unet_double_blocks_blockid_txt_mlp_2.lora_up.weight": "blocks.blockid.ff_b.2.lora_B.default.weight",
            "lora_unet_single_blocks_blockid_modulation_lin.lora_down.weight": "single_blocks.blockid.norm.linear.lora_A.default.weight",
            "lora_unet_single_blocks_blockid_modulation_lin.lora_up.weight": "single_blocks.blockid.norm.linear.lora_B.default.weight",
            "lora_unet_single_blocks_blockid_linear1.lora_down.weight": "single_blocks.blockid.to_qkv_mlp.lora_A.default.weight",
            "lora_unet_single_blocks_blockid_linear1.lora_up.weight": "single_blocks.blockid.to_qkv_mlp.lora_B.default.weight",
            "lora_unet_single_blocks_blockid_linear2.lora_down.weight": "single_blocks.blockid.proj_out.lora_A.default.weight",
            "lora_unet_single_blocks_blockid_linear2.lora_up.weight": "single_blocks.blockid.proj_out.lora_B.default.weight",
        }

    def load(self, model: torch.nn.Module, state_dict_lora, alpha=1.0):
        # Same fusion logic as the generic loader; keys are expected to have
        # been run through `convert_state_dict` first.
        super().load(model, state_dict_lora, alpha)

    def convert_state_dict(self,state_dict):
        """Convert a diffusers- or civitai-style FLUX LoRA state dict to this
        project's key scheme, merging per-projection q/k/v LoRAs into fused
        qkv (and qkv+mlp) tensors where necessary."""

        def guess_block_id(name,model_resource):
            # Extract the numeric block index and build the templated name
            # with "blockid" in its place.
            if model_resource == 'civitai':
                names = name.split("_")
                for i in names:
                    if i.isdigit():
                        return i, name.replace(f"_{i}_", "_blockid_")
            if model_resource == 'diffusers':
                names = name.split(".")
                for i in names:
                    if i.isdigit():
                        return i, name.replace(f"transformer_blocks.{i}.", "transformer_blocks.blockid.")
            return None, None

        def guess_resource(state_dict):
            # Detect the LoRA flavor from key prefixes; implicitly returns
            # None when nothing matches.
            for k in state_dict:
                if "lora_unet_" in k:
                    return 'civitai'
                elif k.startswith("transformer."):
                    return 'diffusers'
                else:
                    None  # NOTE(review): no-op expression; loop simply continues

        model_resource = guess_resource(state_dict)
        if model_resource is None:
            return state_dict

        rename_dict = self.diffusers_rename_dict if model_resource == 'diffusers' else self.civitai_rename_dict

        def guess_alpha(state_dict):
            # Derive an effective scale from a ".alpha" tensor, if present:
            # sqrt(alpha / rank), folded into both factors below.
            for name, param in state_dict.items():
                if ".alpha" in name:
                    for suffix in [".lora_down.weight", ".lora_A.weight"]:
                        name_ = name.replace(".alpha", suffix)
                        if name_ in state_dict:
                            lora_alpha = param.item() / state_dict[name_].shape[0]
                            lora_alpha = math.sqrt(lora_alpha)
                            return lora_alpha
            return 1

        alpha = guess_alpha(state_dict)

        # First pass: rename keys via the template table and apply the scale.
        state_dict_ = {}
        for name, param in state_dict.items():
            block_id, source_name = guess_block_id(name,model_resource)
            if alpha != 1:
                # NOTE(review): in-place scaling mutates the caller's tensors.
                param *= alpha
            if source_name in rename_dict:
                target_name = rename_dict[source_name]
                target_name = target_name.replace(".blockid.", f".{block_id}.")
                state_dict_[target_name] = param
            else:
                state_dict_[name] = param

        if model_resource == 'diffusers':
            # Second pass (single blocks): merge q/k/v and the mlp projection
            # into the fused `to_qkv_mlp` tensor. lora_A factors are stacked
            # along rows; lora_B factors are placed block-diagonally so each
            # output slice only sees its own rank-r subspace.
            for name in list(state_dict_.keys()):
                if "single_blocks." in name and ".a_to_q." in name:
                    mlp = state_dict_.get(name.replace(".a_to_q.", ".proj_in_besides_attn."), None)
                    if mlp is None:
                        # No mlp LoRA present: pad with zeros of the right size
                        # (output dim is 4x hidden for lora_B, rank-sized for lora_A).
                        dim = 4
                        if 'lora_A' in name:
                            dim = 1
                        mlp = torch.zeros(dim * state_dict_[name].shape[0],
                                          *state_dict_[name].shape[1:],
                                          dtype=state_dict_[name].dtype)
                    else:
                        state_dict_.pop(name.replace(".a_to_q.", ".proj_in_besides_attn."))
                    if 'lora_A' in name:
                        param = torch.concat([
                            state_dict_.pop(name),
                            state_dict_.pop(name.replace(".a_to_q.", ".a_to_k.")),
                            state_dict_.pop(name.replace(".a_to_q.", ".a_to_v.")),
                            mlp,
                        ], dim=0)
                    elif 'lora_B' in name:
                        d, r = state_dict_[name].shape
                        param = torch.zeros((3*d+mlp.shape[0], 3*r+mlp.shape[1]), dtype=state_dict_[name].dtype, device=state_dict_[name].device)
                        param[:d, :r] = state_dict_.pop(name)
                        param[d:2*d, r:2*r] = state_dict_.pop(name.replace(".a_to_q.", ".a_to_k."))
                        param[2*d:3*d, 2*r:3*r] = state_dict_.pop(name.replace(".a_to_q.", ".a_to_v."))
                        param[3*d:, 3*r:] = mlp
                    else:
                        param = torch.concat([
                            state_dict_.pop(name),
                            state_dict_.pop(name.replace(".a_to_q.", ".a_to_k.")),
                            state_dict_.pop(name.replace(".a_to_q.", ".a_to_v.")),
                            mlp,
                        ], dim=0)
                    name_ = name.replace(".a_to_q.", ".to_qkv_mlp.")
                    state_dict_[name_] = param
            # Third pass (double blocks): merge q/k/v for both the image ("a")
            # and text ("b") streams into fused `*_to_qkv` tensors.
            for name in list(state_dict_.keys()):
                for component in ["a", "b"]:
                    if f".{component}_to_q." in name:
                        name_ = name.replace(f".{component}_to_q.", f".{component}_to_qkv.")
                        concat_dim = 0
                        if 'lora_A' in name:
                            param = torch.concat([
                                state_dict_[name.replace(f".{component}_to_q.", f".{component}_to_q.")],
                                state_dict_[name.replace(f".{component}_to_q.", f".{component}_to_k.")],
                                state_dict_[name.replace(f".{component}_to_q.", f".{component}_to_v.")],
                            ], dim=0)
                        elif 'lora_B' in name:
                            origin = state_dict_[name.replace(f".{component}_to_q.", f".{component}_to_q.")]
                            d, r = origin.shape
                            # Block-diagonal placement, as in the single-block merge.
                            param = torch.zeros((3*d, 3*r), dtype=origin.dtype, device=origin.device)
                            param[:d, :r] = state_dict_[name.replace(f".{component}_to_q.", f".{component}_to_q.")]
                            param[d:2*d, r:2*r] = state_dict_[name.replace(f".{component}_to_q.", f".{component}_to_k.")]
                            param[2*d:3*d, 2*r:3*r] = state_dict_[name.replace(f".{component}_to_q.", f".{component}_to_v.")]
                        else:
                            param = torch.concat([
                                state_dict_[name.replace(f".{component}_to_q.", f".{component}_to_q.")],
                                state_dict_[name.replace(f".{component}_to_q.", f".{component}_to_k.")],
                                state_dict_[name.replace(f".{component}_to_q.", f".{component}_to_v.")],
                            ], dim=0)
                        state_dict_[name_] = param
                        state_dict_.pop(name.replace(f".{component}_to_q.", f".{component}_to_q."))
                        state_dict_.pop(name.replace(f".{component}_to_q.", f".{component}_to_k."))
                        state_dict_.pop(name.replace(f".{component}_to_q.", f".{component}_to_v."))
        return state_dict_


class LoraMerger(torch.nn.Module):
    """Learned gate that merges several LoRA branch outputs into a base output.

    The gate is a sigmoid over a bilinear combination of the normalized base
    and LoRA activations; gated LoRA outputs are summed onto the base.
    """

    def __init__(self, dim):
        super().__init__()
        self.weight_base = torch.nn.Parameter(torch.randn((dim,)))
        self.weight_lora = torch.nn.Parameter(torch.randn((dim,)))
        self.weight_cross = torch.nn.Parameter(torch.randn((dim,)))
        self.weight_out = torch.nn.Parameter(torch.ones((dim,)))
        self.bias = torch.nn.Parameter(torch.randn((dim,)))
        self.activation = torch.nn.Sigmoid()
        self.norm_base = torch.nn.LayerNorm(dim, eps=1e-5)
        self.norm_lora = torch.nn.LayerNorm(dim, eps=1e-5)

    def forward(self, base_output, lora_outputs):
        # `lora_outputs` is expected to stack the per-LoRA activations along
        # dim 0 (summed out below) — TODO confirm against the caller.
        norm_base_output = self.norm_base(base_output)
        norm_lora_outputs = self.norm_lora(lora_outputs)
        gate = self.activation(
            norm_base_output * self.weight_base \
            + norm_lora_outputs * self.weight_lora \
            + norm_base_output * norm_lora_outputs * self.weight_cross + self.bias
        )
        output = base_output + (self.weight_out * gate * lora_outputs).sum(dim=0)
        return output
class FluxLoraPatcher(torch.nn.Module):
    """Bank of `LoraMerger` gates, one per patched FLUX linear layer.

    Module names use "___" in place of "." because ModuleDict keys may not
    contain dots.
    """

    def __init__(self, lora_patterns=None):
        super().__init__()
        if lora_patterns is None:
            lora_patterns = self.default_lora_patterns()
        model_dict = {}
        for lora_pattern in lora_patterns:
            name, dim = lora_pattern["name"], lora_pattern["dim"]
            model_dict[name.replace(".", "___")] = LoraMerger(dim)
        self.model_dict = torch.nn.ModuleDict(model_dict)

    def default_lora_patterns(self):
        """Return the (name, dim) patterns for the standard FLUX architecture:
        19 double blocks and 38 single blocks."""
        lora_patterns = []
        lora_dict = {
            "attn.a_to_qkv": 9216, "attn.a_to_out": 3072, "ff_a.0": 12288, "ff_a.2": 3072, "norm1_a.linear": 18432,
            "attn.b_to_qkv": 9216, "attn.b_to_out": 3072, "ff_b.0": 12288, "ff_b.2": 3072, "norm1_b.linear": 18432,
        }
        for i in range(19):
            for suffix in lora_dict:
                lora_patterns.append({
                    "name": f"blocks.{i}.{suffix}",
                    "dim": lora_dict[suffix]
                })
        lora_dict = {"to_qkv_mlp": 21504, "proj_out": 3072, "norm.linear": 9216}
        for i in range(38):
            for suffix in lora_dict:
                lora_patterns.append({
                    "name": f"single_blocks.{i}.{suffix}",
                    "dim": lora_dict[suffix]
                })
        return lora_patterns

    def forward(self, base_output, lora_outputs, name):
        # Dispatch to the merger registered for layer `name`.
        return self.model_dict[name.replace(".", "___")](base_output, lora_outputs)

    @staticmethod
    def state_dict_converter():
        return FluxLoraPatcherStateDictConverter()


class FluxLoraPatcherStateDictConverter:
    """Identity converter: civitai checkpoints already match our key names."""

    def __init__(self):
        pass

    def from_civitai(self, state_dict):
        return state_dict


class FluxLoRAFuser:
    """Fuses several FLUX LoRAs into a single low-rank LoRA via truncated SVD."""

    def __init__(self, device="cuda", torch_dtype=torch.bfloat16):
        self.device = device
        self.torch_dtype = torch_dtype

    def Matrix_Decomposition_lowrank(self, A, k):
        """Rank-k factorization of A: returns (U_hat, V^T) with U_hat @ V^T ≈ A."""
        # NOTE(review): non-PEP8 method name kept for interface compatibility.
        U, S, V = torch.svd_lowrank(A.float(), q=k)
        S_k = torch.diag(S[:k])
        U_hat = U @ S_k
        return U_hat, V.t()

    def LoRA_State_Dicts_Decomposition(self, lora_state_dicts=[], q=4):
        """Sum the full deltas (B @ A) of all LoRAs per key pair, then
        re-factor each summed delta into a rank-q (lora_B, lora_A) pair."""
        # NOTE(review): mutable default argument — read-only here, but the
        # first element is also required; confirm callers always pass a list.
        lora_1 = lora_state_dicts[0]
        state_dict_ = {}
        for k,v in lora_1.items():
            if 'lora_A.' in k:
                lora_B_name = k.replace('lora_A.', 'lora_B.')
                lora_B = lora_1[lora_B_name]
                weight = torch.mm(lora_B, v)
                for lora_dict in lora_state_dicts[1:]:
                    lora_A_ = lora_dict[k]
                    lora_B_ = lora_dict[lora_B_name]
                    weight_ = torch.mm(lora_B_, lora_A_)
                    weight += weight_
                new_B, new_A = self.Matrix_Decomposition_lowrank(weight, q)
                state_dict_[lora_B_name] = new_B.to(dtype=torch.bfloat16)
                state_dict_[k] = new_A.to(dtype=torch.bfloat16)
        return state_dict_

    def __call__(self, lora_configs: list[Union[ModelConfig, str]]):
        """Load, key-convert and fuse the given LoRAs; returns one state dict."""
        loras = []
        loader = FluxLoRALoader(torch_dtype=self.torch_dtype, device=self.device)
        for lora_config in lora_configs:
            if isinstance(lora_config, str):
                lora = load_state_dict(lora_config, torch_dtype=self.torch_dtype, device=self.device)
            else:
                lora_config.download_if_necessary()
                lora = load_state_dict(lora_config.path, torch_dtype=self.torch_dtype, device=self.device)
            lora = loader.convert_state_dict(lora)
            loras.append(lora)
        lora = self.LoRA_State_Dicts_Decomposition(loras)
        return lora
-1)) - if attn_bias is not None: - attn = attn + attn_bias - attn = attn.softmax(-1) - return attn @ value - - -class Attention(torch.nn.Module): - - def __init__(self, q_dim, num_heads, head_dim, kv_dim=None, bias_q=False, bias_kv=False, bias_out=False): - super().__init__() - dim_inner = head_dim * num_heads - kv_dim = kv_dim if kv_dim is not None else q_dim - self.num_heads = num_heads - self.head_dim = head_dim - - self.to_q = torch.nn.Linear(q_dim, dim_inner, bias=bias_q) - self.to_k = torch.nn.Linear(kv_dim, dim_inner, bias=bias_kv) - self.to_v = torch.nn.Linear(kv_dim, dim_inner, bias=bias_kv) - self.to_out = torch.nn.Linear(dim_inner, q_dim, bias=bias_out) - - def interact_with_ipadapter(self, hidden_states, q, ip_k, ip_v, scale=1.0): - batch_size = q.shape[0] - ip_k = ip_k.view(batch_size, -1, self.num_heads, self.head_dim).transpose(1, 2) - ip_v = ip_v.view(batch_size, -1, self.num_heads, self.head_dim).transpose(1, 2) - ip_hidden_states = torch.nn.functional.scaled_dot_product_attention(q, ip_k, ip_v) - hidden_states = hidden_states + scale * ip_hidden_states - return hidden_states - - def torch_forward(self, hidden_states, encoder_hidden_states=None, attn_mask=None, ipadapter_kwargs=None, qkv_preprocessor=None): - if encoder_hidden_states is None: - encoder_hidden_states = hidden_states - - batch_size = encoder_hidden_states.shape[0] - - q = self.to_q(hidden_states) - k = self.to_k(encoder_hidden_states) - v = self.to_v(encoder_hidden_states) - - q = q.view(batch_size, -1, self.num_heads, self.head_dim).transpose(1, 2) - k = k.view(batch_size, -1, self.num_heads, self.head_dim).transpose(1, 2) - v = v.view(batch_size, -1, self.num_heads, self.head_dim).transpose(1, 2) - - if qkv_preprocessor is not None: - q, k, v = qkv_preprocessor(q, k, v) - - hidden_states = torch.nn.functional.scaled_dot_product_attention(q, k, v, attn_mask=attn_mask) - if ipadapter_kwargs is not None: - hidden_states = self.interact_with_ipadapter(hidden_states, q, 
**ipadapter_kwargs) - hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, self.num_heads * self.head_dim) - hidden_states = hidden_states.to(q.dtype) - - hidden_states = self.to_out(hidden_states) - - return hidden_states - - def xformers_forward(self, hidden_states, encoder_hidden_states=None, attn_mask=None): - if encoder_hidden_states is None: - encoder_hidden_states = hidden_states - - q = self.to_q(hidden_states) - k = self.to_k(encoder_hidden_states) - v = self.to_v(encoder_hidden_states) - - q = rearrange(q, "b f (n d) -> (b n) f d", n=self.num_heads) - k = rearrange(k, "b f (n d) -> (b n) f d", n=self.num_heads) - v = rearrange(v, "b f (n d) -> (b n) f d", n=self.num_heads) - - if attn_mask is not None: - hidden_states = low_version_attention(q, k, v, attn_bias=attn_mask) - else: - import xformers.ops as xops - hidden_states = xops.memory_efficient_attention(q, k, v) - hidden_states = rearrange(hidden_states, "(b n) f d -> b f (n d)", n=self.num_heads) - - hidden_states = hidden_states.to(q.dtype) - hidden_states = self.to_out(hidden_states) - - return hidden_states - - def forward(self, hidden_states, encoder_hidden_states=None, attn_mask=None, ipadapter_kwargs=None, qkv_preprocessor=None): - return self.torch_forward(hidden_states, encoder_hidden_states=encoder_hidden_states, attn_mask=attn_mask, ipadapter_kwargs=ipadapter_kwargs, qkv_preprocessor=qkv_preprocessor) \ No newline at end of file diff --git a/diffsynth/models/cog_dit.py b/diffsynth/models/cog_dit.py deleted file mode 100644 index e93c4c3..0000000 --- a/diffsynth/models/cog_dit.py +++ /dev/null @@ -1,408 +0,0 @@ -import torch -from einops import rearrange, repeat -from .sd3_dit import TimestepEmbeddings -from .attention import Attention -from .utils import load_state_dict_from_folder -from .tiler import TileWorker2Dto3D -import numpy as np - - - -class CogPatchify(torch.nn.Module): - def __init__(self, dim_in, dim_out, patch_size) -> None: - super().__init__() - self.proj = 
torch.nn.Conv3d(dim_in, dim_out, kernel_size=(1, patch_size, patch_size), stride=(1, patch_size, patch_size)) - - def forward(self, hidden_states): - hidden_states = self.proj(hidden_states) - hidden_states = rearrange(hidden_states, "B C T H W -> B (T H W) C") - return hidden_states - - - -class CogAdaLayerNorm(torch.nn.Module): - def __init__(self, dim, dim_cond, single=False): - super().__init__() - self.single = single - self.linear = torch.nn.Linear(dim_cond, dim * (2 if single else 6)) - self.norm = torch.nn.LayerNorm(dim, elementwise_affine=True, eps=1e-5) - - - def forward(self, hidden_states, prompt_emb, emb): - emb = self.linear(torch.nn.functional.silu(emb)) - if self.single: - shift, scale = emb.unsqueeze(1).chunk(2, dim=2) - hidden_states = self.norm(hidden_states) * (1 + scale) + shift - return hidden_states - else: - shift_a, scale_a, gate_a, shift_b, scale_b, gate_b = emb.unsqueeze(1).chunk(6, dim=2) - hidden_states = self.norm(hidden_states) * (1 + scale_a) + shift_a - prompt_emb = self.norm(prompt_emb) * (1 + scale_b) + shift_b - return hidden_states, prompt_emb, gate_a, gate_b - - - -class CogDiTBlock(torch.nn.Module): - def __init__(self, dim, dim_cond, num_heads): - super().__init__() - self.norm1 = CogAdaLayerNorm(dim, dim_cond) - self.attn1 = Attention(q_dim=dim, num_heads=48, head_dim=dim//num_heads, bias_q=True, bias_kv=True, bias_out=True) - self.norm_q = torch.nn.LayerNorm((dim//num_heads,), eps=1e-06, elementwise_affine=True) - self.norm_k = torch.nn.LayerNorm((dim//num_heads,), eps=1e-06, elementwise_affine=True) - - self.norm2 = CogAdaLayerNorm(dim, dim_cond) - self.ff = torch.nn.Sequential( - torch.nn.Linear(dim, dim*4), - torch.nn.GELU(approximate="tanh"), - torch.nn.Linear(dim*4, dim) - ) - - - def apply_rotary_emb(self, x, freqs_cis): - cos, sin = freqs_cis # [S, D] - cos = cos[None, None] - sin = sin[None, None] - cos, sin = cos.to(x.device), sin.to(x.device) - x_real, x_imag = x.reshape(*x.shape[:-1], -1, 2).unbind(-1) # [B, S, 
H, D//2] - x_rotated = torch.stack([-x_imag, x_real], dim=-1).flatten(3) - out = (x.float() * cos + x_rotated.float() * sin).to(x.dtype) - return out - - - def process_qkv(self, q, k, v, image_rotary_emb, text_seq_length): - q = self.norm_q(q) - k = self.norm_k(k) - q[:, :, text_seq_length:] = self.apply_rotary_emb(q[:, :, text_seq_length:], image_rotary_emb) - k[:, :, text_seq_length:] = self.apply_rotary_emb(k[:, :, text_seq_length:], image_rotary_emb) - return q, k, v - - - def forward(self, hidden_states, prompt_emb, time_emb, image_rotary_emb): - # Attention - norm_hidden_states, norm_encoder_hidden_states, gate_a, gate_b = self.norm1( - hidden_states, prompt_emb, time_emb - ) - attention_io = torch.cat([norm_encoder_hidden_states, norm_hidden_states], dim=1) - attention_io = self.attn1( - attention_io, - qkv_preprocessor=lambda q, k, v: self.process_qkv(q, k, v, image_rotary_emb, prompt_emb.shape[1]) - ) - - hidden_states = hidden_states + gate_a * attention_io[:, prompt_emb.shape[1]:] - prompt_emb = prompt_emb + gate_b * attention_io[:, :prompt_emb.shape[1]] - - # Feed forward - norm_hidden_states, norm_encoder_hidden_states, gate_a, gate_b = self.norm2( - hidden_states, prompt_emb, time_emb - ) - ff_io = torch.cat([norm_encoder_hidden_states, norm_hidden_states], dim=1) - ff_io = self.ff(ff_io) - - hidden_states = hidden_states + gate_a * ff_io[:, prompt_emb.shape[1]:] - prompt_emb = prompt_emb + gate_b * ff_io[:, :prompt_emb.shape[1]] - - return hidden_states, prompt_emb - - - -class CogDiT(torch.nn.Module): - def __init__(self): - super().__init__() - self.patchify = CogPatchify(16, 3072, 2) - self.time_embedder = TimestepEmbeddings(3072, 512) - self.context_embedder = torch.nn.Linear(4096, 3072) - self.blocks = torch.nn.ModuleList([CogDiTBlock(3072, 512, 48) for _ in range(42)]) - self.norm_final = torch.nn.LayerNorm((3072,), eps=1e-05, elementwise_affine=True) - self.norm_out = CogAdaLayerNorm(3072, 512, single=True) - self.proj_out = 
torch.nn.Linear(3072, 64, bias=True) - - - def get_resize_crop_region_for_grid(self, src, tgt_width, tgt_height): - tw = tgt_width - th = tgt_height - h, w = src - r = h / w - if r > (th / tw): - resize_height = th - resize_width = int(round(th / h * w)) - else: - resize_width = tw - resize_height = int(round(tw / w * h)) - - crop_top = int(round((th - resize_height) / 2.0)) - crop_left = int(round((tw - resize_width) / 2.0)) - - return (crop_top, crop_left), (crop_top + resize_height, crop_left + resize_width) - - - def get_3d_rotary_pos_embed( - self, embed_dim, crops_coords, grid_size, temporal_size, theta: int = 10000, use_real: bool = True - ): - start, stop = crops_coords - grid_h = np.linspace(start[0], stop[0], grid_size[0], endpoint=False, dtype=np.float32) - grid_w = np.linspace(start[1], stop[1], grid_size[1], endpoint=False, dtype=np.float32) - grid_t = np.linspace(0, temporal_size, temporal_size, endpoint=False, dtype=np.float32) - - # Compute dimensions for each axis - dim_t = embed_dim // 4 - dim_h = embed_dim // 8 * 3 - dim_w = embed_dim // 8 * 3 - - # Temporal frequencies - freqs_t = 1.0 / (theta ** (torch.arange(0, dim_t, 2).float() / dim_t)) - grid_t = torch.from_numpy(grid_t).float() - freqs_t = torch.einsum("n , f -> n f", grid_t, freqs_t) - freqs_t = freqs_t.repeat_interleave(2, dim=-1) - - # Spatial frequencies for height and width - freqs_h = 1.0 / (theta ** (torch.arange(0, dim_h, 2).float() / dim_h)) - freqs_w = 1.0 / (theta ** (torch.arange(0, dim_w, 2).float() / dim_w)) - grid_h = torch.from_numpy(grid_h).float() - grid_w = torch.from_numpy(grid_w).float() - freqs_h = torch.einsum("n , f -> n f", grid_h, freqs_h) - freqs_w = torch.einsum("n , f -> n f", grid_w, freqs_w) - freqs_h = freqs_h.repeat_interleave(2, dim=-1) - freqs_w = freqs_w.repeat_interleave(2, dim=-1) - - # Broadcast and concatenate tensors along specified dimension - def broadcast(tensors, dim=-1): - num_tensors = len(tensors) - shape_lens = {len(t.shape) for t in 
tensors} - assert len(shape_lens) == 1, "tensors must all have the same number of dimensions" - shape_len = list(shape_lens)[0] - dim = (dim + shape_len) if dim < 0 else dim - dims = list(zip(*(list(t.shape) for t in tensors))) - expandable_dims = [(i, val) for i, val in enumerate(dims) if i != dim] - assert all( - [*(len(set(t[1])) <= 2 for t in expandable_dims)] - ), "invalid dimensions for broadcastable concatenation" - max_dims = [(t[0], max(t[1])) for t in expandable_dims] - expanded_dims = [(t[0], (t[1],) * num_tensors) for t in max_dims] - expanded_dims.insert(dim, (dim, dims[dim])) - expandable_shapes = list(zip(*(t[1] for t in expanded_dims))) - tensors = [t[0].expand(*t[1]) for t in zip(tensors, expandable_shapes)] - return torch.cat(tensors, dim=dim) - - freqs = broadcast((freqs_t[:, None, None, :], freqs_h[None, :, None, :], freqs_w[None, None, :, :]), dim=-1) - - t, h, w, d = freqs.shape - freqs = freqs.view(t * h * w, d) - - # Generate sine and cosine components - sin = freqs.sin() - cos = freqs.cos() - - if use_real: - return cos, sin - else: - freqs_cis = torch.polar(torch.ones_like(freqs), freqs) - return freqs_cis - - - def prepare_rotary_positional_embeddings( - self, - height: int, - width: int, - num_frames: int, - device: torch.device, - ): - grid_height = height // 2 - grid_width = width // 2 - base_size_width = 720 // (8 * 2) - base_size_height = 480 // (8 * 2) - - grid_crops_coords = self.get_resize_crop_region_for_grid( - (grid_height, grid_width), base_size_width, base_size_height - ) - freqs_cos, freqs_sin = self.get_3d_rotary_pos_embed( - embed_dim=64, - crops_coords=grid_crops_coords, - grid_size=(grid_height, grid_width), - temporal_size=num_frames, - use_real=True, - ) - - freqs_cos = freqs_cos.to(device=device) - freqs_sin = freqs_sin.to(device=device) - return freqs_cos, freqs_sin - - - def unpatchify(self, hidden_states, height, width): - hidden_states = rearrange(hidden_states, "B (T H W) (C P Q) -> B C T (H P) (W Q)", P=2, Q=2, 
H=height//2, W=width//2) - return hidden_states - - - def build_mask(self, T, H, W, dtype, device, is_bound): - t = repeat(torch.arange(T), "T -> T H W", T=T, H=H, W=W) - h = repeat(torch.arange(H), "H -> T H W", T=T, H=H, W=W) - w = repeat(torch.arange(W), "W -> T H W", T=T, H=H, W=W) - border_width = (H + W) // 4 - pad = torch.ones_like(h) * border_width - mask = torch.stack([ - pad if is_bound[0] else t + 1, - pad if is_bound[1] else T - t, - pad if is_bound[2] else h + 1, - pad if is_bound[3] else H - h, - pad if is_bound[4] else w + 1, - pad if is_bound[5] else W - w - ]).min(dim=0).values - mask = mask.clip(1, border_width) - mask = (mask / border_width).to(dtype=dtype, device=device) - mask = rearrange(mask, "T H W -> 1 1 T H W") - return mask - - - def tiled_forward(self, hidden_states, timestep, prompt_emb, tile_size=(60, 90), tile_stride=(30, 45)): - B, C, T, H, W = hidden_states.shape - value = torch.zeros((B, C, T, H, W), dtype=hidden_states.dtype, device=hidden_states.device) - weight = torch.zeros((B, C, T, H, W), dtype=hidden_states.dtype, device=hidden_states.device) - - # Split tasks - tasks = [] - for h in range(0, H, tile_stride): - for w in range(0, W, tile_stride): - if (h-tile_stride >= 0 and h-tile_stride+tile_size >= H) or (w-tile_stride >= 0 and w-tile_stride+tile_size >= W): - continue - h_, w_ = h + tile_size, w + tile_size - if h_ > H: h, h_ = max(H - tile_size, 0), H - if w_ > W: w, w_ = max(W - tile_size, 0), W - tasks.append((h, h_, w, w_)) - - # Run - for hl, hr, wl, wr in tasks: - mask = self.build_mask( - value.shape[2], (hr-hl), (wr-wl), - hidden_states.dtype, hidden_states.device, - is_bound=(True, True, hl==0, hr>=H, wl==0, wr>=W) - ) - model_output = self.forward(hidden_states[:, :, :, hl:hr, wl:wr], timestep, prompt_emb) - value[:, :, :, hl:hr, wl:wr] += model_output * mask - weight[:, :, :, hl:hr, wl:wr] += mask - value = value / weight - - return value - - - def forward(self, hidden_states, timestep, prompt_emb, 
image_rotary_emb=None, tiled=False, tile_size=90, tile_stride=30, use_gradient_checkpointing=False): - if tiled: - return TileWorker2Dto3D().tiled_forward( - forward_fn=lambda x: self.forward(x, timestep, prompt_emb), - model_input=hidden_states, - tile_size=tile_size, tile_stride=tile_stride, - tile_device=hidden_states.device, tile_dtype=hidden_states.dtype, - computation_device=self.context_embedder.weight.device, computation_dtype=self.context_embedder.weight.dtype - ) - num_frames, height, width = hidden_states.shape[-3:] - if image_rotary_emb is None: - image_rotary_emb = self.prepare_rotary_positional_embeddings(height, width, num_frames, device=self.context_embedder.weight.device) - hidden_states = self.patchify(hidden_states) - time_emb = self.time_embedder(timestep, dtype=hidden_states.dtype) - prompt_emb = self.context_embedder(prompt_emb) - - def create_custom_forward(module): - def custom_forward(*inputs): - return module(*inputs) - return custom_forward - - for block in self.blocks: - if self.training and use_gradient_checkpointing: - hidden_states, prompt_emb = torch.utils.checkpoint.checkpoint( - create_custom_forward(block), - hidden_states, prompt_emb, time_emb, image_rotary_emb, - use_reentrant=False, - ) - else: - hidden_states, prompt_emb = block(hidden_states, prompt_emb, time_emb, image_rotary_emb) - - hidden_states = torch.cat([prompt_emb, hidden_states], dim=1) - hidden_states = self.norm_final(hidden_states) - hidden_states = hidden_states[:, prompt_emb.shape[1]:] - hidden_states = self.norm_out(hidden_states, prompt_emb, time_emb) - hidden_states = self.proj_out(hidden_states) - hidden_states = self.unpatchify(hidden_states, height, width) - - return hidden_states - - - @staticmethod - def state_dict_converter(): - return CogDiTStateDictConverter() - - - @staticmethod - def from_pretrained(file_path, torch_dtype=torch.bfloat16): - model = CogDiT().to(torch_dtype) - state_dict = load_state_dict_from_folder(file_path, 
torch_dtype=torch_dtype) - state_dict = CogDiT.state_dict_converter().from_diffusers(state_dict) - model.load_state_dict(state_dict) - return model - - - -class CogDiTStateDictConverter: - def __init__(self): - pass - - - def from_diffusers(self, state_dict): - rename_dict = { - "patch_embed.proj.weight": "patchify.proj.weight", - "patch_embed.proj.bias": "patchify.proj.bias", - "patch_embed.text_proj.weight": "context_embedder.weight", - "patch_embed.text_proj.bias": "context_embedder.bias", - "time_embedding.linear_1.weight": "time_embedder.timestep_embedder.0.weight", - "time_embedding.linear_1.bias": "time_embedder.timestep_embedder.0.bias", - "time_embedding.linear_2.weight": "time_embedder.timestep_embedder.2.weight", - "time_embedding.linear_2.bias": "time_embedder.timestep_embedder.2.bias", - - "norm_final.weight": "norm_final.weight", - "norm_final.bias": "norm_final.bias", - "norm_out.linear.weight": "norm_out.linear.weight", - "norm_out.linear.bias": "norm_out.linear.bias", - "norm_out.norm.weight": "norm_out.norm.weight", - "norm_out.norm.bias": "norm_out.norm.bias", - "proj_out.weight": "proj_out.weight", - "proj_out.bias": "proj_out.bias", - } - suffix_dict = { - "norm1.linear.weight": "norm1.linear.weight", - "norm1.linear.bias": "norm1.linear.bias", - "norm1.norm.weight": "norm1.norm.weight", - "norm1.norm.bias": "norm1.norm.bias", - "attn1.norm_q.weight": "norm_q.weight", - "attn1.norm_q.bias": "norm_q.bias", - "attn1.norm_k.weight": "norm_k.weight", - "attn1.norm_k.bias": "norm_k.bias", - "attn1.to_q.weight": "attn1.to_q.weight", - "attn1.to_q.bias": "attn1.to_q.bias", - "attn1.to_k.weight": "attn1.to_k.weight", - "attn1.to_k.bias": "attn1.to_k.bias", - "attn1.to_v.weight": "attn1.to_v.weight", - "attn1.to_v.bias": "attn1.to_v.bias", - "attn1.to_out.0.weight": "attn1.to_out.weight", - "attn1.to_out.0.bias": "attn1.to_out.bias", - "norm2.linear.weight": "norm2.linear.weight", - "norm2.linear.bias": "norm2.linear.bias", - "norm2.norm.weight": 
"norm2.norm.weight", - "norm2.norm.bias": "norm2.norm.bias", - "ff.net.0.proj.weight": "ff.0.weight", - "ff.net.0.proj.bias": "ff.0.bias", - "ff.net.2.weight": "ff.2.weight", - "ff.net.2.bias": "ff.2.bias", - } - state_dict_ = {} - for name, param in state_dict.items(): - if name in rename_dict: - if name == "patch_embed.proj.weight": - param = param.unsqueeze(2) - state_dict_[rename_dict[name]] = param - else: - names = name.split(".") - if names[0] == "transformer_blocks": - suffix = ".".join(names[2:]) - state_dict_[f"blocks.{names[1]}." + suffix_dict[suffix]] = param - return state_dict_ - - - def from_civitai(self, state_dict): - return self.from_diffusers(state_dict) diff --git a/diffsynth/models/cog_vae.py b/diffsynth/models/cog_vae.py deleted file mode 100644 index 24ab3b3..0000000 --- a/diffsynth/models/cog_vae.py +++ /dev/null @@ -1,518 +0,0 @@ -import torch -from einops import rearrange, repeat -from .tiler import TileWorker2Dto3D - - - -class Downsample3D(torch.nn.Module): - def __init__( - self, - in_channels: int, - out_channels: int, - kernel_size: int = 3, - stride: int = 2, - padding: int = 0, - compress_time: bool = False, - ): - super().__init__() - - self.conv = torch.nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding) - self.compress_time = compress_time - - def forward(self, x: torch.Tensor, xq: torch.Tensor) -> torch.Tensor: - if self.compress_time: - batch_size, channels, frames, height, width = x.shape - - # (batch_size, channels, frames, height, width) -> (batch_size, height, width, channels, frames) -> (batch_size * height * width, channels, frames) - x = x.permute(0, 3, 4, 1, 2).reshape(batch_size * height * width, channels, frames) - - if x.shape[-1] % 2 == 1: - x_first, x_rest = x[..., 0], x[..., 1:] - if x_rest.shape[-1] > 0: - # (batch_size * height * width, channels, frames - 1) -> (batch_size * height * width, channels, (frames - 1) // 2) - x_rest = torch.nn.functional.avg_pool1d(x_rest, 
kernel_size=2, stride=2) - - x = torch.cat([x_first[..., None], x_rest], dim=-1) - # (batch_size * height * width, channels, (frames // 2) + 1) -> (batch_size, height, width, channels, (frames // 2) + 1) -> (batch_size, channels, (frames // 2) + 1, height, width) - x = x.reshape(batch_size, height, width, channels, x.shape[-1]).permute(0, 3, 4, 1, 2) - else: - # (batch_size * height * width, channels, frames) -> (batch_size * height * width, channels, frames // 2) - x = torch.nn.functional.avg_pool1d(x, kernel_size=2, stride=2) - # (batch_size * height * width, channels, frames // 2) -> (batch_size, height, width, channels, frames // 2) -> (batch_size, channels, frames // 2, height, width) - x = x.reshape(batch_size, height, width, channels, x.shape[-1]).permute(0, 3, 4, 1, 2) - - # Pad the tensor - pad = (0, 1, 0, 1) - x = torch.nn.functional.pad(x, pad, mode="constant", value=0) - batch_size, channels, frames, height, width = x.shape - # (batch_size, channels, frames, height, width) -> (batch_size, frames, channels, height, width) -> (batch_size * frames, channels, height, width) - x = x.permute(0, 2, 1, 3, 4).reshape(batch_size * frames, channels, height, width) - x = self.conv(x) - # (batch_size * frames, channels, height, width) -> (batch_size, frames, channels, height, width) -> (batch_size, channels, frames, height, width) - x = x.reshape(batch_size, frames, x.shape[1], x.shape[2], x.shape[3]).permute(0, 2, 1, 3, 4) - return x - - - -class Upsample3D(torch.nn.Module): - def __init__( - self, - in_channels: int, - out_channels: int, - kernel_size: int = 3, - stride: int = 1, - padding: int = 1, - compress_time: bool = False, - ) -> None: - super().__init__() - self.conv = torch.nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding) - self.compress_time = compress_time - - def forward(self, inputs: torch.Tensor, xq: torch.Tensor) -> torch.Tensor: - if self.compress_time: - if inputs.shape[2] > 1 and inputs.shape[2] % 2 == 
1: - # split first frame - x_first, x_rest = inputs[:, :, 0], inputs[:, :, 1:] - - x_first = torch.nn.functional.interpolate(x_first, scale_factor=2.0) - x_rest = torch.nn.functional.interpolate(x_rest, scale_factor=2.0) - x_first = x_first[:, :, None, :, :] - inputs = torch.cat([x_first, x_rest], dim=2) - elif inputs.shape[2] > 1: - inputs = torch.nn.functional.interpolate(inputs, scale_factor=2.0) - else: - inputs = inputs.squeeze(2) - inputs = torch.nn.functional.interpolate(inputs, scale_factor=2.0) - inputs = inputs[:, :, None, :, :] - else: - # only interpolate 2D - b, c, t, h, w = inputs.shape - inputs = inputs.permute(0, 2, 1, 3, 4).reshape(b * t, c, h, w) - inputs = torch.nn.functional.interpolate(inputs, scale_factor=2.0) - inputs = inputs.reshape(b, t, c, *inputs.shape[2:]).permute(0, 2, 1, 3, 4) - - b, c, t, h, w = inputs.shape - inputs = inputs.permute(0, 2, 1, 3, 4).reshape(b * t, c, h, w) - inputs = self.conv(inputs) - inputs = inputs.reshape(b, t, *inputs.shape[1:]).permute(0, 2, 1, 3, 4) - - return inputs - - - -class CogVideoXSpatialNorm3D(torch.nn.Module): - def __init__(self, f_channels, zq_channels, groups): - super().__init__() - self.norm_layer = torch.nn.GroupNorm(num_channels=f_channels, num_groups=groups, eps=1e-6, affine=True) - self.conv_y = torch.nn.Conv3d(zq_channels, f_channels, kernel_size=1, stride=1) - self.conv_b = torch.nn.Conv3d(zq_channels, f_channels, kernel_size=1, stride=1) - - - def forward(self, f: torch.Tensor, zq: torch.Tensor) -> torch.Tensor: - if f.shape[2] > 1 and f.shape[2] % 2 == 1: - f_first, f_rest = f[:, :, :1], f[:, :, 1:] - f_first_size, f_rest_size = f_first.shape[-3:], f_rest.shape[-3:] - z_first, z_rest = zq[:, :, :1], zq[:, :, 1:] - z_first = torch.nn.functional.interpolate(z_first, size=f_first_size) - z_rest = torch.nn.functional.interpolate(z_rest, size=f_rest_size) - zq = torch.cat([z_first, z_rest], dim=2) - else: - zq = torch.nn.functional.interpolate(zq, size=f.shape[-3:]) - - norm_f = 
self.norm_layer(f) - new_f = norm_f * self.conv_y(zq) + self.conv_b(zq) - return new_f - - - -class Resnet3DBlock(torch.nn.Module): - def __init__(self, in_channels, out_channels, spatial_norm_dim, groups, eps=1e-6, use_conv_shortcut=False): - super().__init__() - self.nonlinearity = torch.nn.SiLU() - if spatial_norm_dim is None: - self.norm1 = torch.nn.GroupNorm(num_channels=in_channels, num_groups=groups, eps=eps) - self.norm2 = torch.nn.GroupNorm(num_channels=out_channels, num_groups=groups, eps=eps) - else: - self.norm1 = CogVideoXSpatialNorm3D(in_channels, spatial_norm_dim, groups) - self.norm2 = CogVideoXSpatialNorm3D(out_channels, spatial_norm_dim, groups) - - self.conv1 = CachedConv3d(in_channels, out_channels, kernel_size=3, padding=(0, 1, 1)) - - self.conv2 = CachedConv3d(out_channels, out_channels, kernel_size=3, padding=(0, 1, 1)) - - if in_channels != out_channels: - if use_conv_shortcut: - self.conv_shortcut = CachedConv3d(in_channels, out_channels, kernel_size=3, padding=(0, 1, 1)) - else: - self.conv_shortcut = torch.nn.Conv3d(in_channels, out_channels, kernel_size=1) - else: - self.conv_shortcut = lambda x: x - - - def forward(self, hidden_states, zq): - residual = hidden_states - - hidden_states = self.norm1(hidden_states, zq) if isinstance(self.norm1, CogVideoXSpatialNorm3D) else self.norm1(hidden_states) - hidden_states = self.nonlinearity(hidden_states) - hidden_states = self.conv1(hidden_states) - - hidden_states = self.norm2(hidden_states, zq) if isinstance(self.norm2, CogVideoXSpatialNorm3D) else self.norm2(hidden_states) - hidden_states = self.nonlinearity(hidden_states) - hidden_states = self.conv2(hidden_states) - - hidden_states = hidden_states + self.conv_shortcut(residual) - - return hidden_states - - - -class CachedConv3d(torch.nn.Conv3d): - def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0): - super().__init__(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding) - 
self.cached_tensor = None - - - def clear_cache(self): - self.cached_tensor = None - - - def forward(self, input: torch.Tensor, use_cache = True) -> torch.Tensor: - if use_cache: - if self.cached_tensor is None: - self.cached_tensor = torch.concat([input[:, :, :1]] * 2, dim=2) - input = torch.concat([self.cached_tensor, input], dim=2) - self.cached_tensor = input[:, :, -2:] - return super().forward(input) - - - -class CogVAEDecoder(torch.nn.Module): - def __init__(self): - super().__init__() - self.scaling_factor = 0.7 - self.conv_in = CachedConv3d(16, 512, kernel_size=3, stride=1, padding=(0, 1, 1)) - - self.blocks = torch.nn.ModuleList([ - Resnet3DBlock(512, 512, 16, 32), - Resnet3DBlock(512, 512, 16, 32), - Resnet3DBlock(512, 512, 16, 32), - Resnet3DBlock(512, 512, 16, 32), - Resnet3DBlock(512, 512, 16, 32), - Resnet3DBlock(512, 512, 16, 32), - Upsample3D(512, 512, compress_time=True), - Resnet3DBlock(512, 256, 16, 32), - Resnet3DBlock(256, 256, 16, 32), - Resnet3DBlock(256, 256, 16, 32), - Resnet3DBlock(256, 256, 16, 32), - Upsample3D(256, 256, compress_time=True), - Resnet3DBlock(256, 256, 16, 32), - Resnet3DBlock(256, 256, 16, 32), - Resnet3DBlock(256, 256, 16, 32), - Resnet3DBlock(256, 256, 16, 32), - Upsample3D(256, 256, compress_time=False), - Resnet3DBlock(256, 128, 16, 32), - Resnet3DBlock(128, 128, 16, 32), - Resnet3DBlock(128, 128, 16, 32), - Resnet3DBlock(128, 128, 16, 32), - ]) - - self.norm_out = CogVideoXSpatialNorm3D(128, 16, 32) - self.conv_act = torch.nn.SiLU() - self.conv_out = CachedConv3d(128, 3, kernel_size=3, stride=1, padding=(0, 1, 1)) - - - def forward(self, sample): - sample = sample / self.scaling_factor - hidden_states = self.conv_in(sample) - - for block in self.blocks: - hidden_states = block(hidden_states, sample) - - hidden_states = self.norm_out(hidden_states, sample) - hidden_states = self.conv_act(hidden_states) - hidden_states = self.conv_out(hidden_states) - - return hidden_states - - - def decode_video(self, sample, 
tiled=True, tile_size=(60, 90), tile_stride=(30, 45), progress_bar=lambda x:x): - if tiled: - B, C, T, H, W = sample.shape - return TileWorker2Dto3D().tiled_forward( - forward_fn=lambda x: self.decode_small_video(x), - model_input=sample, - tile_size=tile_size, tile_stride=tile_stride, - tile_device=sample.device, tile_dtype=sample.dtype, - computation_device=sample.device, computation_dtype=sample.dtype, - scales=(3/16, (T//2*8+T%2)/T, 8, 8), - progress_bar=progress_bar - ) - else: - return self.decode_small_video(sample) - - - def decode_small_video(self, sample): - B, C, T, H, W = sample.shape - computation_device = self.conv_in.weight.device - computation_dtype = self.conv_in.weight.dtype - value = [] - for i in range(T//2): - tl = i*2 + T%2 - (T%2 and i==0) - tr = i*2 + 2 + T%2 - model_input = sample[:, :, tl: tr, :, :].to(dtype=computation_dtype, device=computation_device) - model_output = self.forward(model_input).to(dtype=sample.dtype, device=sample.device) - value.append(model_output) - value = torch.concat(value, dim=2) - for name, module in self.named_modules(): - if isinstance(module, CachedConv3d): - module.clear_cache() - return value - - - @staticmethod - def state_dict_converter(): - return CogVAEDecoderStateDictConverter() - - - -class CogVAEEncoder(torch.nn.Module): - def __init__(self): - super().__init__() - self.scaling_factor = 0.7 - self.conv_in = CachedConv3d(3, 128, kernel_size=3, stride=1, padding=(0, 1, 1)) - - self.blocks = torch.nn.ModuleList([ - Resnet3DBlock(128, 128, None, 32), - Resnet3DBlock(128, 128, None, 32), - Resnet3DBlock(128, 128, None, 32), - Downsample3D(128, 128, compress_time=True), - Resnet3DBlock(128, 256, None, 32), - Resnet3DBlock(256, 256, None, 32), - Resnet3DBlock(256, 256, None, 32), - Downsample3D(256, 256, compress_time=True), - Resnet3DBlock(256, 256, None, 32), - Resnet3DBlock(256, 256, None, 32), - Resnet3DBlock(256, 256, None, 32), - Downsample3D(256, 256, compress_time=False), - Resnet3DBlock(256, 512, 
None, 32), - Resnet3DBlock(512, 512, None, 32), - Resnet3DBlock(512, 512, None, 32), - Resnet3DBlock(512, 512, None, 32), - Resnet3DBlock(512, 512, None, 32), - ]) - - self.norm_out = torch.nn.GroupNorm(32, 512, eps=1e-06, affine=True) - self.conv_act = torch.nn.SiLU() - self.conv_out = CachedConv3d(512, 32, kernel_size=3, stride=1, padding=(0, 1, 1)) - - - def forward(self, sample): - hidden_states = self.conv_in(sample) - - for block in self.blocks: - hidden_states = block(hidden_states, sample) - - hidden_states = self.norm_out(hidden_states) - hidden_states = self.conv_act(hidden_states) - hidden_states = self.conv_out(hidden_states)[:, :16] - hidden_states = hidden_states * self.scaling_factor - - return hidden_states - - - def encode_video(self, sample, tiled=True, tile_size=(60, 90), tile_stride=(30, 45), progress_bar=lambda x:x): - if tiled: - B, C, T, H, W = sample.shape - return TileWorker2Dto3D().tiled_forward( - forward_fn=lambda x: self.encode_small_video(x), - model_input=sample, - tile_size=(i * 8 for i in tile_size), tile_stride=(i * 8 for i in tile_stride), - tile_device=sample.device, tile_dtype=sample.dtype, - computation_device=sample.device, computation_dtype=sample.dtype, - scales=(16/3, (T//4+T%2)/T, 1/8, 1/8), - progress_bar=progress_bar - ) - else: - return self.encode_small_video(sample) - - - def encode_small_video(self, sample): - B, C, T, H, W = sample.shape - computation_device = self.conv_in.weight.device - computation_dtype = self.conv_in.weight.dtype - value = [] - for i in range(T//8): - t = i*8 + T%2 - (T%2 and i==0) - t_ = i*8 + 8 + T%2 - model_input = sample[:, :, t: t_, :, :].to(dtype=computation_dtype, device=computation_device) - model_output = self.forward(model_input).to(dtype=sample.dtype, device=sample.device) - value.append(model_output) - value = torch.concat(value, dim=2) - for name, module in self.named_modules(): - if isinstance(module, CachedConv3d): - module.clear_cache() - return value - - - @staticmethod - def 
state_dict_converter(): - return CogVAEEncoderStateDictConverter() - - - -class CogVAEEncoderStateDictConverter: - def __init__(self): - pass - - - def from_diffusers(self, state_dict): - rename_dict = { - "encoder.conv_in.conv.weight": "conv_in.weight", - "encoder.conv_in.conv.bias": "conv_in.bias", - "encoder.down_blocks.0.downsamplers.0.conv.weight": "blocks.3.conv.weight", - "encoder.down_blocks.0.downsamplers.0.conv.bias": "blocks.3.conv.bias", - "encoder.down_blocks.1.downsamplers.0.conv.weight": "blocks.7.conv.weight", - "encoder.down_blocks.1.downsamplers.0.conv.bias": "blocks.7.conv.bias", - "encoder.down_blocks.2.downsamplers.0.conv.weight": "blocks.11.conv.weight", - "encoder.down_blocks.2.downsamplers.0.conv.bias": "blocks.11.conv.bias", - "encoder.norm_out.weight": "norm_out.weight", - "encoder.norm_out.bias": "norm_out.bias", - "encoder.conv_out.conv.weight": "conv_out.weight", - "encoder.conv_out.conv.bias": "conv_out.bias", - } - prefix_dict = { - "encoder.down_blocks.0.resnets.0.": "blocks.0.", - "encoder.down_blocks.0.resnets.1.": "blocks.1.", - "encoder.down_blocks.0.resnets.2.": "blocks.2.", - "encoder.down_blocks.1.resnets.0.": "blocks.4.", - "encoder.down_blocks.1.resnets.1.": "blocks.5.", - "encoder.down_blocks.1.resnets.2.": "blocks.6.", - "encoder.down_blocks.2.resnets.0.": "blocks.8.", - "encoder.down_blocks.2.resnets.1.": "blocks.9.", - "encoder.down_blocks.2.resnets.2.": "blocks.10.", - "encoder.down_blocks.3.resnets.0.": "blocks.12.", - "encoder.down_blocks.3.resnets.1.": "blocks.13.", - "encoder.down_blocks.3.resnets.2.": "blocks.14.", - "encoder.mid_block.resnets.0.": "blocks.15.", - "encoder.mid_block.resnets.1.": "blocks.16.", - } - suffix_dict = { - "norm1.norm_layer.weight": "norm1.norm_layer.weight", - "norm1.norm_layer.bias": "norm1.norm_layer.bias", - "norm1.conv_y.conv.weight": "norm1.conv_y.weight", - "norm1.conv_y.conv.bias": "norm1.conv_y.bias", - "norm1.conv_b.conv.weight": "norm1.conv_b.weight", - 
"norm1.conv_b.conv.bias": "norm1.conv_b.bias", - "norm2.norm_layer.weight": "norm2.norm_layer.weight", - "norm2.norm_layer.bias": "norm2.norm_layer.bias", - "norm2.conv_y.conv.weight": "norm2.conv_y.weight", - "norm2.conv_y.conv.bias": "norm2.conv_y.bias", - "norm2.conv_b.conv.weight": "norm2.conv_b.weight", - "norm2.conv_b.conv.bias": "norm2.conv_b.bias", - "conv1.conv.weight": "conv1.weight", - "conv1.conv.bias": "conv1.bias", - "conv2.conv.weight": "conv2.weight", - "conv2.conv.bias": "conv2.bias", - "conv_shortcut.weight": "conv_shortcut.weight", - "conv_shortcut.bias": "conv_shortcut.bias", - "norm1.weight": "norm1.weight", - "norm1.bias": "norm1.bias", - "norm2.weight": "norm2.weight", - "norm2.bias": "norm2.bias", - } - state_dict_ = {} - for name, param in state_dict.items(): - if name in rename_dict: - state_dict_[rename_dict[name]] = param - else: - for prefix in prefix_dict: - if name.startswith(prefix): - suffix = name[len(prefix):] - state_dict_[prefix_dict[prefix] + suffix_dict[suffix]] = param - return state_dict_ - - - def from_civitai(self, state_dict): - return self.from_diffusers(state_dict) - - - -class CogVAEDecoderStateDictConverter: - def __init__(self): - pass - - - def from_diffusers(self, state_dict): - rename_dict = { - "decoder.conv_in.conv.weight": "conv_in.weight", - "decoder.conv_in.conv.bias": "conv_in.bias", - "decoder.up_blocks.0.upsamplers.0.conv.weight": "blocks.6.conv.weight", - "decoder.up_blocks.0.upsamplers.0.conv.bias": "blocks.6.conv.bias", - "decoder.up_blocks.1.upsamplers.0.conv.weight": "blocks.11.conv.weight", - "decoder.up_blocks.1.upsamplers.0.conv.bias": "blocks.11.conv.bias", - "decoder.up_blocks.2.upsamplers.0.conv.weight": "blocks.16.conv.weight", - "decoder.up_blocks.2.upsamplers.0.conv.bias": "blocks.16.conv.bias", - "decoder.norm_out.norm_layer.weight": "norm_out.norm_layer.weight", - "decoder.norm_out.norm_layer.bias": "norm_out.norm_layer.bias", - "decoder.norm_out.conv_y.conv.weight": 
"norm_out.conv_y.weight", - "decoder.norm_out.conv_y.conv.bias": "norm_out.conv_y.bias", - "decoder.norm_out.conv_b.conv.weight": "norm_out.conv_b.weight", - "decoder.norm_out.conv_b.conv.bias": "norm_out.conv_b.bias", - "decoder.conv_out.conv.weight": "conv_out.weight", - "decoder.conv_out.conv.bias": "conv_out.bias" - } - prefix_dict = { - "decoder.mid_block.resnets.0.": "blocks.0.", - "decoder.mid_block.resnets.1.": "blocks.1.", - "decoder.up_blocks.0.resnets.0.": "blocks.2.", - "decoder.up_blocks.0.resnets.1.": "blocks.3.", - "decoder.up_blocks.0.resnets.2.": "blocks.4.", - "decoder.up_blocks.0.resnets.3.": "blocks.5.", - "decoder.up_blocks.1.resnets.0.": "blocks.7.", - "decoder.up_blocks.1.resnets.1.": "blocks.8.", - "decoder.up_blocks.1.resnets.2.": "blocks.9.", - "decoder.up_blocks.1.resnets.3.": "blocks.10.", - "decoder.up_blocks.2.resnets.0.": "blocks.12.", - "decoder.up_blocks.2.resnets.1.": "blocks.13.", - "decoder.up_blocks.2.resnets.2.": "blocks.14.", - "decoder.up_blocks.2.resnets.3.": "blocks.15.", - "decoder.up_blocks.3.resnets.0.": "blocks.17.", - "decoder.up_blocks.3.resnets.1.": "blocks.18.", - "decoder.up_blocks.3.resnets.2.": "blocks.19.", - "decoder.up_blocks.3.resnets.3.": "blocks.20.", - } - suffix_dict = { - "norm1.norm_layer.weight": "norm1.norm_layer.weight", - "norm1.norm_layer.bias": "norm1.norm_layer.bias", - "norm1.conv_y.conv.weight": "norm1.conv_y.weight", - "norm1.conv_y.conv.bias": "norm1.conv_y.bias", - "norm1.conv_b.conv.weight": "norm1.conv_b.weight", - "norm1.conv_b.conv.bias": "norm1.conv_b.bias", - "norm2.norm_layer.weight": "norm2.norm_layer.weight", - "norm2.norm_layer.bias": "norm2.norm_layer.bias", - "norm2.conv_y.conv.weight": "norm2.conv_y.weight", - "norm2.conv_y.conv.bias": "norm2.conv_y.bias", - "norm2.conv_b.conv.weight": "norm2.conv_b.weight", - "norm2.conv_b.conv.bias": "norm2.conv_b.bias", - "conv1.conv.weight": "conv1.weight", - "conv1.conv.bias": "conv1.bias", - "conv2.conv.weight": "conv2.weight", - 
"conv2.conv.bias": "conv2.bias", - "conv_shortcut.weight": "conv_shortcut.weight", - "conv_shortcut.bias": "conv_shortcut.bias", - } - state_dict_ = {} - for name, param in state_dict.items(): - if name in rename_dict: - state_dict_[rename_dict[name]] = param - else: - for prefix in prefix_dict: - if name.startswith(prefix): - suffix = name[len(prefix):] - state_dict_[prefix_dict[prefix] + suffix_dict[suffix]] = param - return state_dict_ - - - def from_civitai(self, state_dict): - return self.from_diffusers(state_dict) - diff --git a/diffsynth/models/downloader.py b/diffsynth/models/downloader.py deleted file mode 100644 index 6c726f6..0000000 --- a/diffsynth/models/downloader.py +++ /dev/null @@ -1,111 +0,0 @@ -from huggingface_hub import hf_hub_download -from modelscope import snapshot_download -import os, shutil -from typing_extensions import Literal, TypeAlias -from typing import List -from ..configs.model_config import preset_models_on_huggingface, preset_models_on_modelscope, Preset_model_id - - -def download_from_modelscope(model_id, origin_file_path, local_dir): - os.makedirs(local_dir, exist_ok=True) - file_name = os.path.basename(origin_file_path) - if file_name in os.listdir(local_dir): - print(f" {file_name} has been already in {local_dir}.") - else: - print(f" Start downloading {os.path.join(local_dir, file_name)}") - snapshot_download(model_id, allow_file_pattern=origin_file_path, local_dir=local_dir) - downloaded_file_path = os.path.join(local_dir, origin_file_path) - target_file_path = os.path.join(local_dir, os.path.split(origin_file_path)[-1]) - if downloaded_file_path != target_file_path: - shutil.move(downloaded_file_path, target_file_path) - shutil.rmtree(os.path.join(local_dir, origin_file_path.split("/")[0])) - - -def download_from_huggingface(model_id, origin_file_path, local_dir): - os.makedirs(local_dir, exist_ok=True) - file_name = os.path.basename(origin_file_path) - if file_name in os.listdir(local_dir): - print(f" {file_name} has been 
already in {local_dir}.") - else: - print(f" Start downloading {os.path.join(local_dir, file_name)}") - hf_hub_download(model_id, origin_file_path, local_dir=local_dir) - downloaded_file_path = os.path.join(local_dir, origin_file_path) - target_file_path = os.path.join(local_dir, file_name) - if downloaded_file_path != target_file_path: - shutil.move(downloaded_file_path, target_file_path) - shutil.rmtree(os.path.join(local_dir, origin_file_path.split("/")[0])) - - -Preset_model_website: TypeAlias = Literal[ - "HuggingFace", - "ModelScope", -] -website_to_preset_models = { - "HuggingFace": preset_models_on_huggingface, - "ModelScope": preset_models_on_modelscope, -} -website_to_download_fn = { - "HuggingFace": download_from_huggingface, - "ModelScope": download_from_modelscope, -} - - -def download_customized_models( - model_id, - origin_file_path, - local_dir, - downloading_priority: List[Preset_model_website] = ["ModelScope", "HuggingFace"], -): - downloaded_files = [] - for website in downloading_priority: - # Check if the file is downloaded. 
- file_to_download = os.path.join(local_dir, os.path.basename(origin_file_path)) - if file_to_download in downloaded_files: - continue - # Download - website_to_download_fn[website](model_id, origin_file_path, local_dir) - if os.path.basename(origin_file_path) in os.listdir(local_dir): - downloaded_files.append(file_to_download) - return downloaded_files - - -def download_models( - model_id_list: List[Preset_model_id] = [], - downloading_priority: List[Preset_model_website] = ["ModelScope", "HuggingFace"], -): - print(f"Downloading models: {model_id_list}") - downloaded_files = [] - load_files = [] - - for model_id in model_id_list: - for website in downloading_priority: - if model_id in website_to_preset_models[website]: - - # Parse model metadata - model_metadata = website_to_preset_models[website][model_id] - if isinstance(model_metadata, list): - file_data = model_metadata - else: - file_data = model_metadata.get("file_list", []) - - # Try downloading the model from this website. - model_files = [] - for model_id, origin_file_path, local_dir in file_data: - # Check if the file is downloaded. - file_to_download = os.path.join(local_dir, os.path.basename(origin_file_path)) - if file_to_download in downloaded_files: - continue - # Download - website_to_download_fn[website](model_id, origin_file_path, local_dir) - if os.path.basename(origin_file_path) in os.listdir(local_dir): - downloaded_files.append(file_to_download) - model_files.append(file_to_download) - - # If the model is successfully downloaded, break. 
- if len(model_files) > 0: - if isinstance(model_metadata, dict) and "load_path" in model_metadata: - model_files = model_metadata["load_path"] - load_files.extend(model_files) - break - - return load_files diff --git a/diffsynth/models/flux_controlnet.py b/diffsynth/models/flux_controlnet.py deleted file mode 100644 index 85fccd7..0000000 --- a/diffsynth/models/flux_controlnet.py +++ /dev/null @@ -1,331 +0,0 @@ -import torch -from einops import rearrange, repeat -from .flux_dit import RoPEEmbedding, TimestepEmbeddings, FluxJointTransformerBlock, FluxSingleTransformerBlock, RMSNorm -from .utils import hash_state_dict_keys, init_weights_on_device - - - -class FluxControlNet(torch.nn.Module): - def __init__(self, disable_guidance_embedder=False, num_joint_blocks=5, num_single_blocks=10, num_mode=0, mode_dict={}, additional_input_dim=0): - super().__init__() - self.pos_embedder = RoPEEmbedding(3072, 10000, [16, 56, 56]) - self.time_embedder = TimestepEmbeddings(256, 3072) - self.guidance_embedder = None if disable_guidance_embedder else TimestepEmbeddings(256, 3072) - self.pooled_text_embedder = torch.nn.Sequential(torch.nn.Linear(768, 3072), torch.nn.SiLU(), torch.nn.Linear(3072, 3072)) - self.context_embedder = torch.nn.Linear(4096, 3072) - self.x_embedder = torch.nn.Linear(64, 3072) - - self.blocks = torch.nn.ModuleList([FluxJointTransformerBlock(3072, 24) for _ in range(num_joint_blocks)]) - self.single_blocks = torch.nn.ModuleList([FluxSingleTransformerBlock(3072, 24) for _ in range(num_single_blocks)]) - - self.controlnet_blocks = torch.nn.ModuleList([torch.nn.Linear(3072, 3072) for _ in range(num_joint_blocks)]) - self.controlnet_single_blocks = torch.nn.ModuleList([torch.nn.Linear(3072, 3072) for _ in range(num_single_blocks)]) - - self.mode_dict = mode_dict - self.controlnet_mode_embedder = torch.nn.Embedding(num_mode, 3072) if len(mode_dict) > 0 else None - self.controlnet_x_embedder = torch.nn.Linear(64 + additional_input_dim, 3072) - - - def 
prepare_image_ids(self, latents): - batch_size, _, height, width = latents.shape - latent_image_ids = torch.zeros(height // 2, width // 2, 3) - latent_image_ids[..., 1] = latent_image_ids[..., 1] + torch.arange(height // 2)[:, None] - latent_image_ids[..., 2] = latent_image_ids[..., 2] + torch.arange(width // 2)[None, :] - - latent_image_id_height, latent_image_id_width, latent_image_id_channels = latent_image_ids.shape - - latent_image_ids = latent_image_ids[None, :].repeat(batch_size, 1, 1, 1) - latent_image_ids = latent_image_ids.reshape( - batch_size, latent_image_id_height * latent_image_id_width, latent_image_id_channels - ) - latent_image_ids = latent_image_ids.to(device=latents.device, dtype=latents.dtype) - - return latent_image_ids - - - def patchify(self, hidden_states): - hidden_states = rearrange(hidden_states, "B C (H P) (W Q) -> B (H W) (C P Q)", P=2, Q=2) - return hidden_states - - - def align_res_stack_to_original_blocks(self, res_stack, num_blocks, hidden_states): - if len(res_stack) == 0: - return [torch.zeros_like(hidden_states)] * num_blocks - interval = (num_blocks + len(res_stack) - 1) // len(res_stack) - aligned_res_stack = [res_stack[block_id // interval] for block_id in range(num_blocks)] - return aligned_res_stack - - - def forward( - self, - hidden_states, - controlnet_conditioning, - timestep, prompt_emb, pooled_prompt_emb, guidance, text_ids, image_ids=None, - processor_id=None, - tiled=False, tile_size=128, tile_stride=64, - **kwargs - ): - if image_ids is None: - image_ids = self.prepare_image_ids(hidden_states) - - conditioning = self.time_embedder(timestep, hidden_states.dtype) + self.pooled_text_embedder(pooled_prompt_emb) - if self.guidance_embedder is not None: - guidance = guidance * 1000 - conditioning = conditioning + self.guidance_embedder(guidance, hidden_states.dtype) - prompt_emb = self.context_embedder(prompt_emb) - if self.controlnet_mode_embedder is not None: # Different from FluxDiT - processor_id = 
torch.tensor([self.mode_dict[processor_id]], dtype=torch.int) - processor_id = repeat(processor_id, "D -> B D", B=1).to(text_ids.device) - prompt_emb = torch.concat([self.controlnet_mode_embedder(processor_id), prompt_emb], dim=1) - text_ids = torch.cat([text_ids[:, :1], text_ids], dim=1) - image_rotary_emb = self.pos_embedder(torch.cat((text_ids, image_ids), dim=1)) - - hidden_states = self.patchify(hidden_states) - hidden_states = self.x_embedder(hidden_states) - controlnet_conditioning = self.patchify(controlnet_conditioning) # Different from FluxDiT - hidden_states = hidden_states + self.controlnet_x_embedder(controlnet_conditioning) # Different from FluxDiT - - controlnet_res_stack = [] - for block, controlnet_block in zip(self.blocks, self.controlnet_blocks): - hidden_states, prompt_emb = block(hidden_states, prompt_emb, conditioning, image_rotary_emb) - controlnet_res_stack.append(controlnet_block(hidden_states)) - - controlnet_single_res_stack = [] - hidden_states = torch.cat([prompt_emb, hidden_states], dim=1) - for block, controlnet_block in zip(self.single_blocks, self.controlnet_single_blocks): - hidden_states, prompt_emb = block(hidden_states, prompt_emb, conditioning, image_rotary_emb) - controlnet_single_res_stack.append(controlnet_block(hidden_states[:, prompt_emb.shape[1]:])) - - controlnet_res_stack = self.align_res_stack_to_original_blocks(controlnet_res_stack, 19, hidden_states[:, prompt_emb.shape[1]:]) - controlnet_single_res_stack = self.align_res_stack_to_original_blocks(controlnet_single_res_stack, 38, hidden_states[:, prompt_emb.shape[1]:]) - - return controlnet_res_stack, controlnet_single_res_stack - - - @staticmethod - def state_dict_converter(): - return FluxControlNetStateDictConverter() - - def quantize(self): - def cast_to(weight, dtype=None, device=None, copy=False): - if device is None or weight.device == device: - if not copy: - if dtype is None or weight.dtype == dtype: - return weight - return weight.to(dtype=dtype, copy=copy) - 
- r = torch.empty_like(weight, dtype=dtype, device=device) - r.copy_(weight) - return r - - def cast_weight(s, input=None, dtype=None, device=None): - if input is not None: - if dtype is None: - dtype = input.dtype - if device is None: - device = input.device - weight = cast_to(s.weight, dtype, device) - return weight - - def cast_bias_weight(s, input=None, dtype=None, device=None, bias_dtype=None): - if input is not None: - if dtype is None: - dtype = input.dtype - if bias_dtype is None: - bias_dtype = dtype - if device is None: - device = input.device - bias = None - weight = cast_to(s.weight, dtype, device) - bias = cast_to(s.bias, bias_dtype, device) - return weight, bias - - class quantized_layer: - class QLinear(torch.nn.Linear): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def forward(self,input,**kwargs): - weight,bias= cast_bias_weight(self,input) - return torch.nn.functional.linear(input,weight,bias) - - class QRMSNorm(torch.nn.Module): - def __init__(self, module): - super().__init__() - self.module = module - - def forward(self,hidden_states,**kwargs): - weight= cast_weight(self.module,hidden_states) - input_dtype = hidden_states.dtype - variance = hidden_states.to(torch.float32).square().mean(-1, keepdim=True) - hidden_states = hidden_states * torch.rsqrt(variance + self.module.eps) - hidden_states = hidden_states.to(input_dtype) * weight - return hidden_states - - class QEmbedding(torch.nn.Embedding): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def forward(self,input,**kwargs): - weight= cast_weight(self,input) - return torch.nn.functional.embedding( - input, weight, self.padding_idx, self.max_norm, - self.norm_type, self.scale_grad_by_freq, self.sparse) - - def replace_layer(model): - for name, module in model.named_children(): - if isinstance(module,quantized_layer.QRMSNorm): - continue - if isinstance(module, torch.nn.Linear): - with init_weights_on_device(): - new_layer = 
quantized_layer.QLinear(module.in_features,module.out_features) - new_layer.weight = module.weight - if module.bias is not None: - new_layer.bias = module.bias - setattr(model, name, new_layer) - elif isinstance(module, RMSNorm): - if hasattr(module,"quantized"): - continue - module.quantized= True - new_layer = quantized_layer.QRMSNorm(module) - setattr(model, name, new_layer) - elif isinstance(module,torch.nn.Embedding): - rows, cols = module.weight.shape - new_layer = quantized_layer.QEmbedding( - num_embeddings=rows, - embedding_dim=cols, - _weight=module.weight, - # _freeze=module.freeze, - padding_idx=module.padding_idx, - max_norm=module.max_norm, - norm_type=module.norm_type, - scale_grad_by_freq=module.scale_grad_by_freq, - sparse=module.sparse) - setattr(model, name, new_layer) - else: - replace_layer(module) - - replace_layer(self) - - - -class FluxControlNetStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - hash_value = hash_state_dict_keys(state_dict) - global_rename_dict = { - "context_embedder": "context_embedder", - "x_embedder": "x_embedder", - "time_text_embed.timestep_embedder.linear_1": "time_embedder.timestep_embedder.0", - "time_text_embed.timestep_embedder.linear_2": "time_embedder.timestep_embedder.2", - "time_text_embed.guidance_embedder.linear_1": "guidance_embedder.timestep_embedder.0", - "time_text_embed.guidance_embedder.linear_2": "guidance_embedder.timestep_embedder.2", - "time_text_embed.text_embedder.linear_1": "pooled_text_embedder.0", - "time_text_embed.text_embedder.linear_2": "pooled_text_embedder.2", - "norm_out.linear": "final_norm_out.linear", - "proj_out": "final_proj_out", - } - rename_dict = { - "proj_out": "proj_out", - "norm1.linear": "norm1_a.linear", - "norm1_context.linear": "norm1_b.linear", - "attn.to_q": "attn.a_to_q", - "attn.to_k": "attn.a_to_k", - "attn.to_v": "attn.a_to_v", - "attn.to_out.0": "attn.a_to_out", - "attn.add_q_proj": "attn.b_to_q", - "attn.add_k_proj": 
"attn.b_to_k", - "attn.add_v_proj": "attn.b_to_v", - "attn.to_add_out": "attn.b_to_out", - "ff.net.0.proj": "ff_a.0", - "ff.net.2": "ff_a.2", - "ff_context.net.0.proj": "ff_b.0", - "ff_context.net.2": "ff_b.2", - "attn.norm_q": "attn.norm_q_a", - "attn.norm_k": "attn.norm_k_a", - "attn.norm_added_q": "attn.norm_q_b", - "attn.norm_added_k": "attn.norm_k_b", - } - rename_dict_single = { - "attn.to_q": "a_to_q", - "attn.to_k": "a_to_k", - "attn.to_v": "a_to_v", - "attn.norm_q": "norm_q_a", - "attn.norm_k": "norm_k_a", - "norm.linear": "norm.linear", - "proj_mlp": "proj_in_besides_attn", - "proj_out": "proj_out", - } - state_dict_ = {} - for name, param in state_dict.items(): - if name.endswith(".weight") or name.endswith(".bias"): - suffix = ".weight" if name.endswith(".weight") else ".bias" - prefix = name[:-len(suffix)] - if prefix in global_rename_dict: - state_dict_[global_rename_dict[prefix] + suffix] = param - elif prefix.startswith("transformer_blocks."): - names = prefix.split(".") - names[0] = "blocks" - middle = ".".join(names[2:]) - if middle in rename_dict: - name_ = ".".join(names[:2] + [rename_dict[middle]] + [suffix[1:]]) - state_dict_[name_] = param - elif prefix.startswith("single_transformer_blocks."): - names = prefix.split(".") - names[0] = "single_blocks" - middle = ".".join(names[2:]) - if middle in rename_dict_single: - name_ = ".".join(names[:2] + [rename_dict_single[middle]] + [suffix[1:]]) - state_dict_[name_] = param - else: - state_dict_[name] = param - else: - state_dict_[name] = param - for name in list(state_dict_.keys()): - if ".proj_in_besides_attn." 
in name: - name_ = name.replace(".proj_in_besides_attn.", ".to_qkv_mlp.") - param = torch.concat([ - state_dict_[name.replace(".proj_in_besides_attn.", f".a_to_q.")], - state_dict_[name.replace(".proj_in_besides_attn.", f".a_to_k.")], - state_dict_[name.replace(".proj_in_besides_attn.", f".a_to_v.")], - state_dict_[name], - ], dim=0) - state_dict_[name_] = param - state_dict_.pop(name.replace(".proj_in_besides_attn.", f".a_to_q.")) - state_dict_.pop(name.replace(".proj_in_besides_attn.", f".a_to_k.")) - state_dict_.pop(name.replace(".proj_in_besides_attn.", f".a_to_v.")) - state_dict_.pop(name) - for name in list(state_dict_.keys()): - for component in ["a", "b"]: - if f".{component}_to_q." in name: - name_ = name.replace(f".{component}_to_q.", f".{component}_to_qkv.") - param = torch.concat([ - state_dict_[name.replace(f".{component}_to_q.", f".{component}_to_q.")], - state_dict_[name.replace(f".{component}_to_q.", f".{component}_to_k.")], - state_dict_[name.replace(f".{component}_to_q.", f".{component}_to_v.")], - ], dim=0) - state_dict_[name_] = param - state_dict_.pop(name.replace(f".{component}_to_q.", f".{component}_to_q.")) - state_dict_.pop(name.replace(f".{component}_to_q.", f".{component}_to_k.")) - state_dict_.pop(name.replace(f".{component}_to_q.", f".{component}_to_v.")) - if hash_value == "78d18b9101345ff695f312e7e62538c0": - extra_kwargs = {"num_mode": 10, "mode_dict": {"canny": 0, "tile": 1, "depth": 2, "blur": 3, "pose": 4, "gray": 5, "lq": 6}} - elif hash_value == "b001c89139b5f053c715fe772362dd2a": - extra_kwargs = {"num_single_blocks": 0} - elif hash_value == "52357cb26250681367488a8954c271e8": - extra_kwargs = {"num_joint_blocks": 6, "num_single_blocks": 0, "additional_input_dim": 4} - elif hash_value == "0cfd1740758423a2a854d67c136d1e8c": - extra_kwargs = {"num_joint_blocks": 4, "num_single_blocks": 1} - elif hash_value == "7f9583eb8ba86642abb9a21a4b2c9e16": - extra_kwargs = {"num_joint_blocks": 4, "num_single_blocks": 10} - elif hash_value == 
"43ad5aaa27dd4ee01b832ed16773fa52": - extra_kwargs = {"num_joint_blocks": 6, "num_single_blocks": 0} - else: - extra_kwargs = {} - return state_dict_, extra_kwargs - - - def from_civitai(self, state_dict): - return self.from_diffusers(state_dict) diff --git a/diffsynth/models/flux_dit.py b/diffsynth/models/flux_dit.py deleted file mode 100644 index 411ac9c..0000000 --- a/diffsynth/models/flux_dit.py +++ /dev/null @@ -1,748 +0,0 @@ -import torch -from .sd3_dit import TimestepEmbeddings, AdaLayerNorm, RMSNorm -from einops import rearrange -from .tiler import TileWorker -from .utils import init_weights_on_device, hash_state_dict_keys - -def interact_with_ipadapter(hidden_states, q, ip_k, ip_v, scale=1.0): - batch_size, num_tokens = hidden_states.shape[0:2] - ip_hidden_states = torch.nn.functional.scaled_dot_product_attention(q, ip_k, ip_v) - ip_hidden_states = ip_hidden_states.transpose(1, 2).reshape(batch_size, num_tokens, -1) - hidden_states = hidden_states + scale * ip_hidden_states - return hidden_states - - -class RoPEEmbedding(torch.nn.Module): - def __init__(self, dim, theta, axes_dim): - super().__init__() - self.dim = dim - self.theta = theta - self.axes_dim = axes_dim - - - def rope(self, pos: torch.Tensor, dim: int, theta: int) -> torch.Tensor: - assert dim % 2 == 0, "The dimension must be even." 
- - scale = torch.arange(0, dim, 2, dtype=torch.float64, device=pos.device) / dim - omega = 1.0 / (theta**scale) - - batch_size, seq_length = pos.shape - out = torch.einsum("...n,d->...nd", pos, omega) - cos_out = torch.cos(out) - sin_out = torch.sin(out) - - stacked_out = torch.stack([cos_out, -sin_out, sin_out, cos_out], dim=-1) - out = stacked_out.view(batch_size, -1, dim // 2, 2, 2) - return out.float() - - - def forward(self, ids): - n_axes = ids.shape[-1] - emb = torch.cat([self.rope(ids[..., i], self.axes_dim[i], self.theta) for i in range(n_axes)], dim=-3) - return emb.unsqueeze(1) - - - -class FluxJointAttention(torch.nn.Module): - def __init__(self, dim_a, dim_b, num_heads, head_dim, only_out_a=False): - super().__init__() - self.num_heads = num_heads - self.head_dim = head_dim - self.only_out_a = only_out_a - - self.a_to_qkv = torch.nn.Linear(dim_a, dim_a * 3) - self.b_to_qkv = torch.nn.Linear(dim_b, dim_b * 3) - - self.norm_q_a = RMSNorm(head_dim, eps=1e-6) - self.norm_k_a = RMSNorm(head_dim, eps=1e-6) - self.norm_q_b = RMSNorm(head_dim, eps=1e-6) - self.norm_k_b = RMSNorm(head_dim, eps=1e-6) - - self.a_to_out = torch.nn.Linear(dim_a, dim_a) - if not only_out_a: - self.b_to_out = torch.nn.Linear(dim_b, dim_b) - - - def apply_rope(self, xq, xk, freqs_cis): - xq_ = xq.float().reshape(*xq.shape[:-1], -1, 1, 2) - xk_ = xk.float().reshape(*xk.shape[:-1], -1, 1, 2) - xq_out = freqs_cis[..., 0] * xq_[..., 0] + freqs_cis[..., 1] * xq_[..., 1] - xk_out = freqs_cis[..., 0] * xk_[..., 0] + freqs_cis[..., 1] * xk_[..., 1] - return xq_out.reshape(*xq.shape).type_as(xq), xk_out.reshape(*xk.shape).type_as(xk) - - def forward(self, hidden_states_a, hidden_states_b, image_rotary_emb, attn_mask=None, ipadapter_kwargs_list=None): - batch_size = hidden_states_a.shape[0] - - # Part A - qkv_a = self.a_to_qkv(hidden_states_a) - qkv_a = qkv_a.view(batch_size, -1, 3 * self.num_heads, self.head_dim).transpose(1, 2) - q_a, k_a, v_a = qkv_a.chunk(3, dim=1) - q_a, k_a = 
self.norm_q_a(q_a), self.norm_k_a(k_a) - - # Part B - qkv_b = self.b_to_qkv(hidden_states_b) - qkv_b = qkv_b.view(batch_size, -1, 3 * self.num_heads, self.head_dim).transpose(1, 2) - q_b, k_b, v_b = qkv_b.chunk(3, dim=1) - q_b, k_b = self.norm_q_b(q_b), self.norm_k_b(k_b) - - q = torch.concat([q_b, q_a], dim=2) - k = torch.concat([k_b, k_a], dim=2) - v = torch.concat([v_b, v_a], dim=2) - - q, k = self.apply_rope(q, k, image_rotary_emb) - - hidden_states = torch.nn.functional.scaled_dot_product_attention(q, k, v, attn_mask=attn_mask) - hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, self.num_heads * self.head_dim) - hidden_states = hidden_states.to(q.dtype) - hidden_states_b, hidden_states_a = hidden_states[:, :hidden_states_b.shape[1]], hidden_states[:, hidden_states_b.shape[1]:] - if ipadapter_kwargs_list is not None: - hidden_states_a = interact_with_ipadapter(hidden_states_a, q_a, **ipadapter_kwargs_list) - hidden_states_a = self.a_to_out(hidden_states_a) - if self.only_out_a: - return hidden_states_a - else: - hidden_states_b = self.b_to_out(hidden_states_b) - return hidden_states_a, hidden_states_b - - - -class FluxJointTransformerBlock(torch.nn.Module): - def __init__(self, dim, num_attention_heads): - super().__init__() - self.norm1_a = AdaLayerNorm(dim) - self.norm1_b = AdaLayerNorm(dim) - - self.attn = FluxJointAttention(dim, dim, num_attention_heads, dim // num_attention_heads) - - self.norm2_a = torch.nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6) - self.ff_a = torch.nn.Sequential( - torch.nn.Linear(dim, dim*4), - torch.nn.GELU(approximate="tanh"), - torch.nn.Linear(dim*4, dim) - ) - - self.norm2_b = torch.nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6) - self.ff_b = torch.nn.Sequential( - torch.nn.Linear(dim, dim*4), - torch.nn.GELU(approximate="tanh"), - torch.nn.Linear(dim*4, dim) - ) - - - def forward(self, hidden_states_a, hidden_states_b, temb, image_rotary_emb, attn_mask=None, ipadapter_kwargs_list=None): - 
norm_hidden_states_a, gate_msa_a, shift_mlp_a, scale_mlp_a, gate_mlp_a = self.norm1_a(hidden_states_a, emb=temb) - norm_hidden_states_b, gate_msa_b, shift_mlp_b, scale_mlp_b, gate_mlp_b = self.norm1_b(hidden_states_b, emb=temb) - - # Attention - attn_output_a, attn_output_b = self.attn(norm_hidden_states_a, norm_hidden_states_b, image_rotary_emb, attn_mask, ipadapter_kwargs_list) - - # Part A - hidden_states_a = hidden_states_a + gate_msa_a * attn_output_a - norm_hidden_states_a = self.norm2_a(hidden_states_a) * (1 + scale_mlp_a) + shift_mlp_a - hidden_states_a = hidden_states_a + gate_mlp_a * self.ff_a(norm_hidden_states_a) - - # Part B - hidden_states_b = hidden_states_b + gate_msa_b * attn_output_b - norm_hidden_states_b = self.norm2_b(hidden_states_b) * (1 + scale_mlp_b) + shift_mlp_b - hidden_states_b = hidden_states_b + gate_mlp_b * self.ff_b(norm_hidden_states_b) - - return hidden_states_a, hidden_states_b - - - -class FluxSingleAttention(torch.nn.Module): - def __init__(self, dim_a, dim_b, num_heads, head_dim): - super().__init__() - self.num_heads = num_heads - self.head_dim = head_dim - - self.a_to_qkv = torch.nn.Linear(dim_a, dim_a * 3) - - self.norm_q_a = RMSNorm(head_dim, eps=1e-6) - self.norm_k_a = RMSNorm(head_dim, eps=1e-6) - - - def apply_rope(self, xq, xk, freqs_cis): - xq_ = xq.float().reshape(*xq.shape[:-1], -1, 1, 2) - xk_ = xk.float().reshape(*xk.shape[:-1], -1, 1, 2) - xq_out = freqs_cis[..., 0] * xq_[..., 0] + freqs_cis[..., 1] * xq_[..., 1] - xk_out = freqs_cis[..., 0] * xk_[..., 0] + freqs_cis[..., 1] * xk_[..., 1] - return xq_out.reshape(*xq.shape).type_as(xq), xk_out.reshape(*xk.shape).type_as(xk) - - - def forward(self, hidden_states, image_rotary_emb): - batch_size = hidden_states.shape[0] - - qkv_a = self.a_to_qkv(hidden_states) - qkv_a = qkv_a.view(batch_size, -1, 3 * self.num_heads, self.head_dim).transpose(1, 2) - q_a, k_a, v = qkv_a.chunk(3, dim=1) - q_a, k_a = self.norm_q_a(q_a), self.norm_k_a(k_a) - - q, k = self.apply_rope(q_a, 
k_a, image_rotary_emb) - - hidden_states = torch.nn.functional.scaled_dot_product_attention(q, k, v) - hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, self.num_heads * self.head_dim) - hidden_states = hidden_states.to(q.dtype) - return hidden_states - - - -class AdaLayerNormSingle(torch.nn.Module): - def __init__(self, dim): - super().__init__() - self.silu = torch.nn.SiLU() - self.linear = torch.nn.Linear(dim, 3 * dim, bias=True) - self.norm = torch.nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6) - - - def forward(self, x, emb): - emb = self.linear(self.silu(emb)) - shift_msa, scale_msa, gate_msa = emb.chunk(3, dim=1) - x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None] - return x, gate_msa - - - -class FluxSingleTransformerBlock(torch.nn.Module): - def __init__(self, dim, num_attention_heads): - super().__init__() - self.num_heads = num_attention_heads - self.head_dim = dim // num_attention_heads - self.dim = dim - - self.norm = AdaLayerNormSingle(dim) - self.to_qkv_mlp = torch.nn.Linear(dim, dim * (3 + 4)) - self.norm_q_a = RMSNorm(self.head_dim, eps=1e-6) - self.norm_k_a = RMSNorm(self.head_dim, eps=1e-6) - - self.proj_out = torch.nn.Linear(dim * 5, dim) - - - def apply_rope(self, xq, xk, freqs_cis): - xq_ = xq.float().reshape(*xq.shape[:-1], -1, 1, 2) - xk_ = xk.float().reshape(*xk.shape[:-1], -1, 1, 2) - xq_out = freqs_cis[..., 0] * xq_[..., 0] + freqs_cis[..., 1] * xq_[..., 1] - xk_out = freqs_cis[..., 0] * xk_[..., 0] + freqs_cis[..., 1] * xk_[..., 1] - return xq_out.reshape(*xq.shape).type_as(xq), xk_out.reshape(*xk.shape).type_as(xk) - - - def process_attention(self, hidden_states, image_rotary_emb, attn_mask=None, ipadapter_kwargs_list=None): - batch_size = hidden_states.shape[0] - - qkv = hidden_states.view(batch_size, -1, 3 * self.num_heads, self.head_dim).transpose(1, 2) - q, k, v = qkv.chunk(3, dim=1) - q, k = self.norm_q_a(q), self.norm_k_a(k) - - q, k = self.apply_rope(q, k, image_rotary_emb) - - 
hidden_states = torch.nn.functional.scaled_dot_product_attention(q, k, v, attn_mask=attn_mask) - hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, self.num_heads * self.head_dim) - hidden_states = hidden_states.to(q.dtype) - if ipadapter_kwargs_list is not None: - hidden_states = interact_with_ipadapter(hidden_states, q, **ipadapter_kwargs_list) - return hidden_states - - - def forward(self, hidden_states_a, hidden_states_b, temb, image_rotary_emb, attn_mask=None, ipadapter_kwargs_list=None): - residual = hidden_states_a - norm_hidden_states, gate = self.norm(hidden_states_a, emb=temb) - hidden_states_a = self.to_qkv_mlp(norm_hidden_states) - attn_output, mlp_hidden_states = hidden_states_a[:, :, :self.dim * 3], hidden_states_a[:, :, self.dim * 3:] - - attn_output = self.process_attention(attn_output, image_rotary_emb, attn_mask, ipadapter_kwargs_list) - mlp_hidden_states = torch.nn.functional.gelu(mlp_hidden_states, approximate="tanh") - - hidden_states_a = torch.cat([attn_output, mlp_hidden_states], dim=2) - hidden_states_a = gate.unsqueeze(1) * self.proj_out(hidden_states_a) - hidden_states_a = residual + hidden_states_a - - return hidden_states_a, hidden_states_b - - - -class AdaLayerNormContinuous(torch.nn.Module): - def __init__(self, dim): - super().__init__() - self.silu = torch.nn.SiLU() - self.linear = torch.nn.Linear(dim, dim * 2, bias=True) - self.norm = torch.nn.LayerNorm(dim, eps=1e-6, elementwise_affine=False) - - def forward(self, x, conditioning): - emb = self.linear(self.silu(conditioning)) - scale, shift = torch.chunk(emb, 2, dim=1) - x = self.norm(x) * (1 + scale)[:, None] + shift[:, None] - return x - - - -class FluxDiT(torch.nn.Module): - def __init__(self, disable_guidance_embedder=False, input_dim=64, num_blocks=19): - super().__init__() - self.pos_embedder = RoPEEmbedding(3072, 10000, [16, 56, 56]) - self.time_embedder = TimestepEmbeddings(256, 3072) - self.guidance_embedder = None if disable_guidance_embedder else 
TimestepEmbeddings(256, 3072) - self.pooled_text_embedder = torch.nn.Sequential(torch.nn.Linear(768, 3072), torch.nn.SiLU(), torch.nn.Linear(3072, 3072)) - self.context_embedder = torch.nn.Linear(4096, 3072) - self.x_embedder = torch.nn.Linear(input_dim, 3072) - - self.blocks = torch.nn.ModuleList([FluxJointTransformerBlock(3072, 24) for _ in range(num_blocks)]) - self.single_blocks = torch.nn.ModuleList([FluxSingleTransformerBlock(3072, 24) for _ in range(38)]) - - self.final_norm_out = AdaLayerNormContinuous(3072) - self.final_proj_out = torch.nn.Linear(3072, 64) - - self.input_dim = input_dim - - - def patchify(self, hidden_states): - hidden_states = rearrange(hidden_states, "B C (H P) (W Q) -> B (H W) (C P Q)", P=2, Q=2) - return hidden_states - - - def unpatchify(self, hidden_states, height, width): - hidden_states = rearrange(hidden_states, "B (H W) (C P Q) -> B C (H P) (W Q)", P=2, Q=2, H=height//2, W=width//2) - return hidden_states - - - def prepare_image_ids(self, latents): - batch_size, _, height, width = latents.shape - latent_image_ids = torch.zeros(height // 2, width // 2, 3) - latent_image_ids[..., 1] = latent_image_ids[..., 1] + torch.arange(height // 2)[:, None] - latent_image_ids[..., 2] = latent_image_ids[..., 2] + torch.arange(width // 2)[None, :] - - latent_image_id_height, latent_image_id_width, latent_image_id_channels = latent_image_ids.shape - - latent_image_ids = latent_image_ids[None, :].repeat(batch_size, 1, 1, 1) - latent_image_ids = latent_image_ids.reshape( - batch_size, latent_image_id_height * latent_image_id_width, latent_image_id_channels - ) - latent_image_ids = latent_image_ids.to(device=latents.device, dtype=latents.dtype) - - return latent_image_ids - - - def tiled_forward( - self, - hidden_states, - timestep, prompt_emb, pooled_prompt_emb, guidance, text_ids, - tile_size=128, tile_stride=64, - **kwargs - ): - # Due to the global positional embedding, we cannot implement layer-wise tiled forward. 
- hidden_states = TileWorker().tiled_forward( - lambda x: self.forward(x, timestep, prompt_emb, pooled_prompt_emb, guidance, text_ids, image_ids=None), - hidden_states, - tile_size, - tile_stride, - tile_device=hidden_states.device, - tile_dtype=hidden_states.dtype - ) - return hidden_states - - - def construct_mask(self, entity_masks, prompt_seq_len, image_seq_len): - N = len(entity_masks) - batch_size = entity_masks[0].shape[0] - total_seq_len = N * prompt_seq_len + image_seq_len - patched_masks = [self.patchify(entity_masks[i]) for i in range(N)] - attention_mask = torch.ones((batch_size, total_seq_len, total_seq_len), dtype=torch.bool).to(device=entity_masks[0].device) - - image_start = N * prompt_seq_len - image_end = N * prompt_seq_len + image_seq_len - # prompt-image mask - for i in range(N): - prompt_start = i * prompt_seq_len - prompt_end = (i + 1) * prompt_seq_len - image_mask = torch.sum(patched_masks[i], dim=-1) > 0 - image_mask = image_mask.unsqueeze(1).repeat(1, prompt_seq_len, 1) - # prompt update with image - attention_mask[:, prompt_start:prompt_end, image_start:image_end] = image_mask - # image update with prompt - attention_mask[:, image_start:image_end, prompt_start:prompt_end] = image_mask.transpose(1, 2) - # prompt-prompt mask - for i in range(N): - for j in range(N): - if i != j: - prompt_start_i = i * prompt_seq_len - prompt_end_i = (i + 1) * prompt_seq_len - prompt_start_j = j * prompt_seq_len - prompt_end_j = (j + 1) * prompt_seq_len - attention_mask[:, prompt_start_i:prompt_end_i, prompt_start_j:prompt_end_j] = False - - attention_mask = attention_mask.float() - attention_mask[attention_mask == 0] = float('-inf') - attention_mask[attention_mask == 1] = 0 - return attention_mask - - - def process_entity_masks(self, hidden_states, prompt_emb, entity_prompt_emb, entity_masks, text_ids, image_ids, repeat_dim): - max_masks = 0 - attention_mask = None - prompt_embs = [prompt_emb] - if entity_masks is not None: - # entity_masks - batch_size, 
max_masks = entity_masks.shape[0], entity_masks.shape[1] - entity_masks = entity_masks.repeat(1, 1, repeat_dim, 1, 1) - entity_masks = [entity_masks[:, i, None].squeeze(1) for i in range(max_masks)] - # global mask - global_mask = torch.ones_like(entity_masks[0]).to(device=hidden_states.device, dtype=hidden_states.dtype) - entity_masks = entity_masks + [global_mask] # append global to last - # attention mask - attention_mask = self.construct_mask(entity_masks, prompt_emb.shape[1], hidden_states.shape[1]) - attention_mask = attention_mask.to(device=hidden_states.device, dtype=hidden_states.dtype) - attention_mask = attention_mask.unsqueeze(1) - # embds: n_masks * b * seq * d - local_embs = [entity_prompt_emb[:, i, None].squeeze(1) for i in range(max_masks)] - prompt_embs = local_embs + prompt_embs # append global to last - prompt_embs = [self.context_embedder(prompt_emb) for prompt_emb in prompt_embs] - prompt_emb = torch.cat(prompt_embs, dim=1) - - # positional embedding - text_ids = torch.cat([text_ids] * (max_masks + 1), dim=1) - image_rotary_emb = self.pos_embedder(torch.cat((text_ids, image_ids), dim=1)) - return prompt_emb, image_rotary_emb, attention_mask - - - def forward( - self, - hidden_states, - timestep, prompt_emb, pooled_prompt_emb, guidance, text_ids, image_ids=None, - tiled=False, tile_size=128, tile_stride=64, entity_prompt_emb=None, entity_masks=None, - use_gradient_checkpointing=False, - **kwargs - ): - if tiled: - return self.tiled_forward( - hidden_states, - timestep, prompt_emb, pooled_prompt_emb, guidance, text_ids, - tile_size=tile_size, tile_stride=tile_stride, - **kwargs - ) - - if image_ids is None: - image_ids = self.prepare_image_ids(hidden_states) - - conditioning = self.time_embedder(timestep, hidden_states.dtype) + self.pooled_text_embedder(pooled_prompt_emb) - if self.guidance_embedder is not None: - guidance = guidance * 1000 - conditioning = conditioning + self.guidance_embedder(guidance, hidden_states.dtype) - - height, width = 
hidden_states.shape[-2:] - hidden_states = self.patchify(hidden_states) - hidden_states = self.x_embedder(hidden_states) - - if entity_prompt_emb is not None and entity_masks is not None: - prompt_emb, image_rotary_emb, attention_mask = self.process_entity_masks(hidden_states, prompt_emb, entity_prompt_emb, entity_masks, text_ids, image_ids) - else: - prompt_emb = self.context_embedder(prompt_emb) - image_rotary_emb = self.pos_embedder(torch.cat((text_ids, image_ids), dim=1)) - attention_mask = None - - def create_custom_forward(module): - def custom_forward(*inputs): - return module(*inputs) - return custom_forward - - for block in self.blocks: - if self.training and use_gradient_checkpointing: - hidden_states, prompt_emb = torch.utils.checkpoint.checkpoint( - create_custom_forward(block), - hidden_states, prompt_emb, conditioning, image_rotary_emb, attention_mask, - use_reentrant=False, - ) - else: - hidden_states, prompt_emb = block(hidden_states, prompt_emb, conditioning, image_rotary_emb, attention_mask) - - hidden_states = torch.cat([prompt_emb, hidden_states], dim=1) - for block in self.single_blocks: - if self.training and use_gradient_checkpointing: - hidden_states, prompt_emb = torch.utils.checkpoint.checkpoint( - create_custom_forward(block), - hidden_states, prompt_emb, conditioning, image_rotary_emb, attention_mask, - use_reentrant=False, - ) - else: - hidden_states, prompt_emb = block(hidden_states, prompt_emb, conditioning, image_rotary_emb, attention_mask) - hidden_states = hidden_states[:, prompt_emb.shape[1]:] - - hidden_states = self.final_norm_out(hidden_states, conditioning) - hidden_states = self.final_proj_out(hidden_states) - hidden_states = self.unpatchify(hidden_states, height, width) - - return hidden_states - - - def quantize(self): - def cast_to(weight, dtype=None, device=None, copy=False): - if device is None or weight.device == device: - if not copy: - if dtype is None or weight.dtype == dtype: - return weight - return 
weight.to(dtype=dtype, copy=copy) - - r = torch.empty_like(weight, dtype=dtype, device=device) - r.copy_(weight) - return r - - def cast_weight(s, input=None, dtype=None, device=None): - if input is not None: - if dtype is None: - dtype = input.dtype - if device is None: - device = input.device - weight = cast_to(s.weight, dtype, device) - return weight - - def cast_bias_weight(s, input=None, dtype=None, device=None, bias_dtype=None): - if input is not None: - if dtype is None: - dtype = input.dtype - if bias_dtype is None: - bias_dtype = dtype - if device is None: - device = input.device - bias = None - weight = cast_to(s.weight, dtype, device) - bias = cast_to(s.bias, bias_dtype, device) - return weight, bias - - class quantized_layer: - class Linear(torch.nn.Linear): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def forward(self,input,**kwargs): - weight,bias= cast_bias_weight(self,input) - return torch.nn.functional.linear(input,weight,bias) - - class RMSNorm(torch.nn.Module): - def __init__(self, module): - super().__init__() - self.module = module - - def forward(self,hidden_states,**kwargs): - weight= cast_weight(self.module,hidden_states) - input_dtype = hidden_states.dtype - variance = hidden_states.to(torch.float32).square().mean(-1, keepdim=True) - hidden_states = hidden_states * torch.rsqrt(variance + self.module.eps) - hidden_states = hidden_states.to(input_dtype) * weight - return hidden_states - - def replace_layer(model): - for name, module in model.named_children(): - if isinstance(module, torch.nn.Linear): - with init_weights_on_device(): - new_layer = quantized_layer.Linear(module.in_features,module.out_features) - new_layer.weight = module.weight - if module.bias is not None: - new_layer.bias = module.bias - # del module - setattr(model, name, new_layer) - elif isinstance(module, RMSNorm): - if hasattr(module,"quantized"): - continue - module.quantized= True - new_layer = quantized_layer.RMSNorm(module) - 
setattr(model, name, new_layer) - else: - replace_layer(module) - - replace_layer(self) - - - @staticmethod - def state_dict_converter(): - return FluxDiTStateDictConverter() - - -class FluxDiTStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - global_rename_dict = { - "context_embedder": "context_embedder", - "x_embedder": "x_embedder", - "time_text_embed.timestep_embedder.linear_1": "time_embedder.timestep_embedder.0", - "time_text_embed.timestep_embedder.linear_2": "time_embedder.timestep_embedder.2", - "time_text_embed.guidance_embedder.linear_1": "guidance_embedder.timestep_embedder.0", - "time_text_embed.guidance_embedder.linear_2": "guidance_embedder.timestep_embedder.2", - "time_text_embed.text_embedder.linear_1": "pooled_text_embedder.0", - "time_text_embed.text_embedder.linear_2": "pooled_text_embedder.2", - "norm_out.linear": "final_norm_out.linear", - "proj_out": "final_proj_out", - } - rename_dict = { - "proj_out": "proj_out", - "norm1.linear": "norm1_a.linear", - "norm1_context.linear": "norm1_b.linear", - "attn.to_q": "attn.a_to_q", - "attn.to_k": "attn.a_to_k", - "attn.to_v": "attn.a_to_v", - "attn.to_out.0": "attn.a_to_out", - "attn.add_q_proj": "attn.b_to_q", - "attn.add_k_proj": "attn.b_to_k", - "attn.add_v_proj": "attn.b_to_v", - "attn.to_add_out": "attn.b_to_out", - "ff.net.0.proj": "ff_a.0", - "ff.net.2": "ff_a.2", - "ff_context.net.0.proj": "ff_b.0", - "ff_context.net.2": "ff_b.2", - "attn.norm_q": "attn.norm_q_a", - "attn.norm_k": "attn.norm_k_a", - "attn.norm_added_q": "attn.norm_q_b", - "attn.norm_added_k": "attn.norm_k_b", - } - rename_dict_single = { - "attn.to_q": "a_to_q", - "attn.to_k": "a_to_k", - "attn.to_v": "a_to_v", - "attn.norm_q": "norm_q_a", - "attn.norm_k": "norm_k_a", - "norm.linear": "norm.linear", - "proj_mlp": "proj_in_besides_attn", - "proj_out": "proj_out", - } - state_dict_ = {} - for name, param in state_dict.items(): - if name.endswith(".weight") or name.endswith(".bias"): - 
suffix = ".weight" if name.endswith(".weight") else ".bias" - prefix = name[:-len(suffix)] - if prefix in global_rename_dict: - state_dict_[global_rename_dict[prefix] + suffix] = param - elif prefix.startswith("transformer_blocks."): - names = prefix.split(".") - names[0] = "blocks" - middle = ".".join(names[2:]) - if middle in rename_dict: - name_ = ".".join(names[:2] + [rename_dict[middle]] + [suffix[1:]]) - state_dict_[name_] = param - elif prefix.startswith("single_transformer_blocks."): - names = prefix.split(".") - names[0] = "single_blocks" - middle = ".".join(names[2:]) - if middle in rename_dict_single: - name_ = ".".join(names[:2] + [rename_dict_single[middle]] + [suffix[1:]]) - state_dict_[name_] = param - else: - pass - else: - pass - for name in list(state_dict_.keys()): - if "single_blocks." in name and ".a_to_q." in name: - mlp = state_dict_.get(name.replace(".a_to_q.", ".proj_in_besides_attn."), None) - if mlp is None: - mlp = torch.zeros(4 * state_dict_[name].shape[0], - *state_dict_[name].shape[1:], - dtype=state_dict_[name].dtype) - else: - state_dict_.pop(name.replace(".a_to_q.", ".proj_in_besides_attn.")) - param = torch.concat([ - state_dict_.pop(name), - state_dict_.pop(name.replace(".a_to_q.", ".a_to_k.")), - state_dict_.pop(name.replace(".a_to_q.", ".a_to_v.")), - mlp, - ], dim=0) - name_ = name.replace(".a_to_q.", ".to_qkv_mlp.") - state_dict_[name_] = param - for name in list(state_dict_.keys()): - for component in ["a", "b"]: - if f".{component}_to_q." 
in name: - name_ = name.replace(f".{component}_to_q.", f".{component}_to_qkv.") - param = torch.concat([ - state_dict_[name.replace(f".{component}_to_q.", f".{component}_to_q.")], - state_dict_[name.replace(f".{component}_to_q.", f".{component}_to_k.")], - state_dict_[name.replace(f".{component}_to_q.", f".{component}_to_v.")], - ], dim=0) - state_dict_[name_] = param - state_dict_.pop(name.replace(f".{component}_to_q.", f".{component}_to_q.")) - state_dict_.pop(name.replace(f".{component}_to_q.", f".{component}_to_k.")) - state_dict_.pop(name.replace(f".{component}_to_q.", f".{component}_to_v.")) - return state_dict_ - - def from_civitai(self, state_dict): - if hash_state_dict_keys(state_dict, with_shape=True) in ["3e6c61b0f9471135fc9c6d6a98e98b6d", "63c969fd37cce769a90aa781fbff5f81"]: - dit_state_dict = {key.replace("pipe.dit.", ""): value for key, value in state_dict.items() if key.startswith('pipe.dit.')} - return dit_state_dict - rename_dict = { - "time_in.in_layer.bias": "time_embedder.timestep_embedder.0.bias", - "time_in.in_layer.weight": "time_embedder.timestep_embedder.0.weight", - "time_in.out_layer.bias": "time_embedder.timestep_embedder.2.bias", - "time_in.out_layer.weight": "time_embedder.timestep_embedder.2.weight", - "txt_in.bias": "context_embedder.bias", - "txt_in.weight": "context_embedder.weight", - "vector_in.in_layer.bias": "pooled_text_embedder.0.bias", - "vector_in.in_layer.weight": "pooled_text_embedder.0.weight", - "vector_in.out_layer.bias": "pooled_text_embedder.2.bias", - "vector_in.out_layer.weight": "pooled_text_embedder.2.weight", - "final_layer.linear.bias": "final_proj_out.bias", - "final_layer.linear.weight": "final_proj_out.weight", - "guidance_in.in_layer.bias": "guidance_embedder.timestep_embedder.0.bias", - "guidance_in.in_layer.weight": "guidance_embedder.timestep_embedder.0.weight", - "guidance_in.out_layer.bias": "guidance_embedder.timestep_embedder.2.bias", - "guidance_in.out_layer.weight": 
"guidance_embedder.timestep_embedder.2.weight", - "img_in.bias": "x_embedder.bias", - "img_in.weight": "x_embedder.weight", - "final_layer.adaLN_modulation.1.weight": "final_norm_out.linear.weight", - "final_layer.adaLN_modulation.1.bias": "final_norm_out.linear.bias", - } - suffix_rename_dict = { - "img_attn.norm.key_norm.scale": "attn.norm_k_a.weight", - "img_attn.norm.query_norm.scale": "attn.norm_q_a.weight", - "img_attn.proj.bias": "attn.a_to_out.bias", - "img_attn.proj.weight": "attn.a_to_out.weight", - "img_attn.qkv.bias": "attn.a_to_qkv.bias", - "img_attn.qkv.weight": "attn.a_to_qkv.weight", - "img_mlp.0.bias": "ff_a.0.bias", - "img_mlp.0.weight": "ff_a.0.weight", - "img_mlp.2.bias": "ff_a.2.bias", - "img_mlp.2.weight": "ff_a.2.weight", - "img_mod.lin.bias": "norm1_a.linear.bias", - "img_mod.lin.weight": "norm1_a.linear.weight", - "txt_attn.norm.key_norm.scale": "attn.norm_k_b.weight", - "txt_attn.norm.query_norm.scale": "attn.norm_q_b.weight", - "txt_attn.proj.bias": "attn.b_to_out.bias", - "txt_attn.proj.weight": "attn.b_to_out.weight", - "txt_attn.qkv.bias": "attn.b_to_qkv.bias", - "txt_attn.qkv.weight": "attn.b_to_qkv.weight", - "txt_mlp.0.bias": "ff_b.0.bias", - "txt_mlp.0.weight": "ff_b.0.weight", - "txt_mlp.2.bias": "ff_b.2.bias", - "txt_mlp.2.weight": "ff_b.2.weight", - "txt_mod.lin.bias": "norm1_b.linear.bias", - "txt_mod.lin.weight": "norm1_b.linear.weight", - - "linear1.bias": "to_qkv_mlp.bias", - "linear1.weight": "to_qkv_mlp.weight", - "linear2.bias": "proj_out.bias", - "linear2.weight": "proj_out.weight", - "modulation.lin.bias": "norm.linear.bias", - "modulation.lin.weight": "norm.linear.weight", - "norm.key_norm.scale": "norm_k_a.weight", - "norm.query_norm.scale": "norm_q_a.weight", - } - state_dict_ = {} - for name, param in state_dict.items(): - if name.startswith("model.diffusion_model."): - name = name[len("model.diffusion_model."):] - names = name.split(".") - if name in rename_dict: - rename = rename_dict[name] - if 
name.startswith("final_layer.adaLN_modulation.1."): - param = torch.concat([param[3072:], param[:3072]], dim=0) - state_dict_[rename] = param - elif names[0] == "double_blocks": - rename = f"blocks.{names[1]}." + suffix_rename_dict[".".join(names[2:])] - state_dict_[rename] = param - elif names[0] == "single_blocks": - if ".".join(names[2:]) in suffix_rename_dict: - rename = f"single_blocks.{names[1]}." + suffix_rename_dict[".".join(names[2:])] - state_dict_[rename] = param - else: - pass - if "guidance_embedder.timestep_embedder.0.weight" not in state_dict_: - return state_dict_, {"disable_guidance_embedder": True} - elif "blocks.8.attn.norm_k_a.weight" not in state_dict_: - return state_dict_, {"input_dim": 196, "num_blocks": 8} - else: - return state_dict_ diff --git a/diffsynth/models/flux_infiniteyou.py b/diffsynth/models/flux_infiniteyou.py deleted file mode 100644 index 861538a..0000000 --- a/diffsynth/models/flux_infiniteyou.py +++ /dev/null @@ -1,129 +0,0 @@ -import math -import torch -import torch.nn as nn - - -# FFN -def FeedForward(dim, mult=4): - inner_dim = int(dim * mult) - return nn.Sequential( - nn.LayerNorm(dim), - nn.Linear(dim, inner_dim, bias=False), - nn.GELU(), - nn.Linear(inner_dim, dim, bias=False), - ) - - -def reshape_tensor(x, heads): - bs, length, width = x.shape - #(bs, length, width) --> (bs, length, n_heads, dim_per_head) - x = x.view(bs, length, heads, -1) - # (bs, length, n_heads, dim_per_head) --> (bs, n_heads, length, dim_per_head) - x = x.transpose(1, 2) - # (bs, n_heads, length, dim_per_head) --> (bs*n_heads, length, dim_per_head) - x = x.reshape(bs, heads, length, -1) - return x - - -class PerceiverAttention(nn.Module): - - def __init__(self, *, dim, dim_head=64, heads=8): - super().__init__() - self.scale = dim_head**-0.5 - self.dim_head = dim_head - self.heads = heads - inner_dim = dim_head * heads - - self.norm1 = nn.LayerNorm(dim) - self.norm2 = nn.LayerNorm(dim) - - self.to_q = nn.Linear(dim, inner_dim, bias=False) - 
self.to_kv = nn.Linear(dim, inner_dim * 2, bias=False) - self.to_out = nn.Linear(inner_dim, dim, bias=False) - - def forward(self, x, latents): - """ - Args: - x (torch.Tensor): image features - shape (b, n1, D) - latent (torch.Tensor): latent features - shape (b, n2, D) - """ - x = self.norm1(x) - latents = self.norm2(latents) - - b, l, _ = latents.shape - - q = self.to_q(latents) - kv_input = torch.cat((x, latents), dim=-2) - k, v = self.to_kv(kv_input).chunk(2, dim=-1) - - q = reshape_tensor(q, self.heads) - k = reshape_tensor(k, self.heads) - v = reshape_tensor(v, self.heads) - - # attention - scale = 1 / math.sqrt(math.sqrt(self.dim_head)) - weight = (q * scale) @ (k * scale).transpose(-2, -1) # More stable with f16 than dividing afterwards - weight = torch.softmax(weight.float(), dim=-1).type(weight.dtype) - out = weight @ v - - out = out.permute(0, 2, 1, 3).reshape(b, l, -1) - - return self.to_out(out) - - -class InfiniteYouImageProjector(nn.Module): - - def __init__( - self, - dim=1280, - depth=4, - dim_head=64, - heads=20, - num_queries=8, - embedding_dim=512, - output_dim=4096, - ff_mult=4, - ): - super().__init__() - self.latents = nn.Parameter(torch.randn(1, num_queries, dim) / dim**0.5) - self.proj_in = nn.Linear(embedding_dim, dim) - - self.proj_out = nn.Linear(dim, output_dim) - self.norm_out = nn.LayerNorm(output_dim) - - self.layers = nn.ModuleList([]) - for _ in range(depth): - self.layers.append( - nn.ModuleList([ - PerceiverAttention(dim=dim, dim_head=dim_head, heads=heads), - FeedForward(dim=dim, mult=ff_mult), - ])) - - def forward(self, x): - - latents = self.latents.repeat(x.size(0), 1, 1) - latents = latents.to(dtype=x.dtype, device=x.device) - - x = self.proj_in(x) - - for attn, ff in self.layers: - latents = attn(x, latents) + latents - latents = ff(latents) + latents - - latents = self.proj_out(latents) - return self.norm_out(latents) - - @staticmethod - def state_dict_converter(): - return 
FluxInfiniteYouImageProjectorStateDictConverter() - - -class FluxInfiniteYouImageProjectorStateDictConverter: - - def __init__(self): - pass - - def from_diffusers(self, state_dict): - return state_dict['image_proj'] diff --git a/diffsynth/models/flux_ipadapter.py b/diffsynth/models/flux_ipadapter.py deleted file mode 100644 index 575c752..0000000 --- a/diffsynth/models/flux_ipadapter.py +++ /dev/null @@ -1,94 +0,0 @@ -from .svd_image_encoder import SVDImageEncoder -from .sd3_dit import RMSNorm -from transformers import CLIPImageProcessor -import torch - - -class MLPProjModel(torch.nn.Module): - def __init__(self, cross_attention_dim=768, id_embeddings_dim=512, num_tokens=4): - super().__init__() - - self.cross_attention_dim = cross_attention_dim - self.num_tokens = num_tokens - - self.proj = torch.nn.Sequential( - torch.nn.Linear(id_embeddings_dim, id_embeddings_dim*2), - torch.nn.GELU(), - torch.nn.Linear(id_embeddings_dim*2, cross_attention_dim*num_tokens), - ) - self.norm = torch.nn.LayerNorm(cross_attention_dim) - - def forward(self, id_embeds): - x = self.proj(id_embeds) - x = x.reshape(-1, self.num_tokens, self.cross_attention_dim) - x = self.norm(x) - return x - -class IpAdapterModule(torch.nn.Module): - def __init__(self, num_attention_heads, attention_head_dim, input_dim): - super().__init__() - self.num_heads = num_attention_heads - self.head_dim = attention_head_dim - output_dim = num_attention_heads * attention_head_dim - self.to_k_ip = torch.nn.Linear(input_dim, output_dim, bias=False) - self.to_v_ip = torch.nn.Linear(input_dim, output_dim, bias=False) - self.norm_added_k = RMSNorm(attention_head_dim, eps=1e-5, elementwise_affine=False) - - - def forward(self, hidden_states): - batch_size = hidden_states.shape[0] - # ip_k - ip_k = self.to_k_ip(hidden_states) - ip_k = ip_k.view(batch_size, -1, self.num_heads, self.head_dim).transpose(1, 2) - ip_k = self.norm_added_k(ip_k) - # ip_v - ip_v = self.to_v_ip(hidden_states) - ip_v = ip_v.view(batch_size, -1, 
self.num_heads, self.head_dim).transpose(1, 2) - return ip_k, ip_v - - -class FluxIpAdapter(torch.nn.Module): - def __init__(self, num_attention_heads=24, attention_head_dim=128, cross_attention_dim=4096, num_tokens=128, num_blocks=57): - super().__init__() - self.ipadapter_modules = torch.nn.ModuleList([IpAdapterModule(num_attention_heads, attention_head_dim, cross_attention_dim) for _ in range(num_blocks)]) - self.image_proj = MLPProjModel(cross_attention_dim=cross_attention_dim, id_embeddings_dim=1152, num_tokens=num_tokens) - self.set_adapter() - - def set_adapter(self): - self.call_block_id = {i:i for i in range(len(self.ipadapter_modules))} - - def forward(self, hidden_states, scale=1.0): - hidden_states = self.image_proj(hidden_states) - hidden_states = hidden_states.view(1, -1, hidden_states.shape[-1]) - ip_kv_dict = {} - for block_id in self.call_block_id: - ipadapter_id = self.call_block_id[block_id] - ip_k, ip_v = self.ipadapter_modules[ipadapter_id](hidden_states) - ip_kv_dict[block_id] = { - "ip_k": ip_k, - "ip_v": ip_v, - "scale": scale - } - return ip_kv_dict - - @staticmethod - def state_dict_converter(): - return FluxIpAdapterStateDictConverter() - - -class FluxIpAdapterStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - state_dict_ = {} - for name in state_dict["ip_adapter"]: - name_ = 'ipadapter_modules.' + name - state_dict_[name_] = state_dict["ip_adapter"][name] - for name in state_dict["image_proj"]: - name_ = "image_proj." 
+ name - state_dict_[name_] = state_dict["image_proj"][name] - return state_dict_ - - def from_civitai(self, state_dict): - return self.from_diffusers(state_dict) diff --git a/diffsynth/models/flux_lora_encoder.py b/diffsynth/models/flux_lora_encoder.py deleted file mode 100644 index 695640a..0000000 --- a/diffsynth/models/flux_lora_encoder.py +++ /dev/null @@ -1,111 +0,0 @@ -import torch -from .sd_text_encoder import CLIPEncoderLayer - - -class LoRALayerBlock(torch.nn.Module): - def __init__(self, L, dim_in, dim_out): - super().__init__() - self.x = torch.nn.Parameter(torch.randn(1, L, dim_in)) - self.layer_norm = torch.nn.LayerNorm(dim_out) - - def forward(self, lora_A, lora_B): - x = self.x @ lora_A.T @ lora_B.T - x = self.layer_norm(x) - return x - - -class LoRAEmbedder(torch.nn.Module): - def __init__(self, lora_patterns=None, L=1, out_dim=2048): - super().__init__() - if lora_patterns is None: - lora_patterns = self.default_lora_patterns() - - model_dict = {} - for lora_pattern in lora_patterns: - name, dim = lora_pattern["name"], lora_pattern["dim"] - model_dict[name.replace(".", "___")] = LoRALayerBlock(L, dim[0], dim[1]) - self.model_dict = torch.nn.ModuleDict(model_dict) - - proj_dict = {} - for lora_pattern in lora_patterns: - layer_type, dim = lora_pattern["type"], lora_pattern["dim"] - if layer_type not in proj_dict: - proj_dict[layer_type.replace(".", "___")] = torch.nn.Linear(dim[1], out_dim) - self.proj_dict = torch.nn.ModuleDict(proj_dict) - - self.lora_patterns = lora_patterns - - - def default_lora_patterns(self): - lora_patterns = [] - lora_dict = { - "attn.a_to_qkv": (3072, 9216), "attn.a_to_out": (3072, 3072), "ff_a.0": (3072, 12288), "ff_a.2": (12288, 3072), "norm1_a.linear": (3072, 18432), - "attn.b_to_qkv": (3072, 9216), "attn.b_to_out": (3072, 3072), "ff_b.0": (3072, 12288), "ff_b.2": (12288, 3072), "norm1_b.linear": (3072, 18432), - } - for i in range(19): - for suffix in lora_dict: - lora_patterns.append({ - "name": 
f"blocks.{i}.{suffix}", - "dim": lora_dict[suffix], - "type": suffix, - }) - lora_dict = {"to_qkv_mlp": (3072, 21504), "proj_out": (15360, 3072), "norm.linear": (3072, 9216)} - for i in range(38): - for suffix in lora_dict: - lora_patterns.append({ - "name": f"single_blocks.{i}.{suffix}", - "dim": lora_dict[suffix], - "type": suffix, - }) - return lora_patterns - - def forward(self, lora): - lora_emb = [] - for lora_pattern in self.lora_patterns: - name, layer_type = lora_pattern["name"], lora_pattern["type"] - lora_A = lora[name + ".lora_A.default.weight"] - lora_B = lora[name + ".lora_B.default.weight"] - lora_out = self.model_dict[name.replace(".", "___")](lora_A, lora_B) - lora_out = self.proj_dict[layer_type.replace(".", "___")](lora_out) - lora_emb.append(lora_out) - lora_emb = torch.concat(lora_emb, dim=1) - return lora_emb - - -class FluxLoRAEncoder(torch.nn.Module): - def __init__(self, embed_dim=4096, encoder_intermediate_size=8192, num_encoder_layers=1, num_embeds_per_lora=16, num_special_embeds=1): - super().__init__() - self.num_embeds_per_lora = num_embeds_per_lora - # embedder - self.embedder = LoRAEmbedder(L=num_embeds_per_lora, out_dim=embed_dim) - - # encoders - self.encoders = torch.nn.ModuleList([CLIPEncoderLayer(embed_dim, encoder_intermediate_size, num_heads=32, head_dim=128) for _ in range(num_encoder_layers)]) - - # special embedding - self.special_embeds = torch.nn.Parameter(torch.randn(1, num_special_embeds, embed_dim)) - self.num_special_embeds = num_special_embeds - - # final layer - self.final_layer_norm = torch.nn.LayerNorm(embed_dim) - self.final_linear = torch.nn.Linear(embed_dim, embed_dim) - - def forward(self, lora): - lora_embeds = self.embedder(lora) - special_embeds = self.special_embeds.to(dtype=lora_embeds.dtype, device=lora_embeds.device) - embeds = torch.concat([special_embeds, lora_embeds], dim=1) - for encoder_id, encoder in enumerate(self.encoders): - embeds = encoder(embeds) - embeds = embeds[:, 
:self.num_special_embeds] - embeds = self.final_layer_norm(embeds) - embeds = self.final_linear(embeds) - return embeds - - @staticmethod - def state_dict_converter(): - return FluxLoRAEncoderStateDictConverter() - - -class FluxLoRAEncoderStateDictConverter: - def from_civitai(self, state_dict): - return state_dict diff --git a/diffsynth/models/flux_text_encoder.py b/diffsynth/models/flux_text_encoder.py deleted file mode 100644 index bff9d29..0000000 --- a/diffsynth/models/flux_text_encoder.py +++ /dev/null @@ -1,32 +0,0 @@ -import torch -from transformers import T5EncoderModel, T5Config -from .sd_text_encoder import SDTextEncoder - - - -class FluxTextEncoder2(T5EncoderModel): - def __init__(self, config): - super().__init__(config) - self.eval() - - def forward(self, input_ids): - outputs = super().forward(input_ids=input_ids) - prompt_emb = outputs.last_hidden_state - return prompt_emb - - @staticmethod - def state_dict_converter(): - return FluxTextEncoder2StateDictConverter() - - - -class FluxTextEncoder2StateDictConverter(): - def __init__(self): - pass - - def from_diffusers(self, state_dict): - state_dict_ = state_dict - return state_dict_ - - def from_civitai(self, state_dict): - return self.from_diffusers(state_dict) diff --git a/diffsynth/models/flux_vae.py b/diffsynth/models/flux_vae.py deleted file mode 100644 index c99c655..0000000 --- a/diffsynth/models/flux_vae.py +++ /dev/null @@ -1,303 +0,0 @@ -from .sd3_vae_encoder import SD3VAEEncoder, SDVAEEncoderStateDictConverter -from .sd3_vae_decoder import SD3VAEDecoder, SDVAEDecoderStateDictConverter - - -class FluxVAEEncoder(SD3VAEEncoder): - def __init__(self): - super().__init__() - self.scaling_factor = 0.3611 - self.shift_factor = 0.1159 - - @staticmethod - def state_dict_converter(): - return FluxVAEEncoderStateDictConverter() - - -class FluxVAEDecoder(SD3VAEDecoder): - def __init__(self): - super().__init__() - self.scaling_factor = 0.3611 - self.shift_factor = 0.1159 - - @staticmethod - def 
state_dict_converter(): - return FluxVAEDecoderStateDictConverter() - - -class FluxVAEEncoderStateDictConverter(SDVAEEncoderStateDictConverter): - def __init__(self): - pass - - def from_civitai(self, state_dict): - rename_dict = { - "encoder.conv_in.bias": "conv_in.bias", - "encoder.conv_in.weight": "conv_in.weight", - "encoder.conv_out.bias": "conv_out.bias", - "encoder.conv_out.weight": "conv_out.weight", - "encoder.down.0.block.0.conv1.bias": "blocks.0.conv1.bias", - "encoder.down.0.block.0.conv1.weight": "blocks.0.conv1.weight", - "encoder.down.0.block.0.conv2.bias": "blocks.0.conv2.bias", - "encoder.down.0.block.0.conv2.weight": "blocks.0.conv2.weight", - "encoder.down.0.block.0.norm1.bias": "blocks.0.norm1.bias", - "encoder.down.0.block.0.norm1.weight": "blocks.0.norm1.weight", - "encoder.down.0.block.0.norm2.bias": "blocks.0.norm2.bias", - "encoder.down.0.block.0.norm2.weight": "blocks.0.norm2.weight", - "encoder.down.0.block.1.conv1.bias": "blocks.1.conv1.bias", - "encoder.down.0.block.1.conv1.weight": "blocks.1.conv1.weight", - "encoder.down.0.block.1.conv2.bias": "blocks.1.conv2.bias", - "encoder.down.0.block.1.conv2.weight": "blocks.1.conv2.weight", - "encoder.down.0.block.1.norm1.bias": "blocks.1.norm1.bias", - "encoder.down.0.block.1.norm1.weight": "blocks.1.norm1.weight", - "encoder.down.0.block.1.norm2.bias": "blocks.1.norm2.bias", - "encoder.down.0.block.1.norm2.weight": "blocks.1.norm2.weight", - "encoder.down.0.downsample.conv.bias": "blocks.2.conv.bias", - "encoder.down.0.downsample.conv.weight": "blocks.2.conv.weight", - "encoder.down.1.block.0.conv1.bias": "blocks.3.conv1.bias", - "encoder.down.1.block.0.conv1.weight": "blocks.3.conv1.weight", - "encoder.down.1.block.0.conv2.bias": "blocks.3.conv2.bias", - "encoder.down.1.block.0.conv2.weight": "blocks.3.conv2.weight", - "encoder.down.1.block.0.nin_shortcut.bias": "blocks.3.conv_shortcut.bias", - "encoder.down.1.block.0.nin_shortcut.weight": "blocks.3.conv_shortcut.weight", - 
"encoder.down.1.block.0.norm1.bias": "blocks.3.norm1.bias", - "encoder.down.1.block.0.norm1.weight": "blocks.3.norm1.weight", - "encoder.down.1.block.0.norm2.bias": "blocks.3.norm2.bias", - "encoder.down.1.block.0.norm2.weight": "blocks.3.norm2.weight", - "encoder.down.1.block.1.conv1.bias": "blocks.4.conv1.bias", - "encoder.down.1.block.1.conv1.weight": "blocks.4.conv1.weight", - "encoder.down.1.block.1.conv2.bias": "blocks.4.conv2.bias", - "encoder.down.1.block.1.conv2.weight": "blocks.4.conv2.weight", - "encoder.down.1.block.1.norm1.bias": "blocks.4.norm1.bias", - "encoder.down.1.block.1.norm1.weight": "blocks.4.norm1.weight", - "encoder.down.1.block.1.norm2.bias": "blocks.4.norm2.bias", - "encoder.down.1.block.1.norm2.weight": "blocks.4.norm2.weight", - "encoder.down.1.downsample.conv.bias": "blocks.5.conv.bias", - "encoder.down.1.downsample.conv.weight": "blocks.5.conv.weight", - "encoder.down.2.block.0.conv1.bias": "blocks.6.conv1.bias", - "encoder.down.2.block.0.conv1.weight": "blocks.6.conv1.weight", - "encoder.down.2.block.0.conv2.bias": "blocks.6.conv2.bias", - "encoder.down.2.block.0.conv2.weight": "blocks.6.conv2.weight", - "encoder.down.2.block.0.nin_shortcut.bias": "blocks.6.conv_shortcut.bias", - "encoder.down.2.block.0.nin_shortcut.weight": "blocks.6.conv_shortcut.weight", - "encoder.down.2.block.0.norm1.bias": "blocks.6.norm1.bias", - "encoder.down.2.block.0.norm1.weight": "blocks.6.norm1.weight", - "encoder.down.2.block.0.norm2.bias": "blocks.6.norm2.bias", - "encoder.down.2.block.0.norm2.weight": "blocks.6.norm2.weight", - "encoder.down.2.block.1.conv1.bias": "blocks.7.conv1.bias", - "encoder.down.2.block.1.conv1.weight": "blocks.7.conv1.weight", - "encoder.down.2.block.1.conv2.bias": "blocks.7.conv2.bias", - "encoder.down.2.block.1.conv2.weight": "blocks.7.conv2.weight", - "encoder.down.2.block.1.norm1.bias": "blocks.7.norm1.bias", - "encoder.down.2.block.1.norm1.weight": "blocks.7.norm1.weight", - "encoder.down.2.block.1.norm2.bias": 
"blocks.7.norm2.bias", - "encoder.down.2.block.1.norm2.weight": "blocks.7.norm2.weight", - "encoder.down.2.downsample.conv.bias": "blocks.8.conv.bias", - "encoder.down.2.downsample.conv.weight": "blocks.8.conv.weight", - "encoder.down.3.block.0.conv1.bias": "blocks.9.conv1.bias", - "encoder.down.3.block.0.conv1.weight": "blocks.9.conv1.weight", - "encoder.down.3.block.0.conv2.bias": "blocks.9.conv2.bias", - "encoder.down.3.block.0.conv2.weight": "blocks.9.conv2.weight", - "encoder.down.3.block.0.norm1.bias": "blocks.9.norm1.bias", - "encoder.down.3.block.0.norm1.weight": "blocks.9.norm1.weight", - "encoder.down.3.block.0.norm2.bias": "blocks.9.norm2.bias", - "encoder.down.3.block.0.norm2.weight": "blocks.9.norm2.weight", - "encoder.down.3.block.1.conv1.bias": "blocks.10.conv1.bias", - "encoder.down.3.block.1.conv1.weight": "blocks.10.conv1.weight", - "encoder.down.3.block.1.conv2.bias": "blocks.10.conv2.bias", - "encoder.down.3.block.1.conv2.weight": "blocks.10.conv2.weight", - "encoder.down.3.block.1.norm1.bias": "blocks.10.norm1.bias", - "encoder.down.3.block.1.norm1.weight": "blocks.10.norm1.weight", - "encoder.down.3.block.1.norm2.bias": "blocks.10.norm2.bias", - "encoder.down.3.block.1.norm2.weight": "blocks.10.norm2.weight", - "encoder.mid.attn_1.k.bias": "blocks.12.transformer_blocks.0.to_k.bias", - "encoder.mid.attn_1.k.weight": "blocks.12.transformer_blocks.0.to_k.weight", - "encoder.mid.attn_1.norm.bias": "blocks.12.norm.bias", - "encoder.mid.attn_1.norm.weight": "blocks.12.norm.weight", - "encoder.mid.attn_1.proj_out.bias": "blocks.12.transformer_blocks.0.to_out.bias", - "encoder.mid.attn_1.proj_out.weight": "blocks.12.transformer_blocks.0.to_out.weight", - "encoder.mid.attn_1.q.bias": "blocks.12.transformer_blocks.0.to_q.bias", - "encoder.mid.attn_1.q.weight": "blocks.12.transformer_blocks.0.to_q.weight", - "encoder.mid.attn_1.v.bias": "blocks.12.transformer_blocks.0.to_v.bias", - "encoder.mid.attn_1.v.weight": 
"blocks.12.transformer_blocks.0.to_v.weight", - "encoder.mid.block_1.conv1.bias": "blocks.11.conv1.bias", - "encoder.mid.block_1.conv1.weight": "blocks.11.conv1.weight", - "encoder.mid.block_1.conv2.bias": "blocks.11.conv2.bias", - "encoder.mid.block_1.conv2.weight": "blocks.11.conv2.weight", - "encoder.mid.block_1.norm1.bias": "blocks.11.norm1.bias", - "encoder.mid.block_1.norm1.weight": "blocks.11.norm1.weight", - "encoder.mid.block_1.norm2.bias": "blocks.11.norm2.bias", - "encoder.mid.block_1.norm2.weight": "blocks.11.norm2.weight", - "encoder.mid.block_2.conv1.bias": "blocks.13.conv1.bias", - "encoder.mid.block_2.conv1.weight": "blocks.13.conv1.weight", - "encoder.mid.block_2.conv2.bias": "blocks.13.conv2.bias", - "encoder.mid.block_2.conv2.weight": "blocks.13.conv2.weight", - "encoder.mid.block_2.norm1.bias": "blocks.13.norm1.bias", - "encoder.mid.block_2.norm1.weight": "blocks.13.norm1.weight", - "encoder.mid.block_2.norm2.bias": "blocks.13.norm2.bias", - "encoder.mid.block_2.norm2.weight": "blocks.13.norm2.weight", - "encoder.norm_out.bias": "conv_norm_out.bias", - "encoder.norm_out.weight": "conv_norm_out.weight", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if "transformer_blocks" in rename_dict[name]: - param = param.squeeze() - state_dict_[rename_dict[name]] = param - return state_dict_ - - - -class FluxVAEDecoderStateDictConverter(SDVAEDecoderStateDictConverter): - def __init__(self): - pass - - def from_civitai(self, state_dict): - rename_dict = { - "decoder.conv_in.bias": "conv_in.bias", - "decoder.conv_in.weight": "conv_in.weight", - "decoder.conv_out.bias": "conv_out.bias", - "decoder.conv_out.weight": "conv_out.weight", - "decoder.mid.attn_1.k.bias": "blocks.1.transformer_blocks.0.to_k.bias", - "decoder.mid.attn_1.k.weight": "blocks.1.transformer_blocks.0.to_k.weight", - "decoder.mid.attn_1.norm.bias": "blocks.1.norm.bias", - "decoder.mid.attn_1.norm.weight": "blocks.1.norm.weight", - 
"decoder.mid.attn_1.proj_out.bias": "blocks.1.transformer_blocks.0.to_out.bias", - "decoder.mid.attn_1.proj_out.weight": "blocks.1.transformer_blocks.0.to_out.weight", - "decoder.mid.attn_1.q.bias": "blocks.1.transformer_blocks.0.to_q.bias", - "decoder.mid.attn_1.q.weight": "blocks.1.transformer_blocks.0.to_q.weight", - "decoder.mid.attn_1.v.bias": "blocks.1.transformer_blocks.0.to_v.bias", - "decoder.mid.attn_1.v.weight": "blocks.1.transformer_blocks.0.to_v.weight", - "decoder.mid.block_1.conv1.bias": "blocks.0.conv1.bias", - "decoder.mid.block_1.conv1.weight": "blocks.0.conv1.weight", - "decoder.mid.block_1.conv2.bias": "blocks.0.conv2.bias", - "decoder.mid.block_1.conv2.weight": "blocks.0.conv2.weight", - "decoder.mid.block_1.norm1.bias": "blocks.0.norm1.bias", - "decoder.mid.block_1.norm1.weight": "blocks.0.norm1.weight", - "decoder.mid.block_1.norm2.bias": "blocks.0.norm2.bias", - "decoder.mid.block_1.norm2.weight": "blocks.0.norm2.weight", - "decoder.mid.block_2.conv1.bias": "blocks.2.conv1.bias", - "decoder.mid.block_2.conv1.weight": "blocks.2.conv1.weight", - "decoder.mid.block_2.conv2.bias": "blocks.2.conv2.bias", - "decoder.mid.block_2.conv2.weight": "blocks.2.conv2.weight", - "decoder.mid.block_2.norm1.bias": "blocks.2.norm1.bias", - "decoder.mid.block_2.norm1.weight": "blocks.2.norm1.weight", - "decoder.mid.block_2.norm2.bias": "blocks.2.norm2.bias", - "decoder.mid.block_2.norm2.weight": "blocks.2.norm2.weight", - "decoder.norm_out.bias": "conv_norm_out.bias", - "decoder.norm_out.weight": "conv_norm_out.weight", - "decoder.up.0.block.0.conv1.bias": "blocks.15.conv1.bias", - "decoder.up.0.block.0.conv1.weight": "blocks.15.conv1.weight", - "decoder.up.0.block.0.conv2.bias": "blocks.15.conv2.bias", - "decoder.up.0.block.0.conv2.weight": "blocks.15.conv2.weight", - "decoder.up.0.block.0.nin_shortcut.bias": "blocks.15.conv_shortcut.bias", - "decoder.up.0.block.0.nin_shortcut.weight": "blocks.15.conv_shortcut.weight", - "decoder.up.0.block.0.norm1.bias": 
"blocks.15.norm1.bias", - "decoder.up.0.block.0.norm1.weight": "blocks.15.norm1.weight", - "decoder.up.0.block.0.norm2.bias": "blocks.15.norm2.bias", - "decoder.up.0.block.0.norm2.weight": "blocks.15.norm2.weight", - "decoder.up.0.block.1.conv1.bias": "blocks.16.conv1.bias", - "decoder.up.0.block.1.conv1.weight": "blocks.16.conv1.weight", - "decoder.up.0.block.1.conv2.bias": "blocks.16.conv2.bias", - "decoder.up.0.block.1.conv2.weight": "blocks.16.conv2.weight", - "decoder.up.0.block.1.norm1.bias": "blocks.16.norm1.bias", - "decoder.up.0.block.1.norm1.weight": "blocks.16.norm1.weight", - "decoder.up.0.block.1.norm2.bias": "blocks.16.norm2.bias", - "decoder.up.0.block.1.norm2.weight": "blocks.16.norm2.weight", - "decoder.up.0.block.2.conv1.bias": "blocks.17.conv1.bias", - "decoder.up.0.block.2.conv1.weight": "blocks.17.conv1.weight", - "decoder.up.0.block.2.conv2.bias": "blocks.17.conv2.bias", - "decoder.up.0.block.2.conv2.weight": "blocks.17.conv2.weight", - "decoder.up.0.block.2.norm1.bias": "blocks.17.norm1.bias", - "decoder.up.0.block.2.norm1.weight": "blocks.17.norm1.weight", - "decoder.up.0.block.2.norm2.bias": "blocks.17.norm2.bias", - "decoder.up.0.block.2.norm2.weight": "blocks.17.norm2.weight", - "decoder.up.1.block.0.conv1.bias": "blocks.11.conv1.bias", - "decoder.up.1.block.0.conv1.weight": "blocks.11.conv1.weight", - "decoder.up.1.block.0.conv2.bias": "blocks.11.conv2.bias", - "decoder.up.1.block.0.conv2.weight": "blocks.11.conv2.weight", - "decoder.up.1.block.0.nin_shortcut.bias": "blocks.11.conv_shortcut.bias", - "decoder.up.1.block.0.nin_shortcut.weight": "blocks.11.conv_shortcut.weight", - "decoder.up.1.block.0.norm1.bias": "blocks.11.norm1.bias", - "decoder.up.1.block.0.norm1.weight": "blocks.11.norm1.weight", - "decoder.up.1.block.0.norm2.bias": "blocks.11.norm2.bias", - "decoder.up.1.block.0.norm2.weight": "blocks.11.norm2.weight", - "decoder.up.1.block.1.conv1.bias": "blocks.12.conv1.bias", - "decoder.up.1.block.1.conv1.weight": 
"blocks.12.conv1.weight", - "decoder.up.1.block.1.conv2.bias": "blocks.12.conv2.bias", - "decoder.up.1.block.1.conv2.weight": "blocks.12.conv2.weight", - "decoder.up.1.block.1.norm1.bias": "blocks.12.norm1.bias", - "decoder.up.1.block.1.norm1.weight": "blocks.12.norm1.weight", - "decoder.up.1.block.1.norm2.bias": "blocks.12.norm2.bias", - "decoder.up.1.block.1.norm2.weight": "blocks.12.norm2.weight", - "decoder.up.1.block.2.conv1.bias": "blocks.13.conv1.bias", - "decoder.up.1.block.2.conv1.weight": "blocks.13.conv1.weight", - "decoder.up.1.block.2.conv2.bias": "blocks.13.conv2.bias", - "decoder.up.1.block.2.conv2.weight": "blocks.13.conv2.weight", - "decoder.up.1.block.2.norm1.bias": "blocks.13.norm1.bias", - "decoder.up.1.block.2.norm1.weight": "blocks.13.norm1.weight", - "decoder.up.1.block.2.norm2.bias": "blocks.13.norm2.bias", - "decoder.up.1.block.2.norm2.weight": "blocks.13.norm2.weight", - "decoder.up.1.upsample.conv.bias": "blocks.14.conv.bias", - "decoder.up.1.upsample.conv.weight": "blocks.14.conv.weight", - "decoder.up.2.block.0.conv1.bias": "blocks.7.conv1.bias", - "decoder.up.2.block.0.conv1.weight": "blocks.7.conv1.weight", - "decoder.up.2.block.0.conv2.bias": "blocks.7.conv2.bias", - "decoder.up.2.block.0.conv2.weight": "blocks.7.conv2.weight", - "decoder.up.2.block.0.norm1.bias": "blocks.7.norm1.bias", - "decoder.up.2.block.0.norm1.weight": "blocks.7.norm1.weight", - "decoder.up.2.block.0.norm2.bias": "blocks.7.norm2.bias", - "decoder.up.2.block.0.norm2.weight": "blocks.7.norm2.weight", - "decoder.up.2.block.1.conv1.bias": "blocks.8.conv1.bias", - "decoder.up.2.block.1.conv1.weight": "blocks.8.conv1.weight", - "decoder.up.2.block.1.conv2.bias": "blocks.8.conv2.bias", - "decoder.up.2.block.1.conv2.weight": "blocks.8.conv2.weight", - "decoder.up.2.block.1.norm1.bias": "blocks.8.norm1.bias", - "decoder.up.2.block.1.norm1.weight": "blocks.8.norm1.weight", - "decoder.up.2.block.1.norm2.bias": "blocks.8.norm2.bias", - "decoder.up.2.block.1.norm2.weight": 
"blocks.8.norm2.weight", - "decoder.up.2.block.2.conv1.bias": "blocks.9.conv1.bias", - "decoder.up.2.block.2.conv1.weight": "blocks.9.conv1.weight", - "decoder.up.2.block.2.conv2.bias": "blocks.9.conv2.bias", - "decoder.up.2.block.2.conv2.weight": "blocks.9.conv2.weight", - "decoder.up.2.block.2.norm1.bias": "blocks.9.norm1.bias", - "decoder.up.2.block.2.norm1.weight": "blocks.9.norm1.weight", - "decoder.up.2.block.2.norm2.bias": "blocks.9.norm2.bias", - "decoder.up.2.block.2.norm2.weight": "blocks.9.norm2.weight", - "decoder.up.2.upsample.conv.bias": "blocks.10.conv.bias", - "decoder.up.2.upsample.conv.weight": "blocks.10.conv.weight", - "decoder.up.3.block.0.conv1.bias": "blocks.3.conv1.bias", - "decoder.up.3.block.0.conv1.weight": "blocks.3.conv1.weight", - "decoder.up.3.block.0.conv2.bias": "blocks.3.conv2.bias", - "decoder.up.3.block.0.conv2.weight": "blocks.3.conv2.weight", - "decoder.up.3.block.0.norm1.bias": "blocks.3.norm1.bias", - "decoder.up.3.block.0.norm1.weight": "blocks.3.norm1.weight", - "decoder.up.3.block.0.norm2.bias": "blocks.3.norm2.bias", - "decoder.up.3.block.0.norm2.weight": "blocks.3.norm2.weight", - "decoder.up.3.block.1.conv1.bias": "blocks.4.conv1.bias", - "decoder.up.3.block.1.conv1.weight": "blocks.4.conv1.weight", - "decoder.up.3.block.1.conv2.bias": "blocks.4.conv2.bias", - "decoder.up.3.block.1.conv2.weight": "blocks.4.conv2.weight", - "decoder.up.3.block.1.norm1.bias": "blocks.4.norm1.bias", - "decoder.up.3.block.1.norm1.weight": "blocks.4.norm1.weight", - "decoder.up.3.block.1.norm2.bias": "blocks.4.norm2.bias", - "decoder.up.3.block.1.norm2.weight": "blocks.4.norm2.weight", - "decoder.up.3.block.2.conv1.bias": "blocks.5.conv1.bias", - "decoder.up.3.block.2.conv1.weight": "blocks.5.conv1.weight", - "decoder.up.3.block.2.conv2.bias": "blocks.5.conv2.bias", - "decoder.up.3.block.2.conv2.weight": "blocks.5.conv2.weight", - "decoder.up.3.block.2.norm1.bias": "blocks.5.norm1.bias", - "decoder.up.3.block.2.norm1.weight": 
"blocks.5.norm1.weight", - "decoder.up.3.block.2.norm2.bias": "blocks.5.norm2.bias", - "decoder.up.3.block.2.norm2.weight": "blocks.5.norm2.weight", - "decoder.up.3.upsample.conv.bias": "blocks.6.conv.bias", - "decoder.up.3.upsample.conv.weight": "blocks.6.conv.weight", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if "transformer_blocks" in rename_dict[name]: - param = param.squeeze() - state_dict_[rename_dict[name]] = param - return state_dict_ \ No newline at end of file diff --git a/diffsynth/models/flux_value_control.py b/diffsynth/models/flux_value_control.py deleted file mode 100644 index 6981344..0000000 --- a/diffsynth/models/flux_value_control.py +++ /dev/null @@ -1,60 +0,0 @@ -import torch -from diffsynth.models.svd_unet import TemporalTimesteps - - -class MultiValueEncoder(torch.nn.Module): - def __init__(self, encoders=()): - super().__init__() - self.encoders = torch.nn.ModuleList(encoders) - - def __call__(self, values, dtype): - emb = [] - for encoder, value in zip(self.encoders, values): - if value is not None: - value = value.unsqueeze(0) - emb.append(encoder(value, dtype)) - emb = torch.concat(emb, dim=0) - return emb - - -class SingleValueEncoder(torch.nn.Module): - def __init__(self, dim_in=256, dim_out=4096, prefer_len=32, computation_device=None): - super().__init__() - self.prefer_len = prefer_len - self.prefer_proj = TemporalTimesteps(num_channels=dim_in, flip_sin_to_cos=True, downscale_freq_shift=0, computation_device=computation_device) - self.prefer_value_embedder = torch.nn.Sequential( - torch.nn.Linear(dim_in, dim_out), torch.nn.SiLU(), torch.nn.Linear(dim_out, dim_out) - ) - self.positional_embedding = torch.nn.Parameter( - torch.randn(self.prefer_len, dim_out) - ) - self._initialize_weights() - - def _initialize_weights(self): - last_linear = self.prefer_value_embedder[-1] - torch.nn.init.zeros_(last_linear.weight) - torch.nn.init.zeros_(last_linear.bias) - - def forward(self, 
value, dtype): - value = value * 1000 - emb = self.prefer_proj(value).to(dtype) - emb = self.prefer_value_embedder(emb).squeeze(0) - base_embeddings = emb.expand(self.prefer_len, -1) - positional_embedding = self.positional_embedding.to(dtype=base_embeddings.dtype, device=base_embeddings.device) - learned_embeddings = base_embeddings + positional_embedding - return learned_embeddings - - @staticmethod - def state_dict_converter(): - return SingleValueEncoderStateDictConverter() - - -class SingleValueEncoderStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - return state_dict - - def from_civitai(self, state_dict): - return state_dict diff --git a/diffsynth/models/general_modules.py b/diffsynth/models/general_modules.py new file mode 100644 index 0000000..216247c --- /dev/null +++ b/diffsynth/models/general_modules.py @@ -0,0 +1,139 @@ +import torch, math + + +def get_timestep_embedding( + timesteps: torch.Tensor, + embedding_dim: int, + flip_sin_to_cos: bool = False, + downscale_freq_shift: float = 1, + scale: float = 1, + max_period: int = 10000, + computation_device = None, + align_dtype_to_timestep = False, +): + assert len(timesteps.shape) == 1, "Timesteps should be a 1d-array" + + half_dim = embedding_dim // 2 + exponent = -math.log(max_period) * torch.arange( + start=0, end=half_dim, dtype=torch.float32, device=timesteps.device if computation_device is None else computation_device + ) + exponent = exponent / (half_dim - downscale_freq_shift) + + emb = torch.exp(exponent).to(timesteps.device) + if align_dtype_to_timestep: + emb = emb.to(timesteps.dtype) + emb = timesteps[:, None].float() * emb[None, :] + + # scale embeddings + emb = scale * emb + + # concat sine and cosine embeddings + emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=-1) + + # flip sine and cosine embeddings + if flip_sin_to_cos: + emb = torch.cat([emb[:, half_dim:], emb[:, :half_dim]], dim=-1) + + # zero pad + if embedding_dim % 2 == 1: + emb = 
torch.nn.functional.pad(emb, (0, 1, 0, 0)) + return emb + + +class TemporalTimesteps(torch.nn.Module): + def __init__(self, num_channels: int, flip_sin_to_cos: bool, downscale_freq_shift: float, computation_device = None, scale=1, align_dtype_to_timestep=False): + super().__init__() + self.num_channels = num_channels + self.flip_sin_to_cos = flip_sin_to_cos + self.downscale_freq_shift = downscale_freq_shift + self.computation_device = computation_device + self.scale = scale + self.align_dtype_to_timestep = align_dtype_to_timestep + + def forward(self, timesteps): + t_emb = get_timestep_embedding( + timesteps, + self.num_channels, + flip_sin_to_cos=self.flip_sin_to_cos, + downscale_freq_shift=self.downscale_freq_shift, + computation_device=self.computation_device, + scale=self.scale, + align_dtype_to_timestep=self.align_dtype_to_timestep, + ) + return t_emb + + +class DiffusersCompatibleTimestepProj(torch.nn.Module): + def __init__(self, dim_in, dim_out): + super().__init__() + self.linear_1 = torch.nn.Linear(dim_in, dim_out) + self.act = torch.nn.SiLU() + self.linear_2 = torch.nn.Linear(dim_out, dim_out) + + def forward(self, x): + x = self.linear_1(x) + x = self.act(x) + x = self.linear_2(x) + return x + + +class TimestepEmbeddings(torch.nn.Module): + def __init__(self, dim_in, dim_out, computation_device=None, diffusers_compatible_format=False, scale=1, align_dtype_to_timestep=False): + super().__init__() + self.time_proj = TemporalTimesteps(num_channels=dim_in, flip_sin_to_cos=True, downscale_freq_shift=0, computation_device=computation_device, scale=scale, align_dtype_to_timestep=align_dtype_to_timestep) + if diffusers_compatible_format: + self.timestep_embedder = DiffusersCompatibleTimestepProj(dim_in, dim_out) + else: + self.timestep_embedder = torch.nn.Sequential( + torch.nn.Linear(dim_in, dim_out), torch.nn.SiLU(), torch.nn.Linear(dim_out, dim_out) + ) + + def forward(self, timestep, dtype): + time_emb = self.time_proj(timestep).to(dtype) + time_emb = 
self.timestep_embedder(time_emb) + return time_emb + + +class RMSNorm(torch.nn.Module): + def __init__(self, dim, eps, elementwise_affine=True): + super().__init__() + self.eps = eps + if elementwise_affine: + self.weight = torch.nn.Parameter(torch.ones((dim,))) + else: + self.weight = None + + def forward(self, hidden_states): + input_dtype = hidden_states.dtype + variance = hidden_states.to(torch.float32).square().mean(-1, keepdim=True) + hidden_states = hidden_states * torch.rsqrt(variance + self.eps) + hidden_states = hidden_states.to(input_dtype) + if self.weight is not None: + hidden_states = hidden_states * self.weight + return hidden_states + + +class AdaLayerNorm(torch.nn.Module): + def __init__(self, dim, single=False, dual=False): + super().__init__() + self.single = single + self.dual = dual + self.linear = torch.nn.Linear(dim, dim * [[6, 2][single], 9][dual]) + self.norm = torch.nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6) + + def forward(self, x, emb): + emb = self.linear(torch.nn.functional.silu(emb)) + if self.single: + scale, shift = emb.unsqueeze(1).chunk(2, dim=2) + x = self.norm(x) * (1 + scale) + shift + return x + elif self.dual: + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp, shift_msa2, scale_msa2, gate_msa2 = emb.unsqueeze(1).chunk(9, dim=2) + norm_x = self.norm(x) + x = norm_x * (1 + scale_msa) + shift_msa + norm_x2 = norm_x * (1 + scale_msa2) + shift_msa2 + return x, gate_msa, shift_mlp, scale_mlp, gate_mlp, norm_x2, gate_msa2 + else: + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.unsqueeze(1).chunk(6, dim=2) + x = self.norm(x) * (1 + scale_msa) + shift_msa + return x, gate_msa, shift_mlp, scale_mlp, gate_mlp diff --git a/diffsynth/models/hunyuan_dit.py b/diffsynth/models/hunyuan_dit.py deleted file mode 100644 index 8e27183..0000000 --- a/diffsynth/models/hunyuan_dit.py +++ /dev/null @@ -1,451 +0,0 @@ -from .attention import Attention -from einops import repeat, rearrange -import math 
-import torch - - -class HunyuanDiTRotaryEmbedding(torch.nn.Module): - - def __init__(self, q_norm_shape=88, k_norm_shape=88, rotary_emb_on_k=True): - super().__init__() - self.q_norm = torch.nn.LayerNorm((q_norm_shape,), elementwise_affine=True, eps=1e-06) - self.k_norm = torch.nn.LayerNorm((k_norm_shape,), elementwise_affine=True, eps=1e-06) - self.rotary_emb_on_k = rotary_emb_on_k - self.k_cache, self.v_cache = [], [] - - def reshape_for_broadcast(self, freqs_cis, x): - ndim = x.ndim - shape = [d if i == ndim - 2 or i == ndim - 1 else 1 for i, d in enumerate(x.shape)] - return freqs_cis[0].view(*shape), freqs_cis[1].view(*shape) - - def rotate_half(self, x): - x_real, x_imag = x.float().reshape(*x.shape[:-1], -1, 2).unbind(-1) - return torch.stack([-x_imag, x_real], dim=-1).flatten(3) - - def apply_rotary_emb(self, xq, xk, freqs_cis): - xk_out = None - cos, sin = self.reshape_for_broadcast(freqs_cis, xq) - cos, sin = cos.to(xq.device), sin.to(xq.device) - xq_out = (xq.float() * cos + self.rotate_half(xq.float()) * sin).type_as(xq) - if xk is not None: - xk_out = (xk.float() * cos + self.rotate_half(xk.float()) * sin).type_as(xk) - return xq_out, xk_out - - def forward(self, q, k, v, freqs_cis_img, to_cache=False): - # norm - q = self.q_norm(q) - k = self.k_norm(k) - - # RoPE - if self.rotary_emb_on_k: - q, k = self.apply_rotary_emb(q, k, freqs_cis_img) - else: - q, _ = self.apply_rotary_emb(q, None, freqs_cis_img) - - if to_cache: - self.k_cache.append(k) - self.v_cache.append(v) - elif len(self.k_cache) > 0 and len(self.v_cache) > 0: - k = torch.concat([k] + self.k_cache, dim=2) - v = torch.concat([v] + self.v_cache, dim=2) - self.k_cache, self.v_cache = [], [] - return q, k, v - - -class FP32_Layernorm(torch.nn.LayerNorm): - def forward(self, inputs): - origin_dtype = inputs.dtype - return torch.nn.functional.layer_norm(inputs.float(), self.normalized_shape, self.weight.float(), self.bias.float(), self.eps).to(origin_dtype) - - -class FP32_SiLU(torch.nn.SiLU): 
- def forward(self, inputs): - origin_dtype = inputs.dtype - return torch.nn.functional.silu(inputs.float(), inplace=False).to(origin_dtype) - - -class HunyuanDiTFinalLayer(torch.nn.Module): - def __init__(self, final_hidden_size=1408, condition_dim=1408, patch_size=2, out_channels=8): - super().__init__() - self.norm_final = torch.nn.LayerNorm(final_hidden_size, elementwise_affine=False, eps=1e-6) - self.linear = torch.nn.Linear(final_hidden_size, patch_size * patch_size * out_channels, bias=True) - self.adaLN_modulation = torch.nn.Sequential( - FP32_SiLU(), - torch.nn.Linear(condition_dim, 2 * final_hidden_size, bias=True) - ) - - def modulate(self, x, shift, scale): - return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1) - - def forward(self, hidden_states, condition_emb): - shift, scale = self.adaLN_modulation(condition_emb).chunk(2, dim=1) - hidden_states = self.modulate(self.norm_final(hidden_states), shift, scale) - hidden_states = self.linear(hidden_states) - return hidden_states - - -class HunyuanDiTBlock(torch.nn.Module): - - def __init__( - self, - hidden_dim=1408, - condition_dim=1408, - num_heads=16, - mlp_ratio=4.3637, - text_dim=1024, - skip_connection=False - ): - super().__init__() - self.norm1 = FP32_Layernorm((hidden_dim,), eps=1e-6, elementwise_affine=True) - self.rota1 = HunyuanDiTRotaryEmbedding(hidden_dim//num_heads, hidden_dim//num_heads) - self.attn1 = Attention(hidden_dim, num_heads, hidden_dim//num_heads, bias_q=True, bias_kv=True, bias_out=True) - self.norm2 = FP32_Layernorm((hidden_dim,), eps=1e-6, elementwise_affine=True) - self.rota2 = HunyuanDiTRotaryEmbedding(hidden_dim//num_heads, hidden_dim//num_heads, rotary_emb_on_k=False) - self.attn2 = Attention(hidden_dim, num_heads, hidden_dim//num_heads, kv_dim=text_dim, bias_q=True, bias_kv=True, bias_out=True) - self.norm3 = FP32_Layernorm((hidden_dim,), eps=1e-6, elementwise_affine=True) - self.modulation = torch.nn.Sequential(FP32_SiLU(), torch.nn.Linear(condition_dim, hidden_dim, 
bias=True)) - self.mlp = torch.nn.Sequential( - torch.nn.Linear(hidden_dim, int(hidden_dim*mlp_ratio), bias=True), - torch.nn.GELU(approximate="tanh"), - torch.nn.Linear(int(hidden_dim*mlp_ratio), hidden_dim, bias=True) - ) - if skip_connection: - self.skip_norm = FP32_Layernorm((hidden_dim * 2,), eps=1e-6, elementwise_affine=True) - self.skip_linear = torch.nn.Linear(hidden_dim * 2, hidden_dim, bias=True) - else: - self.skip_norm, self.skip_linear = None, None - - def forward(self, hidden_states, condition_emb, text_emb, freq_cis_img, residual=None, to_cache=False): - # Long Skip Connection - if self.skip_norm is not None and self.skip_linear is not None: - hidden_states = torch.cat([hidden_states, residual], dim=-1) - hidden_states = self.skip_norm(hidden_states) - hidden_states = self.skip_linear(hidden_states) - - # Self-Attention - shift_msa = self.modulation(condition_emb).unsqueeze(dim=1) - attn_input = self.norm1(hidden_states) + shift_msa - hidden_states = hidden_states + self.attn1(attn_input, qkv_preprocessor=lambda q, k, v: self.rota1(q, k, v, freq_cis_img, to_cache=to_cache)) - - # Cross-Attention - attn_input = self.norm3(hidden_states) - hidden_states = hidden_states + self.attn2(attn_input, text_emb, qkv_preprocessor=lambda q, k, v: self.rota2(q, k, v, freq_cis_img)) - - # FFN Layer - mlp_input = self.norm2(hidden_states) - hidden_states = hidden_states + self.mlp(mlp_input) - return hidden_states - - -class AttentionPool(torch.nn.Module): - def __init__(self, spacial_dim, embed_dim, num_heads, output_dim = None): - super().__init__() - self.positional_embedding = torch.nn.Parameter(torch.randn(spacial_dim + 1, embed_dim) / embed_dim ** 0.5) - self.k_proj = torch.nn.Linear(embed_dim, embed_dim) - self.q_proj = torch.nn.Linear(embed_dim, embed_dim) - self.v_proj = torch.nn.Linear(embed_dim, embed_dim) - self.c_proj = torch.nn.Linear(embed_dim, output_dim or embed_dim) - self.num_heads = num_heads - - def forward(self, x): - x = x.permute(1, 0, 2) # 
NLC -> LNC - x = torch.cat([x.mean(dim=0, keepdim=True), x], dim=0) # (L+1)NC - x = x + self.positional_embedding[:, None, :].to(x.dtype) # (L+1)NC - x, _ = torch.nn.functional.multi_head_attention_forward( - query=x[:1], key=x, value=x, - embed_dim_to_check=x.shape[-1], - num_heads=self.num_heads, - q_proj_weight=self.q_proj.weight, - k_proj_weight=self.k_proj.weight, - v_proj_weight=self.v_proj.weight, - in_proj_weight=None, - in_proj_bias=torch.cat([self.q_proj.bias, self.k_proj.bias, self.v_proj.bias]), - bias_k=None, - bias_v=None, - add_zero_attn=False, - dropout_p=0, - out_proj_weight=self.c_proj.weight, - out_proj_bias=self.c_proj.bias, - use_separate_proj_weight=True, - training=self.training, - need_weights=False - ) - return x.squeeze(0) - - -class PatchEmbed(torch.nn.Module): - def __init__( - self, - patch_size=(2, 2), - in_chans=4, - embed_dim=1408, - bias=True, - ): - super().__init__() - self.proj = torch.nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size, bias=bias) - - def forward(self, x): - x = self.proj(x) - x = x.flatten(2).transpose(1, 2) # BCHW -> BNC - return x - - -def timestep_embedding(t, dim, max_period=10000, repeat_only=False): - # https://github.com/openai/glide-text2im/blob/main/glide_text2im/nn.py - if not repeat_only: - half = dim // 2 - freqs = torch.exp( - -math.log(max_period) - * torch.arange(start=0, end=half, dtype=torch.float32) - / half - ).to(device=t.device) # size: [dim/2], 一个指数衰减的曲线 - args = t[:, None].float() * freqs[None] - embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) - if dim % 2: - embedding = torch.cat( - [embedding, torch.zeros_like(embedding[:, :1])], dim=-1 - ) - else: - embedding = repeat(t, "b -> b d", d=dim) - return embedding - - -class TimestepEmbedder(torch.nn.Module): - def __init__(self, hidden_size=1408, frequency_embedding_size=256): - super().__init__() - self.mlp = torch.nn.Sequential( - torch.nn.Linear(frequency_embedding_size, hidden_size, bias=True), - 
torch.nn.SiLU(), - torch.nn.Linear(hidden_size, hidden_size, bias=True), - ) - self.frequency_embedding_size = frequency_embedding_size - - def forward(self, t): - t_freq = timestep_embedding(t, self.frequency_embedding_size).type(self.mlp[0].weight.dtype) - t_emb = self.mlp(t_freq) - return t_emb - - -class HunyuanDiT(torch.nn.Module): - def __init__(self, num_layers_down=21, num_layers_up=19, in_channels=4, out_channels=8, hidden_dim=1408, text_dim=1024, t5_dim=2048, text_length=77, t5_length=256): - super().__init__() - - # Embedders - self.text_emb_padding = torch.nn.Parameter(torch.randn(text_length + t5_length, text_dim, dtype=torch.float32)) - self.t5_embedder = torch.nn.Sequential( - torch.nn.Linear(t5_dim, t5_dim * 4, bias=True), - FP32_SiLU(), - torch.nn.Linear(t5_dim * 4, text_dim, bias=True), - ) - self.t5_pooler = AttentionPool(t5_length, t5_dim, num_heads=8, output_dim=1024) - self.style_embedder = torch.nn.Parameter(torch.randn(hidden_dim)) - self.patch_embedder = PatchEmbed(in_chans=in_channels) - self.timestep_embedder = TimestepEmbedder() - self.extra_embedder = torch.nn.Sequential( - torch.nn.Linear(256 * 6 + 1024 + hidden_dim, hidden_dim * 4), - FP32_SiLU(), - torch.nn.Linear(hidden_dim * 4, hidden_dim), - ) - - # Transformer blocks - self.num_layers_down = num_layers_down - self.num_layers_up = num_layers_up - self.blocks = torch.nn.ModuleList( - [HunyuanDiTBlock(skip_connection=False) for _ in range(num_layers_down)] + \ - [HunyuanDiTBlock(skip_connection=True) for _ in range(num_layers_up)] - ) - - # Output layers - self.final_layer = HunyuanDiTFinalLayer() - self.out_channels = out_channels - - def prepare_text_emb(self, text_emb, text_emb_t5, text_emb_mask, text_emb_mask_t5): - text_emb_mask = text_emb_mask.bool() - text_emb_mask_t5 = text_emb_mask_t5.bool() - text_emb_t5 = self.t5_embedder(text_emb_t5) - text_emb = torch.cat([text_emb, text_emb_t5], dim=1) - text_emb_mask = torch.cat([text_emb_mask, text_emb_mask_t5], dim=-1) - text_emb = 
torch.where(text_emb_mask.unsqueeze(2), text_emb, self.text_emb_padding.to(text_emb)) - return text_emb - - def prepare_extra_emb(self, text_emb_t5, timestep, size_emb, dtype, batch_size): - # Text embedding - pooled_text_emb_t5 = self.t5_pooler(text_emb_t5) - - # Timestep embedding - timestep_emb = self.timestep_embedder(timestep) - - # Size embedding - size_emb = timestep_embedding(size_emb.view(-1), 256).to(dtype) - size_emb = size_emb.view(-1, 6 * 256) - - # Style embedding - style_emb = repeat(self.style_embedder, "D -> B D", B=batch_size) - - # Concatenate all extra vectors - extra_emb = torch.cat([pooled_text_emb_t5, size_emb, style_emb], dim=1) - condition_emb = timestep_emb + self.extra_embedder(extra_emb) - - return condition_emb - - def unpatchify(self, x, h, w): - return rearrange(x, "B (H W) (P Q C) -> B C (H P) (W Q)", H=h, W=w, P=2, Q=2) - - def build_mask(self, data, is_bound): - _, _, H, W = data.shape - h = repeat(torch.arange(H), "H -> H W", H=H, W=W) - w = repeat(torch.arange(W), "W -> H W", H=H, W=W) - border_width = (H + W) // 4 - pad = torch.ones_like(h) * border_width - mask = torch.stack([ - pad if is_bound[0] else h + 1, - pad if is_bound[1] else H - h, - pad if is_bound[2] else w + 1, - pad if is_bound[3] else W - w - ]).min(dim=0).values - mask = mask.clip(1, border_width) - mask = (mask / border_width).to(dtype=data.dtype, device=data.device) - mask = rearrange(mask, "H W -> 1 H W") - return mask - - def tiled_block_forward(self, block, hidden_states, condition_emb, text_emb, freq_cis_img, residual, torch_dtype, data_device, computation_device, tile_size, tile_stride): - B, C, H, W = hidden_states.shape - - weight = torch.zeros((1, 1, H, W), dtype=torch_dtype, device=data_device) - values = torch.zeros((B, C, H, W), dtype=torch_dtype, device=data_device) - - # Split tasks - tasks = [] - for h in range(0, H, tile_stride): - for w in range(0, W, tile_stride): - if (h-tile_stride >= 0 and h-tile_stride+tile_size >= H) or (w-tile_stride >= 
0 and w-tile_stride+tile_size >= W): - continue - h_, w_ = h + tile_size, w + tile_size - if h_ > H: h, h_ = H - tile_size, H - if w_ > W: w, w_ = W - tile_size, W - tasks.append((h, h_, w, w_)) - - # Run - for hl, hr, wl, wr in tasks: - hidden_states_batch = hidden_states[:, :, hl:hr, wl:wr].to(computation_device) - hidden_states_batch = rearrange(hidden_states_batch, "B C H W -> B (H W) C") - if residual is not None: - residual_batch = residual[:, :, hl:hr, wl:wr].to(computation_device) - residual_batch = rearrange(residual_batch, "B C H W -> B (H W) C") - else: - residual_batch = None - - # Forward - hidden_states_batch = block(hidden_states_batch, condition_emb, text_emb, freq_cis_img, residual_batch).to(data_device) - hidden_states_batch = rearrange(hidden_states_batch, "B (H W) C -> B C H W", H=hr-hl) - - mask = self.build_mask(hidden_states_batch, is_bound=(hl==0, hr>=H, wl==0, wr>=W)) - values[:, :, hl:hr, wl:wr] += hidden_states_batch * mask - weight[:, :, hl:hr, wl:wr] += mask - values /= weight - return values - - def forward( - self, hidden_states, text_emb, text_emb_t5, text_emb_mask, text_emb_mask_t5, timestep, size_emb, freq_cis_img, - tiled=False, tile_size=64, tile_stride=32, - to_cache=False, - use_gradient_checkpointing=False, - ): - # Embeddings - text_emb = self.prepare_text_emb(text_emb, text_emb_t5, text_emb_mask, text_emb_mask_t5) - condition_emb = self.prepare_extra_emb(text_emb_t5, timestep, size_emb, hidden_states.dtype, hidden_states.shape[0]) - - # Input - height, width = hidden_states.shape[-2], hidden_states.shape[-1] - hidden_states = self.patch_embedder(hidden_states) - - # Blocks - def create_custom_forward(module): - def custom_forward(*inputs): - return module(*inputs) - return custom_forward - if tiled: - hidden_states = rearrange(hidden_states, "B (H W) C -> B C H W", H=height//2) - residuals = [] - for block_id, block in enumerate(self.blocks): - residual = residuals.pop() if block_id >= self.num_layers_down else None - 
hidden_states = self.tiled_block_forward( - block, hidden_states, condition_emb, text_emb, freq_cis_img, residual, - torch_dtype=hidden_states.dtype, data_device=hidden_states.device, computation_device=hidden_states.device, - tile_size=tile_size, tile_stride=tile_stride - ) - if block_id < self.num_layers_down - 2: - residuals.append(hidden_states) - hidden_states = rearrange(hidden_states, "B C H W -> B (H W) C") - else: - residuals = [] - for block_id, block in enumerate(self.blocks): - residual = residuals.pop() if block_id >= self.num_layers_down else None - if self.training and use_gradient_checkpointing: - hidden_states = torch.utils.checkpoint.checkpoint( - create_custom_forward(block), - hidden_states, condition_emb, text_emb, freq_cis_img, residual, - use_reentrant=False, - ) - else: - hidden_states = block(hidden_states, condition_emb, text_emb, freq_cis_img, residual, to_cache=to_cache) - if block_id < self.num_layers_down - 2: - residuals.append(hidden_states) - - # Output - hidden_states = self.final_layer(hidden_states, condition_emb) - hidden_states = self.unpatchify(hidden_states, height//2, width//2) - hidden_states, _ = hidden_states.chunk(2, dim=1) - return hidden_states - - @staticmethod - def state_dict_converter(): - return HunyuanDiTStateDictConverter() - - - -class HunyuanDiTStateDictConverter(): - def __init__(self): - pass - - def from_diffusers(self, state_dict): - state_dict_ = {} - for name, param in state_dict.items(): - name_ = name - name_ = name_.replace(".default_modulation.", ".modulation.") - name_ = name_.replace(".mlp.fc1.", ".mlp.0.") - name_ = name_.replace(".mlp.fc2.", ".mlp.2.") - name_ = name_.replace(".attn1.q_norm.", ".rota1.q_norm.") - name_ = name_.replace(".attn2.q_norm.", ".rota2.q_norm.") - name_ = name_.replace(".attn1.k_norm.", ".rota1.k_norm.") - name_ = name_.replace(".attn2.k_norm.", ".rota2.k_norm.") - name_ = name_.replace(".q_proj.", ".to_q.") - name_ = name_.replace(".out_proj.", ".to_out.") - name_ = 
name_.replace("text_embedding_padding", "text_emb_padding") - name_ = name_.replace("mlp_t5.0.", "t5_embedder.0.") - name_ = name_.replace("mlp_t5.2.", "t5_embedder.2.") - name_ = name_.replace("pooler.", "t5_pooler.") - name_ = name_.replace("x_embedder.", "patch_embedder.") - name_ = name_.replace("t_embedder.", "timestep_embedder.") - name_ = name_.replace("t5_pooler.to_q.", "t5_pooler.q_proj.") - name_ = name_.replace("style_embedder.weight", "style_embedder") - if ".kv_proj." in name_: - param_k = param[:param.shape[0]//2] - param_v = param[param.shape[0]//2:] - state_dict_[name_.replace(".kv_proj.", ".to_k.")] = param_k - state_dict_[name_.replace(".kv_proj.", ".to_v.")] = param_v - elif ".Wqkv." in name_: - param_q = param[:param.shape[0]//3] - param_k = param[param.shape[0]//3:param.shape[0]//3*2] - param_v = param[param.shape[0]//3*2:] - state_dict_[name_.replace(".Wqkv.", ".to_q.")] = param_q - state_dict_[name_.replace(".Wqkv.", ".to_k.")] = param_k - state_dict_[name_.replace(".Wqkv.", ".to_v.")] = param_v - elif "style_embedder" in name_: - state_dict_[name_] = param.squeeze() - else: - state_dict_[name_] = param - return state_dict_ - - def from_civitai(self, state_dict): - return self.from_diffusers(state_dict) diff --git a/diffsynth/models/hunyuan_dit_text_encoder.py b/diffsynth/models/hunyuan_dit_text_encoder.py deleted file mode 100644 index 33999a8..0000000 --- a/diffsynth/models/hunyuan_dit_text_encoder.py +++ /dev/null @@ -1,163 +0,0 @@ -from transformers import BertModel, BertConfig, T5EncoderModel, T5Config -import torch - - - -class HunyuanDiTCLIPTextEncoder(BertModel): - def __init__(self): - config = BertConfig( - _name_or_path = "", - architectures = ["BertModel"], - attention_probs_dropout_prob = 0.1, - bos_token_id = 0, - classifier_dropout = None, - directionality = "bidi", - eos_token_id = 2, - hidden_act = "gelu", - hidden_dropout_prob = 0.1, - hidden_size = 1024, - initializer_range = 0.02, - intermediate_size = 4096, - 
layer_norm_eps = 1e-12, - max_position_embeddings = 512, - model_type = "bert", - num_attention_heads = 16, - num_hidden_layers = 24, - output_past = True, - pad_token_id = 0, - pooler_fc_size = 768, - pooler_num_attention_heads = 12, - pooler_num_fc_layers = 3, - pooler_size_per_head = 128, - pooler_type = "first_token_transform", - position_embedding_type = "absolute", - torch_dtype = "float32", - transformers_version = "4.37.2", - type_vocab_size = 2, - use_cache = True, - vocab_size = 47020 - ) - super().__init__(config, add_pooling_layer=False) - self.eval() - - def forward(self, input_ids, attention_mask, clip_skip=1): - input_shape = input_ids.size() - - batch_size, seq_length = input_shape - device = input_ids.device - - past_key_values_length = 0 - - if attention_mask is None: - attention_mask = torch.ones(((batch_size, seq_length + past_key_values_length)), device=device) - - extended_attention_mask: torch.Tensor = self.get_extended_attention_mask(attention_mask, input_shape) - - embedding_output = self.embeddings( - input_ids=input_ids, - position_ids=None, - token_type_ids=None, - inputs_embeds=None, - past_key_values_length=0, - ) - encoder_outputs = self.encoder( - embedding_output, - attention_mask=extended_attention_mask, - head_mask=None, - encoder_hidden_states=None, - encoder_attention_mask=None, - past_key_values=None, - use_cache=False, - output_attentions=False, - output_hidden_states=True, - return_dict=True, - ) - all_hidden_states = encoder_outputs.hidden_states - prompt_emb = all_hidden_states[-clip_skip] - if clip_skip > 1: - mean, std = all_hidden_states[-1].mean(), all_hidden_states[-1].std() - prompt_emb = (prompt_emb - prompt_emb.mean()) / prompt_emb.std() * std + mean - return prompt_emb - - @staticmethod - def state_dict_converter(): - return HunyuanDiTCLIPTextEncoderStateDictConverter() - - - -class HunyuanDiTT5TextEncoder(T5EncoderModel): - def __init__(self): - config = T5Config( - _name_or_path = "../HunyuanDiT/t2i/mt5", - 
architectures = ["MT5ForConditionalGeneration"], - classifier_dropout = 0.0, - d_ff = 5120, - d_kv = 64, - d_model = 2048, - decoder_start_token_id = 0, - dense_act_fn = "gelu_new", - dropout_rate = 0.1, - eos_token_id = 1, - feed_forward_proj = "gated-gelu", - initializer_factor = 1.0, - is_encoder_decoder = True, - is_gated_act = True, - layer_norm_epsilon = 1e-06, - model_type = "t5", - num_decoder_layers = 24, - num_heads = 32, - num_layers = 24, - output_past = True, - pad_token_id = 0, - relative_attention_max_distance = 128, - relative_attention_num_buckets = 32, - tie_word_embeddings = False, - tokenizer_class = "T5Tokenizer", - transformers_version = "4.37.2", - use_cache = True, - vocab_size = 250112 - ) - super().__init__(config) - self.eval() - - def forward(self, input_ids, attention_mask, clip_skip=1): - outputs = super().forward( - input_ids=input_ids, - attention_mask=attention_mask, - output_hidden_states=True, - ) - prompt_emb = outputs.hidden_states[-clip_skip] - if clip_skip > 1: - mean, std = outputs.hidden_states[-1].mean(), outputs.hidden_states[-1].std() - prompt_emb = (prompt_emb - prompt_emb.mean()) / prompt_emb.std() * std + mean - return prompt_emb - - @staticmethod - def state_dict_converter(): - return HunyuanDiTT5TextEncoderStateDictConverter() - - - -class HunyuanDiTCLIPTextEncoderStateDictConverter(): - def __init__(self): - pass - - def from_diffusers(self, state_dict): - state_dict_ = {name[5:]: param for name, param in state_dict.items() if name.startswith("bert.")} - return state_dict_ - - def from_civitai(self, state_dict): - return self.from_diffusers(state_dict) - - -class HunyuanDiTT5TextEncoderStateDictConverter(): - def __init__(self): - pass - - def from_diffusers(self, state_dict): - state_dict_ = {name: param for name, param in state_dict.items() if name.startswith("encoder.")} - state_dict_["shared.weight"] = state_dict["shared.weight"] - return state_dict_ - - def from_civitai(self, state_dict): - return 
self.from_diffusers(state_dict) diff --git a/diffsynth/models/hunyuan_video_dit.py b/diffsynth/models/hunyuan_video_dit.py deleted file mode 100644 index 1315536..0000000 --- a/diffsynth/models/hunyuan_video_dit.py +++ /dev/null @@ -1,920 +0,0 @@ -import torch -from .sd3_dit import TimestepEmbeddings, RMSNorm -from .utils import init_weights_on_device -from einops import rearrange, repeat -from tqdm import tqdm -from typing import Union, Tuple, List -from .utils import hash_state_dict_keys - - -def HunyuanVideoRope(latents): - def _to_tuple(x, dim=2): - if isinstance(x, int): - return (x,) * dim - elif len(x) == dim: - return x - else: - raise ValueError(f"Expected length {dim} or int, but got {x}") - - - def get_meshgrid_nd(start, *args, dim=2): - """ - Get n-D meshgrid with start, stop and num. - - Args: - start (int or tuple): If len(args) == 0, start is num; If len(args) == 1, start is start, args[0] is stop, - step is 1; If len(args) == 2, start is start, args[0] is stop, args[1] is num. For n-dim, start/stop/num - should be int or n-tuple. If n-tuple is provided, the meshgrid will be stacked following the dim order in - n-tuples. - *args: See above. - dim (int): Dimension of the meshgrid. Defaults to 2. - - Returns: - grid (np.ndarray): [dim, ...] 
- """ - if len(args) == 0: - # start is grid_size - num = _to_tuple(start, dim=dim) - start = (0,) * dim - stop = num - elif len(args) == 1: - # start is start, args[0] is stop, step is 1 - start = _to_tuple(start, dim=dim) - stop = _to_tuple(args[0], dim=dim) - num = [stop[i] - start[i] for i in range(dim)] - elif len(args) == 2: - # start is start, args[0] is stop, args[1] is num - start = _to_tuple(start, dim=dim) # Left-Top eg: 12,0 - stop = _to_tuple(args[0], dim=dim) # Right-Bottom eg: 20,32 - num = _to_tuple(args[1], dim=dim) # Target Size eg: 32,124 - else: - raise ValueError(f"len(args) should be 0, 1 or 2, but got {len(args)}") - - # PyTorch implement of np.linspace(start[i], stop[i], num[i], endpoint=False) - axis_grid = [] - for i in range(dim): - a, b, n = start[i], stop[i], num[i] - g = torch.linspace(a, b, n + 1, dtype=torch.float32)[:n] - axis_grid.append(g) - grid = torch.meshgrid(*axis_grid, indexing="ij") # dim x [W, H, D] - grid = torch.stack(grid, dim=0) # [dim, W, H, D] - - return grid - - - def get_1d_rotary_pos_embed( - dim: int, - pos: Union[torch.FloatTensor, int], - theta: float = 10000.0, - use_real: bool = False, - theta_rescale_factor: float = 1.0, - interpolation_factor: float = 1.0, - ) -> Union[torch.Tensor, Tuple[torch.Tensor, torch.Tensor]]: - """ - Precompute the frequency tensor for complex exponential (cis) with given dimensions. - (Note: `cis` means `cos + i * sin`, where i is the imaginary unit.) - - This function calculates a frequency tensor with complex exponential using the given dimension 'dim' - and the end index 'end'. The 'theta' parameter scales the frequencies. - The returned tensor contains complex values in complex64 data type. - - Args: - dim (int): Dimension of the frequency tensor. - pos (int or torch.FloatTensor): Position indices for the frequency tensor. [S] or scalar - theta (float, optional): Scaling factor for frequency computation. Defaults to 10000.0. 
- use_real (bool, optional): If True, return real part and imaginary part separately. - Otherwise, return complex numbers. - theta_rescale_factor (float, optional): Rescale factor for theta. Defaults to 1.0. - - Returns: - freqs_cis: Precomputed frequency tensor with complex exponential. [S, D/2] - freqs_cos, freqs_sin: Precomputed frequency tensor with real and imaginary parts separately. [S, D] - """ - if isinstance(pos, int): - pos = torch.arange(pos).float() - - # proposed by reddit user bloc97, to rescale rotary embeddings to longer sequence length without fine-tuning - # has some connection to NTK literature - if theta_rescale_factor != 1.0: - theta *= theta_rescale_factor ** (dim / (dim - 2)) - - freqs = 1.0 / ( - theta ** (torch.arange(0, dim, 2)[: (dim // 2)].float() / dim) - ) # [D/2] - # assert interpolation_factor == 1.0, f"interpolation_factor: {interpolation_factor}" - freqs = torch.outer(pos * interpolation_factor, freqs) # [S, D/2] - if use_real: - freqs_cos = freqs.cos().repeat_interleave(2, dim=1) # [S, D] - freqs_sin = freqs.sin().repeat_interleave(2, dim=1) # [S, D] - return freqs_cos, freqs_sin - else: - freqs_cis = torch.polar( - torch.ones_like(freqs), freqs - ) # complex64 # [S, D/2] - return freqs_cis - - - def get_nd_rotary_pos_embed( - rope_dim_list, - start, - *args, - theta=10000.0, - use_real=False, - theta_rescale_factor: Union[float, List[float]] = 1.0, - interpolation_factor: Union[float, List[float]] = 1.0, - ): - """ - This is a n-d version of precompute_freqs_cis, which is a RoPE for tokens with n-d structure. - - Args: - rope_dim_list (list of int): Dimension of each rope. len(rope_dim_list) should equal to n. - sum(rope_dim_list) should equal to head_dim of attention layer. - start (int | tuple of int | list of int): If len(args) == 0, start is num; If len(args) == 1, start is start, - args[0] is stop, step is 1; If len(args) == 2, start is start, args[0] is stop, args[1] is num. - *args: See above. 
- theta (float): Scaling factor for frequency computation. Defaults to 10000.0. - use_real (bool): If True, return real part and imaginary part separately. Otherwise, return complex numbers. - Some libraries such as TensorRT does not support complex64 data type. So it is useful to provide a real - part and an imaginary part separately. - theta_rescale_factor (float): Rescale factor for theta. Defaults to 1.0. - - Returns: - pos_embed (torch.Tensor): [HW, D/2] - """ - - grid = get_meshgrid_nd( - start, *args, dim=len(rope_dim_list) - ) # [3, W, H, D] / [2, W, H] - - if isinstance(theta_rescale_factor, int) or isinstance(theta_rescale_factor, float): - theta_rescale_factor = [theta_rescale_factor] * len(rope_dim_list) - elif isinstance(theta_rescale_factor, list) and len(theta_rescale_factor) == 1: - theta_rescale_factor = [theta_rescale_factor[0]] * len(rope_dim_list) - assert len(theta_rescale_factor) == len( - rope_dim_list - ), "len(theta_rescale_factor) should equal to len(rope_dim_list)" - - if isinstance(interpolation_factor, int) or isinstance(interpolation_factor, float): - interpolation_factor = [interpolation_factor] * len(rope_dim_list) - elif isinstance(interpolation_factor, list) and len(interpolation_factor) == 1: - interpolation_factor = [interpolation_factor[0]] * len(rope_dim_list) - assert len(interpolation_factor) == len( - rope_dim_list - ), "len(interpolation_factor) should equal to len(rope_dim_list)" - - # use 1/ndim of dimensions to encode grid_axis - embs = [] - for i in range(len(rope_dim_list)): - emb = get_1d_rotary_pos_embed( - rope_dim_list[i], - grid[i].reshape(-1), - theta, - use_real=use_real, - theta_rescale_factor=theta_rescale_factor[i], - interpolation_factor=interpolation_factor[i], - ) # 2 x [WHD, rope_dim_list[i]] - embs.append(emb) - - if use_real: - cos = torch.cat([emb[0] for emb in embs], dim=1) # (WHD, D/2) - sin = torch.cat([emb[1] for emb in embs], dim=1) # (WHD, D/2) - return cos, sin - else: - emb = torch.cat(embs, 
dim=1) # (WHD, D/2) - return emb - - freqs_cos, freqs_sin = get_nd_rotary_pos_embed( - [16, 56, 56], - [latents.shape[2], latents.shape[3] // 2, latents.shape[4] // 2], - theta=256, - use_real=True, - theta_rescale_factor=1, - ) - return freqs_cos, freqs_sin - - -class PatchEmbed(torch.nn.Module): - def __init__(self, patch_size=(1, 2, 2), in_channels=16, embed_dim=3072): - super().__init__() - self.proj = torch.nn.Conv3d(in_channels, embed_dim, kernel_size=patch_size, stride=patch_size) - - def forward(self, x): - x = self.proj(x) - x = x.flatten(2).transpose(1, 2) - return x - - -class IndividualTokenRefinerBlock(torch.nn.Module): - def __init__(self, hidden_size=3072, num_heads=24): - super().__init__() - self.num_heads = num_heads - self.norm1 = torch.nn.LayerNorm(hidden_size, elementwise_affine=True, eps=1e-6) - self.self_attn_qkv = torch.nn.Linear(hidden_size, hidden_size * 3) - self.self_attn_proj = torch.nn.Linear(hidden_size, hidden_size) - - self.norm2 = torch.nn.LayerNorm(hidden_size, elementwise_affine=True, eps=1e-6) - self.mlp = torch.nn.Sequential( - torch.nn.Linear(hidden_size, hidden_size * 4), - torch.nn.SiLU(), - torch.nn.Linear(hidden_size * 4, hidden_size) - ) - self.adaLN_modulation = torch.nn.Sequential( - torch.nn.SiLU(), - torch.nn.Linear(hidden_size, hidden_size * 2, device="cuda", dtype=torch.bfloat16), - ) - - def forward(self, x, c, attn_mask=None): - gate_msa, gate_mlp = self.adaLN_modulation(c).chunk(2, dim=1) - - norm_x = self.norm1(x) - qkv = self.self_attn_qkv(norm_x) - q, k, v = rearrange(qkv, "B L (K H D) -> K B H L D", K=3, H=self.num_heads) - - attn = torch.nn.functional.scaled_dot_product_attention(q, k, v, attn_mask=attn_mask) - attn = rearrange(attn, "B H L D -> B L (H D)") - - x = x + self.self_attn_proj(attn) * gate_msa.unsqueeze(1) - x = x + self.mlp(self.norm2(x)) * gate_mlp.unsqueeze(1) - - return x - - -class SingleTokenRefiner(torch.nn.Module): - def __init__(self, in_channels=4096, hidden_size=3072, depth=2): - 
super().__init__() - self.input_embedder = torch.nn.Linear(in_channels, hidden_size, bias=True) - self.t_embedder = TimestepEmbeddings(256, hidden_size, computation_device="cpu") - self.c_embedder = torch.nn.Sequential( - torch.nn.Linear(in_channels, hidden_size), - torch.nn.SiLU(), - torch.nn.Linear(hidden_size, hidden_size) - ) - self.blocks = torch.nn.ModuleList([IndividualTokenRefinerBlock(hidden_size=hidden_size) for _ in range(depth)]) - - def forward(self, x, t, mask=None): - timestep_aware_representations = self.t_embedder(t, dtype=torch.float32) - - mask_float = mask.float().unsqueeze(-1) - context_aware_representations = (x * mask_float).sum(dim=1) / mask_float.sum(dim=1) - context_aware_representations = self.c_embedder(context_aware_representations) - c = timestep_aware_representations + context_aware_representations - - x = self.input_embedder(x) - - mask = mask.to(device=x.device, dtype=torch.bool) - mask = repeat(mask, "B L -> B 1 D L", D=mask.shape[-1]) - mask = mask & mask.transpose(2, 3) - mask[:, :, :, 0] = True - - for block in self.blocks: - x = block(x, c, mask) - - return x - - -class ModulateDiT(torch.nn.Module): - def __init__(self, hidden_size, factor=6): - super().__init__() - self.act = torch.nn.SiLU() - self.linear = torch.nn.Linear(hidden_size, factor * hidden_size) - - def forward(self, x): - return self.linear(self.act(x)) - - -def modulate(x, shift=None, scale=None, tr_shift=None, tr_scale=None, tr_token=None): - if tr_shift is not None: - x_zero = x[:, :tr_token] * (1 + tr_scale.unsqueeze(1)) + tr_shift.unsqueeze(1) - x_orig = x[:, tr_token:] * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1) - x = torch.concat((x_zero, x_orig), dim=1) - return x - if scale is None and shift is None: - return x - elif shift is None: - return x * (1 + scale.unsqueeze(1)) - elif scale is None: - return x + shift.unsqueeze(1) - else: - return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1) - - -def reshape_for_broadcast( - freqs_cis, - x: torch.Tensor, 
- head_first=False, -): - ndim = x.ndim - assert 0 <= 1 < ndim - - if isinstance(freqs_cis, tuple): - # freqs_cis: (cos, sin) in real space - if head_first: - assert freqs_cis[0].shape == ( - x.shape[-2], - x.shape[-1], - ), f"freqs_cis shape {freqs_cis[0].shape} does not match x shape {x.shape}" - shape = [ - d if i == ndim - 2 or i == ndim - 1 else 1 - for i, d in enumerate(x.shape) - ] - else: - assert freqs_cis[0].shape == ( - x.shape[1], - x.shape[-1], - ), f"freqs_cis shape {freqs_cis[0].shape} does not match x shape {x.shape}" - shape = [d if i == 1 or i == ndim - 1 else 1 for i, d in enumerate(x.shape)] - return freqs_cis[0].view(*shape), freqs_cis[1].view(*shape) - else: - # freqs_cis: values in complex space - if head_first: - assert freqs_cis.shape == ( - x.shape[-2], - x.shape[-1], - ), f"freqs_cis shape {freqs_cis.shape} does not match x shape {x.shape}" - shape = [ - d if i == ndim - 2 or i == ndim - 1 else 1 - for i, d in enumerate(x.shape) - ] - else: - assert freqs_cis.shape == ( - x.shape[1], - x.shape[-1], - ), f"freqs_cis shape {freqs_cis.shape} does not match x shape {x.shape}" - shape = [d if i == 1 or i == ndim - 1 else 1 for i, d in enumerate(x.shape)] - return freqs_cis.view(*shape) - - -def rotate_half(x): - x_real, x_imag = ( - x.float().reshape(*x.shape[:-1], -1, 2).unbind(-1) - ) # [B, S, H, D//2] - return torch.stack([-x_imag, x_real], dim=-1).flatten(3) - - -def apply_rotary_emb( - xq: torch.Tensor, - xk: torch.Tensor, - freqs_cis, - head_first: bool = False, -): - xk_out = None - if isinstance(freqs_cis, tuple): - cos, sin = reshape_for_broadcast(freqs_cis, xq, head_first) # [S, D] - cos, sin = cos.to(xq.device), sin.to(xq.device) - # real * cos - imag * sin - # imag * cos + real * sin - xq_out = (xq.float() * cos + rotate_half(xq.float()) * sin).type_as(xq) - xk_out = (xk.float() * cos + rotate_half(xk.float()) * sin).type_as(xk) - else: - # view_as_complex will pack [..., D/2, 2](real) to [..., D/2](complex) - xq_ = 
torch.view_as_complex( - xq.float().reshape(*xq.shape[:-1], -1, 2) - ) # [B, S, H, D//2] - freqs_cis = reshape_for_broadcast(freqs_cis, xq_, head_first).to( - xq.device - ) # [S, D//2] --> [1, S, 1, D//2] - # (real, imag) * (cos, sin) = (real * cos - imag * sin, imag * cos + real * sin) - # view_as_real will expand [..., D/2](complex) to [..., D/2, 2](real) - xq_out = torch.view_as_real(xq_ * freqs_cis).flatten(3).type_as(xq) - xk_ = torch.view_as_complex( - xk.float().reshape(*xk.shape[:-1], -1, 2) - ) # [B, S, H, D//2] - xk_out = torch.view_as_real(xk_ * freqs_cis).flatten(3).type_as(xk) - - return xq_out, xk_out - - -def attention(q, k, v): - q, k, v = q.transpose(1, 2), k.transpose(1, 2), v.transpose(1, 2) - x = torch.nn.functional.scaled_dot_product_attention(q, k, v) - x = x.transpose(1, 2).flatten(2, 3) - return x - - -def apply_gate(x, gate, tr_gate=None, tr_token=None): - if tr_gate is not None: - x_zero = x[:, :tr_token] * tr_gate.unsqueeze(1) - x_orig = x[:, tr_token:] * gate.unsqueeze(1) - return torch.concat((x_zero, x_orig), dim=1) - else: - return x * gate.unsqueeze(1) - - -class MMDoubleStreamBlockComponent(torch.nn.Module): - def __init__(self, hidden_size=3072, heads_num=24, mlp_width_ratio=4): - super().__init__() - self.heads_num = heads_num - - self.mod = ModulateDiT(hidden_size) - self.norm1 = torch.nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) - - self.to_qkv = torch.nn.Linear(hidden_size, hidden_size * 3) - self.norm_q = RMSNorm(dim=hidden_size // heads_num, eps=1e-6) - self.norm_k = RMSNorm(dim=hidden_size // heads_num, eps=1e-6) - self.to_out = torch.nn.Linear(hidden_size, hidden_size) - - self.norm2 = torch.nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) - self.ff = torch.nn.Sequential( - torch.nn.Linear(hidden_size, hidden_size * mlp_width_ratio), - torch.nn.GELU(approximate="tanh"), - torch.nn.Linear(hidden_size * mlp_width_ratio, hidden_size) - ) - - def forward(self, hidden_states, conditioning, 
freqs_cis=None, token_replace_vec=None, tr_token=None): - mod1_shift, mod1_scale, mod1_gate, mod2_shift, mod2_scale, mod2_gate = self.mod(conditioning).chunk(6, dim=-1) - if token_replace_vec is not None: - assert tr_token is not None - tr_mod1_shift, tr_mod1_scale, tr_mod1_gate, tr_mod2_shift, tr_mod2_scale, tr_mod2_gate = self.mod(token_replace_vec).chunk(6, dim=-1) - else: - tr_mod1_shift, tr_mod1_scale, tr_mod1_gate, tr_mod2_shift, tr_mod2_scale, tr_mod2_gate = None, None, None, None, None, None - - norm_hidden_states = self.norm1(hidden_states) - norm_hidden_states = modulate(norm_hidden_states, shift=mod1_shift, scale=mod1_scale, - tr_shift=tr_mod1_shift, tr_scale=tr_mod1_scale, tr_token=tr_token) - qkv = self.to_qkv(norm_hidden_states) - q, k, v = rearrange(qkv, "B L (K H D) -> K B L H D", K=3, H=self.heads_num) - - q = self.norm_q(q) - k = self.norm_k(k) - - if freqs_cis is not None: - q, k = apply_rotary_emb(q, k, freqs_cis, head_first=False) - return (q, k, v), (mod1_gate, mod2_shift, mod2_scale, mod2_gate), (tr_mod1_gate, tr_mod2_shift, tr_mod2_scale, tr_mod2_gate) - - def process_ff(self, hidden_states, attn_output, mod, mod_tr=None, tr_token=None): - mod1_gate, mod2_shift, mod2_scale, mod2_gate = mod - if mod_tr is not None: - tr_mod1_gate, tr_mod2_shift, tr_mod2_scale, tr_mod2_gate = mod_tr - else: - tr_mod1_gate, tr_mod2_shift, tr_mod2_scale, tr_mod2_gate = None, None, None, None - hidden_states = hidden_states + apply_gate(self.to_out(attn_output), mod1_gate, tr_mod1_gate, tr_token) - x = self.ff(modulate(self.norm2(hidden_states), shift=mod2_shift, scale=mod2_scale, tr_shift=tr_mod2_shift, tr_scale=tr_mod2_scale, tr_token=tr_token)) - hidden_states = hidden_states + apply_gate(x, mod2_gate, tr_mod2_gate, tr_token) - return hidden_states - - -class MMDoubleStreamBlock(torch.nn.Module): - def __init__(self, hidden_size=3072, heads_num=24, mlp_width_ratio=4): - super().__init__() - self.component_a = MMDoubleStreamBlockComponent(hidden_size, 
heads_num, mlp_width_ratio) - self.component_b = MMDoubleStreamBlockComponent(hidden_size, heads_num, mlp_width_ratio) - - def forward(self, hidden_states_a, hidden_states_b, conditioning, freqs_cis, token_replace_vec=None, tr_token=None, split_token=71): - (q_a, k_a, v_a), mod_a, mod_tr = self.component_a(hidden_states_a, conditioning, freqs_cis, token_replace_vec, tr_token) - (q_b, k_b, v_b), mod_b, _ = self.component_b(hidden_states_b, conditioning, freqs_cis=None) - - q_a, q_b = torch.concat([q_a, q_b[:, :split_token]], dim=1), q_b[:, split_token:].contiguous() - k_a, k_b = torch.concat([k_a, k_b[:, :split_token]], dim=1), k_b[:, split_token:].contiguous() - v_a, v_b = torch.concat([v_a, v_b[:, :split_token]], dim=1), v_b[:, split_token:].contiguous() - attn_output_a = attention(q_a, k_a, v_a) - attn_output_b = attention(q_b, k_b, v_b) - attn_output_a, attn_output_b = attn_output_a[:, :-split_token].contiguous(), torch.concat([attn_output_a[:, -split_token:], attn_output_b], dim=1) - - hidden_states_a = self.component_a.process_ff(hidden_states_a, attn_output_a, mod_a, mod_tr, tr_token) - hidden_states_b = self.component_b.process_ff(hidden_states_b, attn_output_b, mod_b) - return hidden_states_a, hidden_states_b - - -class MMSingleStreamBlockOriginal(torch.nn.Module): - def __init__(self, hidden_size=3072, heads_num=24, mlp_width_ratio=4): - super().__init__() - self.hidden_size = hidden_size - self.heads_num = heads_num - self.mlp_hidden_dim = hidden_size * mlp_width_ratio - - self.linear1 = torch.nn.Linear(hidden_size, hidden_size * 3 + self.mlp_hidden_dim) - self.linear2 = torch.nn.Linear(hidden_size + self.mlp_hidden_dim, hidden_size) - - self.q_norm = RMSNorm(dim=hidden_size // heads_num, eps=1e-6) - self.k_norm = RMSNorm(dim=hidden_size // heads_num, eps=1e-6) - - self.pre_norm = torch.nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) - - self.mlp_act = torch.nn.GELU(approximate="tanh") - self.modulation = ModulateDiT(hidden_size, factor=3) - 
- def forward(self, x, vec, freqs_cis=None, txt_len=256): - mod_shift, mod_scale, mod_gate = self.modulation(vec).chunk(3, dim=-1) - x_mod = modulate(self.pre_norm(x), shift=mod_shift, scale=mod_scale) - qkv, mlp = torch.split(self.linear1(x_mod), [3 * self.hidden_size, self.mlp_hidden_dim], dim=-1) - q, k, v = rearrange(qkv, "B L (K H D) -> K B L H D", K=3, H=self.heads_num) - q = self.q_norm(q) - k = self.k_norm(k) - - q_a, q_b = q[:, :-txt_len, :, :], q[:, -txt_len:, :, :] - k_a, k_b = k[:, :-txt_len, :, :], k[:, -txt_len:, :, :] - q_a, k_a = apply_rotary_emb(q_a, k_a, freqs_cis, head_first=False) - q = torch.cat((q_a, q_b), dim=1) - k = torch.cat((k_a, k_b), dim=1) - - attn_output_a = attention(q[:, :-185].contiguous(), k[:, :-185].contiguous(), v[:, :-185].contiguous()) - attn_output_b = attention(q[:, -185:].contiguous(), k[:, -185:].contiguous(), v[:, -185:].contiguous()) - attn_output = torch.concat([attn_output_a, attn_output_b], dim=1) - - output = self.linear2(torch.cat((attn_output, self.mlp_act(mlp)), 2)) - return x + output * mod_gate.unsqueeze(1) - - -class MMSingleStreamBlock(torch.nn.Module): - def __init__(self, hidden_size=3072, heads_num=24, mlp_width_ratio=4): - super().__init__() - self.heads_num = heads_num - - self.mod = ModulateDiT(hidden_size, factor=3) - self.norm = torch.nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) - - self.to_qkv = torch.nn.Linear(hidden_size, hidden_size * 3) - self.norm_q = RMSNorm(dim=hidden_size // heads_num, eps=1e-6) - self.norm_k = RMSNorm(dim=hidden_size // heads_num, eps=1e-6) - self.to_out = torch.nn.Linear(hidden_size, hidden_size) - - self.ff = torch.nn.Sequential( - torch.nn.Linear(hidden_size, hidden_size * mlp_width_ratio), - torch.nn.GELU(approximate="tanh"), - torch.nn.Linear(hidden_size * mlp_width_ratio, hidden_size, bias=False) - ) - - def forward(self, hidden_states, conditioning, freqs_cis=None, txt_len=256, token_replace_vec=None, tr_token=None, split_token=71): - mod_shift, 
mod_scale, mod_gate = self.mod(conditioning).chunk(3, dim=-1) - if token_replace_vec is not None: - assert tr_token is not None - tr_mod_shift, tr_mod_scale, tr_mod_gate = self.mod(token_replace_vec).chunk(3, dim=-1) - else: - tr_mod_shift, tr_mod_scale, tr_mod_gate = None, None, None - - norm_hidden_states = self.norm(hidden_states) - norm_hidden_states = modulate(norm_hidden_states, shift=mod_shift, scale=mod_scale, - tr_shift=tr_mod_shift, tr_scale=tr_mod_scale, tr_token=tr_token) - qkv = self.to_qkv(norm_hidden_states) - - q, k, v = rearrange(qkv, "B L (K H D) -> K B L H D", K=3, H=self.heads_num) - - q = self.norm_q(q) - k = self.norm_k(k) - - q_a, q_b = q[:, :-txt_len, :, :], q[:, -txt_len:, :, :] - k_a, k_b = k[:, :-txt_len, :, :], k[:, -txt_len:, :, :] - q_a, k_a = apply_rotary_emb(q_a, k_a, freqs_cis, head_first=False) - - v_len = txt_len - split_token - q_a, q_b = torch.concat([q_a, q_b[:, :split_token]], dim=1), q_b[:, split_token:].contiguous() - k_a, k_b = torch.concat([k_a, k_b[:, :split_token]], dim=1), k_b[:, split_token:].contiguous() - v_a, v_b = v[:, :-v_len].contiguous(), v[:, -v_len:].contiguous() - - attn_output_a = attention(q_a, k_a, v_a) - attn_output_b = attention(q_b, k_b, v_b) - attn_output = torch.concat([attn_output_a, attn_output_b], dim=1) - - hidden_states = hidden_states + apply_gate(self.to_out(attn_output), mod_gate, tr_mod_gate, tr_token) - hidden_states = hidden_states + apply_gate(self.ff(norm_hidden_states), mod_gate, tr_mod_gate, tr_token) - return hidden_states - - -class FinalLayer(torch.nn.Module): - def __init__(self, hidden_size=3072, patch_size=(1, 2, 2), out_channels=16): - super().__init__() - - self.norm_final = torch.nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) - self.linear = torch.nn.Linear(hidden_size, patch_size[0] * patch_size[1] * patch_size[2] * out_channels) - - self.adaLN_modulation = torch.nn.Sequential(torch.nn.SiLU(), torch.nn.Linear(hidden_size, 2 * hidden_size)) - - def forward(self, 
x, c): - shift, scale = self.adaLN_modulation(c).chunk(2, dim=1) - x = modulate(self.norm_final(x), shift=shift, scale=scale) - x = self.linear(x) - return x - - -class HunyuanVideoDiT(torch.nn.Module): - def __init__(self, in_channels=16, hidden_size=3072, text_dim=4096, num_double_blocks=20, num_single_blocks=40, guidance_embed=True): - super().__init__() - self.img_in = PatchEmbed(in_channels=in_channels, embed_dim=hidden_size) - self.txt_in = SingleTokenRefiner(in_channels=text_dim, hidden_size=hidden_size) - self.time_in = TimestepEmbeddings(256, hidden_size, computation_device="cpu") - self.vector_in = torch.nn.Sequential( - torch.nn.Linear(768, hidden_size), - torch.nn.SiLU(), - torch.nn.Linear(hidden_size, hidden_size) - ) - self.guidance_in = TimestepEmbeddings(256, hidden_size, computation_device="cpu") if guidance_embed else None - self.double_blocks = torch.nn.ModuleList([MMDoubleStreamBlock(hidden_size) for _ in range(num_double_blocks)]) - self.single_blocks = torch.nn.ModuleList([MMSingleStreamBlock(hidden_size) for _ in range(num_single_blocks)]) - self.final_layer = FinalLayer(hidden_size) - - # TODO: remove these parameters - self.dtype = torch.bfloat16 - self.patch_size = [1, 2, 2] - self.hidden_size = 3072 - self.heads_num = 24 - self.rope_dim_list = [16, 56, 56] - - def unpatchify(self, x, T, H, W): - x = rearrange(x, "B (T H W) (C pT pH pW) -> B C (T pT) (H pH) (W pW)", H=H, W=W, pT=1, pH=2, pW=2) - return x - - def enable_block_wise_offload(self, warm_device="cuda", cold_device="cpu"): - self.warm_device = warm_device - self.cold_device = cold_device - self.to(self.cold_device) - - def load_models_to_device(self, loadmodel_names=[], device="cpu"): - for model_name in loadmodel_names: - model = getattr(self, model_name) - if model is not None: - model.to(device) - torch.cuda.empty_cache() - - def prepare_freqs(self, latents): - return HunyuanVideoRope(latents) - - def forward( - self, - x: torch.Tensor, - t: torch.Tensor, - prompt_emb: 
torch.Tensor = None, - text_mask: torch.Tensor = None, - pooled_prompt_emb: torch.Tensor = None, - freqs_cos: torch.Tensor = None, - freqs_sin: torch.Tensor = None, - guidance: torch.Tensor = None, - **kwargs - ): - B, C, T, H, W = x.shape - - vec = self.time_in(t, dtype=torch.float32) + self.vector_in(pooled_prompt_emb) - if self.guidance_in is not None: - vec += self.guidance_in(guidance * 1000, dtype=torch.float32) - img = self.img_in(x) - txt = self.txt_in(prompt_emb, t, text_mask) - - for block in tqdm(self.double_blocks, desc="Double stream blocks"): - img, txt = block(img, txt, vec, (freqs_cos, freqs_sin)) - - x = torch.concat([img, txt], dim=1) - for block in tqdm(self.single_blocks, desc="Single stream blocks"): - x = block(x, vec, (freqs_cos, freqs_sin)) - - img = x[:, :-256] - img = self.final_layer(img, vec) - img = self.unpatchify(img, T=T//1, H=H//2, W=W//2) - return img - - - def enable_auto_offload(self, dtype=torch.bfloat16, device="cuda"): - def cast_to(weight, dtype=None, device=None, copy=False): - if device is None or weight.device == device: - if not copy: - if dtype is None or weight.dtype == dtype: - return weight - return weight.to(dtype=dtype, copy=copy) - - r = torch.empty_like(weight, dtype=dtype, device=device) - r.copy_(weight) - return r - - def cast_weight(s, input=None, dtype=None, device=None): - if input is not None: - if dtype is None: - dtype = input.dtype - if device is None: - device = input.device - weight = cast_to(s.weight, dtype, device) - return weight - - def cast_bias_weight(s, input=None, dtype=None, device=None, bias_dtype=None): - if input is not None: - if dtype is None: - dtype = input.dtype - if bias_dtype is None: - bias_dtype = dtype - if device is None: - device = input.device - weight = cast_to(s.weight, dtype, device) - bias = cast_to(s.bias, bias_dtype, device) if s.bias is not None else None - return weight, bias - - class quantized_layer: - class Linear(torch.nn.Linear): - def __init__(self, *args, 
dtype=torch.bfloat16, device="cuda", **kwargs): - super().__init__(*args, **kwargs) - self.dtype = dtype - self.device = device - - def block_forward_(self, x, i, j, dtype, device): - weight_ = cast_to( - self.weight[j * self.block_size: (j + 1) * self.block_size, i * self.block_size: (i + 1) * self.block_size], - dtype=dtype, device=device - ) - if self.bias is None or i > 0: - bias_ = None - else: - bias_ = cast_to(self.bias[j * self.block_size: (j + 1) * self.block_size], dtype=dtype, device=device) - x_ = x[..., i * self.block_size: (i + 1) * self.block_size] - y_ = torch.nn.functional.linear(x_, weight_, bias_) - del x_, weight_, bias_ - torch.cuda.empty_cache() - return y_ - - def block_forward(self, x, **kwargs): - # This feature can only reduce 2GB VRAM, so we disable it. - y = torch.zeros(x.shape[:-1] + (self.out_features,), dtype=x.dtype, device=x.device) - for i in range((self.in_features + self.block_size - 1) // self.block_size): - for j in range((self.out_features + self.block_size - 1) // self.block_size): - y[..., j * self.block_size: (j + 1) * self.block_size] += self.block_forward_(x, i, j, dtype=x.dtype, device=x.device) - return y - - def forward(self, x, **kwargs): - weight, bias = cast_bias_weight(self, x, dtype=self.dtype, device=self.device) - return torch.nn.functional.linear(x, weight, bias) - - - class RMSNorm(torch.nn.Module): - def __init__(self, module, dtype=torch.bfloat16, device="cuda"): - super().__init__() - self.module = module - self.dtype = dtype - self.device = device - - def forward(self, hidden_states, **kwargs): - input_dtype = hidden_states.dtype - variance = hidden_states.to(torch.float32).square().mean(-1, keepdim=True) - hidden_states = hidden_states * torch.rsqrt(variance + self.module.eps) - hidden_states = hidden_states.to(input_dtype) - if self.module.weight is not None: - weight = cast_weight(self.module, hidden_states, dtype=torch.bfloat16, device="cuda") - hidden_states = hidden_states * weight - return 
hidden_states - - class Conv3d(torch.nn.Conv3d): - def __init__(self, *args, dtype=torch.bfloat16, device="cuda", **kwargs): - super().__init__(*args, **kwargs) - self.dtype = dtype - self.device = device - - def forward(self, x): - weight, bias = cast_bias_weight(self, x, dtype=self.dtype, device=self.device) - return torch.nn.functional.conv3d(x, weight, bias, self.stride, self.padding, self.dilation, self.groups) - - class LayerNorm(torch.nn.LayerNorm): - def __init__(self, *args, dtype=torch.bfloat16, device="cuda", **kwargs): - super().__init__(*args, **kwargs) - self.dtype = dtype - self.device = device - - def forward(self, x): - if self.weight is not None and self.bias is not None: - weight, bias = cast_bias_weight(self, x, dtype=self.dtype, device=self.device) - return torch.nn.functional.layer_norm(x, self.normalized_shape, weight, bias, self.eps) - else: - return torch.nn.functional.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps) - - def replace_layer(model, dtype=torch.bfloat16, device="cuda"): - for name, module in model.named_children(): - if isinstance(module, torch.nn.Linear): - with init_weights_on_device(): - new_layer = quantized_layer.Linear( - module.in_features, module.out_features, bias=module.bias is not None, - dtype=dtype, device=device - ) - new_layer.load_state_dict(module.state_dict(), assign=True) - setattr(model, name, new_layer) - elif isinstance(module, torch.nn.Conv3d): - with init_weights_on_device(): - new_layer = quantized_layer.Conv3d( - module.in_channels, module.out_channels, kernel_size=module.kernel_size, stride=module.stride, - dtype=dtype, device=device - ) - new_layer.load_state_dict(module.state_dict(), assign=True) - setattr(model, name, new_layer) - elif isinstance(module, RMSNorm): - new_layer = quantized_layer.RMSNorm( - module, - dtype=dtype, device=device - ) - setattr(model, name, new_layer) - elif isinstance(module, torch.nn.LayerNorm): - with init_weights_on_device(): - new_layer = 
quantized_layer.LayerNorm( - module.normalized_shape, elementwise_affine=module.elementwise_affine, eps=module.eps, - dtype=dtype, device=device - ) - new_layer.load_state_dict(module.state_dict(), assign=True) - setattr(model, name, new_layer) - else: - replace_layer(module, dtype=dtype, device=device) - - replace_layer(self, dtype=dtype, device=device) - - @staticmethod - def state_dict_converter(): - return HunyuanVideoDiTStateDictConverter() - - -class HunyuanVideoDiTStateDictConverter: - def __init__(self): - pass - - def from_civitai(self, state_dict): - origin_hash_key = hash_state_dict_keys(state_dict, with_shape=True) - if "module" in state_dict: - state_dict = state_dict["module"] - direct_dict = { - "img_in.proj": "img_in.proj", - "time_in.mlp.0": "time_in.timestep_embedder.0", - "time_in.mlp.2": "time_in.timestep_embedder.2", - "vector_in.in_layer": "vector_in.0", - "vector_in.out_layer": "vector_in.2", - "guidance_in.mlp.0": "guidance_in.timestep_embedder.0", - "guidance_in.mlp.2": "guidance_in.timestep_embedder.2", - "txt_in.input_embedder": "txt_in.input_embedder", - "txt_in.t_embedder.mlp.0": "txt_in.t_embedder.timestep_embedder.0", - "txt_in.t_embedder.mlp.2": "txt_in.t_embedder.timestep_embedder.2", - "txt_in.c_embedder.linear_1": "txt_in.c_embedder.0", - "txt_in.c_embedder.linear_2": "txt_in.c_embedder.2", - "final_layer.linear": "final_layer.linear", - "final_layer.adaLN_modulation.1": "final_layer.adaLN_modulation.1", - } - txt_suffix_dict = { - "norm1": "norm1", - "self_attn_qkv": "self_attn_qkv", - "self_attn_proj": "self_attn_proj", - "norm2": "norm2", - "mlp.fc1": "mlp.0", - "mlp.fc2": "mlp.2", - "adaLN_modulation.1": "adaLN_modulation.1", - } - double_suffix_dict = { - "img_mod.linear": "component_a.mod.linear", - "img_attn_qkv": "component_a.to_qkv", - "img_attn_q_norm": "component_a.norm_q", - "img_attn_k_norm": "component_a.norm_k", - "img_attn_proj": "component_a.to_out", - "img_mlp.fc1": "component_a.ff.0", - "img_mlp.fc2": 
"component_a.ff.2", - "txt_mod.linear": "component_b.mod.linear", - "txt_attn_qkv": "component_b.to_qkv", - "txt_attn_q_norm": "component_b.norm_q", - "txt_attn_k_norm": "component_b.norm_k", - "txt_attn_proj": "component_b.to_out", - "txt_mlp.fc1": "component_b.ff.0", - "txt_mlp.fc2": "component_b.ff.2", - } - single_suffix_dict = { - "linear1": ["to_qkv", "ff.0"], - "linear2": ["to_out", "ff.2"], - "q_norm": "norm_q", - "k_norm": "norm_k", - "modulation.linear": "mod.linear", - } - # single_suffix_dict = { - # "linear1": "linear1", - # "linear2": "linear2", - # "q_norm": "q_norm", - # "k_norm": "k_norm", - # "modulation.linear": "modulation.linear", - # } - state_dict_ = {} - for name, param in state_dict.items(): - names = name.split(".") - direct_name = ".".join(names[:-1]) - if direct_name in direct_dict: - name_ = direct_dict[direct_name] + "." + names[-1] - state_dict_[name_] = param - elif names[0] == "double_blocks": - prefix = ".".join(names[:2]) - suffix = ".".join(names[2:-1]) - name_ = prefix + "." + double_suffix_dict[suffix] + "." + names[-1] - state_dict_[name_] = param - elif names[0] == "single_blocks": - prefix = ".".join(names[:2]) - suffix = ".".join(names[2:-1]) - if isinstance(single_suffix_dict[suffix], list): - if suffix == "linear1": - name_a, name_b = single_suffix_dict[suffix] - param_a, param_b = torch.split(param, (3072*3, 3072*4), dim=0) - state_dict_[prefix + "." + name_a + "." + names[-1]] = param_a - state_dict_[prefix + "." + name_b + "." + names[-1]] = param_b - elif suffix == "linear2": - if names[-1] == "weight": - name_a, name_b = single_suffix_dict[suffix] - param_a, param_b = torch.split(param, (3072*1, 3072*4), dim=-1) - state_dict_[prefix + "." + name_a + "." + names[-1]] = param_a - state_dict_[prefix + "." + name_b + "." + names[-1]] = param_b - else: - name_a, name_b = single_suffix_dict[suffix] - state_dict_[prefix + "." + name_a + "." + names[-1]] = param - else: - pass - else: - name_ = prefix + "." 
+ single_suffix_dict[suffix] + "." + names[-1] - state_dict_[name_] = param - elif names[0] == "txt_in": - prefix = ".".join(names[:4]).replace(".individual_token_refiner.", ".") - suffix = ".".join(names[4:-1]) - name_ = prefix + "." + txt_suffix_dict[suffix] + "." + names[-1] - state_dict_[name_] = param - else: - pass - - return state_dict_ diff --git a/diffsynth/models/hunyuan_video_text_encoder.py b/diffsynth/models/hunyuan_video_text_encoder.py deleted file mode 100644 index ce7a680..0000000 --- a/diffsynth/models/hunyuan_video_text_encoder.py +++ /dev/null @@ -1,68 +0,0 @@ -from transformers import LlamaModel, LlamaConfig, DynamicCache, LlavaForConditionalGeneration -from copy import deepcopy -import torch - - -class HunyuanVideoLLMEncoder(LlamaModel): - - def __init__(self, config: LlamaConfig): - super().__init__(config) - self.auto_offload = False - - def enable_auto_offload(self, **kwargs): - self.auto_offload = True - - def forward(self, input_ids, attention_mask, hidden_state_skip_layer=2): - embed_tokens = deepcopy(self.embed_tokens).to(input_ids.device) if self.auto_offload else self.embed_tokens - inputs_embeds = embed_tokens(input_ids) - - past_key_values = DynamicCache() - - cache_position = torch.arange(0, inputs_embeds.shape[1], device=inputs_embeds.device) - position_ids = cache_position.unsqueeze(0) - - causal_mask = self._update_causal_mask(attention_mask, inputs_embeds, cache_position, None, False) - hidden_states = inputs_embeds - - # create position embeddings to be shared across the decoder layers - rotary_emb = deepcopy(self.rotary_emb).to(input_ids.device) if self.auto_offload else self.rotary_emb - position_embeddings = rotary_emb(hidden_states, position_ids) - - # decoder layers - for layer_id, decoder_layer in enumerate(self.layers): - if self.auto_offload: - decoder_layer = deepcopy(decoder_layer).to(hidden_states.device) - layer_outputs = decoder_layer( - hidden_states, - attention_mask=causal_mask, - position_ids=position_ids, - 
past_key_value=past_key_values, - output_attentions=False, - use_cache=True, - cache_position=cache_position, - position_embeddings=position_embeddings, - ) - hidden_states = layer_outputs[0] - if layer_id + hidden_state_skip_layer + 1 >= len(self.layers): - break - - return hidden_states - - -class HunyuanVideoMLLMEncoder(LlavaForConditionalGeneration): - - def __init__(self, config): - super().__init__(config) - self.auto_offload = False - - def enable_auto_offload(self, **kwargs): - self.auto_offload = True - - # TODO: implement the low VRAM inference for MLLM. - def forward(self, input_ids, pixel_values, attention_mask, hidden_state_skip_layer=2): - outputs = super().forward(input_ids=input_ids, - attention_mask=attention_mask, - output_hidden_states=True, - pixel_values=pixel_values) - hidden_state = outputs.hidden_states[-(hidden_state_skip_layer + 1)] - return hidden_state diff --git a/diffsynth/models/hunyuan_video_vae_decoder.py b/diffsynth/models/hunyuan_video_vae_decoder.py deleted file mode 100644 index ae09ff8..0000000 --- a/diffsynth/models/hunyuan_video_vae_decoder.py +++ /dev/null @@ -1,507 +0,0 @@ -import torch -import torch.nn as nn -import torch.nn.functional as F -from einops import rearrange -import numpy as np -from tqdm import tqdm -from einops import repeat - - -class CausalConv3d(nn.Module): - - def __init__(self, in_channel, out_channel, kernel_size, stride=1, dilation=1, pad_mode='replicate', **kwargs): - super().__init__() - self.pad_mode = pad_mode - self.time_causal_padding = (kernel_size // 2, kernel_size // 2, kernel_size // 2, kernel_size // 2, kernel_size - 1, 0 - ) # W, H, T - self.conv = nn.Conv3d(in_channel, out_channel, kernel_size, stride=stride, dilation=dilation, **kwargs) - - def forward(self, x): - x = F.pad(x, self.time_causal_padding, mode=self.pad_mode) - return self.conv(x) - - -class UpsampleCausal3D(nn.Module): - - def __init__(self, channels, use_conv=False, out_channels=None, kernel_size=None, bias=True, 
upsample_factor=(2, 2, 2)): - super().__init__() - self.channels = channels - self.out_channels = out_channels or channels - self.upsample_factor = upsample_factor - self.conv = None - if use_conv: - kernel_size = 3 if kernel_size is None else kernel_size - self.conv = CausalConv3d(self.channels, self.out_channels, kernel_size=kernel_size, bias=bias) - - def forward(self, hidden_states): - # Cast to float32 to as 'upsample_nearest2d_out_frame' op does not support bfloat16 - dtype = hidden_states.dtype - if dtype == torch.bfloat16: - hidden_states = hidden_states.to(torch.float32) - - # upsample_nearest_nhwc fails with large batch sizes. see https://github.com/huggingface/diffusers/issues/984 - if hidden_states.shape[0] >= 64: - hidden_states = hidden_states.contiguous() - - # interpolate - B, C, T, H, W = hidden_states.shape - first_h, other_h = hidden_states.split((1, T - 1), dim=2) - if T > 1: - other_h = F.interpolate(other_h, scale_factor=self.upsample_factor, mode="nearest") - first_h = F.interpolate(first_h.squeeze(2), scale_factor=self.upsample_factor[1:], mode="nearest").unsqueeze(2) - hidden_states = torch.cat((first_h, other_h), dim=2) if T > 1 else first_h - - # If the input is bfloat16, we cast back to bfloat16 - if dtype == torch.bfloat16: - hidden_states = hidden_states.to(dtype) - - if self.conv: - hidden_states = self.conv(hidden_states) - - return hidden_states - - -class ResnetBlockCausal3D(nn.Module): - - def __init__(self, in_channels, out_channels=None, dropout=0.0, groups=32, eps=1e-6, conv_shortcut_bias=True): - super().__init__() - self.pre_norm = True - self.in_channels = in_channels - out_channels = in_channels if out_channels is None else out_channels - self.out_channels = out_channels - - self.norm1 = nn.GroupNorm(num_groups=groups, num_channels=in_channels, eps=eps, affine=True) - self.conv1 = CausalConv3d(in_channels, out_channels, kernel_size=3, stride=1) - - self.norm2 = nn.GroupNorm(num_groups=groups, num_channels=out_channels, 
eps=eps, affine=True) - self.conv2 = CausalConv3d(out_channels, out_channels, kernel_size=3, stride=1) - - self.dropout = nn.Dropout(dropout) - self.nonlinearity = nn.SiLU() - - self.conv_shortcut = None - if in_channels != out_channels: - self.conv_shortcut = CausalConv3d(in_channels, out_channels, kernel_size=1, stride=1, bias=conv_shortcut_bias) - - def forward(self, input_tensor): - hidden_states = input_tensor - # conv1 - hidden_states = self.norm1(hidden_states) - hidden_states = self.nonlinearity(hidden_states) - hidden_states = self.conv1(hidden_states) - - # conv2 - hidden_states = self.norm2(hidden_states) - hidden_states = self.nonlinearity(hidden_states) - hidden_states = self.dropout(hidden_states) - hidden_states = self.conv2(hidden_states) - # shortcut - if self.conv_shortcut is not None: - input_tensor = (self.conv_shortcut(input_tensor)) - # shortcut and scale - output_tensor = input_tensor + hidden_states - - return output_tensor - - -def prepare_causal_attention_mask(n_frame, n_hw, dtype, device, batch_size=None): - seq_len = n_frame * n_hw - mask = torch.full((seq_len, seq_len), float("-inf"), dtype=dtype, device=device) - for i in range(seq_len): - i_frame = i // n_hw - mask[i, :(i_frame + 1) * n_hw] = 0 - if batch_size is not None: - mask = mask.unsqueeze(0).expand(batch_size, -1, -1) - return mask - - -class Attention(nn.Module): - - def __init__(self, - in_channels, - num_heads, - head_dim, - num_groups=32, - dropout=0.0, - eps=1e-6, - bias=True, - residual_connection=True): - super().__init__() - self.num_heads = num_heads - self.head_dim = head_dim - self.residual_connection = residual_connection - dim_inner = head_dim * num_heads - self.group_norm = nn.GroupNorm(num_groups=num_groups, num_channels=in_channels, eps=eps, affine=True) - self.to_q = nn.Linear(in_channels, dim_inner, bias=bias) - self.to_k = nn.Linear(in_channels, dim_inner, bias=bias) - self.to_v = nn.Linear(in_channels, dim_inner, bias=bias) - self.to_out = 
nn.Sequential(nn.Linear(dim_inner, in_channels, bias=bias), nn.Dropout(dropout)) - - def forward(self, input_tensor, attn_mask=None): - hidden_states = self.group_norm(input_tensor.transpose(1, 2)).transpose(1, 2) - batch_size = hidden_states.shape[0] - - q = self.to_q(hidden_states) - k = self.to_k(hidden_states) - v = self.to_v(hidden_states) - - q = q.view(batch_size, -1, self.num_heads, self.head_dim).transpose(1, 2) - k = k.view(batch_size, -1, self.num_heads, self.head_dim).transpose(1, 2) - v = v.view(batch_size, -1, self.num_heads, self.head_dim).transpose(1, 2) - - if attn_mask is not None: - attn_mask = attn_mask.view(batch_size, self.num_heads, -1, attn_mask.shape[-1]) - hidden_states = F.scaled_dot_product_attention(q, k, v, attn_mask=attn_mask) - hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, self.num_heads * self.head_dim) - hidden_states = self.to_out(hidden_states) - if self.residual_connection: - output_tensor = input_tensor + hidden_states - return output_tensor - - -class UNetMidBlockCausal3D(nn.Module): - - def __init__(self, in_channels, dropout=0.0, num_layers=1, eps=1e-6, num_groups=32, attention_head_dim=None): - super().__init__() - resnets = [ - ResnetBlockCausal3D( - in_channels=in_channels, - out_channels=in_channels, - dropout=dropout, - groups=num_groups, - eps=eps, - ) - ] - attentions = [] - attention_head_dim = attention_head_dim or in_channels - - for _ in range(num_layers): - attentions.append( - Attention( - in_channels, - num_heads=in_channels // attention_head_dim, - head_dim=attention_head_dim, - num_groups=num_groups, - dropout=dropout, - eps=eps, - bias=True, - residual_connection=True, - )) - - resnets.append( - ResnetBlockCausal3D( - in_channels=in_channels, - out_channels=in_channels, - dropout=dropout, - groups=num_groups, - eps=eps, - )) - - self.attentions = nn.ModuleList(attentions) - self.resnets = nn.ModuleList(resnets) - - def forward(self, hidden_states): - hidden_states = 
self.resnets[0](hidden_states) - for attn, resnet in zip(self.attentions, self.resnets[1:]): - B, C, T, H, W = hidden_states.shape - hidden_states = rearrange(hidden_states, "b c f h w -> b (f h w) c") - attn_mask = prepare_causal_attention_mask(T, H * W, hidden_states.dtype, hidden_states.device, batch_size=B) - hidden_states = attn(hidden_states, attn_mask=attn_mask) - hidden_states = rearrange(hidden_states, "b (f h w) c -> b c f h w", f=T, h=H, w=W) - hidden_states = resnet(hidden_states) - - return hidden_states - - -class UpDecoderBlockCausal3D(nn.Module): - - def __init__( - self, - in_channels, - out_channels, - dropout=0.0, - num_layers=1, - eps=1e-6, - num_groups=32, - add_upsample=True, - upsample_scale_factor=(2, 2, 2), - ): - super().__init__() - resnets = [] - for i in range(num_layers): - cur_in_channel = in_channels if i == 0 else out_channels - resnets.append( - ResnetBlockCausal3D( - in_channels=cur_in_channel, - out_channels=out_channels, - groups=num_groups, - dropout=dropout, - eps=eps, - )) - self.resnets = nn.ModuleList(resnets) - - self.upsamplers = None - if add_upsample: - self.upsamplers = nn.ModuleList([ - UpsampleCausal3D( - out_channels, - use_conv=True, - out_channels=out_channels, - upsample_factor=upsample_scale_factor, - ) - ]) - - def forward(self, hidden_states): - for resnet in self.resnets: - hidden_states = resnet(hidden_states) - if self.upsamplers is not None: - for upsampler in self.upsamplers: - hidden_states = upsampler(hidden_states) - return hidden_states - - -class DecoderCausal3D(nn.Module): - - def __init__( - self, - in_channels=16, - out_channels=3, - eps=1e-6, - dropout=0.0, - block_out_channels=[128, 256, 512, 512], - layers_per_block=2, - num_groups=32, - time_compression_ratio=4, - spatial_compression_ratio=8, - gradient_checkpointing=False, - ): - super().__init__() - self.layers_per_block = layers_per_block - - self.conv_in = CausalConv3d(in_channels, block_out_channels[-1], kernel_size=3, stride=1) - 
self.up_blocks = nn.ModuleList([]) - - # mid - self.mid_block = UNetMidBlockCausal3D( - in_channels=block_out_channels[-1], - dropout=dropout, - eps=eps, - num_groups=num_groups, - attention_head_dim=block_out_channels[-1], - ) - - # up - reversed_block_out_channels = list(reversed(block_out_channels)) - output_channel = reversed_block_out_channels[0] - for i in range(len(block_out_channels)): - prev_output_channel = output_channel - output_channel = reversed_block_out_channels[i] - is_final_block = i == len(block_out_channels) - 1 - num_spatial_upsample_layers = int(np.log2(spatial_compression_ratio)) - num_time_upsample_layers = int(np.log2(time_compression_ratio)) - - add_spatial_upsample = bool(i < num_spatial_upsample_layers) - add_time_upsample = bool(i >= len(block_out_channels) - 1 - num_time_upsample_layers and not is_final_block) - - upsample_scale_factor_HW = (2, 2) if add_spatial_upsample else (1, 1) - upsample_scale_factor_T = (2,) if add_time_upsample else (1,) - upsample_scale_factor = tuple(upsample_scale_factor_T + upsample_scale_factor_HW) - - up_block = UpDecoderBlockCausal3D( - in_channels=prev_output_channel, - out_channels=output_channel, - dropout=dropout, - num_layers=layers_per_block + 1, - eps=eps, - num_groups=num_groups, - add_upsample=bool(add_spatial_upsample or add_time_upsample), - upsample_scale_factor=upsample_scale_factor, - ) - - self.up_blocks.append(up_block) - prev_output_channel = output_channel - - # out - self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[0], num_groups=num_groups, eps=eps) - self.conv_act = nn.SiLU() - self.conv_out = CausalConv3d(block_out_channels[0], out_channels, kernel_size=3) - - self.gradient_checkpointing = gradient_checkpointing - - def forward(self, hidden_states): - hidden_states = self.conv_in(hidden_states) - if self.training and self.gradient_checkpointing: - - def create_custom_forward(module): - - def custom_forward(*inputs): - return module(*inputs) - - return custom_forward 
- - # middle - hidden_states = torch.utils.checkpoint.checkpoint( - create_custom_forward(self.mid_block), - hidden_states, - use_reentrant=False, - ) - # up - for up_block in self.up_blocks: - hidden_states = torch.utils.checkpoint.checkpoint( - create_custom_forward(up_block), - hidden_states, - use_reentrant=False, - ) - else: - # middle - hidden_states = self.mid_block(hidden_states) - # up - for up_block in self.up_blocks: - hidden_states = up_block(hidden_states) - # post-process - hidden_states = self.conv_norm_out(hidden_states) - hidden_states = self.conv_act(hidden_states) - hidden_states = self.conv_out(hidden_states) - - return hidden_states - - -class HunyuanVideoVAEDecoder(nn.Module): - - def __init__( - self, - in_channels=16, - out_channels=3, - eps=1e-6, - dropout=0.0, - block_out_channels=[128, 256, 512, 512], - layers_per_block=2, - num_groups=32, - time_compression_ratio=4, - spatial_compression_ratio=8, - gradient_checkpointing=False, - ): - super().__init__() - self.decoder = DecoderCausal3D( - in_channels=in_channels, - out_channels=out_channels, - eps=eps, - dropout=dropout, - block_out_channels=block_out_channels, - layers_per_block=layers_per_block, - num_groups=num_groups, - time_compression_ratio=time_compression_ratio, - spatial_compression_ratio=spatial_compression_ratio, - gradient_checkpointing=gradient_checkpointing, - ) - self.post_quant_conv = nn.Conv3d(in_channels, in_channels, kernel_size=1) - self.scaling_factor = 0.476986 - - - def forward(self, latents): - latents = latents / self.scaling_factor - latents = self.post_quant_conv(latents) - dec = self.decoder(latents) - return dec - - - def build_1d_mask(self, length, left_bound, right_bound, border_width): - x = torch.ones((length,)) - if not left_bound: - x[:border_width] = (torch.arange(border_width) + 1) / border_width - if not right_bound: - x[-border_width:] = torch.flip((torch.arange(border_width) + 1) / border_width, dims=(0,)) - return x - - - def build_mask(self, 
data, is_bound, border_width): - _, _, T, H, W = data.shape - t = self.build_1d_mask(T, is_bound[0], is_bound[1], border_width[0]) - h = self.build_1d_mask(H, is_bound[2], is_bound[3], border_width[1]) - w = self.build_1d_mask(W, is_bound[4], is_bound[5], border_width[2]) - - t = repeat(t, "T -> T H W", T=T, H=H, W=W) - h = repeat(h, "H -> T H W", T=T, H=H, W=W) - w = repeat(w, "W -> T H W", T=T, H=H, W=W) - - mask = torch.stack([t, h, w]).min(dim=0).values - mask = rearrange(mask, "T H W -> 1 1 T H W") - return mask - - - def tile_forward(self, hidden_states, tile_size, tile_stride): - B, C, T, H, W = hidden_states.shape - size_t, size_h, size_w = tile_size - stride_t, stride_h, stride_w = tile_stride - - # Split tasks - tasks = [] - for t in range(0, T, stride_t): - if (t-stride_t >= 0 and t-stride_t+size_t >= T): continue - for h in range(0, H, stride_h): - if (h-stride_h >= 0 and h-stride_h+size_h >= H): continue - for w in range(0, W, stride_w): - if (w-stride_w >= 0 and w-stride_w+size_w >= W): continue - t_, h_, w_ = t + size_t, h + size_h, w + size_w - tasks.append((t, t_, h, h_, w, w_)) - - # Run - torch_dtype = self.post_quant_conv.weight.dtype - data_device = hidden_states.device - computation_device = self.post_quant_conv.weight.device - - weight = torch.zeros((1, 1, (T - 1) * 4 + 1, H * 8, W * 8), dtype=torch_dtype, device=data_device) - values = torch.zeros((B, 3, (T - 1) * 4 + 1, H * 8, W * 8), dtype=torch_dtype, device=data_device) - - for t, t_, h, h_, w, w_ in tqdm(tasks, desc="VAE decoding"): - hidden_states_batch = hidden_states[:, :, t:t_, h:h_, w:w_].to(computation_device) - hidden_states_batch = self.forward(hidden_states_batch).to(data_device) - if t > 0: - hidden_states_batch = hidden_states_batch[:, :, 1:] - - mask = self.build_mask( - hidden_states_batch, - is_bound=(t==0, t_>=T, h==0, h_>=H, w==0, w_>=W), - border_width=((size_t - stride_t) * 4, (size_h - stride_h) * 8, (size_w - stride_w) * 8) - ).to(dtype=torch_dtype, 
device=data_device) - - target_t = 0 if t==0 else t * 4 + 1 - target_h = h * 8 - target_w = w * 8 - values[ - :, - :, - target_t: target_t + hidden_states_batch.shape[2], - target_h: target_h + hidden_states_batch.shape[3], - target_w: target_w + hidden_states_batch.shape[4], - ] += hidden_states_batch * mask - weight[ - :, - :, - target_t: target_t + hidden_states_batch.shape[2], - target_h: target_h + hidden_states_batch.shape[3], - target_w: target_w + hidden_states_batch.shape[4], - ] += mask - return values / weight - - - def decode_video(self, latents, tile_size=(17, 32, 32), tile_stride=(12, 24, 24)): - latents = latents.to(self.post_quant_conv.weight.dtype) - return self.tile_forward(latents, tile_size=tile_size, tile_stride=tile_stride) - - @staticmethod - def state_dict_converter(): - return HunyuanVideoVAEDecoderStateDictConverter() - - -class HunyuanVideoVAEDecoderStateDictConverter: - - def __init__(self): - pass - - def from_diffusers(self, state_dict): - state_dict_ = {} - for name in state_dict: - if name.startswith('decoder.') or name.startswith('post_quant_conv.'): - state_dict_[name] = state_dict[name] - return state_dict_ diff --git a/diffsynth/models/hunyuan_video_vae_encoder.py b/diffsynth/models/hunyuan_video_vae_encoder.py deleted file mode 100644 index faaaeb9..0000000 --- a/diffsynth/models/hunyuan_video_vae_encoder.py +++ /dev/null @@ -1,307 +0,0 @@ -import torch -import torch.nn as nn -import torch.nn.functional as F -from einops import rearrange, repeat -import numpy as np -from tqdm import tqdm -from .hunyuan_video_vae_decoder import CausalConv3d, ResnetBlockCausal3D, UNetMidBlockCausal3D - - -class DownsampleCausal3D(nn.Module): - - def __init__(self, channels, out_channels, kernel_size=3, bias=True, stride=2): - super().__init__() - self.conv = CausalConv3d(channels, out_channels, kernel_size, stride=stride, bias=bias) - - def forward(self, hidden_states): - hidden_states = self.conv(hidden_states) - return hidden_states - - -class 
DownEncoderBlockCausal3D(nn.Module): - - def __init__( - self, - in_channels, - out_channels, - dropout=0.0, - num_layers=1, - eps=1e-6, - num_groups=32, - add_downsample=True, - downsample_stride=2, - ): - - super().__init__() - resnets = [] - for i in range(num_layers): - cur_in_channel = in_channels if i == 0 else out_channels - resnets.append( - ResnetBlockCausal3D( - in_channels=cur_in_channel, - out_channels=out_channels, - groups=num_groups, - dropout=dropout, - eps=eps, - )) - self.resnets = nn.ModuleList(resnets) - - self.downsamplers = None - if add_downsample: - self.downsamplers = nn.ModuleList([DownsampleCausal3D( - out_channels, - out_channels, - stride=downsample_stride, - )]) - - def forward(self, hidden_states): - for resnet in self.resnets: - hidden_states = resnet(hidden_states) - - if self.downsamplers is not None: - for downsampler in self.downsamplers: - hidden_states = downsampler(hidden_states) - - return hidden_states - - -class EncoderCausal3D(nn.Module): - - def __init__( - self, - in_channels: int = 3, - out_channels: int = 16, - eps=1e-6, - dropout=0.0, - block_out_channels=[128, 256, 512, 512], - layers_per_block=2, - num_groups=32, - time_compression_ratio: int = 4, - spatial_compression_ratio: int = 8, - gradient_checkpointing=False, - ): - super().__init__() - self.conv_in = CausalConv3d(in_channels, block_out_channels[0], kernel_size=3, stride=1) - self.down_blocks = nn.ModuleList([]) - - # down - output_channel = block_out_channels[0] - for i in range(len(block_out_channels)): - input_channel = output_channel - output_channel = block_out_channels[i] - is_final_block = i == len(block_out_channels) - 1 - num_spatial_downsample_layers = int(np.log2(spatial_compression_ratio)) - num_time_downsample_layers = int(np.log2(time_compression_ratio)) - - add_spatial_downsample = bool(i < num_spatial_downsample_layers) - add_time_downsample = bool(i >= (len(block_out_channels) - 1 - num_time_downsample_layers) and not is_final_block) - - 
downsample_stride_HW = (2, 2) if add_spatial_downsample else (1, 1) - downsample_stride_T = (2,) if add_time_downsample else (1,) - downsample_stride = tuple(downsample_stride_T + downsample_stride_HW) - down_block = DownEncoderBlockCausal3D( - in_channels=input_channel, - out_channels=output_channel, - dropout=dropout, - num_layers=layers_per_block, - eps=eps, - num_groups=num_groups, - add_downsample=bool(add_spatial_downsample or add_time_downsample), - downsample_stride=downsample_stride, - ) - self.down_blocks.append(down_block) - - # mid - self.mid_block = UNetMidBlockCausal3D( - in_channels=block_out_channels[-1], - dropout=dropout, - eps=eps, - num_groups=num_groups, - attention_head_dim=block_out_channels[-1], - ) - # out - self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[-1], num_groups=num_groups, eps=eps) - self.conv_act = nn.SiLU() - self.conv_out = CausalConv3d(block_out_channels[-1], 2 * out_channels, kernel_size=3) - - self.gradient_checkpointing = gradient_checkpointing - - def forward(self, hidden_states): - hidden_states = self.conv_in(hidden_states) - if self.training and self.gradient_checkpointing: - - def create_custom_forward(module): - - def custom_forward(*inputs): - return module(*inputs) - - return custom_forward - - # down - for down_block in self.down_blocks: - torch.utils.checkpoint.checkpoint( - create_custom_forward(down_block), - hidden_states, - use_reentrant=False, - ) - # middle - hidden_states = torch.utils.checkpoint.checkpoint( - create_custom_forward(self.mid_block), - hidden_states, - use_reentrant=False, - ) - else: - # down - for down_block in self.down_blocks: - hidden_states = down_block(hidden_states) - # middle - hidden_states = self.mid_block(hidden_states) - # post-process - hidden_states = self.conv_norm_out(hidden_states) - hidden_states = self.conv_act(hidden_states) - hidden_states = self.conv_out(hidden_states) - - return hidden_states - - -class HunyuanVideoVAEEncoder(nn.Module): - - def 
__init__( - self, - in_channels=3, - out_channels=16, - eps=1e-6, - dropout=0.0, - block_out_channels=[128, 256, 512, 512], - layers_per_block=2, - num_groups=32, - time_compression_ratio=4, - spatial_compression_ratio=8, - gradient_checkpointing=False, - ): - super().__init__() - self.encoder = EncoderCausal3D( - in_channels=in_channels, - out_channels=out_channels, - eps=eps, - dropout=dropout, - block_out_channels=block_out_channels, - layers_per_block=layers_per_block, - num_groups=num_groups, - time_compression_ratio=time_compression_ratio, - spatial_compression_ratio=spatial_compression_ratio, - gradient_checkpointing=gradient_checkpointing, - ) - self.quant_conv = nn.Conv3d(2 * out_channels, 2 * out_channels, kernel_size=1) - self.scaling_factor = 0.476986 - - - def forward(self, images): - latents = self.encoder(images) - latents = self.quant_conv(latents) - latents = latents[:, :16] - latents = latents * self.scaling_factor - return latents - - - def build_1d_mask(self, length, left_bound, right_bound, border_width): - x = torch.ones((length,)) - if not left_bound: - x[:border_width] = (torch.arange(border_width) + 1) / border_width - if not right_bound: - x[-border_width:] = torch.flip((torch.arange(border_width) + 1) / border_width, dims=(0,)) - return x - - - def build_mask(self, data, is_bound, border_width): - _, _, T, H, W = data.shape - t = self.build_1d_mask(T, is_bound[0], is_bound[1], border_width[0]) - h = self.build_1d_mask(H, is_bound[2], is_bound[3], border_width[1]) - w = self.build_1d_mask(W, is_bound[4], is_bound[5], border_width[2]) - - t = repeat(t, "T -> T H W", T=T, H=H, W=W) - h = repeat(h, "H -> T H W", T=T, H=H, W=W) - w = repeat(w, "W -> T H W", T=T, H=H, W=W) - - mask = torch.stack([t, h, w]).min(dim=0).values - mask = rearrange(mask, "T H W -> 1 1 T H W") - return mask - - - def tile_forward(self, hidden_states, tile_size, tile_stride): - B, C, T, H, W = hidden_states.shape - size_t, size_h, size_w = tile_size - stride_t, 
stride_h, stride_w = tile_stride - - # Split tasks - tasks = [] - for t in range(0, T, stride_t): - if (t-stride_t >= 0 and t-stride_t+size_t >= T): continue - for h in range(0, H, stride_h): - if (h-stride_h >= 0 and h-stride_h+size_h >= H): continue - for w in range(0, W, stride_w): - if (w-stride_w >= 0 and w-stride_w+size_w >= W): continue - t_, h_, w_ = t + size_t, h + size_h, w + size_w - tasks.append((t, t_, h, h_, w, w_)) - - # Run - torch_dtype = self.quant_conv.weight.dtype - data_device = hidden_states.device - computation_device = self.quant_conv.weight.device - - weight = torch.zeros((1, 1, (T - 1) // 4 + 1, H // 8, W // 8), dtype=torch_dtype, device=data_device) - values = torch.zeros((B, 16, (T - 1) // 4 + 1, H // 8, W // 8), dtype=torch_dtype, device=data_device) - - for t, t_, h, h_, w, w_ in tqdm(tasks, desc="VAE encoding"): - hidden_states_batch = hidden_states[:, :, t:t_, h:h_, w:w_].to(computation_device) - hidden_states_batch = self.forward(hidden_states_batch).to(data_device) - if t > 0: - hidden_states_batch = hidden_states_batch[:, :, 1:] - - mask = self.build_mask( - hidden_states_batch, - is_bound=(t==0, t_>=T, h==0, h_>=H, w==0, w_>=W), - border_width=((size_t - stride_t) // 4, (size_h - stride_h) // 8, (size_w - stride_w) // 8) - ).to(dtype=torch_dtype, device=data_device) - - target_t = 0 if t==0 else t // 4 + 1 - target_h = h // 8 - target_w = w // 8 - values[ - :, - :, - target_t: target_t + hidden_states_batch.shape[2], - target_h: target_h + hidden_states_batch.shape[3], - target_w: target_w + hidden_states_batch.shape[4], - ] += hidden_states_batch * mask - weight[ - :, - :, - target_t: target_t + hidden_states_batch.shape[2], - target_h: target_h + hidden_states_batch.shape[3], - target_w: target_w + hidden_states_batch.shape[4], - ] += mask - return values / weight - - - def encode_video(self, latents, tile_size=(65, 256, 256), tile_stride=(48, 192, 192)): - latents = latents.to(self.quant_conv.weight.dtype) - return 
self.tile_forward(latents, tile_size=tile_size, tile_stride=tile_stride) - - - @staticmethod - def state_dict_converter(): - return HunyuanVideoVAEEncoderStateDictConverter() - - -class HunyuanVideoVAEEncoderStateDictConverter: - - def __init__(self): - pass - - def from_diffusers(self, state_dict): - state_dict_ = {} - for name in state_dict: - if name.startswith('encoder.') or name.startswith('quant_conv.'): - state_dict_[name] = state_dict[name] - return state_dict_ diff --git a/diffsynth/models/kolors_text_encoder.py b/diffsynth/models/kolors_text_encoder.py deleted file mode 100644 index 2e92121..0000000 --- a/diffsynth/models/kolors_text_encoder.py +++ /dev/null @@ -1,1551 +0,0 @@ -""" -This model is copied from https://github.com/Kwai-Kolors/Kolors/tree/master/kolors/models. -We didn't modify this model. -The tensor operation is performed in the prompter. -""" - - -""" PyTorch ChatGLM model. """ - -import math -import copy -import warnings -import re -import sys - -import torch -import torch.utils.checkpoint -import torch.nn.functional as F -from torch import nn -from torch.nn import CrossEntropyLoss, LayerNorm -from torch.nn import CrossEntropyLoss, LayerNorm, MSELoss, BCEWithLogitsLoss -from torch.nn.utils import skip_init -from typing import Optional, Tuple, Union, List, Callable, Dict, Any -from copy import deepcopy - -from transformers.modeling_outputs import ( - BaseModelOutputWithPast, - CausalLMOutputWithPast, - SequenceClassifierOutputWithPast, -) -from transformers.modeling_utils import PreTrainedModel -from transformers.utils import logging -from transformers.generation.logits_process import LogitsProcessor -from transformers.generation.utils import LogitsProcessorList, StoppingCriteriaList, GenerationConfig, ModelOutput -from transformers import PretrainedConfig -from torch.nn.parameter import Parameter -import bz2 -import torch -import base64 -import ctypes -from transformers.utils import logging -from typing import List - - - -logger = 
logging.get_logger(__name__) - -try: - from cpm_kernels.kernels.base import LazyKernelCModule, KernelFunction, round_up - - - class Kernel: - def __init__(self, code: bytes, function_names: List[str]): - self.code = code - self._function_names = function_names - self._cmodule = LazyKernelCModule(self.code) - - for name in self._function_names: - setattr(self, name, KernelFunction(self._cmodule, name)) - - - quantization_code = "$QlpoOTFBWSZTWU9yuJUAQHN//////////f/n/8/n///n//bt4dTidcVx8X3V9FV/92/v4B7/AD5FBQFAAAChSgKpFCFAFVSigUAAAEKhSgUUqgFBKigqVREQAABQBQIANDTTIGI00BkZBkNGE0A0BkBkGQGRkaNAaAGQNBoGgDIAAYIGTI0DQAQAaGmmQMRpoDIyDIaMJoBoDIDIMgMjI0aA0AMgaDQNAGQAAwQMmRoGgAgA0NNMgYjTQGRkGQ0YTQDQGQGQZAZGRo0BoAZA0GgaAMgABggZMjQNABABoaaZAxGmgMjIMhowmgGgMgMgyAyMjRoDQAyBoNA0AZAADBAyZGgaAAmqU1NEgJqnptU/Sn4jRR6J6epk2pqb1Q/SgAPUGgyNNGjQ2SBpoAZAAGg0NB6mgDIAAAAA2oaApSREBNAARhGiYEaEwU8pvImlP0k2aam1GaGqbFNM1MHpTwmkepmyU9R6nqPKekHqNNPUxNGhp6n6p6QaZ6o9TG1GMqcoV9ly6nRanHlq6zPNbnGZNi6HSug+2nPiZ13XcnFYZW+45W11CumhzYhchOJ2GLLV1OBjBjGf4TptOddTSOcVxhqYZMYwZXZZY00zI1paX5X9J+b+f4e+x43RXSxXPOdquiGpduatGyXneN696M9t4HU2eR5XX/kPhP261NTx3JO1Ow7LyuDmeo9a7d351T1ZxnvnrvYnrXv/hXxPCeuYx2XsNmO003eg9J3Z6U7b23meJ4ri01OdzTk9BNO96brz+qT5nuvvH3ds/G+m/JcG/F2XYuhXlvO+jP7U3XgrzPN/lr8Sf1n6j4j7jZs+s/T0tNaNNYzTs12rxjwztHlnire3Nzc3N1wuBwOBwXBvZfoHpD7rFmR99V5vj3aXza3xdBbXMalubTg/jIv5dfAi54Pdc75j4z412n3Npj3Ld/ENm7a3b/Cod6h/ret1/5vn/C+l+gdslMvgPSLJ8d8q+U66fevYn/tW1chleEtNTGlcHCbLRlq0tHzF5tsbbZZfHjjLgZu42XCuC3NrdjTasZGNzgxPIrGqp7r3p7L2p5XjnpPSmTd5XtzqnB6U87zzg1Ol0zd0zsLszxR6lkxp35u6/teL0L0W922cR7Lu1lpL9CsHirzuM2T+BgsyViT6LHcm0/Vr6U/7LGGyJeqTEjt0PHWhF5mCT7R9mtlDwriYv0Tyr/OxYt6qp5r0mPVT0608TqnqMZaarU2nFwrTzzlrs1ed7z1ux60wyr4ydCaTi3enW8x68x0zU7tXSlcmPSW1mGpWJMg4zmPC2lK96tp0OE80y4MfEvnZj8zGluR6b22ki1Ou9V2nCd9xovcPvcYMZYy0lvN60ScZ45vN6yeCeeXFb1lVjnnCar5fwXwE2bzJ4HI1XVPXfXZMm44GUsMpYsmLB65TuVdm0cl0b+i/wGNN66XjeV7zuPpHcnK/juhhjdfId5jMdE5nN0dGmmm2zZs2cexD5n9p/dY352XsvXHaZNWWsmmS1atjR452nYudzvqv2HMRyvNNnlMcDl3R2+yx2uVrBubTW9icHDVtbNX
lZm7jma1rM4VurZZd2y6nUau7ZXZ7bVU+mnoOVxZGMrVmvX60605JwmzGZhhhjTWtaaaMaaGTGmNMZasY0iX8VMUl8eepaIrzGSpemWOQyZORk2bNpjUybMmxqYmknCGCFynutfksaZpjTNMaaatM0xsxcGR0sociNqxNSmhhR1ZJPbsn8qyF0t2qH6iYBclclalbtTTcHTDsPaX6rlnElph2Jyumumtynv2Kk8GI7rsvXbIcJgHJOSaSXnnGaI3m87RtVXJOZ/YtgdTE6Wpha6ZlE8ayXkef1fh602r2WwvfMXtMdLlkfnLFdYYwYso+bWqm7yJqHXZGw2nrS5ZanSYnWlxBxMF1V940K2wdrI7R6OYf7DGGamMmTSbRhlS45xmVOumF1EyPCmHrrN8wwZOOrdNtLeMtzFzDlWnfTBxMk2NaXIZHBYxYLD4w8yju0ao65Vz1OIXoS9dLanwCe1PWrYuWMqf1if1z2k2yYfKJ741PDgno1ZQ8DRqvUny3mNoWTzGO6m1DkrJI8JiR5cSd+vZdGOO8nrMoc5+NDUFsMSXaZJeNlMmGLtJsovOsUp7I9S5VojKxF6bTVEelXqlfJobQr3LozSh2Jk7VcrVMfhXqszGWMzNqGhqZY0OadxkyyMssKugZR0KNFXBHlqwmJgTE/BNVMk6ItJXZMR0H47GpXv/DMOvNkmVuaV1PRfEdxuqc7Hcd+ZV/zTLaRxWk0nl9CdCeM6mn5rstHIBcpiuwmUZXeq81DacHI2rmrZ5SuE5mOZd6LQrZg9mx32TprA8BMo5jKN6yLTCi3WzQaZSuhzTtM1fUTGVpG8Tw+KXI0tjEpiWxtLYynOlktSbVlaI5kxP8TDH8kx50xoxi5KcA4pcja8KWLRlO/Ks6q06ergnvm1ca3Tq8Uw7LTUsmWyctXPWmpitl/uvGcWTGXGuAXDfhqazGmjkxcJW5hMMMMpYsXl2TZYtVOddG3XCarUt6Ptq9CZXSNzyuRzqRZOjsxdBbFVz6OA5HI43r1jityVlVpVkxmOsyaYWE1NTGq1sOVh36mHMcxtSvcy70edG0ZGR3I1Go1GRlV7mWWo1G0ZGRqlvH40l7o4m5xMWLLLYyNjnqc8556mdPqLJ31n/1nWOncxzG1tizrHs/Z+d2vP/B/l8wdJ6rHUn2nbbDq4p6htFtYzMMMTaZis1K5GKzGNmxhmUx2DDlZ/qNnIx41xnaMfCZWYaZWtNLTNW8ND4Fw1MyZOCdM428suKG1ehW8TesOydg7J+YYcD4cYR+8dFK6M4E3HM9ZfRNNL+Sn6rsl4DsrDl2HpPCnfxjGXtbZtYys1ttlyJ4T+BvexjGWRjMszK4Jpc77D3GyuVD7q0+G8m9G+2+rGm7cOR2y7FdtY2XUYx/oNlfRYxhMYyYZkyyg55enna9Kt/FFi6GMMwYwdwxWgxGMLKYmUyGExTKMZkMFhkymKuh0NOBNnBu+23LdwDoZYYzGGMxtORaTU1pjTGWTTGGtMrNWUsyyTTLLG1qy2ZjbK2DBllWqxMtBMaYZQmcE7zvvRcTkclUwdkxTaSdyySt/7fpL+T1v516Ji97fwr5JbLu305zMn5+GMTTZ9F+y7ExwmGVfG44yxn3dLv6l5i+Wth1jCrDq21nW9LqvvDzz3Vf3LLH/O/32TJ/erx3bXftO4eF+G956D952K/An4NfvOpjFjExjevP/UmE0fIoZXx6/w6lX/no3D0bLt+ixjieBM6ksRd0yB4Lt2SwYNE+gd1detlZWUnpiZfGfFaK+4PyCa/v18V8X75pe9fLXzp7l3VjF76vWZmHwGz1IZNWT7b8yddJ4q5kyrVdfru6atWc7bVYztL9Jf4GXvT+Y8m9/YsXP6H018a8D4XVOqvfzqeR+6yZOD8dPv0+U7/q5Pl+2dNb0MjzGVH5p6MNQ7cOWvw62U9aHE8DprDek+McLyvDz+te+9Zhq5+YTruufMcWMabqysTmZVWjKPfnK0w
yVcrsuhjZRdLkHNvD72b9abriOSGIxiLixMOoalNPXzy+wT/tf+U6HHONfsz+xe8ufHBdQWWGWLA9if0rsnmrxK5LvRZQeWsTCsrmOYy8VteVfuRfcVTtDLItLIsMYxZLdU/DbtSemxF6Z6Zo5WBXE4tFdCyVMMXMTEMZXVlS6Xec2T4e0tHsRcEuWshcJ2YsNF5rUx1E8ifCq6Z+ZP7qdCeu/aTwFd53l16/o0NOw6O3dLavP4Hbi4RdmuDk6DoYaninC0+o4uZjbJ7Rxeu0/FbuFg+q7DVS6fQe0rZ6NDGUNNU6DEqOaLTicKnYZMnBWruljQxoaS3dZhocDge0bSTyOvdAbG5hxe2xji7E/L55xX13wWNDi6HCekcFxfCPGxY0MXC+s7afWaMdDyjyr+o8Rudm/NabOZvdl274zH4f5XK9z6On1Pe/K5TdPAslg77BjuO6Y3eO7GqvOPG/stknp1leyvLL0Z7bl9I4noMvLkzytLhWYzrOZzLXCORe028rORzOg4N/L0HlMOQ3Pgmnbb6KczlabORpu980q37TBqRu0/p3PO6234Bl03Ynuz+9W7gnsEcmvYaYY3aMYY0wx3pYd+ujsXauWdaY5Xkbtl23fPzFHiDB/QMo0yFjBllYxTQYYyxkrwn7JufwJ/PfgJ+C83X69ni6zvXcnyXabv0ncbLwsceS+RNlyN2mnneJtX0ngYO0+e+0+UnA+Wch3ji8hj5an4h+i6XBySU4n+R0roVcbw5yvHrmr4Yw8Y7x6c+9POPYHI5HI5HI5HI5HGXGww4nE4nrVyOR8XeqPEO7PLOiukYa3Novk5hV4cdtYZLI93e+uxff2jRo0aNGjRo0aNG1bVtW1dy3m83m8+tQ5ZzHw3nObwOu8La9Rc1dtkdS8A3eTk823tnktXWlxN6Oixe06zrN70Isd9jiOgZFq9yfkPqP/SLhN2Myl8jDM43bl1nbcb4cO57jlh8Jow6pzXZdL4dyODTuuhu77FyO27DdwdRxmvO+O+3N2+BdqyTwLHVczDVY4UPE4O66/ZO2cx1LFzVdSXtF7G4HMbrauOHRw6c8FdZ5m9fHZHYZXfTlZquyynSyTTKke6vcffSD9pzPA/G7n7jxPmuhc1DHMynPMrGL6AdewYmwu5ko+UUyTwrMv27rPH1v1nGqd87+p6N6LU8k3NEng53xXyHS97+44OSg/sy/hn+Se6yfYNjW0/uTgP+PvWYzLMmjhcLB/gGpri6H83/84eUXWT6T9Hsv7785z/7z4icpW+zfXypuR7rx/gMdZb1/wC678pcs8/2a3mDitGHxl9mfPlll5MafWWqxk/eYuTDgcNMzDGWLWvsuglNxs53GtN6uWpktlW1tZZYcuinMMWmnNnJydze3b2Y1McBxrBkXw799izLMZZYyy0TkbsGM4p03S2uVu5s/XXUdSdec6smVxZYYGpVmT8A+8ajuEyV5FatkvVru2x6uxGXXbH4A+jvgP4GMYy3iPLXzq/6z65+E005ey+cwMZD3fZcqc6xpjTFjQ0P3U+e++cPYmTIwj0nrK5NPTfl3WvpfLtXDcb2HQMudYOxFXQBor4L4T6vrOauFctYXJQ++NUWmJe5bmx1jDiZS1dTqWxo4GR8jm3fttpmPHppk9PEyv4/y8/sO07XacOmcqc0x2Vi9BvNJvN5oW8x4mOsydpidRxMYJPx06m1bqPzq9KtK8sxXNXFodD/+MYYaJTLwOhc9brCsV18oOR1i4tXChyTkq4lf4y1Ke+9axjDHqs1mfBbMXuP4Hzi+X7t8vzv7bHerrUPgPCxhjre4fXdfLNtNM+Jd+Zdh8xd8wP87uNPoPgv4W7/5P2BuxfsMabNnMnza+54Pdi5U671GPZY8CehX8Voeoo7FHpkeEc6715FwHZrIrUrHaviPUbPZHND+IhczrP6FcYvhOZ0Di/ETt0OI+YwNWR9r7tpf6WDeZKZDB1+z2IthOl1mPyb5FluvEx9
h9d0NnM0Y1XPFkWIsk1WotJ0PBMmkvjvQTd0e71tfeV+8r8lQ/tpzpsmxJ+InrI/dj2UajUajVTUajatRqNRtGo1Go1Go4wjeMpZFMVV9CHbofPraLsJ3JpWV2XOoanCuFky4y3PPNxucK2uKC1Lbdb1eo+m5XomN6HfeZsabHLHRX/K+offtNGGmHWctcVcG44MdSqsOLY9VzX+Zxfxn2HPdWTpzWvkrtJ8M5zorrKcquRytJ5N5DZmcaW02l76nWO+BqPXm1A2Ry/0q71dH/mqrqeFjkYxjEXtsX8qubTk67rGycyqsdm4tZx5D6D5hhi0waaWmiaMP81Yjii5qxPlPuU/GfTL1Y5E6Jyfiq63qTa39A4J0sOGDgO9WF9bOXl0XfPRbsY2bPNKPy1YrFYrFYmRhhlTIyMjJWJYZHXuCXI8OoXsvfljGLFicNifpp2XunoPiG1wtx3p1Tah+/DD66OnVtVXP9rKbVxOnL0tR/rHtqB5UDErUVcl11D4qqvjpOcxX7armUNJB3LpW6bxVvD08e8h3odKKvyCFZBdSh2FVcST9xV3n3T8t1j7Kr9qgrqXg+13Pt5U7JCvFXVIV1YG5lRhkVYZJYYDDD4KOIMoHCp26WS8GB7uBh2zIdgq/PKyInjV2STShuoapUdCpX1yTwqq/z1VvET7Kh5nVPkO8YyxjLt2MaaMmWTLQvx3qnzltnXW0p2jxgbEtSny/Osv8Y9pLMXYoHVPAhkVdWVeODhR6q9/Sxe2liwwZWMVvFXfRkeIDxAePUPIrdJ4ey6yquzH+PD/bUOWAu05qVHtFd8rrKHSoeNIOUqrYr3FXyToqfYJgwmJdKpXXOwYYegNNGMzfZPp/t3t/DVs4zjNTN61rRqaWaa4NYbRjTa0tWwy2Y2tGN8ZO8ofNKq4j9SL7I+cSm4/6ovLV5HNXLI0jJidwrtk6ynCaP6Z++GjRlWS3tLeW129Mi9evxU9mtz6s5J3Z7M2ngTgnKvmpomxpaLCzPfmx0JWE+m3NLDDGOX47RctdYYNK5jakdqLkRlI39n590T5zctGSwwZZDJj6kW8XSi6ot2MmWWJ0DUT3nuvebBudScjZ79g8cWJ8av0k+/bE5WKd5MdbFpbDVMxu1DVMmtNZGJvq1mtRbn6M+g/kP0FwDwr7quZs7xosNGpbscyxhhd9TyJyFwbLcxlTasg75vW7TsV5K7ji44XPMMrdoj+Y3rT0Hie62nlYV/pwczzOmdLqLhYkzGMzCZWGMQzGMSsZYY6Di1t4nlJ+Em63mJxrVLxPbYxNEdgc1dU2iOKyoYYWjNrEeHTYybVk0atSa7ehuwsWMWTqn1TrnS6hYsi71d1+s+k+ic70e20fzE/VaTdxT9ZtU4GIXdeNx3X77guYYfpHeTQjaMX6brOu4OY4K7Y2d9mbHarI5ox3p4GpJ2Vd/Tst60f7j999pppjR+Q/Qf8J/VaORs3cji7FfFuN61+ui9s8hix1OCh5KGVV23BPXvZfz3CLyHpix+exi8z/KnCnosY2eunor+cxyPO/xJ0vKey9OvE9VjqaYu0x3Z3jd6o2b1T12D+F8l232lwaaacD5LE8LBxu7WTlbWraWpew8Xexjel3E+wWD4APITdNqR8F3R3T0lunCQ4GaE9R37DxeCYfcHi4xci5ovKfxVs55y2hf+65E/Xdp6jR5nrebTmi5incpkyOjs50JvrZwstbbW6kfuuQw+2mykf/EXNFzxfKTrxew929TR6bWnGL//F3JFOFCQT3K4lQ" - - kernels = Kernel( - bz2.decompress(base64.b64decode(quantization_code)), - [ - "int4WeightCompression", - "int4WeightExtractionFloat", - "int4WeightExtractionHalf", - "int8WeightExtractionFloat", - 
"int8WeightExtractionHalf", - ], - ) -except Exception as exception: - kernels = None - - -class W8A16Linear(torch.autograd.Function): - @staticmethod - def forward(ctx, inp: torch.Tensor, quant_w: torch.Tensor, scale_w: torch.Tensor, weight_bit_width): - ctx.inp_shape = inp.size() - ctx.weight_bit_width = weight_bit_width - out_features = quant_w.size(0) - inp = inp.contiguous().view(-1, inp.size(-1)) - weight = extract_weight_to_half(quant_w, scale_w, weight_bit_width) - ctx.weight_shape = weight.size() - output = inp.mm(weight.t()) - ctx.save_for_backward(inp, quant_w, scale_w) - return output.view(*(ctx.inp_shape[:-1] + (out_features,))) - - @staticmethod - def backward(ctx, grad_output: torch.Tensor): - inp, quant_w, scale_w = ctx.saved_tensors - weight = extract_weight_to_half(quant_w, scale_w, ctx.weight_bit_width) - grad_output = grad_output.contiguous().view(-1, weight.size(0)) - grad_input = grad_output.mm(weight) - grad_weight = grad_output.t().mm(inp) - return grad_input.view(ctx.inp_shape), grad_weight.view(ctx.weight_shape), None, None - - -def compress_int4_weight(weight: torch.Tensor): # (n, m) - with torch.cuda.device(weight.device): - n, m = weight.size(0), weight.size(1) - assert m % 2 == 0 - m = m // 2 - out = torch.empty(n, m, dtype=torch.int8, device="cuda") - stream = torch.cuda.current_stream() - - gridDim = (n, 1, 1) - blockDim = (min(round_up(m, 32), 1024), 1, 1) - - kernels.int4WeightCompression( - gridDim, - blockDim, - 0, - stream, - [ctypes.c_void_p(weight.data_ptr()), ctypes.c_void_p(out.data_ptr()), ctypes.c_int32(n), ctypes.c_int32(m)], - ) - return out - - -def extract_weight_to_half(weight: torch.Tensor, scale_list: torch.Tensor, source_bit_width: int): - assert scale_list.dtype in [torch.half, torch.bfloat16] - assert weight.dtype in [torch.int8] - if source_bit_width == 8: - return weight.to(scale_list.dtype) * scale_list[:, None] - elif source_bit_width == 4: - func = ( - kernels.int4WeightExtractionHalf if scale_list.dtype == 
torch.half else kernels.int4WeightExtractionBFloat16 - ) - else: - assert False, "Unsupported bit-width" - - with torch.cuda.device(weight.device): - n, m = weight.size(0), weight.size(1) - out = torch.empty(n, m * (8 // source_bit_width), dtype=scale_list.dtype, device="cuda") - stream = torch.cuda.current_stream() - - gridDim = (n, 1, 1) - blockDim = (min(round_up(m, 32), 1024), 1, 1) - - func( - gridDim, - blockDim, - 0, - stream, - [ - ctypes.c_void_p(weight.data_ptr()), - ctypes.c_void_p(scale_list.data_ptr()), - ctypes.c_void_p(out.data_ptr()), - ctypes.c_int32(n), - ctypes.c_int32(m), - ], - ) - return out - - -class QuantizedLinear(torch.nn.Module): - def __init__(self, weight_bit_width: int, weight, bias=None, device="cuda", dtype=None, empty_init=False): - super().__init__() - weight = weight.to(device) # ensure the weight is on the cuda device - assert str(weight.device).startswith( - 'cuda'), 'The weights that need to be quantified should be on the CUDA device' - self.weight_bit_width = weight_bit_width - shape = weight.shape - - if weight is None or empty_init: - self.weight = torch.empty(shape[0], shape[1] * weight_bit_width // 8, dtype=torch.int8, device=device) - self.weight_scale = torch.empty(shape[0], dtype=dtype, device=device) - else: - self.weight_scale = weight.abs().max(dim=-1).values / ((2 ** (weight_bit_width - 1)) - 1) - self.weight = torch.round(weight / self.weight_scale[:, None]).to(torch.int8) - if weight_bit_width == 4: - self.weight = compress_int4_weight(self.weight) - - self.weight = Parameter(self.weight.to(device), requires_grad=False) - self.weight_scale = Parameter(self.weight_scale.to(device), requires_grad=False) - self.bias = Parameter(bias.to(device), requires_grad=False) if bias is not None else None - - def forward(self, input): - output = W8A16Linear.apply(input, self.weight, self.weight_scale, self.weight_bit_width) - if self.bias is not None: - output = output + self.bias - return output - - -def quantize(model, 
weight_bit_width, empty_init=False, device=None): - """Replace fp16 linear with quantized linear""" - for layer in model.layers: - layer.self_attention.query_key_value = QuantizedLinear( - weight_bit_width=weight_bit_width, - weight=layer.self_attention.query_key_value.weight, - bias=layer.self_attention.query_key_value.bias, - dtype=layer.self_attention.query_key_value.weight.dtype, - device=layer.self_attention.query_key_value.weight.device if device is None else device, - empty_init=empty_init - ) - layer.self_attention.dense = QuantizedLinear( - weight_bit_width=weight_bit_width, - weight=layer.self_attention.dense.weight, - bias=layer.self_attention.dense.bias, - dtype=layer.self_attention.dense.weight.dtype, - device=layer.self_attention.dense.weight.device if device is None else device, - empty_init=empty_init - ) - layer.mlp.dense_h_to_4h = QuantizedLinear( - weight_bit_width=weight_bit_width, - weight=layer.mlp.dense_h_to_4h.weight, - bias=layer.mlp.dense_h_to_4h.bias, - dtype=layer.mlp.dense_h_to_4h.weight.dtype, - device=layer.mlp.dense_h_to_4h.weight.device if device is None else device, - empty_init=empty_init - ) - layer.mlp.dense_4h_to_h = QuantizedLinear( - weight_bit_width=weight_bit_width, - weight=layer.mlp.dense_4h_to_h.weight, - bias=layer.mlp.dense_4h_to_h.bias, - dtype=layer.mlp.dense_4h_to_h.weight.dtype, - device=layer.mlp.dense_4h_to_h.weight.device if device is None else device, - empty_init=empty_init - ) - - return model - - - -class ChatGLMConfig(PretrainedConfig): - model_type = "chatglm" - def __init__( - self, - num_layers=28, - padded_vocab_size=65024, - hidden_size=4096, - ffn_hidden_size=13696, - kv_channels=128, - num_attention_heads=32, - seq_length=2048, - hidden_dropout=0.0, - classifier_dropout=None, - attention_dropout=0.0, - layernorm_epsilon=1e-5, - rmsnorm=True, - apply_residual_connection_post_layernorm=False, - post_layer_norm=True, - add_bias_linear=False, - add_qkv_bias=False, - bias_dropout_fusion=True, - 
multi_query_attention=False, - multi_query_group_num=1, - apply_query_key_layer_scaling=True, - attention_softmax_in_fp32=True, - fp32_residual_connection=False, - quantization_bit=0, - pre_seq_len=None, - prefix_projection=False, - **kwargs - ): - self.num_layers = num_layers - self.vocab_size = padded_vocab_size - self.padded_vocab_size = padded_vocab_size - self.hidden_size = hidden_size - self.ffn_hidden_size = ffn_hidden_size - self.kv_channels = kv_channels - self.num_attention_heads = num_attention_heads - self.seq_length = seq_length - self.hidden_dropout = hidden_dropout - self.classifier_dropout = classifier_dropout - self.attention_dropout = attention_dropout - self.layernorm_epsilon = layernorm_epsilon - self.rmsnorm = rmsnorm - self.apply_residual_connection_post_layernorm = apply_residual_connection_post_layernorm - self.post_layer_norm = post_layer_norm - self.add_bias_linear = add_bias_linear - self.add_qkv_bias = add_qkv_bias - self.bias_dropout_fusion = bias_dropout_fusion - self.multi_query_attention = multi_query_attention - self.multi_query_group_num = multi_query_group_num - self.apply_query_key_layer_scaling = apply_query_key_layer_scaling - self.attention_softmax_in_fp32 = attention_softmax_in_fp32 - self.fp32_residual_connection = fp32_residual_connection - self.quantization_bit = quantization_bit - self.pre_seq_len = pre_seq_len - self.prefix_projection = prefix_projection - super().__init__(**kwargs) - - - -# flags required to enable jit fusion kernels - -if sys.platform != 'darwin': - torch._C._jit_set_profiling_mode(False) - torch._C._jit_set_profiling_executor(False) - torch._C._jit_override_can_fuse_on_cpu(True) - torch._C._jit_override_can_fuse_on_gpu(True) - -logger = logging.get_logger(__name__) - -_CHECKPOINT_FOR_DOC = "THUDM/ChatGLM" -_CONFIG_FOR_DOC = "ChatGLM6BConfig" - -CHATGLM_6B_PRETRAINED_MODEL_ARCHIVE_LIST = [ - "THUDM/chatglm3-6b-base", - # See all ChatGLM models at https://huggingface.co/models?filter=chatglm -] - - -def 
default_init(cls, *args, **kwargs): - return cls(*args, **kwargs) - - -class InvalidScoreLogitsProcessor(LogitsProcessor): - def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor: - if torch.isnan(scores).any() or torch.isinf(scores).any(): - scores.zero_() - scores[..., 5] = 5e4 - return scores - - -class PrefixEncoder(torch.nn.Module): - """ - The torch.nn model to encode the prefix - Input shape: (batch-size, prefix-length) - Output shape: (batch-size, prefix-length, 2*layers*hidden) - """ - - def __init__(self, config: ChatGLMConfig): - super().__init__() - self.prefix_projection = config.prefix_projection - if self.prefix_projection: - # Use a two-layer MLP to encode the prefix - kv_size = config.num_layers * config.kv_channels * config.multi_query_group_num * 2 - self.embedding = torch.nn.Embedding(config.pre_seq_len, kv_size) - self.trans = torch.nn.Sequential( - torch.nn.Linear(kv_size, config.hidden_size), - torch.nn.Tanh(), - torch.nn.Linear(config.hidden_size, kv_size) - ) - else: - self.embedding = torch.nn.Embedding(config.pre_seq_len, - config.num_layers * config.kv_channels * config.multi_query_group_num * 2) - - def forward(self, prefix: torch.Tensor): - if self.prefix_projection: - prefix_tokens = self.embedding(prefix) - past_key_values = self.trans(prefix_tokens) - else: - past_key_values = self.embedding(prefix) - return past_key_values - - -def split_tensor_along_last_dim( - tensor: torch.Tensor, - num_partitions: int, - contiguous_split_chunks: bool = False, -) -> List[torch.Tensor]: - """Split a tensor along its last dimension. - - Arguments: - tensor: input tensor. - num_partitions: number of partitions to split the tensor - contiguous_split_chunks: If True, make each chunk contiguous - in memory. - - Returns: - A list of Tensors - """ - # Get the size and dimension. - last_dim = tensor.dim() - 1 - last_dim_size = tensor.size()[last_dim] // num_partitions - # Split. 
- tensor_list = torch.split(tensor, last_dim_size, dim=last_dim) - # Note: torch.split does not create contiguous tensors by default. - if contiguous_split_chunks: - return tuple(chunk.contiguous() for chunk in tensor_list) - - return tensor_list - - -class RotaryEmbedding(nn.Module): - def __init__(self, dim, original_impl=False, device=None, dtype=None): - super().__init__() - inv_freq = 1.0 / (10000 ** (torch.arange(0, dim, 2, device=device).to(dtype=dtype) / dim)) - self.register_buffer("inv_freq", inv_freq) - self.dim = dim - self.original_impl = original_impl - - def forward_impl( - self, seq_len: int, n_elem: int, dtype: torch.dtype, device: torch.device, base: int = 10000 - ): - """Enhanced Transformer with Rotary Position Embedding. - - Derived from: https://github.com/labmlai/annotated_deep_learning_paper_implementations/blob/master/labml_nn/ - transformers/rope/__init__.py. MIT License: - https://github.com/labmlai/annotated_deep_learning_paper_implementations/blob/master/license. 
- """ - # $\Theta = {\theta_i = 10000^{\frac{2(i-1)}{d}}, i \in [1, 2, ..., \frac{d}{2}]}$ - theta = 1.0 / (base ** (torch.arange(0, n_elem, 2, dtype=torch.float, device=device) / n_elem)) - - # Create position indexes `[0, 1, ..., seq_len - 1]` - seq_idx = torch.arange(seq_len, dtype=torch.float, device=device) - - # Calculate the product of position index and $\theta_i$ - idx_theta = torch.outer(seq_idx, theta).float() - - cache = torch.stack([torch.cos(idx_theta), torch.sin(idx_theta)], dim=-1) - - # this is to mimic the behaviour of complex32, else we will get different results - if dtype in (torch.float16, torch.bfloat16, torch.int8): - cache = cache.bfloat16() if dtype == torch.bfloat16 else cache.half() - return cache - - def forward(self, max_seq_len, offset=0): - return self.forward_impl( - max_seq_len, self.dim, dtype=self.inv_freq.dtype, device=self.inv_freq.device - ) - - -@torch.jit.script -def apply_rotary_pos_emb(x: torch.Tensor, rope_cache: torch.Tensor) -> torch.Tensor: - # x: [sq, b, np, hn] - sq, b, np, hn = x.size(0), x.size(1), x.size(2), x.size(3) - rot_dim = rope_cache.shape[-2] * 2 - x, x_pass = x[..., :rot_dim], x[..., rot_dim:] - # truncate to support variable sizes - rope_cache = rope_cache[:sq] - xshaped = x.reshape(sq, -1, np, rot_dim // 2, 2) - rope_cache = rope_cache.view(sq, -1, 1, xshaped.size(3), 2) - x_out2 = torch.stack( - [ - xshaped[..., 0] * rope_cache[..., 0] - xshaped[..., 1] * rope_cache[..., 1], - xshaped[..., 1] * rope_cache[..., 0] + xshaped[..., 0] * rope_cache[..., 1], - ], - -1, - ) - x_out2 = x_out2.flatten(3) - return torch.cat((x_out2, x_pass), dim=-1) - - -class RMSNorm(torch.nn.Module): - def __init__(self, normalized_shape, eps=1e-5, device=None, dtype=None, **kwargs): - super().__init__() - self.weight = torch.nn.Parameter(torch.empty(normalized_shape, device=device, dtype=dtype)) - self.eps = eps - - def forward(self, hidden_states: torch.Tensor): - input_dtype = hidden_states.dtype - variance = 
hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True) - hidden_states = hidden_states * torch.rsqrt(variance + self.eps) - - return (self.weight * hidden_states).to(input_dtype) - - -class CoreAttention(torch.nn.Module): - def __init__(self, config: ChatGLMConfig, layer_number): - super(CoreAttention, self).__init__() - - self.apply_query_key_layer_scaling = config.apply_query_key_layer_scaling - self.attention_softmax_in_fp32 = config.attention_softmax_in_fp32 - if self.apply_query_key_layer_scaling: - self.attention_softmax_in_fp32 = True - self.layer_number = max(1, layer_number) - - projection_size = config.kv_channels * config.num_attention_heads - - # Per attention head and per partition values. - self.hidden_size_per_partition = projection_size - self.hidden_size_per_attention_head = projection_size // config.num_attention_heads - self.num_attention_heads_per_partition = config.num_attention_heads - - coeff = None - self.norm_factor = math.sqrt(self.hidden_size_per_attention_head) - if self.apply_query_key_layer_scaling: - coeff = self.layer_number - self.norm_factor *= coeff - self.coeff = coeff - - self.attention_dropout = torch.nn.Dropout(config.attention_dropout) - - def forward(self, query_layer, key_layer, value_layer, attention_mask): - pytorch_major_version = int(torch.__version__.split('.')[0]) - if pytorch_major_version >= 2: - query_layer, key_layer, value_layer = [k.permute(1, 2, 0, 3) for k in [query_layer, key_layer, value_layer]] - if attention_mask is None and query_layer.shape[2] == key_layer.shape[2]: - context_layer = torch.nn.functional.scaled_dot_product_attention(query_layer, key_layer, value_layer, - is_causal=True) - else: - if attention_mask is not None: - attention_mask = ~attention_mask - context_layer = torch.nn.functional.scaled_dot_product_attention(query_layer, key_layer, value_layer, - attention_mask) - context_layer = context_layer.permute(2, 0, 1, 3) - new_context_layer_shape = context_layer.size()[:-2] + 
(self.hidden_size_per_partition,) - context_layer = context_layer.reshape(*new_context_layer_shape) - else: - # Raw attention scores - - # [b, np, sq, sk] - output_size = (query_layer.size(1), query_layer.size(2), query_layer.size(0), key_layer.size(0)) - - # [sq, b, np, hn] -> [sq, b * np, hn] - query_layer = query_layer.view(output_size[2], output_size[0] * output_size[1], -1) - # [sk, b, np, hn] -> [sk, b * np, hn] - key_layer = key_layer.view(output_size[3], output_size[0] * output_size[1], -1) - - # preallocting input tensor: [b * np, sq, sk] - matmul_input_buffer = torch.empty( - output_size[0] * output_size[1], output_size[2], output_size[3], dtype=query_layer.dtype, - device=query_layer.device - ) - - # Raw attention scores. [b * np, sq, sk] - matmul_result = torch.baddbmm( - matmul_input_buffer, - query_layer.transpose(0, 1), # [b * np, sq, hn] - key_layer.transpose(0, 1).transpose(1, 2), # [b * np, hn, sk] - beta=0.0, - alpha=(1.0 / self.norm_factor), - ) - - # change view to [b, np, sq, sk] - attention_scores = matmul_result.view(*output_size) - - # =========================== - # Attention probs and dropout - # =========================== - - # attention scores and attention mask [b, np, sq, sk] - if self.attention_softmax_in_fp32: - attention_scores = attention_scores.float() - if self.coeff is not None: - attention_scores = attention_scores * self.coeff - if attention_mask is None and attention_scores.shape[2] == attention_scores.shape[3]: - attention_mask = torch.ones(output_size[0], 1, output_size[2], output_size[3], - device=attention_scores.device, dtype=torch.bool) - attention_mask.tril_() - attention_mask = ~attention_mask - if attention_mask is not None: - attention_scores = attention_scores.masked_fill(attention_mask, float("-inf")) - attention_probs = F.softmax(attention_scores, dim=-1) - attention_probs = attention_probs.type_as(value_layer) - - # This is actually dropping out entire tokens to attend to, which might - # seem a bit unusual, 
but is taken from the original Transformer paper. - attention_probs = self.attention_dropout(attention_probs) - # ========================= - # Context layer. [sq, b, hp] - # ========================= - - # value_layer -> context layer. - # [sk, b, np, hn] --> [b, np, sq, hn] - - # context layer shape: [b, np, sq, hn] - output_size = (value_layer.size(1), value_layer.size(2), query_layer.size(0), value_layer.size(3)) - # change view [sk, b * np, hn] - value_layer = value_layer.view(value_layer.size(0), output_size[0] * output_size[1], -1) - # change view [b * np, sq, sk] - attention_probs = attention_probs.view(output_size[0] * output_size[1], output_size[2], -1) - # matmul: [b * np, sq, hn] - context_layer = torch.bmm(attention_probs, value_layer.transpose(0, 1)) - # change view [b, np, sq, hn] - context_layer = context_layer.view(*output_size) - # [b, np, sq, hn] --> [sq, b, np, hn] - context_layer = context_layer.permute(2, 0, 1, 3).contiguous() - # [sq, b, np, hn] --> [sq, b, hp] - new_context_layer_shape = context_layer.size()[:-2] + (self.hidden_size_per_partition,) - context_layer = context_layer.view(*new_context_layer_shape) - - return context_layer - - -class SelfAttention(torch.nn.Module): - """Parallel self-attention layer abstract class. - - Self-attention layer takes input with size [s, b, h] - and returns output of the same size. - """ - - def __init__(self, config: ChatGLMConfig, layer_number, device=None): - super(SelfAttention, self).__init__() - self.layer_number = max(1, layer_number) - - self.projection_size = config.kv_channels * config.num_attention_heads - - # Per attention head and per partition values. 
- self.hidden_size_per_attention_head = self.projection_size // config.num_attention_heads - self.num_attention_heads_per_partition = config.num_attention_heads - - self.multi_query_attention = config.multi_query_attention - self.qkv_hidden_size = 3 * self.projection_size - if self.multi_query_attention: - self.num_multi_query_groups_per_partition = config.multi_query_group_num - self.qkv_hidden_size = ( - self.projection_size + 2 * self.hidden_size_per_attention_head * config.multi_query_group_num - ) - self.query_key_value = nn.Linear(config.hidden_size, self.qkv_hidden_size, - bias=config.add_bias_linear or config.add_qkv_bias, - device=device, **_config_to_kwargs(config) - ) - - self.core_attention = CoreAttention(config, self.layer_number) - - # Output. - self.dense = nn.Linear(self.projection_size, config.hidden_size, bias=config.add_bias_linear, - device=device, **_config_to_kwargs(config) - ) - - def _allocate_memory(self, inference_max_sequence_len, batch_size, device=None, dtype=None): - if self.multi_query_attention: - num_attention_heads = self.num_multi_query_groups_per_partition - else: - num_attention_heads = self.num_attention_heads_per_partition - return torch.empty( - inference_max_sequence_len, - batch_size, - num_attention_heads, - self.hidden_size_per_attention_head, - dtype=dtype, - device=device, - ) - - def forward( - self, hidden_states, attention_mask, rotary_pos_emb, kv_cache=None, use_cache=True - ): - # hidden_states: [sq, b, h] - - # ================================================= - # Pre-allocate memory for key-values for inference. 
- # ================================================= - # ===================== - # Query, Key, and Value - # ===================== - - # Attention heads [sq, b, h] --> [sq, b, (np * 3 * hn)] - mixed_x_layer = self.query_key_value(hidden_states) - - if self.multi_query_attention: - (query_layer, key_layer, value_layer) = mixed_x_layer.split( - [ - self.num_attention_heads_per_partition * self.hidden_size_per_attention_head, - self.num_multi_query_groups_per_partition * self.hidden_size_per_attention_head, - self.num_multi_query_groups_per_partition * self.hidden_size_per_attention_head, - ], - dim=-1, - ) - query_layer = query_layer.view( - query_layer.size()[:-1] + (self.num_attention_heads_per_partition, self.hidden_size_per_attention_head) - ) - key_layer = key_layer.view( - key_layer.size()[:-1] + (self.num_multi_query_groups_per_partition, self.hidden_size_per_attention_head) - ) - value_layer = value_layer.view( - value_layer.size()[:-1] - + (self.num_multi_query_groups_per_partition, self.hidden_size_per_attention_head) - ) - else: - new_tensor_shape = mixed_x_layer.size()[:-1] + \ - (self.num_attention_heads_per_partition, - 3 * self.hidden_size_per_attention_head) - mixed_x_layer = mixed_x_layer.view(*new_tensor_shape) - - # [sq, b, np, 3 * hn] --> 3 [sq, b, np, hn] - (query_layer, key_layer, value_layer) = split_tensor_along_last_dim(mixed_x_layer, 3) - - # apply relative positional encoding (rotary embedding) - if rotary_pos_emb is not None: - query_layer = apply_rotary_pos_emb(query_layer, rotary_pos_emb) - key_layer = apply_rotary_pos_emb(key_layer, rotary_pos_emb) - - # adjust key and value for inference - if kv_cache is not None: - cache_k, cache_v = kv_cache - key_layer = torch.cat((cache_k, key_layer), dim=0) - value_layer = torch.cat((cache_v, value_layer), dim=0) - if use_cache: - kv_cache = (key_layer, value_layer) - else: - kv_cache = None - - if self.multi_query_attention: - key_layer = key_layer.unsqueeze(-2) - key_layer = key_layer.expand( - 
-1, -1, -1, self.num_attention_heads_per_partition // self.num_multi_query_groups_per_partition, -1 - ) - key_layer = key_layer.contiguous().view( - key_layer.size()[:2] + (self.num_attention_heads_per_partition, self.hidden_size_per_attention_head) - ) - value_layer = value_layer.unsqueeze(-2) - value_layer = value_layer.expand( - -1, -1, -1, self.num_attention_heads_per_partition // self.num_multi_query_groups_per_partition, -1 - ) - value_layer = value_layer.contiguous().view( - value_layer.size()[:2] + (self.num_attention_heads_per_partition, self.hidden_size_per_attention_head) - ) - - # ================================== - # core attention computation - # ================================== - - context_layer = self.core_attention(query_layer, key_layer, value_layer, attention_mask) - - # ================= - # Output. [sq, b, h] - # ================= - - output = self.dense(context_layer) - - return output, kv_cache - - -def _config_to_kwargs(args): - common_kwargs = { - "dtype": args.torch_dtype, - } - return common_kwargs - - -class MLP(torch.nn.Module): - """MLP. - - MLP will take the input with h hidden state, project it to 4*h - hidden dimension, perform nonlinear transformation, and project the - state back into h hidden dimension. - """ - - def __init__(self, config: ChatGLMConfig, device=None): - super(MLP, self).__init__() - - self.add_bias = config.add_bias_linear - - # Project to 4h. If using swiglu double the output width, see https://arxiv.org/pdf/2002.05202.pdf - self.dense_h_to_4h = nn.Linear( - config.hidden_size, - config.ffn_hidden_size * 2, - bias=self.add_bias, - device=device, - **_config_to_kwargs(config) - ) - - def swiglu(x): - x = torch.chunk(x, 2, dim=-1) - return F.silu(x[0]) * x[1] - - self.activation_func = swiglu - - # Project back to h. 
- self.dense_4h_to_h = nn.Linear( - config.ffn_hidden_size, - config.hidden_size, - bias=self.add_bias, - device=device, - **_config_to_kwargs(config) - ) - - def forward(self, hidden_states): - # [s, b, 4hp] - intermediate_parallel = self.dense_h_to_4h(hidden_states) - intermediate_parallel = self.activation_func(intermediate_parallel) - # [s, b, h] - output = self.dense_4h_to_h(intermediate_parallel) - return output - - -class GLMBlock(torch.nn.Module): - """A single transformer layer. - - Transformer layer takes input with size [s, b, h] and returns an - output of the same size. - """ - - def __init__(self, config: ChatGLMConfig, layer_number, device=None): - super(GLMBlock, self).__init__() - self.layer_number = layer_number - - self.apply_residual_connection_post_layernorm = config.apply_residual_connection_post_layernorm - - self.fp32_residual_connection = config.fp32_residual_connection - - LayerNormFunc = RMSNorm if config.rmsnorm else LayerNorm - # Layernorm on the input data. - self.input_layernorm = LayerNormFunc(config.hidden_size, eps=config.layernorm_epsilon, device=device, - dtype=config.torch_dtype) - - # Self attention. - self.self_attention = SelfAttention(config, layer_number, device=device) - self.hidden_dropout = config.hidden_dropout - - # Layernorm on the attention output - self.post_attention_layernorm = LayerNormFunc(config.hidden_size, eps=config.layernorm_epsilon, device=device, - dtype=config.torch_dtype) - - # MLP - self.mlp = MLP(config, device=device) - - def forward( - self, hidden_states, attention_mask, rotary_pos_emb, kv_cache=None, use_cache=True, - ): - # hidden_states: [s, b, h] - - # Layer norm at the beginning of the transformer layer. - layernorm_output = self.input_layernorm(hidden_states) - # Self attention. - attention_output, kv_cache = self.self_attention( - layernorm_output, - attention_mask, - rotary_pos_emb, - kv_cache=kv_cache, - use_cache=use_cache - ) - - # Residual connection. 
- if self.apply_residual_connection_post_layernorm: - residual = layernorm_output - else: - residual = hidden_states - - layernorm_input = torch.nn.functional.dropout(attention_output, p=self.hidden_dropout, training=self.training) - layernorm_input = residual + layernorm_input - - # Layer norm post the self attention. - layernorm_output = self.post_attention_layernorm(layernorm_input) - - # MLP. - mlp_output = self.mlp(layernorm_output) - - # Second residual connection. - if self.apply_residual_connection_post_layernorm: - residual = layernorm_output - else: - residual = layernorm_input - - output = torch.nn.functional.dropout(mlp_output, p=self.hidden_dropout, training=self.training) - output = residual + output - - return output, kv_cache - - -class GLMTransformer(torch.nn.Module): - """Transformer class.""" - - def __init__(self, config: ChatGLMConfig, device=None): - super(GLMTransformer, self).__init__() - - self.fp32_residual_connection = config.fp32_residual_connection - self.post_layer_norm = config.post_layer_norm - - # Number of layers. - self.num_layers = config.num_layers - - # Transformer layers. - def build_layer(layer_number): - return GLMBlock(config, layer_number, device=device) - - self.layers = torch.nn.ModuleList([build_layer(i + 1) for i in range(self.num_layers)]) - - if self.post_layer_norm: - LayerNormFunc = RMSNorm if config.rmsnorm else LayerNorm - # Final layer norm before output. 
- self.final_layernorm = LayerNormFunc(config.hidden_size, eps=config.layernorm_epsilon, device=device, - dtype=config.torch_dtype) - - self.gradient_checkpointing = False - - def _get_layer(self, layer_number): - return self.layers[layer_number] - - def forward( - self, hidden_states, attention_mask, rotary_pos_emb, kv_caches=None, - use_cache: Optional[bool] = True, - output_hidden_states: Optional[bool] = False, - ): - if not kv_caches: - kv_caches = [None for _ in range(self.num_layers)] - presents = () if use_cache else None - if self.gradient_checkpointing and self.training: - if use_cache: - logger.warning_once( - "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." - ) - use_cache = False - - all_self_attentions = None - all_hidden_states = () if output_hidden_states else None - for index in range(self.num_layers): - if output_hidden_states: - all_hidden_states = all_hidden_states + (hidden_states,) - - layer = self._get_layer(index) - if self.gradient_checkpointing and self.training: - layer_ret = torch.utils.checkpoint.checkpoint( - layer, - hidden_states, - attention_mask, - rotary_pos_emb, - kv_caches[index], - use_cache - ) - else: - layer_ret = layer( - hidden_states, - attention_mask, - rotary_pos_emb, - kv_cache=kv_caches[index], - use_cache=use_cache - ) - hidden_states, kv_cache = layer_ret - if use_cache: - presents = presents + (kv_cache,) - - if output_hidden_states: - all_hidden_states = all_hidden_states + (hidden_states,) - - # Final layer norm. - if self.post_layer_norm: - hidden_states = self.final_layernorm(hidden_states) - - return hidden_states, presents, all_hidden_states, all_self_attentions - - -class ChatGLMPreTrainedModel(PreTrainedModel): - """ - An abstract class to handle weights initialization and - a simple interface for downloading and loading pretrained models. 
- """ - - is_parallelizable = False - supports_gradient_checkpointing = True - config_class = ChatGLMConfig - base_model_prefix = "transformer" - _no_split_modules = ["GLMBlock"] - - def _init_weights(self, module: nn.Module): - """Initialize the weights.""" - return - - def get_masks(self, input_ids, past_key_values, padding_mask=None): - batch_size, seq_length = input_ids.shape - full_attention_mask = torch.ones(batch_size, seq_length, seq_length, device=input_ids.device) - full_attention_mask.tril_() - past_length = 0 - if past_key_values: - past_length = past_key_values[0][0].shape[0] - if past_length: - full_attention_mask = torch.cat((torch.ones(batch_size, seq_length, past_length, - device=input_ids.device), full_attention_mask), dim=-1) - if padding_mask is not None: - full_attention_mask = full_attention_mask * padding_mask.unsqueeze(1) - if not past_length and padding_mask is not None: - full_attention_mask -= padding_mask.unsqueeze(-1) - 1 - full_attention_mask = (full_attention_mask < 0.5).bool() - full_attention_mask.unsqueeze_(1) - return full_attention_mask - - def get_position_ids(self, input_ids, device): - batch_size, seq_length = input_ids.shape - position_ids = torch.arange(seq_length, dtype=torch.long, device=device).unsqueeze(0).repeat(batch_size, 1) - return position_ids - - def _set_gradient_checkpointing(self, module, value=False): - if isinstance(module, GLMTransformer): - module.gradient_checkpointing = value - - -class Embedding(torch.nn.Module): - """Language model embeddings.""" - - def __init__(self, config: ChatGLMConfig, device=None): - super(Embedding, self).__init__() - - self.hidden_size = config.hidden_size - # Word embeddings (parallel). - self.word_embeddings = nn.Embedding( - config.padded_vocab_size, - self.hidden_size, - dtype=config.torch_dtype, - device=device - ) - self.fp32_residual_connection = config.fp32_residual_connection - - def forward(self, input_ids): - # Embeddings. 
- words_embeddings = self.word_embeddings(input_ids) - embeddings = words_embeddings - # Data format change to avoid explicit transposes : [b s h] --> [s b h]. - embeddings = embeddings.transpose(0, 1).contiguous() - # If the input flag for fp32 residual connection is set, convert for float. - if self.fp32_residual_connection: - embeddings = embeddings.float() - return embeddings - - -class ChatGLMModel(ChatGLMPreTrainedModel): - def __init__(self, config: ChatGLMConfig, device=None, empty_init=True): - super().__init__(config) - if empty_init: - init_method = skip_init - else: - init_method = default_init - init_kwargs = {} - if device is not None: - init_kwargs["device"] = device - self.embedding = init_method(Embedding, config, **init_kwargs) - self.num_layers = config.num_layers - self.multi_query_group_num = config.multi_query_group_num - self.kv_channels = config.kv_channels - - # Rotary positional embeddings - self.seq_length = config.seq_length - rotary_dim = ( - config.hidden_size // config.num_attention_heads if config.kv_channels is None else config.kv_channels - ) - - self.rotary_pos_emb = RotaryEmbedding(rotary_dim // 2, original_impl=config.original_rope, device=device, - dtype=config.torch_dtype) - self.encoder = init_method(GLMTransformer, config, **init_kwargs) - self.output_layer = init_method(nn.Linear, config.hidden_size, config.padded_vocab_size, bias=False, - dtype=config.torch_dtype, **init_kwargs) - self.pre_seq_len = config.pre_seq_len - self.prefix_projection = config.prefix_projection - if self.pre_seq_len is not None: - for param in self.parameters(): - param.requires_grad = False - self.prefix_tokens = torch.arange(self.pre_seq_len).long() - self.prefix_encoder = PrefixEncoder(config) - self.dropout = torch.nn.Dropout(0.1) - - def get_input_embeddings(self): - return self.embedding.word_embeddings - - def get_prompt(self, batch_size, device, dtype=torch.half): - prefix_tokens = self.prefix_tokens.unsqueeze(0).expand(batch_size, 
-1).to(device) - past_key_values = self.prefix_encoder(prefix_tokens).type(dtype) - past_key_values = past_key_values.view( - batch_size, - self.pre_seq_len, - self.num_layers * 2, - self.multi_query_group_num, - self.kv_channels - ) - # seq_len, b, nh, hidden_size - past_key_values = self.dropout(past_key_values) - past_key_values = past_key_values.permute([2, 1, 0, 3, 4]).split(2) - return past_key_values - - def forward( - self, - input_ids, - position_ids: Optional[torch.Tensor] = None, - attention_mask: Optional[torch.BoolTensor] = None, - full_attention_mask: Optional[torch.BoolTensor] = None, - past_key_values: Optional[Tuple[Tuple[torch.Tensor, torch.Tensor], ...]] = None, - inputs_embeds: Optional[torch.Tensor] = None, - use_cache: Optional[bool] = None, - output_hidden_states: Optional[bool] = None, - return_dict: Optional[bool] = None, - ): - output_hidden_states = ( - output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states - ) - use_cache = use_cache if use_cache is not None else self.config.use_cache - return_dict = return_dict if return_dict is not None else self.config.use_return_dict - - batch_size, seq_length = input_ids.shape - - if inputs_embeds is None: - inputs_embeds = self.embedding(input_ids) - - if self.pre_seq_len is not None: - if past_key_values is None: - past_key_values = self.get_prompt(batch_size=batch_size, device=input_ids.device, - dtype=inputs_embeds.dtype) - if attention_mask is not None: - attention_mask = torch.cat([attention_mask.new_ones((batch_size, self.pre_seq_len)), - attention_mask], dim=-1) - - if full_attention_mask is None: - if (attention_mask is not None and not attention_mask.all()) or (past_key_values and seq_length != 1): - full_attention_mask = self.get_masks(input_ids, past_key_values, padding_mask=attention_mask) - - # Rotary positional embeddings - rotary_pos_emb = self.rotary_pos_emb(self.seq_length) - if position_ids is not None: - rotary_pos_emb = 
rotary_pos_emb[position_ids] - else: - rotary_pos_emb = rotary_pos_emb[None, :seq_length] - rotary_pos_emb = rotary_pos_emb.transpose(0, 1).contiguous() - - # Run encoder. - hidden_states, presents, all_hidden_states, all_self_attentions = self.encoder( - inputs_embeds, full_attention_mask, rotary_pos_emb=rotary_pos_emb, - kv_caches=past_key_values, use_cache=use_cache, output_hidden_states=output_hidden_states - ) - - if not return_dict: - return tuple(v for v in [hidden_states, presents, all_hidden_states, all_self_attentions] if v is not None) - - return BaseModelOutputWithPast( - last_hidden_state=hidden_states, - past_key_values=presents, - hidden_states=all_hidden_states, - attentions=all_self_attentions, - ) - - def quantize(self, weight_bit_width: int): - # from .quantization import quantize - quantize(self.encoder, weight_bit_width) - return self - - -class ChatGLMForConditionalGeneration(ChatGLMPreTrainedModel): - def __init__(self, config: ChatGLMConfig, empty_init=True, device=None): - super().__init__(config) - - self.max_sequence_length = config.max_length - self.transformer = ChatGLMModel(config, empty_init=empty_init, device=device) - self.config = config - self.quantized = False - - if self.config.quantization_bit: - self.quantize(self.config.quantization_bit, empty_init=True) - - def _update_model_kwargs_for_generation( - self, - outputs: ModelOutput, - model_kwargs: Dict[str, Any], - is_encoder_decoder: bool = False, - standardize_cache_format: bool = False, - ) -> Dict[str, Any]: - # update past_key_values - model_kwargs["past_key_values"] = self._extract_past_from_model_output( - outputs, standardize_cache_format=standardize_cache_format - ) - - # update attention mask - if "attention_mask" in model_kwargs: - attention_mask = model_kwargs["attention_mask"] - model_kwargs["attention_mask"] = torch.cat( - [attention_mask, attention_mask.new_ones((attention_mask.shape[0], 1))], dim=-1 - ) - - # update position ids - if "position_ids" in 
model_kwargs: - position_ids = model_kwargs["position_ids"] - new_position_id = position_ids[..., -1:].clone() - new_position_id += 1 - model_kwargs["position_ids"] = torch.cat( - [position_ids, new_position_id], dim=-1 - ) - - model_kwargs["is_first_forward"] = False - return model_kwargs - - def prepare_inputs_for_generation( - self, - input_ids: torch.LongTensor, - past_key_values: Optional[torch.Tensor] = None, - attention_mask: Optional[torch.Tensor] = None, - position_ids: Optional[torch.Tensor] = None, - use_cache: Optional[bool] = None, - is_first_forward: bool = True, - **kwargs - ) -> dict: - # only last token for input_ids if past is not None - if position_ids is None: - position_ids = self.get_position_ids(input_ids, device=input_ids.device) - if not is_first_forward: - if past_key_values is not None: - position_ids = position_ids[..., -1:] - input_ids = input_ids[:, -1:] - return { - "input_ids": input_ids, - "past_key_values": past_key_values, - "position_ids": position_ids, - "attention_mask": attention_mask, - "return_last_logit": True, - "use_cache": use_cache - } - - def forward( - self, - input_ids: Optional[torch.Tensor] = None, - position_ids: Optional[torch.Tensor] = None, - attention_mask: Optional[torch.Tensor] = None, - past_key_values: Optional[Tuple[torch.FloatTensor]] = None, - inputs_embeds: Optional[torch.Tensor] = None, - labels: Optional[torch.Tensor] = None, - use_cache: Optional[bool] = None, - output_attentions: Optional[bool] = None, - output_hidden_states: Optional[bool] = None, - return_dict: Optional[bool] = None, - return_last_logit: Optional[bool] = False, - ): - use_cache = use_cache if use_cache is not None else self.config.use_cache - return_dict = return_dict if return_dict is not None else self.config.use_return_dict - - transformer_outputs = self.transformer( - input_ids=input_ids, - position_ids=position_ids, - attention_mask=attention_mask, - past_key_values=past_key_values, - inputs_embeds=inputs_embeds, - 
use_cache=use_cache, - output_hidden_states=output_hidden_states, - return_dict=return_dict, - ) - - hidden_states = transformer_outputs[0] - if return_last_logit: - hidden_states = hidden_states[-1:] - lm_logits = self.transformer.output_layer(hidden_states) - lm_logits = lm_logits.transpose(0, 1).contiguous() - - loss = None - if labels is not None: - lm_logits = lm_logits.to(torch.float32) - - # Shift so that tokens < n predict n - shift_logits = lm_logits[..., :-1, :].contiguous() - shift_labels = labels[..., 1:].contiguous() - # Flatten the tokens - loss_fct = CrossEntropyLoss(ignore_index=-100) - loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1)) - - lm_logits = lm_logits.to(hidden_states.dtype) - loss = loss.to(hidden_states.dtype) - - if not return_dict: - output = (lm_logits,) + transformer_outputs[1:] - return ((loss,) + output) if loss is not None else output - - return CausalLMOutputWithPast( - loss=loss, - logits=lm_logits, - past_key_values=transformer_outputs.past_key_values, - hidden_states=transformer_outputs.hidden_states, - attentions=transformer_outputs.attentions, - ) - - @staticmethod - def _reorder_cache( - past: Tuple[Tuple[torch.Tensor, torch.Tensor], ...], beam_idx: torch.LongTensor - ) -> Tuple[Tuple[torch.Tensor, torch.Tensor], ...]: - """ - This function is used to re-order the `past_key_values` cache if [`~PreTrainedModel.beam_search`] or - [`~PreTrainedModel.beam_sample`] is called. This is required to match `past_key_values` with the correct - beam_idx at every generation step. - - Output shares the same memory storage as `past`. 
- """ - return tuple( - ( - layer_past[0].index_select(1, beam_idx.to(layer_past[0].device)), - layer_past[1].index_select(1, beam_idx.to(layer_past[1].device)), - ) - for layer_past in past - ) - - def process_response(self, output, history): - content = "" - history = deepcopy(history) - for response in output.split("<|assistant|>"): - metadata, content = response.split("\n", maxsplit=1) - if not metadata.strip(): - content = content.strip() - history.append({"role": "assistant", "metadata": metadata, "content": content}) - content = content.replace("[[训练时间]]", "2023年") - else: - history.append({"role": "assistant", "metadata": metadata, "content": content}) - if history[0]["role"] == "system" and "tools" in history[0]: - content = "\n".join(content.split("\n")[1:-1]) - def tool_call(**kwargs): - return kwargs - parameters = eval(content) - content = {"name": metadata.strip(), "parameters": parameters} - else: - content = {"name": metadata.strip(), "content": content} - return content, history - - @torch.inference_mode() - def chat(self, tokenizer, query: str, history: List[Tuple[str, str]] = None, role: str = "user", - max_length: int = 8192, num_beams=1, do_sample=True, top_p=0.8, temperature=0.8, logits_processor=None, - **kwargs): - if history is None: - history = [] - if logits_processor is None: - logits_processor = LogitsProcessorList() - logits_processor.append(InvalidScoreLogitsProcessor()) - gen_kwargs = {"max_length": max_length, "num_beams": num_beams, "do_sample": do_sample, "top_p": top_p, - "temperature": temperature, "logits_processor": logits_processor, **kwargs} - inputs = tokenizer.build_chat_input(query, history=history, role=role) - inputs = inputs.to(self.device) - eos_token_id = [tokenizer.eos_token_id, tokenizer.get_command("<|user|>"), - tokenizer.get_command("<|observation|>")] - outputs = self.generate(**inputs, **gen_kwargs, eos_token_id=eos_token_id) - outputs = outputs.tolist()[0][len(inputs["input_ids"][0]):-1] - response = 
tokenizer.decode(outputs) - history.append({"role": role, "content": query}) - response, history = self.process_response(response, history) - return response, history - - @torch.inference_mode() - def stream_chat(self, tokenizer, query: str, history: List[Tuple[str, str]] = None, role: str = "user", - past_key_values=None,max_length: int = 8192, do_sample=True, top_p=0.8, temperature=0.8, - logits_processor=None, return_past_key_values=False, **kwargs): - if history is None: - history = [] - if logits_processor is None: - logits_processor = LogitsProcessorList() - logits_processor.append(InvalidScoreLogitsProcessor()) - eos_token_id = [tokenizer.eos_token_id, tokenizer.get_command("<|user|>"), - tokenizer.get_command("<|observation|>")] - gen_kwargs = {"max_length": max_length, "do_sample": do_sample, "top_p": top_p, - "temperature": temperature, "logits_processor": logits_processor, **kwargs} - if past_key_values is None: - inputs = tokenizer.build_chat_input(query, history=history, role=role) - else: - inputs = tokenizer.build_chat_input(query, role=role) - inputs = inputs.to(self.device) - if past_key_values is not None: - past_length = past_key_values[0][0].shape[0] - if self.transformer.pre_seq_len is not None: - past_length -= self.transformer.pre_seq_len - inputs.position_ids += past_length - attention_mask = inputs.attention_mask - attention_mask = torch.cat((attention_mask.new_ones(1, past_length), attention_mask), dim=1) - inputs['attention_mask'] = attention_mask - history.append({"role": role, "content": query}) - for outputs in self.stream_generate(**inputs, past_key_values=past_key_values, - eos_token_id=eos_token_id, return_past_key_values=return_past_key_values, - **gen_kwargs): - if return_past_key_values: - outputs, past_key_values = outputs - outputs = outputs.tolist()[0][len(inputs["input_ids"][0]):-1] - response = tokenizer.decode(outputs) - if response and response[-1] != "�": - response, new_history = self.process_response(response, history) 
- if return_past_key_values: - yield response, new_history, past_key_values - else: - yield response, new_history - - @torch.inference_mode() - def stream_generate( - self, - input_ids, - generation_config: Optional[GenerationConfig] = None, - logits_processor: Optional[LogitsProcessorList] = None, - stopping_criteria: Optional[StoppingCriteriaList] = None, - prefix_allowed_tokens_fn: Optional[Callable[[int, torch.Tensor], List[int]]] = None, - return_past_key_values=False, - **kwargs, - ): - batch_size, input_ids_seq_length = input_ids.shape[0], input_ids.shape[-1] - - if generation_config is None: - generation_config = self.generation_config - generation_config = copy.deepcopy(generation_config) - model_kwargs = generation_config.update(**kwargs) - model_kwargs["use_cache"] = generation_config.use_cache - bos_token_id, eos_token_id = generation_config.bos_token_id, generation_config.eos_token_id - - if isinstance(eos_token_id, int): - eos_token_id = [eos_token_id] - eos_token_id_tensor = torch.tensor(eos_token_id).to(input_ids.device) if eos_token_id is not None else None - - has_default_max_length = kwargs.get("max_length") is None and generation_config.max_length is not None - if has_default_max_length and generation_config.max_new_tokens is None: - warnings.warn( - f"Using `max_length`'s default ({generation_config.max_length}) to control the generation length. " - "This behaviour is deprecated and will be removed from the config in v5 of Transformers -- we" - " recommend using `max_new_tokens` to control the maximum length of the generation.", - UserWarning, - ) - elif generation_config.max_new_tokens is not None: - generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length - if not has_default_max_length: - logger.warning( - f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(=" - f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. 
" - "Please refer to the documentation for more information. " - "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)", - UserWarning, - ) - - if input_ids_seq_length >= generation_config.max_length: - input_ids_string = "decoder_input_ids" if self.config.is_encoder_decoder else "input_ids" - logger.warning( - f"Input length of {input_ids_string} is {input_ids_seq_length}, but `max_length` is set to" - f" {generation_config.max_length}. This can lead to unexpected behavior. You should consider" - " increasing `max_new_tokens`." - ) - - # 2. Set generation parameters if not already defined - logits_processor = logits_processor if logits_processor is not None else LogitsProcessorList() - stopping_criteria = stopping_criteria if stopping_criteria is not None else StoppingCriteriaList() - - logits_processor = self._get_logits_processor( - generation_config=generation_config, - input_ids_seq_length=input_ids_seq_length, - encoder_input_ids=input_ids, - prefix_allowed_tokens_fn=prefix_allowed_tokens_fn, - logits_processor=logits_processor, - ) - - stopping_criteria = self._get_stopping_criteria( - generation_config=generation_config, stopping_criteria=stopping_criteria - ) - logits_warper = self._get_logits_warper(generation_config) - - unfinished_sequences = input_ids.new(input_ids.shape[0]).fill_(1) - scores = None - while True: - model_inputs = self.prepare_inputs_for_generation(input_ids, **model_kwargs) - # forward pass to get next token - outputs = self( - **model_inputs, - return_dict=True, - output_attentions=False, - output_hidden_states=False, - ) - - next_token_logits = outputs.logits[:, -1, :] - - # pre-process distribution - next_token_scores = logits_processor(input_ids, next_token_logits) - next_token_scores = logits_warper(input_ids, next_token_scores) - - # sample - probs = nn.functional.softmax(next_token_scores, dim=-1) - if generation_config.do_sample: - next_tokens = torch.multinomial(probs, num_samples=1).squeeze(1) - 
else: - next_tokens = torch.argmax(probs, dim=-1) - # update generated ids, model inputs, and length for next step - input_ids = torch.cat([input_ids, next_tokens[:, None]], dim=-1) - model_kwargs = self._update_model_kwargs_for_generation( - outputs, model_kwargs, is_encoder_decoder=self.config.is_encoder_decoder - ) - unfinished_sequences = unfinished_sequences.mul( - next_tokens.tile(eos_token_id_tensor.shape[0], 1).ne(eos_token_id_tensor.unsqueeze(1)).prod(dim=0) - ) - if return_past_key_values: - yield input_ids, outputs.past_key_values - else: - yield input_ids - # stop when each sentence is finished, or if we exceed the maximum length - if unfinished_sequences.max() == 0 or stopping_criteria(input_ids, scores): - break - - def quantize(self, bits: int, empty_init=False, device=None, **kwargs): - if bits == 0: - return - - # from .quantization import quantize - - if self.quantized: - logger.info("Already quantized.") - return self - - self.quantized = True - - self.config.quantization_bit = bits - - self.transformer.encoder = quantize(self.transformer.encoder, bits, empty_init=empty_init, device=device, - **kwargs) - return self - - -class ChatGLMForSequenceClassification(ChatGLMPreTrainedModel): - def __init__(self, config: ChatGLMConfig, empty_init=True, device=None): - super().__init__(config) - - self.num_labels = config.num_labels - self.transformer = ChatGLMModel(config, empty_init=empty_init, device=device) - - self.classifier_head = nn.Linear(config.hidden_size, config.num_labels, bias=True, dtype=torch.half) - if config.classifier_dropout is not None: - self.dropout = nn.Dropout(config.classifier_dropout) - else: - self.dropout = None - self.config = config - - if self.config.quantization_bit: - self.quantize(self.config.quantization_bit, empty_init=True) - - def forward( - self, - input_ids: Optional[torch.LongTensor] = None, - position_ids: Optional[torch.LongTensor] = None, - attention_mask: Optional[torch.Tensor] = None, - full_attention_mask: 
Optional[torch.Tensor] = None, - past_key_values: Optional[Tuple[Tuple[torch.Tensor, torch.Tensor], ...]] = None, - inputs_embeds: Optional[torch.LongTensor] = None, - labels: Optional[torch.LongTensor] = None, - use_cache: Optional[bool] = None, - output_hidden_states: Optional[bool] = None, - return_dict: Optional[bool] = None, - ) -> Union[Tuple[torch.Tensor, ...], SequenceClassifierOutputWithPast]: - return_dict = return_dict if return_dict is not None else self.config.use_return_dict - - transformer_outputs = self.transformer( - input_ids=input_ids, - position_ids=position_ids, - attention_mask=attention_mask, - full_attention_mask=full_attention_mask, - past_key_values=past_key_values, - inputs_embeds=inputs_embeds, - use_cache=use_cache, - output_hidden_states=output_hidden_states, - return_dict=return_dict, - ) - - hidden_states = transformer_outputs[0] - pooled_hidden_states = hidden_states[-1] - if self.dropout is not None: - pooled_hidden_states = self.dropout(pooled_hidden_states) - logits = self.classifier_head(pooled_hidden_states) - - loss = None - if labels is not None: - if self.config.problem_type is None: - if self.num_labels == 1: - self.config.problem_type = "regression" - elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int): - self.config.problem_type = "single_label_classification" - else: - self.config.problem_type = "multi_label_classification" - - if self.config.problem_type == "regression": - loss_fct = MSELoss() - if self.num_labels == 1: - loss = loss_fct(logits.squeeze().float(), labels.squeeze()) - else: - loss = loss_fct(logits.float(), labels) - elif self.config.problem_type == "single_label_classification": - loss_fct = CrossEntropyLoss() - loss = loss_fct(logits.view(-1, self.num_labels).float(), labels.view(-1)) - elif self.config.problem_type == "multi_label_classification": - loss_fct = BCEWithLogitsLoss() - loss = loss_fct(logits.float(), labels.view(-1, self.num_labels)) - - if not 
return_dict: - output = (logits,) + transformer_outputs[1:] - return ((loss,) + output) if loss is not None else output - - return SequenceClassifierOutputWithPast( - loss=loss, - logits=logits, - past_key_values=transformer_outputs.past_key_values, - hidden_states=transformer_outputs.hidden_states, - attentions=transformer_outputs.attentions, - ) diff --git a/diffsynth/models/longcat_video_dit.py b/diffsynth/models/longcat_video_dit.py deleted file mode 100644 index bc4e79d..0000000 --- a/diffsynth/models/longcat_video_dit.py +++ /dev/null @@ -1,901 +0,0 @@ -from typing import List, Optional, Tuple - -import math -import torch -import torch.nn as nn -import torch.amp as amp - -import numpy as np -import torch.nn.functional as F -from einops import rearrange, repeat -from .wan_video_dit import flash_attention -from ..vram_management import gradient_checkpoint_forward - - -class RMSNorm_FP32(torch.nn.Module): - def __init__(self, dim: int, eps: float): - super().__init__() - self.eps = eps - self.weight = nn.Parameter(torch.ones(dim)) - - def _norm(self, x): - return x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + self.eps) - - def forward(self, x): - output = self._norm(x.float()).type_as(x) - return output * self.weight - - -def broadcat(tensors, dim=-1): - num_tensors = len(tensors) - shape_lens = set(list(map(lambda t: len(t.shape), tensors))) - assert len(shape_lens) == 1, "tensors must all have the same number of dimensions" - shape_len = list(shape_lens)[0] - dim = (dim + shape_len) if dim < 0 else dim - dims = list(zip(*map(lambda t: list(t.shape), tensors))) - expandable_dims = [(i, val) for i, val in enumerate(dims) if i != dim] - assert all( - [*map(lambda t: len(set(t[1])) <= 2, expandable_dims)] - ), "invalid dimensions for broadcastable concatentation" - max_dims = list(map(lambda t: (t[0], max(t[1])), expandable_dims)) - expanded_dims = list(map(lambda t: (t[0], (t[1],) * num_tensors), max_dims)) - expanded_dims.insert(dim, (dim, dims[dim])) - 
expandable_shapes = list(zip(*map(lambda t: t[1], expanded_dims))) - tensors = list(map(lambda t: t[0].expand(*t[1]), zip(tensors, expandable_shapes))) - return torch.cat(tensors, dim=dim) - - -def rotate_half(x): - x = rearrange(x, "... (d r) -> ... d r", r=2) - x1, x2 = x.unbind(dim=-1) - x = torch.stack((-x2, x1), dim=-1) - return rearrange(x, "... d r -> ... (d r)") - - -class RotaryPositionalEmbedding(nn.Module): - - def __init__(self, - head_dim, - cp_split_hw=None - ): - """Rotary positional embedding for 3D - Reference : https://blog.eleuther.ai/rotary-embeddings/ - Paper: https://arxiv.org/pdf/2104.09864.pdf - Args: - dim: Dimension of embedding - base: Base value for exponential - """ - super().__init__() - self.head_dim = head_dim - assert self.head_dim % 8 == 0, 'Dim must be a multiply of 8 for 3D RoPE.' - self.cp_split_hw = cp_split_hw - # We take the assumption that the longest side of grid will not larger than 512, i.e, 512 * 8 = 4098 input pixels - self.base = 10000 - self.freqs_dict = {} - - def register_grid_size(self, grid_size): - if grid_size not in self.freqs_dict: - self.freqs_dict.update({ - grid_size: self.precompute_freqs_cis_3d(grid_size) - }) - - def precompute_freqs_cis_3d(self, grid_size): - num_frames, height, width = grid_size - dim_t = self.head_dim - 4 * (self.head_dim // 6) - dim_h = 2 * (self.head_dim // 6) - dim_w = 2 * (self.head_dim // 6) - freqs_t = 1.0 / (self.base ** (torch.arange(0, dim_t, 2)[: (dim_t // 2)].float() / dim_t)) - freqs_h = 1.0 / (self.base ** (torch.arange(0, dim_h, 2)[: (dim_h // 2)].float() / dim_h)) - freqs_w = 1.0 / (self.base ** (torch.arange(0, dim_w, 2)[: (dim_w // 2)].float() / dim_w)) - grid_t = np.linspace(0, num_frames, num_frames, endpoint=False, dtype=np.float32) - grid_h = np.linspace(0, height, height, endpoint=False, dtype=np.float32) - grid_w = np.linspace(0, width, width, endpoint=False, dtype=np.float32) - grid_t = torch.from_numpy(grid_t).float() - grid_h = 
torch.from_numpy(grid_h).float() - grid_w = torch.from_numpy(grid_w).float() - freqs_t = torch.einsum("..., f -> ... f", grid_t, freqs_t) - freqs_h = torch.einsum("..., f -> ... f", grid_h, freqs_h) - freqs_w = torch.einsum("..., f -> ... f", grid_w, freqs_w) - freqs_t = repeat(freqs_t, "... n -> ... (n r)", r=2) - freqs_h = repeat(freqs_h, "... n -> ... (n r)", r=2) - freqs_w = repeat(freqs_w, "... n -> ... (n r)", r=2) - freqs = broadcat((freqs_t[:, None, None, :], freqs_h[None, :, None, :], freqs_w[None, None, :, :]), dim=-1) - # (T H W D) - freqs = rearrange(freqs, "T H W D -> (T H W) D") - # if self.cp_split_hw[0] * self.cp_split_hw[1] > 1: - # with torch.no_grad(): - # freqs = rearrange(freqs, "(T H W) D -> T H W D", T=num_frames, H=height, W=width) - # freqs = context_parallel_util.split_cp_2d(freqs, seq_dim_hw=(1, 2), split_hw=self.cp_split_hw) - # freqs = rearrange(freqs, "T H W D -> (T H W) D") - - return freqs - - def forward(self, q, k, grid_size): - """3D RoPE. - - Args: - query: [B, head, seq, head_dim] - key: [B, head, seq, head_dim] - Returns: - query and key with the same shape as input. 
- """ - - if grid_size not in self.freqs_dict: - self.register_grid_size(grid_size) - - freqs_cis = self.freqs_dict[grid_size].to(q.device) - q_, k_ = q.float(), k.float() - freqs_cis = freqs_cis.float().to(q.device) - cos, sin = freqs_cis.cos(), freqs_cis.sin() - cos, sin = rearrange(cos, 'n d -> 1 1 n d'), rearrange(sin, 'n d -> 1 1 n d') - q_ = (q_ * cos) + (rotate_half(q_) * sin) - k_ = (k_ * cos) + (rotate_half(k_) * sin) - - return q_.type_as(q), k_.type_as(k) - - -class Attention(nn.Module): - def __init__( - self, - dim: int, - num_heads: int, - enable_flashattn3: bool = False, - enable_flashattn2: bool = False, - enable_xformers: bool = False, - enable_bsa: bool = False, - bsa_params: dict = None, - cp_split_hw: Optional[List[int]] = None - ) -> None: - super().__init__() - assert dim % num_heads == 0, "dim should be divisible by num_heads" - self.dim = dim - self.num_heads = num_heads - self.head_dim = dim // num_heads - self.scale = self.head_dim**-0.5 - self.enable_flashattn3 = enable_flashattn3 - self.enable_flashattn2 = enable_flashattn2 - self.enable_xformers = enable_xformers - self.enable_bsa = enable_bsa - self.bsa_params = bsa_params - self.cp_split_hw = cp_split_hw - - self.qkv = nn.Linear(dim, dim * 3, bias=True) - self.q_norm = RMSNorm_FP32(self.head_dim, eps=1e-6) - self.k_norm = RMSNorm_FP32(self.head_dim, eps=1e-6) - self.proj = nn.Linear(dim, dim) - - self.rope_3d = RotaryPositionalEmbedding( - self.head_dim, - cp_split_hw=cp_split_hw - ) - - def _process_attn(self, q, k, v, shape): - q = rearrange(q, "B H S D -> B S (H D)") - k = rearrange(k, "B H S D -> B S (H D)") - v = rearrange(v, "B H S D -> B S (H D)") - x = flash_attention(q, k, v, num_heads=self.num_heads) - x = rearrange(x, "B S (H D) -> B H S D", H=self.num_heads) - return x - - def forward(self, x: torch.Tensor, shape=None, num_cond_latents=None, return_kv=False) -> torch.Tensor: - """ - """ - B, N, C = x.shape - qkv = self.qkv(x) - - qkv_shape = (B, N, 3, self.num_heads, 
self.head_dim) - qkv = qkv.view(qkv_shape).permute((2, 0, 3, 1, 4)) # [3, B, H, N, D] - q, k, v = qkv.unbind(0) - q, k = self.q_norm(q), self.k_norm(k) - - if return_kv: - k_cache, v_cache = k.clone(), v.clone() - - q, k = self.rope_3d(q, k, shape) - - # cond mode - if num_cond_latents is not None and num_cond_latents > 0: - num_cond_latents_thw = num_cond_latents * (N // shape[0]) - # process the condition tokens - q_cond = q[:, :, :num_cond_latents_thw].contiguous() - k_cond = k[:, :, :num_cond_latents_thw].contiguous() - v_cond = v[:, :, :num_cond_latents_thw].contiguous() - x_cond = self._process_attn(q_cond, k_cond, v_cond, shape) - # process the noise tokens - q_noise = q[:, :, num_cond_latents_thw:].contiguous() - x_noise = self._process_attn(q_noise, k, v, shape) - # merge x_cond and x_noise - x = torch.cat([x_cond, x_noise], dim=2).contiguous() - else: - x = self._process_attn(q, k, v, shape) - - x_output_shape = (B, N, C) - x = x.transpose(1, 2) # [B, H, N, D] --> [B, N, H, D] - x = x.reshape(x_output_shape) # [B, N, H, D] --> [B, N, C] - x = self.proj(x) - - if return_kv: - return x, (k_cache, v_cache) - else: - return x - - def forward_with_kv_cache(self, x: torch.Tensor, shape=None, num_cond_latents=None, kv_cache=None) -> torch.Tensor: - """ - """ - B, N, C = x.shape - qkv = self.qkv(x) - - qkv_shape = (B, N, 3, self.num_heads, self.head_dim) - qkv = qkv.view(qkv_shape).permute((2, 0, 3, 1, 4)) # [3, B, H, N, D] - q, k, v = qkv.unbind(0) - q, k = self.q_norm(q), self.k_norm(k) - - T, H, W = shape - k_cache, v_cache = kv_cache - assert k_cache.shape[0] == v_cache.shape[0] and k_cache.shape[0] in [1, B] - if k_cache.shape[0] == 1: - k_cache = k_cache.repeat(B, 1, 1, 1) - v_cache = v_cache.repeat(B, 1, 1, 1) - - if num_cond_latents is not None and num_cond_latents > 0: - k_full = torch.cat([k_cache, k], dim=2).contiguous() - v_full = torch.cat([v_cache, v], dim=2).contiguous() - q_padding = torch.cat([torch.empty_like(k_cache), q], dim=2).contiguous() - 
q_padding, k_full = self.rope_3d(q_padding, k_full, (T + num_cond_latents, H, W)) - q = q_padding[:, :, -N:].contiguous() - - x = self._process_attn(q, k_full, v_full, shape) - - x_output_shape = (B, N, C) - x = x.transpose(1, 2) # [B, H, N, D] --> [B, N, H, D] - x = x.reshape(x_output_shape) # [B, N, H, D] --> [B, N, C] - x = self.proj(x) - - return x - - -class MultiHeadCrossAttention(nn.Module): - def __init__( - self, - dim, - num_heads, - enable_flashattn3=False, - enable_flashattn2=False, - enable_xformers=False, - ): - super(MultiHeadCrossAttention, self).__init__() - assert dim % num_heads == 0, "d_model must be divisible by num_heads" - - self.dim = dim - self.num_heads = num_heads - self.head_dim = dim // num_heads - - self.q_linear = nn.Linear(dim, dim) - self.kv_linear = nn.Linear(dim, dim * 2) - self.proj = nn.Linear(dim, dim) - - self.q_norm = RMSNorm_FP32(self.head_dim, eps=1e-6) - self.k_norm = RMSNorm_FP32(self.head_dim, eps=1e-6) - - self.enable_flashattn3 = enable_flashattn3 - self.enable_flashattn2 = enable_flashattn2 - self.enable_xformers = enable_xformers - - def _process_cross_attn(self, x, cond, kv_seqlen): - B, N, C = x.shape - assert C == self.dim and cond.shape[2] == self.dim - - q = self.q_linear(x).view(1, -1, self.num_heads, self.head_dim) - kv = self.kv_linear(cond).view(1, -1, 2, self.num_heads, self.head_dim) - k, v = kv.unbind(2) - - q, k = self.q_norm(q), self.k_norm(k) - - q = rearrange(q, "B S H D -> B S (H D)") - k = rearrange(k, "B S H D -> B S (H D)") - v = rearrange(v, "B S H D -> B S (H D)") - x = flash_attention(q, k, v, num_heads=self.num_heads) - - x = x.view(B, -1, C) - x = self.proj(x) - return x - - def forward(self, x, cond, kv_seqlen, num_cond_latents=None, shape=None): - """ - x: [B, N, C] - cond: [B, M, C] - """ - if num_cond_latents is None or num_cond_latents == 0: - return self._process_cross_attn(x, cond, kv_seqlen) - else: - B, N, C = x.shape - if num_cond_latents is not None and num_cond_latents > 0: - 
assert shape is not None, "SHOULD pass in the shape" - num_cond_latents_thw = num_cond_latents * (N // shape[0]) - x_noise = x[:, num_cond_latents_thw:] # [B, N_noise, C] - output_noise = self._process_cross_attn(x_noise, cond, kv_seqlen) # [B, N_noise, C] - output = torch.cat([ - torch.zeros((B, num_cond_latents_thw, C), dtype=output_noise.dtype, device=output_noise.device), - output_noise - ], dim=1).contiguous() - else: - raise NotImplementedError - - return output - - -class LayerNorm_FP32(nn.LayerNorm): - def __init__(self, dim, eps, elementwise_affine): - super().__init__(dim, eps=eps, elementwise_affine=elementwise_affine) - - def forward(self, inputs: torch.Tensor) -> torch.Tensor: - origin_dtype = inputs.dtype - out = F.layer_norm( - inputs.float(), - self.normalized_shape, - None if self.weight is None else self.weight.float(), - None if self.bias is None else self.bias.float() , - self.eps - ).to(origin_dtype) - return out - - -def modulate_fp32(norm_func, x, shift, scale): - # Suppose x is (B, N, D), shift is (B, -1, D), scale is (B, -1, D) - # ensure the modulation params be fp32 - assert shift.dtype == torch.float32, scale.dtype == torch.float32 - dtype = x.dtype - x = norm_func(x.to(torch.float32)) - x = x * (scale + 1) + shift - x = x.to(dtype) - return x - - -class FinalLayer_FP32(nn.Module): - """ - The final layer of DiT. 
- """ - - def __init__(self, hidden_size, num_patch, out_channels, adaln_tembed_dim): - super().__init__() - self.hidden_size = hidden_size - self.num_patch = num_patch - self.out_channels = out_channels - self.adaln_tembed_dim = adaln_tembed_dim - - self.norm_final = LayerNorm_FP32(hidden_size, elementwise_affine=False, eps=1e-6) - self.linear = nn.Linear(hidden_size, num_patch * out_channels, bias=True) - self.adaLN_modulation = nn.Sequential(nn.SiLU(), nn.Linear(adaln_tembed_dim, 2 * hidden_size, bias=True)) - - def forward(self, x, t, latent_shape): - # timestep shape: [B, T, C] - assert t.dtype == torch.float32 - B, N, C = x.shape - T, _, _ = latent_shape - - with amp.autocast('cuda', dtype=torch.float32): - shift, scale = self.adaLN_modulation(t).unsqueeze(2).chunk(2, dim=-1) # [B, T, 1, C] - x = modulate_fp32(self.norm_final, x.view(B, T, -1, C), shift, scale).view(B, N, C) - x = self.linear(x) - return x - - -class FeedForwardSwiGLU(nn.Module): - def __init__( - self, - dim: int, - hidden_dim: int, - multiple_of: int = 256, - ffn_dim_multiplier: Optional[float] = None, - ): - super().__init__() - hidden_dim = int(2 * hidden_dim / 3) - # custom dim factor multiplier - if ffn_dim_multiplier is not None: - hidden_dim = int(ffn_dim_multiplier * hidden_dim) - hidden_dim = multiple_of * ((hidden_dim + multiple_of - 1) // multiple_of) - - self.dim = dim - self.hidden_dim = hidden_dim - self.w1 = nn.Linear(dim, hidden_dim, bias=False) - self.w2 = nn.Linear(hidden_dim, dim, bias=False) - self.w3 = nn.Linear(dim, hidden_dim, bias=False) - - def forward(self, x): - return self.w2(F.silu(self.w1(x)) * self.w3(x)) - - -class TimestepEmbedder(nn.Module): - """ - Embeds scalar timesteps into vector representations. 
- """ - - def __init__(self, t_embed_dim, frequency_embedding_size=256): - super().__init__() - self.t_embed_dim = t_embed_dim - self.frequency_embedding_size = frequency_embedding_size - self.mlp = nn.Sequential( - nn.Linear(frequency_embedding_size, t_embed_dim, bias=True), - nn.SiLU(), - nn.Linear(t_embed_dim, t_embed_dim, bias=True), - ) - - @staticmethod - def timestep_embedding(t, dim, max_period=10000): - """ - Create sinusoidal timestep embeddings. - :param t: a 1-D Tensor of N indices, one per batch element. - These may be fractional. - :param dim: the dimension of the output. - :param max_period: controls the minimum frequency of the embeddings. - :return: an (N, D) Tensor of positional embeddings. - """ - half = dim // 2 - freqs = torch.exp(-math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half) - freqs = freqs.to(device=t.device) - args = t[:, None].float() * freqs[None] - embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) - if dim % 2: - embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) - return embedding - - def forward(self, t, dtype): - t_freq = self.timestep_embedding(t, self.frequency_embedding_size) - if t_freq.dtype != dtype: - t_freq = t_freq.to(dtype) - t_emb = self.mlp(t_freq) - return t_emb - - -class CaptionEmbedder(nn.Module): - """ - Embeds class labels into vector representations. - """ - - def __init__(self, in_channels, hidden_size): - super().__init__() - self.in_channels = in_channels - self.hidden_size = hidden_size - self.y_proj = nn.Sequential( - nn.Linear(in_channels, hidden_size, bias=True), - nn.GELU(approximate="tanh"), - nn.Linear(hidden_size, hidden_size, bias=True), - ) - - def forward(self, caption): - B, _, N, C = caption.shape - caption = self.y_proj(caption) - return caption - - -class PatchEmbed3D(nn.Module): - """Video to Patch Embedding. - - Args: - patch_size (int): Patch token size. Default: (2,4,4). 
- in_chans (int): Number of input video channels. Default: 3. - embed_dim (int): Number of linear projection output channels. Default: 96. - norm_layer (nn.Module, optional): Normalization layer. Default: None - """ - - def __init__( - self, - patch_size=(2, 4, 4), - in_chans=3, - embed_dim=96, - norm_layer=None, - flatten=True, - ): - super().__init__() - self.patch_size = patch_size - self.flatten = flatten - - self.in_chans = in_chans - self.embed_dim = embed_dim - - self.proj = nn.Conv3d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size) - if norm_layer is not None: - self.norm = norm_layer(embed_dim) - else: - self.norm = None - - def forward(self, x): - """Forward function.""" - # padding - _, _, D, H, W = x.size() - if W % self.patch_size[2] != 0: - x = F.pad(x, (0, self.patch_size[2] - W % self.patch_size[2])) - if H % self.patch_size[1] != 0: - x = F.pad(x, (0, 0, 0, self.patch_size[1] - H % self.patch_size[1])) - if D % self.patch_size[0] != 0: - x = F.pad(x, (0, 0, 0, 0, 0, self.patch_size[0] - D % self.patch_size[0])) - - B, C, T, H, W = x.shape - x = self.proj(x) # (B C T H W) - if self.norm is not None: - D, Wh, Ww = x.size(2), x.size(3), x.size(4) - x = x.flatten(2).transpose(1, 2) - x = self.norm(x) - x = x.transpose(1, 2).view(-1, self.embed_dim, D, Wh, Ww) - if self.flatten: - x = x.flatten(2).transpose(1, 2) # BCTHW -> BNC - return x - - -class LongCatSingleStreamBlock(nn.Module): - def __init__( - self, - hidden_size: int, - num_heads: int, - mlp_ratio: int, - adaln_tembed_dim: int, - enable_flashattn3: bool = False, - enable_flashattn2: bool = False, - enable_xformers: bool = False, - enable_bsa: bool = False, - bsa_params=None, - cp_split_hw=None - ): - super().__init__() - - self.hidden_size = hidden_size - - # scale and gate modulation - self.adaLN_modulation = nn.Sequential( - nn.SiLU(), - nn.Linear(adaln_tembed_dim, 6 * hidden_size, bias=True) - ) - - self.mod_norm_attn = LayerNorm_FP32(hidden_size, eps=1e-6, 
elementwise_affine=False) - self.mod_norm_ffn = LayerNorm_FP32(hidden_size, eps=1e-6, elementwise_affine=False) - self.pre_crs_attn_norm = LayerNorm_FP32(hidden_size, eps=1e-6, elementwise_affine=True) - - self.attn = Attention( - dim=hidden_size, - num_heads=num_heads, - enable_flashattn3=enable_flashattn3, - enable_flashattn2=enable_flashattn2, - enable_xformers=enable_xformers, - enable_bsa=enable_bsa, - bsa_params=bsa_params, - cp_split_hw=cp_split_hw - ) - self.cross_attn = MultiHeadCrossAttention( - dim=hidden_size, - num_heads=num_heads, - enable_flashattn3=enable_flashattn3, - enable_flashattn2=enable_flashattn2, - enable_xformers=enable_xformers, - ) - self.ffn = FeedForwardSwiGLU(dim=hidden_size, hidden_dim=int(hidden_size * mlp_ratio)) - - def forward(self, x, y, t, y_seqlen, latent_shape, num_cond_latents=None, return_kv=False, kv_cache=None, skip_crs_attn=False): - """ - x: [B, N, C] - y: [1, N_valid_tokens, C] - t: [B, T, C_t] - y_seqlen: [B]; type of a list - latent_shape: latent shape of a single item - """ - x_dtype = x.dtype - - B, N, C = x.shape - T, _, _ = latent_shape # S != T*H*W in case of CP split on H*W. 
- - # compute modulation params in fp32 - with amp.autocast(device_type='cuda', dtype=torch.float32): - shift_msa, scale_msa, gate_msa, \ - shift_mlp, scale_mlp, gate_mlp = \ - self.adaLN_modulation(t).unsqueeze(2).chunk(6, dim=-1) # [B, T, 1, C] - - # self attn with modulation - x_m = modulate_fp32(self.mod_norm_attn, x.view(B, T, -1, C), shift_msa, scale_msa).view(B, N, C) - - if kv_cache is not None: - kv_cache = (kv_cache[0].to(x.device), kv_cache[1].to(x.device)) - attn_outputs = self.attn.forward_with_kv_cache(x_m, shape=latent_shape, num_cond_latents=num_cond_latents, kv_cache=kv_cache) - else: - attn_outputs = self.attn(x_m, shape=latent_shape, num_cond_latents=num_cond_latents, return_kv=return_kv) - - if return_kv: - x_s, kv_cache = attn_outputs - else: - x_s = attn_outputs - - with amp.autocast(device_type='cuda', dtype=torch.float32): - x = x + (gate_msa * x_s.view(B, -1, N//T, C)).view(B, -1, C) # [B, N, C] - x = x.to(x_dtype) - - # cross attn - if not skip_crs_attn: - if kv_cache is not None: - num_cond_latents = None - x = x + self.cross_attn(self.pre_crs_attn_norm(x), y, y_seqlen, num_cond_latents=num_cond_latents, shape=latent_shape) - - # ffn with modulation - x_m = modulate_fp32(self.mod_norm_ffn, x.view(B, -1, N//T, C), shift_mlp, scale_mlp).view(B, -1, C) - x_s = self.ffn(x_m) - with amp.autocast(device_type='cuda', dtype=torch.float32): - x = x + (gate_mlp * x_s.view(B, -1, N//T, C)).view(B, -1, C) # [B, N, C] - x = x.to(x_dtype) - - if return_kv: - return x, kv_cache - else: - return x - - -class LongCatVideoTransformer3DModel(torch.nn.Module): - def __init__( - self, - in_channels: int = 16, - out_channels: int = 16, - hidden_size: int = 4096, - depth: int = 48, - num_heads: int = 32, - caption_channels: int = 4096, - mlp_ratio: int = 4, - adaln_tembed_dim: int = 512, - frequency_embedding_size: int = 256, - # default params - patch_size: Tuple[int] = (1, 2, 2), - # attention config - enable_flashattn3: bool = False, - enable_flashattn2: 
bool = True, - enable_xformers: bool = False, - enable_bsa: bool = False, - bsa_params: dict = {'sparsity': 0.9375, 'chunk_3d_shape_q': [4, 4, 4], 'chunk_3d_shape_k': [4, 4, 4]}, - cp_split_hw: Optional[List[int]] = [1, 1], - text_tokens_zero_pad: bool = True, - ) -> None: - super().__init__() - - self.patch_size = patch_size - self.in_channels = in_channels - self.out_channels = out_channels - self.cp_split_hw = cp_split_hw - - self.x_embedder = PatchEmbed3D(patch_size, in_channels, hidden_size) - self.t_embedder = TimestepEmbedder(t_embed_dim=adaln_tembed_dim, frequency_embedding_size=frequency_embedding_size) - self.y_embedder = CaptionEmbedder( - in_channels=caption_channels, - hidden_size=hidden_size, - ) - - self.blocks = nn.ModuleList( - [ - LongCatSingleStreamBlock( - hidden_size=hidden_size, - num_heads=num_heads, - mlp_ratio=mlp_ratio, - adaln_tembed_dim=adaln_tembed_dim, - enable_flashattn3=enable_flashattn3, - enable_flashattn2=enable_flashattn2, - enable_xformers=enable_xformers, - enable_bsa=enable_bsa, - bsa_params=bsa_params, - cp_split_hw=cp_split_hw - ) - for i in range(depth) - ] - ) - - self.final_layer = FinalLayer_FP32( - hidden_size, - np.prod(self.patch_size), - out_channels, - adaln_tembed_dim, - ) - - self.gradient_checkpointing = False - self.text_tokens_zero_pad = text_tokens_zero_pad - - self.lora_dict = {} - self.active_loras = [] - - def enable_loras(self, lora_key_list=[]): - self.disable_all_loras() - - module_loras = {} # {module_name: [lora1, lora2, ...]} - model_device = next(self.parameters()).device - model_dtype = next(self.parameters()).dtype - - for lora_key in lora_key_list: - if lora_key in self.lora_dict: - for lora in self.lora_dict[lora_key].loras: - lora.to(model_device, dtype=model_dtype, non_blocking=True) - module_name = lora.lora_name.replace("lora___lorahyphen___", "").replace("___lorahyphen___", ".") - if module_name not in module_loras: - module_loras[module_name] = [] - module_loras[module_name].append(lora) - 
self.active_loras.append(lora_key) - - for module_name, loras in module_loras.items(): - module = self._get_module_by_name(module_name) - if not hasattr(module, 'org_forward'): - module.org_forward = module.forward - module.forward = self._create_multi_lora_forward(module, loras) - - def _create_multi_lora_forward(self, module, loras): - def multi_lora_forward(x, *args, **kwargs): - weight_dtype = x.dtype - org_output = module.org_forward(x, *args, **kwargs) - - total_lora_output = 0 - for lora in loras: - if lora.use_lora: - lx = lora.lora_down(x.to(lora.lora_down.weight.dtype)) - lx = lora.lora_up(lx) - lora_output = lx.to(weight_dtype) * lora.multiplier * lora.alpha_scale - total_lora_output += lora_output - - return org_output + total_lora_output - - return multi_lora_forward - - def _get_module_by_name(self, module_name): - try: - module = self - for part in module_name.split('.'): - module = getattr(module, part) - return module - except AttributeError as e: - raise ValueError(f"Cannot find module: {module_name}, error: {e}") - - def disable_all_loras(self): - for name, module in self.named_modules(): - if hasattr(module, 'org_forward'): - module.forward = module.org_forward - delattr(module, 'org_forward') - - for lora_key, lora_network in self.lora_dict.items(): - for lora in lora_network.loras: - lora.to("cpu") - - self.active_loras.clear() - - def enable_bsa(self,): - for block in self.blocks: - block.attn.enable_bsa = True - - def disable_bsa(self,): - for block in self.blocks: - block.attn.enable_bsa = False - - def forward( - self, - hidden_states, - timestep, - encoder_hidden_states, - encoder_attention_mask=None, - num_cond_latents=0, - return_kv=False, - kv_cache_dict={}, - skip_crs_attn=False, - offload_kv_cache=False, - use_gradient_checkpointing=False, - use_gradient_checkpointing_offload=False, - ): - - B, _, T, H, W = hidden_states.shape - - N_t = T // self.patch_size[0] - N_h = H // self.patch_size[1] - N_w = W // self.patch_size[2] - - assert 
self.patch_size[0]==1, "Currently, 3D x_embedder should not compress the temporal dimension." - - # expand the shape of timestep from [B] to [B, T] - if len(timestep.shape) == 1: - timestep = timestep.unsqueeze(1).expand(-1, N_t).clone() # [B, T] - timestep[:, :num_cond_latents] = 0 - - dtype = hidden_states.dtype - hidden_states = hidden_states.to(dtype) - timestep = timestep.to(dtype) - encoder_hidden_states = encoder_hidden_states.to(dtype) - - hidden_states = self.x_embedder(hidden_states) # [B, N, C] - - with amp.autocast(device_type='cuda', dtype=torch.float32): - t = self.t_embedder(timestep.float().flatten(), dtype=torch.float32).reshape(B, N_t, -1) # [B, T, C_t] - - encoder_hidden_states = self.y_embedder(encoder_hidden_states) # [B, 1, N_token, C] - - if self.text_tokens_zero_pad and encoder_attention_mask is not None: - encoder_hidden_states = encoder_hidden_states * encoder_attention_mask[:, None, :, None] - encoder_attention_mask = (encoder_attention_mask * 0 + 1).to(encoder_attention_mask.dtype) - - if encoder_attention_mask is not None: - encoder_attention_mask = encoder_attention_mask.squeeze(1).squeeze(1) - encoder_hidden_states = encoder_hidden_states.squeeze(1).masked_select(encoder_attention_mask.unsqueeze(-1) != 0).view(1, -1, hidden_states.shape[-1]) # [1, N_valid_tokens, C] - y_seqlens = encoder_attention_mask.sum(dim=1).tolist() # [B] - else: - y_seqlens = [encoder_hidden_states.shape[2]] * encoder_hidden_states.shape[0] - encoder_hidden_states = encoder_hidden_states.squeeze(1).view(1, -1, hidden_states.shape[-1]) - - # if self.cp_split_hw[0] * self.cp_split_hw[1] > 1: - # hidden_states = rearrange(hidden_states, "B (T H W) C -> B T H W C", T=N_t, H=N_h, W=N_w) - # hidden_states = context_parallel_util.split_cp_2d(hidden_states, seq_dim_hw=(2, 3), split_hw=self.cp_split_hw) - # hidden_states = rearrange(hidden_states, "B T H W C -> B (T H W) C") - - # blocks - kv_cache_dict_ret = {} - for i, block in enumerate(self.blocks): - block_outputs 
= gradient_checkpoint_forward( - block, - use_gradient_checkpointing=use_gradient_checkpointing, - use_gradient_checkpointing_offload=use_gradient_checkpointing_offload, - x=hidden_states, - y=encoder_hidden_states, - t=t, - y_seqlen=y_seqlens, - latent_shape=(N_t, N_h, N_w), - num_cond_latents=num_cond_latents, - return_kv=return_kv, - kv_cache=kv_cache_dict.get(i, None), - skip_crs_attn=skip_crs_attn, - ) - - if return_kv: - hidden_states, kv_cache = block_outputs - if offload_kv_cache: - kv_cache_dict_ret[i] = (kv_cache[0].cpu(), kv_cache[1].cpu()) - else: - kv_cache_dict_ret[i] = (kv_cache[0].contiguous(), kv_cache[1].contiguous()) - else: - hidden_states = block_outputs - - hidden_states = self.final_layer(hidden_states, t, (N_t, N_h, N_w)) # [B, N, C=T_p*H_p*W_p*C_out] - - # if self.cp_split_hw[0] * self.cp_split_hw[1] > 1: - # hidden_states = context_parallel_util.gather_cp_2d(hidden_states, shape=(N_t, N_h, N_w), split_hw=self.cp_split_hw) - - hidden_states = self.unpatchify(hidden_states, N_t, N_h, N_w) # [B, C_out, H, W] - - # cast to float32 for better accuracy - hidden_states = hidden_states.to(torch.float32) - - if return_kv: - return hidden_states, kv_cache_dict_ret - else: - return hidden_states - - - def unpatchify(self, x, N_t, N_h, N_w): - """ - Args: - x (torch.Tensor): of shape [B, N, C] - - Return: - x (torch.Tensor): of shape [B, C_out, T, H, W] - """ - T_p, H_p, W_p = self.patch_size - x = rearrange( - x, - "B (N_t N_h N_w) (T_p H_p W_p C_out) -> B C_out (N_t T_p) (N_h H_p) (N_w W_p)", - N_t=N_t, - N_h=N_h, - N_w=N_w, - T_p=T_p, - H_p=H_p, - W_p=W_p, - C_out=self.out_channels, - ) - return x - - @staticmethod - def state_dict_converter(): - return LongCatVideoTransformer3DModelDictConverter() - - -class LongCatVideoTransformer3DModelDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - return state_dict - - def from_civitai(self, state_dict): - return state_dict - diff --git a/diffsynth/models/lora.py 
b/diffsynth/models/lora.py deleted file mode 100644 index 0278bb1..0000000 --- a/diffsynth/models/lora.py +++ /dev/null @@ -1,402 +0,0 @@ -import torch -from .sd_unet import SDUNet -from .sdxl_unet import SDXLUNet -from .sd_text_encoder import SDTextEncoder -from .sdxl_text_encoder import SDXLTextEncoder, SDXLTextEncoder2 -from .sd3_dit import SD3DiT -from .flux_dit import FluxDiT -from .hunyuan_dit import HunyuanDiT -from .cog_dit import CogDiT -from .hunyuan_video_dit import HunyuanVideoDiT -from .wan_video_dit import WanModel - - - -class LoRAFromCivitai: - def __init__(self): - self.supported_model_classes = [] - self.lora_prefix = [] - self.renamed_lora_prefix = {} - self.special_keys = {} - - - def convert_state_dict(self, state_dict, lora_prefix="lora_unet_", alpha=1.0): - for key in state_dict: - if ".lora_up" in key: - return self.convert_state_dict_up_down(state_dict, lora_prefix, alpha) - return self.convert_state_dict_AB(state_dict, lora_prefix, alpha) - - - def convert_state_dict_up_down(self, state_dict, lora_prefix="lora_unet_", alpha=1.0): - renamed_lora_prefix = self.renamed_lora_prefix.get(lora_prefix, "") - state_dict_ = {} - for key in state_dict: - if ".lora_up" not in key: - continue - if not key.startswith(lora_prefix): - continue - weight_up = state_dict[key].to(device="cuda", dtype=torch.float16) - weight_down = state_dict[key.replace(".lora_up", ".lora_down")].to(device="cuda", dtype=torch.float16) - if len(weight_up.shape) == 4: - weight_up = weight_up.squeeze(3).squeeze(2).to(torch.float32) - weight_down = weight_down.squeeze(3).squeeze(2).to(torch.float32) - lora_weight = alpha * torch.mm(weight_up, weight_down).unsqueeze(2).unsqueeze(3) - else: - lora_weight = alpha * torch.mm(weight_up, weight_down) - target_name = key.split(".")[0].replace(lora_prefix, renamed_lora_prefix).replace("_", ".") + ".weight" - for special_key in self.special_keys: - target_name = target_name.replace(special_key, self.special_keys[special_key]) - 
state_dict_[target_name] = lora_weight.cpu() - return state_dict_ - - - def convert_state_dict_AB(self, state_dict, lora_prefix="", alpha=1.0, device="cuda", torch_dtype=torch.float16): - state_dict_ = {} - for key in state_dict: - if ".lora_B." not in key: - continue - if not key.startswith(lora_prefix): - continue - weight_up = state_dict[key].to(device=device, dtype=torch_dtype) - weight_down = state_dict[key.replace(".lora_B.", ".lora_A.")].to(device=device, dtype=torch_dtype) - if len(weight_up.shape) == 4: - weight_up = weight_up.squeeze(3).squeeze(2) - weight_down = weight_down.squeeze(3).squeeze(2) - lora_weight = alpha * torch.mm(weight_up, weight_down).unsqueeze(2).unsqueeze(3) - else: - lora_weight = alpha * torch.mm(weight_up, weight_down) - keys = key.split(".") - keys.pop(keys.index("lora_B")) - target_name = ".".join(keys) - target_name = target_name[len(lora_prefix):] - state_dict_[target_name] = lora_weight.cpu() - return state_dict_ - - - def load(self, model, state_dict_lora, lora_prefix, alpha=1.0, model_resource=None): - state_dict_model = model.state_dict() - state_dict_lora = self.convert_state_dict(state_dict_lora, lora_prefix=lora_prefix, alpha=alpha) - if model_resource == "diffusers": - state_dict_lora = model.__class__.state_dict_converter().from_diffusers(state_dict_lora) - elif model_resource == "civitai": - state_dict_lora = model.__class__.state_dict_converter().from_civitai(state_dict_lora) - if isinstance(state_dict_lora, tuple): - state_dict_lora = state_dict_lora[0] - if len(state_dict_lora) > 0: - print(f" {len(state_dict_lora)} tensors are updated.") - for name in state_dict_lora: - fp8=False - if state_dict_model[name].dtype == torch.float8_e4m3fn: - state_dict_model[name]= state_dict_model[name].to(state_dict_lora[name].dtype) - fp8=True - state_dict_model[name] += state_dict_lora[name].to( - dtype=state_dict_model[name].dtype, device=state_dict_model[name].device) - if fp8: - state_dict_model[name] = 
state_dict_model[name].to(torch.float8_e4m3fn) - model.load_state_dict(state_dict_model) - - - def match(self, model, state_dict_lora): - for lora_prefix, model_class in zip(self.lora_prefix, self.supported_model_classes): - if not isinstance(model, model_class): - continue - state_dict_model = model.state_dict() - for model_resource in ["diffusers", "civitai"]: - try: - state_dict_lora_ = self.convert_state_dict(state_dict_lora, lora_prefix=lora_prefix, alpha=1.0) - converter_fn = model.__class__.state_dict_converter().from_diffusers if model_resource == "diffusers" \ - else model.__class__.state_dict_converter().from_civitai - state_dict_lora_ = converter_fn(state_dict_lora_) - if isinstance(state_dict_lora_, tuple): - state_dict_lora_ = state_dict_lora_[0] - if len(state_dict_lora_) == 0: - continue - for name in state_dict_lora_: - if name not in state_dict_model: - break - else: - return lora_prefix, model_resource - except: - pass - return None - - - -class SDLoRAFromCivitai(LoRAFromCivitai): - def __init__(self): - super().__init__() - self.supported_model_classes = [SDUNet, SDTextEncoder] - self.lora_prefix = ["lora_unet_", "lora_te_"] - self.special_keys = { - "down.blocks": "down_blocks", - "up.blocks": "up_blocks", - "mid.block": "mid_block", - "proj.in": "proj_in", - "proj.out": "proj_out", - "transformer.blocks": "transformer_blocks", - "to.q": "to_q", - "to.k": "to_k", - "to.v": "to_v", - "to.out": "to_out", - "text.model": "text_model", - "self.attn.q.proj": "self_attn.q_proj", - "self.attn.k.proj": "self_attn.k_proj", - "self.attn.v.proj": "self_attn.v_proj", - "self.attn.out.proj": "self_attn.out_proj", - "input.blocks": "model.diffusion_model.input_blocks", - "middle.block": "model.diffusion_model.middle_block", - "output.blocks": "model.diffusion_model.output_blocks", - } - - -class SDXLLoRAFromCivitai(LoRAFromCivitai): - def __init__(self): - super().__init__() - self.supported_model_classes = [SDXLUNet, SDXLTextEncoder, SDXLTextEncoder2] - 
self.lora_prefix = ["lora_unet_", "lora_te1_", "lora_te2_"] - self.renamed_lora_prefix = {"lora_te2_": "2"} - self.special_keys = { - "down.blocks": "down_blocks", - "up.blocks": "up_blocks", - "mid.block": "mid_block", - "proj.in": "proj_in", - "proj.out": "proj_out", - "transformer.blocks": "transformer_blocks", - "to.q": "to_q", - "to.k": "to_k", - "to.v": "to_v", - "to.out": "to_out", - "text.model": "conditioner.embedders.0.transformer.text_model", - "self.attn.q.proj": "self_attn.q_proj", - "self.attn.k.proj": "self_attn.k_proj", - "self.attn.v.proj": "self_attn.v_proj", - "self.attn.out.proj": "self_attn.out_proj", - "input.blocks": "model.diffusion_model.input_blocks", - "middle.block": "model.diffusion_model.middle_block", - "output.blocks": "model.diffusion_model.output_blocks", - "2conditioner.embedders.0.transformer.text_model.encoder.layers": "text_model.encoder.layers" - } - - -class FluxLoRAFromCivitai(LoRAFromCivitai): - def __init__(self): - super().__init__() - self.supported_model_classes = [FluxDiT, FluxDiT] - self.lora_prefix = ["lora_unet_", "transformer."] - self.renamed_lora_prefix = {} - self.special_keys = { - "single.blocks": "single_blocks", - "double.blocks": "double_blocks", - "img.attn": "img_attn", - "img.mlp": "img_mlp", - "img.mod": "img_mod", - "txt.attn": "txt_attn", - "txt.mlp": "txt_mlp", - "txt.mod": "txt_mod", - } - - - -class GeneralLoRAFromPeft: - def __init__(self): - self.supported_model_classes = [SDUNet, SDXLUNet, SD3DiT, HunyuanDiT, FluxDiT, CogDiT, WanModel] - - - def get_name_dict(self, lora_state_dict): - lora_name_dict = {} - for key in lora_state_dict: - if ".lora_B." 
not in key: - continue - keys = key.split(".") - if len(keys) > keys.index("lora_B") + 2: - keys.pop(keys.index("lora_B") + 1) - keys.pop(keys.index("lora_B")) - if keys[0] == "diffusion_model": - keys.pop(0) - target_name = ".".join(keys) - lora_name_dict[target_name] = (key, key.replace(".lora_B.", ".lora_A.")) - return lora_name_dict - - - def match(self, model: torch.nn.Module, state_dict_lora): - lora_name_dict = self.get_name_dict(state_dict_lora) - model_name_dict = {name: None for name, _ in model.named_parameters()} - matched_num = sum([i in model_name_dict for i in lora_name_dict]) - if matched_num == len(lora_name_dict): - return "", "" - else: - return None - - - def fetch_device_and_dtype(self, state_dict): - device, dtype = None, None - for name, param in state_dict.items(): - device, dtype = param.device, param.dtype - break - computation_device = device - computation_dtype = dtype - if computation_device == torch.device("cpu"): - if torch.cuda.is_available(): - computation_device = torch.device("cuda") - if computation_dtype == torch.float8_e4m3fn: - computation_dtype = torch.float32 - return device, dtype, computation_device, computation_dtype - - - def load(self, model, state_dict_lora, lora_prefix="", alpha=1.0, model_resource=""): - state_dict_model = model.state_dict() - device, dtype, computation_device, computation_dtype = self.fetch_device_and_dtype(state_dict_model) - lora_name_dict = self.get_name_dict(state_dict_lora) - for name in lora_name_dict: - weight_up = state_dict_lora[lora_name_dict[name][0]].to(device=computation_device, dtype=computation_dtype) - weight_down = state_dict_lora[lora_name_dict[name][1]].to(device=computation_device, dtype=computation_dtype) - if len(weight_up.shape) == 4: - weight_up = weight_up.squeeze(3).squeeze(2) - weight_down = weight_down.squeeze(3).squeeze(2) - weight_lora = alpha * torch.mm(weight_up, weight_down).unsqueeze(2).unsqueeze(3) - else: - weight_lora = alpha * torch.mm(weight_up, weight_down) - 
weight_model = state_dict_model[name].to(device=computation_device, dtype=computation_dtype) - weight_patched = weight_model + weight_lora - state_dict_model[name] = weight_patched.to(device=device, dtype=dtype) - print(f" {len(lora_name_dict)} tensors are updated.") - model.load_state_dict(state_dict_model) - - - -class HunyuanVideoLoRAFromCivitai(LoRAFromCivitai): - def __init__(self): - super().__init__() - self.supported_model_classes = [HunyuanVideoDiT, HunyuanVideoDiT] - self.lora_prefix = ["diffusion_model.", "transformer."] - self.special_keys = {} - - -class FluxLoRAConverter: - def __init__(self): - pass - - @staticmethod - def align_to_opensource_format(state_dict, alpha=None): - prefix_rename_dict = { - "single_blocks": "lora_unet_single_blocks", - "blocks": "lora_unet_double_blocks", - } - middle_rename_dict = { - "norm.linear": "modulation_lin", - "to_qkv_mlp": "linear1", - "proj_out": "linear2", - - "norm1_a.linear": "img_mod_lin", - "norm1_b.linear": "txt_mod_lin", - "attn.a_to_qkv": "img_attn_qkv", - "attn.b_to_qkv": "txt_attn_qkv", - "attn.a_to_out": "img_attn_proj", - "attn.b_to_out": "txt_attn_proj", - "ff_a.0": "img_mlp_0", - "ff_a.2": "img_mlp_2", - "ff_b.0": "txt_mlp_0", - "ff_b.2": "txt_mlp_2", - } - suffix_rename_dict = { - "lora_B.weight": "lora_up.weight", - "lora_A.weight": "lora_down.weight", - } - state_dict_ = {} - for name, param in state_dict.items(): - names = name.split(".") - if names[-2] != "lora_A" and names[-2] != "lora_B": - names.pop(-2) - prefix = names[0] - middle = ".".join(names[2:-2]) - suffix = ".".join(names[-2:]) - block_id = names[1] - if middle not in middle_rename_dict: - continue - rename = prefix_rename_dict[prefix] + "_" + block_id + "_" + middle_rename_dict[middle] + "." 
+ suffix_rename_dict[suffix] - state_dict_[rename] = param - if rename.endswith("lora_up.weight"): - lora_alpha = alpha if alpha is not None else param.shape[-1] - state_dict_[rename.replace("lora_up.weight", "alpha")] = torch.tensor((lora_alpha,))[0] - return state_dict_ - - @staticmethod - def align_to_diffsynth_format(state_dict): - rename_dict = { - "lora_unet_double_blocks_blockid_img_mod_lin.lora_down.weight": "blocks.blockid.norm1_a.linear.lora_A.default.weight", - "lora_unet_double_blocks_blockid_img_mod_lin.lora_up.weight": "blocks.blockid.norm1_a.linear.lora_B.default.weight", - "lora_unet_double_blocks_blockid_txt_mod_lin.lora_down.weight": "blocks.blockid.norm1_b.linear.lora_A.default.weight", - "lora_unet_double_blocks_blockid_txt_mod_lin.lora_up.weight": "blocks.blockid.norm1_b.linear.lora_B.default.weight", - "lora_unet_double_blocks_blockid_img_attn_qkv.lora_down.weight": "blocks.blockid.attn.a_to_qkv.lora_A.default.weight", - "lora_unet_double_blocks_blockid_img_attn_qkv.lora_up.weight": "blocks.blockid.attn.a_to_qkv.lora_B.default.weight", - "lora_unet_double_blocks_blockid_txt_attn_qkv.lora_down.weight": "blocks.blockid.attn.b_to_qkv.lora_A.default.weight", - "lora_unet_double_blocks_blockid_txt_attn_qkv.lora_up.weight": "blocks.blockid.attn.b_to_qkv.lora_B.default.weight", - "lora_unet_double_blocks_blockid_img_attn_proj.lora_down.weight": "blocks.blockid.attn.a_to_out.lora_A.default.weight", - "lora_unet_double_blocks_blockid_img_attn_proj.lora_up.weight": "blocks.blockid.attn.a_to_out.lora_B.default.weight", - "lora_unet_double_blocks_blockid_txt_attn_proj.lora_down.weight": "blocks.blockid.attn.b_to_out.lora_A.default.weight", - "lora_unet_double_blocks_blockid_txt_attn_proj.lora_up.weight": "blocks.blockid.attn.b_to_out.lora_B.default.weight", - "lora_unet_double_blocks_blockid_img_mlp_0.lora_down.weight": "blocks.blockid.ff_a.0.lora_A.default.weight", - "lora_unet_double_blocks_blockid_img_mlp_0.lora_up.weight": 
"blocks.blockid.ff_a.0.lora_B.default.weight", - "lora_unet_double_blocks_blockid_img_mlp_2.lora_down.weight": "blocks.blockid.ff_a.2.lora_A.default.weight", - "lora_unet_double_blocks_blockid_img_mlp_2.lora_up.weight": "blocks.blockid.ff_a.2.lora_B.default.weight", - "lora_unet_double_blocks_blockid_txt_mlp_0.lora_down.weight": "blocks.blockid.ff_b.0.lora_A.default.weight", - "lora_unet_double_blocks_blockid_txt_mlp_0.lora_up.weight": "blocks.blockid.ff_b.0.lora_B.default.weight", - "lora_unet_double_blocks_blockid_txt_mlp_2.lora_down.weight": "blocks.blockid.ff_b.2.lora_A.default.weight", - "lora_unet_double_blocks_blockid_txt_mlp_2.lora_up.weight": "blocks.blockid.ff_b.2.lora_B.default.weight", - "lora_unet_single_blocks_blockid_modulation_lin.lora_down.weight": "single_blocks.blockid.norm.linear.lora_A.default.weight", - "lora_unet_single_blocks_blockid_modulation_lin.lora_up.weight": "single_blocks.blockid.norm.linear.lora_B.default.weight", - "lora_unet_single_blocks_blockid_linear1.lora_down.weight": "single_blocks.blockid.to_qkv_mlp.lora_A.default.weight", - "lora_unet_single_blocks_blockid_linear1.lora_up.weight": "single_blocks.blockid.to_qkv_mlp.lora_B.default.weight", - "lora_unet_single_blocks_blockid_linear2.lora_down.weight": "single_blocks.blockid.proj_out.lora_A.default.weight", - "lora_unet_single_blocks_blockid_linear2.lora_up.weight": "single_blocks.blockid.proj_out.lora_B.default.weight", - } - def guess_block_id(name): - names = name.split("_") - for i in names: - if i.isdigit(): - return i, name.replace(f"_{i}_", "_blockid_") - return None, None - state_dict_ = {} - for name, param in state_dict.items(): - block_id, source_name = guess_block_id(name) - if source_name in rename_dict: - target_name = rename_dict[source_name] - target_name = target_name.replace(".blockid.", f".{block_id}.") - state_dict_[target_name] = param - else: - state_dict_[name] = param - return state_dict_ - - -class WanLoRAConverter: - def __init__(self): - pass - - 
@staticmethod - def align_to_opensource_format(state_dict, **kwargs): - state_dict = {"diffusion_model." + name.replace(".default.", "."): param for name, param in state_dict.items()} - return state_dict - - @staticmethod - def align_to_diffsynth_format(state_dict, **kwargs): - state_dict = {name.replace("diffusion_model.", "").replace(".lora_A.weight", ".lora_A.default.weight").replace(".lora_B.weight", ".lora_B.default.weight"): param for name, param in state_dict.items()} - return state_dict - - -class QwenImageLoRAConverter: - def __init__(self): - pass - - @staticmethod - def align_to_opensource_format(state_dict, **kwargs): - state_dict = {name.replace(".default.", "."): param for name, param in state_dict.items()} - return state_dict - - @staticmethod - def align_to_diffsynth_format(state_dict, **kwargs): - state_dict = {name.replace(".lora_A.weight", ".lora_A.default.weight").replace(".lora_B.weight", ".lora_B.default.weight"): param for name, param in state_dict.items()} - return state_dict - - -def get_lora_loaders(): - return [SDLoRAFromCivitai(), SDXLLoRAFromCivitai(), FluxLoRAFromCivitai(), HunyuanVideoLoRAFromCivitai(), GeneralLoRAFromPeft()] diff --git a/diffsynth/models/model_loader.py b/diffsynth/models/model_loader.py new file mode 100644 index 0000000..76d69c3 --- /dev/null +++ b/diffsynth/models/model_loader.py @@ -0,0 +1,89 @@ +from ..core.loader import load_model, hash_model_file +from ..core.vram import AutoWrappedModule +from ..configs import MODEL_CONFIGS, VRAM_MANAGEMENT_MODULE_MAPS +import importlib, json + + +class ModelPool: + def __init__(self): + self.model = [] + self.model_name = [] + self.model_path = [] + + def import_model_class(self, model_class): + split = model_class.rfind(".") + model_resource, model_class = model_class[:split], model_class[split+1:] + model_class = importlib.import_module(model_resource).__getattribute__(model_class) + return model_class + + def need_to_enable_vram_management(self, vram_config): + return 
vram_config["offload_dtype"] is not None and vram_config["offload_device"] is not None + + def fetch_module_map(self, model_class, vram_config): + if self.need_to_enable_vram_management(vram_config): + if model_class in VRAM_MANAGEMENT_MODULE_MAPS: + module_map = {self.import_model_class(source): self.import_model_class(target) for source, target in VRAM_MANAGEMENT_MODULE_MAPS[model_class].items()} + else: + module_map = {self.import_model_class(model_class): AutoWrappedModule} + else: + module_map = None + return module_map + + def load_model_file(self, config, path, vram_config, vram_limit=None): + model_class = self.import_model_class(config["model_class"]) + model_config = config.get("extra_kwargs", {}) + if "state_dict_converter" in config: + state_dict_converter = self.import_model_class(config["state_dict_converter"]) + else: + state_dict_converter = None + module_map = self.fetch_module_map(config["model_class"], vram_config) + model = load_model( + model_class, path, model_config, + vram_config["computation_dtype"], vram_config["computation_device"], + state_dict_converter, + use_disk_map=True, + vram_config=vram_config, module_map=module_map, vram_limit=vram_limit, + ) + return model + + def auto_load_model(self, path, vram_config, vram_limit=None): + print(f"Loading models from: {json.dumps(path, indent=4)}") + model_hash = hash_model_file(path) + loaded = False + for config in MODEL_CONFIGS: + if config["model_hash"] == model_hash: + model = self.load_model_file(config, path, vram_config, vram_limit=vram_limit) + self.model.append(model) + model_name = config["model_name"] + self.model_name.append(model_name) + self.model_path.append(path) + model_info = {"model_name": model_name, "model_class": config["model_class"], "extra_kwargs": config.get("extra_kwargs")} + print(f"Loaded model: {json.dumps(model_info, indent=4)}") + loaded = True + if not loaded: + raise ValueError(f"Cannot detect the model type. 
File: {path}") + + def fetch_model(self, model_name, index=None): + fetched_models = [] + fetched_model_paths = [] + for model, model_path, model_name_ in zip(self.model, self.model_path, self.model_name): + if model_name == model_name_: + fetched_models.append(model) + fetched_model_paths.append(model_path) + if len(fetched_models) == 0: + print(f"No {model_name} models available. This is not an error.") + model = None + elif len(fetched_models) == 1: + print(f"Using {model_name} from {json.dumps(fetched_model_paths[0], indent=4)}.") + model = fetched_models[0] + else: + if index is None: + model = fetched_models[0] + print(f"More than one {model_name} models are loaded: {fetched_model_paths}. Using {model_name} from {json.dumps(fetched_model_paths[0], indent=4)}.") + elif isinstance(index, int): + model = fetched_models[:index] + print(f"More than one {model_name} models are loaded: {fetched_model_paths}. Using {model_name} from {json.dumps(fetched_model_paths[:index], indent=4)}.") + else: + model = fetched_models + print(f"More than one {model_name} models are loaded: {fetched_model_paths}. 
Using {model_name} from {json.dumps(fetched_model_paths, indent=4)}.") + return model diff --git a/diffsynth/models/model_manager.py b/diffsynth/models/model_manager.py deleted file mode 100644 index d46eedf..0000000 --- a/diffsynth/models/model_manager.py +++ /dev/null @@ -1,467 +0,0 @@ -import os, torch, json, importlib -from typing import List - -from .downloader import download_models, download_customized_models, Preset_model_id, Preset_model_website - -from .sd_text_encoder import SDTextEncoder -from .sd_unet import SDUNet -from .sd_vae_encoder import SDVAEEncoder -from .sd_vae_decoder import SDVAEDecoder -from .lora import get_lora_loaders - -from .sdxl_text_encoder import SDXLTextEncoder, SDXLTextEncoder2 -from .sdxl_unet import SDXLUNet -from .sdxl_vae_decoder import SDXLVAEDecoder -from .sdxl_vae_encoder import SDXLVAEEncoder - -from .sd3_text_encoder import SD3TextEncoder1, SD3TextEncoder2, SD3TextEncoder3 -from .sd3_dit import SD3DiT -from .sd3_vae_decoder import SD3VAEDecoder -from .sd3_vae_encoder import SD3VAEEncoder - -from .sd_controlnet import SDControlNet -from .sdxl_controlnet import SDXLControlNetUnion - -from .sd_motion import SDMotionModel -from .sdxl_motion import SDXLMotionModel - -from .svd_image_encoder import SVDImageEncoder -from .svd_unet import SVDUNet -from .svd_vae_decoder import SVDVAEDecoder -from .svd_vae_encoder import SVDVAEEncoder - -from .sd_ipadapter import SDIpAdapter, IpAdapterCLIPImageEmbedder -from .sdxl_ipadapter import SDXLIpAdapter, IpAdapterXLCLIPImageEmbedder - -from .hunyuan_dit_text_encoder import HunyuanDiTCLIPTextEncoder, HunyuanDiTT5TextEncoder -from .hunyuan_dit import HunyuanDiT -from .hunyuan_video_vae_decoder import HunyuanVideoVAEDecoder -from .hunyuan_video_vae_encoder import HunyuanVideoVAEEncoder - -from .flux_dit import FluxDiT -from .flux_text_encoder import FluxTextEncoder2 -from .flux_vae import FluxVAEEncoder, FluxVAEDecoder -from .flux_ipadapter import FluxIpAdapter - -from .cog_vae import 
CogVAEEncoder, CogVAEDecoder -from .cog_dit import CogDiT - -from ..extensions.RIFE import IFNet -from ..extensions.ESRGAN import RRDBNet - -from ..configs.model_config import model_loader_configs, huggingface_model_loader_configs, patch_model_loader_configs -from .utils import load_state_dict, init_weights_on_device, hash_state_dict_keys, split_state_dict_with_prefix - - -def load_model_from_single_file(state_dict, model_names, model_classes, model_resource, torch_dtype, device): - loaded_model_names, loaded_models = [], [] - for model_name, model_class in zip(model_names, model_classes): - print(f" model_name: {model_name} model_class: {model_class.__name__}") - state_dict_converter = model_class.state_dict_converter() - if model_resource == "civitai": - state_dict_results = state_dict_converter.from_civitai(state_dict) - elif model_resource == "diffusers": - state_dict_results = state_dict_converter.from_diffusers(state_dict) - if isinstance(state_dict_results, tuple): - model_state_dict, extra_kwargs = state_dict_results - print(f" This model is initialized with extra kwargs: {extra_kwargs}") - else: - model_state_dict, extra_kwargs = state_dict_results, {} - torch_dtype = torch.float32 if extra_kwargs.get("upcast_to_float32", False) else torch_dtype - with init_weights_on_device(): - model = model_class(**extra_kwargs) - if hasattr(model, "eval"): - model = model.eval() - model.load_state_dict(model_state_dict, assign=True) - model = model.to(dtype=torch_dtype, device=device) - loaded_model_names.append(model_name) - loaded_models.append(model) - return loaded_model_names, loaded_models - - -def load_model_from_huggingface_folder(file_path, model_names, model_classes, torch_dtype, device): - loaded_model_names, loaded_models = [], [] - for model_name, model_class in zip(model_names, model_classes): - if torch_dtype in [torch.float32, torch.float16, torch.bfloat16]: - model = model_class.from_pretrained(file_path, torch_dtype=torch_dtype).eval() - else: - model 
= model_class.from_pretrained(file_path).eval().to(dtype=torch_dtype) - if torch_dtype == torch.float16 and hasattr(model, "half"): - model = model.half() - try: - model = model.to(device=device) - except: - pass - loaded_model_names.append(model_name) - loaded_models.append(model) - return loaded_model_names, loaded_models - - -def load_single_patch_model_from_single_file(state_dict, model_name, model_class, base_model, extra_kwargs, torch_dtype, device): - print(f" model_name: {model_name} model_class: {model_class.__name__} extra_kwargs: {extra_kwargs}") - base_state_dict = base_model.state_dict() - base_model.to("cpu") - del base_model - model = model_class(**extra_kwargs) - model.load_state_dict(base_state_dict, strict=False) - model.load_state_dict(state_dict, strict=False) - model.to(dtype=torch_dtype, device=device) - return model - - -def load_patch_model_from_single_file(state_dict, model_names, model_classes, extra_kwargs, model_manager, torch_dtype, device): - loaded_model_names, loaded_models = [], [] - for model_name, model_class in zip(model_names, model_classes): - while True: - for model_id in range(len(model_manager.model)): - base_model_name = model_manager.model_name[model_id] - if base_model_name == model_name: - base_model_path = model_manager.model_path[model_id] - base_model = model_manager.model[model_id] - print(f" Adding patch model to {base_model_name} ({base_model_path})") - patched_model = load_single_patch_model_from_single_file( - state_dict, model_name, model_class, base_model, extra_kwargs, torch_dtype, device) - loaded_model_names.append(base_model_name) - loaded_models.append(patched_model) - model_manager.model.pop(model_id) - model_manager.model_path.pop(model_id) - model_manager.model_name.pop(model_id) - break - else: - break - return loaded_model_names, loaded_models - - - -class ModelDetectorTemplate: - def __init__(self): - pass - - def match(self, file_path="", state_dict={}): - return False - - def load(self, 
file_path="", state_dict={}, device="cuda", torch_dtype=torch.float16, **kwargs): - return [], [] - - - -class ModelDetectorFromSingleFile: - def __init__(self, model_loader_configs=[]): - self.keys_hash_with_shape_dict = {} - self.keys_hash_dict = {} - for metadata in model_loader_configs: - self.add_model_metadata(*metadata) - - - def add_model_metadata(self, keys_hash, keys_hash_with_shape, model_names, model_classes, model_resource): - self.keys_hash_with_shape_dict[keys_hash_with_shape] = (model_names, model_classes, model_resource) - if keys_hash is not None: - self.keys_hash_dict[keys_hash] = (model_names, model_classes, model_resource) - - - def match(self, file_path="", state_dict={}): - if isinstance(file_path, str) and os.path.isdir(file_path): - return False - if len(state_dict) == 0: - state_dict = load_state_dict(file_path) - keys_hash_with_shape = hash_state_dict_keys(state_dict, with_shape=True) - if keys_hash_with_shape in self.keys_hash_with_shape_dict: - return True - keys_hash = hash_state_dict_keys(state_dict, with_shape=False) - if keys_hash in self.keys_hash_dict: - return True - return False - - - def load(self, file_path="", state_dict={}, device="cuda", torch_dtype=torch.float16, **kwargs): - if len(state_dict) == 0: - state_dict = load_state_dict(file_path) - - # Load models with strict matching - keys_hash_with_shape = hash_state_dict_keys(state_dict, with_shape=True) - if keys_hash_with_shape in self.keys_hash_with_shape_dict: - model_names, model_classes, model_resource = self.keys_hash_with_shape_dict[keys_hash_with_shape] - loaded_model_names, loaded_models = load_model_from_single_file(state_dict, model_names, model_classes, model_resource, torch_dtype, device) - return loaded_model_names, loaded_models - - # Load models without strict matching - # (the shape of parameters may be inconsistent, and the state_dict_converter will modify the model architecture) - keys_hash = hash_state_dict_keys(state_dict, with_shape=False) - if 
keys_hash in self.keys_hash_dict: - model_names, model_classes, model_resource = self.keys_hash_dict[keys_hash] - loaded_model_names, loaded_models = load_model_from_single_file(state_dict, model_names, model_classes, model_resource, torch_dtype, device) - return loaded_model_names, loaded_models - - return loaded_model_names, loaded_models - - - -class ModelDetectorFromSplitedSingleFile(ModelDetectorFromSingleFile): - def __init__(self, model_loader_configs=[]): - super().__init__(model_loader_configs) - - - def match(self, file_path="", state_dict={}): - if isinstance(file_path, str) and os.path.isdir(file_path): - return False - if len(state_dict) == 0: - state_dict = load_state_dict(file_path) - splited_state_dict = split_state_dict_with_prefix(state_dict) - for sub_state_dict in splited_state_dict: - if super().match(file_path, sub_state_dict): - return True - return False - - - def load(self, file_path="", state_dict={}, device="cuda", torch_dtype=torch.float16, **kwargs): - # Split the state_dict and load from each component - splited_state_dict = split_state_dict_with_prefix(state_dict) - valid_state_dict = {} - for sub_state_dict in splited_state_dict: - if super().match(file_path, sub_state_dict): - valid_state_dict.update(sub_state_dict) - if super().match(file_path, valid_state_dict): - loaded_model_names, loaded_models = super().load(file_path, valid_state_dict, device, torch_dtype) - else: - loaded_model_names, loaded_models = [], [] - for sub_state_dict in splited_state_dict: - if super().match(file_path, sub_state_dict): - loaded_model_names_, loaded_models_ = super().load(file_path, valid_state_dict, device, torch_dtype) - loaded_model_names += loaded_model_names_ - loaded_models += loaded_models_ - return loaded_model_names, loaded_models - - - -class ModelDetectorFromHuggingfaceFolder: - def __init__(self, model_loader_configs=[]): - self.architecture_dict = {} - for metadata in model_loader_configs: - self.add_model_metadata(*metadata) - - - def 
add_model_metadata(self, architecture, huggingface_lib, model_name, redirected_architecture): - self.architecture_dict[architecture] = (huggingface_lib, model_name, redirected_architecture) - - - def match(self, file_path="", state_dict={}): - if not isinstance(file_path, str) or os.path.isfile(file_path): - return False - file_list = os.listdir(file_path) - if "config.json" not in file_list: - return False - with open(os.path.join(file_path, "config.json"), "r") as f: - config = json.load(f) - if "architectures" not in config and "_class_name" not in config: - return False - return True - - - def load(self, file_path="", state_dict={}, device="cuda", torch_dtype=torch.float16, **kwargs): - with open(os.path.join(file_path, "config.json"), "r") as f: - config = json.load(f) - loaded_model_names, loaded_models = [], [] - architectures = config["architectures"] if "architectures" in config else [config["_class_name"]] - for architecture in architectures: - huggingface_lib, model_name, redirected_architecture = self.architecture_dict[architecture] - if redirected_architecture is not None: - architecture = redirected_architecture - model_class = importlib.import_module(huggingface_lib).__getattribute__(architecture) - loaded_model_names_, loaded_models_ = load_model_from_huggingface_folder(file_path, [model_name], [model_class], torch_dtype, device) - loaded_model_names += loaded_model_names_ - loaded_models += loaded_models_ - return loaded_model_names, loaded_models - - - -class ModelDetectorFromPatchedSingleFile: - def __init__(self, model_loader_configs=[]): - self.keys_hash_with_shape_dict = {} - for metadata in model_loader_configs: - self.add_model_metadata(*metadata) - - - def add_model_metadata(self, keys_hash_with_shape, model_name, model_class, extra_kwargs): - self.keys_hash_with_shape_dict[keys_hash_with_shape] = (model_name, model_class, extra_kwargs) - - - def match(self, file_path="", state_dict={}): - if not isinstance(file_path, str) or 
os.path.isdir(file_path): - return False - if len(state_dict) == 0: - state_dict = load_state_dict(file_path) - keys_hash_with_shape = hash_state_dict_keys(state_dict, with_shape=True) - if keys_hash_with_shape in self.keys_hash_with_shape_dict: - return True - return False - - - def load(self, file_path="", state_dict={}, device="cuda", torch_dtype=torch.float16, model_manager=None, **kwargs): - if len(state_dict) == 0: - state_dict = load_state_dict(file_path) - - # Load models with strict matching - loaded_model_names, loaded_models = [], [] - keys_hash_with_shape = hash_state_dict_keys(state_dict, with_shape=True) - if keys_hash_with_shape in self.keys_hash_with_shape_dict: - model_names, model_classes, extra_kwargs = self.keys_hash_with_shape_dict[keys_hash_with_shape] - loaded_model_names_, loaded_models_ = load_patch_model_from_single_file( - state_dict, model_names, model_classes, extra_kwargs, model_manager, torch_dtype, device) - loaded_model_names += loaded_model_names_ - loaded_models += loaded_models_ - return loaded_model_names, loaded_models - - - -class ModelManager: - def __init__( - self, - torch_dtype=torch.float16, - device="cuda", - model_id_list: List[Preset_model_id] = [], - downloading_priority: List[Preset_model_website] = ["ModelScope", "HuggingFace"], - file_path_list: List[str] = [], - ): - self.torch_dtype = torch_dtype - self.device = device - self.model = [] - self.model_path = [] - self.model_name = [] - downloaded_files = download_models(model_id_list, downloading_priority) if len(model_id_list) > 0 else [] - self.model_detector = [ - ModelDetectorFromSingleFile(model_loader_configs), - ModelDetectorFromSplitedSingleFile(model_loader_configs), - ModelDetectorFromHuggingfaceFolder(huggingface_model_loader_configs), - ModelDetectorFromPatchedSingleFile(patch_model_loader_configs), - ] - self.load_models(downloaded_files + file_path_list) - - - def load_model_from_single_file(self, file_path="", state_dict={}, model_names=[], 
model_classes=[], model_resource=None): - print(f"Loading models from file: {file_path}") - if len(state_dict) == 0: - state_dict = load_state_dict(file_path) - model_names, models = load_model_from_single_file(state_dict, model_names, model_classes, model_resource, self.torch_dtype, self.device) - for model_name, model in zip(model_names, models): - self.model.append(model) - self.model_path.append(file_path) - self.model_name.append(model_name) - print(f" The following models are loaded: {model_names}.") - - - def load_model_from_huggingface_folder(self, file_path="", model_names=[], model_classes=[]): - print(f"Loading models from folder: {file_path}") - model_names, models = load_model_from_huggingface_folder(file_path, model_names, model_classes, self.torch_dtype, self.device) - for model_name, model in zip(model_names, models): - self.model.append(model) - self.model_path.append(file_path) - self.model_name.append(model_name) - print(f" The following models are loaded: {model_names}.") - - - def load_patch_model_from_single_file(self, file_path="", state_dict={}, model_names=[], model_classes=[], extra_kwargs={}): - print(f"Loading patch models from file: {file_path}") - model_names, models = load_patch_model_from_single_file( - state_dict, model_names, model_classes, extra_kwargs, self, self.torch_dtype, self.device) - for model_name, model in zip(model_names, models): - self.model.append(model) - self.model_path.append(file_path) - self.model_name.append(model_name) - print(f" The following patched models are loaded: {model_names}.") - - - def load_lora(self, file_path="", state_dict={}, lora_alpha=1.0): - if isinstance(file_path, list): - for file_path_ in file_path: - self.load_lora(file_path_, state_dict=state_dict, lora_alpha=lora_alpha) - else: - print(f"Loading LoRA models from file: {file_path}") - is_loaded = False - if len(state_dict) == 0: - state_dict = load_state_dict(file_path) - for model_name, model, model_path in zip(self.model_name, 
self.model, self.model_path): - for lora in get_lora_loaders(): - match_results = lora.match(model, state_dict) - if match_results is not None: - print(f" Adding LoRA to {model_name} ({model_path}).") - lora_prefix, model_resource = match_results - lora.load(model, state_dict, lora_prefix, alpha=lora_alpha, model_resource=model_resource) - is_loaded = True - break - if not is_loaded: - print(f" Cannot load LoRA: {file_path}") - - - def load_model(self, file_path, model_names=None, device=None, torch_dtype=None): - print(f"Loading models from: {file_path}") - if device is None: device = self.device - if torch_dtype is None: torch_dtype = self.torch_dtype - if isinstance(file_path, list): - state_dict = {} - for path in file_path: - state_dict.update(load_state_dict(path)) - elif os.path.isfile(file_path): - state_dict = load_state_dict(file_path) - else: - state_dict = None - for model_detector in self.model_detector: - if model_detector.match(file_path, state_dict): - model_names, models = model_detector.load( - file_path, state_dict, - device=device, torch_dtype=torch_dtype, - allowed_model_names=model_names, model_manager=self - ) - for model_name, model in zip(model_names, models): - self.model.append(model) - self.model_path.append(file_path) - self.model_name.append(model_name) - print(f" The following models are loaded: {model_names}.") - break - else: - print(f" We cannot detect the model type. 
No models are loaded.") - - - def load_models(self, file_path_list, model_names=None, device=None, torch_dtype=None): - for file_path in file_path_list: - self.load_model(file_path, model_names, device=device, torch_dtype=torch_dtype) - - - def fetch_model(self, model_name, file_path=None, require_model_path=False, index=None): - fetched_models = [] - fetched_model_paths = [] - for model, model_path, model_name_ in zip(self.model, self.model_path, self.model_name): - if file_path is not None and file_path != model_path: - continue - if model_name == model_name_: - fetched_models.append(model) - fetched_model_paths.append(model_path) - if len(fetched_models) == 0: - print(f"No {model_name} models available.") - return None - if len(fetched_models) == 1: - print(f"Using {model_name} from {fetched_model_paths[0]}.") - model = fetched_models[0] - path = fetched_model_paths[0] - else: - if index is None: - model = fetched_models[0] - path = fetched_model_paths[0] - print(f"More than one {model_name} models are loaded in model manager: {fetched_model_paths}. Using {model_name} from {fetched_model_paths[0]}.") - elif isinstance(index, int): - model = fetched_models[:index] - path = fetched_model_paths[:index] - print(f"More than one {model_name} models are loaded in model manager: {fetched_model_paths}. Using {model_name} from {fetched_model_paths[:index]}.") - else: - model = fetched_models - path = fetched_model_paths - print(f"More than one {model_name} models are loaded in model manager: {fetched_model_paths}. 
Using {model_name} from {fetched_model_paths}.") - if require_model_path: - return model, path - else: - return model - - - def to(self, device): - for model in self.model: - model.to(device) - diff --git a/diffsynth/models/nexus_gen.py b/diffsynth/models/nexus_gen.py deleted file mode 100644 index 0110398..0000000 --- a/diffsynth/models/nexus_gen.py +++ /dev/null @@ -1,161 +0,0 @@ -import torch -from PIL import Image - - -class NexusGenAutoregressiveModel(torch.nn.Module): - def __init__(self, max_length=1024, max_pixels=262640): - super(NexusGenAutoregressiveModel, self).__init__() - from .nexus_gen_ar_model import Qwen2_5_VLForConditionalGeneration - from transformers import Qwen2_5_VLConfig - self.max_length = max_length - self.max_pixels = max_pixels - model_config = Qwen2_5_VLConfig(**{ - "_name_or_path": "DiffSynth-Studio/Nexus-GenV2", - "architectures": [ - "Qwen2_5_VLForConditionalGeneration" - ], - "attention_dropout": 0.0, - "auto_map": { - "AutoConfig": "configuration_qwen2_5_vl.Qwen2_5_VLConfig", - "AutoModel": "modeling_qwen2_5_vl.Qwen2_5_VLModel", - "AutoModelForCausalLM": "modeling_qwen2_5_vl.Qwen2_5_VLForConditionalGeneration" - }, - "bos_token_id": 151643, - "eos_token_id": 151645, - "hidden_act": "silu", - "hidden_size": 3584, - "image_token_id": 151655, - "initializer_range": 0.02, - "intermediate_size": 18944, - "max_position_embeddings": 128000, - "max_window_layers": 28, - "model_type": "qwen2_5_vl", - "num_attention_heads": 28, - "num_hidden_layers": 28, - "num_key_value_heads": 4, - "pad_token_id": 151643, - "rms_norm_eps": 1e-06, - "rope_scaling": { - "mrope_section": [ - 16, - 24, - 24 - ], - "rope_type": "default", - "type": "default" - }, - "rope_theta": 1000000.0, - "sliding_window": 32768, - "tie_word_embeddings": False, - "torch_dtype": "bfloat16", - "transformers_version": "4.49.0", - "use_cache": False, - "use_sliding_window": False, - "video_token_id": 151656, - "vision_config": { - "hidden_size": 1280, - "in_chans": 3, - 
"model_type": "qwen2_5_vl", - "spatial_patch_size": 14, - "tokens_per_second": 2, - "torch_dtype": "bfloat16" - }, - "vision_end_token_id": 151653, - "vision_start_token_id": 151652, - "vision_token_id": 151654, - "vocab_size": 152064 - }) - self.model = Qwen2_5_VLForConditionalGeneration(model_config) - self.processor = None - - - def load_processor(self, path): - from .nexus_gen_ar_model import Qwen2_5_VLProcessor - self.processor = Qwen2_5_VLProcessor.from_pretrained(path) - - - @staticmethod - def state_dict_converter(): - return NexusGenAutoregressiveModelStateDictConverter() - - def bound_image(self, image, max_pixels=262640): - from qwen_vl_utils import smart_resize - resized_height, resized_width = smart_resize( - image.height, - image.width, - max_pixels=max_pixels, - ) - return image.resize((resized_width, resized_height)) - - def get_editing_msg(self, instruction): - if '' not in instruction: - instruction = ' ' + instruction - messages = [{"role":"user", "content":instruction}, {"role":"assistant", "content":"Here is the image: "}] - return messages - - def get_generation_msg(self, instruction): - instruction = "Generate an image according to the following description: {}".format(instruction) - messages = [{"role":"user", "content":instruction}, {"role":"assistant", "content":"Here is an image based on the description: "}] - return messages - - def forward(self, instruction, ref_image=None, num_img_tokens=81): - """ - Generate target embeddings for the given instruction and reference image. 
- """ - if ref_image is not None: - messages = self.get_editing_msg(instruction) - images = [self.bound_image(ref_image)] + [Image.new(mode='RGB', size=(252, 252), color=(255, 255, 255))] - output_image_embeddings = self.get_target_embeddings(images, messages, self.processor, self.model, num_img_tokens) - else: - messages = self.get_generation_msg(instruction) - images = [Image.new(mode='RGB', size=(252, 252), color=(255, 255, 255))] - output_image_embeddings = self.get_target_embeddings(images, messages, self.processor, self.model, num_img_tokens) - - return output_image_embeddings - - def get_target_embeddings(self, images, messages, processor, model, num_img_tokens=81): - text = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=False) - text = text.replace('', '<|vision_start|><|image_pad|><|vision_end|>') - inputs = processor( - text=[text], - images=images, - padding=True, - return_tensors="pt", - ) - inputs = inputs.to(model.device) - - input_embeds = model.model.embed_tokens(inputs['input_ids']) - image_embeds = model.visual(inputs['pixel_values'], grid_thw=inputs['image_grid_thw']) - ground_truth_image_embeds = image_embeds[-num_img_tokens:] - input_image_embeds = image_embeds[:-num_img_tokens] - - image_mask = inputs['input_ids'] == model.config.image_token_id - indices = image_mask.cumsum(dim=1) - input_image_mask = torch.logical_and(indices <= (image_embeds.shape[0] - ground_truth_image_embeds.shape[0]), image_mask) - gt_image_mask = torch.logical_and(image_mask, ~input_image_mask) - input_image_mask = input_image_mask.unsqueeze(-1).expand_as(input_embeds) - input_embeds = input_embeds.masked_scatter(input_image_mask, input_image_embeds) - - image_prefill_embeds = model.image_prefill_embeds( - torch.arange(81, device=model.device).long() - ) - input_embeds = input_embeds.masked_scatter(gt_image_mask.unsqueeze(-1).expand_as(input_embeds), image_prefill_embeds) - - position_ids, _ = model.get_rope_index( - inputs['input_ids'], - 
inputs['image_grid_thw'], - attention_mask=inputs['attention_mask']) - position_ids = position_ids.contiguous() - outputs = model(inputs_embeds=input_embeds, position_ids=position_ids, attention_mask=inputs['attention_mask'], return_dict=True) - output_image_embeddings = outputs.image_embeddings[:, :-1, :] - output_image_embeddings = output_image_embeddings[gt_image_mask[:, 1:]] - return output_image_embeddings, input_image_embeds, inputs['image_grid_thw'] - - -class NexusGenAutoregressiveModelStateDictConverter: - def __init__(self): - pass - - def from_civitai(self, state_dict): - state_dict = {"model." + key: value for key, value in state_dict.items()} - return state_dict diff --git a/diffsynth/models/nexus_gen_ar_model.py b/diffsynth/models/nexus_gen_ar_model.py deleted file mode 100644 index d5a2973..0000000 --- a/diffsynth/models/nexus_gen_ar_model.py +++ /dev/null @@ -1,1143 +0,0 @@ -import os -import re -from dataclasses import dataclass -from typing import Any, Dict, List, Optional, Tuple, Union - -import torch -import torch.nn as nn -from torch.nn import CrossEntropyLoss - -from transformers.cache_utils import Cache -from transformers.generation import GenerationMixin, LogitsProcessorList, StoppingCriteriaList, GenerationConfig, GenerateDecoderOnlyOutput, GenerateEncoderDecoderOutput -from transformers.utils import add_start_docstrings_to_model_forward, logging, replace_return_docstrings -from transformers.modeling_outputs import ModelOutput -from transformers.models.qwen2_5_vl.configuration_qwen2_5_vl import Qwen2_5_VLConfig -from transformers.models.qwen2_5_vl.modeling_qwen2_5_vl import ( - Qwen2_5_VisionTransformerPretrainedModel, - Qwen2_5_VLModel, - Qwen2_5_VLPreTrainedModel, - QWEN2_5_VL_INPUTS_DOCSTRING, - ) - -from transformers.feature_extraction_utils import BatchFeature -from transformers.image_utils import ImageInput, VideoInput -from transformers.processing_utils import ProcessingKwargs, ProcessorMixin, Unpack, VideosKwargs -from 
transformers.tokenization_utils_base import PreTokenizedInput, TextInput - -GenerateNonBeamOutput = Union[GenerateDecoderOnlyOutput, GenerateEncoderDecoderOutput] - -logger = logging.get_logger(__name__) - -_CONFIG_FOR_DOC = "Qwen2_5_VLConfig" - - -@dataclass -class Qwen2_5_VLCausalLMOutputWithPast(ModelOutput): - """ - Base class for Qwen2_5_VL causal language model (or autoregressive) outputs. - - Args: - loss (`torch.FloatTensor` of shape `(1,)`, *optional*, returned when `labels` is provided): - Language modeling loss (for next-token prediction). - logits (`torch.FloatTensor` of shape `(batch_size, sequence_length, config.vocab_size)`): - Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax). - past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): - Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape - `(batch_size, num_heads, sequence_length, embed_size_per_head)`) - - Contains pre-computed hidden-states (key and values in the self-attention blocks) that can be used (see - `past_key_values` input) to speed up sequential decoding. - hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): - Tuple of `torch.FloatTensor` (one for the output of the embeddings, if the model has an embedding layer, + - one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. - - Hidden-states of the model at the output of each layer plus the optional initial embedding outputs. - attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): - Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, - sequence_length)`. 
- - Attentions weights after the attention softmax, used to compute the weighted average in the self-attention - heads. - rope_deltas (`torch.LongTensor` of shape `(batch_size, )`, *optional*): - The rope index difference between sequence length and multimodal rope. - """ - - loss: Optional[torch.FloatTensor] = None - logits: torch.FloatTensor = None - image_embeddings: torch.FloatTensor = None - past_key_values: Optional[List[torch.FloatTensor]] = None - hidden_states: Optional[Tuple[torch.FloatTensor]] = None - attentions: Optional[Tuple[torch.FloatTensor]] = None - rope_deltas: Optional[torch.LongTensor] = None - - -class Qwen2_5_VLForConditionalGeneration(Qwen2_5_VLPreTrainedModel, GenerationMixin): - _tied_weights_keys = ["lm_head.weight"] - config_class = Qwen2_5_VLConfig - _no_split_modules = ["Qwen2_5_VLDecoderLayer", "Qwen2_5_VLVisionBlock"] - - def __init__(self, config): - super().__init__(config) - self.visual = Qwen2_5_VisionTransformerPretrainedModel._from_config(config.vision_config) - self.model = Qwen2_5_VLModel(config) - self.vocab_size = config.vocab_size - self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) - self.vision_head = nn.Linear(config.hidden_size, config.hidden_size, bias=False) - self.rope_deltas = None # cache rope_deltas here - self.image_prefill_embeds = nn.Embedding(81, config.hidden_size) - - # Initialize weights and apply final processing - self.post_init() - - def get_input_embeddings(self): - return self.model.embed_tokens - - def set_input_embeddings(self, value): - self.model.embed_tokens = value - - def get_output_embeddings(self): - return self.lm_head - - def set_output_embeddings(self, new_embeddings): - self.lm_head = new_embeddings - - def set_decoder(self, decoder): - self.model = decoder - - def get_decoder(self): - return self.model - - def get_rope_index( - self, - input_ids: Optional[torch.LongTensor] = None, - image_grid_thw: Optional[torch.LongTensor] = None, - video_grid_thw: 
Optional[torch.LongTensor] = None, - second_per_grid_ts: Optional[torch.Tensor] = None, - attention_mask: Optional[torch.Tensor] = None, - ) -> Tuple[torch.Tensor, torch.Tensor]: - """ - Calculate the 3D rope index based on image and video's temporal, height and width in LLM. - - Explanation: - Each embedding sequence contains vision embedding and text embedding or just contains text embedding. - - For pure text embedding sequence, the rotary position embedding has no difference with modern LLMs. - Examples: - input_ids: [T T T T T], here T is for text. - temporal position_ids: [0, 1, 2, 3, 4] - height position_ids: [0, 1, 2, 3, 4] - width position_ids: [0, 1, 2, 3, 4] - - For vision and text embedding sequence, we calculate 3D rotary position embedding for vision part - and 1D rotary position embedding for text part. - Examples: - Temporal (Time): 3 patches, representing different segments of the video in time. - Height: 2 patches, dividing each frame vertically. - Width: 2 patches, dividing each frame horizontally. - We also have some important parameters: - fps (Frames Per Second): The video's frame rate, set to 1. This means one frame is processed each second. - tokens_per_second: This is a crucial parameter. It dictates how many "time-steps" or "temporal tokens" are conceptually packed into a one-second interval of the video. In this case, we have 25 tokens per second. So each second of the video will be represented with 25 separate time points. It essentially defines the temporal granularity. - temporal_patch_size: The number of frames that compose one temporal patch. Here, it's 2 frames. - interval: The step size for the temporal position IDs, calculated as tokens_per_second * temporal_patch_size / fps. In this case, 25 * 2 / 1 = 50. This means that each temporal patch will be have a difference of 50 in the temporal position IDs. - input_ids: [V V V V V V V V V V V V T T T T T], here V is for vision. 
- vision temporal position_ids: [0, 0, 0, 0, 50, 50, 50, 50, 100, 100, 100, 100] - vision height position_ids: [0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1] - vision width position_ids: [0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1] - text temporal position_ids: [101, 102, 103, 104, 105] - text height position_ids: [101, 102, 103, 104, 105] - text width position_ids: [101, 102, 103, 104, 105] - Here we calculate the text start position_ids as the max vision position_ids plus 1. - - Args: - input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): - Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide - it. - image_grid_thw (`torch.LongTensor` of shape `(num_images, 3)`, *optional*): - The temporal, height and width of feature shape of each image in LLM. - video_grid_thw (`torch.LongTensor` of shape `(num_videos, 3)`, *optional*): - The temporal, height and width of feature shape of each video in LLM. - second_per_grid_ts (`torch.Tensor` of shape `(num_videos)`, *optional*): - The time interval (in seconds) for each grid along the temporal dimension in the 3D position IDs. - attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): - Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - - - 1 for tokens that are **not masked**, - - 0 for tokens that are **masked**. 
- - Returns: - position_ids (`torch.LongTensor` of shape `(3, batch_size, sequence_length)`) - mrope_position_deltas (`torch.Tensor` of shape `(batch_size)`) - """ - spatial_merge_size = self.config.vision_config.spatial_merge_size - image_token_id = self.config.image_token_id - video_token_id = self.config.video_token_id - vision_start_token_id = self.config.vision_start_token_id - mrope_position_deltas = [] - if input_ids is not None and (image_grid_thw is not None or video_grid_thw is not None): - total_input_ids = input_ids - if attention_mask is None: - attention_mask = torch.ones_like(total_input_ids) - position_ids = torch.ones( - 3, - input_ids.shape[0], - input_ids.shape[1], - dtype=input_ids.dtype, - device=input_ids.device, - ) - image_index, video_index = 0, 0 - attention_mask = attention_mask.to(total_input_ids.device) - for i, input_ids in enumerate(total_input_ids): - input_ids = input_ids[attention_mask[i] == 1] - image_nums, video_nums = 0, 0 - vision_start_indices = torch.argwhere(input_ids == vision_start_token_id).squeeze(1) - vision_tokens = input_ids[vision_start_indices + 1] - image_nums = (vision_tokens == image_token_id).sum() - video_nums = (vision_tokens == video_token_id).sum() - input_tokens = input_ids.tolist() - llm_pos_ids_list: list = [] - st = 0 - remain_images, remain_videos = image_nums, video_nums - for _ in range(image_nums + video_nums): - if image_token_id in input_tokens and remain_images > 0: - ed_image = input_tokens.index(image_token_id, st) - else: - ed_image = len(input_tokens) + 1 - if video_token_id in input_tokens and remain_videos > 0: - ed_video = input_tokens.index(video_token_id, st) - else: - ed_video = len(input_tokens) + 1 - if ed_image < ed_video: - t, h, w = ( - image_grid_thw[image_index][0], - image_grid_thw[image_index][1], - image_grid_thw[image_index][2], - ) - second_per_grid_t = 0 - image_index += 1 - remain_images -= 1 - ed = ed_image - - else: - t, h, w = ( - video_grid_thw[video_index][0], - 
video_grid_thw[video_index][1], - video_grid_thw[video_index][2], - ) - if second_per_grid_ts is not None: - second_per_grid_t = second_per_grid_ts[video_index] - else: - second_per_grid_t = 1.0 - video_index += 1 - remain_videos -= 1 - ed = ed_video - llm_grid_t, llm_grid_h, llm_grid_w = ( - t.item(), - h.item() // spatial_merge_size, - w.item() // spatial_merge_size, - ) - text_len = ed - st - - st_idx = llm_pos_ids_list[-1].max() + 1 if len(llm_pos_ids_list) > 0 else 0 - llm_pos_ids_list.append(torch.arange(text_len).view(1, -1).expand(3, -1) + st_idx) - - range_tensor = torch.arange(llm_grid_t).view(-1, 1) - expanded_range = range_tensor.expand(-1, llm_grid_h * llm_grid_w) - - time_tensor = expanded_range * second_per_grid_t * self.config.vision_config.tokens_per_second - - time_tensor_long = time_tensor.long() - t_index = time_tensor_long.flatten() - - h_index = torch.arange(llm_grid_h).view(1, -1, 1).expand(llm_grid_t, -1, llm_grid_w).flatten() - w_index = torch.arange(llm_grid_w).view(1, 1, -1).expand(llm_grid_t, llm_grid_h, -1).flatten() - llm_pos_ids_list.append(torch.stack([t_index, h_index, w_index]) + text_len + st_idx) - st = ed + llm_grid_t * llm_grid_h * llm_grid_w - - if st < len(input_tokens): - st_idx = llm_pos_ids_list[-1].max() + 1 if len(llm_pos_ids_list) > 0 else 0 - text_len = len(input_tokens) - st - llm_pos_ids_list.append(torch.arange(text_len).view(1, -1).expand(3, -1) + st_idx) - - llm_positions = torch.cat(llm_pos_ids_list, dim=1).reshape(3, -1) - position_ids[..., i, attention_mask[i] == 1] = llm_positions.to(position_ids.device) - mrope_position_deltas.append(llm_positions.max() + 1 - len(total_input_ids[i])) - mrope_position_deltas = torch.tensor(mrope_position_deltas, device=input_ids.device).unsqueeze(1) - return position_ids, mrope_position_deltas - else: - if attention_mask is not None: - position_ids = attention_mask.long().cumsum(-1) - 1 - position_ids.masked_fill_(attention_mask == 0, 1) - position_ids = 
position_ids.unsqueeze(0).expand(3, -1, -1).to(attention_mask.device) - max_position_ids = position_ids.max(0, keepdim=False)[0].max(-1, keepdim=True)[0] - mrope_position_deltas = max_position_ids + 1 - attention_mask.shape[-1] - else: - position_ids = ( - torch.arange(input_ids.shape[1], device=input_ids.device) - .view(1, 1, -1) - .expand(3, input_ids.shape[0], -1) - ) - mrope_position_deltas = torch.zeros( - [input_ids.shape[0], 1], - device=input_ids.device, - dtype=input_ids.dtype, - ) - - return position_ids, mrope_position_deltas - - @add_start_docstrings_to_model_forward(QWEN2_5_VL_INPUTS_DOCSTRING) - @replace_return_docstrings(output_type=Qwen2_5_VLCausalLMOutputWithPast, config_class=_CONFIG_FOR_DOC) - def forward( - self, - input_ids: torch.LongTensor = None, - attention_mask: Optional[torch.Tensor] = None, - position_ids: Optional[torch.LongTensor] = None, - past_key_values: Optional[List[torch.FloatTensor]] = None, - inputs_embeds: Optional[torch.FloatTensor] = None, - labels: Optional[torch.LongTensor] = None, - use_cache: Optional[bool] = None, - output_attentions: Optional[bool] = None, - output_hidden_states: Optional[bool] = None, - return_dict: Optional[bool] = None, - pixel_values: Optional[torch.Tensor] = None, - pixel_values_videos: Optional[torch.FloatTensor] = None, - image_grid_thw: Optional[torch.LongTensor] = None, - video_grid_thw: Optional[torch.LongTensor] = None, - rope_deltas: Optional[torch.LongTensor] = None, - cache_position: Optional[torch.LongTensor] = None, - second_per_grid_ts: Optional[torch.Tensor] = None, - image_embeddings: Optional[torch.Tensor] = None, - token_loss_weight: Optional[float] = 0.1, - img_loss_weight: Optional[float] = 1.0, - ) -> Union[Tuple, Qwen2_5_VLCausalLMOutputWithPast]: - r""" - labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): - Labels for computing the masked language modeling loss. 
Indices should either be in `[0, ..., - config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored - (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`. - - Returns: - - Example: - - ```python - >>> from PIL import Image - >>> import requests - >>> from transformers import AutoProcessor, Qwen2_5_VLForConditionalGeneration - - >>> model = Qwen2_5_VLForConditionalGeneration.from_pretrained("Qwen/Qwen2.5-VL-7B-Instruct") - >>> processor = AutoProcessor.from_pretrained("Qwen/Qwen2.5-VL-7B-Instruct") - - >>> messages = [ - { - "role": "user", - "content": [ - {"type": "image"}, - {"type": "text", "text": "What is shown in this image?"}, - ], - }, - ] - >>> url = "https://www.ilankelman.org/stopsigns/australia.jpg" - >>> image = Image.open(requests.get(url, stream=True).raw) - - >>> text = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True) - >>> inputs = processor(text=[text], images=[image], vision_infos=[vision_infos]) - - >>> # Generate - >>> generate_ids = model.generate(inputs.input_ids, max_length=30) - >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0] - "The image shows a street scene with a red stop sign in the foreground. In the background, there is a large red gate with Chinese characters ..." 
- ```""" - - output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions - output_hidden_states = ( - output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states - ) - return_dict = return_dict if return_dict is not None else self.config.use_return_dict - - if inputs_embeds is None: - # test feature - inputs_embeds = self.model.embed_tokens(input_ids) - # for image encoding and training - if pixel_values is not None: - pixel_values = pixel_values.type(self.visual.dtype) - image_embeds = self.visual(pixel_values, grid_thw=image_grid_thw) - n_image_tokens = (input_ids == self.config.image_token_id).sum().item() - n_image_features = image_embeds.shape[0] - if n_image_tokens != n_image_features: - raise ValueError( - f"Image features and image tokens do not match: tokens: {n_image_tokens}, features {n_image_features}" - ) - - mask = input_ids == self.config.image_token_id - mask_unsqueezed = mask.unsqueeze(-1) - mask_expanded = mask_unsqueezed.expand_as(inputs_embeds) - image_mask = mask_expanded.to(inputs_embeds.device) - - image_embeds = image_embeds.to(inputs_embeds.device, inputs_embeds.dtype) - inputs_embeds = inputs_embeds.masked_scatter(image_mask, image_embeds) - - if pixel_values_videos is not None: - pixel_values_videos = pixel_values_videos.type(self.visual.dtype) - video_embeds = self.visual(pixel_values_videos, grid_thw=video_grid_thw) - n_video_tokens = (input_ids == self.config.video_token_id).sum().item() - n_video_features = video_embeds.shape[0] - if n_video_tokens != n_video_features: - raise ValueError( - f"Video features and video tokens do not match: tokens: {n_video_tokens}, features {n_video_features}" - ) - - mask = input_ids == self.config.video_token_id - mask_unsqueezed = mask.unsqueeze(-1) - mask_expanded = mask_unsqueezed.expand_as(inputs_embeds) - video_mask = mask_expanded.to(inputs_embeds.device) - - video_embeds = video_embeds.to(inputs_embeds.device, 
inputs_embeds.dtype) - inputs_embeds = inputs_embeds.masked_scatter(video_mask, video_embeds) - - if attention_mask is not None: - attention_mask = attention_mask.to(inputs_embeds.device) - - # if we get 4D attention mask we cannot calculate rope deltas anymore. TODO @raushan fixme - if position_ids is None and (attention_mask is None or attention_mask.ndim == 2): - # calculate RoPE index once per generation in the pre-fill stage only - if ( - (cache_position is not None and cache_position[0] == 0) - or self.rope_deltas is None - or (past_key_values is None or past_key_values.get_seq_length() == 0) - ): - position_ids, rope_deltas = self.get_rope_index( - input_ids, - image_grid_thw, - video_grid_thw, - second_per_grid_ts, - attention_mask, - ) - self.rope_deltas = rope_deltas - # then use the prev pre-calculated rope-deltas to get the correct position ids - else: - batch_size, seq_length, _ = inputs_embeds.shape - delta = ( - (cache_position[0] + self.rope_deltas).to(inputs_embeds.device) - if cache_position is not None - else 0 - ) - position_ids = torch.arange(seq_length, device=inputs_embeds.device) - position_ids = position_ids.view(1, -1).expand(batch_size, -1) - if cache_position is not None: # otherwise `deltas` is an int `0` - delta = delta.repeat_interleave(batch_size // delta.shape[0], dim=0) - position_ids = position_ids.add(delta) - position_ids = position_ids.unsqueeze(0).expand(3, -1, -1) - # position_ids [3, B, L] - - outputs = self.model( - input_ids=None, - position_ids=position_ids, - attention_mask=attention_mask, - past_key_values=past_key_values, - inputs_embeds=inputs_embeds, - use_cache=use_cache, - output_attentions=output_attentions, - output_hidden_states=output_hidden_states, - return_dict=return_dict, - cache_position=cache_position, - ) - - hidden_states = outputs[0] - logits = self.lm_head(hidden_states) - image_embeds = self.vision_head(hidden_states) - - loss = None - if labels is not None: - # Upcast to float if we need to compute 
the loss to avoid potential precision issues - # prepare labels for logits - logits_labels = labels.clone().detach() - image_tokens = (labels == self.config.image_token_id) - logits_labels[image_tokens] = -100 - - logits = logits.float() - # Shift so that tokens < n predict n - shift_logits = logits[..., :-1, :].contiguous() - shift_labels = logits_labels[..., 1:].contiguous() - # Flatten the tokens - loss_fct = CrossEntropyLoss() - shift_logits = shift_logits.view(-1, self.config.vocab_size) - shift_labels = shift_labels.view(-1) - # Enable model parallelism - shift_labels = shift_labels.to(shift_logits.device) - loss = loss_fct(shift_logits, shift_labels) * token_loss_weight - - shift_image_tokens_2d = (labels[..., 1:].contiguous() == self.config.image_token_id) # (B, L-1) - shifted_image_embeds = image_embeds[:, :-1, :].contiguous() # (B, L-1, D) - masked_image_embeds = shifted_image_embeds[shift_image_tokens_2d] # (num_image_tokens, D) - - mse_loss_fct = nn.MSELoss() - mse_loss_fct = mse_loss_fct.to(shift_logits.device) - if image_embeddings is None: - image_embeddings = torch.zeros_like(masked_image_embeds) - img_loss = mse_loss_fct(masked_image_embeds, image_embeddings) - - cos_sim = torch.cosine_similarity( - masked_image_embeds, - image_embeddings, - dim=-1 - ) - cos_loss = (1 - cos_sim).mean() - img_loss = 0.5 * img_loss + 0.5 * cos_loss - # fix nan for empty image tokens - if image_embeddings.size(0) == 0: - img_loss = img_loss.nan_to_num(0.0) - # combine the loss - loss = loss + img_loss_weight * img_loss - - if not return_dict: - output = (logits,) + outputs[1:] - return (loss,) + output if loss is not None else output - - return Qwen2_5_VLCausalLMOutputWithPast( - loss=loss, - logits=logits, - image_embeddings=image_embeds, - past_key_values=outputs.past_key_values, - hidden_states=outputs.hidden_states, - attentions=outputs.attentions, - rope_deltas=self.rope_deltas, - ) - - - - def _sample( - self, - input_ids: torch.LongTensor, - logits_processor: 
LogitsProcessorList, - stopping_criteria: StoppingCriteriaList, - generation_config: GenerationConfig, - synced_gpus: bool, - streamer: Optional["BaseStreamer"], - **model_kwargs, - ) -> Union[GenerateNonBeamOutput, torch.LongTensor]: - r""" - Generates sequences of token ids for models with a language modeling head using **multinomial sampling** and - can be used for text-decoder, text-to-text, speech-to-text, and vision-to-text models. - - Parameters: - input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): - The sequence used as a prompt for the generation. - logits_processor (`LogitsProcessorList`): - An instance of [`LogitsProcessorList`]. List of instances of class derived from [`LogitsProcessor`] - used to modify the prediction scores of the language modeling head applied at each generation step. - stopping_criteria (`StoppingCriteriaList`): - An instance of [`StoppingCriteriaList`]. List of instances of class derived from [`StoppingCriteria`] - used to tell if the generation loop should stop. - generation_config ([`~generation.GenerationConfig`]): - The generation configuration to be used as parametrization of the decoding method. - synced_gpus (`bool`): - Whether to continue running the while loop until max_length (needed to avoid deadlocking with - `FullyShardedDataParallel` and DeepSpeed ZeRO Stage 3). - streamer (`BaseStreamer`, *optional*): - Streamer object that will be used to stream the generated sequences. Generated tokens are passed - through `streamer.put(token_ids)` and the streamer is responsible for any further processing. - model_kwargs: - Additional model specific kwargs will be forwarded to the `forward` function of the model. If model is - an encoder-decoder model the kwargs should include `encoder_outputs`. 
- - Return: - [`~generation.GenerateDecoderOnlyOutput`], [`~generation.GenerateEncoderDecoderOutput`] or `torch.LongTensor`: - A `torch.LongTensor` containing the generated tokens (default behaviour) or a - [`~generation.GenerateDecoderOnlyOutput`] if `model.config.is_encoder_decoder=False` and - `return_dict_in_generate=True` or a [`~generation.GenerateEncoderDecoderOutput`] if - `model.config.is_encoder_decoder=True`. - """ - # init values - pad_token_id = generation_config._pad_token_tensor - output_attentions = generation_config.output_attentions - output_hidden_states = generation_config.output_hidden_states - output_scores = generation_config.output_scores - output_logits = generation_config.output_logits - return_dict_in_generate = generation_config.return_dict_in_generate - max_length = generation_config.max_length - has_eos_stopping_criteria = any(hasattr(criteria, "eos_token_id") for criteria in stopping_criteria) - do_sample = generation_config.do_sample - - # init attention / hidden states / scores tuples - scores = () if (return_dict_in_generate and output_scores) else None - raw_logits = () if (return_dict_in_generate and output_logits) else None - decoder_attentions = () if (return_dict_in_generate and output_attentions) else None - cross_attentions = () if (return_dict_in_generate and output_attentions) else None - decoder_hidden_states = () if (return_dict_in_generate and output_hidden_states) else None - - # if model is an encoder-decoder, retrieve encoder attention weights and hidden states - if return_dict_in_generate and self.config.is_encoder_decoder: - encoder_attentions = model_kwargs["encoder_outputs"].get("attentions") if output_attentions else None - encoder_hidden_states = ( - model_kwargs["encoder_outputs"].get("hidden_states") if output_hidden_states else None - ) - - # keep track of which sequences are already finished - batch_size, cur_len = input_ids.shape - this_peer_finished = False - unfinished_sequences = torch.ones(batch_size, 
dtype=torch.long, device=input_ids.device) - model_kwargs = self._get_initial_cache_position(input_ids, model_kwargs) - - model_forward = self.__call__ - if isinstance(model_kwargs.get("past_key_values"), Cache): - is_compileable = model_kwargs["past_key_values"].is_compileable and self._supports_static_cache - is_compileable = is_compileable and not self.generation_config.disable_compile - if is_compileable and ( - self.device.type == "cuda" or generation_config.compile_config._compile_all_devices - ): - os.environ["TOKENIZERS_PARALLELISM"] = "0" - model_forward = self.get_compiled_call(generation_config.compile_config) - - is_prefill = True - is_sampling_img = input_ids[:, -1] == self.config.vision_start_token_id - generation_image_grid_thw = model_kwargs.pop("generation_image_grid_thw", self.get_default_image_grid_thw()) - num_img_tokens = self.get_num_image_tokens(generation_image_grid_thw) - output_image_embeddings = [] - while self._has_unfinished_sequences( - this_peer_finished, synced_gpus, device=input_ids.device, cur_len=cur_len, max_length=max_length - ): - # prepare model inputs - model_inputs = self.prepare_inputs_for_generation(input_ids, **model_kwargs) - - # prepare prefilled embeds - model_inputs.update(self.prepare_prefilled_image_embeds(len(output_image_embeddings), num_img_tokens, is_sampling_img, **model_kwargs)) - - # parse position_ids from model_kwargs - model_inputs.update(self.prepare_image_position_ids(input_ids, generation_image_grid_thw, is_sampling_img, **model_kwargs)) - - # prepare variable output controls (note: some models won't accept all output controls) - model_inputs.update({"output_attentions": output_attentions} if output_attentions else {}) - model_inputs.update({"output_hidden_states": output_hidden_states} if output_hidden_states else {}) - - if is_prefill: - outputs = self(**model_inputs, return_dict=True) - is_prefill = False - else: - outputs = model_forward(**model_inputs, return_dict=True) - - # synced_gpus: don't 
waste resources running the code we don't need; kwargs must be updated before skipping - model_kwargs = self._update_model_kwargs_for_generation( - outputs, - model_kwargs, - is_encoder_decoder=self.config.is_encoder_decoder, - ) - # TODO: support batch image sampling - if bool(is_sampling_img) and len(output_image_embeddings) < num_img_tokens: - output_image_embeddings.append(outputs.image_embeddings[:, -1, :].unsqueeze(1)) - - if synced_gpus and this_peer_finished: - continue - # Clone is needed to avoid keeping a hanging ref to outputs.logits which may be very large for first iteration - # (the clone itself is always small) - next_token_logits = outputs.logits[:, -1, :].clone().float() - next_token_logits = next_token_logits.to(input_ids.device) - - # do not sample token - next_token_logits[:, self.config.vision_end_token_id] = -float('inf') - # pre-process distribution - next_token_scores = logits_processor(input_ids, next_token_logits) - # Store scores, attentions and hidden_states when required - if return_dict_in_generate: - if output_scores: - scores += (next_token_scores,) - if output_logits: - raw_logits += (next_token_logits,) - if output_attentions: - decoder_attentions += ( - (outputs.decoder_attentions,) if self.config.is_encoder_decoder else (outputs.attentions,) - ) - if self.config.is_encoder_decoder: - cross_attentions += (outputs.cross_attentions,) - - if output_hidden_states: - decoder_hidden_states += ( - (outputs.decoder_hidden_states,) - if self.config.is_encoder_decoder - else (outputs.hidden_states,) - ) - - # token selection - if do_sample: - probs = nn.functional.softmax(next_token_scores, dim=-1) - # TODO (joao): this OP throws "skipping cudagraphs due to ['incompatible ops']", find solution - next_tokens = torch.multinomial(probs, num_samples=1).squeeze(1) - # while not bool(is_sampling_img) and torch.any(next_tokens == self.config.vision_end_token_id): - # probs[:, self.config.vision_end_token_id] = 0 - # next_tokens = 
torch.multinomial(probs, num_samples=1).squeeze(1) - else: - next_tokens = torch.argmax(next_token_scores, dim=-1) - - # finished sentences should have their next token be a padding token - if has_eos_stopping_criteria: - next_tokens = next_tokens * unfinished_sequences + pad_token_id * (1 - unfinished_sequences) - - #TODO: support batch image sample - if num_img_tokens is not None: - cur_img_tokens = (input_ids == self.config.vision_start_token_id).flip(dims=[1]).float().argmax(dim=1) - # check whether is sampling images - is_end_img = torch.logical_and(cur_img_tokens == num_img_tokens, is_sampling_img) - is_sampling_img = torch.logical_and(is_sampling_img, cur_img_tokens < num_img_tokens) - next_tokens[is_sampling_img] = self.config.image_token_id - # check whether to end sampling images - next_tokens[is_end_img] = self.config.vision_end_token_id - else: - # check whether to end sampling images - is_sampling_img = torch.logical_and(is_sampling_img, (next_tokens != self.config.vision_end_token_id)) - # replace the next token with the image token if is sampling image - next_tokens[is_sampling_img] = self.config.image_token_id - # check whether to start sampling images - is_sampling_img = torch.logical_or(is_sampling_img, (next_tokens == self.config.vision_start_token_id)) - - # update generated ids, model inputs, and length for next step - input_ids = torch.cat([input_ids, next_tokens[:, None]], dim=-1) - - if streamer is not None: - streamer.put(next_tokens.cpu()) - - unfinished_sequences = unfinished_sequences & ~stopping_criteria(input_ids, scores) - this_peer_finished = unfinished_sequences.max() == 0 - cur_len += 1 - - # This is needed to properly delete outputs.logits which may be very large for first iteration - # Otherwise a reference to outputs is kept which keeps the logits alive in the next iteration - del outputs - - if streamer is not None: - streamer.end() - - # output the image embeddings - output_image_embeddings = torch.cat(output_image_embeddings, 
dim=1) if len(output_image_embeddings) > 0 else None - - if return_dict_in_generate: - return GenerateDecoderOnlyAll2AllOutput( - sequences=input_ids, - scores=scores, - logits=raw_logits, - attentions=decoder_attentions, - hidden_states=decoder_hidden_states, - past_key_values=model_kwargs.get("past_key_values"), - output_image_embeddings=output_image_embeddings, - ) - else: - return input_ids - - - def prepare_prefilled_image_embeds(self, cur_image_tokens, num_img_tokens, is_sampling_img, **model_kwargs): - if cur_image_tokens == 0 or cur_image_tokens > num_img_tokens or not bool(is_sampling_img): - return {} - # TODO: support batch image sample - image_idx = torch.tensor([cur_image_tokens-1]).to(self.device).long().unsqueeze(0) - inputs_embeds = self.image_prefill_embeds(image_idx) - return {"inputs_embeds": inputs_embeds} - - - def get_default_image_grid_thw(self,): - return torch.tensor([[1, 18, 18]]).to(self.device) - - - def get_num_image_tokens(self, image_grid_thw): - return int(torch.prod(image_grid_thw, dim=1).sum() // 4) - - - def _validate_model_kwargs(self, model_kwargs: Dict[str, Any]): - num_img_tokens = model_kwargs.pop("generation_image_grid_thw", None) - super()._validate_model_kwargs(model_kwargs) - model_kwargs["generation_image_grid_thw"] = num_img_tokens - - def prepare_image_position_ids(self, input_ids, generation_image_grid_thw, is_sampling_img, **model_kwargs): - # Overwritten -- prepare position_ids for image tokens - cur_img_tokens = int((input_ids == self.config.vision_start_token_id).flip(dims=[1]).float().argmax(dim=1)) - # TODO: support batch image sample - if cur_img_tokens > 0 and bool(is_sampling_img): - image_grid_thw = generation_image_grid_thw - if model_kwargs.get('image_grid_thw') is not None: - image_grid_thw = torch.cat([model_kwargs.get('image_grid_thw'), image_grid_thw]) - remaining_img_tokens = self.get_num_image_tokens(generation_image_grid_thw) - cur_img_tokens - padding_ids = input_ids.new_full((1, 
remaining_img_tokens), fill_value=self.config.image_token_id) - padded_ids = torch.cat([input_ids, padding_ids], dim=1) - position_ids, _ = self.get_rope_index(padded_ids, image_grid_thw, None, None) - if model_kwargs.get("use_cache", True): - position_ids = position_ids[:, :, input_ids.shape[1] - 1].unsqueeze(-1) - else: - position_ids = position_ids[:, :, :input_ids.shape[1]] - return {"position_ids": position_ids} - return {} - - def prepare_inputs_for_generation( - self, - input_ids, - past_key_values=None, - attention_mask=None, - inputs_embeds=None, - cache_position=None, - position_ids=None, - use_cache=True, - pixel_values=None, - pixel_values_videos=None, - image_grid_thw=None, - video_grid_thw=None, - second_per_grid_ts=None, - image_embeddings=None, - **kwargs, - ): - # Overwritten -- in specific circumstances we don't want to forward image inputs to the model - - model_inputs = super().prepare_inputs_for_generation( - input_ids, - past_key_values=past_key_values, - attention_mask=attention_mask, - inputs_embeds=inputs_embeds, - cache_position=cache_position, - position_ids=position_ids, - pixel_values=pixel_values, - pixel_values_videos=pixel_values_videos, - image_grid_thw=image_grid_thw, - video_grid_thw=video_grid_thw, - second_per_grid_ts=second_per_grid_ts, - use_cache=use_cache, - **kwargs, - ) - - # Qwen2-5-VL position_ids are prepared with rope_deltas in forward - model_inputs["position_ids"] = None - - if cache_position[0] != 0: - model_inputs["pixel_values"] = None - model_inputs["pixel_values_videos"] = None - return model_inputs - - def _get_image_nums_and_video_nums( - self, - input_ids: Optional[torch.LongTensor], - ) -> Tuple[torch.Tensor, torch.Tensor]: - """ - Get the number of images and videos for each sample to calculate the separation length of the sample tensor. - These parameters are not passed through the processor to avoid unpredictable impacts from interface modifications. 
- - Args: - input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): - Indices of input sequence tokens in the vocabulary. - - Returns: - image_nums (`torch.LongTensor` of shape `(batch_size, num_images_sample)`) - video_nums (`torch.LongTensor` of shape `(batch_size, num_videos_sample)`) - """ - image_token_id = self.config.image_token_id - video_token_id = self.config.video_token_id - vision_start_token_id = self.config.vision_start_token_id - - vision_start_mask = input_ids == vision_start_token_id - vision_first_mask = torch.roll(vision_start_mask, shifts=1, dims=1) - image_mask = input_ids == image_token_id - video_mask = input_ids == video_token_id - image_nums = torch.sum(vision_first_mask & image_mask, dim=1) - video_nums = torch.sum(vision_first_mask & video_mask, dim=1) - - return image_nums, video_nums - - def _expand_inputs_for_generation( - self, - expand_size: int = 1, - is_encoder_decoder: bool = False, - input_ids: Optional[torch.LongTensor] = None, - **model_kwargs, - ) -> Tuple[torch.LongTensor, Dict[str, Any]]: - # Overwritten -- Support for expanding tensors without a batch size dimension - # e.g., pixel_values, image_grid_thw, pixel_values_videos, video_grid_thw, second_per_grid_t - # pixel_values.shape[0] is sum(seqlen_images for samples) - # image_grid_thw.shape[0] is sum(num_images for samples) - - if expand_size == 1: - return input_ids, model_kwargs - - visual_keys = ["pixel_values", "image_grid_thw", "pixel_values_videos", "video_grid_thw", "second_per_grid_ts"] - - def _expand_dict_for_generation_visual(dict_to_expand): - image_grid_thw = model_kwargs.get("image_grid_thw", None) - video_grid_thw = model_kwargs.get("video_grid_thw", None) - image_nums, video_nums = self._get_image_nums_and_video_nums(input_ids) - - def _repeat_interleave_samples(x, lengths, repeat_times): - samples = torch.split(x, lengths) - repeat_args = [repeat_times] + [1] * (x.dim() - 1) - result = torch.cat([sample.repeat(*repeat_args) for sample in 
samples], dim=0) - return result - - for key in dict_to_expand: - if key == "pixel_values": - # split images into samples - samples = torch.split(image_grid_thw, list(image_nums)) - # compute the sequence length of images for each sample - lengths = [torch.prod(sample, dim=1).sum() for sample in samples] - dict_to_expand[key] = _repeat_interleave_samples( - dict_to_expand[key], lengths=lengths, repeat_times=expand_size - ) - elif key == "image_grid_thw": - # get the num of images for each sample - lengths = list(image_nums) - dict_to_expand[key] = _repeat_interleave_samples( - dict_to_expand[key], lengths=lengths, repeat_times=expand_size - ) - elif key == "pixel_values_videos": - samples = torch.split(video_grid_thw, list(video_nums)) - lengths = [torch.prod(sample, dim=1).sum() for sample in samples] - dict_to_expand[key] = _repeat_interleave_samples( - dict_to_expand[key], lengths=lengths, repeat_times=expand_size - ) - elif key == "video_grid_thw": - lengths = list(video_nums) - dict_to_expand[key] = _repeat_interleave_samples( - dict_to_expand[key], lengths=lengths, repeat_times=expand_size - ) - elif key == "second_per_grid_ts": - if not isinstance(dict_to_expand[key], list): - raise TypeError( - f"Expected value for key '{key}' to be a list, but got {type(dict_to_expand[key])} instead." 
- ) - tensor = torch.tensor(dict_to_expand[key]) - lengths = list(video_nums) - tensor = _repeat_interleave_samples(tensor, lengths=lengths, repeat_times=expand_size) - dict_to_expand[key] = tensor.tolist() - return dict_to_expand - - def _expand_dict_for_generation(dict_to_expand): - for key in dict_to_expand: - if ( - key != "cache_position" - and dict_to_expand[key] is not None - and isinstance(dict_to_expand[key], torch.Tensor) - and key not in visual_keys - ): - dict_to_expand[key] = dict_to_expand[key].repeat_interleave(expand_size, dim=0) - return dict_to_expand - - # input_ids is required for expanding visual inputs - # If input_ids is unavailable, visual inputs will not be used; therefore, there is no need to expand visual inputs. - if input_ids is not None and input_ids.numel() != 0: - model_kwargs = _expand_dict_for_generation_visual(model_kwargs) - - if input_ids is not None: - input_ids = input_ids.repeat_interleave(expand_size, dim=0) - - model_kwargs = _expand_dict_for_generation(model_kwargs) - - if is_encoder_decoder: - if model_kwargs.get("encoder_outputs") is None: - raise ValueError("If `is_encoder_decoder` is True, make sure that `encoder_outputs` is defined.") - model_kwargs["encoder_outputs"] = _expand_dict_for_generation(model_kwargs["encoder_outputs"]) - - return input_ids, model_kwargs - - -__all__ = ["Qwen2_5_VLForConditionalGeneration", "Qwen2_5_VLModel", "Qwen2_5_VLPreTrainedModel"] - - - -class Qwen2_5_VLVideosProcessorKwargs(VideosKwargs, total=False): - fps: Union[List[float], float] - - -class Qwen2_5_VLProcessorKwargs(ProcessingKwargs, total=False): - videos_kwargs: Qwen2_5_VLVideosProcessorKwargs - _defaults = { - "text_kwargs": { - "padding": False, - }, - "videos_kwargs": {"fps": 2.0}, - } - - -class Qwen2_5_VLProcessor(ProcessorMixin): - r""" - Constructs a Qwen2.5-VL processor which wraps a Qwen2.5-VL image processor and a Qwen2 tokenizer into a single processor. 
- [`Qwen2_5_VLProcessor`] offers all the functionalities of [`Qwen2VLImageProcessor`] and [`Qwen2TokenizerFast`]. See the - [`~Qwen2_5_VLProcessor.__call__`] and [`~Qwen2_5_VLProcessor.decode`] for more information. - Args: - image_processor ([`Qwen2VLImageProcessor`], *optional*): - The image processor is a required input. - tokenizer ([`Qwen2TokenizerFast`], *optional*): - The tokenizer is a required input. - chat_template (`str`, *optional*): A Jinja template which will be used to convert lists of messages - in a chat into a tokenizable string. - """ - - attributes = ["image_processor", "tokenizer"] - valid_kwargs = ["chat_template"] - - image_processor_class = "AutoImageProcessor" - tokenizer_class = ("Qwen2Tokenizer", "Qwen2TokenizerFast") - - def __init__(self, image_processor=None, tokenizer=None, chat_template=None, **kwargs): - self.image_token = "<|image_pad|>" if not hasattr(tokenizer, "image_token") else tokenizer.image_token - self.video_token = "<|video_pad|>" if not hasattr(tokenizer, "video_token") else tokenizer.video_token - super().__init__(image_processor, tokenizer, chat_template=chat_template) - - def __call__( - self, - images: ImageInput = None, - text: Union[TextInput, PreTokenizedInput, List[TextInput], List[PreTokenizedInput]] = None, - videos: VideoInput = None, - **kwargs: Unpack[Qwen2_5_VLProcessorKwargs], - ) -> BatchFeature: - """ - Main method to prepare for the model one or several sequences(s) and image(s). This method forwards the `text` - and `kwargs` arguments to Qwen2TokenizerFast's [`~Qwen2TokenizerFast.__call__`] if `text` is not `None` to encode - the text. To prepare the vision inputs, this method forwards the `vision_infos` and `kwrags` arguments to - Qwen2VLImageProcessor's [`~Qwen2VLImageProcessor.__call__`] if `vision_infos` is not `None`. 
- - Args: - images (`PIL.Image.Image`, `np.ndarray`, `torch.Tensor`, `List[PIL.Image.Image]`, `List[np.ndarray]`, `List[torch.Tensor]`): - The image or batch of images to be prepared. Each image can be a PIL image, NumPy array or PyTorch - tensor. Both channels-first and channels-last formats are supported. - text (`str`, `List[str]`, `List[List[str]]`): - The sequence or batch of sequences to be encoded. Each sequence can be a string or a list of strings - (pretokenized string). If the sequences are provided as list of strings (pretokenized), you must set - `is_split_into_words=True` (to lift the ambiguity with a batch of sequences). - videos (`np.ndarray`, `torch.Tensor`, `List[np.ndarray]`, `List[torch.Tensor]`): - The image or batch of videos to be prepared. Each video can be a 4D NumPy array or PyTorch - tensor, or a nested list of 3D frames. Both channels-first and channels-last formats are supported. - return_tensors (`str` or [`~utils.TensorType`], *optional*): - If set, will return tensors of a particular framework. Acceptable values are: - - `'tf'`: Return TensorFlow `tf.constant` objects. - - `'pt'`: Return PyTorch `torch.Tensor` objects. - - `'np'`: Return NumPy `np.ndarray` objects. - - `'jax'`: Return JAX `jnp.ndarray` objects. - - Returns: - [`BatchFeature`]: A [`BatchFeature`] with the following fields: - - - **input_ids** -- List of token ids to be fed to a model. Returned when `text` is not `None`. - - **attention_mask** -- List of indices specifying which tokens should be attended to by the model (when - `return_attention_mask=True` or if *"attention_mask"* is in `self.model_input_names` and if `text` is not - `None`). - - **pixel_values** -- Pixel values to be fed to a model. Returned when `images` is not `None`. - - **pixel_values_videos** -- Pixel values of videos to be fed to a model. Returned when `videos` is not `None`. - - **image_grid_thw** -- List of image 3D grid in LLM. Returned when `images` is not `None`. 
- - **video_grid_thw** -- List of video 3D grid in LLM. Returned when `videos` is not `None`. - - **second_per_grid_ts** -- List of video seconds per time grid. Returned when `videos` is not `None`. - """ - output_kwargs = self._merge_kwargs( - Qwen2_5_VLProcessorKwargs, - tokenizer_init_kwargs=self.tokenizer.init_kwargs, - **kwargs, - ) - if images is not None: - image_inputs = self.image_processor(images=images, videos=None, **output_kwargs["images_kwargs"]) - image_grid_thw = image_inputs["image_grid_thw"] - else: - image_inputs = {} - image_grid_thw = None - - if videos is not None: - videos_inputs = self.image_processor(images=None, videos=videos, **output_kwargs["images_kwargs"]) - video_grid_thw = videos_inputs["video_grid_thw"] - - fps = output_kwargs["videos_kwargs"].pop("fps", 2.0) - if isinstance(fps, (int, float)): - second_per_grid_ts = [self.image_processor.temporal_patch_size / fps] * len(video_grid_thw) - elif hasattr(fps, "__len__") and len(fps) == len(video_grid_thw): - second_per_grid_ts = [self.image_processor.temporal_patch_size / tmp for tmp in fps] - else: - raise ValueError( - f"The length of fps ({len(fps) if hasattr(fps, '__len__') else fps}) must be equal to the length of video_grid_thw ({len(video_grid_thw)}) or fps should be a single number." 
- ) - videos_inputs.update({"second_per_grid_ts": second_per_grid_ts}) - - else: - videos_inputs = {} - video_grid_thw = None - - if not isinstance(text, list): - text = [text] - - if image_grid_thw is not None: - merge_length = self.image_processor.merge_size**2 - index = 0 - for i in range(len(text)): - while self.image_token in text[i]: - text[i] = text[i].replace( - self.image_token, - "<|placeholder|>" * (image_grid_thw[index].prod() // merge_length), - 1, - ) - index += 1 - text[i] = text[i].replace("<|placeholder|>", self.image_token) - - if video_grid_thw is not None: - merge_length = self.image_processor.merge_size**2 - index = 0 - for i in range(len(text)): - while self.video_token in text[i]: - text[i] = text[i].replace( - self.video_token, - "<|placeholder|>" * (video_grid_thw[index].prod() // merge_length), - 1, - ) - index += 1 - text[i] = text[i].replace("<|placeholder|>", self.video_token) - - text_inputs = self.tokenizer(text, **output_kwargs["text_kwargs"]) - - return BatchFeature(data={**text_inputs, **image_inputs, **videos_inputs}) - - def batch_decode(self, *args, **kwargs): - """ - This method forwards all its arguments to Qwen2TokenizerFast's [`~PreTrainedTokenizer.batch_decode`]. Please - refer to the docstring of this method for more information. - """ - return self.tokenizer.batch_decode(*args, **kwargs) - - def batch_decode_all2all(self, *args, **kwargs): - """ - This method forwards all its arguments to Qwen2TokenizerFast's [`~PreTrainedTokenizer.batch_decode`]. Please - refer to the docstring of this method for more information. 
- """ - decoded = self.tokenizer.batch_decode(*args, **kwargs) - pattern = r'<\|vision_start\|>.*?<\|vision_end\|>' - decoded_with_image_tag = [re.sub(pattern, '', d, flags=re.DOTALL) for d in decoded] - decoded_with_image_tag = [re.sub(r'<\|im_end\|>', '', d) for d in decoded_with_image_tag] - return decoded_with_image_tag - - def decode(self, *args, **kwargs): - """ - This method forwards all its arguments to Qwen2TokenizerFast's [`~PreTrainedTokenizer.decode`]. Please refer to - the docstring of this method for more information. - """ - return self.tokenizer.decode(*args, **kwargs) - - def post_process_image_text_to_text( - self, generated_outputs, skip_special_tokens=True, clean_up_tokenization_spaces=False, **kwargs - ): - """ - Post-process the output of the model to decode the text. - - Args: - generated_outputs (`torch.Tensor` or `np.ndarray`): - The output of the model `generate` function. The output is expected to be a tensor of shape `(batch_size, sequence_length)` - or `(sequence_length,)`. - skip_special_tokens (`bool`, *optional*, defaults to `True`): - Whether or not to remove special tokens in the output. Argument passed to the tokenizer's `batch_decode` method. - Clean_up_tokenization_spaces (`bool`, *optional*, defaults to `False`): - Whether or not to clean up the tokenization spaces. Argument passed to the tokenizer's `batch_decode` method. - **kwargs: - Additional arguments to be passed to the tokenizer's `batch_decode method`. - - Returns: - `List[str]`: The decoded text. 
- """ - return self.tokenizer.batch_decode( - generated_outputs, - skip_special_tokens=skip_special_tokens, - clean_up_tokenization_spaces=clean_up_tokenization_spaces, - **kwargs, - ) - - @property - def model_input_names(self): - tokenizer_input_names = self.tokenizer.model_input_names - image_processor_input_names = self.image_processor.model_input_names - names_from_processor = list(dict.fromkeys(tokenizer_input_names + image_processor_input_names)) - return names_from_processor + ["second_per_grid_ts"] - - -__all__ = ["Qwen2_5_VLProcessor"] diff --git a/diffsynth/models/nexus_gen_projector.py b/diffsynth/models/nexus_gen_projector.py deleted file mode 100644 index d69b3e1..0000000 --- a/diffsynth/models/nexus_gen_projector.py +++ /dev/null @@ -1,417 +0,0 @@ -import math -import torch -import torch.nn as nn -from typing import Optional, Tuple - - - -def rotate_half(x): - """Rotates half the hidden dims of the input.""" - x1 = x[..., : x.shape[-1] // 2] - x2 = x[..., x.shape[-1] // 2 :] - return torch.cat((-x2, x1), dim=-1) - - -def apply_multimodal_rotary_pos_emb(q, k, cos, sin, mrope_section, unsqueeze_dim=1): - mrope_section = mrope_section * 2 - cos = torch.cat([m[i % 3] for i, m in enumerate(cos.split(mrope_section, dim=-1))], dim=-1).unsqueeze( - unsqueeze_dim - ) - sin = torch.cat([m[i % 3] for i, m in enumerate(sin.split(mrope_section, dim=-1))], dim=-1).unsqueeze( - unsqueeze_dim - ) - - q_embed = (q * cos) + (rotate_half(q) * sin) - k_embed = (k * cos) + (rotate_half(k) * sin) - return q_embed, k_embed - - -class Qwen2_5_VLRotaryEmbedding(nn.Module): - def __init__(self, config, device=None): - super().__init__() - # BC: "rope_type" was originally "type" - if hasattr(config, "rope_scaling") and config.rope_scaling is not None: - self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type")) - else: - self.rope_type = "default" - self.max_seq_len_cached = config.max_position_embeddings - self.original_max_seq_len = 
config.max_position_embeddings - - self.config = config - from transformers.modeling_rope_utils import _compute_default_rope_parameters - self.rope_init_fn = _compute_default_rope_parameters - - inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device) - self.register_buffer("inv_freq", inv_freq, persistent=False) - self.original_inv_freq = self.inv_freq - - - def _dynamic_frequency_update(self, position_ids, device): - """ - dynamic RoPE layers should recompute `inv_freq` in the following situations: - 1 - growing beyond the cached sequence length (allow scaling) - 2 - the current sequence length is in the original scale (avoid losing precision with small sequences) - """ - seq_len = torch.max(position_ids) + 1 - if seq_len > self.max_seq_len_cached: # growth - inv_freq, self.attention_scaling = self.rope_init_fn( - self.config, device, seq_len=seq_len, **self.rope_kwargs - ) - self.register_buffer("inv_freq", inv_freq, persistent=False) # TODO joao: may break with compilation - self.max_seq_len_cached = seq_len - - if seq_len < self.original_max_seq_len and self.max_seq_len_cached > self.original_max_seq_len: # reset - self.register_buffer("inv_freq", self.original_inv_freq, persistent=False) - self.max_seq_len_cached = self.original_max_seq_len - - - @torch.no_grad() - def forward(self, x, position_ids): - if "dynamic" in self.rope_type: - self._dynamic_frequency_update(position_ids, device=x.device) - - # Core RoPE block. In contrast to other models, Qwen2_5_VL has different position ids for the grids - # So we expand the inv_freq to shape (3, ...) 
- inv_freq_expanded = self.inv_freq[None, None, :, None].float().expand(3, position_ids.shape[1], -1, 1) - position_ids_expanded = position_ids[:, :, None, :].float() # shape (3, bs, 1, positions) - # Force float32 (see https://github.com/huggingface/transformers/pull/29285) - device_type = x.device.type - device_type = device_type if isinstance(device_type, str) and device_type != "mps" else "cpu" - with torch.autocast(device_type=device_type, enabled=False): - freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(2, 3) - emb = torch.cat((freqs, freqs), dim=-1) - cos = emb.cos() - sin = emb.sin() - - # Advanced RoPE types (e.g. yarn) apply a post-processing scaling factor, equivalent to scaling attention - cos = cos * self.attention_scaling - sin = sin * self.attention_scaling - - return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype) - - -def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor: - """ - This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). 
The hidden states go from (batch, - num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim) - """ - batch, num_key_value_heads, slen, head_dim = hidden_states.shape - if n_rep == 1: - return hidden_states - hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim) - return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim) - - -class Qwen2_5_VLAttention(nn.Module): - def __init__(self, config, layer_idx: Optional[int] = None): - super().__init__() - self.config = config - self.layer_idx = layer_idx - - self.hidden_size = config.hidden_size - self.num_heads = config.num_attention_heads - self.head_dim = self.hidden_size // self.num_heads - self.num_key_value_heads = config.num_key_value_heads - self.num_key_value_groups = self.num_heads // self.num_key_value_heads - self.is_causal = True - self.attention_dropout = config.attention_dropout - self.rope_scaling = config.rope_scaling - - if (self.head_dim * self.num_heads) != self.hidden_size: - raise ValueError( - f"hidden_size must be divisible by num_heads (got `hidden_size`: {self.hidden_size}" - f" and `num_heads`: {self.num_heads})." 
- ) - self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) - self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=True) - self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=True) - self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) - - - def forward( - self, - hidden_states: torch.Tensor, - position_embeddings: Optional[Tuple[torch.Tensor, torch.Tensor]] = None, # necessary, but kept here for BC - ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: - bsz, q_len, _ = hidden_states.size() - - query_states = self.q_proj(hidden_states) - key_states = self.k_proj(hidden_states) - value_states = self.v_proj(hidden_states) - - query_states = query_states.view(bsz, q_len, -1, self.head_dim).transpose(1, 2) - key_states = key_states.view(bsz, q_len, -1, self.head_dim).transpose(1, 2) - value_states = value_states.view(bsz, q_len, -1, self.head_dim).transpose(1, 2) - - cos, sin = position_embeddings - query_states, key_states = apply_multimodal_rotary_pos_emb( - query_states, key_states, cos, sin, self.rope_scaling["mrope_section"] - ) - - # repeat k/v heads if n_kv_heads < n_heads - key_states = repeat_kv(key_states, self.num_key_value_groups) - value_states = repeat_kv(value_states, self.num_key_value_groups) - - attn_weights = torch.matmul(query_states, key_states.transpose(2, 3)) / math.sqrt(self.head_dim) - - # Fix precision issues in Qwen2-VL float16 inference - # Replace inf values with zeros in attention weights to prevent NaN propagation - if query_states.dtype == torch.float16: - attn_weights = torch.where(torch.isinf(attn_weights), torch.zeros_like(attn_weights), attn_weights) - - # upcast attention to fp32 - attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query_states.dtype) - attn_weights = nn.functional.dropout(attn_weights, p=self.attention_dropout, 
training=self.training) - attn_output = torch.matmul(attn_weights, value_states) - - if attn_output.size() != (bsz, self.num_heads, q_len, self.head_dim): - raise ValueError( - f"`attn_output` should be of size {(bsz, self.num_heads, q_len, self.head_dim)}, but is" - f" {attn_output.size()}" - ) - - attn_output = attn_output.transpose(1, 2).contiguous() - attn_output = attn_output.reshape(bsz, q_len, -1) - - attn_output = self.o_proj(attn_output) - - return attn_output - - -class Qwen2MLP(nn.Module): - def __init__(self, config): - super().__init__() - from transformers.activations import ACT2FN - self.config = config - self.hidden_size = config.hidden_size - self.intermediate_size = config.intermediate_size - self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) - self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) - self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) - self.act_fn = ACT2FN[config.hidden_act] - - def forward(self, x): - down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x)) - return down_proj - - -class Qwen2RMSNorm(nn.Module): - def __init__(self, hidden_size, eps=1e-6): - """ - Qwen2RMSNorm is equivalent to T5LayerNorm - """ - super().__init__() - self.weight = nn.Parameter(torch.ones(hidden_size)) - self.variance_epsilon = eps - - def forward(self, hidden_states): - input_dtype = hidden_states.dtype - hidden_states = hidden_states.to(torch.float32) - variance = hidden_states.pow(2).mean(-1, keepdim=True) - hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon) - return self.weight * hidden_states.to(input_dtype) - - def extra_repr(self): - return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}" - - -class Qwen2_5_VLDecoderLayer(nn.Module): - def __init__(self, config, layer_idx): - super().__init__() - self.hidden_size = config.hidden_size - - self.self_attn = Qwen2_5_VLAttention(config, layer_idx) - - 
self.mlp = Qwen2MLP(config) - self.input_layernorm = Qwen2RMSNorm(config.hidden_size, eps=config.rms_norm_eps) - self.post_attention_layernorm = Qwen2RMSNorm(config.hidden_size, eps=config.rms_norm_eps) - - def forward( - self, - hidden_states: torch.Tensor, - position_embeddings: Optional[Tuple[torch.Tensor, torch.Tensor]] = None, # necessary, but kept here for BC - ) -> Tuple[torch.FloatTensor, Optional[Tuple[torch.FloatTensor, torch.FloatTensor]]]: - - residual = hidden_states - - hidden_states = self.input_layernorm(hidden_states) - - # Self Attention - hidden_states = self.self_attn( - hidden_states=hidden_states, - position_embeddings=position_embeddings, - ) - hidden_states = residual + hidden_states - - # Fully Connected - residual = hidden_states - hidden_states = self.post_attention_layernorm(hidden_states) - hidden_states = self.mlp(hidden_states) - hidden_states = residual + hidden_states - - return hidden_states - - -class NexusGenImageEmbeddingMerger(nn.Module): - def __init__(self, num_layers=1, out_channel=4096, expand_ratio=4, device='cpu'): - super().__init__() - from transformers import Qwen2_5_VLConfig - from transformers.activations import ACT2FN - config = Qwen2_5_VLConfig(**{ - "_name_or_path": "DiffSynth-Studio/Nexus-GenV2", - "architectures": [ - "Qwen2_5_VLForConditionalGeneration" - ], - "attention_dropout": 0.0, - "auto_map": { - "AutoConfig": "configuration_qwen2_5_vl.Qwen2_5_VLConfig", - "AutoModel": "modeling_qwen2_5_vl.Qwen2_5_VLModel", - "AutoModelForCausalLM": "modeling_qwen2_5_vl.Qwen2_5_VLForConditionalGeneration" - }, - "bos_token_id": 151643, - "eos_token_id": 151645, - "hidden_act": "silu", - "hidden_size": 3584, - "image_token_id": 151655, - "initializer_range": 0.02, - "intermediate_size": 18944, - "max_position_embeddings": 128000, - "max_window_layers": 28, - "model_type": "qwen2_5_vl", - "num_attention_heads": 28, - "num_hidden_layers": 28, - "num_key_value_heads": 4, - "pad_token_id": 151643, - "rms_norm_eps": 1e-06, - 
"rope_scaling": { - "mrope_section": [ - 16, - 24, - 24 - ], - "rope_type": "default", - "type": "default" - }, - "rope_theta": 1000000.0, - "sliding_window": 32768, - "tie_word_embeddings": False, - "torch_dtype": "bfloat16", - "transformers_version": "4.49.0", - "use_cache": False, - "use_sliding_window": False, - "video_token_id": 151656, - "vision_config": { - "hidden_size": 1280, - "in_chans": 3, - "model_type": "qwen2_5_vl", - "spatial_patch_size": 14, - "tokens_per_second": 2, - "torch_dtype": "bfloat16" - }, - "vision_end_token_id": 151653, - "vision_start_token_id": 151652, - "vision_token_id": 151654, - "vocab_size": 152064 - }) - self.config = config - self.num_layers = num_layers - self.layers = nn.ModuleList([Qwen2_5_VLDecoderLayer(config, layer_idx) for layer_idx in range(num_layers)]) - self.projector = nn.Sequential(Qwen2RMSNorm(config.hidden_size, eps=config.rms_norm_eps), - nn.Linear(config.hidden_size, out_channel * expand_ratio), - Qwen2RMSNorm(out_channel * expand_ratio, eps=config.rms_norm_eps), - ACT2FN[config.hidden_act], nn.Linear(out_channel * expand_ratio, out_channel), - Qwen2RMSNorm(out_channel, eps=config.rms_norm_eps)) - self.base_grid = torch.tensor([[1, 72, 72]], device=device) - self.rotary_emb = Qwen2_5_VLRotaryEmbedding(config=config, device=device) - - def get_position_ids(self, image_grid_thw): - """ - Generates position ids for the input embeddings grid. - modified from the qwen2_vl mrope. 
- """ - batch_size = image_grid_thw.shape[0] - spatial_merge_size = self.config.vision_config.spatial_merge_size - t, h, w = ( - image_grid_thw[0][0], - image_grid_thw[0][1], - image_grid_thw[0][2], - ) - llm_grid_t, llm_grid_h, llm_grid_w = ( - t.item(), - h.item() // spatial_merge_size, - w.item() // spatial_merge_size, - ) - scale_h = self.base_grid[0][1].item() / h.item() - scale_w = self.base_grid[0][2].item() / w.item() - - range_tensor = torch.arange(llm_grid_t).view(-1, 1) - expanded_range = range_tensor.expand(-1, llm_grid_h * llm_grid_w) - time_tensor = expanded_range * self.config.vision_config.tokens_per_second - t_index = time_tensor.long().flatten().to(image_grid_thw.device) - h_index = torch.arange(llm_grid_h).view(1, -1, 1).expand(llm_grid_t, -1, llm_grid_w).flatten().to(image_grid_thw.device) * scale_h - w_index = torch.arange(llm_grid_w).view(1, 1, -1).expand(llm_grid_t, llm_grid_h, -1).flatten().to(image_grid_thw.device) * scale_w - # 3, B, L - position_ids = torch.stack([t_index, h_index, w_index]).unsqueeze(0).repeat(batch_size, 1, 1).permute(1, 0, 2) - return position_ids - - def forward(self, embeds, embeds_grid, ref_embeds=None, ref_embeds_grid=None): - position_ids = self.get_position_ids(embeds_grid) - hidden_states = embeds - if ref_embeds is not None: - position_ids_ref_embeds = self.get_position_ids(ref_embeds_grid) - position_ids = torch.cat((position_ids, position_ids_ref_embeds), dim=-1) - hidden_states = torch.cat((embeds, ref_embeds), dim=1) - - position_embeddings = self.rotary_emb(hidden_states, position_ids) - for layer in self.layers: - hidden_states = layer(hidden_states, position_embeddings) - - hidden_states = self.projector(hidden_states) - return hidden_states - - @staticmethod - def state_dict_converter(): - return NexusGenMergerStateDictConverter() - - -class NexusGenMergerStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - return state_dict - - def from_civitai(self, state_dict): 
- merger_state_dict = {key.replace("embedding_merger.", ""): value for key, value in state_dict.items() if key.startswith('embedding_merger.')} - return merger_state_dict - - -class NexusGenAdapter(nn.Module): - """ - Adapter for Nexus-Gen generation decoder. - """ - def __init__(self, input_dim=3584, output_dim=4096): - super(NexusGenAdapter, self).__init__() - self.adapter = nn.Sequential(nn.Linear(input_dim, output_dim), - nn.LayerNorm(output_dim), nn.ReLU(), - nn.Linear(output_dim, output_dim), - nn.LayerNorm(output_dim)) - - def forward(self, x): - return self.adapter(x) - - @staticmethod - def state_dict_converter(): - return NexusGenAdapterStateDictConverter() - - -class NexusGenAdapterStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - return state_dict - - def from_civitai(self, state_dict): - adapter_state_dict = {key: value for key, value in state_dict.items() if key.startswith('adapter.')} - return adapter_state_dict diff --git a/diffsynth/models/omnigen.py b/diffsynth/models/omnigen.py deleted file mode 100644 index 571d6c0..0000000 --- a/diffsynth/models/omnigen.py +++ /dev/null @@ -1,803 +0,0 @@ -# The code is revised from DiT -import os -import torch -import torch.nn as nn -import numpy as np -import math -from safetensors.torch import load_file -from typing import List, Optional, Tuple, Union -import torch.utils.checkpoint -from huggingface_hub import snapshot_download -from transformers.modeling_outputs import BaseModelOutputWithPast -from transformers import Phi3Config, Phi3Model -from transformers.cache_utils import Cache, DynamicCache -from transformers.utils import logging - - -logger = logging.get_logger(__name__) - - -class Phi3Transformer(Phi3Model): - """ - Transformer decoder consisting of *config.num_hidden_layers* layers. 
Each layer is a [`Phi3DecoderLayer`] - We only modified the attention mask - Args: - config: Phi3Config - """ - def prefetch_layer(self, layer_idx: int, device: torch.device): - "Starts prefetching the next layer cache" - with torch.cuda.stream(self.prefetch_stream): - # Prefetch next layer tensors to GPU - for name, param in self.layers[layer_idx].named_parameters(): - param.data = param.data.to(device, non_blocking=True) - - def evict_previous_layer(self, layer_idx: int): - "Moves the previous layer cache to the CPU" - prev_layer_idx = layer_idx - 1 - for name, param in self.layers[prev_layer_idx].named_parameters(): - param.data = param.data.to("cpu", non_blocking=True) - - def get_offlaod_layer(self, layer_idx: int, device: torch.device): - # init stream - if not hasattr(self, "prefetch_stream"): - self.prefetch_stream = torch.cuda.Stream() - - # delete previous layer - torch.cuda.current_stream().synchronize() - self.evict_previous_layer(layer_idx) - - # make sure the current layer is ready - torch.cuda.synchronize(self.prefetch_stream) - - # load next layer - self.prefetch_layer((layer_idx + 1) % len(self.layers), device) - - - def forward( - self, - input_ids: torch.LongTensor = None, - attention_mask: Optional[torch.Tensor] = None, - position_ids: Optional[torch.LongTensor] = None, - past_key_values: Optional[List[torch.FloatTensor]] = None, - inputs_embeds: Optional[torch.FloatTensor] = None, - use_cache: Optional[bool] = None, - output_attentions: Optional[bool] = None, - output_hidden_states: Optional[bool] = None, - return_dict: Optional[bool] = None, - cache_position: Optional[torch.LongTensor] = None, - offload_model: Optional[bool] = False, - ) -> Union[Tuple, BaseModelOutputWithPast]: - output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions - output_hidden_states = ( - output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states - ) - use_cache = use_cache if 
use_cache is not None else self.config.use_cache - - return_dict = return_dict if return_dict is not None else self.config.use_return_dict - - if (input_ids is None) ^ (inputs_embeds is not None): - raise ValueError("You must specify exactly one of input_ids or inputs_embeds") - - if self.gradient_checkpointing and self.training: - if use_cache: - logger.warning_once( - "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." - ) - use_cache = False - - # kept for BC (non `Cache` `past_key_values` inputs) - return_legacy_cache = False - if use_cache and not isinstance(past_key_values, Cache): - return_legacy_cache = True - if past_key_values is None: - past_key_values = DynamicCache() - else: - past_key_values = DynamicCache.from_legacy_cache(past_key_values) - logger.warning_once( - "We detected that you are passing `past_key_values` as a tuple of tuples. This is deprecated and " - "will be removed in v4.47. Please convert your cache or use an appropriate `Cache` class " - "(https://huggingface.co/docs/transformers/kv_cache#legacy-cache-format)" - ) - - # if inputs_embeds is None: - # inputs_embeds = self.embed_tokens(input_ids) - - # if cache_position is None: - # past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0 - # cache_position = torch.arange( - # past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device - # ) - # if position_ids is None: - # position_ids = cache_position.unsqueeze(0) - - if attention_mask is not None and attention_mask.dim() == 3: - dtype = inputs_embeds.dtype - min_dtype = torch.finfo(dtype).min - attention_mask = (1 - attention_mask) * min_dtype - attention_mask = attention_mask.unsqueeze(1).to(inputs_embeds.dtype) - else: - raise Exception("attention_mask parameter was unavailable or invalid") - # causal_mask = self._update_causal_mask( - # attention_mask, inputs_embeds, cache_position, past_key_values, output_attentions - # ) - - 
hidden_states = inputs_embeds - - # decoder layers - all_hidden_states = () if output_hidden_states else None - all_self_attns = () if output_attentions else None - next_decoder_cache = None - - layer_idx = -1 - for decoder_layer in self.layers: - layer_idx += 1 - - if output_hidden_states: - all_hidden_states += (hidden_states,) - - if self.gradient_checkpointing and self.training: - layer_outputs = self._gradient_checkpointing_func( - decoder_layer.__call__, - hidden_states, - attention_mask, - position_ids, - past_key_values, - output_attentions, - use_cache, - cache_position, - ) - else: - if offload_model and not self.training: - self.get_offlaod_layer(layer_idx, device=inputs_embeds.device) - layer_outputs = decoder_layer( - hidden_states, - attention_mask=attention_mask, - position_ids=position_ids, - past_key_value=past_key_values, - output_attentions=output_attentions, - use_cache=use_cache, - cache_position=cache_position, - ) - - hidden_states = layer_outputs[0] - - if use_cache: - next_decoder_cache = layer_outputs[2 if output_attentions else 1] - - if output_attentions: - all_self_attns += (layer_outputs[1],) - - hidden_states = self.norm(hidden_states) - - # add hidden states from the last decoder layer - if output_hidden_states: - print('************') - all_hidden_states += (hidden_states,) - - next_cache = next_decoder_cache if use_cache else None - if return_legacy_cache: - next_cache = next_cache.to_legacy_cache() - - if not return_dict: - return tuple(v for v in [hidden_states, next_cache, all_hidden_states, all_self_attns] if v is not None) - return BaseModelOutputWithPast( - last_hidden_state=hidden_states, - past_key_values=next_cache, - hidden_states=all_hidden_states, - attentions=all_self_attns, - ) - - -def modulate(x, shift, scale): - return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1) - - -class TimestepEmbedder(nn.Module): - """ - Embeds scalar timesteps into vector representations. 
- """ - def __init__(self, hidden_size, frequency_embedding_size=256): - super().__init__() - self.mlp = nn.Sequential( - nn.Linear(frequency_embedding_size, hidden_size, bias=True), - nn.SiLU(), - nn.Linear(hidden_size, hidden_size, bias=True), - ) - self.frequency_embedding_size = frequency_embedding_size - - @staticmethod - def timestep_embedding(t, dim, max_period=10000): - """ - Create sinusoidal timestep embeddings. - :param t: a 1-D Tensor of N indices, one per batch element. - These may be fractional. - :param dim: the dimension of the output. - :param max_period: controls the minimum frequency of the embeddings. - :return: an (N, D) Tensor of positional embeddings. - """ - # https://github.com/openai/glide-text2im/blob/main/glide_text2im/nn.py - half = dim // 2 - freqs = torch.exp( - -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half - ).to(device=t.device) - args = t[:, None].float() * freqs[None] - embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) - if dim % 2: - embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) - return embedding - - def forward(self, t, dtype=torch.float32): - t_freq = self.timestep_embedding(t, self.frequency_embedding_size).to(dtype) - t_emb = self.mlp(t_freq) - return t_emb - - -class FinalLayer(nn.Module): - """ - The final layer of DiT. 
- """ - def __init__(self, hidden_size, patch_size, out_channels): - super().__init__() - self.norm_final = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) - self.linear = nn.Linear(hidden_size, patch_size * patch_size * out_channels, bias=True) - self.adaLN_modulation = nn.Sequential( - nn.SiLU(), - nn.Linear(hidden_size, 2 * hidden_size, bias=True) - ) - - def forward(self, x, c): - shift, scale = self.adaLN_modulation(c).chunk(2, dim=1) - x = modulate(self.norm_final(x), shift, scale) - x = self.linear(x) - return x - - -def get_2d_sincos_pos_embed(embed_dim, grid_size, cls_token=False, extra_tokens=0, interpolation_scale=1.0, base_size=1): - """ - grid_size: int of the grid height and width return: pos_embed: [grid_size*grid_size, embed_dim] or - [1+grid_size*grid_size, embed_dim] (w/ or w/o cls_token) - """ - if isinstance(grid_size, int): - grid_size = (grid_size, grid_size) - - grid_h = np.arange(grid_size[0], dtype=np.float32) / (grid_size[0] / base_size) / interpolation_scale - grid_w = np.arange(grid_size[1], dtype=np.float32) / (grid_size[1] / base_size) / interpolation_scale - grid = np.meshgrid(grid_w, grid_h) # here w goes first - grid = np.stack(grid, axis=0) - - grid = grid.reshape([2, 1, grid_size[1], grid_size[0]]) - pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid) - if cls_token and extra_tokens > 0: - pos_embed = np.concatenate([np.zeros([extra_tokens, embed_dim]), pos_embed], axis=0) - return pos_embed - - -def get_2d_sincos_pos_embed_from_grid(embed_dim, grid): - assert embed_dim % 2 == 0 - - # use half of dimensions to encode grid_h - emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[0]) # (H*W, D/2) - emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[1]) # (H*W, D/2) - - emb = np.concatenate([emb_h, emb_w], axis=1) # (H*W, D) - return emb - - -def get_1d_sincos_pos_embed_from_grid(embed_dim, pos): - """ - embed_dim: output dimension for each position - pos: a list of positions to be 
encoded: size (M,) - out: (M, D) - """ - assert embed_dim % 2 == 0 - omega = np.arange(embed_dim // 2, dtype=np.float64) - omega /= embed_dim / 2. - omega = 1. / 10000**omega # (D/2,) - - pos = pos.reshape(-1) # (M,) - out = np.einsum('m,d->md', pos, omega) # (M, D/2), outer product - - emb_sin = np.sin(out) # (M, D/2) - emb_cos = np.cos(out) # (M, D/2) - - emb = np.concatenate([emb_sin, emb_cos], axis=1) # (M, D) - return emb - - -class PatchEmbedMR(nn.Module): - """ 2D Image to Patch Embedding - """ - def __init__( - self, - patch_size: int = 2, - in_chans: int = 4, - embed_dim: int = 768, - bias: bool = True, - ): - super().__init__() - self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size, bias=bias) - - def forward(self, x): - x = self.proj(x) - x = x.flatten(2).transpose(1, 2) # NCHW -> NLC - return x - - -class OmniGenOriginalModel(nn.Module): - """ - Diffusion model with a Transformer backbone. - """ - def __init__( - self, - transformer_config: Phi3Config, - patch_size=2, - in_channels=4, - pe_interpolation: float = 1.0, - pos_embed_max_size: int = 192, - ): - super().__init__() - self.in_channels = in_channels - self.out_channels = in_channels - self.patch_size = patch_size - self.pos_embed_max_size = pos_embed_max_size - - hidden_size = transformer_config.hidden_size - - self.x_embedder = PatchEmbedMR(patch_size, in_channels, hidden_size, bias=True) - self.input_x_embedder = PatchEmbedMR(patch_size, in_channels, hidden_size, bias=True) - - self.time_token = TimestepEmbedder(hidden_size) - self.t_embedder = TimestepEmbedder(hidden_size) - - self.pe_interpolation = pe_interpolation - pos_embed = get_2d_sincos_pos_embed(hidden_size, pos_embed_max_size, interpolation_scale=self.pe_interpolation, base_size=64) - self.register_buffer("pos_embed", torch.from_numpy(pos_embed).float().unsqueeze(0), persistent=True) - - self.final_layer = FinalLayer(hidden_size, patch_size, self.out_channels) - - self.initialize_weights() - - 
self.llm = Phi3Transformer(config=transformer_config) - self.llm.config.use_cache = False - - @classmethod - def from_pretrained(cls, model_name): - if not os.path.exists(model_name): - cache_folder = os.getenv('HF_HUB_CACHE') - model_name = snapshot_download(repo_id=model_name, - cache_dir=cache_folder, - ignore_patterns=['flax_model.msgpack', 'rust_model.ot', 'tf_model.h5']) - config = Phi3Config.from_pretrained(model_name) - model = cls(config) - if os.path.exists(os.path.join(model_name, 'model.safetensors')): - print("Loading safetensors") - ckpt = load_file(os.path.join(model_name, 'model.safetensors')) - else: - ckpt = torch.load(os.path.join(model_name, 'model.pt'), map_location='cpu') - model.load_state_dict(ckpt) - return model - - def initialize_weights(self): - assert not hasattr(self, "llama") - - # Initialize transformer layers: - def _basic_init(module): - if isinstance(module, nn.Linear): - torch.nn.init.xavier_uniform_(module.weight) - if module.bias is not None: - nn.init.constant_(module.bias, 0) - self.apply(_basic_init) - - # Initialize patch_embed like nn.Linear (instead of nn.Conv2d): - w = self.x_embedder.proj.weight.data - nn.init.xavier_uniform_(w.view([w.shape[0], -1])) - nn.init.constant_(self.x_embedder.proj.bias, 0) - - w = self.input_x_embedder.proj.weight.data - nn.init.xavier_uniform_(w.view([w.shape[0], -1])) - nn.init.constant_(self.x_embedder.proj.bias, 0) - - - # Initialize timestep embedding MLP: - nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02) - nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02) - nn.init.normal_(self.time_token.mlp[0].weight, std=0.02) - nn.init.normal_(self.time_token.mlp[2].weight, std=0.02) - - # Zero-out output layers: - nn.init.constant_(self.final_layer.adaLN_modulation[-1].weight, 0) - nn.init.constant_(self.final_layer.adaLN_modulation[-1].bias, 0) - nn.init.constant_(self.final_layer.linear.weight, 0) - nn.init.constant_(self.final_layer.linear.bias, 0) - - def unpatchify(self, x, h, 
w): - """ - x: (N, T, patch_size**2 * C) - imgs: (N, H, W, C) - """ - c = self.out_channels - - x = x.reshape(shape=(x.shape[0], h//self.patch_size, w//self.patch_size, self.patch_size, self.patch_size, c)) - x = torch.einsum('nhwpqc->nchpwq', x) - imgs = x.reshape(shape=(x.shape[0], c, h, w)) - return imgs - - - def cropped_pos_embed(self, height, width): - """Crops positional embeddings for SD3 compatibility.""" - if self.pos_embed_max_size is None: - raise ValueError("`pos_embed_max_size` must be set for cropping.") - - height = height // self.patch_size - width = width // self.patch_size - if height > self.pos_embed_max_size: - raise ValueError( - f"Height ({height}) cannot be greater than `pos_embed_max_size`: {self.pos_embed_max_size}." - ) - if width > self.pos_embed_max_size: - raise ValueError( - f"Width ({width}) cannot be greater than `pos_embed_max_size`: {self.pos_embed_max_size}." - ) - - top = (self.pos_embed_max_size - height) // 2 - left = (self.pos_embed_max_size - width) // 2 - spatial_pos_embed = self.pos_embed.reshape(1, self.pos_embed_max_size, self.pos_embed_max_size, -1) - spatial_pos_embed = spatial_pos_embed[:, top : top + height, left : left + width, :] - # print(top, top + height, left, left + width, spatial_pos_embed.size()) - spatial_pos_embed = spatial_pos_embed.reshape(1, -1, spatial_pos_embed.shape[-1]) - return spatial_pos_embed - - - def patch_multiple_resolutions(self, latents, padding_latent=None, is_input_images:bool=False): - if isinstance(latents, list): - return_list = False - if padding_latent is None: - padding_latent = [None] * len(latents) - return_list = True - patched_latents, num_tokens, shapes = [], [], [] - for latent, padding in zip(latents, padding_latent): - height, width = latent.shape[-2:] - if is_input_images: - latent = self.input_x_embedder(latent) - else: - latent = self.x_embedder(latent) - pos_embed = self.cropped_pos_embed(height, width) - latent = latent + pos_embed - if padding is not None: - latent = 
torch.cat([latent, padding], dim=-2) - patched_latents.append(latent) - - num_tokens.append(pos_embed.size(1)) - shapes.append([height, width]) - if not return_list: - latents = torch.cat(patched_latents, dim=0) - else: - latents = patched_latents - else: - height, width = latents.shape[-2:] - if is_input_images: - latents = self.input_x_embedder(latents) - else: - latents = self.x_embedder(latents) - pos_embed = self.cropped_pos_embed(height, width) - latents = latents + pos_embed - num_tokens = latents.size(1) - shapes = [height, width] - return latents, num_tokens, shapes - - - def forward(self, x, timestep, input_ids, input_img_latents, input_image_sizes, attention_mask, position_ids, padding_latent=None, past_key_values=None, return_past_key_values=True, offload_model:bool=False): - """ - - """ - input_is_list = isinstance(x, list) - x, num_tokens, shapes = self.patch_multiple_resolutions(x, padding_latent) - time_token = self.time_token(timestep, dtype=x[0].dtype).unsqueeze(1) - - if input_img_latents is not None: - input_latents, _, _ = self.patch_multiple_resolutions(input_img_latents, is_input_images=True) - if input_ids is not None: - condition_embeds = self.llm.embed_tokens(input_ids).clone() - input_img_inx = 0 - for b_inx in input_image_sizes.keys(): - for start_inx, end_inx in input_image_sizes[b_inx]: - condition_embeds[b_inx, start_inx: end_inx] = input_latents[input_img_inx] - input_img_inx += 1 - if input_img_latents is not None: - assert input_img_inx == len(input_latents) - - input_emb = torch.cat([condition_embeds, time_token, x], dim=1) - else: - input_emb = torch.cat([time_token, x], dim=1) - output = self.llm(inputs_embeds=input_emb, attention_mask=attention_mask, position_ids=position_ids, past_key_values=past_key_values, offload_model=offload_model) - output, past_key_values = output.last_hidden_state, output.past_key_values - if input_is_list: - image_embedding = output[:, -max(num_tokens):] - time_emb = self.t_embedder(timestep, 
dtype=x.dtype) - x = self.final_layer(image_embedding, time_emb) - latents = [] - for i in range(x.size(0)): - latent = x[i:i+1, :num_tokens[i]] - latent = self.unpatchify(latent, shapes[i][0], shapes[i][1]) - latents.append(latent) - else: - image_embedding = output[:, -num_tokens:] - time_emb = self.t_embedder(timestep, dtype=x.dtype) - x = self.final_layer(image_embedding, time_emb) - latents = self.unpatchify(x, shapes[0], shapes[1]) - - if return_past_key_values: - return latents, past_key_values - return latents - - @torch.no_grad() - def forward_with_cfg(self, x, timestep, input_ids, input_img_latents, input_image_sizes, attention_mask, position_ids, cfg_scale, use_img_cfg, img_cfg_scale, past_key_values, use_kv_cache, offload_model): - self.llm.config.use_cache = use_kv_cache - model_out, past_key_values = self.forward(x, timestep, input_ids, input_img_latents, input_image_sizes, attention_mask, position_ids, past_key_values=past_key_values, return_past_key_values=True, offload_model=offload_model) - if use_img_cfg: - cond, uncond, img_cond = torch.split(model_out, len(model_out) // 3, dim=0) - cond = uncond + img_cfg_scale * (img_cond - uncond) + cfg_scale * (cond - img_cond) - model_out = [cond, cond, cond] - else: - cond, uncond = torch.split(model_out, len(model_out) // 2, dim=0) - cond = uncond + cfg_scale * (cond - uncond) - model_out = [cond, cond] - - return torch.cat(model_out, dim=0), past_key_values - - - @torch.no_grad() - def forward_with_separate_cfg(self, x, timestep, input_ids, input_img_latents, input_image_sizes, attention_mask, position_ids, cfg_scale, use_img_cfg, img_cfg_scale, past_key_values, use_kv_cache, offload_model): - self.llm.config.use_cache = use_kv_cache - if past_key_values is None: - past_key_values = [None] * len(attention_mask) - - x = torch.split(x, len(x) // len(attention_mask), dim=0) - timestep = timestep.to(x[0].dtype) - timestep = torch.split(timestep, len(timestep) // len(input_ids), dim=0) - - model_out, 
pask_key_values = [], [] - for i in range(len(input_ids)): - temp_out, temp_pask_key_values = self.forward(x[i], timestep[i], input_ids[i], input_img_latents[i], input_image_sizes[i], attention_mask[i], position_ids[i], past_key_values=past_key_values[i], return_past_key_values=True, offload_model=offload_model) - model_out.append(temp_out) - pask_key_values.append(temp_pask_key_values) - - if len(model_out) == 3: - cond, uncond, img_cond = model_out - cond = uncond + img_cfg_scale * (img_cond - uncond) + cfg_scale * (cond - img_cond) - model_out = [cond, cond, cond] - elif len(model_out) == 2: - cond, uncond = model_out - cond = uncond + cfg_scale * (cond - uncond) - model_out = [cond, cond] - else: - return model_out[0] - - return torch.cat(model_out, dim=0), pask_key_values - - - -class OmniGenTransformer(OmniGenOriginalModel): - def __init__(self): - config = { - "_name_or_path": "Phi-3-vision-128k-instruct", - "architectures": [ - "Phi3ForCausalLM" - ], - "attention_dropout": 0.0, - "bos_token_id": 1, - "eos_token_id": 2, - "hidden_act": "silu", - "hidden_size": 3072, - "initializer_range": 0.02, - "intermediate_size": 8192, - "max_position_embeddings": 131072, - "model_type": "phi3", - "num_attention_heads": 32, - "num_hidden_layers": 32, - "num_key_value_heads": 32, - "original_max_position_embeddings": 4096, - "rms_norm_eps": 1e-05, - "rope_scaling": { - "long_factor": [ - 1.0299999713897705, - 1.0499999523162842, - 1.0499999523162842, - 1.0799999237060547, - 1.2299998998641968, - 1.2299998998641968, - 1.2999999523162842, - 1.4499999284744263, - 1.5999999046325684, - 1.6499998569488525, - 1.8999998569488525, - 2.859999895095825, - 3.68999981880188, - 5.419999599456787, - 5.489999771118164, - 5.489999771118164, - 9.09000015258789, - 11.579999923706055, - 15.65999984741211, - 15.769999504089355, - 15.789999961853027, - 18.360000610351562, - 21.989999771118164, - 23.079999923706055, - 30.009998321533203, - 32.35000228881836, - 32.590003967285156, - 
35.56000518798828, - 39.95000457763672, - 53.840003967285156, - 56.20000457763672, - 57.95000457763672, - 59.29000473022461, - 59.77000427246094, - 59.920005798339844, - 61.190006256103516, - 61.96000671386719, - 62.50000762939453, - 63.3700065612793, - 63.48000717163086, - 63.48000717163086, - 63.66000747680664, - 63.850006103515625, - 64.08000946044922, - 64.760009765625, - 64.80001068115234, - 64.81001281738281, - 64.81001281738281 - ], - "short_factor": [ - 1.05, - 1.05, - 1.05, - 1.1, - 1.1, - 1.1, - 1.2500000000000002, - 1.2500000000000002, - 1.4000000000000004, - 1.4500000000000004, - 1.5500000000000005, - 1.8500000000000008, - 1.9000000000000008, - 2.000000000000001, - 2.000000000000001, - 2.000000000000001, - 2.000000000000001, - 2.000000000000001, - 2.000000000000001, - 2.000000000000001, - 2.000000000000001, - 2.000000000000001, - 2.000000000000001, - 2.000000000000001, - 2.000000000000001, - 2.000000000000001, - 2.000000000000001, - 2.000000000000001, - 2.000000000000001, - 2.000000000000001, - 2.000000000000001, - 2.000000000000001, - 2.1000000000000005, - 2.1000000000000005, - 2.2, - 2.3499999999999996, - 2.3499999999999996, - 2.3499999999999996, - 2.3499999999999996, - 2.3999999999999995, - 2.3999999999999995, - 2.6499999999999986, - 2.6999999999999984, - 2.8999999999999977, - 2.9499999999999975, - 3.049999999999997, - 3.049999999999997, - 3.049999999999997 - ], - "type": "su" - }, - "rope_theta": 10000.0, - "sliding_window": 131072, - "tie_word_embeddings": False, - "torch_dtype": "bfloat16", - "transformers_version": "4.38.1", - "use_cache": True, - "vocab_size": 32064, - "_attn_implementation": "sdpa" - } - config = Phi3Config(**config) - super().__init__(config) - - - def forward(self, x, timestep, input_ids, input_img_latents, input_image_sizes, attention_mask, position_ids, padding_latent=None, past_key_values=None, return_past_key_values=True, offload_model:bool=False): - input_is_list = isinstance(x, list) - x, num_tokens, shapes = 
self.patch_multiple_resolutions(x, padding_latent) - time_token = self.time_token(timestep, dtype=x[0].dtype).unsqueeze(1) - - if input_img_latents is not None: - input_latents, _, _ = self.patch_multiple_resolutions(input_img_latents, is_input_images=True) - if input_ids is not None: - condition_embeds = self.llm.embed_tokens(input_ids).clone() - input_img_inx = 0 - for b_inx in input_image_sizes.keys(): - for start_inx, end_inx in input_image_sizes[b_inx]: - condition_embeds[b_inx, start_inx: end_inx] = input_latents[input_img_inx] - input_img_inx += 1 - if input_img_latents is not None: - assert input_img_inx == len(input_latents) - - input_emb = torch.cat([condition_embeds, time_token, x], dim=1) - else: - input_emb = torch.cat([time_token, x], dim=1) - output = self.llm(inputs_embeds=input_emb, attention_mask=attention_mask, position_ids=position_ids, past_key_values=past_key_values, offload_model=offload_model) - output, past_key_values = output.last_hidden_state, output.past_key_values - if input_is_list: - image_embedding = output[:, -max(num_tokens):] - time_emb = self.t_embedder(timestep, dtype=x.dtype) - x = self.final_layer(image_embedding, time_emb) - latents = [] - for i in range(x.size(0)): - latent = x[i:i+1, :num_tokens[i]] - latent = self.unpatchify(latent, shapes[i][0], shapes[i][1]) - latents.append(latent) - else: - image_embedding = output[:, -num_tokens:] - time_emb = self.t_embedder(timestep, dtype=x.dtype) - x = self.final_layer(image_embedding, time_emb) - latents = self.unpatchify(x, shapes[0], shapes[1]) - - if return_past_key_values: - return latents, past_key_values - return latents - - - @torch.no_grad() - def forward_with_separate_cfg(self, x, timestep, input_ids, input_img_latents, input_image_sizes, attention_mask, position_ids, cfg_scale, use_img_cfg, img_cfg_scale, past_key_values, use_kv_cache, offload_model): - self.llm.config.use_cache = use_kv_cache - if past_key_values is None: - past_key_values = [None] * 
len(attention_mask) - - x = torch.split(x, len(x) // len(attention_mask), dim=0) - timestep = timestep.to(x[0].dtype) - timestep = torch.split(timestep, len(timestep) // len(input_ids), dim=0) - - model_out, pask_key_values = [], [] - for i in range(len(input_ids)): - temp_out, temp_pask_key_values = self.forward(x[i], timestep[i], input_ids[i], input_img_latents[i], input_image_sizes[i], attention_mask[i], position_ids[i], past_key_values=past_key_values[i], return_past_key_values=True, offload_model=offload_model) - model_out.append(temp_out) - pask_key_values.append(temp_pask_key_values) - - if len(model_out) == 3: - cond, uncond, img_cond = model_out - cond = uncond + img_cfg_scale * (img_cond - uncond) + cfg_scale * (cond - img_cond) - model_out = [cond, cond, cond] - elif len(model_out) == 2: - cond, uncond = model_out - cond = uncond + cfg_scale * (cond - uncond) - model_out = [cond, cond] - else: - return model_out[0] - - return torch.cat(model_out, dim=0), pask_key_values - - - @staticmethod - def state_dict_converter(): - return OmniGenTransformerStateDictConverter() - - - -class OmniGenTransformerStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - return state_dict - - def from_civitai(self, state_dict): - return state_dict diff --git a/diffsynth/models/qwen_image_controlnet.py b/diffsynth/models/qwen_image_controlnet.py index fc88eaa..6ce4080 100644 --- a/diffsynth/models/qwen_image_controlnet.py +++ b/diffsynth/models/qwen_image_controlnet.py @@ -1,7 +1,6 @@ import torch import torch.nn as nn -from .sd3_dit import RMSNorm -from .utils import hash_state_dict_keys +from .general_modules import RMSNorm class BlockWiseControlBlock(torch.nn.Module): @@ -55,20 +54,3 @@ class QwenImageBlockWiseControlNet(torch.nn.Module): def blockwise_forward(self, img, controlnet_conditioning, block_id): return self.controlnet_blocks[block_id](img, controlnet_conditioning) - - @staticmethod - def state_dict_converter(): - return 
QwenImageBlockWiseControlNetStateDictConverter() - - -class QwenImageBlockWiseControlNetStateDictConverter(): - def __init__(self): - pass - - def from_civitai(self, state_dict): - hash_value = hash_state_dict_keys(state_dict) - extra_kwargs = {} - if hash_value == "a9e54e480a628f0b956a688a81c33bab": - # inpaint controlnet - extra_kwargs = {"additional_in_dim": 4} - return state_dict, extra_kwargs diff --git a/diffsynth/models/qwen_image_dit.py b/diffsynth/models/qwen_image_dit.py index 13cb5ca..ac54945 100644 --- a/diffsynth/models/qwen_image_dit.py +++ b/diffsynth/models/qwen_image_dit.py @@ -2,8 +2,7 @@ import torch, math import torch.nn as nn from typing import Tuple, Optional, Union, List from einops import rearrange -from .sd3_dit import TimestepEmbeddings, RMSNorm -from .flux_dit import AdaLayerNorm +from .general_modules import TimestepEmbeddings, RMSNorm, AdaLayerNorm try: import flash_attn_interface @@ -532,16 +531,3 @@ class QwenImageDiT(torch.nn.Module): latents = rearrange(image, "B (H W) (C P Q) -> B C (H P) (W Q)", H=height//16, W=width//16, P=2, Q=2) return image - - @staticmethod - def state_dict_converter(): - return QwenImageDiTStateDictConverter() - - - -class QwenImageDiTStateDictConverter(): - def __init__(self): - pass - - def from_civitai(self, state_dict): - return state_dict diff --git a/diffsynth/models/qwen_image_text_encoder.py b/diffsynth/models/qwen_image_text_encoder.py index dc10f95..28555cf 100644 --- a/diffsynth/models/qwen_image_text_encoder.py +++ b/diffsynth/models/qwen_image_text_encoder.py @@ -1,4 +1,3 @@ -from transformers import Qwen2_5_VLModel import torch from typing import Optional, Union @@ -6,7 +5,7 @@ from typing import Optional, Union class QwenImageTextEncoder(torch.nn.Module): def __init__(self): super().__init__() - from transformers import Qwen2_5_VLConfig + from transformers import Qwen2_5_VLConfig, Qwen2_5_VLModel config = Qwen2_5_VLConfig(**{ "architectures": [ "Qwen2_5_VLForConditionalGeneration" @@ -39,7 
+38,7 @@ class QwenImageTextEncoder(torch.nn.Module): "sliding_window": 32768, "text_config": { "architectures": [ - "Qwen2_5_VLForConditionalGeneration" + "Qwen2_5_VLForConditionalGeneration" ], "attention_dropout": 0.0, "bos_token_id": 151643, @@ -166,51 +165,6 @@ class QwenImageTextEncoder(torch.nn.Module): logits_to_keep: Union[int, torch.Tensor] = 0, **kwargs, ): - r""" - labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): - Labels for computing the masked language modeling loss. Indices should either be in `[0, ..., - config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored - (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`. - image_grid_thw (`torch.LongTensor` of shape `(num_images, 3)`, *optional*): - The temporal, height and width of feature shape of each image in LLM. - video_grid_thw (`torch.LongTensor` of shape `(num_videos, 3)`, *optional*): - The temporal, height and width of feature shape of each video in LLM. - rope_deltas (`torch.LongTensor` of shape `(batch_size, )`, *optional*): - The rope index difference between sequence length and multimodal rope. - second_per_grid_ts (`torch.Tensor` of shape `(num_videos)`, *optional*): - The time interval (in seconds) for each grid along the temporal dimension in the 3D position IDs. 
- - Example: - - ```python - >>> from PIL import Image - >>> import requests - >>> from transformers import AutoProcessor, Qwen2_5_VLForConditionalGeneration - - >>> model = Qwen2_5_VLForConditionalGeneration.from_pretrained("Qwen/Qwen2.5-VL-7B-Instruct") - >>> processor = AutoProcessor.from_pretrained("Qwen/Qwen2.5-VL-7B-Instruct") - - >>> messages = [ - { - "role": "user", - "content": [ - {"type": "image"}, - {"type": "text", "text": "What is shown in this image?"}, - ], - }, - ] - >>> url = "https://www.ilankelman.org/stopsigns/australia.jpg" - >>> image = Image.open(requests.get(url, stream=True).raw) - - >>> text = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True) - >>> inputs = processor(text=[text], images=[image], vision_infos=[vision_infos]) - - >>> # Generate - >>> generate_ids = model.generate(inputs.input_ids, max_length=30) - >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0] - "The image shows a street scene with a red stop sign in the foreground. In the background, there is a large red gate with Chinese characters ..." - ```""" - output_attentions = False output_hidden_states = True @@ -233,23 +187,3 @@ class QwenImageTextEncoder(torch.nn.Module): **kwargs, ) return outputs.hidden_states - - @staticmethod - def state_dict_converter(): - return QwenImageTextEncoderStateDictConverter() - - - -class QwenImageTextEncoderStateDictConverter(): - def __init__(self): - pass - - def from_diffusers(self, state_dict): - state_dict_ = {} - for k, v in state_dict.items(): - if k.startswith("visual."): - k = "model." 
+ k - elif k.startswith("model."): - k = k.replace("model.", "model.language_model.") - state_dict_[k] = v - return state_dict_ diff --git a/diffsynth/models/qwen_image_vae.py b/diffsynth/models/qwen_image_vae.py index 63de548..cb04713 100644 --- a/diffsynth/models/qwen_image_vae.py +++ b/diffsynth/models/qwen_image_vae.py @@ -721,16 +721,3 @@ class QwenImageVAE(torch.nn.Module): x = self.decoder(x) x = x.squeeze(2) return x - - @staticmethod - def state_dict_converter(): - return QwenImageVAEStateDictConverter() - - - -class QwenImageVAEStateDictConverter(): - def __init__(self): - pass - - def from_diffusers(self, state_dict): - return state_dict diff --git a/diffsynth/models/qwenvl.py b/diffsynth/models/qwenvl.py deleted file mode 100644 index 9677488..0000000 --- a/diffsynth/models/qwenvl.py +++ /dev/null @@ -1,168 +0,0 @@ -import torch - - -class Qwen25VL_7b_Embedder(torch.nn.Module): - def __init__(self, model_path, max_length=640, dtype=torch.bfloat16, device="cuda"): - super(Qwen25VL_7b_Embedder, self).__init__() - self.max_length = max_length - self.dtype = dtype - self.device = device - - from transformers import AutoProcessor, Qwen2_5_VLForConditionalGeneration - - self.model = Qwen2_5_VLForConditionalGeneration.from_pretrained( - model_path, - torch_dtype=dtype, - ).to(torch.cuda.current_device()) - - self.model.requires_grad_(False) - self.processor = AutoProcessor.from_pretrained( - model_path, min_pixels=256 * 28 * 28, max_pixels=324 * 28 * 28 - ) - - Qwen25VL_7b_PREFIX = '''Given a user prompt, generate an "Enhanced prompt" that provides detailed visual descriptions suitable for image generation. Evaluate the level of detail in the user prompt: -- If the prompt is simple, focus on adding specifics about colors, shapes, sizes, textures, and spatial relationships to create vivid and concrete scenes. 
-- If the prompt is already detailed, refine and enhance the existing details slightly without overcomplicating.\n -Here are examples of how to transform or refine prompts: -- User Prompt: A cat sleeping -> Enhanced: A small, fluffy white cat curled up in a round shape, sleeping peacefully on a warm sunny windowsill, surrounded by pots of blooming red flowers. -- User Prompt: A busy city street -> Enhanced: A bustling city street scene at dusk, featuring glowing street lamps, a diverse crowd of people in colorful clothing, and a double-decker bus passing by towering glass skyscrapers.\n -Please generate only the enhanced description for the prompt below and avoid including any additional commentary or evaluations: -User Prompt:''' - - self.prefix = Qwen25VL_7b_PREFIX - - @staticmethod - def from_pretrained(path, torch_dtype=torch.bfloat16, device="cuda"): - return Qwen25VL_7b_Embedder(path, dtype=torch_dtype, device=device) - - def forward(self, caption, ref_images): - text_list = caption - embs = torch.zeros( - len(text_list), - self.max_length, - self.model.config.hidden_size, - dtype=torch.bfloat16, - device=torch.cuda.current_device(), - ) - hidden_states = torch.zeros( - len(text_list), - self.max_length, - self.model.config.hidden_size, - dtype=torch.bfloat16, - device=torch.cuda.current_device(), - ) - masks = torch.zeros( - len(text_list), - self.max_length, - dtype=torch.long, - device=torch.cuda.current_device(), - ) - input_ids_list = [] - attention_mask_list = [] - emb_list = [] - - def split_string(s): - s = s.replace("“", '"').replace("”", '"').replace("'", '''"''') # use english quotes - result = [] - in_quotes = False - temp = "" - - for idx,char in enumerate(s): - if char == '"' and idx>155: - temp += char - if not in_quotes: - result.append(temp) - temp = "" - - in_quotes = not in_quotes - continue - if in_quotes: - if char.isspace(): - pass # have space token - - result.append("“" + char + "”") - else: - temp += char - - if temp: - 
result.append(temp) - - return result - - for idx, (txt, imgs) in enumerate(zip(text_list, ref_images)): - - messages = [{"role": "user", "content": []}] - - messages[0]["content"].append({"type": "text", "text": f"{self.prefix}"}) - - messages[0]["content"].append({"type": "image", "image": imgs}) - - # 再添加 text - messages[0]["content"].append({"type": "text", "text": f"{txt}"}) - - # Preparation for inference - text = self.processor.apply_chat_template( - messages, tokenize=False, add_generation_prompt=True, add_vision_id=True - ) - - image_inputs = [imgs] - - inputs = self.processor( - text=[text], - images=image_inputs, - padding=True, - return_tensors="pt", - ) - - old_inputs_ids = inputs.input_ids - text_split_list = split_string(text) - - token_list = [] - for text_each in text_split_list: - txt_inputs = self.processor( - text=text_each, - images=None, - videos=None, - padding=True, - return_tensors="pt", - ) - token_each = txt_inputs.input_ids - if token_each[0][0] == 2073 and token_each[0][-1] == 854: - token_each = token_each[:, 1:-1] - token_list.append(token_each) - else: - token_list.append(token_each) - - new_txt_ids = torch.cat(token_list, dim=1).to("cuda") - - new_txt_ids = new_txt_ids.to(old_inputs_ids.device) - - idx1 = (old_inputs_ids == 151653).nonzero(as_tuple=True)[1][0] - idx2 = (new_txt_ids == 151653).nonzero(as_tuple=True)[1][0] - inputs.input_ids = ( - torch.cat([old_inputs_ids[0, :idx1], new_txt_ids[0, idx2:]], dim=0) - .unsqueeze(0) - .to("cuda") - ) - inputs.attention_mask = (inputs.input_ids > 0).long().to("cuda") - outputs = self.model( - input_ids=inputs.input_ids, - attention_mask=inputs.attention_mask, - pixel_values=inputs.pixel_values.to("cuda"), - image_grid_thw=inputs.image_grid_thw.to("cuda"), - output_hidden_states=True, - ) - - emb = outputs["hidden_states"][-1] - - embs[idx, : min(self.max_length, emb.shape[1] - 217)] = emb[0, 217:][ - : self.max_length - ] - - masks[idx, : min(self.max_length, emb.shape[1] - 217)] = 
torch.ones( - (min(self.max_length, emb.shape[1] - 217)), - dtype=torch.long, - device=torch.cuda.current_device(), - ) - - return embs, masks \ No newline at end of file diff --git a/diffsynth/models/sd3_dit.py b/diffsynth/models/sd3_dit.py deleted file mode 100644 index eae0afa..0000000 --- a/diffsynth/models/sd3_dit.py +++ /dev/null @@ -1,567 +0,0 @@ -import torch -from einops import rearrange -from .svd_unet import TemporalTimesteps -from .tiler import TileWorker - - - -class RMSNorm(torch.nn.Module): - def __init__(self, dim, eps, elementwise_affine=True): - super().__init__() - self.eps = eps - if elementwise_affine: - self.weight = torch.nn.Parameter(torch.ones((dim,))) - else: - self.weight = None - - def forward(self, hidden_states): - input_dtype = hidden_states.dtype - variance = hidden_states.to(torch.float32).square().mean(-1, keepdim=True) - hidden_states = hidden_states * torch.rsqrt(variance + self.eps) - hidden_states = hidden_states.to(input_dtype) - if self.weight is not None: - hidden_states = hidden_states * self.weight - return hidden_states - - - -class PatchEmbed(torch.nn.Module): - def __init__(self, patch_size=2, in_channels=16, embed_dim=1536, pos_embed_max_size=192): - super().__init__() - self.pos_embed_max_size = pos_embed_max_size - self.patch_size = patch_size - - self.proj = torch.nn.Conv2d(in_channels, embed_dim, kernel_size=(patch_size, patch_size), stride=patch_size) - self.pos_embed = torch.nn.Parameter(torch.zeros(1, self.pos_embed_max_size, self.pos_embed_max_size, embed_dim)) - - def cropped_pos_embed(self, height, width): - height = height // self.patch_size - width = width // self.patch_size - top = (self.pos_embed_max_size - height) // 2 - left = (self.pos_embed_max_size - width) // 2 - spatial_pos_embed = self.pos_embed[:, top : top + height, left : left + width, :].flatten(1, 2) - return spatial_pos_embed - - def forward(self, latent): - height, width = latent.shape[-2:] - latent = self.proj(latent) - latent = 
latent.flatten(2).transpose(1, 2) - pos_embed = self.cropped_pos_embed(height, width) - return latent + pos_embed - - -class DiffusersCompatibleTimestepProj(torch.nn.Module): - def __init__(self, dim_in, dim_out): - super().__init__() - self.linear_1 = torch.nn.Linear(dim_in, dim_out) - self.act = torch.nn.SiLU() - self.linear_2 = torch.nn.Linear(dim_out, dim_out) - - def forward(self, x): - x = self.linear_1(x) - x = self.act(x) - x = self.linear_2(x) - return x - - -class TimestepEmbeddings(torch.nn.Module): - def __init__(self, dim_in, dim_out, computation_device=None, diffusers_compatible_format=False, scale=1, align_dtype_to_timestep=False): - super().__init__() - self.time_proj = TemporalTimesteps(num_channels=dim_in, flip_sin_to_cos=True, downscale_freq_shift=0, computation_device=computation_device, scale=scale, align_dtype_to_timestep=align_dtype_to_timestep) - if diffusers_compatible_format: - self.timestep_embedder = DiffusersCompatibleTimestepProj(dim_in, dim_out) - else: - self.timestep_embedder = torch.nn.Sequential( - torch.nn.Linear(dim_in, dim_out), torch.nn.SiLU(), torch.nn.Linear(dim_out, dim_out) - ) - - def forward(self, timestep, dtype): - time_emb = self.time_proj(timestep).to(dtype) - time_emb = self.timestep_embedder(time_emb) - return time_emb - - - -class AdaLayerNorm(torch.nn.Module): - def __init__(self, dim, single=False, dual=False): - super().__init__() - self.single = single - self.dual = dual - self.linear = torch.nn.Linear(dim, dim * [[6, 2][single], 9][dual]) - self.norm = torch.nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6) - - def forward(self, x, emb): - emb = self.linear(torch.nn.functional.silu(emb)) - if self.single: - scale, shift = emb.unsqueeze(1).chunk(2, dim=2) - x = self.norm(x) * (1 + scale) + shift - return x - elif self.dual: - shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp, shift_msa2, scale_msa2, gate_msa2 = emb.unsqueeze(1).chunk(9, dim=2) - norm_x = self.norm(x) - x = norm_x * (1 + 
scale_msa) + shift_msa - norm_x2 = norm_x * (1 + scale_msa2) + shift_msa2 - return x, gate_msa, shift_mlp, scale_mlp, gate_mlp, norm_x2, gate_msa2 - else: - shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.unsqueeze(1).chunk(6, dim=2) - x = self.norm(x) * (1 + scale_msa) + shift_msa - return x, gate_msa, shift_mlp, scale_mlp, gate_mlp - - - -class JointAttention(torch.nn.Module): - def __init__(self, dim_a, dim_b, num_heads, head_dim, only_out_a=False, use_rms_norm=False): - super().__init__() - self.num_heads = num_heads - self.head_dim = head_dim - self.only_out_a = only_out_a - - self.a_to_qkv = torch.nn.Linear(dim_a, dim_a * 3) - self.b_to_qkv = torch.nn.Linear(dim_b, dim_b * 3) - - self.a_to_out = torch.nn.Linear(dim_a, dim_a) - if not only_out_a: - self.b_to_out = torch.nn.Linear(dim_b, dim_b) - - if use_rms_norm: - self.norm_q_a = RMSNorm(head_dim, eps=1e-6) - self.norm_k_a = RMSNorm(head_dim, eps=1e-6) - self.norm_q_b = RMSNorm(head_dim, eps=1e-6) - self.norm_k_b = RMSNorm(head_dim, eps=1e-6) - else: - self.norm_q_a = None - self.norm_k_a = None - self.norm_q_b = None - self.norm_k_b = None - - - def process_qkv(self, hidden_states, to_qkv, norm_q, norm_k): - batch_size = hidden_states.shape[0] - qkv = to_qkv(hidden_states) - qkv = qkv.view(batch_size, -1, 3 * self.num_heads, self.head_dim).transpose(1, 2) - q, k, v = qkv.chunk(3, dim=1) - if norm_q is not None: - q = norm_q(q) - if norm_k is not None: - k = norm_k(k) - return q, k, v - - - def forward(self, hidden_states_a, hidden_states_b): - batch_size = hidden_states_a.shape[0] - - qa, ka, va = self.process_qkv(hidden_states_a, self.a_to_qkv, self.norm_q_a, self.norm_k_a) - qb, kb, vb = self.process_qkv(hidden_states_b, self.b_to_qkv, self.norm_q_b, self.norm_k_b) - q = torch.concat([qa, qb], dim=2) - k = torch.concat([ka, kb], dim=2) - v = torch.concat([va, vb], dim=2) - - hidden_states = torch.nn.functional.scaled_dot_product_attention(q, k, v) - hidden_states = 
hidden_states.transpose(1, 2).reshape(batch_size, -1, self.num_heads * self.head_dim) - hidden_states = hidden_states.to(q.dtype) - hidden_states_a, hidden_states_b = hidden_states[:, :hidden_states_a.shape[1]], hidden_states[:, hidden_states_a.shape[1]:] - hidden_states_a = self.a_to_out(hidden_states_a) - if self.only_out_a: - return hidden_states_a - else: - hidden_states_b = self.b_to_out(hidden_states_b) - return hidden_states_a, hidden_states_b - - - -class SingleAttention(torch.nn.Module): - def __init__(self, dim_a, num_heads, head_dim, use_rms_norm=False): - super().__init__() - self.num_heads = num_heads - self.head_dim = head_dim - - self.a_to_qkv = torch.nn.Linear(dim_a, dim_a * 3) - self.a_to_out = torch.nn.Linear(dim_a, dim_a) - - if use_rms_norm: - self.norm_q_a = RMSNorm(head_dim, eps=1e-6) - self.norm_k_a = RMSNorm(head_dim, eps=1e-6) - else: - self.norm_q_a = None - self.norm_k_a = None - - - def process_qkv(self, hidden_states, to_qkv, norm_q, norm_k): - batch_size = hidden_states.shape[0] - qkv = to_qkv(hidden_states) - qkv = qkv.view(batch_size, -1, 3 * self.num_heads, self.head_dim).transpose(1, 2) - q, k, v = qkv.chunk(3, dim=1) - if norm_q is not None: - q = norm_q(q) - if norm_k is not None: - k = norm_k(k) - return q, k, v - - - def forward(self, hidden_states_a): - batch_size = hidden_states_a.shape[0] - q, k, v = self.process_qkv(hidden_states_a, self.a_to_qkv, self.norm_q_a, self.norm_k_a) - - hidden_states = torch.nn.functional.scaled_dot_product_attention(q, k, v) - hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, self.num_heads * self.head_dim) - hidden_states = hidden_states.to(q.dtype) - hidden_states = self.a_to_out(hidden_states) - return hidden_states - - - -class DualTransformerBlock(torch.nn.Module): - def __init__(self, dim, num_attention_heads, use_rms_norm=False): - super().__init__() - self.norm1_a = AdaLayerNorm(dim, dual=True) - self.norm1_b = AdaLayerNorm(dim) - - self.attn = JointAttention(dim, 
dim, num_attention_heads, dim // num_attention_heads, use_rms_norm=use_rms_norm) - self.attn2 = JointAttention(dim, dim, num_attention_heads, dim // num_attention_heads, use_rms_norm=use_rms_norm) - - self.norm2_a = torch.nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6) - self.ff_a = torch.nn.Sequential( - torch.nn.Linear(dim, dim*4), - torch.nn.GELU(approximate="tanh"), - torch.nn.Linear(dim*4, dim) - ) - - self.norm2_b = torch.nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6) - self.ff_b = torch.nn.Sequential( - torch.nn.Linear(dim, dim*4), - torch.nn.GELU(approximate="tanh"), - torch.nn.Linear(dim*4, dim) - ) - - - def forward(self, hidden_states_a, hidden_states_b, temb): - norm_hidden_states_a, gate_msa_a, shift_mlp_a, scale_mlp_a, gate_mlp_a, norm_hidden_states_a_2, gate_msa_a_2 = self.norm1_a(hidden_states_a, emb=temb) - norm_hidden_states_b, gate_msa_b, shift_mlp_b, scale_mlp_b, gate_mlp_b = self.norm1_b(hidden_states_b, emb=temb) - - # Attention - attn_output_a, attn_output_b = self.attn(norm_hidden_states_a, norm_hidden_states_b) - - # Part A - hidden_states_a = hidden_states_a + gate_msa_a * attn_output_a - hidden_states_a = hidden_states_a + gate_msa_a_2 * self.attn2(norm_hidden_states_a_2) - norm_hidden_states_a = self.norm2_a(hidden_states_a) * (1 + scale_mlp_a) + shift_mlp_a - hidden_states_a = hidden_states_a + gate_mlp_a * self.ff_a(norm_hidden_states_a) - - # Part B - hidden_states_b = hidden_states_b + gate_msa_b * attn_output_b - norm_hidden_states_b = self.norm2_b(hidden_states_b) * (1 + scale_mlp_b) + shift_mlp_b - hidden_states_b = hidden_states_b + gate_mlp_b * self.ff_b(norm_hidden_states_b) - - return hidden_states_a, hidden_states_b - - - -class JointTransformerBlock(torch.nn.Module): - def __init__(self, dim, num_attention_heads, use_rms_norm=False, dual=False): - super().__init__() - self.norm1_a = AdaLayerNorm(dim, dual=dual) - self.norm1_b = AdaLayerNorm(dim) - - self.attn = JointAttention(dim, dim, num_attention_heads, dim 
// num_attention_heads, use_rms_norm=use_rms_norm) - if dual: - self.attn2 = SingleAttention(dim, num_attention_heads, dim // num_attention_heads, use_rms_norm=use_rms_norm) - - self.norm2_a = torch.nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6) - self.ff_a = torch.nn.Sequential( - torch.nn.Linear(dim, dim*4), - torch.nn.GELU(approximate="tanh"), - torch.nn.Linear(dim*4, dim) - ) - - self.norm2_b = torch.nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6) - self.ff_b = torch.nn.Sequential( - torch.nn.Linear(dim, dim*4), - torch.nn.GELU(approximate="tanh"), - torch.nn.Linear(dim*4, dim) - ) - - - def forward(self, hidden_states_a, hidden_states_b, temb): - if self.norm1_a.dual: - norm_hidden_states_a, gate_msa_a, shift_mlp_a, scale_mlp_a, gate_mlp_a, norm_hidden_states_a_2, gate_msa_a_2 = self.norm1_a(hidden_states_a, emb=temb) - else: - norm_hidden_states_a, gate_msa_a, shift_mlp_a, scale_mlp_a, gate_mlp_a = self.norm1_a(hidden_states_a, emb=temb) - norm_hidden_states_b, gate_msa_b, shift_mlp_b, scale_mlp_b, gate_mlp_b = self.norm1_b(hidden_states_b, emb=temb) - - # Attention - attn_output_a, attn_output_b = self.attn(norm_hidden_states_a, norm_hidden_states_b) - - # Part A - hidden_states_a = hidden_states_a + gate_msa_a * attn_output_a - if self.norm1_a.dual: - hidden_states_a = hidden_states_a + gate_msa_a_2 * self.attn2(norm_hidden_states_a_2) - norm_hidden_states_a = self.norm2_a(hidden_states_a) * (1 + scale_mlp_a) + shift_mlp_a - hidden_states_a = hidden_states_a + gate_mlp_a * self.ff_a(norm_hidden_states_a) - - # Part B - hidden_states_b = hidden_states_b + gate_msa_b * attn_output_b - norm_hidden_states_b = self.norm2_b(hidden_states_b) * (1 + scale_mlp_b) + shift_mlp_b - hidden_states_b = hidden_states_b + gate_mlp_b * self.ff_b(norm_hidden_states_b) - - return hidden_states_a, hidden_states_b - - - -class JointTransformerFinalBlock(torch.nn.Module): - def __init__(self, dim, num_attention_heads, use_rms_norm=False): - super().__init__() - 
self.norm1_a = AdaLayerNorm(dim) - self.norm1_b = AdaLayerNorm(dim, single=True) - - self.attn = JointAttention(dim, dim, num_attention_heads, dim // num_attention_heads, only_out_a=True, use_rms_norm=use_rms_norm) - - self.norm2_a = torch.nn.LayerNorm(dim, elementwise_affine=False, eps=1e-6) - self.ff_a = torch.nn.Sequential( - torch.nn.Linear(dim, dim*4), - torch.nn.GELU(approximate="tanh"), - torch.nn.Linear(dim*4, dim) - ) - - - def forward(self, hidden_states_a, hidden_states_b, temb): - norm_hidden_states_a, gate_msa_a, shift_mlp_a, scale_mlp_a, gate_mlp_a = self.norm1_a(hidden_states_a, emb=temb) - norm_hidden_states_b = self.norm1_b(hidden_states_b, emb=temb) - - # Attention - attn_output_a = self.attn(norm_hidden_states_a, norm_hidden_states_b) - - # Part A - hidden_states_a = hidden_states_a + gate_msa_a * attn_output_a - norm_hidden_states_a = self.norm2_a(hidden_states_a) * (1 + scale_mlp_a) + shift_mlp_a - hidden_states_a = hidden_states_a + gate_mlp_a * self.ff_a(norm_hidden_states_a) - - return hidden_states_a, hidden_states_b - - - -class SD3DiT(torch.nn.Module): - def __init__(self, embed_dim=1536, num_layers=24, use_rms_norm=False, num_dual_blocks=0, pos_embed_max_size=192): - super().__init__() - self.pos_embedder = PatchEmbed(patch_size=2, in_channels=16, embed_dim=embed_dim, pos_embed_max_size=pos_embed_max_size) - self.time_embedder = TimestepEmbeddings(256, embed_dim) - self.pooled_text_embedder = torch.nn.Sequential(torch.nn.Linear(2048, embed_dim), torch.nn.SiLU(), torch.nn.Linear(embed_dim, embed_dim)) - self.context_embedder = torch.nn.Linear(4096, embed_dim) - self.blocks = torch.nn.ModuleList([JointTransformerBlock(embed_dim, embed_dim//64, use_rms_norm=use_rms_norm, dual=True) for _ in range(num_dual_blocks)] - + [JointTransformerBlock(embed_dim, embed_dim//64, use_rms_norm=use_rms_norm) for _ in range(num_layers-1-num_dual_blocks)] - + [JointTransformerFinalBlock(embed_dim, embed_dim//64, use_rms_norm=use_rms_norm)]) - self.norm_out = 
AdaLayerNorm(embed_dim, single=True) - self.proj_out = torch.nn.Linear(embed_dim, 64) - - def tiled_forward(self, hidden_states, timestep, prompt_emb, pooled_prompt_emb, tile_size=128, tile_stride=64): - # Due to the global positional embedding, we cannot implement layer-wise tiled forward. - hidden_states = TileWorker().tiled_forward( - lambda x: self.forward(x, timestep, prompt_emb, pooled_prompt_emb), - hidden_states, - tile_size, - tile_stride, - tile_device=hidden_states.device, - tile_dtype=hidden_states.dtype - ) - return hidden_states - - def forward(self, hidden_states, timestep, prompt_emb, pooled_prompt_emb, tiled=False, tile_size=128, tile_stride=64, use_gradient_checkpointing=False): - if tiled: - return self.tiled_forward(hidden_states, timestep, prompt_emb, pooled_prompt_emb, tile_size, tile_stride) - conditioning = self.time_embedder(timestep, hidden_states.dtype) + self.pooled_text_embedder(pooled_prompt_emb) - prompt_emb = self.context_embedder(prompt_emb) - - height, width = hidden_states.shape[-2:] - hidden_states = self.pos_embedder(hidden_states) - - def create_custom_forward(module): - def custom_forward(*inputs): - return module(*inputs) - return custom_forward - - for block in self.blocks: - if self.training and use_gradient_checkpointing: - hidden_states, prompt_emb = torch.utils.checkpoint.checkpoint( - create_custom_forward(block), - hidden_states, prompt_emb, conditioning, - use_reentrant=False, - ) - else: - hidden_states, prompt_emb = block(hidden_states, prompt_emb, conditioning) - - hidden_states = self.norm_out(hidden_states, conditioning) - hidden_states = self.proj_out(hidden_states) - hidden_states = rearrange(hidden_states, "B (H W) (P Q C) -> B C (H P) (W Q)", P=2, Q=2, H=height//2, W=width//2) - return hidden_states - - @staticmethod - def state_dict_converter(): - return SD3DiTStateDictConverter() - - - -class SD3DiTStateDictConverter: - def __init__(self): - pass - - def infer_architecture(self, state_dict): - embed_dim = 
state_dict["blocks.0.ff_a.0.weight"].shape[1] - num_layers = 100 - while num_layers > 0 and f"blocks.{num_layers-1}.ff_a.0.bias" not in state_dict: - num_layers -= 1 - use_rms_norm = "blocks.0.attn.norm_q_a.weight" in state_dict - num_dual_blocks = 0 - while f"blocks.{num_dual_blocks}.attn2.a_to_out.bias" in state_dict: - num_dual_blocks += 1 - pos_embed_max_size = state_dict["pos_embedder.pos_embed"].shape[1] - return { - "embed_dim": embed_dim, - "num_layers": num_layers, - "use_rms_norm": use_rms_norm, - "num_dual_blocks": num_dual_blocks, - "pos_embed_max_size": pos_embed_max_size - } - - def from_diffusers(self, state_dict): - rename_dict = { - "context_embedder": "context_embedder", - "pos_embed.pos_embed": "pos_embedder.pos_embed", - "pos_embed.proj": "pos_embedder.proj", - "time_text_embed.timestep_embedder.linear_1": "time_embedder.timestep_embedder.0", - "time_text_embed.timestep_embedder.linear_2": "time_embedder.timestep_embedder.2", - "time_text_embed.text_embedder.linear_1": "pooled_text_embedder.0", - "time_text_embed.text_embedder.linear_2": "pooled_text_embedder.2", - "norm_out.linear": "norm_out.linear", - "proj_out": "proj_out", - - "norm1.linear": "norm1_a.linear", - "norm1_context.linear": "norm1_b.linear", - "attn.to_q": "attn.a_to_q", - "attn.to_k": "attn.a_to_k", - "attn.to_v": "attn.a_to_v", - "attn.to_out.0": "attn.a_to_out", - "attn.add_q_proj": "attn.b_to_q", - "attn.add_k_proj": "attn.b_to_k", - "attn.add_v_proj": "attn.b_to_v", - "attn.to_add_out": "attn.b_to_out", - "ff.net.0.proj": "ff_a.0", - "ff.net.2": "ff_a.2", - "ff_context.net.0.proj": "ff_b.0", - "ff_context.net.2": "ff_b.2", - - "attn.norm_q": "attn.norm_q_a", - "attn.norm_k": "attn.norm_k_a", - "attn.norm_added_q": "attn.norm_q_b", - "attn.norm_added_k": "attn.norm_k_b", - } - state_dict_ = {} - for name, param in state_dict.items(): - if name in rename_dict: - if name == "pos_embed.pos_embed": - param = param.reshape((1, 192, 192, param.shape[-1])) - 
state_dict_[rename_dict[name]] = param - elif name.endswith(".weight") or name.endswith(".bias"): - suffix = ".weight" if name.endswith(".weight") else ".bias" - prefix = name[:-len(suffix)] - if prefix in rename_dict: - state_dict_[rename_dict[prefix] + suffix] = param - elif prefix.startswith("transformer_blocks."): - names = prefix.split(".") - names[0] = "blocks" - middle = ".".join(names[2:]) - if middle in rename_dict: - name_ = ".".join(names[:2] + [rename_dict[middle]] + [suffix[1:]]) - state_dict_[name_] = param - merged_keys = [name for name in state_dict_ if ".a_to_q." in name or ".b_to_q." in name] - for key in merged_keys: - param = torch.concat([ - state_dict_[key.replace("to_q", "to_q")], - state_dict_[key.replace("to_q", "to_k")], - state_dict_[key.replace("to_q", "to_v")], - ], dim=0) - name = key.replace("to_q", "to_qkv") - state_dict_.pop(key.replace("to_q", "to_q")) - state_dict_.pop(key.replace("to_q", "to_k")) - state_dict_.pop(key.replace("to_q", "to_v")) - state_dict_[name] = param - return state_dict_, self.infer_architecture(state_dict_) - - def from_civitai(self, state_dict): - rename_dict = { - "model.diffusion_model.context_embedder.bias": "context_embedder.bias", - "model.diffusion_model.context_embedder.weight": "context_embedder.weight", - "model.diffusion_model.final_layer.linear.bias": "proj_out.bias", - "model.diffusion_model.final_layer.linear.weight": "proj_out.weight", - - "model.diffusion_model.pos_embed": "pos_embedder.pos_embed", - "model.diffusion_model.t_embedder.mlp.0.bias": "time_embedder.timestep_embedder.0.bias", - "model.diffusion_model.t_embedder.mlp.0.weight": "time_embedder.timestep_embedder.0.weight", - "model.diffusion_model.t_embedder.mlp.2.bias": "time_embedder.timestep_embedder.2.bias", - "model.diffusion_model.t_embedder.mlp.2.weight": "time_embedder.timestep_embedder.2.weight", - "model.diffusion_model.x_embedder.proj.bias": "pos_embedder.proj.bias", - "model.diffusion_model.x_embedder.proj.weight": 
"pos_embedder.proj.weight", - "model.diffusion_model.y_embedder.mlp.0.bias": "pooled_text_embedder.0.bias", - "model.diffusion_model.y_embedder.mlp.0.weight": "pooled_text_embedder.0.weight", - "model.diffusion_model.y_embedder.mlp.2.bias": "pooled_text_embedder.2.bias", - "model.diffusion_model.y_embedder.mlp.2.weight": "pooled_text_embedder.2.weight", - - "model.diffusion_model.joint_blocks.23.context_block.adaLN_modulation.1.weight": "blocks.23.norm1_b.linear.weight", - "model.diffusion_model.joint_blocks.23.context_block.adaLN_modulation.1.bias": "blocks.23.norm1_b.linear.bias", - "model.diffusion_model.final_layer.adaLN_modulation.1.weight": "norm_out.linear.weight", - "model.diffusion_model.final_layer.adaLN_modulation.1.bias": "norm_out.linear.bias", - } - for i in range(40): - rename_dict.update({ - f"model.diffusion_model.joint_blocks.{i}.context_block.adaLN_modulation.1.bias": f"blocks.{i}.norm1_b.linear.bias", - f"model.diffusion_model.joint_blocks.{i}.context_block.adaLN_modulation.1.weight": f"blocks.{i}.norm1_b.linear.weight", - f"model.diffusion_model.joint_blocks.{i}.context_block.attn.proj.bias": f"blocks.{i}.attn.b_to_out.bias", - f"model.diffusion_model.joint_blocks.{i}.context_block.attn.proj.weight": f"blocks.{i}.attn.b_to_out.weight", - f"model.diffusion_model.joint_blocks.{i}.context_block.attn.qkv.bias": [f'blocks.{i}.attn.b_to_q.bias', f'blocks.{i}.attn.b_to_k.bias', f'blocks.{i}.attn.b_to_v.bias'], - f"model.diffusion_model.joint_blocks.{i}.context_block.attn.qkv.weight": [f'blocks.{i}.attn.b_to_q.weight', f'blocks.{i}.attn.b_to_k.weight', f'blocks.{i}.attn.b_to_v.weight'], - f"model.diffusion_model.joint_blocks.{i}.context_block.mlp.fc1.bias": f"blocks.{i}.ff_b.0.bias", - f"model.diffusion_model.joint_blocks.{i}.context_block.mlp.fc1.weight": f"blocks.{i}.ff_b.0.weight", - f"model.diffusion_model.joint_blocks.{i}.context_block.mlp.fc2.bias": f"blocks.{i}.ff_b.2.bias", - 
f"model.diffusion_model.joint_blocks.{i}.context_block.mlp.fc2.weight": f"blocks.{i}.ff_b.2.weight", - f"model.diffusion_model.joint_blocks.{i}.x_block.adaLN_modulation.1.bias": f"blocks.{i}.norm1_a.linear.bias", - f"model.diffusion_model.joint_blocks.{i}.x_block.adaLN_modulation.1.weight": f"blocks.{i}.norm1_a.linear.weight", - f"model.diffusion_model.joint_blocks.{i}.x_block.attn.proj.bias": f"blocks.{i}.attn.a_to_out.bias", - f"model.diffusion_model.joint_blocks.{i}.x_block.attn.proj.weight": f"blocks.{i}.attn.a_to_out.weight", - f"model.diffusion_model.joint_blocks.{i}.x_block.attn.qkv.bias": [f'blocks.{i}.attn.a_to_q.bias', f'blocks.{i}.attn.a_to_k.bias', f'blocks.{i}.attn.a_to_v.bias'], - f"model.diffusion_model.joint_blocks.{i}.x_block.attn.qkv.weight": [f'blocks.{i}.attn.a_to_q.weight', f'blocks.{i}.attn.a_to_k.weight', f'blocks.{i}.attn.a_to_v.weight'], - f"model.diffusion_model.joint_blocks.{i}.x_block.mlp.fc1.bias": f"blocks.{i}.ff_a.0.bias", - f"model.diffusion_model.joint_blocks.{i}.x_block.mlp.fc1.weight": f"blocks.{i}.ff_a.0.weight", - f"model.diffusion_model.joint_blocks.{i}.x_block.mlp.fc2.bias": f"blocks.{i}.ff_a.2.bias", - f"model.diffusion_model.joint_blocks.{i}.x_block.mlp.fc2.weight": f"blocks.{i}.ff_a.2.weight", - f"model.diffusion_model.joint_blocks.{i}.x_block.attn.ln_q.weight": f"blocks.{i}.attn.norm_q_a.weight", - f"model.diffusion_model.joint_blocks.{i}.x_block.attn.ln_k.weight": f"blocks.{i}.attn.norm_k_a.weight", - f"model.diffusion_model.joint_blocks.{i}.context_block.attn.ln_q.weight": f"blocks.{i}.attn.norm_q_b.weight", - f"model.diffusion_model.joint_blocks.{i}.context_block.attn.ln_k.weight": f"blocks.{i}.attn.norm_k_b.weight", - - f"model.diffusion_model.joint_blocks.{i}.x_block.attn2.ln_q.weight": f"blocks.{i}.attn2.norm_q_a.weight", - f"model.diffusion_model.joint_blocks.{i}.x_block.attn2.ln_k.weight": f"blocks.{i}.attn2.norm_k_a.weight", - f"model.diffusion_model.joint_blocks.{i}.x_block.attn2.qkv.weight": 
f"blocks.{i}.attn2.a_to_qkv.weight", - f"model.diffusion_model.joint_blocks.{i}.x_block.attn2.qkv.bias": f"blocks.{i}.attn2.a_to_qkv.bias", - f"model.diffusion_model.joint_blocks.{i}.x_block.attn2.proj.weight": f"blocks.{i}.attn2.a_to_out.weight", - f"model.diffusion_model.joint_blocks.{i}.x_block.attn2.proj.bias": f"blocks.{i}.attn2.a_to_out.bias", - }) - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if name == "model.diffusion_model.pos_embed": - pos_embed_max_size = int(param.shape[1] ** 0.5 + 0.4) - param = param.reshape((1, pos_embed_max_size, pos_embed_max_size, param.shape[-1])) - if isinstance(rename_dict[name], str): - state_dict_[rename_dict[name]] = param - else: - name_ = rename_dict[name][0].replace(".a_to_q.", ".a_to_qkv.").replace(".b_to_q.", ".b_to_qkv.") - state_dict_[name_] = param - extra_kwargs = self.infer_architecture(state_dict_) - num_layers = extra_kwargs["num_layers"] - for name in [ - f"blocks.{num_layers-1}.norm1_b.linear.weight", f"blocks.{num_layers-1}.norm1_b.linear.bias", "norm_out.linear.weight", "norm_out.linear.bias", - ]: - param = state_dict_[name] - dim = param.shape[0] // 2 - param = torch.concat([param[dim:], param[:dim]], axis=0) - state_dict_[name] = param - return state_dict_, self.infer_architecture(state_dict_) diff --git a/diffsynth/models/sd3_text_encoder.py b/diffsynth/models/sd3_text_encoder.py deleted file mode 100644 index efe29ca..0000000 --- a/diffsynth/models/sd3_text_encoder.py +++ /dev/null @@ -1,1120 +0,0 @@ -import torch -from transformers import T5EncoderModel, T5Config -from .sd_text_encoder import SDTextEncoder -from .sdxl_text_encoder import SDXLTextEncoder2, SDXLTextEncoder2StateDictConverter - - -class SD3TextEncoder1(SDTextEncoder): - def __init__(self, vocab_size=49408): - super().__init__(vocab_size=vocab_size) - - def forward(self, input_ids, clip_skip=2, extra_mask=None): - embeds = self.token_embedding(input_ids) - embeds = embeds + 
self.position_embeds.to(dtype=embeds.dtype, device=input_ids.device) - attn_mask = self.attn_mask.to(device=embeds.device, dtype=embeds.dtype) - if extra_mask is not None: - attn_mask[:, extra_mask[0]==0] = float("-inf") - for encoder_id, encoder in enumerate(self.encoders): - embeds = encoder(embeds, attn_mask=attn_mask) - if encoder_id + clip_skip == len(self.encoders): - hidden_states = embeds - embeds = self.final_layer_norm(embeds) - pooled_embeds = embeds[torch.arange(embeds.shape[0]), input_ids.to(dtype=torch.int).argmax(dim=-1)] - return pooled_embeds, hidden_states - - @staticmethod - def state_dict_converter(): - return SD3TextEncoder1StateDictConverter() - - - -class SD3TextEncoder2(SDXLTextEncoder2): - def __init__(self): - super().__init__() - - @staticmethod - def state_dict_converter(): - return SD3TextEncoder2StateDictConverter() - - -class SD3TextEncoder3(T5EncoderModel): - def __init__(self): - config = T5Config( - _name_or_path = ".", - architectures = ["T5EncoderModel"], - classifier_dropout = 0.0, - d_ff = 10240, - d_kv = 64, - d_model = 4096, - decoder_start_token_id = 0, - dense_act_fn = "gelu_new", - dropout_rate = 0.1, - eos_token_id = 1, - feed_forward_proj = "gated-gelu", - initializer_factor = 1.0, - is_encoder_decoder = True, - is_gated_act = True, - layer_norm_epsilon = 1e-06, - model_type = "t5", - num_decoder_layers = 24, - num_heads = 64, - num_layers = 24, - output_past = True, - pad_token_id = 0, - relative_attention_max_distance = 128, - relative_attention_num_buckets = 32, - tie_word_embeddings = False, - torch_dtype = torch.float16, - transformers_version = "4.41.2", - use_cache = True, - vocab_size = 32128 - ) - super().__init__(config) - self.eval() - - def forward(self, input_ids): - outputs = super().forward(input_ids=input_ids) - prompt_emb = outputs.last_hidden_state - return prompt_emb - - @staticmethod - def state_dict_converter(): - return SD3TextEncoder3StateDictConverter() - - - -class 
SD3TextEncoder1StateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - rename_dict = { - "text_model.embeddings.token_embedding.weight": "token_embedding.weight", - "text_model.embeddings.position_embedding.weight": "position_embeds", - "text_model.final_layer_norm.weight": "final_layer_norm.weight", - "text_model.final_layer_norm.bias": "final_layer_norm.bias", - } - attn_rename_dict = { - "self_attn.q_proj": "attn.to_q", - "self_attn.k_proj": "attn.to_k", - "self_attn.v_proj": "attn.to_v", - "self_attn.out_proj": "attn.to_out", - "layer_norm1": "layer_norm1", - "layer_norm2": "layer_norm2", - "mlp.fc1": "fc1", - "mlp.fc2": "fc2", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if name == "text_model.embeddings.position_embedding.weight": - param = param.reshape((1, param.shape[0], param.shape[1])) - state_dict_[rename_dict[name]] = param - elif name.startswith("text_model.encoder.layers."): - param = state_dict[name] - names = name.split(".") - layer_id, layer_type, tail = names[3], ".".join(names[4:-1]), names[-1] - name_ = ".".join(["encoders", layer_id, attn_rename_dict[layer_type], tail]) - state_dict_[name_] = param - return state_dict_ - - def from_civitai(self, state_dict): - rename_dict = { - "text_encoders.clip_l.transformer.text_model.embeddings.position_embedding.weight": "position_embeds", - "text_encoders.clip_l.transformer.text_model.embeddings.token_embedding.weight": "token_embedding.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.0.layer_norm1.bias": "encoders.0.layer_norm1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.0.layer_norm1.weight": "encoders.0.layer_norm1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.0.layer_norm2.bias": "encoders.0.layer_norm2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.0.layer_norm2.weight": "encoders.0.layer_norm2.weight", - 
"text_encoders.clip_l.transformer.text_model.encoder.layers.0.mlp.fc1.bias": "encoders.0.fc1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.0.mlp.fc1.weight": "encoders.0.fc1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.0.mlp.fc2.bias": "encoders.0.fc2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.0.mlp.fc2.weight": "encoders.0.fc2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.0.self_attn.k_proj.bias": "encoders.0.attn.to_k.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.0.self_attn.k_proj.weight": "encoders.0.attn.to_k.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.0.self_attn.out_proj.bias": "encoders.0.attn.to_out.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.0.self_attn.out_proj.weight": "encoders.0.attn.to_out.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.0.self_attn.q_proj.bias": "encoders.0.attn.to_q.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.0.self_attn.q_proj.weight": "encoders.0.attn.to_q.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.0.self_attn.v_proj.bias": "encoders.0.attn.to_v.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.0.self_attn.v_proj.weight": "encoders.0.attn.to_v.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.1.layer_norm1.bias": "encoders.1.layer_norm1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.1.layer_norm1.weight": "encoders.1.layer_norm1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.1.layer_norm2.bias": "encoders.1.layer_norm2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.1.layer_norm2.weight": "encoders.1.layer_norm2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.1.mlp.fc1.bias": "encoders.1.fc1.bias", - 
"text_encoders.clip_l.transformer.text_model.encoder.layers.1.mlp.fc1.weight": "encoders.1.fc1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.1.mlp.fc2.bias": "encoders.1.fc2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.1.mlp.fc2.weight": "encoders.1.fc2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.1.self_attn.k_proj.bias": "encoders.1.attn.to_k.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.1.self_attn.k_proj.weight": "encoders.1.attn.to_k.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.1.self_attn.out_proj.bias": "encoders.1.attn.to_out.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.1.self_attn.out_proj.weight": "encoders.1.attn.to_out.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.1.self_attn.q_proj.bias": "encoders.1.attn.to_q.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.1.self_attn.q_proj.weight": "encoders.1.attn.to_q.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.1.self_attn.v_proj.bias": "encoders.1.attn.to_v.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.1.self_attn.v_proj.weight": "encoders.1.attn.to_v.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.10.layer_norm1.bias": "encoders.10.layer_norm1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.10.layer_norm1.weight": "encoders.10.layer_norm1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.10.layer_norm2.bias": "encoders.10.layer_norm2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.10.layer_norm2.weight": "encoders.10.layer_norm2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.10.mlp.fc1.bias": "encoders.10.fc1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.10.mlp.fc1.weight": "encoders.10.fc1.weight", - 
"text_encoders.clip_l.transformer.text_model.encoder.layers.10.mlp.fc2.bias": "encoders.10.fc2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.10.mlp.fc2.weight": "encoders.10.fc2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.10.self_attn.k_proj.bias": "encoders.10.attn.to_k.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.10.self_attn.k_proj.weight": "encoders.10.attn.to_k.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.10.self_attn.out_proj.bias": "encoders.10.attn.to_out.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.10.self_attn.out_proj.weight": "encoders.10.attn.to_out.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.10.self_attn.q_proj.bias": "encoders.10.attn.to_q.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.10.self_attn.q_proj.weight": "encoders.10.attn.to_q.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.10.self_attn.v_proj.bias": "encoders.10.attn.to_v.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.10.self_attn.v_proj.weight": "encoders.10.attn.to_v.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.11.layer_norm1.bias": "encoders.11.layer_norm1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.11.layer_norm1.weight": "encoders.11.layer_norm1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.11.layer_norm2.bias": "encoders.11.layer_norm2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.11.layer_norm2.weight": "encoders.11.layer_norm2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.11.mlp.fc1.bias": "encoders.11.fc1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.11.mlp.fc1.weight": "encoders.11.fc1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.11.mlp.fc2.bias": "encoders.11.fc2.bias", - 
"text_encoders.clip_l.transformer.text_model.encoder.layers.11.mlp.fc2.weight": "encoders.11.fc2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.11.self_attn.k_proj.bias": "encoders.11.attn.to_k.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.11.self_attn.k_proj.weight": "encoders.11.attn.to_k.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.11.self_attn.out_proj.bias": "encoders.11.attn.to_out.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.11.self_attn.out_proj.weight": "encoders.11.attn.to_out.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.11.self_attn.q_proj.bias": "encoders.11.attn.to_q.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.11.self_attn.q_proj.weight": "encoders.11.attn.to_q.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.11.self_attn.v_proj.bias": "encoders.11.attn.to_v.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.11.self_attn.v_proj.weight": "encoders.11.attn.to_v.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.2.layer_norm1.bias": "encoders.2.layer_norm1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.2.layer_norm1.weight": "encoders.2.layer_norm1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.2.layer_norm2.bias": "encoders.2.layer_norm2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.2.layer_norm2.weight": "encoders.2.layer_norm2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.2.mlp.fc1.bias": "encoders.2.fc1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.2.mlp.fc1.weight": "encoders.2.fc1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.2.mlp.fc2.bias": "encoders.2.fc2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.2.mlp.fc2.weight": "encoders.2.fc2.weight", - 
"text_encoders.clip_l.transformer.text_model.encoder.layers.2.self_attn.k_proj.bias": "encoders.2.attn.to_k.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.2.self_attn.k_proj.weight": "encoders.2.attn.to_k.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.2.self_attn.out_proj.bias": "encoders.2.attn.to_out.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.2.self_attn.out_proj.weight": "encoders.2.attn.to_out.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.2.self_attn.q_proj.bias": "encoders.2.attn.to_q.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.2.self_attn.q_proj.weight": "encoders.2.attn.to_q.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.2.self_attn.v_proj.bias": "encoders.2.attn.to_v.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.2.self_attn.v_proj.weight": "encoders.2.attn.to_v.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.3.layer_norm1.bias": "encoders.3.layer_norm1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.3.layer_norm1.weight": "encoders.3.layer_norm1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.3.layer_norm2.bias": "encoders.3.layer_norm2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.3.layer_norm2.weight": "encoders.3.layer_norm2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.3.mlp.fc1.bias": "encoders.3.fc1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.3.mlp.fc1.weight": "encoders.3.fc1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.3.mlp.fc2.bias": "encoders.3.fc2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.3.mlp.fc2.weight": "encoders.3.fc2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.3.self_attn.k_proj.bias": "encoders.3.attn.to_k.bias", - 
"text_encoders.clip_l.transformer.text_model.encoder.layers.3.self_attn.k_proj.weight": "encoders.3.attn.to_k.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.3.self_attn.out_proj.bias": "encoders.3.attn.to_out.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.3.self_attn.out_proj.weight": "encoders.3.attn.to_out.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.3.self_attn.q_proj.bias": "encoders.3.attn.to_q.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.3.self_attn.q_proj.weight": "encoders.3.attn.to_q.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.3.self_attn.v_proj.bias": "encoders.3.attn.to_v.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.3.self_attn.v_proj.weight": "encoders.3.attn.to_v.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.4.layer_norm1.bias": "encoders.4.layer_norm1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.4.layer_norm1.weight": "encoders.4.layer_norm1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.4.layer_norm2.bias": "encoders.4.layer_norm2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.4.layer_norm2.weight": "encoders.4.layer_norm2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.4.mlp.fc1.bias": "encoders.4.fc1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.4.mlp.fc1.weight": "encoders.4.fc1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.4.mlp.fc2.bias": "encoders.4.fc2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.4.mlp.fc2.weight": "encoders.4.fc2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.4.self_attn.k_proj.bias": "encoders.4.attn.to_k.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.4.self_attn.k_proj.weight": "encoders.4.attn.to_k.weight", - 
"text_encoders.clip_l.transformer.text_model.encoder.layers.4.self_attn.out_proj.bias": "encoders.4.attn.to_out.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.4.self_attn.out_proj.weight": "encoders.4.attn.to_out.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.4.self_attn.q_proj.bias": "encoders.4.attn.to_q.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.4.self_attn.q_proj.weight": "encoders.4.attn.to_q.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.4.self_attn.v_proj.bias": "encoders.4.attn.to_v.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.4.self_attn.v_proj.weight": "encoders.4.attn.to_v.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.5.layer_norm1.bias": "encoders.5.layer_norm1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.5.layer_norm1.weight": "encoders.5.layer_norm1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.5.layer_norm2.bias": "encoders.5.layer_norm2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.5.layer_norm2.weight": "encoders.5.layer_norm2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.5.mlp.fc1.bias": "encoders.5.fc1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.5.mlp.fc1.weight": "encoders.5.fc1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.5.mlp.fc2.bias": "encoders.5.fc2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.5.mlp.fc2.weight": "encoders.5.fc2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.5.self_attn.k_proj.bias": "encoders.5.attn.to_k.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.5.self_attn.k_proj.weight": "encoders.5.attn.to_k.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.5.self_attn.out_proj.bias": "encoders.5.attn.to_out.bias", - 
"text_encoders.clip_l.transformer.text_model.encoder.layers.5.self_attn.out_proj.weight": "encoders.5.attn.to_out.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.5.self_attn.q_proj.bias": "encoders.5.attn.to_q.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.5.self_attn.q_proj.weight": "encoders.5.attn.to_q.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.5.self_attn.v_proj.bias": "encoders.5.attn.to_v.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.5.self_attn.v_proj.weight": "encoders.5.attn.to_v.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.6.layer_norm1.bias": "encoders.6.layer_norm1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.6.layer_norm1.weight": "encoders.6.layer_norm1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.6.layer_norm2.bias": "encoders.6.layer_norm2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.6.layer_norm2.weight": "encoders.6.layer_norm2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.6.mlp.fc1.bias": "encoders.6.fc1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.6.mlp.fc1.weight": "encoders.6.fc1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.6.mlp.fc2.bias": "encoders.6.fc2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.6.mlp.fc2.weight": "encoders.6.fc2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.6.self_attn.k_proj.bias": "encoders.6.attn.to_k.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.6.self_attn.k_proj.weight": "encoders.6.attn.to_k.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.6.self_attn.out_proj.bias": "encoders.6.attn.to_out.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.6.self_attn.out_proj.weight": "encoders.6.attn.to_out.weight", - 
"text_encoders.clip_l.transformer.text_model.encoder.layers.6.self_attn.q_proj.bias": "encoders.6.attn.to_q.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.6.self_attn.q_proj.weight": "encoders.6.attn.to_q.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.6.self_attn.v_proj.bias": "encoders.6.attn.to_v.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.6.self_attn.v_proj.weight": "encoders.6.attn.to_v.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.7.layer_norm1.bias": "encoders.7.layer_norm1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.7.layer_norm1.weight": "encoders.7.layer_norm1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.7.layer_norm2.bias": "encoders.7.layer_norm2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.7.layer_norm2.weight": "encoders.7.layer_norm2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.7.mlp.fc1.bias": "encoders.7.fc1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.7.mlp.fc1.weight": "encoders.7.fc1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.7.mlp.fc2.bias": "encoders.7.fc2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.7.mlp.fc2.weight": "encoders.7.fc2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.7.self_attn.k_proj.bias": "encoders.7.attn.to_k.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.7.self_attn.k_proj.weight": "encoders.7.attn.to_k.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.7.self_attn.out_proj.bias": "encoders.7.attn.to_out.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.7.self_attn.out_proj.weight": "encoders.7.attn.to_out.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.7.self_attn.q_proj.bias": "encoders.7.attn.to_q.bias", - 
"text_encoders.clip_l.transformer.text_model.encoder.layers.7.self_attn.q_proj.weight": "encoders.7.attn.to_q.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.7.self_attn.v_proj.bias": "encoders.7.attn.to_v.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.7.self_attn.v_proj.weight": "encoders.7.attn.to_v.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.8.layer_norm1.bias": "encoders.8.layer_norm1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.8.layer_norm1.weight": "encoders.8.layer_norm1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.8.layer_norm2.bias": "encoders.8.layer_norm2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.8.layer_norm2.weight": "encoders.8.layer_norm2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.8.mlp.fc1.bias": "encoders.8.fc1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.8.mlp.fc1.weight": "encoders.8.fc1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.8.mlp.fc2.bias": "encoders.8.fc2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.8.mlp.fc2.weight": "encoders.8.fc2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.8.self_attn.k_proj.bias": "encoders.8.attn.to_k.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.8.self_attn.k_proj.weight": "encoders.8.attn.to_k.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.8.self_attn.out_proj.bias": "encoders.8.attn.to_out.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.8.self_attn.out_proj.weight": "encoders.8.attn.to_out.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.8.self_attn.q_proj.bias": "encoders.8.attn.to_q.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.8.self_attn.q_proj.weight": "encoders.8.attn.to_q.weight", - 
"text_encoders.clip_l.transformer.text_model.encoder.layers.8.self_attn.v_proj.bias": "encoders.8.attn.to_v.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.8.self_attn.v_proj.weight": "encoders.8.attn.to_v.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.9.layer_norm1.bias": "encoders.9.layer_norm1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.9.layer_norm1.weight": "encoders.9.layer_norm1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.9.layer_norm2.bias": "encoders.9.layer_norm2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.9.layer_norm2.weight": "encoders.9.layer_norm2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.9.mlp.fc1.bias": "encoders.9.fc1.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.9.mlp.fc1.weight": "encoders.9.fc1.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.9.mlp.fc2.bias": "encoders.9.fc2.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.9.mlp.fc2.weight": "encoders.9.fc2.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.9.self_attn.k_proj.bias": "encoders.9.attn.to_k.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.9.self_attn.k_proj.weight": "encoders.9.attn.to_k.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.9.self_attn.out_proj.bias": "encoders.9.attn.to_out.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.9.self_attn.out_proj.weight": "encoders.9.attn.to_out.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.9.self_attn.q_proj.bias": "encoders.9.attn.to_q.bias", - "text_encoders.clip_l.transformer.text_model.encoder.layers.9.self_attn.q_proj.weight": "encoders.9.attn.to_q.weight", - "text_encoders.clip_l.transformer.text_model.encoder.layers.9.self_attn.v_proj.bias": "encoders.9.attn.to_v.bias", - 
"text_encoders.clip_l.transformer.text_model.encoder.layers.9.self_attn.v_proj.weight": "encoders.9.attn.to_v.weight", - "text_encoders.clip_l.transformer.text_model.final_layer_norm.bias": "final_layer_norm.bias", - "text_encoders.clip_l.transformer.text_model.final_layer_norm.weight": "final_layer_norm.weight", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if name == "text_encoders.clip_l.transformer.text_model.embeddings.position_embedding.weight": - param = param.reshape((1, param.shape[0], param.shape[1])) - state_dict_[rename_dict[name]] = param - elif ("text_encoders.clip_l.transformer." + name) in rename_dict: - param = state_dict[name] - if name == "text_model.embeddings.position_embedding.weight": - param = param.reshape((1, param.shape[0], param.shape[1])) - state_dict_[rename_dict["text_encoders.clip_l.transformer." + name]] = param - return state_dict_ - - - -class SD3TextEncoder2StateDictConverter(SDXLTextEncoder2StateDictConverter): - def __init__(self): - pass - - def from_diffusers(self, state_dict): - return super().from_diffusers(state_dict) - - def from_civitai(self, state_dict): - rename_dict = { - "text_encoders.clip_g.transformer.text_model.embeddings.position_embedding.weight": "position_embeds", - "text_encoders.clip_g.transformer.text_model.embeddings.token_embedding.weight": "token_embedding.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.0.layer_norm1.bias": "encoders.0.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.0.layer_norm1.weight": "encoders.0.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.0.layer_norm2.bias": "encoders.0.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.0.layer_norm2.weight": "encoders.0.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.0.mlp.fc1.bias": "encoders.0.fc1.bias", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.0.mlp.fc1.weight": "encoders.0.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.0.mlp.fc2.bias": "encoders.0.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.0.mlp.fc2.weight": "encoders.0.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.0.self_attn.k_proj.bias": "encoders.0.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.0.self_attn.k_proj.weight": "encoders.0.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.0.self_attn.out_proj.bias": "encoders.0.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.0.self_attn.out_proj.weight": "encoders.0.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.0.self_attn.q_proj.bias": "encoders.0.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.0.self_attn.q_proj.weight": "encoders.0.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.0.self_attn.v_proj.bias": "encoders.0.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.0.self_attn.v_proj.weight": "encoders.0.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.1.layer_norm1.bias": "encoders.1.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.1.layer_norm1.weight": "encoders.1.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.1.layer_norm2.bias": "encoders.1.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.1.layer_norm2.weight": "encoders.1.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.1.mlp.fc1.bias": "encoders.1.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.1.mlp.fc1.weight": "encoders.1.fc1.weight", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.1.mlp.fc2.bias": "encoders.1.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.1.mlp.fc2.weight": "encoders.1.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.1.self_attn.k_proj.bias": "encoders.1.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.1.self_attn.k_proj.weight": "encoders.1.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.1.self_attn.out_proj.bias": "encoders.1.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.1.self_attn.out_proj.weight": "encoders.1.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.1.self_attn.q_proj.bias": "encoders.1.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.1.self_attn.q_proj.weight": "encoders.1.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.1.self_attn.v_proj.bias": "encoders.1.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.1.self_attn.v_proj.weight": "encoders.1.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.10.layer_norm1.bias": "encoders.10.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.10.layer_norm1.weight": "encoders.10.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.10.layer_norm2.bias": "encoders.10.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.10.layer_norm2.weight": "encoders.10.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.10.mlp.fc1.bias": "encoders.10.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.10.mlp.fc1.weight": "encoders.10.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.10.mlp.fc2.bias": "encoders.10.fc2.bias", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.10.mlp.fc2.weight": "encoders.10.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.10.self_attn.k_proj.bias": "encoders.10.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.10.self_attn.k_proj.weight": "encoders.10.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.10.self_attn.out_proj.bias": "encoders.10.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.10.self_attn.out_proj.weight": "encoders.10.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.10.self_attn.q_proj.bias": "encoders.10.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.10.self_attn.q_proj.weight": "encoders.10.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.10.self_attn.v_proj.bias": "encoders.10.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.10.self_attn.v_proj.weight": "encoders.10.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.11.layer_norm1.bias": "encoders.11.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.11.layer_norm1.weight": "encoders.11.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.11.layer_norm2.bias": "encoders.11.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.11.layer_norm2.weight": "encoders.11.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.11.mlp.fc1.bias": "encoders.11.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.11.mlp.fc1.weight": "encoders.11.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.11.mlp.fc2.bias": "encoders.11.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.11.mlp.fc2.weight": "encoders.11.fc2.weight", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.11.self_attn.k_proj.bias": "encoders.11.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.11.self_attn.k_proj.weight": "encoders.11.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.11.self_attn.out_proj.bias": "encoders.11.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.11.self_attn.out_proj.weight": "encoders.11.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.11.self_attn.q_proj.bias": "encoders.11.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.11.self_attn.q_proj.weight": "encoders.11.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.11.self_attn.v_proj.bias": "encoders.11.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.11.self_attn.v_proj.weight": "encoders.11.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.12.layer_norm1.bias": "encoders.12.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.12.layer_norm1.weight": "encoders.12.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.12.layer_norm2.bias": "encoders.12.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.12.layer_norm2.weight": "encoders.12.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.12.mlp.fc1.bias": "encoders.12.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.12.mlp.fc1.weight": "encoders.12.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.12.mlp.fc2.bias": "encoders.12.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.12.mlp.fc2.weight": "encoders.12.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.12.self_attn.k_proj.bias": "encoders.12.attn.to_k.bias", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.12.self_attn.k_proj.weight": "encoders.12.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.12.self_attn.out_proj.bias": "encoders.12.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.12.self_attn.out_proj.weight": "encoders.12.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.12.self_attn.q_proj.bias": "encoders.12.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.12.self_attn.q_proj.weight": "encoders.12.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.12.self_attn.v_proj.bias": "encoders.12.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.12.self_attn.v_proj.weight": "encoders.12.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.13.layer_norm1.bias": "encoders.13.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.13.layer_norm1.weight": "encoders.13.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.13.layer_norm2.bias": "encoders.13.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.13.layer_norm2.weight": "encoders.13.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.13.mlp.fc1.bias": "encoders.13.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.13.mlp.fc1.weight": "encoders.13.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.13.mlp.fc2.bias": "encoders.13.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.13.mlp.fc2.weight": "encoders.13.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.13.self_attn.k_proj.bias": "encoders.13.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.13.self_attn.k_proj.weight": "encoders.13.attn.to_k.weight", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.13.self_attn.out_proj.bias": "encoders.13.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.13.self_attn.out_proj.weight": "encoders.13.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.13.self_attn.q_proj.bias": "encoders.13.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.13.self_attn.q_proj.weight": "encoders.13.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.13.self_attn.v_proj.bias": "encoders.13.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.13.self_attn.v_proj.weight": "encoders.13.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.14.layer_norm1.bias": "encoders.14.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.14.layer_norm1.weight": "encoders.14.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.14.layer_norm2.bias": "encoders.14.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.14.layer_norm2.weight": "encoders.14.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.14.mlp.fc1.bias": "encoders.14.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.14.mlp.fc1.weight": "encoders.14.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.14.mlp.fc2.bias": "encoders.14.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.14.mlp.fc2.weight": "encoders.14.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.14.self_attn.k_proj.bias": "encoders.14.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.14.self_attn.k_proj.weight": "encoders.14.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.14.self_attn.out_proj.bias": "encoders.14.attn.to_out.bias", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.14.self_attn.out_proj.weight": "encoders.14.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.14.self_attn.q_proj.bias": "encoders.14.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.14.self_attn.q_proj.weight": "encoders.14.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.14.self_attn.v_proj.bias": "encoders.14.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.14.self_attn.v_proj.weight": "encoders.14.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.15.layer_norm1.bias": "encoders.15.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.15.layer_norm1.weight": "encoders.15.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.15.layer_norm2.bias": "encoders.15.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.15.layer_norm2.weight": "encoders.15.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.15.mlp.fc1.bias": "encoders.15.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.15.mlp.fc1.weight": "encoders.15.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.15.mlp.fc2.bias": "encoders.15.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.15.mlp.fc2.weight": "encoders.15.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.15.self_attn.k_proj.bias": "encoders.15.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.15.self_attn.k_proj.weight": "encoders.15.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.15.self_attn.out_proj.bias": "encoders.15.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.15.self_attn.out_proj.weight": "encoders.15.attn.to_out.weight", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.15.self_attn.q_proj.bias": "encoders.15.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.15.self_attn.q_proj.weight": "encoders.15.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.15.self_attn.v_proj.bias": "encoders.15.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.15.self_attn.v_proj.weight": "encoders.15.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.16.layer_norm1.bias": "encoders.16.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.16.layer_norm1.weight": "encoders.16.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.16.layer_norm2.bias": "encoders.16.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.16.layer_norm2.weight": "encoders.16.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.16.mlp.fc1.bias": "encoders.16.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.16.mlp.fc1.weight": "encoders.16.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.16.mlp.fc2.bias": "encoders.16.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.16.mlp.fc2.weight": "encoders.16.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.16.self_attn.k_proj.bias": "encoders.16.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.16.self_attn.k_proj.weight": "encoders.16.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.16.self_attn.out_proj.bias": "encoders.16.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.16.self_attn.out_proj.weight": "encoders.16.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.16.self_attn.q_proj.bias": "encoders.16.attn.to_q.bias", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.16.self_attn.q_proj.weight": "encoders.16.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.16.self_attn.v_proj.bias": "encoders.16.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.16.self_attn.v_proj.weight": "encoders.16.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.17.layer_norm1.bias": "encoders.17.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.17.layer_norm1.weight": "encoders.17.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.17.layer_norm2.bias": "encoders.17.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.17.layer_norm2.weight": "encoders.17.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.17.mlp.fc1.bias": "encoders.17.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.17.mlp.fc1.weight": "encoders.17.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.17.mlp.fc2.bias": "encoders.17.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.17.mlp.fc2.weight": "encoders.17.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.17.self_attn.k_proj.bias": "encoders.17.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.17.self_attn.k_proj.weight": "encoders.17.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.17.self_attn.out_proj.bias": "encoders.17.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.17.self_attn.out_proj.weight": "encoders.17.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.17.self_attn.q_proj.bias": "encoders.17.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.17.self_attn.q_proj.weight": "encoders.17.attn.to_q.weight", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.17.self_attn.v_proj.bias": "encoders.17.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.17.self_attn.v_proj.weight": "encoders.17.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.18.layer_norm1.bias": "encoders.18.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.18.layer_norm1.weight": "encoders.18.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.18.layer_norm2.bias": "encoders.18.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.18.layer_norm2.weight": "encoders.18.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.18.mlp.fc1.bias": "encoders.18.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.18.mlp.fc1.weight": "encoders.18.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.18.mlp.fc2.bias": "encoders.18.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.18.mlp.fc2.weight": "encoders.18.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.18.self_attn.k_proj.bias": "encoders.18.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.18.self_attn.k_proj.weight": "encoders.18.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.18.self_attn.out_proj.bias": "encoders.18.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.18.self_attn.out_proj.weight": "encoders.18.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.18.self_attn.q_proj.bias": "encoders.18.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.18.self_attn.q_proj.weight": "encoders.18.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.18.self_attn.v_proj.bias": "encoders.18.attn.to_v.bias", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.18.self_attn.v_proj.weight": "encoders.18.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.19.layer_norm1.bias": "encoders.19.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.19.layer_norm1.weight": "encoders.19.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.19.layer_norm2.bias": "encoders.19.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.19.layer_norm2.weight": "encoders.19.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.19.mlp.fc1.bias": "encoders.19.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.19.mlp.fc1.weight": "encoders.19.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.19.mlp.fc2.bias": "encoders.19.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.19.mlp.fc2.weight": "encoders.19.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.19.self_attn.k_proj.bias": "encoders.19.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.19.self_attn.k_proj.weight": "encoders.19.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.19.self_attn.out_proj.bias": "encoders.19.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.19.self_attn.out_proj.weight": "encoders.19.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.19.self_attn.q_proj.bias": "encoders.19.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.19.self_attn.q_proj.weight": "encoders.19.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.19.self_attn.v_proj.bias": "encoders.19.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.19.self_attn.v_proj.weight": "encoders.19.attn.to_v.weight", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.2.layer_norm1.bias": "encoders.2.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.2.layer_norm1.weight": "encoders.2.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.2.layer_norm2.bias": "encoders.2.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.2.layer_norm2.weight": "encoders.2.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.2.mlp.fc1.bias": "encoders.2.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.2.mlp.fc1.weight": "encoders.2.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.2.mlp.fc2.bias": "encoders.2.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.2.mlp.fc2.weight": "encoders.2.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.2.self_attn.k_proj.bias": "encoders.2.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.2.self_attn.k_proj.weight": "encoders.2.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.2.self_attn.out_proj.bias": "encoders.2.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.2.self_attn.out_proj.weight": "encoders.2.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.2.self_attn.q_proj.bias": "encoders.2.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.2.self_attn.q_proj.weight": "encoders.2.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.2.self_attn.v_proj.bias": "encoders.2.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.2.self_attn.v_proj.weight": "encoders.2.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.20.layer_norm1.bias": "encoders.20.layer_norm1.bias", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.20.layer_norm1.weight": "encoders.20.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.20.layer_norm2.bias": "encoders.20.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.20.layer_norm2.weight": "encoders.20.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.20.mlp.fc1.bias": "encoders.20.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.20.mlp.fc1.weight": "encoders.20.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.20.mlp.fc2.bias": "encoders.20.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.20.mlp.fc2.weight": "encoders.20.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.20.self_attn.k_proj.bias": "encoders.20.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.20.self_attn.k_proj.weight": "encoders.20.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.20.self_attn.out_proj.bias": "encoders.20.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.20.self_attn.out_proj.weight": "encoders.20.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.20.self_attn.q_proj.bias": "encoders.20.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.20.self_attn.q_proj.weight": "encoders.20.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.20.self_attn.v_proj.bias": "encoders.20.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.20.self_attn.v_proj.weight": "encoders.20.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.21.layer_norm1.bias": "encoders.21.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.21.layer_norm1.weight": "encoders.21.layer_norm1.weight", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.21.layer_norm2.bias": "encoders.21.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.21.layer_norm2.weight": "encoders.21.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.21.mlp.fc1.bias": "encoders.21.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.21.mlp.fc1.weight": "encoders.21.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.21.mlp.fc2.bias": "encoders.21.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.21.mlp.fc2.weight": "encoders.21.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.21.self_attn.k_proj.bias": "encoders.21.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.21.self_attn.k_proj.weight": "encoders.21.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.21.self_attn.out_proj.bias": "encoders.21.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.21.self_attn.out_proj.weight": "encoders.21.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.21.self_attn.q_proj.bias": "encoders.21.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.21.self_attn.q_proj.weight": "encoders.21.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.21.self_attn.v_proj.bias": "encoders.21.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.21.self_attn.v_proj.weight": "encoders.21.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.22.layer_norm1.bias": "encoders.22.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.22.layer_norm1.weight": "encoders.22.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.22.layer_norm2.bias": "encoders.22.layer_norm2.bias", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.22.layer_norm2.weight": "encoders.22.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.22.mlp.fc1.bias": "encoders.22.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.22.mlp.fc1.weight": "encoders.22.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.22.mlp.fc2.bias": "encoders.22.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.22.mlp.fc2.weight": "encoders.22.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.22.self_attn.k_proj.bias": "encoders.22.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.22.self_attn.k_proj.weight": "encoders.22.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.22.self_attn.out_proj.bias": "encoders.22.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.22.self_attn.out_proj.weight": "encoders.22.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.22.self_attn.q_proj.bias": "encoders.22.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.22.self_attn.q_proj.weight": "encoders.22.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.22.self_attn.v_proj.bias": "encoders.22.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.22.self_attn.v_proj.weight": "encoders.22.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.23.layer_norm1.bias": "encoders.23.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.23.layer_norm1.weight": "encoders.23.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.23.layer_norm2.bias": "encoders.23.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.23.layer_norm2.weight": "encoders.23.layer_norm2.weight", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.23.mlp.fc1.bias": "encoders.23.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.23.mlp.fc1.weight": "encoders.23.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.23.mlp.fc2.bias": "encoders.23.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.23.mlp.fc2.weight": "encoders.23.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.23.self_attn.k_proj.bias": "encoders.23.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.23.self_attn.k_proj.weight": "encoders.23.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.23.self_attn.out_proj.bias": "encoders.23.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.23.self_attn.out_proj.weight": "encoders.23.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.23.self_attn.q_proj.bias": "encoders.23.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.23.self_attn.q_proj.weight": "encoders.23.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.23.self_attn.v_proj.bias": "encoders.23.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.23.self_attn.v_proj.weight": "encoders.23.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.24.layer_norm1.bias": "encoders.24.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.24.layer_norm1.weight": "encoders.24.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.24.layer_norm2.bias": "encoders.24.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.24.layer_norm2.weight": "encoders.24.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.24.mlp.fc1.bias": "encoders.24.fc1.bias", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.24.mlp.fc1.weight": "encoders.24.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.24.mlp.fc2.bias": "encoders.24.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.24.mlp.fc2.weight": "encoders.24.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.24.self_attn.k_proj.bias": "encoders.24.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.24.self_attn.k_proj.weight": "encoders.24.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.24.self_attn.out_proj.bias": "encoders.24.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.24.self_attn.out_proj.weight": "encoders.24.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.24.self_attn.q_proj.bias": "encoders.24.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.24.self_attn.q_proj.weight": "encoders.24.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.24.self_attn.v_proj.bias": "encoders.24.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.24.self_attn.v_proj.weight": "encoders.24.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.25.layer_norm1.bias": "encoders.25.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.25.layer_norm1.weight": "encoders.25.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.25.layer_norm2.bias": "encoders.25.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.25.layer_norm2.weight": "encoders.25.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.25.mlp.fc1.bias": "encoders.25.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.25.mlp.fc1.weight": "encoders.25.fc1.weight", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.25.mlp.fc2.bias": "encoders.25.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.25.mlp.fc2.weight": "encoders.25.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.25.self_attn.k_proj.bias": "encoders.25.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.25.self_attn.k_proj.weight": "encoders.25.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.25.self_attn.out_proj.bias": "encoders.25.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.25.self_attn.out_proj.weight": "encoders.25.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.25.self_attn.q_proj.bias": "encoders.25.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.25.self_attn.q_proj.weight": "encoders.25.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.25.self_attn.v_proj.bias": "encoders.25.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.25.self_attn.v_proj.weight": "encoders.25.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.26.layer_norm1.bias": "encoders.26.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.26.layer_norm1.weight": "encoders.26.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.26.layer_norm2.bias": "encoders.26.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.26.layer_norm2.weight": "encoders.26.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.26.mlp.fc1.bias": "encoders.26.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.26.mlp.fc1.weight": "encoders.26.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.26.mlp.fc2.bias": "encoders.26.fc2.bias", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.26.mlp.fc2.weight": "encoders.26.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.26.self_attn.k_proj.bias": "encoders.26.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.26.self_attn.k_proj.weight": "encoders.26.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.26.self_attn.out_proj.bias": "encoders.26.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.26.self_attn.out_proj.weight": "encoders.26.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.26.self_attn.q_proj.bias": "encoders.26.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.26.self_attn.q_proj.weight": "encoders.26.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.26.self_attn.v_proj.bias": "encoders.26.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.26.self_attn.v_proj.weight": "encoders.26.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.27.layer_norm1.bias": "encoders.27.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.27.layer_norm1.weight": "encoders.27.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.27.layer_norm2.bias": "encoders.27.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.27.layer_norm2.weight": "encoders.27.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.27.mlp.fc1.bias": "encoders.27.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.27.mlp.fc1.weight": "encoders.27.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.27.mlp.fc2.bias": "encoders.27.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.27.mlp.fc2.weight": "encoders.27.fc2.weight", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.27.self_attn.k_proj.bias": "encoders.27.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.27.self_attn.k_proj.weight": "encoders.27.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.27.self_attn.out_proj.bias": "encoders.27.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.27.self_attn.out_proj.weight": "encoders.27.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.27.self_attn.q_proj.bias": "encoders.27.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.27.self_attn.q_proj.weight": "encoders.27.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.27.self_attn.v_proj.bias": "encoders.27.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.27.self_attn.v_proj.weight": "encoders.27.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.28.layer_norm1.bias": "encoders.28.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.28.layer_norm1.weight": "encoders.28.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.28.layer_norm2.bias": "encoders.28.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.28.layer_norm2.weight": "encoders.28.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.28.mlp.fc1.bias": "encoders.28.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.28.mlp.fc1.weight": "encoders.28.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.28.mlp.fc2.bias": "encoders.28.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.28.mlp.fc2.weight": "encoders.28.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.28.self_attn.k_proj.bias": "encoders.28.attn.to_k.bias", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.28.self_attn.k_proj.weight": "encoders.28.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.28.self_attn.out_proj.bias": "encoders.28.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.28.self_attn.out_proj.weight": "encoders.28.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.28.self_attn.q_proj.bias": "encoders.28.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.28.self_attn.q_proj.weight": "encoders.28.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.28.self_attn.v_proj.bias": "encoders.28.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.28.self_attn.v_proj.weight": "encoders.28.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.29.layer_norm1.bias": "encoders.29.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.29.layer_norm1.weight": "encoders.29.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.29.layer_norm2.bias": "encoders.29.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.29.layer_norm2.weight": "encoders.29.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.29.mlp.fc1.bias": "encoders.29.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.29.mlp.fc1.weight": "encoders.29.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.29.mlp.fc2.bias": "encoders.29.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.29.mlp.fc2.weight": "encoders.29.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.29.self_attn.k_proj.bias": "encoders.29.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.29.self_attn.k_proj.weight": "encoders.29.attn.to_k.weight", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.29.self_attn.out_proj.bias": "encoders.29.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.29.self_attn.out_proj.weight": "encoders.29.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.29.self_attn.q_proj.bias": "encoders.29.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.29.self_attn.q_proj.weight": "encoders.29.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.29.self_attn.v_proj.bias": "encoders.29.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.29.self_attn.v_proj.weight": "encoders.29.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.3.layer_norm1.bias": "encoders.3.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.3.layer_norm1.weight": "encoders.3.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.3.layer_norm2.bias": "encoders.3.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.3.layer_norm2.weight": "encoders.3.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.3.mlp.fc1.bias": "encoders.3.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.3.mlp.fc1.weight": "encoders.3.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.3.mlp.fc2.bias": "encoders.3.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.3.mlp.fc2.weight": "encoders.3.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.3.self_attn.k_proj.bias": "encoders.3.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.3.self_attn.k_proj.weight": "encoders.3.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.3.self_attn.out_proj.bias": "encoders.3.attn.to_out.bias", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.3.self_attn.out_proj.weight": "encoders.3.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.3.self_attn.q_proj.bias": "encoders.3.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.3.self_attn.q_proj.weight": "encoders.3.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.3.self_attn.v_proj.bias": "encoders.3.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.3.self_attn.v_proj.weight": "encoders.3.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.30.layer_norm1.bias": "encoders.30.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.30.layer_norm1.weight": "encoders.30.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.30.layer_norm2.bias": "encoders.30.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.30.layer_norm2.weight": "encoders.30.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.30.mlp.fc1.bias": "encoders.30.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.30.mlp.fc1.weight": "encoders.30.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.30.mlp.fc2.bias": "encoders.30.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.30.mlp.fc2.weight": "encoders.30.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.30.self_attn.k_proj.bias": "encoders.30.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.30.self_attn.k_proj.weight": "encoders.30.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.30.self_attn.out_proj.bias": "encoders.30.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.30.self_attn.out_proj.weight": "encoders.30.attn.to_out.weight", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.30.self_attn.q_proj.bias": "encoders.30.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.30.self_attn.q_proj.weight": "encoders.30.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.30.self_attn.v_proj.bias": "encoders.30.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.30.self_attn.v_proj.weight": "encoders.30.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.31.layer_norm1.bias": "encoders.31.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.31.layer_norm1.weight": "encoders.31.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.31.layer_norm2.bias": "encoders.31.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.31.layer_norm2.weight": "encoders.31.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.31.mlp.fc1.bias": "encoders.31.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.31.mlp.fc1.weight": "encoders.31.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.31.mlp.fc2.bias": "encoders.31.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.31.mlp.fc2.weight": "encoders.31.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.31.self_attn.k_proj.bias": "encoders.31.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.31.self_attn.k_proj.weight": "encoders.31.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.31.self_attn.out_proj.bias": "encoders.31.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.31.self_attn.out_proj.weight": "encoders.31.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.31.self_attn.q_proj.bias": "encoders.31.attn.to_q.bias", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.31.self_attn.q_proj.weight": "encoders.31.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.31.self_attn.v_proj.bias": "encoders.31.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.31.self_attn.v_proj.weight": "encoders.31.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.4.layer_norm1.bias": "encoders.4.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.4.layer_norm1.weight": "encoders.4.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.4.layer_norm2.bias": "encoders.4.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.4.layer_norm2.weight": "encoders.4.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.4.mlp.fc1.bias": "encoders.4.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.4.mlp.fc1.weight": "encoders.4.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.4.mlp.fc2.bias": "encoders.4.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.4.mlp.fc2.weight": "encoders.4.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.4.self_attn.k_proj.bias": "encoders.4.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.4.self_attn.k_proj.weight": "encoders.4.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.4.self_attn.out_proj.bias": "encoders.4.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.4.self_attn.out_proj.weight": "encoders.4.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.4.self_attn.q_proj.bias": "encoders.4.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.4.self_attn.q_proj.weight": "encoders.4.attn.to_q.weight", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.4.self_attn.v_proj.bias": "encoders.4.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.4.self_attn.v_proj.weight": "encoders.4.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.5.layer_norm1.bias": "encoders.5.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.5.layer_norm1.weight": "encoders.5.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.5.layer_norm2.bias": "encoders.5.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.5.layer_norm2.weight": "encoders.5.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.5.mlp.fc1.bias": "encoders.5.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.5.mlp.fc1.weight": "encoders.5.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.5.mlp.fc2.bias": "encoders.5.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.5.mlp.fc2.weight": "encoders.5.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.5.self_attn.k_proj.bias": "encoders.5.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.5.self_attn.k_proj.weight": "encoders.5.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.5.self_attn.out_proj.bias": "encoders.5.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.5.self_attn.out_proj.weight": "encoders.5.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.5.self_attn.q_proj.bias": "encoders.5.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.5.self_attn.q_proj.weight": "encoders.5.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.5.self_attn.v_proj.bias": "encoders.5.attn.to_v.bias", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.5.self_attn.v_proj.weight": "encoders.5.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.6.layer_norm1.bias": "encoders.6.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.6.layer_norm1.weight": "encoders.6.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.6.layer_norm2.bias": "encoders.6.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.6.layer_norm2.weight": "encoders.6.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.6.mlp.fc1.bias": "encoders.6.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.6.mlp.fc1.weight": "encoders.6.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.6.mlp.fc2.bias": "encoders.6.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.6.mlp.fc2.weight": "encoders.6.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.6.self_attn.k_proj.bias": "encoders.6.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.6.self_attn.k_proj.weight": "encoders.6.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.6.self_attn.out_proj.bias": "encoders.6.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.6.self_attn.out_proj.weight": "encoders.6.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.6.self_attn.q_proj.bias": "encoders.6.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.6.self_attn.q_proj.weight": "encoders.6.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.6.self_attn.v_proj.bias": "encoders.6.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.6.self_attn.v_proj.weight": "encoders.6.attn.to_v.weight", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.7.layer_norm1.bias": "encoders.7.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.7.layer_norm1.weight": "encoders.7.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.7.layer_norm2.bias": "encoders.7.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.7.layer_norm2.weight": "encoders.7.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.7.mlp.fc1.bias": "encoders.7.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.7.mlp.fc1.weight": "encoders.7.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.7.mlp.fc2.bias": "encoders.7.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.7.mlp.fc2.weight": "encoders.7.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.7.self_attn.k_proj.bias": "encoders.7.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.7.self_attn.k_proj.weight": "encoders.7.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.7.self_attn.out_proj.bias": "encoders.7.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.7.self_attn.out_proj.weight": "encoders.7.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.7.self_attn.q_proj.bias": "encoders.7.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.7.self_attn.q_proj.weight": "encoders.7.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.7.self_attn.v_proj.bias": "encoders.7.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.7.self_attn.v_proj.weight": "encoders.7.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.8.layer_norm1.bias": "encoders.8.layer_norm1.bias", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.8.layer_norm1.weight": "encoders.8.layer_norm1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.8.layer_norm2.bias": "encoders.8.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.8.layer_norm2.weight": "encoders.8.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.8.mlp.fc1.bias": "encoders.8.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.8.mlp.fc1.weight": "encoders.8.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.8.mlp.fc2.bias": "encoders.8.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.8.mlp.fc2.weight": "encoders.8.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.8.self_attn.k_proj.bias": "encoders.8.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.8.self_attn.k_proj.weight": "encoders.8.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.8.self_attn.out_proj.bias": "encoders.8.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.8.self_attn.out_proj.weight": "encoders.8.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.8.self_attn.q_proj.bias": "encoders.8.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.8.self_attn.q_proj.weight": "encoders.8.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.8.self_attn.v_proj.bias": "encoders.8.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.8.self_attn.v_proj.weight": "encoders.8.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.9.layer_norm1.bias": "encoders.9.layer_norm1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.9.layer_norm1.weight": "encoders.9.layer_norm1.weight", - 
"text_encoders.clip_g.transformer.text_model.encoder.layers.9.layer_norm2.bias": "encoders.9.layer_norm2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.9.layer_norm2.weight": "encoders.9.layer_norm2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.9.mlp.fc1.bias": "encoders.9.fc1.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.9.mlp.fc1.weight": "encoders.9.fc1.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.9.mlp.fc2.bias": "encoders.9.fc2.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.9.mlp.fc2.weight": "encoders.9.fc2.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.9.self_attn.k_proj.bias": "encoders.9.attn.to_k.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.9.self_attn.k_proj.weight": "encoders.9.attn.to_k.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.9.self_attn.out_proj.bias": "encoders.9.attn.to_out.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.9.self_attn.out_proj.weight": "encoders.9.attn.to_out.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.9.self_attn.q_proj.bias": "encoders.9.attn.to_q.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.9.self_attn.q_proj.weight": "encoders.9.attn.to_q.weight", - "text_encoders.clip_g.transformer.text_model.encoder.layers.9.self_attn.v_proj.bias": "encoders.9.attn.to_v.bias", - "text_encoders.clip_g.transformer.text_model.encoder.layers.9.self_attn.v_proj.weight": "encoders.9.attn.to_v.weight", - "text_encoders.clip_g.transformer.text_model.final_layer_norm.bias": "final_layer_norm.bias", - "text_encoders.clip_g.transformer.text_model.final_layer_norm.weight": "final_layer_norm.weight", - "text_encoders.clip_g.transformer.text_projection.weight": "text_projection.weight", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if 
name == "text_encoders.clip_g.transformer.text_model.embeddings.position_embedding.weight": - param = param.reshape((1, param.shape[0], param.shape[1])) - state_dict_[rename_dict[name]] = param - elif ("text_encoders.clip_g.transformer." + name) in rename_dict: - param = state_dict[name] - if name == "text_model.embeddings.position_embedding.weight": - param = param.reshape((1, param.shape[0], param.shape[1])) - state_dict_[rename_dict["text_encoders.clip_g.transformer." + name]] = param - return state_dict_ - - - -class SD3TextEncoder3StateDictConverter(): - def __init__(self): - pass - - def from_diffusers(self, state_dict): - state_dict_ = state_dict - return state_dict_ - - def from_civitai(self, state_dict): - prefix = "text_encoders.t5xxl.transformer." - state_dict_ = {name[len(prefix):]: param for name, param in state_dict.items() if name.startswith(prefix)} - if len(state_dict_) > 0: - return self.from_diffusers(state_dict_) - name_list = [ - "encoder.block.0.layer.0.SelfAttention.k.weight", - "encoder.block.0.layer.0.SelfAttention.o.weight", - "encoder.block.0.layer.0.SelfAttention.q.weight", - "encoder.block.0.layer.0.SelfAttention.relative_attention_bias.weight", - "encoder.block.0.layer.0.SelfAttention.v.weight", - "encoder.block.0.layer.0.layer_norm.weight", - "encoder.block.0.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.0.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.0.layer.1.DenseReluDense.wo.weight", - "encoder.block.0.layer.1.layer_norm.weight", - "encoder.block.1.layer.0.SelfAttention.k.weight", - "encoder.block.1.layer.0.SelfAttention.o.weight", - "encoder.block.1.layer.0.SelfAttention.q.weight", - "encoder.block.1.layer.0.SelfAttention.v.weight", - "encoder.block.1.layer.0.layer_norm.weight", - "encoder.block.1.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.1.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.1.layer.1.DenseReluDense.wo.weight", - "encoder.block.1.layer.1.layer_norm.weight", - 
"encoder.block.10.layer.0.SelfAttention.k.weight", - "encoder.block.10.layer.0.SelfAttention.o.weight", - "encoder.block.10.layer.0.SelfAttention.q.weight", - "encoder.block.10.layer.0.SelfAttention.v.weight", - "encoder.block.10.layer.0.layer_norm.weight", - "encoder.block.10.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.10.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.10.layer.1.DenseReluDense.wo.weight", - "encoder.block.10.layer.1.layer_norm.weight", - "encoder.block.11.layer.0.SelfAttention.k.weight", - "encoder.block.11.layer.0.SelfAttention.o.weight", - "encoder.block.11.layer.0.SelfAttention.q.weight", - "encoder.block.11.layer.0.SelfAttention.v.weight", - "encoder.block.11.layer.0.layer_norm.weight", - "encoder.block.11.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.11.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.11.layer.1.DenseReluDense.wo.weight", - "encoder.block.11.layer.1.layer_norm.weight", - "encoder.block.12.layer.0.SelfAttention.k.weight", - "encoder.block.12.layer.0.SelfAttention.o.weight", - "encoder.block.12.layer.0.SelfAttention.q.weight", - "encoder.block.12.layer.0.SelfAttention.v.weight", - "encoder.block.12.layer.0.layer_norm.weight", - "encoder.block.12.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.12.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.12.layer.1.DenseReluDense.wo.weight", - "encoder.block.12.layer.1.layer_norm.weight", - "encoder.block.13.layer.0.SelfAttention.k.weight", - "encoder.block.13.layer.0.SelfAttention.o.weight", - "encoder.block.13.layer.0.SelfAttention.q.weight", - "encoder.block.13.layer.0.SelfAttention.v.weight", - "encoder.block.13.layer.0.layer_norm.weight", - "encoder.block.13.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.13.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.13.layer.1.DenseReluDense.wo.weight", - "encoder.block.13.layer.1.layer_norm.weight", - "encoder.block.14.layer.0.SelfAttention.k.weight", - 
"encoder.block.14.layer.0.SelfAttention.o.weight", - "encoder.block.14.layer.0.SelfAttention.q.weight", - "encoder.block.14.layer.0.SelfAttention.v.weight", - "encoder.block.14.layer.0.layer_norm.weight", - "encoder.block.14.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.14.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.14.layer.1.DenseReluDense.wo.weight", - "encoder.block.14.layer.1.layer_norm.weight", - "encoder.block.15.layer.0.SelfAttention.k.weight", - "encoder.block.15.layer.0.SelfAttention.o.weight", - "encoder.block.15.layer.0.SelfAttention.q.weight", - "encoder.block.15.layer.0.SelfAttention.v.weight", - "encoder.block.15.layer.0.layer_norm.weight", - "encoder.block.15.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.15.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.15.layer.1.DenseReluDense.wo.weight", - "encoder.block.15.layer.1.layer_norm.weight", - "encoder.block.16.layer.0.SelfAttention.k.weight", - "encoder.block.16.layer.0.SelfAttention.o.weight", - "encoder.block.16.layer.0.SelfAttention.q.weight", - "encoder.block.16.layer.0.SelfAttention.v.weight", - "encoder.block.16.layer.0.layer_norm.weight", - "encoder.block.16.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.16.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.16.layer.1.DenseReluDense.wo.weight", - "encoder.block.16.layer.1.layer_norm.weight", - "encoder.block.17.layer.0.SelfAttention.k.weight", - "encoder.block.17.layer.0.SelfAttention.o.weight", - "encoder.block.17.layer.0.SelfAttention.q.weight", - "encoder.block.17.layer.0.SelfAttention.v.weight", - "encoder.block.17.layer.0.layer_norm.weight", - "encoder.block.17.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.17.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.17.layer.1.DenseReluDense.wo.weight", - "encoder.block.17.layer.1.layer_norm.weight", - "encoder.block.18.layer.0.SelfAttention.k.weight", - "encoder.block.18.layer.0.SelfAttention.o.weight", - 
"encoder.block.18.layer.0.SelfAttention.q.weight", - "encoder.block.18.layer.0.SelfAttention.v.weight", - "encoder.block.18.layer.0.layer_norm.weight", - "encoder.block.18.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.18.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.18.layer.1.DenseReluDense.wo.weight", - "encoder.block.18.layer.1.layer_norm.weight", - "encoder.block.19.layer.0.SelfAttention.k.weight", - "encoder.block.19.layer.0.SelfAttention.o.weight", - "encoder.block.19.layer.0.SelfAttention.q.weight", - "encoder.block.19.layer.0.SelfAttention.v.weight", - "encoder.block.19.layer.0.layer_norm.weight", - "encoder.block.19.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.19.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.19.layer.1.DenseReluDense.wo.weight", - "encoder.block.19.layer.1.layer_norm.weight", - "encoder.block.2.layer.0.SelfAttention.k.weight", - "encoder.block.2.layer.0.SelfAttention.o.weight", - "encoder.block.2.layer.0.SelfAttention.q.weight", - "encoder.block.2.layer.0.SelfAttention.v.weight", - "encoder.block.2.layer.0.layer_norm.weight", - "encoder.block.2.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.2.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.2.layer.1.DenseReluDense.wo.weight", - "encoder.block.2.layer.1.layer_norm.weight", - "encoder.block.20.layer.0.SelfAttention.k.weight", - "encoder.block.20.layer.0.SelfAttention.o.weight", - "encoder.block.20.layer.0.SelfAttention.q.weight", - "encoder.block.20.layer.0.SelfAttention.v.weight", - "encoder.block.20.layer.0.layer_norm.weight", - "encoder.block.20.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.20.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.20.layer.1.DenseReluDense.wo.weight", - "encoder.block.20.layer.1.layer_norm.weight", - "encoder.block.21.layer.0.SelfAttention.k.weight", - "encoder.block.21.layer.0.SelfAttention.o.weight", - "encoder.block.21.layer.0.SelfAttention.q.weight", - 
"encoder.block.21.layer.0.SelfAttention.v.weight", - "encoder.block.21.layer.0.layer_norm.weight", - "encoder.block.21.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.21.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.21.layer.1.DenseReluDense.wo.weight", - "encoder.block.21.layer.1.layer_norm.weight", - "encoder.block.22.layer.0.SelfAttention.k.weight", - "encoder.block.22.layer.0.SelfAttention.o.weight", - "encoder.block.22.layer.0.SelfAttention.q.weight", - "encoder.block.22.layer.0.SelfAttention.v.weight", - "encoder.block.22.layer.0.layer_norm.weight", - "encoder.block.22.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.22.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.22.layer.1.DenseReluDense.wo.weight", - "encoder.block.22.layer.1.layer_norm.weight", - "encoder.block.23.layer.0.SelfAttention.k.weight", - "encoder.block.23.layer.0.SelfAttention.o.weight", - "encoder.block.23.layer.0.SelfAttention.q.weight", - "encoder.block.23.layer.0.SelfAttention.v.weight", - "encoder.block.23.layer.0.layer_norm.weight", - "encoder.block.23.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.23.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.23.layer.1.DenseReluDense.wo.weight", - "encoder.block.23.layer.1.layer_norm.weight", - "encoder.block.3.layer.0.SelfAttention.k.weight", - "encoder.block.3.layer.0.SelfAttention.o.weight", - "encoder.block.3.layer.0.SelfAttention.q.weight", - "encoder.block.3.layer.0.SelfAttention.v.weight", - "encoder.block.3.layer.0.layer_norm.weight", - "encoder.block.3.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.3.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.3.layer.1.DenseReluDense.wo.weight", - "encoder.block.3.layer.1.layer_norm.weight", - "encoder.block.4.layer.0.SelfAttention.k.weight", - "encoder.block.4.layer.0.SelfAttention.o.weight", - "encoder.block.4.layer.0.SelfAttention.q.weight", - "encoder.block.4.layer.0.SelfAttention.v.weight", - "encoder.block.4.layer.0.layer_norm.weight", - 
"encoder.block.4.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.4.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.4.layer.1.DenseReluDense.wo.weight", - "encoder.block.4.layer.1.layer_norm.weight", - "encoder.block.5.layer.0.SelfAttention.k.weight", - "encoder.block.5.layer.0.SelfAttention.o.weight", - "encoder.block.5.layer.0.SelfAttention.q.weight", - "encoder.block.5.layer.0.SelfAttention.v.weight", - "encoder.block.5.layer.0.layer_norm.weight", - "encoder.block.5.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.5.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.5.layer.1.DenseReluDense.wo.weight", - "encoder.block.5.layer.1.layer_norm.weight", - "encoder.block.6.layer.0.SelfAttention.k.weight", - "encoder.block.6.layer.0.SelfAttention.o.weight", - "encoder.block.6.layer.0.SelfAttention.q.weight", - "encoder.block.6.layer.0.SelfAttention.v.weight", - "encoder.block.6.layer.0.layer_norm.weight", - "encoder.block.6.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.6.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.6.layer.1.DenseReluDense.wo.weight", - "encoder.block.6.layer.1.layer_norm.weight", - "encoder.block.7.layer.0.SelfAttention.k.weight", - "encoder.block.7.layer.0.SelfAttention.o.weight", - "encoder.block.7.layer.0.SelfAttention.q.weight", - "encoder.block.7.layer.0.SelfAttention.v.weight", - "encoder.block.7.layer.0.layer_norm.weight", - "encoder.block.7.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.7.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.7.layer.1.DenseReluDense.wo.weight", - "encoder.block.7.layer.1.layer_norm.weight", - "encoder.block.8.layer.0.SelfAttention.k.weight", - "encoder.block.8.layer.0.SelfAttention.o.weight", - "encoder.block.8.layer.0.SelfAttention.q.weight", - "encoder.block.8.layer.0.SelfAttention.v.weight", - "encoder.block.8.layer.0.layer_norm.weight", - "encoder.block.8.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.8.layer.1.DenseReluDense.wi_1.weight", - 
"encoder.block.8.layer.1.DenseReluDense.wo.weight", - "encoder.block.8.layer.1.layer_norm.weight", - "encoder.block.9.layer.0.SelfAttention.k.weight", - "encoder.block.9.layer.0.SelfAttention.o.weight", - "encoder.block.9.layer.0.SelfAttention.q.weight", - "encoder.block.9.layer.0.SelfAttention.v.weight", - "encoder.block.9.layer.0.layer_norm.weight", - "encoder.block.9.layer.1.DenseReluDense.wi_0.weight", - "encoder.block.9.layer.1.DenseReluDense.wi_1.weight", - "encoder.block.9.layer.1.DenseReluDense.wo.weight", - "encoder.block.9.layer.1.layer_norm.weight", - "encoder.embed_tokens.weight", - "encoder.final_layer_norm.weight", - "shared.weight", - ] - state_dict_ = {} - for name, param in state_dict.items(): - if name in name_list: - state_dict_[name] = param - return state_dict_ - diff --git a/diffsynth/models/sd3_vae_decoder.py b/diffsynth/models/sd3_vae_decoder.py deleted file mode 100644 index 55fd9c0..0000000 --- a/diffsynth/models/sd3_vae_decoder.py +++ /dev/null @@ -1,81 +0,0 @@ -import torch -from .sd_vae_decoder import VAEAttentionBlock, SDVAEDecoderStateDictConverter -from .sd_unet import ResnetBlock, UpSampler -from .tiler import TileWorker - - - -class SD3VAEDecoder(torch.nn.Module): - def __init__(self): - super().__init__() - self.scaling_factor = 1.5305 # Different from SD 1.x - self.shift_factor = 0.0609 # Different from SD 1.x - self.conv_in = torch.nn.Conv2d(16, 512, kernel_size=3, padding=1) # Different from SD 1.x - - self.blocks = torch.nn.ModuleList([ - # UNetMidBlock2D - ResnetBlock(512, 512, eps=1e-6), - VAEAttentionBlock(1, 512, 512, 1, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - # UpDecoderBlock2D - ResnetBlock(512, 512, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - UpSampler(512), - # UpDecoderBlock2D - ResnetBlock(512, 512, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - UpSampler(512), - # UpDecoderBlock2D - ResnetBlock(512, 256, eps=1e-6), - ResnetBlock(256, 
256, eps=1e-6), - ResnetBlock(256, 256, eps=1e-6), - UpSampler(256), - # UpDecoderBlock2D - ResnetBlock(256, 128, eps=1e-6), - ResnetBlock(128, 128, eps=1e-6), - ResnetBlock(128, 128, eps=1e-6), - ]) - - self.conv_norm_out = torch.nn.GroupNorm(num_channels=128, num_groups=32, eps=1e-6) - self.conv_act = torch.nn.SiLU() - self.conv_out = torch.nn.Conv2d(128, 3, kernel_size=3, padding=1) - - def tiled_forward(self, sample, tile_size=64, tile_stride=32): - hidden_states = TileWorker().tiled_forward( - lambda x: self.forward(x), - sample, - tile_size, - tile_stride, - tile_device=sample.device, - tile_dtype=sample.dtype - ) - return hidden_states - - def forward(self, sample, tiled=False, tile_size=64, tile_stride=32, **kwargs): - # For VAE Decoder, we do not need to apply the tiler on each layer. - if tiled: - return self.tiled_forward(sample, tile_size=tile_size, tile_stride=tile_stride) - - # 1. pre-process - hidden_states = sample / self.scaling_factor + self.shift_factor - hidden_states = self.conv_in(hidden_states) - time_emb = None - text_emb = None - res_stack = None - - # 2. blocks - for i, block in enumerate(self.blocks): - hidden_states, time_emb, text_emb, res_stack = block(hidden_states, time_emb, text_emb, res_stack) - - # 3. 
output - hidden_states = self.conv_norm_out(hidden_states) - hidden_states = self.conv_act(hidden_states) - hidden_states = self.conv_out(hidden_states) - - return hidden_states - - @staticmethod - def state_dict_converter(): - return SDVAEDecoderStateDictConverter() \ No newline at end of file diff --git a/diffsynth/models/sd3_vae_encoder.py b/diffsynth/models/sd3_vae_encoder.py deleted file mode 100644 index c486866..0000000 --- a/diffsynth/models/sd3_vae_encoder.py +++ /dev/null @@ -1,95 +0,0 @@ -import torch -from .sd_unet import ResnetBlock, DownSampler -from .sd_vae_encoder import VAEAttentionBlock, SDVAEEncoderStateDictConverter -from .tiler import TileWorker -from einops import rearrange - - -class SD3VAEEncoder(torch.nn.Module): - def __init__(self): - super().__init__() - self.scaling_factor = 1.5305 # Different from SD 1.x - self.shift_factor = 0.0609 # Different from SD 1.x - self.conv_in = torch.nn.Conv2d(3, 128, kernel_size=3, padding=1) - - self.blocks = torch.nn.ModuleList([ - # DownEncoderBlock2D - ResnetBlock(128, 128, eps=1e-6), - ResnetBlock(128, 128, eps=1e-6), - DownSampler(128, padding=0, extra_padding=True), - # DownEncoderBlock2D - ResnetBlock(128, 256, eps=1e-6), - ResnetBlock(256, 256, eps=1e-6), - DownSampler(256, padding=0, extra_padding=True), - # DownEncoderBlock2D - ResnetBlock(256, 512, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - DownSampler(512, padding=0, extra_padding=True), - # DownEncoderBlock2D - ResnetBlock(512, 512, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - # UNetMidBlock2D - ResnetBlock(512, 512, eps=1e-6), - VAEAttentionBlock(1, 512, 512, 1, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - ]) - - self.conv_norm_out = torch.nn.GroupNorm(num_channels=512, num_groups=32, eps=1e-6) - self.conv_act = torch.nn.SiLU() - self.conv_out = torch.nn.Conv2d(512, 32, kernel_size=3, padding=1) - - def tiled_forward(self, sample, tile_size=64, tile_stride=32): - hidden_states = TileWorker().tiled_forward( - lambda x: 
self.forward(x), - sample, - tile_size, - tile_stride, - tile_device=sample.device, - tile_dtype=sample.dtype - ) - return hidden_states - - def forward(self, sample, tiled=False, tile_size=64, tile_stride=32, **kwargs): - # For VAE Decoder, we do not need to apply the tiler on each layer. - if tiled: - return self.tiled_forward(sample, tile_size=tile_size, tile_stride=tile_stride) - - # 1. pre-process - hidden_states = self.conv_in(sample) - time_emb = None - text_emb = None - res_stack = None - - # 2. blocks - for i, block in enumerate(self.blocks): - hidden_states, time_emb, text_emb, res_stack = block(hidden_states, time_emb, text_emb, res_stack) - - # 3. output - hidden_states = self.conv_norm_out(hidden_states) - hidden_states = self.conv_act(hidden_states) - hidden_states = self.conv_out(hidden_states) - hidden_states = hidden_states[:, :16] - hidden_states = (hidden_states - self.shift_factor) * self.scaling_factor - - return hidden_states - - def encode_video(self, sample, batch_size=8): - B = sample.shape[0] - hidden_states = [] - - for i in range(0, sample.shape[2], batch_size): - - j = min(i + batch_size, sample.shape[2]) - sample_batch = rearrange(sample[:,:,i:j], "B C T H W -> (B T) C H W") - - hidden_states_batch = self(sample_batch) - hidden_states_batch = rearrange(hidden_states_batch, "(B T) C H W -> B C T H W", B=B) - - hidden_states.append(hidden_states_batch) - - hidden_states = torch.concat(hidden_states, dim=2) - return hidden_states - - @staticmethod - def state_dict_converter(): - return SDVAEEncoderStateDictConverter() diff --git a/diffsynth/models/sd_controlnet.py b/diffsynth/models/sd_controlnet.py deleted file mode 100644 index 910e0db..0000000 --- a/diffsynth/models/sd_controlnet.py +++ /dev/null @@ -1,589 +0,0 @@ -import torch -from .sd_unet import Timesteps, ResnetBlock, AttentionBlock, PushBlock, DownSampler -from .tiler import TileWorker - - -class ControlNetConditioningLayer(torch.nn.Module): - def __init__(self, channels = (3, 
16, 32, 96, 256, 320)): - super().__init__() - self.blocks = torch.nn.ModuleList([]) - self.blocks.append(torch.nn.Conv2d(channels[0], channels[1], kernel_size=3, padding=1)) - self.blocks.append(torch.nn.SiLU()) - for i in range(1, len(channels) - 2): - self.blocks.append(torch.nn.Conv2d(channels[i], channels[i], kernel_size=3, padding=1)) - self.blocks.append(torch.nn.SiLU()) - self.blocks.append(torch.nn.Conv2d(channels[i], channels[i+1], kernel_size=3, padding=1, stride=2)) - self.blocks.append(torch.nn.SiLU()) - self.blocks.append(torch.nn.Conv2d(channels[-2], channels[-1], kernel_size=3, padding=1)) - - def forward(self, conditioning): - for block in self.blocks: - conditioning = block(conditioning) - return conditioning - - -class SDControlNet(torch.nn.Module): - def __init__(self, global_pool=False): - super().__init__() - self.time_proj = Timesteps(320) - self.time_embedding = torch.nn.Sequential( - torch.nn.Linear(320, 1280), - torch.nn.SiLU(), - torch.nn.Linear(1280, 1280) - ) - self.conv_in = torch.nn.Conv2d(4, 320, kernel_size=3, padding=1) - - self.controlnet_conv_in = ControlNetConditioningLayer(channels=(3, 16, 32, 96, 256, 320)) - - self.blocks = torch.nn.ModuleList([ - # CrossAttnDownBlock2D - ResnetBlock(320, 320, 1280), - AttentionBlock(8, 40, 320, 1, 768), - PushBlock(), - ResnetBlock(320, 320, 1280), - AttentionBlock(8, 40, 320, 1, 768), - PushBlock(), - DownSampler(320), - PushBlock(), - # CrossAttnDownBlock2D - ResnetBlock(320, 640, 1280), - AttentionBlock(8, 80, 640, 1, 768), - PushBlock(), - ResnetBlock(640, 640, 1280), - AttentionBlock(8, 80, 640, 1, 768), - PushBlock(), - DownSampler(640), - PushBlock(), - # CrossAttnDownBlock2D - ResnetBlock(640, 1280, 1280), - AttentionBlock(8, 160, 1280, 1, 768), - PushBlock(), - ResnetBlock(1280, 1280, 1280), - AttentionBlock(8, 160, 1280, 1, 768), - PushBlock(), - DownSampler(1280), - PushBlock(), - # DownBlock2D - ResnetBlock(1280, 1280, 1280), - PushBlock(), - ResnetBlock(1280, 1280, 1280), - 
PushBlock(), - # UNetMidBlock2DCrossAttn - ResnetBlock(1280, 1280, 1280), - AttentionBlock(8, 160, 1280, 1, 768), - ResnetBlock(1280, 1280, 1280), - PushBlock() - ]) - - self.controlnet_blocks = torch.nn.ModuleList([ - torch.nn.Conv2d(320, 320, kernel_size=(1, 1)), - torch.nn.Conv2d(320, 320, kernel_size=(1, 1), bias=False), - torch.nn.Conv2d(320, 320, kernel_size=(1, 1), bias=False), - torch.nn.Conv2d(320, 320, kernel_size=(1, 1), bias=False), - torch.nn.Conv2d(640, 640, kernel_size=(1, 1)), - torch.nn.Conv2d(640, 640, kernel_size=(1, 1), bias=False), - torch.nn.Conv2d(640, 640, kernel_size=(1, 1), bias=False), - torch.nn.Conv2d(1280, 1280, kernel_size=(1, 1)), - torch.nn.Conv2d(1280, 1280, kernel_size=(1, 1), bias=False), - torch.nn.Conv2d(1280, 1280, kernel_size=(1, 1), bias=False), - torch.nn.Conv2d(1280, 1280, kernel_size=(1, 1), bias=False), - torch.nn.Conv2d(1280, 1280, kernel_size=(1, 1), bias=False), - torch.nn.Conv2d(1280, 1280, kernel_size=(1, 1), bias=False), - ]) - - self.global_pool = global_pool - - def forward( - self, - sample, timestep, encoder_hidden_states, conditioning, - tiled=False, tile_size=64, tile_stride=32, - **kwargs - ): - # 1. time - time_emb = self.time_proj(timestep).to(sample.dtype) - time_emb = self.time_embedding(time_emb) - time_emb = time_emb.repeat(sample.shape[0], 1) - - # 2. pre-process - height, width = sample.shape[2], sample.shape[3] - hidden_states = self.conv_in(sample) + self.controlnet_conv_in(conditioning) - text_emb = encoder_hidden_states - res_stack = [hidden_states] - - # 3. 
blocks - for i, block in enumerate(self.blocks): - if tiled and not isinstance(block, PushBlock): - _, _, inter_height, _ = hidden_states.shape - resize_scale = inter_height / height - hidden_states = TileWorker().tiled_forward( - lambda x: block(x, time_emb, text_emb, res_stack)[0], - hidden_states, - int(tile_size * resize_scale), - int(tile_stride * resize_scale), - tile_device=hidden_states.device, - tile_dtype=hidden_states.dtype - ) - else: - hidden_states, _, _, _ = block(hidden_states, time_emb, text_emb, res_stack) - - # 4. ControlNet blocks - controlnet_res_stack = [block(res) for block, res in zip(self.controlnet_blocks, res_stack)] - - # pool - if self.global_pool: - controlnet_res_stack = [res.mean(dim=(2, 3), keepdim=True) for res in controlnet_res_stack] - - return controlnet_res_stack - - @staticmethod - def state_dict_converter(): - return SDControlNetStateDictConverter() - - -class SDControlNetStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - # architecture - block_types = [ - 'ResnetBlock', 'AttentionBlock', 'PushBlock', 'ResnetBlock', 'AttentionBlock', 'PushBlock', 'DownSampler', 'PushBlock', - 'ResnetBlock', 'AttentionBlock', 'PushBlock', 'ResnetBlock', 'AttentionBlock', 'PushBlock', 'DownSampler', 'PushBlock', - 'ResnetBlock', 'AttentionBlock', 'PushBlock', 'ResnetBlock', 'AttentionBlock', 'PushBlock', 'DownSampler', 'PushBlock', - 'ResnetBlock', 'PushBlock', 'ResnetBlock', 'PushBlock', - 'ResnetBlock', 'AttentionBlock', 'ResnetBlock', - 'PopBlock', 'ResnetBlock', 'PopBlock', 'ResnetBlock', 'PopBlock', 'ResnetBlock', 'UpSampler', - 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'UpSampler', - 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'UpSampler', - 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'PopBlock', 'ResnetBlock', 
'AttentionBlock', 'PopBlock', 'ResnetBlock', 'AttentionBlock' - ] - - # controlnet_rename_dict - controlnet_rename_dict = { - "controlnet_cond_embedding.conv_in.weight": "controlnet_conv_in.blocks.0.weight", - "controlnet_cond_embedding.conv_in.bias": "controlnet_conv_in.blocks.0.bias", - "controlnet_cond_embedding.blocks.0.weight": "controlnet_conv_in.blocks.2.weight", - "controlnet_cond_embedding.blocks.0.bias": "controlnet_conv_in.blocks.2.bias", - "controlnet_cond_embedding.blocks.1.weight": "controlnet_conv_in.blocks.4.weight", - "controlnet_cond_embedding.blocks.1.bias": "controlnet_conv_in.blocks.4.bias", - "controlnet_cond_embedding.blocks.2.weight": "controlnet_conv_in.blocks.6.weight", - "controlnet_cond_embedding.blocks.2.bias": "controlnet_conv_in.blocks.6.bias", - "controlnet_cond_embedding.blocks.3.weight": "controlnet_conv_in.blocks.8.weight", - "controlnet_cond_embedding.blocks.3.bias": "controlnet_conv_in.blocks.8.bias", - "controlnet_cond_embedding.blocks.4.weight": "controlnet_conv_in.blocks.10.weight", - "controlnet_cond_embedding.blocks.4.bias": "controlnet_conv_in.blocks.10.bias", - "controlnet_cond_embedding.blocks.5.weight": "controlnet_conv_in.blocks.12.weight", - "controlnet_cond_embedding.blocks.5.bias": "controlnet_conv_in.blocks.12.bias", - "controlnet_cond_embedding.conv_out.weight": "controlnet_conv_in.blocks.14.weight", - "controlnet_cond_embedding.conv_out.bias": "controlnet_conv_in.blocks.14.bias", - } - - # Rename each parameter - name_list = sorted([name for name in state_dict]) - rename_dict = {} - block_id = {"ResnetBlock": -1, "AttentionBlock": -1, "DownSampler": -1, "UpSampler": -1} - last_block_type_with_id = {"ResnetBlock": "", "AttentionBlock": "", "DownSampler": "", "UpSampler": ""} - for name in name_list: - names = name.split(".") - if names[0] in ["conv_in", "conv_norm_out", "conv_out"]: - pass - elif name in controlnet_rename_dict: - names = controlnet_rename_dict[name].split(".") - elif names[0] == 
"controlnet_down_blocks": - names[0] = "controlnet_blocks" - elif names[0] == "controlnet_mid_block": - names = ["controlnet_blocks", "12", names[-1]] - elif names[0] in ["time_embedding", "add_embedding"]: - if names[0] == "add_embedding": - names[0] = "add_time_embedding" - names[1] = {"linear_1": "0", "linear_2": "2"}[names[1]] - elif names[0] in ["down_blocks", "mid_block", "up_blocks"]: - if names[0] == "mid_block": - names.insert(1, "0") - block_type = {"resnets": "ResnetBlock", "attentions": "AttentionBlock", "downsamplers": "DownSampler", "upsamplers": "UpSampler"}[names[2]] - block_type_with_id = ".".join(names[:4]) - if block_type_with_id != last_block_type_with_id[block_type]: - block_id[block_type] += 1 - last_block_type_with_id[block_type] = block_type_with_id - while block_id[block_type] < len(block_types) and block_types[block_id[block_type]] != block_type: - block_id[block_type] += 1 - block_type_with_id = ".".join(names[:4]) - names = ["blocks", str(block_id[block_type])] + names[4:] - if "ff" in names: - ff_index = names.index("ff") - component = ".".join(names[ff_index:ff_index+3]) - component = {"ff.net.0": "act_fn", "ff.net.2": "ff"}[component] - names = names[:ff_index] + [component] + names[ff_index+3:] - if "to_out" in names: - names.pop(names.index("to_out") + 1) - else: - raise ValueError(f"Unknown parameters: {name}") - rename_dict[name] = ".".join(names) - - # Convert state_dict - state_dict_ = {} - for name, param in state_dict.items(): - if ".proj_in." in name or ".proj_out." 
def from_civitai(self, state_dict):
    """Convert an original-format ("control_model.*") SD-ControlNet checkpoint
    into this repo's parameter naming.

    Diffusers-layout checkpoints are detected by a signature key and delegated
    to ``from_diffusers``. Keys without a known mapping are dropped, and
    ``proj_in`` / ``proj_out`` 1x1-conv kernels are squeezed to linear shape.
    """
    if "mid_block.resnets.1.time_emb_proj.weight" in state_dict:
        # Checkpoint is actually in diffusers layout.
        return self.from_diffusers(state_dict)

    # Prefix pairs expanded to both ".weight" and ".bias" suffixes below;
    # attention q/k/v projections carry no bias, so they get their own table.
    wb_prefixes = {
        "time_embed.0": "time_embedding.0",
        "time_embed.2": "time_embedding.2",
        "input_blocks.0.0": "conv_in",
    }
    weight_only_prefixes = {}

    # ResBlocks: (source module, target block id, has conv shortcut).
    resnets = [
        ("input_blocks.1.0", 0, False), ("input_blocks.2.0", 3, False),
        ("input_blocks.4.0", 8, True), ("input_blocks.5.0", 11, False),
        ("input_blocks.7.0", 16, True), ("input_blocks.8.0", 19, False),
        ("input_blocks.10.0", 24, False), ("input_blocks.11.0", 26, False),
        ("middle_block.0", 28, False), ("middle_block.2", 30, False),
    ]
    for src, dst, has_shortcut in resnets:
        wb_prefixes.update({
            f"{src}.in_layers.0": f"blocks.{dst}.norm1",
            f"{src}.in_layers.2": f"blocks.{dst}.conv1",
            f"{src}.emb_layers.1": f"blocks.{dst}.time_emb_proj",
            f"{src}.out_layers.0": f"blocks.{dst}.norm2",
            f"{src}.out_layers.3": f"blocks.{dst}.conv2",
        })
        if has_shortcut:
            wb_prefixes[f"{src}.skip_connection"] = f"blocks.{dst}.conv_shortcut"

    # Spatial transformers: (source module, target block id).
    transformers = [
        ("input_blocks.1.1", 1), ("input_blocks.2.1", 4), ("input_blocks.4.1", 9),
        ("input_blocks.5.1", 12), ("input_blocks.7.1", 17), ("input_blocks.8.1", 20),
        ("middle_block.1", 29),
    ]
    for src, dst in transformers:
        wb_prefixes[f"{src}.norm"] = f"blocks.{dst}.norm"
        wb_prefixes[f"{src}.proj_in"] = f"blocks.{dst}.proj_in"
        wb_prefixes[f"{src}.proj_out"] = f"blocks.{dst}.proj_out"
        t_src = f"{src}.transformer_blocks.0"
        t_dst = f"blocks.{dst}.transformer_blocks.0"
        for attn in ("attn1", "attn2"):
            for proj in ("to_q", "to_k", "to_v"):
                weight_only_prefixes[f"{t_src}.{attn}.{proj}"] = f"{t_dst}.{attn}.{proj}"
            wb_prefixes[f"{t_src}.{attn}.to_out.0"] = f"{t_dst}.{attn}.to_out"
        # GEGLU feed-forward: first projection becomes act_fn, second becomes ff.
        wb_prefixes[f"{t_src}.ff.net.0.proj"] = f"{t_dst}.act_fn.proj"
        wb_prefixes[f"{t_src}.ff.net.2"] = f"{t_dst}.ff"
        for norm in ("norm1", "norm2", "norm3"):
            wb_prefixes[f"{t_src}.{norm}"] = f"{t_dst}.{norm}"

    # Downsamplers.
    for src, dst in [("input_blocks.3.0", 6), ("input_blocks.6.0", 14), ("input_blocks.9.0", 22)]:
        wb_prefixes[f"{src}.op"] = f"blocks.{dst}.conv"

    # Conditioning-hint conv stack (even indices 0..14 hold the conv layers).
    for i in range(0, 16, 2):
        wb_prefixes[f"input_hint_block.{i}"] = f"controlnet_conv_in.blocks.{i}"

    key_map = {}
    for src, dst in wb_prefixes.items():
        key_map[f"control_model.{src}.weight"] = f"{dst}.weight"
        key_map[f"control_model.{src}.bias"] = f"{dst}.bias"
    for src, dst in weight_only_prefixes.items():
        key_map[f"control_model.{src}.weight"] = f"{dst}.weight"

    # Zero convs: every weight is distinct, but biases are shared within a
    # resolution group (the same biases that ``from_diffusers`` skips).
    shared_bias = [0, 0, 0, 0, 4, 4, 4, 7, 7, 7, 7, 7]
    for i in range(12):
        key_map[f"control_model.zero_convs.{i}.0.weight"] = f"controlnet_blocks.{i}.weight"
        key_map[f"control_model.zero_convs.{i}.0.bias"] = f"controlnet_blocks.{shared_bias[i]}.bias"
    key_map["control_model.middle_block_out.0.weight"] = "controlnet_blocks.12.weight"
    key_map["control_model.middle_block_out.0.bias"] = "controlnet_blocks.7.bias"

    converted = {}
    for name, param in state_dict.items():
        target = key_map.get(name)
        if target is None:
            continue
        if ".proj_in." in name or ".proj_out." in name:
            # 1x1 conv kernel -> linear weight shape.
            param = param.squeeze()
        converted[target] = param
    return converted
class IpAdapterCLIPImageEmbedder(SVDImageEncoder):
    """CLIP image encoder that preprocesses raw images before embedding them."""

    def __init__(self):
        super().__init__()
        self.image_processor = CLIPImageProcessor()

    def forward(self, image):
        # Preprocess to pixel tensors, then match the encoder's device/dtype
        # (taken from the class embedding parameter) before encoding.
        processed = self.image_processor(images=image, return_tensors="pt")
        pixel_values = processed.pixel_values.to(
            device=self.embeddings.class_embedding.device,
            dtype=self.embeddings.class_embedding.dtype,
        )
        return super().forward(pixel_values)


class SDIpAdapter(torch.nn.Module):
    """IP-Adapter for SD v1.5: projects image embeddings into per-block K/V pairs."""

    def __init__(self):
        super().__init__()
        # One (cross_attention_dim, hidden_dim) pair per adapted attention block.
        shapes = (
            [(768, 320)] * 2 + [(768, 640)] * 2 + [(768, 1280)] * 5
            + [(768, 640)] * 3 + [(768, 320)] * 3 + [(768, 1280)] * 1
        )
        self.ipadapter_modules = torch.nn.ModuleList(IpAdapterModule(*shape) for shape in shapes)
        self.image_proj = IpAdapterImageProjModel(
            cross_attention_dim=768, clip_embeddings_dim=1024, clip_extra_context_tokens=4
        )
        self.set_full_adapter()

    def set_full_adapter(self):
        # UNet block ids that host an IP-Adapter module, in module order;
        # maps (block_id, transformer_id) -> index into ipadapter_modules.
        block_ids = [1, 4, 9, 12, 17, 20, 40, 43, 46, 50, 53, 56, 60, 63, 66, 29]
        self.call_block_id = {(block_id, 0): module_id for module_id, block_id in enumerate(block_ids)}

    def set_less_adapter(self):
        # IP-Adapter for SD v1.5 doesn't support this feature.
        self.set_full_adapter()

    def forward(self, hidden_states, scale=1.0):
        hidden_states = self.image_proj(hidden_states)
        hidden_states = hidden_states.view(1, -1, hidden_states.shape[-1])
        ip_kv_dict = {}
        for (block_id, transformer_id), module_id in self.call_block_id.items():
            ip_k, ip_v = self.ipadapter_modules[module_id](hidden_states)
            ip_kv_dict.setdefault(block_id, {})[transformer_id] = {
                "ip_k": ip_k,
                "ip_v": ip_v,
                "scale": scale,
            }
        return ip_kv_dict

    @staticmethod
    def state_dict_converter():
        return SDIpAdapterStateDictConverter()


class SDIpAdapterStateDictConverter(SDXLIpAdapterStateDictConverter):
    # Reuses the SDXL key conversion; intentionally skips the parent's __init__.
    def __init__(self):
        pass
Feed-forward - self.norm3 = torch.nn.LayerNorm(dim, elementwise_affine=True) - self.act_fn = GEGLU(dim, dim * 4) - self.ff = torch.nn.Linear(dim * 4, dim) - - - def forward(self, hidden_states, batch_size=1): - - # 1. Self-Attention - norm_hidden_states = self.norm1(hidden_states) - norm_hidden_states = rearrange(norm_hidden_states, "(b f) h c -> (b h) f c", b=batch_size) - attn_output = self.attn1(norm_hidden_states + self.pe1[:, :norm_hidden_states.shape[1]]) - attn_output = rearrange(attn_output, "(b h) f c -> (b f) h c", b=batch_size) - hidden_states = attn_output + hidden_states - - # 2. Cross-Attention - norm_hidden_states = self.norm2(hidden_states) - norm_hidden_states = rearrange(norm_hidden_states, "(b f) h c -> (b h) f c", b=batch_size) - attn_output = self.attn2(norm_hidden_states + self.pe2[:, :norm_hidden_states.shape[1]]) - attn_output = rearrange(attn_output, "(b h) f c -> (b f) h c", b=batch_size) - hidden_states = attn_output + hidden_states - - # 3. Feed-forward - norm_hidden_states = self.norm3(hidden_states) - ff_output = self.act_fn(norm_hidden_states) - ff_output = self.ff(ff_output) - hidden_states = ff_output + hidden_states - - return hidden_states - - -class TemporalBlock(torch.nn.Module): - - def __init__(self, num_attention_heads, attention_head_dim, in_channels, num_layers=1, norm_num_groups=32, eps=1e-5): - super().__init__() - inner_dim = num_attention_heads * attention_head_dim - - self.norm = torch.nn.GroupNorm(num_groups=norm_num_groups, num_channels=in_channels, eps=eps, affine=True) - self.proj_in = torch.nn.Linear(in_channels, inner_dim) - - self.transformer_blocks = torch.nn.ModuleList([ - TemporalTransformerBlock( - inner_dim, - num_attention_heads, - attention_head_dim - ) - for d in range(num_layers) - ]) - - self.proj_out = torch.nn.Linear(inner_dim, in_channels) - - def forward(self, hidden_states, time_emb, text_emb, res_stack, batch_size=1): - batch, _, height, width = hidden_states.shape - residual = hidden_states - - 
hidden_states = self.norm(hidden_states) - inner_dim = hidden_states.shape[1] - hidden_states = hidden_states.permute(0, 2, 3, 1).reshape(batch, height * width, inner_dim) - hidden_states = self.proj_in(hidden_states) - - for block in self.transformer_blocks: - hidden_states = block( - hidden_states, - batch_size=batch_size - ) - - hidden_states = self.proj_out(hidden_states) - hidden_states = hidden_states.reshape(batch, height, width, inner_dim).permute(0, 3, 1, 2).contiguous() - hidden_states = hidden_states + residual - - return hidden_states, time_emb, text_emb, res_stack - - -class SDMotionModel(torch.nn.Module): - def __init__(self): - super().__init__() - self.motion_modules = torch.nn.ModuleList([ - TemporalBlock(8, 40, 320, eps=1e-6), - TemporalBlock(8, 40, 320, eps=1e-6), - TemporalBlock(8, 80, 640, eps=1e-6), - TemporalBlock(8, 80, 640, eps=1e-6), - TemporalBlock(8, 160, 1280, eps=1e-6), - TemporalBlock(8, 160, 1280, eps=1e-6), - TemporalBlock(8, 160, 1280, eps=1e-6), - TemporalBlock(8, 160, 1280, eps=1e-6), - TemporalBlock(8, 160, 1280, eps=1e-6), - TemporalBlock(8, 160, 1280, eps=1e-6), - TemporalBlock(8, 160, 1280, eps=1e-6), - TemporalBlock(8, 160, 1280, eps=1e-6), - TemporalBlock(8, 160, 1280, eps=1e-6), - TemporalBlock(8, 160, 1280, eps=1e-6), - TemporalBlock(8, 160, 1280, eps=1e-6), - TemporalBlock(8, 80, 640, eps=1e-6), - TemporalBlock(8, 80, 640, eps=1e-6), - TemporalBlock(8, 80, 640, eps=1e-6), - TemporalBlock(8, 40, 320, eps=1e-6), - TemporalBlock(8, 40, 320, eps=1e-6), - TemporalBlock(8, 40, 320, eps=1e-6), - ]) - self.call_block_id = { - 1: 0, - 4: 1, - 9: 2, - 12: 3, - 17: 4, - 20: 5, - 24: 6, - 26: 7, - 29: 8, - 32: 9, - 34: 10, - 36: 11, - 40: 12, - 43: 13, - 46: 14, - 50: 15, - 53: 16, - 56: 17, - 60: 18, - 63: 19, - 66: 20 - } - - def forward(self): - pass - - @staticmethod - def state_dict_converter(): - return SDMotionModelStateDictConverter() - - -class SDMotionModelStateDictConverter: - def __init__(self): - pass - - def 
from_diffusers(self, state_dict): - rename_dict = { - "norm": "norm", - "proj_in": "proj_in", - "transformer_blocks.0.attention_blocks.0.to_q": "transformer_blocks.0.attn1.to_q", - "transformer_blocks.0.attention_blocks.0.to_k": "transformer_blocks.0.attn1.to_k", - "transformer_blocks.0.attention_blocks.0.to_v": "transformer_blocks.0.attn1.to_v", - "transformer_blocks.0.attention_blocks.0.to_out.0": "transformer_blocks.0.attn1.to_out", - "transformer_blocks.0.attention_blocks.0.pos_encoder": "transformer_blocks.0.pe1", - "transformer_blocks.0.attention_blocks.1.to_q": "transformer_blocks.0.attn2.to_q", - "transformer_blocks.0.attention_blocks.1.to_k": "transformer_blocks.0.attn2.to_k", - "transformer_blocks.0.attention_blocks.1.to_v": "transformer_blocks.0.attn2.to_v", - "transformer_blocks.0.attention_blocks.1.to_out.0": "transformer_blocks.0.attn2.to_out", - "transformer_blocks.0.attention_blocks.1.pos_encoder": "transformer_blocks.0.pe2", - "transformer_blocks.0.norms.0": "transformer_blocks.0.norm1", - "transformer_blocks.0.norms.1": "transformer_blocks.0.norm2", - "transformer_blocks.0.ff.net.0.proj": "transformer_blocks.0.act_fn.proj", - "transformer_blocks.0.ff.net.2": "transformer_blocks.0.ff", - "transformer_blocks.0.ff_norm": "transformer_blocks.0.norm3", - "proj_out": "proj_out", - } - name_list = sorted([i for i in state_dict if i.startswith("down_blocks.")]) - name_list += sorted([i for i in state_dict if i.startswith("mid_block.")]) - name_list += sorted([i for i in state_dict if i.startswith("up_blocks.")]) - state_dict_ = {} - last_prefix, module_id = "", -1 - for name in name_list: - names = name.split(".") - prefix_index = names.index("temporal_transformer") + 1 - prefix = ".".join(names[:prefix_index]) - if prefix != last_prefix: - last_prefix = prefix - module_id += 1 - middle_name = ".".join(names[prefix_index:-1]) - suffix = names[-1] - if "pos_encoder" in names: - rename = ".".join(["motion_modules", str(module_id), rename_dict[middle_name]]) 
- else: - rename = ".".join(["motion_modules", str(module_id), rename_dict[middle_name], suffix]) - state_dict_[rename] = state_dict[name] - return state_dict_ - - def from_civitai(self, state_dict): - return self.from_diffusers(state_dict) diff --git a/diffsynth/models/sd_text_encoder.py b/diffsynth/models/sd_text_encoder.py deleted file mode 100644 index 8fe8994..0000000 --- a/diffsynth/models/sd_text_encoder.py +++ /dev/null @@ -1,321 +0,0 @@ -import torch -from .attention import Attention - - -class CLIPEncoderLayer(torch.nn.Module): - def __init__(self, embed_dim, intermediate_size, num_heads=12, head_dim=64, use_quick_gelu=True): - super().__init__() - self.attn = Attention(q_dim=embed_dim, num_heads=num_heads, head_dim=head_dim, bias_q=True, bias_kv=True, bias_out=True) - self.layer_norm1 = torch.nn.LayerNorm(embed_dim) - self.layer_norm2 = torch.nn.LayerNorm(embed_dim) - self.fc1 = torch.nn.Linear(embed_dim, intermediate_size) - self.fc2 = torch.nn.Linear(intermediate_size, embed_dim) - - self.use_quick_gelu = use_quick_gelu - - def quickGELU(self, x): - return x * torch.sigmoid(1.702 * x) - - def forward(self, hidden_states, attn_mask=None): - residual = hidden_states - - hidden_states = self.layer_norm1(hidden_states) - hidden_states = self.attn(hidden_states, attn_mask=attn_mask) - hidden_states = residual + hidden_states - - residual = hidden_states - hidden_states = self.layer_norm2(hidden_states) - hidden_states = self.fc1(hidden_states) - if self.use_quick_gelu: - hidden_states = self.quickGELU(hidden_states) - else: - hidden_states = torch.nn.functional.gelu(hidden_states) - hidden_states = self.fc2(hidden_states) - hidden_states = residual + hidden_states - - return hidden_states - - -class SDTextEncoder(torch.nn.Module): - def __init__(self, embed_dim=768, vocab_size=49408, max_position_embeddings=77, num_encoder_layers=12, encoder_intermediate_size=3072): - super().__init__() - - # token_embedding - self.token_embedding = 
torch.nn.Embedding(vocab_size, embed_dim) - - # position_embeds (This is a fixed tensor) - self.position_embeds = torch.nn.Parameter(torch.zeros(1, max_position_embeddings, embed_dim)) - - # encoders - self.encoders = torch.nn.ModuleList([CLIPEncoderLayer(embed_dim, encoder_intermediate_size) for _ in range(num_encoder_layers)]) - - # attn_mask - self.attn_mask = self.attention_mask(max_position_embeddings) - - # final_layer_norm - self.final_layer_norm = torch.nn.LayerNorm(embed_dim) - - def attention_mask(self, length): - mask = torch.empty(length, length) - mask.fill_(float("-inf")) - mask.triu_(1) - return mask - - def forward(self, input_ids, clip_skip=1): - embeds = self.token_embedding(input_ids) + self.position_embeds - attn_mask = self.attn_mask.to(device=embeds.device, dtype=embeds.dtype) - for encoder_id, encoder in enumerate(self.encoders): - embeds = encoder(embeds, attn_mask=attn_mask) - if encoder_id + clip_skip == len(self.encoders): - break - embeds = self.final_layer_norm(embeds) - return embeds - - @staticmethod - def state_dict_converter(): - return SDTextEncoderStateDictConverter() - - -class SDTextEncoderStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - rename_dict = { - "text_model.embeddings.token_embedding.weight": "token_embedding.weight", - "text_model.embeddings.position_embedding.weight": "position_embeds", - "text_model.final_layer_norm.weight": "final_layer_norm.weight", - "text_model.final_layer_norm.bias": "final_layer_norm.bias" - } - attn_rename_dict = { - "self_attn.q_proj": "attn.to_q", - "self_attn.k_proj": "attn.to_k", - "self_attn.v_proj": "attn.to_v", - "self_attn.out_proj": "attn.to_out", - "layer_norm1": "layer_norm1", - "layer_norm2": "layer_norm2", - "mlp.fc1": "fc1", - "mlp.fc2": "fc2", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if name == "text_model.embeddings.position_embedding.weight": - param = param.reshape((1, 
param.shape[0], param.shape[1])) - state_dict_[rename_dict[name]] = param - elif name.startswith("text_model.encoder.layers."): - param = state_dict[name] - names = name.split(".") - layer_id, layer_type, tail = names[3], ".".join(names[4:-1]), names[-1] - name_ = ".".join(["encoders", layer_id, attn_rename_dict[layer_type], tail]) - state_dict_[name_] = param - return state_dict_ - - def from_civitai(self, state_dict): - rename_dict = { - "cond_stage_model.transformer.text_model.embeddings.token_embedding.weight": "token_embedding.weight", - "cond_stage_model.transformer.text_model.encoder.layers.0.layer_norm1.bias": "encoders.0.layer_norm1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.0.layer_norm1.weight": "encoders.0.layer_norm1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.0.layer_norm2.bias": "encoders.0.layer_norm2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.0.layer_norm2.weight": "encoders.0.layer_norm2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.0.mlp.fc1.bias": "encoders.0.fc1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.0.mlp.fc1.weight": "encoders.0.fc1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.0.mlp.fc2.bias": "encoders.0.fc2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.0.mlp.fc2.weight": "encoders.0.fc2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.0.self_attn.k_proj.bias": "encoders.0.attn.to_k.bias", - "cond_stage_model.transformer.text_model.encoder.layers.0.self_attn.k_proj.weight": "encoders.0.attn.to_k.weight", - "cond_stage_model.transformer.text_model.encoder.layers.0.self_attn.out_proj.bias": "encoders.0.attn.to_out.bias", - "cond_stage_model.transformer.text_model.encoder.layers.0.self_attn.out_proj.weight": "encoders.0.attn.to_out.weight", - "cond_stage_model.transformer.text_model.encoder.layers.0.self_attn.q_proj.bias": "encoders.0.attn.to_q.bias", - 
"cond_stage_model.transformer.text_model.encoder.layers.0.self_attn.q_proj.weight": "encoders.0.attn.to_q.weight", - "cond_stage_model.transformer.text_model.encoder.layers.0.self_attn.v_proj.bias": "encoders.0.attn.to_v.bias", - "cond_stage_model.transformer.text_model.encoder.layers.0.self_attn.v_proj.weight": "encoders.0.attn.to_v.weight", - "cond_stage_model.transformer.text_model.encoder.layers.1.layer_norm1.bias": "encoders.1.layer_norm1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.1.layer_norm1.weight": "encoders.1.layer_norm1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.1.layer_norm2.bias": "encoders.1.layer_norm2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.1.layer_norm2.weight": "encoders.1.layer_norm2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.1.mlp.fc1.bias": "encoders.1.fc1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.1.mlp.fc1.weight": "encoders.1.fc1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.1.mlp.fc2.bias": "encoders.1.fc2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.1.mlp.fc2.weight": "encoders.1.fc2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.1.self_attn.k_proj.bias": "encoders.1.attn.to_k.bias", - "cond_stage_model.transformer.text_model.encoder.layers.1.self_attn.k_proj.weight": "encoders.1.attn.to_k.weight", - "cond_stage_model.transformer.text_model.encoder.layers.1.self_attn.out_proj.bias": "encoders.1.attn.to_out.bias", - "cond_stage_model.transformer.text_model.encoder.layers.1.self_attn.out_proj.weight": "encoders.1.attn.to_out.weight", - "cond_stage_model.transformer.text_model.encoder.layers.1.self_attn.q_proj.bias": "encoders.1.attn.to_q.bias", - "cond_stage_model.transformer.text_model.encoder.layers.1.self_attn.q_proj.weight": "encoders.1.attn.to_q.weight", - "cond_stage_model.transformer.text_model.encoder.layers.1.self_attn.v_proj.bias": 
"encoders.1.attn.to_v.bias", - "cond_stage_model.transformer.text_model.encoder.layers.1.self_attn.v_proj.weight": "encoders.1.attn.to_v.weight", - "cond_stage_model.transformer.text_model.encoder.layers.10.layer_norm1.bias": "encoders.10.layer_norm1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.10.layer_norm1.weight": "encoders.10.layer_norm1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.10.layer_norm2.bias": "encoders.10.layer_norm2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.10.layer_norm2.weight": "encoders.10.layer_norm2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.10.mlp.fc1.bias": "encoders.10.fc1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.10.mlp.fc1.weight": "encoders.10.fc1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.10.mlp.fc2.bias": "encoders.10.fc2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.10.mlp.fc2.weight": "encoders.10.fc2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.10.self_attn.k_proj.bias": "encoders.10.attn.to_k.bias", - "cond_stage_model.transformer.text_model.encoder.layers.10.self_attn.k_proj.weight": "encoders.10.attn.to_k.weight", - "cond_stage_model.transformer.text_model.encoder.layers.10.self_attn.out_proj.bias": "encoders.10.attn.to_out.bias", - "cond_stage_model.transformer.text_model.encoder.layers.10.self_attn.out_proj.weight": "encoders.10.attn.to_out.weight", - "cond_stage_model.transformer.text_model.encoder.layers.10.self_attn.q_proj.bias": "encoders.10.attn.to_q.bias", - "cond_stage_model.transformer.text_model.encoder.layers.10.self_attn.q_proj.weight": "encoders.10.attn.to_q.weight", - "cond_stage_model.transformer.text_model.encoder.layers.10.self_attn.v_proj.bias": "encoders.10.attn.to_v.bias", - "cond_stage_model.transformer.text_model.encoder.layers.10.self_attn.v_proj.weight": "encoders.10.attn.to_v.weight", - 
"cond_stage_model.transformer.text_model.encoder.layers.11.layer_norm1.bias": "encoders.11.layer_norm1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.11.layer_norm1.weight": "encoders.11.layer_norm1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.11.layer_norm2.bias": "encoders.11.layer_norm2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.11.layer_norm2.weight": "encoders.11.layer_norm2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.11.mlp.fc1.bias": "encoders.11.fc1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.11.mlp.fc1.weight": "encoders.11.fc1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.11.mlp.fc2.bias": "encoders.11.fc2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.11.mlp.fc2.weight": "encoders.11.fc2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.11.self_attn.k_proj.bias": "encoders.11.attn.to_k.bias", - "cond_stage_model.transformer.text_model.encoder.layers.11.self_attn.k_proj.weight": "encoders.11.attn.to_k.weight", - "cond_stage_model.transformer.text_model.encoder.layers.11.self_attn.out_proj.bias": "encoders.11.attn.to_out.bias", - "cond_stage_model.transformer.text_model.encoder.layers.11.self_attn.out_proj.weight": "encoders.11.attn.to_out.weight", - "cond_stage_model.transformer.text_model.encoder.layers.11.self_attn.q_proj.bias": "encoders.11.attn.to_q.bias", - "cond_stage_model.transformer.text_model.encoder.layers.11.self_attn.q_proj.weight": "encoders.11.attn.to_q.weight", - "cond_stage_model.transformer.text_model.encoder.layers.11.self_attn.v_proj.bias": "encoders.11.attn.to_v.bias", - "cond_stage_model.transformer.text_model.encoder.layers.11.self_attn.v_proj.weight": "encoders.11.attn.to_v.weight", - "cond_stage_model.transformer.text_model.encoder.layers.2.layer_norm1.bias": "encoders.2.layer_norm1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.2.layer_norm1.weight": 
"encoders.2.layer_norm1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.2.layer_norm2.bias": "encoders.2.layer_norm2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.2.layer_norm2.weight": "encoders.2.layer_norm2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.2.mlp.fc1.bias": "encoders.2.fc1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.2.mlp.fc1.weight": "encoders.2.fc1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.2.mlp.fc2.bias": "encoders.2.fc2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.2.mlp.fc2.weight": "encoders.2.fc2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.2.self_attn.k_proj.bias": "encoders.2.attn.to_k.bias", - "cond_stage_model.transformer.text_model.encoder.layers.2.self_attn.k_proj.weight": "encoders.2.attn.to_k.weight", - "cond_stage_model.transformer.text_model.encoder.layers.2.self_attn.out_proj.bias": "encoders.2.attn.to_out.bias", - "cond_stage_model.transformer.text_model.encoder.layers.2.self_attn.out_proj.weight": "encoders.2.attn.to_out.weight", - "cond_stage_model.transformer.text_model.encoder.layers.2.self_attn.q_proj.bias": "encoders.2.attn.to_q.bias", - "cond_stage_model.transformer.text_model.encoder.layers.2.self_attn.q_proj.weight": "encoders.2.attn.to_q.weight", - "cond_stage_model.transformer.text_model.encoder.layers.2.self_attn.v_proj.bias": "encoders.2.attn.to_v.bias", - "cond_stage_model.transformer.text_model.encoder.layers.2.self_attn.v_proj.weight": "encoders.2.attn.to_v.weight", - "cond_stage_model.transformer.text_model.encoder.layers.3.layer_norm1.bias": "encoders.3.layer_norm1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.3.layer_norm1.weight": "encoders.3.layer_norm1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.3.layer_norm2.bias": "encoders.3.layer_norm2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.3.layer_norm2.weight": 
"encoders.3.layer_norm2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.3.mlp.fc1.bias": "encoders.3.fc1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.3.mlp.fc1.weight": "encoders.3.fc1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.3.mlp.fc2.bias": "encoders.3.fc2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.3.mlp.fc2.weight": "encoders.3.fc2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.3.self_attn.k_proj.bias": "encoders.3.attn.to_k.bias", - "cond_stage_model.transformer.text_model.encoder.layers.3.self_attn.k_proj.weight": "encoders.3.attn.to_k.weight", - "cond_stage_model.transformer.text_model.encoder.layers.3.self_attn.out_proj.bias": "encoders.3.attn.to_out.bias", - "cond_stage_model.transformer.text_model.encoder.layers.3.self_attn.out_proj.weight": "encoders.3.attn.to_out.weight", - "cond_stage_model.transformer.text_model.encoder.layers.3.self_attn.q_proj.bias": "encoders.3.attn.to_q.bias", - "cond_stage_model.transformer.text_model.encoder.layers.3.self_attn.q_proj.weight": "encoders.3.attn.to_q.weight", - "cond_stage_model.transformer.text_model.encoder.layers.3.self_attn.v_proj.bias": "encoders.3.attn.to_v.bias", - "cond_stage_model.transformer.text_model.encoder.layers.3.self_attn.v_proj.weight": "encoders.3.attn.to_v.weight", - "cond_stage_model.transformer.text_model.encoder.layers.4.layer_norm1.bias": "encoders.4.layer_norm1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.4.layer_norm1.weight": "encoders.4.layer_norm1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.4.layer_norm2.bias": "encoders.4.layer_norm2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.4.layer_norm2.weight": "encoders.4.layer_norm2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.4.mlp.fc1.bias": "encoders.4.fc1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.4.mlp.fc1.weight": 
"encoders.4.fc1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.4.mlp.fc2.bias": "encoders.4.fc2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.4.mlp.fc2.weight": "encoders.4.fc2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.4.self_attn.k_proj.bias": "encoders.4.attn.to_k.bias", - "cond_stage_model.transformer.text_model.encoder.layers.4.self_attn.k_proj.weight": "encoders.4.attn.to_k.weight", - "cond_stage_model.transformer.text_model.encoder.layers.4.self_attn.out_proj.bias": "encoders.4.attn.to_out.bias", - "cond_stage_model.transformer.text_model.encoder.layers.4.self_attn.out_proj.weight": "encoders.4.attn.to_out.weight", - "cond_stage_model.transformer.text_model.encoder.layers.4.self_attn.q_proj.bias": "encoders.4.attn.to_q.bias", - "cond_stage_model.transformer.text_model.encoder.layers.4.self_attn.q_proj.weight": "encoders.4.attn.to_q.weight", - "cond_stage_model.transformer.text_model.encoder.layers.4.self_attn.v_proj.bias": "encoders.4.attn.to_v.bias", - "cond_stage_model.transformer.text_model.encoder.layers.4.self_attn.v_proj.weight": "encoders.4.attn.to_v.weight", - "cond_stage_model.transformer.text_model.encoder.layers.5.layer_norm1.bias": "encoders.5.layer_norm1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.5.layer_norm1.weight": "encoders.5.layer_norm1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.5.layer_norm2.bias": "encoders.5.layer_norm2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.5.layer_norm2.weight": "encoders.5.layer_norm2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.5.mlp.fc1.bias": "encoders.5.fc1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.5.mlp.fc1.weight": "encoders.5.fc1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.5.mlp.fc2.bias": "encoders.5.fc2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.5.mlp.fc2.weight": 
"encoders.5.fc2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.5.self_attn.k_proj.bias": "encoders.5.attn.to_k.bias", - "cond_stage_model.transformer.text_model.encoder.layers.5.self_attn.k_proj.weight": "encoders.5.attn.to_k.weight", - "cond_stage_model.transformer.text_model.encoder.layers.5.self_attn.out_proj.bias": "encoders.5.attn.to_out.bias", - "cond_stage_model.transformer.text_model.encoder.layers.5.self_attn.out_proj.weight": "encoders.5.attn.to_out.weight", - "cond_stage_model.transformer.text_model.encoder.layers.5.self_attn.q_proj.bias": "encoders.5.attn.to_q.bias", - "cond_stage_model.transformer.text_model.encoder.layers.5.self_attn.q_proj.weight": "encoders.5.attn.to_q.weight", - "cond_stage_model.transformer.text_model.encoder.layers.5.self_attn.v_proj.bias": "encoders.5.attn.to_v.bias", - "cond_stage_model.transformer.text_model.encoder.layers.5.self_attn.v_proj.weight": "encoders.5.attn.to_v.weight", - "cond_stage_model.transformer.text_model.encoder.layers.6.layer_norm1.bias": "encoders.6.layer_norm1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.6.layer_norm1.weight": "encoders.6.layer_norm1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.6.layer_norm2.bias": "encoders.6.layer_norm2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.6.layer_norm2.weight": "encoders.6.layer_norm2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.6.mlp.fc1.bias": "encoders.6.fc1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.6.mlp.fc1.weight": "encoders.6.fc1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.6.mlp.fc2.bias": "encoders.6.fc2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.6.mlp.fc2.weight": "encoders.6.fc2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.6.self_attn.k_proj.bias": "encoders.6.attn.to_k.bias", - "cond_stage_model.transformer.text_model.encoder.layers.6.self_attn.k_proj.weight": 
"encoders.6.attn.to_k.weight", - "cond_stage_model.transformer.text_model.encoder.layers.6.self_attn.out_proj.bias": "encoders.6.attn.to_out.bias", - "cond_stage_model.transformer.text_model.encoder.layers.6.self_attn.out_proj.weight": "encoders.6.attn.to_out.weight", - "cond_stage_model.transformer.text_model.encoder.layers.6.self_attn.q_proj.bias": "encoders.6.attn.to_q.bias", - "cond_stage_model.transformer.text_model.encoder.layers.6.self_attn.q_proj.weight": "encoders.6.attn.to_q.weight", - "cond_stage_model.transformer.text_model.encoder.layers.6.self_attn.v_proj.bias": "encoders.6.attn.to_v.bias", - "cond_stage_model.transformer.text_model.encoder.layers.6.self_attn.v_proj.weight": "encoders.6.attn.to_v.weight", - "cond_stage_model.transformer.text_model.encoder.layers.7.layer_norm1.bias": "encoders.7.layer_norm1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.7.layer_norm1.weight": "encoders.7.layer_norm1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.7.layer_norm2.bias": "encoders.7.layer_norm2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.7.layer_norm2.weight": "encoders.7.layer_norm2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.7.mlp.fc1.bias": "encoders.7.fc1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.7.mlp.fc1.weight": "encoders.7.fc1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.7.mlp.fc2.bias": "encoders.7.fc2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.7.mlp.fc2.weight": "encoders.7.fc2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.7.self_attn.k_proj.bias": "encoders.7.attn.to_k.bias", - "cond_stage_model.transformer.text_model.encoder.layers.7.self_attn.k_proj.weight": "encoders.7.attn.to_k.weight", - "cond_stage_model.transformer.text_model.encoder.layers.7.self_attn.out_proj.bias": "encoders.7.attn.to_out.bias", - 
"cond_stage_model.transformer.text_model.encoder.layers.7.self_attn.out_proj.weight": "encoders.7.attn.to_out.weight", - "cond_stage_model.transformer.text_model.encoder.layers.7.self_attn.q_proj.bias": "encoders.7.attn.to_q.bias", - "cond_stage_model.transformer.text_model.encoder.layers.7.self_attn.q_proj.weight": "encoders.7.attn.to_q.weight", - "cond_stage_model.transformer.text_model.encoder.layers.7.self_attn.v_proj.bias": "encoders.7.attn.to_v.bias", - "cond_stage_model.transformer.text_model.encoder.layers.7.self_attn.v_proj.weight": "encoders.7.attn.to_v.weight", - "cond_stage_model.transformer.text_model.encoder.layers.8.layer_norm1.bias": "encoders.8.layer_norm1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.8.layer_norm1.weight": "encoders.8.layer_norm1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.8.layer_norm2.bias": "encoders.8.layer_norm2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.8.layer_norm2.weight": "encoders.8.layer_norm2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.8.mlp.fc1.bias": "encoders.8.fc1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.8.mlp.fc1.weight": "encoders.8.fc1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.8.mlp.fc2.bias": "encoders.8.fc2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.8.mlp.fc2.weight": "encoders.8.fc2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.8.self_attn.k_proj.bias": "encoders.8.attn.to_k.bias", - "cond_stage_model.transformer.text_model.encoder.layers.8.self_attn.k_proj.weight": "encoders.8.attn.to_k.weight", - "cond_stage_model.transformer.text_model.encoder.layers.8.self_attn.out_proj.bias": "encoders.8.attn.to_out.bias", - "cond_stage_model.transformer.text_model.encoder.layers.8.self_attn.out_proj.weight": "encoders.8.attn.to_out.weight", - "cond_stage_model.transformer.text_model.encoder.layers.8.self_attn.q_proj.bias": 
"encoders.8.attn.to_q.bias", - "cond_stage_model.transformer.text_model.encoder.layers.8.self_attn.q_proj.weight": "encoders.8.attn.to_q.weight", - "cond_stage_model.transformer.text_model.encoder.layers.8.self_attn.v_proj.bias": "encoders.8.attn.to_v.bias", - "cond_stage_model.transformer.text_model.encoder.layers.8.self_attn.v_proj.weight": "encoders.8.attn.to_v.weight", - "cond_stage_model.transformer.text_model.encoder.layers.9.layer_norm1.bias": "encoders.9.layer_norm1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.9.layer_norm1.weight": "encoders.9.layer_norm1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.9.layer_norm2.bias": "encoders.9.layer_norm2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.9.layer_norm2.weight": "encoders.9.layer_norm2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.9.mlp.fc1.bias": "encoders.9.fc1.bias", - "cond_stage_model.transformer.text_model.encoder.layers.9.mlp.fc1.weight": "encoders.9.fc1.weight", - "cond_stage_model.transformer.text_model.encoder.layers.9.mlp.fc2.bias": "encoders.9.fc2.bias", - "cond_stage_model.transformer.text_model.encoder.layers.9.mlp.fc2.weight": "encoders.9.fc2.weight", - "cond_stage_model.transformer.text_model.encoder.layers.9.self_attn.k_proj.bias": "encoders.9.attn.to_k.bias", - "cond_stage_model.transformer.text_model.encoder.layers.9.self_attn.k_proj.weight": "encoders.9.attn.to_k.weight", - "cond_stage_model.transformer.text_model.encoder.layers.9.self_attn.out_proj.bias": "encoders.9.attn.to_out.bias", - "cond_stage_model.transformer.text_model.encoder.layers.9.self_attn.out_proj.weight": "encoders.9.attn.to_out.weight", - "cond_stage_model.transformer.text_model.encoder.layers.9.self_attn.q_proj.bias": "encoders.9.attn.to_q.bias", - "cond_stage_model.transformer.text_model.encoder.layers.9.self_attn.q_proj.weight": "encoders.9.attn.to_q.weight", - 
"cond_stage_model.transformer.text_model.encoder.layers.9.self_attn.v_proj.bias": "encoders.9.attn.to_v.bias", - "cond_stage_model.transformer.text_model.encoder.layers.9.self_attn.v_proj.weight": "encoders.9.attn.to_v.weight", - "cond_stage_model.transformer.text_model.final_layer_norm.bias": "final_layer_norm.bias", - "cond_stage_model.transformer.text_model.final_layer_norm.weight": "final_layer_norm.weight", - "cond_stage_model.transformer.text_model.embeddings.position_embedding.weight": "position_embeds" - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if name == "cond_stage_model.transformer.text_model.embeddings.position_embedding.weight": - param = param.reshape((1, param.shape[0], param.shape[1])) - state_dict_[rename_dict[name]] = param - return state_dict_ diff --git a/diffsynth/models/sd_unet.py b/diffsynth/models/sd_unet.py deleted file mode 100644 index 3336390..0000000 --- a/diffsynth/models/sd_unet.py +++ /dev/null @@ -1,1108 +0,0 @@ -import torch, math -from .attention import Attention -from .tiler import TileWorker - - -class Timesteps(torch.nn.Module): - def __init__(self, num_channels): - super().__init__() - self.num_channels = num_channels - - def forward(self, timesteps): - half_dim = self.num_channels // 2 - exponent = -math.log(10000) * torch.arange(start=0, end=half_dim, dtype=torch.float32, device=timesteps.device) / half_dim - timesteps = timesteps.unsqueeze(-1) - emb = timesteps.float() * torch.exp(exponent) - emb = torch.cat([torch.cos(emb), torch.sin(emb)], dim=-1) - return emb - - -class GEGLU(torch.nn.Module): - - def __init__(self, dim_in, dim_out): - super().__init__() - self.proj = torch.nn.Linear(dim_in, dim_out * 2) - - def forward(self, hidden_states): - hidden_states, gate = self.proj(hidden_states).chunk(2, dim=-1) - return hidden_states * torch.nn.functional.gelu(gate) - - -class BasicTransformerBlock(torch.nn.Module): - - def __init__(self, dim, num_attention_heads, 
attention_head_dim, cross_attention_dim): - super().__init__() - - # 1. Self-Attn - self.norm1 = torch.nn.LayerNorm(dim, elementwise_affine=True) - self.attn1 = Attention(q_dim=dim, num_heads=num_attention_heads, head_dim=attention_head_dim, bias_out=True) - - # 2. Cross-Attn - self.norm2 = torch.nn.LayerNorm(dim, elementwise_affine=True) - self.attn2 = Attention(q_dim=dim, kv_dim=cross_attention_dim, num_heads=num_attention_heads, head_dim=attention_head_dim, bias_out=True) - - # 3. Feed-forward - self.norm3 = torch.nn.LayerNorm(dim, elementwise_affine=True) - self.act_fn = GEGLU(dim, dim * 4) - self.ff = torch.nn.Linear(dim * 4, dim) - - - def forward(self, hidden_states, encoder_hidden_states, ipadapter_kwargs=None): - # 1. Self-Attention - norm_hidden_states = self.norm1(hidden_states) - attn_output = self.attn1(norm_hidden_states, encoder_hidden_states=None) - hidden_states = attn_output + hidden_states - - # 2. Cross-Attention - norm_hidden_states = self.norm2(hidden_states) - attn_output = self.attn2(norm_hidden_states, encoder_hidden_states=encoder_hidden_states, ipadapter_kwargs=ipadapter_kwargs) - hidden_states = attn_output + hidden_states - - # 3. 
Feed-forward - norm_hidden_states = self.norm3(hidden_states) - ff_output = self.act_fn(norm_hidden_states) - ff_output = self.ff(ff_output) - hidden_states = ff_output + hidden_states - - return hidden_states - - -class DownSampler(torch.nn.Module): - def __init__(self, channels, padding=1, extra_padding=False): - super().__init__() - self.conv = torch.nn.Conv2d(channels, channels, 3, stride=2, padding=padding) - self.extra_padding = extra_padding - - def forward(self, hidden_states, time_emb, text_emb, res_stack, **kwargs): - if self.extra_padding: - hidden_states = torch.nn.functional.pad(hidden_states, (0, 1, 0, 1), mode="constant", value=0) - hidden_states = self.conv(hidden_states) - return hidden_states, time_emb, text_emb, res_stack - - -class UpSampler(torch.nn.Module): - def __init__(self, channels): - super().__init__() - self.conv = torch.nn.Conv2d(channels, channels, 3, padding=1) - - def forward(self, hidden_states, time_emb, text_emb, res_stack, **kwargs): - hidden_states = torch.nn.functional.interpolate(hidden_states, scale_factor=2.0, mode="nearest") - hidden_states = self.conv(hidden_states) - return hidden_states, time_emb, text_emb, res_stack - - -class ResnetBlock(torch.nn.Module): - def __init__(self, in_channels, out_channels, temb_channels=None, groups=32, eps=1e-5): - super().__init__() - self.norm1 = torch.nn.GroupNorm(num_groups=groups, num_channels=in_channels, eps=eps, affine=True) - self.conv1 = torch.nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=1, padding=1) - if temb_channels is not None: - self.time_emb_proj = torch.nn.Linear(temb_channels, out_channels) - self.norm2 = torch.nn.GroupNorm(num_groups=groups, num_channels=out_channels, eps=eps, affine=True) - self.conv2 = torch.nn.Conv2d(out_channels, out_channels, kernel_size=3, stride=1, padding=1) - self.nonlinearity = torch.nn.SiLU() - self.conv_shortcut = None - if in_channels != out_channels: - self.conv_shortcut = torch.nn.Conv2d(in_channels, out_channels, 
kernel_size=1, stride=1, padding=0, bias=True) - - def forward(self, hidden_states, time_emb, text_emb, res_stack, **kwargs): - x = hidden_states - x = self.norm1(x) - x = self.nonlinearity(x) - x = self.conv1(x) - if time_emb is not None: - emb = self.nonlinearity(time_emb) - emb = self.time_emb_proj(emb)[:, :, None, None] - x = x + emb - x = self.norm2(x) - x = self.nonlinearity(x) - x = self.conv2(x) - if self.conv_shortcut is not None: - hidden_states = self.conv_shortcut(hidden_states) - hidden_states = hidden_states + x - return hidden_states, time_emb, text_emb, res_stack - - -class AttentionBlock(torch.nn.Module): - - def __init__(self, num_attention_heads, attention_head_dim, in_channels, num_layers=1, cross_attention_dim=None, norm_num_groups=32, eps=1e-5, need_proj_out=True): - super().__init__() - inner_dim = num_attention_heads * attention_head_dim - - self.norm = torch.nn.GroupNorm(num_groups=norm_num_groups, num_channels=in_channels, eps=eps, affine=True) - self.proj_in = torch.nn.Linear(in_channels, inner_dim) - - self.transformer_blocks = torch.nn.ModuleList([ - BasicTransformerBlock( - inner_dim, - num_attention_heads, - attention_head_dim, - cross_attention_dim=cross_attention_dim - ) - for d in range(num_layers) - ]) - self.need_proj_out = need_proj_out - if need_proj_out: - self.proj_out = torch.nn.Linear(inner_dim, in_channels) - - def forward( - self, - hidden_states, time_emb, text_emb, res_stack, - cross_frame_attention=False, - tiled=False, tile_size=64, tile_stride=32, - ipadapter_kwargs_list={}, - **kwargs - ): - batch, _, height, width = hidden_states.shape - residual = hidden_states - - hidden_states = self.norm(hidden_states) - inner_dim = hidden_states.shape[1] - hidden_states = hidden_states.permute(0, 2, 3, 1).reshape(batch, height * width, inner_dim) - hidden_states = self.proj_in(hidden_states) - - if cross_frame_attention: - hidden_states = hidden_states.reshape(1, batch * height * width, inner_dim) - encoder_hidden_states = 
text_emb.mean(dim=0, keepdim=True) - else: - encoder_hidden_states = text_emb - if encoder_hidden_states.shape[0] != hidden_states.shape[0]: - encoder_hidden_states = encoder_hidden_states.repeat(hidden_states.shape[0], 1, 1) - - if tiled: - tile_size = min(tile_size, min(height, width)) - hidden_states = hidden_states.permute(0, 2, 1).reshape(batch, inner_dim, height, width) - def block_tile_forward(x): - b, c, h, w = x.shape - x = x.permute(0, 2, 3, 1).reshape(b, h*w, c) - x = block(x, encoder_hidden_states) - x = x.reshape(b, h, w, c).permute(0, 3, 1, 2) - return x - for block in self.transformer_blocks: - hidden_states = TileWorker().tiled_forward( - block_tile_forward, - hidden_states, - tile_size, - tile_stride, - tile_device=hidden_states.device, - tile_dtype=hidden_states.dtype - ) - hidden_states = hidden_states.permute(0, 2, 3, 1).reshape(batch, height * width, inner_dim) - else: - for block_id, block in enumerate(self.transformer_blocks): - hidden_states = block( - hidden_states, - encoder_hidden_states=encoder_hidden_states, - ipadapter_kwargs=ipadapter_kwargs_list.get(block_id, None) - ) - if cross_frame_attention: - hidden_states = hidden_states.reshape(batch, height * width, inner_dim) - - if self.need_proj_out: - hidden_states = self.proj_out(hidden_states) - hidden_states = hidden_states.reshape(batch, height, width, inner_dim).permute(0, 3, 1, 2).contiguous() - hidden_states = hidden_states + residual - else: - hidden_states = hidden_states.reshape(batch, height, width, inner_dim).permute(0, 3, 1, 2).contiguous() - - return hidden_states, time_emb, text_emb, res_stack - - -class PushBlock(torch.nn.Module): - def __init__(self): - super().__init__() - - def forward(self, hidden_states, time_emb, text_emb, res_stack, **kwargs): - res_stack.append(hidden_states) - return hidden_states, time_emb, text_emb, res_stack - - -class PopBlock(torch.nn.Module): - def __init__(self): - super().__init__() - - def forward(self, hidden_states, time_emb, text_emb, 
res_stack, **kwargs): - res_hidden_states = res_stack.pop() - hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) - return hidden_states, time_emb, text_emb, res_stack - - -class SDUNet(torch.nn.Module): - def __init__(self): - super().__init__() - self.time_proj = Timesteps(320) - self.time_embedding = torch.nn.Sequential( - torch.nn.Linear(320, 1280), - torch.nn.SiLU(), - torch.nn.Linear(1280, 1280) - ) - self.conv_in = torch.nn.Conv2d(4, 320, kernel_size=3, padding=1) - - self.blocks = torch.nn.ModuleList([ - # CrossAttnDownBlock2D - ResnetBlock(320, 320, 1280), - AttentionBlock(8, 40, 320, 1, 768, eps=1e-6), - PushBlock(), - ResnetBlock(320, 320, 1280), - AttentionBlock(8, 40, 320, 1, 768, eps=1e-6), - PushBlock(), - DownSampler(320), - PushBlock(), - # CrossAttnDownBlock2D - ResnetBlock(320, 640, 1280), - AttentionBlock(8, 80, 640, 1, 768, eps=1e-6), - PushBlock(), - ResnetBlock(640, 640, 1280), - AttentionBlock(8, 80, 640, 1, 768, eps=1e-6), - PushBlock(), - DownSampler(640), - PushBlock(), - # CrossAttnDownBlock2D - ResnetBlock(640, 1280, 1280), - AttentionBlock(8, 160, 1280, 1, 768, eps=1e-6), - PushBlock(), - ResnetBlock(1280, 1280, 1280), - AttentionBlock(8, 160, 1280, 1, 768, eps=1e-6), - PushBlock(), - DownSampler(1280), - PushBlock(), - # DownBlock2D - ResnetBlock(1280, 1280, 1280), - PushBlock(), - ResnetBlock(1280, 1280, 1280), - PushBlock(), - # UNetMidBlock2DCrossAttn - ResnetBlock(1280, 1280, 1280), - AttentionBlock(8, 160, 1280, 1, 768, eps=1e-6), - ResnetBlock(1280, 1280, 1280), - # UpBlock2D - PopBlock(), - ResnetBlock(2560, 1280, 1280), - PopBlock(), - ResnetBlock(2560, 1280, 1280), - PopBlock(), - ResnetBlock(2560, 1280, 1280), - UpSampler(1280), - # CrossAttnUpBlock2D - PopBlock(), - ResnetBlock(2560, 1280, 1280), - AttentionBlock(8, 160, 1280, 1, 768, eps=1e-6), - PopBlock(), - ResnetBlock(2560, 1280, 1280), - AttentionBlock(8, 160, 1280, 1, 768, eps=1e-6), - PopBlock(), - ResnetBlock(1920, 1280, 1280), - AttentionBlock(8, 
160, 1280, 1, 768, eps=1e-6), - UpSampler(1280), - # CrossAttnUpBlock2D - PopBlock(), - ResnetBlock(1920, 640, 1280), - AttentionBlock(8, 80, 640, 1, 768, eps=1e-6), - PopBlock(), - ResnetBlock(1280, 640, 1280), - AttentionBlock(8, 80, 640, 1, 768, eps=1e-6), - PopBlock(), - ResnetBlock(960, 640, 1280), - AttentionBlock(8, 80, 640, 1, 768, eps=1e-6), - UpSampler(640), - # CrossAttnUpBlock2D - PopBlock(), - ResnetBlock(960, 320, 1280), - AttentionBlock(8, 40, 320, 1, 768, eps=1e-6), - PopBlock(), - ResnetBlock(640, 320, 1280), - AttentionBlock(8, 40, 320, 1, 768, eps=1e-6), - PopBlock(), - ResnetBlock(640, 320, 1280), - AttentionBlock(8, 40, 320, 1, 768, eps=1e-6), - ]) - - self.conv_norm_out = torch.nn.GroupNorm(num_channels=320, num_groups=32, eps=1e-5) - self.conv_act = torch.nn.SiLU() - self.conv_out = torch.nn.Conv2d(320, 4, kernel_size=3, padding=1) - - def forward(self, sample, timestep, encoder_hidden_states, **kwargs): - # 1. time - time_emb = self.time_proj(timestep).to(sample.dtype) - time_emb = self.time_embedding(time_emb) - - # 2. pre-process - hidden_states = self.conv_in(sample) - text_emb = encoder_hidden_states - res_stack = [hidden_states] - - # 3. blocks - for i, block in enumerate(self.blocks): - hidden_states, time_emb, text_emb, res_stack = block(hidden_states, time_emb, text_emb, res_stack) - - # 4. 
output - hidden_states = self.conv_norm_out(hidden_states) - hidden_states = self.conv_act(hidden_states) - hidden_states = self.conv_out(hidden_states) - - return hidden_states - - @staticmethod - def state_dict_converter(): - return SDUNetStateDictConverter() - - -class SDUNetStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - # architecture - block_types = [ - 'ResnetBlock', 'AttentionBlock', 'PushBlock', 'ResnetBlock', 'AttentionBlock', 'PushBlock', 'DownSampler', 'PushBlock', - 'ResnetBlock', 'AttentionBlock', 'PushBlock', 'ResnetBlock', 'AttentionBlock', 'PushBlock', 'DownSampler', 'PushBlock', - 'ResnetBlock', 'AttentionBlock', 'PushBlock', 'ResnetBlock', 'AttentionBlock', 'PushBlock', 'DownSampler', 'PushBlock', - 'ResnetBlock', 'PushBlock', 'ResnetBlock', 'PushBlock', - 'ResnetBlock', 'AttentionBlock', 'ResnetBlock', - 'PopBlock', 'ResnetBlock', 'PopBlock', 'ResnetBlock', 'PopBlock', 'ResnetBlock', 'UpSampler', - 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'UpSampler', - 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'UpSampler', - 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'PopBlock', 'ResnetBlock', 'AttentionBlock' - ] - - # Rename each parameter - name_list = sorted([name for name in state_dict]) - rename_dict = {} - block_id = {"ResnetBlock": -1, "AttentionBlock": -1, "DownSampler": -1, "UpSampler": -1} - last_block_type_with_id = {"ResnetBlock": "", "AttentionBlock": "", "DownSampler": "", "UpSampler": ""} - for name in name_list: - names = name.split(".") - if names[0] in ["conv_in", "conv_norm_out", "conv_out"]: - pass - elif names[0] in ["time_embedding", "add_embedding"]: - if names[0] == "add_embedding": - names[0] = "add_time_embedding" - names[1] = {"linear_1": "0", "linear_2": 
"2"}[names[1]] - elif names[0] in ["down_blocks", "mid_block", "up_blocks"]: - if names[0] == "mid_block": - names.insert(1, "0") - block_type = {"resnets": "ResnetBlock", "attentions": "AttentionBlock", "downsamplers": "DownSampler", "upsamplers": "UpSampler"}[names[2]] - block_type_with_id = ".".join(names[:4]) - if block_type_with_id != last_block_type_with_id[block_type]: - block_id[block_type] += 1 - last_block_type_with_id[block_type] = block_type_with_id - while block_id[block_type] < len(block_types) and block_types[block_id[block_type]] != block_type: - block_id[block_type] += 1 - block_type_with_id = ".".join(names[:4]) - names = ["blocks", str(block_id[block_type])] + names[4:] - if "ff" in names: - ff_index = names.index("ff") - component = ".".join(names[ff_index:ff_index+3]) - component = {"ff.net.0": "act_fn", "ff.net.2": "ff"}[component] - names = names[:ff_index] + [component] + names[ff_index+3:] - if "to_out" in names: - names.pop(names.index("to_out") + 1) - else: - raise ValueError(f"Unknown parameters: {name}") - rename_dict[name] = ".".join(names) - - # Convert state_dict - state_dict_ = {} - for name, param in state_dict.items(): - if ".proj_in." in name or ".proj_out." 
in name: - param = param.squeeze() - state_dict_[rename_dict[name]] = param - return state_dict_ - - def from_civitai(self, state_dict): - rename_dict = { - "model.diffusion_model.input_blocks.0.0.bias": "conv_in.bias", - "model.diffusion_model.input_blocks.0.0.weight": "conv_in.weight", - "model.diffusion_model.input_blocks.1.0.emb_layers.1.bias": "blocks.0.time_emb_proj.bias", - "model.diffusion_model.input_blocks.1.0.emb_layers.1.weight": "blocks.0.time_emb_proj.weight", - "model.diffusion_model.input_blocks.1.0.in_layers.0.bias": "blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.1.0.in_layers.0.weight": "blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.1.0.in_layers.2.bias": "blocks.0.conv1.bias", - "model.diffusion_model.input_blocks.1.0.in_layers.2.weight": "blocks.0.conv1.weight", - "model.diffusion_model.input_blocks.1.0.out_layers.0.bias": "blocks.0.norm2.bias", - "model.diffusion_model.input_blocks.1.0.out_layers.0.weight": "blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.1.0.out_layers.3.bias": "blocks.0.conv2.bias", - "model.diffusion_model.input_blocks.1.0.out_layers.3.weight": "blocks.0.conv2.weight", - "model.diffusion_model.input_blocks.1.1.norm.bias": "blocks.1.norm.bias", - "model.diffusion_model.input_blocks.1.1.norm.weight": "blocks.1.norm.weight", - "model.diffusion_model.input_blocks.1.1.proj_in.bias": "blocks.1.proj_in.bias", - "model.diffusion_model.input_blocks.1.1.proj_in.weight": "blocks.1.proj_in.weight", - "model.diffusion_model.input_blocks.1.1.proj_out.bias": "blocks.1.proj_out.bias", - "model.diffusion_model.input_blocks.1.1.proj_out.weight": "blocks.1.proj_out.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn1.to_k.weight": "blocks.1.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.1.transformer_blocks.0.attn1.to_out.bias", - 
"model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.1.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn1.to_q.weight": "blocks.1.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn1.to_v.weight": "blocks.1.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn2.to_k.weight": "blocks.1.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.1.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.1.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn2.to_q.weight": "blocks.1.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn2.to_v.weight": "blocks.1.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.1.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.1.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.ff.net.2.bias": "blocks.1.transformer_blocks.0.ff.bias", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.ff.net.2.weight": "blocks.1.transformer_blocks.0.ff.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.norm1.bias": "blocks.1.transformer_blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.norm1.weight": "blocks.1.transformer_blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.norm2.bias": "blocks.1.transformer_blocks.0.norm2.bias", - 
"model.diffusion_model.input_blocks.1.1.transformer_blocks.0.norm2.weight": "blocks.1.transformer_blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.norm3.bias": "blocks.1.transformer_blocks.0.norm3.bias", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.norm3.weight": "blocks.1.transformer_blocks.0.norm3.weight", - "model.diffusion_model.input_blocks.10.0.emb_layers.1.bias": "blocks.24.time_emb_proj.bias", - "model.diffusion_model.input_blocks.10.0.emb_layers.1.weight": "blocks.24.time_emb_proj.weight", - "model.diffusion_model.input_blocks.10.0.in_layers.0.bias": "blocks.24.norm1.bias", - "model.diffusion_model.input_blocks.10.0.in_layers.0.weight": "blocks.24.norm1.weight", - "model.diffusion_model.input_blocks.10.0.in_layers.2.bias": "blocks.24.conv1.bias", - "model.diffusion_model.input_blocks.10.0.in_layers.2.weight": "blocks.24.conv1.weight", - "model.diffusion_model.input_blocks.10.0.out_layers.0.bias": "blocks.24.norm2.bias", - "model.diffusion_model.input_blocks.10.0.out_layers.0.weight": "blocks.24.norm2.weight", - "model.diffusion_model.input_blocks.10.0.out_layers.3.bias": "blocks.24.conv2.bias", - "model.diffusion_model.input_blocks.10.0.out_layers.3.weight": "blocks.24.conv2.weight", - "model.diffusion_model.input_blocks.11.0.emb_layers.1.bias": "blocks.26.time_emb_proj.bias", - "model.diffusion_model.input_blocks.11.0.emb_layers.1.weight": "blocks.26.time_emb_proj.weight", - "model.diffusion_model.input_blocks.11.0.in_layers.0.bias": "blocks.26.norm1.bias", - "model.diffusion_model.input_blocks.11.0.in_layers.0.weight": "blocks.26.norm1.weight", - "model.diffusion_model.input_blocks.11.0.in_layers.2.bias": "blocks.26.conv1.bias", - "model.diffusion_model.input_blocks.11.0.in_layers.2.weight": "blocks.26.conv1.weight", - "model.diffusion_model.input_blocks.11.0.out_layers.0.bias": "blocks.26.norm2.bias", - "model.diffusion_model.input_blocks.11.0.out_layers.0.weight": "blocks.26.norm2.weight", - 
"model.diffusion_model.input_blocks.11.0.out_layers.3.bias": "blocks.26.conv2.bias", - "model.diffusion_model.input_blocks.11.0.out_layers.3.weight": "blocks.26.conv2.weight", - "model.diffusion_model.input_blocks.2.0.emb_layers.1.bias": "blocks.3.time_emb_proj.bias", - "model.diffusion_model.input_blocks.2.0.emb_layers.1.weight": "blocks.3.time_emb_proj.weight", - "model.diffusion_model.input_blocks.2.0.in_layers.0.bias": "blocks.3.norm1.bias", - "model.diffusion_model.input_blocks.2.0.in_layers.0.weight": "blocks.3.norm1.weight", - "model.diffusion_model.input_blocks.2.0.in_layers.2.bias": "blocks.3.conv1.bias", - "model.diffusion_model.input_blocks.2.0.in_layers.2.weight": "blocks.3.conv1.weight", - "model.diffusion_model.input_blocks.2.0.out_layers.0.bias": "blocks.3.norm2.bias", - "model.diffusion_model.input_blocks.2.0.out_layers.0.weight": "blocks.3.norm2.weight", - "model.diffusion_model.input_blocks.2.0.out_layers.3.bias": "blocks.3.conv2.bias", - "model.diffusion_model.input_blocks.2.0.out_layers.3.weight": "blocks.3.conv2.weight", - "model.diffusion_model.input_blocks.2.1.norm.bias": "blocks.4.norm.bias", - "model.diffusion_model.input_blocks.2.1.norm.weight": "blocks.4.norm.weight", - "model.diffusion_model.input_blocks.2.1.proj_in.bias": "blocks.4.proj_in.bias", - "model.diffusion_model.input_blocks.2.1.proj_in.weight": "blocks.4.proj_in.weight", - "model.diffusion_model.input_blocks.2.1.proj_out.bias": "blocks.4.proj_out.bias", - "model.diffusion_model.input_blocks.2.1.proj_out.weight": "blocks.4.proj_out.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn1.to_k.weight": "blocks.4.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.4.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.4.transformer_blocks.0.attn1.to_out.weight", - 
"model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn1.to_q.weight": "blocks.4.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn1.to_v.weight": "blocks.4.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn2.to_k.weight": "blocks.4.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.4.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.4.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn2.to_q.weight": "blocks.4.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn2.to_v.weight": "blocks.4.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.4.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.4.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.ff.net.2.bias": "blocks.4.transformer_blocks.0.ff.bias", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.ff.net.2.weight": "blocks.4.transformer_blocks.0.ff.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.norm1.bias": "blocks.4.transformer_blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.norm1.weight": "blocks.4.transformer_blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.norm2.bias": "blocks.4.transformer_blocks.0.norm2.bias", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.norm2.weight": "blocks.4.transformer_blocks.0.norm2.weight", - 
"model.diffusion_model.input_blocks.2.1.transformer_blocks.0.norm3.bias": "blocks.4.transformer_blocks.0.norm3.bias", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.norm3.weight": "blocks.4.transformer_blocks.0.norm3.weight", - "model.diffusion_model.input_blocks.3.0.op.bias": "blocks.6.conv.bias", - "model.diffusion_model.input_blocks.3.0.op.weight": "blocks.6.conv.weight", - "model.diffusion_model.input_blocks.4.0.emb_layers.1.bias": "blocks.8.time_emb_proj.bias", - "model.diffusion_model.input_blocks.4.0.emb_layers.1.weight": "blocks.8.time_emb_proj.weight", - "model.diffusion_model.input_blocks.4.0.in_layers.0.bias": "blocks.8.norm1.bias", - "model.diffusion_model.input_blocks.4.0.in_layers.0.weight": "blocks.8.norm1.weight", - "model.diffusion_model.input_blocks.4.0.in_layers.2.bias": "blocks.8.conv1.bias", - "model.diffusion_model.input_blocks.4.0.in_layers.2.weight": "blocks.8.conv1.weight", - "model.diffusion_model.input_blocks.4.0.out_layers.0.bias": "blocks.8.norm2.bias", - "model.diffusion_model.input_blocks.4.0.out_layers.0.weight": "blocks.8.norm2.weight", - "model.diffusion_model.input_blocks.4.0.out_layers.3.bias": "blocks.8.conv2.bias", - "model.diffusion_model.input_blocks.4.0.out_layers.3.weight": "blocks.8.conv2.weight", - "model.diffusion_model.input_blocks.4.0.skip_connection.bias": "blocks.8.conv_shortcut.bias", - "model.diffusion_model.input_blocks.4.0.skip_connection.weight": "blocks.8.conv_shortcut.weight", - "model.diffusion_model.input_blocks.4.1.norm.bias": "blocks.9.norm.bias", - "model.diffusion_model.input_blocks.4.1.norm.weight": "blocks.9.norm.weight", - "model.diffusion_model.input_blocks.4.1.proj_in.bias": "blocks.9.proj_in.bias", - "model.diffusion_model.input_blocks.4.1.proj_in.weight": "blocks.9.proj_in.weight", - "model.diffusion_model.input_blocks.4.1.proj_out.bias": "blocks.9.proj_out.bias", - "model.diffusion_model.input_blocks.4.1.proj_out.weight": "blocks.9.proj_out.weight", - 
"model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn1.to_k.weight": "blocks.9.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.9.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.9.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn1.to_q.weight": "blocks.9.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn1.to_v.weight": "blocks.9.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn2.to_k.weight": "blocks.9.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.9.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.9.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn2.to_q.weight": "blocks.9.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn2.to_v.weight": "blocks.9.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.9.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.9.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.ff.net.2.bias": "blocks.9.transformer_blocks.0.ff.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.ff.net.2.weight": "blocks.9.transformer_blocks.0.ff.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm1.bias": "blocks.9.transformer_blocks.0.norm1.bias", - 
"model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm1.weight": "blocks.9.transformer_blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm2.bias": "blocks.9.transformer_blocks.0.norm2.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm2.weight": "blocks.9.transformer_blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm3.bias": "blocks.9.transformer_blocks.0.norm3.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm3.weight": "blocks.9.transformer_blocks.0.norm3.weight", - "model.diffusion_model.input_blocks.5.0.emb_layers.1.bias": "blocks.11.time_emb_proj.bias", - "model.diffusion_model.input_blocks.5.0.emb_layers.1.weight": "blocks.11.time_emb_proj.weight", - "model.diffusion_model.input_blocks.5.0.in_layers.0.bias": "blocks.11.norm1.bias", - "model.diffusion_model.input_blocks.5.0.in_layers.0.weight": "blocks.11.norm1.weight", - "model.diffusion_model.input_blocks.5.0.in_layers.2.bias": "blocks.11.conv1.bias", - "model.diffusion_model.input_blocks.5.0.in_layers.2.weight": "blocks.11.conv1.weight", - "model.diffusion_model.input_blocks.5.0.out_layers.0.bias": "blocks.11.norm2.bias", - "model.diffusion_model.input_blocks.5.0.out_layers.0.weight": "blocks.11.norm2.weight", - "model.diffusion_model.input_blocks.5.0.out_layers.3.bias": "blocks.11.conv2.bias", - "model.diffusion_model.input_blocks.5.0.out_layers.3.weight": "blocks.11.conv2.weight", - "model.diffusion_model.input_blocks.5.1.norm.bias": "blocks.12.norm.bias", - "model.diffusion_model.input_blocks.5.1.norm.weight": "blocks.12.norm.weight", - "model.diffusion_model.input_blocks.5.1.proj_in.bias": "blocks.12.proj_in.bias", - "model.diffusion_model.input_blocks.5.1.proj_in.weight": "blocks.12.proj_in.weight", - "model.diffusion_model.input_blocks.5.1.proj_out.bias": "blocks.12.proj_out.bias", - "model.diffusion_model.input_blocks.5.1.proj_out.weight": 
"blocks.12.proj_out.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn1.to_k.weight": "blocks.12.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.12.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.12.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn1.to_q.weight": "blocks.12.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn1.to_v.weight": "blocks.12.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn2.to_k.weight": "blocks.12.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.12.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.12.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn2.to_q.weight": "blocks.12.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn2.to_v.weight": "blocks.12.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.12.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.12.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.ff.net.2.bias": "blocks.12.transformer_blocks.0.ff.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.ff.net.2.weight": "blocks.12.transformer_blocks.0.ff.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm1.bias": 
"blocks.12.transformer_blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm1.weight": "blocks.12.transformer_blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm2.bias": "blocks.12.transformer_blocks.0.norm2.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm2.weight": "blocks.12.transformer_blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm3.bias": "blocks.12.transformer_blocks.0.norm3.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm3.weight": "blocks.12.transformer_blocks.0.norm3.weight", - "model.diffusion_model.input_blocks.6.0.op.bias": "blocks.14.conv.bias", - "model.diffusion_model.input_blocks.6.0.op.weight": "blocks.14.conv.weight", - "model.diffusion_model.input_blocks.7.0.emb_layers.1.bias": "blocks.16.time_emb_proj.bias", - "model.diffusion_model.input_blocks.7.0.emb_layers.1.weight": "blocks.16.time_emb_proj.weight", - "model.diffusion_model.input_blocks.7.0.in_layers.0.bias": "blocks.16.norm1.bias", - "model.diffusion_model.input_blocks.7.0.in_layers.0.weight": "blocks.16.norm1.weight", - "model.diffusion_model.input_blocks.7.0.in_layers.2.bias": "blocks.16.conv1.bias", - "model.diffusion_model.input_blocks.7.0.in_layers.2.weight": "blocks.16.conv1.weight", - "model.diffusion_model.input_blocks.7.0.out_layers.0.bias": "blocks.16.norm2.bias", - "model.diffusion_model.input_blocks.7.0.out_layers.0.weight": "blocks.16.norm2.weight", - "model.diffusion_model.input_blocks.7.0.out_layers.3.bias": "blocks.16.conv2.bias", - "model.diffusion_model.input_blocks.7.0.out_layers.3.weight": "blocks.16.conv2.weight", - "model.diffusion_model.input_blocks.7.0.skip_connection.bias": "blocks.16.conv_shortcut.bias", - "model.diffusion_model.input_blocks.7.0.skip_connection.weight": "blocks.16.conv_shortcut.weight", - "model.diffusion_model.input_blocks.7.1.norm.bias": "blocks.17.norm.bias", - 
"model.diffusion_model.input_blocks.7.1.norm.weight": "blocks.17.norm.weight", - "model.diffusion_model.input_blocks.7.1.proj_in.bias": "blocks.17.proj_in.bias", - "model.diffusion_model.input_blocks.7.1.proj_in.weight": "blocks.17.proj_in.weight", - "model.diffusion_model.input_blocks.7.1.proj_out.bias": "blocks.17.proj_out.bias", - "model.diffusion_model.input_blocks.7.1.proj_out.weight": "blocks.17.proj_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn1.to_k.weight": "blocks.17.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.17.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.17.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn1.to_q.weight": "blocks.17.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn1.to_v.weight": "blocks.17.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn2.to_k.weight": "blocks.17.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.17.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.17.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn2.to_q.weight": "blocks.17.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn2.to_v.weight": "blocks.17.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.17.transformer_blocks.0.act_fn.proj.bias", - 
"model.diffusion_model.input_blocks.7.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.17.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.ff.net.2.bias": "blocks.17.transformer_blocks.0.ff.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.ff.net.2.weight": "blocks.17.transformer_blocks.0.ff.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm1.bias": "blocks.17.transformer_blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm1.weight": "blocks.17.transformer_blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm2.bias": "blocks.17.transformer_blocks.0.norm2.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm2.weight": "blocks.17.transformer_blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm3.bias": "blocks.17.transformer_blocks.0.norm3.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm3.weight": "blocks.17.transformer_blocks.0.norm3.weight", - "model.diffusion_model.input_blocks.8.0.emb_layers.1.bias": "blocks.19.time_emb_proj.bias", - "model.diffusion_model.input_blocks.8.0.emb_layers.1.weight": "blocks.19.time_emb_proj.weight", - "model.diffusion_model.input_blocks.8.0.in_layers.0.bias": "blocks.19.norm1.bias", - "model.diffusion_model.input_blocks.8.0.in_layers.0.weight": "blocks.19.norm1.weight", - "model.diffusion_model.input_blocks.8.0.in_layers.2.bias": "blocks.19.conv1.bias", - "model.diffusion_model.input_blocks.8.0.in_layers.2.weight": "blocks.19.conv1.weight", - "model.diffusion_model.input_blocks.8.0.out_layers.0.bias": "blocks.19.norm2.bias", - "model.diffusion_model.input_blocks.8.0.out_layers.0.weight": "blocks.19.norm2.weight", - "model.diffusion_model.input_blocks.8.0.out_layers.3.bias": "blocks.19.conv2.bias", - "model.diffusion_model.input_blocks.8.0.out_layers.3.weight": 
"blocks.19.conv2.weight", - "model.diffusion_model.input_blocks.8.1.norm.bias": "blocks.20.norm.bias", - "model.diffusion_model.input_blocks.8.1.norm.weight": "blocks.20.norm.weight", - "model.diffusion_model.input_blocks.8.1.proj_in.bias": "blocks.20.proj_in.bias", - "model.diffusion_model.input_blocks.8.1.proj_in.weight": "blocks.20.proj_in.weight", - "model.diffusion_model.input_blocks.8.1.proj_out.bias": "blocks.20.proj_out.bias", - "model.diffusion_model.input_blocks.8.1.proj_out.weight": "blocks.20.proj_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn1.to_k.weight": "blocks.20.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.20.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.20.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn1.to_q.weight": "blocks.20.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn1.to_v.weight": "blocks.20.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn2.to_k.weight": "blocks.20.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.20.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.20.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn2.to_q.weight": "blocks.20.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn2.to_v.weight": "blocks.20.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.ff.net.0.proj.bias": 
"blocks.20.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.20.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.ff.net.2.bias": "blocks.20.transformer_blocks.0.ff.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.ff.net.2.weight": "blocks.20.transformer_blocks.0.ff.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm1.bias": "blocks.20.transformer_blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm1.weight": "blocks.20.transformer_blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm2.bias": "blocks.20.transformer_blocks.0.norm2.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm2.weight": "blocks.20.transformer_blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm3.bias": "blocks.20.transformer_blocks.0.norm3.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm3.weight": "blocks.20.transformer_blocks.0.norm3.weight", - "model.diffusion_model.input_blocks.9.0.op.bias": "blocks.22.conv.bias", - "model.diffusion_model.input_blocks.9.0.op.weight": "blocks.22.conv.weight", - "model.diffusion_model.middle_block.0.emb_layers.1.bias": "blocks.28.time_emb_proj.bias", - "model.diffusion_model.middle_block.0.emb_layers.1.weight": "blocks.28.time_emb_proj.weight", - "model.diffusion_model.middle_block.0.in_layers.0.bias": "blocks.28.norm1.bias", - "model.diffusion_model.middle_block.0.in_layers.0.weight": "blocks.28.norm1.weight", - "model.diffusion_model.middle_block.0.in_layers.2.bias": "blocks.28.conv1.bias", - "model.diffusion_model.middle_block.0.in_layers.2.weight": "blocks.28.conv1.weight", - "model.diffusion_model.middle_block.0.out_layers.0.bias": "blocks.28.norm2.bias", - 
"model.diffusion_model.middle_block.0.out_layers.0.weight": "blocks.28.norm2.weight", - "model.diffusion_model.middle_block.0.out_layers.3.bias": "blocks.28.conv2.bias", - "model.diffusion_model.middle_block.0.out_layers.3.weight": "blocks.28.conv2.weight", - "model.diffusion_model.middle_block.1.norm.bias": "blocks.29.norm.bias", - "model.diffusion_model.middle_block.1.norm.weight": "blocks.29.norm.weight", - "model.diffusion_model.middle_block.1.proj_in.bias": "blocks.29.proj_in.bias", - "model.diffusion_model.middle_block.1.proj_in.weight": "blocks.29.proj_in.weight", - "model.diffusion_model.middle_block.1.proj_out.bias": "blocks.29.proj_out.bias", - "model.diffusion_model.middle_block.1.proj_out.weight": "blocks.29.proj_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn1.to_k.weight": "blocks.29.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.29.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.29.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn1.to_q.weight": "blocks.29.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn1.to_v.weight": "blocks.29.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn2.to_k.weight": "blocks.29.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.29.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.29.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn2.to_q.weight": "blocks.29.transformer_blocks.0.attn2.to_q.weight", - 
"model.diffusion_model.middle_block.1.transformer_blocks.0.attn2.to_v.weight": "blocks.29.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.29.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.29.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.ff.net.2.bias": "blocks.29.transformer_blocks.0.ff.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.ff.net.2.weight": "blocks.29.transformer_blocks.0.ff.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm1.bias": "blocks.29.transformer_blocks.0.norm1.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm1.weight": "blocks.29.transformer_blocks.0.norm1.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm2.bias": "blocks.29.transformer_blocks.0.norm2.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm2.weight": "blocks.29.transformer_blocks.0.norm2.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm3.bias": "blocks.29.transformer_blocks.0.norm3.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm3.weight": "blocks.29.transformer_blocks.0.norm3.weight", - "model.diffusion_model.middle_block.2.emb_layers.1.bias": "blocks.30.time_emb_proj.bias", - "model.diffusion_model.middle_block.2.emb_layers.1.weight": "blocks.30.time_emb_proj.weight", - "model.diffusion_model.middle_block.2.in_layers.0.bias": "blocks.30.norm1.bias", - "model.diffusion_model.middle_block.2.in_layers.0.weight": "blocks.30.norm1.weight", - "model.diffusion_model.middle_block.2.in_layers.2.bias": "blocks.30.conv1.bias", - "model.diffusion_model.middle_block.2.in_layers.2.weight": "blocks.30.conv1.weight", - "model.diffusion_model.middle_block.2.out_layers.0.bias": "blocks.30.norm2.bias", - 
"model.diffusion_model.middle_block.2.out_layers.0.weight": "blocks.30.norm2.weight", - "model.diffusion_model.middle_block.2.out_layers.3.bias": "blocks.30.conv2.bias", - "model.diffusion_model.middle_block.2.out_layers.3.weight": "blocks.30.conv2.weight", - "model.diffusion_model.out.0.bias": "conv_norm_out.bias", - "model.diffusion_model.out.0.weight": "conv_norm_out.weight", - "model.diffusion_model.out.2.bias": "conv_out.bias", - "model.diffusion_model.out.2.weight": "conv_out.weight", - "model.diffusion_model.output_blocks.0.0.emb_layers.1.bias": "blocks.32.time_emb_proj.bias", - "model.diffusion_model.output_blocks.0.0.emb_layers.1.weight": "blocks.32.time_emb_proj.weight", - "model.diffusion_model.output_blocks.0.0.in_layers.0.bias": "blocks.32.norm1.bias", - "model.diffusion_model.output_blocks.0.0.in_layers.0.weight": "blocks.32.norm1.weight", - "model.diffusion_model.output_blocks.0.0.in_layers.2.bias": "blocks.32.conv1.bias", - "model.diffusion_model.output_blocks.0.0.in_layers.2.weight": "blocks.32.conv1.weight", - "model.diffusion_model.output_blocks.0.0.out_layers.0.bias": "blocks.32.norm2.bias", - "model.diffusion_model.output_blocks.0.0.out_layers.0.weight": "blocks.32.norm2.weight", - "model.diffusion_model.output_blocks.0.0.out_layers.3.bias": "blocks.32.conv2.bias", - "model.diffusion_model.output_blocks.0.0.out_layers.3.weight": "blocks.32.conv2.weight", - "model.diffusion_model.output_blocks.0.0.skip_connection.bias": "blocks.32.conv_shortcut.bias", - "model.diffusion_model.output_blocks.0.0.skip_connection.weight": "blocks.32.conv_shortcut.weight", - "model.diffusion_model.output_blocks.1.0.emb_layers.1.bias": "blocks.34.time_emb_proj.bias", - "model.diffusion_model.output_blocks.1.0.emb_layers.1.weight": "blocks.34.time_emb_proj.weight", - "model.diffusion_model.output_blocks.1.0.in_layers.0.bias": "blocks.34.norm1.bias", - "model.diffusion_model.output_blocks.1.0.in_layers.0.weight": "blocks.34.norm1.weight", - 
"model.diffusion_model.output_blocks.1.0.in_layers.2.bias": "blocks.34.conv1.bias", - "model.diffusion_model.output_blocks.1.0.in_layers.2.weight": "blocks.34.conv1.weight", - "model.diffusion_model.output_blocks.1.0.out_layers.0.bias": "blocks.34.norm2.bias", - "model.diffusion_model.output_blocks.1.0.out_layers.0.weight": "blocks.34.norm2.weight", - "model.diffusion_model.output_blocks.1.0.out_layers.3.bias": "blocks.34.conv2.bias", - "model.diffusion_model.output_blocks.1.0.out_layers.3.weight": "blocks.34.conv2.weight", - "model.diffusion_model.output_blocks.1.0.skip_connection.bias": "blocks.34.conv_shortcut.bias", - "model.diffusion_model.output_blocks.1.0.skip_connection.weight": "blocks.34.conv_shortcut.weight", - "model.diffusion_model.output_blocks.10.0.emb_layers.1.bias": "blocks.62.time_emb_proj.bias", - "model.diffusion_model.output_blocks.10.0.emb_layers.1.weight": "blocks.62.time_emb_proj.weight", - "model.diffusion_model.output_blocks.10.0.in_layers.0.bias": "blocks.62.norm1.bias", - "model.diffusion_model.output_blocks.10.0.in_layers.0.weight": "blocks.62.norm1.weight", - "model.diffusion_model.output_blocks.10.0.in_layers.2.bias": "blocks.62.conv1.bias", - "model.diffusion_model.output_blocks.10.0.in_layers.2.weight": "blocks.62.conv1.weight", - "model.diffusion_model.output_blocks.10.0.out_layers.0.bias": "blocks.62.norm2.bias", - "model.diffusion_model.output_blocks.10.0.out_layers.0.weight": "blocks.62.norm2.weight", - "model.diffusion_model.output_blocks.10.0.out_layers.3.bias": "blocks.62.conv2.bias", - "model.diffusion_model.output_blocks.10.0.out_layers.3.weight": "blocks.62.conv2.weight", - "model.diffusion_model.output_blocks.10.0.skip_connection.bias": "blocks.62.conv_shortcut.bias", - "model.diffusion_model.output_blocks.10.0.skip_connection.weight": "blocks.62.conv_shortcut.weight", - "model.diffusion_model.output_blocks.10.1.norm.bias": "blocks.63.norm.bias", - "model.diffusion_model.output_blocks.10.1.norm.weight": 
"blocks.63.norm.weight", - "model.diffusion_model.output_blocks.10.1.proj_in.bias": "blocks.63.proj_in.bias", - "model.diffusion_model.output_blocks.10.1.proj_in.weight": "blocks.63.proj_in.weight", - "model.diffusion_model.output_blocks.10.1.proj_out.bias": "blocks.63.proj_out.bias", - "model.diffusion_model.output_blocks.10.1.proj_out.weight": "blocks.63.proj_out.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn1.to_k.weight": "blocks.63.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.63.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.63.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn1.to_q.weight": "blocks.63.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn1.to_v.weight": "blocks.63.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn2.to_k.weight": "blocks.63.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.63.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.63.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn2.to_q.weight": "blocks.63.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn2.to_v.weight": "blocks.63.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.63.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.ff.net.0.proj.weight": 
"blocks.63.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.ff.net.2.bias": "blocks.63.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.ff.net.2.weight": "blocks.63.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.norm1.bias": "blocks.63.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.norm1.weight": "blocks.63.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.norm2.bias": "blocks.63.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.norm2.weight": "blocks.63.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.norm3.bias": "blocks.63.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.norm3.weight": "blocks.63.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.11.0.emb_layers.1.bias": "blocks.65.time_emb_proj.bias", - "model.diffusion_model.output_blocks.11.0.emb_layers.1.weight": "blocks.65.time_emb_proj.weight", - "model.diffusion_model.output_blocks.11.0.in_layers.0.bias": "blocks.65.norm1.bias", - "model.diffusion_model.output_blocks.11.0.in_layers.0.weight": "blocks.65.norm1.weight", - "model.diffusion_model.output_blocks.11.0.in_layers.2.bias": "blocks.65.conv1.bias", - "model.diffusion_model.output_blocks.11.0.in_layers.2.weight": "blocks.65.conv1.weight", - "model.diffusion_model.output_blocks.11.0.out_layers.0.bias": "blocks.65.norm2.bias", - "model.diffusion_model.output_blocks.11.0.out_layers.0.weight": "blocks.65.norm2.weight", - "model.diffusion_model.output_blocks.11.0.out_layers.3.bias": "blocks.65.conv2.bias", - "model.diffusion_model.output_blocks.11.0.out_layers.3.weight": "blocks.65.conv2.weight", - 
"model.diffusion_model.output_blocks.11.0.skip_connection.bias": "blocks.65.conv_shortcut.bias", - "model.diffusion_model.output_blocks.11.0.skip_connection.weight": "blocks.65.conv_shortcut.weight", - "model.diffusion_model.output_blocks.11.1.norm.bias": "blocks.66.norm.bias", - "model.diffusion_model.output_blocks.11.1.norm.weight": "blocks.66.norm.weight", - "model.diffusion_model.output_blocks.11.1.proj_in.bias": "blocks.66.proj_in.bias", - "model.diffusion_model.output_blocks.11.1.proj_in.weight": "blocks.66.proj_in.weight", - "model.diffusion_model.output_blocks.11.1.proj_out.bias": "blocks.66.proj_out.bias", - "model.diffusion_model.output_blocks.11.1.proj_out.weight": "blocks.66.proj_out.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn1.to_k.weight": "blocks.66.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.66.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.66.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn1.to_q.weight": "blocks.66.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn1.to_v.weight": "blocks.66.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn2.to_k.weight": "blocks.66.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.66.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.66.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn2.to_q.weight": "blocks.66.transformer_blocks.0.attn2.to_q.weight", - 
"model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn2.to_v.weight": "blocks.66.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.66.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.66.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.ff.net.2.bias": "blocks.66.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.ff.net.2.weight": "blocks.66.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.norm1.bias": "blocks.66.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.norm1.weight": "blocks.66.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.norm2.bias": "blocks.66.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.norm2.weight": "blocks.66.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.norm3.bias": "blocks.66.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.norm3.weight": "blocks.66.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.2.0.emb_layers.1.bias": "blocks.36.time_emb_proj.bias", - "model.diffusion_model.output_blocks.2.0.emb_layers.1.weight": "blocks.36.time_emb_proj.weight", - "model.diffusion_model.output_blocks.2.0.in_layers.0.bias": "blocks.36.norm1.bias", - "model.diffusion_model.output_blocks.2.0.in_layers.0.weight": "blocks.36.norm1.weight", - "model.diffusion_model.output_blocks.2.0.in_layers.2.bias": "blocks.36.conv1.bias", - "model.diffusion_model.output_blocks.2.0.in_layers.2.weight": "blocks.36.conv1.weight", - 
"model.diffusion_model.output_blocks.2.0.out_layers.0.bias": "blocks.36.norm2.bias", - "model.diffusion_model.output_blocks.2.0.out_layers.0.weight": "blocks.36.norm2.weight", - "model.diffusion_model.output_blocks.2.0.out_layers.3.bias": "blocks.36.conv2.bias", - "model.diffusion_model.output_blocks.2.0.out_layers.3.weight": "blocks.36.conv2.weight", - "model.diffusion_model.output_blocks.2.0.skip_connection.bias": "blocks.36.conv_shortcut.bias", - "model.diffusion_model.output_blocks.2.0.skip_connection.weight": "blocks.36.conv_shortcut.weight", - "model.diffusion_model.output_blocks.2.1.conv.bias": "blocks.37.conv.bias", - "model.diffusion_model.output_blocks.2.1.conv.weight": "blocks.37.conv.weight", - "model.diffusion_model.output_blocks.3.0.emb_layers.1.bias": "blocks.39.time_emb_proj.bias", - "model.diffusion_model.output_blocks.3.0.emb_layers.1.weight": "blocks.39.time_emb_proj.weight", - "model.diffusion_model.output_blocks.3.0.in_layers.0.bias": "blocks.39.norm1.bias", - "model.diffusion_model.output_blocks.3.0.in_layers.0.weight": "blocks.39.norm1.weight", - "model.diffusion_model.output_blocks.3.0.in_layers.2.bias": "blocks.39.conv1.bias", - "model.diffusion_model.output_blocks.3.0.in_layers.2.weight": "blocks.39.conv1.weight", - "model.diffusion_model.output_blocks.3.0.out_layers.0.bias": "blocks.39.norm2.bias", - "model.diffusion_model.output_blocks.3.0.out_layers.0.weight": "blocks.39.norm2.weight", - "model.diffusion_model.output_blocks.3.0.out_layers.3.bias": "blocks.39.conv2.bias", - "model.diffusion_model.output_blocks.3.0.out_layers.3.weight": "blocks.39.conv2.weight", - "model.diffusion_model.output_blocks.3.0.skip_connection.bias": "blocks.39.conv_shortcut.bias", - "model.diffusion_model.output_blocks.3.0.skip_connection.weight": "blocks.39.conv_shortcut.weight", - "model.diffusion_model.output_blocks.3.1.norm.bias": "blocks.40.norm.bias", - "model.diffusion_model.output_blocks.3.1.norm.weight": "blocks.40.norm.weight", - 
"model.diffusion_model.output_blocks.3.1.proj_in.bias": "blocks.40.proj_in.bias", - "model.diffusion_model.output_blocks.3.1.proj_in.weight": "blocks.40.proj_in.weight", - "model.diffusion_model.output_blocks.3.1.proj_out.bias": "blocks.40.proj_out.bias", - "model.diffusion_model.output_blocks.3.1.proj_out.weight": "blocks.40.proj_out.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn1.to_k.weight": "blocks.40.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.40.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.40.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn1.to_q.weight": "blocks.40.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn1.to_v.weight": "blocks.40.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn2.to_k.weight": "blocks.40.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.40.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.40.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn2.to_q.weight": "blocks.40.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn2.to_v.weight": "blocks.40.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.40.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.40.transformer_blocks.0.act_fn.proj.weight", - 
"model.diffusion_model.output_blocks.3.1.transformer_blocks.0.ff.net.2.bias": "blocks.40.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.ff.net.2.weight": "blocks.40.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm1.bias": "blocks.40.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm1.weight": "blocks.40.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm2.bias": "blocks.40.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm2.weight": "blocks.40.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm3.bias": "blocks.40.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm3.weight": "blocks.40.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.4.0.emb_layers.1.bias": "blocks.42.time_emb_proj.bias", - "model.diffusion_model.output_blocks.4.0.emb_layers.1.weight": "blocks.42.time_emb_proj.weight", - "model.diffusion_model.output_blocks.4.0.in_layers.0.bias": "blocks.42.norm1.bias", - "model.diffusion_model.output_blocks.4.0.in_layers.0.weight": "blocks.42.norm1.weight", - "model.diffusion_model.output_blocks.4.0.in_layers.2.bias": "blocks.42.conv1.bias", - "model.diffusion_model.output_blocks.4.0.in_layers.2.weight": "blocks.42.conv1.weight", - "model.diffusion_model.output_blocks.4.0.out_layers.0.bias": "blocks.42.norm2.bias", - "model.diffusion_model.output_blocks.4.0.out_layers.0.weight": "blocks.42.norm2.weight", - "model.diffusion_model.output_blocks.4.0.out_layers.3.bias": "blocks.42.conv2.bias", - "model.diffusion_model.output_blocks.4.0.out_layers.3.weight": "blocks.42.conv2.weight", - "model.diffusion_model.output_blocks.4.0.skip_connection.bias": "blocks.42.conv_shortcut.bias", - 
"model.diffusion_model.output_blocks.4.0.skip_connection.weight": "blocks.42.conv_shortcut.weight", - "model.diffusion_model.output_blocks.4.1.norm.bias": "blocks.43.norm.bias", - "model.diffusion_model.output_blocks.4.1.norm.weight": "blocks.43.norm.weight", - "model.diffusion_model.output_blocks.4.1.proj_in.bias": "blocks.43.proj_in.bias", - "model.diffusion_model.output_blocks.4.1.proj_in.weight": "blocks.43.proj_in.weight", - "model.diffusion_model.output_blocks.4.1.proj_out.bias": "blocks.43.proj_out.bias", - "model.diffusion_model.output_blocks.4.1.proj_out.weight": "blocks.43.proj_out.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn1.to_k.weight": "blocks.43.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.43.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.43.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn1.to_q.weight": "blocks.43.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn1.to_v.weight": "blocks.43.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn2.to_k.weight": "blocks.43.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.43.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.43.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn2.to_q.weight": "blocks.43.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn2.to_v.weight": "blocks.43.transformer_blocks.0.attn2.to_v.weight", - 
"model.diffusion_model.output_blocks.4.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.43.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.43.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.ff.net.2.bias": "blocks.43.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.ff.net.2.weight": "blocks.43.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm1.bias": "blocks.43.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm1.weight": "blocks.43.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm2.bias": "blocks.43.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm2.weight": "blocks.43.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm3.bias": "blocks.43.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm3.weight": "blocks.43.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.5.0.emb_layers.1.bias": "blocks.45.time_emb_proj.bias", - "model.diffusion_model.output_blocks.5.0.emb_layers.1.weight": "blocks.45.time_emb_proj.weight", - "model.diffusion_model.output_blocks.5.0.in_layers.0.bias": "blocks.45.norm1.bias", - "model.diffusion_model.output_blocks.5.0.in_layers.0.weight": "blocks.45.norm1.weight", - "model.diffusion_model.output_blocks.5.0.in_layers.2.bias": "blocks.45.conv1.bias", - "model.diffusion_model.output_blocks.5.0.in_layers.2.weight": "blocks.45.conv1.weight", - "model.diffusion_model.output_blocks.5.0.out_layers.0.bias": "blocks.45.norm2.bias", - "model.diffusion_model.output_blocks.5.0.out_layers.0.weight": "blocks.45.norm2.weight", - 
"model.diffusion_model.output_blocks.5.0.out_layers.3.bias": "blocks.45.conv2.bias", - "model.diffusion_model.output_blocks.5.0.out_layers.3.weight": "blocks.45.conv2.weight", - "model.diffusion_model.output_blocks.5.0.skip_connection.bias": "blocks.45.conv_shortcut.bias", - "model.diffusion_model.output_blocks.5.0.skip_connection.weight": "blocks.45.conv_shortcut.weight", - "model.diffusion_model.output_blocks.5.1.norm.bias": "blocks.46.norm.bias", - "model.diffusion_model.output_blocks.5.1.norm.weight": "blocks.46.norm.weight", - "model.diffusion_model.output_blocks.5.1.proj_in.bias": "blocks.46.proj_in.bias", - "model.diffusion_model.output_blocks.5.1.proj_in.weight": "blocks.46.proj_in.weight", - "model.diffusion_model.output_blocks.5.1.proj_out.bias": "blocks.46.proj_out.bias", - "model.diffusion_model.output_blocks.5.1.proj_out.weight": "blocks.46.proj_out.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn1.to_k.weight": "blocks.46.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.46.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.46.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn1.to_q.weight": "blocks.46.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn1.to_v.weight": "blocks.46.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn2.to_k.weight": "blocks.46.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.46.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.46.transformer_blocks.0.attn2.to_out.weight", - 
"model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn2.to_q.weight": "blocks.46.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn2.to_v.weight": "blocks.46.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.46.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.46.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.ff.net.2.bias": "blocks.46.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.ff.net.2.weight": "blocks.46.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm1.bias": "blocks.46.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm1.weight": "blocks.46.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm2.bias": "blocks.46.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm2.weight": "blocks.46.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm3.bias": "blocks.46.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm3.weight": "blocks.46.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.5.2.conv.bias": "blocks.47.conv.bias", - "model.diffusion_model.output_blocks.5.2.conv.weight": "blocks.47.conv.weight", - "model.diffusion_model.output_blocks.6.0.emb_layers.1.bias": "blocks.49.time_emb_proj.bias", - "model.diffusion_model.output_blocks.6.0.emb_layers.1.weight": "blocks.49.time_emb_proj.weight", - "model.diffusion_model.output_blocks.6.0.in_layers.0.bias": "blocks.49.norm1.bias", - 
"model.diffusion_model.output_blocks.6.0.in_layers.0.weight": "blocks.49.norm1.weight", - "model.diffusion_model.output_blocks.6.0.in_layers.2.bias": "blocks.49.conv1.bias", - "model.diffusion_model.output_blocks.6.0.in_layers.2.weight": "blocks.49.conv1.weight", - "model.diffusion_model.output_blocks.6.0.out_layers.0.bias": "blocks.49.norm2.bias", - "model.diffusion_model.output_blocks.6.0.out_layers.0.weight": "blocks.49.norm2.weight", - "model.diffusion_model.output_blocks.6.0.out_layers.3.bias": "blocks.49.conv2.bias", - "model.diffusion_model.output_blocks.6.0.out_layers.3.weight": "blocks.49.conv2.weight", - "model.diffusion_model.output_blocks.6.0.skip_connection.bias": "blocks.49.conv_shortcut.bias", - "model.diffusion_model.output_blocks.6.0.skip_connection.weight": "blocks.49.conv_shortcut.weight", - "model.diffusion_model.output_blocks.6.1.norm.bias": "blocks.50.norm.bias", - "model.diffusion_model.output_blocks.6.1.norm.weight": "blocks.50.norm.weight", - "model.diffusion_model.output_blocks.6.1.proj_in.bias": "blocks.50.proj_in.bias", - "model.diffusion_model.output_blocks.6.1.proj_in.weight": "blocks.50.proj_in.weight", - "model.diffusion_model.output_blocks.6.1.proj_out.bias": "blocks.50.proj_out.bias", - "model.diffusion_model.output_blocks.6.1.proj_out.weight": "blocks.50.proj_out.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn1.to_k.weight": "blocks.50.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.50.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.50.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn1.to_q.weight": "blocks.50.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn1.to_v.weight": 
"blocks.50.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn2.to_k.weight": "blocks.50.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.50.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.50.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn2.to_q.weight": "blocks.50.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn2.to_v.weight": "blocks.50.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.50.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.50.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.ff.net.2.bias": "blocks.50.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.ff.net.2.weight": "blocks.50.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.norm1.bias": "blocks.50.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.norm1.weight": "blocks.50.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.norm2.bias": "blocks.50.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.norm2.weight": "blocks.50.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.norm3.bias": "blocks.50.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.norm3.weight": 
"blocks.50.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.7.0.emb_layers.1.bias": "blocks.52.time_emb_proj.bias", - "model.diffusion_model.output_blocks.7.0.emb_layers.1.weight": "blocks.52.time_emb_proj.weight", - "model.diffusion_model.output_blocks.7.0.in_layers.0.bias": "blocks.52.norm1.bias", - "model.diffusion_model.output_blocks.7.0.in_layers.0.weight": "blocks.52.norm1.weight", - "model.diffusion_model.output_blocks.7.0.in_layers.2.bias": "blocks.52.conv1.bias", - "model.diffusion_model.output_blocks.7.0.in_layers.2.weight": "blocks.52.conv1.weight", - "model.diffusion_model.output_blocks.7.0.out_layers.0.bias": "blocks.52.norm2.bias", - "model.diffusion_model.output_blocks.7.0.out_layers.0.weight": "blocks.52.norm2.weight", - "model.diffusion_model.output_blocks.7.0.out_layers.3.bias": "blocks.52.conv2.bias", - "model.diffusion_model.output_blocks.7.0.out_layers.3.weight": "blocks.52.conv2.weight", - "model.diffusion_model.output_blocks.7.0.skip_connection.bias": "blocks.52.conv_shortcut.bias", - "model.diffusion_model.output_blocks.7.0.skip_connection.weight": "blocks.52.conv_shortcut.weight", - "model.diffusion_model.output_blocks.7.1.norm.bias": "blocks.53.norm.bias", - "model.diffusion_model.output_blocks.7.1.norm.weight": "blocks.53.norm.weight", - "model.diffusion_model.output_blocks.7.1.proj_in.bias": "blocks.53.proj_in.bias", - "model.diffusion_model.output_blocks.7.1.proj_in.weight": "blocks.53.proj_in.weight", - "model.diffusion_model.output_blocks.7.1.proj_out.bias": "blocks.53.proj_out.bias", - "model.diffusion_model.output_blocks.7.1.proj_out.weight": "blocks.53.proj_out.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn1.to_k.weight": "blocks.53.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.53.transformer_blocks.0.attn1.to_out.bias", - 
"model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.53.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn1.to_q.weight": "blocks.53.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn1.to_v.weight": "blocks.53.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn2.to_k.weight": "blocks.53.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.53.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.53.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn2.to_q.weight": "blocks.53.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn2.to_v.weight": "blocks.53.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.53.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.53.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.ff.net.2.bias": "blocks.53.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.ff.net.2.weight": "blocks.53.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.norm1.bias": "blocks.53.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.norm1.weight": "blocks.53.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.norm2.bias": "blocks.53.transformer_blocks.0.norm2.bias", - 
"model.diffusion_model.output_blocks.7.1.transformer_blocks.0.norm2.weight": "blocks.53.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.norm3.bias": "blocks.53.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.norm3.weight": "blocks.53.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.8.0.emb_layers.1.bias": "blocks.55.time_emb_proj.bias", - "model.diffusion_model.output_blocks.8.0.emb_layers.1.weight": "blocks.55.time_emb_proj.weight", - "model.diffusion_model.output_blocks.8.0.in_layers.0.bias": "blocks.55.norm1.bias", - "model.diffusion_model.output_blocks.8.0.in_layers.0.weight": "blocks.55.norm1.weight", - "model.diffusion_model.output_blocks.8.0.in_layers.2.bias": "blocks.55.conv1.bias", - "model.diffusion_model.output_blocks.8.0.in_layers.2.weight": "blocks.55.conv1.weight", - "model.diffusion_model.output_blocks.8.0.out_layers.0.bias": "blocks.55.norm2.bias", - "model.diffusion_model.output_blocks.8.0.out_layers.0.weight": "blocks.55.norm2.weight", - "model.diffusion_model.output_blocks.8.0.out_layers.3.bias": "blocks.55.conv2.bias", - "model.diffusion_model.output_blocks.8.0.out_layers.3.weight": "blocks.55.conv2.weight", - "model.diffusion_model.output_blocks.8.0.skip_connection.bias": "blocks.55.conv_shortcut.bias", - "model.diffusion_model.output_blocks.8.0.skip_connection.weight": "blocks.55.conv_shortcut.weight", - "model.diffusion_model.output_blocks.8.1.norm.bias": "blocks.56.norm.bias", - "model.diffusion_model.output_blocks.8.1.norm.weight": "blocks.56.norm.weight", - "model.diffusion_model.output_blocks.8.1.proj_in.bias": "blocks.56.proj_in.bias", - "model.diffusion_model.output_blocks.8.1.proj_in.weight": "blocks.56.proj_in.weight", - "model.diffusion_model.output_blocks.8.1.proj_out.bias": "blocks.56.proj_out.bias", - "model.diffusion_model.output_blocks.8.1.proj_out.weight": "blocks.56.proj_out.weight", - 
"model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn1.to_k.weight": "blocks.56.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.56.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.56.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn1.to_q.weight": "blocks.56.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn1.to_v.weight": "blocks.56.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn2.to_k.weight": "blocks.56.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.56.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.56.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn2.to_q.weight": "blocks.56.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn2.to_v.weight": "blocks.56.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.56.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.56.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.ff.net.2.bias": "blocks.56.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.ff.net.2.weight": "blocks.56.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.norm1.bias": 
"blocks.56.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.norm1.weight": "blocks.56.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.norm2.bias": "blocks.56.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.norm2.weight": "blocks.56.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.norm3.bias": "blocks.56.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.norm3.weight": "blocks.56.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.8.2.conv.bias": "blocks.57.conv.bias", - "model.diffusion_model.output_blocks.8.2.conv.weight": "blocks.57.conv.weight", - "model.diffusion_model.output_blocks.9.0.emb_layers.1.bias": "blocks.59.time_emb_proj.bias", - "model.diffusion_model.output_blocks.9.0.emb_layers.1.weight": "blocks.59.time_emb_proj.weight", - "model.diffusion_model.output_blocks.9.0.in_layers.0.bias": "blocks.59.norm1.bias", - "model.diffusion_model.output_blocks.9.0.in_layers.0.weight": "blocks.59.norm1.weight", - "model.diffusion_model.output_blocks.9.0.in_layers.2.bias": "blocks.59.conv1.bias", - "model.diffusion_model.output_blocks.9.0.in_layers.2.weight": "blocks.59.conv1.weight", - "model.diffusion_model.output_blocks.9.0.out_layers.0.bias": "blocks.59.norm2.bias", - "model.diffusion_model.output_blocks.9.0.out_layers.0.weight": "blocks.59.norm2.weight", - "model.diffusion_model.output_blocks.9.0.out_layers.3.bias": "blocks.59.conv2.bias", - "model.diffusion_model.output_blocks.9.0.out_layers.3.weight": "blocks.59.conv2.weight", - "model.diffusion_model.output_blocks.9.0.skip_connection.bias": "blocks.59.conv_shortcut.bias", - "model.diffusion_model.output_blocks.9.0.skip_connection.weight": "blocks.59.conv_shortcut.weight", - "model.diffusion_model.output_blocks.9.1.norm.bias": 
"blocks.60.norm.bias", - "model.diffusion_model.output_blocks.9.1.norm.weight": "blocks.60.norm.weight", - "model.diffusion_model.output_blocks.9.1.proj_in.bias": "blocks.60.proj_in.bias", - "model.diffusion_model.output_blocks.9.1.proj_in.weight": "blocks.60.proj_in.weight", - "model.diffusion_model.output_blocks.9.1.proj_out.bias": "blocks.60.proj_out.bias", - "model.diffusion_model.output_blocks.9.1.proj_out.weight": "blocks.60.proj_out.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn1.to_k.weight": "blocks.60.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.60.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.60.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn1.to_q.weight": "blocks.60.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn1.to_v.weight": "blocks.60.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn2.to_k.weight": "blocks.60.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.60.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.60.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn2.to_q.weight": "blocks.60.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn2.to_v.weight": "blocks.60.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.60.transformer_blocks.0.act_fn.proj.bias", - 
"model.diffusion_model.output_blocks.9.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.60.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.ff.net.2.bias": "blocks.60.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.ff.net.2.weight": "blocks.60.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.norm1.bias": "blocks.60.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.norm1.weight": "blocks.60.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.norm2.bias": "blocks.60.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.norm2.weight": "blocks.60.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.norm3.bias": "blocks.60.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.norm3.weight": "blocks.60.transformer_blocks.0.norm3.weight", - "model.diffusion_model.time_embed.0.bias": "time_embedding.0.bias", - "model.diffusion_model.time_embed.0.weight": "time_embedding.0.weight", - "model.diffusion_model.time_embed.2.bias": "time_embedding.2.bias", - "model.diffusion_model.time_embed.2.weight": "time_embedding.2.weight", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if ".proj_in." in name or ".proj_out." 
in name: - param = param.squeeze() - state_dict_[rename_dict[name]] = param - return state_dict_ \ No newline at end of file diff --git a/diffsynth/models/sd_vae_decoder.py b/diffsynth/models/sd_vae_decoder.py deleted file mode 100644 index 93f015a..0000000 --- a/diffsynth/models/sd_vae_decoder.py +++ /dev/null @@ -1,336 +0,0 @@ -import torch -from .attention import Attention -from .sd_unet import ResnetBlock, UpSampler -from .tiler import TileWorker - - -class VAEAttentionBlock(torch.nn.Module): - - def __init__(self, num_attention_heads, attention_head_dim, in_channels, num_layers=1, norm_num_groups=32, eps=1e-5): - super().__init__() - inner_dim = num_attention_heads * attention_head_dim - - self.norm = torch.nn.GroupNorm(num_groups=norm_num_groups, num_channels=in_channels, eps=eps, affine=True) - - self.transformer_blocks = torch.nn.ModuleList([ - Attention( - inner_dim, - num_attention_heads, - attention_head_dim, - bias_q=True, - bias_kv=True, - bias_out=True - ) - for d in range(num_layers) - ]) - - def forward(self, hidden_states, time_emb, text_emb, res_stack): - batch, _, height, width = hidden_states.shape - residual = hidden_states - - hidden_states = self.norm(hidden_states) - inner_dim = hidden_states.shape[1] - hidden_states = hidden_states.permute(0, 2, 3, 1).reshape(batch, height * width, inner_dim) - - for block in self.transformer_blocks: - hidden_states = block(hidden_states) - - hidden_states = hidden_states.reshape(batch, height, width, inner_dim).permute(0, 3, 1, 2).contiguous() - hidden_states = hidden_states + residual - - return hidden_states, time_emb, text_emb, res_stack - - -class SDVAEDecoder(torch.nn.Module): - def __init__(self): - super().__init__() - self.scaling_factor = 0.18215 - self.post_quant_conv = torch.nn.Conv2d(4, 4, kernel_size=1) - self.conv_in = torch.nn.Conv2d(4, 512, kernel_size=3, padding=1) - - self.blocks = torch.nn.ModuleList([ - # UNetMidBlock2D - ResnetBlock(512, 512, eps=1e-6), - VAEAttentionBlock(1, 512, 512, 
1, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - # UpDecoderBlock2D - ResnetBlock(512, 512, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - UpSampler(512), - # UpDecoderBlock2D - ResnetBlock(512, 512, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - UpSampler(512), - # UpDecoderBlock2D - ResnetBlock(512, 256, eps=1e-6), - ResnetBlock(256, 256, eps=1e-6), - ResnetBlock(256, 256, eps=1e-6), - UpSampler(256), - # UpDecoderBlock2D - ResnetBlock(256, 128, eps=1e-6), - ResnetBlock(128, 128, eps=1e-6), - ResnetBlock(128, 128, eps=1e-6), - ]) - - self.conv_norm_out = torch.nn.GroupNorm(num_channels=128, num_groups=32, eps=1e-5) - self.conv_act = torch.nn.SiLU() - self.conv_out = torch.nn.Conv2d(128, 3, kernel_size=3, padding=1) - - def tiled_forward(self, sample, tile_size=64, tile_stride=32): - hidden_states = TileWorker().tiled_forward( - lambda x: self.forward(x), - sample, - tile_size, - tile_stride, - tile_device=sample.device, - tile_dtype=sample.dtype - ) - return hidden_states - - def forward(self, sample, tiled=False, tile_size=64, tile_stride=32, **kwargs): - original_dtype = sample.dtype - sample = sample.to(dtype=next(iter(self.parameters())).dtype) - # For VAE Decoder, we do not need to apply the tiler on each layer. - if tiled: - return self.tiled_forward(sample, tile_size=tile_size, tile_stride=tile_stride) - - # 1. pre-process - sample = sample / self.scaling_factor - hidden_states = self.post_quant_conv(sample) - hidden_states = self.conv_in(hidden_states) - time_emb = None - text_emb = None - res_stack = None - - # 2. blocks - for i, block in enumerate(self.blocks): - hidden_states, time_emb, text_emb, res_stack = block(hidden_states, time_emb, text_emb, res_stack) - - # 3. 
output - hidden_states = self.conv_norm_out(hidden_states) - hidden_states = self.conv_act(hidden_states) - hidden_states = self.conv_out(hidden_states) - hidden_states = hidden_states.to(original_dtype) - - return hidden_states - - @staticmethod - def state_dict_converter(): - return SDVAEDecoderStateDictConverter() - - -class SDVAEDecoderStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - # architecture - block_types = [ - 'ResnetBlock', 'VAEAttentionBlock', 'ResnetBlock', - 'ResnetBlock', 'ResnetBlock', 'ResnetBlock', 'UpSampler', - 'ResnetBlock', 'ResnetBlock', 'ResnetBlock', 'UpSampler', - 'ResnetBlock', 'ResnetBlock', 'ResnetBlock', 'UpSampler', - 'ResnetBlock', 'ResnetBlock', 'ResnetBlock' - ] - - # Rename each parameter - local_rename_dict = { - "post_quant_conv": "post_quant_conv", - "decoder.conv_in": "conv_in", - "decoder.mid_block.attentions.0.group_norm": "blocks.1.norm", - "decoder.mid_block.attentions.0.to_q": "blocks.1.transformer_blocks.0.to_q", - "decoder.mid_block.attentions.0.to_k": "blocks.1.transformer_blocks.0.to_k", - "decoder.mid_block.attentions.0.to_v": "blocks.1.transformer_blocks.0.to_v", - "decoder.mid_block.attentions.0.to_out.0": "blocks.1.transformer_blocks.0.to_out", - "decoder.mid_block.resnets.0.norm1": "blocks.0.norm1", - "decoder.mid_block.resnets.0.conv1": "blocks.0.conv1", - "decoder.mid_block.resnets.0.norm2": "blocks.0.norm2", - "decoder.mid_block.resnets.0.conv2": "blocks.0.conv2", - "decoder.mid_block.resnets.1.norm1": "blocks.2.norm1", - "decoder.mid_block.resnets.1.conv1": "blocks.2.conv1", - "decoder.mid_block.resnets.1.norm2": "blocks.2.norm2", - "decoder.mid_block.resnets.1.conv2": "blocks.2.conv2", - "decoder.conv_norm_out": "conv_norm_out", - "decoder.conv_out": "conv_out", - } - name_list = sorted([name for name in state_dict]) - rename_dict = {} - block_id = {"ResnetBlock": 2, "DownSampler": 2, "UpSampler": 2} - last_block_type_with_id = {"ResnetBlock": "", "DownSampler": 
"", "UpSampler": ""} - for name in name_list: - names = name.split(".") - name_prefix = ".".join(names[:-1]) - if name_prefix in local_rename_dict: - rename_dict[name] = local_rename_dict[name_prefix] + "." + names[-1] - elif name.startswith("decoder.up_blocks"): - block_type = {"resnets": "ResnetBlock", "downsamplers": "DownSampler", "upsamplers": "UpSampler"}[names[3]] - block_type_with_id = ".".join(names[:5]) - if block_type_with_id != last_block_type_with_id[block_type]: - block_id[block_type] += 1 - last_block_type_with_id[block_type] = block_type_with_id - while block_id[block_type] < len(block_types) and block_types[block_id[block_type]] != block_type: - block_id[block_type] += 1 - block_type_with_id = ".".join(names[:5]) - names = ["blocks", str(block_id[block_type])] + names[5:] - rename_dict[name] = ".".join(names) - - # Convert state_dict - state_dict_ = {} - for name, param in state_dict.items(): - if name in rename_dict: - state_dict_[rename_dict[name]] = param - return state_dict_ - - def from_civitai(self, state_dict): - rename_dict = { - "first_stage_model.decoder.conv_in.bias": "conv_in.bias", - "first_stage_model.decoder.conv_in.weight": "conv_in.weight", - "first_stage_model.decoder.conv_out.bias": "conv_out.bias", - "first_stage_model.decoder.conv_out.weight": "conv_out.weight", - "first_stage_model.decoder.mid.attn_1.k.bias": "blocks.1.transformer_blocks.0.to_k.bias", - "first_stage_model.decoder.mid.attn_1.k.weight": "blocks.1.transformer_blocks.0.to_k.weight", - "first_stage_model.decoder.mid.attn_1.norm.bias": "blocks.1.norm.bias", - "first_stage_model.decoder.mid.attn_1.norm.weight": "blocks.1.norm.weight", - "first_stage_model.decoder.mid.attn_1.proj_out.bias": "blocks.1.transformer_blocks.0.to_out.bias", - "first_stage_model.decoder.mid.attn_1.proj_out.weight": "blocks.1.transformer_blocks.0.to_out.weight", - "first_stage_model.decoder.mid.attn_1.q.bias": "blocks.1.transformer_blocks.0.to_q.bias", - 
"first_stage_model.decoder.mid.attn_1.q.weight": "blocks.1.transformer_blocks.0.to_q.weight", - "first_stage_model.decoder.mid.attn_1.v.bias": "blocks.1.transformer_blocks.0.to_v.bias", - "first_stage_model.decoder.mid.attn_1.v.weight": "blocks.1.transformer_blocks.0.to_v.weight", - "first_stage_model.decoder.mid.block_1.conv1.bias": "blocks.0.conv1.bias", - "first_stage_model.decoder.mid.block_1.conv1.weight": "blocks.0.conv1.weight", - "first_stage_model.decoder.mid.block_1.conv2.bias": "blocks.0.conv2.bias", - "first_stage_model.decoder.mid.block_1.conv2.weight": "blocks.0.conv2.weight", - "first_stage_model.decoder.mid.block_1.norm1.bias": "blocks.0.norm1.bias", - "first_stage_model.decoder.mid.block_1.norm1.weight": "blocks.0.norm1.weight", - "first_stage_model.decoder.mid.block_1.norm2.bias": "blocks.0.norm2.bias", - "first_stage_model.decoder.mid.block_1.norm2.weight": "blocks.0.norm2.weight", - "first_stage_model.decoder.mid.block_2.conv1.bias": "blocks.2.conv1.bias", - "first_stage_model.decoder.mid.block_2.conv1.weight": "blocks.2.conv1.weight", - "first_stage_model.decoder.mid.block_2.conv2.bias": "blocks.2.conv2.bias", - "first_stage_model.decoder.mid.block_2.conv2.weight": "blocks.2.conv2.weight", - "first_stage_model.decoder.mid.block_2.norm1.bias": "blocks.2.norm1.bias", - "first_stage_model.decoder.mid.block_2.norm1.weight": "blocks.2.norm1.weight", - "first_stage_model.decoder.mid.block_2.norm2.bias": "blocks.2.norm2.bias", - "first_stage_model.decoder.mid.block_2.norm2.weight": "blocks.2.norm2.weight", - "first_stage_model.decoder.norm_out.bias": "conv_norm_out.bias", - "first_stage_model.decoder.norm_out.weight": "conv_norm_out.weight", - "first_stage_model.decoder.up.0.block.0.conv1.bias": "blocks.15.conv1.bias", - "first_stage_model.decoder.up.0.block.0.conv1.weight": "blocks.15.conv1.weight", - "first_stage_model.decoder.up.0.block.0.conv2.bias": "blocks.15.conv2.bias", - "first_stage_model.decoder.up.0.block.0.conv2.weight": 
"blocks.15.conv2.weight", - "first_stage_model.decoder.up.0.block.0.nin_shortcut.bias": "blocks.15.conv_shortcut.bias", - "first_stage_model.decoder.up.0.block.0.nin_shortcut.weight": "blocks.15.conv_shortcut.weight", - "first_stage_model.decoder.up.0.block.0.norm1.bias": "blocks.15.norm1.bias", - "first_stage_model.decoder.up.0.block.0.norm1.weight": "blocks.15.norm1.weight", - "first_stage_model.decoder.up.0.block.0.norm2.bias": "blocks.15.norm2.bias", - "first_stage_model.decoder.up.0.block.0.norm2.weight": "blocks.15.norm2.weight", - "first_stage_model.decoder.up.0.block.1.conv1.bias": "blocks.16.conv1.bias", - "first_stage_model.decoder.up.0.block.1.conv1.weight": "blocks.16.conv1.weight", - "first_stage_model.decoder.up.0.block.1.conv2.bias": "blocks.16.conv2.bias", - "first_stage_model.decoder.up.0.block.1.conv2.weight": "blocks.16.conv2.weight", - "first_stage_model.decoder.up.0.block.1.norm1.bias": "blocks.16.norm1.bias", - "first_stage_model.decoder.up.0.block.1.norm1.weight": "blocks.16.norm1.weight", - "first_stage_model.decoder.up.0.block.1.norm2.bias": "blocks.16.norm2.bias", - "first_stage_model.decoder.up.0.block.1.norm2.weight": "blocks.16.norm2.weight", - "first_stage_model.decoder.up.0.block.2.conv1.bias": "blocks.17.conv1.bias", - "first_stage_model.decoder.up.0.block.2.conv1.weight": "blocks.17.conv1.weight", - "first_stage_model.decoder.up.0.block.2.conv2.bias": "blocks.17.conv2.bias", - "first_stage_model.decoder.up.0.block.2.conv2.weight": "blocks.17.conv2.weight", - "first_stage_model.decoder.up.0.block.2.norm1.bias": "blocks.17.norm1.bias", - "first_stage_model.decoder.up.0.block.2.norm1.weight": "blocks.17.norm1.weight", - "first_stage_model.decoder.up.0.block.2.norm2.bias": "blocks.17.norm2.bias", - "first_stage_model.decoder.up.0.block.2.norm2.weight": "blocks.17.norm2.weight", - "first_stage_model.decoder.up.1.block.0.conv1.bias": "blocks.11.conv1.bias", - "first_stage_model.decoder.up.1.block.0.conv1.weight": "blocks.11.conv1.weight", 
- "first_stage_model.decoder.up.1.block.0.conv2.bias": "blocks.11.conv2.bias", - "first_stage_model.decoder.up.1.block.0.conv2.weight": "blocks.11.conv2.weight", - "first_stage_model.decoder.up.1.block.0.nin_shortcut.bias": "blocks.11.conv_shortcut.bias", - "first_stage_model.decoder.up.1.block.0.nin_shortcut.weight": "blocks.11.conv_shortcut.weight", - "first_stage_model.decoder.up.1.block.0.norm1.bias": "blocks.11.norm1.bias", - "first_stage_model.decoder.up.1.block.0.norm1.weight": "blocks.11.norm1.weight", - "first_stage_model.decoder.up.1.block.0.norm2.bias": "blocks.11.norm2.bias", - "first_stage_model.decoder.up.1.block.0.norm2.weight": "blocks.11.norm2.weight", - "first_stage_model.decoder.up.1.block.1.conv1.bias": "blocks.12.conv1.bias", - "first_stage_model.decoder.up.1.block.1.conv1.weight": "blocks.12.conv1.weight", - "first_stage_model.decoder.up.1.block.1.conv2.bias": "blocks.12.conv2.bias", - "first_stage_model.decoder.up.1.block.1.conv2.weight": "blocks.12.conv2.weight", - "first_stage_model.decoder.up.1.block.1.norm1.bias": "blocks.12.norm1.bias", - "first_stage_model.decoder.up.1.block.1.norm1.weight": "blocks.12.norm1.weight", - "first_stage_model.decoder.up.1.block.1.norm2.bias": "blocks.12.norm2.bias", - "first_stage_model.decoder.up.1.block.1.norm2.weight": "blocks.12.norm2.weight", - "first_stage_model.decoder.up.1.block.2.conv1.bias": "blocks.13.conv1.bias", - "first_stage_model.decoder.up.1.block.2.conv1.weight": "blocks.13.conv1.weight", - "first_stage_model.decoder.up.1.block.2.conv2.bias": "blocks.13.conv2.bias", - "first_stage_model.decoder.up.1.block.2.conv2.weight": "blocks.13.conv2.weight", - "first_stage_model.decoder.up.1.block.2.norm1.bias": "blocks.13.norm1.bias", - "first_stage_model.decoder.up.1.block.2.norm1.weight": "blocks.13.norm1.weight", - "first_stage_model.decoder.up.1.block.2.norm2.bias": "blocks.13.norm2.bias", - "first_stage_model.decoder.up.1.block.2.norm2.weight": "blocks.13.norm2.weight", - 
"first_stage_model.decoder.up.1.upsample.conv.bias": "blocks.14.conv.bias", - "first_stage_model.decoder.up.1.upsample.conv.weight": "blocks.14.conv.weight", - "first_stage_model.decoder.up.2.block.0.conv1.bias": "blocks.7.conv1.bias", - "first_stage_model.decoder.up.2.block.0.conv1.weight": "blocks.7.conv1.weight", - "first_stage_model.decoder.up.2.block.0.conv2.bias": "blocks.7.conv2.bias", - "first_stage_model.decoder.up.2.block.0.conv2.weight": "blocks.7.conv2.weight", - "first_stage_model.decoder.up.2.block.0.norm1.bias": "blocks.7.norm1.bias", - "first_stage_model.decoder.up.2.block.0.norm1.weight": "blocks.7.norm1.weight", - "first_stage_model.decoder.up.2.block.0.norm2.bias": "blocks.7.norm2.bias", - "first_stage_model.decoder.up.2.block.0.norm2.weight": "blocks.7.norm2.weight", - "first_stage_model.decoder.up.2.block.1.conv1.bias": "blocks.8.conv1.bias", - "first_stage_model.decoder.up.2.block.1.conv1.weight": "blocks.8.conv1.weight", - "first_stage_model.decoder.up.2.block.1.conv2.bias": "blocks.8.conv2.bias", - "first_stage_model.decoder.up.2.block.1.conv2.weight": "blocks.8.conv2.weight", - "first_stage_model.decoder.up.2.block.1.norm1.bias": "blocks.8.norm1.bias", - "first_stage_model.decoder.up.2.block.1.norm1.weight": "blocks.8.norm1.weight", - "first_stage_model.decoder.up.2.block.1.norm2.bias": "blocks.8.norm2.bias", - "first_stage_model.decoder.up.2.block.1.norm2.weight": "blocks.8.norm2.weight", - "first_stage_model.decoder.up.2.block.2.conv1.bias": "blocks.9.conv1.bias", - "first_stage_model.decoder.up.2.block.2.conv1.weight": "blocks.9.conv1.weight", - "first_stage_model.decoder.up.2.block.2.conv2.bias": "blocks.9.conv2.bias", - "first_stage_model.decoder.up.2.block.2.conv2.weight": "blocks.9.conv2.weight", - "first_stage_model.decoder.up.2.block.2.norm1.bias": "blocks.9.norm1.bias", - "first_stage_model.decoder.up.2.block.2.norm1.weight": "blocks.9.norm1.weight", - "first_stage_model.decoder.up.2.block.2.norm2.bias": "blocks.9.norm2.bias", - 
"first_stage_model.decoder.up.2.block.2.norm2.weight": "blocks.9.norm2.weight", - "first_stage_model.decoder.up.2.upsample.conv.bias": "blocks.10.conv.bias", - "first_stage_model.decoder.up.2.upsample.conv.weight": "blocks.10.conv.weight", - "first_stage_model.decoder.up.3.block.0.conv1.bias": "blocks.3.conv1.bias", - "first_stage_model.decoder.up.3.block.0.conv1.weight": "blocks.3.conv1.weight", - "first_stage_model.decoder.up.3.block.0.conv2.bias": "blocks.3.conv2.bias", - "first_stage_model.decoder.up.3.block.0.conv2.weight": "blocks.3.conv2.weight", - "first_stage_model.decoder.up.3.block.0.norm1.bias": "blocks.3.norm1.bias", - "first_stage_model.decoder.up.3.block.0.norm1.weight": "blocks.3.norm1.weight", - "first_stage_model.decoder.up.3.block.0.norm2.bias": "blocks.3.norm2.bias", - "first_stage_model.decoder.up.3.block.0.norm2.weight": "blocks.3.norm2.weight", - "first_stage_model.decoder.up.3.block.1.conv1.bias": "blocks.4.conv1.bias", - "first_stage_model.decoder.up.3.block.1.conv1.weight": "blocks.4.conv1.weight", - "first_stage_model.decoder.up.3.block.1.conv2.bias": "blocks.4.conv2.bias", - "first_stage_model.decoder.up.3.block.1.conv2.weight": "blocks.4.conv2.weight", - "first_stage_model.decoder.up.3.block.1.norm1.bias": "blocks.4.norm1.bias", - "first_stage_model.decoder.up.3.block.1.norm1.weight": "blocks.4.norm1.weight", - "first_stage_model.decoder.up.3.block.1.norm2.bias": "blocks.4.norm2.bias", - "first_stage_model.decoder.up.3.block.1.norm2.weight": "blocks.4.norm2.weight", - "first_stage_model.decoder.up.3.block.2.conv1.bias": "blocks.5.conv1.bias", - "first_stage_model.decoder.up.3.block.2.conv1.weight": "blocks.5.conv1.weight", - "first_stage_model.decoder.up.3.block.2.conv2.bias": "blocks.5.conv2.bias", - "first_stage_model.decoder.up.3.block.2.conv2.weight": "blocks.5.conv2.weight", - "first_stage_model.decoder.up.3.block.2.norm1.bias": "blocks.5.norm1.bias", - "first_stage_model.decoder.up.3.block.2.norm1.weight": "blocks.5.norm1.weight", 
- "first_stage_model.decoder.up.3.block.2.norm2.bias": "blocks.5.norm2.bias", - "first_stage_model.decoder.up.3.block.2.norm2.weight": "blocks.5.norm2.weight", - "first_stage_model.decoder.up.3.upsample.conv.bias": "blocks.6.conv.bias", - "first_stage_model.decoder.up.3.upsample.conv.weight": "blocks.6.conv.weight", - "first_stage_model.post_quant_conv.bias": "post_quant_conv.bias", - "first_stage_model.post_quant_conv.weight": "post_quant_conv.weight", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if "transformer_blocks" in rename_dict[name]: - param = param.squeeze() - state_dict_[rename_dict[name]] = param - return state_dict_ diff --git a/diffsynth/models/sd_vae_encoder.py b/diffsynth/models/sd_vae_encoder.py deleted file mode 100644 index 60965c5..0000000 --- a/diffsynth/models/sd_vae_encoder.py +++ /dev/null @@ -1,282 +0,0 @@ -import torch -from .sd_unet import ResnetBlock, DownSampler -from .sd_vae_decoder import VAEAttentionBlock -from .tiler import TileWorker -from einops import rearrange - - -class SDVAEEncoder(torch.nn.Module): - def __init__(self): - super().__init__() - self.scaling_factor = 0.18215 - self.quant_conv = torch.nn.Conv2d(8, 8, kernel_size=1) - self.conv_in = torch.nn.Conv2d(3, 128, kernel_size=3, padding=1) - - self.blocks = torch.nn.ModuleList([ - # DownEncoderBlock2D - ResnetBlock(128, 128, eps=1e-6), - ResnetBlock(128, 128, eps=1e-6), - DownSampler(128, padding=0, extra_padding=True), - # DownEncoderBlock2D - ResnetBlock(128, 256, eps=1e-6), - ResnetBlock(256, 256, eps=1e-6), - DownSampler(256, padding=0, extra_padding=True), - # DownEncoderBlock2D - ResnetBlock(256, 512, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - DownSampler(512, padding=0, extra_padding=True), - # DownEncoderBlock2D - ResnetBlock(512, 512, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - # UNetMidBlock2D - ResnetBlock(512, 512, eps=1e-6), - VAEAttentionBlock(1, 512, 512, 1, eps=1e-6), - ResnetBlock(512, 
512, eps=1e-6), - ]) - - self.conv_norm_out = torch.nn.GroupNorm(num_channels=512, num_groups=32, eps=1e-6) - self.conv_act = torch.nn.SiLU() - self.conv_out = torch.nn.Conv2d(512, 8, kernel_size=3, padding=1) - - def tiled_forward(self, sample, tile_size=64, tile_stride=32): - hidden_states = TileWorker().tiled_forward( - lambda x: self.forward(x), - sample, - tile_size, - tile_stride, - tile_device=sample.device, - tile_dtype=sample.dtype - ) - return hidden_states - - def forward(self, sample, tiled=False, tile_size=64, tile_stride=32, **kwargs): - original_dtype = sample.dtype - sample = sample.to(dtype=next(iter(self.parameters())).dtype) - # For VAE Decoder, we do not need to apply the tiler on each layer. - if tiled: - return self.tiled_forward(sample, tile_size=tile_size, tile_stride=tile_stride) - - # 1. pre-process - hidden_states = self.conv_in(sample) - time_emb = None - text_emb = None - res_stack = None - - # 2. blocks - for i, block in enumerate(self.blocks): - hidden_states, time_emb, text_emb, res_stack = block(hidden_states, time_emb, text_emb, res_stack) - - # 3. 
output - hidden_states = self.conv_norm_out(hidden_states) - hidden_states = self.conv_act(hidden_states) - hidden_states = self.conv_out(hidden_states) - hidden_states = self.quant_conv(hidden_states) - hidden_states = hidden_states[:, :4] - hidden_states *= self.scaling_factor - hidden_states = hidden_states.to(original_dtype) - - return hidden_states - - def encode_video(self, sample, batch_size=8): - B = sample.shape[0] - hidden_states = [] - - for i in range(0, sample.shape[2], batch_size): - - j = min(i + batch_size, sample.shape[2]) - sample_batch = rearrange(sample[:,:,i:j], "B C T H W -> (B T) C H W") - - hidden_states_batch = self(sample_batch) - hidden_states_batch = rearrange(hidden_states_batch, "(B T) C H W -> B C T H W", B=B) - - hidden_states.append(hidden_states_batch) - - hidden_states = torch.concat(hidden_states, dim=2) - return hidden_states - - @staticmethod - def state_dict_converter(): - return SDVAEEncoderStateDictConverter() - - -class SDVAEEncoderStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - # architecture - block_types = [ - 'ResnetBlock', 'ResnetBlock', 'DownSampler', - 'ResnetBlock', 'ResnetBlock', 'DownSampler', - 'ResnetBlock', 'ResnetBlock', 'DownSampler', - 'ResnetBlock', 'ResnetBlock', - 'ResnetBlock', 'VAEAttentionBlock', 'ResnetBlock' - ] - - # Rename each parameter - local_rename_dict = { - "quant_conv": "quant_conv", - "encoder.conv_in": "conv_in", - "encoder.mid_block.attentions.0.group_norm": "blocks.12.norm", - "encoder.mid_block.attentions.0.to_q": "blocks.12.transformer_blocks.0.to_q", - "encoder.mid_block.attentions.0.to_k": "blocks.12.transformer_blocks.0.to_k", - "encoder.mid_block.attentions.0.to_v": "blocks.12.transformer_blocks.0.to_v", - "encoder.mid_block.attentions.0.to_out.0": "blocks.12.transformer_blocks.0.to_out", - "encoder.mid_block.resnets.0.norm1": "blocks.11.norm1", - "encoder.mid_block.resnets.0.conv1": "blocks.11.conv1", - 
"encoder.mid_block.resnets.0.norm2": "blocks.11.norm2", - "encoder.mid_block.resnets.0.conv2": "blocks.11.conv2", - "encoder.mid_block.resnets.1.norm1": "blocks.13.norm1", - "encoder.mid_block.resnets.1.conv1": "blocks.13.conv1", - "encoder.mid_block.resnets.1.norm2": "blocks.13.norm2", - "encoder.mid_block.resnets.1.conv2": "blocks.13.conv2", - "encoder.conv_norm_out": "conv_norm_out", - "encoder.conv_out": "conv_out", - } - name_list = sorted([name for name in state_dict]) - rename_dict = {} - block_id = {"ResnetBlock": -1, "DownSampler": -1, "UpSampler": -1} - last_block_type_with_id = {"ResnetBlock": "", "DownSampler": "", "UpSampler": ""} - for name in name_list: - names = name.split(".") - name_prefix = ".".join(names[:-1]) - if name_prefix in local_rename_dict: - rename_dict[name] = local_rename_dict[name_prefix] + "." + names[-1] - elif name.startswith("encoder.down_blocks"): - block_type = {"resnets": "ResnetBlock", "downsamplers": "DownSampler", "upsamplers": "UpSampler"}[names[3]] - block_type_with_id = ".".join(names[:5]) - if block_type_with_id != last_block_type_with_id[block_type]: - block_id[block_type] += 1 - last_block_type_with_id[block_type] = block_type_with_id - while block_id[block_type] < len(block_types) and block_types[block_id[block_type]] != block_type: - block_id[block_type] += 1 - block_type_with_id = ".".join(names[:5]) - names = ["blocks", str(block_id[block_type])] + names[5:] - rename_dict[name] = ".".join(names) - - # Convert state_dict - state_dict_ = {} - for name, param in state_dict.items(): - if name in rename_dict: - state_dict_[rename_dict[name]] = param - return state_dict_ - - def from_civitai(self, state_dict): - rename_dict = { - "first_stage_model.encoder.conv_in.bias": "conv_in.bias", - "first_stage_model.encoder.conv_in.weight": "conv_in.weight", - "first_stage_model.encoder.conv_out.bias": "conv_out.bias", - "first_stage_model.encoder.conv_out.weight": "conv_out.weight", - 
"first_stage_model.encoder.down.0.block.0.conv1.bias": "blocks.0.conv1.bias", - "first_stage_model.encoder.down.0.block.0.conv1.weight": "blocks.0.conv1.weight", - "first_stage_model.encoder.down.0.block.0.conv2.bias": "blocks.0.conv2.bias", - "first_stage_model.encoder.down.0.block.0.conv2.weight": "blocks.0.conv2.weight", - "first_stage_model.encoder.down.0.block.0.norm1.bias": "blocks.0.norm1.bias", - "first_stage_model.encoder.down.0.block.0.norm1.weight": "blocks.0.norm1.weight", - "first_stage_model.encoder.down.0.block.0.norm2.bias": "blocks.0.norm2.bias", - "first_stage_model.encoder.down.0.block.0.norm2.weight": "blocks.0.norm2.weight", - "first_stage_model.encoder.down.0.block.1.conv1.bias": "blocks.1.conv1.bias", - "first_stage_model.encoder.down.0.block.1.conv1.weight": "blocks.1.conv1.weight", - "first_stage_model.encoder.down.0.block.1.conv2.bias": "blocks.1.conv2.bias", - "first_stage_model.encoder.down.0.block.1.conv2.weight": "blocks.1.conv2.weight", - "first_stage_model.encoder.down.0.block.1.norm1.bias": "blocks.1.norm1.bias", - "first_stage_model.encoder.down.0.block.1.norm1.weight": "blocks.1.norm1.weight", - "first_stage_model.encoder.down.0.block.1.norm2.bias": "blocks.1.norm2.bias", - "first_stage_model.encoder.down.0.block.1.norm2.weight": "blocks.1.norm2.weight", - "first_stage_model.encoder.down.0.downsample.conv.bias": "blocks.2.conv.bias", - "first_stage_model.encoder.down.0.downsample.conv.weight": "blocks.2.conv.weight", - "first_stage_model.encoder.down.1.block.0.conv1.bias": "blocks.3.conv1.bias", - "first_stage_model.encoder.down.1.block.0.conv1.weight": "blocks.3.conv1.weight", - "first_stage_model.encoder.down.1.block.0.conv2.bias": "blocks.3.conv2.bias", - "first_stage_model.encoder.down.1.block.0.conv2.weight": "blocks.3.conv2.weight", - "first_stage_model.encoder.down.1.block.0.nin_shortcut.bias": "blocks.3.conv_shortcut.bias", - "first_stage_model.encoder.down.1.block.0.nin_shortcut.weight": "blocks.3.conv_shortcut.weight", - 
"first_stage_model.encoder.down.1.block.0.norm1.bias": "blocks.3.norm1.bias", - "first_stage_model.encoder.down.1.block.0.norm1.weight": "blocks.3.norm1.weight", - "first_stage_model.encoder.down.1.block.0.norm2.bias": "blocks.3.norm2.bias", - "first_stage_model.encoder.down.1.block.0.norm2.weight": "blocks.3.norm2.weight", - "first_stage_model.encoder.down.1.block.1.conv1.bias": "blocks.4.conv1.bias", - "first_stage_model.encoder.down.1.block.1.conv1.weight": "blocks.4.conv1.weight", - "first_stage_model.encoder.down.1.block.1.conv2.bias": "blocks.4.conv2.bias", - "first_stage_model.encoder.down.1.block.1.conv2.weight": "blocks.4.conv2.weight", - "first_stage_model.encoder.down.1.block.1.norm1.bias": "blocks.4.norm1.bias", - "first_stage_model.encoder.down.1.block.1.norm1.weight": "blocks.4.norm1.weight", - "first_stage_model.encoder.down.1.block.1.norm2.bias": "blocks.4.norm2.bias", - "first_stage_model.encoder.down.1.block.1.norm2.weight": "blocks.4.norm2.weight", - "first_stage_model.encoder.down.1.downsample.conv.bias": "blocks.5.conv.bias", - "first_stage_model.encoder.down.1.downsample.conv.weight": "blocks.5.conv.weight", - "first_stage_model.encoder.down.2.block.0.conv1.bias": "blocks.6.conv1.bias", - "first_stage_model.encoder.down.2.block.0.conv1.weight": "blocks.6.conv1.weight", - "first_stage_model.encoder.down.2.block.0.conv2.bias": "blocks.6.conv2.bias", - "first_stage_model.encoder.down.2.block.0.conv2.weight": "blocks.6.conv2.weight", - "first_stage_model.encoder.down.2.block.0.nin_shortcut.bias": "blocks.6.conv_shortcut.bias", - "first_stage_model.encoder.down.2.block.0.nin_shortcut.weight": "blocks.6.conv_shortcut.weight", - "first_stage_model.encoder.down.2.block.0.norm1.bias": "blocks.6.norm1.bias", - "first_stage_model.encoder.down.2.block.0.norm1.weight": "blocks.6.norm1.weight", - "first_stage_model.encoder.down.2.block.0.norm2.bias": "blocks.6.norm2.bias", - "first_stage_model.encoder.down.2.block.0.norm2.weight": "blocks.6.norm2.weight", - 
"first_stage_model.encoder.down.2.block.1.conv1.bias": "blocks.7.conv1.bias", - "first_stage_model.encoder.down.2.block.1.conv1.weight": "blocks.7.conv1.weight", - "first_stage_model.encoder.down.2.block.1.conv2.bias": "blocks.7.conv2.bias", - "first_stage_model.encoder.down.2.block.1.conv2.weight": "blocks.7.conv2.weight", - "first_stage_model.encoder.down.2.block.1.norm1.bias": "blocks.7.norm1.bias", - "first_stage_model.encoder.down.2.block.1.norm1.weight": "blocks.7.norm1.weight", - "first_stage_model.encoder.down.2.block.1.norm2.bias": "blocks.7.norm2.bias", - "first_stage_model.encoder.down.2.block.1.norm2.weight": "blocks.7.norm2.weight", - "first_stage_model.encoder.down.2.downsample.conv.bias": "blocks.8.conv.bias", - "first_stage_model.encoder.down.2.downsample.conv.weight": "blocks.8.conv.weight", - "first_stage_model.encoder.down.3.block.0.conv1.bias": "blocks.9.conv1.bias", - "first_stage_model.encoder.down.3.block.0.conv1.weight": "blocks.9.conv1.weight", - "first_stage_model.encoder.down.3.block.0.conv2.bias": "blocks.9.conv2.bias", - "first_stage_model.encoder.down.3.block.0.conv2.weight": "blocks.9.conv2.weight", - "first_stage_model.encoder.down.3.block.0.norm1.bias": "blocks.9.norm1.bias", - "first_stage_model.encoder.down.3.block.0.norm1.weight": "blocks.9.norm1.weight", - "first_stage_model.encoder.down.3.block.0.norm2.bias": "blocks.9.norm2.bias", - "first_stage_model.encoder.down.3.block.0.norm2.weight": "blocks.9.norm2.weight", - "first_stage_model.encoder.down.3.block.1.conv1.bias": "blocks.10.conv1.bias", - "first_stage_model.encoder.down.3.block.1.conv1.weight": "blocks.10.conv1.weight", - "first_stage_model.encoder.down.3.block.1.conv2.bias": "blocks.10.conv2.bias", - "first_stage_model.encoder.down.3.block.1.conv2.weight": "blocks.10.conv2.weight", - "first_stage_model.encoder.down.3.block.1.norm1.bias": "blocks.10.norm1.bias", - "first_stage_model.encoder.down.3.block.1.norm1.weight": "blocks.10.norm1.weight", - 
"first_stage_model.encoder.down.3.block.1.norm2.bias": "blocks.10.norm2.bias", - "first_stage_model.encoder.down.3.block.1.norm2.weight": "blocks.10.norm2.weight", - "first_stage_model.encoder.mid.attn_1.k.bias": "blocks.12.transformer_blocks.0.to_k.bias", - "first_stage_model.encoder.mid.attn_1.k.weight": "blocks.12.transformer_blocks.0.to_k.weight", - "first_stage_model.encoder.mid.attn_1.norm.bias": "blocks.12.norm.bias", - "first_stage_model.encoder.mid.attn_1.norm.weight": "blocks.12.norm.weight", - "first_stage_model.encoder.mid.attn_1.proj_out.bias": "blocks.12.transformer_blocks.0.to_out.bias", - "first_stage_model.encoder.mid.attn_1.proj_out.weight": "blocks.12.transformer_blocks.0.to_out.weight", - "first_stage_model.encoder.mid.attn_1.q.bias": "blocks.12.transformer_blocks.0.to_q.bias", - "first_stage_model.encoder.mid.attn_1.q.weight": "blocks.12.transformer_blocks.0.to_q.weight", - "first_stage_model.encoder.mid.attn_1.v.bias": "blocks.12.transformer_blocks.0.to_v.bias", - "first_stage_model.encoder.mid.attn_1.v.weight": "blocks.12.transformer_blocks.0.to_v.weight", - "first_stage_model.encoder.mid.block_1.conv1.bias": "blocks.11.conv1.bias", - "first_stage_model.encoder.mid.block_1.conv1.weight": "blocks.11.conv1.weight", - "first_stage_model.encoder.mid.block_1.conv2.bias": "blocks.11.conv2.bias", - "first_stage_model.encoder.mid.block_1.conv2.weight": "blocks.11.conv2.weight", - "first_stage_model.encoder.mid.block_1.norm1.bias": "blocks.11.norm1.bias", - "first_stage_model.encoder.mid.block_1.norm1.weight": "blocks.11.norm1.weight", - "first_stage_model.encoder.mid.block_1.norm2.bias": "blocks.11.norm2.bias", - "first_stage_model.encoder.mid.block_1.norm2.weight": "blocks.11.norm2.weight", - "first_stage_model.encoder.mid.block_2.conv1.bias": "blocks.13.conv1.bias", - "first_stage_model.encoder.mid.block_2.conv1.weight": "blocks.13.conv1.weight", - "first_stage_model.encoder.mid.block_2.conv2.bias": "blocks.13.conv2.bias", - 
"first_stage_model.encoder.mid.block_2.conv2.weight": "blocks.13.conv2.weight", - "first_stage_model.encoder.mid.block_2.norm1.bias": "blocks.13.norm1.bias", - "first_stage_model.encoder.mid.block_2.norm1.weight": "blocks.13.norm1.weight", - "first_stage_model.encoder.mid.block_2.norm2.bias": "blocks.13.norm2.bias", - "first_stage_model.encoder.mid.block_2.norm2.weight": "blocks.13.norm2.weight", - "first_stage_model.encoder.norm_out.bias": "conv_norm_out.bias", - "first_stage_model.encoder.norm_out.weight": "conv_norm_out.weight", - "first_stage_model.quant_conv.bias": "quant_conv.bias", - "first_stage_model.quant_conv.weight": "quant_conv.weight", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if "transformer_blocks" in rename_dict[name]: - param = param.squeeze() - state_dict_[rename_dict[name]] = param - return state_dict_ diff --git a/diffsynth/models/sdxl_controlnet.py b/diffsynth/models/sdxl_controlnet.py deleted file mode 100644 index acddf1c..0000000 --- a/diffsynth/models/sdxl_controlnet.py +++ /dev/null @@ -1,318 +0,0 @@ -import torch -from .sd_unet import Timesteps, ResnetBlock, AttentionBlock, PushBlock, DownSampler -from .sdxl_unet import SDXLUNet -from .tiler import TileWorker -from .sd_controlnet import ControlNetConditioningLayer -from collections import OrderedDict - - - -class QuickGELU(torch.nn.Module): - - def forward(self, x: torch.Tensor): - return x * torch.sigmoid(1.702 * x) - - - -class ResidualAttentionBlock(torch.nn.Module): - - def __init__(self, d_model: int, n_head: int, attn_mask: torch.Tensor = None): - super().__init__() - - self.attn = torch.nn.MultiheadAttention(d_model, n_head) - self.ln_1 = torch.nn.LayerNorm(d_model) - self.mlp = torch.nn.Sequential(OrderedDict([ - ("c_fc", torch.nn.Linear(d_model, d_model * 4)), - ("gelu", QuickGELU()), - ("c_proj", torch.nn.Linear(d_model * 4, d_model)) - ])) - self.ln_2 = torch.nn.LayerNorm(d_model) - self.attn_mask = attn_mask - - 
def attention(self, x: torch.Tensor): - self.attn_mask = self.attn_mask.to(dtype=x.dtype, device=x.device) if self.attn_mask is not None else None - return self.attn(x, x, x, need_weights=False, attn_mask=self.attn_mask)[0] - - def forward(self, x: torch.Tensor): - x = x + self.attention(self.ln_1(x)) - x = x + self.mlp(self.ln_2(x)) - return x - - - -class SDXLControlNetUnion(torch.nn.Module): - def __init__(self, global_pool=False): - super().__init__() - self.time_proj = Timesteps(320) - self.time_embedding = torch.nn.Sequential( - torch.nn.Linear(320, 1280), - torch.nn.SiLU(), - torch.nn.Linear(1280, 1280) - ) - self.add_time_proj = Timesteps(256) - self.add_time_embedding = torch.nn.Sequential( - torch.nn.Linear(2816, 1280), - torch.nn.SiLU(), - torch.nn.Linear(1280, 1280) - ) - self.control_type_proj = Timesteps(256) - self.control_type_embedding = torch.nn.Sequential( - torch.nn.Linear(256 * 8, 1280), - torch.nn.SiLU(), - torch.nn.Linear(1280, 1280) - ) - self.conv_in = torch.nn.Conv2d(4, 320, kernel_size=3, padding=1) - - self.controlnet_conv_in = ControlNetConditioningLayer(channels=(3, 16, 32, 96, 256, 320)) - self.controlnet_transformer = ResidualAttentionBlock(320, 8) - self.task_embedding = torch.nn.Parameter(torch.randn(8, 320)) - self.spatial_ch_projs = torch.nn.Linear(320, 320) - - self.blocks = torch.nn.ModuleList([ - # DownBlock2D - ResnetBlock(320, 320, 1280), - PushBlock(), - ResnetBlock(320, 320, 1280), - PushBlock(), - DownSampler(320), - PushBlock(), - # CrossAttnDownBlock2D - ResnetBlock(320, 640, 1280), - AttentionBlock(10, 64, 640, 2, 2048), - PushBlock(), - ResnetBlock(640, 640, 1280), - AttentionBlock(10, 64, 640, 2, 2048), - PushBlock(), - DownSampler(640), - PushBlock(), - # CrossAttnDownBlock2D - ResnetBlock(640, 1280, 1280), - AttentionBlock(20, 64, 1280, 10, 2048), - PushBlock(), - ResnetBlock(1280, 1280, 1280), - AttentionBlock(20, 64, 1280, 10, 2048), - PushBlock(), - # UNetMidBlock2DCrossAttn - ResnetBlock(1280, 1280, 1280), - 
AttentionBlock(20, 64, 1280, 10, 2048), - ResnetBlock(1280, 1280, 1280), - PushBlock() - ]) - - self.controlnet_blocks = torch.nn.ModuleList([ - torch.nn.Conv2d(320, 320, kernel_size=(1, 1)), - torch.nn.Conv2d(320, 320, kernel_size=(1, 1)), - torch.nn.Conv2d(320, 320, kernel_size=(1, 1)), - torch.nn.Conv2d(320, 320, kernel_size=(1, 1)), - torch.nn.Conv2d(640, 640, kernel_size=(1, 1)), - torch.nn.Conv2d(640, 640, kernel_size=(1, 1)), - torch.nn.Conv2d(640, 640, kernel_size=(1, 1)), - torch.nn.Conv2d(1280, 1280, kernel_size=(1, 1)), - torch.nn.Conv2d(1280, 1280, kernel_size=(1, 1)), - torch.nn.Conv2d(1280, 1280, kernel_size=(1, 1)), - ]) - - self.global_pool = global_pool - - # 0 -- openpose - # 1 -- depth - # 2 -- hed/pidi/scribble/ted - # 3 -- canny/lineart/anime_lineart/mlsd - # 4 -- normal - # 5 -- segment - # 6 -- tile - # 7 -- repaint - self.task_id = { - "openpose": 0, - "depth": 1, - "softedge": 2, - "canny": 3, - "lineart": 3, - "lineart_anime": 3, - "tile": 6, - "inpaint": 7 - } - - - def fuse_condition_to_input(self, hidden_states, task_id, conditioning): - controlnet_cond = self.controlnet_conv_in(conditioning) - feat_seq = torch.mean(controlnet_cond, dim=(2, 3)) - feat_seq = feat_seq + self.task_embedding[task_id] - x = torch.stack([feat_seq, torch.mean(hidden_states, dim=(2, 3))], dim=1) - x = self.controlnet_transformer(x) - - alpha = self.spatial_ch_projs(x[:,0]).unsqueeze(-1).unsqueeze(-1) - controlnet_cond_fuser = controlnet_cond + alpha - - hidden_states = hidden_states + controlnet_cond_fuser - return hidden_states - - - def forward( - self, - sample, timestep, encoder_hidden_states, - conditioning, processor_id, add_time_id, add_text_embeds, - tiled=False, tile_size=64, tile_stride=32, - unet:SDXLUNet=None, - **kwargs - ): - task_id = self.task_id[processor_id] - - # 1. 
time - t_emb = self.time_proj(timestep).to(sample.dtype) - t_emb = self.time_embedding(t_emb) - - time_embeds = self.add_time_proj(add_time_id) - time_embeds = time_embeds.reshape((add_text_embeds.shape[0], -1)) - add_embeds = torch.concat([add_text_embeds, time_embeds], dim=-1) - add_embeds = add_embeds.to(sample.dtype) - if unet is not None and unet.is_kolors: - add_embeds = unet.add_time_embedding(add_embeds) - else: - add_embeds = self.add_time_embedding(add_embeds) - - control_type = torch.zeros((sample.shape[0], 8), dtype=sample.dtype, device=sample.device) - control_type[:, task_id] = 1 - control_embeds = self.control_type_proj(control_type.flatten()) - control_embeds = control_embeds.reshape((sample.shape[0], -1)) - control_embeds = control_embeds.to(sample.dtype) - control_embeds = self.control_type_embedding(control_embeds) - time_emb = t_emb + add_embeds + control_embeds - - # 2. pre-process - height, width = sample.shape[2], sample.shape[3] - hidden_states = self.conv_in(sample) - hidden_states = self.fuse_condition_to_input(hidden_states, task_id, conditioning) - text_emb = encoder_hidden_states - if unet is not None and unet.is_kolors: - text_emb = unet.text_intermediate_proj(text_emb) - res_stack = [hidden_states] - - # 3. blocks - for i, block in enumerate(self.blocks): - if tiled and not isinstance(block, PushBlock): - _, _, inter_height, _ = hidden_states.shape - resize_scale = inter_height / height - hidden_states = TileWorker().tiled_forward( - lambda x: block(x, time_emb, text_emb, res_stack)[0], - hidden_states, - int(tile_size * resize_scale), - int(tile_stride * resize_scale), - tile_device=hidden_states.device, - tile_dtype=hidden_states.dtype - ) - else: - hidden_states, _, _, _ = block(hidden_states, time_emb, text_emb, res_stack) - - # 4. 
ControlNet blocks - controlnet_res_stack = [block(res) for block, res in zip(self.controlnet_blocks, res_stack)] - - # pool - if self.global_pool: - controlnet_res_stack = [res.mean(dim=(2, 3), keepdim=True) for res in controlnet_res_stack] - - return controlnet_res_stack - - @staticmethod - def state_dict_converter(): - return SDXLControlNetUnionStateDictConverter() - - - -class SDXLControlNetUnionStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - # architecture - block_types = [ - "ResnetBlock", "PushBlock", "ResnetBlock", "PushBlock", "DownSampler", "PushBlock", - "ResnetBlock", "AttentionBlock", "PushBlock", "ResnetBlock", "AttentionBlock", "PushBlock", "DownSampler", "PushBlock", - "ResnetBlock", "AttentionBlock", "PushBlock", "ResnetBlock", "AttentionBlock", "PushBlock", - "ResnetBlock", "AttentionBlock", "ResnetBlock", "PushBlock" - ] - - # controlnet_rename_dict - controlnet_rename_dict = { - "controlnet_cond_embedding.conv_in.weight": "controlnet_conv_in.blocks.0.weight", - "controlnet_cond_embedding.conv_in.bias": "controlnet_conv_in.blocks.0.bias", - "controlnet_cond_embedding.blocks.0.weight": "controlnet_conv_in.blocks.2.weight", - "controlnet_cond_embedding.blocks.0.bias": "controlnet_conv_in.blocks.2.bias", - "controlnet_cond_embedding.blocks.1.weight": "controlnet_conv_in.blocks.4.weight", - "controlnet_cond_embedding.blocks.1.bias": "controlnet_conv_in.blocks.4.bias", - "controlnet_cond_embedding.blocks.2.weight": "controlnet_conv_in.blocks.6.weight", - "controlnet_cond_embedding.blocks.2.bias": "controlnet_conv_in.blocks.6.bias", - "controlnet_cond_embedding.blocks.3.weight": "controlnet_conv_in.blocks.8.weight", - "controlnet_cond_embedding.blocks.3.bias": "controlnet_conv_in.blocks.8.bias", - "controlnet_cond_embedding.blocks.4.weight": "controlnet_conv_in.blocks.10.weight", - "controlnet_cond_embedding.blocks.4.bias": "controlnet_conv_in.blocks.10.bias", - "controlnet_cond_embedding.blocks.5.weight": 
"controlnet_conv_in.blocks.12.weight", - "controlnet_cond_embedding.blocks.5.bias": "controlnet_conv_in.blocks.12.bias", - "controlnet_cond_embedding.conv_out.weight": "controlnet_conv_in.blocks.14.weight", - "controlnet_cond_embedding.conv_out.bias": "controlnet_conv_in.blocks.14.bias", - "control_add_embedding.linear_1.weight": "control_type_embedding.0.weight", - "control_add_embedding.linear_1.bias": "control_type_embedding.0.bias", - "control_add_embedding.linear_2.weight": "control_type_embedding.2.weight", - "control_add_embedding.linear_2.bias": "control_type_embedding.2.bias", - } - - # Rename each parameter - name_list = sorted([name for name in state_dict]) - rename_dict = {} - block_id = {"ResnetBlock": -1, "AttentionBlock": -1, "DownSampler": -1, "UpSampler": -1} - last_block_type_with_id = {"ResnetBlock": "", "AttentionBlock": "", "DownSampler": "", "UpSampler": ""} - for name in name_list: - names = name.split(".") - if names[0] in ["conv_in", "conv_norm_out", "conv_out", "task_embedding", "spatial_ch_projs"]: - pass - elif name in controlnet_rename_dict: - names = controlnet_rename_dict[name].split(".") - elif names[0] == "controlnet_down_blocks": - names[0] = "controlnet_blocks" - elif names[0] == "controlnet_mid_block": - names = ["controlnet_blocks", "9", names[-1]] - elif names[0] in ["time_embedding", "add_embedding"]: - if names[0] == "add_embedding": - names[0] = "add_time_embedding" - names[1] = {"linear_1": "0", "linear_2": "2"}[names[1]] - elif names[0] == "control_add_embedding": - names[0] = "control_type_embedding" - elif names[0] == "transformer_layes": - names[0] = "controlnet_transformer" - names.pop(1) - elif names[0] in ["down_blocks", "mid_block", "up_blocks"]: - if names[0] == "mid_block": - names.insert(1, "0") - block_type = {"resnets": "ResnetBlock", "attentions": "AttentionBlock", "downsamplers": "DownSampler", "upsamplers": "UpSampler"}[names[2]] - block_type_with_id = ".".join(names[:4]) - if block_type_with_id != 
last_block_type_with_id[block_type]: - block_id[block_type] += 1 - last_block_type_with_id[block_type] = block_type_with_id - while block_id[block_type] < len(block_types) and block_types[block_id[block_type]] != block_type: - block_id[block_type] += 1 - block_type_with_id = ".".join(names[:4]) - names = ["blocks", str(block_id[block_type])] + names[4:] - if "ff" in names: - ff_index = names.index("ff") - component = ".".join(names[ff_index:ff_index+3]) - component = {"ff.net.0": "act_fn", "ff.net.2": "ff"}[component] - names = names[:ff_index] + [component] + names[ff_index+3:] - if "to_out" in names: - names.pop(names.index("to_out") + 1) - else: - print(name, state_dict[name].shape) - # raise ValueError(f"Unknown parameters: {name}") - rename_dict[name] = ".".join(names) - - # Convert state_dict - state_dict_ = {} - for name, param in state_dict.items(): - if name not in rename_dict: - continue - if ".proj_in." in name or ".proj_out." in name: - param = param.squeeze() - state_dict_[rename_dict[name]] = param - return state_dict_ - - def from_civitai(self, state_dict): - return self.from_diffusers(state_dict) \ No newline at end of file diff --git a/diffsynth/models/sdxl_ipadapter.py b/diffsynth/models/sdxl_ipadapter.py deleted file mode 100644 index d959d3b..0000000 --- a/diffsynth/models/sdxl_ipadapter.py +++ /dev/null @@ -1,122 +0,0 @@ -from .svd_image_encoder import SVDImageEncoder -from transformers import CLIPImageProcessor -import torch - - -class IpAdapterXLCLIPImageEmbedder(SVDImageEncoder): - def __init__(self): - super().__init__(embed_dim=1664, encoder_intermediate_size=8192, projection_dim=1280, num_encoder_layers=48, num_heads=16, head_dim=104) - self.image_processor = CLIPImageProcessor() - - def forward(self, image): - pixel_values = self.image_processor(images=image, return_tensors="pt").pixel_values - pixel_values = pixel_values.to(device=self.embeddings.class_embedding.device, dtype=self.embeddings.class_embedding.dtype) - return 
super().forward(pixel_values) - - -class IpAdapterImageProjModel(torch.nn.Module): - def __init__(self, cross_attention_dim=2048, clip_embeddings_dim=1280, clip_extra_context_tokens=4): - super().__init__() - self.cross_attention_dim = cross_attention_dim - self.clip_extra_context_tokens = clip_extra_context_tokens - self.proj = torch.nn.Linear(clip_embeddings_dim, self.clip_extra_context_tokens * cross_attention_dim) - self.norm = torch.nn.LayerNorm(cross_attention_dim) - - def forward(self, image_embeds): - clip_extra_context_tokens = self.proj(image_embeds).reshape(-1, self.clip_extra_context_tokens, self.cross_attention_dim) - clip_extra_context_tokens = self.norm(clip_extra_context_tokens) - return clip_extra_context_tokens - - -class IpAdapterModule(torch.nn.Module): - def __init__(self, input_dim, output_dim): - super().__init__() - self.to_k_ip = torch.nn.Linear(input_dim, output_dim, bias=False) - self.to_v_ip = torch.nn.Linear(input_dim, output_dim, bias=False) - - def forward(self, hidden_states): - ip_k = self.to_k_ip(hidden_states) - ip_v = self.to_v_ip(hidden_states) - return ip_k, ip_v - - -class SDXLIpAdapter(torch.nn.Module): - def __init__(self): - super().__init__() - shape_list = [(2048, 640)] * 4 + [(2048, 1280)] * 50 + [(2048, 640)] * 6 + [(2048, 1280)] * 10 - self.ipadapter_modules = torch.nn.ModuleList([IpAdapterModule(*shape) for shape in shape_list]) - self.image_proj = IpAdapterImageProjModel() - self.set_full_adapter() - - def set_full_adapter(self): - map_list = sum([ - [(7, i) for i in range(2)], - [(10, i) for i in range(2)], - [(15, i) for i in range(10)], - [(18, i) for i in range(10)], - [(25, i) for i in range(10)], - [(28, i) for i in range(10)], - [(31, i) for i in range(10)], - [(35, i) for i in range(2)], - [(38, i) for i in range(2)], - [(41, i) for i in range(2)], - [(21, i) for i in range(10)], - ], []) - self.call_block_id = {i: j for j, i in enumerate(map_list)} - - def set_less_adapter(self): - map_list = sum([ - [(7, i) 
for i in range(2)], - [(10, i) for i in range(2)], - [(15, i) for i in range(10)], - [(18, i) for i in range(10)], - [(25, i) for i in range(10)], - [(28, i) for i in range(10)], - [(31, i) for i in range(10)], - [(35, i) for i in range(2)], - [(38, i) for i in range(2)], - [(41, i) for i in range(2)], - [(21, i) for i in range(10)], - ], []) - self.call_block_id = {i: j for j, i in enumerate(map_list) if j>=34 and j<44} - - def forward(self, hidden_states, scale=1.0): - hidden_states = self.image_proj(hidden_states) - hidden_states = hidden_states.view(1, -1, hidden_states.shape[-1]) - ip_kv_dict = {} - for (block_id, transformer_id) in self.call_block_id: - ipadapter_id = self.call_block_id[(block_id, transformer_id)] - ip_k, ip_v = self.ipadapter_modules[ipadapter_id](hidden_states) - if block_id not in ip_kv_dict: - ip_kv_dict[block_id] = {} - ip_kv_dict[block_id][transformer_id] = { - "ip_k": ip_k, - "ip_v": ip_v, - "scale": scale - } - return ip_kv_dict - - @staticmethod - def state_dict_converter(): - return SDXLIpAdapterStateDictConverter() - - -class SDXLIpAdapterStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - state_dict_ = {} - for name in state_dict["ip_adapter"]: - names = name.split(".") - layer_id = str(int(names[0]) // 2) - name_ = ".".join(["ipadapter_modules"] + [layer_id] + names[1:]) - state_dict_[name_] = state_dict["ip_adapter"][name] - for name in state_dict["image_proj"]: - name_ = "image_proj." 
+ name - state_dict_[name_] = state_dict["image_proj"][name] - return state_dict_ - - def from_civitai(self, state_dict): - return self.from_diffusers(state_dict) - diff --git a/diffsynth/models/sdxl_motion.py b/diffsynth/models/sdxl_motion.py deleted file mode 100644 index 268c3e9..0000000 --- a/diffsynth/models/sdxl_motion.py +++ /dev/null @@ -1,104 +0,0 @@ -from .sd_motion import TemporalBlock -import torch - - - -class SDXLMotionModel(torch.nn.Module): - def __init__(self): - super().__init__() - self.motion_modules = torch.nn.ModuleList([ - TemporalBlock(8, 320//8, 320, eps=1e-6), - TemporalBlock(8, 320//8, 320, eps=1e-6), - - TemporalBlock(8, 640//8, 640, eps=1e-6), - TemporalBlock(8, 640//8, 640, eps=1e-6), - - TemporalBlock(8, 1280//8, 1280, eps=1e-6), - TemporalBlock(8, 1280//8, 1280, eps=1e-6), - - TemporalBlock(8, 1280//8, 1280, eps=1e-6), - TemporalBlock(8, 1280//8, 1280, eps=1e-6), - TemporalBlock(8, 1280//8, 1280, eps=1e-6), - - TemporalBlock(8, 640//8, 640, eps=1e-6), - TemporalBlock(8, 640//8, 640, eps=1e-6), - TemporalBlock(8, 640//8, 640, eps=1e-6), - - TemporalBlock(8, 320//8, 320, eps=1e-6), - TemporalBlock(8, 320//8, 320, eps=1e-6), - TemporalBlock(8, 320//8, 320, eps=1e-6), - ]) - self.call_block_id = { - 0: 0, - 2: 1, - 7: 2, - 10: 3, - 15: 4, - 18: 5, - 25: 6, - 28: 7, - 31: 8, - 35: 9, - 38: 10, - 41: 11, - 44: 12, - 46: 13, - 48: 14, - } - - def forward(self): - pass - - @staticmethod - def state_dict_converter(): - return SDMotionModelStateDictConverter() - - -class SDMotionModelStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - rename_dict = { - "norm": "norm", - "proj_in": "proj_in", - "transformer_blocks.0.attention_blocks.0.to_q": "transformer_blocks.0.attn1.to_q", - "transformer_blocks.0.attention_blocks.0.to_k": "transformer_blocks.0.attn1.to_k", - "transformer_blocks.0.attention_blocks.0.to_v": "transformer_blocks.0.attn1.to_v", - "transformer_blocks.0.attention_blocks.0.to_out.0": 
"transformer_blocks.0.attn1.to_out", - "transformer_blocks.0.attention_blocks.0.pos_encoder": "transformer_blocks.0.pe1", - "transformer_blocks.0.attention_blocks.1.to_q": "transformer_blocks.0.attn2.to_q", - "transformer_blocks.0.attention_blocks.1.to_k": "transformer_blocks.0.attn2.to_k", - "transformer_blocks.0.attention_blocks.1.to_v": "transformer_blocks.0.attn2.to_v", - "transformer_blocks.0.attention_blocks.1.to_out.0": "transformer_blocks.0.attn2.to_out", - "transformer_blocks.0.attention_blocks.1.pos_encoder": "transformer_blocks.0.pe2", - "transformer_blocks.0.norms.0": "transformer_blocks.0.norm1", - "transformer_blocks.0.norms.1": "transformer_blocks.0.norm2", - "transformer_blocks.0.ff.net.0.proj": "transformer_blocks.0.act_fn.proj", - "transformer_blocks.0.ff.net.2": "transformer_blocks.0.ff", - "transformer_blocks.0.ff_norm": "transformer_blocks.0.norm3", - "proj_out": "proj_out", - } - name_list = sorted([i for i in state_dict if i.startswith("down_blocks.")]) - name_list += sorted([i for i in state_dict if i.startswith("mid_block.")]) - name_list += sorted([i for i in state_dict if i.startswith("up_blocks.")]) - state_dict_ = {} - last_prefix, module_id = "", -1 - for name in name_list: - names = name.split(".") - prefix_index = names.index("temporal_transformer") + 1 - prefix = ".".join(names[:prefix_index]) - if prefix != last_prefix: - last_prefix = prefix - module_id += 1 - middle_name = ".".join(names[prefix_index:-1]) - suffix = names[-1] - if "pos_encoder" in names: - rename = ".".join(["motion_modules", str(module_id), rename_dict[middle_name]]) - else: - rename = ".".join(["motion_modules", str(module_id), rename_dict[middle_name], suffix]) - state_dict_[rename] = state_dict[name] - return state_dict_ - - def from_civitai(self, state_dict): - return self.from_diffusers(state_dict) diff --git a/diffsynth/models/sdxl_text_encoder.py b/diffsynth/models/sdxl_text_encoder.py deleted file mode 100644 index 5d30c7d..0000000 --- 
a/diffsynth/models/sdxl_text_encoder.py +++ /dev/null @@ -1,759 +0,0 @@ -import torch -from .sd_text_encoder import CLIPEncoderLayer - - -class SDXLTextEncoder(torch.nn.Module): - def __init__(self, embed_dim=768, vocab_size=49408, max_position_embeddings=77, num_encoder_layers=11, encoder_intermediate_size=3072): - super().__init__() - - # token_embedding - self.token_embedding = torch.nn.Embedding(vocab_size, embed_dim) - - # position_embeds (This is a fixed tensor) - self.position_embeds = torch.nn.Parameter(torch.zeros(1, max_position_embeddings, embed_dim)) - - # encoders - self.encoders = torch.nn.ModuleList([CLIPEncoderLayer(embed_dim, encoder_intermediate_size) for _ in range(num_encoder_layers)]) - - # attn_mask - self.attn_mask = self.attention_mask(max_position_embeddings) - - # The text encoder is different to that in Stable Diffusion 1.x. - # It does not include final_layer_norm. - - def attention_mask(self, length): - mask = torch.empty(length, length) - mask.fill_(float("-inf")) - mask.triu_(1) - return mask - - def forward(self, input_ids, clip_skip=1): - embeds = self.token_embedding(input_ids) + self.position_embeds - attn_mask = self.attn_mask.to(device=embeds.device, dtype=embeds.dtype) - for encoder_id, encoder in enumerate(self.encoders): - embeds = encoder(embeds, attn_mask=attn_mask) - if encoder_id + clip_skip == len(self.encoders): - break - return embeds - - @staticmethod - def state_dict_converter(): - return SDXLTextEncoderStateDictConverter() - - -class SDXLTextEncoder2(torch.nn.Module): - def __init__(self, embed_dim=1280, vocab_size=49408, max_position_embeddings=77, num_encoder_layers=32, encoder_intermediate_size=5120): - super().__init__() - - # token_embedding - self.token_embedding = torch.nn.Embedding(vocab_size, embed_dim) - - # position_embeds (This is a fixed tensor) - self.position_embeds = torch.nn.Parameter(torch.zeros(1, max_position_embeddings, embed_dim)) - - # encoders - self.encoders = 
torch.nn.ModuleList([CLIPEncoderLayer(embed_dim, encoder_intermediate_size, num_heads=20, head_dim=64, use_quick_gelu=False) for _ in range(num_encoder_layers)]) - - # attn_mask - self.attn_mask = self.attention_mask(max_position_embeddings) - - # final_layer_norm - self.final_layer_norm = torch.nn.LayerNorm(embed_dim) - - # text_projection - self.text_projection = torch.nn.Linear(embed_dim, embed_dim, bias=False) - - def attention_mask(self, length): - mask = torch.empty(length, length) - mask.fill_(float("-inf")) - mask.triu_(1) - return mask - - def forward(self, input_ids, clip_skip=2): - embeds = self.token_embedding(input_ids) + self.position_embeds - attn_mask = self.attn_mask.to(device=embeds.device, dtype=embeds.dtype) - for encoder_id, encoder in enumerate(self.encoders): - embeds = encoder(embeds, attn_mask=attn_mask) - if encoder_id + clip_skip == len(self.encoders): - hidden_states = embeds - embeds = self.final_layer_norm(embeds) - pooled_embeds = embeds[torch.arange(embeds.shape[0]), input_ids.to(dtype=torch.int).argmax(dim=-1)] - pooled_embeds = self.text_projection(pooled_embeds) - return pooled_embeds, hidden_states - - @staticmethod - def state_dict_converter(): - return SDXLTextEncoder2StateDictConverter() - - -class SDXLTextEncoderStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - rename_dict = { - "text_model.embeddings.token_embedding.weight": "token_embedding.weight", - "text_model.embeddings.position_embedding.weight": "position_embeds", - "text_model.final_layer_norm.weight": "final_layer_norm.weight", - "text_model.final_layer_norm.bias": "final_layer_norm.bias" - } - attn_rename_dict = { - "self_attn.q_proj": "attn.to_q", - "self_attn.k_proj": "attn.to_k", - "self_attn.v_proj": "attn.to_v", - "self_attn.out_proj": "attn.to_out", - "layer_norm1": "layer_norm1", - "layer_norm2": "layer_norm2", - "mlp.fc1": "fc1", - "mlp.fc2": "fc2", - } - state_dict_ = {} - for name in state_dict: - if name in 
rename_dict: - param = state_dict[name] - if name == "text_model.embeddings.position_embedding.weight": - param = param.reshape((1, param.shape[0], param.shape[1])) - state_dict_[rename_dict[name]] = param - elif name.startswith("text_model.encoder.layers."): - param = state_dict[name] - names = name.split(".") - layer_id, layer_type, tail = names[3], ".".join(names[4:-1]), names[-1] - name_ = ".".join(["encoders", layer_id, attn_rename_dict[layer_type], tail]) - state_dict_[name_] = param - return state_dict_ - - def from_civitai(self, state_dict): - rename_dict = { - "conditioner.embedders.0.transformer.text_model.embeddings.position_embedding.weight": "position_embeds", - "conditioner.embedders.0.transformer.text_model.embeddings.token_embedding.weight": "token_embedding.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.0.layer_norm1.bias": "encoders.0.layer_norm1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.0.layer_norm1.weight": "encoders.0.layer_norm1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.0.layer_norm2.bias": "encoders.0.layer_norm2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.0.layer_norm2.weight": "encoders.0.layer_norm2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.0.mlp.fc1.bias": "encoders.0.fc1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.0.mlp.fc1.weight": "encoders.0.fc1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.0.mlp.fc2.bias": "encoders.0.fc2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.0.mlp.fc2.weight": "encoders.0.fc2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.0.self_attn.k_proj.bias": "encoders.0.attn.to_k.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.0.self_attn.k_proj.weight": "encoders.0.attn.to_k.weight", - 
"conditioner.embedders.0.transformer.text_model.encoder.layers.0.self_attn.out_proj.bias": "encoders.0.attn.to_out.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.0.self_attn.out_proj.weight": "encoders.0.attn.to_out.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.0.self_attn.q_proj.bias": "encoders.0.attn.to_q.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.0.self_attn.q_proj.weight": "encoders.0.attn.to_q.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.0.self_attn.v_proj.bias": "encoders.0.attn.to_v.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.0.self_attn.v_proj.weight": "encoders.0.attn.to_v.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.1.layer_norm1.bias": "encoders.1.layer_norm1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.1.layer_norm1.weight": "encoders.1.layer_norm1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.1.layer_norm2.bias": "encoders.1.layer_norm2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.1.layer_norm2.weight": "encoders.1.layer_norm2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.1.mlp.fc1.bias": "encoders.1.fc1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.1.mlp.fc1.weight": "encoders.1.fc1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.1.mlp.fc2.bias": "encoders.1.fc2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.1.mlp.fc2.weight": "encoders.1.fc2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.1.self_attn.k_proj.bias": "encoders.1.attn.to_k.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.1.self_attn.k_proj.weight": "encoders.1.attn.to_k.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.1.self_attn.out_proj.bias": 
"encoders.1.attn.to_out.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.1.self_attn.out_proj.weight": "encoders.1.attn.to_out.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.1.self_attn.q_proj.bias": "encoders.1.attn.to_q.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.1.self_attn.q_proj.weight": "encoders.1.attn.to_q.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.1.self_attn.v_proj.bias": "encoders.1.attn.to_v.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.1.self_attn.v_proj.weight": "encoders.1.attn.to_v.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.10.layer_norm1.bias": "encoders.10.layer_norm1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.10.layer_norm1.weight": "encoders.10.layer_norm1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.10.layer_norm2.bias": "encoders.10.layer_norm2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.10.layer_norm2.weight": "encoders.10.layer_norm2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.10.mlp.fc1.bias": "encoders.10.fc1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.10.mlp.fc1.weight": "encoders.10.fc1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.10.mlp.fc2.bias": "encoders.10.fc2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.10.mlp.fc2.weight": "encoders.10.fc2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.10.self_attn.k_proj.bias": "encoders.10.attn.to_k.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.10.self_attn.k_proj.weight": "encoders.10.attn.to_k.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.10.self_attn.out_proj.bias": "encoders.10.attn.to_out.bias", - 
"conditioner.embedders.0.transformer.text_model.encoder.layers.10.self_attn.out_proj.weight": "encoders.10.attn.to_out.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.10.self_attn.q_proj.bias": "encoders.10.attn.to_q.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.10.self_attn.q_proj.weight": "encoders.10.attn.to_q.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.10.self_attn.v_proj.bias": "encoders.10.attn.to_v.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.10.self_attn.v_proj.weight": "encoders.10.attn.to_v.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.2.layer_norm1.bias": "encoders.2.layer_norm1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.2.layer_norm1.weight": "encoders.2.layer_norm1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.2.layer_norm2.bias": "encoders.2.layer_norm2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.2.layer_norm2.weight": "encoders.2.layer_norm2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.2.mlp.fc1.bias": "encoders.2.fc1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.2.mlp.fc1.weight": "encoders.2.fc1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.2.mlp.fc2.bias": "encoders.2.fc2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.2.mlp.fc2.weight": "encoders.2.fc2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.2.self_attn.k_proj.bias": "encoders.2.attn.to_k.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.2.self_attn.k_proj.weight": "encoders.2.attn.to_k.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.2.self_attn.out_proj.bias": "encoders.2.attn.to_out.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.2.self_attn.out_proj.weight": 
"encoders.2.attn.to_out.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.2.self_attn.q_proj.bias": "encoders.2.attn.to_q.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.2.self_attn.q_proj.weight": "encoders.2.attn.to_q.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.2.self_attn.v_proj.bias": "encoders.2.attn.to_v.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.2.self_attn.v_proj.weight": "encoders.2.attn.to_v.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.3.layer_norm1.bias": "encoders.3.layer_norm1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.3.layer_norm1.weight": "encoders.3.layer_norm1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.3.layer_norm2.bias": "encoders.3.layer_norm2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.3.layer_norm2.weight": "encoders.3.layer_norm2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.3.mlp.fc1.bias": "encoders.3.fc1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.3.mlp.fc1.weight": "encoders.3.fc1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.3.mlp.fc2.bias": "encoders.3.fc2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.3.mlp.fc2.weight": "encoders.3.fc2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.3.self_attn.k_proj.bias": "encoders.3.attn.to_k.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.3.self_attn.k_proj.weight": "encoders.3.attn.to_k.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.3.self_attn.out_proj.bias": "encoders.3.attn.to_out.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.3.self_attn.out_proj.weight": "encoders.3.attn.to_out.weight", - 
"conditioner.embedders.0.transformer.text_model.encoder.layers.3.self_attn.q_proj.bias": "encoders.3.attn.to_q.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.3.self_attn.q_proj.weight": "encoders.3.attn.to_q.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.3.self_attn.v_proj.bias": "encoders.3.attn.to_v.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.3.self_attn.v_proj.weight": "encoders.3.attn.to_v.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.4.layer_norm1.bias": "encoders.4.layer_norm1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.4.layer_norm1.weight": "encoders.4.layer_norm1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.4.layer_norm2.bias": "encoders.4.layer_norm2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.4.layer_norm2.weight": "encoders.4.layer_norm2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.4.mlp.fc1.bias": "encoders.4.fc1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.4.mlp.fc1.weight": "encoders.4.fc1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.4.mlp.fc2.bias": "encoders.4.fc2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.4.mlp.fc2.weight": "encoders.4.fc2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.4.self_attn.k_proj.bias": "encoders.4.attn.to_k.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.4.self_attn.k_proj.weight": "encoders.4.attn.to_k.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.4.self_attn.out_proj.bias": "encoders.4.attn.to_out.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.4.self_attn.out_proj.weight": "encoders.4.attn.to_out.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.4.self_attn.q_proj.bias": 
"encoders.4.attn.to_q.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.4.self_attn.q_proj.weight": "encoders.4.attn.to_q.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.4.self_attn.v_proj.bias": "encoders.4.attn.to_v.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.4.self_attn.v_proj.weight": "encoders.4.attn.to_v.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.5.layer_norm1.bias": "encoders.5.layer_norm1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.5.layer_norm1.weight": "encoders.5.layer_norm1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.5.layer_norm2.bias": "encoders.5.layer_norm2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.5.layer_norm2.weight": "encoders.5.layer_norm2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.5.mlp.fc1.bias": "encoders.5.fc1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.5.mlp.fc1.weight": "encoders.5.fc1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.5.mlp.fc2.bias": "encoders.5.fc2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.5.mlp.fc2.weight": "encoders.5.fc2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.5.self_attn.k_proj.bias": "encoders.5.attn.to_k.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.5.self_attn.k_proj.weight": "encoders.5.attn.to_k.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.5.self_attn.out_proj.bias": "encoders.5.attn.to_out.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.5.self_attn.out_proj.weight": "encoders.5.attn.to_out.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.5.self_attn.q_proj.bias": "encoders.5.attn.to_q.bias", - 
"conditioner.embedders.0.transformer.text_model.encoder.layers.5.self_attn.q_proj.weight": "encoders.5.attn.to_q.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.5.self_attn.v_proj.bias": "encoders.5.attn.to_v.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.5.self_attn.v_proj.weight": "encoders.5.attn.to_v.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.6.layer_norm1.bias": "encoders.6.layer_norm1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.6.layer_norm1.weight": "encoders.6.layer_norm1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.6.layer_norm2.bias": "encoders.6.layer_norm2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.6.layer_norm2.weight": "encoders.6.layer_norm2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.6.mlp.fc1.bias": "encoders.6.fc1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.6.mlp.fc1.weight": "encoders.6.fc1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.6.mlp.fc2.bias": "encoders.6.fc2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.6.mlp.fc2.weight": "encoders.6.fc2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.6.self_attn.k_proj.bias": "encoders.6.attn.to_k.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.6.self_attn.k_proj.weight": "encoders.6.attn.to_k.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.6.self_attn.out_proj.bias": "encoders.6.attn.to_out.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.6.self_attn.out_proj.weight": "encoders.6.attn.to_out.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.6.self_attn.q_proj.bias": "encoders.6.attn.to_q.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.6.self_attn.q_proj.weight": 
"encoders.6.attn.to_q.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.6.self_attn.v_proj.bias": "encoders.6.attn.to_v.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.6.self_attn.v_proj.weight": "encoders.6.attn.to_v.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.7.layer_norm1.bias": "encoders.7.layer_norm1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.7.layer_norm1.weight": "encoders.7.layer_norm1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.7.layer_norm2.bias": "encoders.7.layer_norm2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.7.layer_norm2.weight": "encoders.7.layer_norm2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.7.mlp.fc1.bias": "encoders.7.fc1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.7.mlp.fc1.weight": "encoders.7.fc1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.7.mlp.fc2.bias": "encoders.7.fc2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.7.mlp.fc2.weight": "encoders.7.fc2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.7.self_attn.k_proj.bias": "encoders.7.attn.to_k.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.7.self_attn.k_proj.weight": "encoders.7.attn.to_k.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.7.self_attn.out_proj.bias": "encoders.7.attn.to_out.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.7.self_attn.out_proj.weight": "encoders.7.attn.to_out.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.7.self_attn.q_proj.bias": "encoders.7.attn.to_q.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.7.self_attn.q_proj.weight": "encoders.7.attn.to_q.weight", - 
"conditioner.embedders.0.transformer.text_model.encoder.layers.7.self_attn.v_proj.bias": "encoders.7.attn.to_v.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.7.self_attn.v_proj.weight": "encoders.7.attn.to_v.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.8.layer_norm1.bias": "encoders.8.layer_norm1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.8.layer_norm1.weight": "encoders.8.layer_norm1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.8.layer_norm2.bias": "encoders.8.layer_norm2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.8.layer_norm2.weight": "encoders.8.layer_norm2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.8.mlp.fc1.bias": "encoders.8.fc1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.8.mlp.fc1.weight": "encoders.8.fc1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.8.mlp.fc2.bias": "encoders.8.fc2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.8.mlp.fc2.weight": "encoders.8.fc2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.8.self_attn.k_proj.bias": "encoders.8.attn.to_k.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.8.self_attn.k_proj.weight": "encoders.8.attn.to_k.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.8.self_attn.out_proj.bias": "encoders.8.attn.to_out.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.8.self_attn.out_proj.weight": "encoders.8.attn.to_out.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.8.self_attn.q_proj.bias": "encoders.8.attn.to_q.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.8.self_attn.q_proj.weight": "encoders.8.attn.to_q.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.8.self_attn.v_proj.bias": 
"encoders.8.attn.to_v.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.8.self_attn.v_proj.weight": "encoders.8.attn.to_v.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.9.layer_norm1.bias": "encoders.9.layer_norm1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.9.layer_norm1.weight": "encoders.9.layer_norm1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.9.layer_norm2.bias": "encoders.9.layer_norm2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.9.layer_norm2.weight": "encoders.9.layer_norm2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.9.mlp.fc1.bias": "encoders.9.fc1.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.9.mlp.fc1.weight": "encoders.9.fc1.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.9.mlp.fc2.bias": "encoders.9.fc2.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.9.mlp.fc2.weight": "encoders.9.fc2.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.9.self_attn.k_proj.bias": "encoders.9.attn.to_k.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.9.self_attn.k_proj.weight": "encoders.9.attn.to_k.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.9.self_attn.out_proj.bias": "encoders.9.attn.to_out.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.9.self_attn.out_proj.weight": "encoders.9.attn.to_out.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.9.self_attn.q_proj.bias": "encoders.9.attn.to_q.bias", - "conditioner.embedders.0.transformer.text_model.encoder.layers.9.self_attn.q_proj.weight": "encoders.9.attn.to_q.weight", - "conditioner.embedders.0.transformer.text_model.encoder.layers.9.self_attn.v_proj.bias": "encoders.9.attn.to_v.bias", - 
"conditioner.embedders.0.transformer.text_model.encoder.layers.9.self_attn.v_proj.weight": "encoders.9.attn.to_v.weight", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if name == "conditioner.embedders.0.transformer.text_model.embeddings.position_embedding.weight": - param = param.reshape((1, param.shape[0], param.shape[1])) - state_dict_[rename_dict[name]] = param - return state_dict_ - - -class SDXLTextEncoder2StateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - rename_dict = { - "text_model.embeddings.token_embedding.weight": "token_embedding.weight", - "text_model.embeddings.position_embedding.weight": "position_embeds", - "text_model.final_layer_norm.weight": "final_layer_norm.weight", - "text_model.final_layer_norm.bias": "final_layer_norm.bias", - "text_projection.weight": "text_projection.weight" - } - attn_rename_dict = { - "self_attn.q_proj": "attn.to_q", - "self_attn.k_proj": "attn.to_k", - "self_attn.v_proj": "attn.to_v", - "self_attn.out_proj": "attn.to_out", - "layer_norm1": "layer_norm1", - "layer_norm2": "layer_norm2", - "mlp.fc1": "fc1", - "mlp.fc2": "fc2", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if name == "text_model.embeddings.position_embedding.weight": - param = param.reshape((1, param.shape[0], param.shape[1])) - state_dict_[rename_dict[name]] = param - elif name.startswith("text_model.encoder.layers."): - param = state_dict[name] - names = name.split(".") - layer_id, layer_type, tail = names[3], ".".join(names[4:-1]), names[-1] - name_ = ".".join(["encoders", layer_id, attn_rename_dict[layer_type], tail]) - state_dict_[name_] = param - return state_dict_ - - def from_civitai(self, state_dict): - rename_dict = { - "conditioner.embedders.1.model.ln_final.bias": "final_layer_norm.bias", - "conditioner.embedders.1.model.ln_final.weight": "final_layer_norm.weight", - 
"conditioner.embedders.1.model.positional_embedding": "position_embeds", - "conditioner.embedders.1.model.token_embedding.weight": "token_embedding.weight", - "conditioner.embedders.1.model.transformer.resblocks.0.attn.in_proj_bias": ['encoders.0.attn.to_q.bias', 'encoders.0.attn.to_k.bias', 'encoders.0.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.0.attn.in_proj_weight": ['encoders.0.attn.to_q.weight', 'encoders.0.attn.to_k.weight', 'encoders.0.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.0.attn.out_proj.bias": "encoders.0.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.0.attn.out_proj.weight": "encoders.0.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.0.ln_1.bias": "encoders.0.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.0.ln_1.weight": "encoders.0.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.0.ln_2.bias": "encoders.0.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.0.ln_2.weight": "encoders.0.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.0.mlp.c_fc.bias": "encoders.0.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.0.mlp.c_fc.weight": "encoders.0.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.0.mlp.c_proj.bias": "encoders.0.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.0.mlp.c_proj.weight": "encoders.0.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.1.attn.in_proj_bias": ['encoders.1.attn.to_q.bias', 'encoders.1.attn.to_k.bias', 'encoders.1.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.1.attn.in_proj_weight": ['encoders.1.attn.to_q.weight', 'encoders.1.attn.to_k.weight', 'encoders.1.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.1.attn.out_proj.bias": "encoders.1.attn.to_out.bias", - 
"conditioner.embedders.1.model.transformer.resblocks.1.attn.out_proj.weight": "encoders.1.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.1.ln_1.bias": "encoders.1.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.1.ln_1.weight": "encoders.1.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.1.ln_2.bias": "encoders.1.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.1.ln_2.weight": "encoders.1.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.1.mlp.c_fc.bias": "encoders.1.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.1.mlp.c_fc.weight": "encoders.1.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.1.mlp.c_proj.bias": "encoders.1.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.1.mlp.c_proj.weight": "encoders.1.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.10.attn.in_proj_bias": ['encoders.10.attn.to_q.bias', 'encoders.10.attn.to_k.bias', 'encoders.10.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.10.attn.in_proj_weight": ['encoders.10.attn.to_q.weight', 'encoders.10.attn.to_k.weight', 'encoders.10.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.10.attn.out_proj.bias": "encoders.10.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.10.attn.out_proj.weight": "encoders.10.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.10.ln_1.bias": "encoders.10.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.10.ln_1.weight": "encoders.10.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.10.ln_2.bias": "encoders.10.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.10.ln_2.weight": "encoders.10.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.10.mlp.c_fc.bias": 
"encoders.10.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.10.mlp.c_fc.weight": "encoders.10.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.10.mlp.c_proj.bias": "encoders.10.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.10.mlp.c_proj.weight": "encoders.10.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.11.attn.in_proj_bias": ['encoders.11.attn.to_q.bias', 'encoders.11.attn.to_k.bias', 'encoders.11.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.11.attn.in_proj_weight": ['encoders.11.attn.to_q.weight', 'encoders.11.attn.to_k.weight', 'encoders.11.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.11.attn.out_proj.bias": "encoders.11.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.11.attn.out_proj.weight": "encoders.11.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.11.ln_1.bias": "encoders.11.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.11.ln_1.weight": "encoders.11.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.11.ln_2.bias": "encoders.11.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.11.ln_2.weight": "encoders.11.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.11.mlp.c_fc.bias": "encoders.11.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.11.mlp.c_fc.weight": "encoders.11.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.11.mlp.c_proj.bias": "encoders.11.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.11.mlp.c_proj.weight": "encoders.11.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.12.attn.in_proj_bias": ['encoders.12.attn.to_q.bias', 'encoders.12.attn.to_k.bias', 'encoders.12.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.12.attn.in_proj_weight": 
['encoders.12.attn.to_q.weight', 'encoders.12.attn.to_k.weight', 'encoders.12.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.12.attn.out_proj.bias": "encoders.12.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.12.attn.out_proj.weight": "encoders.12.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.12.ln_1.bias": "encoders.12.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.12.ln_1.weight": "encoders.12.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.12.ln_2.bias": "encoders.12.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.12.ln_2.weight": "encoders.12.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.12.mlp.c_fc.bias": "encoders.12.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.12.mlp.c_fc.weight": "encoders.12.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.12.mlp.c_proj.bias": "encoders.12.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.12.mlp.c_proj.weight": "encoders.12.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.13.attn.in_proj_bias": ['encoders.13.attn.to_q.bias', 'encoders.13.attn.to_k.bias', 'encoders.13.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.13.attn.in_proj_weight": ['encoders.13.attn.to_q.weight', 'encoders.13.attn.to_k.weight', 'encoders.13.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.13.attn.out_proj.bias": "encoders.13.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.13.attn.out_proj.weight": "encoders.13.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.13.ln_1.bias": "encoders.13.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.13.ln_1.weight": "encoders.13.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.13.ln_2.bias": 
"encoders.13.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.13.ln_2.weight": "encoders.13.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.13.mlp.c_fc.bias": "encoders.13.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.13.mlp.c_fc.weight": "encoders.13.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.13.mlp.c_proj.bias": "encoders.13.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.13.mlp.c_proj.weight": "encoders.13.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.14.attn.in_proj_bias": ['encoders.14.attn.to_q.bias', 'encoders.14.attn.to_k.bias', 'encoders.14.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.14.attn.in_proj_weight": ['encoders.14.attn.to_q.weight', 'encoders.14.attn.to_k.weight', 'encoders.14.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.14.attn.out_proj.bias": "encoders.14.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.14.attn.out_proj.weight": "encoders.14.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.14.ln_1.bias": "encoders.14.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.14.ln_1.weight": "encoders.14.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.14.ln_2.bias": "encoders.14.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.14.ln_2.weight": "encoders.14.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.14.mlp.c_fc.bias": "encoders.14.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.14.mlp.c_fc.weight": "encoders.14.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.14.mlp.c_proj.bias": "encoders.14.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.14.mlp.c_proj.weight": "encoders.14.fc2.weight", - 
"conditioner.embedders.1.model.transformer.resblocks.15.attn.in_proj_bias": ['encoders.15.attn.to_q.bias', 'encoders.15.attn.to_k.bias', 'encoders.15.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.15.attn.in_proj_weight": ['encoders.15.attn.to_q.weight', 'encoders.15.attn.to_k.weight', 'encoders.15.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.15.attn.out_proj.bias": "encoders.15.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.15.attn.out_proj.weight": "encoders.15.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.15.ln_1.bias": "encoders.15.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.15.ln_1.weight": "encoders.15.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.15.ln_2.bias": "encoders.15.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.15.ln_2.weight": "encoders.15.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.15.mlp.c_fc.bias": "encoders.15.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.15.mlp.c_fc.weight": "encoders.15.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.15.mlp.c_proj.bias": "encoders.15.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.15.mlp.c_proj.weight": "encoders.15.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.16.attn.in_proj_bias": ['encoders.16.attn.to_q.bias', 'encoders.16.attn.to_k.bias', 'encoders.16.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.16.attn.in_proj_weight": ['encoders.16.attn.to_q.weight', 'encoders.16.attn.to_k.weight', 'encoders.16.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.16.attn.out_proj.bias": "encoders.16.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.16.attn.out_proj.weight": "encoders.16.attn.to_out.weight", - 
"conditioner.embedders.1.model.transformer.resblocks.16.ln_1.bias": "encoders.16.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.16.ln_1.weight": "encoders.16.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.16.ln_2.bias": "encoders.16.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.16.ln_2.weight": "encoders.16.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.16.mlp.c_fc.bias": "encoders.16.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.16.mlp.c_fc.weight": "encoders.16.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.16.mlp.c_proj.bias": "encoders.16.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.16.mlp.c_proj.weight": "encoders.16.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.17.attn.in_proj_bias": ['encoders.17.attn.to_q.bias', 'encoders.17.attn.to_k.bias', 'encoders.17.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.17.attn.in_proj_weight": ['encoders.17.attn.to_q.weight', 'encoders.17.attn.to_k.weight', 'encoders.17.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.17.attn.out_proj.bias": "encoders.17.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.17.attn.out_proj.weight": "encoders.17.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.17.ln_1.bias": "encoders.17.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.17.ln_1.weight": "encoders.17.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.17.ln_2.bias": "encoders.17.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.17.ln_2.weight": "encoders.17.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.17.mlp.c_fc.bias": "encoders.17.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.17.mlp.c_fc.weight": 
"encoders.17.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.17.mlp.c_proj.bias": "encoders.17.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.17.mlp.c_proj.weight": "encoders.17.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.18.attn.in_proj_bias": ['encoders.18.attn.to_q.bias', 'encoders.18.attn.to_k.bias', 'encoders.18.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.18.attn.in_proj_weight": ['encoders.18.attn.to_q.weight', 'encoders.18.attn.to_k.weight', 'encoders.18.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.18.attn.out_proj.bias": "encoders.18.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.18.attn.out_proj.weight": "encoders.18.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.18.ln_1.bias": "encoders.18.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.18.ln_1.weight": "encoders.18.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.18.ln_2.bias": "encoders.18.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.18.ln_2.weight": "encoders.18.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.18.mlp.c_fc.bias": "encoders.18.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.18.mlp.c_fc.weight": "encoders.18.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.18.mlp.c_proj.bias": "encoders.18.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.18.mlp.c_proj.weight": "encoders.18.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.19.attn.in_proj_bias": ['encoders.19.attn.to_q.bias', 'encoders.19.attn.to_k.bias', 'encoders.19.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.19.attn.in_proj_weight": ['encoders.19.attn.to_q.weight', 'encoders.19.attn.to_k.weight', 'encoders.19.attn.to_v.weight'], - 
"conditioner.embedders.1.model.transformer.resblocks.19.attn.out_proj.bias": "encoders.19.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.19.attn.out_proj.weight": "encoders.19.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.19.ln_1.bias": "encoders.19.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.19.ln_1.weight": "encoders.19.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.19.ln_2.bias": "encoders.19.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.19.ln_2.weight": "encoders.19.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.19.mlp.c_fc.bias": "encoders.19.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.19.mlp.c_fc.weight": "encoders.19.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.19.mlp.c_proj.bias": "encoders.19.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.19.mlp.c_proj.weight": "encoders.19.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.2.attn.in_proj_bias": ['encoders.2.attn.to_q.bias', 'encoders.2.attn.to_k.bias', 'encoders.2.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.2.attn.in_proj_weight": ['encoders.2.attn.to_q.weight', 'encoders.2.attn.to_k.weight', 'encoders.2.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.2.attn.out_proj.bias": "encoders.2.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.2.attn.out_proj.weight": "encoders.2.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.2.ln_1.bias": "encoders.2.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.2.ln_1.weight": "encoders.2.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.2.ln_2.bias": "encoders.2.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.2.ln_2.weight": 
"encoders.2.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.2.mlp.c_fc.bias": "encoders.2.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.2.mlp.c_fc.weight": "encoders.2.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.2.mlp.c_proj.bias": "encoders.2.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.2.mlp.c_proj.weight": "encoders.2.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.20.attn.in_proj_bias": ['encoders.20.attn.to_q.bias', 'encoders.20.attn.to_k.bias', 'encoders.20.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.20.attn.in_proj_weight": ['encoders.20.attn.to_q.weight', 'encoders.20.attn.to_k.weight', 'encoders.20.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.20.attn.out_proj.bias": "encoders.20.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.20.attn.out_proj.weight": "encoders.20.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.20.ln_1.bias": "encoders.20.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.20.ln_1.weight": "encoders.20.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.20.ln_2.bias": "encoders.20.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.20.ln_2.weight": "encoders.20.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.20.mlp.c_fc.bias": "encoders.20.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.20.mlp.c_fc.weight": "encoders.20.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.20.mlp.c_proj.bias": "encoders.20.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.20.mlp.c_proj.weight": "encoders.20.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.21.attn.in_proj_bias": ['encoders.21.attn.to_q.bias', 'encoders.21.attn.to_k.bias', 'encoders.21.attn.to_v.bias'], - 
"conditioner.embedders.1.model.transformer.resblocks.21.attn.in_proj_weight": ['encoders.21.attn.to_q.weight', 'encoders.21.attn.to_k.weight', 'encoders.21.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.21.attn.out_proj.bias": "encoders.21.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.21.attn.out_proj.weight": "encoders.21.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.21.ln_1.bias": "encoders.21.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.21.ln_1.weight": "encoders.21.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.21.ln_2.bias": "encoders.21.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.21.ln_2.weight": "encoders.21.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.21.mlp.c_fc.bias": "encoders.21.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.21.mlp.c_fc.weight": "encoders.21.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.21.mlp.c_proj.bias": "encoders.21.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.21.mlp.c_proj.weight": "encoders.21.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.22.attn.in_proj_bias": ['encoders.22.attn.to_q.bias', 'encoders.22.attn.to_k.bias', 'encoders.22.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.22.attn.in_proj_weight": ['encoders.22.attn.to_q.weight', 'encoders.22.attn.to_k.weight', 'encoders.22.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.22.attn.out_proj.bias": "encoders.22.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.22.attn.out_proj.weight": "encoders.22.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.22.ln_1.bias": "encoders.22.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.22.ln_1.weight": 
"encoders.22.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.22.ln_2.bias": "encoders.22.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.22.ln_2.weight": "encoders.22.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.22.mlp.c_fc.bias": "encoders.22.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.22.mlp.c_fc.weight": "encoders.22.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.22.mlp.c_proj.bias": "encoders.22.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.22.mlp.c_proj.weight": "encoders.22.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.23.attn.in_proj_bias": ['encoders.23.attn.to_q.bias', 'encoders.23.attn.to_k.bias', 'encoders.23.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.23.attn.in_proj_weight": ['encoders.23.attn.to_q.weight', 'encoders.23.attn.to_k.weight', 'encoders.23.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.23.attn.out_proj.bias": "encoders.23.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.23.attn.out_proj.weight": "encoders.23.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.23.ln_1.bias": "encoders.23.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.23.ln_1.weight": "encoders.23.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.23.ln_2.bias": "encoders.23.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.23.ln_2.weight": "encoders.23.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.23.mlp.c_fc.bias": "encoders.23.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.23.mlp.c_fc.weight": "encoders.23.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.23.mlp.c_proj.bias": "encoders.23.fc2.bias", - 
"conditioner.embedders.1.model.transformer.resblocks.23.mlp.c_proj.weight": "encoders.23.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.24.attn.in_proj_bias": ['encoders.24.attn.to_q.bias', 'encoders.24.attn.to_k.bias', 'encoders.24.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.24.attn.in_proj_weight": ['encoders.24.attn.to_q.weight', 'encoders.24.attn.to_k.weight', 'encoders.24.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.24.attn.out_proj.bias": "encoders.24.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.24.attn.out_proj.weight": "encoders.24.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.24.ln_1.bias": "encoders.24.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.24.ln_1.weight": "encoders.24.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.24.ln_2.bias": "encoders.24.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.24.ln_2.weight": "encoders.24.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.24.mlp.c_fc.bias": "encoders.24.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.24.mlp.c_fc.weight": "encoders.24.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.24.mlp.c_proj.bias": "encoders.24.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.24.mlp.c_proj.weight": "encoders.24.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.25.attn.in_proj_bias": ['encoders.25.attn.to_q.bias', 'encoders.25.attn.to_k.bias', 'encoders.25.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.25.attn.in_proj_weight": ['encoders.25.attn.to_q.weight', 'encoders.25.attn.to_k.weight', 'encoders.25.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.25.attn.out_proj.bias": "encoders.25.attn.to_out.bias", - 
"conditioner.embedders.1.model.transformer.resblocks.25.attn.out_proj.weight": "encoders.25.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.25.ln_1.bias": "encoders.25.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.25.ln_1.weight": "encoders.25.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.25.ln_2.bias": "encoders.25.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.25.ln_2.weight": "encoders.25.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.25.mlp.c_fc.bias": "encoders.25.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.25.mlp.c_fc.weight": "encoders.25.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.25.mlp.c_proj.bias": "encoders.25.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.25.mlp.c_proj.weight": "encoders.25.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.26.attn.in_proj_bias": ['encoders.26.attn.to_q.bias', 'encoders.26.attn.to_k.bias', 'encoders.26.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.26.attn.in_proj_weight": ['encoders.26.attn.to_q.weight', 'encoders.26.attn.to_k.weight', 'encoders.26.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.26.attn.out_proj.bias": "encoders.26.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.26.attn.out_proj.weight": "encoders.26.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.26.ln_1.bias": "encoders.26.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.26.ln_1.weight": "encoders.26.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.26.ln_2.bias": "encoders.26.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.26.ln_2.weight": "encoders.26.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.26.mlp.c_fc.bias": 
"encoders.26.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.26.mlp.c_fc.weight": "encoders.26.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.26.mlp.c_proj.bias": "encoders.26.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.26.mlp.c_proj.weight": "encoders.26.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.27.attn.in_proj_bias": ['encoders.27.attn.to_q.bias', 'encoders.27.attn.to_k.bias', 'encoders.27.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.27.attn.in_proj_weight": ['encoders.27.attn.to_q.weight', 'encoders.27.attn.to_k.weight', 'encoders.27.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.27.attn.out_proj.bias": "encoders.27.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.27.attn.out_proj.weight": "encoders.27.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.27.ln_1.bias": "encoders.27.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.27.ln_1.weight": "encoders.27.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.27.ln_2.bias": "encoders.27.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.27.ln_2.weight": "encoders.27.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.27.mlp.c_fc.bias": "encoders.27.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.27.mlp.c_fc.weight": "encoders.27.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.27.mlp.c_proj.bias": "encoders.27.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.27.mlp.c_proj.weight": "encoders.27.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.28.attn.in_proj_bias": ['encoders.28.attn.to_q.bias', 'encoders.28.attn.to_k.bias', 'encoders.28.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.28.attn.in_proj_weight": 
['encoders.28.attn.to_q.weight', 'encoders.28.attn.to_k.weight', 'encoders.28.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.28.attn.out_proj.bias": "encoders.28.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.28.attn.out_proj.weight": "encoders.28.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.28.ln_1.bias": "encoders.28.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.28.ln_1.weight": "encoders.28.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.28.ln_2.bias": "encoders.28.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.28.ln_2.weight": "encoders.28.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.28.mlp.c_fc.bias": "encoders.28.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.28.mlp.c_fc.weight": "encoders.28.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.28.mlp.c_proj.bias": "encoders.28.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.28.mlp.c_proj.weight": "encoders.28.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.29.attn.in_proj_bias": ['encoders.29.attn.to_q.bias', 'encoders.29.attn.to_k.bias', 'encoders.29.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.29.attn.in_proj_weight": ['encoders.29.attn.to_q.weight', 'encoders.29.attn.to_k.weight', 'encoders.29.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.29.attn.out_proj.bias": "encoders.29.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.29.attn.out_proj.weight": "encoders.29.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.29.ln_1.bias": "encoders.29.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.29.ln_1.weight": "encoders.29.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.29.ln_2.bias": 
"encoders.29.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.29.ln_2.weight": "encoders.29.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.29.mlp.c_fc.bias": "encoders.29.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.29.mlp.c_fc.weight": "encoders.29.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.29.mlp.c_proj.bias": "encoders.29.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.29.mlp.c_proj.weight": "encoders.29.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.3.attn.in_proj_bias": ['encoders.3.attn.to_q.bias', 'encoders.3.attn.to_k.bias', 'encoders.3.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.3.attn.in_proj_weight": ['encoders.3.attn.to_q.weight', 'encoders.3.attn.to_k.weight', 'encoders.3.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.3.attn.out_proj.bias": "encoders.3.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.3.attn.out_proj.weight": "encoders.3.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.3.ln_1.bias": "encoders.3.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.3.ln_1.weight": "encoders.3.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.3.ln_2.bias": "encoders.3.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.3.ln_2.weight": "encoders.3.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.3.mlp.c_fc.bias": "encoders.3.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.3.mlp.c_fc.weight": "encoders.3.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.3.mlp.c_proj.bias": "encoders.3.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.3.mlp.c_proj.weight": "encoders.3.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.30.attn.in_proj_bias": 
['encoders.30.attn.to_q.bias', 'encoders.30.attn.to_k.bias', 'encoders.30.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.30.attn.in_proj_weight": ['encoders.30.attn.to_q.weight', 'encoders.30.attn.to_k.weight', 'encoders.30.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.30.attn.out_proj.bias": "encoders.30.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.30.attn.out_proj.weight": "encoders.30.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.30.ln_1.bias": "encoders.30.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.30.ln_1.weight": "encoders.30.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.30.ln_2.bias": "encoders.30.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.30.ln_2.weight": "encoders.30.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.30.mlp.c_fc.bias": "encoders.30.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.30.mlp.c_fc.weight": "encoders.30.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.30.mlp.c_proj.bias": "encoders.30.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.30.mlp.c_proj.weight": "encoders.30.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.31.attn.in_proj_bias": ['encoders.31.attn.to_q.bias', 'encoders.31.attn.to_k.bias', 'encoders.31.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.31.attn.in_proj_weight": ['encoders.31.attn.to_q.weight', 'encoders.31.attn.to_k.weight', 'encoders.31.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.31.attn.out_proj.bias": "encoders.31.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.31.attn.out_proj.weight": "encoders.31.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.31.ln_1.bias": "encoders.31.layer_norm1.bias", - 
"conditioner.embedders.1.model.transformer.resblocks.31.ln_1.weight": "encoders.31.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.31.ln_2.bias": "encoders.31.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.31.ln_2.weight": "encoders.31.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.31.mlp.c_fc.bias": "encoders.31.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.31.mlp.c_fc.weight": "encoders.31.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.31.mlp.c_proj.bias": "encoders.31.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.31.mlp.c_proj.weight": "encoders.31.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.4.attn.in_proj_bias": ['encoders.4.attn.to_q.bias', 'encoders.4.attn.to_k.bias', 'encoders.4.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.4.attn.in_proj_weight": ['encoders.4.attn.to_q.weight', 'encoders.4.attn.to_k.weight', 'encoders.4.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.4.attn.out_proj.bias": "encoders.4.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.4.attn.out_proj.weight": "encoders.4.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.4.ln_1.bias": "encoders.4.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.4.ln_1.weight": "encoders.4.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.4.ln_2.bias": "encoders.4.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.4.ln_2.weight": "encoders.4.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.4.mlp.c_fc.bias": "encoders.4.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.4.mlp.c_fc.weight": "encoders.4.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.4.mlp.c_proj.bias": "encoders.4.fc2.bias", - 
"conditioner.embedders.1.model.transformer.resblocks.4.mlp.c_proj.weight": "encoders.4.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.5.attn.in_proj_bias": ['encoders.5.attn.to_q.bias', 'encoders.5.attn.to_k.bias', 'encoders.5.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.5.attn.in_proj_weight": ['encoders.5.attn.to_q.weight', 'encoders.5.attn.to_k.weight', 'encoders.5.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.5.attn.out_proj.bias": "encoders.5.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.5.attn.out_proj.weight": "encoders.5.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.5.ln_1.bias": "encoders.5.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.5.ln_1.weight": "encoders.5.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.5.ln_2.bias": "encoders.5.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.5.ln_2.weight": "encoders.5.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.5.mlp.c_fc.bias": "encoders.5.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.5.mlp.c_fc.weight": "encoders.5.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.5.mlp.c_proj.bias": "encoders.5.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.5.mlp.c_proj.weight": "encoders.5.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.6.attn.in_proj_bias": ['encoders.6.attn.to_q.bias', 'encoders.6.attn.to_k.bias', 'encoders.6.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.6.attn.in_proj_weight": ['encoders.6.attn.to_q.weight', 'encoders.6.attn.to_k.weight', 'encoders.6.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.6.attn.out_proj.bias": "encoders.6.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.6.attn.out_proj.weight": 
"encoders.6.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.6.ln_1.bias": "encoders.6.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.6.ln_1.weight": "encoders.6.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.6.ln_2.bias": "encoders.6.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.6.ln_2.weight": "encoders.6.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.6.mlp.c_fc.bias": "encoders.6.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.6.mlp.c_fc.weight": "encoders.6.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.6.mlp.c_proj.bias": "encoders.6.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.6.mlp.c_proj.weight": "encoders.6.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.7.attn.in_proj_bias": ['encoders.7.attn.to_q.bias', 'encoders.7.attn.to_k.bias', 'encoders.7.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.7.attn.in_proj_weight": ['encoders.7.attn.to_q.weight', 'encoders.7.attn.to_k.weight', 'encoders.7.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.7.attn.out_proj.bias": "encoders.7.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.7.attn.out_proj.weight": "encoders.7.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.7.ln_1.bias": "encoders.7.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.7.ln_1.weight": "encoders.7.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.7.ln_2.bias": "encoders.7.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.7.ln_2.weight": "encoders.7.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.7.mlp.c_fc.bias": "encoders.7.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.7.mlp.c_fc.weight": 
"encoders.7.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.7.mlp.c_proj.bias": "encoders.7.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.7.mlp.c_proj.weight": "encoders.7.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.8.attn.in_proj_bias": ['encoders.8.attn.to_q.bias', 'encoders.8.attn.to_k.bias', 'encoders.8.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.8.attn.in_proj_weight": ['encoders.8.attn.to_q.weight', 'encoders.8.attn.to_k.weight', 'encoders.8.attn.to_v.weight'], - "conditioner.embedders.1.model.transformer.resblocks.8.attn.out_proj.bias": "encoders.8.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.8.attn.out_proj.weight": "encoders.8.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.8.ln_1.bias": "encoders.8.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.8.ln_1.weight": "encoders.8.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.8.ln_2.bias": "encoders.8.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.8.ln_2.weight": "encoders.8.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.8.mlp.c_fc.bias": "encoders.8.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.8.mlp.c_fc.weight": "encoders.8.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.8.mlp.c_proj.bias": "encoders.8.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.8.mlp.c_proj.weight": "encoders.8.fc2.weight", - "conditioner.embedders.1.model.transformer.resblocks.9.attn.in_proj_bias": ['encoders.9.attn.to_q.bias', 'encoders.9.attn.to_k.bias', 'encoders.9.attn.to_v.bias'], - "conditioner.embedders.1.model.transformer.resblocks.9.attn.in_proj_weight": ['encoders.9.attn.to_q.weight', 'encoders.9.attn.to_k.weight', 'encoders.9.attn.to_v.weight'], - 
"conditioner.embedders.1.model.transformer.resblocks.9.attn.out_proj.bias": "encoders.9.attn.to_out.bias", - "conditioner.embedders.1.model.transformer.resblocks.9.attn.out_proj.weight": "encoders.9.attn.to_out.weight", - "conditioner.embedders.1.model.transformer.resblocks.9.ln_1.bias": "encoders.9.layer_norm1.bias", - "conditioner.embedders.1.model.transformer.resblocks.9.ln_1.weight": "encoders.9.layer_norm1.weight", - "conditioner.embedders.1.model.transformer.resblocks.9.ln_2.bias": "encoders.9.layer_norm2.bias", - "conditioner.embedders.1.model.transformer.resblocks.9.ln_2.weight": "encoders.9.layer_norm2.weight", - "conditioner.embedders.1.model.transformer.resblocks.9.mlp.c_fc.bias": "encoders.9.fc1.bias", - "conditioner.embedders.1.model.transformer.resblocks.9.mlp.c_fc.weight": "encoders.9.fc1.weight", - "conditioner.embedders.1.model.transformer.resblocks.9.mlp.c_proj.bias": "encoders.9.fc2.bias", - "conditioner.embedders.1.model.transformer.resblocks.9.mlp.c_proj.weight": "encoders.9.fc2.weight", - "conditioner.embedders.1.model.text_projection": "text_projection.weight", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if name == "conditioner.embedders.1.model.positional_embedding": - param = param.reshape((1, param.shape[0], param.shape[1])) - elif name == "conditioner.embedders.1.model.text_projection": - param = param.T - if isinstance(rename_dict[name], str): - state_dict_[rename_dict[name]] = param - else: - length = param.shape[0] // 3 - for i, rename in enumerate(rename_dict[name]): - state_dict_[rename] = param[i*length: i*length+length] - return state_dict_ \ No newline at end of file diff --git a/diffsynth/models/sdxl_unet.py b/diffsynth/models/sdxl_unet.py deleted file mode 100644 index 9bc63e6..0000000 --- a/diffsynth/models/sdxl_unet.py +++ /dev/null @@ -1,1901 +0,0 @@ -import torch -from .sd_unet import Timesteps, ResnetBlock, AttentionBlock, PushBlock, PopBlock, DownSampler, 
UpSampler - - -class SDXLUNet(torch.nn.Module): - def __init__(self, is_kolors=False): - super().__init__() - self.time_proj = Timesteps(320) - self.time_embedding = torch.nn.Sequential( - torch.nn.Linear(320, 1280), - torch.nn.SiLU(), - torch.nn.Linear(1280, 1280) - ) - self.add_time_proj = Timesteps(256) - self.add_time_embedding = torch.nn.Sequential( - torch.nn.Linear(5632 if is_kolors else 2816, 1280), - torch.nn.SiLU(), - torch.nn.Linear(1280, 1280) - ) - self.conv_in = torch.nn.Conv2d(4, 320, kernel_size=3, padding=1) - self.text_intermediate_proj = torch.nn.Linear(4096, 2048) if is_kolors else None - - self.blocks = torch.nn.ModuleList([ - # DownBlock2D - ResnetBlock(320, 320, 1280), - PushBlock(), - ResnetBlock(320, 320, 1280), - PushBlock(), - DownSampler(320), - PushBlock(), - # CrossAttnDownBlock2D - ResnetBlock(320, 640, 1280), - AttentionBlock(10, 64, 640, 2, 2048), - PushBlock(), - ResnetBlock(640, 640, 1280), - AttentionBlock(10, 64, 640, 2, 2048), - PushBlock(), - DownSampler(640), - PushBlock(), - # CrossAttnDownBlock2D - ResnetBlock(640, 1280, 1280), - AttentionBlock(20, 64, 1280, 10, 2048), - PushBlock(), - ResnetBlock(1280, 1280, 1280), - AttentionBlock(20, 64, 1280, 10, 2048), - PushBlock(), - # UNetMidBlock2DCrossAttn - ResnetBlock(1280, 1280, 1280), - AttentionBlock(20, 64, 1280, 10, 2048), - ResnetBlock(1280, 1280, 1280), - # CrossAttnUpBlock2D - PopBlock(), - ResnetBlock(2560, 1280, 1280), - AttentionBlock(20, 64, 1280, 10, 2048), - PopBlock(), - ResnetBlock(2560, 1280, 1280), - AttentionBlock(20, 64, 1280, 10, 2048), - PopBlock(), - ResnetBlock(1920, 1280, 1280), - AttentionBlock(20, 64, 1280, 10, 2048), - UpSampler(1280), - # CrossAttnUpBlock2D - PopBlock(), - ResnetBlock(1920, 640, 1280), - AttentionBlock(10, 64, 640, 2, 2048), - PopBlock(), - ResnetBlock(1280, 640, 1280), - AttentionBlock(10, 64, 640, 2, 2048), - PopBlock(), - ResnetBlock(960, 640, 1280), - AttentionBlock(10, 64, 640, 2, 2048), - UpSampler(640), - # UpBlock2D - 
PopBlock(), - ResnetBlock(960, 320, 1280), - PopBlock(), - ResnetBlock(640, 320, 1280), - PopBlock(), - ResnetBlock(640, 320, 1280) - ]) - - self.conv_norm_out = torch.nn.GroupNorm(num_channels=320, num_groups=32, eps=1e-5) - self.conv_act = torch.nn.SiLU() - self.conv_out = torch.nn.Conv2d(320, 4, kernel_size=3, padding=1) - - self.is_kolors = is_kolors - - def forward( - self, - sample, timestep, encoder_hidden_states, add_time_id, add_text_embeds, - tiled=False, tile_size=64, tile_stride=8, - use_gradient_checkpointing=False, - **kwargs - ): - # 1. time - t_emb = self.time_proj(timestep).to(sample.dtype) - t_emb = self.time_embedding(t_emb) - - time_embeds = self.add_time_proj(add_time_id) - time_embeds = time_embeds.reshape((add_text_embeds.shape[0], -1)) - add_embeds = torch.concat([add_text_embeds, time_embeds], dim=-1) - add_embeds = add_embeds.to(sample.dtype) - add_embeds = self.add_time_embedding(add_embeds) - - time_emb = t_emb + add_embeds - - # 2. pre-process - height, width = sample.shape[2], sample.shape[3] - hidden_states = self.conv_in(sample) - text_emb = encoder_hidden_states if self.text_intermediate_proj is None else self.text_intermediate_proj(encoder_hidden_states) - res_stack = [hidden_states] - - # 3. blocks - def create_custom_forward(module): - def custom_forward(*inputs): - return module(*inputs) - return custom_forward - for i, block in enumerate(self.blocks): - if self.training and use_gradient_checkpointing and not (isinstance(block, PushBlock) or isinstance(block, PopBlock)): - hidden_states, time_emb, text_emb, res_stack = torch.utils.checkpoint.checkpoint( - create_custom_forward(block), - hidden_states, time_emb, text_emb, res_stack, - use_reentrant=False, - ) - else: - hidden_states, time_emb, text_emb, res_stack = block( - hidden_states, time_emb, text_emb, res_stack, - tiled=tiled, tile_size=tile_size, tile_stride=tile_stride - ) - - # 4. 
output - hidden_states = self.conv_norm_out(hidden_states) - hidden_states = self.conv_act(hidden_states) - hidden_states = self.conv_out(hidden_states) - - return hidden_states - - @staticmethod - def state_dict_converter(): - return SDXLUNetStateDictConverter() - - -class SDXLUNetStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - # architecture - block_types = [ - 'ResnetBlock', 'PushBlock', 'ResnetBlock', 'PushBlock', 'DownSampler', 'PushBlock', - 'ResnetBlock', 'AttentionBlock', 'PushBlock', 'ResnetBlock', 'AttentionBlock', 'PushBlock', 'DownSampler', 'PushBlock', - 'ResnetBlock', 'AttentionBlock', 'PushBlock', 'ResnetBlock', 'AttentionBlock', 'PushBlock', - 'ResnetBlock', 'AttentionBlock', 'ResnetBlock', - 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'UpSampler', - 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'PopBlock', 'ResnetBlock', 'AttentionBlock', 'UpSampler', - 'PopBlock', 'ResnetBlock', 'PopBlock', 'ResnetBlock', 'PopBlock', 'ResnetBlock' - ] - - # Rename each parameter - name_list = sorted([name for name in state_dict]) - rename_dict = {} - block_id = {"ResnetBlock": -1, "AttentionBlock": -1, "DownSampler": -1, "UpSampler": -1} - last_block_type_with_id = {"ResnetBlock": "", "AttentionBlock": "", "DownSampler": "", "UpSampler": ""} - for name in name_list: - names = name.split(".") - if names[0] in ["conv_in", "conv_norm_out", "conv_out"]: - pass - elif names[0] in ["encoder_hid_proj"]: - names[0] = "text_intermediate_proj" - elif names[0] in ["time_embedding", "add_embedding"]: - if names[0] == "add_embedding": - names[0] = "add_time_embedding" - names[1] = {"linear_1": "0", "linear_2": "2"}[names[1]] - elif names[0] in ["down_blocks", "mid_block", "up_blocks"]: - if names[0] == "mid_block": - names.insert(1, "0") - block_type = {"resnets": "ResnetBlock", "attentions": 
"AttentionBlock", "downsamplers": "DownSampler", "upsamplers": "UpSampler"}[names[2]] - block_type_with_id = ".".join(names[:4]) - if block_type_with_id != last_block_type_with_id[block_type]: - block_id[block_type] += 1 - last_block_type_with_id[block_type] = block_type_with_id - while block_id[block_type] < len(block_types) and block_types[block_id[block_type]] != block_type: - block_id[block_type] += 1 - block_type_with_id = ".".join(names[:4]) - names = ["blocks", str(block_id[block_type])] + names[4:] - if "ff" in names: - ff_index = names.index("ff") - component = ".".join(names[ff_index:ff_index+3]) - component = {"ff.net.0": "act_fn", "ff.net.2": "ff"}[component] - names = names[:ff_index] + [component] + names[ff_index+3:] - if "to_out" in names: - names.pop(names.index("to_out") + 1) - else: - raise ValueError(f"Unknown parameters: {name}") - rename_dict[name] = ".".join(names) - - # Convert state_dict - state_dict_ = {} - for name, param in state_dict.items(): - if ".proj_in." in name or ".proj_out." 
in name: - param = param.squeeze() - state_dict_[rename_dict[name]] = param - if "text_intermediate_proj.weight" in state_dict_: - return state_dict_, {"is_kolors": True} - else: - return state_dict_ - - def from_civitai(self, state_dict): - rename_dict = { - "model.diffusion_model.input_blocks.0.0.bias": "conv_in.bias", - "model.diffusion_model.input_blocks.0.0.weight": "conv_in.weight", - "model.diffusion_model.input_blocks.1.0.emb_layers.1.bias": "blocks.0.time_emb_proj.bias", - "model.diffusion_model.input_blocks.1.0.emb_layers.1.weight": "blocks.0.time_emb_proj.weight", - "model.diffusion_model.input_blocks.1.0.in_layers.0.bias": "blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.1.0.in_layers.0.weight": "blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.1.0.in_layers.2.bias": "blocks.0.conv1.bias", - "model.diffusion_model.input_blocks.1.0.in_layers.2.weight": "blocks.0.conv1.weight", - "model.diffusion_model.input_blocks.1.0.out_layers.0.bias": "blocks.0.norm2.bias", - "model.diffusion_model.input_blocks.1.0.out_layers.0.weight": "blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.1.0.out_layers.3.bias": "blocks.0.conv2.bias", - "model.diffusion_model.input_blocks.1.0.out_layers.3.weight": "blocks.0.conv2.weight", - "model.diffusion_model.input_blocks.2.0.emb_layers.1.bias": "blocks.2.time_emb_proj.bias", - "model.diffusion_model.input_blocks.2.0.emb_layers.1.weight": "blocks.2.time_emb_proj.weight", - "model.diffusion_model.input_blocks.2.0.in_layers.0.bias": "blocks.2.norm1.bias", - "model.diffusion_model.input_blocks.2.0.in_layers.0.weight": "blocks.2.norm1.weight", - "model.diffusion_model.input_blocks.2.0.in_layers.2.bias": "blocks.2.conv1.bias", - "model.diffusion_model.input_blocks.2.0.in_layers.2.weight": "blocks.2.conv1.weight", - "model.diffusion_model.input_blocks.2.0.out_layers.0.bias": "blocks.2.norm2.bias", - "model.diffusion_model.input_blocks.2.0.out_layers.0.weight": "blocks.2.norm2.weight", - 
"model.diffusion_model.input_blocks.2.0.out_layers.3.bias": "blocks.2.conv2.bias", - "model.diffusion_model.input_blocks.2.0.out_layers.3.weight": "blocks.2.conv2.weight", - "model.diffusion_model.input_blocks.3.0.op.bias": "blocks.4.conv.bias", - "model.diffusion_model.input_blocks.3.0.op.weight": "blocks.4.conv.weight", - "model.diffusion_model.input_blocks.4.0.emb_layers.1.bias": "blocks.6.time_emb_proj.bias", - "model.diffusion_model.input_blocks.4.0.emb_layers.1.weight": "blocks.6.time_emb_proj.weight", - "model.diffusion_model.input_blocks.4.0.in_layers.0.bias": "blocks.6.norm1.bias", - "model.diffusion_model.input_blocks.4.0.in_layers.0.weight": "blocks.6.norm1.weight", - "model.diffusion_model.input_blocks.4.0.in_layers.2.bias": "blocks.6.conv1.bias", - "model.diffusion_model.input_blocks.4.0.in_layers.2.weight": "blocks.6.conv1.weight", - "model.diffusion_model.input_blocks.4.0.out_layers.0.bias": "blocks.6.norm2.bias", - "model.diffusion_model.input_blocks.4.0.out_layers.0.weight": "blocks.6.norm2.weight", - "model.diffusion_model.input_blocks.4.0.out_layers.3.bias": "blocks.6.conv2.bias", - "model.diffusion_model.input_blocks.4.0.out_layers.3.weight": "blocks.6.conv2.weight", - "model.diffusion_model.input_blocks.4.0.skip_connection.bias": "blocks.6.conv_shortcut.bias", - "model.diffusion_model.input_blocks.4.0.skip_connection.weight": "blocks.6.conv_shortcut.weight", - "model.diffusion_model.input_blocks.4.1.norm.bias": "blocks.7.norm.bias", - "model.diffusion_model.input_blocks.4.1.norm.weight": "blocks.7.norm.weight", - "model.diffusion_model.input_blocks.4.1.proj_in.bias": "blocks.7.proj_in.bias", - "model.diffusion_model.input_blocks.4.1.proj_in.weight": "blocks.7.proj_in.weight", - "model.diffusion_model.input_blocks.4.1.proj_out.bias": "blocks.7.proj_out.bias", - "model.diffusion_model.input_blocks.4.1.proj_out.weight": "blocks.7.proj_out.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn1.to_k.weight": 
"blocks.7.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.7.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.7.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn1.to_q.weight": "blocks.7.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn1.to_v.weight": "blocks.7.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn2.to_k.weight": "blocks.7.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.7.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.7.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn2.to_q.weight": "blocks.7.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn2.to_v.weight": "blocks.7.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.7.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.7.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.ff.net.2.bias": "blocks.7.transformer_blocks.0.ff.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.ff.net.2.weight": "blocks.7.transformer_blocks.0.ff.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm1.bias": "blocks.7.transformer_blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm1.weight": 
"blocks.7.transformer_blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm2.bias": "blocks.7.transformer_blocks.0.norm2.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm2.weight": "blocks.7.transformer_blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm3.bias": "blocks.7.transformer_blocks.0.norm3.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm3.weight": "blocks.7.transformer_blocks.0.norm3.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.attn1.to_k.weight": "blocks.7.transformer_blocks.1.attn1.to_k.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.attn1.to_out.0.bias": "blocks.7.transformer_blocks.1.attn1.to_out.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.attn1.to_out.0.weight": "blocks.7.transformer_blocks.1.attn1.to_out.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.attn1.to_q.weight": "blocks.7.transformer_blocks.1.attn1.to_q.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.attn1.to_v.weight": "blocks.7.transformer_blocks.1.attn1.to_v.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.attn2.to_k.weight": "blocks.7.transformer_blocks.1.attn2.to_k.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.attn2.to_out.0.bias": "blocks.7.transformer_blocks.1.attn2.to_out.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.attn2.to_out.0.weight": "blocks.7.transformer_blocks.1.attn2.to_out.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.attn2.to_q.weight": "blocks.7.transformer_blocks.1.attn2.to_q.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.attn2.to_v.weight": "blocks.7.transformer_blocks.1.attn2.to_v.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.ff.net.0.proj.bias": 
"blocks.7.transformer_blocks.1.act_fn.proj.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.ff.net.0.proj.weight": "blocks.7.transformer_blocks.1.act_fn.proj.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.ff.net.2.bias": "blocks.7.transformer_blocks.1.ff.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.ff.net.2.weight": "blocks.7.transformer_blocks.1.ff.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.norm1.bias": "blocks.7.transformer_blocks.1.norm1.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.norm1.weight": "blocks.7.transformer_blocks.1.norm1.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.norm2.bias": "blocks.7.transformer_blocks.1.norm2.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.norm2.weight": "blocks.7.transformer_blocks.1.norm2.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.norm3.bias": "blocks.7.transformer_blocks.1.norm3.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.1.norm3.weight": "blocks.7.transformer_blocks.1.norm3.weight", - "model.diffusion_model.input_blocks.5.0.emb_layers.1.bias": "blocks.9.time_emb_proj.bias", - "model.diffusion_model.input_blocks.5.0.emb_layers.1.weight": "blocks.9.time_emb_proj.weight", - "model.diffusion_model.input_blocks.5.0.in_layers.0.bias": "blocks.9.norm1.bias", - "model.diffusion_model.input_blocks.5.0.in_layers.0.weight": "blocks.9.norm1.weight", - "model.diffusion_model.input_blocks.5.0.in_layers.2.bias": "blocks.9.conv1.bias", - "model.diffusion_model.input_blocks.5.0.in_layers.2.weight": "blocks.9.conv1.weight", - "model.diffusion_model.input_blocks.5.0.out_layers.0.bias": "blocks.9.norm2.bias", - "model.diffusion_model.input_blocks.5.0.out_layers.0.weight": "blocks.9.norm2.weight", - "model.diffusion_model.input_blocks.5.0.out_layers.3.bias": "blocks.9.conv2.bias", - 
"model.diffusion_model.input_blocks.5.0.out_layers.3.weight": "blocks.9.conv2.weight", - "model.diffusion_model.input_blocks.5.1.norm.bias": "blocks.10.norm.bias", - "model.diffusion_model.input_blocks.5.1.norm.weight": "blocks.10.norm.weight", - "model.diffusion_model.input_blocks.5.1.proj_in.bias": "blocks.10.proj_in.bias", - "model.diffusion_model.input_blocks.5.1.proj_in.weight": "blocks.10.proj_in.weight", - "model.diffusion_model.input_blocks.5.1.proj_out.bias": "blocks.10.proj_out.bias", - "model.diffusion_model.input_blocks.5.1.proj_out.weight": "blocks.10.proj_out.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn1.to_k.weight": "blocks.10.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.10.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.10.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn1.to_q.weight": "blocks.10.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn1.to_v.weight": "blocks.10.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn2.to_k.weight": "blocks.10.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.10.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.10.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn2.to_q.weight": "blocks.10.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn2.to_v.weight": "blocks.10.transformer_blocks.0.attn2.to_v.weight", - 
"model.diffusion_model.input_blocks.5.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.10.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.10.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.ff.net.2.bias": "blocks.10.transformer_blocks.0.ff.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.ff.net.2.weight": "blocks.10.transformer_blocks.0.ff.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm1.bias": "blocks.10.transformer_blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm1.weight": "blocks.10.transformer_blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm2.bias": "blocks.10.transformer_blocks.0.norm2.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm2.weight": "blocks.10.transformer_blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm3.bias": "blocks.10.transformer_blocks.0.norm3.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm3.weight": "blocks.10.transformer_blocks.0.norm3.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.attn1.to_k.weight": "blocks.10.transformer_blocks.1.attn1.to_k.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.attn1.to_out.0.bias": "blocks.10.transformer_blocks.1.attn1.to_out.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.attn1.to_out.0.weight": "blocks.10.transformer_blocks.1.attn1.to_out.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.attn1.to_q.weight": "blocks.10.transformer_blocks.1.attn1.to_q.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.attn1.to_v.weight": "blocks.10.transformer_blocks.1.attn1.to_v.weight", - 
"model.diffusion_model.input_blocks.5.1.transformer_blocks.1.attn2.to_k.weight": "blocks.10.transformer_blocks.1.attn2.to_k.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.attn2.to_out.0.bias": "blocks.10.transformer_blocks.1.attn2.to_out.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.attn2.to_out.0.weight": "blocks.10.transformer_blocks.1.attn2.to_out.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.attn2.to_q.weight": "blocks.10.transformer_blocks.1.attn2.to_q.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.attn2.to_v.weight": "blocks.10.transformer_blocks.1.attn2.to_v.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.ff.net.0.proj.bias": "blocks.10.transformer_blocks.1.act_fn.proj.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.ff.net.0.proj.weight": "blocks.10.transformer_blocks.1.act_fn.proj.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.ff.net.2.bias": "blocks.10.transformer_blocks.1.ff.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.ff.net.2.weight": "blocks.10.transformer_blocks.1.ff.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.norm1.bias": "blocks.10.transformer_blocks.1.norm1.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.norm1.weight": "blocks.10.transformer_blocks.1.norm1.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.norm2.bias": "blocks.10.transformer_blocks.1.norm2.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.norm2.weight": "blocks.10.transformer_blocks.1.norm2.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.norm3.bias": "blocks.10.transformer_blocks.1.norm3.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.1.norm3.weight": "blocks.10.transformer_blocks.1.norm3.weight", - "model.diffusion_model.input_blocks.6.0.op.bias": 
"blocks.12.conv.bias", - "model.diffusion_model.input_blocks.6.0.op.weight": "blocks.12.conv.weight", - "model.diffusion_model.input_blocks.7.0.emb_layers.1.bias": "blocks.14.time_emb_proj.bias", - "model.diffusion_model.input_blocks.7.0.emb_layers.1.weight": "blocks.14.time_emb_proj.weight", - "model.diffusion_model.input_blocks.7.0.in_layers.0.bias": "blocks.14.norm1.bias", - "model.diffusion_model.input_blocks.7.0.in_layers.0.weight": "blocks.14.norm1.weight", - "model.diffusion_model.input_blocks.7.0.in_layers.2.bias": "blocks.14.conv1.bias", - "model.diffusion_model.input_blocks.7.0.in_layers.2.weight": "blocks.14.conv1.weight", - "model.diffusion_model.input_blocks.7.0.out_layers.0.bias": "blocks.14.norm2.bias", - "model.diffusion_model.input_blocks.7.0.out_layers.0.weight": "blocks.14.norm2.weight", - "model.diffusion_model.input_blocks.7.0.out_layers.3.bias": "blocks.14.conv2.bias", - "model.diffusion_model.input_blocks.7.0.out_layers.3.weight": "blocks.14.conv2.weight", - "model.diffusion_model.input_blocks.7.0.skip_connection.bias": "blocks.14.conv_shortcut.bias", - "model.diffusion_model.input_blocks.7.0.skip_connection.weight": "blocks.14.conv_shortcut.weight", - "model.diffusion_model.input_blocks.7.1.norm.bias": "blocks.15.norm.bias", - "model.diffusion_model.input_blocks.7.1.norm.weight": "blocks.15.norm.weight", - "model.diffusion_model.input_blocks.7.1.proj_in.bias": "blocks.15.proj_in.bias", - "model.diffusion_model.input_blocks.7.1.proj_in.weight": "blocks.15.proj_in.weight", - "model.diffusion_model.input_blocks.7.1.proj_out.bias": "blocks.15.proj_out.bias", - "model.diffusion_model.input_blocks.7.1.proj_out.weight": "blocks.15.proj_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn1.to_k.weight": "blocks.15.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.15.transformer_blocks.0.attn1.to_out.bias", - 
"model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.15.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn1.to_q.weight": "blocks.15.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn1.to_v.weight": "blocks.15.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn2.to_k.weight": "blocks.15.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.15.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.15.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn2.to_q.weight": "blocks.15.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn2.to_v.weight": "blocks.15.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.15.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.15.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.ff.net.2.bias": "blocks.15.transformer_blocks.0.ff.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.ff.net.2.weight": "blocks.15.transformer_blocks.0.ff.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm1.bias": "blocks.15.transformer_blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm1.weight": "blocks.15.transformer_blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm2.bias": "blocks.15.transformer_blocks.0.norm2.bias", - 
"model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm2.weight": "blocks.15.transformer_blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm3.bias": "blocks.15.transformer_blocks.0.norm3.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm3.weight": "blocks.15.transformer_blocks.0.norm3.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.attn1.to_k.weight": "blocks.15.transformer_blocks.1.attn1.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.attn1.to_out.0.bias": "blocks.15.transformer_blocks.1.attn1.to_out.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.attn1.to_out.0.weight": "blocks.15.transformer_blocks.1.attn1.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.attn1.to_q.weight": "blocks.15.transformer_blocks.1.attn1.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.attn1.to_v.weight": "blocks.15.transformer_blocks.1.attn1.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.attn2.to_k.weight": "blocks.15.transformer_blocks.1.attn2.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.attn2.to_out.0.bias": "blocks.15.transformer_blocks.1.attn2.to_out.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.attn2.to_out.0.weight": "blocks.15.transformer_blocks.1.attn2.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.attn2.to_q.weight": "blocks.15.transformer_blocks.1.attn2.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.attn2.to_v.weight": "blocks.15.transformer_blocks.1.attn2.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.ff.net.0.proj.bias": "blocks.15.transformer_blocks.1.act_fn.proj.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.ff.net.0.proj.weight": 
"blocks.15.transformer_blocks.1.act_fn.proj.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.ff.net.2.bias": "blocks.15.transformer_blocks.1.ff.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.ff.net.2.weight": "blocks.15.transformer_blocks.1.ff.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.norm1.bias": "blocks.15.transformer_blocks.1.norm1.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.norm1.weight": "blocks.15.transformer_blocks.1.norm1.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.norm2.bias": "blocks.15.transformer_blocks.1.norm2.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.norm2.weight": "blocks.15.transformer_blocks.1.norm2.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.norm3.bias": "blocks.15.transformer_blocks.1.norm3.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.1.norm3.weight": "blocks.15.transformer_blocks.1.norm3.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.attn1.to_k.weight": "blocks.15.transformer_blocks.2.attn1.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.attn1.to_out.0.bias": "blocks.15.transformer_blocks.2.attn1.to_out.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.attn1.to_out.0.weight": "blocks.15.transformer_blocks.2.attn1.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.attn1.to_q.weight": "blocks.15.transformer_blocks.2.attn1.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.attn1.to_v.weight": "blocks.15.transformer_blocks.2.attn1.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.attn2.to_k.weight": "blocks.15.transformer_blocks.2.attn2.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.attn2.to_out.0.bias": "blocks.15.transformer_blocks.2.attn2.to_out.bias", - 
"model.diffusion_model.input_blocks.7.1.transformer_blocks.2.attn2.to_out.0.weight": "blocks.15.transformer_blocks.2.attn2.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.attn2.to_q.weight": "blocks.15.transformer_blocks.2.attn2.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.attn2.to_v.weight": "blocks.15.transformer_blocks.2.attn2.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.ff.net.0.proj.bias": "blocks.15.transformer_blocks.2.act_fn.proj.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.ff.net.0.proj.weight": "blocks.15.transformer_blocks.2.act_fn.proj.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.ff.net.2.bias": "blocks.15.transformer_blocks.2.ff.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.ff.net.2.weight": "blocks.15.transformer_blocks.2.ff.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.norm1.bias": "blocks.15.transformer_blocks.2.norm1.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.norm1.weight": "blocks.15.transformer_blocks.2.norm1.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.norm2.bias": "blocks.15.transformer_blocks.2.norm2.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.norm2.weight": "blocks.15.transformer_blocks.2.norm2.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.norm3.bias": "blocks.15.transformer_blocks.2.norm3.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.2.norm3.weight": "blocks.15.transformer_blocks.2.norm3.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.attn1.to_k.weight": "blocks.15.transformer_blocks.3.attn1.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.attn1.to_out.0.bias": "blocks.15.transformer_blocks.3.attn1.to_out.bias", - 
"model.diffusion_model.input_blocks.7.1.transformer_blocks.3.attn1.to_out.0.weight": "blocks.15.transformer_blocks.3.attn1.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.attn1.to_q.weight": "blocks.15.transformer_blocks.3.attn1.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.attn1.to_v.weight": "blocks.15.transformer_blocks.3.attn1.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.attn2.to_k.weight": "blocks.15.transformer_blocks.3.attn2.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.attn2.to_out.0.bias": "blocks.15.transformer_blocks.3.attn2.to_out.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.attn2.to_out.0.weight": "blocks.15.transformer_blocks.3.attn2.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.attn2.to_q.weight": "blocks.15.transformer_blocks.3.attn2.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.attn2.to_v.weight": "blocks.15.transformer_blocks.3.attn2.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.ff.net.0.proj.bias": "blocks.15.transformer_blocks.3.act_fn.proj.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.ff.net.0.proj.weight": "blocks.15.transformer_blocks.3.act_fn.proj.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.ff.net.2.bias": "blocks.15.transformer_blocks.3.ff.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.ff.net.2.weight": "blocks.15.transformer_blocks.3.ff.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.norm1.bias": "blocks.15.transformer_blocks.3.norm1.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.norm1.weight": "blocks.15.transformer_blocks.3.norm1.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.norm2.bias": "blocks.15.transformer_blocks.3.norm2.bias", - 
"model.diffusion_model.input_blocks.7.1.transformer_blocks.3.norm2.weight": "blocks.15.transformer_blocks.3.norm2.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.norm3.bias": "blocks.15.transformer_blocks.3.norm3.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.3.norm3.weight": "blocks.15.transformer_blocks.3.norm3.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.attn1.to_k.weight": "blocks.15.transformer_blocks.4.attn1.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.attn1.to_out.0.bias": "blocks.15.transformer_blocks.4.attn1.to_out.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.attn1.to_out.0.weight": "blocks.15.transformer_blocks.4.attn1.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.attn1.to_q.weight": "blocks.15.transformer_blocks.4.attn1.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.attn1.to_v.weight": "blocks.15.transformer_blocks.4.attn1.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.attn2.to_k.weight": "blocks.15.transformer_blocks.4.attn2.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.attn2.to_out.0.bias": "blocks.15.transformer_blocks.4.attn2.to_out.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.attn2.to_out.0.weight": "blocks.15.transformer_blocks.4.attn2.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.attn2.to_q.weight": "blocks.15.transformer_blocks.4.attn2.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.attn2.to_v.weight": "blocks.15.transformer_blocks.4.attn2.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.ff.net.0.proj.bias": "blocks.15.transformer_blocks.4.act_fn.proj.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.ff.net.0.proj.weight": 
"blocks.15.transformer_blocks.4.act_fn.proj.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.ff.net.2.bias": "blocks.15.transformer_blocks.4.ff.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.ff.net.2.weight": "blocks.15.transformer_blocks.4.ff.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.norm1.bias": "blocks.15.transformer_blocks.4.norm1.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.norm1.weight": "blocks.15.transformer_blocks.4.norm1.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.norm2.bias": "blocks.15.transformer_blocks.4.norm2.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.norm2.weight": "blocks.15.transformer_blocks.4.norm2.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.norm3.bias": "blocks.15.transformer_blocks.4.norm3.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.4.norm3.weight": "blocks.15.transformer_blocks.4.norm3.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.attn1.to_k.weight": "blocks.15.transformer_blocks.5.attn1.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.attn1.to_out.0.bias": "blocks.15.transformer_blocks.5.attn1.to_out.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.attn1.to_out.0.weight": "blocks.15.transformer_blocks.5.attn1.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.attn1.to_q.weight": "blocks.15.transformer_blocks.5.attn1.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.attn1.to_v.weight": "blocks.15.transformer_blocks.5.attn1.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.attn2.to_k.weight": "blocks.15.transformer_blocks.5.attn2.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.attn2.to_out.0.bias": "blocks.15.transformer_blocks.5.attn2.to_out.bias", - 
"model.diffusion_model.input_blocks.7.1.transformer_blocks.5.attn2.to_out.0.weight": "blocks.15.transformer_blocks.5.attn2.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.attn2.to_q.weight": "blocks.15.transformer_blocks.5.attn2.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.attn2.to_v.weight": "blocks.15.transformer_blocks.5.attn2.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.ff.net.0.proj.bias": "blocks.15.transformer_blocks.5.act_fn.proj.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.ff.net.0.proj.weight": "blocks.15.transformer_blocks.5.act_fn.proj.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.ff.net.2.bias": "blocks.15.transformer_blocks.5.ff.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.ff.net.2.weight": "blocks.15.transformer_blocks.5.ff.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.norm1.bias": "blocks.15.transformer_blocks.5.norm1.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.norm1.weight": "blocks.15.transformer_blocks.5.norm1.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.norm2.bias": "blocks.15.transformer_blocks.5.norm2.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.norm2.weight": "blocks.15.transformer_blocks.5.norm2.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.norm3.bias": "blocks.15.transformer_blocks.5.norm3.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.5.norm3.weight": "blocks.15.transformer_blocks.5.norm3.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.attn1.to_k.weight": "blocks.15.transformer_blocks.6.attn1.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.attn1.to_out.0.bias": "blocks.15.transformer_blocks.6.attn1.to_out.bias", - 
"model.diffusion_model.input_blocks.7.1.transformer_blocks.6.attn1.to_out.0.weight": "blocks.15.transformer_blocks.6.attn1.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.attn1.to_q.weight": "blocks.15.transformer_blocks.6.attn1.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.attn1.to_v.weight": "blocks.15.transformer_blocks.6.attn1.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.attn2.to_k.weight": "blocks.15.transformer_blocks.6.attn2.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.attn2.to_out.0.bias": "blocks.15.transformer_blocks.6.attn2.to_out.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.attn2.to_out.0.weight": "blocks.15.transformer_blocks.6.attn2.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.attn2.to_q.weight": "blocks.15.transformer_blocks.6.attn2.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.attn2.to_v.weight": "blocks.15.transformer_blocks.6.attn2.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.ff.net.0.proj.bias": "blocks.15.transformer_blocks.6.act_fn.proj.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.ff.net.0.proj.weight": "blocks.15.transformer_blocks.6.act_fn.proj.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.ff.net.2.bias": "blocks.15.transformer_blocks.6.ff.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.ff.net.2.weight": "blocks.15.transformer_blocks.6.ff.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.norm1.bias": "blocks.15.transformer_blocks.6.norm1.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.norm1.weight": "blocks.15.transformer_blocks.6.norm1.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.norm2.bias": "blocks.15.transformer_blocks.6.norm2.bias", - 
"model.diffusion_model.input_blocks.7.1.transformer_blocks.6.norm2.weight": "blocks.15.transformer_blocks.6.norm2.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.norm3.bias": "blocks.15.transformer_blocks.6.norm3.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.6.norm3.weight": "blocks.15.transformer_blocks.6.norm3.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.attn1.to_k.weight": "blocks.15.transformer_blocks.7.attn1.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.attn1.to_out.0.bias": "blocks.15.transformer_blocks.7.attn1.to_out.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.attn1.to_out.0.weight": "blocks.15.transformer_blocks.7.attn1.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.attn1.to_q.weight": "blocks.15.transformer_blocks.7.attn1.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.attn1.to_v.weight": "blocks.15.transformer_blocks.7.attn1.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.attn2.to_k.weight": "blocks.15.transformer_blocks.7.attn2.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.attn2.to_out.0.bias": "blocks.15.transformer_blocks.7.attn2.to_out.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.attn2.to_out.0.weight": "blocks.15.transformer_blocks.7.attn2.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.attn2.to_q.weight": "blocks.15.transformer_blocks.7.attn2.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.attn2.to_v.weight": "blocks.15.transformer_blocks.7.attn2.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.ff.net.0.proj.bias": "blocks.15.transformer_blocks.7.act_fn.proj.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.ff.net.0.proj.weight": 
"blocks.15.transformer_blocks.7.act_fn.proj.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.ff.net.2.bias": "blocks.15.transformer_blocks.7.ff.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.ff.net.2.weight": "blocks.15.transformer_blocks.7.ff.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.norm1.bias": "blocks.15.transformer_blocks.7.norm1.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.norm1.weight": "blocks.15.transformer_blocks.7.norm1.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.norm2.bias": "blocks.15.transformer_blocks.7.norm2.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.norm2.weight": "blocks.15.transformer_blocks.7.norm2.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.norm3.bias": "blocks.15.transformer_blocks.7.norm3.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.7.norm3.weight": "blocks.15.transformer_blocks.7.norm3.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.attn1.to_k.weight": "blocks.15.transformer_blocks.8.attn1.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.attn1.to_out.0.bias": "blocks.15.transformer_blocks.8.attn1.to_out.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.attn1.to_out.0.weight": "blocks.15.transformer_blocks.8.attn1.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.attn1.to_q.weight": "blocks.15.transformer_blocks.8.attn1.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.attn1.to_v.weight": "blocks.15.transformer_blocks.8.attn1.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.attn2.to_k.weight": "blocks.15.transformer_blocks.8.attn2.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.attn2.to_out.0.bias": "blocks.15.transformer_blocks.8.attn2.to_out.bias", - 
"model.diffusion_model.input_blocks.7.1.transformer_blocks.8.attn2.to_out.0.weight": "blocks.15.transformer_blocks.8.attn2.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.attn2.to_q.weight": "blocks.15.transformer_blocks.8.attn2.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.attn2.to_v.weight": "blocks.15.transformer_blocks.8.attn2.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.ff.net.0.proj.bias": "blocks.15.transformer_blocks.8.act_fn.proj.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.ff.net.0.proj.weight": "blocks.15.transformer_blocks.8.act_fn.proj.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.ff.net.2.bias": "blocks.15.transformer_blocks.8.ff.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.ff.net.2.weight": "blocks.15.transformer_blocks.8.ff.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.norm1.bias": "blocks.15.transformer_blocks.8.norm1.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.norm1.weight": "blocks.15.transformer_blocks.8.norm1.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.norm2.bias": "blocks.15.transformer_blocks.8.norm2.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.norm2.weight": "blocks.15.transformer_blocks.8.norm2.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.norm3.bias": "blocks.15.transformer_blocks.8.norm3.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.8.norm3.weight": "blocks.15.transformer_blocks.8.norm3.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.attn1.to_k.weight": "blocks.15.transformer_blocks.9.attn1.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.attn1.to_out.0.bias": "blocks.15.transformer_blocks.9.attn1.to_out.bias", - 
"model.diffusion_model.input_blocks.7.1.transformer_blocks.9.attn1.to_out.0.weight": "blocks.15.transformer_blocks.9.attn1.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.attn1.to_q.weight": "blocks.15.transformer_blocks.9.attn1.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.attn1.to_v.weight": "blocks.15.transformer_blocks.9.attn1.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.attn2.to_k.weight": "blocks.15.transformer_blocks.9.attn2.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.attn2.to_out.0.bias": "blocks.15.transformer_blocks.9.attn2.to_out.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.attn2.to_out.0.weight": "blocks.15.transformer_blocks.9.attn2.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.attn2.to_q.weight": "blocks.15.transformer_blocks.9.attn2.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.attn2.to_v.weight": "blocks.15.transformer_blocks.9.attn2.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.ff.net.0.proj.bias": "blocks.15.transformer_blocks.9.act_fn.proj.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.ff.net.0.proj.weight": "blocks.15.transformer_blocks.9.act_fn.proj.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.ff.net.2.bias": "blocks.15.transformer_blocks.9.ff.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.ff.net.2.weight": "blocks.15.transformer_blocks.9.ff.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.norm1.bias": "blocks.15.transformer_blocks.9.norm1.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.norm1.weight": "blocks.15.transformer_blocks.9.norm1.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.norm2.bias": "blocks.15.transformer_blocks.9.norm2.bias", - 
"model.diffusion_model.input_blocks.7.1.transformer_blocks.9.norm2.weight": "blocks.15.transformer_blocks.9.norm2.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.norm3.bias": "blocks.15.transformer_blocks.9.norm3.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.9.norm3.weight": "blocks.15.transformer_blocks.9.norm3.weight", - "model.diffusion_model.input_blocks.8.0.emb_layers.1.bias": "blocks.17.time_emb_proj.bias", - "model.diffusion_model.input_blocks.8.0.emb_layers.1.weight": "blocks.17.time_emb_proj.weight", - "model.diffusion_model.input_blocks.8.0.in_layers.0.bias": "blocks.17.norm1.bias", - "model.diffusion_model.input_blocks.8.0.in_layers.0.weight": "blocks.17.norm1.weight", - "model.diffusion_model.input_blocks.8.0.in_layers.2.bias": "blocks.17.conv1.bias", - "model.diffusion_model.input_blocks.8.0.in_layers.2.weight": "blocks.17.conv1.weight", - "model.diffusion_model.input_blocks.8.0.out_layers.0.bias": "blocks.17.norm2.bias", - "model.diffusion_model.input_blocks.8.0.out_layers.0.weight": "blocks.17.norm2.weight", - "model.diffusion_model.input_blocks.8.0.out_layers.3.bias": "blocks.17.conv2.bias", - "model.diffusion_model.input_blocks.8.0.out_layers.3.weight": "blocks.17.conv2.weight", - "model.diffusion_model.input_blocks.8.1.norm.bias": "blocks.18.norm.bias", - "model.diffusion_model.input_blocks.8.1.norm.weight": "blocks.18.norm.weight", - "model.diffusion_model.input_blocks.8.1.proj_in.bias": "blocks.18.proj_in.bias", - "model.diffusion_model.input_blocks.8.1.proj_in.weight": "blocks.18.proj_in.weight", - "model.diffusion_model.input_blocks.8.1.proj_out.bias": "blocks.18.proj_out.bias", - "model.diffusion_model.input_blocks.8.1.proj_out.weight": "blocks.18.proj_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn1.to_k.weight": "blocks.18.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn1.to_out.0.bias": 
"blocks.18.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.18.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn1.to_q.weight": "blocks.18.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn1.to_v.weight": "blocks.18.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn2.to_k.weight": "blocks.18.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.18.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.18.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn2.to_q.weight": "blocks.18.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn2.to_v.weight": "blocks.18.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.18.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.18.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.ff.net.2.bias": "blocks.18.transformer_blocks.0.ff.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.ff.net.2.weight": "blocks.18.transformer_blocks.0.ff.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm1.bias": "blocks.18.transformer_blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm1.weight": "blocks.18.transformer_blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm2.bias": 
"blocks.18.transformer_blocks.0.norm2.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm2.weight": "blocks.18.transformer_blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm3.bias": "blocks.18.transformer_blocks.0.norm3.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm3.weight": "blocks.18.transformer_blocks.0.norm3.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.attn1.to_k.weight": "blocks.18.transformer_blocks.1.attn1.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.attn1.to_out.0.bias": "blocks.18.transformer_blocks.1.attn1.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.attn1.to_out.0.weight": "blocks.18.transformer_blocks.1.attn1.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.attn1.to_q.weight": "blocks.18.transformer_blocks.1.attn1.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.attn1.to_v.weight": "blocks.18.transformer_blocks.1.attn1.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.attn2.to_k.weight": "blocks.18.transformer_blocks.1.attn2.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.attn2.to_out.0.bias": "blocks.18.transformer_blocks.1.attn2.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.attn2.to_out.0.weight": "blocks.18.transformer_blocks.1.attn2.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.attn2.to_q.weight": "blocks.18.transformer_blocks.1.attn2.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.attn2.to_v.weight": "blocks.18.transformer_blocks.1.attn2.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.ff.net.0.proj.bias": "blocks.18.transformer_blocks.1.act_fn.proj.bias", - 
"model.diffusion_model.input_blocks.8.1.transformer_blocks.1.ff.net.0.proj.weight": "blocks.18.transformer_blocks.1.act_fn.proj.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.ff.net.2.bias": "blocks.18.transformer_blocks.1.ff.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.ff.net.2.weight": "blocks.18.transformer_blocks.1.ff.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.norm1.bias": "blocks.18.transformer_blocks.1.norm1.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.norm1.weight": "blocks.18.transformer_blocks.1.norm1.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.norm2.bias": "blocks.18.transformer_blocks.1.norm2.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.norm2.weight": "blocks.18.transformer_blocks.1.norm2.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.norm3.bias": "blocks.18.transformer_blocks.1.norm3.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.1.norm3.weight": "blocks.18.transformer_blocks.1.norm3.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.attn1.to_k.weight": "blocks.18.transformer_blocks.2.attn1.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.attn1.to_out.0.bias": "blocks.18.transformer_blocks.2.attn1.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.attn1.to_out.0.weight": "blocks.18.transformer_blocks.2.attn1.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.attn1.to_q.weight": "blocks.18.transformer_blocks.2.attn1.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.attn1.to_v.weight": "blocks.18.transformer_blocks.2.attn1.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.attn2.to_k.weight": "blocks.18.transformer_blocks.2.attn2.to_k.weight", - 
"model.diffusion_model.input_blocks.8.1.transformer_blocks.2.attn2.to_out.0.bias": "blocks.18.transformer_blocks.2.attn2.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.attn2.to_out.0.weight": "blocks.18.transformer_blocks.2.attn2.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.attn2.to_q.weight": "blocks.18.transformer_blocks.2.attn2.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.attn2.to_v.weight": "blocks.18.transformer_blocks.2.attn2.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.ff.net.0.proj.bias": "blocks.18.transformer_blocks.2.act_fn.proj.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.ff.net.0.proj.weight": "blocks.18.transformer_blocks.2.act_fn.proj.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.ff.net.2.bias": "blocks.18.transformer_blocks.2.ff.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.ff.net.2.weight": "blocks.18.transformer_blocks.2.ff.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.norm1.bias": "blocks.18.transformer_blocks.2.norm1.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.norm1.weight": "blocks.18.transformer_blocks.2.norm1.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.norm2.bias": "blocks.18.transformer_blocks.2.norm2.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.norm2.weight": "blocks.18.transformer_blocks.2.norm2.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.norm3.bias": "blocks.18.transformer_blocks.2.norm3.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.2.norm3.weight": "blocks.18.transformer_blocks.2.norm3.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.attn1.to_k.weight": "blocks.18.transformer_blocks.3.attn1.to_k.weight", - 
"model.diffusion_model.input_blocks.8.1.transformer_blocks.3.attn1.to_out.0.bias": "blocks.18.transformer_blocks.3.attn1.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.attn1.to_out.0.weight": "blocks.18.transformer_blocks.3.attn1.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.attn1.to_q.weight": "blocks.18.transformer_blocks.3.attn1.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.attn1.to_v.weight": "blocks.18.transformer_blocks.3.attn1.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.attn2.to_k.weight": "blocks.18.transformer_blocks.3.attn2.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.attn2.to_out.0.bias": "blocks.18.transformer_blocks.3.attn2.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.attn2.to_out.0.weight": "blocks.18.transformer_blocks.3.attn2.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.attn2.to_q.weight": "blocks.18.transformer_blocks.3.attn2.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.attn2.to_v.weight": "blocks.18.transformer_blocks.3.attn2.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.ff.net.0.proj.bias": "blocks.18.transformer_blocks.3.act_fn.proj.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.ff.net.0.proj.weight": "blocks.18.transformer_blocks.3.act_fn.proj.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.ff.net.2.bias": "blocks.18.transformer_blocks.3.ff.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.ff.net.2.weight": "blocks.18.transformer_blocks.3.ff.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.norm1.bias": "blocks.18.transformer_blocks.3.norm1.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.norm1.weight": "blocks.18.transformer_blocks.3.norm1.weight", - 
"model.diffusion_model.input_blocks.8.1.transformer_blocks.3.norm2.bias": "blocks.18.transformer_blocks.3.norm2.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.norm2.weight": "blocks.18.transformer_blocks.3.norm2.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.norm3.bias": "blocks.18.transformer_blocks.3.norm3.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.3.norm3.weight": "blocks.18.transformer_blocks.3.norm3.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.attn1.to_k.weight": "blocks.18.transformer_blocks.4.attn1.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.attn1.to_out.0.bias": "blocks.18.transformer_blocks.4.attn1.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.attn1.to_out.0.weight": "blocks.18.transformer_blocks.4.attn1.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.attn1.to_q.weight": "blocks.18.transformer_blocks.4.attn1.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.attn1.to_v.weight": "blocks.18.transformer_blocks.4.attn1.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.attn2.to_k.weight": "blocks.18.transformer_blocks.4.attn2.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.attn2.to_out.0.bias": "blocks.18.transformer_blocks.4.attn2.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.attn2.to_out.0.weight": "blocks.18.transformer_blocks.4.attn2.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.attn2.to_q.weight": "blocks.18.transformer_blocks.4.attn2.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.attn2.to_v.weight": "blocks.18.transformer_blocks.4.attn2.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.ff.net.0.proj.bias": "blocks.18.transformer_blocks.4.act_fn.proj.bias", - 
"model.diffusion_model.input_blocks.8.1.transformer_blocks.4.ff.net.0.proj.weight": "blocks.18.transformer_blocks.4.act_fn.proj.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.ff.net.2.bias": "blocks.18.transformer_blocks.4.ff.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.ff.net.2.weight": "blocks.18.transformer_blocks.4.ff.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.norm1.bias": "blocks.18.transformer_blocks.4.norm1.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.norm1.weight": "blocks.18.transformer_blocks.4.norm1.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.norm2.bias": "blocks.18.transformer_blocks.4.norm2.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.norm2.weight": "blocks.18.transformer_blocks.4.norm2.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.norm3.bias": "blocks.18.transformer_blocks.4.norm3.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.4.norm3.weight": "blocks.18.transformer_blocks.4.norm3.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.attn1.to_k.weight": "blocks.18.transformer_blocks.5.attn1.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.attn1.to_out.0.bias": "blocks.18.transformer_blocks.5.attn1.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.attn1.to_out.0.weight": "blocks.18.transformer_blocks.5.attn1.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.attn1.to_q.weight": "blocks.18.transformer_blocks.5.attn1.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.attn1.to_v.weight": "blocks.18.transformer_blocks.5.attn1.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.attn2.to_k.weight": "blocks.18.transformer_blocks.5.attn2.to_k.weight", - 
"model.diffusion_model.input_blocks.8.1.transformer_blocks.5.attn2.to_out.0.bias": "blocks.18.transformer_blocks.5.attn2.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.attn2.to_out.0.weight": "blocks.18.transformer_blocks.5.attn2.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.attn2.to_q.weight": "blocks.18.transformer_blocks.5.attn2.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.attn2.to_v.weight": "blocks.18.transformer_blocks.5.attn2.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.ff.net.0.proj.bias": "blocks.18.transformer_blocks.5.act_fn.proj.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.ff.net.0.proj.weight": "blocks.18.transformer_blocks.5.act_fn.proj.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.ff.net.2.bias": "blocks.18.transformer_blocks.5.ff.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.ff.net.2.weight": "blocks.18.transformer_blocks.5.ff.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.norm1.bias": "blocks.18.transformer_blocks.5.norm1.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.norm1.weight": "blocks.18.transformer_blocks.5.norm1.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.norm2.bias": "blocks.18.transformer_blocks.5.norm2.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.norm2.weight": "blocks.18.transformer_blocks.5.norm2.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.norm3.bias": "blocks.18.transformer_blocks.5.norm3.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.5.norm3.weight": "blocks.18.transformer_blocks.5.norm3.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.attn1.to_k.weight": "blocks.18.transformer_blocks.6.attn1.to_k.weight", - 
"model.diffusion_model.input_blocks.8.1.transformer_blocks.6.attn1.to_out.0.bias": "blocks.18.transformer_blocks.6.attn1.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.attn1.to_out.0.weight": "blocks.18.transformer_blocks.6.attn1.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.attn1.to_q.weight": "blocks.18.transformer_blocks.6.attn1.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.attn1.to_v.weight": "blocks.18.transformer_blocks.6.attn1.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.attn2.to_k.weight": "blocks.18.transformer_blocks.6.attn2.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.attn2.to_out.0.bias": "blocks.18.transformer_blocks.6.attn2.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.attn2.to_out.0.weight": "blocks.18.transformer_blocks.6.attn2.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.attn2.to_q.weight": "blocks.18.transformer_blocks.6.attn2.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.attn2.to_v.weight": "blocks.18.transformer_blocks.6.attn2.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.ff.net.0.proj.bias": "blocks.18.transformer_blocks.6.act_fn.proj.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.ff.net.0.proj.weight": "blocks.18.transformer_blocks.6.act_fn.proj.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.ff.net.2.bias": "blocks.18.transformer_blocks.6.ff.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.ff.net.2.weight": "blocks.18.transformer_blocks.6.ff.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.norm1.bias": "blocks.18.transformer_blocks.6.norm1.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.norm1.weight": "blocks.18.transformer_blocks.6.norm1.weight", - 
"model.diffusion_model.input_blocks.8.1.transformer_blocks.6.norm2.bias": "blocks.18.transformer_blocks.6.norm2.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.norm2.weight": "blocks.18.transformer_blocks.6.norm2.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.norm3.bias": "blocks.18.transformer_blocks.6.norm3.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.6.norm3.weight": "blocks.18.transformer_blocks.6.norm3.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.attn1.to_k.weight": "blocks.18.transformer_blocks.7.attn1.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.attn1.to_out.0.bias": "blocks.18.transformer_blocks.7.attn1.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.attn1.to_out.0.weight": "blocks.18.transformer_blocks.7.attn1.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.attn1.to_q.weight": "blocks.18.transformer_blocks.7.attn1.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.attn1.to_v.weight": "blocks.18.transformer_blocks.7.attn1.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.attn2.to_k.weight": "blocks.18.transformer_blocks.7.attn2.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.attn2.to_out.0.bias": "blocks.18.transformer_blocks.7.attn2.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.attn2.to_out.0.weight": "blocks.18.transformer_blocks.7.attn2.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.attn2.to_q.weight": "blocks.18.transformer_blocks.7.attn2.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.attn2.to_v.weight": "blocks.18.transformer_blocks.7.attn2.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.ff.net.0.proj.bias": "blocks.18.transformer_blocks.7.act_fn.proj.bias", - 
"model.diffusion_model.input_blocks.8.1.transformer_blocks.7.ff.net.0.proj.weight": "blocks.18.transformer_blocks.7.act_fn.proj.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.ff.net.2.bias": "blocks.18.transformer_blocks.7.ff.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.ff.net.2.weight": "blocks.18.transformer_blocks.7.ff.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.norm1.bias": "blocks.18.transformer_blocks.7.norm1.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.norm1.weight": "blocks.18.transformer_blocks.7.norm1.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.norm2.bias": "blocks.18.transformer_blocks.7.norm2.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.norm2.weight": "blocks.18.transformer_blocks.7.norm2.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.norm3.bias": "blocks.18.transformer_blocks.7.norm3.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.7.norm3.weight": "blocks.18.transformer_blocks.7.norm3.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.attn1.to_k.weight": "blocks.18.transformer_blocks.8.attn1.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.attn1.to_out.0.bias": "blocks.18.transformer_blocks.8.attn1.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.attn1.to_out.0.weight": "blocks.18.transformer_blocks.8.attn1.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.attn1.to_q.weight": "blocks.18.transformer_blocks.8.attn1.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.attn1.to_v.weight": "blocks.18.transformer_blocks.8.attn1.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.attn2.to_k.weight": "blocks.18.transformer_blocks.8.attn2.to_k.weight", - 
"model.diffusion_model.input_blocks.8.1.transformer_blocks.8.attn2.to_out.0.bias": "blocks.18.transformer_blocks.8.attn2.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.attn2.to_out.0.weight": "blocks.18.transformer_blocks.8.attn2.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.attn2.to_q.weight": "blocks.18.transformer_blocks.8.attn2.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.attn2.to_v.weight": "blocks.18.transformer_blocks.8.attn2.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.ff.net.0.proj.bias": "blocks.18.transformer_blocks.8.act_fn.proj.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.ff.net.0.proj.weight": "blocks.18.transformer_blocks.8.act_fn.proj.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.ff.net.2.bias": "blocks.18.transformer_blocks.8.ff.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.ff.net.2.weight": "blocks.18.transformer_blocks.8.ff.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.norm1.bias": "blocks.18.transformer_blocks.8.norm1.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.norm1.weight": "blocks.18.transformer_blocks.8.norm1.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.norm2.bias": "blocks.18.transformer_blocks.8.norm2.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.norm2.weight": "blocks.18.transformer_blocks.8.norm2.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.norm3.bias": "blocks.18.transformer_blocks.8.norm3.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.8.norm3.weight": "blocks.18.transformer_blocks.8.norm3.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.attn1.to_k.weight": "blocks.18.transformer_blocks.9.attn1.to_k.weight", - 
"model.diffusion_model.input_blocks.8.1.transformer_blocks.9.attn1.to_out.0.bias": "blocks.18.transformer_blocks.9.attn1.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.attn1.to_out.0.weight": "blocks.18.transformer_blocks.9.attn1.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.attn1.to_q.weight": "blocks.18.transformer_blocks.9.attn1.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.attn1.to_v.weight": "blocks.18.transformer_blocks.9.attn1.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.attn2.to_k.weight": "blocks.18.transformer_blocks.9.attn2.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.attn2.to_out.0.bias": "blocks.18.transformer_blocks.9.attn2.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.attn2.to_out.0.weight": "blocks.18.transformer_blocks.9.attn2.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.attn2.to_q.weight": "blocks.18.transformer_blocks.9.attn2.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.attn2.to_v.weight": "blocks.18.transformer_blocks.9.attn2.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.ff.net.0.proj.bias": "blocks.18.transformer_blocks.9.act_fn.proj.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.ff.net.0.proj.weight": "blocks.18.transformer_blocks.9.act_fn.proj.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.ff.net.2.bias": "blocks.18.transformer_blocks.9.ff.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.ff.net.2.weight": "blocks.18.transformer_blocks.9.ff.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.norm1.bias": "blocks.18.transformer_blocks.9.norm1.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.norm1.weight": "blocks.18.transformer_blocks.9.norm1.weight", - 
"model.diffusion_model.input_blocks.8.1.transformer_blocks.9.norm2.bias": "blocks.18.transformer_blocks.9.norm2.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.norm2.weight": "blocks.18.transformer_blocks.9.norm2.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.norm3.bias": "blocks.18.transformer_blocks.9.norm3.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.9.norm3.weight": "blocks.18.transformer_blocks.9.norm3.weight", - "model.diffusion_model.label_emb.0.0.bias": "add_time_embedding.0.bias", - "model.diffusion_model.label_emb.0.0.weight": "add_time_embedding.0.weight", - "model.diffusion_model.label_emb.0.2.bias": "add_time_embedding.2.bias", - "model.diffusion_model.label_emb.0.2.weight": "add_time_embedding.2.weight", - "model.diffusion_model.middle_block.0.emb_layers.1.bias": "blocks.20.time_emb_proj.bias", - "model.diffusion_model.middle_block.0.emb_layers.1.weight": "blocks.20.time_emb_proj.weight", - "model.diffusion_model.middle_block.0.in_layers.0.bias": "blocks.20.norm1.bias", - "model.diffusion_model.middle_block.0.in_layers.0.weight": "blocks.20.norm1.weight", - "model.diffusion_model.middle_block.0.in_layers.2.bias": "blocks.20.conv1.bias", - "model.diffusion_model.middle_block.0.in_layers.2.weight": "blocks.20.conv1.weight", - "model.diffusion_model.middle_block.0.out_layers.0.bias": "blocks.20.norm2.bias", - "model.diffusion_model.middle_block.0.out_layers.0.weight": "blocks.20.norm2.weight", - "model.diffusion_model.middle_block.0.out_layers.3.bias": "blocks.20.conv2.bias", - "model.diffusion_model.middle_block.0.out_layers.3.weight": "blocks.20.conv2.weight", - "model.diffusion_model.middle_block.1.norm.bias": "blocks.21.norm.bias", - "model.diffusion_model.middle_block.1.norm.weight": "blocks.21.norm.weight", - "model.diffusion_model.middle_block.1.proj_in.bias": "blocks.21.proj_in.bias", - "model.diffusion_model.middle_block.1.proj_in.weight": "blocks.21.proj_in.weight", - 
"model.diffusion_model.middle_block.1.proj_out.bias": "blocks.21.proj_out.bias", - "model.diffusion_model.middle_block.1.proj_out.weight": "blocks.21.proj_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn1.to_k.weight": "blocks.21.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.21.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.21.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn1.to_q.weight": "blocks.21.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn1.to_v.weight": "blocks.21.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn2.to_k.weight": "blocks.21.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.21.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.21.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn2.to_q.weight": "blocks.21.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn2.to_v.weight": "blocks.21.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.21.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.21.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.ff.net.2.bias": "blocks.21.transformer_blocks.0.ff.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.ff.net.2.weight": 
"blocks.21.transformer_blocks.0.ff.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm1.bias": "blocks.21.transformer_blocks.0.norm1.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm1.weight": "blocks.21.transformer_blocks.0.norm1.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm2.bias": "blocks.21.transformer_blocks.0.norm2.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm2.weight": "blocks.21.transformer_blocks.0.norm2.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm3.bias": "blocks.21.transformer_blocks.0.norm3.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm3.weight": "blocks.21.transformer_blocks.0.norm3.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.1.attn1.to_k.weight": "blocks.21.transformer_blocks.1.attn1.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.1.attn1.to_out.0.bias": "blocks.21.transformer_blocks.1.attn1.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.1.attn1.to_out.0.weight": "blocks.21.transformer_blocks.1.attn1.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.1.attn1.to_q.weight": "blocks.21.transformer_blocks.1.attn1.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.1.attn1.to_v.weight": "blocks.21.transformer_blocks.1.attn1.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.1.attn2.to_k.weight": "blocks.21.transformer_blocks.1.attn2.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.1.attn2.to_out.0.bias": "blocks.21.transformer_blocks.1.attn2.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.1.attn2.to_out.0.weight": "blocks.21.transformer_blocks.1.attn2.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.1.attn2.to_q.weight": "blocks.21.transformer_blocks.1.attn2.to_q.weight", - 
"model.diffusion_model.middle_block.1.transformer_blocks.1.attn2.to_v.weight": "blocks.21.transformer_blocks.1.attn2.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.1.ff.net.0.proj.bias": "blocks.21.transformer_blocks.1.act_fn.proj.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.1.ff.net.0.proj.weight": "blocks.21.transformer_blocks.1.act_fn.proj.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.1.ff.net.2.bias": "blocks.21.transformer_blocks.1.ff.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.1.ff.net.2.weight": "blocks.21.transformer_blocks.1.ff.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.1.norm1.bias": "blocks.21.transformer_blocks.1.norm1.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.1.norm1.weight": "blocks.21.transformer_blocks.1.norm1.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.1.norm2.bias": "blocks.21.transformer_blocks.1.norm2.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.1.norm2.weight": "blocks.21.transformer_blocks.1.norm2.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.1.norm3.bias": "blocks.21.transformer_blocks.1.norm3.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.1.norm3.weight": "blocks.21.transformer_blocks.1.norm3.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.2.attn1.to_k.weight": "blocks.21.transformer_blocks.2.attn1.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.2.attn1.to_out.0.bias": "blocks.21.transformer_blocks.2.attn1.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.2.attn1.to_out.0.weight": "blocks.21.transformer_blocks.2.attn1.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.2.attn1.to_q.weight": "blocks.21.transformer_blocks.2.attn1.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.2.attn1.to_v.weight": 
"blocks.21.transformer_blocks.2.attn1.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.2.attn2.to_k.weight": "blocks.21.transformer_blocks.2.attn2.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.2.attn2.to_out.0.bias": "blocks.21.transformer_blocks.2.attn2.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.2.attn2.to_out.0.weight": "blocks.21.transformer_blocks.2.attn2.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.2.attn2.to_q.weight": "blocks.21.transformer_blocks.2.attn2.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.2.attn2.to_v.weight": "blocks.21.transformer_blocks.2.attn2.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.2.ff.net.0.proj.bias": "blocks.21.transformer_blocks.2.act_fn.proj.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.2.ff.net.0.proj.weight": "blocks.21.transformer_blocks.2.act_fn.proj.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.2.ff.net.2.bias": "blocks.21.transformer_blocks.2.ff.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.2.ff.net.2.weight": "blocks.21.transformer_blocks.2.ff.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.2.norm1.bias": "blocks.21.transformer_blocks.2.norm1.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.2.norm1.weight": "blocks.21.transformer_blocks.2.norm1.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.2.norm2.bias": "blocks.21.transformer_blocks.2.norm2.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.2.norm2.weight": "blocks.21.transformer_blocks.2.norm2.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.2.norm3.bias": "blocks.21.transformer_blocks.2.norm3.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.2.norm3.weight": "blocks.21.transformer_blocks.2.norm3.weight", - 
"model.diffusion_model.middle_block.1.transformer_blocks.3.attn1.to_k.weight": "blocks.21.transformer_blocks.3.attn1.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.3.attn1.to_out.0.bias": "blocks.21.transformer_blocks.3.attn1.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.3.attn1.to_out.0.weight": "blocks.21.transformer_blocks.3.attn1.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.3.attn1.to_q.weight": "blocks.21.transformer_blocks.3.attn1.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.3.attn1.to_v.weight": "blocks.21.transformer_blocks.3.attn1.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.3.attn2.to_k.weight": "blocks.21.transformer_blocks.3.attn2.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.3.attn2.to_out.0.bias": "blocks.21.transformer_blocks.3.attn2.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.3.attn2.to_out.0.weight": "blocks.21.transformer_blocks.3.attn2.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.3.attn2.to_q.weight": "blocks.21.transformer_blocks.3.attn2.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.3.attn2.to_v.weight": "blocks.21.transformer_blocks.3.attn2.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.3.ff.net.0.proj.bias": "blocks.21.transformer_blocks.3.act_fn.proj.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.3.ff.net.0.proj.weight": "blocks.21.transformer_blocks.3.act_fn.proj.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.3.ff.net.2.bias": "blocks.21.transformer_blocks.3.ff.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.3.ff.net.2.weight": "blocks.21.transformer_blocks.3.ff.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.3.norm1.bias": "blocks.21.transformer_blocks.3.norm1.bias", - 
"model.diffusion_model.middle_block.1.transformer_blocks.3.norm1.weight": "blocks.21.transformer_blocks.3.norm1.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.3.norm2.bias": "blocks.21.transformer_blocks.3.norm2.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.3.norm2.weight": "blocks.21.transformer_blocks.3.norm2.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.3.norm3.bias": "blocks.21.transformer_blocks.3.norm3.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.3.norm3.weight": "blocks.21.transformer_blocks.3.norm3.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.4.attn1.to_k.weight": "blocks.21.transformer_blocks.4.attn1.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.4.attn1.to_out.0.bias": "blocks.21.transformer_blocks.4.attn1.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.4.attn1.to_out.0.weight": "blocks.21.transformer_blocks.4.attn1.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.4.attn1.to_q.weight": "blocks.21.transformer_blocks.4.attn1.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.4.attn1.to_v.weight": "blocks.21.transformer_blocks.4.attn1.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.4.attn2.to_k.weight": "blocks.21.transformer_blocks.4.attn2.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.4.attn2.to_out.0.bias": "blocks.21.transformer_blocks.4.attn2.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.4.attn2.to_out.0.weight": "blocks.21.transformer_blocks.4.attn2.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.4.attn2.to_q.weight": "blocks.21.transformer_blocks.4.attn2.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.4.attn2.to_v.weight": "blocks.21.transformer_blocks.4.attn2.to_v.weight", - 
"model.diffusion_model.middle_block.1.transformer_blocks.4.ff.net.0.proj.bias": "blocks.21.transformer_blocks.4.act_fn.proj.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.4.ff.net.0.proj.weight": "blocks.21.transformer_blocks.4.act_fn.proj.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.4.ff.net.2.bias": "blocks.21.transformer_blocks.4.ff.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.4.ff.net.2.weight": "blocks.21.transformer_blocks.4.ff.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.4.norm1.bias": "blocks.21.transformer_blocks.4.norm1.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.4.norm1.weight": "blocks.21.transformer_blocks.4.norm1.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.4.norm2.bias": "blocks.21.transformer_blocks.4.norm2.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.4.norm2.weight": "blocks.21.transformer_blocks.4.norm2.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.4.norm3.bias": "blocks.21.transformer_blocks.4.norm3.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.4.norm3.weight": "blocks.21.transformer_blocks.4.norm3.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.5.attn1.to_k.weight": "blocks.21.transformer_blocks.5.attn1.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.5.attn1.to_out.0.bias": "blocks.21.transformer_blocks.5.attn1.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.5.attn1.to_out.0.weight": "blocks.21.transformer_blocks.5.attn1.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.5.attn1.to_q.weight": "blocks.21.transformer_blocks.5.attn1.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.5.attn1.to_v.weight": "blocks.21.transformer_blocks.5.attn1.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.5.attn2.to_k.weight": 
"blocks.21.transformer_blocks.5.attn2.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.5.attn2.to_out.0.bias": "blocks.21.transformer_blocks.5.attn2.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.5.attn2.to_out.0.weight": "blocks.21.transformer_blocks.5.attn2.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.5.attn2.to_q.weight": "blocks.21.transformer_blocks.5.attn2.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.5.attn2.to_v.weight": "blocks.21.transformer_blocks.5.attn2.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.5.ff.net.0.proj.bias": "blocks.21.transformer_blocks.5.act_fn.proj.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.5.ff.net.0.proj.weight": "blocks.21.transformer_blocks.5.act_fn.proj.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.5.ff.net.2.bias": "blocks.21.transformer_blocks.5.ff.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.5.ff.net.2.weight": "blocks.21.transformer_blocks.5.ff.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.5.norm1.bias": "blocks.21.transformer_blocks.5.norm1.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.5.norm1.weight": "blocks.21.transformer_blocks.5.norm1.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.5.norm2.bias": "blocks.21.transformer_blocks.5.norm2.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.5.norm2.weight": "blocks.21.transformer_blocks.5.norm2.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.5.norm3.bias": "blocks.21.transformer_blocks.5.norm3.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.5.norm3.weight": "blocks.21.transformer_blocks.5.norm3.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.6.attn1.to_k.weight": "blocks.21.transformer_blocks.6.attn1.to_k.weight", - 
"model.diffusion_model.middle_block.1.transformer_blocks.6.attn1.to_out.0.bias": "blocks.21.transformer_blocks.6.attn1.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.6.attn1.to_out.0.weight": "blocks.21.transformer_blocks.6.attn1.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.6.attn1.to_q.weight": "blocks.21.transformer_blocks.6.attn1.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.6.attn1.to_v.weight": "blocks.21.transformer_blocks.6.attn1.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.6.attn2.to_k.weight": "blocks.21.transformer_blocks.6.attn2.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.6.attn2.to_out.0.bias": "blocks.21.transformer_blocks.6.attn2.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.6.attn2.to_out.0.weight": "blocks.21.transformer_blocks.6.attn2.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.6.attn2.to_q.weight": "blocks.21.transformer_blocks.6.attn2.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.6.attn2.to_v.weight": "blocks.21.transformer_blocks.6.attn2.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.6.ff.net.0.proj.bias": "blocks.21.transformer_blocks.6.act_fn.proj.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.6.ff.net.0.proj.weight": "blocks.21.transformer_blocks.6.act_fn.proj.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.6.ff.net.2.bias": "blocks.21.transformer_blocks.6.ff.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.6.ff.net.2.weight": "blocks.21.transformer_blocks.6.ff.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.6.norm1.bias": "blocks.21.transformer_blocks.6.norm1.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.6.norm1.weight": "blocks.21.transformer_blocks.6.norm1.weight", - 
"model.diffusion_model.middle_block.1.transformer_blocks.6.norm2.bias": "blocks.21.transformer_blocks.6.norm2.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.6.norm2.weight": "blocks.21.transformer_blocks.6.norm2.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.6.norm3.bias": "blocks.21.transformer_blocks.6.norm3.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.6.norm3.weight": "blocks.21.transformer_blocks.6.norm3.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.7.attn1.to_k.weight": "blocks.21.transformer_blocks.7.attn1.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.7.attn1.to_out.0.bias": "blocks.21.transformer_blocks.7.attn1.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.7.attn1.to_out.0.weight": "blocks.21.transformer_blocks.7.attn1.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.7.attn1.to_q.weight": "blocks.21.transformer_blocks.7.attn1.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.7.attn1.to_v.weight": "blocks.21.transformer_blocks.7.attn1.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.7.attn2.to_k.weight": "blocks.21.transformer_blocks.7.attn2.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.7.attn2.to_out.0.bias": "blocks.21.transformer_blocks.7.attn2.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.7.attn2.to_out.0.weight": "blocks.21.transformer_blocks.7.attn2.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.7.attn2.to_q.weight": "blocks.21.transformer_blocks.7.attn2.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.7.attn2.to_v.weight": "blocks.21.transformer_blocks.7.attn2.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.7.ff.net.0.proj.bias": "blocks.21.transformer_blocks.7.act_fn.proj.bias", - 
"model.diffusion_model.middle_block.1.transformer_blocks.7.ff.net.0.proj.weight": "blocks.21.transformer_blocks.7.act_fn.proj.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.7.ff.net.2.bias": "blocks.21.transformer_blocks.7.ff.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.7.ff.net.2.weight": "blocks.21.transformer_blocks.7.ff.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.7.norm1.bias": "blocks.21.transformer_blocks.7.norm1.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.7.norm1.weight": "blocks.21.transformer_blocks.7.norm1.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.7.norm2.bias": "blocks.21.transformer_blocks.7.norm2.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.7.norm2.weight": "blocks.21.transformer_blocks.7.norm2.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.7.norm3.bias": "blocks.21.transformer_blocks.7.norm3.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.7.norm3.weight": "blocks.21.transformer_blocks.7.norm3.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.8.attn1.to_k.weight": "blocks.21.transformer_blocks.8.attn1.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.8.attn1.to_out.0.bias": "blocks.21.transformer_blocks.8.attn1.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.8.attn1.to_out.0.weight": "blocks.21.transformer_blocks.8.attn1.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.8.attn1.to_q.weight": "blocks.21.transformer_blocks.8.attn1.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.8.attn1.to_v.weight": "blocks.21.transformer_blocks.8.attn1.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.8.attn2.to_k.weight": "blocks.21.transformer_blocks.8.attn2.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.8.attn2.to_out.0.bias": 
"blocks.21.transformer_blocks.8.attn2.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.8.attn2.to_out.0.weight": "blocks.21.transformer_blocks.8.attn2.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.8.attn2.to_q.weight": "blocks.21.transformer_blocks.8.attn2.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.8.attn2.to_v.weight": "blocks.21.transformer_blocks.8.attn2.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.8.ff.net.0.proj.bias": "blocks.21.transformer_blocks.8.act_fn.proj.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.8.ff.net.0.proj.weight": "blocks.21.transformer_blocks.8.act_fn.proj.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.8.ff.net.2.bias": "blocks.21.transformer_blocks.8.ff.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.8.ff.net.2.weight": "blocks.21.transformer_blocks.8.ff.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.8.norm1.bias": "blocks.21.transformer_blocks.8.norm1.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.8.norm1.weight": "blocks.21.transformer_blocks.8.norm1.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.8.norm2.bias": "blocks.21.transformer_blocks.8.norm2.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.8.norm2.weight": "blocks.21.transformer_blocks.8.norm2.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.8.norm3.bias": "blocks.21.transformer_blocks.8.norm3.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.8.norm3.weight": "blocks.21.transformer_blocks.8.norm3.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.9.attn1.to_k.weight": "blocks.21.transformer_blocks.9.attn1.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.9.attn1.to_out.0.bias": "blocks.21.transformer_blocks.9.attn1.to_out.bias", - 
"model.diffusion_model.middle_block.1.transformer_blocks.9.attn1.to_out.0.weight": "blocks.21.transformer_blocks.9.attn1.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.9.attn1.to_q.weight": "blocks.21.transformer_blocks.9.attn1.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.9.attn1.to_v.weight": "blocks.21.transformer_blocks.9.attn1.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.9.attn2.to_k.weight": "blocks.21.transformer_blocks.9.attn2.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.9.attn2.to_out.0.bias": "blocks.21.transformer_blocks.9.attn2.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.9.attn2.to_out.0.weight": "blocks.21.transformer_blocks.9.attn2.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.9.attn2.to_q.weight": "blocks.21.transformer_blocks.9.attn2.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.9.attn2.to_v.weight": "blocks.21.transformer_blocks.9.attn2.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.9.ff.net.0.proj.bias": "blocks.21.transformer_blocks.9.act_fn.proj.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.9.ff.net.0.proj.weight": "blocks.21.transformer_blocks.9.act_fn.proj.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.9.ff.net.2.bias": "blocks.21.transformer_blocks.9.ff.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.9.ff.net.2.weight": "blocks.21.transformer_blocks.9.ff.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.9.norm1.bias": "blocks.21.transformer_blocks.9.norm1.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.9.norm1.weight": "blocks.21.transformer_blocks.9.norm1.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.9.norm2.bias": "blocks.21.transformer_blocks.9.norm2.bias", - 
"model.diffusion_model.middle_block.1.transformer_blocks.9.norm2.weight": "blocks.21.transformer_blocks.9.norm2.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.9.norm3.bias": "blocks.21.transformer_blocks.9.norm3.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.9.norm3.weight": "blocks.21.transformer_blocks.9.norm3.weight", - "model.diffusion_model.middle_block.2.emb_layers.1.bias": "blocks.22.time_emb_proj.bias", - "model.diffusion_model.middle_block.2.emb_layers.1.weight": "blocks.22.time_emb_proj.weight", - "model.diffusion_model.middle_block.2.in_layers.0.bias": "blocks.22.norm1.bias", - "model.diffusion_model.middle_block.2.in_layers.0.weight": "blocks.22.norm1.weight", - "model.diffusion_model.middle_block.2.in_layers.2.bias": "blocks.22.conv1.bias", - "model.diffusion_model.middle_block.2.in_layers.2.weight": "blocks.22.conv1.weight", - "model.diffusion_model.middle_block.2.out_layers.0.bias": "blocks.22.norm2.bias", - "model.diffusion_model.middle_block.2.out_layers.0.weight": "blocks.22.norm2.weight", - "model.diffusion_model.middle_block.2.out_layers.3.bias": "blocks.22.conv2.bias", - "model.diffusion_model.middle_block.2.out_layers.3.weight": "blocks.22.conv2.weight", - "model.diffusion_model.out.0.bias": "conv_norm_out.bias", - "model.diffusion_model.out.0.weight": "conv_norm_out.weight", - "model.diffusion_model.out.2.bias": "conv_out.bias", - "model.diffusion_model.out.2.weight": "conv_out.weight", - "model.diffusion_model.output_blocks.0.0.emb_layers.1.bias": "blocks.24.time_emb_proj.bias", - "model.diffusion_model.output_blocks.0.0.emb_layers.1.weight": "blocks.24.time_emb_proj.weight", - "model.diffusion_model.output_blocks.0.0.in_layers.0.bias": "blocks.24.norm1.bias", - "model.diffusion_model.output_blocks.0.0.in_layers.0.weight": "blocks.24.norm1.weight", - "model.diffusion_model.output_blocks.0.0.in_layers.2.bias": "blocks.24.conv1.bias", - "model.diffusion_model.output_blocks.0.0.in_layers.2.weight": 
"blocks.24.conv1.weight", - "model.diffusion_model.output_blocks.0.0.out_layers.0.bias": "blocks.24.norm2.bias", - "model.diffusion_model.output_blocks.0.0.out_layers.0.weight": "blocks.24.norm2.weight", - "model.diffusion_model.output_blocks.0.0.out_layers.3.bias": "blocks.24.conv2.bias", - "model.diffusion_model.output_blocks.0.0.out_layers.3.weight": "blocks.24.conv2.weight", - "model.diffusion_model.output_blocks.0.0.skip_connection.bias": "blocks.24.conv_shortcut.bias", - "model.diffusion_model.output_blocks.0.0.skip_connection.weight": "blocks.24.conv_shortcut.weight", - "model.diffusion_model.output_blocks.0.1.norm.bias": "blocks.25.norm.bias", - "model.diffusion_model.output_blocks.0.1.norm.weight": "blocks.25.norm.weight", - "model.diffusion_model.output_blocks.0.1.proj_in.bias": "blocks.25.proj_in.bias", - "model.diffusion_model.output_blocks.0.1.proj_in.weight": "blocks.25.proj_in.weight", - "model.diffusion_model.output_blocks.0.1.proj_out.bias": "blocks.25.proj_out.bias", - "model.diffusion_model.output_blocks.0.1.proj_out.weight": "blocks.25.proj_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.attn1.to_k.weight": "blocks.25.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.25.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.25.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.attn1.to_q.weight": "blocks.25.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.attn1.to_v.weight": "blocks.25.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.attn2.to_k.weight": "blocks.25.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.attn2.to_out.0.bias": 
"blocks.25.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.25.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.attn2.to_q.weight": "blocks.25.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.attn2.to_v.weight": "blocks.25.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.25.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.25.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.ff.net.2.bias": "blocks.25.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.ff.net.2.weight": "blocks.25.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.norm1.bias": "blocks.25.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.norm1.weight": "blocks.25.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.norm2.bias": "blocks.25.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.norm2.weight": "blocks.25.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.norm3.bias": "blocks.25.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.0.norm3.weight": "blocks.25.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.attn1.to_k.weight": "blocks.25.transformer_blocks.1.attn1.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.attn1.to_out.0.bias": 
"blocks.25.transformer_blocks.1.attn1.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.attn1.to_out.0.weight": "blocks.25.transformer_blocks.1.attn1.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.attn1.to_q.weight": "blocks.25.transformer_blocks.1.attn1.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.attn1.to_v.weight": "blocks.25.transformer_blocks.1.attn1.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.attn2.to_k.weight": "blocks.25.transformer_blocks.1.attn2.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.attn2.to_out.0.bias": "blocks.25.transformer_blocks.1.attn2.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.attn2.to_out.0.weight": "blocks.25.transformer_blocks.1.attn2.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.attn2.to_q.weight": "blocks.25.transformer_blocks.1.attn2.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.attn2.to_v.weight": "blocks.25.transformer_blocks.1.attn2.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.ff.net.0.proj.bias": "blocks.25.transformer_blocks.1.act_fn.proj.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.ff.net.0.proj.weight": "blocks.25.transformer_blocks.1.act_fn.proj.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.ff.net.2.bias": "blocks.25.transformer_blocks.1.ff.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.ff.net.2.weight": "blocks.25.transformer_blocks.1.ff.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.norm1.bias": "blocks.25.transformer_blocks.1.norm1.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.norm1.weight": "blocks.25.transformer_blocks.1.norm1.weight", - 
"model.diffusion_model.output_blocks.0.1.transformer_blocks.1.norm2.bias": "blocks.25.transformer_blocks.1.norm2.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.norm2.weight": "blocks.25.transformer_blocks.1.norm2.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.norm3.bias": "blocks.25.transformer_blocks.1.norm3.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.1.norm3.weight": "blocks.25.transformer_blocks.1.norm3.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.attn1.to_k.weight": "blocks.25.transformer_blocks.2.attn1.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.attn1.to_out.0.bias": "blocks.25.transformer_blocks.2.attn1.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.attn1.to_out.0.weight": "blocks.25.transformer_blocks.2.attn1.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.attn1.to_q.weight": "blocks.25.transformer_blocks.2.attn1.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.attn1.to_v.weight": "blocks.25.transformer_blocks.2.attn1.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.attn2.to_k.weight": "blocks.25.transformer_blocks.2.attn2.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.attn2.to_out.0.bias": "blocks.25.transformer_blocks.2.attn2.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.attn2.to_out.0.weight": "blocks.25.transformer_blocks.2.attn2.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.attn2.to_q.weight": "blocks.25.transformer_blocks.2.attn2.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.attn2.to_v.weight": "blocks.25.transformer_blocks.2.attn2.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.ff.net.0.proj.bias": 
"blocks.25.transformer_blocks.2.act_fn.proj.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.ff.net.0.proj.weight": "blocks.25.transformer_blocks.2.act_fn.proj.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.ff.net.2.bias": "blocks.25.transformer_blocks.2.ff.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.ff.net.2.weight": "blocks.25.transformer_blocks.2.ff.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.norm1.bias": "blocks.25.transformer_blocks.2.norm1.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.norm1.weight": "blocks.25.transformer_blocks.2.norm1.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.norm2.bias": "blocks.25.transformer_blocks.2.norm2.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.norm2.weight": "blocks.25.transformer_blocks.2.norm2.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.norm3.bias": "blocks.25.transformer_blocks.2.norm3.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.2.norm3.weight": "blocks.25.transformer_blocks.2.norm3.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.attn1.to_k.weight": "blocks.25.transformer_blocks.3.attn1.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.attn1.to_out.0.bias": "blocks.25.transformer_blocks.3.attn1.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.attn1.to_out.0.weight": "blocks.25.transformer_blocks.3.attn1.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.attn1.to_q.weight": "blocks.25.transformer_blocks.3.attn1.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.attn1.to_v.weight": "blocks.25.transformer_blocks.3.attn1.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.attn2.to_k.weight": 
"blocks.25.transformer_blocks.3.attn2.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.attn2.to_out.0.bias": "blocks.25.transformer_blocks.3.attn2.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.attn2.to_out.0.weight": "blocks.25.transformer_blocks.3.attn2.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.attn2.to_q.weight": "blocks.25.transformer_blocks.3.attn2.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.attn2.to_v.weight": "blocks.25.transformer_blocks.3.attn2.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.ff.net.0.proj.bias": "blocks.25.transformer_blocks.3.act_fn.proj.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.ff.net.0.proj.weight": "blocks.25.transformer_blocks.3.act_fn.proj.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.ff.net.2.bias": "blocks.25.transformer_blocks.3.ff.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.ff.net.2.weight": "blocks.25.transformer_blocks.3.ff.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.norm1.bias": "blocks.25.transformer_blocks.3.norm1.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.norm1.weight": "blocks.25.transformer_blocks.3.norm1.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.norm2.bias": "blocks.25.transformer_blocks.3.norm2.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.norm2.weight": "blocks.25.transformer_blocks.3.norm2.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.norm3.bias": "blocks.25.transformer_blocks.3.norm3.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.3.norm3.weight": "blocks.25.transformer_blocks.3.norm3.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.attn1.to_k.weight": 
"blocks.25.transformer_blocks.4.attn1.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.attn1.to_out.0.bias": "blocks.25.transformer_blocks.4.attn1.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.attn1.to_out.0.weight": "blocks.25.transformer_blocks.4.attn1.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.attn1.to_q.weight": "blocks.25.transformer_blocks.4.attn1.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.attn1.to_v.weight": "blocks.25.transformer_blocks.4.attn1.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.attn2.to_k.weight": "blocks.25.transformer_blocks.4.attn2.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.attn2.to_out.0.bias": "blocks.25.transformer_blocks.4.attn2.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.attn2.to_out.0.weight": "blocks.25.transformer_blocks.4.attn2.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.attn2.to_q.weight": "blocks.25.transformer_blocks.4.attn2.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.attn2.to_v.weight": "blocks.25.transformer_blocks.4.attn2.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.ff.net.0.proj.bias": "blocks.25.transformer_blocks.4.act_fn.proj.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.ff.net.0.proj.weight": "blocks.25.transformer_blocks.4.act_fn.proj.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.ff.net.2.bias": "blocks.25.transformer_blocks.4.ff.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.ff.net.2.weight": "blocks.25.transformer_blocks.4.ff.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.norm1.bias": "blocks.25.transformer_blocks.4.norm1.bias", - 
"model.diffusion_model.output_blocks.0.1.transformer_blocks.4.norm1.weight": "blocks.25.transformer_blocks.4.norm1.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.norm2.bias": "blocks.25.transformer_blocks.4.norm2.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.norm2.weight": "blocks.25.transformer_blocks.4.norm2.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.norm3.bias": "blocks.25.transformer_blocks.4.norm3.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.4.norm3.weight": "blocks.25.transformer_blocks.4.norm3.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.attn1.to_k.weight": "blocks.25.transformer_blocks.5.attn1.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.attn1.to_out.0.bias": "blocks.25.transformer_blocks.5.attn1.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.attn1.to_out.0.weight": "blocks.25.transformer_blocks.5.attn1.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.attn1.to_q.weight": "blocks.25.transformer_blocks.5.attn1.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.attn1.to_v.weight": "blocks.25.transformer_blocks.5.attn1.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.attn2.to_k.weight": "blocks.25.transformer_blocks.5.attn2.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.attn2.to_out.0.bias": "blocks.25.transformer_blocks.5.attn2.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.attn2.to_out.0.weight": "blocks.25.transformer_blocks.5.attn2.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.attn2.to_q.weight": "blocks.25.transformer_blocks.5.attn2.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.attn2.to_v.weight": "blocks.25.transformer_blocks.5.attn2.to_v.weight", - 
"model.diffusion_model.output_blocks.0.1.transformer_blocks.5.ff.net.0.proj.bias": "blocks.25.transformer_blocks.5.act_fn.proj.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.ff.net.0.proj.weight": "blocks.25.transformer_blocks.5.act_fn.proj.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.ff.net.2.bias": "blocks.25.transformer_blocks.5.ff.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.ff.net.2.weight": "blocks.25.transformer_blocks.5.ff.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.norm1.bias": "blocks.25.transformer_blocks.5.norm1.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.norm1.weight": "blocks.25.transformer_blocks.5.norm1.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.norm2.bias": "blocks.25.transformer_blocks.5.norm2.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.norm2.weight": "blocks.25.transformer_blocks.5.norm2.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.norm3.bias": "blocks.25.transformer_blocks.5.norm3.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.5.norm3.weight": "blocks.25.transformer_blocks.5.norm3.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.attn1.to_k.weight": "blocks.25.transformer_blocks.6.attn1.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.attn1.to_out.0.bias": "blocks.25.transformer_blocks.6.attn1.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.attn1.to_out.0.weight": "blocks.25.transformer_blocks.6.attn1.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.attn1.to_q.weight": "blocks.25.transformer_blocks.6.attn1.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.attn1.to_v.weight": "blocks.25.transformer_blocks.6.attn1.to_v.weight", - 
"model.diffusion_model.output_blocks.0.1.transformer_blocks.6.attn2.to_k.weight": "blocks.25.transformer_blocks.6.attn2.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.attn2.to_out.0.bias": "blocks.25.transformer_blocks.6.attn2.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.attn2.to_out.0.weight": "blocks.25.transformer_blocks.6.attn2.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.attn2.to_q.weight": "blocks.25.transformer_blocks.6.attn2.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.attn2.to_v.weight": "blocks.25.transformer_blocks.6.attn2.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.ff.net.0.proj.bias": "blocks.25.transformer_blocks.6.act_fn.proj.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.ff.net.0.proj.weight": "blocks.25.transformer_blocks.6.act_fn.proj.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.ff.net.2.bias": "blocks.25.transformer_blocks.6.ff.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.ff.net.2.weight": "blocks.25.transformer_blocks.6.ff.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.norm1.bias": "blocks.25.transformer_blocks.6.norm1.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.norm1.weight": "blocks.25.transformer_blocks.6.norm1.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.norm2.bias": "blocks.25.transformer_blocks.6.norm2.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.norm2.weight": "blocks.25.transformer_blocks.6.norm2.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.norm3.bias": "blocks.25.transformer_blocks.6.norm3.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.6.norm3.weight": "blocks.25.transformer_blocks.6.norm3.weight", - 
"model.diffusion_model.output_blocks.0.1.transformer_blocks.7.attn1.to_k.weight": "blocks.25.transformer_blocks.7.attn1.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.attn1.to_out.0.bias": "blocks.25.transformer_blocks.7.attn1.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.attn1.to_out.0.weight": "blocks.25.transformer_blocks.7.attn1.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.attn1.to_q.weight": "blocks.25.transformer_blocks.7.attn1.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.attn1.to_v.weight": "blocks.25.transformer_blocks.7.attn1.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.attn2.to_k.weight": "blocks.25.transformer_blocks.7.attn2.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.attn2.to_out.0.bias": "blocks.25.transformer_blocks.7.attn2.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.attn2.to_out.0.weight": "blocks.25.transformer_blocks.7.attn2.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.attn2.to_q.weight": "blocks.25.transformer_blocks.7.attn2.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.attn2.to_v.weight": "blocks.25.transformer_blocks.7.attn2.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.ff.net.0.proj.bias": "blocks.25.transformer_blocks.7.act_fn.proj.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.ff.net.0.proj.weight": "blocks.25.transformer_blocks.7.act_fn.proj.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.ff.net.2.bias": "blocks.25.transformer_blocks.7.ff.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.ff.net.2.weight": "blocks.25.transformer_blocks.7.ff.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.norm1.bias": 
"blocks.25.transformer_blocks.7.norm1.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.norm1.weight": "blocks.25.transformer_blocks.7.norm1.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.norm2.bias": "blocks.25.transformer_blocks.7.norm2.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.norm2.weight": "blocks.25.transformer_blocks.7.norm2.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.norm3.bias": "blocks.25.transformer_blocks.7.norm3.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.7.norm3.weight": "blocks.25.transformer_blocks.7.norm3.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.attn1.to_k.weight": "blocks.25.transformer_blocks.8.attn1.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.attn1.to_out.0.bias": "blocks.25.transformer_blocks.8.attn1.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.attn1.to_out.0.weight": "blocks.25.transformer_blocks.8.attn1.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.attn1.to_q.weight": "blocks.25.transformer_blocks.8.attn1.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.attn1.to_v.weight": "blocks.25.transformer_blocks.8.attn1.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.attn2.to_k.weight": "blocks.25.transformer_blocks.8.attn2.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.attn2.to_out.0.bias": "blocks.25.transformer_blocks.8.attn2.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.attn2.to_out.0.weight": "blocks.25.transformer_blocks.8.attn2.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.attn2.to_q.weight": "blocks.25.transformer_blocks.8.attn2.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.attn2.to_v.weight": 
"blocks.25.transformer_blocks.8.attn2.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.ff.net.0.proj.bias": "blocks.25.transformer_blocks.8.act_fn.proj.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.ff.net.0.proj.weight": "blocks.25.transformer_blocks.8.act_fn.proj.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.ff.net.2.bias": "blocks.25.transformer_blocks.8.ff.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.ff.net.2.weight": "blocks.25.transformer_blocks.8.ff.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.norm1.bias": "blocks.25.transformer_blocks.8.norm1.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.norm1.weight": "blocks.25.transformer_blocks.8.norm1.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.norm2.bias": "blocks.25.transformer_blocks.8.norm2.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.norm2.weight": "blocks.25.transformer_blocks.8.norm2.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.norm3.bias": "blocks.25.transformer_blocks.8.norm3.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.8.norm3.weight": "blocks.25.transformer_blocks.8.norm3.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.attn1.to_k.weight": "blocks.25.transformer_blocks.9.attn1.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.attn1.to_out.0.bias": "blocks.25.transformer_blocks.9.attn1.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.attn1.to_out.0.weight": "blocks.25.transformer_blocks.9.attn1.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.attn1.to_q.weight": "blocks.25.transformer_blocks.9.attn1.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.attn1.to_v.weight": 
"blocks.25.transformer_blocks.9.attn1.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.attn2.to_k.weight": "blocks.25.transformer_blocks.9.attn2.to_k.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.attn2.to_out.0.bias": "blocks.25.transformer_blocks.9.attn2.to_out.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.attn2.to_out.0.weight": "blocks.25.transformer_blocks.9.attn2.to_out.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.attn2.to_q.weight": "blocks.25.transformer_blocks.9.attn2.to_q.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.attn2.to_v.weight": "blocks.25.transformer_blocks.9.attn2.to_v.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.ff.net.0.proj.bias": "blocks.25.transformer_blocks.9.act_fn.proj.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.ff.net.0.proj.weight": "blocks.25.transformer_blocks.9.act_fn.proj.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.ff.net.2.bias": "blocks.25.transformer_blocks.9.ff.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.ff.net.2.weight": "blocks.25.transformer_blocks.9.ff.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.norm1.bias": "blocks.25.transformer_blocks.9.norm1.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.norm1.weight": "blocks.25.transformer_blocks.9.norm1.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.norm2.bias": "blocks.25.transformer_blocks.9.norm2.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.norm2.weight": "blocks.25.transformer_blocks.9.norm2.weight", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.norm3.bias": "blocks.25.transformer_blocks.9.norm3.bias", - "model.diffusion_model.output_blocks.0.1.transformer_blocks.9.norm3.weight": 
"blocks.25.transformer_blocks.9.norm3.weight", - "model.diffusion_model.output_blocks.1.0.emb_layers.1.bias": "blocks.27.time_emb_proj.bias", - "model.diffusion_model.output_blocks.1.0.emb_layers.1.weight": "blocks.27.time_emb_proj.weight", - "model.diffusion_model.output_blocks.1.0.in_layers.0.bias": "blocks.27.norm1.bias", - "model.diffusion_model.output_blocks.1.0.in_layers.0.weight": "blocks.27.norm1.weight", - "model.diffusion_model.output_blocks.1.0.in_layers.2.bias": "blocks.27.conv1.bias", - "model.diffusion_model.output_blocks.1.0.in_layers.2.weight": "blocks.27.conv1.weight", - "model.diffusion_model.output_blocks.1.0.out_layers.0.bias": "blocks.27.norm2.bias", - "model.diffusion_model.output_blocks.1.0.out_layers.0.weight": "blocks.27.norm2.weight", - "model.diffusion_model.output_blocks.1.0.out_layers.3.bias": "blocks.27.conv2.bias", - "model.diffusion_model.output_blocks.1.0.out_layers.3.weight": "blocks.27.conv2.weight", - "model.diffusion_model.output_blocks.1.0.skip_connection.bias": "blocks.27.conv_shortcut.bias", - "model.diffusion_model.output_blocks.1.0.skip_connection.weight": "blocks.27.conv_shortcut.weight", - "model.diffusion_model.output_blocks.1.1.norm.bias": "blocks.28.norm.bias", - "model.diffusion_model.output_blocks.1.1.norm.weight": "blocks.28.norm.weight", - "model.diffusion_model.output_blocks.1.1.proj_in.bias": "blocks.28.proj_in.bias", - "model.diffusion_model.output_blocks.1.1.proj_in.weight": "blocks.28.proj_in.weight", - "model.diffusion_model.output_blocks.1.1.proj_out.bias": "blocks.28.proj_out.bias", - "model.diffusion_model.output_blocks.1.1.proj_out.weight": "blocks.28.proj_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.attn1.to_k.weight": "blocks.28.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.28.transformer_blocks.0.attn1.to_out.bias", - 
"model.diffusion_model.output_blocks.1.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.28.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.attn1.to_q.weight": "blocks.28.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.attn1.to_v.weight": "blocks.28.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.attn2.to_k.weight": "blocks.28.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.28.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.28.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.attn2.to_q.weight": "blocks.28.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.attn2.to_v.weight": "blocks.28.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.28.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.28.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.ff.net.2.bias": "blocks.28.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.ff.net.2.weight": "blocks.28.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.norm1.bias": "blocks.28.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.norm1.weight": "blocks.28.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.norm2.bias": "blocks.28.transformer_blocks.0.norm2.bias", - 
"model.diffusion_model.output_blocks.1.1.transformer_blocks.0.norm2.weight": "blocks.28.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.norm3.bias": "blocks.28.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.0.norm3.weight": "blocks.28.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.attn1.to_k.weight": "blocks.28.transformer_blocks.1.attn1.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.attn1.to_out.0.bias": "blocks.28.transformer_blocks.1.attn1.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.attn1.to_out.0.weight": "blocks.28.transformer_blocks.1.attn1.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.attn1.to_q.weight": "blocks.28.transformer_blocks.1.attn1.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.attn1.to_v.weight": "blocks.28.transformer_blocks.1.attn1.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.attn2.to_k.weight": "blocks.28.transformer_blocks.1.attn2.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.attn2.to_out.0.bias": "blocks.28.transformer_blocks.1.attn2.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.attn2.to_out.0.weight": "blocks.28.transformer_blocks.1.attn2.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.attn2.to_q.weight": "blocks.28.transformer_blocks.1.attn2.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.attn2.to_v.weight": "blocks.28.transformer_blocks.1.attn2.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.ff.net.0.proj.bias": "blocks.28.transformer_blocks.1.act_fn.proj.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.ff.net.0.proj.weight": 
"blocks.28.transformer_blocks.1.act_fn.proj.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.ff.net.2.bias": "blocks.28.transformer_blocks.1.ff.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.ff.net.2.weight": "blocks.28.transformer_blocks.1.ff.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.norm1.bias": "blocks.28.transformer_blocks.1.norm1.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.norm1.weight": "blocks.28.transformer_blocks.1.norm1.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.norm2.bias": "blocks.28.transformer_blocks.1.norm2.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.norm2.weight": "blocks.28.transformer_blocks.1.norm2.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.norm3.bias": "blocks.28.transformer_blocks.1.norm3.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.1.norm3.weight": "blocks.28.transformer_blocks.1.norm3.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.attn1.to_k.weight": "blocks.28.transformer_blocks.2.attn1.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.attn1.to_out.0.bias": "blocks.28.transformer_blocks.2.attn1.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.attn1.to_out.0.weight": "blocks.28.transformer_blocks.2.attn1.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.attn1.to_q.weight": "blocks.28.transformer_blocks.2.attn1.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.attn1.to_v.weight": "blocks.28.transformer_blocks.2.attn1.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.attn2.to_k.weight": "blocks.28.transformer_blocks.2.attn2.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.attn2.to_out.0.bias": 
"blocks.28.transformer_blocks.2.attn2.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.attn2.to_out.0.weight": "blocks.28.transformer_blocks.2.attn2.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.attn2.to_q.weight": "blocks.28.transformer_blocks.2.attn2.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.attn2.to_v.weight": "blocks.28.transformer_blocks.2.attn2.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.ff.net.0.proj.bias": "blocks.28.transformer_blocks.2.act_fn.proj.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.ff.net.0.proj.weight": "blocks.28.transformer_blocks.2.act_fn.proj.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.ff.net.2.bias": "blocks.28.transformer_blocks.2.ff.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.ff.net.2.weight": "blocks.28.transformer_blocks.2.ff.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.norm1.bias": "blocks.28.transformer_blocks.2.norm1.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.norm1.weight": "blocks.28.transformer_blocks.2.norm1.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.norm2.bias": "blocks.28.transformer_blocks.2.norm2.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.norm2.weight": "blocks.28.transformer_blocks.2.norm2.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.norm3.bias": "blocks.28.transformer_blocks.2.norm3.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.2.norm3.weight": "blocks.28.transformer_blocks.2.norm3.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.attn1.to_k.weight": "blocks.28.transformer_blocks.3.attn1.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.attn1.to_out.0.bias": 
"blocks.28.transformer_blocks.3.attn1.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.attn1.to_out.0.weight": "blocks.28.transformer_blocks.3.attn1.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.attn1.to_q.weight": "blocks.28.transformer_blocks.3.attn1.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.attn1.to_v.weight": "blocks.28.transformer_blocks.3.attn1.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.attn2.to_k.weight": "blocks.28.transformer_blocks.3.attn2.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.attn2.to_out.0.bias": "blocks.28.transformer_blocks.3.attn2.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.attn2.to_out.0.weight": "blocks.28.transformer_blocks.3.attn2.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.attn2.to_q.weight": "blocks.28.transformer_blocks.3.attn2.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.attn2.to_v.weight": "blocks.28.transformer_blocks.3.attn2.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.ff.net.0.proj.bias": "blocks.28.transformer_blocks.3.act_fn.proj.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.ff.net.0.proj.weight": "blocks.28.transformer_blocks.3.act_fn.proj.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.ff.net.2.bias": "blocks.28.transformer_blocks.3.ff.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.ff.net.2.weight": "blocks.28.transformer_blocks.3.ff.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.norm1.bias": "blocks.28.transformer_blocks.3.norm1.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.norm1.weight": "blocks.28.transformer_blocks.3.norm1.weight", - 
"model.diffusion_model.output_blocks.1.1.transformer_blocks.3.norm2.bias": "blocks.28.transformer_blocks.3.norm2.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.norm2.weight": "blocks.28.transformer_blocks.3.norm2.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.norm3.bias": "blocks.28.transformer_blocks.3.norm3.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.3.norm3.weight": "blocks.28.transformer_blocks.3.norm3.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.attn1.to_k.weight": "blocks.28.transformer_blocks.4.attn1.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.attn1.to_out.0.bias": "blocks.28.transformer_blocks.4.attn1.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.attn1.to_out.0.weight": "blocks.28.transformer_blocks.4.attn1.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.attn1.to_q.weight": "blocks.28.transformer_blocks.4.attn1.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.attn1.to_v.weight": "blocks.28.transformer_blocks.4.attn1.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.attn2.to_k.weight": "blocks.28.transformer_blocks.4.attn2.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.attn2.to_out.0.bias": "blocks.28.transformer_blocks.4.attn2.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.attn2.to_out.0.weight": "blocks.28.transformer_blocks.4.attn2.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.attn2.to_q.weight": "blocks.28.transformer_blocks.4.attn2.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.attn2.to_v.weight": "blocks.28.transformer_blocks.4.attn2.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.ff.net.0.proj.bias": 
"blocks.28.transformer_blocks.4.act_fn.proj.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.ff.net.0.proj.weight": "blocks.28.transformer_blocks.4.act_fn.proj.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.ff.net.2.bias": "blocks.28.transformer_blocks.4.ff.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.ff.net.2.weight": "blocks.28.transformer_blocks.4.ff.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.norm1.bias": "blocks.28.transformer_blocks.4.norm1.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.norm1.weight": "blocks.28.transformer_blocks.4.norm1.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.norm2.bias": "blocks.28.transformer_blocks.4.norm2.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.norm2.weight": "blocks.28.transformer_blocks.4.norm2.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.norm3.bias": "blocks.28.transformer_blocks.4.norm3.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.4.norm3.weight": "blocks.28.transformer_blocks.4.norm3.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.attn1.to_k.weight": "blocks.28.transformer_blocks.5.attn1.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.attn1.to_out.0.bias": "blocks.28.transformer_blocks.5.attn1.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.attn1.to_out.0.weight": "blocks.28.transformer_blocks.5.attn1.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.attn1.to_q.weight": "blocks.28.transformer_blocks.5.attn1.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.attn1.to_v.weight": "blocks.28.transformer_blocks.5.attn1.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.attn2.to_k.weight": 
"blocks.28.transformer_blocks.5.attn2.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.attn2.to_out.0.bias": "blocks.28.transformer_blocks.5.attn2.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.attn2.to_out.0.weight": "blocks.28.transformer_blocks.5.attn2.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.attn2.to_q.weight": "blocks.28.transformer_blocks.5.attn2.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.attn2.to_v.weight": "blocks.28.transformer_blocks.5.attn2.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.ff.net.0.proj.bias": "blocks.28.transformer_blocks.5.act_fn.proj.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.ff.net.0.proj.weight": "blocks.28.transformer_blocks.5.act_fn.proj.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.ff.net.2.bias": "blocks.28.transformer_blocks.5.ff.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.ff.net.2.weight": "blocks.28.transformer_blocks.5.ff.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.norm1.bias": "blocks.28.transformer_blocks.5.norm1.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.norm1.weight": "blocks.28.transformer_blocks.5.norm1.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.norm2.bias": "blocks.28.transformer_blocks.5.norm2.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.norm2.weight": "blocks.28.transformer_blocks.5.norm2.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.norm3.bias": "blocks.28.transformer_blocks.5.norm3.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.5.norm3.weight": "blocks.28.transformer_blocks.5.norm3.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.attn1.to_k.weight": 
"blocks.28.transformer_blocks.6.attn1.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.attn1.to_out.0.bias": "blocks.28.transformer_blocks.6.attn1.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.attn1.to_out.0.weight": "blocks.28.transformer_blocks.6.attn1.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.attn1.to_q.weight": "blocks.28.transformer_blocks.6.attn1.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.attn1.to_v.weight": "blocks.28.transformer_blocks.6.attn1.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.attn2.to_k.weight": "blocks.28.transformer_blocks.6.attn2.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.attn2.to_out.0.bias": "blocks.28.transformer_blocks.6.attn2.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.attn2.to_out.0.weight": "blocks.28.transformer_blocks.6.attn2.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.attn2.to_q.weight": "blocks.28.transformer_blocks.6.attn2.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.attn2.to_v.weight": "blocks.28.transformer_blocks.6.attn2.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.ff.net.0.proj.bias": "blocks.28.transformer_blocks.6.act_fn.proj.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.ff.net.0.proj.weight": "blocks.28.transformer_blocks.6.act_fn.proj.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.ff.net.2.bias": "blocks.28.transformer_blocks.6.ff.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.ff.net.2.weight": "blocks.28.transformer_blocks.6.ff.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.norm1.bias": "blocks.28.transformer_blocks.6.norm1.bias", - 
"model.diffusion_model.output_blocks.1.1.transformer_blocks.6.norm1.weight": "blocks.28.transformer_blocks.6.norm1.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.norm2.bias": "blocks.28.transformer_blocks.6.norm2.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.norm2.weight": "blocks.28.transformer_blocks.6.norm2.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.norm3.bias": "blocks.28.transformer_blocks.6.norm3.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.6.norm3.weight": "blocks.28.transformer_blocks.6.norm3.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.attn1.to_k.weight": "blocks.28.transformer_blocks.7.attn1.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.attn1.to_out.0.bias": "blocks.28.transformer_blocks.7.attn1.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.attn1.to_out.0.weight": "blocks.28.transformer_blocks.7.attn1.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.attn1.to_q.weight": "blocks.28.transformer_blocks.7.attn1.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.attn1.to_v.weight": "blocks.28.transformer_blocks.7.attn1.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.attn2.to_k.weight": "blocks.28.transformer_blocks.7.attn2.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.attn2.to_out.0.bias": "blocks.28.transformer_blocks.7.attn2.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.attn2.to_out.0.weight": "blocks.28.transformer_blocks.7.attn2.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.attn2.to_q.weight": "blocks.28.transformer_blocks.7.attn2.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.attn2.to_v.weight": "blocks.28.transformer_blocks.7.attn2.to_v.weight", - 
"model.diffusion_model.output_blocks.1.1.transformer_blocks.7.ff.net.0.proj.bias": "blocks.28.transformer_blocks.7.act_fn.proj.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.ff.net.0.proj.weight": "blocks.28.transformer_blocks.7.act_fn.proj.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.ff.net.2.bias": "blocks.28.transformer_blocks.7.ff.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.ff.net.2.weight": "blocks.28.transformer_blocks.7.ff.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.norm1.bias": "blocks.28.transformer_blocks.7.norm1.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.norm1.weight": "blocks.28.transformer_blocks.7.norm1.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.norm2.bias": "blocks.28.transformer_blocks.7.norm2.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.norm2.weight": "blocks.28.transformer_blocks.7.norm2.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.norm3.bias": "blocks.28.transformer_blocks.7.norm3.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.7.norm3.weight": "blocks.28.transformer_blocks.7.norm3.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.attn1.to_k.weight": "blocks.28.transformer_blocks.8.attn1.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.attn1.to_out.0.bias": "blocks.28.transformer_blocks.8.attn1.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.attn1.to_out.0.weight": "blocks.28.transformer_blocks.8.attn1.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.attn1.to_q.weight": "blocks.28.transformer_blocks.8.attn1.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.attn1.to_v.weight": "blocks.28.transformer_blocks.8.attn1.to_v.weight", - 
"model.diffusion_model.output_blocks.1.1.transformer_blocks.8.attn2.to_k.weight": "blocks.28.transformer_blocks.8.attn2.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.attn2.to_out.0.bias": "blocks.28.transformer_blocks.8.attn2.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.attn2.to_out.0.weight": "blocks.28.transformer_blocks.8.attn2.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.attn2.to_q.weight": "blocks.28.transformer_blocks.8.attn2.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.attn2.to_v.weight": "blocks.28.transformer_blocks.8.attn2.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.ff.net.0.proj.bias": "blocks.28.transformer_blocks.8.act_fn.proj.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.ff.net.0.proj.weight": "blocks.28.transformer_blocks.8.act_fn.proj.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.ff.net.2.bias": "blocks.28.transformer_blocks.8.ff.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.ff.net.2.weight": "blocks.28.transformer_blocks.8.ff.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.norm1.bias": "blocks.28.transformer_blocks.8.norm1.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.norm1.weight": "blocks.28.transformer_blocks.8.norm1.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.norm2.bias": "blocks.28.transformer_blocks.8.norm2.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.norm2.weight": "blocks.28.transformer_blocks.8.norm2.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.norm3.bias": "blocks.28.transformer_blocks.8.norm3.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.8.norm3.weight": "blocks.28.transformer_blocks.8.norm3.weight", - 
"model.diffusion_model.output_blocks.1.1.transformer_blocks.9.attn1.to_k.weight": "blocks.28.transformer_blocks.9.attn1.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.attn1.to_out.0.bias": "blocks.28.transformer_blocks.9.attn1.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.attn1.to_out.0.weight": "blocks.28.transformer_blocks.9.attn1.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.attn1.to_q.weight": "blocks.28.transformer_blocks.9.attn1.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.attn1.to_v.weight": "blocks.28.transformer_blocks.9.attn1.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.attn2.to_k.weight": "blocks.28.transformer_blocks.9.attn2.to_k.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.attn2.to_out.0.bias": "blocks.28.transformer_blocks.9.attn2.to_out.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.attn2.to_out.0.weight": "blocks.28.transformer_blocks.9.attn2.to_out.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.attn2.to_q.weight": "blocks.28.transformer_blocks.9.attn2.to_q.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.attn2.to_v.weight": "blocks.28.transformer_blocks.9.attn2.to_v.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.ff.net.0.proj.bias": "blocks.28.transformer_blocks.9.act_fn.proj.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.ff.net.0.proj.weight": "blocks.28.transformer_blocks.9.act_fn.proj.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.ff.net.2.bias": "blocks.28.transformer_blocks.9.ff.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.ff.net.2.weight": "blocks.28.transformer_blocks.9.ff.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.norm1.bias": 
"blocks.28.transformer_blocks.9.norm1.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.norm1.weight": "blocks.28.transformer_blocks.9.norm1.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.norm2.bias": "blocks.28.transformer_blocks.9.norm2.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.norm2.weight": "blocks.28.transformer_blocks.9.norm2.weight", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.norm3.bias": "blocks.28.transformer_blocks.9.norm3.bias", - "model.diffusion_model.output_blocks.1.1.transformer_blocks.9.norm3.weight": "blocks.28.transformer_blocks.9.norm3.weight", - "model.diffusion_model.output_blocks.2.0.emb_layers.1.bias": "blocks.30.time_emb_proj.bias", - "model.diffusion_model.output_blocks.2.0.emb_layers.1.weight": "blocks.30.time_emb_proj.weight", - "model.diffusion_model.output_blocks.2.0.in_layers.0.bias": "blocks.30.norm1.bias", - "model.diffusion_model.output_blocks.2.0.in_layers.0.weight": "blocks.30.norm1.weight", - "model.diffusion_model.output_blocks.2.0.in_layers.2.bias": "blocks.30.conv1.bias", - "model.diffusion_model.output_blocks.2.0.in_layers.2.weight": "blocks.30.conv1.weight", - "model.diffusion_model.output_blocks.2.0.out_layers.0.bias": "blocks.30.norm2.bias", - "model.diffusion_model.output_blocks.2.0.out_layers.0.weight": "blocks.30.norm2.weight", - "model.diffusion_model.output_blocks.2.0.out_layers.3.bias": "blocks.30.conv2.bias", - "model.diffusion_model.output_blocks.2.0.out_layers.3.weight": "blocks.30.conv2.weight", - "model.diffusion_model.output_blocks.2.0.skip_connection.bias": "blocks.30.conv_shortcut.bias", - "model.diffusion_model.output_blocks.2.0.skip_connection.weight": "blocks.30.conv_shortcut.weight", - "model.diffusion_model.output_blocks.2.1.norm.bias": "blocks.31.norm.bias", - "model.diffusion_model.output_blocks.2.1.norm.weight": "blocks.31.norm.weight", - "model.diffusion_model.output_blocks.2.1.proj_in.bias": 
"blocks.31.proj_in.bias", - "model.diffusion_model.output_blocks.2.1.proj_in.weight": "blocks.31.proj_in.weight", - "model.diffusion_model.output_blocks.2.1.proj_out.bias": "blocks.31.proj_out.bias", - "model.diffusion_model.output_blocks.2.1.proj_out.weight": "blocks.31.proj_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.attn1.to_k.weight": "blocks.31.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.31.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.31.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.attn1.to_q.weight": "blocks.31.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.attn1.to_v.weight": "blocks.31.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.attn2.to_k.weight": "blocks.31.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.31.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.31.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.attn2.to_q.weight": "blocks.31.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.attn2.to_v.weight": "blocks.31.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.31.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.31.transformer_blocks.0.act_fn.proj.weight", - 
"model.diffusion_model.output_blocks.2.1.transformer_blocks.0.ff.net.2.bias": "blocks.31.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.ff.net.2.weight": "blocks.31.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.norm1.bias": "blocks.31.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.norm1.weight": "blocks.31.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.norm2.bias": "blocks.31.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.norm2.weight": "blocks.31.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.norm3.bias": "blocks.31.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.0.norm3.weight": "blocks.31.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.attn1.to_k.weight": "blocks.31.transformer_blocks.1.attn1.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.attn1.to_out.0.bias": "blocks.31.transformer_blocks.1.attn1.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.attn1.to_out.0.weight": "blocks.31.transformer_blocks.1.attn1.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.attn1.to_q.weight": "blocks.31.transformer_blocks.1.attn1.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.attn1.to_v.weight": "blocks.31.transformer_blocks.1.attn1.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.attn2.to_k.weight": "blocks.31.transformer_blocks.1.attn2.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.attn2.to_out.0.bias": "blocks.31.transformer_blocks.1.attn2.to_out.bias", - 
"model.diffusion_model.output_blocks.2.1.transformer_blocks.1.attn2.to_out.0.weight": "blocks.31.transformer_blocks.1.attn2.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.attn2.to_q.weight": "blocks.31.transformer_blocks.1.attn2.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.attn2.to_v.weight": "blocks.31.transformer_blocks.1.attn2.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.ff.net.0.proj.bias": "blocks.31.transformer_blocks.1.act_fn.proj.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.ff.net.0.proj.weight": "blocks.31.transformer_blocks.1.act_fn.proj.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.ff.net.2.bias": "blocks.31.transformer_blocks.1.ff.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.ff.net.2.weight": "blocks.31.transformer_blocks.1.ff.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.norm1.bias": "blocks.31.transformer_blocks.1.norm1.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.norm1.weight": "blocks.31.transformer_blocks.1.norm1.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.norm2.bias": "blocks.31.transformer_blocks.1.norm2.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.norm2.weight": "blocks.31.transformer_blocks.1.norm2.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.norm3.bias": "blocks.31.transformer_blocks.1.norm3.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.1.norm3.weight": "blocks.31.transformer_blocks.1.norm3.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.attn1.to_k.weight": "blocks.31.transformer_blocks.2.attn1.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.attn1.to_out.0.bias": "blocks.31.transformer_blocks.2.attn1.to_out.bias", - 
"model.diffusion_model.output_blocks.2.1.transformer_blocks.2.attn1.to_out.0.weight": "blocks.31.transformer_blocks.2.attn1.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.attn1.to_q.weight": "blocks.31.transformer_blocks.2.attn1.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.attn1.to_v.weight": "blocks.31.transformer_blocks.2.attn1.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.attn2.to_k.weight": "blocks.31.transformer_blocks.2.attn2.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.attn2.to_out.0.bias": "blocks.31.transformer_blocks.2.attn2.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.attn2.to_out.0.weight": "blocks.31.transformer_blocks.2.attn2.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.attn2.to_q.weight": "blocks.31.transformer_blocks.2.attn2.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.attn2.to_v.weight": "blocks.31.transformer_blocks.2.attn2.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.ff.net.0.proj.bias": "blocks.31.transformer_blocks.2.act_fn.proj.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.ff.net.0.proj.weight": "blocks.31.transformer_blocks.2.act_fn.proj.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.ff.net.2.bias": "blocks.31.transformer_blocks.2.ff.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.ff.net.2.weight": "blocks.31.transformer_blocks.2.ff.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.norm1.bias": "blocks.31.transformer_blocks.2.norm1.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.norm1.weight": "blocks.31.transformer_blocks.2.norm1.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.norm2.bias": "blocks.31.transformer_blocks.2.norm2.bias", - 
"model.diffusion_model.output_blocks.2.1.transformer_blocks.2.norm2.weight": "blocks.31.transformer_blocks.2.norm2.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.norm3.bias": "blocks.31.transformer_blocks.2.norm3.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.2.norm3.weight": "blocks.31.transformer_blocks.2.norm3.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.attn1.to_k.weight": "blocks.31.transformer_blocks.3.attn1.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.attn1.to_out.0.bias": "blocks.31.transformer_blocks.3.attn1.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.attn1.to_out.0.weight": "blocks.31.transformer_blocks.3.attn1.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.attn1.to_q.weight": "blocks.31.transformer_blocks.3.attn1.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.attn1.to_v.weight": "blocks.31.transformer_blocks.3.attn1.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.attn2.to_k.weight": "blocks.31.transformer_blocks.3.attn2.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.attn2.to_out.0.bias": "blocks.31.transformer_blocks.3.attn2.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.attn2.to_out.0.weight": "blocks.31.transformer_blocks.3.attn2.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.attn2.to_q.weight": "blocks.31.transformer_blocks.3.attn2.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.attn2.to_v.weight": "blocks.31.transformer_blocks.3.attn2.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.ff.net.0.proj.bias": "blocks.31.transformer_blocks.3.act_fn.proj.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.ff.net.0.proj.weight": 
"blocks.31.transformer_blocks.3.act_fn.proj.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.ff.net.2.bias": "blocks.31.transformer_blocks.3.ff.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.ff.net.2.weight": "blocks.31.transformer_blocks.3.ff.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.norm1.bias": "blocks.31.transformer_blocks.3.norm1.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.norm1.weight": "blocks.31.transformer_blocks.3.norm1.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.norm2.bias": "blocks.31.transformer_blocks.3.norm2.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.norm2.weight": "blocks.31.transformer_blocks.3.norm2.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.norm3.bias": "blocks.31.transformer_blocks.3.norm3.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.3.norm3.weight": "blocks.31.transformer_blocks.3.norm3.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.attn1.to_k.weight": "blocks.31.transformer_blocks.4.attn1.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.attn1.to_out.0.bias": "blocks.31.transformer_blocks.4.attn1.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.attn1.to_out.0.weight": "blocks.31.transformer_blocks.4.attn1.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.attn1.to_q.weight": "blocks.31.transformer_blocks.4.attn1.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.attn1.to_v.weight": "blocks.31.transformer_blocks.4.attn1.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.attn2.to_k.weight": "blocks.31.transformer_blocks.4.attn2.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.attn2.to_out.0.bias": 
"blocks.31.transformer_blocks.4.attn2.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.attn2.to_out.0.weight": "blocks.31.transformer_blocks.4.attn2.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.attn2.to_q.weight": "blocks.31.transformer_blocks.4.attn2.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.attn2.to_v.weight": "blocks.31.transformer_blocks.4.attn2.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.ff.net.0.proj.bias": "blocks.31.transformer_blocks.4.act_fn.proj.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.ff.net.0.proj.weight": "blocks.31.transformer_blocks.4.act_fn.proj.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.ff.net.2.bias": "blocks.31.transformer_blocks.4.ff.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.ff.net.2.weight": "blocks.31.transformer_blocks.4.ff.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.norm1.bias": "blocks.31.transformer_blocks.4.norm1.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.norm1.weight": "blocks.31.transformer_blocks.4.norm1.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.norm2.bias": "blocks.31.transformer_blocks.4.norm2.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.norm2.weight": "blocks.31.transformer_blocks.4.norm2.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.norm3.bias": "blocks.31.transformer_blocks.4.norm3.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.4.norm3.weight": "blocks.31.transformer_blocks.4.norm3.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.attn1.to_k.weight": "blocks.31.transformer_blocks.5.attn1.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.attn1.to_out.0.bias": 
"blocks.31.transformer_blocks.5.attn1.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.attn1.to_out.0.weight": "blocks.31.transformer_blocks.5.attn1.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.attn1.to_q.weight": "blocks.31.transformer_blocks.5.attn1.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.attn1.to_v.weight": "blocks.31.transformer_blocks.5.attn1.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.attn2.to_k.weight": "blocks.31.transformer_blocks.5.attn2.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.attn2.to_out.0.bias": "blocks.31.transformer_blocks.5.attn2.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.attn2.to_out.0.weight": "blocks.31.transformer_blocks.5.attn2.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.attn2.to_q.weight": "blocks.31.transformer_blocks.5.attn2.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.attn2.to_v.weight": "blocks.31.transformer_blocks.5.attn2.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.ff.net.0.proj.bias": "blocks.31.transformer_blocks.5.act_fn.proj.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.ff.net.0.proj.weight": "blocks.31.transformer_blocks.5.act_fn.proj.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.ff.net.2.bias": "blocks.31.transformer_blocks.5.ff.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.ff.net.2.weight": "blocks.31.transformer_blocks.5.ff.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.norm1.bias": "blocks.31.transformer_blocks.5.norm1.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.norm1.weight": "blocks.31.transformer_blocks.5.norm1.weight", - 
"model.diffusion_model.output_blocks.2.1.transformer_blocks.5.norm2.bias": "blocks.31.transformer_blocks.5.norm2.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.norm2.weight": "blocks.31.transformer_blocks.5.norm2.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.norm3.bias": "blocks.31.transformer_blocks.5.norm3.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.5.norm3.weight": "blocks.31.transformer_blocks.5.norm3.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.attn1.to_k.weight": "blocks.31.transformer_blocks.6.attn1.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.attn1.to_out.0.bias": "blocks.31.transformer_blocks.6.attn1.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.attn1.to_out.0.weight": "blocks.31.transformer_blocks.6.attn1.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.attn1.to_q.weight": "blocks.31.transformer_blocks.6.attn1.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.attn1.to_v.weight": "blocks.31.transformer_blocks.6.attn1.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.attn2.to_k.weight": "blocks.31.transformer_blocks.6.attn2.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.attn2.to_out.0.bias": "blocks.31.transformer_blocks.6.attn2.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.attn2.to_out.0.weight": "blocks.31.transformer_blocks.6.attn2.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.attn2.to_q.weight": "blocks.31.transformer_blocks.6.attn2.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.attn2.to_v.weight": "blocks.31.transformer_blocks.6.attn2.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.ff.net.0.proj.bias": 
"blocks.31.transformer_blocks.6.act_fn.proj.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.ff.net.0.proj.weight": "blocks.31.transformer_blocks.6.act_fn.proj.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.ff.net.2.bias": "blocks.31.transformer_blocks.6.ff.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.ff.net.2.weight": "blocks.31.transformer_blocks.6.ff.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.norm1.bias": "blocks.31.transformer_blocks.6.norm1.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.norm1.weight": "blocks.31.transformer_blocks.6.norm1.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.norm2.bias": "blocks.31.transformer_blocks.6.norm2.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.norm2.weight": "blocks.31.transformer_blocks.6.norm2.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.norm3.bias": "blocks.31.transformer_blocks.6.norm3.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.6.norm3.weight": "blocks.31.transformer_blocks.6.norm3.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.attn1.to_k.weight": "blocks.31.transformer_blocks.7.attn1.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.attn1.to_out.0.bias": "blocks.31.transformer_blocks.7.attn1.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.attn1.to_out.0.weight": "blocks.31.transformer_blocks.7.attn1.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.attn1.to_q.weight": "blocks.31.transformer_blocks.7.attn1.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.attn1.to_v.weight": "blocks.31.transformer_blocks.7.attn1.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.attn2.to_k.weight": 
"blocks.31.transformer_blocks.7.attn2.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.attn2.to_out.0.bias": "blocks.31.transformer_blocks.7.attn2.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.attn2.to_out.0.weight": "blocks.31.transformer_blocks.7.attn2.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.attn2.to_q.weight": "blocks.31.transformer_blocks.7.attn2.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.attn2.to_v.weight": "blocks.31.transformer_blocks.7.attn2.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.ff.net.0.proj.bias": "blocks.31.transformer_blocks.7.act_fn.proj.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.ff.net.0.proj.weight": "blocks.31.transformer_blocks.7.act_fn.proj.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.ff.net.2.bias": "blocks.31.transformer_blocks.7.ff.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.ff.net.2.weight": "blocks.31.transformer_blocks.7.ff.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.norm1.bias": "blocks.31.transformer_blocks.7.norm1.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.norm1.weight": "blocks.31.transformer_blocks.7.norm1.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.norm2.bias": "blocks.31.transformer_blocks.7.norm2.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.norm2.weight": "blocks.31.transformer_blocks.7.norm2.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.norm3.bias": "blocks.31.transformer_blocks.7.norm3.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.7.norm3.weight": "blocks.31.transformer_blocks.7.norm3.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.attn1.to_k.weight": 
"blocks.31.transformer_blocks.8.attn1.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.attn1.to_out.0.bias": "blocks.31.transformer_blocks.8.attn1.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.attn1.to_out.0.weight": "blocks.31.transformer_blocks.8.attn1.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.attn1.to_q.weight": "blocks.31.transformer_blocks.8.attn1.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.attn1.to_v.weight": "blocks.31.transformer_blocks.8.attn1.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.attn2.to_k.weight": "blocks.31.transformer_blocks.8.attn2.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.attn2.to_out.0.bias": "blocks.31.transformer_blocks.8.attn2.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.attn2.to_out.0.weight": "blocks.31.transformer_blocks.8.attn2.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.attn2.to_q.weight": "blocks.31.transformer_blocks.8.attn2.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.attn2.to_v.weight": "blocks.31.transformer_blocks.8.attn2.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.ff.net.0.proj.bias": "blocks.31.transformer_blocks.8.act_fn.proj.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.ff.net.0.proj.weight": "blocks.31.transformer_blocks.8.act_fn.proj.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.ff.net.2.bias": "blocks.31.transformer_blocks.8.ff.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.ff.net.2.weight": "blocks.31.transformer_blocks.8.ff.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.norm1.bias": "blocks.31.transformer_blocks.8.norm1.bias", - 
"model.diffusion_model.output_blocks.2.1.transformer_blocks.8.norm1.weight": "blocks.31.transformer_blocks.8.norm1.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.norm2.bias": "blocks.31.transformer_blocks.8.norm2.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.norm2.weight": "blocks.31.transformer_blocks.8.norm2.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.norm3.bias": "blocks.31.transformer_blocks.8.norm3.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.8.norm3.weight": "blocks.31.transformer_blocks.8.norm3.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.attn1.to_k.weight": "blocks.31.transformer_blocks.9.attn1.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.attn1.to_out.0.bias": "blocks.31.transformer_blocks.9.attn1.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.attn1.to_out.0.weight": "blocks.31.transformer_blocks.9.attn1.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.attn1.to_q.weight": "blocks.31.transformer_blocks.9.attn1.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.attn1.to_v.weight": "blocks.31.transformer_blocks.9.attn1.to_v.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.attn2.to_k.weight": "blocks.31.transformer_blocks.9.attn2.to_k.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.attn2.to_out.0.bias": "blocks.31.transformer_blocks.9.attn2.to_out.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.attn2.to_out.0.weight": "blocks.31.transformer_blocks.9.attn2.to_out.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.attn2.to_q.weight": "blocks.31.transformer_blocks.9.attn2.to_q.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.attn2.to_v.weight": "blocks.31.transformer_blocks.9.attn2.to_v.weight", - 
"model.diffusion_model.output_blocks.2.1.transformer_blocks.9.ff.net.0.proj.bias": "blocks.31.transformer_blocks.9.act_fn.proj.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.ff.net.0.proj.weight": "blocks.31.transformer_blocks.9.act_fn.proj.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.ff.net.2.bias": "blocks.31.transformer_blocks.9.ff.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.ff.net.2.weight": "blocks.31.transformer_blocks.9.ff.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.norm1.bias": "blocks.31.transformer_blocks.9.norm1.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.norm1.weight": "blocks.31.transformer_blocks.9.norm1.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.norm2.bias": "blocks.31.transformer_blocks.9.norm2.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.norm2.weight": "blocks.31.transformer_blocks.9.norm2.weight", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.norm3.bias": "blocks.31.transformer_blocks.9.norm3.bias", - "model.diffusion_model.output_blocks.2.1.transformer_blocks.9.norm3.weight": "blocks.31.transformer_blocks.9.norm3.weight", - "model.diffusion_model.output_blocks.2.2.conv.bias": "blocks.32.conv.bias", - "model.diffusion_model.output_blocks.2.2.conv.weight": "blocks.32.conv.weight", - "model.diffusion_model.output_blocks.3.0.emb_layers.1.bias": "blocks.34.time_emb_proj.bias", - "model.diffusion_model.output_blocks.3.0.emb_layers.1.weight": "blocks.34.time_emb_proj.weight", - "model.diffusion_model.output_blocks.3.0.in_layers.0.bias": "blocks.34.norm1.bias", - "model.diffusion_model.output_blocks.3.0.in_layers.0.weight": "blocks.34.norm1.weight", - "model.diffusion_model.output_blocks.3.0.in_layers.2.bias": "blocks.34.conv1.bias", - "model.diffusion_model.output_blocks.3.0.in_layers.2.weight": "blocks.34.conv1.weight", - 
"model.diffusion_model.output_blocks.3.0.out_layers.0.bias": "blocks.34.norm2.bias", - "model.diffusion_model.output_blocks.3.0.out_layers.0.weight": "blocks.34.norm2.weight", - "model.diffusion_model.output_blocks.3.0.out_layers.3.bias": "blocks.34.conv2.bias", - "model.diffusion_model.output_blocks.3.0.out_layers.3.weight": "blocks.34.conv2.weight", - "model.diffusion_model.output_blocks.3.0.skip_connection.bias": "blocks.34.conv_shortcut.bias", - "model.diffusion_model.output_blocks.3.0.skip_connection.weight": "blocks.34.conv_shortcut.weight", - "model.diffusion_model.output_blocks.3.1.norm.bias": "blocks.35.norm.bias", - "model.diffusion_model.output_blocks.3.1.norm.weight": "blocks.35.norm.weight", - "model.diffusion_model.output_blocks.3.1.proj_in.bias": "blocks.35.proj_in.bias", - "model.diffusion_model.output_blocks.3.1.proj_in.weight": "blocks.35.proj_in.weight", - "model.diffusion_model.output_blocks.3.1.proj_out.bias": "blocks.35.proj_out.bias", - "model.diffusion_model.output_blocks.3.1.proj_out.weight": "blocks.35.proj_out.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn1.to_k.weight": "blocks.35.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.35.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.35.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn1.to_q.weight": "blocks.35.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn1.to_v.weight": "blocks.35.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn2.to_k.weight": "blocks.35.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn2.to_out.0.bias": 
"blocks.35.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.35.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn2.to_q.weight": "blocks.35.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn2.to_v.weight": "blocks.35.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.35.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.35.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.ff.net.2.bias": "blocks.35.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.ff.net.2.weight": "blocks.35.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm1.bias": "blocks.35.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm1.weight": "blocks.35.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm2.bias": "blocks.35.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm2.weight": "blocks.35.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm3.bias": "blocks.35.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm3.weight": "blocks.35.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.attn1.to_k.weight": "blocks.35.transformer_blocks.1.attn1.to_k.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.attn1.to_out.0.bias": 
"blocks.35.transformer_blocks.1.attn1.to_out.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.attn1.to_out.0.weight": "blocks.35.transformer_blocks.1.attn1.to_out.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.attn1.to_q.weight": "blocks.35.transformer_blocks.1.attn1.to_q.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.attn1.to_v.weight": "blocks.35.transformer_blocks.1.attn1.to_v.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.attn2.to_k.weight": "blocks.35.transformer_blocks.1.attn2.to_k.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.attn2.to_out.0.bias": "blocks.35.transformer_blocks.1.attn2.to_out.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.attn2.to_out.0.weight": "blocks.35.transformer_blocks.1.attn2.to_out.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.attn2.to_q.weight": "blocks.35.transformer_blocks.1.attn2.to_q.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.attn2.to_v.weight": "blocks.35.transformer_blocks.1.attn2.to_v.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.ff.net.0.proj.bias": "blocks.35.transformer_blocks.1.act_fn.proj.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.ff.net.0.proj.weight": "blocks.35.transformer_blocks.1.act_fn.proj.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.ff.net.2.bias": "blocks.35.transformer_blocks.1.ff.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.ff.net.2.weight": "blocks.35.transformer_blocks.1.ff.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.norm1.bias": "blocks.35.transformer_blocks.1.norm1.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.norm1.weight": "blocks.35.transformer_blocks.1.norm1.weight", - 
"model.diffusion_model.output_blocks.3.1.transformer_blocks.1.norm2.bias": "blocks.35.transformer_blocks.1.norm2.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.norm2.weight": "blocks.35.transformer_blocks.1.norm2.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.norm3.bias": "blocks.35.transformer_blocks.1.norm3.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.1.norm3.weight": "blocks.35.transformer_blocks.1.norm3.weight", - "model.diffusion_model.output_blocks.4.0.emb_layers.1.bias": "blocks.37.time_emb_proj.bias", - "model.diffusion_model.output_blocks.4.0.emb_layers.1.weight": "blocks.37.time_emb_proj.weight", - "model.diffusion_model.output_blocks.4.0.in_layers.0.bias": "blocks.37.norm1.bias", - "model.diffusion_model.output_blocks.4.0.in_layers.0.weight": "blocks.37.norm1.weight", - "model.diffusion_model.output_blocks.4.0.in_layers.2.bias": "blocks.37.conv1.bias", - "model.diffusion_model.output_blocks.4.0.in_layers.2.weight": "blocks.37.conv1.weight", - "model.diffusion_model.output_blocks.4.0.out_layers.0.bias": "blocks.37.norm2.bias", - "model.diffusion_model.output_blocks.4.0.out_layers.0.weight": "blocks.37.norm2.weight", - "model.diffusion_model.output_blocks.4.0.out_layers.3.bias": "blocks.37.conv2.bias", - "model.diffusion_model.output_blocks.4.0.out_layers.3.weight": "blocks.37.conv2.weight", - "model.diffusion_model.output_blocks.4.0.skip_connection.bias": "blocks.37.conv_shortcut.bias", - "model.diffusion_model.output_blocks.4.0.skip_connection.weight": "blocks.37.conv_shortcut.weight", - "model.diffusion_model.output_blocks.4.1.norm.bias": "blocks.38.norm.bias", - "model.diffusion_model.output_blocks.4.1.norm.weight": "blocks.38.norm.weight", - "model.diffusion_model.output_blocks.4.1.proj_in.bias": "blocks.38.proj_in.bias", - "model.diffusion_model.output_blocks.4.1.proj_in.weight": "blocks.38.proj_in.weight", - "model.diffusion_model.output_blocks.4.1.proj_out.bias": 
"blocks.38.proj_out.bias", - "model.diffusion_model.output_blocks.4.1.proj_out.weight": "blocks.38.proj_out.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn1.to_k.weight": "blocks.38.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.38.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.38.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn1.to_q.weight": "blocks.38.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn1.to_v.weight": "blocks.38.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn2.to_k.weight": "blocks.38.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.38.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.38.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn2.to_q.weight": "blocks.38.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn2.to_v.weight": "blocks.38.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.38.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.38.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.ff.net.2.bias": "blocks.38.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.ff.net.2.weight": 
"blocks.38.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm1.bias": "blocks.38.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm1.weight": "blocks.38.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm2.bias": "blocks.38.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm2.weight": "blocks.38.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm3.bias": "blocks.38.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm3.weight": "blocks.38.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.attn1.to_k.weight": "blocks.38.transformer_blocks.1.attn1.to_k.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.attn1.to_out.0.bias": "blocks.38.transformer_blocks.1.attn1.to_out.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.attn1.to_out.0.weight": "blocks.38.transformer_blocks.1.attn1.to_out.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.attn1.to_q.weight": "blocks.38.transformer_blocks.1.attn1.to_q.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.attn1.to_v.weight": "blocks.38.transformer_blocks.1.attn1.to_v.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.attn2.to_k.weight": "blocks.38.transformer_blocks.1.attn2.to_k.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.attn2.to_out.0.bias": "blocks.38.transformer_blocks.1.attn2.to_out.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.attn2.to_out.0.weight": "blocks.38.transformer_blocks.1.attn2.to_out.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.attn2.to_q.weight": 
"blocks.38.transformer_blocks.1.attn2.to_q.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.attn2.to_v.weight": "blocks.38.transformer_blocks.1.attn2.to_v.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.ff.net.0.proj.bias": "blocks.38.transformer_blocks.1.act_fn.proj.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.ff.net.0.proj.weight": "blocks.38.transformer_blocks.1.act_fn.proj.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.ff.net.2.bias": "blocks.38.transformer_blocks.1.ff.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.ff.net.2.weight": "blocks.38.transformer_blocks.1.ff.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.norm1.bias": "blocks.38.transformer_blocks.1.norm1.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.norm1.weight": "blocks.38.transformer_blocks.1.norm1.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.norm2.bias": "blocks.38.transformer_blocks.1.norm2.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.norm2.weight": "blocks.38.transformer_blocks.1.norm2.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.norm3.bias": "blocks.38.transformer_blocks.1.norm3.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.1.norm3.weight": "blocks.38.transformer_blocks.1.norm3.weight", - "model.diffusion_model.output_blocks.5.0.emb_layers.1.bias": "blocks.40.time_emb_proj.bias", - "model.diffusion_model.output_blocks.5.0.emb_layers.1.weight": "blocks.40.time_emb_proj.weight", - "model.diffusion_model.output_blocks.5.0.in_layers.0.bias": "blocks.40.norm1.bias", - "model.diffusion_model.output_blocks.5.0.in_layers.0.weight": "blocks.40.norm1.weight", - "model.diffusion_model.output_blocks.5.0.in_layers.2.bias": "blocks.40.conv1.bias", - "model.diffusion_model.output_blocks.5.0.in_layers.2.weight": 
"blocks.40.conv1.weight", - "model.diffusion_model.output_blocks.5.0.out_layers.0.bias": "blocks.40.norm2.bias", - "model.diffusion_model.output_blocks.5.0.out_layers.0.weight": "blocks.40.norm2.weight", - "model.diffusion_model.output_blocks.5.0.out_layers.3.bias": "blocks.40.conv2.bias", - "model.diffusion_model.output_blocks.5.0.out_layers.3.weight": "blocks.40.conv2.weight", - "model.diffusion_model.output_blocks.5.0.skip_connection.bias": "blocks.40.conv_shortcut.bias", - "model.diffusion_model.output_blocks.5.0.skip_connection.weight": "blocks.40.conv_shortcut.weight", - "model.diffusion_model.output_blocks.5.1.norm.bias": "blocks.41.norm.bias", - "model.diffusion_model.output_blocks.5.1.norm.weight": "blocks.41.norm.weight", - "model.diffusion_model.output_blocks.5.1.proj_in.bias": "blocks.41.proj_in.bias", - "model.diffusion_model.output_blocks.5.1.proj_in.weight": "blocks.41.proj_in.weight", - "model.diffusion_model.output_blocks.5.1.proj_out.bias": "blocks.41.proj_out.bias", - "model.diffusion_model.output_blocks.5.1.proj_out.weight": "blocks.41.proj_out.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn1.to_k.weight": "blocks.41.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.41.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.41.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn1.to_q.weight": "blocks.41.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn1.to_v.weight": "blocks.41.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn2.to_k.weight": "blocks.41.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn2.to_out.0.bias": 
"blocks.41.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.41.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn2.to_q.weight": "blocks.41.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn2.to_v.weight": "blocks.41.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.41.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.41.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.ff.net.2.bias": "blocks.41.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.ff.net.2.weight": "blocks.41.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm1.bias": "blocks.41.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm1.weight": "blocks.41.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm2.bias": "blocks.41.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm2.weight": "blocks.41.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm3.bias": "blocks.41.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm3.weight": "blocks.41.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.attn1.to_k.weight": "blocks.41.transformer_blocks.1.attn1.to_k.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.attn1.to_out.0.bias": 
"blocks.41.transformer_blocks.1.attn1.to_out.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.attn1.to_out.0.weight": "blocks.41.transformer_blocks.1.attn1.to_out.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.attn1.to_q.weight": "blocks.41.transformer_blocks.1.attn1.to_q.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.attn1.to_v.weight": "blocks.41.transformer_blocks.1.attn1.to_v.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.attn2.to_k.weight": "blocks.41.transformer_blocks.1.attn2.to_k.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.attn2.to_out.0.bias": "blocks.41.transformer_blocks.1.attn2.to_out.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.attn2.to_out.0.weight": "blocks.41.transformer_blocks.1.attn2.to_out.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.attn2.to_q.weight": "blocks.41.transformer_blocks.1.attn2.to_q.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.attn2.to_v.weight": "blocks.41.transformer_blocks.1.attn2.to_v.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.ff.net.0.proj.bias": "blocks.41.transformer_blocks.1.act_fn.proj.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.ff.net.0.proj.weight": "blocks.41.transformer_blocks.1.act_fn.proj.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.ff.net.2.bias": "blocks.41.transformer_blocks.1.ff.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.ff.net.2.weight": "blocks.41.transformer_blocks.1.ff.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.norm1.bias": "blocks.41.transformer_blocks.1.norm1.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.norm1.weight": "blocks.41.transformer_blocks.1.norm1.weight", - 
"model.diffusion_model.output_blocks.5.1.transformer_blocks.1.norm2.bias": "blocks.41.transformer_blocks.1.norm2.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.norm2.weight": "blocks.41.transformer_blocks.1.norm2.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.norm3.bias": "blocks.41.transformer_blocks.1.norm3.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.1.norm3.weight": "blocks.41.transformer_blocks.1.norm3.weight", - "model.diffusion_model.output_blocks.5.2.conv.bias": "blocks.42.conv.bias", - "model.diffusion_model.output_blocks.5.2.conv.weight": "blocks.42.conv.weight", - "model.diffusion_model.output_blocks.6.0.emb_layers.1.bias": "blocks.44.time_emb_proj.bias", - "model.diffusion_model.output_blocks.6.0.emb_layers.1.weight": "blocks.44.time_emb_proj.weight", - "model.diffusion_model.output_blocks.6.0.in_layers.0.bias": "blocks.44.norm1.bias", - "model.diffusion_model.output_blocks.6.0.in_layers.0.weight": "blocks.44.norm1.weight", - "model.diffusion_model.output_blocks.6.0.in_layers.2.bias": "blocks.44.conv1.bias", - "model.diffusion_model.output_blocks.6.0.in_layers.2.weight": "blocks.44.conv1.weight", - "model.diffusion_model.output_blocks.6.0.out_layers.0.bias": "blocks.44.norm2.bias", - "model.diffusion_model.output_blocks.6.0.out_layers.0.weight": "blocks.44.norm2.weight", - "model.diffusion_model.output_blocks.6.0.out_layers.3.bias": "blocks.44.conv2.bias", - "model.diffusion_model.output_blocks.6.0.out_layers.3.weight": "blocks.44.conv2.weight", - "model.diffusion_model.output_blocks.6.0.skip_connection.bias": "blocks.44.conv_shortcut.bias", - "model.diffusion_model.output_blocks.6.0.skip_connection.weight": "blocks.44.conv_shortcut.weight", - "model.diffusion_model.output_blocks.7.0.emb_layers.1.bias": "blocks.46.time_emb_proj.bias", - "model.diffusion_model.output_blocks.7.0.emb_layers.1.weight": "blocks.46.time_emb_proj.weight", - 
"model.diffusion_model.output_blocks.7.0.in_layers.0.bias": "blocks.46.norm1.bias", - "model.diffusion_model.output_blocks.7.0.in_layers.0.weight": "blocks.46.norm1.weight", - "model.diffusion_model.output_blocks.7.0.in_layers.2.bias": "blocks.46.conv1.bias", - "model.diffusion_model.output_blocks.7.0.in_layers.2.weight": "blocks.46.conv1.weight", - "model.diffusion_model.output_blocks.7.0.out_layers.0.bias": "blocks.46.norm2.bias", - "model.diffusion_model.output_blocks.7.0.out_layers.0.weight": "blocks.46.norm2.weight", - "model.diffusion_model.output_blocks.7.0.out_layers.3.bias": "blocks.46.conv2.bias", - "model.diffusion_model.output_blocks.7.0.out_layers.3.weight": "blocks.46.conv2.weight", - "model.diffusion_model.output_blocks.7.0.skip_connection.bias": "blocks.46.conv_shortcut.bias", - "model.diffusion_model.output_blocks.7.0.skip_connection.weight": "blocks.46.conv_shortcut.weight", - "model.diffusion_model.output_blocks.8.0.emb_layers.1.bias": "blocks.48.time_emb_proj.bias", - "model.diffusion_model.output_blocks.8.0.emb_layers.1.weight": "blocks.48.time_emb_proj.weight", - "model.diffusion_model.output_blocks.8.0.in_layers.0.bias": "blocks.48.norm1.bias", - "model.diffusion_model.output_blocks.8.0.in_layers.0.weight": "blocks.48.norm1.weight", - "model.diffusion_model.output_blocks.8.0.in_layers.2.bias": "blocks.48.conv1.bias", - "model.diffusion_model.output_blocks.8.0.in_layers.2.weight": "blocks.48.conv1.weight", - "model.diffusion_model.output_blocks.8.0.out_layers.0.bias": "blocks.48.norm2.bias", - "model.diffusion_model.output_blocks.8.0.out_layers.0.weight": "blocks.48.norm2.weight", - "model.diffusion_model.output_blocks.8.0.out_layers.3.bias": "blocks.48.conv2.bias", - "model.diffusion_model.output_blocks.8.0.out_layers.3.weight": "blocks.48.conv2.weight", - "model.diffusion_model.output_blocks.8.0.skip_connection.bias": "blocks.48.conv_shortcut.bias", - "model.diffusion_model.output_blocks.8.0.skip_connection.weight": 
"blocks.48.conv_shortcut.weight", - "model.diffusion_model.time_embed.0.bias": "time_embedding.0.bias", - "model.diffusion_model.time_embed.0.weight": "time_embedding.0.weight", - "model.diffusion_model.time_embed.2.bias": "time_embedding.2.bias", - "model.diffusion_model.time_embed.2.weight": "time_embedding.2.weight", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if ".proj_in." in name or ".proj_out." in name: - param = param.squeeze() - state_dict_[rename_dict[name]] = param - if "text_intermediate_proj.weight" in state_dict_: - return state_dict_, {"is_kolors": True} - else: - return state_dict_ diff --git a/diffsynth/models/sdxl_vae_decoder.py b/diffsynth/models/sdxl_vae_decoder.py deleted file mode 100644 index 290c785..0000000 --- a/diffsynth/models/sdxl_vae_decoder.py +++ /dev/null @@ -1,24 +0,0 @@ -from .sd_vae_decoder import SDVAEDecoder, SDVAEDecoderStateDictConverter - - -class SDXLVAEDecoder(SDVAEDecoder): - def __init__(self, upcast_to_float32=True): - super().__init__() - self.scaling_factor = 0.13025 - - @staticmethod - def state_dict_converter(): - return SDXLVAEDecoderStateDictConverter() - - -class SDXLVAEDecoderStateDictConverter(SDVAEDecoderStateDictConverter): - def __init__(self): - super().__init__() - - def from_diffusers(self, state_dict): - state_dict = super().from_diffusers(state_dict) - return state_dict, {"upcast_to_float32": True} - - def from_civitai(self, state_dict): - state_dict = super().from_civitai(state_dict) - return state_dict, {"upcast_to_float32": True} diff --git a/diffsynth/models/sdxl_vae_encoder.py b/diffsynth/models/sdxl_vae_encoder.py deleted file mode 100644 index 14af09c..0000000 --- a/diffsynth/models/sdxl_vae_encoder.py +++ /dev/null @@ -1,24 +0,0 @@ -from .sd_vae_encoder import SDVAEEncoderStateDictConverter, SDVAEEncoder - - -class SDXLVAEEncoder(SDVAEEncoder): - def __init__(self, upcast_to_float32=True): - super().__init__() - self.scaling_factor = 
0.13025 - - @staticmethod - def state_dict_converter(): - return SDXLVAEEncoderStateDictConverter() - - -class SDXLVAEEncoderStateDictConverter(SDVAEEncoderStateDictConverter): - def __init__(self): - super().__init__() - - def from_diffusers(self, state_dict): - state_dict = super().from_diffusers(state_dict) - return state_dict, {"upcast_to_float32": True} - - def from_civitai(self, state_dict): - state_dict = super().from_civitai(state_dict) - return state_dict, {"upcast_to_float32": True} diff --git a/diffsynth/models/step1x_connector.py b/diffsynth/models/step1x_connector.py deleted file mode 100644 index 9d5f0d9..0000000 --- a/diffsynth/models/step1x_connector.py +++ /dev/null @@ -1,683 +0,0 @@ -from typing import Optional - -import torch, math -import torch.nn -from einops import rearrange -from torch import nn -from functools import partial -from einops import rearrange - - - -def attention(q, k, v, attn_mask, mode="torch"): - q = q.transpose(1, 2) - k = k.transpose(1, 2) - v = v.transpose(1, 2) - x = torch.nn.functional.scaled_dot_product_attention(q, k, v, attn_mask=attn_mask) - x = rearrange(x, "b n s d -> b s (n d)") - return x - - - -class MLP(nn.Module): - """MLP as used in Vision Transformer, MLP-Mixer and related networks""" - - def __init__( - self, - in_channels, - hidden_channels=None, - out_features=None, - act_layer=nn.GELU, - norm_layer=None, - bias=True, - drop=0.0, - use_conv=False, - device=None, - dtype=None, - ): - super().__init__() - out_features = out_features or in_channels - hidden_channels = hidden_channels or in_channels - bias = (bias, bias) - drop_probs = (drop, drop) - linear_layer = partial(nn.Conv2d, kernel_size=1) if use_conv else nn.Linear - - self.fc1 = linear_layer( - in_channels, hidden_channels, bias=bias[0], device=device, dtype=dtype - ) - self.act = act_layer() - self.drop1 = nn.Dropout(drop_probs[0]) - self.norm = ( - norm_layer(hidden_channels, device=device, dtype=dtype) - if norm_layer is not None - else 
nn.Identity() - ) - self.fc2 = linear_layer( - hidden_channels, out_features, bias=bias[1], device=device, dtype=dtype - ) - self.drop2 = nn.Dropout(drop_probs[1]) - - def forward(self, x): - x = self.fc1(x) - x = self.act(x) - x = self.drop1(x) - x = self.norm(x) - x = self.fc2(x) - x = self.drop2(x) - return x - - -class TextProjection(nn.Module): - """ - Projects text embeddings. Also handles dropout for classifier-free guidance. - - Adapted from https://github.com/PixArt-alpha/PixArt-alpha/blob/master/diffusion/model/nets/PixArt_blocks.py - """ - - def __init__(self, in_channels, hidden_size, act_layer, dtype=None, device=None): - factory_kwargs = {"dtype": dtype, "device": device} - super().__init__() - self.linear_1 = nn.Linear( - in_features=in_channels, - out_features=hidden_size, - bias=True, - **factory_kwargs, - ) - self.act_1 = act_layer() - self.linear_2 = nn.Linear( - in_features=hidden_size, - out_features=hidden_size, - bias=True, - **factory_kwargs, - ) - - def forward(self, caption): - hidden_states = self.linear_1(caption) - hidden_states = self.act_1(hidden_states) - hidden_states = self.linear_2(hidden_states) - return hidden_states - - -class TimestepEmbedder(nn.Module): - """ - Embeds scalar timesteps into vector representations. 
- """ - - def __init__( - self, - hidden_size, - act_layer, - frequency_embedding_size=256, - max_period=10000, - out_size=None, - dtype=None, - device=None, - ): - factory_kwargs = {"dtype": dtype, "device": device} - super().__init__() - self.frequency_embedding_size = frequency_embedding_size - self.max_period = max_period - if out_size is None: - out_size = hidden_size - - self.mlp = nn.Sequential( - nn.Linear( - frequency_embedding_size, hidden_size, bias=True, **factory_kwargs - ), - act_layer(), - nn.Linear(hidden_size, out_size, bias=True, **factory_kwargs), - ) - nn.init.normal_(self.mlp[0].weight, std=0.02) # type: ignore - nn.init.normal_(self.mlp[2].weight, std=0.02) # type: ignore - - @staticmethod - def timestep_embedding(t, dim, max_period=10000): - """ - Create sinusoidal timestep embeddings. - - Args: - t (torch.Tensor): a 1-D Tensor of N indices, one per batch element. These may be fractional. - dim (int): the dimension of the output. - max_period (int): controls the minimum frequency of the embeddings. - - Returns: - embedding (torch.Tensor): An (N, D) Tensor of positional embeddings. - - .. ref_link: https://github.com/openai/glide-text2im/blob/main/glide_text2im/nn.py - """ - half = dim // 2 - freqs = torch.exp( - -math.log(max_period) - * torch.arange(start=0, end=half, dtype=torch.float32) - / half - ).to(device=t.device) - args = t[:, None].float() * freqs[None] - embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) - if dim % 2: - embedding = torch.cat( - [embedding, torch.zeros_like(embedding[:, :1])], dim=-1 - ) - return embedding - - def forward(self, t): - t_freq = self.timestep_embedding( - t, self.frequency_embedding_size, self.max_period - ).type(t.dtype) # type: ignore - t_emb = self.mlp(t_freq) - return t_emb - - -def apply_gate(x, gate=None, tanh=False): - """AI is creating summary for apply_gate - - Args: - x (torch.Tensor): input tensor. - gate (torch.Tensor, optional): gate tensor. Defaults to None. 
- tanh (bool, optional): whether to use tanh function. Defaults to False. - - Returns: - torch.Tensor: the output tensor after apply gate. - """ - if gate is None: - return x - if tanh: - return x * gate.unsqueeze(1).tanh() - else: - return x * gate.unsqueeze(1) - - -class RMSNorm(nn.Module): - def __init__( - self, - dim: int, - elementwise_affine=True, - eps: float = 1e-6, - device=None, - dtype=None, - ): - """ - Initialize the RMSNorm normalization layer. - - Args: - dim (int): The dimension of the input tensor. - eps (float, optional): A small value added to the denominator for numerical stability. Default is 1e-6. - - Attributes: - eps (float): A small value added to the denominator for numerical stability. - weight (nn.Parameter): Learnable scaling parameter. - - """ - factory_kwargs = {"device": device, "dtype": dtype} - super().__init__() - self.eps = eps - if elementwise_affine: - self.weight = nn.Parameter(torch.ones(dim, **factory_kwargs)) - - def _norm(self, x): - """ - Apply the RMSNorm normalization to the input tensor. - - Args: - x (torch.Tensor): The input tensor. - - Returns: - torch.Tensor: The normalized tensor. - - """ - return x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + self.eps) - - def forward(self, x): - """ - Forward pass through the RMSNorm layer. - - Args: - x (torch.Tensor): The input tensor. - - Returns: - torch.Tensor: The output tensor after applying RMSNorm. - - """ - output = self._norm(x.float()).type_as(x) - if hasattr(self, "weight"): - output = output * self.weight - return output - - -def get_norm_layer(norm_layer): - """ - Get the normalization layer. - - Args: - norm_layer (str): The type of normalization layer. - - Returns: - norm_layer (nn.Module): The normalization layer. 
- """ - if norm_layer == "layer": - return nn.LayerNorm - elif norm_layer == "rms": - return RMSNorm - else: - raise NotImplementedError(f"Norm layer {norm_layer} is not implemented") - - -def get_activation_layer(act_type): - """get activation layer - - Args: - act_type (str): the activation type - - Returns: - torch.nn.functional: the activation layer - """ - if act_type == "gelu": - return lambda: nn.GELU() - elif act_type == "gelu_tanh": - return lambda: nn.GELU(approximate="tanh") - elif act_type == "relu": - return nn.ReLU - elif act_type == "silu": - return nn.SiLU - else: - raise ValueError(f"Unknown activation type: {act_type}") - -class IndividualTokenRefinerBlock(torch.nn.Module): - def __init__( - self, - hidden_size, - heads_num, - mlp_width_ratio: str = 4.0, - mlp_drop_rate: float = 0.0, - act_type: str = "silu", - qk_norm: bool = False, - qk_norm_type: str = "layer", - qkv_bias: bool = True, - need_CA: bool = False, - dtype: Optional[torch.dtype] = None, - device: Optional[torch.device] = None, - ): - factory_kwargs = {"device": device, "dtype": dtype} - super().__init__() - self.need_CA = need_CA - self.heads_num = heads_num - head_dim = hidden_size // heads_num - mlp_hidden_dim = int(hidden_size * mlp_width_ratio) - - self.norm1 = nn.LayerNorm( - hidden_size, elementwise_affine=True, eps=1e-6, **factory_kwargs - ) - self.self_attn_qkv = nn.Linear( - hidden_size, hidden_size * 3, bias=qkv_bias, **factory_kwargs - ) - qk_norm_layer = get_norm_layer(qk_norm_type) - self.self_attn_q_norm = ( - qk_norm_layer(head_dim, elementwise_affine=True, eps=1e-6, **factory_kwargs) - if qk_norm - else nn.Identity() - ) - self.self_attn_k_norm = ( - qk_norm_layer(head_dim, elementwise_affine=True, eps=1e-6, **factory_kwargs) - if qk_norm - else nn.Identity() - ) - self.self_attn_proj = nn.Linear( - hidden_size, hidden_size, bias=qkv_bias, **factory_kwargs - ) - - self.norm2 = nn.LayerNorm( - hidden_size, elementwise_affine=True, eps=1e-6, **factory_kwargs - ) - 
act_layer = get_activation_layer(act_type) - self.mlp = MLP( - in_channels=hidden_size, - hidden_channels=mlp_hidden_dim, - act_layer=act_layer, - drop=mlp_drop_rate, - **factory_kwargs, - ) - - self.adaLN_modulation = nn.Sequential( - act_layer(), - nn.Linear(hidden_size, 2 * hidden_size, bias=True, **factory_kwargs), - ) - - if self.need_CA: - self.cross_attnblock=CrossAttnBlock(hidden_size=hidden_size, - heads_num=heads_num, - mlp_width_ratio=mlp_width_ratio, - mlp_drop_rate=mlp_drop_rate, - act_type=act_type, - qk_norm=qk_norm, - qk_norm_type=qk_norm_type, - qkv_bias=qkv_bias, - **factory_kwargs,) - # Zero-initialize the modulation - nn.init.zeros_(self.adaLN_modulation[1].weight) - nn.init.zeros_(self.adaLN_modulation[1].bias) - - def forward( - self, - x: torch.Tensor, - c: torch.Tensor, # timestep_aware_representations + context_aware_representations - attn_mask: torch.Tensor = None, - y: torch.Tensor = None, - ): - gate_msa, gate_mlp = self.adaLN_modulation(c).chunk(2, dim=1) - - norm_x = self.norm1(x) - qkv = self.self_attn_qkv(norm_x) - q, k, v = rearrange(qkv, "B L (K H D) -> K B L H D", K=3, H=self.heads_num) - # Apply QK-Norm if needed - q = self.self_attn_q_norm(q).to(v) - k = self.self_attn_k_norm(k).to(v) - - # Self-Attention - attn = attention(q, k, v, mode="torch", attn_mask=attn_mask) - - x = x + apply_gate(self.self_attn_proj(attn), gate_msa) - - if self.need_CA: - x = self.cross_attnblock(x, c, attn_mask, y) - - # FFN Layer - x = x + apply_gate(self.mlp(self.norm2(x)), gate_mlp) - - return x - - - - -class CrossAttnBlock(torch.nn.Module): - def __init__( - self, - hidden_size, - heads_num, - mlp_width_ratio: str = 4.0, - mlp_drop_rate: float = 0.0, - act_type: str = "silu", - qk_norm: bool = False, - qk_norm_type: str = "layer", - qkv_bias: bool = True, - dtype: Optional[torch.dtype] = None, - device: Optional[torch.device] = None, - ): - factory_kwargs = {"device": device, "dtype": dtype} - super().__init__() - self.heads_num = heads_num - 
head_dim = hidden_size // heads_num - - self.norm1 = nn.LayerNorm( - hidden_size, elementwise_affine=True, eps=1e-6, **factory_kwargs - ) - self.norm1_2 = nn.LayerNorm( - hidden_size, elementwise_affine=True, eps=1e-6, **factory_kwargs - ) - self.self_attn_q = nn.Linear( - hidden_size, hidden_size, bias=qkv_bias, **factory_kwargs - ) - self.self_attn_kv = nn.Linear( - hidden_size, hidden_size*2, bias=qkv_bias, **factory_kwargs - ) - qk_norm_layer = get_norm_layer(qk_norm_type) - self.self_attn_q_norm = ( - qk_norm_layer(head_dim, elementwise_affine=True, eps=1e-6, **factory_kwargs) - if qk_norm - else nn.Identity() - ) - self.self_attn_k_norm = ( - qk_norm_layer(head_dim, elementwise_affine=True, eps=1e-6, **factory_kwargs) - if qk_norm - else nn.Identity() - ) - self.self_attn_proj = nn.Linear( - hidden_size, hidden_size, bias=qkv_bias, **factory_kwargs - ) - - self.norm2 = nn.LayerNorm( - hidden_size, elementwise_affine=True, eps=1e-6, **factory_kwargs - ) - act_layer = get_activation_layer(act_type) - - self.adaLN_modulation = nn.Sequential( - act_layer(), - nn.Linear(hidden_size, 2 * hidden_size, bias=True, **factory_kwargs), - ) - # Zero-initialize the modulation - nn.init.zeros_(self.adaLN_modulation[1].weight) - nn.init.zeros_(self.adaLN_modulation[1].bias) - - def forward( - self, - x: torch.Tensor, - c: torch.Tensor, # timestep_aware_representations + context_aware_representations - attn_mask: torch.Tensor = None, - y: torch.Tensor=None, - - ): - gate_msa, gate_mlp = self.adaLN_modulation(c).chunk(2, dim=1) - - norm_x = self.norm1(x) - norm_y = self.norm1_2(y) - q = self.self_attn_q(norm_x) - q = rearrange(q, "B L (H D) -> B L H D", H=self.heads_num) - kv = self.self_attn_kv(norm_y) - k, v = rearrange(kv, "B L (K H D) -> K B L H D", K=2, H=self.heads_num) - # Apply QK-Norm if needed - q = self.self_attn_q_norm(q).to(v) - k = self.self_attn_k_norm(k).to(v) - - # Self-Attention - attn = attention(q, k, v, mode="torch", attn_mask=attn_mask) - - x = x + 
apply_gate(self.self_attn_proj(attn), gate_msa) - - return x - - - -class IndividualTokenRefiner(torch.nn.Module): - def __init__( - self, - hidden_size, - heads_num, - depth, - mlp_width_ratio: float = 4.0, - mlp_drop_rate: float = 0.0, - act_type: str = "silu", - qk_norm: bool = False, - qk_norm_type: str = "layer", - qkv_bias: bool = True, - need_CA:bool=False, - dtype: Optional[torch.dtype] = None, - device: Optional[torch.device] = None, - ): - - factory_kwargs = {"device": device, "dtype": dtype} - super().__init__() - self.need_CA = need_CA - self.blocks = nn.ModuleList( - [ - IndividualTokenRefinerBlock( - hidden_size=hidden_size, - heads_num=heads_num, - mlp_width_ratio=mlp_width_ratio, - mlp_drop_rate=mlp_drop_rate, - act_type=act_type, - qk_norm=qk_norm, - qk_norm_type=qk_norm_type, - qkv_bias=qkv_bias, - need_CA=self.need_CA, - **factory_kwargs, - ) - for _ in range(depth) - ] - ) - - - def forward( - self, - x: torch.Tensor, - c: torch.LongTensor, - mask: Optional[torch.Tensor] = None, - y:torch.Tensor=None, - ): - self_attn_mask = None - if mask is not None: - batch_size = mask.shape[0] - seq_len = mask.shape[1] - mask = mask.to(x.device) - # batch_size x 1 x seq_len x seq_len - self_attn_mask_1 = mask.view(batch_size, 1, 1, seq_len).repeat( - 1, 1, seq_len, 1 - ) - # batch_size x 1 x seq_len x seq_len - self_attn_mask_2 = self_attn_mask_1.transpose(2, 3) - # batch_size x 1 x seq_len x seq_len, 1 for broadcasting of heads_num - self_attn_mask = (self_attn_mask_1 & self_attn_mask_2).bool() - # avoids self-attention weight being NaN for padding tokens - self_attn_mask[:, :, :, 0] = True - - - for block in self.blocks: - x = block(x, c, self_attn_mask,y) - - return x - - -class SingleTokenRefiner(torch.nn.Module): - """ - A single token refiner block for llm text embedding refine. 
- """ - def __init__( - self, - in_channels, - hidden_size, - heads_num, - depth, - mlp_width_ratio: float = 4.0, - mlp_drop_rate: float = 0.0, - act_type: str = "silu", - qk_norm: bool = False, - qk_norm_type: str = "layer", - qkv_bias: bool = True, - need_CA:bool=False, - attn_mode: str = "torch", - dtype: Optional[torch.dtype] = None, - device: Optional[torch.device] = None, - ): - factory_kwargs = {"device": device, "dtype": dtype} - super().__init__() - self.attn_mode = attn_mode - self.need_CA = need_CA - assert self.attn_mode == "torch", "Only support 'torch' mode for token refiner." - - self.input_embedder = nn.Linear( - in_channels, hidden_size, bias=True, **factory_kwargs - ) - if self.need_CA: - self.input_embedder_CA = nn.Linear( - in_channels, hidden_size, bias=True, **factory_kwargs - ) - - act_layer = get_activation_layer(act_type) - # Build timestep embedding layer - self.t_embedder = TimestepEmbedder(hidden_size, act_layer, **factory_kwargs) - # Build context embedding layer - self.c_embedder = TextProjection( - in_channels, hidden_size, act_layer, **factory_kwargs - ) - - self.individual_token_refiner = IndividualTokenRefiner( - hidden_size=hidden_size, - heads_num=heads_num, - depth=depth, - mlp_width_ratio=mlp_width_ratio, - mlp_drop_rate=mlp_drop_rate, - act_type=act_type, - qk_norm=qk_norm, - qk_norm_type=qk_norm_type, - qkv_bias=qkv_bias, - need_CA=need_CA, - **factory_kwargs, - ) - - def forward( - self, - x: torch.Tensor, - t: torch.LongTensor, - mask: Optional[torch.LongTensor] = None, - y: torch.LongTensor=None, - ): - timestep_aware_representations = self.t_embedder(t) - - if mask is None: - context_aware_representations = x.mean(dim=1) - else: - mask_float = mask.unsqueeze(-1) # [b, s1, 1] - context_aware_representations = (x * mask_float).sum( - dim=1 - ) / mask_float.sum(dim=1) - context_aware_representations = self.c_embedder(context_aware_representations) - c = timestep_aware_representations + context_aware_representations - - x = 
self.input_embedder(x) - if self.need_CA: - y = self.input_embedder_CA(y) - x = self.individual_token_refiner(x, c, mask, y) - else: - x = self.individual_token_refiner(x, c, mask) - - return x - - -class Qwen2Connector(torch.nn.Module): - def __init__( - self, - # biclip_dim=1024, - in_channels=3584, - hidden_size=4096, - heads_num=32, - depth=2, - need_CA=False, - device=None, - dtype=torch.bfloat16, - ): - super().__init__() - factory_kwargs = {"device": device, "dtype":dtype} - - self.S =SingleTokenRefiner(in_channels=in_channels,hidden_size=hidden_size,heads_num=heads_num,depth=depth,need_CA=need_CA,**factory_kwargs) - self.global_proj_out=nn.Linear(in_channels,768) - - self.scale_factor = nn.Parameter(torch.zeros(1)) - with torch.no_grad(): - self.scale_factor.data += -(1 - 0.09) - - def forward(self, x,t,mask): - mask_float = mask.unsqueeze(-1) # [b, s1, 1] - x_mean = (x * mask_float).sum( - dim=1 - ) / mask_float.sum(dim=1) * (1 + self.scale_factor.to(dtype=x.dtype, device=x.device)) - - global_out=self.global_proj_out(x_mean) - encoder_hidden_states = self.S(x,t,mask) - return encoder_hidden_states,global_out - - @staticmethod - def state_dict_converter(): - return Qwen2ConnectorStateDictConverter() - - -class Qwen2ConnectorStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - return state_dict - - def from_civitai(self, state_dict): - state_dict_ = {} - for name, param in state_dict.items(): - if name.startswith("connector."): - name_ = name[len("connector."):] - state_dict_[name_] = param - return state_dict_ diff --git a/diffsynth/models/stepvideo_dit.py b/diffsynth/models/stepvideo_dit.py deleted file mode 100644 index 10576e7..0000000 --- a/diffsynth/models/stepvideo_dit.py +++ /dev/null @@ -1,940 +0,0 @@ -# Copyright 2025 StepFun Inc. All Rights Reserved. 
-# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# ============================================================================== -from typing import Dict, Optional, Tuple, Union, List -import torch, math -from torch import nn -from einops import rearrange, repeat -from tqdm import tqdm - - -class RMSNorm(nn.Module): - def __init__( - self, - dim: int, - elementwise_affine=True, - eps: float = 1e-6, - device=None, - dtype=None, - ): - """ - Initialize the RMSNorm normalization layer. - - Args: - dim (int): The dimension of the input tensor. - eps (float, optional): A small value added to the denominator for numerical stability. Default is 1e-6. - - Attributes: - eps (float): A small value added to the denominator for numerical stability. - weight (nn.Parameter): Learnable scaling parameter. - - """ - factory_kwargs = {"device": device, "dtype": dtype} - super().__init__() - self.eps = eps - if elementwise_affine: - self.weight = nn.Parameter(torch.ones(dim, **factory_kwargs)) - - def _norm(self, x): - """ - Apply the RMSNorm normalization to the input tensor. - - Args: - x (torch.Tensor): The input tensor. - - Returns: - torch.Tensor: The normalized tensor. - - """ - return x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + self.eps) - - def forward(self, x): - """ - Forward pass through the RMSNorm layer. - - Args: - x (torch.Tensor): The input tensor. - - Returns: - torch.Tensor: The output tensor after applying RMSNorm. 
- - """ - output = self._norm(x.float()).type_as(x) - if hasattr(self, "weight"): - output = output * self.weight - return output - - -ACTIVATION_FUNCTIONS = { - "swish": nn.SiLU(), - "silu": nn.SiLU(), - "mish": nn.Mish(), - "gelu": nn.GELU(), - "relu": nn.ReLU(), -} - - -def get_activation(act_fn: str) -> nn.Module: - """Helper function to get activation function from string. - - Args: - act_fn (str): Name of activation function. - - Returns: - nn.Module: Activation function. - """ - - act_fn = act_fn.lower() - if act_fn in ACTIVATION_FUNCTIONS: - return ACTIVATION_FUNCTIONS[act_fn] - else: - raise ValueError(f"Unsupported activation function: {act_fn}") - - -def get_timestep_embedding( - timesteps: torch.Tensor, - embedding_dim: int, - flip_sin_to_cos: bool = False, - downscale_freq_shift: float = 1, - scale: float = 1, - max_period: int = 10000, -): - """ - This matches the implementation in Denoising Diffusion Probabilistic Models: Create sinusoidal timestep embeddings. - - :param timesteps: a 1-D Tensor of N indices, one per batch element. - These may be fractional. - :param embedding_dim: the dimension of the output. :param max_period: controls the minimum frequency of the - embeddings. :return: an [N x dim] Tensor of positional embeddings. 
- """ - assert len(timesteps.shape) == 1, "Timesteps should be a 1d-array" - - half_dim = embedding_dim // 2 - exponent = -math.log(max_period) * torch.arange( - start=0, end=half_dim, dtype=torch.float32, device=timesteps.device - ) - exponent = exponent / (half_dim - downscale_freq_shift) - - emb = torch.exp(exponent) - emb = timesteps[:, None].float() * emb[None, :] - - # scale embeddings - emb = scale * emb - - # concat sine and cosine embeddings - emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=-1) - - # flip sine and cosine embeddings - if flip_sin_to_cos: - emb = torch.cat([emb[:, half_dim:], emb[:, :half_dim]], dim=-1) - - # zero pad - if embedding_dim % 2 == 1: - emb = torch.nn.functional.pad(emb, (0, 1, 0, 0)) - return emb - - -class Timesteps(nn.Module): - def __init__(self, num_channels: int, flip_sin_to_cos: bool, downscale_freq_shift: float): - super().__init__() - self.num_channels = num_channels - self.flip_sin_to_cos = flip_sin_to_cos - self.downscale_freq_shift = downscale_freq_shift - - def forward(self, timesteps): - t_emb = get_timestep_embedding( - timesteps, - self.num_channels, - flip_sin_to_cos=self.flip_sin_to_cos, - downscale_freq_shift=self.downscale_freq_shift, - ) - return t_emb - - -class TimestepEmbedding(nn.Module): - def __init__( - self, - in_channels: int, - time_embed_dim: int, - act_fn: str = "silu", - out_dim: int = None, - post_act_fn: Optional[str] = None, - cond_proj_dim=None, - sample_proj_bias=True - ): - super().__init__() - linear_cls = nn.Linear - - self.linear_1 = linear_cls( - in_channels, - time_embed_dim, - bias=sample_proj_bias, - ) - - if cond_proj_dim is not None: - self.cond_proj = linear_cls( - cond_proj_dim, - in_channels, - bias=False, - ) - else: - self.cond_proj = None - - self.act = get_activation(act_fn) - - if out_dim is not None: - time_embed_dim_out = out_dim - else: - time_embed_dim_out = time_embed_dim - - self.linear_2 = linear_cls( - time_embed_dim, - time_embed_dim_out, - 
bias=sample_proj_bias, - ) - - if post_act_fn is None: - self.post_act = None - else: - self.post_act = get_activation(post_act_fn) - - def forward(self, sample, condition=None): - if condition is not None: - sample = sample + self.cond_proj(condition) - sample = self.linear_1(sample) - - if self.act is not None: - sample = self.act(sample) - - sample = self.linear_2(sample) - - if self.post_act is not None: - sample = self.post_act(sample) - return sample - - -class PixArtAlphaCombinedTimestepSizeEmbeddings(nn.Module): - def __init__(self, embedding_dim, size_emb_dim, use_additional_conditions: bool = False): - super().__init__() - - self.outdim = size_emb_dim - self.time_proj = Timesteps(num_channels=256, flip_sin_to_cos=True, downscale_freq_shift=0) - self.timestep_embedder = TimestepEmbedding(in_channels=256, time_embed_dim=embedding_dim) - - self.use_additional_conditions = use_additional_conditions - if self.use_additional_conditions: - self.additional_condition_proj = Timesteps(num_channels=256, flip_sin_to_cos=True, downscale_freq_shift=0) - self.resolution_embedder = TimestepEmbedding(in_channels=256, time_embed_dim=size_emb_dim) - self.nframe_embedder = TimestepEmbedding(in_channels=256, time_embed_dim=embedding_dim) - self.fps_embedder = TimestepEmbedding(in_channels=256, time_embed_dim=embedding_dim) - - def forward(self, timestep, resolution=None, nframe=None, fps=None): - hidden_dtype = timestep.dtype - - timesteps_proj = self.time_proj(timestep) - timesteps_emb = self.timestep_embedder(timesteps_proj.to(dtype=hidden_dtype)) # (N, D) - - if self.use_additional_conditions: - batch_size = timestep.shape[0] - resolution_emb = self.additional_condition_proj(resolution.flatten()).to(hidden_dtype) - resolution_emb = self.resolution_embedder(resolution_emb).reshape(batch_size, -1) - nframe_emb = self.additional_condition_proj(nframe.flatten()).to(hidden_dtype) - nframe_emb = self.nframe_embedder(nframe_emb).reshape(batch_size, -1) - conditioning = 
timesteps_emb + resolution_emb + nframe_emb - - if fps is not None: - fps_emb = self.additional_condition_proj(fps.flatten()).to(hidden_dtype) - fps_emb = self.fps_embedder(fps_emb).reshape(batch_size, -1) - conditioning = conditioning + fps_emb - else: - conditioning = timesteps_emb - - return conditioning - - -class AdaLayerNormSingle(nn.Module): - r""" - Norm layer adaptive layer norm single (adaLN-single). - - As proposed in PixArt-Alpha (see: https://arxiv.org/abs/2310.00426; Section 2.3). - - Parameters: - embedding_dim (`int`): The size of each embedding vector. - use_additional_conditions (`bool`): To use additional conditions for normalization or not. - """ - def __init__(self, embedding_dim: int, use_additional_conditions: bool = False, time_step_rescale=1000): - super().__init__() - - self.emb = PixArtAlphaCombinedTimestepSizeEmbeddings( - embedding_dim, size_emb_dim=embedding_dim // 2, use_additional_conditions=use_additional_conditions - ) - - self.silu = nn.SiLU() - self.linear = nn.Linear(embedding_dim, 6 * embedding_dim, bias=True) - - self.time_step_rescale = time_step_rescale ## timestep usually in [0, 1], we rescale it to [0,1000] for stability - - def forward( - self, - timestep: torch.Tensor, - added_cond_kwargs: Dict[str, torch.Tensor] = None, - ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]: - embedded_timestep = self.emb(timestep*self.time_step_rescale, **added_cond_kwargs) - - out = self.linear(self.silu(embedded_timestep)) - - return out, embedded_timestep - - -class PixArtAlphaTextProjection(nn.Module): - """ - Projects caption embeddings. Also handles dropout for classifier-free guidance. 
- - Adapted from https://github.com/PixArt-alpha/PixArt-alpha/blob/master/diffusion/model/nets/PixArt_blocks.py - """ - - def __init__(self, in_features, hidden_size): - super().__init__() - self.linear_1 = nn.Linear( - in_features, - hidden_size, - bias=True, - ) - self.act_1 = nn.GELU(approximate="tanh") - self.linear_2 = nn.Linear( - hidden_size, - hidden_size, - bias=True, - ) - - def forward(self, caption): - hidden_states = self.linear_1(caption) - hidden_states = self.act_1(hidden_states) - hidden_states = self.linear_2(hidden_states) - return hidden_states - - -class Attention(nn.Module): - def __init__(self): - super().__init__() - - def attn_processor(self, attn_type): - if attn_type == 'torch': - return self.torch_attn_func - elif attn_type == 'parallel': - return self.parallel_attn_func - else: - raise Exception('Not supported attention type...') - - def torch_attn_func( - self, - q, - k, - v, - attn_mask=None, - causal=False, - drop_rate=0.0, - **kwargs - ): - - if attn_mask is not None and attn_mask.dtype != torch.bool: - attn_mask = attn_mask.to(q.dtype) - - if attn_mask is not None and attn_mask.ndim == 3: ## no head - n_heads = q.shape[2] - attn_mask = attn_mask.unsqueeze(1).repeat(1, n_heads, 1, 1) - - q, k, v = map(lambda x: rearrange(x, 'b s h d -> b h s d'), (q, k, v)) - if attn_mask is not None: - attn_mask = attn_mask.to(q.device) - x = torch.nn.functional.scaled_dot_product_attention( - q, k, v, attn_mask=attn_mask, dropout_p=drop_rate, is_causal=causal - ) - x = rearrange(x, 'b h s d -> b s h d') - return x - - -class RoPE1D: - def __init__(self, freq=1e4, F0=1.0, scaling_factor=1.0): - self.base = freq - self.F0 = F0 - self.scaling_factor = scaling_factor - self.cache = {} - - def get_cos_sin(self, D, seq_len, device, dtype): - if (D, seq_len, device, dtype) not in self.cache: - inv_freq = 1.0 / (self.base ** (torch.arange(0, D, 2).float().to(device) / D)) - t = torch.arange(seq_len, device=device, dtype=inv_freq.dtype) - freqs = 
torch.einsum("i,j->ij", t, inv_freq).to(dtype) - freqs = torch.cat((freqs, freqs), dim=-1) - cos = freqs.cos() # (Seq, Dim) - sin = freqs.sin() - self.cache[D, seq_len, device, dtype] = (cos, sin) - return self.cache[D, seq_len, device, dtype] - - @staticmethod - def rotate_half(x): - x1, x2 = x[..., : x.shape[-1] // 2], x[..., x.shape[-1] // 2:] - return torch.cat((-x2, x1), dim=-1) - - def apply_rope1d(self, tokens, pos1d, cos, sin): - assert pos1d.ndim == 2 - cos = torch.nn.functional.embedding(pos1d, cos)[:, :, None, :] - sin = torch.nn.functional.embedding(pos1d, sin)[:, :, None, :] - return (tokens * cos) + (self.rotate_half(tokens) * sin) - - def __call__(self, tokens, positions): - """ - input: - * tokens: batch_size x ntokens x nheads x dim - * positions: batch_size x ntokens (t position of each token) - output: - * tokens after applying RoPE2D (batch_size x ntokens x nheads x dim) - """ - D = tokens.size(3) - assert positions.ndim == 2 # Batch, Seq - cos, sin = self.get_cos_sin(D, int(positions.max()) + 1, tokens.device, tokens.dtype) - tokens = self.apply_rope1d(tokens, positions, cos, sin) - return tokens - - -class RoPE3D(RoPE1D): - def __init__(self, freq=1e4, F0=1.0, scaling_factor=1.0): - super(RoPE3D, self).__init__(freq, F0, scaling_factor) - self.position_cache = {} - - def get_mesh_3d(self, rope_positions, bsz): - f, h, w = rope_positions - - if f"{f}-{h}-{w}" not in self.position_cache: - x = torch.arange(f, device='cpu') - y = torch.arange(h, device='cpu') - z = torch.arange(w, device='cpu') - self.position_cache[f"{f}-{h}-{w}"] = torch.cartesian_prod(x, y, z).view(1, f*h*w, 3).expand(bsz, -1, 3) - return self.position_cache[f"{f}-{h}-{w}"] - - def __call__(self, tokens, rope_positions, ch_split, parallel=False): - """ - input: - * tokens: batch_size x ntokens x nheads x dim - * rope_positions: list of (f, h, w) - output: - * tokens after applying RoPE2D (batch_size x ntokens x nheads x dim) - """ - assert sum(ch_split) == tokens.size(-1); - - 
mesh_grid = self.get_mesh_3d(rope_positions, bsz=tokens.shape[0]) - out = [] - for i, (D, x) in enumerate(zip(ch_split, torch.split(tokens, ch_split, dim=-1))): - cos, sin = self.get_cos_sin(D, int(mesh_grid.max()) + 1, tokens.device, tokens.dtype) - - if parallel: - pass - else: - mesh = mesh_grid[:, :, i].clone() - x = self.apply_rope1d(x, mesh.to(tokens.device), cos, sin) - out.append(x) - - tokens = torch.cat(out, dim=-1) - return tokens - - -class SelfAttention(Attention): - def __init__(self, hidden_dim, head_dim, bias=False, with_rope=True, with_qk_norm=True, attn_type='torch'): - super().__init__() - self.head_dim = head_dim - self.n_heads = hidden_dim // head_dim - - self.wqkv = nn.Linear(hidden_dim, hidden_dim*3, bias=bias) - self.wo = nn.Linear(hidden_dim, hidden_dim, bias=bias) - - self.with_rope = with_rope - self.with_qk_norm = with_qk_norm - if self.with_qk_norm: - self.q_norm = RMSNorm(head_dim, elementwise_affine=True) - self.k_norm = RMSNorm(head_dim, elementwise_affine=True) - - if self.with_rope: - self.rope_3d = RoPE3D(freq=1e4, F0=1.0, scaling_factor=1.0) - self.rope_ch_split = [64, 32, 32] - - self.core_attention = self.attn_processor(attn_type=attn_type) - self.parallel = attn_type=='parallel' - - def apply_rope3d(self, x, fhw_positions, rope_ch_split, parallel=True): - x = self.rope_3d(x, fhw_positions, rope_ch_split, parallel) - return x - - def forward( - self, - x, - cu_seqlens=None, - max_seqlen=None, - rope_positions=None, - attn_mask=None - ): - xqkv = self.wqkv(x) - xqkv = xqkv.view(*x.shape[:-1], self.n_heads, 3*self.head_dim) - - xq, xk, xv = torch.split(xqkv, [self.head_dim]*3, dim=-1) ## seq_len, n, dim - - if self.with_qk_norm: - xq = self.q_norm(xq) - xk = self.k_norm(xk) - - if self.with_rope: - xq = self.apply_rope3d(xq, rope_positions, self.rope_ch_split, parallel=self.parallel) - xk = self.apply_rope3d(xk, rope_positions, self.rope_ch_split, parallel=self.parallel) - - output = self.core_attention( - xq, - xk, - xv, - 
cu_seqlens=cu_seqlens, - max_seqlen=max_seqlen, - attn_mask=attn_mask - ) - output = rearrange(output, 'b s h d -> b s (h d)') - output = self.wo(output) - - return output - - -class CrossAttention(Attention): - def __init__(self, hidden_dim, head_dim, bias=False, with_qk_norm=True, attn_type='torch'): - super().__init__() - self.head_dim = head_dim - self.n_heads = hidden_dim // head_dim - - self.wq = nn.Linear(hidden_dim, hidden_dim, bias=bias) - self.wkv = nn.Linear(hidden_dim, hidden_dim*2, bias=bias) - self.wo = nn.Linear(hidden_dim, hidden_dim, bias=bias) - - self.with_qk_norm = with_qk_norm - if self.with_qk_norm: - self.q_norm = RMSNorm(head_dim, elementwise_affine=True) - self.k_norm = RMSNorm(head_dim, elementwise_affine=True) - - self.core_attention = self.attn_processor(attn_type=attn_type) - - def forward( - self, - x: torch.Tensor, - encoder_hidden_states: torch.Tensor, - attn_mask=None - ): - xq = self.wq(x) - xq = xq.view(*xq.shape[:-1], self.n_heads, self.head_dim) - - xkv = self.wkv(encoder_hidden_states) - xkv = xkv.view(*xkv.shape[:-1], self.n_heads, 2*self.head_dim) - - xk, xv = torch.split(xkv, [self.head_dim]*2, dim=-1) ## seq_len, n, dim - - if self.with_qk_norm: - xq = self.q_norm(xq) - xk = self.k_norm(xk) - - output = self.core_attention( - xq, - xk, - xv, - attn_mask=attn_mask - ) - - output = rearrange(output, 'b s h d -> b s (h d)') - output = self.wo(output) - - return output - - -class GELU(nn.Module): - r""" - GELU activation function with tanh approximation support with `approximate="tanh"`. - - Parameters: - dim_in (`int`): The number of channels in the input. - dim_out (`int`): The number of channels in the output. - approximate (`str`, *optional*, defaults to `"none"`): If `"tanh"`, use tanh approximation. - bias (`bool`, defaults to True): Whether to use a bias in the linear layer. 
- """ - - def __init__(self, dim_in: int, dim_out: int, approximate: str = "none", bias: bool = True): - super().__init__() - self.proj = nn.Linear(dim_in, dim_out, bias=bias) - self.approximate = approximate - - def gelu(self, gate: torch.Tensor) -> torch.Tensor: - return torch.nn.functional.gelu(gate, approximate=self.approximate) - - def forward(self, hidden_states): - hidden_states = self.proj(hidden_states) - hidden_states = self.gelu(hidden_states) - return hidden_states - - -class FeedForward(nn.Module): - def __init__( - self, - dim: int, - inner_dim: Optional[int] = None, - dim_out: Optional[int] = None, - mult: int = 4, - bias: bool = False, - ): - super().__init__() - inner_dim = dim*mult if inner_dim is None else inner_dim - dim_out = dim if dim_out is None else dim_out - self.net = nn.ModuleList([ - GELU(dim, inner_dim, approximate="tanh", bias=bias), - nn.Identity(), - nn.Linear(inner_dim, dim_out, bias=bias) - ]) - - - def forward(self, hidden_states: torch.Tensor, *args, **kwargs) -> torch.Tensor: - for module in self.net: - hidden_states = module(hidden_states) - return hidden_states - - -def modulate(x, scale, shift): - x = x * (1 + scale) + shift - return x - - -def gate(x, gate): - x = gate * x - return x - - -class StepVideoTransformerBlock(nn.Module): - r""" - A basic Transformer block. - - Parameters: - dim (`int`): The number of channels in the input and output. - num_attention_heads (`int`): The number of heads to use for multi-head attention. - attention_head_dim (`int`): The number of channels in each head. - dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use. - cross_attention_dim (`int`, *optional*): The size of the encoder_hidden_states vector for cross attention. - activation_fn (`str`, *optional*, defaults to `"geglu"`): Activation function to be used in feed-forward. - num_embeds_ada_norm (: - obj: `int`, *optional*): The number of diffusion steps used during training. See `Transformer2DModel`. 
- attention_bias (: - obj: `bool`, *optional*, defaults to `False`): Configure if the attentions should contain a bias parameter. - only_cross_attention (`bool`, *optional*): - Whether to use only cross-attention layers. In this case two cross attention layers are used. - double_self_attention (`bool`, *optional*): - Whether to use two self-attention layers. In this case no cross attention layers are used. - upcast_attention (`bool`, *optional*): - Whether to upcast the attention computation to float32. This is useful for mixed precision training. - norm_elementwise_affine (`bool`, *optional*, defaults to `True`): - Whether to use learnable elementwise affine parameters for normalization. - norm_type (`str`, *optional*, defaults to `"layer_norm"`): - The normalization layer to use. Can be `"layer_norm"`, `"ada_norm"` or `"ada_norm_zero"`. - final_dropout (`bool` *optional*, defaults to False): - Whether to apply a final dropout after the last feed-forward layer. - attention_type (`str`, *optional*, defaults to `"default"`): - The type of attention to use. Can be `"default"` or `"gated"` or `"gated-text-image"`. - positional_embeddings (`str`, *optional*, defaults to `None`): - The type of positional embeddings to apply to. - num_positional_embeddings (`int`, *optional*, defaults to `None`): - The maximum number of positional embeddings to apply. 
- """ - - def __init__( - self, - dim: int, - attention_head_dim: int, - norm_eps: float = 1e-5, - ff_inner_dim: Optional[int] = None, - ff_bias: bool = False, - attention_type: str = 'parallel' - ): - super().__init__() - self.dim = dim - self.norm1 = nn.LayerNorm(dim, eps=norm_eps) - self.attn1 = SelfAttention(dim, attention_head_dim, bias=False, with_rope=True, with_qk_norm=True, attn_type=attention_type) - - self.norm2 = nn.LayerNorm(dim, eps=norm_eps) - self.attn2 = CrossAttention(dim, attention_head_dim, bias=False, with_qk_norm=True, attn_type='torch') - - self.ff = FeedForward(dim=dim, inner_dim=ff_inner_dim, dim_out=dim, bias=ff_bias) - - self.scale_shift_table = nn.Parameter(torch.randn(6, dim) /dim**0.5) - - @torch.no_grad() - def forward( - self, - q: torch.Tensor, - kv: Optional[torch.Tensor] = None, - timestep: Optional[torch.LongTensor] = None, - attn_mask = None, - rope_positions: list = None, - ) -> torch.Tensor: - shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = ( - torch.clone(chunk) for chunk in (self.scale_shift_table[None].to(dtype=q.dtype, device=q.device) + timestep.reshape(-1, 6, self.dim)).chunk(6, dim=1) - ) - - scale_shift_q = modulate(self.norm1(q), scale_msa, shift_msa) - - attn_q = self.attn1( - scale_shift_q, - rope_positions=rope_positions - ) - - q = gate(attn_q, gate_msa) + q - - attn_q = self.attn2( - q, - kv, - attn_mask - ) - - q = attn_q + q - - scale_shift_q = modulate(self.norm2(q), scale_mlp, shift_mlp) - - ff_output = self.ff(scale_shift_q) - - q = gate(ff_output, gate_mlp) + q - - return q - - -class PatchEmbed(nn.Module): - """2D Image to Patch Embedding""" - - def __init__( - self, - patch_size=64, - in_channels=3, - embed_dim=768, - layer_norm=False, - flatten=True, - bias=True, - ): - super().__init__() - - self.flatten = flatten - self.layer_norm = layer_norm - - self.proj = nn.Conv2d( - in_channels, embed_dim, kernel_size=(patch_size, patch_size), stride=patch_size, bias=bias - ) - - def 
forward(self, latent): - latent = self.proj(latent).to(latent.dtype) - if self.flatten: - latent = latent.flatten(2).transpose(1, 2) # BCHW -> BNC - if self.layer_norm: - latent = self.norm(latent) - - return latent - - -class StepVideoModel(torch.nn.Module): - def __init__( - self, - num_attention_heads: int = 48, - attention_head_dim: int = 128, - in_channels: int = 64, - out_channels: Optional[int] = 64, - num_layers: int = 48, - dropout: float = 0.0, - patch_size: int = 1, - norm_type: str = "ada_norm_single", - norm_elementwise_affine: bool = False, - norm_eps: float = 1e-6, - use_additional_conditions: Optional[bool] = False, - caption_channels: Optional[Union[int, List, Tuple]] = [6144, 1024], - attention_type: Optional[str] = "torch", - ): - super().__init__() - - # Set some common variables used across the board. - self.inner_dim = num_attention_heads * attention_head_dim - self.out_channels = in_channels if out_channels is None else out_channels - - self.use_additional_conditions = use_additional_conditions - - self.pos_embed = PatchEmbed( - patch_size=patch_size, - in_channels=in_channels, - embed_dim=self.inner_dim, - ) - - self.transformer_blocks = nn.ModuleList( - [ - StepVideoTransformerBlock( - dim=self.inner_dim, - attention_head_dim=attention_head_dim, - attention_type=attention_type - ) - for _ in range(num_layers) - ] - ) - - # 3. Output blocks. 
- self.norm_out = nn.LayerNorm(self.inner_dim, eps=norm_eps, elementwise_affine=norm_elementwise_affine) - self.scale_shift_table = nn.Parameter(torch.randn(2, self.inner_dim) / self.inner_dim**0.5) - self.proj_out = nn.Linear(self.inner_dim, patch_size * patch_size * self.out_channels) - self.patch_size = patch_size - - self.adaln_single = AdaLayerNormSingle( - self.inner_dim, use_additional_conditions=self.use_additional_conditions - ) - - if isinstance(caption_channels, int): - caption_channel = caption_channels - else: - caption_channel, clip_channel = caption_channels - self.clip_projection = nn.Linear(clip_channel, self.inner_dim) - - self.caption_norm = nn.LayerNorm(caption_channel, eps=norm_eps, elementwise_affine=norm_elementwise_affine) - - self.caption_projection = PixArtAlphaTextProjection( - in_features=caption_channel, hidden_size=self.inner_dim - ) - - self.parallel = attention_type=='parallel' - - def patchfy(self, hidden_states): - hidden_states = rearrange(hidden_states, 'b f c h w -> (b f) c h w') - hidden_states = self.pos_embed(hidden_states) - return hidden_states - - def prepare_attn_mask(self, encoder_attention_mask, encoder_hidden_states, q_seqlen): - kv_seqlens = encoder_attention_mask.sum(dim=1).int() - mask = torch.zeros([len(kv_seqlens), q_seqlen, max(kv_seqlens)], dtype=torch.bool, device=encoder_attention_mask.device) - encoder_hidden_states = encoder_hidden_states[:,: max(kv_seqlens)] - for i, kv_len in enumerate(kv_seqlens): - mask[i, :, :kv_len] = 1 - return encoder_hidden_states, mask - - - def block_forward( - self, - hidden_states, - encoder_hidden_states=None, - timestep=None, - rope_positions=None, - attn_mask=None, - parallel=True - ): - for block in tqdm(self.transformer_blocks, desc="Transformer blocks"): - hidden_states = block( - hidden_states, - encoder_hidden_states, - timestep=timestep, - attn_mask=attn_mask, - rope_positions=rope_positions - ) - - return hidden_states - - - @torch.inference_mode() - def forward( - 
self, - hidden_states: torch.Tensor, - encoder_hidden_states: Optional[torch.Tensor] = None, - encoder_hidden_states_2: Optional[torch.Tensor] = None, - timestep: Optional[torch.LongTensor] = None, - added_cond_kwargs: Dict[str, torch.Tensor] = None, - encoder_attention_mask: Optional[torch.Tensor] = None, - fps: torch.Tensor=None, - return_dict: bool = False, - ): - assert hidden_states.ndim==5; "hidden_states's shape should be (bsz, f, ch, h ,w)" - - bsz, frame, _, height, width = hidden_states.shape - height, width = height // self.patch_size, width // self.patch_size - - hidden_states = self.patchfy(hidden_states) - len_frame = hidden_states.shape[1] - - if self.use_additional_conditions: - added_cond_kwargs = { - "resolution": torch.tensor([(height, width)]*bsz, device=hidden_states.device, dtype=hidden_states.dtype), - "nframe": torch.tensor([frame]*bsz, device=hidden_states.device, dtype=hidden_states.dtype), - "fps": fps - } - else: - added_cond_kwargs = {} - - timestep, embedded_timestep = self.adaln_single( - timestep, added_cond_kwargs=added_cond_kwargs - ) - - encoder_hidden_states = self.caption_projection(self.caption_norm(encoder_hidden_states)) - - if encoder_hidden_states_2 is not None and hasattr(self, 'clip_projection'): - clip_embedding = self.clip_projection(encoder_hidden_states_2) - encoder_hidden_states = torch.cat([clip_embedding, encoder_hidden_states], dim=1) - - hidden_states = rearrange(hidden_states, '(b f) l d-> b (f l) d', b=bsz, f=frame, l=len_frame).contiguous() - encoder_hidden_states, attn_mask = self.prepare_attn_mask(encoder_attention_mask, encoder_hidden_states, q_seqlen=frame*len_frame) - - hidden_states = self.block_forward( - hidden_states, - encoder_hidden_states, - timestep=timestep, - rope_positions=[frame, height, width], - attn_mask=attn_mask, - parallel=self.parallel - ) - - hidden_states = rearrange(hidden_states, 'b (f l) d -> (b f) l d', b=bsz, f=frame, l=len_frame) - - embedded_timestep = repeat(embedded_timestep, 
'b d -> (b f) d', f=frame).contiguous() - - shift, scale = (self.scale_shift_table[None].to(dtype=embedded_timestep.dtype, device=embedded_timestep.device) + embedded_timestep[:, None]).chunk(2, dim=1) - hidden_states = self.norm_out(hidden_states) - # Modulation - hidden_states = hidden_states * (1 + scale) + shift - hidden_states = self.proj_out(hidden_states) - - # unpatchify - hidden_states = hidden_states.reshape( - shape=(-1, height, width, self.patch_size, self.patch_size, self.out_channels) - ) - - hidden_states = rearrange(hidden_states, 'n h w p q c -> n c h p w q') - output = hidden_states.reshape( - shape=(-1, self.out_channels, height * self.patch_size, width * self.patch_size) - ) - - output = rearrange(output, '(b f) c h w -> b f c h w', f=frame) - - if return_dict: - return {'x': output} - return output - - @staticmethod - def state_dict_converter(): - return StepVideoDiTStateDictConverter() - - -class StepVideoDiTStateDictConverter: - def __init__(self): - super().__init__() - - def from_diffusers(self, state_dict): - return state_dict - - def from_civitai(self, state_dict): - return state_dict - - - \ No newline at end of file diff --git a/diffsynth/models/stepvideo_text_encoder.py b/diffsynth/models/stepvideo_text_encoder.py deleted file mode 100644 index 598825a..0000000 --- a/diffsynth/models/stepvideo_text_encoder.py +++ /dev/null @@ -1,553 +0,0 @@ -# Copyright 2025 StepFun Inc. All Rights Reserved. 
-# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# ============================================================================== -import os -from typing import Optional - -import torch -import torch.nn as nn -import torch.nn.functional as F -from .stepvideo_dit import RMSNorm -from safetensors.torch import load_file -from transformers.modeling_utils import PretrainedConfig, PreTrainedModel -from einops import rearrange -import json -from typing import List -from functools import wraps -import warnings - - - -class EmptyInitOnDevice(torch.overrides.TorchFunctionMode): - def __init__(self, device=None): - self.device = device - - def __torch_function__(self, func, types, args=(), kwargs=None): - kwargs = kwargs or {} - if getattr(func, '__module__', None) == 'torch.nn.init': - if 'tensor' in kwargs: - return kwargs['tensor'] - else: - return args[0] - if self.device is not None and func in torch.utils._device._device_constructors() and kwargs.get('device') is None: - kwargs['device'] = self.device - return func(*args, **kwargs) - - -def with_empty_init(func): - @wraps(func) - def wrapper(*args, **kwargs): - with EmptyInitOnDevice('cpu'): - return func(*args, **kwargs) - return wrapper - - - -class LLaMaEmbedding(nn.Module): - """Language model embeddings. - - Arguments: - hidden_size: hidden size - vocab_size: vocabulary size - max_sequence_length: maximum size of sequence. 
This - is used for positional embedding - embedding_dropout_prob: dropout probability for embeddings - init_method: weight initialization method - num_tokentypes: size of the token-type embeddings. 0 value - will ignore this embedding - """ - - def __init__(self, - cfg, - ): - super().__init__() - self.hidden_size = cfg.hidden_size - self.params_dtype = cfg.params_dtype - self.fp32_residual_connection = cfg.fp32_residual_connection - self.embedding_weights_in_fp32 = cfg.embedding_weights_in_fp32 - self.word_embeddings = torch.nn.Embedding( - cfg.padded_vocab_size, self.hidden_size, - ) - self.embedding_dropout = torch.nn.Dropout(cfg.hidden_dropout) - - def forward(self, input_ids): - # Embeddings. - if self.embedding_weights_in_fp32: - self.word_embeddings = self.word_embeddings.to(torch.float32) - embeddings = self.word_embeddings(input_ids) - if self.embedding_weights_in_fp32: - embeddings = embeddings.to(self.params_dtype) - self.word_embeddings = self.word_embeddings.to(self.params_dtype) - - # Data format change to avoid explicit transposes : [b s h] --> [s b h]. - embeddings = embeddings.transpose(0, 1).contiguous() - - # If the input flag for fp32 residual connection is set, convert for float. - if self.fp32_residual_connection: - embeddings = embeddings.float() - - # Dropout. 
- embeddings = self.embedding_dropout(embeddings) - - return embeddings - - - -class StepChatTokenizer: - """Step Chat Tokenizer""" - - def __init__( - self, model_file, name="StepChatTokenizer", - bot_token="<|BOT|>", # Begin of Turn - eot_token="<|EOT|>", # End of Turn - call_start_token="<|CALL_START|>", # Call Start - call_end_token="<|CALL_END|>", # Call End - think_start_token="<|THINK_START|>", # Think Start - think_end_token="<|THINK_END|>", # Think End - mask_start_token="<|MASK_1e69f|>", # Mask start - mask_end_token="<|UNMASK_1e69f|>", # Mask end - ): - import sentencepiece - - self._tokenizer = sentencepiece.SentencePieceProcessor(model_file=model_file) - - self._vocab = {} - self._inv_vocab = {} - - self._special_tokens = {} - self._inv_special_tokens = {} - - self._t5_tokens = [] - - for idx in range(self._tokenizer.get_piece_size()): - text = self._tokenizer.id_to_piece(idx) - self._inv_vocab[idx] = text - self._vocab[text] = idx - - if self._tokenizer.is_control(idx) or self._tokenizer.is_unknown(idx): - self._special_tokens[text] = idx - self._inv_special_tokens[idx] = text - - self._unk_id = self._tokenizer.unk_id() - self._bos_id = self._tokenizer.bos_id() - self._eos_id = self._tokenizer.eos_id() - - for token in [ - bot_token, eot_token, call_start_token, call_end_token, - think_start_token, think_end_token - ]: - assert token in self._vocab, f"Token '{token}' not found in tokenizer" - assert token in self._special_tokens, f"Token '{token}' is not a special token" - - for token in [mask_start_token, mask_end_token]: - assert token in self._vocab, f"Token '{token}' not found in tokenizer" - - self._bot_id = self._tokenizer.piece_to_id(bot_token) - self._eot_id = self._tokenizer.piece_to_id(eot_token) - self._call_start_id = self._tokenizer.piece_to_id(call_start_token) - self._call_end_id = self._tokenizer.piece_to_id(call_end_token) - self._think_start_id = self._tokenizer.piece_to_id(think_start_token) - self._think_end_id = 
self._tokenizer.piece_to_id(think_end_token) - self._mask_start_id = self._tokenizer.piece_to_id(mask_start_token) - self._mask_end_id = self._tokenizer.piece_to_id(mask_end_token) - - self._underline_id = self._tokenizer.piece_to_id("\u2581") - - @property - def vocab(self): - return self._vocab - - @property - def inv_vocab(self): - return self._inv_vocab - - @property - def vocab_size(self): - return self._tokenizer.vocab_size() - - def tokenize(self, text: str) -> List[int]: - return self._tokenizer.encode_as_ids(text) - - def detokenize(self, token_ids: List[int]) -> str: - return self._tokenizer.decode_ids(token_ids) - - -class Tokens: - def __init__(self, input_ids, cu_input_ids, attention_mask, cu_seqlens, max_seq_len) -> None: - self.input_ids = input_ids - self.attention_mask = attention_mask - self.cu_input_ids = cu_input_ids - self.cu_seqlens = cu_seqlens - self.max_seq_len = max_seq_len - def to(self, device): - self.input_ids = self.input_ids.to(device) - self.attention_mask = self.attention_mask.to(device) - self.cu_input_ids = self.cu_input_ids.to(device) - self.cu_seqlens = self.cu_seqlens.to(device) - return self - -class Wrapped_StepChatTokenizer(StepChatTokenizer): - def __call__(self, text, max_length=320, padding="max_length", truncation=True, return_tensors="pt"): - # [bos, ..., eos, pad, pad, ..., pad] - self.BOS = 1 - self.EOS = 2 - self.PAD = 2 - out_tokens = [] - attn_mask = [] - if len(text) == 0: - part_tokens = [self.BOS] + [self.EOS] - valid_size = len(part_tokens) - if len(part_tokens) < max_length: - part_tokens += [self.PAD] * (max_length - valid_size) - out_tokens.append(part_tokens) - attn_mask.append([1]*valid_size+[0]*(max_length-valid_size)) - else: - for part in text: - part_tokens = self.tokenize(part) - part_tokens = part_tokens[:(max_length - 2)] # leave 2 space for bos and eos - part_tokens = [self.BOS] + part_tokens + [self.EOS] - valid_size = len(part_tokens) - if len(part_tokens) < max_length: - part_tokens += 
[self.PAD] * (max_length - valid_size) - out_tokens.append(part_tokens) - attn_mask.append([1]*valid_size+[0]*(max_length-valid_size)) - - out_tokens = torch.tensor(out_tokens, dtype=torch.long) - attn_mask = torch.tensor(attn_mask, dtype=torch.long) - - # padding y based on tp size - padded_len = 0 - padded_flag = True if padded_len > 0 else False - if padded_flag: - pad_tokens = torch.tensor([[self.PAD] * max_length], device=out_tokens.device) - pad_attn_mask = torch.tensor([[1]*padded_len+[0]*(max_length-padded_len)], device=attn_mask.device) - out_tokens = torch.cat([out_tokens, pad_tokens], dim=0) - attn_mask = torch.cat([attn_mask, pad_attn_mask], dim=0) - - # cu_seqlens - cu_out_tokens = out_tokens.masked_select(attn_mask != 0).unsqueeze(0) - seqlen = attn_mask.sum(dim=1).tolist() - cu_seqlens = torch.cumsum(torch.tensor([0]+seqlen), 0).to(device=out_tokens.device,dtype=torch.int32) - max_seq_len = max(seqlen) - return Tokens(out_tokens, cu_out_tokens, attn_mask, cu_seqlens, max_seq_len) - - - -def flash_attn_func(q, k, v, dropout_p=0.0, softmax_scale=None, causal=True, - return_attn_probs=False, tp_group_rank=0, tp_group_size=1): - softmax_scale = q.size(-1) ** (-0.5) if softmax_scale is None else softmax_scale - if hasattr(torch.ops.Optimus, "fwd"): - results = torch.ops.Optimus.fwd(q, k, v, None, dropout_p, softmax_scale, causal, return_attn_probs, None, tp_group_rank, tp_group_size)[0] - else: - warnings.warn("Cannot load `torch.ops.Optimus.fwd`. 
Using `torch.nn.functional.scaled_dot_product_attention` instead.") - results = torch.nn.functional.scaled_dot_product_attention(q.transpose(1, 2), k.transpose(1, 2), v.transpose(1, 2), is_causal=True, scale=softmax_scale).transpose(1, 2) - return results - - -class FlashSelfAttention(torch.nn.Module): - def __init__( - self, - attention_dropout=0.0, - ): - super().__init__() - self.dropout_p = attention_dropout - - - def forward(self, q, k, v, cu_seqlens=None, max_seq_len=None): - if cu_seqlens is None: - output = flash_attn_func(q, k, v, dropout_p=self.dropout_p) - else: - raise ValueError('cu_seqlens is not supported!') - - return output - - - -def safediv(n, d): - q, r = divmod(n, d) - assert r == 0 - return q - - -class MultiQueryAttention(nn.Module): - def __init__(self, cfg, layer_id=None): - super().__init__() - - self.head_dim = cfg.hidden_size // cfg.num_attention_heads - self.max_seq_len = cfg.seq_length - self.use_flash_attention = cfg.use_flash_attn - assert self.use_flash_attention, 'FlashAttention is required!' - - self.n_groups = cfg.num_attention_groups - self.tp_size = 1 - self.n_local_heads = cfg.num_attention_heads - self.n_local_groups = self.n_groups - - self.wqkv = nn.Linear( - cfg.hidden_size, - cfg.hidden_size + self.head_dim * 2 * self.n_groups, - bias=False, - ) - self.wo = nn.Linear( - cfg.hidden_size, - cfg.hidden_size, - bias=False, - ) - - assert self.use_flash_attention, 'non-Flash attention not supported yet.' 
- self.core_attention = FlashSelfAttention(attention_dropout=cfg.attention_dropout) - - self.layer_id = layer_id - - def forward( - self, - x: torch.Tensor, - mask: Optional[torch.Tensor], - cu_seqlens: Optional[torch.Tensor], - max_seq_len: Optional[torch.Tensor], - ): - seqlen, bsz, dim = x.shape - xqkv = self.wqkv(x) - - xq, xkv = torch.split( - xqkv, - (dim // self.tp_size, - self.head_dim*2*self.n_groups // self.tp_size - ), - dim=-1, - ) - - # gather on 1st dimension - xq = xq.view(seqlen, bsz, self.n_local_heads, self.head_dim) - xkv = xkv.view(seqlen, bsz, self.n_local_groups, 2 * self.head_dim) - xk, xv = xkv.chunk(2, -1) - - # rotary embedding + flash attn - xq = rearrange(xq, "s b h d -> b s h d") - xk = rearrange(xk, "s b h d -> b s h d") - xv = rearrange(xv, "s b h d -> b s h d") - - q_per_kv = self.n_local_heads // self.n_local_groups - if q_per_kv > 1: - b, s, h, d = xk.size() - if h == 1: - xk = xk.expand(b, s, q_per_kv, d) - xv = xv.expand(b, s, q_per_kv, d) - else: - ''' To cover the cases where h > 1, we have - the following implementation, which is equivalent to: - xk = xk.repeat_interleave(q_per_kv, dim=-2) - xv = xv.repeat_interleave(q_per_kv, dim=-2) - but can avoid calling aten::item() that involves cpu. - ''' - idx = torch.arange(q_per_kv * h, device=xk.device).reshape(q_per_kv, -1).permute(1, 0).flatten() - xk = torch.index_select(xk.repeat(1, 1, q_per_kv, 1), 2, idx).contiguous() - xv = torch.index_select(xv.repeat(1, 1, q_per_kv, 1), 2, idx).contiguous() - - if self.use_flash_attention: - output = self.core_attention(xq, xk, xv, - cu_seqlens=cu_seqlens, - max_seq_len=max_seq_len) - # reduce-scatter only support first dimension now - output = rearrange(output, "b s h d -> s b (h d)").contiguous() - else: - xq, xk, xv = [ - rearrange(x, "b s ... 
-> s b ...").contiguous() - for x in (xq, xk, xv) - ] - output = self.core_attention(xq, xk, xv, mask) - output = self.wo(output) - return output - - - -class FeedForward(nn.Module): - def __init__( - self, - cfg, - dim: int, - hidden_dim: int, - layer_id: int, - multiple_of: int=256, - ): - super().__init__() - - hidden_dim = multiple_of * ((hidden_dim + multiple_of - 1) // multiple_of) - def swiglu(x): - x = torch.chunk(x, 2, dim=-1) - return F.silu(x[0]) * x[1] - self.swiglu = swiglu - - self.w1 = nn.Linear( - dim, - 2 * hidden_dim, - bias=False, - ) - self.w2 = nn.Linear( - hidden_dim, - dim, - bias=False, - ) - - def forward(self, x): - x = self.swiglu(self.w1(x)) - output = self.w2(x) - return output - - - -class TransformerBlock(nn.Module): - def __init__( - self, cfg, layer_id: int - ): - super().__init__() - - self.n_heads = cfg.num_attention_heads - self.dim = cfg.hidden_size - self.head_dim = cfg.hidden_size // cfg.num_attention_heads - self.attention = MultiQueryAttention( - cfg, - layer_id=layer_id, - ) - - self.feed_forward = FeedForward( - cfg, - dim=cfg.hidden_size, - hidden_dim=cfg.ffn_hidden_size, - layer_id=layer_id, - ) - self.layer_id = layer_id - self.attention_norm = RMSNorm( - cfg.hidden_size, - eps=cfg.layernorm_epsilon, - ) - self.ffn_norm = RMSNorm( - cfg.hidden_size, - eps=cfg.layernorm_epsilon, - ) - - def forward( - self, - x: torch.Tensor, - mask: Optional[torch.Tensor], - cu_seqlens: Optional[torch.Tensor], - max_seq_len: Optional[torch.Tensor], - ): - residual = self.attention.forward( - self.attention_norm(x), mask, - cu_seqlens, max_seq_len - ) - h = x + residual - ffn_res = self.feed_forward.forward(self.ffn_norm(h)) - out = h + ffn_res - return out - - -class Transformer(nn.Module): - def __init__( - self, - config, - max_seq_size=8192, - ): - super().__init__() - self.num_layers = config.num_layers - self.layers = self._build_layers(config) - - def _build_layers(self, config): - layers = torch.nn.ModuleList() - for layer_id in 
range(self.num_layers): - layers.append( - TransformerBlock( - config, - layer_id=layer_id + 1 , - ) - ) - return layers - - def forward( - self, - hidden_states, - attention_mask, - cu_seqlens=None, - max_seq_len=None, - ): - - if max_seq_len is not None and not isinstance(max_seq_len, torch.Tensor): - max_seq_len = torch.tensor(max_seq_len, dtype=torch.int32, device="cpu") - - for lid, layer in enumerate(self.layers): - hidden_states = layer( - hidden_states, - attention_mask, - cu_seqlens, - max_seq_len, - ) - return hidden_states - - -class Step1Model(PreTrainedModel): - config_class=PretrainedConfig - @with_empty_init - def __init__( - self, - config, - ): - super().__init__(config) - self.tok_embeddings = LLaMaEmbedding(config) - self.transformer = Transformer(config) - - def forward( - self, - input_ids=None, - attention_mask=None, - ): - - hidden_states = self.tok_embeddings(input_ids) - - hidden_states = self.transformer( - hidden_states, - attention_mask, - ) - return hidden_states - - - -class STEP1TextEncoder(torch.nn.Module): - def __init__(self, model_dir, max_length=320): - super(STEP1TextEncoder, self).__init__() - self.max_length = max_length - self.text_tokenizer = Wrapped_StepChatTokenizer(os.path.join(model_dir, 'step1_chat_tokenizer.model')) - text_encoder = Step1Model.from_pretrained(model_dir) - self.text_encoder = text_encoder.eval().to(torch.bfloat16) - - @staticmethod - def from_pretrained(path, torch_dtype=torch.bfloat16): - model = STEP1TextEncoder(path).to(torch_dtype) - return model - - @torch.no_grad - def forward(self, prompts, with_mask=True, max_length=None, device="cuda"): - self.device = device - with torch.no_grad(), torch.amp.autocast(dtype=torch.bfloat16, device_type=device): - if type(prompts) is str: - prompts = [prompts] - - txt_tokens = self.text_tokenizer( - prompts, max_length=max_length or self.max_length, padding="max_length", truncation=True, return_tensors="pt" - ) - y = self.text_encoder( - 
txt_tokens.input_ids.to(self.device), - attention_mask=txt_tokens.attention_mask.to(self.device) if with_mask else None - ) - y_mask = txt_tokens.attention_mask - return y.transpose(0,1), y_mask - diff --git a/diffsynth/models/stepvideo_vae.py b/diffsynth/models/stepvideo_vae.py deleted file mode 100644 index db244c0..0000000 --- a/diffsynth/models/stepvideo_vae.py +++ /dev/null @@ -1,1132 +0,0 @@ -# Copyright 2025 StepFun Inc. All Rights Reserved. -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. 
class BaseGroupNorm(nn.GroupNorm):
    """GroupNorm that can consume channel-last tensors and optionally fuse a
    SiLU activation or a zero-padded output (see ``base_group_norm`` and
    ``base_group_norm_with_zero_pad``)."""

    def __init__(self, num_groups, num_channels):
        super().__init__(num_groups=num_groups, num_channels=num_channels)

    def forward(self, x, zero_pad=False, **kwargs):
        # zero_pad=True prepends `pad_size` zero slots along dim 1 of the
        # output (used ahead of CausalConvAfterNorm, which expects that pad).
        if zero_pad:
            return base_group_norm_with_zero_pad(x, self, **kwargs)
        else:
            return base_group_norm(x, self, **kwargs)


def base_group_norm(x, norm_layer, act_silu=False, channel_last=False):
    """Apply ``F.group_norm`` with ``norm_layer``'s parameters.

    channel_last=True means x is (..., C) and is permuted to channel-first
    for the op.  The module-level flag ``base_group_norm.spatial`` (set by
    StepVideoVAE for version 2) flattens the two leading dims first so each
    frame is normalized independently.
    """
    if hasattr(base_group_norm, 'spatial') and base_group_norm.spatial:
        assert channel_last  # spatial mode is only implemented channel-last
        x_shape = x.shape
        x = x.flatten(0, 1)  # fold (N, T) together: per-frame normalization
        if channel_last:
            x = x.permute(0, 3, 1, 2)  # NHWC -> NCHW
        out = F.group_norm(x.contiguous(), norm_layer.num_groups, norm_layer.weight, norm_layer.bias, norm_layer.eps)
        if act_silu:
            out = F.silu(out)
        if channel_last:
            out = out.permute(0, 2, 3, 1)  # NCHW -> NHWC
        # NOTE(review): .view on a permuted (non-contiguous) tensor can raise;
        # presumably works for the shapes used in this VAE — confirm.
        out = out.view(x_shape)
    else:
        if channel_last:
            x = x.permute(0, 3, 1, 2)  # NHWC -> NCHW
        out = F.group_norm(x.contiguous(), norm_layer.num_groups, norm_layer.weight, norm_layer.bias, norm_layer.eps)
        if act_silu:
            out = F.silu(out)
        if channel_last:
            out = out.permute(0, 2, 3, 1)  # NCHW -> NHWC
    return out


def base_conv2d(x, conv_layer, channel_last=False, residual=None):
    """``F.conv2d`` using ``conv_layer``'s weights, with optional NHWC input
    layout and an optional fused residual add."""
    if channel_last:
        x = x.permute(0, 3, 1, 2)  # NHWC -> NCHW
    out = F.conv2d(x, conv_layer.weight, conv_layer.bias, stride=conv_layer.stride, padding=conv_layer.padding)
    if residual is not None:
        if channel_last:
            residual = residual.permute(0, 3, 1, 2)
        out += residual
    if channel_last:
        out = out.permute(0, 2, 3, 1)  # NCHW -> NHWC
    return out


def base_conv3d(x, conv_layer, channel_last=False, residual=None, only_return_output=False):
    """``F.conv3d`` using ``conv_layer``'s weights, with optional NDHWC layout
    and fused residual add.

    only_return_output=True only allocates the channel-last output buffer
    (no convolution) — used by the chunked path below.
    """
    if only_return_output:
        size = cal_outsize(x.shape, conv_layer.weight.shape, conv_layer.stride, conv_layer.padding)
        return torch.empty(size, device=x.device, dtype=x.dtype)
    if channel_last:
        x = x.permute(0, 4, 1, 2, 3)  # NDHWC -> NCDHW
    out = F.conv3d(x, conv_layer.weight, conv_layer.bias, stride=conv_layer.stride, padding=conv_layer.padding)
    if residual is not None:
        if channel_last:
            residual = residual.permute(0, 4, 1, 2, 3)
        out += residual
    if channel_last:
        out = out.permute(0, 2, 3, 4, 1)  # NCDHW -> NDHWC
    return out


def cal_outsize(input_sizes, kernel_sizes, stride, padding):
    """Output size of a 3D conv for channel-last (N, D, H, W, C) input and a
    (C_out, C_in, kD, kH, kW) weight; dilation is fixed at 1."""
    stride_d, stride_h, stride_w = stride
    padding_d, padding_h, padding_w = padding
    dilation_d, dilation_h, dilation_w = 1, 1, 1

    in_d = input_sizes[1]
    in_h = input_sizes[2]
    in_w = input_sizes[3]

    kernel_d = kernel_sizes[2]
    kernel_h = kernel_sizes[3]
    kernel_w = kernel_sizes[4]
    out_channels = kernel_sizes[0]

    out_d = calc_out_(in_d, padding_d, dilation_d, kernel_d, stride_d)
    out_h = calc_out_(in_h, padding_h, dilation_h, kernel_h, stride_h)
    out_w = calc_out_(in_w, padding_w, dilation_w, kernel_w, stride_w)
    return [input_sizes[0], out_d, out_h, out_w, out_channels]


def calc_out_(in_size, padding, dilation, kernel, stride):
    """Standard convolution output-length formula."""
    return (in_size + 2 * padding - dilation * (kernel - 1) - 1) // stride + 1


def base_conv3d_channel_last(x, conv_layer, residual=None):
    """Channel-last 3D conv that splits along time when input or output would
    reach 2**30 elements, writing each chunk into a preallocated buffer."""
    in_numel = x.numel()
    out_numel = int(x.numel() * conv_layer.out_channels / conv_layer.in_channels)
    if (in_numel >= 2**30) or (out_numel >= 2**30):
        assert conv_layer.stride[0] == 1, "time split asks time stride = 1"

        B, T, H, W, C = x.shape
        K = conv_layer.kernel_size[0]
        chunks = 4
        chunk_size = T // chunks

        if residual is None:
            out_nhwc = base_conv3d(x, conv_layer, channel_last=True, residual=residual, only_return_output=True)
        else:
            out_nhwc = residual

        assert B == 1
        for i in range(chunks):
            # Overlap input chunks by K-1 frames so the conv sees full context.
            if i == chunks - 1:
                xi = x[:1, chunk_size * i:]
                out_nhwci = out_nhwc[:1, chunk_size * i:]
            else:
                xi = x[:1, chunk_size * i:chunk_size * (i + 1) + K - 1]
                out_nhwci = out_nhwc[:1, chunk_size * i:chunk_size * (i + 1)]
            if residual is not None:
                if i == chunks - 1:
                    ri = residual[:1, chunk_size * i:]
                else:
                    ri = residual[:1, chunk_size * i:chunk_size * (i + 1)]
            else:
                ri = None
            out_nhwci.copy_(base_conv3d(xi, conv_layer, channel_last=True, residual=ri))
    else:
        out_nhwc = base_conv3d(x, conv_layer, channel_last=True, residual=residual)
    return out_nhwc
class Upsample2D(nn.Module):
    """2x nearest-neighbour spatial upsample for channel-last (N, H, W, C)
    input, followed by a 3x3 conv.  Only use_conv=True is implemented."""

    def __init__(self, channels, use_conv=False, use_conv_transpose=False, out_channels=None):
        super().__init__()
        self.channels = channels
        self.out_channels = out_channels or channels
        self.use_conv = use_conv
        self.use_conv_transpose = use_conv_transpose

        if use_conv:
            self.conv = nn.Conv2d(self.channels, self.out_channels, 3, padding=1)
        else:
            # BUG FIX: the original `assert "Not Supported"` asserted a truthy
            # string, so it never fired and an unused ConvTranspose2d was
            # silently built.  Reject the unsupported configuration for real.
            raise NotImplementedError("Upsample2D with use_conv=False is not supported")

    def forward(self, x, output_size=None):
        # x is channel-last: (N, H, W, C).
        assert x.shape[-1] == self.channels

        if self.use_conv_transpose:
            return self.conv(x)

        # Interpolate in channels_last memory format, then return to NHWC.
        nchw = x.permute(0, 3, 1, 2).to(memory_format=torch.channels_last)
        if output_size is None:
            x = F.interpolate(nchw, scale_factor=2.0, mode='nearest').permute(0, 2, 3, 1).contiguous()
        else:
            x = F.interpolate(nchw, size=output_size, mode='nearest').permute(0, 2, 3, 1).contiguous()
        return base_conv2d(x, self.conv, channel_last=True)


class Downsample2D(nn.Module):
    """2x spatial downsample for channel-last (N, H, W, C) input via a strided
    3x3 conv (use_conv=True) or 2x2 average pooling."""

    def __init__(self, channels, use_conv=False, out_channels=None, padding=1):
        super().__init__()
        self.channels = channels
        self.out_channels = out_channels or channels
        self.use_conv = use_conv
        self.padding = padding
        stride = 2

        if use_conv:
            self.conv = nn.Conv2d(self.channels, self.out_channels, 3, stride=stride, padding=padding)
        else:
            assert self.channels == self.out_channels
            # NOTE(review): forward still routes through base_conv2d, which
            # reads .weight — the pooling path looks unreachable in practice.
            self.conv = nn.AvgPool2d(kernel_size=stride, stride=stride)

    def forward(self, x):
        assert x.shape[-1] == self.channels
        if self.use_conv and self.padding == 0:
            # Asymmetric bottom/right pad (channel-last F.pad order: C, W, H).
            pad = (0, 0, 0, 1, 0, 1)
            x = F.pad(x, pad, mode="constant", value=0)

        assert x.shape[-1] == self.channels
        return base_conv2d(x, self.conv, channel_last=True)


class CausalConv(nn.Module):
    """Conv3d with causal temporal padding: all (k_t - 1) pad frames go in
    front of the clip, so output frame t never sees future input frames."""

    def __init__(self, chan_in, chan_out, kernel_size, **kwargs):
        super().__init__()

        if isinstance(kernel_size, int):
            kernel_size = (kernel_size,) * 3
        time_kernel_size, height_kernel_size, width_kernel_size = kernel_size

        self.dilation = kwargs.pop('dilation', 1)
        self.stride = kwargs.pop('stride', 1)
        if isinstance(self.stride, int):
            self.stride = (self.stride, 1, 1)
        time_pad = self.dilation * (time_kernel_size - 1) + max((1 - self.stride[0]), 0)
        height_pad = height_kernel_size // 2
        width_pad = width_kernel_size // 2
        # F.pad order: (W_left, W_right, H_top, H_bottom, T_front, T_back).
        self.time_causal_padding = (width_pad, width_pad, height_pad, height_pad, time_pad, 0)
        self.time_uncausal_padding = (width_pad, width_pad, height_pad, height_pad, 0, 0)

        self.conv = nn.Conv3d(chan_in, chan_out, kernel_size, stride=self.stride, dilation=self.dilation, **kwargs)
        self.is_first_run = True

    def forward(self, x, is_init=True, residual=None):
        # is_init=False skips the temporal pad (continuation of a clip).
        x = nn.functional.pad(x, self.time_causal_padding if is_init else self.time_uncausal_padding)
        x = self.conv(x)
        if residual is not None:
            x.add_(residual)
        return x
class ChannelDuplicatingPixelUnshuffleUpSampleLayer3D(nn.Module):
    """Parameter-free 3D upsample: duplicate channels, then shuffle the
    factor**3 copies into (t, h, w); drops the first factor-1 output frames
    for causal alignment."""

    def __init__(self, in_channels: int, out_channels: int, factor: int):
        super().__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.factor = factor
        assert out_channels * factor**3 % in_channels == 0
        self.repeats = out_channels * factor**3 // in_channels

    def forward(self, x: torch.Tensor, is_init=True) -> torch.Tensor:
        x = x.repeat_interleave(self.repeats, dim=1)
        x = x.view(x.size(0), self.out_channels, self.factor, self.factor, self.factor, x.size(2), x.size(3), x.size(4))
        x = x.permute(0, 1, 5, 2, 6, 3, 7, 4).contiguous()
        x = x.view(x.size(0), self.out_channels, x.size(2)*self.factor, x.size(4)*self.factor, x.size(6)*self.factor)
        # Drop the leading factor-1 frames (causal alignment).
        x = x[:, :, self.factor - 1:, :, :]
        return x


class ConvPixelShuffleUpSampleLayer3D(nn.Module):
    """Causal conv that expands channels by factor**3, then 3D pixel-shuffles
    them into a factor-x larger (t, h, w) volume."""

    def __init__(self, in_channels: int, out_channels: int, kernel_size: int, factor: int):
        super().__init__()
        self.factor = factor
        out_ratio = factor**3
        self.conv = CausalConv(in_channels, out_channels * out_ratio, kernel_size=kernel_size)

    def forward(self, x: torch.Tensor, is_init=True) -> torch.Tensor:
        x = self.conv(x, is_init)
        x = self.pixel_shuffle_3d(x, self.factor)
        return x

    @staticmethod
    def pixel_shuffle_3d(x: torch.Tensor, factor: int) -> torch.Tensor:
        """3D analogue of F.pixel_shuffle; trims the first factor-1 frames."""
        batch_size, channels, depth, height, width = x.size()
        new_channels = channels // (factor ** 3)
        new_depth = depth * factor
        new_height = height * factor
        new_width = width * factor

        x = x.view(batch_size, new_channels, factor, factor, factor, depth, height, width)
        x = x.permute(0, 1, 5, 2, 6, 3, 7, 4).contiguous()
        x = x.view(batch_size, new_channels, new_depth, new_height, new_width)
        x = x[:, :, factor - 1:, :, :]
        return x


class ConvPixelUnshuffleDownSampleLayer3D(nn.Module):
    """Causal conv that shrinks channels by factor**3, then 3D pixel-unshuffles
    space-time into channels (front-padded to keep causality)."""

    def __init__(self, in_channels: int, out_channels: int, kernel_size: int, factor: int):
        super().__init__()
        self.factor = factor
        out_ratio = factor**3
        assert out_channels % out_ratio == 0
        self.conv = CausalConv(in_channels, out_channels // out_ratio, kernel_size=kernel_size)

    def forward(self, x: torch.Tensor, is_init=True) -> torch.Tensor:
        x = self.conv(x, is_init)
        x = self.pixel_unshuffle_3d(x, self.factor)
        return x

    @staticmethod
    def pixel_unshuffle_3d(x: torch.Tensor, factor: int) -> torch.Tensor:
        """3D analogue of F.pixel_unshuffle with a causal front pad in time."""
        pad = (0, 0, 0, 0, factor - 1, 0)  # (left, right, top, bottom, front, back)
        x = F.pad(x, pad)
        B, C, D, H, W = x.shape
        x = x.view(B, C, D // factor, factor, H // factor, factor, W // factor, factor)
        x = x.permute(0, 1, 3, 5, 7, 2, 4, 6).contiguous()
        x = x.view(B, C * factor**3, D // factor, H // factor, W // factor)
        return x


class PixelUnshuffleChannelAveragingDownSampleLayer3D(nn.Module):
    """Parameter-free 3D downsample: causal pixel-unshuffle into channels,
    then average channel groups down to out_channels.

    BUG FIX: the original class body contained a second, byte-identical copy
    of __init__ and forward (the duplicate silently overrode the first);
    the dead duplicate has been removed.
    """

    def __init__(self, in_channels: int, out_channels: int, factor: int):
        super().__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.factor = factor
        assert in_channels * factor**3 % out_channels == 0
        self.group_size = in_channels * factor**3 // out_channels

    def forward(self, x: torch.Tensor, is_init=True) -> torch.Tensor:
        pad = (0, 0, 0, 0, self.factor - 1, 0)  # (left, right, top, bottom, front, back)
        x = F.pad(x, pad)
        B, C, D, H, W = x.shape
        x = x.view(B, C, D // self.factor, self.factor, H // self.factor, self.factor, W // self.factor, self.factor)
        x = x.permute(0, 1, 3, 5, 7, 2, 4, 6).contiguous()
        x = x.view(B, C * self.factor**3, D // self.factor, H // self.factor, W // self.factor)
        x = x.view(B, self.out_channels, self.group_size, D // self.factor, H // self.factor, W // self.factor)
        x = x.mean(dim=2)
        return x
x.permute(0, 1, 3, 5, 7, 2, 4, 6).contiguous() - x = x.view(B, C * self.factor**3, D // self.factor, H // self.factor, W // self.factor) - x = x.view(B, self.out_channels, self.group_size, D // self.factor, H // self.factor, W // self.factor) - x = x.mean(dim=2) - return x - - - - -def base_group_norm_with_zero_pad(x, norm_layer, act_silu=True, pad_size=2): - out_shape = list(x.shape) - out_shape[1] += pad_size - out = torch.empty(out_shape, dtype=x.dtype, device=x.device) - out[:, pad_size:] = base_group_norm(x, norm_layer, act_silu=act_silu, channel_last=True) - out[:, :pad_size] = 0 - return out - - -class CausalConvChannelLast(CausalConv): - def __init__(self, - chan_in, - chan_out, - kernel_size, - **kwargs - ): - super().__init__( - chan_in, chan_out, kernel_size, **kwargs) - - self.time_causal_padding = (0, 0) + self.time_causal_padding - self.time_uncausal_padding = (0, 0) + self.time_uncausal_padding - - def forward(self, x, is_init=True, residual=None): - if self.is_first_run: - self.is_first_run = False - # self.conv.weight = nn.Parameter(self.conv.weight.permute(0,2,3,4,1).contiguous()) - - x = nn.functional.pad(x, - self.time_causal_padding if is_init else self.time_uncausal_padding) - - x = base_conv3d_channel_last(x, self.conv, residual=residual) - return x - -class CausalConvAfterNorm(CausalConv): - def __init__(self, - chan_in, - chan_out, - kernel_size, - **kwargs - ): - super().__init__( - chan_in, chan_out, kernel_size, **kwargs) - - if self.time_causal_padding == (1, 1, 1, 1, 2, 0): - self.conv = nn.Conv3d(chan_in, chan_out, kernel_size, stride=self.stride, dilation=self.dilation, padding=(0, 1, 1), **kwargs) - else: - self.conv = nn.Conv3d(chan_in, chan_out, kernel_size, stride=self.stride, dilation=self.dilation, **kwargs) - self.is_first_run = True - - def forward(self, x, is_init=True, residual=None): - if self.is_first_run: - self.is_first_run = False - - if self.time_causal_padding == (1, 1, 1, 1, 2, 0): - pass - else: - x = 
class AttnBlock(nn.Module):
    """Single-head self-attention over all (t, h, w) positions of a feature map.

    Input/output are channel-first (b, c, t, h, w); internally the block works
    channel-last with 1x1x1 causal convs for q/k/v and the output projection.
    """

    def __init__(self, in_channels):
        super().__init__()

        self.norm = BaseGroupNorm(num_groups=32, num_channels=in_channels)
        self.q = CausalConvChannelLast(in_channels, in_channels, kernel_size=1)
        self.k = CausalConvChannelLast(in_channels, in_channels, kernel_size=1)
        self.v = CausalConvChannelLast(in_channels, in_channels, kernel_size=1)
        self.proj_out = CausalConvChannelLast(in_channels, in_channels, kernel_size=1)

    def attention(self, x, is_init=True):
        # x: (b, t, h, w, c) channel-last.
        x = self.norm(x, act_silu=False, channel_last=True)
        q = self.q(x, is_init)
        k = self.k(x, is_init)
        v = self.v(x, is_init)

        b, t, h, w, c = q.shape
        # Flatten all spatio-temporal positions into one token sequence.
        q, k, v = map(lambda x: rearrange(x, "b t h w c -> b 1 (t h w) c"), (q, k, v))
        # NOTE(review): is_causal=True imposes causality over the flattened
        # (t h w) ordering, i.e. positions within a frame also mask each
        # other — confirm this is intended.
        x = nn.functional.scaled_dot_product_attention(q, k, v, is_causal=True)
        x = rearrange(x, "b 1 (t h w) c -> b t h w c", t=t, h=h, w=w)

        return x

    def forward(self, x):
        x = x.permute(0, 2, 3, 4, 1).contiguous()  # b c t h w -> b t h w c
        h = self.attention(x)
        # proj_out adds x back in-place as the residual.
        x = self.proj_out(h, residual=x)
        x = x.permute(0, 4, 1, 2, 3)  # back to b c t h w
        return x


class Resnet3DBlock(nn.Module):
    """3D residual block: two (zero-padded norm + causal conv) stages with an
    optional 1x1x1 shortcut when the channel count changes.

    Input/output are channel-first (b, c, t, h, w).
    """

    def __init__(self, in_channels, out_channels=None, temb_channels=512, conv_shortcut=False):
        super().__init__()

        self.in_channels = in_channels
        out_channels = in_channels if out_channels is None else out_channels
        self.out_channels = out_channels

        self.norm1 = BaseGroupNorm(num_groups=32, num_channels=in_channels)
        self.conv1 = CausalConvAfterNorm(in_channels, out_channels, kernel_size=3)
        # Timestep-embedding path; inactive in this VAE (callers pass 0).
        if temb_channels > 0:
            self.temb_proj = nn.Linear(temb_channels, out_channels)

        self.norm2 = BaseGroupNorm(num_groups=32, num_channels=out_channels)
        self.conv2 = CausalConvAfterNorm(out_channels, out_channels, kernel_size=3)

        assert conv_shortcut is False
        self.use_conv_shortcut = conv_shortcut
        if self.in_channels != self.out_channels:
            if self.use_conv_shortcut:
                self.conv_shortcut = CausalConvAfterNorm(in_channels, out_channels, kernel_size=3)
            else:
                self.nin_shortcut = CausalConvAfterNorm(in_channels, out_channels, kernel_size=1)

    def forward(self, x, temb=None, is_init=True):
        x = x.permute(0, 2, 3, 4, 1).contiguous()  # to channel-last

        # zero_pad=True prepends 2 zero frames so CausalConvAfterNorm's conv
        # (which has no temporal padding of its own) stays causal.
        h = self.norm1(x, zero_pad=True, act_silu=True, pad_size=2)
        h = self.conv1(h)
        if temb is not None:
            h = h + self.temb_proj(nn.functional.silu(temb))[:, :, None, None]

        x = self.nin_shortcut(x) if self.in_channels != self.out_channels else x

        h = self.norm2(h, zero_pad=True, act_silu=True, pad_size=2)
        # conv2 fuses the residual add with the convolution.
        x = self.conv2(h, residual=x)

        x = x.permute(0, 4, 1, 2, 3)  # back to channel-first
        return x


class Downsample3D(nn.Module):
    """Spatio-temporal downsample via a strided causal conv (with_conv=True)
    or 2x2x2 average pooling."""

    def __init__(self, in_channels, with_conv, stride):
        super().__init__()

        self.with_conv = with_conv
        if with_conv:
            self.conv = CausalConv(in_channels, in_channels, kernel_size=3, stride=stride)

    def forward(self, x, is_init=True):
        if self.with_conv:
            x = self.conv(x, is_init)
        else:
            x = nn.functional.avg_pool3d(x, kernel_size=2, stride=2)
        return x


class VideoEncoder(nn.Module):
    """Causal 3D VAE encoder: conv_in -> per-level residual blocks with 2D/3D
    downsampling -> middle (res + attn + res) -> norm + conv_out.

    Input is (b, c, t, h, w); output latents are (b, t, c, h, w).
    """

    def __init__(self,
                 ch=32,
                 ch_mult=(4, 8, 16, 16),
                 num_res_blocks=2,
                 in_channels=3,
                 z_channels=16,
                 double_z=True,
                 down_sampling_layer=[1, 2],
                 resamp_with_conv=True,
                 version=1):
        super().__init__()

        temb_ch = 0  # no timestep embedding in the VAE

        self.num_resolutions = len(ch_mult)
        self.num_res_blocks = num_res_blocks

        # downsampling
        self.conv_in = CausalConv(in_channels, ch, kernel_size=3)
        self.down_sampling_layer = down_sampling_layer

        in_ch_mult = (1,) + tuple(ch_mult)
        self.down = nn.ModuleList()
        for i_level in range(self.num_resolutions):
            block = nn.ModuleList()
            attn = nn.ModuleList()  # kept for module layout; stays empty
            block_in = ch * in_ch_mult[i_level]
            block_out = ch * ch_mult[i_level]
            for i_block in range(self.num_res_blocks):
                block.append(
                    Resnet3DBlock(in_channels=block_in, out_channels=block_out, temb_channels=temb_ch))
                block_in = block_out
            down = nn.Module()
            down.block = block
            down.attn = attn
            if i_level != self.num_resolutions - 1:
                # Levels listed in down_sampling_layer also halve time; the
                # others downsample spatially only.
                if i_level in self.down_sampling_layer:
                    down.downsample = Downsample3D(block_in, resamp_with_conv, stride=(2, 2, 2))
                else:
                    down.downsample = Downsample2D(block_in, resamp_with_conv, padding=0) #DIFF
            self.down.append(down)

        # middle
        self.mid = nn.Module()
        self.mid.block_1 = Resnet3DBlock(in_channels=block_in, out_channels=block_in, temb_channels=temb_ch)
        self.mid.attn_1 = AttnBlock(block_in)
        self.mid.block_2 = Resnet3DBlock(in_channels=block_in, out_channels=block_in, temb_channels=temb_ch)

        # end
        self.norm_out = nn.GroupNorm(num_groups=32, num_channels=block_in)
        self.version = version
        if version == 2:
            # Version 2 adds pixel-unshuffle patchify + channel-averaging
            # shortcut paths around the output conv.
            channels = 4 * z_channels * 2 ** 3
            self.conv_patchify = ConvPixelUnshuffleDownSampleLayer3D(block_in, channels, kernel_size=3, factor=2)
            self.shortcut_pathify = PixelUnshuffleChannelAveragingDownSampleLayer3D(block_in, channels, 2)
            self.shortcut_out = PixelUnshuffleChannelAveragingDownSampleLayer3D(channels, 2 * z_channels if double_z else z_channels, 1)
            self.conv_out = CausalConvChannelLast(channels, 2 * z_channels if double_z else z_channels, kernel_size=3)
        else:
            self.conv_out = CausalConvAfterNorm(block_in, 2 * z_channels if double_z else z_channels, kernel_size=3)

    @torch.inference_mode()
    def forward(self, x, video_frame_num, is_init=True):
        # timestep embedding (unused)
        temb = None

        t = video_frame_num

        # downsampling
        h = self.conv_in(x, is_init)

        # make it real channel last, but behave like normal layout
        h = h.permute(0, 2, 3, 4, 1).contiguous().permute(0, 4, 1, 2, 3)

        for i_level in range(self.num_resolutions):
            for i_block in range(self.num_res_blocks):
                h = self.down[i_level].block[i_block](h, temb, is_init)
                if len(self.down[i_level].attn) > 0:
                    h = self.down[i_level].attn[i_block](h)

            if i_level != self.num_resolutions - 1:
                if isinstance(self.down[i_level].downsample, Downsample2D):
                    # 2D downsample: fold time into the batch dim first.
                    _, _, t, _, _ = h.shape
                    h = rearrange(h, "b c t h w -> (b t) h w c", t=t)
                    h = self.down[i_level].downsample(h)
                    h = rearrange(h, "(b t) h w c -> b c t h w", t=t)
                else:
                    h = self.down[i_level].downsample(h, is_init)

        h = self.mid.block_1(h, temb, is_init)
        h = self.mid.attn_1(h)
        h = self.mid.block_2(h, temb, is_init)

        h = h.permute(0, 2, 3, 4, 1).contiguous()  # b c l h w -> b l h w c
        if self.version == 2:
            h = base_group_norm(h, self.norm_out, act_silu=True, channel_last=True)
            h = h.permute(0, 4, 1, 2, 3).contiguous()
            # Patchify path plus parameter-free shortcut, summed in-place.
            shortcut = self.shortcut_pathify(h, is_init)
            h = self.conv_patchify(h, is_init)
            h = h.add_(shortcut)
            shortcut = self.shortcut_out(h, is_init).permute(0, 2, 3, 4, 1)
            h = self.conv_out(h.permute(0, 2, 3, 4, 1).contiguous(), is_init)
            h = h.add_(shortcut)
        else:
            h = base_group_norm_with_zero_pad(h, self.norm_out, act_silu=True, pad_size=2)
            h = self.conv_out(h, is_init)
        h = h.permute(0, 4, 1, 2, 3)  # b l h w c -> b c l h w

        h = rearrange(h, "b c t h w -> b t c h w")
        return h


class Res3DBlockUpsample(nn.Module):
    """Residual 3D conv block used right after interpolation in Upsample3D.

    Channel-first (b, c, t, h, w) in/out; internally channel-last.
    """

    def __init__(self, input_filters, num_filters, down_sampling_stride, down_sampling=False):
        super().__init__()

        self.input_filters = input_filters
        self.num_filters = num_filters

        self.act_ = nn.SiLU(inplace=True)

        self.conv1 = CausalConvChannelLast(num_filters, num_filters, kernel_size=[3, 3, 3])
        self.norm1 = BaseGroupNorm(32, num_filters)

        self.conv2 = CausalConvChannelLast(num_filters, num_filters, kernel_size=[3, 3, 3])
        self.norm2 = BaseGroupNorm(32, num_filters)

        self.down_sampling = down_sampling
        if down_sampling:
            self.down_sampling_stride = down_sampling_stride
        else:
            self.down_sampling_stride = [1, 1, 1]

        if num_filters != input_filters or down_sampling:
            self.conv3 = CausalConvChannelLast(input_filters, num_filters, kernel_size=[1, 1, 1], stride=self.down_sampling_stride)
            self.norm3 = BaseGroupNorm(32, num_filters)

    def forward(self, x, is_init=False):
        x = x.permute(0, 2, 3, 4, 1).contiguous()  # to channel-last

        residual = x

        h = self.conv1(x, is_init)
        h = self.norm1(h, act_silu=True, channel_last=True)

        h = self.conv2(h, is_init)
        h = self.norm2(h, act_silu=False, channel_last=True)

        if self.down_sampling or self.num_filters != self.input_filters:
            x = self.conv3(x, is_init)
            x = self.norm3(x, act_silu=False, channel_last=True)

        h.add_(x)
        h = self.act_(h)
        # NOTE(review): when no shortcut conv runs, x *is* residual, so the
        # identity is added twice (h + x + residual).  This matches the
        # shipped weights' behavior — confirm before changing.
        if residual is not None:
            h.add_(residual)

        h = h.permute(0, 4, 1, 2, 3)  # back to channel-first
        return h
class Upsample3D(nn.Module):
    """Nearest-neighbour 2x upsample in (t, h, w) followed by a residual 3D
    conv block.  is_split interpolates in channel chunks to bound peak memory."""

    def __init__(self, in_channels, scale_factor=2):
        super().__init__()

        self.scale_factor = scale_factor
        self.conv3d = Res3DBlockUpsample(input_filters=in_channels,
                                         num_filters=in_channels,
                                         down_sampling_stride=(1, 1, 1),
                                         down_sampling=False)

    def forward(self, x, is_init=True, is_split=True):
        b, c, t, h, w = x.shape

        if is_split:
            # Interpolate in 8 channel slices to reduce peak memory.
            split_size = c // 8
            x_slices = torch.split(x, split_size, dim=1)
            x = [nn.functional.interpolate(x, scale_factor=self.scale_factor) for x in x_slices]
            x = torch.cat(x, dim=1)
        else:
            x = nn.functional.interpolate(x, scale_factor=self.scale_factor)

        x = self.conv3d(x, is_init)
        return x


class VideoDecoder(nn.Module):
    """Causal 3D VAE decoder: conv_in (+ version-2 unpatchify shortcuts) ->
    middle (res + attn + res) -> per-level residual blocks with 2D/3D
    upsampling -> zero-padded norm + conv_out.

    Input latents are (b, t, c, h, w); output is (b, c, t, h, w) pixels.
    """

    def __init__(self,
                 ch=128,
                 z_channels=16,
                 out_channels=3,
                 ch_mult=(1, 2, 4, 4),
                 num_res_blocks=2,
                 temporal_up_layers=[2, 3],
                 temporal_downsample=4,
                 resamp_with_conv=True,
                 version=1):
        super().__init__()

        temb_ch = 0  # no timestep embedding in the VAE

        self.num_resolutions = len(ch_mult)
        self.num_res_blocks = num_res_blocks
        self.temporal_downsample = temporal_downsample

        block_in = ch * ch_mult[self.num_resolutions - 1]
        self.version = version
        if version == 2:
            # Version 2 mirrors the encoder's patchify with pixel-shuffle
            # unpatchify plus channel-duplicating shortcut paths.
            channels = 4 * z_channels * 2 ** 3
            self.conv_in = CausalConv(z_channels, channels, kernel_size=3)
            self.shortcut_in = ChannelDuplicatingPixelUnshuffleUpSampleLayer3D(z_channels, channels, 1)
            self.conv_unpatchify = ConvPixelShuffleUpSampleLayer3D(channels, block_in, kernel_size=3, factor=2)
            self.shortcut_unpathify = ChannelDuplicatingPixelUnshuffleUpSampleLayer3D(channels, block_in, 2)
        else:
            self.conv_in = CausalConv(z_channels, block_in, kernel_size=3)

        # middle
        self.mid = nn.Module()
        self.mid.block_1 = Resnet3DBlock(in_channels=block_in, out_channels=block_in, temb_channels=temb_ch)
        self.mid.attn_1 = AttnBlock(block_in)
        self.mid.block_2 = Resnet3DBlock(in_channels=block_in, out_channels=block_in, temb_channels=temb_ch)

        # upsampling
        self.up_id = len(temporal_up_layers)
        self.video_frame_num = 1
        self.cur_video_frame_num = self.video_frame_num // 2 ** self.up_id + 1
        self.up = nn.ModuleList()
        for i_level in reversed(range(self.num_resolutions)):
            block = nn.ModuleList()
            attn = nn.ModuleList()  # kept for module layout; stays empty
            block_out = ch * ch_mult[i_level]
            for i_block in range(self.num_res_blocks + 1):
                block.append(
                    Resnet3DBlock(in_channels=block_in, out_channels=block_out, temb_channels=temb_ch))
                block_in = block_out
            up = nn.Module()
            up.block = block
            up.attn = attn
            if i_level != 0:
                # Levels in temporal_up_layers also double time; the others
                # upsample spatially only.
                if i_level in temporal_up_layers:
                    up.upsample = Upsample3D(block_in)
                    self.cur_video_frame_num = self.cur_video_frame_num * 2
                else:
                    up.upsample = Upsample2D(block_in, resamp_with_conv)
            self.up.insert(0, up)  # prepend to get consistent order

        # end
        self.norm_out = nn.GroupNorm(num_groups=32, num_channels=block_in)
        self.conv_out = CausalConvAfterNorm(block_in, out_channels, kernel_size=3)

    @torch.inference_mode()
    def forward(self, z, is_init=True):
        z = rearrange(z, "b t c h w -> b c t h w")

        h = self.conv_in(z, is_init=is_init)
        if self.version == 2:
            # Unpatchify path plus parameter-free shortcuts, summed in-place.
            shortcut = self.shortcut_in(z, is_init=is_init)
            h = h.add_(shortcut)
            shortcut = self.shortcut_unpathify(h, is_init=is_init)
            h = self.conv_unpatchify(h, is_init=is_init)
            h = h.add_(shortcut)

        temb = None  # no timestep embedding

        h = h.permute(0, 2, 3, 4, 1).contiguous().permute(0, 4, 1, 2, 3)
        h = self.mid.block_1(h, temb, is_init=is_init)
        h = self.mid.attn_1(h)
        h = h.permute(0, 2, 3, 4, 1).contiguous().permute(0, 4, 1, 2, 3)
        h = self.mid.block_2(h, temb, is_init=is_init)

        # upsampling
        for i_level in reversed(range(self.num_resolutions)):
            for i_block in range(self.num_res_blocks + 1):
                h = h.permute(0, 2, 3, 4, 1).contiguous().permute(0, 4, 1, 2, 3)
                h = self.up[i_level].block[i_block](h, temb, is_init=is_init)
                if len(self.up[i_level].attn) > 0:
                    h = self.up[i_level].attn[i_block](h)
            if i_level != 0:
                if isinstance(self.up[i_level].upsample, Upsample2D) or (hasattr(self.up[i_level].upsample, "module") and isinstance(self.up[i_level].upsample.module, Upsample2D)):
                    # 2D upsample: fold time into the batch dim first.
                    B = h.size(0)
                    h = h.permute(0, 2, 3, 4, 1).flatten(0, 1)
                    h = self.up[i_level].upsample(h)
                    h = h.unflatten(0, (B, -1)).permute(0, 4, 1, 2, 3)
                else:
                    h = self.up[i_level].upsample(h, is_init=is_init)

        # end
        h = h.permute(0, 2, 3, 4, 1)  # b c l h w -> b l h w c
        self.norm_out.to(dtype=h.dtype, device=h.device)  # To be updated
        h = base_group_norm_with_zero_pad(h, self.norm_out, act_silu=True, pad_size=2)
        h = self.conv_out(h)
        h = h.permute(0, 4, 1, 2, 3)

        if is_init:
            # Drop the warm-up frames introduced by the causal padding.
            h = h[:, :, (self.temporal_downsample - 1):]
        return h


def rms_norm(input, normalized_shape, eps=1e-6):
    """RMS-normalize ``input`` over its trailing ``normalized_shape`` dims.

    Computed in float32 and cast back to the original dtype; no learned scale.
    """
    dtype = input.dtype
    input = input.to(torch.float32)
    # Mean of squares over the normalized dims, broadcast back for division.
    variance = input.pow(2).flatten(-len(normalized_shape)).mean(-1)[(...,) + (None,) * len(normalized_shape)]
    input = input * torch.rsqrt(variance + eps)
    return input.to(dtype)
class DiagonalGaussianDistribution(object):
    """Diagonal Gaussian over latents, parameterized by (mean, logvar) halves
    of ``parameters`` split along dim -3 (N, [T], C, H, W layout)."""

    def __init__(self, parameters, deterministic=False, rms_norm_mean=False, only_return_mean=False):
        self.parameters = parameters
        self.mean, self.logvar = torch.chunk(parameters, 2, dim=-3)  # N,[X],C,H,W
        # Clamp before exponentiation for numerical stability.
        self.logvar = torch.clamp(self.logvar, -30.0, 20.0)
        self.std = torch.exp(0.5 * self.logvar)
        self.var = torch.exp(self.logvar)
        self.deterministic = deterministic
        if self.deterministic:
            # Zero std/var makes sample() return the mean exactly.
            self.var = self.std = torch.zeros_like(
                self.mean,
                device=self.parameters.device,
                dtype=self.parameters.dtype)
        if rms_norm_mean:
            self.mean = rms_norm(self.mean, self.mean.size()[1:])
        self.only_return_mean = only_return_mean

    def sample(self, generator=None):
        # Reparameterized sample on the same device/dtype as the parameters.
        sample = torch.randn(
            self.mean.shape, generator=generator, device=self.parameters.device)
        sample = sample.to(dtype=self.parameters.dtype)
        x = self.mean + self.std * sample
        if self.only_return_mean:
            return self.mean
        else:
            return x


class StepVideoVAE(nn.Module):
    """Causal video VAE: 17 pixel frames <-> 3 latent frames per chunk
    (version 2; version 1 maps 17 <-> 5).

    version=2 enables the pixel-(un)shuffle shortcut paths and per-frame
    group norm via the module-level ``base_group_norm.spatial`` flag.
    """

    def __init__(self,
                 in_channels=3,
                 out_channels=3,
                 z_channels=64,
                 num_res_blocks=2,
                 model_path=None,
                 weight_dict={},
                 world_size=1,
                 version=2):
        super().__init__()

        # Frames per temporal chunk on the pixel / latent side.
        self.frame_len = 17
        self.latent_len = 3 if version == 2 else 5

        # NOTE(review): process-wide flag on the function object — it affects
        # every instance; the mutable default ``weight_dict={}`` is likewise
        # shared across calls.  Both predate this review.
        base_group_norm.spatial = True if version == 2 else False

        self.encoder = VideoEncoder(
            in_channels=in_channels,
            z_channels=z_channels,
            num_res_blocks=num_res_blocks,
            version=version,
        )

        self.decoder = VideoDecoder(
            z_channels=z_channels,
            out_channels=out_channels,
            num_res_blocks=num_res_blocks,
            version=version,
        )

        if model_path is not None:
            weight_dict = self.init_from_ckpt(model_path)
        if len(weight_dict) != 0:
            self.load_from_dict(weight_dict)
        self.convert_channel_last()

        self.world_size = world_size

    def init_from_ckpt(self, model_path):
        """Load a safetensors checkpoint, renaming decoder.conv_out.* keys to
        the nested decoder.conv_out.conv.* layout used here."""
        from safetensors import safe_open
        p = {}
        with safe_open(model_path, framework="pt", device="cpu") as f:
            for k in f.keys():
                tensor = f.get_tensor(k)
                if k.startswith("decoder.conv_out."):
                    k = k.replace("decoder.conv_out.", "decoder.conv_out.conv.")
                p[k] = tensor
        return p

    def load_from_dict(self, p):
        self.load_state_dict(p)

    def convert_channel_last(self):
        # Conv2d NCHW->NHWC conversion placeholder (currently a no-op).
        pass

    def naive_encode(self, x, is_init_image=True):
        # x: (b, l, c, h, w) pixel frames for one temporal chunk.
        b, l, c, h, w = x.size()
        x = rearrange(x, 'b l c h w -> b c l h w').contiguous()
        z = self.encoder(x, l, True)  # downsampling [1, 4, 8, 16, 16]
        return z

    @torch.inference_mode()
    def encode(self, x):
        # b (nc cf) c h w -> (b nc) cf c h w -> encode -> (b nc) cf c h w -> b (nc cf) c h w
        chunks = list(x.split(self.frame_len, dim=1))
        for i in range(len(chunks)):
            chunks[i] = self.naive_encode(chunks[i], True)
        z = torch.cat(chunks, dim=1)

        posterior = DiagonalGaussianDistribution(z)
        return posterior.sample()

    def decode_naive(self, z, is_init=True):
        z = z.to(next(self.decoder.parameters()).dtype)
        dec = self.decoder(z, is_init)
        return dec

    @torch.inference_mode()
    def decode_original(self, z):
        # b (nc cf) c h w -> (b nc) cf c h w -> decode -> (b nc) c cf h w -> b (nc cf) c h w
        chunks = list(z.split(self.latent_len, dim=1))

        if self.world_size > 1:
            # Pad the chunk list so every rank decodes the same count, then
            # all-gather and trim the overshoot below.
            chunks_total_num = len(chunks)
            max_num_per_rank = (chunks_total_num + self.world_size - 1) // self.world_size
            rank = torch.distributed.get_rank()
            chunks_ = chunks[max_num_per_rank * rank : max_num_per_rank * (rank + 1)]
            if len(chunks_) < max_num_per_rank:
                chunks_.extend(chunks[:max_num_per_rank - len(chunks_)])
            chunks = chunks_

        for i in range(len(chunks)):
            chunks[i] = self.decode_naive(chunks[i], True).permute(0, 2, 1, 3, 4)
        x = torch.cat(chunks, dim=1)

        if self.world_size > 1:
            x_ = torch.empty([x.size(0), (self.world_size * max_num_per_rank) * self.frame_len, *x.shape[2:]], dtype=x.dtype, device=x.device)
            torch.distributed.all_gather_into_tensor(x_, x)
            x = x_[:, : chunks_total_num * self.frame_len]

        x = self.mix(x)
        return x

    def mix(self, x, smooth_scale = 0.6):
        """Cross-fade the boundary frame pair of adjacent 17-frame chunks."""
        remain_scale = smooth_scale
        mix_scale = 1. - remain_scale
        # Last frame of each chunk / first frame of the next chunk.
        front = slice(self.frame_len - 1, x.size(1) - 1, self.frame_len)
        back = slice(self.frame_len, x.size(1), self.frame_len)
        x[:, front], x[:, back] = (
            x[:, front] * remain_scale + x[:, back] * mix_scale,
            x[:, back] * remain_scale + x[:, front] * mix_scale
        )
        return x

    def single_decode(self, hidden_states, device):
        # Decode chunk-by-chunk on `device`, keeping the results on CPU.
        chunks = list(hidden_states.split(self.latent_len, dim=1))
        for i in range(len(chunks)):
            chunks[i] = self.decode_naive(chunks[i].to(device), True).permute(0, 2, 1, 3, 4).cpu()
        x = torch.cat(chunks, dim=1)
        return x

    def build_1d_mask(self, length, left_bound, right_bound, border_width):
        # Linear 0->1 ramps on non-boundary edges, for tile blending.
        x = torch.ones((length,))
        if not left_bound:
            x[:border_width] = (torch.arange(border_width) + 1) / border_width
        if not right_bound:
            x[-border_width:] = torch.flip((torch.arange(border_width) + 1) / border_width, dims=(0,))
        return x

    def build_mask(self, data, is_bound, border_width):
        _, _, _, H, W = data.shape
        h = self.build_1d_mask(H, is_bound[0], is_bound[1], border_width[0])
        w = self.build_1d_mask(W, is_bound[2], is_bound[3], border_width[1])

        h = repeat(h, "H -> H W", H=H, W=W)
        w = repeat(w, "W -> H W", H=H, W=W)

        # Per-pixel weight is the min of the two 1D ramps.
        mask = torch.stack([h, w]).min(dim=0).values
        mask = rearrange(mask, "H W -> 1 1 1 H W")
        return mask

    def tiled_decode(self, hidden_states, device, tile_size=(34, 34), tile_stride=(16, 16)):
        """Decode in overlapping spatial tiles and blend them with ramp masks.

        Latent->pixel scale is 16x spatially and 3 -> 17 frames temporally.
        """
        B, T, C, H, W = hidden_states.shape
        size_h, size_w = tile_size
        stride_h, stride_w = tile_stride

        # Split tasks
        tasks = []
        for t in range(0, T, 3):
            for h in range(0, H, stride_h):
                # Skip tiles fully covered by the previous stride.
                if (h - stride_h >= 0 and h - stride_h + size_h >= H): continue
                for w in range(0, W, stride_w):
                    if (w - stride_w >= 0 and w - stride_w + size_w >= W): continue
                    t_, h_, w_ = t + 3, h + size_h, w + size_w
                    tasks.append((t, t_, h, h_, w, w_))

        # Run
        data_device = "cpu"
        computation_device = device

        weight = torch.zeros((1, 1, T // 3 * 17, H * 16, W * 16), dtype=hidden_states.dtype, device=data_device)
        values = torch.zeros((B, 3, T // 3 * 17, H * 16, W * 16), dtype=hidden_states.dtype, device=data_device)

        for t, t_, h, h_, w, w_ in tqdm(tasks, desc="VAE decoding"):
            hidden_states_batch = hidden_states[:, t:t_, :, h:h_, w:w_].to(computation_device)
            hidden_states_batch = self.decode_naive(hidden_states_batch, True).to(data_device)

            mask = self.build_mask(
                hidden_states_batch,
                is_bound=(h == 0, h_ >= H, w == 0, w_ >= W),
                border_width=((size_h - stride_h) * 16, (size_w - stride_w) * 16)
            ).to(dtype=hidden_states.dtype, device=data_device)

            target_t = t // 3 * 17
            target_h = h * 16
            target_w = w * 16
            # Accumulate the masked tile and its weight for later averaging.
            values[
                :,
                :,
                target_t: target_t + hidden_states_batch.shape[2],
                target_h: target_h + hidden_states_batch.shape[3],
                target_w: target_w + hidden_states_batch.shape[4],
            ] += hidden_states_batch * mask
            weight[
                :,
                :,
                target_t: target_t + hidden_states_batch.shape[2],
                target_h: target_h + hidden_states_batch.shape[3],
                target_w: target_w + hidden_states_batch.shape[4],
            ] += mask
        return values / weight

    def decode(self, hidden_states, device, tiled=False, tile_size=(34, 34), tile_stride=(16, 16), smooth_scale=0.6):
        """Decode latents to video; tiled=True trades speed for lower memory."""
        hidden_states = hidden_states.to("cpu")
        if tiled:
            video = self.tiled_decode(hidden_states, device, tile_size, tile_stride)
        else:
            video = self.single_decode(hidden_states, device)
        video = self.mix(video, smooth_scale=smooth_scale)
        return video

    @staticmethod
    def state_dict_converter():
        return StepVideoVAEStateDictConverter()


class StepVideoVAEStateDictConverter:
    """Renames checkpoint keys: decoder.conv_out.* -> decoder.conv_out.conv.*."""

    def __init__(self):
        super().__init__()

    def from_diffusers(self, state_dict):
        return self.from_civitai(state_dict)

    def from_civitai(self, state_dict):
        state_dict_ = {}
        for name, param in state_dict.items():
            if name.startswith("decoder.conv_out."):
                name_ = name.replace("decoder.conv_out.", "decoder.conv_out.conv.")
            else:
                name_ = name
            state_dict_[name_] = param
        return state_dict_
a/diffsynth/models/svd_image_encoder.py b/diffsynth/models/svd_image_encoder.py deleted file mode 100644 index 4ee79c8..0000000 --- a/diffsynth/models/svd_image_encoder.py +++ /dev/null @@ -1,505 +0,0 @@ -import torch -from .sd_text_encoder import CLIPEncoderLayer - - -class CLIPVisionEmbeddings(torch.nn.Module): - def __init__(self, embed_dim=1280, image_size=224, patch_size=14, num_channels=3): - super().__init__() - - # class_embeds (This is a fixed tensor) - self.class_embedding = torch.nn.Parameter(torch.randn(1, 1, embed_dim)) - - # position_embeds - self.patch_embedding = torch.nn.Conv2d(in_channels=num_channels, out_channels=embed_dim, kernel_size=patch_size, stride=patch_size, bias=False) - - # position_embeds (This is a fixed tensor) - self.position_embeds = torch.nn.Parameter(torch.zeros(1, (image_size // patch_size) ** 2 + 1, embed_dim)) - - def forward(self, pixel_values): - batch_size = pixel_values.shape[0] - patch_embeds = self.patch_embedding(pixel_values) - patch_embeds = patch_embeds.flatten(2).transpose(1, 2) - class_embeds = self.class_embedding.repeat(batch_size, 1, 1) - embeddings = torch.cat([class_embeds, patch_embeds], dim=1) + self.position_embeds - return embeddings - - -class SVDImageEncoder(torch.nn.Module): - def __init__(self, embed_dim=1280, layer_norm_eps=1e-5, num_encoder_layers=32, encoder_intermediate_size=5120, projection_dim=1024, num_heads=16, head_dim=80): - super().__init__() - self.embeddings = CLIPVisionEmbeddings(embed_dim=embed_dim) - self.pre_layernorm = torch.nn.LayerNorm(embed_dim, eps=layer_norm_eps) - self.encoders = torch.nn.ModuleList([ - CLIPEncoderLayer(embed_dim, encoder_intermediate_size, num_heads=num_heads, head_dim=head_dim, use_quick_gelu=False) - for _ in range(num_encoder_layers)]) - self.post_layernorm = torch.nn.LayerNorm(embed_dim, eps=layer_norm_eps) - self.visual_projection = torch.nn.Linear(embed_dim, projection_dim, bias=False) - - def forward(self, pixel_values): - embeds = 
self.embeddings(pixel_values) - embeds = self.pre_layernorm(embeds) - for encoder_id, encoder in enumerate(self.encoders): - embeds = encoder(embeds) - embeds = self.post_layernorm(embeds[:, 0, :]) - embeds = self.visual_projection(embeds) - return embeds - - @staticmethod - def state_dict_converter(): - return SVDImageEncoderStateDictConverter() - - -class SVDImageEncoderStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - rename_dict = { - "vision_model.embeddings.patch_embedding.weight": "embeddings.patch_embedding.weight", - "vision_model.embeddings.class_embedding": "embeddings.class_embedding", - "vision_model.embeddings.position_embedding.weight": "embeddings.position_embeds", - "vision_model.pre_layrnorm.weight": "pre_layernorm.weight", - "vision_model.pre_layrnorm.bias": "pre_layernorm.bias", - "vision_model.post_layernorm.weight": "post_layernorm.weight", - "vision_model.post_layernorm.bias": "post_layernorm.bias", - "visual_projection.weight": "visual_projection.weight" - } - attn_rename_dict = { - "self_attn.q_proj": "attn.to_q", - "self_attn.k_proj": "attn.to_k", - "self_attn.v_proj": "attn.to_v", - "self_attn.out_proj": "attn.to_out", - "layer_norm1": "layer_norm1", - "layer_norm2": "layer_norm2", - "mlp.fc1": "fc1", - "mlp.fc2": "fc2", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if name == "vision_model.embeddings.class_embedding": - param = state_dict[name].view(1, 1, -1) - elif name == "vision_model.embeddings.position_embedding.weight": - param = state_dict[name].unsqueeze(0) - state_dict_[rename_dict[name]] = param - elif name.startswith("vision_model.encoder.layers."): - param = state_dict[name] - names = name.split(".") - layer_id, layer_type, tail = names[3], ".".join(names[4:-1]), names[-1] - name_ = ".".join(["encoders", layer_id, attn_rename_dict[layer_type], tail]) - state_dict_[name_] = param - return state_dict_ - - def from_civitai(self, 
state_dict): - rename_dict = { - "conditioner.embedders.0.open_clip.model.visual.class_embedding": "embeddings.class_embedding", - "conditioner.embedders.0.open_clip.model.visual.conv1.weight": "embeddings.patch_embedding.weight", - "conditioner.embedders.0.open_clip.model.visual.ln_post.bias": "post_layernorm.bias", - "conditioner.embedders.0.open_clip.model.visual.ln_post.weight": "post_layernorm.weight", - "conditioner.embedders.0.open_clip.model.visual.ln_pre.bias": "pre_layernorm.bias", - "conditioner.embedders.0.open_clip.model.visual.ln_pre.weight": "pre_layernorm.weight", - "conditioner.embedders.0.open_clip.model.visual.positional_embedding": "embeddings.position_embeds", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.0.attn.in_proj_bias": ['encoders.0.attn.to_q.bias', 'encoders.0.attn.to_k.bias', 'encoders.0.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.0.attn.in_proj_weight": ['encoders.0.attn.to_q.weight', 'encoders.0.attn.to_k.weight', 'encoders.0.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.0.attn.out_proj.bias": "encoders.0.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.0.attn.out_proj.weight": "encoders.0.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.0.ln_1.bias": "encoders.0.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.0.ln_1.weight": "encoders.0.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.0.ln_2.bias": "encoders.0.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.0.ln_2.weight": "encoders.0.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.0.mlp.c_fc.bias": "encoders.0.fc1.bias", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.0.mlp.c_fc.weight": "encoders.0.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.0.mlp.c_proj.bias": "encoders.0.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.0.mlp.c_proj.weight": "encoders.0.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.1.attn.in_proj_bias": ['encoders.1.attn.to_q.bias', 'encoders.1.attn.to_k.bias', 'encoders.1.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.1.attn.in_proj_weight": ['encoders.1.attn.to_q.weight', 'encoders.1.attn.to_k.weight', 'encoders.1.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.1.attn.out_proj.bias": "encoders.1.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.1.attn.out_proj.weight": "encoders.1.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.1.ln_1.bias": "encoders.1.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.1.ln_1.weight": "encoders.1.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.1.ln_2.bias": "encoders.1.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.1.ln_2.weight": "encoders.1.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.1.mlp.c_fc.bias": "encoders.1.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.1.mlp.c_fc.weight": "encoders.1.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.1.mlp.c_proj.bias": "encoders.1.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.1.mlp.c_proj.weight": "encoders.1.fc2.weight", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.10.attn.in_proj_bias": ['encoders.10.attn.to_q.bias', 'encoders.10.attn.to_k.bias', 'encoders.10.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.10.attn.in_proj_weight": ['encoders.10.attn.to_q.weight', 'encoders.10.attn.to_k.weight', 'encoders.10.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.10.attn.out_proj.bias": "encoders.10.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.10.attn.out_proj.weight": "encoders.10.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.10.ln_1.bias": "encoders.10.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.10.ln_1.weight": "encoders.10.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.10.ln_2.bias": "encoders.10.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.10.ln_2.weight": "encoders.10.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.10.mlp.c_fc.bias": "encoders.10.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.10.mlp.c_fc.weight": "encoders.10.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.10.mlp.c_proj.bias": "encoders.10.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.10.mlp.c_proj.weight": "encoders.10.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.11.attn.in_proj_bias": ['encoders.11.attn.to_q.bias', 'encoders.11.attn.to_k.bias', 'encoders.11.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.11.attn.in_proj_weight": ['encoders.11.attn.to_q.weight', 'encoders.11.attn.to_k.weight', 'encoders.11.attn.to_v.weight'], - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.11.attn.out_proj.bias": "encoders.11.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.11.attn.out_proj.weight": "encoders.11.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.11.ln_1.bias": "encoders.11.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.11.ln_1.weight": "encoders.11.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.11.ln_2.bias": "encoders.11.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.11.ln_2.weight": "encoders.11.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.11.mlp.c_fc.bias": "encoders.11.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.11.mlp.c_fc.weight": "encoders.11.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.11.mlp.c_proj.bias": "encoders.11.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.11.mlp.c_proj.weight": "encoders.11.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.12.attn.in_proj_bias": ['encoders.12.attn.to_q.bias', 'encoders.12.attn.to_k.bias', 'encoders.12.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.12.attn.in_proj_weight": ['encoders.12.attn.to_q.weight', 'encoders.12.attn.to_k.weight', 'encoders.12.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.12.attn.out_proj.bias": "encoders.12.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.12.attn.out_proj.weight": "encoders.12.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.12.ln_1.bias": "encoders.12.layer_norm1.bias", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.12.ln_1.weight": "encoders.12.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.12.ln_2.bias": "encoders.12.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.12.ln_2.weight": "encoders.12.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.12.mlp.c_fc.bias": "encoders.12.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.12.mlp.c_fc.weight": "encoders.12.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.12.mlp.c_proj.bias": "encoders.12.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.12.mlp.c_proj.weight": "encoders.12.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.13.attn.in_proj_bias": ['encoders.13.attn.to_q.bias', 'encoders.13.attn.to_k.bias', 'encoders.13.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.13.attn.in_proj_weight": ['encoders.13.attn.to_q.weight', 'encoders.13.attn.to_k.weight', 'encoders.13.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.13.attn.out_proj.bias": "encoders.13.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.13.attn.out_proj.weight": "encoders.13.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.13.ln_1.bias": "encoders.13.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.13.ln_1.weight": "encoders.13.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.13.ln_2.bias": "encoders.13.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.13.ln_2.weight": "encoders.13.layer_norm2.weight", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.13.mlp.c_fc.bias": "encoders.13.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.13.mlp.c_fc.weight": "encoders.13.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.13.mlp.c_proj.bias": "encoders.13.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.13.mlp.c_proj.weight": "encoders.13.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.14.attn.in_proj_bias": ['encoders.14.attn.to_q.bias', 'encoders.14.attn.to_k.bias', 'encoders.14.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.14.attn.in_proj_weight": ['encoders.14.attn.to_q.weight', 'encoders.14.attn.to_k.weight', 'encoders.14.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.14.attn.out_proj.bias": "encoders.14.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.14.attn.out_proj.weight": "encoders.14.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.14.ln_1.bias": "encoders.14.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.14.ln_1.weight": "encoders.14.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.14.ln_2.bias": "encoders.14.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.14.ln_2.weight": "encoders.14.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.14.mlp.c_fc.bias": "encoders.14.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.14.mlp.c_fc.weight": "encoders.14.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.14.mlp.c_proj.bias": "encoders.14.fc2.bias", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.14.mlp.c_proj.weight": "encoders.14.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.15.attn.in_proj_bias": ['encoders.15.attn.to_q.bias', 'encoders.15.attn.to_k.bias', 'encoders.15.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.15.attn.in_proj_weight": ['encoders.15.attn.to_q.weight', 'encoders.15.attn.to_k.weight', 'encoders.15.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.15.attn.out_proj.bias": "encoders.15.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.15.attn.out_proj.weight": "encoders.15.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.15.ln_1.bias": "encoders.15.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.15.ln_1.weight": "encoders.15.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.15.ln_2.bias": "encoders.15.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.15.ln_2.weight": "encoders.15.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.15.mlp.c_fc.bias": "encoders.15.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.15.mlp.c_fc.weight": "encoders.15.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.15.mlp.c_proj.bias": "encoders.15.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.15.mlp.c_proj.weight": "encoders.15.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.16.attn.in_proj_bias": ['encoders.16.attn.to_q.bias', 'encoders.16.attn.to_k.bias', 'encoders.16.attn.to_v.bias'], - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.16.attn.in_proj_weight": ['encoders.16.attn.to_q.weight', 'encoders.16.attn.to_k.weight', 'encoders.16.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.16.attn.out_proj.bias": "encoders.16.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.16.attn.out_proj.weight": "encoders.16.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.16.ln_1.bias": "encoders.16.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.16.ln_1.weight": "encoders.16.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.16.ln_2.bias": "encoders.16.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.16.ln_2.weight": "encoders.16.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.16.mlp.c_fc.bias": "encoders.16.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.16.mlp.c_fc.weight": "encoders.16.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.16.mlp.c_proj.bias": "encoders.16.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.16.mlp.c_proj.weight": "encoders.16.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.17.attn.in_proj_bias": ['encoders.17.attn.to_q.bias', 'encoders.17.attn.to_k.bias', 'encoders.17.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.17.attn.in_proj_weight": ['encoders.17.attn.to_q.weight', 'encoders.17.attn.to_k.weight', 'encoders.17.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.17.attn.out_proj.bias": "encoders.17.attn.to_out.bias", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.17.attn.out_proj.weight": "encoders.17.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.17.ln_1.bias": "encoders.17.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.17.ln_1.weight": "encoders.17.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.17.ln_2.bias": "encoders.17.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.17.ln_2.weight": "encoders.17.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.17.mlp.c_fc.bias": "encoders.17.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.17.mlp.c_fc.weight": "encoders.17.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.17.mlp.c_proj.bias": "encoders.17.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.17.mlp.c_proj.weight": "encoders.17.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.18.attn.in_proj_bias": ['encoders.18.attn.to_q.bias', 'encoders.18.attn.to_k.bias', 'encoders.18.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.18.attn.in_proj_weight": ['encoders.18.attn.to_q.weight', 'encoders.18.attn.to_k.weight', 'encoders.18.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.18.attn.out_proj.bias": "encoders.18.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.18.attn.out_proj.weight": "encoders.18.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.18.ln_1.bias": "encoders.18.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.18.ln_1.weight": "encoders.18.layer_norm1.weight", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.18.ln_2.bias": "encoders.18.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.18.ln_2.weight": "encoders.18.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.18.mlp.c_fc.bias": "encoders.18.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.18.mlp.c_fc.weight": "encoders.18.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.18.mlp.c_proj.bias": "encoders.18.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.18.mlp.c_proj.weight": "encoders.18.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.19.attn.in_proj_bias": ['encoders.19.attn.to_q.bias', 'encoders.19.attn.to_k.bias', 'encoders.19.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.19.attn.in_proj_weight": ['encoders.19.attn.to_q.weight', 'encoders.19.attn.to_k.weight', 'encoders.19.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.19.attn.out_proj.bias": "encoders.19.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.19.attn.out_proj.weight": "encoders.19.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.19.ln_1.bias": "encoders.19.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.19.ln_1.weight": "encoders.19.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.19.ln_2.bias": "encoders.19.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.19.ln_2.weight": "encoders.19.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.19.mlp.c_fc.bias": "encoders.19.fc1.bias", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.19.mlp.c_fc.weight": "encoders.19.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.19.mlp.c_proj.bias": "encoders.19.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.19.mlp.c_proj.weight": "encoders.19.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.2.attn.in_proj_bias": ['encoders.2.attn.to_q.bias', 'encoders.2.attn.to_k.bias', 'encoders.2.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.2.attn.in_proj_weight": ['encoders.2.attn.to_q.weight', 'encoders.2.attn.to_k.weight', 'encoders.2.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.2.attn.out_proj.bias": "encoders.2.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.2.attn.out_proj.weight": "encoders.2.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.2.ln_1.bias": "encoders.2.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.2.ln_1.weight": "encoders.2.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.2.ln_2.bias": "encoders.2.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.2.ln_2.weight": "encoders.2.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.2.mlp.c_fc.bias": "encoders.2.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.2.mlp.c_fc.weight": "encoders.2.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.2.mlp.c_proj.bias": "encoders.2.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.2.mlp.c_proj.weight": "encoders.2.fc2.weight", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.20.attn.in_proj_bias": ['encoders.20.attn.to_q.bias', 'encoders.20.attn.to_k.bias', 'encoders.20.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.20.attn.in_proj_weight": ['encoders.20.attn.to_q.weight', 'encoders.20.attn.to_k.weight', 'encoders.20.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.20.attn.out_proj.bias": "encoders.20.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.20.attn.out_proj.weight": "encoders.20.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.20.ln_1.bias": "encoders.20.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.20.ln_1.weight": "encoders.20.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.20.ln_2.bias": "encoders.20.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.20.ln_2.weight": "encoders.20.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.20.mlp.c_fc.bias": "encoders.20.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.20.mlp.c_fc.weight": "encoders.20.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.20.mlp.c_proj.bias": "encoders.20.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.20.mlp.c_proj.weight": "encoders.20.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.21.attn.in_proj_bias": ['encoders.21.attn.to_q.bias', 'encoders.21.attn.to_k.bias', 'encoders.21.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.21.attn.in_proj_weight": ['encoders.21.attn.to_q.weight', 'encoders.21.attn.to_k.weight', 'encoders.21.attn.to_v.weight'], - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.21.attn.out_proj.bias": "encoders.21.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.21.attn.out_proj.weight": "encoders.21.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.21.ln_1.bias": "encoders.21.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.21.ln_1.weight": "encoders.21.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.21.ln_2.bias": "encoders.21.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.21.ln_2.weight": "encoders.21.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.21.mlp.c_fc.bias": "encoders.21.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.21.mlp.c_fc.weight": "encoders.21.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.21.mlp.c_proj.bias": "encoders.21.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.21.mlp.c_proj.weight": "encoders.21.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.22.attn.in_proj_bias": ['encoders.22.attn.to_q.bias', 'encoders.22.attn.to_k.bias', 'encoders.22.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.22.attn.in_proj_weight": ['encoders.22.attn.to_q.weight', 'encoders.22.attn.to_k.weight', 'encoders.22.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.22.attn.out_proj.bias": "encoders.22.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.22.attn.out_proj.weight": "encoders.22.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.22.ln_1.bias": "encoders.22.layer_norm1.bias", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.22.ln_1.weight": "encoders.22.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.22.ln_2.bias": "encoders.22.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.22.ln_2.weight": "encoders.22.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.22.mlp.c_fc.bias": "encoders.22.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.22.mlp.c_fc.weight": "encoders.22.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.22.mlp.c_proj.bias": "encoders.22.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.22.mlp.c_proj.weight": "encoders.22.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.23.attn.in_proj_bias": ['encoders.23.attn.to_q.bias', 'encoders.23.attn.to_k.bias', 'encoders.23.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.23.attn.in_proj_weight": ['encoders.23.attn.to_q.weight', 'encoders.23.attn.to_k.weight', 'encoders.23.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.23.attn.out_proj.bias": "encoders.23.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.23.attn.out_proj.weight": "encoders.23.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.23.ln_1.bias": "encoders.23.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.23.ln_1.weight": "encoders.23.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.23.ln_2.bias": "encoders.23.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.23.ln_2.weight": "encoders.23.layer_norm2.weight", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.23.mlp.c_fc.bias": "encoders.23.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.23.mlp.c_fc.weight": "encoders.23.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.23.mlp.c_proj.bias": "encoders.23.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.23.mlp.c_proj.weight": "encoders.23.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.24.attn.in_proj_bias": ['encoders.24.attn.to_q.bias', 'encoders.24.attn.to_k.bias', 'encoders.24.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.24.attn.in_proj_weight": ['encoders.24.attn.to_q.weight', 'encoders.24.attn.to_k.weight', 'encoders.24.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.24.attn.out_proj.bias": "encoders.24.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.24.attn.out_proj.weight": "encoders.24.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.24.ln_1.bias": "encoders.24.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.24.ln_1.weight": "encoders.24.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.24.ln_2.bias": "encoders.24.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.24.ln_2.weight": "encoders.24.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.24.mlp.c_fc.bias": "encoders.24.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.24.mlp.c_fc.weight": "encoders.24.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.24.mlp.c_proj.bias": "encoders.24.fc2.bias", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.24.mlp.c_proj.weight": "encoders.24.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.25.attn.in_proj_bias": ['encoders.25.attn.to_q.bias', 'encoders.25.attn.to_k.bias', 'encoders.25.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.25.attn.in_proj_weight": ['encoders.25.attn.to_q.weight', 'encoders.25.attn.to_k.weight', 'encoders.25.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.25.attn.out_proj.bias": "encoders.25.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.25.attn.out_proj.weight": "encoders.25.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.25.ln_1.bias": "encoders.25.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.25.ln_1.weight": "encoders.25.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.25.ln_2.bias": "encoders.25.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.25.ln_2.weight": "encoders.25.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.25.mlp.c_fc.bias": "encoders.25.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.25.mlp.c_fc.weight": "encoders.25.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.25.mlp.c_proj.bias": "encoders.25.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.25.mlp.c_proj.weight": "encoders.25.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.26.attn.in_proj_bias": ['encoders.26.attn.to_q.bias', 'encoders.26.attn.to_k.bias', 'encoders.26.attn.to_v.bias'], - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.26.attn.in_proj_weight": ['encoders.26.attn.to_q.weight', 'encoders.26.attn.to_k.weight', 'encoders.26.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.26.attn.out_proj.bias": "encoders.26.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.26.attn.out_proj.weight": "encoders.26.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.26.ln_1.bias": "encoders.26.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.26.ln_1.weight": "encoders.26.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.26.ln_2.bias": "encoders.26.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.26.ln_2.weight": "encoders.26.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.26.mlp.c_fc.bias": "encoders.26.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.26.mlp.c_fc.weight": "encoders.26.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.26.mlp.c_proj.bias": "encoders.26.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.26.mlp.c_proj.weight": "encoders.26.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.27.attn.in_proj_bias": ['encoders.27.attn.to_q.bias', 'encoders.27.attn.to_k.bias', 'encoders.27.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.27.attn.in_proj_weight": ['encoders.27.attn.to_q.weight', 'encoders.27.attn.to_k.weight', 'encoders.27.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.27.attn.out_proj.bias": "encoders.27.attn.to_out.bias", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.27.attn.out_proj.weight": "encoders.27.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.27.ln_1.bias": "encoders.27.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.27.ln_1.weight": "encoders.27.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.27.ln_2.bias": "encoders.27.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.27.ln_2.weight": "encoders.27.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.27.mlp.c_fc.bias": "encoders.27.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.27.mlp.c_fc.weight": "encoders.27.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.27.mlp.c_proj.bias": "encoders.27.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.27.mlp.c_proj.weight": "encoders.27.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.28.attn.in_proj_bias": ['encoders.28.attn.to_q.bias', 'encoders.28.attn.to_k.bias', 'encoders.28.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.28.attn.in_proj_weight": ['encoders.28.attn.to_q.weight', 'encoders.28.attn.to_k.weight', 'encoders.28.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.28.attn.out_proj.bias": "encoders.28.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.28.attn.out_proj.weight": "encoders.28.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.28.ln_1.bias": "encoders.28.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.28.ln_1.weight": "encoders.28.layer_norm1.weight", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.28.ln_2.bias": "encoders.28.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.28.ln_2.weight": "encoders.28.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.28.mlp.c_fc.bias": "encoders.28.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.28.mlp.c_fc.weight": "encoders.28.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.28.mlp.c_proj.bias": "encoders.28.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.28.mlp.c_proj.weight": "encoders.28.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.29.attn.in_proj_bias": ['encoders.29.attn.to_q.bias', 'encoders.29.attn.to_k.bias', 'encoders.29.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.29.attn.in_proj_weight": ['encoders.29.attn.to_q.weight', 'encoders.29.attn.to_k.weight', 'encoders.29.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.29.attn.out_proj.bias": "encoders.29.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.29.attn.out_proj.weight": "encoders.29.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.29.ln_1.bias": "encoders.29.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.29.ln_1.weight": "encoders.29.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.29.ln_2.bias": "encoders.29.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.29.ln_2.weight": "encoders.29.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.29.mlp.c_fc.bias": "encoders.29.fc1.bias", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.29.mlp.c_fc.weight": "encoders.29.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.29.mlp.c_proj.bias": "encoders.29.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.29.mlp.c_proj.weight": "encoders.29.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.3.attn.in_proj_bias": ['encoders.3.attn.to_q.bias', 'encoders.3.attn.to_k.bias', 'encoders.3.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.3.attn.in_proj_weight": ['encoders.3.attn.to_q.weight', 'encoders.3.attn.to_k.weight', 'encoders.3.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.3.attn.out_proj.bias": "encoders.3.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.3.attn.out_proj.weight": "encoders.3.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.3.ln_1.bias": "encoders.3.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.3.ln_1.weight": "encoders.3.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.3.ln_2.bias": "encoders.3.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.3.ln_2.weight": "encoders.3.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.3.mlp.c_fc.bias": "encoders.3.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.3.mlp.c_fc.weight": "encoders.3.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.3.mlp.c_proj.bias": "encoders.3.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.3.mlp.c_proj.weight": "encoders.3.fc2.weight", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.30.attn.in_proj_bias": ['encoders.30.attn.to_q.bias', 'encoders.30.attn.to_k.bias', 'encoders.30.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.30.attn.in_proj_weight": ['encoders.30.attn.to_q.weight', 'encoders.30.attn.to_k.weight', 'encoders.30.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.30.attn.out_proj.bias": "encoders.30.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.30.attn.out_proj.weight": "encoders.30.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.30.ln_1.bias": "encoders.30.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.30.ln_1.weight": "encoders.30.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.30.ln_2.bias": "encoders.30.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.30.ln_2.weight": "encoders.30.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.30.mlp.c_fc.bias": "encoders.30.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.30.mlp.c_fc.weight": "encoders.30.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.30.mlp.c_proj.bias": "encoders.30.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.30.mlp.c_proj.weight": "encoders.30.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.31.attn.in_proj_bias": ['encoders.31.attn.to_q.bias', 'encoders.31.attn.to_k.bias', 'encoders.31.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.31.attn.in_proj_weight": ['encoders.31.attn.to_q.weight', 'encoders.31.attn.to_k.weight', 'encoders.31.attn.to_v.weight'], - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.31.attn.out_proj.bias": "encoders.31.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.31.attn.out_proj.weight": "encoders.31.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.31.ln_1.bias": "encoders.31.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.31.ln_1.weight": "encoders.31.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.31.ln_2.bias": "encoders.31.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.31.ln_2.weight": "encoders.31.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.31.mlp.c_fc.bias": "encoders.31.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.31.mlp.c_fc.weight": "encoders.31.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.31.mlp.c_proj.bias": "encoders.31.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.31.mlp.c_proj.weight": "encoders.31.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.4.attn.in_proj_bias": ['encoders.4.attn.to_q.bias', 'encoders.4.attn.to_k.bias', 'encoders.4.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.4.attn.in_proj_weight": ['encoders.4.attn.to_q.weight', 'encoders.4.attn.to_k.weight', 'encoders.4.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.4.attn.out_proj.bias": "encoders.4.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.4.attn.out_proj.weight": "encoders.4.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.4.ln_1.bias": "encoders.4.layer_norm1.bias", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.4.ln_1.weight": "encoders.4.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.4.ln_2.bias": "encoders.4.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.4.ln_2.weight": "encoders.4.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.4.mlp.c_fc.bias": "encoders.4.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.4.mlp.c_fc.weight": "encoders.4.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.4.mlp.c_proj.bias": "encoders.4.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.4.mlp.c_proj.weight": "encoders.4.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.5.attn.in_proj_bias": ['encoders.5.attn.to_q.bias', 'encoders.5.attn.to_k.bias', 'encoders.5.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.5.attn.in_proj_weight": ['encoders.5.attn.to_q.weight', 'encoders.5.attn.to_k.weight', 'encoders.5.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.5.attn.out_proj.bias": "encoders.5.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.5.attn.out_proj.weight": "encoders.5.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.5.ln_1.bias": "encoders.5.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.5.ln_1.weight": "encoders.5.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.5.ln_2.bias": "encoders.5.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.5.ln_2.weight": "encoders.5.layer_norm2.weight", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.5.mlp.c_fc.bias": "encoders.5.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.5.mlp.c_fc.weight": "encoders.5.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.5.mlp.c_proj.bias": "encoders.5.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.5.mlp.c_proj.weight": "encoders.5.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.6.attn.in_proj_bias": ['encoders.6.attn.to_q.bias', 'encoders.6.attn.to_k.bias', 'encoders.6.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.6.attn.in_proj_weight": ['encoders.6.attn.to_q.weight', 'encoders.6.attn.to_k.weight', 'encoders.6.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.6.attn.out_proj.bias": "encoders.6.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.6.attn.out_proj.weight": "encoders.6.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.6.ln_1.bias": "encoders.6.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.6.ln_1.weight": "encoders.6.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.6.ln_2.bias": "encoders.6.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.6.ln_2.weight": "encoders.6.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.6.mlp.c_fc.bias": "encoders.6.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.6.mlp.c_fc.weight": "encoders.6.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.6.mlp.c_proj.bias": "encoders.6.fc2.bias", - 
"conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.6.mlp.c_proj.weight": "encoders.6.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.7.attn.in_proj_bias": ['encoders.7.attn.to_q.bias', 'encoders.7.attn.to_k.bias', 'encoders.7.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.7.attn.in_proj_weight": ['encoders.7.attn.to_q.weight', 'encoders.7.attn.to_k.weight', 'encoders.7.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.7.attn.out_proj.bias": "encoders.7.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.7.attn.out_proj.weight": "encoders.7.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.7.ln_1.bias": "encoders.7.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.7.ln_1.weight": "encoders.7.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.7.ln_2.bias": "encoders.7.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.7.ln_2.weight": "encoders.7.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.7.mlp.c_fc.bias": "encoders.7.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.7.mlp.c_fc.weight": "encoders.7.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.7.mlp.c_proj.bias": "encoders.7.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.7.mlp.c_proj.weight": "encoders.7.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.8.attn.in_proj_bias": ['encoders.8.attn.to_q.bias', 'encoders.8.attn.to_k.bias', 'encoders.8.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.8.attn.in_proj_weight": ['encoders.8.attn.to_q.weight', 
'encoders.8.attn.to_k.weight', 'encoders.8.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.8.attn.out_proj.bias": "encoders.8.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.8.attn.out_proj.weight": "encoders.8.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.8.ln_1.bias": "encoders.8.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.8.ln_1.weight": "encoders.8.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.8.ln_2.bias": "encoders.8.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.8.ln_2.weight": "encoders.8.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.8.mlp.c_fc.bias": "encoders.8.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.8.mlp.c_fc.weight": "encoders.8.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.8.mlp.c_proj.bias": "encoders.8.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.8.mlp.c_proj.weight": "encoders.8.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.9.attn.in_proj_bias": ['encoders.9.attn.to_q.bias', 'encoders.9.attn.to_k.bias', 'encoders.9.attn.to_v.bias'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.9.attn.in_proj_weight": ['encoders.9.attn.to_q.weight', 'encoders.9.attn.to_k.weight', 'encoders.9.attn.to_v.weight'], - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.9.attn.out_proj.bias": "encoders.9.attn.to_out.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.9.attn.out_proj.weight": "encoders.9.attn.to_out.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.9.ln_1.bias": 
"encoders.9.layer_norm1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.9.ln_1.weight": "encoders.9.layer_norm1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.9.ln_2.bias": "encoders.9.layer_norm2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.9.ln_2.weight": "encoders.9.layer_norm2.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.9.mlp.c_fc.bias": "encoders.9.fc1.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.9.mlp.c_fc.weight": "encoders.9.fc1.weight", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.9.mlp.c_proj.bias": "encoders.9.fc2.bias", - "conditioner.embedders.0.open_clip.model.visual.transformer.resblocks.9.mlp.c_proj.weight": "encoders.9.fc2.weight", - "conditioner.embedders.0.open_clip.model.visual.proj": "visual_projection.weight", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if name == "conditioner.embedders.0.open_clip.model.visual.class_embedding": - param = param.reshape((1, 1, param.shape[0])) - elif name == "conditioner.embedders.0.open_clip.model.visual.positional_embedding": - param = param.reshape((1, param.shape[0], param.shape[1])) - elif name == "conditioner.embedders.0.open_clip.model.visual.proj": - param = param.T - if isinstance(rename_dict[name], str): - state_dict_[rename_dict[name]] = param - else: - length = param.shape[0] // 3 - for i, rename in enumerate(rename_dict[name]): - state_dict_[rename] = param[i*length: i*length+length] - return state_dict_ diff --git a/diffsynth/models/svd_unet.py b/diffsynth/models/svd_unet.py deleted file mode 100644 index f50eb0d..0000000 --- a/diffsynth/models/svd_unet.py +++ /dev/null @@ -1,2014 +0,0 @@ -import torch, math -from einops import rearrange, repeat -from .sd_unet import Timesteps, PushBlock, PopBlock, Attention, GEGLU, ResnetBlock, AttentionBlock, 
DownSampler, UpSampler - - -class TemporalResnetBlock(torch.nn.Module): - def __init__(self, in_channels, out_channels, temb_channels=None, groups=32, eps=1e-5): - super().__init__() - self.norm1 = torch.nn.GroupNorm(num_groups=groups, num_channels=in_channels, eps=eps, affine=True) - self.conv1 = torch.nn.Conv3d(in_channels, out_channels, kernel_size=(3, 1, 1), stride=(1, 1, 1), padding=(1, 0, 0)) - if temb_channels is not None: - self.time_emb_proj = torch.nn.Linear(temb_channels, out_channels) - self.norm2 = torch.nn.GroupNorm(num_groups=groups, num_channels=out_channels, eps=eps, affine=True) - self.conv2 = torch.nn.Conv3d(out_channels, out_channels, kernel_size=(3, 1, 1), stride=(1, 1, 1), padding=(1, 0, 0)) - self.nonlinearity = torch.nn.SiLU() - self.conv_shortcut = None - if in_channels != out_channels: - self.conv_shortcut = torch.nn.Conv3d(in_channels, out_channels, kernel_size=1, stride=1, padding=0, bias=True) - - def forward(self, hidden_states, time_emb, text_emb, res_stack, **kwargs): - x = rearrange(hidden_states, "f c h w -> 1 c f h w") - x = self.norm1(x) - x = self.nonlinearity(x) - x = self.conv1(x) - if time_emb is not None: - emb = self.nonlinearity(time_emb) - emb = self.time_emb_proj(emb) - emb = repeat(emb, "b c -> b c f 1 1", f=hidden_states.shape[0]) - x = x + emb - x = self.norm2(x) - x = self.nonlinearity(x) - x = self.conv2(x) - if self.conv_shortcut is not None: - hidden_states = self.conv_shortcut(hidden_states) - x = rearrange(x[0], "c f h w -> f c h w") - hidden_states = hidden_states + x - return hidden_states, time_emb, text_emb, res_stack - - -def get_timestep_embedding( - timesteps: torch.Tensor, - embedding_dim: int, - flip_sin_to_cos: bool = False, - downscale_freq_shift: float = 1, - scale: float = 1, - max_period: int = 10000, - computation_device = None, - align_dtype_to_timestep = False, -): - """ - This matches the implementation in Denoising Diffusion Probabilistic Models: Create sinusoidal timestep embeddings. 
- - :param timesteps: a 1-D Tensor of N indices, one per batch element. - These may be fractional. - :param embedding_dim: the dimension of the output. :param max_period: controls the minimum frequency of the - embeddings. :return: an [N x dim] Tensor of positional embeddings. - """ - assert len(timesteps.shape) == 1, "Timesteps should be a 1d-array" - - half_dim = embedding_dim // 2 - exponent = -math.log(max_period) * torch.arange( - start=0, end=half_dim, dtype=torch.float32, device=timesteps.device if computation_device is None else computation_device - ) - exponent = exponent / (half_dim - downscale_freq_shift) - - emb = torch.exp(exponent).to(timesteps.device) - if align_dtype_to_timestep: - emb = emb.to(timesteps.dtype) - emb = timesteps[:, None].float() * emb[None, :] - - # scale embeddings - emb = scale * emb - - # concat sine and cosine embeddings - emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=-1) - - # flip sine and cosine embeddings - if flip_sin_to_cos: - emb = torch.cat([emb[:, half_dim:], emb[:, :half_dim]], dim=-1) - - # zero pad - if embedding_dim % 2 == 1: - emb = torch.nn.functional.pad(emb, (0, 1, 0, 0)) - return emb - - -class TemporalTimesteps(torch.nn.Module): - def __init__(self, num_channels: int, flip_sin_to_cos: bool, downscale_freq_shift: float, computation_device = None, scale=1, align_dtype_to_timestep=False): - super().__init__() - self.num_channels = num_channels - self.flip_sin_to_cos = flip_sin_to_cos - self.downscale_freq_shift = downscale_freq_shift - self.computation_device = computation_device - self.scale = scale - self.align_dtype_to_timestep = align_dtype_to_timestep - - def forward(self, timesteps): - t_emb = get_timestep_embedding( - timesteps, - self.num_channels, - flip_sin_to_cos=self.flip_sin_to_cos, - downscale_freq_shift=self.downscale_freq_shift, - computation_device=self.computation_device, - scale=self.scale, - align_dtype_to_timestep=self.align_dtype_to_timestep, - ) - return t_emb - - -class 
TrainableTemporalTimesteps(torch.nn.Module): - def __init__(self, num_channels: int, flip_sin_to_cos: bool, downscale_freq_shift: float, num_frames: int): - super().__init__() - timesteps = PositionalID()(num_frames) - embeddings = get_timestep_embedding(timesteps, num_channels, flip_sin_to_cos, downscale_freq_shift) - self.embeddings = torch.nn.Parameter(embeddings) - - def forward(self, timesteps): - t_emb = self.embeddings[timesteps] - return t_emb - - -class PositionalID(torch.nn.Module): - def __init__(self, max_id=25, repeat_length=20): - super().__init__() - self.max_id = max_id - self.repeat_length = repeat_length - - def frame_id_to_position_id(self, frame_id): - if frame_id < self.max_id: - position_id = frame_id - else: - position_id = (frame_id - self.max_id) % (self.repeat_length * 2) - if position_id < self.repeat_length: - position_id = self.max_id - 2 - position_id - else: - position_id = self.max_id - 2 * self.repeat_length + position_id - return position_id - - def forward(self, num_frames, pivot_frame_id=0): - position_ids = [self.frame_id_to_position_id(abs(i-pivot_frame_id)) for i in range(num_frames)] - position_ids = torch.IntTensor(position_ids) - return position_ids - - -class TemporalAttentionBlock(torch.nn.Module): - - def __init__(self, num_attention_heads, attention_head_dim, in_channels, cross_attention_dim=None, add_positional_conv=None): - super().__init__() - - self.positional_embedding_proj = torch.nn.Sequential( - torch.nn.Linear(in_channels, in_channels * 4), - torch.nn.SiLU(), - torch.nn.Linear(in_channels * 4, in_channels) - ) - if add_positional_conv is not None: - self.positional_embedding = TrainableTemporalTimesteps(in_channels, True, 0, add_positional_conv) - self.positional_conv = torch.nn.Conv3d(in_channels, in_channels, kernel_size=3, padding=1, padding_mode="reflect") - else: - self.positional_embedding = TemporalTimesteps(in_channels, True, 0) - self.positional_conv = None - - self.norm_in = 
torch.nn.LayerNorm(in_channels) - self.act_fn_in = GEGLU(in_channels, in_channels * 4) - self.ff_in = torch.nn.Linear(in_channels * 4, in_channels) - - self.norm1 = torch.nn.LayerNorm(in_channels) - self.attn1 = Attention( - q_dim=in_channels, - num_heads=num_attention_heads, - head_dim=attention_head_dim, - bias_out=True - ) - - self.norm2 = torch.nn.LayerNorm(in_channels) - self.attn2 = Attention( - q_dim=in_channels, - kv_dim=cross_attention_dim, - num_heads=num_attention_heads, - head_dim=attention_head_dim, - bias_out=True - ) - - self.norm_out = torch.nn.LayerNorm(in_channels) - self.act_fn_out = GEGLU(in_channels, in_channels * 4) - self.ff_out = torch.nn.Linear(in_channels * 4, in_channels) - - def forward(self, hidden_states, time_emb, text_emb, res_stack, **kwargs): - - batch, inner_dim, height, width = hidden_states.shape - pos_emb = torch.arange(batch) - pos_emb = self.positional_embedding(pos_emb).to(dtype=hidden_states.dtype, device=hidden_states.device) - pos_emb = self.positional_embedding_proj(pos_emb) - - hidden_states = rearrange(hidden_states, "T C H W -> 1 C T H W") + rearrange(pos_emb, "T C -> 1 C T 1 1") - if self.positional_conv is not None: - hidden_states = self.positional_conv(hidden_states) - hidden_states = rearrange(hidden_states[0], "C T H W -> (H W) T C") - - residual = hidden_states - hidden_states = self.norm_in(hidden_states) - hidden_states = self.act_fn_in(hidden_states) - hidden_states = self.ff_in(hidden_states) - hidden_states = hidden_states + residual - - norm_hidden_states = self.norm1(hidden_states) - attn_output = self.attn1(norm_hidden_states, encoder_hidden_states=None) - hidden_states = attn_output + hidden_states - - norm_hidden_states = self.norm2(hidden_states) - attn_output = self.attn2(norm_hidden_states, encoder_hidden_states=text_emb.repeat(height * width, 1)) - hidden_states = attn_output + hidden_states - - residual = hidden_states - hidden_states = self.norm_out(hidden_states) - hidden_states = 
self.act_fn_out(hidden_states) - hidden_states = self.ff_out(hidden_states) - hidden_states = hidden_states + residual - - hidden_states = hidden_states.reshape(height, width, batch, inner_dim).permute(2, 3, 0, 1) - - return hidden_states, time_emb, text_emb, res_stack - - -class PopMixBlock(torch.nn.Module): - def __init__(self, in_channels=None): - super().__init__() - self.mix_factor = torch.nn.Parameter(torch.Tensor([0.5])) - self.need_proj = in_channels is not None - if self.need_proj: - self.proj = torch.nn.Linear(in_channels, in_channels) - - def forward(self, hidden_states, time_emb, text_emb, res_stack, **kwargs): - res_hidden_states = res_stack.pop() - alpha = torch.sigmoid(self.mix_factor) - hidden_states = alpha * res_hidden_states + (1 - alpha) * hidden_states - if self.need_proj: - hidden_states = hidden_states.permute(0, 2, 3, 1) - hidden_states = self.proj(hidden_states) - hidden_states = hidden_states.permute(0, 3, 1, 2) - res_hidden_states = res_stack.pop() - hidden_states = hidden_states + res_hidden_states - return hidden_states, time_emb, text_emb, res_stack - - -class SVDUNet(torch.nn.Module): - def __init__(self, add_positional_conv=None): - super().__init__() - self.time_proj = Timesteps(320) - self.time_embedding = torch.nn.Sequential( - torch.nn.Linear(320, 1280), - torch.nn.SiLU(), - torch.nn.Linear(1280, 1280) - ) - self.add_time_proj = Timesteps(256) - self.add_time_embedding = torch.nn.Sequential( - torch.nn.Linear(768, 1280), - torch.nn.SiLU(), - torch.nn.Linear(1280, 1280) - ) - self.conv_in = torch.nn.Conv2d(8, 320, kernel_size=3, padding=1) - - self.blocks = torch.nn.ModuleList([ - # CrossAttnDownBlockSpatioTemporal - ResnetBlock(320, 320, 1280, eps=1e-6), PushBlock(), TemporalResnetBlock(320, 320, 1280, eps=1e-6), PopMixBlock(), PushBlock(), - AttentionBlock(5, 64, 320, 1, 1024, need_proj_out=False), PushBlock(), TemporalAttentionBlock(5, 64, 320, 1024, add_positional_conv), PopMixBlock(320), PushBlock(), - ResnetBlock(320, 320, 
1280, eps=1e-6), PushBlock(), TemporalResnetBlock(320, 320, 1280, eps=1e-6), PopMixBlock(), PushBlock(), - AttentionBlock(5, 64, 320, 1, 1024, need_proj_out=False), PushBlock(), TemporalAttentionBlock(5, 64, 320, 1024, add_positional_conv), PopMixBlock(320), PushBlock(), - DownSampler(320), PushBlock(), - # CrossAttnDownBlockSpatioTemporal - ResnetBlock(320, 640, 1280, eps=1e-6), PushBlock(), TemporalResnetBlock(640, 640, 1280, eps=1e-6), PopMixBlock(), PushBlock(), - AttentionBlock(10, 64, 640, 1, 1024, need_proj_out=False), PushBlock(), TemporalAttentionBlock(10, 64, 640, 1024, add_positional_conv), PopMixBlock(640), PushBlock(), - ResnetBlock(640, 640, 1280, eps=1e-6), PushBlock(), TemporalResnetBlock(640, 640, 1280, eps=1e-6), PopMixBlock(), PushBlock(), - AttentionBlock(10, 64, 640, 1, 1024, need_proj_out=False), PushBlock(), TemporalAttentionBlock(10, 64, 640, 1024, add_positional_conv), PopMixBlock(640), PushBlock(), - DownSampler(640), PushBlock(), - # CrossAttnDownBlockSpatioTemporal - ResnetBlock(640, 1280, 1280, eps=1e-6), PushBlock(), TemporalResnetBlock(1280, 1280, 1280, eps=1e-6), PopMixBlock(), PushBlock(), - AttentionBlock(20, 64, 1280, 1, 1024, need_proj_out=False), PushBlock(), TemporalAttentionBlock(20, 64, 1280, 1024, add_positional_conv), PopMixBlock(1280), PushBlock(), - ResnetBlock(1280, 1280, 1280, eps=1e-6), PushBlock(), TemporalResnetBlock(1280, 1280, 1280, eps=1e-6), PopMixBlock(), PushBlock(), - AttentionBlock(20, 64, 1280, 1, 1024, need_proj_out=False), PushBlock(), TemporalAttentionBlock(20, 64, 1280, 1024, add_positional_conv), PopMixBlock(1280), PushBlock(), - DownSampler(1280), PushBlock(), - # DownBlockSpatioTemporal - ResnetBlock(1280, 1280, 1280, eps=1e-5), PushBlock(), TemporalResnetBlock(1280, 1280, 1280, eps=1e-5), PopMixBlock(), PushBlock(), - ResnetBlock(1280, 1280, 1280, eps=1e-5), PushBlock(), TemporalResnetBlock(1280, 1280, 1280, eps=1e-5), PopMixBlock(), PushBlock(), - # UNetMidBlockSpatioTemporal - ResnetBlock(1280, 
1280, 1280, eps=1e-5), PushBlock(), TemporalResnetBlock(1280, 1280, 1280, eps=1e-5), PopMixBlock(), PushBlock(), - AttentionBlock(20, 64, 1280, 1, 1024, need_proj_out=False), PushBlock(), TemporalAttentionBlock(20, 64, 1280, 1024, add_positional_conv), PopMixBlock(1280), - ResnetBlock(1280, 1280, 1280, eps=1e-5), PushBlock(), TemporalResnetBlock(1280, 1280, 1280, eps=1e-5), PopMixBlock(), - # UpBlockSpatioTemporal - PopBlock(), ResnetBlock(2560, 1280, 1280, eps=1e-6), PushBlock(), TemporalResnetBlock(1280, 1280, 1280, eps=1e-5), PopMixBlock(), - PopBlock(), ResnetBlock(2560, 1280, 1280, eps=1e-6), PushBlock(), TemporalResnetBlock(1280, 1280, 1280, eps=1e-5), PopMixBlock(), - PopBlock(), ResnetBlock(2560, 1280, 1280, eps=1e-6), PushBlock(), TemporalResnetBlock(1280, 1280, 1280, eps=1e-5), PopMixBlock(), - UpSampler(1280), - # CrossAttnUpBlockSpatioTemporal - PopBlock(), ResnetBlock(2560, 1280, 1280, eps=1e-6), PushBlock(), TemporalResnetBlock(1280, 1280, 1280, eps=1e-6), PopMixBlock(), PushBlock(), - AttentionBlock(20, 64, 1280, 1, 1024, need_proj_out=False), PushBlock(), TemporalAttentionBlock(20, 64, 1280, 1024, add_positional_conv), PopMixBlock(1280), - PopBlock(), ResnetBlock(2560, 1280, 1280, eps=1e-6), PushBlock(), TemporalResnetBlock(1280, 1280, 1280, eps=1e-6), PopMixBlock(), PushBlock(), - AttentionBlock(20, 64, 1280, 1, 1024, need_proj_out=False), PushBlock(), TemporalAttentionBlock(20, 64, 1280, 1024, add_positional_conv), PopMixBlock(1280), - PopBlock(), ResnetBlock(1920, 1280, 1280, eps=1e-6), PushBlock(), TemporalResnetBlock(1280, 1280, 1280, eps=1e-6), PopMixBlock(), PushBlock(), - AttentionBlock(20, 64, 1280, 1, 1024, need_proj_out=False), PushBlock(), TemporalAttentionBlock(20, 64, 1280, 1024, add_positional_conv), PopMixBlock(1280), - UpSampler(1280), - # CrossAttnUpBlockSpatioTemporal - PopBlock(), ResnetBlock(1920, 640, 1280, eps=1e-6), PushBlock(), TemporalResnetBlock(640, 640, 1280, eps=1e-6), PopMixBlock(), PushBlock(), - AttentionBlock(10, 
64, 640, 1, 1024, need_proj_out=False), PushBlock(), TemporalAttentionBlock(10, 64, 640, 1024, add_positional_conv), PopMixBlock(640), - PopBlock(), ResnetBlock(1280, 640, 1280, eps=1e-6), PushBlock(), TemporalResnetBlock(640, 640, 1280, eps=1e-6), PopMixBlock(), PushBlock(), - AttentionBlock(10, 64, 640, 1, 1024, need_proj_out=False), PushBlock(), TemporalAttentionBlock(10, 64, 640, 1024, add_positional_conv), PopMixBlock(640), - PopBlock(), ResnetBlock(960, 640, 1280, eps=1e-6), PushBlock(), TemporalResnetBlock(640, 640, 1280, eps=1e-6), PopMixBlock(), PushBlock(), - AttentionBlock(10, 64, 640, 1, 1024, need_proj_out=False), PushBlock(), TemporalAttentionBlock(10, 64, 640, 1024, add_positional_conv), PopMixBlock(640), - UpSampler(640), - # CrossAttnUpBlockSpatioTemporal - PopBlock(), ResnetBlock(960, 320, 1280, eps=1e-6), PushBlock(), TemporalResnetBlock(320, 320, 1280, eps=1e-6), PopMixBlock(), PushBlock(), - AttentionBlock(5, 64, 320, 1, 1024, need_proj_out=False), PushBlock(), TemporalAttentionBlock(5, 64, 320, 1024, add_positional_conv), PopMixBlock(320), - PopBlock(), ResnetBlock(640, 320, 1280, eps=1e-6), PushBlock(), TemporalResnetBlock(320, 320, 1280, eps=1e-6), PopMixBlock(), PushBlock(), - AttentionBlock(5, 64, 320, 1, 1024, need_proj_out=False), PushBlock(), TemporalAttentionBlock(5, 64, 320, 1024, add_positional_conv), PopMixBlock(320), - PopBlock(), ResnetBlock(640, 320, 1280, eps=1e-6), PushBlock(), TemporalResnetBlock(320, 320, 1280, eps=1e-6), PopMixBlock(), PushBlock(), - AttentionBlock(5, 64, 320, 1, 1024, need_proj_out=False), PushBlock(), TemporalAttentionBlock(5, 64, 320, 1024, add_positional_conv), PopMixBlock(320), - ]) - - self.conv_norm_out = torch.nn.GroupNorm(32, 320, eps=1e-05, affine=True) - self.conv_act = torch.nn.SiLU() - self.conv_out = torch.nn.Conv2d(320, 4, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) - - - def build_mask(self, data, is_bound): - T, C, H, W = data.shape - t = repeat(torch.arange(T), "T -> T H W", T=T, 
H=H, W=W) - h = repeat(torch.arange(H), "H -> T H W", T=T, H=H, W=W) - w = repeat(torch.arange(W), "W -> T H W", T=T, H=H, W=W) - border_width = (T + H + W) // 6 - pad = torch.ones_like(t) * border_width - mask = torch.stack([ - pad if is_bound[0] else t + 1, - pad if is_bound[1] else T - t, - pad if is_bound[2] else h + 1, - pad if is_bound[3] else H - h, - pad if is_bound[4] else w + 1, - pad if is_bound[5] else W - w - ]).min(dim=0).values - mask = mask.clip(1, border_width) - mask = (mask / border_width).to(dtype=data.dtype, device=data.device) - mask = rearrange(mask, "T H W -> T 1 H W") - return mask - - - def tiled_forward( - self, sample, timestep, encoder_hidden_states, add_time_id, - batch_time=25, batch_height=128, batch_width=128, - stride_time=5, stride_height=64, stride_width=64, - progress_bar=lambda x:x - ): - data_device = sample.device - computation_device = self.conv_in.weight.device - torch_dtype = sample.dtype - T, C, H, W = sample.shape - - weight = torch.zeros((T, 1, H, W), dtype=torch_dtype, device=data_device) - values = torch.zeros((T, 4, H, W), dtype=torch_dtype, device=data_device) - - # Split tasks - tasks = [] - for t in range(0, T, stride_time): - for h in range(0, H, stride_height): - for w in range(0, W, stride_width): - if (t-stride_time >= 0 and t-stride_time+batch_time >= T)\ - or (h-stride_height >= 0 and h-stride_height+batch_height >= H)\ - or (w-stride_width >= 0 and w-stride_width+batch_width >= W): - continue - tasks.append((t, t+batch_time, h, h+batch_height, w, w+batch_width)) - - # Run - for tl, tr, hl, hr, wl, wr in progress_bar(tasks): - sample_batch = sample[tl:tr, :, hl:hr, wl:wr].to(computation_device) - sample_batch = self.forward(sample_batch, timestep, encoder_hidden_states, add_time_id).to(data_device) - mask = self.build_mask(sample_batch, is_bound=(tl==0, tr>=T, hl==0, hr>=H, wl==0, wr>=W)) - values[tl:tr, :, hl:hr, wl:wr] += sample_batch * mask - weight[tl:tr, :, hl:hr, wl:wr] += mask - values /= weight - 
return values - - - def forward(self, sample, timestep, encoder_hidden_states, add_time_id, use_gradient_checkpointing=False, **kwargs): - # 1. time - timestep = torch.tensor((timestep,)).to(sample.device) - t_emb = self.time_proj(timestep).to(sample.dtype) - t_emb = self.time_embedding(t_emb) - - add_embeds = self.add_time_proj(add_time_id.flatten()).to(sample.dtype) - add_embeds = add_embeds.reshape((-1, 768)) - add_embeds = self.add_time_embedding(add_embeds) - - time_emb = t_emb + add_embeds - - # 2. pre-process - height, width = sample.shape[2], sample.shape[3] - hidden_states = self.conv_in(sample) - text_emb = encoder_hidden_states - res_stack = [hidden_states] - - # 3. blocks - def create_custom_forward(module): - def custom_forward(*inputs): - return module(*inputs) - return custom_forward - for i, block in enumerate(self.blocks): - if self.training and use_gradient_checkpointing and not (isinstance(block, PushBlock) or isinstance(block, PopBlock) or isinstance(block, PopMixBlock)): - hidden_states, time_emb, text_emb, res_stack = torch.utils.checkpoint.checkpoint( - create_custom_forward(block), - hidden_states, time_emb, text_emb, res_stack, - use_reentrant=False, - ) - else: - hidden_states, time_emb, text_emb, res_stack = block(hidden_states, time_emb, text_emb, res_stack) - - # 4. 
output - hidden_states = self.conv_norm_out(hidden_states) - hidden_states = self.conv_act(hidden_states) - hidden_states = self.conv_out(hidden_states) - - return hidden_states - - @staticmethod - def state_dict_converter(): - return SVDUNetStateDictConverter() - - - -class SVDUNetStateDictConverter: - def __init__(self): - pass - - def get_block_name(self, names): - if names[0] in ["down_blocks", "mid_block", "up_blocks"]: - if names[4] in ["norm", "proj_in"]: - return ".".join(names[:4] + ["transformer_blocks"]) - elif names[4] in ["time_pos_embed"]: - return ".".join(names[:4] + ["temporal_transformer_blocks"]) - elif names[4] in ["proj_out"]: - return ".".join(names[:4] + ["time_mixer"]) - else: - return ".".join(names[:5]) - return "" - - def from_diffusers(self, state_dict): - rename_dict = { - "time_embedding.linear_1": "time_embedding.0", - "time_embedding.linear_2": "time_embedding.2", - "add_embedding.linear_1": "add_time_embedding.0", - "add_embedding.linear_2": "add_time_embedding.2", - "conv_in": "conv_in", - "conv_norm_out": "conv_norm_out", - "conv_out": "conv_out", - } - blocks_rename_dict = [ - "down_blocks.0.resnets.0.spatial_res_block", None, "down_blocks.0.resnets.0.temporal_res_block", "down_blocks.0.resnets.0.time_mixer", None, - "down_blocks.0.attentions.0.transformer_blocks", None, "down_blocks.0.attentions.0.temporal_transformer_blocks", "down_blocks.0.attentions.0.time_mixer", None, - "down_blocks.0.resnets.1.spatial_res_block", None, "down_blocks.0.resnets.1.temporal_res_block", "down_blocks.0.resnets.1.time_mixer", None, - "down_blocks.0.attentions.1.transformer_blocks", None, "down_blocks.0.attentions.1.temporal_transformer_blocks", "down_blocks.0.attentions.1.time_mixer", None, - "down_blocks.0.downsamplers.0.conv", None, - "down_blocks.1.resnets.0.spatial_res_block", None, "down_blocks.1.resnets.0.temporal_res_block", "down_blocks.1.resnets.0.time_mixer", None, - "down_blocks.1.attentions.0.transformer_blocks", None, 
"down_blocks.1.attentions.0.temporal_transformer_blocks", "down_blocks.1.attentions.0.time_mixer", None, - "down_blocks.1.resnets.1.spatial_res_block", None, "down_blocks.1.resnets.1.temporal_res_block", "down_blocks.1.resnets.1.time_mixer", None, - "down_blocks.1.attentions.1.transformer_blocks", None, "down_blocks.1.attentions.1.temporal_transformer_blocks", "down_blocks.1.attentions.1.time_mixer", None, - "down_blocks.1.downsamplers.0.conv", None, - "down_blocks.2.resnets.0.spatial_res_block", None, "down_blocks.2.resnets.0.temporal_res_block", "down_blocks.2.resnets.0.time_mixer", None, - "down_blocks.2.attentions.0.transformer_blocks", None, "down_blocks.2.attentions.0.temporal_transformer_blocks", "down_blocks.2.attentions.0.time_mixer", None, - "down_blocks.2.resnets.1.spatial_res_block", None, "down_blocks.2.resnets.1.temporal_res_block", "down_blocks.2.resnets.1.time_mixer", None, - "down_blocks.2.attentions.1.transformer_blocks", None, "down_blocks.2.attentions.1.temporal_transformer_blocks", "down_blocks.2.attentions.1.time_mixer", None, - "down_blocks.2.downsamplers.0.conv", None, - "down_blocks.3.resnets.0.spatial_res_block", None, "down_blocks.3.resnets.0.temporal_res_block", "down_blocks.3.resnets.0.time_mixer", None, - "down_blocks.3.resnets.1.spatial_res_block", None, "down_blocks.3.resnets.1.temporal_res_block", "down_blocks.3.resnets.1.time_mixer", None, - "mid_block.mid_block.resnets.0.spatial_res_block", None, "mid_block.mid_block.resnets.0.temporal_res_block", "mid_block.mid_block.resnets.0.time_mixer", None, - "mid_block.mid_block.attentions.0.transformer_blocks", None, "mid_block.mid_block.attentions.0.temporal_transformer_blocks", "mid_block.mid_block.attentions.0.time_mixer", - "mid_block.mid_block.resnets.1.spatial_res_block", None, "mid_block.mid_block.resnets.1.temporal_res_block", "mid_block.mid_block.resnets.1.time_mixer", - None, "up_blocks.0.resnets.0.spatial_res_block", None, "up_blocks.0.resnets.0.temporal_res_block", 
"up_blocks.0.resnets.0.time_mixer", - None, "up_blocks.0.resnets.1.spatial_res_block", None, "up_blocks.0.resnets.1.temporal_res_block", "up_blocks.0.resnets.1.time_mixer", - None, "up_blocks.0.resnets.2.spatial_res_block", None, "up_blocks.0.resnets.2.temporal_res_block", "up_blocks.0.resnets.2.time_mixer", - "up_blocks.0.upsamplers.0.conv", - None, "up_blocks.1.resnets.0.spatial_res_block", None, "up_blocks.1.resnets.0.temporal_res_block", "up_blocks.1.resnets.0.time_mixer", None, - "up_blocks.1.attentions.0.transformer_blocks", None, "up_blocks.1.attentions.0.temporal_transformer_blocks", "up_blocks.1.attentions.0.time_mixer", - None, "up_blocks.1.resnets.1.spatial_res_block", None, "up_blocks.1.resnets.1.temporal_res_block", "up_blocks.1.resnets.1.time_mixer", None, - "up_blocks.1.attentions.1.transformer_blocks", None, "up_blocks.1.attentions.1.temporal_transformer_blocks", "up_blocks.1.attentions.1.time_mixer", - None, "up_blocks.1.resnets.2.spatial_res_block", None, "up_blocks.1.resnets.2.temporal_res_block", "up_blocks.1.resnets.2.time_mixer", None, - "up_blocks.1.attentions.2.transformer_blocks", None, "up_blocks.1.attentions.2.temporal_transformer_blocks", "up_blocks.1.attentions.2.time_mixer", - "up_blocks.1.upsamplers.0.conv", - None, "up_blocks.2.resnets.0.spatial_res_block", None, "up_blocks.2.resnets.0.temporal_res_block", "up_blocks.2.resnets.0.time_mixer", None, - "up_blocks.2.attentions.0.transformer_blocks", None, "up_blocks.2.attentions.0.temporal_transformer_blocks", "up_blocks.2.attentions.0.time_mixer", - None, "up_blocks.2.resnets.1.spatial_res_block", None, "up_blocks.2.resnets.1.temporal_res_block", "up_blocks.2.resnets.1.time_mixer", None, - "up_blocks.2.attentions.1.transformer_blocks", None, "up_blocks.2.attentions.1.temporal_transformer_blocks", "up_blocks.2.attentions.1.time_mixer", - None, "up_blocks.2.resnets.2.spatial_res_block", None, "up_blocks.2.resnets.2.temporal_res_block", "up_blocks.2.resnets.2.time_mixer", None, - 
"up_blocks.2.attentions.2.transformer_blocks", None, "up_blocks.2.attentions.2.temporal_transformer_blocks", "up_blocks.2.attentions.2.time_mixer", - "up_blocks.2.upsamplers.0.conv", - None, "up_blocks.3.resnets.0.spatial_res_block", None, "up_blocks.3.resnets.0.temporal_res_block", "up_blocks.3.resnets.0.time_mixer", None, - "up_blocks.3.attentions.0.transformer_blocks", None, "up_blocks.3.attentions.0.temporal_transformer_blocks", "up_blocks.3.attentions.0.time_mixer", - None, "up_blocks.3.resnets.1.spatial_res_block", None, "up_blocks.3.resnets.1.temporal_res_block", "up_blocks.3.resnets.1.time_mixer", None, - "up_blocks.3.attentions.1.transformer_blocks", None, "up_blocks.3.attentions.1.temporal_transformer_blocks", "up_blocks.3.attentions.1.time_mixer", - None, "up_blocks.3.resnets.2.spatial_res_block", None, "up_blocks.3.resnets.2.temporal_res_block", "up_blocks.3.resnets.2.time_mixer", None, - "up_blocks.3.attentions.2.transformer_blocks", None, "up_blocks.3.attentions.2.temporal_transformer_blocks", "up_blocks.3.attentions.2.time_mixer", - ] - blocks_rename_dict = {i:j for j,i in enumerate(blocks_rename_dict) if i is not None} - state_dict_ = {} - for name, param in sorted(state_dict.items()): - names = name.split(".") - if names[0] == "mid_block": - names = ["mid_block"] + names - if names[-1] in ["weight", "bias"]: - name_prefix = ".".join(names[:-1]) - if name_prefix in rename_dict: - state_dict_[rename_dict[name_prefix] + "." 
+ names[-1]] = param - else: - block_name = self.get_block_name(names) - if "resnets" in block_name and block_name in blocks_rename_dict: - rename = ".".join(["blocks", str(blocks_rename_dict[block_name])] + names[5:]) - state_dict_[rename] = param - elif ("downsamplers" in block_name or "upsamplers" in block_name) and block_name in blocks_rename_dict: - rename = ".".join(["blocks", str(blocks_rename_dict[block_name])] + names[-2:]) - state_dict_[rename] = param - elif "attentions" in block_name and block_name in blocks_rename_dict: - attention_id = names[5] - if "transformer_blocks" in names: - suffix_dict = { - "attn1.to_out.0": "attn1.to_out", - "attn2.to_out.0": "attn2.to_out", - "ff.net.0.proj": "act_fn.proj", - "ff.net.2": "ff", - } - suffix = ".".join(names[6:-1]) - suffix = suffix_dict.get(suffix, suffix) - rename = ".".join(["blocks", str(blocks_rename_dict[block_name]), "transformer_blocks", attention_id, suffix, names[-1]]) - elif "temporal_transformer_blocks" in names: - suffix_dict = { - "attn1.to_out.0": "attn1.to_out", - "attn2.to_out.0": "attn2.to_out", - "ff_in.net.0.proj": "act_fn_in.proj", - "ff_in.net.2": "ff_in", - "ff.net.0.proj": "act_fn_out.proj", - "ff.net.2": "ff_out", - "norm3": "norm_out", - } - suffix = ".".join(names[6:-1]) - suffix = suffix_dict.get(suffix, suffix) - rename = ".".join(["blocks", str(blocks_rename_dict[block_name]), suffix, names[-1]]) - elif "time_mixer" in block_name: - rename = ".".join(["blocks", str(blocks_rename_dict[block_name]), "proj", names[-1]]) - else: - suffix_dict = { - "linear_1": "positional_embedding_proj.0", - "linear_2": "positional_embedding_proj.2", - } - suffix = names[-2] - suffix = suffix_dict.get(suffix, suffix) - rename = ".".join(["blocks", str(blocks_rename_dict[block_name]), suffix, names[-1]]) - state_dict_[rename] = param - else: - print(name) - else: - block_name = self.get_block_name(names) - if len(block_name)>0 and block_name in blocks_rename_dict: - rename = ".".join(["blocks", 
str(blocks_rename_dict[block_name]), names[-1]]) - state_dict_[rename] = param - return state_dict_ - - - def from_civitai(self, state_dict, add_positional_conv=None): - rename_dict = { - "model.diffusion_model.input_blocks.0.0.bias": "conv_in.bias", - "model.diffusion_model.input_blocks.0.0.weight": "conv_in.weight", - "model.diffusion_model.input_blocks.1.0.emb_layers.1.bias": "blocks.0.time_emb_proj.bias", - "model.diffusion_model.input_blocks.1.0.emb_layers.1.weight": "blocks.0.time_emb_proj.weight", - "model.diffusion_model.input_blocks.1.0.in_layers.0.bias": "blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.1.0.in_layers.0.weight": "blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.1.0.in_layers.2.bias": "blocks.0.conv1.bias", - "model.diffusion_model.input_blocks.1.0.in_layers.2.weight": "blocks.0.conv1.weight", - "model.diffusion_model.input_blocks.1.0.out_layers.0.bias": "blocks.0.norm2.bias", - "model.diffusion_model.input_blocks.1.0.out_layers.0.weight": "blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.1.0.out_layers.3.bias": "blocks.0.conv2.bias", - "model.diffusion_model.input_blocks.1.0.out_layers.3.weight": "blocks.0.conv2.weight", - "model.diffusion_model.input_blocks.1.0.time_mixer.mix_factor": "blocks.3.mix_factor", - "model.diffusion_model.input_blocks.1.0.time_stack.emb_layers.1.bias": "blocks.2.time_emb_proj.bias", - "model.diffusion_model.input_blocks.1.0.time_stack.emb_layers.1.weight": "blocks.2.time_emb_proj.weight", - "model.diffusion_model.input_blocks.1.0.time_stack.in_layers.0.bias": "blocks.2.norm1.bias", - "model.diffusion_model.input_blocks.1.0.time_stack.in_layers.0.weight": "blocks.2.norm1.weight", - "model.diffusion_model.input_blocks.1.0.time_stack.in_layers.2.bias": "blocks.2.conv1.bias", - "model.diffusion_model.input_blocks.1.0.time_stack.in_layers.2.weight": "blocks.2.conv1.weight", - "model.diffusion_model.input_blocks.1.0.time_stack.out_layers.0.bias": "blocks.2.norm2.bias", - 
"model.diffusion_model.input_blocks.1.0.time_stack.out_layers.0.weight": "blocks.2.norm2.weight", - "model.diffusion_model.input_blocks.1.0.time_stack.out_layers.3.bias": "blocks.2.conv2.bias", - "model.diffusion_model.input_blocks.1.0.time_stack.out_layers.3.weight": "blocks.2.conv2.weight", - "model.diffusion_model.input_blocks.1.1.norm.bias": "blocks.5.norm.bias", - "model.diffusion_model.input_blocks.1.1.norm.weight": "blocks.5.norm.weight", - "model.diffusion_model.input_blocks.1.1.proj_in.bias": "blocks.5.proj_in.bias", - "model.diffusion_model.input_blocks.1.1.proj_in.weight": "blocks.5.proj_in.weight", - "model.diffusion_model.input_blocks.1.1.proj_out.bias": "blocks.8.proj.bias", - "model.diffusion_model.input_blocks.1.1.proj_out.weight": "blocks.8.proj.weight", - "model.diffusion_model.input_blocks.1.1.time_mixer.mix_factor": "blocks.8.mix_factor", - "model.diffusion_model.input_blocks.1.1.time_pos_embed.0.bias": "blocks.7.positional_embedding_proj.0.bias", - "model.diffusion_model.input_blocks.1.1.time_pos_embed.0.weight": "blocks.7.positional_embedding_proj.0.weight", - "model.diffusion_model.input_blocks.1.1.time_pos_embed.2.bias": "blocks.7.positional_embedding_proj.2.bias", - "model.diffusion_model.input_blocks.1.1.time_pos_embed.2.weight": "blocks.7.positional_embedding_proj.2.weight", - "model.diffusion_model.input_blocks.1.1.time_stack.0.attn1.to_k.weight": "blocks.7.attn1.to_k.weight", - "model.diffusion_model.input_blocks.1.1.time_stack.0.attn1.to_out.0.bias": "blocks.7.attn1.to_out.bias", - "model.diffusion_model.input_blocks.1.1.time_stack.0.attn1.to_out.0.weight": "blocks.7.attn1.to_out.weight", - "model.diffusion_model.input_blocks.1.1.time_stack.0.attn1.to_q.weight": "blocks.7.attn1.to_q.weight", - "model.diffusion_model.input_blocks.1.1.time_stack.0.attn1.to_v.weight": "blocks.7.attn1.to_v.weight", - "model.diffusion_model.input_blocks.1.1.time_stack.0.attn2.to_k.weight": "blocks.7.attn2.to_k.weight", - 
"model.diffusion_model.input_blocks.1.1.time_stack.0.attn2.to_out.0.bias": "blocks.7.attn2.to_out.bias", - "model.diffusion_model.input_blocks.1.1.time_stack.0.attn2.to_out.0.weight": "blocks.7.attn2.to_out.weight", - "model.diffusion_model.input_blocks.1.1.time_stack.0.attn2.to_q.weight": "blocks.7.attn2.to_q.weight", - "model.diffusion_model.input_blocks.1.1.time_stack.0.attn2.to_v.weight": "blocks.7.attn2.to_v.weight", - "model.diffusion_model.input_blocks.1.1.time_stack.0.ff.net.0.proj.bias": "blocks.7.act_fn_out.proj.bias", - "model.diffusion_model.input_blocks.1.1.time_stack.0.ff.net.0.proj.weight": "blocks.7.act_fn_out.proj.weight", - "model.diffusion_model.input_blocks.1.1.time_stack.0.ff.net.2.bias": "blocks.7.ff_out.bias", - "model.diffusion_model.input_blocks.1.1.time_stack.0.ff.net.2.weight": "blocks.7.ff_out.weight", - "model.diffusion_model.input_blocks.1.1.time_stack.0.ff_in.net.0.proj.bias": "blocks.7.act_fn_in.proj.bias", - "model.diffusion_model.input_blocks.1.1.time_stack.0.ff_in.net.0.proj.weight": "blocks.7.act_fn_in.proj.weight", - "model.diffusion_model.input_blocks.1.1.time_stack.0.ff_in.net.2.bias": "blocks.7.ff_in.bias", - "model.diffusion_model.input_blocks.1.1.time_stack.0.ff_in.net.2.weight": "blocks.7.ff_in.weight", - "model.diffusion_model.input_blocks.1.1.time_stack.0.norm1.bias": "blocks.7.norm1.bias", - "model.diffusion_model.input_blocks.1.1.time_stack.0.norm1.weight": "blocks.7.norm1.weight", - "model.diffusion_model.input_blocks.1.1.time_stack.0.norm2.bias": "blocks.7.norm2.bias", - "model.diffusion_model.input_blocks.1.1.time_stack.0.norm2.weight": "blocks.7.norm2.weight", - "model.diffusion_model.input_blocks.1.1.time_stack.0.norm3.bias": "blocks.7.norm_out.bias", - "model.diffusion_model.input_blocks.1.1.time_stack.0.norm3.weight": "blocks.7.norm_out.weight", - "model.diffusion_model.input_blocks.1.1.time_stack.0.norm_in.bias": "blocks.7.norm_in.bias", - "model.diffusion_model.input_blocks.1.1.time_stack.0.norm_in.weight": 
"blocks.7.norm_in.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn1.to_k.weight": "blocks.5.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.5.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.5.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn1.to_q.weight": "blocks.5.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn1.to_v.weight": "blocks.5.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn2.to_k.weight": "blocks.5.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.5.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.5.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn2.to_q.weight": "blocks.5.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.attn2.to_v.weight": "blocks.5.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.5.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.5.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.ff.net.2.bias": "blocks.5.transformer_blocks.0.ff.bias", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.ff.net.2.weight": "blocks.5.transformer_blocks.0.ff.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.norm1.bias": 
"blocks.5.transformer_blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.norm1.weight": "blocks.5.transformer_blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.norm2.bias": "blocks.5.transformer_blocks.0.norm2.bias", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.norm2.weight": "blocks.5.transformer_blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.norm3.bias": "blocks.5.transformer_blocks.0.norm3.bias", - "model.diffusion_model.input_blocks.1.1.transformer_blocks.0.norm3.weight": "blocks.5.transformer_blocks.0.norm3.weight", - "model.diffusion_model.input_blocks.10.0.emb_layers.1.bias": "blocks.66.time_emb_proj.bias", - "model.diffusion_model.input_blocks.10.0.emb_layers.1.weight": "blocks.66.time_emb_proj.weight", - "model.diffusion_model.input_blocks.10.0.in_layers.0.bias": "blocks.66.norm1.bias", - "model.diffusion_model.input_blocks.10.0.in_layers.0.weight": "blocks.66.norm1.weight", - "model.diffusion_model.input_blocks.10.0.in_layers.2.bias": "blocks.66.conv1.bias", - "model.diffusion_model.input_blocks.10.0.in_layers.2.weight": "blocks.66.conv1.weight", - "model.diffusion_model.input_blocks.10.0.out_layers.0.bias": "blocks.66.norm2.bias", - "model.diffusion_model.input_blocks.10.0.out_layers.0.weight": "blocks.66.norm2.weight", - "model.diffusion_model.input_blocks.10.0.out_layers.3.bias": "blocks.66.conv2.bias", - "model.diffusion_model.input_blocks.10.0.out_layers.3.weight": "blocks.66.conv2.weight", - "model.diffusion_model.input_blocks.10.0.time_mixer.mix_factor": "blocks.69.mix_factor", - "model.diffusion_model.input_blocks.10.0.time_stack.emb_layers.1.bias": "blocks.68.time_emb_proj.bias", - "model.diffusion_model.input_blocks.10.0.time_stack.emb_layers.1.weight": "blocks.68.time_emb_proj.weight", - "model.diffusion_model.input_blocks.10.0.time_stack.in_layers.0.bias": "blocks.68.norm1.bias", - 
"model.diffusion_model.input_blocks.10.0.time_stack.in_layers.0.weight": "blocks.68.norm1.weight", - "model.diffusion_model.input_blocks.10.0.time_stack.in_layers.2.bias": "blocks.68.conv1.bias", - "model.diffusion_model.input_blocks.10.0.time_stack.in_layers.2.weight": "blocks.68.conv1.weight", - "model.diffusion_model.input_blocks.10.0.time_stack.out_layers.0.bias": "blocks.68.norm2.bias", - "model.diffusion_model.input_blocks.10.0.time_stack.out_layers.0.weight": "blocks.68.norm2.weight", - "model.diffusion_model.input_blocks.10.0.time_stack.out_layers.3.bias": "blocks.68.conv2.bias", - "model.diffusion_model.input_blocks.10.0.time_stack.out_layers.3.weight": "blocks.68.conv2.weight", - "model.diffusion_model.input_blocks.11.0.emb_layers.1.bias": "blocks.71.time_emb_proj.bias", - "model.diffusion_model.input_blocks.11.0.emb_layers.1.weight": "blocks.71.time_emb_proj.weight", - "model.diffusion_model.input_blocks.11.0.in_layers.0.bias": "blocks.71.norm1.bias", - "model.diffusion_model.input_blocks.11.0.in_layers.0.weight": "blocks.71.norm1.weight", - "model.diffusion_model.input_blocks.11.0.in_layers.2.bias": "blocks.71.conv1.bias", - "model.diffusion_model.input_blocks.11.0.in_layers.2.weight": "blocks.71.conv1.weight", - "model.diffusion_model.input_blocks.11.0.out_layers.0.bias": "blocks.71.norm2.bias", - "model.diffusion_model.input_blocks.11.0.out_layers.0.weight": "blocks.71.norm2.weight", - "model.diffusion_model.input_blocks.11.0.out_layers.3.bias": "blocks.71.conv2.bias", - "model.diffusion_model.input_blocks.11.0.out_layers.3.weight": "blocks.71.conv2.weight", - "model.diffusion_model.input_blocks.11.0.time_mixer.mix_factor": "blocks.74.mix_factor", - "model.diffusion_model.input_blocks.11.0.time_stack.emb_layers.1.bias": "blocks.73.time_emb_proj.bias", - "model.diffusion_model.input_blocks.11.0.time_stack.emb_layers.1.weight": "blocks.73.time_emb_proj.weight", - "model.diffusion_model.input_blocks.11.0.time_stack.in_layers.0.bias": 
"blocks.73.norm1.bias", - "model.diffusion_model.input_blocks.11.0.time_stack.in_layers.0.weight": "blocks.73.norm1.weight", - "model.diffusion_model.input_blocks.11.0.time_stack.in_layers.2.bias": "blocks.73.conv1.bias", - "model.diffusion_model.input_blocks.11.0.time_stack.in_layers.2.weight": "blocks.73.conv1.weight", - "model.diffusion_model.input_blocks.11.0.time_stack.out_layers.0.bias": "blocks.73.norm2.bias", - "model.diffusion_model.input_blocks.11.0.time_stack.out_layers.0.weight": "blocks.73.norm2.weight", - "model.diffusion_model.input_blocks.11.0.time_stack.out_layers.3.bias": "blocks.73.conv2.bias", - "model.diffusion_model.input_blocks.11.0.time_stack.out_layers.3.weight": "blocks.73.conv2.weight", - "model.diffusion_model.input_blocks.2.0.emb_layers.1.bias": "blocks.10.time_emb_proj.bias", - "model.diffusion_model.input_blocks.2.0.emb_layers.1.weight": "blocks.10.time_emb_proj.weight", - "model.diffusion_model.input_blocks.2.0.in_layers.0.bias": "blocks.10.norm1.bias", - "model.diffusion_model.input_blocks.2.0.in_layers.0.weight": "blocks.10.norm1.weight", - "model.diffusion_model.input_blocks.2.0.in_layers.2.bias": "blocks.10.conv1.bias", - "model.diffusion_model.input_blocks.2.0.in_layers.2.weight": "blocks.10.conv1.weight", - "model.diffusion_model.input_blocks.2.0.out_layers.0.bias": "blocks.10.norm2.bias", - "model.diffusion_model.input_blocks.2.0.out_layers.0.weight": "blocks.10.norm2.weight", - "model.diffusion_model.input_blocks.2.0.out_layers.3.bias": "blocks.10.conv2.bias", - "model.diffusion_model.input_blocks.2.0.out_layers.3.weight": "blocks.10.conv2.weight", - "model.diffusion_model.input_blocks.2.0.time_mixer.mix_factor": "blocks.13.mix_factor", - "model.diffusion_model.input_blocks.2.0.time_stack.emb_layers.1.bias": "blocks.12.time_emb_proj.bias", - "model.diffusion_model.input_blocks.2.0.time_stack.emb_layers.1.weight": "blocks.12.time_emb_proj.weight", - "model.diffusion_model.input_blocks.2.0.time_stack.in_layers.0.bias": 
"blocks.12.norm1.bias", - "model.diffusion_model.input_blocks.2.0.time_stack.in_layers.0.weight": "blocks.12.norm1.weight", - "model.diffusion_model.input_blocks.2.0.time_stack.in_layers.2.bias": "blocks.12.conv1.bias", - "model.diffusion_model.input_blocks.2.0.time_stack.in_layers.2.weight": "blocks.12.conv1.weight", - "model.diffusion_model.input_blocks.2.0.time_stack.out_layers.0.bias": "blocks.12.norm2.bias", - "model.diffusion_model.input_blocks.2.0.time_stack.out_layers.0.weight": "blocks.12.norm2.weight", - "model.diffusion_model.input_blocks.2.0.time_stack.out_layers.3.bias": "blocks.12.conv2.bias", - "model.diffusion_model.input_blocks.2.0.time_stack.out_layers.3.weight": "blocks.12.conv2.weight", - "model.diffusion_model.input_blocks.2.1.norm.bias": "blocks.15.norm.bias", - "model.diffusion_model.input_blocks.2.1.norm.weight": "blocks.15.norm.weight", - "model.diffusion_model.input_blocks.2.1.proj_in.bias": "blocks.15.proj_in.bias", - "model.diffusion_model.input_blocks.2.1.proj_in.weight": "blocks.15.proj_in.weight", - "model.diffusion_model.input_blocks.2.1.proj_out.bias": "blocks.18.proj.bias", - "model.diffusion_model.input_blocks.2.1.proj_out.weight": "blocks.18.proj.weight", - "model.diffusion_model.input_blocks.2.1.time_mixer.mix_factor": "blocks.18.mix_factor", - "model.diffusion_model.input_blocks.2.1.time_pos_embed.0.bias": "blocks.17.positional_embedding_proj.0.bias", - "model.diffusion_model.input_blocks.2.1.time_pos_embed.0.weight": "blocks.17.positional_embedding_proj.0.weight", - "model.diffusion_model.input_blocks.2.1.time_pos_embed.2.bias": "blocks.17.positional_embedding_proj.2.bias", - "model.diffusion_model.input_blocks.2.1.time_pos_embed.2.weight": "blocks.17.positional_embedding_proj.2.weight", - "model.diffusion_model.input_blocks.2.1.time_stack.0.attn1.to_k.weight": "blocks.17.attn1.to_k.weight", - "model.diffusion_model.input_blocks.2.1.time_stack.0.attn1.to_out.0.bias": "blocks.17.attn1.to_out.bias", - 
"model.diffusion_model.input_blocks.2.1.time_stack.0.attn1.to_out.0.weight": "blocks.17.attn1.to_out.weight", - "model.diffusion_model.input_blocks.2.1.time_stack.0.attn1.to_q.weight": "blocks.17.attn1.to_q.weight", - "model.diffusion_model.input_blocks.2.1.time_stack.0.attn1.to_v.weight": "blocks.17.attn1.to_v.weight", - "model.diffusion_model.input_blocks.2.1.time_stack.0.attn2.to_k.weight": "blocks.17.attn2.to_k.weight", - "model.diffusion_model.input_blocks.2.1.time_stack.0.attn2.to_out.0.bias": "blocks.17.attn2.to_out.bias", - "model.diffusion_model.input_blocks.2.1.time_stack.0.attn2.to_out.0.weight": "blocks.17.attn2.to_out.weight", - "model.diffusion_model.input_blocks.2.1.time_stack.0.attn2.to_q.weight": "blocks.17.attn2.to_q.weight", - "model.diffusion_model.input_blocks.2.1.time_stack.0.attn2.to_v.weight": "blocks.17.attn2.to_v.weight", - "model.diffusion_model.input_blocks.2.1.time_stack.0.ff.net.0.proj.bias": "blocks.17.act_fn_out.proj.bias", - "model.diffusion_model.input_blocks.2.1.time_stack.0.ff.net.0.proj.weight": "blocks.17.act_fn_out.proj.weight", - "model.diffusion_model.input_blocks.2.1.time_stack.0.ff.net.2.bias": "blocks.17.ff_out.bias", - "model.diffusion_model.input_blocks.2.1.time_stack.0.ff.net.2.weight": "blocks.17.ff_out.weight", - "model.diffusion_model.input_blocks.2.1.time_stack.0.ff_in.net.0.proj.bias": "blocks.17.act_fn_in.proj.bias", - "model.diffusion_model.input_blocks.2.1.time_stack.0.ff_in.net.0.proj.weight": "blocks.17.act_fn_in.proj.weight", - "model.diffusion_model.input_blocks.2.1.time_stack.0.ff_in.net.2.bias": "blocks.17.ff_in.bias", - "model.diffusion_model.input_blocks.2.1.time_stack.0.ff_in.net.2.weight": "blocks.17.ff_in.weight", - "model.diffusion_model.input_blocks.2.1.time_stack.0.norm1.bias": "blocks.17.norm1.bias", - "model.diffusion_model.input_blocks.2.1.time_stack.0.norm1.weight": "blocks.17.norm1.weight", - "model.diffusion_model.input_blocks.2.1.time_stack.0.norm2.bias": "blocks.17.norm2.bias", - 
"model.diffusion_model.input_blocks.2.1.time_stack.0.norm2.weight": "blocks.17.norm2.weight", - "model.diffusion_model.input_blocks.2.1.time_stack.0.norm3.bias": "blocks.17.norm_out.bias", - "model.diffusion_model.input_blocks.2.1.time_stack.0.norm3.weight": "blocks.17.norm_out.weight", - "model.diffusion_model.input_blocks.2.1.time_stack.0.norm_in.bias": "blocks.17.norm_in.bias", - "model.diffusion_model.input_blocks.2.1.time_stack.0.norm_in.weight": "blocks.17.norm_in.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn1.to_k.weight": "blocks.15.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.15.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.15.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn1.to_q.weight": "blocks.15.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn1.to_v.weight": "blocks.15.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn2.to_k.weight": "blocks.15.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.15.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.15.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn2.to_q.weight": "blocks.15.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn2.to_v.weight": "blocks.15.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.15.transformer_blocks.0.act_fn.proj.bias", - 
"model.diffusion_model.input_blocks.2.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.15.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.ff.net.2.bias": "blocks.15.transformer_blocks.0.ff.bias", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.ff.net.2.weight": "blocks.15.transformer_blocks.0.ff.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.norm1.bias": "blocks.15.transformer_blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.norm1.weight": "blocks.15.transformer_blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.norm2.bias": "blocks.15.transformer_blocks.0.norm2.bias", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.norm2.weight": "blocks.15.transformer_blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.norm3.bias": "blocks.15.transformer_blocks.0.norm3.bias", - "model.diffusion_model.input_blocks.2.1.transformer_blocks.0.norm3.weight": "blocks.15.transformer_blocks.0.norm3.weight", - "model.diffusion_model.input_blocks.3.0.op.bias": "blocks.20.conv.bias", - "model.diffusion_model.input_blocks.3.0.op.weight": "blocks.20.conv.weight", - "model.diffusion_model.input_blocks.4.0.emb_layers.1.bias": "blocks.22.time_emb_proj.bias", - "model.diffusion_model.input_blocks.4.0.emb_layers.1.weight": "blocks.22.time_emb_proj.weight", - "model.diffusion_model.input_blocks.4.0.in_layers.0.bias": "blocks.22.norm1.bias", - "model.diffusion_model.input_blocks.4.0.in_layers.0.weight": "blocks.22.norm1.weight", - "model.diffusion_model.input_blocks.4.0.in_layers.2.bias": "blocks.22.conv1.bias", - "model.diffusion_model.input_blocks.4.0.in_layers.2.weight": "blocks.22.conv1.weight", - "model.diffusion_model.input_blocks.4.0.out_layers.0.bias": "blocks.22.norm2.bias", - "model.diffusion_model.input_blocks.4.0.out_layers.0.weight": "blocks.22.norm2.weight", - 
"model.diffusion_model.input_blocks.4.0.out_layers.3.bias": "blocks.22.conv2.bias", - "model.diffusion_model.input_blocks.4.0.out_layers.3.weight": "blocks.22.conv2.weight", - "model.diffusion_model.input_blocks.4.0.skip_connection.bias": "blocks.22.conv_shortcut.bias", - "model.diffusion_model.input_blocks.4.0.skip_connection.weight": "blocks.22.conv_shortcut.weight", - "model.diffusion_model.input_blocks.4.0.time_mixer.mix_factor": "blocks.25.mix_factor", - "model.diffusion_model.input_blocks.4.0.time_stack.emb_layers.1.bias": "blocks.24.time_emb_proj.bias", - "model.diffusion_model.input_blocks.4.0.time_stack.emb_layers.1.weight": "blocks.24.time_emb_proj.weight", - "model.diffusion_model.input_blocks.4.0.time_stack.in_layers.0.bias": "blocks.24.norm1.bias", - "model.diffusion_model.input_blocks.4.0.time_stack.in_layers.0.weight": "blocks.24.norm1.weight", - "model.diffusion_model.input_blocks.4.0.time_stack.in_layers.2.bias": "blocks.24.conv1.bias", - "model.diffusion_model.input_blocks.4.0.time_stack.in_layers.2.weight": "blocks.24.conv1.weight", - "model.diffusion_model.input_blocks.4.0.time_stack.out_layers.0.bias": "blocks.24.norm2.bias", - "model.diffusion_model.input_blocks.4.0.time_stack.out_layers.0.weight": "blocks.24.norm2.weight", - "model.diffusion_model.input_blocks.4.0.time_stack.out_layers.3.bias": "blocks.24.conv2.bias", - "model.diffusion_model.input_blocks.4.0.time_stack.out_layers.3.weight": "blocks.24.conv2.weight", - "model.diffusion_model.input_blocks.4.1.norm.bias": "blocks.27.norm.bias", - "model.diffusion_model.input_blocks.4.1.norm.weight": "blocks.27.norm.weight", - "model.diffusion_model.input_blocks.4.1.proj_in.bias": "blocks.27.proj_in.bias", - "model.diffusion_model.input_blocks.4.1.proj_in.weight": "blocks.27.proj_in.weight", - "model.diffusion_model.input_blocks.4.1.proj_out.bias": "blocks.30.proj.bias", - "model.diffusion_model.input_blocks.4.1.proj_out.weight": "blocks.30.proj.weight", - 
"model.diffusion_model.input_blocks.4.1.time_mixer.mix_factor": "blocks.30.mix_factor", - "model.diffusion_model.input_blocks.4.1.time_pos_embed.0.bias": "blocks.29.positional_embedding_proj.0.bias", - "model.diffusion_model.input_blocks.4.1.time_pos_embed.0.weight": "blocks.29.positional_embedding_proj.0.weight", - "model.diffusion_model.input_blocks.4.1.time_pos_embed.2.bias": "blocks.29.positional_embedding_proj.2.bias", - "model.diffusion_model.input_blocks.4.1.time_pos_embed.2.weight": "blocks.29.positional_embedding_proj.2.weight", - "model.diffusion_model.input_blocks.4.1.time_stack.0.attn1.to_k.weight": "blocks.29.attn1.to_k.weight", - "model.diffusion_model.input_blocks.4.1.time_stack.0.attn1.to_out.0.bias": "blocks.29.attn1.to_out.bias", - "model.diffusion_model.input_blocks.4.1.time_stack.0.attn1.to_out.0.weight": "blocks.29.attn1.to_out.weight", - "model.diffusion_model.input_blocks.4.1.time_stack.0.attn1.to_q.weight": "blocks.29.attn1.to_q.weight", - "model.diffusion_model.input_blocks.4.1.time_stack.0.attn1.to_v.weight": "blocks.29.attn1.to_v.weight", - "model.diffusion_model.input_blocks.4.1.time_stack.0.attn2.to_k.weight": "blocks.29.attn2.to_k.weight", - "model.diffusion_model.input_blocks.4.1.time_stack.0.attn2.to_out.0.bias": "blocks.29.attn2.to_out.bias", - "model.diffusion_model.input_blocks.4.1.time_stack.0.attn2.to_out.0.weight": "blocks.29.attn2.to_out.weight", - "model.diffusion_model.input_blocks.4.1.time_stack.0.attn2.to_q.weight": "blocks.29.attn2.to_q.weight", - "model.diffusion_model.input_blocks.4.1.time_stack.0.attn2.to_v.weight": "blocks.29.attn2.to_v.weight", - "model.diffusion_model.input_blocks.4.1.time_stack.0.ff.net.0.proj.bias": "blocks.29.act_fn_out.proj.bias", - "model.diffusion_model.input_blocks.4.1.time_stack.0.ff.net.0.proj.weight": "blocks.29.act_fn_out.proj.weight", - "model.diffusion_model.input_blocks.4.1.time_stack.0.ff.net.2.bias": "blocks.29.ff_out.bias", - 
"model.diffusion_model.input_blocks.4.1.time_stack.0.ff.net.2.weight": "blocks.29.ff_out.weight", - "model.diffusion_model.input_blocks.4.1.time_stack.0.ff_in.net.0.proj.bias": "blocks.29.act_fn_in.proj.bias", - "model.diffusion_model.input_blocks.4.1.time_stack.0.ff_in.net.0.proj.weight": "blocks.29.act_fn_in.proj.weight", - "model.diffusion_model.input_blocks.4.1.time_stack.0.ff_in.net.2.bias": "blocks.29.ff_in.bias", - "model.diffusion_model.input_blocks.4.1.time_stack.0.ff_in.net.2.weight": "blocks.29.ff_in.weight", - "model.diffusion_model.input_blocks.4.1.time_stack.0.norm1.bias": "blocks.29.norm1.bias", - "model.diffusion_model.input_blocks.4.1.time_stack.0.norm1.weight": "blocks.29.norm1.weight", - "model.diffusion_model.input_blocks.4.1.time_stack.0.norm2.bias": "blocks.29.norm2.bias", - "model.diffusion_model.input_blocks.4.1.time_stack.0.norm2.weight": "blocks.29.norm2.weight", - "model.diffusion_model.input_blocks.4.1.time_stack.0.norm3.bias": "blocks.29.norm_out.bias", - "model.diffusion_model.input_blocks.4.1.time_stack.0.norm3.weight": "blocks.29.norm_out.weight", - "model.diffusion_model.input_blocks.4.1.time_stack.0.norm_in.bias": "blocks.29.norm_in.bias", - "model.diffusion_model.input_blocks.4.1.time_stack.0.norm_in.weight": "blocks.29.norm_in.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn1.to_k.weight": "blocks.27.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.27.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.27.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn1.to_q.weight": "blocks.27.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn1.to_v.weight": "blocks.27.transformer_blocks.0.attn1.to_v.weight", - 
"model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn2.to_k.weight": "blocks.27.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.27.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.27.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn2.to_q.weight": "blocks.27.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.attn2.to_v.weight": "blocks.27.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.27.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.27.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.ff.net.2.bias": "blocks.27.transformer_blocks.0.ff.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.ff.net.2.weight": "blocks.27.transformer_blocks.0.ff.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm1.bias": "blocks.27.transformer_blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm1.weight": "blocks.27.transformer_blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm2.bias": "blocks.27.transformer_blocks.0.norm2.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm2.weight": "blocks.27.transformer_blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm3.bias": "blocks.27.transformer_blocks.0.norm3.bias", - "model.diffusion_model.input_blocks.4.1.transformer_blocks.0.norm3.weight": "blocks.27.transformer_blocks.0.norm3.weight", - 
"model.diffusion_model.input_blocks.5.0.emb_layers.1.bias": "blocks.32.time_emb_proj.bias", - "model.diffusion_model.input_blocks.5.0.emb_layers.1.weight": "blocks.32.time_emb_proj.weight", - "model.diffusion_model.input_blocks.5.0.in_layers.0.bias": "blocks.32.norm1.bias", - "model.diffusion_model.input_blocks.5.0.in_layers.0.weight": "blocks.32.norm1.weight", - "model.diffusion_model.input_blocks.5.0.in_layers.2.bias": "blocks.32.conv1.bias", - "model.diffusion_model.input_blocks.5.0.in_layers.2.weight": "blocks.32.conv1.weight", - "model.diffusion_model.input_blocks.5.0.out_layers.0.bias": "blocks.32.norm2.bias", - "model.diffusion_model.input_blocks.5.0.out_layers.0.weight": "blocks.32.norm2.weight", - "model.diffusion_model.input_blocks.5.0.out_layers.3.bias": "blocks.32.conv2.bias", - "model.diffusion_model.input_blocks.5.0.out_layers.3.weight": "blocks.32.conv2.weight", - "model.diffusion_model.input_blocks.5.0.time_mixer.mix_factor": "blocks.35.mix_factor", - "model.diffusion_model.input_blocks.5.0.time_stack.emb_layers.1.bias": "blocks.34.time_emb_proj.bias", - "model.diffusion_model.input_blocks.5.0.time_stack.emb_layers.1.weight": "blocks.34.time_emb_proj.weight", - "model.diffusion_model.input_blocks.5.0.time_stack.in_layers.0.bias": "blocks.34.norm1.bias", - "model.diffusion_model.input_blocks.5.0.time_stack.in_layers.0.weight": "blocks.34.norm1.weight", - "model.diffusion_model.input_blocks.5.0.time_stack.in_layers.2.bias": "blocks.34.conv1.bias", - "model.diffusion_model.input_blocks.5.0.time_stack.in_layers.2.weight": "blocks.34.conv1.weight", - "model.diffusion_model.input_blocks.5.0.time_stack.out_layers.0.bias": "blocks.34.norm2.bias", - "model.diffusion_model.input_blocks.5.0.time_stack.out_layers.0.weight": "blocks.34.norm2.weight", - "model.diffusion_model.input_blocks.5.0.time_stack.out_layers.3.bias": "blocks.34.conv2.bias", - "model.diffusion_model.input_blocks.5.0.time_stack.out_layers.3.weight": "blocks.34.conv2.weight", - 
"model.diffusion_model.input_blocks.5.1.norm.bias": "blocks.37.norm.bias", - "model.diffusion_model.input_blocks.5.1.norm.weight": "blocks.37.norm.weight", - "model.diffusion_model.input_blocks.5.1.proj_in.bias": "blocks.37.proj_in.bias", - "model.diffusion_model.input_blocks.5.1.proj_in.weight": "blocks.37.proj_in.weight", - "model.diffusion_model.input_blocks.5.1.proj_out.bias": "blocks.40.proj.bias", - "model.diffusion_model.input_blocks.5.1.proj_out.weight": "blocks.40.proj.weight", - "model.diffusion_model.input_blocks.5.1.time_mixer.mix_factor": "blocks.40.mix_factor", - "model.diffusion_model.input_blocks.5.1.time_pos_embed.0.bias": "blocks.39.positional_embedding_proj.0.bias", - "model.diffusion_model.input_blocks.5.1.time_pos_embed.0.weight": "blocks.39.positional_embedding_proj.0.weight", - "model.diffusion_model.input_blocks.5.1.time_pos_embed.2.bias": "blocks.39.positional_embedding_proj.2.bias", - "model.diffusion_model.input_blocks.5.1.time_pos_embed.2.weight": "blocks.39.positional_embedding_proj.2.weight", - "model.diffusion_model.input_blocks.5.1.time_stack.0.attn1.to_k.weight": "blocks.39.attn1.to_k.weight", - "model.diffusion_model.input_blocks.5.1.time_stack.0.attn1.to_out.0.bias": "blocks.39.attn1.to_out.bias", - "model.diffusion_model.input_blocks.5.1.time_stack.0.attn1.to_out.0.weight": "blocks.39.attn1.to_out.weight", - "model.diffusion_model.input_blocks.5.1.time_stack.0.attn1.to_q.weight": "blocks.39.attn1.to_q.weight", - "model.diffusion_model.input_blocks.5.1.time_stack.0.attn1.to_v.weight": "blocks.39.attn1.to_v.weight", - "model.diffusion_model.input_blocks.5.1.time_stack.0.attn2.to_k.weight": "blocks.39.attn2.to_k.weight", - "model.diffusion_model.input_blocks.5.1.time_stack.0.attn2.to_out.0.bias": "blocks.39.attn2.to_out.bias", - "model.diffusion_model.input_blocks.5.1.time_stack.0.attn2.to_out.0.weight": "blocks.39.attn2.to_out.weight", - "model.diffusion_model.input_blocks.5.1.time_stack.0.attn2.to_q.weight": 
"blocks.39.attn2.to_q.weight", - "model.diffusion_model.input_blocks.5.1.time_stack.0.attn2.to_v.weight": "blocks.39.attn2.to_v.weight", - "model.diffusion_model.input_blocks.5.1.time_stack.0.ff.net.0.proj.bias": "blocks.39.act_fn_out.proj.bias", - "model.diffusion_model.input_blocks.5.1.time_stack.0.ff.net.0.proj.weight": "blocks.39.act_fn_out.proj.weight", - "model.diffusion_model.input_blocks.5.1.time_stack.0.ff.net.2.bias": "blocks.39.ff_out.bias", - "model.diffusion_model.input_blocks.5.1.time_stack.0.ff.net.2.weight": "blocks.39.ff_out.weight", - "model.diffusion_model.input_blocks.5.1.time_stack.0.ff_in.net.0.proj.bias": "blocks.39.act_fn_in.proj.bias", - "model.diffusion_model.input_blocks.5.1.time_stack.0.ff_in.net.0.proj.weight": "blocks.39.act_fn_in.proj.weight", - "model.diffusion_model.input_blocks.5.1.time_stack.0.ff_in.net.2.bias": "blocks.39.ff_in.bias", - "model.diffusion_model.input_blocks.5.1.time_stack.0.ff_in.net.2.weight": "blocks.39.ff_in.weight", - "model.diffusion_model.input_blocks.5.1.time_stack.0.norm1.bias": "blocks.39.norm1.bias", - "model.diffusion_model.input_blocks.5.1.time_stack.0.norm1.weight": "blocks.39.norm1.weight", - "model.diffusion_model.input_blocks.5.1.time_stack.0.norm2.bias": "blocks.39.norm2.bias", - "model.diffusion_model.input_blocks.5.1.time_stack.0.norm2.weight": "blocks.39.norm2.weight", - "model.diffusion_model.input_blocks.5.1.time_stack.0.norm3.bias": "blocks.39.norm_out.bias", - "model.diffusion_model.input_blocks.5.1.time_stack.0.norm3.weight": "blocks.39.norm_out.weight", - "model.diffusion_model.input_blocks.5.1.time_stack.0.norm_in.bias": "blocks.39.norm_in.bias", - "model.diffusion_model.input_blocks.5.1.time_stack.0.norm_in.weight": "blocks.39.norm_in.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn1.to_k.weight": "blocks.37.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn1.to_out.0.bias": 
"blocks.37.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.37.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn1.to_q.weight": "blocks.37.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn1.to_v.weight": "blocks.37.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn2.to_k.weight": "blocks.37.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.37.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.37.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn2.to_q.weight": "blocks.37.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.attn2.to_v.weight": "blocks.37.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.37.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.37.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.ff.net.2.bias": "blocks.37.transformer_blocks.0.ff.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.ff.net.2.weight": "blocks.37.transformer_blocks.0.ff.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm1.bias": "blocks.37.transformer_blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm1.weight": "blocks.37.transformer_blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm2.bias": 
"blocks.37.transformer_blocks.0.norm2.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm2.weight": "blocks.37.transformer_blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm3.bias": "blocks.37.transformer_blocks.0.norm3.bias", - "model.diffusion_model.input_blocks.5.1.transformer_blocks.0.norm3.weight": "blocks.37.transformer_blocks.0.norm3.weight", - "model.diffusion_model.input_blocks.6.0.op.bias": "blocks.42.conv.bias", - "model.diffusion_model.input_blocks.6.0.op.weight": "blocks.42.conv.weight", - "model.diffusion_model.input_blocks.7.0.emb_layers.1.bias": "blocks.44.time_emb_proj.bias", - "model.diffusion_model.input_blocks.7.0.emb_layers.1.weight": "blocks.44.time_emb_proj.weight", - "model.diffusion_model.input_blocks.7.0.in_layers.0.bias": "blocks.44.norm1.bias", - "model.diffusion_model.input_blocks.7.0.in_layers.0.weight": "blocks.44.norm1.weight", - "model.diffusion_model.input_blocks.7.0.in_layers.2.bias": "blocks.44.conv1.bias", - "model.diffusion_model.input_blocks.7.0.in_layers.2.weight": "blocks.44.conv1.weight", - "model.diffusion_model.input_blocks.7.0.out_layers.0.bias": "blocks.44.norm2.bias", - "model.diffusion_model.input_blocks.7.0.out_layers.0.weight": "blocks.44.norm2.weight", - "model.diffusion_model.input_blocks.7.0.out_layers.3.bias": "blocks.44.conv2.bias", - "model.diffusion_model.input_blocks.7.0.out_layers.3.weight": "blocks.44.conv2.weight", - "model.diffusion_model.input_blocks.7.0.skip_connection.bias": "blocks.44.conv_shortcut.bias", - "model.diffusion_model.input_blocks.7.0.skip_connection.weight": "blocks.44.conv_shortcut.weight", - "model.diffusion_model.input_blocks.7.0.time_mixer.mix_factor": "blocks.47.mix_factor", - "model.diffusion_model.input_blocks.7.0.time_stack.emb_layers.1.bias": "blocks.46.time_emb_proj.bias", - "model.diffusion_model.input_blocks.7.0.time_stack.emb_layers.1.weight": "blocks.46.time_emb_proj.weight", - 
"model.diffusion_model.input_blocks.7.0.time_stack.in_layers.0.bias": "blocks.46.norm1.bias", - "model.diffusion_model.input_blocks.7.0.time_stack.in_layers.0.weight": "blocks.46.norm1.weight", - "model.diffusion_model.input_blocks.7.0.time_stack.in_layers.2.bias": "blocks.46.conv1.bias", - "model.diffusion_model.input_blocks.7.0.time_stack.in_layers.2.weight": "blocks.46.conv1.weight", - "model.diffusion_model.input_blocks.7.0.time_stack.out_layers.0.bias": "blocks.46.norm2.bias", - "model.diffusion_model.input_blocks.7.0.time_stack.out_layers.0.weight": "blocks.46.norm2.weight", - "model.diffusion_model.input_blocks.7.0.time_stack.out_layers.3.bias": "blocks.46.conv2.bias", - "model.diffusion_model.input_blocks.7.0.time_stack.out_layers.3.weight": "blocks.46.conv2.weight", - "model.diffusion_model.input_blocks.7.1.norm.bias": "blocks.49.norm.bias", - "model.diffusion_model.input_blocks.7.1.norm.weight": "blocks.49.norm.weight", - "model.diffusion_model.input_blocks.7.1.proj_in.bias": "blocks.49.proj_in.bias", - "model.diffusion_model.input_blocks.7.1.proj_in.weight": "blocks.49.proj_in.weight", - "model.diffusion_model.input_blocks.7.1.proj_out.bias": "blocks.52.proj.bias", - "model.diffusion_model.input_blocks.7.1.proj_out.weight": "blocks.52.proj.weight", - "model.diffusion_model.input_blocks.7.1.time_mixer.mix_factor": "blocks.52.mix_factor", - "model.diffusion_model.input_blocks.7.1.time_pos_embed.0.bias": "blocks.51.positional_embedding_proj.0.bias", - "model.diffusion_model.input_blocks.7.1.time_pos_embed.0.weight": "blocks.51.positional_embedding_proj.0.weight", - "model.diffusion_model.input_blocks.7.1.time_pos_embed.2.bias": "blocks.51.positional_embedding_proj.2.bias", - "model.diffusion_model.input_blocks.7.1.time_pos_embed.2.weight": "blocks.51.positional_embedding_proj.2.weight", - "model.diffusion_model.input_blocks.7.1.time_stack.0.attn1.to_k.weight": "blocks.51.attn1.to_k.weight", - 
"model.diffusion_model.input_blocks.7.1.time_stack.0.attn1.to_out.0.bias": "blocks.51.attn1.to_out.bias", - "model.diffusion_model.input_blocks.7.1.time_stack.0.attn1.to_out.0.weight": "blocks.51.attn1.to_out.weight", - "model.diffusion_model.input_blocks.7.1.time_stack.0.attn1.to_q.weight": "blocks.51.attn1.to_q.weight", - "model.diffusion_model.input_blocks.7.1.time_stack.0.attn1.to_v.weight": "blocks.51.attn1.to_v.weight", - "model.diffusion_model.input_blocks.7.1.time_stack.0.attn2.to_k.weight": "blocks.51.attn2.to_k.weight", - "model.diffusion_model.input_blocks.7.1.time_stack.0.attn2.to_out.0.bias": "blocks.51.attn2.to_out.bias", - "model.diffusion_model.input_blocks.7.1.time_stack.0.attn2.to_out.0.weight": "blocks.51.attn2.to_out.weight", - "model.diffusion_model.input_blocks.7.1.time_stack.0.attn2.to_q.weight": "blocks.51.attn2.to_q.weight", - "model.diffusion_model.input_blocks.7.1.time_stack.0.attn2.to_v.weight": "blocks.51.attn2.to_v.weight", - "model.diffusion_model.input_blocks.7.1.time_stack.0.ff.net.0.proj.bias": "blocks.51.act_fn_out.proj.bias", - "model.diffusion_model.input_blocks.7.1.time_stack.0.ff.net.0.proj.weight": "blocks.51.act_fn_out.proj.weight", - "model.diffusion_model.input_blocks.7.1.time_stack.0.ff.net.2.bias": "blocks.51.ff_out.bias", - "model.diffusion_model.input_blocks.7.1.time_stack.0.ff.net.2.weight": "blocks.51.ff_out.weight", - "model.diffusion_model.input_blocks.7.1.time_stack.0.ff_in.net.0.proj.bias": "blocks.51.act_fn_in.proj.bias", - "model.diffusion_model.input_blocks.7.1.time_stack.0.ff_in.net.0.proj.weight": "blocks.51.act_fn_in.proj.weight", - "model.diffusion_model.input_blocks.7.1.time_stack.0.ff_in.net.2.bias": "blocks.51.ff_in.bias", - "model.diffusion_model.input_blocks.7.1.time_stack.0.ff_in.net.2.weight": "blocks.51.ff_in.weight", - "model.diffusion_model.input_blocks.7.1.time_stack.0.norm1.bias": "blocks.51.norm1.bias", - "model.diffusion_model.input_blocks.7.1.time_stack.0.norm1.weight": 
"blocks.51.norm1.weight", - "model.diffusion_model.input_blocks.7.1.time_stack.0.norm2.bias": "blocks.51.norm2.bias", - "model.diffusion_model.input_blocks.7.1.time_stack.0.norm2.weight": "blocks.51.norm2.weight", - "model.diffusion_model.input_blocks.7.1.time_stack.0.norm3.bias": "blocks.51.norm_out.bias", - "model.diffusion_model.input_blocks.7.1.time_stack.0.norm3.weight": "blocks.51.norm_out.weight", - "model.diffusion_model.input_blocks.7.1.time_stack.0.norm_in.bias": "blocks.51.norm_in.bias", - "model.diffusion_model.input_blocks.7.1.time_stack.0.norm_in.weight": "blocks.51.norm_in.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn1.to_k.weight": "blocks.49.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.49.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.49.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn1.to_q.weight": "blocks.49.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn1.to_v.weight": "blocks.49.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn2.to_k.weight": "blocks.49.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.49.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.49.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn2.to_q.weight": "blocks.49.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.attn2.to_v.weight": "blocks.49.transformer_blocks.0.attn2.to_v.weight", - 
"model.diffusion_model.input_blocks.7.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.49.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.49.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.ff.net.2.bias": "blocks.49.transformer_blocks.0.ff.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.ff.net.2.weight": "blocks.49.transformer_blocks.0.ff.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm1.bias": "blocks.49.transformer_blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm1.weight": "blocks.49.transformer_blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm2.bias": "blocks.49.transformer_blocks.0.norm2.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm2.weight": "blocks.49.transformer_blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm3.bias": "blocks.49.transformer_blocks.0.norm3.bias", - "model.diffusion_model.input_blocks.7.1.transformer_blocks.0.norm3.weight": "blocks.49.transformer_blocks.0.norm3.weight", - "model.diffusion_model.input_blocks.8.0.emb_layers.1.bias": "blocks.54.time_emb_proj.bias", - "model.diffusion_model.input_blocks.8.0.emb_layers.1.weight": "blocks.54.time_emb_proj.weight", - "model.diffusion_model.input_blocks.8.0.in_layers.0.bias": "blocks.54.norm1.bias", - "model.diffusion_model.input_blocks.8.0.in_layers.0.weight": "blocks.54.norm1.weight", - "model.diffusion_model.input_blocks.8.0.in_layers.2.bias": "blocks.54.conv1.bias", - "model.diffusion_model.input_blocks.8.0.in_layers.2.weight": "blocks.54.conv1.weight", - "model.diffusion_model.input_blocks.8.0.out_layers.0.bias": "blocks.54.norm2.bias", - "model.diffusion_model.input_blocks.8.0.out_layers.0.weight": "blocks.54.norm2.weight", - 
"model.diffusion_model.input_blocks.8.0.out_layers.3.bias": "blocks.54.conv2.bias", - "model.diffusion_model.input_blocks.8.0.out_layers.3.weight": "blocks.54.conv2.weight", - "model.diffusion_model.input_blocks.8.0.time_mixer.mix_factor": "blocks.57.mix_factor", - "model.diffusion_model.input_blocks.8.0.time_stack.emb_layers.1.bias": "blocks.56.time_emb_proj.bias", - "model.diffusion_model.input_blocks.8.0.time_stack.emb_layers.1.weight": "blocks.56.time_emb_proj.weight", - "model.diffusion_model.input_blocks.8.0.time_stack.in_layers.0.bias": "blocks.56.norm1.bias", - "model.diffusion_model.input_blocks.8.0.time_stack.in_layers.0.weight": "blocks.56.norm1.weight", - "model.diffusion_model.input_blocks.8.0.time_stack.in_layers.2.bias": "blocks.56.conv1.bias", - "model.diffusion_model.input_blocks.8.0.time_stack.in_layers.2.weight": "blocks.56.conv1.weight", - "model.diffusion_model.input_blocks.8.0.time_stack.out_layers.0.bias": "blocks.56.norm2.bias", - "model.diffusion_model.input_blocks.8.0.time_stack.out_layers.0.weight": "blocks.56.norm2.weight", - "model.diffusion_model.input_blocks.8.0.time_stack.out_layers.3.bias": "blocks.56.conv2.bias", - "model.diffusion_model.input_blocks.8.0.time_stack.out_layers.3.weight": "blocks.56.conv2.weight", - "model.diffusion_model.input_blocks.8.1.norm.bias": "blocks.59.norm.bias", - "model.diffusion_model.input_blocks.8.1.norm.weight": "blocks.59.norm.weight", - "model.diffusion_model.input_blocks.8.1.proj_in.bias": "blocks.59.proj_in.bias", - "model.diffusion_model.input_blocks.8.1.proj_in.weight": "blocks.59.proj_in.weight", - "model.diffusion_model.input_blocks.8.1.proj_out.bias": "blocks.62.proj.bias", - "model.diffusion_model.input_blocks.8.1.proj_out.weight": "blocks.62.proj.weight", - "model.diffusion_model.input_blocks.8.1.time_mixer.mix_factor": "blocks.62.mix_factor", - "model.diffusion_model.input_blocks.8.1.time_pos_embed.0.bias": "blocks.61.positional_embedding_proj.0.bias", - 
"model.diffusion_model.input_blocks.8.1.time_pos_embed.0.weight": "blocks.61.positional_embedding_proj.0.weight", - "model.diffusion_model.input_blocks.8.1.time_pos_embed.2.bias": "blocks.61.positional_embedding_proj.2.bias", - "model.diffusion_model.input_blocks.8.1.time_pos_embed.2.weight": "blocks.61.positional_embedding_proj.2.weight", - "model.diffusion_model.input_blocks.8.1.time_stack.0.attn1.to_k.weight": "blocks.61.attn1.to_k.weight", - "model.diffusion_model.input_blocks.8.1.time_stack.0.attn1.to_out.0.bias": "blocks.61.attn1.to_out.bias", - "model.diffusion_model.input_blocks.8.1.time_stack.0.attn1.to_out.0.weight": "blocks.61.attn1.to_out.weight", - "model.diffusion_model.input_blocks.8.1.time_stack.0.attn1.to_q.weight": "blocks.61.attn1.to_q.weight", - "model.diffusion_model.input_blocks.8.1.time_stack.0.attn1.to_v.weight": "blocks.61.attn1.to_v.weight", - "model.diffusion_model.input_blocks.8.1.time_stack.0.attn2.to_k.weight": "blocks.61.attn2.to_k.weight", - "model.diffusion_model.input_blocks.8.1.time_stack.0.attn2.to_out.0.bias": "blocks.61.attn2.to_out.bias", - "model.diffusion_model.input_blocks.8.1.time_stack.0.attn2.to_out.0.weight": "blocks.61.attn2.to_out.weight", - "model.diffusion_model.input_blocks.8.1.time_stack.0.attn2.to_q.weight": "blocks.61.attn2.to_q.weight", - "model.diffusion_model.input_blocks.8.1.time_stack.0.attn2.to_v.weight": "blocks.61.attn2.to_v.weight", - "model.diffusion_model.input_blocks.8.1.time_stack.0.ff.net.0.proj.bias": "blocks.61.act_fn_out.proj.bias", - "model.diffusion_model.input_blocks.8.1.time_stack.0.ff.net.0.proj.weight": "blocks.61.act_fn_out.proj.weight", - "model.diffusion_model.input_blocks.8.1.time_stack.0.ff.net.2.bias": "blocks.61.ff_out.bias", - "model.diffusion_model.input_blocks.8.1.time_stack.0.ff.net.2.weight": "blocks.61.ff_out.weight", - "model.diffusion_model.input_blocks.8.1.time_stack.0.ff_in.net.0.proj.bias": "blocks.61.act_fn_in.proj.bias", - 
"model.diffusion_model.input_blocks.8.1.time_stack.0.ff_in.net.0.proj.weight": "blocks.61.act_fn_in.proj.weight", - "model.diffusion_model.input_blocks.8.1.time_stack.0.ff_in.net.2.bias": "blocks.61.ff_in.bias", - "model.diffusion_model.input_blocks.8.1.time_stack.0.ff_in.net.2.weight": "blocks.61.ff_in.weight", - "model.diffusion_model.input_blocks.8.1.time_stack.0.norm1.bias": "blocks.61.norm1.bias", - "model.diffusion_model.input_blocks.8.1.time_stack.0.norm1.weight": "blocks.61.norm1.weight", - "model.diffusion_model.input_blocks.8.1.time_stack.0.norm2.bias": "blocks.61.norm2.bias", - "model.diffusion_model.input_blocks.8.1.time_stack.0.norm2.weight": "blocks.61.norm2.weight", - "model.diffusion_model.input_blocks.8.1.time_stack.0.norm3.bias": "blocks.61.norm_out.bias", - "model.diffusion_model.input_blocks.8.1.time_stack.0.norm3.weight": "blocks.61.norm_out.weight", - "model.diffusion_model.input_blocks.8.1.time_stack.0.norm_in.bias": "blocks.61.norm_in.bias", - "model.diffusion_model.input_blocks.8.1.time_stack.0.norm_in.weight": "blocks.61.norm_in.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn1.to_k.weight": "blocks.59.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.59.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.59.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn1.to_q.weight": "blocks.59.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn1.to_v.weight": "blocks.59.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn2.to_k.weight": "blocks.59.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn2.to_out.0.bias": 
"blocks.59.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.59.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn2.to_q.weight": "blocks.59.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.attn2.to_v.weight": "blocks.59.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.59.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.59.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.ff.net.2.bias": "blocks.59.transformer_blocks.0.ff.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.ff.net.2.weight": "blocks.59.transformer_blocks.0.ff.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm1.bias": "blocks.59.transformer_blocks.0.norm1.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm1.weight": "blocks.59.transformer_blocks.0.norm1.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm2.bias": "blocks.59.transformer_blocks.0.norm2.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm2.weight": "blocks.59.transformer_blocks.0.norm2.weight", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm3.bias": "blocks.59.transformer_blocks.0.norm3.bias", - "model.diffusion_model.input_blocks.8.1.transformer_blocks.0.norm3.weight": "blocks.59.transformer_blocks.0.norm3.weight", - "model.diffusion_model.input_blocks.9.0.op.bias": "blocks.64.conv.bias", - "model.diffusion_model.input_blocks.9.0.op.weight": "blocks.64.conv.weight", - "model.diffusion_model.label_emb.0.0.bias": "add_time_embedding.0.bias", - "model.diffusion_model.label_emb.0.0.weight": 
"add_time_embedding.0.weight", - "model.diffusion_model.label_emb.0.2.bias": "add_time_embedding.2.bias", - "model.diffusion_model.label_emb.0.2.weight": "add_time_embedding.2.weight", - "model.diffusion_model.middle_block.0.emb_layers.1.bias": "blocks.76.time_emb_proj.bias", - "model.diffusion_model.middle_block.0.emb_layers.1.weight": "blocks.76.time_emb_proj.weight", - "model.diffusion_model.middle_block.0.in_layers.0.bias": "blocks.76.norm1.bias", - "model.diffusion_model.middle_block.0.in_layers.0.weight": "blocks.76.norm1.weight", - "model.diffusion_model.middle_block.0.in_layers.2.bias": "blocks.76.conv1.bias", - "model.diffusion_model.middle_block.0.in_layers.2.weight": "blocks.76.conv1.weight", - "model.diffusion_model.middle_block.0.out_layers.0.bias": "blocks.76.norm2.bias", - "model.diffusion_model.middle_block.0.out_layers.0.weight": "blocks.76.norm2.weight", - "model.diffusion_model.middle_block.0.out_layers.3.bias": "blocks.76.conv2.bias", - "model.diffusion_model.middle_block.0.out_layers.3.weight": "blocks.76.conv2.weight", - "model.diffusion_model.middle_block.0.time_mixer.mix_factor": "blocks.79.mix_factor", - "model.diffusion_model.middle_block.0.time_stack.emb_layers.1.bias": "blocks.78.time_emb_proj.bias", - "model.diffusion_model.middle_block.0.time_stack.emb_layers.1.weight": "blocks.78.time_emb_proj.weight", - "model.diffusion_model.middle_block.0.time_stack.in_layers.0.bias": "blocks.78.norm1.bias", - "model.diffusion_model.middle_block.0.time_stack.in_layers.0.weight": "blocks.78.norm1.weight", - "model.diffusion_model.middle_block.0.time_stack.in_layers.2.bias": "blocks.78.conv1.bias", - "model.diffusion_model.middle_block.0.time_stack.in_layers.2.weight": "blocks.78.conv1.weight", - "model.diffusion_model.middle_block.0.time_stack.out_layers.0.bias": "blocks.78.norm2.bias", - "model.diffusion_model.middle_block.0.time_stack.out_layers.0.weight": "blocks.78.norm2.weight", - 
"model.diffusion_model.middle_block.0.time_stack.out_layers.3.bias": "blocks.78.conv2.bias", - "model.diffusion_model.middle_block.0.time_stack.out_layers.3.weight": "blocks.78.conv2.weight", - "model.diffusion_model.middle_block.1.norm.bias": "blocks.81.norm.bias", - "model.diffusion_model.middle_block.1.norm.weight": "blocks.81.norm.weight", - "model.diffusion_model.middle_block.1.proj_in.bias": "blocks.81.proj_in.bias", - "model.diffusion_model.middle_block.1.proj_in.weight": "blocks.81.proj_in.weight", - "model.diffusion_model.middle_block.1.proj_out.bias": "blocks.84.proj.bias", - "model.diffusion_model.middle_block.1.proj_out.weight": "blocks.84.proj.weight", - "model.diffusion_model.middle_block.1.time_mixer.mix_factor": "blocks.84.mix_factor", - "model.diffusion_model.middle_block.1.time_pos_embed.0.bias": "blocks.83.positional_embedding_proj.0.bias", - "model.diffusion_model.middle_block.1.time_pos_embed.0.weight": "blocks.83.positional_embedding_proj.0.weight", - "model.diffusion_model.middle_block.1.time_pos_embed.2.bias": "blocks.83.positional_embedding_proj.2.bias", - "model.diffusion_model.middle_block.1.time_pos_embed.2.weight": "blocks.83.positional_embedding_proj.2.weight", - "model.diffusion_model.middle_block.1.time_stack.0.attn1.to_k.weight": "blocks.83.attn1.to_k.weight", - "model.diffusion_model.middle_block.1.time_stack.0.attn1.to_out.0.bias": "blocks.83.attn1.to_out.bias", - "model.diffusion_model.middle_block.1.time_stack.0.attn1.to_out.0.weight": "blocks.83.attn1.to_out.weight", - "model.diffusion_model.middle_block.1.time_stack.0.attn1.to_q.weight": "blocks.83.attn1.to_q.weight", - "model.diffusion_model.middle_block.1.time_stack.0.attn1.to_v.weight": "blocks.83.attn1.to_v.weight", - "model.diffusion_model.middle_block.1.time_stack.0.attn2.to_k.weight": "blocks.83.attn2.to_k.weight", - "model.diffusion_model.middle_block.1.time_stack.0.attn2.to_out.0.bias": "blocks.83.attn2.to_out.bias", - 
"model.diffusion_model.middle_block.1.time_stack.0.attn2.to_out.0.weight": "blocks.83.attn2.to_out.weight", - "model.diffusion_model.middle_block.1.time_stack.0.attn2.to_q.weight": "blocks.83.attn2.to_q.weight", - "model.diffusion_model.middle_block.1.time_stack.0.attn2.to_v.weight": "blocks.83.attn2.to_v.weight", - "model.diffusion_model.middle_block.1.time_stack.0.ff.net.0.proj.bias": "blocks.83.act_fn_out.proj.bias", - "model.diffusion_model.middle_block.1.time_stack.0.ff.net.0.proj.weight": "blocks.83.act_fn_out.proj.weight", - "model.diffusion_model.middle_block.1.time_stack.0.ff.net.2.bias": "blocks.83.ff_out.bias", - "model.diffusion_model.middle_block.1.time_stack.0.ff.net.2.weight": "blocks.83.ff_out.weight", - "model.diffusion_model.middle_block.1.time_stack.0.ff_in.net.0.proj.bias": "blocks.83.act_fn_in.proj.bias", - "model.diffusion_model.middle_block.1.time_stack.0.ff_in.net.0.proj.weight": "blocks.83.act_fn_in.proj.weight", - "model.diffusion_model.middle_block.1.time_stack.0.ff_in.net.2.bias": "blocks.83.ff_in.bias", - "model.diffusion_model.middle_block.1.time_stack.0.ff_in.net.2.weight": "blocks.83.ff_in.weight", - "model.diffusion_model.middle_block.1.time_stack.0.norm1.bias": "blocks.83.norm1.bias", - "model.diffusion_model.middle_block.1.time_stack.0.norm1.weight": "blocks.83.norm1.weight", - "model.diffusion_model.middle_block.1.time_stack.0.norm2.bias": "blocks.83.norm2.bias", - "model.diffusion_model.middle_block.1.time_stack.0.norm2.weight": "blocks.83.norm2.weight", - "model.diffusion_model.middle_block.1.time_stack.0.norm3.bias": "blocks.83.norm_out.bias", - "model.diffusion_model.middle_block.1.time_stack.0.norm3.weight": "blocks.83.norm_out.weight", - "model.diffusion_model.middle_block.1.time_stack.0.norm_in.bias": "blocks.83.norm_in.bias", - "model.diffusion_model.middle_block.1.time_stack.0.norm_in.weight": "blocks.83.norm_in.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn1.to_k.weight": 
"blocks.81.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.81.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.81.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn1.to_q.weight": "blocks.81.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn1.to_v.weight": "blocks.81.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn2.to_k.weight": "blocks.81.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.81.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.81.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn2.to_q.weight": "blocks.81.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.attn2.to_v.weight": "blocks.81.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.81.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.81.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.ff.net.2.bias": "blocks.81.transformer_blocks.0.ff.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.ff.net.2.weight": "blocks.81.transformer_blocks.0.ff.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm1.bias": "blocks.81.transformer_blocks.0.norm1.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm1.weight": 
"blocks.81.transformer_blocks.0.norm1.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm2.bias": "blocks.81.transformer_blocks.0.norm2.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm2.weight": "blocks.81.transformer_blocks.0.norm2.weight", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm3.bias": "blocks.81.transformer_blocks.0.norm3.bias", - "model.diffusion_model.middle_block.1.transformer_blocks.0.norm3.weight": "blocks.81.transformer_blocks.0.norm3.weight", - "model.diffusion_model.middle_block.2.emb_layers.1.bias": "blocks.85.time_emb_proj.bias", - "model.diffusion_model.middle_block.2.emb_layers.1.weight": "blocks.85.time_emb_proj.weight", - "model.diffusion_model.middle_block.2.in_layers.0.bias": "blocks.85.norm1.bias", - "model.diffusion_model.middle_block.2.in_layers.0.weight": "blocks.85.norm1.weight", - "model.diffusion_model.middle_block.2.in_layers.2.bias": "blocks.85.conv1.bias", - "model.diffusion_model.middle_block.2.in_layers.2.weight": "blocks.85.conv1.weight", - "model.diffusion_model.middle_block.2.out_layers.0.bias": "blocks.85.norm2.bias", - "model.diffusion_model.middle_block.2.out_layers.0.weight": "blocks.85.norm2.weight", - "model.diffusion_model.middle_block.2.out_layers.3.bias": "blocks.85.conv2.bias", - "model.diffusion_model.middle_block.2.out_layers.3.weight": "blocks.85.conv2.weight", - "model.diffusion_model.middle_block.2.time_mixer.mix_factor": "blocks.88.mix_factor", - "model.diffusion_model.middle_block.2.time_stack.emb_layers.1.bias": "blocks.87.time_emb_proj.bias", - "model.diffusion_model.middle_block.2.time_stack.emb_layers.1.weight": "blocks.87.time_emb_proj.weight", - "model.diffusion_model.middle_block.2.time_stack.in_layers.0.bias": "blocks.87.norm1.bias", - "model.diffusion_model.middle_block.2.time_stack.in_layers.0.weight": "blocks.87.norm1.weight", - "model.diffusion_model.middle_block.2.time_stack.in_layers.2.bias": "blocks.87.conv1.bias", - 
"model.diffusion_model.middle_block.2.time_stack.in_layers.2.weight": "blocks.87.conv1.weight", - "model.diffusion_model.middle_block.2.time_stack.out_layers.0.bias": "blocks.87.norm2.bias", - "model.diffusion_model.middle_block.2.time_stack.out_layers.0.weight": "blocks.87.norm2.weight", - "model.diffusion_model.middle_block.2.time_stack.out_layers.3.bias": "blocks.87.conv2.bias", - "model.diffusion_model.middle_block.2.time_stack.out_layers.3.weight": "blocks.87.conv2.weight", - "model.diffusion_model.out.0.bias": "conv_norm_out.bias", - "model.diffusion_model.out.0.weight": "conv_norm_out.weight", - "model.diffusion_model.out.2.bias": "conv_out.bias", - "model.diffusion_model.out.2.weight": "conv_out.weight", - "model.diffusion_model.output_blocks.0.0.emb_layers.1.bias": "blocks.90.time_emb_proj.bias", - "model.diffusion_model.output_blocks.0.0.emb_layers.1.weight": "blocks.90.time_emb_proj.weight", - "model.diffusion_model.output_blocks.0.0.in_layers.0.bias": "blocks.90.norm1.bias", - "model.diffusion_model.output_blocks.0.0.in_layers.0.weight": "blocks.90.norm1.weight", - "model.diffusion_model.output_blocks.0.0.in_layers.2.bias": "blocks.90.conv1.bias", - "model.diffusion_model.output_blocks.0.0.in_layers.2.weight": "blocks.90.conv1.weight", - "model.diffusion_model.output_blocks.0.0.out_layers.0.bias": "blocks.90.norm2.bias", - "model.diffusion_model.output_blocks.0.0.out_layers.0.weight": "blocks.90.norm2.weight", - "model.diffusion_model.output_blocks.0.0.out_layers.3.bias": "blocks.90.conv2.bias", - "model.diffusion_model.output_blocks.0.0.out_layers.3.weight": "blocks.90.conv2.weight", - "model.diffusion_model.output_blocks.0.0.skip_connection.bias": "blocks.90.conv_shortcut.bias", - "model.diffusion_model.output_blocks.0.0.skip_connection.weight": "blocks.90.conv_shortcut.weight", - "model.diffusion_model.output_blocks.0.0.time_mixer.mix_factor": "blocks.93.mix_factor", - "model.diffusion_model.output_blocks.0.0.time_stack.emb_layers.1.bias": 
"blocks.92.time_emb_proj.bias", - "model.diffusion_model.output_blocks.0.0.time_stack.emb_layers.1.weight": "blocks.92.time_emb_proj.weight", - "model.diffusion_model.output_blocks.0.0.time_stack.in_layers.0.bias": "blocks.92.norm1.bias", - "model.diffusion_model.output_blocks.0.0.time_stack.in_layers.0.weight": "blocks.92.norm1.weight", - "model.diffusion_model.output_blocks.0.0.time_stack.in_layers.2.bias": "blocks.92.conv1.bias", - "model.diffusion_model.output_blocks.0.0.time_stack.in_layers.2.weight": "blocks.92.conv1.weight", - "model.diffusion_model.output_blocks.0.0.time_stack.out_layers.0.bias": "blocks.92.norm2.bias", - "model.diffusion_model.output_blocks.0.0.time_stack.out_layers.0.weight": "blocks.92.norm2.weight", - "model.diffusion_model.output_blocks.0.0.time_stack.out_layers.3.bias": "blocks.92.conv2.bias", - "model.diffusion_model.output_blocks.0.0.time_stack.out_layers.3.weight": "blocks.92.conv2.weight", - "model.diffusion_model.output_blocks.1.0.emb_layers.1.bias": "blocks.95.time_emb_proj.bias", - "model.diffusion_model.output_blocks.1.0.emb_layers.1.weight": "blocks.95.time_emb_proj.weight", - "model.diffusion_model.output_blocks.1.0.in_layers.0.bias": "blocks.95.norm1.bias", - "model.diffusion_model.output_blocks.1.0.in_layers.0.weight": "blocks.95.norm1.weight", - "model.diffusion_model.output_blocks.1.0.in_layers.2.bias": "blocks.95.conv1.bias", - "model.diffusion_model.output_blocks.1.0.in_layers.2.weight": "blocks.95.conv1.weight", - "model.diffusion_model.output_blocks.1.0.out_layers.0.bias": "blocks.95.norm2.bias", - "model.diffusion_model.output_blocks.1.0.out_layers.0.weight": "blocks.95.norm2.weight", - "model.diffusion_model.output_blocks.1.0.out_layers.3.bias": "blocks.95.conv2.bias", - "model.diffusion_model.output_blocks.1.0.out_layers.3.weight": "blocks.95.conv2.weight", - "model.diffusion_model.output_blocks.1.0.skip_connection.bias": "blocks.95.conv_shortcut.bias", - 
"model.diffusion_model.output_blocks.1.0.skip_connection.weight": "blocks.95.conv_shortcut.weight", - "model.diffusion_model.output_blocks.1.0.time_mixer.mix_factor": "blocks.98.mix_factor", - "model.diffusion_model.output_blocks.1.0.time_stack.emb_layers.1.bias": "blocks.97.time_emb_proj.bias", - "model.diffusion_model.output_blocks.1.0.time_stack.emb_layers.1.weight": "blocks.97.time_emb_proj.weight", - "model.diffusion_model.output_blocks.1.0.time_stack.in_layers.0.bias": "blocks.97.norm1.bias", - "model.diffusion_model.output_blocks.1.0.time_stack.in_layers.0.weight": "blocks.97.norm1.weight", - "model.diffusion_model.output_blocks.1.0.time_stack.in_layers.2.bias": "blocks.97.conv1.bias", - "model.diffusion_model.output_blocks.1.0.time_stack.in_layers.2.weight": "blocks.97.conv1.weight", - "model.diffusion_model.output_blocks.1.0.time_stack.out_layers.0.bias": "blocks.97.norm2.bias", - "model.diffusion_model.output_blocks.1.0.time_stack.out_layers.0.weight": "blocks.97.norm2.weight", - "model.diffusion_model.output_blocks.1.0.time_stack.out_layers.3.bias": "blocks.97.conv2.bias", - "model.diffusion_model.output_blocks.1.0.time_stack.out_layers.3.weight": "blocks.97.conv2.weight", - "model.diffusion_model.output_blocks.10.0.emb_layers.1.bias": "blocks.178.time_emb_proj.bias", - "model.diffusion_model.output_blocks.10.0.emb_layers.1.weight": "blocks.178.time_emb_proj.weight", - "model.diffusion_model.output_blocks.10.0.in_layers.0.bias": "blocks.178.norm1.bias", - "model.diffusion_model.output_blocks.10.0.in_layers.0.weight": "blocks.178.norm1.weight", - "model.diffusion_model.output_blocks.10.0.in_layers.2.bias": "blocks.178.conv1.bias", - "model.diffusion_model.output_blocks.10.0.in_layers.2.weight": "blocks.178.conv1.weight", - "model.diffusion_model.output_blocks.10.0.out_layers.0.bias": "blocks.178.norm2.bias", - "model.diffusion_model.output_blocks.10.0.out_layers.0.weight": "blocks.178.norm2.weight", - 
"model.diffusion_model.output_blocks.10.0.out_layers.3.bias": "blocks.178.conv2.bias", - "model.diffusion_model.output_blocks.10.0.out_layers.3.weight": "blocks.178.conv2.weight", - "model.diffusion_model.output_blocks.10.0.skip_connection.bias": "blocks.178.conv_shortcut.bias", - "model.diffusion_model.output_blocks.10.0.skip_connection.weight": "blocks.178.conv_shortcut.weight", - "model.diffusion_model.output_blocks.10.0.time_mixer.mix_factor": "blocks.181.mix_factor", - "model.diffusion_model.output_blocks.10.0.time_stack.emb_layers.1.bias": "blocks.180.time_emb_proj.bias", - "model.diffusion_model.output_blocks.10.0.time_stack.emb_layers.1.weight": "blocks.180.time_emb_proj.weight", - "model.diffusion_model.output_blocks.10.0.time_stack.in_layers.0.bias": "blocks.180.norm1.bias", - "model.diffusion_model.output_blocks.10.0.time_stack.in_layers.0.weight": "blocks.180.norm1.weight", - "model.diffusion_model.output_blocks.10.0.time_stack.in_layers.2.bias": "blocks.180.conv1.bias", - "model.diffusion_model.output_blocks.10.0.time_stack.in_layers.2.weight": "blocks.180.conv1.weight", - "model.diffusion_model.output_blocks.10.0.time_stack.out_layers.0.bias": "blocks.180.norm2.bias", - "model.diffusion_model.output_blocks.10.0.time_stack.out_layers.0.weight": "blocks.180.norm2.weight", - "model.diffusion_model.output_blocks.10.0.time_stack.out_layers.3.bias": "blocks.180.conv2.bias", - "model.diffusion_model.output_blocks.10.0.time_stack.out_layers.3.weight": "blocks.180.conv2.weight", - "model.diffusion_model.output_blocks.10.1.norm.bias": "blocks.183.norm.bias", - "model.diffusion_model.output_blocks.10.1.norm.weight": "blocks.183.norm.weight", - "model.diffusion_model.output_blocks.10.1.proj_in.bias": "blocks.183.proj_in.bias", - "model.diffusion_model.output_blocks.10.1.proj_in.weight": "blocks.183.proj_in.weight", - "model.diffusion_model.output_blocks.10.1.proj_out.bias": "blocks.186.proj.bias", - "model.diffusion_model.output_blocks.10.1.proj_out.weight": 
"blocks.186.proj.weight", - "model.diffusion_model.output_blocks.10.1.time_mixer.mix_factor": "blocks.186.mix_factor", - "model.diffusion_model.output_blocks.10.1.time_pos_embed.0.bias": "blocks.185.positional_embedding_proj.0.bias", - "model.diffusion_model.output_blocks.10.1.time_pos_embed.0.weight": "blocks.185.positional_embedding_proj.0.weight", - "model.diffusion_model.output_blocks.10.1.time_pos_embed.2.bias": "blocks.185.positional_embedding_proj.2.bias", - "model.diffusion_model.output_blocks.10.1.time_pos_embed.2.weight": "blocks.185.positional_embedding_proj.2.weight", - "model.diffusion_model.output_blocks.10.1.time_stack.0.attn1.to_k.weight": "blocks.185.attn1.to_k.weight", - "model.diffusion_model.output_blocks.10.1.time_stack.0.attn1.to_out.0.bias": "blocks.185.attn1.to_out.bias", - "model.diffusion_model.output_blocks.10.1.time_stack.0.attn1.to_out.0.weight": "blocks.185.attn1.to_out.weight", - "model.diffusion_model.output_blocks.10.1.time_stack.0.attn1.to_q.weight": "blocks.185.attn1.to_q.weight", - "model.diffusion_model.output_blocks.10.1.time_stack.0.attn1.to_v.weight": "blocks.185.attn1.to_v.weight", - "model.diffusion_model.output_blocks.10.1.time_stack.0.attn2.to_k.weight": "blocks.185.attn2.to_k.weight", - "model.diffusion_model.output_blocks.10.1.time_stack.0.attn2.to_out.0.bias": "blocks.185.attn2.to_out.bias", - "model.diffusion_model.output_blocks.10.1.time_stack.0.attn2.to_out.0.weight": "blocks.185.attn2.to_out.weight", - "model.diffusion_model.output_blocks.10.1.time_stack.0.attn2.to_q.weight": "blocks.185.attn2.to_q.weight", - "model.diffusion_model.output_blocks.10.1.time_stack.0.attn2.to_v.weight": "blocks.185.attn2.to_v.weight", - "model.diffusion_model.output_blocks.10.1.time_stack.0.ff.net.0.proj.bias": "blocks.185.act_fn_out.proj.bias", - "model.diffusion_model.output_blocks.10.1.time_stack.0.ff.net.0.proj.weight": "blocks.185.act_fn_out.proj.weight", - "model.diffusion_model.output_blocks.10.1.time_stack.0.ff.net.2.bias": 
"blocks.185.ff_out.bias", - "model.diffusion_model.output_blocks.10.1.time_stack.0.ff.net.2.weight": "blocks.185.ff_out.weight", - "model.diffusion_model.output_blocks.10.1.time_stack.0.ff_in.net.0.proj.bias": "blocks.185.act_fn_in.proj.bias", - "model.diffusion_model.output_blocks.10.1.time_stack.0.ff_in.net.0.proj.weight": "blocks.185.act_fn_in.proj.weight", - "model.diffusion_model.output_blocks.10.1.time_stack.0.ff_in.net.2.bias": "blocks.185.ff_in.bias", - "model.diffusion_model.output_blocks.10.1.time_stack.0.ff_in.net.2.weight": "blocks.185.ff_in.weight", - "model.diffusion_model.output_blocks.10.1.time_stack.0.norm1.bias": "blocks.185.norm1.bias", - "model.diffusion_model.output_blocks.10.1.time_stack.0.norm1.weight": "blocks.185.norm1.weight", - "model.diffusion_model.output_blocks.10.1.time_stack.0.norm2.bias": "blocks.185.norm2.bias", - "model.diffusion_model.output_blocks.10.1.time_stack.0.norm2.weight": "blocks.185.norm2.weight", - "model.diffusion_model.output_blocks.10.1.time_stack.0.norm3.bias": "blocks.185.norm_out.bias", - "model.diffusion_model.output_blocks.10.1.time_stack.0.norm3.weight": "blocks.185.norm_out.weight", - "model.diffusion_model.output_blocks.10.1.time_stack.0.norm_in.bias": "blocks.185.norm_in.bias", - "model.diffusion_model.output_blocks.10.1.time_stack.0.norm_in.weight": "blocks.185.norm_in.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn1.to_k.weight": "blocks.183.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.183.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.183.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn1.to_q.weight": "blocks.183.transformer_blocks.0.attn1.to_q.weight", - 
"model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn1.to_v.weight": "blocks.183.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn2.to_k.weight": "blocks.183.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.183.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.183.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn2.to_q.weight": "blocks.183.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.attn2.to_v.weight": "blocks.183.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.183.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.183.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.ff.net.2.bias": "blocks.183.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.ff.net.2.weight": "blocks.183.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.norm1.bias": "blocks.183.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.norm1.weight": "blocks.183.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.norm2.bias": "blocks.183.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.norm2.weight": "blocks.183.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.10.1.transformer_blocks.0.norm3.bias": "blocks.183.transformer_blocks.0.norm3.bias", - 
"model.diffusion_model.output_blocks.10.1.transformer_blocks.0.norm3.weight": "blocks.183.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.11.0.emb_layers.1.bias": "blocks.188.time_emb_proj.bias", - "model.diffusion_model.output_blocks.11.0.emb_layers.1.weight": "blocks.188.time_emb_proj.weight", - "model.diffusion_model.output_blocks.11.0.in_layers.0.bias": "blocks.188.norm1.bias", - "model.diffusion_model.output_blocks.11.0.in_layers.0.weight": "blocks.188.norm1.weight", - "model.diffusion_model.output_blocks.11.0.in_layers.2.bias": "blocks.188.conv1.bias", - "model.diffusion_model.output_blocks.11.0.in_layers.2.weight": "blocks.188.conv1.weight", - "model.diffusion_model.output_blocks.11.0.out_layers.0.bias": "blocks.188.norm2.bias", - "model.diffusion_model.output_blocks.11.0.out_layers.0.weight": "blocks.188.norm2.weight", - "model.diffusion_model.output_blocks.11.0.out_layers.3.bias": "blocks.188.conv2.bias", - "model.diffusion_model.output_blocks.11.0.out_layers.3.weight": "blocks.188.conv2.weight", - "model.diffusion_model.output_blocks.11.0.skip_connection.bias": "blocks.188.conv_shortcut.bias", - "model.diffusion_model.output_blocks.11.0.skip_connection.weight": "blocks.188.conv_shortcut.weight", - "model.diffusion_model.output_blocks.11.0.time_mixer.mix_factor": "blocks.191.mix_factor", - "model.diffusion_model.output_blocks.11.0.time_stack.emb_layers.1.bias": "blocks.190.time_emb_proj.bias", - "model.diffusion_model.output_blocks.11.0.time_stack.emb_layers.1.weight": "blocks.190.time_emb_proj.weight", - "model.diffusion_model.output_blocks.11.0.time_stack.in_layers.0.bias": "blocks.190.norm1.bias", - "model.diffusion_model.output_blocks.11.0.time_stack.in_layers.0.weight": "blocks.190.norm1.weight", - "model.diffusion_model.output_blocks.11.0.time_stack.in_layers.2.bias": "blocks.190.conv1.bias", - "model.diffusion_model.output_blocks.11.0.time_stack.in_layers.2.weight": "blocks.190.conv1.weight", - 
"model.diffusion_model.output_blocks.11.0.time_stack.out_layers.0.bias": "blocks.190.norm2.bias", - "model.diffusion_model.output_blocks.11.0.time_stack.out_layers.0.weight": "blocks.190.norm2.weight", - "model.diffusion_model.output_blocks.11.0.time_stack.out_layers.3.bias": "blocks.190.conv2.bias", - "model.diffusion_model.output_blocks.11.0.time_stack.out_layers.3.weight": "blocks.190.conv2.weight", - "model.diffusion_model.output_blocks.11.1.norm.bias": "blocks.193.norm.bias", - "model.diffusion_model.output_blocks.11.1.norm.weight": "blocks.193.norm.weight", - "model.diffusion_model.output_blocks.11.1.proj_in.bias": "blocks.193.proj_in.bias", - "model.diffusion_model.output_blocks.11.1.proj_in.weight": "blocks.193.proj_in.weight", - "model.diffusion_model.output_blocks.11.1.proj_out.bias": "blocks.196.proj.bias", - "model.diffusion_model.output_blocks.11.1.proj_out.weight": "blocks.196.proj.weight", - "model.diffusion_model.output_blocks.11.1.time_mixer.mix_factor": "blocks.196.mix_factor", - "model.diffusion_model.output_blocks.11.1.time_pos_embed.0.bias": "blocks.195.positional_embedding_proj.0.bias", - "model.diffusion_model.output_blocks.11.1.time_pos_embed.0.weight": "blocks.195.positional_embedding_proj.0.weight", - "model.diffusion_model.output_blocks.11.1.time_pos_embed.2.bias": "blocks.195.positional_embedding_proj.2.bias", - "model.diffusion_model.output_blocks.11.1.time_pos_embed.2.weight": "blocks.195.positional_embedding_proj.2.weight", - "model.diffusion_model.output_blocks.11.1.time_stack.0.attn1.to_k.weight": "blocks.195.attn1.to_k.weight", - "model.diffusion_model.output_blocks.11.1.time_stack.0.attn1.to_out.0.bias": "blocks.195.attn1.to_out.bias", - "model.diffusion_model.output_blocks.11.1.time_stack.0.attn1.to_out.0.weight": "blocks.195.attn1.to_out.weight", - "model.diffusion_model.output_blocks.11.1.time_stack.0.attn1.to_q.weight": "blocks.195.attn1.to_q.weight", - "model.diffusion_model.output_blocks.11.1.time_stack.0.attn1.to_v.weight": 
"blocks.195.attn1.to_v.weight", - "model.diffusion_model.output_blocks.11.1.time_stack.0.attn2.to_k.weight": "blocks.195.attn2.to_k.weight", - "model.diffusion_model.output_blocks.11.1.time_stack.0.attn2.to_out.0.bias": "blocks.195.attn2.to_out.bias", - "model.diffusion_model.output_blocks.11.1.time_stack.0.attn2.to_out.0.weight": "blocks.195.attn2.to_out.weight", - "model.diffusion_model.output_blocks.11.1.time_stack.0.attn2.to_q.weight": "blocks.195.attn2.to_q.weight", - "model.diffusion_model.output_blocks.11.1.time_stack.0.attn2.to_v.weight": "blocks.195.attn2.to_v.weight", - "model.diffusion_model.output_blocks.11.1.time_stack.0.ff.net.0.proj.bias": "blocks.195.act_fn_out.proj.bias", - "model.diffusion_model.output_blocks.11.1.time_stack.0.ff.net.0.proj.weight": "blocks.195.act_fn_out.proj.weight", - "model.diffusion_model.output_blocks.11.1.time_stack.0.ff.net.2.bias": "blocks.195.ff_out.bias", - "model.diffusion_model.output_blocks.11.1.time_stack.0.ff.net.2.weight": "blocks.195.ff_out.weight", - "model.diffusion_model.output_blocks.11.1.time_stack.0.ff_in.net.0.proj.bias": "blocks.195.act_fn_in.proj.bias", - "model.diffusion_model.output_blocks.11.1.time_stack.0.ff_in.net.0.proj.weight": "blocks.195.act_fn_in.proj.weight", - "model.diffusion_model.output_blocks.11.1.time_stack.0.ff_in.net.2.bias": "blocks.195.ff_in.bias", - "model.diffusion_model.output_blocks.11.1.time_stack.0.ff_in.net.2.weight": "blocks.195.ff_in.weight", - "model.diffusion_model.output_blocks.11.1.time_stack.0.norm1.bias": "blocks.195.norm1.bias", - "model.diffusion_model.output_blocks.11.1.time_stack.0.norm1.weight": "blocks.195.norm1.weight", - "model.diffusion_model.output_blocks.11.1.time_stack.0.norm2.bias": "blocks.195.norm2.bias", - "model.diffusion_model.output_blocks.11.1.time_stack.0.norm2.weight": "blocks.195.norm2.weight", - "model.diffusion_model.output_blocks.11.1.time_stack.0.norm3.bias": "blocks.195.norm_out.bias", - 
"model.diffusion_model.output_blocks.11.1.time_stack.0.norm3.weight": "blocks.195.norm_out.weight", - "model.diffusion_model.output_blocks.11.1.time_stack.0.norm_in.bias": "blocks.195.norm_in.bias", - "model.diffusion_model.output_blocks.11.1.time_stack.0.norm_in.weight": "blocks.195.norm_in.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn1.to_k.weight": "blocks.193.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.193.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.193.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn1.to_q.weight": "blocks.193.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn1.to_v.weight": "blocks.193.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn2.to_k.weight": "blocks.193.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.193.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.193.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn2.to_q.weight": "blocks.193.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.attn2.to_v.weight": "blocks.193.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.193.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.193.transformer_blocks.0.act_fn.proj.weight", - 
"model.diffusion_model.output_blocks.11.1.transformer_blocks.0.ff.net.2.bias": "blocks.193.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.ff.net.2.weight": "blocks.193.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.norm1.bias": "blocks.193.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.norm1.weight": "blocks.193.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.norm2.bias": "blocks.193.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.norm2.weight": "blocks.193.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.norm3.bias": "blocks.193.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.11.1.transformer_blocks.0.norm3.weight": "blocks.193.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.2.0.emb_layers.1.bias": "blocks.100.time_emb_proj.bias", - "model.diffusion_model.output_blocks.2.0.emb_layers.1.weight": "blocks.100.time_emb_proj.weight", - "model.diffusion_model.output_blocks.2.0.in_layers.0.bias": "blocks.100.norm1.bias", - "model.diffusion_model.output_blocks.2.0.in_layers.0.weight": "blocks.100.norm1.weight", - "model.diffusion_model.output_blocks.2.0.in_layers.2.bias": "blocks.100.conv1.bias", - "model.diffusion_model.output_blocks.2.0.in_layers.2.weight": "blocks.100.conv1.weight", - "model.diffusion_model.output_blocks.2.0.out_layers.0.bias": "blocks.100.norm2.bias", - "model.diffusion_model.output_blocks.2.0.out_layers.0.weight": "blocks.100.norm2.weight", - "model.diffusion_model.output_blocks.2.0.out_layers.3.bias": "blocks.100.conv2.bias", - "model.diffusion_model.output_blocks.2.0.out_layers.3.weight": "blocks.100.conv2.weight", - "model.diffusion_model.output_blocks.2.0.skip_connection.bias": 
"blocks.100.conv_shortcut.bias", - "model.diffusion_model.output_blocks.2.0.skip_connection.weight": "blocks.100.conv_shortcut.weight", - "model.diffusion_model.output_blocks.2.0.time_mixer.mix_factor": "blocks.103.mix_factor", - "model.diffusion_model.output_blocks.2.0.time_stack.emb_layers.1.bias": "blocks.102.time_emb_proj.bias", - "model.diffusion_model.output_blocks.2.0.time_stack.emb_layers.1.weight": "blocks.102.time_emb_proj.weight", - "model.diffusion_model.output_blocks.2.0.time_stack.in_layers.0.bias": "blocks.102.norm1.bias", - "model.diffusion_model.output_blocks.2.0.time_stack.in_layers.0.weight": "blocks.102.norm1.weight", - "model.diffusion_model.output_blocks.2.0.time_stack.in_layers.2.bias": "blocks.102.conv1.bias", - "model.diffusion_model.output_blocks.2.0.time_stack.in_layers.2.weight": "blocks.102.conv1.weight", - "model.diffusion_model.output_blocks.2.0.time_stack.out_layers.0.bias": "blocks.102.norm2.bias", - "model.diffusion_model.output_blocks.2.0.time_stack.out_layers.0.weight": "blocks.102.norm2.weight", - "model.diffusion_model.output_blocks.2.0.time_stack.out_layers.3.bias": "blocks.102.conv2.bias", - "model.diffusion_model.output_blocks.2.0.time_stack.out_layers.3.weight": "blocks.102.conv2.weight", - "model.diffusion_model.output_blocks.2.1.conv.bias": "blocks.104.conv.bias", - "model.diffusion_model.output_blocks.2.1.conv.weight": "blocks.104.conv.weight", - "model.diffusion_model.output_blocks.3.0.emb_layers.1.bias": "blocks.106.time_emb_proj.bias", - "model.diffusion_model.output_blocks.3.0.emb_layers.1.weight": "blocks.106.time_emb_proj.weight", - "model.diffusion_model.output_blocks.3.0.in_layers.0.bias": "blocks.106.norm1.bias", - "model.diffusion_model.output_blocks.3.0.in_layers.0.weight": "blocks.106.norm1.weight", - "model.diffusion_model.output_blocks.3.0.in_layers.2.bias": "blocks.106.conv1.bias", - "model.diffusion_model.output_blocks.3.0.in_layers.2.weight": "blocks.106.conv1.weight", - 
"model.diffusion_model.output_blocks.3.0.out_layers.0.bias": "blocks.106.norm2.bias", - "model.diffusion_model.output_blocks.3.0.out_layers.0.weight": "blocks.106.norm2.weight", - "model.diffusion_model.output_blocks.3.0.out_layers.3.bias": "blocks.106.conv2.bias", - "model.diffusion_model.output_blocks.3.0.out_layers.3.weight": "blocks.106.conv2.weight", - "model.diffusion_model.output_blocks.3.0.skip_connection.bias": "blocks.106.conv_shortcut.bias", - "model.diffusion_model.output_blocks.3.0.skip_connection.weight": "blocks.106.conv_shortcut.weight", - "model.diffusion_model.output_blocks.3.0.time_mixer.mix_factor": "blocks.109.mix_factor", - "model.diffusion_model.output_blocks.3.0.time_stack.emb_layers.1.bias": "blocks.108.time_emb_proj.bias", - "model.diffusion_model.output_blocks.3.0.time_stack.emb_layers.1.weight": "blocks.108.time_emb_proj.weight", - "model.diffusion_model.output_blocks.3.0.time_stack.in_layers.0.bias": "blocks.108.norm1.bias", - "model.diffusion_model.output_blocks.3.0.time_stack.in_layers.0.weight": "blocks.108.norm1.weight", - "model.diffusion_model.output_blocks.3.0.time_stack.in_layers.2.bias": "blocks.108.conv1.bias", - "model.diffusion_model.output_blocks.3.0.time_stack.in_layers.2.weight": "blocks.108.conv1.weight", - "model.diffusion_model.output_blocks.3.0.time_stack.out_layers.0.bias": "blocks.108.norm2.bias", - "model.diffusion_model.output_blocks.3.0.time_stack.out_layers.0.weight": "blocks.108.norm2.weight", - "model.diffusion_model.output_blocks.3.0.time_stack.out_layers.3.bias": "blocks.108.conv2.bias", - "model.diffusion_model.output_blocks.3.0.time_stack.out_layers.3.weight": "blocks.108.conv2.weight", - "model.diffusion_model.output_blocks.3.1.norm.bias": "blocks.111.norm.bias", - "model.diffusion_model.output_blocks.3.1.norm.weight": "blocks.111.norm.weight", - "model.diffusion_model.output_blocks.3.1.proj_in.bias": "blocks.111.proj_in.bias", - "model.diffusion_model.output_blocks.3.1.proj_in.weight": 
"blocks.111.proj_in.weight", - "model.diffusion_model.output_blocks.3.1.proj_out.bias": "blocks.114.proj.bias", - "model.diffusion_model.output_blocks.3.1.proj_out.weight": "blocks.114.proj.weight", - "model.diffusion_model.output_blocks.3.1.time_mixer.mix_factor": "blocks.114.mix_factor", - "model.diffusion_model.output_blocks.3.1.time_pos_embed.0.bias": "blocks.113.positional_embedding_proj.0.bias", - "model.diffusion_model.output_blocks.3.1.time_pos_embed.0.weight": "blocks.113.positional_embedding_proj.0.weight", - "model.diffusion_model.output_blocks.3.1.time_pos_embed.2.bias": "blocks.113.positional_embedding_proj.2.bias", - "model.diffusion_model.output_blocks.3.1.time_pos_embed.2.weight": "blocks.113.positional_embedding_proj.2.weight", - "model.diffusion_model.output_blocks.3.1.time_stack.0.attn1.to_k.weight": "blocks.113.attn1.to_k.weight", - "model.diffusion_model.output_blocks.3.1.time_stack.0.attn1.to_out.0.bias": "blocks.113.attn1.to_out.bias", - "model.diffusion_model.output_blocks.3.1.time_stack.0.attn1.to_out.0.weight": "blocks.113.attn1.to_out.weight", - "model.diffusion_model.output_blocks.3.1.time_stack.0.attn1.to_q.weight": "blocks.113.attn1.to_q.weight", - "model.diffusion_model.output_blocks.3.1.time_stack.0.attn1.to_v.weight": "blocks.113.attn1.to_v.weight", - "model.diffusion_model.output_blocks.3.1.time_stack.0.attn2.to_k.weight": "blocks.113.attn2.to_k.weight", - "model.diffusion_model.output_blocks.3.1.time_stack.0.attn2.to_out.0.bias": "blocks.113.attn2.to_out.bias", - "model.diffusion_model.output_blocks.3.1.time_stack.0.attn2.to_out.0.weight": "blocks.113.attn2.to_out.weight", - "model.diffusion_model.output_blocks.3.1.time_stack.0.attn2.to_q.weight": "blocks.113.attn2.to_q.weight", - "model.diffusion_model.output_blocks.3.1.time_stack.0.attn2.to_v.weight": "blocks.113.attn2.to_v.weight", - "model.diffusion_model.output_blocks.3.1.time_stack.0.ff.net.0.proj.bias": "blocks.113.act_fn_out.proj.bias", - 
"model.diffusion_model.output_blocks.3.1.time_stack.0.ff.net.0.proj.weight": "blocks.113.act_fn_out.proj.weight", - "model.diffusion_model.output_blocks.3.1.time_stack.0.ff.net.2.bias": "blocks.113.ff_out.bias", - "model.diffusion_model.output_blocks.3.1.time_stack.0.ff.net.2.weight": "blocks.113.ff_out.weight", - "model.diffusion_model.output_blocks.3.1.time_stack.0.ff_in.net.0.proj.bias": "blocks.113.act_fn_in.proj.bias", - "model.diffusion_model.output_blocks.3.1.time_stack.0.ff_in.net.0.proj.weight": "blocks.113.act_fn_in.proj.weight", - "model.diffusion_model.output_blocks.3.1.time_stack.0.ff_in.net.2.bias": "blocks.113.ff_in.bias", - "model.diffusion_model.output_blocks.3.1.time_stack.0.ff_in.net.2.weight": "blocks.113.ff_in.weight", - "model.diffusion_model.output_blocks.3.1.time_stack.0.norm1.bias": "blocks.113.norm1.bias", - "model.diffusion_model.output_blocks.3.1.time_stack.0.norm1.weight": "blocks.113.norm1.weight", - "model.diffusion_model.output_blocks.3.1.time_stack.0.norm2.bias": "blocks.113.norm2.bias", - "model.diffusion_model.output_blocks.3.1.time_stack.0.norm2.weight": "blocks.113.norm2.weight", - "model.diffusion_model.output_blocks.3.1.time_stack.0.norm3.bias": "blocks.113.norm_out.bias", - "model.diffusion_model.output_blocks.3.1.time_stack.0.norm3.weight": "blocks.113.norm_out.weight", - "model.diffusion_model.output_blocks.3.1.time_stack.0.norm_in.bias": "blocks.113.norm_in.bias", - "model.diffusion_model.output_blocks.3.1.time_stack.0.norm_in.weight": "blocks.113.norm_in.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn1.to_k.weight": "blocks.111.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.111.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.111.transformer_blocks.0.attn1.to_out.weight", - 
"model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn1.to_q.weight": "blocks.111.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn1.to_v.weight": "blocks.111.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn2.to_k.weight": "blocks.111.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.111.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.111.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn2.to_q.weight": "blocks.111.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.attn2.to_v.weight": "blocks.111.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.111.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.111.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.ff.net.2.bias": "blocks.111.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.ff.net.2.weight": "blocks.111.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm1.bias": "blocks.111.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm1.weight": "blocks.111.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm2.bias": "blocks.111.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm2.weight": "blocks.111.transformer_blocks.0.norm2.weight", - 
"model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm3.bias": "blocks.111.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.3.1.transformer_blocks.0.norm3.weight": "blocks.111.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.4.0.emb_layers.1.bias": "blocks.116.time_emb_proj.bias", - "model.diffusion_model.output_blocks.4.0.emb_layers.1.weight": "blocks.116.time_emb_proj.weight", - "model.diffusion_model.output_blocks.4.0.in_layers.0.bias": "blocks.116.norm1.bias", - "model.diffusion_model.output_blocks.4.0.in_layers.0.weight": "blocks.116.norm1.weight", - "model.diffusion_model.output_blocks.4.0.in_layers.2.bias": "blocks.116.conv1.bias", - "model.diffusion_model.output_blocks.4.0.in_layers.2.weight": "blocks.116.conv1.weight", - "model.diffusion_model.output_blocks.4.0.out_layers.0.bias": "blocks.116.norm2.bias", - "model.diffusion_model.output_blocks.4.0.out_layers.0.weight": "blocks.116.norm2.weight", - "model.diffusion_model.output_blocks.4.0.out_layers.3.bias": "blocks.116.conv2.bias", - "model.diffusion_model.output_blocks.4.0.out_layers.3.weight": "blocks.116.conv2.weight", - "model.diffusion_model.output_blocks.4.0.skip_connection.bias": "blocks.116.conv_shortcut.bias", - "model.diffusion_model.output_blocks.4.0.skip_connection.weight": "blocks.116.conv_shortcut.weight", - "model.diffusion_model.output_blocks.4.0.time_mixer.mix_factor": "blocks.119.mix_factor", - "model.diffusion_model.output_blocks.4.0.time_stack.emb_layers.1.bias": "blocks.118.time_emb_proj.bias", - "model.diffusion_model.output_blocks.4.0.time_stack.emb_layers.1.weight": "blocks.118.time_emb_proj.weight", - "model.diffusion_model.output_blocks.4.0.time_stack.in_layers.0.bias": "blocks.118.norm1.bias", - "model.diffusion_model.output_blocks.4.0.time_stack.in_layers.0.weight": "blocks.118.norm1.weight", - "model.diffusion_model.output_blocks.4.0.time_stack.in_layers.2.bias": "blocks.118.conv1.bias", - 
"model.diffusion_model.output_blocks.4.0.time_stack.in_layers.2.weight": "blocks.118.conv1.weight", - "model.diffusion_model.output_blocks.4.0.time_stack.out_layers.0.bias": "blocks.118.norm2.bias", - "model.diffusion_model.output_blocks.4.0.time_stack.out_layers.0.weight": "blocks.118.norm2.weight", - "model.diffusion_model.output_blocks.4.0.time_stack.out_layers.3.bias": "blocks.118.conv2.bias", - "model.diffusion_model.output_blocks.4.0.time_stack.out_layers.3.weight": "blocks.118.conv2.weight", - "model.diffusion_model.output_blocks.4.1.norm.bias": "blocks.121.norm.bias", - "model.diffusion_model.output_blocks.4.1.norm.weight": "blocks.121.norm.weight", - "model.diffusion_model.output_blocks.4.1.proj_in.bias": "blocks.121.proj_in.bias", - "model.diffusion_model.output_blocks.4.1.proj_in.weight": "blocks.121.proj_in.weight", - "model.diffusion_model.output_blocks.4.1.proj_out.bias": "blocks.124.proj.bias", - "model.diffusion_model.output_blocks.4.1.proj_out.weight": "blocks.124.proj.weight", - "model.diffusion_model.output_blocks.4.1.time_mixer.mix_factor": "blocks.124.mix_factor", - "model.diffusion_model.output_blocks.4.1.time_pos_embed.0.bias": "blocks.123.positional_embedding_proj.0.bias", - "model.diffusion_model.output_blocks.4.1.time_pos_embed.0.weight": "blocks.123.positional_embedding_proj.0.weight", - "model.diffusion_model.output_blocks.4.1.time_pos_embed.2.bias": "blocks.123.positional_embedding_proj.2.bias", - "model.diffusion_model.output_blocks.4.1.time_pos_embed.2.weight": "blocks.123.positional_embedding_proj.2.weight", - "model.diffusion_model.output_blocks.4.1.time_stack.0.attn1.to_k.weight": "blocks.123.attn1.to_k.weight", - "model.diffusion_model.output_blocks.4.1.time_stack.0.attn1.to_out.0.bias": "blocks.123.attn1.to_out.bias", - "model.diffusion_model.output_blocks.4.1.time_stack.0.attn1.to_out.0.weight": "blocks.123.attn1.to_out.weight", - "model.diffusion_model.output_blocks.4.1.time_stack.0.attn1.to_q.weight": 
"blocks.123.attn1.to_q.weight", - "model.diffusion_model.output_blocks.4.1.time_stack.0.attn1.to_v.weight": "blocks.123.attn1.to_v.weight", - "model.diffusion_model.output_blocks.4.1.time_stack.0.attn2.to_k.weight": "blocks.123.attn2.to_k.weight", - "model.diffusion_model.output_blocks.4.1.time_stack.0.attn2.to_out.0.bias": "blocks.123.attn2.to_out.bias", - "model.diffusion_model.output_blocks.4.1.time_stack.0.attn2.to_out.0.weight": "blocks.123.attn2.to_out.weight", - "model.diffusion_model.output_blocks.4.1.time_stack.0.attn2.to_q.weight": "blocks.123.attn2.to_q.weight", - "model.diffusion_model.output_blocks.4.1.time_stack.0.attn2.to_v.weight": "blocks.123.attn2.to_v.weight", - "model.diffusion_model.output_blocks.4.1.time_stack.0.ff.net.0.proj.bias": "blocks.123.act_fn_out.proj.bias", - "model.diffusion_model.output_blocks.4.1.time_stack.0.ff.net.0.proj.weight": "blocks.123.act_fn_out.proj.weight", - "model.diffusion_model.output_blocks.4.1.time_stack.0.ff.net.2.bias": "blocks.123.ff_out.bias", - "model.diffusion_model.output_blocks.4.1.time_stack.0.ff.net.2.weight": "blocks.123.ff_out.weight", - "model.diffusion_model.output_blocks.4.1.time_stack.0.ff_in.net.0.proj.bias": "blocks.123.act_fn_in.proj.bias", - "model.diffusion_model.output_blocks.4.1.time_stack.0.ff_in.net.0.proj.weight": "blocks.123.act_fn_in.proj.weight", - "model.diffusion_model.output_blocks.4.1.time_stack.0.ff_in.net.2.bias": "blocks.123.ff_in.bias", - "model.diffusion_model.output_blocks.4.1.time_stack.0.ff_in.net.2.weight": "blocks.123.ff_in.weight", - "model.diffusion_model.output_blocks.4.1.time_stack.0.norm1.bias": "blocks.123.norm1.bias", - "model.diffusion_model.output_blocks.4.1.time_stack.0.norm1.weight": "blocks.123.norm1.weight", - "model.diffusion_model.output_blocks.4.1.time_stack.0.norm2.bias": "blocks.123.norm2.bias", - "model.diffusion_model.output_blocks.4.1.time_stack.0.norm2.weight": "blocks.123.norm2.weight", - 
"model.diffusion_model.output_blocks.4.1.time_stack.0.norm3.bias": "blocks.123.norm_out.bias", - "model.diffusion_model.output_blocks.4.1.time_stack.0.norm3.weight": "blocks.123.norm_out.weight", - "model.diffusion_model.output_blocks.4.1.time_stack.0.norm_in.bias": "blocks.123.norm_in.bias", - "model.diffusion_model.output_blocks.4.1.time_stack.0.norm_in.weight": "blocks.123.norm_in.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn1.to_k.weight": "blocks.121.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.121.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.121.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn1.to_q.weight": "blocks.121.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn1.to_v.weight": "blocks.121.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn2.to_k.weight": "blocks.121.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.121.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.121.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn2.to_q.weight": "blocks.121.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.attn2.to_v.weight": "blocks.121.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.121.transformer_blocks.0.act_fn.proj.bias", - 
"model.diffusion_model.output_blocks.4.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.121.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.ff.net.2.bias": "blocks.121.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.ff.net.2.weight": "blocks.121.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm1.bias": "blocks.121.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm1.weight": "blocks.121.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm2.bias": "blocks.121.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm2.weight": "blocks.121.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm3.bias": "blocks.121.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.4.1.transformer_blocks.0.norm3.weight": "blocks.121.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.5.0.emb_layers.1.bias": "blocks.126.time_emb_proj.bias", - "model.diffusion_model.output_blocks.5.0.emb_layers.1.weight": "blocks.126.time_emb_proj.weight", - "model.diffusion_model.output_blocks.5.0.in_layers.0.bias": "blocks.126.norm1.bias", - "model.diffusion_model.output_blocks.5.0.in_layers.0.weight": "blocks.126.norm1.weight", - "model.diffusion_model.output_blocks.5.0.in_layers.2.bias": "blocks.126.conv1.bias", - "model.diffusion_model.output_blocks.5.0.in_layers.2.weight": "blocks.126.conv1.weight", - "model.diffusion_model.output_blocks.5.0.out_layers.0.bias": "blocks.126.norm2.bias", - "model.diffusion_model.output_blocks.5.0.out_layers.0.weight": "blocks.126.norm2.weight", - "model.diffusion_model.output_blocks.5.0.out_layers.3.bias": "blocks.126.conv2.bias", - 
"model.diffusion_model.output_blocks.5.0.out_layers.3.weight": "blocks.126.conv2.weight", - "model.diffusion_model.output_blocks.5.0.skip_connection.bias": "blocks.126.conv_shortcut.bias", - "model.diffusion_model.output_blocks.5.0.skip_connection.weight": "blocks.126.conv_shortcut.weight", - "model.diffusion_model.output_blocks.5.0.time_mixer.mix_factor": "blocks.129.mix_factor", - "model.diffusion_model.output_blocks.5.0.time_stack.emb_layers.1.bias": "blocks.128.time_emb_proj.bias", - "model.diffusion_model.output_blocks.5.0.time_stack.emb_layers.1.weight": "blocks.128.time_emb_proj.weight", - "model.diffusion_model.output_blocks.5.0.time_stack.in_layers.0.bias": "blocks.128.norm1.bias", - "model.diffusion_model.output_blocks.5.0.time_stack.in_layers.0.weight": "blocks.128.norm1.weight", - "model.diffusion_model.output_blocks.5.0.time_stack.in_layers.2.bias": "blocks.128.conv1.bias", - "model.diffusion_model.output_blocks.5.0.time_stack.in_layers.2.weight": "blocks.128.conv1.weight", - "model.diffusion_model.output_blocks.5.0.time_stack.out_layers.0.bias": "blocks.128.norm2.bias", - "model.diffusion_model.output_blocks.5.0.time_stack.out_layers.0.weight": "blocks.128.norm2.weight", - "model.diffusion_model.output_blocks.5.0.time_stack.out_layers.3.bias": "blocks.128.conv2.bias", - "model.diffusion_model.output_blocks.5.0.time_stack.out_layers.3.weight": "blocks.128.conv2.weight", - "model.diffusion_model.output_blocks.5.1.norm.bias": "blocks.131.norm.bias", - "model.diffusion_model.output_blocks.5.1.norm.weight": "blocks.131.norm.weight", - "model.diffusion_model.output_blocks.5.1.proj_in.bias": "blocks.131.proj_in.bias", - "model.diffusion_model.output_blocks.5.1.proj_in.weight": "blocks.131.proj_in.weight", - "model.diffusion_model.output_blocks.5.1.proj_out.bias": "blocks.134.proj.bias", - "model.diffusion_model.output_blocks.5.1.proj_out.weight": "blocks.134.proj.weight", - "model.diffusion_model.output_blocks.5.1.time_mixer.mix_factor": 
"blocks.134.mix_factor", - "model.diffusion_model.output_blocks.5.1.time_pos_embed.0.bias": "blocks.133.positional_embedding_proj.0.bias", - "model.diffusion_model.output_blocks.5.1.time_pos_embed.0.weight": "blocks.133.positional_embedding_proj.0.weight", - "model.diffusion_model.output_blocks.5.1.time_pos_embed.2.bias": "blocks.133.positional_embedding_proj.2.bias", - "model.diffusion_model.output_blocks.5.1.time_pos_embed.2.weight": "blocks.133.positional_embedding_proj.2.weight", - "model.diffusion_model.output_blocks.5.1.time_stack.0.attn1.to_k.weight": "blocks.133.attn1.to_k.weight", - "model.diffusion_model.output_blocks.5.1.time_stack.0.attn1.to_out.0.bias": "blocks.133.attn1.to_out.bias", - "model.diffusion_model.output_blocks.5.1.time_stack.0.attn1.to_out.0.weight": "blocks.133.attn1.to_out.weight", - "model.diffusion_model.output_blocks.5.1.time_stack.0.attn1.to_q.weight": "blocks.133.attn1.to_q.weight", - "model.diffusion_model.output_blocks.5.1.time_stack.0.attn1.to_v.weight": "blocks.133.attn1.to_v.weight", - "model.diffusion_model.output_blocks.5.1.time_stack.0.attn2.to_k.weight": "blocks.133.attn2.to_k.weight", - "model.diffusion_model.output_blocks.5.1.time_stack.0.attn2.to_out.0.bias": "blocks.133.attn2.to_out.bias", - "model.diffusion_model.output_blocks.5.1.time_stack.0.attn2.to_out.0.weight": "blocks.133.attn2.to_out.weight", - "model.diffusion_model.output_blocks.5.1.time_stack.0.attn2.to_q.weight": "blocks.133.attn2.to_q.weight", - "model.diffusion_model.output_blocks.5.1.time_stack.0.attn2.to_v.weight": "blocks.133.attn2.to_v.weight", - "model.diffusion_model.output_blocks.5.1.time_stack.0.ff.net.0.proj.bias": "blocks.133.act_fn_out.proj.bias", - "model.diffusion_model.output_blocks.5.1.time_stack.0.ff.net.0.proj.weight": "blocks.133.act_fn_out.proj.weight", - "model.diffusion_model.output_blocks.5.1.time_stack.0.ff.net.2.bias": "blocks.133.ff_out.bias", - "model.diffusion_model.output_blocks.5.1.time_stack.0.ff.net.2.weight": 
"blocks.133.ff_out.weight", - "model.diffusion_model.output_blocks.5.1.time_stack.0.ff_in.net.0.proj.bias": "blocks.133.act_fn_in.proj.bias", - "model.diffusion_model.output_blocks.5.1.time_stack.0.ff_in.net.0.proj.weight": "blocks.133.act_fn_in.proj.weight", - "model.diffusion_model.output_blocks.5.1.time_stack.0.ff_in.net.2.bias": "blocks.133.ff_in.bias", - "model.diffusion_model.output_blocks.5.1.time_stack.0.ff_in.net.2.weight": "blocks.133.ff_in.weight", - "model.diffusion_model.output_blocks.5.1.time_stack.0.norm1.bias": "blocks.133.norm1.bias", - "model.diffusion_model.output_blocks.5.1.time_stack.0.norm1.weight": "blocks.133.norm1.weight", - "model.diffusion_model.output_blocks.5.1.time_stack.0.norm2.bias": "blocks.133.norm2.bias", - "model.diffusion_model.output_blocks.5.1.time_stack.0.norm2.weight": "blocks.133.norm2.weight", - "model.diffusion_model.output_blocks.5.1.time_stack.0.norm3.bias": "blocks.133.norm_out.bias", - "model.diffusion_model.output_blocks.5.1.time_stack.0.norm3.weight": "blocks.133.norm_out.weight", - "model.diffusion_model.output_blocks.5.1.time_stack.0.norm_in.bias": "blocks.133.norm_in.bias", - "model.diffusion_model.output_blocks.5.1.time_stack.0.norm_in.weight": "blocks.133.norm_in.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn1.to_k.weight": "blocks.131.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.131.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.131.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn1.to_q.weight": "blocks.131.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn1.to_v.weight": "blocks.131.transformer_blocks.0.attn1.to_v.weight", - 
"model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn2.to_k.weight": "blocks.131.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.131.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.131.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn2.to_q.weight": "blocks.131.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.attn2.to_v.weight": "blocks.131.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.131.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.131.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.ff.net.2.bias": "blocks.131.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.ff.net.2.weight": "blocks.131.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm1.bias": "blocks.131.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm1.weight": "blocks.131.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm2.bias": "blocks.131.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm2.weight": "blocks.131.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm3.bias": "blocks.131.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.5.1.transformer_blocks.0.norm3.weight": "blocks.131.transformer_blocks.0.norm3.weight", - 
"model.diffusion_model.output_blocks.5.2.conv.bias": "blocks.135.conv.bias", - "model.diffusion_model.output_blocks.5.2.conv.weight": "blocks.135.conv.weight", - "model.diffusion_model.output_blocks.6.0.emb_layers.1.bias": "blocks.137.time_emb_proj.bias", - "model.diffusion_model.output_blocks.6.0.emb_layers.1.weight": "blocks.137.time_emb_proj.weight", - "model.diffusion_model.output_blocks.6.0.in_layers.0.bias": "blocks.137.norm1.bias", - "model.diffusion_model.output_blocks.6.0.in_layers.0.weight": "blocks.137.norm1.weight", - "model.diffusion_model.output_blocks.6.0.in_layers.2.bias": "blocks.137.conv1.bias", - "model.diffusion_model.output_blocks.6.0.in_layers.2.weight": "blocks.137.conv1.weight", - "model.diffusion_model.output_blocks.6.0.out_layers.0.bias": "blocks.137.norm2.bias", - "model.diffusion_model.output_blocks.6.0.out_layers.0.weight": "blocks.137.norm2.weight", - "model.diffusion_model.output_blocks.6.0.out_layers.3.bias": "blocks.137.conv2.bias", - "model.diffusion_model.output_blocks.6.0.out_layers.3.weight": "blocks.137.conv2.weight", - "model.diffusion_model.output_blocks.6.0.skip_connection.bias": "blocks.137.conv_shortcut.bias", - "model.diffusion_model.output_blocks.6.0.skip_connection.weight": "blocks.137.conv_shortcut.weight", - "model.diffusion_model.output_blocks.6.0.time_mixer.mix_factor": "blocks.140.mix_factor", - "model.diffusion_model.output_blocks.6.0.time_stack.emb_layers.1.bias": "blocks.139.time_emb_proj.bias", - "model.diffusion_model.output_blocks.6.0.time_stack.emb_layers.1.weight": "blocks.139.time_emb_proj.weight", - "model.diffusion_model.output_blocks.6.0.time_stack.in_layers.0.bias": "blocks.139.norm1.bias", - "model.diffusion_model.output_blocks.6.0.time_stack.in_layers.0.weight": "blocks.139.norm1.weight", - "model.diffusion_model.output_blocks.6.0.time_stack.in_layers.2.bias": "blocks.139.conv1.bias", - "model.diffusion_model.output_blocks.6.0.time_stack.in_layers.2.weight": "blocks.139.conv1.weight", - 
"model.diffusion_model.output_blocks.6.0.time_stack.out_layers.0.bias": "blocks.139.norm2.bias", - "model.diffusion_model.output_blocks.6.0.time_stack.out_layers.0.weight": "blocks.139.norm2.weight", - "model.diffusion_model.output_blocks.6.0.time_stack.out_layers.3.bias": "blocks.139.conv2.bias", - "model.diffusion_model.output_blocks.6.0.time_stack.out_layers.3.weight": "blocks.139.conv2.weight", - "model.diffusion_model.output_blocks.6.1.norm.bias": "blocks.142.norm.bias", - "model.diffusion_model.output_blocks.6.1.norm.weight": "blocks.142.norm.weight", - "model.diffusion_model.output_blocks.6.1.proj_in.bias": "blocks.142.proj_in.bias", - "model.diffusion_model.output_blocks.6.1.proj_in.weight": "blocks.142.proj_in.weight", - "model.diffusion_model.output_blocks.6.1.proj_out.bias": "blocks.145.proj.bias", - "model.diffusion_model.output_blocks.6.1.proj_out.weight": "blocks.145.proj.weight", - "model.diffusion_model.output_blocks.6.1.time_mixer.mix_factor": "blocks.145.mix_factor", - "model.diffusion_model.output_blocks.6.1.time_pos_embed.0.bias": "blocks.144.positional_embedding_proj.0.bias", - "model.diffusion_model.output_blocks.6.1.time_pos_embed.0.weight": "blocks.144.positional_embedding_proj.0.weight", - "model.diffusion_model.output_blocks.6.1.time_pos_embed.2.bias": "blocks.144.positional_embedding_proj.2.bias", - "model.diffusion_model.output_blocks.6.1.time_pos_embed.2.weight": "blocks.144.positional_embedding_proj.2.weight", - "model.diffusion_model.output_blocks.6.1.time_stack.0.attn1.to_k.weight": "blocks.144.attn1.to_k.weight", - "model.diffusion_model.output_blocks.6.1.time_stack.0.attn1.to_out.0.bias": "blocks.144.attn1.to_out.bias", - "model.diffusion_model.output_blocks.6.1.time_stack.0.attn1.to_out.0.weight": "blocks.144.attn1.to_out.weight", - "model.diffusion_model.output_blocks.6.1.time_stack.0.attn1.to_q.weight": "blocks.144.attn1.to_q.weight", - "model.diffusion_model.output_blocks.6.1.time_stack.0.attn1.to_v.weight": 
"blocks.144.attn1.to_v.weight", - "model.diffusion_model.output_blocks.6.1.time_stack.0.attn2.to_k.weight": "blocks.144.attn2.to_k.weight", - "model.diffusion_model.output_blocks.6.1.time_stack.0.attn2.to_out.0.bias": "blocks.144.attn2.to_out.bias", - "model.diffusion_model.output_blocks.6.1.time_stack.0.attn2.to_out.0.weight": "blocks.144.attn2.to_out.weight", - "model.diffusion_model.output_blocks.6.1.time_stack.0.attn2.to_q.weight": "blocks.144.attn2.to_q.weight", - "model.diffusion_model.output_blocks.6.1.time_stack.0.attn2.to_v.weight": "blocks.144.attn2.to_v.weight", - "model.diffusion_model.output_blocks.6.1.time_stack.0.ff.net.0.proj.bias": "blocks.144.act_fn_out.proj.bias", - "model.diffusion_model.output_blocks.6.1.time_stack.0.ff.net.0.proj.weight": "blocks.144.act_fn_out.proj.weight", - "model.diffusion_model.output_blocks.6.1.time_stack.0.ff.net.2.bias": "blocks.144.ff_out.bias", - "model.diffusion_model.output_blocks.6.1.time_stack.0.ff.net.2.weight": "blocks.144.ff_out.weight", - "model.diffusion_model.output_blocks.6.1.time_stack.0.ff_in.net.0.proj.bias": "blocks.144.act_fn_in.proj.bias", - "model.diffusion_model.output_blocks.6.1.time_stack.0.ff_in.net.0.proj.weight": "blocks.144.act_fn_in.proj.weight", - "model.diffusion_model.output_blocks.6.1.time_stack.0.ff_in.net.2.bias": "blocks.144.ff_in.bias", - "model.diffusion_model.output_blocks.6.1.time_stack.0.ff_in.net.2.weight": "blocks.144.ff_in.weight", - "model.diffusion_model.output_blocks.6.1.time_stack.0.norm1.bias": "blocks.144.norm1.bias", - "model.diffusion_model.output_blocks.6.1.time_stack.0.norm1.weight": "blocks.144.norm1.weight", - "model.diffusion_model.output_blocks.6.1.time_stack.0.norm2.bias": "blocks.144.norm2.bias", - "model.diffusion_model.output_blocks.6.1.time_stack.0.norm2.weight": "blocks.144.norm2.weight", - "model.diffusion_model.output_blocks.6.1.time_stack.0.norm3.bias": "blocks.144.norm_out.bias", - "model.diffusion_model.output_blocks.6.1.time_stack.0.norm3.weight": 
"blocks.144.norm_out.weight", - "model.diffusion_model.output_blocks.6.1.time_stack.0.norm_in.bias": "blocks.144.norm_in.bias", - "model.diffusion_model.output_blocks.6.1.time_stack.0.norm_in.weight": "blocks.144.norm_in.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn1.to_k.weight": "blocks.142.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.142.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.142.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn1.to_q.weight": "blocks.142.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn1.to_v.weight": "blocks.142.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn2.to_k.weight": "blocks.142.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.142.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.142.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn2.to_q.weight": "blocks.142.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.attn2.to_v.weight": "blocks.142.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.142.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.142.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.ff.net.2.bias": 
"blocks.142.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.ff.net.2.weight": "blocks.142.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.norm1.bias": "blocks.142.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.norm1.weight": "blocks.142.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.norm2.bias": "blocks.142.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.norm2.weight": "blocks.142.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.norm3.bias": "blocks.142.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.6.1.transformer_blocks.0.norm3.weight": "blocks.142.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.7.0.emb_layers.1.bias": "blocks.147.time_emb_proj.bias", - "model.diffusion_model.output_blocks.7.0.emb_layers.1.weight": "blocks.147.time_emb_proj.weight", - "model.diffusion_model.output_blocks.7.0.in_layers.0.bias": "blocks.147.norm1.bias", - "model.diffusion_model.output_blocks.7.0.in_layers.0.weight": "blocks.147.norm1.weight", - "model.diffusion_model.output_blocks.7.0.in_layers.2.bias": "blocks.147.conv1.bias", - "model.diffusion_model.output_blocks.7.0.in_layers.2.weight": "blocks.147.conv1.weight", - "model.diffusion_model.output_blocks.7.0.out_layers.0.bias": "blocks.147.norm2.bias", - "model.diffusion_model.output_blocks.7.0.out_layers.0.weight": "blocks.147.norm2.weight", - "model.diffusion_model.output_blocks.7.0.out_layers.3.bias": "blocks.147.conv2.bias", - "model.diffusion_model.output_blocks.7.0.out_layers.3.weight": "blocks.147.conv2.weight", - "model.diffusion_model.output_blocks.7.0.skip_connection.bias": "blocks.147.conv_shortcut.bias", - "model.diffusion_model.output_blocks.7.0.skip_connection.weight": 
"blocks.147.conv_shortcut.weight", - "model.diffusion_model.output_blocks.7.0.time_mixer.mix_factor": "blocks.150.mix_factor", - "model.diffusion_model.output_blocks.7.0.time_stack.emb_layers.1.bias": "blocks.149.time_emb_proj.bias", - "model.diffusion_model.output_blocks.7.0.time_stack.emb_layers.1.weight": "blocks.149.time_emb_proj.weight", - "model.diffusion_model.output_blocks.7.0.time_stack.in_layers.0.bias": "blocks.149.norm1.bias", - "model.diffusion_model.output_blocks.7.0.time_stack.in_layers.0.weight": "blocks.149.norm1.weight", - "model.diffusion_model.output_blocks.7.0.time_stack.in_layers.2.bias": "blocks.149.conv1.bias", - "model.diffusion_model.output_blocks.7.0.time_stack.in_layers.2.weight": "blocks.149.conv1.weight", - "model.diffusion_model.output_blocks.7.0.time_stack.out_layers.0.bias": "blocks.149.norm2.bias", - "model.diffusion_model.output_blocks.7.0.time_stack.out_layers.0.weight": "blocks.149.norm2.weight", - "model.diffusion_model.output_blocks.7.0.time_stack.out_layers.3.bias": "blocks.149.conv2.bias", - "model.diffusion_model.output_blocks.7.0.time_stack.out_layers.3.weight": "blocks.149.conv2.weight", - "model.diffusion_model.output_blocks.7.1.norm.bias": "blocks.152.norm.bias", - "model.diffusion_model.output_blocks.7.1.norm.weight": "blocks.152.norm.weight", - "model.diffusion_model.output_blocks.7.1.proj_in.bias": "blocks.152.proj_in.bias", - "model.diffusion_model.output_blocks.7.1.proj_in.weight": "blocks.152.proj_in.weight", - "model.diffusion_model.output_blocks.7.1.proj_out.bias": "blocks.155.proj.bias", - "model.diffusion_model.output_blocks.7.1.proj_out.weight": "blocks.155.proj.weight", - "model.diffusion_model.output_blocks.7.1.time_mixer.mix_factor": "blocks.155.mix_factor", - "model.diffusion_model.output_blocks.7.1.time_pos_embed.0.bias": "blocks.154.positional_embedding_proj.0.bias", - "model.diffusion_model.output_blocks.7.1.time_pos_embed.0.weight": "blocks.154.positional_embedding_proj.0.weight", - 
"model.diffusion_model.output_blocks.7.1.time_pos_embed.2.bias": "blocks.154.positional_embedding_proj.2.bias", - "model.diffusion_model.output_blocks.7.1.time_pos_embed.2.weight": "blocks.154.positional_embedding_proj.2.weight", - "model.diffusion_model.output_blocks.7.1.time_stack.0.attn1.to_k.weight": "blocks.154.attn1.to_k.weight", - "model.diffusion_model.output_blocks.7.1.time_stack.0.attn1.to_out.0.bias": "blocks.154.attn1.to_out.bias", - "model.diffusion_model.output_blocks.7.1.time_stack.0.attn1.to_out.0.weight": "blocks.154.attn1.to_out.weight", - "model.diffusion_model.output_blocks.7.1.time_stack.0.attn1.to_q.weight": "blocks.154.attn1.to_q.weight", - "model.diffusion_model.output_blocks.7.1.time_stack.0.attn1.to_v.weight": "blocks.154.attn1.to_v.weight", - "model.diffusion_model.output_blocks.7.1.time_stack.0.attn2.to_k.weight": "blocks.154.attn2.to_k.weight", - "model.diffusion_model.output_blocks.7.1.time_stack.0.attn2.to_out.0.bias": "blocks.154.attn2.to_out.bias", - "model.diffusion_model.output_blocks.7.1.time_stack.0.attn2.to_out.0.weight": "blocks.154.attn2.to_out.weight", - "model.diffusion_model.output_blocks.7.1.time_stack.0.attn2.to_q.weight": "blocks.154.attn2.to_q.weight", - "model.diffusion_model.output_blocks.7.1.time_stack.0.attn2.to_v.weight": "blocks.154.attn2.to_v.weight", - "model.diffusion_model.output_blocks.7.1.time_stack.0.ff.net.0.proj.bias": "blocks.154.act_fn_out.proj.bias", - "model.diffusion_model.output_blocks.7.1.time_stack.0.ff.net.0.proj.weight": "blocks.154.act_fn_out.proj.weight", - "model.diffusion_model.output_blocks.7.1.time_stack.0.ff.net.2.bias": "blocks.154.ff_out.bias", - "model.diffusion_model.output_blocks.7.1.time_stack.0.ff.net.2.weight": "blocks.154.ff_out.weight", - "model.diffusion_model.output_blocks.7.1.time_stack.0.ff_in.net.0.proj.bias": "blocks.154.act_fn_in.proj.bias", - "model.diffusion_model.output_blocks.7.1.time_stack.0.ff_in.net.0.proj.weight": "blocks.154.act_fn_in.proj.weight", - 
"model.diffusion_model.output_blocks.7.1.time_stack.0.ff_in.net.2.bias": "blocks.154.ff_in.bias", - "model.diffusion_model.output_blocks.7.1.time_stack.0.ff_in.net.2.weight": "blocks.154.ff_in.weight", - "model.diffusion_model.output_blocks.7.1.time_stack.0.norm1.bias": "blocks.154.norm1.bias", - "model.diffusion_model.output_blocks.7.1.time_stack.0.norm1.weight": "blocks.154.norm1.weight", - "model.diffusion_model.output_blocks.7.1.time_stack.0.norm2.bias": "blocks.154.norm2.bias", - "model.diffusion_model.output_blocks.7.1.time_stack.0.norm2.weight": "blocks.154.norm2.weight", - "model.diffusion_model.output_blocks.7.1.time_stack.0.norm3.bias": "blocks.154.norm_out.bias", - "model.diffusion_model.output_blocks.7.1.time_stack.0.norm3.weight": "blocks.154.norm_out.weight", - "model.diffusion_model.output_blocks.7.1.time_stack.0.norm_in.bias": "blocks.154.norm_in.bias", - "model.diffusion_model.output_blocks.7.1.time_stack.0.norm_in.weight": "blocks.154.norm_in.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn1.to_k.weight": "blocks.152.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.152.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.152.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn1.to_q.weight": "blocks.152.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn1.to_v.weight": "blocks.152.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn2.to_k.weight": "blocks.152.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.152.transformer_blocks.0.attn2.to_out.bias", - 
"model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.152.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn2.to_q.weight": "blocks.152.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.attn2.to_v.weight": "blocks.152.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.152.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.152.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.ff.net.2.bias": "blocks.152.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.ff.net.2.weight": "blocks.152.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.norm1.bias": "blocks.152.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.norm1.weight": "blocks.152.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.norm2.bias": "blocks.152.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.norm2.weight": "blocks.152.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.norm3.bias": "blocks.152.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.7.1.transformer_blocks.0.norm3.weight": "blocks.152.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.8.0.emb_layers.1.bias": "blocks.157.time_emb_proj.bias", - "model.diffusion_model.output_blocks.8.0.emb_layers.1.weight": "blocks.157.time_emb_proj.weight", - "model.diffusion_model.output_blocks.8.0.in_layers.0.bias": "blocks.157.norm1.bias", - 
"model.diffusion_model.output_blocks.8.0.in_layers.0.weight": "blocks.157.norm1.weight", - "model.diffusion_model.output_blocks.8.0.in_layers.2.bias": "blocks.157.conv1.bias", - "model.diffusion_model.output_blocks.8.0.in_layers.2.weight": "blocks.157.conv1.weight", - "model.diffusion_model.output_blocks.8.0.out_layers.0.bias": "blocks.157.norm2.bias", - "model.diffusion_model.output_blocks.8.0.out_layers.0.weight": "blocks.157.norm2.weight", - "model.diffusion_model.output_blocks.8.0.out_layers.3.bias": "blocks.157.conv2.bias", - "model.diffusion_model.output_blocks.8.0.out_layers.3.weight": "blocks.157.conv2.weight", - "model.diffusion_model.output_blocks.8.0.skip_connection.bias": "blocks.157.conv_shortcut.bias", - "model.diffusion_model.output_blocks.8.0.skip_connection.weight": "blocks.157.conv_shortcut.weight", - "model.diffusion_model.output_blocks.8.0.time_mixer.mix_factor": "blocks.160.mix_factor", - "model.diffusion_model.output_blocks.8.0.time_stack.emb_layers.1.bias": "blocks.159.time_emb_proj.bias", - "model.diffusion_model.output_blocks.8.0.time_stack.emb_layers.1.weight": "blocks.159.time_emb_proj.weight", - "model.diffusion_model.output_blocks.8.0.time_stack.in_layers.0.bias": "blocks.159.norm1.bias", - "model.diffusion_model.output_blocks.8.0.time_stack.in_layers.0.weight": "blocks.159.norm1.weight", - "model.diffusion_model.output_blocks.8.0.time_stack.in_layers.2.bias": "blocks.159.conv1.bias", - "model.diffusion_model.output_blocks.8.0.time_stack.in_layers.2.weight": "blocks.159.conv1.weight", - "model.diffusion_model.output_blocks.8.0.time_stack.out_layers.0.bias": "blocks.159.norm2.bias", - "model.diffusion_model.output_blocks.8.0.time_stack.out_layers.0.weight": "blocks.159.norm2.weight", - "model.diffusion_model.output_blocks.8.0.time_stack.out_layers.3.bias": "blocks.159.conv2.bias", - "model.diffusion_model.output_blocks.8.0.time_stack.out_layers.3.weight": "blocks.159.conv2.weight", - "model.diffusion_model.output_blocks.8.1.norm.bias": 
"blocks.162.norm.bias", - "model.diffusion_model.output_blocks.8.1.norm.weight": "blocks.162.norm.weight", - "model.diffusion_model.output_blocks.8.1.proj_in.bias": "blocks.162.proj_in.bias", - "model.diffusion_model.output_blocks.8.1.proj_in.weight": "blocks.162.proj_in.weight", - "model.diffusion_model.output_blocks.8.1.proj_out.bias": "blocks.165.proj.bias", - "model.diffusion_model.output_blocks.8.1.proj_out.weight": "blocks.165.proj.weight", - "model.diffusion_model.output_blocks.8.1.time_mixer.mix_factor": "blocks.165.mix_factor", - "model.diffusion_model.output_blocks.8.1.time_pos_embed.0.bias": "blocks.164.positional_embedding_proj.0.bias", - "model.diffusion_model.output_blocks.8.1.time_pos_embed.0.weight": "blocks.164.positional_embedding_proj.0.weight", - "model.diffusion_model.output_blocks.8.1.time_pos_embed.2.bias": "blocks.164.positional_embedding_proj.2.bias", - "model.diffusion_model.output_blocks.8.1.time_pos_embed.2.weight": "blocks.164.positional_embedding_proj.2.weight", - "model.diffusion_model.output_blocks.8.1.time_stack.0.attn1.to_k.weight": "blocks.164.attn1.to_k.weight", - "model.diffusion_model.output_blocks.8.1.time_stack.0.attn1.to_out.0.bias": "blocks.164.attn1.to_out.bias", - "model.diffusion_model.output_blocks.8.1.time_stack.0.attn1.to_out.0.weight": "blocks.164.attn1.to_out.weight", - "model.diffusion_model.output_blocks.8.1.time_stack.0.attn1.to_q.weight": "blocks.164.attn1.to_q.weight", - "model.diffusion_model.output_blocks.8.1.time_stack.0.attn1.to_v.weight": "blocks.164.attn1.to_v.weight", - "model.diffusion_model.output_blocks.8.1.time_stack.0.attn2.to_k.weight": "blocks.164.attn2.to_k.weight", - "model.diffusion_model.output_blocks.8.1.time_stack.0.attn2.to_out.0.bias": "blocks.164.attn2.to_out.bias", - "model.diffusion_model.output_blocks.8.1.time_stack.0.attn2.to_out.0.weight": "blocks.164.attn2.to_out.weight", - "model.diffusion_model.output_blocks.8.1.time_stack.0.attn2.to_q.weight": "blocks.164.attn2.to_q.weight", - 
"model.diffusion_model.output_blocks.8.1.time_stack.0.attn2.to_v.weight": "blocks.164.attn2.to_v.weight", - "model.diffusion_model.output_blocks.8.1.time_stack.0.ff.net.0.proj.bias": "blocks.164.act_fn_out.proj.bias", - "model.diffusion_model.output_blocks.8.1.time_stack.0.ff.net.0.proj.weight": "blocks.164.act_fn_out.proj.weight", - "model.diffusion_model.output_blocks.8.1.time_stack.0.ff.net.2.bias": "blocks.164.ff_out.bias", - "model.diffusion_model.output_blocks.8.1.time_stack.0.ff.net.2.weight": "blocks.164.ff_out.weight", - "model.diffusion_model.output_blocks.8.1.time_stack.0.ff_in.net.0.proj.bias": "blocks.164.act_fn_in.proj.bias", - "model.diffusion_model.output_blocks.8.1.time_stack.0.ff_in.net.0.proj.weight": "blocks.164.act_fn_in.proj.weight", - "model.diffusion_model.output_blocks.8.1.time_stack.0.ff_in.net.2.bias": "blocks.164.ff_in.bias", - "model.diffusion_model.output_blocks.8.1.time_stack.0.ff_in.net.2.weight": "blocks.164.ff_in.weight", - "model.diffusion_model.output_blocks.8.1.time_stack.0.norm1.bias": "blocks.164.norm1.bias", - "model.diffusion_model.output_blocks.8.1.time_stack.0.norm1.weight": "blocks.164.norm1.weight", - "model.diffusion_model.output_blocks.8.1.time_stack.0.norm2.bias": "blocks.164.norm2.bias", - "model.diffusion_model.output_blocks.8.1.time_stack.0.norm2.weight": "blocks.164.norm2.weight", - "model.diffusion_model.output_blocks.8.1.time_stack.0.norm3.bias": "blocks.164.norm_out.bias", - "model.diffusion_model.output_blocks.8.1.time_stack.0.norm3.weight": "blocks.164.norm_out.weight", - "model.diffusion_model.output_blocks.8.1.time_stack.0.norm_in.bias": "blocks.164.norm_in.bias", - "model.diffusion_model.output_blocks.8.1.time_stack.0.norm_in.weight": "blocks.164.norm_in.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn1.to_k.weight": "blocks.162.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn1.to_out.0.bias": 
"blocks.162.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.162.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn1.to_q.weight": "blocks.162.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn1.to_v.weight": "blocks.162.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn2.to_k.weight": "blocks.162.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.162.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.162.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn2.to_q.weight": "blocks.162.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.attn2.to_v.weight": "blocks.162.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.162.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.162.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.ff.net.2.bias": "blocks.162.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.ff.net.2.weight": "blocks.162.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.norm1.bias": "blocks.162.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.norm1.weight": "blocks.162.transformer_blocks.0.norm1.weight", - 
"model.diffusion_model.output_blocks.8.1.transformer_blocks.0.norm2.bias": "blocks.162.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.norm2.weight": "blocks.162.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.norm3.bias": "blocks.162.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.8.1.transformer_blocks.0.norm3.weight": "blocks.162.transformer_blocks.0.norm3.weight", - "model.diffusion_model.output_blocks.8.2.conv.bias": "blocks.166.conv.bias", - "model.diffusion_model.output_blocks.8.2.conv.weight": "blocks.166.conv.weight", - "model.diffusion_model.output_blocks.9.0.emb_layers.1.bias": "blocks.168.time_emb_proj.bias", - "model.diffusion_model.output_blocks.9.0.emb_layers.1.weight": "blocks.168.time_emb_proj.weight", - "model.diffusion_model.output_blocks.9.0.in_layers.0.bias": "blocks.168.norm1.bias", - "model.diffusion_model.output_blocks.9.0.in_layers.0.weight": "blocks.168.norm1.weight", - "model.diffusion_model.output_blocks.9.0.in_layers.2.bias": "blocks.168.conv1.bias", - "model.diffusion_model.output_blocks.9.0.in_layers.2.weight": "blocks.168.conv1.weight", - "model.diffusion_model.output_blocks.9.0.out_layers.0.bias": "blocks.168.norm2.bias", - "model.diffusion_model.output_blocks.9.0.out_layers.0.weight": "blocks.168.norm2.weight", - "model.diffusion_model.output_blocks.9.0.out_layers.3.bias": "blocks.168.conv2.bias", - "model.diffusion_model.output_blocks.9.0.out_layers.3.weight": "blocks.168.conv2.weight", - "model.diffusion_model.output_blocks.9.0.skip_connection.bias": "blocks.168.conv_shortcut.bias", - "model.diffusion_model.output_blocks.9.0.skip_connection.weight": "blocks.168.conv_shortcut.weight", - "model.diffusion_model.output_blocks.9.0.time_mixer.mix_factor": "blocks.171.mix_factor", - "model.diffusion_model.output_blocks.9.0.time_stack.emb_layers.1.bias": "blocks.170.time_emb_proj.bias", - 
"model.diffusion_model.output_blocks.9.0.time_stack.emb_layers.1.weight": "blocks.170.time_emb_proj.weight", - "model.diffusion_model.output_blocks.9.0.time_stack.in_layers.0.bias": "blocks.170.norm1.bias", - "model.diffusion_model.output_blocks.9.0.time_stack.in_layers.0.weight": "blocks.170.norm1.weight", - "model.diffusion_model.output_blocks.9.0.time_stack.in_layers.2.bias": "blocks.170.conv1.bias", - "model.diffusion_model.output_blocks.9.0.time_stack.in_layers.2.weight": "blocks.170.conv1.weight", - "model.diffusion_model.output_blocks.9.0.time_stack.out_layers.0.bias": "blocks.170.norm2.bias", - "model.diffusion_model.output_blocks.9.0.time_stack.out_layers.0.weight": "blocks.170.norm2.weight", - "model.diffusion_model.output_blocks.9.0.time_stack.out_layers.3.bias": "blocks.170.conv2.bias", - "model.diffusion_model.output_blocks.9.0.time_stack.out_layers.3.weight": "blocks.170.conv2.weight", - "model.diffusion_model.output_blocks.9.1.norm.bias": "blocks.173.norm.bias", - "model.diffusion_model.output_blocks.9.1.norm.weight": "blocks.173.norm.weight", - "model.diffusion_model.output_blocks.9.1.proj_in.bias": "blocks.173.proj_in.bias", - "model.diffusion_model.output_blocks.9.1.proj_in.weight": "blocks.173.proj_in.weight", - "model.diffusion_model.output_blocks.9.1.proj_out.bias": "blocks.176.proj.bias", - "model.diffusion_model.output_blocks.9.1.proj_out.weight": "blocks.176.proj.weight", - "model.diffusion_model.output_blocks.9.1.time_mixer.mix_factor": "blocks.176.mix_factor", - "model.diffusion_model.output_blocks.9.1.time_pos_embed.0.bias": "blocks.175.positional_embedding_proj.0.bias", - "model.diffusion_model.output_blocks.9.1.time_pos_embed.0.weight": "blocks.175.positional_embedding_proj.0.weight", - "model.diffusion_model.output_blocks.9.1.time_pos_embed.2.bias": "blocks.175.positional_embedding_proj.2.bias", - "model.diffusion_model.output_blocks.9.1.time_pos_embed.2.weight": "blocks.175.positional_embedding_proj.2.weight", - 
"model.diffusion_model.output_blocks.9.1.time_stack.0.attn1.to_k.weight": "blocks.175.attn1.to_k.weight", - "model.diffusion_model.output_blocks.9.1.time_stack.0.attn1.to_out.0.bias": "blocks.175.attn1.to_out.bias", - "model.diffusion_model.output_blocks.9.1.time_stack.0.attn1.to_out.0.weight": "blocks.175.attn1.to_out.weight", - "model.diffusion_model.output_blocks.9.1.time_stack.0.attn1.to_q.weight": "blocks.175.attn1.to_q.weight", - "model.diffusion_model.output_blocks.9.1.time_stack.0.attn1.to_v.weight": "blocks.175.attn1.to_v.weight", - "model.diffusion_model.output_blocks.9.1.time_stack.0.attn2.to_k.weight": "blocks.175.attn2.to_k.weight", - "model.diffusion_model.output_blocks.9.1.time_stack.0.attn2.to_out.0.bias": "blocks.175.attn2.to_out.bias", - "model.diffusion_model.output_blocks.9.1.time_stack.0.attn2.to_out.0.weight": "blocks.175.attn2.to_out.weight", - "model.diffusion_model.output_blocks.9.1.time_stack.0.attn2.to_q.weight": "blocks.175.attn2.to_q.weight", - "model.diffusion_model.output_blocks.9.1.time_stack.0.attn2.to_v.weight": "blocks.175.attn2.to_v.weight", - "model.diffusion_model.output_blocks.9.1.time_stack.0.ff.net.0.proj.bias": "blocks.175.act_fn_out.proj.bias", - "model.diffusion_model.output_blocks.9.1.time_stack.0.ff.net.0.proj.weight": "blocks.175.act_fn_out.proj.weight", - "model.diffusion_model.output_blocks.9.1.time_stack.0.ff.net.2.bias": "blocks.175.ff_out.bias", - "model.diffusion_model.output_blocks.9.1.time_stack.0.ff.net.2.weight": "blocks.175.ff_out.weight", - "model.diffusion_model.output_blocks.9.1.time_stack.0.ff_in.net.0.proj.bias": "blocks.175.act_fn_in.proj.bias", - "model.diffusion_model.output_blocks.9.1.time_stack.0.ff_in.net.0.proj.weight": "blocks.175.act_fn_in.proj.weight", - "model.diffusion_model.output_blocks.9.1.time_stack.0.ff_in.net.2.bias": "blocks.175.ff_in.bias", - "model.diffusion_model.output_blocks.9.1.time_stack.0.ff_in.net.2.weight": "blocks.175.ff_in.weight", - 
"model.diffusion_model.output_blocks.9.1.time_stack.0.norm1.bias": "blocks.175.norm1.bias", - "model.diffusion_model.output_blocks.9.1.time_stack.0.norm1.weight": "blocks.175.norm1.weight", - "model.diffusion_model.output_blocks.9.1.time_stack.0.norm2.bias": "blocks.175.norm2.bias", - "model.diffusion_model.output_blocks.9.1.time_stack.0.norm2.weight": "blocks.175.norm2.weight", - "model.diffusion_model.output_blocks.9.1.time_stack.0.norm3.bias": "blocks.175.norm_out.bias", - "model.diffusion_model.output_blocks.9.1.time_stack.0.norm3.weight": "blocks.175.norm_out.weight", - "model.diffusion_model.output_blocks.9.1.time_stack.0.norm_in.bias": "blocks.175.norm_in.bias", - "model.diffusion_model.output_blocks.9.1.time_stack.0.norm_in.weight": "blocks.175.norm_in.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn1.to_k.weight": "blocks.173.transformer_blocks.0.attn1.to_k.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn1.to_out.0.bias": "blocks.173.transformer_blocks.0.attn1.to_out.bias", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn1.to_out.0.weight": "blocks.173.transformer_blocks.0.attn1.to_out.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn1.to_q.weight": "blocks.173.transformer_blocks.0.attn1.to_q.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn1.to_v.weight": "blocks.173.transformer_blocks.0.attn1.to_v.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn2.to_k.weight": "blocks.173.transformer_blocks.0.attn2.to_k.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn2.to_out.0.bias": "blocks.173.transformer_blocks.0.attn2.to_out.bias", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn2.to_out.0.weight": "blocks.173.transformer_blocks.0.attn2.to_out.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn2.to_q.weight": 
"blocks.173.transformer_blocks.0.attn2.to_q.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.attn2.to_v.weight": "blocks.173.transformer_blocks.0.attn2.to_v.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.ff.net.0.proj.bias": "blocks.173.transformer_blocks.0.act_fn.proj.bias", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.ff.net.0.proj.weight": "blocks.173.transformer_blocks.0.act_fn.proj.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.ff.net.2.bias": "blocks.173.transformer_blocks.0.ff.bias", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.ff.net.2.weight": "blocks.173.transformer_blocks.0.ff.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.norm1.bias": "blocks.173.transformer_blocks.0.norm1.bias", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.norm1.weight": "blocks.173.transformer_blocks.0.norm1.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.norm2.bias": "blocks.173.transformer_blocks.0.norm2.bias", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.norm2.weight": "blocks.173.transformer_blocks.0.norm2.weight", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.norm3.bias": "blocks.173.transformer_blocks.0.norm3.bias", - "model.diffusion_model.output_blocks.9.1.transformer_blocks.0.norm3.weight": "blocks.173.transformer_blocks.0.norm3.weight", - "model.diffusion_model.time_embed.0.bias": "time_embedding.0.bias", - "model.diffusion_model.time_embed.0.weight": "time_embedding.0.weight", - "model.diffusion_model.time_embed.2.bias": "time_embedding.2.bias", - "model.diffusion_model.time_embed.2.weight": "time_embedding.2.weight", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if ".proj_in." in name or ".proj_out." 
in name: - param = param.squeeze() - state_dict_[rename_dict[name]] = param - if add_positional_conv is not None: - extra_names = [ - "blocks.7.positional_conv", "blocks.17.positional_conv", "blocks.29.positional_conv", "blocks.39.positional_conv", - "blocks.51.positional_conv", "blocks.61.positional_conv", "blocks.83.positional_conv", "blocks.113.positional_conv", - "blocks.123.positional_conv", "blocks.133.positional_conv", "blocks.144.positional_conv", "blocks.154.positional_conv", - "blocks.164.positional_conv", "blocks.175.positional_conv", "blocks.185.positional_conv", "blocks.195.positional_conv", - ] - extra_channels = [320, 320, 640, 640, 1280, 1280, 1280, 1280, 1280, 1280, 640, 640, 640, 320, 320, 320] - for name, channels in zip(extra_names, extra_channels): - weight = torch.zeros((channels, channels, 3, 3, 3)) - weight[:,:,1,1,1] = torch.eye(channels, channels) - bias = torch.zeros((channels,)) - state_dict_[name + ".weight"] = weight - state_dict_[name + ".bias"] = bias - return state_dict_ diff --git a/diffsynth/models/svd_vae_decoder.py b/diffsynth/models/svd_vae_decoder.py deleted file mode 100644 index a481596..0000000 --- a/diffsynth/models/svd_vae_decoder.py +++ /dev/null @@ -1,578 +0,0 @@ -import torch -from .attention import Attention -from .sd_unet import ResnetBlock, UpSampler -from .tiler import TileWorker -from einops import rearrange, repeat - - -class VAEAttentionBlock(torch.nn.Module): - - def __init__(self, num_attention_heads, attention_head_dim, in_channels, num_layers=1, norm_num_groups=32, eps=1e-5): - super().__init__() - inner_dim = num_attention_heads * attention_head_dim - - self.norm = torch.nn.GroupNorm(num_groups=norm_num_groups, num_channels=in_channels, eps=eps, affine=True) - - self.transformer_blocks = torch.nn.ModuleList([ - Attention( - inner_dim, - num_attention_heads, - attention_head_dim, - bias_q=True, - bias_kv=True, - bias_out=True - ) - for d in range(num_layers) - ]) - - def forward(self, hidden_states, 
time_emb, text_emb, res_stack): - batch, _, height, width = hidden_states.shape - residual = hidden_states - - hidden_states = self.norm(hidden_states) - inner_dim = hidden_states.shape[1] - hidden_states = hidden_states.permute(0, 2, 3, 1).reshape(batch, height * width, inner_dim) - - for block in self.transformer_blocks: - hidden_states = block(hidden_states) - - hidden_states = hidden_states.reshape(batch, height, width, inner_dim).permute(0, 3, 1, 2).contiguous() - hidden_states = hidden_states + residual - - return hidden_states, time_emb, text_emb, res_stack - - -class TemporalResnetBlock(torch.nn.Module): - - def __init__(self, in_channels, out_channels, groups=32, eps=1e-5): - super().__init__() - self.norm1 = torch.nn.GroupNorm(num_groups=groups, num_channels=in_channels, eps=eps, affine=True) - self.conv1 = torch.nn.Conv3d(in_channels, out_channels, kernel_size=(3, 1, 1), stride=1, padding=(1, 0, 0)) - self.norm2 = torch.nn.GroupNorm(num_groups=groups, num_channels=out_channels, eps=eps, affine=True) - self.conv2 = torch.nn.Conv3d(out_channels, out_channels, kernel_size=(3, 1, 1), stride=1, padding=(1, 0, 0)) - self.nonlinearity = torch.nn.SiLU() - self.mix_factor = torch.nn.Parameter(torch.Tensor([0.5])) - - def forward(self, hidden_states, time_emb, text_emb, res_stack, **kwargs): - x_spatial = hidden_states - x = rearrange(hidden_states, "T C H W -> 1 C T H W") - x = self.norm1(x) - x = self.nonlinearity(x) - x = self.conv1(x) - x = self.norm2(x) - x = self.nonlinearity(x) - x = self.conv2(x) - x_temporal = hidden_states + x[0].permute(1, 0, 2, 3) - alpha = torch.sigmoid(self.mix_factor) - hidden_states = alpha * x_temporal + (1 - alpha) * x_spatial - return hidden_states, time_emb, text_emb, res_stack - - -class SVDVAEDecoder(torch.nn.Module): - def __init__(self): - super().__init__() - self.scaling_factor = 0.18215 - self.conv_in = torch.nn.Conv2d(4, 512, kernel_size=3, padding=1) - - self.blocks = torch.nn.ModuleList([ - # UNetMidBlock - 
ResnetBlock(512, 512, eps=1e-6), - TemporalResnetBlock(512, 512, eps=1e-6), - VAEAttentionBlock(1, 512, 512, 1, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - TemporalResnetBlock(512, 512, eps=1e-6), - # UpDecoderBlock - ResnetBlock(512, 512, eps=1e-6), - TemporalResnetBlock(512, 512, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - TemporalResnetBlock(512, 512, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - TemporalResnetBlock(512, 512, eps=1e-6), - UpSampler(512), - # UpDecoderBlock - ResnetBlock(512, 512, eps=1e-6), - TemporalResnetBlock(512, 512, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - TemporalResnetBlock(512, 512, eps=1e-6), - ResnetBlock(512, 512, eps=1e-6), - TemporalResnetBlock(512, 512, eps=1e-6), - UpSampler(512), - # UpDecoderBlock - ResnetBlock(512, 256, eps=1e-6), - TemporalResnetBlock(256, 256, eps=1e-6), - ResnetBlock(256, 256, eps=1e-6), - TemporalResnetBlock(256, 256, eps=1e-6), - ResnetBlock(256, 256, eps=1e-6), - TemporalResnetBlock(256, 256, eps=1e-6), - UpSampler(256), - # UpDecoderBlock - ResnetBlock(256, 128, eps=1e-6), - TemporalResnetBlock(128, 128, eps=1e-6), - ResnetBlock(128, 128, eps=1e-6), - TemporalResnetBlock(128, 128, eps=1e-6), - ResnetBlock(128, 128, eps=1e-6), - TemporalResnetBlock(128, 128, eps=1e-6), - ]) - - self.conv_norm_out = torch.nn.GroupNorm(num_channels=128, num_groups=32, eps=1e-5) - self.conv_act = torch.nn.SiLU() - self.conv_out = torch.nn.Conv2d(128, 3, kernel_size=3, padding=1) - self.time_conv_out = torch.nn.Conv3d(3, 3, kernel_size=(3, 1, 1), padding=(1, 0, 0)) - - - def forward(self, sample): - # 1. pre-process - hidden_states = rearrange(sample, "C T H W -> T C H W") - hidden_states = hidden_states / self.scaling_factor - hidden_states = self.conv_in(hidden_states) - time_emb, text_emb, res_stack = None, None, None - - # 2. blocks - for i, block in enumerate(self.blocks): - hidden_states, time_emb, text_emb, res_stack = block(hidden_states, time_emb, text_emb, res_stack) - - # 3. 
output - hidden_states = self.conv_norm_out(hidden_states) - hidden_states = self.conv_act(hidden_states) - hidden_states = self.conv_out(hidden_states) - hidden_states = rearrange(hidden_states, "T C H W -> C T H W") - hidden_states = self.time_conv_out(hidden_states) - - return hidden_states - - - def build_mask(self, data, is_bound): - _, T, H, W = data.shape - t = repeat(torch.arange(T), "T -> T H W", T=T, H=H, W=W) - h = repeat(torch.arange(H), "H -> T H W", T=T, H=H, W=W) - w = repeat(torch.arange(W), "W -> T H W", T=T, H=H, W=W) - border_width = (T + H + W) // 6 - pad = torch.ones_like(t) * border_width - mask = torch.stack([ - pad if is_bound[0] else t + 1, - pad if is_bound[1] else T - t, - pad if is_bound[2] else h + 1, - pad if is_bound[3] else H - h, - pad if is_bound[4] else w + 1, - pad if is_bound[5] else W - w - ]).min(dim=0).values - mask = mask.clip(1, border_width) - mask = (mask / border_width).to(dtype=data.dtype, device=data.device) - mask = rearrange(mask, "T H W -> 1 T H W") - return mask - - - def decode_video( - self, sample, - batch_time=8, batch_height=128, batch_width=128, - stride_time=4, stride_height=32, stride_width=32, - progress_bar=lambda x:x - ): - sample = sample.permute(1, 0, 2, 3) - data_device = sample.device - computation_device = self.conv_in.weight.device - torch_dtype = sample.dtype - _, T, H, W = sample.shape - - weight = torch.zeros((1, T, H*8, W*8), dtype=torch_dtype, device=data_device) - values = torch.zeros((3, T, H*8, W*8), dtype=torch_dtype, device=data_device) - - # Split tasks - tasks = [] - for t in range(0, T, stride_time): - for h in range(0, H, stride_height): - for w in range(0, W, stride_width): - if (t-stride_time >= 0 and t-stride_time+batch_time >= T)\ - or (h-stride_height >= 0 and h-stride_height+batch_height >= H)\ - or (w-stride_width >= 0 and w-stride_width+batch_width >= W): - continue - tasks.append((t, t+batch_time, h, h+batch_height, w, w+batch_width)) - - # Run - for tl, tr, hl, hr, wl, wr in 
progress_bar(tasks): - sample_batch = sample[:, tl:tr, hl:hr, wl:wr].to(computation_device) - sample_batch = self.forward(sample_batch).to(data_device) - mask = self.build_mask(sample_batch, is_bound=(tl==0, tr>=T, hl==0, hr>=H, wl==0, wr>=W)) - values[:, tl:tr, hl*8:hr*8, wl*8:wr*8] += sample_batch * mask - weight[:, tl:tr, hl*8:hr*8, wl*8:wr*8] += mask - values /= weight - return values - - - @staticmethod - def state_dict_converter(): - return SVDVAEDecoderStateDictConverter() - - -class SVDVAEDecoderStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - static_rename_dict = { - "decoder.conv_in": "conv_in", - "decoder.mid_block.attentions.0.group_norm": "blocks.2.norm", - "decoder.mid_block.attentions.0.to_q": "blocks.2.transformer_blocks.0.to_q", - "decoder.mid_block.attentions.0.to_k": "blocks.2.transformer_blocks.0.to_k", - "decoder.mid_block.attentions.0.to_v": "blocks.2.transformer_blocks.0.to_v", - "decoder.mid_block.attentions.0.to_out.0": "blocks.2.transformer_blocks.0.to_out", - "decoder.up_blocks.0.upsamplers.0.conv": "blocks.11.conv", - "decoder.up_blocks.1.upsamplers.0.conv": "blocks.18.conv", - "decoder.up_blocks.2.upsamplers.0.conv": "blocks.25.conv", - "decoder.conv_norm_out": "conv_norm_out", - "decoder.conv_out": "conv_out", - "decoder.time_conv_out": "time_conv_out" - } - prefix_rename_dict = { - "decoder.mid_block.resnets.0.spatial_res_block": "blocks.0", - "decoder.mid_block.resnets.0.temporal_res_block": "blocks.1", - "decoder.mid_block.resnets.0.time_mixer": "blocks.1", - "decoder.mid_block.resnets.1.spatial_res_block": "blocks.3", - "decoder.mid_block.resnets.1.temporal_res_block": "blocks.4", - "decoder.mid_block.resnets.1.time_mixer": "blocks.4", - - "decoder.up_blocks.0.resnets.0.spatial_res_block": "blocks.5", - "decoder.up_blocks.0.resnets.0.temporal_res_block": "blocks.6", - "decoder.up_blocks.0.resnets.0.time_mixer": "blocks.6", - "decoder.up_blocks.0.resnets.1.spatial_res_block": "blocks.7", - 
"decoder.up_blocks.0.resnets.1.temporal_res_block": "blocks.8", - "decoder.up_blocks.0.resnets.1.time_mixer": "blocks.8", - "decoder.up_blocks.0.resnets.2.spatial_res_block": "blocks.9", - "decoder.up_blocks.0.resnets.2.temporal_res_block": "blocks.10", - "decoder.up_blocks.0.resnets.2.time_mixer": "blocks.10", - - "decoder.up_blocks.1.resnets.0.spatial_res_block": "blocks.12", - "decoder.up_blocks.1.resnets.0.temporal_res_block": "blocks.13", - "decoder.up_blocks.1.resnets.0.time_mixer": "blocks.13", - "decoder.up_blocks.1.resnets.1.spatial_res_block": "blocks.14", - "decoder.up_blocks.1.resnets.1.temporal_res_block": "blocks.15", - "decoder.up_blocks.1.resnets.1.time_mixer": "blocks.15", - "decoder.up_blocks.1.resnets.2.spatial_res_block": "blocks.16", - "decoder.up_blocks.1.resnets.2.temporal_res_block": "blocks.17", - "decoder.up_blocks.1.resnets.2.time_mixer": "blocks.17", - - "decoder.up_blocks.2.resnets.0.spatial_res_block": "blocks.19", - "decoder.up_blocks.2.resnets.0.temporal_res_block": "blocks.20", - "decoder.up_blocks.2.resnets.0.time_mixer": "blocks.20", - "decoder.up_blocks.2.resnets.1.spatial_res_block": "blocks.21", - "decoder.up_blocks.2.resnets.1.temporal_res_block": "blocks.22", - "decoder.up_blocks.2.resnets.1.time_mixer": "blocks.22", - "decoder.up_blocks.2.resnets.2.spatial_res_block": "blocks.23", - "decoder.up_blocks.2.resnets.2.temporal_res_block": "blocks.24", - "decoder.up_blocks.2.resnets.2.time_mixer": "blocks.24", - - "decoder.up_blocks.3.resnets.0.spatial_res_block": "blocks.26", - "decoder.up_blocks.3.resnets.0.temporal_res_block": "blocks.27", - "decoder.up_blocks.3.resnets.0.time_mixer": "blocks.27", - "decoder.up_blocks.3.resnets.1.spatial_res_block": "blocks.28", - "decoder.up_blocks.3.resnets.1.temporal_res_block": "blocks.29", - "decoder.up_blocks.3.resnets.1.time_mixer": "blocks.29", - "decoder.up_blocks.3.resnets.2.spatial_res_block": "blocks.30", - "decoder.up_blocks.3.resnets.2.temporal_res_block": "blocks.31", - 
"decoder.up_blocks.3.resnets.2.time_mixer": "blocks.31", - } - suffix_rename_dict = { - "norm1.weight": "norm1.weight", - "conv1.weight": "conv1.weight", - "norm2.weight": "norm2.weight", - "conv2.weight": "conv2.weight", - "conv_shortcut.weight": "conv_shortcut.weight", - "norm1.bias": "norm1.bias", - "conv1.bias": "conv1.bias", - "norm2.bias": "norm2.bias", - "conv2.bias": "conv2.bias", - "conv_shortcut.bias": "conv_shortcut.bias", - "mix_factor": "mix_factor", - } - - state_dict_ = {} - for name in static_rename_dict: - state_dict_[static_rename_dict[name] + ".weight"] = state_dict[name + ".weight"] - state_dict_[static_rename_dict[name] + ".bias"] = state_dict[name + ".bias"] - for prefix_name in prefix_rename_dict: - for suffix_name in suffix_rename_dict: - name = prefix_name + "." + suffix_name - name_ = prefix_rename_dict[prefix_name] + "." + suffix_rename_dict[suffix_name] - if name in state_dict: - state_dict_[name_] = state_dict[name] - - return state_dict_ - - - def from_civitai(self, state_dict): - rename_dict = { - "first_stage_model.decoder.conv_in.bias": "conv_in.bias", - "first_stage_model.decoder.conv_in.weight": "conv_in.weight", - "first_stage_model.decoder.conv_out.bias": "conv_out.bias", - "first_stage_model.decoder.conv_out.time_mix_conv.bias": "time_conv_out.bias", - "first_stage_model.decoder.conv_out.time_mix_conv.weight": "time_conv_out.weight", - "first_stage_model.decoder.conv_out.weight": "conv_out.weight", - "first_stage_model.decoder.mid.attn_1.k.bias": "blocks.2.transformer_blocks.0.to_k.bias", - "first_stage_model.decoder.mid.attn_1.k.weight": "blocks.2.transformer_blocks.0.to_k.weight", - "first_stage_model.decoder.mid.attn_1.norm.bias": "blocks.2.norm.bias", - "first_stage_model.decoder.mid.attn_1.norm.weight": "blocks.2.norm.weight", - "first_stage_model.decoder.mid.attn_1.proj_out.bias": "blocks.2.transformer_blocks.0.to_out.bias", - "first_stage_model.decoder.mid.attn_1.proj_out.weight": 
"blocks.2.transformer_blocks.0.to_out.weight", - "first_stage_model.decoder.mid.attn_1.q.bias": "blocks.2.transformer_blocks.0.to_q.bias", - "first_stage_model.decoder.mid.attn_1.q.weight": "blocks.2.transformer_blocks.0.to_q.weight", - "first_stage_model.decoder.mid.attn_1.v.bias": "blocks.2.transformer_blocks.0.to_v.bias", - "first_stage_model.decoder.mid.attn_1.v.weight": "blocks.2.transformer_blocks.0.to_v.weight", - "first_stage_model.decoder.mid.block_1.conv1.bias": "blocks.0.conv1.bias", - "first_stage_model.decoder.mid.block_1.conv1.weight": "blocks.0.conv1.weight", - "first_stage_model.decoder.mid.block_1.conv2.bias": "blocks.0.conv2.bias", - "first_stage_model.decoder.mid.block_1.conv2.weight": "blocks.0.conv2.weight", - "first_stage_model.decoder.mid.block_1.mix_factor": "blocks.1.mix_factor", - "first_stage_model.decoder.mid.block_1.norm1.bias": "blocks.0.norm1.bias", - "first_stage_model.decoder.mid.block_1.norm1.weight": "blocks.0.norm1.weight", - "first_stage_model.decoder.mid.block_1.norm2.bias": "blocks.0.norm2.bias", - "first_stage_model.decoder.mid.block_1.norm2.weight": "blocks.0.norm2.weight", - "first_stage_model.decoder.mid.block_1.time_stack.in_layers.0.bias": "blocks.1.norm1.bias", - "first_stage_model.decoder.mid.block_1.time_stack.in_layers.0.weight": "blocks.1.norm1.weight", - "first_stage_model.decoder.mid.block_1.time_stack.in_layers.2.bias": "blocks.1.conv1.bias", - "first_stage_model.decoder.mid.block_1.time_stack.in_layers.2.weight": "blocks.1.conv1.weight", - "first_stage_model.decoder.mid.block_1.time_stack.out_layers.0.bias": "blocks.1.norm2.bias", - "first_stage_model.decoder.mid.block_1.time_stack.out_layers.0.weight": "blocks.1.norm2.weight", - "first_stage_model.decoder.mid.block_1.time_stack.out_layers.3.bias": "blocks.1.conv2.bias", - "first_stage_model.decoder.mid.block_1.time_stack.out_layers.3.weight": "blocks.1.conv2.weight", - "first_stage_model.decoder.mid.block_2.conv1.bias": "blocks.3.conv1.bias", - 
"first_stage_model.decoder.mid.block_2.conv1.weight": "blocks.3.conv1.weight", - "first_stage_model.decoder.mid.block_2.conv2.bias": "blocks.3.conv2.bias", - "first_stage_model.decoder.mid.block_2.conv2.weight": "blocks.3.conv2.weight", - "first_stage_model.decoder.mid.block_2.mix_factor": "blocks.4.mix_factor", - "first_stage_model.decoder.mid.block_2.norm1.bias": "blocks.3.norm1.bias", - "first_stage_model.decoder.mid.block_2.norm1.weight": "blocks.3.norm1.weight", - "first_stage_model.decoder.mid.block_2.norm2.bias": "blocks.3.norm2.bias", - "first_stage_model.decoder.mid.block_2.norm2.weight": "blocks.3.norm2.weight", - "first_stage_model.decoder.mid.block_2.time_stack.in_layers.0.bias": "blocks.4.norm1.bias", - "first_stage_model.decoder.mid.block_2.time_stack.in_layers.0.weight": "blocks.4.norm1.weight", - "first_stage_model.decoder.mid.block_2.time_stack.in_layers.2.bias": "blocks.4.conv1.bias", - "first_stage_model.decoder.mid.block_2.time_stack.in_layers.2.weight": "blocks.4.conv1.weight", - "first_stage_model.decoder.mid.block_2.time_stack.out_layers.0.bias": "blocks.4.norm2.bias", - "first_stage_model.decoder.mid.block_2.time_stack.out_layers.0.weight": "blocks.4.norm2.weight", - "first_stage_model.decoder.mid.block_2.time_stack.out_layers.3.bias": "blocks.4.conv2.bias", - "first_stage_model.decoder.mid.block_2.time_stack.out_layers.3.weight": "blocks.4.conv2.weight", - "first_stage_model.decoder.norm_out.bias": "conv_norm_out.bias", - "first_stage_model.decoder.norm_out.weight": "conv_norm_out.weight", - "first_stage_model.decoder.up.0.block.0.conv1.bias": "blocks.26.conv1.bias", - "first_stage_model.decoder.up.0.block.0.conv1.weight": "blocks.26.conv1.weight", - "first_stage_model.decoder.up.0.block.0.conv2.bias": "blocks.26.conv2.bias", - "first_stage_model.decoder.up.0.block.0.conv2.weight": "blocks.26.conv2.weight", - "first_stage_model.decoder.up.0.block.0.mix_factor": "blocks.27.mix_factor", - 
"first_stage_model.decoder.up.0.block.0.nin_shortcut.bias": "blocks.26.conv_shortcut.bias", - "first_stage_model.decoder.up.0.block.0.nin_shortcut.weight": "blocks.26.conv_shortcut.weight", - "first_stage_model.decoder.up.0.block.0.norm1.bias": "blocks.26.norm1.bias", - "first_stage_model.decoder.up.0.block.0.norm1.weight": "blocks.26.norm1.weight", - "first_stage_model.decoder.up.0.block.0.norm2.bias": "blocks.26.norm2.bias", - "first_stage_model.decoder.up.0.block.0.norm2.weight": "blocks.26.norm2.weight", - "first_stage_model.decoder.up.0.block.0.time_stack.in_layers.0.bias": "blocks.27.norm1.bias", - "first_stage_model.decoder.up.0.block.0.time_stack.in_layers.0.weight": "blocks.27.norm1.weight", - "first_stage_model.decoder.up.0.block.0.time_stack.in_layers.2.bias": "blocks.27.conv1.bias", - "first_stage_model.decoder.up.0.block.0.time_stack.in_layers.2.weight": "blocks.27.conv1.weight", - "first_stage_model.decoder.up.0.block.0.time_stack.out_layers.0.bias": "blocks.27.norm2.bias", - "first_stage_model.decoder.up.0.block.0.time_stack.out_layers.0.weight": "blocks.27.norm2.weight", - "first_stage_model.decoder.up.0.block.0.time_stack.out_layers.3.bias": "blocks.27.conv2.bias", - "first_stage_model.decoder.up.0.block.0.time_stack.out_layers.3.weight": "blocks.27.conv2.weight", - "first_stage_model.decoder.up.0.block.1.conv1.bias": "blocks.28.conv1.bias", - "first_stage_model.decoder.up.0.block.1.conv1.weight": "blocks.28.conv1.weight", - "first_stage_model.decoder.up.0.block.1.conv2.bias": "blocks.28.conv2.bias", - "first_stage_model.decoder.up.0.block.1.conv2.weight": "blocks.28.conv2.weight", - "first_stage_model.decoder.up.0.block.1.mix_factor": "blocks.29.mix_factor", - "first_stage_model.decoder.up.0.block.1.norm1.bias": "blocks.28.norm1.bias", - "first_stage_model.decoder.up.0.block.1.norm1.weight": "blocks.28.norm1.weight", - "first_stage_model.decoder.up.0.block.1.norm2.bias": "blocks.28.norm2.bias", - 
"first_stage_model.decoder.up.0.block.1.norm2.weight": "blocks.28.norm2.weight", - "first_stage_model.decoder.up.0.block.1.time_stack.in_layers.0.bias": "blocks.29.norm1.bias", - "first_stage_model.decoder.up.0.block.1.time_stack.in_layers.0.weight": "blocks.29.norm1.weight", - "first_stage_model.decoder.up.0.block.1.time_stack.in_layers.2.bias": "blocks.29.conv1.bias", - "first_stage_model.decoder.up.0.block.1.time_stack.in_layers.2.weight": "blocks.29.conv1.weight", - "first_stage_model.decoder.up.0.block.1.time_stack.out_layers.0.bias": "blocks.29.norm2.bias", - "first_stage_model.decoder.up.0.block.1.time_stack.out_layers.0.weight": "blocks.29.norm2.weight", - "first_stage_model.decoder.up.0.block.1.time_stack.out_layers.3.bias": "blocks.29.conv2.bias", - "first_stage_model.decoder.up.0.block.1.time_stack.out_layers.3.weight": "blocks.29.conv2.weight", - "first_stage_model.decoder.up.0.block.2.conv1.bias": "blocks.30.conv1.bias", - "first_stage_model.decoder.up.0.block.2.conv1.weight": "blocks.30.conv1.weight", - "first_stage_model.decoder.up.0.block.2.conv2.bias": "blocks.30.conv2.bias", - "first_stage_model.decoder.up.0.block.2.conv2.weight": "blocks.30.conv2.weight", - "first_stage_model.decoder.up.0.block.2.mix_factor": "blocks.31.mix_factor", - "first_stage_model.decoder.up.0.block.2.norm1.bias": "blocks.30.norm1.bias", - "first_stage_model.decoder.up.0.block.2.norm1.weight": "blocks.30.norm1.weight", - "first_stage_model.decoder.up.0.block.2.norm2.bias": "blocks.30.norm2.bias", - "first_stage_model.decoder.up.0.block.2.norm2.weight": "blocks.30.norm2.weight", - "first_stage_model.decoder.up.0.block.2.time_stack.in_layers.0.bias": "blocks.31.norm1.bias", - "first_stage_model.decoder.up.0.block.2.time_stack.in_layers.0.weight": "blocks.31.norm1.weight", - "first_stage_model.decoder.up.0.block.2.time_stack.in_layers.2.bias": "blocks.31.conv1.bias", - "first_stage_model.decoder.up.0.block.2.time_stack.in_layers.2.weight": "blocks.31.conv1.weight", - 
"first_stage_model.decoder.up.0.block.2.time_stack.out_layers.0.bias": "blocks.31.norm2.bias", - "first_stage_model.decoder.up.0.block.2.time_stack.out_layers.0.weight": "blocks.31.norm2.weight", - "first_stage_model.decoder.up.0.block.2.time_stack.out_layers.3.bias": "blocks.31.conv2.bias", - "first_stage_model.decoder.up.0.block.2.time_stack.out_layers.3.weight": "blocks.31.conv2.weight", - "first_stage_model.decoder.up.1.block.0.conv1.bias": "blocks.19.conv1.bias", - "first_stage_model.decoder.up.1.block.0.conv1.weight": "blocks.19.conv1.weight", - "first_stage_model.decoder.up.1.block.0.conv2.bias": "blocks.19.conv2.bias", - "first_stage_model.decoder.up.1.block.0.conv2.weight": "blocks.19.conv2.weight", - "first_stage_model.decoder.up.1.block.0.mix_factor": "blocks.20.mix_factor", - "first_stage_model.decoder.up.1.block.0.nin_shortcut.bias": "blocks.19.conv_shortcut.bias", - "first_stage_model.decoder.up.1.block.0.nin_shortcut.weight": "blocks.19.conv_shortcut.weight", - "first_stage_model.decoder.up.1.block.0.norm1.bias": "blocks.19.norm1.bias", - "first_stage_model.decoder.up.1.block.0.norm1.weight": "blocks.19.norm1.weight", - "first_stage_model.decoder.up.1.block.0.norm2.bias": "blocks.19.norm2.bias", - "first_stage_model.decoder.up.1.block.0.norm2.weight": "blocks.19.norm2.weight", - "first_stage_model.decoder.up.1.block.0.time_stack.in_layers.0.bias": "blocks.20.norm1.bias", - "first_stage_model.decoder.up.1.block.0.time_stack.in_layers.0.weight": "blocks.20.norm1.weight", - "first_stage_model.decoder.up.1.block.0.time_stack.in_layers.2.bias": "blocks.20.conv1.bias", - "first_stage_model.decoder.up.1.block.0.time_stack.in_layers.2.weight": "blocks.20.conv1.weight", - "first_stage_model.decoder.up.1.block.0.time_stack.out_layers.0.bias": "blocks.20.norm2.bias", - "first_stage_model.decoder.up.1.block.0.time_stack.out_layers.0.weight": "blocks.20.norm2.weight", - "first_stage_model.decoder.up.1.block.0.time_stack.out_layers.3.bias": "blocks.20.conv2.bias", 
- "first_stage_model.decoder.up.1.block.0.time_stack.out_layers.3.weight": "blocks.20.conv2.weight", - "first_stage_model.decoder.up.1.block.1.conv1.bias": "blocks.21.conv1.bias", - "first_stage_model.decoder.up.1.block.1.conv1.weight": "blocks.21.conv1.weight", - "first_stage_model.decoder.up.1.block.1.conv2.bias": "blocks.21.conv2.bias", - "first_stage_model.decoder.up.1.block.1.conv2.weight": "blocks.21.conv2.weight", - "first_stage_model.decoder.up.1.block.1.mix_factor": "blocks.22.mix_factor", - "first_stage_model.decoder.up.1.block.1.norm1.bias": "blocks.21.norm1.bias", - "first_stage_model.decoder.up.1.block.1.norm1.weight": "blocks.21.norm1.weight", - "first_stage_model.decoder.up.1.block.1.norm2.bias": "blocks.21.norm2.bias", - "first_stage_model.decoder.up.1.block.1.norm2.weight": "blocks.21.norm2.weight", - "first_stage_model.decoder.up.1.block.1.time_stack.in_layers.0.bias": "blocks.22.norm1.bias", - "first_stage_model.decoder.up.1.block.1.time_stack.in_layers.0.weight": "blocks.22.norm1.weight", - "first_stage_model.decoder.up.1.block.1.time_stack.in_layers.2.bias": "blocks.22.conv1.bias", - "first_stage_model.decoder.up.1.block.1.time_stack.in_layers.2.weight": "blocks.22.conv1.weight", - "first_stage_model.decoder.up.1.block.1.time_stack.out_layers.0.bias": "blocks.22.norm2.bias", - "first_stage_model.decoder.up.1.block.1.time_stack.out_layers.0.weight": "blocks.22.norm2.weight", - "first_stage_model.decoder.up.1.block.1.time_stack.out_layers.3.bias": "blocks.22.conv2.bias", - "first_stage_model.decoder.up.1.block.1.time_stack.out_layers.3.weight": "blocks.22.conv2.weight", - "first_stage_model.decoder.up.1.block.2.conv1.bias": "blocks.23.conv1.bias", - "first_stage_model.decoder.up.1.block.2.conv1.weight": "blocks.23.conv1.weight", - "first_stage_model.decoder.up.1.block.2.conv2.bias": "blocks.23.conv2.bias", - "first_stage_model.decoder.up.1.block.2.conv2.weight": "blocks.23.conv2.weight", - "first_stage_model.decoder.up.1.block.2.mix_factor": 
"blocks.24.mix_factor", - "first_stage_model.decoder.up.1.block.2.norm1.bias": "blocks.23.norm1.bias", - "first_stage_model.decoder.up.1.block.2.norm1.weight": "blocks.23.norm1.weight", - "first_stage_model.decoder.up.1.block.2.norm2.bias": "blocks.23.norm2.bias", - "first_stage_model.decoder.up.1.block.2.norm2.weight": "blocks.23.norm2.weight", - "first_stage_model.decoder.up.1.block.2.time_stack.in_layers.0.bias": "blocks.24.norm1.bias", - "first_stage_model.decoder.up.1.block.2.time_stack.in_layers.0.weight": "blocks.24.norm1.weight", - "first_stage_model.decoder.up.1.block.2.time_stack.in_layers.2.bias": "blocks.24.conv1.bias", - "first_stage_model.decoder.up.1.block.2.time_stack.in_layers.2.weight": "blocks.24.conv1.weight", - "first_stage_model.decoder.up.1.block.2.time_stack.out_layers.0.bias": "blocks.24.norm2.bias", - "first_stage_model.decoder.up.1.block.2.time_stack.out_layers.0.weight": "blocks.24.norm2.weight", - "first_stage_model.decoder.up.1.block.2.time_stack.out_layers.3.bias": "blocks.24.conv2.bias", - "first_stage_model.decoder.up.1.block.2.time_stack.out_layers.3.weight": "blocks.24.conv2.weight", - "first_stage_model.decoder.up.1.upsample.conv.bias": "blocks.25.conv.bias", - "first_stage_model.decoder.up.1.upsample.conv.weight": "blocks.25.conv.weight", - "first_stage_model.decoder.up.2.block.0.conv1.bias": "blocks.12.conv1.bias", - "first_stage_model.decoder.up.2.block.0.conv1.weight": "blocks.12.conv1.weight", - "first_stage_model.decoder.up.2.block.0.conv2.bias": "blocks.12.conv2.bias", - "first_stage_model.decoder.up.2.block.0.conv2.weight": "blocks.12.conv2.weight", - "first_stage_model.decoder.up.2.block.0.mix_factor": "blocks.13.mix_factor", - "first_stage_model.decoder.up.2.block.0.norm1.bias": "blocks.12.norm1.bias", - "first_stage_model.decoder.up.2.block.0.norm1.weight": "blocks.12.norm1.weight", - "first_stage_model.decoder.up.2.block.0.norm2.bias": "blocks.12.norm2.bias", - "first_stage_model.decoder.up.2.block.0.norm2.weight": 
"blocks.12.norm2.weight", - "first_stage_model.decoder.up.2.block.0.time_stack.in_layers.0.bias": "blocks.13.norm1.bias", - "first_stage_model.decoder.up.2.block.0.time_stack.in_layers.0.weight": "blocks.13.norm1.weight", - "first_stage_model.decoder.up.2.block.0.time_stack.in_layers.2.bias": "blocks.13.conv1.bias", - "first_stage_model.decoder.up.2.block.0.time_stack.in_layers.2.weight": "blocks.13.conv1.weight", - "first_stage_model.decoder.up.2.block.0.time_stack.out_layers.0.bias": "blocks.13.norm2.bias", - "first_stage_model.decoder.up.2.block.0.time_stack.out_layers.0.weight": "blocks.13.norm2.weight", - "first_stage_model.decoder.up.2.block.0.time_stack.out_layers.3.bias": "blocks.13.conv2.bias", - "first_stage_model.decoder.up.2.block.0.time_stack.out_layers.3.weight": "blocks.13.conv2.weight", - "first_stage_model.decoder.up.2.block.1.conv1.bias": "blocks.14.conv1.bias", - "first_stage_model.decoder.up.2.block.1.conv1.weight": "blocks.14.conv1.weight", - "first_stage_model.decoder.up.2.block.1.conv2.bias": "blocks.14.conv2.bias", - "first_stage_model.decoder.up.2.block.1.conv2.weight": "blocks.14.conv2.weight", - "first_stage_model.decoder.up.2.block.1.mix_factor": "blocks.15.mix_factor", - "first_stage_model.decoder.up.2.block.1.norm1.bias": "blocks.14.norm1.bias", - "first_stage_model.decoder.up.2.block.1.norm1.weight": "blocks.14.norm1.weight", - "first_stage_model.decoder.up.2.block.1.norm2.bias": "blocks.14.norm2.bias", - "first_stage_model.decoder.up.2.block.1.norm2.weight": "blocks.14.norm2.weight", - "first_stage_model.decoder.up.2.block.1.time_stack.in_layers.0.bias": "blocks.15.norm1.bias", - "first_stage_model.decoder.up.2.block.1.time_stack.in_layers.0.weight": "blocks.15.norm1.weight", - "first_stage_model.decoder.up.2.block.1.time_stack.in_layers.2.bias": "blocks.15.conv1.bias", - "first_stage_model.decoder.up.2.block.1.time_stack.in_layers.2.weight": "blocks.15.conv1.weight", - 
"first_stage_model.decoder.up.2.block.1.time_stack.out_layers.0.bias": "blocks.15.norm2.bias", - "first_stage_model.decoder.up.2.block.1.time_stack.out_layers.0.weight": "blocks.15.norm2.weight", - "first_stage_model.decoder.up.2.block.1.time_stack.out_layers.3.bias": "blocks.15.conv2.bias", - "first_stage_model.decoder.up.2.block.1.time_stack.out_layers.3.weight": "blocks.15.conv2.weight", - "first_stage_model.decoder.up.2.block.2.conv1.bias": "blocks.16.conv1.bias", - "first_stage_model.decoder.up.2.block.2.conv1.weight": "blocks.16.conv1.weight", - "first_stage_model.decoder.up.2.block.2.conv2.bias": "blocks.16.conv2.bias", - "first_stage_model.decoder.up.2.block.2.conv2.weight": "blocks.16.conv2.weight", - "first_stage_model.decoder.up.2.block.2.mix_factor": "blocks.17.mix_factor", - "first_stage_model.decoder.up.2.block.2.norm1.bias": "blocks.16.norm1.bias", - "first_stage_model.decoder.up.2.block.2.norm1.weight": "blocks.16.norm1.weight", - "first_stage_model.decoder.up.2.block.2.norm2.bias": "blocks.16.norm2.bias", - "first_stage_model.decoder.up.2.block.2.norm2.weight": "blocks.16.norm2.weight", - "first_stage_model.decoder.up.2.block.2.time_stack.in_layers.0.bias": "blocks.17.norm1.bias", - "first_stage_model.decoder.up.2.block.2.time_stack.in_layers.0.weight": "blocks.17.norm1.weight", - "first_stage_model.decoder.up.2.block.2.time_stack.in_layers.2.bias": "blocks.17.conv1.bias", - "first_stage_model.decoder.up.2.block.2.time_stack.in_layers.2.weight": "blocks.17.conv1.weight", - "first_stage_model.decoder.up.2.block.2.time_stack.out_layers.0.bias": "blocks.17.norm2.bias", - "first_stage_model.decoder.up.2.block.2.time_stack.out_layers.0.weight": "blocks.17.norm2.weight", - "first_stage_model.decoder.up.2.block.2.time_stack.out_layers.3.bias": "blocks.17.conv2.bias", - "first_stage_model.decoder.up.2.block.2.time_stack.out_layers.3.weight": "blocks.17.conv2.weight", - "first_stage_model.decoder.up.2.upsample.conv.bias": "blocks.18.conv.bias", - 
"first_stage_model.decoder.up.2.upsample.conv.weight": "blocks.18.conv.weight", - "first_stage_model.decoder.up.3.block.0.conv1.bias": "blocks.5.conv1.bias", - "first_stage_model.decoder.up.3.block.0.conv1.weight": "blocks.5.conv1.weight", - "first_stage_model.decoder.up.3.block.0.conv2.bias": "blocks.5.conv2.bias", - "first_stage_model.decoder.up.3.block.0.conv2.weight": "blocks.5.conv2.weight", - "first_stage_model.decoder.up.3.block.0.mix_factor": "blocks.6.mix_factor", - "first_stage_model.decoder.up.3.block.0.norm1.bias": "blocks.5.norm1.bias", - "first_stage_model.decoder.up.3.block.0.norm1.weight": "blocks.5.norm1.weight", - "first_stage_model.decoder.up.3.block.0.norm2.bias": "blocks.5.norm2.bias", - "first_stage_model.decoder.up.3.block.0.norm2.weight": "blocks.5.norm2.weight", - "first_stage_model.decoder.up.3.block.0.time_stack.in_layers.0.bias": "blocks.6.norm1.bias", - "first_stage_model.decoder.up.3.block.0.time_stack.in_layers.0.weight": "blocks.6.norm1.weight", - "first_stage_model.decoder.up.3.block.0.time_stack.in_layers.2.bias": "blocks.6.conv1.bias", - "first_stage_model.decoder.up.3.block.0.time_stack.in_layers.2.weight": "blocks.6.conv1.weight", - "first_stage_model.decoder.up.3.block.0.time_stack.out_layers.0.bias": "blocks.6.norm2.bias", - "first_stage_model.decoder.up.3.block.0.time_stack.out_layers.0.weight": "blocks.6.norm2.weight", - "first_stage_model.decoder.up.3.block.0.time_stack.out_layers.3.bias": "blocks.6.conv2.bias", - "first_stage_model.decoder.up.3.block.0.time_stack.out_layers.3.weight": "blocks.6.conv2.weight", - "first_stage_model.decoder.up.3.block.1.conv1.bias": "blocks.7.conv1.bias", - "first_stage_model.decoder.up.3.block.1.conv1.weight": "blocks.7.conv1.weight", - "first_stage_model.decoder.up.3.block.1.conv2.bias": "blocks.7.conv2.bias", - "first_stage_model.decoder.up.3.block.1.conv2.weight": "blocks.7.conv2.weight", - "first_stage_model.decoder.up.3.block.1.mix_factor": "blocks.8.mix_factor", - 
"first_stage_model.decoder.up.3.block.1.norm1.bias": "blocks.7.norm1.bias", - "first_stage_model.decoder.up.3.block.1.norm1.weight": "blocks.7.norm1.weight", - "first_stage_model.decoder.up.3.block.1.norm2.bias": "blocks.7.norm2.bias", - "first_stage_model.decoder.up.3.block.1.norm2.weight": "blocks.7.norm2.weight", - "first_stage_model.decoder.up.3.block.1.time_stack.in_layers.0.bias": "blocks.8.norm1.bias", - "first_stage_model.decoder.up.3.block.1.time_stack.in_layers.0.weight": "blocks.8.norm1.weight", - "first_stage_model.decoder.up.3.block.1.time_stack.in_layers.2.bias": "blocks.8.conv1.bias", - "first_stage_model.decoder.up.3.block.1.time_stack.in_layers.2.weight": "blocks.8.conv1.weight", - "first_stage_model.decoder.up.3.block.1.time_stack.out_layers.0.bias": "blocks.8.norm2.bias", - "first_stage_model.decoder.up.3.block.1.time_stack.out_layers.0.weight": "blocks.8.norm2.weight", - "first_stage_model.decoder.up.3.block.1.time_stack.out_layers.3.bias": "blocks.8.conv2.bias", - "first_stage_model.decoder.up.3.block.1.time_stack.out_layers.3.weight": "blocks.8.conv2.weight", - "first_stage_model.decoder.up.3.block.2.conv1.bias": "blocks.9.conv1.bias", - "first_stage_model.decoder.up.3.block.2.conv1.weight": "blocks.9.conv1.weight", - "first_stage_model.decoder.up.3.block.2.conv2.bias": "blocks.9.conv2.bias", - "first_stage_model.decoder.up.3.block.2.conv2.weight": "blocks.9.conv2.weight", - "first_stage_model.decoder.up.3.block.2.mix_factor": "blocks.10.mix_factor", - "first_stage_model.decoder.up.3.block.2.norm1.bias": "blocks.9.norm1.bias", - "first_stage_model.decoder.up.3.block.2.norm1.weight": "blocks.9.norm1.weight", - "first_stage_model.decoder.up.3.block.2.norm2.bias": "blocks.9.norm2.bias", - "first_stage_model.decoder.up.3.block.2.norm2.weight": "blocks.9.norm2.weight", - "first_stage_model.decoder.up.3.block.2.time_stack.in_layers.0.bias": "blocks.10.norm1.bias", - "first_stage_model.decoder.up.3.block.2.time_stack.in_layers.0.weight": 
"blocks.10.norm1.weight", - "first_stage_model.decoder.up.3.block.2.time_stack.in_layers.2.bias": "blocks.10.conv1.bias", - "first_stage_model.decoder.up.3.block.2.time_stack.in_layers.2.weight": "blocks.10.conv1.weight", - "first_stage_model.decoder.up.3.block.2.time_stack.out_layers.0.bias": "blocks.10.norm2.bias", - "first_stage_model.decoder.up.3.block.2.time_stack.out_layers.0.weight": "blocks.10.norm2.weight", - "first_stage_model.decoder.up.3.block.2.time_stack.out_layers.3.bias": "blocks.10.conv2.bias", - "first_stage_model.decoder.up.3.block.2.time_stack.out_layers.3.weight": "blocks.10.conv2.weight", - "first_stage_model.decoder.up.3.upsample.conv.bias": "blocks.11.conv.bias", - "first_stage_model.decoder.up.3.upsample.conv.weight": "blocks.11.conv.weight", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if "blocks.2.transformer_blocks.0" in rename_dict[name]: - param = param.squeeze() - state_dict_[rename_dict[name]] = param - return state_dict_ diff --git a/diffsynth/models/svd_vae_encoder.py b/diffsynth/models/svd_vae_encoder.py deleted file mode 100644 index 04a543a..0000000 --- a/diffsynth/models/svd_vae_encoder.py +++ /dev/null @@ -1,139 +0,0 @@ -from .sd_vae_encoder import SDVAEEncoderStateDictConverter, SDVAEEncoder - - -class SVDVAEEncoder(SDVAEEncoder): - def __init__(self): - super().__init__() - self.scaling_factor = 0.13025 - - @staticmethod - def state_dict_converter(): - return SVDVAEEncoderStateDictConverter() - - -class SVDVAEEncoderStateDictConverter(SDVAEEncoderStateDictConverter): - def __init__(self): - super().__init__() - - def from_diffusers(self, state_dict): - return super().from_diffusers(state_dict) - - def from_civitai(self, state_dict): - rename_dict = { - "conditioner.embedders.3.encoder.encoder.conv_in.bias": "conv_in.bias", - "conditioner.embedders.3.encoder.encoder.conv_in.weight": "conv_in.weight", - "conditioner.embedders.3.encoder.encoder.conv_out.bias": 
"conv_out.bias", - "conditioner.embedders.3.encoder.encoder.conv_out.weight": "conv_out.weight", - "conditioner.embedders.3.encoder.encoder.down.0.block.0.conv1.bias": "blocks.0.conv1.bias", - "conditioner.embedders.3.encoder.encoder.down.0.block.0.conv1.weight": "blocks.0.conv1.weight", - "conditioner.embedders.3.encoder.encoder.down.0.block.0.conv2.bias": "blocks.0.conv2.bias", - "conditioner.embedders.3.encoder.encoder.down.0.block.0.conv2.weight": "blocks.0.conv2.weight", - "conditioner.embedders.3.encoder.encoder.down.0.block.0.norm1.bias": "blocks.0.norm1.bias", - "conditioner.embedders.3.encoder.encoder.down.0.block.0.norm1.weight": "blocks.0.norm1.weight", - "conditioner.embedders.3.encoder.encoder.down.0.block.0.norm2.bias": "blocks.0.norm2.bias", - "conditioner.embedders.3.encoder.encoder.down.0.block.0.norm2.weight": "blocks.0.norm2.weight", - "conditioner.embedders.3.encoder.encoder.down.0.block.1.conv1.bias": "blocks.1.conv1.bias", - "conditioner.embedders.3.encoder.encoder.down.0.block.1.conv1.weight": "blocks.1.conv1.weight", - "conditioner.embedders.3.encoder.encoder.down.0.block.1.conv2.bias": "blocks.1.conv2.bias", - "conditioner.embedders.3.encoder.encoder.down.0.block.1.conv2.weight": "blocks.1.conv2.weight", - "conditioner.embedders.3.encoder.encoder.down.0.block.1.norm1.bias": "blocks.1.norm1.bias", - "conditioner.embedders.3.encoder.encoder.down.0.block.1.norm1.weight": "blocks.1.norm1.weight", - "conditioner.embedders.3.encoder.encoder.down.0.block.1.norm2.bias": "blocks.1.norm2.bias", - "conditioner.embedders.3.encoder.encoder.down.0.block.1.norm2.weight": "blocks.1.norm2.weight", - "conditioner.embedders.3.encoder.encoder.down.0.downsample.conv.bias": "blocks.2.conv.bias", - "conditioner.embedders.3.encoder.encoder.down.0.downsample.conv.weight": "blocks.2.conv.weight", - "conditioner.embedders.3.encoder.encoder.down.1.block.0.conv1.bias": "blocks.3.conv1.bias", - "conditioner.embedders.3.encoder.encoder.down.1.block.0.conv1.weight": 
"blocks.3.conv1.weight", - "conditioner.embedders.3.encoder.encoder.down.1.block.0.conv2.bias": "blocks.3.conv2.bias", - "conditioner.embedders.3.encoder.encoder.down.1.block.0.conv2.weight": "blocks.3.conv2.weight", - "conditioner.embedders.3.encoder.encoder.down.1.block.0.nin_shortcut.bias": "blocks.3.conv_shortcut.bias", - "conditioner.embedders.3.encoder.encoder.down.1.block.0.nin_shortcut.weight": "blocks.3.conv_shortcut.weight", - "conditioner.embedders.3.encoder.encoder.down.1.block.0.norm1.bias": "blocks.3.norm1.bias", - "conditioner.embedders.3.encoder.encoder.down.1.block.0.norm1.weight": "blocks.3.norm1.weight", - "conditioner.embedders.3.encoder.encoder.down.1.block.0.norm2.bias": "blocks.3.norm2.bias", - "conditioner.embedders.3.encoder.encoder.down.1.block.0.norm2.weight": "blocks.3.norm2.weight", - "conditioner.embedders.3.encoder.encoder.down.1.block.1.conv1.bias": "blocks.4.conv1.bias", - "conditioner.embedders.3.encoder.encoder.down.1.block.1.conv1.weight": "blocks.4.conv1.weight", - "conditioner.embedders.3.encoder.encoder.down.1.block.1.conv2.bias": "blocks.4.conv2.bias", - "conditioner.embedders.3.encoder.encoder.down.1.block.1.conv2.weight": "blocks.4.conv2.weight", - "conditioner.embedders.3.encoder.encoder.down.1.block.1.norm1.bias": "blocks.4.norm1.bias", - "conditioner.embedders.3.encoder.encoder.down.1.block.1.norm1.weight": "blocks.4.norm1.weight", - "conditioner.embedders.3.encoder.encoder.down.1.block.1.norm2.bias": "blocks.4.norm2.bias", - "conditioner.embedders.3.encoder.encoder.down.1.block.1.norm2.weight": "blocks.4.norm2.weight", - "conditioner.embedders.3.encoder.encoder.down.1.downsample.conv.bias": "blocks.5.conv.bias", - "conditioner.embedders.3.encoder.encoder.down.1.downsample.conv.weight": "blocks.5.conv.weight", - "conditioner.embedders.3.encoder.encoder.down.2.block.0.conv1.bias": "blocks.6.conv1.bias", - "conditioner.embedders.3.encoder.encoder.down.2.block.0.conv1.weight": "blocks.6.conv1.weight", - 
"conditioner.embedders.3.encoder.encoder.down.2.block.0.conv2.bias": "blocks.6.conv2.bias", - "conditioner.embedders.3.encoder.encoder.down.2.block.0.conv2.weight": "blocks.6.conv2.weight", - "conditioner.embedders.3.encoder.encoder.down.2.block.0.nin_shortcut.bias": "blocks.6.conv_shortcut.bias", - "conditioner.embedders.3.encoder.encoder.down.2.block.0.nin_shortcut.weight": "blocks.6.conv_shortcut.weight", - "conditioner.embedders.3.encoder.encoder.down.2.block.0.norm1.bias": "blocks.6.norm1.bias", - "conditioner.embedders.3.encoder.encoder.down.2.block.0.norm1.weight": "blocks.6.norm1.weight", - "conditioner.embedders.3.encoder.encoder.down.2.block.0.norm2.bias": "blocks.6.norm2.bias", - "conditioner.embedders.3.encoder.encoder.down.2.block.0.norm2.weight": "blocks.6.norm2.weight", - "conditioner.embedders.3.encoder.encoder.down.2.block.1.conv1.bias": "blocks.7.conv1.bias", - "conditioner.embedders.3.encoder.encoder.down.2.block.1.conv1.weight": "blocks.7.conv1.weight", - "conditioner.embedders.3.encoder.encoder.down.2.block.1.conv2.bias": "blocks.7.conv2.bias", - "conditioner.embedders.3.encoder.encoder.down.2.block.1.conv2.weight": "blocks.7.conv2.weight", - "conditioner.embedders.3.encoder.encoder.down.2.block.1.norm1.bias": "blocks.7.norm1.bias", - "conditioner.embedders.3.encoder.encoder.down.2.block.1.norm1.weight": "blocks.7.norm1.weight", - "conditioner.embedders.3.encoder.encoder.down.2.block.1.norm2.bias": "blocks.7.norm2.bias", - "conditioner.embedders.3.encoder.encoder.down.2.block.1.norm2.weight": "blocks.7.norm2.weight", - "conditioner.embedders.3.encoder.encoder.down.2.downsample.conv.bias": "blocks.8.conv.bias", - "conditioner.embedders.3.encoder.encoder.down.2.downsample.conv.weight": "blocks.8.conv.weight", - "conditioner.embedders.3.encoder.encoder.down.3.block.0.conv1.bias": "blocks.9.conv1.bias", - "conditioner.embedders.3.encoder.encoder.down.3.block.0.conv1.weight": "blocks.9.conv1.weight", - 
"conditioner.embedders.3.encoder.encoder.down.3.block.0.conv2.bias": "blocks.9.conv2.bias", - "conditioner.embedders.3.encoder.encoder.down.3.block.0.conv2.weight": "blocks.9.conv2.weight", - "conditioner.embedders.3.encoder.encoder.down.3.block.0.norm1.bias": "blocks.9.norm1.bias", - "conditioner.embedders.3.encoder.encoder.down.3.block.0.norm1.weight": "blocks.9.norm1.weight", - "conditioner.embedders.3.encoder.encoder.down.3.block.0.norm2.bias": "blocks.9.norm2.bias", - "conditioner.embedders.3.encoder.encoder.down.3.block.0.norm2.weight": "blocks.9.norm2.weight", - "conditioner.embedders.3.encoder.encoder.down.3.block.1.conv1.bias": "blocks.10.conv1.bias", - "conditioner.embedders.3.encoder.encoder.down.3.block.1.conv1.weight": "blocks.10.conv1.weight", - "conditioner.embedders.3.encoder.encoder.down.3.block.1.conv2.bias": "blocks.10.conv2.bias", - "conditioner.embedders.3.encoder.encoder.down.3.block.1.conv2.weight": "blocks.10.conv2.weight", - "conditioner.embedders.3.encoder.encoder.down.3.block.1.norm1.bias": "blocks.10.norm1.bias", - "conditioner.embedders.3.encoder.encoder.down.3.block.1.norm1.weight": "blocks.10.norm1.weight", - "conditioner.embedders.3.encoder.encoder.down.3.block.1.norm2.bias": "blocks.10.norm2.bias", - "conditioner.embedders.3.encoder.encoder.down.3.block.1.norm2.weight": "blocks.10.norm2.weight", - "conditioner.embedders.3.encoder.encoder.mid.attn_1.k.bias": "blocks.12.transformer_blocks.0.to_k.bias", - "conditioner.embedders.3.encoder.encoder.mid.attn_1.k.weight": "blocks.12.transformer_blocks.0.to_k.weight", - "conditioner.embedders.3.encoder.encoder.mid.attn_1.norm.bias": "blocks.12.norm.bias", - "conditioner.embedders.3.encoder.encoder.mid.attn_1.norm.weight": "blocks.12.norm.weight", - "conditioner.embedders.3.encoder.encoder.mid.attn_1.proj_out.bias": "blocks.12.transformer_blocks.0.to_out.bias", - "conditioner.embedders.3.encoder.encoder.mid.attn_1.proj_out.weight": "blocks.12.transformer_blocks.0.to_out.weight", - 
"conditioner.embedders.3.encoder.encoder.mid.attn_1.q.bias": "blocks.12.transformer_blocks.0.to_q.bias", - "conditioner.embedders.3.encoder.encoder.mid.attn_1.q.weight": "blocks.12.transformer_blocks.0.to_q.weight", - "conditioner.embedders.3.encoder.encoder.mid.attn_1.v.bias": "blocks.12.transformer_blocks.0.to_v.bias", - "conditioner.embedders.3.encoder.encoder.mid.attn_1.v.weight": "blocks.12.transformer_blocks.0.to_v.weight", - "conditioner.embedders.3.encoder.encoder.mid.block_1.conv1.bias": "blocks.11.conv1.bias", - "conditioner.embedders.3.encoder.encoder.mid.block_1.conv1.weight": "blocks.11.conv1.weight", - "conditioner.embedders.3.encoder.encoder.mid.block_1.conv2.bias": "blocks.11.conv2.bias", - "conditioner.embedders.3.encoder.encoder.mid.block_1.conv2.weight": "blocks.11.conv2.weight", - "conditioner.embedders.3.encoder.encoder.mid.block_1.norm1.bias": "blocks.11.norm1.bias", - "conditioner.embedders.3.encoder.encoder.mid.block_1.norm1.weight": "blocks.11.norm1.weight", - "conditioner.embedders.3.encoder.encoder.mid.block_1.norm2.bias": "blocks.11.norm2.bias", - "conditioner.embedders.3.encoder.encoder.mid.block_1.norm2.weight": "blocks.11.norm2.weight", - "conditioner.embedders.3.encoder.encoder.mid.block_2.conv1.bias": "blocks.13.conv1.bias", - "conditioner.embedders.3.encoder.encoder.mid.block_2.conv1.weight": "blocks.13.conv1.weight", - "conditioner.embedders.3.encoder.encoder.mid.block_2.conv2.bias": "blocks.13.conv2.bias", - "conditioner.embedders.3.encoder.encoder.mid.block_2.conv2.weight": "blocks.13.conv2.weight", - "conditioner.embedders.3.encoder.encoder.mid.block_2.norm1.bias": "blocks.13.norm1.bias", - "conditioner.embedders.3.encoder.encoder.mid.block_2.norm1.weight": "blocks.13.norm1.weight", - "conditioner.embedders.3.encoder.encoder.mid.block_2.norm2.bias": "blocks.13.norm2.bias", - "conditioner.embedders.3.encoder.encoder.mid.block_2.norm2.weight": "blocks.13.norm2.weight", - "conditioner.embedders.3.encoder.encoder.norm_out.bias": 
"conv_norm_out.bias", - "conditioner.embedders.3.encoder.encoder.norm_out.weight": "conv_norm_out.weight", - "conditioner.embedders.3.encoder.quant_conv.bias": "quant_conv.bias", - "conditioner.embedders.3.encoder.quant_conv.weight": "quant_conv.weight", - } - state_dict_ = {} - for name in state_dict: - if name in rename_dict: - param = state_dict[name] - if "transformer_blocks" in rename_dict[name]: - param = param.squeeze() - state_dict_[rename_dict[name]] = param - return state_dict_ diff --git a/diffsynth/models/tiler.py b/diffsynth/models/tiler.py deleted file mode 100644 index dff5ebf..0000000 --- a/diffsynth/models/tiler.py +++ /dev/null @@ -1,234 +0,0 @@ -import torch -from einops import rearrange, repeat - - -class TileWorker: - def __init__(self): - pass - - - def mask(self, height, width, border_width): - # Create a mask with shape (height, width). - # The centre area is filled with 1, and the border line is filled with values in range (0, 1]. - x = torch.arange(height).repeat(width, 1).T - y = torch.arange(width).repeat(height, 1) - mask = torch.stack([x + 1, height - x, y + 1, width - y]).min(dim=0).values - mask = (mask / border_width).clip(0, 1) - return mask - - - def tile(self, model_input, tile_size, tile_stride, tile_device, tile_dtype): - # Convert a tensor (b, c, h, w) to (b, c, tile_size, tile_size, tile_num) - batch_size, channel, _, _ = model_input.shape - model_input = model_input.to(device=tile_device, dtype=tile_dtype) - unfold_operator = torch.nn.Unfold( - kernel_size=(tile_size, tile_size), - stride=(tile_stride, tile_stride) - ) - model_input = unfold_operator(model_input) - model_input = model_input.view((batch_size, channel, tile_size, tile_size, -1)) - - return model_input - - - def tiled_inference(self, forward_fn, model_input, tile_batch_size, inference_device, inference_dtype, tile_device, tile_dtype): - # Call y=forward_fn(x) for each tile - tile_num = model_input.shape[-1] - model_output_stack = [] - - for tile_id in range(0, 
tile_num, tile_batch_size): - - # process input - tile_id_ = min(tile_id + tile_batch_size, tile_num) - x = model_input[:, :, :, :, tile_id: tile_id_] - x = x.to(device=inference_device, dtype=inference_dtype) - x = rearrange(x, "b c h w n -> (n b) c h w") - - # process output - y = forward_fn(x) - y = rearrange(y, "(n b) c h w -> b c h w n", n=tile_id_-tile_id) - y = y.to(device=tile_device, dtype=tile_dtype) - model_output_stack.append(y) - - model_output = torch.concat(model_output_stack, dim=-1) - return model_output - - - def io_scale(self, model_output, tile_size): - # Determine the size modification happened in forward_fn - # We only consider the same scale on height and width. - io_scale = model_output.shape[2] / tile_size - return io_scale - - - def untile(self, model_output, height, width, tile_size, tile_stride, border_width, tile_device, tile_dtype): - # The reversed function of tile - mask = self.mask(tile_size, tile_size, border_width) - mask = mask.to(device=tile_device, dtype=tile_dtype) - mask = rearrange(mask, "h w -> 1 1 h w 1") - model_output = model_output * mask - - fold_operator = torch.nn.Fold( - output_size=(height, width), - kernel_size=(tile_size, tile_size), - stride=(tile_stride, tile_stride) - ) - mask = repeat(mask[0, 0, :, :, 0], "h w -> 1 (h w) n", n=model_output.shape[-1]) - model_output = rearrange(model_output, "b c h w n -> b (c h w) n") - model_output = fold_operator(model_output) / fold_operator(mask) - - return model_output - - - def tiled_forward(self, forward_fn, model_input, tile_size, tile_stride, tile_batch_size=1, tile_device="cpu", tile_dtype=torch.float32, border_width=None): - # Prepare - inference_device, inference_dtype = model_input.device, model_input.dtype - height, width = model_input.shape[2], model_input.shape[3] - border_width = int(tile_stride*0.5) if border_width is None else border_width - - # tile - model_input = self.tile(model_input, tile_size, tile_stride, tile_device, tile_dtype) - - # inference - 
model_output = self.tiled_inference(forward_fn, model_input, tile_batch_size, inference_device, inference_dtype, tile_device, tile_dtype) - - # resize - io_scale = self.io_scale(model_output, tile_size) - height, width = int(height*io_scale), int(width*io_scale) - tile_size, tile_stride = int(tile_size*io_scale), int(tile_stride*io_scale) - border_width = int(border_width*io_scale) - - # untile - model_output = self.untile(model_output, height, width, tile_size, tile_stride, border_width, tile_device, tile_dtype) - - # Done! - model_output = model_output.to(device=inference_device, dtype=inference_dtype) - return model_output - - - -class FastTileWorker: - def __init__(self): - pass - - - def build_mask(self, data, is_bound): - _, _, H, W = data.shape - h = repeat(torch.arange(H), "H -> H W", H=H, W=W) - w = repeat(torch.arange(W), "W -> H W", H=H, W=W) - border_width = (H + W) // 4 - pad = torch.ones_like(h) * border_width - mask = torch.stack([ - pad if is_bound[0] else h + 1, - pad if is_bound[1] else H - h, - pad if is_bound[2] else w + 1, - pad if is_bound[3] else W - w - ]).min(dim=0).values - mask = mask.clip(1, border_width) - mask = (mask / border_width).to(dtype=data.dtype, device=data.device) - mask = rearrange(mask, "H W -> 1 H W") - return mask - - - def tiled_forward(self, forward_fn, model_input, tile_size, tile_stride, tile_device="cpu", tile_dtype=torch.float32, border_width=None): - # Prepare - B, C, H, W = model_input.shape - border_width = int(tile_stride*0.5) if border_width is None else border_width - weight = torch.zeros((1, 1, H, W), dtype=tile_dtype, device=tile_device) - values = torch.zeros((B, C, H, W), dtype=tile_dtype, device=tile_device) - - # Split tasks - tasks = [] - for h in range(0, H, tile_stride): - for w in range(0, W, tile_stride): - if (h-tile_stride >= 0 and h-tile_stride+tile_size >= H) or (w-tile_stride >= 0 and w-tile_stride+tile_size >= W): - continue - h_, w_ = h + tile_size, w + tile_size - if h_ > H: h, h_ = H - 
tile_size, H - if w_ > W: w, w_ = W - tile_size, W - tasks.append((h, h_, w, w_)) - - # Run - for hl, hr, wl, wr in tasks: - # Forward - hidden_states_batch = forward_fn(hl, hr, wl, wr).to(dtype=tile_dtype, device=tile_device) - - mask = self.build_mask(hidden_states_batch, is_bound=(hl==0, hr>=H, wl==0, wr>=W)) - values[:, :, hl:hr, wl:wr] += hidden_states_batch * mask - weight[:, :, hl:hr, wl:wr] += mask - values /= weight - return values - - - -class TileWorker2Dto3D: - """ - Process 3D tensors, but only enable TileWorker on 2D. - """ - def __init__(self): - pass - - - def build_mask(self, T, H, W, dtype, device, is_bound, border_width): - t = repeat(torch.arange(T), "T -> T H W", T=T, H=H, W=W) - h = repeat(torch.arange(H), "H -> T H W", T=T, H=H, W=W) - w = repeat(torch.arange(W), "W -> T H W", T=T, H=H, W=W) - border_width = (H + W) // 4 if border_width is None else border_width - pad = torch.ones_like(h) * border_width - mask = torch.stack([ - pad if is_bound[0] else t + 1, - pad if is_bound[1] else T - t, - pad if is_bound[2] else h + 1, - pad if is_bound[3] else H - h, - pad if is_bound[4] else w + 1, - pad if is_bound[5] else W - w - ]).min(dim=0).values - mask = mask.clip(1, border_width) - mask = (mask / border_width).to(dtype=dtype, device=device) - mask = rearrange(mask, "T H W -> 1 1 T H W") - return mask - - - def tiled_forward( - self, - forward_fn, - model_input, - tile_size, tile_stride, - tile_device="cpu", tile_dtype=torch.float32, - computation_device="cuda", computation_dtype=torch.float32, - border_width=None, scales=[1, 1, 1, 1], - progress_bar=lambda x:x - ): - B, C, T, H, W = model_input.shape - scale_C, scale_T, scale_H, scale_W = scales - tile_size_H, tile_size_W = tile_size - tile_stride_H, tile_stride_W = tile_stride - - value = torch.zeros((B, int(C*scale_C), int(T*scale_T), int(H*scale_H), int(W*scale_W)), dtype=tile_dtype, device=tile_device) - weight = torch.zeros((1, 1, int(T*scale_T), int(H*scale_H), int(W*scale_W)), 
dtype=tile_dtype, device=tile_device) - - # Split tasks - tasks = [] - for h in range(0, H, tile_stride_H): - for w in range(0, W, tile_stride_W): - if (h-tile_stride_H >= 0 and h-tile_stride_H+tile_size_H >= H) or (w-tile_stride_W >= 0 and w-tile_stride_W+tile_size_W >= W): - continue - h_, w_ = h + tile_size_H, w + tile_size_W - if h_ > H: h, h_ = max(H - tile_size_H, 0), H - if w_ > W: w, w_ = max(W - tile_size_W, 0), W - tasks.append((h, h_, w, w_)) - - # Run - for hl, hr, wl, wr in progress_bar(tasks): - mask = self.build_mask( - int(T*scale_T), int((hr-hl)*scale_H), int((wr-wl)*scale_W), - tile_dtype, tile_device, - is_bound=(True, True, hl==0, hr>=H, wl==0, wr>=W), - border_width=border_width - ) - grid_input = model_input[:, :, :, hl:hr, wl:wr].to(dtype=computation_dtype, device=computation_device) - grid_output = forward_fn(grid_input).to(dtype=tile_dtype, device=tile_device) - value[:, :, :, int(hl*scale_H):int(hr*scale_H), int(wl*scale_W):int(wr*scale_W)] += grid_output * mask - weight[:, :, :, int(hl*scale_H):int(hr*scale_H), int(wl*scale_W):int(wr*scale_W)] += mask - value = value / weight - return value \ No newline at end of file diff --git a/diffsynth/models/utils.py b/diffsynth/models/utils.py deleted file mode 100644 index 86104d0..0000000 --- a/diffsynth/models/utils.py +++ /dev/null @@ -1,182 +0,0 @@ -import torch, os -from safetensors import safe_open -from contextlib import contextmanager -import hashlib - -@contextmanager -def init_weights_on_device(device = torch.device("meta"), include_buffers :bool = False): - - old_register_parameter = torch.nn.Module.register_parameter - if include_buffers: - old_register_buffer = torch.nn.Module.register_buffer - - def register_empty_parameter(module, name, param): - old_register_parameter(module, name, param) - if param is not None: - param_cls = type(module._parameters[name]) - kwargs = module._parameters[name].__dict__ - kwargs["requires_grad"] = param.requires_grad - module._parameters[name] = 
param_cls(module._parameters[name].to(device), **kwargs) - - def register_empty_buffer(module, name, buffer, persistent=True): - old_register_buffer(module, name, buffer, persistent=persistent) - if buffer is not None: - module._buffers[name] = module._buffers[name].to(device) - - def patch_tensor_constructor(fn): - def wrapper(*args, **kwargs): - kwargs["device"] = device - return fn(*args, **kwargs) - - return wrapper - - if include_buffers: - tensor_constructors_to_patch = { - torch_function_name: getattr(torch, torch_function_name) - for torch_function_name in ["empty", "zeros", "ones", "full"] - } - else: - tensor_constructors_to_patch = {} - - try: - torch.nn.Module.register_parameter = register_empty_parameter - if include_buffers: - torch.nn.Module.register_buffer = register_empty_buffer - for torch_function_name in tensor_constructors_to_patch.keys(): - setattr(torch, torch_function_name, patch_tensor_constructor(getattr(torch, torch_function_name))) - yield - finally: - torch.nn.Module.register_parameter = old_register_parameter - if include_buffers: - torch.nn.Module.register_buffer = old_register_buffer - for torch_function_name, old_torch_function in tensor_constructors_to_patch.items(): - setattr(torch, torch_function_name, old_torch_function) - -def load_state_dict_from_folder(file_path, torch_dtype=None): - state_dict = {} - for file_name in os.listdir(file_path): - if "." 
in file_name and file_name.split(".")[-1] in [ - "safetensors", "bin", "ckpt", "pth", "pt" - ]: - state_dict.update(load_state_dict(os.path.join(file_path, file_name), torch_dtype=torch_dtype)) - return state_dict - - -def load_state_dict(file_path, torch_dtype=None, device="cpu"): - if file_path.endswith(".safetensors"): - return load_state_dict_from_safetensors(file_path, torch_dtype=torch_dtype, device=device) - else: - return load_state_dict_from_bin(file_path, torch_dtype=torch_dtype, device=device) - - -def load_state_dict_from_safetensors(file_path, torch_dtype=None, device="cpu"): - state_dict = {} - with safe_open(file_path, framework="pt", device=str(device)) as f: - for k in f.keys(): - state_dict[k] = f.get_tensor(k) - if torch_dtype is not None: - state_dict[k] = state_dict[k].to(torch_dtype) - return state_dict - - -def load_state_dict_from_bin(file_path, torch_dtype=None, device="cpu"): - state_dict = torch.load(file_path, map_location=device, weights_only=True) - if torch_dtype is not None: - for i in state_dict: - if isinstance(state_dict[i], torch.Tensor): - state_dict[i] = state_dict[i].to(torch_dtype) - return state_dict - - -def search_for_embeddings(state_dict): - embeddings = [] - for k in state_dict: - if isinstance(state_dict[k], torch.Tensor): - embeddings.append(state_dict[k]) - elif isinstance(state_dict[k], dict): - embeddings += search_for_embeddings(state_dict[k]) - return embeddings - - -def search_parameter(param, state_dict): - for name, param_ in state_dict.items(): - if param.numel() == param_.numel(): - if param.shape == param_.shape: - if torch.dist(param, param_) < 1e-3: - return name - else: - if torch.dist(param.flatten(), param_.flatten()) < 1e-3: - return name - return None - - -def build_rename_dict(source_state_dict, target_state_dict, split_qkv=False): - matched_keys = set() - with torch.no_grad(): - for name in source_state_dict: - rename = search_parameter(source_state_dict[name], target_state_dict) - if rename is not 
None: - print(f'"{name}": "{rename}",') - matched_keys.add(rename) - elif split_qkv and len(source_state_dict[name].shape)>=1 and source_state_dict[name].shape[0]%3==0: - length = source_state_dict[name].shape[0] // 3 - rename = [] - for i in range(3): - rename.append(search_parameter(source_state_dict[name][i*length: i*length+length], target_state_dict)) - if None not in rename: - print(f'"{name}": {rename},') - for rename_ in rename: - matched_keys.add(rename_) - for name in target_state_dict: - if name not in matched_keys: - print("Cannot find", name, target_state_dict[name].shape) - - -def search_for_files(folder, extensions): - files = [] - if os.path.isdir(folder): - for file in sorted(os.listdir(folder)): - files += search_for_files(os.path.join(folder, file), extensions) - elif os.path.isfile(folder): - for extension in extensions: - if folder.endswith(extension): - files.append(folder) - break - return files - - -def convert_state_dict_keys_to_single_str(state_dict, with_shape=True): - keys = [] - for key, value in state_dict.items(): - if isinstance(key, str): - if isinstance(value, torch.Tensor): - if with_shape: - shape = "_".join(map(str, list(value.shape))) - keys.append(key + ":" + shape) - keys.append(key) - elif isinstance(value, dict): - keys.append(key + "|" + convert_state_dict_keys_to_single_str(value, with_shape=with_shape)) - keys.sort() - keys_str = ",".join(keys) - return keys_str - - -def split_state_dict_with_prefix(state_dict): - keys = sorted([key for key in state_dict if isinstance(key, str)]) - prefix_dict = {} - for key in keys: - prefix = key if "." 
not in key else key.split(".")[0] - if prefix not in prefix_dict: - prefix_dict[prefix] = [] - prefix_dict[prefix].append(key) - state_dicts = [] - for prefix, keys in prefix_dict.items(): - sub_state_dict = {key: state_dict[key] for key in keys} - state_dicts.append(sub_state_dict) - return state_dicts - - -def hash_state_dict_keys(state_dict, with_shape=True): - keys_str = convert_state_dict_keys_to_single_str(state_dict, with_shape=with_shape) - keys_str = keys_str.encode(encoding="UTF-8") - return hashlib.md5(keys_str).hexdigest() \ No newline at end of file diff --git a/diffsynth/models/wan_video_animate_adapter.py b/diffsynth/models/wan_video_animate_adapter.py deleted file mode 100644 index 771280a..0000000 --- a/diffsynth/models/wan_video_animate_adapter.py +++ /dev/null @@ -1,670 +0,0 @@ -import torch -import torch.nn as nn -from torch.nn import functional as F -import math -from typing import Tuple, Optional, List -from einops import rearrange - - - -MEMORY_LAYOUT = { - "flash": ( - lambda x: x.view(x.shape[0] * x.shape[1], *x.shape[2:]), - lambda x: x, - ), - "torch": ( - lambda x: x.transpose(1, 2), - lambda x: x.transpose(1, 2), - ), - "vanilla": ( - lambda x: x.transpose(1, 2), - lambda x: x.transpose(1, 2), - ), -} - - -def attention( - q, - k, - v, - mode="torch", - drop_rate=0, - attn_mask=None, - causal=False, - max_seqlen_q=None, - batch_size=1, -): - pre_attn_layout, post_attn_layout = MEMORY_LAYOUT[mode] - - if mode == "torch": - if attn_mask is not None and attn_mask.dtype != torch.bool: - attn_mask = attn_mask.to(q.dtype) - x = F.scaled_dot_product_attention(q, k, v, attn_mask=attn_mask, dropout_p=drop_rate, is_causal=causal) - - x = post_attn_layout(x) - b, s, a, d = x.shape - out = x.reshape(b, s, -1) - return out - - -class CausalConv1d(nn.Module): - - def __init__(self, chan_in, chan_out, kernel_size=3, stride=1, dilation=1, pad_mode="replicate", **kwargs): - super().__init__() - - self.pad_mode = pad_mode - padding = (kernel_size - 1, 0) 
# T - self.time_causal_padding = padding - - self.conv = nn.Conv1d(chan_in, chan_out, kernel_size, stride=stride, dilation=dilation, **kwargs) - - def forward(self, x): - x = F.pad(x, self.time_causal_padding, mode=self.pad_mode) - return self.conv(x) - - - -class FaceEncoder(nn.Module): - def __init__(self, in_dim: int, hidden_dim: int, num_heads=int, dtype=None, device=None): - factory_kwargs = {"dtype": dtype, "device": device} - super().__init__() - - self.num_heads = num_heads - self.conv1_local = CausalConv1d(in_dim, 1024 * num_heads, 3, stride=1) - self.norm1 = nn.LayerNorm(hidden_dim // 8, elementwise_affine=False, eps=1e-6, **factory_kwargs) - self.act = nn.SiLU() - self.conv2 = CausalConv1d(1024, 1024, 3, stride=2) - self.conv3 = CausalConv1d(1024, 1024, 3, stride=2) - - self.out_proj = nn.Linear(1024, hidden_dim) - self.norm1 = nn.LayerNorm(1024, elementwise_affine=False, eps=1e-6, **factory_kwargs) - - self.norm2 = nn.LayerNorm(1024, elementwise_affine=False, eps=1e-6, **factory_kwargs) - - self.norm3 = nn.LayerNorm(1024, elementwise_affine=False, eps=1e-6, **factory_kwargs) - - self.padding_tokens = nn.Parameter(torch.zeros(1, 1, 1, hidden_dim)) - - def forward(self, x): - - x = rearrange(x, "b t c -> b c t") - b, c, t = x.shape - - x = self.conv1_local(x) - x = rearrange(x, "b (n c) t -> (b n) t c", n=self.num_heads) - - x = self.norm1(x) - x = self.act(x) - x = rearrange(x, "b t c -> b c t") - x = self.conv2(x) - x = rearrange(x, "b c t -> b t c") - x = self.norm2(x) - x = self.act(x) - x = rearrange(x, "b t c -> b c t") - x = self.conv3(x) - x = rearrange(x, "b c t -> b t c") - x = self.norm3(x) - x = self.act(x) - x = self.out_proj(x) - x = rearrange(x, "(b n) t c -> b t n c", b=b) - padding = self.padding_tokens.repeat(b, x.shape[1], 1, 1) - x = torch.cat([x, padding], dim=-2) - x_local = x.clone() - - return x_local - - - -class RMSNorm(nn.Module): - def __init__( - self, - dim: int, - elementwise_affine=True, - eps: float = 1e-6, - device=None, 
- dtype=None, - ): - """ - Initialize the RMSNorm normalization layer. - - Args: - dim (int): The dimension of the input tensor. - eps (float, optional): A small value added to the denominator for numerical stability. Default is 1e-6. - - Attributes: - eps (float): A small value added to the denominator for numerical stability. - weight (nn.Parameter): Learnable scaling parameter. - - """ - factory_kwargs = {"device": device, "dtype": dtype} - super().__init__() - self.eps = eps - if elementwise_affine: - self.weight = nn.Parameter(torch.ones(dim, **factory_kwargs)) - - def _norm(self, x): - """ - Apply the RMSNorm normalization to the input tensor. - - Args: - x (torch.Tensor): The input tensor. - - Returns: - torch.Tensor: The normalized tensor. - - """ - return x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + self.eps) - - def forward(self, x): - """ - Forward pass through the RMSNorm layer. - - Args: - x (torch.Tensor): The input tensor. - - Returns: - torch.Tensor: The output tensor after applying RMSNorm. - - """ - output = self._norm(x.float()).type_as(x) - if hasattr(self, "weight"): - output = output * self.weight - return output - - -def get_norm_layer(norm_layer): - """ - Get the normalization layer. - - Args: - norm_layer (str): The type of normalization layer. - - Returns: - norm_layer (nn.Module): The normalization layer. 
- """ - if norm_layer == "layer": - return nn.LayerNorm - elif norm_layer == "rms": - return RMSNorm - else: - raise NotImplementedError(f"Norm layer {norm_layer} is not implemented") - - -class FaceAdapter(nn.Module): - def __init__( - self, - hidden_dim: int, - heads_num: int, - qk_norm: bool = True, - qk_norm_type: str = "rms", - num_adapter_layers: int = 1, - dtype=None, - device=None, - ): - - factory_kwargs = {"dtype": dtype, "device": device} - super().__init__() - self.hidden_size = hidden_dim - self.heads_num = heads_num - self.fuser_blocks = nn.ModuleList( - [ - FaceBlock( - self.hidden_size, - self.heads_num, - qk_norm=qk_norm, - qk_norm_type=qk_norm_type, - **factory_kwargs, - ) - for _ in range(num_adapter_layers) - ] - ) - - def forward( - self, - x: torch.Tensor, - motion_embed: torch.Tensor, - idx: int, - freqs_cis_q: Tuple[torch.Tensor, torch.Tensor] = None, - freqs_cis_k: Tuple[torch.Tensor, torch.Tensor] = None, - ) -> torch.Tensor: - - return self.fuser_blocks[idx](x, motion_embed, freqs_cis_q, freqs_cis_k) - - - -class FaceBlock(nn.Module): - def __init__( - self, - hidden_size: int, - heads_num: int, - qk_norm: bool = True, - qk_norm_type: str = "rms", - qk_scale: float = None, - dtype: Optional[torch.dtype] = None, - device: Optional[torch.device] = None, - ): - factory_kwargs = {"device": device, "dtype": dtype} - super().__init__() - - self.deterministic = False - self.hidden_size = hidden_size - self.heads_num = heads_num - head_dim = hidden_size // heads_num - self.scale = qk_scale or head_dim**-0.5 - - self.linear1_kv = nn.Linear(hidden_size, hidden_size * 2, **factory_kwargs) - self.linear1_q = nn.Linear(hidden_size, hidden_size, **factory_kwargs) - - self.linear2 = nn.Linear(hidden_size, hidden_size, **factory_kwargs) - - qk_norm_layer = get_norm_layer(qk_norm_type) - self.q_norm = ( - qk_norm_layer(head_dim, elementwise_affine=True, eps=1e-6, **factory_kwargs) if qk_norm else nn.Identity() - ) - self.k_norm = ( - 
qk_norm_layer(head_dim, elementwise_affine=True, eps=1e-6, **factory_kwargs) if qk_norm else nn.Identity() - ) - - self.pre_norm_feat = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6, **factory_kwargs) - - self.pre_norm_motion = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6, **factory_kwargs) - - def forward( - self, - x: torch.Tensor, - motion_vec: torch.Tensor, - motion_mask: Optional[torch.Tensor] = None, - use_context_parallel=False, - ) -> torch.Tensor: - - B, T, N, C = motion_vec.shape - T_comp = T - - x_motion = self.pre_norm_motion(motion_vec) - x_feat = self.pre_norm_feat(x) - - kv = self.linear1_kv(x_motion) - q = self.linear1_q(x_feat) - - k, v = rearrange(kv, "B L N (K H D) -> K B L N H D", K=2, H=self.heads_num) - q = rearrange(q, "B S (H D) -> B S H D", H=self.heads_num) - - # Apply QK-Norm if needed. - q = self.q_norm(q).to(v) - k = self.k_norm(k).to(v) - - k = rearrange(k, "B L N H D -> (B L) H N D") - v = rearrange(v, "B L N H D -> (B L) H N D") - - q = rearrange(q, "B (L S) H D -> (B L) H S D", L=T_comp) - # Compute attention. 
- attn = F.scaled_dot_product_attention(q, k, v) - - attn = rearrange(attn, "(B L) H S D -> B (L S) (H D)", L=T_comp) - - output = self.linear2(attn) - - if motion_mask is not None: - output = output * rearrange(motion_mask, "B T H W -> B (T H W)").unsqueeze(-1) - - return output - - - -def custom_qr(input_tensor): - original_dtype = input_tensor.dtype - if original_dtype == torch.bfloat16: - q, r = torch.linalg.qr(input_tensor.to(torch.float32)) - return q.to(original_dtype), r.to(original_dtype) - return torch.linalg.qr(input_tensor) - -def fused_leaky_relu(input, bias, negative_slope=0.2, scale=2 ** 0.5): - return F.leaky_relu(input + bias, negative_slope) * scale - - -def upfirdn2d_native(input, kernel, up_x, up_y, down_x, down_y, pad_x0, pad_x1, pad_y0, pad_y1): - _, minor, in_h, in_w = input.shape - kernel_h, kernel_w = kernel.shape - - out = input.view(-1, minor, in_h, 1, in_w, 1) - out = F.pad(out, [0, up_x - 1, 0, 0, 0, up_y - 1, 0, 0]) - out = out.view(-1, minor, in_h * up_y, in_w * up_x) - - out = F.pad(out, [max(pad_x0, 0), max(pad_x1, 0), max(pad_y0, 0), max(pad_y1, 0)]) - out = out[:, :, max(-pad_y0, 0): out.shape[2] - max(-pad_y1, 0), - max(-pad_x0, 0): out.shape[3] - max(-pad_x1, 0), ] - - out = out.reshape([-1, 1, in_h * up_y + pad_y0 + pad_y1, in_w * up_x + pad_x0 + pad_x1]) - w = torch.flip(kernel, [0, 1]).view(1, 1, kernel_h, kernel_w) - out = F.conv2d(out, w) - out = out.reshape(-1, minor, in_h * up_y + pad_y0 + pad_y1 - kernel_h + 1, - in_w * up_x + pad_x0 + pad_x1 - kernel_w + 1, ) - return out[:, :, ::down_y, ::down_x] - - -def upfirdn2d(input, kernel, up=1, down=1, pad=(0, 0)): - return upfirdn2d_native(input, kernel, up, up, down, down, pad[0], pad[1], pad[0], pad[1]) - - -def make_kernel(k): - k = torch.tensor(k, dtype=torch.float32) - if k.ndim == 1: - k = k[None, :] * k[:, None] - k /= k.sum() - return k - - -class FusedLeakyReLU(nn.Module): - def __init__(self, channel, negative_slope=0.2, scale=2 ** 0.5): - super().__init__() - 
self.bias = nn.Parameter(torch.zeros(1, channel, 1, 1)) - self.negative_slope = negative_slope - self.scale = scale - - def forward(self, input): - out = fused_leaky_relu(input, self.bias, self.negative_slope, self.scale) - return out - - -class Blur(nn.Module): - def __init__(self, kernel, pad, upsample_factor=1): - super().__init__() - - kernel = make_kernel(kernel) - - if upsample_factor > 1: - kernel = kernel * (upsample_factor ** 2) - - self.register_buffer('kernel', kernel) - - self.pad = pad - - def forward(self, input): - return upfirdn2d(input, self.kernel, pad=self.pad) - - -class ScaledLeakyReLU(nn.Module): - def __init__(self, negative_slope=0.2): - super().__init__() - - self.negative_slope = negative_slope - - def forward(self, input): - return F.leaky_relu(input, negative_slope=self.negative_slope) - - -class EqualConv2d(nn.Module): - def __init__(self, in_channel, out_channel, kernel_size, stride=1, padding=0, bias=True): - super().__init__() - - self.weight = nn.Parameter(torch.randn(out_channel, in_channel, kernel_size, kernel_size)) - self.scale = 1 / math.sqrt(in_channel * kernel_size ** 2) - - self.stride = stride - self.padding = padding - - if bias: - self.bias = nn.Parameter(torch.zeros(out_channel)) - else: - self.bias = None - - def forward(self, input): - - return F.conv2d(input, self.weight * self.scale, bias=self.bias, stride=self.stride, padding=self.padding) - - def __repr__(self): - return ( - f'{self.__class__.__name__}({self.weight.shape[1]}, {self.weight.shape[0]},' - f' {self.weight.shape[2]}, stride={self.stride}, padding={self.padding})' - ) - - -class EqualLinear(nn.Module): - def __init__(self, in_dim, out_dim, bias=True, bias_init=0, lr_mul=1, activation=None): - super().__init__() - - self.weight = nn.Parameter(torch.randn(out_dim, in_dim).div_(lr_mul)) - - if bias: - self.bias = nn.Parameter(torch.zeros(out_dim).fill_(bias_init)) - else: - self.bias = None - - self.activation = activation - - self.scale = (1 / 
math.sqrt(in_dim)) * lr_mul - self.lr_mul = lr_mul - - def forward(self, input): - - if self.activation: - out = F.linear(input, self.weight * self.scale) - out = fused_leaky_relu(out, self.bias * self.lr_mul) - else: - out = F.linear(input, self.weight * self.scale, bias=self.bias * self.lr_mul) - - return out - - def __repr__(self): - return (f'{self.__class__.__name__}({self.weight.shape[1]}, {self.weight.shape[0]})') - - -class ConvLayer(nn.Sequential): - def __init__( - self, - in_channel, - out_channel, - kernel_size, - downsample=False, - blur_kernel=[1, 3, 3, 1], - bias=True, - activate=True, - ): - layers = [] - - if downsample: - factor = 2 - p = (len(blur_kernel) - factor) + (kernel_size - 1) - pad0 = (p + 1) // 2 - pad1 = p // 2 - - layers.append(Blur(blur_kernel, pad=(pad0, pad1))) - - stride = 2 - self.padding = 0 - - else: - stride = 1 - self.padding = kernel_size // 2 - - layers.append(EqualConv2d(in_channel, out_channel, kernel_size, padding=self.padding, stride=stride, - bias=bias and not activate)) - - if activate: - if bias: - layers.append(FusedLeakyReLU(out_channel)) - else: - layers.append(ScaledLeakyReLU(0.2)) - - super().__init__(*layers) - - -class ResBlock(nn.Module): - def __init__(self, in_channel, out_channel, blur_kernel=[1, 3, 3, 1]): - super().__init__() - - self.conv1 = ConvLayer(in_channel, in_channel, 3) - self.conv2 = ConvLayer(in_channel, out_channel, 3, downsample=True) - - self.skip = ConvLayer(in_channel, out_channel, 1, downsample=True, activate=False, bias=False) - - def forward(self, input): - out = self.conv1(input) - out = self.conv2(out) - - skip = self.skip(input) - out = (out + skip) / math.sqrt(2) - - return out - - -class EncoderApp(nn.Module): - def __init__(self, size, w_dim=512): - super(EncoderApp, self).__init__() - - channels = { - 4: 512, - 8: 512, - 16: 512, - 32: 512, - 64: 256, - 128: 128, - 256: 64, - 512: 32, - 1024: 16 - } - - self.w_dim = w_dim - log_size = int(math.log(size, 2)) - - self.convs = 
nn.ModuleList() - self.convs.append(ConvLayer(3, channels[size], 1)) - - in_channel = channels[size] - for i in range(log_size, 2, -1): - out_channel = channels[2 ** (i - 1)] - self.convs.append(ResBlock(in_channel, out_channel)) - in_channel = out_channel - - self.convs.append(EqualConv2d(in_channel, self.w_dim, 4, padding=0, bias=False)) - - def forward(self, x): - - res = [] - h = x - for conv in self.convs: - h = conv(h) - res.append(h) - - return res[-1].squeeze(-1).squeeze(-1), res[::-1][2:] - - -class Encoder(nn.Module): - def __init__(self, size, dim=512, dim_motion=20): - super(Encoder, self).__init__() - - # appearance netmork - self.net_app = EncoderApp(size, dim) - - # motion network - fc = [EqualLinear(dim, dim)] - for i in range(3): - fc.append(EqualLinear(dim, dim)) - - fc.append(EqualLinear(dim, dim_motion)) - self.fc = nn.Sequential(*fc) - - def enc_app(self, x): - h_source = self.net_app(x) - return h_source - - def enc_motion(self, x): - h, _ = self.net_app(x) - h_motion = self.fc(h) - return h_motion - - -class Direction(nn.Module): - def __init__(self, motion_dim): - super(Direction, self).__init__() - self.weight = nn.Parameter(torch.randn(512, motion_dim)) - - def forward(self, input): - - weight = self.weight + 1e-8 - Q, R = custom_qr(weight) - if input is None: - return Q - else: - input_diag = torch.diag_embed(input) # alpha, diagonal matrix - out = torch.matmul(input_diag, Q.T) - out = torch.sum(out, dim=1) - return out - - -class Synthesis(nn.Module): - def __init__(self, motion_dim): - super(Synthesis, self).__init__() - self.direction = Direction(motion_dim) - - -class Generator(nn.Module): - def __init__(self, size, style_dim=512, motion_dim=20): - super().__init__() - - self.enc = Encoder(size, style_dim, motion_dim) - self.dec = Synthesis(motion_dim) - - def get_motion(self, img): - #motion_feat = self.enc.enc_motion(img) - motion_feat = torch.utils.checkpoint.checkpoint((self.enc.enc_motion), img, use_reentrant=True) - motion = 
self.dec.direction(motion_feat) - return motion - - -class WanAnimateAdapter(torch.nn.Module): - def __init__(self): - super().__init__() - self.pose_patch_embedding = torch.nn.Conv3d(16, 5120, kernel_size=(1, 2, 2), stride=(1, 2, 2)) - self.motion_encoder = Generator(size=512, style_dim=512, motion_dim=20) - self.face_adapter = FaceAdapter(heads_num=40, hidden_dim=5120, num_adapter_layers=40 // 5) - self.face_encoder = FaceEncoder(in_dim=512, hidden_dim=5120, num_heads=4) - - def after_patch_embedding(self, x: List[torch.Tensor], pose_latents, face_pixel_values): - pose_latents = self.pose_patch_embedding(pose_latents) - x[:, :, 1:] += pose_latents - - b,c,T,h,w = face_pixel_values.shape - face_pixel_values = rearrange(face_pixel_values, "b c t h w -> (b t) c h w") - - encode_bs = 8 - face_pixel_values_tmp = [] - for i in range(math.ceil(face_pixel_values.shape[0]/encode_bs)): - face_pixel_values_tmp.append(self.motion_encoder.get_motion(face_pixel_values[i*encode_bs:(i+1)*encode_bs])) - - motion_vec = torch.cat(face_pixel_values_tmp) - - motion_vec = rearrange(motion_vec, "(b t) c -> b t c", t=T) - motion_vec = self.face_encoder(motion_vec) - - B, L, H, C = motion_vec.shape - pad_face = torch.zeros(B, 1, H, C).type_as(motion_vec) - motion_vec = torch.cat([pad_face, motion_vec], dim=1) - return x, motion_vec - - def after_transformer_block(self, block_idx, x, motion_vec, motion_masks=None): - if block_idx % 5 == 0: - adapter_args = [x, motion_vec, motion_masks, False] - residual_out = self.face_adapter.fuser_blocks[block_idx // 5](*adapter_args) - x = residual_out + x - return x - - @staticmethod - def state_dict_converter(): - return WanAnimateAdapterStateDictConverter() - - -class WanAnimateAdapterStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - return state_dict - - def from_civitai(self, state_dict): - state_dict_ = {} - for name, param in state_dict.items(): - if name.startswith("pose_patch_embedding.") or 
name.startswith("face_adapter") or name.startswith("face_encoder") or name.startswith("motion_encoder"): - state_dict_[name] = param - return state_dict_ - diff --git a/diffsynth/models/wan_video_camera_controller.py b/diffsynth/models/wan_video_camera_controller.py deleted file mode 100644 index 45a44ee..0000000 --- a/diffsynth/models/wan_video_camera_controller.py +++ /dev/null @@ -1,206 +0,0 @@ -import torch -import torch.nn as nn -import numpy as np -from einops import rearrange -import os -from typing_extensions import Literal - -class SimpleAdapter(nn.Module): - def __init__(self, in_dim, out_dim, kernel_size, stride, num_residual_blocks=1): - super(SimpleAdapter, self).__init__() - - # Pixel Unshuffle: reduce spatial dimensions by a factor of 8 - self.pixel_unshuffle = nn.PixelUnshuffle(downscale_factor=8) - - # Convolution: reduce spatial dimensions by a factor - # of 2 (without overlap) - self.conv = nn.Conv2d(in_dim * 64, out_dim, kernel_size=kernel_size, stride=stride, padding=0) - - # Residual blocks for feature extraction - self.residual_blocks = nn.Sequential( - *[ResidualBlock(out_dim) for _ in range(num_residual_blocks)] - ) - - def forward(self, x): - # Reshape to merge the frame dimension into batch - bs, c, f, h, w = x.size() - x = x.permute(0, 2, 1, 3, 4).contiguous().view(bs * f, c, h, w) - - # Pixel Unshuffle operation - x_unshuffled = self.pixel_unshuffle(x) - - # Convolution operation - x_conv = self.conv(x_unshuffled) - - # Feature extraction with residual blocks - out = self.residual_blocks(x_conv) - - # Reshape to restore original bf dimension - out = out.view(bs, f, out.size(1), out.size(2), out.size(3)) - - # Permute dimensions to reorder (if needed), e.g., swap channels and feature frames - out = out.permute(0, 2, 1, 3, 4) - - return out - - def process_camera_coordinates( - self, - direction: Literal["Left", "Right", "Up", "Down", "LeftUp", "LeftDown", "RightUp", "RightDown"], - length: int, - height: int, - width: int, - speed: float 
= 1/54, - origin=(0, 0.532139961, 0.946026558, 0.5, 0.5, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0) - ): - if origin is None: - origin = (0, 0.532139961, 0.946026558, 0.5, 0.5, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0) - coordinates = generate_camera_coordinates(direction, length, speed, origin) - plucker_embedding = process_pose_file(coordinates, width, height) - return plucker_embedding - - - -class ResidualBlock(nn.Module): - def __init__(self, dim): - super(ResidualBlock, self).__init__() - self.conv1 = nn.Conv2d(dim, dim, kernel_size=3, padding=1) - self.relu = nn.ReLU(inplace=True) - self.conv2 = nn.Conv2d(dim, dim, kernel_size=3, padding=1) - - def forward(self, x): - residual = x - out = self.relu(self.conv1(x)) - out = self.conv2(out) - out += residual - return out - -class Camera(object): - """Copied from https://github.com/hehao13/CameraCtrl/blob/main/inference.py - """ - def __init__(self, entry): - fx, fy, cx, cy = entry[1:5] - self.fx = fx - self.fy = fy - self.cx = cx - self.cy = cy - w2c_mat = np.array(entry[7:]).reshape(3, 4) - w2c_mat_4x4 = np.eye(4) - w2c_mat_4x4[:3, :] = w2c_mat - self.w2c_mat = w2c_mat_4x4 - self.c2w_mat = np.linalg.inv(w2c_mat_4x4) - -def get_relative_pose(cam_params): - """Copied from https://github.com/hehao13/CameraCtrl/blob/main/inference.py - """ - abs_w2cs = [cam_param.w2c_mat for cam_param in cam_params] - abs_c2ws = [cam_param.c2w_mat for cam_param in cam_params] - cam_to_origin = 0 - target_cam_c2w = np.array([ - [1, 0, 0, 0], - [0, 1, 0, -cam_to_origin], - [0, 0, 1, 0], - [0, 0, 0, 1] - ]) - abs2rel = target_cam_c2w @ abs_w2cs[0] - ret_poses = [target_cam_c2w, ] + [abs2rel @ abs_c2w for abs_c2w in abs_c2ws[1:]] - ret_poses = np.array(ret_poses, dtype=np.float32) - return ret_poses - -def custom_meshgrid(*args): - # torch>=2.0.0 only - return torch.meshgrid(*args, indexing='ij') - - -def ray_condition(K, c2w, H, W, device): - """Copied from https://github.com/hehao13/CameraCtrl/blob/main/inference.py - """ - # c2w: B, 
V, 4, 4 - # K: B, V, 4 - - B = K.shape[0] - - j, i = custom_meshgrid( - torch.linspace(0, H - 1, H, device=device, dtype=c2w.dtype), - torch.linspace(0, W - 1, W, device=device, dtype=c2w.dtype), - ) - i = i.reshape([1, 1, H * W]).expand([B, 1, H * W]) + 0.5 # [B, HxW] - j = j.reshape([1, 1, H * W]).expand([B, 1, H * W]) + 0.5 # [B, HxW] - - fx, fy, cx, cy = K.chunk(4, dim=-1) # B,V, 1 - - zs = torch.ones_like(i) # [B, HxW] - xs = (i - cx) / fx * zs - ys = (j - cy) / fy * zs - zs = zs.expand_as(ys) - - directions = torch.stack((xs, ys, zs), dim=-1) # B, V, HW, 3 - directions = directions / directions.norm(dim=-1, keepdim=True) # B, V, HW, 3 - - rays_d = directions @ c2w[..., :3, :3].transpose(-1, -2) # B, V, 3, HW - rays_o = c2w[..., :3, 3] # B, V, 3 - rays_o = rays_o[:, :, None].expand_as(rays_d) # B, V, 3, HW - # c2w @ dirctions - rays_dxo = torch.linalg.cross(rays_o, rays_d) - plucker = torch.cat([rays_dxo, rays_d], dim=-1) - plucker = plucker.reshape(B, c2w.shape[1], H, W, 6) # B, V, H, W, 6 - # plucker = plucker.permute(0, 1, 4, 2, 3) - return plucker - - -def process_pose_file(cam_params, width=672, height=384, original_pose_width=1280, original_pose_height=720, device='cpu', return_poses=False): - if return_poses: - return cam_params - else: - cam_params = [Camera(cam_param) for cam_param in cam_params] - - sample_wh_ratio = width / height - pose_wh_ratio = original_pose_width / original_pose_height # Assuming placeholder ratios, change as needed - - if pose_wh_ratio > sample_wh_ratio: - resized_ori_w = height * pose_wh_ratio - for cam_param in cam_params: - cam_param.fx = resized_ori_w * cam_param.fx / width - else: - resized_ori_h = width / pose_wh_ratio - for cam_param in cam_params: - cam_param.fy = resized_ori_h * cam_param.fy / height - - intrinsic = np.asarray([[cam_param.fx * width, - cam_param.fy * height, - cam_param.cx * width, - cam_param.cy * height] - for cam_param in cam_params], dtype=np.float32) - - K = torch.as_tensor(intrinsic)[None] # [1, 
1, 4] - c2ws = get_relative_pose(cam_params) # Assuming this function is defined elsewhere - c2ws = torch.as_tensor(c2ws)[None] # [1, n_frame, 4, 4] - plucker_embedding = ray_condition(K, c2ws, height, width, device=device)[0].permute(0, 3, 1, 2).contiguous() # V, 6, H, W - plucker_embedding = plucker_embedding[None] - plucker_embedding = rearrange(plucker_embedding, "b f c h w -> b f h w c")[0] - return plucker_embedding - - - -def generate_camera_coordinates( - direction: Literal["Left", "Right", "Up", "Down", "LeftUp", "LeftDown", "RightUp", "RightDown", "In", "Out"], - length: int, - speed: float = 1/54, - origin=(0, 0.532139961, 0.946026558, 0.5, 0.5, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0) -): - coordinates = [list(origin)] - while len(coordinates) < length: - coor = coordinates[-1].copy() - if "Left" in direction: - coor[9] += speed - if "Right" in direction: - coor[9] -= speed - if "Up" in direction: - coor[13] += speed - if "Down" in direction: - coor[13] -= speed - if "In" in direction: - coor[18] -= speed - if "Out" in direction: - coor[18] += speed - coordinates.append(coor) - return coordinates diff --git a/diffsynth/models/wan_video_dit.py b/diffsynth/models/wan_video_dit.py deleted file mode 100644 index cdebad4..0000000 --- a/diffsynth/models/wan_video_dit.py +++ /dev/null @@ -1,746 +0,0 @@ -import torch -import torch.nn as nn -import torch.nn.functional as F -import math -from typing import Tuple, Optional -from einops import rearrange -from .utils import hash_state_dict_keys -from .wan_video_camera_controller import SimpleAdapter -try: - import flash_attn_interface - FLASH_ATTN_3_AVAILABLE = True -except ModuleNotFoundError: - FLASH_ATTN_3_AVAILABLE = False - -try: - import flash_attn - FLASH_ATTN_2_AVAILABLE = True -except ModuleNotFoundError: - FLASH_ATTN_2_AVAILABLE = False - -try: - from sageattention import sageattn - SAGE_ATTN_AVAILABLE = True -except ModuleNotFoundError: - SAGE_ATTN_AVAILABLE = False - - -def flash_attention(q: 
torch.Tensor, k: torch.Tensor, v: torch.Tensor, num_heads: int, compatibility_mode=False): - if compatibility_mode: - q = rearrange(q, "b s (n d) -> b n s d", n=num_heads) - k = rearrange(k, "b s (n d) -> b n s d", n=num_heads) - v = rearrange(v, "b s (n d) -> b n s d", n=num_heads) - x = F.scaled_dot_product_attention(q, k, v) - x = rearrange(x, "b n s d -> b s (n d)", n=num_heads) - elif FLASH_ATTN_3_AVAILABLE: - q = rearrange(q, "b s (n d) -> b s n d", n=num_heads) - k = rearrange(k, "b s (n d) -> b s n d", n=num_heads) - v = rearrange(v, "b s (n d) -> b s n d", n=num_heads) - x = flash_attn_interface.flash_attn_func(q, k, v) - if isinstance(x,tuple): - x = x[0] - x = rearrange(x, "b s n d -> b s (n d)", n=num_heads) - elif FLASH_ATTN_2_AVAILABLE: - q = rearrange(q, "b s (n d) -> b s n d", n=num_heads) - k = rearrange(k, "b s (n d) -> b s n d", n=num_heads) - v = rearrange(v, "b s (n d) -> b s n d", n=num_heads) - x = flash_attn.flash_attn_func(q, k, v) - x = rearrange(x, "b s n d -> b s (n d)", n=num_heads) - elif SAGE_ATTN_AVAILABLE: - q = rearrange(q, "b s (n d) -> b n s d", n=num_heads) - k = rearrange(k, "b s (n d) -> b n s d", n=num_heads) - v = rearrange(v, "b s (n d) -> b n s d", n=num_heads) - x = sageattn(q, k, v) - x = rearrange(x, "b n s d -> b s (n d)", n=num_heads) - else: - q = rearrange(q, "b s (n d) -> b n s d", n=num_heads) - k = rearrange(k, "b s (n d) -> b n s d", n=num_heads) - v = rearrange(v, "b s (n d) -> b n s d", n=num_heads) - x = F.scaled_dot_product_attention(q, k, v) - x = rearrange(x, "b n s d -> b s (n d)", n=num_heads) - return x - - -def modulate(x: torch.Tensor, shift: torch.Tensor, scale: torch.Tensor): - return (x * (1 + scale) + shift) - - -def sinusoidal_embedding_1d(dim, position): - sinusoid = torch.outer(position.type(torch.float64), torch.pow( - 10000, -torch.arange(dim//2, dtype=torch.float64, device=position.device).div(dim//2))) - x = torch.cat([torch.cos(sinusoid), torch.sin(sinusoid)], dim=1) - return 
x.to(position.dtype) - - -def precompute_freqs_cis_3d(dim: int, end: int = 1024, theta: float = 10000.0): - # 3d rope precompute - f_freqs_cis = precompute_freqs_cis(dim - 2 * (dim // 3), end, theta) - h_freqs_cis = precompute_freqs_cis(dim // 3, end, theta) - w_freqs_cis = precompute_freqs_cis(dim // 3, end, theta) - return f_freqs_cis, h_freqs_cis, w_freqs_cis - - -def precompute_freqs_cis(dim: int, end: int = 1024, theta: float = 10000.0): - # 1d rope precompute - freqs = 1.0 / (theta ** (torch.arange(0, dim, 2) - [: (dim // 2)].double() / dim)) - freqs = torch.outer(torch.arange(end, device=freqs.device), freqs) - freqs_cis = torch.polar(torch.ones_like(freqs), freqs) # complex64 - return freqs_cis - - -def rope_apply(x, freqs, num_heads): - x = rearrange(x, "b s (n d) -> b s n d", n=num_heads) - x_out = torch.view_as_complex(x.to(torch.float64).reshape( - x.shape[0], x.shape[1], x.shape[2], -1, 2)) - x_out = torch.view_as_real(x_out * freqs).flatten(2) - return x_out.to(x.dtype) - - -class RMSNorm(nn.Module): - def __init__(self, dim, eps=1e-5): - super().__init__() - self.eps = eps - self.weight = nn.Parameter(torch.ones(dim)) - - def norm(self, x): - return x * torch.rsqrt(x.pow(2).mean(dim=-1, keepdim=True) + self.eps) - - def forward(self, x): - dtype = x.dtype - return self.norm(x.float()).to(dtype) * self.weight - - -class AttentionModule(nn.Module): - def __init__(self, num_heads): - super().__init__() - self.num_heads = num_heads - - def forward(self, q, k, v): - x = flash_attention(q=q, k=k, v=v, num_heads=self.num_heads) - return x - - -class SelfAttention(nn.Module): - def __init__(self, dim: int, num_heads: int, eps: float = 1e-6): - super().__init__() - self.dim = dim - self.num_heads = num_heads - self.head_dim = dim // num_heads - - self.q = nn.Linear(dim, dim) - self.k = nn.Linear(dim, dim) - self.v = nn.Linear(dim, dim) - self.o = nn.Linear(dim, dim) - self.norm_q = RMSNorm(dim, eps=eps) - self.norm_k = RMSNorm(dim, eps=eps) - - self.attn = 
AttentionModule(self.num_heads) - - def forward(self, x, freqs): - q = self.norm_q(self.q(x)) - k = self.norm_k(self.k(x)) - v = self.v(x) - q = rope_apply(q, freqs, self.num_heads) - k = rope_apply(k, freqs, self.num_heads) - x = self.attn(q, k, v) - return self.o(x) - - -class CrossAttention(nn.Module): - def __init__(self, dim: int, num_heads: int, eps: float = 1e-6, has_image_input: bool = False): - super().__init__() - self.dim = dim - self.num_heads = num_heads - self.head_dim = dim // num_heads - - self.q = nn.Linear(dim, dim) - self.k = nn.Linear(dim, dim) - self.v = nn.Linear(dim, dim) - self.o = nn.Linear(dim, dim) - self.norm_q = RMSNorm(dim, eps=eps) - self.norm_k = RMSNorm(dim, eps=eps) - self.has_image_input = has_image_input - if has_image_input: - self.k_img = nn.Linear(dim, dim) - self.v_img = nn.Linear(dim, dim) - self.norm_k_img = RMSNorm(dim, eps=eps) - - self.attn = AttentionModule(self.num_heads) - - def forward(self, x: torch.Tensor, y: torch.Tensor): - if self.has_image_input: - img = y[:, :257] - ctx = y[:, 257:] - else: - ctx = y - q = self.norm_q(self.q(x)) - k = self.norm_k(self.k(ctx)) - v = self.v(ctx) - x = self.attn(q, k, v) - if self.has_image_input: - k_img = self.norm_k_img(self.k_img(img)) - v_img = self.v_img(img) - y = flash_attention(q, k_img, v_img, num_heads=self.num_heads) - x = x + y - return self.o(x) - - -class GateModule(nn.Module): - def __init__(self,): - super().__init__() - - def forward(self, x, gate, residual): - return x + gate * residual - -class DiTBlock(nn.Module): - def __init__(self, has_image_input: bool, dim: int, num_heads: int, ffn_dim: int, eps: float = 1e-6): - super().__init__() - self.dim = dim - self.num_heads = num_heads - self.ffn_dim = ffn_dim - - self.self_attn = SelfAttention(dim, num_heads, eps) - self.cross_attn = CrossAttention( - dim, num_heads, eps, has_image_input=has_image_input) - self.norm1 = nn.LayerNorm(dim, eps=eps, elementwise_affine=False) - self.norm2 = nn.LayerNorm(dim, eps=eps, 
elementwise_affine=False) - self.norm3 = nn.LayerNorm(dim, eps=eps) - self.ffn = nn.Sequential(nn.Linear(dim, ffn_dim), nn.GELU( - approximate='tanh'), nn.Linear(ffn_dim, dim)) - self.modulation = nn.Parameter(torch.randn(1, 6, dim) / dim**0.5) - self.gate = GateModule() - - def forward(self, x, context, t_mod, freqs): - has_seq = len(t_mod.shape) == 4 - chunk_dim = 2 if has_seq else 1 - # msa: multi-head self-attention mlp: multi-layer perceptron - shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = ( - self.modulation.to(dtype=t_mod.dtype, device=t_mod.device) + t_mod).chunk(6, dim=chunk_dim) - if has_seq: - shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = ( - shift_msa.squeeze(2), scale_msa.squeeze(2), gate_msa.squeeze(2), - shift_mlp.squeeze(2), scale_mlp.squeeze(2), gate_mlp.squeeze(2), - ) - input_x = modulate(self.norm1(x), shift_msa, scale_msa) - x = self.gate(x, gate_msa, self.self_attn(input_x, freqs)) - x = x + self.cross_attn(self.norm3(x), context) - input_x = modulate(self.norm2(x), shift_mlp, scale_mlp) - x = self.gate(x, gate_mlp, self.ffn(input_x)) - return x - - -class MLP(torch.nn.Module): - def __init__(self, in_dim, out_dim, has_pos_emb=False): - super().__init__() - self.proj = torch.nn.Sequential( - nn.LayerNorm(in_dim), - nn.Linear(in_dim, in_dim), - nn.GELU(), - nn.Linear(in_dim, out_dim), - nn.LayerNorm(out_dim) - ) - self.has_pos_emb = has_pos_emb - if has_pos_emb: - self.emb_pos = torch.nn.Parameter(torch.zeros((1, 514, 1280))) - - def forward(self, x): - if self.has_pos_emb: - x = x + self.emb_pos.to(dtype=x.dtype, device=x.device) - return self.proj(x) - - -class Head(nn.Module): - def __init__(self, dim: int, out_dim: int, patch_size: Tuple[int, int, int], eps: float): - super().__init__() - self.dim = dim - self.patch_size = patch_size - self.norm = nn.LayerNorm(dim, eps=eps, elementwise_affine=False) - self.head = nn.Linear(dim, out_dim * math.prod(patch_size)) - self.modulation = 
nn.Parameter(torch.randn(1, 2, dim) / dim**0.5) - - def forward(self, x, t_mod): - if len(t_mod.shape) == 3: - shift, scale = (self.modulation.unsqueeze(0).to(dtype=t_mod.dtype, device=t_mod.device) + t_mod.unsqueeze(2)).chunk(2, dim=2) - x = (self.head(self.norm(x) * (1 + scale.squeeze(2)) + shift.squeeze(2))) - else: - shift, scale = (self.modulation.to(dtype=t_mod.dtype, device=t_mod.device) + t_mod).chunk(2, dim=1) - x = (self.head(self.norm(x) * (1 + scale) + shift)) - return x - - -class WanModel(torch.nn.Module): - def __init__( - self, - dim: int, - in_dim: int, - ffn_dim: int, - out_dim: int, - text_dim: int, - freq_dim: int, - eps: float, - patch_size: Tuple[int, int, int], - num_heads: int, - num_layers: int, - has_image_input: bool, - has_image_pos_emb: bool = False, - has_ref_conv: bool = False, - add_control_adapter: bool = False, - in_dim_control_adapter: int = 24, - seperated_timestep: bool = False, - require_vae_embedding: bool = True, - require_clip_embedding: bool = True, - fuse_vae_embedding_in_latents: bool = False, - ): - super().__init__() - self.dim = dim - self.in_dim = in_dim - self.freq_dim = freq_dim - self.has_image_input = has_image_input - self.patch_size = patch_size - self.seperated_timestep = seperated_timestep - self.require_vae_embedding = require_vae_embedding - self.require_clip_embedding = require_clip_embedding - self.fuse_vae_embedding_in_latents = fuse_vae_embedding_in_latents - - self.patch_embedding = nn.Conv3d( - in_dim, dim, kernel_size=patch_size, stride=patch_size) - self.text_embedding = nn.Sequential( - nn.Linear(text_dim, dim), - nn.GELU(approximate='tanh'), - nn.Linear(dim, dim) - ) - self.time_embedding = nn.Sequential( - nn.Linear(freq_dim, dim), - nn.SiLU(), - nn.Linear(dim, dim) - ) - self.time_projection = nn.Sequential( - nn.SiLU(), nn.Linear(dim, dim * 6)) - self.blocks = nn.ModuleList([ - DiTBlock(has_image_input, dim, num_heads, ffn_dim, eps) - for _ in range(num_layers) - ]) - self.head = Head(dim, 
out_dim, patch_size, eps) - head_dim = dim // num_heads - self.freqs = precompute_freqs_cis_3d(head_dim) - - if has_image_input: - self.img_emb = MLP(1280, dim, has_pos_emb=has_image_pos_emb) # clip_feature_dim = 1280 - if has_ref_conv: - self.ref_conv = nn.Conv2d(16, dim, kernel_size=(2, 2), stride=(2, 2)) - self.has_image_pos_emb = has_image_pos_emb - self.has_ref_conv = has_ref_conv - if add_control_adapter: - self.control_adapter = SimpleAdapter(in_dim_control_adapter, dim, kernel_size=patch_size[1:], stride=patch_size[1:]) - else: - self.control_adapter = None - - def patchify(self, x: torch.Tensor, control_camera_latents_input: Optional[torch.Tensor] = None): - x = self.patch_embedding(x) - if self.control_adapter is not None and control_camera_latents_input is not None: - y_camera = self.control_adapter(control_camera_latents_input) - x = [u + v for u, v in zip(x, y_camera)] - x = x[0].unsqueeze(0) - return x - - def unpatchify(self, x: torch.Tensor, grid_size: torch.Tensor): - return rearrange( - x, 'b (f h w) (x y z c) -> b c (f x) (h y) (w z)', - f=grid_size[0], h=grid_size[1], w=grid_size[2], - x=self.patch_size[0], y=self.patch_size[1], z=self.patch_size[2] - ) - - def forward(self, - x: torch.Tensor, - timestep: torch.Tensor, - context: torch.Tensor, - clip_feature: Optional[torch.Tensor] = None, - y: Optional[torch.Tensor] = None, - use_gradient_checkpointing: bool = False, - use_gradient_checkpointing_offload: bool = False, - **kwargs, - ): - t = self.time_embedding( - sinusoidal_embedding_1d(self.freq_dim, timestep)) - t_mod = self.time_projection(t).unflatten(1, (6, self.dim)) - context = self.text_embedding(context) - - if self.has_image_input: - x = torch.cat([x, y], dim=1) # (b, c_x + c_y, f, h, w) - clip_embdding = self.img_emb(clip_feature) - context = torch.cat([clip_embdding, context], dim=1) - - x, (f, h, w) = self.patchify(x) - - freqs = torch.cat([ - self.freqs[0][:f].view(f, 1, 1, -1).expand(f, h, w, -1), - self.freqs[1][:h].view(1, h, 
1, -1).expand(f, h, w, -1), - self.freqs[2][:w].view(1, 1, w, -1).expand(f, h, w, -1) - ], dim=-1).reshape(f * h * w, 1, -1).to(x.device) - - def create_custom_forward(module): - def custom_forward(*inputs): - return module(*inputs) - return custom_forward - - for block in self.blocks: - if self.training and use_gradient_checkpointing: - if use_gradient_checkpointing_offload: - with torch.autograd.graph.save_on_cpu(): - x = torch.utils.checkpoint.checkpoint( - create_custom_forward(block), - x, context, t_mod, freqs, - use_reentrant=False, - ) - else: - x = torch.utils.checkpoint.checkpoint( - create_custom_forward(block), - x, context, t_mod, freqs, - use_reentrant=False, - ) - else: - x = block(x, context, t_mod, freqs) - - x = self.head(x, t) - x = self.unpatchify(x, (f, h, w)) - return x - - @staticmethod - def state_dict_converter(): - return WanModelStateDictConverter() - - -class WanModelStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - rename_dict = { - "blocks.0.attn1.norm_k.weight": "blocks.0.self_attn.norm_k.weight", - "blocks.0.attn1.norm_q.weight": "blocks.0.self_attn.norm_q.weight", - "blocks.0.attn1.to_k.bias": "blocks.0.self_attn.k.bias", - "blocks.0.attn1.to_k.weight": "blocks.0.self_attn.k.weight", - "blocks.0.attn1.to_out.0.bias": "blocks.0.self_attn.o.bias", - "blocks.0.attn1.to_out.0.weight": "blocks.0.self_attn.o.weight", - "blocks.0.attn1.to_q.bias": "blocks.0.self_attn.q.bias", - "blocks.0.attn1.to_q.weight": "blocks.0.self_attn.q.weight", - "blocks.0.attn1.to_v.bias": "blocks.0.self_attn.v.bias", - "blocks.0.attn1.to_v.weight": "blocks.0.self_attn.v.weight", - "blocks.0.attn2.norm_k.weight": "blocks.0.cross_attn.norm_k.weight", - "blocks.0.attn2.norm_q.weight": "blocks.0.cross_attn.norm_q.weight", - "blocks.0.attn2.to_k.bias": "blocks.0.cross_attn.k.bias", - "blocks.0.attn2.to_k.weight": "blocks.0.cross_attn.k.weight", - "blocks.0.attn2.to_out.0.bias": "blocks.0.cross_attn.o.bias", - 
"blocks.0.attn2.to_out.0.weight": "blocks.0.cross_attn.o.weight", - "blocks.0.attn2.to_q.bias": "blocks.0.cross_attn.q.bias", - "blocks.0.attn2.to_q.weight": "blocks.0.cross_attn.q.weight", - "blocks.0.attn2.to_v.bias": "blocks.0.cross_attn.v.bias", - "blocks.0.attn2.to_v.weight": "blocks.0.cross_attn.v.weight", - "blocks.0.ffn.net.0.proj.bias": "blocks.0.ffn.0.bias", - "blocks.0.ffn.net.0.proj.weight": "blocks.0.ffn.0.weight", - "blocks.0.ffn.net.2.bias": "blocks.0.ffn.2.bias", - "blocks.0.ffn.net.2.weight": "blocks.0.ffn.2.weight", - "blocks.0.norm2.bias": "blocks.0.norm3.bias", - "blocks.0.norm2.weight": "blocks.0.norm3.weight", - "blocks.0.scale_shift_table": "blocks.0.modulation", - "condition_embedder.text_embedder.linear_1.bias": "text_embedding.0.bias", - "condition_embedder.text_embedder.linear_1.weight": "text_embedding.0.weight", - "condition_embedder.text_embedder.linear_2.bias": "text_embedding.2.bias", - "condition_embedder.text_embedder.linear_2.weight": "text_embedding.2.weight", - "condition_embedder.time_embedder.linear_1.bias": "time_embedding.0.bias", - "condition_embedder.time_embedder.linear_1.weight": "time_embedding.0.weight", - "condition_embedder.time_embedder.linear_2.bias": "time_embedding.2.bias", - "condition_embedder.time_embedder.linear_2.weight": "time_embedding.2.weight", - "condition_embedder.time_proj.bias": "time_projection.1.bias", - "condition_embedder.time_proj.weight": "time_projection.1.weight", - "patch_embedding.bias": "patch_embedding.bias", - "patch_embedding.weight": "patch_embedding.weight", - "scale_shift_table": "head.modulation", - "proj_out.bias": "head.head.bias", - "proj_out.weight": "head.head.weight", - } - state_dict_ = {} - for name, param in state_dict.items(): - if name in rename_dict: - state_dict_[rename_dict[name]] = param - else: - name_ = ".".join(name.split(".")[:1] + ["0"] + name.split(".")[2:]) - if name_ in rename_dict: - name_ = rename_dict[name_] - name_ = ".".join(name_.split(".")[:1] + 
[name.split(".")[1]] + name_.split(".")[2:]) - state_dict_[name_] = param - if hash_state_dict_keys(state_dict) == "cb104773c6c2cb6df4f9529ad5c60d0b": - config = { - "model_type": "t2v", - "patch_size": (1, 2, 2), - "text_len": 512, - "in_dim": 16, - "dim": 5120, - "ffn_dim": 13824, - "freq_dim": 256, - "text_dim": 4096, - "out_dim": 16, - "num_heads": 40, - "num_layers": 40, - "window_size": (-1, -1), - "qk_norm": True, - "cross_attn_norm": True, - "eps": 1e-6, - } - else: - config = {} - return state_dict_, config - - def from_civitai(self, state_dict): - state_dict = {name: param for name, param in state_dict.items() if not name.startswith("vace")} - state_dict = {name: param for name, param in state_dict.items() if name.split(".")[0] not in ["pose_patch_embedding", "face_adapter", "face_encoder", "motion_encoder"]} - state_dict_ = {} - for name, param in state_dict.items(): - if name.startswith("model."): - name = name[len("model."):] - state_dict_[name] = param - state_dict = state_dict_ - if hash_state_dict_keys(state_dict) == "9269f8db9040a9d860eaca435be61814": - config = { - "has_image_input": False, - "patch_size": [1, 2, 2], - "in_dim": 16, - "dim": 1536, - "ffn_dim": 8960, - "freq_dim": 256, - "text_dim": 4096, - "out_dim": 16, - "num_heads": 12, - "num_layers": 30, - "eps": 1e-6 - } - elif hash_state_dict_keys(state_dict) == "aafcfd9672c3a2456dc46e1cb6e52c70": - config = { - "has_image_input": False, - "patch_size": [1, 2, 2], - "in_dim": 16, - "dim": 5120, - "ffn_dim": 13824, - "freq_dim": 256, - "text_dim": 4096, - "out_dim": 16, - "num_heads": 40, - "num_layers": 40, - "eps": 1e-6 - } - elif hash_state_dict_keys(state_dict) == "6bfcfb3b342cb286ce886889d519a77e": - config = { - "has_image_input": True, - "patch_size": [1, 2, 2], - "in_dim": 36, - "dim": 5120, - "ffn_dim": 13824, - "freq_dim": 256, - "text_dim": 4096, - "out_dim": 16, - "num_heads": 40, - "num_layers": 40, - "eps": 1e-6 - } - elif hash_state_dict_keys(state_dict) == 
"6d6ccde6845b95ad9114ab993d917893": - config = { - "has_image_input": True, - "patch_size": [1, 2, 2], - "in_dim": 36, - "dim": 1536, - "ffn_dim": 8960, - "freq_dim": 256, - "text_dim": 4096, - "out_dim": 16, - "num_heads": 12, - "num_layers": 30, - "eps": 1e-6 - } - elif hash_state_dict_keys(state_dict) == "349723183fc063b2bfc10bb2835cf677": - # 1.3B PAI control - config = { - "has_image_input": True, - "patch_size": [1, 2, 2], - "in_dim": 48, - "dim": 1536, - "ffn_dim": 8960, - "freq_dim": 256, - "text_dim": 4096, - "out_dim": 16, - "num_heads": 12, - "num_layers": 30, - "eps": 1e-6 - } - elif hash_state_dict_keys(state_dict) == "efa44cddf936c70abd0ea28b6cbe946c": - # 14B PAI control - config = { - "has_image_input": True, - "patch_size": [1, 2, 2], - "in_dim": 48, - "dim": 5120, - "ffn_dim": 13824, - "freq_dim": 256, - "text_dim": 4096, - "out_dim": 16, - "num_heads": 40, - "num_layers": 40, - "eps": 1e-6 - } - elif hash_state_dict_keys(state_dict) == "3ef3b1f8e1dab83d5b71fd7b617f859f": - config = { - "has_image_input": True, - "patch_size": [1, 2, 2], - "in_dim": 36, - "dim": 5120, - "ffn_dim": 13824, - "freq_dim": 256, - "text_dim": 4096, - "out_dim": 16, - "num_heads": 40, - "num_layers": 40, - "eps": 1e-6, - "has_image_pos_emb": True - } - elif hash_state_dict_keys(state_dict) == "70ddad9d3a133785da5ea371aae09504": - # 1.3B PAI control v1.1 - config = { - "has_image_input": True, - "patch_size": [1, 2, 2], - "in_dim": 48, - "dim": 1536, - "ffn_dim": 8960, - "freq_dim": 256, - "text_dim": 4096, - "out_dim": 16, - "num_heads": 12, - "num_layers": 30, - "eps": 1e-6, - "has_ref_conv": True - } - elif hash_state_dict_keys(state_dict) == "26bde73488a92e64cc20b0a7485b9e5b": - # 14B PAI control v1.1 - config = { - "has_image_input": True, - "patch_size": [1, 2, 2], - "in_dim": 48, - "dim": 5120, - "ffn_dim": 13824, - "freq_dim": 256, - "text_dim": 4096, - "out_dim": 16, - "num_heads": 40, - "num_layers": 40, - "eps": 1e-6, - "has_ref_conv": True - } - elif 
hash_state_dict_keys(state_dict) == "ac6a5aa74f4a0aab6f64eb9a72f19901": - # 1.3B PAI control-camera v1.1 - config = { - "has_image_input": True, - "patch_size": [1, 2, 2], - "in_dim": 32, - "dim": 1536, - "ffn_dim": 8960, - "freq_dim": 256, - "text_dim": 4096, - "out_dim": 16, - "num_heads": 12, - "num_layers": 30, - "eps": 1e-6, - "has_ref_conv": False, - "add_control_adapter": True, - "in_dim_control_adapter": 24, - } - elif hash_state_dict_keys(state_dict) == "b61c605c2adbd23124d152ed28e049ae": - # 14B PAI control-camera v1.1 - config = { - "has_image_input": True, - "patch_size": [1, 2, 2], - "in_dim": 32, - "dim": 5120, - "ffn_dim": 13824, - "freq_dim": 256, - "text_dim": 4096, - "out_dim": 16, - "num_heads": 40, - "num_layers": 40, - "eps": 1e-6, - "has_ref_conv": False, - "add_control_adapter": True, - "in_dim_control_adapter": 24, - } - elif hash_state_dict_keys(state_dict) == "1f5ab7703c6fc803fdded85ff040c316": - # Wan-AI/Wan2.2-TI2V-5B - config = { - "has_image_input": False, - "patch_size": [1, 2, 2], - "in_dim": 48, - "dim": 3072, - "ffn_dim": 14336, - "freq_dim": 256, - "text_dim": 4096, - "out_dim": 48, - "num_heads": 24, - "num_layers": 30, - "eps": 1e-6, - "seperated_timestep": True, - "require_clip_embedding": False, - "require_vae_embedding": False, - "fuse_vae_embedding_in_latents": True, - } - elif hash_state_dict_keys(state_dict) == "5b013604280dd715f8457c6ed6d6a626": - # Wan-AI/Wan2.2-I2V-A14B - config = { - "has_image_input": False, - "patch_size": [1, 2, 2], - "in_dim": 36, - "dim": 5120, - "ffn_dim": 13824, - "freq_dim": 256, - "text_dim": 4096, - "out_dim": 16, - "num_heads": 40, - "num_layers": 40, - "eps": 1e-6, - "require_clip_embedding": False, - } - elif hash_state_dict_keys(state_dict) == "2267d489f0ceb9f21836532952852ee5": - # Wan2.2-Fun-A14B-Control - config = { - "has_image_input": False, - "patch_size": [1, 2, 2], - "in_dim": 52, - "dim": 5120, - "ffn_dim": 13824, - "freq_dim": 256, - "text_dim": 4096, - "out_dim": 16, - 
"num_heads": 40, - "num_layers": 40, - "eps": 1e-6, - "has_ref_conv": True, - "require_clip_embedding": False, - } - elif hash_state_dict_keys(state_dict) == "47dbeab5e560db3180adf51dc0232fb1": - # Wan2.2-Fun-A14B-Control-Camera - config = { - "has_image_input": False, - "patch_size": [1, 2, 2], - "in_dim": 36, - "dim": 5120, - "ffn_dim": 13824, - "freq_dim": 256, - "text_dim": 4096, - "out_dim": 16, - "num_heads": 40, - "num_layers": 40, - "eps": 1e-6, - "has_ref_conv": False, - "add_control_adapter": True, - "in_dim_control_adapter": 24, - "require_clip_embedding": False, - } - else: - config = {} - return state_dict, config diff --git a/diffsynth/models/wan_video_dit_s2v.py b/diffsynth/models/wan_video_dit_s2v.py deleted file mode 100644 index 70881e6..0000000 --- a/diffsynth/models/wan_video_dit_s2v.py +++ /dev/null @@ -1,625 +0,0 @@ -import numpy as np -import torch -import torch.nn as nn -import torch.nn.functional as F -from typing import Tuple -from .utils import hash_state_dict_keys -from .wan_video_dit import rearrange, precompute_freqs_cis_3d, DiTBlock, Head, CrossAttention, modulate, sinusoidal_embedding_1d - - -def torch_dfs(model: nn.Module, parent_name='root'): - module_names, modules = [], [] - current_name = parent_name if parent_name else 'root' - module_names.append(current_name) - modules.append(model) - - for name, child in model.named_children(): - if parent_name: - child_name = f'{parent_name}.{name}' - else: - child_name = name - child_modules, child_names = torch_dfs(child, child_name) - module_names += child_names - modules += child_modules - return modules, module_names - - -def rope_precompute(x, grid_sizes, freqs, start=None): - b, s, n, c = x.size(0), x.size(1), x.size(2), x.size(3) // 2 - - # split freqs - if type(freqs) is list: - trainable_freqs = freqs[1] - freqs = freqs[0] - freqs = freqs.split([c - 2 * (c // 3), c // 3, c // 3], dim=1) - - # loop over samples - output = torch.view_as_complex(x.detach().reshape(b, s, n, -1, 
2).to(torch.float64)) - seq_bucket = [0] - if not type(grid_sizes) is list: - grid_sizes = [grid_sizes] - for g in grid_sizes: - if not type(g) is list: - g = [torch.zeros_like(g), g] - batch_size = g[0].shape[0] - for i in range(batch_size): - if start is None: - f_o, h_o, w_o = g[0][i] - else: - f_o, h_o, w_o = start[i] - - f, h, w = g[1][i] - t_f, t_h, t_w = g[2][i] - seq_f, seq_h, seq_w = f - f_o, h - h_o, w - w_o - seq_len = int(seq_f * seq_h * seq_w) - if seq_len > 0: - if t_f > 0: - factor_f, factor_h, factor_w = (t_f / seq_f).item(), (t_h / seq_h).item(), (t_w / seq_w).item() - # Generate a list of seq_f integers starting from f_o and ending at math.ceil(factor_f * seq_f.item() + f_o.item()) - if f_o >= 0: - f_sam = np.linspace(f_o.item(), (t_f + f_o).item() - 1, seq_f).astype(int).tolist() - else: - f_sam = np.linspace(-f_o.item(), (-t_f - f_o).item() + 1, seq_f).astype(int).tolist() - h_sam = np.linspace(h_o.item(), (t_h + h_o).item() - 1, seq_h).astype(int).tolist() - w_sam = np.linspace(w_o.item(), (t_w + w_o).item() - 1, seq_w).astype(int).tolist() - - assert f_o * f >= 0 and h_o * h >= 0 and w_o * w >= 0 - freqs_0 = freqs[0][f_sam] if f_o >= 0 else freqs[0][f_sam].conj() - freqs_0 = freqs_0.view(seq_f, 1, 1, -1) - - freqs_i = torch.cat( - [ - freqs_0.expand(seq_f, seq_h, seq_w, -1), - freqs[1][h_sam].view(1, seq_h, 1, -1).expand(seq_f, seq_h, seq_w, -1), - freqs[2][w_sam].view(1, 1, seq_w, -1).expand(seq_f, seq_h, seq_w, -1), - ], - dim=-1 - ).reshape(seq_len, 1, -1) - elif t_f < 0: - freqs_i = trainable_freqs.unsqueeze(1) - # apply rotary embedding - output[i, seq_bucket[-1]:seq_bucket[-1] + seq_len] = freqs_i - seq_bucket.append(seq_bucket[-1] + seq_len) - return output - - -class CausalConv1d(nn.Module): - - def __init__(self, chan_in, chan_out, kernel_size=3, stride=1, dilation=1, pad_mode='replicate', **kwargs): - super().__init__() - - self.pad_mode = pad_mode - padding = (kernel_size - 1, 0) # T - self.time_causal_padding = padding - - 
self.conv = nn.Conv1d(chan_in, chan_out, kernel_size, stride=stride, dilation=dilation, **kwargs) - - def forward(self, x): - x = F.pad(x, self.time_causal_padding, mode=self.pad_mode) - return self.conv(x) - - -class MotionEncoder_tc(nn.Module): - - def __init__(self, in_dim: int, hidden_dim: int, num_heads=int, need_global=True, dtype=None, device=None): - factory_kwargs = {"dtype": dtype, "device": device} - super().__init__() - - self.num_heads = num_heads - self.need_global = need_global - self.conv1_local = CausalConv1d(in_dim, hidden_dim // 4 * num_heads, 3, stride=1) - if need_global: - self.conv1_global = CausalConv1d(in_dim, hidden_dim // 4, 3, stride=1) - self.norm1 = nn.LayerNorm(hidden_dim // 4, elementwise_affine=False, eps=1e-6, **factory_kwargs) - self.act = nn.SiLU() - self.conv2 = CausalConv1d(hidden_dim // 4, hidden_dim // 2, 3, stride=2) - self.conv3 = CausalConv1d(hidden_dim // 2, hidden_dim, 3, stride=2) - - if need_global: - self.final_linear = nn.Linear(hidden_dim, hidden_dim, **factory_kwargs) - - self.norm1 = nn.LayerNorm(hidden_dim // 4, elementwise_affine=False, eps=1e-6, **factory_kwargs) - self.norm2 = nn.LayerNorm(hidden_dim // 2, elementwise_affine=False, eps=1e-6, **factory_kwargs) - self.norm3 = nn.LayerNorm(hidden_dim, elementwise_affine=False, eps=1e-6, **factory_kwargs) - self.padding_tokens = nn.Parameter(torch.zeros(1, 1, 1, hidden_dim)) - - def forward(self, x): - x = rearrange(x, 'b t c -> b c t') - x_ori = x.clone() - b, c, t = x.shape - x = self.conv1_local(x) - x = rearrange(x, 'b (n c) t -> (b n) t c', n=self.num_heads) - x = self.norm1(x) - x = self.act(x) - x = rearrange(x, 'b t c -> b c t') - x = self.conv2(x) - x = rearrange(x, 'b c t -> b t c') - x = self.norm2(x) - x = self.act(x) - x = rearrange(x, 'b t c -> b c t') - x = self.conv3(x) - x = rearrange(x, 'b c t -> b t c') - x = self.norm3(x) - x = self.act(x) - x = rearrange(x, '(b n) t c -> b t n c', b=b) - padding = self.padding_tokens.repeat(b, x.shape[1], 1, 
1).to(device=x.device, dtype=x.dtype) - x = torch.cat([x, padding], dim=-2) - x_local = x.clone() - - if not self.need_global: - return x_local - - x = self.conv1_global(x_ori) - x = rearrange(x, 'b c t -> b t c') - x = self.norm1(x) - x = self.act(x) - x = rearrange(x, 'b t c -> b c t') - x = self.conv2(x) - x = rearrange(x, 'b c t -> b t c') - x = self.norm2(x) - x = self.act(x) - x = rearrange(x, 'b t c -> b c t') - x = self.conv3(x) - x = rearrange(x, 'b c t -> b t c') - x = self.norm3(x) - x = self.act(x) - x = self.final_linear(x) - x = rearrange(x, '(b n) t c -> b t n c', b=b) - - return x, x_local - - -class FramePackMotioner(nn.Module): - - def __init__(self, inner_dim=1024, num_heads=16, zip_frame_buckets=[1, 2, 16], drop_mode="drop", *args, **kwargs): - super().__init__(*args, **kwargs) - self.proj = nn.Conv3d(16, inner_dim, kernel_size=(1, 2, 2), stride=(1, 2, 2)) - self.proj_2x = nn.Conv3d(16, inner_dim, kernel_size=(2, 4, 4), stride=(2, 4, 4)) - self.proj_4x = nn.Conv3d(16, inner_dim, kernel_size=(4, 8, 8), stride=(4, 8, 8)) - self.zip_frame_buckets = torch.tensor(zip_frame_buckets, dtype=torch.long) - - self.inner_dim = inner_dim - self.num_heads = num_heads - self.freqs = torch.cat(precompute_freqs_cis_3d(inner_dim // num_heads), dim=1) - self.drop_mode = drop_mode - - def forward(self, motion_latents, add_last_motion=2): - motion_frames = motion_latents[0].shape[1] - mot = [] - mot_remb = [] - for m in motion_latents: - lat_height, lat_width = m.shape[2], m.shape[3] - padd_lat = torch.zeros(16, self.zip_frame_buckets.sum(), lat_height, lat_width).to(device=m.device, dtype=m.dtype) - overlap_frame = min(padd_lat.shape[1], m.shape[1]) - if overlap_frame > 0: - padd_lat[:, -overlap_frame:] = m[:, -overlap_frame:] - - if add_last_motion < 2 and self.drop_mode != "drop": - zero_end_frame = self.zip_frame_buckets[:self.zip_frame_buckets.__len__() - add_last_motion - 1].sum() - padd_lat[:, -zero_end_frame:] = 0 - - padd_lat = padd_lat.unsqueeze(0) - 
clean_latents_4x, clean_latents_2x, clean_latents_post = padd_lat[:, :, -self.zip_frame_buckets.sum():, :, :].split( - list(self.zip_frame_buckets)[::-1], dim=2 - ) # 16, 2 ,1 - - # patchfy - clean_latents_post = self.proj(clean_latents_post).flatten(2).transpose(1, 2) - clean_latents_2x = self.proj_2x(clean_latents_2x).flatten(2).transpose(1, 2) - clean_latents_4x = self.proj_4x(clean_latents_4x).flatten(2).transpose(1, 2) - - if add_last_motion < 2 and self.drop_mode == "drop": - clean_latents_post = clean_latents_post[:, :0] if add_last_motion < 2 else clean_latents_post - clean_latents_2x = clean_latents_2x[:, :0] if add_last_motion < 1 else clean_latents_2x - - motion_lat = torch.cat([clean_latents_post, clean_latents_2x, clean_latents_4x], dim=1) - - # rope - start_time_id = -(self.zip_frame_buckets[:1].sum()) - end_time_id = start_time_id + self.zip_frame_buckets[0] - grid_sizes = [] if add_last_motion < 2 and self.drop_mode == "drop" else \ - [ - [torch.tensor([start_time_id, 0, 0]).unsqueeze(0).repeat(1, 1), - torch.tensor([end_time_id, lat_height // 2, lat_width // 2]).unsqueeze(0).repeat(1, 1), - torch.tensor([self.zip_frame_buckets[0], lat_height // 2, lat_width // 2]).unsqueeze(0).repeat(1, 1), ] - ] - - start_time_id = -(self.zip_frame_buckets[:2].sum()) - end_time_id = start_time_id + self.zip_frame_buckets[1] // 2 - grid_sizes_2x = [] if add_last_motion < 1 and self.drop_mode == "drop" else \ - [ - [torch.tensor([start_time_id, 0, 0]).unsqueeze(0).repeat(1, 1), - torch.tensor([end_time_id, lat_height // 4, lat_width // 4]).unsqueeze(0).repeat(1, 1), - torch.tensor([self.zip_frame_buckets[1], lat_height // 2, lat_width // 2]).unsqueeze(0).repeat(1, 1), ] - ] - - start_time_id = -(self.zip_frame_buckets[:3].sum()) - end_time_id = start_time_id + self.zip_frame_buckets[2] // 4 - grid_sizes_4x = [ - [ - torch.tensor([start_time_id, 0, 0]).unsqueeze(0).repeat(1, 1), - torch.tensor([end_time_id, lat_height // 8, lat_width // 8]).unsqueeze(0).repeat(1, 1), 
- torch.tensor([self.zip_frame_buckets[2], lat_height // 2, lat_width // 2]).unsqueeze(0).repeat(1, 1), - ] - ] - - grid_sizes = grid_sizes + grid_sizes_2x + grid_sizes_4x - - motion_rope_emb = rope_precompute( - motion_lat.detach().view(1, motion_lat.shape[1], self.num_heads, self.inner_dim // self.num_heads), - grid_sizes, - self.freqs, - start=None - ) - - mot.append(motion_lat) - mot_remb.append(motion_rope_emb) - return mot, mot_remb - - -class AdaLayerNorm(nn.Module): - - def __init__( - self, - embedding_dim: int, - output_dim: int, - norm_eps: float = 1e-5, - ): - super().__init__() - self.silu = nn.SiLU() - self.linear = nn.Linear(embedding_dim, output_dim) - self.norm = nn.LayerNorm(output_dim // 2, norm_eps, elementwise_affine=False) - - def forward(self, x, temb): - temb = self.linear(F.silu(temb)) - shift, scale = temb.chunk(2, dim=1) - shift = shift[:, None, :] - scale = scale[:, None, :] - x = self.norm(x) * (1 + scale) + shift - return x - - -class AudioInjector_WAN(nn.Module): - - def __init__( - self, - all_modules, - all_modules_names, - dim=2048, - num_heads=32, - inject_layer=[0, 27], - enable_adain=False, - adain_dim=2048, - ): - super().__init__() - self.injected_block_id = {} - audio_injector_id = 0 - for mod_name, mod in zip(all_modules_names, all_modules): - if isinstance(mod, DiTBlock): - for inject_id in inject_layer: - if f'transformer_blocks.{inject_id}' in mod_name: - self.injected_block_id[inject_id] = audio_injector_id - audio_injector_id += 1 - - self.injector = nn.ModuleList([CrossAttention( - dim=dim, - num_heads=num_heads, - ) for _ in range(audio_injector_id)]) - self.injector_pre_norm_feat = nn.ModuleList([nn.LayerNorm( - dim, - elementwise_affine=False, - eps=1e-6, - ) for _ in range(audio_injector_id)]) - self.injector_pre_norm_vec = nn.ModuleList([nn.LayerNorm( - dim, - elementwise_affine=False, - eps=1e-6, - ) for _ in range(audio_injector_id)]) - if enable_adain: - self.injector_adain_layers = 
nn.ModuleList([AdaLayerNorm(output_dim=dim * 2, embedding_dim=adain_dim) for _ in range(audio_injector_id)]) - - -class CausalAudioEncoder(nn.Module): - - def __init__(self, dim=5120, num_layers=25, out_dim=2048, num_token=4, need_global=False): - super().__init__() - self.encoder = MotionEncoder_tc(in_dim=dim, hidden_dim=out_dim, num_heads=num_token, need_global=need_global) - weight = torch.ones((1, num_layers, 1, 1)) * 0.01 - - self.weights = torch.nn.Parameter(weight) - self.act = torch.nn.SiLU() - - def forward(self, features): - # features B * num_layers * dim * video_length - weights = self.act(self.weights.to(device=features.device, dtype=features.dtype)) - weights_sum = weights.sum(dim=1, keepdims=True) - weighted_feat = ((features * weights) / weights_sum).sum(dim=1) # b dim f - weighted_feat = weighted_feat.permute(0, 2, 1) # b f dim - res = self.encoder(weighted_feat) # b f n dim - return res # b f n dim - - -class WanS2VDiTBlock(DiTBlock): - - def forward(self, x, context, t_mod, seq_len_x, freqs): - t_mod = (self.modulation.unsqueeze(2).to(dtype=t_mod.dtype, device=t_mod.device) + t_mod).chunk(6, dim=1) - # t_mod[:, :, 0] for x, t_mod[:, :, 1] for other like ref, motion, etc. 
- t_mod = [ - torch.cat([element[:, :, 0].expand(1, seq_len_x, x.shape[-1]), element[:, :, 1].expand(1, x.shape[1] - seq_len_x, x.shape[-1])], dim=1) - for element in t_mod - ] - shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = t_mod - input_x = modulate(self.norm1(x), shift_msa, scale_msa) - x = self.gate(x, gate_msa, self.self_attn(input_x, freqs)) - x = x + self.cross_attn(self.norm3(x), context) - input_x = modulate(self.norm2(x), shift_mlp, scale_mlp) - x = self.gate(x, gate_mlp, self.ffn(input_x)) - return x - - -class WanS2VModel(torch.nn.Module): - - def __init__( - self, - dim: int, - in_dim: int, - ffn_dim: int, - out_dim: int, - text_dim: int, - freq_dim: int, - eps: float, - patch_size: Tuple[int, int, int], - num_heads: int, - num_layers: int, - cond_dim: int, - audio_dim: int, - num_audio_token: int, - enable_adain: bool = True, - audio_inject_layers: list = [0, 4, 8, 12, 16, 20, 24, 27, 30, 33, 36, 39], - zero_timestep: bool = True, - add_last_motion: bool = True, - framepack_drop_mode: str = "padd", - fuse_vae_embedding_in_latents: bool = True, - require_vae_embedding: bool = False, - seperated_timestep: bool = False, - require_clip_embedding: bool = False, - ): - super().__init__() - self.dim = dim - self.in_dim = in_dim - self.freq_dim = freq_dim - self.patch_size = patch_size - self.num_heads = num_heads - self.enbale_adain = enable_adain - self.add_last_motion = add_last_motion - self.zero_timestep = zero_timestep - self.fuse_vae_embedding_in_latents = fuse_vae_embedding_in_latents - self.require_vae_embedding = require_vae_embedding - self.seperated_timestep = seperated_timestep - self.require_clip_embedding = require_clip_embedding - - self.patch_embedding = nn.Conv3d(in_dim, dim, kernel_size=patch_size, stride=patch_size) - self.text_embedding = nn.Sequential(nn.Linear(text_dim, dim), nn.GELU(approximate='tanh'), nn.Linear(dim, dim)) - self.time_embedding = nn.Sequential(nn.Linear(freq_dim, dim), nn.SiLU(), nn.Linear(dim, 
dim)) - self.time_projection = nn.Sequential(nn.SiLU(), nn.Linear(dim, dim * 6)) - - self.blocks = nn.ModuleList([WanS2VDiTBlock(False, dim, num_heads, ffn_dim, eps) for _ in range(num_layers)]) - self.head = Head(dim, out_dim, patch_size, eps) - self.freqs = torch.cat(precompute_freqs_cis_3d(dim // num_heads), dim=1) - - self.cond_encoder = nn.Conv3d(cond_dim, dim, kernel_size=patch_size, stride=patch_size) - self.casual_audio_encoder = CausalAudioEncoder(dim=audio_dim, out_dim=dim, num_token=num_audio_token, need_global=enable_adain) - all_modules, all_modules_names = torch_dfs(self.blocks, parent_name="root.transformer_blocks") - self.audio_injector = AudioInjector_WAN( - all_modules, - all_modules_names, - dim=dim, - num_heads=num_heads, - inject_layer=audio_inject_layers, - enable_adain=enable_adain, - adain_dim=dim, - ) - self.trainable_cond_mask = nn.Embedding(3, dim) - self.frame_packer = FramePackMotioner(inner_dim=dim, num_heads=num_heads, zip_frame_buckets=[1, 2, 16], drop_mode=framepack_drop_mode) - - def patchify(self, x: torch.Tensor): - grid_size = x.shape[2:] - x = rearrange(x, 'b c f h w -> b (f h w) c').contiguous() - return x, grid_size # x, grid_size: (f, h, w) - - def unpatchify(self, x: torch.Tensor, grid_size: torch.Tensor): - return rearrange( - x, - 'b (f h w) (x y z c) -> b c (f x) (h y) (w z)', - f=grid_size[0], - h=grid_size[1], - w=grid_size[2], - x=self.patch_size[0], - y=self.patch_size[1], - z=self.patch_size[2] - ) - - def process_motion_frame_pack(self, motion_latents, drop_motion_frames=False, add_last_motion=2): - flattern_mot, mot_remb = self.frame_packer(motion_latents, add_last_motion) - if drop_motion_frames: - return [m[:, :0] for m in flattern_mot], [m[:, :0] for m in mot_remb] - else: - return flattern_mot, mot_remb - - def inject_motion(self, x, rope_embs, mask_input, motion_latents, drop_motion_frames=True, add_last_motion=2): - # inject the motion frames token to the hidden states - mot, mot_remb = 
self.process_motion_frame_pack(motion_latents, drop_motion_frames=drop_motion_frames, add_last_motion=add_last_motion) - if len(mot) > 0: - x = torch.cat([x, mot[0]], dim=1) - rope_embs = torch.cat([rope_embs, mot_remb[0]], dim=1) - mask_input = torch.cat( - [mask_input, 2 * torch.ones([1, x.shape[1] - mask_input.shape[1]], device=mask_input.device, dtype=mask_input.dtype)], dim=1 - ) - return x, rope_embs, mask_input - - def after_transformer_block(self, block_idx, hidden_states, audio_emb_global, audio_emb, original_seq_len, use_unified_sequence_parallel=False): - if block_idx in self.audio_injector.injected_block_id.keys(): - audio_attn_id = self.audio_injector.injected_block_id[block_idx] - num_frames = audio_emb.shape[1] - if use_unified_sequence_parallel: - from xfuser.core.distributed import get_sp_group - hidden_states = get_sp_group().all_gather(hidden_states, dim=1) - - input_hidden_states = hidden_states[:, :original_seq_len].clone() # b (f h w) c - input_hidden_states = rearrange(input_hidden_states, "b (t n) c -> (b t) n c", t=num_frames) - - audio_emb_global = rearrange(audio_emb_global, "b t n c -> (b t) n c") - adain_hidden_states = self.audio_injector.injector_adain_layers[audio_attn_id](input_hidden_states, temb=audio_emb_global[:, 0]) - attn_hidden_states = adain_hidden_states - - audio_emb = rearrange(audio_emb, "b t n c -> (b t) n c", t=num_frames) - attn_audio_emb = audio_emb - residual_out = self.audio_injector.injector[audio_attn_id](attn_hidden_states, attn_audio_emb) - residual_out = rearrange(residual_out, "(b t) n c -> b (t n) c", t=num_frames) - hidden_states[:, :original_seq_len] = hidden_states[:, :original_seq_len] + residual_out - if use_unified_sequence_parallel: - from xfuser.core.distributed import get_sequence_parallel_world_size, get_sequence_parallel_rank - hidden_states = torch.chunk(hidden_states, get_sequence_parallel_world_size(), dim=1)[get_sequence_parallel_rank()] - return hidden_states - - def cal_audio_emb(self, 
audio_input, motion_frames=[73, 19]): - audio_input = torch.cat([audio_input[..., 0:1].repeat(1, 1, 1, motion_frames[0]), audio_input], dim=-1) - audio_emb_global, audio_emb = self.casual_audio_encoder(audio_input) - audio_emb_global = audio_emb_global[:, motion_frames[1]:].clone() - merged_audio_emb = audio_emb[:, motion_frames[1]:, :] - return audio_emb_global, merged_audio_emb - - def get_grid_sizes(self, grid_size_x, grid_size_ref): - f, h, w = grid_size_x - rf, rh, rw = grid_size_ref - grid_sizes_x = torch.tensor([f, h, w], dtype=torch.long).unsqueeze(0) - grid_sizes_x = [[torch.zeros_like(grid_sizes_x), grid_sizes_x, grid_sizes_x]] - grid_sizes_ref = [[ - torch.tensor([30, 0, 0]).unsqueeze(0), - torch.tensor([31, rh, rw]).unsqueeze(0), - torch.tensor([1, rh, rw]).unsqueeze(0), - ]] - return grid_sizes_x + grid_sizes_ref - - def forward( - self, - latents, - timestep, - context, - audio_input, - motion_latents, - pose_cond, - use_gradient_checkpointing_offload=False, - use_gradient_checkpointing=False - ): - origin_ref_latents = latents[:, :, 0:1] - x = latents[:, :, 1:] - - # context embedding - context = self.text_embedding(context) - - # audio encode - audio_emb_global, merged_audio_emb = self.cal_audio_emb(audio_input) - - # x and pose_cond - pose_cond = torch.zeros_like(x) if pose_cond is None else pose_cond - x, (f, h, w) = self.patchify(self.patch_embedding(x) + self.cond_encoder(pose_cond)) # torch.Size([1, 29120, 5120]) - seq_len_x = x.shape[1] - - # reference image - ref_latents, (rf, rh, rw) = self.patchify(self.patch_embedding(origin_ref_latents)) # torch.Size([1, 1456, 5120]) - grid_sizes = self.get_grid_sizes((f, h, w), (rf, rh, rw)) - x = torch.cat([x, ref_latents], dim=1) - # mask - mask = torch.cat([torch.zeros([1, seq_len_x]), torch.ones([1, ref_latents.shape[1]])], dim=1).to(torch.long).to(x.device) - # freqs - pre_compute_freqs = rope_precompute( - x.detach().view(1, x.size(1), self.num_heads, self.dim // self.num_heads), grid_sizes, 
self.freqs, start=None - ) - # motion - x, pre_compute_freqs, mask = self.inject_motion(x, pre_compute_freqs, mask, motion_latents, add_last_motion=2) - - x = x + self.trainable_cond_mask(mask).to(x.dtype) - - # t_mod - timestep = torch.cat([timestep, torch.zeros([1], dtype=timestep.dtype, device=timestep.device)]) - t = self.time_embedding(sinusoidal_embedding_1d(self.freq_dim, timestep)) - t_mod = self.time_projection(t).unflatten(1, (6, self.dim)).unsqueeze(2).transpose(0, 2) - - def create_custom_forward(module): - def custom_forward(*inputs): - return module(*inputs) - return custom_forward - - for block_id, block in enumerate(self.blocks): - if use_gradient_checkpointing_offload: - with torch.autograd.graph.save_on_cpu(): - x = torch.utils.checkpoint.checkpoint( - create_custom_forward(block), - x, - context, - t_mod, - seq_len_x, - pre_compute_freqs[0], - use_reentrant=False, - ) - x = torch.utils.checkpoint.checkpoint( - create_custom_forward(lambda x: self.after_transformer_block(block_id, x, audio_emb_global, merged_audio_emb, seq_len_x)), - x, - use_reentrant=False, - ) - elif use_gradient_checkpointing: - x = torch.utils.checkpoint.checkpoint( - create_custom_forward(block), - x, - context, - t_mod, - seq_len_x, - pre_compute_freqs[0], - use_reentrant=False, - ) - x = torch.utils.checkpoint.checkpoint( - create_custom_forward(lambda x: self.after_transformer_block(block_id, x, audio_emb_global, merged_audio_emb, seq_len_x)), - x, - use_reentrant=False, - ) - else: - x = block(x, context, t_mod, seq_len_x, pre_compute_freqs[0]) - x = self.after_transformer_block(block_id, x, audio_emb_global, merged_audio_emb, seq_len_x) - - x = x[:, :seq_len_x] - x = self.head(x, t[:-1]) - x = self.unpatchify(x, (f, h, w)) - # make compatible with wan video - x = torch.cat([origin_ref_latents, x], dim=2) - return x - - @staticmethod - def state_dict_converter(): - return WanS2VModelStateDictConverter() - - -class WanS2VModelStateDictConverter: - - def __init__(self): - 
pass - - def from_civitai(self, state_dict): - config = {} - if hash_state_dict_keys(state_dict) == "966cffdcc52f9c46c391768b27637614": - config = { - "dim": 5120, - "in_dim": 16, - "ffn_dim": 13824, - "out_dim": 16, - "text_dim": 4096, - "freq_dim": 256, - "eps": 1e-06, - "patch_size": (1, 2, 2), - "num_heads": 40, - "num_layers": 40, - "cond_dim": 16, - "audio_dim": 1024, - "num_audio_token": 4, - } - return state_dict, config diff --git a/diffsynth/models/wan_video_image_encoder.py b/diffsynth/models/wan_video_image_encoder.py deleted file mode 100644 index 5ca878b..0000000 --- a/diffsynth/models/wan_video_image_encoder.py +++ /dev/null @@ -1,902 +0,0 @@ -""" -Concise re-implementation of -``https://github.com/openai/CLIP'' and -``https://github.com/mlfoundations/open_clip''. -""" -import math -import torch -import torch.nn as nn -import torch.nn.functional as F -import torchvision.transforms as T -from .wan_video_dit import flash_attention - - -class SelfAttention(nn.Module): - - def __init__(self, dim, num_heads, dropout=0.1, eps=1e-5): - assert dim % num_heads == 0 - super().__init__() - self.dim = dim - self.num_heads = num_heads - self.head_dim = dim // num_heads - self.eps = eps - - # layers - self.q = nn.Linear(dim, dim) - self.k = nn.Linear(dim, dim) - self.v = nn.Linear(dim, dim) - self.o = nn.Linear(dim, dim) - self.dropout = nn.Dropout(dropout) - - def forward(self, x, mask): - """ - x: [B, L, C]. 
- """ - b, s, c, n, d = *x.size(), self.num_heads, self.head_dim - - # compute query, key, value - q = self.q(x).reshape(b, s, n, d).permute(0, 2, 1, 3) - k = self.k(x).reshape(b, s, n, d).permute(0, 2, 1, 3) - v = self.v(x).reshape(b, s, n, d).permute(0, 2, 1, 3) - - # compute attention - p = self.dropout.p if self.training else 0.0 - x = F.scaled_dot_product_attention(q, k, v, mask, p) - x = x.permute(0, 2, 1, 3).reshape(b, s, c) - - # output - x = self.o(x) - x = self.dropout(x) - return x - - -class AttentionBlock(nn.Module): - - def __init__(self, dim, num_heads, post_norm, dropout=0.1, eps=1e-5): - super().__init__() - self.dim = dim - self.num_heads = num_heads - self.post_norm = post_norm - self.eps = eps - - # layers - self.attn = SelfAttention(dim, num_heads, dropout, eps) - self.norm1 = nn.LayerNorm(dim, eps=eps) - self.ffn = nn.Sequential( - nn.Linear(dim, dim * 4), nn.GELU(), nn.Linear(dim * 4, dim), - nn.Dropout(dropout)) - self.norm2 = nn.LayerNorm(dim, eps=eps) - - def forward(self, x, mask): - if self.post_norm: - x = self.norm1(x + self.attn(x, mask)) - x = self.norm2(x + self.ffn(x)) - else: - x = x + self.attn(self.norm1(x), mask) - x = x + self.ffn(self.norm2(x)) - return x - - -class XLMRoberta(nn.Module): - """ - XLMRobertaModel with no pooler and no LM head. 
- """ - - def __init__(self, - vocab_size=250002, - max_seq_len=514, - type_size=1, - pad_id=1, - dim=1024, - num_heads=16, - num_layers=24, - post_norm=True, - dropout=0.1, - eps=1e-5): - super().__init__() - self.vocab_size = vocab_size - self.max_seq_len = max_seq_len - self.type_size = type_size - self.pad_id = pad_id - self.dim = dim - self.num_heads = num_heads - self.num_layers = num_layers - self.post_norm = post_norm - self.eps = eps - - # embeddings - self.token_embedding = nn.Embedding(vocab_size, dim, padding_idx=pad_id) - self.type_embedding = nn.Embedding(type_size, dim) - self.pos_embedding = nn.Embedding(max_seq_len, dim, padding_idx=pad_id) - self.dropout = nn.Dropout(dropout) - - # blocks - self.blocks = nn.ModuleList([ - AttentionBlock(dim, num_heads, post_norm, dropout, eps) - for _ in range(num_layers) - ]) - - # norm layer - self.norm = nn.LayerNorm(dim, eps=eps) - - def forward(self, ids): - """ - ids: [B, L] of torch.LongTensor. - """ - b, s = ids.shape - mask = ids.ne(self.pad_id).long() - - # embeddings - x = self.token_embedding(ids) + \ - self.type_embedding(torch.zeros_like(ids)) + \ - self.pos_embedding(self.pad_id + torch.cumsum(mask, dim=1) * mask) - if self.post_norm: - x = self.norm(x) - x = self.dropout(x) - - # blocks - mask = torch.where( - mask.view(b, 1, 1, s).gt(0), 0.0, - torch.finfo(x.dtype).min) - for block in self.blocks: - x = block(x, mask) - - # output - if not self.post_norm: - x = self.norm(x) - return x - - -def xlm_roberta_large(pretrained=False, - return_tokenizer=False, - device='cpu', - **kwargs): - """ - XLMRobertaLarge adapted from Huggingface. 
- """ - # params - cfg = dict( - vocab_size=250002, - max_seq_len=514, - type_size=1, - pad_id=1, - dim=1024, - num_heads=16, - num_layers=24, - post_norm=True, - dropout=0.1, - eps=1e-5) - cfg.update(**kwargs) - - # init model - if pretrained: - from sora import DOWNLOAD_TO_CACHE - - # init a meta model - with torch.device('meta'): - model = XLMRoberta(**cfg) - - # load checkpoint - model.load_state_dict( - torch.load( - DOWNLOAD_TO_CACHE('models/xlm_roberta/xlm_roberta_large.pth'), - map_location=device), - assign=True) - else: - # init a model on device - with torch.device(device): - model = XLMRoberta(**cfg) - - # init tokenizer - if return_tokenizer: - from sora.data import HuggingfaceTokenizer - tokenizer = HuggingfaceTokenizer( - name='xlm-roberta-large', - seq_len=model.text_len, - clean='whitespace') - return model, tokenizer - else: - return model - - - -def pos_interpolate(pos, seq_len): - if pos.size(1) == seq_len: - return pos - else: - src_grid = int(math.sqrt(pos.size(1))) - tar_grid = int(math.sqrt(seq_len)) - n = pos.size(1) - src_grid * src_grid - return torch.cat([ - pos[:, :n], - F.interpolate( - pos[:, n:].float().reshape(1, src_grid, src_grid, -1).permute( - 0, 3, 1, 2), - size=(tar_grid, tar_grid), - mode='bicubic', - align_corners=False).flatten(2).transpose(1, 2) - ], - dim=1) - - -class QuickGELU(nn.Module): - - def forward(self, x): - return x * torch.sigmoid(1.702 * x) - - -class LayerNorm(nn.LayerNorm): - - def forward(self, x): - return super().forward(x).type_as(x) - - -class SelfAttention(nn.Module): - - def __init__(self, - dim, - num_heads, - causal=False, - attn_dropout=0.0, - proj_dropout=0.0): - assert dim % num_heads == 0 - super().__init__() - self.dim = dim - self.num_heads = num_heads - self.head_dim = dim // num_heads - self.causal = causal - self.attn_dropout = attn_dropout - self.proj_dropout = proj_dropout - - # layers - self.to_qkv = nn.Linear(dim, dim * 3) - self.proj = nn.Linear(dim, dim) - - def forward(self, x): - 
""" - x: [B, L, C]. - """ - # compute query, key, value - q, k, v = self.to_qkv(x).chunk(3, dim=-1) - - # compute attention - x = flash_attention(q, k, v, num_heads=self.num_heads, compatibility_mode=True) - - # output - x = self.proj(x) - x = F.dropout(x, self.proj_dropout, self.training) - return x - - -class SwiGLU(nn.Module): - - def __init__(self, dim, mid_dim): - super().__init__() - self.dim = dim - self.mid_dim = mid_dim - - # layers - self.fc1 = nn.Linear(dim, mid_dim) - self.fc2 = nn.Linear(dim, mid_dim) - self.fc3 = nn.Linear(mid_dim, dim) - - def forward(self, x): - x = F.silu(self.fc1(x)) * self.fc2(x) - x = self.fc3(x) - return x - - -class AttentionBlock(nn.Module): - - def __init__(self, - dim, - mlp_ratio, - num_heads, - post_norm=False, - causal=False, - activation='quick_gelu', - attn_dropout=0.0, - proj_dropout=0.0, - norm_eps=1e-5): - assert activation in ['quick_gelu', 'gelu', 'swi_glu'] - super().__init__() - self.dim = dim - self.mlp_ratio = mlp_ratio - self.num_heads = num_heads - self.post_norm = post_norm - self.causal = causal - self.norm_eps = norm_eps - - # layers - self.norm1 = LayerNorm(dim, eps=norm_eps) - self.attn = SelfAttention(dim, num_heads, causal, attn_dropout, - proj_dropout) - self.norm2 = LayerNorm(dim, eps=norm_eps) - if activation == 'swi_glu': - self.mlp = SwiGLU(dim, int(dim * mlp_ratio)) - else: - self.mlp = nn.Sequential( - nn.Linear(dim, int(dim * mlp_ratio)), - QuickGELU() if activation == 'quick_gelu' else nn.GELU(), - nn.Linear(int(dim * mlp_ratio), dim), nn.Dropout(proj_dropout)) - - def forward(self, x): - if self.post_norm: - x = x + self.norm1(self.attn(x)) - x = x + self.norm2(self.mlp(x)) - else: - x = x + self.attn(self.norm1(x)) - x = x + self.mlp(self.norm2(x)) - return x - - -class AttentionPool(nn.Module): - - def __init__(self, - dim, - mlp_ratio, - num_heads, - activation='gelu', - proj_dropout=0.0, - norm_eps=1e-5): - assert dim % num_heads == 0 - super().__init__() - self.dim = dim - 
self.mlp_ratio = mlp_ratio - self.num_heads = num_heads - self.head_dim = dim // num_heads - self.proj_dropout = proj_dropout - self.norm_eps = norm_eps - - # layers - gain = 1.0 / math.sqrt(dim) - self.cls_embedding = nn.Parameter(gain * torch.randn(1, 1, dim)) - self.to_q = nn.Linear(dim, dim) - self.to_kv = nn.Linear(dim, dim * 2) - self.proj = nn.Linear(dim, dim) - self.norm = LayerNorm(dim, eps=norm_eps) - self.mlp = nn.Sequential( - nn.Linear(dim, int(dim * mlp_ratio)), - QuickGELU() if activation == 'quick_gelu' else nn.GELU(), - nn.Linear(int(dim * mlp_ratio), dim), nn.Dropout(proj_dropout)) - - def forward(self, x): - """ - x: [B, L, C]. - """ - b, s, c, n, d = *x.size(), self.num_heads, self.head_dim - - # compute query, key, value - q = self.to_q(self.cls_embedding).view(1, 1, n*d).expand(b, -1, -1) - k, v = self.to_kv(x).chunk(2, dim=-1) - - # compute attention - x = flash_attention(q, k, v, num_heads=self.num_heads, compatibility_mode=True) - x = x.reshape(b, 1, c) - - # output - x = self.proj(x) - x = F.dropout(x, self.proj_dropout, self.training) - - # mlp - x = x + self.mlp(self.norm(x)) - return x[:, 0] - - -class VisionTransformer(nn.Module): - - def __init__(self, - image_size=224, - patch_size=16, - dim=768, - mlp_ratio=4, - out_dim=512, - num_heads=12, - num_layers=12, - pool_type='token', - pre_norm=True, - post_norm=False, - activation='quick_gelu', - attn_dropout=0.0, - proj_dropout=0.0, - embedding_dropout=0.0, - norm_eps=1e-5): - if image_size % patch_size != 0: - print( - '[WARNING] image_size is not divisible by patch_size', - flush=True) - assert pool_type in ('token', 'token_fc', 'attn_pool') - out_dim = out_dim or dim - super().__init__() - self.image_size = image_size - self.patch_size = patch_size - self.num_patches = (image_size // patch_size)**2 - self.dim = dim - self.mlp_ratio = mlp_ratio - self.out_dim = out_dim - self.num_heads = num_heads - self.num_layers = num_layers - self.pool_type = pool_type - self.post_norm = post_norm 
- self.norm_eps = norm_eps - - # embeddings - gain = 1.0 / math.sqrt(dim) - self.patch_embedding = nn.Conv2d( - 3, - dim, - kernel_size=patch_size, - stride=patch_size, - bias=not pre_norm) - if pool_type in ('token', 'token_fc'): - self.cls_embedding = nn.Parameter(gain * torch.randn(1, 1, dim)) - self.pos_embedding = nn.Parameter(gain * torch.randn( - 1, self.num_patches + - (1 if pool_type in ('token', 'token_fc') else 0), dim)) - self.dropout = nn.Dropout(embedding_dropout) - - # transformer - self.pre_norm = LayerNorm(dim, eps=norm_eps) if pre_norm else None - self.transformer = nn.Sequential(*[ - AttentionBlock(dim, mlp_ratio, num_heads, post_norm, False, - activation, attn_dropout, proj_dropout, norm_eps) - for _ in range(num_layers) - ]) - self.post_norm = LayerNorm(dim, eps=norm_eps) - - # head - if pool_type == 'token': - self.head = nn.Parameter(gain * torch.randn(dim, out_dim)) - elif pool_type == 'token_fc': - self.head = nn.Linear(dim, out_dim) - elif pool_type == 'attn_pool': - self.head = AttentionPool(dim, mlp_ratio, num_heads, activation, - proj_dropout, norm_eps) - - def forward(self, x, interpolation=False, use_31_block=False): - b = x.size(0) - - # embeddings - x = self.patch_embedding(x).flatten(2).permute(0, 2, 1) - if self.pool_type in ('token', 'token_fc'): - x = torch.cat([self.cls_embedding.expand(b, -1, -1).to(dtype=x.dtype, device=x.device), x], dim=1) - if interpolation: - e = pos_interpolate(self.pos_embedding, x.size(1)) - else: - e = self.pos_embedding - e = e.to(dtype=x.dtype, device=x.device) - x = self.dropout(x + e) - if self.pre_norm is not None: - x = self.pre_norm(x) - - # transformer - if use_31_block: - x = self.transformer[:-1](x) - return x - else: - x = self.transformer(x) - return x - - -class CLIP(nn.Module): - - def __init__(self, - embed_dim=512, - image_size=224, - patch_size=16, - vision_dim=768, - vision_mlp_ratio=4, - vision_heads=12, - vision_layers=12, - vision_pool='token', - vision_pre_norm=True, - 
vision_post_norm=False, - vocab_size=49408, - text_len=77, - text_dim=512, - text_mlp_ratio=4, - text_heads=8, - text_layers=12, - text_causal=True, - text_pool='argmax', - text_head_bias=False, - logit_bias=None, - activation='quick_gelu', - attn_dropout=0.0, - proj_dropout=0.0, - embedding_dropout=0.0, - norm_eps=1e-5): - super().__init__() - self.embed_dim = embed_dim - self.image_size = image_size - self.patch_size = patch_size - self.vision_dim = vision_dim - self.vision_mlp_ratio = vision_mlp_ratio - self.vision_heads = vision_heads - self.vision_layers = vision_layers - self.vision_pool = vision_pool - self.vision_pre_norm = vision_pre_norm - self.vision_post_norm = vision_post_norm - self.vocab_size = vocab_size - self.text_len = text_len - self.text_dim = text_dim - self.text_mlp_ratio = text_mlp_ratio - self.text_heads = text_heads - self.text_layers = text_layers - self.text_causal = text_causal - self.text_pool = text_pool - self.text_head_bias = text_head_bias - self.norm_eps = norm_eps - - # models - self.visual = VisionTransformer( - image_size=image_size, - patch_size=patch_size, - dim=vision_dim, - mlp_ratio=vision_mlp_ratio, - out_dim=embed_dim, - num_heads=vision_heads, - num_layers=vision_layers, - pool_type=vision_pool, - pre_norm=vision_pre_norm, - post_norm=vision_post_norm, - activation=activation, - attn_dropout=attn_dropout, - proj_dropout=proj_dropout, - embedding_dropout=embedding_dropout, - norm_eps=norm_eps) - self.textual = TextTransformer( - vocab_size=vocab_size, - text_len=text_len, - dim=text_dim, - mlp_ratio=text_mlp_ratio, - out_dim=embed_dim, - num_heads=text_heads, - num_layers=text_layers, - causal=text_causal, - pool_type=text_pool, - head_bias=text_head_bias, - activation=activation, - attn_dropout=attn_dropout, - proj_dropout=proj_dropout, - embedding_dropout=embedding_dropout, - norm_eps=norm_eps) - self.log_scale = nn.Parameter(math.log(1 / 0.07) * torch.ones([])) - if logit_bias is not None: - self.logit_bias = 
nn.Parameter(logit_bias * torch.ones([])) - - # initialize weights - self.init_weights() - - def forward(self, imgs, txt_ids): - """ - imgs: [B, 3, H, W] of torch.float32. - - mean: [0.48145466, 0.4578275, 0.40821073] - - std: [0.26862954, 0.26130258, 0.27577711] - txt_ids: [B, L] of torch.long. Encoded by data.CLIPTokenizer. - """ - xi = self.visual(imgs) - xt = self.textual(txt_ids) - return xi, xt - - def init_weights(self): - # embeddings - nn.init.normal_(self.textual.token_embedding.weight, std=0.02) - nn.init.normal_(self.visual.patch_embedding.weight, std=0.1) - - # attentions - for modality in ['visual', 'textual']: - dim = self.vision_dim if modality == 'visual' else self.text_dim - transformer = getattr(self, modality).transformer - proj_gain = (1.0 / math.sqrt(dim)) * ( - 1.0 / math.sqrt(2 * len(transformer))) - attn_gain = 1.0 / math.sqrt(dim) - mlp_gain = 1.0 / math.sqrt(2.0 * dim) - for block in transformer: - nn.init.normal_(block.attn.to_qkv.weight, std=attn_gain) - nn.init.normal_(block.attn.proj.weight, std=proj_gain) - nn.init.normal_(block.mlp[0].weight, std=mlp_gain) - nn.init.normal_(block.mlp[2].weight, std=proj_gain) - - def param_groups(self): - groups = [{ - 'params': [ - p for n, p in self.named_parameters() - if 'norm' in n or n.endswith('bias') - ], - 'weight_decay': 0.0 - }, { - 'params': [ - p for n, p in self.named_parameters() - if not ('norm' in n or n.endswith('bias')) - ] - }] - return groups - - -class XLMRobertaWithHead(XLMRoberta): - - def __init__(self, **kwargs): - self.out_dim = kwargs.pop('out_dim') - super().__init__(**kwargs) - - # head - mid_dim = (self.dim + self.out_dim) // 2 - self.head = nn.Sequential( - nn.Linear(self.dim, mid_dim, bias=False), nn.GELU(), - nn.Linear(mid_dim, self.out_dim, bias=False)) - - def forward(self, ids): - # xlm-roberta - x = super().forward(ids) - - # average pooling - mask = ids.ne(self.pad_id).unsqueeze(-1).to(x) - x = (x * mask).sum(dim=1) / mask.sum(dim=1) - - # head - x = 
self.head(x) - return x - - -class XLMRobertaCLIP(nn.Module): - - def __init__(self, - embed_dim=1024, - image_size=224, - patch_size=14, - vision_dim=1280, - vision_mlp_ratio=4, - vision_heads=16, - vision_layers=32, - vision_pool='token', - vision_pre_norm=True, - vision_post_norm=False, - activation='gelu', - vocab_size=250002, - max_text_len=514, - type_size=1, - pad_id=1, - text_dim=1024, - text_heads=16, - text_layers=24, - text_post_norm=True, - text_dropout=0.1, - attn_dropout=0.0, - proj_dropout=0.0, - embedding_dropout=0.0, - norm_eps=1e-5): - super().__init__() - self.embed_dim = embed_dim - self.image_size = image_size - self.patch_size = patch_size - self.vision_dim = vision_dim - self.vision_mlp_ratio = vision_mlp_ratio - self.vision_heads = vision_heads - self.vision_layers = vision_layers - self.vision_pre_norm = vision_pre_norm - self.vision_post_norm = vision_post_norm - self.activation = activation - self.vocab_size = vocab_size - self.max_text_len = max_text_len - self.type_size = type_size - self.pad_id = pad_id - self.text_dim = text_dim - self.text_heads = text_heads - self.text_layers = text_layers - self.text_post_norm = text_post_norm - self.norm_eps = norm_eps - - # models - self.visual = VisionTransformer( - image_size=image_size, - patch_size=patch_size, - dim=vision_dim, - mlp_ratio=vision_mlp_ratio, - out_dim=embed_dim, - num_heads=vision_heads, - num_layers=vision_layers, - pool_type=vision_pool, - pre_norm=vision_pre_norm, - post_norm=vision_post_norm, - activation=activation, - attn_dropout=attn_dropout, - proj_dropout=proj_dropout, - embedding_dropout=embedding_dropout, - norm_eps=norm_eps) - self.textual = None - self.log_scale = nn.Parameter(math.log(1 / 0.07) * torch.ones([])) - - def forward(self, imgs, txt_ids): - """ - imgs: [B, 3, H, W] of torch.float32. - - mean: [0.48145466, 0.4578275, 0.40821073] - - std: [0.26862954, 0.26130258, 0.27577711] - txt_ids: [B, L] of torch.long. - Encoded by data.CLIPTokenizer. 
- """ - xi = self.visual(imgs) - xt = self.textual(txt_ids) - return xi, xt - - def param_groups(self): - groups = [{ - 'params': [ - p for n, p in self.named_parameters() - if 'norm' in n or n.endswith('bias') - ], - 'weight_decay': 0.0 - }, { - 'params': [ - p for n, p in self.named_parameters() - if not ('norm' in n or n.endswith('bias')) - ] - }] - return groups - - -def _clip(pretrained=False, - pretrained_name=None, - model_cls=CLIP, - return_transforms=False, - return_tokenizer=False, - tokenizer_padding='eos', - dtype=torch.float32, - device='cpu', - **kwargs): - # init model - if pretrained and pretrained_name: - from sora import BUCKET, DOWNLOAD_TO_CACHE - - # init a meta model - with torch.device('meta'): - model = model_cls(**kwargs) - - # checkpoint path - checkpoint = f'models/clip/{pretrained_name}' - if dtype in (torch.float16, torch.bfloat16): - suffix = '-' + { - torch.float16: 'fp16', - torch.bfloat16: 'bf16' - }[dtype] - if object_exists(BUCKET, f'{checkpoint}{suffix}.pth'): - checkpoint = f'{checkpoint}{suffix}' - checkpoint += '.pth' - - # load - model.load_state_dict( - torch.load(DOWNLOAD_TO_CACHE(checkpoint), map_location=device), - assign=True, - strict=False) - else: - # init a model on device - with torch.device(device): - model = model_cls(**kwargs) - - # set device - output = (model,) - - # init transforms - if return_transforms: - # mean and std - if 'siglip' in pretrained_name.lower(): - mean, std = [0.5, 0.5, 0.5], [0.5, 0.5, 0.5] - else: - mean = [0.48145466, 0.4578275, 0.40821073] - std = [0.26862954, 0.26130258, 0.27577711] - - # transforms - transforms = T.Compose([ - T.Resize((model.image_size, model.image_size), - interpolation=T.InterpolationMode.BICUBIC), - T.ToTensor(), - T.Normalize(mean=mean, std=std) - ]) - output += (transforms,) - - # init tokenizer - if return_tokenizer: - from sora import data - if 'siglip' in pretrained_name.lower(): - tokenizer = data.HuggingfaceTokenizer( - name=f'timm/{pretrained_name}', - 
seq_len=model.text_len, - clean='canonicalize') - elif 'xlm' in pretrained_name.lower(): - tokenizer = data.HuggingfaceTokenizer( - name='xlm-roberta-large', - seq_len=model.max_text_len - 2, - clean='whitespace') - elif 'mba' in pretrained_name.lower(): - tokenizer = data.HuggingfaceTokenizer( - name='facebook/xlm-roberta-xl', - seq_len=model.max_text_len - 2, - clean='whitespace') - else: - tokenizer = data.CLIPTokenizer( - seq_len=model.text_len, padding=tokenizer_padding) - output += (tokenizer,) - return output[0] if len(output) == 1 else output - - -def clip_xlm_roberta_vit_h_14( - pretrained=False, - pretrained_name='open-clip-xlm-roberta-large-vit-huge-14', - **kwargs): - cfg = dict( - embed_dim=1024, - image_size=224, - patch_size=14, - vision_dim=1280, - vision_mlp_ratio=4, - vision_heads=16, - vision_layers=32, - vision_pool='token', - activation='gelu', - vocab_size=250002, - max_text_len=514, - type_size=1, - pad_id=1, - text_dim=1024, - text_heads=16, - text_layers=24, - text_post_norm=True, - text_dropout=0.1, - attn_dropout=0.0, - proj_dropout=0.0, - embedding_dropout=0.0) - cfg.update(**kwargs) - return _clip(pretrained, pretrained_name, XLMRobertaCLIP, **cfg) - - -class WanImageEncoder(torch.nn.Module): - - def __init__(self): - super().__init__() - # init model - self.model, self.transforms = clip_xlm_roberta_vit_h_14( - pretrained=False, - return_transforms=True, - return_tokenizer=False, - dtype=torch.float32, - device="cpu") - - def encode_image(self, videos): - # preprocess - size = (self.model.image_size,) * 2 - videos = torch.cat([ - F.interpolate( - u, - size=size, - mode='bicubic', - align_corners=False) for u in videos - ]) - videos = self.transforms.transforms[-1](videos.mul_(0.5).add_(0.5)) - - # forward - dtype = next(iter(self.model.visual.parameters())).dtype - videos = videos.to(dtype) - out = self.model.visual(videos, use_31_block=True) - return out - - @staticmethod - def state_dict_converter(): - return 
WanImageEncoderStateDictConverter() - - -class WanImageEncoderStateDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - return state_dict - - def from_civitai(self, state_dict): - state_dict_ = {} - for name, param in state_dict.items(): - if name.startswith("textual."): - continue - name = "model." + name - state_dict_[name] = param - return state_dict_ - diff --git a/diffsynth/models/wan_video_motion_controller.py b/diffsynth/models/wan_video_motion_controller.py deleted file mode 100644 index 518c1c6..0000000 --- a/diffsynth/models/wan_video_motion_controller.py +++ /dev/null @@ -1,44 +0,0 @@ -import torch -import torch.nn as nn -from .wan_video_dit import sinusoidal_embedding_1d - - - -class WanMotionControllerModel(torch.nn.Module): - def __init__(self, freq_dim=256, dim=1536): - super().__init__() - self.freq_dim = freq_dim - self.linear = nn.Sequential( - nn.Linear(freq_dim, dim), - nn.SiLU(), - nn.Linear(dim, dim), - nn.SiLU(), - nn.Linear(dim, dim * 6), - ) - - def forward(self, motion_bucket_id): - emb = sinusoidal_embedding_1d(self.freq_dim, motion_bucket_id * 10) - emb = self.linear(emb) - return emb - - def init(self): - state_dict = self.linear[-1].state_dict() - state_dict = {i: state_dict[i] * 0 for i in state_dict} - self.linear[-1].load_state_dict(state_dict) - - @staticmethod - def state_dict_converter(): - return WanMotionControllerModelDictConverter() - - - -class WanMotionControllerModelDictConverter: - def __init__(self): - pass - - def from_diffusers(self, state_dict): - return state_dict - - def from_civitai(self, state_dict): - return state_dict - diff --git a/diffsynth/models/wan_video_text_encoder.py b/diffsynth/models/wan_video_text_encoder.py deleted file mode 100644 index c288737..0000000 --- a/diffsynth/models/wan_video_text_encoder.py +++ /dev/null @@ -1,269 +0,0 @@ -import math - -import torch -import torch.nn as nn -import torch.nn.functional as F - - -def fp16_clamp(x): - if x.dtype == 
torch.float16 and torch.isinf(x).any(): - clamp = torch.finfo(x.dtype).max - 1000 - x = torch.clamp(x, min=-clamp, max=clamp) - return x - - -class GELU(nn.Module): - - def forward(self, x): - return 0.5 * x * (1.0 + torch.tanh( - math.sqrt(2.0 / math.pi) * (x + 0.044715 * torch.pow(x, 3.0)))) - - -class T5LayerNorm(nn.Module): - - def __init__(self, dim, eps=1e-6): - super(T5LayerNorm, self).__init__() - self.dim = dim - self.eps = eps - self.weight = nn.Parameter(torch.ones(dim)) - - def forward(self, x): - x = x * torch.rsqrt(x.float().pow(2).mean(dim=-1, keepdim=True) + - self.eps) - if self.weight.dtype in [torch.float16, torch.bfloat16]: - x = x.type_as(self.weight) - return self.weight * x - - -class T5Attention(nn.Module): - - def __init__(self, dim, dim_attn, num_heads, dropout=0.1): - assert dim_attn % num_heads == 0 - super(T5Attention, self).__init__() - self.dim = dim - self.dim_attn = dim_attn - self.num_heads = num_heads - self.head_dim = dim_attn // num_heads - - # layers - self.q = nn.Linear(dim, dim_attn, bias=False) - self.k = nn.Linear(dim, dim_attn, bias=False) - self.v = nn.Linear(dim, dim_attn, bias=False) - self.o = nn.Linear(dim_attn, dim, bias=False) - self.dropout = nn.Dropout(dropout) - - def forward(self, x, context=None, mask=None, pos_bias=None): - """ - x: [B, L1, C]. - context: [B, L2, C] or None. - mask: [B, L2] or [B, L1, L2] or None. 
- """ - # check inputs - context = x if context is None else context - b, n, c = x.size(0), self.num_heads, self.head_dim - - # compute query, key, value - q = self.q(x).view(b, -1, n, c) - k = self.k(context).view(b, -1, n, c) - v = self.v(context).view(b, -1, n, c) - - # attention bias - attn_bias = x.new_zeros(b, n, q.size(1), k.size(1)) - if pos_bias is not None: - attn_bias += pos_bias - if mask is not None: - assert mask.ndim in [2, 3] - mask = mask.view(b, 1, 1, - -1) if mask.ndim == 2 else mask.unsqueeze(1) - attn_bias.masked_fill_(mask == 0, torch.finfo(x.dtype).min) - - # compute attention (T5 does not use scaling) - attn = torch.einsum('binc,bjnc->bnij', q, k) + attn_bias - attn = F.softmax(attn.float(), dim=-1).type_as(attn) - x = torch.einsum('bnij,bjnc->binc', attn, v) - - # output - x = x.reshape(b, -1, n * c) - x = self.o(x) - x = self.dropout(x) - return x - - -class T5FeedForward(nn.Module): - - def __init__(self, dim, dim_ffn, dropout=0.1): - super(T5FeedForward, self).__init__() - self.dim = dim - self.dim_ffn = dim_ffn - - # layers - self.gate = nn.Sequential(nn.Linear(dim, dim_ffn, bias=False), GELU()) - self.fc1 = nn.Linear(dim, dim_ffn, bias=False) - self.fc2 = nn.Linear(dim_ffn, dim, bias=False) - self.dropout = nn.Dropout(dropout) - - def forward(self, x): - x = self.fc1(x) * self.gate(x) - x = self.dropout(x) - x = self.fc2(x) - x = self.dropout(x) - return x - - -class T5SelfAttention(nn.Module): - - def __init__(self, - dim, - dim_attn, - dim_ffn, - num_heads, - num_buckets, - shared_pos=True, - dropout=0.1): - super(T5SelfAttention, self).__init__() - self.dim = dim - self.dim_attn = dim_attn - self.dim_ffn = dim_ffn - self.num_heads = num_heads - self.num_buckets = num_buckets - self.shared_pos = shared_pos - - # layers - self.norm1 = T5LayerNorm(dim) - self.attn = T5Attention(dim, dim_attn, num_heads, dropout) - self.norm2 = T5LayerNorm(dim) - self.ffn = T5FeedForward(dim, dim_ffn, dropout) - self.pos_embedding = None if shared_pos 
else T5RelativeEmbedding( - num_buckets, num_heads, bidirectional=True) - - def forward(self, x, mask=None, pos_bias=None): - e = pos_bias if self.shared_pos else self.pos_embedding( - x.size(1), x.size(1)) - x = fp16_clamp(x + self.attn(self.norm1(x), mask=mask, pos_bias=e)) - x = fp16_clamp(x + self.ffn(self.norm2(x))) - return x - - -class T5RelativeEmbedding(nn.Module): - - def __init__(self, num_buckets, num_heads, bidirectional, max_dist=128): - super(T5RelativeEmbedding, self).__init__() - self.num_buckets = num_buckets - self.num_heads = num_heads - self.bidirectional = bidirectional - self.max_dist = max_dist - - # layers - self.embedding = nn.Embedding(num_buckets, num_heads) - - def forward(self, lq, lk): - device = self.embedding.weight.device - # rel_pos = torch.arange(lk).unsqueeze(0).to(device) - \ - # torch.arange(lq).unsqueeze(1).to(device) - rel_pos = torch.arange(lk, device=device).unsqueeze(0) - \ - torch.arange(lq, device=device).unsqueeze(1) - rel_pos = self._relative_position_bucket(rel_pos) - rel_pos_embeds = self.embedding(rel_pos) - rel_pos_embeds = rel_pos_embeds.permute(2, 0, 1).unsqueeze( - 0) # [1, N, Lq, Lk] - return rel_pos_embeds.contiguous() - - def _relative_position_bucket(self, rel_pos): - # preprocess - if self.bidirectional: - num_buckets = self.num_buckets // 2 - rel_buckets = (rel_pos > 0).long() * num_buckets - rel_pos = torch.abs(rel_pos) - else: - num_buckets = self.num_buckets - rel_buckets = 0 - rel_pos = -torch.min(rel_pos, torch.zeros_like(rel_pos)) - - # embeddings for small and large positions - max_exact = num_buckets // 2 - rel_pos_large = max_exact + (torch.log(rel_pos.float() / max_exact) / - math.log(self.max_dist / max_exact) * - (num_buckets - max_exact)).long() - rel_pos_large = torch.min( - rel_pos_large, torch.full_like(rel_pos_large, num_buckets - 1)) - rel_buckets += torch.where(rel_pos < max_exact, rel_pos, rel_pos_large) - return rel_buckets - -def init_weights(m): - if isinstance(m, T5LayerNorm): - 
nn.init.ones_(m.weight) - elif isinstance(m, T5FeedForward): - nn.init.normal_(m.gate[0].weight, std=m.dim**-0.5) - nn.init.normal_(m.fc1.weight, std=m.dim**-0.5) - nn.init.normal_(m.fc2.weight, std=m.dim_ffn**-0.5) - elif isinstance(m, T5Attention): - nn.init.normal_(m.q.weight, std=(m.dim * m.dim_attn)**-0.5) - nn.init.normal_(m.k.weight, std=m.dim**-0.5) - nn.init.normal_(m.v.weight, std=m.dim**-0.5) - nn.init.normal_(m.o.weight, std=(m.num_heads * m.dim_attn)**-0.5) - elif isinstance(m, T5RelativeEmbedding): - nn.init.normal_( - m.embedding.weight, std=(2 * m.num_buckets * m.num_heads)**-0.5) - - -class WanTextEncoder(torch.nn.Module): - - def __init__(self, - vocab=256384, - dim=4096, - dim_attn=4096, - dim_ffn=10240, - num_heads=64, - num_layers=24, - num_buckets=32, - shared_pos=False, - dropout=0.1): - super(WanTextEncoder, self).__init__() - self.dim = dim - self.dim_attn = dim_attn - self.dim_ffn = dim_ffn - self.num_heads = num_heads - self.num_layers = num_layers - self.num_buckets = num_buckets - self.shared_pos = shared_pos - - # layers - self.token_embedding = vocab if isinstance(vocab, nn.Embedding) \ - else nn.Embedding(vocab, dim) - self.pos_embedding = T5RelativeEmbedding( - num_buckets, num_heads, bidirectional=True) if shared_pos else None - self.dropout = nn.Dropout(dropout) - self.blocks = nn.ModuleList([ - T5SelfAttention(dim, dim_attn, dim_ffn, num_heads, num_buckets, - shared_pos, dropout) for _ in range(num_layers) - ]) - self.norm = T5LayerNorm(dim) - - # initialize weights - self.apply(init_weights) - - def forward(self, ids, mask=None): - x = self.token_embedding(ids) - x = self.dropout(x) - e = self.pos_embedding(x.size(1), - x.size(1)) if self.shared_pos else None - for block in self.blocks: - x = block(x, mask, pos_bias=e) - x = self.norm(x) - x = self.dropout(x) - return x - - @staticmethod - def state_dict_converter(): - return WanTextEncoderStateDictConverter() - - -class WanTextEncoderStateDictConverter: - def __init__(self): - 
pass - - def from_diffusers(self, state_dict): - return state_dict - - def from_civitai(self, state_dict): - return state_dict diff --git a/diffsynth/models/wan_video_vace.py b/diffsynth/models/wan_video_vace.py deleted file mode 100644 index 40f3804..0000000 --- a/diffsynth/models/wan_video_vace.py +++ /dev/null @@ -1,113 +0,0 @@ -import torch -from .wan_video_dit import DiTBlock -from .utils import hash_state_dict_keys - -class VaceWanAttentionBlock(DiTBlock): - def __init__(self, has_image_input, dim, num_heads, ffn_dim, eps=1e-6, block_id=0): - super().__init__(has_image_input, dim, num_heads, ffn_dim, eps=eps) - self.block_id = block_id - if block_id == 0: - self.before_proj = torch.nn.Linear(self.dim, self.dim) - self.after_proj = torch.nn.Linear(self.dim, self.dim) - - def forward(self, c, x, context, t_mod, freqs): - if self.block_id == 0: - c = self.before_proj(c) + x - all_c = [] - else: - all_c = list(torch.unbind(c)) - c = all_c.pop(-1) - c = super().forward(c, context, t_mod, freqs) - c_skip = self.after_proj(c) - all_c += [c_skip, c] - c = torch.stack(all_c) - return c - - -class VaceWanModel(torch.nn.Module): - def __init__( - self, - vace_layers=(0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28), - vace_in_dim=96, - patch_size=(1, 2, 2), - has_image_input=False, - dim=1536, - num_heads=12, - ffn_dim=8960, - eps=1e-6, - ): - super().__init__() - self.vace_layers = vace_layers - self.vace_in_dim = vace_in_dim - self.vace_layers_mapping = {i: n for n, i in enumerate(self.vace_layers)} - - # vace blocks - self.vace_blocks = torch.nn.ModuleList([ - VaceWanAttentionBlock(has_image_input, dim, num_heads, ffn_dim, eps, block_id=i) - for i in self.vace_layers - ]) - - # vace patch embeddings - self.vace_patch_embedding = torch.nn.Conv3d(vace_in_dim, dim, kernel_size=patch_size, stride=patch_size) - - def forward( - self, x, vace_context, context, t_mod, freqs, - use_gradient_checkpointing: bool = False, - use_gradient_checkpointing_offload: bool = 
False, - ): - c = [self.vace_patch_embedding(u.unsqueeze(0)) for u in vace_context] - c = [u.flatten(2).transpose(1, 2) for u in c] - c = torch.cat([ - torch.cat([u, u.new_zeros(1, x.shape[1] - u.size(1), u.size(2))], - dim=1) for u in c - ]) - - def create_custom_forward(module): - def custom_forward(*inputs): - return module(*inputs) - return custom_forward - - for block in self.vace_blocks: - if use_gradient_checkpointing_offload: - with torch.autograd.graph.save_on_cpu(): - c = torch.utils.checkpoint.checkpoint( - create_custom_forward(block), - c, x, context, t_mod, freqs, - use_reentrant=False, - ) - elif use_gradient_checkpointing: - c = torch.utils.checkpoint.checkpoint( - create_custom_forward(block), - c, x, context, t_mod, freqs, - use_reentrant=False, - ) - else: - c = block(c, x, context, t_mod, freqs) - hints = torch.unbind(c)[:-1] - return hints - - @staticmethod - def state_dict_converter(): - return VaceWanModelDictConverter() - - -class VaceWanModelDictConverter: - def __init__(self): - pass - - def from_civitai(self, state_dict): - state_dict_ = {name: param for name, param in state_dict.items() if name.startswith("vace")} - if hash_state_dict_keys(state_dict_) == '3b2726384e4f64837bdf216eea3f310d': # vace 14B - config = { - "vace_layers": (0, 5, 10, 15, 20, 25, 30, 35), - "vace_in_dim": 96, - "patch_size": (1, 2, 2), - "has_image_input": False, - "dim": 5120, - "num_heads": 40, - "ffn_dim": 13824, - "eps": 1e-06, - } - else: - config = {} - return state_dict_, config diff --git a/diffsynth/models/wan_video_vae.py b/diffsynth/models/wan_video_vae.py deleted file mode 100644 index d24e29d..0000000 --- a/diffsynth/models/wan_video_vae.py +++ /dev/null @@ -1,1382 +0,0 @@ -from einops import rearrange, repeat - -import torch -import torch.nn as nn -import torch.nn.functional as F -from tqdm import tqdm - -CACHE_T = 2 - - -def check_is_instance(model, module_class): - if isinstance(model, module_class): - return True - if hasattr(model, "module") and 
isinstance(model.module, module_class): - return True - return False - - -def block_causal_mask(x, block_size): - # params - b, n, s, _, device = *x.size(), x.device - assert s % block_size == 0 - num_blocks = s // block_size - - # build mask - mask = torch.zeros(b, n, s, s, dtype=torch.bool, device=device) - for i in range(num_blocks): - mask[:, :, - i * block_size:(i + 1) * block_size, :(i + 1) * block_size] = 1 - return mask - - -class CausalConv3d(nn.Conv3d): - """ - Causal 3d convolusion. - """ - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._padding = (self.padding[2], self.padding[2], self.padding[1], - self.padding[1], 2 * self.padding[0], 0) - self.padding = (0, 0, 0) - - def forward(self, x, cache_x=None): - padding = list(self._padding) - if cache_x is not None and self._padding[4] > 0: - cache_x = cache_x.to(x.device) - x = torch.cat([cache_x, x], dim=2) - padding[4] -= cache_x.shape[2] - x = F.pad(x, padding) - - return super().forward(x) - - -class RMS_norm(nn.Module): - - def __init__(self, dim, channel_first=True, images=True, bias=False): - super().__init__() - broadcastable_dims = (1, 1, 1) if not images else (1, 1) - shape = (dim, *broadcastable_dims) if channel_first else (dim,) - - self.channel_first = channel_first - self.scale = dim**0.5 - self.gamma = nn.Parameter(torch.ones(shape)) - self.bias = nn.Parameter(torch.zeros(shape)) if bias else 0. - - def forward(self, x): - return F.normalize( - x, dim=(1 if self.channel_first else - -1)) * self.scale * self.gamma + self.bias - - -class Upsample(nn.Upsample): - - def forward(self, x): - """ - Fix bfloat16 support for nearest neighbor interpolation. 
- """ - return super().forward(x.float()).type_as(x) - - -class Resample(nn.Module): - - def __init__(self, dim, mode): - assert mode in ('none', 'upsample2d', 'upsample3d', 'downsample2d', - 'downsample3d') - super().__init__() - self.dim = dim - self.mode = mode - - # layers - if mode == 'upsample2d': - self.resample = nn.Sequential( - Upsample(scale_factor=(2., 2.), mode='nearest-exact'), - nn.Conv2d(dim, dim // 2, 3, padding=1)) - elif mode == 'upsample3d': - self.resample = nn.Sequential( - Upsample(scale_factor=(2., 2.), mode='nearest-exact'), - nn.Conv2d(dim, dim // 2, 3, padding=1)) - self.time_conv = CausalConv3d(dim, - dim * 2, (3, 1, 1), - padding=(1, 0, 0)) - - elif mode == 'downsample2d': - self.resample = nn.Sequential( - nn.ZeroPad2d((0, 1, 0, 1)), - nn.Conv2d(dim, dim, 3, stride=(2, 2))) - elif mode == 'downsample3d': - self.resample = nn.Sequential( - nn.ZeroPad2d((0, 1, 0, 1)), - nn.Conv2d(dim, dim, 3, stride=(2, 2))) - self.time_conv = CausalConv3d(dim, - dim, (3, 1, 1), - stride=(2, 1, 1), - padding=(0, 0, 0)) - - else: - self.resample = nn.Identity() - - def forward(self, x, feat_cache=None, feat_idx=[0]): - b, c, t, h, w = x.size() - if self.mode == 'upsample3d': - if feat_cache is not None: - idx = feat_idx[0] - if feat_cache[idx] is None: - feat_cache[idx] = 'Rep' - feat_idx[0] += 1 - else: - - cache_x = x[:, :, -CACHE_T:, :, :].clone() - if cache_x.shape[2] < 2 and feat_cache[ - idx] is not None and feat_cache[idx] != 'Rep': - # cache last frame of last two chunk - cache_x = torch.cat([ - feat_cache[idx][:, :, -1, :, :].unsqueeze(2).to( - cache_x.device), cache_x - ], - dim=2) - if cache_x.shape[2] < 2 and feat_cache[ - idx] is not None and feat_cache[idx] == 'Rep': - cache_x = torch.cat([ - torch.zeros_like(cache_x).to(cache_x.device), - cache_x - ], - dim=2) - if feat_cache[idx] == 'Rep': - x = self.time_conv(x) - else: - x = self.time_conv(x, feat_cache[idx]) - feat_cache[idx] = cache_x - feat_idx[0] += 1 - - x = x.reshape(b, 2, c, t, h, 
w) - x = torch.stack((x[:, 0, :, :, :, :], x[:, 1, :, :, :, :]), - 3) - x = x.reshape(b, c, t * 2, h, w) - t = x.shape[2] - x = rearrange(x, 'b c t h w -> (b t) c h w') - x = self.resample(x) - x = rearrange(x, '(b t) c h w -> b c t h w', t=t) - - if self.mode == 'downsample3d': - if feat_cache is not None: - idx = feat_idx[0] - if feat_cache[idx] is None: - feat_cache[idx] = x.clone() - feat_idx[0] += 1 - else: - cache_x = x[:, :, -1:, :, :].clone() - x = self.time_conv( - torch.cat([feat_cache[idx][:, :, -1:, :, :], x], 2)) - feat_cache[idx] = cache_x - feat_idx[0] += 1 - return x - - def init_weight(self, conv): - conv_weight = conv.weight - nn.init.zeros_(conv_weight) - c1, c2, t, h, w = conv_weight.size() - one_matrix = torch.eye(c1, c2) - init_matrix = one_matrix - nn.init.zeros_(conv_weight) - conv_weight.data[:, :, 1, 0, 0] = init_matrix - conv.weight.data.copy_(conv_weight) - nn.init.zeros_(conv.bias.data) - - def init_weight2(self, conv): - conv_weight = conv.weight.data - nn.init.zeros_(conv_weight) - c1, c2, t, h, w = conv_weight.size() - init_matrix = torch.eye(c1 // 2, c2) - conv_weight[:c1 // 2, :, -1, 0, 0] = init_matrix - conv_weight[c1 // 2:, :, -1, 0, 0] = init_matrix - conv.weight.data.copy_(conv_weight) - nn.init.zeros_(conv.bias.data) - - - -def patchify(x, patch_size): - if patch_size == 1: - return x - if x.dim() == 4: - x = rearrange(x, "b c (h q) (w r) -> b (c r q) h w", q=patch_size, r=patch_size) - elif x.dim() == 5: - x = rearrange(x, - "b c f (h q) (w r) -> b (c r q) f h w", - q=patch_size, - r=patch_size) - else: - raise ValueError(f"Invalid input shape: {x.shape}") - return x - - -def unpatchify(x, patch_size): - if patch_size == 1: - return x - if x.dim() == 4: - x = rearrange(x, "b (c r q) h w -> b c (h q) (w r)", q=patch_size, r=patch_size) - elif x.dim() == 5: - x = rearrange(x, - "b (c r q) f h w -> b c f (h q) (w r)", - q=patch_size, - r=patch_size) - return x - - -class Resample38(Resample): - - def __init__(self, dim, mode): 
- assert mode in ( - "none", - "upsample2d", - "upsample3d", - "downsample2d", - "downsample3d", - ) - super(Resample, self).__init__() - self.dim = dim - self.mode = mode - - # layers - if mode == "upsample2d": - self.resample = nn.Sequential( - Upsample(scale_factor=(2.0, 2.0), mode="nearest-exact"), - nn.Conv2d(dim, dim, 3, padding=1), - ) - elif mode == "upsample3d": - self.resample = nn.Sequential( - Upsample(scale_factor=(2.0, 2.0), mode="nearest-exact"), - nn.Conv2d(dim, dim, 3, padding=1), - ) - self.time_conv = CausalConv3d(dim, dim * 2, (3, 1, 1), padding=(1, 0, 0)) - elif mode == "downsample2d": - self.resample = nn.Sequential( - nn.ZeroPad2d((0, 1, 0, 1)), nn.Conv2d(dim, dim, 3, stride=(2, 2)) - ) - elif mode == "downsample3d": - self.resample = nn.Sequential( - nn.ZeroPad2d((0, 1, 0, 1)), nn.Conv2d(dim, dim, 3, stride=(2, 2)) - ) - self.time_conv = CausalConv3d( - dim, dim, (3, 1, 1), stride=(2, 1, 1), padding=(0, 0, 0) - ) - else: - self.resample = nn.Identity() - -class ResidualBlock(nn.Module): - - def __init__(self, in_dim, out_dim, dropout=0.0): - super().__init__() - self.in_dim = in_dim - self.out_dim = out_dim - - # layers - self.residual = nn.Sequential( - RMS_norm(in_dim, images=False), nn.SiLU(), - CausalConv3d(in_dim, out_dim, 3, padding=1), - RMS_norm(out_dim, images=False), nn.SiLU(), nn.Dropout(dropout), - CausalConv3d(out_dim, out_dim, 3, padding=1)) - self.shortcut = CausalConv3d(in_dim, out_dim, 1) \ - if in_dim != out_dim else nn.Identity() - - def forward(self, x, feat_cache=None, feat_idx=[0]): - h = self.shortcut(x) - for layer in self.residual: - if check_is_instance(layer, CausalConv3d) and feat_cache is not None: - idx = feat_idx[0] - cache_x = x[:, :, -CACHE_T:, :, :].clone() - if cache_x.shape[2] < 2 and feat_cache[idx] is not None: - # cache last frame of last two chunk - cache_x = torch.cat([ - feat_cache[idx][:, :, -1, :, :].unsqueeze(2).to( - cache_x.device), cache_x - ], - dim=2) - x = layer(x, feat_cache[idx]) - 
feat_cache[idx] = cache_x - feat_idx[0] += 1 - else: - x = layer(x) - return x + h - - -class AttentionBlock(nn.Module): - """ - Causal self-attention with a single head. - """ - - def __init__(self, dim): - super().__init__() - self.dim = dim - - # layers - self.norm = RMS_norm(dim) - self.to_qkv = nn.Conv2d(dim, dim * 3, 1) - self.proj = nn.Conv2d(dim, dim, 1) - - # zero out the last layer params - nn.init.zeros_(self.proj.weight) - - def forward(self, x): - identity = x - b, c, t, h, w = x.size() - x = rearrange(x, 'b c t h w -> (b t) c h w') - x = self.norm(x) - # compute query, key, value - q, k, v = self.to_qkv(x).reshape(b * t, 1, c * 3, -1).permute( - 0, 1, 3, 2).contiguous().chunk(3, dim=-1) - - # apply attention - x = F.scaled_dot_product_attention( - q, - k, - v, - #attn_mask=block_causal_mask(q, block_size=h * w) - ) - x = x.squeeze(1).permute(0, 2, 1).reshape(b * t, c, h, w) - - # output - x = self.proj(x) - x = rearrange(x, '(b t) c h w-> b c t h w', t=t) - return x + identity - - -class AvgDown3D(nn.Module): - def __init__( - self, - in_channels, - out_channels, - factor_t, - factor_s=1, - ): - super().__init__() - self.in_channels = in_channels - self.out_channels = out_channels - self.factor_t = factor_t - self.factor_s = factor_s - self.factor = self.factor_t * self.factor_s * self.factor_s - - assert in_channels * self.factor % out_channels == 0 - self.group_size = in_channels * self.factor // out_channels - - def forward(self, x: torch.Tensor) -> torch.Tensor: - pad_t = (self.factor_t - x.shape[2] % self.factor_t) % self.factor_t - pad = (0, 0, 0, 0, pad_t, 0) - x = F.pad(x, pad) - B, C, T, H, W = x.shape - x = x.view( - B, - C, - T // self.factor_t, - self.factor_t, - H // self.factor_s, - self.factor_s, - W // self.factor_s, - self.factor_s, - ) - x = x.permute(0, 1, 3, 5, 7, 2, 4, 6).contiguous() - x = x.view( - B, - C * self.factor, - T // self.factor_t, - H // self.factor_s, - W // self.factor_s, - ) - x = x.view( - B, - self.out_channels, 
- self.group_size, - T // self.factor_t, - H // self.factor_s, - W // self.factor_s, - ) - x = x.mean(dim=2) - return x - - -class DupUp3D(nn.Module): - def __init__( - self, - in_channels: int, - out_channels: int, - factor_t, - factor_s=1, - ): - super().__init__() - self.in_channels = in_channels - self.out_channels = out_channels - - self.factor_t = factor_t - self.factor_s = factor_s - self.factor = self.factor_t * self.factor_s * self.factor_s - - assert out_channels * self.factor % in_channels == 0 - self.repeats = out_channels * self.factor // in_channels - - def forward(self, x: torch.Tensor, first_chunk=False) -> torch.Tensor: - x = x.repeat_interleave(self.repeats, dim=1) - x = x.view( - x.size(0), - self.out_channels, - self.factor_t, - self.factor_s, - self.factor_s, - x.size(2), - x.size(3), - x.size(4), - ) - x = x.permute(0, 1, 5, 2, 6, 3, 7, 4).contiguous() - x = x.view( - x.size(0), - self.out_channels, - x.size(2) * self.factor_t, - x.size(4) * self.factor_s, - x.size(6) * self.factor_s, - ) - if first_chunk: - x = x[:, :, self.factor_t - 1 :, :, :] - return x - - -class Down_ResidualBlock(nn.Module): - def __init__( - self, in_dim, out_dim, dropout, mult, temperal_downsample=False, down_flag=False - ): - super().__init__() - - # Shortcut path with downsample - self.avg_shortcut = AvgDown3D( - in_dim, - out_dim, - factor_t=2 if temperal_downsample else 1, - factor_s=2 if down_flag else 1, - ) - - # Main path with residual blocks and downsample - downsamples = [] - for _ in range(mult): - downsamples.append(ResidualBlock(in_dim, out_dim, dropout)) - in_dim = out_dim - - # Add the final downsample block - if down_flag: - mode = "downsample3d" if temperal_downsample else "downsample2d" - downsamples.append(Resample38(out_dim, mode=mode)) - - self.downsamples = nn.Sequential(*downsamples) - - def forward(self, x, feat_cache=None, feat_idx=[0]): - x_copy = x.clone() - for module in self.downsamples: - x = module(x, feat_cache, feat_idx) - - return x + 
self.avg_shortcut(x_copy) - - -class Up_ResidualBlock(nn.Module): - def __init__( - self, in_dim, out_dim, dropout, mult, temperal_upsample=False, up_flag=False - ): - super().__init__() - # Shortcut path with upsample - if up_flag: - self.avg_shortcut = DupUp3D( - in_dim, - out_dim, - factor_t=2 if temperal_upsample else 1, - factor_s=2 if up_flag else 1, - ) - else: - self.avg_shortcut = None - - # Main path with residual blocks and upsample - upsamples = [] - for _ in range(mult): - upsamples.append(ResidualBlock(in_dim, out_dim, dropout)) - in_dim = out_dim - - # Add the final upsample block - if up_flag: - mode = "upsample3d" if temperal_upsample else "upsample2d" - upsamples.append(Resample38(out_dim, mode=mode)) - - self.upsamples = nn.Sequential(*upsamples) - - def forward(self, x, feat_cache=None, feat_idx=[0], first_chunk=False): - x_main = x.clone() - for module in self.upsamples: - x_main = module(x_main, feat_cache, feat_idx) - if self.avg_shortcut is not None: - x_shortcut = self.avg_shortcut(x, first_chunk) - return x_main + x_shortcut - else: - return x_main - - -class Encoder3d(nn.Module): - - def __init__(self, - dim=128, - z_dim=4, - dim_mult=[1, 2, 4, 4], - num_res_blocks=2, - attn_scales=[], - temperal_downsample=[True, True, False], - dropout=0.0): - super().__init__() - self.dim = dim - self.z_dim = z_dim - self.dim_mult = dim_mult - self.num_res_blocks = num_res_blocks - self.attn_scales = attn_scales - self.temperal_downsample = temperal_downsample - - # dimensions - dims = [dim * u for u in [1] + dim_mult] - scale = 1.0 - - # init block - self.conv1 = CausalConv3d(3, dims[0], 3, padding=1) - - # downsample blocks - downsamples = [] - for i, (in_dim, out_dim) in enumerate(zip(dims[:-1], dims[1:])): - # residual (+attention) blocks - for _ in range(num_res_blocks): - downsamples.append(ResidualBlock(in_dim, out_dim, dropout)) - if scale in attn_scales: - downsamples.append(AttentionBlock(out_dim)) - in_dim = out_dim - - # downsample block - 
if i != len(dim_mult) - 1: - mode = 'downsample3d' if temperal_downsample[ - i] else 'downsample2d' - downsamples.append(Resample(out_dim, mode=mode)) - scale /= 2.0 - self.downsamples = nn.Sequential(*downsamples) - - # middle blocks - self.middle = nn.Sequential(ResidualBlock(out_dim, out_dim, dropout), - AttentionBlock(out_dim), - ResidualBlock(out_dim, out_dim, dropout)) - - # output blocks - self.head = nn.Sequential(RMS_norm(out_dim, images=False), nn.SiLU(), - CausalConv3d(out_dim, z_dim, 3, padding=1)) - - def forward(self, x, feat_cache=None, feat_idx=[0]): - if feat_cache is not None: - idx = feat_idx[0] - cache_x = x[:, :, -CACHE_T:, :, :].clone() - if cache_x.shape[2] < 2 and feat_cache[idx] is not None: - # cache last frame of last two chunk - cache_x = torch.cat([ - feat_cache[idx][:, :, -1, :, :].unsqueeze(2).to( - cache_x.device), cache_x - ], - dim=2) - x = self.conv1(x, feat_cache[idx]) - feat_cache[idx] = cache_x - feat_idx[0] += 1 - else: - x = self.conv1(x) - - ## downsamples - for layer in self.downsamples: - if feat_cache is not None: - x = layer(x, feat_cache, feat_idx) - else: - x = layer(x) - - ## middle - for layer in self.middle: - if check_is_instance(layer, ResidualBlock) and feat_cache is not None: - x = layer(x, feat_cache, feat_idx) - else: - x = layer(x) - - ## head - for layer in self.head: - if check_is_instance(layer, CausalConv3d) and feat_cache is not None: - idx = feat_idx[0] - cache_x = x[:, :, -CACHE_T:, :, :].clone() - if cache_x.shape[2] < 2 and feat_cache[idx] is not None: - # cache last frame of last two chunk - cache_x = torch.cat([ - feat_cache[idx][:, :, -1, :, :].unsqueeze(2).to( - cache_x.device), cache_x - ], - dim=2) - x = layer(x, feat_cache[idx]) - feat_cache[idx] = cache_x - feat_idx[0] += 1 - else: - x = layer(x) - return x - - -class Encoder3d_38(nn.Module): - - def __init__(self, - dim=128, - z_dim=4, - dim_mult=[1, 2, 4, 4], - num_res_blocks=2, - attn_scales=[], - temperal_downsample=[False, True, True], - 
dropout=0.0): - super().__init__() - self.dim = dim - self.z_dim = z_dim - self.dim_mult = dim_mult - self.num_res_blocks = num_res_blocks - self.attn_scales = attn_scales - self.temperal_downsample = temperal_downsample - - # dimensions - dims = [dim * u for u in [1] + dim_mult] - scale = 1.0 - - # init block - self.conv1 = CausalConv3d(12, dims[0], 3, padding=1) - - # downsample blocks - downsamples = [] - for i, (in_dim, out_dim) in enumerate(zip(dims[:-1], dims[1:])): - t_down_flag = ( - temperal_downsample[i] if i < len(temperal_downsample) else False - ) - downsamples.append( - Down_ResidualBlock( - in_dim=in_dim, - out_dim=out_dim, - dropout=dropout, - mult=num_res_blocks, - temperal_downsample=t_down_flag, - down_flag=i != len(dim_mult) - 1, - ) - ) - scale /= 2.0 - self.downsamples = nn.Sequential(*downsamples) - - # middle blocks - self.middle = nn.Sequential( - ResidualBlock(out_dim, out_dim, dropout), - AttentionBlock(out_dim), - ResidualBlock(out_dim, out_dim, dropout), - ) - - # # output blocks - self.head = nn.Sequential( - RMS_norm(out_dim, images=False), - nn.SiLU(), - CausalConv3d(out_dim, z_dim, 3, padding=1), - ) - - - def forward(self, x, feat_cache=None, feat_idx=[0]): - - if feat_cache is not None: - idx = feat_idx[0] - cache_x = x[:, :, -CACHE_T:, :, :].clone() - if cache_x.shape[2] < 2 and feat_cache[idx] is not None: - cache_x = torch.cat( - [ - feat_cache[idx][:, :, -1, :, :].unsqueeze(2).to(cache_x.device), - cache_x, - ], - dim=2, - ) - x = self.conv1(x, feat_cache[idx]) - feat_cache[idx] = cache_x - feat_idx[0] += 1 - else: - x = self.conv1(x) - - ## downsamples - for layer in self.downsamples: - if feat_cache is not None: - x = layer(x, feat_cache, feat_idx) - else: - x = layer(x) - - ## middle - for layer in self.middle: - if isinstance(layer, ResidualBlock) and feat_cache is not None: - x = layer(x, feat_cache, feat_idx) - else: - x = layer(x) - - ## head - for layer in self.head: - if isinstance(layer, CausalConv3d) and feat_cache 
is not None: - idx = feat_idx[0] - cache_x = x[:, :, -CACHE_T:, :, :].clone() - if cache_x.shape[2] < 2 and feat_cache[idx] is not None: - cache_x = torch.cat( - [ - feat_cache[idx][:, :, -1, :, :] - .unsqueeze(2) - .to(cache_x.device), - cache_x, - ], - dim=2, - ) - x = layer(x, feat_cache[idx]) - feat_cache[idx] = cache_x - feat_idx[0] += 1 - else: - x = layer(x) - - return x - - -class Decoder3d(nn.Module): - - def __init__(self, - dim=128, - z_dim=4, - dim_mult=[1, 2, 4, 4], - num_res_blocks=2, - attn_scales=[], - temperal_upsample=[False, True, True], - dropout=0.0): - super().__init__() - self.dim = dim - self.z_dim = z_dim - self.dim_mult = dim_mult - self.num_res_blocks = num_res_blocks - self.attn_scales = attn_scales - self.temperal_upsample = temperal_upsample - - # dimensions - dims = [dim * u for u in [dim_mult[-1]] + dim_mult[::-1]] - scale = 1.0 / 2**(len(dim_mult) - 2) - - # init block - self.conv1 = CausalConv3d(z_dim, dims[0], 3, padding=1) - - # middle blocks - self.middle = nn.Sequential(ResidualBlock(dims[0], dims[0], dropout), - AttentionBlock(dims[0]), - ResidualBlock(dims[0], dims[0], dropout)) - - # upsample blocks - upsamples = [] - for i, (in_dim, out_dim) in enumerate(zip(dims[:-1], dims[1:])): - # residual (+attention) blocks - if i == 1 or i == 2 or i == 3: - in_dim = in_dim // 2 - for _ in range(num_res_blocks + 1): - upsamples.append(ResidualBlock(in_dim, out_dim, dropout)) - if scale in attn_scales: - upsamples.append(AttentionBlock(out_dim)) - in_dim = out_dim - - # upsample block - if i != len(dim_mult) - 1: - mode = 'upsample3d' if temperal_upsample[i] else 'upsample2d' - upsamples.append(Resample(out_dim, mode=mode)) - scale *= 2.0 - self.upsamples = nn.Sequential(*upsamples) - - # output blocks - self.head = nn.Sequential(RMS_norm(out_dim, images=False), nn.SiLU(), - CausalConv3d(out_dim, 3, 3, padding=1)) - - def forward(self, x, feat_cache=None, feat_idx=[0]): - ## conv1 - if feat_cache is not None: - idx = feat_idx[0] - 
cache_x = x[:, :, -CACHE_T:, :, :].clone() - if cache_x.shape[2] < 2 and feat_cache[idx] is not None: - # cache last frame of last two chunk - cache_x = torch.cat([ - feat_cache[idx][:, :, -1, :, :].unsqueeze(2).to( - cache_x.device), cache_x - ], - dim=2) - x = self.conv1(x, feat_cache[idx]) - feat_cache[idx] = cache_x - feat_idx[0] += 1 - else: - x = self.conv1(x) - - ## middle - for layer in self.middle: - if check_is_instance(layer, ResidualBlock) and feat_cache is not None: - x = layer(x, feat_cache, feat_idx) - else: - x = layer(x) - - ## upsamples - for layer in self.upsamples: - if feat_cache is not None: - x = layer(x, feat_cache, feat_idx) - else: - x = layer(x) - - ## head - for layer in self.head: - if check_is_instance(layer, CausalConv3d) and feat_cache is not None: - idx = feat_idx[0] - cache_x = x[:, :, -CACHE_T:, :, :].clone() - if cache_x.shape[2] < 2 and feat_cache[idx] is not None: - # cache last frame of last two chunk - cache_x = torch.cat([ - feat_cache[idx][:, :, -1, :, :].unsqueeze(2).to( - cache_x.device), cache_x - ], - dim=2) - x = layer(x, feat_cache[idx]) - feat_cache[idx] = cache_x - feat_idx[0] += 1 - else: - x = layer(x) - return x - - - -class Decoder3d_38(nn.Module): - - def __init__(self, - dim=128, - z_dim=4, - dim_mult=[1, 2, 4, 4], - num_res_blocks=2, - attn_scales=[], - temperal_upsample=[False, True, True], - dropout=0.0): - super().__init__() - self.dim = dim - self.z_dim = z_dim - self.dim_mult = dim_mult - self.num_res_blocks = num_res_blocks - self.attn_scales = attn_scales - self.temperal_upsample = temperal_upsample - - # dimensions - dims = [dim * u for u in [dim_mult[-1]] + dim_mult[::-1]] - scale = 1.0 / 2 ** (len(dim_mult) - 2) - # init block - self.conv1 = CausalConv3d(z_dim, dims[0], 3, padding=1) - - # middle blocks - self.middle = nn.Sequential(ResidualBlock(dims[0], dims[0], dropout), - AttentionBlock(dims[0]), - ResidualBlock(dims[0], dims[0], dropout)) - - # upsample blocks - upsamples = [] - for i, (in_dim, 
out_dim) in enumerate(zip(dims[:-1], dims[1:])): - t_up_flag = temperal_upsample[i] if i < len(temperal_upsample) else False - upsamples.append( - Up_ResidualBlock(in_dim=in_dim, - out_dim=out_dim, - dropout=dropout, - mult=num_res_blocks + 1, - temperal_upsample=t_up_flag, - up_flag=i != len(dim_mult) - 1)) - self.upsamples = nn.Sequential(*upsamples) - - # output blocks - self.head = nn.Sequential(RMS_norm(out_dim, images=False), nn.SiLU(), - CausalConv3d(out_dim, 12, 3, padding=1)) - - - def forward(self, x, feat_cache=None, feat_idx=[0], first_chunk=False): - if feat_cache is not None: - idx = feat_idx[0] - cache_x = x[:, :, -CACHE_T:, :, :].clone() - if cache_x.shape[2] < 2 and feat_cache[idx] is not None: - cache_x = torch.cat( - [ - feat_cache[idx][:, :, -1, :, :].unsqueeze(2).to(cache_x.device), - cache_x, - ], - dim=2, - ) - x = self.conv1(x, feat_cache[idx]) - feat_cache[idx] = cache_x - feat_idx[0] += 1 - else: - x = self.conv1(x) - - for layer in self.middle: - if check_is_instance(layer, ResidualBlock) and feat_cache is not None: - x = layer(x, feat_cache, feat_idx) - else: - x = layer(x) - - ## upsamples - for layer in self.upsamples: - if feat_cache is not None: - x = layer(x, feat_cache, feat_idx, first_chunk) - else: - x = layer(x) - - ## head - for layer in self.head: - if check_is_instance(layer, CausalConv3d) and feat_cache is not None: - idx = feat_idx[0] - cache_x = x[:, :, -CACHE_T:, :, :].clone() - if cache_x.shape[2] < 2 and feat_cache[idx] is not None: - cache_x = torch.cat( - [ - feat_cache[idx][:, :, -1, :, :] - .unsqueeze(2) - .to(cache_x.device), - cache_x, - ], - dim=2, - ) - x = layer(x, feat_cache[idx]) - feat_cache[idx] = cache_x - feat_idx[0] += 1 - else: - x = layer(x) - return x - - -def count_conv3d(model): - count = 0 - for m in model.modules(): - if isinstance(m, CausalConv3d): - count += 1 - return count - - -class VideoVAE_(nn.Module): - - def __init__(self, - dim=96, - z_dim=16, - dim_mult=[1, 2, 4, 4], - num_res_blocks=2, 
- attn_scales=[], - temperal_downsample=[False, True, True], - dropout=0.0): - super().__init__() - self.dim = dim - self.z_dim = z_dim - self.dim_mult = dim_mult - self.num_res_blocks = num_res_blocks - self.attn_scales = attn_scales - self.temperal_downsample = temperal_downsample - self.temperal_upsample = temperal_downsample[::-1] - - # modules - self.encoder = Encoder3d(dim, z_dim * 2, dim_mult, num_res_blocks, - attn_scales, self.temperal_downsample, dropout) - self.conv1 = CausalConv3d(z_dim * 2, z_dim * 2, 1) - self.conv2 = CausalConv3d(z_dim, z_dim, 1) - self.decoder = Decoder3d(dim, z_dim, dim_mult, num_res_blocks, - attn_scales, self.temperal_upsample, dropout) - - def forward(self, x): - mu, log_var = self.encode(x) - z = self.reparameterize(mu, log_var) - x_recon = self.decode(z) - return x_recon, mu, log_var - - def encode(self, x, scale): - self.clear_cache() - ## cache - t = x.shape[2] - iter_ = 1 + (t - 1) // 4 - - for i in range(iter_): - self._enc_conv_idx = [0] - if i == 0: - out = self.encoder(x[:, :, :1, :, :], - feat_cache=self._enc_feat_map, - feat_idx=self._enc_conv_idx) - else: - out_ = self.encoder(x[:, :, 1 + 4 * (i - 1):1 + 4 * i, :, :], - feat_cache=self._enc_feat_map, - feat_idx=self._enc_conv_idx) - out = torch.cat([out, out_], 2) - mu, log_var = self.conv1(out).chunk(2, dim=1) - if isinstance(scale[0], torch.Tensor): - scale = [s.to(dtype=mu.dtype, device=mu.device) for s in scale] - mu = (mu - scale[0].view(1, self.z_dim, 1, 1, 1)) * scale[1].view( - 1, self.z_dim, 1, 1, 1) - else: - scale = scale.to(dtype=mu.dtype, device=mu.device) - mu = (mu - scale[0]) * scale[1] - return mu - - def decode(self, z, scale): - self.clear_cache() - # z: [b,c,t,h,w] - if isinstance(scale[0], torch.Tensor): - scale = [s.to(dtype=z.dtype, device=z.device) for s in scale] - z = z / scale[1].view(1, self.z_dim, 1, 1, 1) + scale[0].view( - 1, self.z_dim, 1, 1, 1) - else: - scale = scale.to(dtype=z.dtype, device=z.device) - z = z / scale[1] + scale[0] - 
iter_ = z.shape[2] - x = self.conv2(z) - for i in range(iter_): - self._conv_idx = [0] - if i == 0: - out = self.decoder(x[:, :, i:i + 1, :, :], - feat_cache=self._feat_map, - feat_idx=self._conv_idx) - else: - out_ = self.decoder(x[:, :, i:i + 1, :, :], - feat_cache=self._feat_map, - feat_idx=self._conv_idx) - out = torch.cat([out, out_], 2) # may add tensor offload - return out - - def reparameterize(self, mu, log_var): - std = torch.exp(0.5 * log_var) - eps = torch.randn_like(std) - return eps * std + mu - - def sample(self, imgs, deterministic=False): - mu, log_var = self.encode(imgs) - if deterministic: - return mu - std = torch.exp(0.5 * log_var.clamp(-30.0, 20.0)) - return mu + std * torch.randn_like(std) - - def clear_cache(self): - self._conv_num = count_conv3d(self.decoder) - self._conv_idx = [0] - self._feat_map = [None] * self._conv_num - # cache encode - self._enc_conv_num = count_conv3d(self.encoder) - self._enc_conv_idx = [0] - self._enc_feat_map = [None] * self._enc_conv_num - - -class WanVideoVAE(nn.Module): - - def __init__(self, z_dim=16): - super().__init__() - - mean = [ - -0.7571, -0.7089, -0.9113, 0.1075, -0.1745, 0.9653, -0.1517, 1.5508, - 0.4134, -0.0715, 0.5517, -0.3632, -0.1922, -0.9497, 0.2503, -0.2921 - ] - std = [ - 2.8184, 1.4541, 2.3275, 2.6558, 1.2196, 1.7708, 2.6052, 2.0743, - 3.2687, 2.1526, 2.8652, 1.5579, 1.6382, 1.1253, 2.8251, 1.9160 - ] - self.mean = torch.tensor(mean) - self.std = torch.tensor(std) - self.scale = [self.mean, 1.0 / self.std] - - # init model - self.model = VideoVAE_(z_dim=z_dim).eval().requires_grad_(False) - self.upsampling_factor = 8 - self.z_dim = z_dim - - - def build_1d_mask(self, length, left_bound, right_bound, border_width): - x = torch.ones((length,)) - if not left_bound: - x[:border_width] = (torch.arange(border_width) + 1) / border_width - if not right_bound: - x[-border_width:] = torch.flip((torch.arange(border_width) + 1) / border_width, dims=(0,)) - return x - - - def build_mask(self, data, 
    def tiled_decode(self, hidden_states, device, tile_size, tile_stride):
        """Decode latents to pixels in overlapping spatial tiles.

        Each tile is decoded on `device`, then blended into a CPU
        accumulation buffer using a linear feathering mask so overlapping
        regions average smoothly. This bounds peak VRAM at the cost of
        re-decoding the overlap bands.

        Args:
            hidden_states: latent tensor, unpacked as (B, C, T, H, W) with
                H/W in latent units.
            device: computation device for the VAE; accumulation stays on CPU.
            tile_size / tile_stride: (h, w) tile geometry in latent units.

        Returns:
            Decoded video clamped to [-1, 1].
        """
        _, _, T, H, W = hidden_states.shape
        size_h, size_w = tile_size
        stride_h, stride_w = tile_stride

        # Split tasks: enumerate tile origins, skipping a row/column when the
        # previous stride already covered the remainder of the tensor.
        tasks = []
        for h in range(0, H, stride_h):
            if (h-stride_h >= 0 and h-stride_h+size_h >= H): continue
            for w in range(0, W, stride_w):
                if (w-stride_w >= 0 and w-stride_w+size_w >= W): continue
                h_, w_ = h + size_h, w + size_w
                tasks.append((h, h_, w, w_))

        data_device = "cpu"
        computation_device = device

        # Temporal length of the decoded clip: the causal VAE maps the first
        # latent frame to 1 output frame and each later one to 4 (T*4 - 3),
        # mirroring the 1 + 4k chunking used in encode.
        out_T = T * 4 - 3
        weight = torch.zeros((1, 1, out_T, H * self.upsampling_factor, W * self.upsampling_factor), dtype=hidden_states.dtype, device=data_device)
        values = torch.zeros((1, 3, out_T, H * self.upsampling_factor, W * self.upsampling_factor), dtype=hidden_states.dtype, device=data_device)

        for h, h_, w, w_ in tqdm(tasks, desc="VAE decoding"):
            hidden_states_batch = hidden_states[:, :, :, h:h_, w:w_].to(computation_device)
            hidden_states_batch = self.model.decode(hidden_states_batch, self.scale).to(data_device)

            # Feathering mask: full weight where the tile touches the global
            # boundary, linear ramp across the overlap band otherwise.
            mask = self.build_mask(
                hidden_states_batch,
                is_bound=(h==0, h_>=H, w==0, w_>=W),
                border_width=((size_h - stride_h) * self.upsampling_factor, (size_w - stride_w) * self.upsampling_factor)
            ).to(dtype=hidden_states.dtype, device=data_device)

            target_h = h * self.upsampling_factor
            target_w = w * self.upsampling_factor
            values[
                :,
                :,
                :,
                target_h:target_h + hidden_states_batch.shape[3],
                target_w:target_w + hidden_states_batch.shape[4],
            ] += hidden_states_batch * mask
            weight[
                :,
                :,
                :,
                target_h: target_h + hidden_states_batch.shape[3],
                target_w: target_w + hidden_states_batch.shape[4],
            ] += mask
        # Weighted average over overlapping contributions.
        values = values / weight
        values = values.clamp_(-1, 1)
        return values
return x - - - def single_decode(self, hidden_state, device): - hidden_state = hidden_state.to(device) - video = self.model.decode(hidden_state, self.scale) - return video.clamp_(-1, 1) - - - def encode(self, videos, device, tiled=False, tile_size=(34, 34), tile_stride=(18, 16)): - videos = [video.to("cpu") for video in videos] - hidden_states = [] - for video in videos: - video = video.unsqueeze(0) - if tiled: - tile_size = (tile_size[0] * self.upsampling_factor, tile_size[1] * self.upsampling_factor) - tile_stride = (tile_stride[0] * self.upsampling_factor, tile_stride[1] * self.upsampling_factor) - hidden_state = self.tiled_encode(video, device, tile_size, tile_stride) - else: - hidden_state = self.single_encode(video, device) - hidden_state = hidden_state.squeeze(0) - hidden_states.append(hidden_state) - hidden_states = torch.stack(hidden_states) - return hidden_states - - - def decode(self, hidden_states, device, tiled=False, tile_size=(34, 34), tile_stride=(18, 16)): - hidden_states = [hidden_state.to("cpu") for hidden_state in hidden_states] - videos = [] - for hidden_state in hidden_states: - hidden_state = hidden_state.unsqueeze(0) - if tiled: - video = self.tiled_decode(hidden_state, device, tile_size, tile_stride) - else: - video = self.single_decode(hidden_state, device) - video = video.squeeze(0) - videos.append(video) - videos = torch.stack(videos) - return videos - - - @staticmethod - def state_dict_converter(): - return WanVideoVAEStateDictConverter() - - -class WanVideoVAEStateDictConverter: - - def __init__(self): - pass - - def from_civitai(self, state_dict): - state_dict_ = {} - if 'model_state' in state_dict: - state_dict = state_dict['model_state'] - for name in state_dict: - state_dict_['model.' 
    def encode(self, x, scale):
        """Encode a pixel video into normalized latent means (38-channel VAE).

        Same causal chunked scheme as `VideoVAE_.encode`, with two
        differences: the input is first spatially patchified (2x2 -> channel)
        and the feature cache is cleared again after encoding.

        Args:
            x: video tensor; time is axis 2.
            scale: `(mean, inv_std)` pair (tensors or scalars) used to
                normalize the latent mean.

        Returns:
            The normalized latent mean `mu`; `log_var` is discarded.
        """
        self.clear_cache()
        # Fold 2x2 spatial patches into channels; halves H/W before encoding.
        x = patchify(x, patch_size=2)
        t = x.shape[2]
        # One chunk for frame 0, then one per 4 subsequent frames.
        iter_ = 1 + (t - 1) // 4
        for i in range(iter_):
            # Reset the cache cursor; cached features persist across chunks.
            self._enc_conv_idx = [0]
            if i == 0:
                out = self.encoder(x[:, :, :1, :, :],
                                   feat_cache=self._enc_feat_map,
                                   feat_idx=self._enc_conv_idx)
            else:
                out_ = self.encoder(x[:, :, 1 + 4 * (i - 1):1 + 4 * i, :, :],
                                    feat_cache=self._enc_feat_map,
                                    feat_idx=self._enc_conv_idx)
                out = torch.cat([out, out_], 2)
        # conv1 predicts (mu, log_var) stacked along the channel axis.
        mu, log_var = self.conv1(out).chunk(2, dim=1)
        if isinstance(scale[0], torch.Tensor):
            scale = [s.to(dtype=mu.dtype, device=mu.device) for s in scale]
            mu = (mu - scale[0].view(1, self.z_dim, 1, 1, 1)) * scale[1].view(
                1, self.z_dim, 1, 1, 1)
        else:
            scale = scale.to(dtype=mu.dtype, device=mu.device)
            mu = (mu - scale[0]) * scale[1]
        # Unlike the base class, drop the cache after use.
        self.clear_cache()
        return mu
    def __init__(self, z_dim=48, dim=160):
        """Wrapper for the 48-channel video VAE variant.

        Deliberately calls `super(WanVideoVAE, self).__init__()` — i.e. skips
        `WanVideoVAE.__init__` (which would build the 16-channel model) and
        initializes `nn.Module` directly; every attribute is re-created here.
        """
        super(WanVideoVAE, self).__init__()

        # Per-channel latent statistics for the 48-dim latent space.
        mean = [
            -0.2289, -0.0052, -0.1323, -0.2339, -0.2799, 0.0174, 0.1838, 0.1557,
            -0.1382, 0.0542, 0.2813, 0.0891, 0.1570, -0.0098, 0.0375, -0.1825,
            -0.2246, -0.1207, -0.0698, 0.5109, 0.2665, -0.2108, -0.2158, 0.2502,
            -0.2055, -0.0322, 0.1109, 0.1567, -0.0729, 0.0899, -0.2799, -0.1230,
            -0.0313, -0.1649, 0.0117, 0.0723, -0.2839, -0.2083, -0.0520, 0.3748,
            0.0152, 0.1957, 0.1433, -0.2944, 0.3573, -0.0548, -0.1681, -0.0667
        ]
        std = [
            0.4765, 1.0364, 0.4514, 1.1677, 0.5313, 0.4990, 0.4818, 0.5013,
            0.8158, 1.0344, 0.5894, 1.0901, 0.6885, 0.6165, 0.8454, 0.4978,
            0.5759, 0.3523, 0.7135, 0.6804, 0.5833, 1.4146, 0.8986, 0.5659,
            0.7069, 0.5338, 0.4889, 0.4917, 0.4069, 0.4999, 0.6866, 0.4093,
            0.5709, 0.6065, 0.6415, 0.4944, 0.5726, 1.2042, 0.5458, 1.6887,
            0.3971, 1.0600, 0.3943, 0.5537, 0.5444, 0.4089, 0.7468, 0.7744
        ]
        self.mean = torch.tensor(mean)
        self.std = torch.tensor(std)
        # (mean, 1/std): consumed by encode/decode for latent normalization.
        self.scale = [self.mean, 1.0 / self.std]

        # init model
        self.model = VideoVAE38_(z_dim=z_dim, dim=dim).eval().requires_grad_(False)
        # Spatial compression factor is 16 here (vs 8 for the base class) —
        # presumably the extra 2x comes from the patchify step; confirm.
        self.upsampling_factor = 16
        self.z_dim = z_dim
def get_sample_indices(original_fps, total_frames, target_fps, num_sample, fixed_start=None):
    """Pick `num_sample` frame indices from a video, resampled at `target_fps`.

    The sampled window covers `num_sample / target_fps` seconds. Its start is
    `fixed_start` when given (and non-negative), otherwise a uniformly random
    valid position.

    Args:
        original_fps: native frame rate of the source video.
        total_frames: number of frames in the source video.
        target_fps: desired sampling rate.
        num_sample: number of indices to return.
        fixed_start: optional fixed starting frame; negative values fall back
            to random placement.

    Returns:
        Integer ndarray of frame indices, clipped to [0, total_frames - 1].

    Raises:
        ValueError: if the requested window does not fit in the video.
    """
    duration = num_sample / target_fps
    span_frames = int(np.ceil(duration * original_fps))
    if duration > total_frames / original_fps:
        raise ValueError("required_duration must be less than video length")

    if fixed_start is not None and fixed_start >= 0:
        first_frame = fixed_start
    else:
        latest_start = total_frames - span_frames
        if latest_start < 0:
            raise ValueError("video length is too short")
        first_frame = np.random.randint(0, latest_start + 1)

    t0 = first_frame / original_fps
    # Evenly spaced sample times over [t0, t0 + duration), mapped back to
    # source-frame indices.
    sample_times = np.linspace(t0, t0 + duration, num_sample, endpoint=False)
    indices = np.round(sample_times * original_fps).astype(int)
    return np.clip(indices, 0, total_frames - 1)
2, 2], - "ctc_loss_reduction": "mean", - "ctc_zero_infinity": True, - "do_stable_layer_norm": True, - "eos_token_id": 2, - "feat_extract_activation": "gelu", - "feat_extract_dropout": 0.0, - "feat_extract_norm": "layer", - "feat_proj_dropout": 0.05, - "final_dropout": 0.0, - "hidden_act": "gelu", - "hidden_dropout": 0.05, - "hidden_size": 1024, - "initializer_range": 0.02, - "intermediate_size": 4096, - "layer_norm_eps": 1e-05, - "layerdrop": 0.05, - "mask_channel_length": 10, - "mask_channel_min_space": 1, - "mask_channel_other": 0.0, - "mask_channel_prob": 0.0, - "mask_channel_selection": "static", - "mask_feature_length": 10, - "mask_feature_prob": 0.0, - "mask_time_length": 10, - "mask_time_min_space": 1, - "mask_time_other": 0.0, - "mask_time_prob": 0.05, - "mask_time_selection": "static", - "model_type": "wav2vec2", - "num_attention_heads": 16, - "num_conv_pos_embedding_groups": 16, - "num_conv_pos_embeddings": 128, - "num_feat_extract_layers": 7, - "num_hidden_layers": 24, - "pad_token_id": 0, - "transformers_version": "4.7.0.dev0", - "vocab_size": 33 - } - self.model = Wav2Vec2ForCTC(Wav2Vec2Config(**config)) - self.video_rate = 30 - - def extract_audio_feat(self, input_audio, sample_rate, processor, return_all_layers=False, dtype=torch.float32, device='cpu'): - input_values = processor(input_audio, sampling_rate=sample_rate, return_tensors="pt").input_values.to(dtype=dtype, device=device) - - # retrieve logits & take argmax - res = self.model(input_values, output_hidden_states=True) - if return_all_layers: - feat = torch.cat(res.hidden_states) - else: - feat = res.hidden_states[-1] - feat = linear_interpolation(feat, input_fps=50, output_fps=self.video_rate) - return feat - - def get_audio_embed_bucket(self, audio_embed, stride=2, batch_frames=12, m=2): - num_layers, audio_frame_num, audio_dim = audio_embed.shape - - if num_layers > 1: - return_all_layers = True - else: - return_all_layers = False - - min_batch_num = int(audio_frame_num / (batch_frames * 
    def get_audio_embed_bucket_fps(self, audio_embed, fps=16, batch_frames=81, m=0):
        """Resample per-frame audio embeddings into fixed-size video buckets.

        For each of `min_batch_num * batch_frames` video frames (at `fps`),
        gather a window of 2m+1 audio embeddings centered on the matching
        audio frame (clamped at clip boundaries); frames past the end of the
        audio get zero embeddings.

        Args:
            audio_embed: tensor of shape (num_layers, audio_frame_num, dim).
            fps: target video frame rate.
            batch_frames: frames per inference batch/bucket.
            m: half-width of the audio context window per video frame.

        Returns:
            (bucketed embeddings of shape (bucket_num, ...), min_batch_num).
        """
        num_layers, audio_frame_num, audio_dim = audio_embed.shape

        # More than one "layer" means the caller stacked all wav2vec2 hidden
        # layers; keep the layer axis in that case.
        if num_layers > 1:
            return_all_layers = True
        else:
            return_all_layers = False

        # Ratio of audio-feature frames (at self.video_rate) per video frame.
        scale = self.video_rate / fps

        min_batch_num = int(audio_frame_num / (batch_frames * scale)) + 1

        bucket_num = min_batch_num * batch_frames
        # Virtual padding so the index schedule covers the final partial batch.
        padd_audio_num = math.ceil(min_batch_num * batch_frames / fps * self.video_rate) - audio_frame_num
        batch_idx = get_sample_indices(
            original_fps=self.video_rate, total_frames=audio_frame_num + padd_audio_num, target_fps=fps, num_sample=bucket_num, fixed_start=0
        )
        batch_audio_eb = []
        audio_sample_stride = int(self.video_rate / fps)
        for bi in batch_idx:
            if bi < audio_frame_num:

                # Window of 2m+1 audio frames around bi, clamped into range.
                chosen_idx = list(range(bi - m * audio_sample_stride, bi + (m + 1) * audio_sample_stride, audio_sample_stride))
                chosen_idx = [0 if c < 0 else c for c in chosen_idx]
                chosen_idx = [audio_frame_num - 1 if c >= audio_frame_num else c for c in chosen_idx]

                if return_all_layers:
                    frame_audio_embed = audio_embed[:, chosen_idx].flatten(start_dim=-2, end_dim=-1)
                else:
                    frame_audio_embed = audio_embed[0][chosen_idx].flatten()
            else:
                # Past the end of the audio: zero embedding of matching shape.
                frame_audio_embed = \
                    torch.zeros([audio_dim * (2 * m + 1)], device=audio_embed.device) if not return_all_layers \
                    else torch.zeros([num_layers, audio_dim * (2 * m + 1)], device=audio_embed.device)
            batch_audio_eb.append(frame_audio_embed)
        batch_audio_eb = torch.cat([c.unsqueeze(0) for c in batch_audio_eb], dim=0)

        return batch_audio_eb, min_batch_num
    def __init__(self, device="cuda", torch_dtype=torch.float16, height_division_factor=64, width_division_factor=64):
        """Shared base for diffusion pipelines.

        Args:
            device: target device for model execution.
            torch_dtype: dtype used for model weights and activations.
            height_division_factor / width_division_factor: generated image
                sizes are rounded up to multiples of these (see
                `check_resize_height_width`).
        """
        super().__init__()
        self.device = device
        self.torch_dtype = torch_dtype
        self.height_division_factor = height_division_factor
        self.width_division_factor = width_division_factor
        # CPU offloading is opt-in via enable_cpu_offload().
        self.cpu_offload = False
        # Subclasses register the attribute names of their models here so
        # load_models_to_device() can shuttle them between CPU and device.
        self.model_names = []
We round it up to {height}.") - if width % self.width_division_factor != 0: - width = (width + self.width_division_factor - 1) // self.width_division_factor * self.width_division_factor - print(f"The width cannot be evenly divided by {self.width_division_factor}. We round it up to {width}.") - return height, width - - - def preprocess_image(self, image): - image = torch.Tensor(np.array(image, dtype=np.float32) * (2 / 255) - 1).permute(2, 0, 1).unsqueeze(0) - return image - - - def preprocess_images(self, images): - return [self.preprocess_image(image) for image in images] - - - def vae_output_to_image(self, vae_output): - image = vae_output[0].cpu().float().permute(1, 2, 0).numpy() - image = Image.fromarray(((image / 2 + 0.5).clip(0, 1) * 255).astype("uint8")) - return image - - - def vae_output_to_video(self, vae_output): - video = vae_output.cpu().permute(1, 2, 0).numpy() - video = [Image.fromarray(((image / 2 + 0.5).clip(0, 1) * 255).astype("uint8")) for image in video] - return video - - - def merge_latents(self, value, latents, masks, scales, blur_kernel_size=33, blur_sigma=10.0): - if len(latents) > 0: - blur = GaussianBlur(kernel_size=blur_kernel_size, sigma=blur_sigma) - height, width = value.shape[-2:] - weight = torch.ones_like(value) - for latent, mask, scale in zip(latents, masks, scales): - mask = self.preprocess_image(mask.resize((width, height))).mean(dim=1, keepdim=True) > 0 - mask = mask.repeat(1, latent.shape[1], 1, 1).to(dtype=latent.dtype, device=latent.device) - mask = blur(mask) - value += latent * mask * scale - weight += mask * scale - value /= weight - return value - - - def control_noise_via_local_prompts(self, prompt_emb_global, prompt_emb_locals, masks, mask_scales, inference_callback, special_kwargs=None, special_local_kwargs_list=None): - if special_kwargs is None: - noise_pred_global = inference_callback(prompt_emb_global) - else: - noise_pred_global = inference_callback(prompt_emb_global, special_kwargs) - if 
    def load_models_to_device(self, loadmodel_names=[]):
        """Ensure exactly the models named in `loadmodel_names` live on `self.device`.

        No-op unless CPU offloading was enabled via `enable_cpu_offload()`.
        Models with fine-grained VRAM management expose per-module
        `offload`/`onload` hooks and are toggled module-by-module; everything
        else is moved wholesale with `.cpu()` / `.to(device)`.

        NOTE(review): `loadmodel_names=[]` is a mutable default; it is only
        read here, never mutated, so it is harmless — but worth replacing
        with `None` eventually.
        """
        # only load models to device if cpu_offload is enabled
        if not self.cpu_offload:
            return
        # offload the unneeded models to cpu
        for model_name in self.model_names:
            if model_name not in loadmodel_names:
                model = getattr(self, model_name)
                if model is not None:
                    if hasattr(model, "vram_management_enabled") and model.vram_management_enabled:
                        for module in model.modules():
                            if hasattr(module, "offload"):
                                module.offload()
                    else:
                        model.cpu()
        # load the needed models to device
        for model_name in loadmodel_names:
            model = getattr(self, model_name)
            if model is not None:
                if hasattr(model, "vram_management_enabled") and model.vram_management_enabled:
                    for module in model.modules():
                        if hasattr(module, "onload"):
                            module.onload()
                else:
                    model.to(self.device)
        # release cached CUDA blocks freed by the offloads above
        torch.cuda.empty_cache()
None if seed is None else torch.Generator(device).manual_seed(seed) - noise = torch.randn(shape, generator=generator, device=device, dtype=dtype) - return noise diff --git a/diffsynth/pipelines/cog_video.py b/diffsynth/pipelines/cog_video.py deleted file mode 100644 index f42d295..0000000 --- a/diffsynth/pipelines/cog_video.py +++ /dev/null @@ -1,135 +0,0 @@ -from ..models import ModelManager, FluxTextEncoder2, CogDiT, CogVAEEncoder, CogVAEDecoder -from ..prompters import CogPrompter -from ..schedulers import EnhancedDDIMScheduler -from .base import BasePipeline -import torch -from tqdm import tqdm -from PIL import Image -import numpy as np -from einops import rearrange - - - -class CogVideoPipeline(BasePipeline): - - def __init__(self, device="cuda", torch_dtype=torch.float16): - super().__init__(device=device, torch_dtype=torch_dtype, height_division_factor=16, width_division_factor=16) - self.scheduler = EnhancedDDIMScheduler(rescale_zero_terminal_snr=True, prediction_type="v_prediction") - self.prompter = CogPrompter() - # models - self.text_encoder: FluxTextEncoder2 = None - self.dit: CogDiT = None - self.vae_encoder: CogVAEEncoder = None - self.vae_decoder: CogVAEDecoder = None - - - def fetch_models(self, model_manager: ModelManager, prompt_refiner_classes=[]): - self.text_encoder = model_manager.fetch_model("flux_text_encoder_2") - self.dit = model_manager.fetch_model("cog_dit") - self.vae_encoder = model_manager.fetch_model("cog_vae_encoder") - self.vae_decoder = model_manager.fetch_model("cog_vae_decoder") - self.prompter.fetch_models(self.text_encoder) - self.prompter.load_prompt_refiners(model_manager, prompt_refiner_classes) - - - @staticmethod - def from_model_manager(model_manager: ModelManager, prompt_refiner_classes=[]): - pipe = CogVideoPipeline( - device=model_manager.device, - torch_dtype=model_manager.torch_dtype - ) - pipe.fetch_models(model_manager, prompt_refiner_classes) - return pipe - - - def tensor2video(self, frames): - frames = 
rearrange(frames, "C T H W -> T H W C") - frames = ((frames.float() + 1) * 127.5).clip(0, 255).cpu().numpy().astype(np.uint8) - frames = [Image.fromarray(frame) for frame in frames] - return frames - - - def encode_prompt(self, prompt, positive=True): - prompt_emb = self.prompter.encode_prompt(prompt, device=self.device, positive=positive) - return {"prompt_emb": prompt_emb} - - - def prepare_extra_input(self, latents): - return {"image_rotary_emb": self.dit.prepare_rotary_positional_embeddings(latents.shape[3], latents.shape[4], latents.shape[2], device=self.device)} - - - @torch.no_grad() - def __call__( - self, - prompt, - negative_prompt="", - input_video=None, - cfg_scale=7.0, - denoising_strength=1.0, - num_frames=49, - height=480, - width=720, - num_inference_steps=20, - tiled=False, - tile_size=(60, 90), - tile_stride=(30, 45), - seed=None, - progress_bar_cmd=tqdm, - progress_bar_st=None, - ): - height, width = self.check_resize_height_width(height, width) - - # Tiler parameters - tiler_kwargs = {"tiled": tiled, "tile_size": tile_size, "tile_stride": tile_stride} - - # Prepare scheduler - self.scheduler.set_timesteps(num_inference_steps, denoising_strength=denoising_strength) - - # Prepare latent tensors - noise = self.generate_noise((1, 16, num_frames // 4 + 1, height//8, width//8), seed=seed, device="cpu", dtype=self.torch_dtype) - - if denoising_strength == 1.0: - latents = noise.clone() - else: - input_video = self.preprocess_images(input_video) - input_video = torch.stack(input_video, dim=2) - latents = self.vae_encoder.encode_video(input_video, **tiler_kwargs, progress_bar=progress_bar_cmd).to(dtype=self.torch_dtype) - latents = self.scheduler.add_noise(latents, noise, self.scheduler.timesteps[0]) - if not tiled: latents = latents.to(self.device) - - # Encode prompt - prompt_emb_posi = self.encode_prompt(prompt, positive=True) - if cfg_scale != 1.0: - prompt_emb_nega = self.encode_prompt(negative_prompt, positive=False) - - # Extra input - extra_input 
def lets_dance(
    unet: SDUNet,
    motion_modules: SDMotionModel = None,
    controlnet: MultiControlNetManager = None,
    sample = None,
    timestep = None,
    encoder_hidden_states = None,
    ipadapter_kwargs_list = {},
    controlnet_frames = None,
    unet_batch_size = 1,
    controlnet_batch_size = 1,
    cross_frame_attention = False,
    tiled=False,
    tile_size=64,
    tile_stride=32,
    device = "cuda",
    vram_limit_level = 0,
):
    """Run one SD UNet denoising pass with optional AnimateDiff motion
    modules and ControlNet residuals.

    `sample`'s leading dim is the (video) frame batch; `unet_batch_size` and
    `controlnet_batch_size` bound how many frames are pushed through the UNet
    blocks / ControlNet at once. When `vram_limit_level >= 1`, intermediate
    residuals are parked on CPU and moved back to `device` on demand.

    NOTE(review): `ipadapter_kwargs_list={}` is a mutable default; it is only
    read via `.get`, so it is harmless as written.

    Returns:
        The predicted noise tensor from the UNet output head.
    """
    # 0. Text embedding alignment (only for video processing)
    if encoder_hidden_states.shape[0] != sample.shape[0]:
        encoder_hidden_states = encoder_hidden_states.repeat(sample.shape[0], 1, 1, 1)

    # 1. ControlNet
    # This part will be repeated on overlapping frames if animatediff_batch_size > animatediff_stride.
    # I leave it here because I intend to do something interesting on the ControlNets.
    controlnet_insert_block_id = 30
    if controlnet is not None and controlnet_frames is not None:
        res_stacks = []
        # process controlnet frames with batch
        for batch_id in range(0, sample.shape[0], controlnet_batch_size):
            batch_id_ = min(batch_id + controlnet_batch_size, sample.shape[0])
            res_stack = controlnet(
                sample[batch_id: batch_id_],
                timestep,
                encoder_hidden_states[batch_id: batch_id_],
                controlnet_frames[:, batch_id: batch_id_],
                tiled=tiled, tile_size=tile_size, tile_stride=tile_stride
            )
            if vram_limit_level >= 1:
                res_stack = [res.cpu() for res in res_stack]
            res_stacks.append(res_stack)
        # concat the residual across frame batches, level by level
        additional_res_stack = []
        for i in range(len(res_stacks[0])):
            res = torch.concat([res_stack[i] for res_stack in res_stacks], dim=0)
            additional_res_stack.append(res)
    else:
        additional_res_stack = None

    # 2. time
    time_emb = unet.time_proj(timestep).to(sample.dtype)
    time_emb = unet.time_embedding(time_emb)

    # 3. pre-process
    height, width = sample.shape[2], sample.shape[3]
    hidden_states = unet.conv_in(sample)
    text_emb = encoder_hidden_states
    res_stack = [hidden_states.cpu() if vram_limit_level>=1 else hidden_states]

    # 4. blocks
    for block_id, block in enumerate(unet.blocks):
        # 4.1 UNet: Push/Pop blocks manage the skip-connection stack; other
        # blocks run the actual computation in frame batches.
        if isinstance(block, PushBlock):
            hidden_states, time_emb, text_emb, res_stack = block(hidden_states, time_emb, text_emb, res_stack)
            if vram_limit_level>=1:
                res_stack[-1] = res_stack[-1].cpu()
        elif isinstance(block, PopBlock):
            if vram_limit_level>=1:
                res_stack[-1] = res_stack[-1].to(device)
            hidden_states, time_emb, text_emb, res_stack = block(hidden_states, time_emb, text_emb, res_stack)
        else:
            hidden_states_input = hidden_states
            hidden_states_output = []
            for batch_id in range(0, sample.shape[0], unet_batch_size):
                batch_id_ = min(batch_id + unet_batch_size, sample.shape[0])
                hidden_states, _, _, _ = block(
                    hidden_states_input[batch_id: batch_id_],
                    time_emb,
                    text_emb[batch_id: batch_id_],
                    res_stack,
                    cross_frame_attention=cross_frame_attention,
                    ipadapter_kwargs_list=ipadapter_kwargs_list.get(block_id, {}),
                    tiled=tiled, tile_size=tile_size, tile_stride=tile_stride
                )
                hidden_states_output.append(hidden_states)
            hidden_states = torch.concat(hidden_states_output, dim=0)
        # 4.2 AnimateDiff: motion modules hook specific block ids.
        if motion_modules is not None:
            if block_id in motion_modules.call_block_id:
                motion_module_id = motion_modules.call_block_id[block_id]
                hidden_states, time_emb, text_emb, res_stack = motion_modules.motion_modules[motion_module_id](
                    hidden_states, time_emb, text_emb, res_stack,
                    batch_size=1
                )
        # 4.3 ControlNet: at block 30 the last residual is added to the
        # hidden states and the remaining ones to the skip stack.
        if block_id == controlnet_insert_block_id and additional_res_stack is not None:
            hidden_states += additional_res_stack.pop().to(device)
            if vram_limit_level>=1:
                res_stack = [(res.to(device) + additional_res.to(device)).cpu() for res, additional_res in zip(res_stack, additional_res_stack)]
            else:
                res_stack = [res + additional_res for res, additional_res in zip(res_stack, additional_res_stack)]

    # 5. output
    hidden_states = unet.conv_norm_out(hidden_states)
    hidden_states = unet.conv_act(hidden_states)
    hidden_states = unet.conv_out(hidden_states)

    return hidden_states
time - t_emb = unet.time_proj(timestep).to(sample.dtype) - t_emb = unet.time_embedding(t_emb) - - time_embeds = unet.add_time_proj(add_time_id) - time_embeds = time_embeds.reshape((add_text_embeds.shape[0], -1)) - add_embeds = torch.concat([add_text_embeds, time_embeds], dim=-1) - add_embeds = add_embeds.to(sample.dtype) - add_embeds = unet.add_time_embedding(add_embeds) - - time_emb = t_emb + add_embeds - - # 3. pre-process - height, width = sample.shape[2], sample.shape[3] - hidden_states = unet.conv_in(sample) - text_emb = encoder_hidden_states if unet.text_intermediate_proj is None else unet.text_intermediate_proj(encoder_hidden_states) - res_stack = [hidden_states] - - # 4. blocks - for block_id, block in enumerate(unet.blocks): - # 4.1 UNet - if isinstance(block, PushBlock): - hidden_states, time_emb, text_emb, res_stack = block(hidden_states, time_emb, text_emb, res_stack) - if vram_limit_level>=1: - res_stack[-1] = res_stack[-1].cpu() - elif isinstance(block, PopBlock): - if vram_limit_level>=1: - res_stack[-1] = res_stack[-1].to(device) - hidden_states, time_emb, text_emb, res_stack = block(hidden_states, time_emb, text_emb, res_stack) - else: - hidden_states_input = hidden_states - hidden_states_output = [] - for batch_id in range(0, sample.shape[0], unet_batch_size): - batch_id_ = min(batch_id + unet_batch_size, sample.shape[0]) - hidden_states, _, _, _ = block( - hidden_states_input[batch_id: batch_id_], - time_emb[batch_id: batch_id_], - text_emb[batch_id: batch_id_], - res_stack, - cross_frame_attention=cross_frame_attention, - ipadapter_kwargs_list=ipadapter_kwargs_list.get(block_id, {}), - tiled=tiled, tile_size=tile_size, tile_stride=tile_stride, - ) - hidden_states_output.append(hidden_states) - hidden_states = torch.concat(hidden_states_output, dim=0) - # 4.2 AnimateDiff - if motion_modules is not None: - if block_id in motion_modules.call_block_id: - motion_module_id = motion_modules.call_block_id[block_id] - hidden_states, time_emb, text_emb, 
res_stack = motion_modules.motion_modules[motion_module_id]( - hidden_states, time_emb, text_emb, res_stack, - batch_size=1 - ) - # 4.3 ControlNet - if block_id == controlnet_insert_block_id and additional_res_stack is not None: - hidden_states += additional_res_stack.pop().to(device) - res_stack = [res + additional_res for res, additional_res in zip(res_stack, additional_res_stack)] - - # 5. output - hidden_states = unet.conv_norm_out(hidden_states) - hidden_states = unet.conv_act(hidden_states) - hidden_states = unet.conv_out(hidden_states) - - return hidden_states \ No newline at end of file diff --git a/diffsynth/pipelines/flux_image.py b/diffsynth/pipelines/flux_image.py deleted file mode 100644 index 55a84c0..0000000 --- a/diffsynth/pipelines/flux_image.py +++ /dev/null @@ -1,823 +0,0 @@ -from ..models import ModelManager, FluxDiT, SD3TextEncoder1, FluxTextEncoder2, FluxVAEDecoder, FluxVAEEncoder, FluxIpAdapter -from ..models.step1x_connector import Qwen2Connector -from ..controlnets import FluxMultiControlNetManager, ControlNetUnit, ControlNetConfigUnit, Annotator -from ..prompters import FluxPrompter -from ..schedulers import FlowMatchScheduler -from .base import BasePipeline -from typing import List -import torch -from tqdm import tqdm -import numpy as np -from PIL import Image -from ..models.tiler import FastTileWorker -from transformers import SiglipVisionModel -from copy import deepcopy -from transformers.models.t5.modeling_t5 import T5LayerNorm, T5DenseActDense, T5DenseGatedActDense -from ..models.flux_dit import RMSNorm -from ..vram_management import enable_vram_management, AutoWrappedModule, AutoWrappedLinear - - -class FluxImagePipeline(BasePipeline): - - def __init__(self, device="cuda", torch_dtype=torch.float16): - super().__init__(device=device, torch_dtype=torch_dtype, height_division_factor=16, width_division_factor=16) - self.scheduler = FlowMatchScheduler() - self.prompter = FluxPrompter() - # models - self.text_encoder_1: SD3TextEncoder1 = 
None - self.text_encoder_2: FluxTextEncoder2 = None - self.dit: FluxDiT = None - self.vae_decoder: FluxVAEDecoder = None - self.vae_encoder: FluxVAEEncoder = None - self.controlnet: FluxMultiControlNetManager = None - self.ipadapter: FluxIpAdapter = None - self.ipadapter_image_encoder: SiglipVisionModel = None - self.infinityou_processor: InfinitYou = None - self.qwenvl = None - self.step1x_connector: Qwen2Connector = None - self.model_names = ['text_encoder_1', 'text_encoder_2', 'dit', 'vae_decoder', 'vae_encoder', 'controlnet', 'ipadapter', 'ipadapter_image_encoder', 'qwenvl', 'step1x_connector'] - - - def enable_vram_management(self, num_persistent_param_in_dit=None): - if self.text_encoder_1 is not None: - dtype = next(iter(self.text_encoder_1.parameters())).dtype - enable_vram_management( - self.text_encoder_1, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Embedding: AutoWrappedModule, - torch.nn.LayerNorm: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - ) - if self.text_encoder_2 is not None: - dtype = next(iter(self.text_encoder_2.parameters())).dtype - enable_vram_management( - self.text_encoder_2, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Embedding: AutoWrappedModule, - T5LayerNorm: AutoWrappedModule, - T5DenseActDense: AutoWrappedModule, - T5DenseGatedActDense: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - ) - if self.dit is not None: - dtype = next(iter(self.dit.parameters())).dtype - enable_vram_management( - self.dit, - module_map = { - RMSNorm: AutoWrappedModule, - torch.nn.Linear: AutoWrappedLinear, - }, - module_config = dict( - offload_dtype=dtype, - 
offload_device="cpu", - onload_dtype=dtype, - onload_device="cuda", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - max_num_param=num_persistent_param_in_dit, - overflow_module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - ) - if self.vae_decoder is not None: - dtype = next(iter(self.vae_decoder.parameters())).dtype - enable_vram_management( - self.vae_decoder, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Conv2d: AutoWrappedModule, - torch.nn.GroupNorm: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - ) - if self.vae_encoder is not None: - dtype = next(iter(self.vae_encoder.parameters())).dtype - enable_vram_management( - self.vae_encoder, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Conv2d: AutoWrappedModule, - torch.nn.GroupNorm: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - ) - self.enable_cpu_offload() - - - def denoising_model(self): - return self.dit - - - def fetch_models(self, model_manager: ModelManager, controlnet_config_units: List[ControlNetConfigUnit]=[], prompt_refiner_classes=[], prompt_extender_classes=[]): - self.text_encoder_1 = model_manager.fetch_model("sd3_text_encoder_1") - self.text_encoder_2 = model_manager.fetch_model("flux_text_encoder_2") - self.dit = model_manager.fetch_model("flux_dit") - self.vae_decoder = model_manager.fetch_model("flux_vae_decoder") - self.vae_encoder = model_manager.fetch_model("flux_vae_encoder") - self.prompter.fetch_models(self.text_encoder_1, self.text_encoder_2) - 
self.prompter.load_prompt_refiners(model_manager, prompt_refiner_classes) - self.prompter.load_prompt_extenders(model_manager, prompt_extender_classes) - - # ControlNets - controlnet_units = [] - for config in controlnet_config_units: - controlnet_unit = ControlNetUnit( - Annotator(config.processor_id, device=self.device, skip_processor=config.skip_processor), - model_manager.fetch_model("flux_controlnet", config.model_path), - config.scale - ) - controlnet_units.append(controlnet_unit) - self.controlnet = FluxMultiControlNetManager(controlnet_units) - - # IP-Adapters - self.ipadapter = model_manager.fetch_model("flux_ipadapter") - self.ipadapter_image_encoder = model_manager.fetch_model("siglip_vision_model") - - # InfiniteYou - self.image_proj_model = model_manager.fetch_model("infiniteyou_image_projector") - if self.image_proj_model is not None: - self.infinityou_processor = InfinitYou(device=self.device) - - # Step1x - self.qwenvl = model_manager.fetch_model("qwenvl") - self.step1x_connector = model_manager.fetch_model("step1x_connector") - - - @staticmethod - def from_model_manager(model_manager: ModelManager, controlnet_config_units: List[ControlNetConfigUnit]=[], prompt_refiner_classes=[], prompt_extender_classes=[], device=None, torch_dtype=None): - pipe = FluxImagePipeline( - device=model_manager.device if device is None else device, - torch_dtype=model_manager.torch_dtype if torch_dtype is None else torch_dtype, - ) - pipe.fetch_models(model_manager, controlnet_config_units, prompt_refiner_classes, prompt_extender_classes) - return pipe - - - def encode_image(self, image, tiled=False, tile_size=64, tile_stride=32): - latents = self.vae_encoder(image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - return latents - - - def decode_image(self, latent, tiled=False, tile_size=64, tile_stride=32): - image = self.vae_decoder(latent.to(self.device), tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - image = self.vae_output_to_image(image) - 
return image - - - def encode_prompt(self, prompt, positive=True, t5_sequence_length=512): - if self.text_encoder_1 is not None and self.text_encoder_2 is not None: - prompt_emb, pooled_prompt_emb, text_ids = self.prompter.encode_prompt( - prompt, device=self.device, positive=positive, t5_sequence_length=t5_sequence_length - ) - return {"prompt_emb": prompt_emb, "pooled_prompt_emb": pooled_prompt_emb, "text_ids": text_ids} - else: - return {} - - - def prepare_extra_input(self, latents=None, guidance=1.0): - latent_image_ids = self.dit.prepare_image_ids(latents) - guidance = torch.Tensor([guidance] * latents.shape[0]).to(device=latents.device, dtype=latents.dtype) - return {"image_ids": latent_image_ids, "guidance": guidance} - - - def apply_controlnet_mask_on_latents(self, latents, mask): - mask = (self.preprocess_image(mask) + 1) / 2 - mask = mask.mean(dim=1, keepdim=True) - mask = mask.to(dtype=self.torch_dtype, device=self.device) - mask = 1 - torch.nn.functional.interpolate(mask, size=latents.shape[-2:]) - latents = torch.concat([latents, mask], dim=1) - return latents - - - def apply_controlnet_mask_on_image(self, image, mask): - mask = mask.resize(image.size) - mask = self.preprocess_image(mask).mean(dim=[0, 1]) - image = np.array(image) - image[mask > 0] = 0 - image = Image.fromarray(image) - return image - - - def prepare_controlnet_input(self, controlnet_image, controlnet_inpaint_mask, tiler_kwargs): - if isinstance(controlnet_image, Image.Image): - controlnet_image = [controlnet_image] * len(self.controlnet.processors) - - controlnet_frames = [] - for i in range(len(self.controlnet.processors)): - # image annotator - image = self.controlnet.process_image(controlnet_image[i], processor_id=i)[0] - if controlnet_inpaint_mask is not None and self.controlnet.processors[i].processor_id == "inpaint": - image = self.apply_controlnet_mask_on_image(image, controlnet_inpaint_mask) - - # image to tensor - image = self.preprocess_image(image).to(device=self.device, 
dtype=self.torch_dtype) - - # vae encoder - image = self.encode_image(image, **tiler_kwargs) - if controlnet_inpaint_mask is not None and self.controlnet.processors[i].processor_id == "inpaint": - image = self.apply_controlnet_mask_on_latents(image, controlnet_inpaint_mask) - - # store it - controlnet_frames.append(image) - return controlnet_frames - - - def prepare_ipadapter_inputs(self, images, height=384, width=384): - images = [image.convert("RGB").resize((width, height), resample=3) for image in images] - images = [self.preprocess_image(image).to(device=self.device, dtype=self.torch_dtype) for image in images] - return torch.cat(images, dim=0) - - - def inpaint_fusion(self, latents, inpaint_latents, pred_noise, fg_mask, bg_mask, progress_id, background_weight=0.): - # inpaint noise - inpaint_noise = (latents - inpaint_latents) / self.scheduler.sigmas[progress_id] - # merge noise - weight = torch.ones_like(inpaint_noise) - inpaint_noise[fg_mask] = pred_noise[fg_mask] - inpaint_noise[bg_mask] += pred_noise[bg_mask] * background_weight - weight[bg_mask] += background_weight - inpaint_noise /= weight - return inpaint_noise - - - def preprocess_masks(self, masks, height, width, dim): - out_masks = [] - for mask in masks: - mask = self.preprocess_image(mask.resize((width, height), resample=Image.NEAREST)).mean(dim=1, keepdim=True) > 0 - mask = mask.repeat(1, dim, 1, 1).to(device=self.device, dtype=self.torch_dtype) - out_masks.append(mask) - return out_masks - - - def prepare_entity_inputs(self, entity_prompts, entity_masks, width, height, t5_sequence_length=512, enable_eligen_inpaint=False): - fg_mask, bg_mask = None, None - if enable_eligen_inpaint: - masks_ = deepcopy(entity_masks) - fg_masks = torch.cat([self.preprocess_image(mask.resize((width//8, height//8))).mean(dim=1, keepdim=True) for mask in masks_]) - fg_masks = (fg_masks > 0).float() - fg_mask = fg_masks.sum(dim=0, keepdim=True).repeat(1, 16, 1, 1) > 0 - bg_mask = ~fg_mask - entity_masks = 
self.preprocess_masks(entity_masks, height//8, width//8, 1) - entity_masks = torch.cat(entity_masks, dim=0).unsqueeze(0) # b, n_mask, c, h, w - entity_prompts = self.encode_prompt(entity_prompts, t5_sequence_length=t5_sequence_length)['prompt_emb'].unsqueeze(0) - return entity_prompts, entity_masks, fg_mask, bg_mask - - - def prepare_latents(self, input_image, height, width, seed, tiled, tile_size, tile_stride): - if input_image is not None: - self.load_models_to_device(['vae_encoder']) - image = self.preprocess_image(input_image).to(device=self.device, dtype=self.torch_dtype) - input_latents = self.encode_image(image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - noise = self.generate_noise((1, 16, height//8, width//8), seed=seed, device=self.device, dtype=self.torch_dtype) - latents = self.scheduler.add_noise(input_latents, noise, timestep=self.scheduler.timesteps[0]) - else: - latents = self.generate_noise((1, 16, height//8, width//8), seed=seed, device=self.device, dtype=self.torch_dtype) - input_latents = None - return latents, input_latents - - - def prepare_ipadapter(self, ipadapter_images, ipadapter_scale): - if ipadapter_images is not None: - self.load_models_to_device(['ipadapter_image_encoder']) - ipadapter_images = self.prepare_ipadapter_inputs(ipadapter_images) - ipadapter_image_encoding = self.ipadapter_image_encoder(ipadapter_images).pooler_output - self.load_models_to_device(['ipadapter']) - ipadapter_kwargs_list_posi = {"ipadapter_kwargs_list": self.ipadapter(ipadapter_image_encoding, scale=ipadapter_scale)} - ipadapter_kwargs_list_nega = {"ipadapter_kwargs_list": self.ipadapter(torch.zeros_like(ipadapter_image_encoding))} - else: - ipadapter_kwargs_list_posi, ipadapter_kwargs_list_nega = {"ipadapter_kwargs_list": {}}, {"ipadapter_kwargs_list": {}} - return ipadapter_kwargs_list_posi, ipadapter_kwargs_list_nega - - - def prepare_controlnet(self, controlnet_image, masks, controlnet_inpaint_mask, tiler_kwargs, 
enable_controlnet_on_negative): - if controlnet_image is not None: - self.load_models_to_device(['vae_encoder']) - controlnet_kwargs_posi = {"controlnet_frames": self.prepare_controlnet_input(controlnet_image, controlnet_inpaint_mask, tiler_kwargs)} - if len(masks) > 0 and controlnet_inpaint_mask is not None: - print("The controlnet_inpaint_mask will be overridden by masks.") - local_controlnet_kwargs = [{"controlnet_frames": self.prepare_controlnet_input(controlnet_image, mask, tiler_kwargs)} for mask in masks] - else: - local_controlnet_kwargs = None - else: - controlnet_kwargs_posi, local_controlnet_kwargs = {"controlnet_frames": None}, [{}] * len(masks) - controlnet_kwargs_nega = controlnet_kwargs_posi if enable_controlnet_on_negative else {} - return controlnet_kwargs_posi, controlnet_kwargs_nega, local_controlnet_kwargs - - - def prepare_eligen(self, prompt_emb_nega, eligen_entity_prompts, eligen_entity_masks, width, height, t5_sequence_length, enable_eligen_inpaint, enable_eligen_on_negative, cfg_scale): - if eligen_entity_masks is not None: - entity_prompt_emb_posi, entity_masks_posi, fg_mask, bg_mask = self.prepare_entity_inputs(eligen_entity_prompts, eligen_entity_masks, width, height, t5_sequence_length, enable_eligen_inpaint) - if enable_eligen_on_negative and cfg_scale != 1.0: - entity_prompt_emb_nega = prompt_emb_nega['prompt_emb'].unsqueeze(1).repeat(1, entity_masks_posi.shape[1], 1, 1) - entity_masks_nega = entity_masks_posi - else: - entity_prompt_emb_nega, entity_masks_nega = None, None - else: - entity_prompt_emb_posi, entity_masks_posi, entity_prompt_emb_nega, entity_masks_nega = None, None, None, None - fg_mask, bg_mask = None, None - eligen_kwargs_posi = {"entity_prompt_emb": entity_prompt_emb_posi, "entity_masks": entity_masks_posi} - eligen_kwargs_nega = {"entity_prompt_emb": entity_prompt_emb_nega, "entity_masks": entity_masks_nega} - return eligen_kwargs_posi, eligen_kwargs_nega, fg_mask, bg_mask - - - def prepare_prompts(self, prompt, 
local_prompts, masks, mask_scales, t5_sequence_length, negative_prompt, cfg_scale): - # Extend prompt - self.load_models_to_device(['text_encoder_1', 'text_encoder_2']) - prompt, local_prompts, masks, mask_scales = self.extend_prompt(prompt, local_prompts, masks, mask_scales) - - # Encode prompts - prompt_emb_posi = self.encode_prompt(prompt, t5_sequence_length=t5_sequence_length) - prompt_emb_nega = self.encode_prompt(negative_prompt, positive=False, t5_sequence_length=t5_sequence_length) if cfg_scale != 1.0 else None - prompt_emb_locals = [self.encode_prompt(prompt_local, t5_sequence_length=t5_sequence_length) for prompt_local in local_prompts] - return prompt_emb_posi, prompt_emb_nega, prompt_emb_locals - - - def prepare_infinite_you(self, id_image, controlnet_image, infinityou_guidance, height, width): - if self.infinityou_processor is not None and id_image is not None: - return self.infinityou_processor.prepare_infinite_you(self.image_proj_model, id_image, controlnet_image, infinityou_guidance, height, width) - else: - return {}, controlnet_image - - - def prepare_flex_kwargs(self, latents, flex_inpaint_image=None, flex_inpaint_mask=None, flex_control_image=None, flex_control_strength=0.5, flex_control_stop=0.5, tiled=False, tile_size=64, tile_stride=32): - if self.dit.input_dim == 196: - if flex_inpaint_image is None: - flex_inpaint_image = torch.zeros_like(latents) - else: - flex_inpaint_image = self.preprocess_image(flex_inpaint_image).to(device=self.device, dtype=self.torch_dtype) - flex_inpaint_image = self.encode_image(flex_inpaint_image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - if flex_inpaint_mask is None: - flex_inpaint_mask = torch.ones_like(latents)[:, 0:1, :, :] - else: - flex_inpaint_mask = flex_inpaint_mask.resize((latents.shape[3], latents.shape[2])) - flex_inpaint_mask = self.preprocess_image(flex_inpaint_mask).to(device=self.device, dtype=self.torch_dtype) - flex_inpaint_mask = (flex_inpaint_mask[:, 0:1, :, :] + 1) / 2 - 
flex_inpaint_image = flex_inpaint_image * (1 - flex_inpaint_mask) - if flex_control_image is None: - flex_control_image = torch.zeros_like(latents) - else: - flex_control_image = self.preprocess_image(flex_control_image).to(device=self.device, dtype=self.torch_dtype) - flex_control_image = self.encode_image(flex_control_image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) * flex_control_strength - flex_condition = torch.concat([flex_inpaint_image, flex_inpaint_mask, flex_control_image], dim=1) - flex_uncondition = torch.concat([flex_inpaint_image, flex_inpaint_mask, torch.zeros_like(flex_control_image)], dim=1) - flex_control_stop_timestep = self.scheduler.timesteps[int(flex_control_stop * (len(self.scheduler.timesteps) - 1))] - flex_kwargs = {"flex_condition": flex_condition, "flex_uncondition": flex_uncondition, "flex_control_stop_timestep": flex_control_stop_timestep} - else: - flex_kwargs = {} - return flex_kwargs - - - def prepare_step1x_kwargs(self, prompt, negative_prompt, image): - if image is None: - return {}, {} - self.load_models_to_device(["qwenvl", "vae_encoder"]) - captions = [prompt, negative_prompt] - ref_images = [image, image] - embs, masks = self.qwenvl(captions, ref_images) - image = self.preprocess_image(image).to(device=self.device, dtype=self.torch_dtype) - image = self.encode_image(image) - return {"step1x_llm_embedding": embs[0:1], "step1x_mask": masks[0:1], "step1x_reference_latents": image}, {"step1x_llm_embedding": embs[1:2], "step1x_mask": masks[1:2], "step1x_reference_latents": image} - - - @torch.no_grad() - def __call__( - self, - # Prompt - prompt, - negative_prompt="", - cfg_scale=1.0, - embedded_guidance=3.5, - t5_sequence_length=512, - # Image - input_image=None, - denoising_strength=1.0, - height=1024, - width=1024, - seed=None, - # Steps - num_inference_steps=30, - # local prompts - local_prompts=(), - masks=(), - mask_scales=(), - # ControlNet - controlnet_image=None, - controlnet_inpaint_mask=None, - 
enable_controlnet_on_negative=False, - # IP-Adapter - ipadapter_images=None, - ipadapter_scale=1.0, - # EliGen - eligen_entity_prompts=None, - eligen_entity_masks=None, - enable_eligen_on_negative=False, - enable_eligen_inpaint=False, - # InfiniteYou - infinityou_id_image=None, - infinityou_guidance=1.0, - # Flex - flex_inpaint_image=None, - flex_inpaint_mask=None, - flex_control_image=None, - flex_control_strength=0.5, - flex_control_stop=0.5, - # Step1x - step1x_reference_image=None, - # TeaCache - tea_cache_l1_thresh=None, - # Tile - tiled=False, - tile_size=128, - tile_stride=64, - # Progress bar - progress_bar_cmd=tqdm, - progress_bar_st=None, - ): - height, width = self.check_resize_height_width(height, width) - - # Tiler parameters - tiler_kwargs = {"tiled": tiled, "tile_size": tile_size, "tile_stride": tile_stride} - - # Prepare scheduler - self.scheduler.set_timesteps(num_inference_steps, denoising_strength) - - # Prepare latent tensors - latents, input_latents = self.prepare_latents(input_image, height, width, seed, tiled, tile_size, tile_stride) - - # Prompt - prompt_emb_posi, prompt_emb_nega, prompt_emb_locals = self.prepare_prompts(prompt, local_prompts, masks, mask_scales, t5_sequence_length, negative_prompt, cfg_scale) - - # Extra input - extra_input = self.prepare_extra_input(latents, guidance=embedded_guidance) - - # InfiniteYou - infiniteyou_kwargs, controlnet_image = self.prepare_infinite_you(infinityou_id_image, controlnet_image, infinityou_guidance, height, width) - - # Entity control - eligen_kwargs_posi, eligen_kwargs_nega, fg_mask, bg_mask = self.prepare_eligen(prompt_emb_nega, eligen_entity_prompts, eligen_entity_masks, width, height, t5_sequence_length, enable_eligen_inpaint, enable_eligen_on_negative, cfg_scale) - - # IP-Adapter - ipadapter_kwargs_list_posi, ipadapter_kwargs_list_nega = self.prepare_ipadapter(ipadapter_images, ipadapter_scale) - - # ControlNets - controlnet_kwargs_posi, controlnet_kwargs_nega, local_controlnet_kwargs = 
self.prepare_controlnet(controlnet_image, masks, controlnet_inpaint_mask, tiler_kwargs, enable_controlnet_on_negative) - - # Flex - flex_kwargs = self.prepare_flex_kwargs(latents, flex_inpaint_image, flex_inpaint_mask, flex_control_image, flex_control_strength=flex_control_strength, flex_control_stop=flex_control_stop, **tiler_kwargs) - - # Step1x - step1x_kwargs_posi, step1x_kwargs_nega = self.prepare_step1x_kwargs(prompt, negative_prompt, image=step1x_reference_image) - - # TeaCache - tea_cache_kwargs = {"tea_cache": TeaCache(num_inference_steps, rel_l1_thresh=tea_cache_l1_thresh) if tea_cache_l1_thresh is not None else None} - - # Denoise - self.load_models_to_device(['dit', 'controlnet', 'step1x_connector']) - for progress_id, timestep in enumerate(progress_bar_cmd(self.scheduler.timesteps)): - timestep = timestep.unsqueeze(0).to(self.device) - - # Positive side - inference_callback = lambda prompt_emb_posi, controlnet_kwargs: lets_dance_flux( - dit=self.dit, controlnet=self.controlnet, step1x_connector=self.step1x_connector, - hidden_states=latents, timestep=timestep, - **prompt_emb_posi, **tiler_kwargs, **extra_input, **controlnet_kwargs, **ipadapter_kwargs_list_posi, **eligen_kwargs_posi, **tea_cache_kwargs, **infiniteyou_kwargs, **flex_kwargs, **step1x_kwargs_posi, - ) - noise_pred_posi = self.control_noise_via_local_prompts( - prompt_emb_posi, prompt_emb_locals, masks, mask_scales, inference_callback, - special_kwargs=controlnet_kwargs_posi, special_local_kwargs_list=local_controlnet_kwargs - ) - - # Inpaint - if enable_eligen_inpaint: - noise_pred_posi = self.inpaint_fusion(latents, input_latents, noise_pred_posi, fg_mask, bg_mask, progress_id) - - # Classifier-free guidance - if cfg_scale != 1.0: - # Negative side - noise_pred_nega = lets_dance_flux( - dit=self.dit, controlnet=self.controlnet, step1x_connector=self.step1x_connector, - hidden_states=latents, timestep=timestep, - **prompt_emb_nega, **tiler_kwargs, **extra_input, **controlnet_kwargs_nega, 
**ipadapter_kwargs_list_nega, **eligen_kwargs_nega, **infiniteyou_kwargs, **flex_kwargs, **step1x_kwargs_nega, - ) - noise_pred = noise_pred_nega + cfg_scale * (noise_pred_posi - noise_pred_nega) - else: - noise_pred = noise_pred_posi - - # Iterate - latents = self.scheduler.step(noise_pred, self.scheduler.timesteps[progress_id], latents) - - # UI - if progress_bar_st is not None: - progress_bar_st.progress(progress_id / len(self.scheduler.timesteps)) - - # Decode image - self.load_models_to_device(['vae_decoder']) - image = self.decode_image(latents, **tiler_kwargs) - - # Offload all models - self.load_models_to_device([]) - return image - - - -class InfinitYou: - def __init__(self, device="cuda", torch_dtype=torch.bfloat16): - from facexlib.recognition import init_recognition_model - from insightface.app import FaceAnalysis - self.device = device - self.torch_dtype = torch_dtype - insightface_root_path = 'models/InfiniteYou/insightface' - self.app_640 = FaceAnalysis(name='antelopev2', root=insightface_root_path, providers=['CUDAExecutionProvider', 'CPUExecutionProvider']) - self.app_640.prepare(ctx_id=0, det_size=(640, 640)) - self.app_320 = FaceAnalysis(name='antelopev2', root=insightface_root_path, providers=['CUDAExecutionProvider', 'CPUExecutionProvider']) - self.app_320.prepare(ctx_id=0, det_size=(320, 320)) - self.app_160 = FaceAnalysis(name='antelopev2', root=insightface_root_path, providers=['CUDAExecutionProvider', 'CPUExecutionProvider']) - self.app_160.prepare(ctx_id=0, det_size=(160, 160)) - self.arcface_model = init_recognition_model('arcface', device=self.device) - - def _detect_face(self, id_image_cv2): - face_info = self.app_640.get(id_image_cv2) - if len(face_info) > 0: - return face_info - face_info = self.app_320.get(id_image_cv2) - if len(face_info) > 0: - return face_info - face_info = self.app_160.get(id_image_cv2) - return face_info - - def extract_arcface_bgr_embedding(self, in_image, landmark): - from insightface.utils import face_align - 
arc_face_image = face_align.norm_crop(in_image, landmark=np.array(landmark), image_size=112) - arc_face_image = torch.from_numpy(arc_face_image).unsqueeze(0).permute(0, 3, 1, 2) / 255. - arc_face_image = 2 * arc_face_image - 1 - arc_face_image = arc_face_image.contiguous().to(self.device) - face_emb = self.arcface_model(arc_face_image)[0] # [512], normalized - return face_emb - - def prepare_infinite_you(self, model, id_image, controlnet_image, infinityou_guidance, height, width): - import cv2 - if id_image is None: - return {'id_emb': None}, controlnet_image - id_image_cv2 = cv2.cvtColor(np.array(id_image), cv2.COLOR_RGB2BGR) - face_info = self._detect_face(id_image_cv2) - if len(face_info) == 0: - raise ValueError('No face detected in the input ID image') - landmark = sorted(face_info, key=lambda x:(x['bbox'][2]-x['bbox'][0])*(x['bbox'][3]-x['bbox'][1]))[-1]['kps'] # only use the maximum face - id_emb = self.extract_arcface_bgr_embedding(id_image_cv2, landmark) - id_emb = model(id_emb.unsqueeze(0).reshape([1, -1, 512]).to(dtype=self.torch_dtype)) - if controlnet_image is None: - controlnet_image = Image.fromarray(np.zeros([height, width, 3]).astype(np.uint8)) - infinityou_guidance = torch.Tensor([infinityou_guidance]).to(device=self.device, dtype=self.torch_dtype) - return {'id_emb': id_emb, 'infinityou_guidance': infinityou_guidance}, controlnet_image - - -class TeaCache: - def __init__(self, num_inference_steps, rel_l1_thresh): - self.num_inference_steps = num_inference_steps - self.step = 0 - self.accumulated_rel_l1_distance = 0 - self.previous_modulated_input = None - self.rel_l1_thresh = rel_l1_thresh - self.previous_residual = None - self.previous_hidden_states = None - - def check(self, dit: FluxDiT, hidden_states, conditioning): - inp = hidden_states.clone() - temb_ = conditioning.clone() - modulated_inp, _, _, _, _ = dit.blocks[0].norm1_a(inp, emb=temb_) - if self.step == 0 or self.step == self.num_inference_steps - 1: - should_calc = True - 
self.accumulated_rel_l1_distance = 0 - else: - coefficients = [4.98651651e+02, -2.83781631e+02, 5.58554382e+01, -3.82021401e+00, 2.64230861e-01] - rescale_func = np.poly1d(coefficients) - self.accumulated_rel_l1_distance += rescale_func(((modulated_inp-self.previous_modulated_input).abs().mean() / self.previous_modulated_input.abs().mean()).cpu().item()) - if self.accumulated_rel_l1_distance < self.rel_l1_thresh: - should_calc = False - else: - should_calc = True - self.accumulated_rel_l1_distance = 0 - self.previous_modulated_input = modulated_inp - self.step += 1 - if self.step == self.num_inference_steps: - self.step = 0 - if should_calc: - self.previous_hidden_states = hidden_states.clone() - return not should_calc - - def store(self, hidden_states): - self.previous_residual = hidden_states - self.previous_hidden_states - self.previous_hidden_states = None - - def update(self, hidden_states): - hidden_states = hidden_states + self.previous_residual - return hidden_states - - -def lets_dance_flux( - dit: FluxDiT, - controlnet: FluxMultiControlNetManager = None, - step1x_connector: Qwen2Connector = None, - hidden_states=None, - timestep=None, - prompt_emb=None, - pooled_prompt_emb=None, - guidance=None, - text_ids=None, - image_ids=None, - controlnet_frames=None, - tiled=False, - tile_size=128, - tile_stride=64, - entity_prompt_emb=None, - entity_masks=None, - ipadapter_kwargs_list={}, - id_emb=None, - infinityou_guidance=None, - flex_condition=None, - flex_uncondition=None, - flex_control_stop_timestep=None, - step1x_llm_embedding=None, - step1x_mask=None, - step1x_reference_latents=None, - tea_cache: TeaCache = None, - **kwargs -): - if tiled: - def flux_forward_fn(hl, hr, wl, wr): - tiled_controlnet_frames = [f[:, :, hl: hr, wl: wr] for f in controlnet_frames] if controlnet_frames is not None else None - return lets_dance_flux( - dit=dit, - controlnet=controlnet, - hidden_states=hidden_states[:, :, hl: hr, wl: wr], - timestep=timestep, - prompt_emb=prompt_emb, 
- pooled_prompt_emb=pooled_prompt_emb, - guidance=guidance, - text_ids=text_ids, - image_ids=None, - controlnet_frames=tiled_controlnet_frames, - tiled=False, - **kwargs - ) - return FastTileWorker().tiled_forward( - flux_forward_fn, - hidden_states, - tile_size=tile_size, - tile_stride=tile_stride, - tile_device=hidden_states.device, - tile_dtype=hidden_states.dtype - ) - - - # ControlNet - if controlnet is not None and controlnet_frames is not None: - controlnet_extra_kwargs = { - "hidden_states": hidden_states, - "timestep": timestep, - "prompt_emb": prompt_emb, - "pooled_prompt_emb": pooled_prompt_emb, - "guidance": guidance, - "text_ids": text_ids, - "image_ids": image_ids, - "tiled": tiled, - "tile_size": tile_size, - "tile_stride": tile_stride, - } - if id_emb is not None: - controlnet_text_ids = torch.zeros(id_emb.shape[0], id_emb.shape[1], 3).to(device=hidden_states.device, dtype=hidden_states.dtype) - controlnet_extra_kwargs.update({"prompt_emb": id_emb, 'text_ids': controlnet_text_ids, 'guidance': infinityou_guidance}) - controlnet_res_stack, controlnet_single_res_stack = controlnet( - controlnet_frames, **controlnet_extra_kwargs - ) - - # Flex - if flex_condition is not None: - if timestep.tolist()[0] >= flex_control_stop_timestep: - hidden_states = torch.concat([hidden_states, flex_condition], dim=1) - else: - hidden_states = torch.concat([hidden_states, flex_uncondition], dim=1) - - # Step1x - if step1x_llm_embedding is not None: - prompt_emb, pooled_prompt_emb = step1x_connector(step1x_llm_embedding, timestep / 1000, step1x_mask) - text_ids = torch.zeros((1, prompt_emb.shape[1], 3), dtype=prompt_emb.dtype, device=prompt_emb.device) - - if image_ids is None: - image_ids = dit.prepare_image_ids(hidden_states) - - conditioning = dit.time_embedder(timestep, hidden_states.dtype) + dit.pooled_text_embedder(pooled_prompt_emb) - if dit.guidance_embedder is not None: - guidance = guidance * 1000 - conditioning = conditioning + dit.guidance_embedder(guidance, 
hidden_states.dtype) - - height, width = hidden_states.shape[-2:] - hidden_states = dit.patchify(hidden_states) - - # Step1x - if step1x_reference_latents is not None: - step1x_reference_image_ids = dit.prepare_image_ids(step1x_reference_latents) - step1x_reference_latents = dit.patchify(step1x_reference_latents) - image_ids = torch.concat([image_ids, step1x_reference_image_ids], dim=-2) - hidden_states = torch.concat([hidden_states, step1x_reference_latents], dim=1) - - hidden_states = dit.x_embedder(hidden_states) - - if entity_prompt_emb is not None and entity_masks is not None: - prompt_emb, image_rotary_emb, attention_mask = dit.process_entity_masks(hidden_states, prompt_emb, entity_prompt_emb, entity_masks, text_ids, image_ids, 16) - else: - prompt_emb = dit.context_embedder(prompt_emb) - image_rotary_emb = dit.pos_embedder(torch.cat((text_ids, image_ids), dim=1)) - attention_mask = None - - # TeaCache - if tea_cache is not None: - tea_cache_update = tea_cache.check(dit, hidden_states, conditioning) - else: - tea_cache_update = False - - if tea_cache_update: - hidden_states = tea_cache.update(hidden_states) - else: - # Joint Blocks - for block_id, block in enumerate(dit.blocks): - hidden_states, prompt_emb = block( - hidden_states, - prompt_emb, - conditioning, - image_rotary_emb, - attention_mask, - ipadapter_kwargs_list=ipadapter_kwargs_list.get(block_id, None) - ) - # ControlNet - if controlnet is not None and controlnet_frames is not None: - hidden_states = hidden_states + controlnet_res_stack[block_id] - - # Single Blocks - hidden_states = torch.cat([prompt_emb, hidden_states], dim=1) - num_joint_blocks = len(dit.blocks) - for block_id, block in enumerate(dit.single_blocks): - hidden_states, prompt_emb = block( - hidden_states, - prompt_emb, - conditioning, - image_rotary_emb, - attention_mask, - ipadapter_kwargs_list=ipadapter_kwargs_list.get(block_id + num_joint_blocks, None) - ) - # ControlNet - if controlnet is not None and controlnet_frames is not 
None: - hidden_states[:, prompt_emb.shape[1]:] = hidden_states[:, prompt_emb.shape[1]:] + controlnet_single_res_stack[block_id] - hidden_states = hidden_states[:, prompt_emb.shape[1]:] - - if tea_cache is not None: - tea_cache.store(hidden_states) - - hidden_states = dit.final_norm_out(hidden_states, conditioning) - hidden_states = dit.final_proj_out(hidden_states) - - # Step1x - if step1x_reference_latents is not None: - hidden_states = hidden_states[:, :hidden_states.shape[1] // 2] - - hidden_states = dit.unpatchify(hidden_states, height, width) - - return hidden_states diff --git a/diffsynth/pipelines/flux_image_new.py b/diffsynth/pipelines/flux_image_new.py deleted file mode 100644 index 63a9dff..0000000 --- a/diffsynth/pipelines/flux_image_new.py +++ /dev/null @@ -1,1310 +0,0 @@ -import torch, warnings, glob, os, types -import numpy as np -from PIL import Image -from einops import repeat, reduce -from typing import Optional, Union -from dataclasses import dataclass -from modelscope import snapshot_download -from einops import rearrange -import numpy as np -from PIL import Image -from tqdm import tqdm -from typing import Optional -from typing_extensions import Literal - -from ..schedulers import FlowMatchScheduler -from ..prompters import FluxPrompter -from ..models import ModelManager, load_state_dict, SD3TextEncoder1, FluxTextEncoder2, FluxDiT, FluxVAEEncoder, FluxVAEDecoder -from ..models.step1x_connector import Qwen2Connector -from ..models.flux_controlnet import FluxControlNet -from ..models.flux_ipadapter import FluxIpAdapter -from ..models.flux_value_control import MultiValueEncoder -from ..models.flux_infiniteyou import InfiniteYouImageProjector -from ..models.flux_lora_encoder import FluxLoRAEncoder, LoRALayerBlock -from ..models.tiler import FastTileWorker -from ..models.nexus_gen import NexusGenAutoregressiveModel -from ..models.nexus_gen_projector import NexusGenAdapter, NexusGenImageEmbeddingMerger -from ..utils import BasePipeline, ModelConfig, 
PipelineUnitRunner, PipelineUnit -from ..lora.flux_lora import FluxLoRALoader, FluxLoraPatcher, FluxLoRAFuser - -from ..models.flux_dit import RMSNorm -from ..vram_management import gradient_checkpoint_forward, enable_vram_management, AutoWrappedModule, AutoWrappedLinear - - - -@dataclass -class ControlNetInput: - controlnet_id: int = 0 - scale: float = 1.0 - start: float = 1.0 - end: float = 0.0 - image: Image.Image = None - inpaint_mask: Image.Image = None - processor_id: str = None - - - -class MultiControlNet(torch.nn.Module): - def __init__(self, models: list[FluxControlNet]): - super().__init__() - self.models = torch.nn.ModuleList(models) - - def process_single_controlnet(self, controlnet_input: ControlNetInput, conditioning: torch.Tensor, **kwargs): - model = self.models[controlnet_input.controlnet_id] - res_stack, single_res_stack = model( - controlnet_conditioning=conditioning, - processor_id=controlnet_input.processor_id, - **kwargs - ) - res_stack = [res * controlnet_input.scale for res in res_stack] - single_res_stack = [res * controlnet_input.scale for res in single_res_stack] - return res_stack, single_res_stack - - def forward(self, conditionings: list[torch.Tensor], controlnet_inputs: list[ControlNetInput], progress_id, num_inference_steps, **kwargs): - res_stack, single_res_stack = None, None - for controlnet_input, conditioning in zip(controlnet_inputs, conditionings): - progress = (num_inference_steps - 1 - progress_id) / max(num_inference_steps - 1, 1) - if progress > controlnet_input.start or progress < controlnet_input.end: - continue - res_stack_, single_res_stack_ = self.process_single_controlnet(controlnet_input, conditioning, **kwargs) - if res_stack is None: - res_stack = res_stack_ - single_res_stack = single_res_stack_ - else: - res_stack = [i + j for i, j in zip(res_stack, res_stack_)] - single_res_stack = [i + j for i, j in zip(single_res_stack, single_res_stack_)] - return res_stack, single_res_stack - - - -class 
FluxImagePipeline(BasePipeline): - - def __init__(self, device="cuda", torch_dtype=torch.bfloat16): - super().__init__( - device=device, torch_dtype=torch_dtype, - height_division_factor=16, width_division_factor=16, - ) - self.scheduler = FlowMatchScheduler() - self.prompter = FluxPrompter() - self.text_encoder_1: SD3TextEncoder1 = None - self.text_encoder_2: FluxTextEncoder2 = None - self.dit: FluxDiT = None - self.vae_decoder: FluxVAEDecoder = None - self.vae_encoder: FluxVAEEncoder = None - self.controlnet: MultiControlNet = None - self.ipadapter: FluxIpAdapter = None - self.ipadapter_image_encoder = None - self.qwenvl = None - self.step1x_connector: Qwen2Connector = None - self.nexus_gen: NexusGenAutoregressiveModel = None - self.nexus_gen_generation_adapter: NexusGenAdapter = None - self.nexus_gen_editing_adapter: NexusGenImageEmbeddingMerger = None - self.value_controller: MultiValueEncoder = None - self.infinityou_processor: InfinitYou = None - self.image_proj_model: InfiniteYouImageProjector = None - self.lora_patcher: FluxLoraPatcher = None - self.lora_encoder: FluxLoRAEncoder = None - self.unit_runner = PipelineUnitRunner() - self.in_iteration_models = ("dit", "step1x_connector", "controlnet", "lora_patcher") - self.units = [ - FluxImageUnit_ShapeChecker(), - FluxImageUnit_NoiseInitializer(), - FluxImageUnit_PromptEmbedder(), - FluxImageUnit_InputImageEmbedder(), - FluxImageUnit_ImageIDs(), - FluxImageUnit_EmbeddedGuidanceEmbedder(), - FluxImageUnit_Kontext(), - FluxImageUnit_InfiniteYou(), - FluxImageUnit_ControlNet(), - FluxImageUnit_IPAdapter(), - FluxImageUnit_EntityControl(), - FluxImageUnit_NexusGen(), - FluxImageUnit_TeaCache(), - FluxImageUnit_Flex(), - FluxImageUnit_Step1x(), - FluxImageUnit_ValueControl(), - FluxImageUnit_LoRAEncode(), - ] - self.model_fn = model_fn_flux_image - - - def load_lora( - self, - module: torch.nn.Module, - lora_config: Union[ModelConfig, str] = None, - alpha=1, - hotload=False, - state_dict=None, - ): - if state_dict 
is None: - if isinstance(lora_config, str): - lora = load_state_dict(lora_config, torch_dtype=self.torch_dtype, device=self.device) - else: - lora_config.download_if_necessary() - lora = load_state_dict(lora_config.path, torch_dtype=self.torch_dtype, device=self.device) - else: - lora = state_dict - loader = FluxLoRALoader(torch_dtype=self.torch_dtype, device=self.device) - lora = loader.convert_state_dict(lora) - if hotload: - for name, module in module.named_modules(): - if isinstance(module, AutoWrappedLinear): - lora_a_name = f'{name}.lora_A.default.weight' - lora_b_name = f'{name}.lora_B.default.weight' - if lora_a_name in lora and lora_b_name in lora: - module.lora_A_weights.append(lora[lora_a_name] * alpha) - module.lora_B_weights.append(lora[lora_b_name]) - else: - loader.load(module, lora, alpha=alpha) - - - def load_loras( - self, - module: torch.nn.Module, - lora_configs: list[Union[ModelConfig, str]], - alpha=1, - hotload=False, - extra_fused_lora=False, - ): - for lora_config in lora_configs: - self.load_lora(module, lora_config, hotload=hotload, alpha=alpha) - if extra_fused_lora: - lora_fuser = FluxLoRAFuser(device="cuda", torch_dtype=torch.bfloat16) - fused_lora = lora_fuser(lora_configs) - self.load_lora(module, state_dict=fused_lora, hotload=hotload, alpha=alpha) - - - def clear_lora(self): - for name, module in self.named_modules(): - if isinstance(module, AutoWrappedLinear): - if hasattr(module, "lora_A_weights"): - module.lora_A_weights.clear() - if hasattr(module, "lora_B_weights"): - module.lora_B_weights.clear() - - - def training_loss(self, **inputs): - timestep_id = torch.randint(0, self.scheduler.num_train_timesteps, (1,)) - timestep = self.scheduler.timesteps[timestep_id].to(dtype=self.torch_dtype, device=self.device) - - inputs["latents"] = self.scheduler.add_noise(inputs["input_latents"], inputs["noise"], timestep) - training_target = self.scheduler.training_target(inputs["input_latents"], inputs["noise"], timestep) - - noise_pred = 
self.model_fn(**inputs, timestep=timestep) - - loss = torch.nn.functional.mse_loss(noise_pred.float(), training_target.float()) - loss = loss * self.scheduler.training_weight(timestep) - return loss - - - def _enable_vram_management_with_default_config(self, model, vram_limit): - if model is not None: - dtype = next(iter(model.parameters())).dtype - enable_vram_management( - model, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Embedding: AutoWrappedModule, - torch.nn.LayerNorm: AutoWrappedModule, - torch.nn.Conv2d: AutoWrappedModule, - torch.nn.GroupNorm: AutoWrappedModule, - RMSNorm: AutoWrappedModule, - LoRALayerBlock: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - vram_limit=vram_limit, - ) - - - def enable_lora_magic(self): - if self.dit is not None: - if not (hasattr(self.dit, "vram_management_enabled") and self.dit.vram_management_enabled): - dtype = next(iter(self.dit.parameters())).dtype - enable_vram_management( - self.dit, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - }, - module_config = dict( - offload_dtype=dtype, - offload_device=self.device, - onload_dtype=dtype, - onload_device=self.device, - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - vram_limit=None, - ) - if self.lora_patcher is not None: - for name, module in self.dit.named_modules(): - if isinstance(module, AutoWrappedLinear): - merger_name = name.replace(".", "___") - if merger_name in self.lora_patcher.model_dict: - module.lora_merger = self.lora_patcher.model_dict[merger_name] - - - def enable_vram_management(self, num_persistent_param_in_dit=None, vram_limit=None, vram_buffer=0.5): - self.vram_management_enabled = True - if num_persistent_param_in_dit is not None: - vram_limit = None - else: - if vram_limit is None: - vram_limit = self.get_vram() - vram_limit 
= vram_limit - vram_buffer - - # Default config - default_vram_management_models = ["text_encoder_1", "vae_decoder", "vae_encoder", "controlnet", "image_proj_model", "ipadapter", "lora_patcher", "value_controller", "step1x_connector", "lora_encoder"] - for model_name in default_vram_management_models: - self._enable_vram_management_with_default_config(getattr(self, model_name), vram_limit) - - # Special config - if self.text_encoder_2 is not None: - from transformers.models.t5.modeling_t5 import T5LayerNorm, T5DenseActDense, T5DenseGatedActDense - dtype = next(iter(self.text_encoder_2.parameters())).dtype - enable_vram_management( - self.text_encoder_2, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Embedding: AutoWrappedModule, - T5LayerNorm: AutoWrappedModule, - T5DenseActDense: AutoWrappedModule, - T5DenseGatedActDense: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - vram_limit=vram_limit, - ) - if self.dit is not None: - dtype = next(iter(self.dit.parameters())).dtype - device = "cpu" if vram_limit is not None else self.device - enable_vram_management( - self.dit, - module_map = { - RMSNorm: AutoWrappedModule, - torch.nn.Linear: AutoWrappedLinear, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device=device, - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - max_num_param=num_persistent_param_in_dit, - overflow_module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - vram_limit=vram_limit, - ) - if self.ipadapter_image_encoder is not None: - from transformers.models.siglip.modeling_siglip import SiglipVisionEmbeddings, SiglipEncoder, 
SiglipMultiheadAttentionPoolingHead - dtype = next(iter(self.ipadapter_image_encoder.parameters())).dtype - enable_vram_management( - self.ipadapter_image_encoder, - module_map = { - SiglipVisionEmbeddings: AutoWrappedModule, - SiglipEncoder: AutoWrappedModule, - SiglipMultiheadAttentionPoolingHead: AutoWrappedModule, - torch.nn.MultiheadAttention: AutoWrappedModule, - torch.nn.Linear: AutoWrappedLinear, - torch.nn.LayerNorm: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - vram_limit=vram_limit, - ) - if self.qwenvl is not None: - from transformers.models.qwen2_5_vl.modeling_qwen2_5_vl import ( - Qwen2_5_VisionPatchEmbed, Qwen2_5_VLVisionBlock, Qwen2_5_VLPatchMerger, - Qwen2_5_VLDecoderLayer, Qwen2_5_VisionRotaryEmbedding, Qwen2_5_VLRotaryEmbedding, Qwen2RMSNorm - ) - dtype = next(iter(self.qwenvl.parameters())).dtype - enable_vram_management( - self.qwenvl, - module_map = { - Qwen2_5_VisionPatchEmbed: AutoWrappedModule, - Qwen2_5_VLVisionBlock: AutoWrappedModule, - Qwen2_5_VLPatchMerger: AutoWrappedModule, - Qwen2_5_VLDecoderLayer: AutoWrappedModule, - Qwen2_5_VisionRotaryEmbedding: AutoWrappedModule, - Qwen2_5_VLRotaryEmbedding: AutoWrappedModule, - Qwen2RMSNorm: AutoWrappedModule, - torch.nn.Embedding: AutoWrappedModule, - torch.nn.Linear: AutoWrappedLinear, - torch.nn.LayerNorm: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - vram_limit=vram_limit, - ) - - - @staticmethod - def from_pretrained( - torch_dtype: torch.dtype = torch.bfloat16, - device: Union[str, torch.device] = "cuda", - model_configs: list[ModelConfig] = [], - nexus_gen_processor_config: ModelConfig = ModelConfig(model_id="DiffSynth-Studio/Nexus-GenV2", 
origin_file_pattern="processor/"), - ): - # Download and load models - model_manager = ModelManager() - for model_config in model_configs: - model_config.download_if_necessary() - model_manager.load_model( - model_config.path, - device=model_config.offload_device or device, - torch_dtype=model_config.offload_dtype or torch_dtype - ) - - # Initialize pipeline - pipe = FluxImagePipeline(device=device, torch_dtype=torch_dtype) - pipe.text_encoder_1 = model_manager.fetch_model("sd3_text_encoder_1") - pipe.text_encoder_2 = model_manager.fetch_model("flux_text_encoder_2") - pipe.dit = model_manager.fetch_model("flux_dit") - pipe.vae_decoder = model_manager.fetch_model("flux_vae_decoder") - pipe.vae_encoder = model_manager.fetch_model("flux_vae_encoder") - pipe.prompter.fetch_models(pipe.text_encoder_1, pipe.text_encoder_2) - pipe.ipadapter = model_manager.fetch_model("flux_ipadapter") - pipe.ipadapter_image_encoder = model_manager.fetch_model("siglip_vision_model") - pipe.qwenvl = model_manager.fetch_model("qwenvl") - pipe.step1x_connector = model_manager.fetch_model("step1x_connector") - pipe.image_proj_model = model_manager.fetch_model("infiniteyou_image_projector") - if pipe.image_proj_model is not None: - pipe.infinityou_processor = InfinitYou(device=device) - pipe.lora_patcher = model_manager.fetch_model("flux_lora_patcher") - pipe.lora_encoder = model_manager.fetch_model("flux_lora_encoder") - pipe.nexus_gen = model_manager.fetch_model("nexus_gen_llm") - pipe.nexus_gen_generation_adapter = model_manager.fetch_model("nexus_gen_generation_adapter") - pipe.nexus_gen_editing_adapter = model_manager.fetch_model("nexus_gen_editing_adapter") - if nexus_gen_processor_config is not None and pipe.nexus_gen is not None: - nexus_gen_processor_config.download_if_necessary() - pipe.nexus_gen.load_processor(nexus_gen_processor_config.path) - - # ControlNet - controlnets = [] - for model_name, model in zip(model_manager.model_name, model_manager.model): - if model_name == 
"flux_controlnet": - controlnets.append(model) - if len(controlnets) > 0: - pipe.controlnet = MultiControlNet(controlnets) - - # Value Controller - value_controllers = [] - for model_name, model in zip(model_manager.model_name, model_manager.model): - if model_name == "flux_value_controller": - value_controllers.append(model) - if len(value_controllers) > 0: - pipe.value_controller = MultiValueEncoder(value_controllers) - - return pipe - - - @torch.no_grad() - def __call__( - self, - # Prompt - prompt: str, - negative_prompt: str = "", - cfg_scale: float = 1.0, - embedded_guidance: float = 3.5, - t5_sequence_length: int = 512, - # Image - input_image: Image.Image = None, - denoising_strength: float = 1.0, - # Shape - height: int = 1024, - width: int = 1024, - # Randomness - seed: int = None, - rand_device: str = "cpu", - # Scheduler - sigma_shift: float = None, - # Steps - num_inference_steps: int = 30, - # local prompts - multidiffusion_prompts=(), - multidiffusion_masks=(), - multidiffusion_scales=(), - # Kontext - kontext_images: Union[list[Image.Image], Image.Image] = None, - # ControlNet - controlnet_inputs: list[ControlNetInput] = None, - # IP-Adapter - ipadapter_images: Union[list[Image.Image], Image.Image] = None, - ipadapter_scale: float = 1.0, - # EliGen - eligen_entity_prompts: list[str] = None, - eligen_entity_masks: list[Image.Image] = None, - eligen_enable_on_negative: bool = False, - eligen_enable_inpaint: bool = False, - # InfiniteYou - infinityou_id_image: Image.Image = None, - infinityou_guidance: float = 1.0, - # Flex - flex_inpaint_image: Image.Image = None, - flex_inpaint_mask: Image.Image = None, - flex_control_image: Image.Image = None, - flex_control_strength: float = 0.5, - flex_control_stop: float = 0.5, - # Value Controller - value_controller_inputs: Union[list[float], float] = None, - # Step1x - step1x_reference_image: Image.Image = None, - # NexusGen - nexus_gen_reference_image: Image.Image = None, - # LoRA Encoder - 
lora_encoder_inputs: Union[list[ModelConfig], ModelConfig, str] = None, - lora_encoder_scale: float = 1.0, - # TeaCache - tea_cache_l1_thresh: float = None, - # Tile - tiled: bool = False, - tile_size: int = 128, - tile_stride: int = 64, - # Progress bar - progress_bar_cmd = tqdm, - ): - # Scheduler - self.scheduler.set_timesteps(num_inference_steps, denoising_strength=denoising_strength, shift=sigma_shift) - - inputs_posi = { - "prompt": prompt, - } - inputs_nega = { - "negative_prompt": negative_prompt, - } - inputs_shared = { - "cfg_scale": cfg_scale, "embedded_guidance": embedded_guidance, "t5_sequence_length": t5_sequence_length, - "input_image": input_image, "denoising_strength": denoising_strength, - "height": height, "width": width, - "seed": seed, "rand_device": rand_device, - "sigma_shift": sigma_shift, "num_inference_steps": num_inference_steps, - "multidiffusion_prompts": multidiffusion_prompts, "multidiffusion_masks": multidiffusion_masks, "multidiffusion_scales": multidiffusion_scales, - "kontext_images": kontext_images, - "controlnet_inputs": controlnet_inputs, - "ipadapter_images": ipadapter_images, "ipadapter_scale": ipadapter_scale, - "eligen_entity_prompts": eligen_entity_prompts, "eligen_entity_masks": eligen_entity_masks, "eligen_enable_on_negative": eligen_enable_on_negative, "eligen_enable_inpaint": eligen_enable_inpaint, - "infinityou_id_image": infinityou_id_image, "infinityou_guidance": infinityou_guidance, - "flex_inpaint_image": flex_inpaint_image, "flex_inpaint_mask": flex_inpaint_mask, "flex_control_image": flex_control_image, "flex_control_strength": flex_control_strength, "flex_control_stop": flex_control_stop, - "value_controller_inputs": value_controller_inputs, - "step1x_reference_image": step1x_reference_image, - "nexus_gen_reference_image": nexus_gen_reference_image, - "lora_encoder_inputs": lora_encoder_inputs, "lora_encoder_scale": lora_encoder_scale, - "tea_cache_l1_thresh": tea_cache_l1_thresh, - "tiled": tiled, "tile_size": 
tile_size, "tile_stride": tile_stride, - "progress_bar_cmd": progress_bar_cmd, - } - for unit in self.units: - inputs_shared, inputs_posi, inputs_nega = self.unit_runner(unit, self, inputs_shared, inputs_posi, inputs_nega) - - # Denoise - self.load_models_to_device(self.in_iteration_models) - models = {name: getattr(self, name) for name in self.in_iteration_models} - for progress_id, timestep in enumerate(progress_bar_cmd(self.scheduler.timesteps)): - timestep = timestep.unsqueeze(0).to(dtype=self.torch_dtype, device=self.device) - - # Inference - noise_pred_posi = self.model_fn(**models, **inputs_shared, **inputs_posi, timestep=timestep, progress_id=progress_id) - if cfg_scale != 1.0: - noise_pred_nega = self.model_fn(**models, **inputs_shared, **inputs_nega, timestep=timestep, progress_id=progress_id) - noise_pred = noise_pred_nega + cfg_scale * (noise_pred_posi - noise_pred_nega) - else: - noise_pred = noise_pred_posi - - # Scheduler - inputs_shared["latents"] = self.scheduler.step(noise_pred, self.scheduler.timesteps[progress_id], inputs_shared["latents"]) - - # Decode - self.load_models_to_device(['vae_decoder']) - image = self.vae_decoder(inputs_shared["latents"], device=self.device, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - image = self.vae_output_to_image(image) - self.load_models_to_device([]) - - return image - - - -class FluxImageUnit_ShapeChecker(PipelineUnit): - def __init__(self): - super().__init__(input_params=("height", "width")) - - def process(self, pipe: FluxImagePipeline, height, width): - height, width = pipe.check_resize_height_width(height, width) - return {"height": height, "width": width} - - - -class FluxImageUnit_NoiseInitializer(PipelineUnit): - def __init__(self): - super().__init__(input_params=("height", "width", "seed", "rand_device")) - - def process(self, pipe: FluxImagePipeline, height, width, seed, rand_device): - noise = pipe.generate_noise((1, 16, height//8, width//8), seed=seed, rand_device=rand_device) - 
return {"noise": noise} - - - -class FluxImageUnit_InputImageEmbedder(PipelineUnit): - def __init__(self): - super().__init__( - input_params=("input_image", "noise", "tiled", "tile_size", "tile_stride"), - onload_model_names=("vae_encoder",) - ) - - def process(self, pipe: FluxImagePipeline, input_image, noise, tiled, tile_size, tile_stride): - if input_image is None: - return {"latents": noise, "input_latents": None} - pipe.load_models_to_device(['vae_encoder']) - image = pipe.preprocess_image(input_image).to(device=pipe.device, dtype=pipe.torch_dtype) - input_latents = pipe.vae_encoder(image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - if pipe.scheduler.training: - return {"latents": noise, "input_latents": input_latents} - else: - latents = pipe.scheduler.add_noise(input_latents, noise, timestep=pipe.scheduler.timesteps[0]) - return {"latents": latents, "input_latents": None} - - - -class FluxImageUnit_PromptEmbedder(PipelineUnit): - def __init__(self): - super().__init__( - seperate_cfg=True, - input_params_posi={"prompt": "prompt", "positive": "positive"}, - input_params_nega={"prompt": "negative_prompt", "positive": "positive"}, - input_params=("t5_sequence_length",), - onload_model_names=("text_encoder_1", "text_encoder_2") - ) - - def process(self, pipe: FluxImagePipeline, prompt, t5_sequence_length, positive) -> dict: - if pipe.text_encoder_1 is not None and pipe.text_encoder_2 is not None: - prompt_emb, pooled_prompt_emb, text_ids = pipe.prompter.encode_prompt( - prompt, device=pipe.device, positive=positive, t5_sequence_length=t5_sequence_length - ) - return {"prompt_emb": prompt_emb, "pooled_prompt_emb": pooled_prompt_emb, "text_ids": text_ids} - else: - return {} - - -class FluxImageUnit_ImageIDs(PipelineUnit): - def __init__(self): - super().__init__(input_params=("latents",)) - - def process(self, pipe: FluxImagePipeline, latents): - latent_image_ids = pipe.dit.prepare_image_ids(latents) - return {"image_ids": latent_image_ids} - - 
- -class FluxImageUnit_EmbeddedGuidanceEmbedder(PipelineUnit): - def __init__(self): - super().__init__(input_params=("embedded_guidance", "latents")) - - def process(self, pipe: FluxImagePipeline, embedded_guidance, latents): - guidance = torch.Tensor([embedded_guidance] * latents.shape[0]).to(device=latents.device, dtype=latents.dtype) - return {"guidance": guidance} - - - -class FluxImageUnit_Kontext(PipelineUnit): - def __init__(self): - super().__init__(input_params=("kontext_images", "tiled", "tile_size", "tile_stride")) - - def process(self, pipe: FluxImagePipeline, kontext_images, tiled, tile_size, tile_stride): - if kontext_images is None: - return {} - if not isinstance(kontext_images, list): - kontext_images = [kontext_images] - - kontext_latents = [] - kontext_image_ids = [] - for kontext_image in kontext_images: - kontext_image = pipe.preprocess_image(kontext_image) - kontext_latent = pipe.vae_encoder(kontext_image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - image_ids = pipe.dit.prepare_image_ids(kontext_latent) - image_ids[..., 0] = 1 - kontext_image_ids.append(image_ids) - kontext_latent = pipe.dit.patchify(kontext_latent) - kontext_latents.append(kontext_latent) - kontext_latents = torch.concat(kontext_latents, dim=1) - kontext_image_ids = torch.concat(kontext_image_ids, dim=-2) - return {"kontext_latents": kontext_latents, "kontext_image_ids": kontext_image_ids} - - - -class FluxImageUnit_ControlNet(PipelineUnit): - def __init__(self): - super().__init__( - input_params=("controlnet_inputs", "tiled", "tile_size", "tile_stride"), - onload_model_names=("vae_encoder",) - ) - - def apply_controlnet_mask_on_latents(self, pipe, latents, mask): - mask = (pipe.preprocess_image(mask) + 1) / 2 - mask = mask.mean(dim=1, keepdim=True) - mask = 1 - torch.nn.functional.interpolate(mask, size=latents.shape[-2:]) - latents = torch.concat([latents, mask], dim=1) - return latents - - def apply_controlnet_mask_on_image(self, pipe, image, mask): - 
mask = mask.resize(image.size) - mask = pipe.preprocess_image(mask).mean(dim=[0, 1]).cpu() - image = np.array(image) - image[mask > 0] = 0 - image = Image.fromarray(image) - return image - - def process(self, pipe: FluxImagePipeline, controlnet_inputs: list[ControlNetInput], tiled, tile_size, tile_stride): - if controlnet_inputs is None: - return {} - pipe.load_models_to_device(['vae_encoder']) - conditionings = [] - for controlnet_input in controlnet_inputs: - image = controlnet_input.image - if controlnet_input.inpaint_mask is not None: - image = self.apply_controlnet_mask_on_image(pipe, image, controlnet_input.inpaint_mask) - - image = pipe.preprocess_image(image).to(device=pipe.device, dtype=pipe.torch_dtype) - image = pipe.vae_encoder(image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - - if controlnet_input.inpaint_mask is not None: - image = self.apply_controlnet_mask_on_latents(pipe, image, controlnet_input.inpaint_mask) - conditionings.append(image) - return {"controlnet_conditionings": conditionings} - - - -class FluxImageUnit_IPAdapter(PipelineUnit): - def __init__(self): - super().__init__( - take_over=True, - onload_model_names=("ipadapter_image_encoder", "ipadapter") - ) - - def process(self, pipe: FluxImagePipeline, inputs_shared, inputs_posi, inputs_nega): - ipadapter_images, ipadapter_scale = inputs_shared.get("ipadapter_images", None), inputs_shared.get("ipadapter_scale", 1.0) - if ipadapter_images is None: - return inputs_shared, inputs_posi, inputs_nega - if not isinstance(ipadapter_images, list): - ipadapter_images = [ipadapter_images] - - pipe.load_models_to_device(self.onload_model_names) - images = [image.convert("RGB").resize((384, 384), resample=3) for image in ipadapter_images] - images = [pipe.preprocess_image(image).to(device=pipe.device, dtype=pipe.torch_dtype) for image in images] - ipadapter_images = torch.cat(images, dim=0) - ipadapter_image_encoding = pipe.ipadapter_image_encoder(ipadapter_images).pooler_output - - 
class FluxImageUnit_EntityControl(PipelineUnit):
    """EliGen entity control: encodes per-entity prompts and spatial masks so the
    DiT can attend each entity prompt only to its masked image region."""

    def __init__(self):
        super().__init__(
            take_over=True,
            onload_model_names=("text_encoder_1", "text_encoder_2")
        )

    def preprocess_masks(self, pipe, masks, height, width, dim):
        # Binarize each mask at the latent resolution (nearest-neighbor keeps hard edges),
        # then broadcast it to `dim` channels on the pipeline device/dtype.
        out_masks = []
        for mask in masks:
            mask = pipe.preprocess_image(mask.resize((width, height), resample=Image.NEAREST)).mean(dim=1, keepdim=True) > 0
            mask = mask.repeat(1, dim, 1, 1).to(device=pipe.device, dtype=pipe.torch_dtype)
            out_masks.append(mask)
        return out_masks

    def prepare_entity_inputs(self, pipe, entity_prompts, entity_masks, width, height, t5_sequence_length=512):
        # Masks are prepared at 1/8 resolution — the VAE latent grid.
        entity_masks = self.preprocess_masks(pipe, entity_masks, height//8, width//8, 1)
        entity_masks = torch.cat(entity_masks, dim=0).unsqueeze(0) # b, n_mask, c, h, w

        prompt_emb, _, _ = pipe.prompter.encode_prompt(
            entity_prompts, device=pipe.device, t5_sequence_length=t5_sequence_length
        )
        return prompt_emb.unsqueeze(0), entity_masks

    def prepare_eligen(self, pipe, prompt_emb_nega, eligen_entity_prompts, eligen_entity_masks, width, height, t5_sequence_length, enable_eligen_on_negative, cfg_scale):
        # Positive side always gets entity prompts; the negative side reuses the global
        # negative prompt embedding per entity mask only when explicitly enabled under CFG.
        entity_prompt_emb_posi, entity_masks_posi = self.prepare_entity_inputs(pipe, eligen_entity_prompts, eligen_entity_masks, width, height, t5_sequence_length)
        if enable_eligen_on_negative and cfg_scale != 1.0:
            entity_prompt_emb_nega = prompt_emb_nega['prompt_emb'].unsqueeze(1).repeat(1, entity_masks_posi.shape[1], 1, 1)
            entity_masks_nega = entity_masks_posi
        else:
            entity_prompt_emb_nega, entity_masks_nega = None, None
        eligen_kwargs_posi = {"entity_prompt_emb": entity_prompt_emb_posi, "entity_masks": entity_masks_posi}
        eligen_kwargs_nega = {"entity_prompt_emb": entity_prompt_emb_nega, "entity_masks": entity_masks_nega}
        return eligen_kwargs_posi, eligen_kwargs_nega

    def process(self, pipe: FluxImagePipeline, inputs_shared, inputs_posi, inputs_nega):
        eligen_entity_prompts, eligen_entity_masks = inputs_shared.get("eligen_entity_prompts", None), inputs_shared.get("eligen_entity_masks", None)
        if eligen_entity_prompts is None or eligen_entity_masks is None:
            return inputs_shared, inputs_posi, inputs_nega
        pipe.load_models_to_device(self.onload_model_names)
        eligen_enable_on_negative = inputs_shared.get("eligen_enable_on_negative", False)
        # NOTE(review): `inputs_nega` is passed as `prompt_emb_nega` and indexed with
        # ['prompt_emb'] inside prepare_eligen — assumes the negative prompt was already
        # encoded by an earlier unit; confirm unit ordering.
        eligen_kwargs_posi, eligen_kwargs_nega = self.prepare_eligen(pipe, inputs_nega,
            eligen_entity_prompts, eligen_entity_masks, inputs_shared["width"], inputs_shared["height"],
            inputs_shared["t5_sequence_length"], eligen_enable_on_negative, inputs_shared["cfg_scale"])
        inputs_posi.update(eligen_kwargs_posi)
        if inputs_shared.get("cfg_scale", 1.0) != 1.0:
            inputs_nega.update(eligen_kwargs_nega)
        return inputs_shared, inputs_posi, inputs_nega
class FluxImageUnit_NexusGen(PipelineUnit):
    """Replaces the text embedding with NexusGen LLM features, either for plain
    generation or for reference-image editing."""

    def __init__(self):
        super().__init__(
            take_over=True,
            onload_model_names=("nexus_gen", "nexus_gen_generation_adapter", "nexus_gen_editing_adapter"),
        )

    def process(self, pipe: FluxImagePipeline, inputs_shared, inputs_posi, inputs_nega):
        # No-op unless a NexusGen model is attached to the pipeline.
        if pipe.nexus_gen is None:
            return inputs_shared, inputs_posi, inputs_nega
        pipe.load_models_to_device(self.onload_model_names)
        if inputs_shared.get("nexus_gen_reference_image", None) is None:
            # Generation: project the LLM prompt features through the generation adapter.
            assert pipe.nexus_gen_generation_adapter is not None, "NexusGen requires a generation adapter to be set."
            embed = pipe.nexus_gen(inputs_posi["prompt"])[0].unsqueeze(0)
            inputs_posi["prompt_emb"] = pipe.nexus_gen_generation_adapter(embed)
            inputs_posi['text_ids'] = torch.zeros(embed.shape[0], embed.shape[1], 3).to(device=pipe.device, dtype=pipe.torch_dtype)
        else:
            # Editing: fuse prompt features with reference-image features via the editing adapter.
            assert pipe.nexus_gen_editing_adapter is not None, "NexusGen requires an editing adapter to be set."
            embed, ref_embed, grids = pipe.nexus_gen(inputs_posi["prompt"], inputs_shared["nexus_gen_reference_image"])
            # grids[1] is the target grid, grids[0] the reference grid; presumably
            # (t, h, w) triples — TODO confirm against the NexusGen model.
            embeds_grid = grids[1:2].to(device=pipe.device, dtype=torch.long)
            ref_embeds_grid = grids[0:1].to(device=pipe.device, dtype=torch.long)

            inputs_posi["prompt_emb"] = pipe.nexus_gen_editing_adapter(embed.unsqueeze(0), embeds_grid, ref_embed.unsqueeze(0), ref_embeds_grid)
            inputs_posi["text_ids"] = self.get_editing_text_ids(
                inputs_shared["latents"],
                embeds_grid[0][1].item(), embeds_grid[0][2].item(),
                ref_embeds_grid[0][1].item(), ref_embeds_grid[0][2].item(),
            )
        return inputs_shared, inputs_posi, inputs_nega


    def get_editing_text_ids(self, latents, target_embed_height, target_embed_width, ref_embed_height, ref_embed_width):
        # prepare text ids for target and reference embeddings
        # Positions are scaled so the embedding grid (height//2 x width//2) covers
        # the full latent grid.
        batch_size, height, width = latents.shape[0], target_embed_height, target_embed_width
        embed_ids = torch.zeros(height // 2, width // 2, 3)
        scale_factor_height, scale_factor_width = latents.shape[-2] / height, latents.shape[-1] / width
        embed_ids[..., 1] = embed_ids[..., 1] + torch.arange(height // 2)[:, None] * scale_factor_height
        embed_ids[..., 2] = embed_ids[..., 2] + torch.arange(width // 2)[None, :] * scale_factor_width
        embed_ids = embed_ids[None, :].repeat(batch_size, 1, 1, 1).reshape(batch_size, height // 2 * width // 2, 3)
        embed_text_ids = embed_ids.to(device=latents.device, dtype=latents.dtype)

        batch_size, height, width = latents.shape[0], ref_embed_height, ref_embed_width
        ref_embed_ids = torch.zeros(height // 2, width // 2, 3)
        scale_factor_height, scale_factor_width = latents.shape[-2] / height, latents.shape[-1] / width
        # Channel 0 set to 1 marks reference tokens, mirroring the Kontext convention.
        ref_embed_ids[..., 0] = ref_embed_ids[..., 0] + 1.0
        ref_embed_ids[..., 1] = ref_embed_ids[..., 1] + torch.arange(height // 2)[:, None] * scale_factor_height
        ref_embed_ids[..., 2] = ref_embed_ids[..., 2] + torch.arange(width // 2)[None, :] * scale_factor_width
        ref_embed_ids = ref_embed_ids[None, :].repeat(batch_size, 1, 1, 1).reshape(batch_size, height // 2 * width // 2, 3)
        ref_embed_text_ids = ref_embed_ids.to(device=latents.device, dtype=latents.dtype)

        text_ids = torch.cat([embed_text_ids, ref_embed_text_ids], dim=1)
        return text_ids


class FluxImageUnit_Step1x(PipelineUnit):
    """Step1X-Edit: encodes the prompt pair with Qwen-VL against the reference image
    and VAE-encodes the reference image itself."""

    def __init__(self):
        super().__init__(take_over=True, onload_model_names=("qwenvl", "vae_encoder"))

    def process(self, pipe: FluxImagePipeline, inputs_shared: dict, inputs_posi: dict, inputs_nega: dict):
        image = inputs_shared.get("step1x_reference_image", None)
        if image is None:
            return inputs_shared, inputs_posi, inputs_nega
        else:
            pipe.load_models_to_device(self.onload_model_names)
            prompt = inputs_posi["prompt"]
            nega_prompt = inputs_nega["negative_prompt"]
            # Positive and negative prompts are encoded in a single batch of two,
            # both conditioned on the same reference image.
            captions = [prompt, nega_prompt]
            ref_images = [image, image]
            embs, masks = pipe.qwenvl(captions, ref_images)
            image = pipe.preprocess_image(image).to(device=pipe.device, dtype=pipe.torch_dtype)
            image = pipe.vae_encoder(image)
            inputs_posi.update({"step1x_llm_embedding": embs[0:1], "step1x_mask": masks[0:1], "step1x_reference_latents": image})
            if inputs_shared.get("cfg_scale", 1) != 1:
                inputs_nega.update({"step1x_llm_embedding": embs[1:2], "step1x_mask": masks[1:2], "step1x_reference_latents": image})
            return inputs_shared, inputs_posi, inputs_nega


class FluxImageUnit_TeaCache(PipelineUnit):
    """Instantiates a TeaCache accelerator when a rel-L1 threshold is provided."""

    def __init__(self):
        super().__init__(input_params=("num_inference_steps", "tea_cache_l1_thresh"))

    def process(self, pipe: FluxImagePipeline, num_inference_steps, tea_cache_l1_thresh):
        if tea_cache_l1_thresh is None:
            return {}
        else:
            return {"tea_cache": TeaCache(num_inference_steps=num_inference_steps, rel_l1_thresh=tea_cache_l1_thresh)}
class FluxImageUnit_Flex(PipelineUnit):
    """Builds the extra FLEX conditioning channels (inpaint latent, inpaint mask,
    control latent) that are concatenated onto the denoised latents."""

    def __init__(self):
        super().__init__(
            input_params=("latents", "flex_inpaint_image", "flex_inpaint_mask", "flex_control_image", "flex_control_strength", "flex_control_stop", "tiled", "tile_size", "tile_stride"),
            onload_model_names=("vae_encoder",)
        )

    def process(self, pipe: FluxImagePipeline, latents, flex_inpaint_image, flex_inpaint_mask, flex_control_image, flex_control_strength, flex_control_stop, tiled, tile_size, tile_stride):
        # input_dim == 196 identifies the FLEX DiT variant that expects the extra
        # condition channels — NOTE(review): confirm 196 is exclusive to FLEX.
        if pipe.dit.input_dim == 196:
            if flex_control_stop is None:
                # Default: keep control active through the entire schedule.
                flex_control_stop = 1
            pipe.load_models_to_device(self.onload_model_names)
            # Inpaint image: VAE-encode if provided, otherwise an all-zero latent.
            if flex_inpaint_image is None:
                flex_inpaint_image = torch.zeros_like(latents)
            else:
                flex_inpaint_image = pipe.preprocess_image(flex_inpaint_image).to(device=pipe.device, dtype=pipe.torch_dtype)
                flex_inpaint_image = pipe.vae_encoder(flex_inpaint_image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride)
            # Inpaint mask: one channel at the latent resolution, mapped to [0, 1].
            if flex_inpaint_mask is None:
                flex_inpaint_mask = torch.ones_like(latents)[:, 0:1, :, :]
            else:
                flex_inpaint_mask = flex_inpaint_mask.resize((latents.shape[3], latents.shape[2]))
                flex_inpaint_mask = pipe.preprocess_image(flex_inpaint_mask).to(device=pipe.device, dtype=pipe.torch_dtype)
                flex_inpaint_mask = (flex_inpaint_mask[:, 0:1, :, :] + 1) / 2
            # Keep only the latent content outside the mask.
            flex_inpaint_image = flex_inpaint_image * (1 - flex_inpaint_mask)
            # Control image: VAE-encode and scale by strength, or an all-zero latent.
            if flex_control_image is None:
                flex_control_image = torch.zeros_like(latents)
            else:
                flex_control_image = pipe.preprocess_image(flex_control_image).to(device=pipe.device, dtype=pipe.torch_dtype)
                flex_control_image = pipe.vae_encoder(flex_control_image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) * flex_control_strength
            flex_condition = torch.concat([flex_inpaint_image, flex_inpaint_mask, flex_control_image], dim=1)
            # The "uncondition" variant zeroes the control channels so control can be
            # switched off once the schedule passes flex_control_stop.
            flex_uncondition = torch.concat([flex_inpaint_image, flex_inpaint_mask, torch.zeros_like(flex_control_image)], dim=1)
            flex_control_stop_timestep = pipe.scheduler.timesteps[int(flex_control_stop * (len(pipe.scheduler.timesteps) - 1))]
            return {"flex_condition": flex_condition, "flex_uncondition": flex_uncondition, "flex_control_stop_timestep": flex_control_stop_timestep}
        else:
            return {}
class FluxImageUnit_InfiniteYou(PipelineUnit):
    """Produces the InfiniteYou identity embedding from the provided ID image."""

    def __init__(self):
        super().__init__(
            input_params=("infinityou_id_image", "infinityou_guidance"),
            onload_model_names=("infinityou_processor",)
        )

    def process(self, pipe: FluxImagePipeline, infinityou_id_image, infinityou_guidance):
        if infinityou_id_image is None:
            return {}
        # Fix: pass the model name as a list — every other call site hands
        # load_models_to_device an iterable of names, and a bare string would be
        # iterated character-by-character by a list-based loader. Also skip
        # onloading entirely when there is no ID image to process.
        pipe.load_models_to_device(["infinityou_processor"])
        return pipe.infinityou_processor.prepare_infinite_you(pipe.image_proj_model, infinityou_id_image, infinityou_guidance, pipe.device)



class FluxImageUnit_ValueControl(PipelineUnit):
    """Appends value-controller embeddings (as extra text tokens with zero
    positional ids) to both CFG branches' prompt embeddings."""

    def __init__(self):
        super().__init__(
            seperate_cfg=True,
            input_params_posi={"prompt_emb": "prompt_emb", "text_ids": "text_ids"},
            input_params_nega={"prompt_emb": "prompt_emb", "text_ids": "text_ids"},
            input_params=("value_controller_inputs",),
            onload_model_names=("value_controller",)
        )

    def add_to_text_embedding(self, prompt_emb, text_ids, value_emb):
        # The appended tokens get all-zero (text-like) positional ids.
        prompt_emb = torch.concat([prompt_emb, value_emb], dim=1)
        extra_text_ids = torch.zeros((value_emb.shape[0], value_emb.shape[1], 3), device=value_emb.device, dtype=value_emb.dtype)
        text_ids = torch.concat([text_ids, extra_text_ids], dim=1)
        return prompt_emb, text_ids

    def process(self, pipe: FluxImagePipeline, prompt_emb, text_ids, value_controller_inputs):
        if value_controller_inputs is None:
            return {}
        if not isinstance(value_controller_inputs, list):
            value_controller_inputs = [value_controller_inputs]
        value_controller_inputs = torch.tensor(value_controller_inputs).to(dtype=pipe.torch_dtype, device=pipe.device)
        pipe.load_models_to_device(["value_controller"])
        value_emb = pipe.value_controller(value_controller_inputs, pipe.torch_dtype)
        value_emb = value_emb.unsqueeze(0)
        prompt_emb, text_ids = self.add_to_text_embedding(prompt_emb, text_ids, value_emb)
        return {"prompt_emb": prompt_emb, "text_ids": text_ids}
class InfinitYou(torch.nn.Module):
    """Face-analysis helper for InfiniteYou: detects the largest face in an ID image
    and produces an ArcFace identity embedding projected by `image_proj_model`."""

    def __init__(self, device="cuda", torch_dtype=torch.bfloat16):
        super().__init__()
        # Imported lazily so facexlib/insightface are only required when
        # InfiniteYou is actually used.
        from facexlib.recognition import init_recognition_model
        from insightface.app import FaceAnalysis
        self.device = device
        self.torch_dtype = torch_dtype
        insightface_root_path = 'models/ByteDance/InfiniteYou/supports/insightface'
        # Three detectors at decreasing detection sizes; the smaller sizes act as
        # fallbacks for faces the larger detector misses.
        self.app_640 = FaceAnalysis(name='antelopev2', root=insightface_root_path, providers=['CUDAExecutionProvider', 'CPUExecutionProvider'])
        self.app_640.prepare(ctx_id=0, det_size=(640, 640))
        self.app_320 = FaceAnalysis(name='antelopev2', root=insightface_root_path, providers=['CUDAExecutionProvider', 'CPUExecutionProvider'])
        self.app_320.prepare(ctx_id=0, det_size=(320, 320))
        self.app_160 = FaceAnalysis(name='antelopev2', root=insightface_root_path, providers=['CUDAExecutionProvider', 'CPUExecutionProvider'])
        self.app_160.prepare(ctx_id=0, det_size=(160, 160))
        self.arcface_model = init_recognition_model('arcface', device=self.device).to(torch_dtype)

    def _detect_face(self, id_image_cv2):
        # Try detectors from largest to smallest det_size, returning on first hit.
        face_info = self.app_640.get(id_image_cv2)
        if len(face_info) > 0:
            return face_info
        face_info = self.app_320.get(id_image_cv2)
        if len(face_info) > 0:
            return face_info
        face_info = self.app_160.get(id_image_cv2)
        return face_info

    def extract_arcface_bgr_embedding(self, in_image, landmark, device):
        # Align the face crop to 112x112 using the 5-point landmarks, normalize to
        # [-1, 1], and run ArcFace.
        from insightface.utils import face_align
        arc_face_image = face_align.norm_crop(in_image, landmark=np.array(landmark), image_size=112)
        arc_face_image = torch.from_numpy(arc_face_image).unsqueeze(0).permute(0, 3, 1, 2) / 255.
        arc_face_image = 2 * arc_face_image - 1
        arc_face_image = arc_face_image.contiguous().to(device=device, dtype=self.torch_dtype)
        face_emb = self.arcface_model(arc_face_image)[0] # [512], normalized
        return face_emb

    def prepare_infinite_you(self, model, id_image, infinityou_guidance, device):
        import cv2
        if id_image is None:
            return {'id_emb': None}
        # insightface expects BGR input.
        id_image_cv2 = cv2.cvtColor(np.array(id_image), cv2.COLOR_RGB2BGR)
        face_info = self._detect_face(id_image_cv2)
        if len(face_info) == 0:
            raise ValueError('No face detected in the input ID image')
        landmark = sorted(face_info, key=lambda x:(x['bbox'][2]-x['bbox'][0])*(x['bbox'][3]-x['bbox'][1]))[-1]['kps'] # only use the maximum face
        id_emb = self.extract_arcface_bgr_embedding(id_image_cv2, landmark, device)
        id_emb = model(id_emb.unsqueeze(0).reshape([1, -1, 512]).to(dtype=self.torch_dtype))
        infinityou_guidance = torch.Tensor([infinityou_guidance]).to(device=device, dtype=self.torch_dtype)
        return {'id_emb': id_emb, 'infinityou_guidance': infinityou_guidance}
class FluxImageUnit_LoRAEncode(PipelineUnit):
    """Encodes LoRA weight files into embeddings appended to the positive prompt
    embedding (LoRA-as-conditioning)."""

    def __init__(self):
        super().__init__(
            take_over=True,
            onload_model_names=("lora_encoder",)
        )

    def parse_lora_encoder_inputs(self, lora_encoder_inputs):
        # Accept a single path/config or a list; bare strings become ModelConfigs,
        # and every config is downloaded if not present locally.
        if not isinstance(lora_encoder_inputs, list):
            lora_encoder_inputs = [lora_encoder_inputs]
        lora_configs = []
        for lora_encoder_input in lora_encoder_inputs:
            if isinstance(lora_encoder_input, str):
                lora_encoder_input = ModelConfig(path=lora_encoder_input)
            lora_encoder_input.download_if_necessary()
            lora_configs.append(lora_encoder_input)
        return lora_configs

    def load_lora(self, lora_config, dtype, device):
        # Load the raw state dict and convert it to the FLUX LoRA layout.
        loader = FluxLoRALoader(torch_dtype=dtype, device=device)
        lora = load_state_dict(lora_config.path, torch_dtype=dtype, device=device)
        lora = loader.convert_state_dict(lora)
        return lora

    def lora_embedding(self, pipe, lora_encoder_inputs):
        # Encode each LoRA and concatenate the embeddings along the token axis.
        lora_emb = []
        for lora_config in self.parse_lora_encoder_inputs(lora_encoder_inputs):
            lora = self.load_lora(lora_config, pipe.torch_dtype, pipe.device)
            lora_emb.append(pipe.lora_encoder(lora))
        lora_emb = torch.concat(lora_emb, dim=1)
        return lora_emb

    def add_to_text_embedding(self, prompt_emb, text_ids, lora_emb):
        # Appended LoRA tokens get all-zero (text-like) positional ids.
        prompt_emb = torch.concat([prompt_emb, lora_emb], dim=1)
        extra_text_ids = torch.zeros((lora_emb.shape[0], lora_emb.shape[1], 3), device=lora_emb.device, dtype=lora_emb.dtype)
        text_ids = torch.concat([text_ids, extra_text_ids], dim=1)
        return prompt_emb, text_ids

    def process(self, pipe: FluxImagePipeline, inputs_shared, inputs_posi, inputs_nega):
        if inputs_shared.get("lora_encoder_inputs", None) is None:
            return inputs_shared, inputs_posi, inputs_nega

        # Encode
        pipe.load_models_to_device(["lora_encoder"])
        lora_encoder_inputs = inputs_shared["lora_encoder_inputs"]
        lora_emb = self.lora_embedding(pipe, lora_encoder_inputs)

        # Scale
        lora_encoder_scale = inputs_shared.get("lora_encoder_scale", None)
        if lora_encoder_scale is not None:
            lora_emb = lora_emb * lora_encoder_scale

        # Add to prompt embedding (positive branch only).
        inputs_posi["prompt_emb"], inputs_posi["text_ids"] = self.add_to_text_embedding(
            inputs_posi["prompt_emb"], inputs_posi["text_ids"], lora_emb)
        return inputs_shared, inputs_posi, inputs_nega
class TeaCache:
    """TeaCache skip-step accelerator.

    `check` decides, from the change in the first block's modulated input, whether
    this denoising step can reuse the cached residual (`update`) instead of running
    the full transformer; when the transformer does run, `store` records the new
    residual for subsequent cached steps.
    """

    # Polynomial rescaling of the relative L1 distance (highest degree first).
    _POLY_COEFFS = [4.98651651e+02, -2.83781631e+02, 5.58554382e+01, -3.82021401e+00, 2.64230861e-01]

    def __init__(self, num_inference_steps, rel_l1_thresh):
        self.num_inference_steps = num_inference_steps
        self.step = 0
        self.accumulated_rel_l1_distance = 0
        self.previous_modulated_input = None
        self.rel_l1_thresh = rel_l1_thresh
        self.previous_residual = None
        self.previous_hidden_states = None

    def check(self, dit: "FluxDiT", hidden_states, conditioning):
        """Return True when the cached residual may be reused for this step."""
        modulated, _, _, _, _ = dit.blocks[0].norm1_a(hidden_states.clone(), emb=conditioning.clone())
        # First and last steps are always computed in full.
        if self.step == 0 or self.step == self.num_inference_steps - 1:
            should_calc = True
            self.accumulated_rel_l1_distance = 0
        else:
            rel_change = ((modulated - self.previous_modulated_input).abs().mean() / self.previous_modulated_input.abs().mean()).cpu().item()
            self.accumulated_rel_l1_distance += np.polyval(self._POLY_COEFFS, rel_change)
            should_calc = self.accumulated_rel_l1_distance >= self.rel_l1_thresh
            if should_calc:
                self.accumulated_rel_l1_distance = 0
        self.previous_modulated_input = modulated
        # Wrap the step counter at the end of each schedule.
        self.step = (self.step + 1) % self.num_inference_steps
        if should_calc:
            # Keep the pre-transformer states so `store` can compute the residual.
            self.previous_hidden_states = hidden_states.clone()
        return not should_calc

    def store(self, hidden_states):
        """Record the residual produced by a full transformer pass."""
        self.previous_residual = hidden_states - self.previous_hidden_states
        self.previous_hidden_states = None

    def update(self, hidden_states):
        """Apply the cached residual instead of running the transformer."""
        return hidden_states + self.previous_residual
def model_fn_flux_image(
    dit: FluxDiT,
    controlnet=None,
    step1x_connector=None,
    latents=None,
    timestep=None,
    prompt_emb=None,
    pooled_prompt_emb=None,
    guidance=None,
    text_ids=None,
    image_ids=None,
    kontext_latents=None,
    kontext_image_ids=None,
    controlnet_inputs=None,
    controlnet_conditionings=None,
    tiled=False,
    tile_size=128,
    tile_stride=64,
    entity_prompt_emb=None,
    entity_masks=None,
    ipadapter_kwargs_list=None,
    id_emb=None,
    infinityou_guidance=None,
    flex_condition=None,
    flex_uncondition=None,
    flex_control_stop_timestep=None,
    step1x_llm_embedding=None,
    step1x_mask=None,
    step1x_reference_latents=None,
    tea_cache: TeaCache = None,
    progress_id=0,
    num_inference_steps=1,
    use_gradient_checkpointing=False,
    use_gradient_checkpointing_offload=False,
    **kwargs
):
    """Single FLUX DiT forward pass with optional ControlNet, Kontext, Step1X,
    FLEX, EliGen, IP-Adapter and TeaCache extensions.

    Returns the predicted latent update with the same spatial shape as `latents`.
    """
    # Fix: `ipadapter_kwargs_list` previously used a mutable default argument ({});
    # normalize None to a fresh dict instead.
    if ipadapter_kwargs_list is None:
        ipadapter_kwargs_list = {}

    if tiled:
        # Tiled inference: run this same function per spatial tile (with tiling
        # disabled) and let FastTileWorker blend the results.
        def flux_forward_fn(hl, hr, wl, wr):
            tiled_controlnet_conditionings = [f[:, :, hl: hr, wl: wr] for f in controlnet_conditionings] if controlnet_conditionings is not None else None
            return model_fn_flux_image(
                dit=dit,
                controlnet=controlnet,
                latents=latents[:, :, hl: hr, wl: wr],
                timestep=timestep,
                prompt_emb=prompt_emb,
                pooled_prompt_emb=pooled_prompt_emb,
                guidance=guidance,
                text_ids=text_ids,
                image_ids=None,
                controlnet_inputs=controlnet_inputs,
                controlnet_conditionings=tiled_controlnet_conditionings,
                tiled=False,
                **kwargs
            )
        return FastTileWorker().tiled_forward(
            flux_forward_fn,
            latents,
            tile_size=tile_size,
            tile_stride=tile_stride,
            tile_device=latents.device,
            tile_dtype=latents.dtype
        )

    hidden_states = latents

    # ControlNet
    if controlnet is not None and controlnet_conditionings is not None:
        controlnet_extra_kwargs = {
            "hidden_states": hidden_states,
            "timestep": timestep,
            "prompt_emb": prompt_emb,
            "pooled_prompt_emb": pooled_prompt_emb,
            "guidance": guidance,
            "text_ids": text_ids,
            "image_ids": image_ids,
            "controlnet_inputs": controlnet_inputs,
            "tiled": tiled,
            "tile_size": tile_size,
            "tile_stride": tile_stride,
            "progress_id": progress_id,
            "num_inference_steps": num_inference_steps,
        }
        # InfiniteYou routes the identity embedding through the ControlNet text path.
        if id_emb is not None:
            controlnet_text_ids = torch.zeros(id_emb.shape[0], id_emb.shape[1], 3).to(device=hidden_states.device, dtype=hidden_states.dtype)
            controlnet_extra_kwargs.update({"prompt_emb": id_emb, 'text_ids': controlnet_text_ids, 'guidance': infinityou_guidance})
        controlnet_res_stack, controlnet_single_res_stack = controlnet(
            controlnet_conditionings, **controlnet_extra_kwargs
        )

    # Flex: swap in the no-control condition once the schedule passes the stop timestep.
    if flex_condition is not None:
        if timestep.tolist()[0] >= flex_control_stop_timestep:
            hidden_states = torch.concat([hidden_states, flex_condition], dim=1)
        else:
            hidden_states = torch.concat([hidden_states, flex_uncondition], dim=1)

    # Step1x: derive prompt/pooled embeddings from the LLM embedding at this timestep.
    if step1x_llm_embedding is not None:
        prompt_emb, pooled_prompt_emb = step1x_connector(step1x_llm_embedding, timestep / 1000, step1x_mask)
        text_ids = torch.zeros((1, prompt_emb.shape[1], 3), dtype=prompt_emb.dtype, device=prompt_emb.device)

    if image_ids is None:
        image_ids = dit.prepare_image_ids(hidden_states)

    conditioning = dit.time_embedder(timestep, hidden_states.dtype) + dit.pooled_text_embedder(pooled_prompt_emb)
    if dit.guidance_embedder is not None:
        guidance = guidance * 1000
        conditioning = conditioning + dit.guidance_embedder(guidance, hidden_states.dtype)

    height, width = hidden_states.shape[-2:]
    hidden_states = dit.patchify(hidden_states)

    # Kontext: append reference tokens and their positional ids.
    if kontext_latents is not None:
        image_ids = torch.concat([image_ids, kontext_image_ids], dim=-2)
        hidden_states = torch.concat([hidden_states, kontext_latents], dim=1)

    # Step1x: append the VAE-encoded reference image tokens.
    if step1x_reference_latents is not None:
        step1x_reference_image_ids = dit.prepare_image_ids(step1x_reference_latents)
        step1x_reference_latents = dit.patchify(step1x_reference_latents)
        image_ids = torch.concat([image_ids, step1x_reference_image_ids], dim=-2)
        hidden_states = torch.concat([hidden_states, step1x_reference_latents], dim=1)

    hidden_states = dit.x_embedder(hidden_states)

    # EliGen: entity masks produce a custom attention mask and rotary embedding.
    if entity_prompt_emb is not None and entity_masks is not None:
        prompt_emb, image_rotary_emb, attention_mask = dit.process_entity_masks(hidden_states, prompt_emb, entity_prompt_emb, entity_masks, text_ids, image_ids, latents.shape[1])
    else:
        prompt_emb = dit.context_embedder(prompt_emb)
        image_rotary_emb = dit.pos_embedder(torch.cat((text_ids, image_ids), dim=1))
        attention_mask = None

    # TeaCache: decide whether this step can reuse the cached residual.
    if tea_cache is not None:
        tea_cache_update = tea_cache.check(dit, hidden_states, conditioning)
    else:
        tea_cache_update = False

    if tea_cache_update:
        hidden_states = tea_cache.update(hidden_states)
    else:
        # Joint Blocks
        for block_id, block in enumerate(dit.blocks):
            hidden_states, prompt_emb = gradient_checkpoint_forward(
                block,
                use_gradient_checkpointing,
                use_gradient_checkpointing_offload,
                hidden_states,
                prompt_emb,
                conditioning,
                image_rotary_emb,
                attention_mask,
                ipadapter_kwargs_list=ipadapter_kwargs_list.get(block_id, None),
            )
            # ControlNet residuals; Kontext tokens at the tail are excluded.
            if controlnet is not None and controlnet_conditionings is not None and controlnet_res_stack is not None:
                if kontext_latents is None:
                    hidden_states = hidden_states + controlnet_res_stack[block_id]
                else:
                    hidden_states[:, :-kontext_latents.shape[1]] = hidden_states[:, :-kontext_latents.shape[1]] + controlnet_res_stack[block_id]

        # Single Blocks (text and image tokens concatenated into one sequence).
        hidden_states = torch.cat([prompt_emb, hidden_states], dim=1)
        num_joint_blocks = len(dit.blocks)
        for block_id, block in enumerate(dit.single_blocks):
            hidden_states, prompt_emb = gradient_checkpoint_forward(
                block,
                use_gradient_checkpointing,
                use_gradient_checkpointing_offload,
                hidden_states,
                prompt_emb,
                conditioning,
                image_rotary_emb,
                attention_mask,
                ipadapter_kwargs_list=ipadapter_kwargs_list.get(block_id + num_joint_blocks, None),
            )
            # ControlNet residuals apply only to the image-token span.
            if controlnet is not None and controlnet_conditionings is not None and controlnet_single_res_stack is not None:
                if kontext_latents is None:
                    hidden_states[:, prompt_emb.shape[1]:] = hidden_states[:, prompt_emb.shape[1]:] + controlnet_single_res_stack[block_id]
                else:
                    hidden_states[:, prompt_emb.shape[1]:-kontext_latents.shape[1]] = hidden_states[:, prompt_emb.shape[1]:-kontext_latents.shape[1]] + controlnet_single_res_stack[block_id]
        hidden_states = hidden_states[:, prompt_emb.shape[1]:]

        if tea_cache is not None:
            tea_cache.store(hidden_states)

    hidden_states = dit.final_norm_out(hidden_states, conditioning)
    hidden_states = dit.final_proj_out(hidden_states)

    # Step1x: drop the reference-image half of the sequence.
    if step1x_reference_latents is not None:
        hidden_states = hidden_states[:, :hidden_states.shape[1] // 2]

    # Kontext: drop the appended reference tokens.
    if kontext_latents is not None:
        hidden_states = hidden_states[:, :-kontext_latents.shape[1]]

    hidden_states = dit.unpatchify(hidden_states, height, width)

    return hidden_states
class ImageSizeManager:
    """Computes HunyuanDiT 2D rotary positional embeddings for a target image size."""

    def __init__(self):
        pass


    def _to_tuple(self, x):
        # Accept either a scalar (square size) or an (h, w) pair.
        return (x, x) if isinstance(x, int) else x


    def get_fill_resize_and_crop(self, src, tgt):
        """Fit `src` into `tgt` preserving `src`'s aspect ratio; return the
        centered ((top, left), (bottom, right)) crop box inside `tgt`."""
        target_h, target_w = self._to_tuple(tgt)
        src_h, src_w = self._to_tuple(src)

        target_ratio = target_h / target_w
        src_ratio = src_h / src_w

        # Scale so the longer relative side exactly fills the target.
        if src_ratio > target_ratio:
            resize_h = target_h
            resize_w = int(round(target_h / src_h * src_w))
        else:
            resize_w = target_w
            resize_h = int(round(target_w / src_w * src_h))

        top = int(round((target_h - resize_h) / 2.0))
        left = int(round((target_w - resize_w) / 2.0))
        return (top, left), (top + resize_h, left + resize_w)


    def get_meshgrid(self, start, *args):
        """Build a [2, H, W] float32 sampling grid from (size) / (start, stop) /
        (start, stop, num) arguments."""
        if len(args) == 0:
            # Single argument: it is the grid size, spanning [0, size).
            num = self._to_tuple(start)
            start = (0, 0)
            stop = num
        elif len(args) == 1:
            # (start, stop) with unit step.
            start = self._to_tuple(start)
            stop = self._to_tuple(args[0])
            num = (stop[0] - start[0], stop[1] - start[1])
        elif len(args) == 2:
            # (start, stop, num): `num` samples over [start, stop).
            start = self._to_tuple(start)
            stop = self._to_tuple(args[0])
            num = self._to_tuple(args[1])
        else:
            raise ValueError(f"len(args) should be 0, 1 or 2, but got {len(args)}")

        rows = np.linspace(start[0], stop[0], num[0], endpoint=False, dtype=np.float32)
        cols = np.linspace(start[1], stop[1], num[1], endpoint=False, dtype=np.float32)
        # meshgrid takes width first here, matching the original layout.
        return np.stack(np.meshgrid(cols, rows), axis=0)


    def get_2d_rotary_pos_embed(self, embed_dim, start, *args, use_real=True):
        grid = self.get_meshgrid(start, *args)
        grid = grid.reshape([2, 1, *grid.shape[1:]])
        return self.get_2d_rotary_pos_embed_from_grid(embed_dim, grid, use_real=use_real)


    def get_2d_rotary_pos_embed_from_grid(self, embed_dim, grid, use_real=False):
        assert embed_dim % 4 == 0

        # Half of the dimensions encode rows, the other half columns.
        emb_h = self.get_1d_rotary_pos_embed(embed_dim // 2, grid[0].reshape(-1), use_real=use_real)
        emb_w = self.get_1d_rotary_pos_embed(embed_dim // 2, grid[1].reshape(-1), use_real=use_real)

        if use_real:
            # (cos, sin) pair, each of shape (H*W, D/2) per axis -> concatenated to D.
            return torch.cat([emb_h[0], emb_w[0]], dim=1), torch.cat([emb_h[1], emb_w[1]], dim=1)
        return torch.cat([emb_h, emb_w], dim=1)


    def get_1d_rotary_pos_embed(self, dim: int, pos, theta: float = 10000.0, use_real=False):
        if isinstance(pos, int):
            pos = np.arange(pos)
        exponents = torch.arange(0, dim, 2)[: (dim // 2)].float() / dim
        inv_freqs = 1.0 / (theta ** exponents)                                   # [D/2]
        angles = torch.outer(torch.from_numpy(pos).to(inv_freqs.device), inv_freqs).float()  # [S, D/2]
        if use_real:
            return angles.cos().repeat_interleave(2, dim=1), angles.sin().repeat_interleave(2, dim=1)
        # Complex form: e^{i*angle} as complex64, shape [S, D/2].
        return torch.polar(torch.ones_like(angles), angles)


    def calc_rope(self, height, width):
        """Rotary embedding for a pixel-space image of the given size, computed on
        the latent patch grid (VAE /8, patch /2) against a 512px base resolution."""
        patch_size = 2
        head_size = 88
        latent_h = height // 8 // patch_size
        latent_w = width // 8 // patch_size
        base_size = 512 // 8 // patch_size
        start, stop = self.get_fill_resize_and_crop((latent_h, latent_w), base_size)
        return self.get_2d_rotary_pos_embed(head_size, start, stop, (latent_h, latent_w))
- latents = self.vae_encoder(image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - return latents - - - def decode_image(self, latent, tiled=False, tile_size=64, tile_stride=32): - image = self.vae_decoder(latent.to(self.device), tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - image = self.vae_output_to_image(image) - return image - - - def encode_prompt(self, prompt, clip_skip=1, clip_skip_2=1, positive=True): - text_emb, text_emb_mask, text_emb_t5, text_emb_mask_t5 = self.prompter.encode_prompt( - prompt, - clip_skip=clip_skip, - clip_skip_2=clip_skip_2, - positive=positive, - device=self.device - ) - return { - "text_emb": text_emb, - "text_emb_mask": text_emb_mask, - "text_emb_t5": text_emb_t5, - "text_emb_mask_t5": text_emb_mask_t5 - } - - - def prepare_extra_input(self, latents=None, tiled=False, tile_size=64, tile_stride=32): - batch_size, height, width = latents.shape[0], latents.shape[2] * 8, latents.shape[3] * 8 - if tiled: - height, width = tile_size * 16, tile_size * 16 - image_meta_size = torch.as_tensor([width, height, width, height, 0, 0]).to(device=self.device) - freqs_cis_img = self.image_size_manager.calc_rope(height, width) - image_meta_size = torch.stack([image_meta_size] * batch_size) - return { - "size_emb": image_meta_size, - "freq_cis_img": (freqs_cis_img[0].to(dtype=self.torch_dtype, device=self.device), freqs_cis_img[1].to(dtype=self.torch_dtype, device=self.device)), - "tiled": tiled, - "tile_size": tile_size, - "tile_stride": tile_stride - } - - - @torch.no_grad() - def __call__( - self, - prompt, - local_prompts=[], - masks=[], - mask_scales=[], - negative_prompt="", - cfg_scale=7.5, - clip_skip=1, - clip_skip_2=1, - input_image=None, - reference_strengths=[0.4], - denoising_strength=1.0, - height=1024, - width=1024, - num_inference_steps=20, - tiled=False, - tile_size=64, - tile_stride=32, - seed=None, - progress_bar_cmd=tqdm, - progress_bar_st=None, - ): - height, width = 
self.check_resize_height_width(height, width) - - # Prepare scheduler - self.scheduler.set_timesteps(num_inference_steps, denoising_strength) - - # Prepare latent tensors - noise = self.generate_noise((1, 4, height//8, width//8), seed=seed, device=self.device, dtype=self.torch_dtype) - if input_image is not None: - self.load_models_to_device(['vae_encoder']) - image = self.preprocess_image(input_image).to(device=self.device, dtype=torch.float32) - latents = self.vae_encoder(image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride).to(self.torch_dtype) - latents = self.scheduler.add_noise(latents, noise, timestep=self.scheduler.timesteps[0]) - else: - latents = noise.clone() - - # Encode prompts - self.load_models_to_device(['text_encoder', 'text_encoder_t5']) - prompt_emb_posi = self.encode_prompt(prompt, clip_skip=clip_skip, clip_skip_2=clip_skip_2, positive=True) - if cfg_scale != 1.0: - prompt_emb_nega = self.encode_prompt(negative_prompt, clip_skip=clip_skip, clip_skip_2=clip_skip_2, positive=True) - prompt_emb_locals = [self.encode_prompt(prompt_local, clip_skip=clip_skip, clip_skip_2=clip_skip_2, positive=True) for prompt_local in local_prompts] - - # Prepare positional id - extra_input = self.prepare_extra_input(latents, tiled, tile_size) - - # Denoise - self.load_models_to_device(['dit']) - for progress_id, timestep in enumerate(progress_bar_cmd(self.scheduler.timesteps)): - timestep = torch.tensor([timestep]).to(dtype=self.torch_dtype, device=self.device) - - # Positive side - inference_callback = lambda prompt_emb_posi: self.dit(latents, timestep=timestep, **prompt_emb_posi, **extra_input) - noise_pred_posi = self.control_noise_via_local_prompts(prompt_emb_posi, prompt_emb_locals, masks, mask_scales, inference_callback) - - if cfg_scale != 1.0: - # Negative side - noise_pred_nega = self.dit( - latents, timestep=timestep, **prompt_emb_nega, **extra_input, - ) - # Classifier-free guidance - noise_pred = noise_pred_nega + cfg_scale * (noise_pred_posi 
- noise_pred_nega) - else: - noise_pred = noise_pred_posi - - latents = self.scheduler.step(noise_pred, self.scheduler.timesteps[progress_id], latents) - - if progress_bar_st is not None: - progress_bar_st.progress(progress_id / len(self.scheduler.timesteps)) - - # Decode image - self.load_models_to_device(['vae_decoder']) - image = self.decode_image(latents.to(torch.float32), tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - - # Offload all models - self.load_models_to_device([]) - return image diff --git a/diffsynth/pipelines/hunyuan_video.py b/diffsynth/pipelines/hunyuan_video.py deleted file mode 100644 index d8a0411..0000000 --- a/diffsynth/pipelines/hunyuan_video.py +++ /dev/null @@ -1,395 +0,0 @@ -from ..models import ModelManager, SD3TextEncoder1, HunyuanVideoVAEDecoder, HunyuanVideoVAEEncoder -from ..models.hunyuan_video_dit import HunyuanVideoDiT -from ..models.hunyuan_video_text_encoder import HunyuanVideoLLMEncoder -from ..schedulers.flow_match import FlowMatchScheduler -from .base import BasePipeline -from ..prompters import HunyuanVideoPrompter -import torch -import torchvision.transforms as transforms -from einops import rearrange -import numpy as np -from PIL import Image -from tqdm import tqdm - - -class HunyuanVideoPipeline(BasePipeline): - - def __init__(self, device="cuda", torch_dtype=torch.float16): - super().__init__(device=device, torch_dtype=torch_dtype) - self.scheduler = FlowMatchScheduler(shift=7.0, sigma_min=0.0, extra_one_step=True) - self.prompter = HunyuanVideoPrompter() - self.text_encoder_1: SD3TextEncoder1 = None - self.text_encoder_2: HunyuanVideoLLMEncoder = None - self.dit: HunyuanVideoDiT = None - self.vae_decoder: HunyuanVideoVAEDecoder = None - self.vae_encoder: HunyuanVideoVAEEncoder = None - self.model_names = ['text_encoder_1', 'text_encoder_2', 'dit', 'vae_decoder', 'vae_encoder'] - self.vram_management = False - - - def enable_vram_management(self): - self.vram_management = True - self.enable_cpu_offload() - 
self.text_encoder_2.enable_auto_offload(dtype=self.torch_dtype, device=self.device) - self.dit.enable_auto_offload(dtype=self.torch_dtype, device=self.device) - - - def fetch_models(self, model_manager: ModelManager): - self.text_encoder_1 = model_manager.fetch_model("sd3_text_encoder_1") - self.text_encoder_2 = model_manager.fetch_model("hunyuan_video_text_encoder_2") - self.dit = model_manager.fetch_model("hunyuan_video_dit") - self.vae_decoder = model_manager.fetch_model("hunyuan_video_vae_decoder") - self.vae_encoder = model_manager.fetch_model("hunyuan_video_vae_encoder") - self.prompter.fetch_models(self.text_encoder_1, self.text_encoder_2) - - - @staticmethod - def from_model_manager(model_manager: ModelManager, torch_dtype=None, device=None, enable_vram_management=True): - if device is None: device = model_manager.device - if torch_dtype is None: torch_dtype = model_manager.torch_dtype - pipe = HunyuanVideoPipeline(device=device, torch_dtype=torch_dtype) - pipe.fetch_models(model_manager) - if enable_vram_management: - pipe.enable_vram_management() - return pipe - - def generate_crop_size_list(self, base_size=256, patch_size=32, max_ratio=4.0): - num_patches = round((base_size / patch_size)**2) - assert max_ratio >= 1.0 - crop_size_list = [] - wp, hp = num_patches, 1 - while wp > 0: - if max(wp, hp) / min(wp, hp) <= max_ratio: - crop_size_list.append((wp * patch_size, hp * patch_size)) - if (hp + 1) * wp <= num_patches: - hp += 1 - else: - wp -= 1 - return crop_size_list - - - def get_closest_ratio(self, height: float, width: float, ratios: list, buckets: list): - aspect_ratio = float(height) / float(width) - closest_ratio_id = np.abs(ratios - aspect_ratio).argmin() - closest_ratio = min(ratios, key=lambda ratio: abs(float(ratio) - aspect_ratio)) - return buckets[closest_ratio_id], float(closest_ratio) - - - def prepare_vae_images_inputs(self, semantic_images, i2v_resolution="720p"): - if i2v_resolution == "720p": - bucket_hw_base_size = 960 - elif 
i2v_resolution == "540p": - bucket_hw_base_size = 720 - elif i2v_resolution == "360p": - bucket_hw_base_size = 480 - else: - raise ValueError(f"i2v_resolution: {i2v_resolution} must be in [360p, 540p, 720p]") - origin_size = semantic_images[0].size - - crop_size_list = self.generate_crop_size_list(bucket_hw_base_size, 32) - aspect_ratios = np.array([round(float(h) / float(w), 5) for h, w in crop_size_list]) - closest_size, closest_ratio = self.get_closest_ratio(origin_size[1], origin_size[0], aspect_ratios, crop_size_list) - ref_image_transform = transforms.Compose([ - transforms.Resize(closest_size), - transforms.CenterCrop(closest_size), - transforms.ToTensor(), - transforms.Normalize([0.5], [0.5]) - ]) - - semantic_image_pixel_values = [ref_image_transform(semantic_image) for semantic_image in semantic_images] - semantic_image_pixel_values = torch.cat(semantic_image_pixel_values).unsqueeze(0).unsqueeze(2).to(self.device) - target_height, target_width = closest_size - return semantic_image_pixel_values, target_height, target_width - - - def encode_prompt(self, prompt, positive=True, clip_sequence_length=77, llm_sequence_length=256, input_images=None): - prompt_emb, pooled_prompt_emb, text_mask = self.prompter.encode_prompt( - prompt, device=self.device, positive=positive, clip_sequence_length=clip_sequence_length, llm_sequence_length=llm_sequence_length, images=input_images - ) - return {"prompt_emb": prompt_emb, "pooled_prompt_emb": pooled_prompt_emb, "text_mask": text_mask} - - - def prepare_extra_input(self, latents=None, guidance=1.0): - freqs_cos, freqs_sin = self.dit.prepare_freqs(latents) - guidance = torch.Tensor([guidance] * latents.shape[0]).to(device=latents.device, dtype=latents.dtype) - return {"freqs_cos": freqs_cos, "freqs_sin": freqs_sin, "guidance": guidance} - - - def tensor2video(self, frames): - frames = rearrange(frames, "C T H W -> T H W C") - frames = ((frames.float() + 1) * 127.5).clip(0, 255).cpu().numpy().astype(np.uint8) - frames = 
[Image.fromarray(frame) for frame in frames] - return frames - - - def encode_video(self, frames, tile_size=(17, 30, 30), tile_stride=(12, 20, 20)): - tile_size = ((tile_size[0] - 1) * 4 + 1, tile_size[1] * 8, tile_size[2] * 8) - tile_stride = (tile_stride[0] * 4, tile_stride[1] * 8, tile_stride[2] * 8) - latents = self.vae_encoder.encode_video(frames, tile_size=tile_size, tile_stride=tile_stride) - return latents - - - @torch.no_grad() - def __call__( - self, - prompt, - negative_prompt="", - input_video=None, - input_images=None, - i2v_resolution="720p", - i2v_stability=True, - denoising_strength=1.0, - seed=None, - rand_device=None, - height=720, - width=1280, - num_frames=129, - embedded_guidance=6.0, - cfg_scale=1.0, - num_inference_steps=30, - tea_cache_l1_thresh=None, - tile_size=(17, 30, 30), - tile_stride=(12, 20, 20), - step_processor=None, - progress_bar_cmd=lambda x: x, - progress_bar_st=None, - ): - # Tiler parameters - tiler_kwargs = {"tile_size": tile_size, "tile_stride": tile_stride} - - # Scheduler - self.scheduler.set_timesteps(num_inference_steps, denoising_strength) - - # encoder input images - if input_images is not None: - self.load_models_to_device(['vae_encoder']) - image_pixel_values, height, width = self.prepare_vae_images_inputs(input_images, i2v_resolution=i2v_resolution) - with torch.autocast(device_type=self.device, dtype=torch.float16, enabled=True): - image_latents = self.vae_encoder(image_pixel_values) - - # Initialize noise - rand_device = self.device if rand_device is None else rand_device - noise = self.generate_noise((1, 16, (num_frames - 1) // 4 + 1, height//8, width//8), seed=seed, device=rand_device, dtype=self.torch_dtype).to(self.device) - if input_video is not None: - self.load_models_to_device(['vae_encoder']) - input_video = self.preprocess_images(input_video) - input_video = torch.stack(input_video, dim=2) - latents = self.encode_video(input_video, **tiler_kwargs).to(dtype=self.torch_dtype, device=self.device) - latents 
= self.scheduler.add_noise(latents, noise, timestep=self.scheduler.timesteps[0]) - elif input_images is not None and i2v_stability: - noise = self.generate_noise((1, 16, (num_frames - 1) // 4 + 1, height//8, width//8), seed=seed, device=rand_device, dtype=image_latents.dtype).to(self.device) - t = torch.tensor([0.999]).to(device=self.device) - latents = noise * t + image_latents.repeat(1, 1, (num_frames - 1) // 4 + 1, 1, 1) * (1 - t) - latents = latents.to(dtype=image_latents.dtype) - else: - latents = noise - - # Encode prompts - # current mllm does not support vram_management - self.load_models_to_device(["text_encoder_1"] if self.vram_management and input_images is None else ["text_encoder_1", "text_encoder_2"]) - prompt_emb_posi = self.encode_prompt(prompt, positive=True, input_images=input_images) - if cfg_scale != 1.0: - prompt_emb_nega = self.encode_prompt(negative_prompt, positive=False) - - # Extra input - extra_input = self.prepare_extra_input(latents, guidance=embedded_guidance) - - # TeaCache - tea_cache_kwargs = {"tea_cache": TeaCache(num_inference_steps, rel_l1_thresh=tea_cache_l1_thresh) if tea_cache_l1_thresh is not None else None} - - # Denoise - self.load_models_to_device([] if self.vram_management else ["dit"]) - for progress_id, timestep in enumerate(progress_bar_cmd(self.scheduler.timesteps)): - timestep = timestep.unsqueeze(0).to(self.device) - print(f"Step {progress_id + 1} / {len(self.scheduler.timesteps)}") - - forward_func = lets_dance_hunyuan_video - if input_images is not None: - latents = torch.concat([image_latents, latents[:, :, 1:, :, :]], dim=2) - forward_func = lets_dance_hunyuan_video_i2v - - # Inference - with torch.autocast(device_type=self.device, dtype=self.torch_dtype): - noise_pred_posi = forward_func(self.dit, latents, timestep, **prompt_emb_posi, **extra_input, **tea_cache_kwargs) - if cfg_scale != 1.0: - noise_pred_nega = forward_func(self.dit, latents, timestep, **prompt_emb_nega, **extra_input) - noise_pred = 
noise_pred_nega + cfg_scale * (noise_pred_posi - noise_pred_nega) - else: - noise_pred = noise_pred_posi - - # (Experimental feature, may be removed in the future) - if step_processor is not None: - self.load_models_to_device(['vae_decoder']) - rendered_frames = self.scheduler.step(noise_pred, self.scheduler.timesteps[progress_id], latents, to_final=True) - rendered_frames = self.vae_decoder.decode_video(rendered_frames, **tiler_kwargs) - rendered_frames = self.tensor2video(rendered_frames[0]) - rendered_frames = step_processor(rendered_frames, original_frames=input_video) - self.load_models_to_device(['vae_encoder']) - rendered_frames = self.preprocess_images(rendered_frames) - rendered_frames = torch.stack(rendered_frames, dim=2) - target_latents = self.encode_video(rendered_frames).to(dtype=self.torch_dtype, device=self.device) - noise_pred = self.scheduler.return_to_timestep(self.scheduler.timesteps[progress_id], latents, target_latents) - self.load_models_to_device([] if self.vram_management else ["dit"]) - - # Scheduler - if input_images is not None: - latents = self.scheduler.step(noise_pred[:, :, 1:, :, :], self.scheduler.timesteps[progress_id], latents[:, :, 1:, :, :]) - latents = torch.concat([image_latents, latents], dim=2) - else: - latents = self.scheduler.step(noise_pred, self.scheduler.timesteps[progress_id], latents) - - # Decode - self.load_models_to_device(['vae_decoder']) - frames = self.vae_decoder.decode_video(latents, **tiler_kwargs) - self.load_models_to_device([]) - frames = self.tensor2video(frames[0]) - - return frames - - - -class TeaCache: - def __init__(self, num_inference_steps, rel_l1_thresh): - self.num_inference_steps = num_inference_steps - self.step = 0 - self.accumulated_rel_l1_distance = 0 - self.previous_modulated_input = None - self.rel_l1_thresh = rel_l1_thresh - self.previous_residual = None - self.previous_hidden_states = None - - def check(self, dit: HunyuanVideoDiT, img, vec): - img_ = img.clone() - vec_ = vec.clone() - 
img_mod1_shift, img_mod1_scale, _, _, _, _ = dit.double_blocks[0].component_a.mod(vec_).chunk(6, dim=-1) - normed_inp = dit.double_blocks[0].component_a.norm1(img_) - modulated_inp = normed_inp * (1 + img_mod1_scale.unsqueeze(1)) + img_mod1_shift.unsqueeze(1) - if self.step == 0 or self.step == self.num_inference_steps - 1: - should_calc = True - self.accumulated_rel_l1_distance = 0 - else: - coefficients = [7.33226126e+02, -4.01131952e+02, 6.75869174e+01, -3.14987800e+00, 9.61237896e-02] - rescale_func = np.poly1d(coefficients) - self.accumulated_rel_l1_distance += rescale_func(((modulated_inp-self.previous_modulated_input).abs().mean() / self.previous_modulated_input.abs().mean()).cpu().item()) - if self.accumulated_rel_l1_distance < self.rel_l1_thresh: - should_calc = False - else: - should_calc = True - self.accumulated_rel_l1_distance = 0 - self.previous_modulated_input = modulated_inp - self.step += 1 - if self.step == self.num_inference_steps: - self.step = 0 - if should_calc: - self.previous_hidden_states = img.clone() - return not should_calc - - def store(self, hidden_states): - self.previous_residual = hidden_states - self.previous_hidden_states - self.previous_hidden_states = None - - def update(self, hidden_states): - hidden_states = hidden_states + self.previous_residual - return hidden_states - - - -def lets_dance_hunyuan_video( - dit: HunyuanVideoDiT, - x: torch.Tensor, - t: torch.Tensor, - prompt_emb: torch.Tensor = None, - text_mask: torch.Tensor = None, - pooled_prompt_emb: torch.Tensor = None, - freqs_cos: torch.Tensor = None, - freqs_sin: torch.Tensor = None, - guidance: torch.Tensor = None, - tea_cache: TeaCache = None, - **kwargs -): - B, C, T, H, W = x.shape - - vec = dit.time_in(t, dtype=torch.float32) + dit.vector_in(pooled_prompt_emb) + dit.guidance_in(guidance * 1000, dtype=torch.float32) - img = dit.img_in(x) - txt = dit.txt_in(prompt_emb, t, text_mask) - - # TeaCache - if tea_cache is not None: - tea_cache_update = tea_cache.check(dit, 
img, vec) - else: - tea_cache_update = False - - if tea_cache_update: - print("TeaCache skip forward.") - img = tea_cache.update(img) - else: - split_token = int(text_mask.sum(dim=1)) - txt_len = int(txt.shape[1]) - for block in tqdm(dit.double_blocks, desc="Double stream blocks"): - img, txt = block(img, txt, vec, (freqs_cos, freqs_sin), split_token=split_token) - - x = torch.concat([img, txt], dim=1) - for block in tqdm(dit.single_blocks, desc="Single stream blocks"): - x = block(x, vec, (freqs_cos, freqs_sin), txt_len=txt_len, split_token=split_token) - img = x[:, :-txt_len] - - if tea_cache is not None: - tea_cache.store(img) - img = dit.final_layer(img, vec) - img = dit.unpatchify(img, T=T//1, H=H//2, W=W//2) - return img - - -def lets_dance_hunyuan_video_i2v( - dit: HunyuanVideoDiT, - x: torch.Tensor, - t: torch.Tensor, - prompt_emb: torch.Tensor = None, - text_mask: torch.Tensor = None, - pooled_prompt_emb: torch.Tensor = None, - freqs_cos: torch.Tensor = None, - freqs_sin: torch.Tensor = None, - guidance: torch.Tensor = None, - tea_cache: TeaCache = None, - **kwargs -): - B, C, T, H, W = x.shape - # Uncomment below to keep same as official implementation - # guidance = guidance.to(dtype=torch.float32).to(torch.bfloat16) - vec = dit.time_in(t, dtype=torch.bfloat16) - vec_2 = dit.vector_in(pooled_prompt_emb) - vec = vec + vec_2 - vec = vec + dit.guidance_in(guidance * 1000., dtype=torch.bfloat16) - - token_replace_vec = dit.time_in(torch.zeros_like(t), dtype=torch.bfloat16) - tr_token = (H // 2) * (W // 2) - token_replace_vec = token_replace_vec + vec_2 - - img = dit.img_in(x) - txt = dit.txt_in(prompt_emb, t, text_mask) - - # TeaCache - if tea_cache is not None: - tea_cache_update = tea_cache.check(dit, img, vec) - else: - tea_cache_update = False - - if tea_cache_update: - print("TeaCache skip forward.") - img = tea_cache.update(img) - else: - split_token = int(text_mask.sum(dim=1)) - txt_len = int(txt.shape[1]) - for block in tqdm(dit.double_blocks, 
desc="Double stream blocks"): - img, txt = block(img, txt, vec, (freqs_cos, freqs_sin), token_replace_vec, tr_token, split_token) - - x = torch.concat([img, txt], dim=1) - for block in tqdm(dit.single_blocks, desc="Single stream blocks"): - x = block(x, vec, (freqs_cos, freqs_sin), txt_len, token_replace_vec, tr_token, split_token) - img = x[:, :-txt_len] - - if tea_cache is not None: - tea_cache.store(img) - img = dit.final_layer(img, vec) - img = dit.unpatchify(img, T=T//1, H=H//2, W=W//2) - return img diff --git a/diffsynth/pipelines/omnigen_image.py b/diffsynth/pipelines/omnigen_image.py deleted file mode 100644 index ddb2ae6..0000000 --- a/diffsynth/pipelines/omnigen_image.py +++ /dev/null @@ -1,289 +0,0 @@ -from ..models.omnigen import OmniGenTransformer -from ..models.sdxl_vae_encoder import SDXLVAEEncoder -from ..models.sdxl_vae_decoder import SDXLVAEDecoder -from ..models.model_manager import ModelManager -from ..prompters.omnigen_prompter import OmniGenPrompter -from ..schedulers import FlowMatchScheduler -from .base import BasePipeline -from typing import Optional, Dict, Any, Tuple, List -from transformers.cache_utils import DynamicCache -import torch, os -from tqdm import tqdm - - - -class OmniGenCache(DynamicCache): - def __init__(self, - num_tokens_for_img: int, offload_kv_cache: bool=False) -> None: - if not torch.cuda.is_available(): - print("No available GPU, offload_kv_cache will be set to False, which will result in large memory usage and time cost when input multiple images!!!") - offload_kv_cache = False - raise RuntimeError("OffloadedCache can only be used with a GPU") - super().__init__() - self.original_device = [] - self.prefetch_stream = torch.cuda.Stream() - self.num_tokens_for_img = num_tokens_for_img - self.offload_kv_cache = offload_kv_cache - - def prefetch_layer(self, layer_idx: int): - "Starts prefetching the next layer cache" - if layer_idx < len(self): - with torch.cuda.stream(self.prefetch_stream): - # Prefetch next layer tensors 
to GPU - device = self.original_device[layer_idx] - self.key_cache[layer_idx] = self.key_cache[layer_idx].to(device, non_blocking=True) - self.value_cache[layer_idx] = self.value_cache[layer_idx].to(device, non_blocking=True) - - - def evict_previous_layer(self, layer_idx: int): - "Moves the previous layer cache to the CPU" - if len(self) > 2: - # We do it on the default stream so it occurs after all earlier computations on these tensors are done - if layer_idx == 0: - prev_layer_idx = -1 - else: - prev_layer_idx = (layer_idx - 1) % len(self) - self.key_cache[prev_layer_idx] = self.key_cache[prev_layer_idx].to("cpu", non_blocking=True) - self.value_cache[prev_layer_idx] = self.value_cache[prev_layer_idx].to("cpu", non_blocking=True) - - - def __getitem__(self, layer_idx: int) -> List[Tuple[torch.Tensor]]: - "Gets the cache for this layer to the device. Prefetches the next and evicts the previous layer." - if layer_idx < len(self): - if self.offload_kv_cache: - # Evict the previous layer if necessary - torch.cuda.current_stream().synchronize() - self.evict_previous_layer(layer_idx) - # Load current layer cache to its original device if not already there - original_device = self.original_device[layer_idx] - # self.prefetch_stream.synchronize(original_device) - torch.cuda.synchronize(self.prefetch_stream) - key_tensor = self.key_cache[layer_idx] - value_tensor = self.value_cache[layer_idx] - - # Prefetch the next layer - self.prefetch_layer((layer_idx + 1) % len(self)) - else: - key_tensor = self.key_cache[layer_idx] - value_tensor = self.value_cache[layer_idx] - return (key_tensor, value_tensor) - else: - raise KeyError(f"Cache only has {len(self)} layers, attempted to access layer with index {layer_idx}") - - - def update( - self, - key_states: torch.Tensor, - value_states: torch.Tensor, - layer_idx: int, - cache_kwargs: Optional[Dict[str, Any]] = None, - ) -> Tuple[torch.Tensor, torch.Tensor]: - """ - Updates the cache with the new `key_states` and `value_states` 
for the layer `layer_idx`. - Parameters: - key_states (`torch.Tensor`): - The new key states to cache. - value_states (`torch.Tensor`): - The new value states to cache. - layer_idx (`int`): - The index of the layer to cache the states for. - cache_kwargs (`Dict[str, Any]`, `optional`): - Additional arguments for the cache subclass. No additional arguments are used in `OffloadedCache`. - Return: - A tuple containing the updated key and value states. - """ - # Update the cache - if len(self.key_cache) < layer_idx: - raise ValueError("OffloadedCache does not support model usage where layers are skipped. Use DynamicCache.") - elif len(self.key_cache) == layer_idx: - # only cache the states for condition tokens - key_states = key_states[..., :-(self.num_tokens_for_img+1), :] - value_states = value_states[..., :-(self.num_tokens_for_img+1), :] - - # Update the number of seen tokens - if layer_idx == 0: - self._seen_tokens += key_states.shape[-2] - - self.key_cache.append(key_states) - self.value_cache.append(value_states) - self.original_device.append(key_states.device) - if self.offload_kv_cache: - self.evict_previous_layer(layer_idx) - return self.key_cache[layer_idx], self.value_cache[layer_idx] - else: - # only cache the states for condition tokens - key_tensor, value_tensor = self[layer_idx] - k = torch.cat([key_tensor, key_states], dim=-2) - v = torch.cat([value_tensor, value_states], dim=-2) - return k, v - - - -class OmnigenImagePipeline(BasePipeline): - - def __init__(self, device="cuda", torch_dtype=torch.float16): - super().__init__(device=device, torch_dtype=torch_dtype) - self.scheduler = FlowMatchScheduler(num_train_timesteps=1, shift=1, inverse_timesteps=True, sigma_min=0, sigma_max=1) - # models - self.vae_decoder: SDXLVAEDecoder = None - self.vae_encoder: SDXLVAEEncoder = None - self.transformer: OmniGenTransformer = None - self.prompter: OmniGenPrompter = None - self.model_names = ['transformer', 'vae_decoder', 'vae_encoder'] - - - def 
denoising_model(self): - return self.transformer - - - def fetch_models(self, model_manager: ModelManager, prompt_refiner_classes=[]): - # Main models - self.transformer, model_path = model_manager.fetch_model("omnigen_transformer", require_model_path=True) - self.vae_decoder = model_manager.fetch_model("sdxl_vae_decoder") - self.vae_encoder = model_manager.fetch_model("sdxl_vae_encoder") - self.prompter = OmniGenPrompter.from_pretrained(os.path.dirname(model_path)) - - - @staticmethod - def from_model_manager(model_manager: ModelManager, prompt_refiner_classes=[], device=None): - pipe = OmnigenImagePipeline( - device=model_manager.device if device is None else device, - torch_dtype=model_manager.torch_dtype, - ) - pipe.fetch_models(model_manager, prompt_refiner_classes=[]) - return pipe - - - def encode_image(self, image, tiled=False, tile_size=64, tile_stride=32): - latents = self.vae_encoder(image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - return latents - - - def encode_images(self, images, tiled=False, tile_size=64, tile_stride=32): - latents = [self.encode_image(image.to(device=self.device), tiled, tile_size, tile_stride).to(self.torch_dtype) for image in images] - return latents - - - def decode_image(self, latent, tiled=False, tile_size=64, tile_stride=32): - image = self.vae_decoder(latent.to(self.device), tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - image = self.vae_output_to_image(image) - return image - - - def encode_prompt(self, prompt, clip_skip=1, positive=True): - prompt_emb = self.prompter.encode_prompt(prompt, clip_skip=clip_skip, device=self.device, positive=positive) - return {"encoder_hidden_states": prompt_emb} - - - def prepare_extra_input(self, latents=None): - return {} - - - def crop_position_ids_for_cache(self, position_ids, num_tokens_for_img): - if isinstance(position_ids, list): - for i in range(len(position_ids)): - position_ids[i] = position_ids[i][:, -(num_tokens_for_img+1):] - else: - 
position_ids = position_ids[:, -(num_tokens_for_img+1):] - return position_ids - - - def crop_attention_mask_for_cache(self, attention_mask, num_tokens_for_img): - if isinstance(attention_mask, list): - return [x[..., -(num_tokens_for_img+1):, :] for x in attention_mask] - return attention_mask[..., -(num_tokens_for_img+1):, :] - - - @torch.no_grad() - def __call__( - self, - prompt, - reference_images=[], - cfg_scale=2.0, - image_cfg_scale=2.0, - use_kv_cache=True, - offload_kv_cache=True, - input_image=None, - denoising_strength=1.0, - height=1024, - width=1024, - num_inference_steps=20, - tiled=False, - tile_size=64, - tile_stride=32, - seed=None, - progress_bar_cmd=tqdm, - progress_bar_st=None, - ): - height, width = self.check_resize_height_width(height, width) - - # Tiler parameters - tiler_kwargs = {"tiled": tiled, "tile_size": tile_size, "tile_stride": tile_stride} - - # Prepare scheduler - self.scheduler.set_timesteps(num_inference_steps, denoising_strength) - - # Prepare latent tensors - if input_image is not None: - self.load_models_to_device(['vae_encoder']) - image = self.preprocess_image(input_image).to(device=self.device, dtype=self.torch_dtype) - latents = self.encode_image(image, **tiler_kwargs) - noise = self.generate_noise((1, 4, height//8, width//8), seed=seed, device=self.device, dtype=self.torch_dtype) - latents = self.scheduler.add_noise(latents, noise, timestep=self.scheduler.timesteps[0]) - else: - latents = self.generate_noise((1, 4, height//8, width//8), seed=seed, device=self.device, dtype=self.torch_dtype) - latents = latents.repeat(3, 1, 1, 1) - - # Encode prompts - input_data = self.prompter(prompt, reference_images, height=height, width=width, use_img_cfg=True, separate_cfg_input=True, use_input_image_size_as_output=False) - - # Encode images - reference_latents = [self.encode_images(images, **tiler_kwargs) for images in input_data['input_pixel_values']] - - # Pack all parameters - model_kwargs = 
dict(input_ids=[input_ids.to(self.device) for input_ids in input_data['input_ids']], - input_img_latents=reference_latents, - input_image_sizes=input_data['input_image_sizes'], - attention_mask=[attention_mask.to(self.device) for attention_mask in input_data["attention_mask"]], - position_ids=[position_ids.to(self.device) for position_ids in input_data["position_ids"]], - cfg_scale=cfg_scale, - img_cfg_scale=image_cfg_scale, - use_img_cfg=True, - use_kv_cache=use_kv_cache, - offload_model=False, - ) - - # Denoise - self.load_models_to_device(['transformer']) - cache = [OmniGenCache(latents.size(-1)*latents.size(-2) // 4, offload_kv_cache) for _ in range(len(model_kwargs['input_ids']))] if use_kv_cache else None - for progress_id, timestep in enumerate(progress_bar_cmd(self.scheduler.timesteps)): - timestep = timestep.unsqueeze(0).repeat(latents.shape[0]).to(self.device) - - # Forward - noise_pred, cache = self.transformer.forward_with_separate_cfg(latents, timestep, past_key_values=cache, **model_kwargs) - - # Scheduler - latents = self.scheduler.step(noise_pred, self.scheduler.timesteps[progress_id], latents) - - # Update KV cache - if progress_id == 0 and use_kv_cache: - num_tokens_for_img = latents.size(-1)*latents.size(-2) // 4 - if isinstance(cache, list): - model_kwargs['input_ids'] = [None] * len(cache) - else: - model_kwargs['input_ids'] = None - model_kwargs['position_ids'] = self.crop_position_ids_for_cache(model_kwargs['position_ids'], num_tokens_for_img) - model_kwargs['attention_mask'] = self.crop_attention_mask_for_cache(model_kwargs['attention_mask'], num_tokens_for_img) - - # UI - if progress_bar_st is not None: - progress_bar_st.progress(progress_id / len(self.scheduler.timesteps)) - - # Decode image - del cache - self.load_models_to_device(['vae_decoder']) - image = self.decode_image(latents, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - - # offload all models - self.load_models_to_device([]) - return image diff --git 
a/diffsynth/pipelines/pipeline_runner.py b/diffsynth/pipelines/pipeline_runner.py deleted file mode 100644 index 1b842f9..0000000 --- a/diffsynth/pipelines/pipeline_runner.py +++ /dev/null @@ -1,105 +0,0 @@ -import os, torch, json -from .sd_video import ModelManager, SDVideoPipeline, ControlNetConfigUnit -from ..processors.sequencial_processor import SequencialProcessor -from ..data import VideoData, save_frames, save_video - - - -class SDVideoPipelineRunner: - def __init__(self, in_streamlit=False): - self.in_streamlit = in_streamlit - - - def load_pipeline(self, model_list, textual_inversion_folder, device, lora_alphas, controlnet_units): - # Load models - model_manager = ModelManager(torch_dtype=torch.float16, device=device) - model_manager.load_models(model_list) - pipe = SDVideoPipeline.from_model_manager( - model_manager, - [ - ControlNetConfigUnit( - processor_id=unit["processor_id"], - model_path=unit["model_path"], - scale=unit["scale"] - ) for unit in controlnet_units - ] - ) - textual_inversion_paths = [] - for file_name in os.listdir(textual_inversion_folder): - if file_name.endswith(".pt") or file_name.endswith(".bin") or file_name.endswith(".pth") or file_name.endswith(".safetensors"): - textual_inversion_paths.append(os.path.join(textual_inversion_folder, file_name)) - pipe.prompter.load_textual_inversions(textual_inversion_paths) - return model_manager, pipe - - - def load_smoother(self, model_manager, smoother_configs): - smoother = SequencialProcessor.from_model_manager(model_manager, smoother_configs) - return smoother - - - def synthesize_video(self, model_manager, pipe, seed, smoother, **pipeline_inputs): - torch.manual_seed(seed) - if self.in_streamlit: - import streamlit as st - progress_bar_st = st.progress(0.0) - output_video = pipe(**pipeline_inputs, smoother=smoother, progress_bar_st=progress_bar_st) - progress_bar_st.progress(1.0) - else: - output_video = pipe(**pipeline_inputs, smoother=smoother) - model_manager.to("cpu") - return 
output_video - - - def load_video(self, video_file, image_folder, height, width, start_frame_id, end_frame_id): - video = VideoData(video_file=video_file, image_folder=image_folder, height=height, width=width) - if start_frame_id is None: - start_frame_id = 0 - if end_frame_id is None: - end_frame_id = len(video) - frames = [video[i] for i in range(start_frame_id, end_frame_id)] - return frames - - - def add_data_to_pipeline_inputs(self, data, pipeline_inputs): - pipeline_inputs["input_frames"] = self.load_video(**data["input_frames"]) - pipeline_inputs["num_frames"] = len(pipeline_inputs["input_frames"]) - pipeline_inputs["width"], pipeline_inputs["height"] = pipeline_inputs["input_frames"][0].size - if len(data["controlnet_frames"]) > 0: - pipeline_inputs["controlnet_frames"] = [self.load_video(**unit) for unit in data["controlnet_frames"]] - return pipeline_inputs - - - def save_output(self, video, output_folder, fps, config): - os.makedirs(output_folder, exist_ok=True) - save_frames(video, os.path.join(output_folder, "frames")) - save_video(video, os.path.join(output_folder, "video.mp4"), fps=fps) - config["pipeline"]["pipeline_inputs"]["input_frames"] = [] - config["pipeline"]["pipeline_inputs"]["controlnet_frames"] = [] - with open(os.path.join(output_folder, "config.json"), 'w') as file: - json.dump(config, file, indent=4) - - - def run(self, config): - if self.in_streamlit: - import streamlit as st - if self.in_streamlit: st.markdown("Loading videos ...") - config["pipeline"]["pipeline_inputs"] = self.add_data_to_pipeline_inputs(config["data"], config["pipeline"]["pipeline_inputs"]) - if self.in_streamlit: st.markdown("Loading videos ... done!") - if self.in_streamlit: st.markdown("Loading models ...") - model_manager, pipe = self.load_pipeline(**config["models"]) - if self.in_streamlit: st.markdown("Loading models ... 
done!") - if "smoother_configs" in config: - if self.in_streamlit: st.markdown("Loading smoother ...") - smoother = self.load_smoother(model_manager, config["smoother_configs"]) - if self.in_streamlit: st.markdown("Loading smoother ... done!") - else: - smoother = None - if self.in_streamlit: st.markdown("Synthesizing videos ...") - output_video = self.synthesize_video(model_manager, pipe, config["pipeline"]["seed"], smoother, **config["pipeline"]["pipeline_inputs"]) - if self.in_streamlit: st.markdown("Synthesizing videos ... done!") - if self.in_streamlit: st.markdown("Saving videos ...") - self.save_output(output_video, config["data"]["output_folder"], config["data"]["fps"], config) - if self.in_streamlit: st.markdown("Saving videos ... done!") - if self.in_streamlit: st.markdown("Finished!") - video_file = open(os.path.join(os.path.join(config["data"]["output_folder"], "video.mp4")), 'rb') - if self.in_streamlit: st.video(video_file.read()) diff --git a/diffsynth/pipelines/qwen_image.py b/diffsynth/pipelines/qwen_image.py index e949b56..1849b08 100644 --- a/diffsynth/pipelines/qwen_image.py +++ b/diffsynth/pipelines/qwen_image.py @@ -1,48 +1,18 @@ -import torch +import torch, math from PIL import Image from typing import Union -from PIL import Image from tqdm import tqdm from einops import rearrange import numpy as np -from ..models import ModelManager, load_state_dict +from ..diffusion import FlowMatchScheduler +from ..core import ModelConfig, gradient_checkpoint_forward +from ..diffusion.base_pipeline import BasePipeline, PipelineUnit, ControlNetInput + from ..models.qwen_image_dit import QwenImageDiT from ..models.qwen_image_text_encoder import QwenImageTextEncoder from ..models.qwen_image_vae import QwenImageVAE from ..models.qwen_image_controlnet import QwenImageBlockWiseControlNet -from ..schedulers import FlowMatchScheduler -from ..utils import BasePipeline, ModelConfig, PipelineUnitRunner, PipelineUnit -from ..lora import GeneralLoRALoader -from 
.flux_image_new import ControlNetInput - -from ..vram_management import gradient_checkpoint_forward, enable_vram_management, AutoWrappedModule, AutoWrappedLinear - - -class QwenImageBlockwiseMultiControlNet(torch.nn.Module): - def __init__(self, models: list[QwenImageBlockWiseControlNet]): - super().__init__() - if not isinstance(models, list): - models = [models] - self.models = torch.nn.ModuleList(models) - - def preprocess(self, controlnet_inputs: list[ControlNetInput], conditionings: list[torch.Tensor], **kwargs): - processed_conditionings = [] - for controlnet_input, conditioning in zip(controlnet_inputs, conditionings): - conditioning = rearrange(conditioning, "B C (H P) (W Q) -> B (H W) (C P Q)", P=2, Q=2) - model_output = self.models[controlnet_input.controlnet_id].process_controlnet_conditioning(conditioning) - processed_conditionings.append(model_output) - return processed_conditionings - - def blockwise_forward(self, image, conditionings: list[torch.Tensor], controlnet_inputs: list[ControlNetInput], progress_id, num_inference_steps, block_id, **kwargs): - res = 0 - for controlnet_input, conditioning in zip(controlnet_inputs, conditionings): - progress = (num_inference_steps - 1 - progress_id) / max(num_inference_steps - 1, 1) - if progress > controlnet_input.start + (1e-4) or progress < controlnet_input.end - (1e-4): - continue - model_output = self.models[controlnet_input.controlnet_id].blockwise_forward(image, conditioning, block_id) - res = res + model_output * controlnet_input.scale - return res class QwenImagePipeline(BasePipeline): @@ -61,7 +31,6 @@ class QwenImagePipeline(BasePipeline): self.blockwise_controlnet: QwenImageBlockwiseMultiControlNet = None self.tokenizer: Qwen2Tokenizer = None self.processor: Qwen2VLProcessor = None - self.unit_runner = PipelineUnitRunner() self.in_iteration_models = ("dit", "blockwise_controlnet") self.units = [ QwenImageUnit_ShapeChecker(), @@ -75,245 +44,6 @@ class QwenImagePipeline(BasePipeline): 
QwenImageUnit_BlockwiseControlNet(), ] self.model_fn = model_fn_qwen_image - - - def load_lora( - self, - module: torch.nn.Module, - lora_config: Union[ModelConfig, str] = None, - alpha=1, - hotload=False, - state_dict=None, - ): - if state_dict is None: - if isinstance(lora_config, str): - lora = load_state_dict(lora_config, torch_dtype=self.torch_dtype, device=self.device) - else: - lora_config.download_if_necessary() - lora = load_state_dict(lora_config.path, torch_dtype=self.torch_dtype, device=self.device) - else: - lora = state_dict - if hotload: - for name, module in module.named_modules(): - if isinstance(module, AutoWrappedLinear): - lora_a_name = f'{name}.lora_A.default.weight' - lora_b_name = f'{name}.lora_B.default.weight' - if lora_a_name in lora and lora_b_name in lora: - module.lora_A_weights.append(lora[lora_a_name] * alpha) - module.lora_B_weights.append(lora[lora_b_name]) - else: - loader = GeneralLoRALoader(torch_dtype=self.torch_dtype, device=self.device) - loader.load(module, lora, alpha=alpha) - - - def clear_lora(self): - for name, module in self.named_modules(): - if isinstance(module, AutoWrappedLinear): - if hasattr(module, "lora_A_weights"): - module.lora_A_weights.clear() - if hasattr(module, "lora_B_weights"): - module.lora_B_weights.clear() - - - def enable_lora_magic(self): - if self.dit is not None: - if not (hasattr(self.dit, "vram_management_enabled") and self.dit.vram_management_enabled): - dtype = next(iter(self.dit.parameters())).dtype - enable_vram_management( - self.dit, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - }, - module_config = dict( - offload_dtype=dtype, - offload_device=self.device, - onload_dtype=dtype, - onload_device=self.device, - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - vram_limit=None, - ) - - - def training_loss(self, **inputs): - timestep_id = torch.randint(0, self.scheduler.num_train_timesteps, (1,)) - timestep = 
self.scheduler.timesteps[timestep_id].to(dtype=self.torch_dtype, device=self.device) - - noise = torch.randn_like(inputs["input_latents"]) - inputs["latents"] = self.scheduler.add_noise(inputs["input_latents"], noise, timestep) - training_target = self.scheduler.training_target(inputs["input_latents"], noise, timestep) - - noise_pred = self.model_fn(**inputs, timestep=timestep) - - loss = torch.nn.functional.mse_loss(noise_pred.float(), training_target.float()) - loss = loss * self.scheduler.training_weight(timestep) - return loss - - - def direct_distill_loss(self, **inputs): - self.scheduler.set_timesteps(inputs["num_inference_steps"]) - models = {name: getattr(self, name) for name in self.in_iteration_models} - for progress_id, timestep in enumerate(self.scheduler.timesteps): - timestep = timestep.unsqueeze(0).to(dtype=self.torch_dtype, device=self.device) - noise_pred = self.model_fn(**models, **inputs, timestep=timestep, progress_id=progress_id) - inputs["latents"] = self.step(self.scheduler, progress_id=progress_id, noise_pred=noise_pred, **inputs) - loss = torch.nn.functional.mse_loss(inputs["latents"].float(), inputs["input_latents"].float()) - return loss - - - def _enable_fp8_lora_training(self, dtype): - from transformers.models.qwen2_5_vl.modeling_qwen2_5_vl import Qwen2_5_VLRotaryEmbedding, Qwen2RMSNorm, Qwen2_5_VisionPatchEmbed, Qwen2_5_VisionRotaryEmbedding - from ..models.qwen_image_dit import RMSNorm - from ..models.qwen_image_vae import QwenImageRMS_norm - module_map = { - RMSNorm: AutoWrappedModule, - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Conv3d: AutoWrappedModule, - torch.nn.Conv2d: AutoWrappedModule, - torch.nn.Embedding: AutoWrappedModule, - Qwen2_5_VLRotaryEmbedding: AutoWrappedModule, - Qwen2RMSNorm: AutoWrappedModule, - Qwen2_5_VisionPatchEmbed: AutoWrappedModule, - Qwen2_5_VisionRotaryEmbedding: AutoWrappedModule, - QwenImageRMS_norm: AutoWrappedModule, - } - model_config = dict( - offload_dtype=dtype, - offload_device="cuda", - 
onload_dtype=dtype, - onload_device="cuda", - computation_dtype=self.torch_dtype, - computation_device="cuda", - ) - if self.text_encoder is not None: - enable_vram_management(self.text_encoder, module_map=module_map, module_config=model_config) - if self.dit is not None: - enable_vram_management(self.dit, module_map=module_map, module_config=model_config) - if self.vae is not None: - enable_vram_management(self.vae, module_map=module_map, module_config=model_config) - - - def enable_vram_management(self, num_persistent_param_in_dit=None, vram_limit=None, vram_buffer=0.5, auto_offload=True, enable_dit_fp8_computation=False): - self.vram_management_enabled = True - if vram_limit is None and auto_offload: - vram_limit = self.get_vram() - if vram_limit is not None: - vram_limit = vram_limit - vram_buffer - - if self.text_encoder is not None: - from transformers.models.qwen2_5_vl.modeling_qwen2_5_vl import Qwen2_5_VLRotaryEmbedding, Qwen2RMSNorm, Qwen2_5_VisionPatchEmbed, Qwen2_5_VisionRotaryEmbedding - dtype = next(iter(self.text_encoder.parameters())).dtype - enable_vram_management( - self.text_encoder, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Embedding: AutoWrappedModule, - Qwen2_5_VLRotaryEmbedding: AutoWrappedModule, - Qwen2RMSNorm: AutoWrappedModule, - Qwen2_5_VisionPatchEmbed: AutoWrappedModule, - Qwen2_5_VisionRotaryEmbedding: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - vram_limit=vram_limit, - ) - if self.dit is not None: - from ..models.qwen_image_dit import RMSNorm - dtype = next(iter(self.dit.parameters())).dtype - device = "cpu" if vram_limit is not None else self.device - if not enable_dit_fp8_computation: - enable_vram_management( - self.dit, - module_map = { - RMSNorm: AutoWrappedModule, - torch.nn.Linear: AutoWrappedLinear, - }, - module_config = dict( - 
offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device=device, - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - vram_limit=vram_limit, - ) - else: - enable_vram_management( - self.dit, - module_map = { - RMSNorm: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device=device, - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - vram_limit=vram_limit, - ) - enable_vram_management( - self.dit, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device=device, - computation_dtype=dtype, - computation_device=self.device, - ), - vram_limit=vram_limit, - ) - if self.vae is not None: - from ..models.qwen_image_vae import QwenImageRMS_norm - dtype = next(iter(self.vae.parameters())).dtype - enable_vram_management( - self.vae, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Conv3d: AutoWrappedModule, - torch.nn.Conv2d: AutoWrappedModule, - QwenImageRMS_norm: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - vram_limit=vram_limit, - ) - if self.blockwise_controlnet is not None: - enable_vram_management( - self.blockwise_controlnet, - module_map = { - RMSNorm: AutoWrappedModule, - torch.nn.Linear: AutoWrappedLinear, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device=device, - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - vram_limit=vram_limit, - ) @staticmethod @@ -323,24 +53,18 @@ class QwenImagePipeline(BasePipeline): model_configs: list[ModelConfig] = [], tokenizer_config: ModelConfig = ModelConfig(model_id="Qwen/Qwen-Image", 
origin_file_pattern="tokenizer/"), processor_config: ModelConfig = None, + vram_limit: float = None, ): - # Download and load models - model_manager = ModelManager() - for model_config in model_configs: - model_config.download_if_necessary() - model_manager.load_model( - model_config.path, - device=model_config.offload_device or device, - torch_dtype=model_config.offload_dtype or torch_dtype - ) - # Initialize pipeline pipe = QwenImagePipeline(device=device, torch_dtype=torch_dtype) - pipe.text_encoder = model_manager.fetch_model("qwen_image_text_encoder") - pipe.dit = model_manager.fetch_model("qwen_image_dit") - pipe.vae = model_manager.fetch_model("qwen_image_vae") - pipe.blockwise_controlnet = QwenImageBlockwiseMultiControlNet(model_manager.fetch_model("qwen_image_blockwise_controlnet", index="all")) - if tokenizer_config is not None and pipe.text_encoder is not None: + model_pool = pipe.download_and_load_models(model_configs, vram_limit) + + # Fetch models + pipe.text_encoder = model_pool.fetch_model("qwen_image_text_encoder") + pipe.dit = model_pool.fetch_model("qwen_image_dit") + pipe.vae = model_pool.fetch_model("qwen_image_vae") + pipe.blockwise_controlnet = QwenImageBlockwiseMultiControlNet(model_pool.fetch_model("qwen_image_blockwise_controlnet", index="all")) + if tokenizer_config is not None: tokenizer_config.download_if_necessary() from transformers import Qwen2Tokenizer pipe.tokenizer = Qwen2Tokenizer.from_pretrained(tokenizer_config.path) @@ -348,6 +72,9 @@ class QwenImagePipeline(BasePipeline): processor_config.download_if_necessary() from transformers import Qwen2VLProcessor pipe.processor = Qwen2VLProcessor.from_pretrained(processor_config.path) + + # VRAM Management + pipe.vram_management_enabled = pipe.check_vram_management_state() return pipe @@ -448,10 +175,41 @@ class QwenImagePipeline(BasePipeline): return image +class QwenImageBlockwiseMultiControlNet(torch.nn.Module): + def __init__(self, models: list[QwenImageBlockWiseControlNet]): + 
super().__init__() + if not isinstance(models, list): + models = [models] + self.models = torch.nn.ModuleList(models) + for model in models: + if hasattr(model, "vram_management_enabled") and getattr(model, "vram_management_enabled"): + self.vram_management_enabled = True + + def preprocess(self, controlnet_inputs: list[ControlNetInput], conditionings: list[torch.Tensor], **kwargs): + processed_conditionings = [] + for controlnet_input, conditioning in zip(controlnet_inputs, conditionings): + conditioning = rearrange(conditioning, "B C (H P) (W Q) -> B (H W) (C P Q)", P=2, Q=2) + model_output = self.models[controlnet_input.controlnet_id].process_controlnet_conditioning(conditioning) + processed_conditionings.append(model_output) + return processed_conditionings + + def blockwise_forward(self, image, conditionings: list[torch.Tensor], controlnet_inputs: list[ControlNetInput], progress_id, num_inference_steps, block_id, **kwargs): + res = 0 + for controlnet_input, conditioning in zip(controlnet_inputs, conditionings): + progress = (num_inference_steps - 1 - progress_id) / max(num_inference_steps - 1, 1) + if progress > controlnet_input.start + (1e-4) or progress < controlnet_input.end - (1e-4): + continue + model_output = self.models[controlnet_input.controlnet_id].blockwise_forward(image, conditioning, block_id) + res = res + model_output * controlnet_input.scale + return res + class QwenImageUnit_ShapeChecker(PipelineUnit): def __init__(self): - super().__init__(input_params=("height", "width")) + super().__init__( + input_params=("height", "width"), + output_params=("height", "width"), + ) def process(self, pipe: QwenImagePipeline, height, width): height, width = pipe.check_resize_height_width(height, width) @@ -461,7 +219,10 @@ class QwenImageUnit_ShapeChecker(PipelineUnit): class QwenImageUnit_NoiseInitializer(PipelineUnit): def __init__(self): - super().__init__(input_params=("height", "width", "seed", "rand_device")) + super().__init__( + 
input_params=("height", "width", "seed", "rand_device"), + output_params=("noise",), + ) def process(self, pipe: QwenImagePipeline, height, width, seed, rand_device): noise = pipe.generate_noise((1, 16, height//8, width//8), seed=seed, rand_device=rand_device, rand_torch_dtype=pipe.torch_dtype) @@ -473,6 +234,7 @@ class QwenImageUnit_InputImageEmbedder(PipelineUnit): def __init__(self): super().__init__( input_params=("input_image", "noise", "tiled", "tile_size", "tile_stride"), + output_params=("latents", "input_latents"), onload_model_names=("vae",) ) @@ -494,6 +256,7 @@ class QwenImageUnit_Inpaint(PipelineUnit): def __init__(self): super().__init__( input_params=("inpaint_mask", "height", "width", "inpaint_blur_size", "inpaint_blur_sigma"), + output_params=("inpaint_mask",), ) def process(self, pipe: QwenImagePipeline, inpaint_mask, height, width, inpaint_blur_size, inpaint_blur_sigma): @@ -515,6 +278,7 @@ class QwenImageUnit_PromptEmbedder(PipelineUnit): input_params_posi={"prompt": "prompt"}, input_params_nega={"prompt": "negative_prompt"}, input_params=("edit_image",), + output_params=("prompt_emb", "prompt_emb_mask"), onload_model_names=("text_encoder",) ) @@ -526,7 +290,6 @@ class QwenImageUnit_PromptEmbedder(PipelineUnit): return split_result def calculate_dimensions(self, target_area, ratio): - import math width = math.sqrt(target_area * ratio) height = width / ratio width = round(width / 32) * 32 @@ -573,6 +336,7 @@ class QwenImageUnit_PromptEmbedder(PipelineUnit): return split_hidden_states def process(self, pipe: QwenImagePipeline, prompt, edit_image=None) -> dict: + pipe.load_models_to_device(self.onload_model_names) if pipe.text_encoder is not None: prompt = [prompt] if edit_image is None: @@ -595,6 +359,8 @@ class QwenImageUnit_EntityControl(PipelineUnit): def __init__(self): super().__init__( take_over=True, + input_params=("eligen_entity_prompts", "width", "height", "eligen_enable_on_negative", "cfg_scale"), + output_params=("entity_prompt_emb", 
"entity_masks", "entity_prompt_emb_mask"), onload_model_names=("text_encoder",) ) @@ -675,6 +441,7 @@ class QwenImageUnit_BlockwiseControlNet(PipelineUnit): def __init__(self): super().__init__( input_params=("blockwise_controlnet_inputs", "tiled", "tile_size", "tile_stride"), + output_params=("blockwise_controlnet_conditioning",), onload_model_names=("vae",) ) @@ -717,6 +484,7 @@ class QwenImageUnit_EditImageEmbedder(PipelineUnit): def __init__(self): super().__init__( input_params=("edit_image", "tiled", "tile_size", "tile_stride", "edit_image_auto_resize"), + output_params=("edit_latents", "edit_image"), onload_model_names=("vae",) ) @@ -738,7 +506,7 @@ class QwenImageUnit_EditImageEmbedder(PipelineUnit): def process(self, pipe: QwenImagePipeline, edit_image, tiled, tile_size, tile_stride, edit_image_auto_resize=False): if edit_image is None: return {} - pipe.load_models_to_device(['vae']) + pipe.load_models_to_device(self.onload_model_names) if isinstance(edit_image, Image.Image): resized_edit_image = self.edit_image_auto_resize(edit_image) if edit_image_auto_resize else edit_image edit_image = pipe.preprocess_image(resized_edit_image).to(device=pipe.device, dtype=pipe.torch_dtype) @@ -759,13 +527,14 @@ class QwenImageUnit_ContextImageEmbedder(PipelineUnit): def __init__(self): super().__init__( input_params=("context_image", "height", "width", "tiled", "tile_size", "tile_stride"), + output_params=("context_latents",), onload_model_names=("vae",) ) def process(self, pipe: QwenImagePipeline, context_image, height, width, tiled, tile_size, tile_stride): if context_image is None: return {} - pipe.load_models_to_device(['vae']) + pipe.load_models_to_device(self.onload_model_names) context_image = pipe.preprocess_image(context_image.resize((width, height))).to(device=pipe.device, dtype=pipe.torch_dtype) context_latents = pipe.vae.encode(context_image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) return {"context_latents": context_latents} diff --git 
a/diffsynth/pipelines/sd3_image.py b/diffsynth/pipelines/sd3_image.py deleted file mode 100644 index c609873..0000000 --- a/diffsynth/pipelines/sd3_image.py +++ /dev/null @@ -1,147 +0,0 @@ -from ..models import ModelManager, SD3TextEncoder1, SD3TextEncoder2, SD3TextEncoder3, SD3DiT, SD3VAEDecoder, SD3VAEEncoder -from ..prompters import SD3Prompter -from ..schedulers import FlowMatchScheduler -from .base import BasePipeline -import torch -from tqdm import tqdm - - - -class SD3ImagePipeline(BasePipeline): - - def __init__(self, device="cuda", torch_dtype=torch.float16): - super().__init__(device=device, torch_dtype=torch_dtype, height_division_factor=16, width_division_factor=16) - self.scheduler = FlowMatchScheduler() - self.prompter = SD3Prompter() - # models - self.text_encoder_1: SD3TextEncoder1 = None - self.text_encoder_2: SD3TextEncoder2 = None - self.text_encoder_3: SD3TextEncoder3 = None - self.dit: SD3DiT = None - self.vae_decoder: SD3VAEDecoder = None - self.vae_encoder: SD3VAEEncoder = None - self.model_names = ['text_encoder_1', 'text_encoder_2', 'text_encoder_3', 'dit', 'vae_decoder', 'vae_encoder'] - - - def denoising_model(self): - return self.dit - - - def fetch_models(self, model_manager: ModelManager, prompt_refiner_classes=[]): - self.text_encoder_1 = model_manager.fetch_model("sd3_text_encoder_1") - self.text_encoder_2 = model_manager.fetch_model("sd3_text_encoder_2") - self.text_encoder_3 = model_manager.fetch_model("sd3_text_encoder_3") - self.dit = model_manager.fetch_model("sd3_dit") - self.vae_decoder = model_manager.fetch_model("sd3_vae_decoder") - self.vae_encoder = model_manager.fetch_model("sd3_vae_encoder") - self.prompter.fetch_models(self.text_encoder_1, self.text_encoder_2, self.text_encoder_3) - self.prompter.load_prompt_refiners(model_manager, prompt_refiner_classes) - - - @staticmethod - def from_model_manager(model_manager: ModelManager, prompt_refiner_classes=[], device=None): - pipe = SD3ImagePipeline( - 
device=model_manager.device if device is None else device, - torch_dtype=model_manager.torch_dtype, - ) - pipe.fetch_models(model_manager, prompt_refiner_classes) - return pipe - - - def encode_image(self, image, tiled=False, tile_size=64, tile_stride=32): - latents = self.vae_encoder(image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - return latents - - - def decode_image(self, latent, tiled=False, tile_size=64, tile_stride=32): - image = self.vae_decoder(latent.to(self.device), tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - image = self.vae_output_to_image(image) - return image - - - def encode_prompt(self, prompt, positive=True, t5_sequence_length=77): - prompt_emb, pooled_prompt_emb = self.prompter.encode_prompt( - prompt, device=self.device, positive=positive, t5_sequence_length=t5_sequence_length - ) - return {"prompt_emb": prompt_emb, "pooled_prompt_emb": pooled_prompt_emb} - - - def prepare_extra_input(self, latents=None): - return {} - - - @torch.no_grad() - def __call__( - self, - prompt, - local_prompts=[], - masks=[], - mask_scales=[], - negative_prompt="", - cfg_scale=7.5, - input_image=None, - denoising_strength=1.0, - height=1024, - width=1024, - num_inference_steps=20, - t5_sequence_length=77, - tiled=False, - tile_size=128, - tile_stride=64, - seed=None, - progress_bar_cmd=tqdm, - progress_bar_st=None, - ): - height, width = self.check_resize_height_width(height, width) - - # Tiler parameters - tiler_kwargs = {"tiled": tiled, "tile_size": tile_size, "tile_stride": tile_stride} - - # Prepare scheduler - self.scheduler.set_timesteps(num_inference_steps, denoising_strength) - - # Prepare latent tensors - if input_image is not None: - self.load_models_to_device(['vae_encoder']) - image = self.preprocess_image(input_image).to(device=self.device, dtype=self.torch_dtype) - latents = self.encode_image(image, **tiler_kwargs) - noise = self.generate_noise((1, 16, height//8, width//8), seed=seed, device=self.device, 
dtype=self.torch_dtype) - latents = self.scheduler.add_noise(latents, noise, timestep=self.scheduler.timesteps[0]) - else: - latents = self.generate_noise((1, 16, height//8, width//8), seed=seed, device=self.device, dtype=self.torch_dtype) - - # Encode prompts - self.load_models_to_device(['text_encoder_1', 'text_encoder_2', 'text_encoder_3']) - prompt_emb_posi = self.encode_prompt(prompt, positive=True, t5_sequence_length=t5_sequence_length) - prompt_emb_nega = self.encode_prompt(negative_prompt, positive=False, t5_sequence_length=t5_sequence_length) - prompt_emb_locals = [self.encode_prompt(prompt_local, t5_sequence_length=t5_sequence_length) for prompt_local in local_prompts] - - # Denoise - self.load_models_to_device(['dit']) - for progress_id, timestep in enumerate(progress_bar_cmd(self.scheduler.timesteps)): - timestep = timestep.unsqueeze(0).to(self.device) - - # Classifier-free guidance - inference_callback = lambda prompt_emb_posi: self.dit( - latents, timestep=timestep, **prompt_emb_posi, **tiler_kwargs, - ) - noise_pred_posi = self.control_noise_via_local_prompts(prompt_emb_posi, prompt_emb_locals, masks, mask_scales, inference_callback) - noise_pred_nega = self.dit( - latents, timestep=timestep, **prompt_emb_nega, **tiler_kwargs, - ) - noise_pred = noise_pred_nega + cfg_scale * (noise_pred_posi - noise_pred_nega) - - # DDIM - latents = self.scheduler.step(noise_pred, self.scheduler.timesteps[progress_id], latents) - - # UI - if progress_bar_st is not None: - progress_bar_st.progress(progress_id / len(self.scheduler.timesteps)) - - # Decode image - self.load_models_to_device(['vae_decoder']) - image = self.decode_image(latents, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - - # offload all models - self.load_models_to_device([]) - return image diff --git a/diffsynth/pipelines/sd_image.py b/diffsynth/pipelines/sd_image.py deleted file mode 100644 index c22c3fe..0000000 --- a/diffsynth/pipelines/sd_image.py +++ /dev/null @@ -1,191 +0,0 @@ 
-from ..models import SDTextEncoder, SDUNet, SDVAEDecoder, SDVAEEncoder, SDIpAdapter, IpAdapterCLIPImageEmbedder -from ..models.model_manager import ModelManager -from ..controlnets import MultiControlNetManager, ControlNetUnit, ControlNetConfigUnit, Annotator -from ..prompters import SDPrompter -from ..schedulers import EnhancedDDIMScheduler -from .base import BasePipeline -from .dancer import lets_dance -from typing import List -import torch -from tqdm import tqdm - - - -class SDImagePipeline(BasePipeline): - - def __init__(self, device="cuda", torch_dtype=torch.float16): - super().__init__(device=device, torch_dtype=torch_dtype) - self.scheduler = EnhancedDDIMScheduler() - self.prompter = SDPrompter() - # models - self.text_encoder: SDTextEncoder = None - self.unet: SDUNet = None - self.vae_decoder: SDVAEDecoder = None - self.vae_encoder: SDVAEEncoder = None - self.controlnet: MultiControlNetManager = None - self.ipadapter_image_encoder: IpAdapterCLIPImageEmbedder = None - self.ipadapter: SDIpAdapter = None - self.model_names = ['text_encoder', 'unet', 'vae_decoder', 'vae_encoder', 'controlnet', 'ipadapter_image_encoder', 'ipadapter'] - - - def denoising_model(self): - return self.unet - - - def fetch_models(self, model_manager: ModelManager, controlnet_config_units: List[ControlNetConfigUnit]=[], prompt_refiner_classes=[]): - # Main models - self.text_encoder = model_manager.fetch_model("sd_text_encoder") - self.unet = model_manager.fetch_model("sd_unet") - self.vae_decoder = model_manager.fetch_model("sd_vae_decoder") - self.vae_encoder = model_manager.fetch_model("sd_vae_encoder") - self.prompter.fetch_models(self.text_encoder) - self.prompter.load_prompt_refiners(model_manager, prompt_refiner_classes) - - # ControlNets - controlnet_units = [] - for config in controlnet_config_units: - controlnet_unit = ControlNetUnit( - Annotator(config.processor_id, device=self.device), - model_manager.fetch_model("sd_controlnet", config.model_path), - config.scale - ) - 
controlnet_units.append(controlnet_unit) - self.controlnet = MultiControlNetManager(controlnet_units) - - # IP-Adapters - self.ipadapter = model_manager.fetch_model("sd_ipadapter") - self.ipadapter_image_encoder = model_manager.fetch_model("sd_ipadapter_clip_image_encoder") - - - @staticmethod - def from_model_manager(model_manager: ModelManager, controlnet_config_units: List[ControlNetConfigUnit]=[], prompt_refiner_classes=[], device=None): - pipe = SDImagePipeline( - device=model_manager.device if device is None else device, - torch_dtype=model_manager.torch_dtype, - ) - pipe.fetch_models(model_manager, controlnet_config_units, prompt_refiner_classes=[]) - return pipe - - - def encode_image(self, image, tiled=False, tile_size=64, tile_stride=32): - latents = self.vae_encoder(image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - return latents - - - def decode_image(self, latent, tiled=False, tile_size=64, tile_stride=32): - image = self.vae_decoder(latent.to(self.device), tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - image = self.vae_output_to_image(image) - return image - - - def encode_prompt(self, prompt, clip_skip=1, positive=True): - prompt_emb = self.prompter.encode_prompt(prompt, clip_skip=clip_skip, device=self.device, positive=positive) - return {"encoder_hidden_states": prompt_emb} - - - def prepare_extra_input(self, latents=None): - return {} - - - @torch.no_grad() - def __call__( - self, - prompt, - local_prompts=[], - masks=[], - mask_scales=[], - negative_prompt="", - cfg_scale=7.5, - clip_skip=1, - input_image=None, - ipadapter_images=None, - ipadapter_scale=1.0, - controlnet_image=None, - denoising_strength=1.0, - height=512, - width=512, - num_inference_steps=20, - tiled=False, - tile_size=64, - tile_stride=32, - seed=None, - progress_bar_cmd=tqdm, - progress_bar_st=None, - ): - height, width = self.check_resize_height_width(height, width) - - # Tiler parameters - tiler_kwargs = {"tiled": tiled, "tile_size": 
tile_size, "tile_stride": tile_stride} - - # Prepare scheduler - self.scheduler.set_timesteps(num_inference_steps, denoising_strength) - - # Prepare latent tensors - if input_image is not None: - self.load_models_to_device(['vae_encoder']) - image = self.preprocess_image(input_image).to(device=self.device, dtype=self.torch_dtype) - latents = self.encode_image(image, **tiler_kwargs) - noise = self.generate_noise((1, 4, height//8, width//8), seed=seed, device=self.device, dtype=self.torch_dtype) - latents = self.scheduler.add_noise(latents, noise, timestep=self.scheduler.timesteps[0]) - else: - latents = self.generate_noise((1, 4, height//8, width//8), seed=seed, device=self.device, dtype=self.torch_dtype) - - # Encode prompts - self.load_models_to_device(['text_encoder']) - prompt_emb_posi = self.encode_prompt(prompt, clip_skip=clip_skip, positive=True) - prompt_emb_nega = self.encode_prompt(negative_prompt, clip_skip=clip_skip, positive=False) - prompt_emb_locals = [self.encode_prompt(prompt_local, clip_skip=clip_skip, positive=True) for prompt_local in local_prompts] - - # IP-Adapter - if ipadapter_images is not None: - self.load_models_to_device(['ipadapter_image_encoder']) - ipadapter_image_encoding = self.ipadapter_image_encoder(ipadapter_images) - self.load_models_to_device(['ipadapter']) - ipadapter_kwargs_list_posi = {"ipadapter_kwargs_list": self.ipadapter(ipadapter_image_encoding, scale=ipadapter_scale)} - ipadapter_kwargs_list_nega = {"ipadapter_kwargs_list": self.ipadapter(torch.zeros_like(ipadapter_image_encoding))} - else: - ipadapter_kwargs_list_posi, ipadapter_kwargs_list_nega = {"ipadapter_kwargs_list": {}}, {"ipadapter_kwargs_list": {}} - - # Prepare ControlNets - if controlnet_image is not None: - self.load_models_to_device(['controlnet']) - controlnet_image = self.controlnet.process_image(controlnet_image).to(device=self.device, dtype=self.torch_dtype) - controlnet_image = controlnet_image.unsqueeze(1) - controlnet_kwargs = {"controlnet_frames": 
controlnet_image} - else: - controlnet_kwargs = {"controlnet_frames": None} - - # Denoise - self.load_models_to_device(['controlnet', 'unet']) - for progress_id, timestep in enumerate(progress_bar_cmd(self.scheduler.timesteps)): - timestep = timestep.unsqueeze(0).to(self.device) - - # Classifier-free guidance - inference_callback = lambda prompt_emb_posi: lets_dance( - self.unet, motion_modules=None, controlnet=self.controlnet, - sample=latents, timestep=timestep, - **prompt_emb_posi, **controlnet_kwargs, **tiler_kwargs, **ipadapter_kwargs_list_posi, - device=self.device, - ) - noise_pred_posi = self.control_noise_via_local_prompts(prompt_emb_posi, prompt_emb_locals, masks, mask_scales, inference_callback) - noise_pred_nega = lets_dance( - self.unet, motion_modules=None, controlnet=self.controlnet, - sample=latents, timestep=timestep, **prompt_emb_nega, **controlnet_kwargs, **tiler_kwargs, **ipadapter_kwargs_list_nega, - device=self.device, - ) - noise_pred = noise_pred_nega + cfg_scale * (noise_pred_posi - noise_pred_nega) - - # DDIM - latents = self.scheduler.step(noise_pred, timestep, latents) - - # UI - if progress_bar_st is not None: - progress_bar_st.progress(progress_id / len(self.scheduler.timesteps)) - - # Decode image - self.load_models_to_device(['vae_decoder']) - image = self.decode_image(latents, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - - # offload all models - self.load_models_to_device([]) - return image diff --git a/diffsynth/pipelines/sd_video.py b/diffsynth/pipelines/sd_video.py deleted file mode 100644 index 4337beb..0000000 --- a/diffsynth/pipelines/sd_video.py +++ /dev/null @@ -1,269 +0,0 @@ -from ..models import SDTextEncoder, SDUNet, SDVAEDecoder, SDVAEEncoder, SDIpAdapter, IpAdapterCLIPImageEmbedder, SDMotionModel -from ..models.model_manager import ModelManager -from ..controlnets import MultiControlNetManager, ControlNetUnit, ControlNetConfigUnit, Annotator -from ..prompters import SDPrompter -from ..schedulers import 
EnhancedDDIMScheduler -from .sd_image import SDImagePipeline -from .dancer import lets_dance -from typing import List -import torch -from tqdm import tqdm - - - -def lets_dance_with_long_video( - unet: SDUNet, - motion_modules: SDMotionModel = None, - controlnet: MultiControlNetManager = None, - sample = None, - timestep = None, - encoder_hidden_states = None, - ipadapter_kwargs_list = {}, - controlnet_frames = None, - unet_batch_size = 1, - controlnet_batch_size = 1, - cross_frame_attention = False, - tiled=False, - tile_size=64, - tile_stride=32, - device="cuda", - animatediff_batch_size=16, - animatediff_stride=8, -): - num_frames = sample.shape[0] - hidden_states_output = [(torch.zeros(sample[0].shape, dtype=sample[0].dtype), 0) for i in range(num_frames)] - - for batch_id in range(0, num_frames, animatediff_stride): - batch_id_ = min(batch_id + animatediff_batch_size, num_frames) - - # process this batch - hidden_states_batch = lets_dance( - unet, motion_modules, controlnet, - sample[batch_id: batch_id_].to(device), - timestep, - encoder_hidden_states, - ipadapter_kwargs_list=ipadapter_kwargs_list, - controlnet_frames=controlnet_frames[:, batch_id: batch_id_].to(device) if controlnet_frames is not None else None, - unet_batch_size=unet_batch_size, controlnet_batch_size=controlnet_batch_size, - cross_frame_attention=cross_frame_attention, - tiled=tiled, tile_size=tile_size, tile_stride=tile_stride, device=device - ).cpu() - - # update hidden_states - for i, hidden_states_updated in zip(range(batch_id, batch_id_), hidden_states_batch): - bias = max(1 - abs(i - (batch_id + batch_id_ - 1) / 2) / ((batch_id_ - batch_id - 1 + 1e-2) / 2), 1e-2) - hidden_states, num = hidden_states_output[i] - hidden_states = hidden_states * (num / (num + bias)) + hidden_states_updated * (bias / (num + bias)) - hidden_states_output[i] = (hidden_states, num + bias) - - if batch_id_ == num_frames: - break - - # output - hidden_states = torch.stack([h for h, _ in hidden_states_output]) - 
return hidden_states - - - -class SDVideoPipeline(SDImagePipeline): - - def __init__(self, device="cuda", torch_dtype=torch.float16, use_original_animatediff=True): - super().__init__(device=device, torch_dtype=torch_dtype) - self.scheduler = EnhancedDDIMScheduler(beta_schedule="linear" if use_original_animatediff else "scaled_linear") - self.prompter = SDPrompter() - # models - self.text_encoder: SDTextEncoder = None - self.unet: SDUNet = None - self.vae_decoder: SDVAEDecoder = None - self.vae_encoder: SDVAEEncoder = None - self.controlnet: MultiControlNetManager = None - self.ipadapter_image_encoder: IpAdapterCLIPImageEmbedder = None - self.ipadapter: SDIpAdapter = None - self.motion_modules: SDMotionModel = None - - - def fetch_models(self, model_manager: ModelManager, controlnet_config_units: List[ControlNetConfigUnit]=[], prompt_refiner_classes=[]): - # Main models - self.text_encoder = model_manager.fetch_model("sd_text_encoder") - self.unet = model_manager.fetch_model("sd_unet") - self.vae_decoder = model_manager.fetch_model("sd_vae_decoder") - self.vae_encoder = model_manager.fetch_model("sd_vae_encoder") - self.prompter.fetch_models(self.text_encoder) - self.prompter.load_prompt_refiners(model_manager, prompt_refiner_classes) - - # ControlNets - controlnet_units = [] - for config in controlnet_config_units: - controlnet_unit = ControlNetUnit( - Annotator(config.processor_id, device=self.device), - model_manager.fetch_model("sd_controlnet", config.model_path), - config.scale - ) - controlnet_units.append(controlnet_unit) - self.controlnet = MultiControlNetManager(controlnet_units) - - # IP-Adapters - self.ipadapter = model_manager.fetch_model("sd_ipadapter") - self.ipadapter_image_encoder = model_manager.fetch_model("sd_ipadapter_clip_image_encoder") - - # Motion Modules - self.motion_modules = model_manager.fetch_model("sd_motion_modules") - if self.motion_modules is None: - self.scheduler = EnhancedDDIMScheduler(beta_schedule="scaled_linear") - - - 
@staticmethod - def from_model_manager(model_manager: ModelManager, controlnet_config_units: List[ControlNetConfigUnit]=[], prompt_refiner_classes=[]): - pipe = SDVideoPipeline( - device=model_manager.device, - torch_dtype=model_manager.torch_dtype, - ) - pipe.fetch_models(model_manager, controlnet_config_units, prompt_refiner_classes) - return pipe - - - def decode_video(self, latents, tiled=False, tile_size=64, tile_stride=32): - images = [ - self.decode_image(latents[frame_id: frame_id+1], tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - for frame_id in range(latents.shape[0]) - ] - return images - - - def encode_video(self, processed_images, tiled=False, tile_size=64, tile_stride=32): - latents = [] - for image in processed_images: - image = self.preprocess_image(image).to(device=self.device, dtype=self.torch_dtype) - latent = self.encode_image(image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - latents.append(latent.cpu()) - latents = torch.concat(latents, dim=0) - return latents - - - @torch.no_grad() - def __call__( - self, - prompt, - negative_prompt="", - cfg_scale=7.5, - clip_skip=1, - num_frames=None, - input_frames=None, - ipadapter_images=None, - ipadapter_scale=1.0, - controlnet_frames=None, - denoising_strength=1.0, - height=512, - width=512, - num_inference_steps=20, - animatediff_batch_size = 16, - animatediff_stride = 8, - unet_batch_size = 1, - controlnet_batch_size = 1, - cross_frame_attention = False, - smoother=None, - smoother_progress_ids=[], - tiled=False, - tile_size=64, - tile_stride=32, - seed=None, - progress_bar_cmd=tqdm, - progress_bar_st=None, - ): - height, width = self.check_resize_height_width(height, width) - - # Tiler parameters, batch size ... 
- tiler_kwargs = {"tiled": tiled, "tile_size": tile_size, "tile_stride": tile_stride} - other_kwargs = { - "animatediff_batch_size": animatediff_batch_size, "animatediff_stride": animatediff_stride, - "unet_batch_size": unet_batch_size, "controlnet_batch_size": controlnet_batch_size, - "cross_frame_attention": cross_frame_attention, - } - - # Prepare scheduler - self.scheduler.set_timesteps(num_inference_steps, denoising_strength) - - # Prepare latent tensors - if self.motion_modules is None: - noise = self.generate_noise((1, 4, height//8, width//8), seed=seed, device="cpu", dtype=self.torch_dtype).repeat(num_frames, 1, 1, 1) - else: - noise = self.generate_noise((num_frames, 4, height//8, width//8), seed=seed, device="cpu", dtype=self.torch_dtype) - if input_frames is None or denoising_strength == 1.0: - latents = noise - else: - latents = self.encode_video(input_frames, **tiler_kwargs) - latents = self.scheduler.add_noise(latents, noise, timestep=self.scheduler.timesteps[0]) - - # Encode prompts - prompt_emb_posi = self.encode_prompt(prompt, clip_skip=clip_skip, positive=True) - prompt_emb_nega = self.encode_prompt(negative_prompt, clip_skip=clip_skip, positive=False) - - # IP-Adapter - if ipadapter_images is not None: - ipadapter_image_encoding = self.ipadapter_image_encoder(ipadapter_images) - ipadapter_kwargs_list_posi = {"ipadapter_kwargs_list": self.ipadapter(ipadapter_image_encoding, scale=ipadapter_scale)} - ipadapter_kwargs_list_nega = {"ipadapter_kwargs_list": self.ipadapter(torch.zeros_like(ipadapter_image_encoding))} - else: - ipadapter_kwargs_list_posi, ipadapter_kwargs_list_nega = {"ipadapter_kwargs_list": {}}, {"ipadapter_kwargs_list": {}} - - # Prepare ControlNets - if controlnet_frames is not None: - if isinstance(controlnet_frames[0], list): - controlnet_frames_ = [] - for processor_id in range(len(controlnet_frames)): - controlnet_frames_.append( - torch.stack([ - self.controlnet.process_image(controlnet_frame, 
processor_id=processor_id).to(self.torch_dtype) - for controlnet_frame in progress_bar_cmd(controlnet_frames[processor_id]) - ], dim=1) - ) - controlnet_frames = torch.concat(controlnet_frames_, dim=0) - else: - controlnet_frames = torch.stack([ - self.controlnet.process_image(controlnet_frame).to(self.torch_dtype) - for controlnet_frame in progress_bar_cmd(controlnet_frames) - ], dim=1) - controlnet_kwargs = {"controlnet_frames": controlnet_frames} - else: - controlnet_kwargs = {"controlnet_frames": None} - - # Denoise - for progress_id, timestep in enumerate(progress_bar_cmd(self.scheduler.timesteps)): - timestep = timestep.unsqueeze(0).to(self.device) - - # Classifier-free guidance - noise_pred_posi = lets_dance_with_long_video( - self.unet, motion_modules=self.motion_modules, controlnet=self.controlnet, - sample=latents, timestep=timestep, - **prompt_emb_posi, **controlnet_kwargs, **ipadapter_kwargs_list_posi, **other_kwargs, **tiler_kwargs, - device=self.device, - ) - noise_pred_nega = lets_dance_with_long_video( - self.unet, motion_modules=self.motion_modules, controlnet=self.controlnet, - sample=latents, timestep=timestep, - **prompt_emb_nega, **controlnet_kwargs, **ipadapter_kwargs_list_nega, **other_kwargs, **tiler_kwargs, - device=self.device, - ) - noise_pred = noise_pred_nega + cfg_scale * (noise_pred_posi - noise_pred_nega) - - # DDIM and smoother - if smoother is not None and progress_id in smoother_progress_ids: - rendered_frames = self.scheduler.step(noise_pred, timestep, latents, to_final=True) - rendered_frames = self.decode_video(rendered_frames) - rendered_frames = smoother(rendered_frames, original_frames=input_frames) - target_latents = self.encode_video(rendered_frames) - noise_pred = self.scheduler.return_to_timestep(timestep, latents, target_latents) - latents = self.scheduler.step(noise_pred, timestep, latents) - - # UI - if progress_bar_st is not None: - progress_bar_st.progress(progress_id / len(self.scheduler.timesteps)) - - # Decode 
image - output_frames = self.decode_video(latents, **tiler_kwargs) - - # Post-process - if smoother is not None and (num_inference_steps in smoother_progress_ids or -1 in smoother_progress_ids): - output_frames = smoother(output_frames, original_frames=input_frames) - - return output_frames diff --git a/diffsynth/pipelines/sdxl_image.py b/diffsynth/pipelines/sdxl_image.py deleted file mode 100644 index 499c4bb..0000000 --- a/diffsynth/pipelines/sdxl_image.py +++ /dev/null @@ -1,226 +0,0 @@ -from ..models import SDXLTextEncoder, SDXLTextEncoder2, SDXLUNet, SDXLVAEDecoder, SDXLVAEEncoder, SDXLIpAdapter, IpAdapterXLCLIPImageEmbedder -from ..models.kolors_text_encoder import ChatGLMModel -from ..models.model_manager import ModelManager -from ..controlnets import MultiControlNetManager, ControlNetUnit, ControlNetConfigUnit, Annotator -from ..prompters import SDXLPrompter, KolorsPrompter -from ..schedulers import EnhancedDDIMScheduler -from .base import BasePipeline -from .dancer import lets_dance_xl -from typing import List -import torch -from tqdm import tqdm -from einops import repeat - - - -class SDXLImagePipeline(BasePipeline): - - def __init__(self, device="cuda", torch_dtype=torch.float16): - super().__init__(device=device, torch_dtype=torch_dtype) - self.scheduler = EnhancedDDIMScheduler() - self.prompter = SDXLPrompter() - # models - self.text_encoder: SDXLTextEncoder = None - self.text_encoder_2: SDXLTextEncoder2 = None - self.text_encoder_kolors: ChatGLMModel = None - self.unet: SDXLUNet = None - self.vae_decoder: SDXLVAEDecoder = None - self.vae_encoder: SDXLVAEEncoder = None - self.controlnet: MultiControlNetManager = None - self.ipadapter_image_encoder: IpAdapterXLCLIPImageEmbedder = None - self.ipadapter: SDXLIpAdapter = None - self.model_names = ['text_encoder', 'text_encoder_2', 'text_encoder_kolors', 'unet', 'vae_decoder', 'vae_encoder', 'controlnet', 'ipadapter_image_encoder', 'ipadapter'] - - - def denoising_model(self): - return self.unet - - - def 
fetch_models(self, model_manager: ModelManager, controlnet_config_units: List[ControlNetConfigUnit]=[], prompt_refiner_classes=[]): - # Main models - self.text_encoder = model_manager.fetch_model("sdxl_text_encoder") - self.text_encoder_2 = model_manager.fetch_model("sdxl_text_encoder_2") - self.text_encoder_kolors = model_manager.fetch_model("kolors_text_encoder") - self.unet = model_manager.fetch_model("sdxl_unet") - self.vae_decoder = model_manager.fetch_model("sdxl_vae_decoder") - self.vae_encoder = model_manager.fetch_model("sdxl_vae_encoder") - - # ControlNets - controlnet_units = [] - for config in controlnet_config_units: - controlnet_unit = ControlNetUnit( - Annotator(config.processor_id, device=self.device), - model_manager.fetch_model("sdxl_controlnet", config.model_path), - config.scale - ) - controlnet_units.append(controlnet_unit) - self.controlnet = MultiControlNetManager(controlnet_units) - - # IP-Adapters - self.ipadapter = model_manager.fetch_model("sdxl_ipadapter") - self.ipadapter_image_encoder = model_manager.fetch_model("sdxl_ipadapter_clip_image_encoder") - - # Kolors - if self.text_encoder_kolors is not None: - print("Switch to Kolors. 
The prompter and scheduler will be replaced.") - self.prompter = KolorsPrompter() - self.prompter.fetch_models(self.text_encoder_kolors) - self.scheduler = EnhancedDDIMScheduler(beta_end=0.014, num_train_timesteps=1100) - else: - self.prompter.fetch_models(self.text_encoder, self.text_encoder_2) - self.prompter.load_prompt_refiners(model_manager, prompt_refiner_classes) - - - @staticmethod - def from_model_manager(model_manager: ModelManager, controlnet_config_units: List[ControlNetConfigUnit]=[], prompt_refiner_classes=[], device=None): - pipe = SDXLImagePipeline( - device=model_manager.device if device is None else device, - torch_dtype=model_manager.torch_dtype, - ) - pipe.fetch_models(model_manager, controlnet_config_units, prompt_refiner_classes) - return pipe - - - def encode_image(self, image, tiled=False, tile_size=64, tile_stride=32): - latents = self.vae_encoder(image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - return latents - - - def decode_image(self, latent, tiled=False, tile_size=64, tile_stride=32): - image = self.vae_decoder(latent.to(self.device), tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - image = self.vae_output_to_image(image) - return image - - - def encode_prompt(self, prompt, clip_skip=1, clip_skip_2=2, positive=True): - add_prompt_emb, prompt_emb = self.prompter.encode_prompt( - prompt, - clip_skip=clip_skip, clip_skip_2=clip_skip_2, - device=self.device, - positive=positive, - ) - return {"encoder_hidden_states": prompt_emb, "add_text_embeds": add_prompt_emb} - - - def prepare_extra_input(self, latents=None): - height, width = latents.shape[2] * 8, latents.shape[3] * 8 - add_time_id = torch.tensor([height, width, 0, 0, height, width], device=self.device).repeat(latents.shape[0]) - return {"add_time_id": add_time_id} - - - @torch.no_grad() - def __call__( - self, - prompt, - local_prompts=[], - masks=[], - mask_scales=[], - negative_prompt="", - cfg_scale=7.5, - clip_skip=1, - clip_skip_2=2, - 
input_image=None, - ipadapter_images=None, - ipadapter_scale=1.0, - ipadapter_use_instant_style=False, - controlnet_image=None, - denoising_strength=1.0, - height=1024, - width=1024, - num_inference_steps=20, - tiled=False, - tile_size=64, - tile_stride=32, - seed=None, - progress_bar_cmd=tqdm, - progress_bar_st=None, - ): - height, width = self.check_resize_height_width(height, width) - - # Tiler parameters - tiler_kwargs = {"tiled": tiled, "tile_size": tile_size, "tile_stride": tile_stride} - - # Prepare scheduler - self.scheduler.set_timesteps(num_inference_steps, denoising_strength) - - # Prepare latent tensors - if input_image is not None: - self.load_models_to_device(['vae_encoder']) - image = self.preprocess_image(input_image).to(device=self.device, dtype=self.torch_dtype) - latents = self.encode_image(image, **tiler_kwargs) - noise = self.generate_noise((1, 4, height//8, width//8), seed=seed, device=self.device, dtype=self.torch_dtype) - latents = self.scheduler.add_noise(latents, noise, timestep=self.scheduler.timesteps[0]) - else: - latents = self.generate_noise((1, 4, height//8, width//8), seed=seed, device=self.device, dtype=self.torch_dtype) - - # Encode prompts - self.load_models_to_device(['text_encoder', 'text_encoder_2', 'text_encoder_kolors']) - prompt_emb_posi = self.encode_prompt(prompt, clip_skip=clip_skip, clip_skip_2=clip_skip_2, positive=True) - prompt_emb_nega = self.encode_prompt(negative_prompt, clip_skip=clip_skip, clip_skip_2=clip_skip_2, positive=False) - prompt_emb_locals = [self.encode_prompt(prompt_local, clip_skip=clip_skip, clip_skip_2=clip_skip_2, positive=True) for prompt_local in local_prompts] - - # IP-Adapter - if ipadapter_images is not None: - if ipadapter_use_instant_style: - self.ipadapter.set_less_adapter() - else: - self.ipadapter.set_full_adapter() - self.load_models_to_device(['ipadapter_image_encoder']) - ipadapter_image_encoding = self.ipadapter_image_encoder(ipadapter_images) - 
self.load_models_to_device(['ipadapter']) - ipadapter_kwargs_list_posi = {"ipadapter_kwargs_list": self.ipadapter(ipadapter_image_encoding, scale=ipadapter_scale)} - ipadapter_kwargs_list_nega = {"ipadapter_kwargs_list": self.ipadapter(torch.zeros_like(ipadapter_image_encoding))} - else: - ipadapter_kwargs_list_posi, ipadapter_kwargs_list_nega = {"ipadapter_kwargs_list": {}}, {"ipadapter_kwargs_list": {}} - - # Prepare ControlNets - if controlnet_image is not None: - self.load_models_to_device(['controlnet']) - controlnet_image = self.controlnet.process_image(controlnet_image).to(device=self.device, dtype=self.torch_dtype) - controlnet_image = controlnet_image.unsqueeze(1) - controlnet_kwargs = {"controlnet_frames": controlnet_image} - else: - controlnet_kwargs = {"controlnet_frames": None} - - # Prepare extra input - extra_input = self.prepare_extra_input(latents) - - # Denoise - self.load_models_to_device(['controlnet', 'unet']) - for progress_id, timestep in enumerate(progress_bar_cmd(self.scheduler.timesteps)): - timestep = timestep.unsqueeze(0).to(self.device) - - # Classifier-free guidance - inference_callback = lambda prompt_emb_posi: lets_dance_xl( - self.unet, motion_modules=None, controlnet=self.controlnet, - sample=latents, timestep=timestep, **extra_input, - **prompt_emb_posi, **controlnet_kwargs, **tiler_kwargs, **ipadapter_kwargs_list_posi, - device=self.device, - ) - noise_pred_posi = self.control_noise_via_local_prompts(prompt_emb_posi, prompt_emb_locals, masks, mask_scales, inference_callback) - - if cfg_scale != 1.0: - noise_pred_nega = lets_dance_xl( - self.unet, motion_modules=None, controlnet=self.controlnet, - sample=latents, timestep=timestep, **extra_input, - **prompt_emb_nega, **controlnet_kwargs, **tiler_kwargs, **ipadapter_kwargs_list_nega, - device=self.device, - ) - noise_pred = noise_pred_nega + cfg_scale * (noise_pred_posi - noise_pred_nega) - else: - noise_pred = noise_pred_posi - - # DDIM - latents = self.scheduler.step(noise_pred, 
timestep, latents) - - # UI - if progress_bar_st is not None: - progress_bar_st.progress(progress_id / len(self.scheduler.timesteps)) - - # Decode image - self.load_models_to_device(['vae_decoder']) - image = self.decode_image(latents, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - - # offload all models - self.load_models_to_device([]) - return image diff --git a/diffsynth/pipelines/sdxl_video.py b/diffsynth/pipelines/sdxl_video.py deleted file mode 100644 index 308590c..0000000 --- a/diffsynth/pipelines/sdxl_video.py +++ /dev/null @@ -1,226 +0,0 @@ -from ..models import SDXLTextEncoder, SDXLTextEncoder2, SDXLUNet, SDXLVAEDecoder, SDXLVAEEncoder, SDXLIpAdapter, IpAdapterXLCLIPImageEmbedder, SDXLMotionModel -from ..models.kolors_text_encoder import ChatGLMModel -from ..models.model_manager import ModelManager -from ..controlnets import MultiControlNetManager, ControlNetUnit, ControlNetConfigUnit, Annotator -from ..prompters import SDXLPrompter, KolorsPrompter -from ..schedulers import EnhancedDDIMScheduler -from .sdxl_image import SDXLImagePipeline -from .dancer import lets_dance_xl -from typing import List -import torch -from tqdm import tqdm - - - -class SDXLVideoPipeline(SDXLImagePipeline): - - def __init__(self, device="cuda", torch_dtype=torch.float16, use_original_animatediff=True): - super().__init__(device=device, torch_dtype=torch_dtype) - self.scheduler = EnhancedDDIMScheduler(beta_schedule="linear" if use_original_animatediff else "scaled_linear") - self.prompter = SDXLPrompter() - # models - self.text_encoder: SDXLTextEncoder = None - self.text_encoder_2: SDXLTextEncoder2 = None - self.text_encoder_kolors: ChatGLMModel = None - self.unet: SDXLUNet = None - self.vae_decoder: SDXLVAEDecoder = None - self.vae_encoder: SDXLVAEEncoder = None - # self.controlnet: MultiControlNetManager = None (TODO) - self.ipadapter_image_encoder: IpAdapterXLCLIPImageEmbedder = None - self.ipadapter: SDXLIpAdapter = None - self.motion_modules: SDXLMotionModel = 
None - - - def fetch_models(self, model_manager: ModelManager, controlnet_config_units: List[ControlNetConfigUnit]=[], prompt_refiner_classes=[]): - # Main models - self.text_encoder = model_manager.fetch_model("sdxl_text_encoder") - self.text_encoder_2 = model_manager.fetch_model("sdxl_text_encoder_2") - self.text_encoder_kolors = model_manager.fetch_model("kolors_text_encoder") - self.unet = model_manager.fetch_model("sdxl_unet") - self.vae_decoder = model_manager.fetch_model("sdxl_vae_decoder") - self.vae_encoder = model_manager.fetch_model("sdxl_vae_encoder") - self.prompter.fetch_models(self.text_encoder) - self.prompter.load_prompt_refiners(model_manager, prompt_refiner_classes) - - # ControlNets (TODO) - - # IP-Adapters - self.ipadapter = model_manager.fetch_model("sdxl_ipadapter") - self.ipadapter_image_encoder = model_manager.fetch_model("sdxl_ipadapter_clip_image_encoder") - - # Motion Modules - self.motion_modules = model_manager.fetch_model("sdxl_motion_modules") - if self.motion_modules is None: - self.scheduler = EnhancedDDIMScheduler(beta_schedule="scaled_linear") - - # Kolors - if self.text_encoder_kolors is not None: - print("Switch to Kolors. The prompter will be replaced.") - self.prompter = KolorsPrompter() - self.prompter.fetch_models(self.text_encoder_kolors) - # The schedulers of AniamteDiff and Kolors are incompatible. We align it with AniamteDiff. 
- if self.motion_modules is None: - self.scheduler = EnhancedDDIMScheduler(beta_end=0.014, num_train_timesteps=1100) - else: - self.prompter.fetch_models(self.text_encoder, self.text_encoder_2) - - - @staticmethod - def from_model_manager(model_manager: ModelManager, controlnet_config_units: List[ControlNetConfigUnit]=[], prompt_refiner_classes=[]): - pipe = SDXLVideoPipeline( - device=model_manager.device, - torch_dtype=model_manager.torch_dtype, - ) - pipe.fetch_models(model_manager, controlnet_config_units, prompt_refiner_classes) - return pipe - - - def decode_video(self, latents, tiled=False, tile_size=64, tile_stride=32): - images = [ - self.decode_image(latents[frame_id: frame_id+1], tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - for frame_id in range(latents.shape[0]) - ] - return images - - - def encode_video(self, processed_images, tiled=False, tile_size=64, tile_stride=32): - latents = [] - for image in processed_images: - image = self.preprocess_image(image).to(device=self.device, dtype=self.torch_dtype) - latent = self.encode_image(image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - latents.append(latent.cpu()) - latents = torch.concat(latents, dim=0) - return latents - - - @torch.no_grad() - def __call__( - self, - prompt, - negative_prompt="", - cfg_scale=7.5, - clip_skip=1, - num_frames=None, - input_frames=None, - ipadapter_images=None, - ipadapter_scale=1.0, - ipadapter_use_instant_style=False, - controlnet_frames=None, - denoising_strength=1.0, - height=512, - width=512, - num_inference_steps=20, - animatediff_batch_size = 16, - animatediff_stride = 8, - unet_batch_size = 1, - controlnet_batch_size = 1, - cross_frame_attention = False, - smoother=None, - smoother_progress_ids=[], - tiled=False, - tile_size=64, - tile_stride=32, - seed=None, - progress_bar_cmd=tqdm, - progress_bar_st=None, - ): - height, width = self.check_resize_height_width(height, width) - - # Tiler parameters, batch size ... 
- tiler_kwargs = {"tiled": tiled, "tile_size": tile_size, "tile_stride": tile_stride} - - # Prepare scheduler - self.scheduler.set_timesteps(num_inference_steps, denoising_strength) - - # Prepare latent tensors - if self.motion_modules is None: - noise = self.generate_noise((1, 4, height//8, width//8), seed=seed, device="cpu", dtype=self.torch_dtype).repeat(num_frames, 1, 1, 1) - else: - noise = self.generate_noise((num_frames, 4, height//8, width//8), seed=seed, device="cpu", dtype=self.torch_dtype) - if input_frames is None or denoising_strength == 1.0: - latents = noise - else: - latents = self.encode_video(input_frames, **tiler_kwargs) - latents = self.scheduler.add_noise(latents, noise, timestep=self.scheduler.timesteps[0]) - latents = latents.to(self.device) # will be deleted for supporting long videos - - # Encode prompts - prompt_emb_posi = self.encode_prompt(prompt, clip_skip=clip_skip, positive=True) - prompt_emb_nega = self.encode_prompt(negative_prompt, clip_skip=clip_skip, positive=False) - - # IP-Adapter - if ipadapter_images is not None: - if ipadapter_use_instant_style: - self.ipadapter.set_less_adapter() - else: - self.ipadapter.set_full_adapter() - ipadapter_image_encoding = self.ipadapter_image_encoder(ipadapter_images) - ipadapter_kwargs_list_posi = {"ipadapter_kwargs_list": self.ipadapter(ipadapter_image_encoding, scale=ipadapter_scale)} - ipadapter_kwargs_list_nega = {"ipadapter_kwargs_list": self.ipadapter(torch.zeros_like(ipadapter_image_encoding))} - else: - ipadapter_kwargs_list_posi, ipadapter_kwargs_list_nega = {"ipadapter_kwargs_list": {}}, {"ipadapter_kwargs_list": {}} - - # Prepare ControlNets - if controlnet_frames is not None: - if isinstance(controlnet_frames[0], list): - controlnet_frames_ = [] - for processor_id in range(len(controlnet_frames)): - controlnet_frames_.append( - torch.stack([ - self.controlnet.process_image(controlnet_frame, processor_id=processor_id).to(self.torch_dtype) - for controlnet_frame in 
progress_bar_cmd(controlnet_frames[processor_id]) - ], dim=1) - ) - controlnet_frames = torch.concat(controlnet_frames_, dim=0) - else: - controlnet_frames = torch.stack([ - self.controlnet.process_image(controlnet_frame).to(self.torch_dtype) - for controlnet_frame in progress_bar_cmd(controlnet_frames) - ], dim=1) - controlnet_kwargs = {"controlnet_frames": controlnet_frames} - else: - controlnet_kwargs = {"controlnet_frames": None} - - # Prepare extra input - extra_input = self.prepare_extra_input(latents) - - # Denoise - for progress_id, timestep in enumerate(progress_bar_cmd(self.scheduler.timesteps)): - timestep = timestep.unsqueeze(0).to(self.device) - - # Classifier-free guidance - noise_pred_posi = lets_dance_xl( - self.unet, motion_modules=self.motion_modules, controlnet=None, - sample=latents, timestep=timestep, - **prompt_emb_posi, **controlnet_kwargs, **ipadapter_kwargs_list_posi, **extra_input, **tiler_kwargs, - device=self.device, - ) - noise_pred_nega = lets_dance_xl( - self.unet, motion_modules=self.motion_modules, controlnet=None, - sample=latents, timestep=timestep, - **prompt_emb_nega, **controlnet_kwargs, **ipadapter_kwargs_list_nega, **extra_input, **tiler_kwargs, - device=self.device, - ) - noise_pred = noise_pred_nega + cfg_scale * (noise_pred_posi - noise_pred_nega) - - # DDIM and smoother - if smoother is not None and progress_id in smoother_progress_ids: - rendered_frames = self.scheduler.step(noise_pred, timestep, latents, to_final=True) - rendered_frames = self.decode_video(rendered_frames) - rendered_frames = smoother(rendered_frames, original_frames=input_frames) - target_latents = self.encode_video(rendered_frames) - noise_pred = self.scheduler.return_to_timestep(timestep, latents, target_latents) - latents = self.scheduler.step(noise_pred, timestep, latents) - - # UI - if progress_bar_st is not None: - progress_bar_st.progress(progress_id / len(self.scheduler.timesteps)) - - # Decode image - output_frames = self.decode_video(latents, 
**tiler_kwargs) - - # Post-process - if smoother is not None and (num_inference_steps in smoother_progress_ids or -1 in smoother_progress_ids): - output_frames = smoother(output_frames, original_frames=input_frames) - - return output_frames diff --git a/diffsynth/pipelines/step_video.py b/diffsynth/pipelines/step_video.py deleted file mode 100644 index 5614017..0000000 --- a/diffsynth/pipelines/step_video.py +++ /dev/null @@ -1,209 +0,0 @@ -from ..models import ModelManager -from ..models.hunyuan_dit_text_encoder import HunyuanDiTCLIPTextEncoder -from ..models.stepvideo_text_encoder import STEP1TextEncoder -from ..models.stepvideo_dit import StepVideoModel -from ..models.stepvideo_vae import StepVideoVAE -from ..schedulers.flow_match import FlowMatchScheduler -from .base import BasePipeline -from ..prompters import StepVideoPrompter -import torch -from einops import rearrange -import numpy as np -from PIL import Image -from ..vram_management import enable_vram_management, AutoWrappedModule, AutoWrappedLinear -from transformers.models.bert.modeling_bert import BertEmbeddings -from ..models.stepvideo_dit import RMSNorm -from ..models.stepvideo_vae import CausalConv, CausalConvAfterNorm, Upsample2D, BaseGroupNorm - - - -class StepVideoPipeline(BasePipeline): - - def __init__(self, device="cuda", torch_dtype=torch.float16): - super().__init__(device=device, torch_dtype=torch_dtype) - self.scheduler = FlowMatchScheduler(sigma_min=0.0, extra_one_step=True, shift=13.0, reverse_sigmas=True, num_train_timesteps=1) - self.prompter = StepVideoPrompter() - self.text_encoder_1: HunyuanDiTCLIPTextEncoder = None - self.text_encoder_2: STEP1TextEncoder = None - self.dit: StepVideoModel = None - self.vae: StepVideoVAE = None - self.model_names = ['text_encoder_1', 'text_encoder_2', 'dit', 'vae'] - - - def enable_vram_management(self, num_persistent_param_in_dit=None): - dtype = next(iter(self.text_encoder_1.parameters())).dtype - enable_vram_management( - self.text_encoder_1, - 
module_map = { - torch.nn.Linear: AutoWrappedLinear, - BertEmbeddings: AutoWrappedModule, - torch.nn.LayerNorm: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=torch.float32, - computation_device=self.device, - ), - ) - dtype = next(iter(self.text_encoder_2.parameters())).dtype - enable_vram_management( - self.text_encoder_2, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - RMSNorm: AutoWrappedModule, - torch.nn.Embedding: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - ) - dtype = next(iter(self.dit.parameters())).dtype - enable_vram_management( - self.dit, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Conv2d: AutoWrappedModule, - torch.nn.LayerNorm: AutoWrappedModule, - RMSNorm: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device=self.device, - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - max_num_param=num_persistent_param_in_dit, - overflow_module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - ) - dtype = next(iter(self.vae.parameters())).dtype - enable_vram_management( - self.vae, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Conv3d: AutoWrappedModule, - CausalConv: AutoWrappedModule, - CausalConvAfterNorm: AutoWrappedModule, - Upsample2D: AutoWrappedModule, - BaseGroupNorm: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - 
) - self.enable_cpu_offload() - - - def fetch_models(self, model_manager: ModelManager): - self.text_encoder_1 = model_manager.fetch_model("hunyuan_dit_clip_text_encoder") - self.text_encoder_2 = model_manager.fetch_model("stepvideo_text_encoder_2") - self.dit = model_manager.fetch_model("stepvideo_dit") - self.vae = model_manager.fetch_model("stepvideo_vae") - self.prompter.fetch_models(self.text_encoder_1, self.text_encoder_2) - - - @staticmethod - def from_model_manager(model_manager: ModelManager, torch_dtype=None, device=None): - if device is None: device = model_manager.device - if torch_dtype is None: torch_dtype = model_manager.torch_dtype - pipe = StepVideoPipeline(device=device, torch_dtype=torch_dtype) - pipe.fetch_models(model_manager) - return pipe - - - def encode_prompt(self, prompt, positive=True): - clip_embeds, llm_embeds, llm_mask = self.prompter.encode_prompt(prompt, device=self.device, positive=positive) - clip_embeds = clip_embeds.to(dtype=self.torch_dtype, device=self.device) - llm_embeds = llm_embeds.to(dtype=self.torch_dtype, device=self.device) - llm_mask = llm_mask.to(dtype=self.torch_dtype, device=self.device) - return {"encoder_hidden_states_2": clip_embeds, "encoder_hidden_states": llm_embeds, "encoder_attention_mask": llm_mask} - - - def tensor2video(self, frames): - frames = rearrange(frames, "C T H W -> T H W C") - frames = ((frames.float() + 1) * 127.5).clip(0, 255).cpu().numpy().astype(np.uint8) - frames = [Image.fromarray(frame) for frame in frames] - return frames - - - @torch.no_grad() - def __call__( - self, - prompt, - negative_prompt="", - input_video=None, - denoising_strength=1.0, - seed=None, - rand_device="cpu", - height=544, - width=992, - num_frames=204, - cfg_scale=9.0, - num_inference_steps=30, - tiled=True, - tile_size=(34, 34), - tile_stride=(16, 16), - smooth_scale=0.6, - progress_bar_cmd=lambda x: x, - progress_bar_st=None, - ): - # Tiler parameters - tiler_kwargs = {"tiled": tiled, "tile_size": tile_size, 
"tile_stride": tile_stride} - - # Scheduler - self.scheduler.set_timesteps(num_inference_steps, denoising_strength) - - # Initialize noise - latents = self.generate_noise((1, max(num_frames//17*3, 1), 64, height//16, width//16), seed=seed, device=rand_device, dtype=self.torch_dtype).to(self.device) - - # Encode prompts - self.load_models_to_device(["text_encoder_1", "text_encoder_2"]) - prompt_emb_posi = self.encode_prompt(prompt, positive=True) - if cfg_scale != 1.0: - prompt_emb_nega = self.encode_prompt(negative_prompt, positive=False) - - # Denoise - self.load_models_to_device(["dit"]) - for progress_id, timestep in enumerate(progress_bar_cmd(self.scheduler.timesteps)): - timestep = timestep.unsqueeze(0).to(dtype=self.torch_dtype, device=self.device) - print(f"Step {progress_id + 1} / {len(self.scheduler.timesteps)}") - - # Inference - noise_pred_posi = self.dit(latents, timestep=timestep, **prompt_emb_posi) - if cfg_scale != 1.0: - noise_pred_nega = self.dit(latents, timestep=timestep, **prompt_emb_nega) - noise_pred = noise_pred_nega + cfg_scale * (noise_pred_posi - noise_pred_nega) - else: - noise_pred = noise_pred_posi - - # Scheduler - latents = self.scheduler.step(noise_pred, self.scheduler.timesteps[progress_id], latents) - - # Decode - self.load_models_to_device(['vae']) - frames = self.vae.decode(latents, device=self.device, smooth_scale=smooth_scale, **tiler_kwargs) - self.load_models_to_device([]) - frames = self.tensor2video(frames[0]) - - return frames diff --git a/diffsynth/pipelines/svd_video.py b/diffsynth/pipelines/svd_video.py deleted file mode 100644 index b71597e..0000000 --- a/diffsynth/pipelines/svd_video.py +++ /dev/null @@ -1,300 +0,0 @@ -from ..models import ModelManager, SVDImageEncoder, SVDUNet, SVDVAEEncoder, SVDVAEDecoder -from ..schedulers import ContinuousODEScheduler -from .base import BasePipeline -import torch -from tqdm import tqdm -from PIL import Image -import numpy as np -from einops import rearrange, repeat - - - -class 
SVDVideoPipeline(BasePipeline): - - def __init__(self, device="cuda", torch_dtype=torch.float16): - super().__init__(device=device, torch_dtype=torch_dtype) - self.scheduler = ContinuousODEScheduler() - # models - self.image_encoder: SVDImageEncoder = None - self.unet: SVDUNet = None - self.vae_encoder: SVDVAEEncoder = None - self.vae_decoder: SVDVAEDecoder = None - - - def fetch_models(self, model_manager: ModelManager): - self.image_encoder = model_manager.fetch_model("svd_image_encoder") - self.unet = model_manager.fetch_model("svd_unet") - self.vae_encoder = model_manager.fetch_model("svd_vae_encoder") - self.vae_decoder = model_manager.fetch_model("svd_vae_decoder") - - - @staticmethod - def from_model_manager(model_manager: ModelManager, **kwargs): - pipe = SVDVideoPipeline( - device=model_manager.device, - torch_dtype=model_manager.torch_dtype - ) - pipe.fetch_models(model_manager) - return pipe - - - def encode_image_with_clip(self, image): - image = self.preprocess_image(image).to(device=self.device, dtype=self.torch_dtype) - image = SVDCLIPImageProcessor().resize_with_antialiasing(image, (224, 224)) - image = (image + 1.0) / 2.0 - mean = torch.tensor([0.48145466, 0.4578275, 0.40821073]).reshape(1, 3, 1, 1).to(device=self.device, dtype=self.torch_dtype) - std = torch.tensor([0.26862954, 0.26130258, 0.27577711]).reshape(1, 3, 1, 1).to(device=self.device, dtype=self.torch_dtype) - image = (image - mean) / std - image_emb = self.image_encoder(image) - return image_emb - - - def encode_image_with_vae(self, image, noise_aug_strength, seed=None): - image = self.preprocess_image(image).to(device=self.device, dtype=self.torch_dtype) - noise = self.generate_noise(image.shape, seed=seed, device=self.device, dtype=self.torch_dtype) - image = image + noise_aug_strength * noise - image_emb = self.vae_encoder(image) / self.vae_encoder.scaling_factor - return image_emb - - - def encode_video_with_vae(self, video): - video = torch.concat([self.preprocess_image(frame) for 
frame in video], dim=0) - video = rearrange(video, "T C H W -> 1 C T H W") - video = video.to(device=self.device, dtype=self.torch_dtype) - latents = self.vae_encoder.encode_video(video) - latents = rearrange(latents[0], "C T H W -> T C H W") - return latents - - - def tensor2video(self, frames): - frames = rearrange(frames, "C T H W -> T H W C") - frames = ((frames.float() + 1) * 127.5).clip(0, 255).cpu().numpy().astype(np.uint8) - frames = [Image.fromarray(frame) for frame in frames] - return frames - - - def calculate_noise_pred( - self, - latents, - timestep, - add_time_id, - cfg_scales, - image_emb_vae_posi, image_emb_clip_posi, - image_emb_vae_nega, image_emb_clip_nega - ): - # Positive side - noise_pred_posi = self.unet( - torch.cat([latents, image_emb_vae_posi], dim=1), - timestep, image_emb_clip_posi, add_time_id - ) - # Negative side - noise_pred_nega = self.unet( - torch.cat([latents, image_emb_vae_nega], dim=1), - timestep, image_emb_clip_nega, add_time_id - ) - - # Classifier-free guidance - noise_pred = noise_pred_nega + cfg_scales * (noise_pred_posi - noise_pred_nega) - - return noise_pred - - - def post_process_latents(self, latents, post_normalize=True, contrast_enhance_scale=1.0): - if post_normalize: - mean, std = latents.mean(), latents.std() - latents = (latents - latents.mean(dim=[1, 2, 3], keepdim=True)) / latents.std(dim=[1, 2, 3], keepdim=True) * std + mean - latents = latents * contrast_enhance_scale - return latents - - - @torch.no_grad() - def __call__( - self, - input_image=None, - input_video=None, - mask_frames=[], - mask_frame_ids=[], - min_cfg_scale=1.0, - max_cfg_scale=3.0, - denoising_strength=1.0, - num_frames=25, - height=576, - width=1024, - fps=7, - motion_bucket_id=127, - noise_aug_strength=0.02, - num_inference_steps=20, - post_normalize=True, - contrast_enhance_scale=1.2, - seed=None, - progress_bar_cmd=tqdm, - progress_bar_st=None, - ): - height, width = self.check_resize_height_width(height, width) - - # Prepare scheduler 
- self.scheduler.set_timesteps(num_inference_steps, denoising_strength=denoising_strength) - - # Prepare latent tensors - noise = self.generate_noise((num_frames, 4, height//8, width//8), seed=seed, device=self.device, dtype=self.torch_dtype) - if denoising_strength == 1.0: - latents = noise.clone() - else: - latents = self.encode_video_with_vae(input_video) - latents = self.scheduler.add_noise(latents, noise, self.scheduler.timesteps[0]) - - # Prepare mask frames - if len(mask_frames) > 0: - mask_latents = self.encode_video_with_vae(mask_frames) - - # Encode image - image_emb_clip_posi = self.encode_image_with_clip(input_image) - image_emb_clip_nega = torch.zeros_like(image_emb_clip_posi) - image_emb_vae_posi = repeat(self.encode_image_with_vae(input_image, noise_aug_strength, seed=seed), "B C H W -> (B T) C H W", T=num_frames) - image_emb_vae_nega = torch.zeros_like(image_emb_vae_posi) - - # Prepare classifier-free guidance - cfg_scales = torch.linspace(min_cfg_scale, max_cfg_scale, num_frames) - cfg_scales = cfg_scales.reshape(num_frames, 1, 1, 1).to(device=self.device, dtype=self.torch_dtype) - - # Prepare positional id - add_time_id = torch.tensor([[fps-1, motion_bucket_id, noise_aug_strength]], device=self.device) - - # Denoise - for progress_id, timestep in enumerate(progress_bar_cmd(self.scheduler.timesteps)): - - # Mask frames - for frame_id, mask_frame_id in enumerate(mask_frame_ids): - latents[mask_frame_id] = self.scheduler.add_noise(mask_latents[frame_id], noise[mask_frame_id], timestep) - - # Fetch model output - noise_pred = self.calculate_noise_pred( - latents, timestep, add_time_id, cfg_scales, - image_emb_vae_posi, image_emb_clip_posi, image_emb_vae_nega, image_emb_clip_nega - ) - - # Forward Euler - latents = self.scheduler.step(noise_pred, timestep, latents) - - # Update progress bar - if progress_bar_st is not None: - progress_bar_st.progress(progress_id / len(self.scheduler.timesteps)) - - # Decode image - latents = 
self.post_process_latents(latents, post_normalize=post_normalize, contrast_enhance_scale=contrast_enhance_scale) - video = self.vae_decoder.decode_video(latents, progress_bar=progress_bar_cmd) - video = self.tensor2video(video) - - return video - - - -class SVDCLIPImageProcessor: - def __init__(self): - pass - - def resize_with_antialiasing(self, input, size, interpolation="bicubic", align_corners=True): - h, w = input.shape[-2:] - factors = (h / size[0], w / size[1]) - - # First, we have to determine sigma - # Taken from skimage: https://github.com/scikit-image/scikit-image/blob/v0.19.2/skimage/transform/_warps.py#L171 - sigmas = ( - max((factors[0] - 1.0) / 2.0, 0.001), - max((factors[1] - 1.0) / 2.0, 0.001), - ) - - # Now kernel size. Good results are for 3 sigma, but that is kind of slow. Pillow uses 1 sigma - # https://github.com/python-pillow/Pillow/blob/master/src/libImaging/Resample.c#L206 - # But they do it in the 2 passes, which gives better results. Let's try 2 sigmas for now - ks = int(max(2.0 * 2 * sigmas[0], 3)), int(max(2.0 * 2 * sigmas[1], 3)) - - # Make sure it is odd - if (ks[0] % 2) == 0: - ks = ks[0] + 1, ks[1] - - if (ks[1] % 2) == 0: - ks = ks[0], ks[1] + 1 - - input = self._gaussian_blur2d(input, ks, sigmas) - - output = torch.nn.functional.interpolate(input, size=size, mode=interpolation, align_corners=align_corners) - return output - - - def _compute_padding(self, kernel_size): - """Compute padding tuple.""" - # 4 or 6 ints: (padding_left, padding_right,padding_top,padding_bottom) - # https://pytorch.org/docs/stable/nn.html#torch.nn.functional.pad - if len(kernel_size) < 2: - raise AssertionError(kernel_size) - computed = [k - 1 for k in kernel_size] - - # for even kernels we need to do asymmetric padding :( - out_padding = 2 * len(kernel_size) * [0] - - for i in range(len(kernel_size)): - computed_tmp = computed[-(i + 1)] - - pad_front = computed_tmp // 2 - pad_rear = computed_tmp - pad_front - - out_padding[2 * i + 0] = pad_front - 
out_padding[2 * i + 1] = pad_rear - - return out_padding - - - def _filter2d(self, input, kernel): - # prepare kernel - b, c, h, w = input.shape - tmp_kernel = kernel[:, None, ...].to(device=input.device, dtype=input.dtype) - - tmp_kernel = tmp_kernel.expand(-1, c, -1, -1) - - height, width = tmp_kernel.shape[-2:] - - padding_shape: list[int] = self._compute_padding([height, width]) - input = torch.nn.functional.pad(input, padding_shape, mode="reflect") - - # kernel and input tensor reshape to align element-wise or batch-wise params - tmp_kernel = tmp_kernel.reshape(-1, 1, height, width) - input = input.view(-1, tmp_kernel.size(0), input.size(-2), input.size(-1)) - - # convolve the tensor with the kernel. - output = torch.nn.functional.conv2d(input, tmp_kernel, groups=tmp_kernel.size(0), padding=0, stride=1) - - out = output.view(b, c, h, w) - return out - - - def _gaussian(self, window_size: int, sigma): - if isinstance(sigma, float): - sigma = torch.tensor([[sigma]]) - - batch_size = sigma.shape[0] - - x = (torch.arange(window_size, device=sigma.device, dtype=sigma.dtype) - window_size // 2).expand(batch_size, -1) - - if window_size % 2 == 0: - x = x + 0.5 - - gauss = torch.exp(-x.pow(2.0) / (2 * sigma.pow(2.0))) - - return gauss / gauss.sum(-1, keepdim=True) - - - def _gaussian_blur2d(self, input, kernel_size, sigma): - if isinstance(sigma, tuple): - sigma = torch.tensor([sigma], dtype=input.dtype) - else: - sigma = sigma.to(dtype=input.dtype) - - ky, kx = int(kernel_size[0]), int(kernel_size[1]) - bs = sigma.shape[0] - kernel_x = self._gaussian(kx, sigma[:, 1].view(bs, 1)) - kernel_y = self._gaussian(ky, sigma[:, 0].view(bs, 1)) - out_x = self._filter2d(input, kernel_x[..., None, :]) - out = self._filter2d(out_x, kernel_y[..., None]) - - return out diff --git a/diffsynth/pipelines/wan_video.py b/diffsynth/pipelines/wan_video.py deleted file mode 100644 index e70e0cc..0000000 --- a/diffsynth/pipelines/wan_video.py +++ /dev/null @@ -1,626 +0,0 @@ -import types 
-from ..models import ModelManager -from ..models.wan_video_dit import WanModel -from ..models.wan_video_text_encoder import WanTextEncoder -from ..models.wan_video_vae import WanVideoVAE -from ..models.wan_video_image_encoder import WanImageEncoder -from ..models.wan_video_vace import VaceWanModel -from ..schedulers.flow_match import FlowMatchScheduler -from .base import BasePipeline -from ..prompters import WanPrompter -import torch, os -from einops import rearrange -import numpy as np -from PIL import Image -from tqdm import tqdm -from typing import Optional - -from ..vram_management import enable_vram_management, AutoWrappedModule, AutoWrappedLinear -from ..models.wan_video_text_encoder import T5RelativeEmbedding, T5LayerNorm -from ..models.wan_video_dit import RMSNorm, sinusoidal_embedding_1d -from ..models.wan_video_vae import RMS_norm, CausalConv3d, Upsample -from ..models.wan_video_motion_controller import WanMotionControllerModel - - - -class WanVideoPipeline(BasePipeline): - - def __init__(self, device="cuda", torch_dtype=torch.float16, tokenizer_path=None): - super().__init__(device=device, torch_dtype=torch_dtype) - self.scheduler = FlowMatchScheduler(shift=5, sigma_min=0.0, extra_one_step=True) - self.prompter = WanPrompter(tokenizer_path=tokenizer_path) - self.text_encoder: WanTextEncoder = None - self.image_encoder: WanImageEncoder = None - self.dit: WanModel = None - self.vae: WanVideoVAE = None - self.motion_controller: WanMotionControllerModel = None - self.vace: VaceWanModel = None - self.model_names = ['text_encoder', 'dit', 'vae', 'image_encoder', 'motion_controller', 'vace'] - self.height_division_factor = 16 - self.width_division_factor = 16 - self.use_unified_sequence_parallel = False - - - def enable_vram_management(self, num_persistent_param_in_dit=None): - dtype = next(iter(self.text_encoder.parameters())).dtype - enable_vram_management( - self.text_encoder, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Embedding: 
AutoWrappedModule, - T5RelativeEmbedding: AutoWrappedModule, - T5LayerNorm: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - ) - dtype = next(iter(self.dit.parameters())).dtype - enable_vram_management( - self.dit, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Conv3d: AutoWrappedModule, - torch.nn.LayerNorm: AutoWrappedModule, - RMSNorm: AutoWrappedModule, - torch.nn.Conv2d: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device=self.device, - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - max_num_param=num_persistent_param_in_dit, - overflow_module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - ) - dtype = next(iter(self.vae.parameters())).dtype - enable_vram_management( - self.vae, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Conv2d: AutoWrappedModule, - RMS_norm: AutoWrappedModule, - CausalConv3d: AutoWrappedModule, - Upsample: AutoWrappedModule, - torch.nn.SiLU: AutoWrappedModule, - torch.nn.Dropout: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device=self.device, - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - ) - if self.image_encoder is not None: - dtype = next(iter(self.image_encoder.parameters())).dtype - enable_vram_management( - self.image_encoder, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Conv2d: AutoWrappedModule, - torch.nn.LayerNorm: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - 
onload_device="cpu", - computation_dtype=dtype, - computation_device=self.device, - ), - ) - if self.motion_controller is not None: - dtype = next(iter(self.motion_controller.parameters())).dtype - enable_vram_management( - self.motion_controller, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=dtype, - computation_device=self.device, - ), - ) - if self.vace is not None: - enable_vram_management( - self.vace, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Conv3d: AutoWrappedModule, - torch.nn.LayerNorm: AutoWrappedModule, - RMSNorm: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device=self.device, - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - ) - self.enable_cpu_offload() - - - def fetch_models(self, model_manager: ModelManager): - text_encoder_model_and_path = model_manager.fetch_model("wan_video_text_encoder", require_model_path=True) - if text_encoder_model_and_path is not None: - self.text_encoder, tokenizer_path = text_encoder_model_and_path - self.prompter.fetch_models(self.text_encoder) - self.prompter.fetch_tokenizer(os.path.join(os.path.dirname(tokenizer_path), "google/umt5-xxl")) - self.dit = model_manager.fetch_model("wan_video_dit") - self.vae = model_manager.fetch_model("wan_video_vae") - self.image_encoder = model_manager.fetch_model("wan_video_image_encoder") - self.motion_controller = model_manager.fetch_model("wan_video_motion_controller") - self.vace = model_manager.fetch_model("wan_video_vace") - - - @staticmethod - def from_model_manager(model_manager: ModelManager, torch_dtype=None, device=None, use_usp=False): - if device is None: device = model_manager.device - if torch_dtype is None: torch_dtype = model_manager.torch_dtype - pipe = WanVideoPipeline(device=device, 
torch_dtype=torch_dtype) - pipe.fetch_models(model_manager) - if use_usp: - from xfuser.core.distributed import get_sequence_parallel_world_size - from ..distributed.xdit_context_parallel import usp_attn_forward, usp_dit_forward - - for block in pipe.dit.blocks: - block.self_attn.forward = types.MethodType(usp_attn_forward, block.self_attn) - pipe.dit.forward = types.MethodType(usp_dit_forward, pipe.dit) - pipe.sp_size = get_sequence_parallel_world_size() - pipe.use_unified_sequence_parallel = True - return pipe - - - def denoising_model(self): - return self.dit - - - def encode_prompt(self, prompt, positive=True): - prompt_emb = self.prompter.encode_prompt(prompt, positive=positive, device=self.device) - return {"context": prompt_emb} - - - def encode_image(self, image, end_image, num_frames, height, width, tiled=False, tile_size=(34, 34), tile_stride=(18, 16)): - image = self.preprocess_image(image.resize((width, height))).to(self.device) - clip_context = self.image_encoder.encode_image([image]) - msk = torch.ones(1, num_frames, height//8, width//8, device=self.device) - msk[:, 1:] = 0 - if end_image is not None: - end_image = self.preprocess_image(end_image.resize((width, height))).to(self.device) - vae_input = torch.concat([image.transpose(0,1), torch.zeros(3, num_frames-2, height, width).to(image.device), end_image.transpose(0,1)],dim=1) - if self.dit.has_image_pos_emb: - clip_context = torch.concat([clip_context, self.image_encoder.encode_image([end_image])], dim=1) - msk[:, -1:] = 1 - else: - vae_input = torch.concat([image.transpose(0, 1), torch.zeros(3, num_frames-1, height, width).to(image.device)], dim=1) - - msk = torch.concat([torch.repeat_interleave(msk[:, 0:1], repeats=4, dim=1), msk[:, 1:]], dim=1) - msk = msk.view(1, msk.shape[1] // 4, 4, height//8, width//8) - msk = msk.transpose(1, 2)[0] - - y = self.vae.encode([vae_input.to(dtype=self.torch_dtype, device=self.device)], device=self.device, tiled=tiled, tile_size=tile_size, 
tile_stride=tile_stride)[0] - y = y.to(dtype=self.torch_dtype, device=self.device) - y = torch.concat([msk, y]) - y = y.unsqueeze(0) - clip_context = clip_context.to(dtype=self.torch_dtype, device=self.device) - y = y.to(dtype=self.torch_dtype, device=self.device) - return {"clip_feature": clip_context, "y": y} - - - def encode_control_video(self, control_video, tiled=True, tile_size=(34, 34), tile_stride=(18, 16)): - control_video = self.preprocess_images(control_video) - control_video = torch.stack(control_video, dim=2).to(dtype=self.torch_dtype, device=self.device) - latents = self.encode_video(control_video, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride).to(dtype=self.torch_dtype, device=self.device) - return latents - - - def prepare_reference_image(self, reference_image, height, width): - if reference_image is not None: - self.load_models_to_device(["vae"]) - reference_image = reference_image.resize((width, height)) - reference_image = self.preprocess_images([reference_image]) - reference_image = torch.stack(reference_image, dim=2).to(dtype=self.torch_dtype, device=self.device) - reference_latents = self.vae.encode(reference_image, device=self.device) - return {"reference_latents": reference_latents} - else: - return {} - - - def prepare_controlnet_kwargs(self, control_video, num_frames, height, width, clip_feature=None, y=None, tiled=True, tile_size=(34, 34), tile_stride=(18, 16)): - if control_video is not None: - control_latents = self.encode_control_video(control_video, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - if clip_feature is None or y is None: - clip_feature = torch.zeros((1, 257, 1280), dtype=self.torch_dtype, device=self.device) - y = torch.zeros((1, 16, (num_frames - 1) // 4 + 1, height//8, width//8), dtype=self.torch_dtype, device=self.device) - else: - y = y[:, -16:] - y = torch.concat([control_latents, y], dim=1) - return {"clip_feature": clip_feature, "y": y} - - - def tensor2video(self, frames): - frames = 
rearrange(frames, "C T H W -> T H W C") - frames = ((frames.float() + 1) * 127.5).clip(0, 255).cpu().numpy().astype(np.uint8) - frames = [Image.fromarray(frame) for frame in frames] - return frames - - - def prepare_extra_input(self, latents=None): - return {} - - - def encode_video(self, input_video, tiled=True, tile_size=(34, 34), tile_stride=(18, 16)): - latents = self.vae.encode(input_video, device=self.device, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - return latents - - - def decode_video(self, latents, tiled=True, tile_size=(34, 34), tile_stride=(18, 16)): - frames = self.vae.decode(latents, device=self.device, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - return frames - - - def prepare_unified_sequence_parallel(self): - return {"use_unified_sequence_parallel": self.use_unified_sequence_parallel} - - - def prepare_motion_bucket_id(self, motion_bucket_id): - motion_bucket_id = torch.Tensor((motion_bucket_id,)).to(dtype=self.torch_dtype, device=self.device) - return {"motion_bucket_id": motion_bucket_id} - - - def prepare_vace_kwargs( - self, - latents, - vace_video=None, vace_mask=None, vace_reference_image=None, vace_scale=1.0, - height=480, width=832, num_frames=81, - seed=None, rand_device="cpu", - tiled=True, tile_size=(34, 34), tile_stride=(18, 16) - ): - if vace_video is not None or vace_mask is not None or vace_reference_image is not None: - self.load_models_to_device(["vae"]) - if vace_video is None: - vace_video = torch.zeros((1, 3, num_frames, height, width), dtype=self.torch_dtype, device=self.device) - else: - vace_video = self.preprocess_images(vace_video) - vace_video = torch.stack(vace_video, dim=2).to(dtype=self.torch_dtype, device=self.device) - - if vace_mask is None: - vace_mask = torch.ones_like(vace_video) - else: - vace_mask = self.preprocess_images(vace_mask) - vace_mask = torch.stack(vace_mask, dim=2).to(dtype=self.torch_dtype, device=self.device) - - inactive = vace_video * (1 - vace_mask) + 0 * 
vace_mask - reactive = vace_video * vace_mask + 0 * (1 - vace_mask) - inactive = self.encode_video(inactive, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride).to(dtype=self.torch_dtype, device=self.device) - reactive = self.encode_video(reactive, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride).to(dtype=self.torch_dtype, device=self.device) - vace_video_latents = torch.concat((inactive, reactive), dim=1) - - vace_mask_latents = rearrange(vace_mask[0,0], "T (H P) (W Q) -> 1 (P Q) T H W", P=8, Q=8) - vace_mask_latents = torch.nn.functional.interpolate(vace_mask_latents, size=((vace_mask_latents.shape[2] + 3) // 4, vace_mask_latents.shape[3], vace_mask_latents.shape[4]), mode='nearest-exact') - - if vace_reference_image is None: - pass - else: - vace_reference_image = self.preprocess_images([vace_reference_image]) - vace_reference_image = torch.stack(vace_reference_image, dim=2).to(dtype=self.torch_dtype, device=self.device) - vace_reference_latents = self.encode_video(vace_reference_image, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride).to(dtype=self.torch_dtype, device=self.device) - vace_reference_latents = torch.concat((vace_reference_latents, torch.zeros_like(vace_reference_latents)), dim=1) - vace_video_latents = torch.concat((vace_reference_latents, vace_video_latents), dim=2) - vace_mask_latents = torch.concat((torch.zeros_like(vace_mask_latents[:, :, :1]), vace_mask_latents), dim=2) - - noise = self.generate_noise((1, 16, 1, latents.shape[3], latents.shape[4]), seed=seed, device=rand_device, dtype=torch.float32) - noise = noise.to(dtype=self.torch_dtype, device=self.device) - latents = torch.concat((noise, latents), dim=2) - - vace_context = torch.concat((vace_video_latents, vace_mask_latents), dim=1) - return latents, {"vace_context": vace_context, "vace_scale": vace_scale} - else: - return latents, {"vace_context": None, "vace_scale": vace_scale} - - - @torch.no_grad() - def __call__( - self, - prompt, - negative_prompt="", - 
input_image=None, - end_image=None, - input_video=None, - control_video=None, - reference_image=None, - vace_video=None, - vace_video_mask=None, - vace_reference_image=None, - vace_scale=1.0, - denoising_strength=1.0, - seed=None, - rand_device="cpu", - height=480, - width=832, - num_frames=81, - cfg_scale=5.0, - num_inference_steps=50, - sigma_shift=5.0, - motion_bucket_id=None, - tiled=True, - tile_size=(30, 52), - tile_stride=(15, 26), - tea_cache_l1_thresh=None, - tea_cache_model_id="", - progress_bar_cmd=tqdm, - progress_bar_st=None, - ): - # Parameter check - height, width = self.check_resize_height_width(height, width) - if num_frames % 4 != 1: - num_frames = (num_frames + 2) // 4 * 4 + 1 - print(f"Only `num_frames % 4 == 1` is acceptable. We round it up to {num_frames}.") - - # Tiler parameters - tiler_kwargs = {"tiled": tiled, "tile_size": tile_size, "tile_stride": tile_stride} - - # Scheduler - self.scheduler.set_timesteps(num_inference_steps, denoising_strength=denoising_strength, shift=sigma_shift) - - # Initialize noise - noise = self.generate_noise((1, 16, (num_frames - 1) // 4 + 1, height//8, width//8), seed=seed, device=rand_device, dtype=torch.float32) - noise = noise.to(dtype=self.torch_dtype, device=self.device) - if input_video is not None: - self.load_models_to_device(['vae']) - input_video = self.preprocess_images(input_video) - input_video = torch.stack(input_video, dim=2).to(dtype=self.torch_dtype, device=self.device) - latents = self.encode_video(input_video, **tiler_kwargs).to(dtype=self.torch_dtype, device=self.device) - latents = self.scheduler.add_noise(latents, noise, timestep=self.scheduler.timesteps[0]) - else: - latents = noise - - # Encode prompts - self.load_models_to_device(["text_encoder"]) - prompt_emb_posi = self.encode_prompt(prompt, positive=True) - if cfg_scale != 1.0: - prompt_emb_nega = self.encode_prompt(negative_prompt, positive=False) - - # Encode image - if input_image is not None and self.image_encoder is not None: - 
self.load_models_to_device(["image_encoder", "vae"]) - image_emb = self.encode_image(input_image, end_image, num_frames, height, width, **tiler_kwargs) - else: - image_emb = {} - - # Reference image - reference_image_kwargs = self.prepare_reference_image(reference_image, height, width) - - # ControlNet - if control_video is not None: - self.load_models_to_device(["image_encoder", "vae"]) - image_emb = self.prepare_controlnet_kwargs(control_video, num_frames, height, width, **image_emb, **tiler_kwargs) - - # Motion Controller - if self.motion_controller is not None and motion_bucket_id is not None: - motion_kwargs = self.prepare_motion_bucket_id(motion_bucket_id) - else: - motion_kwargs = {} - - # Extra input - extra_input = self.prepare_extra_input(latents) - - # VACE - latents, vace_kwargs = self.prepare_vace_kwargs( - latents, vace_video, vace_video_mask, vace_reference_image, vace_scale, - height=height, width=width, num_frames=num_frames, seed=seed, rand_device=rand_device, **tiler_kwargs - ) - - # TeaCache - tea_cache_posi = {"tea_cache": TeaCache(num_inference_steps, rel_l1_thresh=tea_cache_l1_thresh, model_id=tea_cache_model_id) if tea_cache_l1_thresh is not None else None} - tea_cache_nega = {"tea_cache": TeaCache(num_inference_steps, rel_l1_thresh=tea_cache_l1_thresh, model_id=tea_cache_model_id) if tea_cache_l1_thresh is not None else None} - - # Unified Sequence Parallel - usp_kwargs = self.prepare_unified_sequence_parallel() - - # Denoise - self.load_models_to_device(["dit", "motion_controller", "vace"]) - for progress_id, timestep in enumerate(progress_bar_cmd(self.scheduler.timesteps)): - timestep = timestep.unsqueeze(0).to(dtype=self.torch_dtype, device=self.device) - - # Inference - noise_pred_posi = model_fn_wan_video( - self.dit, motion_controller=self.motion_controller, vace=self.vace, - x=latents, timestep=timestep, - **prompt_emb_posi, **image_emb, **extra_input, - **tea_cache_posi, **usp_kwargs, **motion_kwargs, **vace_kwargs, 
**reference_image_kwargs, - ) - if cfg_scale != 1.0: - noise_pred_nega = model_fn_wan_video( - self.dit, motion_controller=self.motion_controller, vace=self.vace, - x=latents, timestep=timestep, - **prompt_emb_nega, **image_emb, **extra_input, - **tea_cache_nega, **usp_kwargs, **motion_kwargs, **vace_kwargs, **reference_image_kwargs, - ) - noise_pred = noise_pred_nega + cfg_scale * (noise_pred_posi - noise_pred_nega) - else: - noise_pred = noise_pred_posi - - # Scheduler - latents = self.scheduler.step(noise_pred, self.scheduler.timesteps[progress_id], latents) - - if vace_reference_image is not None: - latents = latents[:, :, 1:] - - # Decode - self.load_models_to_device(['vae']) - frames = self.decode_video(latents, **tiler_kwargs) - self.load_models_to_device([]) - frames = self.tensor2video(frames[0]) - - return frames - - - -class TeaCache: - def __init__(self, num_inference_steps, rel_l1_thresh, model_id): - self.num_inference_steps = num_inference_steps - self.step = 0 - self.accumulated_rel_l1_distance = 0 - self.previous_modulated_input = None - self.rel_l1_thresh = rel_l1_thresh - self.previous_residual = None - self.previous_hidden_states = None - - self.coefficients_dict = { - "Wan2.1-T2V-1.3B": [-5.21862437e+04, 9.23041404e+03, -5.28275948e+02, 1.36987616e+01, -4.99875664e-02], - "Wan2.1-T2V-14B": [-3.03318725e+05, 4.90537029e+04, -2.65530556e+03, 5.87365115e+01, -3.15583525e-01], - "Wan2.1-I2V-14B-480P": [2.57151496e+05, -3.54229917e+04, 1.40286849e+03, -1.35890334e+01, 1.32517977e-01], - "Wan2.1-I2V-14B-720P": [ 8.10705460e+03, 2.13393892e+03, -3.72934672e+02, 1.66203073e+01, -4.17769401e-02], - } - if model_id not in self.coefficients_dict: - supported_model_ids = ", ".join([i for i in self.coefficients_dict]) - raise ValueError(f"{model_id} is not a supported TeaCache model id. 
Please choose a valid model id in ({supported_model_ids}).") - self.coefficients = self.coefficients_dict[model_id] - - def check(self, dit: WanModel, x, t_mod): - modulated_inp = t_mod.clone() - if self.step == 0 or self.step == self.num_inference_steps - 1: - should_calc = True - self.accumulated_rel_l1_distance = 0 - else: - coefficients = self.coefficients - rescale_func = np.poly1d(coefficients) - self.accumulated_rel_l1_distance += rescale_func(((modulated_inp-self.previous_modulated_input).abs().mean() / self.previous_modulated_input.abs().mean()).cpu().item()) - if self.accumulated_rel_l1_distance < self.rel_l1_thresh: - should_calc = False - else: - should_calc = True - self.accumulated_rel_l1_distance = 0 - self.previous_modulated_input = modulated_inp - self.step += 1 - if self.step == self.num_inference_steps: - self.step = 0 - if should_calc: - self.previous_hidden_states = x.clone() - return not should_calc - - def store(self, hidden_states): - self.previous_residual = hidden_states - self.previous_hidden_states - self.previous_hidden_states = None - - def update(self, hidden_states): - hidden_states = hidden_states + self.previous_residual - return hidden_states - - - -def model_fn_wan_video( - dit: WanModel, - motion_controller: WanMotionControllerModel = None, - vace: VaceWanModel = None, - x: torch.Tensor = None, - timestep: torch.Tensor = None, - context: torch.Tensor = None, - clip_feature: Optional[torch.Tensor] = None, - y: Optional[torch.Tensor] = None, - reference_latents = None, - vace_context = None, - vace_scale = 1.0, - tea_cache: TeaCache = None, - use_unified_sequence_parallel: bool = False, - motion_bucket_id: Optional[torch.Tensor] = None, - **kwargs, -): - if use_unified_sequence_parallel: - import torch.distributed as dist - from xfuser.core.distributed import (get_sequence_parallel_rank, - get_sequence_parallel_world_size, - get_sp_group) - - t = dit.time_embedding(sinusoidal_embedding_1d(dit.freq_dim, timestep)) - t_mod = 
dit.time_projection(t).unflatten(1, (6, dit.dim)) - if motion_bucket_id is not None and motion_controller is not None: - t_mod = t_mod + motion_controller(motion_bucket_id).unflatten(1, (6, dit.dim)) - context = dit.text_embedding(context) - - if dit.has_image_input: - x = torch.cat([x, y], dim=1) # (b, c_x + c_y, f, h, w) - clip_embdding = dit.img_emb(clip_feature) - context = torch.cat([clip_embdding, context], dim=1) - - x, (f, h, w) = dit.patchify(x) - - # Reference image - if reference_latents is not None: - reference_latents = dit.ref_conv(reference_latents[:, :, 0]).flatten(2).transpose(1, 2) - x = torch.concat([reference_latents, x], dim=1) - f += 1 - - freqs = torch.cat([ - dit.freqs[0][:f].view(f, 1, 1, -1).expand(f, h, w, -1), - dit.freqs[1][:h].view(1, h, 1, -1).expand(f, h, w, -1), - dit.freqs[2][:w].view(1, 1, w, -1).expand(f, h, w, -1) - ], dim=-1).reshape(f * h * w, 1, -1).to(x.device) - - # TeaCache - if tea_cache is not None: - tea_cache_update = tea_cache.check(dit, x, t_mod) - else: - tea_cache_update = False - - if vace_context is not None: - vace_hints = vace(x, vace_context, context, t_mod, freqs) - - # blocks - if use_unified_sequence_parallel: - if dist.is_initialized() and dist.get_world_size() > 1: - chunks = torch.chunk(x, get_sequence_parallel_world_size(), dim=1) - pad_shape = chunks[0].shape[1] - chunks[-1].shape[1] - chunks = [torch.nn.functional.pad(chunk, (0, 0, 0, chunks[0].shape[1]-chunk.shape[1]), value=0) for chunk in chunks] - x = chunks[get_sequence_parallel_rank()] - - if tea_cache_update: - x = tea_cache.update(x) - else: - for block_id, block in enumerate(dit.blocks): - x = block(x, context, t_mod, freqs) - if vace_context is not None and block_id in vace.vace_layers_mapping: - current_vace_hint = vace_hints[vace.vace_layers_mapping[block_id]] - if use_unified_sequence_parallel and dist.is_initialized() and dist.get_world_size() > 1: - current_vace_hint = torch.chunk(current_vace_hint, get_sequence_parallel_world_size(), 
dim=1)[get_sequence_parallel_rank()] - current_vace_hint = torch.nn.functional.pad(current_vace_hint, (0, 0, 0, chunks[0].shape[1] - current_vace_hint.shape[1]), value=0) - x = x + current_vace_hint * vace_scale - if tea_cache is not None: - tea_cache.store(x) - - x = dit.head(x, t) - if use_unified_sequence_parallel: - if dist.is_initialized() and dist.get_world_size() > 1: - x = get_sp_group().all_gather(x, dim=1) - x = x[:, :-pad_shape] if pad_shape > 0 else x - # Remove reference latents - if reference_latents is not None: - x = x[:, reference_latents.shape[1]:] - f -= 1 - x = dit.unpatchify(x, (f, h, w)) - return x diff --git a/diffsynth/pipelines/wan_video_new.py b/diffsynth/pipelines/wan_video_new.py deleted file mode 100644 index d374afd..0000000 --- a/diffsynth/pipelines/wan_video_new.py +++ /dev/null @@ -1,1637 +0,0 @@ -import torch, warnings, glob, os, types -import numpy as np -from PIL import Image -from einops import repeat, reduce -from typing import Optional, Union -from dataclasses import dataclass -from modelscope import snapshot_download -from einops import rearrange -import numpy as np -from PIL import Image -from tqdm import tqdm -from typing import Optional -from typing_extensions import Literal - -from ..utils import BasePipeline, ModelConfig, PipelineUnit, PipelineUnitRunner -from ..models import ModelManager, load_state_dict -from ..models.wan_video_dit import WanModel, RMSNorm, sinusoidal_embedding_1d -from ..models.wan_video_dit_s2v import rope_precompute -from ..models.wan_video_text_encoder import WanTextEncoder, T5RelativeEmbedding, T5LayerNorm -from ..models.wan_video_vae import WanVideoVAE, RMS_norm, CausalConv3d, Upsample -from ..models.wan_video_image_encoder import WanImageEncoder -from ..models.wan_video_vace import VaceWanModel -from ..models.wan_video_motion_controller import WanMotionControllerModel -from ..models.wan_video_animate_adapter import WanAnimateAdapter -from ..models.longcat_video_dit import 
LongCatVideoTransformer3DModel -from ..schedulers.flow_match import FlowMatchScheduler -from ..prompters import WanPrompter -from ..vram_management import enable_vram_management, AutoWrappedModule, AutoWrappedLinear, WanAutoCastLayerNorm -from ..lora import GeneralLoRALoader - - - -class WanVideoPipeline(BasePipeline): - - def __init__(self, device="cuda", torch_dtype=torch.bfloat16, tokenizer_path=None): - super().__init__( - device=device, torch_dtype=torch_dtype, - height_division_factor=16, width_division_factor=16, time_division_factor=4, time_division_remainder=1 - ) - self.scheduler = FlowMatchScheduler(shift=5, sigma_min=0.0, extra_one_step=True) - self.prompter = WanPrompter(tokenizer_path=tokenizer_path) - self.text_encoder: WanTextEncoder = None - self.image_encoder: WanImageEncoder = None - self.dit: WanModel = None - self.dit2: WanModel = None - self.vae: WanVideoVAE = None - self.motion_controller: WanMotionControllerModel = None - self.vace: VaceWanModel = None - self.vace2: VaceWanModel = None - self.animate_adapter: WanAnimateAdapter = None - self.in_iteration_models = ("dit", "motion_controller", "vace", "animate_adapter") - self.in_iteration_models_2 = ("dit2", "motion_controller", "vace2", "animate_adapter") - self.unit_runner = PipelineUnitRunner() - self.units = [ - WanVideoUnit_ShapeChecker(), - WanVideoUnit_NoiseInitializer(), - WanVideoUnit_PromptEmbedder(), - WanVideoUnit_S2V(), - WanVideoUnit_InputVideoEmbedder(), - WanVideoUnit_ImageEmbedderVAE(), - WanVideoUnit_ImageEmbedderCLIP(), - WanVideoUnit_ImageEmbedderFused(), - WanVideoUnit_FunControl(), - WanVideoUnit_FunReference(), - WanVideoUnit_FunCameraControl(), - WanVideoUnit_SpeedControl(), - WanVideoUnit_VACE(), - WanVideoPostUnit_AnimateVideoSplit(), - WanVideoPostUnit_AnimatePoseLatents(), - WanVideoPostUnit_AnimateFacePixelValues(), - WanVideoPostUnit_AnimateInpaint(), - WanVideoUnit_UnifiedSequenceParallel(), - WanVideoUnit_TeaCache(), - WanVideoUnit_CfgMerger(), - 
WanVideoUnit_LongCatVideo(), - ] - self.post_units = [ - WanVideoPostUnit_S2V(), - ] - self.model_fn = model_fn_wan_video - - def load_lora( - self, - module: torch.nn.Module, - lora_config: Union[ModelConfig, str] = None, - alpha=1, - hotload=False, - state_dict=None, - ): - if state_dict is None: - if isinstance(lora_config, str): - lora = load_state_dict(lora_config, torch_dtype=self.torch_dtype, device=self.device) - else: - lora_config.download_if_necessary() - lora = load_state_dict(lora_config.path, torch_dtype=self.torch_dtype, device=self.device) - else: - lora = state_dict - if hotload: - for name, module in module.named_modules(): - if isinstance(module, AutoWrappedLinear): - lora_a_name = f'{name}.lora_A.default.weight' - lora_b_name = f'{name}.lora_B.default.weight' - if lora_a_name in lora and lora_b_name in lora: - module.lora_A_weights.append(lora[lora_a_name] * alpha) - module.lora_B_weights.append(lora[lora_b_name]) - else: - loader = GeneralLoRALoader(torch_dtype=self.torch_dtype, device=self.device) - loader.load(module, lora, alpha=alpha) - - def training_loss(self, **inputs): - max_timestep_boundary = int(inputs.get("max_timestep_boundary", 1) * self.scheduler.num_train_timesteps) - min_timestep_boundary = int(inputs.get("min_timestep_boundary", 0) * self.scheduler.num_train_timesteps) - timestep_id = torch.randint(min_timestep_boundary, max_timestep_boundary, (1,)) - timestep = self.scheduler.timesteps[timestep_id].to(dtype=self.torch_dtype, device=self.device) - - inputs["latents"] = self.scheduler.add_noise(inputs["input_latents"], inputs["noise"], timestep) - training_target = self.scheduler.training_target(inputs["input_latents"], inputs["noise"], timestep) - - noise_pred = self.model_fn(**inputs, timestep=timestep) - - loss = torch.nn.functional.mse_loss(noise_pred.float(), training_target.float()) - loss = loss * self.scheduler.training_weight(timestep) - return loss - - - def enable_vram_management(self, 
num_persistent_param_in_dit=None, vram_limit=None, vram_buffer=0.5): - self.vram_management_enabled = True - if num_persistent_param_in_dit is not None: - vram_limit = None - else: - if vram_limit is None: - vram_limit = self.get_vram() - vram_limit = vram_limit - vram_buffer - if self.text_encoder is not None: - dtype = next(iter(self.text_encoder.parameters())).dtype - enable_vram_management( - self.text_encoder, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Embedding: AutoWrappedModule, - T5RelativeEmbedding: AutoWrappedModule, - T5LayerNorm: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - vram_limit=vram_limit, - ) - if self.dit is not None: - from ..models.longcat_video_dit import LayerNorm_FP32, RMSNorm_FP32 - dtype = next(iter(self.dit.parameters())).dtype - device = "cpu" if vram_limit is not None else self.device - enable_vram_management( - self.dit, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Conv3d: AutoWrappedModule, - torch.nn.LayerNorm: WanAutoCastLayerNorm, - RMSNorm: AutoWrappedModule, - torch.nn.Conv2d: AutoWrappedModule, - torch.nn.Conv1d: AutoWrappedModule, - torch.nn.Embedding: AutoWrappedModule, - LayerNorm_FP32: AutoWrappedModule, - RMSNorm_FP32: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device=device, - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - max_num_param=num_persistent_param_in_dit, - overflow_module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - vram_limit=vram_limit, - ) - if self.dit2 is not None: - dtype = next(iter(self.dit2.parameters())).dtype - device = "cpu" if 
vram_limit is not None else self.device - enable_vram_management( - self.dit2, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Conv3d: AutoWrappedModule, - torch.nn.LayerNorm: WanAutoCastLayerNorm, - RMSNorm: AutoWrappedModule, - torch.nn.Conv2d: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device=device, - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - max_num_param=num_persistent_param_in_dit, - overflow_module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - vram_limit=vram_limit, - ) - if self.vae is not None: - dtype = next(iter(self.vae.parameters())).dtype - enable_vram_management( - self.vae, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Conv2d: AutoWrappedModule, - RMS_norm: AutoWrappedModule, - CausalConv3d: AutoWrappedModule, - Upsample: AutoWrappedModule, - torch.nn.SiLU: AutoWrappedModule, - torch.nn.Dropout: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device=self.device, - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - ) - if self.image_encoder is not None: - dtype = next(iter(self.image_encoder.parameters())).dtype - enable_vram_management( - self.image_encoder, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Conv2d: AutoWrappedModule, - torch.nn.LayerNorm: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=dtype, - computation_device=self.device, - ), - ) - if self.motion_controller is not None: - dtype = next(iter(self.motion_controller.parameters())).dtype - enable_vram_management( - self.motion_controller, - module_map = { - 
torch.nn.Linear: AutoWrappedLinear, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=dtype, - computation_device=self.device, - ), - ) - if self.vace is not None: - device = "cpu" if vram_limit is not None else self.device - enable_vram_management( - self.vace, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.Conv3d: AutoWrappedModule, - torch.nn.LayerNorm: AutoWrappedModule, - RMSNorm: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device=device, - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - vram_limit=vram_limit, - ) - if self.audio_encoder is not None: - # TODO: need check - dtype = next(iter(self.audio_encoder.parameters())).dtype - enable_vram_management( - self.audio_encoder, - module_map = { - torch.nn.Linear: AutoWrappedLinear, - torch.nn.LayerNorm: AutoWrappedModule, - torch.nn.Conv1d: AutoWrappedModule, - }, - module_config = dict( - offload_dtype=dtype, - offload_device="cpu", - onload_dtype=dtype, - onload_device="cpu", - computation_dtype=self.torch_dtype, - computation_device=self.device, - ), - ) - - - def initialize_usp(self): - import torch.distributed as dist - from xfuser.core.distributed import initialize_model_parallel, init_distributed_environment - dist.init_process_group(backend="nccl", init_method="env://") - init_distributed_environment(rank=dist.get_rank(), world_size=dist.get_world_size()) - initialize_model_parallel( - sequence_parallel_degree=dist.get_world_size(), - ring_degree=1, - ulysses_degree=dist.get_world_size(), - ) - torch.cuda.set_device(dist.get_rank()) - - - def enable_usp(self): - from xfuser.core.distributed import get_sequence_parallel_world_size - from ..distributed.xdit_context_parallel import usp_attn_forward, usp_dit_forward - - for block in self.dit.blocks: - block.self_attn.forward = 
types.MethodType(usp_attn_forward, block.self_attn) - self.dit.forward = types.MethodType(usp_dit_forward, self.dit) - if self.dit2 is not None: - for block in self.dit2.blocks: - block.self_attn.forward = types.MethodType(usp_attn_forward, block.self_attn) - self.dit2.forward = types.MethodType(usp_dit_forward, self.dit2) - self.sp_size = get_sequence_parallel_world_size() - self.use_unified_sequence_parallel = True - - - @staticmethod - def from_pretrained( - torch_dtype: torch.dtype = torch.bfloat16, - device: Union[str, torch.device] = "cuda", - model_configs: list[ModelConfig] = [], - tokenizer_config: ModelConfig = ModelConfig(model_id="Wan-AI/Wan2.1-T2V-1.3B", origin_file_pattern="google/*"), - audio_processor_config: ModelConfig = None, - redirect_common_files: bool = True, - use_usp=False, - ): - # Redirect model path - if redirect_common_files: - redirect_dict = { - "models_t5_umt5-xxl-enc-bf16.pth": "Wan-AI/Wan2.1-T2V-1.3B", - "Wan2.1_VAE.pth": "Wan-AI/Wan2.1-T2V-1.3B", - "models_clip_open-clip-xlm-roberta-large-vit-huge-14.pth": "Wan-AI/Wan2.1-I2V-14B-480P", - } - for model_config in model_configs: - if model_config.origin_file_pattern is None or model_config.model_id is None: - continue - if model_config.origin_file_pattern in redirect_dict and model_config.model_id != redirect_dict[model_config.origin_file_pattern]: - print(f"To avoid repeatedly downloading model files, ({model_config.model_id}, {model_config.origin_file_pattern}) is redirected to ({redirect_dict[model_config.origin_file_pattern]}, {model_config.origin_file_pattern}). 
You can use `redirect_common_files=False` to disable file redirection.") - model_config.model_id = redirect_dict[model_config.origin_file_pattern] - - # Initialize pipeline - pipe = WanVideoPipeline(device=device, torch_dtype=torch_dtype) - if use_usp: pipe.initialize_usp() - - # Download and load models - model_manager = ModelManager() - for model_config in model_configs: - model_config.download_if_necessary(use_usp=use_usp) - model_manager.load_model( - model_config.path, - device=model_config.offload_device or device, - torch_dtype=model_config.offload_dtype or torch_dtype - ) - - # Load models - pipe.text_encoder = model_manager.fetch_model("wan_video_text_encoder") - dit = model_manager.fetch_model("wan_video_dit", index=2) - if isinstance(dit, list): - pipe.dit, pipe.dit2 = dit - else: - pipe.dit = dit - pipe.vae = model_manager.fetch_model("wan_video_vae") - pipe.image_encoder = model_manager.fetch_model("wan_video_image_encoder") - pipe.motion_controller = model_manager.fetch_model("wan_video_motion_controller") - vace = model_manager.fetch_model("wan_video_vace", index=2) - if isinstance(vace, list): - pipe.vace, pipe.vace2 = vace - else: - pipe.vace = vace - pipe.audio_encoder = model_manager.fetch_model("wans2v_audio_encoder") - pipe.animate_adapter = model_manager.fetch_model("wan_video_animate_adapter") - - # Size division factor - if pipe.vae is not None: - pipe.height_division_factor = pipe.vae.upsampling_factor * 2 - pipe.width_division_factor = pipe.vae.upsampling_factor * 2 - - # Initialize tokenizer - tokenizer_config.download_if_necessary(use_usp=use_usp) - pipe.prompter.fetch_models(pipe.text_encoder) - pipe.prompter.fetch_tokenizer(tokenizer_config.path) - - if audio_processor_config is not None: - audio_processor_config.download_if_necessary(use_usp=use_usp) - from transformers import Wav2Vec2Processor - pipe.audio_processor = Wav2Vec2Processor.from_pretrained(audio_processor_config.path) - # Unified Sequence Parallel - if use_usp: 
pipe.enable_usp() - return pipe - - - @torch.no_grad() - def __call__( - self, - # Prompt - prompt: str, - negative_prompt: Optional[str] = "", - # Image-to-video - input_image: Optional[Image.Image] = None, - # First-last-frame-to-video - end_image: Optional[Image.Image] = None, - # Video-to-video - input_video: Optional[list[Image.Image]] = None, - denoising_strength: Optional[float] = 1.0, - # Speech-to-video - input_audio: Optional[np.array] = None, - audio_embeds: Optional[torch.Tensor] = None, - audio_sample_rate: Optional[int] = 16000, - s2v_pose_video: Optional[list[Image.Image]] = None, - s2v_pose_latents: Optional[torch.Tensor] = None, - motion_video: Optional[list[Image.Image]] = None, - # ControlNet - control_video: Optional[list[Image.Image]] = None, - reference_image: Optional[Image.Image] = None, - # Camera control - camera_control_direction: Optional[Literal["Left", "Right", "Up", "Down", "LeftUp", "LeftDown", "RightUp", "RightDown"]] = None, - camera_control_speed: Optional[float] = 1/54, - camera_control_origin: Optional[tuple] = (0, 0.532139961, 0.946026558, 0.5, 0.5, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0), - # VACE - vace_video: Optional[list[Image.Image]] = None, - vace_video_mask: Optional[Image.Image] = None, - vace_reference_image: Optional[Image.Image] = None, - vace_scale: Optional[float] = 1.0, - # Animate - animate_pose_video: Optional[list[Image.Image]] = None, - animate_face_video: Optional[list[Image.Image]] = None, - animate_inpaint_video: Optional[list[Image.Image]] = None, - animate_mask_video: Optional[list[Image.Image]] = None, - # Randomness - seed: Optional[int] = None, - rand_device: Optional[str] = "cpu", - # Shape - height: Optional[int] = 480, - width: Optional[int] = 832, - num_frames=81, - # Classifier-free guidance - cfg_scale: Optional[float] = 5.0, - cfg_merge: Optional[bool] = False, - # Boundary - switch_DiT_boundary: Optional[float] = 0.875, - # Scheduler - num_inference_steps: Optional[int] = 50, - sigma_shift: 
Optional[float] = 5.0, - # Speed control - motion_bucket_id: Optional[int] = None, - # LongCat-Video - longcat_video: Optional[list[Image.Image]] = None, - # VAE tiling - tiled: Optional[bool] = True, - tile_size: Optional[tuple[int, int]] = (30, 52), - tile_stride: Optional[tuple[int, int]] = (15, 26), - # Sliding window - sliding_window_size: Optional[int] = None, - sliding_window_stride: Optional[int] = None, - # Teacache - tea_cache_l1_thresh: Optional[float] = None, - tea_cache_model_id: Optional[str] = "", - # progress_bar - progress_bar_cmd=tqdm, - ): - # Scheduler - self.scheduler.set_timesteps(num_inference_steps, denoising_strength=denoising_strength, shift=sigma_shift) - - # Inputs - inputs_posi = { - "prompt": prompt, - "tea_cache_l1_thresh": tea_cache_l1_thresh, "tea_cache_model_id": tea_cache_model_id, "num_inference_steps": num_inference_steps, - } - inputs_nega = { - "negative_prompt": negative_prompt, - "tea_cache_l1_thresh": tea_cache_l1_thresh, "tea_cache_model_id": tea_cache_model_id, "num_inference_steps": num_inference_steps, - } - inputs_shared = { - "input_image": input_image, - "end_image": end_image, - "input_video": input_video, "denoising_strength": denoising_strength, - "control_video": control_video, "reference_image": reference_image, - "camera_control_direction": camera_control_direction, "camera_control_speed": camera_control_speed, "camera_control_origin": camera_control_origin, - "vace_video": vace_video, "vace_video_mask": vace_video_mask, "vace_reference_image": vace_reference_image, "vace_scale": vace_scale, - "seed": seed, "rand_device": rand_device, - "height": height, "width": width, "num_frames": num_frames, - "cfg_scale": cfg_scale, "cfg_merge": cfg_merge, - "sigma_shift": sigma_shift, - "motion_bucket_id": motion_bucket_id, - "longcat_video": longcat_video, - "tiled": tiled, "tile_size": tile_size, "tile_stride": tile_stride, - "sliding_window_size": sliding_window_size, "sliding_window_stride": sliding_window_stride, - 
"input_audio": input_audio, "audio_sample_rate": audio_sample_rate, "s2v_pose_video": s2v_pose_video, "audio_embeds": audio_embeds, "s2v_pose_latents": s2v_pose_latents, "motion_video": motion_video, - "animate_pose_video": animate_pose_video, "animate_face_video": animate_face_video, "animate_inpaint_video": animate_inpaint_video, "animate_mask_video": animate_mask_video, - } - for unit in self.units: - inputs_shared, inputs_posi, inputs_nega = self.unit_runner(unit, self, inputs_shared, inputs_posi, inputs_nega) - - # Denoise - self.load_models_to_device(self.in_iteration_models) - models = {name: getattr(self, name) for name in self.in_iteration_models} - for progress_id, timestep in enumerate(progress_bar_cmd(self.scheduler.timesteps)): - # Switch DiT if necessary - if timestep.item() < switch_DiT_boundary * self.scheduler.num_train_timesteps and self.dit2 is not None and not models["dit"] is self.dit2: - self.load_models_to_device(self.in_iteration_models_2) - models["dit"] = self.dit2 - models["vace"] = self.vace2 - - # Timestep - timestep = timestep.unsqueeze(0).to(dtype=self.torch_dtype, device=self.device) - - # Inference - noise_pred_posi = self.model_fn(**models, **inputs_shared, **inputs_posi, timestep=timestep) - if cfg_scale != 1.0: - if cfg_merge: - noise_pred_posi, noise_pred_nega = noise_pred_posi.chunk(2, dim=0) - else: - noise_pred_nega = self.model_fn(**models, **inputs_shared, **inputs_nega, timestep=timestep) - noise_pred = noise_pred_nega + cfg_scale * (noise_pred_posi - noise_pred_nega) - else: - noise_pred = noise_pred_posi - - # Scheduler - inputs_shared["latents"] = self.scheduler.step(noise_pred, self.scheduler.timesteps[progress_id], inputs_shared["latents"]) - if "first_frame_latents" in inputs_shared: - inputs_shared["latents"][:, :, 0:1] = inputs_shared["first_frame_latents"] - - # VACE (TODO: remove it) - if vace_reference_image is not None or (animate_pose_video is not None and animate_face_video is not None): - if 
vace_reference_image is not None and isinstance(vace_reference_image, list): - f = len(vace_reference_image) - else: - f = 1 - inputs_shared["latents"] = inputs_shared["latents"][:, :, f:] - # post-denoising, pre-decoding processing logic - for unit in self.post_units: - inputs_shared, _, _ = self.unit_runner(unit, self, inputs_shared, inputs_posi, inputs_nega) - # Decode - self.load_models_to_device(['vae']) - video = self.vae.decode(inputs_shared["latents"], device=self.device, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - video = self.vae_output_to_video(video) - self.load_models_to_device([]) - - return video - - - -class WanVideoUnit_ShapeChecker(PipelineUnit): - def __init__(self): - super().__init__(input_params=("height", "width", "num_frames")) - - def process(self, pipe: WanVideoPipeline, height, width, num_frames): - height, width, num_frames = pipe.check_resize_height_width(height, width, num_frames) - return {"height": height, "width": width, "num_frames": num_frames} - - - -class WanVideoUnit_NoiseInitializer(PipelineUnit): - def __init__(self): - super().__init__(input_params=("height", "width", "num_frames", "seed", "rand_device", "vace_reference_image")) - - def process(self, pipe: WanVideoPipeline, height, width, num_frames, seed, rand_device, vace_reference_image): - length = (num_frames - 1) // 4 + 1 - if vace_reference_image is not None: - f = len(vace_reference_image) if isinstance(vace_reference_image, list) else 1 - length += f - shape = (1, pipe.vae.model.z_dim, length, height // pipe.vae.upsampling_factor, width // pipe.vae.upsampling_factor) - noise = pipe.generate_noise(shape, seed=seed, rand_device=rand_device) - if vace_reference_image is not None: - noise = torch.concat((noise[:, :, -f:], noise[:, :, :-f]), dim=2) - return {"noise": noise} - - - -class WanVideoUnit_InputVideoEmbedder(PipelineUnit): - def __init__(self): - super().__init__( - input_params=("input_video", "noise", "tiled", "tile_size", "tile_stride", 
"vace_reference_image"), - onload_model_names=("vae",) - ) - - def process(self, pipe: WanVideoPipeline, input_video, noise, tiled, tile_size, tile_stride, vace_reference_image): - if input_video is None: - return {"latents": noise} - pipe.load_models_to_device(["vae"]) - input_video = pipe.preprocess_video(input_video) - input_latents = pipe.vae.encode(input_video, device=pipe.device, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride).to(dtype=pipe.torch_dtype, device=pipe.device) - if vace_reference_image is not None: - if not isinstance(vace_reference_image, list): - vace_reference_image = [vace_reference_image] - vace_reference_image = pipe.preprocess_video(vace_reference_image) - vace_reference_latents = pipe.vae.encode(vace_reference_image, device=pipe.device).to(dtype=pipe.torch_dtype, device=pipe.device) - input_latents = torch.concat([vace_reference_latents, input_latents], dim=2) - if pipe.scheduler.training: - return {"latents": noise, "input_latents": input_latents} - else: - latents = pipe.scheduler.add_noise(input_latents, noise, timestep=pipe.scheduler.timesteps[0]) - return {"latents": latents} - - - -class WanVideoUnit_PromptEmbedder(PipelineUnit): - def __init__(self): - super().__init__( - seperate_cfg=True, - input_params_posi={"prompt": "prompt", "positive": "positive"}, - input_params_nega={"prompt": "negative_prompt", "positive": "positive"}, - onload_model_names=("text_encoder",) - ) - - def process(self, pipe: WanVideoPipeline, prompt, positive) -> dict: - pipe.load_models_to_device(self.onload_model_names) - prompt_emb = pipe.prompter.encode_prompt(prompt, positive=positive, device=pipe.device) - return {"context": prompt_emb} - - - -class WanVideoUnit_ImageEmbedder(PipelineUnit): - """ - Deprecated - """ - def __init__(self): - super().__init__( - input_params=("input_image", "end_image", "num_frames", "height", "width", "tiled", "tile_size", "tile_stride"), - onload_model_names=("image_encoder", "vae") - ) - - def process(self, 
pipe: WanVideoPipeline, input_image, end_image, num_frames, height, width, tiled, tile_size, tile_stride): - if input_image is None or pipe.image_encoder is None: - return {} - pipe.load_models_to_device(self.onload_model_names) - image = pipe.preprocess_image(input_image.resize((width, height))).to(pipe.device) - clip_context = pipe.image_encoder.encode_image([image]) - msk = torch.ones(1, num_frames, height//8, width//8, device=pipe.device) - msk[:, 1:] = 0 - if end_image is not None: - end_image = pipe.preprocess_image(end_image.resize((width, height))).to(pipe.device) - vae_input = torch.concat([image.transpose(0,1), torch.zeros(3, num_frames-2, height, width).to(image.device), end_image.transpose(0,1)],dim=1) - if pipe.dit.has_image_pos_emb: - clip_context = torch.concat([clip_context, pipe.image_encoder.encode_image([end_image])], dim=1) - msk[:, -1:] = 1 - else: - vae_input = torch.concat([image.transpose(0, 1), torch.zeros(3, num_frames-1, height, width).to(image.device)], dim=1) - - msk = torch.concat([torch.repeat_interleave(msk[:, 0:1], repeats=4, dim=1), msk[:, 1:]], dim=1) - msk = msk.view(1, msk.shape[1] // 4, 4, height//8, width//8) - msk = msk.transpose(1, 2)[0] - - y = pipe.vae.encode([vae_input.to(dtype=pipe.torch_dtype, device=pipe.device)], device=pipe.device, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride)[0] - y = y.to(dtype=pipe.torch_dtype, device=pipe.device) - y = torch.concat([msk, y]) - y = y.unsqueeze(0) - clip_context = clip_context.to(dtype=pipe.torch_dtype, device=pipe.device) - y = y.to(dtype=pipe.torch_dtype, device=pipe.device) - return {"clip_feature": clip_context, "y": y} - - - -class WanVideoUnit_ImageEmbedderCLIP(PipelineUnit): - def __init__(self): - super().__init__( - input_params=("input_image", "end_image", "height", "width"), - onload_model_names=("image_encoder",) - ) - - def process(self, pipe: WanVideoPipeline, input_image, end_image, height, width): - if input_image is None or pipe.image_encoder is None 
or not pipe.dit.require_clip_embedding: - return {} - pipe.load_models_to_device(self.onload_model_names) - image = pipe.preprocess_image(input_image.resize((width, height))).to(pipe.device) - clip_context = pipe.image_encoder.encode_image([image]) - if end_image is not None: - end_image = pipe.preprocess_image(end_image.resize((width, height))).to(pipe.device) - if pipe.dit.has_image_pos_emb: - clip_context = torch.concat([clip_context, pipe.image_encoder.encode_image([end_image])], dim=1) - clip_context = clip_context.to(dtype=pipe.torch_dtype, device=pipe.device) - return {"clip_feature": clip_context} - - - -class WanVideoUnit_ImageEmbedderVAE(PipelineUnit): - def __init__(self): - super().__init__( - input_params=("input_image", "end_image", "num_frames", "height", "width", "tiled", "tile_size", "tile_stride"), - onload_model_names=("vae",) - ) - - def process(self, pipe: WanVideoPipeline, input_image, end_image, num_frames, height, width, tiled, tile_size, tile_stride): - if input_image is None or not pipe.dit.require_vae_embedding: - return {} - pipe.load_models_to_device(self.onload_model_names) - image = pipe.preprocess_image(input_image.resize((width, height))).to(pipe.device) - msk = torch.ones(1, num_frames, height//8, width//8, device=pipe.device) - msk[:, 1:] = 0 - if end_image is not None: - end_image = pipe.preprocess_image(end_image.resize((width, height))).to(pipe.device) - vae_input = torch.concat([image.transpose(0,1), torch.zeros(3, num_frames-2, height, width).to(image.device), end_image.transpose(0,1)],dim=1) - msk[:, -1:] = 1 - else: - vae_input = torch.concat([image.transpose(0, 1), torch.zeros(3, num_frames-1, height, width).to(image.device)], dim=1) - - msk = torch.concat([torch.repeat_interleave(msk[:, 0:1], repeats=4, dim=1), msk[:, 1:]], dim=1) - msk = msk.view(1, msk.shape[1] // 4, 4, height//8, width//8) - msk = msk.transpose(1, 2)[0] - - y = pipe.vae.encode([vae_input.to(dtype=pipe.torch_dtype, device=pipe.device)], 
device=pipe.device, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride)[0] - y = y.to(dtype=pipe.torch_dtype, device=pipe.device) - y = torch.concat([msk, y]) - y = y.unsqueeze(0) - y = y.to(dtype=pipe.torch_dtype, device=pipe.device) - return {"y": y} - - - -class WanVideoUnit_ImageEmbedderFused(PipelineUnit): - """ - Encode input image to latents using VAE. This unit is for Wan-AI/Wan2.2-TI2V-5B. - """ - def __init__(self): - super().__init__( - input_params=("input_image", "latents", "height", "width", "tiled", "tile_size", "tile_stride"), - onload_model_names=("vae",) - ) - - def process(self, pipe: WanVideoPipeline, input_image, latents, height, width, tiled, tile_size, tile_stride): - if input_image is None or not pipe.dit.fuse_vae_embedding_in_latents: - return {} - pipe.load_models_to_device(self.onload_model_names) - image = pipe.preprocess_image(input_image.resize((width, height))).transpose(0, 1) - z = pipe.vae.encode([image], device=pipe.device, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride) - latents[:, :, 0: 1] = z - return {"latents": latents, "fuse_vae_embedding_in_latents": True, "first_frame_latents": z} - - - -class WanVideoUnit_FunControl(PipelineUnit): - def __init__(self): - super().__init__( - input_params=("control_video", "num_frames", "height", "width", "tiled", "tile_size", "tile_stride", "clip_feature", "y", "latents"), - onload_model_names=("vae",) - ) - - def process(self, pipe: WanVideoPipeline, control_video, num_frames, height, width, tiled, tile_size, tile_stride, clip_feature, y, latents): - if control_video is None: - return {} - pipe.load_models_to_device(self.onload_model_names) - control_video = pipe.preprocess_video(control_video) - control_latents = pipe.vae.encode(control_video, device=pipe.device, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride).to(dtype=pipe.torch_dtype, device=pipe.device) - control_latents = control_latents.to(dtype=pipe.torch_dtype, device=pipe.device) - y_dim = 
pipe.dit.in_dim-control_latents.shape[1]-latents.shape[1] - if clip_feature is None or y is None: - clip_feature = torch.zeros((1, 257, 1280), dtype=pipe.torch_dtype, device=pipe.device) - y = torch.zeros((1, y_dim, (num_frames - 1) // 4 + 1, height//8, width//8), dtype=pipe.torch_dtype, device=pipe.device) - else: - y = y[:, -y_dim:] - y = torch.concat([control_latents, y], dim=1) - return {"clip_feature": clip_feature, "y": y} - - - -class WanVideoUnit_FunReference(PipelineUnit): - def __init__(self): - super().__init__( - input_params=("reference_image", "height", "width", "reference_image"), - onload_model_names=("vae",) - ) - - def process(self, pipe: WanVideoPipeline, reference_image, height, width): - if reference_image is None: - return {} - pipe.load_models_to_device(["vae"]) - reference_image = reference_image.resize((width, height)) - reference_latents = pipe.preprocess_video([reference_image]) - reference_latents = pipe.vae.encode(reference_latents, device=pipe.device) - if pipe.image_encoder is None: - return {"reference_latents": reference_latents} - clip_feature = pipe.preprocess_image(reference_image) - clip_feature = pipe.image_encoder.encode_image([clip_feature]) - return {"reference_latents": reference_latents, "clip_feature": clip_feature} - - - -class WanVideoUnit_FunCameraControl(PipelineUnit): - def __init__(self): - super().__init__( - input_params=("height", "width", "num_frames", "camera_control_direction", "camera_control_speed", "camera_control_origin", "latents", "input_image", "tiled", "tile_size", "tile_stride"), - onload_model_names=("vae",) - ) - - def process(self, pipe: WanVideoPipeline, height, width, num_frames, camera_control_direction, camera_control_speed, camera_control_origin, latents, input_image, tiled, tile_size, tile_stride): - if camera_control_direction is None: - return {} - pipe.load_models_to_device(self.onload_model_names) - camera_control_plucker_embedding = pipe.dit.control_adapter.process_camera_coordinates( - 
class WanVideoUnit_FunCameraControl(PipelineUnit):
    """Build Plücker-embedding camera-control latents and the matching ``y``.

    Converts a camera trajectory (direction/speed/origin) into the
    ``control_camera_latents_input`` tensor expected by the control adapter,
    and encodes the first frame into ``y``.
    """

    def __init__(self):
        super().__init__(
            input_params=("height", "width", "num_frames", "camera_control_direction", "camera_control_speed", "camera_control_origin", "latents", "input_image", "tiled", "tile_size", "tile_stride"),
            onload_model_names=("vae",)
        )

    def process(self, pipe: WanVideoPipeline, height, width, num_frames, camera_control_direction, camera_control_speed, camera_control_origin, latents, input_image, tiled, tile_size, tile_stride):
        if camera_control_direction is None:
            return {}
        pipe.load_models_to_device(self.onload_model_names)
        camera_control_plucker_embedding = pipe.dit.control_adapter.process_camera_coordinates(
            camera_control_direction, num_frames, height, width, camera_control_speed, camera_control_origin)

        # (T, H, W, C) -> (1, C, T, H, W)
        control_camera_video = camera_control_plucker_embedding[:num_frames].permute([3, 0, 1, 2]).unsqueeze(0)
        # Repeat the first frame 4x, then fold groups of 4 frames into the
        # channel axis — mirrors the VAE's 4x temporal compression.
        control_camera_latents = torch.concat(
            [
                torch.repeat_interleave(control_camera_video[:, :, 0:1], repeats=4, dim=2),
                control_camera_video[:, :, 1:]
            ], dim=2
        ).transpose(1, 2)
        b, f, c, h, w = control_camera_latents.shape
        control_camera_latents = control_camera_latents.contiguous().view(b, f // 4, 4, c, h, w).transpose(2, 3)
        control_camera_latents = control_camera_latents.contiguous().view(b, f // 4, c * 4, h, w).transpose(1, 2)
        control_camera_latents_input = control_camera_latents.to(device=pipe.device, dtype=pipe.torch_dtype)

        # Default y: first-frame latents in slot 0, zeros elsewhere.
        input_image = input_image.resize((width, height))
        input_latents = pipe.preprocess_video([input_image])
        input_latents = pipe.vae.encode(input_latents, device=pipe.device)
        y = torch.zeros_like(latents).to(pipe.device)
        y[:, :, :1] = input_latents
        y = y.to(dtype=pipe.torch_dtype, device=pipe.device)

        # If the DiT expects more conditioning channels than plain latents,
        # rebuild y in the mask+latents layout (same as the I2V VAE embedder).
        if y.shape[1] != pipe.dit.in_dim - latents.shape[1]:
            image = pipe.preprocess_image(input_image.resize((width, height))).to(pipe.device)
            vae_input = torch.concat([image.transpose(0, 1), torch.zeros(3, num_frames-1, height, width).to(image.device)], dim=1)
            y = pipe.vae.encode([vae_input.to(dtype=pipe.torch_dtype, device=pipe.device)], device=pipe.device, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride)[0]
            y = y.to(dtype=pipe.torch_dtype, device=pipe.device)
            msk = torch.ones(1, num_frames, height//8, width//8, device=pipe.device)
            msk[:, 1:] = 0
            msk = torch.concat([torch.repeat_interleave(msk[:, 0:1], repeats=4, dim=1), msk[:, 1:]], dim=1)
            msk = msk.view(1, msk.shape[1] // 4, 4, height//8, width//8)
            msk = msk.transpose(1, 2)[0]
            y = torch.cat([msk,y])
            y = y.unsqueeze(0)
            y = y.to(dtype=pipe.torch_dtype, device=pipe.device)
        return {"control_camera_latents_input": control_camera_latents_input, "y": y}



class WanVideoUnit_SpeedControl(PipelineUnit):
    """Pass the motion bucket id through as a 1-element tensor."""

    def __init__(self):
        super().__init__(input_params=("motion_bucket_id",))

    def process(self, pipe: WanVideoPipeline, motion_bucket_id):
        if motion_bucket_id is None:
            return {}
        motion_bucket_id = torch.Tensor((motion_bucket_id,)).to(dtype=pipe.torch_dtype, device=pipe.device)
        return {"motion_bucket_id": motion_bucket_id}
Q) T H W", P=8, Q=8) - vace_mask_latents = torch.nn.functional.interpolate(vace_mask_latents, size=((vace_mask_latents.shape[2] + 3) // 4, vace_mask_latents.shape[3], vace_mask_latents.shape[4]), mode='nearest-exact') - - if vace_reference_image is None: - pass - else: - if not isinstance(vace_reference_image,list): - vace_reference_image = [vace_reference_image] - - vace_reference_image = pipe.preprocess_video(vace_reference_image) - - bs, c, f, h, w = vace_reference_image.shape - new_vace_ref_images = [] - for j in range(f): - new_vace_ref_images.append(vace_reference_image[0, :, j:j+1]) - vace_reference_image = new_vace_ref_images - - vace_reference_latents = pipe.vae.encode(vace_reference_image, device=pipe.device, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride).to(dtype=pipe.torch_dtype, device=pipe.device) - vace_reference_latents = torch.concat((vace_reference_latents, torch.zeros_like(vace_reference_latents)), dim=1) - vace_reference_latents = [u.unsqueeze(0) for u in vace_reference_latents] - - vace_video_latents = torch.concat((*vace_reference_latents, vace_video_latents), dim=2) - vace_mask_latents = torch.concat((torch.zeros_like(vace_mask_latents[:, :, :f]), vace_mask_latents), dim=2) - - vace_context = torch.concat((vace_video_latents, vace_mask_latents), dim=1) - return {"vace_context": vace_context, "vace_scale": vace_scale} - else: - return {"vace_context": None, "vace_scale": vace_scale} - - - -class WanVideoUnit_UnifiedSequenceParallel(PipelineUnit): - def __init__(self): - super().__init__(input_params=()) - - def process(self, pipe: WanVideoPipeline): - if hasattr(pipe, "use_unified_sequence_parallel"): - if pipe.use_unified_sequence_parallel: - return {"use_unified_sequence_parallel": True} - return {} - - - -class WanVideoUnit_TeaCache(PipelineUnit): - def __init__(self): - super().__init__( - seperate_cfg=True, - input_params_posi={"num_inference_steps": "num_inference_steps", "tea_cache_l1_thresh": "tea_cache_l1_thresh", 
"tea_cache_model_id": "tea_cache_model_id"}, - input_params_nega={"num_inference_steps": "num_inference_steps", "tea_cache_l1_thresh": "tea_cache_l1_thresh", "tea_cache_model_id": "tea_cache_model_id"}, - ) - - def process(self, pipe: WanVideoPipeline, num_inference_steps, tea_cache_l1_thresh, tea_cache_model_id): - if tea_cache_l1_thresh is None: - return {} - return {"tea_cache": TeaCache(num_inference_steps, rel_l1_thresh=tea_cache_l1_thresh, model_id=tea_cache_model_id)} - - - -class WanVideoUnit_CfgMerger(PipelineUnit): - def __init__(self): - super().__init__(take_over=True) - self.concat_tensor_names = ["context", "clip_feature", "y", "reference_latents"] - - def process(self, pipe: WanVideoPipeline, inputs_shared, inputs_posi, inputs_nega): - if not inputs_shared["cfg_merge"]: - return inputs_shared, inputs_posi, inputs_nega - for name in self.concat_tensor_names: - tensor_posi = inputs_posi.get(name) - tensor_nega = inputs_nega.get(name) - tensor_shared = inputs_shared.get(name) - if tensor_posi is not None and tensor_nega is not None: - inputs_shared[name] = torch.concat((tensor_posi, tensor_nega), dim=0) - elif tensor_shared is not None: - inputs_shared[name] = torch.concat((tensor_shared, tensor_shared), dim=0) - inputs_posi.clear() - inputs_nega.clear() - return inputs_shared, inputs_posi, inputs_nega - - -class WanVideoUnit_S2V(PipelineUnit): - def __init__(self): - super().__init__( - take_over=True, - onload_model_names=("audio_encoder", "vae",) - ) - - def process_audio(self, pipe: WanVideoPipeline, input_audio, audio_sample_rate, num_frames, fps=16, audio_embeds=None, return_all=False): - if audio_embeds is not None: - return {"audio_embeds": audio_embeds} - pipe.load_models_to_device(["audio_encoder"]) - audio_embeds = pipe.audio_encoder.get_audio_feats_per_inference(input_audio, audio_sample_rate, pipe.audio_processor, fps=fps, batch_frames=num_frames-1, dtype=pipe.torch_dtype, device=pipe.device) - if return_all: - return audio_embeds - else: - 
class WanVideoUnit_S2V(PipelineUnit):
    """Prepare audio, motion and pose conditioning for Wan2.2 S2V.

    A take-over unit: it pops its own inputs from the shared dict and
    distributes audio embeddings to the positive/negative CFG branches.
    """

    def __init__(self):
        super().__init__(
            take_over=True,
            onload_model_names=("audio_encoder", "vae",)
        )

    def process_audio(self, pipe: WanVideoPipeline, input_audio, audio_sample_rate, num_frames, fps=16, audio_embeds=None, return_all=False):
        """Encode audio to per-inference embeddings (or pass through cached ones).

        With ``return_all`` the full list of per-inference embeddings is
        returned; otherwise only the first entry, wrapped for the pipeline.
        """
        if audio_embeds is not None:
            return {"audio_embeds": audio_embeds}
        pipe.load_models_to_device(["audio_encoder"])
        audio_embeds = pipe.audio_encoder.get_audio_feats_per_inference(input_audio, audio_sample_rate, pipe.audio_processor, fps=fps, batch_frames=num_frames-1, dtype=pipe.torch_dtype, device=pipe.device)
        if return_all:
            return audio_embeds
        else:
            return {"audio_embeds": audio_embeds[0]}

    def process_motion_latents(self, pipe: WanVideoPipeline, height, width, tiled, tile_size, tile_stride, motion_video=None):
        """Encode the 73-frame motion context (zeros when absent).

        Also sets ``drop_motion_frames`` so downstream code knows whether the
        motion latents carry real content.
        """
        pipe.load_models_to_device(["vae"])
        # Fixed motion-context length used by the S2V model.
        motion_frames = 73
        kwargs = {}
        if motion_video is not None and len(motion_video) > 0:
            assert len(motion_video) == motion_frames, f"motion video must have {motion_frames} frames, but got {len(motion_video)}"
            motion_latents = pipe.preprocess_video(motion_video)
            kwargs["drop_motion_frames"] = False
        else:
            motion_latents = torch.zeros([1, 3, motion_frames, height, width], dtype=pipe.torch_dtype, device=pipe.device)
            kwargs["drop_motion_frames"] = True
        motion_latents = pipe.vae.encode(motion_latents, device=pipe.device, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride).to(dtype=pipe.torch_dtype, device=pipe.device)
        kwargs.update({"motion_latents": motion_latents})
        return kwargs

    def process_pose_cond(self, pipe: WanVideoPipeline, s2v_pose_video, num_frames, height, width, tiled, tile_size, tile_stride, s2v_pose_latents=None, num_repeats=1, return_all=False):
        """Encode the pose-control video into per-repeat latent chunks.

        Missing frames are padded with -1 (black in the [-1, 1] pixel range).
        """
        if s2v_pose_latents is not None:
            return {"s2v_pose_latents": s2v_pose_latents}
        if s2v_pose_video is None:
            return {"s2v_pose_latents": None}
        pipe.load_models_to_device(["vae"])
        infer_frames = num_frames - 1
        input_video = pipe.preprocess_video(s2v_pose_video)[:, :, :infer_frames * num_repeats]
        # pad if not enough frames
        padding_frames = infer_frames * num_repeats - input_video.shape[2]
        input_video = torch.cat([input_video, -torch.ones(1, 3, padding_frames, height, width, device=input_video.device, dtype=input_video.dtype)], dim=2)
        input_videos = input_video.chunk(num_repeats, dim=2)
        pose_conds = []
        for r in range(num_repeats):
            cond = input_videos[r]
            # Duplicate the first frame so the VAE's 4x temporal packing
            # aligns, then drop the duplicated latent frame afterwards.
            cond = torch.cat([cond[:, :, 0:1].repeat(1, 1, 1, 1, 1), cond], dim=2)
            cond_latents = pipe.vae.encode(cond, device=pipe.device, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride).to(dtype=pipe.torch_dtype, device=pipe.device)
            pose_conds.append(cond_latents[:,:,1:])
        if return_all:
            return pose_conds
        else:
            return {"s2v_pose_latents": pose_conds[0]}

    def process(self, pipe: WanVideoPipeline, inputs_shared, inputs_posi, inputs_nega):
        # Inactive unless audio input/embeddings and the audio stack are present.
        if (inputs_shared.get("input_audio") is None and inputs_shared.get("audio_embeds") is None) or pipe.audio_encoder is None or pipe.audio_processor is None:
            return inputs_shared, inputs_posi, inputs_nega
        num_frames, height, width, tiled, tile_size, tile_stride = inputs_shared.get("num_frames"), inputs_shared.get("height"), inputs_shared.get("width"), inputs_shared.get("tiled"), inputs_shared.get("tile_size"), inputs_shared.get("tile_stride")
        input_audio, audio_embeds, audio_sample_rate = inputs_shared.pop("input_audio", None), inputs_shared.pop("audio_embeds", None), inputs_shared.get("audio_sample_rate", 16000)
        s2v_pose_video, s2v_pose_latents, motion_video = inputs_shared.pop("s2v_pose_video", None), inputs_shared.pop("s2v_pose_latents", None), inputs_shared.pop("motion_video", None)

        # Positive branch gets real audio; negative branch gets silence
        # (zeros of the same shape) for classifier-free guidance.
        audio_input_positive = self.process_audio(pipe, input_audio, audio_sample_rate, num_frames, audio_embeds=audio_embeds)
        inputs_posi.update(audio_input_positive)
        inputs_nega.update({"audio_embeds": 0.0 * audio_input_positive["audio_embeds"]})

        inputs_shared.update(self.process_motion_latents(pipe, height, width, tiled, tile_size, tile_stride, motion_video))
        inputs_shared.update(self.process_pose_cond(pipe, s2v_pose_video, num_frames, height, width, tiled, tile_size, tile_stride, s2v_pose_latents=s2v_pose_latents))
        return inputs_shared, inputs_posi, inputs_nega

    @staticmethod
    def pre_calculate_audio_pose(pipe: WanVideoPipeline, input_audio=None, audio_sample_rate=16000, s2v_pose_video=None, num_frames=81, height=448, width=832, fps=16, tiled=True, tile_size=(30, 52), tile_stride=(15, 26)):
        """Precompute per-segment audio embeddings and pose latents for long audio.

        Returns (audio_embeds, pose_latents_or_None, num_segments).
        """
        assert pipe.audio_encoder is not None and pipe.audio_processor is not None, "Please load audio encoder and audio processor first."
        shapes = WanVideoUnit_ShapeChecker().process(pipe, height, width, num_frames)
        height, width, num_frames = shapes["height"], shapes["width"], shapes["num_frames"]
        unit = WanVideoUnit_S2V()
        audio_embeds = unit.process_audio(pipe, input_audio, audio_sample_rate, num_frames, fps, return_all=True)
        pose_latents = unit.process_pose_cond(pipe, s2v_pose_video, num_frames, height, width, num_repeats=len(audio_embeds), return_all=True, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride)
        pose_latents = None if s2v_pose_video is None else pose_latents
        return audio_embeds, pose_latents, len(audio_embeds)


class WanVideoPostUnit_S2V(PipelineUnit):
    """Re-attach real motion-context latents to the generated latents."""

    def __init__(self):
        super().__init__(input_params=("latents", "motion_latents", "drop_motion_frames"))

    def process(self, pipe: WanVideoPipeline, latents, motion_latents, drop_motion_frames):
        if pipe.audio_encoder is None or motion_latents is None or drop_motion_frames:
            return {}
        # Replace the leading latent frame with the motion context.
        latents = torch.cat([motion_latents, latents[:,:,1:]], dim=2)
        return {"latents": latents}
"animate_face_video": animate_face_video, "animate_inpaint_video": animate_inpaint_video, "animate_mask_video": animate_mask_video} - - -class WanVideoPostUnit_AnimatePoseLatents(PipelineUnit): - def __init__(self): - super().__init__( - input_params=("animate_pose_video", "tiled", "tile_size", "tile_stride"), - onload_model_names=("vae",) - ) - - def process(self, pipe: WanVideoPipeline, animate_pose_video, tiled, tile_size, tile_stride): - if animate_pose_video is None: - return {} - pipe.load_models_to_device(self.onload_model_names) - animate_pose_video = pipe.preprocess_video(animate_pose_video) - pose_latents = pipe.vae.encode(animate_pose_video, device=pipe.device, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride).to(dtype=pipe.torch_dtype, device=pipe.device) - return {"pose_latents": pose_latents} - - -class WanVideoPostUnit_AnimateFacePixelValues(PipelineUnit): - def __init__(self): - super().__init__(take_over=True) - - def process(self, pipe: WanVideoPipeline, inputs_shared, inputs_posi, inputs_nega): - if inputs_shared.get("animate_face_video", None) is None: - return inputs_shared, inputs_posi, inputs_nega - inputs_posi["face_pixel_values"] = pipe.preprocess_video(inputs_shared["animate_face_video"]) - inputs_nega["face_pixel_values"] = torch.zeros_like(inputs_posi["face_pixel_values"]) - 1 - return inputs_shared, inputs_posi, inputs_nega - - -class WanVideoPostUnit_AnimateInpaint(PipelineUnit): - def __init__(self): - super().__init__( - input_params=("animate_inpaint_video", "animate_mask_video", "input_image", "tiled", "tile_size", "tile_stride"), - onload_model_names=("vae",) - ) - - def get_i2v_mask(self, lat_t, lat_h, lat_w, mask_len=1, mask_pixel_values=None, device="cuda"): - if mask_pixel_values is None: - msk = torch.zeros(1, (lat_t-1) * 4 + 1, lat_h, lat_w, device=device) - else: - msk = mask_pixel_values.clone() - msk[:, :mask_len] = 1 - msk = torch.concat([torch.repeat_interleave(msk[:, 0:1], repeats=4, dim=1), msk[:, 1:]], 
dim=1) - msk = msk.view(1, msk.shape[1] // 4, 4, lat_h, lat_w) - msk = msk.transpose(1, 2)[0] - return msk - - def process(self, pipe: WanVideoPipeline, animate_inpaint_video, animate_mask_video, input_image, tiled, tile_size, tile_stride): - if animate_inpaint_video is None or animate_mask_video is None: - return {} - pipe.load_models_to_device(self.onload_model_names) - - bg_pixel_values = pipe.preprocess_video(animate_inpaint_video) - y_reft = pipe.vae.encode(bg_pixel_values, device=pipe.device, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride)[0].to(dtype=pipe.torch_dtype, device=pipe.device) - _, lat_t, lat_h, lat_w = y_reft.shape - - ref_pixel_values = pipe.preprocess_video([input_image]) - ref_latents = pipe.vae.encode(ref_pixel_values, device=pipe.device, tiled=tiled, tile_size=tile_size, tile_stride=tile_stride).to(dtype=pipe.torch_dtype, device=pipe.device) - mask_ref = self.get_i2v_mask(1, lat_h, lat_w, 1, device=pipe.device) - y_ref = torch.concat([mask_ref, ref_latents[0]]).to(dtype=torch.bfloat16, device=pipe.device) - - mask_pixel_values = 1 - pipe.preprocess_video(animate_mask_video, max_value=1, min_value=0) - mask_pixel_values = rearrange(mask_pixel_values, "b c t h w -> (b t) c h w") - mask_pixel_values = torch.nn.functional.interpolate(mask_pixel_values, size=(lat_h, lat_w), mode='nearest') - mask_pixel_values = rearrange(mask_pixel_values, "(b t) c h w -> b t c h w", b=1)[:,:,0] - msk_reft = self.get_i2v_mask(lat_t, lat_h, lat_w, 0, mask_pixel_values=mask_pixel_values, device=pipe.device) - - y_reft = torch.concat([msk_reft, y_reft]).to(dtype=torch.bfloat16, device=pipe.device) - y = torch.concat([y_ref, y_reft], dim=1).unsqueeze(0) - return {"y": y} - - -class WanVideoUnit_LongCatVideo(PipelineUnit): - def __init__(self): - super().__init__( - input_params=("longcat_video",), - onload_model_names=("vae",) - ) - - def process(self, pipe: WanVideoPipeline, longcat_video): - if longcat_video is None: - return {} - 
pipe.load_models_to_device(self.onload_model_names) - longcat_video = pipe.preprocess_video(longcat_video) - longcat_latents = pipe.vae.encode(longcat_video, device=pipe.device).to(dtype=pipe.torch_dtype, device=pipe.device) - return {"longcat_latents": longcat_latents} - - -class TeaCache: - def __init__(self, num_inference_steps, rel_l1_thresh, model_id): - self.num_inference_steps = num_inference_steps - self.step = 0 - self.accumulated_rel_l1_distance = 0 - self.previous_modulated_input = None - self.rel_l1_thresh = rel_l1_thresh - self.previous_residual = None - self.previous_hidden_states = None - - self.coefficients_dict = { - "Wan2.1-T2V-1.3B": [-5.21862437e+04, 9.23041404e+03, -5.28275948e+02, 1.36987616e+01, -4.99875664e-02], - "Wan2.1-T2V-14B": [-3.03318725e+05, 4.90537029e+04, -2.65530556e+03, 5.87365115e+01, -3.15583525e-01], - "Wan2.1-I2V-14B-480P": [2.57151496e+05, -3.54229917e+04, 1.40286849e+03, -1.35890334e+01, 1.32517977e-01], - "Wan2.1-I2V-14B-720P": [ 8.10705460e+03, 2.13393892e+03, -3.72934672e+02, 1.66203073e+01, -4.17769401e-02], - } - if model_id not in self.coefficients_dict: - supported_model_ids = ", ".join([i for i in self.coefficients_dict]) - raise ValueError(f"{model_id} is not a supported TeaCache model id. 
Please choose a valid model id in ({supported_model_ids}).") - self.coefficients = self.coefficients_dict[model_id] - - def check(self, dit: WanModel, x, t_mod): - modulated_inp = t_mod.clone() - if self.step == 0 or self.step == self.num_inference_steps - 1: - should_calc = True - self.accumulated_rel_l1_distance = 0 - else: - coefficients = self.coefficients - rescale_func = np.poly1d(coefficients) - self.accumulated_rel_l1_distance += rescale_func(((modulated_inp-self.previous_modulated_input).abs().mean() / self.previous_modulated_input.abs().mean()).cpu().item()) - if self.accumulated_rel_l1_distance < self.rel_l1_thresh: - should_calc = False - else: - should_calc = True - self.accumulated_rel_l1_distance = 0 - self.previous_modulated_input = modulated_inp - self.step += 1 - if self.step == self.num_inference_steps: - self.step = 0 - if should_calc: - self.previous_hidden_states = x.clone() - return not should_calc - - def store(self, hidden_states): - self.previous_residual = hidden_states - self.previous_hidden_states - self.previous_hidden_states = None - - def update(self, hidden_states): - hidden_states = hidden_states + self.previous_residual - return hidden_states - - - -class TemporalTiler_BCTHW: - def __init__(self): - pass - - def build_1d_mask(self, length, left_bound, right_bound, border_width): - x = torch.ones((length,)) - if border_width == 0: - return x - - shift = 0.5 - if not left_bound: - x[:border_width] = (torch.arange(border_width) + shift) / border_width - if not right_bound: - x[-border_width:] = torch.flip((torch.arange(border_width) + shift) / border_width, dims=(0,)) - return x - - def build_mask(self, data, is_bound, border_width): - _, _, T, _, _ = data.shape - t = self.build_1d_mask(T, is_bound[0], is_bound[1], border_width[0]) - mask = repeat(t, "T -> 1 1 T 1 1") - return mask - - def run(self, model_fn, sliding_window_size, sliding_window_stride, computation_device, computation_dtype, model_kwargs, tensor_names, 
batch_size=None): - tensor_names = [tensor_name for tensor_name in tensor_names if model_kwargs.get(tensor_name) is not None] - tensor_dict = {tensor_name: model_kwargs[tensor_name] for tensor_name in tensor_names} - B, C, T, H, W = tensor_dict[tensor_names[0]].shape - if batch_size is not None: - B *= batch_size - data_device, data_dtype = tensor_dict[tensor_names[0]].device, tensor_dict[tensor_names[0]].dtype - value = torch.zeros((B, C, T, H, W), device=data_device, dtype=data_dtype) - weight = torch.zeros((1, 1, T, 1, 1), device=data_device, dtype=data_dtype) - for t in range(0, T, sliding_window_stride): - if t - sliding_window_stride >= 0 and t - sliding_window_stride + sliding_window_size >= T: - continue - t_ = min(t + sliding_window_size, T) - model_kwargs.update({ - tensor_name: tensor_dict[tensor_name][:, :, t: t_:, :].to(device=computation_device, dtype=computation_dtype) \ - for tensor_name in tensor_names - }) - model_output = model_fn(**model_kwargs).to(device=data_device, dtype=data_dtype) - mask = self.build_mask( - model_output, - is_bound=(t == 0, t_ == T), - border_width=(sliding_window_size - sliding_window_stride,) - ).to(device=data_device, dtype=data_dtype) - value[:, :, t: t_, :, :] += model_output * mask - weight[:, :, t: t_, :, :] += mask - value /= weight - model_kwargs.update(tensor_dict) - return value - - - -def model_fn_wan_video( - dit: WanModel, - motion_controller: WanMotionControllerModel = None, - vace: VaceWanModel = None, - animate_adapter: WanAnimateAdapter = None, - latents: torch.Tensor = None, - timestep: torch.Tensor = None, - context: torch.Tensor = None, - clip_feature: Optional[torch.Tensor] = None, - y: Optional[torch.Tensor] = None, - reference_latents = None, - vace_context = None, - vace_scale = 1.0, - audio_embeds: Optional[torch.Tensor] = None, - motion_latents: Optional[torch.Tensor] = None, - s2v_pose_latents: Optional[torch.Tensor] = None, - drop_motion_frames: bool = True, - tea_cache: TeaCache = None, - 
def model_fn_wan_video(
    dit: WanModel,
    motion_controller: WanMotionControllerModel = None,
    vace: VaceWanModel = None,
    animate_adapter: WanAnimateAdapter = None,
    latents: torch.Tensor = None,
    timestep: torch.Tensor = None,
    context: torch.Tensor = None,
    clip_feature: Optional[torch.Tensor] = None,
    y: Optional[torch.Tensor] = None,
    reference_latents = None,
    vace_context = None,
    vace_scale = 1.0,
    audio_embeds: Optional[torch.Tensor] = None,
    motion_latents: Optional[torch.Tensor] = None,
    s2v_pose_latents: Optional[torch.Tensor] = None,
    drop_motion_frames: bool = True,
    tea_cache: TeaCache = None,
    use_unified_sequence_parallel: bool = False,
    motion_bucket_id: Optional[torch.Tensor] = None,
    pose_latents=None,
    face_pixel_values=None,
    longcat_latents=None,
    sliding_window_size: Optional[int] = None,
    sliding_window_stride: Optional[int] = None,
    cfg_merge: bool = False,
    use_gradient_checkpointing: bool = False,
    use_gradient_checkpointing_offload: bool = False,
    control_camera_latents_input = None,
    fuse_vae_embedding_in_latents: bool = False,
    **kwargs,
):
    """Single denoising forward for the Wan video DiT family.

    Dispatches to temporal tiling, LongCat, or S2V variants when their
    inputs are present; otherwise runs the standard DiT path with optional
    CLIP/VAE conditioning, VACE hints, Animate adapter, TeaCache skipping,
    and unified sequence parallelism.
    """
    # Temporal tiling: re-enter this function window-by-window.
    if sliding_window_size is not None and sliding_window_stride is not None:
        # NOTE(review): the tiled re-entry forwards only this subset of
        # kwargs — animate/audio/camera inputs are dropped; confirm tiling is
        # not expected to combine with those features.
        model_kwargs = dict(
            dit=dit,
            motion_controller=motion_controller,
            vace=vace,
            latents=latents,
            timestep=timestep,
            context=context,
            clip_feature=clip_feature,
            y=y,
            reference_latents=reference_latents,
            vace_context=vace_context,
            vace_scale=vace_scale,
            tea_cache=tea_cache,
            use_unified_sequence_parallel=use_unified_sequence_parallel,
            motion_bucket_id=motion_bucket_id,
        )
        return TemporalTiler_BCTHW().run(
            model_fn_wan_video,
            sliding_window_size, sliding_window_stride,
            latents.device, latents.dtype,
            model_kwargs=model_kwargs,
            tensor_names=["latents", "y"],
            batch_size=2 if cfg_merge else 1
        )
    # LongCat-Video
    if isinstance(dit, LongCatVideoTransformer3DModel):
        return model_fn_longcat_video(
            dit=dit,
            latents=latents,
            timestep=timestep,
            context=context,
            longcat_latents=longcat_latents,
            use_gradient_checkpointing=use_gradient_checkpointing,
            use_gradient_checkpointing_offload=use_gradient_checkpointing_offload,
        )

    # wan2.2 s2v
    if audio_embeds is not None:
        return model_fn_wans2v(
            dit=dit,
            latents=latents,
            timestep=timestep,
            context=context,
            audio_embeds=audio_embeds,
            motion_latents=motion_latents,
            s2v_pose_latents=s2v_pose_latents,
            drop_motion_frames=drop_motion_frames,
            use_gradient_checkpointing_offload=use_gradient_checkpointing_offload,
            use_gradient_checkpointing=use_gradient_checkpointing,
            use_unified_sequence_parallel=use_unified_sequence_parallel,
        )

    if use_unified_sequence_parallel:
        import torch.distributed as dist
        from xfuser.core.distributed import (get_sequence_parallel_rank,
                                             get_sequence_parallel_world_size,
                                             get_sp_group)

    # Timestep
    if dit.seperated_timestep and fuse_vae_embedding_in_latents:
        # Per-token timesteps: the fused first latent frame gets t=0 (it is
        # clean), all other frames get the current timestep.
        timestep = torch.concat([
            torch.zeros((1, latents.shape[3] * latents.shape[4] // 4), dtype=latents.dtype, device=latents.device),
            torch.ones((latents.shape[2] - 1, latents.shape[3] * latents.shape[4] // 4), dtype=latents.dtype, device=latents.device) * timestep
        ]).flatten()
        t = dit.time_embedding(sinusoidal_embedding_1d(dit.freq_dim, timestep).unsqueeze(0))
        if use_unified_sequence_parallel and dist.is_initialized() and dist.get_world_size() > 1:
            # Pad every chunk to the first chunk's length so ranks agree.
            t_chunks = torch.chunk(t, get_sequence_parallel_world_size(), dim=1)
            t_chunks = [torch.nn.functional.pad(chunk, (0, 0, 0, t_chunks[0].shape[1]-chunk.shape[1]), value=0) for chunk in t_chunks]
            t = t_chunks[get_sequence_parallel_rank()]
        t_mod = dit.time_projection(t).unflatten(2, (6, dit.dim))
    else:
        t = dit.time_embedding(sinusoidal_embedding_1d(dit.freq_dim, timestep))
        t_mod = dit.time_projection(t).unflatten(1, (6, dit.dim))

    # Motion Controller
    if motion_bucket_id is not None and motion_controller is not None:
        t_mod = t_mod + motion_controller(motion_bucket_id).unflatten(1, (6, dit.dim))
    context = dit.text_embedding(context)

    x = latents
    # Merged cfg: replicate latents/timestep to match the doubled context batch.
    if x.shape[0] != context.shape[0]:
        x = torch.concat([x] * context.shape[0], dim=0)
    if timestep.shape[0] != context.shape[0]:
        timestep = torch.concat([timestep] * context.shape[0], dim=0)

    # Image Embedding
    if y is not None and dit.require_vae_embedding:
        x = torch.cat([x, y], dim=1)
    if clip_feature is not None and dit.require_clip_embedding:
        # (sic: "clip_embdding" is a long-standing local-name typo)
        clip_embdding = dit.img_emb(clip_feature)
        context = torch.cat([clip_embdding, context], dim=1)

    # Camera control
    x = dit.patchify(x, control_camera_latents_input)

    # Animate
    if pose_latents is not None and face_pixel_values is not None:
        x, motion_vec = animate_adapter.after_patch_embedding(x, pose_latents, face_pixel_values)

    # Patchify: flatten (f, h, w) tokens into one sequence.
    f, h, w = x.shape[2:]
    x = rearrange(x, 'b c f h w -> b (f h w) c').contiguous()

    # Reference image: prepend its tokens and count it as one extra frame.
    if reference_latents is not None:
        if len(reference_latents.shape) == 5:
            reference_latents = reference_latents[:, :, 0]
        reference_latents = dit.ref_conv(reference_latents).flatten(2).transpose(1, 2)
        x = torch.concat([reference_latents, x], dim=1)
        f += 1

    # RoPE frequencies for the (f, h, w) token grid.
    freqs = torch.cat([
        dit.freqs[0][:f].view(f, 1, 1, -1).expand(f, h, w, -1),
        dit.freqs[1][:h].view(1, h, 1, -1).expand(f, h, w, -1),
        dit.freqs[2][:w].view(1, 1, w, -1).expand(f, h, w, -1)
    ], dim=-1).reshape(f * h * w, 1, -1).to(x.device)

    # TeaCache: decide whether the transformer blocks can be skipped.
    if tea_cache is not None:
        tea_cache_update = tea_cache.check(dit, x, t_mod)
    else:
        tea_cache_update = False

    if vace_context is not None:
        vace_hints = vace(
            x, vace_context, context, t_mod, freqs,
            use_gradient_checkpointing=use_gradient_checkpointing,
            use_gradient_checkpointing_offload=use_gradient_checkpointing_offload
        )

    # blocks
    if use_unified_sequence_parallel:
        if dist.is_initialized() and dist.get_world_size() > 1:
            # Shard the token sequence across ranks; pad so all shards match.
            chunks = torch.chunk(x, get_sequence_parallel_world_size(), dim=1)
            pad_shape = chunks[0].shape[1] - chunks[-1].shape[1]
            chunks = [torch.nn.functional.pad(chunk, (0, 0, 0, chunks[0].shape[1]-chunk.shape[1]), value=0) for chunk in chunks]
            x = chunks[get_sequence_parallel_rank()]
    if tea_cache_update:
        # Skip all blocks: reuse the cached residual from the last full step.
        x = tea_cache.update(x)
    else:
        def create_custom_forward(module):
            def custom_forward(*inputs):
                return module(*inputs)
            return custom_forward

        for block_id, block in enumerate(dit.blocks):
            # Block
            if use_gradient_checkpointing_offload:
                with torch.autograd.graph.save_on_cpu():
                    x = torch.utils.checkpoint.checkpoint(
                        create_custom_forward(block),
                        x, context, t_mod, freqs,
                        use_reentrant=False,
                    )
            elif use_gradient_checkpointing:
                x = torch.utils.checkpoint.checkpoint(
                    create_custom_forward(block),
                    x, context, t_mod, freqs,
                    use_reentrant=False,
                )
            else:
                x = block(x, context, t_mod, freqs)

            # VACE: add the scaled hint mapped to this block, sharded to match x.
            if vace_context is not None and block_id in vace.vace_layers_mapping:
                current_vace_hint = vace_hints[vace.vace_layers_mapping[block_id]]
                if use_unified_sequence_parallel and dist.is_initialized() and dist.get_world_size() > 1:
                    current_vace_hint = torch.chunk(current_vace_hint, get_sequence_parallel_world_size(), dim=1)[get_sequence_parallel_rank()]
                    current_vace_hint = torch.nn.functional.pad(current_vace_hint, (0, 0, 0, chunks[0].shape[1] - current_vace_hint.shape[1]), value=0)
                x = x + current_vace_hint * vace_scale

            # Animate
            if pose_latents is not None and face_pixel_values is not None:
                x = animate_adapter.after_transformer_block(block_id, x, motion_vec)
        # Record the residual for future skipped steps.
        if tea_cache is not None:
            tea_cache.store(x)

    x = dit.head(x, t)
    if use_unified_sequence_parallel:
        if dist.is_initialized() and dist.get_world_size() > 1:
            # Gather shards and drop the padding added before sharding.
            x = get_sp_group().all_gather(x, dim=1)
            x = x[:, :-pad_shape] if pad_shape > 0 else x
    # Remove reference latents
    if reference_latents is not None:
        x = x[:, reference_latents.shape[1]:]
        f -= 1
    x = dit.unpatchify(x, (f, h, w))
    return x
encoder_attention_mask, - num_cond_latents=num_cond_latents, - use_gradient_checkpointing=use_gradient_checkpointing, - use_gradient_checkpointing_offload=use_gradient_checkpointing_offload, - ) - output = -output - output = output.to(latents.dtype) - return output - - -def model_fn_wans2v( - dit, - latents, - timestep, - context, - audio_embeds, - motion_latents, - s2v_pose_latents, - drop_motion_frames=True, - use_gradient_checkpointing_offload=False, - use_gradient_checkpointing=False, - use_unified_sequence_parallel=False, -): - if use_unified_sequence_parallel: - import torch.distributed as dist - from xfuser.core.distributed import (get_sequence_parallel_rank, - get_sequence_parallel_world_size, - get_sp_group) - origin_ref_latents = latents[:, :, 0:1] - x = latents[:, :, 1:] - - # context embedding - context = dit.text_embedding(context) - - # audio encode - audio_emb_global, merged_audio_emb = dit.cal_audio_emb(audio_embeds) - - # x and s2v_pose_latents - s2v_pose_latents = torch.zeros_like(x) if s2v_pose_latents is None else s2v_pose_latents - x, (f, h, w) = dit.patchify(dit.patch_embedding(x) + dit.cond_encoder(s2v_pose_latents)) - seq_len_x = seq_len_x_global = x.shape[1] # global used for unified sequence parallel - - # reference image - ref_latents, (rf, rh, rw) = dit.patchify(dit.patch_embedding(origin_ref_latents)) - grid_sizes = dit.get_grid_sizes((f, h, w), (rf, rh, rw)) - x = torch.cat([x, ref_latents], dim=1) - # mask - mask = torch.cat([torch.zeros([1, seq_len_x]), torch.ones([1, ref_latents.shape[1]])], dim=1).to(torch.long).to(x.device) - # freqs - pre_compute_freqs = rope_precompute(x.detach().view(1, x.size(1), dit.num_heads, dit.dim // dit.num_heads), grid_sizes, dit.freqs, start=None) - # motion - x, pre_compute_freqs, mask = dit.inject_motion(x, pre_compute_freqs, mask, motion_latents, drop_motion_frames=drop_motion_frames, add_last_motion=2) - - x = x + dit.trainable_cond_mask(mask).to(x.dtype) - - # tmod - timestep = 
torch.cat([timestep, torch.zeros([1], dtype=timestep.dtype, device=timestep.device)]) - t = dit.time_embedding(sinusoidal_embedding_1d(dit.freq_dim, timestep)) - t_mod = dit.time_projection(t).unflatten(1, (6, dit.dim)).unsqueeze(2).transpose(0, 2) - - if use_unified_sequence_parallel and dist.is_initialized() and dist.get_world_size() > 1: - world_size, sp_rank = get_sequence_parallel_world_size(), get_sequence_parallel_rank() - assert x.shape[1] % world_size == 0, f"the dimension after chunk must be divisible by world size, but got {x.shape[1]} and {get_sequence_parallel_world_size()}" - x = torch.chunk(x, world_size, dim=1)[sp_rank] - seg_idxs = [0] + list(torch.cumsum(torch.tensor([x.shape[1]] * world_size), dim=0).cpu().numpy()) - seq_len_x_list = [min(max(0, seq_len_x - seg_idxs[i]), x.shape[1]) for i in range(len(seg_idxs)-1)] - seq_len_x = seq_len_x_list[sp_rank] - - def create_custom_forward(module): - def custom_forward(*inputs): - return module(*inputs) - return custom_forward - - for block_id, block in enumerate(dit.blocks): - if use_gradient_checkpointing_offload: - with torch.autograd.graph.save_on_cpu(): - x = torch.utils.checkpoint.checkpoint( - create_custom_forward(block), - x, context, t_mod, seq_len_x, pre_compute_freqs[0], - use_reentrant=False, - ) - x = torch.utils.checkpoint.checkpoint( - create_custom_forward(lambda x: dit.after_transformer_block(block_id, x, audio_emb_global, merged_audio_emb, seq_len_x)), - x, - use_reentrant=False, - ) - elif use_gradient_checkpointing: - x = torch.utils.checkpoint.checkpoint( - create_custom_forward(block), - x, context, t_mod, seq_len_x, pre_compute_freqs[0], - use_reentrant=False, - ) - x = torch.utils.checkpoint.checkpoint( - create_custom_forward(lambda x: dit.after_transformer_block(block_id, x, audio_emb_global, merged_audio_emb, seq_len_x)), - x, - use_reentrant=False, - ) - else: - x = block(x, context, t_mod, seq_len_x, pre_compute_freqs[0]) - x = dit.after_transformer_block(block_id, x, 
audio_emb_global, merged_audio_emb, seq_len_x_global, use_unified_sequence_parallel) - - if use_unified_sequence_parallel and dist.is_initialized() and dist.get_world_size() > 1: - x = get_sp_group().all_gather(x, dim=1) - - x = x[:, :seq_len_x_global] - x = dit.head(x, t[:-1]) - x = dit.unpatchify(x, (f, h, w)) - # make compatible with wan video - x = torch.cat([origin_ref_latents, x], dim=2) - return x diff --git a/diffsynth/processors/FastBlend.py b/diffsynth/processors/FastBlend.py deleted file mode 100644 index fed33f4..0000000 --- a/diffsynth/processors/FastBlend.py +++ /dev/null @@ -1,142 +0,0 @@ -from PIL import Image -import cupy as cp -import numpy as np -from tqdm import tqdm -from ..extensions.FastBlend.patch_match import PyramidPatchMatcher -from ..extensions.FastBlend.runners.fast import TableManager -from .base import VideoProcessor - - -class FastBlendSmoother(VideoProcessor): - def __init__( - self, - inference_mode="fast", batch_size=8, window_size=60, - minimum_patch_size=5, threads_per_block=8, num_iter=5, gpu_id=0, guide_weight=10.0, initialize="identity", tracking_window_size=0 - ): - self.inference_mode = inference_mode - self.batch_size = batch_size - self.window_size = window_size - self.ebsynth_config = { - "minimum_patch_size": minimum_patch_size, - "threads_per_block": threads_per_block, - "num_iter": num_iter, - "gpu_id": gpu_id, - "guide_weight": guide_weight, - "initialize": initialize, - "tracking_window_size": tracking_window_size - } - - @staticmethod - def from_model_manager(model_manager, **kwargs): - # TODO: fetch GPU ID from model_manager - return FastBlendSmoother(**kwargs) - - def inference_fast(self, frames_guide, frames_style): - table_manager = TableManager() - patch_match_engine = PyramidPatchMatcher( - image_height=frames_style[0].shape[0], - image_width=frames_style[0].shape[1], - channel=3, - **self.ebsynth_config - ) - # left part - table_l = table_manager.build_remapping_table(frames_guide, frames_style, 
patch_match_engine, self.batch_size, desc="Fast Mode Step 1/4") - table_l = table_manager.remapping_table_to_blending_table(table_l) - table_l = table_manager.process_window_sum(frames_guide, table_l, patch_match_engine, self.window_size, self.batch_size, desc="Fast Mode Step 2/4") - # right part - table_r = table_manager.build_remapping_table(frames_guide[::-1], frames_style[::-1], patch_match_engine, self.batch_size, desc="Fast Mode Step 3/4") - table_r = table_manager.remapping_table_to_blending_table(table_r) - table_r = table_manager.process_window_sum(frames_guide[::-1], table_r, patch_match_engine, self.window_size, self.batch_size, desc="Fast Mode Step 4/4")[::-1] - # merge - frames = [] - for (frame_l, weight_l), frame_m, (frame_r, weight_r) in zip(table_l, frames_style, table_r): - weight_m = -1 - weight = weight_l + weight_m + weight_r - frame = frame_l * (weight_l / weight) + frame_m * (weight_m / weight) + frame_r * (weight_r / weight) - frames.append(frame) - frames = [frame.clip(0, 255).astype("uint8") for frame in frames] - frames = [Image.fromarray(frame) for frame in frames] - return frames - - def inference_balanced(self, frames_guide, frames_style): - patch_match_engine = PyramidPatchMatcher( - image_height=frames_style[0].shape[0], - image_width=frames_style[0].shape[1], - channel=3, - **self.ebsynth_config - ) - output_frames = [] - # tasks - n = len(frames_style) - tasks = [] - for target in range(n): - for source in range(target - self.window_size, target + self.window_size + 1): - if source >= 0 and source < n and source != target: - tasks.append((source, target)) - # run - frames = [(None, 1) for i in range(n)] - for batch_id in tqdm(range(0, len(tasks), self.batch_size), desc="Balanced Mode"): - tasks_batch = tasks[batch_id: min(batch_id+self.batch_size, len(tasks))] - source_guide = np.stack([frames_guide[source] for source, target in tasks_batch]) - target_guide = np.stack([frames_guide[target] for source, target in tasks_batch]) - 
source_style = np.stack([frames_style[source] for source, target in tasks_batch]) - _, target_style = patch_match_engine.estimate_nnf(source_guide, target_guide, source_style) - for (source, target), result in zip(tasks_batch, target_style): - frame, weight = frames[target] - if frame is None: - frame = frames_style[target] - frames[target] = ( - frame * (weight / (weight + 1)) + result / (weight + 1), - weight + 1 - ) - if weight + 1 == min(n, target + self.window_size + 1) - max(0, target - self.window_size): - frame = frame.clip(0, 255).astype("uint8") - output_frames.append(Image.fromarray(frame)) - frames[target] = (None, 1) - return output_frames - - def inference_accurate(self, frames_guide, frames_style): - patch_match_engine = PyramidPatchMatcher( - image_height=frames_style[0].shape[0], - image_width=frames_style[0].shape[1], - channel=3, - use_mean_target_style=True, - **self.ebsynth_config - ) - output_frames = [] - # run - n = len(frames_style) - for target in tqdm(range(n), desc="Accurate Mode"): - l, r = max(target - self.window_size, 0), min(target + self.window_size + 1, n) - remapped_frames = [] - for i in range(l, r, self.batch_size): - j = min(i + self.batch_size, r) - source_guide = np.stack([frames_guide[source] for source in range(i, j)]) - target_guide = np.stack([frames_guide[target]] * (j - i)) - source_style = np.stack([frames_style[source] for source in range(i, j)]) - _, target_style = patch_match_engine.estimate_nnf(source_guide, target_guide, source_style) - remapped_frames.append(target_style) - frame = np.concatenate(remapped_frames, axis=0).mean(axis=0) - frame = frame.clip(0, 255).astype("uint8") - output_frames.append(Image.fromarray(frame)) - return output_frames - - def release_vram(self): - mempool = cp.get_default_memory_pool() - pinned_mempool = cp.get_default_pinned_memory_pool() - mempool.free_all_blocks() - pinned_mempool.free_all_blocks() - - def __call__(self, rendered_frames, original_frames=None, **kwargs): - 
rendered_frames = [np.array(frame) for frame in rendered_frames] - original_frames = [np.array(frame) for frame in original_frames] - if self.inference_mode == "fast": - output_frames = self.inference_fast(original_frames, rendered_frames) - elif self.inference_mode == "balanced": - output_frames = self.inference_balanced(original_frames, rendered_frames) - elif self.inference_mode == "accurate": - output_frames = self.inference_accurate(original_frames, rendered_frames) - else: - raise ValueError("inference_mode must be fast, balanced or accurate") - self.release_vram() - return output_frames diff --git a/diffsynth/processors/PILEditor.py b/diffsynth/processors/PILEditor.py deleted file mode 100644 index 01011d8..0000000 --- a/diffsynth/processors/PILEditor.py +++ /dev/null @@ -1,28 +0,0 @@ -from PIL import ImageEnhance -from .base import VideoProcessor - - -class ContrastEditor(VideoProcessor): - def __init__(self, rate=1.5): - self.rate = rate - - @staticmethod - def from_model_manager(model_manager, **kwargs): - return ContrastEditor(**kwargs) - - def __call__(self, rendered_frames, **kwargs): - rendered_frames = [ImageEnhance.Contrast(i).enhance(self.rate) for i in rendered_frames] - return rendered_frames - - -class SharpnessEditor(VideoProcessor): - def __init__(self, rate=1.5): - self.rate = rate - - @staticmethod - def from_model_manager(model_manager, **kwargs): - return SharpnessEditor(**kwargs) - - def __call__(self, rendered_frames, **kwargs): - rendered_frames = [ImageEnhance.Sharpness(i).enhance(self.rate) for i in rendered_frames] - return rendered_frames diff --git a/diffsynth/processors/RIFE.py b/diffsynth/processors/RIFE.py deleted file mode 100644 index 4186eb3..0000000 --- a/diffsynth/processors/RIFE.py +++ /dev/null @@ -1,77 +0,0 @@ -import torch -import numpy as np -from PIL import Image -from .base import VideoProcessor - - -class RIFESmoother(VideoProcessor): - def __init__(self, model, device="cuda", scale=1.0, batch_size=4, 
interpolate=True): - self.model = model - self.device = device - - # IFNet only does not support float16 - self.torch_dtype = torch.float32 - - # Other parameters - self.scale = scale - self.batch_size = batch_size - self.interpolate = interpolate - - @staticmethod - def from_model_manager(model_manager, **kwargs): - return RIFESmoother(model_manager.RIFE, device=model_manager.device, **kwargs) - - def process_image(self, image): - width, height = image.size - if width % 32 != 0 or height % 32 != 0: - width = (width + 31) // 32 - height = (height + 31) // 32 - image = image.resize((width, height)) - image = torch.Tensor(np.array(image, dtype=np.float32)[:, :, [2,1,0]] / 255).permute(2, 0, 1) - return image - - def process_images(self, images): - images = [self.process_image(image) for image in images] - images = torch.stack(images) - return images - - def decode_images(self, images): - images = (images[:, [2,1,0]].permute(0, 2, 3, 1) * 255).clip(0, 255).numpy().astype(np.uint8) - images = [Image.fromarray(image) for image in images] - return images - - def process_tensors(self, input_tensor, scale=1.0, batch_size=4): - output_tensor = [] - for batch_id in range(0, input_tensor.shape[0], batch_size): - batch_id_ = min(batch_id + batch_size, input_tensor.shape[0]) - batch_input_tensor = input_tensor[batch_id: batch_id_] - batch_input_tensor = batch_input_tensor.to(device=self.device, dtype=self.torch_dtype) - flow, mask, merged = self.model(batch_input_tensor, [4/scale, 2/scale, 1/scale]) - output_tensor.append(merged[2].cpu()) - output_tensor = torch.concat(output_tensor, dim=0) - return output_tensor - - @torch.no_grad() - def __call__(self, rendered_frames, **kwargs): - # Preprocess - processed_images = self.process_images(rendered_frames) - - # Input - input_tensor = torch.cat((processed_images[:-2], processed_images[2:]), dim=1) - - # Interpolate - output_tensor = self.process_tensors(input_tensor, scale=self.scale, batch_size=self.batch_size) - - if 
self.interpolate: - # Blend - input_tensor = torch.cat((processed_images[1:-1], output_tensor), dim=1) - output_tensor = self.process_tensors(input_tensor, scale=self.scale, batch_size=self.batch_size) - processed_images[1:-1] = output_tensor - else: - processed_images[1:-1] = (processed_images[1:-1] + output_tensor) / 2 - - # To images - output_images = self.decode_images(processed_images) - if output_images[0].size != rendered_frames[0].size: - output_images = [image.resize(rendered_frames[0].size) for image in output_images] - return output_images diff --git a/diffsynth/processors/base.py b/diffsynth/processors/base.py deleted file mode 100644 index 278a9c1..0000000 --- a/diffsynth/processors/base.py +++ /dev/null @@ -1,6 +0,0 @@ -class VideoProcessor: - def __init__(self): - pass - - def __call__(self): - raise NotImplementedError diff --git a/diffsynth/processors/sequencial_processor.py b/diffsynth/processors/sequencial_processor.py deleted file mode 100644 index 9b5bc94..0000000 --- a/diffsynth/processors/sequencial_processor.py +++ /dev/null @@ -1,41 +0,0 @@ -from .base import VideoProcessor - - -class AutoVideoProcessor(VideoProcessor): - def __init__(self): - pass - - @staticmethod - def from_model_manager(model_manager, processor_type, **kwargs): - if processor_type == "FastBlend": - from .FastBlend import FastBlendSmoother - return FastBlendSmoother.from_model_manager(model_manager, **kwargs) - elif processor_type == "Contrast": - from .PILEditor import ContrastEditor - return ContrastEditor.from_model_manager(model_manager, **kwargs) - elif processor_type == "Sharpness": - from .PILEditor import SharpnessEditor - return SharpnessEditor.from_model_manager(model_manager, **kwargs) - elif processor_type == "RIFE": - from .RIFE import RIFESmoother - return RIFESmoother.from_model_manager(model_manager, **kwargs) - else: - raise ValueError(f"invalid processor_type: {processor_type}") - - -class SequencialProcessor(VideoProcessor): - def __init__(self, 
processors=[]): - self.processors = processors - - @staticmethod - def from_model_manager(model_manager, configs): - processors = [ - AutoVideoProcessor.from_model_manager(model_manager, config["processor_type"], **config["config"]) - for config in configs - ] - return SequencialProcessor(processors) - - def __call__(self, rendered_frames, **kwargs): - for processor in self.processors: - rendered_frames = processor(rendered_frames, **kwargs) - return rendered_frames diff --git a/diffsynth/prompters/__init__.py b/diffsynth/prompters/__init__.py deleted file mode 100644 index f27c6f1..0000000 --- a/diffsynth/prompters/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -from .prompt_refiners import Translator, BeautifulPrompt, QwenPrompt -from .sd_prompter import SDPrompter -from .sdxl_prompter import SDXLPrompter -from .sd3_prompter import SD3Prompter -from .hunyuan_dit_prompter import HunyuanDiTPrompter -from .kolors_prompter import KolorsPrompter -from .flux_prompter import FluxPrompter -from .omost import OmostPromter -from .cog_prompter import CogPrompter -from .hunyuan_video_prompter import HunyuanVideoPrompter -from .stepvideo_prompter import StepVideoPrompter -from .wan_prompter import WanPrompter diff --git a/diffsynth/prompters/base_prompter.py b/diffsynth/prompters/base_prompter.py deleted file mode 100644 index 136abd1..0000000 --- a/diffsynth/prompters/base_prompter.py +++ /dev/null @@ -1,70 +0,0 @@ -from ..models.model_manager import ModelManager -import torch - - - -def tokenize_long_prompt(tokenizer, prompt, max_length=None): - # Get model_max_length from self.tokenizer - length = tokenizer.model_max_length if max_length is None else max_length - - # To avoid the warning. set self.tokenizer.model_max_length to +oo. - tokenizer.model_max_length = 99999999 - - # Tokenize it! - input_ids = tokenizer(prompt, return_tensors="pt").input_ids - - # Determine the real length. 
- max_length = (input_ids.shape[1] + length - 1) // length * length - - # Restore tokenizer.model_max_length - tokenizer.model_max_length = length - - # Tokenize it again with fixed length. - input_ids = tokenizer( - prompt, - return_tensors="pt", - padding="max_length", - max_length=max_length, - truncation=True - ).input_ids - - # Reshape input_ids to fit the text encoder. - num_sentence = input_ids.shape[1] // length - input_ids = input_ids.reshape((num_sentence, length)) - - return input_ids - - - -class BasePrompter: - def __init__(self): - self.refiners = [] - self.extenders = [] - - - def load_prompt_refiners(self, model_manager: ModelManager, refiner_classes=[]): - for refiner_class in refiner_classes: - refiner = refiner_class.from_model_manager(model_manager) - self.refiners.append(refiner) - - def load_prompt_extenders(self,model_manager:ModelManager,extender_classes=[]): - for extender_class in extender_classes: - extender = extender_class.from_model_manager(model_manager) - self.extenders.append(extender) - - - @torch.no_grad() - def process_prompt(self, prompt, positive=True): - if isinstance(prompt, list): - prompt = [self.process_prompt(prompt_, positive=positive) for prompt_ in prompt] - else: - for refiner in self.refiners: - prompt = refiner(prompt, positive=positive) - return prompt - - @torch.no_grad() - def extend_prompt(self, prompt:str, positive=True): - extended_prompt = dict(prompt=prompt) - for extender in self.extenders: - extended_prompt = extender(extended_prompt) - return extended_prompt \ No newline at end of file diff --git a/diffsynth/prompters/cog_prompter.py b/diffsynth/prompters/cog_prompter.py deleted file mode 100644 index a1ab84a..0000000 --- a/diffsynth/prompters/cog_prompter.py +++ /dev/null @@ -1,46 +0,0 @@ -from .base_prompter import BasePrompter -from ..models.flux_text_encoder import FluxTextEncoder2 -from transformers import T5TokenizerFast -import os - - -class CogPrompter(BasePrompter): - def __init__( - self, - 
tokenizer_path=None - ): - if tokenizer_path is None: - base_path = os.path.dirname(os.path.dirname(__file__)) - tokenizer_path = os.path.join(base_path, "tokenizer_configs/cog/tokenizer") - super().__init__() - self.tokenizer = T5TokenizerFast.from_pretrained(tokenizer_path) - self.text_encoder: FluxTextEncoder2 = None - - - def fetch_models(self, text_encoder: FluxTextEncoder2 = None): - self.text_encoder = text_encoder - - - def encode_prompt_using_t5(self, prompt, text_encoder, tokenizer, max_length, device): - input_ids = tokenizer( - prompt, - return_tensors="pt", - padding="max_length", - max_length=max_length, - truncation=True, - ).input_ids.to(device) - prompt_emb = text_encoder(input_ids) - prompt_emb = prompt_emb.reshape((1, prompt_emb.shape[0]*prompt_emb.shape[1], -1)) - - return prompt_emb - - - def encode_prompt( - self, - prompt, - positive=True, - device="cuda" - ): - prompt = self.process_prompt(prompt, positive=positive) - prompt_emb = self.encode_prompt_using_t5(prompt, self.text_encoder, self.tokenizer, 226, device) - return prompt_emb diff --git a/diffsynth/prompters/flux_prompter.py b/diffsynth/prompters/flux_prompter.py deleted file mode 100644 index a3a06ff..0000000 --- a/diffsynth/prompters/flux_prompter.py +++ /dev/null @@ -1,74 +0,0 @@ -from .base_prompter import BasePrompter -from ..models.flux_text_encoder import FluxTextEncoder2 -from ..models.sd3_text_encoder import SD3TextEncoder1 -from transformers import CLIPTokenizer, T5TokenizerFast -import os, torch - - -class FluxPrompter(BasePrompter): - def __init__( - self, - tokenizer_1_path=None, - tokenizer_2_path=None - ): - if tokenizer_1_path is None: - base_path = os.path.dirname(os.path.dirname(__file__)) - tokenizer_1_path = os.path.join(base_path, "tokenizer_configs/flux/tokenizer_1") - if tokenizer_2_path is None: - base_path = os.path.dirname(os.path.dirname(__file__)) - tokenizer_2_path = os.path.join(base_path, "tokenizer_configs/flux/tokenizer_2") - super().__init__() - 
self.tokenizer_1 = CLIPTokenizer.from_pretrained(tokenizer_1_path) - self.tokenizer_2 = T5TokenizerFast.from_pretrained(tokenizer_2_path) - self.text_encoder_1: SD3TextEncoder1 = None - self.text_encoder_2: FluxTextEncoder2 = None - - - def fetch_models(self, text_encoder_1: SD3TextEncoder1 = None, text_encoder_2: FluxTextEncoder2 = None): - self.text_encoder_1 = text_encoder_1 - self.text_encoder_2 = text_encoder_2 - - - def encode_prompt_using_clip(self, prompt, text_encoder, tokenizer, max_length, device): - input_ids = tokenizer( - prompt, - return_tensors="pt", - padding="max_length", - max_length=max_length, - truncation=True - ).input_ids.to(device) - pooled_prompt_emb, _ = text_encoder(input_ids) - return pooled_prompt_emb - - - def encode_prompt_using_t5(self, prompt, text_encoder, tokenizer, max_length, device): - input_ids = tokenizer( - prompt, - return_tensors="pt", - padding="max_length", - max_length=max_length, - truncation=True, - ).input_ids.to(device) - prompt_emb = text_encoder(input_ids) - return prompt_emb - - - def encode_prompt( - self, - prompt, - positive=True, - device="cuda", - t5_sequence_length=512, - ): - prompt = self.process_prompt(prompt, positive=positive) - - # CLIP - pooled_prompt_emb = self.encode_prompt_using_clip(prompt, self.text_encoder_1, self.tokenizer_1, 77, device) - - # T5 - prompt_emb = self.encode_prompt_using_t5(prompt, self.text_encoder_2, self.tokenizer_2, t5_sequence_length, device) - - # text_ids - text_ids = torch.zeros(prompt_emb.shape[0], prompt_emb.shape[1], 3).to(device=device, dtype=prompt_emb.dtype) - - return prompt_emb, pooled_prompt_emb, text_ids diff --git a/diffsynth/prompters/hunyuan_dit_prompter.py b/diffsynth/prompters/hunyuan_dit_prompter.py deleted file mode 100644 index 52a22ed..0000000 --- a/diffsynth/prompters/hunyuan_dit_prompter.py +++ /dev/null @@ -1,69 +0,0 @@ -from .base_prompter import BasePrompter -from ..models.model_manager import ModelManager -from ..models import 
HunyuanDiTCLIPTextEncoder, HunyuanDiTT5TextEncoder -from transformers import BertTokenizer, AutoTokenizer -import warnings, os - - -class HunyuanDiTPrompter(BasePrompter): - def __init__( - self, - tokenizer_path=None, - tokenizer_t5_path=None - ): - if tokenizer_path is None: - base_path = os.path.dirname(os.path.dirname(__file__)) - tokenizer_path = os.path.join(base_path, "tokenizer_configs/hunyuan_dit/tokenizer") - if tokenizer_t5_path is None: - base_path = os.path.dirname(os.path.dirname(__file__)) - tokenizer_t5_path = os.path.join(base_path, "tokenizer_configs/hunyuan_dit/tokenizer_t5") - super().__init__() - self.tokenizer = BertTokenizer.from_pretrained(tokenizer_path) - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - self.tokenizer_t5 = AutoTokenizer.from_pretrained(tokenizer_t5_path) - self.text_encoder: HunyuanDiTCLIPTextEncoder = None - self.text_encoder_t5: HunyuanDiTT5TextEncoder = None - - - def fetch_models(self, text_encoder: HunyuanDiTCLIPTextEncoder = None, text_encoder_t5: HunyuanDiTT5TextEncoder = None): - self.text_encoder = text_encoder - self.text_encoder_t5 = text_encoder_t5 - - - def encode_prompt_using_signle_model(self, prompt, text_encoder, tokenizer, max_length, clip_skip, device): - text_inputs = tokenizer( - prompt, - padding="max_length", - max_length=max_length, - truncation=True, - return_attention_mask=True, - return_tensors="pt", - ) - text_input_ids = text_inputs.input_ids - attention_mask = text_inputs.attention_mask.to(device) - prompt_embeds = text_encoder( - text_input_ids.to(device), - attention_mask=attention_mask, - clip_skip=clip_skip - ) - return prompt_embeds, attention_mask - - - def encode_prompt( - self, - prompt, - clip_skip=1, - clip_skip_2=1, - positive=True, - device="cuda" - ): - prompt = self.process_prompt(prompt, positive=positive) - - # CLIP - prompt_emb, attention_mask = self.encode_prompt_using_signle_model(prompt, self.text_encoder, self.tokenizer, self.tokenizer.model_max_length, 
clip_skip, device) - - # T5 - prompt_emb_t5, attention_mask_t5 = self.encode_prompt_using_signle_model(prompt, self.text_encoder_t5, self.tokenizer_t5, self.tokenizer_t5.model_max_length, clip_skip_2, device) - - return prompt_emb, attention_mask, prompt_emb_t5, attention_mask_t5 diff --git a/diffsynth/prompters/hunyuan_video_prompter.py b/diffsynth/prompters/hunyuan_video_prompter.py deleted file mode 100644 index 5b97356..0000000 --- a/diffsynth/prompters/hunyuan_video_prompter.py +++ /dev/null @@ -1,275 +0,0 @@ -from .base_prompter import BasePrompter -from ..models.sd3_text_encoder import SD3TextEncoder1 -from ..models.hunyuan_video_text_encoder import HunyuanVideoLLMEncoder, HunyuanVideoMLLMEncoder -from transformers import CLIPTokenizer, LlamaTokenizerFast, CLIPImageProcessor -import os, torch -from typing import Union - -PROMPT_TEMPLATE_ENCODE = ( - "<|start_header_id|>system<|end_header_id|>\n\nDescribe the image by detailing the color, shape, size, texture, " - "quantity, text, spatial relationships of the objects and background:<|eot_id|>" - "<|start_header_id|>user<|end_header_id|>\n\n{}<|eot_id|>") - -PROMPT_TEMPLATE_ENCODE_VIDEO = ( - "<|start_header_id|>system<|end_header_id|>\n\nDescribe the video by detailing the following aspects: " - "1. The main content and theme of the video." - "2. The color, shape, size, texture, quantity, text, and spatial relationships of the objects." - "3. Actions, events, behaviors temporal relationships, physical movement changes of the objects." - "4. background environment, light, style and atmosphere." - "5. 
camera angles, movements, and transitions used in the video:<|eot_id|>" - "<|start_header_id|>user<|end_header_id|>\n\n{}<|eot_id|>") - -PROMPT_TEMPLATE_ENCODE_I2V = ( - "<|start_header_id|>system<|end_header_id|>\n\n\nDescribe the image by detailing the color, shape, size, texture, " - "quantity, text, spatial relationships of the objects and background:<|eot_id|>" - "<|start_header_id|>user<|end_header_id|>\n\n{}<|eot_id|>" - "<|start_header_id|>assistant<|end_header_id|>\n\n" -) - -PROMPT_TEMPLATE_ENCODE_VIDEO_I2V = ( - "<|start_header_id|>system<|end_header_id|>\n\n\nDescribe the video by detailing the following aspects according to the reference image: " - "1. The main content and theme of the video." - "2. The color, shape, size, texture, quantity, text, and spatial relationships of the objects." - "3. Actions, events, behaviors temporal relationships, physical movement changes of the objects." - "4. background environment, light, style and atmosphere." - "5. camera angles, movements, and transitions used in the video:<|eot_id|>\n\n" - "<|start_header_id|>user<|end_header_id|>\n\n{}<|eot_id|>" - "<|start_header_id|>assistant<|end_header_id|>\n\n" -) - -PROMPT_TEMPLATE = { - "dit-llm-encode": { - "template": PROMPT_TEMPLATE_ENCODE, - "crop_start": 36, - }, - "dit-llm-encode-video": { - "template": PROMPT_TEMPLATE_ENCODE_VIDEO, - "crop_start": 95, - }, - "dit-llm-encode-i2v": { - "template": PROMPT_TEMPLATE_ENCODE_I2V, - "crop_start": 36, - "image_emb_start": 5, - "image_emb_end": 581, - "image_emb_len": 576, - "double_return_token_id": 271 - }, - "dit-llm-encode-video-i2v": { - "template": PROMPT_TEMPLATE_ENCODE_VIDEO_I2V, - "crop_start": 103, - "image_emb_start": 5, - "image_emb_end": 581, - "image_emb_len": 576, - "double_return_token_id": 271 - }, -} - -NEGATIVE_PROMPT = "Aerial view, aerial view, overexposed, low quality, deformation, a poor composition, bad hands, bad teeth, bad eyes, bad limbs, distortion" - - -class HunyuanVideoPrompter(BasePrompter): - 
- def __init__( - self, - tokenizer_1_path=None, - tokenizer_2_path=None, - ): - if tokenizer_1_path is None: - base_path = os.path.dirname(os.path.dirname(__file__)) - tokenizer_1_path = os.path.join( - base_path, "tokenizer_configs/hunyuan_video/tokenizer_1") - if tokenizer_2_path is None: - base_path = os.path.dirname(os.path.dirname(__file__)) - tokenizer_2_path = os.path.join( - base_path, "tokenizer_configs/hunyuan_video/tokenizer_2") - super().__init__() - self.tokenizer_1 = CLIPTokenizer.from_pretrained(tokenizer_1_path) - self.tokenizer_2 = LlamaTokenizerFast.from_pretrained(tokenizer_2_path, padding_side='right') - self.text_encoder_1: SD3TextEncoder1 = None - self.text_encoder_2: HunyuanVideoLLMEncoder = None - - self.prompt_template = PROMPT_TEMPLATE['dit-llm-encode'] - self.prompt_template_video = PROMPT_TEMPLATE['dit-llm-encode-video'] - - def fetch_models(self, - text_encoder_1: SD3TextEncoder1 = None, - text_encoder_2: Union[HunyuanVideoLLMEncoder, HunyuanVideoMLLMEncoder] = None): - self.text_encoder_1 = text_encoder_1 - self.text_encoder_2 = text_encoder_2 - if isinstance(text_encoder_2, HunyuanVideoMLLMEncoder): - # processor - # TODO: may need to replace processor with local implementation - base_path = os.path.dirname(os.path.dirname(__file__)) - tokenizer_2_path = os.path.join(base_path, "tokenizer_configs/hunyuan_video/tokenizer_2") - self.processor = CLIPImageProcessor.from_pretrained(tokenizer_2_path) - # template - self.prompt_template = PROMPT_TEMPLATE['dit-llm-encode-i2v'] - self.prompt_template_video = PROMPT_TEMPLATE['dit-llm-encode-video-i2v'] - - def apply_text_to_template(self, text, template): - assert isinstance(template, str) - if isinstance(text, list): - return [self.apply_text_to_template(text_) for text_ in text] - elif isinstance(text, str): - # Will send string to tokenizer. 
Used for llm - return template.format(text) - else: - raise TypeError(f"Unsupported prompt type: {type(text)}") - - def encode_prompt_using_clip(self, prompt, max_length, device): - tokenized_result = self.tokenizer_1( - prompt, - return_tensors="pt", - padding="max_length", - max_length=max_length, - truncation=True, - return_attention_mask=True - ) - input_ids = tokenized_result.input_ids.to(device) - attention_mask = tokenized_result.attention_mask.to(device) - return self.text_encoder_1(input_ids=input_ids, extra_mask=attention_mask)[0] - - def encode_prompt_using_llm(self, - prompt, - max_length, - device, - crop_start, - hidden_state_skip_layer=2, - use_attention_mask=True): - max_length += crop_start - inputs = self.tokenizer_2(prompt, - return_tensors="pt", - padding="max_length", - max_length=max_length, - truncation=True) - input_ids = inputs.input_ids.to(device) - attention_mask = inputs.attention_mask.to(device) - last_hidden_state = self.text_encoder_2(input_ids, attention_mask, hidden_state_skip_layer) - - # crop out - if crop_start > 0: - last_hidden_state = last_hidden_state[:, crop_start:] - attention_mask = (attention_mask[:, crop_start:] if use_attention_mask else None) - - return last_hidden_state, attention_mask - - def encode_prompt_using_mllm(self, - prompt, - images, - max_length, - device, - crop_start, - hidden_state_skip_layer=2, - use_attention_mask=True, - image_embed_interleave=4): - image_outputs = self.processor(images, return_tensors="pt")["pixel_values"].to(device) - max_length += crop_start - inputs = self.tokenizer_2(prompt, - return_tensors="pt", - padding="max_length", - max_length=max_length, - truncation=True) - input_ids = inputs.input_ids.to(device) - attention_mask = inputs.attention_mask.to(device) - last_hidden_state = self.text_encoder_2(input_ids=input_ids, - attention_mask=attention_mask, - hidden_state_skip_layer=hidden_state_skip_layer, - pixel_values=image_outputs) - - text_crop_start = (crop_start - 1 + 
self.prompt_template_video.get("image_emb_len", 576)) - image_crop_start = self.prompt_template_video.get("image_emb_start", 5) - image_crop_end = self.prompt_template_video.get("image_emb_end", 581) - batch_indices, last_double_return_token_indices = torch.where( - input_ids == self.prompt_template_video.get("double_return_token_id", 271)) - if last_double_return_token_indices.shape[0] == 3: - # in case the prompt is too long - last_double_return_token_indices = torch.cat(( - last_double_return_token_indices, - torch.tensor([input_ids.shape[-1]]), - )) - batch_indices = torch.cat((batch_indices, torch.tensor([0]))) - last_double_return_token_indices = (last_double_return_token_indices.reshape(input_ids.shape[0], -1)[:, -1]) - batch_indices = batch_indices.reshape(input_ids.shape[0], -1)[:, -1] - assistant_crop_start = (last_double_return_token_indices - 1 + self.prompt_template_video.get("image_emb_len", 576) - 4) - assistant_crop_end = (last_double_return_token_indices - 1 + self.prompt_template_video.get("image_emb_len", 576)) - attention_mask_assistant_crop_start = (last_double_return_token_indices - 4) - attention_mask_assistant_crop_end = last_double_return_token_indices - text_last_hidden_state = [] - text_attention_mask = [] - image_last_hidden_state = [] - image_attention_mask = [] - for i in range(input_ids.shape[0]): - text_last_hidden_state.append( - torch.cat([ - last_hidden_state[i, text_crop_start:assistant_crop_start[i].item()], - last_hidden_state[i, assistant_crop_end[i].item():], - ])) - text_attention_mask.append( - torch.cat([ - attention_mask[ - i, - crop_start:attention_mask_assistant_crop_start[i].item(), - ], - attention_mask[i, attention_mask_assistant_crop_end[i].item():], - ]) if use_attention_mask else None) - image_last_hidden_state.append(last_hidden_state[i, image_crop_start:image_crop_end]) - image_attention_mask.append( - torch.ones(image_last_hidden_state[-1].shape[0]).to(last_hidden_state.device). 
- to(attention_mask.dtype) if use_attention_mask else None) - - text_last_hidden_state = torch.stack(text_last_hidden_state) - text_attention_mask = torch.stack(text_attention_mask) - image_last_hidden_state = torch.stack(image_last_hidden_state) - image_attention_mask = torch.stack(image_attention_mask) - - image_last_hidden_state = image_last_hidden_state[:, ::image_embed_interleave, :] - image_attention_mask = image_attention_mask[:, ::image_embed_interleave] - - assert (text_last_hidden_state.shape[0] == text_attention_mask.shape[0] and - image_last_hidden_state.shape[0] == image_attention_mask.shape[0]) - - last_hidden_state = torch.cat([image_last_hidden_state, text_last_hidden_state], dim=1) - attention_mask = torch.cat([image_attention_mask, text_attention_mask], dim=1) - - return last_hidden_state, attention_mask - - def encode_prompt(self, - prompt, - images=None, - positive=True, - device="cuda", - clip_sequence_length=77, - llm_sequence_length=256, - data_type='video', - use_template=True, - hidden_state_skip_layer=2, - use_attention_mask=True, - image_embed_interleave=4): - - prompt = self.process_prompt(prompt, positive=positive) - - # apply template - if use_template: - template = self.prompt_template_video if data_type == 'video' else self.prompt_template - prompt_formated = self.apply_text_to_template(prompt, template['template']) - else: - prompt_formated = prompt - # Text encoder - if data_type == 'video': - crop_start = self.prompt_template_video.get("crop_start", 0) - else: - crop_start = self.prompt_template.get("crop_start", 0) - - # CLIP - pooled_prompt_emb = self.encode_prompt_using_clip(prompt, clip_sequence_length, device) - - # LLM - if images is None: - prompt_emb, attention_mask = self.encode_prompt_using_llm(prompt_formated, llm_sequence_length, device, crop_start, - hidden_state_skip_layer, use_attention_mask) - else: - prompt_emb, attention_mask = self.encode_prompt_using_mllm(prompt_formated, images, llm_sequence_length, device, - 
crop_start, hidden_state_skip_layer, use_attention_mask, - image_embed_interleave) - - return prompt_emb, pooled_prompt_emb, attention_mask diff --git a/diffsynth/prompters/kolors_prompter.py b/diffsynth/prompters/kolors_prompter.py deleted file mode 100644 index e3d5d58..0000000 --- a/diffsynth/prompters/kolors_prompter.py +++ /dev/null @@ -1,354 +0,0 @@ -from .base_prompter import BasePrompter -from ..models.model_manager import ModelManager -import json, os, re -from typing import List, Optional, Union, Dict -from sentencepiece import SentencePieceProcessor -from transformers import PreTrainedTokenizer -from transformers.utils import PaddingStrategy -from transformers.tokenization_utils_base import EncodedInput, BatchEncoding -from ..models.kolors_text_encoder import ChatGLMModel - - -class SPTokenizer: - def __init__(self, model_path: str): - # reload tokenizer - assert os.path.isfile(model_path), model_path - self.sp_model = SentencePieceProcessor(model_file=model_path) - - # BOS / EOS token IDs - self.n_words: int = self.sp_model.vocab_size() - self.bos_id: int = self.sp_model.bos_id() - self.eos_id: int = self.sp_model.eos_id() - self.pad_id: int = self.sp_model.unk_id() - assert self.sp_model.vocab_size() == self.sp_model.get_piece_size() - - role_special_tokens = ["<|system|>", "<|user|>", "<|assistant|>", "<|observation|>"] - special_tokens = ["[MASK]", "[gMASK]", "[sMASK]", "sop", "eop"] + role_special_tokens - self.special_tokens = {} - self.index_special_tokens = {} - for token in special_tokens: - self.special_tokens[token] = self.n_words - self.index_special_tokens[self.n_words] = token - self.n_words += 1 - self.role_special_token_expression = "|".join([re.escape(token) for token in role_special_tokens]) - - def tokenize(self, s: str, encode_special_tokens=False): - if encode_special_tokens: - last_index = 0 - t = [] - for match in re.finditer(self.role_special_token_expression, s): - if last_index < match.start(): - 
t.extend(self.sp_model.EncodeAsPieces(s[last_index:match.start()])) - t.append(s[match.start():match.end()]) - last_index = match.end() - if last_index < len(s): - t.extend(self.sp_model.EncodeAsPieces(s[last_index:])) - return t - else: - return self.sp_model.EncodeAsPieces(s) - - def encode(self, s: str, bos: bool = False, eos: bool = False) -> List[int]: - assert type(s) is str - t = self.sp_model.encode(s) - if bos: - t = [self.bos_id] + t - if eos: - t = t + [self.eos_id] - return t - - def decode(self, t: List[int]) -> str: - text, buffer = "", [] - for token in t: - if token in self.index_special_tokens: - if buffer: - text += self.sp_model.decode(buffer) - buffer = [] - text += self.index_special_tokens[token] - else: - buffer.append(token) - if buffer: - text += self.sp_model.decode(buffer) - return text - - def decode_tokens(self, tokens: List[str]) -> str: - text = self.sp_model.DecodePieces(tokens) - return text - - def convert_token_to_id(self, token): - """ Converts a token (str) in an id using the vocab. 
""" - if token in self.special_tokens: - return self.special_tokens[token] - return self.sp_model.PieceToId(token) - - def convert_id_to_token(self, index): - """Converts an index (integer) in a token (str) using the vocab.""" - if index in self.index_special_tokens: - return self.index_special_tokens[index] - if index in [self.eos_id, self.bos_id, self.pad_id] or index < 0: - return "" - return self.sp_model.IdToPiece(index) - - - -class ChatGLMTokenizer(PreTrainedTokenizer): - vocab_files_names = {"vocab_file": "tokenizer.model"} - - model_input_names = ["input_ids", "attention_mask", "position_ids"] - - def __init__(self, vocab_file, padding_side="left", clean_up_tokenization_spaces=False, encode_special_tokens=False, - **kwargs): - self.name = "GLMTokenizer" - - self.vocab_file = vocab_file - self.tokenizer = SPTokenizer(vocab_file) - self.special_tokens = { - "": self.tokenizer.bos_id, - "": self.tokenizer.eos_id, - "": self.tokenizer.pad_id - } - self.encode_special_tokens = encode_special_tokens - super().__init__(padding_side=padding_side, clean_up_tokenization_spaces=clean_up_tokenization_spaces, - encode_special_tokens=encode_special_tokens, - **kwargs) - - def get_command(self, token): - if token in self.special_tokens: - return self.special_tokens[token] - assert token in self.tokenizer.special_tokens, f"{token} is not a special token for {self.name}" - return self.tokenizer.special_tokens[token] - - @property - def unk_token(self) -> str: - return "" - - @property - def pad_token(self) -> str: - return "" - - @property - def pad_token_id(self): - return self.get_command("") - - @property - def eos_token(self) -> str: - return "" - - @property - def eos_token_id(self): - return self.get_command("") - - @property - def vocab_size(self): - return self.tokenizer.n_words - - def get_vocab(self): - """ Returns vocab as a dict """ - vocab = {self._convert_id_to_token(i): i for i in range(self.vocab_size)} - vocab.update(self.added_tokens_encoder) - return 
vocab - - def _tokenize(self, text, **kwargs): - return self.tokenizer.tokenize(text, encode_special_tokens=self.encode_special_tokens) - - def _convert_token_to_id(self, token): - """ Converts a token (str) in an id using the vocab. """ - return self.tokenizer.convert_token_to_id(token) - - def _convert_id_to_token(self, index): - """Converts an index (integer) in a token (str) using the vocab.""" - return self.tokenizer.convert_id_to_token(index) - - def convert_tokens_to_string(self, tokens: List[str]) -> str: - return self.tokenizer.decode_tokens(tokens) - - def save_vocabulary(self, save_directory, filename_prefix=None): - """ - Save the vocabulary and special tokens file to a directory. - - Args: - save_directory (`str`): - The directory in which to save the vocabulary. - filename_prefix (`str`, *optional*): - An optional prefix to add to the named of the saved files. - - Returns: - `Tuple(str)`: Paths to the files saved. - """ - if os.path.isdir(save_directory): - vocab_file = os.path.join( - save_directory, self.vocab_files_names["vocab_file"] - ) - else: - vocab_file = save_directory - - with open(self.vocab_file, 'rb') as fin: - proto_str = fin.read() - - with open(vocab_file, "wb") as writer: - writer.write(proto_str) - - return (vocab_file,) - - def get_prefix_tokens(self): - prefix_tokens = [self.get_command("[gMASK]"), self.get_command("sop")] - return prefix_tokens - - def build_single_message(self, role, metadata, message): - assert role in ["system", "user", "assistant", "observation"], role - role_tokens = [self.get_command(f"<|{role}|>")] + self.tokenizer.encode(f"{metadata}\n") - message_tokens = self.tokenizer.encode(message) - tokens = role_tokens + message_tokens - return tokens - - def build_chat_input(self, query, history=None, role="user"): - if history is None: - history = [] - input_ids = [] - for item in history: - content = item["content"] - if item["role"] == "system" and "tools" in item: - content = content + "\n" + 
json.dumps(item["tools"], indent=4, ensure_ascii=False) - input_ids.extend(self.build_single_message(item["role"], item.get("metadata", ""), content)) - input_ids.extend(self.build_single_message(role, "", query)) - input_ids.extend([self.get_command("<|assistant|>")]) - return self.batch_encode_plus([input_ids], return_tensors="pt", is_split_into_words=True) - - def build_inputs_with_special_tokens( - self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None - ) -> List[int]: - """ - Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and - adding special tokens. A BERT sequence has the following format: - - - single sequence: `[CLS] X [SEP]` - - pair of sequences: `[CLS] A [SEP] B [SEP]` - - Args: - token_ids_0 (`List[int]`): - List of IDs to which the special tokens will be added. - token_ids_1 (`List[int]`, *optional*): - Optional second list of IDs for sequence pairs. - - Returns: - `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens. - """ - prefix_tokens = self.get_prefix_tokens() - token_ids_0 = prefix_tokens + token_ids_0 - if token_ids_1 is not None: - token_ids_0 = token_ids_0 + token_ids_1 + [self.get_command("")] - return token_ids_0 - - def _pad( - self, - encoded_inputs: Union[Dict[str, EncodedInput], BatchEncoding], - max_length: Optional[int] = None, - padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD, - pad_to_multiple_of: Optional[int] = None, - return_attention_mask: Optional[bool] = None, - padding_side: Optional[str] = None, - ) -> dict: - """ - Pad encoded inputs (on left/right and up to predefined length or max length in the batch) - - Args: - encoded_inputs: - Dictionary of tokenized inputs (`List[int]`) or batch of tokenized inputs (`List[List[int]]`). - max_length: maximum length of the returned list and optionally padding length (see below). - Will truncate by taking into account the special tokens. 
- padding_strategy: PaddingStrategy to use for padding. - - - PaddingStrategy.LONGEST Pad to the longest sequence in the batch - - PaddingStrategy.MAX_LENGTH: Pad to the max length (default) - - PaddingStrategy.DO_NOT_PAD: Do not pad - The tokenizer padding sides are defined in self.padding_side: - - - 'left': pads on the left of the sequences - - 'right': pads on the right of the sequences - pad_to_multiple_of: (optional) Integer if set will pad the sequence to a multiple of the provided value. - This is especially useful to enable the use of Tensor Core on NVIDIA hardware with compute capability - `>= 7.5` (Volta). - return_attention_mask: - (optional) Set to False to avoid returning attention mask (default: set to model specifics) - """ - # Load from model defaults - assert self.padding_side == "left" - - required_input = encoded_inputs[self.model_input_names[0]] - seq_length = len(required_input) - - if padding_strategy == PaddingStrategy.LONGEST: - max_length = len(required_input) - - if max_length is not None and pad_to_multiple_of is not None and (max_length % pad_to_multiple_of != 0): - max_length = ((max_length // pad_to_multiple_of) + 1) * pad_to_multiple_of - - needs_to_be_padded = padding_strategy != PaddingStrategy.DO_NOT_PAD and len(required_input) != max_length - - # Initialize attention mask if not present. 
- if "attention_mask" not in encoded_inputs: - encoded_inputs["attention_mask"] = [1] * seq_length - - if "position_ids" not in encoded_inputs: - encoded_inputs["position_ids"] = list(range(seq_length)) - - if needs_to_be_padded: - difference = max_length - len(required_input) - - if "attention_mask" in encoded_inputs: - encoded_inputs["attention_mask"] = [0] * difference + encoded_inputs["attention_mask"] - if "position_ids" in encoded_inputs: - encoded_inputs["position_ids"] = [0] * difference + encoded_inputs["position_ids"] - encoded_inputs[self.model_input_names[0]] = [self.pad_token_id] * difference + required_input - - return encoded_inputs - - - -class KolorsPrompter(BasePrompter): - def __init__( - self, - tokenizer_path=None - ): - if tokenizer_path is None: - base_path = os.path.dirname(os.path.dirname(__file__)) - tokenizer_path = os.path.join(base_path, "tokenizer_configs/kolors/tokenizer") - super().__init__() - self.tokenizer = ChatGLMTokenizer.from_pretrained(tokenizer_path) - self.text_encoder: ChatGLMModel = None - - - def fetch_models(self, text_encoder: ChatGLMModel = None): - self.text_encoder = text_encoder - - - def encode_prompt_using_ChatGLM(self, prompt, text_encoder, tokenizer, max_length, clip_skip, device): - text_inputs = tokenizer( - prompt, - padding="max_length", - max_length=max_length, - truncation=True, - return_tensors="pt", - ).to(device) - output = text_encoder( - input_ids=text_inputs['input_ids'] , - attention_mask=text_inputs['attention_mask'], - position_ids=text_inputs['position_ids'], - output_hidden_states=True - ) - prompt_emb = output.hidden_states[-clip_skip].permute(1, 0, 2).clone() - pooled_prompt_emb = output.hidden_states[-1][-1, :, :].clone() - return prompt_emb, pooled_prompt_emb - - - def encode_prompt( - self, - prompt, - clip_skip=1, - clip_skip_2=2, - positive=True, - device="cuda" - ): - prompt = self.process_prompt(prompt, positive=positive) - prompt_emb, pooled_prompt_emb = 
self.encode_prompt_using_ChatGLM(prompt, self.text_encoder, self.tokenizer, 256, clip_skip_2, device) - - return pooled_prompt_emb, prompt_emb diff --git a/diffsynth/prompters/omnigen_prompter.py b/diffsynth/prompters/omnigen_prompter.py deleted file mode 100644 index 616efab..0000000 --- a/diffsynth/prompters/omnigen_prompter.py +++ /dev/null @@ -1,356 +0,0 @@ -import os -import re -from typing import Dict, List - -import torch -from PIL import Image -from torchvision import transforms -from transformers import AutoTokenizer -from huggingface_hub import snapshot_download -import numpy as np - - - -def crop_arr(pil_image, max_image_size): - while min(*pil_image.size) >= 2 * max_image_size: - pil_image = pil_image.resize( - tuple(x // 2 for x in pil_image.size), resample=Image.BOX - ) - - if max(*pil_image.size) > max_image_size: - scale = max_image_size / max(*pil_image.size) - pil_image = pil_image.resize( - tuple(round(x * scale) for x in pil_image.size), resample=Image.BICUBIC - ) - - if min(*pil_image.size) < 16: - scale = 16 / min(*pil_image.size) - pil_image = pil_image.resize( - tuple(round(x * scale) for x in pil_image.size), resample=Image.BICUBIC - ) - - arr = np.array(pil_image) - crop_y1 = (arr.shape[0] % 16) // 2 - crop_y2 = arr.shape[0] % 16 - crop_y1 - - crop_x1 = (arr.shape[1] % 16) // 2 - crop_x2 = arr.shape[1] % 16 - crop_x1 - - arr = arr[crop_y1:arr.shape[0]-crop_y2, crop_x1:arr.shape[1]-crop_x2] - return Image.fromarray(arr) - - - -class OmniGenPrompter: - def __init__(self, - text_tokenizer, - max_image_size: int=1024): - self.text_tokenizer = text_tokenizer - self.max_image_size = max_image_size - - self.image_transform = transforms.Compose([ - transforms.Lambda(lambda pil_image: crop_arr(pil_image, max_image_size)), - transforms.ToTensor(), - transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5], inplace=True) - ]) - - self.collator = OmniGenCollator() - self.separate_collator = OmniGenSeparateCollator() - - @classmethod - def 
from_pretrained(cls, model_name): - if not os.path.exists(model_name): - cache_folder = os.getenv('HF_HUB_CACHE') - model_name = snapshot_download(repo_id=model_name, - cache_dir=cache_folder, - allow_patterns="*.json") - text_tokenizer = AutoTokenizer.from_pretrained(model_name) - - return cls(text_tokenizer) - - - def process_image(self, image): - return self.image_transform(image) - - def process_multi_modal_prompt(self, text, input_images): - text = self.add_prefix_instruction(text) - if input_images is None or len(input_images) == 0: - model_inputs = self.text_tokenizer(text) - return {"input_ids": model_inputs.input_ids, "pixel_values": None, "image_sizes": None} - - pattern = r"<\|image_\d+\|>" - prompt_chunks = [self.text_tokenizer(chunk).input_ids for chunk in re.split(pattern, text)] - - for i in range(1, len(prompt_chunks)): - if prompt_chunks[i][0] == 1: - prompt_chunks[i] = prompt_chunks[i][1:] - - image_tags = re.findall(pattern, text) - image_ids = [int(s.split("|")[1].split("_")[-1]) for s in image_tags] - - unique_image_ids = sorted(list(set(image_ids))) - assert unique_image_ids == list(range(1, len(unique_image_ids)+1)), f"image_ids must start from 1, and must be continuous int, e.g. 
[1, 2, 3], cannot be {unique_image_ids}" - # total images must be the same as the number of image tags - assert len(unique_image_ids) == len(input_images), f"total images must be the same as the number of image tags, got {len(unique_image_ids)} image tags and {len(input_images)} images" - - input_images = [input_images[x-1] for x in image_ids] - - all_input_ids = [] - img_inx = [] - idx = 0 - for i in range(len(prompt_chunks)): - all_input_ids.extend(prompt_chunks[i]) - if i != len(prompt_chunks) -1: - start_inx = len(all_input_ids) - size = input_images[i].size(-2) * input_images[i].size(-1) // 16 // 16 - img_inx.append([start_inx, start_inx+size]) - all_input_ids.extend([0]*size) - - return {"input_ids": all_input_ids, "pixel_values": input_images, "image_sizes": img_inx} - - - def add_prefix_instruction(self, prompt): - user_prompt = '<|user|>\n' - generation_prompt = 'Generate an image according to the following instructions\n' - assistant_prompt = '<|assistant|>\n<|diffusion|>' - prompt_suffix = "<|end|>\n" - prompt = f"{user_prompt}{generation_prompt}{prompt}{prompt_suffix}{assistant_prompt}" - return prompt - - - def __call__(self, - instructions: List[str], - input_images: List[List[str]] = None, - height: int = 1024, - width: int = 1024, - negative_prompt: str = "low quality, jpeg artifacts, ugly, duplicate, morbid, mutilated, extra fingers, mutated hands, poorly drawn hands, poorly drawn face, mutation, deformed, blurry, dehydrated, bad anatomy, bad proportions, extra limbs, cloned face, disfigured, gross proportions, malformed limbs, missing arms, missing legs, extra arms, extra legs, fused fingers, too many fingers.", - use_img_cfg: bool = True, - separate_cfg_input: bool = False, - use_input_image_size_as_output: bool=False, - ) -> Dict: - - if input_images is None: - use_img_cfg = False - if isinstance(instructions, str): - instructions = [instructions] - input_images = [input_images] - - input_data = [] - for i in range(len(instructions)): - 
cur_instruction = instructions[i] - cur_input_images = None if input_images is None else input_images[i] - if cur_input_images is not None and len(cur_input_images) > 0: - cur_input_images = [self.process_image(x) for x in cur_input_images] - else: - cur_input_images = None - assert "<|image_1|>" not in cur_instruction - - mllm_input = self.process_multi_modal_prompt(cur_instruction, cur_input_images) - - - neg_mllm_input, img_cfg_mllm_input = None, None - neg_mllm_input = self.process_multi_modal_prompt(negative_prompt, None) - if use_img_cfg: - if cur_input_images is not None and len(cur_input_images) >= 1: - img_cfg_prompt = [f"<|image_{i+1}|>" for i in range(len(cur_input_images))] - img_cfg_mllm_input = self.process_multi_modal_prompt(" ".join(img_cfg_prompt), cur_input_images) - else: - img_cfg_mllm_input = neg_mllm_input - - if use_input_image_size_as_output: - input_data.append((mllm_input, neg_mllm_input, img_cfg_mllm_input, [mllm_input['pixel_values'][0].size(-2), mllm_input['pixel_values'][0].size(-1)])) - else: - input_data.append((mllm_input, neg_mllm_input, img_cfg_mllm_input, [height, width])) - - if separate_cfg_input: - return self.separate_collator(input_data) - return self.collator(input_data) - - - - -class OmniGenCollator: - def __init__(self, pad_token_id=2, hidden_size=3072): - self.pad_token_id = pad_token_id - self.hidden_size = hidden_size - - def create_position(self, attention_mask, num_tokens_for_output_images): - position_ids = [] - text_length = attention_mask.size(-1) - img_length = max(num_tokens_for_output_images) - for mask in attention_mask: - temp_l = torch.sum(mask) - temp_position = [0]*(text_length-temp_l) + [i for i in range(temp_l+img_length+1)] # we add a time embedding into the sequence, so add one more token - position_ids.append(temp_position) - return torch.LongTensor(position_ids) - - def create_mask(self, attention_mask, num_tokens_for_output_images): - extended_mask = [] - padding_images = [] - text_length = 
attention_mask.size(-1) - img_length = max(num_tokens_for_output_images) - seq_len = text_length + img_length + 1 # we add a time embedding into the sequence, so add one more token - inx = 0 - for mask in attention_mask: - temp_l = torch.sum(mask) - pad_l = text_length - temp_l - - temp_mask = torch.tril(torch.ones(size=(temp_l+1, temp_l+1))) - - image_mask = torch.zeros(size=(temp_l+1, img_length)) - temp_mask = torch.cat([temp_mask, image_mask], dim=-1) - - image_mask = torch.ones(size=(img_length, temp_l+img_length+1)) - temp_mask = torch.cat([temp_mask, image_mask], dim=0) - - if pad_l > 0: - pad_mask = torch.zeros(size=(temp_l+1+img_length, pad_l)) - temp_mask = torch.cat([pad_mask, temp_mask], dim=-1) - - pad_mask = torch.ones(size=(pad_l, seq_len)) - temp_mask = torch.cat([pad_mask, temp_mask], dim=0) - - true_img_length = num_tokens_for_output_images[inx] - pad_img_length = img_length - true_img_length - if pad_img_length > 0: - temp_mask[:, -pad_img_length:] = 0 - temp_padding_imgs = torch.zeros(size=(1, pad_img_length, self.hidden_size)) - else: - temp_padding_imgs = None - - extended_mask.append(temp_mask.unsqueeze(0)) - padding_images.append(temp_padding_imgs) - inx += 1 - return torch.cat(extended_mask, dim=0), padding_images - - def adjust_attention_for_input_images(self, attention_mask, image_sizes): - for b_inx in image_sizes.keys(): - for start_inx, end_inx in image_sizes[b_inx]: - attention_mask[b_inx][start_inx:end_inx, start_inx:end_inx] = 1 - - return attention_mask - - def pad_input_ids(self, input_ids, image_sizes): - max_l = max([len(x) for x in input_ids]) - padded_ids = [] - attention_mask = [] - new_image_sizes = [] - - for i in range(len(input_ids)): - temp_ids = input_ids[i] - temp_l = len(temp_ids) - pad_l = max_l - temp_l - if pad_l == 0: - attention_mask.append([1]*max_l) - padded_ids.append(temp_ids) - else: - attention_mask.append([0]*pad_l+[1]*temp_l) - padded_ids.append([self.pad_token_id]*pad_l+temp_ids) - - if i in image_sizes: 
- new_inx = [] - for old_inx in image_sizes[i]: - new_inx.append([x+pad_l for x in old_inx]) - image_sizes[i] = new_inx - - return torch.LongTensor(padded_ids), torch.LongTensor(attention_mask), image_sizes - - - def process_mllm_input(self, mllm_inputs, target_img_size): - num_tokens_for_output_images = [] - for img_size in target_img_size: - num_tokens_for_output_images.append(img_size[0]*img_size[1]//16//16) - - pixel_values, image_sizes = [], {} - b_inx = 0 - for x in mllm_inputs: - if x['pixel_values'] is not None: - pixel_values.extend(x['pixel_values']) - for size in x['image_sizes']: - if b_inx not in image_sizes: - image_sizes[b_inx] = [size] - else: - image_sizes[b_inx].append(size) - b_inx += 1 - pixel_values = [x.unsqueeze(0) for x in pixel_values] - - - input_ids = [x['input_ids'] for x in mllm_inputs] - padded_input_ids, attention_mask, image_sizes = self.pad_input_ids(input_ids, image_sizes) - position_ids = self.create_position(attention_mask, num_tokens_for_output_images) - attention_mask, padding_images = self.create_mask(attention_mask, num_tokens_for_output_images) - attention_mask = self.adjust_attention_for_input_images(attention_mask, image_sizes) - - return padded_input_ids, position_ids, attention_mask, padding_images, pixel_values, image_sizes - - - def __call__(self, features): - mllm_inputs = [f[0] for f in features] - cfg_mllm_inputs = [f[1] for f in features] - img_cfg_mllm_input = [f[2] for f in features] - target_img_size = [f[3] for f in features] - - - if img_cfg_mllm_input[0] is not None: - mllm_inputs = mllm_inputs + cfg_mllm_inputs + img_cfg_mllm_input - target_img_size = target_img_size + target_img_size + target_img_size - else: - mllm_inputs = mllm_inputs + cfg_mllm_inputs - target_img_size = target_img_size + target_img_size - - - all_padded_input_ids, all_position_ids, all_attention_mask, all_padding_images, all_pixel_values, all_image_sizes = self.process_mllm_input(mllm_inputs, target_img_size) - - data = {"input_ids": 
all_padded_input_ids, - "attention_mask": all_attention_mask, - "position_ids": all_position_ids, - "input_pixel_values": all_pixel_values, - "input_image_sizes": all_image_sizes, - "padding_images": all_padding_images, - } - return data - - -class OmniGenSeparateCollator(OmniGenCollator): - def __call__(self, features): - mllm_inputs = [f[0] for f in features] - cfg_mllm_inputs = [f[1] for f in features] - img_cfg_mllm_input = [f[2] for f in features] - target_img_size = [f[3] for f in features] - - all_padded_input_ids, all_attention_mask, all_position_ids, all_pixel_values, all_image_sizes, all_padding_images = [], [], [], [], [], [] - - - padded_input_ids, position_ids, attention_mask, padding_images, pixel_values, image_sizes = self.process_mllm_input(mllm_inputs, target_img_size) - all_padded_input_ids.append(padded_input_ids) - all_attention_mask.append(attention_mask) - all_position_ids.append(position_ids) - all_pixel_values.append(pixel_values) - all_image_sizes.append(image_sizes) - all_padding_images.append(padding_images) - - if cfg_mllm_inputs[0] is not None: - padded_input_ids, position_ids, attention_mask, padding_images, pixel_values, image_sizes = self.process_mllm_input(cfg_mllm_inputs, target_img_size) - all_padded_input_ids.append(padded_input_ids) - all_attention_mask.append(attention_mask) - all_position_ids.append(position_ids) - all_pixel_values.append(pixel_values) - all_image_sizes.append(image_sizes) - all_padding_images.append(padding_images) - if img_cfg_mllm_input[0] is not None: - padded_input_ids, position_ids, attention_mask, padding_images, pixel_values, image_sizes = self.process_mllm_input(img_cfg_mllm_input, target_img_size) - all_padded_input_ids.append(padded_input_ids) - all_attention_mask.append(attention_mask) - all_position_ids.append(position_ids) - all_pixel_values.append(pixel_values) - all_image_sizes.append(image_sizes) - all_padding_images.append(padding_images) - - data = {"input_ids": all_padded_input_ids, - 
"attention_mask": all_attention_mask, - "position_ids": all_position_ids, - "input_pixel_values": all_pixel_values, - "input_image_sizes": all_image_sizes, - "padding_images": all_padding_images, - } - return data diff --git a/diffsynth/prompters/omost.py b/diffsynth/prompters/omost.py deleted file mode 100644 index 81828ad..0000000 --- a/diffsynth/prompters/omost.py +++ /dev/null @@ -1,323 +0,0 @@ -from transformers import AutoTokenizer, TextIteratorStreamer -import difflib -import torch -import numpy as np -import re -from ..models.model_manager import ModelManager -from PIL import Image - -valid_colors = { # r, g, b - 'aliceblue': (240, 248, 255), 'antiquewhite': (250, 235, 215), 'aqua': (0, 255, 255), - 'aquamarine': (127, 255, 212), 'azure': (240, 255, 255), 'beige': (245, 245, 220), - 'bisque': (255, 228, 196), 'black': (0, 0, 0), 'blanchedalmond': (255, 235, 205), 'blue': (0, 0, 255), - 'blueviolet': (138, 43, 226), 'brown': (165, 42, 42), 'burlywood': (222, 184, 135), - 'cadetblue': (95, 158, 160), 'chartreuse': (127, 255, 0), 'chocolate': (210, 105, 30), - 'coral': (255, 127, 80), 'cornflowerblue': (100, 149, 237), 'cornsilk': (255, 248, 220), - 'crimson': (220, 20, 60), 'cyan': (0, 255, 255), 'darkblue': (0, 0, 139), 'darkcyan': (0, 139, 139), - 'darkgoldenrod': (184, 134, 11), 'darkgray': (169, 169, 169), 'darkgrey': (169, 169, 169), - 'darkgreen': (0, 100, 0), 'darkkhaki': (189, 183, 107), 'darkmagenta': (139, 0, 139), - 'darkolivegreen': (85, 107, 47), 'darkorange': (255, 140, 0), 'darkorchid': (153, 50, 204), - 'darkred': (139, 0, 0), 'darksalmon': (233, 150, 122), 'darkseagreen': (143, 188, 143), - 'darkslateblue': (72, 61, 139), 'darkslategray': (47, 79, 79), 'darkslategrey': (47, 79, 79), - 'darkturquoise': (0, 206, 209), 'darkviolet': (148, 0, 211), 'deeppink': (255, 20, 147), - 'deepskyblue': (0, 191, 255), 'dimgray': (105, 105, 105), 'dimgrey': (105, 105, 105), - 'dodgerblue': (30, 144, 255), 'firebrick': (178, 34, 34), 'floralwhite': (255, 250, 
240), - 'forestgreen': (34, 139, 34), 'fuchsia': (255, 0, 255), 'gainsboro': (220, 220, 220), - 'ghostwhite': (248, 248, 255), 'gold': (255, 215, 0), 'goldenrod': (218, 165, 32), - 'gray': (128, 128, 128), 'grey': (128, 128, 128), 'green': (0, 128, 0), 'greenyellow': (173, 255, 47), - 'honeydew': (240, 255, 240), 'hotpink': (255, 105, 180), 'indianred': (205, 92, 92), - 'indigo': (75, 0, 130), 'ivory': (255, 255, 240), 'khaki': (240, 230, 140), 'lavender': (230, 230, 250), - 'lavenderblush': (255, 240, 245), 'lawngreen': (124, 252, 0), 'lemonchiffon': (255, 250, 205), - 'lightblue': (173, 216, 230), 'lightcoral': (240, 128, 128), 'lightcyan': (224, 255, 255), - 'lightgoldenrodyellow': (250, 250, 210), 'lightgray': (211, 211, 211), 'lightgrey': (211, 211, 211), - 'lightgreen': (144, 238, 144), 'lightpink': (255, 182, 193), 'lightsalmon': (255, 160, 122), - 'lightseagreen': (32, 178, 170), 'lightskyblue': (135, 206, 250), 'lightslategray': (119, 136, 153), - 'lightslategrey': (119, 136, 153), 'lightsteelblue': (176, 196, 222), 'lightyellow': (255, 255, 224), - 'lime': (0, 255, 0), 'limegreen': (50, 205, 50), 'linen': (250, 240, 230), 'magenta': (255, 0, 255), - 'maroon': (128, 0, 0), 'mediumaquamarine': (102, 205, 170), 'mediumblue': (0, 0, 205), - 'mediumorchid': (186, 85, 211), 'mediumpurple': (147, 112, 219), 'mediumseagreen': (60, 179, 113), - 'mediumslateblue': (123, 104, 238), 'mediumspringgreen': (0, 250, 154), - 'mediumturquoise': (72, 209, 204), 'mediumvioletred': (199, 21, 133), 'midnightblue': (25, 25, 112), - 'mintcream': (245, 255, 250), 'mistyrose': (255, 228, 225), 'moccasin': (255, 228, 181), - 'navajowhite': (255, 222, 173), 'navy': (0, 0, 128), 'navyblue': (0, 0, 128), - 'oldlace': (253, 245, 230), 'olive': (128, 128, 0), 'olivedrab': (107, 142, 35), - 'orange': (255, 165, 0), 'orangered': (255, 69, 0), 'orchid': (218, 112, 214), - 'palegoldenrod': (238, 232, 170), 'palegreen': (152, 251, 152), 'paleturquoise': (175, 238, 238), - 'palevioletred': 
(219, 112, 147), 'papayawhip': (255, 239, 213), 'peachpuff': (255, 218, 185), - 'peru': (205, 133, 63), 'pink': (255, 192, 203), 'plum': (221, 160, 221), 'powderblue': (176, 224, 230), - 'purple': (128, 0, 128), 'rebeccapurple': (102, 51, 153), 'red': (255, 0, 0), - 'rosybrown': (188, 143, 143), 'royalblue': (65, 105, 225), 'saddlebrown': (139, 69, 19), - 'salmon': (250, 128, 114), 'sandybrown': (244, 164, 96), 'seagreen': (46, 139, 87), - 'seashell': (255, 245, 238), 'sienna': (160, 82, 45), 'silver': (192, 192, 192), - 'skyblue': (135, 206, 235), 'slateblue': (106, 90, 205), 'slategray': (112, 128, 144), - 'slategrey': (112, 128, 144), 'snow': (255, 250, 250), 'springgreen': (0, 255, 127), - 'steelblue': (70, 130, 180), 'tan': (210, 180, 140), 'teal': (0, 128, 128), 'thistle': (216, 191, 216), - 'tomato': (255, 99, 71), 'turquoise': (64, 224, 208), 'violet': (238, 130, 238), - 'wheat': (245, 222, 179), 'white': (255, 255, 255), 'whitesmoke': (245, 245, 245), - 'yellow': (255, 255, 0), 'yellowgreen': (154, 205, 50) -} - -valid_locations = { # x, y in 90*90 - 'in the center': (45, 45), - 'on the left': (15, 45), - 'on the right': (75, 45), - 'on the top': (45, 15), - 'on the bottom': (45, 75), - 'on the top-left': (15, 15), - 'on the top-right': (75, 15), - 'on the bottom-left': (15, 75), - 'on the bottom-right': (75, 75) -} - -valid_offsets = { # x, y in 90*90 - 'no offset': (0, 0), - 'slightly to the left': (-10, 0), - 'slightly to the right': (10, 0), - 'slightly to the upper': (0, -10), - 'slightly to the lower': (0, 10), - 'slightly to the upper-left': (-10, -10), - 'slightly to the upper-right': (10, -10), - 'slightly to the lower-left': (-10, 10), - 'slightly to the lower-right': (10, 10)} - -valid_areas = { # w, h in 90*90 - "a small square area": (50, 50), - "a small vertical area": (40, 60), - "a small horizontal area": (60, 40), - "a medium-sized square area": (60, 60), - "a medium-sized vertical area": (50, 80), - "a medium-sized horizontal area": (80, 
50), - "a large square area": (70, 70), - "a large vertical area": (60, 90), - "a large horizontal area": (90, 60) -} - -def safe_str(x): - return x.strip(',. ') + '.' - -def closest_name(input_str, options): - input_str = input_str.lower() - - closest_match = difflib.get_close_matches(input_str, list(options.keys()), n=1, cutoff=0.5) - assert isinstance(closest_match, list) and len(closest_match) > 0, f'The value [{input_str}] is not valid!' - result = closest_match[0] - - if result != input_str: - print(f'Automatically corrected [{input_str}] -> [{result}].') - - return result - -class Canvas: - @staticmethod - def from_bot_response(response: str): - - matched = re.search(r'```python\n(.*?)\n```', response, re.DOTALL) - assert matched, 'Response does not contain codes!' - code_content = matched.group(1) - assert 'canvas = Canvas()' in code_content, 'Code block must include valid canvas var!' - local_vars = {'Canvas': Canvas} - exec(code_content, {}, local_vars) - canvas = local_vars.get('canvas', None) - assert isinstance(canvas, Canvas), 'Code block must produce valid canvas var!' - return canvas - - def __init__(self): - self.components = [] - self.color = None - self.record_tags = True - self.prefixes = [] - self.suffixes = [] - return - - def set_global_description(self, description: str, detailed_descriptions: list, tags: str, - HTML_web_color_name: str): - assert isinstance(description, str), 'Global description is not valid!' - assert isinstance(detailed_descriptions, list) and all(isinstance(item, str) for item in detailed_descriptions), \ - 'Global detailed_descriptions is not valid!' - assert isinstance(tags, str), 'Global tags is not valid!' 
- - HTML_web_color_name = closest_name(HTML_web_color_name, valid_colors) - self.color = np.array([[valid_colors[HTML_web_color_name]]], dtype=np.uint8) - - self.prefixes = [description] - self.suffixes = detailed_descriptions - - if self.record_tags: - self.suffixes = self.suffixes + [tags] - - self.prefixes = [safe_str(x) for x in self.prefixes] - self.suffixes = [safe_str(x) for x in self.suffixes] - - return - - def add_local_description(self, location: str, offset: str, area: str, distance_to_viewer: float, description: str, - detailed_descriptions: list, tags: str, atmosphere: str, style: str, - quality_meta: str, HTML_web_color_name: str): - assert isinstance(description, str), 'Local description is wrong!' - assert isinstance(distance_to_viewer, (int, float)) and distance_to_viewer > 0, \ - f'The distance_to_viewer for [{description}] is not positive float number!' - assert isinstance(detailed_descriptions, list) and all(isinstance(item, str) for item in detailed_descriptions), \ - f'The detailed_descriptions for [{description}] is not valid!' - assert isinstance(tags, str), f'The tags for [{description}] is not valid!' - assert isinstance(atmosphere, str), f'The atmosphere for [{description}] is not valid!' - assert isinstance(style, str), f'The style for [{description}] is not valid!' - assert isinstance(quality_meta, str), f'The quality_meta for [{description}] is not valid!' 
- - location = closest_name(location, valid_locations) - offset = closest_name(offset, valid_offsets) - area = closest_name(area, valid_areas) - HTML_web_color_name = closest_name(HTML_web_color_name, valid_colors) - - xb, yb = valid_locations[location] - xo, yo = valid_offsets[offset] - w, h = valid_areas[area] - rect = (yb + yo - h // 2, yb + yo + h // 2, xb + xo - w // 2, xb + xo + w // 2) - rect = [max(0, min(90, i)) for i in rect] - color = np.array([[valid_colors[HTML_web_color_name]]], dtype=np.uint8) - - prefixes = self.prefixes + [description] - suffixes = detailed_descriptions - - if self.record_tags: - suffixes = suffixes + [tags, atmosphere, style, quality_meta] - - prefixes = [safe_str(x) for x in prefixes] - suffixes = [safe_str(x) for x in suffixes] - - self.components.append(dict( - rect=rect, - distance_to_viewer=distance_to_viewer, - color=color, - prefixes=prefixes, - suffixes=suffixes, - location=location, - )) - - return - - def process(self): - # sort components - self.components = sorted(self.components, key=lambda x: x['distance_to_viewer'], reverse=True) - - # compute initial latent - # print(self.color) - initial_latent = np.zeros(shape=(90, 90, 3), dtype=np.float32) + self.color - - for component in self.components: - a, b, c, d = component['rect'] - initial_latent[a:b, c:d] = 0.7 * component['color'] + 0.3 * initial_latent[a:b, c:d] - - initial_latent = initial_latent.clip(0, 255).astype(np.uint8) - - # compute conditions - - bag_of_conditions = [ - dict(mask=np.ones(shape=(90, 90), dtype=np.float32), prefixes=self.prefixes, suffixes=self.suffixes,location= "full") - ] - - for i, component in enumerate(self.components): - a, b, c, d = component['rect'] - m = np.zeros(shape=(90, 90), dtype=np.float32) - m[a:b, c:d] = 1.0 - bag_of_conditions.append(dict( - mask = m, - prefixes = component['prefixes'], - suffixes = component['suffixes'], - location = component['location'], - )) - - return dict( - initial_latent = initial_latent, - 
bag_of_conditions = bag_of_conditions, - ) - - -class OmostPromter(torch.nn.Module): - - def __init__(self,model = None,tokenizer = None, template = "",device="cpu"): - super().__init__() - self.model=model - self.tokenizer = tokenizer - self.device = device - if template == "": - template = r'''You are a helpful AI assistant to compose images using the below python class `Canvas`: - ```python - class Canvas: - def set_global_description(self, description: str, detailed_descriptions: list[str], tags: str, HTML_web_color_name: str): - pass - - def add_local_description(self, location: str, offset: str, area: str, distance_to_viewer: float, description: str, detailed_descriptions: list[str], tags: str, atmosphere: str, style: str, quality_meta: str, HTML_web_color_name: str): - assert location in ["in the center", "on the left", "on the right", "on the top", "on the bottom", "on the top-left", "on the top-right", "on the bottom-left", "on the bottom-right"] - assert offset in ["no offset", "slightly to the left", "slightly to the right", "slightly to the upper", "slightly to the lower", "slightly to the upper-left", "slightly to the upper-right", "slightly to the lower-left", "slightly to the lower-right"] - assert area in ["a small square area", "a small vertical area", "a small horizontal area", "a medium-sized square area", "a medium-sized vertical area", "a medium-sized horizontal area", "a large square area", "a large vertical area", "a large horizontal area"] - assert distance_to_viewer > 0 - pass - ```''' - self.template = template - - @staticmethod - def from_model_manager(model_manager: ModelManager): - model, model_path = model_manager.fetch_model("omost_prompt", require_model_path=True) - tokenizer = AutoTokenizer.from_pretrained(model_path) - omost = OmostPromter( - model= model, - tokenizer = tokenizer, - device = model_manager.device - ) - return omost - - - def __call__(self,prompt_dict:dict): - raw_prompt=prompt_dict["prompt"] - conversation = 
[{"role": "system", "content": self.template}] - conversation.append({"role": "user", "content": raw_prompt}) - - input_ids = self.tokenizer.apply_chat_template(conversation, return_tensors="pt", add_generation_prompt=True).to(self.device) - streamer = TextIteratorStreamer(self.tokenizer, timeout=10.0, skip_prompt=True, skip_special_tokens=True) - attention_mask = torch.ones(input_ids.shape, dtype=torch.bfloat16, device=self.device) - - generate_kwargs = dict( - input_ids = input_ids, - streamer = streamer, - # stopping_criteria=stopping_criteria, - # max_new_tokens=max_new_tokens, - do_sample = True, - attention_mask = attention_mask, - pad_token_id = self.tokenizer.eos_token_id, - # temperature=temperature, - # top_p=top_p, - ) - self.model.generate(**generate_kwargs) - outputs = [] - for text in streamer: - outputs.append(text) - llm_outputs = "".join(outputs) - - canvas = Canvas.from_bot_response(llm_outputs) - canvas_output = canvas.process() - - prompts = [" ".join(_["prefixes"]+_["suffixes"][:2]) for _ in canvas_output["bag_of_conditions"]] - canvas_output["prompt"] = prompts[0] - canvas_output["prompts"] = prompts[1:] - - raw_masks = [_["mask"] for _ in canvas_output["bag_of_conditions"]] - masks=[] - for mask in raw_masks: - mask[mask>0.5]=255 - mask = np.stack([mask] * 3, axis=-1).astype("uint8") - masks.append(Image.fromarray(mask)) - - canvas_output["masks"] = masks - prompt_dict.update(canvas_output) - print(f"Your prompt is extended by Omost:\n") - cnt = 0 - for component,pmt in zip(canvas_output["bag_of_conditions"],prompts): - loc = component["location"] - cnt += 1 - print(f"Component {cnt} - Location : {loc}\nPrompt:{pmt}\n") - - return prompt_dict - - - - \ No newline at end of file diff --git a/diffsynth/prompters/prompt_refiners.py b/diffsynth/prompters/prompt_refiners.py deleted file mode 100644 index 0ac19f5..0000000 --- a/diffsynth/prompters/prompt_refiners.py +++ /dev/null @@ -1,130 +0,0 @@ -from transformers import AutoTokenizer -from 
..models.model_manager import ModelManager -import torch -from .omost import OmostPromter - -class BeautifulPrompt(torch.nn.Module): - def __init__(self, tokenizer_path=None, model=None, template=""): - super().__init__() - self.tokenizer = AutoTokenizer.from_pretrained(tokenizer_path) - self.model = model - self.template = template - - - @staticmethod - def from_model_manager(model_manager: ModelManager): - model, model_path = model_manager.fetch_model("beautiful_prompt", require_model_path=True) - template = 'Instruction: Give a simple description of the image to generate a drawing prompt.\nInput: {raw_prompt}\nOutput:' - if model_path.endswith("v2"): - template = """Converts a simple image description into a prompt. \ -Prompts are formatted as multiple related tags separated by commas, plus you can use () to increase the weight, [] to decrease the weight, \ -or use a number to specify the weight. You should add appropriate words to make the images described in the prompt more aesthetically pleasing, \ -but make sure there is a correlation between the input and output.\n\ -### Input: {raw_prompt}\n### Output:""" - beautiful_prompt = BeautifulPrompt( - tokenizer_path=model_path, - model=model, - template=template - ) - return beautiful_prompt - - - def __call__(self, raw_prompt, positive=True, **kwargs): - if positive: - model_input = self.template.format(raw_prompt=raw_prompt) - input_ids = self.tokenizer.encode(model_input, return_tensors='pt').to(self.model.device) - outputs = self.model.generate( - input_ids, - max_new_tokens=384, - do_sample=True, - temperature=0.9, - top_k=50, - top_p=0.95, - repetition_penalty=1.1, - num_return_sequences=1 - ) - prompt = raw_prompt + ", " + self.tokenizer.batch_decode( - outputs[:, input_ids.size(1):], - skip_special_tokens=True - )[0].strip() - print(f"Your prompt is refined by BeautifulPrompt: {prompt}") - return prompt - else: - return raw_prompt - - - -class QwenPrompt(torch.nn.Module): - # This class leverages the 
open-source Qwen model to translate Chinese prompts into English, - # with an integrated optimization mechanism for enhanced translation quality. - def __init__(self, tokenizer_path=None, model=None, system_prompt=""): - super().__init__() - self.tokenizer = AutoTokenizer.from_pretrained(tokenizer_path) - self.model = model - self.system_prompt = system_prompt - - - @staticmethod - def from_model_manager(model_nameger: ModelManager): - model, model_path = model_nameger.fetch_model("qwen_prompt", require_model_path=True) - system_prompt = """You are an English image describer. Here are some example image styles:\n\n1. Extreme close-up: Clear focus on a single object with a blurred background, highlighted under natural sunlight.\n2. Vintage: A photograph of a historical scene, using techniques such as Daguerreotype or cyanotype.\n3. Anime: A stylized cartoon image, emphasizing hyper-realistic portraits and luminous brushwork.\n4. Candid: A natural, unposed shot capturing spontaneous moments, often with cinematic qualities.\n5. Landscape: A photorealistic image of natural scenery, such as a sunrise over the sea.\n6. Design: Colorful and detailed illustrations, often in the style of 2D game art or botanical illustrations.\n7. Urban: An ultrarealistic scene in a modern setting, possibly a cityscape viewed from indoors.\n\nYour task is to translate a given Chinese image description into a concise and precise English description. Ensure that the imagery is vivid and descriptive, and include stylistic elements to enrich the description.\nPlease note the following points:\n\n1. Capture the essence and mood of the Chinese description without including direct phrases or words from the examples provided.\n2. You should add appropriate words to make the images described in the prompt more aesthetically pleasing. If the Chinese description does not specify a style, you need to add some stylistic descriptions based on the essence of the Chinese text.\n3. 
The generated English description should not exceed 200 words.\n\n""" - qwen_prompt = QwenPrompt( - tokenizer_path=model_path, - model=model, - system_prompt=system_prompt - ) - return qwen_prompt - - - def __call__(self, raw_prompt, positive=True, **kwargs): - if positive: - messages = [{ - 'role': 'system', - 'content': self.system_prompt - }, { - 'role': 'user', - 'content': raw_prompt - }] - text = self.tokenizer.apply_chat_template( - messages, - tokenize=False, - add_generation_prompt=True - ) - model_inputs = self.tokenizer([text], return_tensors="pt").to(self.model.device) - - generated_ids = self.model.generate( - model_inputs.input_ids, - max_new_tokens=512 - ) - generated_ids = [ - output_ids[len(input_ids):] for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids) - ] - - prompt = self.tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0] - print(f"Your prompt is refined by Qwen: {prompt}") - return prompt - else: - return raw_prompt - - - -class Translator(torch.nn.Module): - def __init__(self, tokenizer_path=None, model=None): - super().__init__() - self.tokenizer = AutoTokenizer.from_pretrained(tokenizer_path) - self.model = model - - - @staticmethod - def from_model_manager(model_manager: ModelManager): - model, model_path = model_manager.fetch_model("translator", require_model_path=True) - translator = Translator(tokenizer_path=model_path, model=model) - return translator - - - def __call__(self, prompt, **kwargs): - input_ids = self.tokenizer.encode(prompt, return_tensors='pt').to(self.model.device) - output_ids = self.model.generate(input_ids) - prompt = self.tokenizer.batch_decode(output_ids, skip_special_tokens=True)[0] - print(f"Your prompt is translated: {prompt}") - return prompt diff --git a/diffsynth/prompters/sd3_prompter.py b/diffsynth/prompters/sd3_prompter.py deleted file mode 100644 index ecf9bca..0000000 --- a/diffsynth/prompters/sd3_prompter.py +++ /dev/null @@ -1,93 +0,0 @@ -from .base_prompter import 
BasePrompter -from ..models.model_manager import ModelManager -from ..models import SD3TextEncoder1, SD3TextEncoder2, SD3TextEncoder3 -from transformers import CLIPTokenizer, T5TokenizerFast -import os, torch - - -class SD3Prompter(BasePrompter): - def __init__( - self, - tokenizer_1_path=None, - tokenizer_2_path=None, - tokenizer_3_path=None - ): - if tokenizer_1_path is None: - base_path = os.path.dirname(os.path.dirname(__file__)) - tokenizer_1_path = os.path.join(base_path, "tokenizer_configs/stable_diffusion_3/tokenizer_1") - if tokenizer_2_path is None: - base_path = os.path.dirname(os.path.dirname(__file__)) - tokenizer_2_path = os.path.join(base_path, "tokenizer_configs/stable_diffusion_3/tokenizer_2") - if tokenizer_3_path is None: - base_path = os.path.dirname(os.path.dirname(__file__)) - tokenizer_3_path = os.path.join(base_path, "tokenizer_configs/stable_diffusion_3/tokenizer_3") - super().__init__() - self.tokenizer_1 = CLIPTokenizer.from_pretrained(tokenizer_1_path) - self.tokenizer_2 = CLIPTokenizer.from_pretrained(tokenizer_2_path) - self.tokenizer_3 = T5TokenizerFast.from_pretrained(tokenizer_3_path) - self.text_encoder_1: SD3TextEncoder1 = None - self.text_encoder_2: SD3TextEncoder2 = None - self.text_encoder_3: SD3TextEncoder3 = None - - - def fetch_models(self, text_encoder_1: SD3TextEncoder1 = None, text_encoder_2: SD3TextEncoder2 = None, text_encoder_3: SD3TextEncoder3 = None): - self.text_encoder_1 = text_encoder_1 - self.text_encoder_2 = text_encoder_2 - self.text_encoder_3 = text_encoder_3 - - - def encode_prompt_using_clip(self, prompt, text_encoder, tokenizer, max_length, device): - input_ids = tokenizer( - prompt, - return_tensors="pt", - padding="max_length", - max_length=max_length, - truncation=True - ).input_ids.to(device) - pooled_prompt_emb, prompt_emb = text_encoder(input_ids) - return pooled_prompt_emb, prompt_emb - - - def encode_prompt_using_t5(self, prompt, text_encoder, tokenizer, max_length, device): - input_ids = tokenizer( 
- prompt, - return_tensors="pt", - padding="max_length", - max_length=max_length, - truncation=True, - add_special_tokens=True, - ).input_ids.to(device) - prompt_emb = text_encoder(input_ids) - prompt_emb = prompt_emb.reshape((1, prompt_emb.shape[0]*prompt_emb.shape[1], -1)) - - return prompt_emb - - - def encode_prompt( - self, - prompt, - positive=True, - device="cuda", - t5_sequence_length=77, - ): - prompt = self.process_prompt(prompt, positive=positive) - - # CLIP - pooled_prompt_emb_1, prompt_emb_1 = self.encode_prompt_using_clip(prompt, self.text_encoder_1, self.tokenizer_1, 77, device) - pooled_prompt_emb_2, prompt_emb_2 = self.encode_prompt_using_clip(prompt, self.text_encoder_2, self.tokenizer_2, 77, device) - - # T5 - if self.text_encoder_3 is None: - prompt_emb_3 = torch.zeros((prompt_emb_1.shape[0], t5_sequence_length, 4096), dtype=prompt_emb_1.dtype, device=device) - else: - prompt_emb_3 = self.encode_prompt_using_t5(prompt, self.text_encoder_3, self.tokenizer_3, t5_sequence_length, device) - prompt_emb_3 = prompt_emb_3.to(prompt_emb_1.dtype) # float32 -> float16 - - # Merge - prompt_emb = torch.cat([ - torch.nn.functional.pad(torch.cat([prompt_emb_1, prompt_emb_2], dim=-1), (0, 4096 - 768 - 1280)), - prompt_emb_3 - ], dim=-2) - pooled_prompt_emb = torch.cat([pooled_prompt_emb_1, pooled_prompt_emb_2], dim=-1) - - return prompt_emb, pooled_prompt_emb diff --git a/diffsynth/prompters/sd_prompter.py b/diffsynth/prompters/sd_prompter.py deleted file mode 100644 index e3b31ea..0000000 --- a/diffsynth/prompters/sd_prompter.py +++ /dev/null @@ -1,73 +0,0 @@ -from .base_prompter import BasePrompter, tokenize_long_prompt -from ..models.utils import load_state_dict, search_for_embeddings -from ..models import SDTextEncoder -from transformers import CLIPTokenizer -import torch, os - - - -class SDPrompter(BasePrompter): - def __init__(self, tokenizer_path=None): - if tokenizer_path is None: - base_path = os.path.dirname(os.path.dirname(__file__)) - tokenizer_path 
= os.path.join(base_path, "tokenizer_configs/stable_diffusion/tokenizer") - super().__init__() - self.tokenizer = CLIPTokenizer.from_pretrained(tokenizer_path) - self.text_encoder: SDTextEncoder = None - self.textual_inversion_dict = {} - self.keyword_dict = {} - - - def fetch_models(self, text_encoder: SDTextEncoder = None): - self.text_encoder = text_encoder - - - def add_textual_inversions_to_model(self, textual_inversion_dict, text_encoder): - dtype = next(iter(text_encoder.parameters())).dtype - state_dict = text_encoder.token_embedding.state_dict() - token_embeddings = [state_dict["weight"]] - for keyword in textual_inversion_dict: - _, embeddings = textual_inversion_dict[keyword] - token_embeddings.append(embeddings.to(dtype=dtype, device=token_embeddings[0].device)) - token_embeddings = torch.concat(token_embeddings, dim=0) - state_dict["weight"] = token_embeddings - text_encoder.token_embedding = torch.nn.Embedding(token_embeddings.shape[0], token_embeddings.shape[1]) - text_encoder.token_embedding = text_encoder.token_embedding.to(dtype=dtype, device=token_embeddings[0].device) - text_encoder.token_embedding.load_state_dict(state_dict) - - - def add_textual_inversions_to_tokenizer(self, textual_inversion_dict, tokenizer): - additional_tokens = [] - for keyword in textual_inversion_dict: - tokens, _ = textual_inversion_dict[keyword] - additional_tokens += tokens - self.keyword_dict[keyword] = " " + " ".join(tokens) + " " - tokenizer.add_tokens(additional_tokens) - - - def load_textual_inversions(self, model_paths): - for model_path in model_paths: - keyword = os.path.splitext(os.path.split(model_path)[-1])[0] - state_dict = load_state_dict(model_path) - - # Search for embeddings - for embeddings in search_for_embeddings(state_dict): - if len(embeddings.shape) == 2 and embeddings.shape[1] == 768: - tokens = [f"{keyword}_{i}" for i in range(embeddings.shape[0])] - self.textual_inversion_dict[keyword] = (tokens, embeddings) - - 
self.add_textual_inversions_to_model(self.textual_inversion_dict, self.text_encoder) - self.add_textual_inversions_to_tokenizer(self.textual_inversion_dict, self.tokenizer) - - - def encode_prompt(self, prompt, clip_skip=1, device="cuda", positive=True): - prompt = self.process_prompt(prompt, positive=positive) - for keyword in self.keyword_dict: - if keyword in prompt: - print(f"Textual inversion {keyword} is enabled.") - prompt = prompt.replace(keyword, self.keyword_dict[keyword]) - input_ids = tokenize_long_prompt(self.tokenizer, prompt).to(device) - prompt_emb = self.text_encoder(input_ids, clip_skip=clip_skip) - prompt_emb = prompt_emb.reshape((1, prompt_emb.shape[0]*prompt_emb.shape[1], -1)) - - return prompt_emb \ No newline at end of file diff --git a/diffsynth/prompters/sdxl_prompter.py b/diffsynth/prompters/sdxl_prompter.py deleted file mode 100644 index d841454..0000000 --- a/diffsynth/prompters/sdxl_prompter.py +++ /dev/null @@ -1,61 +0,0 @@ -from .base_prompter import BasePrompter, tokenize_long_prompt -from ..models.model_manager import ModelManager -from ..models import SDXLTextEncoder, SDXLTextEncoder2 -from transformers import CLIPTokenizer -import torch, os - - - -class SDXLPrompter(BasePrompter): - def __init__( - self, - tokenizer_path=None, - tokenizer_2_path=None - ): - if tokenizer_path is None: - base_path = os.path.dirname(os.path.dirname(__file__)) - tokenizer_path = os.path.join(base_path, "tokenizer_configs/stable_diffusion/tokenizer") - if tokenizer_2_path is None: - base_path = os.path.dirname(os.path.dirname(__file__)) - tokenizer_2_path = os.path.join(base_path, "tokenizer_configs/stable_diffusion_xl/tokenizer_2") - super().__init__() - self.tokenizer = CLIPTokenizer.from_pretrained(tokenizer_path) - self.tokenizer_2 = CLIPTokenizer.from_pretrained(tokenizer_2_path) - self.text_encoder: SDXLTextEncoder = None - self.text_encoder_2: SDXLTextEncoder2 = None - - - def fetch_models(self, text_encoder: SDXLTextEncoder = None, 
text_encoder_2: SDXLTextEncoder2 = None): - self.text_encoder = text_encoder - self.text_encoder_2 = text_encoder_2 - - - def encode_prompt( - self, - prompt, - clip_skip=1, - clip_skip_2=2, - positive=True, - device="cuda" - ): - prompt = self.process_prompt(prompt, positive=positive) - - # 1 - input_ids = tokenize_long_prompt(self.tokenizer, prompt).to(device) - prompt_emb_1 = self.text_encoder(input_ids, clip_skip=clip_skip) - - # 2 - input_ids_2 = tokenize_long_prompt(self.tokenizer_2, prompt).to(device) - add_text_embeds, prompt_emb_2 = self.text_encoder_2(input_ids_2, clip_skip=clip_skip_2) - - # Merge - if prompt_emb_1.shape[0] != prompt_emb_2.shape[0]: - max_batch_size = min(prompt_emb_1.shape[0], prompt_emb_2.shape[0]) - prompt_emb_1 = prompt_emb_1[: max_batch_size] - prompt_emb_2 = prompt_emb_2[: max_batch_size] - prompt_emb = torch.concatenate([prompt_emb_1, prompt_emb_2], dim=-1) - - # For very long prompt, we only use the first 77 tokens to compute `add_text_embeds`. - add_text_embeds = add_text_embeds[0:1] - prompt_emb = prompt_emb.reshape((1, prompt_emb.shape[0]*prompt_emb.shape[1], -1)) - return add_text_embeds, prompt_emb diff --git a/diffsynth/prompters/stepvideo_prompter.py b/diffsynth/prompters/stepvideo_prompter.py deleted file mode 100644 index 79d374b..0000000 --- a/diffsynth/prompters/stepvideo_prompter.py +++ /dev/null @@ -1,56 +0,0 @@ -from .base_prompter import BasePrompter -from ..models.hunyuan_dit_text_encoder import HunyuanDiTCLIPTextEncoder -from ..models.stepvideo_text_encoder import STEP1TextEncoder -from transformers import BertTokenizer -import os, torch - - -class StepVideoPrompter(BasePrompter): - - def __init__( - self, - tokenizer_1_path=None, - ): - if tokenizer_1_path is None: - base_path = os.path.dirname(os.path.dirname(__file__)) - tokenizer_1_path = os.path.join( - base_path, "tokenizer_configs/hunyuan_dit/tokenizer") - super().__init__() - self.tokenizer_1 = BertTokenizer.from_pretrained(tokenizer_1_path) - - def 
fetch_models(self, text_encoder_1: HunyuanDiTCLIPTextEncoder = None, text_encoder_2: STEP1TextEncoder = None): - self.text_encoder_1 = text_encoder_1 - self.text_encoder_2 = text_encoder_2 - - def encode_prompt_using_clip(self, prompt, max_length, device): - text_inputs = self.tokenizer_1( - prompt, - padding="max_length", - max_length=max_length, - truncation=True, - return_attention_mask=True, - return_tensors="pt", - ) - prompt_embeds = self.text_encoder_1( - text_inputs.input_ids.to(device), - attention_mask=text_inputs.attention_mask.to(device), - ) - return prompt_embeds - - def encode_prompt_using_llm(self, prompt, max_length, device): - y, y_mask = self.text_encoder_2(prompt, max_length=max_length, device=device) - return y, y_mask - - def encode_prompt(self, - prompt, - positive=True, - device="cuda"): - - prompt = self.process_prompt(prompt, positive=positive) - - clip_embeds = self.encode_prompt_using_clip(prompt, max_length=77, device=device) - llm_embeds, llm_mask = self.encode_prompt_using_llm(prompt, max_length=320, device=device) - - llm_mask = torch.nn.functional.pad(llm_mask, (clip_embeds.shape[1], 0), value=1) - - return clip_embeds, llm_embeds, llm_mask diff --git a/diffsynth/prompters/wan_prompter.py b/diffsynth/prompters/wan_prompter.py deleted file mode 100644 index 01a765d..0000000 --- a/diffsynth/prompters/wan_prompter.py +++ /dev/null @@ -1,109 +0,0 @@ -from .base_prompter import BasePrompter -from ..models.wan_video_text_encoder import WanTextEncoder -from transformers import AutoTokenizer -import os, torch -import ftfy -import html -import string -import regex as re - - -def basic_clean(text): - text = ftfy.fix_text(text) - text = html.unescape(html.unescape(text)) - return text.strip() - - -def whitespace_clean(text): - text = re.sub(r'\s+', ' ', text) - text = text.strip() - return text - - -def canonicalize(text, keep_punctuation_exact_string=None): - text = text.replace('_', ' ') - if keep_punctuation_exact_string: - text = 
keep_punctuation_exact_string.join( - part.translate(str.maketrans('', '', string.punctuation)) - for part in text.split(keep_punctuation_exact_string)) - else: - text = text.translate(str.maketrans('', '', string.punctuation)) - text = text.lower() - text = re.sub(r'\s+', ' ', text) - return text.strip() - - -class HuggingfaceTokenizer: - - def __init__(self, name, seq_len=None, clean=None, **kwargs): - assert clean in (None, 'whitespace', 'lower', 'canonicalize') - self.name = name - self.seq_len = seq_len - self.clean = clean - - # init tokenizer - self.tokenizer = AutoTokenizer.from_pretrained(name, **kwargs) - self.vocab_size = self.tokenizer.vocab_size - - def __call__(self, sequence, **kwargs): - return_mask = kwargs.pop('return_mask', False) - - # arguments - _kwargs = {'return_tensors': 'pt'} - if self.seq_len is not None: - _kwargs.update({ - 'padding': 'max_length', - 'truncation': True, - 'max_length': self.seq_len - }) - _kwargs.update(**kwargs) - - # tokenization - if isinstance(sequence, str): - sequence = [sequence] - if self.clean: - sequence = [self._clean(u) for u in sequence] - ids = self.tokenizer(sequence, **_kwargs) - - # output - if return_mask: - return ids.input_ids, ids.attention_mask - else: - return ids.input_ids - - def _clean(self, text): - if self.clean == 'whitespace': - text = whitespace_clean(basic_clean(text)) - elif self.clean == 'lower': - text = whitespace_clean(basic_clean(text)).lower() - elif self.clean == 'canonicalize': - text = canonicalize(basic_clean(text)) - return text - - -class WanPrompter(BasePrompter): - - def __init__(self, tokenizer_path=None, text_len=512): - super().__init__() - self.text_len = text_len - self.text_encoder = None - self.fetch_tokenizer(tokenizer_path) - - def fetch_tokenizer(self, tokenizer_path=None): - if tokenizer_path is not None: - self.tokenizer = HuggingfaceTokenizer(name=tokenizer_path, seq_len=self.text_len, clean='whitespace') - - def fetch_models(self, text_encoder: WanTextEncoder 
= None): - self.text_encoder = text_encoder - - def encode_prompt(self, prompt, positive=True, device="cuda"): - prompt = self.process_prompt(prompt, positive=positive) - - ids, mask = self.tokenizer(prompt, return_mask=True, add_special_tokens=True) - ids = ids.to(device) - mask = mask.to(device) - seq_lens = mask.gt(0).sum(dim=1).long() - prompt_emb = self.text_encoder(ids, mask) - for i, v in enumerate(seq_lens): - prompt_emb[:, v:] = 0 - return prompt_emb diff --git a/diffsynth/schedulers/__init__.py b/diffsynth/schedulers/__init__.py deleted file mode 100644 index 0ec4325..0000000 --- a/diffsynth/schedulers/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .ddim import EnhancedDDIMScheduler -from .continuous_ode import ContinuousODEScheduler -from .flow_match import FlowMatchScheduler diff --git a/diffsynth/schedulers/continuous_ode.py b/diffsynth/schedulers/continuous_ode.py deleted file mode 100644 index c73b9e2..0000000 --- a/diffsynth/schedulers/continuous_ode.py +++ /dev/null @@ -1,59 +0,0 @@ -import torch - - -class ContinuousODEScheduler(): - - def __init__(self, num_inference_steps=100, sigma_max=700.0, sigma_min=0.002, rho=7.0): - self.sigma_max = sigma_max - self.sigma_min = sigma_min - self.rho = rho - self.set_timesteps(num_inference_steps) - - - def set_timesteps(self, num_inference_steps=100, denoising_strength=1.0, **kwargs): - ramp = torch.linspace(1-denoising_strength, 1, num_inference_steps) - min_inv_rho = torch.pow(torch.tensor((self.sigma_min,)), (1 / self.rho)) - max_inv_rho = torch.pow(torch.tensor((self.sigma_max,)), (1 / self.rho)) - self.sigmas = torch.pow(max_inv_rho + ramp * (min_inv_rho - max_inv_rho), self.rho) - self.timesteps = torch.log(self.sigmas) * 0.25 - - - def step(self, model_output, timestep, sample, to_final=False): - timestep_id = torch.argmin((self.timesteps - timestep).abs()) - sigma = self.sigmas[timestep_id] - sample *= (sigma*sigma + 1).sqrt() - estimated_sample = -sigma / (sigma*sigma + 1).sqrt() * model_output + 
1 / (sigma*sigma + 1) * sample - if to_final or timestep_id + 1 >= len(self.timesteps): - prev_sample = estimated_sample - else: - sigma_ = self.sigmas[timestep_id + 1] - derivative = 1 / sigma * (sample - estimated_sample) - prev_sample = sample + derivative * (sigma_ - sigma) - prev_sample /= (sigma_*sigma_ + 1).sqrt() - return prev_sample - - - def return_to_timestep(self, timestep, sample, sample_stablized): - # This scheduler doesn't support this function. - pass - - - def add_noise(self, original_samples, noise, timestep): - timestep_id = torch.argmin((self.timesteps - timestep).abs()) - sigma = self.sigmas[timestep_id] - sample = (original_samples + noise * sigma) / (sigma*sigma + 1).sqrt() - return sample - - - def training_target(self, sample, noise, timestep): - timestep_id = torch.argmin((self.timesteps - timestep).abs()) - sigma = self.sigmas[timestep_id] - target = (-(sigma*sigma + 1).sqrt() / sigma + 1 / (sigma*sigma + 1).sqrt() / sigma) * sample + 1 / (sigma*sigma + 1).sqrt() * noise - return target - - - def training_weight(self, timestep): - timestep_id = torch.argmin((self.timesteps - timestep).abs()) - sigma = self.sigmas[timestep_id] - weight = (1 + sigma*sigma).sqrt() / sigma - return weight diff --git a/diffsynth/schedulers/ddim.py b/diffsynth/schedulers/ddim.py deleted file mode 100644 index da52496..0000000 --- a/diffsynth/schedulers/ddim.py +++ /dev/null @@ -1,105 +0,0 @@ -import torch, math - - -class EnhancedDDIMScheduler(): - - def __init__(self, num_train_timesteps=1000, beta_start=0.00085, beta_end=0.012, beta_schedule="scaled_linear", prediction_type="epsilon", rescale_zero_terminal_snr=False): - self.num_train_timesteps = num_train_timesteps - if beta_schedule == "scaled_linear": - betas = torch.square(torch.linspace(math.sqrt(beta_start), math.sqrt(beta_end), num_train_timesteps, dtype=torch.float32)) - elif beta_schedule == "linear": - betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32) - else: - 
raise NotImplementedError(f"{beta_schedule} is not implemented") - self.alphas_cumprod = torch.cumprod(1.0 - betas, dim=0) - if rescale_zero_terminal_snr: - self.alphas_cumprod = self.rescale_zero_terminal_snr(self.alphas_cumprod) - self.alphas_cumprod = self.alphas_cumprod.tolist() - self.set_timesteps(10) - self.prediction_type = prediction_type - - - def rescale_zero_terminal_snr(self, alphas_cumprod): - alphas_bar_sqrt = alphas_cumprod.sqrt() - - # Store old values. - alphas_bar_sqrt_0 = alphas_bar_sqrt[0].clone() - alphas_bar_sqrt_T = alphas_bar_sqrt[-1].clone() - - # Shift so the last timestep is zero. - alphas_bar_sqrt -= alphas_bar_sqrt_T - - # Scale so the first timestep is back to the old value. - alphas_bar_sqrt *= alphas_bar_sqrt_0 / (alphas_bar_sqrt_0 - alphas_bar_sqrt_T) - - # Convert alphas_bar_sqrt to betas - alphas_bar = alphas_bar_sqrt.square() # Revert sqrt - - return alphas_bar - - - def set_timesteps(self, num_inference_steps, denoising_strength=1.0, **kwargs): - # The timesteps are aligned to 999...0, which is different from other implementations, - # but I think this implementation is more reasonable in theory. 
- max_timestep = max(round(self.num_train_timesteps * denoising_strength) - 1, 0) - num_inference_steps = min(num_inference_steps, max_timestep + 1) - if num_inference_steps == 1: - self.timesteps = torch.Tensor([max_timestep]) - else: - step_length = max_timestep / (num_inference_steps - 1) - self.timesteps = torch.Tensor([round(max_timestep - i*step_length) for i in range(num_inference_steps)]) - - - def denoise(self, model_output, sample, alpha_prod_t, alpha_prod_t_prev): - if self.prediction_type == "epsilon": - weight_e = math.sqrt(1 - alpha_prod_t_prev) - math.sqrt(alpha_prod_t_prev * (1 - alpha_prod_t) / alpha_prod_t) - weight_x = math.sqrt(alpha_prod_t_prev / alpha_prod_t) - prev_sample = sample * weight_x + model_output * weight_e - elif self.prediction_type == "v_prediction": - weight_e = -math.sqrt(alpha_prod_t_prev * (1 - alpha_prod_t)) + math.sqrt(alpha_prod_t * (1 - alpha_prod_t_prev)) - weight_x = math.sqrt(alpha_prod_t * alpha_prod_t_prev) + math.sqrt((1 - alpha_prod_t) * (1 - alpha_prod_t_prev)) - prev_sample = sample * weight_x + model_output * weight_e - else: - raise NotImplementedError(f"{self.prediction_type} is not implemented") - return prev_sample - - - def step(self, model_output, timestep, sample, to_final=False): - alpha_prod_t = self.alphas_cumprod[int(timestep.flatten().tolist()[0])] - if isinstance(timestep, torch.Tensor): - timestep = timestep.cpu() - timestep_id = torch.argmin((self.timesteps - timestep).abs()) - if to_final or timestep_id + 1 >= len(self.timesteps): - alpha_prod_t_prev = 1.0 - else: - timestep_prev = int(self.timesteps[timestep_id + 1]) - alpha_prod_t_prev = self.alphas_cumprod[timestep_prev] - - return self.denoise(model_output, sample, alpha_prod_t, alpha_prod_t_prev) - - - def return_to_timestep(self, timestep, sample, sample_stablized): - alpha_prod_t = self.alphas_cumprod[int(timestep.flatten().tolist()[0])] - noise_pred = (sample - math.sqrt(alpha_prod_t) * sample_stablized) / math.sqrt(1 - alpha_prod_t) - 
return noise_pred - - - def add_noise(self, original_samples, noise, timestep): - sqrt_alpha_prod = math.sqrt(self.alphas_cumprod[int(timestep.flatten().tolist()[0])]) - sqrt_one_minus_alpha_prod = math.sqrt(1 - self.alphas_cumprod[int(timestep.flatten().tolist()[0])]) - noisy_samples = sqrt_alpha_prod * original_samples + sqrt_one_minus_alpha_prod * noise - return noisy_samples - - - def training_target(self, sample, noise, timestep): - if self.prediction_type == "epsilon": - return noise - else: - sqrt_alpha_prod = math.sqrt(self.alphas_cumprod[int(timestep.flatten().tolist()[0])]) - sqrt_one_minus_alpha_prod = math.sqrt(1 - self.alphas_cumprod[int(timestep.flatten().tolist()[0])]) - target = sqrt_alpha_prod * noise - sqrt_one_minus_alpha_prod * sample - return target - - - def training_weight(self, timestep): - return 1.0 diff --git a/diffsynth/tokenizer_configs/cog/tokenizer/added_tokens.json b/diffsynth/tokenizer_configs/cog/tokenizer/added_tokens.json deleted file mode 100644 index 3f51320..0000000 --- a/diffsynth/tokenizer_configs/cog/tokenizer/added_tokens.json +++ /dev/null @@ -1,102 +0,0 @@ -{ - "": 32099, - "": 32089, - "": 32088, - "": 32087, - "": 32086, - "": 32085, - "": 32084, - "": 32083, - "": 32082, - "": 32081, - "": 32080, - "": 32098, - "": 32079, - "": 32078, - "": 32077, - "": 32076, - "": 32075, - "": 32074, - "": 32073, - "": 32072, - "": 32071, - "": 32070, - "": 32097, - "": 32069, - "": 32068, - "": 32067, - "": 32066, - "": 32065, - "": 32064, - "": 32063, - "": 32062, - "": 32061, - "": 32060, - "": 32096, - "": 32059, - "": 32058, - "": 32057, - "": 32056, - "": 32055, - "": 32054, - "": 32053, - "": 32052, - "": 32051, - "": 32050, - "": 32095, - "": 32049, - "": 32048, - "": 32047, - "": 32046, - "": 32045, - "": 32044, - "": 32043, - "": 32042, - "": 32041, - "": 32040, - "": 32094, - "": 32039, - "": 32038, - "": 32037, - "": 32036, - "": 32035, - "": 32034, - "": 32033, - "": 32032, - "": 32031, - "": 32030, - "": 32093, - "": 
32029, - "": 32028, - "": 32027, - "": 32026, - "": 32025, - "": 32024, - "": 32023, - "": 32022, - "": 32021, - "": 32020, - "": 32092, - "": 32019, - "": 32018, - "": 32017, - "": 32016, - "": 32015, - "": 32014, - "": 32013, - "": 32012, - "": 32011, - "": 32010, - "": 32091, - "": 32009, - "": 32008, - "": 32007, - "": 32006, - "": 32005, - "": 32004, - "": 32003, - "": 32002, - "": 32001, - "": 32000, - "": 32090 -} diff --git a/diffsynth/tokenizer_configs/cog/tokenizer/special_tokens_map.json b/diffsynth/tokenizer_configs/cog/tokenizer/special_tokens_map.json deleted file mode 100644 index 17ade34..0000000 --- a/diffsynth/tokenizer_configs/cog/tokenizer/special_tokens_map.json +++ /dev/null @@ -1,125 +0,0 @@ -{ - "additional_special_tokens": [ - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "" - ], - "eos_token": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false - }, - "pad_token": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false - }, - "unk_token": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false - } -} diff --git a/diffsynth/tokenizer_configs/cog/tokenizer/spiece.model b/diffsynth/tokenizer_configs/cog/tokenizer/spiece.model deleted file mode 100644 index 4e28ff6ebdf584f5372d9de68867399142435d9a..0000000000000000000000000000000000000000 GIT binary patch literal 
0 HcmV?d00001 literal 791656 zcmYhEd7zY2AIGh!Ys>Nr2`!XY`!31bZ7;2Ei&DJL%sltbbmz|8=9#&-TcOCllQmhg zhJ=U+QCYKlkz|QimXIV;ct5}A_sqHGuk-yrXZfAq*?(u5k^@TiKDpXUoFcpNJABWQ z{Y&;bxntmF{|hbhVR#SENE$ zAg8`lw~{Cu5GJ4WrCa9g@Pj8ak32tgoh0r1T~YMnWfIb=2Do3z-Wnn+*}5x>C|fS& zBd=XqazM#Gd@E0-({gn=7%6<&deikqf2oXA?YcSS^D=keg`umx9Mj@%=Od7J4_Z`2 z3152b`Cw6m@@!6SxbC5qVMb@)EbA{?97d3mLmDk9CZ11vdAYj!v0|XQY(dVGrJ=`! z{4^2Qz9oAh%r$bj(+THeS zF$FK@OW7Mh(@ZKamo|QGWl5uwJ^YMx9EQ_A#C%yl4`eDIi^}^x{Cx4F7s!wgUMPx# z&^h_K_v$dQw7gV<%cMSDKK^!1F%bGV;JKHJfs$U0FNIfMDSkx{_s@8(__aKjt&&qa zz3#qdWNTNDNmEPulY0QI1oO#MAYF%mH5WO#<6;*s?a4bgfhgK4d35)+p(`hs?1zgA zn95Yh)WdLFym2ohbK>Tyh5S78jbaM5T1N(41CEqaCtX^+8AkSRx$*gTixFh9d0BGM zd!ZocdEtjRN%~*1>dg0xVQYQ4>pBp8ooxFVmnB}H%C>&`!y-(Q#~aE%DnbtdHqCP| zk@Ds9d)F7g#tM0PVXqBEsO9nWQNROB8d`fveLaS3iWS3&Oj7#HfMjePtiP751k<>Zl{7GtKJ*2&oWKP$$S&G<4d z`FT++FP25@+lFo|hOVlUKSqLxwvd)Pb2u#nwaHXGDO;O<8Gc)Jhji$Q(~=yTk)ho| z$k$O$J;}UKTU$;tvRr0f;5?^&>9hc3dDG-u9raaFTrZ9IqX&VDSr4T5UjK6m<>iP@ z4i&29gY!YA(3W!lMS!R}rmB3o>M<}GS$)3r++faBxoqC-e2vvgyI0Gvi_uT;W%uJY zh54wKL)zi8{3Wt7cLYeg%h}h-jEixbd>d!qDxcg2BpKTMCy(Pq3rX=?eNzk_q-4v- zAiR+G|IPch7&z?*^8B9P72&FfGqR(xgDTrO2?uMFSy|Wn`(g&usY+jZZUmEDOd9OD zx%j0vNhfmC31GhTmajXyFEO7ltIh&jk!GnJIe9Jvp|aIU<2x->OL=dfAHwhzGWq~q zriOSywhjfEisRag4g0|1TJ1*9UJo|aWc*sWXBSXalasZ5e=LR!vJ_6)D`4|k;pb&n z*-ypKwLTHQ^t8iM3qf8kd=c!L5+45jiX&>Xg>*u8z5_v)%jBK!Tv&2gC0=$*F)1%o zC&TXtBGkV+NX2=UA#jn{KisIP-Y^h9S5Ng_JTM;I+wNf)4 zXkjy1c`N~HT+)cXrP|L{Q?r5@FyxD5R?Wc~*472G^LC?GEs}Xl&B=iC;0rh{vZ3X& z|BE1WS1IFL{aWO!QF12`QBoDVOu`vy08z(Z4z!HbW^ov)xJ=NvFZ0)d zEWTH6PDziSfhKp-Pgl$IMt>G@%%@Zr><`vP*iH^>>X#o$AH*{u`E;tqUYN+$y?aQUhg zIr(jt`|K6U<@a-dCSX^YIU9gln8qeA#z`AiZC%rFw-=%)VqUybRyT7vC@?I4*Ai^S z8zjTtuyrRG6fj-x?SsoQ1FHs)3r;180Jf zOQ-jnJac#G&_Tw{bpDds!OxoqCM)!%=iiFc67XqHyS?Om`ZYAD;V-+-(+a*cUF%?i zx_82R&UapW*XflYT6IiLP~Nz!?sTzok@1Gc6X^W&a&NF&aJl9B^xeW?r!kSZ^q zqD75=0GeVhqzfteafyqjJcS;ozYeCf`pby#aYc298Ptf~_jn885OcOEH8U!J=MY-z5!JtcQN3Ou-EU-BL+1b7Gn+4m(l(_t=Ek<3fU%Px+vai!jI 
z9%4jVA$R-=VWRbup1--T`D7OTzWh6c)%BQP4wVK{c_zqWA0gje;sW{}ZKLl) zU`t^d#XRvDpk=)}?a@gtei;I3;;gKmTx)Kv!$0r4S5bgKr`aP82V20gp^kp;1ffNc zlVl&9mPQ#Xkky`p*;>}2)j-P%rJk1?r-L>Bq|)wO+!iZiHdCfQ=RlB`D*bxc`So9u zDF(B)%OEVFe$3L}T@Ez8dkiWl`jEFRIHf&YPJRb~gbazNZ@?MJkOmLjWACCm$7bb= zy+EeG?s8uPfO$<)Ipl~V98PDc3hkSLDWd_h&oM680NJ3xWT9E6nPeHV#pxceJ=Skbga9c(^?InL)=5RWjt(5CucR0qpTV{P~VbgS3PpI3%CbSI8 zzO-+78uhlXaYxzk@;xN^wPNp{nqrDOGY6};zn&IyE z*3M(X%X{)^Td?V3thDarKBk$L$^+fOmT$UiW-c@PK$uLOJITO)_1-=DogD)m4m`$z zrKh^!+9g!T_hrsQd5-b%x=X>P<^;3#11@v^gS@W!hhGJOnmzj4_SczP$Enlr#a--s z^R>FTnYkQ_43C!6SKy*LVnTUo9ZoA-uf|KI! zpQ1tr%l5{ALreCja`@A${anh=P=Dqg4Qpzt$ZEcl9U-hVf)uiT-4Bc~Q)R#qT$%$q ztJ+bxEw=&dp7$>@PhF(PJY3NjUdPx{B|~n4F~L(+)!AHLyNWG$xbWqyuo^!Fw6Z8q z$;FCDNc)gmp22P6Owgs-dGCO&Z;qM(^`KG1VjgJF%ndFqfhYz-q4(_vHldiyYma|l zGZ>S9BN3?e?*bXlFt)}$Cr8mFMp!yp--cAoP-qsgz+z)_cyW}X>H&2$*c z&H!JoodvSA;*8FnFK{r)X!zB&K&!Nau#W&NDs7wFW(rj?lyL6`7z7?Gy}onb5`~;b(ClZ&lzUYvdY(nUIVPRT=jk|)-w9Lf zD71zrrBRWnr55L8YZC~|m{!0QM*%I@r5U-kwV{}&3aL>tn*2Ym*Lew zDwa?72IrjDtk#*cXM%NXDoElY7mB%%G`$#T^025+P@kI2b72!%%pZr|0Jic_gQ{AC z7AR%PU=2L_W*0@m4^kBwS-HS{ukmE#5|C*!!EEonbwJbBD0z631LLIm9{?SnswLOB zq!`9$7`v<$(26UajbjSfy#s{lBI8fbOOIX-Glj%rTI#~5F*dH705pM5^QFTi_c5QX zmggorIEv}#{PTd8R#2t(#9c~QTIB`sAD4rz)~cB#tjp4IJ|x;NPyzq<0B*~04P(c` zH9+d>K>15u7E?uvZhPN%z?M{1IPXfl2VuJQFp{e6=h9E1OkF*s?kj-BT#6(Qf9<@} zlH)p+Yw-;fi4B+czsF^fmxptKb^GjFOr1_XA?Xrul+l!Smz$1p9ww;ie6JQ@;yP6} zw8o_+QzcV+;*2T`o>(iCoaBO4t2JQYNU#Zs0ZOi(2n;pj%eFLb%gji5tJ;BK(j)+o zLMkqs&c;cyr^&o4aS>FT$@O=c2TJIVdvRIeOMxa)bs?4}hSAGlOwX0RUo9`K z0b8;|B>pnMvYn;E=j4Qs9TDVpLe}te2xReR$gRKPw4^r9zDY*^4b;5kQgY#5`xWa{ zmrl~Q#Gz3cDL()drJ++58Q2}d3U*q-qZ6tVzmKETJfl6=r(;|sBeT+>3}m?&tD{3n zg~Js(YOk6HHrbezOOqRbmS#SytFkk0b3{gG1#-tS7Xd?YrF``a*jhzdURJDfU(0=; zCiWWGqOYdSU;jQZ>IVE+IC}2(F%$(gQBL^Gg(c-c4*T4Breap)7)Xx((os6w((Eq! 
z4a$n;R5^GDAR7Df1v(-OPxtJEvyzSL+72V%pY;sJI(o$!yX{}pQcP!YYjy`yM5^Mp zd*hBeZD!B=f-D8KD(60Sl=%B#Roofs>k$TGBhq;eBg_R51fh{`FTOC0rT$LB* zXL&dGy`L|q^#Dc5<+6!FoDBV`oE2%Dp+nB@<>Cn9$!s<)eNKiktp}80^PxcGVOrwJ ziK79Q{s`*N1dx??HItjX9C(Hcij7|GI}>Q>$5|{Sm@muz9$chQY;Q;~cn*w7gju3i zV%LC80Vm2GHvlXNCad`VUI~++^NBL(HupL01~zL7wydiGGLXxZ(7YvO zd<&dW$+Bq8W~dEUwzSW*%Z}}FMk>uxE=#+BO=(lv)EWaa5i*GIahZb^3{=&pJHG?6 z^6(iT?f>ZIUZ0HH^p~VE(mmf7gR$HP)qXsM6m8CiLdXs+W#g3969N|@XboYPHcOlBvjyL{RhpRm*LC1w) z(@s9izJU&6M=XLv1pWTt1aW3y{lVSMd9RErANw z>D97OiDHaYp}N6ft!%7ehTre?cEexTZA5`UFZIV(BfHtRLjZjs{X!rS49@6qfgUx%W=8|im5bVXpvfHgVEsM1G zS04gdax~348SxxAs;*v*_V)jJ9>y|Von@{mQ(tpIn9`-?w+&!RF05$?J@Zo+3${C5 zynMU8K9F`1mmb)-SV_jqI|l=->gDLJyc~ryrWu@x*}04ScofLAP>a3bn-;*ZY2;*5 zOWY=(P3dZSIl>Dq9iI+@peVY@E_GS%ym&b8y0FZ7PBYl1<*$08I;X~rnkmkQ+T}9R z(t|LS1-c0Py%KDaj?2idD(Ah*S3~Cwvmi+Qc&VF<%Q6v5Rmj@Q9UM%1oO3V|%DRk{5Wr{4rYU9nzq6Z+%ESk_fWUE7~(~yUtEkd=GO0&fdGb$w{ z`LM$b$qD)45paFA$uX)OyUbBkzHl<~%(E^OmOggqR)Z~5Q?oI7<~{d0>CwKw+vGmi zGO1krz55&vjs1UepS?sv`fmYR)@rhO8h!s?od2}WydP0)9&+I{qV0A^l!hkwtP&>A za8^MZesi##^gr0=ps?a-Kh?YuZu6PNVz;Xq(6UeCEZ0G4QGvP*2T_+eSLMNHm1D7Qgs8SgLHukoljMp-CIBrV5 zs!EogiPP*O<1#29#{{lUUiV;45>85x@I&PgCTRbxyq9vHwe1BmYbr#s+UJ-<$@WT^ zsLiIeHq=SUG$^|5b{l6ekdhq0>P{@n$6sfHO;-aja@Cvk5oaTE73Y&Ul`1=p*rN;V(qh!}0Q zR-S1ELH!sdCme^%M$ld|s*CfNrfbBA(FcO8>Wco`Q`|>22xX;a7=%ge=i~C;M4(lS z7ax>b&wZT1@Yc%*nkFi}Oh)=7!KUS^RHl$eStnOI%Af9q#d3YE3q`#S?~ceeo$09u7|T^nEf%Euaetugd*$1sM|RNIPOX#H3!tKY zOvkbd?r=C?$4>f%iqVJG!{zFB<01A&nuo;ha{)p-+v86<9M|TQT?MwHi6?#b055pi z5sA?Lc;;&m6bPH-EQZ$tEQ{sTg4XW=E&l^E()&Y@3ClTyoLsTqLXvQW^x6clgaUd; zU%vVVLOTqiJ@;GOrYdsDViN;c$K4Jo2B`7UY~VHA9fHa_BPB2Hh11gYVzR0c$Qm5x zYu%EWy&sH8!fBX5Zd7cEXTr+;-r+8S0hpSO0a>Xq!_4Kek6qu~g`noqR1R+BPQ0zl2>F2^Qz-DAbe=xe$UO|2x0x%)*O)dtG zbm2q0N+6}99HY|@VK;;&}T^!ywkg~Awm$XuMJkgWCs&s<`mDwsF) zxg57eTDfXjMyC>^uYt0Nv z$kaKDaa*-0t&;1X0GT}4@G`PxCD>%psiDpu?s?q;>vS~tDQ>HSu>xyPt+Xe74rgV` z7FyaN@P53v5)XY*kL_Y5{aXHLE*rQ*b^EH`zO>k$t)I9?U~D3DUp 
zDe5iH*9Y|zDNjbe0Ab_n8S>cM04owQ$<&V#;kS?Ata8SDuaHk+K$gQlbJ3_3nDKuj zgh`MfJEOk=T4kuvGiRBwGW7h;dCD*zY}(=?@#!+t(qyY6^4UDf?eqtPl|x4BC)5_D zOt}ooSSEIYoya=y2QqV)3s&RhQaUnUx|u-m5iRx$;drNLs$nhG%c?-2br|xx_gJkMOwHJoWMZ*SuG(fg;=3Z za>oKq`q2JSPKWzh8#ogy6$JDLGO;sElxMXM9^cJjCMO9w<3zB@6<~L)lZuldELxTl zsqkb-w^A1Z*>q=U)qDLR7JBl$J~9L0I)I&!!n!w43o4Ko)xlKSx*k!2^zA&Y)}Ekg=FJ z6wC1a*W)hgShz`d_%aBSGUsy;BggnV@kuz^+M2skGV~eTR%~p4bB^{|FdgYx;y-7; zwB;{-0k_qmgeUi`a%fzcoc%J$a9c}L) zD`mN$4Cv`D`4Ga2y%N({&yU@AU4#a5)F%)oG{)ONhJEI6mrfW=f7|2&gk7oZyBWgd zADEMaeg=_$J+(AuwD)NA@fXh@7@n*)Yb~Qur|~+b`*<<6yt|{a*ze!8ps<5G1h0@w4~I}m8BpR! zx{slqS=t^0VOn5>&dFD;z$VLR8x{Z6!Gfz%yImI-JRNSl4phRtlb9Oir<`cswPyX+ z2e;Lmk>MFD`h~WqxF~{r9kuPez7WMW&+6v#{{QgcaVlfxP)BfR9(8Y-0$~M#iLxZ_ z!k6ptoSo`$jLCy+QfxxljwhA(%1Y;_R=dNQS+K=8z>|t;AX7P0xNxPuqu>HC__1wP z1GXZaK?mRF9H8l(=7ET-l|bjTb-C<3+$KOgntjUhi(P3l%%T7vJ>G^CA zBJ0`r!bFWi4}jdi*kQH@6Vmlbhx_X)YTLh0}S%F zA`2d4V3O;Ow&3Anap5uMm6=~zN;)?OS!7ub8is@VS*_tv;feD43An7(7?|mm6D(e3 zaR*pRNv(Kabi!@w(k;_Kx}OALn#fk$)PIpurt}f4sqzCI9I5*&gMp^ZoFASoxoId& zSPOFU@o3x@f3l#bKCU<&!USfV*9lTWE|~;p^|4=_{Bjm3Djhv`6o{9AAwxYS=eut@ zo}$`DX&BR0mCt0emtu>ndnajBRsr8=aSYcp~fe z+aXNcDo?i6qfuO>dVp`FJW1LtalShDlH;EQ*&HIJ3yN@#GUq8s%L+%iY@zb-vv5UC zhNk{Zb8vqZC~7M@wUf_QyGWvK7nHxod5Wu*OO9U)LGkKEo-J?UwiLW{mF^yV z`xXpUJ*E0hybrM4b?zoR*MaCl83@X zQs`QGLlbZ`Rij;H!abmxBOqP%RSQ`l+SvMHMav6jt{}_FSLPgc3C-zd1b+*~? z(3)+beRC?bDJw=fE*}ZBte2}HtmPOm^)H+fFCFjv4nUmiCplQdTJw`LfGAZ@lk}gA z+r-n|I$z>bAWXdKY;}R%cxmf7KV?{W*$7KOATOf;qsr9s`p@A;aL)>eu-B(4bW8yv zm5~6_oH-SSmP=7F?wbZSAsG=#^3yzoDQp;Xw1WGb zs?vSot+f!SKk3QX>FyhS19h%*T|I&EJtHy`0%;jnCdsq20jA^xN}fFjXo3vV`>_{Z z0ygC&d=@&KjaYjnl*y(?X*dCYHAJzcU^dmM=Iz(PX_nbaIdq=;&glh?|1eS6?`AlQ zw~A_|$I^P=;=F0^6Pjh#*K^!6V2r@uiUs&PtfVpGb8>@`p*WRYey@v@1syZ6g#3Iz zl*wA{>E!PhC9H5%QVL|sBABRJsR{V{#b8rN7Sq6mkGRj_ykz83a8z4zsTv&|#w~{` zHaP^+3F(5T;mCiC+O%XveH0!k>{?lm^9-{;zW}E2xcu?QYMhpM8AIsiSAZrh+1Ha? 
z1J^o6+cxrl^aczm>4an5ySS|*tIn!nQT4UuJxJ3*j1eRuZ+zg0@|4f6+Y{?tFlGu~ zJ~TBTVPDjG>~wQ_o(eqrsvS)ss-FnIrFB68&wJ4{SIYH zu%j{BWKaGZ5>dG0Il93SMIHA?$3a7&dH3nfY@WB~07#Uo8;aW-?8brJixK_#_BZP9d<(jA^~0tozaWU&AyRkMWaTNWE22 zC*LVyc~1K*sM_^$!PG-49(I%4l{H^geljP+P64AeJ=K0=Kip9}@pEyVXYV(_c`x@E zNnRZYHhGgwvg>5nU@Ld&W<6Srl6u|@0MhO=Ny4WZtsKR+u)0KUsZ9_}cAY~sqrF&6=2u^xqZFYY3U zhlln8IiSLMVw09)l+G!~=$$bd!T6~z7_%q3q(GikDoQI$ZVQ!Q%gIRgk&@Ci4H1rF z97dm81+c8J|EF2mw;IM2L1{_rz_1oUb)Ydv&2W0%Lf;NOnNiE z$+Z{5nDlBcn|_t^kn_V8?YJ9YBK2VLk_|UHA1oB}G>W6NO&>E~WMQtFY)p?Ns-_h1-N;Skij~-@FTBX|g&`%h(Sc4zEJAUgtto zWn((>Uh^r86}21FzfjKN#j>C$65jvZ#gj^+jsw1}_mRx%+~X%@kc&xxocx`+)fAuj z(fJ&l&FaQzgP&k*;^}8^jQ!Vcd5Py@>m>fB1N{XgnT^}&f=u=QOnYl zX)s8DF|0v=(=w>@%fs_PYMsa4^d-}AlQM@{ax!fuz!DmwM$8w^HII5GqU&tjRuxA0 z^5+H4C)e2%^3-MENdFkWDrM_c&O58f!>$II0CCQ+ym-Cytyh=>x$<@h%aQIzbBZM= zP49y-$H**gV?GA5S~q)l9!=V}acD3>^6g*IS)m0GKio^neU_7b{~WMq7(l z(%*D`5eh9ZIguk*<2LzfSj1fWGSKpxNXb@37Pc~#XMvKDwXZlT&+9LtJkh-SP?8 zB+_j(ov_?qPkB||3~grLC`AT!KDOJJxKU3BJtubacL2++E-#Xs?HixkN#Cu2sJiQE z1~msX{RPglrl%ui-8P3O=(N4;Plvf{%3X=Qk96@fAzIP^XuCn)5;j~D$mX}}Z8Qr-c)!XZtiZt`+v}|qULg~~8NoMWuBF1t{+HWgC z$zngE7#|Rjm66_Z)mds@4mk`GRi*Nb(@T#6(1OdGdkn4h(^D5 z$8D8B8VrUo)dbNriW%WhNQ-tf#XQV;@o7|ce)Wvukffz=Uc4{@w-vND5mIkA8iv$c zc92WQ25q6M&Xhw@xx;63D9M&~OcpJ$L*Wj*Co^328hT&U90# zrY}F?Kj5epPN(gpwXK}5q6Gn1c^E1~ zFKJeT%VebnC*-*4V3W0T2l-~U`^r98MoQ;^BWZQ$2;`HwFo<$e$Br`S5_9W_bKljt zO{{W{i*vf^^}#jvJ}#GfsS1v zX-S6NhR*hyJcvir&(JD4cXlzaza3hVDlpUn3vVn6%6#dP(*eFbdOilOMLmZ3@P+63%`6Hc(cCBXaUnM^L2h@ao51T_7yBM3!0J zzq&gQs}|`XcW^HV(`PI@T}t}^EvvL-8lYVDZ|8wiDRN|AAoZQ26IieN15BRiypr59 z0Lmmth2#7B;SeSP9biIUALGL4ri9k3UQ--}%{N@NU#^tta}f5y@WNr+GhIw0bWdrO z2iC!wQ#GVB>@fprVcq+blgjmEqdUT|^l@ zRFszaSHPIadhaffo!2@qvOQvuEl(WM^vm@bv2nLKF9A)i-JL*c zkRCc4ewXuZ_Hf41h02+1MZ0+*ZSRIONvc*ozn1-a;PqA(Uwu3u3`c$II72 zl1>fgNglmaT#CHp8QzSZFpk#*UW`8$c#ycrgk?@K-P< z1?NS>TbtFJp-@A3SUvHR3&HfTfIgRPx6j0Qg7nz|usjW*w;rv~l%yvncxvb`h^TmU 
z+1Fv8qr$ku7ePMX7ht}5*o0Z;^u`coK4H)|yB;-u|AU~(BPW_gXeJDyJ$w~iep_zhjr~HFLeUjX{*UGiZPd}mL7^t$=O^5_eW+})9RRXk&q6`yEf$l^0=ZeD;1lp{i$o7 zl%$=9(Q5U0rpkFxedMLtG)Kgk2PC+31z~kM>`hsk>Uj2G0UB{D7z-BF;x2ZRzPzK* zq}K_O%&l{{mNqFRGyaJy<+GV!qDpeHf7Wc=)+vqTP{xHI6Ms$Cbi2pO6(72f!}M9c^Vfr|lIfc-DNGew8GlyVV&!nVR=Yz#fr&~f zyk*ky(|-_ITH_C2Kv>o}ES!+DHiAu8rL>6WH`V*+G)h_$-#CIopo(0pge5wh%^Bt} zZ~o{Q<|}&Jc(0#eEYk6m)1N=PIP?%c3jG~|szo!U@t>_IDte6Da;{=aK{w-wZ}J~5 zg2CY(?je7PShX@J|(9-$kr zdzaWZeWCQ){anC`f^K`h1Swhe27R8ahWaj6ZV-%-lKutm=Ck(Tvd zXb@WSUOCQDEMYLL`?BbGD6*=DK%P`Lf`;bP4gUg|mN*bB51!zH+FhjS@Maw zo+nlFg|75lOoBxv;X2?Q4`5=lZVpdj9a0Wuqqm;R3Wt^blW-O{Q+a)1p?exe>r)Vy zv#N2MOnx1&D^3HN5>-ic^1?ah7n`;Gp>9$Sx6M1v0a&dgFQ1+VvV`gG^5H%7yU&NS zxbj(j&@Vj~!UU#=V3`Fd_5>7xp-Aa0C`5y826$un5D400MdVMi?vF$iTozGHwAR z=|=AYNl!~PtL~^%^kky&lA;;7

l4{)H$osCy`k*$ zC2o_heS1lN1FEmm)mX^Bg|X}wXNbg5p)3-5mD2Y+7eS9sa?)lOK|s4_Ovr6<%m91y z?G0tDE~Pp5pjNiZ-=QpH`Y2;uxkq=vndz|poTRRf%6Gc(Ilm%2o0I+%iX7=Xn*08R z+p^zJw_4i&4Mx_}uo3ThOtF;NO_#s-0-9n5Yab=6_lB^-@wrx-t@BB)%|4K(6T3kn zKQwgV)O5?dqXa^$7xn%9{c&6FB725p4uvv_)E+0h>Tyihh<}Oup3q0gUTFrl6iW*g zdMx72V_-}$c2%_3V*)j#Ii%IuQl6eZv89WpRtJh(TDvge<8$E?2#t=1L{c_><%AP( zM?EExhF3UOw}rDfS&2xlJjGg}VGU9xqq~AFw>I+RWuj6R?MQaB!)~Z^ zH#p0Ym9IS6(?x^@qYDq=f-T{p^f!0+1r{gAbmLf?f+UNjX8l}5;fXF^yg@Kp^tlQ- zSKTHZ-9TPm9_etcK0vhRD6nM*n>ORmJ!KFkD;Mi+aeLZ0IBOAII!NwxkY%ve=QIeD zg(;I=pzz+hawmFlmcb$R=F(tgsCF&eNryO2OO{C}>x;&|BlIC8p3A6!uvBU|C4kvi zx+EQ?%LA>+4bv`|KEobJHzh2;F} z>hM#X(b?fOr1^RlEF%;9K3I!zr>x|<^B|G2 zxP-2}1Z?_9a9%t-?y}`FILkdVknlB|4VODgM?Z~z_Ek_OE9N*ZgNO5+^~#z`I65hF zuLs)j-by~a89;e=?j%p&g40wrmB$=p_5!fwiY*t`42Rs|2yTf{F*`1FA%+#0>vX;g zZ0e+oQ&a2S_rOrivERw$2VHRb0jhD&#SoUy>LjJEi{VL2;Y9&xgy7_YE zYAB1J2|CKSLJ87-@nN~~AK&PIDLweAvuGUf2MG^{KCP-H5Ab2Ps}C;u^?TE{aLq zQIi?}10#DyK?eNjKI!>=nXlMH%Vbls>nC8;>h!U29xd4dqxxs&xK-UITCJW9UA7e* z={S0@n&sF-+h9#~Hog4pHwcpF0#LKRa1tBmcchiNttAf3%hP|GCtjM|u}AY_wTOi(pO7omX{9xRgGstOpWn+-RQu4Px_ECWlcG{z-jOF7LRd~q*}$J$;(`qG zG6mh*Yc+N{SFweVIQV|E3XY(;Wp)$(QDB4EKtUx4Bslz%9K69acUxEfZ;!m+QXSj z7!J$z$)yK7Kv~%g@bx%wSkv$8>^%C@Q7%p5C#5XaQTDp>+^#Ulqz_?VdLnMLmSXPD ztmv#>KvRjngcZogeIQI{`V5Jj-p~A#;RN?crw#;K1`NJBCA9 zr1|h7*XA-u)Q0DPmyCs=$0}qvv$tgu0P*t71x}3Nw&ZC(X&Ir|y4Ucf)F4S$-5?HW zS`42{*2&nxKAfe=PCSqHrjig=8&Azjoua74($7t*0&2CT_Z&D4wWtmt5t_d)?RJS@~EI8PUIM zc=sXPk*XQ@w<#nc?$c1;1}t_T%UJ_GrqD*NS@O)I?pr@UCEnveD~D+4lfp766MCd4 zv!4XfZBCPaJ%fw9^H{>xl{l@L^yk%DiYtS~=f)RcqiVzf6i%z5&*zmS6gL-_yol4% z(wAhlS(dJbvTSI#`NJCLgBv?)z?5TNfwD-MwbG;Ly^o2nLz1Ia3t5LRSqrdA8H9b- zfp0-XTI9a6T=6y-z3D{ePjy>?>%n}K6?@0|9__I_JMUw#WxszxhHfxKGo{CPcO!^& z^&q<3{T0BX(sRMo*Sj{s6cc9M+W#AeN2}qq1#pO*h&JfOoFye$x>XMN9CE#R?j^|9Hj4)kHUKOjx{ zV}}fsuN9i~3|m-o|8!xPBWCC{$!hP7&kAsOSJ*y2}UVYXEDyl@(ioEP|JVdd9@hGK2 zi<8kvgD7SPQ!5==2N=^@DO2$9ovLFy!dZ+JDQfA=PGGBd zT|3A_T|guszQ31~kGcXQrRl*DI_-VCnGePcE-A<`g%*6^bY3q3S%L%DqY9U0TTgOa 
zWVhq$dS5upHbyAy-#7PnM8M>j+3Ro!6O$)QV{+68uw|}{*(ejU-<2`h`turyPZ2ab z4a$UB^|1693!>G8r=4$C5M^7BdXJn0HVNoOsg;AzfS|50`pTK=w#2y44<55|3^*y;?g*RYMlq;$n%I218n=bLLHz+#6t^Awmg z1NAQM!X++rwe2x5bAAlY3WH%mje4yghhdmJL4JG+m+7T=OF1E9mqSL?5R0nLrPe6c=PsD|@)UB(btb~%7MVWuS zWib=X+zIjSUXJJ=J|_8bi6dyJXl=#5 zj?k5~FR$zmVXBGikn79D17S$R&IK%KY>1JhIxD9h46-bO(JH~)`x z0ZMt3@K_KE(}UA8OaWQe;}-y`z-L%3p^B()uKaInj^dU9qLiUk1o0eSlWk zI7&ld^B~sDf5U~X+?T7>ZP8KjxWLjMY$Egzuccf+z&tY+$khX#=LvcP^`=2!%YpX1 zbbogYb-}r3fnmJOa7To8cgc@-VfY&>UZ#{M#yBr}Tt6eLl|bd8@wH(Yzy#D66{UKd z^IaAmBK>YWL^MN<-epY4-xFOZOl!XEGYL#h3s=I@=?svmHT-7)y4MDip{zxPht8RX zs5xlxS&r6&nQSPBmZ-`Ki;k)~Bqaw{gQ8-~spWjdGzerao??5y7HowOzAdgj_3)W+ zCWG2lb<|3q?Y!xNJyBp7SgM?P<6vocGw0=cZ{dVkpIq2u4&EqI-QT=V-4BU6>JQX8e0%W7|OJ_y4MOlI5+6E;`YzjGCscMS7g<>cF(_bU6JnUXu4 zk5WA@()Uj1gF{!kVji&2`QY3+|C@3b*gCKQDVcLW2o1dboSglT^H`x%imMf)I}J2r zv3Z+XkRzAiw1zZ}HCNkZK+9v^j*354&*-5GH2=|4_%Y>h%9O%MvmAYGxeJKNQY%&b zX&B2bo6dpsc@Av#2OZPBf24$^#n`R)%FcfU#>zB^vr@p*$4VhK!?@i0s*3?h(0Avx z5SIBaCraI$=2Z`H-TW5rVods<9y}HOHk6GC3>$e*U%|;szjvHRJqaJ@USae9T0HvC zOt*c6+l2PQA+=%ygjI4@Pb_)3EwDp56Jx9=TRsI@w%D{%F)#QWCaT@4LW*&>FC5h` z`ajG$N}0a6y_S?^8^KmGTuzq%eFcov$8A!Mkbe(h`LX9lLOc4D&9Igh^Ib0Q(U&M? 
z5mhl_&8+vq5ECwpFICD!jwi#nY8UJ)wghk0DLjS&D@_~*CFa7wEY3vxDveH{uWe34nv4juZ>Or?aCMauhh5Hl< zxmOu9T$7R;4zB;G>;6DqI|Rmbu73!k-R7Z(LRp5nj}k~mTp%{w^uY7f!(pr%j_xmO z6q$cEqS(MX(tYihlD(U`uPnlNlA{IK5{xnC=`4B2KPZpgeNq^HEWT)ry2`#t-Ik*a z6JYws*N$^>GQHv-qS}D9N7bol#lPIgA-WR%L$M_o&F$p(j*imXi#l!Jr<0>-HhL3J z#&mWOF{iGQL%V^km^pZ>Cxa{GsP1qk8*`8h6S;*aTBuYIZg|S`iqSsXqgV9CX_C;w z>A7-p=*i}rfv-~ko_(ca8#YU$>7;6CeP3uxW|Td-`BFdUQM(xa2P?Gh{?s3u)Vg(- zAp>w)u?*J%;!DMm9_Vl@khSYzC~L30c`nsMfGE8N_vB%?t%yfpz*J;$vBFHq$Pr+x zdeP)jrTku5^A(=8Su)CD7Hk|N9S7DzWlj72c-)pF9j+1PjMJTOY*Kl7X`=JNF(Xy> z5of@dOcPS_^;sYjeuNj7{o~HNod}Vs3CHj}r$+En1&pa+aP%zW<4Gu!*$iQdi?OEV zu#}6V%>Dv$+EkcmOmdgQR!xJn7EQgNM&<$tvZ~KVzOHVIvWmNQ3-iFJT-0R#Z9Uda zS2@0x?!?}_6$X6f4Pz-%EdDbHOD4yy*6l9%JQ7{k904&=>iU@ZFs zXiOMXm7p3{rsSh*and%!P1^J40W8`W#$|b;9`k~FAPcXzh-j!x{}HRlO%RqA8!)=+ z|93s3n=VZ0ZlCWW)pz&grv+fuJ#Dz(Z5AQz%VYJNkXg4wn0ECoeXSM$zT5fXcslns zb}3=Hz}g}C``vdA@?&H;;34-t=2ggUi-AR5@pljhC^l`BVu#|Y`oqox2lXnY@e;6Q zDx5CoWWrJyME2PP?YIoLbu*am=_t@k?bPp+{L_6*NTMV178 zqHIuTN$8n`&>&j+vg1xG{!7o(%2`6ydb+9NH5U!DWJ$m7;5dpr_7<>M3G^VgeDI!Q zLR%aAMeoB<&BC{dmPTNP?)Pr~fG?KCEvc% zjJ&Ok6)HAUn&D<8ta-0m#1&<0-px0{nh@9l_=n0b!B+H4H^TXC^RJ;uNB^X?^(New z$+2ucy}H@?W_`>3JeCVTI3Ib==X_TWdAC#^{E@Gg+(dsq`;_uin#r#KMXlq50nSk#R|usv8sG7XzQT7k3HCh(%VT{xlx>-Dqc9(QFd~ZdMZ5J8%X=Zpsdb~_2vG$bz6>E#fGb>4lpFnD~%00xsUqi8|15*GS=Q~I;-1^^E*SD z{PfK9dsW?BxI%UKRCvSgFebuS{R`u(dV-@W$e^Pg;oOtzDgC=>=Dlw!MMYE1%kRAb zmZ#dRoOlYzY8m~vWcoVDMP2^84QRR%$S=xJTJ1Z_{82bfB{7U(i^td(E(ZUuPAZ!o z$bN9Fx%Jjh={Vdb50^Yrx=fm;6kfS(A`ef*ZK>(iMH=(_lVOU*h&;WffK6>`-(V_m zCWOUk{|X&Q?OBcr-`v61r%I7XB}#1U;U)uPN5~oKwAj3Kj0a$b`Vgj^ER^a+oMoGb+rfCm6Z8DE@ej3&A?OLMGL^I5}Lc&G*ddFz9pO0O>sgr{yNR z-ma~z!Pzc?1c$=qa>Z6)RD8}U*U9X2oR{G#?$M7t7aZx`%i-ZXB`l1dQqw~LO=d$` z-dMt`nPJWxn5cpB?{CW$S|mJlPSuW|?}!QEDr<)l(Tq2t^B&z#?t395`s09{w7A&Z zdTaI?bz5}U%4i2IFR_o9=H%H+ahrThRKmTnotHa`9SXKCiqD$IF~IIn<%5BKi%znhoegA zfpy!V&7CgP4BiHjjf$h1VjB}3?{YpEfs^w0-C*>RUA0{PbsugMRR3EikO!48!7!k6 zzl)Kr+<8R<0K9zdtT 
zGjJ*yMLX|h+_p9R&nui<*?f%;QFY^n_r%Jfw_O3(U)Ld1O79++YyC`xBfd zR2O=e<2L|{I@dwwu+Lo3QFwEu{tP6!bJ@%^&kyy{)-b z8M0H&bmHDGaa%)*{?`C&Oxgb+tw{TEhfFtK7JLI`;^;pWW~Ar05GHPnX-%u|fR>8B zwLl~LSsAj{tCQTl8K>pC+~Y8f{NOxej#umah96;~N`xl$AKXs+2?{A!pI;?!tJ^+v z43xW)z8t&-%Hrg3A^&x_6>I`pln4F_VFC{4Hsx-=0j&j}mXdD2gQDI}bEq?umz1*@ z<5^w;t=<7916*+YW2ZS;QLNvE6S3%VDkt9988_PgOBc zovu_=-NJe6RrSQGht**4v=XF&nOTn8C4#)>o0BeS~Ea#a`SD|{B4{k*O)S~RS%E?2jyG2Dmu z^<8>4gS$bS+&X?!OBsBRIK{&03$h3H#ckr)!={0Zsi#ITCV6zvla#W8*Q=({Wxacq zwa9h2^V#q5Wk4xpv3KF9K5thlV`aq4vh>Rr^@E^n!-?G^{oPki^3#A18w_CzqjT*g zj|>A_+U!c`OB!l2KKE4EXp|o+Lq>osQGNHCGHx~!#>$Vwz4FOuAjQ+QyUZJd)5Ku_ zA#*LuT;T8@9{Hz-agO2EgZ7Z`OmKlt(_T$3I~~H}@5l7|r-?3vp1)yVr{iQ8%Lf0k z%Q4?G9Uh%#A%D_;z@~U=RM@ottCW>GCL(QpT#NHuu$*e=uNc?{N(@x)@7)5~Jaz!9HQMs`fRY(i&5e1?X+_ok!0JFe<7F)VRddKFS}g;veY@c6`q2-R4I#cIK8Yq zeWAl6)%tSPMfE{=hDsIj%f(PpnNO#&>DjGWSNwz1H^LZ054#dBs?XT^uDA+lDb?y+ zzVEdT4_dV%C;zz)Y?&F$@p>MDy9vT%=fBKOz14l@H3#mE3|Ig``{>+Lo{pS7JIRf= z!&(-p7);19klqP}g7fV9zo2du>~T*;j$H^aiH3Qs&?*#1666Y8>%X(!15cXsA1HU> z!6K(dh1)kT-VbF$mQ_oKMJ@&oQqbm(dJuxNdEH5#RJSEpp5;v32Z}8=eWX%VyY68Z zKH;e?uIUmB&$&Zs{0MF`{PrVqa^&pXO?o{FYqcbNwUGybmpZ1Dhc%g`^?w2;N?HFA zW7rCZS*_;f^k?g_TI>#Z-r>Qz2e)2v)VH(S6}}j_W+hxSux2yNvck#1Z_1hqd2$cs zw|@Zw{qR3;REgJDf$X?WG}Ku)a`$R@OMp_-D<}!sZw;Ic4~&35lhW`4WbQR+YlcG^ zEjh|VDjzCo$qozuuwlqr2-AwTQ|4bi-+-Yc7@4HOo90e2U)rK>D}`hs2oDM#@sMH+^P)_4$MK>b5#~Dt8FK08z!m z*T+te0AnbZp}yoxvC|Cy7z#D-P%_F7hk;sb0$REp=}*f&itV$W82jIMKqA*3R9b!y zhzgrh@Z{_tAx!I|dCP@5``k}3CR(*8r)&YK8c&q#)M+I?Ahed#Kpv~-nBeNUJRb1e zia*mJLn-4Lk4pUtWvOr@MeA$t?Jlwyi_~0}jO_Rg&N31j0sh8CP#aeKxn7eS(>TS+6N|lJ(?WO zGQLon7Q)7~J4QOs)6=>Vumgm4G*%Yz`Sgt^-;Z4L#R5aItyF`m~mY;^0v zzXz0w;s}-7TOh16p#7@FR3|)_HGx%OPn2)f87Vi&b7M!=AK~JNg>RfpI2yugggu18 zQt`)Q;HW;S@Oj{o&0Qq?!Ke0@XSQ$*I@i_bwaRGrwez`NfknxTikTQ=Xx~l3%_t{5)onhR1IHLFYg;=e{NLa9-SZ!JRXh&^qQP^0&e8>^dIFPR$#@Y4L~0i`m8M!8qgQucp1K(^H9lY6ji~d z_E44|=AI|YpB*d!vznkFZ*+74VyOx`0UjdheOV<1wU1_ z_gli@M6OUFpZQ?xP_#P<#b-8K4r>LghkuVvx}>m{Q}mC{#jPdTLmJLDTHiDst8UYx9(d6st2Ij5_+7l&&T{~B 
zAgywQ|4OeThkr=J=v3fWbmF#;7U&(eq*5B&1 zk*xkEOdw|~ZGwAoY##a+;3DNrQnja(l>;t?iPWxF#_qWcjN0|y<-BWfTgw>D-uiVQ z)2JT2*XITwQOb>>NNMdPGY=Zsx*sR&Zop{?hBNv=j<^xV667ob(|X0`<21d%nwO?G z!amxbc)&LWG;7Ji>+o;UB^-~DUObLX8obH>k{Inx_Y2(nD; zIDsz2y8>Vx-vcrGB8S`96~3pi)2rn`ps2&Ky34@K1s1ylqUQ=|*9Uz;ie{v>%2oM? z9Dx-o-n!aq_t0jR=pU9#!jd1})T+?n@6y8df>Vyy=pQOfK|3Y(2-azj4)L1N{qbt~gpyHdZkE_n!}$wWXGLQ#bEWGJbNlOBPFEA2A}KAuNrH$~Ea z65t9QMXO3rb^amD*`Z#Kt)75%Rj|U+q(d*^ARnyE3SVA$i+up zGBb+L=HDc1?Kq|ckZ(_8{P+1hk>qNhVQcjl^F&z6^pAH+bd$_Aetwz%EfZY@V9oy0 zD|t3}uV!1c&8q;erO7=}DGKSxNb4|#Exx2~md-Q|d>v3@@1UrDlb}n$%Dg51cpKU& zVQM_~-JBYO6(!v>8c+9%t5tZe2HCu%Avv>7QXcE zH-64Pt6^`|9m9Xk@ogNkVVX@yoVw3>fb;tw`6Co#ZAl?XkkszNmb=y+w_%J z_GU?&_;+DuO8zc1%ipsE5aNFBuGn@-2PYI_x`NJu(xi4>jH3lw66K8Y$1at}$yQes zXy~$lmIF_k&;y!EfcAjw+Xa}<_5x=UgO-aK$(=ZBm846Xj*_4)u%-MU#Cuy zuNQ45iicv3&I9XfgTO@ci&7VL>*gOc7H|N1*?Of9WPH~h$E^?I7Tehp^&3N(Y2R?C zT&r#hW*%!%Y-k?YdNY5-ywB(`cnc_VTq!g8A4FRaHRSNqR{8hS5 zaSF2P=*KUv6+v0CmWpS$CF}~esjbu`pX>x^0kWP%`~2I^0M4ro_4nEXAv1v#@RPtJ z)`LJY33L1?yU}Zh0+9a5jCeo8J{gHZpoXY)`_L^1u;sZ5dS7(WMDhD}S*P%qeojOHFP&I*-)O71`uL z^k`w03F9F)8M4~hcMPCcz1b;`++RH-)PiYeHaTo8r0a-F54gGA3)&2g>A|iirz$4J zB2j%^t4~KaeNqlGwV(sgQB7qvV5ZLN^D|J4ev?5hE;czT+yWskFrMf90^m>kftiEB z{i5?gD9f)VyDD?fK|p4s66Kypu}vBQT@Kw>i7Q3Ze3F~R?@ND{$y8|EbpFg*O9%IF z<1hhE5-D)*DDBA98GvR4RqzI_P-Zm^DlOA{qpcmxvfLJHqyZg}(yTjoC7H3QQ z3~fZeJrcx8qvCCeJ&(#`pz`ctk22+G5ND=z#oMuhoY}NOBOO5<`Cos@Q>xe9z|&8F zcIGVP=upR=2%sK`7ntq;N;pe&HXh1p_Cp2ncQ+mOtWyX(11-ogrQ~??3xSeVfg$nU zQz4vsa)lGio(7vI~NJTHFSoYS*3TwNjB33?e3SKR_)-jZ<+9{w;rx)o6MgukHre-L)zJjUbs z?Kc0-02!Am>=b!7f(<}?eFunBl(jXEh*_k+d#68P4-$X5i?ErlmfMvd?}qjYL^oo! 
z6PLBv_v|!1ot9DxcKO)p0Pwzp+xAFbZu!{Lnyq1N#S=Q7%V4TyWxL@I{sUSmxiW+|93%ShKuqx^N^nCp6(?svyQZ{&!z_3`oBIf9-*7oMJa2P9Q41}qQ13*jUg zgE3@#58BzLWvl9IwfDiYs*u-%#&p+y=0{G3!3_&q5iS&SG)-^J#{gz6y;j;7X9}^9 zSe(t`!GxIk36L|VtxS)5MEfIb5t3Evr#WJm!O{5{h&#k6Mx53TKO(lr=fHhc(1qHv z9lLoTwOj7dy`(T@*5irswg~EG6yE&5BIvr|ZeVQrHKbefR91YVh*wq}OMOGooF-G4 zlfQ>{c9}*nE*50w$IygWsXSgn(u%YYEB!zm*jR84eR#MCr^)*bUGaH|u9S*y(og(v z-tgfXCzZ(Yu)4z{{)M>(mqqs<1mQYanMBo++x8_pS1c{V%zlxVjH7NPXtx}O`4(&nczHsY<4lvA zug%whc3!zwG4@;wFq5UaI)yth^6zkN%u&ud(8#S>7jO3yLCGggj)H>b3YSzeV7g&_ z5GR(FM=eCZ< zy8HdYW}q~)Zd8!3XJK9Ze}D5l#Wws=7CHC;aO0|-6dP>?Wv-^zp&Mw3AB9jz-5-s? zTSK_=%V{hB-6sFGz9TN)7Rq9+M9mQIZx4+`>U-iZ0}1}GKZX|iBkGZ-g{L;6sjqo3 z6j`NHq92Aqn3Hq>(;gcS2eB|}xP#UjQv^^(UGsZhVM~}JgycQ!xm|$FPg8+WX!G52 zn#;Z|vBF4br$^UE6dUaU;7-f{gF4+Wa;~u_uzDJb`iP0>?jU0Nm!P8e*r~Jo+eA z!)>DZmt!|rdT&bpudc4du2Z3@4SX`jXocMzE7xF`@IOl*oN_BWa-*8N<(pX5ftu*4T&Fn9IUK}s@eI+t*Dio;}8MPURU?oCVwb|5Bm>^rREW& zC2>tJR#(`S0`JM5X|JPzY@e1M0Ms6feRnZ_)gMlDOhL!XvC;7$mLK*GEInAVY$J>w zk(Qe)odA*53CpyrN@NW~#LFW4T8U;Ls^qcLz$}+?ZnnfXqTOXG%A)QZC`$<=Vh%@N zJ1>te8@xE^LTHL92fqqBrD6yBrQ$`xcf<_sZyjY~rrCzVaH z{^g)q#@r6Y*UXgw8XURe)c-2N<`L_}GEvSKS6_vw675e}s&R+lqpJa&XSzP$5xZOu zk_m$81~mb;#y8}jp?_x=P%glqmD^DB$QwZ{U5ry=(9Mv}7=7`Oc;c4)gCZIQW~SmL4b;<8UT{a#2XUc<&AM%)MOQWZHyX6<%A00Pmm z)L$QPmO!!534HO6N1^ElJwLusUFmmdYL zM08*&#$;Jmo+R2Vk6{J3%fpav+j8ZgYFzozJPkcgMs+_R#O09MkafZ10A>Ymlzqnv zd8CF`Y3omV0>DYzIEK5Xf}Hs%(yesI%Pr zkWMojAmZi^fLx6&)jR=Gh)q7szZs-sj*lRnsg?;X9#A*>I8UiAoz558ujCOK;FFJJ zWc2N4pyo*C4KeR?XjjD4j(A6u8Pc*?#%wdc%s=SjJ=j?PD`<@FN5)`<{OOpMxLK5o z#)(KQ_!<&XZnoKG@wCFOyDB+lijMyPxDjC{)|ejm+2~s^my0Pedy+Y}63T!zZemPS z(2|*r3sI`;U*CbaEZ8FT#+pAsJD)n-G4Xn)0KP``I{znvFv}v4R$TZED(ft=>uHSN z{tJ>-1)B(M=N4N8y+bkmbumI&~VKI{5x8fsp#>mN;-_f(&iR4cQ?gNMym4UGYd3mM5xL6t+~f zD%G8UOIHDMt&_@{t3sK_=^OyXO``p8#i+P!btsb626yW<2%EQAn5xbcY2Gw`^X~L& zYdQi4gg7LQ`68Wixn9}(T(nt{Izg_;uU{*V(#dmGvBuiaWG7$t2P*9JStzI1JJ-z- zb};Qttjg8{DNT7={uN@=4FH{QQ%hHCOcU*D%idkUi`#~IoUuC3{7y7tt*nWW8xf?K 
zom}*!n7eKaX*R|)2GMB8Yzm_KB7Bj;E|Z)b=}Os?g4~3#V!(9f<<0UGu>6-KcG)8T zik>!8H~RJx;aKePjwp5r?NUm7pv{IyW z(#oS0(X=Cx;7l{nd;JJIhbkEZb;Zv@5OGgCUAY+B?VP8mHvus`8VJA$wPj3J$R*|} zx2CvXwA0cU&Oq?I5ZGbbwYqQCAOKYl8;89I6Lw>s(iy{sK)JFiWN(Td|HffJeKJaV ze3*Z5S3qZ4lnunccF*Y=OY+;+>%D^=W$?WLMDzHH}jP$5-*kx+|=XZ6wUf`+rNl-??p?#6Pt05hG&V z$ppP`XF9f<1CBqwnhNZ(TFXToIjRD#p9bn8Vk{|lf4p40FBmdN=X?`IWR;HJJT(rl z2roH^KU@y37}4sl5#Xe%bl+4R<1S(9Hq5`LACiAigGmwIVCw+LhtljGr`AKb>hRIV zx2UiMB|b>0u>m3*D(ZTRaYSP&zD84K$HoGX1(~-0=&gm2Lb~E{f`~qbxg8jDoAM;G zyFYPBGf>vVbhd!iZ6QvY3Fz9bJ8aIVQGG3n1h6eT%jSSTWE|H*3z zeSMeX!PL-(#6BkxRNrJ%eUHKxN+XSgY2(vV{O?TE5!ang*agw*DyE;2(-U+w_sW?$ zjSX%wem)D@snua!a7{55I|sz9>eXhM&<+t~L34Tr@8i$SztRgfDQ@|7Uw$4^(#i_R40 z{A%!Ae6dLLBSnQ4I{P~4 ztP{#G3W4?9kVE3L4O_~01UZ=n7wW}}MFRTd)>fZHzwqy1@R_;rc`G9k1nyyC=#BjA zvid=}xa1}fXU#a>szbVqZUtf{n9bW#&GEP>3WW)HOZ5H&ffleG5aUP5h?ovG8&Cz0XtUzmj(+s6YU$y(K7w$P}(&$+74k$QDcWpKh zk#+ZU9pU6V{jYSvvGQFI7I3Ymz%J1)_qbx0u6dqaLKwufjo$U{JPvz)t-u?FSib3D zL$Uh#eR;}qofJR*6Pm_FFOHM$Cv2YNfv6a7JqVET-^{BQP+8l1Ie;mw->s=^K3>0&| zP=BduCysmu+G)4gvgJb|j)7C6Vr=#-bk-fvX{K$$RSUsfvdSrO@bgeEI1fIuQ(fkT z9H>r{)Uz+;0B-nc%+;>|P?;!x8sqU-A)JGqd6Db$!knh8+lz7h>i|gl{XgFj-zn^( zX|5$p9Trr*;TTvf#*uI4G3-GgS-_p}7N`@|4K1#cz6}5y4f5l%NQ5g0byllhii&sh z4<<9dvau3iacLbwcUkj&5O;)0^yRfafV5=Ts$TI46il;djz<-y^9&ggk9|teIThNm zCtxH${c|8EtdWVP^^_1t;?TN9cXNL%MXJ)aygiz}BvRH!3-x~cyG58;7*!d+mST)n zC+5aU%>0@du*KFSKKzETWm2uzr()`V09^lFGAR2%G!cBCo~_Ke-vXFbw&!xid65u* zGFckrA4Mb9WS-poJp}C3re)WxAN`lE<;C*9=HIckm=nAH25nxrgo(H({};f;lZr7_ z;tRt1D!+ZYG*587!|(a0dvRw6(?0=y#XwRI|B=&8J*;FNTx|Qkys+1js$PKQ%|b0{ ziLP1#$aSk`_vfV`o#g12*0wlu>6|9Vx|p>bw7KQIaTbqEMYDxz$Q&BKl>SafFJgP) ziu|nx($Nz?txnjvwWIldVl7DLR;^{mT5Cfi$BN|b{tZ#)b|O}cQv9v4E|8@sM>u#$ z72rV`J2uYZHL9(-Q67t>ClAklv?+kIEt@zhHrXsED;U&v-2&2F)bmbMj27)oWNXIl zz-ejm+aV z;QcVR+5_54c@0)w1vazEGHT?W{9(T7TRJ~v@>oD;Ih83E#m)~xvNqe^J3AR3Cd@M9buu1N z8b=)RtgVc1_995NG-C}mZ-T?R{W!zi!fp9))kGVs@hiRqJpoC{|qEvZH9S({^c z6mCskg^w0S3AE6QC#FJ>ms;(q(+GQag@bkPLV@oUOnm9BW0`#*oG-S1oU6CTIVH%N 
zGA;uNvef8wX!Azz>omAdS{KXj2SJ`JGNRuBge`dd+>R6F)HU*Jt^K?pBFnGYiwaw2 ztR7lp@dKgZj2oKm@zz0(h_QLxbTHw*bjY8HXYG0*k7*q- zPn7xRk^s}**QHPT9-@tBXA%j{y4Y?k*Pef%LANpH2;fiI3oTvD|7=UIUmYaC>>&^O z1MYjf9lr_l_DgyQd$1^{Fb|6CD@;2T(;dIh&LNqKY0-4doE%~soAgx6%>{Bfu^g)7 z}$K_lXZqChQV0gtHr|JT(XOL|#`+KP^we zuX|FgPGJ@Uh6*wHbV!$aR0qq>>q_)^`5qs0CjY0#&=&1k7$dZJBn!*w^cLsjNlxPC z{y`$m*A#7FW8rxK7F&f58*K3Mve2wGaHsR17ebnmiSjJ7@kKd}EeW?=MfbHW=}{Wv z2SLtA<54m8xFiP{p%K6>mjSr-b18G=6{dVEWwt0M*xexyIK8p-RX`NBi51jx3j0&; z7w#>J<~4L^oT{LP8CfHoaJBJ@=2+s|{DUTZnu&6Ca-C9p-?2jYj!U;K@so%?8KuGI z2LIb44-B1uhj88lcZ@G?f$A&y^fd8(bQ_RU$4XsxWeaZyvG}ARj@#~lw#@b38zR?X zMB#2w5@t(6y*zl&|D>YFJYv^-^HeZD?TFU;*~p z7J-(e6o&D!Xs5|~SJ$gndKH9bS2-c-Ud#W)N>TSC3qqVBa~gBrIj;j`nPg`}Os)c* z2?y3g&TXvJT?dFJA?YZu`IvCV9);RpkA43IaM~;h(i`W^i}E;KytLXFp9pZA8fo9@IbW9`E}*21M%(ZCXB_o1 zldbj#fZGu}J*LdKObAk#z>0FQ9s1IQvGf-E>Wc%pVw$j5r^$1I)gv(LS=cnuyv|!9 zkA^W?whk>@641SWBA#cTTMAN*VHP*~r*hVKvv&mjSZ)P@H#h6w}4LFU`^7 z%khty;G8|Xb+Cg#OHu2l*hw_aiD{N|Ny2KE%%xE+!V#;PZDZLLp{a~1X$^6)D3=Ap zW(|GEtptKV1`m#9S0(6XkqHF2Lu6T2G0H@F0gSJ?54QLplOU20wlh%S8 zIZGty4GH{mn4dkc@M?gY0otkTF%MiD#0gf&A^f-NLR(hTycdsi*8?KmNi}hwg65|V zc8c+F2}q70WB&U2_h_K1ybcdas>uBn?X3YnS6Qv=KgZ%@VH7re>t7= zGM(quYyp(bRvc5QsWaGovo)x>X;cC3*fys}GZn=q+vPM~9O`1KXpaun&GBbZmN{-) zwF10(dk~kn)31gXV46Y$ z3bFADyU+`+u8TkVLAqFMqxzf-&E6T*Y%qEIMUGR2nOP<-6>iV~5Eq`MJnq*9 zOkgJTj-I+0I1C!0a!^8>8Y0S>l%b$uh!Q}FliLm~wuk5c)0xQkG66`ZFqemeM>sB% zY-cO}p?86VKUzymEVdhj1;AQNv*V>BLCkRtXTs?=z(afb6PDvG@wEt7H;M0G24%jn zo$8Dc<iDtDC_uCC>mz4W0>B=ZeWG;?;l6sY&NDtA3r4Egte^qyirT&M z1W;cw@SIQrXrs*ZUE_&iuEzJoJW#aZ)7*b=|GHccMNPz=-^S)0SUJ(?3$xl zgtM&IHRCqX?!)pDD>WFo{z<57dF-e-N|dUW70|f~lUTBg8rccq#Om~DL`xCcC5KsV z{P#wo2guwqoW%LFAiWZ1mDr;jEq;*csyY1O*4fut~O+a+E}+I8C`WGm|1N^QLBqGTO9-Biq!Us`?Lbw z8qkMv_SV5V=*wgCSS+(x2fuzCfKz8HBDF~|j{PebtjyA^Gy8847IFTB8iw8{gVIC? 
z4UZE}$y3md9@p@NxLueFi)KVyo|R9{k;#R0uCvb>AT9&8Yxtzc7WYgb*9b566yiA1 zj)G-(GH;r97Es2oY$i09u6%a>SyL-h(HPOx8gfAQ6q}v{i7-+5#a;?qN>X9-oc6f@ zPR}REr1#Nzc_gm9F|drh5Wsw}aZTKxAjo~fkD4-v(ezFhoYg`HN|u;=5dfJtGIi~G zF=2!@p)&rZpi^vHa9=w48FL8`Qm1RTN6n?#XN1pIn9qg`i7yl+`DERs+Bxtt0Jotn z2gakK$V~d@6D}ug2E9A>Kk4GYZ{vyW`-S0APP+Z@jFaOFd0$QI5~Y-<(HhXUX_; zq5xM&+k!iqbW13jZpkCHw96UZMFPxuYaRNCZEuBkTTg8e%U;y#w*$I3JcTQ1?I^_E zc?#|w2a0qmEX8|cSJ8-%^+$YI`Z(R{_uxHC0hXIY@4-iIaYMm>~yt>b=R4K{8@IM##!#e`Dbj@+T-^}pq&#M zy=Z+L5;={g)cu|y3`e*fY>G)w=J9mhbjzn8ojdBv))+228?D=$P`jiFY`V~_)oDoL z*}oDpp-{(pUaa*ralCZ*iD#yPIPG8HS-tCgJ>6sbbs-=up~@=6zn+J1p4e+@(XWe( ztG)nA2{09na};*rc*`oT$o|43e%zz5#l>f7{CQH4tE8cBE=#|0F9EopF>7E~YBS^m zUIupA%Zlg*k9swyX&uec`Z_cm@Yr>{pm5fw(wx1=8#y4iti_lwz~yILcH zGsv@HC}7)Tz*|5r4hI2oqDX{+V!k)t6p{6d&tS%bv`MRSb)DF1+p>tUvXjky9TyH1F2 z``BUJF~=645+JTj%=tHeTTZx8i-})Cy1wvp(#_4woNg>&k@=oz=dVMTq;5a%n>-FzN9y9P z|3JITkE@OozJ+oIY%61z?;x|@tv5xBafT2xs5_0Sj5*%}x!CD4Mm+yxo=7<}+wDK) zpV+r6SJ!^dA)DwGV^9gg4vU`I9p4Kw`%({dNzZrLFJRdemY(tEfgU>X_P+sB3YC$! zD}Fa+W4*=xFqO`o;wVurKsrOGnLjDWVw@`zvM(0jv9HvvJ#*yXVCoVeS(9b8rX1H? 
zvIL>u6SU1zKyGNc9YEYFklMw;Qv9GWT>4<|CsCGLy%bfk&e8w~A00|3!arUlEvup< zmR-jGu+Qm0Pd8Dd#|KU+`yPS0WLeO@cC2aq#O3m@YFI{{w|xG8atIp_3vjs`J5V64 zuwqW5g%}ijh_?Le3(eduVCWnrjN){%G^kjKFbobI6hA2FyvO$FsC@fX0kUCpj=F!k zl6u{mpl(m?1r8Lrzq=NQWhUn>I93ZOm10>!<-we>%s6DXzSaEX@4~aT+m!=9uABq6sZeQcT zrJ6W;i~M8ynbr^23NZ(5+|Zf6B_z@xH95++b#VBIxN19sS^w10By1eDgZ~{dJQgTu z*=fn2_VK6gn13Pf+jYF^@YcW_lE=|>hkP&)RU*som^u`~rLDp12A6gj8O{@E_P7lk zKb7LJx{^O)9N`jzN8$auZ=Tvbg(GdcvK!M z8Rxb~^Joxrh2ckhD>54ns92sMj^)PWF(!03#@b^covbb>FopNVq7p{KwbFmqc;Zos z!-mIx69}5wDd`;cb^*?X0Rw*kt#M~54)uao^4zNYqfRxtxKKd$5iZOm;6W3C+!s>q zb%=IhunUgIOJwz^(XqlL{>?gW=E?%~{eWsv*Emv3TH<2V3RM3e5l<@UJecTtk9Tiq z7iKCZF&$Bu4B&AabrcovmHl;sT_Mv8xO?Npx*8DY&oHQMi^_0QZJwU}s%$tV|1ce; zqs|f{Q4Wo+sRYgKbj~fM>0w{C4tBLfHJHdDRfX%y(ZMAc@Q5F1ia4ZeQ7pLB)7zdI{@gxRL9yRa(HzFL!ks zqhTg=U-MuHKySQ88;~<&P0xTK-Fat69*-L!8op!iP9Rq)_Zb-ThIi!vb~VVXsvE$W zabnRCyU&7l%4zORtHLh?Qfyu+R>jVr4e5e3vkK)n5QSkc7$s#F&YK}~A)Kw8pS8y> zhXFX31|6(zbvSg^oY@0SI0DioWEe&x!k+3#Akx#c8xM+bdPOu#952oTFbBNqV2iZF zjsnwkz!~qo3cHHuUl9xc-1iU5t)mG$DP2p%b;46e+9@2E0r`ge3c+GHKPY7 zY*GI=C&9GB_U61(pePv!sqsk`W{i&uPX{-nZAJK=v*+;#pm;L3R+R6KAv9Q zY1gc=!uk9G&rPg3PEgqCHuAVZI(k|AA}}+{;tcbFS(oGh4sNjrnkT@e!`d!B5=nx( zA^WYuZltX|=uqtLMXvWvb!Q~&LdQj&04oYIvd>`NNbNxg!v;)DXhslg=B*U+r?P+wwxZV z`)u&hkq;9l zrJ)03*GCB=)dtxv$1S3=!K9f3#{C|HrU)83VzI~l)$rkQt%6xRsr_$%654{)=^bsN z7`HwJ<{H8$QT8g&T2BL-QJhA#G!)|$A!NZ3XmcF!4207yV>uLSJ_`-gx~n}+VgJXw z6K!$zbI|U0qY6FUjAqI8U-Lp>(o1`>9ubykMdhSeDAGdFVsmCpc|HfGC4>6iU+~xT z;X{oVyQNSs|DhDC zphK~!euG#R3w}0b%+eslSxl~s)7~`Y?)cSPP?kgwrVer1+tAK*43{!-cK;p#yy^v( z1qwSd?mW0l{XVp_(wNMOaruuxXboi(;%NmPnNg}MUi%n2KW^2g9lH_MZsR`zca9zE zpHcBC0PLz8HpXtB<$vROj+x+P0a-_5YG9j;Ezy_$giTz391MO9<($iOed0*b6oEsa zxH$`VEx0efEyZI~HY_gtj(9NN)D+h#n2AM3en)-}?UF9I78{=KX!rr7Z+_@Zr{XvM z4}|FydTx9s<>}0p!tJP^pj}wZ_c2R-|K}Xg$rp!qzW_L=-9`a!QaEe9$oQf~kmi0eiV__TzAA`p1`UhT7Tc*W zas!L@UqqTE&J|l(M*OxoNLKhRZcZWO|63A>&xa0);Y$;w0$CrjUY#k*qHIz-m?PTt 
zr(IIAiukiIv)(PeRV-Q-0M>a=A|79XFv&?fSd67t%0Fj{L<`Ny$%vI5g9G6!yW|)8M8GY&WH5_qNN8G2Nxx|WVPTHp~6ow4# z?Js`GV%JQGvNid~D{Giy|AB2YN7zDLa!l9p5WN<(M;QiTZczCm|Gt9Fhm9uV3?XnS zH$H2xP1vPD^EIMhRF(ieM8zTIdinRCHP{dGG7NZq({iSUH z-&`li`Sf(jD0Z2R04#xux^~Ru!Tv4Gt*D$C+mk zY(j*-qPB5^J0=S8Cny&v@7V&f=E1ptau_mWQ!o!;RXwr6W>97xRUBIkbn%<#ANd6m z5_)DUFz1yN>g};mh?5zOGxe>Z5Xao!SbG}>2M>(P6*NC=W9h;3wgqr9*$L#~+d1~c zj@Wbuf))Tvu0qu8!F3Ys-9T|{di2JPmRogpNB`f9^V3F;hXAg`NMXmZo$SYNc`@3yOPNq!9fpK%58PGs8b7zsRI<@_a9nnkv9Do{Nu5y}A;|2(w70%JcW?qoFBm{}Hi=g8qvs zgqyyiDMdf*iRO=Sv_XSn;#h*_n2ntbn=TPTM5!o-jw9?M&h26Lp}VvRa)G$8k2%8C z0{SNSh7K0$+)SFcmlNS&D}GYgS!k8V6g+%92yE8W#>gsyu7COjUcfq2MIJlRF{)~! zWl|QoD!LRjJG@jx?!9rDAWKCPLb}p(uQ0ld?%s6AqkBV{VHrpJ7{k@f@&A{uGO?_*P-Z#Nk43eAfi+oG{Z*qr}_IK-u(VOGX8zOx+Q*Ex){U`V zCy<-mfFb>3Zg-xN^lTh=JFckPQ&F3d@56Q$+GpJoF&>1w|E zi%51Gv??y}&Vit6Fr@jWs_r11^YU6LjnI_6!%(z{y+&9qUmn{7E@9*%3%FUVJ<5}OnS5OvXg;ao@C0( zE8C}nm{m^C>$vLx?UZM-@WxL1EC5N58Fy=i{U>jn@B-C81Z2%i{>QtMTY(kN26e`G zgh&_Y))(YPT-Fd5oRfc+EAx0$_gv7dDYoF3qbuGP;)J+2mr8KVc|Zt!R9Or;pP(gE zn1iKUYC<=bz>cICmTlVQ7Z5X(jtsr!P2#!|R>R8#X^4JKm@5}|J<)z4Bt@@aO?QRD z{)-y|@$*HHu<`f>QFXC{{RhU83c8e>t6q5pRF*3P6(=i=@u)DDu^g9lw2eQ6kkYtO z(eFxvRW<4oL`*PIJG5R_PbDZvj%3;LV>%rVWnmTxmc%2)dUB>bBt-nLL4&>L} zL-DZ!ow00k=~BN4al?@V!X4x08v!iNBATx|MUrTDi>!*q-UQ{@mm$W5V<7(OW?&>W zVptSzA?O@xSl7}co)Kb3Z2K+nKeqz9ywo8buQj^<0fYo+v3)vQVaphMJz2iXNba9P z{n2!sw&5ZoJ`;qHutF%rzePB2x|Qq#I&TND*jOsCJR_CW?*Mb!eQqRaMP~_h+T$?y z)Bv~Yoj^*Pu`1d{SlA797~y?)SDuQTH5qUnBfH>jmqWLZ;;+4llas7~gVcNBKU zu#Y&Ca&pD{LCv&`pqLJqdmu+<>*S#;&V3LB`3~4Io>q|h=#k}Ld?L!SO^+v`Cg8cD zhXCPQZZzYN`4E->mW{0_ziYjcG7fn>Pi1Joc=U-p6>5Zy|MMjP4Ki!$wVotKA1`&9*L7O>sZdKB zdmA3!dMZzcd!JlvY<(KQ+}M=5G4_21$lPH0q)u6k6NMpu?b|L<*bRFUNme}vY0+7! 
zDkJu30-dZ~BXd?K2-#Ov#%2o%k{TwPah1ZB&ve}A@qk$g;1GcP%AO~NE0+C@xaoNz z|AtprW;e!l0?cwvcaK(|#rXV1u&m$OeF&01^d&%z1zKnQD#BmMG8^lmu4s7`%wnWJ z^}TV?`x>ZODEheMv^Vl^?f8tu-+nX4vgIF>&s$)w0C`}D!%H*=n2oXjyZrCQQqdC2 zzXyekCRfHI3Yz_N?L4haCcmG9C@WeUXESm{n?ODqIylzhhZVo)WmP|VL_;;Q{D?=i>;o2`j5C!MhR7A~JP!=5OwgFN2 zCQpL<3(4|h^0z={9CJfXX&CH}5oX5NR!OOLqmVqI!SUJuM-Pn0zstXn4J*2>&%Ot7 z;nM>%apDg-kdy=QjR1{kxV4A}euO|Y@U3;q?;>4RtOHV0T;f+CWYgU;GcHtEpUN6y z^Z!Cnbp!jw4T}i6AxxPVgMWuI(=+sR^p_=?BkSh4aPfY9)iEA{-?ap!r;`S3!t?;U zCWk+k1kM^btf)K1V@rd%iD=)?IHzT4zhyz;3ZJ%JF>W~s*Y#vB#Jnuhyfn%YY~}LM zPL;(lt(e*GJYi-aotd{6nkjlbFW4zfs2mmFiL_{#sTg^?qHcx!L;2ujq)j*O9$OJO zyG+z~(BsAV6D#o#A{#y=s#eaE!-quD$(^?fka-=WQG1hBq1_y~>>-8lq}4zuDeuQM zMR!JUMXx(nT!VjH?JQB5a~59{fUJg%ho2%l{h17A1CD-+v7o5Um>$ zbatcI9gp1@(rsvT2l_UszD_qCV}L?T+ziM&OL_-0O{VNC3U}^hw4OLgv_*ljaAA6! zzXgEHKeq4jqldT5BWVT0#iy+REbLy4Bse+g6haBHuZu-n=Si>-s7n?RW3~Y^i#Q8x z&@H$}w*xXOwNe`VbNf6BMic2|wR#62=aU>H)Wv>6%&%PRG{$TJPGy?9^tGb%$v_v< ziS+4?`B$h0Sq-n%4?trWz53vt31>Bqwg3-Y#aJNBspy)J9)HDmfr5Om}dcohvB2oOs#;q%bjiy#{8f#}Y1}xs#`?QJJC$0T>OHe-PEMRu8{6y# zX+d_j$d75e0JC1x9LIfn@;H?}am1cbeWI5CSzS4NS)kLcVHH}4 zSIcrh-od!VRGud?sWV;_04TTvP)>LF?It-Kj9iV3AdfF;gI1NzC~y zxto|8E)i%^*!AGc0Z3JDG z$*kT`EHB%hBds{d;`ZA{9gbcxIo3Ti|C37vIu9H#gtW?QqDnzaf$@vkXO@7h&N*7s zVQ5pdbOB~{Neeu!2x$hYS{fUf3Rdg}A;n>+iWGEQ41;689!P2_y#Vp?Y)2bc6MM|b zqm5?^I;|HHLDsd;jd==Za&GBjgWer;=K?v$3cqUI{U;E~V_58a1VNV}dCrV;1@K?( z)c8O_MkbcI431^<@|e|WJSi7Iwp}=&_(I_Tn16xfWTDeR47js+qE-olE9k0Y!Gi-IPeJU)+$`#0J(e!TwzWXV-A&m4LJ zqzBb0U2(N2K2G($#YqtPxpxt7A#Q8i{1sdUuZhDHbUh~9!_;NI7nm*U=u22^sRT>> z&0h^097~)`&@!my0*chtI|*{Pn5m7{38LM6rB!5`Sr;3gl4JBtw9agBcMz5@A${ev z5KEp4+UMxO)<|Z3mi}d?-RqV{8n# z!Z><`rWk*v@!YhZbcPhi5sCU1q}K+(jDufux!Xn3i0?zhz1O* z@pFZpMGb>}U0i(=fQ9cj>NJS`cr&0GV>>cCR=fqeZ=h<4##&VrLC#m~2c zI%AgZQjScy0|b$zcg0q^(;ropNAa#aW-@~*#tA|!PmJz3nB_g?dw}3t#yvaVOW0hG zVP}gitpNU%N80iC<=^6UJSm!59|UrT!XHs&8&EQ)W&WSAn`jHdc+(N(ct^vV&af5#qR6vS3-|QK-Ih;GdoDxtSmjKxvM^bd36s4ySmJc!f4cOYCn 
zJP3CB`%o@gxzq)J`w-f>;zO{jzBji07{uag<4lsyP>j=rISuZBkNpJ7tgv`yLvT_F z=t(aM4E>B4ZUla7rw})N0pyG@E7G?4xe~(7I6ZN=!oP_lzR$y4ZmTbyOHU^kEYu~B z{VGo(z4P1{#|eQgl%D95mipQsV1rezJHF8W_?{O{5hvM%-G=FAw~4-s{5UV`ip(*;$l%2Zkeo4+Ir*sUxCDfw!6=W zpnVP4D)#@4pi6@T9$q8f#QC{b@}@?64%1NB(i8H^G@AtM!MQfKJIkju66!Nof@4mHtOX(1_vS z`9j>E(>5)6m3Vq7K$>4)504p3=jqZ^bl3GAA&6kJ&12(b2*R`cBQC!zVZ_v-)A-v& zxD~s*y-)(M*GB)i?(&Y_rgz&95@F`6Fvr5F*$*W|%X=h$@e0JTxG_I&jV)J%hK&&e zqG6@{bG;?Znqc>pK`1*1fNZp$5#^HN=%%fJE*5j9RUC!0H`@PiqMUlQ4juPc723^P zmlF7*F-{TY*37|ltiBqg%UHokR*Yi=WUU?ff4({-0%6}bGky`_|C%@{>sZ78>3}kh z72$-iSD|LMT@%1nzU{y`Lli0oII&6;V3Beo4xJ~%%IWKaxl6Vc+A!x#J$i!;^2D(Goyp*}mk`$=o-TCe z0=U?Qd4fJV2AeI+5gC@SHqyfeQ#JxLE4&iKeE5g}+Ro&%c%k$k2QUn5H*CUxE*P87 zSaDNGq(74;xc}yavv!nJ0%|!k1@=ji_u_RCkD2VFEr2Z$T$uyL23vuUX+>q6rJ#$g z(}-jW)EEP|26g4)cD5xh6m5aY9*}jCDukUpAu@iP55+Hg@asZhVgr zORJm<0uPEb%UnXzrlc!Q+b#b{j>@Q{&34aYsVkzr?Tj6Sk&s`xj-5p!c0J=&K9aE6 zs%dD(tneNIW<$OyDdNIC@+d5AhwT{8m*r7(+(xFSmV=lzHXu?)bj2&9z$}?6c^Fx4 zG&GVKUB;{J1Yw|^ro7D~jBTh)9(k~ke=)P9h+R&jl=*O3!w0vXlRVT;-#xir{ zI43IG$k7V>`^Eyj6&73ea%9w0l<62zSr2P&;gt+XOMxjF&&Nww0b~`exvVP=pOi;w zNar{gR0FttD12weLnXQn6V>>5GXGm9EPL56a$Ej@nmiH8N=v?3fD_^U?z&jK7TSz= z)Td{tV)7Iqv!Z0^Dh~=ll4Vsm74#{Jbb@6^3*)`Vzxw~qWs?m_D?G)g|Lwh(kZC9XAwT_w!b z!ZpeE*sl%R>@r=RlK3^3J^OT!^e! 
zr8OE&a*!~mYx7-A9Z!}bHnHza^L6K+i0B5|l;%x5DGGwa26#0W6`aj(Y7LM+$La zyY-^W4AD-t(8cj3%jo#~y!CYEkwbY|ta=A)QFnC5UVkBqOWKH;TRNRTT^Ov{<=n^@A6kj_|&)_`fElFAYxRDo5SNNWc=X#m zVSE7)Y)TzGM`8cX(JTt#rud=+(J|6OdH%)3BGYaeB7G%-&)Js6oJ$B>-cxGI<13M_ zHWa!YU3&R$kITR;!=et#{)e7`)Z%FUPudJPbnnX{NV{cvZ+v+LVT88eAMyQF1YJ&A z3~PY5uLf|#%=~b&3*P=3K<88`XIN`p2W??=V*ElqbPIu94436&_;iuv(0@?WTu;!P zb9q$#u{UlJ=EC93HjcZ|q}0zy@hXw<&BCfZ9=gQ@H>V15!>tfhW`P}lAvXMnDOT@X z#~vk$EJu%vmG2tn8E|M04Ib;VhM4=gCK6f zobmKzdiQM~0;OO(4UC<#;MkftP(h0|yBm+PQ<%%$oGub=w*Y{Gv}5(rq_DYUC5M@3 z5pPOBuGR_-QENN`ZH~0l;L!c>M?sw8R7MA0C3r0Vgm;QtV(llOVQ%<m zKbc2E!`>NJiuTB93$2zWYv%WUia(qIukLV0Irtd>^S~ZMOUP@4xN;`bAcj8+>4J~y zX=_Ir-xGTYbCkV03UP->C)$(@x|Vw`|9>ir+G`g=W`nOhZ6-BA`wRI;aum=NV_(dH z8jMA|WBN+~mLN7b$$o3smw_xnO}(+3Xh$h)Z(_RV=IX0Jv}XK=G{=Uo<^N+P!;N&d zNP_skL5^CNcmu*M0DTT?pfpI_{wAn%nW$a+jJKf8KjzUaGNjA@Ss;A_0}VRhB2gA2 ztxe<6{%?a=Ml&&W`duW^ZR)qvJ9%`L18j4m;e8M)7W=_dJ|IZN*3X?Cn}0~y1!N7D z)Uxd#0hzneat&7gF|?(RH4_rp=@S65N>9bb2vL1S)ZO27Ww)5cK~;&E7K$QA%>##h zM%aAI9Dmo(AzgNRhMkKHPZo#_wI+?1zJzemrB%TOdqVm=IfXq3_FV2$EiC?Unh5 z5~1X8ioc3*O3>^N&Ri7$d6Z3zp4ADuw4>PoXc<%{VK^F78LugrwJF2|Lo2Tf z;(ezaLl@=UlTyZf;UvJhRBW>zVYh}fge9fZ`RfC^aE12ioN*Q7@eP2oO3=RVKN~~4 z5~>(WzS$Hq6BRCoi}8a1Ct>TFoCip$5}ih0AOWk##~wR~}au0NdDBNq2wkHu?XPxgXjQ zHwZwULk36FwvI{{*KztEB1xAu5ss<1gRnI8ltWy;J+zC0H{c!yh@XUHb+6^)Mms>8 zlga54_$C4zfhABOJ`?REsu=ij|FP1JIl|2}J&Jfv3E?qxb_j9VPQ=No0jr|D`$1aj zjKs-@#_l`+Pb&0W_K_u|S})TV;_&`?D%DIJ!v{bjgejG=U=Tr?N*kO17Ze7rtc=bf z1f7rEtY|BAk04@lX)kUbO4x<1LIJ?joWlTISQZIQarAI#S8G{k)Qo_#!hl-|vbaf# zor?uK6Ag*7tEt^3PiL~u)%MsG+C7T(3Wecit5tW;!2N17LR8iep$S)$x@ADS+>q4j2pN0^r3qo*D4SNJhf1hcuMuYGA4m0_krM@E;z{z3 zFKY8BC=A%ZS5E<;G0g2@#!=Ycqw}E{FHQBI_>qZM6sFNFxRyIZzlxx=>85^MHVw++ zVBSs+m#^3d$VrL6LOj1OfD6e;h$q$kpv_uxN75D>3vrApI$<0y+AV>*opHKIS3j5C z@bJnMv-AOA&SgfAs0@W1j@ia(u$0L-U-(HG-C2LqW69fDEj9qaN) zI*rrD^{IwDA(R8C&^{AjUTU;T&}L*nzPMcMy&;?genyqWx;i3L?TTjY(}a-=L>z>SsKK`0le{f)71 z2ZV;Xs<>1^l~?A~uXI9~eXhasz*QHt*>6B0g5?sIiiGj)psiwqB0);tO3Qpw;cT#A 
zMS&#@vRbCwamy;ANkQViKkwX_*F!jy0JG)aM3N#ez$`b5APlfAL8aTyol$646Kj2T z$MU_dWy#_C!%38P%!vy`S_0`RG0w_+K`gTg9a51#Ccw>!jTJg;4h-hzkaStPZ*;#P zr(1zlRP$jui7|4la(GU%S&4l`I!XFAW>LlX{eK{eps{Aa5yUblZLH%OqP_%7Ow;@5 zd3lT(JvxcLSb({x=79Noky!)Q{bNn|*Z&y|p-q|)Pbf(D_RYRGj)pQ*Qs?3VGFChW z$Q(>cs)GM30j%>l&>40tG5UHlo7KN$bmne2CLYKCW@uj;WI-gEFSsNwI)Px;_8vZ3 zY27Bo+)5;AseIsxVCG=7+`q<#Cjrpx&@SObPlR(%PIylL8#HZ2Cs02s>@RWJjX0M( z8NjT__1J2sK*CDJl=$XUf@XzR8T1NQ@iY*#!tC3~<_y1v!kl1vs`6{k0C4WyIP8tv zM0-x^^N*z#x7L~YcWgS@D(o#lJx32dJS~DcV2G!5&!1(A#k?Is9)f(auR#)fl&&o2O9I8!wfp3B9rB`TR!-QYVo}4ib=Mz_tCDCpv2a?d=P$ zWyCmLkYz)o)LLTS3qTMuFHyxh7ZFB?H65{@g20blQHZk@_DAge*n-4;LYxz-u424! zDKwQKGiAo`{+B_jR%Ob2xd<1_1~C{FJ|(Pg=i12nEscmgIdxbO z*AsTB28`gz`@2gt4Y)fN|GU$fT!Vjif-V4c#RCQDM)Sm*K+TW_5}n0Q5#(mt&UX7E zk06CjaGJ zK;wfJ>Nkt%O9X$jOtwos0p#*a8%Tw%Da1+a-BaMJG67j_a6ozNQ;_Ojx|Dp82&afu z3UgOmv_9kiTjnxpJSc(&-qY9;oBfNhYY1EL`=5n!#<&mC2sCXWhy{T@N6&+(L{AB| zAQ;yRvBe9}E_D;Tje@oVe|<4e3&HT9b(|+8TUqIBs!2WwHGs}|i9g&%TWH64SR&2c zUIy%|XWgRbs*NB^p`62#rr6^Z0B>^oTr)U9;Sw-UfW+6YnW&fL>4-NVT*7+hdK&En zA(mt{bJQkpLL!_d_!|5+Vdtj{-<*sHaJg%nwdD~_J{4Hwzd&IF2#%ZXeg{|~H5P3FeM|_J2n6K;Z zQ0b2N021cq+Pm^Y$3q#}9p8v>Nowd}+-m&DAJQ}RX54WfL&7_+53+7O_mey}7pa&d z{`eFCR%zw5YQ2?n&}Sgz#M?M=p2B7l(+LbR8slz3&Tm>$WIQ5(4qQDU3ZD~nlk(h= zj&6Px>S`!s`+e{ij$b)3ZdcHa3%__=q@ZRV6z24qzA4~B0es%lgwKxdnDZs1Wh*@+ zW5f<$1RpFWoPjFtEz#}L?*DCh z{#R$_t)mCBFz&J9Lxr7cD{evlEz%{M+S0{RjH&gE6~MB!h%R{A#OXqzmT4U?SmP^4 zfGW3zBcfAQgvbVAU47=5pcronbY8q?8>g?7lN0HM7l?F`nP?h&aNG2(AOteHBHmNb z*~x4TQ>2yi=vZXR5pyiF3XnMHW{W&QgfqkTFg_M(hKf9@z2I8Ca@VzL9%n2^(>IAk zMvO~w|7s5FU6eN!wg8eQgO)~lKN6a?_CfLZnowky?s1~#UMu^Y;cHigT`)Crj3VN^ z0CH_HNVdoSi6&=xs9C5mg&w$7e4D}YKDhTfz%D>#?&9WkfvzqbVa`m~Ajhohv=-cm zDr7yUHK1RNTc4oiQO5aKb@mq3y8xI+lq5`o zyZ%oE9zTm01emGv%;$cbu$w>Xd-D3M-JzXT8ILV8n4loJU!W_sttqY_31!Y@0Kv!Csf9x{m-??%3*hw-(B2;Upt=nF8tNAjdN4x_4YE z&@%Gm7b{E!$?~adD@qwrHVw%6@bWP-cu|1k@V-SdYOL5NPe_YRW`!3^2=}}+@3-vh zI0!%{R3N_zu?W}+W8p&L>+KJgwS7IZwYz8p&hnMVy$*mtAhqM-eg$1TIbHtbAV?>L 
zYiFNyo^dc3&6LGq+*{`_%gSOzylz`EpbU!GWHmv#oXM>j-0dz1 zS)@xXD#FAJU}@2WKVKUUy z?I4VZ@g!b|-8&&H|Atlu&mPf;#2uog3yKP;sEPsI1kG_;5HkOk3o^&mC>3Y-JXSvo)-J*%v)6#7&JAGnUJz^a5z4p?h9DmvA-?(CRo8ZH#jd12ea^ z9C^mjKS7(RvB|qwZ|o*WJyxZw5#co0^WZq_|1ove;dND68^>J-7Vc2Ag|@V`*g%>l zZQ7;@OqMf1p-IYaVw5KCCoFI%ZUSdfSB!N9oO%wl z8Tyww9Kx}&@bx0?RdM!2o8{!L>j(%ZS%$upt2swPA@gE#rR6Lk=aJ2<;c@;^8BP>d z?hz(EZ3wXSXuB~h7_S|Z`KwjeKI2%hdjQUF()*Yfi*)w-c@%~kwc2qGLMqlP&%sUh zTBX}{*YTOxx<1zXJ@JSbrwp^#o$=HOnU8#~Nfy_aI}wKZGI~VplR9gs#kMEGrXAHl z>>@<^o6&9EM{b9hBlGcxu#-nSQcJr3WSF#lu?}H?(R^s(Q(#G`tZ~JO0tDFFmWy>y z#qA=H4jZ>+xWp&U5>FISkoXy=Lpjtu8&B;r5mt;##rk7n$tBlQCY%9B;4Bqlw%o*; zyk-2rnE;2%W_7Zk@w+J3ES8J0+dqKL+*FwYsyi#gOikP3bYX(dV=m+~xgB+qY%Ps8 zI2+DYXCe#Gffy~qa%}2~0U=M8^k5PTQlGuvBc3vtP!pfXX>nHY3=6uC>zo7QG)?a$ zOCw5ng!V(eD`9LWHphR3$uieDct`nMfaBk2Yt&gm5<45P*I$6!NlofnEN~%2+Q?AB zntPGI%t_TTmb}=tz;8wmR=T{r4U ztg>$=l8jHQiakpKke;rlI9~*bl|J(5OYLubMZ6+sI!dP2yYsQaWsr`&p-ZQAXNho8 zcWTZrZh6iPAybbQNyIjyAyT*k>-l;j@pbk$<^sVVodMf9Uz*U{OxAn)=N6MT83rJKQ^7 zm;H_5D^~8?Tn|BP-3$nu+<@B=u-hi(_nXByG_2(?Lz{b}y|}ESekcgL1B<$ytoYL^xMmO2|j^{b1tBB`Te> z9?1T!rB{o&!j53m5u3$p{{%XMWdD`$Y94}e1el1Rd+&KT^Hb!*lyZN&^<`qK%>`zm$D}c78tt?Xs_= zU+A1;1p(xgW6b(PZlqaT5##=a)A>gGm&wONuiD@K4DbuQhMOoGbQ6Cm0Rqs&STp5z zHD|I-Z;&iAFBR)F)28WV;+8Z38QzJ}uk*L1hf+;j9Q+2@1u}ld_&DUv?CXXusVT&X zZ^1ZiMK-H?<604(rV!MaZ#o%T3nUpA%zopuWfs z=!x%2ObfP*5X(Qmy$o4e$e-juKnG8EGttbnoC4G&}s^I zwf_KJ!3Iq>(nM|-oAy%l{hG;sEo!jNCHuSImpo}^p#8m5f54NoQ{LXyhrwnMX^~nV zeBB>m$e?WEZShm~Z=H7Gi*eP@SpYiTrQ;JpSuu3dwq6oieW|}``U$C}DO9RJ_U6V<0Z(K)+HT$+1!fpmfK!4kCx z-n5;x#IN{r*7~`NK#5d}ab0AJ*&7S}FAImBFVTOW{09{A_cKA#4s85G1hpY~y>R-U z0GACFHO>~rd@@JECeNE9O zY^e+`K6%p?&Qf7-o|L~N!XG7bgv%ENJ7JR<3f>iT7|9hC=@b357@Wm{DwaCJQy0%Z z8a;eSTvvhY!10lnnK5D6?9<7V65+ozqsD#ojBz4CYlzil6_@UZ_)3Y##oYm0(C(a**|afJMrL zj9CqriqiklaBb%)9sWl&8n{T>E`lVLGae>?Xi0@Jp%?Y+=trw$cs6%)WB96Iw^HhJ zX33&Pv;{tnlZuZ7-N1R>n(H(g$0}EcbT`)66Augd3r4XxX^rg9%Jz7EO%Q!WS$$Nm zg_HKz#suspxt%w}p4?w3t_@{TSF@2A_X*=2OZ2hSI(ExI+YxfRxYaWw*yF|cOLXVZ 
z>0K9`c1kSRnG91Wwq378Ca*~7Y<=zZA)WMERAhb=bkN;g;bir;@CGmz(%@AG4O?r9 zwnrvwsEx&RF-}8bxca@)hpFyG(pN09A>I(jux+A6PJbbtmv}=Mp;ERacg2lBj#&K- zwht1?PZ!PiTW=Z556Ql-mf4>dga<{+i43)O^^KuC8nR8@$%B-Mm5LZA-sM-{5U9&~@J*|kA&4JNSDA)cb zY}VxCQ4y9QZ%pgtc;#W)SDf5ocAtl`L}{pLiUqgCNdnj|oUP@=R>E#{>CRd)9!mXC z=h3|Dh%2|k4~f*lXiq#PL_tm55s1 zNsk9aILjC|iB(1dUFjH;(lzI%C6YxD%4e!m-6F};i?_$z(*L=3u5`-cY}_p}6%1QI z2xjR~FiuROTNIa-2#$>^0(~cVQh8X9cJ!Bay%E!Cn$Ug?L z;@Gjc9nDnj?Z!PK9BM6>jAZBQLs2gFsk_JY?K2O&KiVpj2KS3{>e;wdmp*z2drBr` zdhL23Zru^eF<~;TbyiIMJ4D)KnM9yOg0FK#Iw)+{%2wMQV*DK^`J=XsaXXnn2NgY9 zYVM^XoJS6S^NFtI#yi6~&Ac79uaKj%2Aan3CCVUOn3GY=x~BvuqKkUVIR16r@6=-N zs&X(wL&GKx9FN;cs!X)8uM$BY&b+H9?n*&&r~IvwSC+MHQ|SI0V$jn~_%(qOrj z)jkQg^HI(9HSRWcMvpiuPFqKuBBx7+8>E^%#IKY6HB;mGdo%#;v8$`H12qO zF6=%m->S)>jM)@pr5VsJRu;ZmtuMPPjEjT{lWa@p<1~>Zic^w4CLwR_miggEZ1P?e zZz1e%KeYD|BZtS6f(U5L$f(#8rv*eVlVM_xC<=x-4J+Nfvj5Sx(LIJZa&IV?WEGXI zJ-*l{BRG_1O?1n?5YDuwfL*$ke$;+&77u$k*sY_bVPrc8>*y1+&;-A?sH}s)s)r>1 zYy+f4R%m9Q;)O)cY0mi99&4TV>#U=<#HR;sTD_N&DLp#Q5KM5JtE!n3s z8(xf4L^yP5)3Y}|rWMBVPu2aPXM~XvE5O#6zCVC!&OvJIB{v@R9-|U|sIY_LjS?+Y zt`{@dt~wL7CJr%6-V$4`y|G>!-Ymm{tmi1O7sL@!IZbcncHGX#q^_1ujjfM~N<~bg z(!&+>X3vYYU}z!qz#o=~@p|?)w(r1)t93ae)-s(yCpoz%!~Tbuv}#i+&|lNwzI-<< z)qnI*ZCIAbyCTye8f~2&c=uLA!Qi56LiHG`gk2No*JuIuy5qW+_eW8-EytYhN}Wr0M4m zhjyZ8vg4tng1ban1QSuw*x?9Z+Q#*SL88kwQKbFs%GBa0Xdzh))pQs z7IbWWBq+8#5r&lYbu`5MlW>#d)?T*7E;<=t$<__t-ajS#y01HV*Lx>1_SDxcrFLr6 zX;5i9o5k!$M`Oo`vo}8-!!Cv>=aQg$<1Aq(bOMvCO-=`rton*+ac}A#HezH9Jp-O3 z%1}rA{TzVHi0216Z!H(Wzp8w3%efAMhfw3`^ZW-(+tzqT0P&0+5_8VSX?c`MIjEn5 z0GPB?WzXTr3&4(p{INWkbs+@BfwBgj>)Ar~&g`u-eh{|z2?f?evGzp}Bq>o#IZwc{ z;qtq$PFyd}6jE18!xDaq&wsT+M7c#$5^~Tolb(;|>AGN>gQveuK;G zyCGeKZzJq{qJ<;vfZEGpTqNCCT9RtaM^`{OC0JG@Ep+pYSeIOx(Rv<;Ie5|yzAPFk zj;o4=uf|DX>7Dl7=HgDv1eMT%Xt@T$d1kC;sz#89h;!sD5!oEBG$eUj4UZDQ z?=F4un4DP=qiE;-o0V_1_vC%)$pVghSETl7`X8%>_Q>7Fe`%|wNg=Jm?J(}tx*BA3 zOT*6TcV^x={l>a{oG*qnOf8Gs?!xK%G?4?sxaaN+V>hHXZoDV^*oPbOofv;XPhNS|XJ53R)O*HYx#W?+w=yf-~&z`qoBs=7R%yS3pLl(+&#W?7EVgFn_`5;)8 
zS(E>F1uQKY;L)X833tnfvTs`S;_Ab~X}!|Xi~MZ#2poyv3ZL$4?I}o#QNE0u<#v_R z_7=vQV(WQOX|ZIt?h5hzW6*WfCU&`5k@MV$Qv zKAgbugVu?be=-ZDQH#5!o&u+>g@q%h=Ou#s0`y|fh;Y0Zq$tJ@!p=vHo~xq|n)(cs zlR^iD)|NHgH+&WvVIUZGv|^s1qiUe9EX1cGsQbOiIKBJxAcvb=`{5?VrJ`JRsG%0ANl zuXc9zei!A$*0S}88Q53-DXYgA^%`z!3&xf5aqR1v&vuMLagpIo2sdd?>nnuFgOY0YAsDUMW?)mOxjw{RyFk=8RqfRK13m&){D`d4CW^s=}4D=oL=G~#Iy z77iC|cuhPue8*q2L>ONaV}$%ada0_bzm}MORWt6|yqg883(37P{5=R{R8<*|%IQj( z?Dcv-dJY< zhW7_WIGTn6rc0V){Esk*N-7xV$nDD88aw{vKl+*EH^`l)PHV-E*!|~>Y3Li!s^CO1 z#7x&nRs3qck#?Dj@xNa{PJFI`x}fVfm;D#ciRWH3tB>~B;8&=$rlBx|b^;yRsUj_h zaZIO<`VE-Y6b$*YJ74yD<^g@~DEnXLfx5uDHs**S1ijirx3$b4hODL~`FsBaB7kPp z_7+}n>%ncuWWvyy+8?N02tLcd8s7tAoO->NBWmS^p`6fKRBK-lB;1x?zCOd z8?n*57{uTZp&jyMQPD5f(fCepT(UR}LgnEb4nF1yIT#GlQUR7;0)jYZ>Wci5*~jBs zi!zCJ|57j%r>+deeoJS6=cH)GA-$NvipVA{;$WB>y9_K{#`Nl#D(B#lq;4>4f1cIC zKFh*88CV3Nhj@262$C`H3U=6+$L%u2H*HC~)m0+>1scp$kB?V`u&m0s`~SnrKvJqn zM>MShuoR|uC#rI1i*XZToNS1%NtFc>;tYD^4xsYni;{!!v4+^B3!u{7k#UK-?iYJ zhg?1}#B{S5`=Y68xy_Pv?TkyBXD;6rw~Nc}n`!qmImPXaE!N==gv`B%I7x079aowu z%|zq!NMP z|HL6fR>v~Z)xROoeazOQ;#(m;Zek&bzTA9uXun zV}``Rqj09f1j8U}6k?wvk{mYY(6KySz-eQ0LfP8A0@UE^xp%Tzcg=~;eOdH>=RB)XJ~A)J;>6;@H#?>uf#YwHtyot9e@@t zIzf#sTvz#ONBcOFrAnKSEvq68*PB28#oZJOnXy@$zNz9+RTjJU>2S7C_XDDBH z9MDm;(2eO_V4Nt#gUSiFumzBYrqpxB+dY=Cb@vL~7TY8lIEtkz{U_!(Vn4YZDATwC z#scD0QTETKCmN`~On{(}rdGv}i8x(IQVQYdnX0x}RrUdg%Z#4QlOZgd@r;)D2s(}& zFQsdHbW(LjaVC`*syt^Z6nUYiBZ)Ntmrg?;9sTvfu1qr-7*o?t4Yjb2r>wIx?iA!B zEMBSl#Q&y&kuTxrdt+D~D9tHTPHjYAC5ns<*(_G8&;FH;sBCdIi*jBQP4}ci-YU`w zn;Dj;iT`w-Fw!wD=t07lMrJNS|L~POUJ;C71l=!&^Q zB%-`NZkE$oV}92W9}7EC#aM!4wbqiXk(S*TF6&xYdL3XE$%ieAUroVE~*Oy+a<- zY3}W7hz>!dR#_jX%V}xXChx=d#+#y?WHfBslO6733UHQsWluaPM0q@RITM^F+)j3D zEoTfLG=o(w#z%PzPN$7Ql(Op-k&2E!?BZ6Ss+YE5R&O&v6SFh66>y;3ok4A(9qd%f z?0$h6YgLKTY+Z(4nnV#EFBbR4S{+#sXpE1I9fci4u5%VoK2xo_pqx?Z&?KrkHR2pI zDtR1K2|EwDymVuRc0&*eC$e0&6L6q1Dx#CPgL2TuSU zacwWy!DGCOHvL+On4)8!nA?XB%WN9Ga@EWXQU_v+AgO6(<1!8sNL3egE4!6r4GoH4 z^}{=rI?~jM)wQ#soDLrA<^`4mGn~A^v(-Uh!tIqB+cyHPc<2!7O(157EeAt7<+>5Y 
zjW`h$d|73DB&R=Mxzu14>^}~LqzdW0pfyGv4sy22Te@P^BY?!mjlf(Sd87gLqB%w% z1#sfacnFy8H%=DiZ>y@R;sZgaq?eL!i6xH7K9Rvc^&o4Os60B~GEBVjvG^iN>_2kf z@i+j9pH>^!&B5u!>kx>=8=3n_B%XO#q%$@<0pMb7&-H6+blr(CPC^yta0>CA2*R6K z6C0g`b8uOSq5xG*dzTYMy9BT_REYe^{$fH|Tqh@z(k5qX{3}I%%y6kw`H#F~TuMp= z8wm1N(w?!eTxn&-4hD%RMw>V%tFD!DjfGFkJn1S(ze>05OgJhm%gyMNn{2Yb$`xAy z3xWwP!-|qU^&eRXY%Z_}q}}~4&e3=-&~O%nJGjJ7maM-N)^sd2SY% z*We75jpxBB^T}%@3ta#ryI2*M_J6scg~qdlm=qFL8L}>qs|3i1cJ(&65VxhF@i0-J z*jJpZ%``fy_bviDtbs0G;F)qUIL$7{f8LiLc?l#D^vbU0xdQG4YwF@kA?G@IOg(ND zkyZWpyEOCCz~*}~CSDHVIN2HLh(=*TM2tKke5w$UaR}BHR|$|Cu8_p?SK@YV z*kkGFR-@iRoZAHpxnuBH=Cov2he z*u48#&{?hQi~k5YH2PX*u4~POa8}t7(_Eyg#4v%BRh=#|1R<3_47OhbB5lJ*#nW;+ zv^rj)#rE(GS!mKS;oJRhgdw$$T}%VW-3)MAw%#^w60$7P#aS`l7KitK>6k5k3xJrm z9vXYeX>qd(L^DO$$>u;+)p_Y#OFoz&Ma6AkLh7W-#)Y>792MH?%*puR#Jk|sy(7j4 z@6JB1W)m>hxF_>Z=-$z}R-7}-Ng0FA4pFu9Vlb$CFtV*>E=YC>%Met&gka^xulGOSkK{e+`X9WM(IWozl>t@uM6DXOf9MW4++ z9nYL*W5L1NtbK=@MIf+T@^st-CEpATvF&sIlQmI8Ps|qbx7@kd zvO@sX!g0a6l3LA;ho~nKJF`{;lGP0HEz9zlM`^!*RTZrMKGJQyL?^f8vGeI1r|(ZEu_<_+8yLIdMFE&o_CZW0@fjyK)}IZ@d6 z(HTv|+fa^pqUy{N?|?~UgZhnC1^o4>k+Es|7t1=PobP7eq4rdW(}bO$vf8RR={-ZX z)1(sF{63IGR!psrf6MJ1Ax_Suy<`3D_~PIEVNo|FSCbe20K#dTpqbk255bOO0_&XL z1=9-Kt*z;sK7w%eWPwEk*PEhJiD35=>?!-Q7_vc{e_^)PQi*ddF7>rjMUFw8KQnBvIM{%Zx(?6hF9BpI#0_V+Wj%KPUx0B3@^0EIf{ z+l*+y22x9`@*RZ5MmCdE{3)U=>1lM#{}N2|-N}nS63$!VELrJfVZW&Rdl=W2y0%#P zKOl;{n|^M%+)jh`c#5&W4-n3H69#X2)t-skKui+tF!$%(Y*b>76hn74y*#dxGp&q6 z<2@nE7|RreSn|j0Q#Ryj?J-P@^S}xsZYp`8`_S3foni?4#rxw0Io-420JxL7jYfzHT?`MAlDwOmy;wPKv^woWc6 zC)hF5-z3K;)bus~n}w`;#fnN6WdAPKVw^VbetPKLMUaCj<*~!BINf=nqu9dUHob6* zNHTypI%1{Y0G7u%Bs=;Kpv#nwpu45Dk%#BRr8R+mnNo>sL=glUmF;nx070-VqlwY< zKY>mU);4Ghi!U%L`Jac}v@rQp&{3qS*Twi;9O3oNWHrCaLI&MhY3v~29LS~u8b#FJ z7ercIoMvJpe-W@#xJ#e3C)k=?9Lf?zIY6^r9pB6qYs0=An;=?3^8v{R`X`+{Bo;nQ z|8ogwi)Yi3o5eay0!gR#TH{v%r-IIohv$|8J7eWJh9eqo+$+w0Mh-^>a*L(GuFG`A zvguzZh9DEAtdEw-{%){c#dVg4u{5hu?Q4rCMWpSiJYE-a&O7Ga+1SFM`TL4cj#5=J z{`NPpv(QG@K78fu3!ThKSA0KF)XhHjDL#@o~P~h#;{X{-l7z<9rx7#up`~g%O!5 
zwB%Y@c#~;9662quEc|XHPeUxm7Hh*<5*=K`iDAOTDJu%E%k6x&W3n_KZ;5ccOrG_^ z;8$XhG^;r}$XC|^rJVs2y8&NYIAUE`N5Q#kYqYPIh0@f6j(;)US|5hYp*lpaH^}}* z;Zt%Lx*?23qqT|@97k;gj*!iDHW^ z*etv`&^h6COM1&PF>9mcjh--#tU*V;V=b5nk1LZoQH`mp9#BL+t92r$7AGZoyAUuwx zU^g2HLxCll8e;`4of)i3w=cm4+Qxxf^RKgl9W4s`P7yAY(k!MBQ$}U}YISiX%0_4Y zva@)CUt*h#&NZS#roAZWQKB6U`rn*D^^MK^(S|iyN&GFmEwshWZb}#PTY8{_MLL7@ zTAIB6Z95o?1#MRZqa8?bjv_}bWFN3Z=phB(so5SME?(kH29o1M`6E`rbk-;C05Ldh z^$x1QohKH7Cgz(z67V-=MVZ9hXh#SaNb&^79>PvlWT}^zJ z^6V$%;^x0+!6av3|J*4HhT}9DC7!r5%wLSUvsWkyA>RY_kE7(8DxGNjPmCr3_r)<~ zIO#NaH8LI=hugJb7v6PVV?5AF=L$e~Y$VLzDi9_gn+aNCwLM+?7qG#0Nd=U1&xSiT zn{N=|s4=&Yr^Pl_!nhDQjpm?9HyW-L>7e*7*|NS@lq0WJsjfT$?4T2K+p)C>d&ue1 z;KVAh^RKO5?HcLOm3hV9`o~c=qqRu!(m{@!l##m1$8wvtQ6&;#Xa{FN?zG@gu!=n= zg9p=y@pr8=N|ZDWSOsBE311W@n%af@#tfo)c~~Q)nF2x z+G4yemb6e6+T++pf1x!tm3hUyA%)t7g3-7pLn;#L0`c!=pc9@LTBjB7(gGzxa43mU zW+BI4jSj+p1zpX`wSf@7wL+-M-WxmYpLrd}!tuBqLb`F3|&*=8r^gEa-CYX2fRG`T;Rc3r4v)>gGkNUMP}Um{o{EA8zu+8FzDR zI1`Z8DGk#zOBlVT#+cZjg~d#!Rj2B^iE=`<3y-BvrUdgvTFN=i9b#Zs7VvcG-aIah z)Th_S4g;Cr7V2?N>?*>EDC&;PQnSIXGL9jcZ!U09=8G$11JNpMIqc6i-r<5471MSd zx1oP>h(A%q(=c(mu>D|FinMcd$fVG|~al%@LGEl0IS2`Szj!*g!+X=cX zw;2_03ZjMtBYODhZy<=!<1=;(U8*y8Dp zOOLk)-EG0n!e@e15X=EHyFC%$jA8qLwds)}oF4o)#(gJ&Jz?V1y|tLCtF3Wz=8c=5 zow3R(VAVi0kybr5b)y0~OKyK&o*q3(*zcSU?Mjv^z)+lxXFxjT)G^)>jF-iz8&Tc; zKLMiX<9&qkGjThW{ar2XG4vnyDD|*-S^$sDon7&Na;Gf}GY4cDtGj3U|2o8brrc@% zsn^(MI6EWgGf34BA}n6+-Nw1+WdEb?M(?)Wxh0~NjhQH%2jRRieq+Z<`v;eZBux!0 z$A6cbw(`;u(R@Bmm4#NshX}xzO^*vxUMHLJoPaaS9!XbxD(ro!(xY+;;MfcJ!vgf{ zK=FeZ!q<#EPP!P7c4DbHz)MA?r7e3SYh03f;nD;Bbz|&zse|AomaRGg$B@?*vCg

1IoaV{Q10NSe7G1NL8jQNvKu?B6QNa1lGrQD2;bQke zSwIue=WD}W6ogZV3MC3=B@ajtksMJXIAo-HY`d0wh1 ze8(ej?sk2!?LD^@=Gp6qn6z+t7UN=ZP9O7b%qkuED1_73A*(kJK9+f?$3{_G{8xkv zS9@qYbXtrR9*1+nHT2R}-w;EEV3(3Uto8|z1EnFi#X?VkZ4QQWA9Xd1)(pgK3f-W; z9275g@H7zzV%f))YPp{YyZ(>siOrt@IRn!$@W@lMBB%uHDDpg`0Mg~UaGdk3|5*RS z!!C^DpYtE+V#IIH<4#M>=R>Ez1mk*z3RArHGLRUt&dCFU(V#jTpu-M`8Vvh z6(z(Gayur*w7z&y*y$_y^;Bk!Z^Jou-s4H`2o&CdvKXuC;uayNrFL3P{37TuXX@6} zqVIx9TMuV=2gvQVnrL$-r!GIe2TA`lY)s5}A14LCQPx1L@Na+v+EoV|`+Wc=R%&Fi zBc2oDU-dxh%=h}p|FZSVX7}+zL|VqE{<7STl*LJENacsr1BVV#`7s{ShNgi&dPKNL zSh{NypJceXl|JBJVTZ~t1!k+B{xl1Nmy(j+`y)~K zQO89kF2+uuL6Aq3k%{6rA;h7@d@S=hz}cvzo^&PF_|FyT0;^_clbh{WQ~VOn2}A9$ z5Vs1a%|dp$dCXLdB$>Qi^7~gHWQBR)SngZglttaNSm--DtEa>#ayo>%9J_4;@t^Nu z9EZ;GW8MD)6UJ07kUsk(Zr6^j$Hc2b-kT`<%iaV7!jeDnkL%6IEr&WM5mhl9ok7vSC1#m&w?zcy6yH0(4{t2H|v;m9aW+jUT6dD zasIFPazS;s&c^)ZPQO8r0Vxi&#)#iR6oP6_%n?YdN~=yZt`p$`@UoO7bf z)KnIFwNFJlE$qZFLGO)^{>=Pg`YV@bG~`tYkq%HB1&Mux;R}pTN@aR&OqSEZNVi@) zH?u^!bhsj_!}SBjIF+m+sa8k-<*ObHP?PIr7cBXa1(eu+p)8nYsgC8NSBwjY1B&=o z&}GgQh+@ny{oUHaHhl`&f711srbY0b=2x5jEX;aEQMsnn#>sLfHJi!PxWxdDr-9M; zi4tUX8@~z?5V_UCj{4~f7Kd^T8$CMemjJmklup!$ZddX4#oYH1p|r;8n03UGV5hHw zJ?CcDTi-2}eNZzw)>;~5AvWvwR9qs$UtriGF)X$7G8x4LyIp#1Wujaf7$xACCR)Te zX1&=OM+*;@a9=)|;*44r&aDKa4v7idZr?I~TmSg$CZ0f!1DDG%+fds?=L%V9+&rTO z(L6pAm*$oB8b_=Ic2+tP^L@vPz(W)B3N38*2(S~)R;(-)zExsc`q)e6okTGf=cZf7 zMXP`;jGlSdBsUm-Ts8ABeiBbAfGAlKem4-0tZNVBrp0S=x-@wxr!P@$-fBHKdc$&h z#tqlU?W|LG718TrkPLTGdSd4d04}NtI?~%;*nNkd%hFy;A^JrUUU$nttg<0)XKx%^ zziSCvj!ec`wfDr;qS6{@J3w>ASO#>KoC+kjwO;v)KMv#ga<7f>M?6}tz9=`TOl-lu zEZ`uEg;|`##;ZeM(!POd&N>?d5nfGwl*{RQG9B};LJEe(R163>v%5;+?FeClWz1&F zK(cAw#NJzrth~Muu=n;H+x*XM3MP4pxs=W9-k<%)lQzfgK>~Y*yu783F&`^#VK0o= zagf|3L+4BJs(@w2`i+sa$ZHl*j;exc6^{+Ie~e`g#FoPVE)xdoM6Wg$HJdR`QvbqYj)C; zM2vtSPzGF{vZWTLxS5L?dem(7k^Dbc)tO)9uy@Yw0I^Bs$;D4{yPNszAvsNL)lm*x z6MU@x#Ji)x&NCB!)_r=4v9JxK^IVwKg1P{A3dTT@XUyC#Dkl+jAnym|cJ&xf#&#GB zOiM`i)YOhh+7{bFTLMYjDaJ4{loJCC<3j20rrf-%X|@M#mj&Be!|Q*7PNZHL#E1-4 
zsIWccVE3AA@9|cQID+leJ@7qt05})rQkErS7nCUaE8YO_m~xohj;)w%Lnd>@70Y2QV{BxX=Gh)?!lb{C05+Ly_qJSw znj<$c*Hy+^Q*c@&ZD^h{;n`1&vmx_u&4P|5dApIEwUsE=gh|^zLX_o*aeI=oeRURY zavh>Kt}8L<>1(IxRxy%L19~n?P6ebgE$Gq4MEzvT8i&ictpyvpz#^PpE!-*Or|Ys0 zrBl}_Ge3=S(DclojD8@14QIeOBlTUH&UeK&yFt;uwQW+2YlU10*}Nt05$TMf5;}6r z_)>%ttsQ_Hch5f2dcD{rgCpDR0cinNQgMzDOpBhRF$NamMsdzX8Chd-^}HC0q-K1~ zFMVI#SRU){$v;HMSx;}QFW?ep541hDP5;ycF%B1SS}{(=#QAs;X|-baKe6N(hwKIE z|qcWJ|q+d=`|1Qtc=eoqnWBu>$NmK)l^WL#&e@o*;vE~ouz z*D-$W0y_=UaxH2^=XArkUT`v=8uK1@0Ho^0VYl*&i+fUd*5UP>k1p1i1=6fnnVRGD znJ{ioxjw0E=i@d}j%io!P7u>4Vg~y??F{kZx8jhUHpLe1&jMj%Zl>Bs)hsC2;+o_k ztKCI-0yBIBhf<)y`jrkb=ZnL~qepUj7YiSl`Jfhg5L&s!<*_*4E34x_i6b4l0si$t zxDnsPs(9^SoX!`kjKti)N1`mzelCc_@Ix{$bUDc`>|{|CMz!qGUMb)#jHh0A#6u!1 z7;2ZQLFeHx4w4O$&iF>ynWQwgIs)VZVj!hA()q>WM`oY2^Hg)JE9_Y6*mF z2OH>DfAK^$D#q8PP&D_9>yP3iN6Zs~dma6kFzEo|pc0|&y^g3k1|RM|%IQ_FD#2+? z{iYt9B>_ztWKSP8M;vly9FMEzcD5_EkF(!#U<;dZLwX|djVQe5I31cWC-dIT+gEX% zu;o~HK%erQJ3eD3OJl8*hgD!4Bvp_D2%cI#0m^yYW^_C$avyN5d zrWiSW>WZN!0i4xoNUl}T@@9XPGP_y?AI?V6^bNTkXuX~#{^4Y>Q>uj#GrHYRfg$L| zfyVY|6mVE9q)_oVF8wz#Vs^5C1FK`17554cCZsXmP5rX>#}3u0*?;wgPW$*oe;_tJ17kf#xD75{ZYzs{XQJ{P%NlJLl*W_Uax)P82!fg(WVQ6{F`ou;b@qjbw9# z7=MIP8Iz6nIHTl=R*2Q(SpIxGkwNBM15p%k^_apfODvgOkOh%!R1;6XD3ZkvLcAxp z6}boD3a^*i1G=Q&a7G6_3BqDjL6eN6H50_tFh?9TKLjRx_8 zfD@z3Tgv~ZqS7j(^=OQ`F(Von$ck}|2&V#(v@*r!(ziGZLT?r_bJq}j%T2HjUYj1O z<&TSU@MYSLjFWGMaOOG3;F0EAvXGkTnbmR#f1_JrE%%wMJ4XsSVBXY99(r2sHmJd3 zA2iFo-R+Q0s`Mu3&85uFyEFSXxp~Y2;xSRK%9T^%Z6SZeOoKTJ?QEU9;HVW7s^SDW z(?Y>Mq^4W5@P1RN(bKnw@n2ezR4I{T94?#(5tfX!T$)1vp| z%hKgR^w{(PprcI<#}s4n2eW{ReHg!{O#Us(`C+>adni{u1TolSVd97rWlp*wbQj8 zgQi5WQ$UfwDuh%94~c#kc0y%x6;;gE$6*{j?-KHnCYZN~v@q**KE2EnU?iQ>Ks`gi zU+4<Mj=~D{S2(@bzA_PbT#|p(fxss@NQD ziF-vj4O37JNFbkLmvx6?I<36!Nz`%EiQF!pkK>o4T>D2$vp{5H0WKl)Mx> z^n_^nD|m4^uug7?wf_YsLRN^o7ouLs|BlC^W%QIVTkoUevJ!5f*NA_-ivL8Kvt!BE z9TNQ+C*d0lxp}c2lkDzYB#Nwd7Lp~>zXXw>o*s^I*qHYms+00-VT;@%n zbJy6_t9`Ar#kh)Rn|n0BP2Yxgxi|3wHS1jV!^HVxwgWWY?)wf5X|JXL|NXAr++~Zc 
z-^cA%h*_edUkr@}KY&F}MP?7pah4Eir$5WbXCDJBG`7FnW5g$5mlL;g>E6md%?Or7 zME#ry3x}C&vMXNy8I-f4jlCSBgf@Ae6`drKm7d%rfyWJ5$*=u6$d#U5CFT%~o{1jt z1-xUI24OKy{tM9}ov&ZM#D`sU5pyI< zf0KPYgE3E=zzctyQEHD0?^ID13D?lDCRg%;tyV0nV7K$T%*%MK6@L&$qz$qiF#kUQ zr=f-}gh;0TV1~JzS{ZS4{U408!R4;jJPGaj5$Z2pUaEk%;!n_|zmsTpl$$!$#Qh`W zn@?ezS2kmQ=3iPox1+w6)4?-=(PSwupAY4>Qb+F~G}z4*60^($vH33<=^Ge2JK5pC zkc3)YAJ@p~P#X%`4&=yN6d7+PPFb=;XHw#6iM@9IPiJA>8 zw=h`sJei5^E95e*==Z=>u7j?vfqAwHM1gv2vvOe=oxIfML8n&(3Q;7G50E zoruQg^xou_Vx1S-63bFgX8s|X4Ci}f`skn~K`xHT%#sgX3g~vuydk;gkr1?YNGnP{!Fi*O=! zuBWT{o39F$PWxpGfl5HsZ?BepiIGXxJnO6u;dHl1fA!-v>>)X(RxdZOCWOnPtg}-` z4o8Ve1Vx{!~D$bhNQBEg-=LpzC`c;I}%rr`yP~UEl zF*)Xh%>UNj5XOZ_Q;<1pHnzpNC26Kh@6B3as7T9xe^j>rE=auNtK%#=iB}5l9U3(j z*(mc`*VY=B2+C`1R1U#unW5Rx9j6KN*T`Y9`^Gq3P@KSWoShu~e6dOP0ix@Q?}VLP z9f)#UxfpwI>OksdM8BMb&&el8^6l}uFkm3nLm^05<+ONKPQJoKXLGzIU@0`CJNt(q#eybryf_TED<3wnlZF0+TV@pI4V$&t z9<>#eQk0C(I|>mT=eW|IcxAXDodWk{e=L}Tp5kC6baiUz*>$c*#W*P)bRAk9UN#QOLQk&x=+W%vxKFG-7L!}P z`FOTOr6X&+B#Popp4C32957gY+v6NTCr5fk6vJENVchh2HkpVsZT(UowOZ|_ zVx7LafxHa!ej&yZW7;eohQiW8oFbn8h}&KD6>mrOL(fZgd2pZX){@YA6<*vshfeu zkPZoAhh5+X8<5@!Oy4YLOnaR0Ee_L2s z_Eov{)WJAc6*Tny>VbnPXM8$Fn2#~w8K22b{9IHiU^A&B{v*QQj-y^IF&*f@Wm}qo zb$W@RchN%bOEIoHtPZqk!GdzD8Cf`0^wlQ|I$IUo2v()JT9lJDMXK7D?h2-doOccC zUGce)|Lv5?x7=>vG+kXhK`H3)3LKN@-q*FFEJ_UAvZH+79vOi~7(?0nA}G(UyjHw> z0-S2Ducn56|1FX<&=GGmDi+ualtwa317cs)igL1~njD?N1jz|yd;C|x#m)wh_G&ug zf_<_NbQv~&C}CFgqoR9X{`ZuW#hy3h$;?}#E%#(oi_vaB7^gRTaXzjQ>EzIX7UP{9 z*eT%H(>I5wHo#eOoW#d5d7#C_Y=v*OD`elW&ua57ed6q?8`a8=xU1wTv0bItKTdAK z6ZNQp@+fb{ZBey$?#~W+K29!C{fTiTY$mj1zM2yAA;q|&6$$~eBTdJ!KOpUeG|NqF zG(Xp#k%RB*@nD2VXR4kx)AxcF7caP~4Q$o{;~YrQwHUKS@P*a_H1)d#ExAgWd4Hl4 zY{@CdTzPGbfi5^VS!U4eq9^xEPZsN_Q4x$yx`EC)#;AG7kcYKI*+*s9tk~%Qpa-#J z?vk3C!NbD2h{2^*KWP9I!2&W#C`RMYYawRJ3s2FYgw13kp)_IzU_I$eQ@uUEG9FH;U*7!R8 z3r*0*SnY8BqSVorh|zL8h>^pG#J+--YI6@qTr-88H}>gOUNuL+5H5=>P6y(s?0=d6 zWi7wH7!vX1`3ys~rN5I`H;YkyboPN1gBby@7vm^d-{fM|W5BA1Gh!b(Ek}+Tbh|eG 
zJQj-Jlikkcj>|sf9CjdX5GFI}g{Y}>fKI#?zYGE+j)zH`O6NeLAanl-Q0eO071!#n zZ=DG5WR2J1@#ZIGpLP~7&ql@AL6pNkpbum1#du5%X`fOT@1L6aFT<`Db1v+8LLoYK zq0oeHO3}P)+G${O&fvea-=uU=+04f(XTqhO5mRqA)|kjW_ZJeaFWZ#b>MVS^c44(i z=Sj&ZEY{!GGez~)>Tg6^1itKqQoAT*nj9YAcQzpHhI9u*I-0AW17}%qVK5hK2$K=E z{}99PLKcJW8Itf#&V_M{nt`eNV$3NKyb!~@@(dBKcHm7SAi&NHCMdAsTS z%p=yaVu&CflV$E`0q1WV&kM52!l}+>A{{q}9_+8*Cc=s5J~1nX3&56@jGAz$C|Tvi z4z{qSCZ+&1Wv-`+_Qy;Nxt%YpgzCZD6EDvGWZl&h=UoC$1);++)ZN5LvN}w-N5FaJ z#%{cL84w}5Prvc<>{}^J6WVKH9QA~bhKcc~Fk#Vx88WW`I4sTF4-ur^)nH@xN}Mh~ z)}pFu#h7-L{~k9k?!FqQ)96<+<1sN7*o0)oKYK2O!>(hm)5ZS@;e0gp#Yzv_FSpn`W7UTNZn5m5>qY}MqfqlL(J0A~Ta^f=j6}PN zq6C$AZjWCdfpksB9DAHL&%r-=E=j*v!2h-DHJB7)(n<=v8$O!-E8BmY3Ob*Zx$Mlv z$P(4S387lV)+MUgJHU;;mY81(r-MJm*J%Ljnhec}I z35nI703akktFoI=S~*x+7vrBIvYm;+_ak|Q_(_M67-(N!fH0cS)5ynAAxld_M3)cA z9{m)g1D3i~cU(~-${94<;|>v|ij(p7m|yz0Gf~w)=4t-5kZRe-rr(RpL^;T*yz_)* zpc26@VIQy1JcAFXMC%_tZ9=as(jiTvKhUeK=ZYE}wz_DTtk`;=&Af5HAvtrpM3h^8 zbpfMx`FLH7lOPjM`FKMFnMjP6eOvNBmB%vaYFB%nza?x==<+e}LKYsImD)*L>?IiL z4dVw2HnEwYlZu@-rZ7B5A<8YFAX}BQUje%!kLMozVE?-DtI%o~n$7)Pz~8V(REVcu z16wTg6%4~VA^WaG)?pGty)-&cujA7R;F>s_eY|`w%2}ps=EF5aC{KyO#Z84ASB-Lf zjxaqy8-3K(a=TSb;fDXSf({%jWsF2v+Ij=Z67E1>mG8HEGy7)5RwHBYx3X_oDd=Kp zEd34?F{)v4hE=(f*0z*;qG;BEgX z^HfbPZxwW$95{CM5XJHzXB0P8(FM!L`JxD!%A$>>hXoyRxia^Iu(H>`b1;Rd{nB_}fEvblG-_CE@&(AHkev0? 
z!$iF}yffb{#t8yWUOgAw;yqzUHk;EWmXTj(Auw+6MhjW|+gEU^#Zsq>)xHLiVU3w= zS|6W662QLQ%s0?3W2}|v_QCSs!8qoLg-&|G2Sm7tFtib?eGha> z+0Jc?3Bn|znC#SEE#&Z04e)EkxU?Aad10>*TmO)G%H_HL!JI;jJtZqZ8vIWG2jz4$ zGV90miU|9$;nhO?^dl5CtC?Y==BMm$X2vp}7u7#Qxi%%$wOyx=@v>M=Tv#fTi>#KK*atx`#(9YQTH#mnnV#Jm) z^0NFwxE&>9pKSU!$6ez51ydyLm~Fo>jD^8FRqb)2a4KXb zHmvWT7ekB_nRhR{NalOm0Ofh1utyzDJhV%NW`{`2vnj_@S#j8+5KakG>1z@@Xorg< z-o8{J zan$nmOi^jC^NtmOPFsJW6=nA)M5M<#U97Vkw0>6I{dgt*Aln?3#d<5_woo|k(4f9b zz>uOU9uMt|V}%?l{cQs$^s7LS#HJjlCcL*Tmm#?N;}r71zSVGq*xw_qB1S6(QMHh*$o?^s!aYz_~Df9C!_@u8jo!Nl#ycW(uE_Jg~Zp zUj<25?S$BRJ)9N^Z%$z-QrH=mrTe1hQcsCeV(tcf;FL_$TIiOIz|MtUV4|yCXh;@IT`v|Z zDbamJIcdy%>E=gl3_;e$Pmf#WbfRS8vWXEoze(o3dERAYtwn?*K_yTR6V-2;eZ>H* z*P&w%QTD`rVjIG1l$@k1)N{hakm{BSBOZZrk>?qVV9rI4mo@uh^NIN2$~Gr zC#_g~imkTH!ogfNGrbE$49;lKAM9+_p3OyKsfZ0!niaOPyJl)dtT#OSyt%!LNBJg- zaL=%7Sv)4>Y2e^ABAFm- zx#ynj7c96fq@|0hIpP;~qU&=5Ivi-(4u(Qy{@xu6Zx65#CoubEwJAf{7m9W39LN2W zRd&e2NbG=+aWNKNWqBMRoGtPDgs_sqKjL$v-!Ej1UBd37KR}P!Fr^xMK>R99#;&lG_DQbCF`=Un7&iq%Zs3VUOMc> zn`2D+FLMB98c24!2Dq@i@iuw656ev^XdEuI%gTpb1V z)(%=vC2e%Q-JuabN9oM1u-vhy!=XM*V=mme7tnI9)}C@4 zE&_?siYU;%0ZuR4t2z>DjIrYU1%iY)a9;=)FsB)PasPf`#+ZuwxFDDPg>qqAd{e?4 zVqgzWcu=Z-`zRk*HbA<)j~N;h^B_kzHQA0iK?Idn_qKnK+v)2-A(UsS+v6v3WT2Ci zjkvr3a0xY_eRPGOrIt&~_{6nhiXdXVW$iOeg6<5pQE;n zpXE%;ReJ+^gQLF%&I#ANwh&wFZz8pf)EPU8ASc6kk>I)XPl^Sl2o??7wE0g?KVq)j zB(kV?PM#J>^T7LNjGW(yA(NWL6VD;-Ku1i&pcF3>VIkBqYJ4o{04k+oRfxknV91vi z-97QO&|rhah5<&WsW%ID!IB0Y=fvg$4x*RUBdvVK0WgjqBRu7vSisd3uk6U%JnKW2841_Xs$_?9BA0 zjq`w+X$UMM<2L~}vKfpSYP{?9LpcRVoGm=zG=bdZUwszPWj%aoY?`97`l_0#F-yqB zog5msQFpEq=iqq!vJhJifF0vRCW=zutPn+cH)6f*!`XJD!W54kh}$t&Aj-850=iKo z55dwe6{Ad~=A;wj`qKaG<#C}(=Mgs@%KuK1CYWqMwZ=!{oFsOs2V$|q!NlLhmgm`W zTcoUs+2D_wqcYzd^R!2cuv0}_jc<BlA(Fge@Wv%Z!?s zB_}neU;FP@l{}*VMYr~g2uD#?AEQnH5vDAV?jSecwdj$Qvjr@&7ODSVk^YJ8nbuh7 zMEgXUtQc#Y1R&-KwJ}0YC%BfG`VA)ook-^45c{11LEKhCYZY=yF-2=Z&9N9~i*pFo z%v?S#!EsD2=L?cT?S916Qw^w4#kfBOoPV{%o~QATh#3&xLQujGFAAei+n<4Dg>4rYP!FOa^IV1!C2RE(W^3# 
z-F>LcQafjgvUsPk!aq*Xk<))+vM9a~L*nVv=mW2|Z#og(hwPjSq-wBeVCQ(FYe1GR z|F&aJ1B3M?Zt$IiSmD~tU!rfSDeCItEEQcd<5foy&S`&1}HVB(?qyf8rb*e6z;HFVH`!WXq$IM6E$yz+hEBPw~w1+=M=Ew;psmC zwKr7dKa$(u(s%XB04yWPuD8QFj=Z)I7+XpV2Zfwk#1?nr2a)G|**hi)6CWc}SKKP# zU|E$6UI2YkESa2I9wYDa*Yq8^1_r7L!WJd_Ysu3u`-*bTn@Lw8P7*O#sgg(Adt=N! zaO9yPBie!tX3ISQ zkq!j2y4gvh5B%goIP#eo8rV~Hzb=^kC@e9U?{^Ov4A=HnD`1cc$+ zp7=_@+X7O?ORlsluvH#`Cx3Yir|m0uDpb~OUBxb0u*-=}J2Lc05>s#Ih;wt8q_+Bz zFoK^_6<^6|L9?ZlDE&>F2W6R5SH+b=mSR<1P5dZGQ0V?Ay~*m2W`ECQraDD%unAC6 zSZv%}@-dEm;B6nnhwBm-IMmhhSuW0n?wr%?&jJ=d6YBTG-lzGvWZEEu7Vo2hP53pQs5O5w6 z%Vz4n-zPDkE6VVY*PvYWC-ubbLW2TH zk}~vl7#Dpto$*eBgS9F#R>qv|oHrmHY&~6Rnx|{>C z{F~W_Tx3nA*=?en0$szVV?0BQDwp;lWBs?X|2aHhIUNIHoJ%y#F_$~yZGS)yH)2?{ z2|2(P?4}UJ{1U@1wZ?F~&51^d!Djjbm#@auGa|)Sa(vL6rQT83jX0$mb^`abU*_0{qg&;*cqbw>u#_13% zW~s9viI<4OH;q#|ffs!O^wg`ibc-?mEn5An9@zd?z;bAyRnplH{S?Nvl6wq=*hX01 zi6eFtaGKeK;A(sKXa094uhYdHp92sAYxy|l3%kpv)yFwRTYSiH?dlwXPL%9GHVn zWPdm6#>DZ$PHA1Co!hN(ju@vjeFa(T*x$sediHn4XFuY$EI3YQr#I$9I8I*tn;pZ1 zDRCaC%f~+j93Dq;UDz^Hcn|ynYnkizD7$HNaN_t(in;OgZvdy8GoIM>_Y!1JW9<{JQR{?*M?hm*uE?f}avFk#mZg%{>kiSbbRyJPPfDO|^j1jP$ zvWHTOals<6PN4cJ=Dk}j3PVk^j>xG(mSY|E6%qxfGsLBt=3))&@K|UuD3Xh?nqm_H zic1>*DvL4cpKKeqL_YnKUJeWL?Qx5+B`rOtV$4_^!jeX>J$WWufqWu5?ZK1x5j6@N zwge>6&StQQPX(ORY1+uefKz-X&Y8|NvxCa-K&E3z!XTw`I{~W zljWCmH*kiYkMl%3jp+EHHZ1HkN?oTX9uVP>bRU~fhAa<*G<5V9TM6*}bZa)oSYhXY zqhq4zjKx;WDB4G|jj^sMg3um&Y#@L@lX*;f7inv8&JN>jJRz8-is^8B-2JzV=uPJS z7p`Oi-?U81>?sjMk=N3H+m#Ji9~EMnfODkyDfH=LT%>4-%bEiEfdOa1G{}<06T(hH zHAVqjDKc$+=wl7AZN0@>P)=}SUy3JB#Skvj_MZ4t$Yun<-SE2n%`r zUkJa?dFSbEe*4#)v-76h=`(lk#K_Gd97BE5ILo+E)_iUeb|8fwCIwOx__FMWhM9}Q zx4;jBUiE}ncT1d>vh=>#-D7;-K%{HTII6^pzsrJT;nK|B1SXiaf=km?+JhV))K?E0V0INeAE@&0-WuVY^e;yZDJ5hkL-lT<3cWBCJVZL8Xd!+2zr3m z(e9UkE4eBb2{K=-qVS5h53{9E|p7=%_c|^6Y6zhxz3=QQ&J13=hOtf1-UZ(PVo$6Du&b}TaSA9Hjhsk7j&L-DhqQz1bzH7dpVJHnBPIqbZjyOZ5$K*arWTb@%HwEE&B5tbCT2v!>l zBJs1;eXJ|s%+J)m*6zao7F`A=B0Am|-mhRkcDemDLl)g5%1g}7WO{hmWqj>7U7 
z`G&JFHTX%GUy2%P3mmxc{G=G?5JSdY@n7Lol6uxu=cMgbaJ1ga@o~1CWFyCGVeuyc zOQR*Z=<~V=Qmpr_5$sRZhE&0N_MO(R1v>I%U$7W&h#?R3n^dLWj?4b%s<|%UY$C>e z1#c{N4bD-gxS2>7S9M?fW<1E5qchMvOBXqp8l$YF@dK(+z=(S!~BAdH6-FQ^iFqrRun;FwQ7?%e+6_71h(SPq~nY ziYbSx`Dki@_4HM9%w+Z~np3cN7tbxQBItljGeKXHn7G_# zR`z9UM=OhzO}ik{GGp1svUmS7(>uV)MNf?F!4K&e-~i@exrw^1kZS3jof1^Yaa2(* z4;1l1O7#S2JSX6mlS|h~pNkqAlrnYuSg{w{B`}UF)+Y-(i#2SF55{$eKoD%QJ-c=v zzzx@1C9ID*jTXx{GIhuh(iZ;~L6KCkjN*nyyk9aVtyOyFV~c*U>o<=ejuxUiG)#{{ zIf?A?tJ(29V>ZA^sh!JIKkgS{!Pe_ELR&M>h;!*rDzvu78)ZKz1#!B764i@y@I!Ro zgMU}ZUyP?H;!9zo@9d0UQs<^y$BuJhT|Sk1HK!-~2VqECKaUH?rvjEQ7i_3XygNBB zqnHvTdwsQ{kgaY|(dbVRBpqDaWO97+p&(~ZdZw6u6){vm$@PF_3;w3Vpk2qbD)cqp zMIwhLn60D6v4$ zQmSq*w8m?~PHiP`SaGfFC}WB^>^d5X7;AgtdO4j1a=@F5dZT@_ zV>6P~t;}c8*cTm_g~JK~wLGq59uMV+##P2cWi)zpEOi3^S!7&y^Icr(?E9i!QFw>j z&k3J)BAn~_Og07nC};uJv6^Mzy+Mq#;MXThQGHV8se6DqW>@ShhDIJ6plI0iZ?V+r_R9-sJpXkoIVD zr6HC-C;LIfD$%7mc9t56Eg&nQQ$>i~u@vmhuQ+HZ#z9Pen z&-3EeE5VjcrS42?b^Eb63!|NvG6!SYRS@KYwu@b&y@k?x>W!6eulE1aL?4Ksg#7tr zHsyL^uWKM&tFTCxRJ8e`97bYpqCegjOJV%^)YAKdbzhrK|`*m>0jdeCJW(cHl6x*?oE$oD$n35by>@pvU=E`P49GSXV zN5_qFJHZ?VvUL<6iE)yW>6PYUYhDjWY|WaCydmIn9<{D(xB+&WllTQ}%T+pO1qDEtlzC8cSP6I3X-N zsAC$!PQDq^vS=A#2)Rf&^F&3Er!8)QbgrwDt&eCIgUFbCCb!_IF3o>}a^h#P1Mq;L zqgQFS#um3ikQ;8nw8nsdJvVZMa-X0*S0>uKUy4ZU3X3jj26x{Mg#bnl8-W$b3saO> z=8ETq2%Cr9`{J-W0S>!{rQcVA&LIZ*^4wUrU)joue+&3eo_8$8PwAhsHWnin@Fxv; zT3wteC-tF*?t9VQxE=mZq-5khK=)k#vyUEs5}oFd4s*?WfzD7p69w%stu0D*OmD@O z|JKo&w#g=oB2Rf4$$z~3KP7QM;fO6;IHS1aup%78z8!S!XInoh`c zU+i_iL#HFzA=Z2VU@3Izt>N{Bo%R`OxDNtJXjeyPtoD%IdgWjfx%sS7^_qCD6ZU6S zSOwqa&p_82j{2q6Fhz`$oF`?;Zs7Og)WDOAW`{fsvY2{{ezSAmN1&WI4trYTKw*z7 z)lBA-Y)W1yJnDcrAz=ufx)4N~_h4+Um)p^zuGu2#$I*|$IjwcfI4%?<6--yFo6uPR zrK!+7h1&h)<4|c6kQ!2f_m*Ltu5rBj5`#}>A2Ao?6ao{yPeBoCU(!)+FT@8L4VB<0 zg#FlZW>OlNXg6uZ(^*)aKy*jHsG;`M!3o=gf60hsbcm-!IMvLzbWbGieg?`p?<~;s z==6Mr=Q57n3oa|MZ8J+8(&|KV|9u`eiPm+Sd9m?dGvCQ|P&JaV;+*rz=mo^5!tOKJ zdDM9b0$ch8NDFKlhlk{MIWf)wvnP%MHWxwahBV}~tbe}jsSW)r77`+eYzo^R+VER* 
z5m?=b>>nk-L2A4n7K_q^U95$1#6{NZSfJR|Ovgyd&0m!$w>& znqLB|k#hU)^_R0C&@7<^pZ^MkC7#Uxk@yj>LZ!WZVvL-^j(>=BWot#>e)rch*ubG- zlb}BtdMkr{O_3Jg6qIfW`Hk0M$atGhKtB_3!D17RnGlWtb8$|(HojxQ8<`*85g`Z` zSZ|A?OY3ZlWB-ZM?T8^r+puYa6z~3wHH&H{YXxoz{woWO-HaR;>H1@NajGil9^_*q zA$3^AzSh_;1*~)W<46IgtWJgqo)C7ip=rv5_SCmBqPa`gw=Nc;kdv~zOvu7uGhLVe zmwp?@A@[-|zqhBr(43DLpJ^{F=cclsf(4z-Q9fSP_aK zZ3UUOJzs#@Rbpw?9{T1&O#UPb zqKfl>DovvpJg^ThC8{<-;;XN1h(Wn67fzXz7aQoZt`wVged*QUSJIEVFqu^KZ~k_@ zW2P#T;Dz}5Q#jY|skJks@-v`wOx5A&>{t>02IG_zCJJA^HR)-ICBASllvKsoF(aZ!*m=@69&(&rq%@ShA6VVJXJ^4>x3edD4IJ=Tg{e^SD+hjZ_!t9ul_9E77H0{uiJN ziH#|=0Whr?<3zfmq?h3|A}_JT*dYPqRd9;uxX!fKSAWpbej7X0;D3x)>R=^ zSQ6y4r~1tK*hHLD)|=zl>sDdDAT9>l6_y4e@3F&U6FHrs(G@XI$kK0O)2lN+5aBE^ z`D5#eAvWeO1L+9SbK+o|mh*}@XT6kbi4~Uxy25ZPR(3&m5<>wL*pvN-++>D>$Nu;w z1?Y?x(6!(n7Y!St?C0fUk|>8=bj*HPp|{c2If zJn;DCF}Pe7K9VFmeQ}K_=XpwUo$Bu8Ge3#33hMu_SAeoOCt$=dwidQnCa@`q{cl9@ zsz{YevcP;($T{ibRdH#SzEuu|=S4ZcSP?JIStF%#b{W?3I@x1_YGGVlil+sf0InkU z#J_~e(`-iMUw(rdStBj_0d4bau#y=%CuXENh~9c-7*dlg@>!Qs<%fxKkZo)_#t31{ zfn7P$lNUh(TH1-^S^*;LWxN=>O6IdJS)Pv;T!zV`IJ|AY5T}p@ke@Eye}NR6<4+;+e9iw1a<73~_KL zB1Wy9g_*n>5_^{kR&Q;^=qwX-N?Z>(NrY>F)Godibd=oGOy-%D>p-b%oE~een}y#~ zK>siLML1K5X|mIV)5gkh#{ZvqpR`35K64W{UAi~ z=lbZ42G;j?TvgR1=t)6Kt5#a#{qd?8&)pdRn1c+UJ;nj!2GGRL;TikoC;k@XcdC

kM9h*Ie1)a4tw zEkm2l;ReFSPz2Yf_3S?cT=IOv8S%25M7HpHEG)#@ zn}Zxd9S2Nk=*0?KK)E2s>4Lz?!Y)?yOyU|r*Lv-}GtuLYfH+GKtxwJU_S_OCt&$jq z%g1XXh?XmLXhFR3JCJ13#eUfGTLH*0uf)VuxgFW`hADB9AYVyNJ@LJeqoIvxJ9xuw z{3+%-W2)SQ$y|!wDFt&dc^TgcI5F(^vrcQ+HVdd$+D&~iPmJTOqPO^qpud=~Z~=Q; zpNgTlyZq=<@0d$d-deJ^6^&5zCcsfdFX)s$1-z1t$ zbO@Dc*RkH}eoQnuAV(NKCsx;We<|XT#zo*~P;#n`mL8I{IsC(IZkpzwu zWx-G;strjKcv!T1K5r7US;E6>qhZPA(BgJgVbYU4br`T3&+b zYKtl{#HA$?yK3_V-I|(oheflaTg17KurWXNDc9_lg~a?@Pf~p-hG=IqL-|H-r;(`+ zo44PJa4u(b6#9FZlP|S9RGMtvr{1xQtE7i>k+7veO5zPc2g#&|5#i_Z|CJot?70X3 zyM)=uPSi^Q9@iXiA)*+k$;zOol4#r*~D9FD5RixYzpTh!0v znleKZ&Cv>X9*3WF{y;rS4hwD)=kJo|W=iph7!rd56<0~d8w~U@u+&WeIOV!Wq$OIn zC`7gJx;RTtheX9ImOA1lF(jLtgUK@R!SeSkACqP~W+GmOT0$Y-6tni}>CX(JyO4p@m6LLti3Ye(p0=<~D9sA0vy4-il{?f<)-Y@%W$@LByvR-2U zEIfLFZW_UHe$MOflb2E@HeX3PEA3C4O@N?ldp6Q#+V3S7cffIYW3s&^4ho z(SNQ#5CWl3X^K1KOnWL>Wr$-Bf^a4$atOfQ%JpKh&WZ}aWSwWC56=80_r952x>#S-oOVc}vwHkt*=HI+U(Y}7+%US z>Z&7SpMJ2z*HV*DxL7HU(2@ztxdIk-`GsQAc#U{x6ytu$#=>=?kZLm;rz;HLw!j-O ze6g~ilgv|pbfD2tW;$&&;At_I%Itv{I~(NiXBPaZ>Zmzcn3I`>j1i<26kB+=K{JNK z=0Z5H99op(ph0_JYEon$vm<^K;Z*bv%$r9^tUeEhtfCMyH r;565^6l2rFfKJa2 zBV)Lb+bj>p%#+r2drTJTL`tt#y8_1_o>5%MPNv#R908@;#)DsL%k4CdA0Ukf9tpPm ziL)m$vva&S`=Q_AK4BTp>p*eg&*g8dQ$U^gR-DsLj}R*#1$3!tlFBBV>JFD3#k=QV z_@r@fFT!!5rq&w+!i1v-87bw91(5-R5tF-Tg~&+P!Uge$+|<#j^)d5koDQ^#6aLP4 zrc5v=L@3)FgO8!)XfCX+4qDq{@y)?^6&_z0#nzO`@!o)sq zTr6=sP8Vv4Nd=zU#n|)AA_sqe5>87gHOlv#DDw6A)oPC~p8z5QeiDhb+t(*SILu_r z1ue;w{efN+?TP(_hyi;xvY8u8o|5@bZkF&6w;0Q$p(sh7dTK_n1&z+rxguO*lL+$L z(|}Giry_%~(&^dXn^%m8R|TDiYFSrh?`-umppe1%fmkG`<;eOk-_^lWc-x$racC{+ zeI+zg#W@tTH#+jn2KPM+$_Yi&ot5B4=RhEu4z7mx#Z^N7n?)V5#<>uViU+t^06^?#|=C zB3(Rf>?bjw{?+ecki`yL$9i%)0nO^tl3Bnp>Hm!r;tV;{`l4wX21UhC67-}pt@mT< zcZcCI{yhG1x~EYOrNEdI$z)8! 
zzVUjOX8st2Vq-yOg#}!n^e4X;c1}4+tiKH8xN|Z(7Asy3VZk-B zR-~Oz65|43pGr@E#_^(D0jufKs5-3X%O6Vl8@be6k@;e7&{L8{%a6n%t0ISxvEh{f z=W}10-@}5GN9EL}c=RgV>4b+EP%~3sRk|8B?W>ZlPm;I9HE_-W_Y|-!J6Qx7l)*w2 zxr7`C*MH+OLD$T*-zGcvicG5*mzbrG^Svl!-ZU;oUyIWbG@@C;&Z!7{r#U1V`X~Gm zidZ<2?`QdTr64(BZ#P~~-P=`EU}y!N+|QAg!)yUdx=HFh%#-Ik6b8%ML997n4|H0G zOypwRQY6JWbYgY2@Nz3Gd06~aF4c|XX~|boe}vUsKxyZj^xxJFU8z{=2K+j1Rs$@R z;yf`HEel5uEZMSL_C`3%ZWIS}!vyVxEwnsFgrjj`NTkJS8R4O zz!SpS^1Wet!gl?EFBCre;*SEZme>@zLkIz~UWq54?FY*3zw79hRY5Pm6UyR9r=#lfSG@~Xm0lfng@A=xjoiD_|JWkzk5d=$AE8VC zb@)}3>WC${~GthAd{M^_~QfqlsB!E;vWS`u?)3s`5D-CvhEt|xOW0QZ+i z@#i|2Oo%Y3#ZxRI$$KZ_?_P)-VRIjhb)$fN7kXwD2a|(>iH|{&0a7PpLT9It0%@W% zxL?o}Cs`HhZc!;d6>HJe=pyTk$H5j)9ep4Yp?37)B}m2ImetQGY^wwq`DN(i?I;7Qk-0N+yAM|llFOe{;ORKa?pAF zpX7E5$Fp!{A58>;p%TuG6`uw;xMVhq;MNeu|H+Z~rUGdvRFE}AWKj7`_6?6GaAwKw zS()PKNdsM6^{j(o+SMORJqK_i8k6q%S0Y>>l`N>~A?c*f5ozJ}OI7q?VMpFesLA`~ zD?Ok2k+MiUF6@Rg4mE(a|C)V*a%ys*6-9BDa&J3V&ah1>2KiK?o%V&?o&!jM+Am0v z{%v1?cfb{xxI9zP-5GoFOE1b0FV4RuWUtuljn!TRIzG-IsI4&|hL3H$@p>Tyj8YXl zECQBHX+Y)G{Wl1gNV3~Rausvj~+v*JhD|5Y9R$*Z*Mh)U(cz5e6{nDfLD zU0vnWxIu0bDFrhQ+&%_5n;1jZX3N86MwW?~tE}@$=BE)O@AN#={|!UwF@7X>y1G+L zXIH6RuET{LYH|dEx}q3mvLTlI6sH?m3x%@2pe5;PqYAgV>}RWrouWg~a_KKh|KwN^ z2toG#Vx`XjLk$hR3>wkrqS6m!0Vpo|9Kx{_<`z)tR(ahk(xS3?Vs3$aBhG;(*UQN5 zGG9QYxj{#n4-XRKI>V`mwvv>7m@Z6N>QSr%<#w*JYt*sJSCGh8*I^mmj}>(4lasri z_`MjS;Pk_D%4-%wIEq>x#*E*54NjX*a#OM=TFO)_!zDS4Uqm@zOcYC=#cv!i)u0+{ z1iT&m9|t@>wU4^Ius_LjTb|lBQ;egUSnA6$GR3Q9%FUX2!~c9o(ed4R-}0S1dsa?p z`N(`Cx&2DK#mAW%g}3f^5Ec=m8HZCjE(mTf(y?}Ow5#UXFUm5&AXzS+7Ir{1MHWN< zE_*-~b1+W%9uHK(q|w|h;EcI?#lR04!zsHCs*V-o5Ua5~mXEW_1bU+QcvFPaz@A?& zCj1C?GP%ouN(Ap+iK6r?url60yn22oAMo z8^SKd3nDEYtznp4?Y$%nvGD{pvOiDA|F;&Bo&0yjIQ~h}i`ZtV3^&rB#=gR74W;2| z@9*xVp&WCLWlKAwP)mF*(o(46f;bQBEd$}Gn%QWgCM^(yMCYLgy7scT$!2nA|8)U> zOXcFg^am0Cw!>TZ7yJsw$-pXDqDHmUuc4gsMrjaiE$lYVePEwC6_uO|e^SQ82zU(4+| zu#R!*pEMm`eg#;k74v^s5H8~e)uClpJLYZSqm+%MUdQ{h1ayS zYh;Z%WiJ>2<37?asTVqjh62+~-op8fKZ2=4w~ZCsTrp{fh$dTyw193Dhg^Kh_pwlt 
zLsa&EyHXZFqilQd+S|(ZY%7dE30Y(E?Cz__0{a zf2Q_^S6K~AiM26lI#+I|otOENJ4v^$4&{__DNkE_4~ua*G)jpe>ehfjJm}Amz%>DB zUI)}DC+Q0(TaVMn1OW{M;Yy*!4;dT?`)#haR~)1W;Knk`OlO|PnOts2eZfm$9L1ib{>x*?`;MkTX^SlV=wA=3#Yi@&6m7Kn> z6uStKT4wh$6?RejCpQb}WOvz?Kb^Z8mUMa*`yNpg0ctn#SGg^6ZhFQ3!+@5W4HZa@ z;sKHPXL%Hlr|x!isa})YDXcDPWB!2cAdp0d7FAaWIHy^`y(o?^I96v%iT>fkl%Up* zON_Di4aWzWq^VVHkJBl~Y9jWg`eL^c{yw>?gwhF50%d;CfJw^uU!t6&wn8!96?73u zcaKhGtqK@YsV9A7vOpq>-Z)Zj=Lf5nj1NbNa8Tp0B7UT{1tIfy&2XVKSJU=Wn-b_5q3}8anfF zi8xoKt^rIqQSkTd21QZP;H6l*%kG&ETO}yPiK0}SwGnbo`Tyi`2?A}|ga0in-IpoF zt$Suf9d)Y^--{rGCJZXaW|aVEm9r-8bX_FIsiK2StcJfL3OTlM>8CBWtMd2L&{c@D z1RNc+R=S$^MGVOjozKbDnTI5av8X2VFhTDI466lOl%9+aTj)6ej_l!Fq+j zt*2K+S#aEXYK|o)XF;g`7NSXnzrh0~*%yBW#ScaETRdp<1&wnmN-h@t-WvfLh@hIiXUI5D_ zF=_{TlI|4&=cF6!9n9O}^}V1hPVQqC;{SwQ$TE_}05_rm#seIcj8oy{XsK4LQZ4P# zXv{v!7g}}W;(RfViC!B$_fkAtrZ~To=3uPa1lYon!w}_)b&Gm!Ar*WeIRj*>Zk+H`N+*#w|&v!+TINAvhEzbm`Ng>bDYF{kM#ehAYLSl{Qq`fn5 z?G&kGcZn!ugMJIus5vV4$-Jn+tJj#auf0%0EDu`Z7-46kntrNcKOhN_W@wDqAK+wG z4&*u9oG-#zOok*Dzhy5C$??S22jIn}+{#`RZGz+xloB|L2*kQj9tX{|K3fQ{v2naZ@vUlkKTr z=RiZV9<(8>LHe{<=f0Ks!MJ81xu-AUJ(v{mW?pC9CO{6@bn)@<8Z8h*t(r=Awy?wJ zgG8~M0cW?o1EfuN#B~CWtclU|c0miXhRrNyJCBJW^?h8qU#@`L;bW1Bm&%1zLu3)- zMgctb_728(t+*}QW*KmfVQtx$EwWeBFYI_|KP@a)Z)?v!nb6UNx=B+u5uK9G|)D>o-z5On&xXeKCwd@hFaWHH(iTlWK!taitlaywnsT?4b?FT!c@ z<$ASli2)clF|G31p{KT9E|TbJ*E4Hl((Eh{M4pd*go!{#xxP=`Z7hkFos)evdQ8lj zYhRtLeeWIgKi0BYoFcYn=;il6l>bOOO_!ZR0ZZGpAQzvODc*SGxU@euKMY@<*vZ6v zawCOQH;PU(S4hnK&nq*`X}KR3w~9&oKD0rSQ@bz3S-j)x8{?3}fn<+oJY$~RE;LF% zUyR2^xMfa6t+dpi3Q5&Kvw9yr5ycY9KI&Jxwun= zEjnmqO^qI**wl_OE&d{1m>Q4IhC5FFQ4e%uG835Pw%Jx_wlopTn z@;Ng)6UM=-tC4n0hbZT{3X`QVSjOxy%#PE};(x+tjmS&IX9Fw{bZ0pH?mGt}m8{Mj zdSZbXS15EonUt_?!IkB6VM!R(hGkQY5MeHy&jjyC0TNJ&)lH6&<7HvT!9f9fY1^M? 
z4>aD|xmPUcPM1$goKMGFB9ju+Y;V?; zAeU-&qRB05rSY;@f5;vfmpU}fntBzaV`k-5h`WV}35DO7bq#Kdnt?`2^hb%YZy7g< zi-n!PZXb&!9n$94LL(^h(igJ@5VB@jl;?BT0i8B3#A)>>iQGIN*20rHHSKD~bK+b& zGufYiM=3G@EkH`3h=J8}=RO4Op)7(9>*p?o1k;9SCM#mLxfr$%2OTji_lQjW4fE%=h*(PEC_7F$C?WqghrcA znSWXSZV005WcQ;&ZilZb!y!!iXRma{aO=xoedD92% zIeB^dWC2%6RD~9w*(XeXnFv-t z4B=w6idBi{yP%vt?zP2DWmw15bMr^|*MeftTIaCZIw**BOjv@Ft&N$FIutg9_592+ zf-XK+3- z2i#3h28%~Txo;S+?Hg9F@w><2h^um9)X6!Nh3uu1+r%Q-W;AP_l-pr8Fg;e!ao!V9 zmdbd>iOU4>-OAecak-tMeTqD|{deJ^K3rER*nLAavEoT+i-5h6jY`-pNHv|5c>!md?Ql5>ew{{rC>VsAFb zZL=CNmJ&7qDHSGVFN<@BkXblS^zgQQhCkBmGtc~1(BINrbbYNqR(=+aQo~>YonwWN z6V7u~$qKy`cRdH`RP{5xpl=s}7-#du*@1t}{+yI+DK^K+A{h*8e{urZ(Tw#AF(jdKYE7*3B5tP-4H4#J zhn5NE`&8>|MJU0nSfBkHZi20u7XOjc;*lQxPlB!ibPFt}{1(aTFZts>rm$Pf?XpHS zhY14jcZwnqHUzZL7$ZoglBarK5^(ueX-S@7f38K8jvP;X881%!9G1SNxL1S)K^dWP z5=*@TLyE^uizYcKElh3GW#xs)Rv-6cru`ka!>Q-(yskK1gyZg&t&}^2T?R}G*u6+L ziXSQann;$T>Q#J^{2m?hA0Xi5P8;aY7imF9igE=>m9rpE73rY$+$5(wta##du}%>> z->m4ac@2WFxnI~4FA0!#YD28@I&LQwDLlw+;6 z@*Ky*ap)Vc6h{j-y2--N{{Rvv2YWll$bV*Ea4S!vA!apV>wnn`Og_e>fOp~^idq4u zOXor{LD)r;X#DaXlo+Q}yBwOsE&mpj>pufCOAyL*{M&Hp{3$U+<2|O6MLYa@G*09E zcfe^gDe|TUTjcDbZt*T`Dh+z&S@fRuqNoN;f2ZZ+5Fy9dq`L?7?{U632i>YY-xJ=; zz8E_!+TYLqoueUm_6OiJ8+v#IL9F>96y?*)VE3Wi&Nqj#QsUX*BL~3v#gkP1@!J%W zAU#?YV}+e0Pl)JA4-%J*54z~{w2;F@mqDkfFP9lk^i^d)5aSxu*ee?sUyGn6Os7Vq4 zy%+TeTE9Rlfv!YXXE6O+>L77O3R=4e`g#G^jM)R6eWP2Bd>#_(Tqibo5bJwok8F7* zFP75^R{Ro=WOWV|O08mLVRw8@By#hw{5P^})^`7;!j^kJ*H84*jbdzw2tvI#+UJ|+`M^lkTB`wEw=bv?x7li zf#$B*;6E^As#jOW(5(=3UzgKcLreoX8vYj2k#bK$H)3M*@1Pthi}SebvY7tN{K z%YSpk*%hyrF;`+M;x%D+4jj1adRLOswZDgUA=C^=J7eV^AjtVvTgU2hT4Y#aMyyG7 zSW_$+(ju5`H+IAK6GQEz%qiyTA3@F!0;MYGk-P;Wop?+ow#7nWB!u0Qcv5cXb>D%` zfgaS&Ulv6onx@ANKjCyDqydbEi;e+L6`P*<=zN8V26y#2BY#YPSlqQTdBqD{|INJO zOLsKWtRJR7T=v>u>F7(GWED({V6}?0zM|-XaoNaw!`XZl zXy-%^j3Op~aY5>LzNsHLt&wI!L)igeP|C8PJ&6@L}y#ywT5syJ>n2&aKQMC zsp6@P9|RqjY+{qZs&t7FFeIxw0Tv}=z1fe?*IuhT!dF77luG`dP`E+>?8=y;jP4#w;9=;YTk#9X;P z&~eP-5Rn9w>5RSeM8!w{BJ3}vbg`fH@h-5WRju{)_ 
zwA|2vA_t#{AORdS&52nwc@#ZM+nzurVca5f=FU3$MW%e0f`O^_20?uT#rY$E_Mc{;~#b>hbFO;MDsaH z?3e=(@F&g8wfEfxXhE^a=!%1d`5Wm=LE|Sumo>vVHsTovw%Qer;Al1ZSZX(b1*RQQ zTcX-WBtCQ!5&Ncqy#+Q{1swYXj+6%kiJkW@lSR_byMvKKV#!ex`rA^14y(I3NP?E# z6CZAXtOj%4+7T;L>I(bVCfceJU&yFpg*HQb=bATyJ4XLokoC!-p0ML+D{whNHZ6V>=a00P#@28x z1kG}KRg9L?f~@31CxbGD)hLo`&@!mUC+o+7oT3@5dfyauXxuBO&wfXQbIQ(Ovh*sB zcK|d=v{N3L0CG*mDxzxY>Jwqy&!q;eJgb>LDGN%P^;~U;`pFK8qL^A4R|q*MHceU> zZAA3##xRGKnZ0PcOi&%eDH-KG??)a<@#!{1tUVJU=dOy7@dZzl{^jbl@ zE$kd4KKjCnX%LpkR-?9#T0zUavY(Dgud^Id=ISt@!rUPjec~KzN%yE0l!M{HqnJ^j zg<8cN;5ot6w|e)!y}%Z8wNI7eC{ZfMnmA8RC#jC!{Z>If?qFdn)8>VS>|+e-uzOpI zTbiKARdUBi5j-y1<>g>eLz(XGkHhu`lP%0oXt(VaLH}JRP2=J| z;IzN0kCTPcUX;a+%>HuRUM3qjUX%fhQY;qfaO$bDar(Ys*N`Sz#K1W4elSDRZmt;7 zG3laFLZ^#1H28Y8h}HJmVtFT>Z3S-k^vCu41MQ8W3(I|}k%PO%yC$(*V4UzO zJ&ZX)m^A5JI2uzx4}&Ro0c97Hi4V$6V%ZL6#d~)vD3wk6jMIpAD2uq6osDL}R2)O5 z)C=OA?wPWm#GzkLEZLC-Rn0y#_WoiyQHU)Oj1-*KEc@xg*10q#>Sy66Em5{o&lM!8 zQVu>}ZZgD1?&w&uGxOI)6OPDP%y5_J#-HmC9X68RMua8bplV-=BYGTeVtDFpfwaD%3DOh$6d_zLMcpho zO4!w(NNs~kJ+%@ICdqe#R2Mb{k>Lg<5T!GZ zq4`O(rK4X$a#^`Z%fYpQH?6ynN3F!g|!ndil7^8nkWBbuo;U=_$kQ+_E*G+xX69H| z+#swrQ|OFu<#upYi5AGYM?yGJ$vGOO_>?FTEiLY-Kgs}Ix30KKz;QDZ7}V{N__H_% zQ16EkGi% zaox!vC!v}Z%bh}giQN1h|U|>hk8Kr)9#2ICb_*&uy zVT;ojhp0rKi6a$>?aMgsG!R9H(pr2Vw`!^EY(089z(I2rurpo|=1)xo<9h+8y+Pf^ z895>giwDX+Fu{j_SbLv|4@V%IjHsW1-IQdxL;3XphW~yydh5W@iCTSY73qu7&llv!;X}cyT^b~W?g(2kG zoEV%cx3fNF8m}ilE$pIBj!6?miM`Itf@o#IMb7pXy0Fa5VLO5;!5w0p3M}lg$ISUHngJJOUv|)6F&cg@ zhO*V2;&}LC11wW^#ZH$1TsmZvLm3w?#4^u$%*dMcE)o; ze6r{$4qrbNK*C8)qeXm?{;492QJ3?lMN*qndw%5#2t-D&KCtlm`1wkpbChau>#U>q zDp;p-D)v>hoPAZ4ONj*-&*w5LS}e{2?c(h{D)ld7NT%+pVB_X$5aPv(Nt`aX6T)N0 zoC%z8jR~f^EcX%PSt8Q5C5^Oigb9l}+B6tTT?=xOa!loS6@cD@dG8VPeq8htz?%2lksy(XR&MDiS* zcEutArx;5=UC}t-e_~Xx7=ILS8Z;cSl5i5RbDVl(1EnKYydH0sOAC7F?NKknX=rES zgiZdqs!Yvb_g-s@xi{d;<%a&xQ0`{k2uoge8nH#R-GtkTo1&aQA?%FkUA4cJ|DIXs z>x`9e=I>N6NS`RiiE)WzJCkirs^L-M80e==iS=*6i6lBDrB?-rY@ij*a~j@j>7TMo z9EbmjKhrv$J%>tVIqOz%F6lP>CN{)F4IZ}jMm 
zT5XdkWT2M?Ke-(@F>t6P(>|Nu0dyXD35KpPwh-eunUN>0zU$7+6W4I{An5U8?1>i_ zIH_RI`YtGUewxJCM)*YwAT4n$jqsZD-I=FkyH7Ljs1)g1UE3RH3R%Y7=u|QCc!)TH z;dH*Ju5YohGBme8HoeFH(I%1|)iJ^rVYL+h1Su1yK)vw?0lphGW{0T1H}ls>g?dZy ze;y36wGzERebs#qp{0m^@9y^loVuB4=jl<%=oOc?V`iZLAJMGq_i@<+_^^~U+Rz-& zdJrZpS(N-$O3#)lMmwE<#;+d2m#Z{JMbcBMSo>jE3pIISa9A00g1J-Vg~_Gf_B};z z$E;C})UNqR_BF;@N5sv-E;lV_`WD_vo{xGo`;3uHGnUiEAl6jr^LataOtv{O{;&|_ z6tP2}k57f2?y?p{Z2wrs%2sf4F##1`v6kM*5hFMnT;}nN&^i`Fz-6L2c>;X62n(+( zN86%%ZV=^sR`dP~$54%8T&t>Q#3@29H&r||K=bk36L3@wY=n2mg--&U3{FXq{bM35 z7nU6zvv`3p7hj50ZK7hvHBSMY>wUF++UsdBF`|P;r?ui=Kn}3BZ4d(uTZwSRoH}kw z99V`GBZoy%(9v;&tIIZxE-sTZuz-*ZuCZ7o9nQl0W7IQQ96iZgO`tNIv2+VGAyKol^bg;PY?}v=z-Z`hyq5AQxuH zJfkh(AT>>ojs9vZ8~Uo~e@HPccB(9u_rJjZPEix{wO9mnjOpHKUrZB6_=}ESbW98g zAb{%0ak88ivW8o3)4vEY)C<{Q7T4-U(jc^@)bcWJht4tvolFvR>?>I)v)M1`h{Yo8 zNr#?Vc1Go^4y2K5v1iClNmCu!D}7eTzS*xv|B!L-6>%2Pc&a)gdNcig;hj{4*ZsYf z*n5QBTFOcsG5if!XDzwv8rzGoHy(`|y3%*9SR}*v&>OD`IM&LZL75F%z@PaiFhz z?7J{dBdYeS(uAoKmHWmbIVl`&`En%E{65HqgDyp)nj6b}V9!l8@mo0^GWSgMqRd<| zRF+=ecm9jq7FRb)y*kUS{}9R%PteBKEMX#UWSi+DoTR_KzZ=8%0uGo{UrmMX6GOV5 zx>$D{z7(QhxSpXZIRE3!dllN>7uknA7?W7_ z9Pw55A@?}D5^DpogGh^|O7&z%;dHrKD32W$fro^)SdrR&*6qOF~IQKYUsf^*^N{XT#V$v2}%rBge zNN*EG4rc1f+~)-xFZ~!x)s>cpka% zn`Ba5O6m$n3nMrtLp^b%0ClUDM^fZWYauhMv(wKgxqj_#uC|gtE3u~YVi%2V!KKAow$hN{S$AeBGemmH#!9Rc8>D)CxD@lmJJ6iZR##aK z#`&j?#KwZoqBNi}P*WzFI2Pl@k=5~mkVmssT`PB5uZz5JOK*LED2F$m{gO)sow~#b zR$@Z&s`6*HjMafYSN2&Q+ZwHF7;;*oeXOTam`*Cc7j!Pr7}JQSV6pq%R3Ds@ca7tEu?lAv-P|hz zjBC`#@s-iAp8ukTU=n}*>}ze{F)PtZzCocgI{YC!`)o5f2r{tAY-#QK9`J?D)|NCfjRo2;r)jOoue2 zs?(e&o@lYxNz4C9h+tSqCPVWkzxBVwvcu@uT*!fHjn6~%A{=WIeRpgjjLaB4i*c-g zWzWTyV4!l6fLF~LXTR$ zE{44IYhJeJW&kSOHrs3yKcwzX_R`kf0zU0w(d*ZySt*)CI(F3E>5<|rF{G}$*vv`c zW?O;?SvxN=U%-7AHYPhz&P+PpA4OZ3vUa0s#jk(o&oCy=!+AqsFy9w12)LX$E$0yY z6A_m5Jk~S4TV?-O6?>R(Jg_wc<;Vd_auH{>-4>iq>o`eB zSNC(prX3)PJ|5VX7zRmWuNfC1CnDpHN^B}OIUX^b&4gV9Tq#EE7{{c4A&gv{C*X8f zj2gz{oQ8cYcB#5w2s*}&o>^&(t8E8KG>9%Xk(+4dB*USACD_uAogVgFw%s1e`!{U$ 
zv}$FiH!2wfj@3VwV3YLc;zJScnR=vuAR~T@#Zj*L!a%=tB6{Mw5im|s*TCHL#?{D* zEG&+cSo=*9L9)~%#ohwWVm4Yz5f<>% z38+;%J;fajm~U{aOMvq1B!S!S-A!#|6T~&A7{O_-y(w z{n&)2x+yVvEdM$!OyOve2a2$$nWl7Us*ch8*m7r(<*UO~nzmIV4;AlZG`1Au z;az~Km{OzUTlcYjO9p-09d`%1$?@iJF7_8j3Z>3m z%-GZJc6Qnik=yy;iXPLDf0v13FI!e9zv083Pdg8EF>>4@#$mHjfhlrf!k)&`|NXM> zHg%Le@u?UmtYU{@v0b&H>N`v7CP9Z)CvBTq!p^=-tg&*V3f@wa1%VZx*sT`mG-J}9 zf#S$AQN<*wFOD0R`CuSo@AgI!B!Z0uy*u-!ASHw8SxzT+7!Ps|=XhN>UQF6qqMOl0 z<=ekZNlU1gQ0@`soQ)c>MLZ%%pt|7P8?Otwj4-jG8!n|ddO{W~!!l9jVo{l*H8bz- zkAI0uv&UsaibMp}yq+s86LC69v=pLB&^2uHF>z7(&vA(98$k~Q_0j-ff~9SZ6(&JD zwdkH#> zoNOS_d7LTA@l4ip_VJ|%%cxG)TC|7rl{i<3QW0I`E$hI}F=jgAX~C=&c|oB%xyMv{ zlVK2w=0HJ*!kI%U4o?3jaon?vm8aQva{IX_P8C!?G9@O~<8+dEzg@;W`{Oim7Nmyb zq~g*FPZ#YtCaAxgFH9(VY!YiV;3UzFD$(vZPS8HbGpmg!gc_nGUuCAZNIe*g zTN?3W`ElgJj4U5_i=v$sc`p0q)GZ@UJ2k-*n~u-viH;Zamsl7});E`l;>$K#>y+uZ z9eFk9W1NqiA%=e^a+-9%-2S*xZ&H!{HD*A$*f>DrxNW=$%ZP!(Hw{~6!a0lF&*+WU z%D6j+&csoBXW=$;q@l&=)P10w5cFSl)A@ceZv2?{Dh)D8>5lhAQpae~Makr-@w zcIm!p%-$Em5jXPaQ9Lc|#MMZ!Ty)CJ?>+~TtSE3Dz&nn9M~CQ%E9K-nIi0=Mpspq zZUHk&V%shk#|v26nB-uao+W;~DiTpK^^EsZx2(!sorfoxx-L!KUm)m=r?cokryewc zCLeznB&J?AKVzo?z{!wOQ|v9QQC~IV&{hC3ZZ&%AI7)73ptDDB^t~a%GexZjll=oO zX|=&xW_hyxPeFo{A^upk9pJbc&SXi?d z%SQh2MyGe9NP?Cch)VzXSzxCYJ5MyEOGWTi`V@pG-ph1Ck^pVU7GfnKSHBt_rQx1# zOczD4= zTq2wXL0d|mA$w7rgGlBtRI^XZR6Bi)dhojs!B?6|n+hdEZ;7;sTRYg6ipsun_#A~N zJ69))a~e-3=%_49x(K(CJ(P|K9;ZfGa>Ls1rLfhjYKv_t(_ zYIi}4j6+0`Vb0WZ@v4A7V9SztMjr-2+KGfZvbT_n6@{+uSoUzR^P&w(TeP1g(qSYM zkvO?bAaM@oh97|si-_YCt$QbxnHenONY?RU+`mt>sR_&fbMclGRuOa*j&_!~wC=FC z!uWV31j1@xcqfLWVk<$l4o(%nO5NL4jESnF;GGU))$Q%uQifx9N?dl(Ha;3ZY4c>L zjpk#FRn|G|Q1%`RMts=P<7La^0B+h1nC^a4&}n7sg~=2m{6U--d)ilKIi!aNi2C&7 z`D3WDvAU8S?RZh7Lr7+#s`NjJb0kg7WR^PtNCGqqij4%2L^CftCIjgfA}mJC$`tjc zO;VlS7Vm(olcqrB_~;}!2Rw!GW%S90J#442O5IJ+!fM6#Fh^=%h)KoH{F_I)PJuwq zT^N`eDL2VVW|hYYxb^C^B2B|pVl7%uNOEzPu;b^w*f`--AfZ(Z9~sxloz|%qy5zX^ zGzc&ASxqEwTcS~{inIRd4h8iJbpaa)x}oq`Xi)>|U1iE9fOGM0QN-1$jf^uKKw`|c 
zKMoQkBf450{Q~%73o0H=-8-RbvDBIH7LM;8#YSQ*;Bly!f5pa_zSN*{0J z;&#B&d|=e4EiKL@^l9oy`{N8DWHi7b&eN&8gBo`3AK($q zv}y5`oK7w?aJ{PbjTl$xCZ--eg?7DxUVj0sMWDSWnlO)Li%WIg*zBQW9y+=>{6hY5 zMwqC^nSw67$^rW4xKV^d!+4=A=ak|JagLW7L@&WZ5nb_YIV3h8;=S^RO}yirj7lGi zBR3P8Vy}yE60ht|P`9p4A@f-++#eNmGFU2cZ1cJZmke8OSgZDO^O}n@e+?Wb7h>`y z5bB4dCAp7))5&0*nn8-2#afQ6;^xtFKP84rDRVr1><=f~*MAdf!AxdH@Bai z*==GSeFG}}4+~OTwAkU4_HvN3&|MhN@?y^`U@S27mUbE*bs}6N$4lLOe__YMMfskE z*KrA*!+r^v0I=QIb#f=Gt zI-~3rJO0dnuet`WSxb{1>d#`FyyU=cmuta9wD62LL{9(J)W>ewJHr0BNwy5x_Yh;* z)T8WHmI@g>S(Ix8st!G!r8ZgxJ=?FYhew(yI&gzvTOsF!vgf{F zoRt1gwXd&@Gs^!ru*b#r#J6Hd7Wpd0GB+Bi0mLc-PDEu74?^&0_3EP1>bLNE9Um5A z_)Ty~Lh72yQ2e`_A)L25%r`tGXi4!z1U>(cBAi*K^_{WBE#R~_VKXDS8oB46;G7R? ztUBd=#5kClMdp%mvk0PT)e%Qeye5bod~@{FTS0Ejeh1KF=4qlGNU2lrnJg?5O&Ukz zeGyK4#g4JASfN-v` zWU0Py`8#1qbyuQmvyG4??st;xtn>dnrN{FS!Tr$cpmoWn67gM_Z zMLOdFan3y-#dc2t$&H`1z--%JvQOvMkmI#o8+53O{j?n<+L<80B+?3EPfW{!kovU7Q!0-98B7epfOWKX)soPKJzt0 zdH_Aq{?{xh8FOJ-66c6=vWQt`m^8#ZB-Z(@Z>)(uUI02T%`y@Z z3hjOIdts*z)7lzQMlFJIQ8JP5U@T(@o-7if_i6k2_W~3WhF;=Axrtmghpb&D=-^13 z4tTB;V~KQYvy!>Uo`3WA*Z_=2dGzXWyzt)nD(!d8U8Po6_!&UK|IN5Y2J z0Q+Nah_PT2>mR;E*8g(m52Z_8PIyj?{gpbp*&9t3D9gQ)`NOtOD`U+Te}{2N|Bq?p zQtTt%velzNv40t}tCEj?VZ?;$AdjKG3UFlg$!1Lw*)6Zby6~&HsF3Iw&Uyom3dGUR zeR4Xx$)$0|^qv0!6-t-kEPzsWn@-2cZs29C0lv1&YK{|U$57v zV=oa7G_O~cxj=H4sI2B=d6n*K!?!XHOS;V5xvRgsIDe`ks)Tm*v7#KkY$;JlTfPH> zT zsF@N6lkTlPf_5aRiei^7=8AD7l_)KtAPaUEQrQs8eC&TY|8AF_@?*kILSOQBLMeU_ zMXX(1f9?GQH_2hK$5E;P^>s>J-1=`jr}I#ToDKo=w?%a9CjG11sqD8gUrh=v<2^!?wIeN~~H z+7sJ+fiH4z!(S%|rCrDXhrg(h(+h;`@jA{HK^c20 zsOk88F^-YJjJ-9wZyN1)qFv$hbZ|9`fkSPG(EyXsgHaIa;+n;wl&n%s_!^1=uN+_d z|EN0ez`U#K{{w>TDFXe0h>Q8Dx?tX`3>XB|}8^5ZNG9 zw#cpn!d?QhrwE9EfPid)-|KzfpL`yE$v^kJpZmGv+h~htKiQ!7$HQMjyJ$&Dgo2u6U+)X{*kYF_UC7VA0&z)D{E@W&Yk+J{L{}w! 
zHeZ-Oqda;C`EemmAECIqD#@c)D%e#o;A`3*z>ipNuhFu%=z&7|*S&(^tBxQj`exLG z^rHMTqFf@$4ba$G9w3{EkQ5|+hE8Ewi8@d^#DTOBbD&XH_bacE$7p8+GjMJe;uzR` z!*kIvkVUT$pOmBP`HN(VAoJqYXfnU07}Mv01s-l$2;W+gY5T8am>p37ggLZ72*;BE|8>e zJWr0r@hV`N7?M`sW1_NNm|90BYOAdZn9XdhT(h!W`=kspBD!TKc*AOWELqmc@ydIG zvR=XFmw{J`F{^`RZNMC-ZP^+i)WcKpAPTF)OlbXtrf zX=A;WrN#2uII;}&btJ8$(eF zgn)&S3($WPO=HN6F&-4*4ul&uu1DoB<)lsg1qoK6S{b)}&tK?GrC78Xge%Srj=1A} z0h($S%E{)In?tz@dZ%?*yX38f`IAP?TJe7Z%$Nl%?{R85!pt;tJevXXi;H$wYdkN& zth1hBd~XX7&jhvA^|56}>S-j7z&nVflIddp_HtB=bK*IXE)L~qcG!H&JaJS>wc;OAZ;bc!zcakav3_hxM>Xy@g(^4Do$IZ%XqZ=c?=k)5VG-Xh^< zR5c3)u4;I5Ntj#1#}K9V3xQ;&J^8y^6NcB`6MZO(U#e>wqHbINLYr+pDgGeJ?5fb~ z6YX)E5Gt)rO)`SISAfgS?m9_%*4-Y+vuTz6OC_7%%>+BK9yUmDq*n;V!1ETZ_q+1% z{m94|;tvXO+KsZHMGkQO7$8^IngB}*vGWg{3<~D#eRmb*7$``{%DX*&zav;SVtnnt z;!ggCdd?e;l|;D{G$O5BPh`exMkP)My3-BweZt-1)Sro9?#=*aw5<&^7zcI%bOAKw zW2AyQo3N~Pxl9mLF&4;qj@xZ)>?_h!4<%;rk=3=W;jPZiZYaV35Cswk! zDn*xY7Xei&4mN2;Ub1VB%IsbzaSApv5!402AqZ3Ye+8Jg)44O4xErLi)&7U}zCeh3Hci_iVTw-HmkMT>Ykw8T?+%d_ypQ+V z7!(ZCeI|p_9CF%>vtm&$UmGqPKPZz;uj)L&+|CfV!8V zwpx-L1n-bwZda_3in02h(C(heJwsBq{GmYSjafm=5otz9*oKys04Gw9-{zRP7c{d& zQ$s~utFSrJ)S<;Ot{0L`HIkmB_Sj^fJX#AfC^oCV65>q9?Yd)ZTmhN&FLo6uYw8Sf zk5D4%IK`p=WszjYC;&baY6f0Ca6$X0p zT8ofI2u*a1|;8dnaf$5TIc`EJ6 zIwDgN{oZu&tT>D#`uwS7L~|wkhEIezaVB~;A$!fpfvPkutTPk93`Nv&P?3)w5af^0 zhoV0;)B&ixNa~_p1RUvN=iqUZFpQ1q*m4D zS5sl!AjCa6IpCEn_C;aJRC-;z)kBPSp0P1sq!Z;zlp)t0j|!qFea*?zpeyu>s!=^mx8x5ivG-+y3&0AmRxzq_eHrNT#D%JrM~?@+-itf8C2)~B#(k| z(n78jHryY?rJE|9)SX1TC-*YON1ePxt8g~XO%3rE1)YyP&LCRd;{cFsY2;eeo0UJ8 zL789sl=bufBPNxvs+bjP9Z1-$Mb#Sx>fty^kmrgXYpg=H4&~I%ZJvESi7|hX*r?q?<9!Ur90wox=s|8 zn9X@_c9HO?eF^(7HYMik8T!G@PHH&_3VD~R4baWlD?RP$qTvs&Bh%R?ZV^CnaJrJ@ zIOOmjL9T0Q2zSPV@nty`R3`90)lLlRUP4UB9^mI0$@JNTjr%PkU69<6Ee`GQC&bZA zdX`AHlMFzMu~dKyIz=~N50)b!vSkH)uN+~K3`SP&vow^l^bGnQO=m7 z7i0N^GQd6_CF{opxO$CTA;+tt-Fu_}RoR`BjR=?yER#(LK2Pf5SDGIWkMX|3E*Up0 z>7BY`dV!qw{9>UaP8Ch`T@a96Q2vc)Z+W|3G_|1IGlpZ0zC1CeW>mRxjwp!6w?{gy 
zDQ}k}HL>H<`tXq;=g6LKAkH`#I;#!3>9~*{&1CGZvAHOZG&VkZl~~)Qg!&U*OL3R*_kQ_YL6)0b@u(?y0rB+{i{{r;@*+et3|I|B7T7GL3Y^ zDMJu$CA3(P%RDN;DN6rFu3+|92&7hKB5jT5M7dO`o9P~{H8xrV=H6W+X<(A2%oIpI z?QB}&RS`~B+B$NGzV2cmnxqlG>bENFVsxi^z9E2*+2EAoa}nQ|`4Le><8b~3?%tT+ zuUO(QxOUhr{w0DUcXAb2jJHHNb?St}uq_S&a9btQ_d(I-SySJzo)GCL=o7Mn_#1 ztBpt24p%svZu-&K=P98wvH%lHI56g?PN}Pa+R)ELWZ7B>R*J`jxnIwk9^XGAk1>@K zYBDfuIugv;)HA73qIp6nYWGk}9HFqkpSWABd=wOm+T8lsM?o{AWoV)FlVYJD=U;(W z)U<6qL72JR!h0TZVmS`_ljtFk-?m2+2X^2Vt|JZ=;SZ3D@N(NR{&7%Fiidt~0tM{B z_|35p=KbiS?Cg0T6iVR-rF5>aHHeVh^~fDZS`tdJ?{SV*QB@P|3Y!7ii*iaHdpwBy zN+afsrTF#<0F)2~F5dGN;Sx?^QjDL9&W6uH5_}#fz^uUsanf7(r6A|g)!8cF-glQH zc|XYPQHExB591fQC2CL1)8;W@M%kt(0WkLa7$L|iuMz3ID_Xgr!#`!7QF>RT6or#P zXhBZ57-rlq%4JLI3-j0$Sp+n{SYN&psoF}s`s2vd8|Ab3GxPf!!a?ijC% zbj^|r2F{n|2vd2*bk3C5NX0!fH>|h$=Litl$eGwS*DNokarSQj%$2H9)y23~pg)>TvNh*HQt+L| z?HaWT`=@#~FZ0ib%z7JIYdVuYCWtC<22UFBUzMXs5XhxsY<>Yz+@)spCc*s)WrX7! z=jd4HLgIL?u`dJ|zNIj8hpD&}f6QpVep8BPMN^uz6_JRlJzmU`u@65>wBHtH=Cki< zgRe*_SgTgNC{JFda&f$9R}OO;*1&-{r3~Q+03{SUryyDY?@U8+uZU6p>Ur8p7Xx?> z(+f=L9LzPg&`g_F8Wn7eiYP-f5bxG6&eJ|6v6B%*23Tmf#-}1imjEvF`YFMlmw?eG zNELZsI<69FE@9k^KD=lT$86)2L?}})9sLTW>)vP<>5{Xgad&p10OvGF4`v{v`lDWV&_AcQK)*LZaP z7Q$U}5rX6n7=!hgKzEtxQY%K$>2eSkcwSN(T_C{qULap#YhD3uR&p_#?rFCXdV!|6tNcsq9xq@f=^|>Pwf|T|zxFW^xD~66uOp zv1Ma3#LLyfoLN0H0(%|-E;5gv_rwangLZx`=(_PI(%}Wm;Ae^#3o`Ft9}t;U=5qo;dDW2uB&YrU!-&^>WE@w$QAHpo!es z!YH^_n7e?)jPyR3HTF8dEK^>FkOj^yf?Q{WA*}xKkPyw#SOcQma(zzmEXtrL3WGqc z5cg*UTclD<`+c5Ha+8FhK!{7wlY6>2wHyt@V;y&15aJePGtUZf+YJD2s~TMm-7Z=T zK>H>R`vU|MDZ0~fp~B99QOOf3nyvmV(8-NUx6JPea0U0p_MiUw#El^CW^)-|Gj7Vs z2C_R)q!VcD=b|bW3UGOt-;uv%H04Rpa4vTy3J*im?U>Gd<&qfQaf^~WNXI?AszIb(rI2CxJ#J( zAMc!Jx@J6RaVs#zWU# z(oXSfp_H3b*~D?N-JeV)sY~o8YILw;LdPzNx?L&M1;VPgHSQPf#j232sxU;B|1-GR ziSUvUA8!iD293cTWjb7mm^QAVaE*EezZ2ysSNT^mVIU)~-S?)+zB3lV4D z1?>`N?jz%XyA3t@IvLHAI8%@r+fa&|L^+MA%(YQ}543xcmT5o3#coQl3okXB0l!@I zict8$Bi7n+Pq-J-`TOYG8K(#uEjoLJ-XSm6e-du))Fk&AE8hp;CV*Wi{-{Ow66gfc 
z*+?onCkisZSk)zHx~Lqf5)CvdlsqYjhTud#5G&jd!NF|um>931GeJX=J-iGg&JyUV zp`)4AI1YQjU)5E|Ru2+%S#>ARp-ccA^e(Ng{4-9m*>j&OARCC89w6Ijw042bX=L2} zvE4(^8MjcKkO-+L$T{havSJ+eFaW~{mpbu97Vf|b{0EQZ-?Gq4Juyxe6wmE_LE70WO?ITauA%{W#c0tF6aJ68C6ti_ICe!?~A! za`YZHM`)R}#N@w$QQf{~DP+VYBF#BT8;9c00{l$_!lC5QVdp2n=oai=O3^OD%xLW6 z{P5T8M;(P@npxcxREP6D&WVr=(( zo&*Xs@wP~p3#;+Iu6R#?JKRF88!W3|3Cc!0r%4?-buF^;3&1q)xXELqOJOGiYq+Pp zUx16o;ZUZ??+I~1=Aw}r=e?N6(4muCjLw%poHZ9Cn8&onUM~ZU4){K{vvSPvBY|X$ zzqP1Q*cr0}!??6ofHS5es?Q(!cMv!76f}#M6k+7i!u*-uYQj&C07^Hy2Ypwxld0`a zvgnV5nD_O_6(_s`=~QLbgmwBroGi?xMf)nVfZP5bfN*bQsg`_Mi>Bkj9fHk{dcE24 zsAyLnUDJhWw^ELBkVLGLUd=y2Owtwme1k3`-1&oOSITa=?=Axv6|;mrEx@&^NY4Vt zrLTd&%SFtAlmAIrqs6ZRHHma`O|p1FIoQ85_$q0J)3e{}ItSb7|E+Ph09PT2=jkKY z3Nn}KikOB=MehS)46x+gvG)xVbv3h^;$aa^K>IoDUgCg%0lCC-4$boyXP1!*e0gI# zc99UzJI=BPLKO2%tF&J zKaNn)%%LXQ>Gj5$!d%~KW&l|DmJrybyGLGA&wtjl5;V#V@8^lsqxP$PdpseGL|k>P zk&{0FaowcEg@Tj-=gryMn#k7*bKW3#9W@Y72r<)Ua)gf8MSBcX@+Q>O4J4(J4sNCOWG-lH(-qZA^wwyi&gr0RIu9Eh1B z@=XGF4suS$8g{a9XI#;{h?8rQAbtYqg3r(qul`eLGNEB(NMZkmxUwxy6iqJ8`tTYN zG%sv~xu1yg%x8@f-X8*3OhD8wJs^1N{@_a(HOl(P(-k9L3Z zD==3;+X-3D3U2>8|Fl~^RzDFo7zGD;=bSAt#I2d~_Z(4WqV#QIS zT@U0{?6TwW6+oyavgzjdOoTaw{T(M>L>e*iTY&HwPd7BpYTt(P=#{}79F`%%M4`@C z{!n5M(N0KKPVD8${NQq=TI9rWX*tqRFK&3!nQPL@d`Wp0q23aWA}koH8@Y*TtEHn0 z2`7WLSHEQQZ%YRkxu~aSUk-F4Z#ZHVCxi9qV9Z_>!kt*o68fS;fQwh1J%)|?y+G=T z>M$AnTa?>#CYmGfiKK#hm=VEo9J(5`yEkI5q;Vsk*|Fv7;MvBi^&pvsi#@BZ;jfv6 z8qvFn?M2i1*ulqv3cDuF=(%ylg$6DVJm}Oiak&ULnNBvG*{asezhJk*u^PuAWoTA$ z5FbU0e=S4RauUZX@J~VJoeYlO6-n*|rYRaPVjXB0t*6v@%vF>*wy1NkLk|@( zjecugP!|UiH4b0uK${ArINWM*3IO;-fX9&hI5i7SzeEMit}GGL$annV_E)LM{Oj!Jo&vi7_1O)+=g&1SkoxS+^3=v*8hWfq7nyg{ zO0e+kECPEvB#H4-IX(~S#&(+%A8w4_b!>d7unXFy@wN9B(C(zQy4d%;e3Z~^Rc5=y zA%?4HVXjgQ$DB2`gmjf!3T;df8wj8$ba3ku^F=tDdDEJv#_aFsv5|)&a(P97>md0V z>*|_Y

HQ(g?2>;w;e2=PiM21&~GZId`WBH_Q|kK6HZP)UER*ko9XTxJ-yg+gt=j zI@9AistoG6X|${0@%U}>Z9H-^Zm@x+0 z>w)yHlU|gzJ8i<kx_mXqI%#$NrJLA1R~U{dM;Z zr&HXMLMc(HMVdq%JJ%T1FR(n3K?yxQC~1rXxEA07Ntn} zV8F^=X?#r5Pt?jUvkr=KlFh6=h)upV3B)DlSz*r9w8mP(q^>tVj?RMhQ(~EdH2EUL zQFC@DOdr#AY+NN`w7WB^pDWXKY;t1x$^4%6>U4i7iNhKKJ)GH;Fhl#6qFS(vV~HZ8 z`Xpgy7Y5&~CzlCuZy7&s$9PX91?%kUi|IckJUSkAa+MeL*8~y)OOJRv3lE`Cwc;M& zE;y@TAtsBaR?Vnu#7{+-RdogK#arW_<>=G;78US≦`29}wc4xEBb_W}V~$=~7Wn zo)u{@o)JwkX3d@#|IA`?=tZTXcJDl;Mr+F*FNC5X{X&0u9}{V@ogu<~Wg1ua(w9Wm z*eulS)GNfvjU-tnIO~s{_%hSfeWjvONCxuQ-Q!O`f^s#H+(^tH6(F?!jEZtG+6Q8>@dlSqO*PD=R373iYX%ZtwAqTNy`0iRA zAhKCB_tP=yKjmocOlN@Tm1ms5A1Xqi6){SgSqF%K5 zSkZ>-ZVs$-W&=^9k!8%+ai}O~P*qtSr;Bt33`aH^v1fgbu)*V@+c;c^)5P{7z0q>K zAafEkd|0fqoFUNFW*?!Oq+)EfZyu3J8#{FwvF;+!g>I7B`F5xr<20uSaZ%g48%60mn%?;6ZI50T z%S7b65%~sA!*E^`Fnr^&0nRpm6=f!LX1B0=%>l~X()48~LuycnTlE8z!&u(dIA|VW zGo_jpI%!jUD9rV*l_2*c(eADE4k;b$EI&GnSKSoPDVQ}kx(z%?7tafF z%9Yv^OCq!Oet9%HUaNe&tzU-Z1V#C8Ajp}{n--JG5vFtwCg%-P1i57!D(YfEnQX$} z%ug~BeJ#D>bF4_0gXLG!hJ|sSFlvYgpPPR|(7h4^l-|}f@J0mE7>n5V+^_IxJrL_j zmUPYj{=H)6%$R%tVOJeBq$Ef=U69j6pBgsH#^THa{Z0SCP&}-#BP4&BY1g<;Ay0_2 zZ?_DRHy7kiI&w{~US`^*9I3ciy^aZ2kb6c;VQ5MGpcxwWr6=8DwH7GPM!0FG>>CQ; zXRW#DponrwkkjKrBdL-Kq`%0oKs+EK>u4iO(FkM3F6Q(gb^&o)8#Mf{o)sIlJ27wML^TR}9Grn4EgGl|lB`%v3rI-zw1AAf}a0@z|~$!CNFVP;Ai+;vP4* zepXBq={_;LtGmFVybvD#=$J5f9epsA8P!<8GU>OXU01DKnAFk+ZwqviXj7@< ze-j#6swc~eu}+CtE@gGvimWfdY2(`-rE}4)DzX;Nl<>Ay7&Sq&czm2M!ufFbqb*nb zRuHAN0?aytc?5Y{mcU?g8N&7&N#a19I+Vwfot6BjK4a202wkP+33{k313i+YiK&73!Ra;f`c!46*ZG`H1`*S}tAU9w5l8sHP*%T`Z$5@oT|m1a4F~GQ>JZf@Do5iPHI^ zDf-CLG+*z36r`JM;)ECwWp=ZbvZ~04Fej<~FKDuq-p}UOM9wAsMT2biKyUA!)@vz1qA$MAnMD zX07Yz1i3zq^J4X5iI9zhP70sqw`ze}q#NQ+1AMI{t6Qp&7!1N+9KKoJu;sB<{&b;qZo%&18`yEXMZNVkgKtwZLMIuBmQ^n*FpOrw_KWC0Xn3bz#7 zoKH9#F&qlyWirMH%Nh-=6s-ctsfQJTK6|4`Si|HY$ACz>IdDHCklG-RMssIL^j!e$ z+TfQ!TFpa3oK@NfNtgG0VHv-MQ1n>)!u%_$pabYKAudc^Qoi5$A^-~0gkKbz|9Fu! 
zi42(IN)e8XO#!pr?=J=*eeFi1dOGtG$6h3p|G$WE=FJ5r+o4OL;mb^{N*89~Htxu- zR5+8`%G_0(IIU@GUIy+8HQ=ByozC_VrU_jyAFTFU2zSl;jy{Cwi(|GR*n*Glq?(F` zLWa=D7T^;t*T)5vA{*GAWCDo`lAuj#&!jdI%lxWvoUXP1$j(}W7nEWdw zP3*^oyJuJ6C`g*ZI1gS5=;mS_l>GlRAx^KNmCb(4{v7~S{^sy-Cfx8_xe|1*@xKedmY)Ycbq8Pg;ife@O z85iVmf;nTE09tPB_?_b|h0Sy(CIn;nTfW-yVczs1Su{5YP%EJn6DNzHNcLrLrYLg< z@h>-uy05+O8=%$|loWg76@lt5Y>VE%j<7SWN*o=IuLYSy=~+Ojo9}Rao_OvHKd$~g zpzAzm5QEIuL^~}+FjUdR|qp~|twC8UCrsO@HtxVouiEF*Ij;U&so0)JVgrW;+z4BZCd-bpVUU9CSB$`|yuzW#yO>UK!V)Hbrr9x(d_WIe18 z32l-W-6W87OHxRDMTAorH+jdXT9%XJciky|_(w?RieVz3%dt*02zALh`W3N7`K1u& zBlk((O57>XTuX`>oQ7k;pYl&xA9V}!x)4f(gzHbS;$QNgIY&v(xQ7szZ?4`4zQeHx zaBLOph%iTS-iB9i(QXnp`@KW4K!B$u#xHJiM(@joyJYi5tRzf^Zhu^DEqL3v+psRl~2#aU_RK zg7U?8=W#G$;Qmsy>r_`5X4?9@0CGXGp+A0j4+ILrD0Uv z^y1eB!d!(4E*KaxGlZDKHFFx`mhwk+s7T4ODE7aPC@y%Fgtyz??@#p14^!e&k?tkF z1md=)7*7eM_tQ6b+bQ19DBPmZ4GItNn<^nar0YdE;*{C(FHz1*T`vA7+Qpm7fpMJ& z{Us;DLUf5Rb6Bb9OurN`+TVLQ;cDpofI<-OHlg8q--b|1L7C zD$4ZDj2p83{*#V`o;xc+REVT1ZGHF;IYERIOfsqAr=VSPv;ml2;xr*n6JhG;D;Bp4 z^>_8SsE#9_&gmZM^qwl(wQogKkL$%aU67en!QCm3&lThM!rVb5t7hxK61~PVfG)#y z?he`9?fWc<8D8qp2It$)+NDf7tJvDdEyB#@N+y;+izGQ5hQ%ui!_0!V_@}&N0y$+@d?LcNKsTa| zUGNbv`)7Gyj1~S4;WVp|22Xn>kBuaeg*6@)LIk}Tj!@|tk!0F}&gB~aAe=E@OWpTH z!ybtV)2(BTfKl$Fr_9d&pfZ%++GL)1T@Wnuh1^E3nxYj(E{!HpF5ZN(JH=BX$x$LF zjp6fNgLVcO9i`d%DM7ApBQq|2VBOb2+_*X^EK0-TfHKIP7eWLbJ}(#O_HAM{FBxzR zlMdMW|0IHGZ7*Y6(OHK`cNh}{c$?IkdEzbL4rUL*RD}$UNFEASCAeNwZg`LQpUTt5r&Q`n$ z=*f$I?m;9uN2hQX+Y)Nlbl7{TV0smf1H0ntw;)C*l0Kvl#kft7^B7fWXpJM@2F!TY z$AhM~iZ+kva16G!{tZB7kuP9*x1A{eOy}k2@!8Mmf$!M&o&0B%-k3)37vMbQ78hAE ztKYis0=m)B(A3q^Swfs_MRz|gV16gS#hN3be4;&{WP%O}qW-E_<|?-wB}^=OWBd2= zq>?5~R13(aB4!ID7h=XkLY;x^0phavp=peY#-?~nVV7p1PFV3__T!zm?-&a{;77R3 zMRIGrA!3w!=;x045ZYYleuMiPylV&|J_W=tjfkuppjVih?ZGSp;}9hS5oFEFu0#0D zZjIai3*aGD%hdgpNK$KKNslkfzx5(WtNV!Gs6uCPNgS*&t%ksubLAZ$n`+0MYH$4R z6DU^?-w-TZw+nE8!cl`1%B4~fn|%uG%JE8gYfKmI!Y3~^+Wr4nn46MEY9&b;`3!{J z%&gjYSV8|-sbdsAJU<6Vo4O*6Vywc~_2O<5k`1xx^p8d)kg!0(E3`I^zn 
z*x)P3(F~BrB4QNcG`ot!=u=`JyWZDe;yc6c0EOKFuo9P_g6I`AS}{G%5=RLkmmb-? z_Qsi4J zO=z=P4=Sjn^V<-nX;e9ggb-n9GPpqG@eNik}Rb zZV|yO%c{t7Nm3e`w+S>`Iq@Yg?^~`1^6g$QAh7`wk=( z;fR5%5thWF%}|s~Ulipgl5(<~-)il=a6^AY-z=d!9nXuVPPm4M6TVB>CBoHZnuhn> z2*}LQQJyzFWLZ34s4Fikv0_|X2Bhz}b37)1noLF28f5?);vFICU7TEF#f|+lrKo2v zqg9%OWaA*+t9u+Q5bT_|ey05Gn*dPWx@k>uro#SlMzMF87w*dd$~>rFZL}#d+*79L z7;=wjGg$Yj%uzr79*FDAFiZDcmkXk#cVB|s<7N>sm6BF0iRCtf8tr+SUPZf?v`Xzo zs=((9yCd3^Nyj=Er(Zw9e6JlNJ;KG>b`Sh%+?TQ5?2DlF;{fPVm({J z6nMEH=al>a$ISxV&v4jNSnSrX*#?xx=B6!H+mkoz_l;3>GYap&scv+5n0L8 zz+HV@4h`baPznQ23ZmXfr*Tyz!c+9*vD?H0<4q!Smg&z^B3(Gt)pW1D!-PEk3=Ug^ zA{~Jfnao)Ky(@^>mgdD{Cqk1Ovf07dXSe+87ACH`GKurpD2eo^m3p)O1<`IqREhC~ zQH*0I(yF0X}qQ{PiJw&-QoTl~S%lksey>BUy0pcPzO`X;lUx?PMHNXNCtL~6;6Xx${>Q--=Xm_C+of*}JNkCGu7eBa|N!_kB76>6L9zvxXog*q= zlr#nD)HQx@P$xfb^6tCEiK5+)+VHN`9lP(7egecG-E6&#r0S6yW6yiqbQPzI9w2IG;%Qv_>Bm+5p#W! zS8gzlxuYuoh>M$oE^0Qb269cfv7>A4Ab?bwrS&^m1gWrIGqX!AkjZJ9KxbW#ZGjeu zMnSIVRH^e{Cfcd)JEZ00{u%%ek1Cd-2c|%}bTr{Y7Jnv+Hw3zAWJ1mf^08VFnx-}F zVE!x0AK^!%o7Q4v?KTyVob|Z9lm(}Y_9Q1gS8XaZ2`v`v0&%6y>ishTuztquxJJRO z=c09rc=355u1-aflkbYtpxr6bEGR3Hq1a8Rv$8TMFQ(I1_7?1q@fbfG)uP>9r(m?5 zdf6|ggSj}mO2E=k0L5vV8Glxgfiezf4xeY?8dL_qHN$u#tLc3T`}<1n!Y}}v3E)h* z%;=3hMY~R=mXRxH$|@nWs}y)ry?Tev}qXGJ)feopps=lPr zBQy@8-HFurIzd@|@WS|L-%`;`FmBKaiaUV|Iq1&yFinv-?9K`IG?8$7Gdk+}&X>*FjU zY~k>Hf385c6IwR|u`Hvps$iNd#eQ>~0(wG)I8OvzY3Urfp*OnbLOH);H!Bq*ah>@< z77``b!K1fIL&CF34EqeYqp9sep>Dj2WOxt@09-=(Xmw?$2^^hWk^f4+VTmwj-N2&& z-8Ag;Wr(%;uA?xFsNXM7efNY8F1i*fbcL!ongwvbBuH!EvouR#e$D=}= zL-Nrq1ED<*0HehH9sTjN!v39&99uVb6f-}_4*j-Uvn@To1UERaI@un~j3XH2e zW7}3}KER*JK+Frx&5jnqn^+?;WvC z8;Hvw!(K7w90cTl%aFNF2;uUb4tm z{^y9Mekk{`)W<_2&2AEE>x}J60Imb~8VIdvg1-uLs+HVm;tTrAa;$U~QTxvuBo;O8 zMS*!RRvyaJ;n`v5Jzf}j_e&{gAf|{US?L0$cb#I)LL&^)#+D8#0iRMvIGD=|^UFe9 zKrPA1bHoS2%ss@g-BGm&+T3G@wnWm38G_(^9g}ap!f+lXqt0l`eyFaPAI~V9H9jv| zt+p7_$u{?~S3FNNKVt%qP&PIn&SQ_8v}4Q>X)e(%QD~2wg?N&IxygZ(UI%(yut!fi zaI%<|;>~hmh?1jo32`XyxUm!CT!r06JS)LbW2Hj?Ts_1b(glxcg3Og>bbmS8aa%9U 
z1#0YLCLN3mgkHF$K^G|Dw%_Enh8y;^9^F&s!&9#g?2IE7aX3q@nDW)9; z2^*Or3$ajy%T&>W-*q~~*@9qSdK{TVe<#|X&&Jw*mBS&aV$I_In53|ak4Tc2$eH)w z5JvI?ooxSWehOh8v<=B-?Wbh`FHT(EoFRbdTk;p@lz&AjiE(^~0Ovh^{EqRYNQ#E) zdvY=QjA*k;zS>z(-WB5dR3W%l^%wjM$Ynqkff2m*kpN~VyI598)w)d}Ni(r>HsBUM zqv_L?@{hRvQT*<#7Ir54^ZqizaZAr5oFK$G)uw0J9}@tt#!lWjx{fC7*}Z-^#{N8y zs%fGW*9gcvZD%YK?WP>{Q$*kW{8&)9C7qi1#qkg>68kjm;lC6@eghaPuYUqzC&)!% zS3D-#g=2MOU$xdR0J2ptxdp^(ur<1cl0-8d_M#IBYZk4I+Z2Sax-LYkP3N91e+iUj zKh&y)qFRWv@Wtq10!VD+qTcv}!tN{Z(>?Q1VKi^Dizvq4Cz*uj)tvw5i{$G{9^e{O znA)HLmFBjWMZ*Yo>p05#6-3sFd*K9rO|B3`(Ps_e_}Wp^hWtShq*+x_9S4sRJ-THg*dCmOmXc7K@A|R(32bK}{*J-iM!^PRFJZ1qQNGmYi}rwLf6E(+YRUb}V4=NT zYL?8$M+-E67a|bRrsU%IH*eRSmnT9RLoxk)XmbL;ENYF`GJ=Rh!qmSAF((F! za=OJBeY+eBs~3FE#3~o$DK|=gw@>tF$Kx&tCOj`Fn-~y>;?+{yvz0FdrFe1?xi||C z40p%&7lFG~8@U8+jRP+RFt-uSaKWd$>to7blll{qPE1AxGICfQyJO2sAe<{3yfkp= z@Ouf(8YGEFOR?u=KxEi5%>GsTxkChuRspkqYI&j{3R`Gyjt{c<%qr8-|767cwj5t? zY?4j0pu88!ehjyW!Wb|;dK)rUxE#v0lGi`m>HbV0+0;{ta};LA8MjNUbOk|Li%yk2 z#kN3*C;w_5RTGu1Y7=&hWg=liGtQY~#ifqPV#z_QRg{@q)zQi3>kk54P(0pB?9NNC zzXNmUz+tzfOKV&SLRqSZVoe28S$N_h^@)!&1b+DV9F14yiP~71M4uJrG1Q=uvBTAl zqsxK8&h}1ZrE!EX*JIo+yT(Hzy>Fsh({Fr!e_ybR$K0dIaD!{Uf#7W$4}8~xxCyoJ z<4Yo56=a6%P=*+fcG~Wgm{4M3FixtfWx!h-gC8aZl|{qNq-sEmZC)jb=hg$gg8!Nch>46M$GYN0ZtyXadcDfgm&`n z+BtJgiyURbNd6h`jB#cEq=0-WXu&~&R;mw#x}N@+{ZY1!NJ zZXg$`QBQHiy7vH>5lQbUS%zOJkUF(+BO4Ek@FxwOvT9iOUH}-tb)%##r)4BVUOf`W~_ zjaCbRp^|>+Dq=vi%f*R79^*>!D`8~IEyfJs<`RdweHyVF+jLp$NvAU#~opoo(@9mD5)~+ z_&31O-Z?NZ9uk=bd|hnt1Yyq+vQERsFG-Et1-rP}IX_Mi zL?VmWTfmg3MVcw-OBFf2y(q+uQ%KGZiS-|0X4DkjPp|nTG+ED?RUMZq?9Ru?MhMhu zC;ndvcixzcrfd4Gp8{fqtC=0UWWk;Gjy1fe>v#ba5u>ZL+kYAoCZu<*W0ok# z#y*6Faeo1H92UjasC+j6)i*?~0$iI+C6Kp!gt>(>AII$KE-pvxWfn=kOWrT1#z>=s ztoQEa2ITcZsY%@yfH+qf;~7)^J1Rba+lJgu=3?*BZ9IXRDU8p}kxeC+up zuq(vJFXVA5m~CSEi_q>Hn4HrM(=7XuP|DJQ4#sB+lTTl>Zdo-4AMlb9W)r3`%`eBk z4C0)tq||etXxJ*RHN|*96t$5rA;ich{~a=$@>mcg<6Z9+Ao#qb*um^1f{#fXhxz{y zrH?O7o7i>#k$+sx#SogI@f|_LM>7JSDI16ytx$pU7iv?(5wC)}fXp?>Ek%=IZxI!g 
zCK2$7yUvc@^tJ$xs0brbv@O{EA1lfoqr@A-#kl@8|2#Fx)$bPN=Gt}aHu0WFXLe9= z5qrYdp3sRl6v#rW)9uxxH)JC6HM0j^L_=RxQJ$7e!FM0YV5V!xOF z&La(4gAxJ@Q6?CmmP{JS|D1zYpei+akT8u}_Kx20ie*%YmObk~LPZdCD@DbD!D?xh%2fH)A1~Dgf{z~qpw_ff?qZMQGD{bJ| zOOT5qXt=O_wJO zlcWR_i$u`%xj2#XQ|k(l?zOzj!2?1ZZje#94|{egBx%+<_t z+ph@d3h3^N^7IIy`B-TrO4M27k#eLZo!wFxdrXLHQR1_%_}fYV&a9R_a(ph@5vHZ- zZ}o5I5%AR56J4Sm0Xr!+9v2CKap|q8xK)%3%a*RK$VE_ZJTDM-U>i}0AFK>PRa+>| z1clw@xRObYb)Xu#<(TWfp2Y_pM^ecY2qIasm~tsf)&ySp-xN`1p;CLaxNI zp<>lM@yuWM!$RC&bkmZ=N8?w^6J6NHyO0dVxoZH?h6Uzn^iRLHCL~;2L<8b9BeoYk z+NSNg#Gr{533S=GlVaeP;%q^#91dNS2A+17dXlaUfj|x zU*$SrG}b`plGr~BkDah{^egOQPh~g_igbC>Jc@90SdddwFGhsNWAnn?cV>-TiZ;&4 z0!aQKU9@CBwiml3eeI3&*X2jAOf(J8HdvVRm|%Fw(Wu#*rq$L1v1f98Hc(%DIu0(; zVf6_8Li&A63h|i2=6wm7J;P(}cXC7{2zf-IjBwqj4cRe5VC#^iO&5xA`MIQF{(D#e zY^dn&k#gbNA~oL*#l`Cr^vpNAC%y>(P&XxJZ9tGBb;Hpng`J)ZZ%eU2!02&8n$XFP zYk#3`P23{%#;u~w{J9k3TN^@VoeF<7%upgiw^9B9R)&MIo@gr6&9;-Xbw;J7BTg3O5=`alii%t*#H}ws-syRi zy*38RxQFIMXFMr@BvG+rC;#Fm`6u%8$aWS#V4LQjlsXqJaZ3SICdBo=hJOSiY%jL*X zo993F_6^ZfP7~ndxR6q6%pS4)7N8o0a>B5O2p53sQIx$Q-6#wQEU&QzE<-jp=!`Z& zc`zj&`#{6lIM^;1^T0T?9F1CXdhYA2F2$Igb3S$(}=U@#A^!j z3D+XHFDu5Z?Vw#zsgiJJkK?un%30FF;hFKlnNP6$Fk2ID{$r~#Al~l|m8TUZrb+t2 zOu^cWMP`lwGX*YVWyU&nmN0T2owxaJM3A#!=ZwTPz7%54&0@oI_YQf4TKqkr4Dquc zfS3i%LzMDO(f**KXE-+85z^Juv&51yZ6$~<$ZB7V9Yr|DO4QGo^O}XY>=?!~qRuY^ z=-=R#_qm;%K&5;!4egvKAXO#0&YeP>03K;z)87QZ{RK6#`7Zfy2qoxLr)Gc*SCUrY z8e@quI&4(lq&0qkP$q)pz&1`0#kk{!HeOTM^&K~UrznkcMD8b9u&wK0DJkU3uqUt-%`p(zOu zMdSM|_7a&jOdmseYMDmZJsb_9sS---yrA=-DDuKUF&>i5PI<&Pz5k4-1$ zi8so5^EA;O9onSuY6gqZp#q%_QVx{m2jXHuG{ua@*n77;367!oHWBUmmwXNPo{&)i zkbK#&HP+m)#w2h`uJ$U$nxbGLY9KtmAR^0>y&u<@vArO3y&~C4Y_U6l$3pf9Rw-^5 zMx=Qx`Y1$TBsn=xs0TTqyqmTOasg3^DxhEUQz2$C=F1qc|IoA)b$B!(J`@d8J1{Vd zJqVMlJecWnRR_lL!YDeLm;rVEO+s8XIPP~-Z!TkXBg@d4hj{Mf*ugL?)4_Cqq0V3^S^Ix1fbyjU zbMqfTz;%i=r7r44Ij-K#D8<17oHYjTctmGzze8Bw0g{XKj|IUp4C7Kssw(nySWxNo z2MTac!u?P(SSrLJ0y8;urfb>Ji6O<|!s+ttd9@cjUZnHGLof%@Vr*6kL=`xCb;Uzf 
z`S18UNG`%ARRejhrDOM&q>ReI-BYkz7QuIaY)}K8v4&TJF{BaT&M=E!Nx}bKj#4iv za8yspqhJbNLYh`5g#IudCD?+(t|!Wq196dJIj zn5D2eUWMK+tm=*yVJ=p2F-vm``tCWR=SP?h_nrolafy`;xn3M7$Vtf+Qc`EWQkXe| zJrf;lAPJ)G6-*8&VlC!=i;3$wA?8)BOa<4U4xMihbjhwkyN6KciUDEr+ z*AJ`9=49b{&rC21G^+MWtA8cP(Wc7rq7G!0bzl^dT`HDtA{@br>6~g4Mx!plWAxNn zPLnah6wb~TSFD04%goUQ=57IQ(Mq)8HlOX^`J}BAZH?GXfc6SA;@a|WdatGsYuEFe z^VhS8$-Lk@!pvA)2kY)6ie-rY%rseyUkY-^ki%8H5}q%_V+vbxO|4AiNJ}5t7u@BW zHNPr0X~@Y2Uc0$eq|@OM=w8&`?{5S#v+@3bNOsdE017!JWzANeL?;V#A8D6cmi@~# zI&yvSvgo|ev}JBtHl73O=GA3+(sy8(Hwz|5drrVN(;)&}sP177rKMOZ#QEVqqa%JZ z7up9fUUI`5*pk%47%@xri`mDzD(%f!Z61(0(1!GKSftZJX0wxD z?^TeQTAe@_kC~TE`Xyz+Qtr#jK=M%=O9hxYyaj=AR{Zn;5VsA2+`jm|XeWw)3HEoZ z9|%Balww04PHj|yhlIh=4t(Oo+alal(8l_PNdc zUy8@GNHmJH+@2EY@r9j6a*mfIBjcOF$(=0)+us%lc*!IMbx=l8aAux^MVV_&TFsIo z!4iS)$PMzjsAmAY{+z8}+`A;XSLMGDWNy@>KQ$P4wgND>>^>$wQqa6a3a@L2J&GXa zWuqRY8xT$J*k#=0C@JjNjcK9H^D^{WQJY|`;gvPzLy`(nyEOHzLzX z#{qxcUDP(RPn2uU#$9(>rMOxcZIQdyiU)+i*treyse)w34Lted+#Ax_wGB|oSZmN9 z!vI!L>9&j!;w+}4ebp%kx4pu0PNeqkFGoWsvBZp5ieCsLGd6UZN=_H)svtXWiRnYo zFc(FLLR=-nKh_R)x5ZyXdkCh@9RdfcHylq1r*_jgA!u3QF^PpB8H;gLp}}*oAbM5B zv{|w5qC6pdnfY2to0_6_&Uy{+Zw|)3hXR-+Mi3<{p-R zSwe@k5StyIe~D||?qP=4-vxPaqf92pUfcgPPs{26X?{CMsN>NxdO7YJNM&(Y4r{Jg z92X4FC~4F3h9DQd9UE-?z6qeBFmybxyAA#fl8QDJ7sqJ|6IaiEP>GiKT!5G|sJ9nl z`y(889&=!g!kRF^<9419 zCo4>%Oc*_Jo(Quv^WLP6b+1s0geX%wM~{j0QoxRbdQ+GrO$?0%!!xZQ=*V z6LtmAxU)ZvDZ)rw?(QF9X8->L`n#F(kchd!FF+{3APZh0J{3hLK-p3%$E%-ck}h3`md=_YnR}+r zj2|kh6@wky^IAQrQz;avGGGB`-^QUwSxZo&lIG>{3wBJjV(DyJrGZC(f9Km?y$<$W?EfrkjxO zo(1fz>pErd`17*?oBp9!R!PRmepN5BWySn zD=Rz#vElf88Q@}p%kRnyh(U2Wxs}}P!aPmcvLF>>$rR=^xmVI0SrX!=Z{vw1wic<> z*OVhNGYs)w-wQ%u2N!`dW{b%er!CzTpIs8-_{%NUIs>1I4ePpyvuK)T%mq>O6Jf}D~N*0hhqFV zBP2YC8$`JEQP5KhYTq&Ea!{9cE~lF~;|gdRsFs2Jqow&5GJw!Ym2?uC)dvMD91tlQ zpHxk)iKV|IOjmD1w9D{+BO|pj;0f$auH;uf^&9Y7Gz7T0xe$=A*%?BpAop5ScSw{Q z#Hz1qrTJF@Di5i=^~Q@LsWDSMPrQmycSq*F!qtRbVY_F$`vQ94nnKN0thIT_fnp?# zuNrEq<4^kCMe1QSVQ>155VNR#h#|W88tBmmWvz>M1n_xdQ$u{9us>&E8|01CZLbBP 
zM)m1jwYMnG1>?q!i<3lVJ+5zvDQpnGgPUFl$QZAk6`x-3k6V+i*G|8Oa?VI^sm##= zT$v;>kj>vOg}K@V>9Qkff|GX&btSmU=B*0R8Rw}K=fBNv$dRg!alRHp zO)Ki_}tbZG%(`@X+H_w)$DHnd`nWmS!-4v=KO9rD;lv$s;McgGIW4)gOYmFy`xiyi; zA#>Lhv)-~iBGYtwC_VNO=HjC2DQS9z5c8@7|4GPXbcIuj5#esl{*GY`hLg4`yZinImTF@w^M5O@=Ip}W6QdMP)J z`FZ-qm7Ox`QGH4I>wY;z{*7q*#YG5jaOt{+lrhG}f|XApRc8Y462k3}(N5h~yb zU4$d^coy61IJ}IYUd^3ZJS@Z&>gK$Jv?(6^3y?Vjr%V6{}JMBWD>#Sn}|$)_Ap?!KVg>P zzBXBJfBO+&M#I?Mc8*OIb{$K}Y4Pp?UZ7kb9QC5yyXJLH>5N}xv;=CJ zCeIf=+TBr!=J@-%Aa_8$0g$PuulBf;l~kuW))(bMG@y`!?xz6HY2VP8;1Wl;+21JZ z+}=2$jG!PWySj6PxED(Bnt1x0F!Nuh0A$-#{j9$MQkitK4*h^=7++00zpSwNj3cc0 zXZBNFMMF${!k^IuC7A-Zh^Du&kS&Z;o`e`J;1cb1Z+|LBD&&d15MKz%=6`1EG)>Nb z8q6F(4}`0UvCrf{Ki6z{B@w{VKD{DNRnWCrgt-idDJI5?1Ugljjdtjb&JToXvXJ;C z7Cj5)vQ-Z7$Rr*ng}{MyWk9XZD90$YB1?+uG zK=FpV=92}$nP%?A#1f&i;G;YZBAP z`2wi2oYwO7XCi4g<_;$L6<^6?7ug&Y;=2OqsN=@$9M>qE&F{QOj`DLe3*+5cjA_{C zZ1@jixbF4R0$wQEt>-sW>{slqax|pj#kl2FqEXza=A;ROjNhX}k6BoY@WVXX| zw6i1t8xiPg&qm3Ymo@}Y6WRfZ^u7OtGC!?FL^@9k%Q{n50V!AQ_6D$92n$l_wmc`q zX=~$Fik1Ea0Mn*%6yiKGNhDuT772Xle-{g(=4_|Z^F{v??OIf2N4>f?fn1AZi=K`u?57!vCWpfvs1CbMf9D-!nc)aJN&MT9F;%Q286 z<|pOP+mk!Vv2Qy*Mht^-nFyCk(?0GS`q0=C=yv2zsyo*EH?&KDy&xJk+X|or@Om7N z?R{AOy`B={J4FC9$du~%uY&X-Tq?wQ?+|vnGxd7s??gLYtjHE&sQfN~TMggvuolt$ zHt&JCM)DlMa$F^Z+{ceimMkxnBaAb0?SJx5l3FGn@rAhWqT9*tE*)N97V64ZNGi=q z8jo@B=O5Q$qkw3nAjGA@H6o7U88_>EkSB&SC2id=t;woXz z9k~OS>Du03EzpJL&WTR*djYN$BGJBhL$t@pU{YD9{XYhB5oSqOh0gs6h#M|?y?Ds~%=|KS%VaELacfXv9bq`tvNJuJfG&HNIOVT|r+JC~lfWn{{SOP;Ut{MC86-euNQ@7?8VesFWZa5n5R zGGKAsTev&x6z;|mYH~bYu41%O<0fOmHCeP7QO$$Q9Fb069>l#AZ7GU+SsCc$abv8z zS0IM@iG${(OEOoGdBr|{Fgm^k?E!_bxwCI^Cq7Sj4^=QzUk9D&@hn_ZUlFk)c$O!J zkJi}m|50_`@p+cj|7UMx9t1>LWmKSau*x(^)23~bhNOdnj3;@XjAoK2X_~^9Rb+@D zL-r6EG6mVl5=oIE3dj)IviFv$@_T>o&z<|`mskF{&*!=3x%RovIhO#Z$A)Ym>O{NK zbE&}1zuqv5HCF<4LD;}3qo@+(dAMfeve;K-+J?H_*iSSW@wlUh$o^^g1<6~<@kAcI zfoXrYm9ywW64E#2G*>m0_H;b7Oxmn1f|nux2|~LYR{@*?f}yz zmFFlp6w9s#NO5^-Q!jl*zi9IXF(G@YqXjr6{7p#hHqv$eC&ZO#gqvC`0QN_ci8in0AneJy 
z;uaB36gw%MjqVcSDki2hvE$kRX)CaA&;qqkkc(WU2f)u1o%T^!*_McJ%ki*aCsoz2 zGsd)aGN5v>Lu&6iP+C+#hFuciBBv)zNnD48rbEkSC`SvSp19CMu~~#m$QpKz`p8HLDD3$A4FAK>|uS0M9cCMd8$b)svJz5au){R@@9d+|Cz?Ot<5tHJq9I&n6 z?E+kcX(f4H?uz~mfO6Jy^6&dTh%3jwoQp2zkzzb5)E$v&Mb-g$EZPu^u}68jur=9g zBS`bRwRfNwYZL!0xfT;{zeuN`Jpdw9yrkw)$f_^a-k2~hO-*tZjh_i|Y4k{&?sajY z@>Cj*4zJ5(y7WaqAe>uqHihnt<$nmk)PrxOXwWxZxle+fxHqNX)82bW=a2Z!S+dON zZl`WjkhC7G@4OTM?UCESx<{Vm*O>=GI@KSOHp>Dr9$HGt7O7SsDIt?h44dYNb|D*k zo0)r;+Z@0Rf@=^i#(QFCLG&5?)0X2z5fr?m=l)9zvDFs-N$Q$$fCx&(v-RZ?!m4DI zsoxUNvmt`#I98;SuaUTwz3|vEKyC$lqi6Y@Bh1|pb1kjnOzFc>5KQLny^OzrPp@<&wa5{8W|W0zu@7Td`tn_G1XQ9UUjx zqLyQfKsN*XTb#aC=b>b2$}5AM@h=m|s$NkOH|f*4EMP{^%O|&O_Z_IeL+*J0B*?5q z3)MHePo@}q%)~i`&TStEbN1{IaFNrsJ%Cxkt6;q7v4a!JWr&6NfiSnx^n7Cx=L&XO zHCzC)IOG5d)ST2B#}UGO?W5=@7u+BO4t6Gs#{>EA&6q^cl#ivq>*BE|R@jlyE*#^W zICdgrdSviHD0%jt?)u0ielaUpDq2c%^-i(h5lq2mPLKEW>Gr};BUW7<@z0%r zT-GVRd6>R4h)XexzLV@|QHKzkhLfLxo){1^+N=d{KI&%538N+Z9KN zCY@Q$ye~RfW>&u^4^heP%(RuXmfViv-Vfj)l7<0)G1+~P0!ocTo$zOaX(oJT4;uiAx{$xM~J6$NYOTT*x z3*%vZlYcv-pcEg8G6&flkDlK@70US2d)l$w{o3xMdonLB6Y^1e}Alta^dY><1KAn&Qa^L`oD z=EBXCX)qso;d??{=c-{{G}O%mfaTca@;ZeGhnIdB7g;?*sDNMFV3Sb`P3aI+u;Y7N zlv86f$$|Dqby)(4O!Z9lzCs*?XH>ap{ha_Z?ABr6!IVIJ(-$k(^Nag-UoX}X+le;s z`x4~@t>YR5cjyPR8pg`0HT$T43#3<7SIv%9X7N2bo0Zv`b12_Z7@WkJ8#8Y*#Al}v z;}E}uTyYGAb7-#SFV@(lf;=E9Dy!n(BF!WnxajVE@ue`=2i-Cp9W+6^_1IQZ|NR7{X>j^G zW;Uc-Vqdv=N;X=@=OEJ`<4BU?*9B6yLBzcAwg{)vG-q!7a1Nw%oh?(xokY8a@)L#v zar6uFY&5?k9ueiD*J6rRDo*c-C-O+h54ktvwcEK_Focg5>Fz1a2@SQ!6jAQ*^X$QG z&i{uVgIXz9qbd4LqLS_fEmeJs=Uf}9pJf*i>da|G{+(qTGBh`WF`Fl-%~ zkssYH)Hx(4eD$sPvp@=(*yTjyeo$@*^fB3;&7BWmo*`=K_%4n8D%WKC-8s?r?YuKo zD0j!y{j+4yh(bqemH@L_2V`EgjQcZ9m{7)ELejK}r3Gz2L2fw7>kef%fCUVgQ%|jl2>7)?YTwtfh%Ntu1EE~|X^A(n zUkGs0>5DRPtsr7_4)$<$bcZOHt`5`Wcv|%6jDVa-tNsVV=!I=K#6m{2>_J%&Bs$5V zA?_5WAhI89i)9XmG;7!q$;f1p5clMsQd=A$(%IG&So+@*tx1Wo)OiTsu0%5rZ z;Q82FjBVjZb3n?~$tkNtnB&i#-Vi5>bO|ep5?B0OfY+5W#}Mn3aa%m02<|4+v^1F9 z$267z$sOYX&VWlrIf#5F;jWC8zt{>0JId`i$=0{aqn(>%6Y;h%SD7h&AjY?4VX*ID 
zj-n0D6Xrs4!Ng>8nE(=f2)_)h~oXUuS`r`=!P7CoZhdInY7i3TiZ$(O)vr%UTB~NlPHB<@mV3bCT?B!Uo z_7&>d>RH$*bY*l=y811mso1s?#>Qj%X8OdjOjoSdodv8d(b?yVcDb13Sh;x&BZnk2 zlF{M=c`U?L#rROP8<(!jmMA^QtlZmp)a-)=Cqp8p5dD5Xdy6W z0Ul^K>vIaM;lmmw&xlSNtzDW^%O46rA4g+RvZhBxItO$)+Hm87Yk>YNjYfnLv0fRP z^sCqaF4VVkr#w>c{EZL~nu$AYA9ss%D|TbP6_2JrC!((YSaBeWFU1N@`(uQpB^*?@ znkT?TlJPQ$bi|f}U~bYWY-Z)_wL*|ND2)joh{Y};P7q4=v~u!%#ap6B%d2fpY_%{$ z@S7!bj^7D!k!5)iO+(OhE}j%F#Vw1n;K?P`K-@1REijMJ^~BeC5T3M=mx!gqS&+8g zkvn-MMgS~i7Z_VD<~yB5(q)*o=i$D?90_evh9LoG9uD9d7UV}E>5$QS1SscQ*ia&)-}Kll7+c8RVsg1;)X6R2k#>5} z5enyCCGTrE2`j`)!d#?UjDKS7pPBB&%bkYfQqhjnL}tk<`Kd7X#Kbco{pGCVvSiRB z}Zbs&vT#||=neTXQiOI)ERIi- z&qTS?{xRZmV(@1vviM#ObM5~0$q+` zshM)j72v)~=dm%6PNox20#7;zyQqE?RNeIFg)oJM1*9KZY}zmvs; z@e|`ReN!rC=ziRsQtF*g1tC+ta)|6Cad*`rjA|jnxIK;#;T)y5C1Z!HPXjW45)=J+ zRe-~ACL7ji>~le`Of#0IvNnxP&H&?3v(MOApiloy%OpxWap|wXC|sgzuMKtFC?p+D z2p`$p(6FBgq;&0iGwDkaFg-bW#oRNq@UVEGmsh7zR2S&dGA7ycTqeL%b$zKfuFc7b zledr0M8d9)#l3OgSy|KyJ?IfH3D5+FZ-~dw=9{L`rK}d$RnLJ=nIJVX(R;OCrpH)QdlB-^Bu{0QQDDz27C0f-v<#7US=t zeMUrTr@NJ8!s8jv^Y|g{lhR~CQ%8uq7fI28D1Mx4xPMW8o&|vxT z1zF67!Z2H+*jUxdYqySB20&^wQXxzjk`WM{AV#B1Aj> z>_P{VRy5`aGbX$LI7A5XnGWMTebdT?ESKRws=yZjrt2Xmu6RW{W zSAe>MSWUnS|6zh*(V>IAah1MZKsG+QuXtF9lkO?to8~3aZv27`tz)MvoxrT7*|C?t z{c~lhV}W|=euC)AtvZ}Wt0=R!D>;oN0DTQ{QF~l-RTiB)g~YILi>ram$e!K~xv)dy z;e3H`M1u|nU7nIKe>z5JcZ)Prd!&4!&F2rV0V7{b7_9R2kpR-wc9AD&%!_@mbtn>Q z4VPr`T#DnahoZCub-^wa;D$k@ML&s)g*bJ@=Il088ncd0WGpf1_fCSUv@KnN z9q6PR0BGll<0iyK`gSu-C3iK`or0VL3xoR0(>H-oB$Q}4Bz`T*op2gE!f`ie!TXZ8 zR$?b1E|7EpqF%I_g7^%96v9VtpoDrZm1u^YdQw4d3iX3~Cp=8ds8y^p1iM)BB+k}! 
z(k)rQLJ{{T>AN{Q33m34ywMu>h#rl|wih*wTS46QlB@i9OaN6vMN;$C8zP+oK2s*f zo1&dUbN`@(@o%LO;OVr%;h1zA0el69(P~ngvP^RZVupS(vm{pgwMchJ+~T%#A{vUZ zw>xz^gi+TzN{A!XS60N+B1eZ(51aRaSnUsvL;i_xc|7$9q0BTQb_vB;;f^fCLLNco zV0O9?*9z}(U2$U`VnHvB&w<6t`SY@!6Oq^JZy$h5c z%~aMMi&J9cYW7TLi*Sv38bEsOoaltPB-4ADt*h>ab_M0#h`WrqUXX*Zu=K?TqA7)* z?xc1@_hd2HTw!=uilu_wNR8~=5p;c;BU7bO$O~D2%%b6#j2U$}#t3p>kROjiR10uA z?8jv$|GW^!%K!_)JNl-g4YL|&$IAC+aS_Gh`CvN%t_XJ?iS&4ZAR-{X=#QiG2$DwN(frXcsM#rq%_)s~{1BZ{AR;tMA@=}+2T7vpYGE&!q?W;6=5(FmCH;&Ec+qqhlw zNj#kwt1so7#?wgGjI@sE(OD5GEL%95_z*QY21lpS=+sl>Ipp*_8hg&>F8S*! z$5!_{aYA9rb5$H7I&DC)BCZv{UU*7>ysl3Q)zrtcvw!B>{kVr{Cy1n(^}+|&UtDzl zDG$(uylnFa9DscK_?ZIF5CV&EZI110{xi0m5)NJ_#BEYed|qqT`XC1vb|6zr&d2d! zmh^0Ol7ZMph`W2Oyi8sun!;ix*w3!;z=t5I$ApQKqF>+C7j67PpESI$5a8y)A_{2+ zm)Z{tqiy9#t}{LnMZsH!QTB<4Aw0d-Nl}e|G$7)1!S42noQBr}{{lj$yr&(D^=&4% zj$BPoIzoU8JWtPYjeo>}o0$zK>zl$C7IL9qi~~eEDBc$PuffcS7K5jOIxYn1Y&6vfQY{J+Dx7jLIqTjy`?t ztU~AA3VTF*U$_Ub=C&U088>Vi%!j> zN{V6MSMh2d6-OPME#f~yC_*CKTKx$K*DCRiB~PPO!p!{S9x1tHxm6%ctEi5Zp5&8G z)zhI?xmc7_XvKGMqQiQFFqgLv>tkfP4?hJ$C*s}hcwXP|2nPg-SCy|sJM=8X)l7#Q zJPkq)Ec$)zF(k@46p<|@)=9q*X4XmIr_S)xe*n2};#m^u@6QF0ZgS(3c&)!u2&F*z zC02NbZ`V@FZ%A2jmHeDQC(B&arQTBcPp888pT;f$q$niq$lVA^hD1B#>T;O_-7Nwr zzPwhn#8Oe-6wGFl!3u>7#bo@i^sK|}h#AD>*in=V#7bsm$Jj%l^Wyw65I;$2d~fSD zk*E=E-mpoMDj3qR9ODTd6dyQf+$hY6b5ou8F5_-cAU$DdnX%}YyhybveD9W9=! 
zcm-OJXXe_$Me$}Hr?S*P*vb%mF-x{7x#piEz$L8bF`>J72$%KZ2%+YQB#((mV`&=I z=KM^^Z{=y}v50cKo5z}s7q&jl^c62BD zkd|fv{i_fc1)m;8sUXKHFN4ALLCsG`i!ftse;X&KKlf?IzDfivK_I65?#@HwA4tk# ze-z?&Wn(jPD{6SS!4>FPjrFqw3wG^6%Q5y};PgwL>EhaBf+(|_xiPMP#iYFDkj9g* zLZ(dQA|ZJ)<}G1vvu-qlRic$%19Ho4Rn5W+Cv*LlxLw2ZoEtxf@EL@tIX!7T#3m8inu2)EaLnm@LB-&tXQh3R7aSY+B& zxzN$8b2vs5Xoj}1Wa0m;C$13Y?prbXT8w0sZ>H(-z>XApxBh@&)Emh)$DWkH@9^z$ zS4s>OP(fQN!mS}!4vE+veUc9ynJar7yL|-V@*`s!xi#@6Fc1?2lR<;7M0Xcq#^P$P zC)z~2j0d^g~RWzq0@_eOngvhk-q|QXp z_qkZ8S=S&PN2cdu+#`_Uq0HPDuZnO^tmBvsFd8=f1k6P)>m2m9XpJu&-%I7;nD;3F z!(*VZD2~#%>%;J9Ep>92B*^g_rL(Z!XIT)oKZ(Nn-hxPBn4VRkZ+A5mEV=j`%)?vb zvmB2Iaxw9hQo!K9EuP4OG0$VpSB(AtO)z+b0*w?PUl&P#_hXoIK6gG1y&NNDdh$~N zloN4o(heoju8J~{?pB{5Hx2r`+yck5LR=EIbMin`j5WTPMsG2@ZD&6R`Y+$+l^Rju9yN#qU_LtQN@-5Pl;UR z%GU5U{p4gDG`qbb+QrPCUPr}vB~W#o9#*S8R))y>thQ18alSC~2m=KA>_EIGh~k$L zZI|O#fnwvnZ%M&auPCzA$VOuOT#wIJaFLy{BJMWWKlsYg4jdjyaJyXxln zK!k@mZ?v{_CUWNI*9LT{I(6xS$EkHdJjk@4;&PV-SiA5?^g}v++NBw?+`3tUGuXSz z1pzOA3Zz68$y)^rM45AgeJE9HpmYfnmpJCc1tQ$C5~u0+%JqP#VxdRR!^8rSY1^wD zTngTvhUmsxg%&(^{VYTS_k)V`s32EsZsLbEeh?t)`aA3pKi0Q@O>S%1UI?MFB!kt7 zwn8*z&`l`h&r*`?IGn`=d3>$DFzt#B2yfmdreMjwc-8MaQRYYVZF}N0(PjqXU$$pd z_Df+dit3Ns#a%Z9N#_wt)6U~L03I__CG&Bg+)Su*iUrxT%02^-(tF2 zvMCWKBmkW+19o5~L4pF-$a=+VXdc?7P|WY-wiZ{|#;`yZc*aO|0S3!)Tj zZ|>MTu<^cT3%=7{Bz;%z6w5JrOF)-F29WCiHw$968@V7Z7?VZmOE*otsxHt`e4AE| zdjz=`^bN|X%bPhMbHaf*V{1ZqL&}iXEk4^T`qbmX-4>|GpwYwT^($enMl*Lc9EQu= zWJo=yMC$#m5Dg)3fcDxJ(!H*>mt)$!qTQ>>uD1(^N@i!a#b6%0PjT# zbxE3-x{G>!;ay>_2t7znbCwwo;taXsOT>CB3vxtMQcFCXG923oGjFH!R$x!up93u& z{n#PCHh~asSOgO&WKGNxu4C5IE%Ba$((aNxO&!CNvSb$)bYJtf05v?)ulaD9^@DHd;Q;dr|zUKH&PR7p|R+8vUj=*hA8fe3Q0 z#=pxZ={H)4F+txhVt;AmiotkY0EtKsf^m7s9@+26zR`>r769wfX@wI{h$0W9b(rPv z31QYV@cUztX!j7^_(~@vj?b|~fP?8}{6!dvVAWEJ&G&+EPwVPvO|}4&1bIA8&;7&4 z-hwGfRU%4XB8m!Du;aZ{->y_^;??SE0jcx{Cw|=MS^Be7IF*%>GX?p<-jJ>UH-(+C zr)Zaj{avwR5a(WbkcFkicpMXLo_BE!8m|6HWYRAuRA>|FtrW~x@vPT_Q6?$&zVk}KiA z3vrfg56khSXc&f%fOseWt)|2vx}lQa(q$Ge&{pF9M4;!7k-HGoVIs&~bxjp$I$F^* 
zU`Q_!Lc6ez!_&(|xhWuFbpAaz&j;N5m`G2`?y#LMcrZIPVu|^ z*N)lf5|`sg)%@z}<3pc&b|&5~VU&TbD@U`t@(@LwVjxLF`YsH%&gn^9NYze-G+R6L z1SETnxzikiq1}X|z^7^;>7^(_lw;|12+GVG61|f6|5G&WS6>-*GyD^Ge{3Uq;%w1r z@i^rqJH#u6ksEf;m>ebz_K_g-XSG~CCH^lh|hHUqyR^m(aSUABHcUKMWGoLf1L$FTaKGNIiAort(3UziIxT^ zr`6ocBY?3`fFt+dKZmBCzAuQ&h>l7N%UrD12;^SLh(qNGxom%|pQ9C{UdS-!5lY_G z?97V#cH-KkFtUCx#2l7;D%o$M(UC?OzuUODs)+zDV1-tmxLtrJCLd*c5@ATp1}2+H zxGK3y-{$``oVmq+MAKq&yMl2Mgv+2SJviPgL}Q1`%~G5s%8_9m4=;*#0a&Uj-TL#g z08_g#Mx2n-6L#i}j$1`LLgyei;qey%WIqoLi}=xgS?rnuuPPiOIz2=pZ|FerMC0qX zf=Ps%4mP#g!U}MYX6ZrcgZ1U{`C!x;7a;MXzRk$mmwP0mlt{}j;7tQ}0e<4hFd z`p~|55INbRb_#Y5GdP^b2cjui#r(MJKt4wqOb3q_1-LuUq`~(*D2vK{6#j`*x+`_@ zJV$iOnG&X#GG5*BU@-UDYLBn==>l?OYw7e|@2W*m7pf9j%mR@v0Z+%3V~J==fP}deo0cG4=l+qa z*$G@IV6;Ykv0p2sb4ZPJGy|O})Um4A#G>oomc^ROthj?nvP>3RvOG+5I;uI8Aj0`p zh*p9`uvpy=<@9DJcD9WVQx<3#W?HliT4FL%f3P0IP8L?I0!BJ z;&c&CqCv{`4F0%Zm@jFIgWX7o7RuUBy3C8Zvy`SW3SLf0Ia*@He_bSOu<7&HqFfO6 z_lr9?Uv1L^M9vlURn<{1!fe%xZp1GMA%1eD7+;H`6V}d+alL$|OCV1Pa4*7c<0gSl zpB7ED^d9O1a*|UMS5NWBLqXh9QwA{*T)*EmLV38=&r3O3jQ)}ywu?)JnC&P@up4M8 z12`Yf8Hr%&h#Z<*LPkc}pArO*rF6vDziPm#lzQ~y3G3w}0;n6eE-c?)h;o|9S~$bw zEMd?w78YOccJ^&d$gP6hSR97APGiCQKVf9d{HSZ!qw`pZ)LG8Z(9dH@ zg+aY1?pjDJ^JNBi;PHP$S<2;7oA=1q3UdMQ0+Hx=u)pS-dXC8pD9jbmThv-f5y;9gYbLWg#3F3v;IU_hPDTGvsLd7kDh;as{Er zdP0Nk(!cXmOO+f&cUjEO&bm5zmqzac?JbaGhj8@`r;zzZt03nNLj5S=)xvyS(7}bZ zSFd3Y$3cgIr=0?=*4{Wu0L|IX0k}VI5alVXTP8u=sB#$jn@}@rN{RKd6#o$7nzU-| zd^wK?m(Yetj#jH2p2g#>7NmCZGa-x{h8g12|AR<5)H^JVy=W2SD%Yw&y2Ovx9ApOV z?qdz)#btq{w0JRXS9t`4YbEP|LcDz>fXml_ge2x41?_qz-ieuPdt0MVs0$zm5fc8q zEQplmv4(N6*dMDM4d5xK1`iS2iFA}y;V~MI0b;5{hdb`ow?iY^i?2kQaaGB?XHi;` z#pe-pmX-ear697zLeTTU)dC!g4VR?euYWgI89^OaL&p**y-w}>j(+VF;VvNe5rgs( zu=3AVS!)I1B$ZV*CKVummrE1i)2j0;^9E@CqwX>olbO9rzyHGPe@MP3H1jdnqb)Ou`n-xqSH-;K@@Bg(P_Ha4(O=dDad6l zk|vq2^m7n~OLg+#!fm45U0GHT+e)o#os+V37>=Bl$}wJ$!)c4?i`-pu5M3r*qDY{9 zia^ruNM2vKMwD9uMK;QOivagyc8N37~D}}H(C(k zI!*89>HwXolQX1_B-wE+dkP3WrhzR%ufAOdJRI||?KZ#60ImXffObCtW}LJH=pb>B 
zAeWQF3yew5j7JM~Gj(f%Rd@K?sbDk;^O>Bv)SQ;3g)U4ID}fvf%Rf{+*wmh>U{u1! zucRIivr6z5 zrD#~A=b`?d|HeRF81v8IH}x zt!Ls+ZI538xinL#LX3#^sMm#^UxXy;mF%{p6agAs?Su^i>#@!;^X>f9j zrrM&j9hoD&mbz0#sz;)|9=D5d>FhRwOM-KN%t>iHa!rOaBw?cqv@W47i1HfbQGS+F?kQ^mj24S z!pvYj>=)}?01eM%Js*=qz_aAgUf|GPip7GQF88$zE_T}23v(KEJQW@||tSP3t^&F^G#qyO!fy(Po-pUHsgme~H5Q4wpI%BAJf3P=pgh z-HK)X@)RIOb0Sc9O@Mig0fS#OUF9+`IN3D2I@ZxQW#j2cEM!Db<0iz%$rBBttv!G1r~aJV3xB%*B4lPpg+h5JkWY0LTXq{O4@^XyqhP&{&*MxSuAsH zmPofmCIhryyD%6#Tc_F!M7WR(>3O)@jb97$c%Xix`tRjfa2+_AwrP&xUwk9T=wyqy z9q}^SzXH>Vt0d=t@f#uLziwTU1J+H#C^g=vkUrmJg1NLSYDKt1PUG?fb43Bn5A$lG zRi91?hU$(7CF&e2@LCSMZ;Q2WCLBzhHFavV=$mS>&lv1N6mhL+D$Sh>S#NR+lzECq zEiQ)W@x{1Ws9A@Lx6(qUo!<#^U9rm?z<(tz@0sX!uBY&hN!NLS(F zh7((C`3Fcdu?p#GZ_F3q8cEw+vcdxec?N01(mW0p?IBUk2Bsq=(b&=4394*p|O~FEj<=OcZPU$=}?X#Rm8B?Q$fiaysJ9f?N+K0qf=LBg}0(PiN>@ zD8OBl)05nMOQ<2xHK^;Q2Mon;@_1AA1WVkU0}UOE@lyMofV7~vEsT>!peg9SbE@MH z`gVG}XjjC#>`@`qcjPMBalM`Y&Seo-*RjmfEQZ8{&HeGI5La+2djs{`b?(msRnpSz zI=X~7P-BVfE{<}i2r?H?mF<)Jg?J^8sNEyd+PDAgP>DRqPqM{|4`eZ`c`mqFjx@W4 zIfIIx0VZAU+y!~7kuP1t@vdO=qpypFblV4^;m7p4>bOMTX1e6TGDj-Kat}E|0V6`h z6tTHzGp%`$%>$yo97$Y!q3Q7*1gRb8;X8B0jt*m+he^jxPEIgsngIImRJo_ULxgkT z4P88=#I#31Jd`V%2V<#7*8`QQ$?=tFM@Xt$He&HBZ~kx>j{d(^mvVIcHA`wS%=?{4 zv7n+hD*l!wgSRv$z-SdRx>)SQMQ!p!8?D%&OvyLU1&H70K zobdn;OKJ&yOAzczTqW^@g-DlI_D;#WTk*XovUCvSGQfreI2~ShqqOCSCxIw#Cx@(8 z^gVk1Bau;0Pycz!VcOAwT<&SUU0U|Gh`nQ5A>hA-wEg{z)?=n+bI-tOGACk z*QZONJ%M)HXA5FnCUV!fG!HPFRa-aQ@BJgoq>QU5`2yy^Es!+#+B;TxhEI2wdL0pe zAR5jOO7Cn_5iSfnZYJM-1yJb)F!E4+yZCy<(lgP80-ap5G_-C@BWNl-;h#>8AovS6y=zLNOELN+5C#aQwk z2)%T@E4kW>&OoyCxK;$^6<><+gO{LOMl6gu%<%S7j1lOGC|$Q$&^zKX!JYt7OJBN# z!pD8W%vM=d$BHj!^pp<79~+8x95%+5S3dO$7>%bz8z~viSp;~ZK(i%&@+zdqujJ}n zZ>vof2u-J73)V6G&O;_*WQppFahgzf4laV25Z8VU00t+|SC13n%xbHq#M2@jc&c7^ zjAdU3aOJD}TX|v@1ue?Bk>JtUYqtq;nP?YBW=r^ZV-gjnuE3(w5gWY$@tTq|=dT1fd^g9B zb>4yW8K_Q*wye6y7Z_pZ=?8`aa`Ct@{caw)D}JYMCx~SOoj3u=5lJF~xPf>+4}kp` zGtHjw<}v!@MP*J7ActVidqaS^ikCGlg8viZ_8IJ>uWJCU{azLd*K;@zh&e(WiWP?= 
z2Q$}k!e~Xt_m&gmQc)ftP01rpV%d`S0nNxV&qF2BleYMmPvo$-!52XX{f}@xNUK;ur&SV#e?y;l$Ob^6k(qf ztqy>aBSZWCk0ISns_dt?LoONrE7%p9O~L(q`}9v7l{%xp5Qm9$PI}}#juY*^B=ZJI ziq`uSD6L%YLJiQ!ktrA-u{d2R#K~0ZZ2r7xca{HdeUHk|_#qwf*c7pj>xH)&LY-?( zM-Q__IeslD?K`O5a{v&)&_=oE_i^CAAzjl@YXV7*6rA5UWI|!DXXTQm?%E2zSBy_pwgu%TUWCk7o;hWiJN=@9VPiMYh zkjpe$`p<$WDpowqjL+o(uqaMMooD}-0PY;bs_qj7zz+@-$r@0X z2A~F&^xxL#72shwYjNBniuxiQVvdOq(;wwPy&NBlax+#UM(w~6PARtj)-mwx-N6j; zfJo(sVxT;t%vz>uw96H7*{A6lv`455&UKsSk>3fS`lD(j90Wc~A&T1C&uc8q8EY;} zD5|FoH>zvpxJ-b1bahV~b5wFQ@t{yAk7N=-Mbs|`;vq6&@}%+cm}oav#b9fj9J9y! z!W@g0u*7>S-w5$MQiWCQVar2OMmYJ+epp%RYSVRFNsxmzB(FEK zcWD>qVzMeSyXge ze{eb@DrF#>J$^p3!~_D zy~Q|E-%dbp#iRT2Ga*bE9lG;>U4*lrfnH{OA=)`Fz0+yb#(I!BZ{I=f9B z*mSPieQifUc&wxIm~|k1u*03#sB0s2h28ZF#p0c?xK-b7I@Q~CiZv( zB_2`ua$T^|<<}3iP_C`k19b!PY%x_nOMn}Yu_S?bye-I8n=5<2z1N2(`-=LCxL4oi z3YXTpYWPTqE8d8G*uLS&^af5^gV8HG>mYhcD~=MB_DF4~neF1lG#IxWqt41N5|j?a z`lwcKh%9C(L7qVmA>Uc#WmR z?u7x5Plz;3vKu-i@v36rM`e7Ev;(fvtggNh-OqOl2 z(T@OJxJJDF#VMklRTKUo87OB6VN_3>8t3X$14v#9Hro`!Nws2y+a6m9aG9inkZ3vW zuo;-S)PoTQgMlWUD$sc#D%a3HUWhwcTGTjCkQ;Xf=Mx?lXT}+bO*hX%Hlwwq*G4WE zq8og9oFNdwTfbx1Qd-kJdBJ(pXoN&gT459Hx5ZrcXJ4S~wyU~;Y0w`~?< zexV)HfU^X6w%Q+~P3gwVVHh*Ym1DD@x}4*%_+tRKP8mPHQmlJd5L{qShrv;Qd?}g= zFrUG}*nhh$2Ad^aaYp0zKrS-6#T?NO7T~TSGk)J+Tq6*+OB)5jjwtg%F5hUD5AsO$ zS$}$P2ZGTA3sD?hejML00*}@$F;|2OUdNsSt;#q_ked(B8w0V$jt-Cas>RVQB5f}^ z1}o?iA)XvDdX8U-qW96TAONxjDiG3QyA<$RzV&kCbNyX}tR_U9s8B5rl0 zNgQA2andtfIrf-HFtX*L0ZFCapJbBOZ(__=>;&mzRwcSV(UK#&ccyH=5<*GUt^={} z&QR`l_@v?7#LUj_@m0ZY6N;;Oa2Eg`*-DcHzV7) zmUi6)#-Ko_Q{BZufGg%-=HdF2_d|5SoPeHplV{2XyQEonhJu=la`d5RwBib3aIr@l zsRu;35$oh~=bw257LkRCd}NE=2*Z$_R~==2I`|ZnL*fF_P9OadY19qJox&(_y1c8e zZ9O>)#@)pN>087e!p!=nLN`Mz_7XzDs64ML><;1jh|kQh$U|Zsp^n7SL|e$aa->xT zqFwQh5Oa(dh!al;o9vM#rmJWjiD1M&LfvM#LS}REnE=dEZ7XU+P*Ks??R(td9mL1j5=c-*G<&bDg>O8AiVGz7W^2L$7kc^e=NrdJr*_&AcGLbouNvIzqj zZeawtWU}k#P(zQuPoUFhJD;pR?+T(eBRBQLH~OYO)Yru93O=0&2PGzWj-uxagKLQ@ ze^-pJgmhxlZR;P41*N=VZSRi}0knKkQkO%kAY9)H%o`ZA3x&XGPLIXG*k}rrhfy8= 
zEhmX|`YqBgoJ0Vng z+nvY8!TNRz3^_bwW1`S4jB>DT<7z#Q6zOs#zEre}7X+hf=EtVfvINp=?fw`m%oV9c z33HrCmmpD{DMo`3*P~TACNw4DxvMxxzwtc_5lwF%b0KQGwEaUu+^kjYz3@d!v~@Lr zX=C-F3@M?{2g00bazVfg@Y6wD4_=PcJBjhNAkTS?>0~7puLP#m5@FNHGYI20S%yT31<#9$1&r+;M>~iFjuHU#s|MpjeNq(Z;=~6cJh3q5@g~gljT&|udd|#Pww?fH6B1SK1JFm_ zL>N6#63do&SCp#Oi=*}(8vGM(mta++sB8{Lw20PaoGy^s%gHm{dDnd*UDalJ*r^ar zaufw|y9jrRIz+Iyh)mmuQwEn0&*#XL#C|T;Ys|u6iq7p-GzuZb2KoCR6k%pF(Xt(A zi-m&B1Flla$fia`#09836pmkA4ePjZH};Y z7?e)P?Sd#L@3Z#Aqas}SN;E8b<68l)Dx$|`pS@8~`l%x!5iUi)h|x|t*pDCF!-PGqb5NF!551B4#X1C z=^#%`6Q0lGP+GhYbi}3y6Nc)IT-DtXJBwg6_4a7}i(^E(;OK|w;iEf*IK^%hK)T{S z0j@uzUn;~Lb=%{`JSM6fXsg6(g)Cs=#sLEoSs2U^?Ec2(sa)W(@Au6rqU9?+DBsbD)^jOIK{cVRFwd|tOmG#8e)r?GJL?f1HEz-jvovK{D- z6NET@UUX)PD8x;IU`_gfUZnocOM!_0`u%PzBoIgeS% z#xI(Mkivus6XGO&TXN6Iohx;nLL|RgVL=wHQCds9y)K0I;&q?4=ogVLQS<;wf8){| ztLDgrB?s2Ne5Ws@u(QJ-9Rv+j>&gFeLK&}!4}NIAe|?> zT4}f8!L}z$LGEIte%_ly$l8;0$09+lO!MI4#c`Zymjnk#x|^!(1#$iHlFpL4PhSSq z@Zd&&%n$&pt7(k5Hve@t+qSNFNI=r+?D>#Qbsq}N>^3CfOFS&f6`6*S?&1BAu1M-V z9Y>L6Fjm}&JH{RQBrb!JG4Y5f7}7a%Yg{*wMNBRuHLPzDM%qotT9{tm5owmr?B(G~ zc01n+a!1#zC2h&;fXJr?fvNI>k;~Y{A^kf@fVomZS}b)(3vps{f-A-9lLV!sX4LB$ z?eNRIw0dy)MW#t>g-oMv+<2heFNG>6{D@ zPV9Xch>OA7#f;}h0W>sTtx+#t<5{u;_`Qtq1G%3tT4MUJ;Vz%hX5t}#i{twU< z<($st6ABCi@w!kK1uu>iycBbe0HV+C%<=7Zefwt#Ql)tEtRS;yniL~AO&$s27UTSf z9ph2ZaK>6f+Z+v*_5!W!S`~(c5u+M`fi9BI7EM(X%ZwXSqR6rqw~KHA&}1wupvXra zlO@O?LvT!2+j0q*Tc;JH<*t}3z@?;CQY?B)h?!E+%pL}DV>vcF7LcNH*MVH3JEanB z-Kab+OF}-3x`(4i2%)*AL(b8kQe2bvb;Xf+=o+kH>8EQRPiQKIz8zPFBFv-m5Vk*C zp8)Lw;|r)T7<=bHW&beKPM-izJNsk3zfhzr#8sXS@#o|S0tg8f9u?wDhD)59PCXHt z5)A1H+X+91pjjui{J@mh7fm84S9usAkX|Qh z7T^g>Di!z@LAY3s-ZX(`jwBcB+qr4mf{pV)yeG`PMx$I;{@)0q=5%0uUKiq2QMT2j(1Ga{SlVUhJi_!mp($ zHKDY)m-!Whd4YR47VZ@VxVuiTuZjkdM8yv^rRY8r%KZf8s~%1bI>0`q>Oj;gE?0T?>jlda-l8Ic_5}j_Ob1`8PGH!yMsLhxXIA$;D()z zu^@^+t63`WagfM#B-@4XFVjSHV$eHBarAkPAz3X0>O7Hd=d$($Tb-Y!fKylXin&6_ zB2kIxkH1_1=|T)}lHqBU`2K}JZi3N=)o9_L3T2h5n-_ihbS72m$%{nO(Z=)I&2joB 
zdHJIu)=46%!D9TTx|^MvBKVG#brfL^xyaFXoUlt=k$yMKYKm|5?Y7*XDQov%Lps-4 zowQk{Xmg#(UB^e#IzXT~hKifqfPXB+U4UR61WRlM_PQ8&v{zEHI8uO_upjVJnOf1cGLw$lU zmxOy|ofZBo#CbI05{|2e-)2E5m&6tS5@Mc>J{ZM9KM+G#WKkPXeea4V1h@k@AOM_Of!?t8 z6+n&NcP)r}>vSym5dkG9!ovkSU{#?=@6uKL8oahvh z_Ea2k_Qyp6T#Nad1uJiWPMrbcADo`YIoe*7F49kK;#tZ{>wa7=(0v6rT^&-}r9uhn zUT%fQV!Wz=W=|!}$u_g;MiBKnrU+X6G?C^_H?{$EIR+~B@;3pydAN1!lpcI+Bh1N< zTqnU*Tl5H`mdFY zXLMrh9FG48@f9!E*xFq&0-2&(V)xtl#qEbg5<&HR0ghZ-NZww*K#1qV+3kgR@lyO*2(!%~^XDu2cKK-;e38>K|IK3{0@G_<|I3jI&gNWyJaQND+%z*0RuA0` zNtHR3562P_6rm11io^u@dI6)nEaGWPyu4MQtIQlb(2k-inya@7b(Lo$S!!igyFE>g z-OQZi&ijKrCVE&Ma^_61mF^)Xwc26g#2C=GnLs1aOiKhXY*-uVyQig;6h;T)DpBSj z;=)DowP>@1OPfTCF}@K-b@2)(d%HFN2$?n)xtCj$#{_N=>V#(YwxeTmFSM&&r_DE4 zKUh@@G~>`yilIL_4CijtGkKXsgZ>hMgu(fk+)TxNkTADThqCD+TspLFB-AU$N+V#- z7=xc(VtvuncJRS-xg-2Q6deGSMK(HO)sL3?cgaw=TM%WI*Wu(y!ih@+!OIDB;Dhex zo5HYf(xv_ak!CGs7#x-Ra)2h}ox}G8xHPg8iWUCs#O78v#sYo2^h^gl*1-nh8etC4 zeyJmF6iw-;BsU0;h;sMUB_>CORUZIxd7EUKhCBY-1rbO`Gn;xmC?qW=a%i-+9?W9Z z;qgUdX;=^$A*iNPoGi-4m?vfV9Up>r#z;jq)+-+dp&j%TP?SUvxmUi9W3|6PIlJzT zMX+g#0LMW1}dV4V#1bOy4d5_sP2OANy#Q08a>_Codi}JqDN-JTb99K!CZ+v8Ag6 zJHc~u2#cO%dwF*bHL!E+LUsMQ9AeSX;u#+bqWl$H{lvV-vz%(#HuuMsIl%HyE8Zl) zMedf{p>izycMz(_(64TZHAK0r>?Fx@eF1b$9N=m{+a%JRtDdbB-K94U6XrBA4d`nh zjbA>R%DS!?a#|xn(u$`?4Y~24z01B#8Xn*`w z6!pekR#!YN!tI9VOx8Y)@OaAM{qVv>Q6$%{`||-_)@~8NxvFMyoS;ufXJx`Y7&A%S zFEBm7U?PSY2!0BN<5|I;mKs>^%Dj`rv%Q}QO~;&8bIA+qp3bmlM2N}7{;k5yUe;eq z{)Pb8TTgQ{Mn24=amB$3%tr7(!pu*3S&kL|k1QtkxxiAW#pD2=9}fzo@i;7T=h^=+DEC8DImAHuYl($|X+Epx#OWfl5rq+Z+#|%f z%`NqJ76x16DM7BhZoK<=WRbPuW1+5m$6zx@w`*R>faEFBxIuv9RUw;`XH~}6wy%Pw zlkEVL=Ty<~Z1$}BI91RENkhi!krt79b+jO zO(7Jf$i5w)@S@0DZ=Yza*Nb*nCEJ zY`782y~($ER>uk?7kdW?a!uL_$#Y=G3UOZC>~MF>_Tj`FboR`wN_TP@T(zAk@Hp^mE?@^wRjp0^mcTb1=TrxB0*YLh?e%$?s(l3k&mc zDaO1HL?iU!6o;o_Vko6&CuTyWI9&ATw6j>fhKBeK$Pl%LOEi8!NUFoE(J(hY66NY5 z=7$Nf&PO0FPh+A(N*f9ykQ{%@RJ1J$LYy5kh15Lr;xrg3b3E03(>Wg#%-we0^xE1u zPc+>GZ+|#^%zxyma#UhJ$pW+M+c|1PQ_zZ{g!z9H<$_9d#N_?D5cdt4uOO8!Me)-N 
z;~-1(f!!l4?PwHVTdw{84WureC|7pHplDZzM(in5`f?mD&|@P}fx$HuGu+yrgF079 zVA{3STK9`AI+9g7g$CjJ0^L#2zM%%=^H8{IL)0cl?kdoka4X4rh}fY!_7w`BP@w9H zk3^V>m8cj-@k?m+M_Qs3SBY}%)bn+kC--Lnpz=DYOm`bv)z6@2{seFNqUSRsA=IS~5^bs{xvW)2ZKI@NKl6$kwX z!1dsG+!A++cFKCoSCebponzH)!^n16tv38G;ao%c|3~9J<_dD!Eu6Br{}tfgF%>~4 zf{0IqxJjAk;i`D>{r>?v%hVb2Ks@tp7Ij`C>5tchI24x{h&#yO3t?1FYJ*HC6PB4W z8l)SO)kLmcC5$rADY{1Pk_W0cg_ut$ICQW}*35M7vY@oz++N)deId$Kso{!zI96W{ zz}4mK&gP&P8wzrP8f*o~V-Uj3O_^`Uh-lJDhNE`(e-of30#^qw=$kBYKF0+5q9`ZF zflwZ_7!k{ZnSpi}X~dzB;>cYOFFGldE3^<#C-gQoP+K0i0dXzM%K16M&X#wj;#nb1 zjj=j%r7lgHR>&|m=2~shNfrj5S$VX%{fEf3?GY5}vTOW`4nbe7VkN8)Nxes19mIa= z@01O-inv;&E7q;SUx<}f0)a7&74@-|zR6A-Mi|{8l5y8E*j$KHQ=+$k`qgWPzGGe z^~64_Lb(17T1%L_4qMH?Yk2K9T?_@fXdH{QXx)(q>+6@xW%_8`C(vo@ zT&vmW3qfur=3EK-hGXjLV3da~Gp-xffS^*%$>hT^beaGsf<6Opu6M@o1$h%CHAlN- zEXUV*NL-~Pw}XW>3F+*pnkH**427k$9kyKYf1=&xSjQ#VRF`{GC>^@MhPn{ndK#(s|wE*Of@92ooKIYDj_Hf`+OH&`o6n_C(th{OfguZ6l)iMft$X+_nI+O?u0vlW-V^N<@N>!y8qK!(I$&-TJ#tG4#|d%GxRL^4?}Q;~9u9`e zRoe4H+{Vo#muRY?MR!>j(A6s~(jlxE4+)}rZ6%)Akp<3s0vwTv#5!T|-yCH7ZNYVH zzFwBvG&US^5I0s3qof`Cwe9sy?O}caIZ&HOm~Z>!-->cBRa}6{!6+^Z(-;^UXd+x? zeZsi%(rbYoYYHLxZ6}V6WAsfAt3^F5z7=8KaSOw=$rxTez|3F9S}|%wn{&u_`g#|$ zZCw}$DUu{7jNc_r*LTY8L5_{ham)r;8tg81jaNjw*cDxixksnmv1mg;_Yf&T56AHW z97^xC*=P1yX(+#!3!~o<;M~x?E74}lZUkakJ*)DllR~Zzu8M8-6U@ym!kyW(8lzR;aHo~qhxoe)=QLFw4tiqaO+eH$OM`kv=^K%>4f7|X^EW?) 
zO3&4jRnryaWbRSH?mP@JcHX@8lS4=ixDP7Fx<4Wm8FQiC7aNFh6#S+zoO)uv9IC}= zU#rN*n-VG=ypr_I7Y#pkwZ$rmY*Ij)GtxfF6St&c;0Ldy(6u+%j4-aFl+HOwZZE{K zrgrdl9pb7vg4|8~b{1QqK4Gvzs;Zo_e=d^p>jFC-5s{wkbWNr+0>{6G&4FRpxN+m~ zAK#KviBu%!Z{g%9*tQcU#sMO!OJYp7nDO?E04M1-mhBD?-fao&l5w+!s{vNE1;Wgi znR@TDOSJo0XAdiOd?~;+Y+?jSSMItoSvn(EF@<5S8qWzMUN&3QZIva&#T$#vi2~fa zDo~2KSR@(ig%g?SjHEv&{-yfkt2BDzc}|YcSKONLbiC@?>R3zP@J3F#dg50pRoUB1 zJFdG8za_k7gF0D+3$F`7u7l$=L2kntTnSRJGliH1vvp^2WgdcAg=1vABgBm3EZ*HG z3D7Frf>8!3^l)1&$|0ENt(v!&FsC~t2i&cqopXIJMyJEE{8$i=usXS4OFY#~6X>L5 zNzUxp6^9CQt8lDB6!cF4FmqO7bQGKZ7&2|7ffDwt1F@YTv%8D#bgoEJrAhhK!taf@ z19Pf&{dKtjmr(A@5oWxXB1MF+%;ePjBO$Z{cCpdDeU`vfR7==;2ylg}S+*?|dqSXl zV>Yi-r|~-gn>`aI@3LE*pVMvHCa*V+-MhFd#WnxF% zl>f}_DJo}i-H!ZBJNccXJ4B_kCLK>UR%qd+LC`&Co?3T2LClrp-IMsC0LPi0Xe&zW zQ8fY39OsD0g%SRJ1vwlWqU5}N~PQOlC8kh_MNCB}TUd%%`Y3an% zQtMVH!)pYaT16#uOEFSB~}wiAuWVX49dPIh;C8A%@BM1--vWV z*x<S~5qJ2U6v@+)ltwUy>D@#6KV~?RG=-RXWULg4}x>v76#rdN>{z=1dY( z;qG|lJBS>-mE)Y931hCXL12@5s{r@k1}@vi?UMc8in?(*<_a)l5;Ijs4_m^Eh0;5B zm^dL`(YK4Nu994YNA<25*oc%@TPvRV7D(pFT!ivu9K9QW1~;1UrTCL5mu*0gXT*a7 z+;z#dBQ~21?VR*ft={4&M~y&=!o75i+k{DK@;%Ic@>z8!fOUnEQmX& z05Oj0>*6VWx)Y~Q4kr%3pAqaxZQL~LDrk>tAeSihT~>~hr-GSZHFGPf;$qQm|8l1o z_(={l_UdKmuksj`y~Fr#o<@|{&Y=rkPORlPMOZr4 zlf9)z&o2d1kve+Ii~4p;s|V}OWZ8Xz$c23maLYG zkYgstWKphXZDELWIs3Ce2umlSjzy~E(?Z;&FoKOUjR-tx9}%yAS(FSjrhH~N`zBkW3A)&vqIcdO^vnjxky-E z!Kt=pKHt8{Y##O;oJW5q#3>_)l^YcbakW6ls%jsMEq@B>j>ogvjE^w_oHL>? 
zUN1&GvF?FcoTlk@@qtJR!X7ZO&|2%DEK1@YMgEkx$}x@xYz8?UjT4r3KwY9D(A-&& z3(cNPp6n0G5t>qBi6J2_VjnjXT*e-p<%f(OH(k{ep zgy{*YcP-FP8vB5rme^JR0Xq9fF4L3nHKOTTGO3IGMbI{N@RdWt^XYFW5lxP*iu^{m z>c+doW+f*=`DWKt%#Q6vd(hMm$FD_sI3%iDEWmmiAKMp?`?s1*xireKG|mYlh-Re(E)Hq9lTbm{_eAy}5Aiy5~Ia&;vNL(nx4le)pI zQ?4!!7g80c=-~(0I~9XxTInf~H0t8Tz41)?6Kt$*Ym0A1k~jKpWKFqas29Y=n+sP9 zaj^h*YCK%160Bse=Alrh#>>ljeOV}#1*rwsaSsV}N1s_hlDpQS(5@-w$7-Zqg`}fh zLN0X3elLinV7u+&W_`NSecX{I0=h?pIZZTYc|5oun!2J@C+U2rNN3SBoQO297h*2; zW$zDYkiV-RoHv_)+)I_QqjO?#XIf!#JTn047Uc4q6CCno=JMwRJC7*^ocb1`eh?(p zUQ9GUvZvD;^^3!Db;uidvC~2jSE^PsuqL6hFc$?KNu)%j__?5D%%hxmt_U~ER3;ld zNDl!pxbR(1&pt}jXn~Rh_gduGZMq=4bFm3_}bAS$R^sNJthkf4!f-1e)Yp9v$d-2TPp z$3nPz96-?RkFA8b0{D`|r_!RI0i-j1fBa398Opm#Xgw6-X+cgy^CqS*KRFJ>4c;RA z>64C!cA7I#avsL8EDKP}?RCsOAxok~qSZ35ae+A3rpo9N*XIME9IgxzR7O6lNmU zD#S4Y+-4F*p~OrnPZj7=_SM9%L{ZBB$JCj}*I86=96QM7#1Rn$*Ex7Q!gSDN7Ge@BB$5yeM%q?WVJ8k@aFSH+P;A;@NQl`qs_Q zhIEGQTr<##%c+7gvy)X9Y_)B;K{%2!#9wTD4utEEp9r>)I?OmhARVW5ZhTuoOJ-r9 zSBFx4ENlKL)RKS?9u}16`h$gY;uZz@32U*m$i7Fkxx<63c->`C{ZN=QO=f|~Ug8ty z0aD!}lGdxRKjH+vFKI9~I3LUv#?xCIC(^ZJQ>P09Nz)M^z8(yCuv4J zN(8N&o`Z__L^;`dbf?QMgrpR6>gUDH3iA`wE8}H6F48QXq4zdm@wAJ8NH-n5ohZV; z(<_lzUL+ReHb&v6W7m&=8-(;5(2X9VFzL%(GK1xJv!Ckc)WwI{Pg<8PsEb3tLrjmO ze!|T6!Ya|JADn@XASu$25I5LD-iejXF9kVAs2i+v0I!SJTntUYSNvqfEwTD{A;b{* zB#35FW?;G;pU&=&66m_kWZGuPjX^=~jEgYAM&}~Hjh2o>byDzcVdicVPNbg_>0)3{ zUWgBJxs-q3E<8J0uEC~xYIC7ZxLLao?a=?2W2|>|RDlD(%ZO)YEGUg&;Sn>0 zP|eAca8=eX!UblT#jmI4@kw9=YwdL|cl;9jjQE8J*O`u=-no8Lkdxr0M~1*ht^jb+ zxPZ@XUyiMw7v}F)osRuZkiQ$o(+j%5@A*3x4BS=qaR0b47hkWvGP2xsB?#P;FB5jO zD@D3x+SO&!B=J-hhZllmsndLwKj|xV%bxNQkzSk}#;k2`cE#A}YG9(;2lgmx<`aLubIrzeDQZ7W~1~v8r!$H7Ra?(WwopmwQU8%7umw_v>0V) z^gK?WGhs1WE$9jME@93@@9-cpW6Nr>beoI!FS>Yj3e1f~QkW9&c0GVgLUHv9QFIG3 zBQ!c_X~Kf*?2kt5s%jbdh9DQCo4ZaNTyU;(16bC>&?hl((Qq3IblF%N%T8?*A*;$! 
z?5RYz0C+geQu+jiGalk_ZQ?pM@zhPI*!hP< zL!cLzmX_iU5q!gethRs5vd;=ZF7cU6-4D4L%H>;#z9&XSn?15HqsPQ)f?Sq`rGAdN z&^N^AZvnI@A^KEjUv^5hlP~;z>J3uNV;G3S(nCgdvZH?aOWjHSbvJ z=scnluL#R_7PwU%81f1DI=2BcJ0Ctj>J)T#^W^>|iUORBu2Ckp1vd(FhEfws;Y{bg zRj4bShD8_L@T^^*J zURcQicBEYWT`9oK#6zWgiqi`>yA#l*)<&E?Uw2Hu3(O7ESsmz%uZVVe@s*pb%+AiD zRJas~HX$y^qa3C!DCgVX7v${NoH34|MvrR+dN!3h4VlY~Ptp&Jw+%%+c#X$xis!ad z*Ja_-^`lyHu$wVa_glUr&?UpOV>(Lu)Q^Eyu{@0!lDrLsx|(=^q#Ge0gTkB#M;S8L zSpV)C!U>z661_(V#bN-BcOuXcaOM8fb(dsR#=In2gspz?4;CC>)c;Uw_`-UgQg2GUwb*% zW{std=zc=!5PMIJbsr!|=YHv=OED?fK$Mx=Ot0k#{$3$&0^|XAB%JFn_$ipn!bu~8 z?|6&|qJlgH%kqh(jcB;lSL%=dO%Qgktm|brN!L;5IQ1c57Y$3^Dhp*&Wv;r$#4^~6?8F7aT?$DFGiFkt z{zEVgr4GWW-=;r<%&LZBLW9aNIV5d{w3cTG;-mEbTQP1E>7?htU@6^#mELBmd-v}^oYVV8O-Y=JT3j~SVPji-%wo)VHZ9}>mma{|Z=Go$2=V7*^LTfnpx zOq<@R!rX?853BCCGO-9J^L=5s!jI{HGE_I+`y062w-N?|EG)@MYkYE{e# zW6E#*t2R8kjWr^goaeN+$Eylcvto~2x=16t-jj~q#6yhIGxcONU!zn*FDBYW>t(^! z^Au#(iCAvguLcZySU7cMb=e&UKWzeaW@mJWps)pH$w;&$qsgEowf9w`Eq18uN2Ppy zFORarRJI!%K0_2sbba#2B7^Z)1^PRAI%b0te-UIxF`%*9n)xh%`yYc*rO1MaE+6L! 
zCZ5#T_<8J(D+D;Lc^GS}zdt943iTBE{rUWN_9nP+8Hp`_%kS>4ItoPttDRg`C@HUc z3^lG6;2ty+cm8qqbB;jm=b{7di^p?-IRiDyhXP#LluFtGZ1p^#3n=xSPPht!+&!4A zq)Z=)ZwjN(6ZfF~9eV-7+>jewj7@W(GR|>H^u1V1LeC(io!>RWXm4GnWR3f4kt8CU z`Nbui)gJQu+BfY?`N^H_7lb*d7L0obVo(6J#``4Cag<|eMpNs04aJB4f#0pa^#chd zuJI(cANdl$xb~a}=z0??--BKTbXD~vICtQ<#4BvoXpS;oFK{ZWIl|p8%`jB^YqZwS z38jQ}YCS!8c+@KZ&Zh~VJi7VJ8P=Cq^Cy^z$o zSh^o0A`=DP-IKe4D+Q5@n84ode%z;i^Er`;|328X>FtvDkn2Qy@S|_^@u)PL3lxr7 zN;;KKkG;Po046oa@zG&#LeQ#|yM3$>OGTQiemqpG+G_;5CP;>CeY)e*Z-KcuGm4yR zu@ezOO03K}>6gPI&3*cM+PwcjkP|QPOgP<_75d`>o%lioG(DBn{VylA&!luUwJMT| zCN~k}J^r>oN#`QE8qIt(;~g+JNPT*$^Lzn_)$EyZr-D?tZbnlhx0xVZ;YKV}ZWQTa zGOG8A^*wO%XVG z8jPRikuk&BC)WJ8Gh`RerijjX)_(x#AjNb`VA_A3HcOMVz57x|ql8U zh)TSfBTX#zm>Ay{LjG+3dSdK-2v?Buu>e2r0{}Cjt*9$ABXQ{eYH?baTF2uI0o0Hw zm5xz~OGWz|4mKyQW_x~3{*A7>SK?j)Z>@%0ZOymiEVKv!RHv4^Lm&|JQ(A2PA% zra;{Cz$@QsjVZffIWeWpzbD$2WM*fR5G#eaFX`wv`G=^)qaUlGTF6`38~+nZLGVme zj{R1LaP^xw?;4MX1*kD6u4Yx=8=I~H%`h~tsUbG}xWB{}b39HK;X-g|Q!Nk0%|d7? zy3a1V$DN{FlScNVZ0>2y6#^~bbU+j`Fz-WQ2xnV0c36|3C1;kjD0^k};)y$$OMByV z(c&RfW;`y!!rOv}nlF6<()p#WMzSsWia@F-=Y?r|b&-H<3doGDVCZE+Gr`BLC_2hm zZ!I8KkCogg7EhZAffG|%W}m4r1zB-71~`8dLA~WeW+?ufMO#>RWL&v6zc}6DQg70; zt`tP%cI~5Lqjj7P`=EiiSA^RFB?r2^C7<-K90GDHTZGf1(NRHR*|#oG#$qu1lrhAu zFOZ^PY}XeDi7<~EFkw4dBvs&gT|AV9=eKmknx6vqXbJD|sqd>jQw6)V8NF|nwc2&@{vEAZ*u{=tfyr|c&M-&QP zE$i-HG>LSs?dW;YFfSHjX0iUmRAq?(7o%lfM`N5L+J$HRhgm=U_7Y*#zn$ZouFnuQ zYi1Ai#F--fd0Um0`Z=OuBMZ*KSRulU=5a$E%`@dZEs&P1R5hFaM-+mkackm^bjg`j z=l$!~KE(Ar=SxzmJd%OTdTHy-2C_RQZ9q)t&p5bm%>5`mZT~nvr@6^D5d9kx-yC87 z8)2b%i4ZReWn75Y2Okh#lgCU)kBj4WA!bc88ppKbUwxxmdUGj1o$YcV<}k09qrG`s z04y0pdl@YoL%=@fR$0TGB9h`4Fcj9dZD12(uq6k3dAzuJ;Z>q}Yo&M(ngH zL?&&F4YH_?eT9(=b_k4K(VCHrFg*Jahv#1`||K_y_t*n7Z;}cr|STvY`k}Df--3X)* z8aX5!^F(J=!%CTx8;nPUIjhF>2I6Z1JTz4v{7RV@cD2;md`r;G+JnB5?jcE-@xiHI zV4m)g#D7IFh%T7LBUuF9nR))YN8UdaJ;7O{>!<-!wm3jhEkxX_WXBLQgpeo)xP`b} zgg@aSByRdh&V{+#%;H#l?QI}ki%Oqf8~L&Ty4XHEXLxcJuAe<8mMiRbNuKo$8F6?mMV1NEsV 
zFB3qK*c@U>b%!XYjX3f|QEy!OxmptCA@%(?1h}B>zNoy$4j^Qy6>4lFg0k$#95bx2 zxzBM*AKj=sz9Y;sqDwGCXy8$Sd`~F#tD6`9QP5?Odpe26_>Mqsv0ZqJ>uQm%Rr8Rp z+-Z&M+#l8b;Nv8M>C)qwu#(|ZukxKdt{mf9N#GFKXgy5b^{E@Yb~&O36N zYY4@-dapbM+&gJwzH$-8PwUlf3-LE0)N%$56d&6Mf?Adqm0}Zx-Lk_I*U>l?0sdA; zPz=-8W(dxR8Mryhia!V;wWQ1V?!L7T7D%KkvFFrUnljj`#NGlZpES2DXyT+iLS>*> z?2mHLWG&eRTiXv zgZ%-_JQVDV$T3fd?}HW?!toGLDo$LEIc8s+sGr<#s^v};7kXgtf&*$vHZisq<81*h zhqPN9wWG4G#Q5idX~W4F)t;!Zi?NXVGrUJE!0f>TMS6m7;ekNrA_|>q8Yv$aMznN8 zVkted5Pud#vbqNUuk2?!SP$oEMH+77gKDXu1@27(lQ24+TpSf*f5mX6M!9V%ts)&> zpv!~PMcFyUMS@&r>vQSa!Dv z%i2~}GliHZ#NtFZlgA!S4Mzz@LRhR6;!Y9plcT73H%~w|7I1s-8IH=_gNttwaHNRW zwJsfn-z$I`a?YizdGCnK@?V4>GBix}Ag(i~i~T*NHVPJ*Mb>`eUQjexFdCr(Tk^JFklKb@m&C8PjiMHS11x>!wI zXq6%2S~G!EH#DS4_IDRRk*b{teoTncTst>@ll{QL0CiN`EPn9v^rLI!xL!DQNQYh2 z@J7+FN?zR%(_2Ks^*YoQe^l6lHjIP%@wj?+4ZxBR>;1CYNk_Y*B_S_D2qb}0 zC%xru5oQXVska=TZ33t%hcQpLdO5fbyh`YXI@A8~B_ZxHI>qEtCrhVdyd~IG@8xz? zY|sqt92Rk5qbs%&K>iw#V!DW|w=yH?4D)?KNZ7<3__x3b93vzxxu^xytdLh@a$@5x z%pC!D5?b#*B?M_=;O7O{--~kM9DTAvFUM~4fZzy*nkaoT>!nEoDG_U=p_ng%^6Cju z+Nlx)0zBE4VG$O!BfRv9Zs<4qA?rWv>EdnC&JI6o#VW_iH?#t|gxNX60f*N<>X#k& zA)=Ac;qIPjSD4c44an{|B9Dolv>aa+4F^0y$k?qMujO&t_$X$y5r>-8;aWc$M3`k- zjwO4UBZSexrfwhI?F7lW6GMtVg{jJ_qpi(5Oef1IM$q<*MznYcZaB)KoizQOwr9k7 zMwqxLrPsFeA>8WqG~!55bG}hvbZvtv%FN3?k><`hB_Ey zjgh2xKUy%n!rdSS+RH^z{t;apCGqtaLX&+fCBw+76t@d-fzgg6+l|=d2rxI!EaoTJ zH!DXN57-;TkAygPc;ShGG2rezB8#eC)7-$rlsXR-;o0a>JuJT%Q*~oic@V zto_AW^jU188IjKyLUiu86yh%;EHOj6{FD6s?0OuaYsw(rE$fsxLYR3;t>~;LdnR*?8OSJ{Mymb2D^4b zODyc`c)3V7Spy5`xJxwLU_FeIP>YM}yJ|5wt|-TyqRlUfKkJ>9LRI#?cEKO z4aHi{G7>BE_XU&NVTG<_&lW{!1cJ9cIf^-2lvy$xeaUzX2q3ylDp@ASX`92^-|Ae_9jiqv0YilDrv#%DFd%T=ZQqil0Is1;JliQ$F z15ip7hhuvI7WXE`sC`Ak=1G$#$J{L3k90L>;YJ*zU8Ash!-~FF#%P~jR7)23TO3b! 
zX8;Q?9#N*ACkb%Xcv`cIUVpX_ixr1kOzG+@2lWF|9QF{hX}nvctIuLX20G<buUPdv>>@q2y$)neEB&H4a4&_=V!v&_Sl4uGrW%{JJP+^jiDKJluh%&S1*EPn2 zLnccYNwD>LNTdtLi4;N)8;5~h{|ZKQjFy~Iyf!m6(%tS6NGS2_v+o&ny8UV{ujEUc52x zC~3e*>de)aItr4Fc^l53_Y;sM&AkS8SjgKk0-f{>T->ur>K5Ym;q+AdrFd8ng>Avt z$qNdTb2~gpF54%62|#0>+z2y({Z=#)(nGWwkJmfF5w!Br#J;s?H|NNMUm2XZk}d2F z!Ynr(^IGF=kuEBeJ?=WMLoPlL=%VU)Mt5mX1Tp*T(z{240vrX0OM2_$Tp^4n=v%At z9T6TdID8w9$3?H2dy@hq+4en|r@~r9_vrqVf5^45?%4dxeCXotGG$VO>ABZXh(-Tb`=SX|{!)=HN&8SwAzly-hr4-2>fydbMTum3uO9HIXs0m4 zm&ZSQiV>!O%;VV}LMU}>!_3%EVG4tf?xpcP5iaUXWUo8!6X5!`6foz0RJ4U`MzsfX ziJ^E-ka^lp*LopOq78jwIiCGWEzP;OK4)ieDu7EU1EJ%q@tzQPfawYHvgKEyEN7@W zy0jaO$As1PWI^wb>c)@hRzh42bZNb# zSo%FHh?>%Lc!=+gd0&Gj<&t$PH;BxHzr=(utAm?_Ib9qG=)QCB>Hdf=GjqnAD4zjI znzm6b#95+ia?1eim(*?$?9woHF#dPPt%BT<v-7u~VR*8yGG7FI*giX`>yF_d^E z|M{m^O^G)%l51nsI!^i~v0VX~?Q*0u66XjrSGlgO`@p`takX$)Y_<$BMq;&Zfgmvh zT+*pin05L=+sCO2IsrVZU@wxn^7)2v%Mt5$2B&KUcyXP?wp<+Li3A$^IsM?Qw4Fhq z_c;^9+>mvdjPrX1A*1cOcz3!8r(G$_vf>8K%&VvucrsIjLrZ zzgUQ6C40_LYK41+)<&@GH0^=>V{T_^Yxa;3b8{Ap6Q*OH!aWndrz z-^)k@a86r$Y;&Hog&g2;pb#?!xQy*Oi!F=RNYFMfE>PHf9a<^G z^~UV16WtU3`p&Om`YNqmuFNrAV&lk#)N(AjXe_c){B@3@zN597cjjC`dTtnWQ{2u; zyRadlWEj?{T9wVC=cs1nP#4so6vDKI+np`LEJtg+b!c+Fz zH;z!4q7^z>Onx!{2)i{_&7!k$2>(TD!A-y8bdyIr%=E@ZI|>8%DErb_CdfTtR-f#~ zZpgl=!y~7jE!*m1BDh;(2asM1PCQ&8++}74ug0#IK)dF0B`GIIF(`~NDl@Uv;G+Tf z8Ntp|{)*va{w*p5Itr8Jd%|4Sv0^84RWTaB3*@3`fjYA4apnQxZqB(#_BA@4&}@k+w3nOx4}ctU{Z47JfvSwjem zZNadES0?aVlM$T0-W^M?;ujZ;y#-GlaZU0(VdeyDLuuN7Cd8er1rPn%TY;|#B~@$` zOR?$I5FWEXVsER>wSFKL3)q3U;2H>e3X+NR+>=r5Y5b^spWmohU2|jnC<`~!D}Sjl z-(aT5!n1U(W69;{YYJOJn{>Tkjq9LYaMW9UtkC*$L<<^RC!Q(9qEts8yI-UVBr~OG zS_b77w&fqbw*+OvBj3OALje@Cfa{Ha=0Ec8CR_34*YhKlmF-F_ya9r&xXf5)@%~+r z%!CUY;!{5$Xs%;ohiM0+(rLn6nRXe3Z+N4>*3yb>PY~&HV2jN_be;fLmS-7P8T82-fwjZlLa7MHiR`MrmQk3K zajLt?9sFi4&X74!|3xnUBQh<4rdTzjc!Jnk3d{FvUf)%lAMbGI#B zu9)=W8h}qlMwul7s5x)KVH*GD-B3_?X>VL4kTmLfOJt*;5T>hhtc_R>P)%7JzaKR9~Iftk9tA9L>~#Z_Aksc>|Ow-QZ)%@z5$ zq=mlv0Eo+vfHoQy+4Q 
z9gQvVvcjHiIpi2A;&4j6H{KMEfGsXz8Wy`hTuXzsrC#1B3E_9_#`yi)8P(aTT3;v1 z%*ix;-MnxHmbIsL5^-xDX#t8N78aG*;SnOi9QIR#(UlQnI+}PwMAn<-S8VRjoSY?_ z23aD2^03fYSdGU-xdihDxPK!{)!$_?M!D%V5PuMr4YHWSNB7Tb^h{U~2mS)uTx-=v zqZFqIahgftEs_19FbX`e9O7jWBstupBgoMhf7CxB&!y;mtoAh*zLg@^jDU((_HcMDfeok8K!_T_Mu_svVu!D5v(L@vuNADQy$RNWT)|k#m)u z0+Z^qdAvS9-?irBwJ+rfjAs&d6=D_*>ddU|SI}<4V)7$0eFZ=^w9_3IibPNg7zb^< zlCXO#qhLC?KP`hQXh0agHEmre$il}%$atEMCV;P`kqH}i&3CMO3&0jjYN_>pQ$yN)-|8?S{-&*wXE(l( z$6%*e;vlq|A#4Ps_J(a22r+X}R&jDP5X%L*DJrOCc6kz#BDXJ$;}moT*sb+PZ$>8> zrqGv*HY*pPpkB%ygF;*-(9;=Pw{-sK0WJODPEgM^7nF4au`Ex4$1+&OOWWV`6e*aC zhjH7Q(J=>jrfcyyPpc8bNpXxXC-@`rfiMdzi)k$1sBR@rc^Z@|j1>nsH5iF+iFW;^ z`H=ze3t1Fe0$l^1$etmJSGP>>%wMkcPq7oaHk#vHAKsz>Tq3qV>Bx4BC^BK!#?p1K~Acj7czO)>vAFPM=U%` zTx=YT?+bGq6qq{32c>vj@kc=NTtt&p;;D>EPn*S`MNuKVeaJrS;MZztHsH8xPEtySeNC?^>Ku zya{2YQ;Fk*xnNyLpl)_z5Gv5=W3tcwbToPeQ60P*u)4iO6oM!pG;C6D6lsp&&V`vc zek#P{HNK;iXgYx3`aiXZ8rxa)EfT`e)ldI;Mq#pLw9uJU38UnF{|n?QaL;IRR5Ci< z`=a=#i8gmI04Q|J(bPADt*TPC_K24Rl24g)s5Rbqcoz0EpH~osbva@Uh^(|Mlr(H`fn1=I?B`k;1Kh!R zw02|yD>ORp41s2SGt=))B3;LZVp;3HxJ8f~oeMw%sA*K7p9;13U?)C`dZZF>^T&zjV$c)t0mSvyS{9gNCdJtE8HE2gj;QJJ<=B4BS}bhPlD1~KAVg?yo)fBm zf^e2S_SZW3i4%muBeqnj>uf$Rx$VQU1 zTwzZ_v{WDK6^2{b^onpd$3n(U-k~!s2N%5I6}yTySFx9lTSU6Fc#%Z;!4hPfb->&S zdI~c7|EvJ>87B0hrI$74!9uA7P0H&OY^LK_A#QEFFUwa2(asj=n&}p&YW6@5)m1T* z!h(G3PZG=J$WBEVZH4(u?s}#5$RUDq=|}T}pw#8@x~i}Wf!b=_T5_l{Mwj9=JEjVw zy!h&3RiY8NA|w-CX`0Ryot3}PgJw;(?aOnpSCcjhZkDB=CMH786~Jp$WXf7D_R9|y3qBO%*KWuU^n%HgrpLb(c9EqR>yMs|YCs3)n_ck_0iGl4Fo`1t z*-6I#1ZV7=Ul->EBx$xYS^Z1lteV&svhvt!6A+gFy#}Q)i5rEGTAc^Zz_xuV4R3M&DcyN5>=LwVS65*qwFr(+r++`J3^sE z?=Ol#Q|UAKmG|CkpX$+$s<@5Qk|{=--`3q0fD(4Y{#OysKdL z3!XMs-vQF%t=$Vj6|zdylA&bMlFb+FHr11eC3HKd34-Ok)`ap)gnBMUHidXN z`-vrYHC_{uNmB9%II$N1EI{kTt)EzT@7nJ)9xjOP5|DoH)r7pxBuF!aO`nXNG4f4K$MoMUlc}{o6}wwA1LgKG-IuzacZM!KxTUfM)%`! 
zfB>h#@&Pj=#=u*Ix#$Q4r-=#`!Q|2?6GdMqohryx!irFreG)>k znlTtI}&%Tp>x8%>yn@B1Lkg)M3Un?UoKx!G3vjFGBq6pr7@Atp7YF0BBFyA; zps$6^m@sFQM%YTcDug6b=VZDbKBN{ykKS-Gf4LCnT8GAv+wlV2_8cbg;)raT82Dx# z3hrK{GY^DQ))_|&b!?48Qhg;o)A7P#Ukl5+^Au)AY-x%c6{HGiA4cPD5#}Nu8b;C1 z;^G_sje;rjC{G|UufaXLzpWrj7U8La7B4K!5zH-k;Pl zW9b&akqyvJ6pfP>*q>?yaoJ>=T!Z936W~hW^e^q3|0oE-7}FtZsh5en91XLa5W^p>M3IgW;P1w=bG>5)dDX9rSUMW; zg~9J@$Y})&v)alAP1yDmOM4p7ksCt3%r6cI6oB^oqO($A5*9pOycc&xLZq zHI}2pkDUd%k<<9q>C?0x!C8IEi-yKiZ|M_gv8&Vg@Th1Q!?g^I&3TySuOIy?9R;DS z+oP$L)Y7WfOI*B(L4h=mwK;K?Nc!H~S#fAHLD#I%Eq4=4f`d6m-;zH#b{N83LTpc@ zWS^RvCYLcJoPN&D^%zswa$eUdE$P<AeNY(d$1tEfMg=+5R*pB>N1ZIhHLLJ+U%HxqhA`SEZJXl# zjAE#i-Fa&*(M~jD(t~X!$fKLQ!?K@LH@Qq`#xlJVqjwRm%CQROI$RLKCF0@4gA4tA zg?Le6b1FA*B(2liz+LPf?Q$ydnf4mO4c4?3&k$)ldef`L6(Qznqn>(*)j9yoL2u3R zkvAWR+^ftH4=!-H0prr2D+~)Bx?B(1JSNH?wsD(XynbE~s*Hio%BTc8X}gm#!a586 zsVq*H#37t7~$=mxQDZ$-ndY-i_C@}WyXyH{P|Ign25o0 zA?7v*i)b^`6UqP1;xUqG#-Xy?9!Wf8qksntb_h)(-A!0VvEdi(=0UU1!j1#q%knrw z+MR866mb{{4jbw#$95T^YxuFV2$vJX%7JPeF2LoK=L=?)ULjNy9YdU!N5@p0jp5G) zP&j#l$LjhCk>)Q?HcL$rXMMpjxB{!K`$~~cpUp7`QL*G`5Enr+bXR;!fD=Kh!qhMn z-_PTq(!!<7X2%eRX5|aSY9~=1W?0&;a&{J{2{x-H@4xTVxcOLUYBHyOPOSDt!W6u- z#K}chY%9`*n1MNGPuwHG-?Cg>8jpx}p}Kjdg|`Ocmst|hnb1yu&%fYy;6OZd9A9Mp zM&8t=%DJ#m!&rl+2Mgi`VIFGI0}RaMUGWovWIwZ1modJJSM<3>?rY8{7TU-FDj1i zs=LrPv=ci|7~I6-0|}kE2+Dp z7BasOX^zN7O5X9;8TQw-csV{L!f9h6g01NW0+72Y(VX$oh@^_lTE^(XBO7~j=;u0_9#$Pcu9IqvTBx-mY zjV}o^x6`F$u3s=-9lR9OU8s&BwL7f~4-x1bG=Cv{r{{>ChGEz6Jt356URy()@TFRO zIHbpso)v;{=sbYogpZ%#ALXcqo!@36Su4(Jh{qLlMP_l_P>9hJ0W3jd zc+%0`#?`(I(M58nsuM2a@WNgO1l#y=JlK{<2 z)VK(1OgS0E4bhnSE}&I^C)C4d-A66MV~tbz!9s=FafBPE(9VUskau7(&!G3JKngo1 ztBAe73gNQxY`fH#<#<(CRt%g?v8wr>5YOOJ`7r7xUCTMA)gmq~@z7NvK7uq@7y9L| z5r-lnTioHnnqIVrf_aV2u}ox^7uRax<-J0j*GJp|#RGz2cVkO?taUnJSF>Isvq?_( zJouHaXb|m08}umZ01rZfxP4jYq+5B{%=pUS?#hQ(B+_Y1-b+n;&#&q#xf*bQqaT6afT4LA~Vl0ukDJdH17@i z0U_Uga9Sz@dk|?<2K83TsQW%%nPK=!iajr;Lox|^99DW|O3)#fhk2X0;h;zVC16Mvc1B^Qa*4iB- 
z(88j4R9LnXuT~hMIi!>M_rLN4yi3!-vEBJZcD`i|o1Yi$eBq}2n_n%2A+7_(CW~}Y zY4AWZzaoz;b52C*Z6PGri`67r;Mn{EXbqRkn3B3pH~QS z11BA4+#rBT)(;F}>V8X}3f^3CpG3;LT|^8G<{eS_HbL_Un@=gn9v8yr@+dJF?~8IR zrJs@o;PKx9a)!<5H#s~L;54*B@_(yyqGXt9@ezEnFx3&+fHVVsUqEC z?cD8)=R~{8c7P01*S`eJEYz}xn=*hq3v{uVN-#Yt#~y-QESw&o9ym|{-F)mL7tPZK z_ICZ?#X*Ip#X?TqF&QoF`Q2K&Iz5sR@m+$bOA~%%R=<=md3e~MefJmOs#p40^Yurw z5Lan2Z{$T$v`Z>a5^QDS>?{?UdM1LnG=3|9EQb24(RLYOvp|Jo^u|-Nu&fdMVtD*m zkh5u$i}jy~cI_L}C8p;Ekmhu}PQ0dYmZqHdsS8fO98Aie`LR?%8YMkjHV}7aWFyWM znYE6)f?rA5^BCfLfdJPT69u#rafJ}4KSx&qHJ09z$4c&(i}9K75z7t9?L%5ZYUv?? zo^&wf(E!Chb~H{C?CQ)$?yB*j0N1x(D!r|*ghs05Ix@a20+B_9NzJ|{it6B(Pv@w8 zS3!CvXVtHDfgB7i66zxBh!jOlcl=QpAF@TqGeMkrH6&Ti#adOp<_Xc6Adc#4txSQ} zyatf0iy8r9fhe=4gA2Nx@0}`yG!H|TcrW>@d4$2jBIyBsA;cA9uq;LW_iIV8&Et8p z*!Wry^Mdn9hWMQYxFFiXS=oM|Ko~MM)He`A`6pP|Bg9n!6cdqKxG+u=YU^vVUF0x0X`lF$#2~o0G&Pu z*g6lmR|rx+Wv_kWPYSyG(0#Ikyh!`7@$%Y-6dj@c)J*_zec}$u(}tp)jJh>yQePw|`iRDy1ERaZ>#5c5^KkuKbI2Q-!#a z>8LvXDuA-;hIBR7z6AL*ex-Ov(tD|G_r*>&YieA{*eE?5Y=WJdMiJhwN3K7OB4J(hi0K3>*bvMLqQ~> zhkZEhSnW1QGjevdfC9(}Rn!9{vQvuP^C;*LOIV5&ggB?p5tcu2@Z>xaT-F@^fe^C_ z4-~Y=Cbt8?oXLAlk8Krp3f?CS#S%g0S5-;|{A9#MImUj-Z#(@x|CAMBy1xFFAZJCl zq?tCp1HhbH$ksc)C7P^x;3a;lup=*&$}Bdz6TmFcew)vi3vubvkV|bE?*g(g0WpB< z!4EC~3wB04%)TsspV1AB4v0}~^kaS}<9ZeV%?i7M6U%g~nsM+HVa}TswQNal&XHL% zqQ_O15ErLB3vg-B%rOgi58Lzzu-UG2N2wUUE{HYBf{wU3|8;h{r&o!`1Z7-e z^*~me{tN{E;_D#3{&T|SDH{25EE7#>+s6kxnTo#o3&>2=^qyFnRDP6$jg~)VpVtJs zWfxA|quog{{vyo0(4h``zDEJPgk*5Z{2^hLg9osy^b?iXds-}itdE_YZ!p!hiZIX+z;|dTnxtY_l*j+RY zu*m$}uGex;R7aiq8u`YCT&XFUnQ6FmmRee}Ttx^1D-jMXn8HP~%ES zvKwa77*yEVapj0=o}B$yU)LU|XW@=H^>MAju0>lB?;Uu65|T~(+KLaV`^NMqY6`P;gXxC1+Zsn`s-x<`6<)oZp zk>_r|A(qS5-qnZIeH<+W9&rc+3*XHs4sh9mf9%OxY~Ax`nS7!U=c1jlv8ZbnP*0$?rCTS zCp>KqMz07nGh44|7G05}n5QuPV!&98e+eZsIh97=iBqNBp8;_NhFKZtqF0L`4;R$I zmrdyoZFx-TO663wTM*^yK(;oBO<3gQVdSqND z-|QjgJPS+}d0J~U&JvLgV0zdk8^F#MO!i#;lIkSR7j0HDPe^=U72?|I=w9{)TmBY^ zgfUIQgSH5dz0@hWg(DLE{OspyAI?UVxs-;wE{8a~M6RS907+=yxy{yP7)wg~JLY8&I*mAJgfW40ka+cV<^d@G!EsWNadv(5= 
z_9CPuHQg16W&!@D!jnl{M;nNL3v;iNMj?v7ho-9255UvNBO?64u>5sOckyx-fqC`h zxbhD~u)OrK7kOHwo0$$O{X|-i{5Fr(g1k4pBV!ccny{`Z;hRK@ZF@^p4Fp4pTiD-Ql2=*a9jMF;j?hxW)mE=|jBNt82*9#_L zJKKU`uV(-~U58O4vCSWe;riFJO&sJZXzVHshT*`35e3(GqAjy?rAH~mv@8b6RC{Tv zL^B{^`OBfsf;>LwePw=sR0vFz{XQGKUx}mz`q8(?o`3Rhq}9OT|Na??f?R#9Q=}WO zy*knpKM+k$GOsMeTcTVtZp`RP_}te(NSgggZ?ub``aPvyTc2Jepf;!{e@O7)rYssI z=tDrdW6eL;l5HxCVKpD?2_f6MZfOf{5#{!(6b2ZfZWEA+s!l4k(L3lbPGlyoiJbd! zLM%$HjG?cHbSed2Bq_vy{tB>an9&s+%ht}X1E;Rpi61Z#&Ixl>R1ygIAF>GC$SB24 zZxF%LJuVz2Lb@=^qK)Y@fo^+@($grRea`A{*WwQIl7*fmG1kEHq+ykSnFLd*JgGh zxrP0V5XWLQL53{Swh-pVPNj{b^3P;bkEwZ=5OUYbS7Y3eXcw?Whu4@d9q?W)73n%S zmJk463$j>yTLfHVQ6CbVD02ivWh$s!lxjDkhSnYe45PHD$^cd{jqMQSgi>(^>g!51I=p_RAM2@hZ zmp<#SLOiM+Ue$!I{ahUTK`nc{wMg^UCB!W`r_9u^_3L6`&Qgj|iQ~Oly87s{=FFQ|DU#O>LR_5=oiRtOZUE&TE)7rvE*3!fWz0kU9}t<(8_58FaqP1OC`{mm zSaiW7ahzyAVn-z}nrDb~zMQFOdG?kN_k3QnMRTNU_S<|M&}nP2t%o`3n?DokBBFF+ zn0ZZrQ^T{3>b32fAY`W#8w}*H6zOzOGNr=`PUwFv)C^h3o}G)fc*XhznA1KCbN7MR zNr1~mcO4y~dm_GT0ebjYDqp+ziq2-I+O5?}JR;cTYA&URE8opOtPG*1IAU#LP)=H~ zolExrC(`9)sNlLRE4h)_;FF+ke);^cB_Yd!kF5)C`9vGTCTcYPEXW0usypIS&@LD^ z8F@RD9W-0LNrGK-3{DYu*4k5qIkh>mvL&_Uf?R$%NM{E9VIE0K4(6t~C5yCFCsJuW zyxXVw%sj*$FwHnkfUBitIvxfzfSfKA*=%g9i}v*hyPWLf^e78zg5ekt>~b!qYmLi7 z^(0}|9Iy*v$yAd0h1xWh>nB$sc}Atf-X+KkL5bEGujEnS8Qp;QSOsbHyAJVx;WH36 zoI>DOQ2M0{q;Adi`Guxp0v8L{7P*RratqGh|234rxnX)VN70pa?ykWPmYLP)~X zCYG!{@t`0xU>FT=JS&d|;M z4~^fsx>c^Z(akveUt3( zy#*lboQyK_8HJt6jPx$`ky`?|j!onEmHCqBtQ6xM;Kr>2T+p7OQ5|vZuoVbwz+q)M z&K2SK-3*!|@u!afycmz0T>2<4X`ur&~427L_3-oG?tVm+yL6PmOdH_ z_i0k7T6W)#o;`*U4IZlg;=tT z#Kqe~kUq08YoXMI-uxU8MP*E8Way1`c7S%B=cJpA-xfduxD*+U7e%BA>dp`gmVqGpM0h=^y2phxK*^a{uUHdg~QQ3|6^XuxRYNK#;48$4L6eNxK2i zQ0?50h-*by5aiq%WB9j)I7J?(i=o{i%@M9NILRINa1?aX-OSiKi*yCJKQ}z``S5+>6L(IkG;gYdjV2qZ4#O6Me#k(ojECn2c6l^@Nns9oGOac z=5QJ75rtj+jy_)V9*^fUq&|5w*QDCJH*s8p?4_Mb3<*;ftKMAsKLJh%j}7d0<3=H{ zsaVEu+N*gQoR1-QwA){Wx!SEd0r}6QS`rvsQr6gSGKkx25Ck!ZdO=E6H>Yk8K}|Yw zt@Gn7%&Z=Brhr>^Sdxyh2xRebotAxkgRL+oR5a;9}A*9 
z>_w5Xr$w2`*q1XcfjukC{k0xb6DC1!U(yTyohMc|Aj@s8BTOSI)nU@bFb$`r)1h4* zxmm%e7KZOH5Z(>zJm5bf+$%L?$q5!&uCpJg%UR`;nI&Io#e`2|?SXQ_- z$Ax8W?{4F)I6&!m>tZLXTq(sC`!GPd5f*qt63blv>g*Id&D|4$%nLIcgR_ zDy+>oE-uGaqNy29_bPF-2&AIK$nviP39AKpqN|ifi{^ts%$bF(2ZotlSfvPbshE%O zHImZ}+_;LJ59W6Yz`Ibq844>85#SFd@6UGf;hgT2ea!ZU@H+*PiZ6B)VOGzOu_>4R zIZ&@;QD*DSo*fS*in+%_N9BI@*ry313GP1+#c~lD+jLCuifFe0BYkoN8vhhVy_?(S z#DR5$wXmBN8_lSFszEOrMVq~>LOG3!a*k9mJEsG!R$u$(Fw6;JN6{`G@+>>W=ojQR z!NoVD)(Zm6ShS*OKJ~ieZ4H17hG+<+k69^_I<>3Le-fc_sJ$`XQ`o6#tA}09%o>30 zB}Le1mOpLgy}ei{!sS6fnp&h!kU26em*Z$f;w)jSIyJUz+~_OCl>%M=c3(1Eo=2l) zc#eVbA|4b-$+c0(RQ5HI6qX%tysNO=Q-;oLTy>vfz1g5{BCKymq3b@H6vJT z^v4q-yrx)y!4JmjXv9Wh?>SDu2jdtLiFQHJzsn)&YeHPd7G5kXRQl209tP&ZaN3B~ zOWIgW7fR-Raz(pe9)*QzI$)tK{w*})14AOKu5IUnP{$@LIOi(tWYZip#ynDv!-c|> zY**H{ij;XfATeA?Oe4HU=Y89^|A0SX|wPTHv%ntWvx!6t)A_E{kwOjj-B zSj^~^Q*c*QtCMWC_OwvTBiaPU{+&Z`O+EtzXj10^l4eu zZ_^H$agB@IX{ftHm|J)h`C&r)s}Med`8(iHrn3W*l3|CJl(JjThjt}(T#&r^N0&e| zs7x$wqUc)Rl|sEen^>02I8F$;@REzn?Ba6K zU>w3oiwkQWuAbB{=Fc2VIpn_SX+ah+62AN_!7_&xu{NZ@I9@aXpsn zf}HtyQWDZv|0c|`UFTD6RNs4a1G=R+fsk_ZL?Q4JpQ)t%v?%yjq5H6^dLfUFhX6S~ zZz~ePId$?}Afhg>Q*NTf2cKun^`LHpr z5NIJ}#OAzcheZHxXqK~GddUoy-``*Rc96@P81%3W+G_yNWvMHpmPhh(WPFH2y3u$_ zh*{B~yL+&F^}!m%Sv7adx?^uaE(N~37%{#t!0h6%V`&^%hW3PM9W=tg#PPCVdi0Ez z__Kl(M%J6-OVtTi9|D09*hh`T;UY*7Lp96|2V(1CX!Dk%$K={6CJ9q&v*LLLDK^K0 zQpiZwryLLDk+aafC=L>7Zp!RJ6E7QGfi89f_EXUz+PP2&9LT`6-WEPKLwG};#v&n#xIKQ&Gt zgEmiQYO`AzkGljpscdP*$&4^pxHa9kEb;OpJpOzBxgJ^9)UnADqMBL#*k2Z7GXXAX zdwWwHBT{{eVfq&eBV%=CPUQY9!bRz-4tH@+BHj_?(l$w#J98X5OKRfkSR#_o%ewc~ zy|ngQEA#7FA{mD!r?aF>zLdr6L#MOO2@uXm%Q@83ai<^`8ifMaK3~n_%wSoe<>ecK z$PI1vNNjN;1SLrqeTy+&q`wE@yzXEDW?~vF1~>+qBh0nYLn_gc(NgcTqm8O)OAkI* zF-1zflw<#I>j#e%tmxEpZx&*SNAJlxOv|*G^<_{>xZ>^=w?~%$1Po@# zgR49@A=GTwOL|ciO~E--PKRPiLw8~xsf8U~oGjV}n^9!F*wYhV5#&-}&dX|RnE;o9 zH^6H8oxX1ZyIJiL7qQ&`TVQ4uIzc^{#(dB!&=F{a6khD@tz+qE_`{rng(OX|j{p}~hq~-F4;SJ(rWeN2IqTa3DOeT7>=x(OB4{PUOpdA3 ze1T4fArWs@akdaD&Qr!|fXGM&h(@%6&x>^J8b`{wM$-FcanJ>zZqZhm)Uer~CCCvm 
zqbae>(DZVvP!g}_h=9@N(-+ph(~&p=96Jl5bMb)XP`oYzuA?7XjK6_c?;>a?DhJ)F zBBSX%p{@h!#!ikN286h^=CbXMifHqun-z01wT_Q}8&C^dU6qalzT;0|4Xc~MI8Zbt zWjaBj7Kda%!-nD#W}}&+VQdGAk)`=>1yuYo>AU8*zr;@s;)UK4i=GBRSt#J$426Lp-w6J{iNRwT?XXZYnWj7ceN198rw@YoXmEp z@b~wgF5rVW4J6^)mjC6+(>fKsb9}ni=b4`!l zs3!;xxPl09b?RP|&;pK%QUbbTn0#R0BE%JEHO5AU2JHKuW5HbJZk-UHEW`~c1z|iP znp8_Xe^HE`u7sj(h~Giq{FO+rWjMl_xPyUKHI5yw0(VxORjdOvBON8oY^9}_XwJa` z_G>~tQEMB+DG7J8w8%|AP~9n0r^V%p1n+y0`cf~axufx^tAWgEbt#5|q-)wtuv=*s z_u91qyHb#Ojoojifm!34T0Bl((r#s&Yk}OFD2(Lj@pCzXW`o^l93jNX&&9Gy1KfFn zTvc(9g>`paCCofz5bTPTd7Lp!cvEYfcO7xyYfrjBzf6=Rgq;bNm~G1dsQx?@s3$4x zAKCA-zPMfh-{Mq-33SEvwV&BIvvK;TfUL%FeyE^_zz(?qkOUif-J(xnS`mll1xz2m zEYdZfRqVuWEUp#e3g9@jCmt5{UTe~`An{<1rHNM~(KEsz0fc`eaa`F}~1^p6BmVa*a~25GTE64xYAuJ@+QPPxp_RUMaa(S zV>c6zw&2T_g|WIQHx)eL=bs9oQ*)1(3$RQ9CIr% z+}|2n+T#t85*pt48I1AUYM-M?}){jf3&32=g6l6Q=0#m~=ak8+xpSt4ppO zWC-fUvIghwxFbVkZcn4qMYq1A_LV(d#|=3mlVGH4?K_F%W=c)0o^iS`OT(fnkr*P2 zap{i$VS%rD7GrrvYe2zePdq4^D&T#*6x;q70{$%JRo~>zDekT%nGR?1Sc`L9fo^(6 z%T?wtl~^F$W#TTmIz9)1!pw0UCXB@Wd5Y;JWDX&4b@*;k|HT)|1Lm1xg8G{_Y$@MG!+Xyv4v?puVH{tmf;n_t9i6za^NIWCbMP*S_j9=Ujt?_uT{o?lulNwQN2Q%#f0LvPqF=u+C z(I$xGbdC?p&K0T8vDrB2r-UsSns9O4g)-tOf$ndxjzdzMa`(oUg!0)O%$#mjn7SwH z>i9z*AN3Oovb3suCr2sBVg_H?qt2P&OS8EZGaasG^CE;WQr>gwr09E)aAvoZ-uiVfT9RgB`yI3Q1M=6UypI$(pABoGd2mlQl|F`}*AGj2(ku>cyoBUZI^^^eN zF_Ohd9Q_L?%y)CK-HC@pQ?7|K%dt{nmrJ8Pb^Gn3AV?e<^Iq&4*LV!ljf8S<3{#fD zm?8|uVIfwEn?+c{XCM{vx@fb3nv&g7zXYgBHLk$8<{-$4wpVpF$NKeikAt}+SV5}k zrwO5cELQr`G$v6#P@o&6-lMGq;yYPXs)(mB`8CsICeRt9k|h_6aB>Jgp^2-n2||`8 z&R}GXHGTy}?Pan<5w;iUB=q2w+(WJuqygbJR2qL1;nLt*Lf65hHT*}Gh)mg7YCpD; zNUqs}Qu46>a*kLYW8Ei+V=gW(b&kZIqG_Ya$j;LWo1^2^K^zdgC4eHdcd!ci>aT0R zCe2@*E5K#u!NatWd`S?s@(uI&KqQGT#WV8Zzp2GWCoQcF*VlktDJgh$=EVSfrBJsS z_Nz8(yFnNQ;p)~{toCFrC02esImqQML9SJ^%!@G`Ll#;edc9U#K zvb%Ii`I611d?9rc5TvPy2!9LG6hx#;6Obk%0i;L=K|w@mQlzOM{r`LCy}6e>&pvz4 z`^}v@bLLDt=ggVMOfoacr+U*r=czHv5vfx2%;3S~PUq&71tWU_80Ap)1GBC9u`6kS#xPcKOE~n0bte= 
z-^7YnEPa1Dkj8OAM&LEyA6;2WuB|vm7WCAaLIW!96~Y*Pf3=&*NZ|HwnzDSlb&5VT%C2oLaxqV`x|H`n%agPD%$cT z=NP8?I9w2|La}D=K0CI4%5R{kVbjM2B%)zM5AI0BUL@$D0H<%}#LLy$@4Q}JV$B*i z3GgTI=ty1Wmh2U7^u|OMe-~+9)8%AtEdM=!X9+6^X*FIIWbVua7>W)5ktb`!AQB{L?bN&&83%?cegiaF2cnQ){8QK}Mvkfex>T_T(w&4d=k zPYa=4D>^&l=6~{RcDGFqMaK(}uFuE-;~ZN4m=fkP)_1kz#(&m}Igq^I;u-;7A95E2 zr^RLDq8w|y1n$fiGTQI(GNgBuX+r7b1RbOMu7dqdW~Ow{QT$98MXaf)j<5cUXIG#N zO(pq$O^6xEG{efIJ(`L{L+z6a;L%?sBHMn9_%SHRtvNDAgB&5+HLK<Le5puhJ)h?7F0Y94>p&j0 zfZ8E$6uM2PyeK%)}76_NMv1W;H zWG}Jubb!!56}b$hHrwGX=VGTxjdEWX;t$oy#U~CIz_4eBuv0}*EOe~8*8Mt>G|Uh) z*RKEL*%f1?_Pr4Q6Gn23JXAKm@HV75rdgIqt(c}pqfM|mHZXloeC!=a`UtVxvuO88 zlM@>VH)zeip@90zgMBjjEEVJiWlI2i)xGu==tjT?gzf!%c~-614H%>13_(<7 zN$+@^SNxpr(Lp3uexILR#;T^K_?k$Ur6ea881;8S2%Z+k@v;a$hzcYH<6HRyXg61D zcbcX@C=hIl@P%8CTK>JAn!4$45-g3AFG>TBxUCk7VQu?r@s=gONIY9 z_9`NF3-X82o@(26m=KDF<5UXzah+&thPQ%PI$gYgKe{+m{c#ew5Dlv72--5PsftPA zaI9)kLp-}O&;ASs?LmA!5Br3(#nP(iK0jCm($Yc0qwJvFe=Cd(X3v@(4XftABR!o- z2|Ou`l4+qsIle2BX&AbNnNhS1dp2oE zUtI%G(yZ7!6>CDc-{O1KAw9MB3x^c2s|(DLl`g?BRdF$gk%Wgu({8%1`#n9wKWbjX zX!G&4AhR)J4DTVut?UuT>m)%@G!XBIcJYuLh9qq%He1_?P_8Gh%SqAhd?WFHA}k86 zeUqG$xLg3AL7IIVE;&3P%sl53D=p$)6vBr~gLHbT_oYumQ|_I1*eS;KZ1zr|mc;ok z{v^m>K>*P>-v1c@SEez|MvVUd6z0OCUeLXMe0rVF<_Sk|qF~r1Jhd=0W&wlgERmE2 z%|r4DUm%*&b`s%^Q4SG!DN6yHaE|AM`MOR4-GBo=tu{-D%QG;hyCf*|SA~(URwEjb z&lBw_8Y9KbF`N5@W>xE5fjIt60PJnWpcnsL55ko{!`+0KT}+cU!bhDzie2s{mh3qq+}WxLi;|8t-zLnBT94u* zZZC3CJQu9vIH*1?&_!zDP$6eRHuxflD_6zjuF365L5N&-6Bt)|eksqoy%Q_U4F%oE zV8m^E{(U|>qvW+I+Ig~2Md?a>hc5%s2D%utD{dC$8p?%N(OC!MUV-LmH6~0sB6HEf zS2hRD`U71cE*I?@;4Gwz;~y$gv}qgltY|pJ;$bR2u>}NTNC3h3ik|(d4sW9i-baYb zftOaVZVhdZg9MtnOaU?@W0Vx;QuMb^O~%)@%+s)BZ%&susg~<7F=NH#M?pfKur89oJ*AWu zSezoVwNh6MZ4I39NIf16VS%u8P++dzw;>IT>BDJiFFm`x*piKwmcs-=&Z0`uQ-|Y5 zA=HKS77DE};hvSDuk}CAB9M?I!NP-Bgn2bw50!Ib`NO8X8vMs#N&|>8eYM? 
z3Q^{{+5pFE77I3Nvt}K(maMEgaZjPXGjb&a64HUG_YLOXzNT&YCd`@HZHJ z0<*!E^LCu39f{|JQAkbhQnK8zSNSwb9;D&sqwVFJxi3!Ca*~!rn|TR_bU(gVP<1?`LjTCSoU6|CN{2g zR)k`poo0!e*+mn_jJcl4hG+)DDWA@sQNH^{(ku#R9RJF4kf~}gK3&C6{=}m8z7%<6 zb3yQQ1%2q3dN#|DX%@HWd9D^VGfZi+XLo}Mz8ciBh`?f6vUE#2MIS0nf0VVWFzBHOeBLc7BTV`dtB8duAcfE)nFCb+%7w;r^#0k+hmx zt$ckUiI67tZ{^+?7v)@-A{eC+FhTxs1KQ_{L{g?f)PSF9;28!H6*5gukBWAoU<4kA z34qxgF6)i=^=r0FDB^gI#KDc=S%YbzBKM<5Xnz6sHwN{1I!i(L8nu0_ybmd;je-quJ}ZB{MzLUq3%ZNJD4r4R z?BuoDHyMjtawKzuy-0|Q(AwTV74H-&8g;?JaD1wj6mH0BWKEn{Q;=JSZQ{sSd@ciY z&=`yVa2!{>gmXqZ^>vaU#6uT?4aZHQT()io0|w$Jvpf*X3W7%wA+Jb+5kgCfPb|)h z;%E!wv56qcKTOaO8SA=5J72a>GI+fs#LYqIPZl9EixsIbxoId|ZwaYfC1wr2zPSL8 z6lo!;eE{X2CD^&K&Fesc(-j8_vsBf!_r#GRCD+^<@obTtF2FfHHc#g+&9lIoXG6z@ z!dz^2Sc)w1L4l-uK}zxE{Yc{?wxq>cjQ~E>u&6B#*RwM~J0wf~KeAuT#N37!teeJ zI$FH&E)n3;*5Q_c#er0h38b{Gw9{xjUL+YR>q=lvQH$D1;^N_zkX&mP7ZBU|p(z$x z9}05r3)wsLr4#EP>i~2aD$`IprvTW9BK!*lxE7VVY`#IX#i4R>Tf8FD-^Nx)c!2)c zyA#N~UPmU;<0USQLV-6f$T_k8 zCw9@50`f^ONs|oxHx%iHxh5HXVmIkr|Cv4LX#7ebLbenB*>7jhL*4z+(F5+H=)MV@ zlJ5}WiXuwzYAn`U21HSk8CLeq{WE}0jw2mPyKQ@&qIJf&yte@JrV&$jnshG^21mOH zEyeJ2vq*;MMK$qzJ$eLgDUD>e$i-_}dX2O=5yx^8xq;e02N<8)Dy?)*-grkpEx`FskN>m4jJGl*P5gJC9O+$9R%WgQcx-D4a?B3 z60*l^&3ZLOy77(8|&M#8S7M1$)oFV7P>&Y>1foRu2UQ_f+77L5AMAEEj zh&@m=6_{doepvP!d?(BHI3xS5wW29**RwOLV!>I$528O-In*!Do;N4z^z7QVRMyA; ziFC`irmcAVRfu_~u^wfVfUL7KLMCm@iNpD8G)p@-ZeAz^)-kr@ER<&7B_oC?C()GS(X7(aof@^wKM~334Hu zS!AZBI!qXu&z?Omj?gpXif&XGj3-2yODP;@TCu$^kX9txP&rl|_q(&uSMRN77rI*4 zFL!0XH8L=MY{D<$dvkd_Bf`wA#`MWd^!XJaW+rNEnqo?Ti>@wDUyD-&x#)^kri&De z`MqY8#{Kd2NfJ@GhWe(OSbqw_-_g-YCexo6NF`oUeCSajRJZ~2B?tHqgYst?S-E_v z6xD(<9?0u3>1=xhI_HHFyutXU5I&6hXfXaJ!knlg=3Dd}4o$WkR%eu-z5pxc8s7kd z>2t9|Ez`3*wC>Q>?v`2oR-vvh$6HzZpfNk|2rz$_HMN@PP9ZKa!z!ozmc{FWJoMR@ zmn)Qb_f5h6igcA(80D5DK`Hn2RTS!J5f-H;E|+~*Bwt&u!xmC{#H??EWNm;AGsStE zEzAr`dsybD(YXF7K(b|^N8KgD1=5OQCItPNM>{h$Fl(qJi?zpqxk2kkbv@Pv0{pIa zeVmOS9i!Pgj|C?Giu&r-xL<^MfR+yCq1reO$Su%1h}HgV(HV;;>54X+-&LSk=5d4` 
z>|!o^NhnRz!_l&?Z}H6MXTlVym^ARif?dpHIgAay4WRle&fPW{Ar(rzD#C?hamIWw z8e4w{$XujV**V}3R4dHM=5y>8l}`)_qBXKmEH{WWpE0d0lRw``JSxxy(Nds0HaI>9 zP)Tx{V+R4$rj1+YcGt7TXIy74I-{vb&M{f8kL?gZf6xii?&vRG7?{3sgpvBP0$IxX z8&>~ANE$wNi~8^KJTqVnaZpIlTKyP2%uV!w#WBXeR&6gy@LSBXASAVE5Eo6jtYp zbP-W}=%`zJY)FeZysQX-^H^_TuHPu0rN8ph2Z3&{pF7F=aOYOZW=~h{_|Y*(yIGCDNi}t$o;` zSX-=eN}l;0XS5HqwY*yhlU)7&agQEJzob_~Geak?hCcx^bD9yC4@IgZNut&`6^fQ& zn3#%Y5&Tq{tXX%Q2IUsOb5g4??fM@Un)5aVUysLyxmDP6OUK5jFcN#64h)+d4xuZ? z&VV+%yR{=I#Z5wZae{EhQtNUztF%_1I}_MlL^oOCq{7K9AQy%v#l5gc=R&V35-yu0 zDhP96d!Tyr~J0LQ&M1i7g7a^1m1 ze|DY|o`UqxPy8H!5A~P(knl}&O1-Z~l)qBJmE~&S7&`~dUgc>98j-CnRrd?O;1_=k z7R#sl5FwTiZqrQ#9gl8-^w=Rnt@Y@cHf6<2u#GrQB&|4mcg(MAp9|sa8mJzl)Rscb zh3a;B6HEypD^?frKRwe*y7*J#QvFNQtiNhe^u+0+on2FDLfh#N&I2L4YOaV&i}Pd8 zH;_$dw2J`puWzV{yRsk2k1%Oy1~5mUJ2Xtc5#eBhdt~0q!jX-l^!+|B{T9 zt2~#*&b^pl&3bm~{nHl^6fW8Xx;<%p#vBCVAYm?P3%b#`qewG~@jvOqcDy7{)7p+@ zwO_RJN$MDF4W1Nc$wpAP;)Qq_T=VcN(3xW6d@~rg3$c{W$5%$Gpa+UXwYc^&5FTyGfgKXlTVN)^ffL6x4>|J4M$-Y2|&Oq z?l`SIzAeIKTTC2&3`EZhfu$_i2Vli2mqWU!++n1!Jh4O&C74Y_g?2srtJ&2*K0f+= z;r?n@xqVPei1x; zR*-p}nw!ZAfw=K1P`64Ot0g%g#y!GZ#|33hc(%u5Ldb4J>(Sw)76^;os*@nVk1=nmMu=|Q4mlv>} zBujTdpgB6c011G>`5j5^kpe05lKR&8m7d+6c!?$bA2Tk2;e~s|wfaKm7dXB0;B zbc-+wIDIZ-)Q&eo&>{+xu|q5tHItl9q6TDPY1OpJ=2f5kpw|>Y^#je;Nc`A!f9L0##){ZX z&*sVyj-6!DBE*~==UjUSCdYO`<_RA0rSbTK0M}_m<`S78*0>o=oe+0#3_P_W-LM2G z=#2wK`xV8r#`g*rK4GpxI`_!t8s~L^{yq}bH3oMd7DV9}aysX^;+;ds%31BU{=F0I zID2;NtY^xR%y0N@-va4!q=0Y(qG=qi8AVcmJnh6i81XeEi*$kHgV2jl#c-T>E1+wS zF`Lcx$8Q60RjN^_a9|5{>gVONN9KH202gRU0nig~`oIYZElY+y@gI~r&H zA;;R7O_<|K@pECW1R-YT#Aolw)8ICku9b;x?gVp7rjtM;@jD?dIGjZpxb0m4(upjL z9z8n8N)~xBD%vG!VN4j5iQ;%+)Tw(|!JfY_3K8#@QSxUZXpsi4xw>1=8v8iai(H;@ zH>7i}LImSee}r}^B0 zr|-x4Cq8i>1Z8A-jrsf~QQ5c4LzA)p{rS6GEHl&{?E+NRsdfdpxkM!CC#G*?%It~f z9)Kpd?dQxQQrVUdLQ%=`R5=#w+2fM;>B)-uEumTarAufTk8c#_@)BvEk9_hWfUHnl z>KaQ#d#c9yLTbLWXdV}A@fhKX35nxzg4~;n6@te6^YnwcgkD*@ocoHmnfH?i&ejj- z-_faHS?c7I@~B{c2g6r#7<*liv+KZJoO{C`egwo7Yr`m)4zM)+DNmKu6oeKO0)uAH 
z-ZgH1lxJEI<-t_!{aF6`d@gvvnYlj>5@t5kG62U9MAH-m`jfC-DbmGaW=L0Y5C1ub zsyQ55W?O#LUx3`Pa3+u^GyW$(5=hR23<8j7QLv(-r7<3v`9-k?>*(m2Bb-eFGFOI}(Br$JT|wRE!qT{#-X_rBu4KmPiKhi*B7_-&UX?`X z`68X~aoYVU(ose>Kx}tPqWpPumArwNg6n{?S5wztTUQCpFxh)j@+uHvmU@*l)6&8HsVBv>UF%ctE}`%B_tq zt4mX|%qOe80PNzHDs`l*P5|FtQq!_uj1<30(=ZTkXFv@r1>()I)OwNRZlY8_YM3Br zSwjUTW4_YB%WiHCoYu41T(hDx-WTb#iv}3QqVXjFS7rX#V7i5cMa&YReybMe*E2=B zLGVQ*Zf!>Ew4T;+ABa}3N}eF=ybP7e`Ha^lQ_3aj6Deyr&J}G=F)(+O<8cA{R61y~`s+~c7rMR;qtzxtvh{Np%PVY+ z$rv^g3*X=uw;8G;?y=$C06`WqZh#%`?P5wjc(821XIplvNRvEEzxD_ul}Oq%jJ z!d%rw2=xsjErhuOg$us_2kLxv@@beG)s7HEzB|s|IX?S#{!#;70fkgMOOaB6rs$l+ z3}IF{MQGLnxxG;UjKzRx*Q^btpZten@FQu6;~(c2X{r==P44%134>cK zv8Uo&@8;iW#t~HQaiJi$lcJtuLq#Ql#`Im`9+wu*j3Ke=dwxGz3*>HzyWjgjmcxY% zU43zi01B?yKr9;K4bkutA(xl(J0cOx>2n8CECGi351hd9CAoyj2^hZ=%p4u>?McUC{ve3PXPEYRdD3x{Go6f^(K}ix3x-tF#ckpR5evQnby zj=ze|nuDboL5Nq)-(l_BAyvlOpY%JW!EU<4NIWmvV_J5?PWSf2JAz$nzJ{_OjdZ2e z^8A*TN~l4);$lJmCeC40Xs=J@X}Wt?q;+GLAec4Kz8pTt<$v?ljnK}l{Ip3qAVZi( zQ3Gl4y*xuo#5P86skvN;>&vcjAbz_>{wYS|4koeKWKAHKdnvc+p~gk=@!AsxJ4MkFxR)Iy(ZGxR}3hS4q@wuWA>*(ojnQ$ zmAzI7t(vZpUs(LyiDwaOokN9KG&t}sD||dHh!MOrc-t@9W8$+={?5o)hkRjGnA3&& zJM(4FF2{chgi!QyJvw6@(zrd?W3kM^xB74RU==yeEhn-hU%@=i2ok;L-?JM&mlRP$ccdDU1{fr$=-qBOF0t zj?U7{JBdKo`-SsrZLX<`J=cfuw^-~FL8d+W1i7(v+7NAgSNvWW#T%L=guv%`Hn(sN zpn%DevsZ!gR;Ig%GlcS9MRiTwpl1(r^5bD?%y7K&27p;znVQ9%C4!ue8<2eASv(?; zI^!Sxk8l1oJ{0MGG~ok2>B8Bj4S{J^ofX4RO_U3&m9V}DDQ*5x=l;CMaaH2@ue>oJje$7{U2|6qh;~8I zJ?gA|vK*W$*wyB|s=O59q3lgMX$3bVN6jYA37Y*@cdWZ9w2O)({6O-{*hpB`)ajP= zaRDxqVp}Tu3bUs`FGsZZWdp$0Jw-cScQbx*rKU7uaV0{5w6c%Foa98Lv(@^A{r^C`oF##2EgbCoP^1gff|^$rm-;Q8 zNfMi$I3Oc=fjF=+A<`9;6#JBQ3~mLASh2dAJ}WK}Wu`AJ^#V)_@IcwhGB|D*O*zSP z0ehB{wt;jxEAh!i_DACP!u*{!bfigN$Z=PpZVqhjM0AJ;h4@=7#AWP_CEMni^)O2e zq0NbIVg6o4TVtFd(v?O~aM&M+vjvf{93~j>X|q+o0z#RYV5e`yz%eY^1;X$pXDsOi zXKx2Ov(`p(l5YVz#yf>OwGa2E`Kn=iP#0DKfxDtffJ=gTSVyK~nV?LHIVdV;^4NBV z{JjRwd@qX&1;B6S4g~OhQJMH?e$jaJ@g2bsi=8?l@asjpkZgsA&|JjVv*Z;GIA!SBUsrscG=!`^yTE#RMat;43T&X4 
zSyr(+w-H7u`(V%|cqPRJjgtkKDeV4qsr!9J679?hF|ZR!+yRv<6FeS?hYIw2 zl@2pFAtd8Sm(Bz;jDJ%YLqH$V6k}S1i#x(HzdfE8;EV|{&#}L9};mdUhcQpdo9(fDm&4W~CveW)~oT1*-zaM#=eBXc20XW6+DDDQYVki;d>- z?1JgG`VngS6+z}l1&g2YSXKZ%(oMv10$g-DK1OHp^nl&-*V=nz?z&!pxxgtn9i95U z5Pxit*!a@qJXj>cCN87Io?p!qaT){_Y;Sz!Yd};0AKT=*cSA zU#ErP+5((4dUfo9vhI8YWJNhp!j#z;eM(7*x?~%BL>-` zFjnbU_R~V(4O^u#ToBf&hIDy30nd!Ly8sIbo1A1;9&?cjy4#XAYXez!A#My}x1k&5Oi6B;vjq0IbR9bYvDjDrx73RWV z8z;DKIo=oK0<*@)T(!l59H_t)I2y+b&;nx$mK?PZg6cNvfZA|0H9)##7?U-7e7+IH z?Ng2UF^$+e3Zp7@jWM7{w{N8kUC$LXwwaycEzvHambZ!wSJ4E-_ldc+Yy4bHQ(E3&e2|bfx{}`LwS^Ge^1{^2Spmn_v=a(Njq-)5`y&fabjjT#@e7i<<$uy*Ah^k; zS^e>eB_?WhE^!?Z)S{N@ZPI8?0HodM9JkgGk`A(kXU{RStFzD4u0*ffE@(`ZhI9oKgUsq@Xp=5#Uzm zmCVKw4+zzKOn{_Q~`|WP0%8G~pVplC@Ch>48aOMfQFAcri*%fTz{j*VP8MXg^|RwTOC(=sH|toV7YK0k zTO&<7uhR~eH9!7yOu_Pei(LdGlZ5tX1{Q%B?#60$SkGn)#zib3QdY%Zh4SUZ{uS}8 zNEbM}Hh~WFx>^Nx z=&UT9C}EgcuRG74@bx|ExZv@^TnYTYaK+{tIYAVOZC-E8>VcrNY)Y^fw}^6{+)|-n z051r!k@X`nRFKH3W&GfbTZvjJmCjdta|o$ThgkL&WDy$~!#orF3Gf%>#ZE>gAv9le zO;h|(&oq?A9!xujE{CL~<$ly`*NNaaoU(O7YV$t-wnDCV4I=nWk)sg4!$i7&RqNo$ zk43wz3$!1^b@z%QNeh0`@#}(as;a1nr$uM|s~;bFozyDDp8cS1z{P{CBIURDLt)OG zbr&~Yjl_Qhx&3f(U>hbi;o1X$7Q%(?9kGo_b4DixO9uGeHwDEq{qAhrIpw6&O4kcA z?>Z)jw4ix7OT=J>e*}x5u2^G`ME>>wryp!7+FGD9Bj6&9#Z7pE;O+DU=oIIP$eM|z z{6M@Zz!jAP2b)@yXl1|A+Qv1s2Ms}(#fzF6_KU|wyQ3=h0X|wtWPf}p*p;qb)*HJV zn!nXD)-f61DCjB{i1G7+##0xENzup#!TMv5;XJGEa!=eQlJcjMAuo$CXPPI|O{||A z0dWP!IG)%(g%bNq!dyl+e2LixMGGO4h?${6@4(Qk}fxV&$nk zMYDR`2BP^avO5#q=~O}PbX5!vy@Xg{GkSiOkkK30gF8-R$bUc(xvSR9!gTrfgisz$ zpwgNhahPdds*{!Y_ZgjHGr^ZT59fEYn{LdZK25uC3N$B(h?O)sZwoS)Q0~&djg$`)g>nQb&kvPm#2GB_iQXO=D}^lReL8EWKaPZa+5B#KjTq zPqGPB)OJJgsX=~Rt9+Z^{h>zUnZl6kgqU;EM}S;H|d$bAwIB0>wM{p|5VT?DO9=%rT(p{^;&*JSK_JY?2x`FZ37 znsM~F>|85X11N3Q4Nz>trw`_pQg=aT+!L%!!~b4mtIqyX%U5gwb@f zQGkr;nGb0_#7W-6MY<^LV<-t1{(K+EmCxLi=>7)?^w&{hFxj14q_SOIhuhB-$X2?3 zpLk1;bYm`oxL_oHb5fos*OAK}1JgTC281m#v}v1gsOTBVVerJC?kGWSsSea!Z1Ha{ z5EgkF%%8}zVX$1tz!bCok9SML+J(?IXrCz1RoU+pns(7n@&zH;OePH#>q2bYd!L%4EEUJo 
z4?P?&a$Z!JW&QUvs*g#RX)2@O2-@PkP9vo&(v2OtADz&Vf*2T&lPmxRJR3c$XYjdDfM9~u*_}1d%HLin_xItL zCxo&rYN?66vgg@z=EP-srX)0#F1sKaUG`%Y2i(^=JI|($E8VHm7lh1ojCSnss(O_` z*u`ydY3;TB&!K5Dh31ZZMQC8zdFQxO&z5z3Pg$z*y~=aI%nEd|n0O8r0H>xDHZ{I2 z3O>kXO=nX_988h3e^1cV6)_-r?NKxS? zaCXCAoA=+LpL1%$rA8|rP|;#K>q0>zmwf3tR=-d7Xw-6*U6qL zIg@B9IZt>tarJTy6b_^mYVAvMv^a1Qy9lMV_!5`e#H2_{&e`|4RnKNm`6JQ>-wpPcrMJ6m1h-*vaiYxL=@cNL3^%5a2%K|PB zi{FcOjZm}nq$3>1T?xi)K7VO!^lKi~>+v3pJw!NdvyRvgvgN*3m^mvKA%(YK{1@nt zuo4`Ki?4#FF7#8Dd0MhPadnBr!8&>u(?_%)n?WW?g&AaukP}w&MtK`3Lh&J3Fev+>c8o=P-f!L^f|06ZY%(eEt7Gx z02iLKRa0@FXmaH?4-8byKFLLKt?Tl9SlnaBxlDjxTf!nVt`+U}!@X&oJyO*T`70Q) zk=sTA7F0G;AR}?IAa@-Zy8G}wO!wqoB;4Z*zU}mOw7h)u(jtc{ma`hd{wv6hB$WmM zt}l$;ZUlpIl7YBd1OSzS0>DH?j4H)aQEqh1 zi!B76lgPg07WctfbBles&`J~#1fK32(* z;m4vaujd<(-KF@xHz&;G6PZbC@MuOGFGeW4ya;uuh zZ-MlN8)GJSTIyybJ{WiE2R9&&%EYGzdqfxvYOJr071KPs!dPZGJ~8#Q7^znXZXh`HXgqclzJzXi91D65xhvT z+Lhwoj3(12{3GJsg2n_fJI?uo)9bLDPHcVg4(HZE2-{NZCJL@qbdASCJyZJzsRx$b z36TvHFkcsmF=}mm7pS>rv#F-Ueu2D)lif`*EP^Iv2pgkx&t<=LNYVR}2)99vG-H?i z5!!Xd{TpxJk$6K8b=9fhu~>W$6s23j+Vt$~nXnUarJhN}IKwfKr|yN~`z;#tX59zj zQJ`K!QRWIVA=m|H9g`Vj4j1U}vl&8>!AK@3D})o>mL+i`sU zeiBh?G1s!d7$#)A8*KUAo^B%}w;XW{F*g26a%tdZM z>ma2SiX4G1AKv4G@mlfX2riOoS^B95Nkg@=HJetd>pqm{C1tWBw+;UEl zMyC?ShEcf0-2yEJ>J&Jit1mBp7!>}aKxFB)*&})8X1I1NTM9I9bx1>nI5&f^YwPw1 z>B(*qmXB}4o7MgV?61-}y`JN49ieUuMsl52`23^!M;K(`8%}kCoM@!X%7J@s z9s@DU8nvx|O0@Zkx}4<)n#P#?GoZh?4Cg6zZFb>D33bx3A?C_-cIIe-u30*ZMW6hQ zAh&oOw)u1beC5C7IW}WXkl!HVXS+}rkAYGSP_GJccDiCsyqkJFPsE%pb@?Vw?#iiW30oKDawY`)eFL>1KHz_XtAz zIaWRudp_m#3}LO9z~WBPu84fJaFL)q@pnKzqj92?))&VMG1q;K7(-0kKfuVhO=H2O zBAftoeXoKp#=nIjy8V+QH2dtQp9d>P_E|8PF9A1csoJtW_k zheX4*N*Tc)7eU>nxgCw2{|V*tb(Mz3aLan3K;){;fOpjkr13YroYw?$jX=LxrA^F8 z-2TyI7~FAG<$Cfgnf7Na#u#AV6T+~BG!Q2~KKUY~hlms)6itNo4 z(r72AwZJpUE=}NhNDzdVfXsO9oe?LkWIhxMn~8+33pi0$zMLl;lECY*GETrkNj5V$ z#cu@g0UUrxmQ*0GWoan!0K=MYxAIHUh4ylrId@wDSZ1^yVX;0KYrg{KMyMVph|e9O zU2>E{@RcwpuL4oYo#xJp@95b~M(vilfq(3^JdY)eFR`a++D;=b(G5ho2`026?ux$( 
zA;T)77;O4F&op|!Vh?{MBa)%}L=k?k2YG=vR|>IA4iHvm!~msXJ{uenr_@F7S1K2= zk!aEFWAX-h^bJ5)ovEkH<$1)p5az1t=&H1tpZPZs+#sqqabk!vEm}1hB4kC>|Hr?= z;RW(QJ3+L|GXXF9;?VyBP??5`rs_CC1QLWy$!@>en~?q*U5vO4L^}{f@_C9M^PC7* z7YWox<*K*xHxz4F9QczU%2S`VQ7ioq$~8srnCWac6=+E%u7ZLn@HIk|2zK_0g+YIr z|27EaD|cWH9E)C&mQP|7;WWX@kkR}dP@1ZOt6#6no_E@QPJH!UaDSQwN(zuc*AnPH zQ%SIZ7m73t<+wQDY8I{UIm5QaEpddNEqiptjyOfMGi~MeDu#EeyxRMKGmSpTpnY5c zU@;-ZQvzldy2)El#|duTd#|SoW$g3mH5p7AD zPrPsB_(>r%qCyDz z#9JA`x~xCGyehxARvP!TSK3n$td}uOv8Q5Ofb*_I`F>>fJ1s#`$^8TYl&Zd}H8%RB zU*^Y3-7RFm4Wmn~rLBnVmB>QLYEk+NR1XUU!UINamNQz6ZoQiG9qZ_bBlJuS7-W#b zPeHiQ+WJU5ej>dhi-~)|lnc zKMciFg2eRZidcV5o;O^59jb?#lo2vJ@`1E{O+Qh7Eza?KLOc!@;twok0jR?t{qgMD zfHd0_fxk*|@~5Gw)}&lw&Jw{-HH|e*ZSk@w%Ltu(G&cN`xNnii3cofX51iHyj4g5IMdMq=iiv^PthbL5SN3x4!ZSTIskg!)ecn&!>d>i?|CZb}0*jmDkNd?ZMd+B5IcibgOZkI@xX3T@Q#Fvt=c#2Sv?{ z5UfBIH@P=H6zZ1cPU_e;Ak7w>wQ*S%?ObugNfmrukc-6nTprvs&%*TqY5o-FPZG0C z0Qq{(jX{ylN1Y$tP#h(QPbt!!-gs2B*<@uuX`U8_a8$Hb#2b26kCa%%VV{Fi(<)Tr z6C%uWT^5Be#Y;k5*~<127QQK_#5+Qj9g%E)w*k-2j-AMxqAb4iQxwT9HUw~KIAqWI zmC_t8jMv+0TH_%-yF!R$n*AOXL>}-22Mjvm1{(pGxAQCZiEoK?nL0S>gOXcU!_NOa zC~V+}XP5N)D@1$9Mpa3NF-J{Jf64jt#j8``% zk!!&{j2tv%9lGfkz^KI{7OPY_M7xDr7*ptu385JEZLM*Fe&u;_OGBKkXX;hxM3MMk z3UryWBlvMk_D&NaWzO3y|CvkL`GIfE*K+n`6 z8E6?&N*SG^{<5C=sb~)aEu8#U;2b$y&iayHqbVBXP1jt|BMO|cShO3OJ)2Gx%BnK; zWl+SUQaVD#U^y%U$fjzUBL6-rz)ebPu-VsDEz_F=!dHxKEL1ny0?LeNWmCe~9-9gx zUxos%Z(t~^6`;YKdj_IvO9++}i)(t<6-?y54iv8`+s}?wXtX$lYF3- zIsasl77En79SHR~+vfR6fx-FGcu0^%lo_F`Jit2XEn(Rht8&s zC1dRzcojPi7MwLjsh?|IqC*JfVtRq^9R3&0r)j&2{(cTSJ|n=NW?@HCS;^i8r2%%zfkE6R@ zBa+6c;E2PGdNvyy%Q%U(%!PJwQ@@SV1h^rVurs=^AkpH*>muP%eN$UabnKkx)s5OQ zo)qm$Y0Z)p&~Fu3#ULJ*5`>j@Asyuzr|`S&>I{jT(-*@cC=5O|=w&GRQ35Coac;_5VM9q_tncQl%fL9Q#lH7|#fPc0U+vjmyb**J*$EFKn0vA8iW zKG3rpjcH3ULFwje@9qSRHPvyPo?Tb&sK4zj*z zG=E%p|5YHDyA5Sc94p$*pp{!WzAYdVSnO~dRfKWh5$JNKxX?^h$o54-vpSqE~@qB`^cf}0?82P8q*K+-CQ8SEV%u5HWcB}$)w%oNb82eU3XWftc8!^%m zq)eD=rtN?{O{nksHK6`lBkPNH?j6dLAz`d`2jmoaiBMM#&pN`{#IFj3yW!;-9 
zSsK8u7_(}jT!aB`W@DlwQi(8%$vDBZf<{_25;?fMH$Gj*v%kQBS4??vk&vts>|O}$ z(ib-gl>F?9Ci(Xmoz-c<_0W6NrS74UnmzY|dY6<&5C62@Qrr`{b`-AXev?6hdM4qr1fu#m}vr0jEH(3dw4wJ&e}f z>$iZxMk#s-*22bIfSa{d=DF2cpbr!$V zmV`qETOulSIY!(hfP$ugaICezy9Ano&f9gjIA71MmhM!bqU$aNQRAf@=K@jY)pDW+ zC3R7QFy1NQQG&A%4Btn{%y7vC?+k_X{b`}BUF!CU@xx zL7OcL(A;Y+{E{HEgpp0O*g5>cp1)J9>m>Lc=DRh2HD~^K^6o_Lo{?#7Kd1 zk-UuEGAJG^6iI1Pq= zdAJ!v8pHT{j>v_Q=ux9WoC|k#EgMbt#+L=suXT|b?fr(xY*=Gf?1~)@0iZN9$^g<- z2y){zqcz|ZOmF-_SQ2D;bPTcH>j1Env`L8~TRJu0La@t;ElAp!SwdW0+~rc3_p=0f zC>1NQ-O{pXbJpqvcLlsWQ>4lSntik9&D|}=l*mi0MwUQ_iFAcn_ucl3_@MyUHlWq` zDIyS%il(Mmw~OcetC{tgh1a&i`9PnXa&f;pNVE%#5Ng95-326G%Q*EdL42lN8_&x{ zxZo^XvkRtnE&;k^)tCh*qgDW2ph_0*7!c)pY(9Gqmn1>Dnr&lvd`*jXBQU}xcec2_ zNK;p_q$VB~?Jty9Fb2k-1z46D{aL`K`For0JXvKrO#HflnSP~Xg3=^#KrK&JEf0to zSO(y>tWASyTq(p$9UZYIKo{m>4O%bh2j@tX9IdRTFJ!Rm1)bq|QVDt}IZ(LE-3c$2 z#o-?TaQGzHqX4%-Lx~Hik*w1MnISDi-5l&K^~O(&WQuH}_1t4cGFD@qCAs@FE$3C2 z4n0R-W)V0+sH>Omt|Azg5Pub(V~HP(mj%&En2k|=#%+C&F1e0%69-)YZAu6TmLPu_ zgmRX3oCIWB^6|2NZyuS%*m0RC%L!fsjMh@8TwQ>4{b81J3=&%mkrMGrBklGgTy>6v zqB2A2@=ak#_VkSdvE8Bh?@Rg_m#_lGgfNTO0z8!dEYelO#X<(iSZ6p-(ab)%LZk(- zK}O)mN1*vYeOpC5sb_v08^ME~t?{#>T@MCQF5$-996h5s$eK<1H^!1>V}KN+k4*rx z=A=m2HGPdd#E%}&-xw~L}I3=Z|e zTuH3b(-&f|U3CQzEkb!T&rt182_gT~L2woOx#+yn(+vgdO#)HMeuc4*O+`5YZkEYE za-krXN!v<{#Yus^RIr<$07!~9@b3bt(CPK|*m%l$*T~Ws3q@Hn=}9z9c^Ku`lR{~H zHrt$PI2^(i<(>+>Pf+i-3iI~~x5$AxRNoAhmk9Rv(O)Vs(AYQfmr>yAa=+0ySD0lO z57BaG{HFlYN-pAvJRb~JC=jD0gk~JjyfPWz{73?wsO4S;Auc5wF2+rI!&|~&qvAIX z#6#b7rf6JrG6lik<0uffbpP0jQgn*uBZIwnoCe?D6bTmyS2-HXkA`rKS-EgfResnU zF%|4AxE-h?s*iz|(4qx7RnPv7aKXcIuV~t_+ILte6wu!US&lJeqDYVmErD)t%}O0~ z`&Eu5omockGB%hS32?hnSK5gM;9z`NC>7|X!ZhRdMIwZaJtyt~#F!B3VpXzJi4oCR z`=-+cjB6(g^Y@4*&%pkG09ZLOea*7Lcu>?#mtY@4_;!RRo)ApsTPmvK?|SxE*h%-s z$B%OcatcDXFN%~EZS_&F=d5o1+Wwv)=7G^3H^YCpuy{pCyG?-VZ?eUQI4K)~^0v*=sudN^4I5kqvrWNmMrvUmR+7h(KTmeqinp_;^QGSrX zoR7M=^@l?IWmXcHry0Oc`bnOu(IIz6PX(ci`68+~Po!Ux!&tHxUY{jFM=hJgUZ;`7 z%;suH#@bRW5ac3OjI!X0r2^D(b$J}up_~rs5-VChOvSPOJ%Mzt?y+UVvEi8zZggf* 
z854C{@C!l_diwIPwp?E=QjLt^T*G~PjHlleN=7X53HKzz6<~hUN#m|~w!j*v&+SUm z_(Ap(Dl$Ts{gjs&{U{mHnZGBBuP#FatG@7W(XKGwf%rUe)8x;9s2?NM0Nw1zXF>X- zQcWSYr8rj@xyW3u@L03XhIS1**rcFNlcZfMl(J)VpBw)aVVRYR8Os58&ko1`1ZNDV zJN0v;UFFYn2;~^}zw|_X0WD76iltaC$ZXHXC1&UU73t(NDi@hG&LN$PpKXh{Kvoz{ zs<8zv#$;^y3jhmP3+o)dxU&$K%j-%4coYzehCz0A@hd?dxR%z@d9M}ik1_l850?p6 z_s1d){OyYE&dtBoDi%Fa1<=ME;!tuK0qir~T zCd}_NVEUSjO9fC_tXxRmNNjL{X+AT*Wl>{%S+tqRsI5yg4irKb$;u%uV^5ZZYre}! zAPdTYA`{$Dx?{A+BrT`L;%Xt3jqyt3ZJJ{KT%;nRs)od;Go;rJ>F7n@7UH&H+ldq0 zg{FzNpBxCT7M%$O8#K0LECPh3Zp)nb@r(R|kCJd-rf7IZecS4r;}wy#nyfD+&PyRQ z`(5leNWqn6ebFvD$r<-gp=FCR1bA?yA8|#D=4|c!|DYdSn2`yrh!Vkvg}GC4%QpKG zX0Oipmr!pDta8(aeJ>Q+#j*jhBD9c6QiB6X=qcfaWyal{!cEz>S_nP4|E zn?m$0XJxO^HFQ|%U%%pamn-@0O5`h9&oh#pBnZ`A-_p2Mk8VwgAHK*bMcZ$HT@lV9N)?UIsW7vk35-D= z_3TR50x~CX4-9P+86^SZ77S4oFnNx!6&C5HsX%e5)jCU!*MvIP`Dk~^6VXhdtKY%s z*a5E-H;-?v~#W zqwaw>fm$B$${%I-<}JjOa9X8RM(u$(NhF=8o}1qe{TAZulZqNiU7x~g zd|v|2)Gg0RBdoob3ve3` zNwejHk$9#40USBSMWH>m6G032;2k;V4xat35uLzRi>!4gnA?%n)Af zvC+BY?)+QH4A*YNMefAiNE*kuSE-CuGmz+zajf-6Fmqg82d5C~c8);TjkWPq{3D|g zZ=?xE-IITA3Cn**xz&ZtOsss4k%~PcC7Z7w%$Bw>#fm?q0BAS3@?-`uOVQT}x$>rf z#QCY%<6fR!JXYX30L~C{fItfb&VeWpQZv6Q)L*KY(#B!s`v9l~Uf;S~Gu9GK^OkTk zkIh7odYnzFEN!vD{Z0*6S#C|n4x&jteHGRFwg~qU1#DJ8gZB%FF_{MLiqAYiDz`Xy z3otRnkA#@x9OozxQwL+}UPSL>)cijZ3gdJT zQE|`LeFWOIs9+_U4&i?UlM$9H>`JlZPks*;?lx=I_OYL6Y62$+`ApGY$p8}Q6j)a@ zJW2|4tXYwsOED(IoK6?T5Se?s$ACPRRqLu~EP|Q@HvV&-QeAxlZR6g8%(^N?MvkQd zT=Myu;v&}jOa5Mmq6;G_V}g*lxjXC>8$QmnSwes7VjIE!BkmOn-_$W_>G~(~JQ(az zEU-;2pvv;#a#>mbD~N2dKg7y^<=I7R=r2!p#g+mr*)5n(*+awmJp{Vrv1TsEgGHh? 
zRudScSt7nK5CIs{M)%?;{YJW`6`kT0qWNYsOVHY<^4#z=nT#gUvOU(unt$if#p%QL zKNy<{Fy9ppUPl#X3o<8$6hChOdr5}6$?)ISIk2OZ&NX2P#cb1o3I4I-1B!7t8K3_L zsoaW#r5>UJd2rtOX;pmqK2pn^uUUK8L-uy~#zOGN&ImjEqE&A6qu zX48CvEe>@$X@C67`Mao9CX%%;ek_o7NQduU5}|I-ow8r}ms7Km$h3lJ0rnH1nsZzI#f_pYmX-K9F~x833JB$>s%VV4 zdiEzLW?c3uNvVLbP@rt4Jd7&m2{wO--hpqaqFg>K5H4cH@Y?lh0eqjkbMeJ__0{~H zv?P^Lg!5H^v=he(6;S8{k?JTcEeGPoH=tZ%P0=0gQS~2?84hp}&6%QS<~f~s1Q0yXX6P0?fV)A&Ui&05N3gFcqU2&4;?m90og5G+b<(zOWyi zCO6b+6?&#n*He3Byh7tTL6o_p)W?Ec=alL{%o8bolk^Y=70`UPORSROL_wJtl({9c zH_9JdI3uhTGfR7RR+p^=?DphN4;oK4>$qH~+2Es`DBt29fo4NfFFp)$^v40rhH6H` zxL344T1hk$3j44SzXF4?RIRgeo}`&_GJYK-gvRfjnv4sJUmZswt|*Y^@`Plo_Y-*z z6~lw_ja@DTk?CwFoLd%(;;&gM*1$4-Q1;$gc$X`|LtS0Ig$OPRRXN zPQHtl$$t>RP=k?QYSYiGmA{RXESlE^0{C6a5&G`RYeV`B7I~*zX$X$UKB7NX(@#_k zH4plquGmpDrD~sK>9n^9mw8z-WnEkVBlsBLp8IK1WL=Ey85yqh84&ZDgL>S`!S_$j zFjgV7kVWxgVdkLYO>w?M5T!16Oq2=xwaRB9Ev)kHOp0kr__APsl9g8fp<}W4Iypk< zJDm%=Nr(%W*V_KJbpb3|98n=y1$xEU zV?98Bzlkg12!(#45WY5jA?I_3JgQzTEQ#0b_(o*U99@1;&n}tle(c0YVu$s?oTJW_ z!}tY4ToP0z=tn>Ox%@STQd{Gf2r~!kCRzC-rr!``L8gPEwMZj;LY)xt9#@N8qJ%I* zhu}re-vHVl#9stXea0k(uk=S@6 zzu$!Qhe3i z_`%qGa|1N!B3bZa3jx%PTRYJob;WN*yF_@grhWLIw*c~2o3Sgcvn8afE9*6TM*7_u z0{vPQeN#FAP7qDMtgJJd650O<(PD~DgoD;rQ07Ql19B!9ZSr@7n%7m*({E7g7y)|39thaTZPa7qS?FzaXWn6quv}?gQ#Th5HpKDP`|5gtooiRh^B0;sEw{|d4`GXgsDV)U!+IM zKyV~WtE%VwY+W}HquF8leJv#wjyCt2>8jj`L1G27IQW=lx z(IU}81mQJzfOIcUDz=XJS%JvOA947DknEeR9mx=By-T16u{O4V+#awa0L7lO-Ofy! 
zw~KPViy1rc4%==P2n>QbL`O(d)hmKrdqT0Yu0@JAn4M!7tBEXx=FJAub*!n}OJNXK2eO zt{ZaNk1;`J5>pdXSP!$|bpqY4Xr0m3{6zqt*I8C{sdTJ&K9UtR!Acb8?Ls0d-`v8{ z-c5Feu!xQ0VYc^fkp6O4ubp`Z1(96G8slLR=9QG7X}Ek;n2UyBjNf{UE^WGd9v~7{ zkENN&5|vSuQ%FBY%W<$jIcby;$f9~1V6@M2>6R=fJuB2~^rm1y00Q=G0iaei1 zK4=;Pf+%1AmYpJ6O0OMDxyuGS;b$75k_TsLw6krGKMVGj*Pf2uy~%&1%XM^ppk=7md?gtV3v;D}^DTDGWG$=wZ>$sFllp(WJ(l#{4zz>9)t?cbf8K zgIuh|jv#&{$lp^e2b%HcLYQNg*2e96bU&z8tgU?)<+Lno@k`PEYzHehEfVj|l5l2+ zA*U1@HIu|lX-HlO2MO>yiaEe(t2n(#!$yX4BaaKP^o$WAqF*_)quwXas)HpzG9kca zuO^xUUq7Zm(6y*<{y>ODgY`7iwl9LeLohvZSZ3SzMEpNjX98YlQT=}eWRXpfBZ45v zzO{6P76i@KHf_>|ByDBqCb>y&o8*Sv+_s4#ASj@SAj&Q%JE-g;0-_Q&1zB8J6j2rd z*;H^rLHU2a^L=w)e*g68^StMK=bd-v%$b=pXU?3Vo$A^b#U>p*yUHCnpT{Re`~7AK z#wxEZPZsQIu`|K^@Rd#g_aN-yYv@`qcVsCb5<~(JRuTKxnezv{)GumN7W3$0`EIkVQ7PWR1q@qTLJdP8jO3 zw|XU>5YC5iXX6(1<+WUTme=%!JWm{@WIG{wbGBaQqlk%&!ub6uRPXD1!T{1;b?Hq#Lr97QxBF*n>N+-2sr z>7)QWEXa+T%n=xSW6vYO+^F-lzKE|1Ab(x`C3lyZM?t$qSBV!y z!3(s5ag=zRZC?|@$D1+W%29?i`oSrem4?O!;wu6uQ6pEF{aDYIxK7+w z<8ILi=NMK*7=Dk)jNx)8<#IY9^qm2e)p%ZnN7$^9)AmUps6V)zOXF6?S^WTRnY0qa z#u85mGqD4$UG5o2r#AnPJpYc7M|SW z%94$67@iaKJ+W{l$tK%Wu{4JX1-Rot4Wym>Qb&$e0?qyDb7#+qd4tfdajSGM#>n;@ z;`(4Dcr?xuM1^&uI|}JdBI!td1V$;wi=tc+;xwYtbE)c?LtxZ4`7kj($4dgtvc(Hp z;y)r?j7BMlCzfk@5D^PgC`#A!!ZPWWGaojN<_=$YEnp%eyQ3&tY_W$nZUpDfnPiJQ9LN{K+lz3Onb5NfN_V2S=xo(UJf6SA(p*jUrVtmX z33uZP*S6JwHfFs3_?Q4^<&7&@-V4zp7}jxMEUmAO5S>XYi#eu;SbH@PvPknTIwWR` zqNdywREi@-@OFx~Ou@QEQ#mFbX0YfJ>B>!Q3}M7EOo zYk(Q}rv86_u}nYk#Rj%MHJH6Anz^EGVQfCeBPGMjrW{`r;d;~|P+BK^Pmtw_2>?3; z|KnDHlz6@(su1@+f6c1#X#6Tmk*=cU5UfbPk{l3D8Hb=W{W=^;FHW>ODmE^;9IlQJ zeWv!=j^sJ1`h83wjXZtEelb_i9)Z+Hu)J#$IbA3PYmzfhF)kMg<8Ylp`C&ks@!8s2 zO^&UrVR}4AH5bYYl`Z3hx*c#H!fF;DJ(jo4p3)G50Y%#4bN*mAHU%W=Xpv4K!H~)H zNnt}{A}b~OJkWRi6~RZh6ahF<_mGQp0) zR1wZuYGqZT_oOht!wB0W%kp!AXkTuRqJPGpMfy9dY-XSNMG$0*ZpUT;$Av{ZUjpoL z5S%gp3Sk!MG*>5c@MC#G*2qkb=uA%t<7*|R6OOml%4r` z(Qr#nAY6cO3Y5#=j#D%{|FPgypvlHk47SArEDLoGz%DWRqELScuTe!XyERXzP$8`C zrM~}DC~aEDBL3rF;+b!2pQ0z`isD0@ZQ-B;z}o^y+K#F-`^!B0qwtlb9}7z^9}!A> 
ztY#}NiXwPF%>?Mf@e2VilA~b9FS8`PZr97#@+2dKnqsl|fzzDjBe!CJ+*E{f;Mxlf z+Ea5R+y5ICV*bz*;m>0N%)V2QUJ$@BQlOeubZHz@GNi+BX+rt6&_A4nY>Hp&|nQ{xfQHr$nk&{z(yB5N|?FNsi*-l`;t6a zFC!lTpSV>)n0h?U-G#V96oo*AjXU-1y0PKM6+o{G@Ulcfugeu3e3N3k_67Vv6QDV1 zzu*Cpl%9n@$GS9hKYAeu<*WH@Qoe+dstpqgo5CPZD3#@j@6U~AMY{%-;W0FF&P9LI zNr**}D!GAZC()XVpfRyTkc+*dpndxv37~YeyRI64B!8vc<<-Id7m*@UIdxchCEFKq z)5QiI%7m8{0%}4adPn`TxLc2~s*@G+{@>!+?WOGk1i4;_S`#0_ReE+IDWgu`#QVPu zM41+|dBl0y%|)8COLeawuB|%t+!IIZ2fxaK9urA1P7&m9v0tO}ZZG7C5aLk9T8TZ{YNYZIQ#l{}AR1;|M#VJ)#Z10|fWdEjn@EcTKe^`b#`4iXw6& zfe+Pe@jU>KN%hV1V}VFBhV6EOszjF%iqy1>>l^+nf~=Wx;n~*LLS@pziU_?eih?Yk zba}KC8N%)n2peYZH!Yg3t0h4B%R&VY2&548)pQBKXGCVLQ|;yGNc>EQIV5SqVf@qtTMX~n{Ji1`>iYPRziyV~R;zno76*-)AM~HV5KxJ{g z<2=Pp5YDiGj-+kVT?M({4@gU%E_!lyN3ckSBDYK^)#-I~%{s9J3{H*y#>3B%liwX6>h1Gnfc48ewwHy&N zDP}}!OVOjR7wE=W&H=IbfoOyk8!pQIUbod=!E}{5um10LgXqB7HPi^fy8;n{cE#yD z;tmtTn%ag$xU;n3)B9JEZZ2I3wJNsyQ4O$5*gXPh1oSp;)KCw9R|uK2hp{r^&RY6T zwA@O3Sb&*?3$6#BZwhnu@gI}o6r29onM|KHb$T2q!i8#TZ;LxbQtQ>)X^aU`E+RS| zYQ#&~t8D|Amj0c+itCIv!5036#4u1dMs>l6t3|sd+3sK`+b#KmU>As~26uU`?arar z5=MANuXBD{ONBKx4JAJmR&eUw zPKWJNC4DW5l=aAq53#YH5zw^*kvExX?z+y#N%M^J+tJ zrU>%lU5 z7ZzyRq`Wnwk**cmfUZ8w@2*(~t2k^c0xa^1MqERY&y8yTD0-bFdiMxfEd&HaQ(w@s>g6N_9 zPMyZN!P*P+nQIY{?+S4tk@O1gb@0BCz!F+UuI*u~l$bR9MF7y1M8RY1Yl-eDP@fR)AT@;D=He z`#%BV529ycM=QqhIn<`(C~))3g2)IZftv;3#!Uhc0i75WL+=;ujC)fwpg7`5zXCsS zmRVJb-VEUM6t-Ok`+I*&ib?%xQp1V<-+{V7y3bta`UeC#VbOtT^~rIe*(WIl9?Ulj zu~_qF1@$8hP9J^>SQ;o-)BjA*?tE?ZVMnuA=l6h?=hgzpFcyeL65tv4eLoFB)hP&F z8unBLxPTn9VzXl?F3ZwTSC)ymIJ@N7%fn400vk-e3 zhd%>M8^RIJ>qc25g?Z#wCcqWXLO9kvxI0S;yBg0diFOe99N{caefAk6xp(; zr@^;t1yNfcIIP6>&p}&AdJ5W7-#Z6J6@)RqF2MZh*N)wtqAih%X;&WYp*Nt3z9ZZM zje9Ojc}B!_p9eF;v9O^g$hv1(3X7#|j2Sm7@CnAig)s@I~^AMOvQF{L1o1;2^{eLj4)`?9l#Z zy#N4f8(NxTxt?8(dGd`)J44TB2?jBi{Y%dh6K)@6$DbQeUIZ}HU=CLqh;}7A(LERy z-}e$o)>#>HR*K|vbxRv#|3CBWvNSM#jm2>Ss3D$ig*a7&%iomSg6|gUdbhIrV;eNy z1Eo0`ae6?h+(x~HE~-KQTKDA|mS!!^vd7!PTwGkmhL|Mxe5IB@;kbr01MVltTw#dR 
zi3BWL?E)#P0)@mKB3uz>a)$P`Uj?9+Gp6nv8|s-7Cqru=qfuXU3G!!$2~k0qlgo32 zc#;<*pWhMUJh5L>j0au=KC0vPMY^z8mH!O506QBd*U=f)L;Py-kbF7;^L%(T#MN)|?CKLoYGj<-V4oCZ@nPkHT_=r_uL;$#fcL>TCbo_K4y`_;r3B(E zk~}-xm&B-^T>|XFnxjq>LJP&vc^jdY z43>Z86lY>zVSJGzuF;Y`&zv$X)_bS+9fh@#WAOzgqNpHofQ}88NSIt zOVwY@erhk5<3D<)mMm1#;_1u(^=tKPUw&85ZWT=`vT5+CUkT+?46#F;pFwEe6yma@ zXzSWv_D6)77b$2fyx8u4K$C-Oso0ZtXTS6=AjO>5+!>c=&(jz*CiLw0d)TT|$9+*y z*5iv^F}7CAq?NLX&B-CkgH4olPwEH1I+#VI`^@`*%$^1KPcylEa&0FgJ&j^5Z%h+_;bse5*Mn1%*+*l0ebO&>7 zd~jWUH}C3N8)J@0jArxZ#{qgI+d6H^X{+T80WP2%iBib$-@hNs*%M-8EWW!Qv{{S_ zwyKO7>!uu9&S5$=#9e|svMnm-4o1HcY>vtbAvZy;+*rSsV_UB_LohpS07RKNEg_%2 zPiCaf1+Z>Pq>C^=yGraDrI_qP4iOaB<`}^jSrjuPZ?Pfi%t`z?an;yefVtDr))>=8 z!r1Bi&W_gXnF#oCn4bMfa>7C|8Yc^KUE0_Jz$1tJPtCzX7t?A5=Q|ts)qXxN)AT>p!BLxPu*Z#_6>;sv%vB#;7;n7UJ5rF|4Ndrfm%7c4|gj ziy_g7w7eR$_@n>QH;09~9`qSD=_+wd_99{Ih%Y=6$BCwFtQ81#B*N@zsC_XNs+W8$D4HWyT_uMCJXi0Lp{mJ}rtL7wt03%L-QmhgaCYOi`v`OVT7%u6@xGsRoC1SYxK;! z_g}!|b>W8~T)`F$S?W%A2r?sRb|!o6k~}WZ-k<}M6#Y+cW>6S7$ooGkvMk;ryDAvF5e)+ zxgbLn=?6J5gmPbq-wW_}aU3A-pJ-YY*KCY`5}NAALC8PZW=G=(BHbTX%C%qj@qzdw zP07#e9RZY%)qd=-6VF*2GZ819YmP9ozHS$S5R|r`&%j&7?Vy+fo9wDhijv;wV z4k9ZAgHzDOrkzQd4XSupcd>vT*~R%IBfUB#ITAn3kY=|0nbVln?-zpLa3X^q`VUbq zWr{Z@r<}KiO%?$!gfSgp-fGuc)^gHi2{T2AlQopt$x6D~B?8^@=?+ra!V%sC`!hPR z&YEV-n3K-qQssWOiSEEH^3v=>s|r}^lU4C*A+!{VC(e*PvBhrCW_uk@Dj7gS4YTHf z{8;`w*Mp!|E*9XX#vg>Z%c3=I;;1wlSBc{7iVluqJ;{F5iJ_IT&hGr^LP{Z>+;?gd z?7HI(g1R6De@&h?3(}hye(4^hrO_Fa(YqyB3-$T_um9>^Agev9QpO_Az~;Q;+9p+RR5C$D02hRDM*+>4uvqmDc6Mp<&-ROSHNN zE8GXN-)J+ImZP!p6#q7JzuB=w&s5z}F=&&!1z6O%LP{pdcu^2}6I^5BGA8wzQ=!cr zJiAuHjRitnQKDtgaPBF_ zzeH0GA{(U50GyF#gIKVa7YA5YvjLT3;l#PW*3Qe$QH8_t_G+^5^^c z6_N}xQO9GVNy3Gaj5kAC2v3_+duz!6G5Anv6Xde98_+{YE;d_~jM-&O2C%n9XeiK* za5>(YB+D2sI2k&ZUtKlaLWbjB(asJNyJzWN2(uVApd!+3f1e}BiB^IO_a}|(BSVes z>l+6D=&)nx0qq%>CZBB-A_3Yf`aj~92<)^A6w8GxISEf z_>5&1^1_THeZ%60cuS;0`AZ_ewllq)8IJh7OQ$=f@cXj@x}(36Y11 zJFt#kCBp3F;DnfSs}QQmZji!09@|jMR_+u?cM8uGCkf}piK`1kid0yMzY56&uZ%(# zb&XD(Jzy#@P;5-lf@1$=>1JlmQ4cT 
zmE(+@ZkOlV<)Y1qq&tpqkDrV~59jGx2@4kM%=3qs$5!DuB1LISL78yLsd6;-6hh%r zKWBS8Bbj}fq~i*aE(*a_hvP}nSyT0MP)7>QGdWfnDh%USF&h68<{~yQOJJeIeud_| z)BIX~OhUz2E}EvA&-Q3sE5gjn@4z^8fm1MT^u`xNP<$M9(rJ*MxL!!6lvM4#Ay(Wh z*jeFMQXyy$rsBH=QdOF3`aUz`)hsi}Oi9?anFMs?i62Ir9;rCxk^w(X6YY0fF&~UC zg!E_IIbV-UJ>9YrPYR|w_|p%^|3tW*X?X_Np=ep;T#~bHB@PzpQp!;r%LKaMJuNkm zYoIuVqOF4a1!ik=UHDVG%KRz);ExR7eWLsY<8Oi(bY^17+qRWw3N+4AI?5v8o=)O; zdikjkOLW7StTRsupna{h(a9+E-?K!_!yLFlZT_zS7dshIS$6fr*kV8zB;DwS@Vq9- z!lP~OXj@{MZm5$iak^+;(ZUsNQV#f+33AqCinUJ~)ei+)XdCE43_Q2x2p<>nqbx5P z6SYLXobg{l<{8z2!$%m-tT7Wu`Vl!1x6HarrkOCWOi$Zgz* z{e%cJU=iya$=Er|f!w|7o0r73B3&vh3k)1+;+6Q5P`^hEs}vof`O`q|hqX}NWOu$` zYKf7ZBYoV*o6(f1PPZd%c!+<89xALNE~n_M&9JJE#L0)&0Cp&l!7~K7@tF!Rv#V1I zW7MWoaH;suL@{_QSinhS9?hvne5ujv4irM4Y-ntV20i;@t)&6x^274v3*=He7N_J$ z2RApO{+=qt?KZD@9%qX4Od5-;81XI;Kw-MmT~yx@WmYltry$pl338!W&naHmt63hS zxFMxm?2Ct!gjT}!wHn(UVPa)@xHpDG@Ofk}>7^s0-9U|8HJBYX_`;E({sdm6Wo_gM zp;QcijP~CYWhVAgD>lAI;zLJ)nHvqs^JSg@Sk=+g675Ix>f<*llz4rs+9bi$$7^vCqX9qh<_k~+eMJG zA#yHyR&)a(rrHB5_rxJ0vzAN&>CO~Do_$=+UO+Y3st0;fj_F5^jI}~2xpeE?LMgtU zk$kvuX>)v2WH!vOO^GxX<0@fpP7KiLtm{TaAb)!vTk>NuHv@bco(g_R04y1)N_jc7 z7mBv+NVltt;AO5RDWN@3Nfd@Mq{V!jxNz| zl*%xUjMAYPAh-v!mJ?$+`$MsB34}6@R_{Nr6mN-gReSJ3(;i41vH~oVTxzddWxE8q zKnyW5SYDMUN)b@2!*O4pXg&%rlJ&38H)ut(3XkPM@-{(!%rWssxs zUn2x|^_RGG@p=)w+O&9nyp#Qao7OFFVh zw^Qnkiv_x79lEsWQPHXyPMCkxvxT+2sI~nY*^lkG)O=#t31_ldTBm0>HTGtuY!c#c zWirBOyilOO)w%@lmZE6$rh)yhbQ0x@IhF$F$iMiIV`}M=J5U8N`He8=RbL(FhJX=h z^R$P38yWQ96y#D36*1$jj>bypq^KTzN89%V-|T3*S0Yh%;g@p4}<#wi874@sq~F9G}lfdQy9}H!jLwaQxO% zygkkfaF2sMNWj*gseJ>NCtYjy10m)H!>{J#p?F!Czq@?QN84w97Rb##cYhp~mx`8X z+hOb;kSEk*ESN{}K6a+Lut4X<-v2pg#DGBw24W-8ev=T51Sdyp`++bE zFZTn>v52D;Q;!35p&T4?G^Xd!@@$8qSCC&#zB_DW7UK$mE_@4{+|hD8G=&$}h?UbS zk`^Ssp*&6y!k=Xc*df%W@UKL|LnakWJlmY$+*nB`=bQo2{Mu6GV{y#qp`3Ty;+D8o zq)SZ$%5yxHomhkFhBV<3f>e+Th(r#@H$+S1wUZYQh_Zwg@FHete^3ac)$%2T*E#eg zC#E*@4vC^D^AolebsRsYd;tvUBWBKJNpGAhfc#nCAd8>Ke#V~$DQ7bIS_U-gqBeOE 
zZuCXcy9A8%MT|FJ5n?`fa9)bGL{WJtPlk!9mlv-;nPikEzZ88ao)QL!szi}smi(tk z=e>%M060s2^c0Y+vlMkf>sqG*z{J6cTL@c{Y7k$pz16vz;dLx_$`Bm|cf`x7c^ak} zt??EK;nU6dYE#}7BHcLBUAo3_z!C&!6)-za$}`ZZBh~@m5aPwr>Lgz>sN8~xU7#P# zc6>A0wTA7F33ELL@tWe&!P9C0&VQT-eDNy)d~JyiF>Q7_goOZ$WHI&@O(S(wW%6qh z<;)u7r5Se$pe|Uyz72I{yg>^c0x`QCM9TsC*BT(Av1v>x3 zh#hc5Tp}QA7x^C*<2ypKuFAG42e^k~${AqJegUf+b)F*xx!=o$E3*roDAd`d4Sb@= zoLNJ#yXa!v_8p0{{|CH9uVomB$La1J`{nx=^IbN5f`kW}fhDm2It`|WuI209Ie}iW# zo$g}dgeFqFMv&RZJcN#ON|uIRNqd#p366^enSZP3#oU;M0xrx|?o0ZYigD1{HBin< zqs&U(Q7@1$FuRuV0v(Na1;EidjtD7HcWi%-5xxmn9psRq5X%%Xow@5=G!?94MKTa~ zgbF!&Lh2>7dc6W*7F3hU|YH@{-{S+nxB|VI?e~+bG;Z!qyZO2 zCuL@4ths>atcMJgy^VEYjheT53QMAw331UG ze3H>BZqKpRoa#!;fro`reab+e>WcP@Ow%5PE?h;<5$!S!7ACIc%soo^BLd-P)4|bv zF^{f#6Dvl}y~x2~r*DC}(Cqp4z~vKUD~vX=U%_96h^5OddzQI8goRFe+NV8NtgNU6dGB) zdN~6~%VjsC z6zFmhrAS*E(}cKOElZed^91!$WJcq8ATl>t zR|_=TTG$3Bde~JpU_PQ=jRgYucJm;9`9pD>Nar$nYwb|%bv2+%oZS$!caC&$s)!yp zU5H;y20kp?#h7&sAUvzWOJWEY$4mk0jsM*}~vgGT(A51$#?`{5bIcW~-0#N-Xc|Ggd5^>k1Gvfy$sbiWEwY^5}6T;{`ix25& zirues0;IKZOxFq!0(Q2rY%#}}$=I80X8Q@YNGz&i+l;3L@Ga`j_!2i>U&~6Copkp1 z0bzWL!$GC^g$UuIYIXck_atp zj;r(x`(+~1E_nS9pfwG0!Ywwv6~g7|WdoU`8|aHo0-a$SaX#TkoFd5IRY*##&74yH zA(%PX!7+4Pe*`!)t_~dJNR?d11p3WIGG0l`cvYC2wXHM~=iLS=fk0%&<4IBOOrNAM+F(eEcrbxOSDauU?E# z2y$_|WWi?xGegi*Sq)%AjHiXT6vU@Rtk}UowqDO)?c@eyqK)i5K|;zk8DLr5KTMbf zqlG>AI7>8TV3tO;OH6-5kn?9Al?QEt*Si}SzD%FFZ|taN7qfSOP0VWSCB$`UO@}MS zq9;#8j5H0vgE`XAb$zhq`200u%i#4j8utp~I~|?PariwvTUJK+=J&X_mKLRv;9D>-W(ag+H{wHv>v9hCrZdco1h^`gFPQ%0Bq8pHBNJEkWm3HD zeV~jUx_=>d6yX-9=cK5;F(S-;n~U4yev$riuY7J|z5AU3!%ry|i!i&`cVR}yUMt82 zVLx6=nQMgjD~!bWSF-eaCC8c=i`XkCkhw4yxOoT+(UZ*;dg{P zqYmUpLjI{=09it^5VS@4D_ki!ll`XoOX5D!yoteX6t6@r($;?n$c@;+vAjgWi1*Cu zd~qiwaz2MJ$i+dTU0M8>Z<)9lb?9(m)TO<-Ay(_z?8M6jekNCvbFy5x5)#vRPy9@@ z+rOSw2=^cSEla`C)-sw(Z1ylID4e25^o+(7k(MwNOD6K!0$h#-cnj%Ba~vehJ*b0i zD_o5kFTa;3^l3m6{{B4)v(qIb@v=ad9OI#Hvu6H#r1l+IiIW)YCCn^uRalM`grgFt 
z33lV??0rvpO*}5h+#^EHU_Ab4PB(WpMfsPtSJKI8cCOzR{6aHT<`B8P~%Z4|(Kt)Q)-l^*k04QZ1uIvQ69aWS#~ucejKxCk;#S9Gy4G#Yn40b(|B_#{aZV)DFDO45PGo^HW=Nq~i`eMxg1_#`A> zsFOp_*&>_;sgYt9(PQzXP?w!^!aA6_$!|g6UTKKdg$rAXc8UchYCI6Dh0x733y;Uf zzw_^_)h)Hf659)*6cbmcpyK6j2@Mi7HI`K3P~$b4p2sa;BT_fxmf`-j@A9sOknziS0J|3*Tylc*98 z2=fr2^|W-1G&Q}Rzg&-jYh@hxM-o!0LPaZ$r!$J%+2G*%&-hEasa4m=#_t5UkUCSQ zgU$r(dlt~`fs%k*l3e!Ugy%rve%+9q?~twMq2XnU?NEuIh~(#a92wBDp?gG=b8?eF zF`z~IV#=QY%_J&@!Z9eo9b)=EGj@zCMY|0O_OTG`(qC#x&ejLK;v85Cu26iBLqt142a#My<30fh|9st!_j*Rm z=91yqJtVlyFj7Xb&;BAeA-3Wc@(Yk)AwK|50APY~o1HxXVw=DZH=?~PRZS)F`U zfW--?ZA6Fha_hg;u*K!og`NTjJ+~2N<`Zs*RwPWlAj?NGlwmn3$MyovxFl8%|8$U0 zYQ^5ja4gJ-@&FeTj*2krQH-V7wf_wS7GmVZMJlcr=?}MP$%CXkD2O?v6VK>>>zP8J zNhAkamIhc7-T<5w3tSzz`6xpwWAtX0uidea5OV^_raA`*$i$0lg>{cqlOT#xraxhz zn4hIU=}6&CSQNJAxuCi6>E!}6cww}Qzlz8v9=1@^(W1(mV5tjgN&OvB=3!}QW%6ku z#<4((KU}tv6Te|0Zbr^G4#i(YJLAe2(HmHy$LIbI=JALXnQwig4%_{smb8&Fq(x)B zFv@_UfY0Z*MUiUPW(jpu;zNlwvBxRMw5_8SHLd^b#vRqxd{{=w)jp*`pjNgiMYcx@2rpdVm0JtS;YzrU(Si>4`28=7^i;{tMy~VJ;SD#<57M58p13T-c!}Y-Ue<^xw4? 
zb^lMgRCy0!{y3u>BWGVU3Zi(~$%?i91L@*(TR6(`6alRA>lbjomPa>Md!eTqy`n9R zysbc5{X$?(T}xe@q-SzkHhDq%_&Wgpc(W&vxKWTRSI@1A!?a?&k|&#A9OPgqeg%Ro z0iG))7v0Cgb70XE0(xILr2ZtjVv zM8giEChAo5X7ASC*FGdSKnswRuPUO-JS)l^Y2miCShZHG>)%!vm+RS<4aLOOP+OX~ zisj6j_wfV8u9Qc*;&>6xfpY@0<80Apl45O+av0%}EIoS?{qdX#^IQ`+)4(fhJG<3* zT*MyhKv*(tlvDh=0|fFxjvY^rkE~lup&*WmSzL^Z1e(Deikvxu10}aDUo6_Wmc_B&`)j#MZ*)A{Ed}!3#u7V&9~D8tOPEE`O5yEPA+CDElGe7kLG+|> z5^st=zHB|eMC+iMsJ9+1#Ggk9@bFEmk6xjf)GC@u^a*fpjkl3s+DnCo=AbklOs+%m}f-mN+K((Fi5AgtHh>NPGqZm?qd&A#9N+qx!+T?AC;B?4W3hRd1Jo6{}T?vePsX!ui+=W?tcrZiYrVqFhj{fr?17&4)qEv92OBPf-VK;v%7zl4W@J z48*enTptDNW4T0ot=2eB`sKWFt|+R` z4g*i=ws!>a4~+*}Da7U7Bzn%ifhwA%?d~eBxuIRyQUb<>T2i zYsdJro?Sfr|Ku*OTjSy_!P&P5INB5IZV%w{Vt~?lkyuZV#ZM;cQ8rI1u|gotuMvl_ z@tKTdJZ-_na<3g~*|wKqcpN0ajZ&y6+HAQJRbiGhPR($tMA3_5EJsn9hsvutB-j_{ z33aiO0?u%7sUR1fT{6~a4+*eD&YeCzj{3NNC&cq*Fl#if7m#(5a=&JQ8-+Mm1{mzu zJ+b|cKq##X*@e_2tem)ZFs{_I%etsatQso7g%sZrN<*>nggSMzD7P!7f!_GOXxACx z)NM+m@uxf=sdfze@`#wa6DhqRvMjkH>FOSN{lppi#i}3{Tqd|79JaLAlYVgRSF&d~ z9^X((*RGx$Ch#;oTZo%HiLJ~M2>u&_VG}pN%I3BA&X8o}-5r|pfn5O9ySNfNj@lK% zb)tQG<3`c`Y&r=O4+^kw%=5kvHt#bzqgZ!$AoHAs3Omj?xAerOLe14K)M}i1M`NxqN?GVG-hUn@^Z9!~JA0H=**lly z$nxaG6yri*1NIONr+16gmSk&uX3yH&oJy0uHKNy3UCA2vjywr*L^V0RZ?D=* zi*ZJ!Cp8FhL76vt)75P&g&{EHi(YbvC|6KBSug^{J~rMP6z0_tqhaRcGmehUdN!xq z$^&EScSJ4B!H$0IHx#jvBoKBaFCoUY4X4z;L+EcE{MUp&LnviQCthO9sZg+kKrgzy z^Kg+ad_x(7miARggt_h8F-oU8E~Tv{3~BTsa%WoqT>3r|nBcJ4Gi~ zsluzB_pRmH$y!pyXcXi`b=7qL;fN3lrkNPC*V`G%lrVouTg=~&U#Ul%E@7+eMj6NVAzv!4JkXIWCqE%F;e zUB z+b}xcFUlP1E9$5QYt09RS79KOW54bG&FZMKnU7UfK|_LQeVJS)sR z;wnAnh9er^lW4@(!2U)Ua;y!$SgQ%bpIyYL7q^M_XBSs>CS@f4kVDHbaS(|)o)P4F zvoE<)LE=?=<8DYa3K@A)O6R93giu|a)b;ltJ^s-XqfG>(=e|>C#M63q+RAVN3r{uf znx8{*w`VTxqXX&qK;r@cc#C|}a>rzpj?|CFb)qaI_=zfbzIwqX&CY`6QNYSl ztg{fn#Zk$+ezg)MdxMZfGHV;7YWR2j*NK^B}+a?EA>o9Z8RMg#z#c*rt_sE+|>#|<2N_dwM5rqCq@Ai>xv>N>)gcFr$kvYW=umM zr!0YX{aNVXf`BF?$h`#11)H++VR;YOQSEYw_}HzDl-c@27Xi}AbOHR4Ih-_=KC1{P 
z3$|!$xMmNr8oM40rfGKCoM_Xti&Bq-N?-W0AnJuJaAwSDhoB;iPk2ZjAAV|2lrwFAIkgvYR$!EsC&jn%d=F7D)!+!tfMh&M#|i-i zz$V|t&W_=)N02*7TY*3;BXNunR|nDL;KuO+oIRr`Yf)8an?q|L6NEehc=d6C&cBPB z4tp@Nf9NnE=dW-?><9D;L73^Mo&D7~IU`q3TsA`W-gkH{3t9}-8rO6|W>9wTGHhbZ zSoa8U1cKpr2*+(v&i1ZLXUBlZtv|2@UqRY>S>qydX4ja0qJl~JQ=`6}y^7IR`%PCJ z$?wQTw~h-vlo8DSeX-V2{6g;`mIr4(M&f;_#OtPZzOU zJP^YI;FN5GSg=)!fun(Vi(+LFgolKfoeZ<96nS|<5VgdlCN1+&le@Y2s=k8s=ZsInbzDWme_sl_gT31H-XiO8x7Yide(o^b2 z(Uft#%9?OUF)!k;f-HHc3O!syzcMc9_6KScs$BZ+5@@chBDTd)%qcn*jmEYJ)(rL7 zHi52VM~PeajuxHC59<8xy^vD zeYH&MF)uIghn(c=?ERuD+9FsTlB)>niy{fF5pH(cU!ERB2`(1whSL#5Eiv{fIY*qY z=_!?1E!uTwFOdXa7vOpjcv4C37lM3`a$t-QeOU(ZW;Ncj@r-DHgOModH!liuDVfu~ z4v>j!;}zg;T`l^`$tz?lfv#f9lJ>?pP&91Sjo0+=E|HT};0i{#_)|eHA;;IyP2!xD zHH48Kf0#IV0K`S8Fi+qU<`C=`2&Yd?oi=-Fd^;oNEopC!od)@Za^U_8WAK6(?XPf- zYYhEk$qJdSR~_(lF6HzW|AD_+i# zKGX}=5!;r@$-G%6^XT59{jEZ;Cd61S#NX=2W~AmC$P?AmRoQPnLkKJ_SCi}V%Oc?c zSG)Ab-owrkKNmLqWr;sUNG5ap;%RWTXx>haJmb-Mj9*~#;Kofg>#agu2Ko6)%AOQ7 z+0sK5PW3XLy_BaTN(P>qE8}fJdDf$5F#Srw z{v-x|*@ogSL1cx$A5K9W1)?1ZEzZT5-G(9~a^Qf>DjDENS+x9821P zy3up_o6AZC{Ci{9DsNi6xDpeWFDjsg-3!9u6=A@1waJoIkY+<2KDRIoMgRK((KRNn zWdC)w(^JSr1pPTmq_brKET>^4jLEAg+(oBP%9*jariO4^3|%FLg=8$zd`!cfC&33 zaOcNv0gll$_;teE1(<6_<9jDSn>(mQse%l;n|>bDUuCaElWy!Gh_;?XIH@vc8RgJ@Bf+}MFf<6J?s0ZJX` zKCTdjrnnX;48ynCYjS0_H+v z5*Zmw0n8KV{HlbvV>FQiQbKthy&e%EPzbDBtk<2glk+B}zLo5V#h6fYi*3zx0pjlk zG1$$}G3EnLg@ECMm?`O=O(JQn#T|9=BR#u|jWE*j>)(X9{W{SriT3;@fXVj3K99{Z z4i)Cte04^=Cd6e}#8!i1g#9Z|qiClaHRx1LD;$}iKKIsl!jVMP5b02Qz zWB`LtN}6CNy0}5lE`0+cN>TSe zt%l4iFiKWpp%8y5nR2zV`bVA$2akb?Z^Z^*A(d;*o&mmVL$Qk>GUr&5b~Y)?jMHn0 z6x?4Cds+oing*I+sh$x#ucG=yWdg!Id1Sy!>MH_WVBJiI$v7cSh6MnDQA;JzrC}uI z^v?f*P<_(vT4yOU*G&36-iyUJPnc}sopHAw%{k^;IPpsXly6m`2dn)Dz6#}{bZ~f; zeWrOrvQs&XFt{ZVm|w+dK7$`z9F767eal!N$UUh=X9qVt(@ByF#iUA8Oc4ZYrq7)d z9of(Bq<9StM6V!!UuG@FkF~xA;_q{1dQpKv`4k0TUq6^f#K#@QRfREadx6x3EO(j} z9ip5Ywyu$}atxjY;_4G|3r*pQuh(+vV5Pu_#rXZYKyu+uIOc-JZ`9tQuQI>?PypEy z?gRx_Q6laWLak;`pAl=F&9i1p3Q2`866rEDC}`$j{8EU^fQ=ZY?|BYD#z}M)*^=9G 
zNIS+dgT}d}a((IOoMx6k&kh17dkhK3;6cK&2{P@e6I`60KSxR3JCP}D0Q8%lI8dk) z(}n3@Oj&SXVU{n;rAlFNFxj$C5=JFwO`jI`>)CZ+P^Xy=IKP&UMrAT*eqwnc*o7X} zMNLc!*h2*}dbMau+vEZWc&#NQ1E{F1KBVs^qHeSc^S3z4MtG490cPnix;Z}D143L2 z>EH@_Bvs*`f{{n&N`{h=cuO=*BOi6@o=9ihpyEd3g|&=F9JcaUA(ReNFE*yn<-b=3 zvEhxx1wvduT35kU0bdvBN_3$>r5(YKe6yA{Ry8@|e^m&DVfsKuuCHdZJz=#o(kL9fWfmTreAl33M?~L{rPgwK-Oifu)O? z=zAB}(%~yas0#GVUB3lJTQ{|L#)*1%uWr*xlIKOcFbi0Jjm7TY2B0vCLq5r-1T-_{QbgD-0yM zo%N77>C##vi5RQY(Kt^SW#L4kHjet@@;uQ3X6lmq?Piye$n2P3#4t4)e-wiJYr5cQ zaqZ_i4tflfBQ6KAOrgh4+>vY?y9=Zdusb5j+?XvIo-Y|n+R1Su{Snzt5U*2nM8h*@ z46nX|G#&sr-iYSQ!fqg56Y4h?C{o90^j-*)j-HQVtjKzNic2f&gei~pD~ z%22>C!-Q~>NatVZ(Xv1r330Mew<;85>*!RqIr4 z(P$7zJ~D)67l*Wc2grh1Kd_36??k%*ERW@Tt^wxHLMn^;wEQ@i@k^z1q}X6NuJ%zZhE?MgccvNqiNL(*nTUbF;EomOxk6>hG~ z$D;&?$o~o9YqO`!i0y9US-l`_m+m0SX*uvCSDr0y2l1jb&9Os@gpverNWZvjELN+; zEtH1#1)-ThGvy4JR9hp-!%FEyM-$Gr-`pGK<*S@RvGNXmOL!v5(dQ6|SZ@i^v zGd4Ms48&FwAg;(fVp6d|!02{@KwjzZXNynIeuZr;1)=(-9RoND^Wx# zd8U6)C>3k1ju+{v4~X=83#-KXLI}6L#}EfhSW^^b*Pkbdm*hanKJIt26m&@0V0FId z20=6ic0rj-d*V?62u~X=U5O_|nJwfwN}uS7CxwwI?lXAI#fu_c+*K3TSgEhX-uLD& z%BfqCVP^`1J=jJFGoz?pjCBVJ^x%ted~O^p8g`9wE5})Sb|vL0*@qH+TMjM6tz`rQ z-cJQlg+Fa)nFRi}j-B-()$%H5}Shep5a@z^2YPT;I=3qmA zaTTFWsqm)-X5B#^Ny9|ZJ588nnxo4sVZJATGUy&_4K{CP39P2T=aHY0z;$bo9)JdN zwxBHIe$FY&!}MRm5N=&f9Vh?XnQ|5by>c+_5KT+870bG}=+7BBV|Lm_-0cB=r6tSV zV`yTBiZT;gw1--bbA`AB3mE<|V#S?;VE$~jgnydE)P(wL zgnsB@t1JtlmVVG!m)vjP*2`|hN-y{55g~Fk9)8GR9hV(Qr%h_jBLXvC7x1o^X(nNw z{iqGX?W6e#HlGN6O>3 zc|Ij}=t{BHqa<`WdkYg+Y9iS5m(F?#H^Ie@BAhjYu*|5fLMU;2xht0CzwXX`cC+?~ zBZW|I29rb4{=X^G4a?Q4WAU13iruF3O|8GG{kTM%{&$IXS>~4p6#gL|$&)O@;WT#p zHA!5Y;X;*5!(xdLzobiJweNS5Aj&{YXR-Klk#1DHiSUe|Q#~ro~A%Je3hdQ&<@x|Fp*~KjA z4iVM|mAG7>TcJ=*htl`{9SF;z`OD(-dNeB=m`-3>nyG%C!}P4QKgduY33eA|R~_fu zJGj|Ig?L`+T|1WQJzP6A9A`cS274wh!OXk$?;*@$_FM;a_rh_4vT_l55{=^oA+)p1 zH?n+u@abBLh1x#heqtdWSv(Br6i(bG)STlu2MekF{s2Jh;Tu%u0DYH8|IQqy3t$D9 zPhQtZsra8j7qK}-0vL!7{L%TWE~L0wyNUE`KFqDL@(zK1t%KMcOv>>}_F5k#&HRaH 
zNI?sA9MTc}dUmDf>0)vGPcwu840m`j^FKnIBU*pz74Hi2mkBdLE8(dgi=M5e!#K@) zp%N{E;NUQGrULyRB3f;5OaDMzo&AWKo+1@KD%xdf=Pqwb`hpMyau{B4UV?OZ}TMW*q#Pry+4t}5~q_kTDPn7o+;Rs zMzf&WJ+a9PKoowi4ye5%0wy*Pz;^Q&{k$4aqh0k(Q)6G_nwL0BBsJ)(YB2fgOHgFm zG&tN#I7twN!pMhO7q5tP>#?vYazhifTl`AxwFR7aVUH`$7UXoqU*n*O zG=j}v1$5U!A(rY>jID&ZPzwuvZ0p5dLSPS0_3;fod$=Ul*l-;78nm-T%}?6$KzvRh zMeHphVTHKhbx5~lT^gI76yTSd);RinF*^SO=v7CXO?cePmfV)oFP_4Z(M0p0R!|S> z4VMMga7KLOuh3>z0|(ZmR((nkmEjU@F2a(7@>lXiw2?Mf;|)PBAGeltaYpBF05miW zNRO+CBHMm0{gU4d(BKG!-0$s-8GNzpSM7u7ScUav> z%|)92AY8pe3!GT>CKL?h)UwKa$v+?|fvlvN!t;AU&ZkasVDWjmOtzAQyJYnW;artm zCeVy25XDlO>$U&leGhi6NERk|)PmUY-=J>$mb7UQ`wQ`_+GP|^zbeR8>fo4EJSEyv zk9Lo5h&JR)@v2~?rmkT@eB?hoyHqPCF6+l)!BBL*Kv#i7p_8GXxI;s@i^eQ)b+)y6 zf1ZU*CM8|K5Dy4+>E>w*h7J7gw`-qD16WD>Cdq`WrOJYwkd=~@kXJrV1Nj_xVTA0{JtQ* zG{h|)Sk*^k!*>BFN^&}&Li0qM!}Hh%#bZNK@;Ra8lt#|-n40<$L77_Jjd2efnZJ6S zexR&4>&O5nr?%)`Yw=`b;3m)B$*S1xeL&{HJi;1BooH8)?P7wiGBC9ZL)Pafm!&vX zBurd_GS0bJQBN{gJlCZeRTU3v2hL& z=&Mir5}(bXc^<#wPC@35a?FCp|5&J{yc4}k*E(w?cu6p&XZ?fAMXbLrq)Q_U;0X6& z*oWnO{eV<)qEps1zP#Q0YZ+oMWIQ990$e<3Jz#%*8D1R3*<%wW77DfSw3NEIT=e$> zTsw+`u z!d#B#%vOxvFjXimDYFeO9S3g?>CbW$JG(ylRe_d=cKT;I_S+&)Rb&dlGKu4~Kr^wf zN=yqj*AD(r4XUsOsyK&5K`t;WsVuHVCB_6>f)-XehJ1!-e{wl`33H?XxP!?DRc7}s zq1;9NO{71Rw$KPfEdAC-7Um@^}of$n7K+Xlq^sNG^1?vvw0S7+6v%g(DNrXWu~f86wn)B`E92KfnDRL0LcqFh zA+lyF(7B>Z0KIi7p6D;@8G%dF9NT5;IAh#I3rf*-Qoa-|qFnSwmXm`D<(4pilRl&a zgj;@$RGDa%xR#wzIG+|oqP*x&3!yBSv-{Hm>@5M(BN`iHv+e!d*vw56dZq*`lPm6= zjA~8#)3F(a*@|VtvK>h2l5ne+BFmhYBL#wTAWoZqoHVpda@u7q-4W6qD7$f#PWH8( z04W;U3f^ESbAQSJniGTD>Y@0n0Qf+Ke$ViRV~>Ld z?6^lQrxjcVNN6ho76^RExeSbEIB`!fk6gV-S#~Tlp)N!n>%_4*P09RfLCqzvzpo0T zS_|qH$K!hD1(eoaJfu07mi;(==6Ui=7m*xHtswuMO2Vpm~{6n)|#0WLo`-lj5W z7XOi81|s?$K@2Op-Piu9yX?&m<{QyfIRnY8AA1UvM0R$@r?cl7)Ao%^^z5qO?S~5Z zssPuRjZsXC5{X!Q3Mgfi8q55*kx1lYyvhzeF|~%`79sxVB1R$f3WBWtN}$=I8&}2n z_?6n>tDM(hw)lepOLH$~Qq98epI(C&$Xkm>+d79D z(DY<~*)@k4gVHr0hY8~Qtyq2sVpNnHkKUmB+)`FA2=+RHl|8;&jL@>&ye{0ut?ffc 
zaR#7E$GzT3@BOVHXWKBjvCoL`nqa=2S|xU$Sxba6q9CwI{X0=GT{#_TV!eJ&o-C~{ zJYL4Rg3WqGPEZ>Qap8O)y2NnRto$R`CYcYD6*IxST}Q2mpp6=~S_v%-orR(CoE~GaIxPnq2%JciExxwLzNDm* zT4MAuOzbb(Zz90*b7WjPRiM8|lq$}l_r#Y4S^nlPZRQ{+qx0B7)zfPoh6#M8a+mu-Y)wC zx&R8wjd4MK69Yp1K~5fMRr#kp9b$(oZEEU$4yb+N(GwoIGd?Mr_oU8OQp~tkA?^yT zI!PY~3t&8*K4;%)an>gwT%;bh8f}@nR3NqIhOpJ@7LSRBi&|(C)mkQ?;e!m(ix-rK zu@zS0WI-;njO{b_i0=!5N379wwru}TLe>b^^Ynv@xVS7E{ue(5;AX2Y62%;y zb-Owsvxy}Z)>6c7C=L;7k;6czpz=epLKp*KV_id>q-SS@p&-SpxG~SDPN9axZw|lY z2`OxfN${8;vxXZgrI*8s7X`YKxc^9`cc}+KemN&bXW3O0AD_;NnS=H0YT>oPZCmGL zKi1(ElvkHUAQH4`rPTW;yKagKx>uG zF~}~&N1DL04|C~8G5#XJRlyickI{gB_dG!JrqG4`59l62nXDAXu_J$QK7hZ5*H}4r z5KRK6>$IYc(6c&o3G4UUvu1Hz2HffZhg+S(t{1A0KE6xzcC&WvR z2E=)y=?8tB@Q6R=FA!kJPFm+La=w^H@m$1YGZuFXa%H%w8iW1%EdYo(b{?JTK0#z^ z{y8zd)vwb<$e`52dHdhy5QcgNsqR>7F{#YqLy@s~N;KkB&pnns@o!Pt;Gkn#$UM^W z(Iuef;A*nQhK^(=(40q);gD$Tu4I(019Kv4v^gRvTW685V0yN6ElNj5vy4<8wmHX{L9rK`ZgPFzTRPo_%BH!L<}DU0A}i#`?T4 z^D)ITj}vpC8%ZCDGX+rEc3dW()wAcq_gGWn6~S)zZk?g0${p>7umB;CsmUsW%q*pm zF|ZPAauD|z_$LltERf2#7WxX~tcy17g!XtvOpjsKhFR&i*ALDTm#PtYWsk((0?nF^ zVNOlq{+S~R^@`2;cp->fKOq<6!%HFDw%SY2bioI6R90W^k}R?`NGKe|jY_NSWxQ#b z+5Oq$FN(dF*XR^cXtwBV)GPF){I^%V7vgGHM~^<1dH{5&Bf zD=CgYj@k(s9wJ0Av4ahEJ^j4Sl)jKn(BwBHJGM)mXv^`xy22g)3V z2EFFYoVUNAU({)FWaNvdi*)5NT5(8$mv1`)OhxBRl+?DZkA!w(*|{%8Fl-b`dvcPw z5~ zvsQn5N;KadCzuQeCqLEgpBZb|IGG+_7j1EB9N}h(IIjo5l7Tx#(twl6{yE{U7|Soz zDoqs$bc3nWGv>>0XQW6{b85IWfcbSW&g=zpXF@1f#yc5J;AL(r8H$bj_+2fDucjQ7 z;$s;=IUC1lnP?F}Jsv&b(a%3}V+@OgU5l5r$Jeu;U;)7wwq|^+ObYXfGN73j(5^QX+h$Ju&vVN{G>e?~JdGeb? 
z&|IwO=`F8_f{DK1wkOsfhIWqHZpAzk^90d0SRP<$Tq4r4O+Tb_QQ9#Mb=FXdiUBw6 zGG8Oq{gZJ=x)f_}VSKTZ{jDB&^gq#VHqX!+@Vk$xy{I`eEo$!)MmaEvR~YfeMoh}F z1yR0JBy~&{+j~T~TX9&eH;%4AyByrS%f14-&-$Za{w{XBYy#XsATMIGA7M&}jYXRs z17*9V?Ip+qLB2;v&%8TJ$_7(+zm7%^#owy6Pqe1Hi&0u)tyQ%w)2<2bR)^$Ji}tLT z-A)ojE2Vp+<91OlKli@jQHO}WEQ|{4%7NHwbu9t+U|Bf!5N7ddzrd1lKBEL?>BInS)U+SfG}FAHFtXyvAb&DQX2c4MXJj^jjA zG{RfVh+kwsPUe2vSS|6P^2mEkG&RM(%L?Pq*{>YXN}Ev|j`OQKqQ*c*lVATorp`OC zvZDIqNJj(|ID!QgL}Y=5?FDH`HoKeMWH%()U5b>K;#SLnXz7 z)P_8{11)2FRWARN}vt;P; zhO=n+qj|FB+y=?mO!P2esTHNcd_8-KvAdz<1P>iT0yx!)?P5Ige-M<9vrJ}adT*MI z#~ljl5@BPcD6BC>5d3b%G_%Zw^uNn!GF~R*j+uw?yK7#B%~x+6D8MpHt*Cp^Aqx)| z?uM(E^Y$_Mi;Xgvj9&}LXQ^>_4?)>`>Yzh$i4b!w z1*l?NiD&W`7Y?-dX;QlRM3TAa%&)%kkTLU4!7f@8^NoU^-z&%&E-7dxP7NhI& zLd>%HSSOFfTUipMkM2X<&zSJzN~WlA(>D0*p8&Z(5g2z$JS*DFVaG0i&@Fxn;<_}- zi&fqA(3AZZTQ0opP(X>cC=-^6b4j^{9Vd_iW%GP{d>|lWv*QMHY2GPLL?MwyrsYQk zz@~}V0==MTXUoE)w~GQ1GW^s^E_3NK>4LX+bBqfOT3Lp-#p*wE&g^2fnk`1P0IJGT za;U^v^xs6gIL)?HAB^*V4o3AW*Z0Ocr-Qg5IBn$grYFV;^7;qOBI6zcX?xr*{o(?n z1Cz3h<-!R9&C){qQr5BY!z@qMD>(P1G4o78G`_CZihDC+aqWVZ_>Bm+D@xMALcCUa zdzceS1saHXXOPB{xv*jjiesi=PahmJ%TiVoV||XIC}P!xU*C{X7rja97rOpnG$`p< zyRsC;#}~hPffThy2RToPpx-tpAJO=`NLp`EOKo(W$#XVTV|y26(dGkj!F3+|vJkSD zF^rrplkLPO0^KaQ&LG{7y)dl*gbTmta{*lP+`a*~0uqPLwmx1}RnJu`C02CmLUL zRPwAKK9EAheeoO!s?GJ6NS&e@?IHvwRn;}e(RyaSCHxJu+0CNhCcUAluq@U%7jmp~ z;*4^zXquTrFQz1^^%KWV7EB(v$TAK6OqAKoDv@p;>z!9Yl5!81aUo-Zp=A^iG`P~_ z{7SmzIudR-;&jQZLYrlLoR!Wz=GeU45u029;Bxlkoe>9$HUrD8DC66(R& z(R?A#E+Ut&A(nBx5VsQA64vO&I4Ms=2tmz_p9q1i#9^BomtItPL#A39AfFaQG3wf> zHQL7)M7x}QY>3+{C(ZX1d7=BZjrE0&;N) ziO4!YLHK41bi1K$TN<~C_D2eBnpfiE92qF4vFj^WlZHZ1C&YB0p0jo@aFOmAqAA4? ztI&a%{0k^o38ywj?pJaEwKMkX2VO%87phJx4H?qJ@dBw=AEDObRuO!ysjh`uGv?Rgg1FzDS(z;jk*uHA3~P0E`o^2N`Q~?ywo}p*zPUL<@fchIUDrv*^6=hB^W768c6+j&) z5QHgKiEz%n_zE#zRJH=q!Cem|>3|Ke}NWjIswsz(PjBRG3j(iOSZucJe{o$Cd<tTDpS*x5WaupbZN5z}z|# z*WC_iHgk5|F~rpdeMyhB?2bxm8Eff+vY|L!pqZk{lv}r@DNU#=%WTS_Qh&UrgvdD! 
z0B`5r3FYFl)Tc?}Fd<}=<_nIonU)cj|K#ZkC~nXUef}=L$rP5Huuu&Np}>x5#CWO0 zEh!~-)(>Mcr0~f@u`q**+%J-?8(IXp8}%w8fW+rYVKjX@kfh)3dN;H=2TxcNwhM5z zvBW}!%NlPmE)eQkRdW#mt@(-&I@%0^qQ)loR9+fAoBqIH#+mo`gu)m!HEcus?}c(N zVv%J}9IA4IU`C9IQzpkBvgfHgPK&?m*#(n|sSq372jJRBAIlIo9OH$V4?6mm?s9`) z0nr`MAV@tOA}K=?F0+JB5#><>^N^NBDX3C zfD!COaOS>Al-rXF2s=7P;!h7$5Q34*2Xo6`gJ>wdbJfl92N5*Wq6Kr}Yro-{I&))5 zse>c42c52E1f#FxMTq_M`^$!7_lF=Um&^?brQH*=1^B}{T{Nrc=#YBt36zKi#z+|FEm34<*BNh75~njC+|2tW_zAKQbV-D9DyF3QLrzTCF8t3cTLM*FvX}V=4@`7M@nkCE) zR8}4qe-&y0T8@63g-2hk{WzG%AaoTlx-;7-?W-SL&JsFFdg56^U<}^I93c(G;BTQ3 zB?bV^IBMnR1^Fcm&j-rr_g7Vr+7)P3hhni1m!eydCnQei=BaAR!*ZFqREYV;;t^%W zhXO3vEi8n-^E*hGFmvjJn677jB6yozrx%FytG%TaaHCHE1-yYYbo5F3{#I2M9ENnun3A5Mbgq8 z81E6WLX^u{Gf-HG8Sj$-&XNOfHsx`N5GPrdR5~3`0U!u?Lg30O0v@1HjhpoBf^(Fh zQzNFK*-r!VJ=PGcB5oJu@--+k^u{j>qDdto3S#pw(Jp~pw5j_h&w#iD3vs|A?vMZp zdQ)@!Dtn%Spg*o>awi@dXBTN3y87=c?@AB0EY1<&>|0?E7c2fIM`jHV=nmPx386^o z0z;GpOa1^&9hS&wJ-+xIdl}!EVMie;qU1Ju{cSz`BU32MawS)xJR-D z)alDX!|W(0{27={#lAa5iykvc=5CodQVwhY@UKhzo0|k#W)(19 z_XuM>iK}9k0YmGUB9^1-H-u8^+5%PhR0O3&i^&NwTzU6-|B3+_H}d=if^RWV4tMAP zl8t~Mi_;?FMy3OV*!%@RN{aBXzrp-NEgdfirGaoW;ll8wUNRoITG!mV$zESs4cE63`^4ln4NoL!+M-Z z=S;Y%WJsu^TbG?a~>#<5V>b0N=gUEmX8oaNY5_WBHJBo@K67` z0N>`V^z2`=cqM1N24WV~5G_giFFMtOLR}n5EHRj^kSjH$ zy)K%HvHFF(ABdvXoaF01_|;yA^ye@g#AqsQNH!O00ZkSN+HCw(7)9kW38aE)H69V- zLV5Xxg-gtT!>`Kiii1&&qj8il_)*(X7Z+yFQzlG_OS9*?n)z{A_8tw4x3iT*`@G3} z5_PJ~&|8ev{$+rmjVRg3xmti#Tvb^}rflLpLBF_g_+VzSTh0{d+KvqjgcLtlC@kR< zXj$UY|3JF(`mok(@u@I)$e7KfaNt`|u)=5EI^ypWLJfyGC%RA1{s?=;F~_xEW$(~S zIcUdHA=Y`DbQT=O1-T%_=LPxGEUO4wLe~HOud|*uetLZF9iEZ-m8g{?M0lWW#GbSq zUlZWBYwQO=*t-9H7VE%Ne<13GS#HvuznOjHVS-&AuG&r#w!yoVw=v~KbIY=$Fpq_2zaWUH?w4`p*~bIZVe-?3&yKUl(pJlyLpb+#mm)zdd&rz3G5Y zoq2N+oj6-okroMx!(zT}wS{BlM6FK=bdv>pOu^gw(v|{9l*T{eS%ang_UuWK&r;Y%LB5+*THxcF@9}m*MZSH z#mvFK5zI|WlPQe*c; z)P=RXoZx;Z$ijgDYlkksj(9>ab)PzkOQAmRFSRPjPkR(ZniazX>_Uf_jyW}2AJnzT zY(R>!tuV?g8@99u-cEqZ%wYzm6gy_WGt4yWQjNGmfLn^Y`i8h#4)A|D$jHso-V+J#Ldw*B-O&4mEw%akLgN5 
z7BsBhb&mdGmJ^M!r?jB)QJy3n{gq?wjs245ER>)eCwy0kb8bjsI2nKh!L(U*O|>;G zF+L+%>PhCOh|I*L2g4D@w$3OCWbEtUHXQ;xZvy2SG&)|wNSq+d-)Y89j4m2i34)XO zAjG43CIgOk2u<*kD1RfnvV>jED*`PdjDsuy(h5JJW9ze;vRD?kEw`;-HK!uAo&>b_|Pl|CUSon8N!V!F*?KRa0!S1PwbBMCBLi&&|?kh#&kn{W+urpN#3zAlhXd?(az9L7N1*Y3c|PLt6v#M?lb zk%PE$_r$lp>lbvSXaBtIgt%E~&Q@8yNR_yYV0H8XRQ7{WBa)BSH7<-fdNzM)M*Qy8 z)8`4K`p6|K1SY=rxHEfaR&7;$sAo$c`)PWWWH9#G7SzSZdq@KQUm<20ha$R9VmLk# z=I^1(LkFp2k=4cln%xU^3sD;WL}3Qcw&ahzysW*d*F+YS;I z$)|#%5teOx02itbLs-^<oFYNni3}>le3gbBABgb)WQzJuria^zbTx+idJ4}HPGU#LIFopLx!5k6xAkZV@55C2plE+>AXSmUrWnJ3II>Rdu9fqFsa>>Sn??P!LG z0LYb~PU+ZW_;wShPLO(ubTH8)N~~rugDba-M2IF$of>;g;pqtVXTm9ILqk>EtY?3|pEG0$`|4BtGkqf-)w5aHjN2@lzs}fuDwrjf z8%%IxXyZ&jf$C8@1V_`YG|pCQGztIe(<)G-ydG(cbp#r#W$&Xft8B4~Ca0jaGG63Wo{u%-L@S z&#uuNbmn*^F+05|kOcLF&-qx-Zjn|Fa$?(=(Ee;A-U-P6b3)80wypFE6x}=Q45n^_ zGc2ozO}|%3(pi+si_+dCjE_`f)3nwuPEN1~so58agoRk$&@*61d-Mo$Ggl9zJn{PY zkt`wenshjkM3^RicbX=~a)7{RLLwak&6kMqkxGN!t6h&RB}Z1S7vJ+c`??YnIJGki`cKB0uUgCzHz1ic+6pwPTYPY686#2`p`Ag1-4%R;yR4Q z!qC*4V3!$v04f|+B!Od-SrKQN|*b|BN zhq!g9M{SZIzt&ht&J~5IS2CBK?XoX?AbgNe*n&cqRRHGm-~UW*WrFl@9<@?XmK!3E z0VFn=!YmBMK6Zyloo0-GXW20sEiZJ!_9GTMe`Ta9x17$mkqXB;oo!YHF5yL~;JCtakKxYoM(b?!q_`b~S0Q;Wg) z=Dv`Y@%eoyz9)!=!Gqjtj5(8Ag}SAh+$I?A?FUB2bq$R%V}G709EOCn#ZcTPnzGYeTN9$< zf&(D4F36@*CL1^#2{eBzhlS$r+6RKW1X?p-e|;_ih8Kj9F)YA@0YN&WV1Tk?pT=VAb^$bNEjr@3 zQ-rfvpr{5KOnxoQ)obDy#iyPLw4I66ty0)ou2v~^CgEc)O00MPnP-epvP)$EnAl2E z_dt$=74$Eb2r_qOl?Is;k}>)|c|tUix-={95M+k2(t)pAwgJGJA^dBpY%CS+nlQGo zMqz&WX_kh%56Q&o6w$PC&D@3Ym3F_+Z@6Vlj4edF!*OC>z}~bh#Kk3`5yQ?%oGJ)G zfct4zUW7DL+K3;Qq9QS@55>v)!R@y7L@Zp+5$(e1FrL5^1MzSN7=@`~xAn1}UAubX zY^{uKIx8}JM7N*xd=+^g>}sAg5;wu zbq#p(R^F^dS&6aLb|nxOv$;sfY)w&r6^6)RaZG#FF0G`X36l)OkTA;&e8VD5x{uog zj#@BiPUL3&V0^pA({P+4+VWDRK$>ixMg@^?GS2>E{yU}>Fsc+kK8WAV zrJ8hj+ojDRt@@IF$XcSS$RQZ>OTx0Awu1Ou>Vm95J_C<=25BNsu;qeXk+y-=1LO&B z_?1=O>lroJBQN5&MOgrwain9ZJP3fG^j3AnGCiARi*$S{y=g|5j4q?;@Qb61(Al3XL&8EF8cq*b*IY;($MyRAlC$!0NNJ2uK3ajpc{cro{nqq zhR_6lH*sS|{08H-JjukFlL@)9k|glFw=mMm%^jj( 
zYHdqZ?5bzKEO!hP8S!`y?K65OMeB;^1W|XI5Hkw+{H22dkSNTwx?}5%SatWRyW)Ey z$Pi0hEhDudY8J%X=#3ou^C$skZjG+PEXOO^TXdM_e%W7+#FdATNM#=3v@C8EmCb9k z7`kff72E$0+~rril73cLBZmS}y#@ka_Z`MF;?b@EINSjsk`FUX;%%t1{ac77ZmtwL z(R;W7tVL~yu~NWTe{W@TjP|GZ2Vfou@m4|S^` zN>`6h&|CS>i|ODp0{_wx{7iYO(DBd%v1AtD?%gK`n%SZ)J8TV5o3wJwRVUEjslob< z_*AjKAlS$D1?&8$@&xI!X~xinN0Pv$UBtm4xwHy#P1(%ITxKK=JgSnamq15sj*b!H zR3-eob-6&?CCuOF;Efm>3>|_bGVDe$_F%Vkbmcv{^I?zF9sA^%RW``(jAJT!v35w+ z+woW+*BOl%W3$GlTLilN1g4QC{x^>Up|OeM#BdsYBAtNHJ{(iTNkSwURgH1@@qV7p zF^q58J;!x9EZ1l*`DBmNY1rjTw`CHJj9W>R)*#v~qPNPo z@oj--W}lWih4?5(8Ve&thKO(cxKfxZg|UrY1Y|9sYq|#GRUz<&HNu3M<6@VeR1!30 zwn;)rK%r5K#BYD<1f2TPflwvJ=T8Q4jae-Wbi{Z8$YZ|u=!)$BH4d%0!O#8c#2M3LXFam>!0v$(fsxPv1pz2+b0FX ztfso!7=Nzc8Yp6IWw)I(1-Y=z?6C`RnE-!IeHUK=`tFC>iwf`AgFy=Y>Z0@fBFCnh zcY5NU^8sATHhL;5!DocHkTMHFl@R|CMB~sG*$u?I`8(Api18Wv*##tUr`bn#GwA=~ zC1F_^S7HkguL|&|B&STvKe({+8lGG@6jA82gz+t1m8FHeNXFw;*RjS}_o7M;RaBDk zcb*WXMtQ}oeyhl={||Hd=-r}SF1$O?I55opC(9&R_atM4-(KuYFq@PY;{}&MQchi; zD&56U{7Z@S4cAYeC`oU|O zMjM~FKTgsME^RYb+uf|uqb!UXwaP%0cTW=S?{Gy>3gCE2mPATsMHZNMWhEQwSP(r7 zL4^6W7VOQ~JI7Umlyka9TL^X?O)N{4p>cHMbGu&~a5!CwH zrnq0vbUqpR^~D;$fZ~(M4TZUb<~d(b)+uxbDNlp=oxt=B4yVx948_cA{3c@oZhE?c zHl|+-WY*AY>sD@@y}G6blxja@a?d+p-4c55>;~ zSZ3U&{rB<<1s-HyrRLiCSJ3bh zdlZE*8;ny%0bE+Dj)pGI$dQ5K=w;Uq$eW?aV7DF5Vrh;O)lM2sPWK*FQGPT-BbO~_5 zwE$<+n3gVQWa+p)2djD5b;X0EqbaoQWw-jg$gDqPQAJo9{8Okohyp%oCRck1$n{#T zaNO;&mJo6pET;en+vf?Gr*yeqjS$L+`ia?q+55sgi5AlI-A{!e38?v){w{tP%F|W~ zMOq%}8oivbDBM%Y4}4r}2Ss}vj372oO?O*H{$`9_5)&+f0IiZ$AM} z&jkmAUD)-XBFJCk5V5PwNY02$-e>9uNh|&&l8iWY&q*-bzj-aJ^OWeJN2McqF_2%^?3BY~&#pLKpjFWUHde$IMFDGg7> zxL%km=b*^N_(%|+K?fr(!1%vZ-m2-++L&$jLSgE&c+d5eiB7$L1~62yfM&h+wHHW1 zQ3gs}aP$okmWw&8HDhk+$hj8f{85Oyi0)JB!)vzx)SRJVXk~_^0MOwhnFgu(Hj*(oZ<5u z0$q-|iba4&s1Rq*4jAzg?iGRx?S%Y|fuDf1iV++Pc#7!!#E`M-{8%Z>rC-8639rA`09=2c zwMNJ5Ak-g&Hq<7`{_HYAu2CI^q4A?PoQrP2NJrLp39zu}m>iz5u6kIY8A;#_ZguR7 zPXxip1>8<^=bO$1;g&-n<^5MBJu=uX6N}vhjm_!V89F=lYSzo#6> 
zi8K>yWw;Ti=Kw((Dd3j>sbtDwCoG9mgz!}@Y8Z4L6zMExr}fn)Zy8YRlH`qb<=cKi z-^69<*8f7f$}*3UT@C!);++aQ$VIPGfFRSKLDRmWl;T5grL=AiMQ>*hA1#91u5-Do zEYAH5U2biSKmPACudsh&`g>mxBM?5>z0ve8gn36u7ge()#2=}`JWvu=&J(eklUL%6 zLXd)KOlBLt$Fuu_ZoEkIpvL&`3U@}VLDTv64T8*qG3y$}fhUBz)j7nFati%N0-ftJ z9a-rxNv4n=RMsXdE`=|oW(S5!tid6#Lahqk*EW6*& zB*YdI4YA(7N^jGC3>E{j$@sl!SC6gOAVyuW|Hn?UysNDK90!Yb5;S>?h=eE-q|L}u z=?ShE<$^CQkLc|F&FpugSh4Rp=Tm;C@rctE8?V+lHp0}EITO}t+-}0m&xsSXTsbCB zG@n{)Mu<~{nHM_Z)gF5wt`_Jo&mt@oTAbVSSJ^dijLT-?gU^wQIun9g8|pZ1b!aLy zc4W)M^0A;wU!g}OQ`jkM7}co8{(!Mwde?V^`lJ2B?6`*FD{F!v;T*Ge#=#<7P^M{x zBBU*z5C{tg z0J#`d(jpJWwL<)&oQx6Tk+@!%dAoGDge}Q!0$e<;E7D*Xcja$#XvcWO9Q3R(+PH?b z7t`^&P!zh@#uf>Ubw>7}7or`8M8aXt$I&KYMzS8X%O-nFj4Kv@9wci%g|*TKBvuGC z*O}3}u-w(bMnbcsIz4A}#LPrVog-%DG8=nX2uzpeC=Ob`l0BX`$d&F=9uP>Gv8yHL zI8h{p)K*WYVP|DDaeGqC)pJCf$1EktrGzhG1?nX-9=@RQ?Jqb}^z@3Dv6o1fs|K?! z!p+|$#2m*1nX3OL2Uv7*&i|(@3p9e=wC#=?@PfpX2x&U~Y!nUSv2-FN!j>EPS9TXD z$U380w7J6#E=)Eu8vf*qfbgPIR*4h81Yt3tG1^mHqGyHqtrQ(EognpY4Cv)vZjd6* z{82x^xVphYiQC(s73mD6*U?p=@qCV-eKIx`Sk=S~&ud_iUgR(!m7wSa6n zf>M6WDgQ~q{={OWlC^1TJSPmkFrCq{M&iXhuNFGi@;H1`5>QM8Rywg`L^_Xje}*=} z4+(RTFji$cFUHe?=!%nfoDq8j&u%?*7ShH(EyOQ0;q<}H=&|z4KrTm17dqafM4N3g zdXZK#UEmi#*AKY>b+c4BT@dxJ#tb4|`*fxNjhriFjd86gcK}&@M1Mz1e@DDF$%($KxMS5&$lj>{TuR|w^NNz=2 zigW>)c%{2k4?0()EwwkXJ$4bZqhsKnj5$ zBX`&DB+B_IgK6WUD8x16vT6Dj*2IFmywD+nTN??uMY!|lwiGPAG>LsAkZQEs7if=f zLQ+eHOj#hs|A}@(^rwcnAWJ|`;SO42xqXWSE|K;Z2>s$5;V@?zlf^b$lE%DQz}_9k zC?3x$p?wS0T1uamz0cDE-JHpSRwKfngi%T~!P3LWnp-(5tCPg|=Y;sJ z77V|!x#P<0Z-coQOms}JETKyAq);>-2%v`EeDrZ_~TYmON9TZ|D7SEyhM zAq>e|xM)sXu4i*5MYYp~V?*(RQ0m=>;|T^`+7$nETOij{w#$RD)i|d~`+%g^iEcr} zK-jY;4%ed_l(jYkC(a1*jzA_5T;H%{+P;!Pb{qtnV_z!SbQVaiUE z;*=erDVfaR80-`^{Ot@P$Xj`Z0)rG|qw%CNCzc@HEGM~@NmwSX(*F@nTZqk!=`9?z zkA>`u$@%*YtRE>}d;DidQd2bCWHj;j#oGe1NromIvj+a*PQY~A)*_0V`1}MYH__}x z3t~HwRA)I$5HqAh~`ELbohiJuE{IjcJ8E^H2RWG+fy#_p$cWN||>O@1~*iYriq zrY+by6UoOs;PjU{oa6S7CxOxO3s8P;IN6^@kkrH)#HR`}vl|td>i?!xvf!>PdEF5w 
zSD0(9M&k+zBnGXFV3)d|&(X(SZ?4CtOj2o&_aQKW`DdQno>@jIw`i^z48K4zq&C)V%Xj#2iYVNY(xfH*9de8 z=1?8v{5Bz;s}LYGyU7@4B>pPg73JKHy3==G6y_>1J?rk*csYldeNs@N$MztV+0h`o zDhu@|RiGB8#T>Qt#IDr~sjl}`qdvKN@s#d?SIWE()OPtPOq_l}2>t-RHo_L=x{fl#n)%1#UY?SwgRjv;hXv9Ay}13E((&bZze?b*9XvE%$QXS^@W zmBRCi795?I1Ned$<6!~LeUThO24mcO5RYQq;mXwSAo(xnFvfE2C>hdu^WpWRrdllS zhp;kGcRW!LoT_QAi!V3$dD4U_vEc%q&2_E~Anp)V`Ba!c-?W_C72=gf5c5g*K1eCL za8IDI4yl+|+6MlysgjO;4L<8b@fShPY;TkiU4{5oGYGS42gWRMlZZ^(xw4ATasvD* zCTaNC9$#sxKsY9+t@KucD0bS)X`p7QwaX$feZi@ocDY#Me;*0~nX<#wn6GCSs9Gy! zgtlG~-{vxV?%-;V3q`wTtT(g{xKD^%vITuQMszu_x0dYDwx9E00H~i+-4fS`b_3zW z%osbw`C;s^Kd6gaWyOiy*9Ky$@Jt4YuPe`_8bRcvt+RbBLm zGzSD@AN+GfrliQOEdJl4YwkIY(qgJytNy)W57P zwrb;<21Id#{$!jeeyQdhcVhNji!*wu9X#vd=xEvmJ z$ZRm9|Ep~fnBNF>3)HH~*{wb#$QjY*9i>>e0{~W99VO-#g;1_Z6Q;+}dM1ZK26J@L z8kk-YWFDof4btqpdMBW>rNa{-v|Rw%>i#!;7-*qGg-~KvBsz0GRU{2Y5Ll3JqA8VHSM-*b6oLrpQ4M|NpiAO3)-M<&w#hIfZVe;(i z5avc^`&ewlY7M^e=HE1@9`qjHNrR1mz&T+1{__kmD4rao!&>NB#6$jkit zql{!Lq66^PMY=fLT*0Y1VS^Ty!CdQp8nusGWiMxIlwh|~BXK|(2J8AsNUvXtyKwAy zP$eg-pddU0Fn(TBuJq3|x*I&yaUQ_<>J z*wdb?XTGh|sklgli$>TRjZweIUR_?)CW8QL1EjDV&qYtp%r3yJ>?=Qb1}eSSauA5p zYKc&p*!LI6H)k!ZiOcouOt7S9O8ry-+|y`Y=;Z_i2LEixFI1yz{iU8=E$%dtk*u0~ zyJ+40RqFRShI$GHC@M0!aOuaE850B(3NXybymDZKh;Cx}(>yIuYv9>joHHY6Q4= z(ubs+-xo&Cv$atqB7dgCuhjli1aw2V|$Kkk}kFXFy}$@D=sXSD+0^{d44g-C|z zI9i|!50^1$O@O!lXmPt`xMA<`pDLxfqjMfY94N~E#X@Ps=DPZLQO~YplZ+vlDPI)? 
zqiWgc#WqJk@GaeomiF8W1h`VDOvtsTM>EErf-P!o3d8-ZXjm~gf{R33Y;Ytbd2+m3 zim!^WRB*ydwPM>GnWs~jzL+A!`7OqhCFY6tXK_+t9Wj*7^7c6j*sMTz#`JOM(UrWb zwC*j&j>mwwhI29gQDJ9iP;X)MycFfIFB$|ne|5TaeIRr9LcuNu!T<5G%jjk36?HRe z;aGmRpfN0A3dHbrvM{RNQDCJ#5~Fz%tcax(8i|R=k;Lu7TAY*Wqz(T>sCcG}wua)O z6Do<&U@_zMqBw|O2(?7ik}w5Uj+ajaG#g|bTZs1rxYE7Ef>!X#b$?jNjs;$FhGdPm zwNSsQiKRWNKLgOATBAb%(lT-Cj5t`&{&+HQ)Cwlf70C3?tiTrWz3hFiL9xYW=_GzO z3)u^FN21MX6gDgobcOj~+^A&cBym-}j(A5Hg=;N#r5#9o{zm{vj<(po+G2wYp))y# zeC#iT;;dw-MbjE$2dfU3)UzT zK_z%WSk^6df=uAF0fWw`vA(vdxihv!|ANx^;_()rJ85Mw!M%{R?~{P9Fqm5o`CjvDDD>NYLw*2QvEY%F=6`5n5Ad4 zjYB$0AD0Mme)I8Blt%2bJkcD2{9%pyst^~ham*2DB;FJ1cI8Me3mtsvX@Fugx5TlZ z-0J6$F3h54Rt|@W=5sZy5KyT}qD~hCt5QgjA~HYsbZ3ENYfwE|Bz&qb40m?L1KE$v z*?M`#8O~3u1J-6*X7>v+>t~k~CA)YQfOlp(t-iS%|GXwZ+A41*}7YoQ_MQUdSaJVJMmM{U+6-HwJb4X`)H>9wpc#8|7 z7&^2gBo%|t<>!LHG;}fy%7h0Koj61|?{4EcD2F(@e5^@tq`*O5%b%Ojk*fOSZX~=Wp z90&z^?0y01khG~gPLA1nc43JfdeliH-3x85ysAbtQwj1lm-8`MjVk0|b%+Y?LP&ED<1x zF`+Ss7NaGSX+xhNiivJn`@XGy;hY;$n;^Ez`4U0SS({*PY8ZD7h}oihQnd>!Mq3WD z%qL16<%%1{D8nQsbd#SpaI@bsM}r}+u>NHWad&0BdK<=FXs>!?BEX$ zn+3Ua#6TD%AaLWgAX!Ew?U2tCK$|wRV;qVvURQZx5tbYDlLjGv0Sh*g)Lai>i7sLX zo79kT1DH!gShIA`=w^bLA6MP_yC1~)`R}a+?4qu5X%02AsF0=d6*(lME15~$Cdk~7 z8l9b}ZXy20ji8jt;h|`l*92s9EpZ$*3lbzeOT}n~u@~Y!`6g1ia)+jLm1-9lqvmN+@Ggmbin3?`n;H91-7M_^A5%8zvMkPL^Ul}ioFE5Xqfj7b*Tui z3v)xT@@5l16z>W`hQ0NP@81eZ_0(PQ;Jpnh%QV>w?Igf;tCgUlB2n!4m?hjFn^R=Q zlp5v$Vg48qSkom0-C2Ig6oVyN>~=e8+<^0C$jH5+qLJ-~aE~i1#I50%?))HW;s>dBbbTQb@*LI`rr6uVAsW(S(9XW0BbNN5j-BHqVhJ1!e7sZwPVbH3ZM;j_Cs6g$|(O3K8y<+T}^JnU>%yegmEf z5vQBU42$XVV8QSi$1TmCFNmbJW>HsM{2(-qIb+(i_}4=`yAI7d<6@^9p9sqa(+=@k zSGcbJFd*fTGBef}L5|3KBf7#^Dw-T8O`8&HKf*KP%cLD8aGQzZH9R}H)j|Zxs#+T3 z)9eSukBO;|@`Lm1RCJ9cqR9jE7Y59IvtMc5(K`vuzhCw%J_d07q3qY?Xz)10jvXGW z*8G z7Q&|nY|Pj5J16ipBU*`_F2n__CX^vI=q!966KIQ?rlQV&W#yX&vGj|(G+38x;h$Dw z_=2(=-vUOD&p!b`>r9+JEq2$l(_^x$<#=rLBoIa7ib=&t{p2Y~*S?n9%u`dW_cR#I z=1cp|5k-E@%{X=Fnfy4TWDHJ4TJ0G_$WR?#@^UQRLXhW*DyG7c&I)>BnowAUcRSZ% 
zh{!r2(+eYyfK=+~aoq2na&c`_bDSl@RcOLaMFu9X2r|=Ha!8L(_H+LTNRFIlb)->r zZvlJ{58hkjFcFr_4#mwqUbM4F=FLb#Uz{$`-E&boPA2k`X1N=LyImL1?@>74mLbgj z*ve>De@2MwEdMSm!DHT^DsMJ$F(dQG6+&E)?!IzsT$iOlY1mU9(G0!Sv!rl^8NyiH zbj6;6cxgF9sQN~SXqZ)BRULO`&l4F1PWyA^Rd#hrsraTKw=Zf`hAmk2!{_|^l<^bd z1wFeVs?rfr;rR+MFRiwo5CE$jW(oFK%-u4+vaf2{KYkiSi- zy@A_MnA&;Lcj8NWHZQPcE60yTJB#_{UX~t1amZgQSrlZ=&ZT|vf5IrTb?DsRa)AK9 z$4)g(tRH1*kV?v@tLk6$O90&H$XHSLQst%o!4ay~7V`vIDA=B04I{JPvOt7_Gk(ms zb;|}K!7k%R!l_7I!@_tf%d6avev4U-R&0LRukk%a2Uhq z1IXsL_&3sIor4}-j-Lsza8WQteAQCqfjk+3I|qj2VF6>)3+hl*OB_OS^V8oe`L&nZ znaQ3Iz!*T`8rm?Ydj-<<&Uz~^-7U}s#TsO&j7_%?zqR$mDY4e8kXb1Qq>i1M1AWBH z#wAyP^Id{}V^UK@^FJ#2Hp}~bBszqUk?hH&%#Jq%Se5pfV*<7;xBinKXz1+j0FAKS zuleQ58PzEnQ0iN%T{(LS0b`=Na+qV~>Nh|vmrDzntK&;!%wu8(xPs||M8~T&dj>J+=V_aws;rNY)2!)*tM4c z%D|!sUuW60oG1jwH*v`N^n3mlHq0sv5c@FxB?LaSD|!Np?|+NV_Y>oB5*z3(jg zh;=$Hd!EK^U(-ME%bbs6WKxXfqTQ~9f;7N$O{A{+Pn-?hL@&VsL0N#4 zl=_xo(I4jsA)nS_MHKH5L_w=;rdu3658H)^PeJL2Xm+?ss!tSLn~Fo+!FtY^r0ATz zaclPL^yw2~$JO>sAK+A*rDhV80|c2nDbgP|^hSp;_eQSynGk=^UZd!>NUAhIALXwp zDncKdIAfjxI7r9RSQTexOKB}7Y+My%gSK1ab9{r*tC77Es;SLHx){1rg|_UEDq*e* zMnddx;wM5Z6Azw2KZ>&inDayjP1~{!R|j(8SJJF;qG(oP3kYr;mx*wC)MvxGuKH?0 z{xVY{^DJZM|AfKOi8H6g`fF5vw?wcocxz2CttO@X`B(%sA;7Mz4u;pNq;I89bfZ*> zE7k^cO>iWUp5WI)_(E&BEoQFEbJi_6pBsu10Zu;0Yp&yjxZKMyEt4|ul{^tgNd&%# zw{t}9myB#{tyjs2kPn=9M~9Hgh{AkMe;AJAgd$HJI9d$G19=)|R#M1DKi2*{X((}Z zRZVQJXY*|9i4&|9xLX)(n4PQuvmS{;fBLMdXg zjnSqM_hfG()-a+yZG7W7fv#HGd$W0rzvKyFM~Zqn6n_;+BQe%1Fd8eTSADlGgi2$o^l;}+Y{iW(cS6^4TBOVc?Ot|zR zap7B=0r=C+Sf-)fyg`WNpqWM*l!+w4t2eKpDUc}pUMy%H6q*fF!>u@8X-fF!R{;42 zS5Y%Xp&LCk0}vWT(ZMP{1LRYZntk1`@`_8!HW`I{Pkcv^S%;W$zQnewAdH-wJ2(JR zq`AZM_i%XUo>sI2f?T1!N`-Q7DSn;jr09;Lm(w%HeT`HI5k;zRnH779^!N7}cD(4m zECtGE=Cn9cq*o*?4wCJrcA8J;FLM5<-9YT}by8}e)37Zvse^xLvhh&n{1gF3lN;CNOf}y=$vV zCQRSMD`V$xR{$)hsmIL%U@{R!SxLw4TSMw2Qzu7b-0hq6@$ecLN2&Ro%iXgO? z3h}H&WSoweydAU)*vt`rvO2m|m{~!yq4ieCm$*%+n}dtrbW{?*7DSI>yy%Jlo= z(O^*%V1HIqsO~$9hlIKKbC~3j)Q5%8U?|kJ7dUY|B%kB! 
z?XhA<2p5tS9>1qi@xmO%0h=93vOKv}s9TYq(TigkhtLUh5m*Z`p3_{nDIpz>{>AP` zb~ggdu3GIWM|XmT2@0Kz>R%yYKhB_z0)dd_RMj566Pyhrx-<`kxIlovwG?YzZh#}q zXO7`~j)wt?&$on`1%zoRW8=U6M378o>+0eJkuKo;k^&j6I|)GZUy3?MEa%D4@CdEc zvY0akA{+a(>c}oxzg4hXvJrOxj4R?rL42mWyd0me_}WyH@L|%<6Jtw}yny8es!d#5 zuwWD9k`6OJGCZ~jp(LnWb>l}AvX{t%PK}0uIB8iDppHVRH=dg2e4z@eNA zaLAR$o&};^R)wvRR;pW&KfQ#J6PNc5W`I~0a3n|_g1y-ZLfz_pSTZXTerMbxkcy(j z9qt*7RU+NTh)dh8K1*k!lm1RW_#<3uh{;*}bS8*JfqA?Xw~Kbk@TBUN{loi$+&WFt zLB!;p0n$uAGk%yoPh*quw4R+iv&l-9!FTOa$!!kDAn1YOZb4c3iB5@v38{TXAa7Q& zxntYMRQi$-%7x!qO7YK(u95#E^YQ<(H`ETExW2I~Z&(^Q z`&~jLNnEoO+wKP8%61ersrSWXL9T3v;@n1q09Q%@W*Nbc7Xq{UyI79k0G5$OT>RKj zpDfbSNnf?O2xI)If?Ybwn=G~CK4Fvv7uu0{dMq1tnZD@Vo!|JZR(7M;pcROZGlUu! z21j(<^2a@#2863Z3TP?c7n1dKDl`}yS5<&{V5ar^bl~~sZaC@+}NSEKD z8Wd>8G?z364aPY&6^M6P{`ST*g8a35Io`(kvnqLMkWj25hEsu-FlK6wp+u86ZIWFH z8Q-q0yoaKSb0%r3wx8{E)NZg1l@f*$2MU7o9UaN@;3$#skF%2r@naF@I~|i_ewHpY z)`Nmw#x{J(i}70l%#R%TAZFXnfpW8SFuW)GkA7k1NE5>)=0zwcKNUzVa1u=xZhOqF zyvPwWP0B9$QDN?|M29a9rm-pB7iwN&Plib+Hwfm?J`_^x%XRBWWigx0Cm2aL%@OQ$ zkj^+;hzmwURQBN7f4nY`3J#T5Fm@wIA7lW0lVMUJmd&eVy{K1S)3GW?YB32Tmp=xm6ntFccQ<>+O=KLQDa!so_+Q)*md*LwiZtRo)&J_o8 zqS{x8*DXa^Bcxihmt1#&-)ohBM{Fg^491SJtsHyhz+CA><0=8vV(emD*l?X7vq6EI zIhvr?-Is%dM2%@h3fg0xg`~9f%PwBdb>hr;!OpqgKLAdm_xWRHu=O2Vn`r&?*rP{7DA-pckRX2}i!Gn;YlyVg_z477aYOH+Is`JK8@)zs9Ob?X$(ziN{H?DsbHD26dAc!7_~$zQI7jW_-i=8^p@jK0vIG|-iF1E(eiyrb3w)+%;!sUWSH2ruVD5g}pWj`j21Rd2_Z+JqP{{=a3k&o3z7V9Lh?=ek zo$a{?R^DO^NjJB~e{)PKFSK_(@t!c3xmjmsD`L$8NY=&KGPbcYTVIexE9){csV_xy zD>(IMhSwFLJ( zcq~%9jQFQ$+F9DU_^*hu0fZdl#E$$MLk#qtv3aKv`=+xhitUBC^0HheaE`ozCJA)~ zbi2fZXVFx3!pz_r?8mjA-%}7WfEy;KKwES{Q9}$D*dfFtBFEAr)eN20B_Mx{Ygn>i zeX&p|KIsmsQ~i^Yf_!@+#$pgI~hOScSTaXiU|%ppSaW*LokLXc32$W4Mq{z ztxET(X4pxPnS_-K0Rv7JKpp0wmFS{&Jg5a&;EHbr}c& z!+)}@djO`2hC5AlbLO_haDOFhtcy?#6qDBYOu;TgjaJ%RJs`w&O13xTbf+NKsk+$H zsZJHU9t1|S>d22K+;9Mrj}v?&mG6Dgex<_$1kRg-V0?=hPxNwe;65RA%qmv5M-4d# zViRCbH4xt!hW6~HB_E|lkarO1!ZnN(`mkA!MnUFT?F!vxZ!JcXU{`El^m6H@f1YPJ 
z+^cZpS|Gn7kP=Lq$W?&LDz9KxSs*6nsY2+ZRcLzSnB@=_KTZyb-gKb=SU?-8z*ma& z7r9D}87tQWzvPtup?g1(x9%8&P^Y^?x;!{OnSM|?A{!}G@YgM{Q~@b?e?@#-R$TJgrK6= zKt(LbHEb#q5t(srWt=V09iwX2+*$FcXp2e%_S2ZTKQ6>|X2ok$VJg#i46sX1%ta=y z3k5hQRBFrPPdPnt=A@Z%-?98o!`8OUjTiKcsPeU zOuq8=l_kkPL{r&REl)=~9PbR&C8;SZ*?WaBT;Qs-GS)cJ#8S@o<7XoHblt)QRrPVc zC^r))FEAN;?58}ta5|Vx!_A+Bku9en zQiWMz`l7&0@CHgNS29#3DB~!XL*61s_LuVfIBjB4hHmt4!d#P9*4wNR-WKATu&ZSr zdPe|FHKdrwvDV2@&X`u!9g#z^NscizFq5)j?h)pUIp~(={%Rpe5a$A#Oyb7;tyvtH zr7iU@vos8Mi^~Mked9yk;i@f8S_n;z0|MP(^_t@1>*oNtbgA%c0pc=YmEl8|CMH3ibS`kESJ0N^T8dkR zy6Dx#c2=A1@z!S&%Hs(&CL{j3=T%ssrYt)8vVYxRT$*7t zvc^fBGsItnA!K$+{Nnjg*#NIi4B8J29b$LEuGm89Jf#^Oe*qX}r5DldSaBDkN)X?# zsji8m^lauZH!>mLDZn+Bm6+@W2pzoOLQv<-wt=w$6YG^ZmcMaNg}Q@a1jQ`po|j+?Qc>1}XzY`b`_)SBQqfOjG!t(M zk|5R9#Jkt>3^!R{4Wzxy*RKPh$f#Jb5h8XF9p^Y<6uE9eP5q)-Ts;QqEK}%bX1A@c z2O`UfQ>VwadbTLwErbP794*8zXt$vWX^k5y5E7}_9r3LjfoKuN2JQ`>9&6uhy2$04 zooMs+6m53cvupHnM!aQ0GZyo$q=>)x7BGL0E8<}LP&_Bd$!4>OKpljROIXHV0s?Bi z3i_r{7r&vcMpO5mzpNnA^^xXcArzJIzoZ@CdwB{nmYR9DTS)<5drN2!HM<`zz|E$> zp^QK2h~*Q({+hHA>~4y;foKgvKaNL5kd2H{Q**wQ{aRnQV1Dd;JHNV~H5}$^sd}Iw zN>)`}S6|n%cWiVAwCh+yIk*&FYx0EpMM9ya9dd%qpc;cGlM5n{Uq`9%VrNYize5DLuxm_e)aJ}8fU z1DN>Gl?5Oy4FuGW1AhhK`WJeV9d+z83gi~Vl$D)%+L3(*SzOWG6YD+Tk6^@#&wbjcHvbxkmTuvGr|G}p*&oDQ2hUe+;ttC}djo1s zm_V1dl>l+9$l@nTXGlRWWmHiAUCD*dQD_P| zwm)Pzs1`&n@EpxJ3+dY*1F&c}vI>R<&r2WiIeoo@@~l)j+2so_zOl-bT6x zu|a2@zP(7xj)J^MD-w%@QSZhs1!25Tl*OW2mP2vSZvpHVg<4rbz`P$PO1ge=W^G&} zNleU=F}GO?E1Yf!NeL+g?LN19VXVsEYebRGTC*pf7Ut%{N<7W*tFNkLpUzHI)-MQi z_BsIID3ry?w*>m#jxs|pd*}r@RMjf&!ALxkXQ%a@oE~D2-|?pVNENQt=$;k`q536u z8>}IEVvzu6E+Z}(9dzapMhECpdgHx3BNq8u=db;QGosF|7@w?+Y5}mhs;8e0dzvVJ z89pH&aj_6{bCCF2$su^$lTO9pD18D~7knTjn`e^86PxwHSo5h$Zd{F}VPqR2W)!1x zQ7&d(aYGJbBq%{C0tI?Jxg*B|Ya&@+tgKh|@QWH1GNUT)lT-WmWb54T6AxR2Nm0UQ}QP z7-mMKWRfXil9`ZXW&jnqNp8xd+>o10CPP=Gh#*ahh)9#(q$% z_w(%MK-|X?<|U*B!+EK5nFqy=MOp+= zt)lI*zc6=^~qbb}DprGUjx8w6b&Qjf@7(ZgfDS|z`M~p5PLIPz5WT%6Q!qP3~PK`rex+cI@S)E 
zUblM@$gNPMsg&C>!d!&xY&~#+$P+ICGCbzZo?nf3MK}ej3D4wM;blOoj%3agV#9H} zXu4jZIN(>yE0E@Rdi+*)(V@6qsH@b9jac3=j}*{smPA=RUKd1D>gf}$rd(Ti6~y&U za;pT_tAshP8C=Ifc8eDTQ3pMC&3(|*U(3JJIZ+PfG5Sw1*NfvLG5+aaUWI-g)XalV zGJriJBx5|6a0%_n6kX{JKzAcTur9VG_X*=!^|*P}E{~&|mDbfuIyzRmyE8}+fHDST z_RsH`4N<)~M>N4yX-T|Om;}^*;R~BWya^0HeGqe;QFOX&v zTPVq>Pl`qunmlz{JgeU>qKjD}js5QL0>L!iJeQ^GFCty}#x!0Szdi5e7&lkbSdW8+ znYptTG&ICdMAPZq_{^TtzP0$u-aM>V?+f$F*(|rVxchwr<)umeHykH_0HE6LIyD~C zZ_1D+SvskYL{ml!@f$eX(l2v91aW(C^Bp4g2-m%U+fSLawm& z3H0FYLn!O(>E;n}qh50~q#0RBIg}DR*FiiS(qj1wW#FXESk8trecAoYwF?ELc;#+@o{wQz9t<4HcZAxfo{um0^r8WL_N;hvNGmlg=FE48-y3Yyq^5 zn?gh5m-lB?69`{mJX=XRcitq(Eg&-u_HxYpB*zHKj&d25*S^B2J`oh;b{j8I(d(6Hq(!GrR{xI!A@^u0*T|V6L=|Ux4Z(HW$Lz(|8H!x7qJ0 z+p|h!zo+vS@%k%~#)G_tVp%cq5i7;+g56dt4q4-Bgt5lq#oZHsUm1ehboJ@hWt_PR zq??mu!K5T!~tqbwH`M-bV7;veCLAuv=ETq>8E$K#g}I()My+ z@}C@8(GhXfeno(JEiFGQ?@&w&FtY*+DI0t=2%!~d?O6MwOQfsJ0RkepomxHqkKyl;7qT|b?@JCwP z5gX%#uK>|~s5djZP0gLudVm*Y1hWj@sqUFA*_w+eNQ@VHCS z8I3U%i2Bjf5icr*d$N11e<1$8MP6dJOm#_+ct)W40HByL@sc3g-BcNj|LAvCHqGRX zEyhi(zNJ&t)YZqP`psaNU8%??=j$Tfzcn0}xUGAf5LbiV8H#$N(k`>`)LV_qjms* z--9w)b;Z|r^!FIQ5df!6a%wKr3?b=t#5n>yN@`_7U+F3$+s#~(pX%o z52+W&ZTj#bQQ0uSi-1ugqvMW~L9-!@M9*wwahO_wNiCtpG(Hvx)9Y$lYhs<9AY3QR zHJVH9afdKx#XVAC6P+@P=cxo^@lB^T=BV4 zw3*m~B#LLb09T>8Jcd&qZ%IBa%wtI=0WST&DTK^ZJ95o>OaY}9UCBGing>D74vz+= z7DbE-^9YdrH#fK5A>1`)6JnC5JB1tW>P%TVBr#}H|FnSSAw$aXce$yg%H)}0`)6vx zt}NKiYgf0&?HPcM68!}1ojr{dW(exvUE?0nu5c^QHNx_acvu)cuD7o`;uTTukOu0* zaR6I;|FHl!sjY+v#9;5*J%?m%&`AHDAaY5EXB2pQKxTc&BrIbgU}jF) z6Vxn2O2!&3ZR`mG;XIz(>5bT%MY{sLb3|)z5nu-DNXynVf3F;|&qxdkA?Iw_MkPzm zzYqwUP_Gk2{kq~KbgM)#kE?6)Y#J-esCnEV#QeeJXHV#Rid0JoVhzU<;$|#F<{!UF z3bL1#%xr4yE^F(E zx$#XS1B-bbYl~VCHlJOI%KNx}PYgE%FvOc@1(|(vXX=Xw>Hw&P-Z#~@SUYp#`(5H- z{dRlN>T#a|tbxI;8G#jDM>`TGS>8{Nhtsz{P}hw7b30t3Nd2jfQq$OV8Gh`Zx- z(JY)h>+S6aMY!PPz;fR?ScPKuy>pbHV|sf;gK-a`v;?mX)_2*zYlL|5z$*gx&m^?O zm*xUf&oZioP?kAwDk0;m(txb4H3o5~ITf&>d$Oqa!?c>v}!Hz-ikNk$ZA z!3Vb$G=s>`*l+0b^pA%Hfs}}nt(D~@C8rizX9n?yjdKK;@5IGUY0ekq4_T5r2)K>L 
z`vZaQu{7nmHaZ_kJ|0WgUm3Hk)exC)Dh(p|fN4eV{$u z2xtK>4le+iQ`n9Lurlx_2#t&{HbXLeCr_CkKhbYjgy$WG@C@QolrXn}T_rhBb{KTR z>KK=GhY4Quuo6;o_GD*YJSHF;xOkg#bK;*u>>8s8IB50I)>%D%L<@g8#}W%*)t<_%t1R?yqSeL*i}($f(2Nr7?4ooTXdu3lp$vd} zdzr{Cn9!_DR4Y84L#SD$baUquJ=7}3O9f6$YkWC4bk5p0htkQ6b@2m1pBd~JiW%&6 z`+;U$z`n~<3iU$hGB%bWtvbKkAKKMGs8)zIt_^-0jG>=4a%`fk1d=cM1iN=}0B-Ax ziwgkv21d!w-ywyIn}Pot<9ddWY+R;zM+5OwL1G^69_JSy;2J9x{cS?Tb}lF&;r~-4 z)$ORrST`K|eK*f<{}C=-FviXjSDQ@o%#jS#}sg^zc%9UY(WY2pc$uvAJI_tL-gqhv{U&35M(@l_d0KAbdg#4FDnTlM|BHCPT!PB4=4+}608JnDEh^r|GCZ%y!if2lY zY!XBjrHD1#^7M@oCe;~Z#qp+8OK+9 zAhODHK4LN?#JiS8p^TVV-J(GW4X5j@+4{XuFd4Vww)lj8yZYSjkV|CjvpCPbn^#h( z`_n?q-5KMz%eSOIad#+|_U8X^j^(`wNuB8hqQmqLHJ-nqp)OYFgP`$T@KvO+SX;C^ zh4^h9Xf1T(;fexH;*osx06mb8U9pk^P(Qzh)5B zeci2Vh5cMEjQ4Q`_rzj zSbh}1LvEg|1w@rOT@Zt%rGgK|SC>M#%6_`nw}B56Z2rtF49J&`fqF`|mhich+3_-A zPKgmDg>%On_i~|b1(T=)U+(LRZ|5qWwd|NT33F{5dzQk?LzV&1)Dhn@|BXoJ-NK@Z zo1K&{D<1+%{cGpcG{^cP80_*gl$YBi(QdD7h=fg!-33uf8)k(RGyYituG~ycM99zW zhZ>O~lbJa>5N(HpxM)@*sS%X>;X++uTx8fuZWZ9-nsql8p?2C4In;m!m*aGN?MR@k zPy3L4`;0jIv}&>XvxKa7l(XmZF}=$sbf*9_93^*!Wu_bpzXz5x+#c@(vAH%xmK|=g3lh0O$Q-v{+ z@c64>JQhV8G>;jWi;p+iqOepQvEYZ048i{K^Q&>W2!9naO*tHHI}yk&Vo#pT z(sRBLw+L+nQH_wsFBV4DJMT0tRz8W}YEL&^UX34#Bu{w*B6#YUTK+K*&6qFO6|BRZ zqP;-1+G6COzwZfmNem!@PxlbW@nkTsBAh?(JDb6VgGpQ^m}(|ZNLIrtPed6(WFgQh z@>+cF6cBHBJZp=Iw-=wm)WN6fA7_e_kK8ZP0X4xMg}mA!gGW4~l%AHg6~&j??or~~h( z!dxIv0ZOa=rJsP94f~I1(;SGm1-VEa`A1||eNULnkP(4M@@e;fU$9%H4n()hR-Zs; z+0u>QGo`G42I*XG4H_*9*-;2>@JS{f5Xm>JVv1#?bM_i%0%Z#n@fxTbQc6q_N|(s} zQx&U-X1XbCa&H`2e8USQNLhs1sm7*fk;b)bsB~#NB_P>m0?9}Z7s~;BC(&;8z2|A* zag(C)r}>B2Zi#s*U%rY!K4e+K4U0>?=j1eDRYu}&(X>^EQM7EF8>apY#4Twi8!E;O zkHdtTCwq@yR%H$!A;eY4VuY9DsN&lhS_Z0e3X3a*!o3DeD+9}7mvbRmM`kvl={fK` z2spBYWw{)8h$1Bp>I$m&kVqGbszgeQ$-l^v{YzZ4BYeX7Anw0e+_lhpx~Cv>kjdKK z+a`a4y9BBW>5ED=o-97og(D7X@h?G)W&*w;#!%RW7XVN^4m~M6GEdzJVR5OSH7mY) zA-~Nh#=$U>p=|YM3iXtj$*WP-m~>GN%uraaI8%Tdg=&ipATG)fEguS(i97Sfr11b& z^o8*YumUrcPWUCT+scZgD9G`iAo!=N354aoBu`MoF_~~3Q4z!dm|Gd(1-*gzo@n?` 
zhu)9XYSyKYbjJ8i@MuJYyVRE=SP@Ssu+^q##0r6sqT$6^SCJS-5s@ir zh`(`lo+EZo?V!je-xTN^i(?Rszd^Xh4tqm#W4~QMHet1we&rfy`UxX+wGt}Q+ZkhI_nV>35+b8}xx7di z^&?p2?d-!{@wr<7XhXC7)@5ic$BG%mI7pGRuw*C56+$7G^l_zFwMgeF)f3ZhbN0M^ zGDak4qAUxblTc~mkhIqAP&AF#*8JSciyaRj(iL<2<<#b7t1s4tHFHKYd?LV^zaX{OA*8`#yC2QzEKKs+MMY*f%k z?J+&^RFM#;C`TVy;Ws3tQ8+eYjDl4g3vey^ODXE}lj9&RRxdg!a=-4Yt#VIZM02@) zOqVoxNkyP}REHj*6q^fxAM{F!12#WPfTdCXvej7aUVp(l%;JJrcZ~p79gBm^<@DIE z3veDfh5^{!6?^=al+2K+c*;Dc-x(9oaExC{4G;WXp3|V7;z)MJW%mJQQ)#4gRA-_? z1i3^xYc5GPv$gKe(<$iqFs^qS3UcG-&E%z(xIut9!)!*^VCC!|1)GI!3^8i*ln~dB zE8AVkzN>w5toT{RkHbd-#PI&f#g8o(2iuDjmQ5C;th8ZSO2s70K5GKlx35b zW-IZa2)CVsNHTQ2E(o@19B_6|nxicr$deIqzSNCoVlP3O>`YZ=%_5PMj(FN;lf)rH z%zj1gO_B7De-O+Q6P+b~KLml&YWH*DFs%bgA`O`H1dv_!LLC=2-V@?E&dG%_{t&bq z$7K*=$Dv;<$1Ot5{U%0PDSj=$4WytU&BzxX26Dv~qM=C_B7gSR9OE)6m(@zk;sIg) z6urBAA!Fyi0h#f2(&;h$#^N@Ct_g=Y1z^#nOekN=ev&Rb<6mAROs90Kt3DD+l{rqd zB_ZzXj{v|~v^|A}C69ilmG4_e@H$ z#tZ&{)kL<9jYN77^i|sEnW5N4kS7K&j>EU33ShQc9A{?$Dj`l5RQC86`NX`Jn=X?0 ziZA7v@LDF%`TbCUxGb$3=QpW3mwOpZ!xzON1s^HW9m@@YYEQO093^-{-NQ8EPCsV| zhD-Xx1A6xS`E_x+5_-1c0LUqn5&f(%*R0A@MmYPuSEOp>04p&sCcQ$cY=!VM*HW|A z2sI;W6$3PW`6_^!)vM(nzu?;hx$lT{OV}^%2~Dp7y0c0hnblsZnTLe4o=)chx>)C* z5Dcg$VyoP(-~OPUxXm=uz5Bj!=RP;R*&wMSfo>*Op72y<$4Qu*iIceO03R2hV${br zpnFaK73gFQ3=j-etG}Kj+@_Kod}fCDi6kj<)_j9BbW>|ZgC?F52^VxSBc`^dOM+ab z63epYX)OOH$&5*PkRIL|E->voT%950h%gWIhIzB&|NaGOrq`iyE5)3*TpSaD65I8t zWv3vkc-VuJlX+$oeoO!GbqnilnR>+6-UdMIY#Ed%UbiTBATPTimZLKfWESHGf*eV; z;x2*a(>%6xxbkL!wBy4JgS-Dt8b&MOm?U*EkkWQ^m0$+tedkE7>xnhqfu^nt(PiI+ z1)C>?kXF?hFTBe?)D<;nxlNk-1h^`-r6E+VniL82q^O@aYkqTMO)K$u3{wozT{t)J zn&x}X3@%|pLw|In0B6QLV5TDZ#|;AA{u0+@G)`_RK5Quy4ldSspEPd!_(h5c5u8p+Z!F_@O9Q5r<(m zQI1P2LC*>1>zOl~;ywL_H4K9#B?Z&|%6|ahu%Gpaqd$c-OB~OL!yvcPgi=gxLw)Vs znEPKS*RPwUh!(d9@mGv|lnNHWbegUVZxTyFPFQF>B1HF>G z>HkOrr*sQZ#!lE3J1y6eYKj$7qSwDg(&?I|(pO!$JTxt<<6=44 zBEXf`yT*#5nAY~Mt(+%hmEolRLjkZxfholCXNm+Z-P|UMO;;g-Ti23e;Ke~gJdEWo 
zqJZ{81=NJ6V^4WBZZEz|wk@vueu3{8KK=3(>xd&nQZiDsF0sUVt3rDSh;@5k{1_wtxr)oXi!s3NnY_kF*D5{AYp6n1h)*D!vG9?#yTZ(vUx1keOI% zV}>a{>JNqKv-Uok+7XAYk$Mt zi2!qGHewP5VcBd+ud%ns zbAl*~obVB8Ul)p+qqe{qQ5=zB@kpUAkQD>#h{oxTUjih5?n4q{jpGGuo|9p8v_38^ zX01t>%Vguja`XR#SZQ$1;C6_<^n|OU(`Z5}WU*jshkgJr)$fTi>v>~B;o)8uqM3nz zK9OtI4@j!dv>@bu9}jo4QpIZPfif6a%&`#+$70cZ)H#&in3=RbB-_mNou9Y-~&v8t{oQ_jO|m;(Xjj026W}`Z>L8pw6qt zdK>c(ja$&z6dPr~dlnPYRKHyo%1#cn(yYBzfCQDq?NN*VPMhQ@35bATu6?ILpq9_^ zn+Ti|^F_L2ni%cuy!!|-3lL8_WrjXAON0-fHY;*D=qt`eYaf$gNF+^_?wVr~vI;>; zGYlrpr&rTz{;RRi%4A?_2I zY~TvY8C;46<6ZsZ`lC)|0oM_#c5^^RI`L{)&R-Kny&G&P?})bqP;b3s#0Yz9D@f{@ z#uBbosC`!08pw5G*Q=1p=Y?d6B#)73UqNJp)&M0(oGj7xDRmgPW{cYOzjKzio7YD_BNv@ z>0=}P1|%|Qxx9=gCPlb|w5mzn!lI+|@mII!A1_8VBh~!HuWbdpQA|&papTVgs6Y{f zBH--t!49N!D`qm5(Lwh|`;Iw=MrnW>)Lq6baZB~6Qy|wESX@(LkLHEtp+RhsYE4;L zcyyw0FOT5-dsLBYeR8p5?j0x0{6gu{i=2C6_8}%%h2l|X{GrH;(><;n<#SDjPD0fiQkqy3i3vOwNm&!wSV!(X)lP`CZrxnaRVk<4(?t51Y_? z65J*Z6hty?YznlOmY`=djAR{UJiyxUnRru}Cn?I({@8hEXw8L@P9BMmgG6RyZKSF| zZU_L!W@$Px*E3(P73uMTHUkM)2OAc*H?m}Oh!j@yc5%*F!v+XPeU?ZHCOr?hb;SHB zrZHk9@7TjdyFW@Di*+wFChZF3rZ>uURo%9?F!L2Tiw)`9(+nt<1v=HfSR%j!ZZ6Ru z$z6V&hMhl`-O+0NKM)?jtfGhMW7yHFC@JNrpKh|9MF8eqBhqL^cge3SC>67VNw25l*?A-?FgvDYe=tbu z@HFVoXXu{#!_7dzqUTxThJ-O2i0x9vwSXG`tVq>JI8ju@sJn#0#Kmkr+=_U-NTO$t z5zL3;YkQDHvPg2k8xY0D+S^@;@9DQ|uQgyvj~*?by(ge&VIBJ$9ks6zTCJ6sot5Xr z1-QCs>eV^T+?fRm?WSQbz?g#tQL7!`&Gdyn594;SPBdiz6W@UIK!?O)yX zs;xDWG~A3a^aVm_L1TSOjMh0fMDs4XfhAwofw;>)wZJdn84JAfj@kV~1@ z)2nwWwo)PwWFoc5q?KHQrU+(G=nWJtK2H|OCKgQ%rLWB<88vFfUGZf7_6o>fJ^gLc zI>qk_6ay4eCjx3;w9j$&B$Vzx7K^5*nvwwXm?$r@4nsuvSXPO?*sh*Gq><=A&R=eC z=b@`c6?`PVEZUPc38Znj0Jnc8a+qE^y+V-LgxS=ure0fMxDV*nB_3rf@*T z=9y2PvOO2Aj}%}|5}2orjc~7q9O1N3CK6*$fy}_R&G>mwh!@7j`g!$nT_d!snC{bX ze%feWo(*aWJ$tvu{5+BLr@E%MbrTT$YON?n-7Oi_%}pq_Xi@G5iL1vBYQ*c6oDQpY`;hqT;t^+aB-0q?VC;z%%$H6 zG@}u1nJCQI3!3wyOL^~H2mmb_yVq6eIcGvAE#g`ZGoAD%bLu|6}vuVdKrEZG^ zWkXu~hX>4wi$IykLw@Auwv5z1+0hy6?aRN^QudTl#yhud0O+elS{$NTlxxIbPZI8c 
zATt}j4yV_9f#PDQyUGqRNj|tXd%#8a8Ff%1;V|&yigaWBzjf^|Va&U$a_pFvZ zWX&?N+0U|cbmd(9VmM2jS7e9K#q^N(*Hr@PimL9%NCai?gAp?haw&YRs9OxjIYP{R z9LMDK6-Nki{S-W$ONFHSQ7KQyDLn06337L%zqTEQd6nRd2e=<5Kn_@?oggCpowv-5LX)AJ?2@h z;a}(k^eU}*^HuKP%0;m&w$LB0Iw~M0IUJgR-Bq>SLA1Y){MrL54(Ive0{M~vyQV$wumCH`G~?uF|kav3&f*c z8VhE~Z34~F79CgHv6TEN`;cO0<4_lW7VW0C^Mo2cobnw+a8G|w0tpw?Hq^v7dr3%b zQQu+he@2x5oy&lYH$|JVgXs5=i*D}&ap9OomZa#!tMr3;(CFy`w7s#KASct&IT`=g zEt28C#if~n7!yQ!y6vU~cZ~sP&+geeV|~WMd_jf3%nAgV5m z-iyZydhPc{;Ak+Y*VseCzFrvyf(ublrd0L3GYBf0V}y=ad_tPb)L zP15N51)Sm(5w=#!l(nm*X04e&Z$b3vw`(~>Ku$!0m4`vxlolcw;QkhC3NjOsobS(7 zMGqCFRg*1=?Z18$($(O=k?G-nyc7_oa(F_7W(G3aE)(V@U6PKCrYy2I3wP6+den$& z{9ce*i~B7WpMMCT5onlEAAN0E{&znw_zlHP0^I6amI5TE)eZq6Q_gOL_KLLG)9-c?nJljwy&E;0)9(IDQg_sdF-TjyhrwU-LnN!mk_ZNTS z-_2?0T>;dQjaGJ#JAMQShiHPLK*!7zp4( z#5F(mSLD*912p&6J_GGt9zZ`|jejW>Wua~?5j{5!Jvq;*)Q+h}agpB==1%NK>CH|U z`<)78Hfn>ym41lx>S03NYE}f|8*O+R0M$g?(`}G;krdXH1)@JE`+HtZOI^)A(Rw<6 zyA^t#i;If=#ysqr5tO>p#jljm>jD9GQbbvc9O)0IPQyX(u{#S_f3Rq$cu1jsg=j5qmGSeU=`0Aw&6FuqqD{YDYt}j3e3SI` zu~2i3vmYT`5Cm2{JI9*2y*(tO^}52Ss5}(<_4a62OfQnnz!^ZXHsWew9(-Hwg0IkX zqRmpBCX)0UhyN6i7P15q_bI-54x|~!;|jdC6^%m76|X z_IUZ{`G=NNc{uAlAgaZ-g@j18R{BMrhSS*q8ec=~g}fTenz~d8$XEf3VF?WHi24ii zk2=arRLy8CAX!Rv%6>_Zc}wuYUE3zNRF9NazBX?kYf|4%y=Yrh}=$7Oo zN=3H5*x7NmlJ++q7tL7Nb%$MJ(_ix2t#9PifIM7^cZ9(s%%)Oa($=#s0m-IDvgTG} zlS_d#N43q90vljUX09mhX6Vkdl#pJ2St(BNELVaZrm^9Jt3|D znmNQF=GTY9$P?|c{28{r8ZxUkQLj0R#Ib_>6?+E7HvxfTr8b`U;EmdZsLog7o3 zM}Nit?)xOrA_sBej*A34t7hWIryb`FVJ;Ex2Ss*@pIr}RF3cg64$JLhLTK5n9x{rT zMNwyB4(a4nb3>jSqZy{1c%wiT;)w`rcDT`>^m0f={GTtg?KTVIfO==V>d@ zaY&YI$@t}&U%~Z4=%fbb79-+)k*;x)N0;b+=5Q>3JFwT0(o#+fKM>7upWoUV>)qiT zM%hi!QS@fyR6UzJDJ~G{YM@nDEHcLY;{x4Mda+tr68ql?;{KUMObp*rhJy*uS}|P8 z{2ESCVmchXJQqD+=s5zieJZ~O8`u9DbYd8@I)OrcC%eH_sBPtRD>D)z2{NVr>CKfqg2W3jGM zx~|*;t(FJV>E<}0u4^lrPw9K&1Yuq{M$uzd# zW;ZyTRO95t!E&i!*sbdwymWUzl-t-!(6QmTNC50n=8C89 zZhKxR<87>he~f;+>Wdn1P~Z6Xc^0+YdFzQS1(@e^crJ`n*Bu3dQ@1X`{I>XLR+%$! 
z?Ei=SBc?(dL*;rQ*)$vJ!!-))#_%7(+$Ms6p{MahwgfvV9!W##tbeGIdPFQ3p!aFWAu+3!$n&8m;OeGs-)S7__p<5^AzpeuA!&@Ss?X_>%h}5 zej+5=4Y^EqL%{8r--;=6P8^?h=S_U(-6jIkx|Mo}w8$3a@#y0G=Z-;|Kn25?ikpM78Lr zc{69$#gj!6iI$A}__rXpyNyc@aB(0$dNTj4xm@myU7mtA$Cxe%GwJ+&iO_7e;2WSz z67hql!8`~*8&@|z67D8-@o*jk9VL3lGk|H_rSr(KB7HcM2^AeWp-U9;<^%oVhNY`% zoOIWD7ARY9h!H##=R5~shWB!(tAG4bHcvE$f?X~ntIcxJEdo7CW^pqERol}ULfAVk ztIVbypU*!eIuf2TTndeK{sEYc975(w5MbzkNvNyWNEC^7_SF#NVsVM*xQxcnp!qdR z#$TMiV?8Cz4y5T}0Te47o-ttR4>z!$5E@n983Axt4k}o{S}#Jg{t`19HQQuS6U#$C z5r^UeA!NYZ>FJh_)`dk9jDw15e3y`H$fKubjN&){64*q|WyU}>j}ryb!o>*kxV-lD zRpTr{^c^Eu+L3ETx_rb+JsH3L%OEWE4FnSXs($l5ceiPE5`H#&1<1|8J8N1@73~b> zVZx?KbwX$$+bD7OmWj#+o1@r`#RJ03_*UX>%cE=ESAo3d_=z7G9MV0ldHRO}c&s^7 z0`+7FlVf4>fr;ycP${nZpt|CA_iHX71)7q+q-uy-Vf$sc|3nF(2vNo;n)FYoY%;Rsn((oFUL3*JjUCq8B^q4N!WKU3-ZAWH_$O02URF z2FU80zeNhxg>E?ke!TC%H68N^;tsH3rU%Uc)7xRB=j7Z!}Bg(v6BGUjq-}c?c1Vg8mGOUa!}7=5nnQ-O+@m6=&toLXD+9 zHuwHGOQ~Gj{uFFbGT1o?@?fx5M+4a?dPHYwkZ?F1;>j+| zqXlI|zq(zL$e#pzv>=(GEbfcf1ewn)k15I_r=eI|;@27;Rf_ z1tVB&Bq^}E9wtOnlq<`0^cz_McqtER1hOeBlED3k0}1{)$(y3HKv78{)2B844xw(@ zOzHc@_3wN>$JmNEfHQnMgt=KYnF(${kf&RpE)vB-E9GhARH_SPB|%QZbS6sgo+|^e zOf=4)*BW15#sAVWHcU9Os{l8@34pW6^%-C#(Zx^O(JWG3|U*O>b!`& ziVHIbuB`+(FTU(#eV8P~ZNS!7!sjC92y*lHo_NgR;364^bX0m{ai<_}yLeHu5W+j^ z$1@G9^M@;gnSnrVDbR~yXN&LfWu4qyj(;jCV@mPj^>Ws|Uo_1aOK%-)zlLcP7YK13 zTuOk3b&A}Az5|oAfGpD#5RJ9*0b%ZHPElG~WSxAyNQv;DVwT6TYdZV#Qq9V8{6Vyt zA>Jw=)nC>Ia%T~ec}Y*aEP#>LCs)OexOkoX6T}F9avy?S%~RNfrIUdXZ<&9T1cfLornlYewJrO(+1rDT*v=@Eq$dBe86< z9)PEKjl%1uD^=43Qgj*9m{+qMHvn-%@t%T3c*+VgCsk`^csYJ3%)_OwtcMD4$}dt4 zu^&qFdBBEwW{o_ZDmB3?8|BFuRfMkNB5=aYmhw_XgG~-emv5Y>oQF3~8-}8|?#p1b zirJ1sIGlP{fLk?#>MA7n5Ki@gwzq3MNxd%-Q}0i-=r1>~73+s$HkAaqbe!FBROd$6 zSllkujYc$KZK9(t`)dAihgK5~cW(-U>FgQU>{i_jit6oy?=0@n)%Z#V;H0a8T#6ks zfaM2~mnmNr;BushNb!iivN@RR&tPLq!nrKj+{-04-WHGp34^&@((*il2yZ1v%l5;8o8JWnpdx z+5z1&JY{PTI7$vn#xKAsbBzFZU~@^fvaYyZP{tDnl1`3OA7!6%Xe2^ELvQzONawMV zSx^alQD1?qM%RN2?RVTfC7#7Wb$$?^2|KyGzgNkR4FZ6VG`ezs$A(2fA`erK+UPl~Qd 
zP_V9^wOf0f;^mx@VKAx_AD$$`oGZ*3&%!WEM_(xf77&&pU71YU{a*=mFU=Vpi@QZq zIDRk{%*;dat^l_JZ(ONEV)w~Ftj-*LQvBDqcY<~oNI+qYPSGbm5j??hw$W7Fb~^)7 zd*zO7#u&yaGCNF(TR-r`MhCJue>4l1SgIE&A;u39_Az4Lk;se}z*Jt2=JU z1=K8S4#Lj^pI(q_nvLevoz=&YAt}rKS0vYBRnxkrfJZ?F;aO}E3fGaZttr!EJj5+<7OU?>S48~-mePvDu_X_ErkM1LicO>V`6Q^-O0*=NHE1{SSyke z%lb-nqy)0QE=7{uR}j@jEakoGp*Sr|LiII(6^Y})ED2i_xv)*mk&u#%BPot98BDl%gzmRSV1-w3g|Or&<1*>rFnjzzP;=?j)RUCbSdg|nS; zAAE;gZjN&zH5w!7#D6HnUDLtZB+srZ1!)duL9Xw~K2oc=XOFCx5H}(nXpjM6(C>GIm?c`i1^6ueDMrn&&SC zxmX=8IWzsQK!^{;3)OMp{QNU60!gHZvjy=P9=^EZYCT!03B=WCVTb99s|9%Waw`i{ zRaZ5-_Q^5C2-GbYxql>#+0(#XT%Oz$(B-0O^Nt6t^ccb=1tO;a*3J8bpz2tHOW&5?=C^x5ffnWFhYEAI?|{M> zy0c#)O9*B72{;FLdG)AFh zxG@lGh^Bve5~wxq6p^(;kKD(SZU8Dw{qa{}#+BnaL7wNj@Pt$%pP#upr)hVH_^ZJ<^Mqio%;SOl;bh zr-Y5t+;HPonCp&th{bwoKY(X`O&`%65N(bSwEb_TWD=+)sz4}rO&WI*JyG-ZD+)cn-ZW5SNReihhN!e5@{;D})Z+H;BD zT8--jOca6trXmR^%th2IR8B`y^d!_wYb_6AGVgXmLpZGCgFE)w{FX3`)9K*}mEmLx zOU&UYzBvK}V+R!{8}aV6CD7T-=UxRbkT4p)F`Czn1qWLeM%n4YTsO3aa#mA6t-Tb~ z%}4Olee`r^>2kqtTr*EGC5MBbj^!DkP-;Usk&KhjtZ_IjCnpCb^vARDOy*ezt}Par z+y$lLjSGa)6#B2ZWnoiXESfRWtLnv5qTDcsczTqu92+ci)-pMEBirjd|4(7=s{RV< z%UJOc0FM-`CfM_0kr0wFK&A2=iv6-AoCnbNYIIz9C`nwI2ECZ@rf9Qk5$CNeBKW++ zK;7CIJbp$L^EmqO9GcV9Pk7PJc(#DBho`o#c0`{2QWW7qL z*~1gfy{K_xND19LC~+A2hYE119Gsbjakh|&o|dK(KfbuUNXCN3bFXoy5cklWF+yLi zeYA7dooJ5wL$Q$n=RAx3jvGcd3t=IyM*xqWA3(U6dZMPXcoV7n+s6P>W0uA?8m#ej zydan;=do(scPtd8wrAlnJ{9eTvp*8RH63~mJPy=(GvV5N8?GeK+^E(5i@eh=$lSp4 zKsbdi0cP(k*(bGX!^j@NE@i;)c_^6rfKXPrUS6cVK);&3iY2V!Gq6OcQ_jF*nIa#(Dh%%FiC#SxK+x+Sf@G#!RJ?L(8_Ze`)v4po(N(N@i54+jstxLU5_Tbz0+#hq=e3#!NI_9 zi%$hPHx?A)6vi$mSG1Qf7u+LFJMR(v#=YoD7J zGf!_FaBkV4fCv)I8&&<$DaiFlkBk~EE-8?Oj6<04DJ;JA_#>k4t}7BRu~fNVLwERYhquutf~x0z`7H{mq7 z9p89~0W*YcvQGe2l)Z((Y`a{Rf6$7xWh`b1AV)^rVucfZLiEHm)orU%{Pyx3;(fjj zEZ(uo6+mn@t#VG;L4?`M=0$AO)TC;Zg*z)Rv_tV{K}^rK{?T|#zujc~I(Ydaw!9LA zx{O~o%w}1QJ4KUXw5L~AbE zi(o9TbZ)yY84od;zF0slI#r^u*;$aM9qT`mHhs!$YPt^CUC)+<%18d+hyN-E(;Enj znY(d>K=W|6{q%mHC9~5NHZ=)0w_cwo!}5z{7)>{Tz-kG?m}TNX(Js=lzcrUWDZqK< 
zQJmf-!R&q`DH%{&@1|@Q3vV*XwOrY|?-c3I$YMA!7~)}}&Yfd-I(HCkzDUZ^4$T)L z?r{^%A!TE&DzdD0u}-pn ziFE+gc_frwVR274hn-KA#$$jLK!R36T$}m*B@_bxRT7W0_6kuA#a71Ic6R|gT@wN9 zhND#g746_MCXD!H_P@>_x|9^ncjq58%9s)r0qzCX?|zinq#70I(){$ekI4ykTWF)! z<{=(XUZ%t_1A_qP<0QwORAfqUSrST%b&0?|N2C>pTeC zI*7HZ8hZ-x2-PhnxnK1Nay_|ptpm_bzx7wo6Be(c&3sP*lsA4EOk`oy)E&(*C8Bu< zpaZeAz?!9Y)6J=)ip*Nro_pf7`^d~KBOsIO-V52V;}}t7FJEpJ?3YFI zg;rM!NtF4az--NH!=Khq=6>?09Mu@@L*ixKO=#9}BiPXrlRp1v7gE99m@<1svPn+e zetKM?->x=SNZ1JIibMVa1{b&s!dy<`!(k5?g3bGnmeHaTMfpJ>R-l?zT>iR5(2U8x z|M3@*IhQ20XTgS-dze35+{`2bG7VM~W~MLD8=VM%2_m}(k9y$N2kwluixjw=a~H4@ zn+ozcpPfSBJX@sEQAam2ul}pk3_?%^=Ai(p%*Ca_IPz~0u4B58ERTnzPW`_AaD6z* zN*^BAWe{dKj>3r}{}SX1Xm6>+J6W1k7OP6nP~7AhU>l7vm4{ ziywt%JaB0V5$|MC^v!4sui)snKUa89*-gh{rN;nWZmF-Ug!iz3thzdWvK?k{8{;@g&?AhZ1iA#mgA<)`IJQ$dy1S{hr7p%qxSp6G z;3r4Rr-e~ZHPDX*EP5KmY?wuF@#KyG*e1;#^5JVDor|CB!<=&3Ghklbk>`1|O5=kU zET7Gj;u*wU%D(nEO&DCq=0`xju6X4+Xm884JfVgIl|o6h$NJCb-zLX+Ia_XAUCAt)(oqIebq+Hl9vAJ-K)P1$FBdsUGp;8r;;-2O$YpX1 z(`$OGzLbBvP&+*fVx1rt(Sk*gow*vDznp&zcW|jp6EdM56%A~#Z2lx|R8rSQlL6s0 zS_LsG7U@BS5fL;fT{!QH&0qC@wcXrfm?pwwp=Ci6hx2a=fU`CDO38z0zl>H*2N@i1 ziFRSy54npItGxyU^HUh`q#GUiCx}~4EJ_XqNhI0vb*H1#h;PO)k4ZwDnQnw@Roq38 zdC@o#PU&04hg1QzOzb7Z12z6uHiOh&Cw*YFpnR8OYOMcuo@@pu2Ccih3vy2=0xK`dQmxN`<)yuH+CbJx zpzGRHNr6suR=rZ+djt<~Q!YLb?GX%LrKn%=-+7@;oC|Q9isj!0GINrI#Htt133C(5 zM8+YedrW%Ic?~L(OOo2ggqSm}b86<@DC<$G_}0gu z)LfDTRwub2JS&J#QNidYAFI`JpMbbuac@ICNA&hMK%n!RfeJ5sSLr~Zt}O?n#heK+ zItim&mk`fY&qdu>q`?r&gh5Q(<3D*3HEkRV5TJ*nS0IzLR;x}ajuXii*-~E{y@yO8%0xU8xhdh zo38tx|K|?k_Gb=K^~d1tM>NQbjdcpoaOuZ?BgCC8Z^qJnXEUKb7D%120qEZJ&`2+X zJU%Isvc7Zl({{Dq3M8dOf-rQ(wj$tV1BYZL`1eE_IgJAdXQg^F+r(&P*^dP8aHQJhV7GHWW7&2ywA>Ng!6r(~RoI&~R*)A$UqK zoxADcAFNCoYAI2gi;qu;bS*jH^~iJL&{cpCQ1CfoK*txLs7?0RJi}G4ckc;J;|~+W zur5Atx+)0ywvS(+z3*g^ZiH^7G7fZo{am5ucV~s`pxjfg#`ywWg}E4EXmPBu8jx3~ zw@uKDG*cbhw+)5`5w!hCX#yYbX zxnhJMNo$afruP#vWh{;nm357d0|W6_Aq+Xa+mv2RdQO0A)aDn5+T%rGZn_?wOL92@ z8rIBH(hs%rnoAy){<=7wUp6Y8Hg 
z*wz7bdy&sdXtQ`1uE6ju!sf@sdP7(?tQ2XQCrf{~t|6ZAS;mY;O&5RqQeK4hWtvk7 zbl=P6T^@b^4~!iplOYK?qAaNP9Bg5j&#UpK0Gds=?)=TkvBCO~&Mhr4nWlIz!S32Q ztd>}FuG;{_E#u@=Wj)~`zYW1Moh-*+cz}ZtJ5a*S8f^W#{KZ)!fzAM1XuI4*<5guq z?WhcogN${=hXOn{>Uye#eMlR^>KlPF?C?CmuK%hi*KNiSPG$pf&&K{AXI0J=$9x6K zyq#ShC8WSmeEzFIZYAE4a9d8{9fqA#gQnnctfqvn@5@iT(`y9A#%+#K31;b}DlpyZ%>tk7)GV0|Hk1B3JPTL97}C0?RnV67pN z@(2(i=PHo%g}6nCS2*C{GruK}E2<~6(EAL;*22t91W#uAcLlgKMk97l^}|6$Dy-gU z)=w4S+_#+KqkHXEK<1Ym{8;!{ILc8KOg}8g0f!hk@h|n zd})>=KFX-;>(x7;xg4uaGSWLx!64H`E1wbK?4+*b zz?UpO?+a$=C2h7mcu$`UV8+p*cwZ%5*qS?m!lQYtDSZ*5D9?hbWar~{263x-TpDed zd;k;Zs-lU)vkc8e!mPK!`jDKDRFNzP_dh`-xI#}?6q(uu7|l?eB+%(_1ncE8T%0V3 zHt4~2HoSqjb{7B_jB>!^=D8`K!FZJM29C`e?Fz=YOktgn8K#T&cdb_U<7ro5>IM9f zFhUEXKq5-(Qtwclk!94R2d}8WlT)35Wh4G@Xk)l%TVSYXlCylgG@W$rUK|lP_jKV*)e(E`222a(-zvYE zdqk6+u9o4;g%s8ihkwgysBF!G*7-4gcWAFh^L2ec&K1BMAP{_4JS@UvZ4M6+vvB-P zi2J@)!<3b=6ps|iP#SO~B^Eo$yyq~Ay1ID}72h3RSrP6QvT(A0vh%(v%vRkL(#E4x zirTO2!9Q-NbiCM{&~3EGE`k{qv`PWWwDx{WkohU=G$NIb^xw}CqV3QvK&f#q6GY!= z$HCUMaId@sl-s2t1RcqJU4ls^hZ&asGio5+=uUaql##IGEP>1tydPOf4x9m%HH>AL zaAw2tA7SPm8mji*q4-pY$FuY{924Sy#fK6ad6lOcYt@p>^G4GbYe**Pm-UAkG8dH# zyQ=Q2#U(|0yV~Y&+18Psb|^3zBX#Ob$TXBux@wW1C4kN*a!zk_i!cva@L4TWEZUQW zlEpB7_{{x1XXS~A_mJ$744UocfRZuSK5S*2n*kU+nUnCMU4Z$cfbYoY<=DF(jPXkJ z06qK@^D-KpVp_mQMVsyYyl~*d&Q^N^x|VQ2ac*Vs=@m)^^m4JrWSl2}-dV;HR*Fp; zpeTt^-#|W55$%4U8&I_m$M%g*!bu6?AB&M_Cz*rSByYMHa?vPMlfeY7O0yEWWiTK4 zgiW_y7^S0pLeRRQNP(gfez1%^Ak1SUMJOAHRp;eMi)IFakAyg9sU^`m_QVsyTrKWb zvg!B5JM;4o2P-4UkMVp{fiM`!>dbvW=tUL=3}hn&##3PPML8;SO>8xBxsDO`=^vR-pB?!2IF?Br_+tr=1>XeDuH^8Z7y!_naW>RO*QRr{PU# zhqNF+ufJSQ0|!zz+*b=^rh2I@jyZ@lZi3E%3iKUowt;z!WJUt*plb(^2XR5KO zFb}we*d2+AQGD9SRTgeJDzr)w?}U z!|S@qSv)#`*!$S%DWAIyRvkukf};$3{>TfWq~FXe1?%Bhd$`DnlDI6vYFyU^l*vx% z#76V*GOqfMEGDZftE`}@r%VE zu97b4VqcC6d-DP^w++?(exb~y={roH677BYSMxi@;OJP4334B&sDwH$CCpVtZc9(! 
zAccGe%?`8DStr!9N$!9=J;YFCfA01_8P^8UC*$^WqzbbjBh!-{6S%w35&QRp(}A6i zGxRT!jImap=HOy!H9ivH_H*n)R}uXKAn*-;t)vP&G@}WXdpox9xIUwq%AMu5q4-I~ zS>g$aL`9Kj3UKW@G>OnEe=rC{f7PeUG2cHJ%CmX~bE_)tS^TO1Ekm(z#Ek;UN=|Kv zqms355#(9IqpAmw#NPza!X~2ODggeF|L2I;mEyoZBEZ#|h0>YbIc~1{6P)(tk@&le ztf>+b7U4&vS<)RI+8=|RjjON5|-r947u^biq{Kh5f}ayc(Qpk z&z)nhM1bR#f_PIS)br4UsBp^|Fg!!jVl=lO;?EXghGaQrR$L>*!wV%34`N`k-15*I zt3e`UCRAf9VIFem>v?NK>!?xp!GQWTjjL^oq{Ao-yW1Eg-SI7kG>p0sJ#+q}c0Jnr;kI*7X_!EI?5F`2{vb7%p?KW^} z4iy|hkd6W}Upf^N0UI@|)=5H1$m-LE8Bsgm3xZ&@0%RhgC%eVy_rN>~n~=mY)fWJR zh{(I!97782vwCH&T?B3GmYYlTiE@owNQ(0zqvt$<*^WSD=;XV6zA%r(MI*#1>WjOD zxE)m{pgQK z#RO(N%D%$ z<))|bkNcU>Tg0zr&av4Fb=CTD$dK)IGeORx0f~LIXJ{gV;@0}dTv2GGLGA0?2+Kx* zUUQ5C1-P8FTce;taJx|`_2hB3CF-repX#hoOc7ZejS|>zPRqX}0s)3mEip#nF*JoGv43`)dsB4ro zn>sqxm${8{S5azZ5REqGhE0D)GT2wk)`!u4Ymu&3EAFn`Ac|>%=qyHJ^Q`*1IN;}b zk{0>y_Eh3NL9QqU1LDWWljr5g+zJs#D)H7YKwL?220s_1)^`f9O1Rhb?m(<~J}Etj z>IfP$7V8UerRo^_Z9GAIkRYwKDfr_V*%vHz=}JtTenI{PMk(CT%5lbpKs1za5|YZd zy~r8hg~|{x$g9FARSO>dCh6(5i-Bl^W;3g*v<+zc1-iPk>YH0y zW7;nP%neit%joUBgt#xH%R#0biq-;4uRY4Z@B?A6n*K^p32tyno)gLp={zqLGO;Z( zG7(^;E4favIaJXdJ5I4RmjY2O9`CTq;|2jvlRW|y{}BWid*mM*U6=U_I=V)$^2PlH zfTJ^a#O}YG6wIbMH4P2%vIuwRgq6A!-@gJdtH>||3X|+-pc00GKAu|pd6p{0pzRs# zp+7DbMAq}>FRY0#T$yK$+{ObWIsvY8bq==mES7UN4c}Iv`N4SV>Xa&i8Ju7kxg@iZ zExLweo;PjSka;0hQ6?U~7S!E>7>S(#S@bDkZdEH9UeEP^6bZ4?b8`!c_;q2l6vKp;9Xbu8(K|NaV+YVqt_(q|ugJ+up($qvu1%ni6N-vsEo`j%SS z&^8h3K~?K#XSNh#R?`?nvW`fQ8$|Tuv@b`yl6iQdH$r3ABfu=HK^T%-I=*RfzhJ7` z$o9aV+Hy0btA_ZZj$)xXLZI`~!vK|dSOB%s#kH;!&*EP~XktfZ!aMN)pfhb4@f5HZ zNS81Drv9M%^|&P^pZwvtH-i{}OdNz3{3UL20i)fVX$X>rs#%Dey@)ZXV_e^DK(4D+ zzu_Ki?FO9=;RffG?&w!Kx4e_P856fhDuSI-&qwJLb8-RA$=nA|6XXu4Lw5{|-YF0c zv67Qkxt%mLo^g&~R*HQ@XI<&5Gs^rSp>Ud;A6jxRx+722hUZiYpM$sIouF=0dV{SK zw_k&<0}W!=j$tKpkF@fz=U8lZ7XXb>giZVoqz}D3gJ5fG1;-cSvfS}&rvk#ehzPpv z@kc=(J8ZQQrm@Tym}5TCZ%+wx9TuT7L&}(ZcaF@^v)a77C&V0`!Qj%3v(p5*qQvds z__)S6K-R!^-DpCM5nTdVkeD5%?%44jD9ax3fcgIaF?R0ZRTbCYpOXL)^yCp$t6) 
zSkulzD4fB1U*kZ4EfX533V{}(@_UMFrOSy9CS9k&;ZT85*RQgZ%8fK{rhf{Up!iH-Z7ucf|3 z9j)``nDPjVYA>@)ojveb0;A0sWarX3VX2B=Y2j92>9f%V`}^9`EK6XeFIo0?!mQt6 zH%bluBEWiz6bOhS;qZLP>%dfu5rKnw{!Rsr=2yxjIO!h|a!RM3M2OenybHfWb9O#a z%4G}?e^Xm@G;*bE(7hBP_-lkEG)!E&&yrO4LL0|=bcYaY8+PTXF<_~$c+*fFk;ctQ z39IxsITpL$S-5T9zu;@iqVnI@K*^iN)|(Mt~$P6 z2xZWAjTA@I-z5y|a4JftbF%(U&jEooP42*m%|dAE|g;Q}Mz|?x{Em z%CD!G@KJ!G{tIKF3Dg906v{?Dd4#jy0j2D)f<&4M|9xcXu3acl4QV6+~$LBKmnB$pGfHZy2u0$zyi$X+Tc|&Ui1;gc^8U(x2VP@-a z(?~&H1!u1JI)z9f zpa1fqQD$yrWv?i94yH47v>Hh;`Tr3G12PLTSLy^DYMMT@w zK?n*Na4RG$Vvb7J`o|R2DuX(U@sUvjTVg;+t~=LRC;RORLx5b)9dSzi4+>f`VS|&d z`l6uLVFqU{j0s3n1HT59%zS~fSBDcY%>fZK4-SReLw zwRQ`zanrn#O1!`sVyOnHO*He1m8#XNfL6d;q5xW){*tKR*59ubjz$YD@QeJ442zMi z{S|DEURT(f2RV;BzBuq*<}dAN-yw8rADww{f)IEhKeM2~Z#_2k(L{z}*`|>B$3l&_ z!x4dMwr4*PW+j|mSv)IlfR(Ubn{S^+5$J(-4agw@t;}s6mD_brK?U+B4YQmg$nqM4 z#dj@QG~ZvCB|QdS@YM=iP*1pHr?fNA^bIHQ zuN7CD$E+m6F7xw(jZsIUspmWL2_PlPmKhP8%WbTRW@Vh3tSM$`y<3P;8Ry@@m+=Qt zB&Gu<3GFd_D#XYvIUKriF7iY$tDKHA&Csr>%06GXl|+MT7~*}4Kucf}#gng3|E@4A zO9997QG=az5{Q+hkP~Q+P6<57EH>x43i}MTc=yN7`=bW& z5F-c@DtySne?c^gW@F7rVI{q`Enb#d0HNiK?-7q2OLsmVt~&62&IYgw(VDYZ>d8(g zpyWU^u_GJS^bO|_5hlQyInGYrR*0EkoczRG+@AoYj@OoBx`p8y6!@z!;}QwLsCn7W zd0im6qcMvIhtuB(u+bVhXr9kK*TS-P)-*aT3SSwo2)m2pzR&xplgbPw?kj~D#gJ6y zvEHfiKRYjl&Eupu2O^S&k+p=#T4rv4w<1MdiB&ZRJ0VX1O@9?U$%gfqL>vEGMYo(s zbDta1^ix7e5fcogYsk=hKq^WmyRKZO^q3HudjvxQMEkRX)^5_ytNVQ<2s&ov1E&3d1s`rw`t&Fa;_O8wz510aj)d7E+Kb z6JqQ>g@cO83e5hs_&)YRa3e-Gs_8<`B^LY60*$on@3N5y83JZin8@l(CImedjY^_C zD@_#n(-ovfxeS?pkOD?^77Fz4!2;}i?3aQ+u`EHx1JiOcC;FVWD0B+uvT8r%B1=Y# z{%~%nI09%*?82GCe=Wc&z_~yMMs3bKG}toZs)69lN+h|&QsOk1&N!l!5n!XY769fP zThiBpOH(Owc8_JCZk`RG)n`_+vSyY6Rv$!IP5^O!TpvP@EcSfa@3>t-i&0WpjQKrh zmjux+oQg=!2$T0SLLx3sN_1cdx_yYnqK-PEi8b%rLZZy7oAxw z@T0atZt>>{Z>4-NzuUFO|5%`LUS6VvNzN}4Vm&LH!Bm!OnmlNdt(Wu0YK2l&OHaQ; zkdZlo7mTFuHzF0Ml2Jz2_T3=J@(t6A4N`3&YiSMw$yoJ1ftFfsIE?z30Q#BkC7I_t zO z^ww&lgTi1v*4x^&|1b)&>A3TWa)0TSyp1TPbBnv~Lxz1ypp~fvy+2b4(iE!$@Hgav8MowcxL0bWU?Xs&aAQPn 
zb0IFKJT*R`5z9R>E7-^&YZ`2vQGg-^;mXg~_g36Eo?*3}>K6;6);Lh*h=+f;EelC} z!Br_{IYKS1N%#oR0jX2Q*?019i&~?%g4SGR&)is|xcD_Y-nArXXu1^d^k7zGM;zC8$B1VIrM1*}mc0Dg@qH zVzfOG1f$6;Qc8&bO(B+%U|bLeZ@mhDo-(2y@xlLo8li}PWj`jJL$4o!8IeZdsYpu% zuH>02G-@W@56?j7-w;OP64@mX`Rr>9PytHo@gqJq0)jhu**4uGfNI4Hbs$@X{&YHs z)qS>9PcVJQ9|KX-++^*jxnx^^|=e%Q9)RuMuj+$wcyziPqlhEIl2mV?Ln(cNV2SWyd-w zL?C8B?NhXvFmA2?xgt^&*p}p_bh%$hD;;Se5#=>u*0q8Lz16b$^XO})7-^1RyYymW zS{Av{)N`#Ms8dxr&opqq7C@dTN!gatA~re0zG~Ip04)$?G%BD+ao@>=5)g=9SvV63 z(SNF-MbKb`*W{N=EfpGfRMbgMPXa}i!Le^xo{bP@MI6gMP))qvXSGG6ziEdi;eR2_ z(ywLmL6y6@?ZfDzIRB;~8%3}Rsi@WcK6Kc3X8~Jt%KIfe1Ka*XAPGi}&R8gJr~t8& z8M0g|h%P{>Dl1q|50AnlcWw(8M&u}LrHs$P&0x^|jEd4B`{2tS zd*5&BhzETzv1Le=`C5-TmNHGreOjDqoJ*L}a70SZ6XO1T0mcI(Ie8hJ5j|gT$&m7u z_{8G;NI*~WO^9g8f-T5f zRW|~_zSul)$EklnK`O$oI%g~7m%02V5FjVC z1Y$JEk{7ygqAV_u1}LtspTqd(s}yF2n8MIe=hGJxG76|87|$1f0Ob!{a|!Y>qP4U3?{DzDZcDzy&WPJD+{=<1OKpKhN6xe`pxZFBPcY4n zi3aZbAwb450IRaj*p#Ax^A`&v{oo<&Nq@#|1g&Oh)Un$@&iCU5k`MRUlHo+fTa5>8tEG6m$;m1DH<`&9tO2D1|!PGH?3#F|M`dNR27_!StW zWQIj{^Oq^eM_4mesk_wp+WSC^$MaB+%?f+m3IDJAfvqjCqL_kSm7HeEuzUnu`FmmE3y{rb$sR=*m;S{I?%;-?aHGmWt9dZT3 z2e&3E7J&Isrq^5)0f`EfF(pj<69gDpaU{wtpv>ocMGmrscSRe+&Q)szIWu@zn}h zN=TN{5*7Jpg;{xn>jrwrzXe&JnMBXWBI>t~gOIK*2^iNl3$Q4K$;v9Ms2|<}Vrk2? 
zrjE;gaJ~y>-2?SJSB0VN)h0l|Ne=h^;iSHAR79%|<3P%r`=^4fZ26cvOLjQ!Ng%Zb z$`R=iFHqPD!Z3wvZvXeKAVvdZ6h`=Se{aA@oV#O`R^bZ-L`}rxp+mrTwn1Yt0@DJg z@~KohS=d1*{i)jktcD}Y>PoZ^>o41GkyN>0GBiQ}X|tKhW*3$M#eU7xK=hAE+Jsft zLlmj^cRvFfO?B!_Y`ijw3AB=7>xD3fl=>`?8AAITNS&13N5?F9Gyn<=*d1y=Uujwr~6+VRB|x)?SOseMAsB7s()4ely^w zcDul=p~h%uyml5F)(L;CU}z5eLMnFMx(Tpm$xd)U6iYcFaAHBN^h7fivRX|<2xV~L zq~TEVnA$QIB_Km@{820L1{eL8~PYp(#pNUVOy zam%Zg5MSlei`cdHErP7cBDZ@w!*KatP;0Uh`52htb0Q=v16=#aYs8^8d6J_xtn zD%5(@3|8ntsDN<-Y$-|sP|WJ<4#hM|WN9fHCV~3}MpeRTFyqHrZ-Cg?SIDuqq@Q;b zpnFaIMQ^6QDL{xt=6Fho6+(9vhpi-dG5(fC2JVPMJk2hB2V-=`)xq0N`7gb{JLEx#O4l}7lo7P z45x-$6Cl6qJan-irnr`MFwyka9!Mp}p!ont1Md++6iKo{%)j(q0HbI^*GvZ!uX_X< z<;xNn?69$P=sh57IkOTE8w>$Gxzq8`0xwfY!XikC9rz0GtS(boKGM!D?_07nWVvQn zg6{Z-LDJ7*{-mCH_CJBFW@y|sesR!An5Cut^U6AZSO^u#EaNDvB=zGy0H8mN#xkJX zUmk@xfk2o2gF-NLH7d)}a)0%Q1g%9fv4v%wC-t@ewP34t5&m#c78=ArQd6~ao2Ja) zD2fT{nXO%xnsAAuUKkk}VOZyB)$^4=bDdP0_{)r(>{^jO=Xq6DF0SlZ$vI`yQ5y4gi=zM({+{Y1Ay7oH?l3dE}T1<6VAceqQMd!e4{P551qcRjPj2P<`H1=-w zMajr_`hP&J^GTQ!jf2`C;1DOA00^44A&O~@8?Ts)n%zGrgdDSUT24oqz7{|+QPb$C zMvpHEGH7IvpE}N$D8T1I^yE^u3ZHQp1lsC^j+_uuu@eN50Oy+U?B_31kPKwG$*wohM%c-d4ppy=&`(FvQ zX&7raoh7u(aO&-iRg>%aqU(D?^k_(}*S0c3M!<|{S-8MZUiT?g=ngldR!5z-Fb#k> zY@%VR$NX6A*QXnl7koklp6T7rKGRT%C2z4mPk@>eX90ZDsXaj}5_ah98)#c&x%&Z3|DgIpxXaR0V9Yt7&Ga%j$lR*sa8Ame~p zC(fx38^z*+eo;r{;HW4berV_%;WU1}AS;X< z5C*sWL4jYQ7giW7%GrNn*!x--l+>wjzqcy^E16|7S9Qqm4ho?PFw(}d6jA?mA+$Hm zF)3JSkL>{fnWV&F)Edx}ppDX~r?~o6VNzhV$mA{V+#4YhqT&p|3BTK09Ma_EoAh|E z2r&{cJF+aP^&hoCx{gU6`4*m#`ffxbUJmp8q77l&fzv7ek`q%YjjLoyfo}BilTr{? 
zFySJr02yne4m3kTi)UQRT~b);QXht-E&AL@iJnUKgY1d7C0T>IN3NlJ^dcIBn4_a8 z@bRm?Eu2FeGg|uJ_i0NeILIN+Pfi81;eh6=w7XJ*DR6jW1i`2++}$!?ks*Ax?KV6Y z1%tPux&DSTd255#q&dN9#MrY^NI_z*6p`bEkb?H;Al@Vejl-iG)|k5})nwsOV+6N7 zHU1hw#%HpN#7_vGRn%v>N3!N@1VXe9xXMM5`wIr?x$wJgMH{D^ai1cPF!Zf=Xl^#`E%+Cr2K z6!&r8D+tb~LAg*t0b8X}6)uccMJ(-;&*hbM3dDN`XNJ#e-&ZI(h|*!-Sq2DWvrqbF1dB(qVk%fzNp$6m9fr|+n)pU8fj%kom zufd=cLI=hQWm7#%fJGaNZkKsgJ!!j8=H{v6CyvebPb*~A!>@EDK0qrBGqR1SW06oj z0|E4z=(9QM9C(Fn`%Eh(q-Q_lQcGCZHWc|2hFA>PO)gnz#<(9N3~JAoi$b(fL+yi$ z&%c1%4)0b-O#yRY9rB2Rv?huQ@i*R&7FxywYQgLaq?BI?;*P#j^kJ9?$YFQp_ zR@lD5#)ng3N&ja-#^t5SnL6#zc?1Xyjg@JIzghv3j0^?heqUB9{?%+AXc3YPVstEF z+Z2{VGHez|&N6?fL5G=@OW10M@|bSR1_8ATFpV@3LsXK=R;<>}fkKI0RKu*y1$I}E5>R+=t>Zyg@y;aA9=7qlA6RRI2~giqOj2- zhaC`;v`F$Rr@Jcv`9HG+(XSt&pr z{1nCgX?dW=NbEz%Ukf&e3>o&qii0~-j&RsAI>fiCAPO*#%B}H8C1MREZzw@C{-iJ~ zWle=-`!@y9sJL!r-uHh~&j()@JaRPq1W0R-;UoD09k0`ws6xjBUt2 z+&(49%2dN8AULJNoHl?~{_%wfAK^~vdY@iET%!Wk8BE|I(GEf>4Eq5XBFs~w#|Wah z9EwKG;73m;NNa^59A6WA6gDOz4eS;Dn?fu@?Y5c6C$uk9>_cW!S)BOTRKi9pqRG8e zjnBLa%qoEYNrpl#aQ6$efrQOlM%D6K4N-A6X`R>g3Qb%Y(^o3eIdZokL&JC zls^6|VOBIG=XqH2any{<$gj3_Y1NH zW@KSS*Y_F&a1BdMrB0p>014SVmG`#JKPCtXIo%ZQ=4+0^jD_0#^8ZxW^1vRR$+gxW zjG`c8h8^YqwngC{$|BA<_+zi-D{BZwLQE5Uxe%kksMh;2ZWU&QWgbCM`|Nc9#s{o+ zu$T!V(x!MSu>vL&I?|>>5DLxq2dm!_A1fkEotO*J0{9^c@*1Plup9Ay6gE%j6@E=I z@2zF=_`;kcf0zNZmX)gCIwLFCfE)Qu$EAyRgT!=HGCLpP!~z=uSXL@+t3m;a+Q|FS zLqaLwEXEJm?bixg(?_1X!+QGD1hC4jeRJl_uV)Z8+E-L!`pFdV^AaEE&QkJ7Y!|z^ki3E#HEQdf8V5#Nu(SyVy!=rGQ$kvf>p?b1W`u^sQwvDPCHMJ9z;X zDcq`{o6U}jR8XONU+1(%VuVxIDe^xNX7rXCy^;)nDJU8?H4Ftk>w|6af=4?fzQSta zS#h~<6?YsAM)Q10O)5bw7!Xfu{0)LE!KqMFdH`I6x5bOIX(8u}{}2QfOY7K#x~7f* zRV<2gJVTB;7DNDUev#|xW19q6ojC`N`2>UDx58)_JU}GzqxHkt1fen8>RJ;+lxu|0 z$AVT-Ynx~(5O6CaI>ADjqSabtBl8Td*sk|C>uaNyD2Nk&KVOiQf&l|tGC206xnT4n zcx+5^PSO`fVNNQA9sX@wt@IBHvdp=HqNL6by20Y$qd*#a zbSubX|py15V3>bMg0z(BYG9Xk%6vS!A9D;8sNba+>oS+xh_(KswdBZW45`X_q z#GwYLQ^JPW12+@4!XTru8ULjKvdEOk%3gefy=U$(WZhElyEj?_;Vv>NI5-MtGL7Lm 
z+JC07(L1YBcS}o~H*`LrRc9iu-2)6m2H4_KOC)5fX`s=)fF&KJbJPbu+?YHs1T z381RMJ8@XR9*m;Ypv}^DhQDUTn{q+}>fHg0+sgHh6v7(s?JO0O;fpcp9P656+oEhTKl zprJ9B(MtVXftC`c&|z8Q&ujuy6_e&dw%CslVxO{6LPwyd`!jE~Xf$ylE(cgfo+ZSh zg-hwfsCu42?i%@-h1d*&0236l*!i+w!yYR_xN9mJWL_0Qt~3G@H6!x(?*y=N zj^en;jQCuC{9UPZ&;(U*Br+gTCr3a^Mh>t89Cd&?)OWhs`b2$fSe8y1Xyg2nqY_M% zW31~J3!$y5*m&|EDnLgbS&asa%O+M3q)oJu&E6}fIdtYV0x35blLiaa{JR0**0DRE zz#}&sSN9OM$`_%oq$F<$v9@I!zE17{lK$XI`%I<>uz)}I7xoz=ci5lCenE)Ubq4EK zZkR*ViC{~O)1>UZYm^OfOJj{6^&8*#`j6`^I>UEkBBC(uCvI}kr z=PC~iwyKTC226f%{GN!C_ln|JK>F$d>$Twa1$61 zaxZ9=txnL#0I1DOKhh{5;zvd>R`IN~9s;xGDZrgw6;@5h3W5i5(aZ4%|F^anupw$M zPO`5f2AO9w_0gLdJPr!6q{D?#;)pZXTRJs$+yukDYzi86#>ElSeU}Xe=u~46(+gSw zwl)}=>;3HlEY(a5rNZf>w1>frP{Fw?t3Vc|xf?-STNfwD3{&`=5NMy{Js5o=biQ90 zrQkTL7R(j?m`4Dp3omi-z`){TzXgf3HXOwc17VQw!waK%Hh1inCVgB{>3N(0Lj9TW z+c#NaaCW)u`;$KH(G-$XjP|S8rwgG}q1voyWbZg$7$n!N@E7&>H$Fy?M$wjKg}j&H z7)=mg>CVeC22HJU#yt*Z6vb|a!#BT*0M<^(&QRcg0*qLb*kt0kAuZ_Bx1_$~<|3?x zvISVhBu>)8zY$^|G60J-%*Vd{lc2^dI7FlIaf>g=SbUO{wixSag6kwW8VxkJqxqx6 z=;?)9lVCxPu7;h@Uc#sb6C9c_yx$ILi^M$6xF?xDid0@7nc@%I%2!qv4t$0IY?B}> z^wrQ+hS7LOd7}-jh_jC-4L{C3o+7R>Os!ONoUN@s8pJG!%T4HnzQ1A`aYo@~6&@-R z!iYUm?rksMMt~Y28A3_=j$+mSA8NX8BJ-#ehz>ix6w~5o0s6#`tamtTMU?J{ZjS;{{R(@#4;3m-(xs zu-s4L1kwLhVR$Y+lQ|>l-&4?9W-9ibD9(!gxM#qug!BRgt)%~58^m2b$+G$i-+KqK zDBvu%VzAev^^f}l2n969R}i3aXDV^+A+t|j@BJUaj79LfL|^1w2h8J76mGSfT#Fk3 zpDDmffeQpp1%8Jh>V_?f7}4MTTq;Vql9{i1KMxd*y}=s7Y%EU~Of?z9!|8$q6Y8&D z0D}@xsst}=Zz^c{aZEm3W77LiDTsEK<7GcdkkKoP;YuCVPiu>YnHj6EAd3D>DGpge|A7O#+&wh#2A`S_PygBfQKcmI+sHj2fr?X0Y-Sg#CzL1js%rtSO zF#EbdSD%^7?sIm7MqE*>BVH2fj{+qSm(7zlDu6cP$wpqD#ZBYZUr87#M$pG*v$hz} z1{L9sOX$4tm;Kx{BQw@fmI&M9v4%Xk2 z^Ip*lh#F2Aam8%HKl%oUrK+_Vn{*Iz)LVeop)h#DnQH(nn=GtR&``>La*U$U1{5dd zv2ms_BWj*>>2$SeZ(B4fhy6n$xX~{YMt8(;nn3^=`5nU6_t*rK(q@L(`A6>pTfSwL z%m^HNg(Ke)YI#T$LW1+33nIO+8fW;p`tO9P0b_D7X!q9%AOa`LwBYe~M_~-c=r}e0 zviB`bO_tH6F$qD&V&$lxgY{d$ENjLCPLme-(&$U72PY=yu>)Bq2=+$tB;SrVDM;Sh z9pUmTzg1ymz=;fymHwQ65TZ@;w5($9s?588f>8K52_3_uHhCWa729#w+L 
zCKn@8Snq2Et-P!cCFv*q@dv>uZ%~)W{RXD(AEu(QQLoNB&+ipx>^(A}i!&mT`o@pI zjsA=hXoHxo=J|jc&#=*pFfUW|07jINNdUrZhmZN%x-g1A#s@z^h!rt6$uSzPGWAP8 z1+&x&aSsyiZtzV4kFGIa!@su*HpU!Nj{_Kgn*b{y_G2_`*f_saCSS9$y%MZ3$P=vlQR_U;BpPie;EQtN!3i5b8;@n(>o_y9&QUaMUYX zjmCZF{{dR5vy-?<5*t4w%<5l28gryxWVcl)ZBY;|MWqkK{j-87Zt!cvT8k;*D~kl* zVg`hcj@`i{K+pi_VlG5&_l?CsRt*-;^wa(BsxY!3)5HJT50G@kkP%*3^N$^!H+oV| zwn0`2N$c=7YlD9&0jtDVc2E(`X8FUyjFhZ3D;cn#je*#wj2MO3tYanhCxJwh?HT#P z4sQs7;&`rQvj*Ni5CPi1L~x7y{51Q9W~M)2_~9RmkZ=7Rl{1_$Jn# zFqw0cEz(6~IPqQwK${t;J3c~Lf`l!mZ}oz4JU)X3^1=)P6o@;axWt(u@|9x%th&W0 z9k6(*@ZCFsS#__D*U@6WNQe<{bRoV$ljZ)*E~yXkIAm9~4G=~_F_e%7)vxPHm~kYI zfvkBKk?B0#Yp9eoYAU^Jtxt;s^H2XId@2XzC$XB=EgImCVCBqCYY4x3RN zbk?e>suyr8x0GObMEeMNRY;~U%W~H z^@3?RSQ*ZfgpRiIWN>SXNx>DY-+GFDO08HRRVDoq0Z_T5z6zShdlRw_htk~qe$DfX zgj(U{MA@z*381wq=5+IzH~syJM_Qa7<2Ie&C}a^#puoR~AgdhCf~uMHX8GQIz^sh& zjz(*&6=HP0vaW_1CH+(YqiGfj-C3+u+I}WHeWis6WfxCcxG+rZDi+4I-13RfyT|xLNp2PODM^60^sW^bQ_qCyx2FDh%Bek zXX66@3@{p82N78iYgi5X zhJ&vKQlK2py971I@UsDo12T2@Aa~E60}@Tluwk+|sEIxh3VY%9mUcb%`<6_c<44#L8IBSd1c&uHH|ek%Gw&nM5jA zJdPZdAm8H4)!!fpQqTuv)gA`!TZGYHC)Kl9qI*88uw}s!0Hy^rP=4w8fL6FMs26ns zXt#ktmTQiz8MxI;x`Xb5jbk`cJfjScR{-RxX{y!dMb03@;G&UO=A!k*Z{&qwmTzvI zwo|wv#z4zA7ylS)x^oo|!g2(KOPjt?2=UWz0}J(XL#;t3!uxRcB*ins#lh=M6LBR6tt>ncYt#_*fFZLl2JU&9%b4) zPeF=}b#+*-UOqIHIKx0f+8sI@%SVkql0$|ilE9{^KFL@Y+iNeUSsjKdRKr7ssi zQ_f+17+h);`ni_@S(7s>Rwrj_IU7K0h};CeY%2T>ibkozz1fJ6orhca;tT9r@R>g( zY-B;QMf|`v@GfCw&umvS3w5slxd2nfj%JR-6uxzt{42dN;vmjwwG++~*Z8N~USwC! zf*o6J9Sbk#3#gkT9{>se=@kSiG3TAN*GV0IpJ@@ebfppll9p_X0J|5XN8mv7D?+U1 zd2)mnM(^C5)~_&>tMpGrK`v$5zI(d`tt(8V4H4M=%Om*!h%T=K_fkcCm}?MwNkN~5 zb~SAjkP#HEv8M7;A$-RzN{C{d+r|`U43HIQYLc5q6WktWpk? 
zbwKK|u@;sAwR|PMp)j8y0oTn!l7(v<@_MP;YyJ1efzw1%Q0{yXi|?}q zSQ#w2B=hSyfyi_NFBm$|Z*Gghb{rxil8JwPynV*lCyTy{V%BaxXM%zBFHHC~$FVLN zDbV@~(@m*#->3+WLQ{vK;yzb#DV=V|k?z7T7GPY@>Kp@Y*+AjhWTK_1?ZL<%7ODRu zp?nRqOBYw;e^&rtb+eMoaCgQeKx15q&CCK77(LmS*uAnEgximw0sym3u3%}I^y?HP zN*rUBRB#m*Yi0u8oQ zDXL-6N`ylaPhJIJbc2kuSTISE`X8aNLZMs`kkHht30tm=pm@E~QuM{?fL1yQbt%YF z;{+`3NZ`L>i)df7xxu)Fv7&)SAaX1ZySX7ni-?CCHVZU)UiRZuHn0K5v^mal9gt;% zo`hUbKE(Y;La7^TFFeI)B95n`jl|4>IX{k7Y}aB?%OsBh8r=y~M0bIfqG>#t%vrk$ zCB z8A`F}Y?Jonw?-&5WYE<Vsg4<9*R#pZm?rD`f5k~$evwmWUu`5=ohA;M?3ZdB$uY)$L>ShZw{WClj z`x_exTRnB33YQIPcdKAQ3e}k-)8P1JLaj=OsE9CZ`vuIJIe+5#DWkJ<{7>7W>1uBd zT(f;CEE?~1O$Q?(t5tumU@EV*2qQb?>ADbv@`jm$ZhiS8!f=QBz@ElE^Zf(?MBx4( zw&E)M-o=E)rqYzp^D~Snz`OZ(S}vh~1g!}Qpbi;!KZp?*l?+QlE_u6L7!{HQ zl}^W&D@<;z+gg_u_X{Go@rA+V-JU3zjU2{u;oJx80~jP*C%ibea^)HzTNW^0={gpq zbPbc?g%W?TdR{OW=i+e11>*cpE3FO-f&!#iL$@yT7pygaV-0q6`}PQs8aSMYE_{d> z)RjK1b6psK&WjKwGJ{PIrb!`GLw>|q0X)Mp1 z5j9K8lfk;A>n339qf^=4XI{V*KN$3fH`bW!DL`N5%%Fn`FO0v&B}&-S@;#IY)PWRJ zh=n4;_Z4K3CUUns6Z!>0tf*}62G>&b`>TXfK?=$8sm9M#n8unYlPwkn3R+FXQQXI! 
z^rt@tW;F?pT!O2ITAwMHlyIr4tK!4Mxf06tR>8a=L0D|}|8(0$2=b|v_lxhH2sIXPgmf8N`9}HpHQd_H?3O{Nm z2ql!pGYD;P?Py`vMENvP4aRqzsk~CCQ9`Q#^aJSI#t5`h$)L1Y$Bm*CAvL$3pu%9> zn;AvRmm_f7XE^1L_9f~z7SX|-!OMc6OeIVSgMX$V#U}>W%4sJ4V3$RxVe<@y!S&A( zG}2+SCdwoFCj3o;`A)(m#YbR%J3?BGViJDu^TeUt8o=q8>h)i~00hPHZHqS}a_jmc z0Ax7I3prf#xKOL{m2o*>iXRP4%^6X^MwUTVv`mhEu|^6*^7CyU=Otk!Nlk*NbU{6n zXpuknPXw*aSPDwDeVY)=Fkjwsu<3awf{_00P$F)_1d$9>pUJtGo-f%)G&6eS&`s`X zwir8`*e=7=D+(5%5T~of{%jv`|7Gh3KM-WYKmnovDz1b-Yd4rNL0sqAbe!jCkeyTX zG+OC~n8rj9RCdcjiq`i=7kt<2m?UMX_5B1>YnB31s6+hkMF{no7%#7(C!QmO8e-3d zRO2)Eq)L)iX?H!=`acPTR>6mpT_-pFFF-cd=Caf%=AQK0d10)b{t9obgq)3zu4C~5 zHF9UMYS*cj`(7m?{VZH|#%h1=Udx5Di8r~|#IF(rUEn!RFhzb8-;6-wX)=8x$p1fo zEtP8xp0C3F#~h{+YP2qbDB*nny~3!lL>N{xk1A-SXDC7X=McKzA<#;TF*srk_gB1b zpJkQeG#x|weHH^>FRWq1u=GIe;M#Xkk9iW(g0LyO)dOlP1zY!txuuM4mOPC^#N zZkTHy_JbJ#!^Td~JR2HyTe=@@*+ub8;47`K;+9kM|6;fm9`8|%tu?-@qFOdA@|lkD z!`WS!b%}6sXr&(^1TJFGmguRiRSBu1-m!?ZW>~k)GsM_!1lk5!DM$hh5viS&3zGW% zy68(Ci(t~ZCLSV-d@c-vXno0|0)3u;^gSS$ATbBqQjU@%-L5r^u@r+jGh7d}oEAw@Ad`tVOI9$ic0Ou{e!6o9NS z`DQSDP9dttRD^pHN?zih`jpNoU4#Tc{Ot1SU+x`-u=E z7JVgjqPyE5?$r)@#RmjI75V6AIAVq`^Vj|x%&5X3g}o_boj?0?OHTbr=iOKY4T&HM zH=E6S^bB-9qi{GG zijJiJm-Zgvgpp&OU!yS00T;(PLH(QPwbs>|E{{KK(K&OCZ|q`!x5CyB3lm73+`%DB zQa|i};3jm23o=rNo(uGX^tEYqL4SWlK`4xsyd1%hV$ZKYpwlR-1fwB9BR)QS1dt}J zLfk9%v2X07s^ITH5_jXbAjXCm5uiL_NPX5#YsDZhG)ItlPM9@9W`!I%tLy$*Ac^b3 z+qi#SA!}jj`Ve3J@i9v&x}GgeeJyEVG(ff}Us1wztSKhFoqe2xUkk)l_$>vgO>~)# znn|&YXgOS>S`;kNS#|{6N{2oH^|ch`0kn=6d~UWjwbC#K(Bi+u{5iE3YALXfU58=; zjYapisF;Z(rb*B1PZml&wK)p!$d>5&3?O(VIAX&15+_$D6<(I zb2?fC>UP#8L(liUIuW))Q2{NS*f;jK3Z+ot!T{PKtXFOmXf1Z(1%oagFlf*KA21_* zZc>K<>ilmMjVx(Z1YK6w&j_H5MG20>Acv(LOBh-fpi@Q6HG?r2aCetf`i#KjiZJ_z zFryAihp4~3Cy*SdLmg|qT7OJe04oJk00vZL{$fGInW6*S9OCgqx&c^4bxOR|Ft|tR zOEjk3XH(`+YJ=niQ3odO6lATBSsf(ebd7dwf7BD)8v5wFlCKdAcaATrjnDRjPOvNx zSArWTM&_X>8bk&10?(EC2Lu^$%ahm@Yv(p#R;@BRh%^;z6wQV=EG{&qyxEqPa?q*E zlTNWnOa!~`3+}q72r)8hWnR7OKB%^?Hy|V$F}Yy$xEwTN7oKXd5VOrjLnCi1NLg|I 
zMU!G}V4#(ZjesDwXX@jqKmQaJ0+W}Y6eY;F3R6B>=G4=O$hXYcMcO1#A3PnXQ>XNE z7+!kxO{FtJR~cv-cY`pib#_j{=-hFJSc{C8{E4VUIZFIe#e~<&l2}8rmizh)!c?4r zx~#SiLwzkio6Z6>R^VJMMyT&X0Dw_n)L04%7Gd?@SY@1@dUe_ZF?x1IxHLooi|(dlcs zAak67{dqy+$Q~jf56_U1~HfW-!eIL|{JvQ>B7$9TVBE~h2 zJO`D?Q83h?nV5MQZvrghMUqs|fM45|>J)4d5)4A6f~-FlU_my=|5gCh2|7mElluDx z7-EMnrHWfC#8^`5lWN(j`3~m;QZHOv=(NUa1uagi&Nb=R3WOHO#|iB?CI=CQ@wAZD znnNvohcK&WYqygb(ILT5BwN}PyUYFa=xh1+X7CRZG#E&`ww!9r7;1qxde;Ft}2m4q~Vb1nv@Y|J8vC)%FG74=|CiqyHk3RiiP1t=-# zE+1yq3R}B{DV~b~&bkyx1eHXSkp0QBAs|K?#tK@7i&_n|J~*>(ZY|2KC?csbb7~Hd zmC|kc)-uSGW|%9*17!SMP?o^J^k@W4CX%#2pwGhI_TYg&{RgT1(FmHQj2|G-5=JwC zS`xpT!+?x8#@1s3;?su%SQ$&uY9M?i{W+I|8S#PQ7k5g zu$Y!q{9;b(vmhp+&NO0>?Nga z6)w;P6|d`ays?LzooK72%&SL_%k^b~tTD7`t_xyG07<9_C$nnAW5=bw z!#P2vj{aOH2=-&Kk@Vfi6Ck5ve51-ejelAIZ^tAORsNWK0+z)jZaBpaFEu)@05BT+ z@b{s8jzT2G09#y~mvgs=OqT*^ZI%d!tC7i0)c1x}F>k%Fl41mmVsrSaUb?73ienU# z^)FYdJRIek+7737j6}yzA|fBM@T_AI=I1Fa86G7Ytbc=oRwz!LQ$NjZGo}Di0IkM# zoK1`B{ApmuNE763V?<@;tQ!B6Q2QKZBe&mGmel)gZBemSlkQW5eod&=3gsEZV#U<+ zYA^~X6Gn6~eu=`cRb{=@#N&TtA;e4Uq@GjAvXXs44R#8}zT0#_YxNqu&1%^+MiBYv zkR4ZuAknfC5k@}vJqX4k^RFcg^>n5l^LrZS9}$un$&zO+{!@(5{ z*17MY1kh+F6&U81K1Yz18clp12k2^qSd|%>f(j1qdp?R+Y||TKW9H0MVw~kcbY+|p zM9Ek&)N0MuX%=DY7VHz^qn6H2v|)m&QREz%Rj2?sG+41<*H}i-_y$sGF@-H$IhYk4 z>0Nu;{wYB=;*6%!CR8C>>=bUFC(Tb=wcpkDc~+h2uAdWR^~7`pQJjS;9&9Q=O;S^$ zjDAe}!I@&8>!S?9(C-v&(06MEmYCnAu?Bs&Rv7f% zT7ji)Yo^DI8z|eZMD;)x$HfED!05*3fgBi03}k_ktQb&PQa%71@A!Z@I#^LXu%@cK zmG3T+zGR>d>1s`li>9K)NwL!#{ujHz{U9A~oP62aH(!2L!yo=Sum;>;Q#mdf4Flu=>Lxmzg7PabFO8ZbN6<14Zq@9 z&9k5nANO*uk!N@x*D#nTk0*zxVGuaagT!m%clcH5ZrG@FcSR@H@Waw{7h6OTVlkI~ zn{#PR&b43RT)XAYWvp_p!%F8mZgB3H)y^He(Ya1*o$IpIxz5|2>&DZS|9k9muKNz> zj&E`9xEG!Cd!6gK$GH>tJ9ol9=X$;G+(`$VJLMDSPCn>dpDeaN}fjyQMfVdvsQ zI=TeqEV`nrEBx#jmpG@JD{koON?z~gW_;GoC968S(hEAfS)X-wGhgrQ%13o^Wxcz& z%I?Rxin1 zLo(czpJlkbsth+aJHw6tteqRz+|I3ouKDM+cLn!%aud6^cavQwHz~i9o4WfLH>LZr zZrZ40+*MB=;}-UC?&@L3xCZLqxXQWL$#VeZSddI}^K;T%nr%^#$@ 
zmg~~oMABMMyIsZak9mrD_A73Zblz&`=J$xX_X*Dg|AgNoJnc`Ry8J!{jhgRo=URx@ zd`!$eL3k}?T+BBI-1pr(H=gbOzI0UDyPb!mtpZ=NJm$9WyOF11Ukuui#~;(&huOtx zE5N#t?!QWl(;9nZxcP@-?z7#urZr85uJ6a3=iQP6F&de07wTN^+-%C!$Zx}nm}?#q zb1gk%ZrP5QyAgaFWm)56Zs%Pmx5`6Th zcTV8j6Wm~E{^xC{yO)32%k61C%k6%>pL^wlv)twhF?Sj7*6q)5+xhObZD+ZCFP!aO zfAtLa=9g!?0V>-kl$rFJzij9F5bj63B`wb3Y}Gb@aR8q_gy3hHGlc zaCd>-z9YjeBd=fZ|A+9>y6NN(Pr||t5Am$Y?BH_vW-`xB9( ztD&RDI_HiMr+IgVYi68n-y6PYBHq{zuB9FPUd3-MzgJ|qdk1uL&FeE<%flINF>SeI zayR(Bo9jp$&X09=3kWyRc8!c34^pn%Y2#&|IQKLD-%lNGrCdwPySXO5?Z!8c>$`r= z<*6N@R{_8NRK^*eems462Jp1!Y3NU#c)sSFE|hU$J#!wvjd}3S>XZAk0pAS=8^-VE7x|WSClk)$SwcHa=l6QbaS89&w5KeGX_NKoE&)CY zY}shq`4Ih!@Eqce023eFM7WNp`C<6%1?TotrzJ_}-r=`#JADwk{ax7Bm>b78%hwt| zt>T+WgqKo|wfukYa^_9K{~)gV{$Ke2Ii6>Ent2xFkQQk)@-(cCx#6_Y;_IEej&}#%NwhI{7iTCMVr39LgL{yiXMOE!u5V znk%RL2ci93(ptp-zlH{?;}^vF3D}xFv^97gPxI1tuCb1^c^dU@3+cOdu4Pi1J3{;= zU<>IdxAEH^e(u84yt}JwK9Bmi_O9ic_U>cy`WH|0JzbgS_`Vrl9!4DGhL&IQ`)W7W zGS{${A;fu~cIR2ZGoR;GzFo+#WPnW4Z(7S7vJd*bnBj&Jp1?C&`Cc4zUl6xpKV?1W zT$aYmBb2>`@L|y~!zGCq=V_vj&F92iJ^x?NbAa;e`#1S*m`>aTd=sM#asG$*8b&)e zhmuI+tQ@&f0 z@Y22xZYjS_Uu3w^d~-W<_p*J==cM-~ar%?qSNtwtO*)yR!=rxuB6-Xwojv^S<5{x; z`Cw6olT6Tj2krh&d$;)@Wrxm_N%u!Qah?*MZ%A)#BEucvyXGg;-TKKH?jZk{^X?G8 zF~;hSJcso^*qV*38~9z;uY=3bZ=Q1-DA)XQXo4ItKZbk)omPV_;diTkdmtZdWWM45 z=6uS7jMqrJHM|Hd`PKN}aD=wo8*?j(vyAzoX|QuOq#t+;+Akgey}%aU#9D;%%wNQ} z{NJFth4G>nSQ!5gI9JbiH}iDjS=t49($4GP`Q@a2E7%a|xu##t{gB^H(|8ZAezb$% zjXvgX2OmUwi@_EVzc2sy=UGSFG;|?9cwosO=sB7;>t%EEP~LatyXC~+I03r93Lk$N zbL%sm%j8{ae{TFF-Tj<6t9X`Cwol-jHT>VWi~dhv-@yO#`F}LuEPOFN@W^8FnnqYO zZXk~d{J$j5nu^~fkEh%kt6Ls6`9Sk@7t-m@b3D&7=(dI5r+7B+rJnrO^WBSlw^=-X zLCj^7?v*@ylvcZ#d!9Jg6KCC8=U(CeHMdcxo{UXfX=k2wi&!rbe|~~6@s!8^DXout zyOyt+|G^&F=-fWOUC#f_&oZy?M*iFmz3#|xZ-Y0|{>>-#aV@8(Ijx&M0&97ax&Pq~ zZZ+S$uQ>goG5>GiS@Q|;uOL3M&{o1bcp5vx|NI7gcbZ$z7|@e)DG!aO&DW7{aR+yZ zw0iUGA@1`2F?U31x18qo5&kd#ALRFd()lCtDQhD%Z`jBBm3L?GXzp2%1JClSIcV|p zm^%cm7V>+L-xk)C&7U#m#(KM^`k0F`PToG5IVK)+hk4()|5VrLPjf5DqjCCaZdv1L 
zZb{~8)Sq@MWp2nj%{8t)&GjR%1sixz++}-Cb>|RXvgI`9Cf={)fldpy6JEssO{cj- zq$fG+Uh3S$`@@7ar*@z%C`v;^7ux3t>wErRv+p#wrv6m7hPLa>`;CK7bL--#x^?8Y zn(r49cOAcLz`swv8%clR^C4lwmHP9{?+YD~*%m^VLEwv<=u7a#Qu=Vy!I;}e+P5BoZtpYBQ=X2* zxqUz1K=b8{A&tas(6gHVc@C4;w>+Xj59-*oBIfRGf1f&0+#cBe+Rex3*x(WuH|FKg#`71R%7_)dg9F^y&Qdi60{sjn}gN!G+#qn z{hT|W_SF0~%&(Q?*?5HXp!-VjKtGB@pDxJA{Qm;O+9}8v8E1#$l~h%Px9TvJWKe0DSf+X zOU&J}o%y%FciTv>kJ^7O{r7eB1n`4ouJhL)7!j}HHU*uFz=cc(bwL~aL=qxckRjV zuc{A3ZF#RVyxZ9|Ay+J&%(@4jxP@oY7UX)KB`eSYHlpvP?Cp3mcP@H?1i4|#1~;aoqyeT?5nc?R?U5S}Y|F5@|$r;fM} zQQm(1-b4BO^hIYhIo)OPt{4A%p7`m|qRhFw5?$QST$=kO?YwGj%>AN?ahm6zmGnb? zn@?xnZAaOz;P)DSAzBN%*ye2+ZjmyIu_o<;o|yE0&o_(OJ9j<*Pvd!d5xmhKT|mEX zEi&7rcEoL(PccJ(C5nd{es^){2tmSkC(WzTlGluol4tNConLFACI&c0MM*f2I!B<18E%4d^-v5p^ZJ-W~)Tj9z`ZM)t zXd!&KYiOqhv|mFVZLpc&L;UWIxea`e7}B72-pv0Ew7KT~`8PRtC-3h%K-r+zO5P!! zG)T839aDe)zY`w2YXEu$-YL$YD4mUjhw6WLV?J>g4R++mcL$J zdY+}o_-jcctk1R~|FSL^PkM`y16J|Afj-=L1Uf5S%Go%YveM^U-hu}Ft|iWD9z?f> z7Uti(7*}p1y%x&Z+!}G)!PlMEa%VU50$g`#62= zu9b`}&t67=a_ytul(nUpcyoz&BiKfGnP*XgHeAL36To=xI>5Mutgo@*&RwkI`Tl*< zT-w+8@e}2L5^eB0={@e?d&-NIlZ~7sNTj?|y#U^K{|yJRNyrJR6}|4}N>` zG(SmyLH5vuDxHh?zYn<5S_B`&h`T{_VeW5af>XVHCAw?-0C8dK-kR{5+F+H(c+_c@Is+D+|`fTo;v}`YQh({NJ9ZFY%gR zfF5t9xreDU;(bGq^Mh8S!}AckvDr0DG&y`H1ja}=mR*^HqBH!y(- z6helwX@de4C__cu`E6nQru%kRf0z&ddY-lRK5Or__j-Gtwbnj#Cf26YI3LB!LzJHm z`p78#W5Q=W|6l9tje|DPH;dciw?BSQJ0|_==s8C5(Y?9uSaIEczd8E7xTs8Ldv9;< zlmBKmcJxkPa&Dgb z9TCNke0Wdqef?Ia zvEi?_7)jz57v zMg4&HtSp>t)6etuzkfOE@7?>$q`BD0RzLYe@qGR#`TpPkkbKwpclKApkGs7$bElg> z$_#U*y%3wS^TL)5>;m^Y*7J+z{6_1-HtW+j=_8`&S8$T_j0&PYMmgKG5c>L__b-i} z`}h0`&PP4c<<`LBcx!ml{KnS6=dG*bNN zuY@;uc@7&w80Flz)c^N??_KiytZ>eo^W9riPTgO^JKZ2IIwvNLo_7Dx!+Vd)aiRB@ z5FfqA6#1IG7o7In^UgU5<#4lS5Y^XXj@P0YNAPB?@-L0|=sopp;HT%0b$%ZITXers zIgwt}|NVF~{kM5x;uqClzMiNL+hSb5%X$A=KK7B@_(3j4=}9B%S9~Y_3?fJWK!|}cH6}2UA%Ika1y(2EFzo+SM-C@s>J$|c;?&qpD z!SRoxb3O_Wuj6{r`Ny1hD~k90PSWpvn$B5z?UC$Iqd!g2>&V3U?9gyhU+vzbHl$f! 
zeTcpXt)Bf!d_mqaq>)YjqwxE$Wrlx6KY2M{Sy$%Oo9~O?iRZ;{z$W=T`8&ry$PYgj ze!q?0l&@ja7aJoKxK7b4A>=!@MV}v)Z|-s9PtSGgSHf%Z7xh1eeVbnMJpPXS4~WX| z9x`g*enA#>sKfIAZ=+-G<4`1@V262UuZ>o&v_G?>I5N7=U(vUWWs|()e!k~^&blA@ zFUT;)FlKrGyYA)v%<#ge>AnA$`*~Dd?Jupj9k21b2JO#p$X}D29_B0N{Ah0C{hw?Q zgk~+^xMvR9Z^5@@5%T1+Sf-?dldZm<-y_n08O481`s&Cx=oR?cud~BmeafDv7{=cJo8Mv+yXI@|>5WgO zh9b}BU|Du3oX&p|{r-2EVSj^j7n`%K$_~H!I4issy^G&VSN~bumKAEE^S+ZAj*=_! zE@F>ng;QkZpR&x?xVP*Oeka`cVXu%JniUdX63>pSCx5ce^*5MX|3OwLQr6PHWN)ky zH!Oq;e$(=G@13pC^k-$HOj@2}oA_vMYHKtGd@eJ*B>a9>X84JF`l;iqgnv5NeQGCu zK;OZ3e@&V{H;=vV8NM)a#vYsA|9Yn9A^lIE-|i@#Pw%%r(xZ6kCFxu5SeNiSel7he zGi1|wc|OBjJd6JqZ%y<($MaRcoEa=f+H_vpZ^`#1y80a6QXjtY1fSg?cKjE@rZZ%e z#^2HtzwZ?q*v?TJ=f9O17Kw{~d;Xi!aQ*SZmw&*o@vgiN^lZq>oA@2YZ+^$S{x~!I z@?C!GZ#(Y%ebM=anUVh^9H9U5lgx1WPy9CT_X>M{YYtLe{pDVv)Ng*}9(Mmegzt(= z9|_^_$)Ay{$#vL-sQ$d)_LYz}VE>75+YcWLDSFGF9t*ACd@MAPyTvEb*z{OvArls& z)V;$;@y=u6kZ|o+dxf}iU#*TeWt+eJnt3Ae)hK(xbM*`^(0@h#(lac1O<9@PD>UZy z3cKBBg50;5tv{R1ua511g%5yy8~?B+BOGMgZGAg4q+=oMEeYXgDewPTez({EHoV{T zoc`^(@DF>eCsM8sIQQ*4d@i*aA^t;tHrGvKU!L}x{Drc!zmMP3{}GqJmKm&{3sv9F z3^C8~zPP)1fDf_tJH0{|S?ztL)sLTf?wL>dT~%i2O%{E^r}A;HaJeBfH?Wq1QW!+HECT*qY`#E*@IHs8w#RrGqa;0WGD2{tu6EH@pGdcN-KmN;v+ z!Vd5H4!t8N>&igqTTg_@{`lE8V~8iUBR|x}hO97Siuo(d!8|O$0ne)I^_Rojhcdz< z;Uze{Pd>AE^$LJFy#$?7ie<{-g03VXyFh zG~Y80=+A#NSeX_+j8ar!Q;ojoHF=}|SU*+xO>O13vqGJ4q51Gl7xkxOJx}dWTG?t5 zpT<^osp##jP_OSjDttVOm*4cz?2t4jNSyB#PKayO2e;hn750DYA2SLs^BWqYq#a91 zw+-p=R47q*OZ)W-r<`9n^r>)$JcnYxtJ06h5O+@XVBuf>b!O}jLozE|w{oQ4e|>OB ze{D#(;QTZXaKq&xnTg*G%S?VbH?zw-yClBj9sbVehGceXvs!|2<#&f>rvEfFvpqbK zxhv}*Gv4^%ZpP&oGQ#dh^E0nF{~E62Cel?y!)@{|?&AS6ndiOH7n^HVc!>OSL+6Ep z-~WfO<%|16;T!3&_0j*Nj$8}dzW28ouU&p7<0nsT%-H_>zslHgxg_KDKQ7An>7V~| z#?Ej3G^6J17eZ~u%b{+^-}Ax#eQ22Z_aU+8{|q_OzWn`+Fn}C{A;`rh^(gWu_0&({ zF*sC`84mM@7eD_<_@%XTB^GOyT5$C%ll)~FLqoZNWyQ>=LuJMjq3X_Gg;?ybLOkQi zP<`(Up=^BbP>zb(y+h^n-l2+&vDxE0dWY&Q{P~;I^QU@+ajr7~lQ0GOn1&{P{N^=} zgq97DgjTXmIK{7!HaF0Yn)2S}=X!^_b&rI4G;k~@kX-yoXq-=ap7lqpb(PjAB!~45 
zv&cC}h;MnNcW8BA^Mn^5T`pa;A%$XTFA~25OR)^43q1qz%juOF@;0`2SV>=v>Kl)Q zIAW+mnQN4f%?K6hQDvTXfQ$*p$!hg_t>3K2MiihZk}2;1o6vNCKRu>=m1KlgZEM>a z)>h-n7WdXV zkw0G@P9eP^BebvK_rIY|JMS<`Q4vXTC5Y22*U8%|*QX~@Jz2cAAvRxL(IPI5qd1Ne z=%~sFr^wFl^bTjpbGU%6Z}twC$SX)U3<=lB>$r)cmxqR}+VtD>ySR@B_{I`x)0E19AOh5g6uTX(XR3SD~eP5-%qwtHv!Vu@>VgyEE48~ysnm*FD-3g(kJA~F- zp8qb-A8Bo8J8C-kX;FuIG$3)q`0KpB2Az*ZK0AC_8~*4cVUjebARp5(1G6v(^U(G4 zN5TSfQKY}7OmE8!OXy3H-ZUgEBa`A1SWa(!$uq`E`f9WrW2A*s-d!6S^LmFQ5@;CS zJJe59S5T|m*7WHe+Lh(>+}xoEu_@ z)2m13hI;yb=N-ghlp@JFl|Ti(^?Yt<;rxoz>(Ktn(;@xh(;-E+9mx%4w{k-{Do}~4 ziPFz=K3P3BHzfV01!)|`ah$*@oIw*qyqU4xLbfuD+ZfQPU9N%l9Z!dv`PK(4&JA^B zefQI$!FdViC&@;Jeet@d!#U|(z$IKkY3k`vf@}23ecDcG6}_fU|GG9ozT?B)WArRG zGooi|3sA67`P!EouKP`5oBN)d5pL3N<1Sjps=p(>bM-hyrqMpoeBNyNcW)K$v68G3 zjvY`(jU%cz>yOpN`+olbnR)sh^hFK^ph;P3Uh%MuwQ3XEgj2#%8EaR*Yf7{~G4-FU zPicSX3F#;ID0_>w9~O2FlFkt1VgyQu8S{Amqv(~dWQK|tGs77AI8-k*RuC6M)vC-e zLEI!vK|ZEo24-On{<8kh6JCTR*lO%v^zE!r`1P!?RCpN*v$8|y7c;_gdPkW$Lw3D2 zB&;UaVm&sZ07cl2`a`|KPI5Q)Vn31_j6ZOY-n!8EV{Wf-m|lu@?L->UoM@Z2rfjaY zP4ZMBKT%$*67rUkN3uG`7B71wRQO#Sb%@rEC&^7@G+)uAJ~yMKkN1nu+h=+^YR;PD zy_gy5$of}2f9-JsNqXap_8&CicTb1T=kVBHYv+}99K{Ko!Wo>y1*E6;375#O!oLm? 
z#fNPF5ia^LD{N()U89$7@cwX}UK!<09&gfbqqUyC?nRaq)L?9}ke+rajux zJ7jXFv}z|>gnQHbqWzBcNI0b(X(MyQ4Zt7_K`usM6uJtBg)wA@ap5?!^UqnFjh@@E zhvSzC;>vDmi%`)|+mx-1LadKAOM6(Y9h~HxDd?$xPxK1;^l6xZS!n97Egb9~wVkbK zyJK9yhmj`RKQjK9tu39eOM)&G+0uwR-7aTpaSJ(L|v64@b6uY4~%RNTuBMHba< z9jZ?ru5L_LN5-owr?W$ybLwwph9ud7q$+ayxbO+2$M*{DdB*?)xU|#XrB5L znrs)Yxz#Jw-m|vh2)_XuYPAh1et-n~v%FWhE{&VGjk~yy2gv-2@>@0}q%HdCcq=3H z7VaFXzwM{|u2O!{S6uO(ULl7ZfI{~45VBN#E4kV$5XZnR|&n_;X zvH9|fj&Jl1Bb?Xyr2Q$sek_clkHI)hz$8pTKBi#?W})kce;v}FJRauI=V1Zrz4Jw6 zVxRX*UlPTu&r8V`WubMp@-bieSgd@I?K8c94&FhIFT--IM5(xv`8i=Vy>fF7+aV{c zrLRZzmYfjZm*XAfger2AF>W+36|E!Q=y=yteL?|Qgze~@+Ar)RJHExA6UFltZEfR^ zpzlYSbjqbu@m@}-A3EPD0>X1a!k;g+bTF{C%q>x7Y zoyS9sbqBT6b3z@baXs0v*qR0TOQI2Fb6tO->pQRVfa}X&j2O&a{tZlza^H#PVsy_+Z35bG(N96=K3dHpRAX+1~QRweQ7t6_1QV$f^;t73a%m9 z&pEhGZ|x)PjGSS$2Lmt&L(sHJ z{X`de;3t1vi+p`cUu34I@F_KuKI^08j&lF`j0Zh2r|7Xi+?UNjG~W0Py5k7 zGqhry@C2lHNf&Jy)|_CHxG5-R1Lu<^`s``s43w_R4712NsFcqN#AeGQs@-E8WoPC8 zqGMy(eFEFl;d`MeAw$Dzu2-~p} zUEj$GyUD%Sk1gNc5Dt=u(fM8LBFK&gwjbH`j`B>FYS#)s(Pq<=XhFR`XBnBsQ5;87 zTteS_g5G*aAEtjjML&afHbXk5??PL`x_afW%lfv@%3u21y}~)?6@8+-eUJZ@ehF7_ z4cF26LRPp*cD&AhFXOw9-vfQiL^7H_ulk{k-gHJ-WS#GB|cUki7NM)Bd(sG zVE{P@$(fIYA!IHR)?>7w6(fX4AwAK!U^4%~bmN3P_P;dB^^p~1rSVMF0cr1WjavOL z#`w)ROu!`cwEwl*J4_L7@_d>-i=KYoJ-5+6Z(p^yHK+Bzs5`6wb*#ZTiDu)Ea^nyA zEKcOWbDD#Ww_@dN6}9yYAsti)=p z#d>T+0gBM%oMz{?98yowwo3g+8ttgLq5j`d|51NT{a4o#3FC!YYge&d8auHY#aGRp zkR@bkhj}9UK^#WKNcWfL{>Hk$VeW6B``h5W=osQC{OXS^SkWJSI=-wEm;U%ip@l5^ z+6F#3_eDR7<2Zq`1M=^_Dvn4WRrJ_N>EDt*PC4fc&fx;y3&tPh6Rz1xyB~!q!lRInq9*0-ciK|=49vnD%)oYt!-lYu z-a36_Xu)dwTC~&CNX_49EV9vFl?`FN_>Cw)5lUxo2qoA~uiUaRRA484H>&Az#P)0q zRfyW3`tch-Z-0`vUJZrjCH6WuG2FFJz8YFjzxuEp{axgjdsVyqs&T@r#tE;6va7F# z@;k4FitblKC0TXj)exKL8hIN-HMw6p2XPpss6ZTbNTTWd#?aj1I%q}PReAeJ9`|hw z?Wmc((Z1}Bp$_$ExU(@N?rjW7=QN_&{j^9YjiWe@(!|D4^3to}1if;P{BPSBPSMYx z+I_|4H|7~s;heZA{};$hNFK`wSIBEfoK`O1Q!a5`_$JcIXS@EXO z|FR)esavrO<=;3p8mr#+o4dG=2k7zR{V&?V{^}W;`m6tg)&Jq@KibafmoI9^$aeLp zW@fa;S^ZzA{-a^G`afR%Mtp)t 
z9S`eemADuNIIr+s>yF;fX5aJ*gM<@g@Ux<=7as zhpbSnjTuKTAxp>dZO|uS3MyjmqeT0Va$PcxsC~?LY#L@@7UrPy{a#@n8TnBckX=K2 zg+=5NEX6V`$4Yd3|J5)fs()GPi17L-?yZclk?j0dhDj^W83iNN;SZE|@kQvmyfJJi z>oYRK&M59fd+wge3cKk^?Rz4dFPR?sL|gbg_tOueUB8t+&Ho|IHu1g1t;J!K;!s9b zc&EQ-F*t<7$Ff41{;FIbRq>Jjv0I;WL*H{&A9PXwsQ;|dkJiqAgnx}MbOrzZ8vgx( z{QATA_sOQQ<_9L~hsW!O`{{QF>vxgvqaQ}J*T2GbJP))!~SFV2lsZc`G@xLy&8T#|NG$=hkg|H-}_N0c=3DT#}=I*mG*HI^foFFrFnR6m~`203R4axnsP};$i@}oYO_LN&<@#@FiCn-kUX6g5+zw7pWb>hOCORIrqO4h zy;<6|t})TIFiTwVovbj2oQKjqSz!^m1SK6=+OI6_m$s{{UnqM2u@Jw*uhDIu!`LW> z$o}tm_L0!}RdW&Fdc=Av_5Gsvb=G{aG?rmGR$?{QVm-Rv)*ih3NZ3d(K>b8*B$-H< zQzCaFnqx|T%$n-+(|BZg3yt`khxYI9Gu3#6gVHa*-FOb#3 z3;cF3_TwN9qZAd0qv;O&x0{Vhwm#4PMrtPedlmZ|H3Qkd!`Q!M{Ym!kS+?y(_Al8e zT)d%QsFO|-El8tOdL=kYuRPC&J08_VKYF`;8D;a?|BGE?4?8&~&4jd5u0d7{pOMBn zT)-tYERi9hY_KIhWw6S3OS>wO6@?RqV(yBis|MUbsc|iU>|Kede;kI<{ z;#yQ!$kIId#{+uhi|$Q+GpDONsD8!0iHogquj_I`UvW7YfI%37T#Udd^w=u{Js)G@ zF~Z|80fj$wkJ^XMCm#<}ggY+x3x$DCzD8MnlAl%FG|a#(%)vZ#{rd5+fLw$nSc+v> zj(UCUN;2}jC)e}}tLba89+3~e6>Z9YO8HNJr2p;c6*f9nfFf+iPVB~B>_^!u#JLhXd2Exfflr)4JoA2j-Gwm{XHK< z^ABB~$wAjWjPw)cBi;>sD4x%6`2@&1B+-I2D%@Kodi?L-(QX|vE_ly4!5AoZ)*3eF z6i-)n$rC8mUY#P(pk%)9x{w!e30H6p*KreFg?v3#S)t>%%7}31lNsSIc^?muIm0to z&U%x5k%Iw9cI*E!gdW)>EhxO46>^2!4|(q?WfW~F%htaSduU@;j(uoT$MW>=6ZP+- zdxuijDbU_;zN&4)AZd(1(NOiO#`7J?{ynCzKFQBbwiy?s$h2@fS+kJ+i#lo5qhT`p zcO?54jjmgKpm!K0y)hVv2`EkJ|1pVP>39XE(DPB<;oiE9K~RNh;_BVs400BdH~90( zc}Te5mL2*xLwue58;30Mo26KWVDd;%D*vEnv8rUHT~qjzx<M`;pGmE)DnYCo2OJtsM||7=_x%^roSql-@DaHObB(508!uae5u1{w7Jb zAdRCqjuR->r=B89&RfGsoMnWAio{oHsT>9l<;fmj1!*%qm>yG@*`^>KjH+Lw5quCy0%SY-q8I1v=wk2v~YG<=S zP|r508>{ahX$&w~-{0RDfL^A)mb=diR9erqO4h{XO?~#66O2&MUUD{jR*ta%>Lfp>$wwDDknS1@y{nbJOPI7txoXx{vwq z40F_EmGh!Cznc-+g-ab@hUM7$gudiQ`UCoEti^h4L;;Gh9c3j?hw_*?cCyl(dleZI zj+4~`pAI{nvm1M{9|v(5rKmttKlA^C&HoQK|Bp6yO=R1oIcnOE@V~C|jR4f4e!BVp z=gt4mHvgYz{(qu4YYgJjsY4PikrbCen%9$L|cl)R^J$vb?7ZV zu^jpU3_?_|qVhL{-l~4JD5JUb5oi}qpXVb`-`dDg;>KVcia%18$&!2QZ~7$q6y&4g 
ztoo@vs2cdNjp*_J32%DQb4OJEra5mG3PYB$jWIdB=-0+G-(Zi^7hn;VU@4ZNET;aa z)c-`3zUM#C^B?BeOhAEk@+UwA;ToU1Qc$Cp1-nL$tg zHB+HTwEZ4hCQl9{S(yU#c~}5pJ5zZk?~6U#y=e+k{j4 z__V&geHFX)7`yc(JC>}!$ZjPQ!b!4mow$!43Axf4fx=&VZ@>3$I-DbZl(?RJX7Ini zrrz=Aw99u|I7LR!WsGx*V~>P!GCz3k6(Bi;=<9qe${;WB+KNz z{3ZEc;XLxc&VRK2XNKR+!W_)Q0xZH3H0@IU^o1?Pb**SyrENzV?Wnn>{GslS@~2!j z98>;k2qIKI-rA%&<{hvGoE4 zWD!c0o9*OIl*HKetJEv(7EXW3xP7|5eY|@7l6pK_{XMJxM)qz8AC+^W^*;y6X#LM& zGHQ!T$)5EGFB|`BYbu1}s6+h@V|}s(g_|A=X)>8&18lY~ncm7SXu)y%3AEGGNTCg7 z&#V9Lqr!bfYh|itx~~n!9`ae;9F0Hvg;Rca2Io-ty~n}@@)E9~NqufsuUinU^=nh# zQv*H!{+>T--sAs7owlwX4fF((&TCww{{N8e|8sU&6FKkZ{tICLpbVtqc*U3i#7XX$u|1|#1BAef?e`e>kfYNSg4f0D338zp&HSzDL>%&414| z|IH4bBd%C@9=QOe?Bhk`5|q#@MziZD`nIp|GQ?f4`t;+WYNUJ)H2csx{)anvD+7Nl_$$8iFua0Z

#{;M+;2h{&sb>pl$a#7tNt50g5PTC*RNB;Q3=lK0REJFJG*;LX~|NYhfSM)!K z`shkj(PM}o&`<5@9Tr6Cp#pK#A&C~G(Zr5zzGp13gZ;ZjpXhffGL7~G`}YR>*Ex0F zY!$LWIB}Z&i$>JnV%r)wZ%$~(a5PH)BjEQo+cW>~TL0qxd%vI0|3`U6&-{N%{-xdMUhDf98+~2*M_=S1nIZq; z2hdvw%0C9thoId%O0SatZ23p7xZ>H?kdfEar_$+cMfw<&h_8^>Xbd(^cmjI-I(hQ> zynJFa8#&tJQ}~)avux&~nyF#yYx=!!_6k#+pO0ynfmxV?d02o&==>ue3EA;wwrZFf zmeQADIaXpdwmh2`){^VdRl{dZ7N7{*u@k$I{>{{|mn=A-&LQ&6mGx6UUs8rQsGn=p z&xPtI;(UPBsEKTR)Uo;N(LhfixtRY?+t9Rv|Bo-S1+DypZQ>)}ea||?(e@>|-a#Bj z1vAD(!vwVG2DT?Kh;26xuLN zTyaN!m_g1$DSZw(4<)zc>!N)5cEkeVMTnQn7pf4$PqRmcdih-9*is}%=Z9tFazyvp zvO&J4=ZBTTtC4nJ?J3umuN3(Y8=eaXUz!{a+{$O0KX1-(@)M!#rAI@#dBzGKL#yo5FKiz_F8t)szX=itt`}doEl=N@7aCUOg+}2dSv-7dh)bsqNwlDJ?9@<# zG`;fqsrtsL;VAt$s+*l(JCz@EsNoTXXUJ&Z<2mvIqJ5B;$SY_)rVT=> zSsRq#pY%?ndBkguUB^vyeOp@`^@V&%(SFOW*Yh9t|9{R8w;k^snjh|yk^k>O6#k-d z|D9eTb57W_)pIKPURLN$FI}kq&+QfZ(kt0G7096vK=nvAnD!y2eW=RQ-b?2$201na zxfp>_7=to>O?lLh?9h*F(QoMQV&rH018chV@6WTZQI7^BX0or@-i>G)&OS%WNcQ<; z_W5Y``9SvhF!p)3-;C5Zxy}Sk!W4YA{xF~3%AStaA5NpsKs%c{EiRRi7G{Yne$hHO zavnt8ST}Mp>==rJQ%tc^Bo6jOG^Ncl7`4xJ|}M zn~ebrjv3Q^mp%1{aYnF5MOrJ-blUjuyzyU$@gLfD8UN|G(tC{mw(0+k`D)h~|8Fq< zCmVzl#^y;hqF#T!OggKv7VDARp#K-Yk=|M>?Q++r7a>}^mljSPaxHSZxZ;f7VJEp8 zrN%OQ$^9r1-{V)jWBf0C7(IT)RmOfd9{L$OzQhI)-}w#wM06Z=NTLO49K~^*K(zKf z+JoBlHhXhj2&crILE*c`x61bgdV);i;rO5Z`GWoX<@(2$}P%_jmuH7+4urgh}IQ-zW(`p{lbIjI^XFRGXFQ@ zH1tId2Hj=?xgz$A2i&pY_Q`k9%TVTy1*rePLJ#g$+Vy>hg? 
zjZ|*G<~{u3G0$4w#>ykA_+fh1zq9o#*!-1b6?-^F#)YfNO;>$q@W$hzfSe({d02o& zSc0ar?0tqG``&emW2cxWIKv)TV-qjc*>vdyKl49l?+ zNpT4_`)YdY7HP9_*V5OceVep5y9U{|>+!HrTrodD0a=7n`gU?BO85>c7R%R>$HQ*n zy@(%soL$HMcfT>RjNM;;-g%Sx-%&MMy+B+Yj`siV_nU(_j8bgs_s~`;Nby4nH#w)- zx&Q2c<9}C2(r8DGdRBYqQ~#Sbq~W6apZI70TU;7-sBczgeywb%(@EhLM0vIfXr{G59s`X|G!{AbUbT4 zf^ZH7U=Vtqmp(1>4-65`Mb|GM4#9g{@>~5_l?b~^i?r^X?%_G{Q=)U`sKio z{*iv_QR9oD_M!XD1WdvdG~J25ab*63Y~8`8-|YD#jds*rwg2~q{l8>Ao4BJ$z_OdP>!DWtoZ_B-dV*OH!TQ+87qJu8&yYswSa z-IR8htcq!e_0Mtrb2YiwZ{ny!5-mvMD2}5^pWcj?_q4lcJFgu^`lNRG82`I5KE~^a<&l!Wo=H=^p#PE1UmIJIozEx}WD+e%<0y{f1Ww@$%J}iiQGrTSA%-}r zQKQ|c)n?R@_1cdH`e*Bp8nrD=Z2abl#@>06FUj~L!}vp65$yqP*A~?4AJ4hY1zf@v zB!_E<#TR~KSZGzhqrDB+#ot7`x}6r6LR+oxh|N~c(DgPOZL$42fAlRM2A?N60h2HV(VS^M zISn%~3(?+*67h5BpY7kCM^Bdx4gdB2?XvOqpQ8ems9J1Zdb&CB+2*OqYwo+?q;L91 zAFu279ewF6LeKi2PdwjaxuMy7b_>~h(VRG$5>Aut3+=zpv;RKZ{`-FR-;)i)%&l8L zkfb+ui)+peOQg3H%di|tzfG7MUrBE@U*2*rH>{?wMZ06^^R98)waE42ilwuWEI{dI z^Qq)^loKr={_LzSkE&r&P@4OY_*NERB|75lK`@Md%9|v(5 zrKmt0b!hrX{_nWQZuuwMZn!tsO`{z(!{q;}{EM$A8=RLQlfsQ;{YAQTT9C$3eD?i> z!f$C)hD%#M(_b=o>sY&Rda`SbcP;X>@>jgaS_I{1(~eGxSP%sc=8B zb@I6CVcz0oj~|DBzJ9ZM;JgHRfxLty{WbGVSLm(ERLc$J{;G0+kMHWVdO*MC*mV?2 zBmKHDfWGjiaA&ZF;LlHo+w_j9xuNS1))0K>>2O#0J{}6@}l+4!xb%7daS!K^THuG=HQFcBtF;)NwK;+(x!*3({omXzPDQ8hhs%gAZ4) zjei@AeH+iSQTp3=D2-7VgW@g57i0-piV5^7$VY{AE3ayQy0t&jjidT3dwr(9X1+cf z#oC1Q&$IYrj0-T$?}{#Ghpp(@U#RV_>Tev7t$*pKO(thJXBOt5>klE!BNt#%6n@SB z5ojknK2}GBJBQ|mrDSCPFN?xo)_%#`mhW5(%Y|2BHImx(1lH196A#Cx>**U2*`(>s z`VTVND?UkF0gA94JFy#ku^(j_)^(#|jdk8^?<#$9OrIPtH$Fg(zP^?XR(DVTen(I_UR@ans(ihbM<+G;T_!o83te0kkXPrQDq46)$&zOJvrg1#_A_oI7 z1i9!LZ%$Oc^L!6ZxabXbnB~|UL~G;Yh#gVa-m^ET#M&M43$O@FP4u{FYkNtNY+4WoZNybr!Bw7&7 z>7~h|IF1uYs^ii4`xL!3PyL(df8*%q(BAEQL~8`va6w$hpM6(}?40^YxI$jTb=<^l zq`&uAxKBPn=6vr8ebM!#`J2V+4~nny5vfzrcb{?`8-UV%Y{#<4LJ=QArFNhqrR_PO z4MO!AZ4k=X*5#;Rb4T;!Rp#AebJ_o>Mv-gY#USYoK`usM6vm+GhV}nlet=!hzK_$Zn;#G3 zTw?+zVG8mw4KpwcO?&{&`}hbB@jsx=nt~KRLwY9v18Ug#wQT)5K8kv>K{!#%|A0nB 
z`|sySXC4+{5t8mRF)$}Ap||#zw?2Fn^ul-8d~4)QIOUu+vOrzy*?+GtMgPB3I^A*o z_A=*}czzYLJ=gi3qq)gw{ZL%E+B2ylW6oXfyp>puwOEgh80483kY%n>j*7w3K@~mr zqW6QyKk>PLL7A$*!@ltT6W;4*>r4Kd{;6B)J=#&^8r!iGyRjG1{QiD2vgf03`bFyl z4hkPeDJl?09qMoO3rVsCX&gmzbH9-2=ogODTjv@#%*+fY=%>&#ZV-;gdv!8YRz`gK(6v*W@sW0PnP;Ah|ADVS|flj9xV9S}Od zZmjX>zY5X*yW8TrrW%VpH7MMr7k$HjzJ0A%xKDq8p80=i{TKc};rJF~kYWB)Wr6yJ zzR1A<48jl;Hf4t<&$@YoI!?CsQKz-#J>O@ew`X`i+JRbaL>*c0yoOi2pT*wKeD>e$ zjF2mx5g3Ir7>5bydX`P`>zokH{dau*ao?A;M(FLFFv+oksNdY6-eZbzKGMI@|4z4G zm!2S#v-vXUMPHTQH?oY~+@Ch0t=s*P=??dI)^|?#JQ@yWKN=2P>=)k7do287{9|Fi zy+HeJJQ04L_eA*F+5TZ~?ZB`n`^oUu)q&y7#Ne=d{8M4qlU+fp&T+`2YDD}aP z?>*MXUc{{M){e)_Z}qW<-Tprmz57pLt8=3NV%@aPH&};f1vYdzbj-W(yW)&Lhr{RD z^8>rXFB9G2S3cTVn(>Lc|4As@bI_v1PT>sB;Q}t<3a+7SM^-4`lNBmZS&|j1QduG9*~ITb z#@~Mq*PVA0w{aKu@c^0shxT_}w*4R3_J3rDR@_kvM7IicEs?cDq ze*#H*BZ`M*h4lM>4!xz-7da?hm=#JUW`zOt%Ej&tgXlw0eZU?Y#88D?arOM^BgipG zYH!Ao6Od?r*!Hx}kNn@=Ve7B|9MWuxcD6v9I+7yGF3SH*eY^Rx$`!^O{Eo3b#vsNR z)y>BDJIw#6F9qt$Bxz1TKBi#?nw;D0+!nGmW=xYXwn-V|knOHpV+>P!mVW~EjyH(^ zY<)nZ@GNP}LGfwh9J1t2udskzgwkQX!V+>RD)}-hvfbM%_t?k1k@0oxtuH+imO0+} zf$v?C9bfV-swix)$ZB#e)}!lNzRyDzpePEzYwrkIHoKdDr8`ud?hcixy47u7tUJU} zjZx8W`YX4CmD|(*6m|;lM&S?S5lw3zX#XB){~mb%54`^e-v0yk|AWwe=Rv61)opy< z9qLezhWQVa(FfYU2im^}_Jntbz0xbH(m%2x_tTTBxqwy+ijcu6UUHCF`PdCU%D;*@BWh$NS3{(!xg(Kl>n5yT_^w`9#^te+uRDRe{Rk z;t@j})i~}qCvXa9a1Ix630KgxL;l@Q%O3fU$$!GVHM_@h`A1F119kKP|FgW_dtiNv z{JV#wduViC{lb3=*QAqFpRSWPBmIzb{Ps4zb)>X$mwq4Z{iQwDHPANxLC?Ch;$i>f z&ln!Qqf*y-;JnOl$^VT9q2k2{p%R7P)y|kNj=v&b^W}4meA4?muk+alA&2bvQTN0B ze~sfQadkQUU30&FHv_XU2lKE1 zi_m;e8N8+L%~l7=)brAncJ%F$G`+TuI;fu4tA}+N?(4idc-q<^;b?w7^2aTaPUj!( zUm=&_FYSlr!Yi>F>#-39=z7N3i7ftzEkS;^|8FO~$3EHT8{_n7&tir4U@v_?di-zl z)sxR%zELRdFiKH@IO>o@mvLm_J6Yxk|0%QxcNThw; zC+@K==&dEjCeFD;zk>Dy(m+alTg>?1oNIYMc545J^R89nBmX!1dbIv`q;J{zO~HHi z{~dY6pHT3(3tvZ%|L@P9{SEf@9eulgzm;qgPR)JTw@34`HH+Ej!`axA+2_)27|gat z@*?~Eto;M~eD_3pw{aKuQCe&NK(qY=^vEY4eg7wOp>Y_h{Vw|UKumn}ErPz{>fKik 
zIRHsMhe6~JM1F{#{eLf6<1ah{J^KQL+cub|SY!Twto%=OjXe2xe?9B+r@IEC{ePqU zW(>w*0w!S!@-YofAN@D-zl)DxkM)l`KHdN4UhCXzJ=xGL|723Qk&M>cIbvy1lsEfBv5)xz<~58qK0`Coj0bh@(aoSywX^riHouWDDnYiu&w_-~|l zp6A^UR|XJ$e{`91mZN9?UwABZ>OZ3lY3E0-%lPychbwB!s#{ba&$9_E=rG*N_QHLa2@c-BU z*R%iUtb4=f`+tnxd**+}dQPr!6en;BXK)S|a0yMGV>AB6_}X~8{i^Z5aenP$b#&?5)6e++p#6V0g*)HypWI~R|GOK7 zA5*5ZpZDooU+{f_uUo4?Prm0G7uhb0)PHrq<+M2mdSA4s+`rVsP*B4+2^RD#}F@PpQGkH`}zj^`W{>Q4*Pl!`zv4uD1 zgel^p>lS_JJH>6?`;+}IejXNJ z5lTn9{&4m`z0$o@q}c!TWvK4>^!lz{#lNv!+)Av*TCB%L6rc#(u@k$o7d`XgXT9%> z_PYp|J%}%#3hhKBceSib<#n#d9-$55`7BVhpB(~Y(Gc7xQ=hw>q2%; zRZhuSn1gv(fJNx!ms~f?zgeZ z|2IDP7yIj6qsg&m)(7CT@Ajt;@B!@c8^?FL-fry0enflo5@&s{hu%8;;o5@3 z^is5s)+UNek!=&*m$+hmew?g}&Y>sC7L@FAe8{yF zq6)EX+W%V5703PN1Ww@$&fx+s;R>3JyPNr4TgX=am^LycoF?1V@tQ;0|Csi_MEl>Y z{r9f^`TN@;T$4tDb}w4WZ{jv0KVo53X1Gh=`l!19x^IEgOQW(+W`0}wo2hLl z`yvMekQA4|AbRU)+Fh35aF{(p|}JVgEf0&)@ld;NczwxC=aQ!!CHF;ly+ zP`iLQz1sM_=l>D=XfN5HOQf?D%MpEBvS{c~<4@23mgj%R^WWk5@A3RMd;VxodH#JO z8^GA#dG#Hie*d3LUVZre|CQ2OjkT!PUacn+7x~YO2R70RP=pqFiS{zL>AzExJvYy_ z9i=<$zj1sgO12pLkkOp{UUEO;tCS;Dtx&FBjOwRx1`axQ7^UbaG{z%4|HjxZ3VY^t zQJ9U9{%}}G(p%8Qhg|q)YYYB3G^C^O*B%K+$>TVIQ~0bLouNm2qKm$2J;HB>h6}=% za0STtI>Lb`Ylg~9O(?eAPhlr zn{$v$kJ4=Ecsh)rk3#!<(iWEz*XBB7#1$WTI*cPHp!DR^VG=n7CG?6C`HDRq@`a}% zPOsh}pXKuDSeg0za#Y+i|Botq4Dmeg&Aa}~^}mJR$qX~3F$-JySBl>D{kzk~f9KV) z4&!V5i}k;=)&Ke4=VJAr?AiZ)%lHpH>wnF`&yn6dEWjcpU$I8uMRUIN)?L!xW-L!% zhMx7m!l})!MJ|uxy^~^V0ansWMq4XEu0^S}0_(|*sC17NN0h<$ltr?7gEo#VaI6U1 zu@k$o7yEG#hf#_O#8HP9q$7+OIzBK*ZO*^*!>2>E?>_QvZbmdOa@0AW?f*MYKY>#? 
zgRUCgqAxuM1270lanUz` zhR}QFA4mGmFnt8tjakz8Z2qxqA{%70aSHxh>whNMq%#RqkdJAYfm!JKZEny0 zhmLP6Bf_25p3fr}U=fxeTK~J0T!!UXiO<&muBKOxl{dbXwe|Zu-g6$i9C%^H$xQo`VOJ_IsVn3o|i95Z+L3-;*X`{$EY%3pOJHJGF zxNDGY?)|WHA|FC2S%K0+?wzbd2|vPT-xWv-w;=u!yMBdyu98o($NtZA-ZuG1m9%>P z|A=0VwBH=Xah$*@oWVI=3+E4oSrgNfi_>|If=tW-m*0=P} z$0GY!|E%uBhUuTlYT=HzzYsc~;v;yH{w&|YQ@+XRcY`nloxk<%53bUov| z4@QPD^ypjMvAGX@ebp~1j~VJ?A7yr->&-W&__$vf=e!A+g!Cu%sJ z7y10qbb~*hUGdrX#-jg1O`K&H%3mW{!(Q(3#|zgBN8k6FA)V-dIA)QB-}Fs=avm1o z&_|i!oq_x`)*Bp_-!4AXMdFuWDVAY5wmds3MBgGP2QJr&Sqnm0_+vH zZ^hH$=f|E7zj*Jj!~S(c!htnI!rRCGI{d>u<7ee)*DHg<8@2yAyt(2FVfV=|ggvMK zD!lc|0N628V)*k z7(d-IFznoC?#o`X*Y^wyZ_a%(?4JH)D0Qp?KYP(PFxNa9_V|;&x9$xz_hmjY)-S{z zt3whkNaO8W{>O1ezi^aZoRJj{Z&7CVWrbhv$_l?4{9GtCk5u00xllIxId#)MtPK0E zGQJ$DhD{CCqrYs9=F8faXU%^-s~&yH{MWPmyY|$b{!%zD%@a6)+aK$;-@$mmgd~=F^8+UOZP3G~N=UO9x zRmP>kkJx`uhk7&|;~zQ6KSDOHvnJx~(tYL>9=rlcn3^-te4K~Ek)0OV(uIksSey=vjpbYY$3o_`dh}zO%k>^<59yuugvSoj1)<|EY&sT$>%M{u^ly z-Y{d-!dr;u#IUfK%#zE=a%E1=`o#Qai^59oPW5w~hui(oe`r4!<`LXg_xWuN)}nIG zu&|!2B9jY-g^l!zpR2FmD+;OW%9B&-yerBZ&sI3!$MaTurkZKW8_)Hx=a2YZ9Uc29 zHw$AcN>PTNu){0J-PnsZ&(fauY*pf~jo-CN{jY76CwsUXJY%D0ZaN|UXlWFG|DTgK ztu^cgVf2*`4+qKKUz>M99>sAaQT{1&-^dJ_kVF43eQ#tQ&G+pwNEXH)-YsGm6oohj znibzO{5!XKwoUeCqn}654DUvKyO%sEiz=rL4omr;#zkC4a-TJ&69$K?^xAI!?<|;W z^c$#uX#DlQ@z(>!U$I>|vjb)9?B+f8GIvd!I~UIox*Grv`^HLsW;VQs8gR6+9$Cd*N4z>{loc(WIP9;<)*#^G!jnz85i1=Mn3$F67xnaZbg4za2zUUO!A8MHM|YMZWUQ9p^6p zxpss65ck!u8!aCq|F_C@zdb$AGpTjU+w!vmCmO8+`J zfI+K#pXmSE$HP!^IGSJ8M)~tuVFY~)ijm`&#dvz>`|_WBKaoBOJ=}TjZnEo~e#N-| z&yURQvHmaa|MP4<(>Z(fyM8LBBmds#!%VUS{Vy3SFfKEjUWk9utT30}yg?g=%#QYc zhgb_jUyP;bNccX`g>K{#=OKB!tpBHHk=ycGXmw3e zx>n9830s9V3dz??LMd5>szsh_p65aZ_ikk9^{7J{h4HUNz7I6-doApBO}72Dkel;b z*iWzc0=uC|dyu&XX8FYKa2Wxp3QTnH+a@WNjT5%A}-@9uHgo5;SQelR}T*19(P_lw;Ye?1Nfcs zFL|zRn*1jZX2^Q(LI0a?9z!u4BQOeMP>i;l+S};ZrG1MoZQpL>={;yzqW^7$yh=9T zH1_^bdmK4>YcIbQ@`-RNKBJ!giT(KLh5fs_KNcp@JB8J;&m3d=RK&Bx^5{m_4*ULx zKMa+l%{B1bOeBR}Le54NJ#|69;uZV?}uz2>-!FplCllBian*C6$Z`Ui1LIqof682{&9 
ziMVj`?LLjZ?knDvwz9#743ryAKJg{_0z&RgR?k~ zT)S%;&6%fnYD;wBBK?8ywP!S&$a59!f6p27edTrK=Ih#|NcxYfuDyn;`|8z8;)RO8 zP!4>}xSG1Qeua2$6Hj`zFbew*zWI^%#TN1BuE!1G+`=8)!vj3R00w*Rm3 zbDR86c5~;+!uZo@_CK0-h(B78MGmcqX9qqfoS_(w5y(As&0Tx`&^xb)KgQ6D(Zkly zqk9SaALIGOGn6KhlMwe#o=i?fJVPnYov6ih?wQCW#0#FrQqA#aN2v_?`T}Q=U8`Z|;ys(Ubj;^Z)jB`X8#~pS1FOiS@I>%r+_?u!>%8 z9AXXGA>7Ub!rvqODZ)n&V%}ft_nlvVGpr{!BKg=j1UA!aFDV1g87qkE-;EE~Z&S7* zt}(7VVLhX3N>PRi^f&06_^~-X^u6eNOWy`tq_@pF9Q(2D=XVf?aTLdqM7ebY`7aL( zh5oT=;xo$_E&mLfP&ohZuk-`Z^EipqID@k|kBhjBtGI@K_I`QYFzbJOhunSI2e-&O zxQ7RbbG{#u0~oYgS&g{2aNJ{VD7`TLIL?{?`Uv#g*EU4=1mEhb>i<{GFP~h_sifDG!f z&^?Q>6w9#^tFQ)Z(I%bR_pryv&Lix9dUuRdmi@ma+JEAYCN!TC{}a|0Acs~|J{%s_ z3uhxXV=Iy`ntw2QcqpaEH5nkDeT>x~>-W9bk8)#3c~pGC zxiJ4?t^vQJIF2Mz$e?-``*)xGe?b0k*Dk-HU4DxFi~9TQR@davfqdjG`)ZP>aRzPj zQM-K9fx`J4WcT~>=Uw>|4SU$XbJYI})b(V`3iUsk<8CG6{FSr9Igg9DjNBaUXYGQk z^v)f^p685P`VI7O=MVXheNWmNxA;{qv*wVzhh)NdDftLh%7)Z1@jCUfFu*-%jW}OX z25{Gr=?foozK;I=+r~lUm;N^U7r3UEy;lA`V;VoTuQ7N2mqlR&+5d?UMiqY7??U!{ z)w&LK{yTqd9x``xx3&zKO|$Q-wE5|iFc}@Ms~^xMZ+0*6Zpfa8`v3hl9Wzma*_ey@ z=>NIr_*zlu{i*MeyYD6YT#`$%94k>dMIQ)R{%P}o$+bwnV*T&9L18`p={oO?^sOkY z`+46u&PuT}g<_g3)1Dm-^QSrx5<_EN$)Bk-* z9o4J;N7p`e(-C#l0rTH>ng2FV{fFNTr^quKAs(OXg2Utk#f zonI%q_;usy{=Q|N<)C+4LGH$0?2mppjOx4QFR%46Zwl zBvQz&Q2&n82B3G&5jQkNfAn;V8@dtu`aAeluH#3ZL~^wD40#4stDp28)F$jx$bBA} zhtd^wNTb@lHI2sR=9z!so2Xmi-H;jINIiMcJs+JPaGComt|9-WL}=S#{Ckh_@14fK z53&DuDcif1@mco2Hfh5w_J6PWKWIh^vNw%?voBlGIavO>#MZYC@SV@fv%};seoyE3 z#4~el`L8=D_xn9EcU}HeHawsg_RW0VdK&rwGW7a+^4B8y3xoc5=_4;ZM-D~um~jPi z1mgUlni_jB9JBV(^QJs^`ieD>sK2YN_E zz1@W#dOYi_@qjdtCd~_^0X@4y8laWieqUj#Fs5TBN-!I9F&_)@QTu5z_j0VnJO5Lf zy){<5peW@3=_Tj)JQr5+UxT$+kB!)jttdqqDzF=IAH==nejLQ%=w~eAD0v*o=oV&* zjOzn4WZeI+DY}*Gx#<4tb7A|7AJ)gOh~r~v!2jv|l05w+%D*<&_o4hbByDy{qeapN zJ*b>g9G1G~G|u2GlFRJ>zo%*-4!5}PAkW>yziXs1eEtpB%@4S3egN5QK0phZy=i{HX!8Tm z_KNuh<_&br5)Rq*qWi|010akZVciqv1KjZKJt7A%=&!YdFcj71CDfoW51{+^&KEfH za;RKT9EQ7Y1V&*DlKiSrOs^I9)a%7zJbfbS`DeJ({OhFAN9*57exHn~C~uL*==nt= 
z%;c^xSK}RPy2`uk1W&)z6#%&HzphyT^tU3w(l#$Qhv{%3B8{h5q@x2 z{r;&@;UC7m9FCela^%pTgde>6C(ea4j@JE0;n0jf3g2t|qww8be-IAV{6RQ??`)bJ zzCGhlt$+K|u&?n?!{1N)U&FW36GPuOUk>Mu3-tb#J^AIaod4KMetzoO!Bi+%_9 z@Boi6fI)Bi{xKBWXDJ_sXdk|Bjy!keL*tm*Wyhv`Ed1~T`ETBa(3p5DG!1zxG>_XB zS`yz3*^%E2(}k172>0Yaqn-0S`}tkxD{_xPF~(yejww&A4GopsLU=EpVK>s6>HE)z ze@uKLRFAXY%h&b8d_7cO*Z#Px{ejw7wLhk5f6UVU zAnUm&34b!CVmfA`1hX+0ZI87-_Na$f;_Jqfl_R_9d(pN{{0EEwF!3k5#)>}~=LqzW4M()UHmNsU z*G#r7(zaP4{!7Gv0l&LN_UM!!aTrID+h&a(j?+6Et?gT8y(K+`oG$9Lau3{E?s_I-JjOe+)W z@O1x=GwwNy^SFr1xQc7Ifx`TMHh%1TEbRYrB zdEZ`}e1(KEihGwjJ7UWn}+<*Vmw3(D&=2(EEwF??+MS zH!jfssUrJUJs*nQKOVj1i7=7u`%zsNh~m zF2+(++h?{0sden@)ofPwe45OlehK?`C)@T_X)rPr=D#j?Pm*8N0rT(awY}_LbMRNu z*P#BUd-yN~U^$NYbD{Wo)OMShBY2|S(uU(Jp# z?0+!N^UV?F3SqzQKTyAk4ep*YRA4vuVm}VzFxrkuA9M_pzJsOjSm~RPz9_8!9wvR? zm%f*z?*-}GD1DKuk-qM$JTW{R70z)ak&2REm1{HfTKA{o{q!8-GsUwR(+SV&w+?=J zoWyAq+9T6^Ps;r>+-Gqf<-VCbD*jA6(QaD-HEP;zK?U>1$wa2Z0}B1qHR=a-=d<(akJ&%xruqdP@2g)jwheq8p^&6hfL#s}!|FZ1!ZuK8o$DO`nKEL)# z{R#a5!s$STKEimWSe||or*Q^t>x}OY*2YTcA5b@Ush4BjoPWjmJ{orDUqF+3wt2Pw z1@&+C4dEir4``moHW$u$T*PG*?EeYcT=b{*|26sz#5P4<8g(~5vHx%JtK49&19=b8 z{(nF|Le(4pYX1*#5Bi$4dCdOr{m}luul+wk`+u7EtX*D9*1hVz?)%sEKhL>mD28JM zMxm@*pE3{$Hp4ze)RFJ1kH3aR0mOf4bSeSvGKuJvoI}jPWSnt`GYq z=WEa_{^wi9kK#CzNFjqJ1Mowx=|-A1KwvDGi-frTVl=l~1M0r&8rpX=qqo8XDJ? zhNdm0p?O1TXjxXOZ@o0+T-TZ{4b@9ZL(Lnd=5UmTTGSzp4C*o7{~Q#zi4pp9n1sog zifUojAcb1g?R(qz|F-`3xAnij9j3cxCQ2|Hb1@$au^4ST-`4(rTl@cQ{qJw1|)>?7Gd{x6=v6w^27Z;pD^ihqsQW`0|dd0o1o zkWQ|xrq?WT9ldrNf9ai;-kFEq?_K4-JoV1!)Cuk_Lj`tYFZSbi_Mc%V{G0hd2iX5d z*#Bhn0`~tB-{uPS6B+lPX}+>892CxB9K~_uZf^@&BY^bai3KA>k%*#B~$_U|G6 z1FvgS^Q%0xE#$}!Bx|;XJb4mT^wdc0im}eW(V+GE4@hpQ`el-JXj~bH}yGv7U|R?@n~xBuvIsOvg-=plz7(-xu|};o17? 
z3+}t3ekU801C40fBCR(_|7Fr2xf#-b8oyWV*(02}n2&{cI)7*}J+|39u$;aUzrTL} zqvv18J;7JGrvI03hyE_#FTMAZ%7vfepG(;jZ-=$f?~A2jJ=yo^(r`l>E~VG96;mk8 zYa{E`i}9?O3T5UypHWWlwBL^Fs>sdcRwU{1{GL*J87iynu_lo{+q-JOz;Mw`RD?9`H^ssx5;y98>A%iC5&~{V(kB*1xe{|8ikzb+y zSHC}N-z3<&W3_L@Ez35JXC1a)GXJkp`S=n0qeGa<8uR~-34>nySla|A>E-YFCf?UJ zIiZb0)?IMTPrO6%IPLc{$kk|1kmnKiAuOy5QT`Y9|9akcL67Uk;@Z1zJd6J_@n@UY zX*0xm37M??Ut|2o7|=ywUB*>h!wuZR9sGy$k1qNCj`{x3j20C37~kjnLnV9so^T%E z5eATCn^zeF8uWjP|7v+d{(X)<6!r2;hF^MU*Wpl*o*x* zh{MQ#V_0ZgC;tzYN0kH5>_6^2cMsWc$@nLlT-Ur>`fro|WDc!M?Eh~rR_|Z3`CDQi z;K!fL{~N-7Q~n+GAIFhG22Hr5KFE>%WdB#?&#y>7`}G^SFr1xQc72Ucx?J#s)pcKJI37rrDkc*q{&D$M@L} z``EW=LNlJ(w~g#uwDD^{!cKKhw8OiW8GFam{rQ@Q3=cQ_&n?`+J>;gbkq7Hrpm%O^ zFCNhc(6dC}1G-l~nQK4j>*}8c&M72^BDrCB7*38r)$5*niswf8d)kV~yy&@6H%>fI zT_t|vlp<@}`Cs9GLi`Z-8rXjONk49xv>GMM!utCb>uKr5XnWuLN8Dqj^W2B_oixrL z^8OEa|2w6RcmHhvH}5^mU3mYlYkqZj7%!ZOn1sp5y)ORZIF;V14CvsVPM?V$?z}wQ zO?EY!SIn>SP3y)t(g-F@vy@?Y2Av!nHA)g95c zCNrqdYPY**C01b#)?z(2Vl&#bdH(#L%+TF{1Kfc3e_5DsAAlYsHf0OwT?x_ebDIcZ5tNwq2|DUCOK;{wq z@~iiWA5y4Aow%ftQIFSStuWSOBR1psICbW#-qDF5&q%C;P8X@hlC?Id~8_we)sdnuI%C1J}ewOwjq3X&W7-vIh(?P#tmWr zu}$`H+!X%)`j)WogDqkAijCp#=4>>MCVjWR6!t1>zqNXJc(;9H*h4Ryu{Hb+4*LBt zDmH8m?;P43%7<(XJJ)RvhxNaFU%hhFZ^w~DuXU3*W_ewhavjrv0Z-vu9MO-Z?7NCO)aG9u?9XMufWTh)~}?Ib;%_4ac_0YwoQ) zRveDk7?*7v6W+W3a`?yVe-e`P@}IELzxYyUa!n2$$m1kVqtUvshD(z}voXe|o0CHJ z{>0Fd_;e`$`q*&B?}hcfY`EBm72|)F-+8>#X6*9wW5Z?oRb0aj+`=8)!vj3R00wQ8 zU(vVSdStTqhvwXo{pJpiAV*;giZLD&F$t3q&-a^3_UZqh9^LwAXGV9O@gMDg*f$=Z zum2@u$NXm_-}bUIfaU+2`d_%?8vlx4z7!VHbJ9MmJ+qh|*Bo|WDSbJ5+I^dsp7cR> z-S_>yGB#ZF{B!-b6049rG&WT28ynWpYpn-NVJ&?<>bDsGHm;g}u{hLCv(}tn^SYPA zW^yZXt6vVKWEryjJB+V(qJn!j@)L~P8`JNaVjLgU$BfS-g<8}hjST9s*FF1j5QlLT z$B{$|ZE5oZvi`@|eka+*-Mz&8fJOdif$>dYG@=R3XsI&4pvL?HWB;uOjQ_u4-+XP; zIH%Fa5;8IDalSMDP4wE;#UX{l`oG_(JJ*T-8{)r7{E>4_^P%Fxj3DGoJpjs@a>)ol~@AMnVI$h`0Uop?Fi z@r?KI0FN+$LI0P$f}t3W5r{T^<)X1+6uoN8*pNIhD2$;OV?3U%&%dkweXLBlum1Jz zPx4#uuhnm4-&={oSsXhy$uHZ??crD0f8f2MFr7XVC76x5=zrJP<2>zuq*kckm1A{V 
zgpW+O@Uz+;KO7w9yLKTKBi~gNmXgb{5^b+afA6uwd+j7+pI-L`>3>D~e<1xo(Eh)m z{eQ(6B-z3p>*m~8=}-2S8~^{>%c0NQf!;6M-(Q$(uommF5u33UrMMZ}{l9je!WVtF z+|37+Q{-;!#eU@4{YRs9SoEj+FBi^d)R)n-&$Ef|)BTqxx~6i`;Bb^Yh-4i1jS^Lx z^xN+-ervop#hr<8*Q<}~$Ta(Z`z~_>QFciEul{Uu&C~IN9K8eiC>N-IwF!QI{a^L# z>fZ%V>iB=N|G?_!!b$g@#u=Q&d0fP0Tt(Y5_CGq_6h6APsNa!a{iJ=-a7zFG2m1fX z=1bZq?`xkR=h{|OUa{wzaBkoh?jU)c{eRzHYxJk<{~pjE;oq$P)5pFe+H1;>0lyFW zhBo8(q;cG%f?n|>{dd|QL+QgY0;4bn#VD-*+oS*gi2mIJ+U3gUG&1sR{R#R1hvr$k zZXzaOGNxiWW}I>ZO?O4xj?5#@^=?pxG@@yq{Euh-PxRL4H`4hr!kLY^ zn2+2H-_I2L_|QA&s6Vinz7#!2w2{$`F6?~E8YXqka=)!aa-05qat*3B$yW*a3TwI7 zBXdcP)aXD1$JXE_TwN9<0y_Ji4^+2BJIdn z|2C1aFEB@TAdizcjWall>Nn+oq_%iY)X~#;n*Z~4&xiA_yNJtp)<0RJf0Daxk@R0C z{oj!P(KTBB$20r?-^>3SQtaEsp^ z+(VrE^MHI5^>5fClN_{3d5SpycqsXF{_$}72#i8}hn*|DGqPKIAWz2m$D7?>wat3z zO~b+s`WV+1V>~L?Sr2}|dhjFGgOl~In4iyo5>ni?d(1hwZl6BxshEzL$nneGA0A5R zoyXYkm)NKDx#+24zq7Nu8`MrxR$J&#r`HU-1TI^KK!u#GvECSu^8pX zVwRF^o~a!jGxRT@i{6d=W8?hytv^`7{uWM?@Zwp6EyB-^GyYC*9c}!*>V>deI2E6< zuih8!pGi;htIEC*R?%yFt)V~kLRdpzi~8%D2^kE zw)?(6`MU$1=t4Z(CGPRqgJ8vdHrxj#0LvUpaF_IJa;I<;Fknkq__)`Ogjx1LUC1 z@)-KQpnXsFe#Y7WvQnNJL5{*06eFojjb-0>dR+S)*FaCCPeT0#>4&sy>d@!A>kaw< zKEeLS_QxOAC))j!T|X5wQG(g%cQ(UZa(>jUKZtV<7Sapr|DPWm7SnV5Vjt5|dgpCp z!&pvVi5}^aSKfB(2k0UT^$+{9aPDfidYzqGIA4dXAN}z#-94+Y25Ye%8?hN%(ZSyp!YM-qb|c9zo@=+4 z9?u*~VL$yK>JPC0YuNw#0qV$)mJf&heiX-1{w3eoUm1_1r%>?=_3#&cSM(<2(1ASq zii*NXvbSY$I8C0xS)9j3T*g&g!wvKsXSzk+LGx{U^NJXw`C z-_IPqiS$Xxf5sYpeGIjT-)wrUZ|m*r2Rvydr;- z&9~+6`||f=`I~IzZd1k-<|nzfQ~6Rjf0)c8oDn{z;ant!*_ zOHn^s++XwzWZgLJmp8p16y|68eK+=EKjJ##@~`Q;{7xbqp20 zrzep@dAT#s$tL8`Z)_m{waeLlF&hR&U?)_{hI!L>+9(~ufJye&H80D3A-6BcdcJW&ieM&+uDx%oM|qc3gziL zKQiZ%o;+7#|L>A8=$o9yjT63_BaOLx?t@chy%-Rt>pmV|0?tw9R4*ZJ{} zzh#75kHYwWc?fHTwH_O>8Cy|`wr$csA^p*bE_B;3AU~zVJjW8{uk_y}{ax2gwk(tW zbELm_+e%jNl)nzhU#P9}O>0A@FZjmK$=@jR9~Ib*o=@LM>?QZ(AP%E#iTt%pzFsGP zt(L#G$>(IAyN7JZvVVoyw1fSNmJRYJa%e?d|94b4$B{%l2Pn6M{V~n_KHoy8HhISe z&*!%c;+o=kw#L)_Ynu4Q^?y0C14(%^Po6|oyiYi_v-II}pF!rXaszd)OQX6*{E=#R 
z9qQ<5WM+6**sh#>TK=AO|9Rw{`*WGRing1|Uvy}5b?#KAACb3qc-QF3YJbgRe=T8) ztWf@vzrX)u{~tQzO*n=14?i0mZqUoWtbQT;e_RsokiFzRvhUCAQJ@TY5dE|p9+A0i zgTeqgXp8TClW|(Ic{MwL9F9Ui#2cRLis!oEIWKum<^Bl2jY6gQ-51?IhF*;ENSfDO zr7oCAk7u!`hM8wfpN#sfx=DQ!&uXtDr}CSQnJB?W$2Vqk=f7r5mn@8b8_$_fUx>w6 zit2Xr|B%vtsMU6;8=`L4ZpchA|L>KLhjQ(+<*r+aRak?ySdWcpd*ApIIxew)&uP;< z)V4wXkoF%MR%m~W)BZs7EbR|rW|2#1f1r7%GY*8a6{RRcP8{PJ{R(>Lb!G1%{qOX> z=(%nF0=jRSzi{8Y27U)|7)Nm&Nu<#K_eCK?_Wn%$OZNRzf5|KAi)han@5<}1is$rp z_^nX>u|M+k3fnM`$SU42u{<_Ansil_b@2*|8FwafM2un{XBUQabL;H?MuF3A5^l^0H3u7pTqw>DCG+8yp_!~yi z$DkN-4npln&!^3iZv3$RC!fx{Dy&C$UB$21&0kSQeDe7)iF=K2dono{(=ihzn2ovk zkIKLKu33o1Sc>IXiB(vGwoUT?Ht9@uDl59=vA6~?|GxZxLjKSGUir5~-RylloqrY2 zStz$RLEOuKt?g zj5m4*dPcLYjVpJ%rVDv~m6z<{N1jG<->`6oJc}y#rG!)4?Tk0>i^#BP>)Ez-u1lku zUh@X~7qzIvv-9ofm)&y}*Kh;3a0mDB0BsBGKd{I=e{`a2gZTx>qX!KqesBLh?ksX> zMf1%U!Xx42w*Um987FqEt z^Xq?V?8mhuFbd^gH%3bqqxa{=`^mn)knUvVn=gb(_RxK?!{} z>gjQBhBybK4s-c6Pci0CE=0~Zx0qatthQlX6C2NfS*Ji zu*z?15ZBMICD$X)k1wpBSD$R;j&sQ4+@ClHs7_gt#y=c;J{;XLG#q(zX!yaoPlWF` zP7jASO%I38eJ*^jcWU_Vx~bvd>Z!*1UJKuOqa=KLLrK^_?Z1V68)k;T-!e07KcGB3 zqC7li{NI1{|GD<^H(m&(^uCvzdnYZ+=)FJp{r;8mQhquvy^mcP9)8%aFQ-Nyf#1u2 zR1|W@=)W=_|6huf>-O;A-@QSZq^#*-r{w8-`Q?;dv9F**SyXn#`oqM3vo?aw`Xgoj zi?4-uM!#nL;eQJ|SIi8%wVVEK!ha6?-FFas+oy$Zjr=d+-D&?h?AgKp*nbJ%zBDA{ ze>^%Ij@L8}v$y68;X7OYQ#kO(e+q{OzYxB!J$}@0$B~5YiSPsMq!hjPFZ3soaczGS znL`KiIElvB|Hv5DABE?zUZnLpwT-IRDK!L07xAVaT&#d(d!w zaA>@3{TR+dTIs@z^3BXc#G8{4y(~Q%tuxo{65S8X9WJ>h7VTCOtF^ z^`FBr0;4bn#Tbu?D4egL-IID%`)8i^&jRfq?LQOOvY49$4r!9Hs+#jiS-%g zNOX{$8?;HZ9lGzTf6#-5H2Z&#_J?*(^AYnO=~>t1v>jTtBPy$kerNvUd|@W97lo=* zMPVVmcAhxBsb7h{9Q9do=9fmDaifZ_m|yV;=hXSFd8fXAGM*tG&kkQhUyJql|7su9 zEm4lF(B61Mxq@o-dkw$Be7r^4A9eQ8b^m5;Mcn_slq^F9y5!@S$79}(XMX*De_+c# z?F~GgZ`439|v(5ao@nBWS=@A|JN^tfEP=@~rj|Eu_v^!xGHaMo|FPR4#H}yn9Fa zZ48Q$++h6MIRAKh?SbNu+EpAT(kG#Qnz3D^cNT{_Y+v_bdsf{0e=|BvcFj~w$4pfG zNWS$BOX#yP7rpj$m{0co#Q1UA`1c3?QSz*Gb5}f$~ z=BBX!dk2M!^v;p&e_WrL!qahu zy4S@M)tAJ7ob*C1>X1eT^%!u^pi=n?vHdfY9F7qfgSI`||MFl5Vmqs=+xyopmqww7 
zY`D(;zs>%C$o{{}{=Xz|A}4>gX7&C5v3(|l)AyN|!+3H>Rq)^;Or)2Qh4Bw({Y>JX zjH#H8{;w2=nPdrOW9P4aotR6`N8eYC36QZITTJ%rGgwM4$5MH{{5RGs(pMqwe=~|) zgSA+XSchi$Z=`qDs9TP)k?C8}qYlZVyGmW7Zs~3LcqrxH_f7Q(xm~<>ux-mmdUozY z{{OOamcAGJaS;8_7lp%QrSOlE$B{%TO8-~IKSPgu5~e&)6FrCevHX#qp#R}j&&Ds# zJIs?O<2Bnp9!`^IkTu@W!Tyi){?2lrM?5>IM_pT(2dIysdY1n89sWc8SbIR2?ZU>> z`F|JPa~W504L4BO|62QJd%L}7q(OVP=g6vm|GoTwMg1$Snq1c`UM-000&@BkTK!+; zFnhWQ^A7GI?mP8#{$I?;wQoGJ|0_PB9MxYC_auyG7^KzTaWCXY?rHWN4UmH@LyC6g zbL3FOc|IK*e3x64|J);xpQ2qb%XhlScZz>A{;e!X@00&gk5TR!gJO)wL`=eDOvUf+ zf32;(N#4}9jc0`Q=mY$Z_P?GkjF~7wn3)`k#jQ{$}Wl<+loJu^#!KKOZ)dn^9q}zIUEC{&+(E|3Eo$O8Clx zy7$H3a}?$mNvEx@Ek#+p2fMKs`;q^%=R?~m`JavTZ2tFM>59Vq??cLe-$dgA`JZf7 zH?)vh?zl%^>tp%f`u>B$>QmPrCXeDcl1QPv+SohUgdFk>Y+W+84f5nkB$ZL8$usEt z%Mi|zasI=3vR~ivW%8X*#q$AQ2-|1LpW~#@1oId8H-9iJtRXMr8g3x>nEiiZShz*+ zG-lk97#{A>@1ch~&)rRS=?8eguTP)FBeM55_Od4jZC74mD28JMMqvz!(f^g_!+3Hc zDi;h3lgP=Kis?vhGyeR#a~|l0^Y?bJf9bPPzl;6LohIX6eX;&)o@U(i!}>30%)k6+ z`W#)GH3y*MoPGfm%F&<2a#_E?YW)Ok|L!gJ|1+LnbK4ldvHV)pv8B_|BJ&T>gg8&268&p1o`^#fG!i~Bz8Cif!u&>ofKL1a((E^Fj} z<-=j_qbThAa6tY~`%bIm|LgW=(8m+osBxWMp?__b?|+*7wSukfo+MJp;G_M!P26p- z+W+3Q9r9);x-OVsaz!6J*&|;yyrTa1dy_Dm$rjgVN3svb+W(&H5JveATu)Z+&`u_+ zcFF&Z`t0cypDNOy!dC9}e2+cfb@}tQ=Og1f!Z^;CH-5SE-N%VI?svvL&E#3~Jo3t- z!uX@Hh*&Pgwa9Tz@K6p7Y!j`7gzCti&p;!CF+SuWHm+DRp8kd$g`s-E&(Vbe(;AihZx{Yn-Eh zo!#23u5BS>y&LP{*0JXQt9#qkw;g2XEcLFw_wJGE>lf9-sC>~mv;JozHe)N23)KJf z%nP8`9%CCXv(GQR0`>bo9RIIjD;qcX==lF`zwgC<#PcKaI7mN?!uZ3x)>P4tBZ(BM zk9-)vD)AGSI&m$mZ@3_SABd-Gnvg>W@;HgpIDCUy9}ER}ZZu_d{d^Ow zd!pB#leXG4$D==Dn<>McKJlcjvzcEC88o5)y~l|h*?~OzzWF$DlI;D$ZxW};GdPR$ zxQOQK1Bs9Je_ZC5M+ zVGO0$j`M879ZnyC`d2&~zw|WEI%6O)ieK~1fy5ZH7`X!jiSgt_WchbA4kS9;2NILG zC!=sKj545W-$0@p)nmn9TvOs&EADkahKx{Ee~&A?*Cdr^0&Ilsi+j{HH}>Bfa7a>hCX^OGDp^ zQj|r131Vf??Hon-KY#`I;FoaZTzuae;{)7)+*W`hOt|ve)?8NwV*2+7{$? 
z?dBcEaO0eqv)s)Oop(%D{IzkPzcQ9i&#lnL!DV{qJmm&kmh8$Nh8dZhBl>pJkuUr+2E~JBC=}M_-H{^=cm73H=BBpRF%E zpq^3(*Qqblr_{Tszwg|8_bkUstil?s#d>T++a~t+Hvd6(exQAELA|9u%ac9a4eH)T zVK*INf0Kp%@6?;Ij%`Kd*pC~}`DJ3Wu(qNU$vNi#PqU_hUb{#gH}B)2g1#Gt`G*J8 zZLh22_#d6~F!2vn4--Gw_c(E6!NbJiC65!|U-3Bcy}@@Ahm-{etL`Pf`^MeGch=oc z9GG@LvA_3b;@dkPB>sNMFBAJ#+)Ok!jtvb}V?*<<(V^*eYikmp4lVabhrRx5KMta- zMjkV^73Z2AH||h5cuJLNegOS=cFNQi}MfHi7LT1y8<{|Va{^phb#8F`!M@8@b#5*rONR-dGmDsuG zR$}*-2Z_Jyy_<;p&!zmn_u=Ejx0EUG&bpV_^Txfz{&@q5Zyy;@r#~MmzGmO4#z%?o zJ{%mrv+Pmg01m(LT=@P8@6)|a$e{y2h;1aYIvw`_G{*3O@uw@s6v*^rV+{9=tBkd8 zpleR!49?;_F5)t-qHU9Lnr+6Ph8VX&mwUPs#-GqL%Q(#j<1}bOGg=lI$3YIQ3;4|` z3fF{l1GjJoxot%u%l{s|Gh3u@rzkw2KSIxeBK?m=Y$t6b4DgF**A3b!Zz4HEA0as$ zRs2(Ti$X0%aL2O@Gv|szJ?eUkLK?l+{FOg%{Qon)S^XDdT-VQ@Da`->?66SG-KYI9 zp6vaNIlc1g#CXj=85<%eV=Cg=gVV_@Ig>0wE^D48ITxMAKRf0r8_BL^%0@EJT{*{I zo#HUcHS@6$$!+%VpZ(~;@)_>$-PJ# zd#~cZpI*C1-vA2jkFRMv(=$k;4hLP+?D-FqM-k`$A19N@_UaeNig$M$!&k>3ukMfQ z@}ABAfAPch597pNeCu8n|0UwTLj3g^lvmrg+I=~6#QSj)r*Q^t>iu^1dk)fMxd}R!!s84 zp`75oh=1!5=OmI@?v8HnRXVp-dC&CxtKREK@7X)Yi(P!z@+N2(i9yFlHzOx%#6*UvHjVPE3u!kLK@l(!Xy+2mZz zN84llAFoS)bfOEtJO5h$2b-bk1N|ST^nYB@HXw7{tz_>{+5ew3_un3Vz0cd{Uzm%r z6w9#^tC0V6QCLfA0*C%T$4}%9?y}YTztrc2{;zfH`6K3P@ZXH=0d0?6`ejg2tsVAr z?_&kqMZDtKZE+4|rSkHDceBRzTTzPSGUM;c$1-}YGBSk<`fk+kP&OY@M(hJsl^*`A#LH+-#`k!o`rT!4-&_d6CAe_7MC)yJ7H#*RX-#@?YmN4(&9v+~= zyL?0rV9-12*9Fp7|4B;RYo|$H`9FQ#_!jD8{=YCdJm*H04rV{f52NU{Z^#?I z-!b%J)W0cj@O!%d&v$Xmlz{$n;Y zy}6hEBkdpd{6g-<$f>86lFRXQzD{>mzm;}<4;jxmSn0RkXkU|k&iSkOy76@N=Nj&{ zSdWd^jIAg|87k2KP2+#^K#jbR5~mo~x^v=&4C4If-LBh<{WyrjIEv#)qU}xZ2OUGa zA3R(CM=$LEll^f19~t)wY}w-dB1dn1L;wGe+5Yco`?u)_cZPPJ(Pdhb8>0Wt4F-8k{z zY5aSiabnb_jSFXu3tuq)ea<-XDdW__xQwf~h8wtrJGh6oH;lid1D)tXH}dE~L$~p7 z|IxI^SU1^n#Mm~O<8CFJM;C_&!g+)N4EkH|eTi#k6^G~Oov$1J-clTf(ubpmJHO2M z_afupLZ6KPaQ~k<|3BkD59N8!H&r;(F%u=oiC0$q zX45-&`L=dA2Y@~wJ=}TjxW=z*pMG6_i&44BISypi=;E-PT#1+#bEWAjdhH2g-0yqu zWISUat}TvhjMw;WE!JZrHlzHH?d?yNq6`(-jlI~9!#Ilk*PqxVh4n96taS-R=HIDj 
zzG07fzxO}?a!8WBb<&sY`;~Mhn~*~X@;HgpID`H#SuaSQ$3^tL$BriB{y$gAYq)`1 zxPy46&^_`29$^4Ee%U4V1>7b5#yx5G=6|SN&fW8h?|@(TH0|IS#{Xu>|48kT|98s& z>d!QpL9~$z`#;?`2NA>FUtv!7J4J)T2>K|rdB5$-_ztpjg?BRV!}?zy??GYxf0g<_ zE&m@=|I-WWbNx^2HuFM-QH=4Jh)I}?sp$V(>%X<1dcRa0rgQgw$ypL)31(w1D(5Ia z$%R;qrAYFtQm!qh*UICmdCpa%uR{GR;>IsMOR;D1-B$mSEsxc|#t?F7 zMe`-s{&)RD!rF>b9TYsRPk`i7m>h|$R2K6}Zo+MK7UNj+x4&>3c zQ~dXMMlzmN*hPxEba2B~2T{HT*aGu_& z&hEfP`enpD{qyK{P1gkDB(EDM;C~ItdCwKv;@joL%3kR=(s?@kVqD{yhZ&TA&RR5i zB);nu@;|Zvy?f(65_ib#CF@_w2gs@aDz=+X|6ld3ozj-X(|xsiM#@KcV+2NF42lu^yT_A#pZ4EhXG8in)5xfU>wUA;W97%ezFE}rE3bY&OmzJu zOh&$ISeQyq$4qoBkeB84JmPwzwww9`(Xqla^J{ruS%BOn{e5T{tv*H*y%{CK==&FY ze3Nniz`11X``UpxRz9D5Ar@mfR-#{;ts>W;^2*1;TC(C7aovIQ$LYyK=Kt+8Z;@Wh z9#3I2eJkRaYK&`~^H_&cer2e@ZtTT=97N&#FMA;LzU%wtj_3azC66PC6q>c+GGr5S z=!jCikUgZHpg&#zcanYv&-M>JqO3yoV0IC^Fg2E)G?HD!=1e2QMy|(F_mq9W#%C*U zXEUGXK8p%#8{cV&>23TQ9go?$_u2Ps;qE2uv_Z zJ6D^_|98GiJRASt#(vyn{GXj!GtGZY5r5Pz5%wJafwrJxOoKv$I_{7xPhGq%UZf zJwNDA_rF_A@A=G(u#`;glpk;VW*_=?@pS*YzAsyA@*}pZx@x)WR$>*_U@g{TBQ|3z z`acubyqnKJ@BMRWpOxp|v_>Q$&kqqc8Rz)aPw^l0xGzwdYb&rDd$At}aTx8_eS_#k z2fERP9^}!eu4_=fG*6J{k$puS%rDM8$e}R*M>&_eFMh+~xTrSLV0j90{ohglaU3aR z(1aX1kVj$vpIOrMwzR!2jUP*EGSA&ZHoU6+H;sMgnr5ZSJ<5N&PLBx z<9*X_p|JlCclR;RN+$W;L28crPyJ9wrWdFy$a?k0J-6)v24wttg!T zcTT!qF_(e644H1}S|y#ybT;;X*#G~&wz==7maOyLq%nfg3E(*jNh+RqVF+v> zfn-P^h7C++c7Aow&+hrzJplz2tbhVq(1I3}VHOlnU>0uR25w;Qry7IoWcTiS|M=>8 zPW3s}r_MR`S7 zsu9;BtsBp|e=p@pvD=?u(ug;vBp#G`H(NZ49CD zkY|yQ=Ec&El;_ie3*xwh%eaco)71O<`ZDQT=-Il$aFc!;Ti-4W_sNGS{nE=}+b@(W z+WY->hE2b5j|0iUNDE702)%QH{5Q@x9eo743)~+vW89zLdcQbbzaQJbLAm&{eV&~& z8e=gYC0{U?{%7W?()Vm=vt z1K%XKTzkcuORt1Yw_XY5=8Atm?{ndkdKx#(VUokEGbe*|h8(s|i)=UX|$4m-8-Z~}xXw?@& z*|IN~FZ=oM!-`4a2lGA`cGDa4pA8KIo(;|BB{eO0R^P~Tp(XWPSna>oVm;o$Mr^_1 z(=YSW$_qbRH^cbX%i+)o`9OFagE=hER}}QOK4ky!Pxn31nUC~w?4p7>jUH$g-zP3o3Ukpbq4)k)>wT{ zLt(f;?|s#}1z$3DO}~u3KfDsIlGjoC6Lzrq+|ANf^2z?;rm)+%i}avE{%7(Ez4MN} z?eUP_ZEe?pZz<4t$Tz0dq>^c^P&BJQCKxQ 
zb<(3}ys1EWFV3{^=p&z`cb*WpdbWvP!oMJA|EsezGt|H8*0^_gxp>dT@6I`u`o_mo!=r2ph_`Pdry~9XFAl<{Z!K zHodbzdT^J1AKlBOd4u#9OFte8D_@{3_I+(J#I=(LlS2^u2BPm|ErvNBfn=fdkMW*o zNGGb)&DFk#nhDn5Lfs7CKa%u%jB?FrjKxGu##Ch6j5m{QH`v=-<*Uu|72=+F8TM|r z(Yr#!LTzv~Ij>oJyyXFVoJ^zjto)^3-i$c^H>LZII|Iv|iXd*og8;);l39 zw5coQ=TiC(>_UyaT8lqlAEEb`)+m3*`hC{wiSzU}t&twY@^p{uN`IkW;H&vzKm8#3 z+;d+r7nfd%8YIz#G&+#QQS|*huK$>C?Q`otIPQJMI01^4#QrV((Ei=b{@uy`-OBz=4GDW)a~JpV5FhRT>-Pii z{{Z_x!Tzma|FieI*#4Qt?Eg9J|C#LnlkDHK>|ge5ZvQ)RrZVh*vek9vWX!_@#Wfg1 zP?^vE9W*2iqt^~)|I$n2{1-l0^&8yV8uysT{uMsLIioNd&8bg?v1H0Uq}{`K_dDM4 zM0D`=??jie{U`V59;;TcK~W>lTEsa82_#XE$*!G>>39i6cn!1hI@&y^_MOsCc6t_F z^o;anrR!1upY-37el#Cr{~wWlq|u7o|5aYH^-qMwNw5tlmAhx{;E?R#5yAS zh}J)_SFdySVm}U|J3Jc>la;7JTe19qL>@yYa`o&9c})K6M#EP1-z@omuDnXNES5LP zwBuGXuK$x1PZQGUh;p$y5?OjIQ##a}N9o6r+t+iS{JYzKksk<)>;If0&!AG@!8!5* z;uu)2AMw~z;gaLaNGc2J9oLa@ox$tEdVkHf>TfJw|KFw)?C}SW`unrbm|yPq?C1N3 z+vHu`M_hyRA=z)YXZc(0AbS=Jqz^`~IWR-Wp5J~d3?uuV<)c83!f1@ecud4(OvQA( zgd)6#*?1kX|8HymVSMy_BaY)50lD>K)RVD%nJ=t)ihh2w! zBykOhmGsrft^XyBnIrN)S#^#76>9GBzd{{7fuy>j9#^IFP1hL%3UU9mwe3}*){TlcBX_%w_L(^h)oqDQeuDb1(`VXxrU&GHpJf+xyU8r2a z|6s|0u!kP&?i%c+??-N5BJn0RtHa2H!VaU^n0zIfB5TMb(pNqa*1P{EdS}KQghPA- z`0jP-U+5;YsLE6SkMkd6)RU+~Vv_n_SX}2g?b?s_Z*(}$;%JmFvcEl}cI5U)JR$v_ zMFv@VHyXs%I7RxWN&g_}A1VDv(_0beKbBaBy43v7ZGWSD7jI?a(fr3#^x8Y_?V7Ow z`Z?5JbZ}B^bN(aqPwoSkB8{DCv5s&v~$V97=pGN+Q0I6hjOHIv-U4NL(gXTnJ60? 
zp!v*m$rlT4#kSrgZu9wwd<7=_WO+~EEDF2>Sp56Ht9PoIb<{ztz1IDWNd zC_h!_H9&#`C z;~;YVJNkHHem?9t`?cpo$=9syukW^xPjHVuo}NGP4|v&nKCZ8EP785a&+DvM)=o9pw2HDnIGRQSV!bdk4ln=HnV@Rl5s9^*(F-A1Vm7s5@QY z+bRgjvjw64L_wGy!7Yy5wKvF*ji|H;-lqeI(C{sU+E4_xFwKz7~YKk$J6z%~8@XsEFMe53TCd8hUJ z%go=uWB$H#TFENo-PNd>Wc=P(dfhnV_w*!LFV4&E@hYz4CT`=nZ}~2HAI(=@2oK4A zKh%E4V5D#O4-BDqPLsAV{0!(L(7i_5jH_q-XP5ujvcg!~p%=oYg}%Re=HFMp5Jov~ zG%Dyd%iNc1a`U);@7YUtT-Ts(iTm^%s;0Pq)Xa1qa`W;%m!xazF;+a|F%gq771Qw& ziqJN}_%k|Y$mi31o0H`0)7sLAbG@Fd|L=V_qeYpYLV6_s6Eu6ruZd?iUdJ0q3rk@> zy))l;k2mRy&`r-GgD$)!tlaxuN-jqweI>aX74$gvT#L1i*CR=<$K!FooP9mi_m6k{ z-uG)`V&oQ-{6L@B7yRGn^-27TeotYg&!}(z*_;>p9yBj8$BW#LgE)+|c70r9u#(=X zy%EO(Yv@UI_joq)TjtiIzT$HEzA0XV4rEc;F8?&jGxW#h&vE((h-(7GwE+`T$Mj8CUTneZ%!BI_~-H^Wi4h``s79Z8Ep#>i6{de_4OR7y5_$!XBdU z=hpcD!|2cx@VE8{_=0@`8b*hHKk}VoFos|lMxdmme~9}Fj-sdZFQ$j*h0*k}7>_6Z zV|S$efoDK=-_Yl(uBpx_S5oS~6Y3OoR{}|T{SiLKuAPjjn2wiFgx4?|Pxjx<=YOZq zz6%*-(T#@f>OVBC($~IB|9i3icQTE*|8A+Ymd`L(KwNj}m5cSq)8`}Z0aP=`ec?^V zixBtzsjo0jKqgY`tyPc8{kR6_TYg`PxCZrdawSS%)o%Gl2&?I9u^#VWBevkl{y**h z2X)obkECZ%k5cFCz%J~;UhKy~97daG-j3Wnd$Q~NqqbF6`>LC5_GEvg?dpHD%#;6+ zMl0g@L#23XkVF$6k3Xd8oyF?#b)aX_U8dea#&hbzQDJfX;W+sL;`qZU@(gn058{vO z3YXNmj{oB0ePioJN++r=YX9F*N2rhgVt#GmQ{jSmF5xn=-+Ve;C9mTqZlg_Gza1Ut zwf|3Rli$-eM|Pj~f0_DU8^2LozbUr$<--;-)dbezo=vD*zfPFV=>A(CDsNQO^(HQOvGeNMc?zrS;?28u6~Zb7LV6+jOz~+3GX#F z^BNh~-*`Qq_icUDztRuz^Zf9J0ioU8hpe_&H@fH;W@{@u;~MKgV`c+Po;`^keyC!B-B^v-2$Yxh-2uR-@JwzaU#I<_`q zzAwMQE{g3xdgV3u?z|>c&};M;#4+@=;|?V0aUX-n^ZcupNWXY$ik(N+i9c~i91omF zmhwB><~?Rzdlbj<0Z!ozKDPd&dNqzYWsW%?x%|IV`pcyMfV4{QU+ll@KhBBg0xsb) z((9!E3ZDmh=RE13GbCK6-$eI5aqspYv!q}6ZDHk&)?py;qjJiS@R01cSNc8cntbUR zZhu6_gONOIT+uz&36Fg!Rq}rIBza_<{y+J>j!ZaClJ)Z05Z4UD2#msLj79c4`q;^~ zo$^0hyaP|x*IuW7DOSIb4cFvepQV zkuoTW#4>3WKHE9H!F>9!eal)lp9-%#egpIICJt3U6@K=>Jdq=O8uPV7^g;Kr?R%9; zi~Rl;mZGG8|B%IUdffACCAk`F(W||(p6vOmHV(NFTTqIRjxX$RybF8q{oS7leeaw9 z@mponKbv17toPrCgoET^ls>0l=Lh}6zAK*&KfzD0S!-~{{|E;jScmb}{|NsmKOU4X 
zcTadB{NUtg!Vky25cX(mmsuO|M-%=k{CMQ2?Zx(2;d?#Lgq`D_3A-je6TW|sua&s> zV#kbU!#|Wg8%nM{7q$*~E|gw*E^NE?Y^d~G4Yo{qKK%Xg=fnHe{X^1m6Vm8F7W+^4 z4?i`J>POc1D7*5>uopj0eKH*N+i~n!_;mOo4($%%1IOig?}o$sjNi4t8-Bj~-SAKQ z{w`GZ6o=}~8$#8gzYDe7zY}WmzT=zycBotUolw8^+aY=2+o7@NTcIKETgDIH4owB$ zVjH||jN$EYN}OkK4i|6aTB*uRmlH9KR^v?^$XNp)UG(j{{Zy``rV!LQTzYC zCh!HsA_GjjbO)0E?d?0|zbgT<9w z|JPifA@sO+VoKX+7`-zkZj7LhLia6kYe(ePU#b`wMhh!n_=zx<9FIzEu8HJiR1B3a z=htGYw~j-PdZU{CuP&*{u>aYKb>sB^&C@4`dc5SCBD{v#D0$f$&g2`I zkG2Z-e}aubc4pWFWX5rp%-R1_*#Br^k2a%)4gGk3n$`pC3UMsLTWFp!AS@+Q+T>~N zlI8T3SdET*;zt)dJcBrgwtE@7!f%zE)%OqhEYr)EsYAc^ZYceicf+>#-_?gDElZ@y ze?{NkxQ0jE>wBYfx8T29|9A6NeS#>B=igw1kh`!4d-1XIUq9P_yZ)~>-$&bj`^9k( zhf#^17qv&eX->fBwMTxb-S*kyP$Rt0*j3+i>UVk*(&#|*V$b5s?}jY>D2^lgHN~~S zKcII?e_Wg86#We1p5$3%oYRH!1I1yz@N>9;OQ<|s94b=9;WEAUp8vm99In!@qkj0t zkVL{cb=Wjl`Mp5-jpoLU;ihv^%ic5o{9d?CFMXBYPk;5pwf90Ny6(NFefXX+;P*oJ z$@jut=iJ9YJVd{r&@mW8P_=4fs9wA=)GXK-YPY@@>NdX@5=f#R4Kv>hjc7tMTGqX% z4f*K*TgkS??}heB?}d(W{=d-w(=!GBf4Kj@qwVnEy)exGjKC<2M&+1|*6-gK#?osm z{5QtaC!&6{^zWAb)J9`L8^dH_Q!yPcp$M;GHeN?=y(Rrpv7PdUIXiPdOh!?kba1GAI=57~8qeMj;q2;F4E z2L3;2a$fTlJ}+#)6q!crSz*4Pwc=Ticd!xZeZC)gWeYv#=?;|Acc5GSo1LK!o}w-$ zqitS3k`IyJ_Mq}&zp$6wkBam1+C6y=2OS?qvRJz!*1cq6o$G(qKU4~@LH3vW9myu7 z(SeUXzpUe4$m2~^vn7MzG__``UPCVWn9H|+{A74nqP32?0MC?ndHM5 z_PO^$zkR}#7X!(`7=mFa`PP6if*gg>7>mB=-V5W&iDYg*K#p1WSM2|B*lDLitZQ5XXvpUVSgjc3$t7HtK)uAGVyA&+Q4YsY1S@zaD@4 zg>Q%K`DcDS(m$R44mM&7O0fgGP_>U;ftnlYkq7FHE9?q-ZvCH&>Nj;zV?zB#Mql<8daS#0 z`}@qbuFN|AdBytP$<8_Y2h>#=Wa)8!Uh^z{qW)(u_TwPZbH#zf^xXP4Guh#B{7>g| zcVoO({pa_%{>_%f@^BtIY?kLmuN>rFU6({f)CaileD@dQjp6kAJL>u2(&6{q{I~_q zL+y6yYZRW5zG~;8(ls5(;wX;e1DwJcw9R4vBl^^K;>r9EZR}XqHh4x)p3hFt$Foc! 
zO>f=mnfd*kcrM@)E+e)LQZw}9(>qtF|3_Nqhkg^?i`DDl0_i{d;rPGrwH9^LBWU_ve_W4OR`iJG$ z5Z4bwoFA0^4&MO!6My^d+UHx92?rk8=-JKM=i2uT3$*_iYyXqY?0}Yg+W$z?TYI$2 zKg%wDhF_?-#$r4sVlw*7=bK9Q$Pd%WUVHgv|HwyxUWC^WeR(?A6ise{|)mV%5cn57G z+3l08KQQi*9p82Sk=>p>$&Nq9ZeKMZG_GUEqj?s)eKEVe(E0<@*zpqvgdTGud%mi^ ze^!0pW&Va|y{S5cjpB}L7{)aNw$QhI*ZK<93D`m3g}z^ZBJ};z`dJg63VR&y#eN(_ z<%37pR%~(a*k%?VlmDj=RZ*v+UoC>|Zix|Ch0C{ZE=~^}o&R&=16O z3TJQ*ac)2g7wDbF!8?Wy376<2(7llT&nC{<|LoVx!phmfSIO(Bq~9cOqk>+uK)M#Q zqkjHWxGO9gm`KxM_e$volrJFp9T5a$iYdBl6^opY5B*iS!*?s3Y886UP^r2DY2O4J~UzOQ{MG?8g^ zp!Wy9H?qgNn@7px_yDJH25~Qr&C(qA;wX7;fIT+!nF`N7Yh8P?_cu?4%Vdv!=&NMu zZ?sdsW^H@=P29#^+{Z)odtdt(gV9&V|Nr;>?dLHh3~?Oi{|zHYU=&6pu9-TP9FK{Z zjLL=b|2$v|9v0U?+NupM){{6PtcQO?EgOJp5oX{ z>|5J(|2F9V zM;xPmT|93f`n{FDsGk42Z>4Zxm@h2n=eCY5JxjfZo5?J?Q`$$@q#sSrX+{e@by59)Lj8|8CvvZN_TwN9BOT9Sn^)31 z-FpXW=t*>s6u09Hy0B$<9DmgQS>hcb`VKTXFYZ5FQQ^83(uERzm>n?=dYlXRIGs8B zcc*WpO#R>Po6sJ}`TvmhC$;-G8E5{cJQL&m##7-ac^n_$6x!Cw|Hbkc*(slQotL+e zrFS1u|7Y0$;&0l@{+AD17RZ~6<$to(arFN=Bc75^YP*vc@Yw(768-V|S(oWo(Y;UH zyZr|l*UxGQ((ik}?zfwW{y(?LyU6+f_{MAV`-S_CA0l~Hxpq-GH$0}(e8mIu8wR7V zZeSQf4#NnHLiTshhtcF%OvGgLZ2Cl)O7?!kbJymK&wUG;i&Ek%lPKgOFvs6 zzni;%PyL_{$&C-4SB5vLAJF%+5Q?1l8fN2lyn*?M`<;}0>-o_0YvW&zgb$MYeeLd<6*_KEwPzRvuhVg&nioS7}?d@qcv8F@BG(g~sn^8NZ)v{GM#M zW&Zy?^Z$>S>yMUp~g4;X5EA5Cc1N7;guKFjoCT*@$~+)M&%k-q7dh_zfhleVBjmXs6J#B~OLdc<*ms2>Z!{=;3#HIEKB=2Z5|X5`Ev*4kFW0uk%flSscZ2e1KCp zgLCNno6+F{+4HZX!zHr!`O)Drc^x-#8=F^nZ{BtJ73nC}e@@;NUh-`JP)U!zi}xMZ zZdMK`ZxUy!hi8l(PGkJOA)PZX*b$ejP1IwJc2uXpWLlq#RMeb4@!^nU-S z%*0@{sjG7H|9Adb{pYxQtNKrzAM^iL2d0p|q5iw#_mk%5oi#u2w7!5F`~%qLsR#NG zu4x~jO@Dm*IPHr``VYw5y8UF>*GbEp^zu=tcr(d_u^RMu365bKH>6*NWEf3`9 z&*=|*@l*D<;_D!;rD!`Q|DTlq$06N8tlfaz zIvTs>DPiSo;vM8JRL@`o6;N1Ck`Mj=8r1v0<^T6h zeL{Y_j+?lRySR^RU@N?8?3do}XYvCEV+e*}1V*7;`{ytYDSLk=554L9;~4F?u^111 zfMN6QU~ZWD)A(KW0c*gOnWH_9eO$;sW>+RrkA`CQEnB^509$^N@$DIGSLD`lbN-X{ z0gmvGM^`)B{Q#S)jDI}axVg~!|Ne6_reZqMjpFe8OZ3ib?EiiGA->Bl@ZEQ_&$Gy! 
z7uPNO0yw99gMA1~w3W=0sARLhCTun;PP2cVTbrMJvpxl=-Y@sv=v*j%=}7q!3iKe*>O$DtN=Bi$F0`R;R) zdGg}jf>P|jF6_Zx>_=OH{uj@=!?~SgSD|Mk&&M&I?vwf-7I;64rIXCPpJ|@0^B%vS z=FR!xpm+|W21%s%+5f@bjM=aD51sT5dg*uBtzFvmJ;JRAn7L>FN7rtdqwYDDZ{1k^ zg!D>lk2b|PP;u6GgIZx7j*|+|8MqxCfZ$qk4yNKQ?Z5_z1zxQ)}g7hpN z?_V7EgY5Z&^*=vp4M5}kIe&=L$`6cp{X|U0R7}T9C_>+N`7C_F{E^|>OOChx@_&Ze z9!O`ln&eqN=1CL2zu|Iu`jt$UXJds6+sPW@i2ekWtt zdhxu2jo5_lML8?GyhW=O?rc(7%_j4M=ttX!DU7 z$62yl8*slk58^N?QG;aEeJ4$1)g;e%n&*o+4qtcL|00Qc#Qy)Z^E!~lQKYB1hZ%hO z=$*r*A0N<9p?j6Kgmy<}o%C-oFF;uA|361wK&9|Yt944z#-yHos~b?F2m^2|7QgGc;0RoTOf`-X3>pp?yK~hFNY@Wl2OhZjowf84`a!m z-wzDq$-bbCM^466Ovg(o!fTj~*YO5=f5x65d#a4jl70VfZghcf3(-DWBk<1RO1{M}fgXJ;q91xyXH7Pz$tum{@)qLZO&;&$DCK#|F5wB zUkMpx=e`oUXTA~|66U9;%ujDLKYge9>HEx2M|!*Y=_vPGod0r8To-T&l>>_OT@{7P z^x8GkUp~Wf%)_F%*7)0tNHVuVZU;D0fR9F z!*Ev~89|Oh)ndYU|RiPebh|Fz_Lyo0s^^{;yLznuTKUH>1NCdmJ2Q8%TKrnl-Bi2Z*X#j^#a z*n#vy_JzK^UG&bm>QC&U??v|m`3xC!VZX40IE+g4Jneary;ZM-B)M5TeA5(d>A!s? zmyer-W&hYelS zRq{G+;x-S;Y^oOWd1|(xSfY|@u@8`ZB48{-)!w8H*-{*ZBWY5b*VJzAE zrM$3dg0g71^3r>sC@jwZi#`<5cVM#Psdzm9Z#w<){J)pzMfm?a|1a&H$Ga}q{^rZ` z8htii#~b)h=l|_iN1+@4JM;hMi|0)&!ds|Z@o4_vQu^chf6M7B@#pjZRtsy^cd(XR zkMyligm=h|NU`fW{1*4O+2XhqSvG$+x{hh5kZmjUU!Y^V{tI-eOEZW*cDebBL$wbI zv=7w5&CY94=cbURw=PkC&QZTF_(=OHwky=Z$u;U(%r5tBf297{;U0EjPrR3JvmMIV zsPvrw-H!6wcrrV*bBCUczNOng7S;|JeM$M*eyD-m z6Sq;h;nDoRyY$ENZ|>6{;?L**ZC)jRqhE#RJV3w4*Pjl9=|j*L*VflQ=y~28L&v>; z`a~E(jza0bu)nRX_uu*djdt!>jK@Sw##BtlOZb@o-wDt4yyuE6z559JE2aF)DE~a4 zX8ig5k0NozzJu4u*~mWsbaafWg z`0dOa zd(r)oEsj&pORus1KgzGURyxOVHC_01%FJIQGde78xZz8?;sAN5;aJo32m!BVjqr_{|Cwch4TM2`TxB9 zuPxPxCg(LPcc1M4MsHnU{NNkr5?G%h&h3xwkF+?W-_K@kmN<6N;W+l;XUU^cw^vwf zfAsx^-;Cqj`UC3!Q}kwK%^5OP%zt{FHZ}bME}>(d@)=$1g`9s1*?ohJi6~@MQiyJ!hwq!(1~0qc9p{F&+~!8EseeKj_o% zAUpN%caa&#S+ZN-L~i~6Y3fJyVw`K&B3-%p_tMsy7w6w=8)#2$PH9_VsyHhTS^r=7 zbb77!L|p&B09oNUP18s_g`0I>M6bXy_3gy=Sf6n_$*$+wBY@{;MK1aX70_|_d zZy-Cvb4S-K?}4mxU)98%3hqOOV>)#j8TCB%AsJx~Ay*D6iq{lUHYUJZB z^iq7xe=N`X_`-H!5B4JW-Lgma)8lu1kUWg+=Zxi%Ni?DKU-%Vhf2Zjk$Rf_a@BOU( 
zw&*>-$_vNIz8Cqilc#V7=Wqd+a2c`x?J9X4H*p(}``_-;JLj=Y=IFbiKScETj{DwZ zX0jj1s!9Jv|D!s)UVh5HT@dRJN}1{LzY@uYKtM*PsmG|2IsWqfq*s=kwKqFq%FV<1rE0KRzEO zlYRd_F!cS__glu#$MJN$gd#kizx*1#R-TV@uxHa>NBtdji{rTWd7Oh?waWbSb)!R# zG3;7n+jV8erH%b3>Gj6BN4Vw<%*UHpgtxF1%h9&o_&+-Kng4HWrpwrV2H9fc|LctZ z8}DvBY5bgQzG(cPOgT=It7 zy(V%m_M_6bQc6~(JfE%N*(n~f&a+8)F3AVtaqkVn8_T2*%@xv@aqk~_zq#*Yp6>&3 zEv7#&f1Afi&nJyR`LDzAe~}&|j%ogr-nmBlSNK1A6QWPyll>2tc#gu#$qq7$%4MU& zQSvw{T$@`LL0lg=K84)=2dA|&{BPXk+lcTP4YIr zKV@RLPxk%t{|OJtA1;|3_7wbe*nNIt_`#fs;m7BVm#rHX%JPku9hn?{T3|eF=7_Lw z&WP}n{7;8}bWaC|ekL3+ZukBMZHGhlraWLgs_cdEv+d@%f7hCrpLAbu+Z*#w=D`0e zADMUgglvBpTJruBn)B{^*7rkW-XHmn+zs_TcSADo4H142poMA$pem?3^N z?=zun#;CBrc(i$N`ZdKp44ah|n-=7S_ZR9{^gU#M#U>Fx3ZpR=<1rC^|2#TOCVM_- zj|j5&ZT>~%ODMu?=v<+Fwov}2zmDz#Wfn3c)%9zXIXA57EPOuR#3H2KPfGY(^zwZB z|9Fo}=_|1umFo8j;j8Jj+l^th^NWb%|Li@0k)C1~P^>|Vref>!By0#QM zunT*z7yEG#ADdq=K>P*b_upA`qrsR&BbqiCYd{PAqw^EW{nugfRH6pSs28)phify? zYZLAbX?h3Zo&ZV5(MPflSz$+U93S9G{h;nU<@gLPAh-XpaivT2%eac`sM^W@XP@@} zdF}t0FK=o8Bd#$K*Po8<|C`Rcjk~yyhv-+MPD0yBmAn(=pQ-<^$%^{M|*rP^lo$^gQlIHo3>6v%5(I7;+jgW z+E*>NJV(@MW7RJ39_N@xiR4A^?N}UN@gC2b+vpyLU>HVV6h>n##-sVPdBo&o#QpE4 zlIdH%&wK0$`s4LSUZNM_&(|Nx`9G=yH!V^AqcUaQjq_edMdnf67uO%j&A+d+cOCts z>yP}W`wus2|Dzsp{k=EDH6L$c5#GX5EJxcE^*=fg``Z6v{@-cse>5ScwPmR1k2G4* zV{Ct=czR=B#jBqRtLbt4acvCyg|RX69c)COae&|Jf7ufCZ}K4^cVHLxpt;@LD>7BB z{V$L2rys;@52UDBUH&Od#h_TM`5Cj4HB8YH9Mm>(*Z8ONp9mOUE#&;DFn&2ij2 zEs4Zd_CG#0|8E&v+BqpQ%?{7fbMyb6)dpivce3}pl+77-c$PihO;#;r|LQxaK`rW# zKoa#hDvsm$0H<&U=Wqd+(3Z#kM+Z6!#GfzzY2q)8@%#SWOCx)~iLKu}m;Jwh{lD1# z(OYM-|0kG3AfBtZj+>|)!~Pe3n_kP_uhE}zmwq47cOb3}m>}aGau0>|tCbfq7=541 z4@1ad7=d1P+9v_%9+YJ+0DHWz4^N9owRqpCB3M^e0a9pF};@`p@-v z2j{BgBXSE$u>+ePSnqF;_c1brU5@u4`#16qc@S~_!C`XSAC)2c0xIb>NTLbN(wHVY zki}8NwO!&GXUFNC`dK^HS=Wtz3f-}902yt7F7$l+Q{jy8-ZpboeqsK=ulRgAzJNUj*N4?)!ox&g{{dXlVHpEe9qFQ93p zdSa-0!MQ1<{eSBT{p{y*|zzq{~UKYXKWD$H~DKO-;-qmfRC17qo(H(ZPH z^ofXVhAc8y#3g*PuyXa*RB}2huYDrCL>8ffUQ_*OUjJ*3XCt}KeIvKez@bls*M+@- 
z`FInH@D`S0IUbL%t)$0qWHq@K*?-Fm>&epJ@=Id(zeC@MEinu`(AUsE>>}g%?H;oC z>;1!Caz75@Fsk-_SpVmH9|Pq70`H5=?LRS1{r_D)Mb4{15=}^>16i~kk$=#EPIMuI zEV_|f|9_nPKjuUKKYH|SeO&(>6~}Qj?__6_ao+DK@(j|#&XE_;c}+R6S=}LjccHsS z9U|p6Q?ac!J>Wl5|{6BWKi)Y4;_aM^X&(f39B4w2iQGmig- zKZV=$ySR^s=$BBoVlalFH<$xM_WU+4j37Hw{C+a#BGN}AuI<~6t_n6jIo|i(?3}TF z8;|sEe#zuyq?}WwF04k)LidK;x@7Cz^BVR4DrtKALCF66Kf+Y!Psd9r!fTj~wtLn; zK(3!>jy%0s-kvFsqq~^>KV1D^p#CSDPpJRTD_fDKx1#s6?0;>5xW8HNGXp>7Klr-% z-@tq06dY=Oh+L zqjTb3OG#7}KMd8IANuWKs6`z+D}iMDL+|ckcvC#@U?aAm6g#jBd(f787}_%r^?&?_ z{*V6%UC1DdZZw>D7#h)Z@uBt)8%~@lq<23It!N(lFzgl2ejLPMqz5@C@1eOYzPo&J zqlTVDxBJZ^GvJ{%s(6K!FAyKufy%jlCy%0Hmh0w77fL?=R5&gyIYYWo=e)$shvv5` z|JA`;(CofGa8Am7rq4bMr|6~MQ17Bc{_K=5yX4OdnROiZcRb^qbGU#@xQw1(JP23G z-tYY>TqkehHu_rr6z-Du@euuz^8Wke1KUVKN>Hiz){)YShj;a5pv59QHsQpjo_7^2vH{^x!;+TlZC~todrjiw>AB4)Y z4?@Xro(eM^XZhg7KET=mz8&9kf=oKDCpXWL&u8j`8N{Al%>R7kQ(=~C=Ad$@JpBuP zga4*(&R0hxk*AI>U`LWk*EIC-4?+{-T)q}|d5VuuZhi1Vd;1LFU*Nh9veR{4WX5rp z?9PNR*Z<7J0xZN;WyWH136`PimiECt|6T1KP}lArkfhgF_|KXB!V2fD!Wyi@WB-E< z^vC{t#q`bSo})}~O>9GV$v<0#ZO2aRM&I|I3T5Oz96;~4^lgwmf9u(h)yV$cfY3;m ze4E{(4U_ZlzNcLytW=$`?W+SqJH78!-#OXy^@rgI8R!2UBTwKYPNP}F27Q0%AJ&mo_w;`Z@*hJz|4HJUA?|VhgI-T=a85BcV=Eqy zlSTVsyW_Si?9UtCG1>V*{wFhzvt;)|`QLMFG>+1QX0#yg%N+L@Y(>>+^`H7E=FM7p zD(1}unMD0G`Rtqh!fyXlhJ84IL#RMC+Q!MBlRRItQ-19tbL(@-bKPi|D}RrXzlAsZ zy@gCUKTWp!ujZ|MaDJss5m%f8*hr>@rOH6h?d!E&Ir2&4_3w-GntM`KWXLXgd+YB9 zhv~k_@{{hv`4t!aMjk<>uw)FzYxN2Kl0qcWhLjAg({zn#WeoJZfUo(dPq9$Z0h z%TwW6Jl6icK}KJz&Fg$`#Ub2sdf}MA~Bkw0u z;T^SmH}Y+kKFE3b7>W}8AH&I!C_vw@^THUi=L-YFI5LinMEidNeG;Z%8p^fhXOM-6 zbN^=>D46{LO6t7J6zL`+o}iAM3=`XD!4HWHB~lE4E`NcB2gYZ~%wUTcv-5 z?D<}PXg=ck(GzGy3h86gc$!~6z4HX$e8(C35p?TM$O_B2u8WLw_m2rbfs;6m$^rTx zh7Jj5>2Xd#jpOt5Xp7db`MCeVrg`3lvTw^0<=-)7yKg2%rcdfWcTLX!?^)m83HAZH z&U-${(z~7axL&Rvq%GVo{(SiY3DhCZ`Mn~ZYq)`1xPyDhzG%&h3G)6(c@J%Q@~8Zl zjd>7peqWsP8`u7iYkX7Acq(`ESKs$k$ZPa1lLN>>$VXg*FsUp^ zC==>XOOJc??DSm@_4{y)L;=QN9425AreGRopb#_hq<>akoaMON*vlMp9{P*{Eg(yN 
z)GsU~7h{{YPicQ^iiPLG635HX^Mkywg6!QC!YXnN)*+Sw8^~A&q*vvIV)|xmMXr3l zC-2LX8RXW7%;bgbe%p!N$o_IQSJIJDE6E(^6^{5}-JF%s!z>ezJyLjk>0UEAULj-ii3_i)b!8FhP{zaQHl-{1GA zFu`wAFb%o(hi5Z`UVF{E>ET}x_Xl{NJ$?33+bnTKTfwtExN1=NN9lb(-`bBi28I3R z=KOReA=Y`yh|u;a+Thkr1(Hp_2wu&d|k@crRK!uO`#4mx!GC33r+l*A+=k&!-3{}dn>S@Z9Lia+}O;XmRlsOsg zA9Aa6w__)E<4O6^9zvPpwz=&8#q9r8`qu}s2}V8|d&|!?_t(phsWYJWqK{b}Dx}$YXvxfk`-klQ@mDIFF0yK~=)~04eLZ zqjsF}??V4uU_9E`as4{u-}%PBCmH|tTeGq7mNH}42aHRTt&ZDL`~xz^r^!y?U0aRa zFR(s<`TgBwb3sA4;(xB;25uqk{FLkO&^u4K_S)!hkNyDN8;qA5L(Y(0r;YzlQ0^@( z@J*=)m4TJ_Mu)s+?JrdLy(VGY-hB*m9QO}PPIKRd(joo?S!K+*nqK4HY8!>SCUIW+ zPCD<{=#cNap~(3^e=9Exr;kJd#-MGna`uS&6`klp=7jS7mNp(5q@_{ZIsX7<@Du+p z^+T)Unn|APw1O~BToaIVTwnIQ@6bLB;|lce7^AUA!z8~=LFLebFpZpn3X@Wr7mN;t z^qH82IY^K6{4kf^Np{Q_9p=#&pu5MtsfRM|tt)SISSV~UmZ01|5X;Dlvjyz`g0Ol3L6>|H z^W0hC=W!7|NEgd1$Jia;9-#fp{*jhAH>Z}Ylcu;#;FsFp z*Bsx%9kf-;|9H~>x6r++TeHaZ|DF8E_@DDyw6Ri1(_0VObLjJY9lpr#={xfGbG~zN zK0s;Pqke#rFMT59wfLSf2w8KD^U1zn^Nsiw|A{;O!cfP z|DSR63HaFjgIW6jmasqZcpdRNdIFEv5x?PHCb@13reOvOF%z@UrcQ512Re~k|E5tL zhi){eryA8$O=R-}^*@<%oIbDqSC>Wq!8ziYi+Nap$Nqy0>5uEr#q=fUR-eYTbu(nw zln>YcOXxS9mlARHij^y4(2=1W4aKs6FbOJ@p=^v-I}=8$80JG#~PSz#G; zc|KL`ANoIxbf3tr%U9_6PjjErm~qWV{fmz{K86!GjW%t`cI4KVk*}Y$FO^H(3zR?O z+{Yx(pNwlNwa`-qp8s&q-#1;l$#eO)e&MXR%K3eqCoA}YR^lSP2Uk#YgMa81J|EHd zgG{3Staa?P;jj6<90&Hz53}eV~n1D%`g6Kn=8{bntO>>-W zcq+^ww|zD$r$K-q9d88N`S!cJkkQHGLNpAP%T12}{}dA)+{`J8Wr z>@}CG^atM8pPmklj#Jpg_KSXRTiAc)7kvxDD#Tyue=_tVIEJ|PM=k2MYyTtZ{_5T5 zN$;T0?BG^z!M(4M1~z#Ya{ecChbWx92gLOUM>;S24F5ynHS;_-lzdKKj_~9( z&#lmNoas5DY6hDQH47h|S10ZSlDoYhl&WvGm4qKkvKlS*L!HHnM%zhy6C5O&2}0&U4H2jLWXyGlQX?H)XnoAQRp6LVix9LF6LnY7NU7xeppN{!7{8sdX4o5Hsps@ z^iFkUhcUx7^mXXgzRnJP)W+^oe{K*~K0H4ZlbcaFEkA4}x1*wv?~3}d_5nYh-#ry} z3yWi3_3Fnu^<#ppTBZI^s3R+!mk~}T94E>8V!nFp=*_ntt^b&{hHsfT_Fp41 zo!X~c-QNNCM`j(zwIs_E1Hw+fpTudLMWwI`oTtaR^EEREhKuwb)H_Z(PLOr;28Jub zuHgo5;fZ}TBZNDS@8JP*{%@bv$I~jGVG#0Br4Fu6>7U4a=wC2M-~JT#Kk6s(yRUjG z40Yacj6?y(U>qjkzx999H_$ka{V%TOLiRtI8qWSNVE^kYoFtAOe%DjT-aiZo)5sYp 
z#7xY>9L&W$EWko6M&Iu|bGGi5N%FpSVoduI;c@MN)Di6vdZ)U-LmOcQJ67?&%crv*3m1c7=Osq_fM}~;`t997>eneQ9sY~U*Y*9uGO$rSaU`{ z61fv;<@#>246zRCkk308=Y@Ta4&P;6Jm*_mABC7xK^dw=Mbo%i)mg zDo~9C8j(Uf+7`1nwX^@r^*7FH|6VlyO*XG$|8CGuaDJNJN*4GwGU7Rc*p@v;R!rf~ zf)n(UIE|VD`E$7Y%a=b>+60a4bM1hf{V_=WH!_5?&O47DWS@CCTp>$;@p9PaySqle zfm^tPdw78VZvJ0ho9`8akdL7lj*%$9$ID;&>%8ZBO#VWw>;Gc@-=CGgJwMksB%a>C z^<8XwIg~vAGWkjvCu{;HVG5>U1`06~(f5BAxw+VPwkh^EP>lo{kwV+7SMXFdQRG_3>`&&j#m`k6B1z3p1Sb}9(fmK+8 zIIn0OS*1*=mWCSl|78D@N&d@m^pV-%oMLRoR&2*klr;1YyUDiG(tlq1-CrlVc1!;O z=`WN1?b0u!2U4-He@J@<~3{PERVd!OHL z{~qVJA8YNGvMjFueL~uC4Y5ab?y*9T6id-ke>*tfAS zy!{0G@(lZu?DSig`Yg_Si}T=mh5N+!pHc@d(f^L$SpQ!*ssACxw#?g;quHFUIfQ1m zWy|&A@QB`EonEq6XP>@Wat6A+m;AcGTMNQW;n~P;QZERncglZcf?eJ;)BAOwE$%b6 zsZw9PYh$(T{5Z^Y&U{qjD=fkiEW--)sZaW)Ie<9UG)=~Ivt_O`Uj0)V{V&>|kF-D4 zM_pvMa1Yt*xO#T}D(S32cCq|zUAo4L+JAS8!#Z&-@@Vtb;;@0f32nlyQ`Es^a<;KE z;CID@ljKoe5vz`Om&e?9)uc;B@4O$OOh+Bs-H!vDVLT_5B>yZ`uC@}qS6kt<^J zkUcSOmG(0|u4yo^M7xB3H6A0JJtglPlZVLb;%?$L?qcMe@dM-#9$|Qlwnq%F<{zD0 z>cjAU5Pdq&W4wBQsLuTUIzIm8pM-tKOTtU>Ba8nuypF$5{V2R8hqwJnC>Zp8V>Ct* z>ZHf~T6NZgEGhLJe-ui^H^|>($?<3uH<2tuoCDG9-cxJ*Z!45-!f}tM#3pvzPWC5z z{=@!v-RwAadR+hiscTPh{xrIk~#hP?fd{8 zxA_O|^AC_+!rj~1k?3_^=gUvSLg_5VQY=T$T*t5{SJDT^vcK8ltLbZzW1D7$Gy2Wb zWZwSn_<{Y+c3$tey#0OFbFsl&R(%>ai>oz$qE)!R+3$Haq-$+U)Q%oY-3#j+a)3pC0;A_{sQr;aKJTa1=k@G%buI zt(6dp!hyPiP~|$?umdBH3qmzHWQ^->GM4i-Q5wrq8{#)hOmv$le9{=c!I-zxoozOP>#^1nN`4{2P$>AfLbigD_u{){|E{S6OFpi#aKdO_{sWy45;^V@tzE*Z5X>r>i7ypIs5YhCROPEdPJ! 
z8UMxM;&4OU9o$1tLy7ipNq9gXv?t!cb9--0H~(+4_4_7z&arWPLrF;Ep=0XrmW0RT z6Ex}{eMY`OgY%lD(~>C(uY}*Ab(M0B&6GS<5>g21P*a3aD8U$v!vu_^3&JFFX!oaK zGC91wAWS8vqa3r)={?LL=b-`%(Bpl^K8D5g{Q4gov=8*N9N){`3%?dr{B z6{4LzFj~EjZNfW{RiEaNMh3UMlZE1|u^Tn0#X%fKLf?JfKhVuT@Kl}aoD^E=ZIATt zH?i+=ehK>?U2E9)=%M$bZUo?qi+n(lg7943WQb?j^tMXi%E!v++zqh0Ir23cL)kWVQ^w(apPar*w3%GiN`KEiesQ(ey-+CsU7sy)c{}tJ@%J(z(qwt14xJ%yL{*gp| zr^xxXvUtCKwFN6vmMg(BQmg>BM&RB;mYjEhBi|cP` zc7y|W%EEqY^`G?n`}Ma@(+976f2gCMLyn$B25Fr4|95WEj*jiLRocyaw42Y^Cy0z= zNCu7=W2GIO-ye9NaY&byLE2+!?c?rdq5ZqPEL?HiHQc~0G;S#i4I9eB9eT?VegC*ee}Fc6D^f_}p}5Z5pNGff6ZGge ze@4DQxBl~iv-?HNoJcO3rwn%W}%ocMnuu5VT(E-^*_Jbq_? zf&%^XXh#RmqZ3`l`o2dSf1q!_cdqdVrN$qOcOPZ$gG>u&9vOe|)c6Ck{@52`l4;hEC_lq!w-r|^MjH6FLoAX+S>>=>dUILgTu5;$+VKO-taoodnvK(<7 z#DHfU#4O=C$nMlG-=m#=)_ovx)AQZ+d_%?`Ai2W$1GJ(I^PE$G1z3p1Sc>IXiN5>t zzxpjdzU+u}Pig<-y??iTw)cbc=tS2``Two8!oS8xqCa0??# zzX*}I{?;Agdw77yc)$ME6Z-r0w`!-#yARaAx8;Al-+#03SQ-1+{vc<|E*}?XD71X$Fkqa zS>o!oMdpz65d8!dyQIEOHGOk0Fp>6p$YcGzQeK)HOSE?zBeExOOTclkZq zrn3fr&))!2<1^r^%9 zF)-S7iltj7{};&r$jxQ{%jX~Z{~Y%l_5TLxOXDC8qh5RNDB18Ge~Y^LIQ=9}<2S~~ zbg&%}=f~Ii?Hsbo?pk9k67(e6(S`hYd3__%-t7}k;{pz_7h?Z=Tod?;@HO1PE%a2% zqqsvK)ZQK#TNv)qA0XE+Z+FP!WV)a*JQNql2R$Z-`DdPx&+r1T@CM<6?;4{}f-x9} zk)`a1Rs4VQaI-dY3x2!*BWs`LV$Mt_+_|BzYj#-$83M{}vEJjw}|59?KO8aBF`n&0) zuv~cmX5#{^Be;^@xQAaAtLbr0WAloS!dm)z#5LtwpKD{1$*(;BKKB0+b~2gY|Bs$x z`?q$pliBQZoU;*|QH5>TfokkVUrPVJV+WiwNT&4}XZEns$sGH>y@~xV{=DBi$u7so zwf*zs|H-)jUyXEXaS(^mQ|XxEPs36AAbWmbD|?cD61f%n-?y;;H@FrV^KRV#FW(PD zZ`}21IPJJPG<@Z`W_Yf-`roZxe@_9a-EBR^2?wR}x{QoudtbYE3g$-x;#F#!#5%v z=l@+IuizSPp#J$s;TCxZ_wWFX-^u?&ABBhXmYwoH9@C%TL;H>YFR8ps%uxTJx$@Wk zXa4lqx8J&poq$^BL?7FG|G+chTHnJ9@)i27s^1ao=s~3E8D!O&Ikb;v*Wmm_?E`et zyAjv;??q?#$Kj21%$W~yd|vo(*c-R47{3sZ%sqm|x-44S@K``*I`Isf01xp z8*NmSJ3=TShqO2AuNH(c^l{j?ZC98;HeNLLO#CEz3)y_RAWWuDMO*`qYPNy7%neq;7jZ+K?T5Tj!0{>qHm5`?~pi&#iBRJ~nkf z2Jk!k`}tN6O7k#|Vx#}+IC&DMQHO-KMiZLVeJzjpDBl`GMz*5ub_nMj^TYAQ3E?E# zkzL7eT%#^~@=@p$PU8YD;R>?mU=Hv2I$Wa<_3a5a$XmFBdw76{c#J1_hLHk2hbJ 
zfB6+A3-5ctcTJDuhLRofFNR)etN3kr?H8e(9C=X~W|8}?vwOgN%i-N$hdIJSH3eZF zS%C!@dH9E6A=$aTAS@=AVmVeKuC3C&fgMO6l*i-RU~B2?k((&r`#nRZrxb*Z;x?lS z+pq)G*o~1(-+0-s{pRaXOAbF1E>%wL)ZW+=!a;F|aTN8hr5i?tIOq4I@M$!jQYIye zLmfSi{cY}-?~lp%O>9fD)$bqbfAvcXk~7o|E7TFI)PKv=o6e2>X6MM`s2{So;i;l< zc4ASed(JQEoPNYQdhoJ3>b|;)%p&(({Wn+rU#b2lJ4@C7VL9#w7G9*t3oGR zI`4n8zF^N;YstAz65SpA29Nj+`1;$0yO5>FHGtAD`SIz0|GGN--J@#jN$v`#r|b^D zc)mOQ?S?)2@3x1NcejO~4{ZxS+qEN{*t#Pe$4^sR>>0N?9NV=y94-4V!?#nmhQECD zUxhwtr?G$8=5XM|&QN=5OW23MYS2AhqrK|~>@*MV{PRhn<04-Xe^Hk_ z^;kOHLlgL7CWa^UXLx~E_%J^6ZDDvLTtB`zgv*}$Ap22UbrijEn*N`;#i4}WvV{+2 zLva{GABVOz@{qU`lG}^J1aXs4TT~JzlT$HN#f~M1pZwRMoScO@7)g8_=8+XxfQ4w{ z-%jl1qe;2WDe0b-Za*JQgX=U|Qn#a;~TaK_s!OSi2?15xR+v@ZIJ2KZpp9@$o4Jle|#AKG+)~YJ!7>&u)ofGUs@Wf zrL`L)=RVdC=Y2-~8ovPF#*nz7dBzBtmvB)0VI0MAoWyB#&Xga?I0xe#nLrOcNw#C~ zk$XhuzI#Nxw=9bMwgX-0LmC%w30IJq%|2#7HW#pu@nL*A`|`v1|7q-7c5ElvHHGbt z9`vH`GXDTOI?jO~WK*ZfjBwUk0XcLweHyO0&JEnc9rWCk#@$cDJ$l?TaNzBy;Q>8c z&SrCdHm2d2v}2;(S^xCYkheouXuqBKG(2?dV>Fy`-~H};A3NTa?HXU%tHkh zpidjF|F$}YJwHgMg)=ME1<0XY8XY)4&Hv+n?3&@-x=xSl_3}?Fl*VE#MZNN7IoZ%q z%7(Dd-#ugM`0rPXYyM7IgrqS8DSH66qV2x%EQ{1f%hX4F0QJ7NwekF~?0bv#^!)li zK|MTEJv`0#SSf#zt!Nu>&4D#+fPH@7>v#Ni{@W;x&8WgQ>_9bkqXvDO*!Nqt5y-(k z+5}`qI7{a4^Zz%n0aE+}WT*C07rpz8dgcWGz&dfGJ)?9E;xLY)r&J!rae97zU3T(G z`f23cZ&rTJAg-@lC+-{)NFw$-wv%!1-!3vgE}UHv{Q}XB?h}{B1;p_Oab3uvB zu01`l#<%g34`hjNgN%E`=I!5AY(&JhZj;^W|BU*dY!klZ+&2jSz3(5RP=YZShrTWP z*YUoeKRr>sjBKfTTAyS4ZS}u4!FjUNab3Ig&m*qS)4M_cyWb{AXA&l3DjF-Io_j39yv}O3;kwUWIV*@;We_MTjo3x)-xb7YQ?{?{9j^p<~knj1U^6LZ4*KU~M z{)|h=mbt&Xp8o{@#~Ifk|GDR%Cyfd$z(Op>QY^J&pY(poc61@mMe8He zxPVKzf@`>eTj-SM?vU@t_ur#Gz(Wi~8~u^|`BwgXDt~I5=kVBXPw)&c@Ct7buK3=N zP$o3tL-{~Yp%rat|4RN|CVwxImsiPiyZry?nJ9mwZ-(*#1M^)UX?lKrBEQA4Jo$c~ zvC6I~@767BAV;}w2^wd76vmJf&~lc4g6}WY#P;v!pE$ukfm(eC*~Mkyz()Q5Pt4=x z!9_ouwD|{IH?N4;TT*|(S`qwdlL8OsE7CE%9(|;+Q z^U{lJ-FHpVf9X1Lj(RWZ&wd_`O7A#M;xrl?J~!|2^H4{Red5ihJ`d;U3A8;Rj%ydRBCcH!|EsumR`fk(OA5j(zxUAJkm0K5 zSH47_)8I7o2p9Xmj33L=b2I(-vwZ_7a@;7yu@_mkehGafVeA7r4!P;G+4x_C3G|_P 
zpNC20WK6|$lw%g^uN#L(HfUSUBP-A-ZUMOvac`Zt7f?#OIEgm-Deke;qz#%d2BD?E z@1y-rCMWt`o4S==TU8tuJAWybV};f(63DCLw=nwGJ`a7$f9G4Huw_nXqq-S zx@Q=JFRpjIHaHUQvFWC3qoq{2I-ZSO%r-7yBV)Det;Kq5M%+8Firj`B=yQC3nfE-y zd&Y0??~q@AeUx{J`yZj&BbCv&<%75>Fzd3~mt`vuAX^o^q*iF`;Hii%Tf6J`_ z^6c|)PT^*;N#A(#>0rXsUjr#?mPI&O9d%Mnlrzeo>@C{t` z-(PgEWc}UZuu*&x?dU>dvHG{n*nN77y0@9#o~BRztp9aJ`)`Tyfvb!UTw$!1y8pxa#M87tv;#WTv16n&4ihj5aSVU= zv5&)K`XCv{@K2>rNA4}(1~N!Hu3TLG9^(+mIcU7d#w9DzaNE1Ps{SSy2*>$<@Ap+s zIxf|(uKkXG4$ZHP|3i|V!u$3AYqZh!zSX|D7{Wqn#5KPl#c|ZX)y^gxo|^y1Ry|Fx!#Ol73t}0OL@Xm>+0cd`jxR_! zE{S$zSC)i6a{mSP^;Bg7{Q@rG3a;S>V&6lPeBO)}B<1~-av;C{^88r;mWEr7yMudp zfQNXDC+M3g|I2S5`hVrC{Q58c%Q(&>zy9(H`Cs1eoFV^9vwNESkKXZYdt(ltNoUwt z!WZN##5ovm$nZbP&sCp=A~L)FvoMM*!5EBa3ydRc#```d`aWjr`?H+{ey{6VZ2-iRzchxh9QCf2ckx4OnA_HRs+ObNG=ZT+8x z&CaR9Htaw(cB2Nh=rhN>-yDR2Rj#u|y2c)4%tgo=dyv~;{J~Z8)^YwWUjw@6-RN;# z-2d;OG~zt=9vqJ8Y76Gyr+%T%@zN`ko^-OvIp?Tjj$^=YgQM*qkoqi~6wZ(Txnf@z zdZV}o)X`g(c{ZG*C(uTZa}ZL9a}W~SJwKY!g5(+B-#*_T+Pan1Px$Pe*N!gqA&m>T z6vG>QfB3Nf6g`71a%flfcQp9^&iek)Mek1e{*L+n(D}MFT#?Q-+`ujL6ssrIjd$pS z8~BaX%lGIHklUiYD=xE>{fmd<>W#sFOg=&5SH=^QFVL_;y}C@kRqwtMeuMnHx|#B^ zawCPkPlHKaaX&kAFv}Dm*OcwpKaX9zyfBo|$6y>LAg=p8i5xa>elj@~(@~DjV|@2y zcfa=kp29GPJ`WX$YhDfF{W>IBWoop66N}{kRqhilo7~qP_tl|%K^)%{`~Md>e<2oQ zDVAd;RwKXu(^l_)lls3@ojTF`N0y!|_Wp13|KY>_A3MGOJ>EYV=U)v~%d>OL{qN%k zFwTFiG}mJzHlqsLP>tQF!N@~%|BVf(rPrTfx9A(%%MTXUf~-Al?ACVu|J@&jgMQDi z|Kr*AFZMpK6@?Gue>`^*dHc6PJ;l~)|#)LC^rz3u+TVTUvh<0y{fBu=9a=a66j zcPIOQ4?jJb-pand%)Y+Pz9!qH(;@Bi>bkf;P?!2N)~!A3)W7QG&S~mE=_JvPF7!-s z%v|jc`rsDz?*?@({Q`2U)W67V61P?RLtMSzuaMW!DE?e1 zLcX8FbrMbNe>A(lmJadPJ^yU^4{fNqs;!A{xyc_mKU=L$iih;a_!r0Du8{w59&xRh z-yVM(_auBGo&5a&dj;Vcy{E%BvgqURfriv?kVxJj6dshEy(%)%VZLj@LKAr@mP zmLt2(`goxt3|-Psy-;7-;ZHuSgA%{V5k9be3;jMuo5JzKcmA`me|cp%ppR{>@W?sy zMxtEsQP@asMisVU2dYt9Q4w~NHBrYw4C{|POve82&J~5>DE&B2;xu}8MITUMsG~=l ze_%GBI6Z;)eyNS+2LfttZ-`ltnjxd=7wL)m=jKKofFQS 
znj8MEa$flRx%0wGY4+UyGIU@5GF+l}-uyDWA;bUVdqnQ;mm%x-jNj83qz`=eWfPXt9v;=oGq>lzx=Ku)W56@V;nmU6EF!K7srKmeP{W8vOVKM_ig=RnC!Qy_;$*) z@Ru{Dh3WKi?7uuC9N1ePYA4PJ`|wu zMRUVXC(aG~AL}pXll|G6`Qhgq=7r-g=Y$jVzdc(KezB-BR5*75_U=(mqp*!@2u=c>>qpAm;Q+T^P{lPIg7Cr%drxxu@>vm=eYjy)<3}DH0vK! zT94qZcmDFrkbC-NXy0S~gRiWAfKGIwdztkQ9M?PdN1=XiMc62v&8WgQH14Yi4JRtX z4tfi{xxXS*(|4ooP=&Thg}#%DkVNejem83c)cCCyJ=PF7NFGKveIQjC2CV^cRQNcu zXDUN(inc>TWyqYc{)0J{O=Z>}m}32bO6w0yv;M$L>kp7^YouE$e`w>zw!ul~pGF-L zF|9tfzx>*-Ju)y+{~OZUDj8&5BVMOnS{-vM!+CTTSB5Uv>ORZ%M_hle-h9WTwDR-& zlq>D@#+2tr7ro`V=l5;((bH(##P@jD^WSvdP(`>Pu5-pm;SzZTaqQDI@&@7_W&^j~ z*L`F3gzq4`OSypbW%o(m6ZZgd52c4>!(3za$R}u|KO%D%j*0r@FGFIc^?$eOd)@S| z4>oDMe9CzFR>^W)0wUe?B3d1<(>>tBde9{;T`XuzJpZjASt*(h<_0z@b=F)d# z)^h5$_S^bk@9TRdI~~`xOWlqhdhZ7H?|k*IG4;)-e!c#0Kl>l8ciI29KM9jvV=Csj zkLhGNX2me(p-&s4Ut3~8f5ITr`W!O&u>U}c{m-v(9-aHx|7Y|+Xs7gOuk>ygr!7z+ zodsBk#prQQ-R^xUeNf#x;9i&0S0b)ym|f!;S9n%!jn(4nwMW*H>k;=3+(>RlgL9j; zJ>tCnD&cKtEz|yZshu%JJ7c`(?`Qwx!~S2D%I0bQ+nMTVa))z%=wGQ8-i;b$x0zSC z*Z80#{2%-HKTh#~9OC~_pJdf1Ips|II_(eT=6Q6Y3*FxtUv$&>BINyt4IhR4dU(^{ z<%@&TK8&~rSls(-aEkxGNQHN-|$GyMK(Ocw~xZh8L zoAkb>5oT*KF#Jm+b4eaV|gm`YHSR1bg}n`}&Y|<=F83``G74qJ8fB^xiVa z(sS5*Mtz9&t`*zZ8_CV6!Zz%WeN)t{ZV1=%!`vbdJdSJm6@@$Wdw76{NccvYd^62t%QXK@H7smVjOZyx*=pDs2K$?C5xt+=-WG;)trI?meij-x@vYF`pkeDrA^aQfKV@7GISQ>q@;c(0A1O3{ z<^7|?Z)1=+rT#^8lX?ot{&#-qR(c!8Ip&A;&jjH~n2hWW?XQ&jzgr#2Z!k!vg)@BZ zS>(`umi>?O=tLLdyttkQ^}p*bbl=ej5Z90GqW4ttwM%db~3%^y+TcjO*01M~~5%<(-MM~Ont=;_m`>l@IvG5q*y7ckF=kKJ0&x(ne@-pL^JUZ$1BI&wt(XlU?^cKiMPP zOLjihw=bP4Y{L%p6gy_@$Dx{@pZ_@DymWdEetZ67t+;ys(?Rku8o#oh0a@$2Z&>BI z_IR#E?ngOvQhe(=&n28BQ+`jJV*g(BU7!U?q~zaL`MM2z_0`wlv~$0GZH&NC^Y8ud zb;9S+cUk}Xbv8CRcwf7Kp3%pjeW{&r=GXo6`smN26J3aVfApXiolCS0q?1HDy3ix8 z8-4V_ebPpnegQdp7MT;`a7kQ!sWCI;H8j$1khjo4k9+>K;EwP;kqS?>#+m-0+PM9+Hpo1S9jzpN;Z`I+J`A^;gE9sLS5lC$*t0yb&M%7kOh$ zSr|p`zu$W4Gha$%;#9}PPa;(H^tVLgw{%;Jd)6c#_zxsCl>d5ZY&%Q;!{9FCs*Y$s2*8iQ- z|J|?u8$I-1{rsKu_0rjh%{cD;^^`dVRlFh>Lygm-%FplClKFq(}rJaYgwqOSNcHV9If1l?+;`zzWGoGL97VaT?wZ%H0 
zejZLr=QQeY4n5+!k)RJQ@%&%8FM2z2!dc-AnO-41arI5sIUv($q+cK}p@H69?72#f zV-UWER^c|`k;lE_^Y*v;CGK(8a!i@gt?am| z47sW-xo9k!bCPIB7xMmu4*wVWg!?wIzcGM8Hg$Rv`&%DCmdur@{|_mPz@Q}3k7KUp2Zsdfs!WlAsO!*Z)4UNwp5)=xt-T9BO0wnZ!2aLzrAbKYc3#dMTo7UrPuJLCH? zz!o0dXIwnHIm0&2BG=7UT4ntCI^)mDPRDhTXsTaiN2HCBr2B5$7ai5cl}Y zp?#+3pX>SEZ+?G-@t%L8=O69)QQ!2b4TR;>8ubGo&ptka#+2vBLGdlm-P?EOFHG0J zFo9p}zURN``5$?H@rNDL`BWW19!DJed6GPhxXx$3UwW(WS@;~X^xQiCeY8R831j%1 zjO%ZnBL5?a6yh46ZAdsLiFS11mt*1b;Wp8jjw@>F<6ooEdMu!9{i^Z!G^9l!t8{67qToszCKBTx8_ezxVa@bmkhg?oM*v7Y}=uKs@b>7L&Y zr{-^O4ZCSVdKBf-XKdZd3~h^>JnJ-=S>P5lFCXMcC#JUY>(j_XF8!{2)`jv-V( zVqll{0rKnbO;h)dRR@=<|Cbbpsjf2}<(P%WW#*q@4!vcc`VaHy6=+izwj!=wmc#;a z3$YkWu^cP08fy{X)n3H%a=mcuGpfaAdKD6v)&I9$197k4Hz_qg(=pqy z-`wj1llbH5)!2<1eAs{IkoG^)r{pVS={dB^XC2Dg^JM3D{BO$TST^_2d--KL*YSNw z=O7NFHtn}Fj@ikM7ar{Q|8*3E=w121m$N87)dUbEtef590`oBZ{kLE7kCdU4U z!lqOIp|H8&MPWE9M+FTE+OMSH=M!j4rVh3A)z6n1X^e+tJ*ZvrM^ zGNxiW$}tQ1^?&CS@hkH0KmIt(5jPJNSb&isLwm)2PEaB#^IveOKRNq~vX3 z=PLIsz8%>G+Gk|X9@p6QePJJcaOU@g1IUlhOZp~Dzc0)de_xmx|9xQ^^(Wl3<1XM5 zuAnjVtHOr{=55a;W+6`K=i>* zA{(#1Eo{JKdW-rcj=h~u&*~p<6OLnVQ)FDjvu26!bCv%CasFSq;~L0D%%bP#|BYjV z&(IE^rhHMxeDE*GXLB4g4;5H|g;HVSV>ocotsr=7D?JBMB+ZX0%>8oLqghZ-`@1*#O?|cP4h~xi;#}tPb(iwVI6kd@Z z+SlsrH^Sk6_l|x&{-B6HVvge|@&JGAe(m9U_gW%62IDXRjq20}Orp2!@@(7L$@Hm+ zYX-NzHpbw&xdz%RiDRB0&B)LH>-YQ(oY;`aNVUpnWd*GcC4Kbo|UPQB~@h-)G(md;Wv$4c}}(}tRDYymy?eGE8e zEqy(5Tlw-gz3cl(?=sdvT)k)AOje=M@!QC_262A;f!|^uNY*!5ExvV%=klK?D?R62 z&wg1Q(4p;5HY3{pNwV#we2(4Dsfp>~AP(awj^iYv?caA%-nuJ~kJ2z(&8@Q5~ljzM)coZp;SIw7q3p&elwb_T zVFD&$GNxiW$}tNg*R;={u<5M>G)H)Nf;C9U3M{}vbUrc{gsiRoEG#9L|zHJM*0k9_Ffztyk5_t9rI`uZ%a zl*W4OKdC>?oYDOHe~Z}J%h=gu8X0<)%*DSHT#!6+u6YW&#-^VIM2TK#HV4S zv_?GRX0i&~umjcDjjZ=yL)PLT4rACjilgL^H7DvHSsRdk5~opzMsW=<`7Y>jPC+w! 
zJV8$)&Z%#m|IWveBtPuGx5D$|!}`9IL^oG$bsjUW2gF2wko?0*bk&~?*8?0;mD zL;G0IkMrn6*K^O09`vHqyC_y6&_8$)1bsf5&xp zn`0}TahQNf=$YskZHvkDLH9L)sr2cHdunI>p7DF!Yr9-rTmx?wIS+B~+X`|48dfRO zc6l!BU#yPxEOAY-Hf2kaO!@s^+W%{zbQWVNmLu*3SaXUUn9mn+?2+;xALjqum$$U7 zKCJ)f`8qP5f3Ii7hy53|+v56>e~5d&&JlkU$8i#k#_BZ~ z|96_6U;qDkaj2u8Lz}uNj-gLECW)FA+J9@5e;fEBv>6hPc|ZR?Nzc#!TdDkhs(qmD zO(U+`lfB9Qzpj1Ym@f1ojSIMhkp)HJ3OO{3kDnYqUKDPSw{Qpd@Bq>FeMmmW6FfuX zTkH4Y1-(VTL$kKVEBYI>=?7?Cr~Vh0^jpF`G`W{%#CZb0vH$KFRF|n0^d6XK?JhGCecjIoh_decP`5@Ayjl ze^K-`X#bPl%e4Q=UdK(BMmc67?%O+ue80cOJbDEdAiqC{@*sUi-rFY+IyOhfIlc@1 zz8DSi^-{9_wl!(Vm1w26-GA4nNRiD?<;R=+^)IX|;`hXC{f}ry3zEl-0Yod>jN4o5 znDy9*&8WgQ>_9d8GR8k`GX80+@lRx0J)9x4!a1^CTQ|SH+I8P&6Z^2=7{CT&bj}(B zh?4if>20{;ib%kao<_Hf>_@!=w1h$dPNFfxLiAG2VR7E9B6P zf^dz@kH2^^Da8H%ZV1;a+isC}a1RgA=$mS|YdtA?%SGSTYwIJ^pCG^fpJz@X>6lt^ z-;S4uADQ2eXTmS=3U81Y{e|^utbt$lMQ9oSMM#!f`(FQkE85UL_6y_7z6j^}Pdl%E z;aR>2-50;`eSg9KXD(@~D0^_HXA^b7URU z?Y;KUtLXddw3CW_1L}mql>fZjfA8IA$+#9n;*|W4=2H2;*nd*yzajJUPv2>sTNT69>Z;^MCFV&lG-`NIjG3>W?bRo8X`^Ypd;1crwLf_Z7WxiwK zYq)^}U2*;IqP+j_j_^G^z{q3c;K|2$g5fvDz?1R&e?h*&8-)MMH-pX%`u52ZjKMg( zpZ_y~9&P>sOrlRlZb;dnOvoVif4BFmpK*S)_k*s9-cPajgI@2dkIe6{w$=OHO<-WC_gsIY~Pv% z1lCws5Ov>8c1%%NOkawT65lDg605N`#_jmr_&4+aVjcXIvP_w|Ui|y{KO5;S>s%Z8 z{eLR>?1y|?Z+&A(BC(GRc!d2+wwzM`smtQJxUCs>Vu$g0(%6hDY{L#zV>fEhr;hAT zss9@Ee~(tbPgK7nzy43L`u(Q*59iT|-`*cEt_fHxor5@xqv#pU{w?5ppbw6hHcrw{ zBezT4x?LSxtgc0!xS_hDaE=^at36WcUCi~pRq}0!OQIdM>)DJ|^29TJY)91t;zmwd z|NqD*p>~wIHL%+b8`B`Z@sRezUe7{rIiYQU3-n89<2z{OJ4o%*e!vxRo!@;Nu8}v; zL%&7dK{x%q|L>mg1N@Hvk3HUm{5-`Bg+bKW)Y?tp7S>JJf|G&%s3ICttili|LC8$4Zojx4z=PEj+sB9}E9}!^h$8<`#!D zi~b~>-ts5m7po@fqx(FZT4UVPB5T{M4`Gt@hnIdCCX@9YABA7uwjN^lC!y|;Z>L~h zXdnHhw&h=h^GE(d-@(5MT?JFkE&Pj+omCQ=uKsyQT>SIUVxC&_(7)!t_|KJ5e-_$a ze;HZ}D#F=`e3voZt)JRoE!u^@7tUE<@gMg6erP=Pd;WLh@)z0%ZS=n_{7cl%{>Q?7 zl^Zz&;xc!fT=ZPEgJ({E$_7=qX zrT#PDV1@VvScnmQPK(Jn|8FT7*Z*EluEc7rMcn&yJ-HE^QH7o}?59&7g>Ce~L+qyv z8-ZSp{Q6}pwBgoh%N^7HpHUiii?2Z~4x(|Lc?4@p!(n>M5`Dva*xU4zXgg)?w`10T 
zBaCmfwf}t+`;7@W!0$84@g*38ahQNf=-c;$ z`9EjWX~LN!o{!9Be%=08=be9~{rgtA_f-4$zV@%SL9gGV{Ww`VQ!yRo=uy{qpI}qd z8ONE!Cbt}KU z`Z&)0NzoJ1X+raS?SCZcDYTx^{y!1IO6RP`TCB%LeA^_?cyBf6JLUOP+W!sO|64u( zPS3x`^OHH@cF)`K((|M9vgh~g-JZLr+w-#rhN`u}+10U~JzT-gmVOnou|f2p=lX~?`R4RQb19j?=bKBUoTEOM*^ zF3{uLwdNV-+0(Be?g`L3Q5`i_U8UZ-Chi7q;SPT2zrQE^01uI0f0MuPG5rai;RO;6 zzuw<3!~VXiPQ9ycMJv7SqIQIK&@0EiLHNJ5`7jD47z1;$L;r36KL&l{u}zmb;r~D5 z|0mnkCgvJvC!GnHgvsbxqy4eM*lGIUS#|g_{Q&fG z0PGmF}E)ayRR3Vy;3txp$A}-X6j%dK}|2kF1Elt2eSMt-WxxAS@6bwuazB zaxs=-{~hn=g*o$QD#InuF-P2TtVHAOO8&pfu$tcT+T85tm0>M?J=!K}SB?ELq#TpP zMsb~IzYLqnD)gkj4BN;Z=1{|Nh0gmGAt63LzCeFX zlHOxpeD}+W&`uvTcYdJ5TylCJa%!- z-G5Avd-4v<|0q16KSOR0-ybr&v>(S>S3+F<*~;*We1paY1>QsDG#XOA9mlpTv!gTndsf3|ps8Q}LqHy3z z5xa=}|D655$bW!D1N&25mS6v-U!6ivp;aB!hFWPH$4Q(<9nK+vB>J`+|Bw9se)rkT zm)ZY(>RB@Pmi;fS4t3ml{L}ik>faUWU&Qrq+NIMK(?S|OGt|H8xC`{b?dspH+8^{Q z$g#z<>gLQE_3sAl4{>q*n;YaUG&=SUc@GVn-nCs?)RPZ{AEI@<=ent#G0k(98Bbm6 z-Kjs~_xITEPw))2_xPL0{Wbh|lg;m@zd?B9+r}uAU<`(L6oqkQToZ2sISG?771L3U zS(t-)80jhs732aW*!&;HUo1~MP_ZK>DF_vOER$?{QqHnAG&rTR%Lk=QM z&y*_rXUqR+zbpUayyH4I$^YuAZn6ix>Y%-uLOyVP@A`704?M0-zh1go`L9PEyqR8w zL3Qr{V&7i6gYUmdIY4gnTjLh}?fg)+?AQ7u+LPk;zv54c^EICH6?`YJYY!*MSQf-J z!Kxjz8#SoKK^(@gcYTzM^YD(7ac!WJ}3UVj3O^|8ORzYHh_79ppcU&tcNsnWTTh}P3luu|r!Ff(9m(i#hzXwYEA1_{=vk;ED+R_OcPSpB`f_jCUEex7yPYwx|*UiW9M zP1+1|{c#vPz??x3I`=C%V~{j*Q;*G`|9_UWnIVl}5c3dp;NJ;dpq`c36Y|~K=J)6S zpCZ2Z;WV6qkKr>o57Q?(0}82*|G!2~+Bh3wNvM2_Sw7-B1a<9m5poGEg=O&l|G$=F zZt3OSU?t{j;rnxoop#<8e$ap4&Nw5UaYZ8I3TQJj&VZBLYYo?}h4rujZiP*-1ze0l z+_Q``kRHY<^ZNgcmj_{9|NkuG66hjK_XWl!ql`<&8K)roM{qmL8UAYjt(W=>?j%li zZ$K|&oI%E9o@nBRjHmqNF8qeVa1UYqJCb`vu}1(mEBDIGrkrv{g{nJ$KW;n0ZfESC z$a}#<*dKwm-MklLq1GhclQDj`o-@~Qe*&I@r=c&6`{VvBW=|RSH(V;uV}1dKX1G7^ z;p3iOcoDY)pn(jSToNqV$cg35pCG4Omq;P96e^(x%wU5?6+gyx@WcNVFgu|Q)V&U@ zNB!{qwGzGqGPY2@IoHvJ*%eOulRg8Hr2kpU|BTwN#~o!sL z@1b1X86bVT{pBQXr(pj4kbR5`&;xiM`)L^5$hZK!yC{RuQG||=5#1mQbrAHNpzJ~) zsQ>74U1WSQLm3_+o}*kh&UMH^>_f;i#Q8CN2IoO{AWXhQehp9Y?KVbGzA1O=8CXBe 
zUtVDR0PXNC;pA_WC0GR0J2`I(xfGVcWQNFcMor4DT-#_YJ@iknlwvFGri(OZlb$S4o@>!r1Yv!Q66; z=bB}nAF~bS=O6a*oGCo#37#FdZX@kEvIEq9MGw*i^ZyTGtUrG)A@)~b{=bCV=%?WB zV-Da5Zm+>G9EX!|3f_n5#y~lZoVYbm&LAiGr{l-S&)__K3D$7_zZd;oA7Xxf4>!js z&rhG@Tg)vNc>c4D@jfOW*3zaQCSAvQ{t2ER_y2YO0eR6zyzR)r&zSq;+WGyD*Ya&* zUJA=#Ik-|;e<2SCg2@AF3P(*f%3_C-m{nT$(Y^;gD?d1 z>uUzuKOOBK^L;(Zl%RVtxYJsY}$d??kHl%Z;>w z>KWwCKHeYM>g4??7j2Y}cD0-y{%IHeFZkj7#|!koVE+H>Q|R!YqHjUFmu9E^g9F5^fee^F!nqE}3FZd!kdvF( zBY=KDq3XuBREn&Gd3`_9CgaYP^zm53#mx*h7~0D_fNzv%gYWkPHR9I_R{kOW0>h9_`lcRPGNr^PD3yC<-Qd> zcMoZNiarqbWbBQs|2KvHU-|_b(Jx?azYU#&_K0hkANl{nr~E(Hr()-f6>*O-|HmA+ z2fRhh|G^;4pMOmpoy6G%-Ov++ZUKD%f4%wm`OnStL*YVFNSb;NFZVpoGe)sK5Xt|} ziu}bA8ZPEw)~htDL`RrOFXkhB)1&Owr+=uvlm6e=_Hk~2dd7e2X>{U9s{uQ9-q(Qt zkb(XC;KR)e)=<`bzrSCINI_zxIO0hE*i}+Q_?BeSe=lLwbYQN!eFy0ePTbV93aZDK zN!3)S82zqdj(Mrn?Yl;5Q^LeF6DBo&S4+uMv=oQjEJaCCQV10@AyR%JM9OsQ*}E2v zE&y}jrOcsSxL(o?>m;LWo#-akOJ?{@l9h6kv~u4vo+D@X&5|2+IsZLlKU~-ql7C^D z6og+n|6WG?i+5ioB}L1nluRz0Ss~>!E2Scp^BGS2OUWgyf9>X3$)_5=)AS{jcW~(! 
zORHZ1eF^HjG{$nA6FSV8p$|V3alVQ?3a^0>N1FW>#fv$B@2RSc^|amBDc&OLyG}K~ zsqbi0T-2%a`?0ZGh@*%4ws(a34*IEg=g(uAqCPY+_lwj$^e2Bi;j+G^KjpVroS4sY zk4|J>(eK1yL+|E1{dtzU4ugIf;ldV6?h=s|bkBYUZo&@%&YGfE{N&G=T}g}O4dh99 z560m%u`HOTaY)CmcKy8QWd5l_aXm; zyb*aVVd9XFA^#KkIC2|)&%k!<{PsZeGh_{tP%;u1iY|@+5keMj*%m+1JN+bw_2^t)CXe|CgWXSl zN?ZlV!U;bqI=4`Y`O!oP^BtvxDciMxGUg{0jJqpO_=%Brw~F?+n)mg= zQwQecWs1eUK87{-3Bbg7@&oC?i{96dqsG z%+S3R+SMUEen;Rh@F5(-|1xk{=o^ri{Xo;9c>_PXrke3U@)Wm$K;`G!L+r0wA&M$aRnWW>pRgE?PLFEKVf?=at6&g_JzjLcRb90^pH(4oEtfU zjtPFPNaxW&smI+K7$|1$VTnbrAUseSu-lOKP|myI8LGMdcI0lT9S@YcjRCBaD`FZA zl;7f}2OH14VYR9XPTPjLfV}%0zJNmVTp|A#Ksn!kDfV{C;&1TFg$k&LW)=MY$#-`7 zWqjW-9PlG$q+^3Jd@b@caw$9wk8Dsz@>KIiWn|e#W%#5jZ&QY!zg8JJeuFZ+0rMK% zmuyi+p1wmFUVeu%a;qxil#y3%QHJl@f;m~PUKqTBIIik z%E+4QmEl&nbCWV$8L5oCwpkfoxkedTx<(n!_%YZ27=PToxVIAKYwmw~lQQzcCgld+ z;VaT-58rq;j9j~pYj0GVaa&J%4cmBLGvPN~ue=_BebZ*XAL3gYtBgExyE05-4#u$- zcy7J&4*nkM)Bw`R9k*I}lQ>w4)7WuG4MNj}{0-j?-4XuxEShA@?LPizXS_*q+Ly?y z*jtXSR{Zflg8y~M<4C5(G*Orn!0NYLG2^y^c*CJCVTsi4T7rJa61f3;b$o=t4e;)Ze zV5&ehbh3JJp{)$?uWsJqBv{N9JRkDJ2KQ_vIlO{5n;JzWuFef;KB@!J8GW-)1E} zaI=yTwpr1IZ&orxHY-`7o0aUS&5AyHvyv0BS;>vutmMURRt#~QmHdsHm4X<)N7}}s zvzJM_dcR?wEllK)YJMNxpegh(Vt6i|w?K8{+0(bur= z=3Kjk%S3&@#-`0mlXJ7enHypyj2d5q8g?h+_g?P59=!q8&RSnL`W-#!cf5(cF9`h) zQ2!t9qOx>tBW*S@c8I!6-o}3r-I3u(I9qZG-Hwlm--j;2F}1Cyezj39HLI181C+T^ z%6UMPGV&Q^<|)EW5$-T;!YJo6Kt(5KL-Ky@#Muq~pr(Nb`7YEY{#f}7a!7Tj0UM^z zsD1~3!2T!IZA|3P$iJ%Q6RZQM@&e(8f290PHB*rffBYlmQ`JlwH0|YGR5$WgowM)X z5zJUFSbCYyQRjACyG25;?2TTK5eP;}$yMU5^-{|Wm;eE;+Mf5Ef z@lV}FQj$b{#r}n|@I_Lt<66wajGc{fi=>LaUNzU(aJ`A^hws5}DLSTm7mJ0yQa#cY zxKM1J0lY8kzeraw`uSt_{sSB&*%LvckBn3l>a45bCfEuO!LOmvxmb#}2crKKC?$IWr4-6Am+uXfijY95 zj8We|P^ysC!R)CBMHh~?uNGMsM7!7&B$ip)$&0j;ezc3UjdsF0kPTedh-|{%jBE*` zUDXHi{_6S6{0n$CNILw2q%)QCm6K_wxxObc2;E8AX~Oq&y$k7{h*Sn}^MLn6q~ePU zVt-B$`&fe{eM6vFUgST@od@M5-p7X7eh+;d%zua60|7Eln|p;H?aMs@vJ!bMtO16g zn)N{UQqv`@<$TLO7cnyo)AXHTedqi_`3Uo;FmRgmuVLOLn|YT;{_BKW_u+$L9S)F- 
zn7@H^36Oswb*!h@ucbaFuI&2*BnWvaTm|jXJlhd;L?W2;iCQR318UehN;|+9cJD&@ z5E)K8^Pli2{2ub*ZNhy9frMQJbTKqu-c37xqmn*#qoSFm9L(IP%;!6KpE0qRaS-1< z>75JNpoe^2kle+!c@vZaLy#1^Z&XUQ-k=m3Z&ZqoULxh3`%#g6gHn1lSjtYUQL3<4 zkF8TGk;asT@^h|z25M$6kyK8ULm9c*+`k$%jr=_yLBvm@MA7p;xAoI%yncq3c z{LVq?OFSt3;EF#eZWxF|l$FZzJU50)Zb!9#I z)(Y+5e;rK~_#$5vZ|@_@A2(7KH?tLM9d2+8#$W=@!9|#bpd0CTKr-l{4D8Sgqo7GS zBxz}fL<{MrLy}=QB)ZN+k_lOmJ#k3%kOR51ha_+6kQmM#l6+*r_#r8@ACe;HAt^?d z_zp=avJ889(m|<+J}8x;2gS&HR-HX4)lf5Ykg@qesl{A3bx_RV2gMS3Q0l=NcTj9d zJJ&go4G9OOaqmHCBCcj+3+dX5bYgEqw&ULsd64}Vq(8Eo>wA#Bq;p^V@1=hqXlH_-CZcq42Qdsn`QWUjciqrN>$;f{8i|?1Rv->Fr`=#PU zyi#e|FUGx(DpeQuOZARNm6{X##kA)osZD!WsT8 z!%-LoWAbA88oA~s>M_^}kHHi06lgztNz$KsSq@;1yKLW z0VzRNgB6DF+^&qsc4he6dl`4!r?lYK59eSOLMRvcPzBA<)p|g>?>xX-F7qkyI=lfr z&mNH8fCChWg_34kC|YEC6T0wj`oEA#$3F|Q&n^@_z-_P}UIHy-LOv8j9}K`d@E&{sAHr9#;#S%oSPgeWCgeaf z48W`KAuNbtUIiY6U%_wTCEyGgO(x_(JviW1cpct?ci=<#1U`o^;2W5O+S~XRpjNj; zK0##?dms~Y7G%%(iym??=RzKS2FR!3EnxkvkO5T@|7$5` zov{Q;<1Z!uFD3u!8$)HvrR4vm_kB|=qkPUjsfn3M{9b`flXdn%=kPg@X2j4W@2ET>~A_;db~5+zpSzlkf~2 z1TADjJ`_VaR6#AY!5`sucn98t58y-i1U`o^U=9}0XSftDhZUf4GyVtdG3NguV~qVl zkcl}9vS%3UK@R3z$ivS7`Irl!(9QfG6f+M{0;L;8%3u|7uZDGS3&hcP*-KlL2$5@e zpC=SCsvTwYi_$ntL3?I_q-O_6#>uNiSGqznm$P2MHQ7g2iC&gUP9x{dwT4KZtQAAg zjgs$LCk5Up{tv<2^b4GW$GC5jd139d?5pQGBmJnsyKtv3rG1)vG4A%@KFt_i)v4BI zphLzOa*__>B%Q+{{3h9_@ds5hPX3cBS%>?xdJS{kde^cL2vj`+*ls{JH;S?E-+ z;yZX(4Uc)AGyd`y)$StvV{pH}{8bHq59TikgZXc&{Z`D(_p5umKUMAUcbM!6kbfZm z3IBrWX9L8aG+PMMj|9l{&H%Xt^Odjyd?e%K*8%b>awGOtz*2{1vL-+lMT+KspbgaP z-a`glk9kXgL?Jan_bHhxe?OmATGDZFry{rGZVaK_3MP+RNmDw?hNo$>4l9y-0Lj{5 zZ4_=ThWu;xqN&zk@cgn`H$ zqklHo%J?rTNaoLHWQ|vw#~ASA0MRlR^jq%XI?pw%2OE>vqqPT_!n!eWt7i3ngLiUI z{A-!GoBv+z|MvG`Uy1)R{8?8Syq~zCYMii8cXWYFF2T)<{|x?5v0m{d{ul6PeWng- z$B?6h3m{A+^Wzc3`6A}H@7%O^5gvE--KJ}Jra;1nE)aEpXjL-Lie1aKs{bU^;#L<- zynf$3oBWf)OT0&-q^%7k8u4F!Wap!#>Mxb zEyZp?j}x;;gmg!TlJ%t8edtYK*FMDDdjRE^__`@$#!lw`pQnxHefu{N&cd43t@!gS zEzDu5_UXse`=K+ilyE$|a~ap^c&==9G|xT#5as`QSOaSbOPsYn!t&1Z`N#VC{PlM3aVu`5 
zzmfScH6NzGB`o!l`u=K~7Lxwz{$7UL^Zu3CYr|D_c=YBX*UhJ$T92vUj`|I?;XnNf zaS)$&IdN~|9;AVhdbf&t&Zw5j7tmEETwNsTM7+kcxb-jxMjci)!ZV-Zddjx;l)uap zE{(iV|FLt;4eGNHj_1k1O}iGIl%<~rfrJEsd7!YY(;h+6$vDK zH?nJ-vHvmlXdPy67We2ws%Ja6z%#-eHuwnRg(1QZ5;llygJB2kfe;9V>18kaPs=_2 z6Q_3gPi$&bCLc@jpX}M`@5HYYdch6z|Mz5FPCHKfpG4jxb^FMBKlI_~Yh_0={~5#l ze_WvC(g(|9p3Oi%EFW_L-&0{Pb-j=FA6c@4bEbFm4WJKGj;z35nauZqzDpId8hZ`y zrg*-C?f&$C{ptUrTLATk(PbHDKMZ|pJAG>hvSAkeRQlXa(0qbKJ;s#5BkBi3;i4z01tfs->&cV@O-4fVK@rMU<6LUD2%~4Ou+eT6`4Yw zg>!HLE<(-4+r%_`o6KM?DpX_^*#z5QyDG`!fy7g?9k~Nsowtb_c46KP@sI$!2)hTl z7j|QghXmLQiI4>QAQ@6171BTldN4o{l!0338j*>_bslpP(uCatcJRX<2t_Zb|JM4k z5`=m3)M_Od83LgY2H{|54rw>@15L=o;DmC_C?Ia4ICYl zOOX_J%?cpNHP^#>xCJ)B?Qj>|3p-&#^P+N|zT@PwXB4ewkxZB`QzjN&!Tm2)CYP^L z9wyAK{@m}@A1l)vu2D8)zEia?xm>v$8OgIn0bMlBgAk2*Bhaj99*6y)g>1-&Qm6u| zGtE;FL)chI!u%Nbio?7Ws2(*`4H~K;%}Y=TPeL&~2UG)^lb~S@JS~BCc{lxE+M|pV zbTS|la~5RBFA%+j@r<4EjBlajAq}**`N%K0{xdiY#X-xYgniqk7g&FXa;SjH6SRR) z6}wETku|LAo1iw8wmy6rbv1JoP#wn{1(-0`Km_lg_UDS|&lH|mASIEMaVW#D6e_}* z;{g2yp7HDg$wlVOED%E{x?0!^nifdDn|H%qbB=dIYVp^d;2rVLLS~MsXD6sTTRnqSb_z2#E0Ngr|U&Ct5i=h^?2Ks;|O*03XKvSq` zM$+xj$6k!A#qIJ< z)J5y^uQqe&s>xWqvmec-SCi-(LBxmYs$&Cn?yx4FtV9rlmDg~n} zq)>OY6z$-=^KF+&32U~c$TIBZ$cj|{!G|NQUd?}RFB22gLLHb7UnUl)2P@dz zmxvu4O_xZ+2?U5V`fhn_v`|A)R<)_=hj z!v234pda|Z>BoObTKogPfmslW>$NZu!1s>*ZMYQst#BVa4Tqo{C|;Vc;4;Eq&Nsew z5%n|NfjM>!^C@73*T8)}Yc}v248w7_c`bDzltUE^!Jl9fF2W4V!obO|q<}Ap@ zPY*eX%>O}N3iE%EkGTK}@hgI2`b#Cz^nYUL|4^5gQ>RxTE0Y+jCI4Ih30Z^P6vH@; zab6wLJc6FyDEd#-vsR>ShW-yY&eH#Z#xd$R>P>YIdJDA1qB{-q|M7^UZrny4x#PRD zMY|KI8?pcJf00z`*6s9vV1EB!D*DlT(HGg}5gRn{Z8_o>iJNdP1}Pro0O5T|uRB;a689kXPV60H z!R&1amM-kQ$R7Ork$u=V5$+!N1$2QIiogQLA?;i+m07T)&vHfy=rCtM7I9`qET?P* zOC?mZ#$UyHz9}Ub9m`;;OAKZ_87xJ5&hAJHmJ(wyYc#A$VlPEjqy|el@-*MYwcN)5 z!|(wt;@gadpTK$S-@p>gVXzS%fJb0I+zY>g=RgB_PzqM~0BWEM8o&o9;Y0AdfxZ@8 z4L^lq_?v3x`%Zv8uoun~_9onKhkM{U_!;~X4#M+L4zI#7aKPIz4n44(^0OE|#T<$J z0{Jcc2=kqAFFXW~!*AgbWI`?2p$q;DqwqKI(xtzW;e0_yXy5KX+W_T7}yj-SV31-d!UckH)8sU8) 
z&8M%uLRbpZ@OvK3rNoD7i{=z&)U-4am>1&rYvdB-e&nsl|A1!9eSj(p7-GF;O{sqXwq&O)` zj+5flI4MDvVlTtJ9RCVrCE<-RarCX@q#9X+-GqBB{&h(6xj3<0jFbA=II$va*zLGG z@NYmi622*hedJu$-yPY5y%+bskT~fF*Ty(;!vJ`| z8^V9*VGxEu!~fmWcy}%Do*sOMWQ5)!y6`(B6PblQJLV42$K4@0$Xx7sxEt`#M;2f& zOuj>kI7^@yS%SS3_p-1%q&(;jsesDZJH!Z8n5&@%KNHkqt^@PT9b$oc%vP}BX9ox7 z255}ELzxDkd{osn_d;}Q4>;W%+J{ZJ21ezJ% zAGF)!Bpov1<3tCUn6n@|h5Uyc%(;+^hVmqYx_3xV+Z+xG-I~}l3(Pt73npRo_5lc>xYmo>~0%pD?tS?8BRjBn9wom<$S-8x-pyi2+AR6Vg4E5Dy=^o47MmppnTtIGcOCwj$Smxd zz|Dfbrf8Ar*lQ9tiD`=O=^Wn^vJ$&-+a}fzH%T?J)V+!I!%eIoZesm#lN54Y5!V+Z zOZ;Nx73!c<@By5M{nSsZsV|w5)-X)gJcQYV{4@CA2zA^0v`sJ57QF#M*l&ir;g|3d z6v4Z+Z|5P1u&W^k?tx#xbC3q5@C~2_r&$g+z-_P%9)O?26OagHPzwn>z_U~&ol7~EfC+;xFAupap+YxzeR^SK#^Rc(lo&!TBZ z;bV9g{foMc7cd z)#&SfCO+FV>pGvyknNnfWP3%#E=$Dm8tdh3g7}d57xE5)+~3J0 zO)G1M&NEj?+sBtn`x7D3cz&@otqhXpudkGrZG-^<+d7VgyrJBZ_Uu8HN^|Hi$OFrRb3T&}sCcvOYYG|$-nJca(#InF#pT4&LJMymhraUdIZp#PlA{=`)DUzx*c zf&ce>h#$^(V0=6Of2ow!^8Nnb{&VQRjIuv*oc)Q&AI_g;PH2$0hQLDl7|4e?(&|^F z>FuQB0O|7)TtgbI!tD-Z8~%Z$VHzBThw+~yUB4m?@8z0D$%`1`Ity2k&JA31ko^1! 
zVaoA)iErf%_z2G8=ivI!xo(pD8Y28(;D2y`j%xVCZ{eK7Z|H-WXr?UQ!%*L+AJ;!vTjs_B1>MjCz0e2!p#Gb~4eGx+Jm3W%48qVc_D+xnPm-P=!WVE8`4dfgX-T^W zu)hu!q}c_^%`V*UAq}5~3exzGr28AB<4dId3c`)yR*wuOPa;YGjc_?(?u7>k*9_~3 z_fqb;nfrW7yj_IfO1yc*c?;n_B%gnZdmY4b{Q&+K@E;@27YXw!_Xx%PN%#uxCC)*3 zlsLapzYnf|hieiD{{ZogBVR{8N0=P^2k~p>-fyYz$@QJF*GWeRX%|WwLND_dJc0&K!NR!OT*NtgO~LFF<=g~!FgonP(tzE8Y{I`0_xb-2Bb_PK z$@^HlNo0N&Sro?jJcRLiG~;t*SsdeYsK8zh#&E{>P@Tm19_GjQY3NQRemA~PBYsse zHl6!viC@S4Gdh_cz@CZJKh7``F*cZ&)O)&drf@K|P^GDL|kL1fIu!3}| z$L&|7V=(D{6KQr4IR$?w{jVVXzareNm=_Rk3i~I7Z6;kWBhU5_E{3wEhf(}aU|&s~ zAHls`_bYe?4#GEF^EtAZdpt}Wi?QDZ50Ezx!ox5CH{te2*hpNvArT_U*Nw0bBDvRA z*bX`Wu!?uz425CnYaok5sZ)_3Kv@jsKaTQ`tPH07<5v|<`A61ZH(jLskFS*n zNfUFRpWH$k)cg6#!^j80PMnS)KY0}MgM9_Z#FMLokrCN<82i;T#*}VA(44;MkkI6&(a* z7khZSx2;0YoBx=z7r8Hi|CVF#LH4qz_xt~gH{#b6yh>i;S!?0@|BKgie!3ORU;zg- z%+Ssu?NG&D=xXj+i8Sg~Nv(dB)Ikk)6ZYaj_77iVA5{=##SkmyovRpEGR|O)yv%Q< zhJ%HU4L%9ccB=rEO{~R9#^&jM3urg^}59%HcGc=gky8(7k zb^ltJuy5{U4@cw;qW(*>-N@dJqCjyXTP^7D`Jv0ly^E4#rSKv;e%Q;y(D@5N_YYak zK8>o_KrwDaHxOADj;^UC+5qX@Qc4R44Ycn0(3v9UbP3wCg1oa}MZxS<8k3 z%=uuzKM&p@uQpTue*zD~L3jfc%H%U}IpuO4JPuES1DZjhJiZ63D4(}MI%TWy6UsT) zx@jwXl+jeRoWfoxf(XiL9E4ClBOnfpa0br963XmKxP-W_fd7PFLJ71$HMGMt{1@f- zXYdP1hak#wI6O}Idl)j|O5W{5RZ`BLgo98Ie}vcJ6VMna|7Dba6XhS7fnC>2`S(%& zk=fYwhr=bui2ffkPe=L3EgxAhOZ$&33JaIwBGw-TC9Et&<7vED)R9GxP@=vQFsHcB;QxVPvCBN5T1jN;7Y!Qd*K=AgG>1))x7~a9||0(@-xRNp2_3&SeBOZj^a4$RnKW7Yj;ZpJv{svzGzr@jW z@tpTvMqWZ1+zgMxui-aff>*%>K4^wcnB!W1fQKQX7XuAb4$`KW}+>_lG}vWw6sh8)O+ybG+qLp~HhVG;WO zW$6DiNl-#Or8?H%nK&qi3e1&m*58>ps3P9#PS)SKhY7tnRUfX7Ys`BVie>LY&SvLd zz=`btOJ@JszJ=m|2F#7yIUjfjXYg<1eBiC@`AT5#S3Ktfr^7tYy4q3k~kVgFe$`_F>V#k+tG5$%d5i2M(x{D+hOVdVdq zhz|Xt%u&uGLWe7xaC*Y!%nJJ-IA8HB=XlWe=(>VWv@Fv37;$DZlIa$QAkG)Q*>~BpGu_f+Q>`6Nn#~${% zqW9Do&z{c&#=yHpT6S^H$PWIGvz`Cr#8LiZDgRMBmCop$3detF7Q!6)7>ryDVKDaC zgUU6?NLUSP;YPR>ZUU=?d)s-2BA%s;d-BWH+eP%YjOcBd(Bpz0%pNCtTTSS3aSs>FpGQNzrv7`}o5-1-Jj2i7 zPM})S+yf85W*A$)OSupE5Tx*46MWwrmr}RDY6yptts*xd*TWT15l`BaR%OW21k!vT 
zX`e_Q>?JRt4r=$12OEE;)FkmtOhmb106gG@UEFWb5Fj6Mzm9SG20H?zYa9Ch3G4?z z_G0fteu{sig|_|NR%v$9*4t^*eYEY!Htg+4a}#}oqOGhQ-YM$;6>RYR{}mcA|BE=5 z^W2X>0K^)u}r~?}`K^t^~3;qnp;WJne%339?hr3`qJPf}F z^e;6%@CqD-5%?Sy@!ebnw?Zszg}Z^@!)qP~jg#_Ed#d$O{*f8jb)(e(~pmL z7pebeY5$RV*bTVn<6nR*Bz#dX^*`5_AWN~A?MDAE0sTK@WfJ;-v>8>Y=>H*Wu$yqN z#lH?|b~1+SMgPx7UlM7E(qb9K!r;a1qQygVmJZsz$_@Z zClc;C=m#&nN*sTJa;OGd9O=D{^xlEK0n&`!5=nkUlOM>|RP-%zZ^OSG*@(SqFa5D3 z@&nmJ_}-)Rw?;^BWC!-nNb-a0yOG{m#tOK9e_o*ryL*cKILDjCNe90 z2`{ol(wdftHf)Kc?_#XZ85IV|hXTlsS|a+;C6YtfTx4n75-B78a_(P&Ec9FQ-F_@& z3Dj^u6YjP6*C8vh8>jdlxqmfMlNcarpxqZh{SY7-rU23D0wk0AFAJHCBDx-P4(43^ z@{oomblYYEqyP%_0q7V6(EkmP5-9Bqp#2MA{)_b=XMj|e1&EQw>Z(N6f1oCX^&hB3 z|E-Qa(B@#)f5OqN2xI+c8@d&NtpDsl|0jm^pI-Jjgs}bt&Cn9VeyivJary;tHerCY zhqL}O!un4V>p#hyZ%14`j17Bn??d)ucM*>}j`bg;2fG({AO3^LA?%uq^nYg2(?_P$ zer24a{b&4_30aW6k#QyDKrZCz=>LQ=&J1Uqi7ZS~$EEata4#|ayZ?i|9D9Y2{twcK zy$V^4e+|-vy%t%gqs>jF?M>r*11s3T4i0F5M*1yHI=(jp-y5>k#P?|7dxUoANT&Y- zUG$f_b?pBi36$Q8?Ejx-EPIUdKg0J11GF_Bn4kYU>MtLJP(MRP%|GRO%F2zf2YvJ~2oxsM|KS~qX=h8I)bCsBzd7o^IqJVT>c2TLCeBG! 
z${b_aIjO;Hg4*~wsROfpPAtwj`XF<(k8`w-b7IHrz^wtuQ~%9Lzi&=lx;g5_xXp0@NZk~Tm3nb1l<(+SOS^g|=*mm-^D=x3(V z&*b{~|5~g0aEx;p_R=pUKRRLlKiFLFJ;V@jxM5I;@{ik;Xgs)TSx;B^uUee_j0%f9)_Pn0vv;D_ztp= zFM|zQAP%>k@FE<7OlSZv`~gr=(0m2ca6k2ODrCSV)bU|(IdG($CK9<34nR7Tz%L*X z>c9a{!hN&{55nIue*XdxXkOtk+x z+J8OmKV;LU>uK9_khuoh|1#Qt!sH_huxAs;fAIdrv$X#gY5&jB{^M6pm{|RH>%lv=R|1$o^yQ9peMX&^WHF7!9j$DImM{Yp6kuk_O>Few!9V($3ZseUG1~Ux9 z$M7lq9fC=VrSLmA3>(z+f(odE4!)yKzNapv`X87cWH0tUWPbwlzk8YgNn)NCss0Px zi~I}MZRR-=K@;efvR?04*|3R1vsKn1W+9g%vT~bZIp~g-754D)<($N2~F#d=7h%T|( zsfTG3?TjlOTf1eH_!?;|o64wnP1L){2l#eghucWE+xc!fW~pa&)VqXx8hekCdK1}4 z*nZqxz0|Wl>RqGhYX!r(fQ0uW~W)n!kq0kEBYd{ zl4CL}x%lVdHb4JIT~V-&_mAcM>0cCYWRC!pMx*~S%KP))72~`=>1l+jbM${m&l=Li zM7q_IMs=9YLG*v1o_>$j5B-;L`ahT*xHZr}X(Y}jXyzX3e+*iq(0_rpNc3L_+p&lK z4|FBb|4F3(lT807mHy8@`ahUm+v)$n0MF{#O8+N;{txCs+*JJ+O()|+&@ygKcQZb; zGd^VOnn~M{g*n?uI|Dgov^z!285@{8rv1sMpHV#PiaLdl&I^6RwAOed-12i)Jn&lJR{Qa}N`I|L6Gr zr}+M7{Nxnh)i?}Y%v2g!2m287-{4Kk=NNQT)+hKhQmIq^xtcP0Ql|``)+ztOoEAkH zUPIdg2Jn(LJ{aVkhm2lyfdj=21A6*f&_f#Zj&@4lc&GFerlYesiD z0)JRbodg#k`+CvnxKAQP;TH{KPni||L!cSk$v2AR=pjuGWWi_DgVQ|Qm&nS!q<2V{ ze2aN(gIOt!EmF#MkRRK~j|B27k^F+dly^?;^D;Y&z(V4uGY4=K& z!7JI$5=B4dW&NvA$?f$@UPz&0DDz5wbcYlitx*ckdZp;_Yf`+mP${`!Qc6d?Qbzdl z39nS_C{!x<7AnTnLZ!;>rTi~u{EPm-Z?XKH=dEMH)9gh5Ka%smI0MYe8DO^H#q|Hs ztqwfN8Y;TkM#jI;9L4%Kw4!(Igf`6W&=HURzv+Z@m7Spf`;PQv1VOe4O_`F2zs+rBDXtPyv--962skM~_SO_;IP380Gut{EIQx zaKSwK7xq7lN&VQESdljNfZ0dJ#DQ!G8{@(2h>2eeHXakGTNyzyQU2PDu%z<=W4o zeD5i#NIE5@$*0hRIVDvIr}*#XDfv5o=h-0Ex#6!;be?*EYrBp!?uljXd{?bvjzrfU zvtu{w*K8WIVYesn?*Z1lTl^RY?qMx`7yTX@(FVpGt=OGM75SQYvxfcU{6n0pw{t*> zP6bHymw%C><>=kq`WG?15Fmr$%=&fP=Owrhy_1@j2ng+U-}qd`XmR1T;!*D0kzgHi_N`+QOXT33Kn(C)gAkcN+-%XB}w zOlClRrsc{1j`)!dlzu%cTY#xH>vjwdlPY&xc9nmsd;m z*H=lEEMt5`KO8hXUn}vCET&Jfgt0*|b4~CJ><82)GzZ`iXrTxypbAWIE#>D=$ammd z_!nHtIulcF8m8nl%kjSj$uykiUHT_WnPYwr>5}#0&bwI#vTu?B+MUdrt7P~^>fy?0 z(LHvR^yBtAWD9x->zB#70CZ)jw*Lw%Xuj`+pTi4~3!8YAEpR8?1^2-HV59H16Iq|k 
zIwO5RGyN9}c=rv6uc%K3kwf-Aab0kUJ9$6`jBkhs*|TRrdXomEFJ*xK#DH{!4ba{W z@I4Pmci;g3fE^I$zCOy!fV2~?@q~-@iUHPZ`dF_R5Zk_T#lELpaUdIf1MK&yRxB3= zr2g1|SdR`cA5^87q6at^;0@+O%9TpuHTIS(Rj~t7t?%PJl>zxWf^Q*lfcd0yrI6<+ ziW#84HNgDV0Qy*Oh+(Qv^24f>0?UA8gMMqDRXG14z$^hxK2N_;4RQ=3x&# z8zPN1NsG)@v`seFzZ&Hie78EEO)^cif1%k*wn?YxEjHG_ZIWANle~y*#jr73$wwBP zv(f%Jq-cjtig!0k$yS?`LfM{1DMwbM+N5&3O^leU;%!pB+a@&|ZDNYENo|x(>LP7o z-s2FXkG9(TE*x{A$qKPkKifqg(!~6qT^_;hF)-ksA7qz;Ks)oTcJ!(3 zQXFQNl2AMIpAGT^Vamr&p#Ni+O2Qg1G)Wb*`e>8X%$$&?@ULysDRsD+x7*Ptw@W?o zSU1|mhO~#}D2{kLx+Hp~G1kue>y>8i*WzxH*6ljQ$@8_vosjlkz0#4Ip>%GwOIL~= z{U1B}e|n|&gkI@G_VZk>Jq_YMXP2kBr)R=0Uho}ml0oFNxb-JFq;E7+>5XcZ9^G5g z?QUi);gC*4vveSbB65_$DF=N9y~42{8jrytFCg7+hqx{}&`{}lG&p?Ny`y8ST%v925ni*uj zB?T9oC4ZYk499YmJf1W6LXMKd{q)ItB|FJMW7HsN5e=e^ZD8K5K{9qVNG12m+}^7Aol(nG$`L=Akj`<%W&rUq0#sp}zT|Jh1aQ-f3|2TA?X^X^klp7Rq-UlY7m!&{C76UfV)9Fy$$StA^*!7SpRujhLD;fGrB_siuRb9{V#b+hTbf?m^>xZ zZI-N{R>uDtMQ4|zNOE5Fu zXqAdxW~tm~7UQl~<{iyay~8Xu@n$jMUc22ab=%BhK5Q1th?)McS*)XG+KYOzUo^A+ zMgJe!c*4xL(kji!mcUl}|0l&+)+%j@8l@e#jv%vi5>MAwvvlt>OAoR)zCh^Xg zQ(P%#aUac72DqPRrd7N_8pX%+4W`onznG_Jd>SP!%_3U2h4nuR->pH>g&34fWY&m< z_QWdsXp7{;INASfq5iXo!OtoA$bxVS<0*?2?XgI4ibYD2Em9g{kuu!NgDkAiTBI_~ zDn?}0*-@!R*7#Y(G~tw5WZkHf@&73OOQ+PwrzzG6i`e2VVh^>5gLoPuER6px(iD}i zG;^O8r$K3rvhe-}r7g@N?M+VlmuX68l0~|9j7s-WgVK|vQF?b-IDge5{d+Cc{|3c9 zVNeE;o;Ztm_gTd^Ymvbzrwm=NFlSINU5uMLksaqqGk2TxCDlu>p-p;_o*nfva8awc zgW6cXe}_4fdKvW9i_cvz-i!5O#lJqjO)OjMY1_@xK>j&!vnREQ4cQW3kM2UfG+}Ps zR?nENp1F&9=F;o=&s4pXl(k7Q_M)bIr7*N!DvbF`dB{7Qp;nJBy;i9XtCuR=jnVbg zrRhrDo_eWu<|`&&zEXosPi$lEqF&Mx+UTU#OBQ4XY8Bl~n`G>MM{*PDB`2v~^t@j- z_bIrLujI$niy_&dx0`9EcqstK!9pRh9i zvx;dZPpNfVnU^n6%wbmYzd)(yepX{U`XA|vJWFwoGJ&oWz? 
zS^0j}`z_V`{mS<{b4*oHB^ni|)!0Z3UI|rJRYsT{GspBEwTJHg;->KIoZYjh@)vO* zaU?sXWyE)-jCRrgPv`nL{rMB6f^9N~ z|Ci;{U032Ay;5eGoP~91zYOa4ANauWX^x)(1XD($^Wiu)_J!VOpPu3*-dyCiGN(jS+3e@4>8sn1=m7o>^zPH;mYNx*WFdt!Jcb=d5%-5w>-t2BrPdSv-{vTPx{W zVh&sLi?cif*lp39W_f3BR>B8oc_(95f}{(SX2pML*yg*y`TpP;@f_g$pl_Bwok3|N zc6RZ7@!}b=6E}QrT3#P-w4M02+xGgl9C6O}`d@@?U*~vxz0<}2$!O&F?47m~_d9K` z_Y((+Z*AcJbu{q5J%}3{Y~8zgbcAmw$EuI}fP?5^^b~4AFQPv582aDQ>%XU*M#{NA zYdbMrur-|E_a8YH9q)=bi5r=BeTe^zEGl%+uwafMt^^sjQ!z@GAw&!5a}r{k@NuG7mpQnVC5H@LX_( zq>fy{_rR!R-m2J)$715eD|kok3Yl_EaGhst;sAAI++hwg!7TM1*nb6MoQ!QeSF{Zh zk34vV3=@wLkD{&@X(LzB)&USKyJLZ`5@K?+8yw(}NXTw944Zi7W7!%t+hm zE2Q-!##RngZ0)JM&DV7W9#wuDb#KMy|B&BbI?X(hxzt{(wgv@c+58VT5%*_SP-_4EXFoRIbl6Vib?+xY%(3fQ_+ z0b5Uq`+wi0^r53X6COkT*G|X)8hm+DhK_Lme=uMhc`<|IxOWP4NCV z&wuw$%FOhhn!rGUA=?;-_OforYGP3yc{7O|J)9lIP!U!{p=2zW1H#AJ})yUe%B8A z#ptu!NuBtTO+UCpGWYJF9d_^@_zuZGvIA~-NRwl}?7Km-E2ecL6%Hc`?w)wWA> zYP+E*&SfODFSP%y+Y{r)#@RP<9NRqwEqfU*EhtvYqGu?J{)r zc3EQC$RpRu=)UbTwspIVf0k{YyG~B9d}`-)GJVN)_@Azm+0SjK|6@D%|J%8D-!AdJ z+a>Y9c1a%L{m%!tOZw66k~z3tvdrh0&$F&@^>+Etl#Sj;AE1WNE!;odB90l}e=Xg@ z_}?vz|J}m)&n@D4_ZIP1ZxP@7xA6NV+U^K`R`U4IDRY7e_W?{VQl!s ztun$7*^gFlrT^zv8Bgt$iQG<^e1E4*#kytsWV_77?v&ZoopLqh&+l*L{o%f7`kLZRh^Co%`Q*?tk04|83{~x1IancJ6=MWy!Sv%iCnozD;f* zzu}qdW381LIAnG@UOUY6yUKS*5Y+QwJ|=Nftu9YGEIt{3}9+r)9nHgR6PO%Ain^~m+& z-nWf$m~G^6Kp*VF#jbN_lh{V3P-{CPe8vuzS#n<(j;4sDa>z1z6{Y?Ia_+obKm zZTPRYG5)qqI+^cczMFMDS8wCH^=9e2^w<3Vg`4FVYM8lO?5VqXhPs>QUw;3Bc~@=) z|2w~baf08!_>kYf_=w-X=vtxggx@Q;M?j;1+yH zTks)$McTQ}cf7boI$7Rz?H2BTx&H6{d+8e!;+H8wVQr6VLSL*)17gsp`x5yl2%-_KEe<%LzyS7MzG)dB? 
zKEnStf&UHtX*~7_yph@IP?9iyk`8y(r`VcX5p(w#NUL82^9t|HuE!?~WXO1V1L~ zpTKX526OoT&@j^@X!JDxKQvy$Z;B=_;Q#pu{|_FY8N5ET#B+xiWF8M-4E^c%Z%9>M>E%1l>KgK+=JG50cRL+drZpNLbagifMAAkFgM zBjJ$Qg8l{iuPncuc;E)Uv(Ud{dK>zXbYCI<3-oRDkIa9c_y_2A?qw%P`!v%Gc{DVl zEsF0E;>(D8i91n0nny`AL;6MH*U_8k?@^3ppCWyV`2S-5kLaJ#TU>ws70b`;<#~~^ za;*O{^Pgn--OPUjm00#~h@V1}*ZzV3`^)csjPSc3CpKjE-rwV^MTzY9Y85#s!1Uy*0YCrg||f5Wr~Em6j=Q2bqf=i^d-r{>^) zmKf($`sLr#$Fjyf6u(n*=qvI$>J}mW5E?@n^tWgMEh9(jkMag_!$+K_oQvm~-a!A1 z{wJP;yG#78%tt4rlei0xqmS@AAgCAhp=LY3OLM5kbLyHj#rRzs<~!_vV4Pw@+PdzR ztH|Te(2dmNdgA{D?M7cle~EUo{9)qzI8MGr{55nOu75}TBzhJ-j!v-5LwpPg(xYHTpV;f6M&y#M#U6N1;5=>jhL~ zx`fL6@W-S16HDA*zmC5Ye?3b2@Yka>>oRD(#`yek{Q0N({}dBTGQEU9{{;V^f_Ux% zYxtaDL{{#G{J@Q_7m2oqE*Wx*T*W!KpNth0y|JVDo_-;HeGd_&Z#zoxC7<|tO ze&?czaW2;xIm-Mo=6m15=R@55Nq!dt?@pAsiEY}6J3i!hGKkxlpQLYL>Rx<4#1nh* z^*ljv0nlQ33 z$u~hc^TaXoNMB8#21Sp>A^d)=> z9bJAkK2$W{FFPpSftb z6fHCT3VIziT=HA7f57z*IZ@+h*{`&Xo7hA9dawH}{+Hj1pSBI4AfBuc3e$#>mtT|U z!QayV&3;BL@Bd0#UuM5P!G6B>KT5|Tj*Ca|VeEWGx*y~?xryU~ebC3gIJ%SL1ofX} zzoJ1jgoZz0zoJnzhQ?2FT%bubg{F6MT%cJrhvpw(zoIxwpyYGxSCmE>lzo8X0_89K zCn->e;z8bPrw(Q6P%-iQJnQn@SR^j-oS8q4PYz|5{#J6tX_jYLpCnGPJ|4nn$Nc=s zcky-L!~1||T{O+Q89G0$->KsHbbN_tW8zWLju8*DeuQ|C`61%|3;6!}-j$=o$51co zL*1wcbt3b>6Rh_rh+Cg~SK5f1*|vqeqQp()6()|jxGy0#zDz%L2wd{6c&U^5ZDRMY z_^n~o$g~SNnwIJ3q?5LW?{VorTG#)R90xY~>HbIb8MO6F+}opfSpEg(uV(rz(^aHU zJIg-DvO(fe^q*Pw2UKSIT9)&b)$n&r{~BG#{68`OUzomuX&c}9|H$+wi2nuGa zvHWJ@+tAn1PUb(ux~tG$rvF#szhe2d#6Kf`34MX-KSRw--%8v~{1=M*cGAb0PNV%y z=ZUZ6zF;qD?nL*J=05aWmj45N%nc{-|GtI)_g(sbi5r=BeTe`2BmBR_Ugmu)_p?6m z;bjs$j{h5l`R#!S|EE5B?=_5nUn9-LExaq-O5DbLJ8{R$*GT97W%_@vq5tO^`hTvW z|K}R|fA}Bm=-6ZbhxC`Ok%6Uaxc|KdKg2ck|6C&@#G}lQ5s!a#jZ6?vGCxH;&H5SQ zS?1@6=h;4XFaO{E;5GbC1OMOtqvMi#aNi?Eez`pV-O1 zZhZd_;yTIxM;_$;fc=mBD1d?|+5aevA}IRu0@t+#X@2yAw2)8h2>$;W{QtxqJOgw- z!TVN+dEbh-=VjjiBkp7VDDkm<3-teS|BnXI5E@1!XcUd1@pIh&qd&d>$Mg)EMRVx? 
z^ZtK;_t8)WWgo+rh4QF?il~IjBMVX)SdhGF1KQ;8YP2)cub@@*8|KdtyIAK(Utrv2 z7rGNgnE!L)pP=W_OXwNoKyK8^`ex$8w0Spi7xT|C{dY_cb1o|4qbwUoFS7gy`piD= z^O=8{={WH&<}<|i5!+ezL*k#HXV6Q?#j+ynN12WzAM+uUX8KnwZ({mY;=e_2qBiFL zfp!0gZ2Zo^C(s#|M>)5fIJcXLTi$2?eaQa%)gQ(B|Lqt4D4iS=U99USjrITAkN;83 z?>F5`ei5#%4cueck)y`(gBmaJ{ej#}dytoPKIFgbR}$d32&OoGI8MUE5$5~JV*m}J zAvBCe&?p*1 zwS4ZD?Xp8YFW1Qzx4dIJ3fJ&Asb4xyi+r_j^rFnR_(i++xtL(iiZ&|jk$ z(M#xW&@a#t^h$d%w7rH`}h|TTNIPGGWnfg=X8TYsv~|Oz2OVFl5$NLneJS8Zu#1(S$uc zChX-O>U^;}GfKGLF=E1AuN9gS*L%lJ*gt8)d6V0Q-Q>UF%bBppl)f=sHsOe=+s23~ zchh6iZ+a3Yj1QPFVOpT(F*Q{4hD$p-O&Az9VV|jw9y8_Yc~j5dIi^k6)@Z`c;otRN zoOr*-nmB6O>UTqCtKZF=8vZV4>h`|V`TaR_wA??6OwumYusKFDn}h$YixkcHJnD?z)waiOlLiD-6OmA%I@1__wBO#4%xj=c9RVYt;|fw zLX+iglciPDJ+kLUVSnwpRrc(ZJ-cMjZrQ`ay|U*v*>k(>xkL8slRZ`j7SSHFsYSI{ z_TDJ$SGwT$vi>$JyhAL|y(X2lr*D(nZWMO4g?*b@Z0+sa<@Ote-F>@NDXS@NH!H3E zeuvy~qp;(xhPlHmw)Xu#*>|I`^Q~suXBJ!g|4zB{&b#Ex@)fyD_zCE{(4WiQa`&}z z_ucXrvS0Q;DEr^@wjGwQ%2%J0ugTZ$k*_^_vwV$ZPdzF3$UXPTJwKFteku1Xl~7sk zm9NX!_y0h?A>a6_eB+zHlyBVsn0(`id{e%8zkKr{`R0?t!UJ-^e9(Pz;6XX?kR14d z+$Z0XZ+%t1MR-uYE#JORzWx1sWbj+$Z1pnSAG2`AfN9?*FFTfB!-Bklg=Wx&Qn33aKBE@5*;yknhR&?vd{ulD`(*h2gYuv}Bo7_nV~M~5e^?&=nmqhNdH6^21Np%LJ_qCx`Jwz9`M2_@ z{FVGW`S_RO4rTmzdd_tahNS=89N%@KV#C#5+r^Q5shvi9mk}Q8J zKZWh5kI5l9WIpI;a_AX3^pYIWJWb;t7E+Pu@P0Y`kQ{zk4*y6FKPiU~JuQd% zu-dFQ37%#41vyO1T?}{0GxE%SdFJc#%tP|b!}83}rVY65SE3AEHueNvn;g8LaQva$wIpNnoGi@CLQEFovXGF4q%5RlAuS6z zS;)&mK^BU#P?Cj;EH=oZT^1d(=#<4qS#-&wTNXXC=#@pEEc#_JAd5j+49Q|x79+CQ zEQ>9&*eZ){ve+(*9kSRdi(RtVEsH&}*ei>DvUpS$kI7=cEDp%xpezo_;;<}^$l|Ch zj>+PB1_}4G$BiqvNR=2)3P)pOS7^xCrk6P6qBX6 zEG1+qDN89?N(&DCrK~LFWGOE=@t2CiHx#GUt6_OHDzEm-s{``tl)O48ug=SINS4F0 z9FgUyEH}w=vn;pBa;q%2$#T0acgS+5EO*Irw=DO_a<42OmE~iy+%L-mvOFluL$W+7 z%VV-UF3S_LJSodlvOF!zGqOA@%X6|!5lLB2$#PbfbF!S5<$^31Ww|8FWm&GsN`tJ} zWyK*YPFZP`6_>2IWyK>aURm+UieFX&vJ#Y)kgSAdB_b5!F9S?Q9MZdvJ(m7}t9OjZVEWmHziWMy1dCS+w&R;FZST2^LcWmZ<^WMy7fVzLsK zm4vKhMA+vIqc9Pg3iy>k4R93PP5LvnmXj*klF|Kk&Kd{T~2$?+LE 
zJ}1ZL<#(F?L#lGU=TR^*KadBZMmIOGkdywNCcxa1ADyy1~Iyz+)m-tfyC0eK@RZ-nHH zu)NVLZ}iEV4f3X4-gL^F9(gk;Z-(T}u)NtNZ|3C9lAQF*$)KE!%E=}<*&-)9X@7wlv6`; zYFJK<$f;2|H72Jf<Da%xsi&B>{GITe#raXFQcQ%O0Ml2chZm6Nwde(jLA zl5dTeV9EqDCP>R$WrE*C`TRO#f~*OO1i$H(-!$k+WGdl$!F|zBY&hBB9jS`+3>W>+JeYrZ>ltDsx(<&%Cadw zX1ghq+my*|%H%d>l1`bVQzq$@*_>3%tU7C>Bem)u$qkY78Ze(vR*uM^^|5S zGessABY`z*N`%(rY9f=OKFt&qv!>`-vz65hRwGzVFeh)%^P%U&)IRoh;x$VvrJiOdbR#7b25=qCRcaBw@-Ss4urNPnbLtrfLM%)^#Z6>I;EQSm{lLIY5#o z5rH*p@=h9>dSp_VB9bQ4q_woZhnTL2*f{W7^=d2VaZN^Mx`Ir?sKBhPVYqnll=Ie7-<(M|xnTRcO z^=YQ^e9X3ala}D)h_q(Tq%djoOr37v+$9KG!RaOwMXaDc9W~RY1CGcFn&q^e57`sU zm|)flDC4yKVqo&MQvv}~4n8KO<8&jfOGHY7`XW;|GiN0fsrlh4~@CZHInN$E7j zI5|`a*n;5W$Rsx#I?aZxZ{&NBV8{d`CLo_ilMlhg$ZX1-wSmdJ(K-gLh>Xmf$;y;S zU?sj7Sc&VEHD7$zabf_gf0?2~}#?Ya7d$*aDV zIit;VJ&8Hp>d=tar>Uzo<(AWKla{~~OJp`N5h+Z>mhSo*t30ObYb;K;Db4+M%mi@) zYg;ll+g{9{?>VO7b=Q}YA2S|PrpF{FxHw&(V^fbQ)WdfS*Ofn=V0`l=#bn_#D@fos zn-SEfsk^`4_I^_*zp0bo)X8tMCHOcpTbep?_L_hU{Z zB4--pj8o1u${Dwu@yHpkoC(UAkemt2nI<{YDQ5=c%&?ppl`|7^W=77;$(fj(NywSJ zoGHkevb@tE@7U!XhrHtwu8v$A-|{w9Fj<=s3XXSWDJ?gDT zQ}rlbkFr)YJ#coKl+%M&G~<=CGvqNt9 z(~WvjA37>$bD^`@xSY*Toy~XEqX9Wv2%XKdu*f#0hO^}qk>hMRLlmq>P4%d~9`)3t z*x3>rm56{RDpQqm)Y`nQVep+r;aeCjPV_VHTWg7?pF*UZKAm z*^vV|Q6q98H}W7a@*zJ8pdbpNFp8ikYC_GZ6}6#mWb*DqM^Qf-K!a!qjiCuNg{ILA znniOcj?!|@MZH{<@1i_c2kJy!h`e0nt^`V=l$>+J?S|V8uNxjWJZ^a0)Za~cZt`=Jmz%uY@v4q23ELpRhGP2YuOF3EgVV1J8?87YO zW!Z;W%FMD4v+Tnx%vs9MQs*q(S@vU=GP9JKrR}pYWZAFT9z-3o)FDgTWMRs(PqMU8 zmVJ_CpJdr5S?ZT%pJdr5*>S`^$ zXO`n2n?YHWLwN*qwuoq>Y#CMLTn_#m{5kk@@aN#q!JmUa2Y(L!9Q-->bMWWj&%vLA zKL>vf{v7-{_;c{*;LpLIgFgp<4*necIrww%=itx5pMyUKe-8c}{5kk@@aN#q!OzR# z=W_7p;LpLIgFgp<4*necIrww%=itx5pMyUKe-8c}{5kk@@aN#q!JmUa2R}a|bS?*f z4*necdHD12=i$%8pNBsWe;)ok{CW8E@aN&r!=Hyg4}Tv1Jp6h1^YG{4&%>XGKM#K% z{yhA7`1wLRmxnVCXCBTxoOw9&aOUC6!*0Dl4g z0{jK|3-A}K>1YZfh5`0{Z&v7X}SAwYo4?muAt^|jeN~2~3`-f7GAw0S%CMASDZ^4`pOxV$v(L&fmDy)yxXK)p zW!TEFm0>HxR)(z%TN$=8Y-QNWu$5se!&Zi^3|kqtGHhko%CME$cV+m>?7K3IWf-|Z zIah|W%zi7wT86a(V+F`sU!ndL>R+M$73yE1t`+K9 
zp{^C`S)ra4>RF+l73x``o)zj@p^g>mQ=vW;>QkXU73x!=J{9UxDav~dQPhH3QJl}k z=)aq)&!paKNU_#L)|6vH-gC1#m-+X&%)jR*8+SMAL9_CnCxpT%BJX(}s1bRP4+Rn1 zdD+f8j7HHEpNr9dH&vgRea}m#UK3eUKD)fq`sB~cn>P!8o$;k+GSdl8l7yu*9mK^r(I0jrI1+MIWgvxByB zkh5d(ytC=Nvv}S~gPEDB^Uk)5YtK6q=bhawt*;@?6jMWJ*yy}7U{W}!kC`DYZSSC_ zj!~oYjtLerMZF#4M(1f0$DCCddxm+6bxe4C;*0dFRNV5;Ajf*$maU@-yX_#pj&^R=M6fY<&@R zzF6KY$!6;%&7L>>rd+0|M?-yPjs>KrzQEBerXc3Y2$~Qxrn&xGnHQ&^sW%oo))6yl zu{)f1#F1g4mFtnUm%yStzqQ=t+Q(E9rHo9WRt2mXYp+;E8$gZ}2Sv;}yjzm)*lL7N)f7PdGWhdvUbqaiZoS6!9N(@=08jvizA zE*&i6Gs{QE0cRCtLyE)9N{!<}lE-jLDHHPFE)fu|XhxJD}QToY!0OA4Whdc=K;a)^OJX+y-(fUGr5$yzIF>q7&G>5dLWew`^<>n_V$k4M%<*=B51)+T0A zTGl4xvNoNNwYjjYC5&cJR@PFK&$Im+&-Sb<^aSy%5z3v+KwQJnRu5tgkRxY55RJm8EHcUyCD`%DKW0ixy$|+gp z7^^l;BGS;GQ+2se2(_YNG=fM&pG}oM8yt^l24$q`CcT?{-K6Ixt?F(=?WhA$hMVo( zq^D1&O5aP>%{Cq%3Zh|QU-abTmUh3;DNY&>?l;evd^5KrE>hD2A zD1nkv4X|y12go~+L*x^LEl55=>KUYrApF5BBL5)#Av|u6 zQ!*(W z)VrUyA0WR0+GBuv4U*3w`*x7BhB^`34pH{dEFzB~(hsq(hADrTvWIDxVd^~0_Iwvq zhpE#r$JX%J`V1JuH17x~BcP8^%@LY@gd`(mKSuUr6g5tD$Ju0pz9_K3mFszy)&lKgQKu)nfRg`KPt8_V4=~AlZ66;>FqF1R(4^oxhqbj{dReEiz^vG1{iK)^9Q>Evm zO3z7^-jOQ3BUO4rs`Ow~>7}R^$d+D+D!mO=dK9Yk6jXT%uk!R=<;l9rlXaCR=qgXo z)e`yf4N>Jux5^W2l_%FKPoPzvI;%WARx4ogWLT}xyp^1+^YpOZ&@1cql&m}Ks8!av z?OEprV4aKKIz7?G>-DJ?q;6Hxplq~*4=X`F6*5A>-4g&d(yJb*}v{( zTiRgV+k<9N60yz~KrN_G*8OcLChMH#>wySjd4ReEil{8>LFyDN$$E%RJDNsWS?7#f zZ|^`|h`ie85pC5$-W`<18Modsghph&GrS(B8l6 zX1>Rb$gRgrQ&KOjgUfWim-L*Y>%Fv4FRk7ST^}vmN49;`tdAD$qnM-A;3(;iB@nIO z&vgGWG-*^VM%Iem7|CUQ0PI0_=O7ylk`X8L`f#hPk2qv~l&X)h!8p_76fnuUDdH&# zpQc%+;h3fpGo+sZl{0vKhTS=nK{UfG19x`aIL~ zNm=KjzaATt^*EsMimWGSpaf|XG+%0Dn=vP}}mBbbJo}n&uim%g&y-sKDIvug=c^aCF=sF#e>vTe{)8V*IXW}{?bL(`(t#j(E z)5*3@=h8ZzDC?CeRF;i~9&{AtWW(-9Q)nJ#P)Rnpux{`aw!sb6hLdRqR5rMvZn%7C zRyN$dvO%xKh9@c;9O@e!>Kop1G>OQ|n?h++Ko!}b`(eXJIv;uYNJ|$4F9XO1T?`wX z3LA7OY|xdk5n!HEU?WIAT$naOdD#dzBDN1xb~u1WP+T@5)SrvbMwI0o`WsE8X`)^n z{u@m(*=VM|&8%zgl#Lb`TUggJDjThA)5@~eV`vyr?^f7b$+IH5RmV-gmBpXAd8)ABhx(!bv+J=!%bNp{`rP&yP zb%c8H3oE=K~(?aoQ`+vIKF0dL?L! 
z1e}})8{BkmB-u7eos%#p+5aipGDRDtNS7v`G}~s_uNmsWfYSyyr5g;=Y;cpg!Jx|q zoxU4%>~7FOyFsVvMu~dSS-L@|=mwpj8+2}Na7oxGvtQ`U+oZE;v%!Ues2L?>lbi2N zx@|V?b7&rwQAIW#J`_MRC@z}}SZ+45&eMs8&^VeztoM+Qrzo3V@}~!Hlb*XxFX=ch zHoeDWldiT+y4p6m>D#1xZIfev(@)<1Ce(*wh&=oy*$j-JQP~W#JV-ggv}}f`Yp4Y= zAEvx8=_1sR8?;Ss&^9BzXbe#XH)fkr);F=fsY5nfD7%GaE#%)q9b4OFvu#*5J3@#y z;bv;Hll(i$w-fFzmUl%Fd3KQ}$M$B|lx*_lzS+%m&!BAfh7tUIEboKmD0v(uuVVwU z*-snrCB4ZF*5&|p8=!3msP~WqMG$owI*Ld;L|Y88-7w{iu-ypt8ljEpBHSFMeMYJ4 z74DHXI)8;H~G@p^p7adh4ON^xxP-MpEm9n-WU-_4sD z+B?H?2B0>%o!MlNXOn@9O$IME8Hm_q0AiEdl}$R+H|a#*qyv1DLHSKOy=x6aC@VF- ze`|CT*6eXql$s-gnoz6M=vk@p_+R6Jw&pBJjlu6)V~^DM9Tm zwjR=O%Te=^H^1gy^O0wu19eI**eJCSAU|y2z)Sa=NK^H~DsxwwvYMvk#-M49TiY6gmbE;2F*sSvQ%431YYYU|7znI!J+3jx zRx7YQx3x6})@lr#)wn*_xV5bnsVf6wHI9=SgHbgGpK1&=)flv?F=$g`0H(&kOO1gS z)jKAtZ%)*7M$}BR@IoB2BL{M#M&v?nt^6hTqcgiKy7s1>!LcGQ77 zQ5Wh)J*XG;p`+*+qE7PzXb=sdVKjn95p|s(M-ylgO`&NtgJuyN^YbW%;wXWVD237p zMxJ2Qd=BMN0TodRl~F|$`$xs#kHH^&ze++)^&r}Tl z82mB#WAMk|kHOD(gW}i96#HGp;E%!2FQBOy{4w}r@W&ze++(}z*G$W82mhmDgHZ=;(DoK@W|{M=6{?yD6~ zuZr(%#do$!z@LCW0e=F1zQYyY;VJgc34aoPu3ah#e-i#A{7Lwe@F(Fgc34ap)B>YMElkjt| zr;_j|;ZMR(C#m9DNG0Jgc34ap)B>YMElkg|u=Xpsb;ZMQObCcqpQPG2;Qt+qX zPr;vpKLvjZemryv51qn8r&92z;7`F%kB3UZpMpOHe+vE-{B(q>6#OapQ}CzY=bluh z;OAegs}%ex_*3vRfT>dObHA#%M^GvFQ}A=GP$~F%rc*rADSB2^3jP%Q^suND{M=8d z6#OapQ}CzYPr;vppZg5OGoVVrpMoFrm7-ThrQlD&pMpOPe;WQY{Au{p@bk>5((tF@ zPs5*vKMj8x{xtk)_|x#G;ZMV#hCdB|8vZo=Y53`^R%!Ut@TcKV!=Hvf4L|>8UZvqr z!_WP>O2eOqKMj8x{xtk)_|x#G;ZMV#hCdB|8vZo=Y53Fdr{Pb-PfwHLo=S0TRcZLs z@TcKV!=HwqUMQ7@pL;HqhM#8wm4-h9e+K>x{2BN&@Mqx9z|S*-%D|t2KLbDaYAOSN z2L25E8Td2sXW-AkpMgIEe+K>x{2BN&@MqxX`CHKortn@Wyq5~^rOLpcfuEi+MbDVZ zz@LGi9x_D_naaSQfjx{2BPUZ&chjDtgjX2L25EJmaVg{2BN&@MqxX znO||gp)&Ai;HTG3(d(u%@Y7kZ=&V;f7b!aI6-GRT5l>;nQyB3SMm)vylA;q|(TT4x z;wd`v6-GRT5l>;nQyB3SMm&WPPi5i9h^H{(DU5guBc7tuU(x5FFyg5!{21{R_dY5M zKSn%-5l>;nQyB3SMm&WPPccZKFybkC1{Fp;g%M9-#8VjY6axneBc7rULt(^I81WPX z2?`^g!ic9Z;wc6d6wi2yfd$3Dg2ITWFybkUc#57zg%M9N(4a8lDV_rrMm&WPPhrGU 
zJQFI6cnTw)!ic9Z;wg-H3L~Dvh^KP!W5iP!@f1cpg%M9-#8VjY6h=IS5l>;nQyB3S zMm)tphQf%aFybkUc#1&{g%M9-#8VjY6h=IS5l>;nQyB3SMm&WPPhrGU81YmNevEhu zBc8&Dr!e9v`fe0PJcSWYVZ>APnktNViY`;dJ*C2kr!e9vjCcwoo}wF7VZ>8(r7Dbg z3L~E4zEjcvqcGwr23ZtFJcSWYG1#IQY*85TR33hecnTw)!ic9BfKeFn6h=IS5l>;n zQyB3SMm&WPPcb;7FybkUc#3;kg%M9-#8Y|r^YGKxr1J3R;pcu=F_5D$=qU_(Di1${ zI*NN>MOUxFprMNpWAUxGz^2^b`g?#r?UW3tG_yt$21*81xhdJ%vF}VbD`_ODha|ias@k zK~G`OQyBCV20evAPhrqg1^6-ODF(0<20evAPhrqg1^6-ODGYiFgPx*$TVc>s81xhd zJ%vF}VbD_;^b`g?Mc22&prYir_Y{L|3ge#QSwzuas4(s+ zo=H>@ehhpH1E0dcrx>(U82A(hK2?Oj2tUs)3L~GQk5SRbs4(&=jC_jcUxkrR72(Io zr+5}t82J=NK82A_VdPU7`4mPzg^^ET(eF|frD#4GjPnF=u+^0(LWAIZY_%ZpZ68sqb6oZtC;Rwa^y~60H zF#0KsehQ45%;zD*C|` zLnMkJ62)_+;<-{`22_{<6=pz%8Bmqs#|)_GFH@KS6+2oC zh{6o0Fas*gfC@99qHjdeH=-~EDhz=NL!iPCs2KcJm;x33c&ZFP1Hq~Se+7Pwf{LLY zg;7vd;HRHkVHQ-F1yu!p41=lyKc+#&aFD9NUx6P3p~66@Fc7K={1x~s@M9)a3>hiR zgbFjE!c3?t@K@lkz+ZvC0zU>rg~3o^FjN)zF&QdMh62{oe@)QZ|rJL*84s0($Y9@LBa&{1>@^`ika zh=$NG8bPCI42`1+G>N9rG@3!PXb#Pz7>c6=N}?1>qYTQT9Ll2tDxwl9Blzv`+u^sv zZ-<{BtCe@6`0eo9;irF1kqIe%cN{ea6}jKXyNDhu;oAwm)r$AM2mC!%x4Sw!_c(inhaV zhu;oAea)J_W{oXS)9kr>1~p?dn*M1`|Fm|% z&k&||!0&+H0lx!&2mB8B9q`kKtuYg7`mwbGeh2&x_~~QR4*2P3)DHL^@YCO@9q?mB z(GK_>@G}miF(7IO{0{gX@MA{Q4)`7LJK%S~?|>hVp~g(89q>EgcfgNJP~#HRI0Cf; zeh2&x_#N=$tJ4nn>08&BsWeW8czP&a@ML ztYjLGn0CU?kg#U#OJgq4Sh+NQEsaY{(;utxRB8NDnjvG29ZF-|(b%Ch)+deiNn=LQ z7@jo7CXL}q<6_ddm^AJ!?Svo8lE$c{>DSd5l{7{r?S$V6zY~58OB%zH#;~L@ENKi& z8pD#N4_ISb(oXn!nM(6AmF8tC&DezIWh&hWKQB{hUZ&CvWourh(v9%*GL`0KD$UDO znwP0GFH>n=rqc8$YhI?(jqvkAmF9&i-3Wgp{EhH8!p{pj@bl7@ZiK%PeoQ%j@bf~JZiK%P{zmv4;ctY$5&lN_8H3T7d^9E>?SkJ0zYBhb?KNYR+66x^b7_X~ zHT~q;1-}b^hVwOsAkC1zcERt0-vz%5ei!^0g|rKP7yP^grg;fW^AebL!S90K1-}b^ z7yK^xu?1*e4%56Grg=F`V=~g1j5IHYX^ci1%Yepcr0J*En2j_qhiME)n(!2|V|>yWpR^l( z#uqehb&XqHGv=$^@bi+IcEj(6pO@9N8-6$ZSR%9=emDGX_}%ck;pb&Gjd@CAp3=O~ zrrq#kozT44rg^bVW1-Nz*rplB(3q>V8-5H{+6})OemDGX_}%ck;djIDhTj9f2YwIy z9{4@*d*Ej*MDuc-=H)nzNlSa+_rTBih{kQN8MoGqlW4}SHDe{(13xd%X%GA!_&xCR 
zBAsTOPxCUJ_Q3CfAETG%#X61ML-TT-#_***@O$8AJWzY!_rULg-vdA6gW3Z>V}#lR zzXyKC3bhA*5BwhZc|lKO9np**Y7hJv$TT(*?SbC|zXyH~{2urjU(_D>J@9+r$7-S( zZ`7CtHHI_IxTE&M?}Z=pnZ|~qd5b`M;rGJNn56c??}gtBKjZeAw+l3H7icg1UiiK6 zd*R2prWwD~UiiK6Gmfdf@MC7vUicZ))L!_#@O$C+!p|ED+6zB#DQL`#8b5%>_@?m$ zXgmSh3%?hBEHT;(zZZTl{9gFI@H1wm@eOEv1KJBe<5$`XKgKzYk3f6j_rmXmA5Vey z!O!ar+6TW6eqM9XKKL=%Y5WG-2S2YoXdnDoZ?q47jCa}xzYl)=2-*j~4}NSp+6TW6 zejofk_SI|EA@hoT`{8)Fi4}Kr~KKOm`<7Lo3__rZ^M zLi2it_QUUopVu@ruW4vM{C@a(T|;9o)PDFGYu0}F{qXzY_rvdppYdnyhu;ssAAYP}F|-8LQTQ`2Fzv;rGMuhadBz_QUUo-w(eZen0$v`2Fzv;m6~l zd2K}V+K9&Mq5beP?yddsGxn`9K59Swe)#?H`{DP)?}y(HzaM_a$F(1RKl}ms1Mmmn z55OOQKLCFKehid406+FB9e_Uoe*pdf`~mm_@CV=zz#o7=06+FC9e_UoKjWo30Dl1f z0Q>>?1Mmmn$B?N5@blV>=Cv2iYcHDDUieW;1V3ZE4x@P;M)Nw1=5-hy zfIk3#0R8~{0r&&(2jIu#sRQr_;19r$;ZtMy)B*VM#pnS1*u!)H{viCAgLM%8ApDH& zYK);8W2nYHri1VY;Sa(egg*#>5dI+iLHL942jLIGAA~;$KZa5rgdbC>4#FRVA9JY= z!XJbmPl67@AA~;$e-Qp4{P+}f5dI+iLHL942jLIGkC#UW;Sa(egg*#>5dI+iLHL94 z2jR!$tb_35chEujF|_I+{6Y9Jw(21KSmJaLeoU@91V2Vs9fCguKZaM0;Z=v=55XUT zAM>kbyjzFh55XUTKLmdWeynvGW30woq48E|46+)loesetfuZKA^5TEX^gfy1b+zr5d0zdL-2>-$AGIt@Q2{Xh^s^J z<9X5{_(SlA;19u%2TEi8(;@gV>go{un00j+{xJNQb~Uy@jZaF4;Sa-)UrLAJ$1|nF z@Z+1(Vfe%Fhv5&yABH~+e;EES{9*XR@Q2|K!ykq}41XB@F#KWo!|;dU55tcQQHS9V z!ykq}3_sp19fm&)e;9sjiaHE`82&K)cvo~7{xJMu_`~qy<0)GU44E8z#e+2#r{Fv=^1pWy8cx-e8etc&dL$r>-AAvstKjvtS4O3%~ z))Dye;^+wc5%?qUW17|x_#^N~;K!$3;gTSwuK!XJe{3V#&- zDEv|Q@r39o{89L$@JHc~!XJeni>b!kt)uWq;g7-}g&*&oj=~>>KMH>o{wVxW_@nSg z;g7-}g+B^^6#gjuQTU_q|KA+lNlu+@ux{JF?_DF2e!v(EHmNZ=NT*phX7u5Xa|&+* z9%)Id5@UY<+5eyY|Jnba{r}njpZ$)=#}WDbXa9fp|7X9uruonQ|Li}p|HS?i`%mmY zvH!&W6Z_o)&WZhQpytH>6Z=o>Ke7MB{uBF8>_4&J$?Tlie`5cM{U`RH*neXGiTx+` zpV;rVa8B$$vH!&W6Z=o>Ke7MB{uBF8>_4&p#C}Kb;}&sF?1wex#QqcePwYRj|HOWG zigRNBiTx+`pV)t5zw_NWvH!&W6Z=o>Ke7MBerLUNV*jcAr}m%Pe`^1!{caxT)c#ZZ zPwjXAIH&fX+V4_4;r%>Fa`-H* zpV@zA|C#-6Oy|u0GyBi%KePYL{xkc}?01hhXZD}j?=El7?01_tXZD}j4@bN@!I)e{}20r*zdk|e%SxR{vY=Lu>XhsKkWZu{}20r z*#E=+ANK#S|A+lQ?1!Jmec=4C-;M12u>XhsKkWZu{}20r*zb0Be%SxR{vY=Lu-`51 
z{ILIr{Xgt?S35uK|6xCNf7tKlc7E9J?snYWj??t{VgC>N-Qdm-`+wN~ z!+!U;^TYlh_PfiSANHTye{TP|{pa?d+kbBVx&7z%pWA&+T_^Kj-$J+kbBVx&7z%pWA-R;d!`+wU1(|&kne%k-jekc9&)Bd0K|FqwI-~6=yr~N(cupZ5Q>|EK*w?f+^2Py5k<`Dy=8`+wU1)Bd0K z|Fr+7{Xgw@V>mzU|7kyFFhA}8X+PvMKkff%KkPH^5a*};7{dIt|EK*w?f+^2Py2t` ze_{WH{ZPZpT3;QqZ zzp($p{tNpr?7y)8!u|{UFYLdt|HA$Y`!DRju>ZpT3;QqZzp($p{!9BW?Z34D(*8^P zFYR~FJD2uf+J9;PrTv%oU)q0Z|E2wx_Pb%9OZ(ma4$m={_Fvk6Y5%4Dm-b)Ue`)`v z{q91?&HG&1@9us0kGZt}(*8^PFYUjy|I+?T`!DUkwExonOZzYFzqH?N>0H`>Y5%4D zm-b)Ue`!CsIG6Tc+J9;PrTv%oU)q0Z|E2wx_PbM_U-tj9-~Ijkvj3O;zwG~I|1bN0 z*$<}7FZX|FZv= z{Q%AUvY#WEU-rAXonQ9?zQL2 z{ww>h?7y=A%Kj_+uk63FAJ&;G`>*W3vj58dEBmkPzq0?z{ww>h?1y^h%Kj_+uk63F z|H}R=`>*W3vj58dEBmkPzp~%G_*~h4W&f4^SN8J}b7lXP{a5y1*?(m}Ff>>8yDc8K z#pAYk+!oK3{a5y1*?(m}*gAa0T-$$b|F!+s_Fvn7Z9jB0*Y;oAe{KJ@{nz$i+kb8U zwf)!jU)z6e|F!+s_Fvn7ZU43X*Y;oAe{KJ@{nz$i+kb8Uwf)!jU)z6ezq{vg_dM73 zU)z6eKkqT_py%5DYx}S5zqbF{ehy@=?FXag+Wu?%ukB|;=GuO@({pV}Q$g#{L`oZ|uLZ|Hl3s z`)};OvH!;Y8~bnUzp?+u{u}#m?7y-9#{L`oxstiD|Hl3s`)};OvH#ZoTl;VAzqS9? 
z{#*NR?Z37E*8W@jZ|&y{=GOjO`(fa@wg1-sTl;VAzqS9?{#*NR?Z37E*8W@jZ|!Fk z=GOjO`)}>PwVzoS-eq{LxwZe+{#*NR?Z37E*8W@jZ|%Re|JMFn`)}>Pwg1-sTl;VA z=N<-7o?H8G?Z37E*8W@jZ|%Re|JMFn`)}>Pwg1k3PGautzq9|&er96s?7y@B&i*_5 z@9gI&=FWZqdhYDMv;WTiJNxhKzq9|&{yY2c><2A}yO=xs@9e*`|IYq9`|s@Md*;sm zJNxhKzq9|&{yY2c?7y@B&VFuV?(Dy_|IYq9`|s=*RhT>bVeEmi=g$5+`|s?(v;WTi zJNxhKzq6nHm^=IL?7y@B&VIgV?(M&~|K9$4`|s_)xBuS$d;9P0zqkM1eimu&?dR&| z-u`?0@9k$t=idH%`|s_)xBuS$d;9P0hs5XJ{(JlH?PpNt-hQrW?(M&~pKqFb`|s_) zxBuS$d;9P0zqkM1{(JlH?Z3DG-u`?0@9n?0|K9$4`|s_)xBuS$d;9P0zqkM1{(JlH z?Z3DG-u?&snb&!+|H1wT`}x;-u>ZmS2m2rFf3W|-evWV+?B`?W!TtyPAMAgy|H1wT z`ycFou>ZmS2m2rFf3W|-{s;RX?0>NT!TtyPAMAgy|H1wT`ycFou>Zk+PG=tMf3W|- z{s;RX?0>NT!TtyPAMAgyAH1Ij`ycFou>ZmS2m2rFf3W|-{s;RX?0>NT!Tv}4AMNML z=F$E~`ycIpwExlmNBbY`=O*XT{zv;C?SHiY(f&vKAMJm%|Iz+O`ycIpw4YI%NBbY` zf3%-Bnn(K|?SHhNJ(@@R8Ma}N=F$E~`ycIpwExlmNBbY`f3*M6{zv;C?SHhNSsG!1 zd9+_#U>@y%wExlmNBhMFhK-v?`}w$GK8JCdNBbY`f3*Lv{eSHjC78eV|F!?G{Q?E^ z*Z#lu|F!?G{eSKMYyV&S|Jwi8e&%le+W*)7zxMOeBWf^z?f+~4U;F>sFLE${?H4+j zzxMyN|F8Xj?f+}P_`&?O|F8Xj?f+}P7{c&a!($DrI)Cl|YyV&S|Jwi8{=fGBwg0dE zf9?Nk|6lw6+W*)7zxMyNpL3nR_Oq_@*Z#lu|F!?ger9kO?|HKS$^Iw%pX`6K|H=L* z`=9K8vj55cC;Ol5f3lyI9d(9zvj53`m4Rf*TV$^Iw%pX`6K|H=L*`=9K8vj55cC;OEl=E?pi`=9K8 zvj55cC;Ol5f3p9{{wMqS$YFZtpZ#3#{Ima`{r~KjR~vS6{@MS}{(tuWv!A7$fA;^g zp8*~Qc>dY{&;Eb*|Fi#}{r~Le=jNaN|Lo`J=AZrl?Eh!~Kl}gL|Ihw^_W!g0pZ$#e z{Ima`{r~J|@8+NV|Lp%~|3CX#y!mH8dp!T_|7ZU{`~TVh&;Eb*|Fi#}{r~L$Xa7I@ z|Jnb~ensZ_XFsz%|LlLZ|Ji=NZ=UUcw*T4wXZxS+f42YGe!h90?SHoa+5TtypY4CP z|Ji=Nbe`>hw*T4wXZxS+f42YG{%8B2?SHoa+5TtypY4CP|JnX$`}x#)w*T4wXZxS+ zf42YG{%8B2?SHoa+5TtypY4CPpG%x)`=9N9w*T4wXZxS+f42YG{%8B2?SHoa*?x|3 zp6%yh=f!?qgL$$4#r_xjU+jOe|Hb|n`(NyTvH!*X7yDoAf3g3?{uldS?AJw@7yDoA zf3g3?{uldS?0>QU#eOz?UhIFd|Hb|n`(NyTvH!*X7yDoAf3aWLWM1rlvH!*X7yDoA zf3aUQXQU#r_xjU+fp%o)`OH?0>PJQ=eD+U+sUj z|JD9i`(N#Uwg1)rSNmV>f3^SB{#W~7?SHlZ)&5ueU+sUj|JD9i`(N#Uwg1)rSNmV> zf3^SB{#W~7?SHlZ)&5ueU+sUj|JD9i`_)|L)&5ueU+rg3=hgmK`(N#Uwg1)rSNmV> zf3^SBe%5|o?SHlZ)&5ueU+w4b=hgmK`(N#Uwg1)rH~Zi0f3yG1{x|#I?0>WW&Hgw0 
z-|T<0|IPk4``_$;v;WQhH~Zi0f3yG1{x|#I?0>WW&Hgw0-|T<0|IPk4``_$;v;WQh zH~Zi0f3yG1{x|#I?0>WW&Hgw0-|T<0|IPk4``_$;v;WQhH~Zi0f3yG1{x|#I?0>WW z&Hgw0-|T<0|IPk4``_$;v;WQhcl+P%f4BeL{&)M|?SHrb-Trs`-|c_5|K0v~``_(< zxBuP#cl+P%f4BeL{&)M|?SHrb-Trs`-|c_5|K0v~``_(<>}RRx!~PHZm5k=Y z{tx>{=F|R9`#Cvj5BeFZ;jj*FTsq`@ih}vj59|-H!RP|I7X_`@ih>O@jHd|I2=*srj=1%l zzwH0AU)XNG?AOGZFZ;jj|FZwf{xAE#?EkX=%lzwH0A|I7X_`@ihh(wQ&&zwH0A z|I2=Do%yo=%lzwH0A|I2>OozdKxFZ;jj|FZwfe%+Dzw*T9HRjm28|J!~olKHm( z+x~C+zwOuSnQ!~Q?fn`@ik~wqM0-zU}|E z|J(j=`>>+x~C+zwQ6F|J(j=`@iky|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSm zhyNe`fB665|A+q{{(ty?pV0N}=l_TQAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{ z{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K z{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665 z|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&% z|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe` zfB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yK zAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSm zhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y z;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K; z`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{ z{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K z{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665 z|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&% z|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe` zfB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yK zAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSm zhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y z;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K; z`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{ z{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K z{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665 
z|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&% z|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe` zfB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yK zAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSm zhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y z;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K; z`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{ z{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K z{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665 z|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&% z|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe` zfB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yK zAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSm zhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y z;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K; z`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{ z{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K z{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665 z|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&% z|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe` zfB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yK zAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSm zhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyRcNkN=PVkN=PVkN=PV zkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PV zkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PV zkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PV zkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PV zkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PV zkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PV 
zkN=PVkN=PVkN@v~?C1aE|KtDT|KtDrAN%?L`2YC-`2YC-`2YC-`2YC-`2YC-`2YC- z`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-{?~r~KmI@dKmI@dzyGzL|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|L?&51N#r`Kd}G6{sa3D>_4#o!2SdK59~j%|G@qO z`w#3tu>ZjR1N#r`Kd}G6{sa3D>_4#o!2SdK59~j%|G@qO`w#3tu%G{r|BwHV|BwHV z|BwHV|L?&51N#r`Kd_(wkN@w${sa3D>_4#o!2SdK59~j%|G@qO`w#3tu>ZjR1N#r` zKd}G6{sa5@|M>s-|M>s-|M>s-|M>q7?LV~t(EdaF`TzL;4(;dvJGB4M{zLl@?LV~t z(EdaF5A8p+|Iq$J`w#6uwExilL;DZyKeYeQ{zLl@?LV~t(EdaF5A8p+|Iq$J`w#6u zwExilL;DZyKeYeQ{zLl@?LV~t(EdaF5A8p+|Iq$J`w#6uwExilL;DZyKeGSG{v-R3 z>_4*q$o?bykL*9PpZ|~lkN@w;{v-R3>_4)f|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwIg*#2YtkL^FU|JeRx`;YBEw*T1v zWBd94`2UXWKeqqa{$u<3|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- 
z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- 
z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>qN?0>MI|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwIg(f&vK z`TzL;`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC- z`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-_zuW(A|GWL~ z_P^WzZvVUe@Akjj|8D=g{U7#!*#BYwhy5S+f7t(F|A+k__J7#_VgHByANGIP|6%`! 
z{U7#!*#BYwhy5S+f7t(F|A+k__J7#_VgHByANGIP|6%`!{U7#!*#BYwhy5S+f7t(F z|A+k__J7zf1CR_rANGIP|6%`!{W1W_03-vD3_vmf$p9n+kPJXF0LcI(1CR_rG62Z{ zBmxnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbH zRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowE zST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTx zfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE1K9tu zUkzZ@09FlP)c{rvVATLt4Pey(Rt;d)09FlP)c{rvVATLt4Pey(Rt;d)09FlP)c{rv zVATLt4Pey(Rt;d)09FlP)c{rvVATLt4PgJ*el>tq16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbH zRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowE zST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTx zfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbH zRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowE zST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTx zfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbH zRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowE zST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTx zfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbH 
zRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowE zST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTx zfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbH zRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowE zST%rE16VbHRRh=q`_%we4Pey(Rt;d)09FlP)c{rvVATLt4Pey(Rt;d)09FlP)c{rv zVATLt4Pey(Rt;d)09FlP)c{rvVATLt4Pey(Rt;d)09FlP)d2R;el>tq16VbHRRdTx zfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%q>vj52bBm0l+R|8lzfK>xnHGn;`|H%F$`;Y8Dvj52bBm0l+KeGSG{v-R3 z>_4*q$o?bykL*9P|H%F$`;Y8Dvj52bBm0l+KeGSG{v-R3>_4*q$o?bykL*9P|H%F$ z`;Y8Dvj52bBm0l+KeGSG{v-R3>_4*q$o?bykL*9T|JeRx`;YBEw*T1vWBZToKeqqa z{$u-(?LW5v*#2YtkL^FU|JeRx`;YBEw*T1vWBZToKeqqa{$u-(?LW5v*#2YtkL_0j zST%rE16VbHRRdTxfK>xnHGowEST%q>w*T0EHGowEST%rE1K4BxkL^FU|JeRx`;YBE zw*T1vWBZToKeqqa{$u-(?LW5v*nTyDRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbH zRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowE zST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTx zfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbH zRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowE zST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRh@n zzlwXwapH^uFbcb1m^H9R7$7Eq5DbW7cRk!TC?PNkirCR7-6e1gG><{wuClE1L)}$> z-N&H;L<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G z0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLaw zq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V z0Yn3c1`rJ(8bCCFp8NbXfM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks118 
z0MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT z(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G z0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLaw zq5(t$hz1Z1pcg(r4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCF zXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks118 z0MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT z(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G z0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLaw zq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V z0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?W zL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz z1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$ zhz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c z1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh z5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC? 
z4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1 zAR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ( z8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2 zKs1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4Immo zG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4 zfM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCF zXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks118 z0MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT z(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G z0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLaw zq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V z0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?W zL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz z1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$ zhz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c z1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh z5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC? 
z4ImmoG=O=gP$;~)hPQ$`kMNHm`yxCQtmB3Mui>-c(k^)H&quHPY}bv~zirrYnD3AJ z?QlPL-v7Gu@2@)_>^%O@Q?G~owqfUbq#rxiBR&3f_mr<2cTc(7@sxw=)*LK-1;;vA zNY23(X%2?(=U_5;4(8kDU=U=EZUN@#yf;Vx^m8a*#$o=tPg>?$g= zD-X;rPd~dP>+G_6v%A%r-Mz%@7LT(tB?ND6f9H7n^{wqXv}e~&JG*|O*)@>NNd_soX", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "1": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "2": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32000": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32001": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32002": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32003": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32004": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32005": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32006": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32007": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32008": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32009": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - 
"32010": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32011": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32012": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32013": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32014": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32015": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32016": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32017": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32018": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32019": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32020": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32021": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32022": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32023": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32024": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - 
"single_word": false, - "special": true - }, - "32025": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32026": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32027": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32028": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32029": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32030": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32031": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32032": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32033": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32034": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32035": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32036": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32037": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32038": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32039": { - "content": "", - "lstrip": true, - 
"normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32040": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32041": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32042": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32043": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32044": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32045": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32046": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32047": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32048": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32049": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32050": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32051": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32052": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32053": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - 
"32054": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32055": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32056": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32057": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32058": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32059": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32060": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32061": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32062": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32063": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32064": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32065": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32066": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32067": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32068": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - 
"single_word": false, - "special": true - }, - "32069": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32070": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32071": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32072": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32073": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32074": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32075": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32076": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32077": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32078": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32079": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32080": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32081": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32082": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32083": { - "content": "", - "lstrip": true, - 
"normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32084": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32085": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32086": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32087": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32088": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32089": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32090": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32091": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32092": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32093": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32094": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32095": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32096": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32097": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - 
"32098": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32099": { - "content": "", - "lstrip": true, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - } - }, - "additional_special_tokens": [ - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "" - ], - "clean_up_tokenization_spaces": true, - "eos_token": "", - "extra_ids": 100, - "legacy": true, - "model_max_length": 226, - "pad_token": "", - "sp_model_kwargs": {}, - "tokenizer_class": "T5Tokenizer", - "unk_token": "" -} diff --git a/diffsynth/tokenizer_configs/flux/tokenizer_1/merges.txt b/diffsynth/tokenizer_configs/flux/tokenizer_1/merges.txt deleted file mode 100644 index 76e821f..0000000 --- a/diffsynth/tokenizer_configs/flux/tokenizer_1/merges.txt +++ /dev/null @@ -1,48895 +0,0 @@ -#version: 0.2 -i n -t h -a n -r e -a r -e r -th e -in g -o u -o n -s t -o r -e n -o n -a l -a t -e r -i t -i n -t o -r o -i s -l e -i c -a t -an d -e d -o f -c h -o r -e s -i l -e l -s t -a c -o m -a m -l o -a n -a y -s h -r i -l i -t i -f or -n e -ð Ł -r a -h a -d e -o l -v e -s i -u r -a l -s e -' s -u n -d i -b e -l a -w h -o o -d ay -e n -m a -n o -l e -t o -ou r -i r -g h -w it -i t -y o -a s -s p -th is -t s -at i -yo u -wit h -a d -i s -a b -l y -w e -th e -t e -a s -a g -v i -p p -s u -h o -m y -. . 
-b u -c om -s e -er s -m e -m e -al l -c on -m o -k e -g e -ou t -en t -c o -f e -v er -a r -f ro -a u -p o -c e -gh t -ar e -s s -fro m -c h -t r -ou n -on e -b y -d o -t h -w or -er e -k e -p ro -f or -d s -b o -t a -w e -g o -h e -t er -in g -d e -b e -ati on -m or -a y -e x -il l -p e -k s -s c -l u -f u -q u -v er -ðŁ ĺ -j u -m u -at e -an d -v e -k ing -m ar -o p -h i -.. . -p re -a d -r u -th at -j o -o f -c e -ne w -a m -a p -g re -s s -d u -no w -y e -t ing -y our -it y -n i -c i -p ar -g u -f i -a f -p er -t er -u p -s o -g i -on s -g r -g e -b r -p l -' t -m i -in e -we e -b i -u s -sh o -ha ve -to day -a v -m an -en t -ac k -ur e -ou r -â Ģ -c u -l d -lo o -i m -ic e -s om -f in -re d -re n -oo d -w as -ti on -p i -i r -th er -t y -p h -ar d -e c -! ! -m on -mor e -w ill -t ra -c an -c ol -p u -t e -w n -m b -s o -it i -ju st -n ing -h ere -t u -p a -p r -bu t -wh at -al ly -f ir -m in -c a -an t -s a -t ed -e v -m ent -f a -ge t -am e -ab out -g ra -no t -ha pp -ay s -m an -h is -ti me -li ke -g h -ha s -th an -lo ve -ar t -st e -d ing -h e -c re -w s -w at -d er -it e -s er -ac e -ag e -en d -st r -a w -st or -r e -c ar -el l -al l -p s -f ri -p ho -p or -d o -a k -w i -f re -wh o -sh i -b oo -s on -el l -wh en -il l -ho w -gre at -w in -e l -b l -s si -al i -som e -ðŁ Ĵ -t on -d er -le s -p la -ï ¸ -e d -s ch -h u -on g -d on -k i -s h -an n -c or -. . 
-oun d -a z -in e -ar y -fu l -st u -ou ld -st i -g o -se e -ab le -ar s -l l -m is -b er -c k -w a -en ts -n o -si g -f e -fir st -e t -sp e -ac k -i f -ou s -' m -st er -a pp -an g -an ce -an s -g ood -b re -e ver -the y -t ic -com e -of f -b ack -as e -ing s -ol d -i ght -f o -h er -happ y -p ic -it s -v ing -u s -m at -h om -d y -e m -s k -y ing -the ir -le d -r y -u l -h ar -c k -t on -on al -h el -r ic -b ir -vi e -w ay -t ri -d a -p le -b ro -st o -oo l -ni ght -tr u -b a -re ad -re s -ye ar -f r -t or -al s -c oun -c la -t ure -v el -at ed -le c -en d -th ing -v o -ic i -be st -c an -wor k -la st -af ter -en ce -p ri -p e -e s -i l -âĢ ¦ -d re -y s -o ver -i es -ðŁ ij -com m -t w -in k -s un -c l -li fe -t t -a ch -l and -s y -t re -t al -p ol -s m -du c -s al -f t -' re -ch e -w ar -t ur -ati ons -ac h -m s -il e -p m -ou gh -at e -st ar -wee k -! !! -c lu -th ere -n er -t om -s el -ï¸ ı -wor ld -v es -c am -go t -in ter -of f -u m -ton ight -o ther -h ou -loo k -j e -i d -si on -be au -at t -el i -or t -re c -f f -st er -su pp -g en -be en -il y -te am -m m -i c -pe op -it t -at s -on ly -mb er -en g -b ri -m p -k now -b ur -b ar -in s -lo w -sh e -ro w -â Ŀ -t ro -peop le -vi a -lo w -ag a -be t -x t -f ac -ch ar -e ar -w al -s en -f am -b le -n ati -is h -n or -g ame -li ve -s co -le y -d on -ic k -b all -ver y -the se -p an -i a -at ing -c r -a re -g ir -ma ke -st re -sho w -. 
" -f l -u p -d r -than ks -il li -w om -st s -i g -s ur -ever y -c ur -vie w -le t -in to -mo st -n a -in di -g ar -ha d -s ou -v ed -an t -iti on -ma de -f ol -un i -it ed -ðŁ ı -ic al -th r -read y -ch ec -d ra -k es -boo k -e p -si c -mor ning -ne ws -c au -c t -w ell -an c -pho to -th an -or s -bir th -g g -ou t -ne xt -som e -en ing -stor y -ch ri -do wn -hom e -f fe -fre e -d a -b or -f il -ci al -than k -si de -le ar -qu e -l ine -t en -at es -ye ars -m y -pho to -beau ti -ri ght -n u -for m -shi p -b an -th er -d ays -g am -as on -g y -ðŁ İ -birth day -se t -ic k -e t -st ill -com ing -ta ke -ðŁ ĩ -b b -s ol -s on -d en -e p -mu sic -the m -de n -wh y -f oo -c ra -am az -w n -h ol -t ting -w r -u e -ma g -c ro -l an -c lo -b ra -a k -s ing -c al -re ad -' ve -jo h -b ab -d ri -b lo -bi g -er ic -in t -t or -tr y -l a -le g -hou se -m ic -v al -beauti ful -l itt -chec k -ne w -ver s -s w -ar i -pla y -h er -âĢ ĵ -w in -m a -con gr -sch ool -f un -. @ -he al -ic h -d el -wh ere -l on -ke t -tw o -mu ch -wat ch -v en -d ed -a st -k ed -b as -go ing -m p -e ver -w ays -ro o -de sig -l y -s ed -to p -l in -ch an -to o -it ing -d ent -gh ts -t y -sp o -ne ed -b lu -in st -be ing -âĿ ¤ -w el -l s -hi m -m ay -st ing -n a -el y -litt le -g a -n at -tom or -m c -h on -w ant -a ir -pi c -am eric -p er -le ss -wee k -ve l -a h -c ap -ch am -g er -ti m -tomor row -ne ss -st ate -h al -ser v -z e -o s -p at -v is -ex c -s in -f f -c ity -c en -an y -b el -su mm -t in -w ould -loo king -k o -ce le -fam ily -m er -po w -hel p -bu s -c o -c le -sel f -en s -ic s -th o -an i -ch o -le ad -b s -t wee -th ink -for e -ch il -vi de -di d -al e -ch i -v il -en ds -w ing -p as -' ll -v ol -s a -g s -man y -j ec -be fore -gra ph -n y -ur ing -w il -d d -bu il -f av -st ed -tr an -l ing -ou d -d ge -fi el -nati onal -st a -c er -w ere -in a -se ason -c ou -n ed -amaz ing -ti ons -cele br -n s -a th -he ad -s day -d ar -lo c -v in -an other -g oo -s at -n y -jo in -pre s -s es -s 
ing -an a -in ing -.. .. -c our -ï¸ ı -ac t -cau se -li ght -am s -t a -b al -f c -hi gh -off ici -t t -chri st -d ic -d ay -ra l -h or -: ) -vi si -n am -o b -ma s -gh t -re ally -t un -fin d -thr ough -por t -u t -ti ve -st y -n e -or e -ðŁĺ Ĥ -supp ort -ne ver -ev en -ðŁ Ķ -h a -y a -l d -u k -r an -j am -wi th -me di -d es -ne y -ch ing -al e -h y -k in -! ! -d y -pl ace -al so -b le -wh ich -bl ack -b li -s ay -par k -pl ay -ir e -vide o -week end -a il -ke y -p t -w ard -fri day -d in -ine ss -g ro -b en -al ways -t ball -ag o -m il -c y -pro duc -di sc -un der -ple ase -sp or -fu ll -e y -ðŁ Ļ -is e -iti es -c at -k no -u se -fo re -k er -ar t -hi gh -op en -s an -e f -our s -sh ed -st ri -d ro -aga in -i m -ðŁ ĵ -en jo -fu n -ge tting -p en -g er -c li -an y -ever y -e u -wom en -â ľ -e st -c ould -r y -" @ -th ou -sh a -comm un -b er -d ents -di s -wh ile -aw ay -di o -h am -g la -d ate -k a -mis s -un ch -w on -in f -roo m -g a -re al -ex per -di rec -sh ould -sp r -g ol -l ong -bet ter -or i -e y -i ence -il s -z z -h an -f ound -v s -â Ļ -po st -ti c -par t -m en -ren ce -ce ss -v ic -s il -sho p -ðŁĺ Ĥ -f ood -v al -sti c -y ou -s ays -e lec -st ar -o c -l and -i d -c tion -fiel d -s of -st art -wat er -fri ends -on es -ðŁ Į -f la -f ar -wh ite -par ty -in st -gr ou -t v -every one -m ent -j a -ch a -pr in -an ts -d uring -l at -l ar -we st -th en -k a -y oun -in sp -in te -we en -visi t -aga inst -re le -he ad -c es -to wn -loo ks -th re -re gi -ren t -pro jec -gir l -se ar -w o -m om -c ar -h un -pu bli -d i -p le -c all -c ri -u m -for d -per fe -fri end -h ard -ssi on -te st -pla ying -ar ound -be cause -ke ts -me et -sat ur -ar ti -wor k -j un -v en -r un -me mber -por t -su per -t wit -s am -el s -t ly -ad v -ati ve -at h -s ure -av ail -la r -s qu -ar ds -ev ent -m en -l l -o ver -lo gy -it al -tim es -m al -b ack -c oo -ma king -st ru -â ģ -it u -sh ar -g an -c as -s n -summ er -pic ture -f an -h in -christ mas -c y -pr oud -cham pi -desig n 
-pp ing -ho pe -c a -avail able -ma y -we d -photo graph -spe cial -sal e -sto p -er y -a we -al ity -hi story -am a -pre si -b ru -wor king -d one -d r -k en -fe at -w ood -ate st -sun day -mo vi -vel y -s le -f ace -sp ec -stu dents -b y -ha m -sp on -bus iness -d at -i e -i p -so ci -g lo -h and -re cor -r s -me e -ke ep -p ur -heal th -sh e -com ple -go d -da vi -col lec -li st -r a -clu b -t ers -in clu -th ings -pl an -â ĺ -joh n -sh ing -at ul -so on -blu e -g or -satur day -w on -congr atul -se e -âĿ¤ ï¸ı -tho se -ðŁĺ į -fin al -d ou -it h -o wn -ro ad -t our -a st -indi a -ti l -n d -f er -fav or -su l -lear n -fir e -ju st -grou p -a h -r ac -bo dy -u r -c are -à ¸ -p lo -o h -po s -gi ve -te ch -su b -c ent -er ing -y m -il ity -f ic -lon don -v ir -gu ys -b a -ðŁ ¤ -bab y -sc re -ðŁĺ į -tru mp -un der -chan ge -i an -col le -ss es -l er -ss ed -n ice -ann oun -pow er -s ar -a king -min i -s li -s wee -k ar -fu l -c ru -ac tion -a ther -) . -st and -de vel -a a -g an -le ft -lo l -re l -tran s -m ents -in t -e f -man ag -di g -gen er -do wn -p au -ti v -k u -th ur -k en -st on -f ans -tal k -twee t -t oo -sty le -pro te -se con -fr on -awe some -g l -p al -ne t -s or -la u -g on -sin ce -t ty -ser ies -me mor -b eli -fil m -di d -di es -o t -congratul ations -p ra -e ve -w oo -offici al -su c -in cre -b on -par t -pp ed -cla ss -si ve -bo y -cu l -perfe ct -t ou -d am -wel come -foo tball -h i -p ap -wa it -ad a -congr ats -youn g -exc ited -re ce -j an -v a -re d -st ra -medi a -' d -do es -le t -mu l -ill s -gre en -m el -to ge -fu ture -ye ster -vers ity -for m -ta in -i de -ch es -ki ds -qu i -ha ha -de ta -bi g -favor ite -gir ls -con tin -do m -sear ch -u al -a ir -d ers -mon th -c er -yester day -commun ity -ad e -do g -vil le -ic es -d eli -sy ste -ru n -is m -he art -c up -en ti -fe w -presi dent -e ds -un til -fe sti -o k -f lo -sa id -ol e -me d -tra vel - £ -ph one -toge ther -fa st -lo t -gam es -sh ir -bet ween -y es -th ers -do ing -m ac 
-at or -b and -fol low -projec t -devel op -di ffe -con fe -spe ci -ca st -y s -bo ard -r d -i al -sh oo -r am -ha ving -sh are -fol low -on e -n ame -m r -pu t -disc u -or y -c ame -ou s -s ite -twit ter -t b -t it -fin ally -z ed -su per -com pan -us ing -all s -li st -r is -sho t -g al -t ar -de l -joh n -âĢ Ķ -some thing -ra m -inte re -wh e -b it -ðŁ į -stre et -oun d -a i -tic kets -movi e -re al -k y -ta king -o pp -c c -l am -m oun -in ve -bl ack -us ed -on line -y or -loc al -gu e -c ks -o w -ge st -bo ys -illi on -con t -re ci -in ed -eu ro -no w -se en -p h -te ach -de f -sou th -su ch -aw ard -mu st -is su -ca re -fe el -p lu -l atest -spor ts -we b -te x -e ment -s k -fi c -w an -te ch -o t -bo x -n er -fre e -t al -a sh -c ase -ho t -won der -mee ting -er a -ch all -ðŁ IJ -jo b -il i -c ool -j our -th s -m o -f el -di e -mic ha -e le -te am -serv ice -st and -ma kes -p ing -ear ly -com es -e k -ho li -v ers -ag ue -s au -thre e -mon day -fa shi -some one -th ro -se a -b ad -supp or -tur n -ur y -m ing -photograph y -n ic -mar k -pre tty -ss ing -wat ching -me mb -ar ri -coun ty -be ach -fr an -cen ter -pol ice -b at -publi c -t an -pre ss -s af -s y -ge ts -ro y -n ers -y our -bu y -st ers -sho w -as ed -chil dre -af ric -in es -sp ace -sc ri -h all -pa in -ar ing -hom e -m ur -heal th -ch ed -s and -rece i -gu y -e a -americ an -re si -childre n -- - -i ri -ing ton -coun try -ro ss -le n -ann a -boo ks -b c -e ce -d om -lo vely -k h -pe t -g y -g ri -st age -off ice -ro ck -m on -b ay -t able -su n -m ed -th in -l or -f low -( @ -uni versity -stor e -fron t -goo d -z a -vo te -nor th -he y -an im -or der -mi d -with out -a de -re member -mar ket -? ? 
-mu s -tra ining -e duc -bu t -co ver -st an -sc en -b la -bre ak -l ou -s ame -g old -a in -o s -bo th -l it -ver n -a i -al bu -p a -enjo y -be g -ell ing -thur sday -inf o -s an -americ a -ha ir -te l -mar ch -con cer -colle ge -confe rence -ap p -h our -ch ang -â ļ -s our -ol s -we ather -w ar -p hi -festi val -secon d -cu te -pr ac -en er -str y -le a -pol it -s av -se n -o w -m i -ne ar -ou ght -z e -co ffe -w illi -d an -se y -davi d -e se -f an -de ci -the at -no v -ati on -tr ac -sc i -re view -c el -e m -u n -ju ly -or ig -ti on -d ru -form er -st ay -af ter -in v -too k -dat a -b al -tu es -d an -ev ening -ðŁĺĤ ðŁĺĤ -d ol -u res -pro vi -t s -e st -sig n -j ac -u k -s ong -ye t -bo w -in du -j ap -h oo -po int -any one -z y -i st -h ur -it al -buil ding -wom an -ch ur -j er -per for -co ach -le ague -ce ss -ne t -i mag -nati on -br it -qu e -aw ards -ag es -wor ks -c ed -man ce -l ate -ig n -mon ey -tru e -i i -t ell -pl ac -p ac -as y -wor ld -be hin -im port -read ing -gra m -gi ving -me t -h it -for ward -st om -pres ent -jun e -so cial -no on -mar t -hal f -s we -go vern -k er -deta ils -li sh -_ _ -ac y -si a -ber t -f all -! !!! -) , -th i -d iti -sp ort -k ing -f it -st af -c at -mu se -cen tr -y er -con tro -b loo -wal k -ac tu -did n -li m -lear ning -re search -wed ne -au th -h ours -k y -f ar -h en -.. .. 
-it ch -ri l -str ong -sk y -que sti -jam es -r on -d g -f ur -c in -do es -app ro -mar ke -tu res -ful ly -ch at -behin d -te m -fin i -mis sion -b att -fe el -he av -every thing -b ar -w ish -pre mi -i ma -exper ience -e ach -re port -swee t -tic s -spr ing -re spon -syste m -vic tor -l in -sa w -al ready -gh ter -f le -ã ĥ -br ing -albu m -- - -ell s -st an -to m -inter national -w ent -an ni -mat ch -pp er -st one -sm all -ra in -fashi on -are a -v an -ag ram -k o -thou ght -wor th -v an -m er -coffe e -it es -g n -arti st -c on -ar ch -c ir -se cre -gr ound -is o -h and -co m -bri dge -h s -x i -l ink -pu l -sp l -r ace -f li -ri ver -g as -di sco -d al -play er -f it -photo s -it y -o k -j or -tr a -ap ril -ad s -a di -sol u -beau ty -do or -me ss -up date -ali a -sch o -en ed -mom ent -sco t -sc ience -i or -ti es -ac ross -ous ly -sh es -does n -p age -wat er -m illion -cla ssi -l ic -ca st -form ation -micha el -ell o -s mo -in ts -vi sion -op ening -ld n -au str -tues day -win ner -po ssi -r ound -shir t -di t -b o -u es -il led -al ong -tri p -star ting -im pro -k an -per son -no t -re co -ne eds -c le -li e -re st -r ing -win ter -si mp -mo m -be er -fac e -tor s -us a -collec tion -ge or -se ssion -tr ying -la s -la ke -j en -orig in -stu dent -se cur -v in -pic s -ex pe -com p -gon na -e qu -b ad -le y -a u -memb ers -bre ak -w all -gi c -din ner -bu l -insp ir -r i -min d -ic a -win ning -tal king -t ren -s is -t en -wonder ful -s now -he ar -th om -no thing -gu i -st in -blo g -fe st -b un -le e -war ds -ch ance -dre ss -re n -pau l -p es -tech no -ru ssi -c ard -e ast -mar i -w ine -t i -la w -str ic -k i -ap e -au gu -pro fe -as h -cour se -ma il -ren tly -d un -m un -lo ve -is land -dri ve -s l -end ed -ma in -lo st -nat ure -âĿ¤ ï¸ı -ch ic -re por -p in -pr o -st ation -ce p -ta kes -compan y -go es -on d -ma ch -ra dio -d ad -ro ck -j a -p ay -champi on -e e -in de -tt a -ati c -t ab -beli eve -ener gy -z i -t at -wor d -on ce -re sul -y l -and 
re -an o -inst agram -clo se -t am -cu stom -w a -con om -sho ws -li fe -k in -ro b -t age -n ation -al most -list en -sa ve -re li -ac e -mar y -tre e -for get -j ack -wa iting -direc tor -h ill -bor n -te mp -f l -st e -on a -sing le -wedne sday -un ited -in o -@ _ -ne l -celebr ate -en ding -de al -j i -can ada -hu ge -tr ack -âĢ ¢ -f y -fan ta -an g -yor k -rele ase -p un -ep iso -wor ds -t our -p ack -i gh -classi c -perfor mance -ke t -after noon -recor d -win s -pro ble -âĿ ¤ -f our -b ed -ban k -d ance -s la -cal led -mi ght -a p -pa st -ðŁ ļ -diffe rent -it e -gi ft -ssi ve -chur ch -c us -pro gram -ho tel -ic e -ma d -secur ity -en ge -d c -en ough -st a -e ty -de ad -g un -he ar -m ir -hu man -gre ss -oun ds -pi ece -bre aking -gar den -fi ght -vie ws -f ish -star ted -run ning -gre en -ser i -s m -as k -d or -de ath -e conom -er i -ir d -s er -l unch -âģ ¦ -bo x -nat u -ba se -b an -f al -glo bal -wil d -wo w -out side -mo ve -le ad -an al -muse um -on g -ha w -pow er -than k -b ac -char ac -cam pa -dig ital -r o -op er -de v -w ol -p ati -f a -m ale -pap er -ill ing -c s -â ĥ -educ ation -ta ken -e ffe -m ou -s ad -" . -bas ed -staf f -inclu ding -li ving -a c -ch ina -mo b -stor m -lu ck -ph il -o o -y n -tra vel -k el -ti al -pr ice -boo k -import ant -bi o -p ool -ny c -f ab -lo ad -? ! 
-chall enge -cr y -ser ve -we ar -bu s -ta in -nu mber -ro r -k at -i z -th ough -ho sp -m m -fa ir -ut es -ho t -po p -fi ed -cam p -develop ment -li br -c ali -em s -âģ¦ @ -b ol -is ed -stand ing -mo del -it a -g le -bro wn -ima ge -ve red -for ce -o il -par tic -sh u -da ily -la w -se c -cla ss -cam p -holi day -cl in -k ers -pres ent -gam e -incre di -er ship -inter view -b ill -du e -and y -ab o -in nov -ke y -ac ade -p il -mo der -st ars -br and -f er -wee ks -con si -pr e -sa fe -wr it -di um -la unch -marke ting -ann ual -as si -cour t -la dy -c ted -and a -in side -chil d -opp or -sm ith -centr e -gu e -âģ © -f ren -st y -for t -ent ly -is n -ke ep -to ber -on y -bo y -al d -col la -de mo -le vel -com pet -ad o -b our -fanta stic -m ate -s u -sou th -oppor tun -vers ary -lat er -bu d -face book -la un -ster n -p it -! " -ma j -gr am -tb t -fi re -happ y -a ks -wh ole -actu ally -ill er -ell a -lo ts -al ex -an ge -lan ds -ðŁĺ Ń -en ter -r ou -episo de -p ed -in ten -sh ire -wh o -pl an -h o -ca ke -we st -mag az -fre sh -c c -n ar -ch ris -wr iting -w er -n om -l o -mi dd -dre am -o l -ti onal -de b -> > -be come -s i -gr and -all ing -hi stor -ri de -i red -saf e -que en -ci l -in tro -vi l -d ani -.. . -ar tic -st at -sh ort -or ing -sel fi -mis si -do c -b it -g all -b om -i re -se lec -d ition -ðŁĶ ¥ -fri end -be at -gh ting -ðŁĺ Ĭ -pe ace -ex hi -ant a -ab ility -il lu -j on -qu ality -tri bu -m es -play ers -fa ir -cu t -c ab -suc cess -b i -su s -pro mo -sch e -an ge -ic o -comm it -cat ch -ill a -kin d -feel ing -qu o -s ay -anni versary -spo t -mo ther -an e -p end -your self -op s -app le -min utes -p o -gr and -ri es -ha ha -care er -ed ition -de c -ric k -am i -concer t -iti ve -ge ous -d ly -t te -adv ent -i g -li ghts -ak er -sk y -âĥ £ -r ay -fini shed -w ay -s d -ac coun -ðŁĴ ķ -ck y -ch el -lit er -pain ting -lo s -st un -techno logy -n as -ma r -b il -afric a -ki e -ey es -gol f -plu s -ni a -it ec -serv ices -wed ding -kno wn -te le -.. 
... -star ts -pa ren -w ants -ati onal -mon ths -win do -fav our -er t -magaz ine -ex clu -re ve -b c -origin al -e ss -n al -an ti -st ro -t ice -stu dy -à ¤ -v ac -nation al -fi ve -ra in -ve ment -u te -ver se -em er -ar my -possi ble -gue ss -val ley -ther n -cro w -m r -col or -on to -pic k -cle ar -dar k -t ac -wan ted -it ting -can cer -govern ment -di e -ri se -z ing -col d -f oun -stu dio -str ation -bro ther -a head -sh el -mic ro -ic ally -d au -sig ned -vi ol -a x -as se -i o -w re -spl ay -ch ick -augu st -pl at -ti ps -sp i -hu man -e asy -lo gi -mi ke -gro w -ag re -w w -sh ad -mo tiv -wi de -tur ns -om g -v ar -de fin -su g -j im -ðŁĶ ¥ -t d -campa ign -nam ed -re tweet -co p -t v -le av -k is -dou ble -s mar -issu e -vil la -in formation -li es -sto ck -n t -di stric -sh or -mi x -er o -se p -me x -see ing -li ve -re min -co de -g ur -s c -wil d -l un -h ood -spo t -fa ther -fore ver -up d -tra f -f ly -ne ed -gra du -tra in -ma ke -s ab -be y -si ze -lead er -tal ks -e u -lo g -fo x -gor geous -le ss -le ts -sur pri -my self -no te -li ves -f ru -lo ved -se ver -de m -j i -so c -h old -do gs -n i -â ŀ -lea ve -air port -ben ef -ex pl -shi ps -comple te -ach i -gre at -vin tage -j ack -ro c -woo d -pri v -off er -ey e -ver sion -te a -co ach -off ic -w ell -g en -s at -h h -you th -o x -? 
" -m t -mi x -g g -d le -natu ral -buil d -break fast -thin king -theat re -mo on -ber g -go als -geor ge -en e -exc ell -il ing -tun e -y ed -g ate -m it -net work -jo e -h ello -f b -tu be -we aring -ath le -stru c -har d -gla ss -g ers -thro w -g es -b t -indu stry -manag ement -ali st -go al -stre am -y el -a vi -ici ous -o thers -s ki -chri sti -bir d -e sc -m in -tr o -l t -j an -im p -ri ghts -sh a -or gan -cent ral -ar a -ro ll -favour ite -che ster -el se -p ay -car s -m ine -ste p -prac tice -maj or -h ang -ðŁĺ ĺ -n on -v ari -eng ine -vol un -di a -i led -arch itec -p ink -d s -th y -wa sh -web site -ba g -contro l -el li -f ra -an sw -d ence -y u -r on -ol a -g in -dr in -li c -cou ple -sp ar -g on -cre ate -c t -celebr ating -de ep -e at -te e -vo ice -dro p -vis it -at ors -sta dium -f t -w is -ro l -gra de -fam il -po ints -re pre -w as -traf fic -jap an -or g -hon or -tex as -man u -âĻ ¥ -safe ty -re r -b ag -em plo -rele ased -re gu -ak a -n av -ro le -sen ior -spec t -cro ss -lin es -be st -p ack -s in -ti e -mis sing -sun set -li ber -is ing -j ay -sk i -champion ship -ac tiv -la dies -play ed -y y -pu bl -al o -pri de -s r -pa ki -lu x -sur vi -ck ed -e ts -cho col -austr alia -par is -mi les -h at -ment al -al a -me an -mob ile -en a -in si -f ound -chi ef -t ag -incredi ble -re turn -à © -goo gle -fren ch -cre w -hal lo -ali an -j az -ch er -sil ver -nor th -eng lish -base ball -c af -lim ited -follow ing -app reci -ear th -k ir -ve mber -w ed -p tion -g ed -oc tober -fl ori -c r -en cy -ga ve -lor d -stu ff -ber ry -po st -sm ile -bro ad -st ate -gg er -me ans -ic y -gu n -y o -ma ster -bur g -han ds -ni e -/ / -uni on -brit ish -big gest -distric t -am ing -h il -o ce -per son -pas s -en vir -scho ols -arri ved -anc es -insp ired -ex pla -be n -libr ary -bo tt -am p -ste ph -cont act -b ang -m s -cali for -t old -batt le -b b -chic ago -âľ ¨ -str ate -sh i -de ce -- ) -ad d -la b -j ones -leg end -cast le -ing er -st ance -be l -ur a -re fu 
-lead ers -po t -se x -h ic -artic le -ki d -fr ance -x x -ex e -gui de -volun te -pr int -al i -ce o -twee ts -w x -scen e -vol u -ant i -h an -as soci -shar ing -ro se -mini ster -sh er -in ste -cle an -demo cr -po ster -sk in -p sy -pro per -cra zy -i am -o re -in i -any thing -po d -mo ving -cl ick -ex plo -com b -cra ft -f i -bloo d -is ra -publ ic -d ent -ol ym -eng land -a si -ch er -fac t -envir on -har ry -g one -me dic -enjo ying -just ice -j r -indi an -wi fe -s ound -t es -dra wing -p al -ide a -cr it -ju li -il er -war m -cl ar -thou ghts -def en -coun cil -intro duc -di ed -jan u -an i -s end -li er -m l -intere sting -tra de -win d -b ay -s ac -anc y -sour ce -b es -org ani -ar ly -lar ge -ff ici -ta g -u t -de sp -o es -tit le -sy m -pic tures -op en -wom en -sho wing -ri a -le ast -lead ership -cur rent -elec tr -val ent -list ening -c key -gener al -de ser -du ce -; ) -c ent -ðŁĺį ðŁĺį -sco tt -po or -selfi e -ev ents -i on -wr ong -de v -h ill -sep te -cul ture -l ine -sor ry -s ent -si ster -ce pt -k ri -no vember -ar i -announ ce -z ation -br an -g ent -d u -l en -per s -f m -mart in -o p -e mb -om e -midd le -suc cess -pe ter -janu ary -f lu -rac ing -d av -bi ke -ðŁı » -pe t -shoo t -profe ssi -feat uring -septe mber -now playing -sta ur -z a -on ic -qu ick -bas ke -spe aking -mil it -z er -chick en -b ell -s ad -co ast -lo ving -y ers -d j -pan el -ver age -s wit -ic ks -b ou -califor nia -s am -paren ts -er o -k illed -ph ys -jo bs -mi gr -an th -e mo -hallo ween -and er -c m -compet ition -e ag -s ket -sp ir -may be -exclu sive -app e -jour ney -scre en -for d -i o -h ate -u g -sou l -her o -soci ety -sy n -gu it -n h -d j -as es -im pre -ti me -sal es -d d -f ts -summ it -stun ning -om s -tur ned -cle an -sof t -be at -re staur -de red -en ces -ma gic -di o -sh ine -gu est -health y -exhi b -stor ies -po pu -n is -el a -bel ow -fun ny -resul ts -s ne -cur rently -ar d -down load -f light -m al -f ine -p ad -ch u -ent ed -h at -ðŁij ı -ste 
ve -j o -mar k -r at -b all -p c -p on -b by -o li -ar ts -as ure -bow l -att ack -mi c -de ar -ran ge -en ter -chocol ate -br illi -ac cess -, " -? ?? -ch ap -con st -t n -mat ter -blu e -gall ery -em p -work shop -lead ing -y ours -baske tball -w anna -th u -_ _ -mar ri -sle ep -bi a -ch e -ma d -imp act -o wn -si r -chan nel -euro pe -e sp -k itch -hosp ital -w ra -roy al -f s -ne u -qu ar -ne y -ac ks -ch ase -pp y -st al -at ely -ti m -dece mber -r are -per form -cre am -we ight -ch oo -ni ght -ha ven -fr anc -kh an -buil t -hel ping -tru st -ty pe -gol den -ta x -s now -s wi -di sa -questi ons -ve y -li ght -c n -cl oud -thom as -ag ed -sh ou -te ams -gr an -re ason -a a -you tube -v p -pi zz -manag er -bur y -cre dit -tre at -ma x -i k -ma in -g ing -de ad -pro bab -ye ah -ã Ĥ -br and -so li -pl ant -ta yl -gir l -ðŁĺ Ń -nam ent -au to -mess age -ko re -n ur -ter r -ag u -ma p -sen ting -lo ves -gi ves -g ab -z en -ro bert -con fir -w ars -o m -sta in -cam era -and er -won der -a b -ca p -s old -su it -wal king -contin ue -effe c -dau ghter -d anc -cha in -mul ti -ki d -y an -champi on -v o -ta ins -ho st -min i -mis sed -re sc -ly n -fin ish -del icious -s as -tayl or -i b -pro mis -produc ts -moun tain -flori da -regi ster -tre at -rec ent -fe male -boo th -mat t -ve hic -s op -mo tor -suppor ting -phi c -ex tre -dr ink -lan e -th ird -p s -con stru -ce re -far m -ðŁİ ī -tu red -ðŁij ī -c ats -a j -gi e -shoo ting -as ked -paki stan -am e -m b -g il -leg al -squ are -in vol -dra w -oo oo -!! !! 
-opportun ity -p y -e i -b ts -teach er -charac ter -john son -br on -ly wood -ch ine -c ing -c ine -d ge -gam ing -russi a -ci a -quo te -ric h -go v -flow ers -sp iri -st in -grow th -ðŁı ¼ -comm er -j uni -mu m -r an -s na -a ren -c b -ac tor -col or -si t -pa ir -ch i -bo w -acade my -hel d -r ang -me tal -y l -ac tive -probab ly -t ch -need ed -spe e -cho ice -ital y -ry an -ðŁĩ º -flow er -v it -m n -found ation -b ak -si ons -ne igh -f loo -he ard -re mo -fre sh -ing ing -re f -to wn -cl ou -je sus -spiri t -cou ldn -z es -ðŁĴ Ļ -willi ams -pro ce -moder n -pro cess -sho es -cre ated -tri c -issu es -ann e -att en -de but -h r -n it -sti g -a po -e ps -z u -ã Ģ -si x -car ds -lan gu -fam ous -tour nament -se l -e bay -y n -st on -k ick -announ ced -k am -vo c -brilli ant -hou se -che ese -war ri -mus ic -ho ckey -ðŁĺĤ ðŁĺĤ -sk ills -au tom -smar t -med ical -mon y -e x -gu ar -gi ve -pers onal -ven tion -al li -pre ss -flo or -m c -victor y -hi m -simp le -th or -ðŁĩº ðŁĩ -ta il -lu cky -ale x -qu ite -bo t -ssi ons -chall eng -c ann -amaz on -h ell -b ought -) : -ed y -secre t -produc tion -inde pend -de fe -ad ded -p r -p ag -be d -gre atest -with in -j ay -ðŁ ¥ -ire land -re ly -s d -te xt -dri ving -pro gram -spe ed -col um -str on -à © -fore st -â ĸ -mach ine -co in -sc ar -oun t -bi e -¡ ï¸ı -por tra -comm on -wre st -recei ved -kno w -inve st -pl ans -ac cor -ad op -ter y -re ali -p p -k al -art work -me an -go d -inste ad -an ci -motiv ation -as ing -inspir ation -up coming -polit ical -euro pe -m ers -heav y -ðŁij į -fe bru -scot land -ou gh -b t -bo ss -sche du -spe ak -n ick -u red -in o -e k -ri sk -tor y -pres ents -b on -ru g -st ates -exhib ition -il o -m ill -br ought -: -) -tou ri -com e -offici ally -champi ons -do ors -re p -po se -ex tra -k ings -soc cer -squ ad -app lic -at a -some times -t ari -excell ent -ðŁĺ ĺ -stra ight -car ol -ri p -âĢ į -gra phic -m ol -elec tion -febru ary -as ons -l i -di r -m t -n ick -u su -m rs -com ics -inst 
itu -cor por -v i -ðŁĻ ı -tu ral -di se -ac ci -we are -am ong -sho pping -t ill -wh at -cha ir -sp an -chine se -innov ation -jo y -k it -cent ury -ob ama -ph ili -f c -re ach -c iti -ul ous -n on -d ang -happ ening -bur n -p el -or ange -d v -k ick -cla im -ing ham -ph y -no v -pod cast -wh i -ni ghts -ear lier -be ar -la h -exc iting -or a -gi ven -s lo -memor ies -contin ues -produc t -gh o -c d -kno ws -ðŁİ ī -publi shed -discu ss -y ard -i phone -tri es -w all -fe b -are n -tru th -win ners -tu re -diti onal -milit ary -proble m -m and -do g -lo ss -c ric -can adi -ve ter -villa ge -" , -y r -un g -don ald -ag ing -bir ds -sci enti -le s -th is -regi on -tic al -itt en -il a -ðŁĺ İ -d ad -di am -abo ve -st ren -li t -p ir -la b -fo cus -bus y -d ur -app ly -s ma -auth or -ac i -exe cu -dom in -re la -jack son -at o -wash ington -ðŁĻ Į -k ill -popu lar -ce ment -ro ad -e ating -loc ation -v ent -ar re -n an -cu sto -advent ure -or din -spor t -ul t -lo ck -questi on -dri ver -land sc -on i -k ins -p d -jor dan -te red -k k -a f -chil d -s p -just in -en i -s elling -z o -wh it -bo ston -partic ip -sig ning -happ ened -he at -m am -dre ams -lo ws -gra ph -the day -head ing -br o -ble ssed -vi c -ve gas -h d -in ning -ro man -and ro -den ti -u se -c it -pro gress -writ er -bo b -ff s -gro wing -b ly -aw are -ex am -sp ent -be t -sc ore -bey ond -do cu -ad el -s f -cou ra -colla bor -in c -priv ate -bo at -* * -z one -p ha -b ill -to tal -plan ning -to wards -plac es -pre view -cre ative -dam n -ide as -se ems -po ten -say ing -di splay -s w -a qu -lou is -by e -li l -e mail -we stern -ger many -ell er -re s -f ant -ment ary -de als -ric hard -jer sey -stren g -ra d -pizz a -mon d -w are -l ac -g i -ar chi -c d -yel low -rec ently -re ach -à ¹ -kitch en -desig ned -tr y -g al -restaur ant -at ure -w w -j as -l ma -ðŁij Į -pa in -av o -min ute -sch ol -ther ap -tic ket -d ry -jap an -diti ons -ter ri -sel ves -happ en -t up -ma g -cop y -sh er -free dom -f ile 
-speci ally -tor onto -lo ad -g ary -re y -answ er -lo y -cau ght -pri ze -u ne -fic ation -ni ger -sy d -tou ch -feat ure -jaz z -recor ds -him self -di sh -ro ber -spot ted -ma ster -wa ve -fin als -bu ll -for um -al d -re comm -ch a -a e -d oo -inst ru -tru ly -l g -in k -bro thers -de st -j im -m it -clo sed -is on -tri ed -s anta -af fe -w an -hor se -g row -camp us -rel ation -nati ve -jour n -go v -o ct -k it -b ound -part ner -re ma -crow d -! ) -c alls -ra il -qu ali -solu tion -con test -con vers -sn ap -b ase -in iti -ta x -y e -ent repre -it or -constru ction -foo d -present ed -n ings -cli mate -k m -mo del -b j -blo ck -present ation -dre am -fi x -c alling -bus ine -con gress -under stand -we b -val ue -ï¸ı âĥ£ -mex ico -it ely -ki m -char ity -ref lec -bl an -fl ying -anal y -famil ies -b and -reci pe -celebr ation -ac cep -ar y -to t -g b -intere sted -cap tain -âĻ ¥ -ti p -ab sol -bra z -inve stig -o logy -de c -tru ck -ver ing -c lear -don t -go tta -ad vis -beg ins -ma ss -de scri -blo ck -k im -davi d -son gs -memor ial -feat ures -su stain -' . 
-gra b -jo se -v a -con serv -se ts -man chester -fi ghting -de gre -ag a -in d -sle ep -pos ition -ha ir -sig ns -pol icy -it o -al ert -st am -sp end -w y -absol ut -d m -anim al -my ster -success ful -proble ms -ro bo -k ay -gar den -p d -may or -d ale -t ol -off ers -vis iting -friend ly -tre es -offic er -accoun t -ke vin -ðŁij į -gi ant -contin u -con su -tr act -n fl -ðŁĺ Ĭ -h q -b ility -a ar -dis ney -te en -on ed -wh ite -tra iler -de dic -al one -absolut ely -dig ital -willi am -in ation -s wa -e e -enti re -ger man -ro ll -h its -co st -st ay -th a -ali ve -accor ding -co t -liter ally -her it -re ti -haha ha -exper i -li kes -g t -ste el -__ __ -ch air -christi an -to wer -diffe rence -m d -tre ss -mi d -prin ce -afric an -fe der -foo t -car ri -ser ved -r ice -sh all -feat ured -ck er -rec ru -po e -sen se -ni fic -com edy -cont ent -f at -po sted -con tribu -tim ate -li ver -mb le -inter net -ag e -europe an -cl ing -gla d -ff ic -sc o -ak es -el le -ter min -ton y -p ale -col our -seri ous -pat ri -movi es -b m -professi onal -ad o -al u -br inging -f alls -isra el -ter m -langu age -bro ok -man n -commun ic -can not -ac ti -p he -y an -entrepre ne -tur key -log ical -lon g -ar m -ur s -work ers -ing ly -gg s -ri c -tu al -recei ve -op ens -ge ar -soci al -fe et -c king -ad ver -fin an -fe els -sp la -h r -ea ster -bra in -ã ģ -fi g -le dge -ne arly -prote ct -ma ssive -e th -aw a -ðŁĺ ģ -y rs -aware ness -defin itely -k n -imag ine -k u -syste ms -ðŁij ı -f as -li k -provi de -am o -disco ver -inf lu -ma ker -g az -fit ness -stre et -er s -te d -w c -ys is -pos itive -hel ped -que st -andre w -bra d -b in -hang ing -l ing -bri ght -se ction -ma ss -ðŁĻ Į -follow ers -ho sting -tem por -fla g -a ve -let ter -k ur -re qui -of ten -cry p -su ff -âļ ½ -russi an -treat ment -al le -ha y -l an -keep ing -hol y -power ful -pre dic -fun d -e specially -windo w -je wel -il y -ðŁĴ ľ -gener ation -app a -seri ously -o d -ðŁĺĤðŁĺĤ ðŁĺĤ -cer ti -iri sh -ðŁij Į 
-mi ami -be th -v ity -se cu -che f -cri me -graph y -ma x -arti sts -re volu -gu ard -spee ch -u c -upd ates -fac es -st ant -chang ed -repor ts -low er -pe ar -n c -k il -loo ked -spe aker -s f -re spect -ok ay -oce an -s itting -architec ture -tra il -se at -i ra -le g -japan ese -d am -u lar -sw im -polit ics -finan cial -ol d -mou th -at temp -de stin -fi shing -atten tion -me m -chang es -deci ded -reli gi -g in -c av -z z -ad am -ma c -wr ite -beg in -sc ul -al ter -is s -ath on -imag es -m oo -jo ined -ðŁĺ ī -âŀ ¡ï¸ı -pas sed -mu sli -h ir -lar gest -cam er -com ic -gh ted -rug by -bur gh -gg ing -te sting -pre par -lau gh -al ed -impro ve -beli ev -adv ice -sha res -he art -tur ning -s b -t el -caf e -n es -dani el -pat ter -t z -se tt -par k -c and -st ick -happ ens -bri an -ne west -e pic -ad or -ki es -war ning -anim als -custo m -ar c -di an -gol d -cor e -t f -c ity -pan ts -re ality -con fi -in ju -fo x -gu il -k new -âĺ º -cor rec -itu de -d den -. # -re duc -pas s -f on -y a -ow ner -re turns -n c -e ast -ap ol -in sur -th o -si m -juni or -be e -ang el -att le -elec tric -hor ror -cra sh -e ye -pat h -sou thern -emplo ye -ge o -t an -ha z -r ally -ðŁı » -proper ty -was n -enjo yed -gre y -g as -bre w -nor thern -hol ding -g p -ta ke -ch art -ly n -dr ama -z o -pa id -throw back -cu p -discu ssion -down town -w ill -le w -b is -t ary -bre ad -up on -r ate -teach ers -it ation -anc ed -cy cle -choo se -d c -ir an -co w -da ve -ra ise -prin cess -fa ith -- > -indu stri -sp ain -guit ar -fac ts -m n -sp en -cour te -go tt -projec ts -au di -o sc -pe ter -s and -intere st -happ iness -ven ue -sol di -surpri se -poten tial -per io -custom er -i i -g ni -manu fac -e co -bro ken -sing er -vel s -wal es -hu s -in j -f our -tal ent -d ying -mat the -fil m -jo ining -s ell -j ar -lma o -sur ger -bb c -sour ces -au stin -ni k -char les -f am -prin ci -ange l -cas h -lo t -o red -pla ys -pl ate -don e -memor y -br ings -n ba -solu tions -teach ing -gr ace -cir 
cu -hel ps -foun der -mar y -expl ore -de cor -par ts -ch o -inte gr -ha u -is es -pu tting -in er -r it -v y -mic hel -blu es -every day -for ms -bi o -ye ar -p in -t ter -spr ing -) ) -po t -al ing -perform ing -sh an -plan et -mus ical -head s -it alian -stru gg -âĢį âĻ -w ings -pu mp -h h -tr ou -a id -pri me -ear th -pa int -mon t -am y -bb c -fab ulous -fru it -andro id -bour ne -cere mony -enti al -? ? -deb ate -on ing -dra ft -sol ar -t x -j am -cor n -!! !!! -bro o -mil k -po sed -o hi -mo vement -b ren -part ner -p g -et te -ar ies -sh out -n g -leav ing -t ells -sen s -ta ste -kel ly -wor l -gy m -ric h -e gy -pi d -ma s -â Ĥ -courte sy -fran k -incre ase -wr itten -pp ers -re l -ha i -s as -s ound -tt i -w ich -ri ver -.. ." -a g -fel low -ro me -sm all -gen cy -ic an -lux ury -pro of -me t -wild life -mom ents -ra ther -cor ner -com pe -canadi an -lik ely -therap y -li am -econom ic -indi e -rou te -fi ght -ho pe -se tting -ant ly -cro ss -fant asy -de e -sket ch -comp li -ym i -ru les -engine ering -fig ure -ro w -. , -f w -syd ney -w ou -t ation -dre w -us es -the re -sp read -struc ture -pat rick -appa rently -ro s -h ills -w we -ann y -com mission -di v -f ying -con sul -anal ysis -ex i -ten nis -vehic le -ðŁĺŃ ðŁĺŃ -as s -high ly -op ened -b ann -ðŁĴ Ļ -mp h -wi shing -v or -fi f -give away -r r -ra y -je ss -g at -ic ymi -x it -high est -yor k -pi e -invol ved -high er -ri e -mal ay -int elli -desp ite -che e -sar ah -be an -reco gni -ar sen -tal ented -pas sion -ic h -ab c -lead s -dise ase -v is -se c -pre senting -m illi -hol e -sho ts -de part -surger y -gov t -b in -du al -e vi -lon ger -ev ol -scre en -portra it -et c -lo se -ch at -p en -p i -om a -s ick -er c -compan ies -en try -plan e -gr y -ven e -liver pool -premi ere -sha red -a red -fil ms -ir a -holi days -cric ket -ici an -v ing -. 
) -ul timate -di vision -con duc -se pt -for ces -mon t -s mart -disa pp -sun shine -in d -b less -ma de -col ors -fran k -ir on -bott le -s go -m ood -j ason -er ic -bir th -te en -respon se -tar get -state ment -fe ar -th el -al um -ar ab -bl in -direc tion -ste ps -er ial -wor ked -at l -ðŁĴ ķ -fel t -pol i -scen es -hom es -b ell -e at -ate ful -t in -l ace -fol ks -p se -an n -wis dom -fa v -but ter -s r -are as -sm oo -bi z -dg es -app o -mo re -the m -effe ct -windo ws -sun ny -cap ital -tot ally -c ities -gr ant -mb ers -s low -au tu -il ities -w ro -ri sing -st ics -viol ence -i gh -qu ot -h it -t c -herit age -bu ff -ne s -z ar -den tial -ex ac -ed ge -de ep -aren a -be came -benef its -mar ks -mb er -a z -am es -pre ci -dra gon -re g -d ings -do s -ðŁĴ ª -n el -s ity -me al -di st -leg end -pur chase -pic al -st ick -f at -du ba -profe ss -car to -pro f -coun tries -respon si -se qu -fa b -tribu te -hon ored -prac tic -pur ple -an ton -pa red -t ough -summ er -environ ment -s ons -ðŁĻ ı -m ps -gi es -her oes -t elling -hen ry -f en -know ledge -Ģ ï¸ı -f r -ne g -u re -ac king -hear ts -s oo -hol lywood -ju mp -sau ce -schedu le -tur n -yo ga -cre ating -c ket -cre ek -â Ń -custom ers -ma dri -gu l -asse mb -moun t -c ell -to p -st al -dav is -t wi -sig n -premi er -iti ons -he aring -un k -pati ents -app ear -heav en -al ty -doc tor -a e -plat form -je ff -ðŁĵ · -regi onal -bi d -box ing -ex ten -or ity -a w -w ise -il le -sever al -bi e -s itu -sy ria -âľ ħ -remin der -enter tain -li on -part ners -in n -ph ar -f au -pl s -expe cted -sug ar -deci sion -s b -ch ron -associ ation -leav es -vis ited -sh ap -ðŁĴ ĸ -fur ther -h ann -w i -run s -l er -fun ding -fil led -.. .... 
-tin y -han g -or g -co ol -se min -ðŁı Ĩ -spon s -nav y -sa int -dru g -d al -r oun -co vered -tra ditional -invest ment -de te -al ism -f low -n is -sun rise -fe at -f ted -we ird -je re -ve gan -medic ine -an o -ac cu -deli very -temp le -chang ing -wil son -phili pp -re fe -n d -is er -g ay -r and -ati ves -t ely -p and -intelli g -g are -am bas -de mon -commit tee -strate gy -refu ge -bud get -prote c -pi er -ex press -nom in -econom y -al low -ic on -gal ax -o h -indi vi -dem and -vir gin -lu ke -ali sts -man i -s mi -ju dge -ent y -mic hi -resul t -am ed -spe aks -' , -hou ston -sh in -b ing -fl y -ch em -au to -v as -ge t -ar m -thank s -d in -gan g -x x -si on -loc ated -p l -jo sh -in fo -jo ins -adver ti -ot d -el d -si e -re asons -v ent -ðŁĩºðŁĩ ¸ -â ł -convers ation -stu di -ðŁĶ¥ ðŁĶ¥ -go s -s ounds -un it -mu sc -ge l -ack ed -pac i -co s -de re -u u -a o -la m -inspir ing -ar ms -tw are -mat ters -ad dic -du de -ex t -cri sis -b ath -me et -sing h -expe ct -del hi -resc ue -wor st -au g -shi pping -ser ving -st o -dar k -ac es -histor ic -landsc ape -desig ner -b illion -gr ateful -wa ke -e ve -m iller -hou sing -dy nam -is co -be ha -sh op -pr ou -e as -a sia -e ding -k on -depart ment -aw ar -mar ine -in ci -photograph er -ta pe -lo go -r ings -d it --- -- -vin yl -w c -vo ting -se ven -ambas sad -dal las -t u -com ment -k ra -b les -w ag -u d -au dio -stri ke -offici al -o ts -me tho -to ols -ra di -al an -hun t -wat ched -a ke -fa ke -drin king -mer ry -m l -b day -ri o -ni ke -c ant -re pe -co stu -mur der -ak ers -ch ers -ou ts -beg inning -so s -ad es -n in -not es -wro te -sol o -c i -li ghting -ur ban -bre xit -att end -shir ts -pla yo -ac tress -pl ic -stand ard -quot es -par ade -anci ent - © -tur ing -re e -pri mary -fla sh -citi z -mat es -ste in -z i -clin ton -sk in -gen e -hu m -g ar -t le -y i -fo cu -de an -pl ants -cy ber -b u -om e -ho p -ad dress -ti x -gi fts -relation ship -sub scri -fe ed -exac tly -haw ks -ex o -stre ss -s n 
-arre sted -an e -sof tware -z ero -the me -mu mb -im migr -mi a -make up -ple asure -uni vers -har b -eng ine -ap er -r in -br a -institu te -le ather -al th -sing ing -co s -gh ty -me as -st ic -si de -insur ance -co t -pit ch -moun tains -cri min -su pre -valent ine -at er -wou ldn -sc ale -rel ated -re gar -star tup -pack ed -mi ke -week ly -p ts -coun t -ha r -gott en -min d -ber lin -con ditions -swit ch -cor n -sa ve -g li -emer gency -tun ed -sto ck -discu ssing -every body -s day -whe ther -wrest ling -ec es -gen der -ch en -ðŁij Ģ -madri d -mar athon -e gg -i er -th x -as king -kore a -wol f -ay a -g m -g au -at ory -v r -gra ss -k illing -b ble -ur o -un i -e th -sh ore -th en -re ale -bot tom -ex erc -k ar -or ies -ad ri -san ds -se x -. ' -volunte ers -per form -par liam -inclu de -deli ghted -execu tive -fu el -kis s -ã ħ -char ge -h u -ca kes -ve t -g lu -agre e -pr ices -n au -h l -g ru -ra j -streng th -b ic -sp ending -al es -av en -b last -: ( -yo f -nor mal -si x -qu ick -se a -d aw -mee ts -lo vers -upd ated -po tat -comple ted -coo k -opportun ities -p ure -organ ic -tem per -c am -avo id -par king -duba i -and o -di stri -to y -comple tely -don ald -tri al -bas s -b oun -back ground -v as -mar vel -lu m -ru s -t ool -com missi -throw back -fin ding -is lam -! ? 
-st op -e vil -or al -resi dents -i denti -o ak -ðŁİ ¶ -l il -span ish -chap ter -sto pped -direc t -ho sted -pic ked -lab our -lew is -defen se -à ® -health care -wh is -mat h -pe ak -ra ised -fi x -bu ll -th ir -chel sea -fol k -tr e -can di -pau l -ei ther -ad am -poe try -jewel ry -ðŁ ¦ -pr ay -Ø § -g c -o z -wi shes -fore ign -sun g -lear ned -en e -n ing -micha el -illu stration -legend ary -w av -b au -ðŁļ ¨ -cal end -stre ets -â Ĩ -mon ster -bu ck -g r -scho ol -ba th -wa ste -ne ck -ha wa -be ach -re plac -jec t -on er -fac tory -coun t -ðŁĵ ¸ -mor gan -der ing -se an -steph en -de p -no vel -vide os -ic al -press ure -arsen al -ex pre -ir s -tren ding -ss a -fla sh -re sear -thr ough -profess or -scul p -to s -gg ed -mm a -be e -a pe -hun ter -am i -he i -pla stic -bu cks -uni verse -le gen -niger ia -ple ased -ri s -thin ks -autu mn -i ds -d is -anth ony -ðŁı ½ -ak ed -gla sses -fin ance -z er -k as -con tract -nu mbers -sh aw -partner ship -t il -laun ched -s al -victor ia -theat er -usu al -nam es -perio d -eli za -i th -bar cel -ro cks -bag s -mat e -distri bu -j on -di ffic -ali zed -cur ren -sco red -b ha -du blin -ro se -in ted -soli d -beha vi -wal ker -simp ly -garden s -head ed -in i -ohi o -we ap -f o -gl en -e state -ran dom -th under -thr u -k ill -jac ket -it i -entertain ment -thanks giving -ent al -en coura -el o -a ther -tan k -high lights -f ting -ru le -model s -bor der -bj p -hus band -in done -ken ya -be ars -al o -n inten -pi x -str o -or ders -sal ad -ro ads -n or -l ation -sop hi -ðŁı ¼ -pi eces -b one -min s -inclu des -nu tr -phi l -s ent -fun dra -ga in -bor ough -n ad -mon day -activ ity -it ems -be coming -ken ne -de tro -car di -gue sts -u x -world wide -sever e -new s -thank ful -fic tion -ve ge -m all -si an -er al -inj ury -le e -men u -danc ing -scot ti -exam ple -( # -na i -studi os -ba i -ðŁĴ Ľ -j av -diam ond -vin ce -ric k -prote ction -lin col -cham ps -appro ach -d ar -m ile -clou ds -je ff -in fin -l ers -p les -pe 
ace -go p -âĻ ¡ -tech n -str a -a verage -ef fort -introduc ing -di versity -austr alian -am p -boo st -s ke -pati ent -appreci ate -ici ans -pu r -f ell -woo ds -illu str -ðŁ ĸ -ag ency -ac tions -brit ain -under way -se attle -el and -ag o -f ill -stre aming -pro test -challeng es -ky o -et sy -coo king -exper t -ru ss -rain bow -commer cial -sp in -be ats -c ry -val u -el i -th row -gr ams -le vels -michi gan -c ad -ador able -const itu -w s -pu b -mid night -th at -net fli -braz il -die go -regu lar -jo y -âĤ ¬ -li qu -ea stern -k ni -fl at -n p -bro wn -w er -se y -tt ers -ac ting -v anc -cy cling -program me -ra w -comple x -tat too -throwback thursday -se ssions -ro oms -si ght -speci es -bom b -lau gh -ke eps -mo on -offic ers -con ver -t r -ha sh -t ack -ri ous -ad ap -a j -reco gn -ex po -sug ge -confir med -rol ling -dre ssing -ic t -fri day -ph ones -ri dge -con cept -ro y -ke ys -ef for -c ate -k ne -ev en -l ay -commun ities -mo d -n az -every where -al ab -bit coin -ban ks -out door -feder al -sto res -h p -c al -m ely -sig nific -be ar -re public -clo ser -al lah -pic k -x d -pal ace -ch ill -b am -er ous -un a -al len -out standing -olym pic -supp ly -fi gu -v au -l p -char lie -un es -> >> -legen ds -ici al -co ast -benef it -mul ti -f its -far mers -am ount -si sters -har ve -hon ey -que en -b ers -pl ann -âŃ IJ -m u -barcel ona -al ber -stat us -re main -ex tra -c andy -vi ous -âľ Į -o v -warri ors --- > -ju mp -am ar -x mas -stu dies -i ors -k or -don ate -pre p -fi sh -im a -pain ted -ad mini -co splay -spor ts -dro ps -fi ghter -evi dence -ðŁĴ ª -la ke -ro b -cine ma -pro file -à ± -stan ds -leg acy -sh ape -ro of -ci vil -i ans -sy l -sh am -vo ted -re tail -ph illi -li sted -du ty -n b -th es -f are -au ction -ffici al -stor ms -d p -l oun -sh ops -al y -ani me -multi ple -ðŁĺį ðŁĺį -psy cho -je an -ap art -candi date -gg y -con f -jose ph -w ick -me at -fr ame -c l -for got -ph y -f ing -li ed -re p -se ed -f all -u fc -nu t -lin d -mo de 
-fiel ds -en ce -s ley -ðŁ¤ Ķ -ch ill -follow ed -announ ces -cor ru -tro phy -them selves -ac le -al du -k ong -l on -s v -bro ke -ander son -ta i -stor y -tempor ary -activ ities -k ati -ari z -cry stal -spo ke -extre mely -tra ding -ðŁĴ ļ -à ¼ -in ch -ed in -out fit -equ ip -ma di -form ed -be ef -po p -ti ger -this day -ti red -neigh b -re tro -is a -un t -t as -kan sas -de st -secon ds -ta y -hur ric -o u -galax y -dad dy -bro w -bur ger -en ced -de sk -ac cur -secre tary -el ite -k ab -ch in -touri sm -bud dy -ici de -dre ssed -u d -vac ation -che ers -com for -charac ters -j et -bu ying -l ins -n ap -reale state -li e -af c -i ii -f ame -n r -b at -ag ent -ma kers -âĢ ¼ -sec tor -op ti -le on -di et -pra yer -hi p -mi r -le x -br y -an a -pas sing -w en -reco very -ak i -po pul -res ort -mar ia -stu ck -read s -ti er -perfe c -netfli x -p oo -cham p -o c -re duce -we red -comm ents -cla im -acci dent -s ag -h ack -sal t -kin da -k iller -i os -z y -ex change -lec ture -eng er -ic king -t au -reve als -pri son -z om -gh an -u l -jour nal -i ot -tr in -jon a -govern or -cap e -quar ter -spec tive -impre ssive -bab ies -t x -m ill -o y -har ri -jo int -su e -collabor ation -tren d -revolu tion -re new -alum ni -ge tt -sh ell -sun day -ent u -ni c -donald trump -block chain -paci fic -expla ins -sp y -ad voc -par adi -to f -star ring -p av -fe ed -br ac -smo ke -ham p -y am -to kyo -si mon -d h -e ffici -phys ical -n j -ell i -s low -gradu ate -americ ans -ti fy -f red -ap ore -fin ds -rob in -we t -not ice -se mi -un ve -k om -pil ot -scre ening -da ily -ðŁĴ Ĺ -roy al -sp a -vo tes -n ag -wh ate -att ending -exper im -ad dition -k ate -sto l -m ali -foo t -chri st -ch an -de e -lic en -glo bal -mo ore -ti a -bri gh -myster y -y ay -âĿ¤ï¸ı âĿ¤ï¸ı -cre ati -me chan -clo ck -di c -âĢ Ķ -pp er -al ph -through out -al low -re sources -selec tion -ham il -bb q -aa aa -virgin ia -dis ney -en g -so red -drin ks -f ancy -consi der -end a -jan e -hand made -du l -on tari 
-i us -s ville -color ado -whate ver -whe el -promis e -ne ver -desig ns -ab ly -sex ual -vanc ou -at i -con vention -cul tural -sing apore -pro mo -load ed -gla sgo -pp l -n oo -ke e -ste m -men tion -i do -cru ise -ri ding -be comes -be y -âļ½ ï¸ı -tw in -dedic ated -na sh -de si -work out -jen ni -i v -grou ps -rela x -pho eni -li ft -mix ed -m ck -p c -mu st -me tro -ci es -y ar -a im -ang er -i e -rec y -marri ed -dro pped -eng ag -le st -ambassad or -op h -de s -w ick -assi stant -nat ur -fa il -l td -shor t -k ap -sha w -bi gger -rema ins -crit ical -sur vey -co verage -er son -win d -n b -bil ly -let es -ac ts -jim my -at lan -al and -t c -import ance -dam age -f g -stor age -tw t -bon d -bal ance -cr ying -pu ppy -vo te -pu sh -ðŁĴ ľ -pol y -me l -lon don -terr ori -effec tive -corpor ate -atl anta -jac o -nas a -gre ek -sen ate -i sh -ev a -intellig ence -effor ts -al co -k un -h all -di ag -claim s -fir st -h b -ba e -v ul -pu ll - ° -se par -spe ed -vic ti -on thisday -audi ence -r ates -te ach -fil ming -bu sh -son g -y um -br un -ra ine -aw a -par ks -ð Ŀ -ra bb -ra ch -ra id -reach ed -ra il -mo ves -selec ted -fr i -ra ising -om y -st ones -su k -franc isco -cas es -cap it -con fu -w tf -po ke -equip ment -gre g -ess ential -off ering -ne x -pi es -be c -cre ation -chair man -cro wn -w al -john ny -shi ft -ne ck -ban g -bir d -ðŁĺ ı -du ck -re serve -de pu -ma sters -over all -no tic -ju ice -sne ak -che er -cla sses -eag les -n ca -car pet -ci vil -coach es -har ris -u ps -b alls -dec or -mar tin -ro s -v ice -announ cement -who se -ti gers -ste red -c ts -dr am -ste el -youn g -inst all -supp o -recor ding -de ck -se ats -l der -ang le -bo t -sty les -elec tions -for tun -n ab -but ter -ari an -ka sh -in ner -ou red -be ast -we i -ic onic -exper ts -ne cess -b eng -jam es -li a -gre ece -ðŁĵ · -ðŁĺ ģ -good bye -m itch -tw ice -mumb ai -ste am -ru sh -med al -ne tt -fashi on -t ar -r s -sav ing -ric ul -l m -sleep ing -brook lyn -mis s -sen ding 
-disco vered -sp here -of theday -k icks -missi ons -w right -er n -ght ly -i ous -mel bourne -star tu -mo ved -car ry -d ak -ag ues -bel gi -e ma -way ne -do t -er ie -pe l -it unes -matthe w -no body -est ab -cal m -win ds -lu c -prep are -tren ds -exerc ise -adv ant -ðŁĴ ¯ -athle tics -app s -c tions -adv ance -laun ches -litt le -real donaldtrump -eliza beth -carol ina -hu b -hi dden -n w -us er -pol l -great er -mo st -f ed -p at -life style -s ati -sco res -marri age -l r -aven ue -de serve -ri f -ðŁ Ĺ -wat ch -champion ships -gr ay -en ni -cot ton -g om -whe re -pack age -su m -ab solu -new ly -foo ds -ty ler -assemb ly -musli m -ban k -re memb -op tions -produc er -land o -fun ds -u pper -shad ow -pro gre -co p -ing e -leg s -detro it -hill ary -jo se -gi ants -sou p -sustain able -t us -clo thes -roc king -n z -min ne -mat eri -bru ce -ear t -ca sting -independ ent -thou sands -ta h -de cl -veter ans -li ons -wra p -âĢ ¦ -de ss -bl ing -st ine -e ggs -o on -clo sing -z ay -at t -bac on -fa il -ariz ona -de pre -gho st -new sp -w ers -vi p -li ked -id ent -volunte er -ad ult -pu pp -cir cle -mat erial -degre e -gro wn -boo m -calend ar -su r -vie wing -ath letes -ch and -re ll -asi an -en tr -vol ley -victi ms -bo dy -m ama -trans fer -ge ek -in dic -sav ed -ma i -g ent -it s -loun ge -k ol -the ory -situ ation -is lands -ar th -z oo -floo d -vi ously -show ed -parliam ent -ch ev -el ine -at trac -ab ad -ta il -h rs -lu s -por tu -gor y -provi des -to ys -de ath -in fe -an ce -g le -li am -lo ver -hu d -dv d -reve aled -g w -re ment -ca the -l ying -ra dio -der by -stor s -che mi -hosp it -âľ ¨ -' : -ilo ve -le mon -re public -s ni -ne ss -do or -re action -pre gn -fla v -schol ar -spo tify -is ation -vis ual -aw are -spon sored -jo ke -less ons -leg is -lo ck -si mil -ðŁĺ ĭ -kin d -la y -ma h -ho ping -vancou ver -as er -clean ing -gal a -thre at -la p -ach e -ro mance -ex pen -re post -z am -e pi -mir ror -o ak -ad ul -bat man -s lu -l c -vie wed -re 
views -d ates -indone sia -acti vi -off en -lea f -i si -ag ricul -costu me -s ites -spir itu -appear ance -ir y -st air -applic ation -spec tac -ic ity -ski es -hand le -pun k -paradi se -t n -de al -provi ding -do c -recei ving -bre w -micro soft -à ¶ -fer r -me tro -th ail -y um -car ter -à ¡ -gent le -bre aks -coo per -show case -cu tting -egy pt -bab y -semin ar -gl ori -ss on -fa ve -re hear -lo tte -la dy -al as -pre p -deli vered -nu clear -ir o -engag ement -at ta -con ven -z an -gl ory -hol ds -busine sses -str ange -sch e -it self -gra d -mar kets -f alling -st ats -ge on -bu dd -li s -she et -thi si -co lo -deser t -regi stration -ig n -expla in -inter ior -la ws -writ ers -spr ings -k r -fri ed -blo om -inf ra -a o -cre d -pa st -line up -bo o -bre a -boo ts -celebr ity -att acks -bro ok -ev es -ex cu -cher ry -oo p -fas cin -boy friend -se as -n ine -effec ts -po wered -k ha -ðŁĺ Ģ -sh out -con dition -i j -her o -enter pri -win ter -applic ations -sho e -g el -batt le -pro grams -w art -ðŁĴ ¥ -ra p -ho l -dang erous -di a -coun ter -ric s -i or -k night -co at -emo tional -at ures -d as -whe el -fore cast -tran sport -glasgo w -king dom -prepar ing -im medi -ff in -awar ded -prin ting -ro man -fight ers -any more -bel t -p ine -win e -x i -employe es -logi es -al led -de mo -birth day -ange les -lo g -dri vers -neck lace -k ath -s it -athle te -ef s -s burg -pur pose -resi stance -rele ases -t is -vari ous -deli ver -ch al -s anc -opp o -cra w -neu ro -dr a -suppor ters -sna p -diffic ult -swe ar -logi st -pa th -attemp t -à ¥ -swim ming -ste ve -hur t -inclu ded -b ap -wa re -ðŁĴ ĭ -end ers -ja ke -le eds -cli mb -l b -im ple -li sa -clo thing -ðŁĺ İ -d t -com pla -sw ing -stra w -v als -k le -us ers -stor m -cu ts -ontari o -p an -hand some -i ow -ar gu -chec king -scotti sh -Ķ ï¸ı -si er -em ma -po d -patter n -de sh -en h -ed ward -t ing -k h -hal f -lincol n -mo ther -al leg -r c -volley ball -d n -g ay -all y -le ton -gro ve -l oud -adv anced 
-re spec -cli ent -supre me -thail and -ho w -gi g -to i -do t -dol lar -ðŁij ĩ -p it -r b -h n -produc ed -gg ers -âĨ Ĵ -ml b -can vas -fin eart -us d -in the -p son -actu al -s l -t b -ip ad -en sure -u mb -w d -sk a -mar s -k end -f eli -th ing -count down -absolu te -r out -dra l -p y -inju red -min t -hun ting -mm er -s age -li gh -ac ity -ex pan -mur ray -ar o -sec ure -four th -eag le -reli ef -st akes -industri al -clar k -under standing -see m -pl enty -sil ver -cla u -thre at -sa il -pro duce -ab str -is is -b r -eng ers -wor ry -bie ber -s j -just in -reali ze -ky le -esp n -fil ter -s ch -ty pes -game dev -d ing -twit ter -soldi ers -p om -car bon -y ards -child hood -ri ed -ke l -ele ph -t ons -key note -qui et -wi re -po sting -is sa -repre senting -bac ks -alex ander -celebr ates -ta ining -| | -ch or -esc ape -pe ek -ti ves -fiel d -ssi e -im pac -spons or -r c -we dd -cann ab -si des -trac ks -com par -con trac -techn ical -bi ble -expl oring -sh are -tra v -n ate -ill o -sc ru -m ingham -gun s -of the -sh ame -se es -ca tho -ac cess -ce l -repor ted - » -mari o -p ad -hope fully -ou se -y on -disapp o -ol o -p itt -pa c -ga p -cru sh -s g -k le -ge m -emp ire -dir ty -a is -avi ation -ze aland -fac ing -high way -d anny -spi der -ot ta -ðŁĺ Ħ -w y -col ours -in fl -co sts -olym pics -au s -h m -ho ward -pas ses -lau ren -mu sh -op in -r ho -disc ount -oper ation -em ily -mm m -cham ber -d il -to yo -shi p -sam u -pic tured -un ic -po l -keep er -carto on -st en -ig nor -n ations -n l -ta sting -deta il -offici als -mo tor -franc is -ed itor -ðŁij ĩ -pe ts -rang ers -t g -r n -w ri -nic hol -i se -spo ts -ani e -chec k -tri ple -ku mar -spe akers -ic ing -pre pared -ab use -friend ship -mon th -swi m -air e -sc ent -hamil ton -indi an -j es -yum my -te ars -da wn -i zed -worl ds -ðŁ ķ -b illi -st one -n hs -ba sic -p or -st le -ir on -ol der -cle vel -e ing -ðŁĺįðŁĺį ðŁĺį -prin ts -fir m -air craft -fin est -devel op -aar on -t z -gra ham -own ers 
-fo li -less on -qu es -bab e -cra ft -ph en -ju n -bir mingham -v ine -ll er -i an -fineart america -evol u -st ab -im per -war d -com ic -wi z -inv ited -du ke -mat ch -por ts -ro ger -diag no -ke pt -te st -vis u -r hy -so c -to x -b aker -sur face -co vers -man s -b its -x box -ff le -n an -gar d -h art -wat ers -v illa -re tro -light ning -catho lic -democr acy -neigh bor -pen n -cr an -jona than -la ura -vi bes -su b -coach ing -clear ly -uk raine -bra ve -commit ment -t all -mar t -ra p -mo di -sco tt -bro s -show er -ðŁı ¾ -âĺº ï¸ı -cou sin -appro ach -br e -com pos -hil ari -phil ly -g ad -quick ly -ri an -t m -vir tual -hou ses -k t -phoeni x -w ire -ff y -b unch -anc ing -tal e -snap chat -star ter -h t -k icking -ap art -th y -) ! -blo gger -it z -com fort -ang els -w ash -" : -ar gent -re quest -hon est -mi ghty -bo bby -k g -ro l -thou se -ex po -h c -tab les -mag ical -po sts -de m -n w -or lando -ab er -* ** -ðŁĺ ľ -environ mental -trans formation -mi le -w ic -hir ing -ma ine -bo ar -r ying -ti s -nit ure -twee ted -anton io -opin ion -fin ale -di y -f is -th in -trou ble -le go -fi les -qu art -sp a -curren cy -cli mate -fan art -rail way -sp ace -ban ds -dani el -mo tion -l eng -hol der -oc cu -mar ie -cathe dral -bu zz -bi es -nas car -bm w -bat tery -char lotte -doc tor -zz le -se ven -in san -d dy -st en -lab or -thr illed -se ren -docu mentary -wav es -cer tain -can did -allow ed -ninten do -star wars -ta p -home made -d les -ther ing -bre e -emp ty -pi ano -pos iti -coun try -por k -pu ts -per ry -m atic -spot light -ti st -or ities -we alth -c p -bar bar -commit ted -as sau -pro fit -e ight -hu l -fini shing -run ner -ss o -insp ec -char ged -christ op -lo sing -co al -ho o -ele v -de le -mo ham -don ation -c able -clin ic -j in -manag ed -ter ing -â ¬ -ur ban -depu ty -bb er -bur n -acade mic -o tt -sta ke -it er -sto wn -ack er -advent ures -ad ams -gre g -pro m -vo l -ac qu -con gre -pa int -citiz ens -c all -af ford -v c -as ks -the tic 
-independ ence -â Ľ -h itting -bl on -fu ture -â ı -in no -gen e -bo ards -di stance -se t -re mem -th al -pre vent -l ang -ob jec -su sp -mat t -in duc -bor o -pi one -re di -vir tu -prin ted -sco pe -shar k -suc ce -a stron -il legal -j ag -c ting -ine e -at o -rob in -nutr ition -b f -du tch -b n -fur niture -for gotten -at ar -ru p -hy per -bran ch -communic ation -degre es -on ia -un cle -promo te -or che -wi i -j s -but ton -ma jor -c bs -bri stol -premi um -ordin ary -e dit -m g -we ed -st even -: ' -gu s -te s -cap tured -dru gs -do w -wr ites -bi shop -whe els -ali zation -disco very -w r -rach el -ne il -hy dr -cu test -entreprene ur -kore an -ore gon -ul ty -perfec tly -suppor ted -histor ical -t wins -ell y -we l -de vil -in come -scienti sts -de leg -h en -on i -ic ed -gi o -cur ry -reve al -e g -buff alo -n ol -op era -camer on -haha haha -j ab -gradu ation -cra ig -r al -i f -organi zation -le ge -g ang -su d -edin burgh -l ack -fli es -g ate -thr ones -q b -the real -e leg -pp in -c les -jam ie -tn am -cryp to -ou l -p ages -a se -roo ts -stu pid -a did -boo t -prote in -s ap -si um -su s -end or -fun ction -don t -en na -ch y -squ e -wor ker -m tv -e a -k an -ðŁĴ ļ -mu s -professi on -t to -oper ations -al lo -c tor -inv ite -sc and -ou th -z im -lin ks -cli ents -sam sung -discu sses -n ell -ul tra -some where -ste wart -ine t -de z -b out -fac tor -ti an -tr ans -jere my -d b -ðŁĩ ¬ -or n -develop ing -spo l -coo per -ma u -rememb ering -tre k -famil y -sen iors -fo ster -att ended -w ing -trans form -ele mentary -hor iz -li sting -malay sia -it ch -warri or -philipp ines -russ ell -m end -initi ative -cre ep -to ps -br iti -a ur -shar p -adverti sing -ug ly -achi ev -materi als -bu g -dev ice -bon us -fac ility -col e -nh l -y as -plann ed -pol e -excell ence -tr ick -con fl -r p -achi eve -lo an -swa g -jess ica -ho we -p our -sc u -z oo -r ated -dre sses -re bel -mex ican -co ordin -me ss -atlan tic -t l -osc ar -wal ks -phar mac -investig 
ation -... # -cc i -eas ily -monday motivation -y ment -au ti -for ced -ar med -colle agues -pap ers -pro per -sha ke -bu c -le an -exhi bit -e vement -co tt -bi z -sp er -k ent -sw an -/ @ -girl friend -haw k -âĺ Ģï¸ı -mon o -ðŁĴ Ľ -stat ue -ðŁĺ ³ -ra s -te eth -preci ous -t ile -p am -swi ft -v ali -no se -dr unk -experi ences -come back -gen ius -wor se -sh ef -ra d -ed it -hon our -au spol -lar ry -h ire -gor don -achi evement -.... .... -su icide -alter native -su p -sur roun -sha ke -ke ith -pe pper -tur k -crimin al -be ck -su m -w alls -cn n -an tic -of fe -col li -win es -high light -hawa ii -emb ar -l fc -ðŁĩ ® -m v -> > -at mo -wor d -car l -shout out -bre wing -ì Ŀ -do f -s ic -hot test -col on -hh h -shu t -low ing -volu me -apart ment -agre ement -de stro -we e -religi ous -iow a -ro d -land ing -re present -ðŁĵ· : -la s -usu ally -h l -c ac -sal v -al ong -laugh ing -be ans -remin ds -pha se -some body -ma sk -ran ked -dest roy -sc i -â̼ ï¸ı -gab ri -le o -ro a -fa iled -si l -refuge es -re vi -r ing -ber ries -coo kies -y y -conserv ation -sh ab -human s -de termin -a in -ni all -as su -mb a -fro m -extre me -vic es -commer ce -ght ful -or dered -suppor ts -re cap -v or -dro pping -correc t -pay ing -mean ing -n j -qui z -" # -busine ss -ðŁĩ® ðŁĩ -indi gen -du st -box es -bl ind -x xx -zz y -ðŁĩ¬ ðŁĩ -ss els -s ant -dd le -hilari ous -desig n -wonder ing -vehic les -k re -ju d -rece ption -par ker -Ã Ń -pri vi -hy dro -sof tball -pol lu -lo cked -ba h -e ar -scri pt -di vi -br ace -geor ge -the ast -bel o -j al -tion ary -dent al -roc ket -pur ch -sh ak -manufac turing -e z -it is -con cep -tb all -ch s -direc ted -pra yers -oo k -phil os -vari ety -che ss -ser ver -g and -bal ti -ðŁĵ ¸ -sel y -cru z -spectac ular -bur ning -re present -i z -t one -mer ce -h ell -bed room -estab li -bo l -com mon -ãĥ » -ab or -kit ty -hei ghts -re pair -willi am -qu ake -alab ama -popul ation -re v -re tt -i sts -n ite -le m -a ha -clevel and -r m -po ver -ob se 
-mon tre -man ia - ® -con ne -car ni -sh ah -f y -u a -sc or -strugg le -bo b -' ' -appro pri -deci de -ff ed -ca ster -s ort -hun gry -dra g -ا Ù -gr ounds -d w -sli ghtly -car din -dead line -bron ze -web in -bar ry -sil ence -e uro -op tion -ear n -ðŁĴ ĸ -howe ver -na ren -na ils -bath room -v ine -ph d -min ing -gar age -( ) -shou lder -defe at -di r -o v -liber ty -ple as -x on -com pre -a v -j in -ab les -sil ent -fam ili -vis its -di pl -ha bit -milli ons -regar ding -innov ative -sen ator -r ts -v on -k l -wh il -requi red -âĿ Ħ -lu v -presi dential -po cket -hun dre -sho wn -fro zen -to ward -fa st -confi dence -r ough -indivi dual -qu et -ðŁı ½ -dom e -fi fa -engine er -z en -re mix -ðŁĺ ĥ -pl ant -min or -robin son -as y -pul led -cer tain -potat o -( : -pre s -oc ca -w it -it em -si e -d ating -thom pson -own ed -an u -vi e -te dly -good night -ex cept -ðŁĮ Ł -ira q -ki e -ren ces -li p -simil ar -sau di -vi g -arth ur -pic ks -mil an -hon da -ma xi -o g -ste st -ar ch -analy tics -ba sti -pear l -ter ry -hor se -ast ro -ac ce -laun ching -inter national -s no -ta sty -den ver -ir l -pe te -tor n -advant age -var sity -" " -sol e -g c -lan g -demon str -ol ds -un ity -ne ts -insp ire -cre te -nash ville -nel son -e ter -wal k -hy un -m ack -tre as -see king -ra ge -bru sh -ab and -whil st -co con -h ong -shel ter -i p -possi bly -so o -it ed -â Ħ -rac es -war ming -qu in -tele vision -mat ches -ra pi -ment al -pal m -jenni fer -rol ls -indi ana -b ars -cat ching -resc u -candid ates -fa re -âł Ģ -se o -vie tnam -alph a -michel le -visi ble -re gre -wn ed -app le -li p -f fe -li z -york shire -ha il -se asons -be gan -m d -k c -la p -fascin ating -hel p -ur y -u ms -nu ts -se m -along side -bri dge -ori al -o ve -world cup -briti sh -comfor table -i ve -hot els -fair s -hor ri -so x -d ining -stre am -bar ri -ss y -w im -ter ms -v u -pe re -l ens -wal ked -r or -l ars -shi eld -dou bt -pro to -cro ssing -me ant -medi um -ad ding -e b -che ap -fun c -pap 
er -bran ds -ry an -feed back -col lins -un known -tro pical -sand wich -fal len -for mu -selec t -lo ads -answ ers -or i -mag a -d or -du o -ali e -dru m -ur i -de er -sou l -sh ut -âĺ º -sto len -don ated -bu zz -patri ots -ha l -na sty -nomin ated -mon te -ki a -th ri -ing u -te sts -pe tro -ðŁij ij -ho sts -ne st -to pic -pat ch -m my -hu gh -ab ilities -ma the -s miles -g b -ag enda -insi ghts -chi p -ph an -fail ure -dg ers -ha i -signific ant -sho ck -ru ral -gl am -figu res -pot us -o ta -mini stry -appe ars -fe ar -r h -americ an -h att -son y -fi res -e di -n ou -e qui -wh en -univers al -mad ness -i x -sculp ture -b ach -t to -swe den -et a -en to -develop ed -month ly -ma ps -ra h -le d -del ta -sa ints -is lam -ben ch -fif th -v ard -so cks -wel coming -j e -tur ner -v b -ad i -nor way -ad y -hurric ane -por sche -tra dition -ex am -newsp aper -lu ci -a ver -ide al -d na -madi son -ðŁ § -wit ness -ac ou -insi ght -si mon -robo t -sna ke -n bc -ac o -ro ss -sh ment -religi on -ch ann -in su -camp bell -inst alled -we ather -hor ses -ol i -rober t -k az -ðŁı Ģ -veter an -th read -quar ter -ea sier -cap ture -hi pho -law rence -roman tic -pas sion -cl ay -ox ford -th ai -stu dying -fi a -elec ted -most ly -c b -tu mb -âĢįâĻ Ĥ -x l -sh an -fa ster -ev ans -sli de -sh ri -see k -mi es -chemi stry -pump kin -tu m -, , -ro om -fi red -li ps -pres ence -af f -brew ery -arri ve -sw ag -photo graph -pen gu -chi ps -at tor -val ues -accur ate -con temporary -princi pal -cannab is -ari o -any where -gi a -democr ats -buil dings -li ved -ap s -neg ative -m are -bal lo -li on -diam on -loo k -re form -tom my -il la -tre ats -hundre ds -port land -wor thy -ex cep -ar ia -ido l -be er -cd n -y u -aw k -ðŁĩ ¨ -c ells -à ³ -ident ity -dra wn -de vil -f inger -th am -ðŁij Ĭ -ear ned -fin tech -dol ph -twee ting -evolu tion -ðŁĵ į -est im -m vp -n one -ðŁĩºðŁĩ ¸ -toyo ta -au x -mar in -b old -l bs -ste ak -mur phy -it able -lou is -sol ve -pi a -sk ir -ill ino -webin ar 
-ban ana -lo v -th on -vo ters -afford able -defe ated -lm fa -air lines -super b -any way -deb t -bo red -ver si -me tal -responsi ble -m k -s se -f ay -cau sed -f p -recomm end -pla za -spor ting -alli ance -au stri -n n -t ours -surpri sed -arti f -th under -sur ve -wor e -bri ef -necess ary -z ie -ash ley -dra ke -r t -kni fe -im mun -char ges -a the -bri de -rep ly -g av -broad cast -pu er -brace let -cap acity -harve st -id k -perfor man -d ding -il ers -par a -jam a -pro vince -ch in -id ers -har i -te aser -ch en -re stor -r at -fl at -col om -ðŁĴ ŀ -ðŁĩ¨ ðŁĩ -smoo th -r t -p itch -stay ing -isra eli -t cot -per spective -do ck -open er -lo vel -x o -class room -l ington -go al -kenne dy -sh am -sp aces -mitch ell -home coming -uk i -claim ed -recru it -ing o -mu fc -mon it -g roo -resi dent -per cent -per man -otta wa -int ment -an xi -stand ards -wor ship -sche me -f x -pot ter -bi an -athle tic -af gh -s se -sat ell -par ties -âĿ¤ âĿ¤ -infra structure -rela x -mo du -wor n -smo king -y ach -practic es -wc w -am b -dome stic -tay lor -k entu -provi ded -mo di -ve g -" ... 
-ob serv -ðŁĺ © -be ard -m our -an gry -ðŁĺ ± -startu ps -woo den -di ve -na il -anti que -ro ses -torn ado -m at -^ ^ -su spect -far m -de vices -me ga -tu l -scholar ship -ge e -disa ster -arri val -po in -mar c -kati e -bb ed -fal se -deser ves -ric hard -ju ana -fre y -tion ed -hy bri -r w -sar ah -ach i -c ure -o le -mor ris -ch ic -broad way -la bel -pa k -pover ty -gol f -e red -f u -er ies -be es -alo gue -st el -wire less -je wish -ti de -blo cked -life time -b har -sp lit -am ster -th i -jo shu -br unch -ha ps -s for -oo ps -ka poor -hi king -suppo sed -ro of -re as -tra in -ti ght -tru mp -bas ically -r r -ea red -see ds -entr ance -c p -wi e -son ic -vic tim -he re -e h -ear rings -sal mon -arc tic -an ne -dou gla -corru ption -hann ah -ha sn -vo ices -con ce -att a -fle et -clin ical -democr atic -ton y -st ood -le f -twit ch -a il -honest ly -incre ased -dro me -don na -accep ted -visit ors -ap ar -ad or -p ar -jer ry -ra i -brand on -ab u -!! !!!! -me me -in gh -glori ous -b hu -pu mp -j ol -li ke -fi sher -ma z -ag an -destin ation -play list -le tters -gen u -br ace -celebr ated -bann er -r he -dra gon -ðŁĺ ħ -sig nature -gre y -âľ Ķï¸ı -al ice -be red -ph er -ber n -ca th -ga thering -sc oring -influ ence -sm iling -de pt -lo cal -a x -ac u -reti rement -hon or -her self -chem ical -asse ss -y all -fre qu -appreci ation -ac a -cho ir -cu z -so il -c il -repor ting -u h -enterpri se -gr at -jaco b -ru m -fe e -j ak -sp in -bi kes -phi a -ste re -p is -bloo d -t att -ra ft -war ren -sh eri -back stage -mar sh -hash tag -ther ine -re in -game day -guar an -reci pes -min ds -stron ger -issu ed -bic y -n ak -ment ed -sc ary -u x -pre vious -tt le -th ats -ac tors -u ma -tin a -bun ny -promo tion -u ss -oli ver -montre al -what s -appreci ated -la kes -excu se -kno wing -pri zes -musc le -shad es -sco t -ing redi -electr onic -ju an -comb at -s ri -e h -turk ish -l om -stri kes -pri son -re e -po pe -vi d -ol dest -dol l -sw iss -certi fied -cli p -re 
turning -lat or -le igh -tt es -wat son -heal ing -el im -per haps -ha ss -k au -d der -mou se -new castle -indigen ous -wel comes -co le -tau ght -no ise -appe ar -jo e -can on -wedne sday -u tah -c tive -dri ven -i v -c ell -stri p -ac c -focu sed -ar rest -sto cks -wo o -â Ĺ -notic ed -shad o -di spla -ter ror -bor ne -secon d -que ens -wo ke -ja il -no tt -cam bridge -har t -se af -fa x -ac cept -âĺ ħ -goo ds -k at -t win -h s -thou sand -s ins -su ite -amp ton -ar n -rele v -ric har -hoo ps -n bc -class ic -p ab -soldi er -de plo -le ans -install ation -cla sh -le ban -ee e -ti re -belo ved -fu sion -travel ing -ne i -coo kie -glo be -phys ics -s q -co l -wol ves -d l -ex it -" - -foo tball -le af -ster ling -hi de -minne so -fresh man -natu re -indi e -supp lies -bri s -iri sh -ink tober -doo dle -ic op -mess ages -adul ts -recor ded -fix ed -ar do -offe red -under ground -dr one -p ine -ma inten -and re -ham mer -s x -r ound -hi ke -bra d -ro me -fu ll -on ey -ro ws -colum bia -archi ves -appro ved -bat ch -illino is -recogn ition -shou ldn -fo g -nca a -ke vin -human ity -al though -pow ers -p ou -s ar -pe st -alco hol -con sci -phil adel -en o -t m -ok la -cate gory -particip ate -accu sed -bri ef -po em -clu bs -consul t -ja b -big data -amster dam -ac ing -certi fic -n u -d at -impro ved -and y -campa ig -pale stin -p ace -mo bi -feel ings -wol f -bra in -pro pos -inter active -prin ce -inde x -c is -cha e -peace ful -co vering -ac o -cour ses -mon key -re place -b l -bloo dy -tal es -brigh ton -neighbor hood -g ates -spiritu al -af raid -bre ast -b ones -ðŁij ī -vide o -w au -tou ch -inju ries -car l -ri x -une x -âĢ ¢ -fre d -consi dered -thu si -an ch -on y -u sa -graph ics -ac re -ðŁĺ © -com memor -com mod -go ti -guar dian -star bucks -pre vention -haha haha -admini stration -portu gal -fac ulty -bet a -ul a -al bert -bre ath -er i -le tting -tr ic -ment ation -incredi bly -ten nes -v d -ðŁĻ Ī -ed die -br ick -gr ill -bt w -wat ches -resear chers -t 
ney -ni e -p as -a ster -vi br -poke mon -ch rome -go at -pitt s -il ly -festi ve -y d -can al -ðŁ Ĩ -fi es -car los -re que -partic i -tra ins -sam ple -temper ature -sym ph -pic king -in door -z ers -playo ffs -____ ____ -ap es -ly rics -islam ic -performan ces -d ick -spar k -se as -hom a -gr ound -disc i -employe e -com mu -alas ka -al an -fe ast -dg ing -ban king -manu el -slow ly -tru cks -mc car -oo o -sc rat -orche stra -indivi du -m x -bre ath -stair s -equ ality -bla ke -loc ations -cocon ut -balti more -aa a -l c -ðŁı Ĩ -har vey -resi st -immigr ation -adid as -fil i -re f -lg bt -mo s -pp i -ken ny -terr or -ban e -apol is -s g -social media -ka i -hon est -as sas -bol lywood -âĢįâĻ Ģï¸ı -ferr ari -hor n -cryp to -bo om -mainten ance -i di -s man -w l -ext ended -in sul -ve s -go sp -tr i -pi g -tar ge -cel er -st ati -sm h -ri dic -appe al -? ) -con clu -cos me -she ep -christop her -en thusi -po lish -me ts -oun ded -sustain ability -creati vity -con crete -ra i -ali en -ble ss -te es -clu b -ro t -bo s -ex ist -perfe ction -lu ck -rock y -expen sive -mean while -happy birthday -pre t -thr iller -ca ve -playo ff -som er -l u -le x -def ence -am writing -home less -pro phe -ch et -past or -ðŁ¤ £ -land er -ww w -Ģ ï¸ı -tic a -! 
# -o tic -rad ar -po sters -pow der -po li -ha un -tra p -bl in -assau lt -shor ts -re y -sh y -squ ir -rac ist -gar lic -fu r -remo te -sm ell -impre ssed -fing ers -âł Ģ -din o -le ment -s nu -promo ting -str ing -produc tive -b age -ma son -ra z -direc tly -j k -ev al -ðŁij Ĭ -doc tors -co w -ri der -st v -re move -w u -na than -ro d -n r -= > -affe cted -inve st -mp tion -g inger -o d -agricul ture -s que -mu g -coun ting -ke e -mag nific -coo k -ani stan -roo t -plac ed -sym po -gh ana -un d -che er -thro wing -secre ts -f illing -opti mi -butter fly -bu bb -ðŁĺ ī -terri ble -d g -sil k -obse ssed -lo u -ai de -sal ute -mon u -philadel phia -scienti fic -i st -u ae -dess ert -bott les -can yon -ðŁĺ Ī -car ib -o ther -w ich -re source -guil ty -un d -le on -e ss -kan e -el e -tra iner -he im -an te -man age -roo kie -tre ated -po ses -rs vp -cau ses -aw ak -je well -le tt -on ics -tit les -cardi ff -g aga -bu mp -use ful -? ! -loo se -bb ing -: : -argent ina -de bu -cy cl -wh el -dis gu -j el -k ills -bio logy -ex ter -tra sh -bo dies -tr am -circu it -expe ct -la ds -w ells -sho t -ge e -naren dr -fa stest -b ent -b ills -mar shall -h ats -intro duce -citi zen -im possible -gi b -az z -net working -r ant -thin k -in dy -st ops -f theday -bri an -* * -amo di -dom e -coura ge -pac king -af fairs -g n -si zed -ent ary -pol and -swit zer -afgh anistan -w u -ten der -subscri be -mo sco -att end -republic an -hon ey -âĢ ĭ -si mul -we ster -foo die -or o -midd le -ab t -co pies -ma je -narendr amodi -ty pical -inspir ational -vit am -wis con -cu bs -tiv ity -h ali -e ars -k ay -d are -mari juana -cu rious -an ia -tom ato -re mind -ðŁĩ · -sc ared -cou p -po et -land ed -ri d -wra pped -mor ri -climb ing -e ws -fe eding -con tra -tho logy -gri d -ti vely -read er -la ser -di ving -di g -lat in -ti ed -shake spe -o ci -ad m -show ers -chu ck -mar cus -oo s -kne e -o live -ow l -dy lan -an no -g ym -deci sions -well ness -arri ves -sati s -chri s -thur s -ðŁ¤ £ -inter 
views -thank you -switzer land -over night -journ alist -ser ves -vol can -.... ... -plo t -nic ol -car rying -mag ne -tre asure -ex p -be ver -ðŁĺ ¢ -mar ty -mo le -don ations -recogni zed -b h -du s -sh ann -al do -success fully -ent e -ðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤ -cab inet -cu is -tit led -d as -so l -strate gies -deli vering -ad ds -ani an -ne ther -ðŁĴ ĥ -con tain -su its -pa irs -to dd -rel la -ro pe -ci o -cro p -paint ings -su z -re jec -bu st -d h -fra ud -m h -contro l -je al -destroy ed -al lows -wo ol -minneso ta -om en -j u -sympo sium -d af -lim it -accoun ts -load ing -inter n -re solution -hol land -qu al -meet ings -gra ve -cam ping -v am -re nov -liber al -am ber -gre e -hu mb -fe ver -el ing -broo ks -à ² -be th -ad ed -al t -ro e -perform ed -jo sh -frank lin -nic ole -de ss -bb s -m g -net works -min im -al t -weap ons -gu y -jas on -g ha -harb our -at on -pra ise -kentu cky -bel fast -st icks -blo ss -ho pes -an thro -famili ar -wa it -ch ile -depre ssion -la x -je ts -le ice -recei ves -si er -an k -de x -inde ed -fle xi -fab ric -lam b -hel icop -am anda -âĢĶ âĢĶ -compe te -sn ack -techno logies -sy rian -mom s -mu ham -cho sen -an at -dev on -shar ks -re t -fundra iser -selfi es -st ations -communic ations -tennes see -tu tor -ro t -valu able -dynam ic -nur se -i ed -earth quake -deser ved -a ve -sar a -stre tch -dougla s -ne pal -à § -ob viously -d ame -ra pe -any body -k w -pat rol -hol ders -h anna -info graphic -ec o -be ating -stan ley -bo ats -ri bb -e z -wit ch -inv a -ac id -boar ding -- @ -gi l -da ve -care ers -opp os -l loy -in ter -do pe -re su -j agu -sh ade -in dy -on ist -rel ations -ag en -ab le -inci dent -me ter -shar ma -id r -pro ve -immedi ately -tro ops -am an -g low -gaz a -blo cks -person al -chron ic -all er -si d -sh r -whats app -lu cy -ar chae -ho u -journ alism -our selves -go t -the med -shap ed -we ak -cas ual -leng th -sla m -ab bey -e v -coun ter -est a -reci pi -cha pel -expan sion -sel f -suff ering -sp ice -n z -sp 
art -desp er -boo king -quart ers -y on -ðŁĴ Ĺ -p k -continu ed -- # -man hatt -tal ked -sh en -com bo -hybri d -je ans -liqu id -se al -re tweets -ac celer -collec tive -t as -: )) -profession als -ra w -o tt -su san -ir ing -okla homa -re ven -survi val -cre ator -tran sit -st ac -sur f -i k -ed iting -ch illing -bai ley -ste al -ra ble -pa rent -hun ger -sn app -collec t -philos oph -dedic ation -c f -c m -le ep -repe at -re ha -un fortun -a er -a ero -abstr act -mon itor -ag ents -bu l -sci ence -harb or -drag ons -floo ding -ac compli -d ash -juli a -the red -tues day -cy ber -b low -ta ined -le m -refe rence -pp o -ne goti -char le -con nor -au lt -access ories -commissi oner -rain y -re ar -advis ory -luc as -ma id -co al -k av -pol o -ðŁı ¾ -tran sport -mar gare -straw berry -bur ns -gre ens -ne v -partici pants -col in -belgi um -col our -in form -d ell -br on -cal y -kick off -strate gic -re union -hon ors -li b -egy p -âŃIJ ï¸ı -hy po -si zes -regi stered -bet es -relax ing -bloo m -inten se -valent ines -insan e -w wii -p x -tri o -bla de -wiscon sin -con e -plat in -ali ze -ra ven -incre asing -indi ans -il ian -bl u -rabb it -exten sion -je f -au di -fer ry -s ell -a day -us b -swe at -cham pag -metho d -mem ph -assi st -s by -ca pe -remo ved -mag n -v t -r ams -f bi -tack le -phe w -h on -motor cycle -su spec -eleph ant -sub ject -let te -da iry -whe at -awk ward -ac t -tro l -mit ted -zay n -sheri ff -ene my -con s -ke tt -bul ls -ev alu -bt c -satell ite -ho lo -por ter -dia betes -bet ter -rele asing -sur f -: - -se basti -collec ting -en cing -e thi -go ds -al ley -health y -m ills -sma sh -co pper -cr ack -read ers -sp ac -licen se -bas ket -bang la -en tic -om i -m ere -si vely -anim ation -lan es -dent ally -chill in -fi e -k aren -dep th -li pse -n g -ri p -mel o -sand y -ðŁijı ðŁijı -vin cent -nu t -hu g -who le -cre ates -? ??? 
-âĿ¤ï¸ı âĿ¤ï¸ı -bak ed -up grade -rober ts -har a -carib bean -auth entic -mb s -mosco w -attor ney -wi ki -ch lo -hu ll -cor k -" ! -sty lish -ðŁĵ¸ : -di ary -impro ving -ex pand -bri ght -pollu tion -k nights -person ality -chec ked -fac ilities -z el -bow ling -gu er -ðŁİ Ĥ -on going -un its -hoo k -be ck -confl ict -to dd -far ming -educ ational -k ak -cla y -stro ke -bel ly -explo re -mill enni -th m -loo p -sm s -consi st -cir ca -br yan -d ab -youn ger -soli dar -pp a -experi enced -b ella -bo ard -shef field -steph en -consu mer -sub mit -spon sor -t ang -ag gre -comb ined -trac king -sand ers -b az -survi ve -fer red -equ al -se p -re ed -str ong -priv acy -st ap -un g -ac ry -pa sta -pir ates -ag er -fair y -du p -introduc ed -wi p -let s -spr ay -ðŁĵ º -gre w -a sts -pitts burgh -new york -jo ey -lau ren -tra de -ch op -pi pe -cla ire -behavi or -v ap -cre ws -lap top -ðŁ¤ Ĺ -che ster -disci pl -d f -out doors -k s -go ver -super star -cas ino -far mer -; -) -re turned -ðŁı Ī -ma il -roa sted -co sta -v ill -pe z -gard ening -distribu tion -sh ining -inve stors -ra sp -dec ades -reali zed -bar n -p ti -st able -ut d -pan thers -m ens -b n -ca de -bu cket -yn n -when ever -wa ke -da is -ber nie -lo dge -ju lie -atmo sphere -ðŁĺĺ ðŁĺĺ -major ity -par ti -exc it -cu t -me h -musli ms -be gun -fli ghts -vene ss -ce me -po sing -so le -g ou -dark ness -pe ach -cel tic -auth ority -grand ma -ful ness -smi th -speci fic -gar cia -co ins -good ness -aldu b -recru iting -den nis -gar y -sle eve -weap on -pl z -disco ver -harri son -recruit ment -ja i -ch im -com pared -tom s -mo thers -am y -archi ve -t ask -ben jam -se g -law yer -al um -inve sting -mi e -che z -j p -a ke -fl am -wall paper -âĻ¥ ï¸ı -t ton -che st -favor ites -we igh -coo lest -r ating -relev ant -lo gan -ma ple -run ners -pri or -peop le -ma ur -terrori st -te sted -carni val -su spen -me asure -m v -cyber security -app ren -terror ism -o z -v ital -ni es -gon z -fun ded -twi st -assess ment 
-die sel -en for -colum n -ad dressing -ca sts -pay ment -x ton -fi er -, ' -la st -ne e -un less -clo se -sk ill -cuis ine -fun eral -ti les -a un -k ru -relation ships -ðŁĴ ¯ -ev ent -âĢįâĻĤ ï¸ı -kind ness -pro posed -acou stic -a es -defen der -dan ce -h tt -w at -vo y -ðŁ¤ ĺ -au s -cli ff -sear ching -beauti fully -in qu -at l -speci alist -ðŁIJ ¶ -da i -tra ils -class ics -inst ant -v ous -re venue -mar ch -kir k -fr inge -fire works -tri via -âĺ ħ -tr action -wal ter -mo to -l ily -att itude -cli mb -sc an -sav ings -c w -fa ith -cred its -ab led -gra ff -auto graph -he he -ran ch -ha d -ro gers -ðŁĮ ¹ -f in -re qu -fol k -ad ditional -lyn n -u ber -dol lars -lo gic -wor th -so m -the sis -p ound -bi c -st ur -cer am -spen cer -en tered -v amp -organi zed -âľ Ī -pp s -tr on -merce des -no ti -compet itive -do w -ous ness -vic tor -gr illed -na i -pu tin -ab ra -bl ame -alex and -anim al -dec ent -p ent -inter ior -:' ) -but ler -bal let -ðŁĴ Ķ -albu ms -down s -la d -si r -pla in -p ers -blon de -dis c -paki stan -se ment -ga a -w age -ch as -man i -co ps -terr it -lo l -lau ghter -ri vers -magnific ent -lam p -w b -new sle -char ts -ble ssing -p unch -lon gest -fl oral -cu tie -fare well -sto pping -mb b -bu d -chee se -de cla -si m -mc donald -de ter -you th -t ch -fre der -kin dle -fer n -at or -as leep -p ond -spr int -p ounds -la zy -gh e -fundra ising -dead ly -gran de -dou g -he y -lin da -consi dering -i um -gol den -vi k -auth ors -di ss -u ally -appropri ate -mor ning -y le -hon oring -foli o -be c -re bec -fin land -formu la -corn wall -sh ay -cau sing -bl end -sig nal -t ent -kash mir -nation als -har mony -sc out -acce ssi -he ight -medi eval -impro vement -ke es -prac tical -car d -de par -hu n -om ing -cal gary -ste l -bu bble -gur u -ma h -unex pe -n h -ed a -me at -i ge -si o -god dess -in ches -tun es -br itt -sti on -ra j -âĻ « -mer cy -ðŁĴ ĺ -sen ds -i est -pol ici -val e -reduc ed -as ap -vi jay -defen sive -celebr ations -ri ders -med 
itation -har mon -g ing - ¡ -program ming -in au -sud den -m h -replac ement -sk u -j ar -gra des -ta st -k itt -brand ing -k aw -boo t -f ought -p ays -g f -iz ation -ho p -k k -activi st -v end -coast al -cha os -ðŁĶ ´ -se me -bill board -li fting -cu mb -sc al -ðŁĸ ¤ -stru ck -l v -indie dev -beat en -jun gle -al right -destin y -m ing -k c -ch ances -om an -q atar -cra f -tra ined -pri x -char m -o tive -s mu -e c -and ers -hand ed -al ban -certain ly -arri ving -i ze -sa i -tr ack -pain ter -hu mble -appo intment -head line -manag ing -mo d -as pe -andre a -à ¤ -ethi op -un ited -exi st -bal i -k ad -n t -d red -re x -recogni ze -tam pa -be ers -ati a -he els -no te -transport ation -tur tle -re de -hipho p -sp icy -sp urs -⬠ĩ -cor p -ther n -to ast -hur ry -proper ties -ma ge -mar co -ele ments -bou ti -syn drome -ms g -develop er -gra ders -he im -re sil -off ices -del ay -di men -vin tag -barbar a -ðŁĺ ± -vene zu -cu lar -fac ed -bar n -ðŁĺ Ĩ -survi vor -wor m -confu sed -passion ate -Ø ± -identi fy -electr icity -sou ls -brad ley -repor tedly -lun ch -shel f -eli a -swee t -smoo th -emplo yment -am el -manhatt an -ste am -oun ts -ye p -li ving -un e -descri be -ca res -man ila -sha wn -ac ted -bas h -st even -re st -pet ition -div ine -wel sh -rac e -platin um -ðŁĮ ¸ -p b -extra ordinary -solidar ity -m all -on ion -schedu led -game of -fer gu -de ms -nor m -p k -tri als -polici es -publi shing -st ole -fron t -charac ter -van ia -ex ce -sti e -sc a -resi dential -sa iling -ðŁĶ¥ðŁĶ¥ ðŁĶ¥ -spons ors -th ick -champag ne -she pher -continu ing -ven ice -per th -na p -a ster -y ak -un limited -cho ices -ne o -hi v -repor ter -bru ssels -f old -dy s -se mi -la wn -it alia -wi fi -as k -em ed -fr ame -monit oring -ste ad -i da -gr in -is a -fli p -re stric -offen sive -atta ched -di sh -wh y -philli ps -gre et -p als -mix tape -v ou -fiel der -spar k -alber ta -g len -ca sh -s ri -u ri -ro dri -entreprene urs -climate change -p sy -d le -em ents -lin ked -nether 
lands -acci dentally -oppos ition -vel vet -ra ys -c w -om o -m f -lmfa o -newsle tter -: ) -toi let -liter ature -di sp -phili p -uni form -sudden ly -head er -cool er --- - -prou d -bri g -nis san -scienti st -j ah -con centr -pac ks -appo inted -so ap -eng age -cho se -âĻ ¡ -se tup -jeal ous -har ry -g ation -tun nel -te mp -osc ars -dec ade -recomm ended -child ren -ab a -anxi ety -ve ments -sal on -pho too -organi z -mach ines -ab s -vil le -hy pe -ti ff -emer ging -av geek -[ # -contribu tion -bra dy -re sto -g mail -fit z -photo shoot -hel met -h t -eleg ant -ug anda -nur sing -or leans -pen n -na h -foo tage -em a -w o -w ad -concer ns -ve re -re mark -who ever -str ang -p t -qu it -sh ang -histor y -s ick -perman ent -ill ness -col d -visi on -he m -ar row -con vic -pin k -oc cup -bal d -ex hau -u of -am o -on t -ãĥ » -adop t -la id -smo ked -inter pre -ess enti -associ ated -b d -bb y -fi er -inst all -dipl om -con diti -c f -w ak -any a -gr aci -fi sher -s ss -ap r -il it -mus ician -symph ony -cor d -h ack -le gi -l v -bless ings -hum or -sc ra -e ti -min ster -trav elling -bu sh -jewell ery -li me -!! ! -pregn ant -pe e -lo b -cap ital -ip a -pen cil -la bor -duc ks -prou dly -wedd ing -dere k -m w -pe g -valent ine -an gu -re treat -pro spect -dang er -vul ner -up set -, # -sr k -x im -thur sday -n fl -kis ses -re ds -cr ack -re ward -c u -ko k -me te -aband oned -it t -me als -sp ell -stan bul -del ays -ru m -le op -gu m -no va -super man -ch ick -m is -dram atic -inno cent -r ounds -re c -auti sm -bangla desh -mor al -mo vie -sp oo -k la -âĥ £ -ou ting -mess i -ab road -loo kin -a im -q i -st ack -colla ge -à ¯ -hud son -sc an -ho e -ch au -oc cur -comm ander -ho les -ðŁİ Ħ -bi as -v on -stick er -ma k -responsi bility -colum bus -sa int -ed mon -rac ism -far ms -w en -gul f -may o -!!!! !!!! 
-corpor ation -ba chel -el a -inter nal -je ep -fol lows -di alogue -de rer -smart phone -he len -rich mond -equ ity -s land -b g -ne ar -av i -memph is -we ir -discu ssed -bad ge -p up -mi stake -phen omen -un ite -ðŁ Ľ -de pic -ri des -in augu -n at -sof twitter -comb ination -gosp el -âļ ¾ -ad mission -retro gaming -ðŁIJ ¾ -sch u -mb o -jun ction -al arm -à ¦ -gr ac -kh ali -k ul -m ale -cap tion -wi sh -te re -cor ps -ru bber -play station -er in -effici ent -l or -jo kes -in ary -nor man -lu is -inaugu ral -ch ed -âļ½ ï¸ı -di p -to e -str at -aa c -am u -pi er -co tt -comm and -tt en -sn oo -cu be -clo ses -class ical -s word -expre ssion -reach ing -n app -co st -affe ct -ric o -gi f -brea the -tri be -or tho -h ay -l g -fri es -n m -hi ding -richar ds -en de -mic ro -capit ol -cop y -ro m -regi me -mary land -tax i -di al -embar ra -un believ -ch t -v s -elim in -o dd -pen ny -sound track -l ings -trans ition -rema ining -a is -mali k -? !? -rand om -def end -ul tra -tru m -danc er -st ol -dri ve -a ver -ro ast -defin ition -se an -excit ement -partic ul -su rely -sh av -ber y -di shes -com m -is ol -i am -ob li -gho st -hugh es -chi efs -b as -conserv ative -speci al -fe min -sh ri -n ancy -inte l -tu ne -ðŁĩ ª -jo el -gg le -mo to -ðŁĺ Ķ -bu ck -d ag -antic ip -mont ana -gu id -fro g -ec raft -op e -dri ves -nu mer -x y -color ful -wednesday wisdom -illu min -bey on -inau gur -deep ly -pre fer -for tune -coo ked -ti ble -âĺ ķ -swe ater -it ter -tt y -u i -gi e -com plic -~ ~ -tax es -cu ps -di verse -sam anth -âłĢ âłĢ -ba king -sy mp -wa i -be half -mer cur -travel s -ðŁİī ðŁİ -or ia -eng aged -jump ing -reti red -n aked -p uni -speed way -sci ences -rehear sal -on ym -dy ou -pl ates -r ati -kri sh -jaz z -car ol -ra f -pen alty -tim eline -ru by -engine ers -ra f -bel le -do se -che on -esc ap -me g -ran k -or d -me gan -mer ch -ec lipse -âĺº ï¸ı -ple dge -kir k -per si -leice ster -sa k -w k -saf ely -yy y -je t -promis ed -j c -en ne -no ah -re no -re a 
-ðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤ -tra il -ðŁij Ģ -f d -soo o -ri min -w k -ภ² -i al -x ox -bis cu -d ale -fan dom -particip ating -fla g -privi lege -pe ach -mach ine -bo ston -gro ss -o g -mir acle -adop tion -u ss -mon sters -be ij -clar ke -pu shing -pra ying -ar o -d n -ell is -apol lo -od ds -refuge e -to w -b p -ðŁĩ¬ðŁĩ § -h end -app eared -memb ership -pe an -du m -viol ent -v y -potat oes -aw w -greet ings -t ts -ac on -sh ane -photograph ed -cra b -temper atures -cu ba -c fc -wel com -he l -in nings -m k -co de -kno ck -gra ss -swe dish -p ta -ick y -v at -lin ing -s q -sa p -ar c -announ cing -sk ins -cit yof -br ing -co x -gam er -it arian -i da -h d -ros se -sad ly -ge o -âļ ¡ï¸ı -tag s -fa ther -chan ge -l ance -whis key -adel aide -te c -stick ers -marke t -class y -bad ass -flo rence -lin er -fro st -k ate -ac on -scand al -es sex -ðŁĺ ı -vi vi -dr ill -blo ggers -recomm end -d ha -ac res -ro ma -bu y -gro cer -er ia -ma har -ff er -patter ns -ver i -com pu -st ev -ang a -ment or -do o -it ali -cdn poli -on ly -conduc t -elec tro -de f -wh ale -prepar ation -bicy cle -vi ral -turn out -bra ss -qu ad -hospit ality -pack aging -den cy -ceme tery -abo ard -dre aming -pic ture -t all -inv ent -ad mi -o e -tem ps -qu an -fun dam -pro mp -resi dence -mu d -sour i -âĦ ¢ -graff iti -gi f -d nd -com p -s war -pe eps -pale stine -devil s -san g -assi stance -bi ke -missi ssi -inter viewed -ne phew -dru ms -v and -gentle men -n sw -inst a -leban on -ee ee -oli via -ver y -rou gh -industri es -m ation -ðŁĺ Ĵ -bar rel -n ay -po ps -moder n -ill y -are st -on ents -protec ting -v ans -e o -vi kings -restaur ants -re ck -jac kie -andre w -w illing -he ath -citiz en -disc rimin -à¹ Ī -stu art -m ys -hi p -tran sp -" ? 
-te x -su shi -ke d -cro ssed -dist ur -pe dia -f ate -some how -mo th -proce ssing -is s -r in -u ts -yy c -ver t -lg bt -re id -on to -arab ia -habit at -= = -stre ak -simp son -addic tion -wim ble -deli vers -challeng ing -ðŁİ ¶ -fran ch -e du -s me -ai ds -hur st -th am -tari an -remem bered -palestin ian -fe es -tru m -sket ch -ur u -fit ting -jes se -ðŁĶ¥ ðŁĶ¥ ----- ---- -ba ch -ici a -colo red -da h -associ ate -int el -s eller -p u -stu ffed -ac s -b s -sh in -cooper ation -certific ate -ab u -ingredi ents -re v -in ge -el der -christi an -bun dle -th ic -dir t -beij ing -comm it -ted dy -ed u -to day -s field -w yn -confir ms -lo o -j v -ene ss -al pha -vir us -ari um -gr ind -bri dges -introduc tion -pol ls -bac ter -z ach -termin al -ra iders -fla vor -zom bie -vo d -sp reading -gameof thrones -effici ency -lat ely -ale m -twee t -cri mes -cl er -de y -dg ed -hy un -pay ments -cir cus -ðŁĺŃ ðŁĺŃ -mis souri -lu b -episo des -c age -po s -mat ching -tumb lr -lin ed -ge st -am bi -nar r -ing ton -regu l -blo wn -is le -co co -on don -joshu a -tour ing -sm a -sau sage -best friend -bo eing -desi re -sav age -ra pper -de vo -te ar -take over -cow boys -po ker -par ag -pp e -h int -we ars -se th -ro les -l anc -man ga -form at -fl yer -c ay -mo or -ba ke -spla sh -v ad -ker ala -proce eds -sil ly -reflec tion -di str -wi d -su it -ci vic -yan kees -by n -migr ation -di stin -or ch -fe mini -quali fying -tu ri -o be -hun dred -cra p -wan g -mathe mat -bu re -expo sure -fergu son -seme ster -re serv -pl ym -a hu -fac ial -wa x -wor ried -ca b -vi o -as a -co d -to pics -p cs -hal o -rescu ed -horiz on -ar k -âļ ª -hol ly -el f -ul ti -pu p -quali fied -attend ance -ati vely -destro y -y c -for th -photoo ftheday -c ents -ic eland -meas ures -de sk -port folio -artic les -direc tors -dat ab -e w -creep y -oun ding -hon oured -mi st -j it -men tioned -port able -iti c -d ann -friday feeling -am id -ti ger -scri p -helicop ter -hard ware -expl or -work place 
-austri a -beat les -ber nar -spi der -disc o -cul t -lim its -shor tly -fin al -nin ja -lu ke -le bron -wal mart -o il -van illa -shi re -ye g -ak y -c s -bl er -collec ted -t g -rol led -speci als -b ff -pier re -sh im -vi er -flash back -restor ation -individu als -pro d -fre aking -tu rer -o a -re fre -mor oc -gre et -re yn -care ful -our ing -u sh -is d -g ill -vie w -thunder storm -b led -pic nic -guar di -pi g -ar k -syl vania -bann ed -u cl -vi jay -ori um -av engers -believ es -eu r -monu ment -concer ned -la bs -ber g -a ap -vi sh -sing les -can cel -z el -ar ab -ru th -too th -ar ta -sh af -chair s -r ack -dise ases -crow d -cl y -fle x -christ ma -artif icial -tom at -fin e -dra ws -advoc ate -fran ce -Ù Ĭ -ðŁĺ ³ -heav y -s our -compre hen -no ble -aa p -hin du -cor al -g ars -ow en -n l -st all -yel low -mar ina -in ver -suppor t -tou gh -promis es -pi e -master piece -sco re -for ce -mor tg -crypto currency -o x -r ors -rock in -pro vin -ho g -no stal -oak land -pat rick -inclu sion -tra ffic -ah med -a ha -lux ury -con secu -de mon -âĸ º -b lowing -st ag -: " -encoura ge -ben e -sku ll -do dge -bu ster -kin son -wit ne -er ror -lo west -fel low -à ° -sh re -bl ur -vir gin -compos er -sli p -mor nings -ga ins -tab le -gra in -ari st -braz ilian -w we -tu es -ribb on -an ag -di st -sac rif -em brace -entreprene ur -af fili -de o -t ali -touri st -fat al -ì Ĭ -autom atic -ðŁĩ µ -we ak -wel fare -confir m -benjam in -fi ghts -alleg ed -me ad -strugg ling -pro secu -che f -à ¨ -propos al -er n -ðŁĺ Ħ -dy k -on gs -hon g -m ack -mel on -on ent -ru sh -d ap -tol er -pro pag -c ze -trans lation -wal let -cott age -sa il -constitu tion -ðŁĴ Ģ -mun ici -fav or -storm hour -i h -ðŁĺ Į -approach ing -pin ned -j ed -niger ian -n ach -sh at -particul arly -mc don -camer as -anni e -admini str -he at -electr ical -char ming -gib son -bouti que -ex posed -ac tor -pil low -beach es -genu ine -margare t -ben nett -lou isi -pos itions -el y -shin y -ten tion -architec 
t -ren tal -ac qui -goo gle -sub way -mom ent -ðŁļ ¨ -ri m -metho ds -cy cli -nor folk -Ù Ī -over whel -ra pid -we ar -happy birthday -progre ssive -ðŁĴ ¥ -co gn -pap a -f ool -philosoph y -pol ar -jim my -wi g -ðŁĴ ĭ -oper ating -reduc tion -ph i -fla gs -to the -o di -a res -k oo -k ang -ar kansas -ash ton -wimble don -sci fi -attrac tive -mississi ppi -logi sts -ral ph -la bel -gradu ates -ma ha -home town -âľĮ ï¸ı -foun ded -on the -li z -trans l -mini mum -pre sti -ta m -gener ations -re bel -journ alists -par am -mc m -acry lic -death s -tes la -w t -bry ant -jer us -i stanbul -muham mad -ri ley -k ris -work shops -is o -coun ts -stre t -prote cted -trin ity -man ual -r hin -r il -pleas ant -le mon -ner d -har der -dar ren -bur y -ra h -bas is -mi gu -occa sion -li sts -âĿ¤ï¸ıâĿ¤ï¸ı âĿ¤ï¸ı -e b -de cre -hamp ton -ìĿ ´ -tra vis -trans form -puer to -nh l -av oc -tri ps -unexpe cted -ve t -di dyou -bar ber -st ages -m son -re presented -for t -l al -pp le -nic ely -ignor e -qu il -qu inn -h k -carri er -remin ded -am ong -pass enger -el len -gue z -sc ape -mu ral -youn gest -ma sh -d ill -rout ine -stain less -jack son -gand hi -th al -on ers -edit orial -convers ations -sd ale -autom ation -i ke -า ภ-ðŁĩ ª -hau l -la ying -men tions -am en -abor tion -i bi -coun ties -ca therine -man ds -jam e -roll er -au t -n am -o logical -cep tion -ran king -tox ic -sn acks -victor ian -bang kok -psycho logy -re g -ang ela -respon d -sty le -sophi e -dak ota -achiev ed -mar ked -imper ial -in as -glo ves -sli m -confi dent -att acked -gg er -lon ely -valentine sday -re b -craft beer -orig in -zim bab -ce iling -te ens -other wise -w b -f ers -day sof -advis or -y ah -âĻ ª -en der -republic ans -av a -skir t -pi pel -chi e -jan e -ja x -ðŁĺ ĭ -âľ Ĭ -j ays -bre tt -bal o -cru cial -d har -as is -de au -lloy d -chat ting -âĿĦ ï¸ı -rel ay -remark able -n s -we t -bris bane -ðŁĶ ´ -tion ally -f k -la yer -house hold -consecu tive -es is -pend ant -st ir -crit ic -su gar -photo 
shop -pa res -arti stic -do dgers -c un -cra fted -am end -bo at -âŃIJ ï¸ı -egyp tian -sa w -tra ge -small er -ox y -pa ired -nex t -i res -tac o -o y -u c -st i -a erial -: // -dr o -dot com -gg ins -r pg -ay e -le an -stri ker -lo bby -prote sts -pri ority -congre ss -am ate -inv it -r ington -mom my -th us -allow ing -pione er -enfor cement -g ori -tal k -dra g -du mb -bul let -san ge -er y -tar gets -ðŁĩ ¦ -he ather -consi der -seaf ood -ve st -ris ks -% . -p g -sac red -he ating -kick ed -tto t -. - -chan di -co ven -po ol -pul se -i a -ro ster -shakespe are -es a -car go -pean ut -tro op -ac tion -tab let -home work -cast le -stru ction -mus icians -free zing -bu tt -justin bieber -j j -bah rain -an them -au dit -didyou know -na vig -guid ance -âĸ ¶ -tur f -n un -fic ations -ye men -char ging -x c -bron cos -su bur -p ale -bor ing -among st -for the -em per -om fg -p j -expe cting -ðŁĴ « -st l -ad min -expect ations -sw an -shoo t -oooo o -min ent -ãĢ IJ -wall ace -stan g -satur day -adop ted -dou bles -hom ie -ome z -d han -vent ure -surroun ding -fi le -mob ility -de es -w ski -broo ke -emb ro -re members -kar a -test im -bo tan -m tv -sacrif ice -jerus alem -d l - ´ -proper ly -ili on -as i -leg it -co pe -m cla -recy cling -lar ger -ðŁĴ ĵ -pat ric -gener ous -ja red -p f -mol ly -thom as -ju dges -h b -sor ts -bl vd -o ven -enter ing -plan es -be et -integr ation -boo ked -fre ed -ver n -ash es -to pped -de pot -welcom ed -ren a -m ick -d and -see ks -gam er -ran kings -ren e -mu t -whis ky -fire fighters -gu es -ga ther -tour ney -de men -y ang -new ton -autom otive -back yard -deta iled -mi st -to bac -fi ber -un usual -grat itude -sp are -ne ys -: * -per i -flo ating -fin alist -don ating -dre ss -bro ad -be the -econom ics -tai wan -ed wards -plu g -pra iri -val en -bab a -f ad -an as -har per -dis order -app lied -p att -bi kin -li ver -cu ri -carol ine -ann er -juli an -wal king -mal col -screen shot -co ding -skin care -activi sts -myster ious -ex 
act -blo cking -mercur y -bat ter -du mp -âľ Į -en se -li sh -ridic ulous -prote sters -ðŁĻ Ī -lu st -swe at -as s -ali ke -co dy -re ments -win ds -as pir -vi enna -pra y -.. .@ -bo i -cand le -assi sts -te e -der son -p ony -f ence -con spir -âĺħ âĺħ -oo th -e pic -ba rely -a unt -b am -diamon ds -end less -scre ens -can cer -gr o -p st -pro spec -mo sque -help ful -ou ri -bro ther -gu jar -cri sti -ine z -to wers -ad dresses -gra y -bur ton -re tweeted -ðŁ¤ Ķ -n ity -du ck -super vis -jo an -kin der -sanc tu -pi ed -âı ° -ł ï¸ı -m ati -reven ge -ce ster -eli fe -desig ners -back ed -bo li -wei ght -cou ch -su res -s its -shri mp -la gos -auth orities -os ity -hol ly -compu ting -fac tors -ab e -pan els -ram ad -sent ence -missi on -hol m -r b -d ads -shang hai -mon ey -she ets -sk ate -thre w -cup cakes -infin ite -l is -practic ing -ess ay -ka i -as ci -mo b -u gh -hol mes -re gg -ik h -mo ck -collec tions -pe p -o va -sal t -nan dez -co y -thre ats -tex ts -cin nam -pregn ancy -pen ding -stam p -flow er -g is -agre ed -pay ne -ro ver -ph ra -sof t -f fin -fa thers -pass engers -aw ays -al a -h es -li van -in s -samu el -ingu i -h of -j j -chen nai -cat al -om ic -he ath -ni ece -pump ed -integr ated -are l -no m -produc tivity -wan ting -vis a -di ana -tw il -it v -cam ps -ro wing -d ley -black and -gu ards -b ells -re verse -vi be -ric ky -mo ss -ny t -âĺ Ģï¸ı -el le -tro y -cu dd -ev an -women s -fo to -mi stakes -wick ed -mi l -c led -me mes -co smo -schol ar -ren o -ðŁĺ Ģ -v ents -# â̦ -terrori sts -ca sey -cardin als -ðŁĺĬ ðŁĺĬ -venezu ela -bol a -liter acy -t w -en o -con tains -au stin -fin anci -ev an -har vard -origin ally -chev ro -her ald -nott ingham -manag ers -âŀ ¡ -accep ting -wal sh -tutor ial -entrepreneur ship -yach t -requi rements -glen n -pe de -unfortun ately -ach ing -dais y -gi an -night mare -âĿ Ĺ -r ina -b art -ema ils -oppo site -who m -sa ke -pu zzle -da shi -par ty -blan ket -bus es -lo re -beau ty -reas on -pun jab -winds or -func 
tional -exi sting -hel lo -gli mp -con vin -la k -scre aming -rebec ca -bli ss -north west -infin ity -cosme tics -pul ling -coffe e -pl ing -op ho -colom bia -interior design -( + -emo tions -sa c -sun glasses -sav es -d f -six th -al y -ðŁĺ » -de en -dev ast -polit icians -lac rosse -g u -pe i -jav a -comb ine -coal ition -er ts -survi v -ch ad -stri an -n n -de vi -coun c -concer n -contro ller -bre ast -j ury -tu m -introduc es -la di -mobi le -al z -ste ady -nur ses -h acking -on line -oce an -ðŁİ Ħ -a am -ju ven -ic c -louisi ana -ar te -street art -is on -wn s -fr m -p anda -no ir -main tain -del ay -symp toms -thor n -ge ome -ter n -carri ed -p ru -pan or -as sy -per u -clou d -sp ra -pe di -e ste -tag ged -ðŁĺ Ŀ -shado ws -naz i -ا٠Ħ -cor ri -âĻ¥ âĻ¥ -j ad -ðŁĩ « -form al -spo ken -ðŁĮ ŀ -enjo y -lo pez -out look -in ho -w ander -Ù ħ -ma ya -pe e -d ine -ãĢ ij -brief ing -suppor ter -ar ily -ght ers -natur ally -doctor who -j en -v ar -new year -re se -si mm -re x -con sequ -tomat oes -bur st -bra vo -bur gers -cr acking -nor theast -bi om -mush room -mar que -dou ble -ni er -v ag -tw enty -key board -win ni -jama ica -par ish -: - -mental health -ali zing -ren der -wa king -ðŁİ Ĥ -g ly -na than -wa shing -mel issa -jun g -loy al -chil i -song writer -guit arist -bo wie -neighb ors -onym ous -as set -ta i -head quarters -ðŁĮ Ī -i hear -ci gare -sur g -) " -re pl -dar ling -ðŁĻ Ħ -z ak -sa re -ãħ ĭ -mic key -ware house -mass age -ine es -did nt -i w -hur ts -eng aging -mag ic -women in -k itten -mor s -c art -tit ans -colle ague -compe ting -er an -k hal -mar ble -dem and -del ight -et ary -bli zz -lou ise -m ls -fini shes -experim ent -conduc ted -electr onics -itt ers -car ing -wh ats -sym bol -jun g -e cu -pi x -con text -char ger -ðŁĺ ĩ -re ig -fra g -ë ĭ -ch ad -tru e -ker ry -def ending -a int -au ton -check out -bar nes -less ly -d t -m me -clou dy -second ary -are z -_ : -app a -const ant -" ) -ve ts -jo b -i ent -ðŁĺŃðŁĺŃ ðŁĺŃ -m j -fren ch -di ver 
-davi es -hh hh -e book -๠ī -mar iti -bree ze -susp ended -mat o -vi et -ra hu -se i -bol t -en ary -le is -kar l -fr amed -expla ining -ab c -de aling -nat o -ja ke -exp and -leon ard -establi shed -du b -ar men -el led -voc al -nichol as -ori ent -k yo -illustr ated -ah h -danc ers -milli on -ge ta -po pp -as u -mur dered -gi ble -sto ked -gri ffin -maxi mum -adri an -en counter -ther o -david son -ðŁį » -holi day -ev o -asse ts -car son -memor able -âļ ½ -ob am -represent ative -cb d -tr icks -vo gue -vo ice -mm mm -sebasti an -cli f -ath y -par alle -ðŁ¤ · -pa k -ev acu -e ats -ا Ø -tou ched -organ ised -spir its -can ad -gui ded -frame work -ðŁĮ Ł -pe d -natur al -ag ar -replac ed -anch or -ti t -sha h -organ is -super ior -r n -ch ro -eric a -st ill -cor on -chu ck -loc ks -or gan -ro sen -sc am -ben ed -/ # -ke en -tre vor -vamp ire -sor ted -! ' -af ford -in tro -gr ace -ðŁĺ ľ -sau r -kick starter -influ en -v u -y up -po c -ðŁİ ¥ -a ar -s ang -tre k -et sy -tb h -scre am -chevro let -pix el -shepher d -an or -gabri el -tw ood -sd cc -me ters -develop ers -clo sure -v w -twit ch -ì Ĺ -se oul -pr ice -ho g -n ish -hill ary -scrat ch -in cen -wag on -dis ability -pan ther -ch ats -g d -wit z -sus sex -l ate -den mark -ger ald -cancel led -net te -i x -nav al -bap tist -te t -y ad -ma th -ho y -r andy -po int -intel lec -fru its -w ool -gu in -pr on -the ft -con dem -mar ry -n ola -architec ts -cin cin -roc kets -gentle man -ex plan -t ate -do e -ra ises -wild life -w l -insi der -blan c -w p -for sale -ny c -po well -unbeliev able -pen s -goo dies -mu stang -p ens -st ays -squ ash -xox o -near by -ever ton -co co -le agu -k han -stu d -south west -con struc -s worth -cro atia -le a -su ms -aim s -e an -van ess -iti ous -pa thy -arc ade -b end -sugge sts -sac ram -roy als -ri er -em ir -in cl -an k -clar k -ri ght -vac c -ठ¾ -tan e -li b -u sc -sal es -hu h -s ally -ver a -p ga -gro ws -dru m -tre e -eth ics -sug gest -is ab -se aled -pre viously -anim ated 
-ab du -ri ses -glo b -pre dat -scar f -del ic -om ar -ll i -sx sw -py thon -ne bra -fun k -reflec t -pav ilion -tic ally -ch asing -bak ery -inva sion -ko h -believ ed -co hen -con qu -cra fts -nat i -cle ver -govern ance -sam ples -fa ils -â Ķ -ti mo -r itu -stri king -inclu sive -sho cking -can t -requi res -dra wings -à¸ Ń -purch ased -du m -z ach -war ner -con sole -man sion -foun tain -circu m -e sh -is land -mil k -pro fits -hali fax -ri val -âľĪ ï¸ı -jen ny -sand ra -ny e -k elly -y al -qu ad -no s -inste in -fin alists -mid fielder -cu e -excep tional -a an -sa pp -gett in -sa a -f ati -sl ice -vol k -s wal -la sting -sum mary -it as -sm o -s z -âĺ Ĩ -ip l -fl ames -ene ws -ha v -hoo die -pitch er -win dy -re vol -centr al -ton ite -ðŁİī ðŁİī -sol ved -mil wau -organiz ations -wee ts -re fin -s th -ãĥ ¼ -el in -ton a -cinnam on -ðŁİ ¨ -ðŁİ ģ -ron aldo -pen insu -ome ga -el ds -desig ning -e igh -blu et -ben z -nu g -ash a -robo ts -su dan -choo sing -en do -ser ge -clo sely -hand y -fing er -be ing -ar te -survi ved -fl ame -mile stone -gu t -d war -fu tures -é e -el o -fri dge -eli c -ou ch -u b -p v -tit an -col lar -st ation -nev ada -aur ora -r d -dun can -âģ ł -bri en -mar sh -Ð ¾ -to tal -ch ry -s ers -su ffe -ra chel -colle ge -to days -cour ts -ch it -re united -gym na -gen esis -be side -re presentation -ch ant -collec tor -ra k -ath ens -ni gh -mun ich -langu ages -fl u -particip ation -__ _ -c v -spec trum -so da -co ver -refe ren -ab bo -ap a -public ation -ed m -mon ica -ar my -ðŁļ Ģ -div or -dr y -stre ams -robo tics -ci der -bull ying -appro val -sto ke -plat forms -sier ra -ex tin -i b -ha yes -succe ed -suff er -at ically -da i -lyn ch -h ound -del ines -ack now -d ated -exclu sively -he res -fac ilit -dam aged -char ter -la kers -fal con -unve iled -wel ove -e ase -pati ence -l one -gent le -gene tic -produc ing -g our -shann on -bil ities -zimbab we -p int -dau ghters -liter ary -bel le -cl am -surroun ded -k any -ne il -pir ate -rang er 
-hb d -nat alie -bel ong -olym pi -emb assy -sc ol -en er -ak in -lo ren -b h -: / -di va -den im -hi pp -ðŁĩµ ðŁĩ -arn old -? ' -we ren -em power -dis abled -man or -rasp berry -b af -aw ful -dru mmer -kar dashi -n ash -machine learning -ch u -rebel s -tim ing -mon roe -ton gue -ran ge -pup ils -re ss -amaz on -b z -har ley -pal mer -ballo on -s ings -ic ec -j b -c ers -g ps -whi st -ri se -l t -oo oo -c attle -shoo ter -vod ka -uc l -mt g -le sli -jon as -di spo -at ric -ste in -vintag e -fir ms -flo yd -cow boy -soo oo -is aac -war craft -disney land -beauti ful -be am -franch ise -bu n -k ag -an on -tur bo -swee p -made in -kar achi -dete ctive -penn sylvania -contro versi -vitam in -a side -chron ic -descri bes -remo val -ha h -ap er -ten ed -u to -bad ly -mir ac -f ry -ye a -in jec -ther mal -comp act -th or -te ed -ur gent -l ite -g illi -sop hom -ic o -che m -p m -for k -fre ak -ch ak -recipi ent -i y -ni k -model ing -c ans -ðŁı Ģ -del ux -se am -surviv ors -rad ical -investig ating -reli able -f m -tur t -ligh thouse -to ol -go wn -) ) -bo ts -auto graph -a id -bu ffe -h mm -horri ble -ssi onal -ann i -à¹ Ģ -k its -sch i -eter nal -hu ss -sens itive -r u -tast es -chec ks -im o -por tion -sk ate -e den -half time -fri ed -ri hanna -ti se -fl ick -ca in -s gt -âľ Ķ -sh au -sta ined -ra ffle -dro ve -sal man -princi ples -sh o -ar u -je ss -gu ine -gar bage -my an -jel ly -dis ru -z ia -q ld -ent ries -la v -fle w -ad mit -objec ts -comp are -ny times -cann es -p n -suff ol -ro c -d ana -e gg -hi st -coun sel -' ! 
-phy si -imag ination -ad just -explo sion -plym outh -hor ror -elli ott -bour ne -de x -bre ed -au dio -lob ster -disappo inted -nation wide -( ( -incre ases -austr ali -ce dar -star ing -rac ial -e is -g mt -visi ons -stay ed -discu ssions -de an -cur tis -mai den -stel lar -happ iest -h wy -pre season -car av -mon days -hospit als -glimp se -schol ars -ja i -ter race -ann a -goo se -gra ded -lot us -hun g -grocer y -stam ps -emper or -sc oop -in ser -c as -exist ence -he al -fal cons -mar vel -reduc ing -terri fic -magne tic -perfor ms -bar re -p us -tre ating -ic on -w h -decla red -tra uma -do d -come dian -nik on -bu gs -as m -mont gom -ibi za -comprehen sive -ha s -san ti -fellow ship -da sh -p sal -louis ville -sp y -fau lt -d the -fi led -vi sta -de sc -fe ars -you tu -sp s -es p -ri g -cri me -ber ger -wonder land -k ent -in formed -stev ens -my th -ast on -ir i -visit or -at ri -produc ers -al la -person ally -separ ate -agen cies -af ri -il an -spo ke -n ina -squ ad -di ves -de pend -li v -fier ce -enter taining -cha in -sc at -bor ders -pal ette -sp ro -os is -der by -tobac co -zi o -willi e -ju vent -zoo m -hol y -enti rely -af e -mart inez -be ds -pe a -bull dogs -ðŁĩª ðŁĩ -ib m -ne on -ethiop ia -team mates -plan ting -tw er -any time -for bes -ó n -run way -ner vous -ro ger -p ile -ch anc -apo caly -u w -o i -dr ought -territ ory -br ick -cre atures -go in -w aff -gre n -sou theast -je an -am bul -ed ited -stra p -c v -aar on -ãĥ» ãĥ» -t su -descri ption -kin dly -clu tch -im mer -en or -women sday -or ange -ra g -ob vious -hy der -chann els -man go -me yer -ra ining -ge tty -pil gri -coordin ator -up load -ninten do -don uts -san chez -app arel -j r -zz i -, @ -jeff erson -accessi ble -great ly -e id -initi al -budd ha -par is -ma scot -â¬ĩ ï¸ı -sch war -si ri -sp inning -mortg age -e cho -end ange -ge dly -chlo e -enh ance -kar nat -k ry -explo res -ðŁĴ ģ -af fair -ic als -all a -dar t -dolph ins -diffe rences -squir rel -au gh -dr ones -ell en 
-re store -pa w -un for -pi ke -hil ton -colla b -consu mers -co inci -out comes -pp p -a q -coup on -li est -si ms -k ho -av es -spo on -pu dding -cor byn -hat ers -ex ams -sla ve -. ! -p sa -app les -tam il -se d -co ke -zz o -lo sange -car bon -cla ir -... ) -k hu -cra ig -explor ation -sanctu ary -su e -al way -demen tia -won ders -super hero -pakistan i -brown s -bluet ooth -lo cker -mar c -ev entu -delux e -rodri guez -âĿ¤ âĿ¤ -ro bb -ðŁĴ ¦ -lin ux -ten s -intellig ent -se ed -vo ter -s ler -pe aks -inter n -teen age -peninsu la -hand ling -ti e -cou sins -wen dy -me e -à¹Ģ ภ-din o -ðŁĴ ° -ðŁĺ ĥ -ze e -s bury -trage dy -b k -bo re -z in -war ns -idi ot -tou ching -contin ental -tac os -saf ari -wa shed -po dium -morri son -fore sts -c bc -al on -partic ular -be ads -inv ented -lo ch -li ghter -where ver -i de -docu ments -a we -k r -no where -min er -st it -ro x -contribu te -har dy -cl an -ob ject -ca it -ðŁĴķ ðŁĴķ -happ ier -vege tables -t art -g ag -nom inee -heav ily -pan ic -j d -there sa -at m -u ph -s fc -su ri -drin k -n al -re vel -k l -avoc ado -nom ination -ma donna -shar on -malcol m -control led -sh ers -revi val -legis lation -shoo ts -n in -comm entary -pro s -human rights -str anger -mit ch -pipel ine -leg ally -th u -gil bert -tol l -gran ted -gh s -ir anian -refre shing -du k -ab i -pri me -jose ph -mo sa -stati stics -produc tions -mer ry -pat el -sa x -human itarian -struc tures -e missions -town s -fre el -ster ing -rat ings -alle gedly -cab in -st l -w ade -fl yers -tri m -promis ing -z u -bal lot -compar ison -free ze -ou ter -great ness -as sign -snow y -r ale -tor ies -med iter -kno ck -consult ant -cincin nati -analy st -sc oo -je ws -appro xim -pu re -portra its -cy rus -ation al -lo ans -acqu is -el u -accep table -uni on -water color -ru st -batt les -per fu -seas onal -ser ial -mind set -ri ot -fel d -enni al -clo set -pri est -tan ks -int l -scre w -bu m -ab dul -ou x -expla ined -ric a -imag ing -law yers -bu ried -ãĥ»ãĥ» ãĥ» 
-ear l -âĢ ķ -l ton -resto red -stri pes -fo ss -de mands -ste aling -alex is -mun d -ak er -ur us -war dro -hu gs -gen re -e go -Ù Ħ -particip ated -bab es -ban quet -ti ous -he mi -ds b -lo st -milwau kee -jen ner -ge m -ou tra -lo ses -id i -re ps -ðŁİ § -regu lation -fla w -f ang -vibr ant -ram p -ra ins -well being -so viet -vie wers -de po -libr aries -bi go -ser y -g ill -de struction -co z -c x -bri dal -al ds -plan ted -amate ur -lu d -che ering -show cas -pro file -i u -ver tical -pack ers -wiz ard -ski p -s light -be au -air ways -mu ch -re ra -ðŁĮ Ĭ -ab sor -pati o -pack ages -s ells -ment ally -ðŁĺ ¢ -reyn olds -k are -tri bun -wal t -kn it -ta ste -sur rey -boun ce -cre ature -b are -bet ting -su re -mi ley -laugh s -al ore -cy n -t l -arti st -ann ah -war mer -dynam ics -lunch time -mariti me -vulner able -ðŁĴ ĥ -wol ver -dur ham -const antly -am in -si bl -: @ -bul let -k ach -angel o -wil der -doo m -desk top -law suit -k ca -hen derson -inv iting -bet ty -ta wards -ra fa -le aked -and i -ge ms -af l -vel o -mediter ran -pro be -to tten -steph anie -sn ation -com be -q s -over come -assas sin -ra v -fil ip -winni peg -sh il -determin ed -k as -ou tre -regre t -gui des -aa a -ðŁĺ Ī -wi ves -mani fe -er ly -sm y -sh ima -x ing -pix el -jac ob -ac commod -to y -on o -po o -ti er -an swe -ðŁĴ ģ -ro sa -le ase -bel ongs -th ar -eventu ally -nei ther -go a -ski ing -at ra -ag h -broad casting -f ury -py ram -d ice -volk swag -wom ens -provi der -bom bs -miss ile -whi p -d ick -nor we -back up -el der -mat ure -concer ts -gi ous -sque e -good morning -bra ves -^ _ -au ssie -lun a -mal es -he ck -for tn -rome o -steel ers -p n -pe er -re presents - « -kat y -migu el -requ ire -cha ins -l ur -immedi ate -ti mber -âĸ¶ ï¸ı -advoc acy -ex port -an z -tiff any -auth or -ðŁİ Ī -du des -chil ly -hi d -har m -bu g -mon ster -terri er -tu c -story telling -ta k -in ti -immigr ants -b is -reach es -com passion -john ny -contribu tions -ðŁIJ ¶ -mechan ical -impre 
ssion -ran ks -ko be -men ting -bloss om -pab lo -buil der -bom bing -tw el -sul livan -om o -pe te -de mi -ku dos -w bb -t gif -mass ach -neighb or -che fs -eng ines -pun e -ga ined -phan tom -s days -ext end -gr an -cent ers -jac qu -dat asci -sleep y -el vis -answe red -s lot -con y -flexi ble -ti ally -le tics -% , -andre ws -si ble -mom ma -vin o -do x -invit ational -twil ight -j ade -ill ery -joh ns -f ou -p v --- -> -break down -billi on -prin ter -mon d -c bc -mag gie -legi on -du b -kur t -po or -paren ting -regi ons -bikin i -be ware -si onal -au burn -kid ding -amp les -sp an -con tempor -c ic -ha bits -ak o -pre fe -bud dies -it z -em ily -person nel -moun tain -ver sus -ðŁĺ ¬ -ear ning -s ink -dar i -u u -s win -i ster -bru tal -n ac -kat a -clo th -am and -ðŁĶ Ĺ -ne o -alu min -week ends -nebra ska -co des -delay ed -brun o -pro ven -in c -i ght -fl an -or o -lam bert -regu lat -w f -massach use -kardashi an -bern ard -fi esta -volcan o -grand pa -anc a -d re -st itu -mean ing -fo am -au ck -at ed -r l -hot el -pers ons -dy nasty -ell or -ma i -am ne -sty ling -avi er -e g -vege tarian -, â̦ -foun ders -sta in -g d -cy cles -sky line -trac tor -exi sts -tra l -kid ney -mar il -inst ag -se tte -addic t -tri angle -flash back -controversi al -z on -p ins -i as -tr ay -town ship -deleg ates -sp am -h ms -cr ane -peop les -o lo -fac tion -but es -on ica -deleg ation -new profile -eli er -mc a -w and -g ely -losange les -ber ke -ti ve -dis rup -zz a -cas a -jor dan -ford shire -ga thered -ic hi -atten dees -à¸Ń ภ-pe ppers -co in -bour bon -ern ity -ro tary -behavi our -jere my -team work -compli ance -tre mend -ðŁĩ § -bu hari -cam bo -bu yers -ha gen -bu ds -bay ern -mon te -sm ells -an za -ath lon -descri bed -work force -gi ving -ap i -invest ments -da il -sel ena -datab ase -th um -mor tal -stu dent -bu yer -do ver -gar ten -att le -loy alty -gen oci -holo cau -theat ers -ru ling -ven us -pat ent -ch un -ab by -awa ke -mass acre -bang alore -break ing 
-simm ons -ju sti -hal e -ed chat -gg les -haw k -mar king -head lines -stro m -co ve -breath taking -med als -hair cut -christ ine -tele graph -gujar at -ju ra -can e -sho re -propag anda -mu eller -.... .... -sa vi -stom ach -thro ws -ta b -war m -j ong -reno wned -hi r -ra is -mush rooms -guaran teed -bo a -m j -revolu tionary -certi fication -bru ins -jo in -w es -pas sport -c g -sex u -cap able -w v -ton es -jac kets -ac compan -spin ach -fore ver -bla ir -wat ts -g l -cou ples -prairi e -newprofile pic -logi stics -massachuse tts -jagu ar -o id -we al -under water -mo z -y i -ma ths -myan mar -pre ps -suffe red -tr ace -wal i -ah hh -bor g -st itch -cu lin -real ise -infe ction -discrimin ation -sh ame -an kle -hu mid -y t -brac ket -tru ck -tri u -ea ster -commun ity -post card -invol ving -ty ler -car amel -over view -ex amples -integr ity -base ment -instru ments -ani um -at us -gh er -laun dry -achi eve -gen eva -pr icing -hyder abad -beli ef -me ta -j aw -accoun ting -lead er -cristi ano -cou ture -cy p -vis ed -, ,, -k nu -h ick -break er -br am -ra b -mo or -ham as -gradu ating -pupp ies -ak h -ta h -ach es -ri e -op ini -g ta -re ign -tra gic -re ver -p ill -pine apple -tou ches -da re -le ys -il o -inter iors -sc outs -bar t -en zie -don o -bro ck -christi ans -ense mble - · -cine mas -new port -air line -win ston -le igh -cont ents -pre scri -ur ge -tr out -fic ally -il ia -sub si -are r -âļ¾ ï¸ı -w ounded -ðŁĻ Ĥ -pe pper -ðŁĴ ŀ -fit ted -af f -re sur -thursday thoughts -z ero -archae ology -di v -je e -i on -awa iting -co zy -beauti es -bal d -dat a -gri zz -stal k -kin ds -cle ared -jess ic -regu lar -ali ens -plac e -bo s -bi zar -thisi s -ðŁĴ Ģ -totten ham -ma fia -s lam -ari ana -car roll -back pack -care y -uni v -r g -pe p -dig it -tatt oos -ag on -volunte ering -diffe ren -consu mption -ka thr -head phones -t shirt -o b -ele ment -re tail -sh ru -al gori -contain er -consci ous -fi l -com ing -ra sh -u rope -def ine -gi or -femini st -flow 
ing -rout es -gl aci -fer t -somer set -ant es -twee ps -$ $ -h our -endange red -year sof -ro h -po pped -bac king -ba sil -bra ke -mon aco -lgbt q -pra gue -ut ility -cas si -gate way -haun ted -sch ul -ðŁİ µ -shou ld -walking dead -comple ting -dann y -montgom ery -pengu in -ss i -mer chandi -ðŁij ij -chur ch -h ates -cap tain -brea thing -ce t -fair ly -approach es -compan ion -surpri sing -kany e -pe y -hin di -targe ted -lor ds -de ut -di gging -ger man -ru t -ener gy -close st -y un -apo logi -ภ± -s ack -ru p -dd y -port al -d ough -b ats -ðŁĵ ° -at ur -graph er -pi res -mo tors -ðŁĮ ¹ -j c -dan g -tu k -clu e -us c -pag e -d less -bro ws -ju s -ad ing -re marks -oo m -car dio -ste fan -arm strong -âĢ¢ âĢ¢ -ni est -belgi an -bi op -so y -lo f -í ĥ -q t -flashback friday -ce e -ģ ภ-wre ck -mar ines -amend ment -wardro be -vo y -bur ned -guit ars -ra inf -li fel -ssi l -oun ce -exter nal -c key -me sh -she ikh -inv itation -sugge sti -pop corn -phenomen al -an onymous -tun a -chic ago -o val -del y -loc als -( & -pro f -no vel -fin der -spar ks -la ven -in fu -nic ks -qu ant -ra e -exe c -dist ingui -st ances -mu tual -sh al -unve ils -edmon ton -zan ia -a dio -vie wer -brad ford -audit orium -qu is -re act -htt p -l ero -chee ky -impac ts -ta k -ed t -desper ate -t ay -ì Ħ -sett le -bar gain -resu me -un ite -thro wn -ke st -se ys -mar ching -am it -decl ine -sch ar -me tr -stan ford -lin ke -ber ra -dol ls -rug by -jam i -b or -road trip -dino saur -mi k -sun der -re m -b k -over seas -nau ghty -imple mentation -iam srk -lun cheon -fir ing -mi ami -pere z -the e -z on -gi fted -con version -ceram ic -¡ ï¸ı -pe dro -ì Ĩ -v ick -! 
@ -he ed -si d -b w -docu ment -pl un -gr ants -fant asy -predic tions -vali d -car ved -gradu ated -ðŁijį ðŁı» -nation ally -ch y -af l -re sso -blan k -ri vals -j ig -e ties -om ics -une mp -b ound -sk o -inspec tion -par al -high s -cri sp -b ans -ob a -[ @ -co spla -costu mes -rec all -mou th -ni gel -b ts -ter a -ko v -do cs -west minster -dic t -gra vity -kar i -ro gue -t ted -war k -ida ho -w end -aw i -queen sland -proce sses -cli ffe -m ick -com pens -op ol -the y -cl ari -wiki pedia -salman khan -haz ard -pre ston -swee test -pd f -che es -tr ilo -south africa -bur nt -( $ -con tain -t p -sub mitted -sound cloud -at u -re z -word press -corru pt -n f -ma ker -í ķ -par as -adv ent -ri al -ca fe -fo ssil -!!!! !!! -co ws -c j -sp ur -institu tions -land mark -ent it -re ut -h is -alz heim -we mb -regg ae -mo squ -st at -identi fied -deal er -re am -re land -ten sion -ðŁĩ © -wra pping -deep er -fr at -red dit -ar is -moroc co -.. " -b low -ma pping -pri orities -ing a -swa p -re wards -conspir acy -creati ve -c j -congre ssional -vau lt -ple x -sophom ore -shad ow -ele ss -ðŁĺ ħ -dar ts -aldu b -anno ying -pro ps -n as -alumin um -h bo -offen se -j ill -oni ons -la ur -ta e -har dest -sh ro -ga ining -meas ure -ed tech -cyp rus -tar a -ang eli -car lo -go on -all i -im plic -ju pit -resil ience -ha il -bal anced -) ... 
-joy ce -gr a -th eli -defin ed -shi pped -main ly -min a -l m -sac ri -o ber -p im -claim ing -ent ers -co rey -bo k -cri ed -cool ing -dani elle -pharmac y -thor ough -ca ke -k lo -outre ach -z ens -digital marketing -val ent -sn p -her b -mr w -caf é -cap tures -no tre -triu mph -pan cakes -cu mber -spi ke -d ation -bi gg -sp er -crit ical -am al -too th -foun ding -a stro -' # -quan tum -th ames -un c -pri de -air bus -kno cked -un defeated -mediterran ean -cal cu -clo wn -sens or -ham mer -for give -cu shi -ber ry -maje stic -elec t -polit an -g ta -k ari -bur ke -sea hawks -volkswag en -re i -landsc apes -cas u -grand father -list ened -/ / -star trek -rainf all -fur ry -vi er -star k -rif le -ff a -leg es -hillary clinton -min us -correc tly -architec tural -pre ce -up side -box er -ðŁĻĮ ðŁı¼ -is ai -de t -pro vo -tis sue -spoo ky -ve led -re con -prospec ts -que bec -âļ « -ig no -anat omy -shap es -w p -p interest -hor e -an es -pick up -ti p -pra desh -hu gh -co e -po k -gram my -well ington -sti gate -ri gh -lea p -king ston -scen ic -go sh -v ani -au g -s ary -zi er -bure au -lin son -con te -fra gr -all an -g aw -lan a -colli sion -surve ill -ren ais -ar range -s ali -do in -br ance -bren dan -our se -in coming -suspen sion -à ´ -l la -educ ators -in tri -da e -bio graphy -bul gar -villa in -go thic -rw anda -e w -may or -meet up -democr at -mor gan -su dden -te sco -car rot -bom ber -mck in -re ne -fun day -agricul tural -haha h -show time -form ing -col a -scor pi -quo te -po ppy -s life -d az -tu b -ne n -mo t -ðŁĺ » -s ore -elder ly -o ve -skin ny -um i -anc o -man ship -we re -g v -k ah -fol ding -ne at -samanth a -dan ish -uk rain -humid ity -nu tri -jak arta -cand les -oooo oooo -at ile -streng th -i bra -bap ti -charle ston -fr ames -girl s -clear ing -glu ten -# # -super natural -ju bi -ph one -he in -dr un -le ak -invest or -y er -dom ain -ball room -mi sh -app li -off shore -bla ze -dor o -âĺķ ï¸ı -win ery -shar if -ad ore -n ir -saf er -si 
gh -as cri -strong ly -trac y -ck er -ol l -faith ful -ey ed -deli ghtful -vis m -karnat aka -tit an -wh ar -jer seys -re fur -heav en -gri p -pan ama -pre li -glu ten -o dd -cont ent -pon ti -tion ing -e commerce -feder ation -flaw less -ge ar -ti res -by r -pol ice -cu ban -tri butes -tic ul -chur ches -nur sery -di aries -muse ums -snapp ed -i van -wi ght -touri sts -ramad an -t rent -prophe t -won dered -focu sing -hi d -ic ons -i q -ambul ance -pi st -fun niest -time less -sr ilan -bu ys -ki ds -colour ful -a shi -ch ir -mu m -ðŁĵ ļ -let ter -x en -reut ers -pre serve -in ting -ste p -fu ji -uni ver -i u -show down -po ems -surveill ance -suspec ted -ta e -sol ving -tom b -mother sday -car pen -recru it -pil ots -bro c -mix ing -fri days -ty r -represent atives -tra pped -abdu l -free style -clu ster -âļ łï¸ı -k d -sk ill -pit t -ex o -commer ci -muse um -loc ally -g ina -no bel -immun e -fr ac -cap su -main ed -attemp ts -bull dog -be spoke -sing ers -sp elling -seg ment -nat ures -tic k -lip stick -clean er -gett able -preci sion -â̼ ï¸ı -th ood -re ef -no pe -bill y -di gi -mu si -ri val -figu red -tal ity -sun ny -ber k -aw ww -awa its -un real -co pen -asy lum -ex otic -bu en -mo ck -en able -arch y -fr a -pla stic -al mond -amp li -displa ys -abbo tt -s me -x p -ðŁĻ ĥ -graph ic -i ved -mar a -cau tion -lea ks -en berg -ul u -unic orn -cann on -appren tic -ðŁĺĺ ðŁĺĺ -b ball -wil low -at ics -am as -manufac turer -campaig ns -port ers -flo ors -l su -ty pe -ke j -honor ary -it im -to le -min ecraft -d x -ma sh -ri o -consequ ences -ron ald -go ssi -suffol k -mu se -r bi -live music -i van -ðŁİ ¤ -le u -patri ot -man it -lan ca -home decor -de ar -sig ma -ti de -str ings -v ita -sequ el -try na -inve stigate -bor is -ve gan -barri er -mind fulness -web b -hu stle -in da -tan zania -str ay -tex as -c ag -diagno sis -wom an -g w -ob session -l ative -nu fc -fl ynn -moment um -sof a -wal d -vege table -tu cker -supp er -se ab -ar ro -se ag -ven ting -counc ill 
-sp lat -cal cul -.. # -com fy -odi sha -sto pp -war fare -ca es -à ¨ -co y -price less -in sec -ðŁĺ Ľ -contro ls -empower ment -datasci ence -per pe -gen ic -e res -tru deau -man o -sla very -expand ing -ma he -fa iling -s aga -photograph s -cre st -re on -surf ing -hi e -ðŁį Ģ -ja e -fel lows -south ampton -sol om -ce ster -tab ility -hor n -se ct -he e -cole man -at las -explo rer -consul tation -copy right -organi zing -den ied -mon keys -noo dles -br is -fl or -dou gh -bon ds -sho cked -eco system -care fully -w m -apart ments -cur ve -san diego -must ard -comm en -cere mon -e ch -ru th -ðŁĻĮ ðŁı» -hawa i -fil med -te ar -as ingly -ca ir -wat t -instru ment -ou tta -ye ol -river side -ë ° -. : -nor wich -alo g -migr ants -new man -ri de -spr ink -targe ting -beli eve -tor ch -reflec ts -per mission -ff man -ene mies -bas ics -se ized -sun days -le i -hass an -en do -h c -st ad -le ments -kk kk -nan o -shar k -man a -on ic -treat ments -ear ly -collabor ative -shu ttle -bran ches -mis ses -mained cm -ap ers -ky le -carri e -leis ure -sh et -bir ding -adv ances -ðŁĵ Ŀ -popu lar -di ane -a be -re war -neigh bour -k pop -remem brance -play ground -ru b -krish na -e bola -inqu iry -ep a -lu min -organ isation -abra ham -norm ally -pre ten -jan et -w t -ðŁĴ İ -encoura ging -a stic -bu mp -syd ney -s z -ss ss -gar rett -ðŁĵ » -consul ting -roman ia -spo tting -chanc ellor -ar ma -presti gious -ðĿ IJ -t ad -cry st -compe tit -rati o -cat aly -bro w -j ur -vi king -commu te -y day -la yers -du mb -esc al -genoci de -f ill -gu pta -ste pping -se i -fo to -wild cats -col i -projec t -ear nings -st r -ge ons -comple tion -b m -decor ated -craw ford -af ghan -sc are -visi bility -hi b -direc tion -stro ll -christ ina -alter nate -cl are -sty list -be hold -s ance -leop ard -acqui red -narr ative -ash i -the a -?? ?? 
-pe as -at ch -sli des -le en -renew able -eng lish -qu ir -co aster -r x -fo ols -match day -mis m -amaz ing -z ig -ke ting -won t -to wel -di ab -sta ke -n m -mel t -e than -gra pe -polit ician -sm en -í ĺ -re o -wedd ings -cat cher -or acle -me mo -ðŁĮ ´ -ec k -rob bie -norwe gian -oper ator -am or -se wing -ju l -x ie -u v -fif ty -me ga -tatt oo -liber als -u pri -traffic king -richard son -su v -ki p -mess y -tremend ous -gl ou -cour tney -la d -stere o -my ers -i dio -^_ ^ -man ning -dy e -w d -thr one -jun k -as u -provin cial -k ook -wr c -fine art -hamp shire -renais sance -b red -fall out -s j -sn l -al am -tor ture -fy i -sh ines -pa w -ch ar -hen ry -c row -aci ous -di an -pa ige -ba re -stock holm -scen ery -ðŁĩ · -jef frey -pu sh -decor ation -ne d -cu te -brig ade -laven der -inv ites -e sports -vo ir -dri ed -tran spl -sur geon -no vels -pul ls -son y -lun ar -man e -i vy -fru str -dor set -sa i -tor res -ssi on -shut down -suggesti ons -writ ing -e o -battle field -u ga -ðŁIJ ¾ -vac u -spl ac -g it -u g -high land -% ) -mer maid -sacram ento -ta ils -p w -ka h -t ell -enh anced -ì ķ -auck land -cru el -ðŁ¤ © -au dre -sail or -gram mar -g love -de on -infl am -fresh ly -k ell -zi p -christi e -mil d -di xon -instru ctor -g ence -ãħ ł -sub jec -constitu tional -crow ds -in visible -ru ins -da k -si p -pla que -p ouring -comple x -z ine -ste ad -f let -trans mission -lo way -ar un -incre asingly -au d -transp aren -cro wned -sc oun -blizz ard -lux u -fi ers -achieve ments -hun ters -rock ed -bas in -vio let -pro ves -achiev ing -pro sper -se ga -flo at -vi an -xi v -pol ic -tur a -approxim ately -wander lust -keep ers -geta way -co d -pol is -br yan -col ts -tal ents -yo gur -gluten free -wri st -gr y -cze ch -ðŁİ Ī -ev ille -ðŁı Ī -to x -dani els -am er -bi ds -weare one -me tab -g t -boy z -pd x -pos session -pu shed -shr ine -reali stic -tri gger -na vi -ru mors -n af -jen kins -tr un -comm uni -Ã Ĺ -gam ers -arm or -moham med -bal cony -y ah 
-stron gest -rhy thm -unfor gettable -k p -ho bb -custo dy -greg or -r ita -aes thetic -il ation -sponsor ing -n ay -kid napp -sh s -ra jas -me g -signific antly -butt ons -la c -ver sions -essenti als -opini ons -k ro -d printing -wi dely -d k -ur an -y al -reque sted -c n -cur ric -plu m -gr un -v m -dev on -m yo -rel ation -juvent us -rou ge -min ority -min es -jupit er -n ine -oxy gen -fran kie -une sco -fab ric -disgu sting -sal man -dete ction -lan ka -d ac -ðŁĩ« ðŁĩ· -argu ment -shel ves -cel tics -rober to -pi gs -he dge -fau l -pow ering -butter flies -fi r -re make -att i -com o -emp ha -kend all -poke mon -se ating -d ans -bald win -ðŁij » -lesli e -one direction -ti mber -im an -fon t -e der -di on -ste ph -for mat -gre gory -pro p -he x -ru in -sor y -inf er -n aw -bar ak -sd gs -kar ao -lu sh -v ander -end ent -g is -a fro -soc cer -ay an -t uni -lun g -da yof -alex a -mar ath -addic ted -ag ile -hy gi -light weight -ì § -mand ela -jo ey -anc y -hu m -bi r -memor ial -jim in -ging er -v ak -jav ascri -cro ps -orig ins -d ari -pi per -im port -aggre ssive -predic tion -re pairs -cr acker -voy age -ni ke -mu mmy -linke din -country side -bor der -gla ss -per t -s als -sho e -autograph ed -wal nut -colle gi -sal ary -pa iring -ðŁĮ ¸ -cath ol -swee the -defe ats -streng then -roof top -impro vements -barri ers -ur u -t ally -ru led -ðŁĨ ļ -nai ja -emo ji -per cent -gi o -pro bs -on ce -adm its -pa ths -li ar -day tona -pe ters -cal i -cal li -mu g -o sa -ap h -ab y -hy de -eth nic -pla ins -ol f -haha hahaha -holi c -?! ?! -su bli -bl acks -mo t -gh ton -lo vin -b rent -bar u -l ati -de w -ate au -q a -pain ful -bu sters -st atic -ðŁĩ¨ðŁĩ ¦ -note book -out fits -si es -r f -floo ds -Ñ Ģ -thro at -su ici -ro vers -beng al -pre pares -blo g -mini ature -Ø ¨ -am phi -com b -r sp -in timate -green e -Ì ĩ -al tar -surg ical -ves sel -... ? 
-gav in -g ator -threat ened -z ar -rob bery -di er -promo ted -y g -x s -su bs -inter viewing -threat ening -do zen -me ado -water fall -nintendo switch -cal um -mini sters -dro p -univers ities -war ned -tac tics -ðŁĩ ² -refu se -ad ju -v ast -ðŁĺ ´ -mc fc -lib ya -no filter -distribu ted -re ser -ron nie -de co -javascri pt -mon k -intere sts -fle x -mar tha -sti es -oo d -ðŁ¤£ ðŁ¤£ -e un -b ali -g omez -sti mul -moder ate -d ity -ir is -stra w -consist ent -direc tions -adop t -sal sa -cro o -reco vered -black friday -lan caster -accep t -weareone exo -buil ds -free man -air plane -diti on -bel ong -jam ie -pit ching -li f -om in -cri spy -pre pping -ve g -chan g -accompli shed -graci as -dolph in -elec tor -culin ary -super bowl -wal a -pur suit -black berry -be an -cardin al -pro ved -immigr ant -stric tly -holocau st -pass age -ha us -cou p -pur se -har ass -< < -le ed -ado be -st ad -legis lat -par ked -pri yan -sil va -kri st -s the -fun ky -ig a -sett lement -ph s -t mrw -stre ssed -hun t -ho ckey -treas ures -cham bers -ol u -hu t -mar ley -tex ture -wilder ness -mm ing -poten tially -om aha -ju dy -to es -spo iler -distingui shed -feli x -ah u -recommend ations -zom bies -hit ler -tri ple -colla pse -motiv ated -ulti mat -gg ling -so y -ci gar -fo ren -vine yard -gl itter -fin dings -colon ial -hun ter -eri k -den s -beet le -lot te -sub tle -s matter -tru sted -experim ental -nam ents -ðŁĺ Ĩ -regi on -acquis ition -bre eding -quarter back -am reading -oo td -ru de -initi atives -st out -hy ung -out come -al fred -mic s -exper tise -bacter ia -pengu ins -jump er -valen cia -bar k -ing day -sell ers -contrac ts -hou ston -commissi oned -adap tation -swan sea -santi ago -common wealth -ju dging -sub mission -sco rer -tom my -ñ o -ex quis -fil ing -explan ation -alli son -wemb ley -ri dge -chev y -san tos -own ership -cogn itive -favour ites -sh ed -phil anthro -dele ted -go dd -s nor -gui delines -ff ing -je ep -cli ps -sw amp -an or -guil d -bol ton 
-spring field -munici pal -goal keeper -ye on -ðŁĺįðŁĺį ðŁĺįðŁĺį -ãħĭ ãħĭ -water front -gra ve -contempor ary -ar ity -ÃŃ a -sle eps -sy rup -al am -pi re -co yo -moto gp -ty son -kej ri -cir cul -sing ly -cr unch -complic ated -nostal gia -k op -mo ve -k ale -mac ro -mid west -h ans -tri bal -nu de -௠į -bey once -congratul ate -cat er -leagu e -ðŁĻ Ĭ -la dder -cra shed -tech nic -karao ke -harass ment -ro ts -experi encing -kri sten -ðŁĩ ³ -ðŁ¤ Ĺ -reflec tions -guin ness -illustr ator -ðŁĻı ðŁı» -cen ter -nar row -comm ons -regul ations -Ù Ĩ -har m -cro ft -cu ssion -hong kong -st ical -intern ship -zo e -cho p -hoo ds -estim ated -batter ies -berke ley -smooth ie -shau n -cro s -~ ~ -cam pe -hu mp -b g -proto type -cl ick -shaw n -re viewed -tem pl -p f -jed i -blo gs -ray mond -as th -ba h -av ail -scot ch -leaf s -nik ki -to k -hol low -ur ges -of t -un like -lat in -u e -cat ering -mil i -alter nati -ma ver -Ð ¸ -ag le -pre order -lu x -cu cu -ðŁijı ðŁijı -t art -âĿ¤âĿ¤ âĿ¤ -arab ic -rapi dly -ar rang -all en -travel tuesday -pa ws -flo ws -st ability -flu id -ca pp -can berra -uu uu -sp ani -demon stration -m la -plac ement -m w -presi dents -awe som -bever ly -ani st -ne al -father sday -referen dum -la hore -o aks -deb bie -half way -gho sts -de bor -matthe ws -fi at -t fw -pre sen -rob i -de d -bro ck -laugh ed -am ounts -bam boo -kinder garten -eat en -mtv hottest -break out -u sic -fra ser -legis lative -p ang -modu le -sam my -go ver -ear ns -expe dition -gar h -concep ts -char lie -la va -bachel or -veg gies -deter mine -el lie -un locked -fru it -dal la -cou pe -wash ington -depo sit -iv ory -pau la -chic ag -gu cci -ðŁİ ĥ -cul tiv -pier ce -li fted -stu mb -re cover -musc les -conduc ting -cb s -mcla ren -sophi a -cel lu -oce ans -up loaded -game play -mal dives -kim ber -avo i -rac er -ca ine -cav s -h ana -li ga -ra ven -inter vention -inaugur ation -oo h -at traction -merchandi se -tune in -li king -juni ors -int ended -att acking -aqu arium -i wd 
-comp onents -sur ing -cent u -yogur t -ðŁı ĥ -show room -op tical -ty our -ju dge -yi eld -an to -pl c -transparen cy -recy cled -chi ef -ar om -ambassad ors -plan et -âĿĦ ï¸ı -om ed -vaness a -cour t -mar gar -hal ey -v r -reg ina -pd ates -hi span -live stream -âģ £ -ya hoo -gal la -secu red -w ir -bene ath -off l -n il -am b -ye g -out let -u te -pe ep -lind say -bent ley -... ! -he el -trilo gy -vo s -ty re -there fore -tor onto -ab i -simp li -ja e -exten sive -eleph ants -s or -orient ation -im peach -re play -constru cted -peter son -pa is -por ted -custom s -colla p -ad u -high lands -sal em -shel by -ko vic -stra in -ro sie -sen ators -snap s -bo bb -suz uki -bla des -k p -lo lo -gener ate -si ght -ma e -struc tural -predic t -jump ed -ah mad -sun g -just ice -gla m -vol vo -jubi lee -de tention -lo sses -pu ri -every time -Ð ° -ra o -ed ge -li mer -rese mb -har old -re tri -sacri fic -surpri ses -am c -srilan ka -bar bie -men s -fin n -ag s -ukrain ian -em brac -î IJ -flav ors -hom er -lau re -ou th -pr iced -ver de -fir m -ah s -cu b -tre y -par anor -pro fit -in dv -who a -har sh -al ot -crit ics -hu bby -fi gur -gi ra -ca stro -chan el -in put -origin als -ten ant -yy yy -ture rs -lincol n -co on -lear n -ch ou -ac are -o les -din er -hy p -bizar re -mc r -let sgo -decor ating -ðŁĮ İ -al ison -ar vin -f d -reha b -mccar thy -lot tery -da h -minne apolis -eli gible -diagno sed -emer ald -destin ations -s ans -or y -bla zers -n v -ba il -digital art -no c -mal ta -sol ar -pi pes -alleg ations -no ck -po pe -bri d -premi er -n x -present ations -ef a -bo ws -val ve -opp onent -Į ë -visu al -ing le -cate gor -e ter -po is -dan i -at tract -neu tral -th ene -cra shes -fred die -ut ili -c st -awak ening -slo ven -quali fy -pro of -fair y -le v -fre ight -enjo ys -cup cake -flav our -â ķ -protec tive -ðŁijı ðŁı» -is u -ad mir -h mmm -continu ous -ai res -rap tors -showcas ing -y uk -pa ste -follow er -instru ctions -sp ru -@ __ -the o -debu ts -ve tte -sto w 
-es of -ach ed -sul tan -sand wich -som alia -franc o -car ne -flu ffy -al pine -jas mine -he ated -viol in -ple ss -divor ce -per former -phi es -port sm -dar a -kir by -lo p -chill i -for th -sky pe -ðŁĩ®ðŁĩ ¹ -celebr ities -ed y -ve e -po ison -ey el -gra bs -ssi c -un o -wester n -rail road -am er -numer ous -s v -fo w -fi st -âĢ ĭ -reque sts -mar tial -em my -accept ance -lau ra -ภ´ -er up -hyun dai -out lander -u tt -wrest le -esp resso -demand ing -g dp -geo graphy -sas kat -tro ll -confe der -su es -se m -be ts -t ful -to sh -teach es -col oured -gal way -mac y -dis orders -bb cra -at em -fen der -lit ter -e sh -provi ders -renov ation -nomin ate -ps g -nomin ations -jen na -shar p -some day -z ur -bra ins -che shire -pre y -hu go - ¿ -to ken -r v -car r -tac tical -zel da -kay la -fern ando -photograph ers -j our -umb rella -woo dy -congress man -du mp -le vy -ju an -d azz -sign als -la in -an u -mic hel -por ch -al den -sibl ings -y ale -pe el -sw ick -gg in -ll c -k ale -s con -il d -pat reon -re el -qu in -wit t -mar ty -moo dy -ton i -der y -g ators -speci fically -dd in -ly on -tr ick -meado ws -p j -bor gh -vi k -tu r -bron x -pu ff -lan tern -ðŁ¤ ¦ -g ently -be stie -fac t -refu sed -fas ci -mp y -ðŁĶ µ -cross over -mead ow -indian apolis -duc ation -sle y -loo m -mix er -new music -film maker -prosper ity -li m -week end -cre amy -neu tr -lu ther -h v -nor thern -tw o -h ra -cat ches -appear ances -ha bit -kitt ens -n v -illa c -inf an -regar dless -liz ard -dun k -cur tain -ac om -in tu -ve z -e min -fl ats -calend ars -em power -ru ined -hun gary -vi d -we x -u lum -aber deen -o sa -k t -ma ssi -se emed -s den -' ? 
-tele phone -de fi -insp ires -me ow -z ones -bl ind -pl y -tuc son -advent ure -ge d -oy ster -ðŁijıðŁijı ðŁijı -out put -tt t -metal lic -sma sh -ucl a -sco ts -perfe ct -lu cy -regular ly -sp ic -rel ative -ath ers -mis e -batt ling -deci des -mat a -occu pied -random ly -cat softwitter -gi an -ball y -al ties -al lies -im men -sy rac -ðŁĴľ ðŁĴľ -l lan -au r -k ut -lam ar -affe cts -n ra -star war -ðŁ¤ ĺ -sc ram -en chan -pro cess -luxu rious -ar ray -sher lock -comp ati -dor f -stre ss -m su -s with -sal a -sof instagram -fo il -under stood -qu ay -r p -c ade -ja w -en ab -en coun -ðŁİī : -do ck -satur n -mu ll -lay out -ra rely -happ ily -fix ture -or ph -over looking -her bs -m itt -pil lar -nol an -pe tty -str y -u i -mu k -o res -o vers -á µ -re creation -we sley -ri t -kejri wal -sto cking -g v -subscri bers -moo se -ma e -ber t -opp re -assign ment -u ro -high lighting -cal vin -we igh -cambo dia -av on -ke m -dis abilities -read y -char gers -p ads -iz ing -illi an -tru ste -col leges -associ ates -alban y -mil ton -cr on -bu r -har dly -si ghts -anti ques -e cho -surpri singly -ha iti -cap t -ph p -op io -ine quality -equ al -ken y -sch mid -autograph s -ren t -qu er -cit rus -challeng ed -te c -epi de -fe st -z hou -li me -citizen ship -cry stal -convin ced -mess enger -copen hagen -âĿĹ ï¸ı -war ran -develop ments -ï¸ı âĥ£ -fore x -hi ro -sne akers -xi de -vi va -stere o -bat ting -ss el -ho st -beng al -critic ism -q c -cr un -attemp ted -ry e -determin ation -cre ations -d read -label s -pos se -anc er -joh an -si ster -partner ships -les bian -k st -guaran tee -bar o -fix ing -ma son -m ous -chem icals -t less -bio diversity -par o -bhar at -ac ol -refu ge -en te -t iti -dys sey -respon ds -lef to -in er -se vel -rahu l -ol ine -frank fur -cho reo -enjoy able -c to -strugg les -wood land -heavy weight -gen s -rece p -ac cred -ðŁĺ ¡ -trans formed -list en -at op -n k -sur ge -be re -gover nor -prison ers -clau de -t ill -mu lator -emo tion -water loo 
-star t -ðŁĩ º -clean ed -grand mother -fear less -afric an -astron omy -ðŁı ģ -à¸ Ļ -the world -su itable -anth ony -k and -tt en -meaning ful -disc lo -jaco bs -à ¸ -tom linson -ghe tti -ty pho -sub stan -as co -te k -nag ar -mu d -am on -vacc ine -f ty -fle sh -no el -infl ation -portu gue -glam our -tra m -v re -te qu -roun dup -w yn -rejec ted -mosa ic -si ghting -cal f -o ta -com position -go pro -gonz ale -e ed -b ard -tu e -effec tively -we en -al to -ri bs -rel ate -thir sty -fu rious -di m -ch ard -perfu me -s ny -chur chill -k of -master class -wa ve -ðŁĶ µ -er in -own s -to be -sk illed -te m -go f -en i -tor i -cra zy -l ick -resi stant -ici al -ag ar -! : -g ali -del aware -bl itz -koh li -pu ck -avail ability -hi malay -influ ential -cro chet -victor i -read ing -ho bby -vie t -j as -en gra -sk ul -ðŁĩ² ðŁĩ -educ ate -tech no -distric ts -blu es -se tt -seven th -lear ns -ee ee -apocaly pse -hang out -cru el -mu tu -bru h -hel en -she er -c tion -kle in -tex ans -ce real -sh ine -ne red -gra s -am bro -f ella -hin du -matthe w -li ma -mir anda -je wel -so ho -euro vision -neighb ours -chand ler -be sides -ðŁ¥ ° -ast ros -thu mbs -ren ault -ra ve -hi red -ðŁĸ ¤ -it ary -z or -bla zer -k ine -ea u -kat y -dc comics -pe c -ro dgers -water proof -kill ers -super int -pre serv -as so -brew ers -promo tional -sc am -villa ges -sket ches -ju icy -for life -au dit -so lo -fundam ental -len e -philipp ine -t end -conserv atives -sponsor ship -dd le -a ine -h tc -os i -hul k -w af -à¸ Ļ -evalu ation -ant ine -sle e -robert son -roo sevel -ag i -sophi stic -emplo yers -bubb les -ko wski -inter action -sh u -bou le -ic an -j are -han k -leg itim -k nicks -kar ma -recei ver -per ks -u h -sta ir -sun i -labor atory -gra ves -voc als -oo t -c ture -thri ve -tic o -ãĥ ³ -b w -carto ons -mcdon alds -dra w -y ung -pl er -li d -eth ical -groo ve -ent a -international womensday -pat ron -wor ries -ðŁİ ħ -ðŁij ĭ -ka therine -di az -tor i -bach chan -tru st -min eral -ic 
om -buil ders -bor n -col oring -lat te -ca se -revolu tion -tra der -ox id -chi pot -inst antly -sou thern -se hun -pro b -her nandez -lis bon -hu awe -p ong -me a -ro oney -wheel chair -ke en -be tt -cor in -regulat ory -di splac -ka ren -sch em -sun sets -wh ales -remin is -he p -hi de -mar cel -pand ora -do yle -th fc -ot to -no kia -trans gender -ko v -hawai ian -sha ve -so vere -exc er -nick i -pu g -st or -ro th -wee t -leg al -dig nity -po w -hom age -ðŁĩ³ ðŁĩ -s re -can on -la x -wo ah -quart z -ñ a -gree ting -flick r -nai robi -advoc ates -an c -vi i -eu gene -th ra -c re -el an -pen sion -th letics -ton i -re agan -x v -sto re -ben ch -har lem -todd ler -sent enced -âĻ¥ ï¸ı -glob ally -che aper -u f -ma m -nic o -ik u -tho u -ni st -dam i -th ala -rho des -sal e -bow ls -â Ī -las vegas -sanc tions -adm ire -mat ched -un able -travel er -ele ven -straw berries -âĢĶâĢĶ âĢĶâĢĶ -stu dio -jac ques -im s -valu ed -s no -cheese cake -n xt -e os -s x -f x -ton ic -hat ch -chic ks -gra ds -hand ic -r ory -as p -ri pped -denti st -n en -lu fc -âľ Ĭ -di ge -hop kins -sher man -f da -for all -ash ley -str and -h y -liqu or -buffe t -ess ence -phar ma -suri ya -ðŁĴĻ ðŁĴĻ -festi vals -z an -re fresh -pur ple -uni forms -kenne th -= ) -as an -hel sin -transform ers -k ali -person alized -chal k -bo bby -â Į -the mes -depar ture -prin t -illustr ations -qui et -agre es -gri ff -Ø ³ -m iti -toge ther -conven ience -ab ar -car lo -turt les -info sec -some what -ar lington -scholar ships -emir ates -mu ms -st ella -auton om -fe ather -g ore -nom inees -fragr ance -Ñ Ĥ -w ong -thea stern -gr e -z illa -is i -bump er -go o -do zens -ab duc -âļª ï¸ı -o ils -don ors -sil icon -i pod -fortn ite -ðŁĴ ¨ -tor o -spark ling -consci ousness -pal a -nu m -moun ted -ffin s -thi eves -team mate -pra b -om er -ta pes -bo d -mit su -ste w -e re -p bs -tu sc -lo we -ra de -parliam entary -h m -ed gar -ðŁijĩ ðŁijĩ -to a -a gh -hon i -s late -ge ek -ap t -hard t -ta p -horiz on -grow th 
-make over -hi l -paper back -id an -reha bil -gi u -possi bilities -let tu -fran co -bo ss -ach er -does nt -mo e -ta ker -huss ain -ml k -di l -th ia -ham a -real ised -raven s -curric ulum -m ith -k night -ted x -r v -isai ah -cumb ria -birth days -f ing -pre z -mu barak -exquis ite -clear ance -y en -par i -ev o -à º -modi fied -app lying -imple ment -disco vering -chap man -indie game -dis k -crowd funding -mach in -li vel -sty led -âĿ Į -ma king -rehear sals -nutr iti -subscri ption -and ro -cre ators -car ries -ky lie -cam den -appren tice -tax pay -c ca -tuesday thoughts -pis sed -er man -dete c -freed om -mer i -.. ! -psal m -sun light -per spec -be ings -book store -rock star -fun ctions -p ence -fav es -z n -obam acare -sp ill -coven try -pi geon -pi vo -ba it -kol kata -av al -don or -wa h -privi leg -tra ditions -rajas than -ten ess -portugue se -yn es -tack les -de fic -tor n -pol ling -thor ne -in a -bened ict -bar ry -cal ories -ver dict -save the -nor ton -off ice -main stream -impro ves -fr on -respon ding -real tor -scotti sh -de clar -r l -shi v -supp lier -re sting -swee ts -qu i -. 
â̦ -whit ney -startu p -thank you -teach er -h alls -ha ve -hand made -pro ving -quar tet -ro chester -li an -virtu al -mend es -of icial -mid lands -x box -meas uring -o vo -accommod ation -bri des -collegi ate -intellec tual -in car -ni ag -ðŁį · -sf w -coco a -co ats -civil ians -presi dency -mat rix -sweethe art -tri athlon -wag ner -ra dic -plann er -the o -execu tion -k um -the walkingdead -sc ar -ro tation -blo gging -bom b -re son -bb les -st are -assi sted -e do -brand ed -war nings -thor pe -acknow le -satis fied -sho res -ri d -dor a -phys ically -bi gh -appro ves -ha h -ric al -vers atile -pret end -lu m -ab hi -ye e -sp it -ãĢ Į -dj s -ash tra -j t -ven ues -gram mys -cy clo -tr acker -over watch -repl ica -el yn -nr l -lind sey -hom o -ballo ons -kitch en -si s -am os -ende av -ðŁĴ » -a rec -thu g -hoo ked -hr c -new york -bur gh -americ as -patric ia -ug u -ap athy -ha st -psy chi -cor k -petro l -ðŁİ ¬ -ak u -po pping -psycho logical -au x -g ma -cad illac -wa ste -auth ent -bri stol -nam e -que er -to ber -jer ry -com in -ch ant -privileg ed -op ar -lo ser -tex t -mar ker -stri es -equ ally -ak i -christ mas -gare th -ble w -em ma -imag in -se als -che at -conditi oning -j ana -ren s -dar ies -o asis -disc ounts -coun cil -i ka -shir ley -vou cher -al ps -w x -q r -dri ft -attemp ting -ut c -Ø ª -gonzale z -m f -jo ker -paralle l -pa re -aspe cts -proce du -n p -am a -rale igh -bright en -gu ire -radi ation -cre scent -ho b -il le -str and -v ore -n ard -che st -di wali -av atar -al der -d ling -pa thetic -ðŁĴ ĺ -spir it -jor ge -film making -ðŁĻı ðŁĻı -challeng er -b j -down town -ht ml -ade qu -twi sted -in ely -( ' -wra ps -oper ational -y ne -n us -mag net -market place -health ier -snap shot -dam on -inter ven -fe derer -ow ls -biscu its -j p -ro deo -blue berry -lec tion -fron tier -summ ers -re yes -pede strian -go l -caf fe -refur bi -bou lder -me ghan -speci alty -la ss -e i -suspec ts -appro x -rr r -ra th -st im -cru shed -he d -wh un 
-lo af -cr ore -river a -gene tics -so ck -wa sted -ny pd -answ ering -do ve -bel la -ol in -du n -fi ji -pre tty -spar kle -y un -j d -euro pa -li fts -am ber -mu r -te k -boy d -roy alty -in do -ri b -go tham -ti est -inst alling -ke mp -the photo -cos mic -) )) -whole sale -loy ment -eas y -su ing -sett led -af p -pro ver -suppor tive -re es -ne ath -deli ber -c é -wel come -pic oftheday -new born -pat ty -sun s -si est -fl int -diffe rently -spo ilers -troop er -g ins -cor y -look out -equi pped -ta pe -to by -resear cher -u sh -ke yes -al ma -induc tion -k w -k har -sl ick -bri de -e ur -cra ving -book ings -ch es -tr unk -vern on -sp her -cryst als -rel atively -pom pe -uni ons -val ley -par a -w ant -ok c -de af -ser gio -len non -sh ay -cr a -v at -he e -t we -liqu id -pol y -ðŁİ ģ -b ent -be aring -motor sport -bar be -te sti -han i -fin ancing -astron aut -water colour -ri sh -comic con -gar t -wr ong -ber n -it an -ste pped -fil ters -c low -me x -dem ons -all o -expand ed -comm and -et ers -go ats -si ri -y r -pot tery -mari on -i le -el an -san to -person a -du ke -hom eless -li ghted -wheel er -chang er -cab bage -sur real -ham burg -sma shed -str an -k not -i art -ob i -be dro -di al -th ick -b ingo -fu s -vacu um -con ve -ati ve -accur acy -accoun t -re fer -ri z -spider man -ban a -r ite -u b -ab s -medic al -lin k -si em -> >>> -be tra -g lowing -re actions -pupp et -spa ghetti -ang s -re medi -pray for -roy ce -char lotte -£ ï¸ı -gh et -affe cting -ro de -soci alist -mo ses -az i -o it -re porters -cd t -ap ing -s nat -minim al -wa ist -sie ge ->> >> -ri g -schmid t -h are -ec a -thor n -he mp -es the -cly de -th a -don ut -moham ed -ling erie -le gg -carpen ter -perform ers -de a -imag ined -cur se -la sh -ct r -agu a -ro ar -gr i -ro le -j fk -resur rec -roosevel t -maril yn -sm alle -will is -wa ited -char ities -the res -li k -origin al -car i -c ough -cru ci -la gun -contra st -k ou -arm our -re moving -t ent -maz da -bri ghter -thi ef -cor 
ner -tequ ila -buzz ing -al bi -p am -az ure -disc oun -pixel art -possi bility -ham ont -tra des -bu da -hi ve -vers y -fin ch -tran spa -em i -terri fying -in qui -g ba -sub stitu -collec ti -plac ing -cin dy -k ann -pa tho -diamon d -mour inho -guine a -anthro po -air s -pu mps -ì ļ -pas o -cur ling -an ita -resi dency -ne wh -jo on -cigare tte -que ue -ex trac -gam es -spl en -ex press -public ly -bon nie -tribun e -ba ek -reason able -c or -timo thy -she eran -Ä ± -f dn -su tton -concentr ation -carav an -x avier -al ger -cy lin -freder ick -ner ve -pe ak -lettu ce -j ail -pre game -kav an -up graded -eco logy -squad ron -gra pes -goo g -pa stry -ðŁĹ £ -ãĥ¼ ãĥ -mil ano -awa z -presen ter -ðŁĮ ¿ -her d -king s -tem plate -fl our -h v -k ley -i ya -spe c -at er -frankfur t -co ch -tex ting -del i -communi st -regi ment -ele anor -anticip ated -ðŁijĮ ðŁı» -thephoto hour -ran o -survi ving -simul ation -daw son -ar in -aqu a -m or -â̦ . -cin o -ira qi -sh az -dun dee -we s -dra u -hann ah -s news -occup ation -ste en -x m -ang les -sett ings -gur u -kno x -or ca -shap ing -w ent -dr illing -zz ie -br i -kis sing -fin d -ma ine -âŃIJï¸ı âŃIJï¸ı -ðŁĮ į -lar ry -bu sted -ta vern -acti vely -- " -replac ing -no d -un lock -. 
" -âŀ ¤ -affili ate -to w -l n -happy newyear -di f -j m -green wich -contro versy -daw g -con dol -sav annah -compens ation -touch down -te o -amb itious -embro i -convic ted -iart g -bar ack -tr ance -testim ony -au dition -thum b -my ths -be x -que z -orch id -den y -entit led -hoo d -gr ant -in box -blue jays -r illa -smalle st -bur den -in famous -divi ded -boun daries -t ter -el t -wy oming -be verage -me sm -one ws -budd hist -y ana -as sad -is ms -bar rett -predic ted -back to -tw it -e there -cap tains -escap ed -ay o -lam borgh -gard ner -la ps -k al -adverti sement -insec ts -na po -am en -ac y -r and -g k -te h -k athle -tri dge -pan cake -at ro -pyram id -bu la -paral ym -gau ge -en cies -tom y -biscu it -but cher -quali fier -coun ty -ke i -po ols -dar ker -should ers -ðŁĩºðŁĩ¸ ðŁĩºðŁĩ¸ -sp re -( " -writ ers -g m -ðŁİ ĵ -k nit -hu ff -mt b -philli es -o st -den is -g art -licen sed -inter face -ex cel -d well -from the -co fficial -az zi -appear ing -fore st -n ana -ke ith -manufac turers -beck ham -) ? 
-e se -col ony -delic ate -ut ter -mc in -transpl ant -pre ferred -par d -ari e -hu b -po ds -perspec tives -pic t -del u -app er -be than -p mo -crimin als -femin ism -sh ack -circum stances -fel las -prote sting -wa x -sugge sted -t ator -dre w -om ni -fa ke -kath y -re b -del ine -ber ni -mi sty -ðŁij © -er able -break through -men swear -millenni als -chan yeol -la z -inser t -rep lies -phra se -n x -ihear tawards -audre y -gran ite -rac ec -ori e -ter ra -innov ations -britt any -at eral -pe ar -bio logical -sh ments -institu tion -m sn -frequ ency -d man -neg lec -t f -ste fan -fox news -ty po -comm s -sequ ence -car men -wh ites -econom ist -exe ter -se um -re sorts -cas ually -bun de -divi de -Ø ¹ -ga g -cre ed -reti re -cau cus -rapi ds -wrestle mania -tul sa -sunder land -fundam ent -o di -yam aha -v ary -intri gu -el se -be acon -an gie -tra ded -tran sm -g ents -kn itting -gal ac -ðĿ Ĺ -u to -sea side -hol t -re rs -far go -train ers -mon soon -b ale -sou ght -mad die -h w -co li -fr an -fav s -ðŁĴ Ķ -int ent -r ally -s bs -lemon ade -barack obama -bre ad -stick y -explo sive -chel ten -t j -as soc -ram en -hom ies -v log -mi ster -lor d -âĢįâĻ Ģï¸ı -aly ssa -sketch book -ru mble -cat ch -migr ant -discipl ine -un likely -chronic les -fl ora -sl ams -am id -s boro -coo p -ju mps -tran qu -mel is -sof ia -en ri -gab e -sy ri -nicol as -cha i -w v -be cky -foo ty -ta o -suppo se -ðŁĺįðŁĺį ðŁĺįðŁĺį -plu sh -ri sh -ðŁ¤ ĵ -k ha -satur days -ac cent -he c -lim it -carl ton -wi red -taylor swift -ðŁĺ ij -sq l -har ro -recipi ents -g at -go p -th of -amaz ed -gh an -ðŁıĨ ðŁıĨ -por to -cla re -di stant -na c -ohi o -ðŁĻı ðŁı¼ -mt n -anti bio -dino sa -me sa -par tial -b v -lear nt -lov ato -questi on -ex tract -gossi p -gi bb -niag ara -ðŁij ¨ -displa yed -so oner -ste vie -nug gets -ml n -bro m -tur b -give aways -stu pi -bl ink -c ili -conven ient -mo h -vi ve -f ric -cau se -cham ber -cu les -ne arest -is se -small biz -t j -canadi ans -smar ter -bra sil -ra 
re -que tte -w ha -cand le -at omic -ðŁijį ðŁijį -warri or -relax ed -stri ps -ne ur -k ka -r fc -jen sen -reco vering -respon ses -sal am -ortho dox -acti ve -ell ers -n it -âŃ IJ -metro politan -centu ries -vi da -gra ding -transpa rent -sim ple -do ts -superint endent -elev ator -autom ated -red skins -ima m -summer time -jona than -ge aring -michel le -confl ic -m ice -to te -publi sh -pa x -) - -na iled -á ´ -tele scope -ser bia -ba b -ape u -st ically -sen ti -r ats -isol ated -grou p -hat red -paranor mal -stan ley -ali on -safe ty -l s -ठ° -nex us -alexand ra -mas ks -+ + -tr on -au k -brother hood -brow se -mix es -sim one -mu sk -appro ve -lo la -ex p -per th -fu turi -un seen -d m -chel se -sc outing -o we -portsm outh -k ram -mi ze -di spen -su p -d lc -adver t -tere sa -is le -cy cle -met all -shi elds -marin ers -ra z -ing en -fun d -an go -jon es -o ka -mad den -broc coli -domin ic -situ ations -mer o -cric ke -puni shment -d b -sha king -ðŁĺ ļ -m q -ari ans -le h -cla w -we ds -d ure -ni el -j elly -gour met -tra ders -le vi -w ages -kne es -wi se -heaven ly -avi d -melo dy -z ack -ban anas -apprentic e -pro p -fun ny -o de -respec ted -me gan -fe wer -dra fted -med it -gra pe -us army -cru sad -vo cali -prepar ations -non sense -us age -th r -ro th -wiz ards -insi de -promo tions -mon a -red sox -si g -eleg ance -ch ia -univer sal -ãĢ į -ra ja -un ga -pol lin -filip ino -ak a -t sun -ik on -bi king -decor ations -z ac -cade ts -hum our -ag m -re ppin -vac cin -elo ve -u w -dia be -galla gher -az er -do l -a while -pro minent -wel sh -t ann -' ) -bi en -wa g -in al -c wc -wic ket -ur st -q anon -x e -out door -dun n -star r -co logy -ric ky -u efa -reb ounds -s music -inf ant -ðŁĻ ĭ -so p -u mber -hand ing -beg in -sor ting -ha sh -sp ati -re k -buda pest -black hawks -dele te -ro m -can did -auth ori -de bris -spe cul -inter section -marri ott -im ran -ðŁĺģ ðŁĺģ -cru ises -ram sey -rafa el -aware ness -vas cular -beyon cé -ru g -ðŁĺ Į -festi v -ar 
am -s able -bas il -p ill -flo oring -un beaten -implic ations -u f -w ound -for ge -poin ting -po ts -popular ity -ðŁijı ðŁı» -mani pul -s lots -deb ates -abs ence -ver mont -never forget -wri st -gl oria -ren ce -hu sk -mel ting -ðŁİ Ł -br aces -tim ely -transform ing -am ps -ma k -po e -ah an -gener ally -nd p -ale ppo -unic ef -pro fs -nor d -ma sk -jackson ville -v v -sh ells -bloom ing -oper ators -char coal -ne ville -ma gi -chi p -sam a -ir an -re forms -accu mul -ru e -æ ľ -web sites -ga on -devast ating -sto s -glaci er -ra pp -chipot le -pr a -or ous -rom ney -seas on -decor ative -c isco -dit ch -compla in -ll o -assu me -ðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤðŁĺĤ -n els -cent ric -ft w -car rots -tat a -can ter -per ience -li ers -demo s -bl unt -oper ate -reserv ations -le ah -sub stance -di son -an te -elec tion -v ue -squ are -non profit -ca a -f su -y am -ãĤ ¤ -v ladi -comple tes -mar i -philli p -ne ill -er as -ka it -men do -mahar ashtra -g p -dan e -provi dence -ther apeu -juven ile -me mo -in corpor -aa aa -seven teen -teen ager -à £ -or ns -wi de -cu teness -tw d -ff les -bar a -com edy -over time -y az -bar on -unemp loyment -ðŁij ĭ -exter ior -den se -cent res -match up -history month -artif icial -qu it -e sk -war n -cr itic -j af -ðŁĵ ² -inform ative -fu els -recy cle -nam ing -stri pe -sol ic -mole cular -dee pi -con vo -s sel -na e -de scent -ti z -accoun tability -ter ry -r ito -sl ay -em o -dem ol -sens ation -co v -tor e -round table -y ol -excu ses -ॠį -tur quo -hh hh -pod casts -cele b -me ssi -li o -man n -contribu ted -u z -gener ator -ele ts -veg gie -indu l -en suring -detro it -pun jab -tran spor -instru ction -ad d -por cel -pan eli -cir cles -persi st -clay ton -sp n -dog softwitter -is nt -sp r -retail ers -p w -hun gar -el ena -mon aster -gu atem -je ssie -an z -ra shi -fle e -car ving -fau x -l al -hen ri -d jo -du ll -s ana -lar a -glo be -cri mson -com pass -pau se -na b -lion el -ba ths -u fo -invent ory -sin gh -sat an -ðŁĩ ¸ -ce ments -in 
form -gener ated -bi den -av g -tas ks -de er -sa u -ja iled -pa stel -sc c -na il -steel e -per is -lamborgh ini -pur sue -mar gin -u ch -bo sch -dra in -cl ara -bo m -lat ino -web ster -rose mary -r ha -s oun -billion aire -not ch -percent age -con or -' " -hom es -earth day -h ort -big gest -di sin -wal ton -edit ors -im ma -om ar -equi valent -pharmac eu -ah med -cam eo -han ni -under rated -ge ment -micro bi -v oo -honor able -obe sity -âļ ¡ï¸ı -limer ick -invol vement -st agram -boule vard -bur g -blackand white -liber ation -fi ve -inter im -sm m -rival ry -cap abilities -stat ements -thu mb -ve d -sw ans -bar ber -e que -seren a -hel m -noo dle -sam pling -n awaz -sing le -thunder storms -sh on -in ev -ë ¯ -to pp -orch ard -bi an -ðŁĺ Ķ -door step -salv ation -marke ting -r ons -cle mson -ra vi -in take -stand with -sin a -ha iku -ple y -elector al -ph illy -la ys -electr ic -cap turing -u pp -er gy -believ ing -cul tures -es day -inva sive -ed ed -spee ch -end ur -viet nam -boy cott -pe de -deli ver -ðŁĴĸ ðŁĴĸ -mer chant -st ir -den ies -poc kets -o ti -cu ddle -ro land -mm ed -den ed -lear ners -hoo p -sour cing -h acked -di m -environ ments -ben son -jud icial -wor cester -pear ls -govern ments -arri vals -cor ners -tun ing -la bour -y m -or dering -le wi -i fe -hygi ene -thou ghtful -indone sian -campaig ning -princi ple -assau l -ru bb -at v -wil ly -en tre -il i -ph on -du ties -âĻ¥ âĻ¥ -sn akes -lo op -am ar -conver tible -bon ding -ment oring -max well -ethere um -destro ying -ax is -ca iro -fin nish -sho ck -ðŁĺ IJ -cal eb -com a -pe dal -co re -contin ent -el son -temp o -helsin ki -ac p -tack ling -st ated -bl a -dou b -sma shing -a ja -camer on -disru ption -warm th -being salmankhan -bullet in -o de -syrac use -ar an -mc gregor -bul k -an ton -confir mation -sp ine -im ran -instru c -jac ks -chi o -pal m -str e -embarra ssing -un t -elimin ate -to ss -c ise -a ws -oni sts -sh inee -jo s -ho se -li vely -opp onents -mo vements -recogni zing 
-sandwich es -sh akes -exerc ises -se at -profe ssion -merry christmas -lu gg -adopt dont -mar vin -byr ne -un le -he t -ku wait -rah man -aspe ct -humb led -gen es -f and -long time -) ; -cam pu -an gus -ðŁijį ðŁı¼ -q uran -sle eves -s lic -¸ ë -twel ve -your e -i ke -go gh -b st -dic tionary -reflec ting -to on -yar n -em bed -ðŁı ´ -re serves -floo ded -ver iz -du sk -estab lish -pro li -au d -ritu al -or bit -declar ation -recor dings -cam o -cas sette -good luck -cu tter -bo p -b ho -che ating -paci fic -ma res -tim er -col t -tr ous -tomor row -han sen -ci e -w ang -ban i -circu lar -ac ute -far mer -co ys -p se -ir ving -w j -haw kins -b ison -ur day -cru ising -o te -k ath -whi stle -your selves -ant is -sla sh -thorough ly -ke sh -ser ie -ex em -en ig -guil d -sh red -ho gan -ap o -ä ¸ -pu zz -ne tball -au ssi -panor ama -ws j -av is -ar ming -hum ph -brow ser -cri es -fo ggy -mat te -ðŁĮ » -it er -tal lest -by ron -cap tiv -je su -any ways -flag ship -p ton -we y -fay ette -financi al -f oul -solom on -jenni fer -cucu mber -ar gue -tex tile -wrest ler -john ston -pa stor -ðŁĺŃðŁĺŃ ðŁĺŃðŁĺŃ -cac tus -edi ble -re served -ric hie -met res -ingredi ent -h ella -un to -ch ol -cele bs -po ets -gra ham -hay den -coinci dence -b aw -communic ate -flet cher -/ - -tole do -ecu ador -coun sel -s laughter -line ar -at p -os u -jo el -ev ed -conqu er -ru stic -plic ity -recogn ise -room mate -cr acked -jas per -ph er -ðŁĮ º -wo ven -mo ist -ff c -ste ering -ni sh -stand ings -frequ ent -ar di -haz el -as msg -bau m -d art -si dd -nat h -ch ero -card board -c ss -n sfw -pa ir -ðŁĺį ðŁĺĺ -occur red -homeless ness -mal one -ph e -xi a -pad dy -decl are -theat re -b f -per sian -ta d -ax e -susp icious -lam b -mu cho -sen ior -st as -k ite -st ing -gra d -k af -wat ering -Ø ¯ -spi ral -th ms -educ ator -jer ome -of c -clo ck -su l -pe mb -.... ..... 
-park way -de aux -restric tions -m ons -need le -e j -le agues -water melon -am an -pl enary -max im -w ab -coming soon -bry ce -vi gil -super market -fortun ate -turquo ise -presi dent -li v -inter ns -feel in -fix tures -stun t -st aged -premi eres -lo k -prac titi -shor tage -log ne -ve c -con cor -roc ke -li g -com posed -syn thetic -di p -cam ila -ch is -j ou -su san -eye brows -supp lement -satis faction -moham mad -ti bet -house of -pu n -as sam -shado whun -psy ched -se duc -mand atory -her bert -sc allo -stream ers -proto col -block buster -produc es -sch nei -lau rel -tri be -time hop -pl a -mod elling -tv time -mtv stars -wi dow -me tric -ch am -con do -flow ering -ale c -d ms -inten sity - ¨ -mccar tney -islam abad -k b -f fi -ph al -anal og -f ond -h acks -positi vity -treat y -sub marine -conne ct -sel en -categor ies -cu b -organi ze -si k -quote oftheday -remin ding -am or -loc king -ðŁijı ðŁı¼ -comp ound -et te -b out -rec ur -fe rence -mi zz -tren d -hip ster -for tress -forth coming -preli min -o dyssey -ang p -del ici -even ings -ðŁĶ ¹ -i q -d w -da ir -kathr yn -christian ity -moon light -ha b -wh oo -f bf -se th -genu inely -pa x -char ity -deplo yed -b nb -bu cs -ju dg -con ge -plant ation -im press -car a -sc lub -sco py -land ers -compla ints -b ama -re build -x y -real ism -sh our -le in -brac elets -mer a -assas sin -an chor -ðŁijĮ ðŁı¼ -lin en -con fron -chronic le -comm ent -cat alog -il les -gor ge -me try -jung kook -love my -sent in -se em -fit ness -alli ed -ts man -digital transformation -pr an -lo ft -min ton -alden richards -en vel -cher ish -certain ty -zz z -rhin o -per kins -en rich -cape town -ome ter -sec tions -ske leton -def enders -ðŁĺ Ŀ -pen c -bri t -ja h -capital ism -ðŁ¥ ĩ -baz aar -re me -ex t -kk k -conver t -stor my -b ye -kar an -chry sler -ad os -pre ssed -syn c -ation day -dang er -bad ges -refu ses -em powering -ly m -ex ports -adoptdont shop -ðŁĩ ¯ -th c -awa ited -focu ses -fin ed -o at -haha hah -âģ © -n 
family -fi ona -luck ily -thr illing -ty ping -out break -di es -he u -craw l -ne sses -o ath -scri pts -gee ks -ðŁIJ Ŀ -p b -mathemat ics -al is -________ ________ -gymna stics -acti vism -recommend ation -gre n -wa in -cour ty -n apol -cau li -hor nets -g als -jo ckey -dir ty -at ar -enor mous -pe st -greg ation -an os -ii ii -def ends -black historymonth -at x -mb c -lugg age -wit ch -co b -la sts -cu m -gg g -ba thing -n ar -ce bu -ðŁį ĥ -navig ation -min e -re jo -ðŁİ Ģ -gif tide -re ta -use less -pu ll -defic it -al lu -ati me -it v -tr illion -pu e -ac ies -proce dure -l ori -jen ny -c ad -ul ously -dr ac -promo tes -ing the -can u -woo hoo -na omi -zar dari -ts u -be ir -sd g -le ver -we ber -ab ud -lun d -crow ded -deplo yment -ter rain -ken ny -ho f -witne ssed -lo ch -j k -bul ly -w ren -poe try -do ff -ww i -mo red -din i -cul ture -promp t - ¥ -maur ice -to pps -r m -cor respon -ab out -jewel s -gi br -eag le -ðŁĺĺ ðŁĺĺðŁĺĺ -l ending -sou ven -ç Ķ -contemporary art -establi shment -j ong -â̦ " -gat or -patri otic -mc coy -v ape -human e -feli z -coach ella -re posting -ste als -fu ller -n ering -at ra -( - -bla ke -he ather -wor ms -discipl inary -rede mption -y ard -am in -" @_ -d nc -t ds -k appa -ne wark -comm its -spe ars -j ams -t and -msn bc -inter medi -aim ed -at ic -teen th -observ ation -kash mir -kavan augh -ou l -san francisco -re u -bel ated -cho w -pass word -st ills -deta ined -sar i -day ton -dar ren -itali an -ar th -amu sic -ar bit -w m -v m -he m -dou g -my r -a sho -pre v -vin d -bra h -sta g -ภµ -pre views -gu k -con taining -leon ardo -sad dle -ru shing -st av -lon gh -gam bling -ve gas -reserv ation -end ale -bal a -fl a -vari ant -he dge -bulgar ia -nat ali -we aver -sol st -encoura ged -ap c -as parag -ne st -cycli sts -fe l -ìĬ ¤ -overwhel ming -pey ton -j it -a post -mb le -ble eding -neighbour hood -a very -expre ssions -mac donald -gi gs -mon ds -illu sion -n ct -cam ero -over head -my th -ol y -vi o -et v -lau rie -unve 
iling -pri or -con n -iron man -di ff -day in -crit ici -con go -re vision -wal e -direc tor -p ines -black pink -gar ner -cur ated -manit oba -h ac -common ly -bar ton -.... # -mor tality -live smatter -philos op -shor ter -con vince -fre ak -vend ors -insi ghtful -el ly -sens ors -e led -s berg -weight loss -u kip -sp ur -priv ate -qu a -ss c -, ... -supervis or -advis er -amaz ingly -less er -at es -mah on -oooo oo -sar as -pmo india -waff le -un ders -toler ance -sculp tures -her sh -kno cking -smo ke -cathol ic -gri m -tra veled -fli p -ge off -dinosa urs -sle pt -scar let -ok i -compla int -ob sc -nam i -la g -cross fit -u fc -mc cain -refe ree -sad ness -pen ny -li eu -mo de -ki er -vol s -w is -el on -she a -ba o -son ia -cla ire -em manuel -moist ure -di gest -vi ii -t eller -ch on -access ory -night club -foss il -aw an -hu sky -ab original -brand on -ffici ent -cou gars -ste d -ad mitted -igno red -content marketing -ag as -v ase -execu ted -negoti ations -she ad -n and -tab lets -go th -ts al -d fw -on ep -protec tor -sp ho -gaz ette -andre as -ss er -comp ilation -ha v -contain ers -bro ker -soc al -porcel ain -hy uk -air ing -ðŁĴ ° -publi sher -scen ario -spart ans -re viewing -itu des -ed el -pear son -ba sh -mau i -a ad -ðŁĮ Ĭ -li u -ul ate -program mes -fav our -web design -real ty -motiv ational -cro sses -' ... 
-bus ch -adjust able -ar jun -mist ak -dimen sion -pi stol -weigh s -en y -unve il -indy car -gor don -f ade -fran ken -qual ities -bet t -loc ate -ker r -sp c -confu sion -ne e -luck y -bas es -dep ends -fire fighter -ol a -re t -mar oon -ðŁĶ Ĭ -w am -defin ing -whe at -bi l -é s -b hai -psy ch -ta u -ic ans -thi k -ob ile -inspec tor -ìĨ Įë -ill on -go s -ev angel -fa i -si st -voc ation -bur ge -chi stan -renew ed -enthusi asm -en ting -ag ri -ike a -m sc -aero space -sens iti -memo ir -hosp ice -co caine -der ry -mechan ics -Ħ ภ-tin o -reduc es -collec tors -in justice -supp re -v ana -ab un -nap a -su sa -os lo -e ff -en core -lic ence -ched dar -z al -moun t -ðŁĴ IJ -threat ens -!! " -archi e -fu tsal -scu ba -jo s -gn on -se xi -s official -compar ing -domin ant -tof theday -fa it -propos als -gi ft -y as -cn c -l r -ha b -reser voir -beli efs -gener al -mar ti -t d -est e -ì ł -wi l -ðŁij ¯ -ðŁĶ « -sp x -et work -excer pt -e instein -hir o -sil hou -team ed -per ception -corri dor -mental health -hin ts -ben ny -induc ted -sw x -wi desp -spe ak -cher yl -dru g -ðŁĺ ķ -h f -asparag us -myster ies -fitz gerald -off er -therap ist -care er -dam aging -ts d -per u -wei bo -y ay -phoeni x -disc re -mac book -bar ker -stig ma -sp read -roc kies -kang ar -bri dg -pa i -bi shop -ta iled -capsu le -ðŁĴ ĵ -ge of -roy ale -short listed -o ste -ash amed -ch app -key e -cl a -screen shot -austri an -nati ve -en ight -juli et -michel e -ðŁĮ ´ -travel ers -pi l -football er -win chester -ðŁĻ Ħ -azer bai -gold eng -organis ations -interpre tation -predat or -ofthe week -lo gan -pok é -mari e -cal la -t nt -cin de -ge tic -fit fam -gra v -ow ens -ðŁĮ ± -shoot out -sal is -commissi ons -co he -p tic -ni xon -hi a -amb ition -mar ine -cruel ty -t k -cru de -sal ty -jim a -mon go -ir ony -on wards -arre sts -strang ers -ig er -cycli st -ra g -exten ds -tra dio -bour g -mo i -el la -e able -lex us -au l -der a -histor ian -mor ton -ti ff -man ner -ko t -d k -po inted -mar qu -a 
an -en ey -du blin -on poli -em ili -secre t -fl o -âļ ¡ -ba j -ste ep -accompan ied -rum ours -dev i -purch asing -fi g -pu b -sch oo -autonom ous -go alie -x ia -autom atically -re vers -ter o -fu ku -titan ic -shoo k -sand als -see kers -exc av -nor dic -bigo live -ba ke -r att -z ak -ne p -ðŁĺ ¤ -cand y -billi ons -book worm -pp et -à ³ -sur faces -sc ars -phil ip -do gg -ci gars -co te -transl ated -cur ator -sin dh -han gover -bre wer -on es -el ton -ðŁĴª ðŁı¼ -mar cu -elli ot -righ te -di oce -ru ss -rail ways -grand son -as cen -apo logy -awa it -mob ili -re spir -parti san -oli vi -stri ke -yo o -white house -expre ssed -pu ps -bed ford -cul tur -fro gs -fly ing -cav ali -c ds -fri ger -street photography -re solve -tali ban -kan g -cru shing -ju m -ðŁĺ Ĵ -william son -tan g -cur ly -t man -veter an -fa ire -artificial intelligence -un anim -pre n -back drop -fr ances -oc cer -doro thy -work ing -ar thr -conver ted -day light -serv ant -pad dle -compla ining -thir ty -nad al -ak u -ibra him -ad dressed -p iss -green house -batt alion -si mulator -out lets -embroi dery -ðŁĵ ± -fis cal -ger ard -sas sy -ðŁİī ðŁİīðŁİī -vent ures -mer it -public ity -ðŁij Ī -sophistic ated -c tu -conven tional -condol ences -isra el -tra dition -ar an -te ss -gla d -ðŁĺĬ ðŁĺĬ -correc tion -ge on -am d -or ship -be ast -ch ment -ì ŀ -nic o -wk nd -wel s -cushi on -beli e -vo c -idio ts -under neath -pu ma -corn ell -en ation -lu l -swa ch -ab ig -u rer -mi e -form erly -ca f -er nal -chor us -juli us -sen ator -âľ į -wh ir -salv ador -ph d -uni fied -boo ster -graph ical -w rec -son ny -mi z -dere rs -s all -ven s -tusc any -wi d -y ong -kur ds -w az -trol ls -mac ro -cat urday -pre ssing -sa sha -cent ennial -gu sts -em c -be fore -den ise -cu st -ðŁĵ ¢ -lo oo -base l -eng land -y olo -ar du -manife sto -do ha -ì ľ -kni ves -bourne mouth -bi bl -bar b -al icia -Ø © -com er -cycl one -g it -ane ws -character i -vent ura -in tra -sf giants -hu t -be a -dar win -ell er -al v -re 
ese -bl y -kar an -conclu sion -man ny -fla kes -unite blue -nad u -co pp -ed ges -lanca shire -i als -o tta -philipp e -l ent -che e -ment ors -festi val -an ism -compli mentary -r j -pu g -d ine -we i -cli ffs -sar my -ti veness -treas ury -il and -after math -rabb i -ou n -bou quet -herit age -zi on -sur render -shen an -in ks -kar l -gh ty -pol icing -exam ination -ce y -per su -measure ment -hydro gen -lu han -âłĢâłĢ âłĢâłĢ -war i -о Ð -j y -fow ler -mis h -al fre -âĺ ij -bb naija -cat alogue -recogn ised -sa ver -hu skies -col in -mun do -si va -p ng -discoun ted -man utd -fre sno -de vin -prelimin ary -tro phies -pla stics -du g -pro cu -indi go -g ard -dy lan -pit ches -ground breaking -in son -bl ac -an thology -f h -expl ic -r ard -admi ral -so chi -la shes -splen did -en vy -ad v -sex y -festiv ities -stic king -bi b -thr ill -op p -ari el -botan ical -endur ance -fe males -br icks -vat ican -black pool -ber mu -br ough -roll er -bi d -sue de -sloven ia -mm ing -ml b -med alist -di ans -rehabil itation -ne on -s go -li thu -ram os -z ed -pi anist -inten sive -broad band -stu dy -peter sburg -lu ca -ah hhh -phys ician -dill on -tele com -gri ef -mu n -ac ro -si ded -s ly -blo ws -classic cars -tri um -ar gy -? : -h ri -marsh mal -âĢ ĵ -to pping -war saw -tran sc -preserv ation -b av -re friger -experim ents -ä º -gl it -sli ga -g age -fac tor -flav ours -br ony -sp o -cook book -carri age -aw ay -ny fw -on ian -w g -simp sons -ro lex -ðŁı ¿ -cro sby -ãħ ¤ -cre di -syn dic -pu bs -ali fe -poor ly -mac ed -ðŁĺ ŀ -behin dthe -w enger -n ats -ðŁİ Ł -rubb ish -procedu res -typho on -opho bia -er do -fu el -vi era -bu mps -millenni um -new zealand -lec tures -it on -mil ky -respon ded -ê ° -landsc ape -.. 
@ -bo ther -âĸ ¶ -z hang -huawe i -tu ition -s worn -in u -y or -pa olo -au ditions -ab il -malay sian -ho ps -fe athers -mp le -au ts -ã o -boun ty -ic he -ì ĺ -sh q -pin ot -ge ars -disapp ear -video games -t na -alzheim er -ðŁĮ ŀ -a ji -under wear -swit ching -sign age -o scar -ec on -dro w -cl int -pl ated -gun dy -emb lem -ho es -ici st -nel ly -juni or -road show -miner als -at le -alexand ria -ac claimed -v ell -shi va -ad he -en ne -amne sty -h ounds -councill or -ðŁĴ ¦ -aes the -part nering -influ enced -mag no -fl are -extin ction -civil ian -maje sty -va il -law makers -rac ks -mc c -ori an -sp ices -er rors -may er -co ca -pa i -s ooooo -reti ring -ba thro -ðŁĻĮ ðŁĻĮ -âĸ ª -su f -endor sement -buil ding -broo ch -pal la -arvin d -ag ent -kar ate -r hi -c tv -ta ine -um m -ba x -reig ns -uni of -enterpri ses -adel e -fla ke -at tire -bru ce -ba hamas -gra vy -sa in -che ek -tri vi -lo v -e en -bb lo -lady gaga -itt a -. "- -du stin -observ atory -eigh th -bloom berg -kh s -f cc -gi st -commemor ate -ve er -sexu ality -ed c -nic ole -vac ancy -u ser -son a -:' ( -dipl oma -t end -up grades -Å Ł -jura ssic -cardi ac -dr s -widesp read -à ł -dail ies -vend or -sim plicity -wi der -len ses -supp lements -de pos -ob served -vin es -parti ally -renew al -collabor ate -ali g -fin ity -ph u -zz y -pe tit -ðŁĵ ħ -z in -i gu -sm ack -fall on -ðŁĵ £ -back wards -comp onent -o so -compati ble -bin ding -zur ich -thom e -w ounds -ly ric -fresh men -sne aky -fi bro -di et -emplo yer -in sect -h ated -sch er -raz or -n sw -boo ker -califor ni -av fc - ° -preten ding -pep si -al is -un titled -k art -grand parents -e the -o ck -lux emb -visu als -small business -abdul lah -min ho -su baru -h ra -reve aling -heart breaking -clar ity -am g -sl r -** ** -âŀ ĸ -recor d -ici ary -min ded -ye h -exce ssive -knu ck -icec ream -tru th -ev ic -ta stic -ant arc -ren dering -, , -mit t -loren zo -st patrick -bound ary -zi g -vo cab -osa ka -fur n -tu n -gu l -s ounding -blo gger 
-utter ly -g af -adv ancing -l cd -mar gin -lifel ong -solst ice -sh ra -wa its -ple ar -bre ach -en ligh -ad er -itt le -c ation -ho on -stu died -?? ??? -k ash -ev angeli -ps l -wei ghts -met als -ty res -tur no -wi e -car b -g ale -se al -sun ite -am ic -patter son -á n -eu ph -up stairs -quali fiers -khali fa -apple music -ìĨĮë ħ -vau ghan -al ter -cru iser -mu a -t ana -kat rina -id ols -spo iled -secre tly -fi bre -part nered -um es -gi ov -com et -screenshot saturday -k eller -fil tr -fe t -con way -pe u -bad minton -gi d -m ound -don key -bu ff -lea ther -lar gely -bro ch -int ments -am use -r k -sto ve -impac ted -con t -cr acks -prison er -bar i -contrac tor -ori oles -domin ate -pol ar -am elia -dr c -ðŁijĮ ðŁijĮ -vi st -su arez -injec tion -blo oms -ðŁļ¨ ðŁļ¨ -sti ff -pay pal -sno wing -thur sdays -goo se -we dge -educ ated -weak ness -de cker -abud ha -bree zy -Û Į -hope ful -o bi -rai der -gh am -de u -se ve -par tly -fu t -infu sed -mer ri -than e -some time -hu e -me in -cre dit -sli ding -ran de -cher ry -dead pool -sh ol -ar am -under wood -sky e -distur bing -m nt -poli shed -guardi ans -ha dn -pic asso -ari us -ak shay -ir ri -j h -happ en -la kh -dal ton -at the -s well -mar sha -re h -cour s -j kt -top us -serv ice -r ink -hack ers -dono van -hor o -tc m -may hem -cha se -dev ops -ken sing -sc up -sh ere -quali fication -c live -ton g -n ancy -mar is -der dale -ber man -cinde rella -jol ly -ci c -loo t -collecti bles -hom icide -g ge -epide mic -su ites -mu ddy -gi mme -e rec -- * -tal la -lis le -embro ide -ðŁĩ© ðŁĩª -veriz on -ve ctor -be anie -arti san -ga in -flo res -vi gil -u so -ðŁĻı ðŁı½ -grin ding -gh er -air ports -respon sive -shaf t -can cel -ceremon ies -e me -at ari -bru shes -eag er -bo hemi -children s -yan kee -ma a -suspen se -mor an -mac ar -sun flower -cre w -vo id -ke ar -fashi oned -jen nings -sunday funday -sub missions -me ad -her man -wa i -crit ically -le um -baek hyun -for cing -co bra -ãģ ® -acqu ire -al k -ge ology 
-pri mar -import antly -ire z -bunde sliga -curi osity -sen a -stric t -con soli -win ters -ven om -chelten ham -ðŁį º -cen a -t at -ba in -glo ver -under cover -as ses -car n -memorial day -am eli -i rene -ch on -syn thesis -spe edy -mitsu bi -sla yer -compos ite -under stands -pe w -inter rup -hen ri -mor row -an om -thof july -g lee -thre e -ðŁĺ ® -and hi -ch att -renew ables -ye s -trans fers -!!!! !!!! -bab u -du ter -lo ops -pe ers -o ilers -pau lo -ic ation -h mu -war a -mer cer -hom eland -fu ji -ale y -year book -re m -re en -ab sur -bo is -] : -caes ar -shot gun -kur dish -o ren -ra e -anci es -ty pic -f h -def ault -re plic -lu k -trans actions -r ys -infan try -ðŁį ¾ -cho w -chick ens -ba gh -wy att -ay e -gg i -bre ws -ed itions -mi ra -commen cement -pre su -peris cope -ic hi -guatem ala -zam bia -pain ts -wit ches -wan i -un dere -cro y -vo ws -us mc -hear ted -theat res -shu ffle -le vel -mul tic -squee ze -fer n -app et -post al -mal t -on board -ld nt -co o -s sc -k ac -ðŁĺ ĩ -sc rap -mar cos -deal ers -ann u -mill er -co ve -ul ary -vladi mir -be ef -th ur -pick led -se same -bengal uru -mo tt -kathle en -hi st -no tor -dr ank -du chess -snow fall -e ff -tin y -j n -sy our -speci alists -scot us -bay lor -eve rest -mali bu -pre m -harm ful -l ali -b ates -g ye -differen ti -and ra -geome try -el over -black out -== == -ko ta -inter act -asi an -la yo -samu rai -fi del -exhau sted -gla di -pd t -spher ic -anti qu -guit ar -stu ri -ho pper -ang le -f ills -sla p -mi th -rod ney -ong i -in som -pre venting -cassi dy -ap ho -ore gon -lo in -ham mond -contribu ting -f n -gar ri -ori on -comp elling -escap ing -aim ing -plu mb -bi stro -be asts -concer ning -bo e -do pp -shop local -stumb led -âĤ ¹ -naz is -âĢįâĻĤ ï¸ı -gest ure -war ts -us open -hi ggins -char li -hang s -bom bers -° : -fe eds -c ch -st il -nic ola -ðŁĵ º -clam ation -tro pic -af ro -ou k -expen ses -der rick -al ine -fa w -reg ard -im er -sat in -thi um -ry der -pear l -te ss -mm mmm 
-sen ses -ðŁĩ ¹ -positi ve -exhau st -occu r -nor ris -lil ly -is les -direc ting -yo fficial -count less -sam ar -on stage -flo ck -mir rors -arch er -mo i -k d -vi v -in os -si kh -le i -sen sory -br its -kno x -chest nut -op y -coli seum -z af -di vin -adap ter -:) )) -tem ple -ku n -hel mets -t df -gu ide -m old -o ids -lu ther -he is -monaster y -sp ree -k lu -brit ney -jagu ars -gre ats -c cc -ky rie -machin ery -cric ket -re ro -ab o -aspir ing -semi finals -ale ss -sig natures -var d -me th -her bal -hol den -king dom -ap or -reg gie -ore o -palestin ians -em mys -sec tional -ro i -ney mar -qu el -cu ll -l ka -haz el -estim ate -ul ties -go w -be a -purch ases -bel ts -protec ts -m é -gue ssing -bb o -clau dia -fr acking -jon ny -el k -cel tic -al mighty -ra je -courty ard -ig i -can es -ðŁĴª ðŁı» -bank rup -le thal -âľĮ ï¸ı -graphic design -vad er -penc ils -rough ly -dan te -m fg -const ell -cam el -j b -bloss oms -en to -balo chistan -cine mato -ill ard -jer sey -con sent -dent ed -con templ -sch er -hol i -lou gh -st our -a yo -begin ners -cur b -v hs -a jax -du ff -av eng -dom est -commit ting -ai red -cha p -hedge hog -disappo inting -freel ance -in land -char ms -ðŁĺį âĿ¤ï¸ı -ai sh -m x -buck le -ti dal -per mit -bo ating -ra cha -kend rick -b ello -b hi -ple a -estim ates -l b -apo logies -jay a -bb l -ast oni -inter state -main taining -el bow -mu p -ep it -ðŁĺ ¡ -viol ations -def end -be h -sl c -am ir -pur i -ti um -fi fa -blur ry -scri m -ðŁĻı ðŁı¾ -ma ple -rel atives -âĺ Ŀ -cho c -con nor -⾨ ⾨ -whi sp -list ings -ma ze -than king -ri dd -grass roots -shi fting -desper ately -gor illa -den i -ju les -stra th -g ley -ja in -bu ick -t anner -ðŁĴ Ŀ -ga e -pri m -it ors -n ano -separ ation -armen ia -bor deaux -ðŁ ħ -pj net -bu rial -e bon -glo ss -re new -gri er -spe eds -comic books -sym boli -pur poses -ãħł ãħł -spati al -no table -ci on -n ps -ho ffman -nor man -rt g -du sty -situ ated -tr an -k fc -em en -nic kel -hast ings -sett ling -gr it -l 
ena -w aw -art s -gu m -ca regi -le wis -sapp hire -rememb er -embed ded -t lc -bl at -serge ant -el sa -boot camp -bow man -photo graphic -pill ars -direction ers -classi fied -no is -ve er -barre ls -wh oop -ðŁĺ± ðŁĺ± -fe male -petro leum -medi a -e fc -poké mon -ठķ -enthusi astic -var un -pro files -pedi atric -acci dents -con rad -jan g -jo jo -ac or -ob server -l f -live stock -for gi -fo s -el m -an and -go e -c ere -avoi ding -gri t -om an -thank fully -scat tered -nick y -cylin der -chees y -di ver -mahe sh -cav es -ear liest -qu inte -subjec ts -b end -gul f -vocali st -glu e -pat ches -un stopp -sny der -demonstr ating -pi o -hor ns -wic kets -and the -r ama -yo on -stra ight -bed time -or ang -bul lets -sa urus -min ers -inci dents -! ... -ðŁİ ¸ -ag ers -hand les -stat es -in ity -d ons -incredi ble -emin em -avi v -ru dy -moz art -folk lore -appli ances -mt l -fre y -di as -hu a -page ant -stri ve -im prison -bul lish -r ana -al erts -bb mas -hy per -derby shire -re cre -re dd -debor ah -cosmo s -law son -mel anie -psy cho -ho or -doo dles -sni per -shad y -man tle -canadi an -new year -inter actions -separ ated -cor ds -spiritu ality -ap u -it o -p ct -pel osi -rebel lion -se iz -wor cester -sec tors -ul i -san ta -Ð µ -ðŁĩªðŁĩ ¸ -bi ased -class ical -gam ma -dee plear -emer ge -back er -sur ance -hand crafted -ðŁİ ¥ -franc is -mill an -ic i -cro wn -wo w -stri ped -un fair -relax ation -³ ï¸ı -embrac ing -she alth -pale o -martin i -dist illery -wr ink -or k -na th -hay ley -cour thouse -si ber -sa di -quiet ly -mel t -m sm -me h -smart phones -rel ent -pp ing -war wick -co logne -gli a -cot ton -pro g -lon e -ip sw -star ters -expan ds -u mp -su ed -ski pper -infe ctions -ing le -à ¡ -cler k -demonstr ate -ac ar -ðŁĺĤðŁĺĤ ðŁĺĤ -ti bet -bun s -alo m -demol ition -ssi a -g st -[ ] -so ar -âĺ Ģ -ðŁĺ ª -ðŁĵ Ĭ -dee pest -beyon d -are t -att ends -activ ated -di mit -âļª ï¸ı -high lighted -magaz ines -rum or -az za -steph ens -dol ph -sho ckey -mat s -we 
av -mel an -serv ers -tra um -ku sh -æ Ĺ -bab ys -pa z -a al -la use -break ers -canter bury -ul ture -mi ri -euro s -tane ous -impre ssions -du tch -il d -gh i -pur due -adequ ate -l p -sy ner -ang ler -du rable -gal ore -ro wn -mg mt -ðŁĵ Į -lu cia -âĺij ï¸ı -zay n -bor row -. ( -north umber -cru sh -eng a -su sh -extra vag -t out -ma hal -ali stic -ther mo -gall eries -es se -chi bi -attrac tions -lex ington -legislat ure -docu mented -resi den -brow nies -w f -st ool -plan ets -sho ppers -conduc tor -ms p -tr icky -fru ity -end ra -feel the -whi pped -hair style -re fer -oo k -oc topus -audi ences -ku mar -after no -op tim -c fl -ni p -gen i -alpha bet -ann ab -lam in -accep ts -l ng -ðŁĺ « -t ine -ac om -cheer leaders -t k -gr on -v g -k ung -ja x -dha bi -r ss -mack enzie -beir ut -clean up -gy psy -st ell -bur ger -hurric anes -educ ation -st ina -âĻ¡ âĻ¡ -unfortun ate -jere mi -bad ger -at ers -: â̦ -ter ra -subli me -stu d -y mca -mr u -duter te -bren nan -bul b -mel o -yl on -hack er -c red -gu d -as an -pad illa -embroide red -vietnam ese -pione ers -projec tion -re boot -id c -an ey -pri mer -suff ers -win ding -p on -sto day -mor n -u ch -all in -adid as -eliza beth -tu ck -o graphy -ðŁļ Ģ -be g -os borne -ghet to -r h -cn n -ir ma -ma kin -cab les -mur ders -oc ks -inst a -al as -si k -cu ff -la re -foo dies -o vic -at om -geome tric -em pathy -ภµ -cent enary -newsp apers -administr ative -ðŁİ Ĭ -sti ve -contrac tors -le tt -tas mania -awesom eness -den sity -ve en -prince ton -frequ ently -re ject -gh i -modu lar -ceram ics -sh ag -ki wi -can vas -sweat shirt -an j -ti mm -napol i -il er -appe als -hamil ton -ma yo -we ave -arrang ed -whar f -occu py -b vb -as aki -ot ter -nor m -vi es -de tox -tion al -dere k -id ad -ad missions -constitu ency -u pper -woo t -allo y -se ve -lu b -un comfortable -ed win -ab re -d wight -ar che -virtu ally -sp ol -pri e -ai i -er r -swit ch -bar ack -se ok -cou l -wn t -pou l -o live -caffe ine -cardi ff -notor ious 
-de mp -ex cess -bar r -t ford -a jay -bump ed -my thology -shel ley -fal con -shakespe are -must angs -no ted -bon e -civil ization -sy d -par sons -un official -hy ped -sp ends -oppo sed -v ings -space x -noti fication -deci ding -bio tech -out si -sal ah -! . -fe d -ss y -c ms -bad gers -cr o -ela ine -n ba -dy our -n ant -honey moon -climb ed -conom y -ath a -m ell -ne bula -nature photography -juli e -bm x -inve sted -mon o -lieu tenant -wat kins -techn ician -o se -ka e -ì Ľ -mc queen -pre ach -trav eller -flexi bility -ze bra -reta iler -p ant -ben der -brand t -squ id -war rant -veri fied -cas s -pier cing -hon ours -t ying -mor ris -kis sed -op rah -panor amic -me i -splat oon -wich ita -ari as -gal li -indy ref -good times -athe ist -confe ssion -ow ski -re pping -ad ditions -mechan ism -z im -j ans -su f -cho pped -beg innings -vitam ins -ãħ¤ ãħ¤ -or th -po les -ru b -antarc tica -indie film -web cam -ket ch -bre tt -cle ment -her on -defe ating -hydr o -buc ket -wand ering -sid ney -future of -b inge -on ies -knock out -administr ator -syn the -l ent -jan i -bar ley -premier league -ner ds -cr m -bra s -bot any -evol ved -rot ter -ro wed -tum or -weal thy -Â Ń -mon arch -li shed -da hl -ðŁİ ĥ -bu ch -ken yan -Ø § -red ness -assemb led -se mit -hud der -shro p -ran i -lear ning -mor y -iti a -geo graphic -worl dof -f b -pho sp -boo gie -am ped -? ... 
-che w -dwar f -ar us -s sen -ru sty -recru its -h k -gar de -app lause -vol umes -invol ves -ta c -hand bag -trans late -ffe l -se ym -aqu atic -trans fer -zo di -and r -acade mia -cr ater -te z -ar se -adap t -col oni -snow man -mal i -hang in -di schar -oy sters -pho e -colon el -w ba -hispan ic -thri ving -sh y -ag les -sales force -cre me -so les -la fayette -â ī -ter ia -ach a -sp erson -go go -car ly -the ore -am ore -vo x -af t -ãĤ ¹ -stap le -mu ffin -di agram -ino x -su stained -av ent -me ta -arbit r -dec ay -ado le -Ð ½ -ec ol -ph o -n k -o cu -gr anny -ç a -luxemb our -stad t -alber to -le vit -am as -d x -or phan -co bb -as c -lo gy -immen se -chan ts -off line -p ent -bre x -w inger -plan e -i el -nichol s -ca thy -nar uto -low ed -/ // -ignor ance -cat astro -you ts -sch en -buil d -haz i -s ine -critical role -du g -dete ct -lo gs -en amel -stpatrick sday -ed die -co pa -cigare ttes -ho ff -kay a -la goon -ra pha -air borne -choo se -puer tor -ke v -gui ding -fro sty -bor ough -mir a -ðŁİ Ĭ -cade t -anu sh -yo gi -e ger -fl ing -slo pe -nin th -we ston -foot wear -f n -may weather -a am -pla in -stair case -witne sses -work outs -ro bust -dex ter -co hort -ðŁļ Ĺ -sp ell -ha ze -o om -organ ising -wild fire -cont acts -av on -min o -upd ating -ðŁį » -li thium -ing ual -k is -au ga -lo com -de duc -u da -th ak -boy le -mp er -hot tie -eri k -re vised -is la -travel photography -oo za -en qui -confe rences -clo ver -g room -cur ves -live on -per f -displac ed -bo log -xx xx -ðŁĺ© ðŁĺ© -te al -ve ssels -rain forest -cal ci -pan ther -gira ffe -ta sted -imag ery -pad res -day time -bas s -ri pe -opio id -nu e -vin yl -invent or -sen s -process or -mu t -gad gets -bibl ical -shann on -jacqu eline -car y -the resistance -ali en -n vi -co sy -bi har -fo ley -ren d -mu gs -fa ken -cl one -ni allo -gra bbed -chi hu -power house -n tt -chero kee -spon ge -imple menting -rh ine -le one -ðŁį Ģ -pret tiest -infra red -impro v -swit ched -tu bes -con tr -bl k 
-projec ted -be aver -yo t -bbcra dio -thi gh -per secu -apologi ze -w ack -po ster -oli ver -az a -lou d -( ?) -f the -women shi -spar row -blu sh -us able -sc ales -it ative -peu ge -ne eding -legg ings -glam orous -mat ur -c z -wat t -da b -tam ar -et sym -bau er -heart felt -h n -else where -bir ch -alu mini -hu ck -e me -j l -traf ford -d z -por tions -ana sta -arthr itis -esp n -ber gen -viol ation -yo shi -c z -northumber land -clo sures -ðŁĩ¯ ðŁĩ -smi ley -r w -tel ugu -inten si -gre gg -ve ga -dun geon -south bound -ba il -domin ican -semi final -chap ters -h itch -van ity -trans iti -recomm ends -sati sf -bar ca -queen s -( ( -de struc -stra it -ra vi -dess erts -in tru -har am -k os -fo e -fat ty -pais ley -magn itude -dri dge -com ey -schem es -vision ary -our t -down loaded -ðŁĻĮ ðŁı½ -gd pr -lan i -p wc -gu ad -nic est -stake holders -re ferred -george town -arvind kejriwal -schnei der -in doors -all star -strand ed -gen der -ze pp -ma sses -ðŁIJ ± -pati ently -bl dg -z ab -we arab -vi vid -he ck -d ella -sy mb -je opar -la ger -à ª -comb ines -ne c -br ay -flo p -tx wx -jo ys -pon t -pro found -sur round -mad hu -ma ble -ay r -te as -n sa -open ly -er nest -ãĥ © -to po -g na -anti oxid -ti an -e tr -c ello -ma thi -gener osity -b iting -man ic -kel sey -chee ks -ten der -w th -pron oun -ultimat ely -gu sta -ari anag -ger ry -ble ed -red dy -mic h -mitsubi shi -oper ated -sex ually -ma u -cl lr -vi ds -co c -mel ted -ðŁĮ Ī -q ld -ite ch -instru mental -end game -ðŁĵ ĸ -ener gi -brow nie -tam il -at in -domin ated -pra ises -fire place -sens ational -men a -k arti -un prece -ru pt -ori ental -mc cor -tour naments -scen ter -re eves -prescri ption -sam e -fra u -tru ffle -em bo -roman s -bla sts -techno logical -pr at -b sb -y ar -tren dy -ac l -al ad -ðŁį ģ -o hh -bankrup t -tho ven -regar ds -is er -war wick -vine yards -real m -niallo fficial -do ta -ge mini -to do -v able -¨ ¨ -la u -wre ath -ju ve -nat asha -le ver -lor i -hor ser -cc tv -air bnb 
-es anders -sin clair -ema biggest -high school -con test -optimi stic -t te -ðŁĴķ ðŁĴķ -ss d -ye e -hel ena -con sen -ric ks -jes se -an ic -ðŁİ ¯ -re acts -ro be -independ ence -vol tage -m ington -s ant -à¸Ļ ภ--------- -------- -sentin el -ke tt -rehear sing -aaaa aaaa -sof the -stir ling -sear ch -wi gan -stand out -sna il -pent agon -Ä ģ -ch lor -cru st -net any -chemi st -disapp eared -ric ardo -sp iders -bo se -war ren -me ssing -bann ers -gu el -par ach -ma id -coun ted -epi le -bon fire -speech less -se tter -meas ured -rejec ts -nik ki -le ster -foren sic -fab rics -alo ha -pre served -wat ford -deta iling -dar th -bo u -car ly -... ' -tail gate -noti fications -å ¤ -pas sive -trous ers -balo ch -ro ther -typic ally -à ¥ -sp it -wi z -sic ily -technic ally -ex pose -st age -hu bb -cre am -cap s -po ke -sle ek -ju ne -tempor arily -de z -awak ens -l ame -_ - -ji ha -tues days -advis ed -advis ors -exi sted -dis agree -news room -lo sers -world tour -dr ying -al di -har ness -foot print -hobb it -p mln -i ro -que red -asse ss -gaz e -sa b -th ian -í Ĭ -ti f -ob serve -ev il -dra wer -swee p -cor y -co dy -kyo to -cal lum -n inj -lau rent -be i -sket ching -custom ized -du r -regre ts -knox ville -ìķ Ħ -mess aging -grac ie -abun dance -bi dding -bre wed -fl ouri -therapeu tic -alt itude -ho gs -bur ner -elec tro -wonder fully -he ater -post pon -li very -r all -ad as -a ac -sau l -brook lyn -play house -âĻ¥âĻ¥ âĻ¥ -char itable -in y -z ah -compet itions -be av -plu gged -o is -do om -astron om -speci alized -max i -ta ps -cellu lar -depre ssed -folklore thursday -cri b -e mul -ë° © -fi gh -ru z -car lisle -spe ar -side walk -de i -depend ent -lac es -nh s -ðŁĮ Ļ -reali zing -net work -ric he -re gin -re fresh -st ral -pa thology -pla id -psyched elic -hin d -u ka -algori thm -lin king -progre ssi -fe y -d ade -hydr ated -b ant -fam ed -cot sw -bo ise -as c -rac ing -ja vier -ww en -mar lins -poo p -swe pt -toni ghts -we f -ani me -slo vak -âŀĸ âŀĸ -cla us 
-lem me -cli ppers -re ls -arianag rande -r te -ko t -thal apathy -hungar ian -zu ma -y von -is u -jour neys -clin ics -be be -ww f -n ws -super heroes -er it -sle ague -identi fication -mo tto -ba i -sour ced -ill er -ap i -pri se -unprece dented -dam as -tuni sia -dra in -undere stim -e ther -quarter ly -rewar ding -al ham -wolver ine -cab ine -hyp no -nad ine -hav ana -da e -ðŁĵ Ī -dr on -read ings -b ati -pic o -mer ci -iti an -wal kers -el ope -mi key -god zilla -bur lington -abu ja -social ism -at ility -sh ell -harry potter -g no -ab ur -re leg -fel ici -ro gen -neuro science -inst in -ath am -vou chers -j arre -fu se -def ici -monte rey -de port -mid day -pp ard -fre ed -ame ter -wil t -n ingham -pr att -liber ty -slo gan -o to -pr i -co ated -c pd -ne tt -il las -mal awi -evol ve -accessi bility -ðŁĶ¥ðŁĶ¥ ðŁĶ¥ðŁĶ¥ -or nament -b p -el is -son line -chi ro -fl ick -ib m -ar ak -en ables -gar land -san e -cu ties -tri p -rotter dam -n ys -lam ps -lu cas -bo g -ra ils -travel led -hic ks -en u -sab ha -scru b -hi er -hart ford -fo o -fer nandez -tre vor -mat tress -appo intments -ale j -fe i -o logist -saf ar -oc ta -sr c -sha un -ambi ent -dri c -bi ker -she e -must ache -h ta -bo one -her ty -car dio -bra kes -rec ital -consi sts -overwhel med -cau l -robb ins -im it -al th -ur l -bi bli -on ne -black livesmatter -diffic ulties -tel ang -tall er -ðŁĵ Ĩ -deb ating -bur rito -mo vember -strength ening -bo e -te stam -mirac les -base ball -re nee -ðŁijī ðŁı» -al fa -âĺ ĺ -unstopp able -ec s -g mo -giftide as -path way -fen cing -ðŁİ ¤ -b ham -ra s -sk o -d led -thel ast -magn um -bin ary -wil de -wil der -wh ati -barbe cue -h ism -can oe -kur di -eli ve -advant ages -mad ame -bi er -mis sing -enter tain -air force -y ama -c is -hash tags -j is -ve il -dream y -ten se -may ward -ch ateau -hunt ington -âļ ĵ -v all -up on -bl ouse -dun es -ðŁĺ ´ -fert ility -m ole -curren cies -st u -ber lin -toa sted -div as -wal t -lar k -por a -hit ter -um er -chil led -bal 
ancing -fa is -y in -or tiz -east enders -h ate -ur al -ap ril -tim el -à ± -per o -sto cked -respec ts -th t -best friends -giving tuesday -be ad -inv ent -im i -nap les -comb ining -tok ens -thir st -ma sc -par rot -sp u -dent on -* -* -t res -subur ban -wid th -si ve -con tender -siri us -lo k -troop ers -outra ge -tur bo -frag ile -me ssed -do h -disc ord -netany ahu -re sign -forgi veness -mo han -mun ch -cam ou -identi fying -enab ling -hot ter -thorn ton -jai pur -ar ya -ðŁı» âĢįâĻĢï¸ı -mu staf -maj ors -o ke -du ffy -roh ing -til t -ðŁĩ®ðŁĩ ³ -rock star -she ep -hend rix -ra v -in vention -do u -lagun a -gru mpy -sw is -im pe -) ' -you ths -bun ker -st ache -oppo se -indi es -acceler ate -ml p -ed en -w ann -k ail -akshay kumar -su pt -pol ym -midd leton -extra ordin -wil son -australi an -alumini um -way ne -alum nus -mat ics -gri m -er nie -opp a -competit ors -rand all -h ence -decla res -pre aching -sha he -can e -sustain able -stap les -le dge -ad ena -doctor al -bur gundy -decor ate -ren dered -ri sen -pr ank -di or -bee thoven -flo or -ac com -to t -ho dg -touri sm -say in -objec tive -mar kers -premi ership -en abled -camou fla -gi ant -Ñ ģ -smo key -ric ket -pan g -de pending -s ation -evol ving -inter cep -cen sus -tof the -re en -mendo za -trum pet -marke ters -an it -ðŁĻ Ĭ -north western -v la -foto gra -blackand white -che wan -wi g -tro om -ginger bread -k n -ro mero -n fc -or chi -fun ko -sour ce -f s -ra ped -o st -tar ot -ann ually -ðŁĺ ¬ -r ill -del av -.. !! 
-se s -can n -medic are -ph el -ape x -guardi an -rema ined -r pm -a ñ -story month -instag ood -neighb our -p ing -sem ite -my stic -as cot -mat er -hand ful -dang ers -ti d -ana heim -opol y -sh allow -nami bia -tor ia -procu rement -big bang -announ cements -prosecu tor -beng als -sal le -en roll -ga stro -sugge stion -ba k -ha ul -budd hism -berni esanders -flu te -fati gue -cyn thia -cho i -ir win -gu a -str ous -h p -ba p -satisf ying -play a -ðŁİ ¼ -inst ap -al ice -t p -irri gation -ðŁĩ¬ðŁĩ § -in tric -clu es -ple x -sa x -he pat -dump ed -signific ance -by u -medic ation -pro v -tough est -corn ish -âŀ ľ -kel ley -u v -si zz -si bling -me st -di stor -diplom atic -aun tie -b hat -son ic -bren da -pump kins -ro ch -black burn -ur ged -shi a -arrange ments -floo d -sa unders -lec turer -nou ri -popul ations -diplom acy -consist ently -ðŁ¤ Ļ -t mund -cauli flower -l ily -vocab ulary -vari eties -coo ker -up town -qu ent -mo sa -re inde -velo city -spru ce -social medi -i ber -volun tary -proce ssed -bal tic -y ang -leban ese -d p -dol ly -arrange ment -y uri -cran berry -kal yan -elev ation -cli ff -pu shes -ìĬ ¤ -sil ic -co wx -eter nity -sla ves -vine gar -glou cester -con tained -breaking news -aga inst -renov ated -norm andy -hero in -ys m -mo ds -gre ek -un di -tren ch -v h -encoura ges -head ache -gr ange -: ' -ever green -Ù Ĭ -reck on -ab used -th ru -cho ice -ti dy -col der -scho ice -ha in -bru m -li ars -bre it -yor ker -sh ack -he idi -micha els -sco pic -fasci st -play ful -ca c -yas ss -sh ad -.. ? 
-qu en -ram irez -clif ton -pr s -best fan -âģ ł -gener ating -head set -disappo intment -abstr act -bo iled -paren thood -azerbai jan -exhib iting -bom bay -oli vier -ko so -un lea -mat ernity -iz er -si ves -r hu -col l -saskat chewan -fre akin -de k -na g -stab ili -ðŁį ķ -organi zer -bo sses -ar u -u va -at able -ta un -after wards -fert ili -ver ge -az i -mor ph -๠ģภ-jer k -cosme tic -ko w -stru st -ap ache -post cards -for mul -ì ĭ -spin al -jack pot -elec tri -Ã Ń -lo y -gra der -diab lo -ar di -he sit -f w -arch ery -pa sh -the ories -repe al -re live -per cy -âĺ Ĩ -im in -syn chron -sham poo -coup ons -o to -la i -thou ght -luxembour g -mo v -ðŁĺ ¥ -ge mma -se ated -m ga -strat ford -un certainty -shi fts -est o -fo ol -fire arms -cor rie -ki ki -appa rent -p ills -olym pia -fi d -elev ated -de cks -ignor ing -av alan -ro v -whist le -p tsd -milit ants -robo tic -pac ers -quil t -bankrupt cy -lic h -per cussion -celebr ity -al s -( ; -su t -pokemon go -h g -off s -gibr altar -scre ams -billi e -gen ome -mar in -be ams -arch bishop -em in -bedro oms -g ated -ol ly -warran ty -at own -cudd les -gun na -k ic -vi ve -cy mru -nar row -pro b -le o -refe rences -manufac tured -cho pper -brun swick -sem is -don ia -r ye -man o -hur ting -? 
# -hol li -investig ations -c els -ðŁĵ ŀ -le ster -temp les -sto rey -mc mahon -toi lets -wo of -ï¸ İ -le verage -at om -night mares -victor ious -haun ting -custom er -ag i -yo ongi -mon ty -ver onica -w ur -inti mid -blan kets -volu tion -j m -âĺ İ -am on -jud ith -ðŁĺİ ðŁĺİ -distr acted -dri p -hurric ane -and es -revel ation -tro op -ab leg -col lin -tibet an -wor rying -inter nationally -eat er -camero on -brad or -y uk -ðŁĴĹ ðŁĴĹ -tra k -slo pes -ci er -ne a -ol er -ta ka -albi on -volcan ic -am n -a fi -ob stac -face time -ger ing -n pr -metall ica -organ ic -ðŁĴ ¡ -ki dd -d ances -pemb ro -wash er -m its -om er -emo tionally -tan go -ip o -do cks -scan ning -spec s -tho m -the ology -emer gen -om i -g pa -selec tions -un necessary -ima ge -ter s -induc ed -gi gan -rent als -supp lied -m fa -shan kar -lat er -pa jam -cla ve -Ù ģ -ma hin -carl son -avi an -ano va -kati e -aj ith -design ated -chocol ates -investig ators -gla zed -prin cess -er ry -ra gn -ou rable -hr u -sun dance -peuge ot -steam punk -gh lin -gre ase -hi res -z ap -per ce -j ill -tom e -he hehe -joy ful -mae stro -ni shed -gene alo -v ich -p its -fox es -good man -emer son -lo bes -con verse -o ats -thom son -ra him -mal ware -ah i -man kind -re sin -im g -sw ood -kin der -sc roll -ar a -sak ura -ro bbed -xi on -ny a -c ism -ce dar -be in -mour ning -tor to -heath row -done gal -bar b -hydr ation -k or -elim ination -su pdates -hill s -appe ti -star red -ko m -gw en -dd d -cra y -sc anner -personal ised -seren ity -re design -meta ph -box ed -judg ment -no se -ë ¹ -er ad -ac ne -supp liers -ener getic -v om -as ap -ðŁĶ ¸ -ir vine -hat ch -la ss -ad ren -waff les -accur ately -ici o -itt le -se un -occup y -web cam -thene w -ent es -ga i -j w -accoun table -vis or -ir rit -licen sing -hudder sfield -gen ie -ðŁİ ¾ -atmo spheric -ten sions -spart an -clif ford -ol an -north bound -ame en -cen sor -u el -ster y -$ $ -far rell -hy ster -cl t -se dan -rep lied -descri bing -micro wave -sla b -pro 
sp -assi sting -ru bio -e than -hh hhh -gu ay -z man -ra ise -roll ing -o e -n ile -ambro se -scar borough -hero ic -coo ks -mor t -chop ra -ðŁĮ · -to b -shav ing -stac ey -dor m -motor sports -wi ki -fol ds -sp iced -stress ful -liter al -fu dge -pe ggy -wa ite -tre sses -se sh -pr ic -ðŁİ ħ -fri ght -r va -mumb ai -po m -tt v -cel lar -tom e -andro id -dor is -tsun ami -tin der -o ec -m wc -dor tmund -no thin -l iti -so u -believe in -at u -kno cks -mag ni -ss sss -ro hit -ine ws -ang i -m andy -ke ttle -intermedi ate -av ant -cur l -endor sed -ori o -ur t -consider ation -wi res -shel ters -b ino -vik ram -imple mented -ly dia -bu k -paro dy -c news -under graduate -canu cks -sam i -polit ically -ro tten -gh z -tex tiles -over load -moder ni -recre ational -fli r -bat on -typo graphy -ov ation -intrigu ing -pilgri mage -al ge -ad ays -tcm party -sp elled -cur ls -boo ze -ste m -ann es -ir ls -spon ge -sho pper -sig nation -bra ss -mi stress -le ah -beg inner -lau derdale -augu st -pre school -ta ping -tai pei -execu tives -b d -rhe tor -esc or -immun o -deeplear ning -stat ues -it us -manu script -ly ric -cor vette -mol ly -la ge -de p -cn bc -le st -je ssi -fi fe -griff ith -oppo sing -ran g -dr ills -respec tful -p ity -d ell -har ding -play boy -blo ke -shut out -k ili -o sp -se attle -bc poli -mis es -journ als -team ing -es ther -fre ddy -Ķ ï¸ı -metr ics -no tre -gar ry -for ty -navi gate -perio ds -bened ic -j id -da w -ance stors -restor ing -con g -aller gy -tit anium -c ence -lean ing -ab bas -v ast -uc f -roof ing -e man -seve rely -vo gue -ve au -in bound -d z -tane ously -stret ching -man chester -dr yer -dav is -kan th -the game -it ted -re tain -el les -conge stion -frat ernity -ol lie -lo ki -fre ely -cho o -pon y -sc ep -tab ly -bal t -rock n -di me -lo gging -ðŁį · -ad u -ha voc -water ford -char is -swee tie -run ning -ner d -erdo gan -z ara -weigh ing -fif ty -pre cise -low ell -kurdi stan -r yo -or th -syn th -lin ers -phenomen on -art illery 
-il legally -constru ct -nostal gic -gar th -al ta -shel ton -a sean -w ander -dur ban -di versi -bon o -cl on -le man -sh un -obstac les -appet ite -fe eder -respir atory -di xie -formu la -an to -so ber -extin ct -au c -ing les -legitim ate -; ; -min nie -ipsw ich -dram atically -ðŁijı ðŁı¼ -ingh am -milit ary -mon et -us navy -for k -dun no -play er -q otd -st oo -ex or -ethiop ian -film fest -pe red -c ate -sau di -in ner -sin cere -tion ality -ale e -de eds -cooper ative -ir onic -cro cod -br ary -post season -cam per -can ary -e in -exten sions -nb d -sher wood -spo kane -hu mp -jit su -ê ¹ -dar yl -p si -stab bed -offer ings -expe cts -cav al -body building -fr aming -f ca -ye arly -bom bed -sk il -resear ching -jud iciary -gree ted -tu dor -mil o -innov ate -ðŁĺ Ľ -r hs -ru by -contribu tor -fam er -soci ally -m lin -fi ery -ut ter -beau t -it os -de voted -rain bow -bar ney -pe ren -ar jun -r na -gab by -ut i -hann ity -pick le -ser v -qu akes -pp e -fe m -wh itec -j n -victor ies -ðŁ§ ¡ -gol fer -congratul ates -resul ting -mechan ic -ur ve -cen tered -kie v -an s -in cub -< < -c mo -bestfan army -dap h -en ham -on cology -ku sh -t xt -ori ented -fashion able -c sr -sa hara -r ack -pd p -han son -ภĩ -ti ers -ra r -pan am -in sky -sa hi -testam ent -asth ma -in her -fisher ies -or der -ho we -gall on -ep is -suz anne -drow ning -paneli sts -ðŁĺ ² -ë ¦ -al ach -commemor ative -at tribu -ðŁij » -mo o -visi onal -week sary -gu st -ak in -poin te -ee e -di spar -ni pp -dent al -st all -pi an -bor e -ul ster -tic k -ir r -tae hyung -micro phone -bermu da -ga ard -el er -plumb ing -hu gely -âļ« ï¸ı -race way -cam bridge -mar cel -burn ley -to ast -holly wood -fa sting -me red -hib ition -ca pped -benef icial -ow ning -cont amin -arab ian -to on -cap ac -hul u -sm ir -nutri ents -se in -graph s -con ditional -ðŁij ħ -or ac -play in -nor the -tor nad -mar ian -ju mbo -lex i -incredible india -road to -uk one -confu sing -sp h -shan k -pi ed -mq m -positi vely 
-sher ry -path ways -consi ders -tof u -argu ments -resil ient -che tt -with dra -ter o -ated ly -sw ana -he b -fli ght -har ley -decre ase -kind le -book shop -³ ï¸ı -marty rs -sm ur -mc cl -concer to -sti me -rejo ice -app lau -cle ment -mer kel -jai me -im mortal -isle of -mar co -youtu ber -stal king -me too -st ack -sp ouse -u st -lu v -âļ¾ ï¸ı -eque strian -ev ing -fl in -nick name -the big -as ar -st acks -wal ker -bor a -kidnapp ed -hur ling -humb old -rec alls -co pper -ann is -se o -mer ger -mu ir -ad dy -ðŁĴª ðŁĴª -be x -cr acy -con an -congratul ation -mid st -âĻ ¬ -for bi -op tic -cr ate -crocod ile -mad agas -secur ing -ast on -o gue -savi or -salis bury -love it -fuji film -cast les -as st -ar rows -sp acious -tr s -poly vore -progre ssion -m ri -nel son -bi m -indic ator -o da -pe pe -re signation -gu t -sne aker -log ically -az y -are lla -te aring -jo shi -ssion ism -q pr -mari ah -p x -ble ed -mi an -med ley -we iss -ker ry -gat ory -at al -madi son -av enger -nab y -pl and -gi les -fresh water -d ington -ta j -demonstr ates -n tv -bul bs -sunday morning -pe ake -souven ir -wa h -ton nes -m kt -complex ity -con den -ross i -b ing -y ds -su k -n go -mid land -ol y -life is -ri pple -mo reno -dd ers -tu s -á ĥ -bou l -x a -hol dings -wn y -shadowhun ters -ke i -asp ire -m ous -ow en -so ak -skir ts -moun taine -stor ming -ch rome -ri ots -sar ato -amaz e -less ness -nav ar -crit eria -ra fa -indul ge -ay er -por to -nam o -........ ........ 
-yi elds -val le -j h -mac ron -sa ins -dur ant -tra ilers -wo t -confeder ate -sh rin -id ol -form ally -ten e -motor cycles -than g -no de -bang er -dal y -p ats -enroll ment -au ctions -at al -ar bor -lo gos -de arest -trans action -dom ingo -fle a -ser mon -de ck -sin cere -questi oning -juli o -was p -pre tz -armen ian -k ham -inflam mation -picture sque -acci dental -film makers -ðŁĺ ļ -ðŁĴ į -ca sey -so b -yee zy -good will -parag ra -ss ly -fe ather -dy ed -assassin ation -na de -b cs -app lies -femin ine -fe u -ext ent -depu ties -l ack -psy chic -go i -kill ings -pse u -ðŁ¤ ª -un c -mar l -tan e -mck enna -sur fer -influ ences -free way -hack ney -mal aria -el and -te au -rema stered -Ø ± -raz or -gg y -cor ro -lak sh -fla ir -honest y -hoor ay -de pp -am c -wedne sdays -q a -ed its -- $ -se villa -dou bled -human ities -c cot -som os -r ine -af a -si oux -re construction -wel ding -th reads -am ish -encoura gement -po der -bo ck -bal m -p tions -stand up -accompli shments -guar ding -convic tion -ac ion -napo leon -depic ting -att ack -su i -wear able -âĸª ï¸ı -pot ter -esc ort -vis e -to ts -bo on -event profs -angu lar -womenshi storymonth -bar row -sch i -ac comp -ti k -l end -kensing ton -wol fe -st acked -cra shing -exhi bit -wing ed -sab rina -ma sa -k ms -alway s -et t -pla sma -counsel ing -pick les -nfl draft -mr s -inev itable -coura geous -staf ford -writers life -ho s -e j -gh yun -trade mark -adri an -influen cer -coron ation -ra ging -explo red -usa f -excep tion -eu x -tan ker -sw ami -pac ket -ðŁij¨ âĢį -f en -she en -a ero -j l -re gal -nw t -au ster -meh ta -char ge -a ste -b ate -inf eld -racec ourse -collap sed -fle ece -z il -al lie -alternati ves -geor ges -ðŁĵ į -quir ky -fc b -nat geo -philanthro py -bra i -every day -ðŁIJ ° -ach ers -ja an -fin es -q i -fisher man -distin ct -gri mes -nation alist -comm ence -ro wn -âĢ ³ -z ing -f ter -hr w -baro que -bl ender -kitt y -hoo ks -c ited -w anda -consen sus -reinde er -an and -supp 
ly -me ds -v n -ol ph -rat chet -shel don -secur ities -ë°© íĥ -cro m -mosqu ito -j eric -im mac -dimen sions -â ¤ -di ssi -sponge bob -dami en -steven son -jo anne -del ish -yi kes -than x -surve ys -postpon ed -alco holic -al ised -ðŁĻı ðŁı» -do ch -sen tim -mered ith -com pares -b ago -happy days -mo ss -ãħ ĭ -ne c -gn ment -frustr ated -comb in -ri v -ec lec -col lo -compli ment -actor slife -ct to -nic ar -op hon -apar the -man t -ja de -trol ley -optimi zation -eye on -eco logical -qui st -ep he -ॠĩ -cin co -appo ints -old school -c pr -behavi oral -min aj -:- ( -tag ging -ev al -jo aqu -ðŁĺ « -ha k -de me -jama ican -so s -hy att -hand book -libr arian -hanni bal -pump ing -ch om -f man -ga i -hu ll -respon ders -green ville -n us -vau gh -ðŁİī ðŁİī -ta xi -gold berg -man tra -te ase -forbi dden -metho dist -ati vity -* *** -ec t -mc gr -Ħ ëĭ -se b -amid st -disapp ear -thy ro -phili ps -er ina -v icious -stream er -million aire -ma p -str ick -hack athon -gh a -ed ic -mi ka -pe ck -ill i -anto ine -ar ca -op tic -ma ure -ðŁĩ¦ ðŁĩº -cla shes -man ly -âĺ ģ -al var -and res -me i -el m -ww ww -al tered -l te -ê¹ Ģ -mo jo -for rest -thal ai -non t -spee ches -acknow ledge -ign ite -x factor -ðŁ¥ Ĥ -mead ow -disru pt -debu ted -scrim mage -pharmaceu tical -fi dd -found ations -philosop her -et al -publi shers -bo ys -c ke -ru gged -opti mism -re be -phil harmon -nar cis -ral lies -lu is -go blue -fol ded -un acceptable -optim al -li sa -pol aro -+ . 
-en za -âĿ £ï¸ı -mon opoly -grace ful -dair y -du a -diffic ulty -judge ment -o si -mer sey -flu x -new found -ter ns -dimen sional -in vic -al ba -am it -abudha bi -alger ia -autom obile -the ad -lo tion -acceler ator -vac ant -iti on -lu f -al ic -pl l -bla zing -ba z -sen e -ðŁij ¼ -villa ins -direc tory -eis en -to ck -broch ure -ri pp -hb d -zayn malik -nic he -lo lol -certific ates -mor se -fac up -x ham -un wanted -im ports -carne gie -fan sign -mo u -r alph -destroy er -sw ing -trek king -cili ation -pit bull -g aps -ho well -defin itive -mc le -f ps -et z -bol ly -lyn n -gan o -at ure -fur suit -co il -na v -but ts -tro jans -eu re -en ko -sch umer -horri fic -install ment -br b -subur bs -a bel -vi r -de sh -cun ningham -ðŁIJ » -span n -sch we -ke mp -tr u -ste alth -qu es -le w -deli ghts -ko ch -hu mili -cr iti -il t -sp ells -mi ley -car ic -ðŁį ´ -lc fc -substitu te -oun g -? !! -af fir -predic table -class of -er r -cy press -chand ra -age ing -__ __ -ther land -don caster -el in -yo shi -sail ors -har ris -jo anna -niger ians -h ers -pla gue -pro cra -k no -can ton -busine s -un h -pra kash -c in -bow en -co ating -m als -be gging -smith son -ponti ac -sp ies -dam ian -pl ine -und ant -al ta -one ss -shame less -da q -bb m -wal es -stam pede -ser um -Ù Ĩ -cataly st -x n -ab sc -free zer -ch un -ari os -mc cre -fore head -he ars -damas cus -tac oma -ardu ino -encoun ters -stan ton -lg b -ab as -" .. 
-ke te -drac ula -ele m -g ne -zepp elin -la brador -pul p -op tional -or n -russi ans -san itation -hil ary -etsym ntt -pen alties -au st -ig ans -olympi an -medic aid -vers ace -va pe -re stra -pe ep -sexi est -st alls -di le -the a -punjab i -pupp y -tuesday motivation -ðŁĵ ļ -the flash -roc ket -mo dest -chihu ahu -on na -k sa -hur dles -ca ve -fail ures -sp lit -bo ho -gur l -disappo int -ho ward -nug get -fran z -stal ert -kaz akh -for getting -sch ri -ag ate -am at -eve rett -du et -veter inary -juli an -ch ills -bra ve -ghost busters -lan do -gre ets -profit able -d é -ti r -ze e -om en -pd x -gray son -har i -fix es -stab bing -swim mer -symb ols -compli ments -po se -func tioning -th nx -gi r -corpor ations -bar low -lo e -off season -distin ctive -marvel ous -nik on -enri que -ky u -ja ws -amo to -lom bar -travel blogger -fa h -ouri sm -tri stan -so e -ce ase -ðŁı ħ -z ac -mck enzie -taxpay ers -swim suit -bl o -les ley -kan sas -w ks -ki el -provo king -my les -str ing -kangar oo -galac tic -fif th -s ke -we ir -ll is -mat ory -ðŁĩ ¿ -un ci -re productive -roo ting -ti des -gad get -.... ...... 
-alex ander -bow ler -scre w -apo log -eri ka -wal ters -shet ty -lan e -ban ter -as ant -me so -v ain -" "" -us i -fer din -accomp lish -man sfield -bom bar -collabor ating -cla p -it ure -s da -smo ky -na k -im person -car la -com ra -bur gl -lo co -ti es -in hi -trac ey -se is -diss er -rr rr -dra y -prote ct -cor ona -hun ger -ck en -c eli -trou bled -predat ors -fic tional -shav ed -riche st -metab oli -ful ham -gro oming -mono chrome -wa sting -as co -ast e -ti sta -remedi es -ung soo -south end -perman ently -bu mble -procra stin -ident ical -practic ally -ma scul -su ke -assu red -val erie -devi ant -grizz lies -thi er -pur a -ne pal -not ts -bil ateral -spo il -car mel -cine matic -ph l -ni fty -ma o -hypo cri -la ser -pan try -mathemat ical -el isa -coordin ation -bel mont -a it -radi ant -bo iler -man g -f ag -cr c -h ams -br in -â¬ĩ ï¸ı -famil ia -âĿ £ -sab er -ru pert -gg an -rit z -mic h -sal ford -le vi -gra l -ðŁĴ ¤ -n ino -ce d -business man -ul tr -sim ply -compre ssion -pa ins -hal t -ë°©íĥ Ħ -landsc aping -n f -croo ked -er d -itt in -ddle ston -sur passed -ino a -da g -bl en -exten ding -at ing -al gae -ball er -u mar -snoo ker -col lu -flo wn -thu b -ridic ulously -ki sh -op le -di re -as ser -ari sto -sc iss -h ating -trou ble -syl via -suc cul -plo ts -sincere ly -al er -laure ate -br ack -att n -rif les -me to -collec tible -cu omo -conte stant -consist ency -ant z -rang es -abig ail -de b -mini ster -grow ers -an oo -hoo ver -dream er -nu cle -resear ch -mi y -sha hid -ma v -d honi -cin i -do j -hin dus -part ying -dal i -alon so -inform al -clark son -it ton -ki an -cit yo -mor i -la sted -as pen -libr ary -susp ici -qu at -den ial -fol der -ch ori -swee ping -eni x -ðŁį Ĥ -Ø Ń -nas car -handmade hour -mou l -heat wave -em er -exam ine -ib n -gr ind -po v -tion ist -m bo -she ila -integr ate -om es -take away -cer v -con nie -tic ket -ce led -bi en -visu ally -madagas car -sor ry -gu i -park run -tra its -la be -pois oning -à¥ Ģ -vi able 
-bohemi an -denti stry -bad os -spr outs -mask ed -te ddy -ðŁĺ · -sa f -sa as -ji ang -ti ght -spe aker -withdra wal -bc n -as signed -class rooms -fle ming -ðŁĴ « -super girl -tot als -table top -e books -horizon tal -cra z -flu sh -j ard -c dc -er son -ãħ ł -green wood -ni h -co x -ad a -lit re -go ing -v icky -cur ved -lou ie -gra ins -hy e -lon ge -reme dy -tra inee -san jay -super stars -ma ser -man u -s age -wh l -ðŁĺĤ ðŁĺŃ -ðŁijį ðŁı» -m sd -en z -rab hu -j oo -gh u -ac er -e po -resurrec tion -justice for -bl ended -mo da -avalan che -france sco -re spective -g s -ye ast -wel ch -devo tion -ge tin -athe ism -am ic -carol yn -lo c -ld nont -ave c -us da -le gged -bra very -b lower -cow boy -he h -sti ble -buff al -chann el -run chat -âĺķ ï¸ı -ide ology -best seller -y oo -pe anu -bon ne -fel ic -edi son -fr actu -naren dra -pp ets -seym our -ri viera -he ctor -necess arily -bi anca -soci eties -the best -w g -sent ences -win k -vacc ines -pal ooza -jam ming -as f -mp us -agre ements -ec k -ba c -hon ore -com pul -wild cat -im posed -yo ga -hud son -can celed -l ich -fu zzy -es que -ch uk -w vu -se k -fli pping -r hon -wi shed -wh a -cap ability -len ovo -ìĨĮëħ Ħëĭ -vi vo -tv d -nor a -sil k -pas adena -yo semite -valu ation -clo cks -u ber -mr c -dar kest -au bre -ss o -bell y -wrest lers -kill in -lou der -buck ley -ge el -ad on -un s -appe aling -ðŁij ¯ -semit ism -list ens -fit z -ãĥ³ ãĥ -ny lon -ar ty -seem ingly -hal a -su ited -et y -she ds -mu ffins -ap ric -um ents -u ta -jam mu -chelse afc -star z -yo ko -roo t -clean sing -di ar -pione ering -ihear tradio -dig iti -fin dyour -can o -ðŁĴ İ -z ol -spac ecraft -six ers -moi sturi -b ile -ti sts -hor ton -rang ing -colum bi -mete oro -senti ment -ep l -foo th -text book -drain age -r ly -sc ue -imran khan -ðŁĴ ¸ -margar ita -ed dy -predic ts -gamer gate -advis e -growth hacking -love you -ug and -v f -beng hazi -s later -ne wor -ch el -independence day -p np -cul len -hoo dies -num bered -brit t -t sa 
-kl tu -s ages -mom o -onep lus -col l -gu ts -w ta -mesm eri -enh ancing -chiro prac -j is -teen agers -m one -constell ation -sweep stakes -e ze -slovak ia -la ye -pear ce -wa ver -po gba -k ron -sur geons -mar x -ti d -gg a -desc end -p ours -upri sing -wal la -sab bath -bachel ore -mack in -k am -peter borough -hor a -ðŁĮŁ ðŁĮŁ -think big -r j -hy drau -sp al -univers it -ðŁı ī -mail online -league of -ten ants -w ally -lan ce -heav ens -dd r -bol ts -am ir -i phone -ci gar -en du -re i -el abor -r inging -john son -characteri stics -sal oon -algori thms -tal kin -m tn -di ve -region als -ff ice -hat i -deviant art -so tto -shir o -l ama -k we -f aded -por ting -tu mmy -est ates -buen os -ðŁ¦ ģ -beli ever -pen etr -dar n -sp ite -can opy -fashi oni -t illa -pet als -eli jah -bra wl -marty r -ë°©íĥĦ ìĨĮëħĦëĭ -mid town -eric h -d apper -sm town -me gam -ww w -le le -on s -cat fish -fir th -fossil friday -ball park -th aw -pot ent -illi e -cre ep -car p -so ap -gun dam -infe c -yy yyy -ठ¨ -z ag -rit t -calcu lator -bo ca -ok o -to ad -threat en -refin ed -olym pic -accompli shment -bacter ial -a ji -tat um -feli z -she ed -j at -th ic -jam al -ðĿ ĺ -lin a -ðŁIJ ¯ -jo king -yot po -pin ch -ak ron -her b -motiv ation -li a -ho stage -cre ek -gam ble -russ ell -patt i -fo tos -c pc -bro ken -back the -cla ys -u mm -stock ton -mat ernal -ü r -la kel -cent ury -be k -infe cted -ภ¡ -smack down -man ned -ta hoe -sm es -bas a -su la -augu sta -. 
* -rohing ya -gre ed -counsel or -silhou ette -gra vit -cla use -' - -bo bc -occa sions -now adays -dic tat -be ard -n ally -brigh test -kab ul -inc india -dhan ush -archae ological -che ape -mizz ou -d hi -ov ski -bax ter -asse mble -à ¢ -gi gi -ac am -wis ely -haz ard -north ampton -âľĪ ï¸ı -me th -bla sting -re unite -mu lus -ali zes -t read -mil a -ed ward -ko va -pe sto -ðŁij ¶ -vit z -hydrau lic -refurbi shed -mo tel -isab ella -hom me -sever ance -uph ol -mis erable -f ari -lat ter -ef er -crack ers -es l -ac io -yy j -in an -ec b -z ind -pan as -tru cking -re ed -sh aker -burge ss -em pire -ag nes -n ington -art works -fr s -ti le -bi ome -eu n -ch ong -americ ana -god father -go blin -i shi -! ). -temp ted -gen omics -mand ate -ck y -ðŁĴĻ ðŁĴĽ -som ali -br andy -in ven -spoke sperson -pc b -yu an -h g -fa z -starwar s -ro wan -blue grass -don g -d day -trin idad -er ton -ban ning -re tention -cu red -tober fest -re set -we is -deta ched -behindthe scenes -immun ity -ph a -bra y -ðŁij ½ -ran cho -ram say -est onia -nd tv -] . 
-cab aret -tar o -d v -show cases -plu m -ðŁij ¸ -son oma -pre pa -memor ab -e stu -drive way -u les -magn us -x r -nn n -much as -en ge -stre amed -fore stry -audio book -tro y -reck less -kil om -ru ler -ra k -proce ssion -i ons -po ole -noc tur -wh s -farm house -per a -par me -hypocri sy -s ics -v ant -cas k -holi stic -au st -Ð ¿ -in do -ðŁij© âĢį -di so -disp atch -ol sen -make it -en nis -cent re -ar range -ðŁĮ ¼ -sal ted -ea siest -f ate -reg atta -mo zz -ac an -sin i -g ically -ch ops -chick en -work in -ha gg -invol ve -wee ds -book day -wake up -ky r -michel in -fu ss -re juven -vac ancies -incar cer -m st -sc ents -sovere ign -kick er -à § -bo d -âĢĶ > -sa h -mob il -shrop shire -oph one -dress er -mis suni -hep burn -i mo -foli age -diagno stic -as san -cycl ing -guil t -c sa -puertor ico -win elover -wake field -do ggy -k he -pa pp -co g -al lot -cu ck -poe tic -mi o -re vit -mag ician -ç ¥ -ant enna -west wood -mber g -lux e -oat meal -Ø ¬ -te at -ffe e -sear ches -l ly -plu to -el on -let tering -inno cence -fa i -ann on -telang ana -ma it -neu ral -can ni -ar oma -a stor -fe x -co cac -mon etary -f ent -un sure -' @ -indi rec -teh ran -isol ation -li bs -make up -merce des -ff y -he tero -de o -sco m -cur sed -veteran sday -franken stein -shre ws -de co -ge ese -lefto ver -ha did -vari able -acade mics -carol in -under going -vari ation -na h -ssi er -gamer sunite -pur suing -emer ged -ll ers -control ling -ro aring -mete or -vol t -daw gs -be aver -is life -bathro oms -aci onal -pre vent -lake district -in als -y ani -gra bbing -sac ks -le z -sw ay -k ool -time s -klo pp -la de -con cord -resul ted -revi ve -recon ciliation -ol and -az z -gir o -mand arin -de en -nutriti onal -is coming -van i -aw www -der ived -love your -stop the -shou ting -nov ak -ðŁĻĮ ðŁı¾ -lo af -displa ying -sunday with -ma guire -ch eri -ðŁı Ł -re match -qu ic -Ú © -y in -ðŁĺ ¹ -ili ve -z ip -our ke -down loads -sw at -missi ss -care rs -t ment -proper ty -hahahaha haha 
-gi bbs -sur rey -ar ise -tic ism -sti a -ir ling -fro g -co se -bas sist -fore ig -lea u -pil lows -hol la -eli e -disclo sure -peanu ts -inte ch -ww c -plun ge -trium ph -cor i -sli ppers -ðŁĻı ðŁĻı -neutr ality -ma re -hair y -gang ster -hu mming -cust ard -mer lin -ale a -s by -dam p -mo han -ver bal -j st -gu tted -b jor -un finished -ðŁĩ¯ðŁĩ µ -un happy -âļ« ï¸ı -by pass -at su -fis cher -sa v -afric ans -re use -mid way -demo lished -ger rard -her cules -Ä Ł -medic ines -cl icking -sur round -jo ong -wav ing -tri bes -wet lands -offici el -argu ing -l le -do va -su zy -club house -ne gro -ob tain -ga o -gl ance -assi st -ch os -ãĤ ¢ -âĺ ķ -adri d -occur s -st ans -par don -livel i -emplo yed -re visit -ff xiv -bb le -ne aring -min er -ðŁĺ ¹ -giov anni -up to -mar vell -mar se -to wels -cb n -engine ered -y elling -spart an -si ans -ðŁĻĮ ðŁı¼ -se v -coyo te -sta di -t cm -app en -shenan igans -open access -so aked -ma squ -le vine -stro kes -l k -aparthe id -hipho p -char don -may may -ha asan -stri pped -fr o -scri ption -f ton -h f -pri sons -marsh al -ķ ãĤ -an cho -com promise -classi fication -buzz feed -bblo ggers -deser ving -) / -s way -ob o -camp ers -poder nfamily -p oured -bri e -squir rels -se ize -: # -le k -ti mb -st acy -nas daq -repe atedly -br at -mi ghty -competit or -mah one -de si -o ke -bm w -shi e -f cb -cheape st -minim alist -par amount -n ate -har as -insan ity -lat eral -ment ality -mo zam -ta pped -yad av -u sp -b way -the od -bil t -ra ids -em press -adap ted -pat ron -nut shell -ag ra -be aded -sundaywith marsha -vi king -proce ed -main tained -thinkbig sundaywithmarsha -sn es -mus ica -to wer -ch ab -bo k -sm t -insul t -harve sting -windo w -ru ther -be ige -dec al -indic ate -ma iling -ri ft -po le -ander son -ch oral -sp ride -l ili -ev elyn -imrankhan pti -.... 
" -ke red -un dp -water falls -se ars -le mans -world series -ri el -ani e -app ar -score rs -lam p -a than -phys icians -qu inoa -refu sing -vu itton -unle ash -s la -pat i -shou ts -inten tions -fo amed -europe an -neighbor hoods -me er -man son -du h -br at -con es -bow l -kazakh stan -ठ¿ -in appropriate -del hi -ketch up -ful ton -s ys -consul t -gar field -to go -f ml -f led -b ds -facilit ate -ree bok -selfi e -elev ate -activ ate -bi ble -ca wx -b ys -cam ille -sy ou -sk ool -her t -w bc -ple dges -recor der -po sh -ac re -so aking -mat il -v sco -shoot ings -pla r -e con -ðŁĻĮ ðŁı» -rashi d -u bi -ðŁ¤ ¤ -sw inging -wi pe -rap tor -m su -music video -dur ham -at tic -apar ty -fe tus -activ ation -aa z -motiv ate -ðŁĴķ ðŁĴķðŁĴķ -j al -ठ® -ag on -sche er -stal ker -fo ster -az zo -tele gram -vi gor -s laugh -screen shots -entrepre neu -kri stin -inten tion -ch illi -fr action -don a -ge a -tc u -s ite -la k -em il -d nt -bor o -wil kinson -re cu -ato day -t anya -bl anco -cd n -brilli antly -g cc -ac c -evacu ated -ther ine -den ny -cait lin -she pard -pou ch -hand held -sou theastern -ha a -à ´ -re solutions -led ger -sr in -r ar -shat tered -chim ney -im with -mete or -hand led -ra ke -town send -en han -shi py -duc t -tw x -inflam matory -war hammer -theat rical -gro s -sk ar -sco tty -ni el -tit o -tin i -conne ction -_ . 
-goldeng lobes -sha q -ðŁı ³ï¸ı -hall way -fron ts -effec tiveness -gla ston -d hs -ex pi -to h -c pl -sc s -re o -ha g -resemb lance -hor an -abu sive -qu er -virtu e -cho lester -a q -shan e -m ce -carri ers -di stress -re wind - ¡ -voo doo -int act -ann o -ðŁĺ ¤ -pi led -adi a -ãĥ ³ -en ow -di gs -light ly -goo fy -turb ine -governor s -con te -re open -pa h -i ve -cra fting -swee ps -jo di -an de -zu cker -kaw aii -o ko -v ai -out line -kri sti -ts n -insp o -qu int -fil thy -lyn ne -listen ers -depar ting -or d -t weed -, & -ale k -sel fish -nor ther -recogni zes -i ps -be s -a ed -w ills -pe at -surround ings -mon uments -ais le -be cker -la v -quant ity -v ah -helicop ters -tu cked -alv arez -sha pe -o bey -ad diti -road side -m ite -bl ers -ep age -j au -ignor ant -b ins -lu lu -x o -c fo -ee eee -apprentice ship -shef fiel -to i -ho k -faken ews -deplo y -aid an -husk ers -ãĢ İ -west brook -mi ster -confi gur -car r -fic a -proceed ings -ha w -ste ak -mur derer -pay day -a jo -p vc -don ates -bi af -nom nom -be it -k ali -x rp -ahmed abad -se mic -che y -x tra -an twer -head lining -squ ares -roun ded -flu ore -bol d -disa sters -am oo -gener ic -cran es -brief ly -gi g -auster ity -anticip ation -for ti -treas urer -cann y -ce cil -dete cted -check list -ภ§ -pam ela -bar bados -an field -hear ty -tx lege -peren ni -arro g -ing ram -âĹ ı -ty ne -spo on -r ation -am ba -m be -cam el -h hs -york shire -reflec tive -fre aks -to k -ju do -partic les -du bs -ban jo -accred itation -prover bs -over dose -inte gral -gu ang -mc s -super car -af b -al vin -ail s -x tre -st aging -tw ent -rabb its -mar o -inste m -dol l -cr ay -sant ana -ble ach -mini ons -che ap -man t -di vers -catal onia -lo is -mat ri -cou gar -kay ak -e gre -p so -a ia -å ® -char lton -tr acked -sc ari -pe tt -f wd -x in -gra vel -br ic -bigg boss -ar den -hu gging -pal ms -st v -li mb -the movie -handic ap -ri me -z ai -stu b -indi a -lithu ania -rhy th -p ita -maced onia -high ered -brid get 
-schwar z -ske let -hi kes -ant arctic -c ps -mash up -Ð ° -n ell -chand ra -he ir -an us -sher idan -mi mi -muse u -bec ca -an ir -bar rie -dioce se -compar able -ðŁı³ï¸ı âĢį -yuk on -me p -hor mon -mer ic -al f -con quered -christ church -ðŁĴĻ ðŁĴĻ -hazard ous -poo h -cont ing -retro spective -par ame -na ir -con sor -ho tra -astoni shing -cater pillar -u man -ti sm -t vs -serv ic -croy don -mor ales -c g -cu m -te ur -scan ada -s all -magno lia -el ise -th our -à® ¿ -ag omez -phel ps -ë°©íĥĦìĨĮëħĦëĭ ¨ -wh os -weav ing -si sd -pro poses -cro ws -pre sale -econom ies -bernar do -sha hid -air show -mc cann -hor ticul -nr l -du el -mongo lia -tou lou -requi rement -struc tured -ed i -o lives -he a -cu ter -Ð º -enthusi ast -harri et -domin ion -sub mer -ðŁį ĥ -sa ab -nes burg -mo ff -def ended -bur t -rewar ded -gold man -op tics -khali d -house holds -buc kets -ce cil -che ss -substan tial -ef l -oper ation -evalu ate -st n -rece ssion -l ll -tom as -tru ths -ak bar -s words -p act -embarra ss -ha o -ay urve -scrip ture -ny cc -op t -di ameter -sc ented -organi zers -re lat -ha e -dream ers -de se -ðŁĮ » -restric ted -n ale -r hp -dol an -mun ster -ha ired -consult ants -jo ints -hu mil -d ill -relent less -t é -af il -ut ilities -japan ese -condem n -pet ite -colli de -q f -peach es -cou rier -l ore -âĺİ ï¸ı -reli ability -ch uk -ðŁĻ ĥ -stu res -ge ther -ho stel -bi er -- _- -â ĩ -e ze -ta ilo -di ent -blu ff -chu ffed -pil ip -mon arch -e em -bu chan -b ick -op au -ku ps -ภ¢ -pist ons -sp ins -m and -ce st -bur ne -v ile -cher ries -bec kett -need les -pan ch -ë Ĥ -haha h -trou bles -insi sts -do you -g mc -mor tar -deleg ate -in n -g anda -sin atra -ठ¤ -spee ding -pu pil -pre mises -ali gnment -pi kach -as us -j alan -Ø µ -lime stone -fol kl -parme san -ce il -mo y -shawn mendes -ac up -hu st -ot es -med ina -ma di -gta v -censor ship -ar g -swe eney -sy kes -col o -foot steps -cann ed -adv ance -gta online -healthy living -ðŁį ¾ -a ig -p ality -oc s -he brew 
-im minent -berk shire -jeremi ah -out going -bak er -entr ata -ma ids -gro ves -bo c -a del -m fw -con science -arm ys -nut ella -conte stalert -novel ist -la h -ban ker -marque z -ðŁı ¡ -to ff -out age -gr p -ðŁĺŃðŁĺŃ ðŁĺŃðŁĺŃ -musc le -du dley -nvi dia -mi di -m uni -ess ays -dat ac -car ter -ภ£ -t ans -i ves -public ations -al er -ok wx -il u -cu tt -har p -out law -luther an -br ill -bo lic -do well -green land -be sties -path i -pay ton -gue st -har den -ðŁ¤ © -ann ed -evacu ation -po ised -mc der -b han -o i -envel ope -ci d -ca vi -ta pas -book review -grey hound -âĻ ª -fe ud -lun gs -for te -rai der -ff er -oni x -dep end -yn wa -rel ating -de vs -ðŁĴ IJ -acqui res -d ha -j yo -priv ati -can ine -k b -cra b -sar din -imag ining -k j -em por -down hill -ne z -ta eyeon -nick imin -gb p -à µ -w ap -sec co -ma shed -ðŁĴ¥ ðŁĴ¥ -augu stine -diss ol -dic tator -â ĵ -vi per -ed fringe -vau x -hard work -book let -no x -chi ff -ðŁĴ ¨ -observ ations -xbox one -u sher -ke er -lu p -dal las -cal gary -ma dra -di ous -k bs -wood ward -hero ine -lu mber -sea world -o ws -mc ke -maver ick -gu la -cross roads -fan g -s ade -nik ol -chee tah -me c -pp g -er ick -ðŁİ µ -tox ic -bj j -viol a -sp ire -ch ino -tra vis -institu tional -ha as -low ry -w ac -ea e -hu mid -mp ton -ru ck -je w -c ine -zim mer -se f -bhar at -fre es -aam ir -ðŁĴ ħ -z inc -wan e -multi player -royal wedding -e el -preci pit -qu ery -kimber ly -isa bel -ful fill -ig an -vau l -pan e -sc y -dig it -gun n -u tah -dog day -fi on -xia omi -da c -el ast -cha vez -ro blo -g ine -ten th -ab h -ke to -hur dle -na dia -memorab ilia -ha bs -qu an -h w -hv ac -pix ar -ec cle -kram er -accu ses -ðŁĴļ ðŁĴļ -per se -mean time -wa hl -atle tico -âĢ¢âĢ¢ âĢ¢âĢ¢ -ott oman -no vo -k us -conne cted -tru sts -d mv -spen cer -rahu lg -do ve -sto kes -bolog na -enthusi asts -à ª -rockstar games -ted cruz -du ras -s acked -late x -immer sive -cer t -lu cin -princi pals -fa res -sa ils -far n -am ent -saf fron -quent in 
-check point -fer ris -ex cur -ðŁijī ðŁı¼ -bai ley -se h -ter re -mad am -s band -wan derers -cumber batch -yy c -digit ally -blackandwhite photography -roll in -moroc can -ðŁĮ ħ -din ner -d well -to om -m ye -ez ra -cp fc -war hol -me er -jon ah -no aa -s gate -so on -secu lar -g ating -ti o -dri ver -si ssy -assan ge -ta th -ed mund -bobc ats -ra ji -po stage -stu ds -m gm -kat o -edin burgh -meet the -shir t -fa a -mens fashion -sp reads -wi m -car ts -phoe be -j ars -bot swana -Ù Ĥ -ed war -sk ar -ri ve -gu sty -c tv -ferdin and -su therland -nickimin aj -k v -si us -bee ch -re z -desi res -on ial -camp o -quar ry -lor raine -gil more -ig gy -µ ï¸ı -ho pping -avi z -ðŁĮ º -uni sex -dedic ate -att itudes -ste er -jun kie -rail way -y b -whi sper -key an -k us -ju g -di x -a ins -sum mon -ov ich -sy ed -her ald -ma ison -me ded -wild flower -main land -ri sky -ru kh -over looked -ki c -destro ys -nam an -ki p -z ano -champion sleague -ban dit -quin cy -smi le -cal vin -open ings -ta pp -ol ulu -spec tro -accred ited -ap k -pra ised -bar nett -pol len -premi ered -selen agomez -tou red -screen ings -uu u -mis o -en se -adam lambert -guel ph -har yana -hu tto -le ar -l tc -po ached -brex it -æ Ŀ -tt c -pa vement -mon gers -ro e -ad ers -ling ton -particip ant -ca red -ga il -y ates -lan tic -dash board -jo o -feli pe -ssi onist -bu m -s end -a eri -thu gs -luci fer -a he -dete ctor -fil ly -gas oline -ham per -hump day -the ta -the band -fore casts -o hhh -lo bb -hol l -cp u -az u -ad ar -hai ley -bu b -car t -quo ted -an archy -pan cre -twit art -al den -st ash -the less -or ni -belie bers -mor mon -partic le -avi ation -⬠Ĩ -webcam toy -sad dened -cru is -ham let -n ct -roll ins -marque e -saw yer -reli ance -a ura -di ec -soo thing -sig nings -ak is -à ³ -at kins -aer op -ðŁĮ ¿ -y ab -sh ari -con nol -du bbed -manufac ture -convin cing -feelthe bern -ra u -pu lit -on ec -gem stone -ur ging -bag u -ga h -aci ds -fi anc -zodi ac -sn oop -her rera -initi ated -ven 
ge -profess ors -pro di -stron ger -e mission -bb a -hal le -ta pp -haw an -wh im -compe ted -myr tle -ir port -cold play -ach e -ske p -m son -ss ic -calli graphy -swim mers -me y -pp c -thri ft -po c -re places -commu ter -âģ¦ âģ¦@ -go ers -lo gue -para dig -bas kets -sensiti vity -joh an -atl antis -& & -suit case -anxi ous -l h -str i -gal loway -stre ad -war den -gr ounded -ffici ency -li feat -reli c -disgu ise -island ers -f cofficial -classical music -b mc -en field -bi que -oak ley -bat man -sla ying -ner ves -mul tit -calci um -projec tor -scott sdale -ant ino -gri ps -kim mel -des mond -prote stors -hi atus -metaboli sm -conclu ded -press er -ti pping -sli de -e to -hun ting -aus open -ri k -pp ery -innov ators -pitch ers -ag ger -fun gi -z ad -proli fic -rockn roll -bl ames -ct ar -stam ford -q ad -mozz arella -insan ely -den ver -ph ouse -nom ad -ï ¿ -s ris -pro du -hen ley -pag an -am trak -ru bi -in cl -tu tor -sco tia -wo es -sing apo -fun nel -turn bull -know ledge -gri mm -real madrid -we are -missi les -con sol -emo jis -sne ak -smi ths -ru iz -br ou -i el -ha ver -ðŁĮ ļ -kin gof -basil ica -circul ation -prin ters -ta pping -ri dley -dra gged -ha j -writ er -fundament als -personal ities -me tre -stereo types -bur le -best of -n ffc -ha th -mini stries -a ali -trac ing -pav ed -ł ï¸ı -g ic -insp ire -tu g -ha re -repe ated -ex pon -lol li -rho de -pre cin -install ations -instag ram -az ar -i es -sole ly -du kes -mission ary -van guard -fursuit friday -on d -pol ari -ma st -har an -jos é -jack ed -ec oun -al ities -ne ph -ra vel -moder ated -sco w -s fb -uru guay -as o -ni g -au du -p ints -lat ina -ben z -m itting -char ted -mat ology -cit ro -biop ic -ðŁij Ń -djo kovic -fox y -agu il -so to -an ada -sin king -sc rap -hair s -bethan y -fact friday -ðŁIJ IJ -unlea shed -) ( -contra dic -ram on -coast line -y ong -sn sd -li gan -p ome -mit age -ge tt -wat i -ri sk -so aring -bru sh -f pl -av an -å Ĩ -lar son -sh ear -mul til -blu r -multi media 
-chun ky -par i -n ani -weir d -cholester ol -char les -dream ed -tan ning -puzz les -fr am -hand ball -ch ag -beli ze -al u -bang s -Ñ Ħ -detec tives -mc g -ish q -bo thered -saf c -mp ing -ten eri -g ays -sail or -an gi -mul ticul -gue ssed -ros é -high ways -bro om -chatt anoo -- ' -see ker -on ed -at f -lu c -> < -bar i -per cep -jewel ry -as ph -sor row -sl ing -mam moth -jac kie -ë § -wilt shire -sa o -can cell -im paired -tor ial -bre ed -guy en -jud ice -tit le -pro spective -applic ants -ðŁį Ĭ -epis cop -e id -b yo -stock ings -ðŁĴĥ ðŁĴĥ -ll p -sna g -keep it -l ough -ol son -matur ity -!! !" -cop ter -i sha -bl i -wil mington -tr youts -th ai -ðŁ¥ ³ -pe bble -kra ft -f p - º -ssi vely -li vin -contest ants -tex tures -jo an -h dr -film festival -prov ence -wi do -op end -c si -sto wn -cro ati -ad just -host ile -analy sts -il an -cu ppa -bru m -newfound land -good win -me tt -mall orca -plu gs -bu k -bb hutto -wrest le -sa ire -sho pped -for za -le head -vi vo -ba st -ro xy -reg is -hard working -hon olulu -desp air -young sters -ni g -impro mp -roll tide -de emed -tre ason -ru shed -for ged -ff f -pikach u -bri ggs -do it -ac cent -la us -gla ze -compet ent -a ho -photo g -mid field -le go -har vard -min orities -re illy -slic ed -once upon -initi ally -financi ally -landscape photography -har dro -qu o -mm ers -par kinson -smu gg -read iness -bru tally -glou cester -mp ed -bbhutto zardari -mur der -ye d -dat aviz -sr t -dow ning -bi ans -m ü -fle ck -fli pped -s ly -brilli ance -ri m -k um -bubb a -ko i -knit ted -sor g -ma is -ðŁĮ ² -ti ss -su stain -sen su -ak han -zi est -exam ines -chardon nay -user name -short list -re bs -on o -dar ing -hard wood -che que -righte ous -light ening -dir k -shra dd -du ra -down stairs -sh al -ami gos -ru ff -s law -ri es -red nation -man us -ðŁĩ§ ðŁĩ· -distin ction -u bun -dur an -mi gra -thi ans -la ver -domest ic -k x -jaz zy -justi fy -belong ing -insul ation -color stv -drun ken -chann eling -qu and -xi ii 
-enligh ten -kan o -fati ma -teen choice -terri fied -p ba -as ley -met museum -dun e -pack er -ki o -ðŁĴľ ðŁĴľ -bo iler -fas cism -ar mored -back grounds -in mates -embarra ssed -defin es -th d -we go -silic one -lo on -el ding -bor rowed -he mp -ak sh -kaw asaki -br y -de af -kill er -dispo sal -ðŁĩ ° -glaston bury -un covered -o xide -po ff -d ant -k j -ku ro -dri zzle -peop les -fe e -pro pri -dd lovato -pi ggy -ot is -aller gies -u bis -pengu in -ser a -vi z -prosp erous -ici des -tornad oes -sene gal -web cast -sto red -enchan ted -bb cone -bay area -entrepreneu rial -rednation rising -experim enting -ang an -lot to -they re -por e -er p -seren e -east wood -bro kers -bar ge -stal lion -timber lake -tailo red -dy stop -b ate -lat ors -di xit -bran son -dynam o -ky lie -shame ful -bt wn -spring time -mix ture -s ounded -lu ton -dad es -mal a -op ra -en ic -rahulg andhi -se wer -~~ ~~ -ky u -nor theastern -ca er -bc u -nir vana -kitch ens -ous y -al m -river dale -hid den -fl int -sp d -pat rons -katy perry -au gh -exhib itions -sm c -shu ts -at ore -da in -some thing -ber th -bo g -por ter -gen to -con cussion -ang lic -ro we -gr illing -scar lett -master ing -mor nin -comm ented -si me -si zing -christ y -ce os -st m -at ry -tari ffs -vac ation -pre judice -p su -paren tal -far age -can a -cap com -koso vo -you re -men stru -stal in -grape fruit -br an -che sa -dav en -exc el -!! 
) -๠Į -distribu tor -ce a -bride sma -millenni al -wa in -ob serving -mis ery -plan etary -expo sing -bra ised -comp ton -don gha -q l -spring steen -th ul -syl ve -cab o -pal ad -niel sen -gaz ing -ba ja -r oud -orchi ds -johan nesburg -se man -d ji -oper ative -affe ction -eclec tic -at c -mut ant -aw x -nic e -mel bourne -indu lg -tu lip -dias pora -wel p -big gie -mississ auga -retri ever -or an -tam my -c ta -hipp o -seas oned -ger mans -eng v -marvell ous -im f -rela ys -mon tan -maur iti -me ister -as surance -reig ning -su fficient -han e -no thing -pos se -nav y -in love -brigh ton -en qu -ch ung -sweat y -es c -cal ed -man s -nicar agua -sl ices -mo cha -washington post -bb n -dam ned -grow ing -en burg -lo an -me s -wh oops -believ ers -spi el -vo daf -l at -s led -cricke ter -brown e -golf ers -bar ra -wat chers -lu igi -sw amy -mom s -pit ched -san tor -cr s -si re -sc amp -bo de -ste war -jon ny -ent ity -pac qui -mind ful -min india -bear ded -temp t -scorpi on -eat on -authori zed -ar to -s vp -op athy -cch ini -house music -disney world -âĢĶ @ -pro pose -di y -expen se -ten g -pupp ets -sm el -d aca -per ry -fin n -boo sting -lefto vers -cou gs -satell ites -man y -az e -g ong -fi e -metho do -fer ries -ðŁ¤Ķ ðŁ¤Ķ -explore rs -load er -attrac ted -il ton -godd amn -pi azza -doc tr -sav ing -paragra ph -visu alization -may ors -work flow -ack les -ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ -ठ¸ -twer k -clu t -lo ver -te ases -si an -o te -deter ior -accor d -l fw -swar ovski -nat al -tra ps -k ina -analy ze -laye red -bever ages -un it -ran som -pe shaw -dest ined -astro logy -si pping -miley cyrus -cam ino -marshmal low -bli ss -out back -fa q -int oler -humil ity -po ppin -hallo ween -mon tene -op hy -nu n -tattoo ed -a as -ðŁĮ ³ -dale y -qual ity -du sa -fisher men -swi f -ter rac -st au -le in -trol ling -ship ment -garden er -march madness -head band -gr t -bur nett -w and -!!!! !!!!! 
-gh e -du x -hu d -war ner -ðŁĩ ¦ -ex ile -rescu e -rat a -d han -duc ati -dro wn -bl ends -spi e -alli gator -simul taneously -broo ke -u ke -k har -comm union -ri ka -ford fc -chin atown -you rown -me y -can al -syste matic -de pri -ox ford -an il -w ut -equ ation -be z -fle ur -the good -lang ley -ad ity -ed ith -al fie -о ÑĤ -en cry -br ill -ex emp -ce sar -mb ling -ab ri -sc icom -j ing -school ing -mi ka -mechan isms -impromp tu -rhe a -moo re -crime a -be sto -wri ght -el ders -ro ds -kam al -folkl ore -be et -mini on -reli eve -thr o -team usa -pas cal -made with -boli via -itt i -free bies -desi red -best selling -l iness -la den -ke ane -mi sts -hipp ie -atta chment -@ / -se w -flan agan -âĿĹ ï¸ı -supre mac -stl cards -si as -q u -rh ys -ste ep -val leys -v w -pav ing -disp at -al ison -por te -id u -new sc -soc ket -mo s -co star -re vo -prote ins -stanley cup -m cal -ear ring -se cs -mc lean -cap ric -nick elo -ad en -v c -shou se -adap tive -maxi mize -entertain er -pro se -gri ffi -six teen -lam ar -mi rage -saudi arabia -awe ather -ru st -in filtr -fashion week -ðŁĺĬðŁĺĬ ðŁĺĬ -selec tive -bubb le -a den -fen nel -deci sive -m ta -mock ing -mb les -st amp -mu le -bernar do -gr in -po tt -j ingle -vet tel -colom bian -cam o -motivation monday -ba han -p ly -dh ary -k ami -x men -sleep er -gar a -my sti -confi dential -conflic ts -p neu -ce s -insur tech -clean se -me rely -va is -tu x -the great -shar on -ma j -hol a -eco systems -aj ay -aa j -hu sh -har mon -backto school -wiki leaks -reflec ted -ðŁĺ ĵ -commemor ating -ac et -buck ingham -messi ah -tu ous -hor net -to be -d q -he ine -mi g -pl ate -nichol son -sp ie -cumber land -nor mal -pho bia -happy halloween -city fc -mc el -gilli an -ke to -lu de -de mise -su ga -str ate -mcgr ath -visit scotland -foo led -cb r -gc se -col ori -po td -missuni verse -fin ances -ma poli -for ks -Ø ´ -cann on -medic inal -ðŁĹ ĵ -kh o -wre ck -pan to -bag el -gu ll -syndic ate -ic y -pr c -ki en -zi ka -ti sh -pe ta 
-c co -li za -ch ut -ex traction -el g -gl i -fu eled -pos it -respec tively -leice ster -br ink -vulner ability -im ported -e sha -ðŁ¦ ħ -r ural -re ll -gam ing -atlan tic -aband on -no ah -re solved -pro state -aller gic -ps d -âĺ ¹ -dun geon -fang irl -illumin ated -m hs -white sox -d ently -ck o -endor se -over ly -dazz ling -prior iti -night life -ut il -be have -flam en -east bound -ðŁĴ Ł -ilove you -gov uk -mozam bique -alle gi -dr i -testim onial -ath s -ì§ Ģ -mm y -shab by -pro secco -friend ships -cal am -dam ages -off set -jura ssic -jun o -arre ll -ðŁĴ © -interven tions -dare devil -car ver -run away -ran e -truste es -ha ute -dep ths -ðŁİ Ń -me in -sacrific es -con cier -ne sting -i zzy -me tam -ilove my -ur ine -du lu -mal hotra -ve ins -night ly -co at -an di -he witt -lon el -ci ble -wr ite -jen nie -sant ac -ĸ ï¸ı -str ato -singapo re -sop rano -kri sten -cheer ful -flee twood -fa iri -m eli -wa st -tur nt -sfor sale -sc rolling -angel ina -ren dition -jeric ho -nick y -or b -fla vo -patri ot -ash eville -sick ness -re fund -aggre ssion -b pl -ãĥ ĥ -elu sive -thi story -hang er -bu ffs -vil las -at kinson -sp h -ja it -decl ined -wo k -supre macy -oo tball -ey ang -ðŁİ ĵ -s ford -ath i -consu me -road ster -e so -u pro -reci pe -au f -uc i -ar on -oo oh -cs go -re ich -mc d -min ute -ladi es -pun k -rut gers -mee k -ariz on -ta j -land lord -de gra -autu mn -lyn x -us f -b hi -fairy tale -dongha e -bet sy -explo ded -chen nai -op a -pro tag -br ant -ðŁĵ °: -g f -pal li -ðŁı¼ âĢįâĻĢï¸ı -su t -ill ini -colum nist -shir tless -de centr -sear ched -ec or -bu ggy -s ack -ðŁĺĤ ðŁĺŃ -de t -ther i -or naments -bring back -to v -quarter finals -ic he -con stra -gi er -buchan an -vi x -kay aking -mu stread -swal low -mel b -sc af -op al -may oral -har at -ðŁ¦ ĭ -schedu les -id f -ha gue -ro z -a ah -d mc -du plic -ca che -orph an -frac ture -rec on -ch av -bun nies -al ain -mustaf a -ðŁİ Ļ -vac ations -dynam ite -tex ted -broad caster -ðŁĴ £ -ste amed -rock 
er -di etary -luxury travel -inaugur ated -sa wards -vaugh n -lincoln shire -click ed -kra ja -f anc -remo ves -layo ffs -mc far -bre eds -win nie -jon ghyun -incen tive -vari ations -pat ton -atur day -persist ent -pr un -pi ers -dal es -æ ĸ -breast feeding -r ance -ta wa -Ĥ âĸ -mur doch -cap tive -thi stle -nic a -commod ity -cou ldnt -board walk -graci ous -practiti oners -n gc -scru m -ner o -camoufla ge -col on -he i -phys icist -saturday morning -ten er -si won -colum ns -bru ne -y vr -ba ir -reti res -hal am -cab er -shaz am -min u -cas cade -milk shake -gri d -d ren -vin cent -so dium -plat ter -cheer leader -chen ko -y ak -elimin ated -ty po -y man -re think -âĿ Ĺ -ts ville -bernardo kath -ex tr -ðŁĺģ ðŁĺģðŁĺģ -ta o -re per -mo ths -em powered -c iting -transpor ted -mon ks -san at -cle ars -bachelore tte -camp bell -racha el -har le -hand ler -climb s -inter ference -rele ase -sh and -r bs -hr h -ãģ ª -val le -r é -sli me -w akes -chu bby -slo an -el ves -ath en -attor neys -micro scope -ston er -sc aling -o be -c out -se man -mid week -bal sam -ðŁĺį âĿ¤ -ti ful -v ish -lo tta -ri pping -re mn -ti re -le ap -ha vent -la by -hi mach -whisp ers -we in -ðŁİ ¸ -wild flowers -se le -u cc -li ability -az ine -sw ings -k ya -ta ir -re main -e do -flo ps -poc ket -grand ad -exam iner -gr is -ffe ct -ðŁijĬ ðŁı» -stud ded -heart beat -de acon -firm ly -infec tious -ste f -out lines -le asing -cla ws -sen se -tab s -hoo t -mo sul -spa wn -co a -hog warts -ve in -alban ia -manu el -b ino -vaux hall -scot land -go bucks -mat ty -phy sio -tor ino -const able -investig ated -s lower -mistak en -bay er -wild fires -vo ic -x on -time to -chas sis -bar ric -pi on -bald head -woo k -regi str -dra fts -b hs -li gue -l ick -staf fordshire -baf ta -dar ry -je anne -ven ding -cor p -⼠³ï¸ı -kid dos -fen way -ca o -west bound -ðŁĺ Ļ -dv r -quick er -bla h -goo die -ðŁĴĭ ðŁĴĭ -vo x -esp er -fac ade -cor relation -red bull -rou p -decl ining -chi ve -mc gee -tur o -in der -f eller 
-fu g -il ysm -mar di -peshaw ar -ki eran -ine ma -meat balls -pe ck -depre ssing -sen sing -gi z -dd ington -spring watch -ro aming -yellow stone -horse shoe -am man -week day -ol or -ðŁ¥ ° -boo sts -spr int -scar ves -je e -bee tro -cl an -all the -ìĦ ¸ë -enlighten ment -ado be -re generation -? @ -cont ag -yach ts -to u -mor a -en voy -r ani -go li -dhanush kraja -wood working -streng ths -se di -disc s -ar ina -sc on -lit e -ano ther -ðŁ¥ Ĭ -ye men -gu ern -sav vy -lo yed -biom ed -heart break -comra des -milli e -pat ch -un f -jar vis -bl aming -commemor ation -ge y -å ¥ -cardio vascular -alig ned -docu ment -. ? -aesthe tics -em u -the irs -le h -ps ic -si f -pl ateau -ex pend -domin ating -rob es -mauriti us -excep tionally -hom er -discover ies -bra un -ten nant -insul in -ðŁİ ® -car bs -te as -? !" -zi e -franco is -brow sing -th ol -cla rence -hel per -ob tained -cas sie -le es -! , -pome gran -hu bs -presti ge -] [ -mach er -bott led -pun ch -pi pe -o ch -gall ons -deliver ies -u ra -un day -mon de -depic ts -re gency -outra geous -khal ed -car o -he arti -za g -develop mental -over coming -stati stical -flavo red -for ds -cre atives -lau rence -di as -sun screen -in ked -pre acher -n ul -impac ting -auti stic -âļ Ķï¸ı -o ss -pel icans -cele ste -v b -ru mp -mc gra -fair fax -hu mor -bbc news -row ling -cal der -seam less -ag ne -p ti -mix ed -t shirts -mer ci -b tob -women instem -genealo gy -pre ven -l our -cra dle -gi use -Ð ¾ -chron o -fair ness -chocol ate -tor y -as da -pre scott -stret ched -al man -u il -re charge -in tre -ob st -hosp ital -hay ward -teneri fe -fried man -vap ing -confe ssions -ye ah -bal li -luck now -cor pse -sculp tor -amp ton -t pp -indic ates -sur plus -tru man -ðĿ Ļ -sin ha -in vo -sovere ign -ke v -establi shing -engra ved -assu ming -ðŁı ģ -sou za -fab i -ton ed -oun ge -del oit -dow ney -no ble -om or -car tridge -ðŁı IJ -u hur -hol loway -succe sses -r sa -âĦ ¢ -ma zz -tw d -disc ourse -. 
< -y at -satis fy -com pri -ठ¹ -graph ite -disser tation -ar ter -í Ķ -b ally -zom bi -ly ons -a ic -u bc -pra da -e il -da x -cla i -grand daughter -extravag anza -chall enge -ðŁ¤ ŀ -po ver -primar ily -dad dy -man a -bi kers -inqui ries -da un -fel ine -gener ative -he f -benef iting -lind sey -pol ka -demonstr ated -al le -rand y -o su -low key -weir dest -red bull -our y -n ous -wood stock -cre denti -nic er -g ado -aly ss -ap h -prepa redness -station ary -incorpor ated -dy er -sarato ga -cele sti -: " -antibio tics -or gs -inde fin -ap ron -и Ð -fif teen -no f -ðŁĶ Ŀ -ph x -te ga -m z -organiz ational -on air -band ung -pleas ures -mor i -secre tari -rac coon -ca shi -pil ates -k on -geof frey -la o -kam p -depart ments -back packing -an am -à « -crack down -aun ty -on do -li zzie -ph ers -cu n -ðŁĩ ± -k pop -pu t -inten tional -connol ly -bar clays -hs fb -swin don -u ku -s ally -a int -âľ ħ -pen ang -up lifting -epile psy -inter ro -bun gal -go ku -blue berries -ठ¦ -u ssia -sil ky -mou red -i stic -bri efs -me ats -go b -ch aser -state wide -pra sad -gl itch -ar in -ban ff -memb er -ðŁĺŃ âĿ¤ï¸ı -lo ving -hall a -ภ¡ -smo kers -yak u -scicom m -physi o -sw ol -lem ons -gel ato -ch ool -capit als -ki stan -ti ghts -spi kes -trav ellers -ik lan -commissi oning -ar ine -emabiggest fans -empha sis -front line -pad dock -destruc tive -ba ha -l inger -je wish -shet land -mc gin -mon key -ko z -s one -raj ini -te h -y en -c vs -masqu er -gir ly -we sle -was nt -bro dy -termin ator -gil le -mag gi -bir die -jeopar dy -cu bic -vm ware -intric ate -an up -to pia -east on -sab res -investig ates -bu sting -bil ingual -valent ino -in format -fer re -advent ur -hydr ate -for sy -az iz -san to -e de -whist ler -continu ously -d ham -un used -ji had -addic tive -vi dy -do b -i do -fi ed -ni versary -n one -fu er -ðŁĺį ðŁĺĺ -coven ant -prin table -immac ulate -o em -cl t -serv ants -consu med -un released -sc um -pack aged -me re -ìĦ¸ë ¸ -to by -ta f -spo ons -me al -f ball 
-fair field -jan et -silver stone -dart mouth -follow me -voy ager -kom bat -anni ver -ene w -mag dal -ho ve -sa th -grizz ly -car di -gart ner -sand y -kan ye -post ure -po ign -im pulse -radio logy -horiz ons -si am -aish war -= => -no che -tr is -el yn -com me -du i -ce c -councill ors -cudd ling -creep ing -loc ke -manag es -trans ferred -ne cks -di er -dan o -v ick -lun ches -d he -en sures -cri ss -ul ster -bann on -cont enders -sp am -sweet ness -med al -hon duras -arc tic -ultra sound -in fr -disco vers -ei ffel -ca sters -ru ben -du st -awe ed -atri um -lest we -se ared -ðŁĵº : -ty ne -ex changes -little mix -l le -astron auts -hersh ey -work day -kno b -so v -re signs -today show -der man -an th -af c -ta ster -sw oo -sa eed -per ing -narrow ly -rn li -best buy -panas onic -obst acle -farmer s -ðŁİ Ļ -pa wan -ki est -ang ers -absur d -oh my -sin o -pist achi -sp ice -giu li -prime time -ko w -k ens -ex agger -! ?! -u ba -midd les -ju dd -e jec -slam med -pen sions -of a -re create -b hp -xx l -liver pool -thre sh -pur ity -ni eu -hol ics -wr ath -ra do -gli o -am ma -dile mma -cr u -lets go -.... 
@ -âĿ ĵ -sugge sting -tru mps -hor us -f v -ic om -refer ring -predic tive -tar ts -ge tte -so ck -glo ssy -pin ky -al ec -thy me -ou ra -thero ad -pe tr -cr am -p fi -dv n -me ier -incen tives -tun nels -mobi l -rec ap -extra s -upri ght -rev amp -per severance -, - -ot p -mir ror -ar wx -ger ry -ma her -g or -hom epage -am is -ag ra -made le -best friend -sirius xm -bun dles -admir ing -t dsb -ðŁį ģ -ch as -slow ing -ro h -wall papers -â̦ / -tek ken -gang s -tal a -lind say -shou l -line backer -tool kit -ur anium -caly p -ab rams -mat thi -ðŁı ¿ -hon ourable -da yo -ver sail -tan k -st c -fr itz -spl end -pat ag -anno yed -on day -devast ated -chattanoo ga -national ism -mas sey -jen n -tail or -dev gn -org ans -zu cchini -on fox -sat ire -wex ford -dis grace -no to -vol ta -âĿ¤ï¸ıâĿ¤ï¸ı âĿ¤ï¸ıâĿ¤ï¸ı -à ¶ -home owners -poin ter -m cr -au sten -day sto -mo ons -pal ma -gra zing -e so -influen cers -shahid kapoor -compli ant -measure ments -develop s -y d -par l -p vt -rand olph -tor tured -ger ald -eli as -deepi kap -war mup -hick ory -g ap -co ffin -am our -re neg -moun ting -seven s -ig le -hi er -dec ad -tri ght -esc apes -wer ner -t fl -ful filled -ni ger -sour dough -re aper -choo ses -spin ner -week nd -fil tered -sh uk -kat i -old ham -open source -kh anna -at elier -conne c -opho bic -gla s -complic ations -ar son -counc ils -sm ol -as sy -lur king -ling ui -han ks -e in -Ù ħ -ru gs -n guyen -nou veau -men ace -le v -alad din -ru ining -round about -k m -con or -shoo ps -may day -traum atic -prab has -ka iser -k ita -rou ter -pe dro -re tar -stun ner -spani sh -distur bed -acade my -e learning -wit ty -sen g -fer al -av y -sta b -ke aton -ur du -ko to -hu i -coo ke -ari an -the personal -u ma -se ap -a sting -rhetor ic -hand writing -munici pality -consor tium -ðŁIJ Ł -glasgo w -ra ya -eli za -polym er -bro th -prac ti -correspon dent -addic ts -gay le -ail ing -o fe -p li -hear tw -st itch -sight ings -prie sts -sam o -slo th -good wood -roc co -sab c 
-summ it -l ace -pres ley -itt en -cin cy -thepersonal network -s week -pe gas -af con -regi stry -ci m -le th -dic ap -cand ice -flu ent -sm ack -pede stri -al oud -car ac -priyan kach -p gh -ir ons -dol ce -lat via -dece ased -thero ck -cla p -cen e -fo am -morris sey -gre t -essenti ally -com cast -be agle -argu es -ing ed -- â̦ -sa g -ha san -ðŁĻ Ĩ -ðŁį ° -nh ra -kann ada -indic ators -on er -bri xton -at as -screen play -sor ority -sha heed -he em -class mates -tain ment -es i -breast cancer -zucker berg -aur or -en cia -ref ers -kae per -vor tex -com part -lym ph -photograph ing -ste ff -rest ling -par sley -mom ento -th man -lac king -du tt -ocu lus -fin o -fren zy -ra sc -der n -dis missed -noo k -met gala -sh ill -rapha el -maver icks -exhib its -eag erly -c pa -amen ities -. âłĢ -exo dus -ern st -lit a -deal t -womens march -i ain -score board -campe ones -c en -ti ki -garri son -fidel ity -bra g -road map -psy chop -lo e -ble u -ðŁijĬ ðŁı¼ -sau vi -spr inger -temp tation -ru dolph -ac ura -wic z -parach ute -stro l -len ny -zi k -dom s -nb af -al pac -vivi an -ro ve -pre et -perpe tu -sna ke -air soft -infl atable -prin ces -ati e -ffe y -pati ent -m ire -chel le -sl ack -groo vy -# : -up loading -!!!!!!!! !!!!!!!! 
-siem ens -provi sion -v fx -need y -f ats -to poli -bhu tto -sa thletics -alu ms -t winning -south western -adop ting -last night -man ne -la ga -tw ell -ac ia --- -- -eye wear -hur ley -fle e -sa ch -pe cker -cost ly -is k -cr ates -polic y -ero sion -in go -wer k -ðŁIJ į -torto ise -therap ies -inter net -chihuahu a -ri ps -fre i -ed or -tai ji -t fc -do d -demp sey -christ in -chen g -hi ps -gra eme -com passionate -cavali ers -histor ic -soul ful -crimin al -ja c -vin ci -expi red -sur at -turi smo -k ona -se aweed -ber ts -le ica -expre ssing -a al -wor t -break fast -her ring -am used -rhu barb -mar tian -cospla yer -y ash -stri al -ra ul -refer ral -dw ts -j w -ad ler -cur tains -gu r -val ence -tyr one -sw fc -coach ed -re born -diabe tic -cho ke -nor folk -investig ative -ðŁĴ¯ ðŁĴ¯ -z id -v mas -phi e -objec tives -âľ ĭ -over due -di vers -mat su -ðŁİŁ ï¸ı -casu alties -ภ§ -al k -stand ardi -re alist -arti facts -pand or -ke x -in vin -( !) -ine y -par aly -mr t -fay e -the voice -on ga -de ed -skin ner -az wx -speci men -priyankach opra -nu evo -bar kley -toulou se -resu mes -football ers -cit i -fe tch -è re -lestwe forget -ðŁĻ ĭ -ch unk -dri fting -manipul ation -equ als -pu tt -ky ungsoo -âĿ¤ï¸ı # -ela stic -par ano -fo y -do ping -cin cy -ss ler -interrup ted -al ay -ado res -ame thy -con voy -ãĢ ı -Ĭ ãģ -black list -gener als -sa chin -bru shed -oun ces -non stop -illi ams -bt sarmy -u av -ru ff -bur ma -bi k -defen ce -schul tz -bo asts -lonel iness -go re -trans forms -alum na -@ @ -ra ppers -ne hru -car o -himalay an -wearab les -ge h -pepper mint -re development -flam ingo -cos by -big baldhead -ag ri -bare foot -sco pes -re gram -gh ana -ðŁİ « -i heart -sa die -carri e -microbi al -ku ala -sk ater -quer que -âĻ © -gen res -reas oning -ch ased -as o -sli pped -en can -vam os -ker s -ad verse -mo il -commod ities -with you -sil ent -hy pe -an de -am ination -whi spe -lit z -âļ½ï¸ı âļ½ï¸ı -ri ff -pp y -lam bs -gan esh -ab sent -regu lator -marse 
ille -en roll -par cel -wa p -by rd -ðŁĩ Ń -tu ber -country music -par l -contro llers -responsi bilities -we y -ch ate -montene gro -chic o -mil an -l ms -tra inees -appropri ately -un certain -popp ies -ed sheeran -nutr itious -gar o -deut sch -awe some -ãĥ ¼ -comfor tably -land marks -et i -re usable -daniel le -ro sal -co les -just ic -c cs -f anny -ni m -mc u -clin ch -at ene -mer ge -im db -ang lo -uc cino -pan ini -an not -bur berry -feat ure -predic ting -fashioni sta -s ask -imag inary -mm o -south sudan -spe ar -hu bble -jo inthe -coyo tes -sli go -ko dak -sit com -polaro id -roo ted -corru p -ðŁĻĮ ðŁĻĮ -bris ban -at z -ah l -re my -tal ent -aval on -ra da -pau line -locom otive -go ons -ne mo -maser ati -ic u -stu tt -histor ically -sm b -pres by -avo id -so oners -rhine stone -w ad -ri sing -tro t -mo des -reg ent -optimi ze -re ece -sm u -ver ti -newyork city -cor tez -ra c -in case -sin c -fiel ding -e tta -tiff any -al monds -sad dle -k rat -mat ter -g low -star ving -gl o -cra ppy -sl ur -st d -monit ors -recei pt -maymay entrata -mc il -un is -rain bows -cal dwell -pacqui ao -j op -a fe -hoo k -es sen -wiz ard -medi an -fla ws -com s -âĿ Ħ -ing h -ha ynes -anton io -tem plates -ou ter -na w -cardi gan -bel grade -ðŁĴ ī -hom o -a ise -ro pes -no ve -what you -tri gge -concep tion -ad ukone -na di -fri ars -sw er -adju sted -hot line -san ity -kau r -down loading -c gi -ten or -eth nic -app alach -ภ¸ -pa g -gol ds -on set -investig ator -car tel -peace fully -jarre tt -cat alan -poli o -n um -fru stration -dhar ma -my life -âľĮ ðŁı» -aber deen -mu sa -bin der -spark ly -fle eing -instin ct -co ping -domin ance -ill ers -er a -u conn -lo oms -living ston -gal i -he s -c ma -bel a -se ley -mon k -la ch -mar x - ´ -m erica -woman in -es sex -ra ina -jim i -nep tune -z ack -chine se -mart ins -chand elier -her n -with us -ear l -asph alt -modu les -st p -ul la -psychi atric -mile age -captiv ating -si der -men to -mor t -tran ce -tal bot -ab by -ì ĥ -âľĮ 
ðŁı¼ -j ak -daw n -turn up -scre wed -fe ds -blue print -ðŁĴĸ ðŁĴĸ -har sh -er os -insom nia -ban kers -ta emin -mis conduct -hu mber -gi di -edu ardo -con a -musc ular -consu ming -ra sh -don nie -di pped -col lie -samu el -melt down -ðŁĺįðŁĺį ðŁĺį -me z -exam ining -schwar tz -pri stine -ðŁIJ Ŀ -ve it -ful filling -an esthe -gue sses -dra ft -som me -soli d -pati onal -ho ped -evolu tionary -all er -enter tained -sli ps -lud wig -conclu des -sen sible -bon net -cra ze -tra s -haz ards -const antine -ed ics -star trek -to c -occu pational -in cheon -deepikap adukone -pizz as -new comer -de part -oppre ssion -ebon y -foss ils -tro jan -el en -ste aks -k hou -positi oning -ug by -red cross -ak h -dol ce -us mnt -pp en -dil ig -ma vs -call er -cost ello -⼠Ħ -dy n -thing s -rhin os -a xi -sar kar -con vocation -att ers -ss ss -fun gus -eu gen -russ o -squ at -w sb -eli on -william sburg -s off -defici ency -be arer -o kin -key stone -t wain -cal ming -break able -wa res -horser acing -com bs -bun ting -u it -t land -ðŁĴĻðŁĴĻ ðŁĴĻ -ga stron -sab ot -ick ers -commissi oners -sen ate -ii ot -ath ena -nit rogen -an tony -ero tic -di alo -mis sou -hypo cr -âľ Ī -kaeper nick -can v -d roo -clevel and -o sh -mon sta -stefan o -^ ) -sh ul -po ison -ha e -commerci als -ma ul -nit ro -co worker -alo e -vap or -t ents -russi an -qu id -question able -mid get -po ker -girl friends -sin the -erit rea -ten ure -depos its -buc keyes -spot ter -theod ore -trin ity -joaqu in -u cci -follow the -caf c -mp a -ðŁIJ » -plo tting -dom ino -ta ek -sion ally -dicap rio -pa p -car mel -ig er -bt cc -beth le -www bigbaldhead -foo die -bagh dad -mason ry -off ended -à · -ภģ -sc ro -vers es -ori ent -ar ches -pi yu -know your -gre e -ta kers -gu ard -dish on -bucket list -bha fc -war dly -ðŁİīðŁİ Ĭ -leigh ton -pe w -stra y -assaul ted -in hal -ly fe -amar keting -l x -kat z -ubun tu -me o -carto onist -turno ver -mi z -dis like -mul len -mo f -bl and -hi des -emer ges -chori zo -truste e -ma hog 
-lan sing -paralym pic -fa int -fa una -ch al -sn ar -cat h -bent on -cast illo -sli ppery -apric ot -oec d -bar o -l z -he ming -clow ns -co workers -peru vian -commu ters -y ell -ðŁļ ´ -under ing -v j -tt p -fli pk -w ana -soc ent -Ĥâĸ Ĥâĸ -ठĤ -oo sa -jag ger -di sm -e less -d ham -cali f -a official -ec lip -harro gate -gra pp -com rade -n tr -concentr ate -thi ghs -bit coin -bel arus -ë ĵ -end uring -now watching -industri al -pi p -ar on -ar at - ® -whit by -oooo ooo -sa ree -tic als -mis leading -yo on -year s -sle igh -roman ian -sciss ors -vam pires -ac up -ab ba -th weeksary -cent ri -fl ye -u o -c bi -bu ena -sin d -mar ino -bur r -re building -ठ² -anniver saire -ac ca -ðŁĴĢ ðŁĴĢ -gett ing -tu lips -wolf pack -âľį ï¸ı -more than -ta kin -ð٤ĺ ðŁı» -u be -mon ic -dou bts -mo wer -co balt -don ne -specul ation -argu ably -kak u -htt ps -prosecu tion -din ah -stam atic -disclo sed -bever ly -fl wx -cra bs -extraordin aire -war mest -imper i -o logists -trac es -par c -lake side -am r -ter i -hour ly -domin ation -ar row -shrews bury -ance stry -wr angler -trigge red -pen sac -roo ster -survi ves -a on -bo ko -val or -love is -la g -pe y -fo cal -out laws -bl anc -artic ho -wit s -marsh all -die go -support small -u ca -sa h -je et -syn ago -gover ning -ðŁĴ ¬ -sal ads -cre ate -miri am -cen sored -ami de -no u -z eta -allegi ance -* ) -bl m -ric an -pa stors -oly mpus -blo c -whir l -star ry -pr one -y k -p ne -congratul ating -be v -so ber -love island -sa ir -an ing -tutor ials -q e -lun d -in ist -cle ver -taxpay er -ali z -wren ch -dd ling -cap ri -h pa -ðŁı» âĢįâĻĤï¸ı -na j -o j -futuri stic -jelly fish -ðŁĶ¥ðŁĶ¥ ðŁĶ¥ðŁĶ¥ -cel ery -plan k -fil a -ne me -un healthy -lec tions -ðŁ§ ¡ -rit chie -n ws -mi kha -wonder woman -âĢ İ -hip stamatic -ka g -ðŁĴľðŁĴľ ðŁĴľ -poul try -mo w -wor ds -lo ff -ðŁ¤£ ðŁ¤£ -relat able -re mixes -keny atta -ke m -re signed -fo d -stra igh -j lo -hu tch -box ers -colle en -mag s -instruc tional -ko l -attrac ts -pra g -account 
ant -go ggles -br u -th ole -mar row -leu ke -oc to -pon ds -bubb ly -he ist -ìĹ ij -im p -a har -ha unt -hall mark -psy ch -kkkk kkkk -col umb -jump suit -cost co -si delines -ag gies -over turned -ni b -key chain -fu k -f af -mi am -assist ants -cy cled -ri der -dam mit -red wings -mag es -kin s -ì Ĥ -ho d -son t -carol ine -" ' -cu le -bra id -fel ony -ar ities -ruther ford -depic tion -isab elle -ro ach -k day -fifth harmony -em y -li gam -bari sta -albu querque -gro ss -ðŁį º -oo ks -ðŁij ¼ -dun can -try in -jag s -g ould -li tho -âģ £ -а Ð -sam my -tun g -cas ser -apo lo -aaaa a -man g -as ics -sh en -p ye -tur bul -ss p -saint sfc -on lin -n anny -he ster -do z -à¸ Ķ -th read -ren ts -kh and -ðŁĴª ðŁı½ -un conditional -rob son -car re -ph on -sacrific ed - £ -auto s -par ker -oc a -log in -kee gan -hard cover -dough nuts -ðŁĮ İ -spit fire -refresh ments -saskat oon -commod ore -j f -rub ber -halam adrid -child care -stra da -io m -ri k -dak ar -ther mom -cro pped -gar u -ali k -ven i -i ft -si ka -ritu als -z ul -e ch - © -su dan -l land -i me -do cker -ì ¤ -fe ared -fa o -wal ter -no g -mutu als -l h -ali gn -mon ia -concep tart -ðŁĻı ðŁı¼ -sco e -compet ence -sw ine -ly me -laun ch -green er -abstract art -inqu is -gran ada -ga elic -flu ff -d backs -grave yard -ba be -acade mic -adventur ous -joh ann -~ ! 
-bi bi -| # -pl ings -gett y -as b -âĿ¤ï¸ı @ -staf f -religi ons -bang or -world bookday -me gh -de vin -ash ore -meri dian -gi thub -qui z -all stars -be stest -ir resi -ack er -do te -war rington -pol ly -newor leans -cr ou -wi gs -che y -smithson ian -la sag -de tour -bor is -stra ps -mari ah -inten tionally -ko h -ðŁį ¸ -ssi an -mar issa -cor al -episcop al -casu alty -tom o -supply chain -sam p -on go -ro o -cavi ar -p fw -clau dio -buff alo -s ations -mat ty -snap back -l ds -al arms -mat te -âĺ Ķï¸ı -conditi oner -d ors -he x -fi zz -a stri -sus sex -secur ity -qa eda -all star -cocac ola -as one -cl icks -sc ans -mu te -he avier -ðŁİ § -âĺ ŀ -lv l -book boost -youtu be -fla shes -f jor -c su -explo de -do dge -cair n -gonz ales -th ill -pel le -hart ley -renew able -re tin -e stre -costar ica -shipy ard -nc fc -pri ya -a ghan -an ath -plu gin -co rey -re bound -or u -kat rin -hor mone -gi m -mahin dra -s sus -park land -har per -fanta stic -infer no -ep ilo -wrest ling -fe ct -c it -ac oun -to ssed -monu mental -char tered -bu st -pe tra -âĮ ļ -wildflower hour -sweat ers -* . 
-bl er -ate ch -go wan -demo graphic -bra l -suici de -renov ations -vu el -sin ister -ar mani -miso gy -ph arrell -nap s -un iting -crusad ers -cor gi -insu red -than i -no or -g q -d ada -bicy cles -snu ggle -sch an -ten berg -ss al -fe mme -bo il -½ ï¸ı -re ap -occur ring -hus sein -divi d -sto ke -sh alom -na ia -o lic -frustr ating -Ù ĩ -ig s -gro ver -scen arios -n ds -bru tality -med alli -bu on -sas s -skate boarding -ony x -lor ry -ny u -gau tam -mm ings -gu g -end i -lo thian -comm ando -chal k -ph ora -asse ssing -ti gh -crun chy -ad ay -is l -ci ara -pilgri ms -kam al -p to -brit anni -t ani -sm c -l ure -app store -ab y -golf ing -cl c -fa u -an as -shu tting -regul ated -carn age -scow boys -all enge -c ma -humbold t -rel le -ku mb -her i -refin ery -sound check -d wayne -bos nia -i sp -the alth -anni v -relev ance -my a -bag gage -dre ad -s bc -th ed -bu h -hi jab -lo id -ke w -c te -respec t -lovel ies -cu bes -celebr ate -dir t -sav ers -_ , -gar ment -pulit zer -mas jid -beat port -al arts -encry ption -s ner -ple ads -found ry -sym metry -ru mi -birth place -scallo ps -supp le -pivo tal -t ati -no de -so d -pro xim -tr ics -col dest -bren t -mand u -cla ir -e ach -and alu -hi ddleston -ðŁIJ º -mel ts -v ance -pin n -se ments -scre ened -sa chs -o bl -ic ha -âĺĺ ï¸ı -school ers -heal ed -lo gged -ð٤ĺ ðŁı¼ -ic us -bore dom -b ish -b ffs -tal king -sure sh -hoo kem -de on -de fl -ei leen -ðŁį ķ -women intech -ri sotto -rang er -adverti se -ภģภ-tel ly -la go -dart moor -d ong -sk ates -lo go -un ner -mail box -ma sala -lo oooo -amethy st -che wing -c bb -australi ans -rc mp -game art -# ... 
-kor n -extre mism -fruit ful -anci ent -pu bg -pol ite -wh it -mur als -m gr -line man -dav ao -ste ms -ten nis -av age -tu pac -gigan tic -hs bc -auto biography -up the -ี à¹Ī -re gal -fig uring -ku l -mis sy -hoo p -gra s -for ums -back lash -abduc ted -p nw -min ic -bu tt -bott oms -at on -ven g -ðŁĮ ı -del aney -prab hu -fan club -over haul -health ye -sy no -aa f -ren amed -kim i -un cle -man city -se u -qu anti -este em -um in -en zo -mel vin -under go -j har -far ah -coast ers -humph rey -mh z -children s -^ . -d hi -disrup tive -integr ating -r nb -over sized -a ide -ne au -docu mentation -ðŁijĢ ðŁijĢ -pal o -hear th -ri yad -pun ctu -abc news -secu res -boy band -bir ch -ju co -tra ff -legislat ors -bay a -ãĤ ¯ -no ises -collec ts -s warm -k ner -bi shops -stur geon -snapp ing -mo l -fre aky -chair person -tro p -lyn ch -car cin -art sy -e sto -cha i -fl ur -inv ali -sau sages -im el -j or -fun fact -wit ter -puni shed -ac ons -h ya -re versi -em c -dif fu -z x -sp aw -cla d -d mit -hol land -fre sco -pay roll -ab undant -stu ffing -mor o -c ny -boy cott -wend y -ele ven -pro voc -pil ot -tr x -be ad -climate action -ri on -assi e -ì ĸ -o sm -islam ic -ho ar -good reads -al ici -afterno ons -spoke sman -jo lie -it as -masc ara -âĻ© âĻ« -pre vail -beetro ot -lu jah -k li -dod ger - » -ru le -l n -scre am -ho bart -col bert -r tc -er m -pat ro -quo ting -s live -que st -non fiction -semin ary -prosecu tors -ve st -express way -g ge -nau tical -et f -ðŁİīðŁİ Ĭ -dur ation -cha ired -the film -fab io -she h -can o -ðŁĴª ðŁı» -with draw -! 
:) -cor pus -phen om -yel p -la wn -ent om -snapp er -but te -pin ball -pro xy -libr e -alle vi -n ada -gabri el -fo wl -eure ka -daph ne -tu nes -pun ched -wh ore -jo g -ren tial -man ners -o pe -wh ufc -gu th -revol t -sne aker -philharmon ic -ho ste -sovereign ty -ðŁĻıðŁĻı ðŁĻı -fish ing -sci art -fe ta -i pp -dump ing -kel own -gir i -dig its -sal u -san jay -twee ters -sp as -col chester -sc ab -ma dd -๠Ħภ-Ä ĩ -ged don -march for -do p -maure en -un plugged -di do -fashion blogger -up a -mex ic -tar y -pol ye -jame son -v t -grin der -mad dy -consult ancy -¬ ë -leagueof legends -ac cents -um ni -jane iro -tu ss -h ens -ampli fier -to shi -pret tier -pre vents -new town -red wood -vant age -ball ard -ar tof -a she -a sion -lac ey -ap at -gro ve -ภĦ -rw and -real tors -tra itor -bed ding -ö r -zi on -fla shing -cam pan -boom er -secretari at -ab ol -liti gation -cont amination -se dly -shred ded -in for -do herty -bench mark -ro che -skate board -sho vel -i zz -to pper -o ster -laby rin -autu m -k ong -hum mus -vi z -tech news -kla us -am using -socialmedi amarketing -i des -cast ell -ste e -underestim ate -cal ab -pa ign -b illing -unanim ously -g mb -fly fishing -hath away -commerci al -colour ing -skul ls -pivo t -te p -tb c -motor way -x press -construc tive -pu k -under lying -kir sten -mani ac -cha o -se ma -chiff on -ðŁijĮ ðŁı» -ver ona -kom o -stan doff -wi ped -c ated -bla ir -wor kin -m sc -bethle hem -swi pe -unexpe c -pe es -pe tri -orig ami -ðŁij ħ -mex ico -flav or -ru dd -cannab is -mar u -ri ddle -wor shi -sil on -sch at -ap se -tang er -bi ous -e er -questi oned -o zar -dan k -angle sey -char an -bak u -compe ten -re pri -bat ter -sa xon -cal ves -leng ths -$ $$ -âŀ ¡ï¸ı -immer sion -ga unt -car ry -cy to -b anda -shu tt -experi ence -el gin -mous se -ta z -ê µ -in correct -en z -b ham -mor on -so ver -ar un -ti pped -la ble -de arly -bau tista -í Ļ -mor tal -woo p -dt la -sho cks -dav os -ðŁĵ Ŀ -swim wear -her man -ðŁijĩ ðŁijĩ -z ir -neglec ted 
-grac ed -campu ses -av s -ar ora -swach hb -live pd -ac cra -enqui ries -shoo ters -kur t -vancou ver -brad ley -gar da -g ü -ol la -attrac ting -up ton -ne win -lu mia -furn ace -ev ers -e on -sw a -roo kies -a oc -v ss -bris ket -tor ch -yo da -heart land -tac o -ph ony -food bank -ab bey -bab ylon -u y -gre ate -expre sses -d andy -sc apes -survi vor -ron d -e ci -ha vin -ab el -chil dish -tor que -wav y -ur self -kanye west -year of -ale stine -o brien -al fon -sk ag -kore an -anchor age -val eri -de w -ðŁİ ¨ -land slide -car ole -christ en -go phers -af i -priyan ka -q q -power of -it te -pc so -tw ol -pr y -intellec tu -guer rero -pi les -wish list -w ren -time table -ë ı -prodi gy -gibb ons -. / -ne ur -anz ac -mur ray -vie st -pla ster -la ir -art gallery -inter continental -g br -bell ator -nam joon -mam mals -am el -y aw -saras ota -cam ar -bud ding -sum mari -aco sta -la sh -ey ou -post graduate -instruc tors -ti g -const ant -were wolf -ic os -cla s -glen n -bud ge -ðŁĻ Ĥ -er ta -sta ins -persecu tion -cumb ri -o ch -syner gy -hu ang -scand in -mid terms -comment ator -regar ded -perpe tual -bo iling -al p -lan ge -sch le -fac eli -twee ta -ri dden -ok toberfest -charlotte sville -ik lan -jo u -ch atham -b sc -ðŁį ¦ -stra uss -mel low -xx xx -happy hour -re actor -ww er -distr action -at orial -ðŁĴª ðŁı¼ -twin peaks -fay ette -a or -ko k -bro om -sy fy -ou se -am ag -Ø · -ubis oft -lu lu -hall mark -stu art -it ya -si deline -venge ance -re lu -sex ism -boun cing -un ites -gu stav -te ssa -stu mp -pro clamation -ima x -divid end -col by -ðŁį İ -play wright -un safe -co smo -ðŁĩ²ðŁĩ ½ -cup board -constitu ents -ang lia -ram page -ðŁĺįðŁĺį ðŁĺįðŁĺįðŁĺį -than ked -take aways -shro ff -de bat -kh ur -conduc ts -format s -à © -port age -graph ers -u ten -pre m -mo ines -condem ns -s ous -l ps -f cs -deal ership -leuke mia -bure au -ski d -guardi ola -ca ster -thir d -avoi ded -en cyclo -c sr -vi xx -analy zing -she ar -dulu th -shap iro -chan ting -stre 
sses -as be -mil itia -ãĥ ª -col lin -arsen e -sure sh -teach ings -yi xing -sh ill -nu des -sv u -clear water -war ped -pro life -artist son -it u -versail les -galax y -ax el -spring st -cal a -hu hu -sc u -commit ments -exe ter -poign ant -mo tion -conserv atory -row dy -rec alled -mu sk -emb elli -so the -âĺ Ģ -sto pper -sch ild -to pe -el mo -zi el -j om -barn sley -snow den -on tour -jour ney -hills borough -par ole -w ts -mo ving -ag ility -tiv o -ff ers -kindle unlimited -g wen -ann an -ah mad -tex tured -hepat itis -dra m -insi ders -tis sues -ãĥ Ħ -fc barcelona -cr atic -na acp -pe can -f gm -custom ize -concer t -g sm -pe g -p one -justin trudeau -super cars -happy holidays -bu lar -ado x -lap tops -digital health -destin ation -gradu ally -áĥ ¦ -popp y -ss l -inhi bit -star light -of fro -glo omy -x per -hal der -im plants -le to -hass el -a as -un told -en ci -liber ia -or an -con tests -il ah -sma g -sc out -mari anne -cr yo -schedu ling -lo s -kan e -stutt gart -ne se -law rence -da in -pho tom -car ou -ภ£ -g wy -national dogday -roa sting -band camp -kentu cky -stret ches -ke rel -ca she -ãĤ ¸ -sta x -tran si -dog gie -at ric -hal le -ci vic -brow ning -lein ster -cat day -high land -joy ous -in cumb -or lando -ro mo -col ton -del ta -car ab -ro tc -aster oid -goose bumps -mo logy -yo ko -an ds -tomor rows -red carpet -sm p -ca sio -ðŁ¤£ðŁ¤£ ðŁ¤£ -se au -rejec tion -rot ating -bi partisan -th un -mat i -bon i -ol l -ener gye -do it -l j -mother hood -lou ise -neck laces -el ite -ni x -l cs -en v -gl u -le sh -cran k -su sie -m clau -so tu -crow ley -rat ri -use d -bre ton -alfre do -ye o -travel pics -ti pp -elli son -sax ophone -me red -heu ghan -ta ine -f es -vi ro -suppo sedly -i as -dige stive -y le -li zzy -wildlife photography -bri anna -west field -ra ined -am her -ðŁĺĦ ðŁĺĦ -distribu te -bott om -pre serving -oil and -craf ty -de scen -col ling -shakespeare sunday -r wc -ang led -ci an -t ations -mon tage -me yers -france sca -ðŁĮ · -wi 
ggins -san ford -volunte er -car ra -bar k -vari ed -pl in -am u -kap il -rock ers -qu ind -br ane -in mate -ent al -impro vis -michi gan -re tweeting -progre ssing -mercedes benz -smo ker -physi ology -dor ado -watt pad -h wa -sr bachchan -w ga -vol atility -hi re -ac ap -wn ba -hein z -stit ches -kidnapp ing -bur ys -lim b -f itters -thumb nail -ton e -mir and -desi rable -ad dison -tar an -tamil nadu -spec tator -soci ology -amit shah -remo tely -âĻ ¦ -ham id -r ds -g lee -smooth ly -sch ro -er c -lali ga -he als -us f -ni shi -d hu -un il -h le -tro mb -bhu tan -pilip inas -se ung -whit man -te y -min ce -snow boarding -re au -k ker -av o -zach ary -ran veer -ti k -gover n -qu al -beck y -anthropo logy -att en -grocer ies -de bit -war p -sil icon -hawa ii -ðŁĴ ħ -pomegran ate -pe er -orang es -people schoice -end ure -ðŁĴĽ ðŁĴĽ -ãĤ¹ ãĥ -ac ial -a haha -stu k -imper ial -bl ond -pow der -kno ts -vin ce -wood lands -den a -watch in -mat cha -ma hat -galax ies -middles brough -k ö -stre e -resc ues -wal do -lero y -desp ic -real ities -tm nt -ha q -un o -pe c -bolly wood -blin ds -design thinking -he ms -and hra -ab sen -fan s -ste ch -shire hour -bla ine -shak ti -pu rely -ðŁı ı -tra fal -ke ynes -gr ate -to bias -spon taneous -satur ated -caval ry -pri sc -ðŁĺ ij -wh t -pas si -~~ ~ -vir at -patt inson -la o -weir do -sym pathy -ju da -occa sionally -cred ited -stat u -es co -hil ly -esc ape -dischar ge -se er -may nard -sud bury -z lat -or al -we er -encoun tered -sm elling -over sight -ê ¸ -that cher -mack ay -you can -fre ep -freed oms -prophe cy -ho e -ishq ba -dra ke -qu its -pel led -tur k -o vi -wesle yan -new music -leg g -ch eng -h illi -ay y -pan ties -ad versity -ad jac -vaccin ation -ju ke -ga c -exce ed -time sof -sta ining -ep cot -v ital -up ward -bethe sda -apar k -ma hi -camp fire -enchan ting -rha pso -h z -na ver -fa x -vali dation -ac ad -ny r -as ym -coordin ated -depar ted -all ery -var ies -spr ite -chap lin -ss occer -s wat -bre t -relu 
ct -tunes app -super star -reminis cing -o co -home grown -dough nut -un canny -la pd -thyro id -! âĿ¤ï¸ı -botan ic -bre s -sp ade -i ste -echo es -du lil -bur sting -qui ero -ðŁij İ -loy ola -amuse ment -ha ils -sleep y -burgl ary -âľ ı -ro gue -cot land -mo ors -low er -wic ked -ðŁĶ Ĭ -compet iti -argent ine -yvon ne -karti keyan -ili ary -gat sby -precin ct -six ty -na ji -cam s -practiti oner -ðŁĺ³ ðŁĺ³ -pu ne -neg li -juli en -inv aded -cali br -cla m -duba i -mu k -lan tic -produc t -fe dex -ï¸ı : -eu ra -dari us -s ling -virtual reality -home stead -ðŁı³ï¸ıâĢį ðŁĮĪ -pac ed -in ha -pul mon -la zy -premi ering -ma stered -in he -con gregation -ba jo -sport ing -new jersey -hor ny -lma oo -leng thy -du t -yo gh -swe aring -philosoph ical -pap ua -in ski -know les -dy ke -âĢ ² -to ken -mc guire -ri ot -probab ility -mc con -gro s -su mat -c ite -da a -on da -mad dow -che w -board games -spar ked -re claimed -ad hd -ny se -imwith her -equ inox -boo ths -balsam ic -ha zy -dor chester -ag os -se aw -moder ator -seri ea -ander sen -pilgri m -âŃIJ âŃIJ -itch en -hal li -x ton -nathan iel -mun ition -celesti al -ga f -zo om -mark le -pen thouse -cal e -s fa -bar king -tu cket -em ery -cal orie -li que -ad ar -mc nam -tor tilla -wood pecker -mo town -bad ger -ayr shire -scram ble -dd ay -cra ziest -per rie -cho co -cast e -i ot -wre cked -selec ting -uss r -gra ft -pun t -lab ou -ir st -ba ek -Û Į -su ki -que u -ach at -te ster -aug mented -wc vb -sin ks -ðŁĵ » -ra ke -inter ne -be cause -belle vue -une arth -light en -ðŁĺ £ -turn around -labe led -unemp loyed -twitter kurds -le ia -h ye -great er -ðŁIJ İ -tim ed -i red -e tt -limit ations -cab e -s out -bee ch -anni hil -re trac -yo ona -ang er -den nis -supp lying -di z -" ( -sc ur -gun man -su ho -sauvi gnon -ภ¥ -wi ley -land on -choreo graphy -pre historic -ðŁı ĥ -var gas -assess ments -pinn acle -di i -chamber lain -ì Ī -v p -present ers -deut sche -sun shine -sal utes -r one -bu siest -- .- -motor ists -hemi 
sphere -al wx -ps p -ow a -den ying -cho c -gu tier -han uk -mus kete -jait ley -se wage -t ame -thin kers -shi m -se quo -pap ar -middle east -k wa -ke g -patag onia -no y -bar ça -take off -he a -à ¬ -n sc -g dc -ðŁij Ī -mou stache -mel ania -thr a -â¬Ĩ ï¸ı -pier ced -ze us -fon ts -ber a -it iner -q atar -contr ary -ire land -i fy -ou los -commun al -fin s -un paid -pa a -ðŁijĩ ðŁı» -ri os -ou p -f iller -cafe teria -à¸ Ń -kas i -cali ber -z ulu -v sco -ts ford -dragon fly -smo kin -pi st -psycho logist -diplom at -we bs -buc cane -à® ¾ -motiv ational -du ne -ba e -c fs -with out -er on -i ac -ate e -pen sion -fra zier -en sis -sk is -par ting -ger y -territ ories -nach os -eni ght -ever lasting -msd honi -tel e -sp un -po di -sab ah -environ mentally -ce ase -beau mont -mar ta -kel vin -ho ff -sun il -n da -co b -sh ale -ree dus -un boxing -u bio -re opened -n all -capsu les -mar r -himalay as -swee ter -ja z -f mr -twee ter -dha ka -na u -de mi -d fs -ta urus -fad ing -it utes -ci p -over flow -jef frey -don ny -car tunesapp -ðŁį ij -prefe cture -danc ed -c pt -ple asing -ital k -earth quakes -ul ation -hi o -ãĢ ĭ -ant an -nutri ent -de ere -selec ts -enrich ment -r iti -tram pol -bl amed -j ia -contribu tors -chesa peake -pi geons -tribun al -mad uro -w su -ilo ve -effici ently -dar cy -war ms -ar ra -ec u -ho wer -strugg led -rajini kanth -ðŁĺ¢ ðŁĺ¢ -hou sing -str at -eli x -disp ro -raf fic -thi erry -na sty -c fb -staf fing -al ma -back ers -hen son -sky walker -reale state -roo s -ness y -chan ce -cair ns -c ci -pe dal -ly ft -cross word -wait er -only in -kru ger -k ir -alej andro -car tier -car rera -re paired -ou at -un clear -un breakable -today in -qu eries -jo dy -gen ital -win ner -to l -kelown a -fascin ated -ãĥ ¬ -sris ri -squ ared -spr ung -negoti ate -priv ately -av en ->> >>> -g ical -gav in -chester field -zu mba -or r -nat alia -impeach ment -mn l -car at -criti que -credi ble -trac y -tan i -musi k -jig saw -gam bia -tol kien -fe u -as per 
-sav ory -fo xx -f itt -mar lon -l rt -v ell -p br -imprison ed -i om -chu l -wind shield -kay e -ba a -chor d -s art -al gon -minister ial -nat geo -la zio -nor ms -ðŁijį ðŁijį -lic king -fut bol -un sung -dalla scowboys -sh red -distur b -dev ine -be ards -ch f -b day -ro sso -ig or -ay i -si ren -k air -sti les -ro f -mag nets -un cover -mou se -bang ing -si ghted -spe ople -impac t -row land -kir a -environ ment -love the -p sis -mish ra -gl endale -ca jun -o che -de ception -sex ist -stra ws -s ga -buff er -apost le -sp l -pop up -ðŁļ Ĺ -r g -up er -ball in -i dy -occa sional -national park -ðŁı Ĭ -u an -innov ation -ภ« -te aparty -re tte -counter fe -b ha -rec s -ig en -ðŁĮ IJ -humming bird -cu r -ha ven -la zar -pue blo -: : -zi onist -op ath -inver ness -promo ter -carto on -cabine ts -mahog any -surve ying -r ational -feel ing -testi fy -so w -oc on -ภ¢ -ne el -mar is -sol itary -che mo -rad cliffe -sim ons -ros ary -new er -jo die -re tali -pra wn -pad dy -hen ge -k ala -im plant -at y -bren twood -par adox -ene z -re designed -p our -wy d -al de -௠ģ -sol d -biomed ical -๠Ĥ -tt tt -mat teo -ys er -new ton -de bun -ner dy -loo l -wo on -elisa beth -ec c -wh i -ach o -salv age -sal aries -qu ity -navig ating -oph thal -con soles -re built -o pec -ast ers -sho red -set list -kathr yn -rhy mes -re visiting -ash ish -li ft -re post -sole il -âı ± -weal th -sa at -we c -king james -flipk art -field work -se gu -mo dal -bu b -are rs -ðŁį Ĵ -clo oney -pad dington -necess ity -guth rie -pen te -li mo -jo sie -ar tin -en c -l hs -betra yal -info graphics -i er -mo a -hear ings -bon jour -sym bolic -ag ro -wed ges -krist ina -wild flower -athle tic -photograph y -pe sh -ca hill -chi lean -gou l -fi oren -ðŁij ¶ -z il -sk im -bad oo -deli a -tre ble -n cc -ðŁĩ¦ ðŁĩ -a house -bul lock -sol itude -ا٠Ĩ -can cers -futureof work -hu tch -water shed -war mongers -sp illed -colom bo -mo th -associ ations -weigh ed -global goals -not just -christ i -tor g -swe ating -man eu 
-clu sters -â̼ï¸ı â̼ï¸ı -ta ped -ul y -tru sting -yu suf -te in -ra b -, ,,, -sin ai -audi ble -explic it -cro wns -sch iz -at least -ðŁĹ £ -de bra -je suit -ene gger -z hen -one sie -i it -ss f -gur gaon -chak ra -bear cats -k ran -k awa -reque sting -han over -g end -sor os -mer cy -lovel y -do omed -tim my -ku z -ul l -ab ram -sa ison -ãĥ « -clean ers -re mo -circu its -bar red -o th -mo ist -madele ine -gall o -u j -per mits -hea viest -car ols -az te -gior gio -flo ats -decl aring -us rc -min at -craf ts -pri ma -conven i -nickelo deon -danc ing -ceremon ial -blo gg -tw p -anglic an -she k -k nick -( (( -hubb ard -harve y -hit man -fen g -we some -for za -s word -op us -bro m -gi bility -z al -m unch -dance hall -gre edy -hd mi -re birth -ðŁĺĭ ðŁĺĭ -s world -figur ine -com post -k f -engra ving -gior no -st ana -k man -ham ster -compos ers -aj e -func tionality -pol k -is ons -air planes -te se -hor rors -musc at -gi ven -sp ence -ðŁĩ¸ ðŁĩ -eli ot -ach illes -fre ck -crypto currencies -sou ther -hal o -bor neo -polit ic -hahahaha h -up state -si ena -obsc ure -hau sen -lloy d -happy friday -motor bike -bon a -americ as -hol s -- ( -spor ty -un aware -reven ues -christop her -bank sy -av an -ev apor -com press -eyel iner -to dos -buff y -renewable energy -ly rical -ar chan -rapi st -fair trade -lma ooo -beat z -pro active -la pse -ir ical -revers al -po de -mcin tyre -mac au -ãĥ ķãĤ -nash grier -f sa -g all -çĶ Ł -perpe tr -il ya -configur ation -% ; -str ange -rac i -ภĩ -pic kups -kov sky -mam mal -w ps -g able -compar ative -z h -save our -da vey -on etsy -mu ssels -mis er -cri stina -electr on -cra ve -lo ren -precipit ation -m z -ðŁį « -vin cen -snow board -no ida -ah n -marin ated -g tr -town hall -min is -bethe l -adv an -su ra -shi el -fur ry -ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤ -lyn d -so il -sc ence -sen eca -shar jah -dick ens -credenti als -av ar -per k -requ iring -pre fer -j ian -de ca -r ach -ing for -del e -be ep -ðŁĴ » -cis ely -hu ddle -green sboro -haw 
king -ho ax -hang ar -ç ľ -mis o -lo vin -gre ta -ab ad -logi e -at an -snow flake -mahe sh -fear the -al kal -bobb lehead -ba hn -ju dged -fu tu -feli x -ðŁį ĵ -pi ke -der iv -notic es -au er -dis super -or da -wi pes -am ino -stri kers -foo tb -dram as -pun ching -score less -heming way -bi h -bal lad -chat ter -am mo -kle in -fabric ation -kari m -z end -hi sto -vol ta -rock y -marke ter -xtre me -sequ encing -paradig m -cle ats -boom ing -âģł âģł -block ade -promp ts -yogh urt -pur pose -nu r -regu late -nois y -ing rid -bird watching -bar tender -Ù ĥ -wor dof -cha otic -shor ty -el dest -z app -onceupon atime -fl yo -rit os -mike quind -ðŁIJ ´ -regi stering -. ] -ad ol -gg gg -pur ge -kid lit -ar bor -val ves -synago gue -o th -unanim ous -veri fication -dar rell -ãģ Ħ -vander bilt -tape stry -pro sper -did dy -dra fting -de cep -marqu is -st int -michael jackson -pee led -men us -bb b -sc are -ema il -wri gley -it is -f ell -some thin -bar ra -ed gar -di pping -pu ddle -sla de -lear ner -jal en -ðŁ§ IJ -the daily -mikequind azzi -ju x -iq bal -mckin ney -ra iser -ef an -dr one -cat o -pic ket -cro we -l att -uk o -giuse ppe -hin i -synthe si -ponti fex -song writing -to d -swit ches -din ners -h q -gabri elle -pensac ola -cir cle -expo ses -ev s -riyad h -pro men -o ck -sa j -cit ation -brew co -jo si -ep aper -dri f -point less -tang led -cri pp -line ups -fairi es -daz e -mour n -bla dder -sal z -bur undi -book mark -the people -sub sequ -princi pal -sk er -court ney -a oki -rac ers -ad m -mom a -critical role -hou n -shed ding -sa ka -ace ous -mck ay -hus bands - ½ -me da -accu sations -ro sel -nc is -witne ssing -or ama -go ds -hil ton -el man -ÃŃ n -meg ap -cra ven -announ cer -crit eri -sheffiel dissuper -milit ant -consu l -hoo ded -aby ss -b x -ma dam -lo cu -mary am -manic ure -grat is -ac tresses -ros ario -this dayin -king ly -gn ome -cel ine -r ous -he el -lil ac -vish al -ab h -thor ns -s ls -ne al -construc ting -be ren -s lang -ma ins -far ra 
-sar ko -pai ge -gu iller -l ala -ice berg -nou n -plann ers -u mmm -ou ses -ill ary -ma an -box ing -zi pper -srin agar -migu el -o str -mp o -responsi bly -lan terns -appli ance -x b -gren ade -neglec t -dy sle -ham mock -ne ctar -wit cher -r gv -di ence -ser bian -seed ed -cru z -bi sh -sp he -e q -sky rim -alge bra -phil ately -bungal ow -ge off -y ves -demand ed -consider ations -the vamp -pawan kalyan -co ded -grit ty -erup tion -se infeld -uni denti -ëĭ Ī -wor m -ac us -se ung -dun g -ro land -su d -di visions -ab lanc -shor test -j f -p oun -plant based -be to -tough er -mc o -don et -mark us -v fl -ðŁı ł -open ing -co ward -caber net -o xi -burle sque -sand ra -su mo -consi st -tho t -cay man -motor ola -gutier rez -d slr -y w -no bel -nov ice -moms demand -grun ge -sp or -d cc -pre sses -sli st -allot ment -voc ational -ft c -pu ja -lo ven -utt arak -tan dem -sh ep -come dians -anat om -cant wait -healthye ating -west side -mar gins -chi ang -asbe stos -stupi dity -proble matic -fit bit -: $ -ceil ings -shu a -protec tions -bio tic -beng ali -re sts -bien nale -tim o -cul min -e minent -affe ction -unbeliev ably -individu ally -canvas sing -wh itt -nov asco -chin son -h pe -go w -gloucester shire -pa o -thresh old -chev ron -s ine -we ther -pp ie -aqu ino -antwer p -âĸ ¬ -po on -inst af -equ ine -cinemato graphy -nbaf inals -vali ant -kil kenny -te rence -syste mic -sr l -p ound -made ira -pl ough -tre cht -mat ed -mp d -ransom ware -ph in -li qui -bb ce -boom er -i standwith -con ju -r te -nar a -foo lish -da shing -vier nes -br ite -da u -juni per -ai da -you now -ra zer -de i -repe ating -comfor ting -adjac ent -e to -ca sted -chat ur -mu er -syn th -san itary -mac le -independ ent -law ful -e erie -h or -ðŁĴ Ń -am rit -vel o -station ery -mu f -may may -contempl ating -elabor ate -gre gor -dri es -ac col -ภļ -schwarz enegger -ill nesses -day break -follow back -collu sion -electr onic -jo vi -hiro shima -ta w -hom ec -mic ah -qu itting -fro sting -ben 
fica -hel i -s ical -pic cad -corpor ate -ment orship -you are -sing er -shi va -ru ne -ing er -ri um -play able -doo p -wil low -ter re -ni p -at d -war bler -profession ally -er ase -proce ed -pedestri ans -mis chief -ben ding -alas kan -c kett -mo p -dd les -shut ter -ge ared -atene o -ma deline -g ations -o sha -der ick -sw ild -an gry -pat ents -hun k -decre ased -fr y -ðŁĴĸðŁĴĸ ðŁĴĸ -sal on -quant ities -d ario -ni gel -ku ma -jen n -happ ye -xx x -rex perience -pro s -au sch -rele ssly -ham burger -fuku shima -er ne -stat ec -ren d -may field -j one -lef ty -bern stein -sm il -gener ates -fore station -band its -ta yo -r ca -ac ci -rodri go -kn app -elo vers -vege tation -u ral -le ft -ħ ï¸ı -worl dre -sur i -embar k -w son -ba you -mu ller -mo vers -ðŁķ º -presby ter -l f -cre e -bat b -sal am -demonstr ations -an ec -n pc -it ics -to graphy -re inst -thur st -tal e -off ences -smart city -bro tha -ofthe year -in valuable -ear n -ðŁijı ðŁı½ -kre mlin -gra dy -town fc -guern sey -ma ha -contag ious -dre x -be en -( £ -nati vity -k tm -somer halder -comp ounds -íķ ĺ -" â̦ -af g -ott news -h ound -fire fly -cil an -donet sk -volunte ered -ak ira -è ª -sing ul -st h -dro wned -mand o -he ir -ðŁİīðŁİ Ī -tax is -y uki -vel d -k ans -el k -ran ts -hash tag -t eng -ro g -a at -gru b -e ber -in india -colo ssus -sig ni -so ever -mile stones -der o -differen tial -phu ket -master mind -an gh -mel ani -bro ker -actor vijay -stun ned -continu ity -af fl -vo cal -perenni al -fianc é -in complete -hun ts -re issue -domin ates -tur meric -ro am -ri on -bag ged -nas sau -fu t -x ox -national trust -jo ye -san o -hearth stone -dis respect -le es -h se -siber ian -offe e -re stock -wolf gang -re gan -plan o -un wind -re par -mil le -] , -skul l -fat ally -concep tual -ðŁĮ ² -f é -ber to -b ms -u a -mag na -notre dame -le te -la undering -heartw arming -buffe tt -go at -pe abo -wind mill -v ac -continu ally -az alea -mem brane -can cels -make yourown -athe red -p to -tor pe 
-ðŁĺ ł -ðŁĴ § -sc ares -le aking -z et -pix els -ac i -kh il -marath i -ðŁĻı ðŁı½ -u la -tam u -chandi garh -z agre -aa b -pronoun ced -aubre y -sand er -pun ta -har low -ic elan -celebr atory -so t -unci ation -stru ly -mc dowell -deepi ka -remin ders -my stical -ct c -chat ted -s ica -bar gains -ch hat -ru bin -m net -oiland gas -pel ican -o at -mor ality -k our -i h -nu clear -gc u -ric her -vene zia -m ma -le ith -ac company -rich mond -sports net -ba ahu -smu ggling -mm i -ðŁĩ®ðŁĩ ª -twi sts -sahi b -.... . -amb itions -il lo -histor ical -fo rec -show biz -pon ies -chas ers -remo del -will ing -prince sses -am ple -cushi ons -ac les -lot r -da ch -an the -in corporate -new bury -ki ri -fried rich -ab v -ball ers -alber t -ðŁij Ń -let i -nan op -ci de -anal o -n sf -)) )) -griffi ths -valen ci -ro ano -fun run -babys itting -ca day -ent re -u ck -slu g -tic al -the sims -ro ar -car ney -g am -sto we -fi d -bun ny -sham rock -pe cu -mol ina -go cougs -con tributes -transform ation -mo y -v aj -sever y -antioxid ants -thir teen -sight seeing -l j -reversi ble -odd ly -hoo kah -nou vel -hal al -fe i -stab les -mul t -ho pped -bra ids -inter change -ghana ian -ww ww -eth no -con junction -ago v -ye ti -earth and -ts p -con serve -heir loom -metaph or -woo f -tor io -self less -n wa -em ilia -yl ene -y xe -gi ar -moder ating -pro bz -b fi -ne er -du mmy -hanuk kah -we bber -k v -eye brow -dag ger -su mp -ra ges -ork ney -tb o -hal sey -assign ments -tr onic -scri b -co on -an war -# âĢİ -jal ape -flori da -qu aid -haw keyes -âĻ¡ âĻ¡ -street car -ro g -dat lantic -gran ola -un changed -expect ation -Ù ĩ -mar lin -gu mmy -ðŁĻı ðŁı¾ -awareness month -oil painting -mu th -per ch -jun to -villa gers -mor g -che ated -web comic -the future -d ps -la kings -men tioning -vo or -ident ities -accor d -mc gu -l pga -rum our -massi vely -m pls -heal y -d ate -sp oli -re visited -on t -al and -scru tiny -lakel and -bl ending -< / -an kara -jami edor -metab olic -f ences -ann y 
-å ħ -semic on -oo tt -space ship -wack y -le ta -ap ac -she e -in herit -do res -ðŁĩ¨ðŁĩ ¦ -gent e -tw ick -ri ms -gal ve -de ville -king fisher -scorpi o -ow l -al ar -vari an -ðŁĹ ĵ -vene tian -star dust -then orth -q ing -har rington -consul ate -spectac le -ho bbs -tur ks -gre er -mat ing -ðŁİ Ģ -ðŁĮ Ģ -direc ts -í ĭ -pompe o -vo iced -la os -tz u -pro me -pri sm -mer c -fortun ately -bc fc -mcdon nell -not sorry -smi led -t ba -for war -mid term -dar by -we instein -up grading -wol ff -bron co -cab ello -ðŁ¥ ĩ -fi able -shar pe -bat tered -sat o -myth ical -instap ic -pre pped -eni um -e spo -di aper -explan ations -who pping -ragn ar -pe el -antibio tic -l acks -harri son -li sm -au l -qu ail -martin a -sent encing -sc ams -di di -tr onics -ãħł ãħł -go ff -za in -param ore -cha ined -clin ton -li ff -cott ages -em on -reve rend -consu mer -ce an -t any -lum pur -e bay -sto ol -ðŁĺ» ðŁĺ» -ta pro -h ath -modern art -just ine -prover b -app y -tra x -mani fest -am bu -nai k -pe pp -r sd -mer chants -kitch ener -shi fted -li zz -âĺħâĺħ âĺħâĺħ -âĢĶâĢĶâĢĶâĢĶ âĢĶâĢĶâĢĶâĢĶ -uto pia -tom o -ou ted -com ers -chiroprac tic -book club -cin dy -pro hibition -se uss -ë¯ ¼ -thin kin -rr rr -go fund -t ack -om b -catastro phic -ling u -guild ford -bo td -ॠĭ -plan ter -^ ^ -win k -kath mandu -sto ppers -smooth ies -re efs -hin d -bell amy -Ħ ë -waste water -vo or -nat l -! 
] -re el -y ap -scoo by -work space -corin thians -bl un -obli gation -g bbo -dy son -cra vings -ell ington -dap l -wre xham -earthand clouds -uk runchat -positi oned -kal b -four square -jo ck -im pending -even ing -ath y -pro claimed -c ites -ann apolis -san i -mar th -ir l -accom mo -ka a -fin a -y aa -di sper -ec ar -bha k -will y -ðŁĺĢ ðŁĺĢ -mcder mott -mo j -gener ational -u said -train ing -lon ely -lo res -impe cc -âĢ IJ -beav ers -ma ki -he b -aap l -å ı -wolver hampton -leader board -me u -c fa -easter n -hu r -civil war -ou rage -hor ned -le high -awar ds -evi dent -gi gab -r ous -ma del -ro byn -ur gently -k ors -en as -heis man -bam bam -fab ian -f om -evalu ating -assemb ly -out sourcing -hun tsville -ðŁĶ ª -justi fied -cashi er -sp aper -buc keye -analy tical -illumin ati -au tho -o j -sha de -geel ong -wh ey -he aton -terri bly -ele k -un charted -sd live -moto cross -her mes -dar shan -dar lington -cash mere -gri pping -cilan tro -pun ish -... : -ðŁĴ Ħ -inst ance -der i -lo bal -muk her -sp ar -thin ker -fre mont -com piled -color ado -vig ne -sm d -whe ad -villa ge -le ek -formula e -ta res -persist ence -?? ???? -ped ago -he z -alzheim ers -vul ture -off ence -is great -suff ra -kick in -h mmmm -broad way -ï¸ı @ -art i -alli son -endor ses -ry u -lolli pop -soy bean -kend all -cer a -inv ade -( ðŁĵ·: -conver ter -car pets -ho bo -fr it -pe ac -es qu -ern an -ou f -an il -di ffer -ch ing -bre cht -sp g -daven port -stra va -sever n -n gos -stor ians -fe te -parame dic -j hb -al amo -sne aking -gold coast -roof s -isi l -depic ted -projec tions -nu mb -o ss -ep i -glu cose -zid ane -infin iti -íĺ Ħ -ran som -ton ics -fal k -g ler -ou tw -re ss -week ly -the on -n ole -ðŁĩªðŁĩ º -vol ley -sum mar -neg ativity -sam son -ye w -aus votes -ju l -ju dy -f art -pra yed -pal ate -multicul tural -double header -cycl ones -pier re -ãģ ¨ -âĺ łï¸ı -rt w -conver ting -wir ral -l ari -ir relevant -austin mahone -an che -ya an -sd f -$ . 
-explo ding -ulti mate -prof ici -gofund me -cell ence -ep stein -bul lied -sep tic -à® ¤ -lu mber -cu ff -vsco cam -pl or -ภ¥ -se ok -ro to -venezu elan -sor ta -spir ited -daniel padilla -team sisd -radio active -icelan dic -ðŁĴ ¤ -ver e -accommo date -shi pp -ot ter -ol ina -e go -su la -san antonio -de as -simil arities -âļ ¾ -y om -bro ward -å ° -can cun -veri fy -on te -candle light -ìł ķ -inf ants -az am -ðŁĺ ° -le ven -un stable -bloom ington -x ford -con tour -y p -innov ator -histor ies -po y -lolo lol -ex pires -cat alo -bill boards -an ab -el ic -novasco tia -fa ire -ìĿ ´ -rock well -gr ille -az tec -joh or -ur struly -fi ren -dun lop -id le -port man -jo es -tx hsfb -hol m -cham ele -under world -lo ss -ti em -therap ists -past ure -pa ste -ing now -vul can -ra gon -lar kin -o shi -ho co -child hood -umb rel -success or -kath y -iz en -° ï¸ı -share holders -ol ga -ai b -he ap -fl aming -ro u -air tel -rat t -z ane -vo w -thor ough -sn ag -par th -un conscious -ve y -new release -gh ee -croati an -facilit ating -swan son -astor ia -to logy -master y -ðŁ¤ ij -bil bao -trou pe -the ori -chey enne -ro tt -shore line -gra sso -master chef -+ ) -vi x -ellen show -as g -an ak -ku ya -safar ilive -debu ting -blu m -list ener -v ins -book shelf -smart cities -makeyourown lane -; ; -ðŁIJ ¯ -ri zz -on ward -bull dog -bear ish -vir uses -fri gh -lin den -we iser -sn t -gon a -dre sden -fl anders -cu k -wheel ing -ba u -atu esday -surf ers -swi ft -mc call -arbitr ation -aw d -mon c -b ine -at x -re fr -mi ro -po sey -n are -rit ter -âģ ¦ -play book -blow out -sports manship -s oooooo -malay alam -gri ms -bur bank -infin ity -sar gent -oit nb -joseph ine -ski pping -par kin -excur sion -semin ars -jo har -par tridge -post game -ll ll -blan che -temp ting -m na -lu ka -is ers -to ffee -bar ron -he mmings -sa e -go hawks -cu pid -li mbs -con se -un common -z ada -head shot -so ils -pione er -mam ma -sem itic -pan dey -jamiedor nan -spl its -vel a -son i -ra ff -t 
mobile -âŀ ĸ -pra wns -lit er -enjo yment -egg plant -tu b -cultur al -us ic -suspici on -sy cam -summ ed -ma du -ho ck -up wards -eye ing -ri ve -assas sins -âĤ ¬ -out fy -chi ves -t ner -la is -por ridge -sad dest -w cc -vick i -sna ils -biz italk -mill an -ðŁĮ į -sam oa -j ing -mi key -gu j -chel ms -eli gibility -arma da -thro p -surger ies -ãĤ ¿ -mo hawk -ex its -me m -is lington -c me -land fill -kait lyn -ðŁİ ¼ -combin ations -tomorrow land -ver b -cor a -pre cisely -na om -ðŁĨ ķ -shr ink -sof tly -merce de -mand el -poo dle -ball erina -sop h -jux ta -y at -ary an -hesit ate -lo wered -gu lar -dungeon sand -ron an -my ri -sp f -men opau -gra sp -pa thi -fe asi -fla w -shi story -ste ward -gg le -fay re -cli que -credi bility -yo g -sec tion -mu sko -se ville -no tt -cal m -mate o -indic ted -fi ba -by l -lin o -u kin -!! # -enig ma -siri us -bu sc -ðŁį Ĭ -mac kerel -psal ms -a at -tomorrow spaper -ðŁĺ ĸ -p fc -........ ... -shre k -mul let -o sh -danger ously -immen sely -am ur -ðŁį Ĥ -pro por -sy a -london marathon -abo ve -obli gatory -pro v -ra cha -alex is -pri mary -sh h -ether net -d stv -cou gar -un lucky -ni l -steak house -mel a -fc bayern -cause way -ca therine -fluore scent -nx t -to kyo -au sp -releg ation -qui zz -shored itch -proud tobe -promo s -inter acting -home brew -da esh -w pg -stead ily -provin ces -bal lots -i ah -al to -< << -you u -ri ley -prefe rence -tra verse -incen se -am munition -ho dges -# @ -hail state -tart an -witch craft -vent ilation -liber tarian -! â̦ -ow es -% ! 
-ong chang -bru shing -le ic -fi ber -under attack -down load -ex pir -hy o -pompe y -mc bride -y ag -stre e -com bat -ten ding -ai ra -gug gen -ab ra -in na -fli ps -aw al -m ach -dol lar -inspir ations -z um -o du -it ty -video game -aqu aman -har u -bel fast -je b -but ch -us gs -calcu lus -go yal -mor gen -x finity -stand up -contrac ep -sab re -na be -in secure -gener ously -epit ome -l w -t ca -narr atives -don nell -pand as -ber gh -tu t -ker al -fel icity -br ampton -quinte t -nom ore -ðŁĶ ij -lo i -alham dulil -ðŁĶ¥ ðŁĶĹ -ston er -shaw l -clin ical -bren dan -gon e -fla wed -tri ppy -j g -al location -po aching -ve vo -mo cks -lef tist -bon uses -condem ned -abil ity -st ating -microbi ome -bio logist -for you -wahl berg -ss or -ift ar -w ul -ÑĦ оÑĤ -pom er -me me -ver te -tre ll -tra it -in let -hormon es -deliber ately -vill ar -battle ship -p bl -tw enti -ho kies -dal ail -say a -may fair -han s -die ts -⾨ ⾨ -od in -hot spur -pap i -k ana -k amp -fin na -flo tus -ti ans -unic orns -tribe ca -chang ers -fore ground -out a -inv aders -gett ys -tomorrowspaper stoday -mac millan -hand written -w fp -u de -state of -base d -âĺģ ï¸ı -cas m -psy ched -histor ians -fol d -d da -ag grav -p ans -green way -au sv -ðŁĺ ¶ -shradd ha -inde x -be sti -zim mer -t ness -eye shadow -ot te -go ts -distribu ting -pro min -yo l -ace a -tram rahim -hoo per -supre me -jam min -intu itive -quali fications -sli m -sid di -jay ne -tri pping -g tx -pun s -e manuel -om g -mid summer -in to -succul ent -ri en -new mexico -o or -hoo king -in f -ðŁ¤ Ŀ -flir ting -na hi -g friend -t ps -hel ix -z s -on ie -ct f -kri s -irresi stible -fla p -ðŁijıðŁı» ðŁijıðŁı» -us wnt -ru d -ram ps -pin oy -ot w -lol z -low ering -favor ite -t mc -phra ses -her mi -aver aging -em br -ben o -estu ary -sle eve -ribb ons -ta sh -ภ¹ -x f -aw gs -sun ited -brew eries -anir ud -pun ches -ol die -ip ads -wi fey -land lords -d ji -gun ner -íķ ´ -tex an -ex op -cas sandra -s off -ðŁļ « -igh ton -bak ers 
-awareness week -v all -ear p -bts bbmas -apologi zes -âļĵ ï¸ı -was ps -states man -snat ch -watch dog -ra fi -after party -spi ke -j er -peri ph -r nc -mu ll -le en -shi es -li eu -urstruly mahesh -mer ton -de sai -shi f -ðŁĮ ± -pe dic -gos ling -arrang ing -ww g -gen y -you uu -netfli x -e ttes -k wi -bernar dino -am iga -Ø ¨ -kashmir i -t ings -emer itus -de cat -ab domin -dc i -pha ses -d jan -be am -op ry -i shed -the ellenshow -the st -habit ats -to ons -mclau ghlin -ri pper -micro biology -tal aga -clu eless -ss u -cro che -bro mance -longe vity -zagre b -prev ented -tra ve -spo ilt -darry l -migra ine -al cat -dd dd -vi v -ser pent -mat tel -jam a -con quest -î Ħ -sam sung -presbyter ian -ket ch -fire fox -mo tif -le c -cho pping -cher no -j ann -ðŁIJ ° -pro lon -wake up -conver gence -mersey side -heart broken -lo oming -hal lucin -mai ze -commun ism -mo h -twitter storians -serge y -res eller -favor able -ed gy -re iter -mal aga -live me -ka hn -pul sion -big g -kim kardashian -ati o -tyr anny -ru ption -q ant -pro ven -by z -pu shaw -kri stin -e er -tar dis -ri z -awak en -mi ko -un documented -path finder -indirec t -resemb les -h ler -conce aled -scand al -re im -d nb -cr itters -attend ant -apprentice ships -aa u -scre amed -l su -fa h -har bour -ed d -bat sman -li ss -mi sha -spani el -it f -advan cement -fa c -close up -cecil ia -medi c -narcis si -lav ish -gi ac -ma ys -le it -wine wednesday -pushaw ard -let to -curren ts -bug atti -out ine -w j -un do -ler osis -devo tional -ðŁij « -on na -fais al -sa una -himach al -am ii -à® ® -di zzy -screen writing -ph x -sp n -ick i -ag irl -fi shes -wb z -pi m -bo ar -ac id -! .. 
-rocke feller -n ga -dra stically -simpli fy -dru mming -autum nal -gur mee -lor de -jo ann -give up -b our -am ura -der land -sim pler -wat son -tri dent -concor dia -bel lum -bre k -dum plings -vi on -dungeonsand dragons -sp ri -ascen sion -wil datlantic -u st -rob ins -legi on -insi st -jar o -gue ss -so b -bigh it -pool side -negoti ating -mc gill -bil d -techn icians -miti gation -ajay devgn -b to -ant en -cosmo politan -ðŁĺĬðŁĺĬ ðŁĺĬðŁĺĬ -patri oti -temp er -promen ade -nav ajo -nam m -wrink les -dc fc -le ach -bru nette -r f -cout inho -al ti -tradition ally -op tome -na z -accord ingly -rec ard -de ets -sw ell -po sure -whit ening -strang er -illi on -here ford -u wu -ro bber -cotsw olds -cl en -gor ge -nam aste -re lish -gri ff -adren aline -bla sio -val e -ê ² -toler ate -rail minindia -jen sen -ho ven -el lu -ob sole -eisen hower -unidenti fied -than niversary -body guard -Ø ¯ -i dge -sch al -stock port -sn i -re taining -po po -pix ie -oli thic -ki er -ha jj -sa z -cor bin -!!!! !!!!!! 
-v it -me gat -de h -circu it -af fleck -theore tical -hope less -u ab -slu mp -b ice -jam med -let stalk -can i -side ways -labyrin th -re fs -ha hn -jare d -ðŁį ¹ -jam bo -ph yl -enhan cement -c tr -ful lest -se ye -do ba -cho ic -yo s -cb j -andr é -re watch -pri ma -doctr ine -for gets -u hm -ar ound -u le -art lovers -shi raz -har th -ex tor -Å ¡ -unexpec tedly -eli us -y x -em my -se ac -ðŁijĩðŁijĩ ðŁijĩ -correc ted -com bu -wom anc -cou gh -what son -publi shes -divers ity -back bone -lock down -mesmeri zing -nor te -ma b -desig ner -í ģ -ra gh -mole cules -get outside -the beatles -semicon duc -nach o -lun es -ham mers -sul tan -o on -fe ren -att ach -ar qu -uttarak hand -s ash -; - -tre ad -i ko -ar thur -scandin avian -r ation -ga el -charge able -fish y -v ma -hand bags -char a -ay ne -de fam -sett lers -qad ri -pal ais -in wx -apocaly ptic -poo ja -a es -at ories -proof ing -n lp -ts la -v ina -li do -dee phouse -informat ics -v v -pp ings -di ss -à ¯ -uhur u -st ony -betra yed -b aff -my ra -as pen -allow ance -tam ara -ci f -cor bett -ser ge -di go -ambi gu -pain ters -p cr -p ca -nom s -lo ft -ve e -opend ata -ðŁIJ ± -alex andre -identi fies -fantasy football -re production -brom ley -ware agle -mm er -p ss -cu es -ay at -hut chinson -sar ac -jack man -ira h -ap ink -col s -aussi es -ex ecs -day ton -ðŁĻ Ĩ -im v -har am -chuck le -authent icity -ar do -incub ator -ภª -photo shopped -embrac ed -fight for -gor man -zz zz -schol astic -cri sps -te apo -mid night -ga ine -col lier -s ate -de tte -å Ń -imag ine -i ff -tw ili -i fication -teat ro -nor ma -es ur -emergen cies -rise up -r inger -hass le -cait lyn -tranqu il -vers a -se b -over look -gin i -bo go -se re -may ne -henri k -contamin ated -rhapso dy -pro portion -wildatlantic way -âģ© . -organis ers -tran e -stand ard -sper m -laun cher -ric ci -her ts -paper work -showcas ed -mer yl -pen a -p imp -disa strous -^. 
^ -phar a -x is -fron tal -sw irl -sp ills -swag ger -smart watch -sizz ling -savi our -cat ar -bb cr -refurbi shment -dr is -citro en -absor b -patrioti sm -il leg -chro mo -fresh ers -ru s -lim iting -ef ish -down ed -man dir -hazel nut -p all -mac on -disappear ing -quali fies -bo on -bar racks -am ine -gen dere -ðŁļ ĺ -j es -ãĥ Ń -qu ito -middle weight -sch au -quad ru -aci ones -limit less -ðŁijĮ ðŁı½ -ch man -ar av -regulat ors -it up -batter sea -mil ford -g z -tic king -gh ou -cru shes -tu tu -dread ful -fam ine -for change -dalail ama -ðŁĴ į -whit aker -hash mi -h us -vo d -bet te -aa ah -iso o -ðŁ¥ Ī -ha ar -la ine -b v -all day -spr out -indie games -free bie -gree ks -but ler -ill in -ha al -ware ness -si ma -public health -gam a -wa a -oun g -goo oo -okin awa -off enders -im pose -ho c -young ster -story teller -sc ap -figh ter -+ , -whit es -music monday -re za -go ducks -bri a -mi um -cas per -cru mbs -a ad -marti alarts -ch p -ri gged -tn g -harve sted -sa k -do jo -mill wall -b nw -oc d -histor yof -t mr -si rens -fan ci -caregi vers -vir a -son i -recur ring -acknowle dged -ðŁı Ł -oph ile -bu cky -stre ssing -roo k -di gger -vi val -san do -fle et -si ers -sel caday -refre shed -anti fa -a que -po lo -disappear ance -de mb -âĮļ ï¸ı -ren ted -ber ger -g mb -cu la -ss al -goo dy -u hh -marcel o -w anna -soft ware -shop small -turt le -tom as -fri sco -ðŁĺį ðŁĴķ -jim enez -c su -day z -an do -wyn ne -choreo grapher -cerv ical -trail blazers -ed g -zend aya -travel blog -el s -whole some -co g -lab out -ar ney -del le -su isse -ma si -ine se -om be -fi ddle -re claim -pa u -wat cher -sla in -ber ty -opti mum -el ites -min is -tur key -patro ls -ger ard -au reli -wild ly -wal tz -br gy -w ob -cre st -+ ++ -ve z -fro sted -davi do -the x -param edics -p into -han k -du pont -ur g -fo stering -micro poetry -spec tre ----- > -ne uro -fri da -music al -galve ston -e ffic -sc ape -pal azzo -th all -pro visional -p js -au re -ðŁĶ ľ -mam amoo -kit ties -cre e 
-wa k -lo ool -lu pus -cn blue -à º -ðŁİ ¬ -rac ed -tro se -om as -stri de -co ors -⤠µï¸ı -in comparable -cy ril -broad er -arec lipse -ðŁį Ķ -inter val -ti ru -co working -w aco -a ham -a bee -flouri sh -the times -ol ini -kick boxing -lu cer -at la -as un -casser ole -mi aw -lobb ying -jan ice -cir que -re flex -le ary -sanat omy -tem pest -se mb -mur dering -us av -ro bo -on et -p cc -nati ves -life of -sa ha -ruth less -rel ates -appeti zer -pye ongchang -nor d -er u -a thing -ug ly -pl ying -bran ce -organ ise -kend ra -dat o -chees es -par ma -burn out -a stra -pre toria -adjust ment -uk u -sl o -li ken -fav ors -cli ve -be ets -snow donia -go tv -sy n -open house -pan i -portra yed -sl ated -me cca -ren al -supportsmall streamers -staf fs -da o -bi ker -vik tor -tit us -admi red -ðŁĵ ± -hurric an -he ats -gl ory -photo genic -mer i -de por -burn ham -or angu -dj ing -impre ssionism -ign ition -ca i -w ynn -de pe -cove ted -colla gen -sau s -or nam -administr ators -ss on -nh politics -hahahaha hahahaha -aspir ations -r gb -swol len -so we -sc r -diver gent -hou ghton -han oi -d ory -ni ki -land ry -b cci -ðŁijĮ ðŁijĮ -is mail -tri pod -her d -bhat t -dress age -tab by -ingu ish -hur on -à³ į -à ł -to das -evangel ical -chor ds -st john -slo ppy -marty r -face book -ali ght -sen sei -kath niel -r ites -zi one -u o -revel ations -weight lifting -pan o -nc wx -ac ton -à® ķ -Ø ² -som a -à¸ Ĺ -respec ting -mar che -fore man -be tty -ki k -shi bu -po on -argy le -k swx -et z -mar bella -brac kets -stand by -fire side -defi ance -v ex -britanni a -in habit -appo int -piyu sh -le ash -sci ento -fla sk -sen na -> : -at roc -sand erson -id lib -dhan ush -ðŁĺ Ļ -en thr -hit ch -de dly -al ley -dor k -mon do -cudd ly -mis sin -ye sss -night ing -j pn -w ary -ump ire -ma z -ê ³ -bab s -ĭ ãģ -stan ford -posse ssed -exce eded -ðŁĶ ¶ -wall art -tra p -j il -hi bis -sp ying -scri be -khali l -trans lator -lu mb -di zed -ch c -super vision -shut ter -ja g -_ * -yester days 
-ms f -hi hi -gonz aga -gille spie -vive k -ec static -this morning -ch us -ed es -ston ed -be es -ðŁĩ¹ ðŁĩ -tur in -ho ver -at rics -ster n -sam heughan -auti sm -mi ya -eye witness -writ ings -travel tips -chut ney -px rtg -keny ans -my stic -k rit -/ $ -red head -world ly -am us -op la -le ve -gab bana -se en -o clock -gang a -keen an -sc ent -ol dies -go green -corner stone -comp ly -con cours -ðŁİ¶ ðŁİ¶ -ha an -con fis -aw son -cle op -î Ģ -su zu -sau té -al gar -subscri ber -este emed -ãĤ¤ ãĥ -worth while -mel rose -flo ck -bri ghtly -viol inist -p ere -sli pping -and co -si gh -ha van -cu lo -m sa -fibro sis -matil da -ra fting -aw ard -ë ª -mm mm -ge aux -ste iner -sin n -help ers -beet les -ai mee -tai wan -pistachi o -mac beth -m zan -descend ants -on sale -in r -il m -grou se -sa ig -mo w -bi gre -adjust ments -tu la -mathe w -transl ates -mu h -bol lah -ðŁĴĽ ðŁĴĻ -amo res -ab outs -bomb shell -bla ster -x avi -s ns -k roger -ga ther -erad ic -daf t -chem o -ben ches -ðŁĩ© ðŁĩ -ut v -our a -n ko -gator ade -biaf ra -ok state -im danielpadilla -dom ains -open ingday -kid do -do i -ric e -day care -mac millan -ba thurst -cheer leading -ðŁ¦ ģ -cash back -k won -hob bies -exem pl -ries ling -âļ ª -ag les -ny s -every thing -nav is -ad di -magne sium -faceli ft -ark ham -grand es -extre mist -don at -vit ality -pump kin -be tta -sl td -arti san -li by -pe aked -ah hhhh -mary am -assi m -un sc -ment e -al aya -low ers -ar as -gri ev -le ip -gr ati -cri ses -spr ints -exe cute -w to -ms d -mag ical -re viewer -spark les -juke box -ðŁĺĤ âĿ¤ï¸ı -pay back -licen ses -dun kin -bel t -lake wood -h ateful -bud gets -rev amped -ph erson -ky iv -went worth -ro sen -cru ise -gi ggle -def star -assassin scre -ym outh -win kle -w fc -band wagon -b kk -w iring -kear ney -south side -pe tit -! 
ðŁĺį -nor dic -mir za -mu gabe -v l -scon es -k tv -sand al -du c -m alls -ðŁĴŀ ðŁĴŀ -it c -al ay -im pair -un rest -flo ss -c é -ab ou -var ying -muse o -ser ver -di ya -hibis cus -ero y -mer ritt -fin dom -f pp -un usually -go tt -conting ent -ali aa -ball on -jo l -hi ked -zy me -ay r -ag n -ga z -perio dic -spar ty -practi sing -lin ton -tal is -cy pri -womanin biz -radio disney -ðŁĮ ¼ -jump ers -endo cr -ðŁļ¨ ðŁļ¨ -and on -shar apo -mi er -ma sonic -fac tories -vi en -bb ers -ìĽ IJ -hol d -ke bab -be ak -approach ed -ac milan -mun ro -ko sher -excell ency -negoti ation -walt disneyworld -cr ouch -te asing -suppre ssion -en ya -b ce -transformation tuesday -cal lie -vis was -p gat -ic ted -end ings -esc u -recru ited -it fc -collabor ations -g ino -snu ck -ausch witz -i fc -x ii -ke sha -ger vais -clo ak -x l -sa ad -prob ation -pre cau -mac in -anasta si -le k -e azy -daysof code -mariah carey -yo g -stit ched -boy friends -sh ar -ph ile -ag u -twin kle -phi shing -week ender -ic ton -gurmee tramrahim -al ton -l eness -all an -pen ultimate -kry stal -go u -lan de -dis mant -ab using -nor se -pat erson -ed mun -ap an -xi umin -sk el -cat walk -re act -wal led -t angle -br yn -ve to -super moon -cas ablanc -appreci ates -ski d -bo th -catal ina -ele ague -cyber monday -cau tious -ðŁ¤ ĵ -nov o -hamp ton -ha ye -jose f -var an -lo bos -roano ke -orph ans -tt in -squ ads -ishqba aaz -black panther -e tu -k sh -cru mble -cess na -reli eved -scul ly -pollin ators -explore canada -ki es -kam loops -kir an -pri mal -sett lements -hot spot -brain storming -ce dric -bi ennial -sh ant -âĻ¡âĻ¡ âĻ¡ -do on -hear n -walk way -fe m -ve al -deport ation -tox ins -elimin ating -descen ding -by the -bla sphe -ha sta -comple ment -as cent -ri ga -provo st -âĸ ª -wee ping -anti semitism -employe e -unearth ed -pin o -natali e -bla d -ang ola -lock heed -in ian -ag r -ni ster -im pala -m ke -fan atic -âĺħ âĺħ -ðŁij ¸ -lu ch -simpli fied -gall ery -econom ic -cy borg -con i -sel ma 
-in ception -ko ala -dv ds -cre sted -m mor -visi ble -n sd -ðŁĻĮ ðŁı½ -w under -refriger ator -re opening -e era -carou sel -as p -balli stic -victor y -mo tive -tre y -sharapo va -si i -mon ter -int end -west chester -sp e -cy mb -vi dal -ll ama -uni v -fin er -crafts manship -jazz fest -b ch -ag gio -n cc -lamb da -tranqu ility -cis co -ba den -so bbing -of i -go ta -ru mored -war med -ore an -ac ton -mar ci -gh ani -âľ ĵ -as sorted -pembro ke -pen elope -da f -at ty -aim o -pretz el -carni val -than os -ko chi -mer sal -ham radio -ar twit -cas c -guer rilla -kush ner -k app -al ise -todd lers -steward ship -o tti -ter ri -tem pe -rest less -vit o -zay ed -rsp b -pi on -hi ppo -haw thorne -in as -am ily -nut cracker -lo p -d ali -tro pic -ðŁ¤ ł -ul o -jare dle -py rene -pale o -usa ir -m ould -it ated -gene tically -biom ass -ðŁĩ³ðŁĩ ± -do dd -practic ed -monarch s -un manned -m buhari -am al -photo gra -ko ol -bren don -ju ices -cu re -world bank -poin ters -ðŁĴ Ŀ -tur f -le ds -bor ussia -bapti sm -warwick shire -moun ts -gay o -be gg -co pied -asi ans -k g -moder nist -gi d -front man -concentr ated -y t -sc avenger -iron ically -adi c -ps n -ðŁ¥ ī -cultur ally -yu v -mac arthur -fertili zer -be withyou -ri gor -min ors -z oning -âĸ ł -ri r -adole scent -vin ny -ren g -sand stone -gu et -we sth -ple dged -lac ed -sp ide -v ai -ty coon -seiz ure -du p -appalach ian -ro k -cathol ics -sey chel -posse ss -la ger -jo di -cham p -stra s -d ina -cent uri -cal der -blur ay -ðŁĩ¨ðŁĩ ³ -mo do -an nette -youtu bers -chap s -ang ling -label ing -a qui -pk wy -ly le -bi sexual -lit ur -dug out -li bby -grey sanatomy -sub stances -august us -rall ying -fi del -ing ue -äº º -hallmark channel -tooth brush -m á -adi rond -ag gi -ðŁĵį : -cru sade -tax ation -k z -i ver -dou bling -room ie -wa b -en rolled -az on -a ju -grand children -as df -ðŁ¥ º -mat ic -ough ton -utili ze -ðŁĴ £ -pon der -rais in -dys function -co bain -butter nut -e man -su red -dri an -and friends -with 
the -on omy -heine ken -bri dal -leader ship -pyram ids -deutsch land -jo cel -bo wel -y qr -horse power -be acon -ing eni -gra dient -fer mented -mo om -thing y -pot assi -wrist band -bor d -bo died -ðŁĺŃ ðŁĺį -ma pp -ka u -cyber punk -ph ish -loo king -co ates -ap ur -am ie -uk labour -at in -g la -adop table -shel by -v illi -ri ya -m ingly -cli mber -bumble bee -ðŁĺ ¸ -c sd -âĿ ¥ -hospit alized -c ki -hat er -ch r -re tina -it a -fan base -beat rice -gwy ne -go ss -fo s -favor ited -swachhb harat -mal ade -mon mouth -" [ -si van -sh hh -command ing -sains burys -wee d -g man -ss w -rep tile -iv y -tro pics -roll ers -over cast -ex position -masquer ade -man crush -wa ist -spr inter -sle et -le vin -j pg -_ ( -o pel -explo it -ap a -po we -wrec king -jong in -or b -er ick -bo sco -pra ising -ber tr -to wing -in security -ku t -resto cked -rr p -prescri bed -trafal gar -per t -g ases -app rais -g har -music als -âĸ¬ âĸ¬ -mc fad -ag ony -conditi on -equi p -shi k -atra vel -ðŁĩ¿ ðŁĩ¦ -ke h -abduc tion -pe oria -wil kins -g ms -as d -ev i -ðŁĴĹ ðŁĴĹðŁĴĹ -u z -mo c -halle lujah -guad alu -lou vre -dra wing -go ve -ph ant -fri e -web dev -program mer -z able -games com -clari fy -li th -kin ky -âĿ £ -labour doorstep -son ata -ju ris -mai den -vi adu -buch arest -conditi oned -capit alist -u de -ps b -sp ca -lul la -footh ills -kay o -bon d -wom b -roun der -ce sar -bur sts -ap ra -sw oon -sab rin -fra grant -cle arer -ku brick -cli max -jour no -ag le -ðŁı½ âĢįâĻĢï¸ı -poo ch -hal e -sol it -sal mon -organis ms -bron son -art en -hodg son -alo ve -vent ure -bb i -ae a -ðŁIJ ¢ -ld n -d nr -o zone -el las -man ny -azz ur -un beat -tru ffles -th ong -ma ñ -las ers -ley e -gettys burg -back packs -or is -ma ison -craw ling -la bra -cl ing -dra gging -ste al -dou bt -de van -ck ers -agent sof -photo bomb -elon musk -abo y -dist ances -story line -sp i -nor than -europe ans -wh ale -ser pent -ðŁļ ² -fi or -tr it -ox o -awar ding -class mate -su fc -smar test -rich es -pr k 
-big foot -ar mb -bi polar -dw elling -om ars -k wan -gri me -m eng -freder ick -navar ro -sorry notsorry -jaredle to -pa ve -sl ack -barn sley -att ar -evic tion -accumul ation -o ir -cat chy -wel ter -vik as -has see -nik ita -mo yes -mathe ws -shi v -gat wick -pro filing -compan ions -mar rake -an tics -ðŁĻĮðŁĻĮ ðŁĻĮ -se se -bo i -bart lett -poison ous -ab uses -ym m -kam pala -guggen heim -imv kohli -dol om -bre e -thro ttle -gare th -fitz patrick -un ya -par ad -mar got -j nr -we a -potassi um -p nc -disgu ised -cra sh -ren ergy -ill ic -coup led -ni els -ci ones -æĹ ¥ -im ent -despic able -d ye -what cha -conne ctions -paralym pics -gaunt let -wait rose -suici dal -star ship -vap or -st ou -law maker -coo led -si mo -then o -offro ad -ja den -bas que -vick y -lu kaku -centr o -tri sh -strate gist -medic ations -hor st -b fc -gra il -sharp ly -ad itya -tom b -kau fman -tri pad -sam ba -pastor al -brit ney -sag an -hill side -mas ons -sar a -z one -x u -to tes -rob bie -app en -mon tag -der o -short film -charis matic -tat ors -ki ba -and ri -al arming -split ting -ic ar -th ug -scari est -sylve ster -an an -u trecht -a difference -me ade -bu ster -air strikes -cu ffs -account ants -ðŁĺ¡ ðŁĺ¡ -new t -bo tt -issu ing -cl ancy -wwen etwork -kyu hyun -rese mble -pajam as -sin k -kin ney -sul ph -or k -li es -la gh -or ton -ra hul -d sc -we will -re am -collo qui -shar ia -hec tic -sar casm -land er -tm z -endor f -ro z -ham mered -fri s -w adi -pope francis -he it -flash light -un born -op es -hol iness -ðŁIJ ¦ -nach t -im sa -gr acing -bj p -ver ts -c sc -home owner -a que -bigo try -anni e -bag h -âĿ¤ï¸ı ðŁĺį -car i -thom p -dispo sable -cardio logy -pat ented -hh hhhh -ld r -stephen son -cro res -fan ning -cli mat -ðŁijį ðŁijįðŁijį -ðŁijį ðŁı¼ -aer on -piccad illy -bank rupt -sil via -emplo y -don ny -commen ting -screen writer -io ta -ce an -anc ers -tu an -street wear -ठ¯ -sk ine -esp a -asi f -os ce -she ppard -more cam -bott le -der s -orac le -google play 
-aver aged -edmon ton -steph an -sister hood -cru sted -stag gering -methodo logy -congress woman -c abo -tri ggers -mil ky -gli de -tooth paste -room mates -nu ff -gu am -sprink les -alternati ve -wat fordfc -uof t -hal ey -cont acted -bun dy -pro stitu -gh ar -pre ston -on site -hil ar -g ts -c att -hamp stead -? ?! -ðŁĩ§ ðŁĩ -bbc qt -aless andro -resi st -ma idan -t ko -shad ing -pin up -gal lo -sin u -at ec -fun k -ac lu -stri des -rhy me -wet land -bbc springwatch -t ins -wild card -st our -flamen co -pau la -onto logy -gang sta -am ade -ãĤ « -t bs -skelet al -run ner -jard in -harri er -hun ted -z hen -believein film -de mean -au diti -re start -chon dri -âĿ¤ï¸ı ðŁĴĻ -mcla ren -ga b -sh um -au sa -lewi sham -y pg -k jv -fur nished -dor o -bon ded -mor ty -lat itude -_ ) -lo va -water ways -vin ai -shor th -drun k -c ay -ay ana -kap lan -capp uccino -spr o -life boat -has bro -spol ice -tor on -do ing -dam n -sh ree -foun tains -ent ation -mar u -boar der -to pless -j ada -chan ning -ul ls -en closure -gib son -fractu red -brit ton -à ¶ -t ous -por th -dra f -tra iling -mar gate -eli fe -down ward -lin n -gla des -girl power -ak rish -u ki -ron da -ts c -appreci ationday -vis ing -lo om -ðŁį ³ -mex ican -ar gos -y ya -jad ine -south port -d end -si sta -rede em -men g -bra xton -antioxid ant -s key -mp g -fin ding -vibr ation -ce u -kh art -di mini -cl ine -shel ly -hin es -ī ï¸ı -to pical -no ver -ma xx -prim itive -illustr ate -b ounds -tren ton -join tly -breed ers -u chi -wakeup america -b ada -ðŁĹ £ï¸ı -gu acam -sp heres -pere gr -youth ful -lo lo -bir min -t ly -jeremy corbyn -defe cts -co sm -a rent -v aa -bag els -medi ac -cori ander -ic ago -g haz -ab bas -re model -struc turing -pu m -out law -ad ani -r bc -gul ls -n li -confu se -ðŁijĩ ðŁı¼ -vil a -mcnam ara -correc tions -mug hal -ser i -re gain -ss b -lea ve -haha hah -gran de -di stressed -re chargeable -ho a -hou sed -sti l -attribu ted -opath ic -di ps -pri t -head phone -conclu de -pil o -he t 
-ut sa -nit in -je m -sni ppet -tutor ing -op er -sun k -en sla -cha u -ac orn -quinte ss -ran kin -affili ated -our lives -cl int -se ater -isa ac -ba shing -sme ar -nur se -doo dling -" ; -sa ku -atroc ities -im am -g fs -viol ating -comm end -brad shaw -er ville -b illed -b be -thul hu -i phones -moo se -di os -re w -me thane -strang ely -whis ky -ti ghtly -spiel berg -radi us -notic ing -wi f -ig nati -i fa -ap is -w ali -ha itian -bu shes -y z -v l -ex ited -asse l -tru ec -dom en -ash er -in king -newyear seve -hend ricks -bat i -ìĿ´ ì -rich ter -mon santo -con line -agre at -ðŁ¤ ¯ -master pieces -ar n -rough s -cle ve -se v -fashi ons -to ya -sh ail -cop eland -aqu ari -dec als -are you -y aya -a str -fon t -ml m -ar ca -pp or -pol lock -xper ia -conserv ation -chain saw -ag gie -?! ?!? -si le -sh on -ìĹ IJ -note books -marque tte -de us -bb led -spic er -mc cabe -nor wich -modi fication -boo sted -stru m -sales man -bang le -nis san -hez bollah -brea sts -a af -anth us -sk er -ow ed -her os -gi fs -fo sters -eat ers -du es -_ / -lymph oma -sf am -me gal -afri di -ag ic -p amp -jeal ousy -ðŁijĮ ðŁı¼ -calcul ate -napp ing -g ale -ðŁ¦ Ħ -lub bock -assu med -ren ting -íĥ ľ -subur b -ãĤ · -tech nic -u cla -in front -gar net -ster oids -stri ving -ho war -mo ver -le ton -bull do -is in -ci ao -sn z -fore front -d ams -mid wife -ma wards -cla pton -we in -subsi dies -spr oud -rother ham -phan tom -ar ach -spi el -rac ket -sel amat -no on -l bc -enti ally -ðŁĴ ¸ -sil ve -m oud -kine tic -y asi -ðŁİ © -o ol -mi ku -i za -fer a -flo ren -barber shop -groo t -z est -ne ars -stan is -z and -police man -juris dic -form ations -appar atus -sp d -arti fact -to sc -motiv ating -womanc rush -re dro -diagno stics -ra za -out fitters -el xn -dod gy -ry n -sh d -ortho don -ol de -jay anti -bal ances -quic kest -can ton -friday reads -! 
* -na a -a ak -ðŁĶ · -behavi ors -rasp berries -ä » -polit ical -cam il -å ľ -di k -ast ounding -lie be -novel ty -tur moil -sul ly -spring break -hon ouring -cc g -ðŁı Ĵ -my little -ky c -pro ms -ðŁķ Ĭ -à ¨ -bi ge -av ril -ðŁĩµðŁĩ ° -mari on -as ants -sur ya -oc tag -luf than -ac ron -fayette ville -ti que -love s -en ca -de kalb -ta ver -de vote -aux iliary -joh annes -tread mill -ay an -qu r -donald son -cher yl -" .... -s ven -kir sty -gun ners -ra dish -o ahu -v sky -i ble -con course -b ps -elo qu -ash ford -te bow -roblo x -ma da -dri ving -th day -spro ject -m ms -band ed -. !! -libr arians -flan nel -intoler ance -her al -ç µ -neme sis -list a -tar ak -cry pt -star plus -vish nu -sc ale -cr is -% ), -j illian -regg ae -pegas us -ol in -ip ment -man ic -l fc -godd ard -ite am -parl our -anch ors -lee minho -talla hassee -ant it -d ho -kid ney -y ash -batt led -az ad -gar is -faul kner -sni ff -papar azzi -ed m -phy llis -con tested -aa ay -se ca -k ton -vel ve -rain ier -for um -tam pab -ho sp -trac tors -ox fordshire -no tion -guang zhou -ðŁĺ ¯ -ref ill -wednesday motivation -sli der -mukher jee -pr att -fon taine -alph on -af ar -ts i -pest icides -fi ends -mo cking -bra w -tran sat -do ses -co res -hom ophobia -docu menting -zlat an -con doms -s é -sun set -kun st -ton ga -ภª -v ation -sp ray -chow der -ra ps -palla dium -nor wood -music history -hoo ker -si si -osp rey -ph ys -conce ded -bob cat -ar mad -ze it -Ù Ħ -ðŁĺģ ðŁĺģ -mer idi -ðŁĩ· ðŁĩº -corn wall -! 
), -touch downs -ze it -chal et -mm m -al che -gor illa -fo ss -ati ku -lumin ous -ivan ka -be ek -sta res -sw iss -âĿ¤âĿ¤ âĿ¤âĿ¤ -scru bs -me ath -gusta v -jo gging -confe tti -as os -ers fc -breit bart -applic able -autho red -ya ho -h in -displac ement -j v -ðŁĮ¹ ðŁĮ¹ -ot c -non profits -diec ast -gu sto -inte stin -c ages -me en -lu kas -moon ey -ðŁĺ · -very day -tor ah -is sion -wa c -lever aging -ish able -cu se -le wood -may an -turn table -ju ice -tru sty -tu p -eti quette -supervis ors -stu n -gu zman -confe ren -ric o -fe ast -back ward -pol aris -mic he -jo g -h ing -field house -vel ing -sho cker -esc ence -ठ¾ -vi be -anasta sia -mar ched -kill ing -Ķ ë -fe tt -exop lan -... ( -snow day -lo h -ir ani -la khs -del a -po caly -boom ers -dictat orship -ac er -tur keys -quarter final -muskete ers -ðŁĴĽ ðŁĴļ -sf x -museum week -sc ala -ri sis -( ðŁĵ· -ãĢ Ĥ -z ies -bo eh -hu es -lu sci -dol a -impeach trump -roo d -don caster -tor re -hero es -fo yer -tar i -blur red -ke w -frank ly -dro id -ap al -Ð ¼ -y af -bre t -par agu -cac ao -ðŁĻĮ ðŁı¾ -ru e -head aches -shaw ty -char ley -pal er -go wns -correc tional -ðŁĺ© ðŁĺ© -breaking bad -ol ing -da p -endeav our -cit adel -tra d -incumb ent -medit ate -foo ted -ðŁĴ µ -shab bat -dayof the -wil lem -gal way -to red -marri age -f illion -sleeve less -aud itor -jin young -invin cible -kad una -a and -volcan oes -mon eti -indie gogo -buccane ers -ðŁijī ðŁı½ -ãĢ Ĥ -lay ton -cuck oo -hu mber -buzz er -Ï ī -to re -stra ins -sto m -pa ine -s we -du ff -z ou -si mi -li pp -ur n -se agu -ðŁĶ ® -sun dae -hi c -ðŁĺ ¨ -bull pen -u per -flyo ver -al dridge -glo bes -ali es -ken zie -ge es -y cle -sp lin -mag enta -j ha -bal u -gh orn -ti pper -wick er -taste of -con clave -ch ale -inv asi -cat er -dio xide -me gab -win n -at p -transform ative -nest led -hi g -bri dging -lil ies -chee red -bad dest -sc rolls -real is -dipl o -ðŁĶ « -conce ssion -prefe rences -explo des -er gon -introduc tory -ine au -ch af -som es -land rover 
-spir ation -sex y -sco recard -illustr ates -soul mate -wi en -inter disciplinary -fore casting -ent ities -glu ed -en lar -cur t -percep tions -boot leg -mi re -asho k -v az -hor ne -cal le -ac ulture -ther oy -night time -oc al -character design -ar mist -ðŁĺı ðŁĺı -yah oo -ac eae -to se -even to -sou t -nay anth -wh om -v are -ri gging -gen us -hi ve -com mands -sti e -day a -ethan ol -en f -hi fi -flu ence -cle mson -re invent -thermom eter -humor ous -emer ging -aci ón -ðŁĺĺ ðŁĺį -s ity -haw ke -accompan ying -t ility -ðŁĺ ª -re cess -protag onist -l ery -dun dal -int l -britt any -q bs -off the -marri ages -how to -viol ated -adel aide -wit t -lanc er -pak v -hu me -st ade -bra gging -ou tright -ad c -super st -real time -cu res -garden ers -ero ck -dale jr -ver o -bar tol -mo ti -mc fly -v pn -st ink -over rated -guer ra -e tis -ath ome -twd family -th ab -tn x -rafa el -family travel -x ley -sat anic -equ ations -ru dy -wal dorf -stan i -tu be -meas les -zimmer man -obli gations -i ously -bow ser -trans former -sho ppe -shak en -gh ouse -to d -ke tball -share holder -mar ca -kp mg -ak an -given chy -coast al -au th -roller coaster -mar ches -coordin ate -cine ma -apprentic es -par lor -mit o -men on -consider able -bar re -glo ss -enh ances -jaz eera -fal mouth -thra sh -stat en -k zn -eng el -samanth ap -flo ppy -sal om -ðŁıĨ ðŁıĨ -w ack -deliber ate -osc ill -herit ag -du sted -orni thology -pad dle -fer ns -bar un -cl ans -anticip ate -a ay -mat ically -é ĩ -tu mble -post man -unic ef -tro tter -op d -leaf let -ge ist -cease fire -scre ws -cre ation -wal nuts -longh orns -under statement -ab b -proxim ity -na x -un ity -turn pike -orda ined -dub step -chak ra -me ch -love her -look alike -donne in -vir on -Ù Ī -bang ers -vari ants -out dated -in ta -cri sto -sp elt -food and -f on -stefan i -margin al -hu tton -ti ara -tel ford -qu en -fair grounds -que tta -mikha il -heal er -v ball -ty re -under grad -gl end -hom ers -scri bed -main tains -po che -mis 
sal -mar ko -u as -á n -sh p -con vey -pad re -sab a -pu glia -madhu ri -pa xton -chap lain -n ago -ca si -... !!! -fli rt -sal eh -k are -di re -stam ped -extre me -ðŁĺĥ ðŁĺĥ -ho ppy -guadalu pe -advant aged -eu char -p low -un n -mac qu -port land -cla sh -pe s -lou bout -y p -keep ing -arca dia -fran kie -fi u -de th -encyclo pedia -si ze -inve sts -ðŁį © -geo logical -fran ç -con front -ðŁĺ ¥ -d ys -af m -tex an -graph ene -repost app -ac f -ur sula -gaz a -dd led -fu m -wsb tv -m be -fron tiers -chrono graph -ke s -inter faith -tab oo -spar ta -won do -flori st -em braces -ca w -no el -arch ers -ðŁIJ · -roman o -ban an -sh akers -melo dies -geo thermal -se phora -ìļ ° -оР´ -pro c -hand shake -pan de -popul ated -slow down -hor tons -registr ations -un deni -lan ts -pas sover -thak ur -li ef -adhe sive -pe tal -micro scopy -memph is -confir ming -air drop -mesm er -perce ived -ming le -lifel ine -gh j -worcester shire -pas sions -ach er -el lar -ah o -firen ze -bar ang -letter man -hat field -lu cha -je ter -e shop -william s -horo scope -pre de -east bourne -dur ga -di version -al trin -seis mic -premi osm -nar co -ti r -ori g -or m -land fall -ci ous -lin do -max ine -x ico -tra y -os wald -c ba -ric otta -n cr -mar au -ภ² -gladi ator -ch ery -lun g -u me -po psic -lon ging -can als -ta ya -decentr alized -sho pp -pres sures -mahar aj -eti had -wal greens -succe ssion -sign aling -li g -staf fer -north korea -def ying -as ma -de g -peri meter -oak ville -m sk -balti more -rece ip -de ple -ðŁĺŃ ðŁĺĤ -jambo ree -> .< -rsp b -puni sher -consider ably -in tothe -pari sian -acceler ated -polye ster -low es -fr ying -sauté ed -mou ths -seychel les -ra x -go dis -dak ota -house wives -the me -mat inee -black bird -ye sung -pre fers -pelle gr -in ated -trun ks -stronger together -re pet -re pairing -ped als -toler ant -her r -dun ne -indic ation -decat ur -b tv -exhibit ors -ik on -friday motivation -bra gg -live tweet -al ves -womens art -foreig ners -wal lets -min 
dy -lan ey -bb in -tv miaw -lif ter -tar get -tam e -dr ou -astro photography -mp c -g pu -nord strom -fric tion -run off -lov able -sp nfamily -ext ingui -bloo dy -sch el -arti stry -sw ish -scar ce -ph ils -max im -pos sum -com promised -sty li -sc fc -is sa -birmin gham -sket ched -angel ica -ordin ance -je ts -conqu er -ðŁĺ IJ -online shopping -s ori -reason ably -nue stro -ar turo -ch l -benef ici -spho to -wel t -ni kk -ðŁ¤ ŀ -dan ao -for mid -as se -af irst -âľ Ĥ -gil lette -as sor -an onym -sel ca -fe mi -bear able -y and -ar mory -cre pe -celtic fc -bra vo -in expensive -de lec -ge cko -new market -snow flakes -kab ir -con tra -can ning -mor pho -gar wal -ðŁĴĥ ðŁı» -fight ing -mu tation -woo dy -ju gg -gr aces -premiosm tvmiaw -kenne dy -gu p -sa e -op ha -off spring -fini sher -bet ts -span ning -mar j -h one -sh ing -contin ents -samanthap rabhu -un related -l acy -explo sions -benjam in -sophi e -no ting -micro soft -as sen -a hoy -i ker -ho fer -mo e -ah madi -yan n -an ak -ma hi -be u -aha h -creep er -baahu bali -am at -pri ory -haw keye -deloit te -sko da -print making -assemb ling -mirac ulous -no ch -sw o -leg a -oper ates -border lands -eli e -stron gh -rep tiles -pir ate -un fold - ¯ -qual comm -un predictable -ot r -rose wood -direc tional -counsel ors -corn ell -liber ated -j ad -ir regular -bulgar ian -high ness -vodaf one -sw ild -mini mize -gra zie -๠ĩ -r stats -stre ep -ome tric -humb le -lu mp -l ille -b ü -home depot -tripad visor -ki wan -a via -er z -ex ico -du f -blu men -mi zing -ar ma -in im -con stan -sor a -ju al -au n -tw ell -tren ches -her a -r k -po plar -recipe oftheday -ll an -bhu ban -short ages -ing don -bridge water -ðŁIJ ĺ -fortn ite -cam den -un cture -pro w -colon ies -t ks -n go -b hm -live pd -spl ace -sli ke -happye aster -ter rence -revol ver -j ed -yy yy -office of -m ts -exist ential -r ourke -explore bc -sse d -pri est -vix en -si ding -k pa -a har -ju ic -ob struc -foren sics -uk mfg -cancell ation -we ary -ab 
q -ele c -pri zed -deb ts -me zz -salv atore -m dc -gre tte -c gc -th on -snow storm -ts ch -cook ery -å ¹ -wa xing -n acional -mur s -ra ve -cap es -ger main -dri pping -sub mitting -ome lette -iter ation -aj es -shim mer -fu eling -ðŁĩ§ ðŁĩª -li po -bo bble -un follow -islam ist -hi ber -cat s -agentsof shield -sen si -____ _ -ster ia -inst al -ausp icious -har row -over land -femini sts -inst ant -char iot -blind ness -sp ed -sc arec -nu it -mini atures -ho seok -glo ck -fifa worldcup -e te -dis m -we iner -ex foli -ear ts -à¸ Ķ -my art -man il -iss ant -form a -in cu -buffal ob -in tim -mc cul -anj ali -po po -un doub -hil a -fun gal -thank ful -fu tur -en dish -ren ds -th ar -she ff -ring o -nichol ls -io wa -po tom -cl ams -ãģ Ħ -acon f -stadi ums -di mp -di k -residen ces -do v -caric ature -seagu ll -kl m -confe ss -sla pped -cele b -turb ines -pp v -nur ture -el ab -.... .# -tu ff -de press -al far -amii bo -di spon -e wing -que er -friend s -for re -âĺ ¼ -sw t -aqu arius -head liner -cur d -fi gs -o tters -love fl -kare em -go vegan -fri yay -consol ation -at ri -ì§ Ħ -âĺĿ ï¸ı -poly ne -gu ed -o ya -la us -intestin al -cam illa -scal p -pi r -leed s -horri fying -bore tum -dand elion -fer rer -ell ic -as x -so ren -re loaded -ale ague -navig ator -ine tte -add ams -al chemist -ak shay -dystop ian -awe c -n aya -al isa -ai led -ag or -avi ator -ali zer -smo bile -findyour park -cop ying -to ddy -sh ti -mon ger -cal houn -nap kin -break up -y atra -se thu -ric hi -eras mus -fer ry -am ore -prac tise -bo bo -power point -oo se -li ffe -chin a -sh ka -fad navis -du ane -war on -fal se -ðŁļ Ĥ -wa shes -disc ip -==== ==== -g k -ab b -stub born -medi eval -p ci -ðŁį ª -maril yn -h yo -man di -cr i -prede cess -continu ation -om usic -s lat -wh al -mall ory -bon n -shen zhen -ca i -âĺ ĥ -sa fest -for wards -dra wers -bla sted -sle e -mor phe -mb ta -dumb ass -ÑĦоÑĤ о -alhamdulil lah -ec lub -al beit -heal ey -ayurve da -adverti sed -cro cs -itt les -bry son -be i 
-nj pw -honore e -fu sed -ðŁĶ ĺ -mul tin -n aga -de parts -ko p -kin o -jhar khand -ed na -ax le -mil ton -supremac ist -marrake ch -domin ic -tran script -] [# -: ). -wo c -sur rounds -o gil -leaf lets -co well -whe w -tru de -proli fer -succe s -sports man -con dom -po che -k up -imprison ment -{ } -scram bled -å Ľ -ka ine -cell phone -metam or -con i -remn ants -ee z -down pour -afterno on -exerc ising -ber ser -architec ture -wick low -m ns -is p -bo c -n iss -mn wild -stu mble -r si -lu ffy -sil en -dd ad -bul lies -haw ker -bb cc -scu ba -e pp -que ts -for aging -pal let -ha di -cinemato grapher -cat chers -to aster -k hi -lite coin -kid lit -amher st -maur icio -ip ad -mar malade -fe y -don nelly -g to -est as -cere bral -ant grasso -zz led -vir gil -swa pped -ðŁĺħ ðŁĺħ -no dapl -greate st -nhl bruins -fra ser -b mo -ane w -. âĿ¤ï¸ı -se gregation -remark ably -mccor mick -lo gger -er as -contrac ting -âłĢ âłĢ -yor ks -uku lele -touch screen -de cked -ben n -south wark -ra vin -nu mis -ðŁ¤ Ļ -ru t -gre co -eth ic -red neck -ar r -t cs -ih ri -ðŁĩ« ðŁĩ· -l k -inher ited -zy k -viadu ct -marty red -hi gu -ss n -be in -street style -fer gie -bank of -æĹ ¥ -stake holder -exempl ary -cre ss -ess a -ero tica -intre pid -gom es -bra un -bethan y -bang tan -pulmon ary -m illing -doctor ate -trump russia -ठ° -s ani -bl att -pla u -depri ved -t le -ful ly -bour n -st ak -lufthan sa -kio sk -far oo -def y -bad an -ðŁĺĺ âĿ¤ï¸ı -rit z -tri sha -ran ds -middle sex -arab s -pro j -sport scenter -repe ats -iv f -bleed blue -as sure -o bs -territ orial -ele n -bever ley -ann ah -âĿ¤ï¸ıâĿ¤ï¸ı âĿ¤ï¸ıâĿ¤ï¸ı -z l -for good -science fiction -gla u -son ya -pri th -st weets -mix ers -mari o -ant elope -writing community -went z -den ham -be di -sf o -harley davidson -look book -immuno therapy -or phe -es ville -ed ged -tas k -sb ball -corro sion -kilom eters -co sting -play back -ke ke -di visi -u ter -re location -yel led -pen g -up beat -ser ve -âļ ł -hal en -stir ring -reh man 
-en v -schu macher -frag ment -alkal ine -sb k -resil i -share point -rol lover -tra sh -counter part -âĻ « -ob itu -à ½ -ãĤ ¹ -mul berry -ðŁİ Ĩ -auton omy -spra ying -nat l -love you -fran ki -nu k -esc ar -can teen -ali baba -de plor -mole cule -pu d -fort night -blon die -sp hin -portra yal -ta che -bu te -consi sting -freep alestine -c sp -im mort -d ns -ðŁĴ¥ ðŁĴ¥ -tour de -coo king -archi val -ga thers -bit t -b anc -pre mature -snow ball -poetry day -lou dly -fug itive -ed ay -em ra -ðŁĩ¸ ðŁĩª -sci en -node js -jur gen -je ong -band ana -un is -fox sports -v andy -pro visions -wee p -tu k -i ko -h oun -zig gy -z r -fil let -bat a -tin k -con e -we want -k ilo -hor ace -sl t -sc t -stay tuned -victor ia -umb ria -att acker -ingham shire -fright ening -no ir -fr at -con tempt -lia ison -ho i -br ink -tr ill -ni agar -kick ass -dun das -not my -rho de -bu mble -no xi -fa g -spec tators -mancrush monday -jin ping -distr act -dais y -wal den -portra it -ar thistory -vol tron -ev el -is c -ac m -r ite -na o -de ported -swe ats -ru fus -lo bo -labor day -gam o -ihri thik -bl it -abdomin al -ãħ¤ãħ¤ ãħ¤ãħ¤ -i it -e q -bu sy -allu arjun -un disclosed -de ton -pro create -ki l -ðŁİĤ ðŁİĤ -mitch ell -ki i -inherit ance -al p -jo burg -pat rolling -compul sory -un signed -ni am -l ga -eshop suk -tr illi -ma w -appreci ating -rock ab -mañ ana -an tal -mal vern -roy o -grand prix -sut ton -go ftheday -dig i -ãħĭãħĭ ãħĭãħĭ -t les -varan asi -erec ted -discip les -cont act -ðŁĺ µ -li d -⬠ĩ -scen tre -radi ator -ing tips -trans itions -thursday motivation -chem ical -separ ati -sal is -mi m -geo graphical -book fest -/ . 
-âľ ĭ -v ae -cur rie -ag garwal -acceler ation -the ses -lg m -u mass -pro portions -nat a -ani ans -ku ch -be acons -ap r -@ # -ðŁĴª ðŁı¾ -nu ke -sher aton -ki o -ma kati -polit ico -mor ale -ì Ļ -econom ically -gg ly -ss en -pa stries -intern ships -vic ente -fanta ken -aveng ers -accu se -slee pover -indic ated -the dream -ster one -ren ders -fro st -ou i -gre gg -d ore -⾨ ⾨⾨ -pu gs -sat y -nu mb -hems worth -tam i -la ssic -schi ff -igle sias -ag awa -] " -re shi -game stop -divor ced -theat er -clau di -un conventional -prophe ts -ac in -twel f -tow ering -t ml -sc lerosis -k wan -ge ts -distur b -na ira -ener g -pir acy -pru itt -noti fied -hen na -bra m -ground water -bl s -opti mis -$ ) -luci e -biz hour -fang irling -gr ills -or l -ver se -c ina -law less -artistson twitter -tele vised -marshmal lows -radio head -bar r -m fc -bre vi -mmor pg -g aya -âĸ « -sub titles -j t -disney land -to bago -nh m -groo ve -fi awec -" / -ba o -scra bble -om ni -ff l -um c -si mba -ali er -ter rell -plu me -mi di -dig nit -co c -bru t -ad ata -alche my -d sm -ðŁĺĨ ðŁĺĨ -win try -spa res -cu er -conclu sions -to ys -od or -fl ann -gar vey -scrip tions -inspec tions -cat ap -ang lo -st louis -heim er -at ay -tr ich -en yc -chil ds -vent il -mont p -guiller mo -circu lare -z ell -mode led -craf tsman -al ina -stimul ation -cashe w -ju das -best of -to ire -susp ends -scol lege -real ising -by tes -bloo ds -as si -ðŁĴ ¿ -o hs -ðŁį ĭ -scallo p -ठµ -gi fting -camo gie -wil kes -o zzy -ðŁ¤ ¤ -ver onic -sav oy -deme tri -baby girl -ðŁĺį ðŁĺŃ -so x -cly de -induc tee -count down -self care -ठľ -vi ka -tor re -phd chat -pe ars -aw h -suff rage -le sn -admir ation -mp p -shark week -schul z -santor ini -clo ver -( * -stras bourg -ex iting -so yu -finger print -che a -ãĢ ľ -vin dic -song writers -so a -prou der -nam a -= )) -simple st -delici ously -gil les -u q -mn wx -ep p -sh un -ken nel -fall on -ðŁIJ £ -sin d -tra gically -out es -modern ism -co ke -gy n -spi on -âĺ¹ ï¸ı -le am 
-compress or -apolog ise -twent yon -fan atics -âĻ » -sco tsman -sa wa -ko u -as er -ภļ -welter weight -phen om -twick enham -stri a -p out -ka z -gi am -cd p -ho y -emplo y -red mond -ภĦภ-sm ere -trance family -proto cols -pie ce -lu iz -iter acy -carl s -united states -har med -phd life -ch aw -foot prints -l é -cho ker -z ana -sli pper -eric sson -insul ting -articho ke -advis ing -acquis itions -op or -mut ations -re ar -ॠģ -pod cast -wi ther -kun g -íĺ ¸ -win slow -di apers -ðŁĵ¸ @ -ec ker -col lar -hu ey -gi ro -mono gram -kas ich -si veness -malay si -arom atic -gre s -gali leo -u ji -rob b -dr m -none theless -as a -: > -lo a -l np -at work -ag t -laksh mi -pipel ines -id al -stre l -re all -chain z -stone wall -san sk -ðŁı ´ -pied mont -hoste ss -ci u -t é -analy ses -wil helm -scott y -rw by -mosqu it -use mb -qu ins -ðŁij İ -tu cker -s conf -speci fications -psychi atry -broo kes -s ils -ol af -de to -co di -cli p -fil th -womancrush wednesday -go to -ang erous -be ale -w tc -paneli st -ne x -lar sen -emili o -tab leau -h itters -conce ived -americ ani -or tega -mar di -Ñ ĥ -pain tball -thir sty -new yorker -etis ation -go ss -we aker -u gh -tro ll -har ga -du al -ght ning -at ine -ðŁĺİ ðŁĺİðŁĺİ -cook out -pyrene es -po ss -authent ication -sports wear -yun ho -kir o -archi pel -shen ko -ren der -nov ation -divin ity -ðŁij £ -su fi -humb ling -ge opol -devote es -wait ress -tr ough -py ro -i ba -bl ing -gra f -epilo ts -bt r -of tball -bas king -domin os -so om -r ath -sher yl -qu el -astronom ical -wel d -track list -sig nee -slee pless -com man -ch ron -summ on -pure michigan -cri spr -sli p -la gi -ra q -um u -thal ap -char med -scru mp -quad copter -ski p -peter sen -mun i -ðŁĮ ¾ -mon aghan -tra ys -ick ed -canad aday -te gr -ï¿ ½ -hot ness -heavy metal -ab ar -gop debate -az ul -spider man -sun flowers -ľ ë -web comics -bar d -Ð ² -nichol as -slu sh -ram an -mark ham -ffici al -ff ler -íĬ ¸ -ple ss -anush ka -to to -sk aters -pro wrestling -compet 
es -ay ala -myster y -thr ills -mp g -independ ently -y ul -imper ative -formid able -tire less -st acking -ton gues -mal tese -pot ts -mat ti -char ting -chill out -super nova -ome o -sky sports -nu tty -ðŁĹĵ ï¸ı -ro han -insp ired -concier ge -ser ra -ma kk -gal at -chi pp -ye v -ì £ -reim bur -op ul -kimber ley -i eee -bre men -ch itec -or in -nak u -bon kers -foo ty -emer gence -ðŁĨ ĺ -sti p -serge i -zo ey -ai me -wou ld -dy es -destin y -vinai grette -dri er -circulare conomy -an archi -ss r -sch el -cin er -gro om -determin ing -gar min -cal ais -incarcer ation -bu kit -no i -chelms ford -mckin ley -chi pped -belong ed -tu mors -str oud -mi i -influen za -wwen xt -tun dra -tele communications -cat sofinstagram -t ages -beat ty -o du -ml kday -oo per -dang le -ak ley -cru mb -anti gua -ti mbers -rou hani -ðŁĴª ðŁĴªðŁĴª -ha fi -... !! -w cs -coo p -sn c -lit res -ãĢ Ĭ -ha z -co z -k ant -green field -cur ti -y ale -flye agles -what soever -wor thing -rou lette -flyeagles fly -un da -a inted -stand ing -lusci ous -h pc -effic acy -ash land -me ghan -ky wx -n pr -bath tub -ac os -h ani -mar cor -man tis -da isi -bo ba -ab bie -mu til -vi al -spy der -po z -g ti -el fie -nigh tw -metro id -anton i -mad die -dh ry -dar lings -ten ds -taek wondo -atlan ta -me ow -chlo e -ãĥ İ -ym es -siber ia -k con -gu es -mar iner -fac il -azz le -[ ... -han nover -bav aria -vir go -te uk -u sps -) # -wall a -sam pson -need less -ver bally -hay ley -bow led -pi us -lam pard -ham string -vol vo -road safety -cho king -sor bet -a hem -healthy food -brai ded -horticul ture -cr ative -che ek -ad do -the force -ko ko -schiz oph -j ie -w ada -twentyon epilots -h bcu -pro ton -pau ls -lou isa -lat am -kyr gy -com pac -sd k -sap i -?? ? 
-liber alism -ep silon -ai den -w usa -spra yed -baske tball -kim ono -blue wave -ali as -ë§ Ī -mug shot -ce c -do gre -ad ora -ðŁĵ· @ -kra kow -intrigu ed -exhau sting -astron omer -ven ison -lady bug -ci v -bra e -us m -bri be -acup uncture -pembro ke -ke ating -chi e -y ad -t si -sm i -see ding -gate shead -lis boa -gy p -canv ass -ðŁĶ´ âļªï¸ı -op i -ni r -soci etal -ly te -ati es -c sm -ar tery -al in -aka poor -abstr acts -â̦ â̦ -teen wolf -ne we -travel gram -sentim ental -per ched -han del -ho ek -f ay -coordin ating -anim ate -man ian -effor t -jer ky -f ck -adri enne -ma bly -tra ding -my el -spi ro -sol a -stor ing -over drive -monday morning -dream team -pul se -bon di -ber nie -pgat our -tri poli -son am -plat t -âļ ¡ -ag roup -îIJ Ĵ -inv ading -v cu -k ell -ñ os -un dead -pod casting -mercede sam -mana fort -cor tex -que so -impecc able -pal mer -wil doz -sport sc -guacam ole -dispen ser -cate gori -stun ts -per il -invit ations -dune din -xi e -achi eves -saf er -pre ds -ph an -knuck les -k ak -igno res -lovemy job -aru ba -ound ation -datac enter -co vert -gr ing -cou ple -ا ر -vol i -mc cle -arti sans -lu do -kal am -arom a -under taker -hu la -wiz kid -gu mb -god frey -bakers field -ker n -engine er -car ve -pal in -guaran tees -pe bbles -b ays -zi eg -fin k -â¬ĩï¸ı â¬ĩï¸ı -down pours -ro chelle -rasp berry -ðŁĺ ® -gra phies -stom p -caf es -ari zed -utt ar -cal vary -dri e -crusad er -bus an -tux edo -si u -seam us -cul tured -blan chard -town house -ge red -butter milk -flu ctu -roger federer -hel i -ðŁ¦ ĥ -u ous -ram esh -mu ppets -email marketing -ye ss -br ice -ri zio -pel o -donnein arte -u rable -inve stin -bump ing -raji v -sav a -thro wer -fore x -o hhhh -th rust -pull man -r fid -sep sis -le ed -fri ght -roun ding -ne b -ph ins -ai sha -utili zing -squ ats -gold smith -j ic -bo ks -vau s -i po -exclu sion -tari ff -po kes -min al -land s -en force -washington dc -or char -g x -mar ys -ey our -aussi e -bak ers -un popular -latin os -lar ge 
-pu tnam -bol o -wa de -pel o -di zz -ob struction -fla ppy -weare the -depend ence -pajam a -e te -y ann -e wan -disc la -a ay -kar ina -e ic -an trim -w soc -neg atively -kai do -fotogra fia -dh ru -colo ssal -mcle od -k wang -mani pu -ex hilar -us atoday -summer slam -co les -tapro om -unbeat able -de ma -tic ks -k ling -fil s -campaig ners -ภķ -brew ster -audu bon -qu ay -ch s -ki gali -d ler -strength ens -som al -sign ingday -gol ds -pig ment -orche stral -g q -lin kin -ðŁı ĩ -ta w -algar ve -ho v -ear le -gold fish -am ig -ex er -ben in -dru id -ðŁIJ ¸ -she m -quat tro -mer cen -men te -incorpor ating -bon anza -state fair -en de -concep tions -e es -âĻ¥ï¸ı âĻ¥ï¸ı -d son -fire arm -orb ital -we h -multi p -fo b -requi em -p light -thou se -sa id -oc re -remem brance -n old -chi pping -be v -er t -ca thy -sy m -ri ggs -m ley -dialo gues -sl ender -how l -gau teng -wd w -to bi -smo kes -im plo -b pm -ad n -mom basa -cap sul -bloom field -artic ul -cle o -goog led -flu ffy -l ard -en zyme -ve sti -ibra hi -fl ame -e mea -out ages -dispro por -ble ak -an sel -ick er -st louis -stock market -good friday -sau lt -stal led -pro m -ep som -b é -the se -sau ces -me w -lit fest -pre d -re u -kar ak -si enna -ell in -bio technology -ï¸ıâĥ£ - -tac tic -sa in -por k -mon za -ka j -lu sh -compart ment -chang ing -shraddha kapoor -fo al -ar tem -cu ando -can ola -ori ente -me sse -d ited -br c -box er -bbc two -s st -ment day -em ing -de wey -kof i -âŀĸâŀĸ âŀĸâŀĸ -reali zation -smo l -tw ood -san je -flag staff -ber wick -cor set -can ary -whistle blower -et ched -com posing -squee zed -bow er -auto desk -ne h -mathi eu -ba ja -Å Ĥ -hy dra -da im -am eri -insi sted -mer lot -gar ros -heart news -gaine sville -cut ler -bo de -ðŁĺī ðŁĺī -lew es -scoun try -g sa -us u -cc m -god awgs -phara oh -cra e -mor ley -hyp noti -f ades -neur ons -fu zz -ing co -high landers -star k -vig ne -pac kets -amar illo -reu ben -insul ts -bas ic -vec tor -n me -ac ruz -tro s -transm itter -ðŁĺ 
ŀ -interpre t -ðŁĺ ² -pre quel -mc gowan -dis semin -ðŁĴĺ ðŁĴĺ -mascul inity -indie gamedev -ali ve -te t -pe tal -ema iled -ar med -ko o -he er -ba ird -super junior -metro polis -delav in -decl ines -stit utes -Û ģ -p tbo -g lan -cho res -e aling -chri ssy -ste mc -vi an -assassin ated -pron ounce -illeg als -discover y -cav ill -fri fotos -f al -so i -sabot age -t int -p dc -ðŁİīðŁİ Ī -ãĤ Ĭãģ -ji o -endeav or -in sig -commit tees -she arer -me tz -mar rying -h dd -g by -fre t -tri sh -pu l -scrip ted -sa ki -l w -ke ye -shim i -nan aimo -ca h -à « -tem pered -ici an -du gg -dish washer -air field -s rugby -gr inch -y st -r ms -mahat ma -lan kan -disc ar -dige stion -no des -l ls -om ic -gu tter -tis garh -feder ico -election day -bo he -master card -fire ball -âľ Ķï¸ı -oy ster -p ong -do k -en route -m vc -beat the -ali stair -shu b -sh aming -cherno byl -ghi bli -the s -pin ion -d bs -sal ts -ic tion -epi ph -nc pol -in convenience -whit ley -inspec ting -wood ley -wi ener -skil let -no les -m ca -h ina -a sha -willing ness -well ness -tam ed -show time -dis advantaged -ber nat -us n -mission aries -coun selling -arrog ant -quant itative -leg alization -ho dge -energye fficiency -cameron dallas -pos sessions -p bb -harris burg -v g -hindu ism -happy thanksgiving -fi b -re acting -tweeta picture -pol iti -mu ppet -hur rah -pac e -coast guard -guar ded -as am -par ry -fore very -x q -oom f -ke anu -j ind -ri st -customer service -sac red -ðŁĺ º -ton er -occur rence -mat u -val dez -red d -is ak -power rangers -pe asant -raj ini -abra ham -e mil -car do -tr il -hair styles -obsole te -sam pler -direc tive -delavin kisses -ver ton -glo s -sp ay -paler mo -com ets -man ziel -chicag of -ski pped -pic torial -h ant -b mi -a ol -re opens -pad dling -devo s -fra ud -bas eline -que ues -sp ired -sn are -eu ve -descri ptions -daisi es -ca ching -gall eria -tri mmed -stin o -recy cla -ic ular -bir ken -raw lings -fli x -chic as -b gt -lik eli -argy ll -thel ove -ga ston 
-bl anca -ha k -f one -sailor moon -h aci -ima c -fl yn -de can -bel les -ap ic -zo g -taun ton -con stance -lasag na -ker nel -in ka -har bor -collec tively -calcul ated -av ille -shil pa -pur du -gi mm -fun er -a est -pembroke shire -nighting ale -n unes -hyper tension -hu bert -sli ders -infer tility -comm ended -transat lantic -metr ical -!! @ -Å Ł -ss g -bac ca -inver ted -fun factfriday -it ans -albu m -acqu ainted -ri er -whel an -sar ab -mu e -snoo ze -pi ff -agre eing -sp itting -jer maine -n ye -âľı ï¸ı -am bush -ze ph -con greg -univers ity -s app -wann abe -pat rice -ib d -do glo -fri dges -sun d -king ston -ar gon -kam en -hardro ck -ds ley -do lores -ì ° -ota ku -pi ping -be having -âŃIJï¸ıâŃIJï¸ı âŃIJï¸ı -blue bird -an sari -teapo t -fire work -cro p -log ans -ty ped -thick ness -ig ers -c fp -dys functional -contra sting -et ty -aston martin -tx st -dra grace -at tributes -marath on -manu scripts -john stone -ðŁĺ± ðŁĺ± -bo er -ay u -aru gula -poo rest -con du -assu mption -anag h -no h -delav in -sit ter -g ö -mor ow -kick start -com i -gl acial -ghe ad -ba in -ker shaw -en dof -fre ud -om at -i af -hu g -sign up -each other -defin ite -tu bing -shak ira -ðŁijı ðŁı½ -uu uu -sw in -sham bles -ol as -sk ell -brit ain -kn w -clu tter -om y -j ens -hang ed -city scape -scra ps -un locking -dead liest -er no -breast cancer -a it -inspec t -fu ri -ðŁĴ Į -ku d -ju le -or ah -mi ds -m dt -bur gring -r attle -pu sa -stal k -cle ans -iss ance -z ek -worth it -nam eis -musko ka -council man -urban art -bar rac -un solved -tu l -g ita -white board -soy beans -em ent -cont i -saturday motivation -conveni ently -doc king -t ado -âı © -sp ino -puppy love -po f -fabric ated -robb ers -adop ts -ti fied -kk r -indulg ence -notic eable -macqu arie -chap el -sensu al -ki ko -melan oma -lore tta -li ance -ab en -sp lus -ga al -ac ele -lib dems -compar isons -ðŁĮ µ -rhy thms -mer y -en capsul -nap ier -ðŁijĮ ðŁijĮðŁijĮ -ðŁij IJ -plat z -fre sno -re formed -ran bir -el it 
-the best -bhu shan -vin nie -impro vised -s ittin -re created -e ba -ec ker -ac rob -pon te -cor d -gi ddy -eur usd -fe ver -intu ition -gar i -dum mies -bud weiser -amend ments -te tra -sch nit -ay as -mar ys -ci st -k ani -ker mit -ðŁĺ±ðŁĺ± ðŁĺ± -tin ker -strol ling -di visional -niger i -omin ous -menstru al -kar ab -k hy -bw fc -pan handle -l illi -well er -stra pped -son the -transfer ring -ethe real -sne aks -ru dol -gab les -jac king -cin code -for tune -canadi ens -con for -ab normal -frank lin -tit a -mu la -persi st -cu ties -ki el -ðŁĩ± ðŁĩ -her mann -aw k -fi asco -ko to -we ta -hi ker -budd y -preven tive -mcgra w -game boy -forsy th -top shop -si ob -sad h -in tram -follow art -so aps -dragon ball -ou x -morri son -๠ĥ -lu bric -adul thood -morri sons -âļ łï¸ı -her mo -ta ka -stall one -mis use -team gb -ra gha -con fined -at y -hom ophobic -nw o -sky news -ho ya -ac rosse -wi iu -pur ée -jed dah -ðŁ¤ § -advis ers -ph ine -an is -scrump tious -ë° ķ -c ke -vin y -ter m -s dc -o do -home school -vas c -leop ards -debor ah -illic it -cur ran -as roma -nau ght -mar ig -brand i -em p -ðŁĺį ðŁijĮ -î Į -su spend -lu z -initi ation -sch aft -jensen ackles -craw ler -post doc -des ks -trail blazer -den omin -tri x -no ise -po et -± ï¸ı -s mug -vol atile -proof s -pharmac ist -sardin ia -mash able -kim chi -co ed -schal ke -doo dled -c sw -sh ur -ro x -do k -chris brown -mathemat ician -ab ound -ang elic -rock ford -d ole -yor kers -ms n -g man -xavi er -bor rowing -mark ings -longh orn -k ja -diver ted -mm it -euph oria -ay yy -te a -pa h -ck i -un cut -li ven -ky ung -fan art -mer ing -red ding -amo vie -gri di -c thulhu -schol arly -ju dah -th bewithyou -eu calyp -ðŁIJ ķ -hert fordshire -cour troom -by u -auc tioned -ple ase -mar cia -ê° ĵ -succe eded -el as -arvin d -t lot -saig on -re tt -ra kesh -fd ny -as en -se bring -gladi ators -you know -v lad -gol a -par ap -ÑĢ Ð¸ -sab cnews -one team -oh l -sun e -ri j -cd c -star gate -run down -plat o -ph c -chat 
ter -ra viol -mn f -mand ala -li et -ภķ -mari a -hun gover -consoli dation -fer rell -tradition al -ilove art -gal ap -ðŁı Į -que zon -espa ña -ðŁĩ¨ðŁĩ Ń -ho bby -steam boat -mali gn -guil lau -pro hi -its me -íĥ Ģ -in scription -al z -mari an -k ade -mm on -adju sting -ne sts -intern ally -ci r -vik ram -mal ala -k ph -fel icia -the real -cap tivity -at is -marcor ubio -kale ido -che v -mano j -le more -gent ri -vi ps -tro pe -" âĢĶ -pair ings -mal nutrition -fr ay -desig nation -brun omars -az e -tor rential -pan zer -ga il -under the -the ological -schizoph re -dazz le -freder ic -mo par -ad illa -so ggy -ra un -medi ocre -colo rec -i fe -p inst -blu ef - ² -world water -gir oud -clar inet -ad olf -tar antino -receip ts -assu mp -ðŁij Ł -coffe es -âľĬ ðŁı¾ -du plex -s of -r x -lin o -timber wolves -pan dit -mo tm -e ga -ay ama -ach s -outsi der -ll en -co er -til ly -cheese burger -ma ds -ple dis -emp ty -national parks -az iz -p mi -jun kies -f ener -sq n -è s -gener ation -cleop atra -bhuban es -mosqu es -ty free -popp ins -tw c -or well -n age -ka whi -hol low -dal ai -¨¨ ¨¨ -ou ro -m health -gi on -az o -vis as -reneg ade -re ic -w sop -ðŁĴļ ðŁĴĽ -e chel -tox icity -mü n -bun k -stimul ating -asth our -\ ' -ep h -ende mic -cn bc -shrin king -peabo dy -michel angelo -can yon -wal e -su mi -si ders -inu it -? . -profession alism -dr acing -plat oon -p ons -out bound -maple leafs -de sol -cen cy -a than -ver ma -ru bbing -ok an -ðŁij ł -mull ins -authent ic -Å į -alman ac -ga ia -bb q -on imo -ke h -ty a -tou ts -y av -re posit -, . 
-wi ght -se eyou -cal lof -done sia -bar gaining -gr anth -sd su -amphi theater -p su -re watching -wine tasting -peak district -dete cting -thur man -phe e -èª ķ -u mich -re r -sculp ted -go le -name sake -ðŁĶ ģ -serv icing -bau gh -pu gh -pen cil -dar th -munch kin -at orium -ten ers -sun y -rolling stones -mag ing -star rer -i dris -fe instein -ag ron -âĺºï¸ı âĺºï¸ı -supervis ed -chamele on -aggre gate -succe ssive -mo gul -inst yle -pol dark -custom e -ohio state -ha ya -ci des -broker age -angel ou -fifa wwc -de forestation -al ton -pam ph -hu gged -ho bo -change able -ku ber -bur roughs -demon etisation -cape cod -vers atility -or ice -le ila -womenin science -tu a -he dges -embarrass ment -ali fe -so ars -ni ghter -hy mn -gi pp -chas u -tech s -ni all -k illa -hi ka -cam els -valu e - ¢ -sc oops -mah moud -clu sive -adri ana -pac o -oz il -un as -transl ations -whispe rer -s bi -bu xton -bio tics -indi ffe -ken ney -k lar -et ching -barra best -inst ability -se ine -vo tel -blo gged -whis key -my space -t ant -lan dia -give back -illu s -aw ak -ac ab -f bloggers -cloud computing -blat ant -syri ans -band ra -sty n -an em -ke ted -kar thik -barun sob -pin ot -gu bernat -gay e -arti ste -i fied -conven tions -hu an -geni uses -eeee ee -fol ly -somer ville -pride month -ðŁĩºðŁĩ¸ ðŁĩºðŁĩ¸ -chemo therapy -paul s -bak ar -ìĦ¸ë¸ IJ -taiwan ese -fol lo -c ss -re ign -nn nn -fla un -catastro phe -iti es -frag ments -extre mists -ym oun -car men -eze kiel -conne cting -se h -man ta -remodel ing -we ymouth -at oms -ce m -ne well -lu mi -the open -mo c -mili band -g land -z shq -mag gie -mani acs -m sp -ad y -cre ams -le anne -e sta -py g -af finity -pray er -dun bar -ligh troom -ac adi -wyn onna -roman tic -state dept -sick le -wh os -lam o -et our -fin ity -shru b -shar pen -pun dit -ed on -af ore -mar s -jeff ery -ter ps -medal list -kath arine -accu sing -ta z -roy d -from home -confron tation -alle gh -ðŁijī ðŁijī -refresh er -ran veer -never land -jo jo -lu 
crative -en am -ca ver -pa edi -man jaro -flu ids -the ssal -oppre ssed -mu ss -joh anna -Ø ® -cn g -buil dthe -sett les -s ith -fu ego -cl amp -ar ag -pay er -ted x -mand y -inter stellar -fr c -ch and -b cc -mo lo -len til -johan sson -grims by -nature lovers -ðŁļ¨ ðŁļ¨ðŁļ¨ -shin de -x in -international dayof -transiti onal -sat a -cad dy -wo d -if u -ha ys -holl yo -j ang -ir c -co im -grad able -" " -ðŁį ´ -ঠ¾ -a el -n yo -west lake -time out -sof i -phenom ena -cultiv ation -ag no -un armed -so t -con j -gen o -royal navy -nutriti on -fair mont -ti relessly -sn g -re ty -mic a -lu cent -slo ane -droo l -riz al -od ell -critici zed -. '" -la ze -deser ted -co der -pra s -l illian -itiner ary -dav y -an ap -whi pping -hobo ken -kare ena -çľ Ł -vi us -ter n -nan tucket -mis understood -bu laga -st ant -chin ook -z am -reli es -d ss -ed mond -sket chy -m ell -fe x -rec tor -dist ill -day dream -wine maker -ri pley -billion aires -hel ene -ati f -cul prit -bertr and -wou ldnt -ma pped -v ak -gla dly -parliam ent -kidlit art -ware ness -goli ath -âĨ ĵ -view point -tat ted -fu ls -dor sey -ang lers -li ds -ki ya -bow les -be h -b ite -compati bility -ance stral -pro x -beha ved -gubernat orial -ch field -sab an -z h -teen y -shibu ya -holli day -pan cy -âĿĦï¸ı âĿĦï¸ı -seun gri -? 
, -ðŁĩ¦ ðŁĩ· -im itation -impac tful -any i -gene vie -añ os -bate man -gli der -af ar -ra sheed -effor tless -sh war -dach sh -er un -at os -kin i -ch d -kha ki -k lin -felici dades -bel o -as l -to ppers -fin ley -stac ey -rigor ous -kar ting -le ppard -car michael -be ret -c se -ak hi -mer ingue -ab an -ha ke -ger i -er jee -re sto -comm anders -pr it -fl or -ad ven -ex termin -remain der -å IJ -es g -martin o -lulla by -| @ -mi gn -in store -big bang -cor di -cau ley -ante bellum -dg ate -cro ck -span dex -scaf folding -ore os -ê°ĵ ìĦ¸ë¸IJ -pom ona -ma uro -uni versi -re mi -af ootball -t ant -sm alls -ne h -worl do -tropic al -mor ph -jav elin -gla r -arqu itec -reminis cent -tu bs -spide y -make u -syl la -progressi ves -blo t -shor ten -keep in -ch ak -ang st -super food -decad ent -ston y -neuro logical -ar boretum -ann ak -fe ma -per cu -dis respectful -small biz -lo x -co om -c sc -bs bi -pre valence -him ss -esp an -mo ga -fr ampton -sky map -mas se -levi athan -( ). -noctur nal -car ameli -ang or -amne sia -outsi ders -she alth -rhin o -ant ag -ag io -ðŁĴ° ðŁĴ° -take me -kab addi -c si -m sh -coch rane -thessal oni -sil a -ha us -du sting -obe se -mack lemore -mani sh -len in -m dc -gro wn -shef field -s rs -ke le -car son -ch um -dah lia -can tore -opp o -how ling -cyber crime -sur realism -sc ran -fa iz -thre n -rac ists -r out -pk not -se mana -sin i -mc cull -ma chi -alfon so -y b -sar dar -kend rick -den g -reci pro -on f -doom sday -bri bery -custom iz -art is -c pi -ðŁĻĪ ðŁĻĪ -sla va -let te -en s -âĿ¤ï¸ı ðŁĺĺ -cra yon -ad an -tr c -migr ate -simp son -row ers -king sley -farmers market -shee han -ne phe -bor non -car ton -mic key -all ure -u lu -sli pknot -heb do -gui do -dog celebration -online marketing -acceler ating -) .. 
-origin ated -macar oni -ed tech -out field -mit z -disc us -adverti ser -man or -ha shi -descri p -cap ita -ful bright -recep tor -con n -con ey -spion age -r attle -pre st -u li -blog post -acker ay -) â̦ -red velvet -mat th -inspir ing -b sd -ker ri -po con -mil lar -re pur -accent ure -ä ¹ -ram bo -ragnar ok -dele ting -british museum -pat ory -leip zig -flori an -sci fi -in ers -br ate -yo y -melis sa -ab er -ma sa -po te -mosquit oes -transpl ant -r pa -; )) -bast ille -yl an -joye ux -melo dic -cap tions -atri st -roch dale -gott i -pew die -cuties aturday -who is -aqu aculture -tiv a -sp el -he ss -ha ji -fred die -co per -brand o -v k -photo book -* , -my dayin -micha ela -brune i -sr ini -in te -Ä ± -de ol -d fc -separ ately -bun d -ve sts -to c -me ck -rein forced -constra ints -car roll -sq ft -re ver -cam per -bird man -in action -gener ators -triumph ant -pe sts -o vo -gy pt -al amo -sc aled -suresh pp -sd n -is mo -gi os -) @ -justic eleague -restaur ant -gab i -den gue -next gen -exemp li -ap ex -inspir ational -down side -kid z -u pl -et na -alvar o -fel dman -bar net -m ha -es ch -bloo ded ->>>> >>>> -kan i -ho fficial -casablanc a -bir ds -ty ga -sw amp -o day -new castle -nb ap -ci sion -cho ols -af lo -ne p -mon ton -ak b -super model -down time -th os -sc wx -snoo py -ag greg -yo ke -nor cal -we tt -prolon ged -me tast -beat er -f ta -t lap -disgu sted -y h -voice over -itch y -ip c -ðŁİ ¾ -phe asant -stra its -ram pant -j g -fer til -assu res -fortun es -sal inas -liz ards -kett le -i bs -cyn thi -he g -mc cr -soccer oos -happen ings -cor den -ðŁĺĤ ðŁijĮ -t ches -egre t -wolver ines -congratul ated -ho gg -bott ling -wr i -fer ri -bo sch -af ire -og den -s jo -j dm -sv t -con tex -tol lywood -min k -me se -super sonic -op oulos -å ¸ -âĶ ģ -knuck le -gu ise -gam i -chu cky -z inger -radi al -compla ined -bo da -fe tal -discipl ines -cor ro -ðŁĩ®ðŁĩ ¹ -op ted -filtr ation -ad nan -em cee -mi stre -insom ni -fer gus -tra jec -on don -med tech 
-tanger ine -madra s -gru e -cab s -z hu -sureshpp rabhu -insul ated -day swild -pp m -band ai -v day -s ff -squ id -lo thing -not dead -expre ssive -cu ll -ala stair -x u -up front -fish ers -en es -um d -dis missal -sti er -sel s -lu st -re active -prote ster -eyel ashes -al im -goo de -gre eng -da ir -com pen -anush ka -proto typing -ma pu -bear ings -ðŁIJ Ł -for me -bsbi botany -timo thy -out skirts -am bed -are tha -wend ell -stre aks -ni m -k pk -sne e -fit ter -quo ta -p ate -win ning -ðŁį Ń -sho pping -ma inst -cul ver -ste vie -mcfad den -counter parts -gren fell -fol som -dor set -tech crunch -⬠ħï¸ı -tip tuesday -us l -tre x -geor gie -ranveer official -lic ks -se wn -k f -' â̦ -jap s -p ate -orth op -fe sta -stra s -mon tal -hammer smith -fore most -wido ws -mad re -ite z -mito chondri -lig ans -z ona -cari bou -m ss -andre i -weather channel -gh c -: ... -ta ft -awe ather -al isation -bru tal -bliss ful -nik ola -mal icious -q m -mpg vip -bro die -bl itz -applau d -dri bb -v ague -dog go -transl ating -interpre ted -hat ched -ge tyour -benefici aries -spar ring -caes ars -aw illiams -la hat -bro ke -ti mp -virtu es -rel ying -pie tro -k tn -ici sts -pab lo -lou i -a ag -pn pp -cha st -pul ses -fini sh -usair force -type writer -thomp son -dog s -ut to -ãģ į -sand al -new ly -do ge -z w -wan kers -ne gr -mu cha -determin es -black fish -sk unk -mu ps -instru ment -phy to -daysto go -skin ned -hai der -con ten -ðŁIJ¾ ðŁIJ¾ -we iler -undoub tedly -chair ing -wall is -sh ard -zind abad -adul t -absor ption -pre sto -deplo ying -drum mond -battle front -seag ulls -how dy -juda ism -des de -part ition -âľ Ŀ -no logy -national bestfriend -lesn ar -film fare -co asts -christen sen -ac an -mb u -co pped -ru bble -sw c -fun nier -far ther -where as -nano technology -with stand -pil low -bow ers -to pe -it ly -con fit -ma kar -comfor ts -bo sh -cli pper -bal la -sti k -mil b -safe guard -musi que -eas port -ya z -pad ded -bad er -fore ign -chop in -archi ve -o ka 
-tran sporting -tml talk -aj it -consequ ence -sc roo -ff o -collabor ated -pug chat -ye mi -jav ed -au burn -o of -ma w -sau cer -miti gate -i les -evangeli st -ter ie -re cl -indic tment -cat a -bright ness -may the -whim sical -un lv -key word -cu min -med way -west world -tra w -im posing -form ity -coul ter -ab z -ny pd -grass i -kel sey -qld pol -clock work -f dr -di anne -âĺ ij -ad h -p ann -bra vely -ae ge -un lawful -ver di -pocaly pse -phar o -kar la -reson ance -ma stiff -la dak -bu u -ma iled -hi i -craw ley -tor rent -mach ado -liby an -effort lessly -fal sely -q vist -ke ef -craf thour -cheri shed -val kyrie -s ari -kal amaz -be he -ðŁĮ Ļ -th im -ro ddy -col trane -but chers -ach im -wk end -awk ward -cab rera -:) ))) -fran c -decl an -con dos -a ja -pandor amusic -char ter -ph ill -mon trose -hatch back -handic app -gre aves -eucalyp tus -ut most -t son -bur ton -mid wives -in cur -ðŁĺį # -moo d -compre ssed -tom a -must ang -mo g -as ana -te stic -sho tel -in sol -cor sair -nh q -ben ny -sm ma -kap ur -in con -jon as -ener gies -don al -as ad -se z -n pa -archi ved -stimul ate -do p -hy d -gri eving -ãĥ Ī -ron a -why te -tree house -ss ell -sand ro -ko bo -ther most -se clu -hi ya -ge ez -mam as -prisc illa -flav oured -fas s -w old -maker space -cospla y -p tv -happy valentinesday -sequo ia -love craft -gu an -d tm -ci i -yoko hama -pos thum -re q -ðŁĶµ âļªï¸ı -galat asar -dol by -hamp tons -disturb ance -stone henge -ok c -disrup ting -month sary -jun gle -head lights -du stin -micro sof -happy mothersday -ko ko -gra zi -te sto -na idu -mal ay -ari al -ru mb -ab oo -har man -tra pe -spo ils -je ho -go dly -lock screen -z un -pi ous -ma gento -l enders -prob able -corpor al -m our -aw al -su a -call me -ton ne -go vin -devast ation -x j -gear box -war lock -per me -it ate -gaza underattack -du val -paras ite -clement e -le th -i va -fro zen -tho les -to bin -cair n -s ill -luc kiest -conver ts -st ale -pan cra -euro pale -wis dom -sch ur -ì ¶ 
-verti go -bi j -u bc -nu re -righte ousness -mt c -factor y -ver st -revers ed -hur i -hee chul -fab er -ar r -ul ous -ven om -ph at -green ery -bra dy -à ¦ -: (( -never giveup -di sha -mo ta -health care -dun ham -dex po -den zel -bb ins -f ics -wh am -mc g -eli an -wat a -str alia -tel lu -pe sky -spin off -ar moured -re acted -do fficial -te du -sag ar -mor ally -paralle led -fi os -dow ner -dau gh -re do -world cup -tari q -bar ne -glaci ers -oc cult -barbar ian -her mosa -!! !) -y ur -inter nation -p ss -sit u -p int -american air -sw am -dopp ler -ðŁĴĻ ðŁĴľ -cincode mayo -le van -hell enic -mc ne -ju di -yu h -st x -qu are -ðŁĺĤ . -sti g -g els -mot ley -hard work -euro zone -e ad -ç¥ Ń -seab ir -ci us -la id -alpac a -presu mably -pewdie pie -boo ted -am ari -tam ine -sol ace -bar row -acade mies -x ian -om ination -dun geons -b ma -de ity -ai k -stab il -hir a -affection ate -ving ne -new port -ãħĭ ãħĭ -thir ds -re tains -aroma therapy -ski er -ni ma -do pe -cr inge -con domin -to or -anim ator -sar aj -seas cape -minim alism -lake shore -calla way -berg man -à¤ Ĺ -whisp ering -stupi d -ri ghtful -requ is -ir n -se va -ut pol -tuber culo -squ ish -de but -govern mental -christ ine -all man -weap on -s ito -bur i -lo lita -leaf y -fu ch -tin ted -mck en -a hahaha -ðŁĩµðŁĩ ¹ -repe al -ne gan -ðŁķ Ĭ -tail gating -game insight -ðŁıŁ ï¸ı -yaku za -z t -ti ring -pro posing -bow lers -tra itors -ak shi -cler gy -cit o -up sets -tu scal -symph onic -sil ently -shu ff -black well -ðŁĺĤ ) -ko be -rober to -ri dg -dc u -mer ino -ft p -east side -. 
~ -nb l -mn leg -ts for -frau dul -ca pping -in my -gymna st -ston es -ss in -twe aks -shag gy -oak land -dem sin -sang ria -mm va -hen nessy -down ton -ri ghtly -in it -aga ve -ob last -northe ast -friend ship -dal a -tro phy -ðŁij ½ -mag in -margar itas -ê · -ww fc -fa sh -di ke -cu d -char t -ðŁij ® -refuge es -jop lin -n cs -imp y -firm ware -pas cu -flam in -health tech -bell letstalk -w aka -ol ls -la go -co wan -bombar dier -sh ome -ðŁĻ ħ -mc master -na ve -well s -u ta -tell ers -mis fits -kap il -face off -af firm -a pro -whit epaper -super yacht -speci mens -al located -... , -- __ -ka w -dachsh und -djo ker -s work -qui ere -or um -ðŁIJ ł -som m -c mt -ingh our -skin ny -lgb ti -gi ggles -break away -resear ched -par ity -my al -ms l -re tained -si vity -make inindia -sol ves -defam ation -wal tham -sri racha -road way -concep tu -al in -iw ant -å Ī -del ft -tender loin -ga ins -faul ts -sw ire -st ellen -pol lo -dy ne -bornon thisday -asdf ghj -sq l -sali m -advis es -vo ip -ìĹij ìĨ -un touched -she il -ontari o -uph ill -so bre -de shi -nov ella -du tton -craw fish -ا٠Ĩ -ma a -tw ine -kal in -ðŁĩµðŁĩ Ń -ye ss -brook s -hoo siers -ton ka -umbrel las -ay ers -ate am -acqu iring -su ction -ä n -wi es -tari ans -soci o -mat tb -shepher ds -o so -charity tuesday -s logans -ninj as -al bat -by te -bash ir -trampol ine -mydayin la -i ja -bas el -ror y -gol die -fi rec -un noticed -pecu liar -sch a -ker son -mour ns -liquid ity -qu ipment -hi bs -ar s -aeron au -slide show -sla bs -delici ousness -sk itchen -hta fc -full erton -cre ighton -aer ob -procrastin ation -az ores -white hall -uss occer -medi ation -djoker nole -and me -um en -noxi ous -jo ss -ili fe -anni vers -sudan ese -et res -under mine -whole foods -diso be -kor i -ade le -eli z -can ti -al on -gymna sium -sarko die -meteoro logist -yl de -ste en -stamp collecting -nas al -lo tt -fran ks -ex ol -ack i -good year -animal rights -y les -vio lets -mm es -s thel -ra pping -tu scan -wai ver -tur ner 
-eat local -northe asthour -anim ations -tom morow -t sh -ff ame -bra e -pe tron -glam our -br yn -d cs -bal es -ðŁĶ ¶ -bro v -bre v -b ons -physi que -car ne -x e -elix ir -vol ved -l oma -ìľ ł -æ ĺ -van u -ri gs -bal ance -va res -bon ita -sprink le -perfec to -di on -le ak -calcu tta -o ba -d ma -c mon -tun er -pneu monia -bo gus -apolo ge -cl ough -bor ne -)) )) -revi ved -o varian -ner f -c legg -fan fest -cho u -reali zes -mc n -li gu -leg alize -just saying -for ster -bo sni -k hi -in dom -hei del -en cryp -si ss -ed di -mar bles -brisban e -y ing -pre paid -wal sall -cooper ate -orche str -mar isa -ho wie -che wy -bren ner -andro meda -e gan -sto cki -cav endish -ag an -ban o -de ir -go g -bl k -re thinking -ch ig -rhe u -sni p -p eng -semin ole -m swx -an nex -lyn da -lewisham ilton -cu mul -tb l -dolph in -agu ero -........ .... -pre lude -at our -gr anger -too ting -ro tun -dis ar -home items -da res -**** **** -ðŁij Ĩ -compre h -jin x -as well -iri e -circul ating -ðŁIJ ¥ -over board -cultiv ate -rhe tt -oriente ering -ca k -bal kans -s itt -jas min -britney spears -ro tor -se aling -g bc -oc ci -f as -eman cip -com er -war time -tic kle -son ny -pac es -log g -at rix -sr p -g win -do bbs -uz be -the wanted -dru sh -ex tru -m icky -honore es -dar win -re dux -mm j -ram i -jalape ño -io c -do ver -ju ju -whit ney -s eng -en ly -au ch -archipel ago -vigil ant -man gal -wil dest -parano id -hal i -bb ly -sanc tioned -real ms -con co -u ddin -c sk -play time -libr a -sav ag -oc tane -rec tan -re turn -par rish -mor rha -cc p -c mu -sa iled -se vent -ro sie -pil ing -he w -boar ded -seg ments -neph ro -( . 
-cr ats -bak es -ðŁį ¸ -back tothe -sibl ing -kirk land -ke o -gu wa -bre ads -ðŁĺľ ðŁĺľ -t q -haras sed -ga u -wil bur -j isoo -ep er -li sam -tri ppin -sh ino -ru kh -beast mode -cho a -inst aweather -rich land -gar i -fe z -cowboy snation -fur suit -k run -a en -sycam ore -se gun -ent ennial -di h -o ax -demsin philly -ðŁĻ Ģ -sn hl -pen nies -pass words -ma kin -ty e -d eng -kni gh -jeep life -hel pline -a for -zz zz -ste amy -pic ker -iter ate -happen ingnow -ki b -bloom berg -martyr dom -bul ly -assor tment -a hora -zo e -no i -illu stri -agar wal -p sc -electr onica -recruit er -gar diner -rad ha -naf ta -dot net -pi ero -geor g -bel s -ðŁĺĤ ðŁĺį -tuberculo sis -run nin -mor is -haul ing -ev oc -bre thren -sha ir -frame works -a stu -ri gid -ku ma -kre me -jin nah -insu rers -ny u -f ere -nol lywood -good vibes -- ... -toi le -sk ril -instaweather pro -cze ch -pa vel -one piece -nike plus -fi let -cav ity -ðŁı½ âĢįâĻĤï¸ı -ðŁİ £ -dra stic -dail ys -siam ese -re bu -oste o -lar k -f re -sh elling -p é -glad ys -ðŁıĢ ðŁıĢ -gusta ve -submer ged -grand stand -att u -won t -f pv -b ley -jon i -ang ames -weigh ted -al ou -ठ¶ -les bians -f j -anni es -am l -dor ia -dav in -be ta -can c -madewith unity -ha j -bad lands -mu l -blu ec -pa wn -cov ington -neuro logy -htt weets -dysle xia -thel ove -ne at -fork lift -autom ate -une ven -monte ss -he in -ha g -rel ics -competiti veness -can elo -mar tens -bullet proof -sk ittles -g ya -pri mo -americ afirst -woo o -abor tions -?? !! 
-ma che -ld ers -rl ly -preli ms -direc t -cour se -swa in -super cell -ec centric -sting ray -ple ts -wil cox -west in -okan agan -kir an -car bo -bomb ings -ra rest -bo h -gaw d -di gg -mo ana -enti rety -en closed -dodge ball -par ton -milky way -at r -thorough bred -re ally -qant as -epiph any -ine e -aero smith -spi eth -ar thro -ell ini -du bu -bra ving -âļ½ âļ½ -re structuring -illumin ate -equ ili -mp i -ash ton -pony tail -ma scots -flat tering -cru m -ast a -à® ° -stranger things -bar nab -ر ÙĬ -make shift -got cha -will am -cho irs -kilom etres -gho sh -eu than -dol ly -un ning -the ar -cre we -w sw -j ace -dis miss -ke an -ho ta -kh at -~ > -thir u -ren dez -hart man -tee ssi -cas ca -z ah -hydr ange -fo d -aw p -mzan si -thick er -nago ya -ne va -sti que -cast el -dam ian -there by -ji ang -ale k -music islife -ra q -calla han -gou ache -somal iland -sean hannity -ra heem -lo se -elo ve -whar ton -rectan gular -illustr ating -har ne -auti sma -scra pped -ell and -decre e -nag pur -ki pp -so re -n md -ma as -gun a -gart ner -bel li -then ight -je on -gendere quality -gi ver -a el -gar ments -ne u -mardi gras -mar sden -ro wer -pollu ted -camer aman -vin od -be asley -cro c -ji u -hollyo aks -anesthe sia -al les -ste ward -lati mes -ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸ ðŁĩºðŁĩ¸ -tic ian -gor ia -come dic -ðŁ¤Ķ ð٤ĶðŁ¤Ķ -nai ve -sli ons -ł Ī -bur glar -ðŁĺŃðŁĺŃ ðŁĺŃðŁĺŃðŁĺŃ -york shi -se ñ -fan boy -lau rel -inci dence -potom ac -rober ta -presi den -pr yor -os bourne -w ku -te me -pal ae -ðŁ¥ º -re boun -itu de -red dish -k hand -coloni alism -north carolina -ðĿ Ĵ -manne quin -lady bird -ta sty -knowledge able -g shore -ðŁĮ Į -à® © -qu aker -salz burg -med alists -chy na -bridesma id -ma ori -ro p -outra ged -in adequate -truck ers -al ana -ìĿ ¼ -ri x -oooo oooo -command ments -lam beth -aa j -eco friendly -bla z -morecam be -boun cy -rou x -rai ded -mi zed -sh c -gaw x -labor atories -ru bs -rest room -consult ations -ca jun -virgin i -so ir -rev ue -ple in -wag er -ç ¹ -we 
do -growing up -! ðŁĺĬ -face ted -sin ners -ho vering -ti ene -seas oning -an ja -leg go -il is -fla x -dev o -ash ram -mati sse -ker i -go wer -bo tox -mar shes -unh cr -ts m -opti mus -dun i -stu ffs -so k -order ly -n bad -islam ophobia -raviol i -fab er -cre ds -won ka -in fusion -over weight -daily news -assi mil -acol lege -medalli on -kili manjaro -sti ff -tham es -sun ken -th ard -my dubai -hilari ously -han nel -plu mber -fair view -separ ating -rasc al -qui en -necess ities -confeder ation -ll ll -: ] -weak nesses -bron co -ra ffles -el ot -ãĤ¸ ãĥ -advent calendar -ðŁİ ¹ -stra vel -tun ic -k su -im peach -e spionage -! - -di ment -cur rant -bio de -commu ting -by ron -ðŁĴĵ ðŁĴĵ -shad ed -tr uro -cray ons -ar ne -h sc -fre aked -dram ati -fle ek -u cd -marl borough -^ - -cross ings -mal o -black ops -bin ance -cho ked -chen ey -pl o -ge stures -val edic -ryan air -rem ington -v cs -mc kee -ec z -be gs -nail art -mayor of -happy fathersday -war t -pet itions -n ingly -clean energy -bro x -sl alom -exist ent -ab ay -ug liest -tom p -stom a -sel by -goal scorer -ben ji -overwhel mingly -lan s -semiconduc tor -south korea -re scheduled -sk yl -en listed -dow ski -si del -rosen berg -nas ser -white head -pri us -har are -en n -ry der -í Ĥ -mon g -clas ico -transpor ter -po tty -is me -** *** -vic e -sk it -ode ssa -l mp -her n -raci ally -pin oy -paragu ay -obitu ary -go es -bu cha -side walks -angu lar -un constitutional -transiti oning -i bu -gu ys -un packing -oooo oo -black girl -ber gs - ¯ -wordof theday -trump train -thunder bolt -m si -fasci sts -ठ¬ -t sk -collap ses -raje sh -loveis love -migr ating -set back -ðŁĺĬ âĿ¤ï¸ı -t els -safety first -nar rated -jae joong -un answered -lique ur -en nes -dal go -bill ings -salt water -mer maids -lon gs -clap ham -we arec -pic collage -n ach -h ace -pois oned -lo th -ag na -adel rey -guar dia -poli shing -peace keeping -d all -p isa -la pland -process ors -de andre -so bs -p once -dra ins -c be -ðŁİ¥ : -spla sh 
-meat ball -fon tana -worcester shirehour -ne v -bri sk -b int -ac r -po x -cay enne -skril lex -j fc -hahahaha hahaha -gla s -en gul -tempor al -oni zed -con cre -com pose -vibr ations -plant ers -fer t -criticalrole fanart -t bli -sch allenge -huck abee -munici pal -iam bic -radi os -ne vis -dura bility -mc cla -horse back -inst itutes -ful fill -atta ch -ate ur -ak an -resi sting -illumin ation -hand le -hair care -om ent -macle od -ka iser -g no -bear down -ly f -gl omer -distor tion -z m -san k -roo sters -is now -as ports -ag en -wo ken -st george -ro mper -my le -econom ists -ru to -t will -health and -d ito -ws l -tair p -pra kash -mic heal -h ts -w rights -kat su -fioren tina -defen seman -d itch -var sity -texan scheer -ba ham -sc anned -we il -seduc tive -ðŁijį ðŁı½ -fu e -er win -dav ison -ter ran -moo ds -wool f -re source -@ . -cu sh -ðŁį ° -regre ssion -cur led -la zer -jo anne -ab bott -mo z -down ers -mm mmmm -valent ina -k hair -dream t -cro ok -che k -ste aming -nephe ws -cl eric -as ober -indefin itely -w ye -us news -joy ce -flu shing -wynonna earp -ron do -kis s -hot dog -bar ns -sax ophon -far ley -gas p -decre asing -al way -pe x -l sd -shi ft -p outine -ra zz -rescu ing -ni ko -ho ch -cc l -u aap -n ts -m car -il wx -conqu ering -ket tering -stur dy -delay ing -sto k -vani shed -cath ar -bin gham -in v -ic hiro -he mo -budge ting -[... ] -be ss -sebasti an -slow ed -ðĿ ij -musli m -stun s -acton climate -ve a -se ton -rose tta -oun t -hard in -flu id -ca w -ðŁ¥ Ĥ -yach t -un l -sp hy -provoc ative -or ic -is back -__ _ -nicol as -gy an -loo se -fl in -reb ate -: :: -! 
"@ -com icon -she ff -down stream -chic hester -beach life -mom life -diabe te -ar ra -van e -ok u -ye o -man go -try out -app ell -he irs -arjun a -dd u -na veen -movi c -soci alists -s back -criteri on -soyu z -k her -da z -yol anda -wine oclock -re ina -one w -leon ard -en dez -u bs -support local -facilit ated -carameli zed -b pa -vuel ta -my tho -m ami -spe are -nbap layoffs -fe vre -nick jonas -im print -c so -craig slist -la salle -gi deon -ha doop -dis regard -w ud -tu c -ma gee -acou stics -ta a -qui e -pol a -cr t -dw yer -dis sec -capit ol -men tion -kn oll -he igh -fin ders -plac ements -l se -indi ra -gur i -madhuri dixit -kingdom s -iambic pent -geor gina -je ky -conflic ting -bay an -aga tha -uph old -dr on -vic ar -ex pat -periph eral -pe ssi -fa f -ance stor -? .. -wid get -pun c -comm enced -beav s -air waves -ad dis -po a -de sses -co den -vu e -ru pee -kar in -spo ck -m sy -ภ° -pr ick -fill more -ti fication -thing sto -sar de -em ile -pere ira -n ad -bright ening -arre sting -wo king -usc g -sp ill -raspberry pi -hu go -ite c -is ma -cuff links -optimi zed -oc c -mi wx -en ka -el ited -afford able -sa kh -coron ado -ho h -at ul -ai oli -jim cantore -accoun ted -vin ay -her mit -groo ves -ran ch -r illa -we tter -ou tof -veter in -ni kov -ki an -fair banks -ram apho -n iti -k ko -ru sty -ne stle -tv xq -shahe er -âĿ¤âĿ¤ âĿ¤âĿ¤ -penn ant -gem stones -dem debate -ðŁIJ Ĭ -auton ews -support indiefilm -mach o -ve x -new sat -ne ti -conce ssions -can died -yof the -mac au -den ds -cricke ters -san iti -mari ano -gh at -ar toftheday -¡ ľ -e gos -gen oa -chat bots -bri er -al labout -mon ty -spi ed -r tr -comfor t -sni ppets -real time -gra in -exam ined -en lightening -tt u -god bless -release the -sing ular -ki ans -ha ka -sor ren -defe ct -mar g -equ ities -d orian -su ka -per l -aishwar ya -pul lover -preci sion -fair way -ne ve -rive ting -vill anova -en com -ak o -passion ately -europale ague -siem pre -x vi -enligh tened -c fr -âĺħâĺħ âĺħâĺħ 
-wast eland -is f -new comers -emergen cy -amphi theatre -- . -text books -figur ative -tre mb -pe sc -ab hin -ab bot -ac acia -har ds -por sche -kau ai -el isa -car rick -abo u -elli er -be ch -neu tron -galap agos -ru ben -in nis -how to -nun s -sab ine -i ac -clin ched -no tori -fi ves -cairn gor -per i -gr c -ðŁĴ¯ ðŁĴ¯ -mal m -twelf th -di ff -rout ines -marty n -lin den -synthesi zer -nu mber -game cube -fal kirk -byz antine -queu ing -gr ill -scal able -char red -rou ting -her bali -gri zz -ðŁĺŃðŁĺŃ ðŁĺŃ -tol l -termin als -l pc -ab d -war mups -remo vable -¯ \ -vi go -pap aya -ne ve -lov ingly -jo kers -ib les -sse tt -poten ti -pel e -gi gi -sadi q -leg acy -son o -ru pees -retar ded -ele e -par r -fi ance -ey re -say ers -pend ants -mak nae -al bans -adap ting -p ff -pu berty -ji u -ing rad -hypocr ite -diplom ats -phys ical -rob by -bon sai -ãģ · -f att -catal unya -âľ ĸï¸ı -ro ma -more land -so e -conver sions -stl blues -shol m -gra ssy -pra do -on u -assaul ting -> _ -sett es -dis graceful -aph ra -âļ½ï¸ı âļ½ï¸ı -ठª -kil n -goal tender -s ru -philanthro pist -b als -th n -stu den -sando val -dogre scue -eli ons -asse ssed -lar go -hec tares -sh rm -sa if -cle avage -no ches -n ene -fat alities -cur ing -clean ser -al es -p vp -south bank -pizz eria -marsh als -kni fe -an dover -tbli ghtning -sr sly -ou te -digi mon -timesof india -prome the -le bo -f su -wit z -rever e -man as -mam ba -ch ica -gu an -exhibit or -csr racing -d ere -xx xxx -gu sta -story time -ston ey -organ ics -and u -se am -min ogue -anushka sharma -ab a -ðŁİĻ ï¸ı -ugand an -chro matic -as sn -document aries -sh t -ru paul -loy d -k ats -e us -ite ch -me dusa -pan ty -kel logg -et to -talla de -sha a -do st -p ms -mari ana -je ster -croo ks -ðŁĶ ¬ -min danao -ind hoven -ðŁ¤ ª -le xi -tv n -jan is -co te -ãģ Ĩ -ser rano -iw m -ðŁIJ ¬ -k ke -distribu tors -cap u -counterfe it -camp site -ag gie -ðŁĺ ¼ -chhat tisgarh -~ @ -state u -san di -prevent able -cl s -can ne -mm c -i ver -sa 
haran -pal is -night out -do s -ap ia -absc bn -manag erial -aro se -mo wx -aro sa -ðŁĮ ³ -under dog -remo ver -astronom ers -lent ils -su scep -smoo ther -pend leton -fau cet -e mory -dal mati -af cb -tic us -exem pt -en rol -d heim -ðŁIJ º -restric tion -star fish -sto w -snor kel -thunder birds -she ad -homo sexual -dy n -as li -andre tti -dou che -dom o -tar mac -slu mber -pr onto -first dayof -mini ature -mari achi -argu s -recomm ending -mobi les -in ce -illustri ous -or c -adver ts -gr its -wea sel -pag oda -over pass -gre ys -maxi mus -arma gh -wood land -sun ni -ðŁĴ ī -ë Ŀ -ti one -soci o -ho s -ðŁ¤Ĺ ðŁ¤Ĺ -wind sor -subsequ ent -munch ies -id h -exclu ding -e mi -cu th -z ai -week days -law suits -barn ard -Ø ª -pe tting -net es -mul ligan -pharmac ists -ra quel -e ton -cran ston -gil ded -cle ary -ce ph -ra a -pam per -lombar di -as in -sher ry -pro d -for te -ari anism -buffalob ills -æľ ¬ -ðŁĶ¥ # -uu u -just ices -car ina -nat in -mas low -dro oling -cog nac -cam ber -el ong -r dr -in en -convic tions -am use -tro ck -harm less -visit ation -gen omic -bl and -beno it -chim p -tuscal oosa -gre asy -x po -gil t -se q -per mitted -christma seve -book s -mu e -old school -human right -be ati -ðŁĶ Ŀ -sh at -sculp ting -h wan -fern andes -sci utto -fu entes -endeav ors -maid stone -un paralleled -shou ted -queen of -mer c -band ic -ve da -sel angor -pi le -ja han -intimid ating -disapp ears -cl ich -za ha -w urst -hi v -fod ils -cor dless -aaaa aa -hy dra -bel inda -e els -bu f -su staining -rugby league -no c -brig itte -( ðŁĵ¸: -tromb one -soo the -smo g -ad p -stab le -ing ley -diagno se -ms g -we ss -tic keting -one e -nsw pol -e up -auto psy -adity anath -sun down -river front -si ya -p is -hier archy -dur ango -di jk -ren shaw -he aps -epide mi -david bowie -interne tof -dd i -nation ality -mb ar -air y -win der -w alia -elli ott -c x -bav arian -pl att -an tw -wi wx -sof ter -ne ha -h eller -th and -dani ela -bo ast -degra dation -ðŁĴ¦ ðŁĴ¦ -transform 
ing -man e -av ut -ðŁĺĪ ðŁĺĪ -vo ter -the e -t ate -pu ff -in door -sop roud -boy ce -boris johnson -wait in -immun ology -ðŁıĨðŁıĨ ðŁıĨ -âĿ Į -street food -liz asober -cavali er -c elia -need le -motor ing -g ato -, ) -ra de -harve st -t ms -jar pad -on ey -air men -v re -impair ment -abhi shek -snoo p -l ant -fam ously -bl ou -s ze -g ander -un touch -tu f -dee jay -col lateral -b ind -ðŁļ © -pin ning -ic n -' ; -the economist -ul tram -worldwater day -ti poff -the i -feed ers -campa ign -sc umb -day weekend -yo m -pe dic -h ough -ps v -pl in -on de -boston marathon -az zy -* _* -con ley -thi ago -hoo o -gal erie -luci d -je tt -gl itz -final fantasy -achiev ers -y ung -peregr ine -op hi -dam es -biom ar -âĺĢï¸ı âĺĢï¸ı -sk c -l ics -fl ank -ar rahman -ho of -uphol stery -t ats -wo z - ¿ -snor ing -ra er -l ju -ap d -pl ating -kan u -im ation -fragr ances -m ra -mor ay -mo tt -im muni -hearti es -bho pal -tim ers -g ata -color way -car nation -win get -si ghs -s ville -optimi st -chate au -olympi ans -ci o -singer songwriter -ny o -fi bers -bur ch -ag ro -mil ne -ig bo -cr amer -ation als -dan ube -pad ma -nor mani -en forced -bre ck -boeh ner -ar den -sur rendered -pros thetic -om a -ha iled -calcul ations -w fa -bi b -fcb live -fon da -west coast -que sts -friend ly -to wie -fit ch -bal ot -star dom -scrat ching -ho sa -thi ka -o ven -stro ke -out post -pharmaceu ticals -hi kari -mu y -af d -fallon tonight -squ at -or u -dra ined -chocol at -ë¯ ¼ -wor ths -ri b -mu j -that s -residen te -it el -boo st -mi gos -mul led -la a -etsy shop -don keys -me k -p tc -flin ders -e hs -ro hit -mu ir -g ad -compos itions -åĨ Ļ -combu stion -i kh -yemen i -wav ed -gar ci -ak os -oo ds -fu sion -se que -s lan -pl ur -kic chasu -shenan do -s ams -worl den -horo witz -with me -mic robes -k ki -ðŁĴĶ ðŁĴĶ -w su -patch work -fre er -y aki -the art -symboli sm -mil er -bt n -ma bu -side kick -motiv ates -sag itt -natur als -serv iced -ps ori -pa ola -qu ig -i badan -gi ggs -ë ³ 
-sciento logy -si oux -salam at -d res -cad bury -d hawan -ci ón -_ ' -swa pping -maris ka -james bond -explo sives -ay les -af er -s agu -cen sor -tom a -jeff erson -ring ed -par tist -ir responsible -aguil ar -vac ay -equ itable -altrin cham -ac ur -man ish -ger min -schoo led -pu tter -ed ad -nav al -toast y -sol areclipse -dish u -coy ne -ac co -mu ck -mar an -el os -len der -cro ix -worth less -ha ber -gun men -ðŁį ĵ -zen ith -t enders -hur st -hol tz -itali ans -car low -u cd -characteri stic -bun g -av l -u th -sa sia -rs l -red man -neighbor ing -green peace -sti ps -follow party -y gk -en os -omni bus -na issance -chri ssy -secu re -call back -ji hoon -memor y -block er -l anta -daf fodils -bil t -ffer ty -fau st -ie c -nipp les -so g -m nd -jagu ar -bol dly -ab poli -pro position -gun sense -evan sville -cu tters -we go -dou n -do x -stal lions -ka j -shi ppers -j awa -vol o -le ven -pap rika -kov ich -jor di -induc tees -app alling -dial ysis -allevi ate -âĢĶ âĢĶ -pie ter -mid wi -q tr -juli ette -inter mission -haw ks -act ment -one ill -k lin -vam ps -fam ous -cou ld -autom obi -da an -west end -elli p -nh c -mel anch -web series -ton gue -snat ched -smy th -tan gible -sl i -e asing -bar stool -over lay -afford ability -ting ed -ter as -ay ush -wanna one -rh ine -dan a -sh ana -kend al -fer tile -w ir -repl eni -lar vae -is ro -con vos -ab brevi -u cc -hun gry -bur rows -ag er -nav i -mat in -du per -cer n -ma don -ķ ï¸ı -é ģ -tu ps -hy att -sh ep -friday night -wis er -hei di -hat ton -p gh -foun tain -wrist bands -ahmadi yya -aeri al -subscri bed -so los -m ace -sla yed -for fe -dul ce -christ mass -arun jaitley -viol ate -ob stru -ni eces -w vu -idy l -fa ze -pre serves -infr inge -premi ers -inter vals -agen cy -( © -stand alone -di mes -bo er -param eters -ge tit -ðŁĺĺðŁĺĺ ðŁĺĺðŁĺĺ -tu lane -for given -scol l -mb ps -smash bros -rob bi -prima vera -ali st -ghost ly -ay at -ye ats -impre ssionist -ear phones -caul field -wai kiki -sal ute -sc ou 
-mu ay -louis vuitton -bak hta -ado g -inven tions -hur d -forec lo -stream line -thalai var -ch snews -will ard -t sn -euro parl -cru sher -my sore -gro wer -ra ping -pat ti -g den -sm w -muf ti -kid man -ab r -soun ders -skep tical -ðŁĶ İ -sun dar -i me -fer g -feather weight -ar lington -pas qu -ag azine -wearab le -nati c -mccl ure -inter mitt -hor de -six ties -car te -bha v -ze al -experi ential -ador ned -som mer -eno te -hypo thesis -stin ky -pro to -dead lines -vo gel -mus ings -monc ton -gu ter -f le -aci on -voice of -ta sha -inhabit ants -type face -s ba -bts x -ðŁĶ Ĵ -wor x -u hc -jo ko -cell ars -gor o -continu um -... & -weather cee -ha p -sr k -ris ers -lonely planet -un named -co eur -ðŁį Į -the world -ili ke -fa sten -ami go -ri ba -ramapho sa -staf fers -had ley -? ?" -fi ore -sal ut -hu ff -bez os -Ñ ĭ -ra der -kam ala -in line -fill ers -um atic -all in -shat ter -re in -o ku -ch ases -fla gged -baby metal -water stones -ts b -cut out -op hel -aam a -rockab illy -sto lic -jet blue -ich ick -down ton -uzbe kistan -pat na -la q -gr ange -) _/ -subsi di -sc p -newsc ast -it sa -twee tyour -e mor -archae ologists -uni fication -por ta -q x -protec tors -pro hib -charis ma -car tag -ren fre -scul pt -guwa hati -de ma -boo p -unf pa -dex ter -lay la -alleg es -sou ps -never again -l ys -cal c -bar oness -visu alize -ger ber -absor bed -i ers -a han -fon tein -detec tors -verst appen -sv c -formul ated -ac dc -li x -in competent -bh k -lour des -water house -snow ed -appreci ative -sig ma -lizasober ano -pen ned -pay check -tall inn -fanc afe -par isi -av alley -vi g -ru fc -hard ship -so cute -po ise -ì ¹ -roth schild -k ly -???? ???? 
-l hp -il ay -f hs -am ad -ide als -brad bury -bal boa -nic ot -kid nap -wol ve -tas manian -op t -matthi as -ãĥ³ ãĤ -super markets -mylittle pony -me lee -li ster -gr oun -fe dora -kind ness -en en -bra hms -¯\ _( -ros well -mar lene -ic u -re formation -or ail -he brides -dispar ities -terrac otta -swal lows -re id -influ encing -flu or -den e -tum our -blon des -thunder bird -sh eva -moga dishu -ka b -cre eps -i ving -ene ed -anno y -âĶ Ģ -intri gue -enqu iry -ar aj -tur al -kuber netes -end lessly -divi dends -tor a -ti sh -commemor ates -un ra -tri b -pon ty -ne m -diss ent -brew ingco -ðŁĺ ½ -nor mali -bi of -( ... -chil len -ì£ ¼ -mell on -av is -mccor mack -ing ra -enrich ed -custome rexperience -testo sterone -snu g -sett i -ger onimo -inqui rer -bre aches -very thing -bloom ing -mu ra -dispo s -bi de -de va -shade sof -in trin -sh ev -s ven -nayanth ara -gan esha -c ws -ber ta -label led -use um -nick named -ma han -car uso -ap ur -ðŁij Ĩ -w q -orphan age -discar ded -mag nu -lu e -je on -bridge port -pac ing -mercur y -( ðŁĵ¸ -marx ist -amphi bious -transplant ation -stit ching -then burg -gradu al -ãĤ Į -ro ft -ma ils -ine c -guy ana -dopp elg -ver o -re write -head less -harb augh -gate way -car sforsale -sw i -st is -mach t -un de -sura baya -stap leton -nur turing -mil ner -ya o -lma oooo -ko sh -arsen al -k ame -er ry -ar royo -dis misses -ru bbed -rc b -lew d -dil u -and or -vi de -ur in -inter sec -ha ar -al b -year swith -app leton -é al -ul livan -suc cu -monter rey -d mx -artem is -ron nie -farm land -s football -gro tto -anth i -ãĢ ģ -à® Ł -vid ya -jimmy fallon -ൠį -t zer -gravit ational -w thr -u hhh -e hr -tin ker -ti juana -scran ton -ram charan -bar clay -re van -m si -ka p -wr s -we thenorth -tor al -sat u -gro m -fac ep -erick son -z yn -se dge -oo dle -spur sofficial -ds p -sic ilian -soli hull -recei vers -ladak h -hend rick -ther i -presi ding -mc guinness -litt ers -gun nar -gh oul -wi b -n tv -kar o -fro ck -b lau -ampli fy -all is 
-ul lah -memo irs -kh loe -intercep tions -pet day -lo oney -con fin -ch ay -piyush goyal -frequ encies -ut z -event ual -warm ly -obli vion -an ka -ta it -âĿ¤ï¸ı . -director ial -ru lers -prince s -mu ck -stur ridge -deu ce -abri dged -bagu ette -un cles -pen du -min ding -forre ster -av ila -wall er -wall street -ment or -hin o -high way -crom well -fanart friday -mb i -co yle -a hi -tro ve -spie gel -pay tm -mcin tosh -jan sen -nit i -nash ville -len o -leicester shire -le gos -dic t -ðŁĵ ½ -sp ad -beverly hills -sy rah -separ ates -z ain -un fit -dra gs -tan ia -over flowing -hri thik -haw thorn -z ani -mac far -fi de -to tem -pe ds -fundament ally -cal ico -sin ner -j ä -hil de -ds d -ten ay -ta hit -mil f -lie b -inform ing -up lift -ra el -mortg ages -lec t -ii ii -guillau me -compos ites -old smobile -l end -gar th -com mish -bapti zed -scorpi ons -ru cker -bringback our -alli ance -thalap athy -tal i -sp ans -eri dge -wither spoon -lin da -sky lar -kor n -hom s -Ä į -sil enced -caf fe -ar ty -dist inguish -to wed -pun g -jessic a -ear nest -beau fort -t ama -study abroad -si khs -new bie -nav ratri -mar ble -loun ging -lit ter -dal it -so sa -iz es -gra de -com promising -tr iton -de tta -v j -chau ffe -spec tral -powe red -montess ori -artic ulate -hal ton -al co -ye y -mn twins -acoun ty -ðŁijı ðŁı¾ -âī Ī -mad men -kal a -gru m -chi k -ati s -su me -akh tar -job search -high lighter -bo ath -âĦ ¹ -tar zan -lam bo -âĽĦ ï¸ı -ox fam -dump ster -pretz els -mac os -incl ined -fac tual -adverti sers -shu i -pu ree -ml pfi -anti dote -cap o -pa str -merc ado -but ton -ar min -ag g -lol la -horri bly -er rands -christop he -time snow -monday motiv -li ss -scand als -mc i -dispropor tion -âĺ İ -sur pass -samar itan -so tho -pu rest -fl att -trivi atuesday -delec table -leop old -hermi one -chou dhary -en rich -¡ ¡ -subsi diary -ine qualities -bachel or -auto immune -la kota -i hop -ad jec -the simpsons -sh es -se k -gret chen -up stream -hin akhan -coper nic -x 
tina -lu g -tough ness -e ad -cli pped -bi us -sl v -fah ren -dee pak -ca u -x an -im mature -dig ni -bo bs -shred ding -but tery -accommod ations -de ven -chun ks -super league -sky bet -kil dare -je et -ë į -ce k -wrec ks -pro pane -oh l -tb d -quo i -trum pp -mi mo -reluct ant -ver ne -o ic -ma gh -ar nau -se ver -li dge -stair way -kicchasu deep -ðŁĶ º -mach ining -aama admi -ot i -c da -al it -pan y -inst alls -ac ct -e shop -di em -hard well -fulfill ment -sc afe -qu ack -extrac ts -swee tened -fi ghton -f di -d inger -wal tham -us ur -refe rees -seok jin -gran n -af rin -th n -sch af -par cels -bet is -amar ine -nom an -kh tar -mor itz -cou pling -bar ons -ðŁIJ ¸ -à ¸ -sl p -sad ler -x ander -tri ad -mc millan -kh z -divi ding -ìĹijìĨ Į -dar yl -zed d -le ys -pla ques -flu ori -tipper ary -on nell -di dier -lang ford -im c -the sun -bir dies -ar cha -ye ssss -t di -dar ia -cand ace -al tam -pal aces -ch it -sant am -event ful -book of -ad b -mon stax -cre ole -co el -âĸ ½ -we aren -sten nis -she ath -ati sm -gron ingen -mlpfi m -le pre -wrong ly -rsp ca -rendez vous -acknowle dging -pel vic -solic itor -sla ys -nue stra -lo d -is lander -fer oci -fashion show -ra ss -dge on -adole scents -sma shes -negli gence -grate ful -ved ere -sw oop -ing l -apol ice -vand alism -gan n -jo ao -di supdates -zimbab we -under age -radi ance -w of -bour geo -pla s -cr ani -gh ue -wrec kem -warran ts -re form -jim mie -at wood -ys l -neil himself -l bj -i man -tan to -nois se -ver bs -equip o -al together -mam ent -l ice -dou glass -tier ney -pri med -j hal -furn itu -braz ili -v ill -past els -n ison -u ff -paral ysis -jay e -im po -ðŁij ģ -strate gically -pakistan is -was sup -super bike -thank u -tru elove -sha ikh -israel is -vi p -to g -li en -la ker -grey hounds -cul ars -bian chi -balot elli -ar ran -loo s -str ates -he bron -ar vo -sunder land -the al -tomb stone -sand man -c pac -thanks giving -love him -lat ino -an in -aka if -ĭ ãĤ -tor quay -di est -alli anz -ðŁĺ ķ 
-golf club -cl lr -wal cott -sch nau -promp ted -nomin ating -len nox -val et -mon ro -may ward -e ph -ðŁĶ Ķ -inter oper -r da -re flex -arm chair -ê° ķ -stri pper -por ti -ph arm -ham za -ni reland -ne ue -h pv -port foli -sun burn -fris bee -be al -bapti ste -x h -ty m -pr ati -o vers -haz rat -deser t -der ry -us ky -em mett -ach arya -)_/ ¯ -shu d -may a -ham ill -ra im -nr c -fitt ings -cur vy -ðŁı ĩ -ster ling -à¥ Ģ -wal kin -short cuts -mil ly -ast ur -alpha be -pl i -pe z -miss you -rad ford -ml g -ta eyang -notjust lakes -du mps -seren dip -le ur -ra ving -e ster -de priv -absc bn -ðŁijĩ ðŁı» -scar city -o cr -mean ings -cap t -da hl -fer mentation -bri oche -to win -out lander -massi mo -en cro -ðŁ¥ ³ -buil t -po tam -kir i -tm w -monit ored -k ites -peoples vote -gray son -íģ ¬ -afri ka -a dies -i vote -gy ne -g annon -di x -c mc -ou ral -fox andfriends -bel i -ig ne -gl an -katrin akaif -co politics -qual itative -p si -lu cci -disc oura -âĺ ® -kel li -gau tam -carac as -reale st -pu la -in us -hill top -make aw -atten borough -tw y -r arity -peck ham -ma hon -corn elius -clin icians -ton line -tb i -paradi se -ka si -inev it -fresh ness -colling wood -lun atic -defen se -cop d -in fra -wain wright -sains bury -alab am -te ma -lac o -chec ker -releg ated -tren t -stal ks -huff post -bhubanes war -ast ral -share your -prim rose -hi me -cat an -end ment -en dow -cle mens -mal oney -hil ary -game time -den ise -collabor ators -b wo -radic als -gue tta -ici on -au a -snap matic -sat chel -excav ation -base man -s ão -gn ation -fel d -surve y -shah zad -ma st -anirud hofficial -tru cker -ot ago -geo graph -ethe l -âļ¡ï¸ı âļ¡ï¸ı -s ver -mu tt -internetof things -ancho red -wh ouse -bang la -bal main -ç¹ ĭãģ -break fa -á Ģ -twi ster -te tris -ca v -stag s -g z -au b -stor med -hel ens -yar mouth -st asy -gustav o -co sc -vin son -up p -sc ricket -assump tions -app e -nu h -u er -pre mise -n aga -e amon -coron ary -na f -north side -el mer -ro tar -out lining 
-el f -re surg -kat elyn -in can -hyster ia -ce e -am bani -pro lly -Į ãĤĬãģ -ax es -san jose -rem brandt -mag pie -even ly -scor sese -qu aint -f g -b buk -indian football -weare all -spd wy -pis ces -ec g -âĺħâĺħâĺħâĺħ âĺħ -pre orders -: | -ni pple -sal azar -ju me -jail break -min n -bas sett -ze tta -jef free -ad jun -tic on -san diego -drink local -chol era -solic itors -o bo -com post -ni an -wr a -tre ach -ic ic -profession al -del ve -leg ate -histor ia -cro issant -con noisse -nam o -palli ative -chem trails -i ority -global warming -comic art -behavi oural -re sted -li as -cli mates -Ł ãģĦ -rut land -nou rish -menopau se -hot ties -demen ti -ve spa -mel ville -anal ogue -tz man -str ung -im perfect -gl are -cir cling -ros berg -rec o -oc ity -lo ire -em be -do ssier -ne el -nan do -me a -gal vani -fin esse -ag p -berke ley -asi m -âĺº âĺº -quil ted -ish ere -un matched -po tion -for z -at re -selfi es -juli ana -ðŁļ ¶ -âĸ º -mel ton -âłĢâłĢâłĢâłĢ âłĢâłĢâłĢâłĢ -spin rilla -pur cell -ed p -at leti -tony awards -ra ja -pro gno -mol ten -stu ff -p ally -nobel prize -âĻ» ï¸ı -spiritu al -spe ake -sa sha -bri um -tru ss -critici ze -assassinscre ed -yor uba -u lo -fire man -workin progress -ef cc -fla res -ro bot -hi kers -cl l -shado wing -pat sy -leh man -c ns -å ± -guad al -à± į -ra pe -r honda -paralle ls -son ja -langu age -land ings -z ola -cr amps -bur ning -apprais al -jol la -ham m -kas a -gul ly -f go -uly sses -ri be -ðŁĴ Ħ -ib u -eti enne -bri ar -fin ely -comb ating -y ql -go tham -we chat -to paz -primar ies -l se -iz z -hel e -dispon ible -cy stic -bel ichick -th rush -kansas city -ge om -soli di -red bubble -by stand -cambridge shire -par fait -ast le -ow o -ind ore -stom ping -sm elly -ðŁ¤ ĸ -locom o -adm itting -hol me -clock wise -min sk -mc co -for get -ev p -cam ra -ab ella -yo tes -universit yof -mé xico -silver ado -ric ket -crom bie -pu j -eradic ate -deli ght -y go -glam ping -vic a -du ggan -coun ters -cf d -sc our -react js -pu ram 
-paras ites -in ki -vill en -stel la -li mbo -ang as -k cr -ðŁĴļðŁĴļ ðŁĴļ -vap ori -mum ford -oli gar -à ¼ -al oo -boo ties -ad r -k elli -dru mmers -av ici -nature uk -ron al -in trac -un splash -le che -g oma -el ine -envir o -bi onic -bu eno -mi k -av in -star ling -em powers -cake day -boy cot -ðŁĴļ ðŁĴļ -ðŁĮ¸ ðŁĮ¸ -v ach -m ci -fractu res -ger i -sk ing -exclu ded -lu ce -ja ve -ig gy -evi den -aki stan -a wn -mor als -luci fer -ha ban -tumb ling -sunday motivation -mo sley -captain america -sch icago -the one -mo td -d ts -ðŁIJ ¼ -rep ell -ii i -locu st -geo spatial -mer sey -immer se -desc end -ber nade -j s -boat sales -win der -cran k -sing leton -candid acy -ben a -ðŁı» âĢį -high lander -ol t -k prs -healthy lifestyle -four teen -end the -ith aca -circul ated -r ans -pre valent -ha vas -splend or -roo ster -kalamaz oo -jewell ers -enne dy -rou sey -es y -cann ons -ornam ental -// // -ren don -win ne -mol ding -eid mubarak -coun tess -simon a -ha wa -fo es -du ster -sb u -por tray -mar ries -goo dday -cho co -achi ever -ðŁĺ¹ ðŁĺ¹ -pre neur -tr amp -tom i -n bat -garden chat -farra khan -ever glades -ab ru -sou sa -se ce -homes wee -terre strial -bar it -sri devi -ol u -mel inda -f rick -can dies -ðŁĺŃ ðŁĴķ -qu reshi -family fun -exor cist -cardin al -ny t -dies el -cu mulus -capric orn -si ology -lor na -dou gie -an die -super sport -c fl -п ÑĢи -say ang -pe ek -ภĬ -lo be -j em -ing lis -gg led -c sn -amne sty -chu ps -ba es -sau er -ðŁı IJ -mongo lian -en et -back street -dr illed -acce ssing -ce o -b se -ai ken -pur r -wor sen -whe res -war k -testi fying -bu ri -bla st -aw g -ðŁĵ ĭ -re defining -hear ing -u ci -c mp -bon i -tail oring -ta ji -noc chi -em t -stephen king -ne et -compla ins -campaig ner -luci ano -twili ght -ti esto -pas sports -flo yd -cathe dr -na ked -caregi ver -b coz -ade cides -ku ri -ly k -br aries -dren ched -disc lose -ðŁĴª ðŁı½ -le blanc -je tty -gar ty -chip mun -b su -rhyth mic -ic z -fri d -anne x -ame x -solo ist -lanc ers 
-arro whead -speci fication -simul ated -na is -inver te -bo wing -wor ship -f z -abo ss -sha q -ì¶ ķ -challeng ers -an arch -aamaadmi party -ãħĭãħĭ ãħĭ -suffol k -so corro -sn ell -cla dding -absor bing -shaw a -particip ates -ðŁį Ķ -book stores -bak u -seap ort -ko jima -gab y -pack ard -electr ician -let it -mo wing -fa wad -young jae -hot mail -men ing -u rie -intim acy -con ti -: ") -lifeis good -in ciner -i dri -craz iness -jour nos -fran chi -bott len -al da -ff es -k x -south we -air a -clay ton -sco ti -f j -bri ga -ð٤ĺ ðŁı» -demonstr ators -y z -stor k -na q -casc ades -travel chat -plat a -pad ma -fran ci -at tain -bat girl -lom bard -hoo s -d dos -neon atal -discla imer -r ss -r ant -di sen -tex aste -so cal -frac tal -cam ry -stri fe -sn acking -mu h -sant ander -mor ons -gra f -par ades -hu ston -dru pal -mi ento -kir stel -hy de -vom it -forti fied -sphin x -da v -bir yani -win nings -s baseball -mer ged -lovel ondon -ling ering -dream big -car leton -liveli hood -djan go -astri d -gri ds -down e -bru ised -s ne -scarec row -hel ium -f nc -bi ggs -an ter -restor ative -em pires -ab del -life style -kiwan is -colloqui um -me en -pr ick -anti que -ze b -mi mic -edmon ds -ðŁijĬ ðŁijĬ -q ing -pp el -mc gill -interpre ting -âŀ ķ -rash ad -do ka -narr ator -electro magnetic -ash by -sau ra -iran deal -âģ īï¸ı -krish nan -in di -ff en -bre a -os man -multin ational -chi ppe -recruit ers -aus biz -p ounding -re gen -cur sor -refu sal -mac s -in ak -ax ial -wa ifu -up cycled -hindu stan -cas sini -carly le -scrat ches -re ef -man atee -eat ery -ðŁĵ ¢ -un condition -sen pai -on ther -comic book -pro sciutto -de mar -mi se -ma ge -fre ec -aye sha -al der -android games -ley ton -ho ck -door way -chicagof ire -aali yah -sw elling -bi x -. 
ðŁĺĤ -evan kirstel -torpe do -kon stant -genevie ve -ma ia -ha user -do torg -hide ous -fi k -sp raw -e ek -z appa -wan dered -' ' -ra jan -bam bi -( $) -wid ening -tool box -sa ir -illumin ating -pra ys -out patient -i w -day o -lo b -sw fl -sha des -gu ms -coo kin -ko di -gri ffin -traum ati -ste a -slaugh tered -god bless -air time -pseu do -b sa -hau led -ar if -à¸Ńภĩ -le l -wc po -mil iti -char ters -worl da -ru k -k gs -digital india -is able -idyl lic -esp ino -marie tta -e bo -team canada -ab our -wil ton -rock stars -fav ored -phys ic -wrink le -tb r -d print -ball arat -ad al -z ey -ðŁĺį ðŁĶ¥ -tom lin -mt r -pal sy -fener bah -tight en -phil ia -ir oning -ry u -b ant -enqu ire -ca ir -abur ger -tru n -green berg -chau han -ir ina -sh ani -trend setter -pre tt -zaf ar -alo ve -v ici -pan ic -no o -lu stre -disrup ted -bal lis -son sof -mon si -inst ac -ake st -ëĭ ¤ -kw ame -horror movies -distric t -sau cy -mb an -ar mies -with drawn -med ics -loft us -er oom -be kind -ar ns -all on -un ison -davi ds -cr at -nicot ine -so or -sm x -on co -cospla ying -zombi es -har ms -e ger -ro sy -moon shine -fe in -ce tt -du brov -reg ents -ben itez -ðŁijıðŁı¼ ðŁijıðŁı¼ -ste c -m alia -prioriti ze -ic eland -ft se -v amo -lam ont -homo sexuality -bre es -regu i -cb p -te j -sky sports -deter gent -sha sta -de rel -conserv ancy -colori zed -accol ades -vis o -show your -nan ow -bice ps -us ability -bi m -dailys ketch -pearl jam -stran gest -mega deth -broad casts -bar ren -ar ton -chri ss -confi gu -lu res -is the -e ul -railway ana -global health -gi anni -u aap -s lum -consci ously -ab re -n up -bud get -v ada -e sch -real ness -er ased -th unt -be z -armist ice -ðŁij ¹ -sh run -o led -driver less -ðŁ¤· ðŁı»âĢįâĻĢï¸ı -won dr -sk an -sal aam -mother land -h wang -gen o -gang nam -tw right -endor sing -en ic -ador ation -pau sed -patric ks -do cked -plat te -ff xv -ethnic ity -auto show -side show -after life -re located -orphan ed -food network -dare to -and ra -sla ps 
-v live -swim s -re imagined -mist le -re vise -real ity -bhar ti -ðŁĴĻ ðŁĴĽ -late st -prou dest -gra sses -lan yard -fresh est -carcin oma -anom aly -zieg ler -sum ner -ly rix -gor g -is d -av el -swild life -me squ -john cena -euro league -sab er -master ful -yar ra -cogn ition -jacob son -abo lic -sir loin -shuk la -moj ito -su pere -st weet -me z -e sa -rudol f -gur a -where you -tt m -win s -trust worthy -ny k -bra den -table top -good food -es on -be k -lingui stic -gra ys -ch ath -h cs -mon i -de ans -cu ssions -ch ell -slo ws -he mi -d app -shar pie -boo sters -a os -str ack -se dona -mu eller -hard wick -or nate -thor a -sal ud -o twol -ch um -mi ho -for age -thel ittle -tear ful -ones elf -min dy -sm g -gmb h -emer ald -ðŁĶ´ âļªï¸ı -tu tti -recep tions -re vising -i brox -tope ka -sal ami -expan se -i books -dob son -cli o -at s -ðŁļ Į -mo ha -is ance -shu tters -moo t -jan ine -marvel comics -jor dani -pos er -kenne th -hy ung -de ja -ase ball -speci ality -eu ston -classic car -had ith -ðŁIJ ī -chas ing -iz o -gros ven -ag lia -thisdayin history -t row -om ile -hu ar -by n -sal ine -div ine -demon ic -ty ran -han dover -revit alization -pa ella -cryp tic -se dg -m end -dun kirk -bre d -wal d -sport scar -a ard -whe aton -da ener -k lan -br t -bakhta war -spi res -schu bert -ro ti -poli sh -o se -ag ame -wonder con -prote stant -bo sa -ðŁĺ Ł -d ü -joy ride -ger trude -âĿ Ŀ -gil a -v h -tw a -tra v -swal lowed -star ve -la in -ent ren -rei ki -su kh -cra ic -az u -web page -kee fe -hypo the -hir sch -hel le -camp ground -w amy -tra vi -sha hi -san deep -ru i -han uman -dw p -reposit ory -no or -no ff -un real -p ell -black history -har vick -ma scar -pay ee -pa sha -gastron omy -d ÃŃ -ai g -rosen thal -open day -embelli shed -t tip -sun bathing -go pack -end ome -ï¸ı # -invali d -final four -st fu -squish y -ra sta -mo sch -jam esc -die trich -sel a -mel b -el vi -t dp -sun i -sli t -j ha -bi za -spi ked -l li -l illard -vam pi -syno psis -az har 
-kendrick lamar -ĮãĤĬãģ ŁãģĦ -heart less -country file -air play -arrog ance -pre e -virtu oso -ãħłãħł ãħłãħł -raj u -le bu -for ward -tu g -dro s -mondaymotiv aton -concep cion -thel o -pad i -looo ol -ÑĢ Ð¾Ð´ -it ss -eth ical -end uro -__ : -expend iture -mon ste -mas king -terri ers -ib is -e mber -cu mple -punctu ation -pi per -ir vin -ade e -yy yyyy -flash backs -cel sius -don nie -bo gota -ben evol -the script -shil pa -pro se -fin dia -ze ke -ne ko -do ves -blues lyrix -fro sh -sowe to -mp lo -al ai -sab i -raq qa -wf tv -stro ller -ian somerhalder -ðŁĶ ª -an on -mo seley -! ?!? -sta king -mol y -car tri -c sg -ast or -transc end -ma er -de ux -cow girl -sas k -pun ter -ma ken -o ates -love tt -grow ler -sag in -v n -ssi ble -officeof rg -y mc -sab ar -faul ty -ap ha -ak on -ðŁij « -snow don -ae w -raise the -ðĿ ĵ -grue some -clement ine -sp ing -lat a -worlden viron -mi mic -can aria -bakhtawar bz -ao a -fal a -ãĤ Ń -avi va -you uuu -thi gh -la dders -gu mbo -tz ky -fu zz -plastic pollution -est ate -strength ened -k ant -dr in -cal vert -transform ational -frigh tened -mac lean -elited angerous -ear thy -t son -to da -j nu -.. 
, -mic hal -i ban -je ong -is real -sim coe -exclu sives -blue bells -ben e -te u -pil sner -pens ke -athe ists -m pu -cartag ena -ðŁĴĹ ðŁĴĹ -million aires -kk kk -it ar -subscri ptions -remo te -ma fi -hin ton -w cc -ho k -ds b -ab leton -sevent y -pun ks -e indhoven -sh one -mcfar lane -lim popo -empha si -à ¼ -sin fo -pe tre -man grove -ch ino -ber tie -play lists -push awards -p af -deb bie -c do -r ino -ðŁı¾ âĢįâĻĤï¸ı -fol ke -bon nar -th ine -sl an -hal ter -evi e -aw some -vul tures -spar ky -seiz ures -âľ Ķ -ram one -ine ffe -al n -pro ctor -ast ra -the voice -gro te -sci on -dead line -am aya -tain ted -patter ned -exce eding -cross fit -kay lee -drop box -ru shes -tack led -mo by -retro gamer -n cbd -benef itting -shay kh -guild hall -gen try -dream cast -dread ed -bun dled -th aw -revol ving -n pt -kylie jenner -imagin ative -ron i -over came -family time -ds burg -car naval -relation ship -recogni zable -cor oner -ho le -fan fic -emir ates -bur ritos -analy se -thin ner -ne es -galli poli -bl r -cat woman --- >> -au lt -ada ily -nau ghty -ili o -solit aire -mtv br -jocel yn -arun ach -rep ent -south gate -hy acin -essenti al -fent on -and um -it or -go pal -sl inger -po sei -aw il -wi elding -ra ila -eli as -a sto -à ¤ -tend ency -str ata -ker t -< - -im acele -da es -sti mulus -han ley -fit nes -ec stasy -lim ous -ha iling -ðŁ¤ Ń -chis wick -tar ies -sla v -pul i -moderni zation -black mail -b ingham -h fx -+ + -ðŁĩ®ðŁĩ ³ -ni v -we a -profess or -k off -bol ster -su ave -sequ ences -pepper oni -not te -dre n -ãģ¨ ç¹ĭãģ -hs v -o ga -ap tly -z ad -excel si -rin ka -mol dova -min n -ma bel -conferen cing -bas ing -of er -ob si -hamill himself -care less -brief ed -inhe rent -par ish -dub nation -town sville -sar awak -gee ky -doncaster isgreat -was abi -gu p -phen o -dra inthe -carrie underwood -ble eds -bbc world -ane w -alta f -dul wich -ani ston -w ti -sumat ra -gra fton -bl n -me ster -bode ga -re go -es q -an jo -sump tuous -mai sie -ï¿ ½ -wil t -jak 
ob -el vis -se pul -mu ster -air pollution -president e -happy monday -exten sively -fl ondon -t ls -play ing -pe ed -din ho -var dy -pi ka -n iro -au cus -ðŁį ¦ -nu ll -el ondon -juvent us -imag ines -dis ab -lit o -d ura -work places -promo te -mc caf -wood work -waw x -à® ª -tt ino -shar i -sem per -better together -ðŁijĬ ðŁı» -ze bra -pon dering -en chil -ho m -cosm ic -tan z -mo cked -ec cc -ath ed -abo lish -prop eller -paris agreement -assemb lies -indu stry -fraudul ent -pe sa -chang min -ax x -ðŁĴ µ -irr ational -cu sa -ramad han -octa via -on elove -jac ki -bar ak -taxi der -seri ous -nathan fillion -mc en -ch k -po part -grav ity -copp ola -reading fc -illu sions -j ig -ww x -re sh -ex porting -buzz ard -âĻ ¤ -p cm -lan apar -ko s -arom as -antal ya -ww dc -ven a -phil a -ball in -ðŁij Ħ -quin ta -ma o -f ery -eigh ty -sentim ents -safe guarding -r wa -pu ffs -luc ille -de cath -sl u -nu gent -de ter -braz il -ze iss -super bowl -subsi dy -alter n -hi dalgo -enz ymes -ä ½ -tag ne -hair dresser -adri en -walk out -oppo ses -can tina -bed side -af an -ðŁĶ Ĺ -prophe tic -dan es -un successful -super charged -pk k -exem ption -hart le -secu lar -cli pping -br s -united way -c net -pat chy -ha gan -e en -âļ ľ -var a -sym pathi -never trump -affir mation -om f -ny cfc -ma ja -sur ro -keer th -up scale -sandal wood -mon archy -kno bs -å ĭ -po tholes -hunger games -ter races -na sir -coun sell -welcome to -wa q -se aman -m ita -stun ningly -on theroad -in ability -) !! 
-bon go -ant v -sp ut -worldenviron mentday -resu sc -y td -fi m -eun hyuk -sa chin -rose anne -cler mont -ape c -am ina -v ening -n antes -al most -sin us -ex as -ty l -ti en -ple ad -lanc s -bur naby -re k -jo om -observ ers -disco graphy -cl g -âĻ ¦ -sn ack -r ti -o ily -crystal li -bru te -web development -topp ings -la f -an is -ad der -reli ving -car lin -battle of -we g -syri an -pon t -n dc -lagh ate -yu ma -sp p -p iti -ro bbing -mart ing -rey kja -raj put -nc ds -kie wicz -âĢ¢ âĢ¢ -vam pire -substan tially -opio ids -nepal i -k line -ar oo -under stand -lit t -u it -thro mbo -sar ies -qu ot -b alling -t tr -s gh -philip p -br ant -ac l -m ello -whit taker -. ; -defi ant -b gc -repl ying -mir ren -metamor pho -sch wab -bul ge -utili zed -pick ering -par don -d sa -à¸ Ī -doo ley -cumul ative -Ð » -ur gency -e mir -+ /- -¦ Ī -ot as -âı ³ -station ed -grape vine -ar ac -karan johar -f ancy -sau l -coo gs -lgbt q -ا٠ħ -jav i -u mmer -pl l -den is -dai pur -pu ffin -lewi sham -fand om -co pe -ves matter -s ve -hel pless -deo dor -ostr ich -kaz an -friday the -con dor -v x -sophom ores -rob les -cu tt -cli mbers -ë¦ ¬ -sle g -sn f -mac ys -hydr ating -grou pe -po yn -mou lin -hg tv -lmfa ooo -sulph ur -asdfghj kl -annab elle -hump back -bra ved -viswas am -multi purpose -hu midi -escor ted -barb ican -f ad -cor sa -ðŁ¤ « -pi ppa -here to -can y -ser gi -or cas -o vie -ed ou -s any -glob alization -man cini -food truck -f is -defi brill -sch re -sma fia -love wins -la ut -k aka -hol lande -game on -resurg ence -out side -olympi ad -int an -abstr action -rapi d -pal om -cal le -jas min -attack ers -swag g -mit ra -ky lo -à® ² -her mitage -gor do -e ira -so sfam -roll out -exc ite -sy nod -mer rill -c als -as sa -liveli hoods -ju ve -the black -gopack go -ant lers -alban ian -wool ly -qu iche -puri fication -are th -smar thome -ne k -all blacks -mex icans -is m -ger ms -comple xion -mar ck -u shi -ðŁIJ IJ -char l -ca stic -till erson -giuli ani -biode gradable -mal 
bec -bo is -ju bil -im es -r ame -gene tic -esp nu -ch ley -so ho -go pher -g sc -buu ren -cu be -bridesma ids -webin ars -to e -mani pur -viol ently -notic ias -ex changing -chi ev -replac eable -muay thai -bu ss -sp il -instal ment -div ya -cait lin -o lim -fil tering -whirl wind -sta red -prior it -pr am -pompe ii -mono logue -k ite -bu ka -â̦ .. -vac cine -bre ro -woz ni -sol ent -re ferr -my rt -gridi ron -galatasar ay -fro ze -clare mont -ðŁ¥ ĥ -victori as -ssel dorf -pa stures -net neutrality -ch or -ðŁij ģ -ಠ¿ -we ho -symp tom -jo sel -in ous -dragon con -power ball -p te -four thofjuly -ec la -ear buds -where abouts -salt life -depriv ation -ch ter -wi ggle -syste m -ps st -ch az -d any -ri mo -oax aca -lanapar rilla -barcel on -melanch oly -way back -ho tro -n si -l illy -kur o -ja han -intellec t -board game -ðŁı Ĭ -sneak peek -k prc -jail s -cand el -zan zi -mor timer -star ch -ra gs -p fa -long live -k art -gir ona -cro cker -christop h -precau tions -war ship -per m -paren t -van gogh -gif ford -allegh eny -ra yn -ut m -sten cil -rec alling -pen ney -z azzle -ìĥ Ŀ -hin ds -aren as -nu ev -law ler -gu in -do this -ðŁij ķ -ì¶ķ íķĺ -we g -ti b -ri din -complex es -turbul ent -pe sos -de marcus -vall arta -sam sun -kis ses -hein rich -deport es -wil ms -ur d -then ext -inki gayo -ho wi -fir sts -carri age -clean liness -mas war -is ch -ax el -si zzle -road house -fr ans -ent ourage -co bble -boo th -benedic t -tal on -fc u -year ofthe -ray on -raider nation -fo yle -ko val -pi anos -l pg -bur mese -man ure -geo caching -cosc ino -b np -fer ra -stro phy -mar ais -ce es -legen dof -kat niss -eno ch -av ed -you know -d prk -ðŁĺ¢ ðŁĺ¢ -sp un -pro st -sor rows -cent red -ke a -gal icia -? 
ðŁ¤Ķ -ÑĢод а -bou chard -ðŁĴĻ ðŁĴľ -yu i -seed lings -jon ah -reco vers -ny rd -board room -su ma -my japs -tun g -sha i -ir gc -eli o -wag ons -ka shi -polic emen -john nie -ale coscino -shop ify -dot ted -de tri -va w -to fficial -in your -chal mers -trac ed -no vi -by es -ari el -nipp on -la pel -gri ez -b gs -fool ing -d ita -vijay sethu -nm wx -as ot -kr anti -hel m -ve di -sic kest -mo chi -k abo -shru bs -he red -b sp -sq m -ham r -dul kar -anth a -nr f -avoid ance -at en -publi x -be arers -nas i -ha p -h ells -ðŁĸ ¥ -ภ· -thelast jedi -oh wx -ðŁį « -wa hoo -there se -rec aps -ss nhq -bird photography -v ay -pet ti -pau lo -bel vedere -( * -gr l -du vet -c pec -sa it -por sch -meas urable -avi ators -fre mantle -bre en -on om -me and -life saving -eu ref -en don -embar as -aira sia -el is -dun kin -star magic -s ill -porto bello -ki efer -ex e -mu ted -ãģ ¦ -we thepeople -logi a -liber al -theforce awakens -min ed -haun ts -freck les -care taker -s india -âķ IJ -dev lin -list on -direction er -oh n -fi garo -em manuel -du bois -cl ones -bru ise -ðŁİĪ ðŁİī -disin fe -der matology -as r -s watch -dis comfort -tam anna -pi day -mack en -k atic -delu sional -shaw nee -gu d -al bino -p ali -din gh -cucu mbers -coffe y -anticip ating -treas ured -web summit -shel tered -sav or -pedago gy -m gs -sh ma -s bu -den ali -cam pos -bubble gum -o ir -le aps -y ler -r one -sansk rit -min t -meat less -futuri st -du de -a vel -prote sted -squ ire -z aki -sz n -har court -cycl one -bour dain -gather ings -d ant -advent urer -parag on -alt man -dd ing -ban erjee -snorkel ing -mother well -mis sy -en der -glo ws -ki wis -chick pea -por o -e fron -app t -u y -speci fied -gab by -e strada -com bos -bour bon -vin i -var un -steph ani -key words -car vings -amit abh -wr ought -tw al -re els -clu bbing -ubi quit -cri t -ambed kar -æ Ļ -prun ing -vaccin ated -boe ing -s ks -lo ona -hypno sis -edel man -pho l -he w -colo sse -mckin sey -u on -to te -sacrific ing -ox i -n ang -e mu 
-пÑĢи ÑĢода -m th -kers wednesday -argu ed -timel apse -ris king -regul ating -ni gh -likeli hood -cu bic -au ction -rein for -pi stor -no ses -ye l -snu ggles -pe i -jean ette -ta ku -ri th -guy z -ภŀ -y te -ver ted -pay soff -jau regui -hoo ligans -procedu ral -mi b -har dy -el eng -chec kers -all ine -the met -prou dof -keerth yofficial -collabor ator -ni u -infl icted -adv ani -re twee -memor iam -f icial -ti ghter -sal em -re viewers -br ics -ben digo -am ell -tur kish -sush maswar -paul son -pal awan -mol lie -stitch er -s burgh -ir u -hay dn -en ers -aro a -u zzi -saraj evo -hel a -apol lo -nine ty -vac a -sp on -vent u -jel ena -hei fer -avo ids -sp ine -pri ze -mar ist -re creating -me de -woo den -find lay -ro fl -n di -compreh end -yu go -y ü -to work -u fos -son ar -pi ston -recor ding -tent ative -art forsale -pel lets -fre do -ÙĪ Ø± -mu ses -custom ization -pro found -is ner -ide ally -si am -plan kton -cm dr -man ger -fran ken -customiz able -ठ® -walk away -swi vel -vast ly -no ton -lex a -ex moor -z as -tan te -reduc tions -lol ly -hip sters -benef ited -ë ² -ww www -mascul ine -fi ji -dre y -ph ill -ane ous -nic ol -men dez -disapp ro -ch ner -through s -shen mue -east man -ðŁIJ İ -yu ck -under tale -re ys -go beavs -eng en -c na -mer r -bir k -ãģ¨ç¹ĭãģ ĮãĤĬãģŁãģĦ -âĥ£ @ -yn na -ste ed -offen der -at um -vani shing -presi denti -love them -g nocchi -fri ggin -per il -mad hya -ag ne -dee jay -mar nock -m tb -fold able -@ ___ -stand re -bron x -bow ski -fin ite -cro ckett -b sf -ge tit -seren awilliams -mir o -ignati us -sla y -rin se -fon due -sel dom -s more -gan i -dy ce -dmit ry -cru mb -late post -pri mark -oh ana -flor als -do a -remembrance day -d ds -azi one -toon ami -air port -æĿ ± -th ad -fi st -dine sh -dr who -ad words -admi rer -pro je -kyrgy z -à « -manife station -le wan -j ic -thi bau -le ased -van ity -nouri shed -never theless -aug mente -fu elled -che ad -wil shere -ru di -p z -my co -mor ro -herbali fe -hardro ck -de man -dre 
ality -sp ades -ce vic -bha i -bar on -ultimat efan -hou news -to bi -stru t -ke el -affili ation -the masters -sm al -hu e -este ban -con v -om nic -datab ases -co v -ter ti -st g -snoop dogg -metab ol -leth bridge -ðŁı» âĢįâĻĢï¸ı -year ling -residente vil -nws l -iy aki -griez mann -c ous -ðŁĵĿ : -tor ian -sam i -ðŁĶ¥ðŁĶ¥ ðŁĶ¥ðŁĶ¥ðŁĶ¥ -g are -alli ances -whit field -we ther -refin ing -coy i -kra ken -ðŁĺĺ âĿ¤ -singul arity -lil i -h ns -bol dand -waw rinka -misogy ny -lo vers -c q -b dg -ad ona -gar ter -women of -sc d -recogn ising -mun a -str ou -sign alling -lare do -hell boy -alek sand -un available -pedi atric -as in -mer ia -ri shi -futuri sm -w ye -polari zed -e we -pro pel -in forms -cre ase -~ " -arti ston -like for -heidel berg -er ra -life in -len ny -inter rupt -cohe rent -ca z -vick ers -le veled -f bs -cab ins -bu mmed -apost les -we h -ten don -souven irs -infu ri -pier ce -asse t -m las -go th -di ggin -ann as -yl or -th waite -sw el -pan era -mur derers -croo ked -bs go -ac u -a on -re an -one of -ko hl -bloo dh -pest icide -lost dog -fle xing -ëĤ ĺ -su pra -eter nally -ðŁļ Ļ -pa olo -ol an -mom o -is elle -captain marvel -s lou -mistak enly -akhi lesh -mer t -il inan -bu on -bal kan -mir ro -mill en -der ail -dam on -tit i -bi os -re don -pic ard -par te -ðŁ¤ Ł -Ø º -son ics -fir sth -dd c -veg ans -tur ban -ni gan -lot tie -lyn don -star buck -pink floyd -life styles -am ara -a she -r sc -val a -sm er -cw gc -cli ent -buen as -jag an -coo ps -ðŁijij ðŁijij -speci alizes -snag ged -g lar -ben net -wildlife wednesday -bow den -pi k -art in -empor ium -ar l -re ba -pas ser -disappo ints -additi ve -âľĬ ðŁı½ -bay er -missou la -ha skell -comm ences -ni x -ne man -explo ited -plastic surgery -cc d -aso cial -vo t -sie gel -fro ome -kap am -far a -e ha -pro bes -mw f -meet ing -p bb -ak ins -mistle toe -kingdom hearts -for kids -ec r -bal e -escor ts -adidas originals -k wa -k ts -hallo ffame -ðŁĺį . 
-wag s -pot ted -o wing -honey comb -he fty -uro logy -mer le -b pd -stri pping -re ich -k state -gu ay -yon ge -shak ti -g loom -bat t -son om -n ery -el ba -blan ks -hel le -triple ts -bom bay -ak arta -ab ia -transm itted -rol f -ja is -angular js -fi erc -m ss -trac e -ॠĩ -tom bs -old man -kom bucha -fo l -e health -cere als -are lli -in ari -ðŁĴ © -wo l -liber ties -fa wn -af firm -nun avut -hyster ical -k drama -art es -âĢ¢âĢ¢âĢ¢âĢ¢ âĢ¢âĢ¢âĢ¢âĢ¢ -valent in -man slaughter -gal es -eo in -energi zed -del s -with draws -st les -sar castic -ram esh -incredi bles -lock hart -ya wn -ultimatefan live -oooooooo oooooooo -mu en -guru dev -te er -pe eling -new snow -lingui stics -direc tv -ag end -uni lever -ru ger -han dedly -ero se -li mel -the c -royal ties -fini shers -nr g -m gt -fid get -com ps -bac on -aggre ssively -ab it -ch â -tar de -slu gger -q anda -gre ening -d ats -ensla ved -spec tor -o ye -fre ef -b hand -stop brexit -mis conceptions -cav a -ðŁĺįðŁĺįðŁĺįðŁĺį ðŁĺįðŁĺįðŁĺįðŁĺį -multit asking -hou sel -ferre ira -cen time -ank les -jo dh -hel ly -fro me -out tuesday -nar nia -bal aji -l bloggers -jyo ti -ðŁį ĩ -lan cia -cap ri -y ap -nat ash -down fall -." 
âĢĶ -à ® -ligam ent -coat ings -ai ded -hi ko -fall ing -encryp ted -yeg food -infringe ment -cu di -ce p -ðŁĺį ðŁĺĤ -tra d -super rugby -ed win -wh iche -vi meo -lay ne -in vigor -he he -dubrov nik -bie ber -u tr -sham an -op ers -ham ill -en ig -di f -ar um -scrap book -min h -diver gence -mckin non -life time -guter res -wil le -ple as -patt y -mic ron -k z -dom aine -ru sher -m ds -ches ney -screw driver -âģ© , -sle dge -hau er -chan a -stam ina -sprink ler -pl n -he ff -bol ton -om on -car rington -accor dion -jor ge -inter ception -in puts -gu ll -tran scription -vanu atu -it ical -eth os -tic h -spac ey -pee king -u mi -ha ger -psycho tic -illi an -illi a -bonnar oo -an ese -pu c -laghate parth -en hall -econom ical -dre dge -% - -u we -tu bular -scoun cil -pe asants -fl er -tumb ler -he p -ford ham -row ley -initi als -ev asion -er nation -plu gins -coch ran -c attle -acid ity -ðŁİĬ ðŁİī -re grann -jump man -ef ace -x ma -patri archy -esco bar -cristi an -tip ton -nu eva -hack ney -back seat -kill arney -aid an -sta dion -simul taneous -ida ho -a je -u th -figu re -clo s -bur k -volun tar -rec ite -macfar lane -cur few -bou do -w gn -sti x -sla p -scrat ched -philli p -jour ne -ex pelled -wa z -u ke -tati ana -ou e -ho pp -dimit ri -ðŁĵ £ -mato logist -electri fying -blu ffs -bill smafia -az cardinals -y aa -x mas -shar a -r ith -g ills -dre s -bar ton -authori zation -imperi alism -home of -to do -foot path -band width -visit spain -moh sin -erup ted -mi ki -insig nia -mike l -ss h -ger a -bank holiday -aw an -t weak -star craft -e al -construc tion -skelet ons -le ep -ine m -bar clay -ship wreck -monsi eur -yo h -ron t -form ative -ser o -le p -horse man -hoo sier -haz mat -cylin ders -cen ti -ðŁĴ¥ðŁĴ¥ ðŁĴ¥ -re em -na ire -mus ically -gras shopper -est onian -termin ology -ro main -blogger rt -tox in -stan ce -cultiv ated -an ast -ðŁIJ į -shi mano -go pher -ene i -recycla ble -gam ification -fight for -c q -avoc ados -ke ys -eli ke -gly cer -shak ur 
-mobili zation -gal ley -expla in -ex changed -pe th -obe dience -illa ge -en nis -ãĥ ŀ -wi v -walla bies -ma ar -ig ers -fin tech -fin alized -wo j -meaning less -in field -onna ise -e et -bron te -pass ages -ðŁij § -strick land -northern lights -lom ond -h tc -wr ay -shi fter -di alog -ðŁį į ->> >>>> -te atime -ste ch -sic huan -qu ill -fran ca -comple mentary -bar rington -marcu s -mal am -goo oo -for sa -elec tra -af s -âĹ Ĩ -tri fe -sn azzy -fo lia -and olan -after dark -wood son -stra de -litt lest -o gun -con wy -co wards -ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤðŁĺĤ -íĬ ¸ -se ul -mur phy -dun ks -kapil shar -jo achim -wom ack -equal ity -aver ages -a ine -ðŁ¦ Ī -tac ular -dis ability -u ked -mid century -bar thol -teas ers -tab ern -nj caa -sp out -op i -ku bball -bl om -so ar -popu lism -meth yl -ðŁijĬ ðŁı¼ -o spre -alo ils -ðŁĵ ĸ -ðŁĮ ļ -x er -sp illing -publ ica -car dam -adi sh -sa cha -p kg -bu da -lyric ist -i bc -gru mp -ho ver -hal ep -anti body -anem one -âĻ¥âĻ¥ âĻ¥âĻ¥ -m cl -litho graph -cc u -s fest -path ic -calli ster -otta wa -gun sn -rut ger -hali but -en vision -differenti ate -ðŁļĢ ðŁļĢ -pir an -lat el -uc n -trou bad -ra ine -fierc ely -learn english -lea se -wex mondays -em it -dray ton -bur rell -scuba diving -hol ler -dr u -clo cked -w ral -ap ro -trans lucent -w bo -patri arch -mo ja -lan nister -fish ery -ne derland -mil dly -mi rai -ma ko -ja p -ðŁĺ©ðŁĺ© ðŁĺ© -pro statec -p anna -ar ama -under taking -tomp kins -ne op -soli ds -sav oury -e ames -cut lery -wood bridge -steam er -ri zzo -wild cat -rat na -lamin ated -kin eni -jal ap -ai des -acknowle dges -?! ?!?! -! 
ðŁİī -w afc -mag gio -ha ves -dar je -of i -gr il -v asi -bru x -mo hd -fake speare -arn old -r mb -for be -wal leye -ro di -therapeu tics -strate gi -ob ste -mu dder -download able -dd ings -d ca -asi angames -campe on -appropri ation -th century -ram atta -dra ped -bul lion -mu c -one x -se greg -ophel ia -bod ily -âĿ¤ ðŁĺį -wi zar -te ased -ade my -to id -sur a -lazar us -sn ickers -ma se -lo h -bow ed -bibli o -x change -har lan -gho shal -flavor ful -bha gat -alle z -whiche ver -ten stein -disc er -organ iser -mt g -dream liner -t se -hok kaido -mo k -indulg ent -hick man -blin ded -al yn -aaa ah -sp ool -lough borough -inter pret -et v -aristo tle -optimi zing -avici i -madu rai -ju li -naw az -mat chups -ab ide -paint ing -w elling -vel i -octag on -in scribed -po king -plac er -life cycle -kili g -g sp -eli ves -cle ments -na sheed -me sut -incarcer ated -dist illed -wal ang -delic acy -del gado -che z -ch ita -ad ero -tu x -pati l -o do -abh cosmetics -tv c -p bc -in accurate -hardwork paysoff -ball er -quot ation -merchandi sing -ga stri -defen ses -dro gba -bex hill -ban kno -win ona -si eg -p gs -hahah ha -agu chi -su bram -mirac le -de sch -li bre -ba cher -ent ine -bbcra di -lou dest -r ps -pi erc -fr yer -storm trooper -rafael nadal -pas co -exhau stion -epic onetsy -rc tid -kel lie -ga ines -d bz -sm riti -s bridge -lim ited -cla w -technic al -bio graphical -ado red -ภ° -exclu de -ac adia -key boards -fur man -so ca -sur u -ni ps -sw aps -server less -run e -pu ffy -north ampton -nish ings -hen der -cartri dges -gun shot -ðŁĵ ¹ -fil ament -respon dents -pey ton -mountaine er -mer ging -life span -intimid ation -p afc -nl wx -expan sive -pur r -f ck -ca e -at ti -tele thon -so hn -mend el -lo pes -dor i -un broken -te red -tast ings -in active -disin tegr -t assel -share the -pi ano -is lay -air space -z awa -ricci ardo -ming ton -fresh er -cur ry -re vs -pharo ah -h mv -exhilar ating -wh oo -lin kin -kri spy -competen cy -ste wards -ne bu -kat su 
-ad mins -baz ar -as ar -giving back -s summit -song z -lin us -raj kumar -farm ington -fanta sia -ðŁĺ´ ðŁĺ´ -so bri -lis se -barry more -pri sm -blo b -sen ew -mono xide -exp ire -eigh teen -di pper -xi ao -kil t -hin ch -bbc sport -bam boo -p ter -ex al -ðŁ¦ ĭ -ham lin -expe ditions -star gazing -food security -wy lie -ul f -st ingly -on storm -lo eb -bro ome -bn ha -pancre atic -eli ve -!!!!!!!! !!! -ther apper -ortho pedic -avengers endgame -antit rust -ìļ ° -go te -om d -off side -gy llen -win eries -white water -ad l -lu pita -exce eds -consi sted -chew bacca -ash leigh -nhl jets -is san -sh ld -hay at -cran berries -ð٤ĺ ðŁı½ -rock the -spring training -fall out -dairy free -wa j -un decided -so wn -rc n -north wales -htt r -fu mble -d its -comp elled -popu list -min ted -blan chett -. '' -pro pulsion -m illa -au berg -her tz -h ta -u daipur -serendip ity -azte cs -als ace -ðŁIJ ij -lu n -sho es -char li -gar za -ðŁĴ Ł -pro biotics -fox tv -ol is -mi ff -loc alized -diffu ser -si gue -fun ko -rend ous -ðŁĴ ij -jeky ll diff --git a/diffsynth/tokenizer_configs/flux/tokenizer_1/special_tokens_map.json b/diffsynth/tokenizer_configs/flux/tokenizer_1/special_tokens_map.json deleted file mode 100644 index cf0682d..0000000 --- a/diffsynth/tokenizer_configs/flux/tokenizer_1/special_tokens_map.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "bos_token": { - "content": "<|startoftext|>", - "lstrip": false, - "normalized": true, - "rstrip": false, - "single_word": false - }, - "eos_token": { - "content": "<|endoftext|>", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false - }, - "pad_token": { - "content": "<|endoftext|>", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false - }, - "unk_token": { - "content": "<|endoftext|>", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false - } -} diff --git a/diffsynth/tokenizer_configs/flux/tokenizer_1/tokenizer_config.json 
b/diffsynth/tokenizer_configs/flux/tokenizer_1/tokenizer_config.json deleted file mode 100644 index 180a4e1..0000000 --- a/diffsynth/tokenizer_configs/flux/tokenizer_1/tokenizer_config.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "add_prefix_space": false, - "added_tokens_decoder": { - "49406": { - "content": "<|startoftext|>", - "lstrip": false, - "normalized": true, - "rstrip": false, - "single_word": false, - "special": true - }, - "49407": { - "content": "<|endoftext|>", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - } - }, - "bos_token": "<|startoftext|>", - "clean_up_tokenization_spaces": true, - "do_lower_case": true, - "eos_token": "<|endoftext|>", - "errors": "replace", - "model_max_length": 77, - "pad_token": "<|endoftext|>", - "tokenizer_class": "CLIPTokenizer", - "unk_token": "<|endoftext|>" -} diff --git a/diffsynth/tokenizer_configs/flux/tokenizer_1/vocab.json b/diffsynth/tokenizer_configs/flux/tokenizer_1/vocab.json deleted file mode 100644 index 469be27..0000000 --- a/diffsynth/tokenizer_configs/flux/tokenizer_1/vocab.json +++ /dev/null @@ -1,49410 +0,0 @@ -{ - "!": 0, - "!!": 1443, - "!!!": 11194, - "!!!!": 4003, - "!!!!!!!!": 11281, - "!!!!!!!!!!!!!!!!": 30146, - "!!!!!!!!!!!": 49339, - "!!!!!!!!!!": 35579, - "!!!!!!!!!": 28560, - "!!!!!!!!": 21622, - "!!!!!!!": 15203, - "!!!!!!": 9168, - "!!!!!": 5203, - "!!!!": 2360, - "!!!\"": 28048, - "!!!)": 42532, - "!!!": 995, - "!!\"": 20556, - "!!#": 34997, - "!!)": 28352, - "!!": 748, - "!!@": 40705, - "!\"": 2947, - "!\"@": 43819, - "!#": 9670, - "!'": 13222, - "!),": 37904, - "!).": 26225, - "!)": 4571, - "!*": 37737, - "!,": 29325, - "!-": 43499, - "!...": 22121, - "!..": 35475, - "!.": 22517, - "!:)": 31671, - "!:": 17545, - "!": 256, - "!?!": 29767, - "!?!?": 47081, - "!?": 6004, - "!@": 15117, - "!]": 34466, - "!â̦": 35068, - "!âĿ¤ï¸ı": 32559, - "!ðŁİī": 49085, - "!ðŁĺĬ": 43434, - "!ðŁĺį": 36438, - "\"": 1, - "\"!": 10377, - "\"\"": 41530, - 
"\"\"\"": 25539, - "\"\"": 8575, - "\"#": 8345, - "\"'": 31065, - "\"(": 32741, - "\")": 13112, - "\",": 4332, - "\"-": 9375, - "\"....": 37785, - "\"...": 9049, - "\"..": 25403, - "\".": 2811, - "\"/": 39486, - "\":": 7811, - "\";": 37549, - "\"": 257, - "\"?": 11727, - "\"@": 1512, - "\"@_": 20236, - "\"[": 36930, - "\"â̦": 33993, - "\"âĢĶ": 41151, - "#": 2, - "##": 15483, - "#...": 31491, - "#:": 30144, - "#": 258, - "#@": 35062, - "#â̦": 12834, - "#âĢİ": 34262, - "$": 3, - "$$": 24233, - "$$$": 31859, - "$$": 14929, - "$)": 39460, - "$.": 34682, - "$": 259, - "%": 4, - "%!": 35070, - "%),": 37819, - "%)": 16063, - "%,": 14505, - "%-": 48784, - "%.": 12475, - "%;": 33379, - "%": 260, - "&": 5, - "&&": 27791, - "&": 261, - "'": 6, - "'!": 13781, - "'\"": 19479, - "'#": 15319, - "''": 46594, - "''": 8445, - "')": 19175, - "',": 5662, - "'-": 26152, - "'...": 20474, - "'.": 4645, - "':": 7182, - "';": 44517, - "'": 262, - "'?": 17242, - "'@": 26397, - "'d": 1896, - "'ll": 1342, - "'m": 880, - "'re": 982, - "'s": 568, - "'t": 713, - "'ve": 1200, - "'â̦": 42120, - "(": 7, - "(!)": 30253, - "(\"": 18741, - "(#": 6229, - "($)": 46597, - "($": 15186, - "(&": 15042, - "('": 18235, - "((": 22944, - "(((": 33287, - "((": 13796, - "().": 41737, - "()": 8475, - "(*": 48004, - "(*": 39575, - "(+": 12903, - "(-": 20228, - "(...": 45159, - "(.": 43055, - "(:": 8528, - "(;": 23983, - "(": 263, - "(?)": 22885, - "(@": 2181, - "(£": 33987, - "(©": 44886, - "(ðŁĵ·:": 34610, - "(ðŁĵ·": 37999, - "(ðŁĵ¸:": 44422, - "(ðŁĵ¸": 45204, - ")": 8, - ")!!": 47518, - ")!": 7805, - ")\"": 13046, - ")#": 39981, - ")'": 23613, - ")(": 27956, - "))": 13720, - "))))": 42911, - "))))": 34181, - ")))": 18305, - "))": 5167, - "),": 2361, - ")-": 19034, - ")...": 15274, - ")..": 41822, - ").": 1818, - ")/": 26616, - "):": 4143, - ");": 19686, - ")": 264, - ")?": 18765, - ")@": 41928, - ")_/": 45028, - ")_/¯": 45781, - ")â̦": 41844, - "*": 9, - "*)": 30956, - "**": 9825, - "****": 21326, - "********": 
42974, - "*****": 43571, - "****": 25167, - "***": 7829, - "**": 4441, - "*,": 41895, - "*-*": 23568, - "*.": 31304, - "*": 265, - "*_*": 44535, - "+": 10, - "+)": 34810, - "++": 47298, - "+++": 35986, - "++": 19056, - "+,": 35885, - "+.": 25238, - "+/-": 47614, - "+": 266, - ",": 11, - ",\"": 3823, - ",#": 11215, - ",&": 26905, - ",'": 10599, - ",)": 44493, - ",,": 21340, - ",,,,": 33225, - ",,,": 14811, - ",,": 8844, - ",-": 29821, - ",...": 20365, - ",.": 41277, - ",": 267, - ",@": 13975, - ",â̦": 14601, - "-": 12, - "-\"": 18646, - "-#": 10151, - "-$": 24946, - "-'": 28010, - "-(": 33345, - "-)": 3535, - "-*": 21527, - "--": 2154, - "----": 5753, - "--------": 11772, - "----------------": 23122, - "----": 30164, - "---->": 35999, - "---": 11079, - "--->": 14518, - "--": 2432, - "-->": 6422, - "-->>": 47252, - "-.-": 32765, - "-...": 43147, - "-.": 44040, - "-": 268, - "->": 5081, - "-@": 10087, - "-_-": 27227, - "-__": 42718, - "-â̦": 30047, - ".": 13, - ".!!": 37805, - ".!": 14030, - ".\"": 18650, - ".\"-": 21234, - ".\"": 1081, - ".\"âĢĶ": 48703, - ".#": 5014, - ".'\"": 41558, - ".''": 49379, - ".'": 5938, - ".(": 22294, - ".)": 5376, - ".*": 26145, - ".,": 5276, - ".-": 12481, - "..": 608, - "..!!": 23707, - "..!": 17994, - "..\"": 15229, - "..#": 15735, - "..,": 47143, - "...": 3002, - "...!!!": 38351, - "...!!": 39915, - "...!": 16860, - "...\"": 5240, - "...#": 8195, - "...&": 44979, - "...'": 23167, - "...(": 37981, - "...)": 14040, - "...,": 42717, - "....": 2386, - "....\"": 26689, - "....#": 20346, - ".....": 34151, - ".....#": 38867, - "........": 8246, - "................": 24855, - "............": 42965, - "...........": 35008, - "..........": 25526, - ".........": 19881, - "........": 14720, - ".......": 9917, - "......": 5590, - ".....": 3104, - "....": 1390, - "....@": 29790, - "...:": 34570, - "...": 678, - "...?": 16388, - "...@": 12672, - "..": 852, - "..?": 23875, - "..@": 21124, - "./": 31975, - ".:": 15811, - ".;": 47596, - ".": 269, - 
".<": 29442, - ".?": 29294, - ".@": 1230, - ".]": 33511, - ".~": 42651, - ".â̦": 18047, - ".âĿ¤ï¸ı": 39085, - ".âłĢ": 30097, - ".ðŁĺĤ": 46580, - "/": 14, - "/#": 13217, - "/$": 36266, - "/-": 19811, - "/.": 39382, - "//": 15348, - "////": 46271, - "///": 22734, - "//": 3502, - "/": 270, - "/@": 8216, - "0": 15, - "0": 271, - "1": 16, - "1": 272, - "2": 17, - "2": 273, - "3": 18, - "3": 274, - "4": 19, - "4": 275, - "5": 20, - "5": 276, - "6": 21, - "6": 277, - "7": 22, - "7": 278, - "8": 23, - "8": 279, - "9": 24, - "9": 280, - ":": 25, - ":\"": 29498, - ":\")": 46432, - ":\"": 12089, - ":#": 26625, - ":$": 33769, - ":'": 8017, - ":'(": 21250, - ":')": 10701, - ":'": 23851, - ":((": 42496, - ":(": 5965, - ":)": 11070, - ":))))": 42339, - ":)))": 21840, - ":))": 10164, - ":).": 39010, - ":)": 1408, - ":*": 12617, - ":-": 13021, - ":-(": 25137, - ":-)": 4223, - ":-": 10323, - ":...": 42140, - "://": 12441, - ":/": 13604, - "::": 33077, - ":::": 43818, - "::": 9788, - ":": 281, - ":>": 39677, - ":@": 14339, - ":]": 43486, - ":|": 45986, - ":â̦": 22365, - ";": 26, - ";))": 41873, - ";)": 3661, - ";-": 35657, - ";-)": 10475, - ";;": 34824, - ";;": 24492, - ";": 282, - "<": 27, - "<-": 47280, - "": 34308, - "<<": 24588, - "<": 283, - "<<": 16482, - "<<<": 35054, - "<|endoftext|>": 49407, - "<|startoftext|>": 49406, - "=": 28, - "=))": 39587, - "=)": 17840, - "=": 284, - "==": 11748, - "====": 21734, - "========": 38952, - "==>": 29688, - "=>": 9714, - ">": 29, - ">.<": 38507, - ">:": 36196, - ">": 285, - "><": 28015, - ">>": 8270, - ">>": 2988, - ">>>": 6395, - ">>>>": 18461, - ">>>>": 18435, - ">>>>>": 32972, - ">>>>>>": 48947, - ">>>>>>>>": 41947, - ">_": 44144, - "?": 30, - "?!": 9785, - "?!!": 25342, - "?!\"": 29315, - "?!": 2835, - "?!?!": 16349, - "?!?!?!": 49084, - "?!?!?": 37619, - "?!?": 11395, - "?\"": 3283, - "?#": 24018, - "?'": 13610, - "?)": 9626, - "?,": 41628, - "?...": 22641, - "?..": 43905, - "?.": 41251, - "?:": 21067, - "?": 286, - "??": 5195, - 
"??!!": 43219, - "??!": 37341, - "??\"": 44996, - "??": 2197, - "???": 40017, - "???": 3824, - "????": 15936, - "????": 10362, - "?????": 21370, - "??????": 34589, - "????????": 45091, - "?@": 29258, - "?ð٤Ķ": 47928, - "@": 31, - "@#": 39397, - "@.": 43730, - "@/": 28639, - "@": 287, - "@@": 30314, - "@_": 2692, - "@__": 17042, - "@___": 48308, - "A": 32, - "A": 288, - "B": 33, - "B": 289, - "C": 34, - "C": 290, - "D": 35, - "D": 291, - "E": 36, - "E": 292, - "F": 37, - "F": 293, - "G": 38, - "G": 294, - "H": 39, - "H": 295, - "I": 40, - "I": 296, - "J": 41, - "J": 297, - "K": 42, - "K": 298, - "L": 43, - "L": 299, - "M": 44, - "M": 300, - "N": 45, - "N": 301, - "O": 46, - "O": 302, - "P": 47, - "P": 303, - "Q": 48, - "Q": 304, - "R": 49, - "R": 305, - "S": 50, - "S": 306, - "T": 51, - "T": 307, - "U": 52, - "U": 308, - "V": 53, - "V": 309, - "W": 54, - "W": 310, - "X": 55, - "X": 311, - "Y": 56, - "Y": 312, - "Z": 57, - "Z": 313, - "[": 58, - "[#": 11115, - "[...": 39975, - "[...]": 43790, - "[": 314, - "[@": 15148, - "[]": 22240, - "\\": 59, - "\\'": 41239, - "\\": 315, - "]": 60, - "]\"": 39434, - "],": 34067, - "].": 26262, - "]:": 21641, - "]": 316, - "][#": 39009, - "][": 29329, - "^": 61, - "^)": 30720, - "^-": 43516, - "^.": 31552, - "^.^": 35791, - "^": 317, - "^^": 34454, - "^^": 9064, - "^_": 14423, - "^_^": 15995, - "_": 62, - "_'": 44701, - "_(": 36951, - "_)": 37393, - "_*": 36237, - "_,": 31417, - "_-": 23193, - "_.": 26841, - "_/": 37647, - "_:": 13109, - "_": 318, - "__": 2355, - "__:": 47043, - "__": 3838, - "___": 43812, - "___": 13530, - "____": 4727, - "____": 25350, - "_____": 38803, - "________": 9549, - "________________": 20115, - "`": 63, - "`": 319, - "a": 64, - "a": 320, - "aa": 1821, - "aa": 3894, - "aaa": 14376, - "aaa": 9583, - "aaaa": 6727, - "aaaa": 19336, - "aaaaa": 31095, - "aaaaaa": 44413, - "aaaaaaaa": 23126, - "aaaah": 49151, - "aaah": 35856, - "aaay": 37846, - "aab": 34108, - "aac": 23251, - "aac": 11346, - "aad": 20464, - 
"aad": 35894, - "aaf": 37638, - "aaf": 31534, - "aag": 42174, - "aah": 28990, - "aaj": 28727, - "aaj": 43411, - "aak": 37739, - "aal": 22268, - "aal": 30208, - "aali": 27896, - "aaliyah": 46577, - "aam": 12943, - "aam": 22775, - "aama": 45018, - "aamaadmi": 45563, - "aamaadmiparty": 46406, - "aamir": 27456, - "aan": 20705, - "aan": 13426, - "aand": 38054, - "aap": 12023, - "aap": 12052, - "aapl": 34516, - "aar": 4695, - "aar": 13234, - "aard": 46932, - "aaron": 13948, - "aaron": 7709, - "aas": 28542, - "aas": 32205, - "aat": 34018, - "aat": 35004, - "aau": 35426, - "aay": 38281, - "aay": 40249, - "aaz": 26770, - "ab": 596, - "ab": 3937, - "aba": 44204, - "aba": 11102, - "abad": 33444, - "abad": 7155, - "aban": 41662, - "aband": 8595, - "abandon": 28805, - "abandoned": 11227, - "abar": 17860, - "abar": 39805, - "abas": 25402, - "abay": 43542, - "abb": 38954, - "abb": 38297, - "abba": 30870, - "abbas": 37494, - "abbas": 24412, - "abbey": 31927, - "abbey": 10132, - "abbie": 39949, - "abbo": 13536, - "abbot": 44046, - "abbott": 43737, - "abbott": 15649, - "abbrevi": 44843, - "abby": 30586, - "abby": 14694, - "abc": 13137, - "abc": 5334, - "abcnews": 31566, - "abd": 44093, - "abdel": 46511, - "abdomin": 35335, - "abdominal": 39328, - "abdu": 13361, - "abduc": 17884, - "abducted": 31520, - "abduction": 36984, - "abdul": 14227, - "abdul": 15593, - "abdullah": 21317, - "abe": 15856, - "abe": 12734, - "abee": 36037, - "abel": 31938, - "abel": 25318, - "abella": 46156, - "aben": 40865, - "aber": 7828, - "aber": 41867, - "aberdeen": 30539, - "aberdeen": 17236, - "abh": 27484, - "abh": 33649, - "abhcosmetics": 49189, - "abhi": 18113, - "abhin": 44045, - "abhishek": 44502, - "abi": 16867, - "abi": 14161, - "abia": 48604, - "abide": 49163, - "abig": 20863, - "abigail": 25686, - "abil": 21135, - "abilities": 8724, - "ability": 35146, - "ability": 3024, - "abit": 48668, - "ablanc": 33716, - "able": 10102, - "able": 863, - "abled": 10655, - "ableg": 24055, - "ables": 8486, - 
"ableton": 47169, - "ably": 6748, - "abnormal": 40934, - "abo": 2889, - "abo": 21861, - "aboard": 11661, - "abol": 31768, - "abolic": 46827, - "abolish": 47403, - "aboo": 42433, - "abor": 8416, - "aboriginal": 20422, - "abortion": 12336, - "abortions": 43218, - "aboss": 46401, - "abou": 36455, - "abou": 44053, - "abound": 41037, - "abour": 46637, - "about": 20204, - "about": 781, - "abouts": 36339, - "above": 35019, - "above": 4348, - "aboy": 37077, - "abpoli": 44779, - "abq": 38767, - "abr": 44932, - "abra": 10694, - "abra": 35087, - "abraham": 40623, - "abraham": 15869, - "abram": 33255, - "abrams": 29852, - "abre": 22472, - "abre": 46756, - "abri": 28605, - "abridged": 45333, - "abroad": 11253, - "abru": 46295, - "abs": 18431, - "abs": 11109, - "absc": 25389, - "abscbn": 44260, - "abscbn": 45810, - "absen": 32453, - "absence": 19240, - "absent": 30363, - "absol": 4624, - "absolu": 7055, - "absolut": 4666, - "absolute": 7501, - "absolutely": 4703, - "absor": 14303, - "absorb": 35806, - "absorbed": 45059, - "absorbing": 46412, - "absorption": 42210, - "abstr": 7530, - "abstract": 23885, - "abstract": 10197, - "abstractart": 31170, - "abstraction": 47696, - "abstracts": 40065, - "absur": 21639, - "absurd": 29757, - "abt": 9850, - "abu": 9167, - "abu": 11787, - "abud": 20180, - "abudha": 21450, - "abudhabi": 25256, - "abuja": 23371, - "abun": 20544, - "abundance": 23236, - "abundant": 31611, - "abur": 23377, - "aburger": 46660, - "abuse": 7678, - "abused": 23855, - "abuses": 37132, - "abusing": 36558, - "abusive": 26858, - "abv": 34172, - "aby": 16342, - "aby": 31378, - "abyss": 33632, - "abz": 42292, - "ac": 546, - "ac": 2816, - "aca": 9213, - "acab": 41388, - "acacia": 44047, - "acad": 32537, - "acade": 2892, - "academia": 22662, - "academic": 31178, - "academic": 7935, - "academics": 26417, - "academies": 42569, - "academy": 29968, - "academy": 4041, - "acadi": 41455, - "acadia": 49236, - "acam": 26172, - "acan": 42227, - "acan": 26318, - "acap": 32357, - "acar": 
22232, - "acare": 16961, - "acc": 26805, - "acc": 9318, - "acca": 30883, - "acce": 8564, - "acceler": 10161, - "accelerate": 23619, - "accelerated": 38513, - "accelerating": 41821, - "acceleration": 39387, - "accelerator": 25261, - "accent": 28110, - "accent": 18931, - "accents": 31738, - "accenture": 41853, - "accep": 4616, - "accept": 16447, - "accept": 9338, - "acceptable": 14209, - "acceptance": 17090, - "accepted": 9159, - "accepting": 12855, - "accepts": 22338, - "access": 7596, - "access": 3822, - "accessi": 10787, - "accessibility": 23407, - "accessible": 13977, - "accessing": 46339, - "accessories": 10220, - "accessory": 20417, - "acci": 4263, - "acci": 33943, - "accident": 6608, - "accidental": 24895, - "accidentally": 11061, - "accidents": 22072, - "acclaimed": 21172, - "acco": 44730, - "accol": 33858, - "accolades": 46731, - "accom": 23658, - "accommo": 34495, - "accommod": 14386, - "accommodate": 34708, - "accommodation": 18066, - "accommodations": 45536, - "accomp": 24985, - "accompan": 14746, - "accompanied": 20715, - "accompany": 34142, - "accompanying": 38179, - "accompli": 10205, - "accomplish": 25542, - "accomplished": 16462, - "accomplishment": 26100, - "accomplishments": 24965, - "accor": 4182, - "accord": 34293, - "accord": 28513, - "according": 4717, - "accordingly": 35535, - "accordion": 48760, - "accoun": 3081, - "account": 18424, - "account": 4684, - "accountability": 19377, - "accountable": 24216, - "accountant": 31026, - "accountants": 37222, - "accounted": 43951, - "accounting": 14805, - "accounts": 9974, - "accra": 31900, - "accred": 17451, - "accreditation": 27015, - "accredited": 27647, - "acct": 45569, - "accu": 5618, - "accumul": 19275, - "accumulation": 37112, - "accur": 6551, - "accuracy": 18423, - "accurate": 8858, - "accurately": 24206, - "accusations": 33615, - "accuse": 39414, - "accused": 9434, - "accuses": 27496, - "accusing": 41474, - "acdc": 45067, - "ace": 2675, - "ace": 804, - "acea": 35219, - "aceae": 38153, - "acele": 
40868, - "aceous": 33610, - "acer": 37990, - "acer": 25809, - "aces": 5725, - "acet": 28735, - "acf": 38389, - "ach": 972, - "ach": 987, - "acha": 22686, - "acharya": 45780, - "achat": 32706, - "ache": 27771, - "ache": 7214, - "ached": 17048, - "acher": 38442, - "acher": 17936, - "achers": 25051, - "aches": 14823, - "achi": 3264, - "achi": 9087, - "achiev": 8160, - "achieve": 14798, - "achieve": 8175, - "achieved": 12359, - "achievement": 8245, - "achievements": 16114, - "achiever": 46286, - "achievers": 44544, - "achieves": 40123, - "achieving": 16120, - "achilles": 33327, - "achim": 42335, - "aching": 12864, - "acho": 33130, - "achs": 41195, - "aci": 4359, - "aci": 34100, - "acia": 30163, - "acial": 32422, - "acid": 35474, - "acid": 10085, - "acidity": 48800, - "acids": 27751, - "acies": 20162, - "acin": 39442, - "acing": 9442, - "acio": 26202, - "acion": 44965, - "acion": 24968, - "acional": 26435, - "aciones": 35832, - "acious": 16020, - "acity": 7511, - "ación": 38175, - "ack": 877, - "ack": 725, - "acked": 5698, - "acker": 31201, - "acker": 7940, - "ackeray": 41843, - "acki": 42857, - "acking": 5515, - "ackles": 28503, - "acknow": 13563, - "acknowle": 18100, - "acknowledge": 25209, - "acknowledged": 35913, - "acknowledges": 49083, - "acknowledging": 45645, - "acks": 3858, - "acl": 47593, - "acl": 23073, - "acle": 6504, - "acles": 34164, - "aclu": 37354, - "acm": 39317, - "acmilan": 36500, - "acne": 24195, - "aco": 9463, - "aco": 8800, - "acol": 17431, - "acollege": 43468, - "acom": 17224, - "acom": 22342, - "acon": 11621, - "acon": 11571, - "aconf": 38851, - "acons": 31599, - "acor": 22076, - "acorn": 37537, - "acos": 39943, - "acosta": 31994, - "acou": 8794, - "acoun": 31295, - "acounty": 45449, - "acoustic": 10616, - "acoustics": 43873, - "acp": 19627, - "acqu": 7946, - "acquainted": 40713, - "acqui": 12194, - "acquire": 21576, - "acquired": 15932, - "acquires": 27376, - "acquiring": 42785, - "acquis": 14207, - "acquisition": 16543, - "acquisitions": 39649, 
- "acr": 43648, - "acre": 26749, - "acre": 9493, - "acres": 11630, - "acro": 21060, - "acrob": 40891, - "acron": 37770, - "across": 2500, - "acrosse": 40979, - "acruz": 40455, - "acry": 10440, - "acrylic": 12252, - "acs": 11782, - "act": 10305, - "act": 1393, - "acted": 10971, - "acti": 4786, - "acting": 6319, - "action": 12493, - "action": 1816, - "actions": 6271, - "activ": 3430, - "activate": 26737, - "activated": 22249, - "activation": 26769, - "active": 19009, - "active": 4046, - "actively": 18645, - "activi": 7230, - "activism": 20117, - "activist": 10850, - "activists": 12649, - "activities": 6514, - "activity": 6206, - "actment": 44807, - "acton": 36167, - "acton": 36697, - "actonclimate": 43797, - "actor": 12181, - "actor": 4035, - "actors": 9255, - "actorslife": 25117, - "actorvijay": 34033, - "actress": 5805, - "actresses": 33639, - "acts": 6816, - "actu": 2375, - "actual": 7488, - "actually": 2955, - "acu": 9204, - "acu": 48475, - "aculture": 38145, - "acup": 30869, - "acup": 27278, - "acupuncture": 40043, - "acur": 44719, - "acura": 30120, - "acus": 33710, - "acute": 19734, - "acy": 18717, - "acy": 2356, - "ad": 594, - "ad": 680, - "ada": 25785, - "ada": 1886, - "adaily": 47254, - "adal": 46646, - "adam": 6037, - "adam": 4944, - "adamlambert": 27659, - "adams": 7942, - "adan": 41802, - "adani": 37499, - "adap": 6341, - "adapt": 22666, - "adaptation": 16566, - "adapted": 26657, - "adapter": 21839, - "adapting": 44120, - "adaptive": 28672, - "adar": 27702, - "adar": 32681, - "adas": 23250, - "adata": 39500, - "aday": 31367, - "aday": 10280, - "adays": 24337, - "adb": 45630, - "adc": 38201, - "add": 19408, - "add": 3536, - "addams": 38912, - "added": 4149, - "adder": 47557, - "addi": 36378, - "addic": 5709, - "addict": 14614, - "addicted": 16275, - "addiction": 11751, - "addictive": 29638, - "addicts": 29997, - "adding": 8676, - "addis": 43911, - "addison": 32369, - "additi": 26927, - "addition": 6698, - "additional": 10666, - "additions": 22575, - 
"additive": 48546, - "addo": 40001, - "address": 5834, - "addressed": 20817, - "addresses": 12702, - "addressing": 10594, - "adds": 9944, - "addy": 24746, - "ade": 2194, - "ade": 1928, - "adecides": 46374, - "aded": 9994, - "adee": 47054, - "adel": 4434, - "adel": 27308, - "adelaide": 38193, - "adelaide": 11611, - "adele": 42843, - "adele": 21220, - "adelrey": 43627, - "ademy": 49123, - "aden": 28669, - "aden": 28688, - "adena": 23648, - "adequ": 18232, - "adequate": 22281, - "ader": 21365, - "adero": 49185, - "aders": 27672, - "ades": 5793, - "adh": 42301, - "adhd": 32649, - "adhe": 21175, - "adhesive": 38429, - "adi": 2486, - "adi": 8779, - "adia": 26874, - "adic": 36780, - "adid": 8086, - "adidas": 22396, - "adidas": 9589, - "adidasoriginals": 48575, - "adies": 45834, - "adifference": 37217, - "adilla": 41167, - "ading": 15000, - "adio": 15060, - "adirond": 36843, - "adish": 49009, - "adity": 28596, - "aditya": 37186, - "adityanath": 44437, - "adjac": 32517, - "adjacent": 33836, - "adjec": 45512, - "adju": 16413, - "adjun": 45995, - "adjust": 13784, - "adjust": 28073, - "adjustable": 20476, - "adjusted": 30515, - "adjusting": 41132, - "adjustment": 36081, - "adjustments": 36331, - "adl": 49351, - "adler": 30222, - "adm": 9892, - "adm": 33604, - "admi": 11666, - "admin": 12528, - "admini": 6434, - "administr": 12174, - "administration": 9502, - "administrative": 22424, - "administrator": 22603, - "administrators": 36123, - "admins": 49297, - "admir": 17031, - "admiral": 21013, - "admiration": 39569, - "admire": 17791, - "admired": 36103, - "admirer": 48344, - "admiring": 29835, - "admission": 11315, - "admissions": 22463, - "admit": 13769, - "admits": 16332, - "admitted": 20427, - "admitting": 46148, - "adn": 40339, - "adnan": 42037, - "ado": 4775, - "ado": 2933, - "adobe": 29256, - "adobe": 16484, - "adog": 44913, - "adol": 33512, - "adole": 22704, - "adolescent": 36793, - "adolescents": 45656, - "adolf": 41179, - "adon": 25907, - "adona": 48419, - "adop": 4183, 
- "adopt": 16441, - "adopt": 11159, - "adoptable": 36905, - "adoptdont": 19674, - "adoptdontshop": 20089, - "adopted": 12538, - "adopting": 30158, - "adoption": 11544, - "adopts": 40853, - "ador": 4992, - "ador": 9162, - "adora": 40031, - "adorable": 6298, - "adoration": 46781, - "adore": 15502, - "adored": 49233, - "adores": 30290, - "adorned": 44953, - "ados": 20079, - "adox": 32188, - "adp": 44426, - "adr": 46189, - "adren": 24204, - "adrenaline": 35552, - "adri": 5935, - "adrian": 25012, - "adrian": 13163, - "adriana": 41363, - "adrid": 26562, - "adrien": 47469, - "adrienne": 40081, - "ads": 2485, - "adu": 16882, - "adu": 24446, - "adukone": 30511, - "adul": 7222, - "adult": 42209, - "adult": 7115, - "adulthood": 40964, - "adults": 9391, - "adv": 1647, - "adv": 21018, - "advan": 33411, - "advance": 27291, - "advance": 7022, - "advanced": 7465, - "advancement": 35437, - "advances": 15852, - "advancing": 21355, - "advani": 48189, - "advant": 7017, - "advantage": 8573, - "advantaged": 38361, - "advantages": 23506, - "adven": 41670, - "advent": 3071, - "advent": 15199, - "adventcalendar": 43492, - "adventur": 29627, - "adventure": 17251, - "adventure": 4377, - "adventurer": 48098, - "adventures": 7941, - "adventurous": 31179, - "adver": 4806, - "adverse": 30348, - "adversity": 32516, - "advert": 19080, - "adverti": 5682, - "advertise": 31473, - "advertised": 38987, - "advertisement": 18713, - "advertiser": 41829, - "advertisers": 45472, - "advertising": 8158, - "adverts": 44306, - "advice": 4973, - "advis": 4634, - "advise": 25962, - "advised": 23196, - "adviser": 20367, - "advisers": 40984, - "advises": 42761, - "advising": 39648, - "advisor": 12380, - "advisors": 23197, - "advisory": 10224, - "advoc": 6657, - "advocacy": 14443, - "advocate": 12044, - "advocates": 17757, - "adwords": 48343, - "ady": 41446, - "ady": 8781, - "ae": 5548, - "ae": 4542, - "aea": 37048, - "aed": 26912, - "aege": 42304, - "ael": 41533, - "ael": 43340, - "aen": 43085, - "aer": 10195, - 
"aeri": 27685, - "aerial": 44866, - "aerial": 12440, - "aero": 10196, - "aero": 25026, - "aerob": 42824, - "aeron": 37286, - "aeronau": 42816, - "aerop": 27735, - "aerosmith": 43253, - "aerospace": 20530, - "aes": 10617, - "aes": 35677, - "aest": 40694, - "aesthe": 21181, - "aesthetic": 16179, - "aesthetics": 29295, - "aew": 47108, - "af": 702, - "af": 4391, - "afa": 24953, - "afan": 47474, - "afar": 41637, - "afar": 37866, - "afb": 27022, - "afc": 29742, - "afc": 6571, - "afcb": 44276, - "afcon": 30019, - "afd": 44626, - "afe": 30487, - "afe": 13912, - "afer": 44707, - "aff": 8849, - "aff": 14864, - "affair": 13998, - "affairs": 9830, - "affe": 4556, - "affect": 11361, - "affected": 9715, - "affecting": 18448, - "affection": 33780, - "affection": 28381, - "affectionate": 42578, - "affects": 17285, - "affili": 12120, - "affiliate": 18652, - "affiliated": 37540, - "affiliation": 48377, - "affinity": 41451, - "affir": 25343, - "affirm": 42711, - "affirm": 48625, - "affirmation": 47495, - "affl": 34036, - "affleck": 35584, - "afford": 7951, - "afford": 13223, - "affordability": 44828, - "affordable": 43944, - "affordable": 8926, - "afg": 33994, - "afgh": 9029, - "afghan": 15919, - "afghanistan": 9836, - "afi": 24074, - "afi": 31958, - "afil": 27209, - "afire": 42010, - "afirst": 38601, - "afl": 15132, - "afl": 14356, - "aflo": 41959, - "afm": 38385, - "afootball": 41694, - "afor": 43102, - "afore": 41468, - "afp": 18311, - "afraid": 9474, - "afri": 13888, - "afric": 2136, - "africa": 3093, - "african": 17471, - "african": 4736, - "africans": 26534, - "afridi": 37651, - "afrika": 45833, - "afrin": 45586, - "afro": 16267, - "afro": 21795, - "afs": 48960, - "aft": 22693, - "after": 2278, - "after": 953, - "afterdark": 48966, - "afterlife": 46790, - "aftermath": 20958, - "afterno": 22330, - "afternoon": 39035, - "afternoon": 2716, - "afternoons": 31631, - "afterparty": 35305, - "afterwards": 23911, - "ag": 602, - "ag": 5241, - "aga": 1050, - "aga": 4654, - "again": 1495, 
- "against": 23838, - "against": 1601, - "agame": 46943, - "agan": 42946, - "agan": 9178, - "agar": 13199, - "agar": 17544, - "agarwal": 43117, - "agas": 20430, - "agate": 25454, - "agatha": 43896, - "agave": 42671, - "agawa": 39433, - "agazine": 44942, - "age": 4758, - "age": 805, - "aged": 3889, - "ageing": 25349, - "agen": 10101, - "agen": 43696, - "agencies": 13887, - "agency": 44885, - "agency": 6270, - "agend": 48653, - "agenda": 8728, - "agent": 21210, - "agent": 6576, - "agents": 10199, - "agentsof": 37074, - "agentsofshield": 38801, - "ager": 44847, - "ager": 10443, - "agers": 22123, - "ages": 2321, - "agg": 45482, - "aggarwal": 39386, - "agger": 27836, - "aggi": 36844, - "aggie": 44244, - "aggie": 37618, - "aggies": 31047, - "aggio": 36685, - "aggrav": 35203, - "aggre": 10426, - "aggreg": 41968, - "aggregate": 41318, - "aggression": 28900, - "aggressive": 16295, - "aggressively": 48667, - "agh": 17917, - "agh": 14402, - "aghan": 31276, - "agi": 24036, - "agi": 17645, - "agic": 37652, - "agile": 16276, - "agility": 32161, - "aging": 4336, - "agio": 41746, - "agirl": 35469, - "agle": 37035, - "agle": 16702, - "agles": 36374, - "agles": 22679, - "aglia": 46912, - "agm": 19162, - "agn": 36474, - "agna": 43626, - "agne": 29374, - "agne": 48303, - "agnes": 26213, - "agno": 41540, - "ago": 6276, - "ago": 1468, - "agomez": 27127, - "agon": 26775, - "agon": 14901, - "agony": 36977, - "agor": 38920, - "agos": 32657, - "agov": 34227, - "agp": 46048, - "agr": 36639, - "agra": 26660, - "agra": 29830, - "agram": 2447, - "agre": 3180, - "agreat": 37594, - "agree": 5953, - "agreed": 12774, - "agreeing": 40720, - "agreement": 8286, - "agreements": 25865, - "agrees": 17854, - "agri": 20527, - "agri": 30326, - "agricul": 7234, - "agricultural": 15440, - "agriculture": 9720, - "agro": 33178, - "agro": 44589, - "agron": 41314, - "agroup": 40099, - "ags": 16926, - "agt": 39681, - "agu": 3922, - "agu": 36544, - "agua": 18482, - "aguchi": 49206, - "ague": 2095, - "aguero": 
42964, - "agues": 7000, - "aguil": 27946, - "aguilar": 44715, - "ah": 1772, - "ah": 1288, - "aha": 12082, - "aha": 8429, - "ahah": 38661, - "ahaha": 32423, - "ahahaha": 42620, - "aham": 36036, - "ahan": 45061, - "ahan": 19255, - "ahar": 31038, - "ahar": 38760, - "ahe": 27688, - "ahead": 3158, - "ahem": 39995, - "ahh": 13152, - "ahhh": 14769, - "ahhhh": 21054, - "ahhhhh": 36392, - "ahi": 45349, - "ahi": 24154, - "ahl": 30433, - "ahmad": 32167, - "ahmad": 16902, - "ahmadi": 38656, - "ahmadiyya": 44865, - "ahmed": 19491, - "ahmed": 12081, - "ahmedabad": 26966, - "ahn": 33405, - "aho": 28114, - "aho": 38444, - "ahora": 43113, - "ahouse": 33197, - "ahoy": 38652, - "ahs": 16937, - "ahu": 11908, - "ahu": 16515, - "ai": 2014, - "ai": 2215, - "aia": 27046, - "aib": 34780, - "aic": 29454, - "aid": 13723, - "aid": 5182, - "aida": 33830, - "aidan": 48814, - "aidan": 26945, - "aide": 31558, - "aide": 9746, - "aided": 48707, - "aiden": 40020, - "aides": 49082, - "aids": 11759, - "aig": 27295, - "aig": 46989, - "aii": 22478, - "aik": 42575, - "aiken": 46342, - "ail": 1457, - "ail": 9154, - "ailed": 38919, - "ailing": 29999, - "ails": 27024, - "aim": 6787, - "aim": 11255, - "aime": 39872, - "aimed": 20247, - "aimee": 36318, - "aiming": 21768, - "aimo": 36706, - "aims": 13326, - "ain": 8326, - "ain": 2210, - "aine": 48983, - "aine": 17634, - "ains": 27621, - "aint": 29543, - "aint": 13099, - "ainted": 39933, - "aioli": 43949, - "air": 1281, - "air": 1922, - "aira": 35085, - "aira": 46444, - "airasia": 48020, - "airbnb": 23098, - "airborne": 22755, - "airbus": 15324, - "aircraft": 7706, - "airdrop": 38434, - "aire": 7682, - "aired": 21938, - "aires": 17034, - "airfield": 40525, - "airforce": 23511, - "airing": 20453, - "airline": 14847, - "airlines": 8929, - "airmen": 44499, - "airplane": 16451, - "airplanes": 33319, - "airplay": 47024, - "airpollution": 47362, - "airport": 48337, - "airport": 3259, - "airports": 21543, - "airs": 18539, - "airshow": 27139, - "airsoft": 30134, - 
"airspace": 49280, - "airstrikes": 37220, - "airtel": 34784, - "airtime": 46617, - "airwaves": 43910, - "airways": 14299, - "airy": 44453, - "ais": 7616, - "ais": 11393, - "aise": 30505, - "aish": 21946, - "aisha": 40211, - "aishwar": 29687, - "aishwarya": 44019, - "aisle": 26917, - "ait": 25613, - "ait": 40814, - "aj": 3990, - "aj": 6342, - "aja": 42343, - "aja": 19633, - "ajax": 21933, - "ajay": 22494, - "ajay": 28726, - "ajaydevgn": 35515, - "aje": 48818, - "aje": 33315, - "ajes": 38791, - "aji": 26102, - "aji": 21153, - "ajit": 42261, - "ajith": 24118, - "ajo": 26958, - "aju": 36855, - "ak": 819, - "ak": 1196, - "aka": 19154, - "aka": 3412, - "akaif": 45736, - "akan": 43678, - "akan": 38244, - "akapoor": 40064, - "akarta": 48603, - "akb": 41962, - "akbar": 27180, - "ake": 10558, - "ake": 5776, - "aked": 6115, - "aker": 14245, - "aker": 3074, - "akers": 5788, - "akes": 4764, - "akest": 46679, - "akh": 14821, - "akh": 30660, - "akhan": 28158, - "akhi": 41660, - "akhilesh": 48495, - "akhtar": 45458, - "aki": 18173, - "aki": 6592, - "akin": 24630, - "akin": 13601, - "aking": 1809, - "akins": 48568, - "akira": 34001, - "akis": 27732, - "akistan": 46221, - "akley": 39908, - "ako": 44027, - "ako": 14541, - "akon": 47105, - "akos": 44659, - "akrish": 37434, - "akron": 26115, - "aks": 2953, - "aksh": 28226, - "akshay": 21483, - "akshay": 38914, - "akshaykumar": 23624, - "akshi": 42634, - "aku": 18151, - "aku": 20815, - "aky": 11977, - "al": 526, - "al": 566, - "ala": 12783, - "ala": 3449, - "alab": 6365, - "alabam": 45880, - "alabama": 8422, - "alach": 24622, - "alad": 23074, - "aladdin": 29951, - "alai": 47072, - "alain": 28999, - "alam": 16612, - "alam": 16012, - "alamo": 41922, - "alamo": 34632, - "alan": 9563, - "alan": 5773, - "alana": 43405, - "aland": 34304, - "aland": 6819, - "alar": 34333, - "alarm": 11321, - "alarming": 37209, - "alarms": 31236, - "alarts": 31422, - "alas": 7276, - "alas": 22412, - "alaska": 9562, - "alaskan": 33898, - "alastair": 42062, - 
"alay": 30289, - "alay": 36450, - "alaya": 36397, - "alb": 45248, - "alba": 25254, - "alban": 10882, - "albania": 29170, - "albanian": 47721, - "albans": 44119, - "albany": 17359, - "albat": 42797, - "albeit": 38984, - "alber": 6413, - "albert": 34174, - "albert": 9507, - "alberta": 11048, - "alberto": 22714, - "albi": 18512, - "albino": 48062, - "albion": 24071, - "albu": 2216, - "album": 40712, - "album": 2431, - "albums": 10705, - "albuquerque": 31079, - "alcat": 35361, - "alche": 37909, - "alchemist": 38913, - "alchemy": 39501, - "alco": 6848, - "alco": 45446, - "alcohol": 9426, - "alcoholic": 25098, - "ald": 4539, - "ald": 2928, - "alda": 46440, - "alde": 33114, - "alden": 17155, - "alden": 27710, - "aldenrichards": 20051, - "alder": 18220, - "alder": 46571, - "aldi": 23204, - "aldo": 9933, - "aldridge": 38084, - "alds": 14285, - "aldu": 6505, - "aldub": 10532, - "aldub": 15247, - "ale": 1440, - "ale": 1336, - "alea": 26518, - "aleague": 38909, - "alec": 29804, - "alec": 19954, - "alecoscino": 47948, - "aled": 4970, - "alee": 24515, - "alej": 23440, - "alejandro": 32950, - "alek": 26906, - "alek": 43310, - "aleksand": 48429, - "alem": 11825, - "aleppo": 19258, - "aler": 25674, - "aler": 27335, - "alert": 4662, - "alerts": 22144, - "ales": 44171, - "ales": 5962, - "aless": 21864, - "alessandro": 37344, - "alestine": 31945, - "alex": 2959, - "alex": 4134, - "alexa": 16273, - "alexand": 10696, - "alexander": 25527, - "alexander": 7563, - "alexandra": 19054, - "alexandre": 35711, - "alexandria": 21171, - "alexis": 35023, - "alexis": 14243, - "aley": 21635, - "alf": 27098, - "alfa": 23482, - "alfar": 38870, - "alfie": 28598, - "alfon": 31947, - "alfonso": 41784, - "alfre": 20982, - "alfred": 16553, - "alfredo": 32291, - "algae": 25654, - "algar": 36291, - "algarve": 40290, - "alge": 24336, - "algebra": 33694, - "alger": 18568, - "algeria": 25257, - "algon": 33007, - "algori": 14912, - "algorithm": 23295, - "algorithms": 26039, - "alham": 23352, - "alhamdulil": 
35129, - "alhamdulillah": 38982, - "ali": 835, - "ali": 3558, - "alia": 2492, - "aliaa": 36468, - "alian": 3464, - "alias": 40026, - "alibaba": 39231, - "alic": 25265, - "alice": 23759, - "alice": 9192, - "alici": 31630, - "alicia": 20914, - "alie": 8697, - "alien": 22846, - "alien": 9639, - "aliens": 14883, - "alier": 39493, - "alies": 38086, - "alife": 41347, - "alife": 21100, - "alig": 21272, - "alight": 36157, - "align": 31160, - "aligned": 29292, - "alignment": 27267, - "alik": 31141, - "alike": 12665, - "alim": 42075, - "alin": 42746, - "alin": 40063, - "alina": 39529, - "aline": 21799, - "aling": 5169, - "alion": 19049, - "alis": 21308, - "alis": 20114, - "alisa": 38918, - "alisation": 42143, - "alise": 36718, - "alised": 25099, - "alism": 5607, - "alison": 28653, - "alison": 16970, - "alist": 44900, - "alist": 3320, - "alistair": 40551, - "alistic": 22302, - "alists": 5653, - "alit": 45566, - "alities": 27925, - "ality": 1694, - "alive": 40467, - "alive": 4716, - "aliz": 30979, - "alization": 8026, - "alize": 10268, - "alized": 6141, - "alizer": 38922, - "alizes": 26181, - "alizing": 13023, - "alk": 30246, - "alk": 21577, - "alkal": 33450, - "alkaline": 39210, - "all": 813, - "all": 615, - "alla": 13884, - "alla": 14000, - "allabout": 43996, - "allah": 6378, - "allan": 36552, - "allan": 15404, - "allblacks": 47728, - "allday": 35862, - "alle": 4870, - "alle": 29478, - "alled": 7379, - "alleg": 7456, - "allegations": 16992, - "alleged": 12133, - "allegedly": 14177, - "alleges": 45051, - "allegh": 41479, - "allegheny": 47851, - "allegi": 28832, - "allegiance": 30955, - "allen": 16712, - "allen": 6386, - "allenge": 31387, - "aller": 10116, - "aller": 30630, - "allergic": 28809, - "allergies": 28247, - "allergy": 24408, - "allery": 32542, - "alles": 43354, - "allevi": 31682, - "alleviate": 44799, - "alley": 36205, - "alley": 10329, - "allez": 49137, - "alli": 4123, - "alli": 15268, - "alliance": 45404, - "alliance": 8945, - "alliances": 48403, - "allianz": 
45740, - "allie": 25040, - "allied": 20045, - "allies": 17277, - "alligator": 28574, - "allin": 45007, - "allin": 22395, - "alline": 48182, - "alling": 2992, - "allis": 45309, - "allison": 34602, - "allison": 16578, - "allman": 42611, - "allo": 8107, - "allo": 18389, - "allocated": 42716, - "allocation": 35139, - "allon": 46693, - "allot": 26363, - "allotment": 33750, - "allow": 5645, - "allow": 6722, - "allowance": 35696, - "allowed": 7885, - "allowing": 12458, - "allows": 9966, - "alloy": 22467, - "alls": 1997, - "allstar": 31247, - "allstar": 22974, - "allstars": 31198, - "allthe": 29253, - "allu": 20157, - "alluarjun": 39333, - "allure": 41814, - "ally": 7461, - "ally": 769, - "alm": 28303, - "alma": 32933, - "alma": 18337, - "alman": 29394, - "almanac": 41268, - "almighty": 21898, - "almond": 15646, - "almonds": 30468, - "almost": 47534, - "almost": 2671, - "aln": 47203, - "alo": 3435, - "alo": 6183, - "aloe": 30728, - "alog": 15813, - "alogue": 9101, - "aloha": 23160, - "aloils": 49002, - "alom": 22236, - "alon": 14097, - "alon": 42846, - "alone": 4702, - "along": 8300, - "along": 2528, - "alongside": 8646, - "alonso": 25704, - "aloo": 46187, - "alore": 14323, - "alot": 16945, - "alou": 43180, - "aloud": 30028, - "alove": 46669, - "alove": 37045, - "alp": 32020, - "alp": 39342, - "alpac": 30128, - "alpaca": 42561, - "alph": 6720, - "alpha": 11807, - "alpha": 8624, - "alphabe": 45796, - "alphabet": 22335, - "alphon": 37865, - "alpine": 17055, - "alps": 18191, - "already": 2426, - "alright": 10866, - "als": 23982, - "als": 938, - "alsace": 49388, - "also": 1446, - "alt": 9995, - "alt": 10006, - "alta": 24470, - "alta": 25378, - "altaf": 47342, - "altam": 45624, - "altar": 16385, - "alter": 4949, - "alter": 21393, - "altered": 25201, - "altern": 47463, - "alternate": 15926, - "alternati": 16699, - "alternative": 37327, - "alternative": 8248, - "alternatives": 25041, - "alth": 23463, - "alth": 5863, - "although": 9421, - "alti": 35531, - "alties": 17276, - 
"altitude": 23241, - "altman": 48100, - "alto": 35053, - "alto": 17518, - "altogether": 45689, - "alton": 41331, - "alton": 36550, - "altrin": 38458, - "altrincham": 44718, - "alty": 5546, - "alu": 4776, - "alu": 27991, - "alum": 5404, - "alum": 10553, - "alumin": 14563, - "alumini": 22908, - "aluminium": 23631, - "aluminum": 15251, - "alumna": 30313, - "alumni": 6646, - "alumnus": 23633, - "alums": 30155, - "alv": 20928, - "alvar": 25196, - "alvarez": 26924, - "alvaro": 41941, - "alves": 38547, - "alvin": 27023, - "alway": 14046, - "alway": 43764, - "always": 24997, - "always": 1466, - "alwx": 32768, - "aly": 6468, - "aly": 12910, - "alyn": 49150, - "alyss": 29490, - "alyssa": 18898, - "alz": 12936, - "alz": 41128, - "alzheim": 15212, - "alzheimer": 21151, - "alzheimers": 34592, - "am": 548, - "am": 687, - "ama": 18206, - "ama": 1696, - "amad": 45095, - "amade": 37366, - "amag": 32049, - "amal": 15315, - "amal": 36753, - "aman": 19890, - "aman": 10110, - "amand": 14560, - "amanda": 10036, - "amar": 6424, - "amar": 19607, - "amara": 48522, - "amari": 42565, - "amarillo": 40449, - "amarine": 45591, - "amarketing": 30788, - "amas": 22716, - "amas": 15667, - "amat": 38664, - "amat": 25455, - "amate": 12453, - "amateur": 14287, - "amaya": 47210, - "amaz": 1185, - "amaze": 24846, - "amazed": 18944, - "amazing": 15949, - "amazing": 1370, - "amazingly": 20368, - "amazon": 13630, - "amazon": 4140, - "amb": 9042, - "amb": 16853, - "amba": 27003, - "ambani": 45967, - "ambas": 5634, - "ambassad": 5758, - "ambassador": 6795, - "ambassadors": 16832, - "ambed": 42089, - "ambedkar": 48131, - "amber": 18292, - "amber": 9986, - "ambi": 11844, - "ambient": 23447, - "ambigu": 35702, - "ambition": 20673, - "ambitions": 34152, - "ambitious": 18666, - "ambro": 17585, - "ambrose": 24253, - "ambu": 34423, - "ambul": 13944, - "ambulance": 15555, - "ambush": 40725, - "amc": 24942, - "amc": 16921, - "amd": 20845, - "ame": 3995, - "ame": 780, - "amed": 5660, - "ameen": 24229, - "amel": 31988, 
- "amel": 10960, - "ameli": 21599, - "amelia": 21433, - "amell": 48198, - "amen": 18716, - "amen": 12335, - "amend": 12425, - "amendment": 15019, - "amendments": 40901, - "amenities": 30096, - "ament": 27528, - "amer": 17081, - "amer": 16147, - "ameri": 40422, - "americ": 1283, - "america": 2224, - "americafirst": 43216, - "american": 8746, - "american": 2151, - "americana": 26221, - "americanair": 42538, - "americani": 39726, - "americans": 6676, - "americas": 33343, - "americas": 18142, - "ames": 5469, - "ameter": 23393, - "amethy": 30291, - "amethyst": 31485, - "amex": 46390, - "amg": 21324, - "amher": 32311, - "amherst": 39065, - "ami": 6100, - "ami": 3065, - "amic": 25824, - "amic": 21383, - "amid": 18908, - "amid": 11953, - "amide": 30952, - "amidst": 25172, - "amie": 36901, - "amig": 40294, - "amiga": 35329, - "amigo": 44991, - "amigos": 28176, - "amii": 35462, - "amiibo": 38871, - "amily": 36732, - "amin": 14337, - "amin": 20235, - "amina": 47531, - "amination": 30355, - "amine": 35823, - "aming": 3507, - "amino": 33464, - "amir": 26029, - "amir": 21973, - "amis": 29829, - "amish": 24958, - "amit": 15083, - "amit": 25255, - "amitabh": 48124, - "amitshah": 32374, - "aml": 43185, - "amma": 29786, - "amman": 29243, - "ammo": 33474, - "ammunition": 35060, - "amn": 24073, - "amne": 14596, - "amnesia": 41741, - "amnesty": 46330, - "amnesty": 21177, - "amo": 4833, - "amo": 11156, - "amodi": 9826, - "amon": 17492, - "amon": 24046, - "among": 12310, - "among": 4265, - "amongst": 12520, - "amoo": 26977, - "amor": 19977, - "amor": 15973, - "amore": 38937, - "amore": 22691, - "amores": 36338, - "amos": 18133, - "amoto": 25492, - "amount": 6403, - "amounts": 16747, - "amour": 29908, - "amovie": 41062, - "amp": 3521, - "amp": 6259, - "amped": 22640, - "amphi": 16379, - "amphibious": 45206, - "amphitheater": 41285, - "amphitheatre": 44039, - "ample": 34162, - "amples": 14536, - "ampli": 15647, - "amplifier": 31743, - "amplify": 45308, - "amps": 19252, - "ampton": 29410, - 
"ampton": 9347, - "amr": 30916, - "amreading": 16546, - "amrit": 33849, - "ams": 1396, - "amster": 9110, - "amsterdam": 9441, - "amtrak": 27855, - "amu": 11347, - "amu": 32336, - "amur": 35014, - "amura": 35487, - "amus": 36269, - "amuse": 21421, - "amuse": 44367, - "amused": 30212, - "amusement": 32570, - "amusic": 20266, - "amusing": 31789, - "amwriting": 9660, - "amy": 10547, - "amy": 5187, - "an": 514, - "an": 550, - "ana": 6588, - "ana": 1388, - "anab": 34742, - "anada": 27948, - "anag": 12115, - "anagh": 40774, - "anaheim": 23728, - "anak": 34814, - "anak": 38658, - "anal": 2785, - "analo": 34179, - "analog": 19963, - "analogue": 46031, - "analy": 4611, - "analyse": 47246, - "analyses": 39695, - "analysis": 5296, - "analyst": 14198, - "analysts": 28075, - "analytical": 34550, - "analytics": 8558, - "analyze": 28519, - "analyzing": 32107, - "anam": 29525, - "anan": 37215, - "anand": 25073, - "anand": 22083, - "anap": 41566, - "anarch": 46405, - "anarchi": 39879, - "anarchy": 27707, - "anas": 31382, - "anas": 12633, - "anast": 48902, - "anasta": 22915, - "anastasi": 36534, - "anastasia": 37975, - "anat": 10045, - "anath": 31277, - "anatom": 33759, - "anatomy": 15376, - "anc": 1124, - "anc": 17758, - "anca": 14583, - "ance": 7165, - "ance": 884, - "anced": 5071, - "ancer": 17415, - "ancers": 37296, - "ances": 3515, - "ancestor": 43904, - "ancestors": 24405, - "ancestral": 41615, - "ancestry": 30922, - "anch": 9489, - "anche": 34679, - "ancho": 26610, - "anchor": 20030, - "anchor": 13201, - "anchorage": 31950, - "anchored": 45926, - "anchors": 37830, - "anci": 4192, - "ancient": 31495, - "ancient": 5810, - "ancies": 21647, - "ancing": 7797, - "anco": 15459, - "ancy": 16282, - "ancy": 3633, - "and": 672, - "and": 537, - "anda": 2911, - "andalu": 31443, - "andco": 36302, - "ande": 26889, - "ande": 30354, - "ander": 3740, - "ander": 3935, - "anders": 10880, - "andersen": 32661, - "anderson": 26683, - "anderson": 6510, - "andes": 24052, - "andfriends": 36871, - 
"andhi": 21617, - "andhra": 32452, - "andi": 28870, - "andi": 14354, - "andie": 46318, - "andme": 42831, - "ando": 35950, - "ando": 5986, - "andolan": 48965, - "andon": 36488, - "andor": 45243, - "andover": 44177, - "andr": 22661, - "andra": 46795, - "andra": 21730, - "andre": 2657, - "andre": 9400, - "andrea": 10895, - "andreas": 20444, - "andrei": 42137, - "andres": 25197, - "andretti": 44291, - "andrew": 11717, - "andrew": 4847, - "andrews": 14506, - "andri": 37208, - "andro": 4417, - "andro": 17980, - "android": 24284, - "android": 5191, - "androidgames": 46572, - "andromeda": 42942, - "andré": 35609, - "ands": 32257, - "andthe": 22111, - "andu": 44200, - "andum": 47266, - "andy": 9447, - "andy": 2888, - "ane": 5846, - "ane": 3051, - "anec": 33965, - "anem": 41395, - "anemone": 49019, - "aneous": 48273, - "anes": 15381, - "anese": 48778, - "anesthe": 30622, - "anesthesia": 43353, - "anew": 39084, - "anew": 47341, - "anews": 20919, - "aney": 22387, - "anfield": 26993, - "ang": 883, - "ang": 2704, - "anga": 11641, - "angames": 43178, - "angan": 28264, - "angas": 46180, - "ange": 2960, - "ange": 3039, - "angel": 5029, - "angel": 5130, - "angela": 12354, - "angeles": 7382, - "angeli": 15265, - "angelic": 41038, - "angelica": 38582, - "angelina": 28890, - "angelo": 14342, - "angelou": 41328, - "angels": 7809, - "anger": 32737, - "anger": 6788, - "angerous": 39716, - "angers": 29756, - "angh": 34030, - "angi": 28003, - "angi": 24301, - "angie": 18859, - "angle": 21749, - "angle": 6946, - "angled": 32322, - "angler": 22284, - "anglers": 41608, - "angles": 18627, - "anglesey": 31850, - "anglia": 32076, - "anglic": 28322, - "anglican": 33284, - "angling": 36824, - "anglo": 39515, - "anglo": 30408, - "ango": 19090, - "angola": 36636, - "angor": 41740, - "angp": 19992, - "angry": 33910, - "angry": 9054, - "angs": 18441, - "angst": 41714, - "angu": 11209, - "angular": 43584, - "angular": 24981, - "angularjs": 48608, - "angus": 19688, - "ani": 1326, - "ani": 3624, - "ania": 
9866, - "anian": 9945, - "anians": 39393, - "anic": 23113, - "anie": 26697, - "anie": 7671, - "anil": 28589, - "anil": 34619, - "anim": 2190, - "animal": 10697, - "animal": 4668, - "animalrights": 42859, - "animals": 4995, - "animate": 40076, - "animated": 13360, - "animation": 10344, - "animations": 42870, - "animator": 42591, - "anime": 23314, - "anime": 6469, - "anin": 45735, - "aning": 30972, - "anir": 27089, - "anirud": 35278, - "anirudhofficial": 45917, - "anis": 40986, - "anis": 47556, - "anism": 20947, - "anist": 16729, - "anistan": 9727, - "aniston": 47344, - "anit": 23683, - "anita": 18544, - "anium": 14794, - "anj": 22443, - "anja": 43440, - "anjali": 38834, - "anjo": 47353, - "ank": 13339, - "ank": 10029, - "anka": 45324, - "ankara": 34309, - "ankle": 14777, - "ankles": 48688, - "ann": 850, - "ann": 5424, - "anna": 13821, - "anna": 2160, - "annab": 22336, - "annabelle": 47661, - "annah": 39166, - "annah": 14327, - "annak": 41720, - "annan": 32166, - "annapolis": 34491, - "annas": 48467, - "anne": 9139, - "anne": 4083, - "anned": 27352, - "anner": 12642, - "annes": 24343, - "annette": 36821, - "annex": 42958, - "annex": 46389, - "anni": 2438, - "anni": 13728, - "annie": 37270, - "annie": 12173, - "annies": 43184, - "annihil": 32734, - "annis": 24742, - "anniv": 31399, - "anniver": 29671, - "annivers": 42836, - "anniversaire": 30882, - "anniversary": 3048, - "anno": 9901, - "anno": 26871, - "annon": 26385, - "annot": 30411, - "announ": 1806, - "announce": 3682, - "announced": 4103, - "announcement": 6932, - "announcements": 23735, - "announcer": 33626, - "announces": 6500, - "announcing": 11593, - "annoy": 45138, - "annoyed": 29863, - "annoying": 15248, - "annu": 21698, - "annual": 2906, - "annually": 23703, - "anny": 34313, - "anny": 5291, - "ano": 5617, - "ano": 2658, - "anom": 21612, - "anomaly": 46811, - "anon": 47079, - "anon": 13667, - "anonym": 38605, - "anonymous": 15036, - "anoo": 25690, - "anor": 13243, - "anor": 16596, - "anos": 20132, - 
"another": 29274, - "another": 1380, - "anova": 24116, - "ans": 24586, - "ans": 885, - "ansari": 40748, - "ansel": 40356, - "answ": 3369, - "answe": 14391, - "answer": 4518, - "answered": 14499, - "answering": 18280, - "answers": 8692, - "ant": 1103, - "ant": 773, - "anta": 3023, - "antag": 41745, - "antal": 39355, - "antalya": 47440, - "antan": 32899, - "antarc": 21338, - "antarctic": 27077, - "antarctica": 22587, - "ante": 19311, - "ante": 9769, - "antebellum": 41683, - "antelope": 39177, - "anten": 35517, - "antenna": 26370, - "anter": 46508, - "antes": 14927, - "antgrasso": 39074, - "anth": 3737, - "anth": 29741, - "antha": 47981, - "anthe": 34167, - "anthem": 12504, - "anthi": 45261, - "anthology": 21009, - "anthony": 17477, - "anthony": 6113, - "anthro": 10019, - "anthropo": 18538, - "anthropology": 32407, - "anthus": 37639, - "anti": 3120, - "anti": 3564, - "antibio": 18954, - "antibiotic": 34387, - "antibiotics": 29499, - "antibody": 49018, - "antic": 8260, - "anticip": 11435, - "anticipate": 38280, - "anticipated": 18605, - "anticipating": 48067, - "anticipation": 26983, - "antics": 37126, - "antidote": 45476, - "antifa": 35926, - "antigua": 39910, - "antine": 17641, - "antino": 27818, - "antioxid": 23010, - "antioxidant": 37452, - "antioxidants": 34208, - "antiqu": 21745, - "antique": 46517, - "antique": 9060, - "antiques": 17365, - "antis": 19748, - "antisemitism": 36630, - "antit": 37833, - "antitrust": 49343, - "antlers": 47720, - "antly": 5265, - "anto": 16826, - "anto": 24486, - "antoine": 25188, - "anton": 5497, - "anton": 19644, - "antoni": 39958, - "antonio": 30497, - "antonio": 7842, - "antony": 30707, - "antrim": 40252, - "ants": 1589, - "antv": 47520, - "antw": 44460, - "antwer": 26970, - "antwerp": 33797, - "antz": 25684, - "anu": 8537, - "anu": 17152, - "anup": 29617, - "anus": 27084, - "anush": 22765, - "anushka": 42080, - "anushka": 39822, - "anushkasharma": 44203, - "anwar": 34261, - "anxi": 9021, - "anxiety": 11103, - "anxious": 27793, - 
"any": 1307, - "any": 1504, - "anya": 11173, - "anybody": 10071, - "anyi": 41632, - "anymore": 7372, - "anyone": 2302, - "anything": 3582, - "anytime": 13924, - "anyway": 8931, - "anyways": 19778, - "anywhere": 8863, - "anz": 14445, - "anz": 19425, - "anza": 14669, - "anzac": 31977, - "ao": 7313, - "ao": 5703, - "aoa": 47119, - "aoc": 31918, - "aofficial": 30840, - "aoki": 33602, - "aol": 40643, - "aon": 30928, - "aon": 48476, - "aor": 32044, - "aos": 46860, - "ap": 688, - "ap": 2728, - "apa": 36954, - "apa": 13537, - "apac": 34320, - "apache": 23921, - "apal": 38017, - "apan": 36562, - "apar": 9161, - "apark": 32528, - "apart": 6474, - "apart": 7803, - "aparthe": 25121, - "apartheid": 26597, - "apartment": 8285, - "apartments": 15791, - "aparty": 26767, - "apat": 31755, - "apathy": 18145, - "apc": 20300, - "apd": 44563, - "ape": 6098, - "ape": 2609, - "apec": 47530, - "aper": 13681, - "aper": 5858, - "apers": 15846, - "apes": 9550, - "apeu": 19040, - "apex": 41935, - "apex": 23712, - "aph": 16341, - "aph": 29491, - "apha": 47104, - "apho": 21758, - "aphra": 44147, - "api": 23342, - "api": 14674, - "apia": 44259, - "apic": 40679, - "aping": 18456, - "apink": 35725, - "apis": 37575, - "apk": 27648, - "apo": 4089, - "apo": 19758, - "apocaly": 13932, - "apocalypse": 17571, - "apocalyptic": 35675, - "apol": 5023, - "apolice": 45663, - "apolis": 9598, - "apollo": 48213, - "apollo": 11554, - "apolo": 31094, - "apolog": 25530, - "apologe": 42908, - "apologi": 14977, - "apologies": 21959, - "apologise": 39608, - "apologize": 22879, - "apologizes": 35298, - "apology": 20768, - "apor": 21871, - "apore": 6679, - "apost": 20309, - "apostle": 33051, - "apostles": 48457, - "app": 882, - "app": 2231, - "appa": 4884, - "appa": 13110, - "appalach": 30523, - "appalachian": 36806, - "appalling": 44797, - "appar": 26698, - "apparatus": 37716, - "apparel": 13972, - "apparent": 23963, - "apparently": 5287, - "appe": 3748, - "appe": 45949, - "appeal": 9625, - "appealing": 25909, - 
"appeals": 22447, - "appear": 5544, - "appear": 9308, - "appearance": 7238, - "appearances": 17214, - "appeared": 11561, - "appearing": 18759, - "appears": 8743, - "appell": 43833, - "appen": 37201, - "appen": 26589, - "apper": 18780, - "appet": 21686, - "appeti": 24179, - "appetite": 24481, - "appetizer": 36065, - "applau": 24713, - "applaud": 42152, - "applause": 22650, - "apple": 8629, - "apple": 3055, - "applemusic": 21390, - "apples": 14032, - "appleton": 45250, - "appli": 15495, - "appliance": 33677, - "appliances": 22134, - "applic": 4235, - "applicable": 37927, - "applicants": 28035, - "application": 7241, - "applications": 7341, - "applied": 12636, - "applies": 24910, - "apply": 4356, - "applying": 17965, - "appo": 5433, - "appoint": 36190, - "appointed": 11087, - "appointment": 10890, - "appointments": 23439, - "appoints": 25132, - "apprais": 36972, - "appraisal": 46108, - "appreci": 3474, - "appreciate": 6263, - "appreciated": 9264, - "appreciates": 36573, - "appreciating": 39352, - "appreciation": 9212, - "appreciationday": 37438, - "appreciative": 45074, - "appren": 10582, - "apprentic": 15662, - "apprentice": 19122, - "apprentice": 17985, - "apprentices": 38252, - "apprenticeship": 26939, - "apprenticeships": 35425, - "appro": 2398, - "approach": 7781, - "approach": 6241, - "approached": 36499, - "approaches": 14962, - "approaching": 12164, - "appropri": 8446, - "appropriate": 10768, - "appropriately": 30383, - "appropriation": 49110, - "approval": 13549, - "approve": 19064, - "approved": 9412, - "approves": 18107, - "approx": 18266, - "approxim": 14201, - "approximately": 16128, - "apps": 7020, - "appstore": 31377, - "appt": 48112, - "appy": 34420, - "apr": 39396, - "apr": 11177, - "apra": 37027, - "apric": 25923, - "apricot": 30815, - "april": 23548, - "april": 2484, - "apro": 42712, - "apro": 49051, - "apron": 29502, - "aps": 8868, - "apse": 31843, - "apt": 17921, - "aptly": 47313, - "apu": 22166, - "apur": 36900, - "apur": 45193, - "aq": 14018, - 
"aq": 26862, - "aqu": 4458, - "aqua": 18613, - "aquaculture": 41885, - "aquaman": 35098, - "aquari": 37605, - "aquarium": 16814, - "aquarius": 38879, - "aquatic": 22658, - "aque": 35927, - "aque": 37268, - "aqui": 36826, - "aquino": 33796, - "ar": 516, - "ar": 625, - "ara": 24161, - "ara": 3340, - "arab": 5405, - "arab": 12028, - "arabia": 11746, - "arabian": 24663, - "arabic": 16709, - "arabs": 39155, - "arac": 47620, - "arach": 37689, - "arag": 41502, - "araj": 45142, - "arak": 23416, - "aram": 19223, - "aram": 21473, - "arama": 49066, - "aran": 20839, - "aran": 19641, - "aras": 36399, - "arat": 30856, - "arav": 35836, - "arbit": 20267, - "arbitr": 22702, - "arbitration": 34845, - "arbor": 33516, - "arbor": 24878, - "arboretum": 41719, - "arc": 4997, - "arc": 11592, - "arca": 25189, - "arca": 37612, - "arcade": 13331, - "arcadia": 38372, - "arch": 2458, - "arch": 8557, - "archa": 45619, - "archae": 10121, - "archaeological": 26163, - "archaeologists": 45035, - "archaeology": 14868, - "archan": 33359, - "archbishop": 23994, - "arche": 22474, - "archer": 21824, - "archers": 38407, - "archery": 23935, - "arches": 30771, - "archi": 4479, - "archie": 20557, - "archipel": 39750, - "archipelago": 43025, - "architec": 3359, - "architect": 12192, - "architects": 13290, - "architectural": 15360, - "architecture": 39038, - "architecture": 4920, - "archival": 39249, - "archive": 42257, - "archive": 10548, - "archived": 42379, - "archives": 9411, - "archy": 15643, - "arctic": 29716, - "arctic": 9138, - "ard": 3793, - "ard": 746, - "arden": 44600, - "arden": 27057, - "ardi": 23932, - "ardi": 19837, - "ardo": 35735, - "ardo": 9394, - "ards": 1654, - "ardu": 20906, - "arduino": 25398, - "are": 1076, - "are": 631, - "area": 2445, - "areas": 5429, - "arec": 18136, - "areclipse": 36030, - "ared": 5369, - "arel": 12798, - "arella": 24784, - "arelli": 48619, - "aren": 4033, - "aren": 4318, - "arena": 5463, - "arenas": 47860, - "arent": 37487, - "arer": 14857, - "arers": 33159, - 
"ares": 12224, - "arest": 11708, - "aret": 22247, - "areth": 47725, - "aretha": 42090, - "areyou": 37607, - "arez": 13108, - "arg": 27285, - "argent": 7812, - "argentina": 9789, - "argentine": 32582, - "argon": 40737, - "argos": 37443, - "argu": 7440, - "arguably": 30899, - "argue": 19788, - "argued": 48153, - "argues": 30045, - "arguing": 26549, - "argument": 16224, - "arguments": 24693, - "argus": 44300, - "argy": 21066, - "argyle": 36179, - "argyll": 40667, - "ari": 1221, - "ari": 3681, - "aria": 8883, - "arial": 42431, - "arian": 29980, - "arian": 6953, - "ariana": 14892, - "arianag": 23025, - "arianagrande": 23321, - "arianism": 44351, - "arians": 19104, - "arias": 22567, - "arie": 18774, - "ariel": 47959, - "ariel": 21025, - "aries": 5213, - "arif": 46621, - "arily": 12993, - "arin": 29564, - "arin": 18612, - "arina": 29271, - "arine": 29586, - "aring": 2142, - "ario": 8862, - "arios": 25392, - "aris": 15227, - "arise": 26490, - "arist": 12110, - "aristo": 25666, - "aristotle": 49156, - "arities": 31069, - "arity": 16608, - "arium": 11809, - "arius": 21482, - "ariz": 6516, - "arized": 40167, - "arizon": 28936, - "arizona": 7106, - "arjun": 24565, - "arjun": 20477, - "arjuna": 43835, - "ark": 11921, - "ark": 12010, - "arkansas": 12227, - "arkham": 36381, - "arl": 48542, - "arlington": 44940, - "arlington": 17865, - "arly": 3637, - "arm": 5671, - "arm": 4793, - "arma": 15887, - "arma": 38716, - "armad": 37897, - "armada": 34938, - "armagh": 44313, - "armani": 31314, - "armb": 37096, - "armchair": 45757, - "armed": 40471, - "armed": 8202, - "armen": 13145, - "armenia": 22008, - "armenian": 24891, - "armies": 46686, - "armin": 45481, - "arming": 19766, - "armist": 38150, - "armistice": 46765, - "armor": 16167, - "armored": 28214, - "armory": 38610, - "armour": 18503, - "armoured": 42514, - "arms": 5706, - "armstrong": 15005, - "army": 13541, - "army": 3133, - "armys": 27311, - "arn": 9348, - "arn": 37597, - "arnau": 45556, - "arne": 43509, - "arney": 35962, - 
"arnold": 49096, - "arnold": 13609, - "arns": 46692, - "aro": 7514, - "aro": 11551, - "aroa": 48209, - "arom": 16831, - "aroma": 40143, - "aroma": 26390, - "aromas": 47439, - "aromatherapy": 42584, - "aromatic": 39669, - "aron": 30855, - "aron": 28926, - "aroo": 47581, - "arora": 31897, - "arosa": 44264, - "arose": 44262, - "around": 35615, - "around": 1630, - "arqu": 35654, - "arquitec": 41703, - "arr": 39106, - "arr": 42489, - "arra": 32918, - "arra": 43827, - "arrahman": 44554, - "arran": 45722, - "arrang": 16711, - "arrange": 15410, - "arrange": 26311, - "arranged": 22451, - "arrangement": 23822, - "arrangements": 23792, - "arranging": 35321, - "array": 17293, - "arre": 4374, - "arrell": 28846, - "arrest": 9320, - "arrested": 5845, - "arresting": 43930, - "arrests": 20683, - "arri": 2115, - "arrival": 9073, - "arrivals": 19583, - "arrive": 8851, - "arrived": 3514, - "arrives": 9905, - "arriving": 10884, - "arro": 15729, - "arrog": 26997, - "arrogance": 47025, - "arrogant": 40582, - "arrow": 30920, - "arrow": 11149, - "arrowhead": 46393, - "arrows": 24768, - "arroyo": 45237, - "ars": 42815, - "ars": 864, - "arse": 22665, - "arsen": 5330, - "arsenal": 45234, - "arsenal": 6084, - "arsene": 32117, - "arson": 29937, - "art": 1486, - "art": 794, - "arta": 12031, - "arte": 13482, - "arte": 12947, - "artem": 40387, - "artemis": 45256, - "arten": 37043, - "arter": 29449, - "artery": 40062, - "artes": 48629, - "artforsale": 48239, - "artgallery": 31982, - "arth": 7146, - "arth": 20265, - "arthistory": 39313, - "arthr": 20807, - "arthritis": 22916, - "arthro": 43255, - "arthur": 35660, - "arthur": 8550, - "arti": 1635, - "arti": 34601, - "artic": 3003, - "articho": 30937, - "artichoke": 39647, - "article": 3550, - "articles": 11939, - "articul": 40343, - "articulate": 45444, - "artif": 8950, - "artifact": 37718, - "artifacts": 30249, - "artificial": 19357, - "artificial": 12040, - "artificialintelligence": 20799, - "artillery": 24465, - "artin": 33168, - "artin": 48540, - 
"artis": 41794, - "artisan": 36389, - "artisan": 21535, - "artisans": 40140, - "artist": 14326, - "artist": 2456, - "artiste": 41402, - "artistic": 12421, - "artiston": 48443, - "artistry": 38570, - "artists": 4899, - "artistson": 32127, - "artistsontwitter": 39469, - "artlovers": 35617, - "arto": 28464, - "artof": 31751, - "artoftheday": 43990, - "arton": 46744, - "arts": 22040, - "arts": 3812, - "artsy": 31588, - "arturo": 38591, - "artwit": 36713, - "artwork": 4188, - "artworks": 26215, - "arty": 45417, - "arty": 25916, - "aru": 13757, - "aru": 23907, - "aruba": 40131, - "arugula": 40770, - "arum": 48732, - "arun": 16105, - "arun": 31877, - "arunach": 47260, - "arunjaitley": 44874, - "arus": 22644, - "arvin": 16971, - "arvind": 21209, - "arvind": 41079, - "arvindkejriwal": 22971, - "arvo": 45726, - "arwx": 29824, - "ary": 4617, - "ary": 856, - "arya": 23594, - "aryan": 34966, - "as": 587, - "as": 601, - "asa": 39676, - "asa": 11914, - "asad": 42376, - "asaki": 22455, - "asam": 40603, - "asan": 22379, - "asan": 17841, - "asana": 42363, - "asant": 25536, - "asants": 37766, - "asap": 24199, - "asap": 10822, - "asar": 24733, - "asar": 49299, - "asb": 31186, - "asbe": 32113, - "asbestos": 33765, - "asc": 22720, - "asc": 23305, - "ascen": 20767, - "ascension": 35499, - "ascent": 36625, - "asci": 12753, - "asco": 25578, - "asco": 17488, - "ascot": 23723, - "ascri": 15506, - "asd": 36988, - "asda": 29391, - "asdf": 36857, - "asdfghj": 42758, - "asdfghjkl": 47660, - "ase": 8083, - "ase": 894, - "asean": 24472, - "aseball": 46903, - "ased": 2134, - "asen": 41085, - "aser": 39615, - "aser": 7209, - "ases": 3762, - "asf": 25863, - "asg": 34813, - "ash": 2067, - "ash": 2612, - "asha": 40572, - "asha": 13472, - "ashamed": 20633, - "ashby": 46531, - "ashe": 48523, - "ashe": 31752, - "asher": 37585, - "ashes": 12587, - "asheville": 28897, - "ashford": 37796, - "ashi": 15563, - "ashi": 15934, - "ashish": 33145, - "ashland": 39938, - "ashleigh": 49356, - "ashley": 17825, - 
"ashley": 8957, - "asho": 20273, - "ashok": 38141, - "ashore": 31194, - "ashram": 43445, - "ashton": 43264, - "ashton": 12228, - "ashtra": 18118, - "asi": 3596, - "asi": 12562, - "asia": 5741, - "asian": 21737, - "asian": 7128, - "asiangames": 49108, - "asians": 36771, - "asics": 31097, - "aside": 13676, - "asif": 37302, - "asim": 46050, - "asin": 48432, - "asin": 44347, - "asing": 4194, - "asingly": 15803, - "asion": 31753, - "asis": 12398, - "ask": 11027, - "ask": 2765, - "asked": 3993, - "asking": 5914, - "asks": 7953, - "asl": 41650, - "asleep": 10749, - "asley": 28206, - "asli": 44290, - "asm": 13851, - "asma": 38497, - "asmsg": 19839, - "aso": 30343, - "aso": 27932, - "asober": 43749, - "asocial": 48557, - "ason": 1163, - "asone": 31249, - "asons": 4249, - "asos": 37924, - "asot": 47968, - "asp": 17814, - "asp": 36666, - "asparag": 20301, - "asparagus": 20604, - "aspe": 10894, - "aspect": 19681, - "aspects": 18203, - "aspen": 35695, - "aspen": 25712, - "asper": 32991, - "asph": 28019, - "asphalt": 30574, - "aspir": 12669, - "aspirations": 36127, - "aspire": 24836, - "aspiring": 21862, - "asports": 43695, - "asr": 48052, - "asroma": 41000, - "ass": 12664, - "ass": 5301, - "assa": 47715, - "assad": 18699, - "assam": 19930, - "assan": 26352, - "assange": 27565, - "assas": 9603, - "assassin": 14366, - "assassin": 20029, - "assassinated": 40488, - "assassination": 24907, - "assassins": 34918, - "assassinscre": 36428, - "assassinscreed": 46082, - "assau": 7908, - "assaul": 19596, - "assault": 9679, - "assaulted": 30785, - "assaulting": 44143, - "asse": 3166, - "asse": 38600, - "assel": 37582, - "assemb": 5531, - "assemble": 26169, - "assembled": 22627, - "assemblies": 47406, - "assembling": 38670, - "assembly": 34542, - "assembly": 7059, - "assen": 38651, - "asser": 25665, - "asses": 21596, - "assess": 9209, - "assess": 23211, - "assessed": 44160, - "assessing": 31364, - "assessment": 10590, - "assessments": 32753, - "asset": 48463, - "asset": 13039, - "assets": 
13170, - "assi": 2907, - "assi": 39540, - "assie": 31624, - "assign": 14190, - "assigned": 25767, - "assignment": 17342, - "assignments": 34257, - "assim": 36394, - "assimil": 43467, - "assist": 26558, - "assist": 10286, - "assistance": 11685, - "assistant": 6799, - "assistants": 31054, - "assisted": 18095, - "assisting": 24243, - "assists": 12675, - "assn": 44208, - "asso": 17617, - "assoc": 18891, - "associ": 3566, - "associate": 11777, - "associated": 11164, - "associates": 17358, - "association": 5578, - "associations": 33209, - "assor": 38604, - "assorted": 36701, - "assortment": 43112, - "asst": 24767, - "assu": 8328, - "assume": 19294, - "assumed": 37661, - "assuming": 29422, - "assump": 41182, - "assumption": 40773, - "assumptions": 45948, - "assurance": 28408, - "assure": 39161, - "assured": 25591, - "assures": 41988, - "assy": 29940, - "assy": 12963, - "ast": 1761, - "ast": 1242, - "asta": 43269, - "aste": 25033, - "aste": 25579, - "aster": 11013, - "aster": 9526, - "asteroid": 32253, - "asters": 33139, - "asth": 16684, - "asthma": 24610, - "asthour": 41238, - "astic": 15876, - "asting": 29984, - "astle": 46141, - "asto": 47275, - "aston": 24760, - "aston": 13879, - "astoni": 21962, - "astonishing": 27110, - "astonmartin": 40760, - "astor": 26391, - "astor": 47086, - "astoria": 34798, - "astounding": 37748, - "astr": 37609, - "astra": 47205, - "astra": 36079, - "astral": 45889, - "astri": 31243, - "astrid": 46499, - "astro": 8563, - "astro": 15318, - "astrology": 28526, - "astron": 7982, - "astronaut": 18376, - "astronauts": 29733, - "astronom": 23264, - "astronomer": 40036, - "astronomers": 44268, - "astronomical": 39775, - "astronomy": 17472, - "astrophotography": 38559, - "astros": 17598, - "asts": 10452, - "astu": 43137, - "astur": 45795, - "asu": 13157, - "asu": 16001, - "asun": 36044, - "asure": 3813, - "asus": 27269, - "aswell": 42978, - "asx": 38906, - "asy": 8524, - "asy": 2333, - "asylum": 15638, - "asym": 32539, - "at": 527, - "at": 536, - 
"ata": 4236, - "atable": 23909, - "atal": 24877, - "atal": 24797, - "atan": 33446, - "atar": 20128, - "atar": 7995, - "atari": 21549, - "atas": 30057, - "atay": 39518, - "atc": 28383, - "atch": 15938, - "atd": 33890, - "ate": 992, - "ate": 671, - "ateam": 42784, - "ateau": 16359, - "atec": 37352, - "atech": 31306, - "ated": 14589, - "ated": 943, - "atedly": 24698, - "atee": 32839, - "ateful": 5419, - "atelier": 29932, - "ately": 3862, - "atem": 17116, - "aten": 47984, - "atene": 30405, - "ateneo": 33904, - "ater": 18597, - "ater": 5877, - "ateral": 18819, - "aters": 22364, - "ates": 20370, - "ates": 1150, - "atest": 1705, - "ateur": 43677, - "atf": 28013, - "ath": 1374, - "ath": 1649, - "atha": 22530, - "atham": 23383, - "athan": 41260, - "athan": 26701, - "athe": 8963, - "athed": 47402, - "atheism": 25823, - "atheist": 22571, - "atheists": 47155, - "athen": 29112, - "athena": 30705, - "athens": 13524, - "ather": 6171, - "ather": 1817, - "athered": 34091, - "athers": 17266, - "athi": 28918, - "athing": 36069, - "athle": 3310, - "athlete": 7388, - "athletes": 7125, - "athletic": 33182, - "athletic": 9028, - "athletics": 7019, - "athlon": 14670, - "athome": 38217, - "athon": 4951, - "aths": 28835, - "athy": 34488, - "athy": 13183, - "ati": 591, - "ati": 6751, - "atia": 10908, - "atic": 20248, - "atic": 2647, - "atically": 13558, - "atics": 15666, - "atie": 30137, - "aties": 40060, - "atif": 41592, - "atiku": 37912, - "atile": 15474, - "atility": 23373, - "atime": 20158, - "atin": 36903, - "atin": 23047, - "atine": 39741, - "ating": 25653, - "ating": 1074, - "atio": 35401, - "ation": 2265, - "ation": 656, - "ational": 14205, - "ational": 3108, - "ationals": 44593, - "ationday": 20082, - "ations": 986, - "atis": 45456, - "atis": 41142, - "atism": 45638, - "ative": 18422, - "ative": 1648, - "atively": 11929, - "atives": 5629, - "ativity": 25166, - "atkins": 27734, - "atkinson": 28908, - "atl": 5411, - "atl": 10629, - "atla": 36043, - "atlan": 6818, - "atlanta": 39964, - 
"atlanta": 6839, - "atlantic": 28804, - "atlantic": 8189, - "atlantis": 27790, - "atlas": 15775, - "atle": 21170, - "atleast": 33231, - "atleti": 46067, - "atletico": 27501, - "atm": 14127, - "atmo": 8271, - "atmosphere": 10506, - "atmospheric": 24223, - "ato": 7987, - "ato": 4364, - "atoday": 26799, - "atom": 22418, - "atom": 24031, - "atomic": 18996, - "atoms": 41434, - "aton": 31525, - "aton": 10012, - "atop": 17455, - "ator": 10748, - "ator": 1962, - "atore": 28314, - "atorial": 32040, - "atories": 35678, - "atorium": 41306, - "ators": 3389, - "atory": 5920, - "atos": 41643, - "atour": 42967, - "atown": 24000, - "atp": 38105, - "atp": 19817, - "atr": 43247, - "atra": 20227, - "atra": 14401, - "atravel": 36981, - "atre": 46057, - "atri": 13882, - "atri": 38889, - "atric": 32238, - "atric": 13652, - "atrics": 36253, - "atrist": 41879, - "atrium": 29725, - "atrix": 43003, - "atro": 18724, - "atroc": 36197, - "atrocities": 37551, - "atry": 28334, - "ats": 46890, - "ats": 1032, - "atsu": 26531, - "att": 1017, - "att": 7103, - "atta": 7282, - "atta": 9146, - "attach": 43676, - "attach": 35653, - "attached": 11038, - "attachment": 28638, - "attack": 24971, - "attack": 3815, - "attacked": 12366, - "attacker": 39288, - "attackers": 47701, - "attacking": 16813, - "attacks": 7321, - "attain": 46459, - "attar": 37110, - "attemp": 4933, - "attempt": 7409, - "attempted": 17408, - "attempting": 18195, - "attempts": 15610, - "atten": 4084, - "atten": 32408, - "attenborough": 45860, - "attend": 9841, - "attend": 5802, - "attendance": 11928, - "attendant": 35424, - "attended": 8140, - "attendees": 14648, - "attending": 6696, - "attends": 22248, - "attention": 4936, - "atters": 30675, - "atthe": 21489, - "atti": 49265, - "atti": 16235, - "attic": 26766, - "attire": 21222, - "attitude": 10648, - "attitudes": 27611, - "attle": 14685, - "attle": 5030, - "attn": 25677, - "attor": 8856, - "attorney": 10372, - "attorneys": 29113, - "attrac": 7154, - "attract": 17010, - "attracted": 
28493, - "attracting": 31909, - "attraction": 16807, - "attractions": 22307, - "attractive": 12231, - "attracts": 31024, - "attribu": 24624, - "attributed": 37520, - "attributes": 40763, - "attu": 43173, - "atty": 36705, - "atu": 15191, - "atu": 24295, - "atuesday": 34841, - "atul": 1744, - "atul": 43948, - "atum": 48295, - "atur": 14986, - "aturday": 29027, - "ature": 25305, - "ature": 4490, - "atures": 7358, - "atus": 14795, - "atv": 19598, - "atwood": 45680, - "atwork": 39680, - "atx": 34849, - "atx": 20136, - "aty": 40974, - "aty": 33107, - "atz": 30432, - "au": 627, - "au": 2566, - "aua": 45906, - "aub": 45938, - "auberg": 49382, - "aubre": 25899, - "aubrey": 34110, - "auburn": 42269, - "auburn": 14534, - "auc": 24489, - "auch": 43024, - "auck": 14588, - "auckland": 16072, - "auction": 48160, - "auction": 6462, - "auctioned": 41073, - "auctions": 24876, - "aucus": 47374, - "aud": 16107, - "aud": 19711, - "audi": 5091, - "audi": 10277, - "audible": 33227, - "audience": 6863, - "audiences": 22328, - "audio": 13792, - "audio": 5766, - "audiobook": 26282, - "audit": 12505, - "audit": 17625, - "auditi": 37377, - "audition": 18673, - "auditions": 21134, - "auditor": 38050, - "auditorium": 15063, - "audre": 16075, - "audrey": 18812, - "audu": 27934, - "audubon": 40275, - "auer": 33460, - "auf": 28924, - "aug": 15397, - "aug": 5720, - "auga": 22797, - "augh": 28310, - "augh": 14005, - "augmente": 48356, - "augmented": 32708, - "augu": 2610, - "august": 24353, - "august": 3171, - "augusta": 26144, - "augustine": 27397, - "augustus": 36835, - "auk": 19058, - "aul": 20695, - "aul": 34391, - "ault": 47253, - "ault": 10219, - "aun": 10608, - "aun": 38721, - "aunt": 12685, - "auntie": 23783, - "aunty": 29528, - "aur": 8156, - "aur": 17282, - "aura": 27728, - "aure": 36010, - "aureli": 35980, - "auror": 30067, - "aurora": 13500, - "aus": 10624, - "aus": 7630, - "ausa": 37384, - "ausbiz": 46543, - "ausch": 33926, - "auschwitz": 36523, - "ausopen": 27831, - "ausp": 35039, - 
"auspicious": 38806, - "auspol": 8241, - "aussi": 19762, - "aussie": 40230, - "aussie": 14424, - "aussies": 35727, - "aust": 26301, - "aust": 25418, - "austen": 29885, - "auster": 25030, - "austerity": 26982, - "austin": 12845, - "austin": 5125, - "austinmahone": 34678, - "austr": 2518, - "australi": 13798, - "australia": 3444, - "australian": 23630, - "australian": 6258, - "australians": 31488, - "austri": 8946, - "austria": 11960, - "austrian": 20638, - "ausv": 35206, - "ausvotes": 34661, - "aut": 12343, - "auth": 2381, - "auth": 38247, - "authent": 18158, - "authentic": 41266, - "authentic": 10369, - "authentication": 39746, - "authenticity": 35734, - "autho": 34552, - "author": 14447, - "author": 4358, - "authored": 37928, - "authori": 19207, - "authorities": 12729, - "authority": 10524, - "authorization": 48854, - "authorized": 28463, - "authors": 10765, - "auti": 8200, - "autism": 36256, - "autism": 11244, - "autisma": 43324, - "autistic": 29360, - "auto": 3917, - "auto": 5668, - "autobiography": 31509, - "autodesk": 40415, - "autograph": 10657, - "autograph": 13722, - "autographed": 16309, - "autographs": 17376, - "autoimmune": 45509, - "autom": 4114, - "automate": 43203, - "automated": 19022, - "automatic": 12126, - "automatically": 20725, - "automation": 12328, - "automobi": 44813, - "automobile": 25258, - "automotive": 12607, - "auton": 13100, - "autonews": 43975, - "autonom": 17870, - "autonomous": 20722, - "autonomy": 39223, - "autopsy": 44436, - "autos": 31118, - "autoshow": 46788, - "auts": 21140, - "autu": 5445, - "autum": 31783, - "autumn": 28940, - "autumn": 6110, - "autumnal": 35481, - "aux": 18154, - "aux": 8909, - "auxiliary": 37778, - "av": 722, - "av": 8484, - "ava": 12385, - "avage": 31505, - "avail": 1651, - "avail": 16686, - "availability": 17551, - "available": 1685, - "aval": 18012, - "avalan": 23970, - "avalanche": 25815, - "avalley": 45082, - "avalon": 30436, - "avan": 27971, - "avan": 33351, - "avant": 24305, - "avar": 33423, - 
"avatar": 18219, - "ave": 10062, - "ave": 4860, - "avec": 25828, - "aved": 47918, - "avel": 46817, - "avel": 48088, - "aven": 5963, - "aven": 32971, - "aveng": 21935, - "avenger": 24799, - "avengers": 39413, - "avengers": 12016, - "avengersendgame": 49342, - "avent": 22700, - "avenue": 7042, - "aver": 8788, - "aver": 11403, - "average": 6254, - "averaged": 37310, - "averages": 48982, - "averaging": 35266, - "avery": 20313, - "aves": 14023, - "avfc": 21304, - "avg": 19452, - "avgeek": 11114, - "avi": 3324, - "avi": 11297, - "avia": 38710, - "avian": 24115, - "aviation": 27717, - "aviation": 7617, - "aviator": 38921, - "aviators": 48011, - "avici": 46192, - "avicii": 49158, - "avid": 19118, - "avier": 14598, - "avila": 45339, - "aville": 40689, - "avin": 46204, - "avis": 45163, - "avis": 19765, - "aviv": 22130, - "aviva": 47122, - "aviz": 27607, - "avl": 44749, - "avo": 4496, - "avo": 32400, - "avoc": 12291, - "avocado": 14135, - "avocados": 48911, - "avoi": 16797, - "avoid": 30448, - "avoid": 5983, - "avoidance": 47983, - "avoided": 32103, - "avoiding": 22086, - "avoids": 48220, - "avon": 22790, - "avon": 17348, - "avril": 37763, - "avs": 31896, - "avut": 44472, - "avy": 29973, - "aw": 808, - "aw": 5557, - "awa": 4820, - "awa": 6872, - "await": 20769, - "awaited": 20092, - "awaiting": 14872, - "awaits": 15635, - "awak": 9776, - "awak": 41387, - "awake": 14695, - "awaken": 35412, - "awakening": 17017, - "awakens": 23191, - "awal": 42447, - "awal": 35090, - "awan": 48869, - "awan": 20420, - "awar": 5745, - "award": 36310, - "award": 2047, - "awarded": 7368, - "awarding": 37089, - "awards": 34528, - "awards": 2320, - "aware": 4427, - "aware": 7196, - "awareness": 19217, - "awareness": 4823, - "awarenessmonth": 34278, - "awarenessweek": 35294, - "away": 21088, - "away": 1520, - "aways": 12782, - "awaz": 18586, - "awd": 34846, - "awe": 1693, - "awe": 14106, - "aweather": 42142, - "aweather": 28681, - "awec": 38916, - "aweed": 29724, - "awesom": 16727, - "awesome": 30390, 
- "awesome": 1848, - "awesomeness": 22430, - "awful": 13617, - "awg": 46350, - "awgs": 35275, - "awh": 39566, - "awhile": 19171, - "awi": 15167, - "awil": 47271, - "awilliams": 42163, - "awk": 8888, - "awk": 40943, - "awkward": 42337, - "awkward": 10304, - "awn": 46222, - "awp": 43300, - "aws": 19658, - "awsome": 47196, - "awson": 36286, - "aww": 11568, - "awww": 15634, - "awwww": 26460, - "awx": 28385, - "ax": 3165, - "ax": 9203, - "axe": 19861, - "axel": 47889, - "axel": 32131, - "axes": 45970, - "axi": 30672, - "axial": 46550, - "axis": 19614, - "axle": 39003, - "axx": 47411, - "ay": 658, - "ay": 551, - "aya": 5917, - "ayala": 39827, - "ayama": 41194, - "ayan": 37781, - "ayan": 16269, - "ayana": 37400, - "ayas": 40904, - "ayat": 44902, - "ayat": 35720, - "aye": 21661, - "aye": 12446, - "ayer": 24852, - "ayers": 42783, - "ayesha": 46570, - "ayi": 33025, - "ayles": 44706, - "ayne": 35669, - "ayo": 21929, - "ayo": 18708, - "ayr": 23002, - "ayr": 36473, - "ayrshire": 32687, - "ays": 785, - "ayu": 40769, - "ayurve": 27185, - "ayurveda": 38986, - "ayush": 44831, - "ayy": 32514, - "ayyy": 41052, - "az": 854, - "az": 5468, - "aza": 22883, - "azad": 37838, - "azalea": 34087, - "azam": 34727, - "azar": 27911, - "azcardinals": 48846, - "aze": 41157, - "aze": 28485, - "azer": 19169, - "azerbai": 20649, - "azerbaijan": 23888, - "azhar": 47019, - "azi": 23914, - "azi": 18452, - "azine": 29140, - "azione": 48335, - "aziz": 41205, - "aziz": 29630, - "azo": 41227, - "azon": 36854, - "azores": 42826, - "azte": 33270, - "aztec": 34749, - "aztecs": 49387, - "azu": 27701, - "azu": 46963, - "azul": 39807, - "azure": 18514, - "azwx": 30262, - "azy": 24783, - "azz": 9817, - "azz": 26453, - "azza": 22255, - "azzi": 18758, - "azzle": 39974, - "azzo": 26779, - "azzur": 37055, - "azzy": 44534, - "añ": 23716, - "años": 41634, - "b": 65, - "b": 321, - "ba": 932, - "ba": 1792, - "baa": 33004, - "baahu": 34145, - "baahubali": 38663, - "bab": 1202, - "bab": 19039, - "baba": 12631, - "babe": 
31177, - "babe": 7716, - "babes": 14253, - "babies": 6635, - "babs": 36217, - "babu": 21623, - "baby": 7268, - "baby": 1794, - "babygirl": 39554, - "babylon": 31928, - "babymetal": 45013, - "babys": 22266, - "babysitting": 34186, - "bac": 2791, - "bac": 25867, - "bacca": 40708, - "bach": 11773, - "bach": 8758, - "bachchan": 17690, - "bachel": 11283, - "bachelor": 45508, - "bachelor": 16766, - "bachelore": 26009, - "bachelorette": 29093, - "bacher": 49211, - "back": 1663, - "back": 893, - "backbone": 35635, - "backdrop": 20802, - "backed": 12721, - "backer": 22183, - "backers": 32934, - "background": 5994, - "backgrounds": 28215, - "backing": 14935, - "backlash": 31519, - "backpack": 14894, - "backpacking": 29524, - "backpacks": 37063, - "backs": 7562, - "backseat": 48812, - "backstage": 9236, - "backstreet": 46337, - "backthe": 26127, - "backto": 18703, - "backtoschool": 28730, - "backtothe": 43059, - "backup": 14415, - "backward": 37964, - "backwards": 21283, - "backyard": 12608, - "bacon": 48666, - "bacon": 7104, - "bacter": 11814, - "bacteria": 16556, - "bacterial": 26101, - "bad": 2564, - "bad": 2103, - "bada": 37475, - "badan": 39149, - "badass": 11616, - "baddest": 38112, - "baden": 36690, - "bader": 42254, - "badge": 11301, - "badger": 32686, - "badger": 22363, - "badgers": 22521, - "badges": 20084, - "badlands": 43192, - "badly": 13684, - "badminton": 21412, - "badoo": 33192, - "bados": 25755, - "bae": 32834, - "bae": 6855, - "baek": 18557, - "baek": 32702, - "baekhyun": 21572, - "baes": 46332, - "baf": 13616, - "baff": 35693, - "bafta": 29199, - "bag": 3408, - "bag": 3365, - "bage": 9698, - "bagel": 28777, - "bagels": 37489, - "baggage": 31402, - "bagged": 34047, - "bagh": 21659, - "bagh": 37271, - "baghdad": 30763, - "bago": 25105, - "bags": 6136, - "bagu": 27749, - "baguette": 45334, - "bah": 8372, - "bah": 16685, - "baha": 29592, - "baham": 43718, - "bahamas": 21224, - "bahan": 28704, - "bahn": 33452, - "bahrain": 12503, - "bai": 6232, - "bai": 23339, - 
"bail": 22933, - "bail": 16986, - "bailey": 27535, - "bailey": 10180, - "bain": 40784, - "bain": 21593, - "bair": 29059, - "baird": 40474, - "bait": 18010, - "baj": 20713, - "baja": 40418, - "baja": 28374, - "bajo": 32619, - "bak": 4059, - "bak": 23742, - "bakar": 41414, - "bake": 20736, - "bake": 11878, - "baked": 10364, - "baker": 27303, - "baker": 7743, - "bakers": 35293, - "bakers": 40231, - "bakersfield": 40149, - "bakery": 13377, - "bakes": 43057, - "bakhta": 44912, - "bakhtawar": 46937, - "bakhtawarbz": 47118, - "baking": 11467, - "baku": 46417, - "baku": 31852, - "bal": 1398, - "bal": 2282, - "bala": 20291, - "balaji": 48694, - "balance": 42894, - "balance": 6827, - "balanced": 15273, - "balances": 37733, - "balancing": 23541, - "balboa": 45098, - "balcony": 16169, - "bald": 11153, - "bald": 14875, - "baldhead": 29191, - "baldwin": 16242, - "bale": 48573, - "bale": 18873, - "bales": 42879, - "bali": 16432, - "bali": 10900, - "balkan": 48499, - "balkans": 42987, - "ball": 3807, - "ball": 1069, - "balla": 42246, - "ballad": 33472, - "ballarat": 46645, - "ballard": 31750, - "baller": 49194, - "baller": 25655, - "ballerina": 34962, - "ballers": 34173, - "ballet": 10703, - "balli": 29406, - "ballin": 47444, - "ballin": 33057, - "balling": 47588, - "ballis": 46675, - "ballistic": 36667, - "ballo": 8871, - "ballon": 36469, - "balloon": 13634, - "balloons": 18130, - "ballot": 14185, - "ballots": 35051, - "ballpark": 26080, - "ballroom": 15493, - "balls": 6927, - "bally": 17275, - "bally": 29451, - "balm": 24962, - "balmain": 45929, - "balo": 12395, - "baloch": 23173, - "balochistan": 21918, - "balot": 44615, - "balotelli": 45721, - "bals": 44154, - "balsam": 29121, - "balsamic": 32654, - "balt": 24441, - "balti": 8400, - "baltic": 23817, - "baltimore": 38502, - "baltimore": 9582, - "balu": 38093, - "bam": 6383, - "bam": 12686, - "bama": 20021, - "bambam": 34538, - "bambi": 46596, - "bamboo": 49322, - "bamboo": 16748, - "ban": 1159, - "ban": 2777, - "bana": 18428, - 
"banan": 38410, - "banana": 8922, - "bananas": 19121, - "banc": 39252, - "band": 4613, - "band": 1963, - "banda": 31865, - "bandai": 42054, - "bandana": 39265, - "bandcamp": 32229, - "banded": 37804, - "bandic": 44400, - "bandit": 27639, - "bandits": 33940, - "bandra": 41393, - "bands": 7858, - "bandung": 29512, - "bandwagon": 36432, - "bandwidth": 48859, - "bane": 9597, - "banerjee": 48102, - "banff": 29565, - "bang": 3524, - "bang": 6907, - "bangalore": 14697, - "banger": 24872, - "bangers": 38311, - "banging": 33033, - "bangkok": 12351, - "bangla": 10339, - "bangla": 45928, - "bangladesh": 11245, - "bangle": 37634, - "bangor": 31190, - "bangs": 27992, - "bangtan": 39131, - "bani": 19732, - "banjo": 27014, - "bank": 7061, - "bank": 2723, - "banker": 27316, - "bankers": 30599, - "bankholiday": 48868, - "banking": 9566, - "bankno": 49201, - "bankof": 39120, - "bankrup": 21904, - "bankrupt": 23077, - "bankrupt": 37288, - "bankruptcy": 23978, - "banks": 6367, - "banksy": 33350, - "bann": 5304, - "banned": 12012, - "banner": 9185, - "banners": 23145, - "banning": 26246, - "bannon": 29710, - "bano": 42947, - "banquet": 14254, - "bans": 15146, - "bant": 23301, - "bant": 46657, - "banter": 25535, - "bao": 39487, - "bao": 20408, - "bap": 7415, - "bap": 23754, - "bapti": 15477, - "baptism": 36765, - "baptist": 13274, - "baptiste": 45770, - "baptized": 45400, - "bar": 1040, - "bar": 2411, - "bara": 19345, - "barack": 18670, - "barack": 22481, - "barackobama": 18885, - "barak": 47419, - "barak": 16260, - "barang": 38446, - "barb": 24173, - "barb": 20913, - "barbados": 26992, - "barbar": 7906, - "barbara": 10937, - "barbarian": 42530, - "barbe": 18372, - "barbecue": 23501, - "barber": 19517, - "barber": 12296, - "barbershop": 37707, - "barbican": 47668, - "barbie": 16923, - "barca": 22942, - "barcel": 6134, - "barcelon": 47820, - "barcelona": 6412, - "barclay": 48877, - "barclay": 45276, - "barclays": 29538, - "bard": 39812, - "bard": 17514, - "bare": 16023, - "bare": 14318, 
- "barefoot": 30327, - "barely": 12684, - "bargain": 15076, - "bargaining": 41282, - "bargains": 34126, - "barge": 28272, - "bari": 21428, - "bari": 28016, - "barista": 31078, - "barit": 46300, - "bark": 32333, - "bark": 16560, - "barker": 20618, - "barking": 32676, - "barkley": 30266, - "barley": 22607, - "barlow": 25483, - "barn": 10490, - "barn": 10942, - "barnab": 43272, - "barnard": 44332, - "barne": 42527, - "barnes": 13102, - "barnet": 41943, - "barnett": 27650, - "barney": 24563, - "barns": 43759, - "barnsley": 37109, - "barnsley": 32153, - "baro": 17422, - "baro": 30817, - "baron": 48371, - "baron": 19349, - "baroness": 45056, - "barons": 45596, - "baroque": 25065, - "barr": 39473, - "barr": 22492, - "barra": 28442, - "barra": 33542, - "barrabest": 41376, - "barrac": 40835, - "barracks": 35822, - "barre": 13840, - "barre": 38257, - "barred": 33261, - "barrel": 11703, - "barrels": 22059, - "barren": 46743, - "barrett": 18701, - "barri": 8660, - "barric": 29189, - "barrie": 27090, - "barrier": 15706, - "barriers": 16321, - "barrington": 48954, - "barron": 34881, - "barrow": 42568, - "barrow": 24983, - "barry": 18028, - "barry": 8461, - "barrymore": 49310, - "bars": 8616, - "barstool": 44826, - "bart": 14838, - "bart": 12870, - "bartender": 33498, - "barthol": 48989, - "bartlett": 37130, - "bartol": 38209, - "barton": 48853, - "barton": 20345, - "baru": 16356, - "barun": 38278, - "barunsob": 41398, - "barça": 32788, - "bas": 1244, - "bas": 11420, - "basa": 26142, - "base": 2776, - "base": 4579, - "baseball": 23479, - "baseball": 3470, - "based": 35196, - "based": 2812, - "basel": 42803, - "basel": 20903, - "baseline": 40648, - "baseman": 45910, - "basement": 14792, - "bases": 20496, - "bash": 20462, - "bash": 10972, - "bashing": 37545, - "bashir": 42799, - "basic": 40452, - "basic": 7696, - "basically": 9125, - "basics": 15825, - "basil": 19225, - "basil": 14936, - "basilica": 27879, - "basin": 16117, - "basing": 47321, - "basis": 12278, - "baske": 3713, - 
"basket": 10338, - "basketball": 40023, - "basketball": 3835, - "baskets": 27787, - "basking": 39769, - "basque": 37175, - "bass": 22831, - "bass": 5992, - "bassett": 45992, - "bassist": 26496, - "bast": 28092, - "basti": 8559, - "bastille": 41874, - "bat": 2121, - "bat": 6575, - "bata": 39277, - "batb": 33962, - "batch": 9413, - "bate": 25034, - "bate": 28277, - "bateman": 41635, - "bates": 21727, - "batgirl": 46460, - "bath": 6064, - "bath": 5713, - "bathing": 20144, - "bathro": 21201, - "bathroom": 8470, - "bathrooms": 26434, - "baths": 19442, - "bathtub": 39942, - "bathurst": 36365, - "bati": 23362, - "bati": 37589, - "batman": 27811, - "batman": 7223, - "baton": 24331, - "bats": 14984, - "batsman": 35432, - "batt": 2407, - "batt": 48595, - "battalion": 20820, - "batter": 12654, - "batter": 31855, - "battered": 34375, - "batteries": 16666, - "battersea": 35839, - "battery": 7870, - "batting": 17401, - "battle": 7344, - "battle": 3528, - "battled": 37837, - "battlefield": 16055, - "battlefront": 42214, - "battleof": 47560, - "battles": 14213, - "battleship": 35165, - "battling": 17268, - "bau": 6055, - "bau": 34840, - "bauer": 22903, - "baugh": 41301, - "baum": 19840, - "bautista": 31881, - "bav": 21075, - "bavaria": 39977, - "bavarian": 44458, - "baw": 19808, - "bax": 21216, - "baxter": 26168, - "bay": 3631, - "bay": 2174, - "baya": 31573, - "bayan": 43895, - "bayarea": 28260, - "bayer": 48548, - "bayer": 29183, - "bayern": 14666, - "baylor": 21721, - "bayou": 33955, - "bays": 40156, - "baz": 10430, - "baz": 25268, - "bazaar": 20070, - "bazar": 49298, - "bb": 1174, - "bb": 3529, - "bba": 27762, - "bball": 15664, - "bbb": 33535, - "bbc": 5123, - "bbc": 5188, - "bbcc": 39052, - "bbce": 33818, - "bbcnews": 29370, - "bbcone": 28259, - "bbcqt": 37343, - "bbcr": 35802, - "bbcra": 17115, - "bbcradi": 49213, - "bbcradio": 22876, - "bbcsport": 49321, - "bbcspringwatch": 37358, - "bbctwo": 40395, - "bbcworld": 47340, - "bbe": 37559, - "bbed": 9077, - "bber": 7933, - 
"bbers": 36494, - "bbhutto": 28085, - "bbhuttozardari": 28135, - "bbi": 37047, - "bbin": 38553, - "bbing": 9787, - "bbins": 42504, - "bbl": 21961, - "bble": 26570, - "bble": 5924, - "bbled": 37626, - "bbles": 18093, - "bblo": 21231, - "bbloggers": 26614, - "bbly": 43031, - "bbm": 25382, - "bbmas": 22145, - "bbn": 28427, - "bbnaija": 20984, - "bbo": 21892, - "bbq": 41270, - "bbq": 6726, - "bbs": 10002, - "bbuk": 45978, - "bby": 11166, - "bby": 3810, - "bc": 3116, - "bc": 2162, - "bcc": 41509, - "bcci": 36138, - "bce": 36510, - "bcfc": 34359, - "bch": 36684, - "bcn": 25766, - "bcoz": 46373, - "bcpoli": 24389, - "bcs": 24909, - "bcu": 28299, - "bd": 24358, - "bd": 11165, - "bday": 33022, - "bday": 5781, - "bdg": 48418, - "bds": 26732, - "be": 571, - "be": 655, - "bea": 21886, - "bea": 20925, - "beach": 6068, - "beach": 2117, - "beaches": 12183, - "beachlife": 43824, - "beacon": 36883, - "beacon": 18858, - "beacons": 39395, - "bead": 31621, - "bead": 23557, - "beaded": 26661, - "beads": 14099, - "beagle": 30044, - "beak": 36498, - "beal": 45769, - "beale": 39717, - "beam": 35339, - "beam": 13663, - "beams": 23993, - "bean": 16471, - "bean": 5328, - "beanie": 21534, - "beans": 8302, - "bear": 6375, - "bear": 4298, - "bearable": 38608, - "bearcats": 33242, - "beard": 26157, - "beard": 9052, - "bearded": 28459, - "beardown": 43687, - "beards": 33020, - "bearer": 30686, - "bearers": 47986, - "bearing": 18370, - "bearings": 42083, - "bearish": 34829, - "bears": 6182, - "beasley": 43349, - "beast": 20847, - "beast": 6957, - "beastmode": 43076, - "beasts": 21771, - "beat": 3774, - "beat": 3018, - "beaten": 10864, - "beater": 41974, - "beati": 44386, - "beating": 10078, - "beatles": 11961, - "beatport": 31421, - "beatrice": 36922, - "beats": 6289, - "beatthe": 40550, - "beatty": 39903, - "beatz": 33363, - "beau": 1016, - "beau": 14298, - "beaufort": 45423, - "beaumont": 32857, - "beaut": 24559, - "beauti": 1154, - "beauties": 14874, - "beautiful": 13662, - "beautiful": 1215, - 
"beautifully": 10627, - "beauty": 12881, - "beauty": 2488, - "beav": 23260, - "beaver": 26432, - "beaver": 22874, - "beavers": 34513, - "beavs": 43909, - "bebe": 23331, - "bec": 6899, - "bec": 10773, - "became": 5464, - "because": 32714, - "because": 1631, - "becca": 27088, - "bech": 44055, - "beck": 8256, - "beck": 10396, - "becker": 26918, - "beckett": 27249, - "beckham": 18764, - "becky": 32406, - "becky": 18921, - "become": 2989, - "becomes": 6766, - "becoming": 6208, - "bed": 4152, - "bed": 2722, - "bedding": 31761, - "bedford": 20779, - "bedi": 39181, - "bedro": 18415, - "bedroom": 8411, - "bedrooms": 23996, - "beds": 13914, - "bedside": 47473, - "bedtime": 22115, - "bee": 6097, - "bee": 5028, - "beech": 32733, - "beech": 27596, - "beef": 21703, - "beef": 6529, - "beek": 37915, - "been": 33986, - "been": 1025, - "beep": 33432, - "beer": 8885, - "beer": 2544, - "beers": 10907, - "bees": 36249, - "bees": 9100, - "beet": 12582, - "beet": 28621, - "beethoven": 23656, - "beetle": 16534, - "beetles": 36317, - "beetro": 29251, - "beetroot": 31638, - "beets": 36087, - "before": 20898, - "before": 1348, - "beg": 2219, - "beg": 22401, - "began": 8636, - "begg": 36769, - "begging": 25371, - "begin": 19197, - "begin": 4947, - "beginner": 24351, - "beginners": 21930, - "beginning": 5791, - "beginnings": 22581, - "begins": 4635, - "begs": 43531, - "begun": 10514, - "beh": 21971, - "beh": 41612, - "beha": 5737, - "behalf": 11470, - "behave": 28825, - "behaved": 41617, - "behavi": 6149, - "behaving": 40745, - "behavior": 10461, - "behavioral": 25135, - "behaviors": 37741, - "behaviour": 14655, - "behavioural": 46019, - "behe": 42329, - "behin": 2335, - "behind": 2403, - "behindthe": 21104, - "behindthescenes": 26253, - "behold": 15929, - "bei": 38991, - "bei": 23227, - "beige": 26677, - "beij": 11547, - "beijing": 11796, - "bein": 39117, - "bein": 24168, - "being": 13481, - "being": 1265, - "beings": 17998, - "beingsalmankhan": 19637, - "beir": 20176, - "beirut": 22352, - 
"beit": 26963, - "bek": 46846, - "bek": 26135, - "bekind": 46691, - "bel": 1308, - "bel": 3543, - "bela": 30555, - "belarus": 30849, - "belated": 20256, - "belfast": 35100, - "belfast": 10015, - "belgi": 7001, - "belgian": 15008, - "belgium": 10239, - "belgrade": 30502, - "beli": 1859, - "beli": 45842, - "belichick": 46132, - "belie": 20854, - "beliebers": 27714, - "belief": 14802, - "beliefs": 20575, - "believ": 4972, - "believe": 15819, - "believe": 2649, - "believed": 13380, - "believein": 24294, - "believeinfilm": 37375, - "believer": 26057, - "believers": 28434, - "believes": 12017, - "believing": 19551, - "belinda": 44415, - "belize": 27990, - "bell": 5417, - "bell": 3718, - "bella": 18282, - "bella": 10418, - "bellamy": 34461, - "bellator": 31985, - "belle": 13587, - "belle": 11496, - "belles": 40678, - "bellevue": 32715, - "belli": 43335, - "bellletstalk": 42695, - "bello": 21954, - "bells": 12811, - "bellum": 35493, - "belly": 25901, - "belly": 10404, - "belmont": 25612, - "belo": 8379, - "belo": 41649, - "belong": 16453, - "belong": 13596, - "belonged": 39893, - "belonging": 28193, - "belongs": 14395, - "beloved": 9363, - "below": 3788, - "bels": 43127, - "belt": 36416, - "belt": 7373, - "belts": 21888, - "belvedere": 48003, - "ben": 1465, - "ben": 3518, - "bena": 46249, - "bench": 17770, - "bench": 8771, - "benches": 36349, - "benchmark": 31775, - "bend": 22100, - "bend": 13332, - "bender": 22551, - "bendigo": 48197, - "bending": 33897, - "bene": 12091, - "bene": 47151, - "beneath": 16850, - "bened": 13216, - "benedic": 24402, - "benedict": 47896, - "benedict": 18027, - "benef": 3260, - "benefici": 38593, - "beneficial": 24660, - "beneficiaries": 42160, - "benefit": 6399, - "benefited": 48266, - "benefiting": 29474, - "benefits": 5465, - "benefitting": 47222, - "benevol": 47060, - "benfica": 33873, - "beng": 6962, - "bengal": 17404, - "bengal": 16374, - "bengali": 33774, - "bengals": 23737, - "bengaluru": 21707, - "benghazi": 25967, - "benin": 40296, - 
"benitez": 46711, - "benjam": 10550, - "benjamin": 38647, - "benjamin": 12131, - "benji": 43548, - "benn": 39097, - "bennet": 48536, - "bennett": 12186, - "benny": 42369, - "benny": 20595, - "beno": 35268, - "benoit": 44373, - "benson": 19578, - "bent": 9809, - "bent": 18369, - "bentley": 16859, - "benton": 30812, - "benz": 27937, - "benz": 13470, - "ber": 867, - "ber": 1516, - "bera": 32802, - "bere": 17458, - "bered": 9193, - "beren": 33654, - "beret": 41658, - "berg": 12022, - "berg": 3294, - "bergen": 22918, - "berger": 35933, - "berger": 13873, - "bergh": 35120, - "bergman": 42597, - "bergs": 43592, - "berk": 15633, - "berke": 14639, - "berkeley": 46049, - "berkeley": 16667, - "berkshire": 27300, - "berlin": 23532, - "berlin": 5891, - "berman": 21514, - "bermu": 21032, - "bermuda": 24644, - "bern": 9195, - "bern": 18382, - "bernade": 46242, - "bernar": 11962, - "bernard": 14579, - "bernardino": 35328, - "bernardo": 27137, - "bernardo": 28696, - "bernardokath": 29081, - "bernat": 40578, - "berni": 18798, - "bernie": 40093, - "bernie": 10503, - "berniesanders": 23745, - "bernstein": 33936, - "berra": 15089, - "berries": 8319, - "berry": 15334, - "berry": 3488, - "bers": 6408, - "berser": 39037, - "bert": 17340, - "bert": 2358, - "berta": 45187, - "berth": 28317, - "bertie": 47182, - "berto": 34073, - "bertr": 36962, - "bertrand": 41594, - "berts": 30205, - "berty": 35973, - "berwick": 40407, - "bery": 11411, - "bes": 26911, - "bes": 3635, - "beside": 13519, - "besides": 17596, - "bespoke": 15612, - "bess": 43791, - "best": 3419, - "best": 949, - "bestbuy": 29749, - "bestest": 31199, - "bestfan": 23880, - "bestfanarmy": 24590, - "bestfriend": 29832, - "bestfriend": 11856, - "bestfriends": 23555, - "besti": 35210, - "bestie": 17188, - "besties": 27346, - "besto": 28615, - "bestof": 27892, - "bestof": 39533, - "bestseller": 25841, - "bestselling": 28632, - "bet": 1051, - "bet": 4430, - "beta": 43188, - "beta": 9505, - "betes": 10255, - "beth": 9993, - "beth": 4892, 
- "bethan": 18781, - "bethany": 39130, - "bethany": 27952, - "bethe": 12624, - "bethel": 33410, - "bethesda": 32527, - "bethle": 30760, - "bethlehem": 31827, - "betis": 45590, - "beto": 33721, - "betra": 18436, - "betrayal": 33171, - "betrayed": 35692, - "bets": 17107, - "betsy": 28946, - "bett": 17715, - "bett": 20489, - "betta": 36387, - "bette": 35855, - "better": 10320, - "better": 1539, - "bettertogether": 47392, - "betting": 14319, - "betts": 38637, - "betty": 36175, - "betty": 14350, - "between": 1957, - "beu": 38660, - "bev": 40324, - "bev": 30968, - "bever": 9924, - "beverage": 18694, - "beverages": 28521, - "beverley": 39165, - "beverly": 30906, - "beverly": 16728, - "beverlyhills": 45363, - "beware": 14532, - "bewithyou": 36787, - "bex": 18676, - "bex": 24748, - "bexhill": 49200, - "bey": 3234, - "bey": 6767, - "beyon": 11447, - "beyonce": 16632, - "beyoncé": 19219, - "beyond": 22246, - "beyond": 4432, - "bez": 28592, - "bez": 46764, - "bezos": 45000, - "bf": 19858, - "bf": 7990, - "bfc": 37183, - "bff": 11984, - "bffs": 31462, - "bfi": 34244, - "bg": 16674, - "bg": 11295, - "bgc": 47598, - "bgs": 47963, - "bgt": 40665, - "bh": 9930, - "bh": 13603, - "bha": 6144, - "bha": 33068, - "bhafc": 30779, - "bhagat": 49136, - "bhai": 48370, - "bhai": 20508, - "bhak": 34501, - "bham": 31874, - "bham": 23491, - "bhan": 27356, - "bhand": 48679, - "bhar": 9108, - "bharat": 27454, - "bharat": 17430, - "bharti": 46803, - "bhat": 23784, - "bhatt": 36143, - "bhav": 44950, - "bhi": 28943, - "bhi": 21955, - "bhk": 45070, - "bhm": 38741, - "bho": 19721, - "bhopal": 44573, - "bhp": 29776, - "bhs": 29195, - "bhu": 9172, - "bhuban": 38729, - "bhubanes": 41213, - "bhubaneswar": 45888, - "bhushan": 40884, - "bhutan": 32391, - "bhutto": 30153, - "bi": 717, - "bi": 3035, - "bia": 3841, - "biaf": 26961, - "biafra": 36355, - "bian": 19531, - "bian": 9027, - "bianca": 25854, - "bianchi": 45720, - "bians": 28141, - "bias": 11268, - "biased": 22178, - "bib": 44607, - "bib": 21022, - 
"bibi": 31182, - "bibl": 20912, - "bible": 26738, - "bible": 7583, - "bibli": 23465, - "biblical": 22841, - "biblio": 49131, - "bic": 5960, - "bic": 10675, - "bice": 35589, - "biceps": 46735, - "bick": 27238, - "bicy": 9247, - "bicycle": 11652, - "bicycles": 31326, - "bid": 21035, - "bid": 5553, - "bidding": 23237, - "bide": 45178, - "biden": 19451, - "bids": 16148, - "bie": 5561, - "bie": 4173, - "bieber": 48725, - "bieber": 7535, - "bien": 19176, - "bien": 25742, - "biennale": 33776, - "biennial": 36609, - "bier": 27226, - "bier": 23508, - "bies": 7867, - "big": 1915, - "big": 1205, - "bigbaldhead": 30325, - "bigbang": 41680, - "bigbang": 23734, - "bigdata": 9440, - "bige": 37762, - "bigfoot": 37095, - "bigg": 15312, - "bigg": 35399, - "biggboss": 27056, - "bigger": 6806, - "biggest": 19483, - "biggest": 3505, - "biggie": 28392, - "biggs": 46507, - "bigh": 18106, - "bighit": 35508, - "bigo": 14278, - "bigolive": 20735, - "bigotry": 37269, - "bigre": 36330, - "bih": 33471, - "bihar": 22849, - "bij": 42478, - "bik": 30306, - "bike": 11686, - "bike": 3701, - "biker": 36100, - "biker": 23449, - "bikers": 29468, - "bikes": 9227, - "bikin": 12638, - "biking": 19157, - "bikini": 14531, - "bil": 3092, - "bil": 20506, - "bilateral": 25599, - "bilbao": 34802, - "bild": 35512, - "bile": 25943, - "bilingual": 29623, - "bilities": 13582, - "bility": 4694, - "bill": 4444, - "bill": 2886, - "billboard": 10856, - "billboards": 34741, - "billed": 37558, - "billi": 7693, - "billie": 23990, - "billing": 31797, - "billings": 43615, - "billion": 14520, - "billion": 5729, - "billionaire": 19475, - "billionaires": 41590, - "billions": 20742, - "bills": 9810, - "billsmafia": 48845, - "billy": 15626, - "billy": 6814, - "bilt": 44770, - "bilt": 26654, - "bim": 46737, - "bim": 24775, - "bin": 4849, - "bin": 5346, - "binance": 43520, - "binary": 23497, - "bind": 44513, - "binder": 30541, - "binding": 21287, - "bine": 34848, - "bing": 24818, - "bing": 5665, - "binge": 22600, - "bingham": 
43785, - "bingham": 47296, - "bingo": 18418, - "bino": 29172, - "bino": 24313, - "bins": 26934, - "bint": 43647, - "bio": 2830, - "bio": 5162, - "biode": 43502, - "biodegradable": 47740, - "biodiversity": 17428, - "biof": 45158, - "biographical": 49232, - "biography": 15423, - "biological": 18821, - "biologist": 35149, - "biology": 9796, - "biom": 13010, - "biomar": 44549, - "biomass": 36746, - "biome": 26218, - "biomed": 29280, - "biomedical": 33117, - "bionic": 46201, - "biop": 15009, - "biopic": 27942, - "bios": 48505, - "biotech": 22514, - "biotechnology": 40375, - "biotic": 33773, - "biotics": 41371, - "bious": 31845, - "bipartisan": 32266, - "bipolar": 37097, - "bique": 27809, - "bir": 921, - "bir": 16284, - "birch": 31569, - "birch": 22907, - "bird": 6908, - "bird": 3329, - "birdie": 29612, - "birdies": 45618, - "birding": 15851, - "birdman": 41915, - "birdphotography": 47999, - "birds": 41951, - "birds": 4337, - "birdwatching": 33497, - "birk": 48289, - "birken": 40661, - "birmin": 37482, - "birmingham": 38580, - "birmingham": 7720, - "birth": 1128, - "birth": 5397, - "birthday": 7381, - "birthday": 1166, - "birthdays": 17954, - "birthplace": 31429, - "biryani": 46489, - "bis": 5064, - "bis": 14461, - "biscu": 11532, - "biscuit": 18731, - "biscuits": 18248, - "bisexual": 36829, - "bish": 33690, - "bish": 31461, - "bishop": 20625, - "bishop": 8024, - "bishops": 31579, - "bison": 19741, - "bistro": 21770, - "bit": 3010, - "bit": 2010, - "bitcoin": 30848, - "bitcoin": 6366, - "bite": 41613, - "biting": 23016, - "bits": 7747, - "bitt": 39251, - "bius": 45525, - "bix": 46579, - "biz": 8212, - "biz": 5431, - "biza": 47013, - "bizar": 14886, - "bizarre": 16965, - "bizhour": 39462, - "bizitalk": 34929, - "bj": 4592, - "bj": 18229, - "bjj": 27437, - "bjor": 26525, - "bjp": 37264, - "bjp": 6178, - "bk": 15099, - "bk": 14083, - "bkk": 36433, - "bl": 833, - "bl": 9467, - "bla": 2205, - "bla": 19630, - "blac": 21008, - "black": 2025, - "black": 1449, - "blackand": 
12809, - "blackandwhite": 23688, - "blackandwhite": 19506, - "blackandwhitephotography": 27544, - "blackberry": 16470, - "blackbird": 38526, - "blackburn": 23789, - "blackfish": 42193, - "blackfriday": 16445, - "blackgirl": 43591, - "blackhawks": 19203, - "blackhistory": 46982, - "blackhistorymonth": 20135, - "blacklist": 30295, - "blacklivesmatter": 23467, - "blackmail": 47295, - "blackops": 43519, - "blackout": 21733, - "blackpanther": 36592, - "blackpink": 20339, - "blackpool": 21031, - "blacks": 16351, - "blackwell": 42642, - "blad": 36635, - "bladder": 33593, - "blade": 10264, - "blades": 16893, - "blah": 29212, - "blaine": 32457, - "blair": 31824, - "blair": 14749, - "blake": 20229, - "blake": 9579, - "blame": 10695, - "blamed": 32906, - "blames": 27841, - "blaming": 29287, - "blan": 4609, - "blanc": 30936, - "blanc": 13301, - "blanca": 40670, - "blanchard": 40177, - "blanche": 34875, - "blanchett": 49378, - "blanco": 26801, - "bland": 44372, - "bland": 30799, - "blank": 15134, - "blanket": 12878, - "blankets": 24042, - "blanks": 48599, - "blasio": 35553, - "blasphe": 36622, - "blast": 46349, - "blast": 5964, - "blasted": 38976, - "blaster": 36341, - "blasting": 26178, - "blasts": 23067, - "blat": 22048, - "blatant": 41391, - "blatt": 39138, - "blau": 45307, - "blaz": 43413, - "blaze": 15497, - "blazer": 17606, - "blazers": 16984, - "blazing": 25267, - "bldg": 22981, - "ble": 1447, - "ble": 1059, - "bleach": 27034, - "bleak": 40355, - "bled": 12006, - "bleed": 23027, - "bleed": 24791, - "bleedblue": 39160, - "bleeding": 20311, - "bleeds": 47339, - "blen": 25651, - "blend": 10780, - "blended": 25813, - "blender": 25066, - "blending": 34307, - "blends": 28572, - "bler": 31305, - "bler": 11979, - "blers": 26930, - "bles": 5763, - "bless": 9640, - "bless": 5387, - "blessed": 4411, - "blessing": 10729, - "blessings": 11185, - "bleu": 30114, - "blew": 18176, - "bley": 43176, - "bli": 1450, - "bli": 28051, - "blin": 9678, - "blin": 5406, - "blind": 17248, - "blind": 
8351, - "blinded": 49149, - "blindness": 38812, - "blinds": 32449, - "bling": 39764, - "bling": 7097, - "blink": 18976, - "bliss": 28531, - "bliss": 12893, - "blissful": 42145, - "blit": 39327, - "blitz": 42151, - "blitz": 17548, - "blizz": 13075, - "blizzard": 16111, - "blk": 42950, - "blk": 22872, - "blm": 30957, - "bln": 47348, - "blo": 1204, - "blo": 25505, - "blob": 49312, - "bloc": 30961, - "block": 4638, - "block": 4593, - "blockade": 33489, - "blockbuster": 19939, - "blockchain": 6653, - "blocked": 9106, - "blocker": 44767, - "blocking": 12652, - "blocks": 10113, - "blog": 16376, - "blog": 2589, - "blogg": 33282, - "blogged": 41380, - "blogger": 21352, - "blogger": 7806, - "bloggerrt": 48898, - "bloggers": 11627, - "blogging": 18090, - "blogpost": 41842, - "blogs": 16682, - "bloke": 24384, - "blom": 48996, - "blon": 7958, - "blond": 32426, - "blonde": 10711, - "blondes": 45130, - "blondie": 39236, - "bloo": 2373, - "blood": 9231, - "blood": 3590, - "blooded": 41946, - "bloodh": 48480, - "bloods": 39539, - "bloody": 38568, - "bloody": 9468, - "bloom": 7311, - "bloom": 10257, - "bloomberg": 43109, - "bloomberg": 21238, - "bloomfield": 40342, - "blooming": 45175, - "blooming": 19266, - "bloomington": 34731, - "blooms": 21439, - "bloss": 10017, - "blossom": 14472, - "blossoms": 21916, - "blot": 41710, - "blou": 44506, - "blouse": 23525, - "blow": 15230, - "blow": 10211, - "blower": 25832, - "blowing": 12087, - "blown": 11848, - "blowout": 34857, - "blows": 21063, - "blr": 47250, - "bls": 39458, - "blu": 1263, - "blu": 10273, - "blue": 3829, - "blue": 1746, - "bluebells": 47150, - "blueberries": 29551, - "blueberry": 18251, - "bluebird": 40747, - "bluec": 43194, - "bluef": 41174, - "bluegrass": 26241, - "bluejays": 18684, - "blueprint": 30594, - "blues": 17566, - "blues": 5159, - "blueslyrix": 47068, - "bluet": 13469, - "bluetooth": 14052, - "bluewave": 40025, - "bluff": 27232, - "bluffs": 48844, - "blum": 34818, - "blumen": 38714, - "blun": 34472, - "blunt": 
19305, - "blur": 12102, - "blur": 27976, - "bluray": 36818, - "blurred": 38013, - "blurry": 21977, - "blush": 22889, - "blvd": 12578, - "bly": 20930, - "bly": 4426, - "bm": 4773, - "bm": 15916, - "bma": 42573, - "bmc": 27807, - "bmi": 40642, - "bmo": 39083, - "bms": 34074, - "bmw": 26637, - "bmw": 7869, - "bmx": 22535, - "bn": 10496, - "bn": 7992, - "bnb": 20010, - "bnha": 49336, - "bnp": 47910, - "bnw": 35903, - "bo": 647, - "bo": 2525, - "boa": 14732, - "boar": 7837, - "boar": 35473, - "board": 10419, - "board": 1972, - "boarded": 43052, - "boarder": 37414, - "boardgame": 47829, - "boardgames": 32646, - "boarding": 10086, - "boardroom": 47937, - "boards": 7963, - "boardwalk": 29043, - "boast": 44467, - "boasts": 30309, - "boat": 12426, - "boat": 4440, - "boath": 45461, - "boating": 21951, - "boats": 10080, - "boatsales": 46244, - "bob": 8444, - "bob": 4423, - "boba": 39948, - "bobb": 16891, - "bobble": 38796, - "bobblehead": 33451, - "bobby": 17847, - "bobby": 7816, - "bobc": 26153, - "bobcat": 37896, - "bobcats": 27568, - "bobo": 38939, - "bobs": 45533, - "boc": 27307, - "boc": 39042, - "boca": 26094, - "bock": 24961, - "bod": 17904, - "bod": 26340, - "boda": 42030, - "bode": 28452, - "bode": 40429, - "bodega": 47350, - "bodied": 36892, - "bodies": 9799, - "bodily": 49119, - "body": 7132, - "body": 1774, - "bodybuilding": 24538, - "bodyguard": 35565, - "boe": 23476, - "boe": 21773, - "boeh": 38002, - "boehner": 44599, - "boeing": 48135, - "boeing": 11857, - "boer": 44889, - "boer": 40768, - "bog": 23426, - "bog": 28318, - "bogo": 35769, - "bogota": 47059, - "bogus": 42907, - "boh": 43238, - "bohe": 40541, - "bohemi": 21552, - "bohemian": 25753, - "boho": 25444, - "boi": 37129, - "boi": 12673, - "boil": 31332, - "boiled": 23886, - "boiler": 28212, - "boiler": 25615, - "boiling": 32019, - "bois": 47742, - "bois": 21640, - "boise": 23304, - "bok": 26671, - "bok": 15289, - "boko": 30929, - "boks": 40216, - "bol": 2860, - "bol": 8413, - "bola": 12840, - "bold": 
26975, - "bold": 8911, - "boldand": 48413, - "boldly": 44778, - "boli": 12722, - "bolic": 27343, - "bolivia": 28628, - "bollah": 36336, - "bolly": 25302, - "bollywood": 32448, - "bollywood": 9604, - "bolo": 40236, - "bolog": 22818, - "bologna": 27513, - "bolster": 47304, - "bolt": 13131, - "bolton": 48757, - "bolton": 16598, - "bolts": 26028, - "bom": 3012, - "bom": 19469, - "bomb": 18091, - "bomb": 6331, - "bombar": 25544, - "bombardier": 42700, - "bombay": 48602, - "bombay": 23890, - "bombed": 24542, - "bomber": 15436, - "bombers": 21786, - "bombing": 14475, - "bombings": 43236, - "bombs": 14410, - "bombshell": 36340, - "bon": 1871, - "bon": 4216, - "bona": 33342, - "bonanza": 40304, - "bond": 37022, - "bond": 6826, - "bonded": 37390, - "bondi": 40092, - "bonding": 19609, - "bonds": 15786, - "bone": 22502, - "bone": 6195, - "bones": 9476, - "bonfire": 23151, - "bongo": 47519, - "boni": 32269, - "boni": 46356, - "bonita": 42896, - "bonjour": 33176, - "bonkers": 39865, - "bonn": 38969, - "bonnar": 47191, - "bonnaroo": 48777, - "bonne": 25844, - "bonnet": 30636, - "bonnie": 18555, - "bono": 24476, - "bons": 42883, - "bonsai": 44129, - "bonus": 8164, - "bonuses": 35144, - "boo": 824, - "boo": 7317, - "boogie": 22639, - "book": 2828, - "book": 1116, - "bookboost": 31257, - "bookclub": 34438, - "bookday": 26327, - "booked": 12584, - "booker": 21302, - "bookfest": 39381, - "booking": 10145, - "bookings": 18345, - "booklet": 27405, - "bookmark": 33596, - "bookof": 45629, - "bookreview": 27362, - "books": 44382, - "books": 2161, - "bookshelf": 34821, - "bookshop": 24705, - "bookstore": 17999, - "bookstores": 46416, - "bookworm": 20743, - "boom": 9609, - "boom": 7121, - "boomer": 33819, - "boomer": 31766, - "boomers": 37988, - "booming": 33487, - "boon": 24979, - "boon": 35821, - "boone": 23453, - "boop": 45047, - "boost": 44639, - "boost": 6260, - "boosted": 37631, - "booster": 20877, - "boosters": 46859, - "boosting": 28480, - "boosts": 29247, - "boot": 10843, - "boot": 
8087, - "bootcamp": 22051, - "booted": 42564, - "booth": 47895, - "booth": 3971, - "booths": 32653, - "booties": 46188, - "bootleg": 38139, - "boots": 7319, - "booze": 24341, - "bop": 19720, - "bor": 1141, - "bor": 15093, - "bora": 24736, - "bord": 36891, - "bordeaux": 22009, - "border": 16304, - "border": 6177, - "borderlands": 38676, - "borders": 13900, - "bore": 14084, - "bore": 24638, - "bored": 8933, - "boredom": 31460, - "boretum": 38902, - "borg": 14770, - "borgh": 17180, - "boring": 12519, - "boris": 31212, - "boris": 15704, - "borisjohnson": 44481, - "born": 17695, - "born": 2683, - "borne": 42910, - "borne": 9328, - "borneo": 33332, - "bornon": 41811, - "bornonthisday": 42757, - "boro": 26796, - "boro": 7974, - "borough": 22761, - "borough": 6203, - "borrow": 22293, - "borrowed": 28224, - "borrowing": 41045, - "borussia": 36764, - "bos": 14885, - "bos": 9644, - "bosa": 46946, - "bosch": 42009, - "bosch": 19466, - "bosco": 36960, - "bose": 23142, - "bosh": 42244, - "bosni": 42924, - "bosnia": 31396, - "boss": 17935, - "boss": 4206, - "bosses": 23906, - "boston": 11540, - "boston": 4399, - "bostonmarathon": 44533, - "bot": 4136, - "bot": 6947, - "botan": 12554, - "botanic": 32560, - "botanical": 21026, - "botany": 22612, - "botd": 34451, - "both": 36575, - "both": 2212, - "bother": 21125, - "bothered": 27997, - "botox": 43449, - "bots": 13721, - "botswana": 27584, - "bott": 3520, - "bott": 37225, - "bottle": 37306, - "bottle": 5392, - "bottled": 29331, - "bottlen": 46439, - "bottles": 9754, - "bottling": 42006, - "bottom": 32314, - "bottom": 5931, - "bottoms": 31524, - "bou": 3728, - "bou": 23165, - "bouchard": 47930, - "boudo": 48827, - "bought": 4142, - "boul": 24830, - "boulder": 18260, - "boule": 17652, - "boulevard": 19504, - "boun": 5993, - "bounce": 14316, - "bouncing": 32060, - "bouncy": 43415, - "bound": 15140, - "bound": 4567, - "boundaries": 18690, - "boundary": 21344, - "bounds": 37469, - "bounty": 21142, - "bouquet": 20961, - "bour": 2934, - 
"bour": 35486, - "bourbon": 48118, - "bourbon": 14652, - "bourdain": 48095, - "bourg": 20690, - "bourgeo": 45672, - "bourn": 39143, - "bourne": 13789, - "bourne": 5192, - "bournemouth": 20911, - "bout": 19982, - "bout": 8123, - "bouti": 10926, - "boutique": 12179, - "bow": 2297, - "bow": 4040, - "bowden": 48538, - "bowed": 49130, - "bowel": 36880, - "bowen": 25368, - "bower": 40414, - "bowers": 42238, - "bowie": 13036, - "bowing": 46398, - "bowl": 26719, - "bowl": 3814, - "bowled": 39987, - "bowler": 25528, - "bowlers": 42632, - "bowles": 41611, - "bowling": 10390, - "bowls": 17787, - "bowman": 22052, - "bows": 17000, - "bowser": 38234, - "bowski": 48311, - "box": 2774, - "box": 2063, - "boxed": 24190, - "boxer": 40394, - "boxer": 15363, - "boxers": 31019, - "boxes": 8350, - "boxing": 33669, - "boxing": 5554, - "boy": 2927, - "boy": 1876, - "boyband": 31568, - "boyce": 44480, - "boycot": 46208, - "boycott": 31615, - "boycott": 19559, - "boyd": 18295, - "boyfriend": 7328, - "boyfriends": 36541, - "boyle": 22802, - "boys": 25223, - "boys": 2034, - "boyz": 16152, - "bp": 23410, - "bp": 11558, - "bpa": 43855, - "bpd": 48587, - "bpl": 28901, - "bpm": 40338, - "bps": 37794, - "br": 711, - "br": 7532, - "bra": 1195, - "bra": 5860, - "brac": 6663, - "brace": 8376, - "brace": 9183, - "bracelet": 8969, - "bracelets": 20027, - "braces": 19249, - "brack": 25676, - "bracket": 14780, - "brackets": 36183, - "brad": 4848, - "brad": 9405, - "bradbury": 45097, - "braden": 46842, - "bradford": 15062, - "bradley": 31905, - "bradley": 10952, - "brador": 24062, - "bradshaw": 37556, - "brady": 42494, - "brady": 11117, - "brae": 42874, - "brae": 40040, - "brag": 30110, - "bragg": 38545, - "bragging": 38199, - "brah": 20276, - "brahms": 45114, - "brai": 25048, - "braid": 31067, - "braided": 39997, - "braids": 34221, - "brain": 9454, - "brain": 4812, - "brains": 17129, - "brainstorming": 36607, - "braised": 28363, - "brake": 14937, - "brakes": 23456, - "bral": 31309, - "bram": 14815, - 
"bram": 39456, - "brampton": 35124, - "bran": 3684, - "bran": 28348, - "brance": 36072, - "brance": 15413, - "branch": 7998, - "branches": 15843, - "brand": 3910, - "brand": 2896, - "branded": 18097, - "brandi": 41003, - "branding": 10841, - "brando": 41892, - "brandon": 20423, - "brandon": 9166, - "brands": 8681, - "brandt": 22552, - "brandy": 26232, - "brane": 32340, - "branson": 28280, - "brant": 28951, - "brant": 47592, - "braries": 46377, - "brary": 24520, - "bras": 22611, - "brasil": 18991, - "brass": 24348, - "brass": 11655, - "brat": 26717, - "brat": 26631, - "brate": 41864, - "braun": 39129, - "braun": 29309, - "brave": 25461, - "brave": 7769, - "braved": 47663, - "bravely": 42303, - "bravery": 25831, - "braves": 14422, - "braving": 43258, - "bravo": 38613, - "bravo": 13006, - "braw": 37871, - "brawl": 26066, - "braxton": 37451, - "bray": 26256, - "bray": 22993, - "braz": 4625, - "brazil": 47459, - "brazil": 6305, - "brazili": 45697, - "brazilian": 12111, - "brb": 25316, - "brc": 40393, - "bre": 887, - "bre": 7782, - "brea": 7318, - "brea": 46538, - "breach": 21363, - "breaches": 45173, - "bread": 18886, - "bread": 5066, - "breads": 43064, - "break": 2206, - "break": 2568, - "breakable": 30691, - "breakaway": 42732, - "breakdown": 14519, - "breaker": 14814, - "breakers": 22270, - "breakfa": 45931, - "breakfast": 30210, - "breakfast": 3290, - "breaking": 14698, - "breaking": 2755, - "breakingbad": 38032, - "breakingnews": 23837, - "breakout": 16752, - "breaks": 7263, - "breakthrough": 18802, - "breakup": 38931, - "breast": 12930, - "breast": 9475, - "breastcancer": 40813, - "breastcancer": 30065, - "breastfeeding": 29033, - "breasts": 37637, - "breath": 9508, - "breath": 9576, - "breathe": 11364, - "breathing": 14959, - "breathtaking": 14709, - "brecht": 34622, - "breck": 44598, - "bred": 46929, - "bred": 16008, - "bree": 7892, - "bree": 37138, - "breed": 28030, - "breed": 13791, - "breeders": 37472, - "breeding": 16544, - "breeds": 29021, - "breen": 48013, 
- "brees": 46721, - "breeze": 13125, - "breezy": 21451, - "breit": 23864, - "breitbart": 37926, - "brek": 35494, - "bremen": 39861, - "bren": 5209, - "brenda": 23786, - "brendan": 35134, - "brendan": 15414, - "brendon": 36756, - "brennan": 22372, - "brenner": 42941, - "brent": 31439, - "brent": 16355, - "brentwood": 33108, - "brero": 47781, - "bres": 32561, - "bret": 38020, - "bret": 32548, - "brethren": 43134, - "breton": 32290, - "brett": 22591, - "brett": 12394, - "brev": 42882, - "brevi": 39475, - "brew": 5048, - "brew": 7253, - "brewco": 33582, - "brewed": 23238, - "brewer": 20756, - "breweries": 35277, - "brewers": 17618, - "brewery": 8850, - "brewing": 8275, - "brewingco": 45155, - "brews": 21663, - "brewster": 40274, - "brex": 22726, - "brexit": 27666, - "brexit": 5801, - "brgy": 35983, - "bri": 1036, - "bri": 18636, - "bria": 35890, - "brian": 9824, - "brian": 4989, - "brianna": 32308, - "briar": 46119, - "bribe": 40042, - "bribery": 41792, - "bric": 27055, - "brice": 40190, - "brick": 13937, - "brick": 9518, - "bricks": 21029, - "brics": 48196, - "brid": 16995, - "bridal": 36875, - "bridal": 14284, - "bride": 18342, - "bride": 8964, - "brides": 18067, - "bridesma": 28356, - "bridesmaid": 43399, - "bridesmaids": 47754, - "bridg": 20623, - "bridge": 8647, - "bridge": 2465, - "bridgeport": 45201, - "bridges": 11811, - "bridget": 27073, - "bridgewater": 38732, - "bridging": 38109, - "brie": 26622, - "brief": 9435, - "brief": 8954, - "briefed": 47326, - "briefing": 12991, - "briefly": 26980, - "briefs": 29557, - "brien": 13504, - "brier": 43995, - "brig": 11081, - "briga": 46448, - "brigade": 16032, - "briggs": 28108, - "brigh": 6710, - "bright": 10383, - "bright": 4852, - "brighten": 18208, - "brightening": 43929, - "brighter": 18507, - "brightest": 26159, - "brightly": 36298, - "brightness": 42280, - "brighton": 28416, - "brighton": 9470, - "brigitte": 44421, - "brill": 27342, - "brill": 28601, - "brilli": 3821, - "brilliance": 28146, - "brilliant": 4106, - 
"brilliantly": 26803, - "brin": 25620, - "bring": 11596, - "bring": 2430, - "bringback": 28969, - "bringbackour": 45403, - "bringing": 4777, - "brings": 5138, - "brink": 39296, - "brink": 28796, - "brioche": 45818, - "bris": 9385, - "bris": 15783, - "brisban": 30431, - "brisbane": 42932, - "brisbane": 12407, - "brisk": 43646, - "brisket": 31920, - "bristol": 18159, - "bristol": 8010, - "brit": 2318, - "brit": 20066, - "britain": 40802, - "britain": 6272, - "britanni": 31373, - "britannia": 36188, - "brite": 33827, - "briti": 8155, - "british": 8651, - "british": 3504, - "britishmuseum": 41858, - "britney": 37192, - "britney": 21853, - "britneyspears": 42990, - "brits": 21832, - "britt": 10811, - "britt": 25976, - "brittany": 38187, - "brittany": 18818, - "britton": 37422, - "brium": 46079, - "brixton": 30056, - "bro": 927, - "bro": 4410, - "broad": 3491, - "broad": 12623, - "broadband": 21050, - "broadcast": 8967, - "broadcaster": 29005, - "broadcasting": 14403, - "broadcasts": 46742, - "broader": 36029, - "broadway": 34599, - "broadway": 9092, - "broc": 15587, - "broccoli": 19094, - "broch": 21419, - "brochure": 25275, - "brock": 14841, - "brock": 16745, - "brodie": 42150, - "brody": 29608, - "broke": 42165, - "broke": 6509, - "broken": 26126, - "broken": 5107, - "broker": 34032, - "broker": 20449, - "brokerage": 41327, - "brokers": 28271, - "brom": 18972, - "brom": 33296, - "bromance": 35353, - "bromley": 35715, - "bron": 4011, - "bron": 10243, - "bronco": 43488, - "bronco": 34370, - "broncos": 12516, - "bronson": 37042, - "bronte": 48936, - "bronx": 48310, - "bronx": 17183, - "brony": 21084, - "bronze": 8459, - "broo": 5204, - "brooch": 21207, - "brook": 4782, - "brook": 7322, - "brooke": 28576, - "brooke": 12549, - "brookes": 39707, - "brooklyn": 23253, - "brooklyn": 6983, - "brooks": 42779, - "brooks": 9991, - "broom": 32046, - "broom": 28008, - "broome": 49335, - "bros": 7776, - "broth": 29994, - "brotha": 33974, - "brother": 12697, - "brother": 3157, - 
"brotherhood": 19059, - "brothers": 4548, - "brou": 27874, - "brough": 21033, - "brought": 4222, - "brov": 42881, - "brow": 6547, - "brow": 15895, - "broward": 34719, - "brown": 6315, - "brown": 2866, - "browne": 28440, - "brownie": 23045, - "brownies": 22312, - "browning": 32241, - "browns": 14051, - "brows": 14998, - "browse": 19060, - "browser": 19768, - "browsing": 29318, - "brox": 43539, - "brs": 47485, - "brt": 46936, - "bru": 1698, - "bru": 31028, - "bruce": 21223, - "bruce": 7085, - "bruh": 17575, - "bruins": 14736, - "bruise": 48048, - "bruised": 46502, - "brum": 23862, - "brum": 28078, - "brun": 6870, - "brunch": 9113, - "brune": 29057, - "brunei": 41898, - "brunette": 35528, - "bruno": 14568, - "brunomars": 41156, - "brunswick": 24012, - "brush": 27969, - "brush": 8594, - "brushed": 30298, - "brushes": 21550, - "brushing": 35072, - "brussels": 11020, - "brut": 39499, - "brutal": 42144, - "brutal": 14556, - "brutality": 31348, - "brutally": 28132, - "brute": 47552, - "brux": 49093, - "bry": 6587, - "bry": 28228, - "bryan": 16134, - "bryan": 10412, - "bryant": 12256, - "bryce": 19895, - "bryn": 36569, - "bryn": 42877, - "bryson": 38990, - "bs": 11783, - "bs": 1329, - "bsa": 46619, - "bsb": 23070, - "bsbi": 41728, - "bsbibotany": 42086, - "bsc": 32031, - "bsd": 41848, - "bse": 46341, - "bsf": 48314, - "bsgo": 48474, - "bsp": 47977, - "bst": 19698, - "bsu": 46385, - "bt": 3317, - "bt": 4205, - "btc": 10315, - "btcc": 30759, - "btn": 44681, - "bto": 35516, - "btob": 29379, - "btr": 39767, - "bts": 15154, - "bts": 4007, - "btsarmy": 30302, - "btsbbmas": 35297, - "btsx": 44971, - "btv": 38541, - "btw": 9520, - "btwn": 28284, - "bu": 609, - "bu": 5831, - "bub": 27704, - "bub": 33158, - "bubb": 9739, - "bubba": 28149, - "bubble": 28687, - "bubble": 10799, - "bubblegum": 48078, - "bubbles": 17648, - "bubbly": 31034, - "buc": 8207, - "buccane": 32830, - "buccaneers": 38058, - "buch": 22623, - "bucha": 43582, - "buchan": 27237, - "buchanan": 28975, - "bucharest": 
37013, - "buck": 6061, - "buck": 11433, - "bucket": 22596, - "bucket": 10498, - "bucketlist": 30778, - "buckets": 27168, - "buckeye": 34549, - "buckeyes": 30741, - "buckingham": 28736, - "buckle": 21948, - "buckley": 25905, - "bucks": 6103, - "bucky": 35916, - "bucs": 20011, - "bud": 2942, - "bud": 10737, - "buda": 18520, - "buda": 49012, - "budapest": 19202, - "budd": 7296, - "buddha": 13981, - "buddhism": 23744, - "buddhist": 18697, - "buddies": 14543, - "budding": 31992, - "buddy": 40948, - "buddy": 6557, - "budge": 32005, - "budget": 46758, - "budget": 5639, - "budgeting": 43789, - "budgets": 36419, - "buds": 14665, - "budweiser": 40900, - "buen": 15640, - "buena": 30876, - "buenas": 48529, - "bueno": 46202, - "buenos": 26055, - "buf": 44417, - "buff": 5456, - "buff": 21416, - "buffal": 25836, - "buffalo": 31231, - "buffalo": 8054, - "buffalob": 38831, - "buffalobills": 44352, - "buffe": 13724, - "buffer": 33050, - "buffet": 17829, - "buffett": 34081, - "buffs": 28906, - "buffy": 33356, - "bug": 14453, - "bug": 8162, - "bugatti": 35451, - "buggy": 28963, - "bugs": 13850, - "buh": 31406, - "buhari": 14661, - "buick": 22000, - "buil": 1354, - "build": 22739, - "build": 3289, - "builder": 14474, - "builders": 17694, - "building": 21206, - "building": 2307, - "buildings": 8866, - "builds": 16449, - "buildthe": 41497, - "built": 45824, - "built": 3874, - "buk": 28084, - "buk": 24317, - "buka": 47778, - "bukit": 39888, - "bul": 2572, - "bul": 10200, - "bula": 18726, - "bulaga": 41575, - "bular": 32187, - "bulb": 22373, - "bulbs": 24808, - "bulgar": 15424, - "bulgaria": 20295, - "bulgarian": 38693, - "bulge": 47603, - "bulk": 19643, - "bull": 4537, - "bull": 6029, - "bulldo": 37675, - "bulldog": 34828, - "bulldog": 15611, - "bulldogs": 13916, - "bullet": 14340, - "bullet": 12465, - "bulletin": 19638, - "bulletproof": 43212, - "bullets": 22117, - "bullied": 34689, - "bullies": 39050, - "bullion": 49114, - "bullish": 22142, - "bullock": 33198, - "bullpen": 38081, - 
"bulls": 10313, - "bully": 43111, - "bully": 20190, - "bullying": 13548, - "bum": 27683, - "bum": 14226, - "bumble": 25585, - "bumble": 39303, - "bumblebee": 36911, - "bummed": 48456, - "bump": 9783, - "bump": 15877, - "bumped": 22495, - "bumper": 17881, - "bumping": 40196, - "bumps": 21115, - "bun": 2591, - "bun": 13665, - "bunch": 7796, - "bund": 41905, - "bunde": 18841, - "bundesliga": 21582, - "bundle": 11793, - "bundled": 47228, - "bundles": 29834, - "bundy": 37332, - "bung": 44748, - "bungal": 29549, - "bungalow": 33696, - "bunk": 41236, - "bunker": 23615, - "bunnies": 28998, - "bunny": 34198, - "bunny": 9258, - "buns": 22235, - "bunting": 30695, - "buon": 31350, - "buon": 48498, - "bur": 1039, - "bur": 17362, - "burbank": 34862, - "burberry": 30412, - "burch": 44588, - "burden": 18687, - "bure": 11902, - "bureau": 32098, - "bureau": 15400, - "burg": 19505, - "burg": 3499, - "burge": 20522, - "burger": 22356, - "burger": 6548, - "burgers": 13007, - "burgess": 26211, - "burgh": 18141, - "burgh": 4965, - "burgl": 25554, - "burglar": 43365, - "burglary": 32573, - "burgring": 40823, - "burgundy": 23650, - "buri": 46348, - "buri": 42614, - "burial": 22012, - "buried": 14233, - "burk": 48822, - "burke": 15340, - "burle": 27891, - "burlesque": 33732, - "burlington": 23370, - "burma": 30305, - "burmese": 47906, - "burn": 7934, - "burn": 4285, - "burnaby": 47541, - "burne": 27246, - "burned": 15022, - "burner": 23243, - "burnett": 28558, - "burnham": 36111, - "burning": 46107, - "burning": 8405, - "burnley": 24653, - "burnout": 36078, - "burns": 10234, - "burnt": 15185, - "burr": 30879, - "burrell": 49045, - "burrito": 23473, - "burritos": 47245, - "burroughs": 41337, - "burrows": 44846, - "burst": 13005, - "bursting": 32566, - "bursts": 37026, - "burt": 27162, - "burton": 42354, - "burton": 12704, - "burundi": 33595, - "bury": 12276, - "bury": 3899, - "burys": 32362, - "bus": 1319, - "bus": 2840, - "busan": 40172, - "busc": 35000, - "busch": 20475, - "buses": 12879, 
- "bush": 11191, - "bush": 6867, - "bushes": 37578, - "busiest": 32764, - "busine": 4598, - "busines": 25364, - "business": 8346, - "business": 1716, - "businesses": 7287, - "businessman": 25635, - "buss": 47764, - "bust": 31299, - "bust": 9959, - "busted": 18643, - "buster": 37219, - "buster": 12094, - "busters": 16362, - "busting": 29622, - "busy": 39332, - "busy": 4354, - "but": 2201, - "but": 767, - "butch": 35102, - "butcher": 18732, - "butchers": 42334, - "bute": 39240, - "butes": 14630, - "butler": 35867, - "butler": 10702, - "butt": 12500, - "butt": 31523, - "butte": 31678, - "butter": 5427, - "butter": 6952, - "butterflies": 16232, - "butterfly": 9738, - "buttermilk": 40180, - "butternut": 36867, - "buttery": 45535, - "button": 45480, - "button": 8007, - "buttons": 16188, - "butts": 25309, - "buu": 42313, - "buuren": 47752, - "buxton": 41370, - "buy": 11632, - "buy": 2131, - "buyer": 14682, - "buyers": 14663, - "buying": 6566, - "buys": 15560, - "buzz": 7866, - "buzz": 8706, - "buzzard": 47434, - "buzzer": 38064, - "buzzfeed": 26613, - "buzzing": 18511, - "bv": 18958, - "bv": 35861, - "bvb": 22454, - "bw": 17672, - "bw": 15120, - "bway": 26652, - "bwfc": 40918, - "bwo": 45902, - "bx": 33633, - "by": 1713, - "by": 638, - "bye": 20076, - "bye": 4460, - "byes": 47958, - "byl": 34994, - "byn": 46917, - "byn": 11890, - "byo": 28039, - "bypass": 26530, - "byr": 15534, - "byrd": 30369, - "byrne": 19676, - "byron": 43504, - "byron": 19775, - "bys": 26740, - "bystand": 46138, - "byte": 42798, - "bytes": 39538, - "bythe": 36621, - "byu": 41072, - "byu": 23770, - "byz": 35406, - "byzantine": 44081, - "bz": 13631, - "bé": 40365, - "bü": 38706, - "c": 66, - "c": 322, - "ca": 772, - "ca": 1684, - "caa": 19316, - "cab": 3033, - "cab": 11912, - "cabaret": 26263, - "cabbage": 18407, - "cabe": 32731, - "cabello": 34371, - "caber": 29062, - "cabernet": 33730, - "cabin": 14178, - "cabine": 23354, - "cabinet": 9937, - "cabinets": 33083, - "cabins": 48455, - "cable": 7925, - 
"cables": 22408, - "cabo": 37318, - "cabo": 28370, - "cabrera": 42338, - "cabs": 42048, - "cac": 8298, - "cac": 23872, - "cacao": 38022, - "cache": 28993, - "caching": 40655, - "cactus": 19794, - "cad": 6297, - "cad": 20166, - "caday": 34187, - "cadbury": 44698, - "caddy": 41521, - "cade": 10497, - "cade": 17306, - "cadet": 22764, - "cadets": 19160, - "cadillac": 18156, - "cae": 49264, - "caer": 28298, - "caes": 15740, - "caesar": 21642, - "caesars": 42162, - "caf": 3471, - "caf": 20867, - "cafc": 30748, - "cafe": 15201, - "cafe": 4979, - "cafes": 40166, - "cafeteria": 32817, - "caffe": 18258, - "caffe": 45416, - "caffeine": 22487, - "café": 15304, - "cag": 15714, - "cage": 11838, - "cages": 37939, - "cah": 40519, - "cahill": 33185, - "cai": 38971, - "cai": 36116, - "cain": 13747, - "caine": 16799, - "cair": 15804, - "cair": 46659, - "cairn": 31264, - "cairn": 42467, - "cairngor": 44067, - "cairns": 32941, - "cairo": 19615, - "cait": 14116, - "caitlin": 47768, - "caitlin": 26809, - "caitlyn": 35763, - "cajun": 43425, - "cajun": 33044, - "cak": 42986, - "cake": 15295, - "cake": 2972, - "cakeday": 46207, - "cakes": 5950, - "cal": 1198, - "cal": 6372, - "cala": 32133, - "calab": 31795, - "calais": 39886, - "calam": 28841, - "calc": 45055, - "calci": 22824, - "calcium": 27815, - "calcu": 15328, - "calcul": 15734, - "calculate": 37656, - "calculated": 40688, - "calculations": 44605, - "calculator": 26093, - "calculus": 35104, - "calcutta": 42901, - "calder": 29372, - "calder": 36817, - "caldwell": 30484, - "cale": 32674, - "caleb": 19619, - "caled": 28421, - "calend": 6057, - "calendar": 7122, - "calendars": 17229, - "calf": 17508, - "calgary": 27415, - "calgary": 10797, - "calhoun": 38929, - "cali": 2857, - "cali": 16337, - "caliber": 32820, - "calibr": 32597, - "calico": 45379, - "calif": 30839, - "califor": 3526, - "californi": 21303, - "california": 3729, - "call": 7950, - "call": 1620, - "calla": 20658, - "callahan": 43313, - "callaway": 42596, - "callback": 44764, 
- "calle": 47699, - "calle": 38144, - "called": 2726, - "caller": 30666, - "calli": 16338, - "callie": 36512, - "calligraphy": 27775, - "calling": 4597, - "callister": 49026, - "callme": 42449, - "callof": 41280, - "calls": 4572, - "callum": 23224, - "calm": 34990, - "calm": 7011, - "calming": 30690, - "calorie": 32679, - "calories": 18029, - "cals": 47714, - "calum": 16405, - "calvary": 40169, - "calvert": 47134, - "calves": 31857, - "calvin": 27642, - "calvin": 17345, - "caly": 10244, - "calyp": 29851, - "cam": 1004, - "cam": 5982, - "camar": 31991, - "camber": 44362, - "cambo": 14662, - "cambodia": 17347, - "cambridge": 24651, - "cambridge": 9334, - "cambridgeshire": 46139, - "camden": 38735, - "camden": 17984, - "came": 1986, - "camel": 27005, - "camel": 21914, - "camels": 41357, - "cameo": 19492, - "camer": 4961, - "camera": 3934, - "cameraman": 43347, - "cameras": 12172, - "camero": 20320, - "cameron": 19634, - "cameron": 8057, - "camerondallas": 40587, - "cameroon": 24061, - "camil": 37745, - "camila": 19919, - "camilla": 38897, - "camille": 26741, - "camino": 28529, - "camo": 28702, - "camo": 19716, - "camogie": 39547, - "camou": 23588, - "camoufla": 23667, - "camouflage": 29049, - "camp": 2854, - "camp": 2877, - "campa": 2793, - "campaig": 9448, - "campaign": 44524, - "campaign": 3193, - "campaigner": 46364, - "campaigners": 40272, - "campaigning": 19594, - "campaigns": 15669, - "campan": 31765, - "campbell": 29094, - "campbell": 8806, - "campe": 16672, - "campeon": 49109, - "campeones": 30105, - "camper": 41914, - "camper": 24522, - "campers": 26619, - "campfire": 32530, - "campground": 46969, - "camping": 9982, - "campo": 27600, - "campos": 48077, - "camps": 12806, - "campsite": 44243, - "campu": 19687, - "campus": 4560, - "campuses": 31895, - "camra": 46155, - "camry": 46472, - "cams": 32590, - "can": 950, - "can": 753, - "cana": 28341, - "canad": 13193, - "canada": 2698, - "canadaday": 39800, - "canadi": 4329, - "canadian": 22160, - "canadian": 5255, - 
"canadians": 18989, - "canadiens": 40932, - "canal": 28585, - "canal": 9535, - "canals": 38483, - "canaria": 47117, - "canary": 40409, - "canary": 24523, - "canberra": 16719, - "canc": 43189, - "cancel": 12026, - "cancel": 21546, - "canceled": 25874, - "cancell": 28027, - "cancellation": 38765, - "cancelled": 13270, - "cancels": 34089, - "cancer": 12690, - "cancer": 3148, - "cancers": 33201, - "cancun": 34721, - "cand": 4986, - "candace": 45623, - "candel": 47834, - "candi": 6034, - "candice": 30024, - "candid": 7884, - "candid": 19206, - "candidacy": 46248, - "candidate": 6475, - "candidates": 8619, - "candied": 43982, - "candies": 46305, - "candle": 18995, - "candle": 12674, - "candlelight": 34724, - "candles": 15472, - "candy": 20741, - "candy": 6417, - "cane": 23644, - "cane": 14716, - "canelo": 43210, - "canes": 21902, - "cani": 35592, - "canine": 27380, - "cann": 4139, - "cann": 23709, - "cannab": 7577, - "cannabis": 31837, - "cannabis": 8861, - "canne": 44252, - "canned": 27290, - "cannes": 13773, - "canni": 26389, - "canning": 38621, - "cannon": 28771, - "cannon": 15661, - "cannons": 46269, - "cannot": 4785, - "canny": 26986, - "cano": 31668, - "cano": 25937, - "canoe": 23503, - "canola": 40389, - "canon": 17749, - "canon": 9310, - "canopy": 26061, - "cans": 13707, - "cant": 13395, - "cant": 5784, - "canteen": 39230, - "canter": 19301, - "canterbury": 22271, - "canti": 42845, - "cantina": 47472, - "canton": 37735, - "canton": 25363, - "cantore": 41769, - "cantwait": 33760, - "canu": 20171, - "canucks": 24321, - "canv": 30714, - "canvas": 22441, - "canvas": 7483, - "canvass": 40054, - "canvassing": 33783, - "cany": 47674, - "canyon": 41246, - "canyon": 9755, - "cao": 29207, - "cap": 1289, - "cap": 3938, - "capabilities": 19512, - "capability": 25885, - "capable": 14742, - "capac": 24665, - "capacity": 8970, - "capcom": 28342, - "cape": 10288, - "cape": 6631, - "capecod": 41339, - "capes": 38785, - "capetown": 20059, - "capit": 6889, - "capita": 41833, - 
"capital": 11198, - "capital": 5439, - "capitalism": 20068, - "capitalist": 37015, - "capitals": 29579, - "capitol": 43880, - "capitol": 11375, - "capo": 45477, - "capp": 16718, - "capped": 24659, - "capping": 42656, - "cappuccino": 37402, - "capri": 48699, - "capri": 30982, - "capric": 28667, - "capricorn": 46314, - "caps": 23185, - "capsu": 15608, - "capsul": 40341, - "capsule": 20627, - "capsules": 32870, - "capt": 45815, - "capt": 17369, - "captain": 14958, - "captain": 4621, - "captainamerica": 46229, - "captainmarvel": 48492, - "captains": 18706, - "caption": 11327, - "captions": 41878, - "captiv": 19776, - "captivating": 30580, - "captive": 29038, - "captivity": 41141, - "capture": 8818, - "captured": 8020, - "captures": 15305, - "capturing": 19548, - "capu": 44241, - "car": 811, - "car": 1615, - "cara": 20016, - "carab": 32251, - "carac": 30029, - "caracas": 45854, - "caramel": 14788, - "carameli": 41739, - "caramelized": 43854, - "carat": 32981, - "carav": 13814, - "caravan": 18566, - "carb": 21379, - "carbo": 43235, - "carbon": 14038, - "carbon": 7549, - "carbs": 29313, - "carcin": 31587, - "carcinoma": 46810, - "card": 10793, - "card": 2601, - "cardam": 49008, - "cardboard": 19845, - "cardi": 6211, - "cardi": 29677, - "cardiac": 21256, - "cardiff": 22488, - "cardiff": 9781, - "cardigan": 30501, - "cardin": 8457, - "cardinal": 46310, - "cardinal": 16472, - "cardinals": 12837, - "cardio": 15003, - "cardio": 23455, - "cardiology": 37276, - "cardiovascular": 29291, - "cardo": 40625, - "cards": 4094, - "care": 2050, - "care": 1776, - "cared": 27675, - "career": 20609, - "career": 3061, - "careers": 10090, - "careful": 11999, - "carefully": 15789, - "caregi": 22042, - "caregiver": 46372, - "caregivers": 35909, - "careless": 47325, - "carers": 26484, - "cares": 10968, - "caretaker": 48037, - "carey": 14895, - "cargo": 12490, - "cari": 18497, - "cari": 37273, - "carib": 9757, - "caribbean": 10368, - "caribou": 42135, - "caric": 25337, - "caricature": 38857, - 
"carina": 44357, - "caring": 13083, - "carl": 8273, - "carl": 9482, - "carla": 25552, - "carleton": 46496, - "carlin": 47559, - "carlisle": 23276, - "carlo": 17861, - "carlo": 15266, - "carlos": 9538, - "carlow": 44745, - "carls": 39635, - "carlson": 24114, - "carlton": 18934, - "carly": 23166, - "carly": 22689, - "carlyle": 46555, - "carmel": 30757, - "carmel": 25601, - "carmen": 41427, - "carmen": 18834, - "carmichael": 41657, - "carn": 21597, - "carnage": 31385, - "carnation": 44577, - "carnaval": 47238, - "carne": 17053, - "carne": 42885, - "carnegie": 25287, - "carney": 34194, - "carni": 8438, - "carnival": 36708, - "carnival": 10577, - "caro": 30317, - "caro": 29344, - "carol": 4242, - "carol": 11489, - "carole": 31955, - "carolin": 26418, - "carolina": 7027, - "caroline": 31064, - "caroline": 12641, - "carols": 33269, - "carolyn": 25825, - "carou": 32224, - "carousel": 36665, - "carp": 26085, - "carpen": 15584, - "carpenter": 18475, - "carpet": 6922, - "carpets": 34612, - "carr": 26951, - "carr": 17136, - "carra": 32332, - "carre": 31114, - "carrera": 32952, - "carri": 4739, - "carriage": 47885, - "carriage": 21087, - "carrick": 44052, - "carrie": 30334, - "carrie": 15848, - "carried": 12960, - "carrier": 12308, - "carriers": 26865, - "carries": 17982, - "carrieunderwood": 47338, - "carrington": 48759, - "carroll": 41911, - "carroll": 14893, - "carrot": 15435, - "carrots": 19299, - "carry": 31863, - "carry": 6998, - "carrying": 9920, - "cars": 3346, - "carsforsale": 45222, - "carson": 41766, - "carson": 13171, - "cart": 27705, - "cart": 13065, - "cartag": 45042, - "cartagena": 47157, - "carte": 44949, - "cartel": 30529, - "carter": 27330, - "carter": 7260, - "cartier": 32951, - "carto": 5487, - "carton": 41812, - "cartoon": 33082, - "cartoon": 7651, - "cartoonist": 30793, - "cartoons": 17673, - "cartri": 47084, - "cartridge": 29432, - "cartridges": 49249, - "carts": 27581, - "cartunesapp": 32888, - "caruso": 45192, - "carve": 40152, - "carved": 15127, - 
"carver": 28850, - "carving": 19428, - "carvings": 48123, - "cary": 22844, - "cas": 1671, - "cas": 13831, - "casa": 14643, - "casablanc": 36572, - "casablanca": 41950, - "casc": 36714, - "casca": 43296, - "cascade": 29065, - "cascades": 46454, - "case": 17698, - "case": 2068, - "cases": 6888, - "casey": 24899, - "casey": 12836, - "cash": 11050, - "cash": 5131, - "cashback": 36368, - "cashe": 32233, - "cashew": 39531, - "cashi": 29517, - "cashier": 34547, - "cashmere": 34566, - "casi": 38350, - "casino": 10473, - "casio": 32261, - "cask": 26299, - "casm": 35198, - "casper": 35892, - "cass": 22556, - "cassandra": 35289, - "casser": 31093, - "casserole": 36045, - "cassette": 19717, - "cassi": 14942, - "cassidy": 21757, - "cassie": 29323, - "cassini": 46554, - "cast": 2509, - "cast": 1970, - "caste": 32693, - "casted": 33838, - "castel": 43306, - "castell": 31792, - "caster": 32101, - "caster": 8449, - "casters": 29721, - "castic": 47737, - "castillo": 30813, - "casting": 7087, - "castle": 12496, - "castle": 3540, - "castles": 24766, - "castro": 16950, - "casts": 10595, - "casu": 15345, - "casual": 10129, - "casually": 18840, - "casualties": 30244, - "casualty": 31222, - "cat": 1481, - "cat": 2368, - "cata": 42279, - "catal": 12792, - "catalan": 30532, - "catalina": 36576, - "catalo": 34740, - "catalog": 20036, - "catalogue": 20985, - "catalonia": 27039, - "catalunya": 44132, - "cataly": 15894, - "catalyst": 25387, - "catan": 45893, - "catap": 39514, - "catar": 35801, - "catastro": 22736, - "catastrophe": 41422, - "catastrophic": 34448, - "catch": 18901, - "catch": 3042, - "catcher": 15965, - "catchers": 39060, - "catches": 17213, - "catching": 8617, - "catchy": 37114, - "catday": 32243, - "cate": 6357, - "cate": 24510, - "cated": 31823, - "categor": 17006, - "categori": 40117, - "categories": 19971, - "category": 9432, - "cater": 16634, - "cater": 38101, - "catering": 16697, - "caterpillar": 27111, - "catfish": 26077, - "cath": 9196, - "cath": 30811, - "cathar": 
43784, - "cathe": 7174, - "cathedr": 46370, - "cathedral": 7865, - "catherine": 35035, - "catherine": 12339, - "catho": 7595, - "cathol": 16315, - "catholic": 20382, - "catholic": 7757, - "catholics": 36808, - "cathy": 40326, - "cathy": 22731, - "cation": 21367, - "cato": 33558, - "cats": 38800, - "cats": 3989, - "catsofinstagram": 39901, - "catsoftwitter": 17273, - "catt": 37339, - "cattle": 48799, - "cattle": 13644, - "caturday": 20892, - "catwalk": 36565, - "catwoman": 47251, - "cau": 1121, - "cau": 45529, - "caucus": 18847, - "caught": 4520, - "caul": 23460, - "cauley": 41682, - "caulfield": 44906, - "cauli": 20123, - "cauliflower": 23802, - "cause": 18982, - "cause": 1394, - "caused": 8940, - "causes": 9775, - "causeway": 35034, - "causing": 10779, - "caution": 15656, - "cautious": 36579, - "cav": 4942, - "cav": 45935, - "cava": 48682, - "caval": 24537, - "cavali": 20783, - "cavalier": 44488, - "cavaliers": 30194, - "cavalry": 32467, - "cave": 25441, - "cave": 9654, - "cavendish": 42945, - "caver": 41487, - "caves": 22096, - "cavi": 27360, - "caviar": 31228, - "cavill": 40492, - "cavity": 43156, - "cavs": 16800, - "caw": 38405, - "caw": 43804, - "cawx": 26739, - "cay": 11876, - "cay": 37399, - "cayenne": 43650, - "cayman": 33737, - "caz": 48451, - "cb": 4034, - "cb": 8830, - "cba": 38472, - "cbb": 31487, - "cbc": 14096, - "cbc": 14523, - "cbd": 13176, - "cbe": 43639, - "cbi": 30875, - "cbj": 35608, - "cbn": 26579, - "cbp": 46723, - "cbr": 28762, - "cbs": 16788, - "cbs": 8009, - "cc": 2976, - "cc": 2021, - "cca": 17987, - "ccc": 21856, - "ccd": 48556, - "ccg": 37755, - "cch": 21789, - "cchini": 28467, - "cci": 32942, - "cci": 8196, - "ccl": 43773, - "ccm": 40435, - "cco": 28786, - "ccot": 24950, - "ccp": 43045, - "ccs": 30400, - "cctv": 23097, - "ccu": 49023, - "cd": 4308, - "cd": 4480, - "cda": 45565, - "cdc": 41098, - "cdc": 25779, - "cdn": 8886, - "cdn": 26802, - "cdnpoli": 11645, - "cdo": 47187, - "cdp": 39624, - "cds": 20784, - "cdt": 18455, - "ce": 685, - 
"ce": 629, - "cea": 28355, - "cean": 34409, - "cean": 37295, - "cease": 32856, - "cease": 25499, - "ceasefire": 38291, - "cebu": 20146, - "cec": 29694, - "cec": 40029, - "cecil": 26987, - "cecil": 27169, - "cecilia": 35440, - "ced": 25634, - "ced": 2323, - "cedar": 24167, - "cedar": 13799, - "cedric": 36608, - "cee": 45966, - "cee": 15015, - "cees": 47914, - "ceil": 27275, - "ceiling": 12374, - "ceilings": 33770, - "cek": 45544, - "cel": 2269, - "cel": 7597, - "cele": 1314, - "celeb": 38862, - "celeb": 19393, - "celebr": 1372, - "celebrate": 31414, - "celebrate": 2694, - "celebrated": 9184, - "celebrates": 7564, - "celebrating": 3382, - "celebration": 4615, - "celebrations": 10825, - "celebratory": 34115, - "celebrities": 17071, - "celebrity": 23981, - "celebrity": 7320, - "celebs": 19803, - "celed": 25741, - "celer": 9621, - "celery": 30990, - "celeste": 29364, - "celesti": 29497, - "celestial": 32669, - "celi": 25567, - "celia": 44489, - "celine": 33644, - "cell": 9316, - "cell": 5533, - "cellar": 24282, - "cellars": 44976, - "cellence": 34687, - "cello": 23013, - "cellphone": 39029, - "cells": 8890, - "cellu": 16791, - "cellular": 23268, - "cels": 24021, - "celsius": 47057, - "celtic": 21897, - "celtic": 10523, - "celticfc": 38612, - "celtics": 16226, - "cem": 41435, - "ceme": 10517, - "cement": 4369, - "cements": 19448, - "cemetery": 11660, - "cen": 1306, - "cen": 30106, - "cena": 21591, - "cence": 24410, - "cency": 41259, - "cene": 30038, - "censor": 24230, - "censor": 44709, - "censored": 30951, - "censorship": 27284, - "census": 23677, - "cent": 1784, - "cent": 3662, - "centenary": 22422, - "centennial": 20895, - "center": 16651, - "center": 2119, - "centered": 24584, - "centers": 14494, - "centi": 48889, - "centime": 48687, - "centr": 2370, - "central": 13448, - "central": 3339, - "centre": 26310, - "centre": 2916, - "centred": 47925, - "centres": 19354, - "centri": 30872, - "centric": 19297, - "centro": 37178, - "cents": 11934, - "centu": 16818, - 
"centuri": 36816, - "centuries": 19014, - "century": 26134, - "century": 4275, - "ceo": 46340, - "ceo": 3559, - "ceos": 28332, - "cep": 2632, - "cep": 48714, - "ceph": 44343, - "cept": 3678, - "ception": 12346, - "cer": 1364, - "cer": 1925, - "cera": 34608, - "ceram": 10677, - "ceramic": 15112, - "ceramics": 22438, - "cere": 3984, - "cere": 22085, - "cereal": 17581, - "cereals": 48618, - "cerebral": 39073, - "ceremon": 15796, - "ceremonial": 33281, - "ceremonies": 21547, - "ceremony": 5193, - "cern": 44851, - "cers": 13638, - "cert": 27522, - "certain": 8526, - "certain": 7883, - "certainly": 10883, - "certainty": 20054, - "certi": 4888, - "certific": 9443, - "certificate": 11786, - "certificates": 25281, - "certification": 14735, - "certified": 9288, - "cerv": 25738, - "cervical": 35953, - "ces": 28715, - "ces": 1604, - "cesar": 37025, - "cesar": 28603, - "cess": 2314, - "cess": 1554, - "cessna": 36596, - "cest": 27245, - "cester": 15769, - "cester": 12718, - "cet": 14960, - "cett": 46708, - "ceu": 37457, - "cevic": 48369, - "cey": 20971, - "cf": 10189, - "cf": 11171, - "cfa": 34521, - "cfb": 32931, - "cfc": 11577, - "cfd": 46171, - "cfl": 46320, - "cfl": 22332, - "cfo": 26937, - "cfp": 40756, - "cfr": 44033, - "cfs": 32835, - "cg": 27118, - "cg": 14740, - "cgc": 38775, - "cgi": 30520, - "ch": 540, - "ch": 634, - "cha": 1587, - "cha": 4541, - "chab": 26670, - "chad": 13095, - "chad": 12923, - "chae": 9460, - "chaf": 38123, - "chag": 27989, - "chai": 31590, - "chai": 18919, - "chain": 13898, - "chain": 3946, - "chained": 34402, - "chains": 14438, - "chainsaw": 37617, - "chainz": 39687, - "chair": 4728, - "chair": 4269, - "chaired": 31664, - "chairing": 42205, - "chairman": 6901, - "chairperson": 31584, - "chairs": 12033, - "chak": 13702, - "chak": 41713, - "chakra": 38304, - "chakra": 33241, - "chal": 7397, - "chal": 30809, - "chale": 38099, - "chalet": 37907, - "chalk": 31362, - "chalk": 17846, - "chall": 2073, - "challeng": 4138, - "challenge": 29462, - 
"challenge": 2836, - "challenged": 17380, - "challenger": 18228, - "challengers": 46404, - "challenges": 6280, - "challenging": 11754, - "chalmers": 47955, - "cham": 1290, - "cham": 19951, - "chamber": 18983, - "chamber": 7642, - "chamberlain": 32756, - "chambers": 16501, - "chamele": 34759, - "chameleon": 41317, - "champ": 36813, - "champ": 6602, - "champag": 10283, - "champagne": 11007, - "champi": 1680, - "champion": 2643, - "champion": 3950, - "champions": 4227, - "championship": 3429, - "championships": 7047, - "championsleague": 27638, - "champs": 6240, - "chan": 1255, - "chan": 6704, - "chana": 48752, - "chanc": 13931, - "chance": 32940, - "chance": 2594, - "chancellor": 15886, - "chances": 10870, - "chand": 7126, - "chand": 41508, - "chandelier": 30570, - "chandi": 12482, - "chandigarh": 34106, - "chandler": 17595, - "chandra": 27082, - "chandra": 25348, - "chanel": 16951, - "chang": 2233, - "chang": 16461, - "change": 11608, - "change": 1799, - "changeable": 41335, - "changed": 4907, - "changer": 18406, - "changers": 35185, - "changes": 4938, - "changing": 40384, - "changing": 5621, - "changmin": 47410, - "chann": 8804, - "channel": 25837, - "channel": 3847, - "channeling": 28197, - "channels": 13961, - "channing": 37417, - "chant": 18165, - "chant": 13521, - "chanting": 32111, - "chants": 22723, - "chanyeol": 18805, - "chao": 31815, - "chaos": 10853, - "chaotic": 33501, - "chap": 3825, - "chap": 21939, - "chapel": 40859, - "chapel": 10137, - "chaplain": 38348, - "chaplin": 32545, - "chapman": 17968, - "chapp": 20634, - "chaps": 36823, - "chapter": 6014, - "chapters": 22936, - "char": 1054, - "char": 16017, - "chara": 35668, - "charac": 2792, - "character": 10997, - "character": 4009, - "characterdesign": 38149, - "characteri": 20920, - "characteristic": 44747, - "characteristics": 26037, - "characters": 6564, - "charan": 31851, - "charcoal": 19268, - "chard": 17524, - "chardon": 26599, - "chardonnay": 28161, - "charge": 25032, - "charge": 5948, - 
"chargeable": 35664, - "charged": 7916, - "charger": 13090, - "chargers": 17352, - "charges": 8962, - "charging": 12514, - "chariot": 38811, - "charis": 24449, - "charisma": 45041, - "charismatic": 37205, - "charitable": 23256, - "charities": 18493, - "charity": 20008, - "charity": 4607, - "charitytuesday": 42794, - "charl": 47736, - "charle": 10217, - "charles": 27983, - "charles": 5127, - "charleston": 15478, - "charley": 38027, - "charli": 21784, - "charli": 49392, - "charlie": 16764, - "charlie": 6393, - "charlotte": 18445, - "charlotte": 7871, - "charlottesville": 32027, - "charlton": 27048, - "charm": 10876, - "charmed": 39790, - "charming": 12177, - "charms": 21944, - "charred": 44085, - "chart": 42685, - "chart": 5053, - "charted": 27939, - "charter": 42345, - "charter": 13569, - "chartered": 31298, - "charters": 46626, - "charting": 39841, - "charts": 10728, - "chas": 10717, - "chas": 29838, - "chase": 21503, - "chase": 3859, - "chased": 30342, - "chaser": 29560, - "chasers": 34158, - "chases": 45011, - "chasing": 46909, - "chasing": 13376, - "chassis": 29188, - "chast": 42176, - "chasu": 41352, - "chat": 5355, - "chat": 2402, - "chatbots": 43994, - "chate": 30377, - "chateau": 44582, - "chateau": 23520, - "chath": 46849, - "chatham": 32030, - "chats": 13263, - "chatt": 21618, - "chattanoo": 28009, - "chattanooga": 29866, - "chatted": 34124, - "chatter": 33473, - "chatter": 41103, - "chatting": 12401, - "chatur": 33839, - "chau": 11263, - "chau": 37536, - "chauffe": 45440, - "chauhan": 46663, - "chav": 28997, - "chavez": 27480, - "chaw": 39639, - "chay": 45317, - "chaz": 47815, - "chc": 36233, - "chd": 41645, - "che": 983, - "che": 3842, - "chea": 39580, - "chead": 48358, - "cheap": 27036, - "cheap": 8678, - "cheape": 26164, - "cheaper": 17776, - "cheapest": 26640, - "cheat": 18180, - "cheated": 34285, - "cheating": 19722, - "chec": 1113, - "check": 7672, - "check": 1217, - "checked": 10387, - "checker": 45883, - "checkers": 48181, - "checking": 7441, - 
"checklist": 26989, - "checkout": 13101, - "checkpoint": 27531, - "checks": 13737, - "ched": 11341, - "ched": 2146, - "cheddar": 20551, - "chee": 5326, - "chee": 20944, - "cheek": 40000, - "cheek": 21227, - "cheeks": 23019, - "cheeky": 15068, - "cheer": 9733, - "cheer": 6918, - "cheered": 38111, - "cheerful": 28882, - "cheering": 14289, - "cheerleader": 29072, - "cheerleaders": 22343, - "cheerleading": 36366, - "cheers": 6562, - "chees": 15182, - "cheese": 10738, - "cheese": 4108, - "cheeseburger": 41200, - "cheesecake": 17803, - "cheeses": 36076, - "cheesy": 22093, - "cheetah": 27431, - "chef": 12137, - "chef": 4895, - "chefs": 14486, - "chek": 43745, - "chel": 3084, - "chel": 25970, - "chell": 46854, - "chelle": 30141, - "chelms": 34936, - "chelmsford": 39890, - "chelse": 19071, - "chelsea": 6031, - "chelseafc": 25927, - "chelten": 18889, - "cheltenham": 21589, - "chem": 5667, - "chem": 13698, - "chemi": 7179, - "chemical": 39376, - "chemical": 9208, - "chemicals": 17426, - "chemist": 23138, - "chemistry": 8841, - "chemo": 33095, - "chemo": 36348, - "chemotherapy": 41412, - "chemtrails": 46015, - "chen": 5907, - "chen": 8983, - "cheney": 43522, - "cheng": 32512, - "cheng": 30190, - "chenko": 29073, - "chennai": 28948, - "chennai": 12791, - "cheon": 11498, - "cheque": 28168, - "cher": 3597, - "cher": 3466, - "cheri": 26471, - "cherish": 20053, - "cherished": 42325, - "cherno": 35376, - "chernobyl": 40554, - "chero": 19844, - "cherokee": 22860, - "cherries": 27248, - "cherry": 21470, - "cherry": 7325, - "chers": 5789, - "chery": 38478, - "cheryl": 37784, - "cheryl": 20600, - "ches": 18346, - "ches": 1910, - "chesa": 28349, - "chesapeake": 32909, - "cheshire": 17130, - "chesney": 48747, - "chess": 27170, - "chess": 8397, - "chest": 18217, - "chest": 10563, - "chester": 10466, - "chester": 3343, - "chesterfield": 32975, - "chestnut": 21834, - "chet": 9663, - "chett": 24695, - "chev": 7152, - "chev": 41145, - "chevro": 12850, - "chevrolet": 13240, - "chevron": 33792, 
- "chevy": 16581, - "chew": 32645, - "chew": 22642, - "chewan": 23689, - "chewbacca": 49355, - "chewing": 31486, - "chewy": 42940, - "chey": 26968, - "chey": 31208, - "cheyenne": 34805, - "chez": 49183, - "chez": 10556, - "chf": 33021, - "chfield": 41619, - "chhat": 34127, - "chhattisgarh": 44246, - "chi": 1337, - "chi": 4039, - "chia": 19147, - "chiang": 33764, - "chibi": 22306, - "chic": 2627, - "chic": 9091, - "chica": 44190, - "chicag": 16778, - "chicago": 15038, - "chicago": 3530, - "chicagof": 40638, - "chicagofire": 46576, - "chicas": 40664, - "chichester": 43823, - "chick": 3170, - "chick": 11238, - "chicken": 26322, - "chicken": 3717, - "chickens": 21658, - "chickpea": 48109, - "chicks": 17810, - "chico": 30379, - "chie": 40046, - "chie": 12388, - "chief": 16830, - "chief": 3455, - "chiefs": 11419, - "chiev": 47761, - "chiff": 27407, - "chiffon": 31817, - "chig": 42952, - "chihu": 22857, - "chihuahu": 25437, - "chihuahua": 30181, - "chik": 45455, - "chil": 1333, - "child": 4392, - "child": 2913, - "childcare": 31133, - "childhood": 34772, - "childhood": 7551, - "childish": 31939, - "childre": 2135, - "children": 11101, - "children": 2153, - "childrens": 31551, - "childrens": 21553, - "childs": 39521, - "chile": 10022, - "chilean": 33186, - "chili": 13033, - "chill": 6498, - "chill": 6382, - "chilled": 23540, - "chillen": 45160, - "chilli": 26787, - "chilli": 17067, - "chillin": 10347, - "chilling": 10179, - "chillout": 39842, - "chills": 25460, - "chilly": 14450, - "chim": 10543, - "chimney": 26821, - "chimp": 44374, - "chin": 6555, - "chin": 8979, - "china": 38943, - "china": 2817, - "chinatown": 28582, - "chine": 4013, - "chinese": 30568, - "chinese": 4271, - "ching": 34621, - "ching": 1439, - "chino": 47181, - "chino": 27440, - "chinook": 41577, - "chinson": 33786, - "chio": 19650, - "chip": 19271, - "chip": 8730, - "chipmun": 46384, - "chipot": 17702, - "chipotle": 19284, - "chipp": 39854, - "chippe": 46541, - "chipped": 39892, - "chipping": 40323, - 
"chips": 8855, - "chir": 15564, - "chiro": 23413, - "chiroprac": 25987, - "chiropractic": 34437, - "chis": 19920, - "chistan": 20523, - "chiswick": 47290, - "chit": 13515, - "chit": 45626, - "chita": 49184, - "chitec": 39862, - "chive": 29222, - "chives": 34921, - "chk": 47424, - "chl": 38592, - "chley": 47748, - "chlo": 10374, - "chloe": 39966, - "chloe": 13992, - "chlor": 23135, - "chman": 35835, - "chment": 20848, - "chner": 48277, - "cho": 1327, - "cho": 5150, - "choa": 43077, - "choc": 32772, - "choc": 21983, - "choco": 46285, - "choco": 32692, - "chocol": 3443, - "chocolat": 44631, - "chocolate": 29389, - "chocolate": 3820, - "chocolates": 24120, - "choi": 23749, - "choic": 35606, - "choice": 23857, - "choice": 4051, - "choices": 11016, - "choir": 9214, - "choirs": 43277, - "choke": 30231, - "choked": 43521, - "choker": 39642, - "choking": 39993, - "chol": 19802, - "cholera": 45999, - "cholester": 26861, - "cholesterol": 27982, - "chom": 25151, - "chon": 20416, - "chon": 21601, - "chondri": 37379, - "chong": 26220, - "choo": 3869, - "choo": 24437, - "chool": 29578, - "chools": 41958, - "choose": 22756, - "choose": 5073, - "chooses": 29923, - "choosing": 13475, - "chop": 10458, - "chop": 16663, - "chopin": 42256, - "chopped": 22580, - "chopper": 24011, - "chopping": 35375, - "chopra": 24258, - "chops": 26321, - "chor": 7567, - "chor": 47795, - "choral": 26684, - "chord": 33005, - "chords": 36152, - "choreo": 17443, - "choreographer": 35952, - "choreography": 32749, - "chores": 40483, - "chori": 25718, - "chorizo": 30802, - "chorus": 20869, - "chos": 26559, - "chose": 11090, - "chosen": 10044, - "chou": 16960, - "chou": 42917, - "choudhary": 45503, - "chow": 20257, - "chow": 21657, - "chowder": 37886, - "chp": 35896, - "chr": 36918, - "chri": 1135, - "chris": 9907, - "chris": 2978, - "chrisbrown": 41035, - "chriss": 46745, - "chrissy": 44762, - "chrissy": 40485, - "christ": 1403, - "christ": 6703, - "christchurch": 27100, - "christen": 31956, - "christensen": 
42226, - "christi": 3328, - "christi": 33213, - "christian": 11792, - "christian": 4729, - "christianity": 20000, - "christians": 14842, - "christie": 16084, - "christin": 30189, - "christina": 15925, - "christine": 42610, - "christine": 14712, - "christma": 12039, - "christmas": 18174, - "christmas": 1677, - "christmaseve": 44381, - "christmass": 44873, - "christop": 7917, - "christoph": 47844, - "christophe": 45486, - "christopher": 33349, - "christopher": 9630, - "christy": 28331, - "chro": 13207, - "chromatic": 44207, - "chrome": 24843, - "chrome": 9529, - "chromo": 35809, - "chron": 5577, - "chron": 39781, - "chronic": 10115, - "chronic": 13677, - "chronicle": 20034, - "chronicles": 18905, - "chrono": 29387, - "chronograph": 38397, - "chry": 13508, - "chrysler": 20078, - "chs": 40277, - "chs": 8391, - "chsnews": 44919, - "cht": 11384, - "chter": 47811, - "chu": 3799, - "chu": 13622, - "chubby": 29109, - "chuck": 13211, - "chuck": 9894, - "chuckle": 35733, - "chucky": 42026, - "chuffed": 27233, - "chuk": 25878, - "chuk": 27221, - "chul": 33001, - "chum": 46869, - "chum": 41767, - "chun": 14693, - "chun": 25391, - "chung": 28418, - "chunk": 30275, - "chunks": 45538, - "chunky": 27978, - "chups": 46331, - "chur": 2309, - "church": 14956, - "church": 2735, - "churches": 15539, - "churchill": 17527, - "chus": 36246, - "chut": 28788, - "chutney": 36261, - "chy": 15131, - "chy": 8096, - "chyna": 43398, - "châ": 48669, - "ci": 698, - "ci": 5798, - "cia": 4019, - "cial": 1143, - "cian": 32323, - "ciao": 37677, - "ciara": 31369, - "cible": 28873, - "cic": 14539, - "cic": 21517, - "cid": 27359, - "cide": 34178, - "cider": 13547, - "cides": 41326, - "cie": 19730, - "cier": 24067, - "cies": 6785, - "cif": 35698, - "cigar": 26031, - "cigar": 16525, - "cigare": 13044, - "cigarette": 18548, - "cigarettes": 22750, - "cigars": 20750, - "cii": 42408, - "cil": 9217, - "cil": 2998, - "cilan": 33998, - "cilantro": 34568, - "cili": 18977, - "ciliation": 25294, - "cim": 30021, - 
"cin": 2396, - "cin": 25367, - "cina": 39467, - "cincin": 13291, - "cincinnati": 14197, - "cinco": 25131, - "cincode": 40930, - "cincodemayo": 42542, - "cincy": 30015, - "cincy": 30286, - "cinde": 20660, - "cinderella": 21515, - "cindy": 34439, - "cindy": 18532, - "cine": 4015, - "cine": 27451, - "cinema": 38251, - "cinema": 6443, - "cinemas": 14845, - "cinematic": 25602, - "cinemato": 21919, - "cinematographer": 39059, - "cinematography": 33802, - "ciner": 39882, - "cing": 4014, - "cini": 25699, - "cinnam": 12768, - "cinnamon": 13460, - "cino": 18616, - "cio": 44584, - "cio": 9954, - "cion": 22024, - "ciones": 37155, - "cious": 38466, - "cip": 32884, - "cir": 2459, - "cir": 41135, - "circa": 10411, - "circle": 33574, - "circle": 7117, - "circles": 19411, - "circling": 46036, - "circu": 5143, - "circuit": 35583, - "circuit": 9801, - "circuits": 33260, - "circul": 16618, - "circular": 19733, - "circulare": 39525, - "circulareconomy": 39878, - "circulated": 46258, - "circulating": 42980, - "circulation": 27880, - "circum": 13406, - "circumstances": 18786, - "circus": 11833, - "cirque": 36049, - "cis": 9459, - "cis": 23513, - "cisco": 36689, - "cisco": 19290, - "cise": 19657, - "cisely": 33434, - "cision": 41957, - "cism": 24166, - "cist": 40906, - "cit": 4420, - "cit": 31294, - "citadel": 38036, - "citation": 33581, - "cite": 32641, - "cited": 25069, - "cites": 34490, - "citi": 4280, - "citi": 30270, - "cities": 5441, - "citing": 29088, - "citiz": 5816, - "citizen": 11720, - "citizen": 9814, - "citizens": 7949, - "citizenship": 17386, - "cito": 42636, - "citro": 27941, - "citroen": 35805, - "citrus": 17379, - "city": 5002, - "city": 1305, - "cityfc": 28751, - "cityo": 25709, - "cityof": 11595, - "cityscape": 40808, - "ciu": 39693, - "cius": 42559, - "civ": 40039, - "civic": 32240, - "civic": 11888, - "civil": 6923, - "civil": 6450, - "civilian": 21187, - "civilians": 18076, - "civilization": 22503, - "civilwar": 34524, - "ción": 44700, - "cj": 15238, - "cj": 15205, - 
"ck": 916, - "ck": 868, - "cke": 25224, - "cke": 40989, - "cked": 3441, - "cken": 25566, - "cker": 15509, - "cker": 4744, - "ckers": 37073, - "cket": 5525, - "ckett": 33899, - "ckey": 15029, - "ckey": 3657, - "cki": 36916, - "cki": 41055, - "cking": 4805, - "cko": 28818, - "cks": 2031, - "cky": 26229, - "cky": 3083, - "cl": 969, - "cl": 6482, - "cla": 940, - "cla": 20636, - "clad": 31606, - "cladding": 46411, - "clai": 29459, - "claim": 4290, - "claim": 6607, - "claimed": 9010, - "claiming": 15286, - "claims": 6852, - "clair": 31441, - "clair": 14039, - "claire": 20410, - "claire": 10460, - "clam": 13588, - "clam": 32598, - "clamation": 21793, - "clamp": 41501, - "clams": 38849, - "clan": 29252, - "clan": 14114, - "clancy": 37227, - "clans": 38279, - "clap": 30037, - "clap": 25546, - "clapham": 43619, - "clapton": 37683, - "clar": 3617, - "clara": 19468, - "clare": 18948, - "clare": 15927, - "claremont": 47789, - "clarence": 29320, - "clari": 15175, - "clarify": 37004, - "clarinet": 41178, - "clarity": 21323, - "clark": 13340, - "clark": 7521, - "clarke": 11548, - "clarkson": 25706, - "clas": 32003, - "clash": 38367, - "clash": 9359, - "clashes": 25193, - "clasico": 43567, - "class": 2876, - "class": 1874, - "classes": 6919, - "classi": 2507, - "classic": 9353, - "classic": 2713, - "classical": 22179, - "classical": 11355, - "classicalmusic": 27806, - "classiccar": 46906, - "classiccars": 21064, - "classics": 10634, - "classification": 26612, - "classified": 22056, - "classmate": 37090, - "classmates": 30062, - "classof": 25345, - "classroom": 9001, - "classrooms": 25768, - "classy": 11615, - "clau": 7526, - "claude": 17461, - "claudi": 39439, - "claudia": 21893, - "claudio": 31230, - "claus": 23317, - "clause": 26151, - "clave": 24111, - "claw": 49230, - "claw": 19106, - "claws": 29161, - "clay": 10402, - "clay": 8823, - "clays": 26128, - "clayton": 46445, - "clayton": 19413, - "clc": 31380, - "cle": 1321, - "cle": 2537, - "clean": 3572, - "clean": 3772, - 
"cleaned": 17468, - "cleanenergy": 43538, - "cleaner": 15619, - "cleaners": 33258, - "cleaning": 7210, - "cleanliness": 47886, - "cleans": 40827, - "cleanse": 28717, - "cleanser": 44170, - "cleansing": 25931, - "cleanup": 22353, - "clear": 4631, - "clear": 3143, - "clearance": 17959, - "cleared": 14880, - "clearer": 37031, - "clearing": 15481, - "clearly": 7767, - "clears": 29092, - "clearwater": 32124, - "cleary": 44342, - "cleats": 33486, - "cleavage": 44165, - "cled": 12827, - "clegg": 42915, - "clemens": 45896, - "clement": 22592, - "clement": 24714, - "clemente": 42461, - "clementine": 47112, - "clements": 49175, - "clemson": 38170, - "clemson": 19537, - "clen": 35547, - "cleo": 40344, - "cleop": 36287, - "cleopatra": 41212, - "cler": 11828, - "clergy": 42635, - "cleric": 43748, - "clerk": 22230, - "clermont": 47529, - "cles": 8077, - "cleve": 37599, - "clevel": 7701, - "cleveland": 30716, - "cleveland": 8430, - "clever": 30977, - "clever": 13385, - "clg": 47546, - "cli": 1503, - "clich": 44407, - "click": 16676, - "click": 3585, - "clicked": 29015, - "clicking": 26542, - "clicks": 31250, - "client": 48528, - "client": 7467, - "clients": 8114, - "clif": 13182, - "cliff": 23827, - "cliff": 10625, - "cliffe": 15170, - "clifford": 24226, - "cliffs": 20953, - "clifton": 23878, - "climat": 37283, - "climate": 7854, - "climate": 4589, - "climateaction": 31622, - "climatechange": 11055, - "climates": 46022, - "climax": 37033, - "climb": 7421, - "climb": 10649, - "climbed": 22528, - "climber": 36910, - "climbers": 47648, - "climbing": 9877, - "climbs": 29098, - "clin": 2879, - "clinch": 30404, - "clinched": 44064, - "cline": 37460, - "cling": 37068, - "cling": 4760, - "clinic": 7926, - "clinical": 35133, - "clinical": 9148, - "clinicians": 45866, - "clinics": 23330, - "clint": 37542, - "clint": 21160, - "clinton": 34403, - "clinton": 5820, - "clio": 46889, - "clip": 39712, - "clip": 9289, - "clipped": 45524, - "clipper": 42245, - "clippers": 23319, - "clipping": 
47484, - "clips": 16594, - "clique": 34983, - "clive": 36086, - "clive": 21509, - "cll": 46091, - "cllr": 45743, - "cllr": 23034, - "clo": 1194, - "cloak": 36528, - "clock": 19878, - "clock": 6716, - "clocked": 49049, - "clocks": 25895, - "clockwise": 46150, - "clockwork": 42297, - "clon": 24477, - "clone": 22854, - "clones": 48047, - "clooney": 33161, - "clos": 48821, - "close": 10603, - "close": 2660, - "closed": 4552, - "closely": 13478, - "closer": 6377, - "closes": 11354, - "closest": 14975, - "closet": 14221, - "closeup": 35439, - "closing": 7101, - "closure": 13249, - "closures": 22923, - "cloth": 14559, - "clothes": 7080, - "clothing": 7425, - "clou": 4069, - "cloud": 12965, - "cloud": 3887, - "cloudcomputing": 41390, - "clouds": 6244, - "cloudy": 13106, - "clough": 42909, - "clover": 39574, - "clover": 22812, - "clow": 18386, - "clown": 15329, - "clowns": 30820, - "cls": 44251, - "clt": 29651, - "clt": 24236, - "clu": 996, - "club": 9642, - "club": 1736, - "clubbing": 48128, - "clubhouse": 26553, - "clubs": 9437, - "clue": 14994, - "clueless": 35350, - "clues": 23764, - "clusive": 41362, - "cluster": 15595, - "clusters": 33217, - "clut": 28507, - "clutch": 13953, - "clutter": 40804, - "cly": 12037, - "clyde": 39557, - "clyde": 18469, - "cm": 10190, - "cm": 3741, - "cma": 30554, - "cma": 31388, - "cmc": 45839, - "cmdr": 48250, - "cme": 34946, - "cmo": 24589, - "cmon": 42904, - "cmp": 46355, - "cms": 22520, - "cmt": 42727, - "cmu": 43046, - "cn": 3886, - "cn": 16200, - "cna": 48287, - "cnbc": 41242, - "cnbc": 24371, - "cnblue": 36018, - "cnc": 20571, - "cnet": 47487, - "cnews": 24319, - "cng": 41496, - "cnn": 22405, - "cnn": 8259, - "cns": 46095, - "cny": 31614, - "co": 622, - "co": 1320, - "coa": 29167, - "coach": 3275, - "coach": 2312, - "coached": 30228, - "coachella": 20222, - "coaches": 6924, - "coaching": 7766, - "coal": 10227, - "coal": 7919, - "coalition": 12920, - "coast": 6398, - "coast": 3720, - "coastal": 38246, - "coastal": 10852, - "coaster": 
15944, - "coasters": 31548, - "coastguard": 40601, - "coastline": 27959, - "coasts": 42225, - "coat": 28869, - "coat": 7356, - "coated": 23401, - "coates": 36899, - "coating": 25369, - "coatings": 48706, - "coats": 18075, - "cob": 20140, - "cob": 32863, - "cobain": 36866, - "cobalt": 30896, - "cobb": 22719, - "cobble": 47894, - "cobra": 21574, - "coc": 23036, - "coc": 39498, - "coca": 21197, - "cocac": 26393, - "cocacola": 31248, - "cocaine": 20534, - "coch": 18599, - "cochran": 48798, - "cochrane": 41752, - "coco": 11850, - "coco": 13316, - "cocoa": 18074, - "cocon": 8597, - "coconut": 9581, - "cod": 16132, - "cod": 11915, - "code": 11582, - "code": 3217, - "coded": 33703, - "coden": 43914, - "coder": 41561, - "codes": 14566, - "codi": 39711, - "coding": 12647, - "cody": 23222, - "cody": 12666, - "coe": 15386, - "coed": 41028, - "coel": 45633, - "coer": 41198, - "coeur": 44986, - "coffe": 2255, - "coffee": 12898, - "coffee": 2453, - "coffees": 41184, - "coffey": 48066, - "cofficial": 18757, - "coffin": 29907, - "cog": 26362, - "cog": 35960, - "cogn": 12210, - "cognac": 44361, - "cognition": 46825, - "cognitive": 16584, - "cohe": 20669, - "cohen": 13381, - "coherent": 48450, - "cohort": 22782, - "coil": 25307, - "coim": 41528, - "coin": 14651, - "coin": 4170, - "coinci": 14015, - "coincidence": 19807, - "coins": 10530, - "coke": 39602, - "coke": 14035, - "col": 754, - "col": 9371, - "cola": 15444, - "colbert": 31647, - "colby": 32068, - "colchester": 31715, - "cold": 11146, - "cold": 3153, - "colder": 23859, - "coldest": 31438, - "coldplay": 27770, - "cole": 9305, - "cole": 8166, - "coleman": 15774, - "coles": 40265, - "coles": 30398, - "coli": 18877, - "coli": 15910, - "colin": 20989, - "colin": 10238, - "coliseum": 21836, - "coll": 25982, - "coll": 23898, - "colla": 2929, - "collab": 14013, - "collabor": 4437, - "collaborate": 21271, - "collaborated": 42265, - "collaborating": 25545, - "collaboration": 6642, - "collaborations": 36520, - "collaborative": 15841, - 
"collaborator": 48186, - "collaborators": 45901, - "collage": 11258, - "collagen": 36120, - "collap": 16881, - "collapse": 16520, - "collapsed": 25037, - "collapses": 43601, - "collar": 39662, - "collar": 13497, - "collateral": 44512, - "colle": 1801, - "colleague": 13067, - "colleagues": 8203, - "collec": 1733, - "collect": 10186, - "collected": 11980, - "collecti": 18530, - "collectible": 25680, - "collectibles": 21519, - "collecting": 10325, - "collection": 2548, - "collections": 12760, - "collective": 10162, - "collectively": 40687, - "collector": 13522, - "collectors": 20540, - "collects": 31576, - "colleen": 31020, - "college": 13512, - "college": 2229, - "colleges": 17357, - "collegi": 16311, - "collegiate": 18068, - "colli": 8262, - "collide": 27214, - "collie": 30611, - "collier": 35748, - "collin": 24056, - "collin": 32116, - "colling": 32319, - "collingwood": 45873, - "collins": 8684, - "collision": 15407, - "collo": 25115, - "colloqui": 37243, - "colloquium": 46514, - "collu": 25658, - "collusion": 33864, - "colo": 7300, - "colo": 27288, - "cologne": 22216, - "cology": 19187, - "colom": 8987, - "colombia": 12901, - "colombian": 28701, - "colombo": 33207, - "colon": 8280, - "colon": 29050, - "colonel": 22674, - "coloni": 22667, - "colonial": 16530, - "colonialism": 43385, - "colonies": 38738, - "colony": 18767, - "color": 4036, - "color": 3140, - "colorado": 34580, - "colorado": 6742, - "colorec": 41171, - "colored": 11775, - "colorful": 11444, - "colori": 28764, - "coloring": 17696, - "colorized": 46730, - "colors": 5389, - "colorstv": 28195, - "colorway": 44576, - "colossal": 40258, - "colosse": 48142, - "colossus": 34022, - "colour": 10240, - "colour": 4769, - "coloured": 17111, - "colourful": 15562, - "colouring": 31803, - "colours": 7626, - "cols": 35726, - "colt": 19726, - "colton": 32249, - "coltrane": 42333, - "colts": 16135, - "colum": 4164, - "columb": 31043, - "columbi": 25947, - "columbia": 9410, - "columbus": 11273, - "column": 10593, - 
"columnist": 28958, - "columns": 29056, - "com": 610, - "com": 2464, - "coma": 19620, - "comb": 3587, - "comb": 16380, - "combat": 35083, - "combat": 9275, - "combating": 46121, - "combe": 14363, - "combin": 25112, - "combination": 11312, - "combinations": 34950, - "combine": 12919, - "combined": 10427, - "combines": 22991, - "combining": 23561, - "combo": 10155, - "combos": 48117, - "combs": 30694, - "combu": 35629, - "combustion": 44654, - "comcast": 30043, - "come": 4225, - "come": 891, - "comeback": 8234, - "comedian": 13848, - "comedians": 33758, - "comedic": 43360, - "comedy": 19346, - "comedy": 4749, - "comer": 42997, - "comer": 20916, - "comers": 34436, - "comes": 2091, - "comet": 21405, - "comets": 40636, - "comey": 22957, - "comfor": 6563, - "comfort": 44000, - "comfort": 7808, - "comfortable": 8652, - "comfortably": 30392, - "comforting": 33835, - "comforts": 42243, - "comfy": 15736, - "comi": 40781, - "comic": 7729, - "comic": 4962, - "comicart": 46018, - "comicbook": 46564, - "comicbooks": 22018, - "comiccon": 18379, - "comicon": 43820, - "comics": 4256, - "comin": 18164, - "coming": 14916, - "coming": 1171, - "comingsoon": 19894, - "comm": 965, - "comm": 11413, - "comman": 39780, - "command": 18391, - "command": 11350, - "commander": 11265, - "commanders": 41667, - "commanding": 36933, - "commandments": 43409, - "commando": 31361, - "commands": 38163, - "comme": 29692, - "commemor": 9495, - "commemorate": 21242, - "commemorates": 45149, - "commemorating": 28734, - "commemoration": 29288, - "commemorative": 24623, - "commen": 15795, - "commence": 25059, - "commenced": 43908, - "commencement": 21666, - "commences": 48551, - "commend": 37555, - "commended": 40702, - "comment": 20035, - "comment": 5761, - "commentary": 14146, - "commentator": 32016, - "commented": 28328, - "commenting": 37292, - "comments": 6606, - "commer": 4028, - "commerce": 8333, - "commerci": 15601, - "commercial": 31802, - "commercial": 6287, - "commercials": 30724, - "commish": 
45399, - "commissi": 6000, - "commission": 5292, - "commissioned": 16565, - "commissioner": 10221, - "commissioners": 30702, - "commissioning": 29585, - "commissions": 20668, - "commit": 3041, - "commit": 11797, - "commitment": 7770, - "commitments": 32136, - "commits": 20241, - "committed": 7907, - "committee": 5636, - "committees": 40504, - "committing": 21937, - "commod": 9496, - "commodities": 30350, - "commodity": 29041, - "commodore": 31129, - "common": 8414, - "common": 4176, - "commonly": 20344, - "commons": 16653, - "commonwealth": 16569, - "comms": 18832, - "commu": 9561, - "commun": 1515, - "communal": 32809, - "communi": 16164, - "communic": 4784, - "communicate": 19809, - "communication": 7999, - "communications": 10052, - "communion": 28579, - "communism": 35387, - "communist": 18602, - "communities": 6361, - "community": 14784, - "community": 1927, - "commute": 15898, - "commuter": 27782, - "commuters": 30823, - "commuting": 43503, - "como": 16236, - "comp": 2561, - "comp": 11679, - "compac": 40014, - "compact": 13690, - "compan": 1995, - "companies": 5361, - "companion": 14963, - "companions": 37124, - "company": 2634, - "compar": 7580, - "comparable": 27092, - "comparative": 33388, - "compare": 13771, - "compared": 10544, - "compares": 25104, - "comparing": 20564, - "comparison": 14186, - "comparisons": 40870, - "compart": 30072, - "compartment": 40383, - "compass": 19438, - "compassion": 14463, - "compassionate": 30193, - "compati": 17295, - "compatibility": 41614, - "compatible": 21286, - "compe": 5254, - "compelled": 49375, - "compelling": 21766, - "compen": 42079, - "compens": 15172, - "compensation": 18663, - "compet": 2932, - "compete": 10038, - "competed": 27767, - "competen": 31853, - "competence": 31165, - "competency": 49293, - "competent": 28113, - "competes": 39826, - "competing": 13068, - "competit": 15892, - "competiti": 32581, - "competition": 3742, - "competitions": 23259, - "competitive": 10687, - "competitiveness": 43209, - 
"competitor": 26633, - "competitors": 23638, - "compilation": 20446, - "compiled": 34579, - "compla": 7428, - "complain": 19292, - "complained": 42029, - "complaining": 20812, - "complains": 46363, - "complaint": 20391, - "complaints": 20020, - "comple": 1730, - "complement": 36624, - "complementary": 48953, - "complete": 3263, - "completed": 5976, - "completely": 5989, - "completes": 19321, - "completing": 14949, - "completion": 15915, - "complex": 16099, - "complex": 6324, - "complexes": 47870, - "complexion": 47732, - "complexity": 24815, - "compli": 5270, - "compliance": 14658, - "compliant": 29893, - "complic": 11460, - "complicated": 16621, - "complications": 29936, - "compliment": 25116, - "complimentary": 20948, - "compliments": 25477, - "comply": 36281, - "component": 21284, - "components": 16816, - "compos": 7783, - "compose": 43659, - "composed": 19916, - "composer": 12104, - "composers": 33314, - "composing": 40412, - "composite": 21606, - "composites": 45395, - "composition": 17510, - "compositions": 44652, - "compost": 46002, - "compost": 33307, - "compound": 19980, - "compounds": 33991, - "compre": 8483, - "compreh": 42976, - "comprehen": 12050, - "comprehend": 48230, - "comprehensive": 13854, - "compress": 33353, - "compressed": 42359, - "compression": 25638, - "compressor": 39607, - "compri": 29445, - "compromise": 26611, - "compromised": 38576, - "compromising": 45436, - "comps": 48665, - "compton": 28364, - "compu": 11639, - "compul": 25869, - "compulsory": 39345, - "computing": 12732, - "comra": 25553, - "comrade": 30844, - "comrades": 29282, - "coms": 30493, - "con": 616, - "con": 2457, - "cona": 30605, - "conan": 24750, - "conce": 9145, - "concealed": 35419, - "conceded": 37895, - "conceived": 39725, - "concentr": 11085, - "concentrate": 30846, - "concentrated": 36776, - "concentration": 18565, - "concep": 8389, - "concepcion": 47035, - "concept": 6353, - "conceptart": 31162, - "conception": 30510, - "conceptions": 40307, - "concepts": 16763, 
- "conceptu": 42745, - "conceptual": 34070, - "concer": 2228, - "concern": 12928, - "concerned": 12020, - "concerning": 21772, - "concerns": 11134, - "concert": 32180, - "concert": 3066, - "concerto": 24710, - "concerts": 14418, - "concession": 38117, - "concessions": 43981, - "concier": 28859, - "concierge": 39850, - "conclave": 38098, - "conclu": 9627, - "conclude": 37525, - "concluded": 27825, - "concludes": 30634, - "conclusion": 20932, - "conclusions": 39507, - "conco": 43034, - "concor": 19913, - "concord": 26448, - "concordia": 35492, - "concours": 36282, - "concourse": 37793, - "concre": 43658, - "concrete": 9637, - "concussion": 28321, - "condem": 13287, - "condemn": 27212, - "condemned": 35145, - "condemns": 32092, - "conden": 24816, - "conditi": 11170, - "condition": 36978, - "condition": 7336, - "conditional": 24671, - "conditioned": 37014, - "conditioner": 31239, - "conditioning": 18181, - "conditions": 5892, - "condo": 19952, - "condol": 18661, - "condolences": 20836, - "condom": 39021, - "condomin": 42589, - "condoms": 37878, - "condor": 47643, - "condos": 42342, - "condu": 40772, - "conduc": 5379, - "conduct": 11647, - "conducted": 13080, - "conducting": 16787, - "conductor": 22317, - "conducts": 32084, - "cone": 39279, - "cone": 10266, - "cones": 26718, - "coney": 41837, - "conf": 6477, - "confe": 1968, - "confeder": 17104, - "confederate": 24864, - "confederation": 43484, - "conferen": 37961, - "conference": 2230, - "conferences": 22811, - "conferencing": 47320, - "confess": 38860, - "confession": 22572, - "confessions": 29404, - "confetti": 37923, - "confi": 5005, - "confidence": 8510, - "confident": 12365, - "confidential": 28712, - "configu": 46746, - "configur": 26950, - "configuration": 33378, - "confin": 45316, - "confined": 40973, - "confir": 3930, - "confirm": 12130, - "confirmation": 19645, - "confirmed": 6346, - "confirming": 38433, - "confirms": 11803, - "confis": 36285, - "confit": 42241, - "confl": 8173, - "conflic": 19029, - 
"conflict": 10397, - "conflicting": 43894, - "conflicts": 28713, - "confor": 40933, - "confron": 20033, - "confront": 38382, - "confrontation": 41478, - "confu": 6890, - "confuse": 37503, - "confused": 10946, - "confusing": 24683, - "confusion": 20493, - "cong": 24407, - "conge": 20013, - "congestion": 24432, - "congo": 20334, - "congr": 1227, - "congrats": 1887, - "congratul": 1750, - "congratulate": 16633, - "congratulated": 42004, - "congratulates": 24580, - "congratulating": 30967, - "congratulation": 24751, - "congratulations": 1864, - "congre": 7947, - "congreg": 40727, - "congregation": 32618, - "congress": 12452, - "congress": 4599, - "congressional": 15239, - "congressman": 17145, - "congresswoman": 37317, - "coni": 39031, - "coni": 36651, - "conj": 41543, - "conju": 33821, - "conjunction": 34226, - "conley": 44536, - "conline": 37593, - "conn": 41836, - "conn": 20329, - "conne": 8437, - "connec": 29933, - "connect": 19969, - "connected": 27506, - "connecting": 41429, - "connection": 26840, - "connections": 37161, - "connie": 25739, - "connoisse": 46012, - "connol": 27739, - "connolly": 29537, - "connor": 21984, - "connor": 10218, - "conom": 2664, - "conomy": 22529, - "conor": 29955, - "conor": 19478, - "conqu": 13382, - "conquer": 38585, - "conquer": 19821, - "conquered": 27099, - "conquering": 43778, - "conquest": 35367, - "conrad": 22073, - "cons": 10311, - "consci": 9427, - "conscience": 27310, - "conscious": 14914, - "consciously": 46755, - "consciousness": 17894, - "conse": 34887, - "consecu": 12084, - "consecutive": 12413, - "consen": 23110, - "consensus": 25071, - "consent": 21922, - "consequ": 13003, - "consequence": 42262, - "consequences": 15682, - "conserv": 4649, - "conservancy": 46729, - "conservation": 37616, - "conservation": 8322, - "conservative": 11421, - "conservatives": 17631, - "conservatory": 32140, - "conserve": 34231, - "consi": 2899, - "consider": 12471, - "consider": 6734, - "considerable": 38256, - "considerably": 38510, - 
"consideration": 24310, - "considerations": 33700, - "considered": 9487, - "considering": 10761, - "considers": 24691, - "consist": 10410, - "consist": 33735, - "consisted": 49354, - "consistency": 25683, - "consistent": 16439, - "consistently": 23799, - "consisting": 39241, - "consists": 23458, - "consol": 27869, - "consolation": 38888, - "console": 13403, - "consoles": 33136, - "consoli": 21586, - "consolidation": 41111, - "consor": 27108, - "consortium": 29988, - "conspir": 12680, - "conspiracy": 15236, - "const": 3826, - "constable": 29179, - "constan": 38718, - "constance": 40682, - "constant": 32000, - "constant": 13111, - "constantine": 30640, - "constantly": 14336, - "constell": 21913, - "constellation": 25991, - "constitu": 6299, - "constituency": 22464, - "constituents": 32075, - "constitution": 12157, - "constitutional": 16091, - "constra": 28973, - "constraints": 41910, - "constru": 3983, - "construc": 13321, - "construct": 24467, - "constructed": 16876, - "constructing": 33653, - "construction": 48873, - "construction": 4585, - "constructive": 31810, - "consu": 4689, - "consul": 5295, - "consul": 33630, - "consulate": 34341, - "consult": 9438, - "consult": 26727, - "consultancy": 31735, - "consultant": 14196, - "consultants": 27203, - "consultation": 15777, - "consultations": 43424, - "consulting": 15883, - "consume": 28919, - "consumed": 29653, - "consumer": 34408, - "consumer": 10422, - "consumers": 14014, - "consuming": 30607, - "consumption": 14904, - "cont": 2036, - "cont": 21425, - "contact": 39367, - "contact": 3523, - "contacted": 37331, - "contacts": 22789, - "contag": 29259, - "contagious": 33984, - "contain": 9948, - "contain": 15187, - "contained": 23836, - "container": 14913, - "containers": 20448, - "containing": 20281, - "contains": 12844, - "contamin": 24662, - "contaminated": 35773, - "contamination": 31770, - "conte": 15402, - "conte": 26882, - "contempl": 21924, - "contemplating": 33854, - "contempor": 14538, - "contemporary": 16607, 
- "contemporary": 8859, - "contemporaryart": 20212, - "contempt": 39293, - "conten": 42201, - "contender": 23573, - "contenders": 29711, - "content": 15526, - "content": 4750, - "contentmarketing": 20429, - "contents": 14850, - "contest": 23103, - "contest": 4576, - "contestalert": 27313, - "contestant": 25682, - "contestants": 28062, - "contested": 37845, - "contests": 32210, - "contex": 42015, - "context": 13089, - "conti": 46431, - "conti": 40842, - "contin": 1918, - "continent": 19623, - "continental": 14089, - "continents": 38642, - "conting": 27104, - "contingent": 36467, - "continu": 4688, - "continually": 34086, - "continuation": 38964, - "continue": 3942, - "continued": 10150, - "continues": 4305, - "continuing": 11009, - "continuity": 34035, - "continuous": 17033, - "continuously": 29634, - "continuum": 44978, - "contour": 34733, - "contr": 22871, - "contra": 9880, - "contra": 38620, - "contrac": 7581, - "contracep": 35109, - "contract": 6120, - "contracting": 39091, - "contractor": 21429, - "contractors": 22427, - "contracts": 16563, - "contradic": 27957, - "contrary": 32805, - "contrast": 18501, - "contrasting": 40758, - "contribu": 4753, - "contribute": 14112, - "contributed": 19397, - "contributes": 34203, - "contributing": 21762, - "contribution": 11116, - "contributions": 14465, - "contributor": 24553, - "contributors": 32908, - "contro": 2372, - "control": 9963, - "control": 3366, - "controlled": 14140, - "controller": 12929, - "controllers": 30374, - "controlling": 26427, - "controls": 15746, - "controversi": 13674, - "controversial": 14617, - "controversy": 18659, - "conv": 48382, - "conve": 18421, - "conven": 7283, - "conveni": 33278, - "convenience": 17859, - "convenient": 18978, - "conveniently": 40844, - "convention": 6752, - "conventional": 20835, - "conventions": 41404, - "conver": 6336, - "convergence": 35381, - "convers": 4577, - "conversation": 5690, - "conversations": 12326, - "converse": 24149, - "conversion": 15111, - "conversions": 
44137, - "convert": 20074, - "converted": 20808, - "converter": 34611, - "convertible": 19608, - "converting": 34674, - "converts": 42470, - "convey": 38342, - "convic": 11150, - "convicted": 18668, - "conviction": 24967, - "convictions": 44366, - "convin": 12889, - "convince": 20351, - "convinced": 17388, - "convincing": 27742, - "convo": 19372, - "convocation": 30674, - "convos": 44842, - "convoy": 30292, - "conway": 21410, - "conwy": 48971, - "cony": 14501, - "coo": 1664, - "coo": 21691, - "coogs": 47624, - "cook": 9726, - "cook": 5977, - "cookbook": 21086, - "cooke": 29979, - "cooked": 11452, - "cooker": 23806, - "cookery": 38779, - "cookie": 9367, - "cookies": 8320, - "cookin": 46610, - "cooking": 39248, - "cooking": 6283, - "cookout": 39743, - "cooks": 24256, - "cool": 5594, - "cool": 2077, - "cooled": 37170, - "cooler": 11078, - "coolest": 10566, - "cooling": 15291, - "coom": 41726, - "coon": 34260, - "coon": 16958, - "coop": 39917, - "coop": 18910, - "cooper": 7264, - "cooper": 8133, - "cooperate": 42936, - "cooperation": 11785, - "cooperative": 24517, - "coops": 48531, - "coordin": 8187, - "coordinate": 38250, - "coordinated": 32540, - "coordinating": 40075, - "coordination": 25611, - "coordinator": 13967, - "coors": 36025, - "cop": 3196, - "cop": 7070, - "copa": 22749, - "copd": 45876, - "cope": 47635, - "cope": 12564, - "copeland": 37604, - "copen": 15637, - "copenhagen": 17390, - "coper": 41891, - "copernic": 45519, - "copied": 36770, - "copies": 9851, - "coping": 30545, - "copolitics": 45846, - "copp": 20937, - "copped": 42229, - "copper": 24741, - "copper": 10333, - "coppola": 47427, - "cops": 10719, - "copter": 28049, - "copy": 11376, - "copy": 4509, - "copying": 38925, - "copyright": 15778, - "cor": 851, - "cor": 18559, - "cora": 34953, - "coral": 31220, - "coral": 12054, - "corbett": 35699, - "corbin": 35578, - "corbyn": 14026, - "cord": 40893, - "cord": 11181, - "corden": 41999, - "cordi": 41681, - "cordless": 44412, - "cords": 22164, - "core": 
19622, - "core": 5000, - "cores": 37874, - "corey": 31279, - "corey": 15288, - "corgi": 31320, - "cori": 26508, - "coriander": 37491, - "corin": 17716, - "corinthians": 34471, - "cork": 18148, - "cork": 10376, - "corn": 5202, - "corn": 5894, - "cornelius": 45865, - "cornell": 38689, - "cornell": 20859, - "corner": 18509, - "corner": 5253, - "corners": 19584, - "cornerstone": 36280, - "cornish": 23774, - "cornwall": 37903, - "cornwall": 10777, - "coron": 13210, - "corona": 25564, - "coronado": 43946, - "coronary": 45955, - "coronation": 25014, - "coroner": 47241, - "corp": 29203, - "corp": 10918, - "corpor": 4258, - "corporal": 42445, - "corporate": 33877, - "corporate": 6838, - "corporation": 11282, - "corporations": 25482, - "corps": 11330, - "corpse": 29408, - "corpus": 31672, - "correc": 5011, - "correct": 8340, - "corrected": 35628, - "correction": 20843, - "correctional": 38030, - "corrections": 37507, - "correctly": 15359, - "correlation": 29218, - "correspon": 20203, - "correspondent": 29996, - "corri": 12974, - "corridor": 20592, - "corrie": 23961, - "corro": 24936, - "corro": 42033, - "corrosion": 39191, - "corru": 6501, - "corrup": 30429, - "corrupt": 15194, - "corruption": 9141, - "corsa": 47670, - "corsair": 42367, - "corset": 40408, - "cortex": 40109, - "cortez": 30461, - "corvette": 24367, - "cory": 23221, - "cory": 18329, - "cos": 5865, - "cos": 5700, - "cosby": 30324, - "cosc": 45944, - "coscino": 47909, - "cose": 26495, - "cosm": 37486, - "cosme": 9628, - "cosmetic": 23918, - "cosmetics": 12896, - "cosmic": 47398, - "cosmic": 18304, - "cosmo": 12829, - "cosmo": 32072, - "cosmopolitan": 35518, - "cosmos": 22151, - "cospla": 15149, - "cosplay": 42401, - "cosplay": 6435, - "cosplayer": 30215, - "cosplaying": 46701, - "cost": 11360, - "cost": 4713, - "costa": 10480, - "costar": 28659, - "costarica": 31272, - "costco": 31045, - "costello": 30667, - "costing": 39193, - "costly": 30170, - "costs": 7628, - "costu": 5786, - "costume": 7235, - "costumes": 
15150, - "cosy": 22848, - "cot": 4718, - "cot": 5871, - "cote": 44234, - "cote": 20751, - "cotland": 32576, - "cotsw": 23303, - "cotswolds": 35546, - "cott": 8211, - "cott": 11349, - "cottage": 12155, - "cottages": 34405, - "cotton": 22218, - "cotton": 7050, - "cou": 1368, - "couch": 12724, - "cougar": 35028, - "cougar": 27042, - "cougars": 20425, - "cough": 35631, - "cough": 18498, - "cougs": 28482, - "coul": 22483, - "could": 44812, - "could": 1510, - "couldn": 4072, - "couldnt": 29042, - "coulter": 42291, - "coun": 939, - "counc": 12927, - "council": 18187, - "council": 3620, - "councill": 15732, - "councillor": 21179, - "councillors": 29695, - "councilman": 40833, - "councils": 29938, - "counsel": 13780, - "counsel": 19814, - "counseling": 25000, - "counsell": 47510, - "counselling": 40581, - "counselor": 26148, - "counselors": 38688, - "count": 6073, - "count": 5887, - "countdown": 39559, - "countdown": 7500, - "counted": 23149, - "counter": 10134, - "counter": 7352, - "counterfe": 33067, - "counterfeit": 44242, - "counterpart": 39216, - "counterparts": 42106, - "counters": 46170, - "countess": 46276, - "counties": 12338, - "counting": 9723, - "countless": 21819, - "countries": 5489, - "country": 7896, - "country": 2157, - "countryfile": 47023, - "countrymusic": 30372, - "countryside": 16303, - "counts": 12264, - "county": 18734, - "county": 2116, - "coup": 9871, - "coup": 16479, - "coupe": 16773, - "couple": 40136, - "couple": 3377, - "coupled": 37153, - "couples": 14752, - "coupling": 45595, - "coupon": 14019, - "coupons": 23945, - "cour": 1391, - "coura": 4436, - "courage": 9828, - "courageous": 25005, - "courier": 27217, - "cours": 21493, - "course": 43225, - "course": 2613, - "courses": 9464, - "court": 16837, - "court": 2908, - "courte": 5088, - "courtesy": 5228, - "courthouse": 22205, - "courtney": 33601, - "courtney": 15990, - "courtroom": 41071, - "courts": 13514, - "courty": 20121, - "courtyard": 21900, - "cous": 48397, - "cousin": 7780, - "cousins": 
14073, - "cout": 29118, - "coutinho": 35530, - "couture": 14808, - "cov": 19384, - "cov": 48385, - "cove": 21700, - "cove": 14708, - "coven": 12483, - "covenant": 29647, - "coventry": 18007, - "cover": 13534, - "cover": 2202, - "coverage": 6810, - "covered": 5603, - "covering": 9462, - "covers": 7745, - "covert": 40134, - "coveted": 36119, - "covington": 43196, - "cow": 5076, - "cow": 9706, - "cowan": 42699, - "coward": 33729, - "cowards": 48972, - "cowboy": 25833, - "cowboy": 13657, - "cowboys": 11864, - "cowboysnation": 43082, - "cowell": 39015, - "cowgirl": 47090, - "coworker": 30727, - "coworkers": 30821, - "coworking": 36034, - "cows": 15204, - "cowx": 23831, - "cox": 25784, - "cox": 11597, - "coy": 12765, - "coy": 15742, - "coyi": 48407, - "coyle": 45348, - "coyne": 44729, - "coyo": 16614, - "coyote": 26586, - "coyotes": 30423, - "coys": 19736, - "coz": 39922, - "coz": 14282, - "cozy": 14873, - "cp": 7905, - "cp": 9130, - "cpa": 30095, - "cpac": 45731, - "cpc": 26125, - "cpd": 23402, - "cpec": 48007, - "cpfc": 27553, - "cpi": 41795, - "cpl": 26852, - "cpr": 25134, - "cps": 27078, - "cpt": 32892, - "cpu": 27700, - "cq": 48910, - "cq": 48417, - "cr": 1075, - "cr": 3483, - "cra": 1184, - "cra": 18362, - "crab": 27382, - "crab": 11574, - "crabs": 30908, - "crack": 11222, - "crack": 10334, - "crackdown": 29527, - "cracked": 19826, - "cracker": 16298, - "crackers": 26200, - "cracking": 13008, - "cracks": 21426, - "cracy": 24749, - "cradle": 29384, - "crae": 40438, - "craf": 10873, - "craft": 7717, - "craft": 3588, - "craftbeer": 12371, - "crafted": 12424, - "crafthour": 42324, - "crafting": 26886, - "crafts": 33276, - "crafts": 13383, - "craftsman": 39528, - "craftsmanship": 36682, - "crafty": 32317, - "craic": 46962, - "craig": 14042, - "craig": 8061, - "craigslist": 43865, - "cram": 29809, - "cramer": 44592, - "cramps": 46106, - "cran": 7761, - "cranberries": 49361, - "cranberry": 23824, - "crane": 14626, - "cranes": 26979, - "crani": 45674, - "crank": 46246, - 
"crank": 32283, - "cranston": 44340, - "crap": 11899, - "crappy": 30475, - "crash": 37150, - "crash": 5033, - "crashed": 16638, - "crashes": 17013, - "crashing": 24991, - "crat": 46696, - "crate": 24756, - "crater": 22663, - "crates": 30172, - "cratic": 32175, - "crative": 39999, - "crats": 43056, - "crave": 33397, - "craven": 33625, - "craving": 18344, - "cravings": 34476, - "craw": 7400, - "crawfish": 42772, - "crawford": 15918, - "crawl": 20106, - "crawler": 41012, - "crawley": 42316, - "crawling": 37066, - "cray": 24184, - "cray": 27032, - "crayon": 41801, - "crayons": 43508, - "craz": 25776, - "craze": 30637, - "craziest": 32690, - "craziness": 46436, - "crazy": 17540, - "crazy": 3578, - "crc": 25618, - "cre": 798, - "cre": 17762, - "cream": 23184, - "cream": 3867, - "creams": 41447, - "creamy": 17206, - "crease": 48441, - "create": 30949, - "create": 3380, - "created": 4080, - "creates": 10361, - "creati": 6714, - "creating": 5524, - "creation": 38293, - "creation": 6900, - "creations": 17411, - "creative": 15237, - "creative": 4450, - "creatives": 29352, - "creativity": 9636, - "creator": 10173, - "creators": 17981, - "creature": 14317, - "creatures": 13938, - "cred": 7314, - "cred": 22377, - "credenti": 29487, - "credentials": 33422, - "credi": 21097, - "credibility": 34984, - "credible": 32983, - "credit": 21467, - "credit": 3900, - "credited": 32480, - "credits": 10654, - "creds": 43462, - "cree": 33961, - "cree": 36014, - "creed": 18845, - "creek": 26120, - "creek": 5526, - "creep": 8153, - "creep": 26084, - "creeper": 38662, - "creeping": 29697, - "creeps": 45135, - "creepy": 11943, - "creighton": 42823, - "creme": 22681, - "creole": 45632, - "crepe": 38611, - "crescent": 18211, - "cress": 39124, - "crest": 35985, - "crest": 15760, - "crested": 36656, - "crete": 8584, - "crew": 21560, - "crew": 3462, - "crewe": 43284, - "crews": 10463, - "cri": 1621, - "cri": 38962, - "crib": 23271, - "cric": 4328, - "cricke": 19098, - "cricket": 21859, - "cricket": 
5373, - "cricketer": 28439, - "cricketers": 43986, - "cried": 15290, - "cries": 19769, - "crime": 13872, - "crime": 4896, - "crimea": 28614, - "crimes": 11827, - "crimin": 5874, - "criminal": 30197, - "criminal": 8255, - "criminals": 18783, - "crimson": 19437, - "cringe": 42588, - "cripp": 33588, - "cris": 37818, - "crises": 36403, - "crisis": 5712, - "crisp": 15145, - "crispr": 39784, - "crisps": 35744, - "crispy": 16458, - "criss": 29708, - "cristi": 12699, - "cristian": 48808, - "cristiano": 14807, - "cristina": 33395, - "cristo": 38315, - "crit": 3613, - "crit": 48130, - "criteri": 33627, - "criteria": 24849, - "criterion": 43841, - "criti": 25333, - "critic": 12417, - "critic": 19361, - "critical": 15314, - "critical": 6808, - "critically": 21570, - "criticalrole": 33606, - "criticalrole": 22742, - "criticalrolefanart": 43663, - "critici": 20333, - "criticism": 17405, - "criticize": 46081, - "criticized": 41557, - "critics": 16946, - "critique": 32982, - "critters": 35423, - "crm": 22610, - "cro": 1192, - "cro": 22522, - "croati": 28072, - "croatia": 13323, - "croatian": 34795, - "croc": 43350, - "croche": 35352, - "crochet": 17554, - "crock": 41685, - "crocker": 47843, - "crockett": 48313, - "crocod": 24519, - "crocodile": 24757, - "crocs": 38988, - "croft": 16657, - "croissant": 46011, - "croix": 44735, - "crom": 25082, - "crombie": 46162, - "cromwell": 45345, - "cron": 17361, - "croo": 16443, - "crook": 43744, - "crooked": 48473, - "crooked": 25644, - "crooks": 44226, - "crop": 40751, - "crop": 9955, - "cropped": 31139, - "crops": 16290, - "crore": 18274, - "crores": 37281, - "cros": 16670, - "crosby": 21095, - "cross": 5266, - "cross": 3417, - "crossed": 11731, - "crosses": 20473, - "crossfit": 47214, - "crossfit": 20395, - "crossing": 8673, - "crossings": 43517, - "crossover": 17194, - "crossroads": 27427, - "crossword": 32945, - "crou": 31206, - "crouch": 36506, - "crow": 3138, - "crow": 16019, - "crowd": 12036, - "crowd": 4570, - "crowded": 20182, - 
"crowdfunding": 17971, - "crowds": 16092, - "crowe": 33560, - "crowley": 32287, - "crown": 22190, - "crown": 6902, - "crowned": 16109, - "crowns": 33229, - "crows": 27134, - "croy": 21676, - "croydon": 27116, - "crs": 28449, - "crt": 43877, - "cru": 1815, - "cru": 29788, - "cruci": 18499, - "crucial": 12396, - "crude": 20677, - "cruel": 16073, - "cruel": 17573, - "cruelty": 20675, - "cruis": 27721, - "cruise": 36425, - "cruise": 6764, - "cruiser": 21394, - "cruises": 19214, - "cruising": 19743, - "crum": 43268, - "crumb": 48327, - "crumb": 39909, - "crumble": 36595, - "crumbs": 35893, - "crun": 17407, - "crunch": 16620, - "crunchy": 31366, - "crusad": 19133, - "crusade": 36846, - "crusader": 40171, - "crusaders": 31319, - "crush": 22296, - "crush": 7610, - "crushed": 18270, - "crusher": 44923, - "crushes": 35844, - "crushing": 20790, - "crust": 23136, - "crusted": 37314, - "cruz": 33689, - "cruz": 8403, - "cry": 2837, - "cry": 6290, - "crying": 6828, - "cryo": 32215, - "cryp": 4865, - "crypt": 37814, - "cryptic": 46925, - "crypto": 8080, - "crypto": 9608, - "cryptocurrencies": 33329, - "cryptocurrency": 12070, - "cryst": 15891, - "crystal": 17387, - "crystal": 6517, - "crystalli": 47551, - "crystals": 18350, - "cs": 11978, - "cs": 2804, - "csa": 26355, - "csc": 41727, - "csc": 37266, - "csd": 36913, - "cse": 41659, - "csg": 47085, - "csgo": 28928, - "csi": 41750, - "csi": 28070, - "csk": 43036, - "csm": 40061, - "csn": 46329, - "cso": 43864, - "csp": 39243, - "csr": 32105, - "csr": 24598, - "csrracing": 44193, - "css": 41418, - "css": 19846, - "cst": 17016, - "csu": 35948, - "csu": 31261, - "csw": 41031, - "ct": 3381, - "ct": 1122, - "cta": 28397, - "ctar": 27842, - "ctc": 34123, - "cte": 31410, - "cted": 2910, - "ctf": 35250, - "cthulhu": 41064, - "cting": 7985, - "ction": 17578, - "ction": 1569, - "ctions": 7021, - "ctive": 9313, - "cto": 17445, - "ctor": 8108, - "ctr": 35602, - "ctr": 18481, - "cts": 6936, - "ctto": 25118, - "ctu": 20834, - "cture": 17668, - 
"ctv": 21213, - "ctv": 27590, - "cu": 729, - "cu": 11224, - "cuando": 40388, - "cub": 16938, - "cub": 19972, - "cuba": 11576, - "cuban": 15536, - "cube": 47753, - "cube": 11353, - "cubes": 31413, - "cubic": 48159, - "cubic": 29614, - "cubs": 9858, - "cuck": 26364, - "cuckoo": 38062, - "cucu": 16705, - "cucumber": 19787, - "cucumbers": 48065, - "cud": 42684, - "cudd": 12820, - "cuddle": 19568, - "cuddles": 24001, - "cuddling": 29696, - "cuddly": 36208, - "cudi": 48713, - "cue": 13424, - "cuer": 39506, - "cues": 35719, - "cuff": 34693, - "cuff": 22414, - "cufflinks": 43938, - "cuffs": 37221, - "cuis": 9938, - "cuisine": 10605, - "cuk": 34838, - "cul": 1877, - "cula": 35935, - "cular": 10940, - "culars": 45719, - "cule": 31066, - "cules": 18984, - "culin": 14772, - "culinary": 16466, - "cull": 21880, - "cull": 42061, - "cullen": 25973, - "culmin": 33778, - "culo": 36305, - "culprit": 41593, - "cult": 11965, - "cultiv": 16781, - "cultivate": 42983, - "cultivated": 48901, - "cultivation": 41539, - "cultur": 20780, - "cultural": 34908, - "cultural": 6753, - "culturally": 36783, - "culture": 20197, - "culture": 3673, - "cultured": 40176, - "cultures": 19552, - "culver": 42103, - "cum": 20142, - "cum": 27119, - "cumb": 10858, - "cumber": 15309, - "cumberbatch": 27541, - "cumberland": 28747, - "cumbri": 32010, - "cumbria": 17953, - "cumin": 42285, - "cumple": 47050, - "cumul": 42961, - "cumulative": 47610, - "cumulus": 46313, - "cun": 12423, - "cun": 29532, - "cunningham": 25321, - "cuomo": 25681, - "cup": 5059, - "cup": 1937, - "cupboard": 32074, - "cupcake": 17025, - "cupcakes": 12747, - "cupid": 34885, - "cuppa": 28077, - "cups": 11463, - "cur": 1092, - "cur": 33073, - "curated": 20341, - "curator": 20753, - "curb": 21931, - "curd": 38881, - "cure": 36758, - "cure": 9088, - "cured": 26248, - "cures": 38204, - "curfew": 48826, - "curi": 12640, - "curing": 44169, - "curiosity": 21583, - "curious": 9865, - "curl": 24306, - "curled": 43734, - "curling": 18543, - "curls": 
24340, - "curly": 20795, - "curran": 40999, - "currant": 43501, - "curren": 6142, - "currencies": 23530, - "currency": 7853, - "current": 3653, - "currently": 3792, - "currents": 35450, - "curric": 16201, - "curriculum": 17947, - "currie": 39385, - "curry": 49285, - "curry": 8051, - "curse": 18479, - "cursed": 26408, - "cursor": 46546, - "curt": 38137, - "curtain": 17223, - "curtains": 30223, - "curti": 39925, - "curtis": 13808, - "curve": 15792, - "curved": 25789, - "curves": 22814, - "curvy": 45788, - "cus": 2736, - "cusa": 47414, - "cuse": 37950, - "cush": 43731, - "cushi": 15333, - "cushion": 20853, - "cushions": 34163, - "cussion": 16658, - "cussions": 46853, - "cust": 20900, - "custard": 26516, - "custo": 4376, - "custody": 16176, - "custom": 2662, - "custom": 4996, - "custome": 41323, - "customer": 24035, - "customer": 5102, - "customerexperience": 45167, - "customers": 5528, - "customerservice": 40611, - "customiz": 41793, - "customizable": 48253, - "customization": 48244, - "customize": 32179, - "customized": 23229, - "customs": 16880, - "cut": 10511, - "cut": 3032, - "cute": 16031, - "cute": 2242, - "cuteness": 19342, - "cuter": 27151, - "cutest": 8032, - "cuth": 44328, - "cutie": 10733, - "cuties": 40939, - "cuties": 23420, - "cutiesaturday": 41883, - "cutler": 40428, - "cutlery": 49073, - "cutout": 45016, - "cuts": 7435, - "cutt": 27338, - "cutt": 47647, - "cutter": 19719, - "cutters": 44783, - "cutting": 7266, - "cuz": 9215, - "cv": 13531, - "cv": 13947, - "cvs": 29603, - "cw": 10652, - "cw": 11065, - "cwc": 19179, - "cwgc": 48527, - "cws": 45186, - "cx": 44457, - "cx": 14283, - "cy": 1470, - "cy": 1678, - "cyber": 5830, - "cyber": 10210, - "cybercrime": 41772, - "cybermonday": 36578, - "cyberpunk": 36896, - "cybersecurity": 10581, - "cyborg": 36650, - "cycl": 9791, - "cycle": 19083, - "cycle": 5072, - "cycled": 31055, - "cycles": 14605, - "cycli": 12201, - "cycling": 26353, - "cycling": 6321, - "cyclist": 20686, - "cyclists": 20303, - "cyclo": 18122, 
- "cyclone": 48094, - "cyclone": 20917, - "cyclones": 34669, - "cylin": 18569, - "cylinder": 22092, - "cylinders": 48888, - "cymb": 36677, - "cymru": 24005, - "cyn": 14324, - "cynthi": 41994, - "cynthia": 23748, - "cyp": 14809, - "cypress": 25347, - "cypri": 36481, - "cyprus": 15263, - "cyril": 36028, - "cyrus": 14204, - "cystic": 46131, - "cyto": 31864, - "cz": 22898, - "cz": 22921, - "cze": 12152, - "czech": 43151, - "czech": 16141, - "cé": 36454, - "cé": 18317, - "d": 67, - "d": 323, - "da": 925, - "da": 1140, - "daa": 32642, - "daan": 44814, - "dab": 10413, - "dab": 22900, - "dac": 16222, - "dac": 27478, - "daca": 28477, - "dach": 34166, - "dachsh": 41641, - "dachshund": 42720, - "dad": 4346, - "dad": 2639, - "dada": 31325, - "daddy": 29466, - "daddy": 6546, - "dade": 23299, - "dades": 28289, - "dads": 12741, - "dae": 23358, - "dae": 15422, - "daener": 46934, - "daes": 47282, - "daesh": 35047, - "daf": 9972, - "daf": 36704, - "daffodils": 44769, - "daft": 36347, - "dag": 11434, - "dag": 25650, - "dagger": 34251, - "dah": 16976, - "dah": 11776, - "dahl": 45816, - "dahl": 22621, - "dahlia": 41768, - "dai": 13559, - "dai": 10632, - "dail": 14676, - "dailies": 21260, - "daily": 6689, - "daily": 2873, - "dailynews": 43466, - "dailys": 43160, - "dailysketch": 46738, - "daim": 40421, - "dain": 32222, - "dain": 28315, - "daipur": 47631, - "dair": 19998, - "dair": 42078, - "dairy": 25243, - "dairy": 10302, - "dairyfree": 49366, - "dais": 10502, - "daisi": 39947, - "daisies": 40654, - "daisy": 39310, - "daisy": 12865, - "dak": 6999, - "dak": 16095, - "dakar": 31137, - "dakota": 38522, - "dakota": 12358, - "dal": 2476, - "dal": 5601, - "dala": 42675, - "dalai": 41222, - "dalail": 35169, - "dalailama": 35849, - "dale": 11533, - "dale": 4677, - "dalejr": 38207, - "dales": 29031, - "daley": 28544, - "dalgo": 43614, - "dali": 36735, - "dali": 25703, - "dalit": 45432, - "dall": 43631, - "dalla": 16772, - "dallas": 27414, - "dallas": 5759, - "dallascowboys": 33016, - "dalmati": 
44275, - "dalton": 21488, - "daly": 24873, - "dam": 1880, - "dam": 4926, - "damage": 6822, - "damaged": 13568, - "damages": 28842, - "damaging": 20610, - "damas": 23345, - "damascus": 25396, - "dame": 10069, - "dames": 44548, - "dami": 17783, - "damian": 43307, - "damian": 25375, - "damien": 25090, - "dammit": 31057, - "damn": 37409, - "damn": 4451, - "damned": 28428, - "damon": 48503, - "damon": 18244, - "damp": 26520, - "dams": 37680, - "dan": 2257, - "dan": 2284, - "dana": 44834, - "dana": 13777, - "danao": 38598, - "danc": 3945, - "dance": 10619, - "dance": 2724, - "danced": 32891, - "dancehall": 33300, - "dancer": 11400, - "dancers": 13153, - "dances": 24083, - "dancing": 33280, - "dancing": 6226, - "dand": 12593, - "dandelion": 38903, - "dandy": 31932, - "dane": 19330, - "danes": 47477, - "dang": 4283, - "dang": 14992, - "danger": 20083, - "danger": 11212, - "dangerous": 7350, - "dangerously": 35012, - "dangers": 23726, - "dangle": 39907, - "dani": 3001, - "dani": 17009, - "daniel": 7859, - "daniel": 4981, - "daniela": 44466, - "danielle": 30396, - "danielle": 15292, - "danielpadilla": 34702, - "daniels": 16146, - "danish": 15467, - "dank": 31849, - "dann": 11951, - "danny": 14950, - "danny": 7621, - "dano": 29703, - "dans": 16241, - "dant": 48097, - "dant": 28237, - "dante": 21911, - "danube": 44594, - "dany": 47816, - "dao": 36099, - "dap": 12149, - "dap": 38034, - "daph": 24591, - "daphne": 31687, - "dapl": 34478, - "dapp": 46857, - "dapper": 26071, - "daq": 25381, - "dar": 1377, - "dar": 6242, - "dara": 17064, - "darby": 34366, - "darcy": 32916, - "dare": 14833, - "dare": 9863, - "daredevil": 28849, - "dares": 42973, - "dareto": 46794, - "dari": 16292, - "dari": 14552, - "daria": 45622, - "daries": 18184, - "daring": 28166, - "dario": 33918, - "darius": 32606, - "darje": 49089, - "dark": 5724, - "dark": 3144, - "darker": 18737, - "darkest": 25898, - "darkness": 10521, - "darling": 13048, - "darlings": 39961, - "darlington": 34565, - "darn": 26059, - 
"darrell": 33522, - "darren": 20263, - "darren": 12275, - "darry": 29200, - "darryl": 35359, - "darshan": 34564, - "dart": 14001, - "dart": 19841, - "darth": 41304, - "darth": 23164, - "dartmoor": 31477, - "dartmouth": 29667, - "darts": 15246, - "darwin": 43013, - "darwin": 20926, - "daryl": 45607, - "daryl": 24532, - "das": 9940, - "das": 7359, - "dash": 13858, - "dash": 10206, - "dashboard": 27679, - "dashi": 12876, - "dashing": 33825, - "dat": 1717, - "dat": 9445, - "data": 14876, - "data": 2281, - "datab": 11941, - "database": 14678, - "databases": 48384, - "datac": 27329, - "datacenter": 40133, - "datasci": 14496, - "datascience": 15748, - "dataviz": 28138, - "date": 34300, - "date": 1524, - "dated": 13564, - "dates": 7228, - "dating": 8534, - "dation": 15311, - "datlantic": 34270, - "dato": 36075, - "dats": 48674, - "dau": 3162, - "dau": 33828, - "daugh": 42523, - "daughter": 3944, - "daughters": 13585, - "daun": 29470, - "dav": 3700, - "dav": 46488, - "davao": 31502, - "dave": 10089, - "dave": 5077, - "daven": 28350, - "davenport": 34624, - "davey": 33391, - "davi": 1732, - "david": 4640, - "david": 2259, - "davidbowie": 44448, - "davido": 35989, - "davids": 46695, - "davidson": 13166, - "davies": 13120, - "davin": 43187, - "davis": 24426, - "davis": 5536, - "davison": 43725, - "davos": 31887, - "davy": 41565, - "daw": 5971, - "daw": 24404, - "dawg": 18660, - "dawgs": 26431, - "dawn": 30590, - "dawn": 7689, - "dawson": 18611, - "dax": 29458, - "day": 1405, - "day": 575, - "daya": 38165, - "daybreak": 33862, - "daycare": 36363, - "daydream": 41587, - "dayin": 20332, - "daylight": 20809, - "dayo": 29856, - "dayo": 46605, - "dayof": 16272, - "dayofthe": 38043, - "days": 1161, - "daysof": 12379, - "daysofcode": 36537, - "daysto": 29886, - "daystogo": 42198, - "dayswild": 42052, - "daytime": 22830, - "dayton": 35729, - "dayton": 20262, - "daytona": 16335, - "dayweekend": 44526, - "dayz": 35949, - "daz": 15449, - "daz": 43844, - "daze": 33591, - "dazz": 17149, - 
"dazzle": 41164, - "dazzling": 28821, - "db": 19100, - "db": 8128, - "dbacks": 31175, - "dbs": 40558, - "dbz": 49226, - "dc": 5074, - "dc": 2743, - "dca": 49107, - "dcc": 33747, - "dccomics": 17610, - "dcfc": 35526, - "dci": 35336, - "dcs": 42878, - "dcu": 42647, - "dd": 1353, - "dd": 3766, - "dda": 35202, - "ddad": 39049, - "dday": 32689, - "dday": 26243, - "ddc": 48513, - "ddd": 24183, - "dddd": 35362, - "dden": 5013, - "dder": 9300, - "dders": 24827, - "ddi": 44450, - "ddin": 17175, - "dding": 48101, - "dding": 8974, - "ddings": 49106, - "ddington": 29238, - "ddle": 17633, - "ddle": 8357, - "ddled": 38392, - "ddles": 33901, - "ddleston": 25647, - "ddling": 30981, - "ddlovato": 28244, - "ddos": 46463, - "ddr": 26027, - "dds": 48334, - "ddu": 43836, - "ddy": 14981, - "ddy": 7876, - "de": 561, - "de": 654, - "dea": 18477, - "deacon": 29155, - "dead": 3906, - "dead": 2747, - "deadliest": 40811, - "deadline": 47209, - "deadline": 8458, - "deadlines": 44959, - "deadly": 10756, - "deadpool": 21471, - "deaf": 28229, - "deaf": 18358, - "deal": 7249, - "deal": 2696, - "dealer": 15218, - "dealers": 21697, - "dealership": 32096, - "dealing": 13138, - "deals": 4469, - "dealt": 30101, - "dean": 13807, - "dean": 5828, - "deandre": 43635, - "deans": 46852, - "dear": 15696, - "dear": 3817, - "dearest": 24880, - "dearly": 31880, - "deas": 34715, - "death": 7163, - "death": 2767, - "deaths": 12253, - "deau": 12399, - "deaux": 19883, - "deb": 2987, - "deb": 25687, - "debat": 32082, - "debate": 5196, - "debates": 19239, - "debating": 23472, - "debbie": 47186, - "debbie": 16735, - "debit": 32410, - "debor": 16738, - "deborah": 40997, - "deborah": 22150, - "debra": 33233, - "debris": 19208, - "debt": 8932, - "debts": 38770, - "debu": 9790, - "debun": 33123, - "debut": 42608, - "debut": 4085, - "debuted": 25215, - "debuting": 34817, - "debuts": 17044, - "dec": 3063, - "dec": 4628, - "deca": 33428, - "decad": 29914, - "decade": 11099, - "decadent": 41716, - "decades": 10488, - "decal": 
26678, - "decals": 37606, - "decan": 40677, - "decat": 35334, - "decath": 47455, - "decatur": 38540, - "decay": 22703, - "dece": 3534, - "deceased": 30035, - "december": 3864, - "decent": 10698, - "decentr": 28960, - "decentralized": 38485, - "decep": 33529, - "deception": 33046, - "deci": 2262, - "decide": 8447, - "decided": 4939, - "decides": 17269, - "deciding": 22513, - "decision": 5575, - "decisions": 9903, - "decisive": 28690, - "deck": 24885, - "deck": 6943, - "decked": 39096, - "decker": 21449, - "decks": 23968, - "decl": 7091, - "decla": 10739, - "declan": 42341, - "declar": 18040, - "declaration": 19714, - "declare": 19856, - "declared": 13845, - "declares": 23641, - "declaring": 33273, - "decline": 15084, - "declined": 28911, - "declines": 40478, - "declining": 29221, - "deco": 26412, - "deco": 16422, - "decor": 5148, - "decor": 6928, - "decorate": 23651, - "decorated": 15917, - "decorating": 16968, - "decoration": 16029, - "decorations": 19158, - "decorative": 19289, - "decre": 12284, - "decrease": 24703, - "decreased": 33913, - "decreasing": 43763, - "decree": 43327, - "ded": 16744, - "ded": 1241, - "dedic": 4701, - "dedicate": 27610, - "dedicated": 6770, - "dedication": 10188, - "dedly": 36204, - "deduc": 22799, - "dee": 5268, - "dee": 6705, - "deed": 30260, - "deeds": 24516, - "deejay": 48304, - "deejay": 44511, - "deemed": 28102, - "deen": 26456, - "deen": 12912, - "deep": 5462, - "deep": 3383, - "deepak": 45528, - "deeper": 15224, - "deepest": 22245, - "deephouse": 35684, - "deepi": 19371, - "deepika": 34120, - "deepikap": 29903, - "deepikapadukone": 30646, - "deeplear": 22181, - "deeplearning": 24362, - "deeply": 11449, - "deer": 19454, - "deer": 8700, - "deere": 32901, - "dees": 12547, - "deets": 35537, - "def": 2044, - "def": 11649, - "defam": 35670, - "defamation": 42741, - "default": 21650, - "defe": 4148, - "defeat": 8477, - "defeated": 8927, - "defeating": 22594, - "defeats": 16317, - "defect": 44013, - "defects": 37485, - "defen": 3619, - 
"defence": 30307, - "defence": 9659, - "defend": 21970, - "defend": 11397, - "defended": 27161, - "defender": 10618, - "defenders": 20063, - "defending": 13098, - "defends": 20134, - "defense": 45875, - "defense": 6021, - "defenseman": 43714, - "defenses": 49198, - "defensive": 10824, - "defi": 17244, - "defiance": 36186, - "defiant": 47597, - "defibrill": 47684, - "defic": 18022, - "defici": 23387, - "deficiency": 30685, - "deficit": 20156, - "defin": 3188, - "define": 14919, - "defined": 15278, - "defines": 28218, - "defining": 20504, - "definite": 40793, - "definitely": 4824, - "definition": 11405, - "definitive": 25298, - "defl": 31467, - "deforestation": 41330, - "defstar": 36427, - "defy": 39148, - "defying": 38496, - "deg": 38498, - "degra": 28939, - "degradation": 44468, - "degre": 4653, - "degree": 7119, - "degrees": 8000, - "deh": 35582, - "dei": 33833, - "dei": 23279, - "deir": 42948, - "deity": 42574, - "deja": 46902, - "dek": 23901, - "dekalb": 37775, - "del": 1233, - "del": 2003, - "dela": 37986, - "delaney": 31528, - "delav": 23706, - "delavin": 40477, - "delavin": 40776, - "delavinkisses": 40631, - "delaware": 17547, - "delay": 12955, - "delay": 10934, - "delayed": 14567, - "delaying": 43781, - "delays": 11232, - "dele": 7922, - "dele": 33431, - "delec": 38615, - "delectable": 45500, - "deleg": 8046, - "delegate": 27259, - "delegates": 14623, - "delegation": 14632, - "delete": 19204, - "deleted": 16588, - "deleting": 41857, - "delft": 42749, - "delgado": 49182, - "delhi": 26723, - "delhi": 5717, - "deli": 1932, - "deli": 18601, - "delia": 33193, - "deliber": 18316, - "deliberate": 38271, - "deliberately": 35163, - "delic": 13366, - "delicacy": 49181, - "delicate": 18768, - "delici": 19993, - "delicious": 3959, - "deliciously": 39589, - "deliciousness": 42819, - "delight": 46165, - "delight": 13073, - "delighted": 5943, - "delightful": 15513, - "delights": 25330, - "deline": 18797, - "delines": 13562, - "delish": 25093, - "deliver": 19561, - 
"deliver": 7396, - "delivered": 7278, - "deliveries": 29336, - "delivering": 9943, - "delivers": 11753, - "delivery": 5619, - "dell": 24381, - "dell": 10242, - "della": 22986, - "delle": 35963, - "deloit": 29428, - "deloitte": 38667, - "dels": 48636, - "delta": 32250, - "delta": 8768, - "delu": 18779, - "delusional": 48059, - "delux": 13709, - "deluxe": 14056, - "delve": 46008, - "dely": 15040, - "dem": 3251, - "dem": 7825, - "dema": 40268, - "dema": 45046, - "deman": 48366, - "demand": 13072, - "demand": 5650, - "demanded": 33699, - "demanding": 17099, - "demands": 14241, - "demar": 46566, - "demarcus": 47873, - "demb": 35930, - "demdebate": 43973, - "deme": 25143, - "demean": 37376, - "demen": 12604, - "dementi": 46028, - "dementia": 14047, - "demetri": 39553, - "demi": 32879, - "demi": 14480, - "demise": 28756, - "demo": 2930, - "demo": 7380, - "democr": 3573, - "democracy": 7758, - "democrat": 15431, - "democratic": 9149, - "democrats": 8865, - "demographic": 31308, - "demol": 19382, - "demolished": 26537, - "demolition": 22237, - "demon": 5635, - "demon": 12085, - "demonetisation": 41338, - "demonic": 46920, - "demons": 18388, - "demonstr": 8579, - "demonstrate": 22231, - "demonstrated": 29477, - "demonstrates": 24806, - "demonstrating": 22107, - "demonstration": 16722, - "demonstrations": 33964, - "demonstrators": 46450, - "demos": 19304, - "demp": 22490, - "dempsey": 30188, - "dems": 10989, - "demsin": 42664, - "demsinphilly": 43091, - "den": 1177, - "den": 1181, - "dena": 32431, - "denali": 48076, - "dence": 3370, - "dency": 11659, - "dend": 37447, - "dends": 43985, - "dene": 45128, - "dened": 19571, - "deng": 43098, - "deng": 41788, - "dengue": 41932, - "denham": 39180, - "deni": 21995, - "denial": 25716, - "denied": 15780, - "denies": 19565, - "denim": 13606, - "denis": 47630, - "denis": 18750, - "denise": 45900, - "denise": 20899, - "denmark": 13268, - "dennis": 32738, - "dennis": 10534, - "denny": 26808, - "denomin": 41016, - "dens": 16533, - "dense": 
19353, - "density": 22431, - "dent": 3593, - "dent": 1258, - "dental": 24635, - "dental": 8382, - "dentally": 10346, - "dented": 21923, - "denti": 4418, - "dential": 5459, - "dentist": 17816, - "dentistry": 25754, - "dently": 28817, - "denton": 23567, - "dents": 1517, - "denver": 27847, - "denver": 8569, - "deny": 18679, - "denying": 32771, - "denzel": 42503, - "deo": 26406, - "deo": 12121, - "deodor": 47639, - "deol": 41902, - "deon": 31466, - "deon": 16079, - "dep": 6079, - "dep": 24370, - "depar": 10794, - "depart": 5343, - "depart": 30649, - "departed": 32541, - "departing": 26902, - "department": 5744, - "departments": 29523, - "departs": 38998, - "departure": 17850, - "depe": 36118, - "depend": 13894, - "depend": 27371, - "dependence": 40243, - "dependent": 23280, - "depending": 23673, - "depends": 20497, - "depic": 11307, - "depicted": 34637, - "depicting": 24970, - "depiction": 31071, - "depicts": 29340, - "deple": 38504, - "deplo": 9356, - "deplor": 39232, - "deploy": 26944, - "deployed": 20009, - "deploying": 42212, - "deployment": 20183, - "depo": 14276, - "depor": 36110, - "deport": 23389, - "deportation": 36617, - "deported": 39320, - "deportes": 47878, - "depos": 21266, - "deposit": 16775, - "deposits": 30740, - "depot": 12589, - "depp": 24941, - "depre": 7107, - "depress": 38869, - "depressed": 23269, - "depressing": 29235, - "depression": 10023, - "depri": 28587, - "depriv": 45809, - "deprivation": 47810, - "deprived": 39140, - "dept": 9201, - "depth": 10350, - "depths": 28855, - "depu": 6912, - "deputies": 24914, - "deputy": 7932, - "der": 839, - "der": 801, - "dera": 20696, - "derail": 48502, - "derby": 13904, - "derby": 7177, - "derbyshire": 22147, - "derdale": 21513, - "dere": 5701, - "dere": 44194, - "dered": 3776, - "derek": 22461, - "derek": 11205, - "derel": 46728, - "derer": 11289, - "derers": 20882, - "deri": 34573, - "derick": 33908, - "dering": 6076, - "deriv": 33458, - "derived": 26461, - "derland": 35488, - "derman": 29740, - 
"dermatology": 48051, - "dern": 30086, - "dero": 37203, - "dero": 34026, - "derrick": 21798, - "derry": 45777, - "derry": 20535, - "ders": 37307, - "ders": 1923, - "derson": 12677, - "dery": 17172, - "des": 6797, - "des": 1437, - "desai": 35316, - "desc": 13866, - "descen": 32318, - "descend": 26004, - "descend": 46241, - "descendants": 36323, - "descending": 36620, - "descent": 19375, - "desch": 49209, - "descri": 4637, - "describe": 10967, - "described": 14671, - "describes": 13678, - "describing": 24239, - "descrip": 41832, - "description": 13951, - "descriptions": 40653, - "desde": 42218, - "dese": 27195, - "deser": 3659, - "desert": 45776, - "desert": 7301, - "deserted": 41560, - "deserve": 7043, - "deserved": 10061, - "deserves": 9079, - "deserving": 26615, - "desh": 25320, - "desh": 7448, - "deshi": 42769, - "desi": 6772, - "desi": 26635, - "desig": 1250, - "design": 8359, - "design": 1681, - "designated": 24119, - "designation": 41155, - "designed": 4486, - "designer": 35640, - "designer": 5728, - "designers": 12720, - "designing": 13467, - "designs": 6747, - "designthinking": 32450, - "desirable": 32368, - "desire": 11858, - "desired": 28631, - "desires": 27598, - "desk": 11937, - "desk": 6550, - "desks": 41014, - "desktop": 14345, - "desmond": 27821, - "desol": 41258, - "desp": 3642, - "despair": 28097, - "desper": 10144, - "desperate": 15072, - "desperately": 21993, - "despic": 32442, - "despicable": 37158, - "despite": 5325, - "dess": 7096, - "dess": 10001, - "dessert": 9753, - "desserts": 22948, - "desses": 43913, - "dest": 6540, - "dest": 4549, - "destin": 4934, - "destination": 32191, - "destination": 9179, - "destinations": 16981, - "destined": 28525, - "destiny": 39875, - "destiny": 10867, - "destro": 8287, - "destroy": 8308, - "destroy": 11930, - "destroyed": 9965, - "destroyer": 25291, - "destroying": 19613, - "destroys": 27634, - "destruc": 22945, - "destruction": 14281, - "destructive": 29591, - "det": 28966, - "det": 15366, - "deta": 1914, - 
"detached": 26252, - "detail": 7657, - "detailed": 12609, - "detailing": 23163, - "details": 2353, - "detained": 20260, - "dete": 5606, - "detec": 17991, - "detect": 22744, - "detected": 26988, - "detecting": 41290, - "detection": 16220, - "detective": 13672, - "detectives": 27994, - "detector": 27689, - "detectors": 45063, - "detention": 16908, - "deter": 10742, - "deter": 47458, - "detergent": 46726, - "deterior": 28512, - "determin": 8325, - "determination": 17410, - "determine": 16768, - "determined": 14371, - "determines": 42192, - "determining": 39884, - "deth": 38375, - "deto": 39710, - "deton": 39335, - "detour": 31211, - "detox": 22459, - "detri": 47951, - "detro": 6210, - "detroit": 19404, - "detroit": 7073, - "detta": 45438, - "dette": 35750, - "deu": 21457, - "deuce": 45332, - "deus": 37625, - "deut": 14970, - "deutsch": 30389, - "deutsche": 32760, - "deutschland": 36878, - "deux": 47089, - "dev": 2797, - "dev": 3670, - "deva": 45179, - "devan": 37072, - "devast": 12913, - "devastated": 29865, - "devastating": 19280, - "devastation": 42452, - "devel": 1820, - "develop": 1966, - "develop": 7708, - "developed": 8763, - "developer": 10929, - "developers": 13248, - "developing": 8131, - "development": 2855, - "developmental": 29347, - "developments": 17393, - "develops": 29895, - "deven": 45537, - "devgn": 29871, - "devi": 12926, - "devi": 20717, - "deviant": 25593, - "deviantart": 26046, - "device": 8163, - "devices": 9067, - "devil": 8894, - "devil": 8043, - "deville": 34329, - "devils": 11683, - "devin": 31193, - "devin": 20996, - "devine": 33019, - "devlin": 48040, - "devo": 11861, - "devo": 43444, - "devon": 16205, - "devon": 10046, - "devops": 21504, - "devos": 40646, - "devote": 37777, - "devoted": 24561, - "devotees": 39759, - "devotion": 25821, - "devotional": 35456, - "devs": 27374, - "dew": 31952, - "dew": 16358, - "dewey": 40399, - "dex": 10030, - "dex": 13790, - "dexpo": 42502, - "dexter": 45049, - "dexter": 22781, - "dey": 11829, - "dez": 
23190, - "dez": 8122, - "df": 12908, - "df": 10468, - "dfc": 41903, - "dfs": 32880, - "dfw": 20439, - "dg": 2394, - "dg": 9742, - "dgate": 41684, - "dge": 4016, - "dge": 1360, - "dged": 11830, - "dgeon": 45655, - "dgers": 8733, - "dges": 5432, - "dging": 9565, - "dh": 6669, - "dh": 9960, - "dha": 11629, - "dha": 27377, - "dhabi": 22349, - "dhaka": 32877, - "dham": 29635, - "dham": 30838, - "dhan": 12542, - "dhan": 28569, - "dhanush": 26162, - "dhanush": 36200, - "dhanushkraja": 29266, - "dhar": 12397, - "dharma": 30536, - "dhary": 28706, - "dhawan": 44699, - "dhe": 29706, - "dheim": 44280, - "dhi": 31553, - "dhi": 26166, - "dho": 37834, - "dhoni": 25698, - "dhru": 40257, - "dhry": 39960, - "dhs": 26849, - "dhu": 32387, - "di": 570, - "di": 1618, - "dia": 7351, - "dia": 3357, - "diab": 15954, - "diabe": 19167, - "diabete": 43826, - "diabetes": 10319, - "diabetic": 30230, - "diablo": 23931, - "diag": 6851, - "diagno": 7736, - "diagnose": 44429, - "diagnosed": 16979, - "diagnosis": 15715, - "diagnostic": 26351, - "diagnostics": 37723, - "diagram": 22697, - "dial": 18416, - "dial": 11381, - "dialo": 30709, - "dialog": 48945, - "dialogue": 11288, - "dialogues": 40330, - "dialysis": 44798, - "diam": 4347, - "diameter": 27189, - "diamon": 8873, - "diamond": 18535, - "diamond": 6235, - "diamonds": 12687, - "dian": 16021, - "dian": 4998, - "diana": 12803, - "diane": 15855, - "dianne": 42299, - "dians": 21041, - "diaper": 34382, - "diapers": 39659, - "diar": 25932, - "diaries": 15541, - "diary": 10380, - "dias": 22137, - "dias": 29354, - "diaspora": 28390, - "diaz": 17688, - "dic": 1404, - "dic": 6717, - "dicap": 30023, - "dicaprio": 30755, - "dice": 14406, - "dick": 14413, - "dick": 9554, - "dickens": 33421, - "dict": 45360, - "dict": 15159, - "dictat": 26156, - "dictator": 27399, - "dictatorship": 37989, - "dictionary": 19699, - "did": 1861, - "did": 1335, - "diddy": 33527, - "didi": 34396, - "didier": 45614, - "didn": 2376, - "didnt": 13057, - "dido": 31725, - "didyou": 
12295, - "didyouknow": 12506, - "die": 3150, - "die": 2082, - "diec": 27729, - "diecast": 37936, - "died": 3622, - "diego": 30940, - "diego": 6306, - "diem": 45571, - "dience": 33686, - "dient": 27231, - "dier": 29702, - "dier": 16394, - "dies": 20104, - "dies": 1862, - "diesel": 46312, - "diesel": 10591, - "diest": 45739, - "diet": 21295, - "diet": 6582, - "dietary": 29009, - "dietrich": 47005, - "diets": 35173, - "dif": 18656, - "dif": 48731, - "diff": 44073, - "diff": 20331, - "diffe": 1967, - "differ": 34620, - "differen": 14903, - "difference": 4731, - "differences": 14003, - "different": 2731, - "differenti": 21729, - "differential": 34027, - "differentiate": 49032, - "differently": 18325, - "diffic": 6140, - "difficult": 7405, - "difficulties": 23468, - "difficulty": 25245, - "diffu": 31603, - "diffuser": 49400, - "dig": 1831, - "dig": 9887, - "dige": 17820, - "digest": 20413, - "digestion": 40533, - "digestive": 32304, - "digg": 43240, - "digger": 35919, - "diggin": 48466, - "digging": 14971, - "digi": 15627, - "digi": 39361, - "digimon": 44181, - "digit": 14899, - "digit": 27472, - "digital": 4704, - "digital": 2794, - "digitalart": 16987, - "digitalhealth": 32190, - "digitalindia": 46630, - "digitally": 27543, - "digitalmarketing": 15299, - "digitaltransformation": 20047, - "digiti": 25935, - "digits": 31710, - "digni": 45532, - "dignit": 39497, - "dignity": 17744, - "digo": 35701, - "digs": 26877, - "dih": 43089, - "dii": 32755, - "dijk": 44444, - "dik": 38854, - "dik": 37747, - "dike": 42683, - "dil": 7643, - "dil": 17942, - "dile": 25428, - "dilemma": 29787, - "dilig": 30664, - "dill": 12318, - "dill": 27206, - "dillon": 21056, - "dilu": 45242, - "dim": 19576, - "dim": 17523, - "dime": 24443, - "dimen": 10935, - "dimension": 20479, - "dimensional": 25252, - "dimensions": 25086, - "diment": 43500, - "dimes": 44888, - "dimini": 37459, - "dimit": 22250, - "dimitri": 48840, - "dimp": 38853, - "din": 1462, - "din": 5673, - "dina": 36815, - "dinah": 30903, - 
"dine": 20951, - "dine": 12989, - "diner": 16963, - "dinesh": 48341, - "ding": 7545, - "ding": 796, - "dinger": 45580, - "dingh": 48064, - "dings": 5473, - "dington": 24804, - "dinho": 47370, - "dini": 20196, - "dining": 8658, - "dinner": 27548, - "dinner": 2571, - "dinners": 33570, - "dino": 9692, - "dino": 14077, - "dinosa": 18955, - "dinosaur": 15095, - "dinosaurs": 20387, - "dio": 3779, - "dio": 1521, - "dioce": 20763, - "diocese": 27091, - "dion": 42899, - "dion": 16250, - "dior": 23655, - "dios": 37563, - "dious": 27417, - "dioxide": 38102, - "dip": 19918, - "dip": 11343, - "dipl": 8490, - "diplo": 38115, - "diplom": 11169, - "diploma": 21251, - "diplomacy": 23798, - "diplomat": 32828, - "diplomatic": 23782, - "diplomats": 44126, - "dipped": 30610, - "dipper": 49317, - "dipping": 33544, - "dips": 37522, - "dir": 4251, - "dir": 8478, - "dire": 38355, - "dire": 25664, - "direc": 1534, - "direct": 43224, - "direct": 6016, - "directed": 8392, - "directing": 21817, - "direction": 15923, - "direction": 5407, - "directional": 38687, - "directioner": 48042, - "directioners": 22055, - "directions": 16440, - "directive": 40630, - "directly": 9701, - "director": 20337, - "director": 2681, - "directorial": 45327, - "directors": 11940, - "directory": 25272, - "directs": 34349, - "directv": 48652, - "dirk": 28171, - "dirt": 31415, - "dirt": 11795, - "dirty": 20127, - "dirty": 7615, - "dis": 1518, - "dis": 6112, - "disa": 3882, - "disab": 47380, - "disabilities": 17350, - "disability": 48986, - "disability": 13261, - "disabled": 13613, - "disadvantaged": 40577, - "disagree": 23199, - "disapp": 5384, - "disappear": 21148, - "disappear": 25173, - "disappearance": 35929, - "disappeared": 23139, - "disappearing": 35819, - "disappears": 44406, - "disappo": 7605, - "disappoint": 25446, - "disappointed": 13794, - "disappointing": 21941, - "disappointment": 23884, - "disappoints": 48545, - "disappro": 48276, - "disar": 42971, - "disaster": 9072, - "disasters": 26976, - 
"disastrous": 35790, - "disc": 1472, - "disc": 10712, - "discar": 40532, - "discarded": 45197, - "discer": 49140, - "dischar": 22671, - "discharge": 32485, - "disci": 9559, - "discip": 38951, - "discipl": 10467, - "disciples": 39366, - "disciplinary": 20232, - "discipline": 18903, - "disciplines": 42032, - "discla": 40248, - "disclaimer": 46465, - "disclo": 17481, - "disclose": 46379, - "disclosed": 30905, - "disclosure": 26502, - "disco": 2475, - "disco": 11964, - "discography": 47545, - "discomfort": 48054, - "discord": 23582, - "discoun": 18515, - "discount": 7638, - "discounted": 20993, - "discounts": 18186, - "discoura": 45850, - "discourse": 29441, - "discover": 10539, - "discover": 4834, - "discovered": 6986, - "discoveries": 29308, - "discovering": 17967, - "discovers": 29719, - "discovery": 40491, - "discovery": 8027, - "discre": 20616, - "discrimin": 11721, - "discrimination": 14775, - "discs": 29270, - "discu": 1984, - "discus": 41828, - "discuss": 4312, - "discussed": 11300, - "discusses": 8116, - "discussing": 5900, - "discussion": 5060, - "discussions": 13806, - "dise": 4262, - "disease": 5336, - "diseases": 12035, - "disen": 46468, - "disgrace": 29877, - "disgraceful": 44146, - "disgu": 9793, - "disguise": 27803, - "disguised": 37149, - "disgusted": 41977, - "disgusting": 16218, - "dish": 11039, - "dish": 4531, - "disha": 42498, - "dishes": 11412, - "dishon": 30777, - "dishu": 44728, - "dishwasher": 40524, - "disin": 19484, - "disinfe": 48050, - "disintegr": 49275, - "disk": 17970, - "dislike": 30796, - "dism": 30836, - "dism": 38821, - "dismant": 36557, - "dismiss": 43287, - "dismissal": 42068, - "dismissed": 30087, - "dismisses": 45238, - "disney": 6729, - "disney": 4696, - "disneyland": 39481, - "disneyland": 13661, - "disneyworld": 28469, - "diso": 26305, - "disobe": 42841, - "dison": 19310, - "disorder": 12635, - "disorders": 17114, - "disp": 11073, - "dispar": 24633, - "disparities": 45122, - "dispat": 28652, - "dispatch": 26306, - "dispen": 
19077, - "dispenser": 40116, - "disper": 34499, - "displa": 9326, - "displac": 17718, - "displaced": 22817, - "displacement": 37931, - "display": 4456, - "displayed": 18967, - "displaying": 26468, - "displays": 15648, - "dispo": 13651, - "dispon": 38872, - "disponible": 46130, - "dispos": 45177, - "disposable": 37275, - "disposal": 28231, - "dispro": 32927, - "dispropor": 40354, - "disproportion": 45492, - "disregard": 43869, - "disrespect": 34055, - "disrespectful": 41723, - "disru": 13763, - "disrup": 14641, - "disrupt": 25214, - "disrupted": 46674, - "disrupting": 42419, - "disruption": 19635, - "disruptive": 31554, - "diss": 10766, - "diss": 35688, - "dissec": 43879, - "dissemin": 40463, - "dissent": 45154, - "disser": 25560, - "dissertation": 29448, - "dissi": 25088, - "dissol": 27398, - "dissuper": 33461, - "dist": 5479, - "dist": 12116, - "distance": 7964, - "distances": 37078, - "distant": 18949, - "distill": 41586, - "distilled": 49179, - "distillery": 22200, - "distin": 11892, - "distinct": 25056, - "distinction": 28183, - "distinctive": 25486, - "distingui": 15053, - "distinguish": 45418, - "distinguished": 16513, - "distor": 23781, - "distortion": 43690, - "distr": 11885, - "distract": 39309, - "distracted": 24049, - "distraction": 32039, - "distress": 26866, - "distressed": 37515, - "distri": 5987, - "distribu": 6138, - "distribute": 32313, - "distributed": 16419, - "distributing": 35216, - "distribution": 10484, - "distributor": 28354, - "distributors": 44240, - "distric": 3208, - "district": 46683, - "district": 3506, - "districts": 17565, - "distur": 11732, - "disturb": 33018, - "disturb": 39449, - "disturbance": 42416, - "disturbed": 29967, - "disturbing": 21476, - "disupdates": 45667, - "dit": 5752, - "dit": 2524, - "dita": 47965, - "ditch": 43715, - "ditch": 19291, - "dited": 40392, - "diti": 2363, - "dition": 16452, - "dition": 3015, - "ditional": 4322, - "ditions": 4503, - "dito": 43705, - "dits": 49374, - "dity": 16436, - "dium": 2903, - 
"div": 5293, - "div": 14869, - "diva": 13605, - "divas": 23534, - "dive": 26042, - "dive": 9058, - "diver": 13119, - "diver": 22094, - "divergence": 48735, - "divergent": 36132, - "divers": 30241, - "divers": 27038, - "diverse": 11464, - "diversi": 24475, - "diversion": 38457, - "diversity": 35634, - "diversity": 6257, - "diverted": 41049, - "dives": 13893, - "divi": 8375, - "divid": 31337, - "divide": 18842, - "divided": 18689, - "dividend": 32067, - "dividends": 45146, - "dividing": 45605, - "divin": 21838, - "divine": 46919, - "divine": 10976, - "diving": 9886, - "divinity": 39754, - "divisi": 39196, - "division": 5378, - "divisional": 40912, - "divisions": 33715, - "divor": 13543, - "divorce": 17060, - "divorced": 39437, - "divya": 47767, - "diwali": 18218, - "dix": 45838, - "dix": 27620, - "dixie": 24484, - "dixit": 28279, - "dixon": 16086, - "diy": 28472, - "diy": 7845, - "diya": 36459, - "diz": 32740, - "dized": 36232, - "dizz": 40239, - "dizzy": 35464, - "dj": 3761, - "dj": 3723, - "djan": 35338, - "django": 46498, - "dji": 35284, - "dji": 28379, - "djing": 36113, - "djo": 19432, - "djoker": 42721, - "djokernole": 42830, - "djokovic": 27944, - "djs": 18117, - "dk": 20702, - "dk": 16196, - "dl": 12558, - "dl": 9373, - "dlc": 19079, - "dle": 11057, - "dle": 3287, - "dled": 23494, - "dler": 40279, - "dles": 7890, - "dless": 14997, - "dley": 12808, - "dling": 18221, - "dly": 3069, - "dm": 19070, - "dm": 4667, - "dma": 42903, - "dman": 18826, - "dmc": 28991, - "dmit": 31607, - "dmitry": 48326, - "dms": 19955, - "dmv": 27508, - "dmx": 45255, - "dn": 11552, - "dn": 7459, - "dna": 8790, - "dnb": 35422, - "dnc": 20237, - "dnd": 11678, - "dnr": 37051, - "dns": 39245, - "dnt": 26795, - "do": 639, - "do": 818, - "doa": 48332, - "dob": 29640, - "doba": 35605, - "dobbs": 43006, - "dobson": 46888, - "doc": 3009, - "doc": 7251, - "doch": 25101, - "dock": 17311, - "dock": 8997, - "docked": 46784, - "docker": 31152, - "docking": 40845, - "docks": 24091, - "docs": 15157, - 
"doctor": 7872, - "doctor": 5547, - "doctoral": 23649, - "doctorate": 39134, - "doctors": 9705, - "doctorwho": 12996, - "doctr": 28497, - "doctrine": 35612, - "docu": 4433, - "document": 29293, - "document": 15121, - "documentaries": 44209, - "documentary": 7881, - "documentation": 31560, - "documented": 22310, - "documenting": 37876, - "documents": 14105, - "dod": 13847, - "dod": 30187, - "dodd": 36748, - "dodge": 31263, - "dodge": 12093, - "dodgeball": 43244, - "dodger": 31641, - "dodgers": 12422, - "dodgy": 37727, - "doe": 13296, - "does": 2397, - "does": 1897, - "doesn": 2503, - "doesnt": 17937, - "dof": 8277, - "doff": 20193, - "dofficial": 42516, - "dog": 4326, - "dog": 1929, - "dogcelebration": 41819, - "dogday": 27475, - "doge": 42187, - "dogg": 20749, - "doggie": 32237, - "doggo": 42155, - "doggy": 26359, - "doglo": 40733, - "dogre": 40030, - "dogrescue": 44158, - "dogs": 42182, - "dogs": 3255, - "dogsoftwitter": 19415, - "doh": 23581, - "doha": 20908, - "doherty": 31774, - "doi": 36361, - "doin": 15412, - "doing": 37408, - "doing": 1960, - "doit": 32272, - "doit": 28109, - "doj": 25700, - "dojo": 35901, - "dok": 40547, - "dok": 41034, - "doka": 46528, - "dol": 2287, - "dol": 19170, - "dola": 38005, - "dolan": 27200, - "dolby": 42414, - "dolce": 30033, - "dolce": 30661, - "dole": 41040, - "doll": 27031, - "doll": 9286, - "dollar": 35092, - "dollar": 7474, - "dollars": 10669, - "dolls": 15090, - "dolly": 43281, - "dolly": 23821, - "dolom": 37137, - "dolores": 40741, - "dolph": 8900, - "dolph": 22257, - "dolphin": 42963, - "dolphin": 16464, - "dolphins": 14002, - "dom": 2164, - "dom": 1919, - "domain": 15492, - "domaine": 48744, - "domains": 36358, - "dome": 8515, - "dome": 9827, - "domen": 37584, - "domest": 21936, - "domestic": 28189, - "domestic": 9043, - "domin": 4361, - "dominance": 30546, - "dominant": 20565, - "dominate": 21431, - "dominated": 23048, - "dominates": 34043, - "dominating": 29303, - "domination": 30919, - "domingo": 24882, - "dominic": 
39007, - "dominic": 19095, - "dominican": 22934, - "dominion": 27155, - "domino": 30752, - "dominos": 39770, - "domo": 44293, - "doms": 30126, - "don": 1067, - "don": 847, - "dona": 26789, - "donal": 42375, - "donald": 5990, - "donald": 4335, - "donaldson": 37783, - "donaldtrump": 6652, - "donat": 36384, - "donate": 6429, - "donated": 8705, - "donates": 26960, - "donating": 12621, - "donation": 7924, - "donations": 9928, - "doncaster": 38008, - "doncaster": 25352, - "doncasterisgreat": 47333, - "done": 5136, - "done": 1700, - "donegal": 24172, - "donesia": 41281, - "donet": 33724, - "donetsk": 33999, - "dong": 26242, - "dong": 31478, - "dongha": 28365, - "donghae": 28945, - "donia": 24014, - "donkey": 21415, - "donkeys": 44644, - "donna": 9158, - "donne": 30897, - "donnein": 38308, - "donneinarte": 40193, - "donnell": 35118, - "donnelly": 39070, - "donnie": 47058, - "donnie": 30609, - "donny": 37291, - "donny": 32887, - "dono": 14840, - "donor": 18013, - "donors": 17887, - "donovan": 21499, - "dons": 22127, - "dont": 8094, - "dont": 4632, - "donut": 18471, - "donuts": 13970, - "doo": 4543, - "doo": 11643, - "doodle": 9388, - "doodled": 41030, - "doodles": 22156, - "doodling": 37548, - "dooley": 47609, - "doom": 23263, - "doom": 14344, - "doomed": 33251, - "doomsday": 41791, - "doon": 36612, - "doop": 33886, - "door": 7188, - "door": 2489, - "doors": 4228, - "doorstep": 19533, - "doorway": 46575, - "dop": 42381, - "dop": 31722, - "dope": 42587, - "dope": 10094, - "doping": 30285, - "dopp": 21774, - "doppelg": 45216, - "doppler": 42540, - "dor": 2766, - "dor": 8695, - "dora": 18104, - "dorado": 32350, - "dorchester": 32656, - "dore": 39423, - "dores": 34323, - "dorf": 17296, - "dori": 49270, - "doria": 43186, - "dorian": 44016, - "doris": 24285, - "dork": 36206, - "dorm": 24263, - "doro": 15498, - "doro": 37389, - "dorothy": 20805, - "dors": 31240, - "dorset": 42109, - "dorset": 16047, - "dorsey": 41607, - "dortmund": 24290, - "dory": 36135, - "dos": 44258, - "dos": 
5474, - "dose": 11497, - "doses": 37873, - "dossier": 46042, - "dost": 44222, - "dot": 7473, - "dot": 7004, - "dota": 23085, - "dotcom": 12443, - "dote": 31202, - "dothis": 47864, - "dotnet": 43124, - "dotorg": 46587, - "dots": 19019, - "dotted": 47950, - "dou": 1756, - "dou": 23608, - "doub": 19631, - "double": 13013, - "double": 3200, - "doubled": 24948, - "doubleheader": 34668, - "doubles": 12539, - "doubling": 36850, - "doubt": 37071, - "doubt": 8671, - "doubts": 30894, - "douche": 44292, - "doug": 20271, - "doug": 10758, - "dough": 15785, - "dough": 14983, - "doughnut": 32555, - "doughnuts": 31124, - "dougie": 46317, - "dougla": 9140, - "douglas": 10065, - "douglass": 45692, - "doun": 44785, - "dov": 38856, - "dova": 26551, - "dove": 27511, - "dove": 18281, - "dover": 43019, - "dover": 14683, - "doves": 47067, - "dow": 8022, - "dow": 10688, - "dowell": 27344, - "down": 1833, - "down": 1136, - "downe": 46501, - "downed": 35814, - "downer": 42522, - "downers": 43739, - "downey": 29429, - "downfall": 48702, - "downhill": 27387, - "downing": 28140, - "download": 35076, - "download": 3794, - "downloadable": 49105, - "downloaded": 22961, - "downloading": 30519, - "downloads": 26481, - "downpour": 39034, - "downpours": 40160, - "downs": 10706, - "downside": 41937, - "downstairs": 28174, - "downstream": 43822, - "downtime": 41964, - "downton": 45023, - "downton": 42668, - "downtown": 18230, - "downtown": 5061, - "downward": 37430, - "dowski": 43556, - "dox": 44786, - "dox": 14510, - "doyle": 17728, - "doyou": 27256, - "doz": 31106, - "dozen": 16401, - "dozens": 17883, - "dp": 23820, - "dp": 6465, - "dprint": 46644, - "dprinting": 16194, - "dprk": 47920, - "dps": 34288, - "dq": 28741, - "dr": 1084, - "dr": 1701, - "dra": 1114, - "dra": 7402, - "drac": 20168, - "dracing": 41253, - "dracula": 25405, - "draf": 37426, - "draft": 30624, - "draft": 5198, - "drafted": 19129, - "drafting": 33528, - "drafts": 29194, - "drag": 8452, - "drag": 12463, - "dragged": 27884, - 
"dragging": 37069, - "dragon": 9187, - "dragon": 5471, - "dragonball": 40959, - "dragoncon": 47802, - "dragonfly": 32824, - "dragons": 10203, - "dragrace": 40762, - "drags": 45368, - "drain": 23347, - "drain": 19467, - "drainage": 25953, - "drained": 44630, - "drains": 43638, - "drainthe": 47337, - "drake": 32504, - "drake": 8958, - "dral": 7503, - "dram": 6937, - "dram": 32170, - "drama": 5055, - "dramas": 33467, - "dramati": 43512, - "dramatic": 11240, - "dramatically": 24495, - "drank": 21712, - "draped": 49113, - "drastic": 43159, - "drastically": 35478, - "drau": 18621, - "draw": 17675, - "draw": 4001, - "drawer": 23219, - "drawers": 38975, - "drawing": 36996, - "drawing": 3610, - "drawings": 13397, - "drawn": 8893, - "draws": 12043, - "dray": 25562, - "drayton": 49044, - "drc": 21434, - "dre": 960, - "dre": 14584, - "dread": 17412, - "dread": 31403, - "dreaded": 47227, - "dreadful": 35846, - "dreality": 48367, - "dream": 4595, - "dream": 2984, - "dreambig": 46495, - "dreamcast": 47226, - "dreamed": 27984, - "dreamer": 25692, - "dreamers": 27194, - "dreaming": 11662, - "dreamliner": 49143, - "dreams": 4405, - "dreamt": 43743, - "dreamteam": 40090, - "dreamy": 23517, - "dred": 10903, - "dredge": 48783, - "dren": 29068, - "dren": 47309, - "drenched": 46378, - "dres": 48852, - "dres": 44697, - "dresden": 34836, - "dress": 12622, - "dress": 2595, - "dressage": 36144, - "dressed": 6559, - "dresser": 26346, - "dresses": 8184, - "dressing": 6348, - "drew": 18792, - "drew": 5281, - "drex": 33985, - "drey": 48271, - "dri": 1203, - "dri": 28833, - "drian": 36870, - "dribb": 42153, - "dric": 23448, - "dridge": 22956, - "drie": 40170, - "dried": 16037, - "drier": 39877, - "dries": 33857, - "drif": 33585, - "drift": 18194, - "drifting": 30276, - "drill": 11626, - "drilled": 46338, - "drilling": 18634, - "drills": 24378, - "drin": 3375, - "drin": 47133, - "drink": 14131, - "drink": 3979, - "drinking": 5778, - "drinklocal": 45998, - "drinks": 6732, - "drip": 24050, - 
"dripping": 38787, - "dris": 35804, - "drive": 11402, - "drive": 2620, - "driven": 9314, - "driver": 27563, - "driver": 4383, - "driverless": 46769, - "drivers": 7384, - "drives": 11441, - "driveway": 26273, - "driving": 37800, - "driving": 4161, - "drizzle": 28240, - "drm": 39674, - "dro": 1494, - "dro": 12442, - "drogba": 49199, - "droid": 38016, - "drome": 9157, - "dron": 43898, - "dron": 23360, - "drone": 33557, - "drone": 9397, - "drones": 14006, - "droo": 30715, - "drool": 41554, - "drooling": 44360, - "drop": 16407, - "drop": 3387, - "dropbox": 47216, - "dropped": 6792, - "dropping": 8339, - "drops": 6437, - "dros": 47033, - "drou": 38558, - "drought": 13935, - "drove": 13753, - "drow": 21159, - "drown": 28571, - "drowned": 34005, - "drowning": 24618, - "drs": 21257, - "dru": 2275, - "dru": 49048, - "drug": 20601, - "drug": 5600, - "drugs": 8021, - "druid": 40297, - "drum": 13353, - "drum": 8698, - "drummer": 13618, - "drummers": 46191, - "drumming": 35480, - "drummond": 42213, - "drums": 11690, - "drun": 15488, - "drunk": 37398, - "drunk": 8232, - "drunken": 28196, - "drupal": 46481, - "drush": 43009, - "drwho": 48342, - "dry": 13544, - "dry": 4501, - "dryer": 24425, - "drying": 23203, - "ds": 3361, - "ds": 646, - "dsa": 47607, - "dsb": 47168, - "dsb": 14257, - "dsburg": 47237, - "dsc": 37240, - "dsd": 45383, - "dsley": 40740, - "dslr": 33740, - "dsm": 39502, - "dson": 40310, - "dsp": 45291, - "dss": 41580, - "dstv": 35027, - "dt": 13104, - "dt": 7427, - "dthe": 13863, - "dtla": 31885, - "dtm": 42407, - "dts": 46233, - "du": 691, - "du": 3686, - "dua": 25244, - "dual": 39739, - "dual": 5347, - "duane": 38946, - "dub": 14526, - "dub": 13144, - "duba": 5485, - "dubai": 32599, - "dubai": 5985, - "dubbed": 27740, - "dublin": 20707, - "dublin": 6145, - "dubnation": 47329, - "dubois": 48046, - "dubrov": 46709, - "dubrovnik": 48724, - "dubs": 27013, - "dubstep": 38303, - "dubu": 43257, - "duc": 979, - "duc": 36446, - "ducati": 28570, - "ducation": 17197, - "duce": 
3660, - "duchess": 21713, - "duck": 12708, - "duck": 6910, - "ducks": 11202, - "duct": 26829, - "dude": 48087, - "dude": 5710, - "dudes": 14449, - "dudley": 27324, - "due": 2887, - "duel": 27143, - "dues": 37646, - "duet": 25457, - "duf": 38713, - "duff": 38071, - "duff": 21934, - "duffy": 23599, - "dug": 22743, - "dug": 21000, - "dugg": 40523, - "duggan": 46169, - "dugout": 36831, - "duh": 26716, - "dui": 29693, - "duk": 14160, - "duke": 18402, - "duke": 7732, - "dukes": 27914, - "dul": 6738, - "dulce": 44872, - "dulil": 32565, - "dulkar": 47980, - "dull": 19433, - "dulu": 28865, - "duluth": 32109, - "dulwich": 47343, - "dum": 13400, - "dum": 11564, - "dumb": 15901, - "dumb": 12464, - "dumbass": 38980, - "dummies": 40899, - "dummy": 34246, - "dump": 12655, - "dump": 17146, - "dumped": 23768, - "dumping": 31707, - "dumplings": 35495, - "dumps": 45804, - "dumpster": 45467, - "dun": 2616, - "dun": 18284, - "dunbar": 41453, - "duncan": 31084, - "duncan": 13502, - "dundal": 38185, - "dundas": 39300, - "dundee": 18619, - "dune": 32833, - "dune": 28208, - "dunedin": 40121, - "dunes": 23526, - "dung": 33712, - "dungeon": 28812, - "dungeon": 22931, - "dungeons": 42572, - "dungeonsand": 34970, - "dungeonsanddragons": 35497, - "dunham": 42501, - "duni": 43454, - "dunk": 17222, - "dunkin": 48022, - "dunkin": 36415, - "dunkirk": 46928, - "dunks": 48977, - "dunlop": 34753, - "dunn": 19185, - "dunne": 38538, - "dunno": 24502, - "duo": 8696, - "dup": 36805, - "dup": 10445, - "duper": 44850, - "duplex": 41186, - "duplic": 28992, - "dupont": 35994, - "dur": 4355, - "dur": 23230, - "dura": 28173, - "dura": 47382, - "durability": 43671, - "durable": 22285, - "duran": 28185, - "durango": 44443, - "durant": 24861, - "duras": 27518, - "duration": 31663, - "durban": 24474, - "dure": 19108, - "durga": 38456, - "durham": 26765, - "durham": 14335, - "during": 1590, - "dus": 9931, - "dusa": 28546, - "dusk": 19708, - "dust": 29723, - "dust": 8349, - "dusted": 38274, - "duster": 46280, - 
"dustin": 42423, - "dustin": 21235, - "dusting": 41756, - "dusty": 22029, - "dut": 32625, - "dutch": 22277, - "dutch": 7991, - "duter": 21624, - "duterte": 22371, - "duties": 19603, - "dutt": 30081, - "dutton": 42771, - "duty": 6458, - "duval": 42459, - "duvet": 48006, - "dux": 28562, - "dv": 4288, - "dv": 26265, - "dvd": 7170, - "dvds": 36655, - "dvn": 29811, - "dvr": 29210, - "dw": 8455, - "dw": 19997, - "dwar": 13487, - "dwarf": 22643, - "dwayne": 31395, - "dwell": 27549, - "dwell": 18755, - "dwelling": 37098, - "dwight": 22473, - "dwp": 46976, - "dwts": 30220, - "dwyer": 43878, - "dx": 22717, - "dx": 15679, - "dy": 1444, - "dy": 907, - "dyce": 48325, - "dye": 37159, - "dye": 15997, - "dyed": 24906, - "dyer": 29495, - "dyes": 39874, - "dying": 5115, - "dyk": 12142, - "dyke": 32632, - "dylan": 21004, - "dylan": 9900, - "dyn": 44289, - "dyn": 30669, - "dynam": 5735, - "dynamic": 10057, - "dynamics": 14329, - "dynamite": 29003, - "dynamo": 28281, - "dynasty": 14593, - "dyne": 42756, - "dyou": 11484, - "dyour": 22525, - "dys": 11022, - "dys": 38384, - "dysfunction": 36865, - "dysfunctional": 40757, - "dysle": 33681, - "dyslexia": 43199, - "dyson": 34475, - "dyssey": 17435, - "dystop": 28276, - "dystopian": 38915, - "dz": 24421, - "dz": 22913, - "dé": 25466, - "dü": 46948, - "dÃŃ": 46988, - "e": 68, - "e": 324, - "ea": 2150, - "ea": 8100, - "eable": 20693, - "each": 31442, - "each": 2416, - "eachother": 40792, - "ead": 42556, - "ead": 45523, - "eae": 27446, - "eag": 3743, - "eager": 21551, - "eagerly": 30094, - "eagle": 20207, - "eagle": 7517, - "eagles": 6920, - "eal": 48872, - "ealing": 40484, - "eames": 49072, - "eamon": 45954, - "ean": 13327, - "ear": 1055, - "ear": 8373, - "earbuds": 47807, - "eared": 9127, - "earl": 30573, - "earl": 14235, - "earle": 40292, - "earlier": 4297, - "earliest": 22097, - "early": 15840, - "early": 2090, - "earn": 33977, - "earn": 8465, - "earned": 8898, - "earnest": 45422, - "earning": 14550, - "earnings": 15912, - "earns": 16760, - 
"earp": 35296, - "earphones": 44905, - "earring": 28664, - "earrings": 9136, - "ears": 9861, - "eart": 7086, - "earth": 5184, - "earth": 3475, - "earthand": 34229, - "earthandclouds": 34480, - "earthday": 19481, - "earthquake": 10060, - "earthquakes": 32895, - "earthy": 47139, - "earts": 38824, - "eas": 5740, - "ease": 13574, - "easier": 8817, - "easiest": 26314, - "easily": 8197, - "easing": 44825, - "easport": 42251, - "east": 5022, - "east": 2602, - "eastbound": 28827, - "eastbourne": 38455, - "eastenders": 23545, - "easter": 14783, - "easter": 4811, - "eastern": 34522, - "eastern": 6311, - "eastman": 48280, - "easton": 29619, - "eastside": 42650, - "eastwood": 28270, - "easy": 18308, - "easy": 3176, - "eat": 5418, - "eat": 3384, - "eaten": 16750, - "eater": 24060, - "eaters": 37645, - "eatery": 46559, - "eating": 4371, - "eatlocal": 42868, - "eaton": 28462, - "eats": 13188, - "eau": 17608, - "eazy": 36536, - "eb": 12283, - "eb": 8677, - "eba": 40889, - "ebay": 34412, - "ebay": 4099, - "eber": 34020, - "ebo": 46635, - "ebola": 15864, - "ebon": 22013, - "ebony": 30651, - "ebook": 13122, - "ebooks": 25774, - "ec": 747, - "ec": 10879, - "eca": 18465, - "ecar": 34500, - "ecb": 26205, - "ecc": 33128, - "eccc": 47401, - "eccentric": 43228, - "eccle": 27494, - "ece": 2163, - "eces": 5905, - "ecg": 45983, - "ech": 15797, - "ech": 31147, - "echel": 41233, - "echo": 17366, - "echo": 13989, - "echoes": 32564, - "eci": 31936, - "eck": 25866, - "eck": 15969, - "ecker": 39661, - "ecker": 40890, - "ecla": 47806, - "eclec": 25114, - "eclectic": 28382, - "eclip": 30841, - "eclipse": 11505, - "eclub": 38983, - "eco": 5106, - "eco": 10077, - "ecofriendly": 43412, - "ecol": 22706, - "ecological": 25127, - "ecology": 18578, - "ecommerce": 15529, - "econ": 26755, - "econ": 21158, - "econom": 2768, - "economic": 36649, - "economic": 5259, - "economical": 48782, - "economically": 39406, - "economics": 12625, - "economies": 27136, - "economist": 18836, - "economists": 43701, - 
"economy": 5644, - "ecor": 28962, - "ecosystem": 15788, - "ecosystems": 28725, - "ecoun": 27924, - "ecr": 48572, - "ecraft": 11439, - "ecs": 23485, - "ecstasy": 47286, - "ecstatic": 36244, - "ect": 25168, - "ecu": 13087, - "ecu": 32919, - "ecuador": 19813, - "ecz": 43530, - "ed": 843, - "ed": 538, - "eda": 10804, - "edad": 44724, - "eday": 39258, - "edc": 21245, - "edchat": 14702, - "edd": 35431, - "eddi": 42930, - "eddie": 22748, - "eddie": 9517, - "eddy": 25959, - "ede": 29632, - "eded": 19555, - "edel": 20460, - "edelman": 48139, - "eden": 23621, - "eden": 13741, - "eder": 16249, - "edes": 36247, - "edfringe": 27402, - "edg": 35955, - "edgar": 33543, - "edgar": 17914, - "edge": 16914, - "edge": 5461, - "edged": 39188, - "edges": 20938, - "edgy": 35393, - "edi": 8750, - "edi": 27148, - "edible": 19795, - "edic": 25184, - "edics": 30641, - "edin": 6524, - "edinburgh": 27574, - "edinburgh": 8068, - "eding": 5742, - "edison": 25846, - "edit": 8239, - "edit": 8013, - "edited": 13945, - "edith": 28597, - "editing": 10178, - "edition": 3062, - "editions": 21664, - "editor": 7661, - "editorial": 12325, - "editors": 19486, - "edits": 24945, - "edm": 37843, - "edm": 13539, - "edmon": 11275, - "edmond": 41581, - "edmonds": 46520, - "edmonton": 37311, - "edmonton": 15058, - "edmun": 36561, - "edmund": 27567, - "edna": 39002, - "edo": 29145, - "edo": 18096, - "edon": 41467, - "edor": 30184, - "edou": 47678, - "edp": 46066, - "eds": 1941, - "edsheeran": 30386, - "edt": 15071, - "edtech": 41825, - "edtech": 15262, - "edu": 11757, - "edu": 11799, - "eduardo": 30604, - "educ": 2200, - "educate": 17563, - "educated": 21447, - "education": 22358, - "education": 2806, - "educational": 10400, - "educator": 19875, - "educators": 15420, - "edwar": 27586, - "edward": 26184, - "edward": 7450, - "edwards": 12627, - "edwin": 48718, - "edwin": 22471, - "edy": 17072, - "edy": 4144, - "ee": 2644, - "ee": 4708, - "eed": 17513, - "eee": 24632, - "eee": 9361, - "eeee": 11696, - "eeee": 17570, - 
"eeeee": 26938, - "eeeeee": 41407, - "eek": 46591, - "eel": 27462, - "eels": 44416, - "eem": 27236, - "een": 47490, - "een": 21230, - "eer": 35409, - "eer": 31846, - "eera": 36664, - "eerie": 33846, - "ees": 40308, - "eet": 48935, - "eez": 39033, - "ef": 1490, - "ef": 1829, - "efa": 16999, - "eface": 48804, - "efan": 33556, - "efc": 22065, - "efcc": 46087, - "efer": 26199, - "eff": 20548, - "eff": 21715, - "effe": 2808, - "effec": 3943, - "effect": 5436, - "effective": 6837, - "effectively": 17516, - "effectiveness": 26847, - "effects": 7331, - "effic": 36004, - "efficacy": 39937, - "effici": 6670, - "efficiency": 11823, - "efficient": 11334, - "efficiently": 32915, - "effor": 6356, - "effort": 40078, - "effort": 6255, - "effortless": 41639, - "effortlessly": 42320, - "efforts": 6847, - "efish": 35813, - "efl": 27172, - "efron": 48111, - "efs": 7389, - "eg": 8053, - "eg": 14599, - "ega": 41193, - "egan": 42943, - "eger": 46704, - "eger": 22767, - "egg": 13778, - "egg": 5911, - "eggplant": 34906, - "eggs": 7099, - "ego": 34712, - "ego": 14250, - "egos": 43992, - "egre": 27044, - "egret": 42002, - "egy": 5224, - "egyp": 10250, - "egypt": 7267, - "egyptian": 12428, - "eh": 9277, - "eh": 9135, - "eha": 48563, - "ehealth": 48617, - "ehr": 45271, - "ehs": 44648, - "ei": 4006, - "ei": 18264, - "eic": 40251, - "eid": 28038, - "eid": 13979, - "eidmubarak": 46275, - "eiffel": 29720, - "eigh": 13468, - "eight": 7910, - "eighteen": 49316, - "eighth": 21237, - "eighty": 47449, - "eil": 29457, - "eileen": 31468, - "ein": 29944, - "ein": 24524, - "eindhoven": 47172, - "eing": 7702, - "einstein": 20587, - "eira": 47708, - "eis": 13802, - "eisen": 25273, - "eisenhower": 35562, - "either": 6036, - "ej": 19887, - "ej": 25009, - "ejec": 29771, - "ek": 4212, - "ek": 2092, - "el": 544, - "el": 832, - "ela": 11284, - "ela": 3787, - "elab": 38866, - "elabor": 26034, - "elaborate": 33855, - "elaine": 22523, - "elan": 17763, - "elan": 18399, - "eland": 24930, - "eland": 6275, - "elas": 
41078, - "elast": 27479, - "elastic": 30282, - "elba": 48598, - "elbow": 21965, - "eld": 5684, - "elder": 11791, - "elder": 14416, - "elderly": 15455, - "elders": 28617, - "eldest": 33503, - "elding": 28223, - "elds": 13466, - "ele": 2084, - "ele": 9766, - "eleague": 36577, - "eleanor": 18604, - "elearning": 29969, - "elec": 1564, - "elec": 38768, - "elect": 15336, - "elected": 8828, - "election": 19312, - "election": 4247, - "electionday": 40540, - "elections": 6949, - "elector": 16465, - "electoral": 19544, - "electr": 3654, - "electra": 48959, - "electri": 23927, - "electric": 19547, - "electric": 5031, - "electrical": 12176, - "electrician": 46422, - "electricity": 10950, - "electrifying": 48843, - "electro": 11648, - "electro": 23244, - "electromagnetic": 46530, - "electron": 33396, - "electronic": 33865, - "electronic": 9273, - "electronica": 43119, - "electronics": 13081, - "eled": 20357, - "elee": 44112, - "eleg": 8075, - "elegance": 19146, - "elegant": 11124, - "elek": 34559, - "elem": 25406, - "element": 14909, - "elementary": 8143, - "elements": 10925, - "elen": 30654, - "elen": 39164, - "elena": 19421, - "eleng": 48180, - "eleph": 7554, - "elephant": 10299, - "elephants": 16871, - "eler": 24646, - "eless": 15244, - "eless": 30837, - "elets": 19400, - "elev": 7921, - "elevate": 26736, - "elevated": 23967, - "elevation": 23826, - "elevator": 19021, - "eleven": 31617, - "eleven": 17795, - "elf": 45961, - "elf": 11924, - "elfie": 39955, - "elg": 28790, - "elgin": 31868, - "eli": 1018, - "eli": 6292, - "elia": 10956, - "elian": 42508, - "elias": 47274, - "elias": 29902, - "elic": 34743, - "elic": 13492, - "elie": 38677, - "elie": 26501, - "elier": 14634, - "elife": 37429, - "elife": 12719, - "eligibility": 34937, - "eligible": 16978, - "elijah": 26065, - "elike": 48913, - "elim": 9296, - "elimin": 11386, - "eliminate": 19655, - "eliminated": 29075, - "eliminating": 36619, - "elimination": 24176, - "elin": 25353, - "elin": 13458, - "eline": 46199, - "eline": 
7153, - "eling": 9990, - "elio": 47943, - "elion": 30682, - "elions": 44159, - "eliot": 33326, - "elis": 23411, - "elis": 48021, - "elisa": 25610, - "elisa": 44051, - "elisabeth": 33127, - "elise": 27124, - "elit": 40882, - "elite": 32277, - "elite": 6553, - "elited": 43943, - "elitedangerous": 47138, - "elites": 35975, - "elius": 35623, - "elive": 49338, - "elive": 23505, - "elives": 49174, - "elix": 32926, - "elixir": 42887, - "eliz": 42844, - "eliza": 6132, - "eliza": 29992, - "elizabeth": 22397, - "elizabeth": 7026, - "elk": 34013, - "elk": 21896, - "ell": 826, - "ell": 812, - "ella": 20692, - "ella": 2957, - "elland": 43326, - "ellar": 38443, - "ellas": 37053, - "elle": 12818, - "elle": 4765, - "elled": 13146, - "ellen": 14007, - "ellen": 12312, - "ellenshow": 34812, - "eller": 20927, - "eller": 4465, - "ellers": 19010, - "elles": 24431, - "elli": 3367, - "elli": 6673, - "ellic": 38905, - "ellie": 16769, - "ellier": 44054, - "ellin": 40374, - "elling": 2220, - "ellington": 34477, - "ellini": 43256, - "elliot": 20761, - "elliott": 44456, - "elliott": 13788, - "ellip": 44816, - "ellis": 11553, - "ellison": 32295, - "ello": 2512, - "ellor": 14594, - "ells": 2433, - "ellu": 35560, - "elly": 8041, - "elly": 20355, - "elm": 25199, - "elm": 22082, - "elman": 33622, - "elmer": 45958, - "elmo": 32150, - "elo": 6170, - "elo": 13490, - "elon": 26381, - "elon": 20406, - "elondon": 47377, - "elong": 44363, - "elonmusk": 37076, - "elope": 23367, - "eloqu": 37795, - "elos": 44733, - "elot": 43490, - "elove": 43319, - "elove": 19165, - "elover": 21732, - "elovers": 33946, - "els": 35958, - "els": 1645, - "elsa": 22050, - "else": 18857, - "else": 3344, - "elsewhere": 22906, - "elson": 19624, - "elt": 18692, - "elton": 20758, - "elu": 14208, - "elusive": 28903, - "elves": 29111, - "elvi": 47008, - "elvis": 47359, - "elvis": 14498, - "elxn": 37726, - "ely": 12189, - "ely": 1273, - "elyn": 29691, - "elyn": 18126, - "em": 908, - "em": 2270, - "ema": 7002, - "ema": 11131, - 
"emabiggest": 23101, - "emabiggestfans": 29587, - "email": 33537, - "email": 4462, - "emailed": 40470, - "emailmarketing": 40188, - "emails": 12871, - "eman": 24416, - "eman": 36868, - "emancip": 42996, - "emanuel": 35232, - "emb": 3692, - "embar": 8266, - "embaras": 48019, - "embark": 33953, - "embarra": 11382, - "embarrass": 27183, - "embarrassed": 28217, - "embarrassing": 19653, - "embarrassment": 41346, - "embassy": 13598, - "embe": 46041, - "embed": 19703, - "embedded": 22046, - "embelli": 32144, - "embellished": 46992, - "ember": 47049, - "emblem": 21163, - "embo": 23065, - "embr": 35267, - "embrac": 16928, - "embrace": 12118, - "embraced": 35739, - "embraces": 38404, - "embracing": 22196, - "embro": 12550, - "embroi": 18667, - "embroide": 21530, - "embroidered": 22381, - "embroidery": 20823, - "emc": 20897, - "emc": 31602, - "emcee": 42038, - "eme": 22910, - "eme": 21548, - "emea": 40352, - "emed": 11028, - "emen": 22033, - "ement": 40841, - "ement": 2057, - "ements": 11058, - "emer": 3132, - "emer": 25727, - "emerald": 46878, - "emerald": 16980, - "emerge": 22182, - "emerged": 26425, - "emergen": 24096, - "emergence": 39867, - "emergencies": 35759, - "emergency": 44038, - "emergency": 5897, - "emerges": 30801, - "emerging": 38174, - "emerging": 11113, - "emeritus": 35333, - "emerson": 24147, - "emery": 32678, - "emi": 44327, - "emi": 18525, - "emil": 26794, - "emil": 40624, - "emile": 43926, - "emili": 20709, - "emilia": 34238, - "emilio": 39722, - "emily": 14545, - "emily": 7640, - "emin": 17227, - "emin": 23995, - "eminem": 22129, - "eminent": 33779, - "eming": 40398, - "emir": 13337, - "emir": 47613, - "emirates": 47244, - "emirates": 17867, - "emission": 27761, - "emissions": 14172, - "emit": 49043, - "emma": 18177, - "emma": 7445, - "emmanuel": 48045, - "emmanuel": 20411, - "emmett": 45779, - "emmy": 35625, - "emmy": 17089, - "emmys": 21875, - "emo": 3738, - "emo": 19381, - "emoji": 16327, - "emojis": 27870, - "emon": 34406, - "emor": 45034, - "emory": 
44274, - "emotion": 17464, - "emotional": 7357, - "emotionally": 24088, - "emotions": 12904, - "emp": 3831, - "emp": 41004, - "empathy": 22420, - "emper": 12522, - "emperor": 13828, - "empha": 16237, - "emphasi": 47176, - "emphasis": 29588, - "empire": 26212, - "empire": 7614, - "empires": 46510, - "emplo": 3409, - "employ": 37290, - "employ": 39626, - "employe": 5037, - "employed": 26567, - "employee": 36631, - "employee": 9560, - "employees": 7377, - "employer": 21296, - "employers": 17647, - "employment": 10959, - "empor": 27386, - "emporium": 48541, - "empower": 13612, - "empower": 17230, - "empowered": 29087, - "empowering": 20086, - "empowerment": 15747, - "empowers": 46206, - "empress": 26656, - "empty": 41203, - "empty": 7893, - "emra": 39259, - "ems": 2858, - "emt": 46360, - "emu": 48149, - "emu": 29296, - "emul": 23272, - "emy": 31076, - "en": 524, - "en": 576, - "ena": 3452, - "enab": 17308, - "enable": 15642, - "enabled": 23666, - "enables": 23417, - "enabling": 23590, - "enam": 41486, - "enamel": 22746, - "enary": 13132, - "enas": 34536, - "enation": 20860, - "enberg": 15658, - "enburg": 28430, - "enc": 33169, - "enca": 37774, - "encan": 30345, - "encapsul": 40874, - "ence": 6495, - "ence": 954, - "enced": 6549, - "ences": 3777, - "enchan": 17290, - "enchanted": 28258, - "enchanting": 32531, - "enchil": 47396, - "enci": 32207, - "encia": 30068, - "encies": 18729, - "encing": 10326, - "enclosed": 43243, - "enclosure": 37419, - "encom": 44026, - "encore": 20549, - "encoun": 17309, - "encounter": 13164, - "encountered": 32492, - "encounters": 25399, - "encoura": 6169, - "encourage": 12090, - "encouraged": 20299, - "encouragement": 24959, - "encourages": 23848, - "encouraging": 15875, - "encro": 45822, - "encry": 28600, - "encryp": 42928, - "encrypted": 48710, - "encryption": 31423, - "ency": 3484, - "encyclo": 32104, - "encyclopedia": 38376, - "end": 945, - "end": 806, - "enda": 6735, - "endale": 20290, - "endange": 13990, - "endangered": 14931, - "ende": 
11373, - "ende": 40306, - "endeav": 18134, - "endeavor": 40502, - "endeavors": 44394, - "endeavour": 38035, - "ended": 2622, - "endemic": 41241, - "endent": 16265, - "ender": 48106, - "ender": 12383, - "enders": 7418, - "endez": 43850, - "endgame": 23042, - "endi": 31359, - "ending": 2695, - "endings": 36516, - "endish": 38841, - "endless": 12688, - "endlessly": 45145, - "endment": 45894, - "endo": 13476, - "endo": 15830, - "endocr": 36486, - "endof": 40786, - "endome": 46996, - "endon": 48018, - "endor": 8092, - "endorf": 37249, - "endorse": 28819, - "endorsed": 24307, - "endorsement": 21205, - "endorses": 34603, - "endorsing": 46779, - "endow": 45895, - "endra": 22321, - "ends": 1339, - "endthe": 46256, - "endu": 26032, - "endur": 19557, - "endurance": 21027, - "endure": 32419, - "enduring": 30851, - "enduro": 47042, - "ene": 3297, - "ene": 6049, - "ened": 2494, - "eneed": 45137, - "enegger": 33235, - "enei": 48906, - "enemies": 15824, - "enemy": 10310, - "enen": 45113, - "ener": 2244, - "ener": 13600, - "energ": 39451, - "energetic": 24197, - "energi": 23044, - "energies": 42374, - "energized": 48635, - "energy": 14974, - "energy": 2650, - "energye": 32271, - "energyefficiency": 40586, - "eners": 48208, - "enes": 42066, - "eness": 11806, - "enet": 46336, - "enew": 29672, - "enews": 13442, - "eney": 20706, - "enez": 33110, - "enf": 38167, - "enfield": 27808, - "enfor": 10592, - "enforce": 40224, - "enforced": 44597, - "enforcement": 12460, - "eng": 1035, - "eng": 6730, - "enga": 22297, - "engag": 6793, - "engage": 11089, - "engaged": 11475, - "engagement": 7281, - "engaging": 13060, - "enge": 26279, - "enge": 2742, - "engel": 38265, - "engen": 48286, - "enger": 6618, - "engers": 7533, - "engine": 3355, - "engine": 5857, - "engineer": 40151, - "engineer": 8517, - "engineered": 26580, - "engineering": 5273, - "engineers": 11494, - "engines": 14487, - "england": 20904, - "england": 3595, - "english": 15942, - "english": 3469, - "engra": 17560, - "engraved": 29421, - 
"engraving": 33309, - "engul": 43655, - "engv": 28401, - "enh": 7449, - "enhall": 48781, - "enham": 24592, - "enhan": 26827, - "enhance": 13993, - "enhanced": 16070, - "enhancement": 35601, - "enhances": 38259, - "enhancing": 25986, - "eni": 4395, - "eni": 17538, - "enic": 46780, - "enic": 28292, - "enig": 19754, - "enig": 48730, - "enight": 32848, - "enight": 20640, - "enigma": 34998, - "ening": 1133, - "enium": 34380, - "enix": 25720, - "enjo": 1498, - "enjoy": 12981, - "enjoy": 2218, - "enjoyable": 17444, - "enjoyed": 5045, - "enjoying": 3603, - "enjoyment": 34905, - "enjoys": 17024, - "enka": 43942, - "enko": 25312, - "enlar": 38136, - "enligh": 21364, - "enlighten": 28200, - "enlightened": 44032, - "enlightening": 44005, - "enlightenment": 29255, - "enlisted": 43555, - "enly": 43023, - "enn": 43563, - "enna": 8095, - "enne": 21176, - "enne": 11518, - "ennedy": 46266, - "ennes": 43613, - "enni": 7049, - "ennial": 14220, - "ennis": 48923, - "ennis": 26309, - "eno": 9429, - "eno": 12843, - "enoch": 47917, - "enor": 13955, - "enormous": 20129, - "enos": 44759, - "enote": 44955, - "enough": 2744, - "enow": 26876, - "enqu": 28417, - "enqui": 22810, - "enquire": 46658, - "enquiries": 31901, - "enquiry": 45141, - "enri": 18915, - "enrich": 20058, - "enrich": 45504, - "enriched": 45166, - "enrichment": 32903, - "enrique": 25489, - "enrol": 44279, - "enroll": 23739, - "enroll": 30366, - "enrolled": 36853, - "enrollment": 24875, - "enroute": 40548, - "ens": 41799, - "ens": 1323, - "ense": 12657, - "ense": 27658, - "ensemble": 14843, - "ensis": 32842, - "ensla": 37535, - "enslaved": 48675, - "ensure": 7492, - "ensures": 29707, - "ensuring": 19403, - "ent": 724, - "ent": 621, - "enta": 17681, - "ental": 32342, - "ental": 6168, - "entary": 9833, - "entation": 37412, - "ente": 17433, - "ente": 9935, - "ented": 3800, - "entennial": 43088, - "enter": 2963, - "enter": 3819, - "entered": 10679, - "entering": 12580, - "enterpri": 7339, - "enterprise": 9220, - "enterprises": 
21219, - "enters": 15287, - "entertain": 5566, - "entertain": 23510, - "entertained": 30631, - "entertainer": 28674, - "entertaining": 13897, - "entertainment": 6166, - "entes": 24213, - "enthr": 36202, - "enthusi": 9631, - "enthusiasm": 20525, - "enthusiast": 27153, - "enthusiastic": 22068, - "enthusiasts": 27514, - "enti": 1938, - "ential": 5194, - "entially": 37695, - "entic": 10340, - "entine": 49212, - "enting": 20526, - "entire": 4709, - "entirely": 13911, - "entirety": 43242, - "entit": 15209, - "entities": 38134, - "entitled": 18680, - "entity": 28455, - "ently": 2922, - "ento": 21917, - "ento": 8762, - "entom": 31676, - "entourage": 47893, - "entr": 7129, - "entrance": 9129, - "entrata": 27304, - "entre": 34188, - "entre": 19600, - "entren": 46959, - "entrepre": 4583, - "entreprene": 4789, - "entrepreneu": 26784, - "entrepreneur": 12119, - "entrepreneur": 8033, - "entrepreneurial": 28261, - "entrepreneurs": 11054, - "entrepreneurship": 12858, - "entries": 13766, - "entry": 5362, - "ents": 870, - "entu": 6650, - "enty": 5657, - "enu": 23430, - "env": 32280, - "env": 39207, - "envel": 20052, - "envelope": 27358, - "envir": 3512, - "enviro": 46200, - "environ": 3599, - "environment": 33039, - "environment": 5501, - "environmental": 7831, - "environmentally": 32855, - "environments": 19577, - "envision": 49031, - "envoy": 29263, - "envy": 21017, - "eny": 20482, - "enya": 36509, - "enyc": 39520, - "enz": 25805, - "enz": 31873, - "enza": 25239, - "enzie": 14839, - "enzo": 31543, - "enzyme": 40348, - "enzymes": 47465, - "eo": 16054, - "eo": 11712, - "eoin": 48634, - "eon": 31915, - "eos": 17805, - "ep": 1178, - "ep": 1117, - "epa": 15866, - "epage": 26931, - "epaper": 33584, - "epcot": 32524, - "eper": 43071, - "eph": 45752, - "eph": 41240, - "ephe": 25129, - "epi": 7219, - "epi": 34641, - "epic": 12683, - "epic": 4991, - "epiconetsy": 49222, - "epide": 17382, - "epidemi": 44447, - "epidemic": 21522, - "epile": 23150, - "epilepsy": 29547, - "epilo": 31291, - 
"epilots": 39766, - "epiph": 40561, - "epiphany": 43251, - "epis": 24616, - "episcop": 28037, - "episcopal": 31221, - "episo": 2708, - "episode": 2965, - "episodes": 11837, - "epit": 21967, - "epitome": 35114, - "epl": 25950, - "epo": 25810, - "epp": 39054, - "epp": 39593, - "eps": 4090, - "epsilon": 40019, - "epsom": 40364, - "epstein": 34688, - "eq": 39331, - "eq": 33692, - "equ": 2563, - "equal": 17373, - "equal": 10433, - "equality": 48981, - "equality": 9578, - "equally": 18172, - "equals": 30278, - "equation": 28591, - "equations": 38225, - "eque": 19518, - "equestrian": 24728, - "equi": 8752, - "equili": 43262, - "equine": 33801, - "equinox": 32652, - "equip": 6526, - "equip": 36979, - "equipment": 6893, - "equipo": 45688, - "equipped": 18331, - "equitable": 44717, - "equities": 44015, - "equity": 11293, - "equivalent": 19489, - "er": 517, - "er": 528, - "era": 30548, - "era": 2072, - "erable": 18801, - "erad": 24194, - "eradic": 36346, - "eradicate": 46164, - "eral": 6222, - "eran": 13069, - "eras": 19325, - "eras": 39090, - "erase": 33893, - "erased": 46762, - "erasmus": 38935, - "erc": 5360, - "erc": 32382, - "erd": 25645, - "erdo": 21112, - "erdogan": 24453, - "ere": 17907, - "ere": 642, - "erec": 21526, - "erected": 39365, - "ered": 9097, - "eres": 15751, - "ergon": 38120, - "ergy": 19550, - "eri": 2769, - "eri": 9509, - "eria": 11634, - "erial": 5409, - "eric": 1206, - "eric": 5396, - "erica": 13208, - "erich": 26070, - "erick": 27434, - "erick": 36959, - "erickson": 45286, - "ericsson": 39645, - "eridge": 45408, - "erie": 7005, - "eries": 9099, - "erik": 22805, - "erik": 16532, - "erika": 25531, - "erin": 17532, - "erin": 11333, - "erina": 25176, - "ering": 1785, - "erit": 23335, - "eritrea": 30738, - "erjee": 41665, - "erly": 14380, - "erm": 31649, - "erman": 17990, - "ern": 6992, - "ern": 12140, - "ernal": 20868, - "ernan": 34617, - "ernation": 48796, - "erne": 33930, - "ernest": 23006, - "ernie": 23636, - "ernity": 14653, - "erno": 40812, - 
"ernst": 30099, - "ero": 3211, - "ero": 3732, - "erock": 38206, - "eron": 32837, - "eroom": 46690, - "eros": 30597, - "erose": 48657, - "erosion": 30174, - "erotic": 30708, - "erotica": 39126, - "erous": 6384, - "eroy": 36461, - "erp": 28268, - "err": 22479, - "err": 25346, - "erra": 48446, - "errands": 45485, - "error": 12097, - "errors": 21195, - "erry": 45236, - "erry": 24124, - "ers": 4840, - "ers": 612, - "ersfc": 37925, - "ership": 2884, - "erson": 25780, - "erson": 6811, - "ert": 40325, - "ert": 3112, - "erta": 32007, - "erton": 26245, - "erts": 12921, - "eru": 36068, - "erun": 41642, - "erup": 17093, - "erupted": 48862, - "eruption": 33705, - "erville": 37557, - "erwin": 43724, - "ery": 12467, - "ery": 1692, - "erz": 38711, - "es": 957, - "es": 542, - "esa": 46834, - "esa": 12489, - "esanders": 23099, - "esc": 3330, - "esc": 28420, - "escal": 15902, - "escap": 11499, - "escape": 32484, - "escape": 7568, - "escaped": 18707, - "escapes": 29916, - "escaping": 21767, - "escar": 39229, - "escence": 37972, - "esch": 46760, - "esch": 41945, - "esco": 32482, - "escobar": 48807, - "escor": 24360, - "escort": 24976, - "escorted": 47667, - "escorts": 48574, - "escu": 36517, - "esday": 19553, - "ese": 18766, - "ese": 2260, - "esg": 41674, - "esh": 17119, - "esh": 13407, - "esha": 28799, - "eshop": 38451, - "eshop": 45570, - "eshopsuk": 39349, - "esi": 30064, - "esis": 12414, - "esk": 19359, - "esl": 26201, - "eso": 29890, - "eso": 28921, - "esof": 17047, - "eson": 46845, - "esp": 3849, - "esp": 13870, - "espa": 37301, - "espan": 41731, - "españa": 41118, - "especially": 4878, - "esper": 29216, - "espino": 46633, - "espionage": 43498, - "espn": 22917, - "espn": 7540, - "espnu": 47747, - "espo": 34381, - "esports": 16035, - "espresso": 17098, - "esq": 47352, - "esqu": 34616, - "esque": 25877, - "ess": 3118, - "ess": 9764, - "essa": 39125, - "essay": 12751, - "essays": 27328, - "esse": 22305, - "essen": 30489, - "essence": 17830, - "essenti": 11163, - "essential": 47264, 
- "essential": 6895, - "essentially": 30042, - "essentials": 16191, - "essex": 30563, - "essex": 11623, - "est": 2291, - "est": 1509, - "esta": 41449, - "esta": 10135, - "estab": 7010, - "establi": 8412, - "establish": 19709, - "established": 13143, - "establishing": 29420, - "establishment": 20213, - "estas": 39072, - "estate": 47130, - "estate": 6159, - "estates": 26054, - "este": 12968, - "este": 20579, - "esteban": 48381, - "esteem": 31541, - "esteemed": 36293, - "ester": 45808, - "esthe": 18468, - "esther": 24393, - "estim": 8904, - "estimate": 21883, - "estimated": 16665, - "estimates": 21957, - "esto": 31589, - "esto": 23958, - "estonia": 26260, - "estonian": 48895, - "estrada": 48116, - "estre": 31271, - "estu": 26272, - "estuary": 35269, - "esur": 35758, - "esville": 39187, - "esy": 46268, - "et": 1169, - "et": 875, - "eta": 8761, - "etal": 25221, - "etary": 13074, - "etc": 5353, - "etched": 40411, - "etching": 41375, - "ete": 38820, - "ete": 40245, - "eter": 8587, - "eter": 17007, - "eternal": 13732, - "eternally": 48486, - "eternity": 23832, - "eters": 18392, - "etf": 31661, - "eth": 4819, - "eth": 5927, - "ethan": 24245, - "ethan": 15958, - "ethanol": 38166, - "ethe": 21312, - "ethel": 45921, - "ether": 23349, - "ethere": 18705, - "ethereal": 40925, - "ethereum": 19612, - "ethernet": 35026, - "ethi": 10327, - "ethic": 39104, - "ethical": 47041, - "ethical": 17679, - "ethics": 13355, - "ethiop": 10897, - "ethiopia": 13920, - "ethiopian": 24507, - "ethnic": 30522, - "ethnic": 16344, - "ethnicity": 46787, - "ethno": 34225, - "ethos": 48768, - "eti": 11188, - "eti": 30394, - "etienne": 46118, - "eties": 15137, - "etihad": 38489, - "etiquette": 37957, - "etis": 38216, - "etisation": 39733, - "etna": 41940, - "eto": 27829, - "eto": 33837, - "eton": 44339, - "etour": 41462, - "etr": 23012, - "etres": 42838, - "ets": 3442, - "etsy": 13237, - "etsy": 6282, - "etsym": 22902, - "etsymntt": 25416, - "etsyshop": 44643, - "ett": 32729, - "ett": 24998, - "etta": 
30466, - "ette": 19981, - "ette": 5212, - "ettes": 35326, - "etto": 44219, - "etty": 40759, - "etu": 36593, - "etv": 49155, - "etv": 20325, - "etwork": 20585, - "ety": 25920, - "ety": 2746, - "etz": 36181, - "etz": 25301, - "eu": 1506, - "eu": 3238, - "eucalyp": 41068, - "eucalyptus": 42351, - "euchar": 38362, - "eugen": 30678, - "eugene": 17760, - "eul": 46749, - "eun": 16431, - "eun": 26219, - "eunhyuk": 47526, - "eup": 44435, - "euph": 21386, - "euphoria": 41051, - "eur": 18343, - "eur": 12018, - "eura": 32605, - "eure": 25311, - "euref": 48017, - "eureka": 31686, - "euro": 2039, - "euro": 8463, - "euroleague": 46821, - "europa": 18290, - "europale": 42473, - "europaleague": 44029, - "europarl": 44922, - "europe": 4198, - "europe": 3848, - "european": 26712, - "european": 4759, - "europeans": 37082, - "euros": 22274, - "eurovision": 17593, - "eurozone": 42555, - "eurusd": 40895, - "eus": 44214, - "euston": 46905, - "euthan": 43280, - "euve": 40652, - "eux": 25019, - "ev": 776, - "ev": 10133, - "eva": 6845, - "evacu": 13187, - "evacuated": 26806, - "evacuation": 27353, - "eval": 25139, - "eval": 9703, - "evalu": 10314, - "evaluate": 27174, - "evaluating": 34541, - "evaluation": 17640, - "evan": 12821, - "evan": 12847, - "evangel": 20518, - "evangeli": 21372, - "evangelical": 36151, - "evangelist": 42275, - "evankirstel": 46581, - "evans": 8836, - "evansville": 44782, - "evapor": 33352, - "evasion": 48795, - "eve": 5732, - "eve": 1866, - "eved": 19820, - "evel": 39315, - "evelyn": 26687, - "evement": 8210, - "even": 6359, - "even": 1427, - "evening": 34487, - "evening": 2285, - "evenings": 19994, - "evenly": 45974, - "event": 10612, - "event": 1655, - "eventful": 45628, - "evento": 38155, - "eventprofs": 24980, - "events": 3667, - "eventu": 14055, - "eventual": 45321, - "eventually": 14397, - "ever": 888, - "ever": 1247, - "everest": 21722, - "everett": 25456, - "everglades": 46294, - "evergreen": 23852, - "everlasting": 32849, - "evers": 31914, - "everton": 
13315, - "every": 1091, - "every": 1505, - "everybody": 5901, - "everyday": 25049, - "everyday": 5160, - "everyone": 1584, - "everything": 36376, - "everything": 2410, - "everytime": 16911, - "everywhere": 6364, - "eves": 7323, - "evi": 5348, - "evi": 36989, - "evic": 21336, - "eviction": 37111, - "eviden": 46220, - "evidence": 6439, - "evident": 34529, - "evie": 47195, - "evil": 23218, - "evil": 6006, - "eville": 16143, - "eving": 24729, - "evo": 17962, - "evo": 13169, - "evoc": 43133, - "evol": 5350, - "evolu": 7725, - "evolution": 8902, - "evolutionary": 30629, - "evolve": 23406, - "evolved": 22613, - "evolving": 23675, - "evp": 46154, - "evs": 33576, - "ew": 11942, - "ew": 15428, - "ewan": 40247, - "ewe": 48438, - "ewing": 38873, - "ews": 9878, - "ex": 659, - "ex": 4118, - "exac": 5460, - "exact": 12651, - "exactly": 5840, - "exagger": 29766, - "exal": 49324, - "exam": 4428, - "exam": 8785, - "examination": 20970, - "examine": 25728, - "examined": 44004, - "examiner": 29149, - "examines": 28160, - "examining": 30616, - "example": 6228, - "examples": 14790, - "exams": 14028, - "exas": 47536, - "exc": 1302, - "excav": 20733, - "excavation": 45909, - "exce": 10999, - "exceed": 32521, - "exceeded": 36221, - "exceeding": 47213, - "exceeds": 49353, - "excel": 28351, - "excel": 18754, - "excell": 3298, - "excellence": 8171, - "excellency": 36503, - "excellent": 4239, - "excelsi": 47315, - "excep": 8882, - "except": 8541, - "exception": 25018, - "exceptional": 13425, - "exceptionally": 29306, - "excer": 17737, - "excerpt": 20586, - "excess": 22491, - "excessive": 21332, - "exchange": 6616, - "exchanged": 48919, - "exchanges": 29730, - "exchanging": 47760, - "excit": 10510, - "excite": 47711, - "excited": 1889, - "excitement": 11407, - "exciting": 4300, - "exclu": 3114, - "exclude": 49235, - "excluded": 46216, - "excluding": 44326, - "exclusion": 40219, - "exclusive": 3747, - "exclusively": 13565, - "exclusives": 47149, - "excu": 7324, - "excur": 27533, - "excursion": 
34869, - "excuse": 9266, - "excuses": 19388, - "exe": 3554, - "exe": 48027, - "exec": 15052, - "execs": 35728, - "execu": 4360, - "execute": 36405, - "executed": 20432, - "execution": 18085, - "executive": 5944, - "executives": 24357, - "exem": 19753, - "exemp": 28602, - "exempl": 36371, - "exemplary": 39123, - "exempli": 41934, - "exempt": 44278, - "exemption": 47481, - "exer": 40295, - "exerc": 5932, - "exercise": 7016, - "exercises": 19669, - "exercising": 39036, - "exeter": 32137, - "exeter": 18837, - "exfoli": 38823, - "exhau": 11154, - "exhaust": 21812, - "exhausted": 21741, - "exhausting": 40035, - "exhaustion": 49221, - "exhi": 3022, - "exhib": 3783, - "exhibit": 24992, - "exhibit": 8209, - "exhibiting": 23889, - "exhibition": 4219, - "exhibitions": 28311, - "exhibitor": 44192, - "exhibitors": 38542, - "exhibits": 30093, - "exhilar": 40262, - "exhilarating": 49289, - "exi": 5297, - "exico": 38712, - "exile": 28566, - "exist": 10899, - "exist": 9645, - "existed": 23198, - "existence": 13832, - "existent": 43541, - "existential": 38752, - "existing": 12886, - "exists": 14608, - "exit": 9374, - "exited": 37581, - "exiting": 39577, - "exits": 34943, - "exmoor": 48260, - "exo": 15600, - "exo": 5842, - "exodus": 30098, - "exol": 42856, - "exop": 35288, - "exoplan": 37980, - "exor": 24506, - "exorcist": 46309, - "exotic": 15639, - "exp": 9923, - "exp": 19066, - "expan": 7512, - "expand": 10382, - "expand": 13141, - "expanded": 18390, - "expanding": 15755, - "expands": 22223, - "expanse": 46886, - "expansion": 10138, - "expansive": 49261, - "expat": 43900, - "expe": 2560, - "expect": 9802, - "expect": 5716, - "expectation": 34273, - "expectations": 12529, - "expected": 5573, - "expecting": 12525, - "expects": 24536, - "expedition": 16761, - "expeditions": 49327, - "expelled": 48834, - "expen": 7216, - "expend": 29302, - "expenditure": 47044, - "expense": 28473, - "expenses": 21797, - "expensive": 9649, - "exper": 1533, - "experi": 4723, - "experience": 31867, - 
"experience": 2415, - "experienced": 10417, - "experiences": 8233, - "experiencing": 16643, - "experiential": 44952, - "experim": 6697, - "experiment": 13079, - "experimental": 16539, - "experimenting": 28263, - "experiments": 21077, - "expert": 6284, - "expertise": 16555, - "experts": 6960, - "expi": 26850, - "expir": 35077, - "expire": 49315, - "expired": 30200, - "expires": 34739, - "expl": 3261, - "expla": 3517, - "explain": 48918, - "explain": 7304, - "explained": 14229, - "explaining": 13136, - "explains": 6655, - "explan": 13294, - "explanation": 16577, - "explanations": 34383, - "explic": 21011, - "explicit": 33228, - "explo": 3586, - "explode": 31262, - "exploded": 28947, - "explodes": 38119, - "exploding": 34683, - "exploit": 36953, - "exploited": 48554, - "explor": 11958, - "exploration": 14043, - "explore": 10405, - "explore": 5147, - "explorebc": 38754, - "explorecanada": 36600, - "explored": 25016, - "explorer": 15776, - "explorers": 28491, - "explores": 13996, - "exploring": 7584, - "explosion": 13785, - "explosions": 38646, - "explosive": 18888, - "explosives": 44705, - "expo": 7820, - "expo": 6344, - "expon": 27905, - "export": 14444, - "exporting": 47433, - "exports": 20088, - "expose": 23181, - "exposed": 12180, - "exposes": 33575, - "exposing": 28362, - "exposition": 36943, - "exposure": 11903, - "expre": 6085, - "express": 18553, - "express": 5642, - "expressed": 20777, - "expresses": 31931, - "expressing": 30207, - "expression": 11357, - "expressions": 20314, - "expressive": 42060, - "expressway": 31658, - "exquis": 16575, - "exquisite": 17958, - "ext": 5711, - "ext": 20072, - "exten": 5555, - "extend": 14492, - "extended": 9614, - "extending": 25652, - "extends": 20688, - "extension": 10275, - "extensions": 24525, - "extensive": 16870, - "extensively": 47365, - "extent": 24913, - "exter": 9797, - "exterior": 19352, - "extermin": 41671, - "external": 15028, - "extin": 13553, - "extinct": 24488, - "extinction": 21186, - "extingui": 38567, - 
"extor": 35620, - "extr": 29082, - "extra": 6416, - "extra": 4231, - "extrac": 18550, - "extract": 18962, - "extraction": 28789, - "extracts": 45576, - "extraordin": 23628, - "extraordinaire": 30909, - "extraordinary": 10982, - "extras": 29817, - "extravag": 22299, - "extravaganza": 29461, - "extre": 3978, - "extreme": 38357, - "extreme": 8331, - "extremely": 6519, - "extremism": 31493, - "extremist": 36383, - "extremists": 41425, - "extru": 43010, - "ey": 1541, - "ey": 1477, - "eyang": 28915, - "eye": 5034, - "eye": 3272, - "eyebrow": 34250, - "eyebrows": 19923, - "eyed": 15512, - "eyeing": 34916, - "eyel": 17075, - "eyelashes": 42074, - "eyeliner": 33354, - "eyeon": 25126, - "eyes": 3095, - "eyeshadow": 35213, - "eyewear": 30165, - "eyewitness": 36258, - "eyou": 31996, - "eyour": 40229, - "eyre": 44115, - "ez": 10082, - "ez": 8387, - "eze": 25993, - "eze": 27229, - "ezekiel": 41428, - "ezra": 27552, - "f": 69, - "f": 325, - "fa": 778, - "fa": 2800, - "faa": 27577, - "fab": 2833, - "fab": 5492, - "faber": 43461, - "faber": 42488, - "fabi": 29425, - "fabian": 34539, - "fabio": 31666, - "fabric": 16217, - "fabric": 10033, - "fabricated": 40851, - "fabrication": 33476, - "fabrics": 23159, - "fabulous": 5189, - "fac": 1053, - "fac": 35438, - "facade": 29217, - "face": 2545, - "face": 1710, - "facebook": 36156, - "facebook": 2943, - "faced": 10941, - "faceli": 32023, - "facelift": 36380, - "faceoff": 42710, - "facep": 45285, - "faces": 4905, - "faceted": 43435, - "facetime": 24076, - "facial": 11909, - "facil": 39973, - "facilit": 13567, - "facilitate": 26733, - "facilitated": 43853, - "facilitating": 34796, - "facilities": 10388, - "facility": 8165, - "facing": 7619, - "fact": 17189, - "fact": 3598, - "factfriday": 27953, - "faction": 14629, - "factor": 21082, - "factor": 8124, - "factories": 36492, - "factors": 12733, - "factory": 42483, - "factory": 6072, - "facts": 5085, - "factual": 45471, - "faculty": 9504, - "facup": 25283, - "fad": 12632, - "fad": 47669, - 
"fade": 20486, - "faded": 26051, - "fades": 40441, - "fading": 32882, - "fadnavis": 38945, - "faf": 31052, - "faf": 43903, - "fag": 25617, - "fag": 39305, - "fah": 25495, - "fah": 35429, - "fahren": 45527, - "fai": 20519, - "fai": 26384, - "fail": 7105, - "fail": 6801, - "failed": 8314, - "failing": 15757, - "fails": 13388, - "failure": 8732, - "failures": 25442, - "faint": 30807, - "fair": 3031, - "fair": 2849, - "fairbanks": 43962, - "faire": 34745, - "faire": 20798, - "fairfax": 29368, - "fairfield": 29664, - "fairgrounds": 38325, - "fairi": 28884, - "fairies": 33590, - "fairly": 14961, - "fairmont": 41547, - "fairness": 29388, - "fairs": 8655, - "fairtrade": 33361, - "fairview": 43479, - "fairway": 44022, - "fairy": 17021, - "fairy": 10444, - "fairytale": 28944, - "fais": 23542, - "faisal": 35459, - "fait": 20567, - "faith": 10653, - "faith": 5080, - "faithful": 15511, - "faiz": 41775, - "fake": 18794, - "fake": 5777, - "faken": 22853, - "fakenews": 26943, - "fakespeare": 49095, - "fal": 2778, - "fal": 40494, - "fala": 47120, - "falcon": 22498, - "falcon": 13571, - "falcons": 13834, - "falk": 34648, - "falkirk": 44080, - "fall": 6489, - "fall": 2359, - "fallen": 8688, - "falling": 48709, - "falling": 7293, - "fallon": 39596, - "fallon": 21281, - "fallontonight": 44627, - "fallout": 49365, - "fallout": 16009, - "falls": 4778, - "falmouth": 38261, - "false": 38948, - "false": 9078, - "falsely": 42321, - "fam": 1058, - "fam": 5128, - "fame": 6573, - "famed": 23302, - "famer": 24554, - "famil": 3395, - "famili": 8488, - "familia": 25622, - "familiar": 10020, - "families": 4612, - "family": 8137, - "family": 1315, - "familyfun": 46308, - "familytime": 47236, - "familytravel": 38222, - "famine": 35847, - "famous": 44811, - "famous": 4096, - "famously": 44505, - "fan": 1675, - "fan": 2261, - "fanart": 41059, - "fanart": 7855, - "fanartfriday": 45346, - "fanatic": 36643, - "fanatics": 39610, - "fanbase": 36921, - "fanboy": 43369, - "fanc": 29017, - "fancafe": 45080, - 
"fanci": 35908, - "fanclub": 31530, - "fancy": 47622, - "fancy": 6733, - "fand": 19684, - "fandom": 47634, - "fandom": 11534, - "fanfest": 42916, - "fanfic": 47243, - "fang": 14269, - "fang": 27428, - "fangirl": 28813, - "fangirling": 39463, - "fanning": 37282, - "fanny": 30401, - "fans": 32454, - "fans": 1840, - "fansign": 25288, - "fant": 4467, - "fanta": 2703, - "fantaken": 39412, - "fantasia": 49306, - "fantastic": 31289, - "fantastic": 2935, - "fantasy": 15124, - "fantasy": 5267, - "fantasyfootball": 35713, - "fao": 31155, - "faq": 28533, - "far": 1578, - "far": 2384, - "fara": 48562, - "farage": 28340, - "farah": 31547, - "fare": 8620, - "fare": 6461, - "fares": 27525, - "farewell": 10734, - "fargo": 18870, - "fari": 26197, - "farley": 43761, - "farm": 9066, - "farm": 3985, - "farmer": 19735, - "farmer": 10474, - "farmers": 29752, - "farmers": 6402, - "farmersmarket": 41808, - "farmhouse": 26293, - "farming": 10399, - "farmington": 49305, - "farmland": 45258, - "farms": 11277, - "farn": 27527, - "faroo": 39147, - "farra": 33657, - "farrakhan": 46293, - "farrell": 24234, - "fart": 34664, - "farther": 42233, - "fas": 4830, - "fas": 42995, - "fasci": 17191, - "fascin": 7327, - "fascinated": 32964, - "fascinating": 8640, - "fascism": 28213, - "fascist": 23870, - "fascists": 43598, - "fash": 42682, - "fashi": 2099, - "fashion": 6976, - "fashion": 2444, - "fashionable": 24597, - "fashionblogger": 31726, - "fashioned": 21563, - "fashioni": 26062, - "fashionista": 30415, - "fashions": 37601, - "fashionshow": 45653, - "fashionweek": 28684, - "fass": 42398, - "fast": 8509, - "fast": 1953, - "fasten": 44990, - "faster": 8835, - "fastest": 9808, - "fasting": 24656, - "fat": 4751, - "fat": 5484, - "fatal": 12124, - "fatalities": 44168, - "fatally": 34069, - "fate": 26315, - "fate": 11734, - "father": 11607, - "father": 3224, - "fathers": 12780, - "fathersday": 16731, - "fati": 13430, - "fatigue": 23747, - "fatima": 28202, - "fats": 30151, - "fatt": 44131, - "fatty": 
22953, - "fau": 5571, - "fau": 31381, - "faucet": 44273, - "faul": 16230, - "faulkner": 37840, - "fault": 13862, - "faults": 42752, - "faulty": 47103, - "fauna": 30808, - "faust": 44772, - "faux": 19429, - "fav": 1355, - "fav": 5426, - "fave": 7272, - "faves": 18003, - "favor": 1766, - "favor": 12160, - "favorable": 35392, - "favored": 46640, - "favorite": 35262, - "favorite": 1916, - "favorited": 36926, - "favorites": 10564, - "favors": 36085, - "favour": 3111, - "favour": 20469, - "favourite": 3342, - "favourites": 16585, - "favs": 18879, - "faw": 21800, - "fawad": 46425, - "fawn": 48624, - "fax": 32535, - "fax": 9337, - "fay": 8939, - "fay": 40074, - "faye": 30257, - "fayette": 32043, - "fayette": 19782, - "fayetteville": 37771, - "fayre": 34982, - "faz": 26238, - "faze": 44880, - "fb": 22637, - "fb": 3307, - "fball": 29663, - "fbf": 20004, - "fbi": 10293, - "fbloggers": 41389, - "fbs": 48454, - "fc": 4278, - "fc": 1399, - "fca": 24540, - "fcb": 26639, - "fcb": 25045, - "fcbarcelona": 32174, - "fcbayern": 35033, - "fcblive": 44608, - "fcc": 21240, - "fck": 40080, - "fck": 49263, - "fcofficial": 27805, - "fcs": 32095, - "fcu": 47898, - "fd": 16972, - "fd": 11525, - "fda": 17823, - "fdi": 45579, - "fdn": 18563, - "fdny": 41084, - "fdr": 42298, - "fe": 623, - "fe": 873, - "fear": 8744, - "fear": 5402, - "feared": 31154, - "fearless": 17470, - "fears": 13867, - "fearthe": 33449, - "feasi": 34977, - "feast": 37963, - "feast": 9564, - "feat": 1703, - "feat": 5611, - "feather": 24905, - "feather": 17871, - "feathers": 21138, - "featherweight": 44939, - "feature": 30413, - "feature": 4527, - "featured": 4743, - "features": 4643, - "featuring": 3706, - "feb": 4317, - "febru": 4202, - "february": 4248, - "fect": 31293, - "fed": 22518, - "fed": 7035, - "feder": 4737, - "federal": 6369, - "federation": 15530, - "federer": 18246, - "federico": 40539, - "fedex": 32603, - "fedora": 45111, - "feds": 30593, - "fee": 28242, - "fee": 9224, - "feed": 6662, - "feed": 5839, - 
"feedback": 8683, - "feeder": 24482, - "feeders": 44523, - "feeding": 9879, - "feeds": 21788, - "feel": 2408, - "feel": 2051, - "feelin": 19903, - "feeling": 33087, - "feeling": 3045, - "feelings": 9452, - "feels": 4808, - "feelthe": 22322, - "feelthebern": 27743, - "fees": 11765, - "feet": 4804, - "fei": 23441, - "fei": 34217, - "fein": 46707, - "feinstein": 41313, - "fel": 2081, - "fel": 20304, - "feld": 45913, - "feld": 14219, - "feldman": 41942, - "feli": 7498, - "felic": 25845, - "felici": 23379, - "felicia": 41139, - "felicidades": 41648, - "felicity": 35123, - "feline": 29471, - "felipe": 27681, - "felix": 33455, - "felix": 16514, - "feliz": 26104, - "feliz": 20221, - "fell": 33540, - "fell": 6266, - "fella": 17586, - "fellas": 18787, - "feller": 29226, - "fellow": 12099, - "fellow": 5242, - "fellows": 15766, - "fellowship": 13857, - "felony": 31068, - "felt": 5413, - "fem": 24574, - "fem": 36615, - "fema": 41721, - "female": 22062, - "female": 3970, - "females": 21028, - "femi": 38607, - "femin": 11423, - "femini": 11894, - "feminine": 24911, - "feminism": 18784, - "feminist": 14921, - "feminists": 38809, - "femme": 31331, - "fen": 5509, - "fen": 25024, - "fence": 12679, - "fences": 34312, - "fencing": 23489, - "fender": 17117, - "fener": 41208, - "fenerbah": 46652, - "feng": 33291, - "fennel": 28689, - "fent": 26395, - "fenton": 47265, - "fenway": 29206, - "fer": 1765, - "fer": 2897, - "fera": 37705, - "feral": 29972, - "ferdin": 25541, - "ferdinand": 27591, - "fere": 43144, - "feren": 35652, - "ference": 19984, - "ferg": 44938, - "fergie": 39119, - "fergu": 10988, - "fergus": 42041, - "ferguson": 11904, - "fermentation": 45817, - "fermented": 36886, - "fern": 10747, - "fern": 21685, - "fernandes": 44391, - "fernandez": 23436, - "fernando": 17140, - "ferns": 38277, - "feroci": 45652, - "ferr": 7256, - "ferra": 47911, - "ferrari": 9606, - "ferre": 29626, - "ferred": 10432, - "ferreira": 48686, - "ferrell": 41112, - "ferrer": 38904, - "ferri": 42008, - 
"ferries": 28489, - "ferris": 27532, - "ferry": 38936, - "ferry": 10278, - "fers": 12378, - "fert": 14925, - "fert": 43662, - "fertil": 41987, - "fertile": 44837, - "fertili": 23912, - "fertility": 23528, - "fertilizer": 36786, - "fery": 47448, - "fes": 32300, - "fest": 17383, - "fest": 2590, - "festa": 42124, - "festi": 1943, - "festiv": 19222, - "festival": 20946, - "festival": 2240, - "festivals": 17834, - "festive": 9533, - "festivities": 21020, - "fet": 21409, - "feta": 31705, - "fetal": 42031, - "fetch": 30271, - "fete": 34629, - "fett": 37979, - "fetus": 26768, - "feu": 24912, - "feu": 32990, - "feud": 27365, - "fever": 40896, - "fever": 9989, - "fevre": 43861, - "few": 1939, - "fewer": 19128, - "fex": 41584, - "fex": 26392, - "fey": 39069, - "fey": 23298, - "fez": 43081, - "ff": 1021, - "ff": 1304, - "ffa": 15355, - "ffame": 42873, - "ffc": 19832, - "ffe": 1138, - "ffe": 8631, - "ffect": 29151, - "ffed": 8448, - "ffee": 26377, - "ffel": 22656, - "ffen": 46537, - "ffer": 27369, - "ffer": 11636, - "ffers": 32163, - "fferty": 44771, - "ffes": 46441, - "ffey": 30138, - "fff": 28106, - "ffi": 19961, - "ffic": 4762, - "ffice": 26044, - "ffici": 3639, - "fficial": 39818, - "fficial": 6463, - "fficiency": 27800, - "fficient": 20424, - "ffin": 12779, - "ffin": 7367, - "ffing": 16592, - "ffins": 17898, - "ffl": 39490, - "ffle": 7749, - "ffler": 39819, - "ffles": 19344, - "ffman": 15823, - "ffo": 42264, - "ffs": 4424, - "ffxiv": 26569, - "ffxv": 46786, - "ffy": 26404, - "ffy": 7795, - "fg": 45977, - "fg": 6823, - "fgm": 32178, - "fgo": 46113, - "fh": 21649, - "fh": 21010, - "fhs": 45094, - "fi": 701, - "fi": 3589, - "fia": 8827, - "fiable": 34373, - "fianc": 27752, - "fiance": 44114, - "fiancé": 34039, - "fiasco": 40944, - "fiat": 16740, - "fiawec": 39485, - "fib": 40594, - "fiba": 34993, - "fiber": 35074, - "fiber": 12612, - "fibers": 44587, - "fibre": 21401, - "fibro": 21294, - "fibrosis": 36307, - "fic": 1788, - "fic": 2059, - "fica": 26952, - "fically": 14854, - 
"fication": 4523, - "fications": 12512, - "ficial": 48192, - "fics": 42505, - "fiction": 6218, - "fictional": 25570, - "fid": 34197, - "fid": 23966, - "fidd": 25218, - "fiddle": 35968, - "fide": 45375, - "fidel": 21740, - "fidel": 36837, - "fidelity": 30109, - "fidget": 48664, - "fie": 28487, - "fie": 10348, - "fied": 29642, - "fied": 2853, - "fiel": 1361, - "field": 7571, - "field": 1570, - "fielder": 11046, - "fieldhouse": 37969, - "fielding": 30465, - "fields": 6494, - "fieldwork": 33155, - "fiends": 37869, - "fier": 11167, - "fier": 10598, - "fierc": 48609, - "fierce": 13896, - "fiercely": 49039, - "fiers": 16113, - "fiery": 24557, - "fies": 9537, - "fiesta": 14580, - "fif": 5309, - "fifa": 21976, - "fifa": 8516, - "fifaworldcup": 38819, - "fifawwc": 41329, - "fife": 24374, - "fifteen": 29504, - "fifth": 25515, - "fifth": 8772, - "fifthharmony": 31075, - "fifty": 24456, - "fifty": 15978, - "fig": 4814, - "fig": 20719, - "figaro": 48044, - "figh": 23274, - "fight": 5262, - "fight": 2757, - "fighter": 35884, - "fighter": 6438, - "fighters": 7371, - "fightfor": 48909, - "fightfor": 35740, - "fighting": 38625, - "fighting": 4652, - "fighton": 45578, - "fights": 12132, - "figs": 38882, - "figu": 6390, - "figur": 16948, - "figurative": 44042, - "figure": 48820, - "figure": 5274, - "figured": 15630, - "figures": 8739, - "figurine": 33306, - "figuring": 31513, - "fiji": 48270, - "fiji": 18285, - "fik": 46589, - "fil": 1142, - "fil": 14915, - "fila": 30992, - "filament": 49252, - "file": 12545, - "file": 4512, - "filed": 13864, - "files": 7850, - "filet": 43155, - "fili": 9590, - "filing": 16576, - "filip": 14368, - "filipino": 19153, - "fill": 15904, - "fill": 6277, - "filled": 5589, - "filler": 32816, - "fillers": 45005, - "fillet": 39276, - "filling": 9736, - "fillion": 38048, - "fillmore": 43922, - "fills": 21750, - "filly": 27690, - "film": 5117, - "film": 1860, - "filmed": 15801, - "filmfare": 42224, - "filmfest": 24508, - "filmfestival": 28066, - "filming": 6866, 
- "filmmaker": 17202, - "filmmakers": 24896, - "filmmaking": 18226, - "films": 5370, - "fils": 40271, - "filter": 7541, - "filtered": 29926, - "filtering": 47770, - "filters": 18385, - "filth": 39713, - "filthy": 26899, - "filtr": 21408, - "filtration": 42036, - "fim": 47525, - "fin": 735, - "fin": 10663, - "fina": 34497, - "final": 11968, - "final": 1755, - "finale": 7844, - "finalfantasy": 44543, - "finalfour": 46999, - "finalist": 12620, - "finalists": 13422, - "finalized": 48930, - "finally": 1992, - "finals": 4536, - "finan": 4807, - "finance": 6117, - "finances": 28767, - "financi": 12846, - "financial": 19783, - "financial": 4930, - "financially": 28124, - "financing": 18375, - "finch": 18523, - "find": 18638, - "find": 1416, - "finder": 15045, - "finders": 43884, - "findia": 47064, - "finding": 37455, - "finding": 6002, - "findings": 16529, - "findlay": 48227, - "findom": 36463, - "finds": 6680, - "findyour": 25936, - "findyourpark": 38924, - "fine": 12042, - "fine": 3797, - "fineart": 7484, - "fineart": 16005, - "fineartamerica": 7724, - "fined": 20094, - "finely": 46120, - "finer": 36681, - "fines": 25053, - "finesse": 46047, - "finest": 7707, - "fing": 6485, - "fing": 17955, - "finger": 13480, - "finger": 8895, - "fingerprint": 39579, - "fingers": 9690, - "fini": 2405, - "finish": 42178, - "finish": 3958, - "finished": 3078, - "finisher": 38636, - "finishers": 48661, - "finishes": 13078, - "finishing": 7912, - "finite": 48312, - "finity": 41463, - "finity": 21273, - "fink": 40158, - "finland": 10775, - "finley": 41652, - "finn": 28479, - "finn": 16925, - "finna": 35180, - "finnish": 19616, - "fino": 30083, - "fins": 32810, - "fintech": 48929, - "fintech": 8899, - "fion": 27476, - "fiona": 20099, - "fior": 37086, - "fiore": 44997, - "fioren": 33188, - "fiorentina": 43713, - "fios": 42521, - "fir": 770, - "fir": 16233, - "fire": 2951, - "fire": 1769, - "firearm": 40311, - "firearms": 23960, - "fireball": 40543, - "firec": 42806, - "fired": 8846, - 
"firefighter": 20498, - "firefighters": 12600, - "firefly": 33997, - "firefox": 35372, - "fireman": 46085, - "firen": 34752, - "firenze": 38445, - "fireplace": 23050, - "fires": 8749, - "fireside": 36185, - "firework": 40750, - "fireworks": 10641, - "firing": 15105, - "firm": 16936, - "firm": 7705, - "firmly": 29156, - "firms": 13655, - "firmware": 42691, - "first": 6853, - "first": 874, - "firstdayof": 44297, - "firsth": 48512, - "firsts": 47884, - "firth": 26078, - "fis": 7846, - "fis": 47683, - "fiscal": 20825, - "fischer": 26532, - "fish": 6431, - "fish": 2759, - "fisher": 11175, - "fisher": 9176, - "fisheries": 24612, - "fisherman": 25055, - "fishermen": 28547, - "fishers": 42065, - "fishery": 49057, - "fishes": 35470, - "fishing": 31703, - "fishing": 4935, - "fishy": 35665, - "fist": 48340, - "fist": 17085, - "fit": 2366, - "fit": 2478, - "fitbit": 33768, - "fitch": 44614, - "fitfam": 20662, - "fitnes": 47285, - "fitness": 20044, - "fitness": 4838, - "fits": 6401, - "fitt": 32994, - "fitted": 14863, - "fitter": 42096, - "fitters": 32364, - "fitting": 11769, - "fittings": 45787, - "fitz": 11120, - "fitz": 25913, - "fitzgerald": 20606, - "fitzpatrick": 37141, - "fiu": 38374, - "five": 19508, - "five": 3127, - "fives": 44066, - "fix": 4596, - "fix": 6028, - "fixed": 9393, - "fixes": 25473, - "fixing": 17423, - "fixture": 17317, - "fixtures": 19904, - "fizz": 31242, - "fj": 43183, - "fj": 46447, - "fjor": 31260, - "fk": 12410, - "fl": 1082, - "fl": 2685, - "fla": 1577, - "fla": 20292, - "flag": 11536, - "flag": 4859, - "flagged": 45012, - "flags": 12221, - "flagship": 19779, - "flagstaff": 40406, - "flair": 24938, - "flake": 21221, - "flakes": 20934, - "flam": 10559, - "flame": 40351, - "flame": 13484, - "flamen": 28826, - "flamenco": 37362, - "flames": 13441, - "flamin": 42693, - "flaming": 34782, - "flamingo": 30323, - "flan": 14572, - "flanagan": 28641, - "flanders": 34837, - "flank": 44553, - "flann": 39510, - "flannel": 37807, - "flap": 35253, - "flappy": 
40241, - "flare": 21185, - "flares": 46088, - "flash": 6089, - "flash": 5815, - "flashback": 14616, - "flashback": 11988, - "flashbackfriday": 15014, - "flashbacks": 47056, - "flashes": 31259, - "flashing": 31764, - "flashlight": 37256, - "flask": 36194, - "flat": 8986, - "flat": 6313, - "flats": 17228, - "flatt": 45498, - "flattering": 43267, - "flaun": 41421, - "flav": 7191, - "flavo": 28895, - "flavor": 31835, - "flavor": 11818, - "flavored": 29350, - "flavorful": 49135, - "flavors": 16930, - "flavour": 17026, - "flavoured": 42397, - "flavours": 21083, - "flaw": 14268, - "flaw": 34978, - "flawed": 35136, - "flawless": 15531, - "flaws": 30492, - "flax": 43443, - "fle": 2428, - "fle": 44964, - "flea": 24883, - "fleck": 28143, - "fled": 26731, - "flee": 19427, - "flee": 30167, - "fleece": 25038, - "fleeing": 30543, - "fleek": 43513, - "fleet": 35922, - "fleet": 9147, - "fleetwood": 28883, - "fleming": 25769, - "fler": 48789, - "flesh": 17495, - "flet": 16102, - "fletcher": 19810, - "fleur": 28593, - "flew": 13768, - "flex": 16426, - "flex": 12038, - "flexi": 10032, - "flexibility": 22547, - "flexible": 14502, - "flexing": 48483, - "fli": 2472, - "flick": 13746, - "flick": 23414, - "flickr": 17755, - "flies": 8070, - "flight": 24701, - "flight": 3795, - "flights": 10515, - "flin": 24730, - "flin": 43816, - "flinders": 44647, - "fling": 22768, - "flint": 28306, - "flint": 18324, - "flip": 20385, - "flip": 11035, - "flipk": 30829, - "flipkart": 33154, - "flipped": 28144, - "flipping": 25881, - "flips": 35089, - "flir": 24330, - "flirt": 38352, - "flirting": 35243, - "flix": 40663, - "flo": 1945, - "flo": 20711, - "float": 16123, - "floating": 12619, - "floats": 33272, - "flock": 36297, - "flock": 21822, - "flondon": 47366, - "floo": 4062, - "flood": 23793, - "flood": 7148, - "flooded": 19706, - "flooding": 10204, - "floods": 16369, - "floor": 23657, - "floor": 4125, - "flooring": 19227, - "floors": 15671, - "flop": 22994, - "floppy": 38267, - "flops": 29146, - "flor": 
15784, - "flor": 41669, - "flora": 18906, - "floral": 10732, - "florals": 48331, - "floren": 37706, - "florence": 11617, - "flores": 21537, - "flori": 3482, - "florian": 41861, - "florida": 34264, - "florida": 3966, - "florist": 38403, - "floss": 36453, - "flotus": 35181, - "flour": 18592, - "flouri": 23239, - "flourish": 36038, - "flow": 2180, - "flow": 5608, - "flower": 12772, - "flower": 4055, - "flowering": 19953, - "flowers": 4023, - "flowing": 14922, - "flown": 25659, - "flows": 16715, - "floyd": 46369, - "floyd": 13656, - "flu": 3698, - "flu": 13528, - "fluctu": 40181, - "fluence": 38169, - "fluent": 30025, - "fluff": 31174, - "fluffy": 40346, - "fluffy": 17054, - "fluid": 43803, - "fluid": 16717, - "fluids": 41490, - "fluor": 45127, - "fluore": 26974, - "fluorescent": 35036, - "fluori": 45611, - "flur": 31591, - "flush": 25777, - "flushing": 43754, - "flute": 23746, - "flux": 25249, - "flwx": 30907, - "fly": 5666, - "fly": 3228, - "flye": 30873, - "flyeagles": 39927, - "flyeaglesfly": 39931, - "flyer": 11875, - "flyers": 14181, - "flyfishing": 31800, - "flying": 20782, - "flying": 4610, - "flyn": 40676, - "flynn": 15721, - "flyo": 33506, - "flyover": 38083, - "fm": 13715, - "fm": 3689, - "fman": 25152, - "fml": 26730, - "fmr": 32875, - "fn": 22773, - "fn": 21763, - "fnc": 46506, - "fo": 898, - "fo": 6157, - "foal": 40386, - "foam": 30039, - "foam": 14587, - "foamed": 26711, - "fob": 40315, - "focal": 30934, - "focu": 5827, - "focus": 4353, - "focused": 9319, - "focuses": 20093, - "focusing": 15551, - "fod": 31015, - "fod": 43299, - "fodils": 44411, - "foe": 22952, - "foes": 46279, - "fog": 9417, - "foggy": 19770, - "foil": 17302, - "fol": 1106, - "fol": 48616, - "fold": 35201, - "fold": 11021, - "foldable": 48307, - "folded": 25233, - "folder": 25717, - "folding": 15464, - "folds": 24266, - "foley": 22850, - "foli": 7713, - "folia": 48964, - "foliage": 26350, - "folio": 10772, - "folk": 10665, - "folk": 6032, - "folke": 47190, - "folkl": 27273, - 
"folklore": 22133, - "folklore": 28620, - "folklorethursday": 23270, - "folks": 5422, - "follo": 41417, - "follow": 1964, - "follow": 1979, - "followart": 40957, - "followback": 33863, - "followed": 6499, - "follower": 17039, - "followers": 4856, - "following": 3473, - "followme": 29668, - "followparty": 44757, - "follows": 11287, - "followthe": 30747, - "folly": 41408, - "folsom": 42108, - "fom": 34540, - "fon": 5017, - "fon": 38318, - "fond": 19964, - "fonda": 44609, - "fondue": 48321, - "fone": 40672, - "font": 37610, - "font": 16248, - "fontaine": 37864, - "fontana": 43643, - "fontein": 45062, - "fonts": 32801, - "foo": 1183, - "foo": 23435, - "food": 4586, - "food": 1559, - "foodand": 38317, - "foodbank": 31926, - "foodie": 30762, - "foodie": 9847, - "foodies": 22416, - "foodnetwork": 46793, - "foods": 7057, - "foodsecurity": 49329, - "foodtruck": 47682, - "fool": 23959, - "fool": 12212, - "fooled": 28761, - "fooling": 47964, - "foolish": 33824, - "fools": 15946, - "foot": 6702, - "foot": 4738, - "footage": 11130, - "footb": 33466, - "football": 9376, - "football": 1882, - "footballer": 20646, - "footballers": 30269, - "footed": 38040, - "footh": 25951, - "foothills": 37020, - "footpath": 48858, - "footprint": 23206, - "footprints": 39640, - "footsteps": 27289, - "footwear": 22772, - "footy": 39866, - "footy": 18922, - "for": 645, - "for": 556, - "forage": 46871, - "foraging": 39056, - "forall": 17824, - "forbe": 49098, - "forbes": 13925, - "forbi": 24754, - "forbidden": 25164, - "force": 12068, - "force": 2869, - "forced": 8201, - "forces": 5381, - "forchange": 35848, - "forcing": 21573, - "ford": 3751, - "ford": 1623, - "fordfc": 28581, - "fordham": 48792, - "fords": 29351, - "fordshire": 14645, - "fore": 1484, - "fore": 1332, - "forec": 34155, - "forecast": 7361, - "forecasting": 38133, - "forecasts": 27696, - "foreclo": 44916, - "forefront": 37679, - "foreground": 35186, - "forehead": 25394, - "foreig": 26497, - "foreign": 42255, - "foreign": 6046, - 
"foreigners": 38549, - "foreman": 36174, - "foremost": 42128, - "foren": 16526, - "forensic": 23158, - "forensics": 38763, - "forest": 18760, - "forest": 4167, - "forestation": 33939, - "forestry": 26281, - "forests": 14095, - "forever": 14748, - "forever": 3225, - "forevery": 40605, - "forex": 40200, - "forex": 17395, - "forfe": 44871, - "forge": 19232, - "forged": 28105, - "forget": 46153, - "forget": 2678, - "forgets": 35613, - "forgetting": 25452, - "forgi": 22080, - "forgive": 15332, - "forgiven": 44894, - "forgiveness": 23585, - "forgood": 39169, - "forgot": 6483, - "forgotten": 7994, - "fork": 24501, - "fork": 13700, - "forkids": 48571, - "forklift": 43202, - "forks": 28769, - "forlife": 17624, - "form": 1157, - "form": 1907, - "forma": 38829, - "formal": 12978, - "formally": 24867, - "format": 16252, - "format": 11874, - "formation": 2510, - "formations": 37715, - "formative": 48882, - "formats": 32085, - "forme": 42085, - "formed": 6528, - "former": 2276, - "formerly": 20866, - "formid": 38599, - "formidable": 39834, - "forming": 15443, - "formity": 42290, - "forms": 5161, - "formu": 8689, - "formul": 23923, - "formula": 24485, - "formula": 10776, - "formulae": 34586, - "formulated": 45066, - "forre": 38876, - "forrest": 25205, - "forrester": 45338, - "forsa": 48958, - "forsale": 13303, - "forster": 42923, - "forsy": 29629, - "forsyth": 40952, - "fort": 12300, - "fort": 2921, - "forte": 44350, - "forte": 27367, - "forth": 17068, - "forth": 11932, - "forthcoming": 19989, - "forthe": 12521, - "forti": 26984, - "fortified": 46486, - "fortn": 14428, - "fortnight": 39235, - "fortnite": 38734, - "fortnite": 17890, - "fortress": 19988, - "fortun": 6950, - "fortunate": 19898, - "fortunately": 34358, - "fortune": 40931, - "fortune": 11451, - "fortunes": 41989, - "forty": 24399, - "forum": 37851, - "forum": 4538, - "forums": 31518, - "forwar": 34364, - "forward": 47031, - "forward": 2342, - "forwards": 38974, - "foryou": 35150, - "forz": 46056, - "forza": 33293, - 
"forza": 28089, - "fos": 36925, - "fos": 22081, - "foss": 14240, - "foss": 37911, - "fossil": 20419, - "fossil": 15202, - "fossilfriday": 26079, - "fossils": 30652, - "foster": 26778, - "foster": 8139, - "fostering": 35996, - "fosters": 37644, - "foto": 15908, - "foto": 12823, - "fotogra": 23687, - "fotografia": 40256, - "fotos": 26124, - "fou": 14516, - "fought": 10844, - "foul": 19784, - "foun": 3154, - "found": 3454, - "found": 1546, - "foundation": 4058, - "foundations": 25219, - "founded": 12240, - "founder": 5145, - "founders": 14602, - "founding": 15317, - "foundry": 31426, - "fountain": 44863, - "fountain": 13405, - "fountains": 37411, - "four": 5113, - "four": 2721, - "foursquare": 34484, - "fourteen": 46255, - "fourth": 7516, - "fourthofjuly": 47805, - "fow": 17084, - "fowl": 31685, - "fowler": 20980, - "fox": 5007, - "fox": 3240, - "foxandfriends": 45841, - "foxes": 24145, - "foxnews": 18830, - "foxsports": 39267, - "foxtv": 49396, - "foxx": 32993, - "foxy": 27945, - "foy": 30284, - "foyer": 38011, - "foyle": 47902, - "fp": 28058, - "fp": 8941, - "fpl": 27970, - "fpp": 36464, - "fps": 25300, - "fpv": 43175, - "fr": 936, - "fr": 5512, - "fra": 3368, - "fra": 15644, - "frac": 15607, - "fracking": 21894, - "fractal": 46471, - "fraction": 26788, - "fractu": 25847, - "fracture": 28995, - "fractured": 37421, - "fractures": 46213, - "frag": 13093, - "fragile": 23579, - "fragment": 39209, - "fragments": 41424, - "fragr": 15403, - "fragrance": 17874, - "fragrances": 44567, - "fragrant": 37030, - "fram": 27987, - "frame": 11029, - "frame": 6481, - "framed": 13135, - "frames": 15479, - "framework": 13195, - "frameworks": 43136, - "framing": 24539, - "frampton": 41733, - "fran": 2118, - "fran": 18878, - "franc": 3872, - "franc": 42340, - "franca": 48952, - "france": 12045, - "france": 3552, - "frances": 20803, - "francesca": 32327, - "francesco": 25816, - "franch": 11756, - "franchi": 46438, - "franchise": 13664, - "franci": 46458, - "francis": 22187, - "francis": 
7660, - "francisco": 6887, - "franco": 17934, - "franco": 17052, - "francois": 29317, - "frank": 5390, - "frank": 5229, - "franken": 20487, - "franken": 48252, - "frankenstein": 26410, - "frankfur": 17442, - "frankfurt": 18598, - "franki": 39227, - "frankie": 38373, - "frankie": 16215, - "franklin": 40935, - "franklin": 9999, - "frankly": 38015, - "franks": 42855, - "frans": 47892, - "franz": 25449, - "franç": 38381, - "fraser": 39082, - "fraser": 16754, - "frat": 15225, - "frat": 39292, - "fraternity": 24433, - "frau": 23063, - "fraud": 40647, - "fraud": 9961, - "fraudul": 42655, - "fraudulent": 47408, - "fray": 41154, - "frazier": 32841, - "frc": 41507, - "fre": 821, - "fre": 43165, - "freak": 20352, - "freak": 13701, - "freaked": 43511, - "freakin": 23900, - "freaking": 11992, - "freaks": 27009, - "freaky": 31583, - "freck": 33328, - "freckles": 48036, - "fred": 9486, - "fred": 6678, - "freddie": 41890, - "freddie": 17014, - "freddy": 24394, - "freder": 10745, - "frederic": 41165, - "frederick": 37103, - "frederick": 18570, - "fredo": 48241, - "free": 2065, - "free": 1139, - "freebie": 35865, - "freebies": 28630, - "freec": 46569, - "freed": 12585, - "freed": 23392, - "freedom": 17992, - "freedom": 4511, - "freedoms": 32500, - "freef": 48678, - "freel": 14174, - "freelance": 21942, - "freely": 24436, - "freeman": 16450, - "freep": 32499, - "freepalestine": 39242, - "freer": 44676, - "frees": 27455, - "freestyle": 15594, - "freeway": 24927, - "freeze": 14187, - "freezer": 25390, - "freezing": 12499, - "frei": 30183, - "freight": 17023, - "fremantle": 48012, - "fremont": 34578, - "fren": 2919, - "french": 13118, - "french": 3461, - "frenzy": 30084, - "frequ": 9211, - "frequencies": 45319, - "frequency": 18825, - "frequent": 19836, - "frequently": 22434, - "fresco": 31609, - "fresh": 4065, - "fresh": 2975, - "fresher": 49284, - "freshers": 35810, - "freshest": 46809, - "freshly": 16081, - "freshman": 9381, - "freshmen": 21292, - "freshness": 45872, - "freshwater": 
24803, - "fresno": 40879, - "fresno": 20995, - "fret": 40510, - "freud": 40787, - "frey": 22136, - "frey": 9082, - "fri": 815, - "fri": 6882, - "friars": 30513, - "fric": 18981, - "frick": 46304, - "friction": 38563, - "frid": 46388, - "frida": 36001, - "friday": 6350, - "friday": 1461, - "fridayfeeling": 11952, - "fridaymotivation": 38544, - "fridaynight": 44858, - "fridayreads": 37736, - "fridays": 15589, - "fridaythe": 47642, - "fridge": 13491, - "fridges": 40734, - "frie": 36999, - "fried": 13743, - "fried": 7310, - "friedman": 29402, - "friedrich": 34171, - "friend": 3017, - "friend": 1625, - "friendly": 44612, - "friendly": 4681, - "friends": 38875, - "friends": 1574, - "friendship": 42674, - "friendship": 7679, - "friendships": 28840, - "fries": 11369, - "frifotos": 40493, - "friger": 20785, - "friggin": 48300, - "frigh": 34831, - "fright": 24277, - "fright": 40207, - "frightened": 47136, - "frightening": 39290, - "fringe": 10640, - "fris": 37252, - "frisbee": 45768, - "frisco": 35945, - "frit": 34614, - "fritz": 29860, - "friyay": 38887, - "frm": 12951, - "fro": 626, - "fro": 26603, - "frock": 45306, - "frog": 26494, - "frog": 11438, - "frogs": 20781, - "from": 8330, - "from": 633, - "frome": 48691, - "fromhome": 41477, - "fromthe": 18756, - "fron": 1847, - "fron": 18036, - "front": 10996, - "front": 2184, - "frontal": 35794, - "frontier": 18253, - "frontiers": 38396, - "frontline": 29589, - "frontman": 36775, - "fronts": 26846, - "froome": 48560, - "frosh": 47069, - "frost": 39420, - "frost": 11619, - "frosted": 35988, - "frosting": 33872, - "frosty": 22760, - "froze": 47788, - "frozen": 42464, - "frozen": 8507, - "frs": 26216, - "fru": 3248, - "fruit": 16771, - "fruit": 5190, - "fruitful": 31494, - "fruits": 13282, - "fruity": 22320, - "frustr": 16046, - "frustrated": 25111, - "frustrating": 31342, - "frustration": 30535, - "fry": 33914, - "fry": 13686, - "fryer": 49217, - "frying": 38516, - "fs": 23699, - "fs": 3854, - "fsa": 33373, - "fsu": 44185, - 
"fsu": 19317, - "ft": 3391, - "ft": 981, - "fta": 41975, - "ftc": 33752, - "fted": 5612, - "fter": 25063, - "fthe": 22886, - "ftheday": 9823, - "fting": 6174, - "fton": 26605, - "ftp": 42649, - "fts": 3767, - "ftse": 46717, - "ftw": 19298, - "fty": 17494, - "fu": 665, - "fu": 9098, - "fuch": 42617, - "fudge": 24270, - "fue": 43723, - "fuego": 41500, - "fuel": 21113, - "fuel": 5945, - "fueled": 28792, - "fueling": 38793, - "fuelled": 48357, - "fuels": 19365, - "fuentes": 44393, - "fuer": 29645, - "fug": 29227, - "fugitive": 39257, - "fuji": 15573, - "fuji": 21634, - "fujifilm": 24765, - "fuk": 31051, - "fuku": 20728, - "fukushima": 33929, - "ful": 1814, - "ful": 857, - "fulbright": 41834, - "fulfill": 43675, - "fulfill": 27467, - "fulfilled": 29919, - "fulfilling": 30621, - "fulfillment": 45573, - "fulham": 25574, - "full": 9407, - "full": 1476, - "fuller": 20225, - "fullerton": 42822, - "fullest": 35603, - "fully": 39142, - "fully": 2401, - "fulness": 10526, - "fuls": 41606, - "fulton": 26725, - "fum": 38393, - "fumble": 49373, - "fun": 1229, - "fun": 1499, - "func": 8679, - "function": 8093, - "functional": 12885, - "functionality": 33316, - "functioning": 25479, - "functions": 18001, - "fund": 19089, - "fund": 4877, - "fundam": 11670, - "fundament": 18852, - "fundamental": 17627, - "fundamentally": 45378, - "fundamentals": 27887, - "funday": 15439, - "funded": 10588, - "funding": 5588, - "fundra": 6201, - "fundraiser": 10049, - "fundraising": 10755, - "funds": 7066, - "funer": 40693, - "funeral": 10606, - "funfact": 31596, - "funfactfriday": 40710, - "fungal": 38838, - "fungi": 27837, - "fungus": 30677, - "funk": 37353, - "funk": 13372, - "funko": 49402, - "funko": 23697, - "funky": 16492, - "funnel": 27862, - "funnier": 42232, - "funniest": 15557, - "funny": 19124, - "funny": 3789, - "funrun": 34185, - "fur": 2395, - "fur": 9686, - "furi": 40816, - "furious": 17522, - "furman": 49238, - "furn": 21348, - "furnace": 31913, - "furnished": 37388, - "furnitu": 45696, 
- "furniture": 7993, - "furry": 33414, - "furry": 15351, - "fursuit": 25306, - "fursuit": 43083, - "fursuitfriday": 27917, - "further": 5583, - "fury": 14404, - "fus": 18419, - "fuse": 23386, - "fused": 38994, - "fusion": 44661, - "fusion": 9364, - "fuss": 26331, - "fut": 21460, - "fut": 34049, - "futbol": 33014, - "futsal": 20558, - "futu": 33454, - "futur": 38840, - "future": 7959, - "future": 1904, - "futureof": 22599, - "futureofwork": 33202, - "futures": 13488, - "futuri": 19068, - "futurism": 48435, - "futurist": 48086, - "futuristic": 30987, - "fuzz": 47128, - "fuzz": 40443, - "fuzzy": 25876, - "fv": 29795, - "fw": 23934, - "fw": 5277, - "fwd": 27052, - "fx": 17807, - "fx": 9025, - "fy": 8440, - "fy": 2702, - "fyi": 16014, - "fying": 5294, - "fz": 46400, - "fé": 34072, - "g": 70, - "g": 326, - "ga": 1275, - "ga": 1531, - "gaa": 10715, - "gaal": 40867, - "gaard": 24645, - "gab": 3927, - "gab": 37382, - "gabbana": 36272, - "gabby": 48115, - "gabby": 24567, - "gabe": 18916, - "gabi": 41931, - "gable": 33387, - "gables": 40928, - "gabri": 8311, - "gabriel": 31684, - "gabriel": 13244, - "gabrielle": 33572, - "gaby": 46420, - "gac": 32520, - "gad": 7786, - "gad": 44651, - "gadget": 25525, - "gadgets": 22840, - "gado": 29489, - "gae": 22003, - "gael": 35663, - "gaelic": 31173, - "gaf": 21354, - "gaf": 32670, - "gag": 14121, - "gag": 18844, - "gaga": 9782, - "gage": 21081, - "gah": 27750, - "gai": 24214, - "gai": 25153, - "gaia": 41269, - "gail": 41160, - "gail": 27676, - "gain": 21536, - "gain": 6202, - "gaine": 35747, - "gained": 14489, - "gaines": 49225, - "gainesville": 40427, - "gaining": 15260, - "gains": 42751, - "gains": 12107, - "gal": 2001, - "gal": 4488, - "gala": 7211, - "galac": 18864, - "galactic": 25514, - "galap": 41115, - "galapagos": 44057, - "galat": 39853, - "galatasar": 42413, - "galatasaray": 47787, - "galax": 5647, - "galaxies": 32435, - "galaxy": 32130, - "galaxy": 6545, - "gale": 37658, - "gale": 21380, - "galerie": 44539, - "gales": 48633, 
- "gali": 17546, - "gali": 30552, - "galicia": 47927, - "galileo": 39671, - "gall": 3011, - "gall": 33374, - "galla": 16847, - "gallagher": 19168, - "galleria": 40656, - "galleries": 22304, - "gallery": 36648, - "gallery": 3830, - "galley": 48917, - "galli": 22568, - "gallipoli": 47249, - "gallo": 37350, - "gallo": 33265, - "gallon": 24615, - "gallons": 29335, - "galloway": 27796, - "galore": 22286, - "gals": 20125, - "galvani": 46046, - "galve": 34328, - "galveston": 36003, - "galway": 38045, - "galway": 17112, - "gam": 1162, - "gam": 34195, - "gama": 35873, - "gambia": 32988, - "gamble": 26121, - "gambling": 20287, - "game": 2882, - "game": 1063, - "gameart": 31490, - "gameboy": 40951, - "gamecube": 44079, - "gameday": 9241, - "gamedev": 7544, - "gameinsight": 42626, - "gameof": 10987, - "gameofthrones": 11822, - "gameon": 47691, - "gameplay": 16794, - "gamer": 12595, - "gamer": 11598, - "gamergate": 25961, - "gamers": 16166, - "gamersunite": 26423, - "games": 18551, - "games": 1955, - "gamescom": 37003, - "gamestop": 39436, - "gametime": 45899, - "gami": 42025, - "gamification": 48908, - "gaming": 28803, - "gaming": 4017, - "gamma": 22180, - "gamo": 39325, - "gan": 1822, - "gan": 1670, - "gand": 8399, - "ganda": 27261, - "gander": 44508, - "gandhi": 12322, - "ganesh": 30362, - "ganesha": 45185, - "gang": 8066, - "gang": 5674, - "ganga": 36275, - "gangnam": 46777, - "gangs": 29844, - "gangsta": 37365, - "gangster": 26514, - "gani": 48324, - "gann": 45665, - "gannon": 45837, - "gano": 25304, - "gao": 26556, - "gaon": 19279, - "gap": 29906, - "gap": 7609, - "gaps": 25296, - "gar": 1099, - "gar": 5824, - "gara": 28710, - "garage": 8474, - "garbage": 13760, - "garci": 44658, - "garcia": 10529, - "gard": 7751, - "gard": 21003, - "garda": 31906, - "garde": 22649, - "garden": 4674, - "garden": 2756, - "gardenchat": 46292, - "gardener": 28554, - "gardeners": 38205, - "gardening": 10483, - "gardens": 6152, - "gardiner": 43121, - "gardner": 18710, - "gare": 5633, - "gare": 
48402, - "gareth": 37140, - "gareth": 18175, - "garfield": 26728, - "garh": 16762, - "gari": 40898, - "gari": 43080, - "garis": 37839, - "garland": 23418, - "garlic": 9685, - "garment": 31418, - "garments": 43341, - "garmin": 39885, - "garner": 20340, - "garnet": 37669, - "garo": 30388, - "garrett": 15881, - "garri": 21764, - "garrison": 30108, - "garros": 40425, - "garry": 24398, - "gars": 12055, - "gart": 18380, - "gart": 18751, - "garten": 14684, - "garter": 48420, - "garth": 45398, - "garth": 24469, - "gartner": 43334, - "gartner": 29678, - "garty": 46383, - "garu": 31140, - "garvey": 39511, - "garwal": 38623, - "gary": 10535, - "gary": 4516, - "garza": 49393, - "gas": 5047, - "gas": 2474, - "gases": 36971, - "gasoline": 27691, - "gasp": 43762, - "gaston": 40669, - "gastri": 49197, - "gastro": 23740, - "gastron": 30699, - "gastronomy": 46987, - "gat": 5314, - "gat": 18941, - "gata": 44575, - "gate": 8071, - "gate": 3302, - "gated": 23997, - "gates": 9472, - "gateshead": 40051, - "gateway": 45221, - "gateway": 14943, - "gather": 36345, - "gather": 12602, - "gathered": 14646, - "gathering": 9197, - "gatherings": 48096, - "gathers": 39250, - "gating": 27561, - "gation": 11095, - "gations": 33906, - "gato": 44492, - "gator": 20216, - "gator": 16390, - "gatorade": 36354, - "gators": 17173, - "gatory": 24796, - "gatsby": 32586, - "gatwick": 37122, - "gau": 5919, - "gau": 43068, - "gauge": 18728, - "gaunt": 31862, - "gauntlet": 37163, - "gautam": 45853, - "gautam": 31356, - "gauteng": 40333, - "gav": 8966, - "gave": 3485, - "gavin": 32974, - "gavin": 16389, - "gaw": 15405, - "gawd": 43239, - "gawx": 43420, - "gay": 7460, - "gay": 5627, - "gaya": 39477, - "gaye": 41401, - "gayle": 29998, - "gayo": 36768, - "gays": 28001, - "gaz": 4837, - "gaz": 36475, - "gaza": 38391, - "gaza": 10112, - "gazaunderattack": 42458, - "gaze": 23212, - "gazette": 20443, - "gazing": 28373, - "gb": 8727, - "gb": 4619, - "gba": 18528, - "gbbo": 34474, - "gbc": 42993, - "gbp": 27391, - "gbr": 
31984, - "gby": 40509, - "gc": 8577, - "gc": 6043, - "gcc": 26804, - "gcse": 28763, - "gcu": 34137, - "gd": 13264, - "gd": 14604, - "gdc": 32793, - "gden": 44928, - "gdp": 17100, - "gdpr": 22963, - "ge": 619, - "ge": 710, - "gea": 26790, - "gear": 15532, - "gear": 4802, - "gearbox": 42454, - "geared": 33903, - "gearing": 19027, - "gears": 21147, - "geaux": 36313, - "gecko": 38616, - "ged": 17252, - "ged": 3480, - "geddon": 31720, - "gedly": 13991, - "gee": 9806, - "gee": 9071, - "geek": 17920, - "geek": 7135, - "geeks": 20110, - "geeky": 47332, - "geel": 25906, - "geelong": 34555, - "gees": 38088, - "geese": 26413, - "geez": 42394, - "geh": 30320, - "geist": 38290, - "gel": 7343, - "gel": 5697, - "gelato": 29577, - "gels": 42552, - "gely": 14637, - "gem": 14261, - "gem": 7613, - "gement": 19495, - "gemini": 23086, - "gemma": 23952, - "gems": 14355, - "gemstone": 27747, - "gemstones": 43972, - "gen": 1024, - "gen": 3278, - "gence": 16088, - "gency": 5245, - "gend": 33247, - "gender": 22976, - "gender": 5906, - "gendere": 35824, - "genderequality": 43338, - "gene": 5822, - "gene": 7962, - "genealo": 24142, - "genealogy": 29381, - "gener": 1832, - "general": 20576, - "general": 3658, - "generally": 19256, - "generals": 30296, - "generate": 16896, - "generated": 19450, - "generates": 33938, - "generating": 23882, - "generation": 41211, - "generation": 4883, - "generational": 34506, - "generations": 12247, - "generative": 29472, - "generator": 19399, - "generators": 41917, - "generic": 26978, - "generosity": 23015, - "generous": 12570, - "generously": 35113, - "genes": 19683, - "genesis": 13518, - "genetic": 47746, - "genetic": 13578, - "genetically": 36745, - "genetics": 18276, - "geneva": 14799, - "genevie": 41633, - "genevieve": 46584, - "geni": 22334, - "genic": 15750, - "genie": 24221, - "genital": 32960, - "genius": 8235, - "geniuses": 41406, - "geno": 41544, - "geno": 46776, - "genoa": 43993, - "genoci": 14687, - "genocide": 15903, - "genome": 23991, - "genomic": 
44371, - "genomics": 26227, - "genre": 14249, - "genres": 30340, - "gens": 17449, - "gent": 3685, - "gent": 7139, - "gente": 34325, - "gentle": 7262, - "gentle": 13577, - "gentleman": 13293, - "gentlemen": 11692, - "gently": 17187, - "gento": 28320, - "gentri": 41148, - "gentry": 47225, - "gents": 18862, - "genu": 9182, - "genuine": 12184, - "genuinely": 20006, - "genus": 38161, - "geny": 35323, - "geo": 5038, - "geo": 11604, - "geocaching": 47908, - "geof": 20629, - "geoff": 33697, - "geoff": 20386, - "geoffrey": 29520, - "geograph": 45920, - "geographic": 22635, - "geographical": 39380, - "geography": 17101, - "geological": 38380, - "geology": 21578, - "geom": 46135, - "geome": 12958, - "geometric": 22419, - "geometry": 21731, - "geon": 20844, - "geon": 7295, - "geons": 15914, - "geopol": 39758, - "geor": 2549, - "georg": 43126, - "george": 8377, - "george": 3296, - "georges": 25042, - "georgetown": 22970, - "georgie": 42115, - "georgina": 43892, - "geospatial": 46238, - "geothermal": 38413, - "geous": 3068, - "ger": 1291, - "ger": 1502, - "gera": 48867, - "gerald": 29901, - "gerald": 13269, - "gerard": 35979, - "gerard": 20826, - "gerber": 45058, - "gered": 40179, - "geri": 41664, - "geri": 46214, - "gering": 24077, - "germain": 38786, - "german": 14972, - "german": 4710, - "germans": 28400, - "germany": 4464, - "germin": 44721, - "germs": 47731, - "geronimo": 45171, - "gerrard": 26538, - "gerry": 29825, - "gerry": 23026, - "gers": 3314, - "gertrude": 46950, - "gervais": 36527, - "gery": 32845, - "ges": 3316, - "gest": 11843, - "gest": 2033, - "gesture": 21780, - "gestures": 43524, - "get": 5670, - "get": 779, - "geta": 13155, - "getaway": 16131, - "gether": 27224, - "getic": 20661, - "getin": 25822, - "getit": 44891, - "getit": 48315, - "getoutside": 35644, - "gets": 39448, - "gets": 2127, - "gett": 6647, - "gett": 27965, - "gettable": 15620, - "gette": 29800, - "gettin": 13428, - "getting": 30885, - "getting": 1500, - "getty": 31185, - "getty": 13965, - 
"gettys": 35189, - "gettysburg": 37062, - "getyour": 42159, - "gey": 29289, - "gf": 28953, - "gf": 10846, - "gfriend": 35245, - "gfs": 37553, - "gg": 1129, - "gg": 3286, - "gga": 26003, - "ggan": 25626, - "gge": 21521, - "gge": 31659, - "gged": 6095, - "gger": 12367, - "gger": 3493, - "ggers": 7480, - "ggg": 20143, - "gggg": 33513, - "ggi": 21662, - "ggin": 17160, - "gging": 4966, - "ggins": 12444, - "ggle": 34981, - "ggle": 11430, - "ggled": 46328, - "ggles": 14703, - "ggling": 16523, - "ggly": 39407, - "ggs": 4797, - "ggy": 24935, - "ggy": 6476, - "gh": 583, - "gh": 790, - "gha": 10010, - "gha": 25183, - "gham": 21456, - "ghan": 18945, - "ghan": 6624, - "ghana": 30330, - "ghana": 9731, - "ghanaian": 34223, - "ghani": 36699, - "ghar": 37334, - "ghar": 36973, - "ghat": 43989, - "ghaz": 37493, - "ghc": 42139, - "ghe": 10754, - "ghe": 28561, - "ghead": 40783, - "ghee": 34794, - "gher": 21542, - "gher": 14796, - "ghet": 18447, - "ghetti": 17485, - "ghetto": 22403, - "ghi": 22436, - "ghi": 22279, - "ghibli": 40555, - "ghj": 38439, - "ghlin": 24131, - "gho": 4307, - "ghorn": 38094, - "ghosh": 43279, - "ghoshal": 49134, - "ghost": 11417, - "ghost": 7108, - "ghostbusters": 25462, - "ghostly": 44901, - "ghosts": 16737, - "ghou": 35843, - "ghoul": 45302, - "ghouse": 38238, - "ghs": 14157, - "ght": 1413, - "ght": 630, - "ghted": 4963, - "ghter": 2427, - "ghters": 12994, - "ghtful": 8334, - "ghting": 3019, - "ghtly": 6993, - "ghtning": 39740, - "ghton": 16353, - "ghts": 1259, - "ghty": 20968, - "ghty": 5866, - "ghu": 25808, - "ghue": 45675, - "ghyun": 25010, - "ghz": 24325, - "gi": 707, - "gi": 4478, - "gia": 8864, - "giac": 35444, - "giam": 39623, - "gian": 17274, - "gian": 12866, - "gianni": 46752, - "giant": 23668, - "giant": 4687, - "giants": 7076, - "giar": 34241, - "gib": 9816, - "gibb": 18964, - "gibbons": 31974, - "gibbs": 26488, - "gibility": 33297, - "gible": 13159, - "gibr": 20206, - "gibraltar": 23988, - "gibson": 37420, - "gibson": 12178, - "gic": 27900, - "gic": 
2570, - "gical": 32973, - "gically": 26320, - "gid": 36774, - "gid": 21413, - "giddy": 40894, - "gideon": 43867, - "gidi": 30603, - "gie": 11459, - "gie": 3991, - "gier": 28974, - "gies": 5505, - "gif": 11363, - "gif": 11677, - "gifford": 47850, - "gifs": 37643, - "gift": 20569, - "gift": 2733, - "gifted": 15110, - "giftide": 20152, - "giftideas": 23487, - "gifting": 39546, - "gifts": 5836, - "gig": 26981, - "gig": 7471, - "gigab": 34530, - "gigan": 24104, - "gigantic": 31507, - "giggle": 36426, - "giggles": 42731, - "giggs": 44692, - "gigi": 44106, - "gigi": 26171, - "gigs": 20316, - "gil": 3997, - "gil": 10088, - "gila": 46952, - "gilbert": 14154, - "gilded": 44341, - "giles": 24802, - "gill": 14280, - "gill": 12003, - "gille": 29610, - "gilles": 39590, - "gillespie": 36242, - "gillette": 38603, - "gilli": 13695, - "gillian": 28753, - "gills": 48851, - "gilmore": 27603, - "gilt": 44378, - "gim": 31284, - "gimm": 40692, - "gimme": 21525, - "gin": 3374, - "gin": 4941, - "gina": 15604, - "gine": 27482, - "ging": 10829, - "ging": 3905, - "ginger": 16287, - "ginger": 9718, - "gingerbread": 23692, - "gini": 35768, - "gino": 36521, - "gins": 18328, - "gio": 16329, - "gio": 8050, - "gion": 41226, - "gior": 14920, - "giorgio": 33271, - "giorno": 33310, - "gios": 41927, - "gious": 14419, - "giov": 21404, - "giovanni": 26574, - "gipp": 41351, - "gir": 1077, - "gir": 25481, - "gira": 16949, - "giraffe": 22826, - "giri": 31709, - "girl": 3914, - "girl": 1611, - "girlfriend": 8217, - "girlfriends": 30736, - "girlpower": 37433, - "girls": 15480, - "girls": 1917, - "girly": 29605, - "giro": 39664, - "giro": 26454, - "girona": 47842, - "giroud": 41177, - "gis": 16266, - "gis": 12773, - "gist": 21241, - "git": 16060, - "git": 20918, - "gita": 40838, - "github": 31196, - "giu": 17931, - "giuli": 29762, - "giuliani": 47739, - "giuse": 29385, - "giuseppe": 33563, - "give": 4120, - "give": 1781, - "giveaway": 5310, - "giveaways": 18974, - "giveback": 41385, - "given": 33323, - 
"given": 4302, - "givenchy": 38245, - "giver": 43339, - "gives": 3926, - "giveup": 35485, - "giving": 14673, - "giving": 2339, - "givingback": 49300, - "givingtuesday": 23556, - "giz": 29237, - "gk": 38953, - "gk": 18719, - "gl": 1849, - "gl": 14751, - "gla": 1523, - "gla": 36904, - "glaci": 14924, - "glacial": 40782, - "glacier": 19282, - "glaciers": 42528, - "glad": 20841, - "glad": 4761, - "glades": 37432, - "gladi": 21742, - "gladiator": 38477, - "gladiators": 41087, - "gladly": 41598, - "gladys": 43168, - "glam": 8738, - "glam": 16905, - "glamorous": 22896, - "glamour": 42876, - "glamour": 17499, - "glamping": 46167, - "glan": 40482, - "glan": 45844, - "glance": 26557, - "gland": 41441, - "glar": 48535, - "glar": 41702, - "glare": 46035, - "glas": 29935, - "glas": 43654, - "glasgo": 6757, - "glasgow": 29990, - "glasgow": 7363, - "glass": 16305, - "glass": 3313, - "glasses": 6116, - "glaston": 26848, - "glastonbury": 28233, - "glau": 39171, - "glaze": 28112, - "glazed": 24122, - "gle": 7166, - "gle": 2865, - "glee": 32379, - "glee": 21614, - "glen": 6158, - "glen": 11049, - "glend": 38332, - "glendale": 33043, - "glenn": 32004, - "glenn": 12861, - "gler": 34649, - "gley": 21998, - "gli": 5896, - "gli": 28791, - "glia": 22217, - "glide": 37321, - "glider": 41636, - "glimp": 12888, - "glimpse": 13817, - "glio": 29785, - "glit": 21079, - "glitch": 29563, - "glitter": 16528, - "glitz": 44542, - "glo": 1721, - "glo": 30474, - "glob": 13363, - "global": 6707, - "global": 2779, - "globalgoals": 33211, - "globalhealth": 46751, - "globalization": 47680, - "globally": 17775, - "globalwarming": 46017, - "globe": 19436, - "globe": 9368, - "globes": 38085, - "glock": 38818, - "glomer": 43689, - "gloom": 48594, - "gloomy": 32199, - "glori": 7270, - "gloria": 19244, - "glorious": 9171, - "glory": 36107, - "glory": 7285, - "glos": 40633, - "gloss": 38258, - "gloss": 22014, - "glossy": 29802, - "glou": 15989, - "gloucester": 28133, - "gloucester": 23835, - "gloucestershire": 
33789, - "glove": 16078, - "glover": 21594, - "gloves": 12363, - "glow": 30472, - "glow": 10111, - "glowing": 18437, - "glows": 48107, - "glu": 5952, - "glu": 32281, - "glucose": 34642, - "glue": 22103, - "glued": 38135, - "gluten": 15482, - "gluten": 15524, - "glutenfree": 16138, - "gly": 13027, - "glycer": 48914, - "gm": 18743, - "gm": 5918, - "gma": 18155, - "gmail": 11119, - "gman": 41043, - "gman": 36936, - "gmb": 35934, - "gmb": 31799, - "gmbh": 46877, - "gmc": 27257, - "gmo": 23486, - "gms": 36987, - "gmt": 13803, - "gn": 2455, - "gn": 9831, - "gna": 23009, - "gnation": 45912, - "gne": 25407, - "gni": 5104, - "gnment": 25110, - "gno": 23376, - "gno": 43686, - "gnocchi": 48299, - "gnome": 33643, - "gnon": 20561, - "go": 650, - "go": 861, - "goa": 14399, - "goal": 9003, - "goal": 3321, - "goalie": 20723, - "goalkeeper": 16601, - "goals": 3295, - "goalscorer": 43547, - "goaltender": 44151, - "goat": 34082, - "goat": 9530, - "goats": 18393, - "gob": 29559, - "gobeavs": 48285, - "goblin": 26223, - "goblue": 25232, - "gobucks": 29175, - "gocougs": 34202, - "god": 4190, - "god": 1731, - "godawgs": 40436, - "godbless": 46616, - "godbless": 44007, - "godd": 16589, - "goddamn": 28495, - "goddard": 37827, - "goddess": 10808, - "godfather": 26222, - "godfrey": 40148, - "godis": 38521, - "godly": 42438, - "gods": 33620, - "gods": 10328, - "goducks": 35889, - "godzilla": 23369, - "goe": 22084, - "goers": 27784, - "goes": 43581, - "goes": 2635, - "gof": 17537, - "goff": 34399, - "goftheday": 39360, - "gofund": 34445, - "gofundme": 34686, - "gog": 42949, - "goggles": 31027, - "gogh": 19697, - "gogo": 22688, - "gogreen": 36279, - "gohawks": 34884, - "goi": 24917, - "goin": 13939, - "going": 25787, - "going": 1245, - "goku": 29550, - "gol": 1537, - "gol": 18257, - "gola": 41090, - "gold": 4999, - "gold": 2209, - "goldberg": 25161, - "goldcoast": 34634, - "golden": 10763, - "golden": 3878, - "goldeng": 20650, - "goldenglobes": 26842, - "goldfish": 40293, - "goldie": 42805, - 
"goldman": 27164, - "golds": 30526, - "golds": 40283, - "goldsmith": 40214, - "gole": 41297, - "golf": 9096, - "golf": 3096, - "golfclub": 45742, - "golfer": 24579, - "golfers": 28441, - "golfing": 31379, - "goli": 29265, - "goliath": 41602, - "gom": 7051, - "goma": 46198, - "gomes": 39128, - "gomez": 16433, - "gon": 1854, - "gon": 3379, - "gona": 34835, - "gone": 35135, - "gone": 3601, - "gong": 28486, - "gonna": 2562, - "gonz": 10587, - "gonzaga": 36241, - "gonzale": 17512, - "gonzales": 31265, - "gonzalez": 18198, - "goo": 1381, - "goo": 17882, - "good": 2185, - "good": 886, - "goodbye": 6968, - "goodday": 46284, - "goode": 42076, - "goodfood": 46844, - "goodfriday": 40360, - "goodie": 29213, - "goodies": 13308, - "goodluck": 19718, - "goodman": 24146, - "goodmorning": 14421, - "goodness": 10531, - "goodnight": 8540, - "goodreads": 31629, - "goods": 9340, - "goodtimes": 22570, - "goodvibes": 43146, - "goodwill": 24902, - "goodwin": 28080, - "goodwood": 30008, - "goody": 35937, - "goodyear": 42858, - "goofy": 26879, - "goog": 18581, - "google": 12195, - "google": 3460, - "googled": 40345, - "googleplay": 37309, - "goon": 15267, - "goons": 30440, - "goooo": 35876, - "goooo": 48957, - "goose": 21445, - "goose": 13822, - "goosebumps": 32254, - "gop": 18942, - "gop": 6250, - "gopack": 46995, - "gopackgo": 47719, - "gopal": 47268, - "gopdebate": 39806, - "gopher": 47750, - "gopher": 48905, - "gophers": 31957, - "gopro": 17511, - "gor": 1747, - "gor": 29827, - "gordo": 47707, - "gordon": 20485, - "gordon": 8244, - "gore": 30311, - "gore": 17872, - "gorg": 46815, - "gorge": 35548, - "gorge": 20038, - "gorgeous": 3241, - "gori": 12461, - "goria": 43359, - "gorilla": 37910, - "gorilla": 21994, - "gorman": 35741, - "goro": 44977, - "gory": 7160, - "gos": 20517, - "gos": 5693, - "gosh": 15395, - "gosling": 35320, - "gosp": 9617, - "gospel": 11313, - "goss": 39734, - "goss": 36924, - "gossi": 15684, - "gossip": 18963, - "got": 10125, - "got": 1005, - "gota": 36693, - 
"gotcha": 43275, - "gote": 49345, - "goth": 48465, - "goth": 20437, - "gotham": 46123, - "gotham": 18299, - "gothic": 15426, - "goti": 9497, - "goto": 39715, - "gots": 35215, - "gott": 5089, - "gott": 36466, - "gotta": 4633, - "gotten": 5889, - "gotti": 41881, - "gotv": 36089, - "gou": 10520, - "gou": 36555, - "gouache": 43314, - "goul": 33187, - "gould": 31087, - "gour": 13580, - "gourmet": 19111, - "gov": 4022, - "gov": 4564, - "gove": 36997, - "govegan": 38886, - "gover": 10471, - "gover": 16759, - "govern": 2351, - "govern": 32404, - "governance": 13386, - "governing": 30946, - "government": 3149, - "governmental": 42609, - "governments": 19582, - "governor": 17459, - "governor": 6630, - "governors": 26881, - "govin": 42451, - "govt": 5345, - "govuk": 28830, - "gow": 21885, - "gow": 33788, - "gowan": 31307, - "gower": 43448, - "gown": 13719, - "gowns": 38029, - "goyal": 35105, - "gp": 19329, - "gp": 5051, - "gpa": 24098, - "gps": 13639, - "gpu": 38561, - "gq": 40286, - "gq": 31324, - "gr": 709, - "gr": 6062, - "gra": 782, - "gra": 15276, - "grab": 4646, - "grabbed": 22856, - "grabbing": 26440, - "grabs": 17076, - "grac": 11323, - "grace": 13225, - "grace": 5142, - "graced": 31894, - "graceful": 25242, - "graces": 38629, - "graci": 11174, - "gracias": 16463, - "gracie": 23235, - "gracing": 37263, - "gracious": 29044, - "grad": 19869, - "grad": 7291, - "gradable": 41529, - "grade": 45435, - "grade": 3394, - "graded": 13823, - "grader": 23930, - "graders": 10930, - "grades": 10838, - "gradient": 36885, - "grading": 19016, - "grads": 17811, - "gradu": 3230, - "gradual": 45210, - "gradually": 32192, - "graduate": 6675, - "graduated": 15128, - "graduates": 12236, - "graduating": 14819, - "graduation": 8060, - "grady": 33980, - "graeme": 30192, - "graf": 46478, - "graf": 39765, - "graff": 10656, - "graffiti": 11676, - "graft": 32698, - "grafton": 47347, - "graham": 19805, - "graham": 7711, - "grail": 37184, - "grain": 44003, - "grain": 12109, - "grains": 25791, - 
"gral": 25631, - "gram": 2949, - "gram": 2338, - "grammar": 16077, - "grammy": 15388, - "grammys": 18121, - "grams": 6294, - "gran": 3892, - "gran": 14493, - "granada": 31172, - "grand": 3058, - "grand": 2991, - "grandad": 29148, - "grandchildren": 36856, - "granddaughter": 29460, - "grande": 37514, - "grande": 10757, - "grandes": 36382, - "grandfather": 15346, - "grandma": 10525, - "grandmother": 17469, - "grandpa": 14582, - "grandparents": 21311, - "grandprix": 39358, - "grandson": 20766, - "grandstand": 43172, - "grange": 45027, - "grange": 23850, - "granger": 42968, - "granite": 18813, - "grann": 45585, - "granny": 22710, - "granola": 34271, - "grant": 18682, - "grant": 5442, - "granted": 14156, - "granth": 41283, - "grants": 15123, - "grape": 19131, - "grape": 15959, - "grapefruit": 28347, - "grapes": 18580, - "grapevine": 47619, - "graph": 1349, - "graph": 4407, - "graphene": 38387, - "grapher": 14987, - "graphers": 32088, - "graphic": 15653, - "graphic": 4245, - "graphical": 20878, - "graphicdesign": 21907, - "graphics": 9492, - "graphies": 40164, - "graphite": 29447, - "graphs": 24670, - "graphy": 4897, - "grapp": 30843, - "gras": 31517, - "gras": 17584, - "grasp": 34975, - "grass": 11584, - "grass": 5922, - "grasses": 46807, - "grasshopper": 48894, - "grassi": 42294, - "grasso": 34808, - "grassroots": 21991, - "grassy": 44140, - "grat": 9221, - "grate": 32463, - "grateful": 45659, - "grateful": 5730, - "grati": 36402, - "gratis": 33638, - "gratitude": 12614, - "grav": 20663, - "grave": 16606, - "grave": 9981, - "gravel": 27054, - "graves": 17665, - "graveyard": 31176, - "gravit": 26150, - "gravitational": 45268, - "gravity": 47426, - "gravity": 15160, - "gravy": 21225, - "gray": 12703, - "gray": 7048, - "grays": 46848, - "grayson": 45831, - "grayson": 25471, - "grazi": 42427, - "grazie": 38698, - "grazing": 29889, - "grc": 44069, - "gre": 689, - "gre": 17878, - "grease": 24132, - "greasy": 44376, - "great": 3265, - "great": 830, - "greate": 31930, - 
"greater": 32725, - "greater": 7033, - "greatest": 39080, - "greatest": 4153, - "greatly": 13978, - "greatness": 14189, - "greats": 21855, - "greaves": 42350, - "greco": 39103, - "gree": 9987, - "gree": 30774, - "greece": 6965, - "greed": 26147, - "greedy": 33301, - "greek": 23844, - "greek": 6842, - "greeks": 35866, - "green": 2762, - "green": 1901, - "greenberg": 46662, - "greene": 16383, - "greener": 31169, - "greenery": 42493, - "greenfield": 39924, - "greeng": 42077, - "greenhouse": 20819, - "greening": 48673, - "greenland": 27345, - "greenpeace": 44755, - "greens": 10235, - "greensboro": 33436, - "greenville": 25156, - "greenway": 35205, - "greenwich": 18658, - "greenwood": 25782, - "greer": 34345, - "greet": 11042, - "greet": 11997, - "greeted": 24546, - "greeting": 17754, - "greetings": 11569, - "greets": 25464, - "greg": 6894, - "greg": 7943, - "gregation": 20131, - "gregg": 39422, - "gregg": 22929, - "gregor": 33856, - "gregor": 16177, - "gregory": 16253, - "gren": 13941, - "gren": 20119, - "grenade": 33679, - "grenfell": 42107, - "gres": 39670, - "gress": 2752, - "gret": 30041, - "greta": 33443, - "gretchen": 45516, - "grette": 38774, - "grew": 10451, - "grey": 9190, - "grey": 5046, - "greyhound": 27363, - "greyhounds": 45718, - "greys": 44311, - "greysanatomy": 36833, - "gri": 2169, - "gri": 18484, - "grid": 29067, - "grid": 9882, - "gridi": 41063, - "gridiron": 47786, - "grids": 46500, - "grief": 21058, - "grier": 22016, - "griev": 36400, - "grieving": 42383, - "griez": 47962, - "griezmann": 48396, - "griff": 17855, - "griff": 35551, - "griffi": 28676, - "griffin": 46612, - "griffin": 13161, - "griffith": 24375, - "griffiths": 34182, - "gril": 49091, - "grill": 44083, - "grill": 9519, - "grille": 34748, - "grilled": 10691, - "grilling": 28324, - "grills": 39464, - "grim": 20383, - "grim": 23635, - "grime": 37101, - "grimes": 25057, - "grimm": 27865, - "grims": 34861, - "grimsby": 41513, - "grin": 11033, - "grin": 28697, - "grinch": 40527, - "grind": 
25730, - "grind": 11810, - "grinder": 31733, - "grinding": 21541, - "gring": 40135, - "grip": 15521, - "gripping": 34567, - "grips": 27819, - "gris": 29150, - "grit": 22037, - "grit": 22087, - "grits": 44307, - "gritty": 33704, - "grizz": 14877, - "grizz": 44088, - "grizzlies": 25594, - "grizzly": 29676, - "grl": 48005, - "gro": 1464, - "gro": 12691, - "grocer": 11633, - "groceries": 32409, - "grocery": 13826, - "grom": 45284, - "gron": 22345, - "groningen": 45639, - "groo": 9015, - "groom": 39883, - "groom": 22813, - "grooming": 25575, - "groot": 37708, - "groove": 39484, - "groove": 17680, - "grooves": 43954, - "groovy": 30143, - "gros": 26834, - "gros": 32639, - "gross": 31080, - "gross": 11541, - "grosven": 46911, - "grote": 47207, - "grotto": 45260, - "grou": 1582, - "groun": 45110, - "ground": 9558, - "ground": 2461, - "groundbreaking": 21006, - "grounded": 27799, - "grounds": 8454, - "groundwater": 39457, - "group": 19045, - "group": 1771, - "groupe": 47654, - "groups": 6776, - "grouse": 36327, - "grove": 31756, - "grove": 7463, - "grover": 31345, - "groves": 27306, - "grow": 3179, - "grow": 4559, - "grower": 44925, - "growers": 25689, - "growing": 28429, - "growing": 4425, - "growingup": 43433, - "growler": 47096, - "grown": 41762, - "grown": 7120, - "grows": 13352, - "growth": 17925, - "growth": 4026, - "growthhacking": 25963, - "grp": 27321, - "grt": 28557, - "gru": 5957, - "grub": 34019, - "grue": 42047, - "gruesome": 47111, - "grum": 45454, - "grump": 49015, - "grumpy": 23610, - "grun": 16203, - "grunge": 33745, - "gry": 16140, - "gry": 5364, - "gs": 25818, - "gs": 1345, - "gsa": 40433, - "gsc": 47751, - "gshore": 43392, - "gsm": 32181, - "gsp": 49173, - "gst": 22239, - "gt": 16151, - "gt": 4725, - "gta": 14826, - "gta": 15338, - "gtaonline": 27292, - "gtav": 27283, - "gti": 39954, - "gto": 39071, - "gtr": 33407, - "gts": 37338, - "gtx": 35230, - "gu": 700, - "gu": 12916, - "gua": 23751, - "guacam": 37477, - "guacamole": 40115, - "guad": 22966, - 
"guadal": 46097, - "guadalu": 36994, - "guadalupe": 38360, - "guam": 37325, - "guan": 44191, - "guan": 42406, - "guang": 27019, - "guangzhou": 37857, - "guar": 4119, - "guaran": 9242, - "guarantee": 17421, - "guaranteed": 14731, - "guarantees": 40154, - "guard": 30776, - "guard": 4901, - "guarded": 40602, - "guardi": 12008, - "guardia": 43628, - "guardian": 23713, - "guardian": 9498, - "guardians": 21479, - "guarding": 24966, - "guardiola": 32100, - "guards": 12810, - "guatem": 19423, - "guatemala": 21670, - "guay": 48591, - "guay": 24247, - "gubernat": 41400, - "gubernatorial": 41618, - "gucci": 16779, - "gud": 48061, - "gud": 22378, - "gue": 2030, - "gue": 2917, - "gued": 38893, - "guel": 23146, - "guelph": 27660, - "guer": 10391, - "guern": 29277, - "guernsey": 33982, - "guerra": 38215, - "guerrero": 31967, - "guerrilla": 36715, - "gues": 39971, - "gues": 12601, - "guess": 35506, - "guess": 3135, - "guessed": 28005, - "guesses": 30623, - "guessing": 21891, - "guest": 27349, - "guest": 3781, - "guests": 6212, - "guet": 36797, - "guetta": 45904, - "guez": 12313, - "gug": 31358, - "guggen": 35086, - "guggenheim": 37135, - "gui": 2587, - "gui": 25746, - "guid": 11437, - "guidance": 12508, - "guide": 21845, - "guide": 3555, - "guided": 13194, - "guidelines": 16591, - "guides": 14375, - "guiding": 22759, - "guido": 41818, - "guil": 5008, - "guild": 19755, - "guild": 16597, - "guildford": 34450, - "guildhall": 47224, - "guillau": 41123, - "guillaume": 45394, - "guiller": 33660, - "guillermo": 39524, - "guilt": 26354, - "guilty": 9761, - "guin": 13284, - "guin": 47863, - "guine": 13759, - "guinea": 18537, - "guinness": 16648, - "guire": 18209, - "guise": 42024, - "guit": 3759, - "guitar": 21746, - "guitar": 5084, - "guitarist": 13035, - "guitars": 15023, - "guj": 34935, - "gujar": 12698, - "gujarat": 14714, - "guk": 20280, - "gul": 5530, - "gul": 21350, - "gula": 27426, - "gular": 34969, - "gulf": 22101, - "gulf": 11279, - "gull": 48764, - "gull": 28778, - "gulls": 
37501, - "gully": 46112, - "gum": 22041, - "gum": 11235, - "gumb": 40147, - "gumbo": 47126, - "gummy": 34276, - "gums": 46609, - "gun": 2748, - "gun": 3496, - "guna": 43333, - "gundam": 26087, - "gundy": 21162, - "gunman": 32743, - "gunmen": 44738, - "gunn": 27473, - "gunna": 24002, - "gunnar": 45301, - "gunner": 35285, - "gunners": 37788, - "guns": 7591, - "gunsense": 44781, - "gunshot": 49250, - "gunsn": 49028, - "gup": 38632, - "gup": 47335, - "gupta": 15905, - "gur": 3218, - "gur": 30224, - "gura": 46836, - "gurgaon": 33240, - "guri": 43888, - "gurl": 25445, - "gurmee": 35482, - "gurmeetramrahim": 36549, - "guru": 18629, - "guru": 10800, - "gurudev": 48647, - "gus": 8018, - "gust": 24629, - "gusta": 23024, - "gusta": 44196, - "gustav": 32062, - "gustav": 37921, - "gustave": 43170, - "gustavo": 45943, - "gusto": 37937, - "gusts": 20896, - "gusty": 27589, - "gut": 24780, - "gut": 13486, - "guter": 44963, - "guterres": 48738, - "guth": 31696, - "guthrie": 33164, - "gutier": 32773, - "gutierrez": 33739, - "guts": 25983, - "gutted": 26524, - "gutter": 40537, - "guwa": 43063, - "guwahati": 45045, - "guy": 10008, - "guy": 2149, - "guyana": 45215, - "guyen": 28031, - "guys": 43588, - "guys": 1791, - "guyz": 48170, - "guzman": 37960, - "gv": 15462, - "gv": 17336, - "gw": 7172, - "gw": 15717, - "gwen": 32165, - "gwen": 24182, - "gwin": 43005, - "gwy": 32226, - "gwyne": 36923, - "gx": 40227, - "gy": 2168, - "gy": 1164, - "gya": 43214, - "gyan": 43814, - "gye": 21728, - "gyllen": 49348, - "gym": 9902, - "gym": 5222, - "gymna": 13517, - "gymnasium": 42847, - "gymnast": 42658, - "gymnastics": 20116, - "gyn": 39603, - "gyne": 45836, - "gyp": 40053, - "gypsy": 22354, - "gypt": 41921, - "gz": 45937, - "gz": 35841, - "gö": 40778, - "gü": 31907, - "h": 71, - "h": 327, - "ha": 560, - "ha": 1429, - "haa": 26814, - "haal": 35869, - "haan": 36284, - "haar": 45247, - "haar": 35859, - "haas": 27443, - "haasan": 26601, - "hab": 20573, - "hab": 20002, - "haban": 46225, - "haber": 44737, 
- "habit": 8491, - "habit": 17215, - "habitat": 11747, - "habitats": 35344, - "habits": 14540, - "habs": 27489, - "hac": 20343, - "hace": 43623, - "haci": 40674, - "hack": 6610, - "hack": 11182, - "hackathon": 25182, - "hacked": 19575, - "hacker": 22376, - "hackers": 21498, - "hacking": 12939, - "hackney": 48811, - "hackney": 24928, - "hacks": 19965, - "had": 10660, - "had": 1100, - "hadi": 39058, - "hadid": 26415, - "hadith": 46907, - "hadley": 44995, - "hadn": 21480, - "hadoop": 43868, - "hae": 30723, - "hae": 27193, - "hafi": 39914, - "hag": 26855, - "hag": 43207, - "hagan": 47489, - "hagen": 14664, - "hager": 48773, - "hagg": 26324, - "hague": 28988, - "hah": 18108, - "hah": 13680, - "haha": 1913, - "haha": 3060, - "hahah": 27253, - "hahah": 15441, - "hahaha": 4722, - "hahahah": 37513, - "hahahah": 20096, - "hahahaha": 8058, - "hahahaha": 9501, - "hahahahah": 33334, - "hahahahaha": 16347, - "hahahahahaha": 26487, - "hahahahahahaha": 43653, - "hahahahahahahaha": 36126, - "hahahha": 49205, - "hahn": 35596, - "hai": 8734, - "hai": 5234, - "haider": 42200, - "haiku": 19542, - "hail": 15272, - "hail": 8634, - "hailed": 44604, - "hailey": 27703, - "hailing": 47288, - "hails": 32571, - "hailstate": 35063, - "hain": 23861, - "hair": 4658, - "hair": 2225, - "haircare": 43682, - "haircut": 14711, - "hairdresser": 47468, - "haired": 27202, - "hairs": 27951, - "hairstyle": 22324, - "hairstyles": 40627, - "hairy": 26513, - "haiti": 17368, - "haitian": 37577, - "haj": 27885, - "haj": 43191, - "haji": 41889, - "hajj": 35576, - "hak": 25142, - "hak": 40671, - "haka": 44011, - "hake": 41663, - "hal": 1296, - "hal": 8708, - "hala": 25918, - "halal": 34216, - "halam": 29061, - "halamadrid": 31132, - "halder": 32201, - "hale": 37038, - "hale": 14701, - "halen": 39204, - "halep": 49017, - "haley": 37330, - "haley": 16839, - "half": 7453, - "half": 2349, - "halftime": 13742, - "halfway": 16736, - "hali": 9860, - "hali": 43030, - "halibut": 49030, - "halifax": 13411, - "hall": 6850, 
- "hall": 2140, - "halla": 29569, - "halle": 27763, - "halle": 32239, - "hallelujah": 36993, - "halli": 32665, - "hallmark": 31040, - "hallmark": 32053, - "hallmarkchannel": 36840, - "hallo": 3463, - "halloffame": 48578, - "halloween": 28537, - "halloween": 3739, - "halls": 18052, - "hallucin": 35385, - "hallway": 26845, - "halo": 33331, - "halo": 11918, - "halsey": 34256, - "halt": 25640, - "halter": 47194, - "halton": 45445, - "ham": 1522, - "ham": 1714, - "hama": 17944, - "hamas": 14818, - "hamburg": 18409, - "hamburger": 33928, - "hamid": 32377, - "hamil": 6725, - "hamill": 45784, - "hamill": 48729, - "hamillhimself": 47324, - "hamilton": 22448, - "hamilton": 7684, - "hamlet": 27722, - "hamlin": 49326, - "hamm": 46110, - "hammer": 15331, - "hammer": 9401, - "hammered": 37251, - "hammers": 35649, - "hammersmith": 42127, - "hammock": 33682, - "hammond": 21761, - "hamont": 18518, - "hamp": 6665, - "hamper": 27692, - "hampshire": 16006, - "hampstead": 37340, - "hampton": 36582, - "hampton": 12285, - "hamptons": 42415, - "hamr": 47979, - "hamradio": 36712, - "hams": 25619, - "hamster": 33313, - "hamstring": 39990, - "hamza": 45762, - "han": 1545, - "han": 3565, - "hana": 16801, - "hand": 1722, - "hand": 2463, - "handbag": 22654, - "handbags": 35667, - "handball": 27988, - "handbook": 25147, - "handcrafted": 22185, - "handed": 10881, - "handedly": 48656, - "handel": 40072, - "handful": 23725, - "handheld": 26812, - "handic": 17812, - "handicap": 27063, - "handicapp": 42349, - "handing": 19196, - "handle": 43681, - "handle": 7245, - "handled": 26824, - "handler": 29097, - "handles": 22124, - "handling": 14071, - "handmade": 18054, - "handmade": 6737, - "handmadehour": 25724, - "handover": 46922, - "hands": 3500, - "handshake": 38418, - "handsome": 7438, - "handwriting": 29986, - "handwritten": 35192, - "handy": 13479, - "hane": 28411, - "hang": 3351, - "hang": 5592, - "hangar": 33439, - "hanged": 40807, - "hanger": 28905, - "hangin": 22670, - "hanging": 4850, - 
"hangout": 17572, - "hangover": 20755, - "hangs": 21785, - "hani": 39944, - "hani": 18374, - "hank": 35993, - "hank": 17655, - "hanks": 29943, - "hanley": 47284, - "hann": 5584, - "hanna": 10075, - "hannah": 18622, - "hannah": 9142, - "hannel": 43477, - "hanni": 19493, - "hannibal": 25149, - "hannity": 24569, - "hannover": 39976, - "hanoi": 36134, - "hanover": 33246, - "hans": 35172, - "hans": 16628, - "hansen": 19729, - "hanson": 24602, - "hant": 40641, - "hanuk": 32774, - "hanukkah": 34247, - "hanuman": 46975, - "hao": 27184, - "hap": 44981, - "hap": 47988, - "happ": 784, - "happen": 21486, - "happen": 4506, - "happened": 4402, - "happening": 4284, - "happeningnow": 43107, - "happenings": 41998, - "happens": 4988, - "happier": 14118, - "happiest": 13811, - "happily": 17316, - "happiness": 5096, - "happy": 2952, - "happy": 900, - "happybirthday": 9651, - "happybirthday": 12207, - "happydays": 25106, - "happye": 33922, - "happyeaster": 38745, - "happyfathersday": 43534, - "happyfriday": 33340, - "happyhalloween": 28750, - "happyholidays": 32186, - "happyhour": 32036, - "happymonday": 47364, - "happymothersday": 42425, - "happynewyear": 18655, - "happythanksgiving": 40593, - "happyvalentinesday": 42403, - "haps": 9114, - "haq": 32445, - "har": 915, - "har": 5888, - "hara": 10367, - "haram": 35732, - "haram": 22950, - "haran": 27921, - "harare": 43562, - "haras": 26644, - "harass": 16481, - "harassed": 43067, - "harassment": 16641, - "harat": 28984, - "harb": 5856, - "harbaugh": 45220, - "harbor": 40686, - "harbor": 10202, - "harbour": 35430, - "harbour": 10011, - "harcourt": 48093, - "hard": 3312, - "hard": 1626, - "hardcover": 31123, - "harden": 27350, - "harder": 12274, - "hardest": 15258, - "hardin": 43802, - "harding": 24382, - "hardly": 17363, - "hardro": 28126, - "hardrock": 48365, - "hardrock": 40739, - "hards": 44048, - "hardship": 45085, - "hardt": 17922, - "hardware": 11957, - "hardwell": 45572, - "hardwick": 46864, - "hardwood": 28167, - "hardwork": 
42554, - "hardwork": 27404, - "hardworking": 28095, - "hardworkpaysoff": 49193, - "hardy": 48179, - "hardy": 14113, - "hare": 27903, - "hare": 18464, - "harga": 39738, - "hari": 25472, - "hari": 8981, - "harlan": 49133, - "harle": 29096, - "harlem": 17771, - "harley": 24702, - "harley": 13632, - "harleydavidson": 39183, - "harlow": 34113, - "harm": 16656, - "harm": 14452, - "harman": 42434, - "harmed": 39637, - "harmful": 21725, - "harmless": 44369, - "harmon": 10828, - "harmon": 28729, - "harmony": 10785, - "harms": 46703, - "harne": 43323, - "harness": 23205, - "harold": 16917, - "harp": 27339, - "harper": 31288, - "harper": 12634, - "harri": 6639, - "harrier": 37372, - "harriet": 27154, - "harrington": 34340, - "harris": 25356, - "harris": 6925, - "harrisburg": 40590, - "harrison": 34389, - "harrison": 10540, - "harro": 18939, - "harrogate": 30842, - "harrow": 38807, - "harry": 11094, - "harry": 3600, - "harrypotter": 23375, - "harsh": 30596, - "harsh": 16944, - "hart": 9335, - "hart": 7752, - "hartford": 23434, - "harth": 35619, - "hartle": 47482, - "hartley": 31268, - "hartman": 43294, - "haru": 35099, - "harvard": 28118, - "harvard": 12848, - "harve": 6405, - "harvest": 44495, - "harvest": 8971, - "harvested": 35899, - "harvesting": 26674, - "harvey": 33289, - "harvey": 9586, - "harvick": 46983, - "haryana": 27661, - "has": 13855, - "has": 791, - "hasan": 30049, - "hasbro": 37405, - "hash": 6338, - "hash": 19199, - "hashi": 41831, - "hashmi": 35852, - "hashtag": 34015, - "hashtag": 9238, - "hashtags": 23514, - "haskell": 48550, - "hasn": 9143, - "hass": 9298, - "hassan": 15829, - "hassee": 37117, - "hassel": 32204, - "hassle": 35762, - "hast": 18146, - "hasta": 36623, - "hastings": 22035, - "hat": 3447, - "hat": 3801, - "hatch": 24202, - "hatch": 17809, - "hatchback": 42348, - "hatched": 42158, - "hate": 23546, - "hate": 3753, - "hated": 21298, - "hateful": 36418, - "hater": 36917, - "haters": 14027, - "hates": 14957, - "hatfield": 38448, - "hath": 27894, - 
"hath": 34416, - "hathaway": 31801, - "hati": 26045, - "hating": 25668, - "hatred": 19046, - "hats": 9812, - "hatt": 8747, - "hatton": 44861, - "hau": 5152, - "hauer": 48751, - "haul": 23743, - "haul": 12332, - "hauled": 46620, - "hauling": 43132, - "haun": 9676, - "haunt": 31039, - "haunted": 14944, - "haunting": 24034, - "haunts": 48035, - "haus": 41755, - "haus": 16478, - "hausen": 33338, - "hauser": 46586, - "haute": 28854, - "hav": 13443, - "hav": 20447, - "havan": 36304, - "havana": 23357, - "havas": 46261, - "have": 18053, - "have": 720, - "haven": 33074, - "haven": 3871, - "havent": 29130, - "haver": 27876, - "haves": 49088, - "havin": 31937, - "having": 1977, - "havoc": 24447, - "haw": 2788, - "haw": 26954, - "hawa": 6067, - "hawa": 46278, - "hawai": 15800, - "hawaii": 32413, - "hawaii": 8265, - "hawaiian": 17734, - "hawan": 27765, - "hawk": 14704, - "hawk": 8218, - "hawke": 38178, - "hawker": 39051, - "hawkeye": 38666, - "hawkeyes": 34266, - "hawking": 33437, - "hawkins": 19740, - "hawks": 44806, - "hawks": 5841, - "hawthorn": 45372, - "hawthorne": 36730, - "hay": 4871, - "hay": 11367, - "haya": 41325, - "hayat": 49360, - "hayden": 19806, - "haydn": 48207, - "haye": 36583, - "hayes": 13555, - "hayley": 39986, - "hayley": 22204, - "haynes": 30496, - "hays": 41524, - "hayward": 29400, - "haz": 5040, - "haz": 39921, - "hazard": 26174, - "hazard": 15178, - "hazardous": 27102, - "hazards": 30639, - "haze": 22785, - "hazel": 19838, - "hazel": 21882, - "hazelnut": 35816, - "hazi": 22740, - "hazmat": 48887, - "hazrat": 45775, - "hazy": 32655, - "hb": 6854, - "hb": 12576, - "hbcu": 40008, - "hbd": 25277, - "hbd": 13594, - "hbo": 15252, - "hc": 15831, - "hc": 7821, - "hcs": 46850, - "hd": 11601, - "hd": 4414, - "hdd": 40508, - "hdmi": 33302, - "hdr": 28065, - "he": 651, - "he": 797, - "hea": 27150, - "hea": 32790, - "head": 1603, - "head": 1375, - "headache": 23849, - "headaches": 38025, - "headband": 28556, - "headed": 6153, - "header": 11077, - "heading": 4409, - 
"headless": 45219, - "headlights": 42422, - "headline": 10891, - "headliner": 38880, - "headlines": 14706, - "headlining": 26971, - "headphone": 37524, - "headphones": 14906, - "headquarters": 13041, - "heads": 5174, - "headset": 23883, - "headshot": 34890, - "heal": 1231, - "heal": 13833, - "healed": 31456, - "healer": 38328, - "healey": 38985, - "healing": 9295, - "heals": 32384, - "health": 2145, - "health": 1728, - "healthand": 43704, - "healthcare": 42500, - "healthcare": 6023, - "healthier": 18242, - "healthtech": 42694, - "healthy": 10330, - "healthy": 3782, - "healthye": 31532, - "healthyeating": 33761, - "healthyfood": 39996, - "healthylifestyle": 46254, - "healthyliving": 27293, - "healy": 34299, - "heap": 34781, - "heaps": 44446, - "hear": 2749, - "hear": 2584, - "heard": 4063, - "hearing": 46353, - "hearing": 5541, - "hearings": 33175, - "hearn": 36613, - "hears": 25395, - "heart": 4975, - "heart": 1936, - "heartbeat": 29154, - "heartbreak": 29281, - "heartbreaking": 21322, - "heartbroken": 35383, - "hearted": 21679, - "heartfelt": 22904, - "hearth": 31563, - "hearthstone": 34054, - "hearti": 29345, - "hearties": 44572, - "heartland": 31923, - "heartless": 47022, - "heartnews": 40426, - "hearts": 5516, - "heartw": 30002, - "heartwarming": 34080, - "hearty": 26994, - "heat": 12175, - "heat": 4403, - "heated": 17057, - "heater": 23246, - "heath": 12794, - "heath": 11719, - "heather": 20230, - "heather": 12470, - "heathrow": 24171, - "heating": 12478, - "heaton": 34557, - "heats": 36106, - "heatwave": 25726, - "heav": 2409, - "heaven": 15520, - "heaven": 5545, - "heavenly": 19117, - "heavens": 26026, - "heavier": 31253, - "heaviest": 33268, - "heavily": 14123, - "heavy": 12048, - "heavy": 4200, - "heavymetal": 39804, - "heavyweight": 17448, - "heb": 24700, - "heb": 34515, - "hebdo": 41817, - "hebrew": 27298, - "hebrides": 45121, - "hebron": 45725, - "hec": 18932, - "heck": 22985, - "heck": 14427, - "hectares": 44162, - "hectic": 37245, - "hector": 25852, - 
"hed": 18271, - "hedge": 16229, - "hedge": 20294, - "hedgehog": 21940, - "hedges": 41345, - "hee": 18364, - "hee": 15773, - "heechul": 42487, - "heed": 15118, - "heel": 33646, - "heel": 16861, - "heels": 10909, - "heem": 30061, - "heer": 40473, - "hef": 29473, - "heff": 48756, - "hefty": 48584, - "heg": 41995, - "heh": 25834, - "hehe": 48723, - "hehe": 10658, - "hehehe": 24138, - "hei": 6101, - "hei": 29051, - "heidel": 42927, - "heidelberg": 48445, - "heidi": 44860, - "heidi": 23867, - "heifer": 48219, - "heigh": 43883, - "height": 10788, - "heights": 8418, - "heim": 10931, - "heim": 9768, - "heimer": 39517, - "hein": 15487, - "hein": 43206, - "heine": 28742, - "heineken": 36874, - "heinrich": 47877, - "heinz": 32359, - "heir": 27083, - "heir": 34007, - "heirloom": 34232, - "heirs": 43834, - "heis": 21849, - "heisman": 34537, - "heist": 31035, - "heit": 37255, - "hel": 919, - "hel": 11579, - "hela": 48212, - "held": 4042, - "hele": 46129, - "helen": 17576, - "helen": 11291, - "helena": 23109, - "helene": 41591, - "helens": 45940, - "heli": 33874, - "heli": 40183, - "helicop": 10035, - "helicopter": 11956, - "helicopters": 26922, - "helium": 46505, - "helix": 35247, - "hell": 8410, - "hell": 4141, - "hella": 19800, - "hellboy": 48428, - "helle": 48600, - "helle": 46968, - "hellenic": 42544, - "heller": 44464, - "hello": 12887, - "hello": 3306, - "hells": 47989, - "helly": 48690, - "helm": 47970, - "helm": 19520, - "helmet": 11122, - "helmets": 21843, - "help": 8641, - "help": 1318, - "helped": 4845, - "helper": 29321, - "helpers": 36316, - "helpful": 12695, - "helping": 3875, - "helpless": 47638, - "helpline": 43101, - "helps": 5144, - "helsin": 17842, - "helsinki": 19626, - "hem": 20270, - "hem": 11148, - "hemi": 14256, - "hemi": 46856, - "heming": 30819, - "hemingway": 33470, - "hemisphere": 32767, - "hemmings": 34882, - "hemo": 43788, - "hemp": 28225, - "hemp": 18467, - "hems": 32451, - "hemsworth": 39428, - "hen": 2385, - "hen": 8047, - "hence": 23640, - 
"hend": 11560, - "hender": 49248, - "henderson": 14348, - "hendrick": 45296, - "hendricks": 37588, - "hendrix": 23605, - "henge": 33104, - "henley": 27853, - "henna": 39455, - "hennessy": 42667, - "henri": 19431, - "henri": 21610, - "henrik": 35772, - "henry": 16018, - "henry": 5508, - "hens": 31742, - "henson": 32935, - "hep": 17724, - "hep": 48791, - "hepat": 23767, - "hepatitis": 32169, - "hepburn": 26348, - "her": 1223, - "her": 899, - "hera": 38724, - "heral": 37809, - "herald": 27625, - "herald": 12851, - "herb": 26116, - "herb": 15302, - "herbal": 21868, - "herbali": 44087, - "herbalife": 48364, - "herbert": 19935, - "herbs": 17320, - "hercules": 26539, - "herd": 36142, - "herd": 18589, - "here": 9134, - "here": 763, - "hered": 47976, - "hereford": 35543, - "heres": 13566, - "hereto": 47673, - "heri": 31392, - "herit": 4720, - "heritag": 38273, - "heritage": 20962, - "heritage": 5455, - "herman": 31890, - "herman": 21568, - "hermann": 40942, - "hermes": 34563, - "hermi": 35265, - "hermione": 45502, - "hermit": 43953, - "hermitage": 47706, - "hermo": 40967, - "hermosa": 42531, - "hern": 30571, - "hern": 43576, - "hernandez": 17707, - "hero": 7338, - "hero": 3756, - "heroes": 38010, - "heroes": 5506, - "heroic": 24255, - "heroin": 23841, - "heroine": 27420, - "heron": 22593, - "heros": 37642, - "herr": 38537, - "herrera": 27755, - "herring": 30211, - "hers": 25359, - "herself": 9207, - "hersh": 20379, - "hershey": 29734, - "hert": 26744, - "hertfordshire": 41070, - "herts": 35784, - "herty": 23454, - "hertz": 49383, - "hes": 30553, - "hes": 12784, - "hesit": 23933, - "hesitate": 34967, - "hess": 41888, - "hester": 31105, - "het": 37527, - "het": 19678, - "hetero": 26405, - "heu": 20105, - "heughan": 32298, - "hew": 48141, - "hew": 43051, - "hewitt": 28871, - "hex": 16255, - "hex": 31241, - "hey": 10759, - "hey": 2189, - "hez": 34591, - "hezbollah": 37636, - "hf": 26606, - "hf": 20603, - "hfx": 47297, - "hg": 23986, - "hg": 26237, - "hgtv": 47657, - "hh": 3280, 
- "hh": 5180, - "hhh": 8281, - "hhhh": 19391, - "hhhh": 13121, - "hhhhh": 24246, - "hhhhhh": 37278, - "hhs": 27006, - "hi": 677, - "hi": 1883, - "hia": 20672, - "hiatus": 27823, - "hib": 15922, - "hiber": 38799, - "hibis": 36226, - "hibiscus": 36460, - "hibition": 24658, - "hibs": 42814, - "hic": 3549, - "hic": 38079, - "hick": 14813, - "hickman": 49148, - "hickory": 29905, - "hicks": 23429, - "hid": 15552, - "hid": 14451, - "hidalgo": 47464, - "hidden": 28305, - "hidden": 7029, - "hiddleston": 31444, - "hide": 17725, - "hide": 9379, - "hideous": 46588, - "hides": 30800, - "hiding": 11371, - "hie": 15763, - "hier": 23433, - "hier": 29913, - "hierarchy": 44442, - "hifi": 38168, - "hig": 38108, - "higgins": 21783, - "high": 1487, - "high": 1400, - "higher": 5321, - "highered": 27072, - "highest": 5317, - "highland": 32244, - "highland": 16062, - "highlander": 46251, - "highlanders": 40445, - "highlands": 16883, - "highlight": 8264, - "highlighted": 22252, - "highlighter": 45460, - "highlighting": 17344, - "highlights": 6173, - "highly": 5302, - "highness": 38694, - "highs": 15144, - "highschool": 23102, - "highway": 45344, - "highway": 7620, - "highways": 28007, - "higu": 39115, - "hihi": 36240, - "hii": 42315, - "hijab": 31407, - "hika": 41356, - "hikari": 44624, - "hike": 9404, - "hiked": 36471, - "hiker": 40947, - "hikers": 46090, - "hikes": 27076, - "hiking": 9118, - "hiko": 48708, - "hil": 3508, - "hil": 17927, - "hila": 38837, - "hilar": 37337, - "hilari": 7784, - "hilarious": 8358, - "hilariously": 43476, - "hilary": 45898, - "hilary": 25415, - "hilde": 45382, - "hill": 3671, - "hill": 2682, - "hillary": 13257, - "hillary": 7074, - "hillaryclinton": 15357, - "hilli": 32513, - "hills": 24178, - "hills": 5289, - "hillsborough": 32157, - "hillside": 37194, - "hilltop": 45858, - "hilly": 32483, - "hilton": 33621, - "hilton": 14012, - "him": 4128, - "him": 1269, - "himach": 29132, - "himachal": 35461, - "himalay": 17552, - "himalayan": 30318, - "himalayas": 32872, 
- "hime": 45892, - "himself": 4530, - "himss": 41730, - "hin": 1676, - "hin": 37930, - "hina": 40571, - "hinakhan": 45518, - "hinch": 49320, - "hind": 34460, - "hind": 23293, - "hindi": 14967, - "hinds": 47859, - "hindu": 17587, - "hindu": 12053, - "hinduism": 40592, - "hindus": 25701, - "hindustan": 46553, - "hines": 37462, - "hing": 37968, - "hini": 33564, - "hino": 45343, - "hint": 11868, - "hinton": 47165, - "hints": 20594, - "hio": 32897, - "hip": 11725, - "hip": 6584, - "hipho": 8819, - "hiphop": 26598, - "hiphop": 10914, - "hipp": 13607, - "hippie": 28637, - "hippo": 28398, - "hippo": 36729, - "hips": 30191, - "hipstamatic": 31002, - "hipster": 19987, - "hipsters": 48265, - "hir": 4959, - "hir": 14728, - "hira": 42577, - "hire": 32356, - "hire": 8243, - "hired": 17602, - "hires": 24133, - "hiring": 7835, - "hiro": 17396, - "hiro": 20588, - "hiroshima": 33867, - "hirsch": 46967, - "his": 15211, - "his": 787, - "hism": 23502, - "hispan": 16843, - "hispanic": 22676, - "hist": 21710, - "hist": 13779, - "histo": 33479, - "histor": 2993, - "historia": 46010, - "historian": 20697, - "historians": 35200, - "historic": 30195, - "historic": 5726, - "historical": 34154, - "historical": 8039, - "historically": 30445, - "histories": 34736, - "history": 11142, - "history": 1695, - "historymonth": 19356, - "historyof": 35905, - "hit": 5453, - "hit": 2341, - "hitch": 22937, - "hitch": 36203, - "hitler": 16518, - "hitman": 33290, - "hits": 4712, - "hitter": 23538, - "hitters": 39724, - "hitting": 7957, - "hiv": 44410, - "hiv": 11018, - "hive": 38162, - "hive": 18521, - "hiya": 42393, - "hk": 22648, - "hk": 12307, - "hl": 8297, - "hl": 5956, - "hle": 32389, - "hler": 35418, - "hm": 17913, - "hm": 7631, - "hmm": 13725, - "hmmm": 17032, - "hmmmm": 34598, - "hms": 14625, - "hmu": 21630, - "hmv": 49288, - "hn": 22905, - "hn": 7478, - "hns": 48412, - "ho": 606, - "ho": 2971, - "hoa": 37517, - "hoar": 31628, - "hoax": 33438, - "hob": 18212, - "hobart": 31646, - "hobb": 16175, - 
"hobbies": 36370, - "hobbit": 23207, - "hobbs": 34343, - "hobby": 41120, - "hobby": 17557, - "hobo": 34613, - "hobo": 41334, - "hoboken": 41568, - "hoc": 35880, - "hoch": 43772, - "hock": 34914, - "hock": 46574, - "hockey": 16499, - "hockey": 4111, - "hoco": 34771, - "hod": 31062, - "hodg": 23660, - "hodge": 40585, - "hodges": 35061, - "hodgson": 37044, - "hoe": 32502, - "hoe": 11262, - "hoek": 40073, - "hoes": 21164, - "hof": 20186, - "hof": 12789, - "hofer": 38654, - "hoff": 32860, - "hoff": 22751, - "hofficial": 41949, - "hoffman": 22026, - "hog": 12075, - "hog": 13255, - "hogan": 19757, - "hogg": 42005, - "hogs": 23242, - "hogwarts": 29168, - "hoh": 43947, - "hoi": 39295, - "hok": 26942, - "hok": 47167, - "hokies": 35168, - "hokkaido": 49145, - "hol": 1187, - "hol": 7349, - "hola": 28724, - "hold": 36496, - "hold": 3254, - "holden": 21869, - "holder": 7862, - "holders": 10074, - "holding": 5050, - "holdings": 24832, - "holds": 7286, - "hole": 47242, - "hole": 5341, - "holes": 11266, - "holi": 2093, - "holi": 21926, - "holic": 16348, - "holics": 29782, - "holiday": 13168, - "holiday": 2878, - "holidays": 5372, - "holiness": 37259, - "holistic": 26300, - "holl": 27699, - "holla": 26500, - "holland": 31608, - "holland": 9978, - "hollande": 47690, - "holler": 49047, - "holli": 24019, - "holliday": 41624, - "hollow": 41221, - "hollow": 16691, - "holloway": 29435, - "holly": 12731, - "holly": 11923, - "hollyo": 41525, - "hollyoaks": 43352, - "hollywood": 24655, - "hollywood": 5518, - "holm": 34758, - "holm": 12739, - "holme": 46149, - "holmes": 12756, - "holo": 10317, - "holocau": 14688, - "holocaust": 16476, - "hols": 33344, - "holt": 18868, - "holtz": 44743, - "holy": 13910, - "holy": 4874, - "hom": 906, - "hom": 47397, - "homa": 9557, - "homage": 17746, - "home": 2143, - "home": 1137, - "homebrew": 35046, - "homec": 33869, - "homecoming": 9008, - "homedecor": 15695, - "homedepot": 38707, - "homegrown": 32554, - "homeitems": 42972, - "homeland": 21633, - 
"homeless": 18403, - "homeless": 9661, - "homelessness": 19851, - "homemade": 7889, - "homeof": 48856, - "homeowner": 37267, - "homeowners": 29882, - "homepage": 29828, - "homer": 29307, - "homer": 16931, - "homers": 38333, - "homes": 19480, - "homes": 5416, - "homeschool": 40994, - "homestead": 32609, - "homeswee": 46298, - "hometown": 12238, - "homework": 12495, - "homicide": 21520, - "homie": 12540, - "homies": 18893, - "homme": 26193, - "homo": 18129, - "homo": 30504, - "homophobia": 37875, - "homophobic": 40975, - "homosexual": 44288, - "homosexuality": 46720, - "homs": 45413, - "hon": 1279, - "hon": 10296, - "honda": 8553, - "honduras": 29715, - "hone": 38640, - "honest": 7814, - "honest": 9602, - "honestly": 9155, - "honesty": 24939, - "honey": 9843, - "honey": 6406, - "honeycomb": 48583, - "honeymoon": 22527, - "hong": 12144, - "hong": 8598, - "hongkong": 16659, - "honi": 17918, - "honolulu": 28096, - "honor": 9206, - "honor": 3402, - "honorable": 19498, - "honorary": 15675, - "honore": 25868, - "honored": 5494, - "honoree": 38993, - "honorees": 43012, - "honoring": 10771, - "honors": 10248, - "honour": 8240, - "honourable": 29855, - "honoured": 11945, - "honouring": 37754, - "honours": 22558, - "hoo": 2300, - "hoo": 7920, - "hood": 18681, - "hood": 3222, - "hooded": 33631, - "hoodie": 13444, - "hoodies": 25974, - "hoods": 16664, - "hoof": 44555, - "hook": 30488, - "hook": 10395, - "hookah": 34214, - "hooked": 18138, - "hookem": 31465, - "hooker": 37891, - "hooking": 35240, - "hooks": 25068, - "hooligans": 48176, - "hoon": 21368, - "hooo": 44538, - "hoop": 31516, - "hoop": 19573, - "hooper": 35221, - "hoops": 9351, - "hoor": 22155, - "hooray": 24940, - "hoos": 46462, - "hoosier": 48886, - "hoosiers": 42780, - "hoot": 29164, - "hoover": 25691, - "hop": 10848, - "hop": 5833, - "hope": 5263, - "hope": 1683, - "hoped": 30628, - "hopeful": 21453, - "hopefully": 7602, - "hopeless": 35586, - "hopes": 10018, - "hoping": 7207, - "hopkins": 17821, - "hopp": 48839, - 
"hopped": 34220, - "hopper": 21748, - "hopping": 27606, - "hoppy": 38359, - "hops": 21137, - "hor": 1407, - "hor": 33847, - "hora": 26013, - "horace": 39282, - "horan": 26857, - "horde": 44947, - "hore": 15380, - "horiz": 8144, - "horizon": 17924, - "horizon": 11920, - "horizons": 29685, - "horizontal": 25775, - "hormon": 27096, - "hormone": 31283, - "hormones": 35162, - "horn": 15771, - "horn": 9607, - "horne": 38143, - "horned": 34526, - "hornet": 28739, - "hornets": 20124, - "horns": 22109, - "horny": 32622, - "horo": 21500, - "horoscope": 38453, - "horowitz": 44669, - "horri": 8656, - "horrible": 13726, - "horribly": 45484, - "horrific": 25314, - "horrifying": 38901, - "horror": 13787, - "horror": 5032, - "horrormovies": 46682, - "horrors": 33321, - "horse": 8562, - "horse": 4558, - "horseback": 43673, - "horseman": 48885, - "horsepower": 36882, - "horser": 23096, - "horseracing": 30693, - "horses": 8809, - "horseshoe": 29242, - "horst": 37182, - "hort": 19482, - "horticul": 27141, - "horticulture": 39998, - "horton": 25945, - "hortons": 38422, - "horus": 29794, - "hos": 44320, - "hos": 25008, - "hosa": 44618, - "hose": 19662, - "hoseok": 38817, - "hosp": 2847, - "hosp": 37853, - "hospice": 20533, - "hospit": 7180, - "hospital": 29399, - "hospital": 3851, - "hospitality": 11657, - "hospitalized": 36915, - "hospitals": 13816, - "host": 17403, - "host": 3953, - "hostage": 26119, - "hoste": 31700, - "hosted": 6017, - "hostel": 27225, - "hostess": 39692, - "hostile": 28074, - "hosting": 4857, - "hosts": 8718, - "hot": 2851, - "hot": 2069, - "hota": 43289, - "hotdog": 43758, - "hotel": 14591, - "hotel": 2738, - "hotels": 8654, - "hotline": 30516, - "hotmail": 46427, - "hotness": 39803, - "hotra": 27109, - "hotro": 47823, - "hotspot": 36606, - "hotspur": 35176, - "hotter": 23591, - "hottest": 8279, - "hottie": 22804, - "hotties": 46027, - "hou": 1011, - "hou": 10122, - "hough": 44529, - "houghton": 36133, - "houn": 39273, - "houn": 33607, - "hound": 33996, - "hound": 
13561, - "hounds": 21178, - "hounews": 48373, - "hour": 14930, - "hour": 2232, - "hourly": 30918, - "hours": 2382, - "house": 4107, - "house": 1212, - "housed": 37518, - "household": 12412, - "households": 27167, - "housel": 48685, - "housemusic": 28468, - "houseof": 19928, - "houses": 7791, - "housewives": 38523, - "housing": 32924, - "housing": 5734, - "houston": 16564, - "houston": 5663, - "hov": 40291, - "hove": 29674, - "hoven": 35559, - "hover": 36252, - "hover": 49016, - "hovering": 43437, - "how": 7470, - "how": 829, - "howar": 37672, - "howard": 25447, - "howard": 7632, - "howdy": 42216, - "howe": 8179, - "howe": 24614, - "howell": 25297, - "hower": 32920, - "however": 8467, - "howi": 47883, - "howie": 42939, - "howl": 40332, - "howling": 41771, - "howto": 38191, - "howto": 44060, - "hoy": 39625, - "hoy": 13278, - "hoya": 40978, - "hp": 23753, - "hp": 6371, - "hpa": 30983, - "hpc": 39936, - "hpe": 33787, - "hpv": 45765, - "hq": 33571, - "hq": 4693, - "hr": 4810, - "hr": 4086, - "hra": 21320, - "hra": 17212, - "hrc": 18139, - "hrh": 29103, - "hri": 21068, - "hrithik": 45371, - "hrs": 7157, - "hru": 24127, - "hrw": 25064, - "hs": 9343, - "hs": 2466, - "hsbc": 31508, - "hsc": 43510, - "hse": 34057, - "hsfb": 29539, - "hsv": 47311, - "ht": 11123, - "ht": 7801, - "hta": 23452, - "hta": 49384, - "htafc": 42821, - "htc": 48942, - "htc": 17635, - "html": 18231, - "hts": 43710, - "htt": 10620, - "http": 15066, - "https": 30901, - "httr": 49372, - "httweets": 43198, - "hu": 845, - "hu": 5949, - "hua": 22138, - "huan": 41405, - "huang": 32013, - "huar": 46916, - "huawe": 17709, - "huawei": 21128, - "hub": 18775, - "hub": 7028, - "hubb": 23183, - "hubbard": 33288, - "hubble": 30421, - "hubby": 16947, - "hubert": 40699, - "hubs": 29327, - "huck": 22909, - "huckabee": 43666, - "hud": 7169, - "hud": 28563, - "hudder": 22629, - "huddersfield": 24220, - "huddle": 33435, - "hudson": 25873, - "hudson": 11260, - "hue": 48380, - "hue": 21465, - "hues": 38003, - "huey": 39663, 
- "huff": 18746, - "huff": 44999, - "huffpost": 45887, - "hug": 40790, - "hug": 10359, - "huge": 2699, - "hugely": 24648, - "hugged": 41333, - "hugging": 27058, - "hugh": 8723, - "hugh": 15385, - "hughes": 11418, - "hugo": 43935, - "hugo": 17132, - "hugs": 14248, - "huh": 13348, - "huhu": 32134, - "hui": 29978, - "hul": 7911, - "hula": 40145, - "hulk": 17637, - "hull": 25154, - "hull": 10375, - "hulu": 24666, - "hum": 5823, - "hum": 16283, - "human": 3175, - "human": 2751, - "humane": 20220, - "humanitarian": 14170, - "humanities": 24949, - "humanity": 9420, - "humanright": 44385, - "humanrights": 14148, - "humans": 8324, - "humb": 9988, - "humber": 30602, - "humber": 38063, - "humble": 38703, - "humble": 10889, - "humbled": 19682, - "humbling": 39757, - "humbold": 24739, - "humboldt": 31389, - "hume": 38197, - "humid": 14778, - "humid": 27447, - "humidi": 47666, - "humidity": 15469, - "humil": 27205, - "humili": 25332, - "humility": 28535, - "humming": 26515, - "hummingbird": 33072, - "hummus": 31785, - "humor": 29369, - "humor": 11186, - "humorous": 38173, - "humour": 19161, - "hump": 16673, - "hump": 24529, - "humpback": 47662, - "humpday": 27693, - "humph": 19767, - "humphrey": 31549, - "hun": 1616, - "hun": 10795, - "hundre": 8505, - "hundred": 11898, - "hundreds": 8879, - "hung": 13825, - "hungar": 19420, - "hungarian": 23325, - "hungary": 17232, - "hunger": 25565, - "hunger": 10184, - "hungergames": 47507, - "hungover": 41110, - "hungry": 44845, - "hungry": 8451, - "hunk": 33912, - "hunt": 16498, - "hunt": 5774, - "hunted": 37373, - "hunter": 16531, - "hunter": 6099, - "hunters": 16115, - "hunting": 27830, - "hunting": 7507, - "huntington": 23521, - "hunts": 34041, - "huntsville": 34544, - "hur": 2305, - "hur": 34523, - "hurd": 44915, - "hurdle": 27486, - "hurdles": 25440, - "huri": 42486, - "hurley": 30166, - "hurling": 24738, - "huron": 36147, - "hurrah": 40599, - "hurric": 6543, - "hurrican": 36105, - "hurricane": 24051, - "hurricane": 8782, - 
"hurricanes": 22357, - "hurry": 10921, - "hurst": 44742, - "hurst": 11760, - "hurt": 7413, - "hurting": 24017, - "hurts": 13059, - "hus": 5111, - "hus": 35853, - "husband": 6179, - "husbands": 33612, - "hush": 28728, - "husk": 19246, - "huskers": 26946, - "huskies": 20988, - "husky": 20421, - "huss": 13733, - "hussain": 17940, - "hussein": 31336, - "hust": 27279, - "hustle": 15709, - "huston": 46480, - "hut": 20924, - "hut": 16503, - "hutch": 31018, - "hutch": 33203, - "hutchinson": 35721, - "hutto": 27662, - "hutton": 38321, - "hv": 17209, - "hv": 18593, - "hvac": 27492, - "hw": 27491, - "hw": 18876, - "hwa": 32352, - "hwan": 44390, - "hwang": 46775, - "hwy": 13812, - "hy": 1441, - "hy": 17827, - "hya": 31600, - "hyacin": 47263, - "hyatt": 44856, - "hyatt": 25146, - "hybri": 9084, - "hybrid": 10156, - "hyd": 42382, - "hyde": 46484, - "hyde": 16343, - "hyder": 13960, - "hyderabad": 14801, - "hydr": 8031, - "hydra": 44414, - "hydra": 40420, - "hydrange": 43298, - "hydrate": 29628, - "hydrated": 23300, - "hydrating": 47653, - "hydration": 24174, - "hydrau": 26017, - "hydraulic": 26189, - "hydro": 8368, - "hydro": 22595, - "hydrogen": 20974, - "hye": 32724, - "hye": 25792, - "hygi": 16277, - "hygiene": 19591, - "hymn": 41350, - "hyo": 38960, - "hyo": 35078, - "hyp": 16964, - "hype": 30353, - "hype": 11111, - "hyped": 22507, - "hyper": 7997, - "hyper": 22146, - "hypertension": 40698, - "hypno": 23355, - "hypnosis": 48138, - "hypnoti": 40440, - "hypo": 10252, - "hypocr": 30711, - "hypocri": 25606, - "hypocrisy": 26296, - "hypocrite": 44125, - "hypothe": 46966, - "hypothesis": 44956, - "hyster": 24235, - "hysteria": 45965, - "hysterical": 48627, - "hyuk": 20452, - "hyun": 11831, - "hyun": 8589, - "hyundai": 17094, - "hyung": 46901, - "hyung": 16551, - "hz": 32533, - "i": 72, - "i": 328, - "ia": 12486, - "ia": 1073, - "iac": 32838, - "iac": 44063, - "iaf": 40789, - "iah": 35052, - "iain": 30103, - "ial": 11530, - "ial": 1974, - "ials": 20940, - "iam": 3579, - "iam": 
11415, - "iambic": 43668, - "iambicpent": 43891, - "iamsrk": 15103, - "ian": 7723, - "ian": 1800, - "ians": 6451, - "iansomerhalder": 47077, - "iart": 18413, - "iartg": 18669, - "ias": 32303, - "ias": 14620, - "ib": 3962, - "ib": 13554, - "iba": 39763, - "ibadan": 44691, - "iban": 47145, - "ibc": 49014, - "ibd": 40732, - "iber": 23814, - "ibi": 12337, - "ibis": 47048, - "ibiza": 13853, - "ible": 37792, - "ibles": 44102, - "ibm": 23415, - "ibm": 13918, - "ibn": 25729, - "ibooks": 46887, - "ibra": 15476, - "ibrahi": 40350, - "ibrahim": 20816, - "ibrox": 46883, - "ibs": 41993, - "ibu": 43587, - "ibu": 46117, - "ic": 535, - "ic": 1029, - "ica": 2576, - "icago": 37492, - "ical": 6082, - "ical": 1110, - "ically": 3161, - "icals": 13999, - "ican": 17653, - "ican": 5246, - "icans": 20511, - "icar": 37211, - "ication": 21629, - "icc": 12945, - "ice": 2739, - "ice": 733, - "iceberg": 33662, - "icec": 13636, - "icecream": 21334, - "iced": 8049, - "icelan": 34114, - "iceland": 46716, - "iceland": 11935, - "icelandic": 34705, - "ices": 1931, - "ich": 5333, - "ich": 1232, - "icha": 31453, - "iche": 28972, - "iche": 21143, - "ichi": 21669, - "ichi": 14647, - "ichick": 45022, - "ichiro": 43787, - "ici": 948, - "ici": 22189, - "icia": 11774, - "icial": 17543, - "icial": 6397, - "ician": 40522, - "ician": 5374, - "icians": 6264, - "iciary": 21329, - "icic": 46006, - "icide": 6558, - "icides": 28253, - "icing": 7676, - "icio": 24207, - "icion": 45905, - "icious": 3325, - "icist": 21165, - "icists": 42171, - "icity": 7243, - "ick": 1168, - "ick": 1068, - "icked": 39799, - "icker": 40357, - "ickers": 30701, - "icki": 35468, - "icking": 6619, - "icks": 3727, - "icky": 11587, - "icn": 44516, - "ico": 13697, - "ico": 3040, - "icom": 17693, - "icom": 29796, - "icon": 13843, - "icon": 5646, - "iconic": 6959, - "icons": 15553, - "icop": 9389, - "icos": 32002, - "ics": 1324, - "ict": 6349, - "icted": 36515, - "iction": 40560, - "icton": 36548, - "icu": 45118, - "icu": 30443, - "icular": 
40660, - "icus": 31459, - "icy": 28780, - "icy": 3495, - "icymi": 5315, - "icz": 46387, - "id": 1568, - "id": 1014, - "ida": 11032, - "ida": 11600, - "idad": 22462, - "idaho": 48817, - "idaho": 15165, - "idal": 39684, - "idan": 17929, - "idc": 22386, - "ide": 1909, - "ide": 14104, - "idea": 3612, - "ideal": 8789, - "ideally": 48247, - "ideals": 45096, - "ideas": 4452, - "ident": 7113, - "identi": 6009, - "identical": 25587, - "identification": 23337, - "identified": 15217, - "identifies": 35712, - "identify": 10949, - "identifying": 23589, - "identities": 34292, - "identity": 8892, - "ideology": 25840, - "iders": 8980, - "ides": 31791, - "idf": 28987, - "idge": 35567, - "idh": 44325, - "idi": 9611, - "idi": 14264, - "idio": 15994, - "idiot": 14087, - "idiots": 20856, - "idk": 8972, - "idle": 34754, - "idlib": 36199, - "ido": 6763, - "ido": 29641, - "idol": 24866, - "idol": 8884, - "idols": 21398, - "idr": 10106, - "idri": 46435, - "idris": 41312, - "ids": 6111, - "idu": 28655, - "idy": 33058, - "idyl": 44879, - "idyllic": 46632, - "ie": 6789, - "ie": 1718, - "iec": 44773, - "ied": 10059, - "ieee": 39860, - "iel": 27875, - "iel": 22729, - "ience": 1542, - "ient": 13115, - "ier": 33173, - "ier": 5912, - "iers": 45060, - "ies": 27912, - "ies": 963, - "iest": 10818, - "if": 8063, - "if": 878, - "ifa": 37574, - "ifc": 36524, - "ife": 41172, - "ife": 19590, - "iff": 35753, - "ification": 35755, - "ified": 41403, - "ift": 31143, - "iftar": 35153, - "ifu": 41523, - "ify": 32807, - "ig": 1089, - "ig": 3072, - "iga": 16493, - "igan": 27468, - "igans": 25419, - "igbo": 44591, - "ige": 10806, - "igen": 33070, - "iger": 30758, - "iger": 20685, - "igers": 40755, - "igers": 48928, - "iggy": 46219, - "iggy": 27604, - "igh": 2712, - "igh": 5451, - "ight": 14571, - "ight": 897, - "ighton": 35292, - "igi": 21901, - "igle": 29912, - "iglesias": 39432, - "ign": 7303, - "ign": 2326, - "ignati": 37573, - "ignatius": 48318, - "igne": 45843, - "ignite": 25210, - "ignition": 36115, - 
"igno": 15375, - "ignor": 7653, - "ignorance": 22735, - "ignorant": 26933, - "ignore": 12304, - "ignored": 20428, - "ignores": 40129, - "ignoring": 23969, - "igor": 33024, - "igs": 31344, - "igu": 21279, - "ih": 12162, - "ih": 34135, - "ihear": 13043, - "iheart": 30332, - "iheartawards": 18811, - "iheartradio": 25934, - "ihop": 45511, - "ihri": 39108, - "ihrithik": 39326, - "ii": 5103, - "ii": 2329, - "iii": 46236, - "iii": 6572, - "iiii": 20133, - "iiii": 45393, - "iiot": 30704, - "iit": 39330, - "iit": 33238, - "ij": 7337, - "ija": 42802, - "ik": 3903, - "ik": 10177, - "ika": 18188, - "ike": 12329, - "ike": 19696, - "ikea": 20528, - "iker": 38653, - "ikh": 44655, - "ikh": 12758, - "iklan": 32028, - "iklan": 29584, - "iko": 35659, - "iko": 39272, - "ikon": 38543, - "ikon": 19156, - "iku": 17780, - "il": 543, - "il": 958, - "ila": 4344, - "ilah": 32211, - "ilan": 13889, - "ilan": 28076, - "iland": 20957, - "ilation": 16180, - "ilay": 45093, - "ild": 22278, - "ild": 17164, - "ile": 18398, - "ile": 989, - "iled": 3358, - "iler": 22446, - "iler": 3615, - "ilers": 8975, - "iles": 42274, - "ili": 2076, - "ili": 19601, - "ilia": 14855, - "ilian": 10272, - "iliary": 32585, - "ilife": 42835, - "ilike": 44989, - "ilinan": 48497, - "iling": 3299, - "ilio": 47256, - "ilion": 12561, - "ilis": 43442, - "ilit": 11178, - "ilities": 5446, - "ility": 1787, - "ilive": 26478, - "ill": 828, - "ill": 660, - "illa": 8877, - "illa": 3043, - "illac": 17218, - "illage": 48922, - "illard": 21920, - "illary": 33667, - "illas": 23404, - "ille": 18213, - "ille": 5559, - "illed": 2527, - "illeg": 35808, - "illegal": 7983, - "illegally": 24466, - "illegals": 40490, - "iller": 23341, - "iller": 2956, - "illers": 30547, - "illery": 14514, - "illes": 20037, - "illi": 1086, - "illi": 25187, - "illia": 48776, - "illiams": 30301, - "illian": 48775, - "illian": 17355, - "illic": 37152, - "illicit": 40998, - "illie": 26083, - "illin": 35868, - "illing": 2803, - "illini": 28957, - "illino": 8920, - 
"illinois": 9414, - "illion": 35542, - "illion": 2035, - "illness": 11145, - "illnesses": 33861, - "illo": 34153, - "illo": 7588, - "illon": 20516, - "ills": 1900, - "illu": 3025, - "illumin": 11446, - "illuminate": 43261, - "illuminated": 28814, - "illuminati": 34551, - "illuminating": 46601, - "illumination": 43680, - "illus": 41386, - "illusion": 20318, - "illusions": 47429, - "illustr": 6268, - "illustrate": 37468, - "illustrated": 13151, - "illustrates": 38129, - "illustrating": 43322, - "illustration": 6052, - "illustrations": 17852, - "illustrator": 16649, - "illustri": 43116, - "illustrious": 44304, - "illy": 11707, - "illy": 9532, - "ilm": 36326, - "ilo": 4220, - "ilo": 14835, - "ilove": 7183, - "ilove": 32914, - "iloveart": 41114, - "ilovemy": 28863, - "iloveyou": 28829, - "ils": 1543, - "ilt": 25334, - "ilton": 28494, - "ilu": 27337, - "ilwx": 43777, - "ily": 4881, - "ily": 1026, - "ilya": 33377, - "ilysm": 29228, - "im": 732, - "im": 1496, - "ima": 2414, - "ima": 6432, - "imac": 40675, - "imacele": 47281, - "imag": 2316, - "image": 24101, - "image": 2867, - "imagery": 22828, - "images": 4952, - "imagin": 18178, - "imaginary": 30417, - "imagination": 13783, - "imaginative": 47233, - "imagine": 35752, - "imagine": 4826, - "imagined": 18478, - "imagines": 47379, - "imaging": 14231, - "imagining": 27384, - "imam": 37552, - "imam": 19024, - "iman": 45684, - "iman": 16247, - "imation": 44566, - "imax": 32066, - "imc": 45616, - "imdanielpadilla": 36357, - "imdb": 30407, - "ime": 44937, - "ime": 31151, - "imel": 31594, - "iment": 37157, - "imer": 21802, - "imes": 47744, - "imf": 28403, - "img": 24157, - "imi": 23559, - "imin": 23942, - "imit": 23462, - "imitation": 41630, - "imma": 19487, - "immac": 25085, - "immaculate": 29649, - "immature": 45531, - "immedi": 7366, - "immediate": 14440, - "immediately": 10108, - "immen": 17278, - "immense": 22722, - "immensely": 35013, - "immer": 13954, - "immerse": 46240, - "immersion": 31861, - "immersive": 27521, - 
"immigr": 5851, - "immigrant": 16474, - "immigrants": 14460, - "immigration": 9588, - "imminent": 27299, - "immort": 39244, - "immortal": 24717, - "immun": 8961, - "immune": 15606, - "immuni": 44571, - "immunity": 26254, - "immuno": 24361, - "immunology": 44483, - "immunotherapy": 39185, - "imo": 26349, - "imo": 13738, - "imp": 3335, - "imp": 31037, - "impac": 7573, - "impact": 33036, - "impact": 3844, - "impacted": 21424, - "impactful": 41631, - "impacting": 29359, - "impacts": 15069, - "impair": 36451, - "impaired": 28028, - "impairment": 44501, - "impala": 36641, - "impe": 23612, - "impeach": 16874, - "impeach": 43497, - "impeachment": 32979, - "impeachtrump": 38006, - "impecc": 34511, - "impeccable": 40111, - "impending": 34486, - "imper": 7727, - "imperative": 39833, - "imperfect": 46034, - "imperi": 30911, - "imperial": 32425, - "imperial": 12361, - "imperialism": 48855, - "imperson": 25551, - "implant": 33106, - "implants": 32202, - "imple": 7423, - "implement": 17966, - "implementation": 15102, - "implemented": 24315, - "implementing": 22862, - "implic": 15269, - "implications": 19229, - "implo": 40337, - "impo": 45704, - "import": 2336, - "import": 16294, - "importance": 6821, - "important": 2829, - "importantly": 21580, - "imported": 28798, - "imports": 25286, - "impose": 35879, - "imposed": 25871, - "imposing": 42289, - "impossible": 9815, - "impre": 3763, - "impress": 20015, - "impressed": 9689, - "impression": 14468, - "impressionism": 36114, - "impressionist": 44904, - "impressions": 22276, - "impressive": 6634, - "imprint": 43863, - "imprison": 22141, - "imprisoned": 32999, - "imprisonment": 39024, - "impro": 2531, - "impromp": 28100, - "impromptu": 28611, - "improv": 22868, - "improve": 4971, - "improved": 9446, - "improvement": 10790, - "improvements": 16320, - "improves": 18035, - "improving": 10381, - "improvis": 32343, - "improvised": 40886, - "impulse": 29683, - "impy": 42690, - "imran": 19647, - "imran": 19212, - "imrankhan": 25956, - 
"imrankhanpti": 26688, - "ims": 17800, - "imsa": 37262, - "imv": 35731, - "imvkohli": 37136, - "imwith": 26822, - "imwithher": 32651, - "in": 512, - "in": 530, - "ina": 18026, - "ina": 1366, - "inability": 47517, - "inaccurate": 49192, - "inaction": 41916, - "inactive": 49274, - "inadequate": 43403, - "inak": 46549, - "inal": 19178, - "inals": 26438, - "inan": 26204, - "inappropriate": 26722, - "inari": 48620, - "inary": 11337, - "inas": 36731, - "inas": 12362, - "inated": 38530, - "ination": 4706, - "inau": 10832, - "inaugu": 11309, - "inaugur": 11448, - "inaugural": 11340, - "inaugurated": 29011, - "inauguration": 16805, - "inbound": 24420, - "inbox": 18683, - "inc": 14570, - "inc": 4438, - "incan": 45964, - "incar": 18070, - "incarcer": 26334, - "incarcerated": 49178, - "incarceration": 39887, - "incase": 30463, - "ince": 44303, - "incen": 13259, - "incense": 35059, - "incentive": 29024, - "incentives": 29813, - "inception": 36653, - "inch": 6523, - "incheon": 30645, - "inches": 10809, - "inci": 5747, - "incidence": 43371, - "incident": 10103, - "incidents": 22120, - "incindia": 26161, - "inciner": 46434, - "incl": 27857, - "incl": 13338, - "inclined": 45470, - "inclu": 1738, - "include": 5942, - "included": 7414, - "includes": 6197, - "including": 2814, - "inclusion": 12079, - "inclusive": 13393, - "income": 8044, - "incoming": 15416, - "incomparable": 36027, - "incompetent": 45069, - "incomplete": 34040, - "incon": 42372, - "inconvenience": 40563, - "incorpor": 19335, - "incorporate": 34168, - "incorporated": 29494, - "incorporating": 40303, - "incorrect": 31872, - "incre": 1870, - "increase": 5230, - "increased": 9156, - "increases": 13797, - "increasing": 10270, - "increasingly": 16106, - "incredi": 2883, - "incredible": 22128, - "incredible": 3457, - "incredibleindia": 24680, - "incredibles": 48641, - "incredibly": 9513, - "incu": 38830, - "incub": 24587, - "incubator": 35736, - "incumb": 32246, - "incumbent": 38038, - "incur": 42356, - "ind": 5386, - 
"ind": 4655, - "inda": 15710, - "inde": 2645, - "indeed": 10031, - "indefin": 29501, - "indefinitely": 43750, - "independ": 4147, - "independence": 23117, - "independence": 7955, - "independenceday": 25971, - "independent": 33844, - "independent": 7088, - "independently": 39831, - "inder": 29225, - "index": 35209, - "index": 9458, - "indhoven": 44229, - "indi": 1098, - "indi": 46536, - "india": 27067, - "india": 1762, - "indian": 7685, - "indian": 3606, - "indiana": 8615, - "indianapolis": 17196, - "indianfootball": 45979, - "indians": 10271, - "indic": 7136, - "indicate": 26679, - "indicated": 39416, - "indicates": 29412, - "indication": 38539, - "indicator": 24776, - "indicators": 30054, - "indicted": 34992, - "indictment": 42278, - "indie": 5260, - "indie": 9383, - "indiedev": 10863, - "indiefilm": 22588, - "indiegame": 17969, - "indiegamedev": 40466, - "indiegames": 35864, - "indiegogo": 38057, - "indies": 23618, - "indiffe": 41372, - "indigen": 8348, - "indigenous": 9303, - "indigo": 21002, - "indira": 43887, - "indirec": 26398, - "indirect": 35416, - "indivi": 5649, - "individu": 9574, - "individual": 8512, - "individually": 33782, - "individuals": 11990, - "indo": 26303, - "indo": 18297, - "indom": 42926, - "indone": 6180, - "indonesia": 7229, - "indonesian": 19593, - "indoor": 44478, - "indoor": 9546, - "indoors": 22973, - "indore": 46143, - "indu": 2298, - "induc": 7973, - "induced": 24103, - "inducted": 20596, - "inductee": 39558, - "inductees": 44796, - "induction": 18338, - "indul": 19402, - "indulg": 28388, - "indulge": 24851, - "indulgence": 40856, - "indulgent": 49147, - "industri": 5082, - "industrial": 30853, - "industrial": 7520, - "industries": 11700, - "industry": 47407, - "industry": 3318, - "indv": 16942, - "indy": 9821, - "indy": 10098, - "indycar": 20484, - "indyref": 22569, - "ine": 855, - "ine": 715, - "ineau": 38122, - "inec": 45214, - "ined": 2038, - "inee": 43252, - "inee": 7986, - "inees": 13056, - "ineffe": 47202, - "inely": 18234, - 
"inem": 48876, - "inema": 29232, - "inen": 44365, - "inequalities": 45507, - "inequality": 17372, - "iner": 17438, - "iner": 5155, - "iners": 41863, - "ines": 2137, - "inese": 35966, - "iness": 1463, - "inet": 8121, - "inette": 38911, - "inev": 19527, - "inevit": 45871, - "inevitable": 25004, - "inews": 24300, - "inexpensive": 38614, - "iney": 30254, - "inez": 12700, - "inf": 1529, - "inf": 35241, - "infamous": 18688, - "infan": 17219, - "infant": 19192, - "infantry": 21655, - "infants": 34726, - "infe": 7164, - "infec": 26088, - "infected": 26136, - "infection": 14774, - "infections": 22227, - "infectious": 29157, - "infeld": 25035, - "infer": 16258, - "inferno": 31290, - "infertility": 40701, - "infield": 48933, - "infiltr": 28683, - "infin": 6246, - "infinite": 12748, - "infiniti": 34644, - "infinity": 34863, - "infinity": 12895, - "infl": 7627, - "inflam": 16080, - "inflammation": 24893, - "inflammatory": 26831, - "inflatable": 30135, - "inflation": 17497, - "inflicted": 48188, - "influ": 4835, - "influen": 13229, - "influence": 9199, - "influenced": 21183, - "influencer": 25013, - "influencers": 29891, - "influences": 24926, - "influencing": 45126, - "influential": 17553, - "influenza": 39897, - "info": 5680, - "info": 2222, - "infographic": 10076, - "infographics": 33172, - "infor": 31773, - "inform": 10241, - "inform": 19449, - "informal": 25705, - "informat": 29625, - "informatics": 35685, - "information": 3204, - "informative": 19364, - "informed": 13876, - "informing": 45388, - "informs": 48440, - "infosec": 17863, - "infr": 29718, - "infra": 7312, - "infra": 45877, - "infrared": 22867, - "infrastructure": 9034, - "infringe": 44882, - "infringement": 48712, - "infront": 37668, - "infu": 15048, - "infuri": 48461, - "infused": 21461, - "infusion": 43464, - "ing": 653, - "ing": 519, - "inga": 15233, - "ingco": 40444, - "ingday": 16561, - "ingdon": 38731, - "inge": 11790, - "inge": 7071, - "inged": 30046, - "ingen": 19088, - "ingeni": 36884, - "inger": 33883, 
- "inger": 3541, - "ingfor": 33430, - "ingh": 9170, - "ingh": 30495, - "ingham": 24497, - "ingham": 4291, - "inghamshire": 39289, - "inghour": 42728, - "inging": 4066, - "ingl": 45662, - "ingle": 22228, - "ingle": 17005, - "ingles": 24490, - "ingley": 44428, - "inglis": 46327, - "ingly": 4796, - "ingnow": 34766, - "ingo": 30175, - "ingo": 9012, - "ingra": 45165, - "ingrad": 44124, - "ingram": 26998, - "ingredi": 9272, - "ingredient": 19799, - "ingredients": 11788, - "ingrid": 33496, - "ings": 895, - "ingthe": 20170, - "ingtips": 39373, - "ington": 11846, - "ington": 2156, - "ingu": 8714, - "ingual": 22795, - "ingue": 36838, - "ingui": 12788, - "inguish": 36146, - "inha": 32612, - "inhabit": 36189, - "inhabitants": 44968, - "inhal": 30786, - "inhe": 32617, - "inher": 24611, - "inherent": 47327, - "inherit": 34322, - "inheritance": 39341, - "inherited": 39111, - "inhi": 25557, - "inhibit": 32196, - "inho": 12984, - "ini": 6154, - "ini": 3581, - "inian": 36638, - "inim": 38717, - "inindia": 34021, - "ining": 1389, - "inist": 30976, - "init": 42670, - "initi": 4580, - "initial": 13980, - "initially": 28123, - "initials": 48794, - "initiated": 27756, - "initiation": 41009, - "initiative": 8152, - "initiatives": 16549, - "inity": 22126, - "inj": 5112, - "injec": 13688, - "injection": 21438, - "inju": 5006, - "injured": 7505, - "injuries": 9481, - "injury": 6223, - "injustice": 20541, - "ink": 4547, - "ink": 967, - "inka": 40685, - "inked": 29356, - "inki": 46176, - "inkigayo": 47882, - "inking": 37586, - "inks": 20966, - "inktober": 9387, - "inland": 21943, - "inlet": 35161, - "inline": 45004, - "inlove": 28415, - "inmate": 32341, - "inmates": 28216, - "inmy": 42657, - "inn": 27260, - "inn": 5569, - "inna": 35088, - "inner": 24512, - "inner": 6955, - "inning": 4415, - "innings": 11580, - "innis": 44059, - "inno": 7961, - "innocence": 26383, - "innocent": 11241, - "innov": 2890, - "innovate": 24549, - "innovation": 33063, - "innovation": 4272, - "innovations": 18817, - 
"innovative": 8494, - "innovator": 34735, - "innovators": 27834, - "ino": 4211, - "ino": 2691, - "inoa": 25649, - "inos": 21828, - "inous": 47801, - "inox": 22698, - "input": 16952, - "inputs": 48763, - "inqu": 10628, - "inqui": 18527, - "inquirer": 45172, - "inquiries": 29469, - "inquiry": 15865, - "inquis": 31171, - "inr": 36325, - "ins": 12786, - "ins": 1041, - "insan": 7875, - "insane": 10260, - "insanely": 27846, - "insanity": 26645, - "inscribed": 49168, - "inscription": 41127, - "insec": 15744, - "insect": 21297, - "insects": 18714, - "insecure": 35112, - "insecurity": 36964, - "inser": 13830, - "insert": 18807, - "insi": 3453, - "inside": 19141, - "inside": 2912, - "insider": 13300, - "insiders": 32171, - "insig": 40503, - "insight": 8795, - "insightful": 20354, - "insights": 8729, - "insignia": 48864, - "insist": 35504, - "insisted": 40423, - "insists": 27255, - "inski": 32630, - "insky": 24607, - "insol": 42366, - "insom": 21755, - "insomni": 42040, - "insomnia": 30598, - "inson": 21007, - "insp": 1597, - "inspec": 7915, - "inspect": 40815, - "inspecting": 40565, - "inspection": 15142, - "inspections": 39513, - "inspector": 20514, - "inspir": 2573, - "inspiration": 4195, - "inspirational": 41936, - "inspirational": 9855, - "inspirations": 35093, - "inspire": 27901, - "inspire": 8583, - "inspired": 39849, - "inspired": 3516, - "inspires": 17245, - "inspiring": 41847, - "inspiring": 5705, - "inspo": 26897, - "inst": 1264, - "inst": 1581, - "insta": 22411, - "insta": 11694, - "instability": 41377, - "instac": 46678, - "instaf": 33800, - "instag": 14612, - "instagood": 23718, - "instagram": 27910, - "instagram": 2659, - "instal": 38805, - "install": 6940, - "install": 11168, - "installation": 9358, - "installations": 27909, - "installed": 8807, - "installing": 18301, - "installment": 25315, - "installs": 45568, - "instalment": 47766, - "instance": 34572, - "instant": 38810, - "instant": 10635, - "instantly": 17703, - "instap": 23758, - "instapic": 34378, - 
"instaweather": 43078, - "instaweatherpro": 43150, - "inste": 3571, - "instead": 4191, - "instein": 13421, - "instem": 27030, - "instin": 23382, - "instinct": 30544, - "institu": 4257, - "institute": 5861, - "institutes": 43674, - "institution": 18823, - "institutional": 27442, - "institutions": 15207, - "instore": 41679, - "instru": 4544, - "instruc": 19648, - "instruction": 19407, - "instructional": 31022, - "instructions": 17040, - "instructor": 16087, - "instructors": 31998, - "instrument": 42196, - "instrument": 15806, - "instrumental": 23041, - "instruments": 14793, - "instyle": 41321, - "insu": 8805, - "insul": 9615, - "insulated": 42051, - "insulation": 28194, - "insulin": 29311, - "insult": 26673, - "insulting": 39646, - "insults": 40451, - "insur": 5024, - "insurance": 5870, - "insured": 31321, - "insurers": 43142, - "insurtech": 28716, - "int": 1828, - "int": 1207, - "inta": 38314, - "intact": 26870, - "intake": 19539, - "intan": 47695, - "inte": 1598, - "inte": 41900, - "intech": 26504, - "inted": 6147, - "integr": 5151, - "integral": 27018, - "integrate": 25735, - "integrated": 12797, - "integrating": 31555, - "integration": 12583, - "integrity": 14791, - "intel": 11778, - "intel": 11426, - "intellec": 13281, - "intellect": 47828, - "intellectu": 31966, - "intellectual": 18069, - "intelli": 5324, - "intellig": 5632, - "intelligence": 6846, - "intelligent": 14063, - "inten": 2967, - "intend": 36674, - "intended": 16812, - "intense": 10258, - "intensi": 22928, - "intensity": 19956, - "intensive": 21049, - "intent": 18881, - "intention": 26786, - "intentional": 29536, - "intentionally": 31215, - "intentions": 26710, - "inter": 1006, - "inter": 10093, - "interact": 21736, - "interacting": 35045, - "interaction": 17650, - "interactions": 22162, - "interactive": 9456, - "intercep": 23676, - "interception": 48762, - "interceptions": 45313, - "interchange": 34222, - "intercontinental": 31983, - "interdisciplinary": 38132, - "intere": 2008, - "interest": 5095, 
- "interested": 4620, - "interesting": 3628, - "interests": 16425, - "interface": 18753, - "interfaith": 38399, - "interference": 29099, - "interim": 19509, - "interior": 10700, - "interior": 7305, - "interiordesign": 12902, - "interiors": 14836, - "intermedi": 20246, - "intermediate": 24304, - "intermission": 44805, - "intermitt": 44946, - "intern": 9976, - "intern": 14068, - "internal": 11285, - "internally": 41134, - "internation": 42534, - "international": 8566, - "international": 2436, - "internationaldayof": 41518, - "internationally": 24059, - "internationalwomensday": 17682, - "interne": 32713, - "internet": 30180, - "internet": 4757, - "internetof": 44449, - "internetofthings": 45925, - "interns": 19902, - "internship": 16661, - "internships": 39410, - "interoper": 45754, - "interpre": 11162, - "interpret": 49154, - "interpret": 40459, - "interpretation": 20652, - "interpreted": 42157, - "interpreting": 46525, - "interro": 29548, - "interrup": 21609, - "interrupt": 48449, - "interrupted": 30288, - "intersec": 45246, - "intersection": 19210, - "interstate": 21963, - "interstellar": 41506, - "interval": 36032, - "intervals": 44884, - "interven": 18245, - "intervention": 16804, - "interventions": 28848, - "interview": 2885, - "interviewed": 11688, - "interviewing": 16399, - "interviews": 9910, - "intestin": 37938, - "intestinal": 38896, - "inthe": 7486, - "inti": 14459, - "intim": 38832, - "intimacy": 46430, - "intimate": 16382, - "intimid": 24041, - "intimidating": 44405, - "intimidation": 49258, - "inting": 15571, - "intl": 38186, - "intl": 14224, - "intment": 9020, - "intments": 21420, - "into": 35235, - "into": 1095, - "intoler": 28534, - "intolerance": 37808, - "intothe": 38511, - "intra": 20922, - "intrac": 46195, - "intram": 40956, - "intre": 29397, - "intrepid": 39127, - "intri": 15421, - "intric": 23763, - "intricate": 29616, - "intrigu": 18856, - "intrigue": 45140, - "intrigued": 40034, - "intriguing": 24334, - "intrin": 45181, - "intro": 2999, - 
"intro": 13224, - "introduc": 3621, - "introduce": 9813, - "introduced": 10446, - "introduces": 12933, - "introducing": 6256, - "introduction": 11812, - "introductory": 38121, - "intru": 22949, - "ints": 2514, - "intu": 17225, - "intuition": 40897, - "intuitive": 35224, - "inu": 21131, - "inuit": 41250, - "inus": 45857, - "inv": 2279, - "inv": 43786, - "inva": 10084, - "invade": 34609, - "invaded": 32596, - "invaders": 35188, - "invading": 40101, - "invali": 31592, - "invalid": 46998, - "invaluable": 33976, - "invasi": 38100, - "invasion": 13378, - "invasive": 19554, - "inve": 2024, - "inven": 26233, - "invent": 11665, - "invent": 23558, - "invented": 14100, - "invention": 23607, - "inventions": 44914, - "inventor": 22836, - "inventory": 19444, - "inver": 12061, - "inverness": 33080, - "inverte": 46397, - "inverted": 40709, - "invest": 4180, - "invest": 9716, - "invested": 22536, - "investig": 4626, - "investigate": 15703, - "investigated": 29180, - "investigates": 29621, - "investigating": 13713, - "investigation": 8194, - "investigations": 24020, - "investigative": 30233, - "investigator": 30528, - "investigators": 24121, - "investin": 40195, - "investing": 10554, - "investment": 5605, - "investments": 14675, - "investor": 15490, - "investors": 10486, - "invests": 38378, - "invic": 25253, - "invigor": 48722, - "invin": 30252, - "invincible": 38052, - "invisible": 16093, - "invit": 12454, - "invitation": 15032, - "invitational": 14511, - "invitations": 40120, - "invite": 8109, - "invited": 7731, - "invites": 16034, - "inviting": 14349, - "invo": 29417, - "invol": 4000, - "involve": 26325, - "involved": 5320, - "involvement": 19502, - "involves": 22652, - "involving": 14786, - "inwx": 35674, - "iny": 23257, - "inyour": 47954, - "io": 3167, - "io": 3752, - "ioc": 43018, - "iom": 33000, - "iom": 31135, - "ion": 14871, - "ion": 3668, - "ions": 26289, - "ior": 7354, - "ior": 2498, - "iority": 46016, - "iors": 6427, - "ios": 6614, - "iot": 32694, - "iot": 6627, - 
"iota": 37294, - "ious": 6994, - "iously": 38233, - "iow": 7439, - "iowa": 38847, - "iowa": 8290, - "ip": 1719, - "ip": 8600, - "ipa": 11199, - "ipad": 39067, - "ipad": 7491, - "ipads": 35281, - "ipc": 41981, - "iphone": 26030, - "iphone": 4314, - "iphones": 37561, - "ipl": 13440, - "ipment": 37824, - "ipo": 40218, - "ipo": 24090, - "ipod": 17889, - "ipp": 31706, - "ips": 26910, - "ipsw": 22221, - "ipswich": 24494, - "iq": 15554, - "iq": 19996, - "iqbal": 33553, - "ir": 582, - "ir": 742, - "ira": 4923, - "ira": 5371, - "irah": 35724, - "iran": 19273, - "iran": 5075, - "irandeal": 46533, - "irani": 37984, - "iranian": 14158, - "iraq": 8543, - "iraqi": 18617, - "irc": 41527, - "ird": 2770, - "ire": 3013, - "ire": 1454, - "ired": 32728, - "ired": 2995, - "ireland": 32806, - "ireland": 4157, - "irene": 21600, - "ires": 12435, - "irez": 21581, - "irgc": 47942, - "iri": 2155, - "iri": 13880, - "irical": 33366, - "irie": 42979, - "irina": 46664, - "iring": 10169, - "iris": 16437, - "irish": 9386, - "irish": 4889, - "irl": 34494, - "irl": 8570, - "irling": 26493, - "irls": 24344, - "irma": 22406, - "irn": 42603, - "iro": 23209, - "iro": 7280, - "iron": 7699, - "iron": 5391, - "ironic": 24518, - "ironically": 36779, - "ironing": 46655, - "ironman": 20330, - "irons": 30032, - "irony": 20681, - "irport": 27769, - "irr": 24641, - "irrational": 47413, - "irregular": 38692, - "irrelevant": 34677, - "irresi": 31200, - "irresistible": 35252, - "irresponsible": 44714, - "irri": 21484, - "irrigation": 23761, - "irrit": 24218, - "irs": 6086, - "irst": 32701, - "iru": 48206, - "irvin": 47053, - "irvine": 24201, - "irving": 19738, - "irwin": 23750, - "iry": 7239, - "is": 595, - "is": 533, - "isa": 11034, - "isa": 6536, - "isaac": 37544, - "isaac": 13659, - "isab": 13357, - "isabel": 27466, - "isabella": 26192, - "isabelle": 31072, - "isable": 46631, - "isai": 15365, - "isaiah": 17952, - "isak": 40619, - "isance": 46893, - "isation": 7194, - "isback": 43811, - "isc": 39316, - "isch": 
47888, - "isco": 5736, - "iscoming": 26458, - "isd": 46816, - "isd": 12002, - "ise": 7669, - "ise": 1479, - "ised": 2861, - "iselle": 48491, - "iser": 23080, - "iser": 5626, - "isers": 34879, - "ises": 5153, - "isf": 44036, - "isgreat": 34595, - "ish": 6844, - "ish": 1061, - "isha": 28050, - "ishable": 37949, - "ished": 35341, - "ishere": 46053, - "ishi": 26224, - "ishq": 27996, - "ishqba": 32503, - "ishqbaaaz": 36591, - "isi": 7233, - "isi": 17880, - "isil": 34636, - "isin": 37676, - "ising": 3426, - "isis": 7531, - "isk": 30171, - "isl": 31368, - "isla": 22807, - "islam": 6003, - "islam": 8770, - "islamabad": 19959, - "islamic": 31627, - "islamic": 9552, - "islamist": 38798, - "islamophobia": 43459, - "island": 13408, - "island": 2619, - "islander": 45651, - "islanders": 27804, - "islands": 7145, - "islay": 49279, - "isle": 19082, - "isle": 11849, - "isleof": 24718, - "isles": 21816, - "islife": 26433, - "islington": 34945, - "ism": 47730, - "ism": 1935, - "isma": 43937, - "ismail": 36140, - "isme": 43570, - "ismo": 41926, - "isms": 18700, - "isn": 2923, - "isner": 48246, - "isnow": 43694, - "isnt": 19416, - "iso": 2462, - "iso": 12263, - "isol": 11414, - "isolated": 19044, - "isolation": 26400, - "ison": 12949, - "ison": 4553, - "isons": 33318, - "isoo": 35857, - "isp": 31397, - "isp": 39041, - "isra": 3591, - "israel": 20837, - "israel": 4779, - "israeli": 8994, - "israelis": 45713, - "isreal": 47147, - "isro": 44841, - "iss": 11738, - "iss": 4950, - "issa": 38579, - "issa": 7560, - "issan": 49358, - "issance": 40828, - "issant": 38828, - "isse": 18986, - "ission": 37946, - "issu": 2049, - "issue": 3202, - "issued": 9246, - "issues": 4082, - "issuing": 37226, - "ist": 9751, - "ist": 2304, - "istanbul": 12258, - "istandwith": 33820, - "iste": 32563, - "ister": 14555, - "isthe": 46748, - "istic": 29556, - "ists": 8426, - "isu": 17030, - "isu": 23328, - "it": 529, - "it": 585, - "ita": 36920, - "ita": 2864, - "itable": 8915, - "ital": 2306, - "ital": 1660, - 
"itali": 11644, - "italia": 11025, - "italian": 20264, - "italian": 5175, - "italians": 44744, - "italk": 32894, - "italy": 4052, - "itan": 18383, - "itans": 40711, - "itar": 47161, - "itarian": 11599, - "itary": 17604, - "itas": 31634, - "itas": 13436, - "itate": 42457, - "itated": 36744, - "itation": 5070, - "itative": 22892, - "itc": 36449, - "itch": 2387, - "itch": 8147, - "itchen": 32664, - "itchy": 41980, - "ite": 2732, - "ite": 802, - "iteam": 37828, - "itec": 3099, - "itec": 43936, - "itech": 44215, - "itech": 23040, - "ited": 8603, - "ited": 1108, - "itel": 44638, - "itely": 4605, - "item": 8532, - "items": 6207, - "iter": 7938, - "iter": 19773, - "iteracy": 39634, - "iterate": 43106, - "iteration": 38790, - "ites": 2454, - "itez": 42131, - "itf": 35436, - "itfc": 36519, - "ith": 6133, - "ith": 1757, - "ithaca": 46257, - "iti": 760, - "iti": 6165, - "itia": 22634, - "itian": 23365, - "itic": 11950, - "itical": 48767, - "itics": 33967, - "ities": 41423, - "ities": 1480, - "itim": 15676, - "itiner": 32803, - "itinerary": 41564, - "iting": 1257, - "ition": 25263, - "ition": 1104, - "itions": 5540, - "itious": 13329, - "itis": 33539, - "itis": 8388, - "itive": 3067, - "itly": 42240, - "ito": 22167, - "ito": 4661, - "iton": 21119, - "itor": 47267, - "itor": 4584, - "itors": 22005, - "itos": 24560, - "its": 7140, - "its": 902, - "itsa": 45032, - "itself": 7290, - "itsme": 41125, - "itss": 47040, - "itt": 1031, - "itt": 11228, - "itta": 21233, - "itte": 31962, - "itted": 24429, - "itten": 30014, - "itten": 4343, - "itter": 11456, - "itters": 13082, - "itti": 28629, - "ittin": 25646, - "itting": 3147, - "ittle": 24208, - "ittle": 21366, - "ittles": 38989, - "itton": 25707, - "itty": 35096, - "itu": 1668, - "itu": 32128, - "itude": 43382, - "itude": 5012, - "itudes": 20459, - "itunes": 7007, - "itup": 35838, - "iture": 25547, - "itus": 24364, - "itutes": 32883, - "itv": 20159, - "itv": 12805, - "ity": 2480, - "ity": 696, - "itya": 32055, - "itz": 14544, - "itz": 
7807, - "iu": 14292, - "iu": 15575, - "ium": 10762, - "ius": 6740, - "iv": 6775, - "iv": 9315, - "iva": 42463, - "ivan": 15544, - "ivan": 15689, - "ivanka": 37914, - "ive": 26885, - "ive": 8653, - "ived": 15654, - "iver": 36849, - "iver": 44254, - "ives": 27333, - "ivf": 39159, - "iving": 45136, - "ivory": 16776, - "ivote": 45835, - "ivy": 36939, - "ivy": 16045, - "iw": 13058, - "iw": 46604, - "iwant": 42747, - "iwd": 16815, - "iwm": 44237, - "ix": 13272, - "ix": 8756, - "iy": 13704, - "iya": 18595, - "iyaki": 48395, - "iz": 2845, - "iz": 8407, - "iza": 37704, - "ization": 10847, - "ize": 10885, - "ized": 7690, - "izen": 34776, - "izer": 23895, - "izes": 45434, - "izing": 17354, - "izo": 46910, - "izz": 31779, - "izz": 46128, - "izzy": 28861, - "j": 73, - "j": 329, - "ja": 1586, - "ja": 2641, - "jaan": 25052, - "jab": 8059, - "jab": 9439, - "jac": 2293, - "jac": 30198, - "jace": 43286, - "jack": 2679, - "jack": 3267, - "jacked": 27923, - "jacket": 6164, - "jackets": 14745, - "jacki": 47418, - "jackie": 28023, - "jackie": 11716, - "jacking": 40929, - "jackman": 35723, - "jackpot": 23926, - "jacks": 19649, - "jackson": 12321, - "jackson": 4363, - "jacksonville": 19263, - "jaco": 6840, - "jacob": 14385, - "jacob": 9222, - "jacobs": 17482, - "jacobson": 46826, - "jacqu": 14495, - "jacqueline": 22843, - "jacques": 17799, - "jad": 12976, - "jad": 38691, - "jada": 37416, - "jade": 25123, - "jade": 14513, - "jaden": 37174, - "jadine": 37445, - "jae": 16869, - "jae": 15765, - "jaejoong": 43610, - "jaf": 19362, - "jag": 7984, - "jag": 36236, - "jagan": 48530, - "jagger": 30835, - "jags": 31086, - "jagu": 10096, - "jaguar": 44777, - "jaguar": 14757, - "jaguars": 21854, - "jah": 20067, - "jah": 11084, - "jahan": 44404, - "jahan": 47827, - "jai": 10542, - "jai": 13819, - "jail": 18574, - "jail": 9332, - "jailbreak": 45990, - "jailed": 19456, - "jails": 47833, - "jaime": 24716, - "jain": 21999, - "jaipur": 23593, - "jais": 48607, - "jait": 28910, - "jaitley": 32776, - "jak": 
9225, - "jak": 30589, - "jakarta": 15471, - "jake": 13140, - "jake": 7419, - "jakob": 47358, - "jal": 8380, - "jal": 26773, - "jalan": 27270, - "jalap": 49081, - "jalape": 34263, - "jalapeño": 43017, - "jalen": 33548, - "jam": 1434, - "jam": 5201, - "jama": 8977, - "jama": 35366, - "jamaica": 13019, - "jamaican": 25144, - "jamal": 26108, - "jambo": 35599, - "jamboree": 38506, - "jame": 12341, - "james": 6963, - "james": 2392, - "jamesbond": 44704, - "jamesc": 47004, - "jameson": 31731, - "jami": 15092, - "jamie": 16454, - "jamie": 8078, - "jamiedor": 34310, - "jamiedornan": 34896, - "jammed": 35590, - "jammin": 35223, - "jamming": 25862, - "jammu": 25926, - "jams": 20243, - "jan": 1891, - "jan": 3334, - "jana": 18182, - "jane": 12389, - "jane": 6736, - "janeiro": 31740, - "janet": 29665, - "janet": 15872, - "jang": 41526, - "jang": 22074, - "jani": 22606, - "janice": 36048, - "janine": 46896, - "janis": 44233, - "jann": 35377, - "jans": 22578, - "jansen": 45354, - "janu": 3623, - "january": 3697, - "jap": 2299, - "jap": 49062, - "japan": 4502, - "japan": 3400, - "japanese": 27211, - "japanese": 4925, - "japs": 42121, - "jar": 5120, - "jar": 10837, - "jard": 25778, - "jardin": 37371, - "jare": 17654, - "jared": 35597, - "jared": 12571, - "jaredle": 36739, - "jaredleto": 37106, - "jaro": 35505, - "jarpad": 44497, - "jarre": 23385, - "jarrett": 30531, - "jars": 27583, - "jarvis": 29286, - "jas": 4492, - "jas": 17559, - "jasmin": 42989, - "jasmin": 47700, - "jasmine": 17056, - "jason": 10009, - "jason": 5395, - "jasper": 19827, - "jat": 26106, - "jau": 26932, - "jauregui": 48175, - "jav": 6234, - "java": 12918, - "javascri": 16289, - "javascript": 16423, - "jave": 46218, - "javed": 42268, - "javelin": 41701, - "javi": 47627, - "javier": 23307, - "jaw": 14804, - "jaw": 17307, - "jawa": 44790, - "jaws": 25491, - "jax": 22348, - "jax": 12390, - "jay": 3427, - "jay": 4155, - "jaya": 21960, - "jayanti": 37732, - "jaye": 45703, - "jayne": 35228, - "jays": 12393, - "jaz": 
3465, - "jaz": 32874, - "jazeera": 38260, - "jazz": 11488, - "jazz": 4528, - "jazzfest": 36683, - "jazzy": 28191, - "jb": 21915, - "jb": 13637, - "jc": 14991, - "jc": 11517, - "jd": 18289, - "jd": 14125, - "jdm": 42013, - "je": 1013, - "je": 8776, - "jeal": 9964, - "jealous": 11093, - "jealousy": 37654, - "jean": 13943, - "jean": 6473, - "jeanette": 48167, - "jeanne": 29201, - "jeans": 10157, - "jeb": 35101, - "jec": 1347, - "ject": 6070, - "jed": 12166, - "jed": 38748, - "jeddah": 40982, - "jedi": 16681, - "jee": 29250, - "jee": 14870, - "jeep": 16593, - "jeep": 11286, - "jeeplife": 43100, - "jeet": 45542, - "jeet": 30944, - "jef": 10276, - "jeff": 6245, - "jeff": 5550, - "jefferson": 44711, - "jefferson": 13976, - "jeffery": 41470, - "jeffree": 45994, - "jeffrey": 32886, - "jeffrey": 16027, - "jeho": 42437, - "jeky": 43893, - "jekyll": 49405, - "jel": 9794, - "jelena": 48218, - "jelly": 19110, - "jelly": 13762, - "jellyfish": 30988, - "jem": 46326, - "jem": 37530, - "jen": 2554, - "jen": 12997, - "jenkins": 16162, - "jenn": 33921, - "jenn": 29869, - "jenna": 17125, - "jenner": 14260, - "jenni": 6774, - "jennie": 28875, - "jennifer": 19786, - "jennifer": 8613, - "jennings": 21564, - "jenny": 20165, - "jenny": 13414, - "jens": 40806, - "jensen": 35558, - "jensen": 19004, - "jensenackles": 41011, - "jeon": 45200, - "jeon": 43337, - "jeong": 47146, - "jeong": 39264, - "jeopar": 22988, - "jeopardy": 29613, - "jer": 2310, - "jer": 35307, - "jere": 5614, - "jeremi": 22362, - "jeremiah": 27301, - "jeremy": 14656, - "jeremy": 8127, - "jeremycorbyn": 37484, - "jeric": 25084, - "jericho": 28892, - "jerk": 23917, - "jerky": 40079, - "jermaine": 40722, - "jerome": 19876, - "jerry": 18163, - "jerry": 9164, - "jersey": 21921, - "jersey": 4471, - "jerseys": 15518, - "jerus": 12257, - "jerusalem": 12557, - "jes": 7686, - "jes": 35826, - "jess": 5313, - "jess": 13758, - "jesse": 23112, - "jesse": 11770, - "jessi": 24373, - "jessic": 14881, - "jessica": 45421, - "jessica": 8178, - 
"jessie": 19424, - "jester": 44225, - "jesu": 19777, - "jesuit": 33234, - "jesus": 4070, - "jet": 11515, - "jet": 6565, - "jetblue": 45021, - "jeter": 38450, - "jets": 38584, - "jets": 10025, - "jett": 44541, - "jetty": 46382, - "jew": 27450, - "jewel": 4880, - "jewel": 17591, - "jewell": 9777, - "jewellers": 46265, - "jewellery": 11192, - "jewelry": 28018, - "jewelry": 6039, - "jewels": 20205, - "jewish": 29594, - "jewish": 9104, - "jews": 14200, - "jf": 31130, - "jf": 33718, - "jfc": 43652, - "jfk": 18486, - "jg": 41986, - "jg": 35138, - "jh": 24858, - "jh": 21485, - "jha": 47012, - "jha": 38092, - "jhal": 45695, - "jhar": 31546, - "jharkhand": 39001, - "jhb": 34631, - "ji": 3252, - "ji": 2697, - "jia": 32907, - "jian": 33427, - "jiang": 43309, - "jiang": 25762, - "jic": 48350, - "jic": 40215, - "jid": 24403, - "jie": 40005, - "jig": 15136, - "jig": 47430, - "jigsaw": 32987, - "jiha": 23194, - "jihad": 29637, - "jihoon": 44765, - "jil": 36225, - "jill": 24136, - "jill": 15254, - "jillian": 37820, - "jim": 3190, - "jim": 4550, - "jima": 20679, - "jimcantore": 43950, - "jimenez": 35947, - "jimi": 30565, - "jimin": 16286, - "jimmie": 45679, - "jimmy": 12215, - "jimmy": 6817, - "jimmyfallon": 45265, - "jin": 7927, - "jin": 8485, - "jind": 40609, - "jing": 34933, - "jing": 28607, - "jingle": 28699, - "jinnah": 43141, - "jinping": 39308, - "jinx": 42977, - "jinyoung": 38051, - "jio": 40501, - "jis": 25988, - "jis": 23515, - "jisoo": 43070, - "jit": 11947, - "jit": 20308, - "jitsu": 24530, - "jiu": 43351, - "jiu": 44123, - "jj": 12502, - "jj": 12790, - "jk": 20189, - "jk": 9702, - "jkt": 21494, - "jl": 25027, - "jl": 22911, - "jlo": 31017, - "jm": 24044, - "jm": 18657, - "jn": 24576, - "jn": 21717, - "jnr": 37145, - "jnu": 47142, - "jo": 683, - "jo": 3804, - "joachim": 48979, - "joan": 28064, - "joan": 12710, - "joann": 35484, - "joanna": 25357, - "joanne": 43736, - "joanne": 25092, - "joao": 45666, - "joaqu": 25140, - "joaquin": 30745, - "job": 13114, - "job": 2075, - 
"jobs": 3735, - "jobsearch": 45459, - "joburg": 39343, - "jocel": 36879, - "jocelyn": 47259, - "jock": 34485, - "jockey": 20126, - "jodh": 48689, - "jodi": 36812, - "jodi": 26888, - "jodie": 33100, - "jody": 32959, - "joe": 9309, - "joe": 3305, - "joel": 19819, - "joel": 11429, - "joes": 34756, - "joey": 16281, - "joey": 10455, - "jog": 37967, - "jog": 31691, - "jogging": 37922, - "joh": 1201, - "johan": 17416, - "johan": 27789, - "johann": 31180, - "johanna": 41494, - "johannes": 37779, - "johannesburg": 28377, - "johansson": 41512, - "johar": 34871, - "john": 2004, - "john": 1742, - "johncena": 46820, - "johnnie": 47947, - "johnny": 14464, - "johnny": 6904, - "johns": 14515, - "johnson": 26036, - "johnson": 4010, - "johnston": 19791, - "johnstone": 40766, - "johor": 34750, - "join": 14737, - "join": 1384, - "joined": 4954, - "joining": 5118, - "joins": 5681, - "joint": 6640, - "jointhe": 30422, - "jointly": 37471, - "joints": 27204, - "jojo": 41484, - "jojo": 22075, - "joke": 7198, - "joker": 18200, - "jokers": 44101, - "jokes": 11336, - "joking": 26112, - "joko": 44975, - "jol": 9174, - "jol": 36470, - "jolie": 31633, - "jolla": 46109, - "jolly": 21516, - "jom": 32152, - "jon": 3026, - "jon": 6139, - "jona": 6629, - "jonah": 47934, - "jonah": 27556, - "jonas": 42373, - "jonas": 13650, - "jonathan": 19026, - "jonathan": 7762, - "jone": 33934, - "jones": 19091, - "jones": 3538, - "jong": 20214, - "jong": 14726, - "jonghyun": 29023, - "jongin": 36957, - "joni": 43177, - "jonny": 28454, - "jonny": 21895, - "joo": 25807, - "joo": 27680, - "joom": 47543, - "joon": 18547, - "joong": 26544, - "jop": 30486, - "joplin": 42688, - "jor": 2482, - "jor": 31595, - "jordan": 14644, - "jordan": 4388, - "jordani": 46898, - "jordi": 44795, - "jorge": 48761, - "jorge": 18225, - "jos": 20560, - "jos": 19661, - "jose": 4647, - "jose": 7075, - "josef": 36584, - "josel": 47800, - "joseph": 14163, - "joseph": 6478, - "josephine": 34866, - "josh": 9998, - "josh": 5679, - "joshi": 24786, 
- "joshu": 9112, - "joshua": 11852, - "josi": 33583, - "josie": 33167, - "joss": 42834, - "josé": 27922, - "jou": 19921, - "jou": 32029, - "jour": 2078, - "jour": 17142, - "journ": 4563, - "journal": 6626, - "journalism": 10123, - "journalist": 9914, - "journalists": 12249, - "journals": 24391, - "journe": 48833, - "journey": 32156, - "journey": 3749, - "journeys": 23329, - "journo": 37034, - "journos": 46437, - "jovi": 33866, - "joy": 6308, - "joy": 4273, - "joyce": 43753, - "joyce": 15275, - "joye": 34052, - "joyeux": 41876, - "joyful": 24139, - "joyous": 32245, - "joyride": 46949, - "joys": 22996, - "jp": 18249, - "jp": 10557, - "jpg": 36950, - "jpn": 36212, - "jr": 13973, - "jr": 3605, - "js": 46243, - "js": 8006, - "jst": 26523, - "jt": 39480, - "jt": 18119, - "ju": 669, - "ju": 9970, - "jual": 38720, - "juan": 17148, - "juan": 9274, - "juana": 9081, - "jubi": 15485, - "jubil": 47743, - "jubilee": 16907, - "juco": 31570, - "jud": 8363, - "juda": 32478, - "judah": 41066, - "judaism": 42217, - "judas": 39532, - "judd": 29770, - "judg": 20012, - "judge": 16824, - "judge": 5656, - "judged": 33453, - "judgement": 25246, - "judges": 12575, - "judging": 16570, - "judgment": 24191, - "judi": 42546, - "judice": 28032, - "judicial": 19579, - "judiciary": 24545, - "judith": 24047, - "judo": 27011, - "judy": 34663, - "judy": 16510, - "jug": 27619, - "jugg": 38628, - "juic": 38761, - "juice": 37954, - "juice": 6916, - "juices": 36757, - "juicy": 17623, - "juju": 43020, - "juke": 32519, - "jukebox": 36411, - "jul": 34662, - "jul": 15975, - "jule": 40819, - "jules": 21996, - "juli": 3614, - "juli": 49160, - "julia": 10207, - "julian": 25459, - "julian": 12643, - "juliana": 46059, - "julie": 22534, - "julie": 10505, - "julien": 32595, - "juliet": 20641, - "juliette": 44804, - "julio": 24888, - "julius": 20870, - "july": 2272, - "jum": 20791, - "jumbo": 24678, - "jume": 45989, - "jump": 5519, - "jump": 6423, - "jumped": 16901, - "jumper": 16558, - "jumpers": 36485, - 
"jumping": 11476, - "jumpman": 48803, - "jumps": 18911, - "jumpsuit": 31044, - "jun": 1637, - "jun": 7719, - "junction": 11320, - "june": 23188, - "june": 2345, - "jung": 13086, - "jung": 13031, - "jungkook": 20040, - "jungle": 42421, - "jungle": 10865, - "juni": 4029, - "junior": 21167, - "junior": 5027, - "juniors": 16811, - "juniper": 33829, - "junk": 16000, - "junkie": 27613, - "junkies": 41207, - "juno": 28845, - "junto": 34282, - "jupit": 15270, - "jupiter": 16212, - "jur": 15896, - "jura": 14715, - "jurassic": 28844, - "jurassic": 21255, - "jurgen": 39263, - "juris": 37010, - "jurisdic": 37714, - "jury": 12931, - "jus": 14999, - "just": 1770, - "just": 761, - "justi": 14700, - "justic": 30399, - "justice": 16904, - "justice": 3604, - "justicefor": 25812, - "justiceleague": 41929, - "justices": 44356, - "justified": 34546, - "justify": 28192, - "justin": 7537, - "justin": 4394, - "justinbieber": 12501, - "justine": 34418, - "justintrudeau": 32184, - "justsaying": 42922, - "juve": 47717, - "juve": 23092, - "juven": 12944, - "juvenile": 19333, - "juvent": 13908, - "juventus": 47378, - "juventus": 16208, - "jux": 33552, - "juxta": 34964, - "jv": 37932, - "jv": 11805, - "jw": 30221, - "jw": 24215, - "jy": 20979, - "jyo": 27378, - "jyoti": 48696, - "jä": 45381, - "k": 74, - "k": 330, - "ka": 1595, - "ka": 1525, - "kaa": 34496, - "kab": 6554, - "kab": 45134, - "kabaddi": 41749, - "kabir": 38619, - "kabo": 47974, - "kabul": 26160, - "kac": 21693, - "kach": 14341, - "kad": 10901, - "kade": 41130, - "kaduna": 38053, - "kae": 22542, - "kaeper": 30070, - "kaepernick": 30713, - "kaf": 19870, - "kag": 13666, - "kag": 31003, - "kah": 16068, - "kah": 15463, - "kahn": 35397, - "kai": 12752, - "kai": 9601, - "kaido": 40255, - "kail": 23623, - "kaine": 39028, - "kair": 33027, - "kaiser": 43685, - "kaiser": 29960, - "kait": 19326, - "kaitlyn": 34948, - "kaj": 44788, - "kaj": 40381, - "kak": 10401, - "kak": 40128, - "kaka": 47689, - "kaku": 30900, - "kal": 4187, - "kal": 18712, 
- "kala": 45453, - "kala": 33105, - "kalam": 40142, - "kalamaz": 42328, - "kalamazoo": 46264, - "kalb": 34483, - "kale": 17162, - "kale": 16625, - "kaleido": 41144, - "kali": 17844, - "kali": 26964, - "kalin": 42776, - "kalyan": 23825, - "kam": 4104, - "kam": 26011, - "kamal": 31371, - "kamal": 28619, - "kamala": 45003, - "kame": 45235, - "kamen": 40738, - "kami": 28707, - "kamloops": 36602, - "kamp": 35179, - "kamp": 29522, - "kampala": 37134, - "kan": 2532, - "kan": 8101, - "kana": 35178, - "kand": 17478, - "kane": 32218, - "kane": 9765, - "kang": 12226, - "kang": 20789, - "kangar": 20622, - "kangaroo": 25513, - "kani": 40907, - "kani": 41948, - "kann": 18533, - "kannada": 30053, - "kano": 28201, - "kans": 34012, - "kansas": 25507, - "kansas": 6539, - "kansascity": 46134, - "kant": 39923, - "kant": 47132, - "kanth": 24427, - "kanu": 44565, - "kany": 13590, - "kanye": 29680, - "kanye": 14965, - "kanyewest": 31943, - "kap": 6804, - "kap": 45279, - "kapam": 48561, - "kapil": 32337, - "kapil": 42709, - "kapilshar": 48978, - "kaplan": 37401, - "kapoor": 9117, - "kapp": 36717, - "kappa": 20239, - "kapur": 42371, - "kar": 1813, - "kar": 5933, - "kara": 12552, - "karab": 40916, - "karachi": 13671, - "karak": 40372, - "karan": 20077, - "karan": 20931, - "karanjohar": 47621, - "karao": 16262, - "karaoke": 16640, - "karate": 21211, - "kardashi": 13619, - "kardashian": 14578, - "kare": 14310, - "kare": 38354, - "kareem": 38885, - "kareena": 41569, - "karen": 17719, - "karen": 10349, - "kari": 15339, - "kari": 15161, - "karim": 33477, - "karin": 43917, - "karina": 40250, - "karl": 20967, - "karl": 13134, - "karla": 42309, - "karma": 17658, - "karnat": 13994, - "karnataka": 15515, - "karo": 45305, - "kart": 47841, - "kart": 21310, - "karthik": 41397, - "karti": 23053, - "kartikeyan": 32584, - "karting": 41655, - "kas": 6119, - "kas": 14372, - "kasa": 46111, - "kash": 6954, - "kash": 21371, - "kashi": 47945, - "kashmir": 20251, - "kashmir": 10783, - "kashmiri": 35331, - "kasi": 
45870, - "kasi": 32819, - "kasich": 39666, - "kat": 2844, - "kat": 9341, - "kata": 14558, - "kate": 11620, - "kate": 6699, - "katelyn": 45963, - "kath": 7386, - "kath": 19745, - "katharine": 41473, - "katherine": 17687, - "kathle": 18721, - "kathleen": 21709, - "kathmandu": 34456, - "kathniel": 36159, - "kathr": 14905, - "kathryn": 33142, - "kathryn": 19999, - "kathy": 34775, - "kathy": 18795, - "kati": 6515, - "kati": 29928, - "katic": 48058, - "katie": 24117, - "katie": 9076, - "katniss": 47916, - "kato": 27573, - "katrin": 31282, - "katrina": 21397, - "katrinakaif": 45845, - "kats": 44213, - "katsu": 49296, - "katsu": 43712, - "katy": 17609, - "katy": 14435, - "katyperry": 28309, - "katz": 30790, - "kau": 9299, - "kau": 36895, - "kauai": 44050, - "kaufman": 37188, - "kaur": 30518, - "kav": 10228, - "kavan": 18576, - "kavanaugh": 20252, - "kaw": 10842, - "kaw": 42719, - "kawa": 33244, - "kawaii": 26891, - "kawasaki": 28227, - "kawhi": 41220, - "kay": 4673, - "kay": 9862, - "kaya": 22752, - "kayak": 27043, - "kayaking": 28977, - "kaye": 33003, - "kayla": 17139, - "kaylee": 47215, - "kayo": 37021, - "kaz": 8812, - "kaz": 39622, - "kazakh": 25451, - "kazakhstan": 26720, - "kazan": 47641, - "kb": 27381, - "kb": 19960, - "kbs": 27418, - "kc": 10869, - "kc": 8638, - "kca": 14347, - "kcon": 39970, - "kcr": 46181, - "kd": 21826, - "kd": 15597, - "kday": 31074, - "kdrama": 48628, - "ke": 643, - "ke": 618, - "kea": 47926, - "kean": 43288, - "keane": 28635, - "keanu": 40608, - "kear": 21562, - "kearney": 36435, - "keating": 40045, - "keaton": 29975, - "kebab": 36497, - "ked": 11730, - "ked": 1243, - "kee": 9724, - "kee": 6760, - "keef": 42323, - "keefe": 46965, - "keegan": 31122, - "keel": 48376, - "keen": 17714, - "keen": 13218, - "keenan": 36276, - "keep": 2924, - "keep": 1726, - "keeper": 7650, - "keepers": 16130, - "keepin": 41712, - "keeping": 38371, - "keeping": 4873, - "keepit": 28044, - "keeps": 6333, - "keer": 27412, - "keerth": 47500, - "keerthyofficial": 48185, - 
"kees": 10791, - "keg": 32785, - "keh": 41272, - "keh": 36983, - "kei": 18735, - "kei": 24835, - "keith": 18762, - "keith": 8252, - "kej": 15674, - "kejri": 16617, - "kejriwal": 17334, - "keke": 39195, - "kel": 2825, - "kel": 7553, - "kele": 41765, - "kell": 16082, - "kell": 40103, - "keller": 21407, - "kelley": 23776, - "kelli": 45852, - "kelli": 46190, - "kellie": 49224, - "kellogg": 44218, - "kelly": 13417, - "kelly": 5220, - "kelown": 31708, - "kelowna": 32963, - "kelsey": 42295, - "kelsey": 23018, - "kelvin": 32859, - "kem": 31013, - "kem": 17349, - "kemp": 18302, - "kemp": 25325, - "ken": 1838, - "ken": 1702, - "kend": 7497, - "kendal": 44836, - "kendall": 34607, - "kendall": 16238, - "kendra": 36074, - "kendrick": 41787, - "kendrick": 21953, - "kendricklamar": 47020, - "kenne": 6209, - "kennedy": 38631, - "kennedy": 9004, - "kennel": 39595, - "kenneth": 46900, - "kenneth": 17839, - "kenney": 41373, - "kenny": 20185, - "kenny": 9595, - "kens": 29765, - "kensing": 21505, - "kensington": 24988, - "kent": 13875, - "kent": 8214, - "kentu": 9045, - "kentucky": 32230, - "kentucky": 10014, - "keny": 17374, - "kenya": 6181, - "kenyan": 22624, - "kenyans": 36263, - "kenyatta": 31012, - "kenzie": 38087, - "keo": 43062, - "kept": 7737, - "ker": 2352, - "ker": 1485, - "keral": 35122, - "kerala": 11881, - "kered": 26690, - "kerel": 32232, - "keri": 43447, - "kermit": 40908, - "kern": 40150, - "kernel": 40684, - "kerr": 20491, - "kerri": 41849, - "kerry": 24795, - "kerry": 13097, - "kers": 30347, - "kers": 2880, - "kershaw": 40785, - "kerson": 42810, - "kerswednesday": 48152, - "kert": 47279, - "kes": 38398, - "kes": 1115, - "kesh": 19751, - "kesha": 36526, - "kest": 15080, - "ket": 2715, - "ket": 1236, - "ketball": 38240, - "ketch": 22590, - "ketch": 35371, - "ketchup": 26724, - "kete": 25404, - "keted": 41396, - "keting": 15951, - "keto": 27485, - "keto": 28754, - "kets": 1632, - "kett": 23124, - "kett": 10312, - "kettering": 43779, - "kettle": 41992, - "kettle": 24303, 
- "kev": 22758, - "kev": 29419, - "kevin": 9419, - "kevin": 4685, - "kew": 38014, - "kew": 31409, - "kex": 30251, - "key": 2891, - "key": 1458, - "keyan": 27617, - "keyboard": 13017, - "keyboards": 49237, - "keychain": 31050, - "keye": 40516, - "keye": 20635, - "keyes": 18336, - "keynes": 32462, - "keynote": 7556, - "keys": 48912, - "keys": 6355, - "keystone": 30688, - "keyword": 42284, - "keywords": 48122, - "kf": 33308, - "kf": 42119, - "kfc": 22032, - "kg": 36772, - "kg": 7817, - "kgs": 46629, - "kh": 2166, - "kh": 7452, - "kha": 7333, - "kha": 18929, - "khair": 43742, - "khaki": 41646, - "khal": 13070, - "khaled": 29343, - "khali": 11324, - "khalid": 27166, - "khalifa": 21389, - "khalil": 36229, - "kham": 24892, - "khan": 13318, - "khan": 3873, - "khand": 43384, - "khand": 31110, - "khanna": 29931, - "khar": 18340, - "khar": 28578, - "khart": 37458, - "khat": 43290, - "khe": 26360, - "kher": 43843, - "khi": 39062, - "khi": 42925, - "khil": 34101, - "khloe": 45312, - "kho": 14022, - "kho": 28774, - "khou": 30656, - "khs": 21239, - "khtar": 45593, - "khu": 14041, - "khur": 32083, - "khy": 40917, - "khz": 45604, - "ki": 848, - "ki": 2608, - "kia": 8712, - "kian": 43961, - "kian": 25708, - "kians": 44010, - "kib": 43108, - "kiba": 37207, - "kic": 24003, - "kic": 27633, - "kicchasu": 44665, - "kicchasudeep": 45560, - "kick": 4102, - "kick": 4289, - "kickass": 39299, - "kickboxing": 36041, - "kicked": 12479, - "kicker": 26338, - "kickin": 34597, - "kicking": 7802, - "kickoff": 10245, - "kicks": 6989, - "kickstart": 40780, - "kickstarter": 13228, - "kid": 3948, - "kid": 3551, - "kidd": 24082, - "kidding": 14535, - "kiddo": 36360, - "kiddos": 29205, - "kidlit": 39064, - "kidlit": 33515, - "kidlitart": 41600, - "kidman": 44931, - "kidnap": 45100, - "kidnapp": 16183, - "kidnapped": 24737, - "kidnapping": 32361, - "kidney": 37835, - "kidney": 14610, - "kids": 15561, - "kids": 1911, - "kidz": 41938, - "kie": 8544, - "kie": 3094, - "kiefer": 48026, - "kiel": 40940, - 
"kiel": 25509, - "kien": 28782, - "kier": 20403, - "kier": 35575, - "kieran": 29231, - "kies": 36601, - "kies": 4993, - "kiest": 29755, - "kiev": 24585, - "kiewicz": 47574, - "kigali": 40278, - "kii": 39340, - "kik": 36176, - "kiki": 23962, - "kiko": 40861, - "kil": 4912, - "kil": 39337, - "kildare": 45541, - "kili": 24386, - "kilig": 49172, - "kilimanjaro": 43470, - "kilkenny": 33805, - "kill": 6163, - "kill": 4367, - "killa": 41355, - "killarney": 48813, - "killed": 3733, - "killer": 28230, - "killer": 6613, - "killers": 17614, - "killin": 25903, - "killing": 37977, - "killing": 5923, - "killings": 24918, - "kills": 9795, - "kiln": 44150, - "kilo": 39281, - "kilom": 26285, - "kilometers": 39192, - "kilometres": 43278, - "kilt": 49319, - "kim": 4639, - "kim": 4606, - "kimber": 16796, - "kimberley": 39859, - "kimberly": 27465, - "kimchi": 41027, - "kimi": 31536, - "kimkardashian": 35400, - "kimmel": 27820, - "kimono": 40024, - "kin": 1442, - "kin": 2667, - "kina": 28518, - "kind": 7204, - "kind": 3044, - "kinda": 6612, - "kinder": 12711, - "kinder": 24159, - "kindergarten": 16749, - "kindle": 24704, - "kindle": 10746, - "kindleunlimited": 32164, - "kindly": 13952, - "kindness": 45112, - "kindness": 10614, - "kinds": 14879, - "kine": 17607, - "kineni": 49080, - "kinetic": 37699, - "king": 2365, - "king": 674, - "kingdom": 21870, - "kingdom": 7364, - "kingdomhearts": 48570, - "kingdoms": 43890, - "kingfisher": 34330, - "kingjames": 33153, - "kingly": 33642, - "kingof": 27878, - "kings": 18590, - "kings": 4232, - "kingsley": 41807, - "kingston": 40736, - "kingston": 15393, - "kini": 41644, - "kinky": 37006, - "kinney": 37233, - "kino": 39000, - "kins": 31060, - "kins": 4386, - "kinson": 12095, - "kio": 28210, - "kio": 39401, - "kiosk": 39146, - "kip": 27636, - "kip": 15986, - "kipp": 43329, - "kir": 3476, - "kir": 32949, - "kira": 33038, - "kiran": 43234, - "kiran": 36603, - "kirby": 17065, - "kiri": 34170, - "kiri": 45826, - "kirk": 10639, - "kirk": 11508, - 
"kirkland": 43061, - "kiro": 39749, - "kirstel": 46483, - "kirsten": 31813, - "kirsty": 37787, - "kis": 3199, - "kis": 22796, - "kish": 25662, - "kiss": 43757, - "kiss": 5946, - "kissed": 22561, - "kisses": 47876, - "kisses": 11220, - "kissing": 18637, - "kistan": 29580, - "kit": 4566, - "kit": 4274, - "kita": 29961, - "kitch": 3850, - "kitchen": 18131, - "kitchen": 4485, - "kitchener": 34428, - "kitchens": 28301, - "kite": 47777, - "kite": 19867, - "kites": 45829, - "kits": 13730, - "kitt": 10840, - "kitten": 13063, - "kittens": 17216, - "kitties": 36013, - "kitty": 25067, - "kitty": 8417, - "kiwan": 38709, - "kiwanis": 46513, - "kiwi": 22440, - "kiwis": 48108, - "kiya": 41610, - "kj": 27385, - "kj": 28238, - "kja": 41048, - "kjv": 37387, - "kk": 4390, - "kk": 10849, - "kka": 19002, - "kke": 44239, - "kker": 32399, - "kki": 44672, - "kkk": 20073, - "kkkk": 15834, - "kkkk": 47160, - "kkkkkkkk": 31042, - "kko": 43965, - "kkr": 40855, - "kl": 8498, - "kl": 14134, - "kla": 11249, - "klan": 46935, - "klar": 41374, - "klaus": 31788, - "kle": 7612, - "kle": 7432, - "klein": 33475, - "klein": 17579, - "kley": 18594, - "kli": 31640, - "klin": 44809, - "klin": 41647, - "kline": 47580, - "kling": 40270, - "klm": 38859, - "klo": 15296, - "klopp": 26446, - "kltu": 25978, - "klu": 21852, - "kly": 45090, - "km": 29954, - "km": 4590, - "kman": 33312, - "kms": 24996, - "kn": 4825, - "kn": 23693, - "knapp": 33945, - "kne": 6358, - "knee": 9897, - "knees": 19115, - "kner": 31578, - "knew": 5009, - "kni": 6312, - "knick": 33286, - "knicks": 17657, - "knife": 44176, - "knife": 8960, - "knigh": 43099, - "knight": 17949, - "knight": 7355, - "knights": 10385, - "knit": 18745, - "knit": 14313, - "knitted": 28151, - "knitting": 18863, - "knives": 20910, - "kno": 1482, - "kno": 25362, - "knob": 29736, - "knobs": 47504, - "knock": 14195, - "knock": 11583, - "knocked": 15325, - "knocking": 20380, - "knockout": 22602, - "knocks": 24296, - "knoll": 43882, - "knot": 18412, - "knots": 32428, - 
"know": 4179, - "know": 1038, - "knowing": 9267, - "knowledge": 27864, - "knowledge": 5510, - "knowledgeable": 43391, - "knowles": 32631, - "known": 3102, - "knows": 4309, - "knowyour": 30773, - "knox": 18630, - "knox": 21833, - "knoxville": 23232, - "knu": 14812, - "knuck": 21333, - "knuckle": 42023, - "knuckles": 40127, - "knw": 40803, - "ko": 1313, - "ko": 2448, - "koala": 36654, - "kobe": 42644, - "kobe": 14470, - "kobo": 42390, - "koch": 25331, - "kochi": 36710, - "kodak": 30425, - "kodi": 46611, - "kof": 17528, - "koff": 47303, - "kofi": 40400, - "koh": 13379, - "koh": 31216, - "kohl": 48479, - "kohli": 17549, - "koi": 28150, - "kojima": 46419, - "kok": 32045, - "kok": 11225, - "koko": 42426, - "koko": 40003, - "kol": 7142, - "kol": 31023, - "kolkata": 18011, - "kom": 6686, - "kom": 24181, - "kombat": 29670, - "kombucha": 48615, - "komo": 31820, - "kon": 5743, - "kon": 29519, - "kona": 30203, - "kong": 31784, - "kong": 6506, - "konstant": 46583, - "koo": 12225, - "koo": 40472, - "kook": 16003, - "kool": 36755, - "kool": 26444, - "kop": 16623, - "kop": 38999, - "kor": 6428, - "kor": 24175, - "kore": 3919, - "korea": 5915, - "korean": 31949, - "korean": 8034, - "kori": 42842, - "korn": 45412, - "korn": 31492, - "kors": 34535, - "kos": 47438, - "kos": 22951, - "kosh": 45233, - "kosher": 36502, - "koso": 23892, - "kosovo": 28343, - "kot": 23323, - "kot": 20701, - "kota": 21735, - "koto": 40945, - "koto": 29977, - "kou": 18502, - "kou": 39614, - "kour": 34134, - "kov": 17733, - "kov": 15156, - "kova": 26185, - "koval": 47903, - "kovic": 16886, - "kovich": 44794, - "kovsky": 33384, - "kow": 29764, - "kow": 23919, - "kowski": 17649, - "koz": 29598, - "kp": 16174, - "kp": 16894, - "kpa": 38759, - "kph": 41138, - "kpk": 42094, - "kpmg": 38243, - "kpop": 29534, - "kpop": 15859, - "kprc": 47832, - "kprs": 46253, - "kr": 7309, - "kr": 14107, - "kra": 5762, - "kraft": 28057, - "kraja": 29016, - "kraken": 48408, - "krakow": 40033, - "kram": 19075, - "kramer": 27495, - 
"kran": 33243, - "kranti": 47969, - "krat": 30470, - "kre": 8362, - "kreme": 43140, - "kremlin": 33979, - "kri": 3679, - "kris": 35251, - "kris": 12261, - "krish": 11487, - "krishna": 15863, - "krishnan": 46535, - "krispy": 49292, - "krist": 16490, - "kristen": 28881, - "kristen": 16644, - "kristi": 26895, - "kristin": 35408, - "kristin": 26785, - "kristina": 33180, - "krit": 36265, - "kro": 16193, - "kroger": 36344, - "kron": 25999, - "kru": 10609, - "kruger": 32948, - "krun": 43084, - "kry": 13995, - "krystal": 36554, - "ks": 10470, - "ks": 662, - "ksa": 25439, - "ksh": 36594, - "kst": 17420, - "kstate": 48590, - "ksu": 43496, - "kswx": 36180, - "kt": 17238, - "kt": 7792, - "ktm": 33989, - "ktn": 42170, - "kton": 37848, - "kts": 48577, - "ktv": 36444, - "ku": 1836, - "ku": 4827, - "kuala": 30336, - "kubball": 48995, - "kuber": 41336, - "kubernetes": 45144, - "kubrick": 37032, - "kuch": 39394, - "kud": 40818, - "kudos": 14481, - "kul": 11325, - "kul": 31514, - "kum": 18086, - "kum": 28148, - "kuma": 43139, - "kuma": 33920, - "kumar": 22329, - "kumar": 7674, - "kumb": 31391, - "kun": 6849, - "kun": 21842, - "kung": 39656, - "kung": 22347, - "kunst": 37881, - "kup": 39023, - "kups": 27240, - "kur": 4862, - "kurdi": 23504, - "kurdish": 21644, - "kurdistan": 24459, - "kurds": 20888, - "kuri": 46375, - "kuro": 28239, - "kuro": 47826, - "kurt": 31903, - "kurt": 14527, - "kus": 27618, - "kus": 27505, - "kush": 22264, - "kush": 24594, - "kushner": 36716, - "kut": 17283, - "kut": 36965, - "kuwait": 19679, - "kuya": 34815, - "kuz": 33253, - "kv": 27594, - "kv": 34249, - "kw": 10072, - "kw": 18339, - "kwa": 32784, - "kwa": 48576, - "kwame": 46681, - "kwan": 37100, - "kwan": 39447, - "kwang": 40260, - "kwe": 26050, - "kwi": 35327, - "kwon": 36369, - "kx": 28190, - "kx": 46442, - "ky": 2018, - "ky": 2383, - "kya": 29142, - "kyc": 37758, - "kyiv": 36422, - "kyle": 15847, - "kyle": 7539, - "kylie": 28282, - "kylie": 17983, - "kyliejenner": 47232, - "kylo": 47704, - "kyo": 13150, 
- "kyo": 6281, - "kyoto": 23223, - "kyr": 26329, - "kyrgy": 40013, - "kyrgyz": 48346, - "kyrie": 21857, - "kyu": 28296, - "kyu": 25490, - "kyuhyun": 37229, - "kyung": 41058, - "kyungsoo": 30280, - "kywx": 39940, - "kz": 48743, - "kz": 36848, - "kzn": 38264, - "kö": 32437, - "l": 75, - "l": 331, - "la": 572, - "la": 1210, - "laa": 44642, - "lab": 3537, - "lab": 4352, - "labe": 25749, - "label": 12235, - "label": 9093, - "labeled": 32720, - "labeling": 36825, - "labelled": 45188, - "labels": 17413, - "lable": 31879, - "labor": 11201, - "labor": 7878, - "laboratories": 43421, - "laboratory": 17664, - "laborday": 39324, - "labou": 32700, - "labour": 19586, - "labour": 6019, - "labourdoorstep": 37008, - "labout": 35961, - "labra": 37067, - "labrador": 25409, - "labs": 12021, - "laby": 29131, - "labyrin": 31782, - "labyrinth": 35594, - "lac": 4477, - "lac": 16189, - "lace": 30012, - "lace": 5421, - "laced": 36800, - "laces": 23281, - "lacey": 31754, - "lach": 30558, - "lack": 24915, - "lack": 8069, - "lacking": 30080, - "lacks": 34388, - "laco": 45882, - "lacrosse": 12915, - "lacy": 38645, - "lad": 15991, - "lad": 10707, - "ladak": 42312, - "ladakh": 45295, - "ladder": 16637, - "ladders": 47125, - "lade": 26447, - "laden": 28634, - "ladi": 12934, - "ladies": 28932, - "ladies": 3431, - "lads": 9803, - "lady": 7275, - "lady": 2909, - "ladybird": 43389, - "ladybug": 40038, - "ladygaga": 21232, - "laf": 47555, - "lafayette": 22683, - "lag": 30932, - "lag": 20394, - "laga": 30161, - "lage": 24369, - "lager": 36811, - "lager": 22989, - "lagh": 37237, - "laghate": 47565, - "laghateparth": 48780, - "lagi": 39786, - "lago": 42698, - "lago": 31476, - "lagoon": 22753, - "lagos": 12728, - "lagun": 18500, - "laguna": 23609, - "lah": 27315, - "lah": 4299, - "lahat": 42164, - "lahore": 16733, - "lai": 23947, - "laid": 42560, - "laid": 11160, - "lain": 46958, - "lain": 17151, - "laine": 35860, - "lair": 31981, - "lais": 34923, - "lak": 12890, - "lak": 26793, - "lake": 6441, - "lake": 
2553, - "lakedistrict": 26437, - "lakel": 26133, - "lakeland": 34306, - "laker": 45717, - "lakers": 13570, - "lakes": 9265, - "lakeshore": 42595, - "lakeside": 30915, - "lakewood": 36417, - "lakh": 21487, - "lakhs": 37985, - "lakings": 34289, - "lakota": 45510, - "laksh": 24937, - "lakshmi": 39682, - "lal": 12301, - "lal": 19430, - "lala": 33661, - "lali": 21726, - "laliga": 32383, - "lam": 2022, - "lam": 5704, - "lama": 26049, - "lamar": 28678, - "lamar": 17284, - "lamb": 19863, - "lamb": 10034, - "lambda": 36687, - "lambert": 14574, - "lambeth": 43410, - "lambo": 45464, - "lamborgh": 18709, - "lamborghini": 19462, - "lambs": 30361, - "lame": 23192, - "lamin": 22337, - "laminated": 49079, - "lamo": 41461, - "lamont": 46719, - "lamp": 26700, - "lamp": 10725, - "lampard": 39989, - "lamps": 23424, - "lan": 1193, - "lan": 4872, - "lana": 15406, - "lanapar": 47437, - "lanaparrilla": 47819, - "lanc": 11872, - "lanca": 15694, - "lancashire": 20939, - "lancaster": 16446, - "lance": 26025, - "lance": 11609, - "lancer": 38195, - "lancers": 46392, - "lancia": 48698, - "lancs": 47540, - "land": 1567, - "land": 973, - "lande": 36556, - "landed": 9873, - "lander": 37247, - "lander": 9666, - "landers": 20019, - "landfall": 38465, - "landfill": 34947, - "landia": 41384, - "landing": 8292, - "landings": 46104, - "landlord": 28938, - "landlords": 35283, - "landmark": 15208, - "landmarks": 30393, - "lando": 25463, - "lando": 7065, - "landon": 32748, - "landrover": 38125, - "landry": 36137, - "lands": 40223, - "lands": 2961, - "landsc": 4384, - "landscape": 21123, - "landscape": 5727, - "landscapephotography": 28125, - "landscapes": 15344, - "landscaping": 25642, - "landslide": 31954, - "lane": 25534, - "lane": 3980, - "lanes": 10345, - "laney": 38552, - "lang": 7969, - "lang": 8578, - "lange": 32021, - "langford": 45615, - "langley": 28595, - "langu": 4095, - "language": 46103, - "language": 4781, - "languages": 13527, - "lani": 22964, - "lanka": 16221, - "lankan": 40531, - 
"lannister": 49056, - "lans": 43550, - "lansing": 30805, - "lant": 44504, - "lanta": 44768, - "lantern": 17185, - "lanterns": 33676, - "lantic": 32601, - "lantic": 27678, - "lants": 38425, - "lanyard": 46808, - "lao": 32475, - "lao": 29521, - "laos": 34353, - "lap": 7213, - "lap": 8639, - "lapd": 32557, - "lapel": 47961, - "lapland": 43633, - "laps": 18711, - "lapse": 33365, - "laptop": 10464, - "laptops": 32189, - "laq": 45026, - "lar": 1592, - "lar": 1652, - "lara": 19435, - "lard": 40347, - "lare": 22415, - "laredo": 48427, - "large": 40234, - "large": 3638, - "largely": 21418, - "larger": 12567, - "largest": 4960, - "largo": 44161, - "lari": 34676, - "lark": 43164, - "lark": 23536, - "larkin": 34769, - "larry": 18642, - "larry": 8242, - "lars": 8669, - "larsen": 39721, - "larson": 27973, - "larvae": 44840, - "las": 8295, - "las": 2552, - "lasag": 31210, - "lasagna": 40683, - "lasalle": 43866, - "laser": 25607, - "laser": 9885, - "lasers": 37060, - "lash": 31995, - "lash": 18480, - "lashes": 21015, - "lass": 24203, - "lass": 18263, - "lassic": 39430, - "last": 10600, - "last": 952, - "lasted": 25711, - "lasting": 13434, - "lastnight": 30159, - "lasts": 20141, - "lasvegas": 17789, - "lat": 1591, - "lat": 28437, - "lata": 47114, - "latam": 40012, - "late": 13267, - "late": 2325, - "latel": 49035, - "lately": 11824, - "latepost": 48328, - "later": 24109, - "later": 2941, - "lateral": 26646, - "latest": 46805, - "latest": 2053, - "latex": 27520, - "lati": 16357, - "latimes": 43356, - "latin": 16695, - "latin": 9888, - "latina": 27936, - "latino": 45734, - "latino": 19470, - "latinos": 40233, - "lation": 6191, - "latitude": 37392, - "lative": 15719, - "lator": 9291, - "lators": 28278, - "latt": 33561, - "latte": 17697, - "latter": 26198, - "latvia": 30034, - "lau": 1853, - "lau": 23090, - "lauderdale": 24352, - "laugh": 4969, - "laugh": 6332, - "laughed": 16746, - "laughing": 8301, - "laughs": 14322, - "laughter": 10722, - "laun": 2944, - "launch": 31168, - "launch": 
2904, - "launched": 6125, - "launcher": 35782, - "launches": 7023, - "launching": 8565, - "laundering": 34079, - "laundry": 14797, - "laur": 15256, - "laura": 17091, - "laura": 7763, - "laure": 16932, - "laureate": 25675, - "laurel": 43370, - "laurel": 19942, - "lauren": 10456, - "lauren": 7634, - "laurence": 29353, - "laurent": 23226, - "laurie": 20326, - "laus": 38895, - "laus": 28111, - "lause": 22269, - "laut": 47688, - "lav": 13767, - "lav": 26919, - "lava": 16765, - "laven": 15047, - "lavender": 16033, - "laver": 28188, - "lavish": 35443, - "law": 2874, - "law": 2606, - "lawful": 33845, - "lawler": 47862, - "lawless": 39468, - "lawmaker": 37169, - "lawmakers": 21190, - "lawn": 31675, - "lawn": 11024, - "lawrence": 32221, - "lawrence": 8820, - "laws": 7306, - "lawson": 22152, - "lawsuit": 14346, - "lawsuits": 44331, - "lawyer": 10552, - "lawyers": 14232, - "lax": 17750, - "lax": 10024, - "lay": 7205, - "lay": 6360, - "laye": 25995, - "layer": 12411, - "layered": 28520, - "layers": 15900, - "laying": 12333, - "layla": 45050, - "layne": 48721, - "layo": 21738, - "layoffs": 29019, - "layout": 17314, - "lays": 19546, - "layton": 38061, - "laz": 18806, - "lazar": 33075, - "lazarus": 49126, - "laze": 41559, - "lazer": 43735, - "lazio": 33010, - "lazy": 32614, - "lazy": 10753, - "lb": 21958, - "lb": 7422, - "lbc": 37694, - "lbj": 45683, - "lbloggers": 48695, - "lbs": 8912, - "lc": 9584, - "lc": 7225, - "lcd": 21356, - "lcfc": 25339, - "lcs": 32279, - "ld": 1431, - "ld": 730, - "lder": 6945, - "lders": 43221, - "ldn": 37050, - "ldn": 2517, - "ldnont": 25827, - "ldnt": 21690, - "ldr": 37279, - "lds": 31235, - "le": 534, - "le": 579, - "lea": 2246, - "lea": 13324, - "leach": 35527, - "lead": 1328, - "lead": 2784, - "leader": 14806, - "leader": 3236, - "leaderboard": 34519, - "leaders": 3546, - "leadership": 36876, - "leadership": 3652, - "leading": 3833, - "leads": 5335, - "leaf": 9377, - "leaf": 7232, - "leaflet": 38289, - "leaflets": 39014, - "leafs": 16688, - 
"leafy": 42616, - "leagu": 13317, - "league": 16635, - "league": 2313, - "leagueof": 26022, - "leagueoflegends": 31737, - "leagues": 19888, - "leah": 24350, - "leah": 19308, - "leak": 42900, - "leak": 15489, - "leaked": 14353, - "leaking": 34097, - "leaks": 15657, - "leam": 39606, - "lean": 12447, - "lean": 8208, - "leaning": 24411, - "leanne": 41448, - "leans": 9357, - "leap": 29129, - "leap": 15392, - "leaps": 48080, - "lear": 1146, - "lear": 27663, - "learn": 16959, - "learn": 1768, - "learned": 6048, - "learnenglish": 49040, - "learner": 33547, - "learners": 19572, - "learning": 22632, - "learning": 2378, - "learns": 17569, - "learnt": 18959, - "leary": 36051, - "lease": 49041, - "lease": 14394, - "leased": 48352, - "leash": 36192, - "leasing": 29160, - "least": 3651, - "leather": 21417, - "leather": 5862, - "leau": 26498, - "leav": 3198, - "leave": 37512, - "leave": 3258, - "leaves": 5579, - "leaving": 5216, - "leban": 9360, - "lebanese": 23819, - "lebanon": 11695, - "leblanc": 46381, - "lebo": 44184, - "lebron": 11971, - "lebu": 47030, - "lec": 944, - "lec": 35374, - "leche": 46197, - "lect": 45392, - "lection": 18252, - "lections": 30995, - "lecture": 6617, - "lecturer": 23795, - "lectures": 21118, - "led": 8767, - "led": 912, - "ledge": 23647, - "ledge": 4815, - "ledger": 26817, - "leds": 36763, - "lee": 6224, - "lee": 2592, - "leed": 16483, - "leed": 40206, - "leeds": 38900, - "leeds": 7420, - "leek": 34585, - "leeminho": 37831, - "leen": 35311, - "leen": 15940, - "leep": 48875, - "leep": 10191, - "lees": 29324, - "lees": 34056, - "lef": 9152, - "left": 33949, - "left": 1823, - "leftist": 35143, - "lefto": 17437, - "leftover": 26414, - "leftovers": 28481, - "lefty": 33935, - "leg": 1211, - "leg": 4924, - "lega": 38674, - "legacy": 44108, - "legacy": 6447, - "legal": 17743, - "legal": 3998, - "legalization": 40584, - "legalize": 42921, - "legally": 14152, - "legate": 46009, - "lege": 8065, - "legen": 6105, - "legend": 5480, - "legend": 3539, - "legendary": 
6053, - "legendof": 47915, - "legends": 6396, - "leges": 15356, - "legg": 18474, - "legg": 32511, - "legged": 25830, - "leggings": 22895, - "leggo": 43441, - "legi": 11183, - "legion": 35503, - "legion": 14525, - "legis": 7200, - "legislat": 16486, - "legislation": 14143, - "legislative": 16755, - "legislators": 31572, - "legislature": 22309, - "legit": 12563, - "legitim": 17656, - "legitimate": 24491, - "lego": 28117, - "lego": 7849, - "legos": 45359, - "legs": 7072, - "leh": 19105, - "leh": 29298, - "lehead": 28090, - "lehigh": 34527, - "lehman": 46094, - "lei": 15828, - "lei": 21830, - "leia": 32723, - "leic": 35073, - "leica": 30206, - "leice": 10026, - "leicester": 28795, - "leicester": 11510, - "leicestershire": 45358, - "leigh": 14849, - "leigh": 9292, - "leighton": 30782, - "leila": 41342, - "lein": 20026, - "lein": 28551, - "leinster": 32242, - "leip": 36401, - "leipzig": 41860, - "leis": 13133, - "leisure": 15849, - "leit": 35446, - "leith": 34141, - "lek": 26626, - "lek": 36535, - "lel": 46623, - "lele": 26075, - "lem": 10213, - "lem": 8428, - "leman": 24478, - "lemans": 26694, - "lement": 9693, - "lements": 15833, - "lemme": 23318, - "lemon": 12272, - "lemon": 7184, - "lemonade": 18884, - "lemons": 29576, - "lemore": 41147, - "len": 3687, - "len": 2159, - "lena": 22038, - "lend": 45397, - "lend": 24987, - "lender": 44734, - "lenders": 42443, - "lending": 20209, - "lene": 17628, - "leness": 36551, - "leng": 7861, - "length": 10130, - "lengths": 31858, - "lengthy": 32624, - "lenin": 41760, - "lennon": 18360, - "lennox": 45748, - "lenny": 48448, - "lenny": 30124, - "leno": 45357, - "lenovo": 25886, - "lens": 8666, - "lenses": 21264, - "lent": 20943, - "lent": 22605, - "lentil": 41511, - "lentils": 44269, - "leo": 24008, - "leo": 8312, - "leon": 6581, - "leon": 9763, - "leonard": 43849, - "leonard": 13142, - "leonardo": 20282, - "leone": 22864, - "leop": 11234, - "leopard": 15931, - "leopards": 40996, - "leopold": 45501, - "lep": 48884, - "leppard": 41656, 
- "lepre": 45641, - "ler": 5587, - "ler": 1803, - "lero": 15067, - "lerosis": 35455, - "leroy": 32441, - "lers": 6247, - "lery": 38184, - "les": 4339, - "les": 840, - "lesbian": 17419, - "lesbians": 43182, - "lesh": 32282, - "lesley": 25506, - "lesli": 13649, - "leslie": 16244, - "lesn": 39568, - "lesnar": 42223, - "less": 3242, - "less": 1285, - "lesser": 20369, - "lessly": 13103, - "lessness": 24847, - "lesson": 7714, - "lessons": 7199, - "lest": 24372, - "lest": 6794, - "lester": 23157, - "lester": 24023, - "lestwe": 29726, - "lestweforget": 30273, - "let": 1898, - "let": 1094, - "leta": 34319, - "lete": 34078, - "letes": 6815, - "leth": 30022, - "leth": 42462, - "lethal": 21905, - "lethbridge": 48390, - "leti": 34176, - "letics": 14504, - "letit": 46423, - "leto": 32203, - "leton": 37674, - "leton": 7462, - "lets": 10448, - "lets": 3243, - "letsgo": 16967, - "letsgo": 29789, - "letstalk": 35591, - "lett": 22428, - "lett": 9778, - "lette": 41798, - "lette": 10301, - "letter": 15567, - "letter": 4861, - "lettering": 26382, - "letterman": 38447, - "letters": 9181, - "letting": 9510, - "letto": 35449, - "lettu": 17933, - "lettuce": 18573, - "leu": 15691, - "leuke": 31031, - "leukemia": 32097, - "leum": 21571, - "leur": 45806, - "lev": 17022, - "lev": 29950, - "levan": 42543, - "leve": 36271, - "level": 21682, - "level": 2931, - "leveled": 48453, - "levels": 6295, - "leven": 44792, - "leven": 34729, - "lever": 20178, - "lever": 23094, - "leverage": 24030, - "leveraging": 37948, - "levi": 25630, - "levi": 19113, - "leviathan": 41736, - "levin": 36949, - "levine": 26594, - "levit": 22715, - "levy": 17147, - "lew": 5063, - "lew": 25329, - "lewan": 48349, - "lewd": 45241, - "lewes": 40431, - "lewi": 19589, - "lewis": 22043, - "lewis": 6020, - "lewisham": 37385, - "lewisham": 47633, - "lewishamilton": 42960, - "lewood": 37951, - "lex": 6586, - "lex": 9658, - "lexa": 48259, - "lexi": 44231, - "lexi": 24679, - "lexington": 22308, - "lexus": 20694, - "ley": 2565, - "ley": 
1066, - "leye": 37061, - "leys": 45609, - "leys": 14834, - "leyton": 46573, - "lez": 26442, - "lf": 33960, - "lf": 22078, - "lfc": 37826, - "lfc": 8267, - "lfw": 28514, - "lg": 4546, - "lg": 11368, - "lga": 39348, - "lgb": 25401, - "lgbt": 11743, - "lgbt": 9592, - "lgbti": 42730, - "lgbtq": 47625, - "lgbtq": 14939, - "lgm": 39389, - "lh": 27794, - "lh": 31159, - "lhp": 45092, - "lhs": 33170, - "li": 554, - "li": 4250, - "lia": 26118, - "lia": 6964, - "liability": 29139, - "liaison": 39294, - "liam": 5258, - "liam": 7167, - "lian": 18058, - "liance": 40864, - "liar": 16334, - "liars": 23863, - "lias": 46021, - "lib": 10249, - "lib": 13345, - "libby": 36832, - "libdems": 40869, - "liber": 3425, - "liberal": 48032, - "liberal": 9985, - "liberalism": 40018, - "liberals": 15981, - "liberated": 38690, - "liberation": 19507, - "liberia": 32208, - "libertarian": 35067, - "liberties": 48623, - "liberty": 23397, - "liberty": 8480, - "libr": 2856, - "libra": 43038, - "librarian": 25148, - "librarians": 37806, - "libraries": 14277, - "library": 25713, - "library": 3519, - "libre": 49210, - "libre": 31681, - "libs": 26401, - "liby": 36390, - "libya": 16417, - "libyan": 42319, - "lic": 2508, - "lic": 3376, - "lice": 45691, - "licen": 6706, - "licence": 20550, - "license": 10337, - "licensed": 18752, - "licenses": 36414, - "licensing": 24219, - "lich": 23979, - "lich": 25875, - "lick": 29197, - "lick": 17541, - "licking": 33013, - "licks": 42117, - "lics": 44552, - "lid": 39369, - "lid": 17678, - "lidge": 45558, - "lido": 35683, - "lids": 41609, - "lie": 6570, - "lie": 2538, - "lieb": 45387, - "liebe": 37749, - "lied": 6486, - "lief": 38428, - "lien": 45716, - "lier": 3626, - "liers": 19303, - "lies": 37236, - "lies": 3205, - "liest": 14020, - "liet": 41107, - "lieu": 20401, - "lieu": 35313, - "lieutenant": 22538, - "lif": 16456, - "life": 2666, - "life": 970, - "lifeat": 27801, - "lifeboat": 37404, - "lifecycle": 49171, - "lifein": 48447, - "lifeis": 24824, - "lifeisgood": 
46433, - "lifel": 15025, - "lifeline": 38438, - "lifelong": 21358, - "lifeof": 36061, - "lifesaving": 48016, - "lifespan": 49257, - "lifestyle": 46512, - "lifestyle": 7037, - "lifestyles": 48521, - "lifetime": 48737, - "lifetime": 9107, - "liff": 34404, - "liffe": 38942, - "lift": 33146, - "lift": 6779, - "lifted": 16783, - "lifter": 38555, - "lifting": 10857, - "lifts": 18291, - "lig": 19915, - "lig": 38493, - "liga": 16802, - "ligam": 31077, - "ligament": 48705, - "ligan": 27962, - "ligans": 42133, - "ligh": 7510, - "light": 3885, - "light": 1395, - "lighted": 18404, - "lighten": 32717, - "lightening": 28170, - "lighter": 14102, - "lighthouse": 13717, - "lighting": 5799, - "lightly": 26878, - "lightning": 7756, - "lightroom": 41454, - "lights": 3073, - "lightweight": 16278, - "ligu": 42920, - "ligue": 29196, - "lik": 4831, - "lik": 18495, - "like": 9175, - "like": 789, - "liked": 7112, - "likefor": 48444, - "likeli": 40666, - "likelihood": 48158, - "likely": 5256, - "liken": 36084, - "likes": 4724, - "liking": 16810, - "lil": 6012, - "lil": 4461, - "lilac": 33647, - "lili": 26686, - "lili": 48411, - "lilies": 38110, - "lillard": 47016, - "lille": 38705, - "lilli": 40920, - "lillian": 41563, - "lilly": 47825, - "lilly": 21815, - "lily": 23803, - "lily": 10647, - "lim": 2377, - "lim": 17204, - "lima": 17589, - "limb": 27061, - "limb": 32363, - "limbo": 46179, - "limbs": 34886, - "lime": 17385, - "lime": 11193, - "limel": 48658, - "limer": 16915, - "limerick": 19501, - "limestone": 27272, - "limit": 18933, - "limit": 9973, - "limitations": 32730, - "limited": 49229, - "limited": 3472, - "limiting": 35812, - "limitless": 35833, - "limits": 11966, - "limo": 33166, - "limous": 47287, - "limpopo": 47175, - "lin": 1254, - "lin": 2424, - "lina": 26110, - "lincol": 6239, - "lincoln": 16957, - "lincoln": 7454, - "lincolnshire": 29014, - "lind": 6492, - "linda": 45410, - "linda": 10760, - "linden": 44076, - "linden": 34832, - "lindo": 38467, - "lindsay": 29846, - "lindsay": 
16858, - "lindsey": 29475, - "lindsey": 18128, - "line": 3674, - "line": 1148, - "linear": 19816, - "linebacker": 29848, - "lined": 11842, - "lineman": 31501, - "linen": 20032, - "liner": 11618, - "liners": 24463, - "lines": 3418, - "liness": 28633, - "lineup": 7316, - "lineups": 33589, - "ling": 4851, - "ling": 1358, - "linger": 29593, - "lingerie": 18473, - "lingering": 46494, - "lings": 11390, - "lington": 27673, - "lington": 9002, - "lingu": 34449, - "lingui": 29942, - "linguistic": 46847, - "linguistics": 48651, - "lining": 11589, - "link": 18433, - "link": 2468, - "linke": 15088, - "linked": 11059, - "linkedin": 16302, - "linkin": 40287, - "linkin": 49291, - "linking": 23296, - "links": 8113, - "linn": 37431, - "lino": 41189, - "lino": 34995, - "lins": 6567, - "linson": 15401, - "linton": 36479, - "linus": 49303, - "linux": 14061, - "lio": 19395, - "lion": 8872, - "lion": 5567, - "lionel": 19441, - "lions": 7093, - "lip": 8630, - "lip": 8546, - "lipo": 38795, - "lipp": 38074, - "lips": 8847, - "lipse": 10351, - "lipstick": 15618, - "liqu": 6310, - "lique": 32680, - "liqueur": 43612, - "liqui": 33817, - "liquid": 18366, - "liquid": 10158, - "liquidity": 42812, - "liquor": 17828, - "lis": 7297, - "lis": 12749, - "lisa": 25236, - "lisa": 7424, - "lisam": 43072, - "lisboa": 40052, - "lisbon": 17708, - "lish": 12658, - "lish": 2354, - "lished": 22620, - "lisle": 21529, - "lism": 34390, - "liss": 45489, - "liss": 35433, - "lisse": 49309, - "list": 1734, - "list": 1998, - "lista": 37812, - "listed": 6457, - "listen": 17454, - "listen": 2672, - "listened": 15347, - "listener": 34819, - "listeners": 26901, - "listening": 3656, - "listens": 25912, - "lister": 45109, - "listing": 8145, - "listings": 21987, - "liston": 48041, - "lists": 12281, - "lit": 2213, - "lit": 4350, - "lita": 30100, - "lite": 29273, - "lite": 13694, - "litecoin": 39063, - "liter": 3085, - "liter": 34904, - "literacy": 12841, - "literal": 24269, - "literally": 4719, - "literary": 13586, - 
"literature": 11072, - "litfest": 40369, - "lith": 37005, - "lithium": 22794, - "litho": 31088, - "lithograph": 49022, - "lithu": 21045, - "lithuania": 27068, - "liti": 24292, - "litigation": 31769, - "lito": 47381, - "litre": 25786, - "litres": 39919, - "litt": 1216, - "litt": 47583, - "litter": 45431, - "litter": 17118, - "litters": 45300, - "little": 7024, - "little": 1274, - "littlemix": 29731, - "littlest": 48969, - "litur": 36830, - "litz": 30357, - "liu": 20466, - "liv": 13895, - "liv": 19901, - "livan": 12785, - "live": 3215, - "live": 1064, - "lived": 8867, - "livel": 17973, - "liveli": 26566, - "livelihood": 46497, - "livelihoods": 47716, - "lively": 19663, - "liveme": 35396, - "livemusic": 15688, - "liven": 41057, - "liveon": 22815, - "livepd": 38742, - "livepd": 31899, - "liver": 4755, - "liver": 12639, - "liverpool": 29778, - "liverpool": 5366, - "livery": 23248, - "lives": 3247, - "livesmatter": 20348, - "livestock": 22079, - "livestream": 16844, - "livetweet": 38546, - "livin": 28061, - "living": 10965, - "living": 2815, - "livingston": 30551, - "lix": 45068, - "liz": 8632, - "liz": 12242, - "liza": 28787, - "lizard": 17221, - "lizards": 41991, - "lizasober": 44487, - "lizasoberano": 45076, - "lizz": 34430, - "lizzie": 29530, - "lizzy": 32306, - "lj": 34211, - "lj": 32273, - "lju": 44562, - "lk": 39110, - "lk": 26596, - "lka": 21881, - "ll": 1657, - "ll": 865, - "lla": 15419, - "llama": 36679, - "llan": 17281, - "llan": 38728, - "lland": 31150, - "llc": 17161, - "lle": 26550, - "lle": 29732, - "llen": 41197, - "ller": 7722, - "llers": 26426, - "lli": 47015, - "lli": 13368, - "llis": 25518, - "lll": 27177, - "llll": 34874, - "llll": 43485, - "llo": 19293, - "lloy": 10092, - "lloyd": 33339, - "lloyd": 12400, - "llp": 28042, - "lls": 40535, - "lly": 26379, - "lm": 6981, - "lm": 15282, - "lma": 4493, - "lmao": 5121, - "lmaoo": 32623, - "lmaooo": 33362, - "lmaoooo": 45232, - "lmfa": 8928, - "lmfao": 11068, - "lmfaooo": 47658, - "lmp": 43575, - "lms": 
30381, - "ln": 31644, - "ln": 18654, - "lng": 22339, - "lnp": 39679, - "lo": 549, - "lo": 2982, - "loa": 39678, - "load": 4515, - "load": 2834, - "loaded": 6756, - "loader": 28492, - "loading": 9975, - "loads": 8691, - "loaf": 26467, - "loaf": 18273, - "loan": 28431, - "loan": 8176, - "loans": 14206, - "lob": 11197, - "lob": 46606, - "lobal": 34574, - "lobb": 27698, - "lobby": 12449, - "lobbying": 36047, - "lobe": 46325, - "lobes": 24148, - "lobo": 39323, - "lobos": 36586, - "lobster": 13793, - "loc": 1378, - "loc": 25826, - "local": 9202, - "local": 2029, - "localized": 49399, - "locally": 15603, - "locals": 15041, - "locate": 20490, - "located": 5677, - "location": 4372, - "locations": 9580, - "loch": 20188, - "loch": 14101, - "lock": 7201, - "lock": 4381, - "lockdown": 35636, - "locke": 29698, - "locked": 8371, - "locker": 14053, - "lockhart": 48642, - "lockheed": 36637, - "locking": 19978, - "locks": 13212, - "lockscreen": 42439, - "loco": 25555, - "locom": 22798, - "locomo": 46147, - "locomotive": 30439, - "locu": 33635, - "locust": 46237, - "lod": 45650, - "lodge": 10504, - "loe": 30113, - "loe": 25484, - "loeb": 49334, - "lof": 15011, - "loff": 31008, - "loft": 35707, - "loft": 20049, - "loftus": 46689, - "log": 3239, - "log": 7383, - "logan": 20655, - "logan": 10569, - "logans": 40752, - "logg": 43002, - "logged": 31457, - "logger": 39089, - "logging": 24444, - "logi": 3177, - "logia": 48031, - "logic": 10670, - "logical": 4791, - "logically": 24782, - "logie": 33445, - "logies": 7378, - "login": 31121, - "logist": 7407, - "logistics": 14755, - "logists": 12233, - "logne": 19911, - "logo": 31480, - "logo": 5750, - "logos": 24879, - "logs": 22745, - "logue": 27785, - "logy": 22721, - "logy": 1659, - "loh": 49129, - "loh": 37983, - "loi": 35128, - "loid": 31408, - "loin": 21760, - "loire": 46040, - "lois": 27040, - "lok": 19908, - "lok": 23575, - "loki": 24435, - "lol": 10721, - "lol": 1824, - "lola": 19065, - "lolita": 42615, - "lolla": 45483, - "lolli": 
27906, - "lollipop": 34605, - "lolly": 48264, - "lolo": 16895, - "lolo": 37481, - "lolol": 25280, - "lololol": 34738, - "lolz": 35260, - "lom": 9279, - "loma": 42889, - "lombar": 25493, - "lombard": 46461, - "lombardi": 44346, - "lomond": 48941, - "lon": 1235, - "lon": 6507, - "london": 6835, - "london": 1789, - "londonmarathon": 35018, - "lone": 22220, - "lone": 13576, - "lonel": 28872, - "loneliness": 30310, - "lonely": 34509, - "lonely": 12368, - "lonelyplanet": 44984, - "long": 4792, - "long": 1538, - "longe": 25793, - "longer": 5349, - "longest": 10731, - "longevity": 35354, - "longh": 20286, - "longhorn": 41047, - "longhorns": 38295, - "longing": 38482, - "longlive": 47840, - "longs": 43618, - "longtime": 19685, - "loo": 731, - "loo": 11804, - "look": 8874, - "look": 1012, - "lookalike": 38307, - "lookbook": 39184, - "looked": 4913, - "lookin": 11254, - "looking": 36898, - "looking": 1312, - "lookout": 18330, - "looks": 1606, - "lool": 33125, - "loom": 37440, - "loom": 17199, - "looming": 35384, - "looms": 30550, - "loon": 28222, - "loona": 48137, - "looney": 45315, - "looo": 20902, - "loool": 36016, - "looool": 47038, - "looooo": 31484, - "loop": 19606, - "loop": 10408, - "loops": 21625, - "loos": 45723, - "loose": 43815, - "loose": 9786, - "loot": 21518, - "lop": 36734, - "lop": 17066, - "lopes": 49269, - "lopez": 12982, - "lor": 2179, - "lor": 11335, - "lord": 18896, - "lord": 3486, - "lorde": 35483, - "lords": 14969, - "lore": 12880, - "lore": 27218, - "loren": 13602, - "loren": 33398, - "lorenzo": 21342, - "lores": 34510, - "loretta": 40863, - "lori": 20164, - "lori": 23095, - "lorna": 46316, - "lorraine": 27602, - "lorry": 31354, - "los": 32217, - "los": 3087, - "losange": 14037, - "losangeles": 14638, - "lose": 43318, - "lose": 5354, - "loser": 18168, - "losers": 23201, - "loses": 14263, - "losing": 7918, - "loss": 34761, - "loss": 4327, - "losses": 16909, - "lost": 14258, - "lost": 2624, - "lostdog": 48482, - "lot": 5132, - "lot": 1954, - "loth": 
43625, - "lothian": 31360, - "lothing": 42058, - "lotion": 25260, - "lotr": 34165, - "lots": 2958, - "lott": 42854, - "lotta": 29125, - "lotte": 16535, - "lotte": 7274, - "lottery": 16975, - "lottie": 48517, - "lotto": 28265, - "lotus": 13824, - "lou": 2207, - "lou": 9745, - "loubout": 38369, - "loud": 22884, - "loud": 7464, - "louder": 25904, - "loudest": 49214, - "loudly": 39256, - "lough": 21927, - "lough": 28045, - "loughborough": 49153, - "loui": 42173, - "louie": 25790, - "louis": 8916, - "louis": 4459, - "louisa": 40011, - "louise": 32275, - "louise": 13076, - "louisi": 12187, - "louisiana": 12946, - "louisville": 13860, - "louisvuitton": 44911, - "loun": 6466, - "lounge": 7141, - "lounging": 45430, - "lour": 29383, - "lourdes": 45071, - "louvre": 36995, - "lov": 8923, - "lov": 21229, - "lova": 37394, - "lovable": 38565, - "lovato": 18960, - "love": 2618, - "love": 793, - "lovecraft": 42405, - "loved": 3249, - "lovefl": 38884, - "loveher": 38306, - "lovehim": 45733, - "loveis": 30931, - "loveisland": 30970, - "loveislove": 43603, - "loveit": 24764, - "lovel": 8999, - "lovelies": 31412, - "lovelondon": 46493, - "lovely": 33250, - "lovely": 2165, - "lovemy": 20041, - "lovemyjob": 40130, - "loven": 33754, - "lover": 28508, - "lover": 7168, - "lovers": 48416, - "lovers": 5973, - "loves": 37773, - "loves": 3925, - "lovethe": 33040, - "lovethem": 48298, - "lovett": 47095, - "lovewins": 47687, - "loveyou": 39226, - "loveyou": 25964, - "loveyour": 26462, - "lovin": 33442, - "lovin": 16354, - "loving": 29568, - "loving": 3721, - "lovingly": 44100, - "low": 1049, - "low": 1042, - "loway": 16104, - "lowe": 17910, - "lowed": 22733, - "lowell": 24458, - "lower": 32578, - "lower": 4909, - "lowered": 34968, - "lowering": 35261, - "lowers": 36398, - "lowes": 38515, - "lowest": 12098, - "lowing": 8283, - "lowkey": 29481, - "lowry": 27444, - "lows": 4406, - "lox": 41725, - "loy": 4519, - "loy": 23929, - "loyal": 13032, - "loyalty": 14686, - "loyd": 44212, - "loyed": 29279, - 
"loyment": 18307, - "loyola": 32569, - "lp": 22282, - "lp": 6392, - "lpc": 44092, - "lpg": 47905, - "lpga": 34295, - "lps": 32094, - "lr": 20572, - "lr": 7041, - "lrt": 32996, - "ls": 19051, - "ls": 1268, - "lsd": 43766, - "lse": 46127, - "lse": 43886, - "lsu": 35428, - "lsu": 15672, - "lt": 13642, - "lt": 3333, - "ltc": 27664, - "ltd": 6802, - "lte": 25202, - "lton": 14237, - "lu": 664, - "lu": 9657, - "lub": 22469, - "lub": 11836, - "lubbock": 37660, - "lubric": 40963, - "luc": 7013, - "luc": 28014, - "luca": 21053, - "lucas": 23425, - "lucas": 10225, - "lucci": 45849, - "luce": 46217, - "lucent": 41552, - "lucer": 36042, - "luch": 36646, - "lucha": 38449, - "luci": 8787, - "lucia": 22290, - "luciano": 46365, - "lucid": 44540, - "lucie": 39461, - "lucifer": 46224, - "lucifer": 27687, - "lucille": 47454, - "lucin": 27523, - "luck": 9647, - "luck": 2820, - "luckiest": 42469, - "luckily": 20100, - "lucknow": 29407, - "lucky": 20495, - "lucky": 4133, - "lucrative": 41485, - "lucy": 17262, - "lucy": 10120, - "lud": 14288, - "lude": 28755, - "ludo": 40141, - "ludwig": 30633, - "lue": 45199, - "luf": 25264, - "lufc": 17818, - "luffy": 39047, - "lufthan": 37769, - "lufthansa": 39145, - "lug": 45521, - "lugg": 19673, - "luggage": 20138, - "luhan": 20975, - "luigi": 28444, - "luis": 25231, - "luis": 11339, - "luiz": 39633, - "lujah": 31639, - "luk": 21652, - "luka": 34878, - "lukaku": 37177, - "lukas": 37941, - "luke": 11970, - "luke": 5652, - "lul": 20861, - "lulla": 37019, - "lullaby": 41676, - "lulu": 32052, - "lulu": 26935, - "lum": 18112, - "lum": 5997, - "lumb": 36231, - "lumber": 27421, - "lumber": 34692, - "lumi": 41437, - "lumia": 31912, - "lumin": 15867, - "luminous": 37913, - "lump": 38704, - "lumpur": 34411, - "lun": 3221, - "lun": 49390, - "luna": 14425, - "lunar": 16043, - "lunatic": 45874, - "lunch": 10954, - "lunch": 2772, - "luncheon": 15104, - "lunches": 29705, - "lunchtime": 14330, - "lund": 30975, - "lund": 20181, - "lunes": 35648, - "lung": 38479, - 
"lung": 16271, - "lungs": 27366, - "lup": 27413, - "lupita": 49352, - "lupus": 36017, - "lur": 14439, - "lure": 31376, - "lures": 46747, - "lurking": 29941, - "lus": 7158, - "lusci": 38004, - "luscious": 39935, - "lush": 40382, - "lush": 16263, - "lust": 42071, - "lust": 12662, - "lustre": 46673, - "luther": 21848, - "luther": 17208, - "lutheran": 27341, - "luton": 28288, - "luv": 24726, - "luv": 8502, - "lux": 3439, - "lux": 16704, - "luxe": 26373, - "luxemb": 21314, - "luxembour": 22712, - "luxembourg": 23949, - "luxu": 16112, - "luxurious": 17292, - "luxury": 12083, - "luxury": 5247, - "luxurytravel": 29010, - "luz": 41008, - "lv": 10862, - "lv": 11184, - "lvl": 31256, - "lw": 40515, - "lw": 35115, - "lx": 30789, - "ly": 1251, - "ly": 597, - "lydia": 24316, - "lyf": 43688, - "lyfe": 30787, - "lyft": 32944, - "lying": 7175, - "lyk": 46376, - "lyle": 36828, - "lym": 20087, - "lyme": 31167, - "lymph": 30073, - "lymphoma": 37648, - "lyn": 3957, - "lyn": 5054, - "lynch": 31586, - "lynch": 13560, - "lynd": 33416, - "lynda": 42959, - "lyndon": 48518, - "lynn": 25303, - "lynn": 10667, - "lynne": 26900, - "lynx": 28941, - "lyon": 17176, - "lyons": 29453, - "lyric": 24366, - "lyric": 21291, - "lyrical": 33358, - "lyricist": 49013, - "lyrics": 9551, - "lyrix": 46814, - "lys": 45054, - "lyte": 40059, - "lywood": 4012, - "lz": 30818, - "lé": 39641, - "m": 76, - "m": 332, - "ma": 577, - "ma": 1226, - "maa": 42774, - "maa": 21555, - "maan": 33668, - "maar": 48927, - "maas": 43332, - "mab": 35639, - "mabel": 47319, - "mable": 23001, - "mably": 40082, - "mabu": 44682, - "mac": 1961, - "mac": 4945, - "macar": 21558, - "macaroni": 41824, - "macarthur": 36785, - "macau": 43984, - "macau": 33370, - "macbeth": 36321, - "macbook": 20617, - "macdonald": 20315, - "mace": 44869, - "maced": 21102, - "macedonia": 27071, - "macfar": 45374, - "macfarlane": 48825, - "mach": 2637, - "mach": 35091, - "machado": 42318, - "mache": 43220, - "macher": 29330, - "machi": 41783, - "machin": 17972, - 
"machine": 11539, - "machine": 4169, - "machinelearning": 13621, - "machinery": 21858, - "machines": 11108, - "machining": 45562, - "macho": 43977, - "macht": 45225, - "macin": 36533, - "mack": 8590, - "mack": 12145, - "mackay": 32497, - "macken": 48057, - "mackenzie": 22351, - "mackerel": 35002, - "mackin": 26010, - "macklemore": 41758, - "macle": 33843, - "maclean": 47137, - "macleod": 43684, - "macmillan": 36364, - "macmillan": 35191, - "macon": 35818, - "macos": 45469, - "macqu": 38365, - "macquarie": 40858, - "macro": 20891, - "macro": 16626, - "macron": 24859, - "macs": 46548, - "macy": 17113, - "macys": 47652, - "mad": 2740, - "mad": 3843, - "mada": 37799, - "madagas": 24758, - "madagascar": 25744, - "madam": 33634, - "madam": 27538, - "madame": 23507, - "madd": 31717, - "madden": 19093, - "maddie": 39959, - "maddie": 18875, - "maddow": 32644, - "maddy": 31734, - "made": 5388, - "made": 1105, - "madein": 13670, - "madeira": 33810, - "madel": 34532, - "madele": 29831, - "madeleine": 33264, - "madeline": 33905, - "madewith": 28627, - "madewithunity": 43190, - "madhu": 23000, - "madhuri": 38346, - "madhuridixit": 43889, - "madhya": 48302, - "madi": 6527, - "madi": 27282, - "madison": 24798, - "madison": 8791, - "madmen": 45452, - "madness": 8755, - "madon": 44852, - "madonna": 14137, - "madra": 27416, - "madras": 42046, - "madre": 42130, - "madri": 5529, - "madrid": 5909, - "mads": 41201, - "madu": 34913, - "madurai": 49159, - "maduro": 32912, - "mae": 16898, - "mae": 17339, - "maer": 47088, - "maestro": 24140, - "mafi": 47164, - "mafia": 14890, - "mag": 1191, - "mag": 4508, - "maga": 8694, - "magaz": 2974, - "magazine": 3113, - "magazines": 22253, - "magdal": 29673, - "mage": 46568, - "mage": 10923, - "magee": 43872, - "magenta": 38091, - "magento": 42442, - "mages": 31059, - "maggi": 29611, - "maggie": 41443, - "maggie": 14524, - "maggio": 49087, - "magh": 45555, - "magi": 19270, - "magic": 13061, - "magic": 3778, - "magical": 36408, - "magical": 7823, - 
"magician": 26368, - "magin": 42678, - "maging": 41310, - "magn": 10290, - "magna": 34076, - "magne": 9921, - "magnesium": 36379, - "magnet": 18240, - "magnetic": 13838, - "magnets": 33030, - "magni": 24297, - "magnific": 9725, - "magnificent": 10724, - "magnitude": 22955, - "magno": 21184, - "magnolia": 27123, - "magnu": 45198, - "magnum": 23496, - "magnus": 26275, - "magpie": 45973, - "mags": 31021, - "maguire": 26470, - "mah": 7206, - "mah": 10801, - "maha": 12237, - "maha": 33983, - "mahal": 22301, - "mahan": 45191, - "mahar": 11635, - "maharaj": 38488, - "maharashtra": 19328, - "mahat": 32434, - "mahatma": 40530, - "mahe": 15756, - "maher": 29826, - "mahesh": 33448, - "mahesh": 22095, - "mahi": 32529, - "mahi": 38659, - "mahin": 24113, - "mahindra": 31285, - "mahmoud": 41361, - "mahog": 30804, - "mahogany": 33084, - "mahon": 45864, - "mahon": 20371, - "mahone": 26634, - "mai": 7138, - "mai": 14595, - "maia": 46585, - "maid": 23148, - "maid": 10226, - "maidan": 37346, - "maiden": 37011, - "maiden": 13809, - "maids": 27305, - "maidstone": 44395, - "mail": 10478, - "mail": 2614, - "mailbox": 31482, - "mailed": 42314, - "mailing": 26680, - "mailonline": 26021, - "mails": 45213, - "main": 3904, - "main": 2623, - "maine": 18639, - "maine": 7836, - "mained": 15609, - "mainedcm": 15845, - "mainland": 27629, - "mainly": 15280, - "mains": 33656, - "mainst": 42102, - "mainstream": 18034, - "maintain": 12954, - "maintained": 26665, - "maintaining": 21964, - "maintains": 38335, - "mainten": 9399, - "maintenance": 9610, - "mais": 28153, - "maisie": 47355, - "maison": 37065, - "maison": 27626, - "mait": 26387, - "maize": 35386, - "maj": 2948, - "maj": 28723, - "maja": 47498, - "maje": 9852, - "majestic": 15335, - "majesty": 21188, - "major": 8008, - "major": 3350, - "majority": 10508, - "majors": 23597, - "mak": 11271, - "mak": 19253, - "makar": 42242, - "makati": 39402, - "make": 3232, - "make": 1078, - "makeaw": 45859, - "makeinindia": 42739, - "makeit": 26308, - "maken": 
47093, - "makeover": 17926, - "maker": 15196, - "maker": 4836, - "makers": 6577, - "makerspace": 42400, - "makes": 2088, - "makeshift": 43274, - "makeu": 41707, - "makeup": 26402, - "makeup": 5853, - "makeyourown": 34090, - "makeyourownlane": 34823, - "maki": 34514, - "makin": 43096, - "makin": 22407, - "making": 17976, - "making": 1665, - "makk": 39852, - "maknae": 44118, - "mako": 49061, - "mal": 1662, - "mal": 3796, - "mala": 28290, - "malade": 36928, - "malaga": 35395, - "malala": 41137, - "malam": 48956, - "malaria": 24929, - "malawi": 23405, - "malay": 5323, - "malay": 42430, - "malayalam": 34860, - "malaysi": 39668, - "malaysia": 8146, - "malaysian": 21136, - "malbec": 47741, - "malcol": 12645, - "malcolm": 14139, - "maldives": 16795, - "male": 11326, - "male": 2801, - "males": 14426, - "malhotra": 28866, - "mali": 6701, - "mali": 22669, - "malia": 46714, - "malibu": 21723, - "malicious": 42147, - "malign": 41122, - "malik": 11394, - "mall": 10984, - "mall": 6220, - "mallorca": 28082, - "mallory": 38968, - "malls": 36447, - "malm": 44071, - "malnutrition": 41153, - "malo": 43518, - "malone": 19852, - "maloney": 45897, - "mals": 25370, - "malt": 21688, - "malta": 16989, - "maltese": 39838, - "malvern": 39356, - "malware": 24153, - "mam": 4404, - "mam": 17778, - "mama": 7133, - "mamamoo": 36012, - "mamas": 42395, - "mamba": 44189, - "mament": 45690, - "mami": 43858, - "mamma": 34893, - "mammal": 33385, - "mammals": 31987, - "mammoth": 28022, - "man": 723, - "man": 786, - "mana": 29467, - "mana": 15837, - "manafort": 40108, - "manag": 1830, - "manage": 9770, - "managed": 7928, - "management": 3319, - "manager": 3898, - "managerial": 44261, - "managers": 12853, - "manages": 29699, - "managing": 10892, - "manas": 44188, - "manatee": 46558, - "mance": 2324, - "manchester": 24424, - "manchester": 4651, - "mancini": 47681, - "mancity": 31538, - "mancrush": 36945, - "mancrushmonday": 39307, - "mand": 4325, - "mand": 27244, - "mandala": 41106, - "mandarin": 26455, - 
"mandate": 26228, - "mandatory": 19934, - "mandel": 34960, - "mandela": 16280, - "mandi": 38961, - "mandir": 35815, - "mando": 34006, - "mands": 12340, - "mandu": 31440, - "mandy": 41505, - "mandy": 24302, - "mane": 44471, - "mane": 16044, - "maneu": 33216, - "mang": 25616, - "mang": 31096, - "manga": 11873, - "mangal": 43027, - "manger": 48251, - "mango": 43831, - "mango": 13962, - "mangrove": 47180, - "manhatt": 10152, - "manhattan": 10961, - "mani": 5654, - "mani": 10718, - "mania": 8435, - "maniac": 31814, - "maniacs": 41444, - "manian": 40077, - "manic": 23017, - "manic": 37825, - "manicure": 33637, - "manife": 14379, - "manifest": 34422, - "manifestation": 48348, - "manifesto": 20907, - "manil": 38827, - "manila": 10969, - "manipu": 40261, - "manipul": 19237, - "manipulation": 30277, - "manipur": 47757, - "manish": 41759, - "manish": 44720, - "manit": 15693, - "manitoba": 20342, - "manjaro": 41489, - "mankind": 24155, - "manly": 25194, - "mann": 19396, - "mann": 4783, - "manne": 30160, - "manned": 26139, - "mannequin": 43388, - "manner": 20700, - "manners": 31693, - "manning": 15996, - "manny": 37054, - "manny": 20933, - "mano": 15753, - "mano": 24016, - "manoj": 41146, - "manor": 41830, - "manor": 13614, - "mans": 28422, - "mans": 7746, - "mansfield": 25543, - "manship": 15460, - "mansion": 13404, - "manslaughter": 48632, - "manson": 26715, - "mant": 25122, - "mant": 27037, - "manta": 41431, - "mantis": 39946, - "mantle": 22159, - "mantra": 25162, - "manu": 3404, - "manu": 25799, - "manual": 12268, - "manuel": 29171, - "manuel": 9567, - "manufac": 5105, - "manufacture": 27741, - "manufactured": 24010, - "manufacturer": 15668, - "manufacturers": 18763, - "manufacturing": 8386, - "manure": 47907, - "manus": 28181, - "manuscript": 24365, - "manuscripts": 40765, - "manutd": 20994, - "many": 28484, - "many": 1346, - "manziel": 40637, - "mao": 47447, - "mao": 25605, - "maori": 43400, - "map": 25180, - "map": 3923, - "maple": 21980, - "maple": 10570, - 
"mapleleafs": 41257, - "mapoli": 28768, - "mapp": 36894, - "mapped": 41596, - "mapping": 15231, - "maps": 8765, - "mapu": 42082, - "mar": 675, - "mar": 3091, - "mara": 15655, - "marais": 47913, - "maran": 44732, - "marath": 16274, - "marathi": 34102, - "marathon": 40764, - "marathon": 5910, - "marau": 38475, - "marbella": 36182, - "marble": 45429, - "marble": 13071, - "marbles": 42931, - "marc": 14054, - "marc": 9075, - "marca": 38242, - "marcel": 17726, - "marcel": 24652, - "marcelo": 35939, - "march": 10638, - "march": 2227, - "marche": 36173, - "marched": 37976, - "marches": 38249, - "marchfor": 31721, - "marching": 15082, - "marchmadness": 28555, - "marci": 36698, - "marcia": 41075, - "marck": 47733, - "marco": 24719, - "marco": 10924, - "marcor": 39945, - "marcorubio": 41143, - "marcos": 21696, - "marcu": 20760, - "marcus": 48955, - "marcus": 9895, - "mardi": 39728, - "mardi": 29229, - "mardigras": 43343, - "mare": 26512, - "mare": 8870, - "mares": 19724, - "marg": 44014, - "margar": 16838, - "margare": 10232, - "margaret": 12185, - "margarita": 25958, - "margaritas": 42679, - "margate": 37428, - "margin": 19464, - "margin": 21357, - "marginal": 38320, - "margins": 33763, - "margot": 37144, - "mari": 2603, - "mari": 19322, - "maria": 41109, - "maria": 6595, - "mariachi": 44299, - "mariah": 31214, - "mariah": 24789, - "mariahcarey": 36538, - "marian": 41129, - "marian": 24677, - "mariana": 44224, - "marianne": 32214, - "mariano": 43988, - "marie": 20657, - "marie": 7864, - "marietta": 46634, - "marig": 41002, - "marijuana": 9864, - "maril": 14611, - "marilyn": 38959, - "marilyn": 18489, - "marin": 8910, - "marin": 23992, - "marina": 12060, - "marinated": 33406, - "marine": 20674, - "marine": 5746, - "mariner": 39972, - "mariners": 19086, - "marines": 15018, - "marino": 30878, - "mario": 39176, - "mario": 7600, - "marion": 37765, - "marion": 18397, - "maris": 21512, - "maris": 33093, - "marisa": 42938, - "mariska": 44703, - "marissa": 31219, - "marist": 48223, - 
"mariti": 13124, - "maritime": 14331, - "marj": 38639, - "mark": 3805, - "mark": 2110, - "marke": 2399, - "marked": 12360, - "marker": 18170, - "markers": 23664, - "market": 11614, - "market": 2196, - "marketer": 33482, - "marketers": 23682, - "marketing": 19535, - "marketing": 2905, - "marketplace": 18241, - "markets": 7292, - "markham": 39817, - "marking": 14705, - "markings": 41046, - "markle": 32672, - "marko": 38338, - "marks": 5466, - "markus": 33725, - "marl": 24922, - "marlborough": 43515, - "marlene": 45117, - "marley": 16504, - "marlin": 34275, - "marlins": 23309, - "marlon": 32995, - "marmalade": 39068, - "marnock": 48305, - "maro": 27029, - "maroon": 20501, - "marqu": 20704, - "marque": 13012, - "marquee": 27725, - "marquette": 37624, - "marquez": 27317, - "marquis": 33530, - "marr": 32871, - "marrake": 37125, - "marrakech": 39006, - "marri": 3839, - "marriage": 38047, - "marriage": 7040, - "marriages": 38190, - "married": 6791, - "marries": 46283, - "marriott": 19211, - "marrow": 31030, - "marry": 13288, - "marrying": 40507, - "mars": 41469, - "mars": 7496, - "marsden": 43344, - "marse": 26577, - "marseille": 30365, - "marsh": 9237, - "marsh": 13505, - "marsha": 21491, - "marshal": 26608, - "marshall": 30939, - "marshall": 9811, - "marshals": 44175, - "marshes": 43450, - "marshmal": 21069, - "marshmallow": 28530, - "marshmallows": 39471, - "mart": 2348, - "mart": 7772, - "marta": 32858, - "martens": 43211, - "marth": 34493, - "martha": 16427, - "marti": 20577, - "martial": 17088, - "martialarts": 35895, - "martian": 30214, - "martin": 6929, - "martin": 3690, - "martina": 34393, - "martinez": 13913, - "marting": 47570, - "martini": 22199, - "martino": 41675, - "martins": 30569, - "marty": 9926, - "marty": 17169, - "martyn": 44075, - "martyr": 36155, - "martyr": 26067, - "martyrdom": 43110, - "martyred": 39114, - "martyrs": 24707, - "maru": 37413, - "maru": 31838, - "marvel": 13835, - "marvel": 5996, - "marvelcomics": 46897, - "marvell": 26576, - 
"marvellous": 28402, - "marvelous": 25487, - "marvin": 19675, - "marx": 30559, - "marx": 26001, - "marxist": 45205, - "mary": 5146, - "mary": 2676, - "maryam": 33636, - "maryam": 36393, - "maryland": 11379, - "marys": 40905, - "marys": 40228, - "mas": 5226, - "mas": 1412, - "masa": 24995, - "masa": 41868, - "masala": 31483, - "masc": 23564, - "mascar": 46984, - "mascara": 31635, - "mascot": 13983, - "mascots": 43266, - "mascul": 25589, - "masculine": 48269, - "masculinity": 40465, - "mase": 49128, - "maser": 25798, - "maserati": 30442, - "mash": 12317, - "mash": 15680, - "mashable": 41026, - "mashed": 27395, - "mashup": 27079, - "masi": 35965, - "masjid": 31420, - "mask": 19262, - "mask": 8306, - "masked": 25757, - "masking": 47046, - "masks": 19055, - "maslow": 44359, - "mason": 17424, - "mason": 9699, - "masonic": 36491, - "masonry": 30764, - "masons": 37195, - "masqu": 26593, - "masquer": 29604, - "masquerade": 36944, - "mass": 4636, - "mass": 4854, - "massach": 14484, - "massachuse": 14577, - "massachusetts": 14756, - "massacre": 14696, - "massage": 13055, - "masse": 41735, - "masses": 22978, - "massey": 29868, - "massi": 17239, - "massimo": 45821, - "massive": 4818, - "massively": 34297, - "mast": 45916, - "mast": 27920, - "master": 4534, - "master": 3498, - "mastercard": 40542, - "masterchef": 34809, - "masterclass": 17529, - "mastered": 32616, - "masterful": 46823, - "mastering": 28326, - "mastermind": 34029, - "masterpiece": 12066, - "masterpieces": 37596, - "masters": 6913, - "mastery": 34800, - "mastiff": 42311, - "maswar": 47887, - "mat": 905, - "mat": 9063, - "mata": 17270, - "match": 7733, - "match": 2439, - "matcha": 32433, - "matchday": 15947, - "matched": 17792, - "matches": 8609, - "matching": 11840, - "matchup": 19355, - "matchups": 49162, - "mate": 6137, - "mate": 2936, - "mated": 33813, - "mateo": 34991, - "mater": 23724, - "materi": 7084, - "material": 7118, - "materials": 8161, - "maternal": 26131, - "maternity": 23894, - "mates": 5817, - 
"math": 13277, - "math": 6025, - "mathe": 8725, - "mathemat": 11901, - "mathematical": 25609, - "mathematician": 41036, - "mathematics": 20113, - "mathew": 36333, - "mathews": 37120, - "mathi": 23014, - "mathieu": 40417, - "maths": 14763, - "mati": 12716, - "mati": 32268, - "matic": 36859, - "matic": 7900, - "matically": 38282, - "matics": 23634, - "matil": 26751, - "matilda": 36308, - "matin": 44849, - "matinee": 38525, - "mating": 34346, - "mation": 11701, - "matisse": 43446, - "mato": 13127, - "matologist": 48842, - "matology": 27940, - "matory": 25519, - "matri": 27041, - "matrix": 18078, - "mats": 22259, - "matsu": 30242, - "matt": 7972, - "matt": 3972, - "mattb": 42791, - "matte": 31237, - "matte": 19771, - "mattel": 35365, - "matteo": 33120, - "matter": 30471, - "matter": 3828, - "matters": 5708, - "matth": 41846, - "matthe": 5116, - "matthew": 17588, - "matthew": 7008, - "matthews": 16739, - "matthi": 29853, - "matthias": 45104, - "matti": 39840, - "mattress": 23438, - "matty": 31233, - "matty": 29176, - "matu": 40616, - "matur": 22897, - "mature": 14417, - "maturity": 28047, - "mau": 8134, - "mau": 23033, - "maui": 20463, - "maul": 30725, - "maur": 10574, - "maure": 25191, - "maureen": 31723, - "maurice": 20200, - "mauricio": 39066, - "mauriti": 28406, - "mauritius": 29305, - "mauro": 41691, - "mav": 25697, - "maver": 16700, - "maverick": 27425, - "mavericks": 30092, - "mavs": 30665, - "maw": 39351, - "maw": 42271, - "mawards": 37682, - "max": 4898, - "max": 3902, - "maxi": 8554, - "maxi": 23266, - "maxim": 19892, - "maxim": 38574, - "maximize": 28673, - "maximum": 13162, - "maximus": 44312, - "maxine": 38468, - "maxwell": 19611, - "maxx": 37466, - "may": 1686, - "may": 1270, - "maya": 45783, - "maya": 12987, - "mayan": 37952, - "maybe": 3746, - "mayday": 29957, - "mayer": 21196, - "mayfair": 35171, - "mayfield": 33933, - "mayhem": 21502, - "maymay": 26600, - "maymay": 33853, - "maymayentrata": 30480, - "maynard": 32487, - "mayne": 35771, - "mayo": 22449, 
- "mayo": 11280, - "mayor": 15429, - "mayor": 4676, - "mayoral": 28983, - "mayorof": 43533, - "mayors": 28501, - "mays": 35445, - "maythe": 42281, - "mayward": 45751, - "mayward": 23519, - "mayweather": 22774, - "maz": 9177, - "maz": 36215, - "mazda": 18506, - "maze": 21988, - "mazz": 29439, - "mañ": 37059, - "mañana": 39354, - "mb": 758, - "mb": 3996, - "mba": 8329, - "mban": 46685, - "mbar": 44452, - "mbb": 10736, - "mbc": 20137, - "mbe": 38395, - "mbe": 27004, - "mber": 5467, - "mber": 1034, - "mberg": 26372, - "mbers": 5443, - "mbi": 45347, - "mble": 20310, - "mble": 4756, - "mbles": 28693, - "mbling": 28604, - "mbo": 25733, - "mbo": 11319, - "mbps": 44896, - "mbs": 10370, - "mbta": 38979, - "mbu": 42228, - "mbuhari": 36752, - "mc": 1278, - "mc": 4126, - "mca": 40570, - "mca": 14635, - "mcal": 28663, - "mcar": 43776, - "mcbride": 35080, - "mcc": 21192, - "mccabe": 37628, - "mccaf": 47385, - "mccain": 20397, - "mccall": 34844, - "mccann": 27140, - "mccar": 9570, - "mccarthy": 16974, - "mccartney": 19958, - "mccl": 24709, - "mccla": 43672, - "mccle": 40139, - "mcclure": 44945, - "mcco": 46152, - "mccon": 32638, - "mccor": 23057, - "mccormack": 45164, - "mccormick": 39088, - "mccoy": 20218, - "mccr": 41996, - "mccre": 25393, - "mccul": 38833, - "mccull": 41782, - "mcd": 28930, - "mcder": 27355, - "mcdermott": 34504, - "mcdon": 12171, - "mcdonald": 10741, - "mcdonalds": 17674, - "mcdonnell": 34360, - "mcdowell": 34119, - "mce": 26864, - "mcel": 28752, - "mcen": 47423, - "mcfad": 36976, - "mcfadden": 42105, - "mcfar": 29020, - "mcfarlane": 47174, - "mcfc": 16416, - "mcfly": 38211, - "mcg": 42507, - "mcg": 27995, - "mcgee": 29223, - "mcgill": 46524, - "mcgill": 35511, - "mcgin": 29596, - "mcgowan": 40462, - "mcgr": 25169, - "mcgra": 29367, - "mcgrath": 28759, - "mcgraw": 40950, - "mcgregor": 19642, - "mcgu": 34294, - "mcguinness": 45299, - "mcguire": 32635, - "mci": 46212, - "mci": 45491, - "mcil": 30481, - "mcin": 18770, - "mcintosh": 45353, - "mcintyre": 33369, - 
"mck": 6781, - "mckay": 33611, - "mcke": 27424, - "mckee": 43529, - "mcken": 42619, - "mckenna": 24924, - "mckenzie": 25502, - "mckin": 15437, - "mckinley": 39891, - "mckinney": 33554, - "mckinnon": 48736, - "mckinsey": 48143, - "mcl": 49021, - "mcla": 12565, - "mclaren": 37381, - "mclaren": 16789, - "mclau": 32285, - "mclaughlin": 35346, - "mcle": 25299, - "mclean": 28666, - "mcleod": 40259, - "mcm": 12251, - "mcmahon": 24026, - "mcmaster": 42703, - "mcmillan": 45603, - "mcn": 42919, - "mcnam": 32682, - "mcnamara": 37506, - "mcne": 42545, - "mco": 33723, - "mcqueen": 22544, - "mcr": 29884, - "mcr": 16966, - "mcs": 27020, - "mcu": 30403, - "md": 8637, - "md": 4732, - "mdc": 38773, - "mdc": 41761, - "mds": 48746, - "mdt": 40822, - "me": 613, - "me": 614, - "mea": 46045, - "mea": 17711, - "mead": 12134, - "mead": 21567, - "meade": 37218, - "meado": 16402, - "meadow": 25213, - "meadow": 17195, - "meadows": 17178, - "meal": 29662, - "meal": 5478, - "meals": 11229, - "mean": 4189, - "mean": 3450, - "meand": 48015, - "meaning": 14586, - "meaning": 8342, - "meaningful": 17480, - "meaningless": 48932, - "meanings": 45814, - "means": 3494, - "meant": 8674, - "meantime": 27499, - "meanwhile": 9650, - "meas": 5867, - "measles": 38230, - "measurable": 48010, - "measure": 15261, - "measure": 10579, - "measured": 23154, - "measurement": 20973, - "measurements": 29894, - "measures": 11936, - "measuring": 18064, - "meat": 10805, - "meat": 6480, - "meatball": 43642, - "meatballs": 29233, - "meath": 37920, - "meatless": 48085, - "meats": 29558, - "mec": 27432, - "mecca": 36095, - "mech": 38305, - "mechan": 6715, - "mechanic": 24582, - "mechanical": 14467, - "mechanics": 20536, - "mechanism": 22576, - "mechanisms": 28610, - "meck": 41908, - "med": 1948, - "med": 2177, - "meda": 33614, - "medal": 29714, - "medal": 6974, - "medalist": 21040, - "medalists": 43397, - "medalli": 31349, - "medallion": 43469, - "medallist": 41472, - "medals": 14710, - "mede": 48225, - "meded": 27627, - 
"medi": 1436, - "media": 22064, - "media": 1895, - "mediac": 37490, - "median": 30491, - "mediation": 42829, - "medic": 3602, - "medic": 35441, - "medicaid": 25421, - "medical": 18432, - "medical": 4116, - "medicare": 23710, - "medication": 23771, - "medications": 37181, - "medicinal": 28772, - "medicine": 5616, - "medicines": 26541, - "medics": 46688, - "medieval": 38956, - "medieval": 10789, - "medina": 27281, - "mediocre": 41170, - "medit": 19130, - "meditate": 38039, - "meditation": 10827, - "mediter": 14194, - "mediterran": 14358, - "mediterranean": 15327, - "medium": 8675, - "medley": 24793, - "meds": 25075, - "medtech": 42044, - "medusa": 44216, - "medway": 42286, - "mee": 1725, - "mee": 14075, - "meek": 28935, - "meen": 37940, - "meen": 46515, - "meer": 26714, - "meer": 27555, - "meet": 5714, - "meet": 1633, - "meeting": 48566, - "meeting": 2071, - "meetings": 9980, - "meets": 5972, - "meetthe": 27575, - "meetup": 15430, - "meg": 11500, - "meg": 16186, - "mega": 15979, - "mega": 9068, - "megab": 38103, - "megadeth": 46741, - "megal": 37650, - "megam": 26073, - "megan": 19127, - "megan": 11503, - "megap": 33624, - "megat": 35581, - "megh": 31192, - "meghan": 39939, - "meghan": 18261, - "meh": 10512, - "meh": 22211, - "mehta": 25031, - "mei": 22564, - "mei": 25198, - "meier": 29812, - "mein": 28857, - "mein": 21466, - "meister": 28407, - "mek": 44645, - "mel": 1902, - "mel": 6834, - "mela": 35032, - "melan": 22261, - "melanch": 44818, - "melancholy": 47821, - "melani": 34031, - "melania": 32796, - "melanie": 22153, - "melanoma": 40862, - "melb": 47007, - "melb": 28980, - "melbourne": 28387, - "melbourne": 6995, - "melee": 45108, - "meli": 28885, - "melinda": 46303, - "melis": 18913, - "melissa": 41866, - "melissa": 13030, - "mell": 22531, - "mell": 41583, - "mello": 47594, - "mellon": 45162, - "mellow": 32034, - "melo": 10354, - "melo": 22374, - "melodic": 41877, - "melodies": 38412, - "melody": 19119, - "melon": 12146, - "melrose": 36296, - "melt": 22209, - 
"melt": 15957, - "meltdown": 30613, - "melted": 23037, - "melting": 19247, - "melton": 46062, - "melts": 31446, - "melville": 46030, - "melvin": 31544, - "mely": 6373, - "mem": 4937, - "mem": 34944, - "memb": 2114, - "member": 29566, - "member": 1640, - "members": 2567, - "membership": 11562, - "membrane": 34088, - "meme": 35157, - "meme": 9169, - "memes": 12828, - "memo": 15967, - "memo": 19334, - "memoir": 20532, - "memoirs": 45311, - "memor": 1858, - "memorab": 26271, - "memorabilia": 27488, - "memorable": 13172, - "memorial": 16285, - "memorial": 4642, - "memorialday": 21598, - "memoriam": 48191, - "memories": 4304, - "memory": 44766, - "memory": 5137, - "memph": 10285, - "memphis": 38432, - "memphis": 11298, - "men": 1552, - "men": 1656, - "mena": 23052, - "menace": 29949, - "mend": 8151, - "mend": 46927, - "mendel": 49268, - "mendes": 18060, - "mendez": 48275, - "mendo": 19327, - "mendoza": 23680, - "meng": 37102, - "meng": 37450, - "mening": 46428, - "menon": 38255, - "menopau": 34974, - "menopause": 46026, - "mens": 16924, - "mens": 10495, - "mensfashion": 27578, - "menstru": 28345, - "menstrual": 40915, - "menswear": 18803, - "ment": 1585, - "ment": 777, - "mental": 8611, - "mental": 3448, - "mentalhealth": 20593, - "mentalhealth": 13022, - "mentality": 26647, - "mentally": 14307, - "mentary": 4468, - "mentation": 9512, - "mentday": 40397, - "mente": 40302, - "mente": 36396, - "mented": 9249, - "menting": 14471, - "mention": 43881, - "mention": 6762, - "mentioned": 11948, - "mentioning": 34290, - "mentions": 12334, - "mento": 30582, - "mentor": 45342, - "mentor": 11642, - "mentoring": 19610, - "mentors": 20945, - "mentorship": 33878, - "ments": 1827, - "menu": 6225, - "menus": 33534, - "meo": 30792, - "meow": 39965, - "meow": 17246, - "mep": 27095, - "mer": 1316, - "mer": 2452, - "mera": 20028, - "merc": 34357, - "merc": 44399, - "mercado": 45479, - "merce": 8409, - "mercede": 34959, - "mercedes": 26403, - "mercedes": 10685, - "mercedesam": 40107, - 
"mercedesbenz": 32347, - "mercen": 40301, - "mercer": 21632, - "merch": 11504, - "merchandi": 14954, - "merchandise": 16808, - "merchandising": 49196, - "merchant": 19563, - "merchants": 34427, - "merci": 23364, - "merci": 29378, - "mercur": 11471, - "mercury": 45203, - "mercury": 12653, - "mercy": 33249, - "mercy": 10815, - "mere": 29657, - "mere": 10342, - "mered": 24657, - "mered": 32297, - "meredith": 25103, - "merely": 28718, - "merge": 30406, - "merged": 46492, - "merger": 24744, - "merging": 49256, - "meri": 17993, - "meri": 36109, - "meria": 48433, - "meric": 27097, - "merica": 30561, - "meridi": 37901, - "meridian": 31195, - "mering": 41060, - "meringue": 41661, - "merino": 42648, - "merit": 20830, - "merkel": 24715, - "merle": 48586, - "merlin": 26517, - "merlot": 40424, - "mermaid": 16064, - "mermaids": 43617, - "mero": 19097, - "merr": 48288, - "merri": 21462, - "merrill": 47713, - "merritt": 36462, - "merry": 14167, - "merry": 5779, - "merrychristmas": 19672, - "mers": 4199, - "mersal": 36711, - "mersey": 25248, - "mersey": 46239, - "merseyside": 35382, - "mert": 48496, - "merton": 35315, - "mery": 40873, - "meryl": 35787, - "mes": 28432, - "mes": 3029, - "mesa": 18956, - "mese": 42018, - "mesh": 15030, - "mesm": 18695, - "mesmer": 38435, - "mesmeri": 25985, - "mesmerizing": 35637, - "meso": 25537, - "mesqu": 46819, - "mess": 2490, - "mess": 8188, - "message": 3918, - "messages": 9390, - "messaging": 23234, - "messe": 40391, - "messed": 23580, - "messenger": 17389, - "messi": 19394, - "messi": 11252, - "messiah": 28737, - "messing": 23144, - "messy": 15987, - "mest": 23780, - "mester": 47349, - "mesut": 49177, - "met": 5249, - "met": 2340, - "meta": 14803, - "meta": 22701, - "metab": 16150, - "metabol": 48389, - "metaboli": 25573, - "metabolic": 34311, - "metabolism": 27824, - "metal": 8935, - "metal": 4044, - "metall": 19084, - "metallic": 17257, - "metallica": 24079, - "metals": 21375, - "metam": 28862, - "metamor": 39030, - "metamorpho": 47601, - 
"metaph": 24189, - "metaphor": 34233, - "metast": 41973, - "mete": 11226, - "meteor": 26429, - "meteor": 26823, - "meteoro": 25948, - "meteorologist": 42849, - "meter": 10104, - "meters": 13247, - "metgala": 30089, - "meth": 21867, - "meth": 26177, - "methane": 37565, - "metho": 5770, - "method": 10284, - "methodist": 25165, - "methodo": 28488, - "methodology": 37316, - "methods": 12200, - "methyl": 48999, - "metmuseum": 28207, - "meto": 25679, - "metoo": 24722, - "metr": 15086, - "metre": 27889, - "metres": 19798, - "metric": 19950, - "metrical": 40704, - "metrics": 24396, - "metro": 7257, - "metro": 6784, - "metroid": 39957, - "metropolis": 40476, - "metropolitan": 19013, - "metry": 20039, - "mets": 9633, - "mett": 28081, - "metz": 40506, - "meu": 34520, - "mew": 40368, - "mex": 3213, - "mex": 18387, - "mexic": 31728, - "mexican": 37442, - "mexican": 8186, - "mexicans": 47729, - "mexico": 31834, - "mexico": 4604, - "mey": 28584, - "mey": 27777, - "meyer": 13963, - "meyers": 32326, - "mez": 30615, - "mez": 46833, - "mezz": 38771, - "mf": 18199, - "mf": 11067, - "mfa": 24107, - "mfc": 39474, - "mfg": 21912, - "mfw": 27309, - "mg": 10003, - "mg": 8014, - "mga": 23954, - "mgm": 27572, - "mgmt": 22288, - "mgr": 31500, - "mgs": 48073, - "mgt": 48663, - "mh": 9962, - "mh": 10834, - "mha": 41944, - "mhealth": 41225, - "mhs": 28815, - "mhz": 31550, - "mi": 714, - "mi": 2251, - "mia": 5852, - "miam": 31053, - "miami": 15106, - "miami": 4891, - "mian": 24792, - "miaw": 36046, - "mib": 48178, - "mic": 1213, - "mic": 3816, - "mica": 41551, - "micah": 33870, - "mice": 19030, - "mich": 25628, - "mich": 23029, - "micha": 2083, - "michael": 6051, - "michael": 2511, - "michaela": 41897, - "michaeljackson": 33532, - "michaels": 23868, - "michal": 47144, - "miche": 37966, - "micheal": 43709, - "michel": 5158, - "michel": 17153, - "michelangelo": 41245, - "michele": 20642, - "michelin": 26330, - "michelle": 19028, - "michelle": 8625, - "michi": 5658, - "michigan": 32344, - 
"michigan": 6296, - "mick": 15171, - "mick": 12592, - "mickey": 41813, - "mickey": 13053, - "micky": 43011, - "micro": 3160, - "micro": 11374, - "microbes": 44671, - "microbi": 19496, - "microbial": 30335, - "microbiology": 35348, - "microbiome": 35148, - "micron": 48742, - "microphone": 24643, - "micropoetry": 35997, - "microscope": 29114, - "microscopy": 38431, - "microsof": 42424, - "microsoft": 38650, - "microsoft": 7254, - "microwave": 24240, - "mics": 16554, - "mid": 2192, - "mid": 4734, - "midcentury": 48988, - "midd": 2983, - "midday": 23390, - "middle": 9849, - "middle": 3694, - "middleeast": 32783, - "middles": 29769, - "middlesbrough": 32436, - "middlesex": 39154, - "middleton": 23627, - "middleweight": 35829, - "midfield": 28116, - "midfielder": 13423, - "midget": 30734, - "midi": 39496, - "midi": 27326, - "midland": 24822, - "midlands": 18062, - "midnight": 35746, - "midnight": 6302, - "mids": 40821, - "midst": 24752, - "midsummer": 35234, - "midterm": 34365, - "midterms": 32015, - "midtown": 26069, - "midway": 26536, - "midweek": 29120, - "midwest": 16627, - "midwi": 44802, - "midwife": 37681, - "midwives": 42355, - "mie": 20865, - "mie": 10555, - "miento": 46482, - "mier": 36490, - "mies": 8840, - "miff": 49398, - "mig": 28743, - "might": 2727, - "mighty": 26632, - "mighty": 7815, - "mign": 41678, - "migos": 44640, - "migr": 3736, - "migra": 28186, - "migraine": 35360, - "migrant": 18902, - "migrants": 15814, - "migrate": 41804, - "migrating": 43604, - "migration": 11891, - "migu": 12279, - "miguel": 33672, - "miguel": 14436, - "miho": 46870, - "mii": 39896, - "mik": 15096, - "mik": 46203, - "mika": 28609, - "mika": 25185, - "mike": 5884, - "mike": 3178, - "mikel": 48865, - "mikequind": 33508, - "mikequindazzi": 33551, - "mikey": 34934, - "mikey": 23368, - "mikha": 30999, - "mikhail": 38327, - "miki": 48863, - "miko": 35413, - "miku": 37703, - "mil": 1469, - "mil": 12826, - "mila": 26183, - "milan": 30380, - "milan": 8552, - "milano": 18585, - 
"milb": 42248, - "mild": 16085, - "mildly": 49059, - "mile": 7833, - "mile": 6243, - "mileage": 30579, - "miler": 44680, - "miles": 3446, - "milestone": 13485, - "milestones": 34025, - "miley": 25336, - "miley": 14321, - "mileycyrus": 28528, - "milf": 45386, - "milford": 35840, - "mili": 16698, - "miliband": 41440, - "milit": 3715, - "militant": 33629, - "militants": 23974, - "military": 24498, - "military": 4323, - "militi": 46625, - "militia": 32114, - "milk": 13409, - "milk": 5205, - "milkshake": 29066, - "milky": 37320, - "milky": 21120, - "milkyway": 43246, - "mill": 4221, - "mill": 6637, - "milla": 49381, - "millan": 34930, - "millan": 22188, - "millar": 41851, - "mille": 34066, - "millen": 48501, - "millenni": 10406, - "millennial": 28357, - "millennials": 18804, - "millennium": 21116, - "miller": 21699, - "miller": 5733, - "milli": 5340, - "millie": 29283, - "milling": 39133, - "million": 13154, - "million": 2506, - "millionaire": 25179, - "millionaires": 47159, - "millions": 8492, - "mills": 10331, - "millwall": 35902, - "milly": 45794, - "milne": 44590, - "milner": 45230, - "milo": 24548, - "milton": 39004, - "milton": 17360, - "milwau": 13452, - "milwaukee": 14259, - "mim": 39379, - "mimi": 27086, - "mimic": 47116, - "mimic": 46519, - "mimo": 45551, - "min": 771, - "min": 3331, - "mina": 15281, - "minaj": 25136, - "minal": 40222, - "minat": 33275, - "mince": 32396, - "mind": 5890, - "mind": 2575, - "mindanao": 44228, - "minded": 21330, - "mindful": 28457, - "mindfulness": 15707, - "minding": 45337, - "minds": 9244, - "mindset": 14217, - "mindy": 46875, - "mindy": 38551, - "mine": 20149, - "mine": 3347, - "minecraft": 15678, - "mined": 48034, - "minent": 12533, - "miner": 14109, - "miner": 26572, - "mineral": 17692, - "minerals": 21169, - "miners": 22119, - "mines": 16211, - "ming": 10868, - "ming": 2107, - "mingham": 7590, - "mingle": 38437, - "mingly": 36909, - "mington": 49283, - "mington": 23119, - "minh": 48734, - "minho": 21318, - "mini": 1810, - 
"mini": 3954, - "miniature": 44298, - "miniature": 16377, - "miniatures": 38816, - "minic": 31522, - "minim": 10005, - "minimal": 18458, - "minimalism": 42594, - "minimalist": 26641, - "minimize": 38697, - "minimum": 12244, - "minindia": 28458, - "mining": 8473, - "minion": 28622, - "minions": 27035, - "minis": 33409, - "minis": 35976, - "minister": 25688, - "minister": 3569, - "ministerial": 33008, - "ministers": 16406, - "ministries": 27895, - "ministry": 8742, - "mink": 42017, - "minn": 45991, - "minn": 47318, - "minne": 7083, - "minneapolis": 16977, - "minneso": 9380, - "minnesota": 9968, - "minnie": 24493, - "mino": 22791, - "minogue": 44202, - "minor": 8522, - "minorities": 28119, - "minority": 16210, - "minors": 36789, - "mins": 6196, - "minsk": 46151, - "minster": 11189, - "mint": 48084, - "mint": 7506, - "minted": 49377, - "minton": 20050, - "minu": 29064, - "minus": 15358, - "minute": 28931, - "minute": 4497, - "minutes": 3056, - "mio": 26366, - "mir": 2750, - "mir": 6585, - "mira": 21665, - "mira": 22762, - "mirac": 13685, - "miracle": 49208, - "miracle": 11543, - "miracles": 23478, - "miraculous": 38671, - "mirage": 28679, - "mirai": 49060, - "mirand": 32367, - "miranda": 17590, - "mire": 38140, - "mire": 30140, - "miri": 22273, - "miriam": 30950, - "miro": 34851, - "miro": 48317, - "mirren": 47600, - "mirro": 48500, - "mirror": 29823, - "mirror": 7220, - "mirrors": 21823, - "mirza": 36440, - "mis": 866, - "mis": 11239, - "mischief": 33896, - "misconceptions": 48681, - "misconduct": 30601, - "mise": 46567, - "mise": 17267, - "miser": 33394, - "miserable": 26196, - "misery": 28360, - "mises": 24390, - "misfits": 42708, - "mish": 15494, - "mish": 20981, - "misha": 35434, - "mishra": 33042, - "misleading": 30862, - "mism": 15948, - "miso": 27657, - "miso": 33441, - "misogy": 31315, - "misogyny": 48415, - "miss": 6984, - "miss": 1526, - "missal": 38337, - "missed": 3955, - "misses": 15844, - "missi": 3008, - "missile": 14411, - "missiles": 27868, - 
"missin": 36209, - "missing": 23509, - "missing": 3423, - "mission": 12738, - "mission": 2406, - "missionaries": 40580, - "missionary": 27915, - "missions": 6990, - "mississ": 26483, - "mississauga": 28393, - "mississi": 11687, - "mississippi": 12232, - "missou": 30710, - "missoula": 48549, - "missouri": 11835, - "missuni": 26347, - "missuniverse": 28766, - "missy": 48105, - "missy": 31515, - "missyou": 45799, - "mist": 12610, - "mist": 11946, - "mistak": 20478, - "mistake": 11303, - "mistaken": 29182, - "mistakenly": 48494, - "mistakes": 12824, - "mister": 26949, - "mister": 18895, - "mistle": 46800, - "mistletoe": 48569, - "mistre": 42039, - "mistress": 24349, - "mists": 28636, - "misty": 18799, - "misunderstood": 41574, - "misuse": 40970, - "mit": 3303, - "mit": 4551, - "mita": 47514, - "mitage": 27964, - "mitch": 6969, - "mitch": 14150, - "mitchell": 39339, - "mitchell": 9007, - "mite": 26929, - "mith": 21752, - "mith": 17948, - "miti": 17857, - "mitigate": 42273, - "mitigation": 35514, - "mito": 38254, - "mitochondri": 42132, - "mitra": 47703, - "mits": 24086, - "mitsu": 17905, - "mitsubi": 21604, - "mitsubishi": 23030, - "mitt": 17321, - "mitt": 21341, - "mitted": 10307, - "mitting": 27938, - "mitz": 41827, - "mium": 35891, - "miwx": 43941, - "mix": 3210, - "mix": 3285, - "mixed": 29376, - "mixed": 6780, - "mixer": 17200, - "mixers": 39175, - "mixes": 19061, - "mixing": 15588, - "mixtape": 11044, - "mixture": 28286, - "miy": 25695, - "miya": 36257, - "miz": 20881, - "miz": 30795, - "mize": 19076, - "mized": 43418, - "mizing": 38715, - "mizz": 19985, - "mizzou": 26165, - "mj": 13117, - "mj": 14733, - "mk": 11581, - "mk": 8937, - "mke": 36642, - "mkt": 24814, - "ml": 3627, - "ml": 5780, - "mla": 16723, - "mlas": 48464, - "mlb": 21039, - "mlb": 7482, - "mley": 40329, - "mlg": 45801, - "mlin": 24556, - "mlk": 17941, - "mlkday": 39905, - "mlm": 37611, - "mln": 18971, - "mlp": 23620, - "mlpfi": 45475, - "mlpfim": 45640, - "mls": 13077, - "mm": 1028, - "mm": 2848, - 
"mma": 34140, - "mma": 6096, - "mmc": 44253, - "mme": 13105, - "mmed": 19570, - "mmer": 35717, - "mmer": 7508, - "mmers": 28128, - "mmes": 42862, - "mmi": 34147, - "mming": 21038, - "mming": 16507, - "mmings": 31357, - "mmit": 41050, - "mmj": 43015, - "mmm": 37908, - "mmm": 7641, - "mmmm": 36312, - "mmmm": 13180, - "mmmmm": 21808, - "mmmmmm": 43740, - "mmo": 30418, - "mmon": 41131, - "mmor": 36657, - "mmorpg": 39476, - "mms": 37803, - "mmva": 42666, - "mmy": 28837, - "mmy": 8722, - "mn": 5086, - "mn": 4057, - "mna": 34877, - "mnd": 44776, - "mnet": 34129, - "mnf": 41105, - "mnl": 32980, - "mnleg": 42653, - "mns": 39040, - "mnt": 21477, - "mntwins": 45448, - "mnwild": 39044, - "mnwx": 39592, - "mo": 617, - "mo": 2080, - "moa": 33174, - "moana": 43241, - "mob": 2818, - "mob": 12754, - "mobi": 9451, - "mobil": 26343, - "mobil": 29815, - "mobile": 12935, - "mobile": 3451, - "mobiles": 44302, - "mobili": 20770, - "mobility": 12546, - "mobilization": 48916, - "moby": 47219, - "moc": 41439, - "moc": 36992, - "mocha": 28425, - "mochi": 47973, - "mock": 15641, - "mock": 12759, - "mocked": 47400, - "mocking": 28692, - "mocking": 37870, - "mocks": 35142, - "mod": 6362, - "mod": 10893, - "moda": 25814, - "modal": 33157, - "mode": 20402, - "mode": 6493, - "model": 4591, - "model": 2863, - "modeled": 39527, - "modeling": 13706, - "modelling": 19946, - "models": 6176, - "moder": 2894, - "moderate": 16435, - "moderated": 27928, - "moderating": 34242, - "moderator": 32659, - "modern": 11706, - "modern": 4077, - "modernart": 34417, - "moderni": 24328, - "modernism": 39601, - "modernist": 36773, - "modernization": 47294, - "modes": 30454, - "modest": 25436, - "modi": 9047, - "modi": 7774, - "modification": 37630, - "modified": 17964, - "modo": 36820, - "mods": 23843, - "modu": 9036, - "modular": 22437, - "module": 16757, - "modules": 30575, - "moe": 38655, - "moe": 17938, - "mof": 30798, - "moff": 27160, - "mog": 42362, - "moga": 41732, - "mogadishu": 45133, - "mogul": 41320, - 
"moh": 18979, - "moh": 35388, - "moha": 46892, - "moham": 7923, - "mohamed": 18472, - "mohammad": 19926, - "mohammed": 16168, - "mohan": 26521, - "mohan": 23586, - "mohawk": 34942, - "mohd": 49094, - "mohsin": 48861, - "moi": 20691, - "moi": 21825, - "moil": 30349, - "moines": 32091, - "moist": 19831, - "moist": 33263, - "moisture": 20412, - "moisturi": 25942, - "moj": 34505, - "moja": 49055, - "mojito": 46830, - "mojo": 25204, - "mok": 49146, - "mol": 4246, - "mol": 31582, - "mold": 21846, - "molding": 46274, - "moldova": 47317, - "mole": 9927, - "mole": 23529, - "molecular": 19370, - "molecule": 39233, - "molecules": 35643, - "molina": 34201, - "mollie": 48203, - "molly": 24368, - "molly": 12573, - "molo": 41510, - "mology": 32255, - "molten": 46071, - "moly": 47083, - "mom": 1614, - "mom": 2543, - "moma": 33605, - "mombasa": 40340, - "moment": 12197, - "moment": 2495, - "momento": 30078, - "moments": 5251, - "momentum": 15722, - "momlife": 43825, - "momma": 14508, - "mommy": 12456, - "momo": 48490, - "momo": 25980, - "moms": 28446, - "moms": 10042, - "momsdemand": 33744, - "mon": 749, - "mon": 2173, - "mona": 19143, - "monaco": 14938, - "monaghan": 39797, - "monarch": 27235, - "monarch": 22619, - "monarchs": 36750, - "monarchy": 47503, - "monaster": 19422, - "monastery": 21850, - "monc": 34847, - "moncton": 44962, - "mond": 14522, - "mond": 4475, - "monday": 6205, - "monday": 2098, - "mondaymorning": 40089, - "mondaymotiv": 45488, - "mondaymotivation": 8198, - "mondaymotivaton": 47034, - "mondays": 13815, - "monde": 29339, - "mondo": 36207, - "monds": 20317, - "mone": 25990, - "monet": 24499, - "monetary": 26394, - "moneti": 38056, - "money": 12743, - "money": 2327, - "mong": 43566, - "monger": 38928, - "mongers": 27670, - "mongo": 20680, - "mongolia": 27144, - "mongolian": 46335, - "moni": 46851, - "monia": 31161, - "monic": 30893, - "monica": 13540, - "monit": 9014, - "monitor": 10198, - "monitored": 45828, - "monitoring": 11030, - "monitors": 30478, - "monk": 
30557, - "monk": 16424, - "monkey": 29597, - "monkey": 9465, - "monkeys": 15781, - "monks": 29090, - "monmouth": 36929, - "mono": 8220, - "mono": 22537, - "monochrome": 25576, - "monogram": 39665, - "monologue": 47776, - "monopoly": 25241, - "monoxide": 49314, - "monro": 45750, - "monroe": 13625, - "mons": 19885, - "monsanto": 37592, - "monsi": 46677, - "monsieur": 48879, - "monsoon": 18872, - "monsta": 30718, - "monstax": 45631, - "monste": 47045, - "monster": 14454, - "monster": 6060, - "monsters": 11546, - "mont": 5186, - "mont": 5382, - "montag": 37202, - "montage": 32325, - "montal": 42126, - "montan": 28405, - "montana": 11436, - "monte": 8711, - "monte": 14667, - "montene": 28538, - "montenegro": 30378, - "monter": 36673, - "monterey": 23388, - "monterrey": 45254, - "montess": 43205, - "montessori": 45443, - "montgom": 13852, - "montgomery": 14951, - "month": 7680, - "month": 1924, - "monthly": 8764, - "months": 3109, - "monthsary": 42420, - "monton": 41961, - "montp": 39523, - "montre": 8434, - "montreal": 9262, - "montrose": 42347, - "monty": 43997, - "monty": 24038, - "monu": 9748, - "monument": 12019, - "monumental": 31297, - "monuments": 26916, - "mony": 4117, - "monza": 40380, - "moo": 4953, - "moo": 24626, - "mood": 42358, - "mood": 5394, - "moods": 43727, - "moody": 17170, - "moom": 36887, - "moon": 6334, - "moon": 3293, - "mooney": 37942, - "moonlight": 20001, - "moons": 29887, - "moonshine": 46706, - "moor": 14817, - "moor": 11877, - "moore": 28613, - "moore": 6708, - "moors": 32577, - "moose": 37562, - "moose": 17338, - "moot": 46895, - "mop": 33900, - "mopar": 41166, - "mor": 657, - "mor": 18614, - "mora": 29262, - "moral": 11246, - "morale": 39404, - "morales": 27117, - "morality": 34133, - "morally": 42519, - "morals": 46223, - "moran": 21557, - "moray": 44569, - "more": 5434, - "more": 750, - "morecam": 37305, - "morecambe": 43414, - "mored": 20195, - "moreland": 44135, - "moreno": 24826, - "morethan": 30889, - "morg": 34284, - "morgan": 
15432, - "morgan": 6075, - "morgen": 35106, - "mori": 25710, - "mori": 29514, - "moris": 43131, - "moritz": 45594, - "morley": 40439, - "mormon": 27715, - "morn": 22393, - "mornin": 28327, - "morning": 10769, - "morning": 1119, - "mornings": 12106, - "moro": 31613, - "moroc": 11996, - "moroccan": 27546, - "morocco": 15228, - "moron": 31875, - "morons": 46477, - "morow": 40779, - "morph": 23915, - "morph": 41700, - "morphe": 38978, - "morpho": 38622, - "morrha": 43044, - "morri": 9876, - "morris": 22560, - "morris": 9090, - "morrison": 40961, - "morrison": 14094, - "morrisons": 40965, - "morrissey": 30040, - "morro": 48363, - "morrow": 21611, - "mors": 13064, - "morse": 25282, - "mort": 24257, - "mort": 30583, - "mortal": 31883, - "mortal": 14680, - "mortality": 20347, - "mortar": 27258, - "mortg": 12069, - "mortgage": 13988, - "mortgages": 45391, - "mortimer": 47836, - "morton": 20698, - "morty": 37391, - "mory": 22633, - "mos": 28658, - "mos": 9593, - "mosa": 14164, - "mosa": 23809, - "mosaic": 17506, - "mosch": 47003, - "mosco": 9840, - "moscow": 10371, - "moseley": 47080, - "moses": 18451, - "mosley": 46228, - "mosqu": 15215, - "mosque": 12694, - "mosques": 41214, - "mosquit": 39699, - "mosquito": 25083, - "mosquitoes": 41870, - "moss": 25107, - "moss": 12815, - "most": 7034, - "most": 1096, - "mostly": 8829, - "mosul": 29165, - "mot": 16352, - "mot": 15452, - "mota": 42499, - "motd": 46232, - "motel": 26191, - "moth": 33208, - "moth": 11736, - "mother": 7455, - "mother": 3050, - "motherhood": 32274, - "motherland": 46774, - "mothers": 10546, - "mothersday": 15583, - "motherwell": 48104, - "moths": 29086, - "moti": 38210, - "motif": 35373, - "motion": 32139, - "motion": 7860, - "motiv": 3183, - "motivate": 26771, - "motivated": 16521, - "motivates": 44684, - "motivating": 37720, - "motivation": 26117, - "motivation": 4193, - "motivational": 32832, - "motivational": 20472, - "motivationmonday": 28703, - "motive": 36669, - "motley": 42553, - "motm": 41192, - 
"moto": 10646, - "moto": 11431, - "motocross": 34562, - "motogp": 16615, - "motor": 3975, - "motor": 7659, - "motorbike": 33341, - "motorcycle": 10297, - "motorcycles": 24869, - "motoring": 44491, - "motorists": 32766, - "motorola": 33738, - "motors": 14989, - "motorsport": 18371, - "motorsports": 24264, - "motorway": 31808, - "motown": 32685, - "mott": 44570, - "mott": 21708, - "motto": 23338, - "mou": 2809, - "mou": 25289, - "moud": 37698, - "moul": 25725, - "mould": 36743, - "moulin": 47656, - "moun": 2023, - "mound": 21414, - "mount": 20553, - "mount": 5532, - "mountain": 14547, - "mountain": 3965, - "mountaine": 24841, - "mountaineer": 49255, - "mountains": 5873, - "mounted": 17897, - "mounting": 29910, - "mounts": 36767, - "mour": 9053, - "mour": 42446, - "moured": 29555, - "mourinho": 18536, - "mourn": 33592, - "mourning": 24169, - "mourns": 42811, - "mous": 24837, - "mous": 17425, - "mouse": 33032, - "mouse": 9301, - "mousse": 31869, - "moustache": 32795, - "mouth": 15152, - "mouth": 4932, - "mouths": 38518, - "mov": 23950, - "move": 16624, - "move": 2783, - "moved": 6997, - "movember": 23474, - "movement": 5208, - "movements": 19665, - "mover": 37673, - "movers": 33957, - "moves": 6880, - "movi": 1707, - "movic": 43838, - "movie": 11247, - "movie": 2016, - "movies": 4772, - "moving": 32160, - "moving": 3584, - "mow": 31006, - "mow": 36329, - "mower": 30895, - "mowing": 46424, - "mowx": 44263, - "moy": 27276, - "moy": 34205, - "moyes": 37119, - "moz": 14761, - "moz": 43738, - "mozam": 26648, - "mozambique": 28831, - "mozart": 22132, - "mozz": 26317, - "mozzarella": 27845, - "mp": 1037, - "mp": 1246, - "mpa": 30749, - "mpc": 38560, - "mpd": 33814, - "mped": 28134, - "mper": 22803, - "mpg": 39830, - "mpg": 37454, - "mpgvip": 42149, - "mph": 5306, - "mpi": 43263, - "mping": 27999, - "mple": 21139, - "mplo": 47071, - "mpls": 34298, - "mpo": 33674, - "mpp": 39570, - "mps": 5504, - "mption": 9717, - "mpton": 27448, - "mpu": 47156, - "mpus": 25864, - "mpy": 17192, 
- "mq": 19103, - "mqm": 24687, - "mr": 3139, - "mr": 1982, - "mra": 44568, - "mrc": 25897, - "mri": 24773, - "mrs": 25003, - "mrs": 4255, - "mrt": 30256, - "mru": 22370, - "mrw": 15303, - "ms": 3525, - "ms": 988, - "msa": 36306, - "msc": 31826, - "msc": 20529, - "msd": 25804, - "msd": 36407, - "msdhoni": 32850, - "msf": 36239, - "msg": 44430, - "msg": 10928, - "msh": 41751, - "msi": 43597, - "msi": 45278, - "msk": 38501, - "msl": 42736, - "msm": 22210, - "msn": 18824, - "msn": 41042, - "msnbc": 20245, - "mson": 27773, - "mson": 12298, - "msp": 41445, - "msp": 22318, - "mss": 42136, - "mss": 48610, - "mst": 26335, - "msu": 26763, - "msu": 17298, - "mswx": 42957, - "msy": 43919, - "mt": 4252, - "mt": 3284, - "mta": 28691, - "mtb": 48306, - "mtb": 18747, - "mtc": 42482, - "mtg": 49142, - "mtg": 13648, - "mth": 48151, - "mtl": 22135, - "mtn": 26041, - "mtn": 18953, - "mtr": 46650, - "mts": 38751, - "mtv": 8099, - "mtv": 12555, - "mtvbr": 47258, - "mtvhottest": 16751, - "mtvstars": 19948, - "mu": 670, - "mu": 6411, - "mua": 21395, - "muay": 44910, - "muaythai": 47763, - "mubarak": 17957, - "muc": 49115, - "much": 14300, - "much": 1238, - "mucha": 42191, - "muchas": 26278, - "mucho": 19864, - "muck": 44731, - "muck": 45330, - "mud": 17491, - "mud": 11673, - "mudder": 49104, - "muddy": 21524, - "mue": 44383, - "mue": 40717, - "mueller": 46863, - "mueller": 14719, - "muen": 48646, - "muer": 33840, - "muf": 33852, - "mufc": 9013, - "muffin": 22696, - "muffins": 25922, - "mufti": 44930, - "mug": 16339, - "mug": 9722, - "mugabe": 36441, - "mughal": 37508, - "mugs": 22852, - "mugshot": 40028, - "muh": 36335, - "muh": 46475, - "muham": 10043, - "muhammad": 12259, - "muir": 44650, - "muir": 24745, - "muj": 44635, - "muk": 17327, - "muk": 32600, - "mukher": 34575, - "mukherjee": 37862, - "mul": 1899, - "mul": 43193, - "mula": 40937, - "mulator": 17463, - "mulberry": 39221, - "mule": 28695, - "mull": 17313, - "mull": 35310, - "mulled": 44641, - "mullen": 30797, - "muller": 33956, 
- "mullet": 35010, - "mulligan": 44336, - "mullins": 41265, - "mult": 34219, - "multi": 3947, - "multi": 6400, - "multic": 21683, - "multicul": 28004, - "multicultural": 34667, - "multil": 27975, - "multimedia": 27977, - "multin": 38996, - "multinational": 46540, - "multip": 40314, - "multiplayer": 27460, - "multiple": 6470, - "multipurpose": 47665, - "multit": 27814, - "multitasking": 48684, - "mulus": 26180, - "mum": 15565, - "mum": 4030, - "mumb": 5850, - "mumbai": 24279, - "mumbai": 6971, - "mumford": 46184, - "mummy": 16301, - "mums": 17868, - "mun": 2617, - "mun": 21059, - "muna": 48424, - "munch": 23587, - "munch": 33299, - "munchies": 44324, - "munchkin": 41305, - "mund": 14244, - "mundo": 20990, - "muni": 27327, - "muni": 39795, - "munich": 13526, - "munici": 12159, - "municipal": 43667, - "municipal": 16600, - "municipality": 29987, - "munition": 32668, - "munro": 36501, - "munster": 27201, - "mup": 21966, - "muppet": 40598, - "muppets": 40187, - "mups": 42195, - "mur": 2144, - "mur": 18293, - "mura": 45176, - "mural": 12315, - "murals": 31499, - "murder": 28136, - "murder": 5787, - "murdered": 13158, - "murderer": 26956, - "murderers": 48472, - "murdering": 36055, - "murders": 22409, - "murdoch": 29037, - "murphy": 48976, - "murphy": 8914, - "murray": 31978, - "murray": 7513, - "murs": 38783, - "mus": 2198, - "mus": 8103, - "musa": 30540, - "musc": 5696, - "muscat": 33322, - "muscle": 27323, - "muscle": 9269, - "muscles": 16786, - "muscular": 30606, - "muse": 2369, - "muse": 15686, - "museo": 36457, - "muses": 48243, - "museu": 27087, - "museum": 15602, - "museum": 2786, - "museums": 15542, - "museumweek": 37996, - "mush": 7635, - "mushroom": 13011, - "mushrooms": 14730, - "musi": 15628, - "music": 4110, - "music": 1179, - "musica": 26668, - "musical": 36002, - "musical": 5173, - "musically": 48893, - "musicals": 36974, - "musichistory": 37890, - "musician": 11179, - "musicians": 12498, - "musicislife": 43311, - "musicmonday": 35887, - "musicvideo": 
26764, - "musik": 32986, - "musings": 44961, - "musique": 42250, - "musk": 32143, - "musk": 19063, - "muskete": 32775, - "musketeers": 37993, - "musko": 34987, - "muskoka": 40832, - "musli": 4958, - "muslim": 43795, - "muslim": 7060, - "muslims": 10513, - "muss": 41493, - "mussels": 33393, - "must": 6783, - "must": 2048, - "mustache": 23451, - "mustaf": 23596, - "mustafa": 29000, - "mustang": 42361, - "mustang": 13309, - "mustangs": 22500, - "mustard": 15794, - "muster": 47361, - "mustread": 28978, - "mut": 12598, - "mut": 22839, - "mutant": 28384, - "mutation": 38626, - "mutations": 39651, - "mute": 31252, - "muted": 48028, - "muth": 34280, - "mutil": 39950, - "mutt": 45924, - "mutu": 17574, - "mutual": 15055, - "mutuals": 31158, - "muy": 44625, - "mv": 10580, - "mv": 8269, - "mvc": 40549, - "mvp": 8905, - "mw": 16725, - "mw": 11206, - "mwc": 24289, - "mwf": 48565, - "mx": 21947, - "mx": 9575, - "my": 1152, - "my": 607, - "mya": 31401, - "myal": 42735, - "myan": 13761, - "myanmar": 14764, - "myart": 38826, - "myco": 48362, - "mydayin": 41896, - "mydayinla": 42801, - "mydubai": 43475, - "mye": 27551, - "myel": 40084, - "myers": 15993, - "myjaps": 47939, - "myle": 43700, - "myles": 25511, - "mylife": 30537, - "mylittle": 37757, - "mylittlepony": 45107, - "myo": 16206, - "myr": 20272, - "myra": 35694, - "myri": 34972, - "myrt": 47785, - "myrtle": 27768, - "mys": 11724, - "myself": 3245, - "mysore": 44924, - "myspace": 41382, - "myster": 4669, - "mysteries": 20605, - "mysterious": 12650, - "mystery": 39828, - "mystery": 6711, - "mysti": 28711, - "mystic": 36264, - "mystic": 23722, - "mystical": 34122, - "myth": 20322, - "myth": 13878, - "mythical": 34377, - "mytho": 43857, - "mythology": 22496, - "myths": 18675, - "mz": 29509, - "mz": 33400, - "mzan": 36322, - "mzansi": 43301, - "má": 36842, - "mé": 21890, - "méxico": 46159, - "mü": 28142, - "mün": 41235, - "n": 77, - "n": 333, - "na": 1097, - "na": 1272, - "naa": 37738, - "naacp": 32176, - "nab": 6951, - "nab": 
19440, - "nabe": 35111, - "naby": 24800, - "nac": 14557, - "nac": 18950, - "nach": 12168, - "nach": 43622, - "nacho": 35647, - "nachos": 32847, - "nacht": 37261, - "nacional": 38782, - "nad": 6204, - "nad": 43928, - "nada": 31683, - "nadal": 20814, - "nade": 24908, - "nadi": 30512, - "nadia": 27487, - "nadine": 23356, - "nadu": 20936, - "nae": 19374, - "naf": 16161, - "naf": 45956, - "nafta": 43123, - "nag": 6694, - "nag": 23902, - "naga": 45953, - "naga": 38997, - "nagar": 17490, - "nage": 41219, - "nago": 38349, - "nagoya": 43303, - "nagpur": 43328, - "nah": 26421, - "nah": 11129, - "nahi": 35244, - "nai": 6230, - "nai": 10692, - "naia": 31340, - "naidu": 42429, - "naija": 16326, - "naik": 34424, - "nail": 19459, - "nail": 9059, - "nailart": 43532, - "nailed": 19035, - "nails": 8469, - "nair": 27107, - "naira": 39450, - "naire": 48892, - "nairobi": 17756, - "nais": 46396, - "naissance": 44761, - "naive": 43362, - "naj": 30985, - "naji": 32589, - "nak": 9248, - "nak": 25550, - "naked": 46371, - "naked": 11478, - "naku": 39864, - "nal": 14132, - "nal": 3119, - "nale": 27198, - "nall": 32869, - "nally": 26158, - "nam": 1410, - "nam": 12344, - "nama": 39586, - "naman": 27635, - "namaste": 35549, - "name": 18160, - "name": 1981, - "named": 3194, - "nameis": 40831, - "nament": 3916, - "naments": 16540, - "names": 6130, - "namesake": 41298, - "nami": 20393, - "namibia": 23731, - "naming": 19367, - "namjoon": 31986, - "namm": 35524, - "namo": 46013, - "namo": 24854, - "nan": 4375, - "nan": 7750, - "nana": 18761, - "nanaimo": 40518, - "nancy": 21511, - "nancy": 11425, - "nand": 20435, - "nandez": 12764, - "nando": 46044, - "nang": 48148, - "nani": 27980, - "nanny": 31104, - "nano": 15835, - "nano": 22006, - "nanop": 34177, - "nanotechnology": 42235, - "nanow": 46734, - "nant": 22526, - "nantes": 47533, - "nantucket": 41573, - "nao": 39319, - "naom": 34955, - "naomi": 20173, - "nap": 6568, - "nap": 11012, - "napa": 20545, - "napier": 40875, - "napkin": 38930, - "naples": 
23560, - "napo": 18715, - "napol": 20122, - "napoleon": 24969, - "napoli": 22445, - "napp": 11359, - "napping": 37657, - "naps": 31317, - "naq": 46453, - "nar": 2977, - "nar": 20145, - "nara": 33823, - "narcis": 25229, - "narcissi": 35442, - "narco": 38461, - "nard": 18216, - "nare": 34853, - "naren": 8468, - "narendr": 9807, - "narendra": 25848, - "narendramodi": 9853, - "narnia": 48693, - "narr": 11845, - "narrated": 43609, - "narrative": 15933, - "narratives": 35117, - "narrator": 46529, - "narrow": 24006, - "narrow": 16652, - "narrowly": 29747, - "naruto": 22732, - "nas": 3090, - "nas": 15250, - "nasa": 6841, - "nasal": 42853, - "nascar": 25723, - "nascar": 7868, - "nasdaq": 26629, - "nash": 6771, - "nash": 13620, - "nasheed": 49176, - "nashgrier": 33372, - "nashville": 45356, - "nashville": 8585, - "nasi": 47987, - "nasir": 47509, - "nassau": 34048, - "nasser": 43559, - "nasty": 32930, - "nasty": 8709, - "nat": 1276, - "nat": 11310, - "nata": 39392, - "natal": 28516, - "natali": 20296, - "natalia": 32978, - "natalie": 36634, - "natalie": 13595, - "natash": 48701, - "natasha": 23093, - "nate": 26643, - "nate": 7587, - "natgeo": 33009, - "natgeo": 25046, - "nath": 22203, - "nath": 19843, - "nathan": 13028, - "nathan": 9711, - "nathanfillion": 47422, - "nathaniel": 32667, - "nati": 1060, - "nati": 13384, - "natic": 44944, - "natin": 44358, - "nation": 2317, - "nation": 2670, - "national": 3126, - "national": 1362, - "nationalbestfriend": 42222, - "nationaldogday": 32227, - "nationalism": 29867, - "nationalist": 25058, - "nationality": 44451, - "nationally": 15130, - "nationalpark": 33060, - "nationalparks": 41204, - "nationals": 10784, - "nationaltrust": 34051, - "nations": 7654, - "nationwide": 13795, - "native": 20639, - "native": 4562, - "natives": 36060, - "nativity": 33988, - "natl": 39225, - "natl": 34465, - "nato": 13139, - "nats": 21106, - "natu": 2775, - "natur": 6800, - "natural": 13198, - "natural": 3288, - "naturally": 12995, - "naturals": 44686, - 
"nature": 9382, - "nature": 2625, - "naturelovers": 41514, - "naturephotography": 22533, - "natures": 15616, - "natureuk": 46193, - "nau": 5955, - "nau": 32878, - "naught": 41001, - "naughty": 47255, - "naughty": 15101, - "nautical": 31660, - "nav": 3413, - "nav": 25308, - "navajo": 35523, - "naval": 44725, - "naval": 13273, - "navar": 24848, - "navarro": 37104, - "nave": 42704, - "naveen": 43837, - "naver": 32534, - "navi": 16159, - "navi": 44848, - "navig": 12507, - "navigate": 24400, - "navigating": 33134, - "navigation": 20148, - "navigator": 38910, - "navis": 36377, - "navratri": 45428, - "navy": 28414, - "navy": 5598, - "naw": 16259, - "naw": 30500, - "nawaz": 49161, - "nawaz": 19523, - "nax": 38299, - "nay": 11704, - "nay": 16182, - "naya": 38917, - "nayanth": 38157, - "nayanthara": 45184, - "naz": 6363, - "naz": 35534, - "nazi": 12972, - "nazis": 21778, - "nb": 6459, - "nb": 6813, - "nba": 22524, - "nba": 5139, - "nbad": 43458, - "nbaf": 30127, - "nbafinals": 33803, - "nbap": 41956, - "nbaplayoffs": 43860, - "nbat": 46291, - "nbc": 9352, - "nbc": 8799, - "nbd": 24526, - "nbl": 42652, - "nc": 5021, - "nc": 4911, - "nca": 6921, - "ncaa": 9418, - "ncbd": 47221, - "ncc": 33195, - "ncc": 36686, - "ncds": 47573, - "ncfc": 31274, - "ncis": 33617, - "ncpol": 40562, - "ncr": 38474, - "ncs": 42689, - "nct": 27723, - "nct": 20319, - "ncwx": 36166, - "nd": 5625, - "nd": 1764, - "nda": 32862, - "ndc": 47564, - "ndi": 48229, - "ndp": 19257, - "nds": 31347, - "ndtv": 26261, - "ne": 557, - "ne": 1422, - "nea": 24068, - "neal": 33652, - "neal": 16730, - "near": 11296, - "near": 2252, - "nearby": 13314, - "nearest": 18985, - "nearing": 26571, - "nearly": 4816, - "nears": 37710, - "neat": 43201, - "neat": 15465, - "neath": 18315, - "neau": 31559, - "neb": 40209, - "nebra": 13371, - "nebraska": 14565, - "nebu": 49295, - "nebula": 22532, - "nec": 25109, - "nec": 22992, - "necess": 6961, - "necessarily": 25853, - "necessary": 8955, - "necessities": 43483, - "necessity": 33163, - 
"neck": 6066, - "neck": 6906, - "necklace": 7385, - "necklaces": 32276, - "necks": 29701, - "nectar": 33683, - "ned": 16030, - "ned": 1369, - "nederland": 49058, - "nee": 20494, - "nee": 10601, - "need": 3229, - "need": 1262, - "needed": 4049, - "needing": 22894, - "needle": 44490, - "needle": 19886, - "needles": 27250, - "needless": 39984, - "needs": 2536, - "needy": 30150, - "neel": 33092, - "neel": 46043, - "neer": 34245, - "nees": 47248, - "neet": 46362, - "neg": 5513, - "negan": 42623, - "negative": 8869, - "negatively": 40254, - "negativity": 34658, - "neglec": 18827, - "neglect": 33680, - "neglected": 31893, - "negli": 32594, - "negligence": 45658, - "negoti": 10216, - "negotiate": 32969, - "negotiating": 35510, - "negotiation": 36504, - "negotiations": 20433, - "negr": 42190, - "negro": 26554, - "neh": 40416, - "neh": 41697, - "neha": 44463, - "nehru": 30316, - "nei": 9366, - "neigh": 4061, - "neighb": 6534, - "neighbor": 7759, - "neighbor": 14485, - "neighborhood": 9471, - "neighborhoods": 26713, - "neighboring": 44754, - "neighbors": 13037, - "neighbour": 15858, - "neighbour": 23719, - "neighbourhood": 20312, - "neighbours": 17594, - "neil": 13591, - "neil": 8030, - "neilhimself": 45682, - "neill": 19324, - "neither": 14398, - "nek": 47727, - "neko": 47066, - "nel": 5476, - "nel": 2693, - "nell": 27081, - "nell": 8117, - "nelly": 21166, - "nels": 19296, - "nelson": 24774, - "nelson": 8586, - "nem": 45153, - "neman": 48553, - "neme": 30993, - "nemesis": 37811, - "nemo": 30441, - "nen": 17817, - "nen": 15451, - "nene": 44167, - "neo": 14562, - "neo": 11017, - "neon": 21043, - "neon": 13919, - "neonatal": 46464, - "neop": 49069, - "nep": 20739, - "nep": 41960, - "nepal": 25597, - "nepal": 10066, - "nepali": 47579, - "neph": 27926, - "nephe": 41810, - "nephew": 11689, - "nephews": 43747, - "nephro": 43054, - "neptune": 30566, - "ner": 2064, - "ner": 998, - "nerd": 24452, - "nerd": 12273, - "nerds": 22609, - "nerdy": 33124, - "nered": 17583, - "nerf": 42914, - 
"nering": 20226, - "nero": 29048, - "ners": 2129, - "nerve": 18571, - "nerves": 27813, - "nervous": 13928, - "nery": 48597, - "nes": 5457, - "nes": 4980, - "nesburg": 27159, - "nese": 32220, - "ness": 7187, - "ness": 1294, - "nesses": 20107, - "nessy": 32939, - "nest": 20302, - "nest": 8719, - "nesting": 28860, - "nestle": 43967, - "nestled": 38107, - "nests": 41133, - "net": 1851, - "net": 2315, - "netany": 23137, - "netanyahu": 23583, - "netball": 19761, - "netes": 44335, - "netfli": 6304, - "netflix": 35325, - "netflix": 6600, - "nether": 9946, - "netherlands": 11060, - "neti": 43980, - "netneutrality": 47794, - "nets": 8582, - "nett": 23403, - "nett": 6975, - "nette": 13271, - "network": 23285, - "network": 3304, - "networking": 9818, - "networks": 10004, - "neu": 3855, - "neu": 43342, - "neue": 45764, - "neur": 19001, - "neur": 31976, - "neural": 26388, - "neuro": 7401, - "neuro": 36000, - "neurological": 41718, - "neurology": 43197, - "neurons": 40442, - "neuroscience": 23381, - "neutr": 17207, - "neutral": 17011, - "neutrality": 26511, - "neutron": 44056, - "nev": 10236, - "nev": 43645, - "neva": 43304, - "nevada": 13499, - "neve": 44099, - "neve": 44023, - "never": 6746, - "never": 1426, - "neveragain": 45053, - "neverforget": 19242, - "nevergiveup": 42497, - "neverland": 41483, - "nevertheless": 48355, - "nevertrump": 47494, - "neville": 19269, - "nevis": 43670, - "new": 1218, - "new": 686, - "newark": 20240, - "newbie": 45427, - "newborn": 18320, - "newbury": 34169, - "newcastle": 41955, - "newcastle": 9302, - "newcomer": 30648, - "newcomers": 44037, - "newe": 40068, - "newell": 41436, - "newer": 33099, - "newest": 4990, - "newfound": 25250, - "newfoundland": 28079, - "newh": 18546, - "newin": 31911, - "newjersey": 32621, - "newly": 42186, - "newly": 7056, - "newman": 15815, - "newmarket": 38617, - "newmexico": 35238, - "newmusic": 32510, - "newmusic": 17201, - "newor": 25969, - "neworleans": 31205, - "newport": 42580, - "newport": 14846, - "newprofile": 
14633, - "newprofilepic": 14754, - "newrelease": 34793, - "news": 6216, - "news": 1120, - "newsat": 43979, - "newsc": 28656, - "newscast": 45031, - "newsle": 10727, - "newsletter": 11069, - "newsnow": 48650, - "newsp": 7109, - "newspaper": 8786, - "newspapers": 22423, - "newsroom": 23200, - "newt": 37224, - "newton": 33122, - "newton": 12606, - "newtown": 31747, - "newyear": 22161, - "newyear": 12999, - "newyearseve": 37587, - "newyork": 18140, - "newyork": 10454, - "newyorkcity": 30460, - "newyorker": 39732, - "newzealand": 21117, - "nex": 6897, - "nex": 39720, - "next": 12434, - "next": 1131, - "nextgen": 41933, - "nexus": 19053, - "ney": 3857, - "ney": 1438, - "neymar": 21878, - "neys": 12616, - "nez": 27388, - "nf": 15195, - "nf": 25643, - "nfamily": 20098, - "nfc": 23695, - "nffc": 27893, - "nfl": 11219, - "nfl": 4691, - "nfldraft": 25002, - "ng": 10352, - "ng": 5215, - "nga": 35477, - "ngc": 29046, - "ngo": 38740, - "ngo": 24821, - "ngos": 34627, - "nguyen": 29947, - "nh": 3760, - "nh": 10803, - "nhc": 44817, - "nhl": 12290, - "nhl": 8167, - "nhlbruins": 39081, - "nhljets": 49357, - "nhm": 39483, - "nhpolitics": 36125, - "nhq": 42368, - "nhra": 30052, - "nhs": 23282, - "nhs": 7695, - "ni": 697, - "ni": 3256, - "nia": 3098, - "niag": 18071, - "niagar": 39298, - "niagara": 18965, - "niall": 41354, - "niall": 8327, - "niallo": 22855, - "niallofficial": 23084, - "niam": 39347, - "nian": 46003, - "nib": 31049, - "nic": 2109, - "nic": 6651, - "nica": 29040, - "nicar": 25119, - "nicaragua": 28423, - "nice": 28386, - "nice": 1805, - "nicely": 12303, - "nicer": 29488, - "nicest": 22967, - "niche": 25279, - "nichol": 7668, - "nicholas": 39814, - "nicholas": 13148, - "nicholls": 38846, - "nichols": 22730, - "nicholson": 28745, - "nick": 4209, - "nick": 4253, - "nickel": 22034, - "nickelo": 28668, - "nickelodeon": 33279, - "nicki": 17738, - "nickimin": 27390, - "nickiminaj": 27593, - "nickjonas": 43862, - "nickname": 24731, - "nicknamed": 45190, - "nicks": 15049, - 
"nicky": 28893, - "nicky": 22091, - "nico": 20850, - "nico": 17779, - "nicol": 9919, - "nicol": 48274, - "nicola": 21791, - "nicolas": 43813, - "nicolas": 18918, - "nicole": 21246, - "nicole": 10000, - "nicot": 45099, - "nicotine": 46697, - "nie": 9524, - "nie": 3501, - "niece": 12795, - "nieces": 44877, - "niel": 19109, - "niel": 26837, - "niels": 37154, - "nielsen": 28372, - "nier": 13014, - "nies": 10586, - "niest": 15007, - "nieu": 29781, - "nific": 4748, - "nifty": 25604, - "nig": 27933, - "nig": 28099, - "nigan": 48516, - "nigel": 33919, - "nigel": 15153, - "niger": 4524, - "niger": 29920, - "nigeri": 40913, - "nigeria": 6106, - "nigerian": 12167, - "nigerians": 25358, - "nigh": 13525, - "nigh": 48157, - "night": 3870, - "night": 930, - "nightclub": 20418, - "nighter": 41349, - "nighting": 36211, - "nightingale": 40696, - "nightlife": 28823, - "nightly": 28868, - "nightmare": 12867, - "nightmares": 24032, - "nightout": 44257, - "nights": 4296, - "nighttime": 38147, - "nightw": 39956, - "nih": 25783, - "nik": 5126, - "nik": 13705, - "nike": 16300, - "nike": 5783, - "nikeplus": 43154, - "niki": 36136, - "nikita": 37118, - "nikk": 38596, - "nikki": 23156, - "nikki": 16689, - "niko": 43771, - "nikol": 27430, - "nikola": 42146, - "nikon": 25488, - "nikon": 13849, - "nikov": 43960, - "nil": 16852, - "nil": 35030, - "nile": 24252, - "nim": 30402, - "nim": 42093, - "nima": 42586, - "nin": 5794, - "nin": 14145, - "nina": 13891, - "nine": 16213, - "nine": 7330, - "ninety": 48214, - "ning": 6050, - "ning": 762, - "ningham": 23395, - "ningly": 43537, - "nings": 4588, - "nington": 26214, - "ninj": 23225, - "ninja": 11969, - "ninjas": 42796, - "nino": 25633, - "ninten": 6184, - "nintendo": 13969, - "nintendo": 7886, - "nintendoswitch": 16404, - "ninth": 22770, - "nip": 33889, - "nip": 22333, - "nipp": 24634, - "nipple": 45987, - "nipples": 44774, - "nippon": 47960, - "nips": 49241, - "nir": 15503, - "nir": 40057, - "nireland": 45763, - "niro": 47373, - "nirvana": 28300, - 
"nis": 5609, - "nis": 3786, - "nish": 19834, - "nish": 13256, - "nished": 24141, - "nishi": 32386, - "nishings": 49247, - "nison": 45700, - "niss": 39043, - "nissan": 37635, - "nissan": 11082, - "nist": 17782, - "nister": 36640, - "nit": 4087, - "nit": 19011, - "nite": 8427, - "niti": 43964, - "niti": 45355, - "nitin": 37529, - "nitro": 30726, - "nitrogen": 30706, - "niture": 7840, - "nity": 12707, - "niu": 48187, - "niv": 47300, - "niversary": 29643, - "nix": 48552, - "nix": 32278, - "nixon": 20671, - "nj": 8343, - "nj": 6672, - "njcaa": 48992, - "njpw": 38992, - "nk": 22708, - "nk": 17456, - "nko": 36353, - "nl": 12057, - "nl": 7655, - "nli": 37502, - "nlp": 35680, - "nlwx": 49260, - "nm": 15956, - "nm": 11370, - "nmd": 43331, - "nme": 40454, - "nmwx": 47967, - "nn": 8947, - "nn": 12925, - "nnn": 26277, - "nnnn": 41420, - "no": 578, - "no": 871, - "noaa": 27557, - "noah": 28806, - "noah": 11519, - "nobel": 33742, - "nobel": 15605, - "nobelprize": 46074, - "noble": 29430, - "noble": 12051, - "nobody": 7009, - "noc": 16988, - "noc": 44420, - "nocchi": 46359, - "noch": 38672, - "noche": 29689, - "noches": 44166, - "nock": 16993, - "noctur": 26291, - "nocturnal": 41738, - "nod": 18648, - "nodapl": 39079, - "node": 31434, - "node": 24871, - "nodejs": 39262, - "nodes": 40534, - "noel": 38406, - "noel": 17496, - "nof": 29505, - "noff": 46979, - "nofilter": 16418, - "nog": 31157, - "noh": 40775, - "noi": 43115, - "noi": 39889, - "noida": 33404, - "noir": 39291, - "noir": 12953, - "nois": 22057, - "noise": 41018, - "noise": 9307, - "noises": 31575, - "noisse": 45686, - "noisy": 33495, - "nokia": 17731, - "nol": 8055, - "nola": 13289, - "nolan": 17323, - "nold": 40322, - "nole": 34654, - "noles": 40569, - "nollywood": 43145, - "nology": 42221, - "nom": 2981, - "nom": 12799, - "nomad": 27849, - "noman": 45592, - "nomin": 5643, - "nominate": 17122, - "nominated": 8710, - "nominating": 45747, - "nomination": 14136, - "nominations": 17124, - "nominee": 14122, - "nominees": 
17873, - "nomnom": 26962, - "nomore": 35126, - "noms": 35706, - "non": 4282, - "non": 3353, - "none": 29644, - "none": 8906, - "nonetheless": 39675, - "nonfiction": 31654, - "nonprofit": 19315, - "nonprofits": 37935, - "nonsense": 19136, - "nonstop": 30300, - "nont": 25207, - "noo": 6759, - "noo": 46672, - "noodle": 19521, - "noodles": 15782, - "nook": 30088, - "noon": 37693, - "noon": 2347, - "noor": 46978, - "noor": 31323, - "nope": 15625, - "nor": 1062, - "nor": 6190, - "nora": 25890, - "norcal": 41970, - "nord": 19261, - "nord": 36067, - "nordic": 36439, - "nordic": 20734, - "nordstrom": 38562, - "norfolk": 30232, - "norfolk": 12202, - "norm": 10990, - "norm": 22457, - "norma": 35757, - "normal": 28748, - "normal": 5967, - "normali": 45157, - "normally": 15870, - "norman": 22027, - "norman": 11338, - "normandy": 23840, - "normani": 44596, - "norms": 33011, - "norris": 21814, - "norse": 36559, - "norte": 35638, - "north": 3468, - "north": 2188, - "northampton": 49246, - "northampton": 26175, - "northan": 37081, - "northbound": 24228, - "northcarolina": 43386, - "northe": 24675, - "northeast": 42673, - "northeast": 13009, - "northeastern": 28297, - "northeasthour": 42869, - "norther": 26908, - "northern": 17210, - "northern": 5049, - "northernlights": 48940, - "northkorea": 38495, - "northside": 45957, - "northumber": 22295, - "northumberland": 22922, - "northwales": 49371, - "northwest": 12894, - "northwestern": 23685, - "norton": 18032, - "norway": 8780, - "norwe": 14414, - "norwegian": 15971, - "norwich": 37629, - "norwich": 15812, - "norwood": 37889, - "nos": 13420, - "nose": 24192, - "nose": 8231, - "noses": 48163, - "nostal": 12076, - "nostalgia": 16622, - "nostalgic": 24468, - "not": 2534, - "not": 783, - "notable": 22023, - "notch": 19476, - "notdead": 42059, - "note": 10910, - "note": 3246, - "notebook": 16365, - "notebooks": 37623, - "noted": 22501, - "notes": 5795, - "nothin": 24291, - "nothing": 28412, - "nothing": 2586, - "noti": 10686, - "notic": 
6915, - "notice": 6683, - "noticeable": 40857, - "noticed": 9324, - "notices": 33459, - "noticias": 47759, - "noticing": 37571, - "notification": 22512, - "notifications": 23169, - "notified": 39454, - "noting": 38649, - "notion": 37856, - "notjust": 33212, - "notjustlakes": 45803, - "notmy": 39301, - "noto": 29878, - "noton": 48258, - "notor": 21711, - "notori": 44065, - "notorious": 22489, - "notre": 24397, - "notre": 15306, - "notredame": 34077, - "notsorry": 34361, - "nott": 9333, - "nott": 34989, - "notte": 47308, - "nottingham": 12852, - "notts": 25598, - "nou": 8751, - "nou": 30953, - "noun": 33663, - "nouri": 23796, - "nourish": 46025, - "nourished": 48354, - "nous": 29485, - "nouveau": 29948, - "nouvel": 34215, - "nov": 2264, - "nov": 4293, - "nova": 11236, - "novak": 26465, - "novasco": 33785, - "novascotia": 34744, - "novation": 39753, - "nove": 30507, - "novel": 15044, - "novel": 6080, - "novelist": 27314, - "novella": 42770, - "novels": 16040, - "novelty": 37750, - "november": 3680, - "nover": 37465, - "novi": 47957, - "novice": 33743, - "novo": 27504, - "novo": 36581, - "now": 2040, - "now": 692, - "nowadays": 26155, - "nowhere": 14108, - "nowplaying": 3708, - "nowwatching": 30852, - "nox": 27406, - "noxi": 39304, - "noxious": 42833, - "noy": 32787, - "np": 18205, - "np": 6314, - "npa": 42378, - "npc": 33966, - "npr": 39941, - "npr": 24078, - "nps": 22025, - "npt": 47231, - "nr": 6574, - "nr": 9713, - "nra": 17286, - "nrc": 45786, - "nrf": 47982, - "nrg": 48662, - "nrl": 27142, - "nrl": 18127, - "ns": 12405, - "ns": 1373, - "nsa": 23004, - "nsc": 32792, - "nsd": 36659, - "nsf": 34180, - "nsfw": 19847, - "nsi": 47824, - "nsw": 21301, - "nsw": 11693, - "nswpol": 44434, - "nt": 10902, - "nt": 3207, - "ntr": 30845, - "nts": 43775, - "ntt": 22859, - "ntv": 24807, - "ntv": 45304, - "nu": 1156, - "nu": 9444, - "nucle": 25693, - "nuclear": 34136, - "nuclear": 7279, - "nude": 16630, - "nudes": 32122, - "nue": 22834, - "nuestra": 45649, - "nuestro": 38590, - 
"nuev": 47861, - "nueva": 48810, - "nuevo": 30265, - "nufc": 15720, - "nuff": 37324, - "nug": 13471, - "nugent": 47457, - "nugget": 25448, - "nuggets": 18970, - "nuh": 45950, - "nuit": 38815, - "nuk": 39228, - "nuke": 39399, - "nul": 29358, - "null": 47376, - "num": 17896, - "num": 30534, - "numb": 34639, - "numb": 39427, - "number": 44078, - "number": 2842, - "numbered": 25975, - "numbers": 6121, - "numer": 11442, - "numerous": 17082, - "numis": 39100, - "nun": 12511, - "nun": 28540, - "nunavut": 48626, - "nunes": 40697, - "nuns": 44061, - "nup": 46757, - "nur": 3920, - "nur": 33493, - "nure": 42480, - "nurse": 37547, - "nurse": 10058, - "nursery": 15540, - "nurses": 12938, - "nursing": 11126, - "nurture": 38865, - "nurturing": 45229, - "nus": 25157, - "nus": 18239, - "nut": 10358, - "nut": 6491, - "nutcracker": 36733, - "nutella": 27312, - "nutr": 6198, - "nutri": 15470, - "nutrient": 32900, - "nutrients": 24668, - "nutriti": 17978, - "nutrition": 41546, - "nutrition": 7989, - "nutritional": 26457, - "nutritious": 30387, - "nuts": 8644, - "nutshell": 26659, - "nutty": 39846, - "nv": 17217, - "nv": 16985, - "nvi": 22847, - "nvidia": 27325, - "nw": 7826, - "nw": 7030, - "nwa": 34237, - "nwo": 40976, - "nws": 23333, - "nws": 30998, - "nwsl": 48394, - "nwt": 25029, - "nx": 18810, - "nx": 16997, - "nxt": 35037, - "nxt": 17804, - "ny": 1383, - "ny": 1350, - "nya": 24165, - "nyc": 13304, - "nyc": 2832, - "nycc": 27187, - "nycfc": 47497, - "nye": 40723, - "nye": 13416, - "nyfw": 21089, - "nyk": 46841, - "nylon": 25915, - "nyo": 41534, - "nyo": 44586, - "nypd": 42293, - "nypd": 18279, - "nyr": 32538, - "nyrd": 47936, - "nys": 36375, - "nys": 23423, - "nyse": 32650, - "nyt": 46311, - "nyt": 12816, - "nytimes": 13772, - "nyu": 43143, - "nyu": 31355, - "nz": 10142, - "nz": 7082, - "o": 78, - "o": 334, - "oa": 11994, - "oahu": 37790, - "oak": 6010, - "oak": 7221, - "oakland": 42663, - "oakland": 12077, - "oakley": 27810, - "oaks": 16734, - "oakville": 38500, - "oasis": 18185, 
- "oat": 20095, - "oat": 34132, - "oates": 47094, - "oath": 20108, - "oatmeal": 26374, - "oats": 24150, - "oax": 43090, - "oaxaca": 47818, - "ob": 1411, - "ob": 14908, - "oba": 42902, - "oba": 15147, - "obam": 13174, - "obama": 4276, - "obamacare": 18005, - "obe": 11897, - "obe": 29117, - "obedience": 48921, - "ober": 15284, - "obese": 41757, - "obesity": 19499, - "obey": 26926, - "obi": 21454, - "obi": 18414, - "obile": 20513, - "obitu": 39218, - "obituary": 43580, - "objec": 7970, - "object": 14115, - "objective": 23663, - "objectives": 30238, - "objects": 13770, - "obl": 31452, - "oblast": 42672, - "obli": 11416, - "obligation": 34473, - "obligations": 38232, - "obligatory": 35020, - "oblivion": 45323, - "obo": 46001, - "obo": 26618, - "obrien": 31946, - "obs": 39162, - "obsc": 20392, - "obscure": 33337, - "obse": 8433, - "observ": 9050, - "observation": 20250, - "observations": 27409, - "observatory": 21236, - "observe": 23217, - "observed": 21267, - "observer": 22077, - "observers": 47544, - "observing": 28359, - "obsessed": 9744, - "obsession": 15718, - "obsi": 47323, - "obsole": 35561, - "obsolete": 40628, - "obst": 29398, - "obstac": 24075, - "obstacle": 29751, - "obstacles": 24480, - "obste": 49103, - "obstru": 44876, - "obstruc": 38762, - "obstruction": 40240, - "obtain": 26555, - "obtained": 29322, - "obvious": 13959, - "obviously": 10068, - "oc": 1566, - "oc": 6603, - "oca": 31120, - "ocal": 38148, - "occ": 43940, - "occa": 8530, - "occasion": 12280, - "occasional": 33059, - "occasionally": 32479, - "occasions": 26154, - "occer": 20804, - "occi": 42994, - "occu": 7863, - "occult": 42529, - "occup": 11152, - "occupation": 18624, - "occupational": 30644, - "occupied": 17271, - "occupy": 22453, - "occupy": 24210, - "occur": 11264, - "occur": 21813, - "occurred": 19850, - "occurrence": 40615, - "occurring": 31335, - "occurs": 26563, - "ocd": 35904, - "oce": 3509, - "ocean": 12941, - "ocean": 4918, - "oceans": 16792, - "och": 29334, - "och": 32011, - "oche": 
33045, - "oci": 9891, - "ocity": 46039, - "ock": 33579, - "ock": 21313, - "ocks": 22410, - "oclock": 36274, - "oco": 32553, - "ocon": 33090, - "ocr": 45813, - "ocre": 40320, - "ocs": 27297, - "oct": 4565, - "octa": 23444, - "octag": 37768, - "octagon": 49167, - "octane": 43040, - "octavia": 47416, - "octo": 31032, - "october": 3481, - "octopus": 22327, - "ocu": 22709, - "oculus": 30082, - "od": 4886, - "od": 9719, - "oda": 24777, - "oday": 41954, - "odd": 15525, - "odd": 11387, - "oddly": 34213, - "odds": 11555, - "ode": 19125, - "ode": 19639, - "odell": 41556, - "odessa": 43574, - "odi": 12223, - "odi": 18853, - "odin": 35175, - "odisha": 15737, - "odo": 49188, - "odo": 40993, - "odor": 39509, - "odu": 35095, - "odu": 39904, - "odyssey": 19991, - "oe": 24251, - "oe": 11667, - "oec": 24288, - "oecd": 30816, - "oem": 29650, - "oes": 3643, - "of": 684, - "of": 539, - "ofa": 29774, - "ofc": 19877, - "ofe": 30000, - "ofer": 47322, - "off": 892, - "off": 1007, - "offe": 8261, - "offee": 34059, - "offen": 7231, - "offence": 34594, - "offences": 33972, - "offended": 30765, - "offender": 48294, - "offenders": 35878, - "offense": 15253, - "offensive": 11037, - "offer": 20607, - "offer": 3271, - "offered": 9395, - "offering": 6896, - "offerings": 24535, - "offers": 4679, - "offic": 3276, - "office": 18033, - "office": 2171, - "officeof": 38750, - "officeofrg": 47100, - "officer": 4683, - "officers": 6335, - "offices": 10933, - "offici": 1401, - "official": 5768, - "official": 1868, - "officially": 4226, - "officials": 7658, - "officiel": 26548, - "offl": 16851, - "offline": 22724, - "offro": 32198, - "offroad": 37173, - "offs": 23987, - "offseason": 25485, - "offset": 28843, - "offshore": 15496, - "offside": 49347, - "offspring": 38635, - "offthe": 38189, - "ofi": 36692, - "ofi": 49090, - "oficial": 18061, - "oft": 16693, - "oftball": 39768, - "often": 4864, - "ofthe": 7592, - "oftheday": 6988, - "oftheweek": 20654, - "oftheyear": 33975, - "og": 11542, - "og": 8555, - "oga": 
47312, - "ogden": 42011, - "ogil": 39013, - "ography": 22399, - "ogue": 24761, - "ogun": 48970, - "oh": 5648, - "oh": 1779, - "ohana": 48330, - "ohh": 23076, - "ohhh": 27697, - "ohhhh": 40201, - "ohi": 5207, - "ohio": 18951, - "ohio": 6155, - "ohiostate": 41324, - "ohl": 45547, - "ohl": 41095, - "ohmy": 29758, - "ohn": 48043, - "ohs": 39542, - "ohwx": 47993, - "oi": 27357, - "oi": 13934, - "oic": 45554, - "oid": 14758, - "oids": 21847, - "oil": 11973, - "oil": 2870, - "oiland": 32316, - "oilandgas": 34130, - "oilers": 21627, - "oilpainting": 34279, - "oils": 17886, - "oily": 47550, - "oir": 48079, - "oir": 37113, - "ois": 23262, - "oit": 18453, - "oitnb": 34865, - "oj": 30986, - "oj": 34553, - "ok": 1944, - "ok": 2481, - "oka": 42258, - "oka": 19092, - "okan": 41263, - "okanagan": 43233, - "okay": 4917, - "okc": 42418, - "okc": 18357, - "oke": 26636, - "oke": 23598, - "oki": 20390, - "okin": 30687, - "okinawa": 35877, - "okla": 9431, - "oklahoma": 10170, - "oko": 26892, - "oko": 26095, - "okstate": 36356, - "oktoberfest": 32026, - "oku": 45010, - "oku": 43829, - "okwx": 27336, - "ol": 562, - "ol": 2985, - "ola": 20499, - "ola": 3373, - "olaf": 39709, - "olan": 48489, - "olan": 24227, - "oland": 26452, - "olas": 40800, - "old": 4931, - "old": 896, - "olde": 37731, - "older": 7700, - "oldest": 9285, - "oldham": 29929, - "oldie": 35280, - "oldies": 36278, - "oldman": 48614, - "olds": 8580, - "oldschool": 44384, - "oldschool": 25133, - "oldsmobile": 45396, - "ole": 9089, - "ole": 1947, - "oled": 46768, - "oler": 24069, - "oles": 16962, - "olf": 16346, - "olga": 34779, - "oli": 3811, - "oli": 8810, - "olic": 31341, - "oligar": 46185, - "olim": 47769, - "olin": 37823, - "olin": 18283, - "olina": 34711, - "oline": 17441, - "oling": 38033, - "olini": 36040, - "olis": 49397, - "olithic": 35574, - "olive": 22486, - "olive": 9898, - "oliver": 22882, - "oliver": 9261, - "olives": 27149, - "olivi": 20773, - "olivia": 11697, - "olivier": 23891, - "oll": 32270, - "oll": 15510, - 
"olla": 31908, - "ollie": 24434, - "olls": 42697, - "olly": 23998, - "olo": 14628, - "olo": 7606, - "ological": 12345, - "ologist": 23442, - "ologists": 30912, - "ology": 4627, - "olor": 29245, - "olph": 25077, - "ols": 2236, - "olsen": 26307, - "olson": 28046, - "olt": 46252, - "olu": 16502, - "olu": 46302, - "olulu": 27645, - "oly": 20323, - "oly": 24823, - "olym": 3594, - "olympi": 13597, - "olympia": 23965, - "olympiad": 47694, - "olympian": 25420, - "olympians": 44583, - "olympic": 26099, - "olympic": 6388, - "olympics": 7629, - "olympus": 30960, - "om": 547, - "om": 3932, - "oma": 44603, - "oma": 5358, - "omaha": 16509, - "oman": 22088, - "oman": 10871, - "omar": 19488, - "omar": 13367, - "omars": 37099, - "omas": 36023, - "omat": 40788, - "omb": 34447, - "ombe": 35967, - "omd": 49346, - "ome": 3693, - "ome": 5832, - "omed": 16835, - "omega": 13465, - "omelette": 38789, - "omen": 9969, - "omen": 25469, - "oment": 43683, - "omeo": 39844, - "omer": 24087, - "omer": 17902, - "omes": 25736, - "ometer": 20060, - "ometric": 38702, - "omez": 12541, - "omf": 47496, - "omfg": 12523, - "omg": 35233, - "omg": 3186, - "omi": 24097, - "omi": 10341, - "omic": 40536, - "omic": 12793, - "omics": 15138, - "omile": 46915, - "omin": 16457, - "omination": 42571, - "oming": 10796, - "ominous": 40914, - "omni": 18793, - "omni": 39489, - "omnibus": 44760, - "omnic": 48383, - "omo": 14478, - "omo": 11066, - "omon": 48758, - "omor": 29431, - "oms": 3770, - "omusic": 38965, - "omy": 40805, - "omy": 6884, - "on": 521, - "on": 525, - "ona": 2687, - "onair": 29511, - "onal": 918, - "onboard": 21689, - "once": 16331, - "once": 2654, - "onceupon": 28122, - "onceuponatime": 33505, - "onco": 46700, - "oncology": 24593, - "ond": 27918, - "ond": 2636, - "onda": 32643, - "onday": 29864, - "onde": 44532, - "ondo": 29529, - "ondon": 42043, - "ondon": 11851, - "one": 1980, - "one": 637, - "onec": 27746, - "oned": 28012, - "oned": 4698, - "onedirection": 16245, - "onee": 44433, - "oneill": 44808, - 
"onelove": 47417, - "onent": 12147, - "onents": 11709, - "oneof": 48478, - "onep": 20440, - "onepiece": 43153, - "oneplus": 25981, - "oner": 30055, - "oner": 6071, - "oners": 12324, - "ones": 20757, - "ones": 1575, - "oneself": 46874, - "onesie": 33237, - "oness": 25379, - "onet": 36058, - "oneteam": 41094, - "onetsy": 33392, - "onew": 43848, - "onews": 18696, - "onex": 49116, - "oney": 44498, - "oney": 9408, - "onf": 41790, - "onfox": 29874, - "ong": 2787, - "ong": 846, - "onga": 30259, - "ongchang": 35071, - "ongi": 21754, - "ongo": 31226, - "ongoing": 10393, - "ongs": 12143, - "oni": 4385, - "oni": 8048, - "onia": 8001, - "onial": 27599, - "onian": 21090, - "onic": 15838, - "onic": 3711, - "onica": 14631, - "onics": 9779, - "onie": 35249, - "onies": 22601, - "onimo": 41271, - "oning": 5197, - "onion": 10985, - "onions": 15255, - "onist": 10099, - "onists": 19659, - "onix": 27370, - "onized": 43657, - "onlin": 31103, - "online": 12940, - "online": 2027, - "onlinemarketing": 41820, - "onlineshopping": 38587, - "only": 11646, - "only": 1033, - "onlyin": 32947, - "onna": 25438, - "onna": 35458, - "onnaise": 48934, - "onne": 23466, - "onnell": 45613, - "ono": 28165, - "ono": 14388, - "onom": 48014, - "onomy": 36873, - "onpoli": 20708, - "ons": 26076, - "ons": 708, - "onsale": 36324, - "onset": 30527, - "onsite": 37336, - "onstage": 21821, - "onstorm": 49333, - "ont": 34303, - "ont": 11157, - "ontari": 6739, - "ontario": 42766, - "ontario": 7436, - "onte": 34723, - "onthe": 12241, - "onther": 46563, - "ontheroad": 47516, - "onthisday": 6862, - "onto": 11745, - "onto": 3141, - "ontology": 37364, - "ontour": 32155, - "onu": 44142, - "onward": 34827, - "onwards": 20682, - "ony": 9490, - "ony": 2926, - "onym": 11483, - "onymous": 13038, - "onyx": 31353, - "oo": 574, - "oo": 2822, - "ood": 16429, - "ood": 738, - "oodle": 45289, - "oods": 44660, - "oof": 42270, - "ooh": 16806, - "ook": 22326, - "ook": 8394, - "ooks": 31082, - "ool": 37702, - "ool": 929, - "oom": 22786, - 
"oom": 15002, - "oomf": 40607, - "oon": 35651, - "oon": 7100, - "ooo": 9571, - "oooh": 28927, - "oooo": 4002, - "oooo": 13643, - "ooooo": 12532, - "oooooo": 43590, - "oooooo": 20372, - "ooooooo": 30859, - "oooooooo": 15473, - "oooooooo": 43408, - "oooooooooooooooo": 48645, - "oop": 7326, - "ooper": 39906, - "oops": 9116, - "oor": 35239, - "oos": 9896, - "oosa": 30834, - "oose": 38941, - "oot": 17667, - "ootball": 28914, - "ootd": 16547, - "ooth": 12682, - "oott": 34316, - "ooza": 22809, - "op": 676, - "op": 3691, - "opa": 28949, - "opal": 28982, - "opar": 18167, - "opath": 33079, - "opathic": 37521, - "opathy": 28466, - "opau": 27239, - "opd": 38288, - "ope": 31694, - "ope": 11440, - "opec": 33138, - "opel": 36952, - "open": 3647, - "open": 1488, - "openaccess": 26591, - "opend": 28069, - "opendata": 35709, - "openday": 46991, - "opened": 5303, - "opener": 8998, - "openhouse": 36091, - "opening": 33728, - "opening": 2516, - "openingday": 36359, - "openings": 27643, - "openly": 23005, - "opens": 4801, - "opensource": 29930, - "oper": 2796, - "oper": 37533, - "opera": 8056, - "operate": 19306, - "operated": 23031, - "operates": 38675, - "operating": 12218, - "operation": 27173, - "operation": 7639, - "operational": 18237, - "operations": 8106, - "operative": 28380, - "operator": 15972, - "operators": 19267, - "opers": 48728, - "opes": 37258, - "oph": 6796, - "opha": 38634, - "ophel": 45017, - "ophelia": 49118, - "ophi": 44547, - "ophile": 35915, - "opho": 12900, - "ophobia": 21111, - "ophobic": 29934, - "ophon": 25120, - "ophone": 26345, - "ophthal": 33135, - "ophy": 28539, - "opi": 40056, - "opi": 48994, - "opin": 7636, - "opini": 14825, - "opinion": 7843, - "opinions": 16192, - "opio": 17371, - "opioid": 22833, - "opioids": 47578, - "opla": 36270, - "ople": 25663, - "opol": 15173, - "opoly": 23729, - "opor": 39650, - "opoulos": 42020, - "opp": 2020, - "opp": 21024, - "oppa": 23637, - "oppo": 7399, - "oppo": 41770, - "opponent": 17002, - "opponents": 19664, - 
"oppor": 2914, - "opportun": 2939, - "opportunities": 5978, - "opportunity": 4004, - "oppos": 10091, - "oppose": 23617, - "opposed": 22509, - "opposes": 47471, - "opposing": 24376, - "opposite": 12872, - "opposition": 11062, - "oppre": 17341, - "oppressed": 41492, - "oppression": 30650, - "opra": 28291, - "oprah": 22562, - "opry": 35340, - "ops": 3054, - "opt": 45103, - "opt": 27188, - "opted": 42035, - "opti": 6580, - "optic": 25190, - "optic": 24755, - "optical": 16822, - "optics": 27165, - "optim": 22331, - "optimal": 25235, - "optimi": 9737, - "optimis": 39459, - "optimism": 25226, - "optimist": 44581, - "optimistic": 23104, - "optimization": 25125, - "optimize": 30456, - "optimized": 43939, - "optimizing": 49157, - "optimum": 35974, - "optimus": 43453, - "option": 8464, - "optional": 25411, - "options": 7063, - "optome": 35533, - "opul": 39858, - "opus": 33295, - "opy": 21835, - "or": 523, - "or": 541, - "ora": 4301, - "orac": 24673, - "oracle": 37308, - "oracle": 15966, - "orah": 40820, - "orail": 45120, - "oral": 32490, - "oral": 6007, - "orama": 33619, - "oran": 32209, - "oran": 28395, - "orang": 22116, - "orange": 13957, - "orange": 4287, - "oranges": 32417, - "orangu": 36112, - "orb": 28894, - "orb": 36958, - "orbit": 19713, - "orbital": 40312, - "orc": 44305, - "orca": 18631, - "orcas": 47676, - "orch": 11893, - "orchar": 40226, - "orchard": 19530, - "orche": 8004, - "orchestr": 42937, - "orchestra": 9573, - "orchestral": 40285, - "orchi": 23696, - "orchid": 18678, - "orchids": 28376, - "ord": 26903, - "ord": 11502, - "orda": 33462, - "ordained": 38302, - "order": 24613, - "order": 2191, - "ordered": 8335, - "ordering": 19588, - "orderly": 43457, - "orders": 6187, - "ordin": 4378, - "ordinance": 38583, - "ordinary": 8012, - "ore": 3580, - "ore": 1423, - "orean": 36696, - "ored": 5133, - "oregon": 21759, - "oregon": 8035, - "oren": 21645, - "oreo": 21873, - "oreos": 41688, - "ores": 17328, - "org": 3401, - "org": 5593, - "organ": 3338, - "organ": 13213, - 
"organi": 3636, - "organic": 24080, - "organic": 5980, - "organics": 44199, - "organis": 13204, - "organisation": 15868, - "organisations": 20651, - "organise": 36073, - "organised": 13191, - "organiser": 49141, - "organisers": 35778, - "organising": 22787, - "organisms": 37041, - "organiz": 11107, - "organization": 8064, - "organizational": 29510, - "organizations": 13453, - "organize": 19973, - "organized": 10681, - "organizer": 23905, - "organizers": 27191, - "organizing": 15779, - "organs": 29872, - "orgs": 29500, - "ori": 1540, - "ori": 8693, - "oria": 11474, - "orial": 8648, - "orian": 21193, - "oric": 43810, - "orice": 41341, - "orie": 18815, - "orient": 13149, - "orient": 30770, - "oriental": 23056, - "orientation": 16873, - "oriente": 40390, - "oriented": 24596, - "orienteering": 42985, - "ories": 5934, - "orig": 2273, - "orig": 38463, - "origami": 31832, - "origin": 2555, - "origin": 12372, - "original": 18496, - "original": 3117, - "originally": 12849, - "originals": 16953, - "originated": 41823, - "origins": 16291, - "orin": 39863, - "oring": 3006, - "orio": 24308, - "orioles": 21430, - "orion": 21765, - "oris": 37064, - "orities": 7903, - "ority": 5556, - "orium": 12015, - "ork": 22202, - "ork": 37235, - "orkney": 34254, - "orl": 39465, - "orlando": 32247, - "orlando": 7827, - "orleans": 11127, - "orm": 38464, - "orn": 25412, - "orn": 8130, - "ornam": 36122, - "ornament": 23409, - "ornamental": 46270, - "ornaments": 28968, - "ornate": 46865, - "orni": 27713, - "ornithology": 38275, - "orns": 19340, - "oro": 9848, - "oro": 14573, - "orous": 19286, - "orph": 17318, - "orphan": 22718, - "orphan": 28994, - "orphanage": 45196, - "orphaned": 46792, - "orphans": 36588, - "orphe": 39186, - "orr": 32977, - "ors": 1127, - "orship": 20846, - "ort": 1019, - "ortega": 39727, - "orth": 22584, - "orth": 24461, - "ortho": 11366, - "orthodon": 37730, - "orthodox": 19008, - "orthop": 42123, - "orthopedic": 49341, - "ortiz": 23544, - "orton": 37238, - "oru": 44629, - 
"oru": 31281, - "orum": 42724, - "orwell": 41218, - "ory": 16983, - "ory": 1985, - "os": 2211, - "os": 1299, - "osa": 16340, - "osa": 17237, - "osaka": 21347, - "osborne": 22402, - "osbourne": 43376, - "osc": 5092, - "oscar": 21157, - "oscar": 8191, - "oscars": 11098, - "osce": 37303, - "oscill": 38272, - "ose": 46942, - "ose": 22541, - "osh": 30717, - "osh": 35011, - "osha": 33907, - "oshi": 34770, - "osi": 25247, - "osi": 17636, - "osis": 13903, - "osity": 12730, - "oslo": 20547, - "osm": 31626, - "osman": 46539, - "oso": 42793, - "oso": 21285, - "osp": 24387, - "ospre": 49001, - "osprey": 37893, - "oss": 29362, - "oss": 34640, - "ost": 23701, - "ost": 18749, - "oste": 20632, - "osteo": 43163, - "oster": 31781, - "ostr": 33673, - "ostrich": 47640, - "osu": 29480, - "osu": 19818, - "oswald": 38471, - "ot": 1863, - "ot": 2062, - "ota": 17509, - "ota": 8741, - "otago": 45919, - "otaku": 40743, - "otas": 47616, - "otc": 37934, - "otd": 5683, - "ote": 28511, - "ote": 19744, - "otes": 27280, - "oth": 33262, - "oth": 33519, - "other": 9758, - "other": 1010, - "others": 3326, - "otherwise": 12376, - "oti": 19567, - "oti": 45564, - "otic": 9671, - "otis": 28246, - "otive": 10877, - "oto": 23946, - "oto": 23399, - "otp": 29822, - "otr": 38685, - "ots": 5769, - "ott": 10167, - "ott": 7936, - "otta": 7623, - "otta": 20941, - "ottawa": 49027, - "ottawa": 9019, - "otte": 35214, - "otter": 34710, - "otter": 22456, - "otters": 38883, - "otti": 36721, - "ottnews": 33995, - "otto": 17730, - "ottoman": 27503, - "otw": 35259, - "otwol": 46868, - "ou": 520, - "ou": 6544, - "ouat": 32954, - "ouch": 13493, - "oud": 1359, - "oue": 48838, - "ouf": 34618, - "ough": 4204, - "ough": 991, - "ought": 2253, - "oughton": 36860, - "oui": 39421, - "ouk": 21796, - "oul": 20253, - "oul": 8081, - "ould": 859, - "oulos": 32808, - "oun": 636, - "oun": 20960, - "ounce": 15027, - "ounces": 30299, - "ound": 2013, - "ound": 853, - "oundation": 40132, - "ounded": 9634, - "ounding": 11944, - "ounds": 2753, 
- "oung": 35875, - "oung": 25341, - "ounge": 29427, - "ount": 43801, - "ount": 4172, - "ounts": 10963, - "oup": 32815, - "our": 727, - "our": 581, - "oura": 29806, - "oura": 36352, - "ourable": 24126, - "ourage": 34525, - "oural": 45840, - "oured": 6956, - "ouri": 12696, - "ouring": 12000, - "ourism": 25496, - "ourke": 26480, - "ourlives": 37541, - "ouro": 41224, - "ours": 1491, - "ourse": 15415, - "ourselves": 10124, - "ourt": 22960, - "oury": 29484, - "ous": 1987, - "ous": 879, - "ouse": 32048, - "ouse": 7603, - "ouses": 33666, - "ously": 2501, - "ousness": 10689, - "ousy": 28302, - "out": 1130, - "out": 620, - "outa": 35187, - "outage": 27320, - "outages": 40353, - "outback": 28532, - "outbound": 41256, - "outbreak": 20103, - "outcome": 16552, - "outcomes": 14016, - "outdated": 38313, - "outdoor": 19184, - "outdoor": 6368, - "outdoors": 10469, - "oute": 44180, - "outed": 34435, - "outer": 30499, - "outer": 14188, - "outes": 39600, - "outfield": 41826, - "outfit": 6525, - "outfits": 16366, - "outfitters": 37725, - "outfy": 34920, - "outgoing": 27302, - "outh": 16933, - "outh": 8111, - "outine": 35452, - "outing": 11251, - "outlander": 45820, - "outlander": 17095, - "outlaw": 37498, - "outlaw": 27340, - "outlaws": 30935, - "outlet": 16855, - "outlets": 20822, - "outline": 26894, - "outlines": 29159, - "outlining": 45960, - "outlook": 12983, - "outof": 43958, - "outpatient": 46603, - "outpost": 44622, - "output": 17255, - "outra": 14262, - "outrage": 23577, - "outraged": 43402, - "outrageous": 29342, - "outre": 14373, - "outreach": 15297, - "outright": 38200, - "outs": 5790, - "outsi": 22515, - "outside": 47693, - "outside": 2782, - "outsider": 41196, - "outsiders": 41742, - "outskirts": 42088, - "outsourcing": 34543, - "outstanding": 6387, - "outta": 15807, - "outtuesday": 48692, - "outw": 34650, - "oux": 40960, - "oux": 14228, - "ov": 6420, - "ov": 8479, - "ova": 12762, - "oval": 15039, - "ovarian": 42913, - "ovation": 24333, - "ove": 8649, - "ove": 15456, - 
"oven": 44620, - "oven": 12579, - "over": 1658, - "over": 962, - "overall": 6914, - "overboard": 42982, - "overcame": 47235, - "overcast": 36942, - "overcome": 14365, - "overcoming": 29348, - "overdose": 27017, - "overdrive": 40088, - "overdue": 30240, - "overflow": 32885, - "overflowing": 45370, - "overhaul": 31531, - "overhead": 20321, - "overland": 38808, - "overlay": 44827, - "overload": 24327, - "overlook": 35767, - "overlooked": 27632, - "overlooking": 17319, - "overly": 28820, - "overnight": 9913, - "overpass": 44310, - "overrated": 38214, - "overs": 45774, - "overs": 17329, - "overseas": 15100, - "oversight": 32494, - "oversized": 31557, - "overtime": 19347, - "overturned": 31048, - "overview": 14789, - "overwatch": 18124, - "overweight": 43465, - "overwhel": 12204, - "overwhelmed": 23459, - "overwhelming": 20306, - "overwhelmingly": 43549, - "ovi": 32508, - "ovic": 22417, - "ovich": 27623, - "ovie": 47677, - "ovo": 41920, - "ovo": 18065, - "ovski": 26167, - "ow": 2032, - "ow": 2250, - "owa": 32770, - "owe": 19073, - "owed": 37641, - "owen": 24838, - "owen": 12056, - "owens": 20664, - "owes": 35069, - "owing": 48582, - "owl": 34332, - "owl": 9899, - "owls": 18247, - "own": 3845, - "own": 1758, - "owned": 8536, - "owner": 5019, - "owners": 7712, - "ownership": 16583, - "owning": 24661, - "owns": 17533, - "owo": 46142, - "ows": 27423, - "owski": 22573, - "ox": 3282, - "ox": 12071, - "oxfam": 45466, - "oxford": 28588, - "oxford": 8824, - "oxfordshire": 37855, - "oxi": 33731, - "oxi": 48147, - "oxid": 17701, - "oxide": 28235, - "oxo": 37088, - "oxy": 12432, - "oxygen": 16214, - "oy": 6638, - "oy": 12437, - "oya": 38894, - "oye": 48677, - "oyster": 40545, - "oyster": 17253, - "oysters": 22672, - "oz": 10584, - "oz": 6044, - "ozar": 31848, - "ozil": 41365, - "ozone": 37052, - "ozzy": 39549, - "p": 79, - "p": 335, - "pa": 765, - "pa": 2217, - "paa": 32812, - "pab": 9354, - "pablo": 42172, - "pablo": 14473, - "pac": 2332, - "pac": 7608, - "pace": 40600, - "pace": 
9450, - "paced": 32611, - "pacers": 23976, - "paces": 43001, - "paci": 5699, - "pacific": 19723, - "pacific": 6654, - "pacing": 45202, - "pack": 2711, - "pack": 3420, - "package": 7053, - "packaged": 29656, - "packages": 14305, - "packaging": 11658, - "packard": 46421, - "packed": 5883, - "packer": 28209, - "packers": 14294, - "packet": 25022, - "packets": 40448, - "packing": 9829, - "packs": 11086, - "paco": 41364, - "pacqui": 28456, - "pacquiao": 30485, - "pact": 27182, - "pad": 3798, - "pad": 7601, - "padded": 42253, - "paddington": 33162, - "paddle": 38276, - "paddle": 20811, - "paddling": 40645, - "paddock": 29590, - "paddy": 33103, - "paddy": 19855, - "padi": 47037, - "padilla": 22380, - "padma": 44595, - "padma": 46457, - "padre": 38343, - "padres": 22829, - "pads": 17353, - "paedi": 41488, - "paella": 46924, - "paf": 47185, - "pafc": 49259, - "pag": 4151, - "pag": 30525, - "pagan": 27854, - "page": 14996, - "page": 2504, - "pageant": 22139, - "pages": 8082, - "pagoda": 44309, - "pah": 41054, - "pah": 26884, - "pai": 20624, - "pai": 21198, - "paid": 5057, - "paige": 33659, - "paige": 16022, - "paign": 31796, - "pain": 2141, - "pain": 4495, - "paine": 38069, - "painful": 16361, - "pains": 25639, - "paint": 7948, - "paint": 5185, - "paintball": 39730, - "painted": 6433, - "painter": 10888, - "painters": 35703, - "painting": 49164, - "painting": 3086, - "paintings": 9956, - "paints": 21672, - "pair": 19848, - "pair": 4038, - "paired": 12433, - "pairing": 16313, - "pairings": 41152, - "pairs": 9950, - "pais": 16878, - "paisley": 22954, - "pajam": 24110, - "pajama": 40244, - "pajamas": 37231, - "pak": 13186, - "pak": 9094, - "paki": 3438, - "pakistan": 10713, - "pakistan": 3994, - "pakistani": 14050, - "pakistanis": 45707, - "pakv": 38196, - "pal": 1850, - "pal": 3611, - "pala": 17895, - "palace": 6381, - "palaces": 45625, - "palad": 28371, - "palae": 43379, - "palais": 35673, - "palate": 34666, - "palawan": 48202, - "palazzo": 36006, - "pale": 4768, - "pale": 
12518, - "paleo": 36741, - "paleo": 22198, - "paler": 38028, - "palermo": 40635, - "palestin": 9449, - "palestine": 11682, - "palestinian": 11764, - "palestinians": 21874, - "palette": 13901, - "pali": 48063, - "palin": 40153, - "palis": 44256, - "pality": 27296, - "pall": 35817, - "palla": 21208, - "palladium": 37888, - "pallet": 39057, - "palli": 28954, - "palliative": 46014, - "pally": 46073, - "palm": 19651, - "palm": 8612, - "palma": 29888, - "palmer": 40112, - "palmer": 13633, - "palms": 27059, - "palo": 31562, - "palom": 47698, - "palooza": 25861, - "pals": 11043, - "palsy": 46651, - "pam": 8228, - "pam": 18513, - "pamela": 26991, - "pamp": 37653, - "pamper": 44345, - "pamph": 41332, - "pan": 1072, - "pan": 7437, - "panam": 24606, - "panama": 15522, - "panas": 26207, - "panasonic": 29750, - "pancake": 18723, - "pancakes": 15308, - "panch": 27251, - "pancra": 42472, - "pancre": 27708, - "pancreatic": 49337, - "pancy": 41625, - "pand": 5631, - "panda": 12952, - "pandas": 35119, - "pande": 38419, - "pandey": 34895, - "pandit": 41191, - "pandor": 30250, - "pandora": 17727, - "pandoramusic": 42344, - "pane": 27470, - "panel": 3724, - "paneli": 19410, - "panelist": 39719, - "panelists": 24619, - "panels": 12735, - "panera": 48471, - "pang": 16756, - "pang": 23672, - "panhandle": 40919, - "pani": 36092, - "panic": 46671, - "panic": 14124, - "panini": 30410, - "pann": 42302, - "panna": 49065, - "pano": 36165, - "panor": 12962, - "panorama": 19763, - "panoramic": 22563, - "pans": 35204, - "pant": 22550, - "panther": 22825, - "panther": 13262, - "panthers": 10494, - "panties": 32515, - "panto": 28776, - "pantry": 25608, - "pants": 5003, - "panty": 44217, - "pany": 45567, - "panzer": 41159, - "pao": 33790, - "paola": 44689, - "paolo": 48488, - "paolo": 21133, - "pap": 1884, - "pap": 30756, - "papa": 12211, - "papar": 32782, - "paparazzi": 37842, - "papaya": 44098, - "paper": 8680, - "paper": 2802, - "paperback": 17928, - "papers": 8204, - "paperwork": 35785, - "papi": 
35177, - "papp": 26361, - "paprika": 44793, - "papua": 32629, - "par": 699, - "par": 9163, - "para": 18355, - "para": 8976, - "parach": 23147, - "parachute": 30122, - "parad": 37143, - "parade": 5809, - "parades": 46479, - "paradi": 6658, - "paradig": 27786, - "paradigm": 33485, - "paradise": 45869, - "paradise": 7247, - "paradox": 33109, - "parag": 11866, - "paragon": 48099, - "paragra": 24903, - "paragraph": 28499, - "paragu": 38021, - "paraguay": 43579, - "paral": 15143, - "paralle": 13184, - "parallel": 18201, - "paralleled": 42520, - "parallels": 46101, - "paraly": 30255, - "paralym": 18727, - "paralympic": 30806, - "paralympics": 37162, - "paralysis": 45702, - "param": 12250, - "parame": 27106, - "paramedic": 34630, - "paramedics": 35991, - "parameters": 44890, - "paramore": 34401, - "paramount": 26642, - "parano": 30283, - "paranoid": 43029, - "paranor": 16940, - "paranormal": 19047, - "parap": 41091, - "paras": 15198, - "parasite": 42460, - "parasites": 46175, - "parc": 30914, - "parcel": 30367, - "parcels": 45589, - "pard": 18773, - "pardon": 47606, - "pardon": 26565, - "pare": 18202, - "pared": 5498, - "paren": 3106, - "parent": 47848, - "parent": 10183, - "parental": 28339, - "parenthood": 23887, - "parenting": 14529, - "parents": 3731, - "pares": 12420, - "parfait": 46140, - "pari": 17961, - "pari": 27979, - "paris": 13982, - "paris": 3445, - "parisagreement": 47405, - "parish": 47328, - "parish": 13020, - "parisi": 45081, - "parisian": 38512, - "parity": 42734, - "park": 4985, - "park": 1452, - "parked": 16487, - "parker": 31119, - "parker": 8365, - "parkin": 34868, - "parking": 5984, - "parkinson": 28129, - "parkland": 31287, - "parkrun": 25747, - "parks": 6873, - "parkway": 19882, - "parl": 30373, - "parl": 29897, - "parliam": 5941, - "parliament": 41599, - "parliament": 7151, - "parliamentary": 17912, - "parlor": 38253, - "parlour": 37829, - "parma": 36077, - "parme": 26295, - "parmesan": 27274, - "paro": 17429, - "parody": 24318, - "parole": 32158, 
- "parr": 44113, - "parrish": 43043, - "parrot": 23565, - "parry": 40604, - "parsley": 30077, - "parsons": 22505, - "part": 1872, - "part": 1551, - "parte": 48508, - "parth": 34790, - "parti": 10509, - "partial": 18957, - "partially": 21269, - "partic": 2871, - "partici": 9540, - "particip": 4400, - "participant": 27674, - "participants": 10237, - "participate": 9433, - "participated": 14252, - "participates": 46414, - "participating": 11535, - "participation": 13529, - "particle": 27716, - "particles": 27012, - "particul": 11408, - "particular": 14098, - "particularly": 12170, - "parties": 9032, - "parting": 32844, - "partisan": 20772, - "partist": 44713, - "partition": 42219, - "partly": 21459, - "partner": 5210, - "partner": 4568, - "partnered": 21402, - "partnering": 21182, - "partners": 5568, - "partnership": 6123, - "partnerships": 17418, - "parton": 43245, - "partridge": 34872, - "parts": 5149, - "party": 12877, - "party": 1580, - "partying": 25702, - "pas": 1341, - "pas": 9525, - "pasadena": 25892, - "pascal": 28626, - "pasco": 49220, - "pascu": 42692, - "pash": 23936, - "pasha": 46986, - "paso": 18542, - "pasqu": 44941, - "pass": 5016, - "pass": 3511, - "passage": 16477, - "passages": 48937, - "passed": 4957, - "passenger": 12311, - "passengers": 12781, - "passer": 48544, - "passes": 7633, - "passi": 32471, - "passing": 6589, - "passion": 8822, - "passion": 5332, - "passionate": 10947, - "passionately": 44028, - "passions": 38441, - "passive": 23171, - "passover": 38426, - "passport": 14739, - "passports": 46368, - "password": 20258, - "passwords": 43095, - "past": 7315, - "past": 2729, - "pasta": 10441, - "paste": 34765, - "paste": 17038, - "pastel": 19457, - "pastels": 45699, - "pastor": 19792, - "pastor": 9664, - "pastoral": 37191, - "pastors": 30959, - "pastr": 45478, - "pastries": 39409, - "pastry": 18582, - "pasture": 34764, - "pastures": 47793, - "pat": 1300, - "pat": 7036, - "patag": 29862, - "patagonia": 32786, - "patch": 29284, - "patch": 8721, - 
"patches": 22104, - "patchwork": 44675, - "patchy": 47488, - "pate": 42122, - "pate": 42098, - "patel": 14168, - "patent": 14692, - "patented": 37277, - "patents": 33911, - "paterson": 36560, - "path": 7408, - "path": 5035, - "pathetic": 18222, - "pathfinder": 35415, - "pathi": 34976, - "pathi": 27347, - "pathic": 49025, - "patho": 18534, - "pathology": 23290, - "paths": 16333, - "pathway": 23488, - "pathways": 24690, - "pathy": 13330, - "pati": 2799, - "pati": 26708, - "patience": 13575, - "patient": 30139, - "patient": 6262, - "patiently": 22980, - "patients": 5543, - "patil": 49187, - "patio": 14304, - "pational": 30627, - "patna": 45025, - "patory": 41859, - "patreon": 17165, - "patri": 4771, - "patriarch": 49054, - "patriarchy": 48806, - "patric": 12569, - "patrice": 40731, - "patricia": 18143, - "patrick": 12078, - "patrick": 5286, - "patricks": 46783, - "patriot": 28896, - "patriot": 15692, - "patrioti": 35520, - "patriotic": 20217, - "patriotism": 35807, - "patriots": 8707, - "patro": 31650, - "patrol": 10073, - "patrolling": 39344, - "patrols": 35978, - "patron": 26658, - "patron": 17683, - "patrons": 28308, - "pats": 24874, - "patsy": 46093, - "patt": 12637, - "patter": 4982, - "pattern": 7447, - "patterned": 47212, - "patterns": 11637, - "patterson": 21384, - "patti": 44927, - "patti": 26123, - "pattinson": 32474, - "patton": 29026, - "patty": 48741, - "patty": 18321, - "pau": 1834, - "pau": 35970, - "paul": 6035, - "paul": 2597, - "paula": 37363, - "paula": 16777, - "pauline": 30438, - "paulo": 48002, - "paulo": 21628, - "pauls": 41413, - "pauls": 40010, - "paulson": 48201, - "pause": 19439, - "paused": 46782, - "pav": 6661, - "pave": 37107, - "paved": 27898, - "pavel": 43152, - "pavement": 27669, - "pavilion": 13374, - "paving": 28651, - "paw": 14009, - "paw": 16016, - "pawan": 29754, - "pawankalyan": 33702, - "pawn": 43195, - "paws": 16714, - "pax": 20007, - "pax": 19033, - "paxton": 38347, - "pay": 2642, - "pay": 3345, - "payback": 36413, - 
"paycheck": 45078, - "payday": 26957, - "payee": 46985, - "payer": 41503, - "paying": 8341, - "payment": 10596, - "payments": 11832, - "payne": 12775, - "paypal": 21442, - "payroll": 31610, - "pays": 10845, - "paysoff": 48174, - "paytm": 45352, - "payton": 27348, - "paz": 22267, - "pb": 20112, - "pb": 10981, - "pba": 28205, - "pbb": 48567, - "pbb": 40589, - "pbc": 49191, - "pbl": 35166, - "pbr": 32998, - "pbs": 17908, - "pc": 6782, - "pc": 3808, - "pca": 35705, - "pcb": 26235, - "pcc": 36059, - "pci": 38957, - "pcm": 47436, - "pcr": 35704, - "pcs": 11917, - "pcso": 31963, - "pct": 22168, - "pd": 4387, - "pd": 4675, - "pdates": 16842, - "pdc": 40498, - "pdf": 15181, - "pdp": 24601, - "pdt": 21743, - "pdx": 25470, - "pdx": 16153, - "pe": 661, - "pe": 956, - "pea": 13915, - "peabo": 34083, - "peabody": 41244, - "peac": 34615, - "peace": 6249, - "peace": 3021, - "peaceful": 9461, - "peacefully": 30530, - "peacekeeping": 43630, - "peach": 10522, - "peach": 11538, - "peaches": 27216, - "peak": 18572, - "peak": 6026, - "peakdistrict": 41289, - "peake": 24810, - "peaked": 36391, - "peaks": 14067, - "pean": 11563, - "peanu": 25843, - "peanut": 12491, - "peanuts": 26503, - "pear": 4910, - "pear": 18820, - "pearce": 25996, - "pearl": 21806, - "pearl": 8560, - "pearljam": 46739, - "pearls": 19581, - "pears": 39565, - "pearson": 20461, - "peas": 15937, - "peasant": 40621, - "peasants": 48788, - "peat": 26914, - "pebble": 28056, - "pebbles": 40155, - "pec": 32447, - "pec": 17611, - "pecan": 32177, - "peck": 25186, - "peck": 29234, - "pecker": 30169, - "peckham": 45863, - "pecu": 34200, - "peculiar": 42808, - "ped": 13197, - "ped": 2966, - "pedago": 34590, - "pedagogy": 48072, - "pedal": 32943, - "pedal": 19621, - "pedals": 38535, - "pede": 12862, - "pede": 19560, - "pedestri": 30027, - "pedestrian": 18256, - "pedestrians": 33895, - "pedi": 12967, - "pedia": 11733, - "pediatric": 48431, - "pediatric": 22071, - "pedic": 35319, - "pedic": 44528, - "pedro": 29963, - "pedro": 15114, 
- "peds": 45377, - "pee": 12988, - "pee": 11196, - "peed": 47369, - "peek": 46323, - "peek": 7569, - "peeking": 48771, - "peel": 34386, - "peel": 17158, - "peeled": 33533, - "peeling": 48649, - "peep": 25425, - "peep": 16857, - "peeps": 11681, - "peer": 32416, - "peer": 14432, - "peers": 21626, - "pees": 31830, - "peg": 32182, - "peg": 11207, - "pegas": 30018, - "pegasus": 37822, - "peggy": 24271, - "pei": 48166, - "pei": 12917, - "pel": 4286, - "pel": 7006, - "pele": 44105, - "pelican": 34131, - "pelicans": 29363, - "pell": 46981, - "pelle": 31267, - "pelled": 32506, - "pellegr": 38529, - "pellets": 48240, - "pelo": 40192, - "pelo": 40238, - "pelosi": 22169, - "pelvic": 45646, - "pemb": 19880, - "pembro": 24084, - "pembroke": 36702, - "pembroke": 40044, - "pembrokeshire": 40695, - "pen": 1501, - "pen": 5356, - "pena": 35788, - "penalties": 25417, - "penalty": 11491, - "penang": 29545, - "penc": 20065, - "pence": 18002, - "pencil": 41303, - "pencil": 11200, - "pencils": 21909, - "pend": 3052, - "pendant": 12415, - "pendants": 44117, - "pending": 12770, - "pendleton": 44272, - "pendu": 45336, - "penelope": 36703, - "penetr": 26058, - "peng": 42955, - "peng": 39200, - "pengu": 8854, - "penguin": 28249, - "penguin": 14952, - "penguins": 16557, - "peninsu": 13464, - "peninsula": 14070, - "penn": 7760, - "penn": 11128, - "pennant": 43971, - "penned": 45077, - "penney": 47856, - "pennies": 43094, - "pennsylvania": 13673, - "penny": 20400, - "penny": 11388, - "pens": 13307, - "pens": 13310, - "pensac": 30925, - "pensacola": 33573, - "pension": 32840, - "pension": 17764, - "pensions": 29773, - "penske": 47154, - "pent": 10699, - "pent": 22725, - "pentagon": 23133, - "pente": 33165, - "penthouse": 32673, - "penultimate": 36553, - "peop": 1030, - "people": 10573, - "people": 1047, - "peoples": 28241, - "peoples": 14627, - "peopleschoice": 32418, - "peoplesvote": 45830, - "peoria": 36985, - "pep": 12761, - "pep": 14898, - "pepe": 24778, - "pepp": 34425, - "pepper": 14861, - 
"pepper": 8253, - "peppermint": 30321, - "pepperoni": 47307, - "peppers": 14650, - "pepsi": 21307, - "per": 703, - "per": 1284, - "pera": 26294, - "perce": 24135, - "perceived": 38436, - "percent": 16328, - "percent": 9017, - "percentage": 19477, - "percep": 28017, - "perception": 20591, - "perceptions": 38138, - "perch": 34281, - "perched": 40071, - "percu": 41722, - "percussion": 23980, - "percy": 23940, - "pere": 8665, - "pere": 36300, - "pered": 24509, - "peregr": 37479, - "peregrine": 44546, - "pereira": 43927, - "peren": 24564, - "perenni": 26996, - "perennial": 34038, - "perez": 15107, - "perf": 22816, - "perfe": 1624, - "perfec": 6599, - "perfect": 17261, - "perfect": 1878, - "perfection": 9646, - "perfectly": 8037, - "perfecto": 42898, - "perfor": 2311, - "perform": 3866, - "perform": 5940, - "performan": 8973, - "performance": 2714, - "performances": 9553, - "performed": 9997, - "performer": 17061, - "performers": 18476, - "performing": 5170, - "performs": 13839, - "perfu": 14214, - "perfume": 17525, - "perhaps": 9297, - "peri": 12618, - "peri": 44068, - "perience": 19302, - "peril": 40119, - "peril": 48301, - "perimeter": 38499, - "pering": 29746, - "perio": 5101, - "period": 6131, - "periodic": 36476, - "periods": 24401, - "periph": 35308, - "peripheral": 43901, - "peris": 19461, - "periscope": 21668, - "perk": 33424, - "perkins": 20057, - "perks": 17660, - "perl": 44018, - "perm": 47847, - "perman": 9018, - "permanent": 11144, - "permanently": 25584, - "perme": 42456, - "permission": 15822, - "permit": 21950, - "permits": 33267, - "permitted": 44380, - "pero": 23551, - "perpe": 15749, - "perpetr": 33376, - "perpetu": 30132, - "perpetual": 32018, - "perrie": 32691, - "perry": 28478, - "perry": 7899, - "pers": 3688, - "pers": 10710, - "perse": 27498, - "persecu": 22878, - "persecution": 32009, - "perseverance": 29820, - "persi": 11509, - "persian": 19859, - "persist": 19412, - "persist": 40938, - "persistence": 34588, - "persistent": 29028, - "person": 
3510, - "person": 2533, - "persona": 18401, - "personal": 10114, - "personal": 4121, - "personalised": 24186, - "personalities": 27888, - "personality": 10386, - "personalized": 17845, - "personally": 13885, - "personnel": 14546, - "persons": 14592, - "perspec": 17997, - "perspective": 8996, - "perspectives": 18777, - "persu": 20972, - "pert": 36970, - "pert": 16306, - "perth": 19067, - "perth": 11011, - "peru": 20612, - "peru": 12964, - "peruvian": 30822, - "pes": 38368, - "pes": 2598, - "pesa": 47409, - "pesc": 44044, - "pesh": 33184, - "peshaw": 28524, - "peshawar": 29230, - "pesky": 42512, - "pesos": 47872, - "pessi": 43902, - "pest": 20130, - "pest": 9425, - "pesticide": 48481, - "pesticides": 37868, - "pesto": 26186, - "pests": 41919, - "pet": 2167, - "pet": 3703, - "peta": 28785, - "petal": 38430, - "petal": 40469, - "petals": 26064, - "petday": 45314, - "pete": 14479, - "pete": 8571, - "peter": 5093, - "peter": 3696, - "peterborough": 26012, - "peters": 16336, - "petersburg": 21052, - "petersen": 39794, - "peterson": 16877, - "peth": 48920, - "petit": 36437, - "petit": 21276, - "petite": 27213, - "petition": 10975, - "petitions": 43536, - "petr": 29808, - "petra": 31300, - "petre": 47179, - "petri": 31831, - "petro": 8716, - "petrol": 18149, - "petroleum": 22063, - "petron": 42875, - "pets": 7663, - "pett": 27051, - "petti": 48001, - "petting": 44334, - "petty": 17324, - "peu": 21411, - "peuge": 22893, - "peugeot": 24129, - "pew": 21608, - "pew": 30783, - "pewdie": 41882, - "pewdiepie": 42563, - "pex": 43765, - "pey": 14966, - "pey": 30933, - "peyton": 49254, - "peyton": 20307, - "pez": 45798, - "pez": 10482, - "pf": 16680, - "pf": 12572, - "pfa": 47839, - "pfc": 35007, - "pff": 44121, - "pfi": 29810, - "pfw": 31229, - "pg": 12476, - "pg": 5211, - "pga": 13351, - "pgat": 36514, - "pgatour": 40094, - "pgh": 44862, - "pgh": 30031, - "pgs": 49204, - "ph": 745, - "ph": 2042, - "pha": 4443, - "pha": 26255, - "phal": 19962, - "phan": 8731, - "phan": 40126, - 
"phant": 36998, - "phantom": 37688, - "phantom": 14490, - "phar": 5570, - "phara": 35792, - "pharaoh": 40437, - "pharm": 45761, - "pharma": 17831, - "pharmac": 8193, - "pharmaceu": 19490, - "pharmaceutical": 25217, - "pharmaceuticals": 44623, - "pharmacist": 41024, - "pharmacists": 44337, - "pharmacy": 15293, - "pharo": 42308, - "pharoah": 49287, - "pharrell": 31316, - "phase": 8304, - "phases": 35337, - "phat": 42492, - "phc": 41102, - "phd": 20875, - "phd": 8472, - "phdchat": 39564, - "phdlife": 39638, - "phe": 4787, - "phe": 19853, - "pheasant": 41983, - "phee": 41292, - "phel": 23711, - "phelps": 27128, - "phen": 7718, - "pheno": 47336, - "phenom": 31673, - "phenom": 39618, - "phenomen": 11304, - "phenomena": 41538, - "phenomenal": 15035, - "phenomenon": 24464, - "pher": 9194, - "pher": 19828, - "phers": 29531, - "pherson": 36421, - "phew": 10295, - "phi": 2239, - "phi": 12220, - "phia": 9228, - "phic": 3977, - "phie": 30237, - "phies": 17062, - "phil": 2821, - "phil": 6199, - "phila": 47443, - "philadel": 9428, - "philadelphia": 9749, - "philanthro": 16587, - "philanthropist": 44153, - "philanthropy": 25047, - "philately": 33695, - "phile": 36543, - "philharmon": 25228, - "philharmonic": 31699, - "phili": 4277, - "philia": 46654, - "philip": 20748, - "philip": 11074, - "philipp": 5623, - "philipp": 47591, - "philippe": 20942, - "philippine": 17629, - "philippines": 8149, - "philips": 25175, - "phill": 42346, - "phill": 48272, - "philli": 6456, - "phillies": 18748, - "phillip": 48832, - "phillip": 19323, - "phillips": 11041, - "philly": 19545, - "philly": 7785, - "philos": 8395, - "philosop": 20349, - "philosoph": 10187, - "philosopher": 25220, - "philosophical": 32628, - "philosophy": 12213, - "phils": 38573, - "phin": 33816, - "phine": 40985, - "phins": 40210, - "phish": 36897, - "phishing": 36546, - "phl": 25603, - "pho": 816, - "pho": 22707, - "phobia": 28749, - "phoe": 22673, - "phoebe": 27582, - "phoeni": 6778, - "phoenix": 20615, - "phoenix": 7793, - 
"phol": 48140, - "phon": 19602, - "phon": 31115, - "phone": 15486, - "phone": 1951, - "phones": 6351, - "phony": 31925, - "phora": 31363, - "phosp": 22638, - "photo": 1153, - "photo": 1125, - "photobomb": 37075, - "photobook": 41894, - "photog": 28115, - "photogenic": 36108, - "photogra": 36754, - "photograph": 1688, - "photograph": 8853, - "photographed": 11573, - "photographer": 5748, - "photographers": 17141, - "photographic": 22053, - "photographing": 30074, - "photographs": 15759, - "photography": 33183, - "photography": 2108, - "photom": 32223, - "photoo": 11106, - "photooftheday": 11933, - "photos": 2479, - "photoshoot": 11121, - "photoshop": 12419, - "photoshopped": 35738, - "phouse": 27848, - "php": 17370, - "phra": 12777, - "phrase": 18809, - "phrases": 35264, - "phs": 16495, - "phu": 21274, - "phuket": 34028, - "phx": 35466, - "phx": 29507, - "phy": 6484, - "phy": 4292, - "phyl": 35600, - "phyllis": 37844, - "phys": 3734, - "phys": 37894, - "physi": 13782, - "physic": 46641, - "physical": 44127, - "physical": 6671, - "physically": 18105, - "physician": 21055, - "physicians": 26702, - "physicist": 29052, - "physics": 9369, - "physio": 29574, - "physio": 29177, - "physiology": 32349, - "physique": 42884, - "phyto": 42197, - "pi": 741, - "pi": 5357, - "pia": 8918, - "pian": 24637, - "pianist": 21048, - "piano": 49278, - "piano": 7894, - "pianos": 47904, - "piazza": 28496, - "pic": 901, - "pic": 1282, - "pical": 5482, - "picard": 48507, - "picasso": 21481, - "piccad": 33876, - "piccadilly": 37287, - "piccollage": 43621, - "pick": 6379, - "pick": 3142, - "picked": 6018, - "picker": 43105, - "pickering": 47605, - "picket": 33559, - "picking": 9545, - "pickle": 24570, - "pickled": 21705, - "pickles": 25001, - "picks": 8551, - "pickup": 15382, - "pickups": 33383, - "picnic": 12007, - "pico": 23363, - "picoftheday": 18319, - "pics": 2559, - "pict": 18778, - "pictorial": 40640, - "picture": 11663, - "picture": 1674, - "pictured": 7647, - "pictures": 3646, - 
"picturesque": 24894, - "pid": 5225, - "piday": 48056, - "pie": 12065, - "pie": 5319, - "piece": 39632, - "piece": 2754, - "pieces": 6194, - "pied": 24686, - "pied": 12713, - "piedmont": 39691, - "pier": 5641, - "pier": 11348, - "pierc": 49216, - "pierce": 48462, - "pierce": 16782, - "pierced": 32799, - "piercing": 22557, - "piero": 43125, - "pierre": 34670, - "pierre": 11985, - "piers": 29030, - "pies": 6898, - "pieter": 44801, - "pietro": 42169, - "piff": 40719, - "pig": 12009, - "pig": 9619, - "pigeon": 18008, - "pigeons": 32910, - "piggy": 28245, - "pigment": 40284, - "pigs": 16228, - "pik": 48539, - "pika": 47372, - "pikach": 27268, - "pikachu": 28107, - "pike": 33457, - "pike": 14011, - "pil": 2893, - "pil": 20645, - "pilates": 29518, - "pile": 44403, - "pile": 13930, - "piled": 26873, - "piles": 31968, - "pilgri": 13966, - "pilgrim": 32662, - "pilgrimage": 24335, - "pilgrims": 31370, - "piling": 43050, - "pilip": 27234, - "pilipinas": 32392, - "pill": 14830, - "pill": 19226, - "pillar": 17322, - "pillars": 22054, - "pillow": 42237, - "pillow": 12182, - "pillows": 26499, - "pills": 23964, - "pilo": 37526, - "pilot": 31619, - "pilot": 6687, - "pilots": 15586, - "pilsner": 47153, - "pim": 15285, - "pim": 35472, - "pimp": 35789, - "pin": 2629, - "pin": 5164, - "pinball": 31679, - "pinch": 26114, - "pine": 9398, - "pine": 7374, - "pineapple": 14831, - "pines": 20338, - "ping": 23720, - "ping": 2089, - "pinion": 40557, - "pink": 11151, - "pink": 3360, - "pinkfloyd": 48520, - "pinky": 29803, - "pinn": 31448, - "pinnacle": 32754, - "pinned": 12165, - "pinning": 44515, - "pino": 36633, - "pinot": 41399, - "pinot": 21146, - "pinoy": 43578, - "pinoy": 35258, - "pins": 14619, - "pinst": 41173, - "pint": 42537, - "pint": 13584, - "pinterest": 15379, - "pinto": 35992, - "pints": 27935, - "pinup": 37349, - "pio": 22108, - "pion": 36728, - "pion": 29190, - "pione": 7975, - "pioneer": 34892, - "pioneer": 12459, - "pioneering": 25933, - "pioneers": 22383, - "pious": 42441, - 
"pip": 30854, - "pipe": 29333, - "pipe": 10459, - "pipel": 12387, - "pipeline": 14151, - "pipelines": 39683, - "piper": 47052, - "piper": 16293, - "pipes": 16991, - "piping": 40744, - "pippa": 47672, - "pir": 4351, - "pir": 38899, - "piracy": 39452, - "piran": 49034, - "pirate": 38680, - "pirate": 13592, - "pirates": 10442, - "pire": 16613, - "pires": 14988, - "pis": 9230, - "pis": 44441, - "pisa": 43632, - "pisces": 45982, - "piss": 20818, - "pissed": 17989, - "pist": 15556, - "pist": 32826, - "pistachi": 29760, - "pistachio": 36320, - "pistol": 20480, - "piston": 48236, - "pistons": 27242, - "pistor": 48162, - "pit": 2946, - "pit": 7476, - "pita": 27070, - "pitbull": 25295, - "pitch": 8992, - "pitch": 5872, - "pitched": 28447, - "pitcher": 13445, - "pitchers": 27835, - "pitches": 21005, - "pitching": 16455, - "piti": 47568, - "pits": 24144, - "pitt": 7607, - "pitt": 15599, - "pitts": 9531, - "pittsburgh": 10453, - "pity": 24380, - "pius": 39988, - "pivo": 18009, - "pivot": 31805, - "pivotal": 31432, - "pix": 6185, - "pix": 13088, - "pixar": 27493, - "pixel": 14384, - "pixel": 13241, - "pixelart": 18516, - "pixels": 34099, - "pixie": 35573, - "piyu": 30772, - "piyush": 36191, - "piyushgoyal": 45318, - "pizz": 3897, - "pizza": 4474, - "pizzas": 30647, - "pizzeria": 44174, - "pj": 12524, - "pj": 17179, - "pjnet": 22011, - "pjs": 36009, - "pk": 10149, - "pk": 10991, - "pkg": 49011, - "pkk": 47480, - "pknot": 41779, - "pkwy": 36827, - "pl": 712, - "pl": 5678, - "pla": 841, - "pla": 19945, - "plac": 2331, - "place": 14884, - "place": 1445, - "placed": 9729, - "placement": 16724, - "placements": 43885, - "placer": 49170, - "places": 4448, - "placing": 18531, - "plague": 25360, - "plaid": 23291, - "plain": 22776, - "plain": 10709, - "plains": 16345, - "plan": 1740, - "plan": 2970, - "pland": 24801, - "plane": 22728, - "plane": 5363, - "planes": 12581, - "planet": 16833, - "planet": 5172, - "planetary": 28361, - "planets": 22315, - "plank": 30991, - "plankton": 48249, - 
"plann": 6409, - "planned": 8169, - "planner": 18083, - "planners": 33664, - "planning": 4446, - "plano": 34063, - "plans": 4181, - "plant": 8521, - "plant": 3912, - "plantation": 20014, - "plantbased": 33720, - "planted": 14286, - "planter": 34453, - "planters": 43661, - "planting": 13922, - "plants": 5829, - "plaque": 16097, - "plaques": 45610, - "plar": 26754, - "plas": 45673, - "plasma": 24999, - "plaster": 31980, - "plastic": 15645, - "plastic": 6102, - "plasticpollution": 47129, - "plastics": 20999, - "plasticsurgery": 48555, - "plat": 3172, - "plata": 46456, - "plate": 28744, - "plate": 5135, - "plateau": 29301, - "plated": 21161, - "plates": 11485, - "platform": 5549, - "platforms": 13551, - "platin": 10267, - "plating": 44564, - "platinum": 10979, - "plato": 41101, - "platoon": 41254, - "platt": 44459, - "platt": 40097, - "platte": 46785, - "platter": 29071, - "platz": 40878, - "plau": 39139, - "play": 1222, - "play": 1453, - "playa": 23756, - "playable": 33885, - "playback": 39194, - "playbook": 34856, - "playboy": 24383, - "played": 3432, - "player": 24503, - "player": 2477, - "players": 3030, - "playful": 23871, - "playground": 15861, - "playhouse": 23254, - "playin": 24674, - "playing": 47368, - "playing": 1629, - "playlist": 9180, - "playlists": 47183, - "playo": 5804, - "playoff": 9655, - "playoffs": 9548, - "plays": 5134, - "playstation": 11332, - "playtime": 43037, - "playwright": 32070, - "plaza": 8943, - "plc": 16827, - "ple": 926, - "ple": 1619, - "plea": 21956, - "plead": 47539, - "pleads": 31425, - "plear": 21362, - "pleas": 8481, - "pleas": 48740, - "pleasant": 12271, - "please": 41074, - "please": 1474, - "pleased": 6107, - "pleasing": 32893, - "pleasure": 5854, - "pleasures": 29513, - "pledge": 11507, - "pledged": 36799, - "pledges": 26746, - "pledis": 41202, - "plein": 43429, - "plenary": 19891, - "plenty": 7524, - "pler": 17677, - "ples": 6248, - "pless": 39821, - "pless": 17059, - "plets": 43230, - "plex": 23765, - "plex": 15241, - 
"pley": 19543, - "pli": 30001, - "pli": 45797, - "plic": 5806, - "plicity": 19823, - "plight": 40317, - "plin": 44531, - "plin": 32335, - "pline": 25376, - "pling": 12899, - "plings": 31184, - "pll": 47629, - "pll": 25266, - "pln": 48755, - "plo": 1778, - "plo": 43523, - "plor": 34695, - "plot": 9918, - "plots": 25672, - "plotting": 30751, - "plough": 33811, - "plow": 38363, - "pls": 5572, - "plu": 2052, - "plug": 12628, - "plugged": 23261, - "plugin": 31278, - "plugins": 48797, - "plugs": 28083, - "plum": 26267, - "plum": 16202, - "plumb": 21769, - "plumber": 43478, - "plumbing": 24647, - "plume": 39495, - "plun": 15122, - "plunge": 26506, - "plur": 44664, - "plus": 3097, - "plush": 18926, - "pluto": 26380, - "ply": 17249, - "ply": 28705, - "plying": 36071, - "plym": 11907, - "plymouth": 13786, - "plz": 10538, - "pm": 13699, - "pm": 990, - "pmi": 41206, - "pmln": 23208, - "pmo": 18782, - "pmoindia": 20374, - "pms": 44223, - "pn": 14431, - "pn": 13774, - "pnc": 37148, - "pne": 30966, - "pneu": 28714, - "pneumonia": 42906, - "png": 20992, - "pnp": 25972, - "pnpp": 42175, - "pnw": 31521, - "po": 628, - "po": 3057, - "poa": 43912, - "poached": 27665, - "poaching": 35140, - "poc": 13232, - "poc": 27780, - "pocaly": 37987, - "pocalypse": 42307, - "poche": 38336, - "poche": 39022, - "pocket": 29147, - "pocket": 8504, - "pockets": 19566, - "pocon": 41850, - "pod": 3583, - "pod": 7446, - "podcast": 39654, - "podcast": 4294, - "podcasting": 40106, - "podcasts": 19392, - "pode": 33368, - "poder": 24960, - "podernfamily": 26620, - "podi": 32853, - "podium": 14093, - "pods": 18776, - "poe": 4746, - "poe": 19254, - "poem": 9436, - "poems": 15577, - "poet": 41019, - "poet": 9872, - "poetic": 26365, - "poetry": 20192, - "poetry": 6038, - "poetryday": 39255, - "poets": 19804, - "pof": 40850, - "poff": 28236, - "pogba": 25998, - "poign": 29682, - "poignant": 32138, - "poin": 9074, - "point": 13280, - "point": 2301, - "pointe": 24631, - "pointed": 20703, - "pointer": 29883, - 
"pointers": 36760, - "pointing": 19233, - "pointless": 33586, - "points": 3396, - "pois": 17008, - "poise": 45087, - "poised": 27354, - "poison": 30722, - "poison": 17074, - "poisoned": 43624, - "poisoning": 25750, - "poisonous": 37131, - "pok": 15387, - "poke": 6892, - "poke": 23186, - "pokemon": 16239, - "pokemon": 9528, - "pokemongo": 23985, - "poker": 30735, - "poker": 11865, - "pokes": 40221, - "poking": 49169, - "poké": 20656, - "pokémon": 22066, - "pol": 977, - "pol": 7649, - "pola": 43876, - "poland": 9834, - "polar": 21432, - "polar": 12214, - "polari": 27919, - "polaris": 37965, - "polarized": 48437, - "polaro": 25237, - "polaroid": 30427, - "poldark": 41322, - "pole": 26682, - "pole": 8170, - "poles": 22585, - "poli": 9675, - "poli": 5414, - "polic": 16126, - "police": 15535, - "police": 2120, - "policeman": 37713, - "policemen": 47946, - "polici": 10819, - "policies": 10993, - "policing": 20969, - "policy": 30173, - "policy": 4660, - "polio": 30533, - "polis": 16133, - "polish": 46941, - "polish": 9632, - "polished": 21478, - "polishing": 43629, - "polit": 2247, - "politan": 15337, - "polite": 31497, - "politi": 40597, - "politic": 33333, - "political": 37744, - "political": 4197, - "politically": 24323, - "politician": 15960, - "politicians": 12914, - "politico": 39403, - "politics": 4929, - "polk": 33317, - "polka": 29476, - "poll": 7032, - "pollen": 27651, - "pollin": 19152, - "pollinators": 36599, - "polling": 18024, - "pollo": 42755, - "pollock": 37614, - "polls": 11813, - "pollu": 8370, - "polluted": 43346, - "pollution": 10384, - "polly": 31204, - "polo": 35928, - "polo": 10229, - "poly": 6833, - "poly": 18367, - "polye": 31730, - "polyester": 38514, - "polym": 23626, - "polymer": 29993, - "polyne": 38892, - "polyvore": 24771, - "pom": 7548, - "pom": 24280, - "pome": 27963, - "pomegran": 29326, - "pomegranate": 32415, - "pomer": 35156, - "pomona": 41690, - "pompe": 18352, - "pompeii": 47775, - "pompeo": 34351, - "pompey": 35079, - "pon": 3809, - 
"pon": 22391, - "ponce": 43637, - "pond": 10750, - "ponder": 36863, - "pondering": 47395, - "ponds": 31033, - "pone": 32183, - "pong": 40546, - "pong": 17710, - "ponies": 34157, - "pons": 41255, - "pont": 47563, - "pont": 22997, - "ponte": 40892, - "ponti": 15527, - "pontiac": 25373, - "pontifex": 33566, - "ponty": 45152, - "pony": 24438, - "pony": 12678, - "ponytail": 43265, - "poo": 6601, - "poo": 14389, - "pooch": 37037, - "poodle": 34961, - "pooh": 27103, - "pooja": 35676, - "pool": 12484, - "pool": 2831, - "poole": 26290, - "pools": 18736, - "poolside": 35509, - "poon": 33799, - "poon": 36178, - "poop": 23310, - "poor": 14528, - "poor": 3665, - "poorest": 40771, - "poorly": 21101, - "pop": 6530, - "pop": 2852, - "popart": 47425, - "popcorn": 15034, - "pope": 16994, - "pope": 9283, - "popefrancis": 37254, - "poplar": 38726, - "popo": 38835, - "popo": 35572, - "popp": 13156, - "popped": 14934, - "poppies": 30385, - "poppin": 28536, - "popping": 18152, - "poppins": 41216, - "poppy": 32194, - "poppy": 15447, - "pops": 11705, - "popsic": 38481, - "popu": 3785, - "popul": 6593, - "popular": 15854, - "popular": 4368, - "popularity": 19235, - "populated": 38420, - "population": 8423, - "populations": 23797, - "populism": 48998, - "populist": 49376, - "popup": 33053, - "por": 817, - "por": 7697, - "pora": 23537, - "porcel": 19409, - "porcelain": 20451, - "porch": 17154, - "pore": 28267, - "pork": 40379, - "pork": 7897, - "poro": 48110, - "porridge": 34924, - "porsch": 48009, - "porsche": 44049, - "porsche": 8783, - "port": 1641, - "port": 1418, - "porta": 45037, - "portable": 11949, - "portage": 32087, - "portal": 14982, - "porte": 28654, - "ported": 16879, - "porter": 28319, - "porter": 10318, - "porters": 15670, - "portfoli": 45766, - "portfolio": 11938, - "porth": 37425, - "porti": 45760, - "porting": 26052, - "portion": 13739, - "portions": 22914, - "portland": 38366, - "portland": 8880, - "portman": 34755, - "porto": 24853, - "porto": 18947, - "portobello": 48025, 
- "portra": 4175, - "portrait": 39312, - "portrait": 5352, - "portraits": 14203, - "portray": 46282, - "portrayal": 39238, - "portrayed": 36093, - "ports": 7734, - "portsm": 17063, - "portsmouth": 19074, - "portu": 7159, - "portugal": 9503, - "portugue": 17498, - "portuguese": 18019, - "pos": 1780, - "pos": 11839, - "pose": 25478, - "pose": 4230, - "posed": 5206, - "posei": 47270, - "poser": 46899, - "poses": 9773, - "posey": 34852, - "posh": 26748, - "posing": 10518, - "posit": 28793, - "positi": 7895, - "position": 4657, - "positioned": 34482, - "positioning": 30657, - "positions": 12188, - "positive": 21811, - "positive": 4844, - "positively": 24688, - "positivity": 19966, - "poss": 39745, - "posse": 17414, - "posse": 28413, - "possess": 36810, - "possessed": 36220, - "possession": 16154, - "possessions": 40588, - "possi": 2521, - "possibilities": 17932, - "possibility": 18517, - "possible": 3134, - "possibly": 8601, - "possum": 38575, - "post": 3489, - "post": 1549, - "postage": 27570, - "postal": 21687, - "postcard": 14785, - "postcards": 23922, - "postdoc": 41013, - "posted": 4752, - "poster": 22881, - "poster": 3574, - "posters": 9673, - "postgame": 34873, - "postgraduate": 31997, - "posthum": 42410, - "posting": 7559, - "postman": 38285, - "postpon": 23247, - "postponed": 25097, - "posts": 7824, - "postseason": 24521, - "posture": 29681, - "posure": 35539, - "pot": 3547, - "pot": 5168, - "potam": 45825, - "potassi": 36889, - "potassium": 37147, - "potat": 5975, - "potato": 8527, - "potatoes": 11567, - "potd": 28765, - "pote": 41869, - "poten": 4454, - "potent": 26082, - "potenti": 44104, - "potential": 5100, - "potentially": 16508, - "potholes": 47506, - "potion": 46055, - "potom": 38848, - "potomac": 43372, - "pots": 19234, - "pott": 28698, - "potted": 48581, - "potter": 24975, - "potter": 9026, - "pottery": 18396, - "potts": 39839, - "potty": 43569, - "potus": 8740, - "pou": 9423, - "pouch": 26811, - "poul": 22485, - "poultry": 31005, - "poun": 33719, - 
"pound": 33809, - "pound": 10674, - "pounding": 46544, - "pounds": 10752, - "pour": 33112, - "pour": 8180, - "poured": 26621, - "pouring": 16098, - "pours": 26005, - "pout": 39621, - "poutine": 43768, - "pov": 25731, - "pover": 8432, - "pover": 29464, - "poverty": 9095, - "pow": 1317, - "pow": 17745, - "powder": 32427, - "powder": 9674, - "powe": 36955, - "powell": 13305, - "power": 2789, - "power": 1807, - "powerball": 47803, - "powered": 45442, - "powered": 7332, - "powerful": 4875, - "powerhouse": 22858, - "powering": 16231, - "powerof": 31961, - "powerpoint": 38940, - "powerrangers": 40620, - "powers": 9422, - "pox": 43649, - "poy": 34737, - "poyn": 47655, - "poz": 39953, - "pp": 604, - "pp": 4186, - "ppa": 10416, - "ppard": 23391, - "ppc": 27778, - "ppe": 24573, - "ppe": 11867, - "pped": 1873, - "ppel": 46523, - "ppen": 30663, - "pper": 6719, - "pper": 2440, - "ppers": 5232, - "ppery": 27833, - "ppet": 20744, - "ppets": 25849, - "ppg": 27433, - "ppi": 9594, - "ppie": 33795, - "ppin": 8076, - "pping": 22214, - "pping": 1682, - "ppings": 35687, - "ppl": 6758, - "pple": 12302, - "ppm": 42053, - "ppo": 10215, - "ppor": 37613, - "ppp": 14017, - "pps": 10683, - "ppv": 38864, - "ppy": 30360, - "ppy": 3860, - "pr": 766, - "pr": 4150, - "pra": 1865, - "pra": 19285, - "prab": 17901, - "prabhas": 29959, - "prabhu": 31529, - "prac": 2243, - "practi": 29995, - "practic": 5495, - "practical": 10792, - "practically": 25588, - "practice": 3349, - "practiced": 36749, - "practices": 9040, - "practicing": 12750, - "practise": 38938, - "practising": 36478, - "practiti": 19909, - "practitioner": 32591, - "practitioners": 29045, - "prada": 29456, - "pradesh": 15384, - "prado": 44141, - "prag": 31025, - "prague": 14940, - "prairi": 12629, - "prairie": 14753, - "praise": 10013, - "praised": 27649, - "praises": 23049, - "praising": 36961, - "prakash": 43708, - "prakash": 25366, - "pram": 47774, - "pran": 20048, - "prank": 23654, - "pras": 41562, - "prasad": 29562, - "prat": 23069, - 
"prati": 45773, - "pratt": 37863, - "pratt": 23396, - "prawn": 33102, - "prawns": 34903, - "pray": 12671, - "pray": 6041, - "prayed": 34665, - "prayer": 41452, - "prayer": 6583, - "prayers": 8393, - "prayfor": 18443, - "praying": 11550, - "prays": 46602, - "prc": 28781, - "pre": 679, - "pre": 2900, - "preach": 22545, - "preacher": 29357, - "preaching": 23642, - "precau": 36532, - "precautions": 47845, - "prece": 15361, - "preci": 5470, - "precin": 27908, - "precinct": 32587, - "precious": 8226, - "precipit": 27463, - "precipitation": 33399, - "precise": 24457, - "precisely": 34954, - "precision": 44021, - "precision": 15621, - "pred": 40370, - "predat": 13364, - "predator": 20653, - "predators": 25569, - "prede": 38454, - "predecess": 38963, - "predic": 4876, - "predict": 16900, - "predictable": 25344, - "predicted": 18702, - "predicting": 30414, - "prediction": 16296, - "predictions": 15125, - "predictive": 29798, - "predicts": 25960, - "preds": 40125, - "pree": 47026, - "preet": 30131, - "prefe": 14542, - "prefecture": 32890, - "prefer": 33426, - "prefer": 11450, - "preference": 35057, - "preferences": 38118, - "preferred": 18772, - "prefers": 38528, - "pregame": 18575, - "pregn": 7190, - "pregnancy": 12769, - "pregnant": 11195, - "prehistoric": 32750, - "prejudice": 28337, - "preli": 15523, - "prelimin": 19990, - "preliminary": 20997, - "prelims": 43223, - "prelude": 42966, - "prem": 32090, - "prem": 21724, - "premature": 39253, - "premi": 2413, - "premier": 16996, - "premier": 5539, - "premiere": 5367, - "premiered": 27652, - "premieres": 19907, - "premiering": 32615, - "premierleague": 22608, - "premiers": 44883, - "premiership": 23665, - "premiosm": 38460, - "premiosmtvmiaw": 38630, - "premise": 45952, - "premises": 27266, - "premium": 8011, - "pren": 20801, - "preneur": 46288, - "preorder": 16703, - "preorders": 45985, - "prep": 6430, - "prep": 7277, - "prepa": 26270, - "prepaid": 42934, - "prepar": 4968, - "preparation": 11651, - "preparations": 19135, - 
"prepare": 7014, - "prepared": 7677, - "preparedness": 29492, - "prepares": 16375, - "preparing": 7365, - "prepped": 34379, - "prepping": 16459, - "preps": 14765, - "prequel": 40461, - "pres": 1385, - "pres": 8529, - "presale": 27135, - "presby": 30447, - "presbyter": 33959, - "presbyterian": 35370, - "preschool": 24354, - "prescott": 29392, - "prescri": 14851, - "prescribed": 36968, - "prescription": 23061, - "preseason": 13813, - "presen": 16742, - "presence": 8848, - "present": 2344, - "present": 2881, - "presentation": 4594, - "presentations": 16998, - "presented": 4587, - "presenter": 18587, - "presenters": 32759, - "presenting": 5339, - "presents": 4215, - "preserv": 17616, - "preservation": 21074, - "preserve": 15570, - "preserved": 23161, - "preserves": 44881, - "preserving": 32315, - "presi": 1697, - "presiden": 43374, - "presidency": 18077, - "president": 19900, - "president": 1940, - "presidente": 47363, - "presidenti": 48297, - "presidential": 8503, - "presidents": 16726, - "presiding": 45298, - "presley": 30013, - "press": 4124, - "press": 2124, - "pressed": 20080, - "presser": 27826, - "presses": 33748, - "pressing": 20893, - "pressure": 6083, - "pressures": 38487, - "prest": 41840, - "presti": 12245, - "prestige": 29328, - "prestigious": 15888, - "presto": 42211, - "preston": 37335, - "preston": 15179, - "presu": 21667, - "presumably": 42562, - "pret": 9652, - "preten": 15871, - "pretend": 18111, - "pretending": 21306, - "pretoria": 36080, - "prett": 46667, - "prettier": 31745, - "prettiest": 22866, - "pretty": 18286, - "pretty": 2111, - "pretz": 24890, - "pretzel": 36707, - "pretzels": 45468, - "prev": 20274, - "prevail": 31637, - "prevalence": 41729, - "prevalent": 46260, - "preven": 29382, - "prevent": 26436, - "prevent": 7968, - "preventable": 44250, - "prevented": 35356, - "preventing": 21756, - "prevention": 9500, - "preventive": 40949, - "prevents": 31746, - "preview": 4449, - "previews": 20279, - "previous": 9252, - "previously": 13359, - 
"prey": 17131, - "prez": 17956, - "pri": 955, - "pri": 23400, - "pric": 24275, - "price": 13254, - "price": 2827, - "priced": 16934, - "priceless": 15743, - "prices": 5954, - "pricing": 14800, - "prick": 43921, - "prick": 46516, - "pride": 15323, - "pride": 3436, - "pridemonth": 41410, - "prie": 22477, - "priest": 38756, - "priest": 14222, - "priests": 30005, - "prim": 22004, - "prima": 35611, - "prima": 33277, - "primal": 36604, - "primar": 21579, - "primaries": 46126, - "primarily": 29465, - "primark": 48329, - "primary": 35024, - "primary": 5814, - "primavera": 44899, - "prime": 14162, - "prime": 5183, - "primed": 45694, - "primer": 22388, - "primetime": 29763, - "primitive": 37467, - "primo": 43215, - "primrose": 45891, - "prin": 1588, - "prince": 9457, - "prince": 4735, - "princes": 45329, - "princes": 30136, - "princess": 24123, - "princess": 5079, - "princesses": 34161, - "princeton": 22433, - "princi": 5129, - "principal": 33599, - "principal": 8860, - "principals": 27524, - "principle": 19595, - "principles": 13755, - "print": 17851, - "print": 3557, - "printable": 29648, - "printed": 7978, - "printer": 14521, - "printers": 27881, - "printing": 7369, - "printmaking": 38669, - "prints": 7704, - "prior": 20328, - "prior": 10572, - "priorit": 47773, - "prioriti": 28822, - "priorities": 15232, - "prioritize": 46715, - "priority": 12451, - "priory": 38665, - "prisc": 32468, - "priscilla": 42396, - "prise": 23343, - "prism": 49311, - "prism": 34356, - "prison": 9281, - "prison": 6622, - "prisoner": 21427, - "prisoners": 17460, - "prisons": 26607, - "pristine": 30618, - "prit": 41668, - "prit": 37523, - "prith": 39173, - "prius": 43561, - "priv": 3270, - "privacy": 10437, - "private": 20362, - "private": 4439, - "privately": 32970, - "privati": 27379, - "privi": 8367, - "privileg": 18015, - "privilege": 11537, - "privileged": 18166, - "prix": 10875, - "priya": 31275, - "priyan": 16488, - "priyanka": 31959, - "priyankach": 30030, - "priyankachopra": 30264, - 
"prize": 48222, - "prize": 4521, - "prized": 38769, - "prizes": 9268, - "prk": 37094, - "pro": 644, - "pro": 2630, - "proactive": 33364, - "prob": 17706, - "prob": 24007, - "probab": 3907, - "probability": 32637, - "probable": 42444, - "probably": 4047, - "probation": 36531, - "probe": 14359, - "probes": 48564, - "probiotics": 49395, - "proble": 2719, - "problem": 4324, - "problematic": 33767, - "problems": 4671, - "probs": 16330, - "probz": 34243, - "proc": 38417, - "proce": 4076, - "procedu": 18204, - "procedural": 48177, - "procedure": 20163, - "procedures": 21109, - "proceed": 26664, - "proceed": 33894, - "proceedings": 26953, - "proceeds": 11882, - "process": 17291, - "process": 4078, - "processed": 23816, - "processes": 15169, - "processing": 11737, - "procession": 26288, - "processor": 22838, - "processors": 43634, - "proclaimed": 34489, - "proclamation": 32065, - "procra": 25361, - "procrastin": 25586, - "procrastination": 42825, - "procreate": 39336, - "proctor": 47204, - "procu": 21001, - "procurement": 23733, - "prod": 44349, - "prod": 11991, - "prodi": 27759, - "prodigy": 31973, - "produ": 27852, - "produc": 1471, - "produce": 7529, - "produced": 7479, - "producer": 7064, - "producers": 13883, - "produces": 19940, - "producing": 13579, - "product": 32602, - "product": 4306, - "production": 4146, - "productions": 14166, - "productive": 9697, - "productivity": 12800, - "products": 3964, - "prof": 15043, - "prof": 5488, - "profe": 2611, - "profess": 5486, - "professi": 3705, - "profession": 8104, - "profession": 19671, - "professional": 46007, - "professional": 4774, - "professionalism": 41252, - "professionally": 33892, - "professionals": 10165, - "professor": 47302, - "professor": 6092, - "professors": 27758, - "profici": 34685, - "profile": 14291, - "profile": 6444, - "profiles": 22070, - "profiling": 37123, - "profit": 16941, - "profit": 7909, - "profitable": 25465, - "profits": 13410, - "profound": 48245, - "profound": 22998, - "profs": 19260, - 
"prog": 22219, - "progno": 46070, - "program": 4162, - "program": 2737, - "programme": 6322, - "programmer": 37001, - "programmes": 20468, - "programming": 10831, - "programs": 7345, - "progre": 7069, - "progress": 4421, - "progressi": 23297, - "progressing": 32346, - "progression": 24772, - "progressive": 12208, - "progressives": 41709, - "prohi": 41124, - "prohib": 45040, - "prohibition": 34440, - "proj": 39156, - "proje": 48345, - "projec": 1610, - "project": 15911, - "project": 1965, - "projected": 22873, - "projection": 22384, - "projections": 34638, - "projector": 27816, - "projects": 5090, - "proli": 19710, - "prolife": 32126, - "prolifer": 39018, - "prolific": 27839, - "prolly": 45968, - "prolon": 35379, - "prolonged": 41972, - "prom": 40363, - "prom": 7944, - "prome": 34355, - "promen": 33578, - "promenade": 35522, - "promethe": 44183, - "promin": 35217, - "prominent": 19172, - "promis": 3963, - "promise": 6745, - "promised": 11516, - "promises": 12064, - "promising": 14183, - "promo": 3037, - "promo": 6755, - "promos": 35044, - "promote": 47384, - "promote": 8003, - "promoted": 16395, - "promoter": 33081, - "promotes": 20169, - "promoting": 9695, - "promotion": 9259, - "promotional": 17619, - "promotions": 19142, - "promp": 11671, - "prompt": 20198, - "prompted": 45746, - "prompts": 33490, - "proms": 37759, - "pron": 13285, - "prone": 30964, - "pronoun": 23022, - "pronounce": 40489, - "pronounced": 34109, - "pronto": 44296, - "proof": 17020, - "proof": 5248, - "proofing": 35679, - "proofs": 41023, - "prop": 19123, - "prop": 16254, - "propag": 12151, - "propaganda": 14718, - "propane": 45546, - "propel": 48439, - "propeller": 47404, - "proper": 3577, - "proper": 8205, - "properly": 12560, - "properties": 10922, - "property": 26486, - "property": 5043, - "prophe": 9662, - "prophecy": 32501, - "prophet": 15549, - "prophetic": 47476, - "prophets": 39441, - "propor": 35016, - "proportion": 35775, - "proportions": 39391, - "propos": 9455, - "proposal": 12139, - 
"proposals": 20568, - "propose": 28471, - "proposed": 10615, - "proposes": 27133, - "proposing": 42631, - "proposition": 44780, - "propri": 28243, - "props": 15249, - "propulsion": 49380, - "pros": 33925, - "pros": 14147, - "prosciutto": 46565, - "prose": 47063, - "prose": 28675, - "prosecco": 28839, - "prosecu": 12136, - "prosecution": 30902, - "prosecutor": 23736, - "prosecutors": 31656, - "prosp": 24242, - "prospec": 12693, - "prospect": 11211, - "prospective": 28034, - "prospects": 15372, - "prosper": 16121, - "prosper": 33526, - "prosperity": 17203, - "prosperous": 28252, - "prost": 47923, - "prostate": 28808, - "prostatec": 49064, - "prosthetic": 44602, - "prostitu": 37333, - "protag": 28950, - "protagonist": 38183, - "prote": 1845, - "protec": 5640, - "protect": 25563, - "protect": 4817, - "protected": 12266, - "protecting": 11710, - "protection": 6238, - "protections": 33772, - "protective": 17028, - "protector": 20441, - "protectors": 45039, - "protects": 21889, - "protein": 8088, - "proteins": 28661, - "protest": 6279, - "protestant": 46945, - "protested": 48089, - "protester": 42073, - "protesters": 12660, - "protesting": 18788, - "protestors": 27822, - "protests": 12450, - "proto": 8672, - "proto": 44958, - "protocol": 19938, - "protocols": 39631, - "proton": 40009, - "prototype": 16675, - "prototyping": 42081, - "prou": 5739, - "proud": 11080, - "proud": 1679, - "prouder": 39585, - "proudest": 46806, - "proudly": 11203, - "proudof": 48184, - "proudtobe": 35043, - "prov": 23772, - "prov": 35021, - "prove": 10107, - "proved": 16473, - "proven": 35405, - "proven": 14569, - "provence": 28067, - "prover": 18312, - "proverb": 34419, - "proverbs": 27016, - "proves": 16119, - "provi": 2289, - "provide": 4832, - "provided": 9046, - "providence": 19331, - "provider": 14409, - "providers": 17120, - "provides": 7161, - "providing": 7250, - "provin": 12074, - "province": 8978, - "provinces": 35050, - "provincial": 16002, - "proving": 18055, - "provision": 30148, - 
"provisional": 36008, - "provisions": 39269, - "provo": 15367, - "provoc": 31618, - "provocative": 43809, - "provoking": 25510, - "provost": 36627, - "prow": 38737, - "prowrestling": 39825, - "prox": 41616, - "proxim": 31436, - "proximity": 38298, - "proxy": 31680, - "prs": 23879, - "pru": 12961, - "pruitt": 39453, - "prun": 29029, - "pruning": 48133, - "pry": 31965, - "pryor": 43375, - "ps": 3982, - "ps": 814, - "psa": 14031, - "psal": 13859, - "psalm": 17995, - "psalms": 35003, - "psb": 37017, - "psc": 43118, - "psd": 28810, - "pse": 19737, - "pse": 5423, - "pseu": 24919, - "pseudo": 46618, - "psg": 17123, - "psi": 45848, - "psi": 24533, - "psic": 29299, - "psis": 33041, - "psl": 21373, - "psn": 36781, - "pso": 27045, - "pson": 7487, - "psori": 44688, - "psp": 32769, - "pss": 35718, - "pss": 42535, - "psst": 47814, - "pst": 12692, - "psu": 41286, - "psu": 28338, - "psv": 44530, - "psy": 3576, - "psy": 11056, - "psych": 31041, - "psych": 20509, - "psyched": 19932, - "psyched": 35199, - "psychedelic": 23292, - "psychi": 18147, - "psychiatric": 30578, - "psychiatry": 39706, - "psychic": 24916, - "psycho": 6472, - "psycho": 22154, - "psychological": 18153, - "psychologist": 32827, - "psychology": 12352, - "psychop": 30112, - "psychotic": 48774, - "pt": 11139, - "pt": 1459, - "pta": 11586, - "ptbo": 40481, - "ptc": 44646, - "pte": 47804, - "pter": 49323, - "pti": 29375, - "pti": 10491, - "ptic": 20670, - "ption": 3479, - "ptions": 24963, - "pto": 31372, - "pto": 34092, - "pton": 19780, - "pts": 5886, - "ptsd": 23973, - "ptv": 42402, - "pu": 755, - "pu": 11780, - "pub": 20720, - "pub": 6301, - "puberty": 44122, - "pubg": 31496, - "publ": 3434, - "publi": 1617, - "public": 3592, - "public": 2122, - "publica": 49007, - "publication": 13538, - "publications": 27334, - "publichealth": 35872, - "publicity": 20831, - "publicly": 18554, - "publish": 19032, - "published": 4311, - "publisher": 20455, - "publishers": 25222, - "publishes": 35633, - "publishing": 10994, - 
"publix": 47985, - "pubs": 21099, - "puc": 48779, - "puck": 17550, - "pud": 39234, - "pudding": 14025, - "puddle": 33545, - "pue": 20161, - "pueblo": 33076, - "puer": 8968, - "puerto": 12289, - "puertor": 22757, - "puertorico": 26356, - "puff": 44477, - "puff": 17184, - "puffin": 47632, - "puffs": 47453, - "puffy": 49245, - "pug": 20950, - "pug": 17739, - "pugchat": 42266, - "pugh": 41302, - "puglia": 38345, - "pugs": 39425, - "puj": 46163, - "puja": 33753, - "puk": 31811, - "pul": 2469, - "pul": 40512, - "pula": 45856, - "puli": 47293, - "pulit": 27745, - "pulitzer": 31419, - "pull": 20155, - "pull": 6857, - "pulled": 8525, - "pulling": 12897, - "pullman": 40203, - "pullover": 44020, - "pulls": 16041, - "pulmon": 32613, - "pulmonary": 39132, - "pulp": 25410, - "pulse": 40091, - "pulse": 12485, - "pulses": 42177, - "pulsion": 35398, - "pum": 37497, - "puma": 20858, - "pump": 5179, - "pump": 9173, - "pumped": 12796, - "pumping": 25150, - "pumpkin": 36386, - "pumpkin": 8842, - "pumpkins": 23787, - "pumps": 18540, - "pun": 2707, - "pun": 19929, - "punc": 43907, - "punch": 29332, - "punch": 10730, - "punched": 31689, - "punches": 35279, - "punching": 33468, - "punctu": 31565, - "punctuation": 47051, - "pundit": 41466, - "pune": 32593, - "pune": 14488, - "pung": 45420, - "puni": 11479, - "punish": 34569, - "punished": 31598, - "punisher": 38509, - "punishment": 19099, - "punjab": 19405, - "punjab": 12883, - "punjabi": 25430, - "punk": 28933, - "punk": 7246, - "punks": 47171, - "puns": 35231, - "punt": 32699, - "punta": 34112, - "punter": 47092, - "pup": 11926, - "pup": 11302, - "pupil": 27265, - "pupils": 13628, - "pupp": 7116, - "puppet": 18439, - "puppets": 28475, - "puppies": 14820, - "puppy": 25431, - "puppy": 6829, - "puppylove": 40849, - "pups": 20778, - "pur": 1727, - "pur": 6265, - "pura": 25596, - "puram": 46174, - "purcell": 46065, - "purch": 8384, - "purchase": 5481, - "purchased": 13399, - "purchases": 21887, - "purchasing": 20718, - "purdu": 40691, - 
"purdue": 22280, - "pure": 14202, - "pure": 5979, - "puree": 45474, - "purely": 32459, - "puremichigan": 39783, - "purest": 45497, - "purge": 33514, - "puri": 16910, - "puri": 21974, - "purification": 47724, - "purity": 29780, - "purple": 17837, - "purple": 5496, - "purpose": 33492, - "purpose": 7391, - "purposes": 22020, - "purr": 49262, - "purr": 46343, - "purse": 16480, - "pursue": 19463, - "pursuing": 26424, - "pursuit": 16469, - "purée": 40981, - "pus": 13841, - "pusa": 40825, - "push": 16028, - "push": 6831, - "pushaw": 35407, - "pushaward": 35448, - "pushawards": 47184, - "pushed": 16155, - "pushes": 23828, - "pushing": 11549, - "put": 29535, - "put": 1983, - "putin": 10693, - "putnam": 40235, - "puts": 7898, - "putt": 30279, - "putter": 44723, - "putting": 5154, - "puzz": 19760, - "puzzle": 12875, - "puzzles": 27986, - "pv": 14517, - "pv": 13495, - "pvc": 26959, - "pvp": 44172, - "pvt": 29898, - "pw": 19419, - "pw": 16067, - "pwc": 22965, - "px": 24790, - "px": 10262, - "pxrtg": 36262, - "py": 4005, - "py": 7504, - "pye": 31099, - "pyeongchang": 36066, - "pyg": 41450, - "pyram": 14405, - "pyramid": 18725, - "pyramids": 36877, - "pyrene": 36740, - "pyrenees": 39744, - "pyro": 39762, - "python": 13370, - "pz": 48361, - "pé": 43167, - "q": 80, - "q": 336, - "qa": 24944, - "qa": 16360, - "qad": 27844, - "qadri": 35672, - "qaeda": 31246, - "qanda": 48672, - "qanon": 19182, - "qant": 35404, - "qantas": 43250, - "qatar": 32804, - "qatar": 10872, - "qb": 8073, - "qbs": 38188, - "qc": 17406, - "qe": 30974, - "qf": 27215, - "qi": 25054, - "qi": 11256, - "qing": 46522, - "qing": 34339, - "ql": 28366, - "qld": 23039, - "qld": 13765, - "qldpol": 42296, - "qm": 42148, - "qotd": 24504, - "qpr": 24788, - "qq": 31960, - "qr": 18193, - "qs": 14364, - "qt": 15013, - "qtr": 44803, - "qu": 666, - "qu": 28646, - "qua": 20363, - "quack": 45575, - "quad": 11656, - "quad": 13419, - "quadcopter": 39792, - "quadru": 35831, - "quaid": 34265, - "quail": 34392, - "quaint": 45976, - 
"quake": 8421, - "quaker": 43395, - "quakes": 24572, - "qual": 9979, - "qual": 32405, - "qualcomm": 38683, - "quali": 4574, - "qualification": 21508, - "qualifications": 35225, - "qualified": 11927, - "qualifier": 18733, - "qualifiers": 21388, - "qualifies": 35820, - "qualify": 17019, - "qualifying": 11895, - "qualitative": 45847, - "qualities": 20488, - "quality": 28545, - "quality": 3027, - "quan": 11669, - "quan": 27490, - "quand": 28198, - "quant": 15050, - "quanti": 31540, - "quantitative": 40583, - "quantities": 33917, - "quantity": 26920, - "quantum": 15320, - "quar": 3856, - "quare": 42549, - "quarry": 27601, - "quart": 7851, - "quarter": 8816, - "quarter": 6632, - "quarterback": 16545, - "quarterfinal": 37992, - "quarterfinals": 28971, - "quarterly": 23350, - "quarters": 10146, - "quartet": 18056, - "quartz": 17752, - "quat": 25715, - "quattro": 40300, - "quay": 40276, - "quay": 17304, - "que": 1147, - "que": 2319, - "quebec": 15373, - "queen": 6407, - "queen": 2997, - "queenof": 44398, - "queens": 22943, - "queens": 9330, - "queensland": 15168, - "queer": 38874, - "queer": 18161, - "quel": 39774, - "quel": 21879, - "quen": 23876, - "quen": 38324, - "quent": 23808, - "quentin": 27530, - "quer": 17378, - "quer": 26859, - "quered": 23210, - "queries": 32958, - "querque": 30338, - "query": 27464, - "ques": 25328, - "ques": 7715, - "queso": 40110, - "quest": 31653, - "quest": 4846, - "questi": 2391, - "question": 18961, - "question": 4382, - "questionable": 30733, - "questioned": 31847, - "questioning": 24887, - "questions": 3883, - "quests": 44611, - "quet": 8513, - "quets": 39055, - "quetta": 38326, - "quette": 18993, - "queu": 32705, - "queue": 18549, - "queues": 40649, - "queuing": 44082, - "quez": 18677, - "quezon": 41117, - "qui": 1912, - "qui": 18046, - "quic": 26474, - "quiche": 47723, - "quick": 5969, - "quick": 3712, - "quicker": 29211, - "quickest": 37734, - "quickly": 7787, - "quid": 30732, - "quie": 43875, - "quien": 43482, - "quiere": 42723, - 
"quiero": 32567, - "quiet": 17853, - "quiet": 7557, - "quietly": 22208, - "quig": 44690, - "quil": 12305, - "quill": 48951, - "quilt": 23977, - "quilted": 46052, - "quin": 8607, - "quin": 17167, - "quincy": 27640, - "quind": 32339, - "quinn": 12306, - "quinoa": 26703, - "quins": 39701, - "quint": 26898, - "quinta": 47446, - "quinte": 22098, - "quintess": 37538, - "quintet": 35125, - "quipment": 42813, - "quir": 15943, - "quirky": 25044, - "quis": 15064, - "quist": 25128, - "quit": 19358, - "quit": 11140, - "quite": 4135, - "quito": 35828, - "quits": 32505, - "quitting": 33871, - "quity": 33133, - "quiz": 31197, - "quiz": 8344, - "quizz": 35041, - "quo": 3046, - "quo": 28127, - "quoi": 45549, - "quot": 5452, - "quot": 47587, - "quota": 42097, - "quotation": 49195, - "quote": 15446, - "quote": 4020, - "quoted": 27706, - "quoteoftheday": 19975, - "quotes": 5808, - "quoting": 31651, - "qur": 37782, - "quran": 19690, - "qureshi": 46307, - "qvist": 42322, - "qx": 45038, - "r": 81, - "r": 337, - "ra": 559, - "ra": 1735, - "raa": 44344, - "rab": 14816, - "rab": 33224, - "rabb": 6875, - "rabbi": 20959, - "rabbit": 10274, - "rabbits": 27028, - "rabhu": 25806, - "rable": 10182, - "rac": 1773, - "rac": 30462, - "raccoon": 29516, - "race": 10978, - "race": 2471, - "racec": 18814, - "racecourse": 25036, - "raced": 36021, - "racer": 16798, - "racers": 33603, - "races": 8605, - "raceway": 24650, - "rach": 6876, - "rach": 33429, - "racha": 21952, - "racha": 35022, - "rachael": 29095, - "rachel": 13511, - "rachel": 8029, - "raci": 33381, - "racial": 13801, - "racially": 43577, - "racing": 23306, - "racing": 3699, - "racism": 11276, - "racist": 9684, - "racists": 41777, - "rack": 24600, - "rack": 12034, - "racket": 37691, - "racks": 21191, - "rad": 4473, - "rad": 8238, - "rada": 30437, - "radar": 9672, - "radcliffe": 33096, - "rade": 44494, - "rade": 17911, - "rader": 45002, - "radford": 45800, - "radha": 43122, - "radi": 5772, - "radial": 42028, - "radiance": 45670, - "radiant": 
25614, - "radiation": 18210, - "radiator": 39372, - "radic": 18082, - "radical": 13712, - "radicals": 45903, - "radio": 7176, - "radio": 2638, - "radioactive": 34704, - "radiodisney": 36483, - "radiohead": 39472, - "radiology": 29684, - "radios": 43669, - "radish": 37789, - "radius": 37570, - "rado": 29784, - "rae": 21646, - "rae": 15051, - "rael": 45390, - "raer": 44561, - "raf": 11495, - "raf": 11490, - "rafa": 14352, - "rafa": 24850, - "rafael": 38221, - "rafael": 19216, - "rafaelnadal": 49219, - "raff": 34900, - "raffic": 32928, - "raffle": 13752, - "raffles": 43489, - "rafi": 35304, - "raft": 9233, - "rafting": 36309, - "rag": 13958, - "rag": 20687, - "rage": 8593, - "rages": 34253, - "ragh": 35642, - "ragha": 40972, - "raging": 25015, - "ragn": 24125, - "ragnar": 34385, - "ragnarok": 41856, - "ragon": 34768, - "rags": 47838, - "rah": 12277, - "rah": 8766, - "raheem": 43317, - "rahim": 24152, - "rahman": 19680, - "rahu": 13129, - "rahul": 37239, - "rahul": 17440, - "rahulg": 27510, - "rahulgandhi": 28293, - "rai": 9165, - "rai": 9638, - "raid": 6877, - "raided": 43417, - "raider": 27368, - "raider": 21455, - "raidernation": 47901, - "raiders": 11817, - "raids": 26655, - "rail": 4573, - "rail": 6879, - "raila": 47273, - "railminindia": 35557, - "railroad": 17080, - "rails": 23427, - "railway": 27614, - "railway": 7856, - "railwayana": 46750, - "railways": 20765, - "raim": 45785, - "rain": 3128, - "rain": 2443, - "raina": 30564, - "rainbow": 24562, - "rainbow": 6286, - "rainbows": 30483, - "raine": 49038, - "raine": 6871, - "rained": 32310, - "rainf": 15024, - "rainfall": 15350, - "rainforest": 22823, - "rainier": 37850, - "raining": 13964, - "rains": 14272, - "rainy": 10222, - "rais": 14729, - "raise": 24249, - "raise": 5078, - "raised": 6027, - "raiser": 33555, - "raises": 13297, - "raisethe": 47109, - "raisin": 36864, - "raising": 6883, - "raj": 5958, - "raj": 10813, - "raja": 46069, - "raja": 19150, - "rajan": 46595, - "rajas": 16185, - "rajasthan": 18017, - 
"raje": 21899, - "rajesh": 43602, - "raji": 27569, - "rajini": 29600, - "rajini": 40622, - "rajinikanth": 32922, - "rajiv": 40197, - "rajkumar": 49304, - "rajput": 47572, - "raju": 47029, - "rak": 13523, - "rak": 26287, - "rake": 26825, - "rake": 32712, - "rakesh": 41083, - "ral": 8062, - "ral": 1406, - "rale": 14192, - "raleigh": 18207, - "rall": 23249, - "rallies": 25230, - "rally": 18882, - "rally": 5041, - "rallying": 36836, - "ralph": 25290, - "ralph": 12234, - "ram": 1976, - "ram": 2007, - "rama": 22112, - "ramad": 12736, - "ramadan": 15547, - "ramadhan": 47415, - "raman": 39816, - "ramapho": 43963, - "ramaphosa": 44993, - "ramatta": 49112, - "rambo": 41855, - "ramcharan": 45275, - "rame": 47745, - "ramen": 18892, - "ramesh": 48640, - "ramesh": 40186, - "rami": 43016, - "ramirez": 23877, - "ramon": 27958, - "ramone": 47201, - "ramos": 21046, - "ramp": 14271, - "rampage": 32077, - "rampant": 41985, - "ramps": 35257, - "rams": 10292, - "ramsay": 26259, - "ramsey": 19215, - "ran": 1433, - "ran": 4031, - "rana": 22143, - "ranbir": 40881, - "rance": 29034, - "ranch": 43955, - "ranch": 10659, - "rancho": 26258, - "rand": 5628, - "rand": 18718, - "randall": 23639, - "rande": 21469, - "randolph": 29899, - "random": 11396, - "random": 6160, - "randomly": 17272, - "rands": 39153, - "randy": 29479, - "randy": 13279, - "rane": 28852, - "rang": 4043, - "rang": 24377, - "range": 13627, - "range": 3818, - "ranger": 31472, - "ranger": 13593, - "rangers": 7664, - "ranges": 25685, - "ranging": 25946, - "rani": 29264, - "rani": 22631, - "rank": 11501, - "ranked": 8307, - "rankin": 37539, - "ranking": 12347, - "rankings": 12596, - "ranks": 14469, - "rano": 18608, - "rans": 46259, - "ransom": 28523, - "ransom": 34646, - "ransomware": 33815, - "rant": 46467, - "rant": 9819, - "rants": 34014, - "ranveer": 32402, - "ranveer": 41482, - "ranveerofficial": 42116, - "rao": 16913, - "rap": 7773, - "rap": 7348, - "rape": 46099, - "rape": 10070, - "raped": 23700, - "rapha": 22754, - 
"raphael": 30091, - "rapi": 8610, - "rapid": 47697, - "rapid": 12205, - "rapidly": 16710, - "rapids": 18848, - "raping": 44926, - "rapist": 33360, - "rapp": 19283, - "rapper": 11860, - "rappers": 30315, - "rapping": 42864, - "raps": 37887, - "raptor": 26762, - "raptors": 17035, - "raq": 39787, - "raq": 43312, - "raqqa": 47074, - "raquel": 44338, - "rar": 26819, - "rar": 24605, - "rard": 21012, - "rare": 18992, - "rare": 3865, - "rarely": 17315, - "rarest": 43237, - "rarity": 45862, - "ras": 23492, - "ras": 8224, - "rasc": 30085, - "rascal": 43481, - "rash": 14917, - "rash": 30608, - "rashad": 46527, - "rasheed": 41638, - "rashi": 19426, - "rashid": 26757, - "rasp": 10487, - "raspberries": 37742, - "raspberry": 40162, - "raspberry": 13615, - "raspberrypi": 43934, - "rass": 45654, - "rasta": 47002, - "rat": 3806, - "rat": 8985, - "rata": 28568, - "ratchet": 25078, - "rate": 5068, - "rated": 8183, - "rates": 6864, - "rath": 18268, - "rath": 39772, - "rather": 5252, - "rati": 11486, - "rating": 10567, - "ratings": 14176, - "ratio": 15893, - "ration": 27002, - "ration": 35662, - "rational": 33086, - "ratna": 49078, - "ratri": 32288, - "rats": 19043, - "ratt": 20737, - "ratt": 34785, - "rattle": 40824, - "rattle": 41839, - "rau": 27744, - "raul": 30218, - "raun": 41169, - "rav": 14367, - "rav": 23606, - "rave": 38784, - "rave": 17601, - "ravel": 27927, - "raven": 10269, - "raven": 16803, - "ravens": 17946, - "ravi": 22947, - "ravi": 19538, - "ravin": 39099, - "raving": 45807, - "raviol": 41104, - "ravioli": 43460, - "raw": 10166, - "raw": 6323, - "rawlings": 40662, - "rax": 38520, - "ray": 5312, - "ray": 3077, - "raya": 29991, - "raymond": 16683, - "rayn": 47852, - "rayon": 47900, - "rays": 11064, - "raz": 9700, - "raz": 19087, - "raza": 37724, - "razer": 33832, - "razor": 24934, - "razor": 21300, - "razz": 43769, - "rb": 12740, - "rb": 7477, - "rbc": 37500, - "rbi": 15687, - "rbs": 29102, - "rc": 7575, - "rc": 7457, - "rca": 33942, - "rcb": 45240, - "rcmp": 31489, - 
"rcn": 49370, - "rctid": 49223, - "rd": 13501, - "rd": 1973, - "rda": 45755, - "rdr": 44364, - "rds": 32378, - "re": 515, - "re": 810, - "rea": 11521, - "reach": 4483, - "reach": 4279, - "reached": 6878, - "reaches": 14462, - "reaching": 11358, - "react": 36566, - "react": 15065, - "reacted": 42515, - "reacting": 40595, - "reaction": 7189, - "reactions": 18438, - "reactive": 42072, - "reactjs": 46173, - "reactor": 32037, - "reacts": 23115, - "read": 933, - "read": 1199, - "reader": 9884, - "readers": 10335, - "readiness": 28131, - "reading": 17556, - "reading": 2337, - "readingfc": 47428, - "readings": 23361, - "reads": 6597, - "ready": 17351, - "ready": 1112, - "reagan": 17767, - "real": 2017, - "real": 1532, - "realdonaldtrump": 7025, - "reale": 5930, - "realest": 45855, - "realestate": 32937, - "realestate": 6569, - "reali": 4185, - "realis": 38114, - "realise": 14773, - "realised": 17945, - "realising": 39537, - "realism": 20024, - "realist": 30248, - "realistic": 16157, - "realities": 32443, - "reality": 46802, - "reality": 5004, - "realization": 40402, - "realize": 7538, - "realized": 10489, - "realizes": 42918, - "realizing": 23284, - "reall": 39686, - "really": 43249, - "really": 1414, - "realm": 23083, - "realmadrid": 27866, - "realms": 43033, - "realness": 46761, - "realtime": 44002, - "realtime": 38203, - "realtor": 18038, - "realtors": 31759, - "realty": 20471, - "ream": 37242, - "ream": 15219, - "rean": 48477, - "reap": 31334, - "reaper": 29922, - "rear": 39652, - "rear": 10223, - "reas": 9121, - "reason": 12882, - "reason": 3893, - "reasonable": 18558, - "reasonably": 38589, - "reasoning": 30341, - "reasons": 5686, - "reau": 32398, - "reb": 12370, - "reb": 18796, - "reba": 48543, - "rebate": 43817, - "rebe": 25227, - "rebec": 10774, - "rebecca": 12892, - "rebel": 8185, - "rebel": 12248, - "rebellion": 22170, - "rebels": 13623, - "rebirth": 33303, - "reboot": 22385, - "reborn": 30229, - "reboun": 43381, - "rebound": 31280, - "rebounds": 19190, - 
"rebs": 28164, - "rebu": 43162, - "rebuild": 20022, - "rebuilding": 30880, - "rebuilt": 33137, - "rec": 1020, - "rec": 11243, - "recall": 15151, - "recalled": 32142, - "recalling": 47855, - "recalls": 24740, - "recap": 29816, - "recap": 8337, - "recaps": 47997, - "recard": 35536, - "rece": 1890, - "recei": 2148, - "receip": 38503, - "receipt": 30479, - "receipts": 41181, - "receive": 4800, - "received": 4178, - "receiver": 17659, - "receivers": 45294, - "receives": 10027, - "receiving": 7252, - "recent": 3969, - "recently": 4482, - "recep": 17450, - "reception": 8364, - "receptions": 46881, - "receptor": 41835, - "recess": 38182, - "recession": 27176, - "recharge": 29396, - "rechargeable": 37516, - "reci": 2037, - "recipe": 28923, - "recipe": 4614, - "recipeoftheday": 38727, - "recipes": 9243, - "recipi": 10136, - "recipient": 13703, - "recipients": 18940, - "recipro": 41789, - "recital": 23457, - "recite": 48824, - "reck": 11715, - "reckless": 26284, - "reckon": 23854, - "recl": 42277, - "reclaim": 35969, - "reclaimed": 32648, - "reco": 2535, - "reco": 46038, - "recogn": 6343, - "recogni": 5329, - "recognise": 19824, - "recognised": 20986, - "recognising": 48423, - "recognition": 9415, - "recognizable": 47240, - "recognize": 10905, - "recognized": 9929, - "recognizes": 26909, - "recognizing": 19666, - "recomm": 4540, - "recommend": 11628, - "recommend": 8942, - "recommendation": 20118, - "recommendations": 16516, - "recommended": 11100, - "recommending": 44301, - "recommends": 22940, - "recon": 15371, - "recon": 28996, - "reconciliation": 26451, - "reconstruction": 24955, - "recor": 1723, - "record": 21328, - "record": 2717, - "recorded": 9392, - "recorder": 26747, - "recording": 48237, - "recording": 6942, - "recordings": 19715, - "records": 4529, - "recover": 16785, - "recovered": 16444, - "recovering": 19005, - "recovers": 47935, - "recovery": 6591, - "recre": 22148, - "recreate": 29775, - "recreated": 40888, - "recreating": 48224, - "recreation": 17331, - 
"recreational": 24329, - "recru": 4745, - "recruit": 9011, - "recruit": 15585, - "recruited": 36518, - "recruiter": 43120, - "recruiters": 46542, - "recruiting": 10533, - "recruitment": 10541, - "recruits": 22647, - "recs": 33069, - "rectan": 43041, - "rectangular": 43321, - "rector": 41585, - "recu": 26798, - "recur": 19983, - "recurring": 35912, - "recy": 6790, - "recycla": 40659, - "recyclable": 48907, - "recycle": 19366, - "recycled": 16829, - "recycling": 12566, - "red": 1893, - "red": 736, - "redbubble": 46137, - "redbull": 29483, - "redbull": 29219, - "redcarpet": 32259, - "redcross": 30659, - "redd": 22149, - "redd": 40618, - "redding": 41061, - "reddish": 43383, - "reddit": 15226, - "reddy": 23028, - "rede": 10913, - "redeem": 37449, - "redefining": 46352, - "redemption": 20233, - "redesign": 24188, - "redesigned": 33111, - "redevelopment": 30322, - "redhead": 36267, - "redi": 7976, - "redman": 44753, - "redmond": 39627, - "rednation": 28180, - "rednationrising": 28262, - "redneck": 39105, - "redness": 22626, - "redo": 42524, - "redon": 48506, - "redro": 37722, - "reds": 11221, - "redskins": 19023, - "redsox": 19144, - "reduc": 5015, - "reduce": 6604, - "reduced": 10821, - "reduces": 20539, - "reducing": 13836, - "reduction": 12219, - "reductions": 48263, - "redux": 43014, - "redvelvet": 41845, - "redwings": 31058, - "redwood": 31748, - "ree": 9282, - "ree": 5813, - "reebok": 26734, - "reece": 30457, - "reed": 26209, - "reed": 10435, - "reedus": 32865, - "reef": 46557, - "reef": 15624, - "reefs": 34459, - "reel": 34467, - "reel": 17166, - "reels": 48127, - "reem": 48891, - "reen": 21638, - "reen": 23679, - "rees": 18314, - "reese": 20929, - "reeves": 23060, - "ref": 4067, - "ref": 9591, - "refe": 5624, - "refer": 18425, - "refer": 22325, - "referee": 20398, - "referees": 45583, - "referen": 13535, - "reference": 10214, - "references": 24009, - "referendum": 16732, - "referr": 47784, - "referral": 30219, - "referred": 22969, - "referring": 29797, - 
"refers": 30069, - "refill": 37859, - "refin": 13455, - "refined": 26098, - "refinery": 31393, - "refining": 48406, - "reflec": 4608, - "reflect": 13373, - "reflected": 28732, - "reflecting": 19700, - "reflection": 11884, - "reflections": 16647, - "reflective": 27008, - "reflects": 15821, - "reflex": 45756, - "reflex": 36050, - "reform": 45678, - "reform": 8875, - "reformation": 45119, - "reformed": 40880, - "reforms": 19274, - "refr": 34850, - "refre": 11995, - "refresh": 17836, - "refresh": 23288, - "refreshed": 35925, - "refresher": 41481, - "refreshing": 14159, - "refreshments": 31127, - "refriger": 21076, - "refrigerator": 36662, - "refs": 35595, - "refu": 3545, - "refuge": 5638, - "refuge": 17432, - "refugee": 11556, - "refugees": 42687, - "refugees": 8316, - "refund": 28899, - "refur": 15519, - "refurbi": 18259, - "refurbished": 26190, - "refurbishment": 35803, - "refusal": 46547, - "refuse": 16412, - "refused": 17190, - "refuses": 20085, - "refusing": 26704, - "reg": 5472, - "reg": 12353, - "regain": 37510, - "regal": 31512, - "regal": 25028, - "regan": 34062, - "regar": 5881, - "regard": 21801, - "regarded": 32017, - "regarding": 8493, - "regardless": 17220, - "regards": 23079, - "regatta": 26316, - "regen": 46545, - "regency": 29341, - "regeneration": 29257, - "regent": 30455, - "regents": 46710, - "regg": 12757, - "reggae": 37821, - "reggae": 15214, - "reggie": 21872, - "regi": 1608, - "regime": 11378, - "regiment": 18603, - "regin": 23287, - "regina": 16841, - "region": 16542, - "region": 4341, - "regional": 5552, - "regionals": 26043, - "regions": 14530, - "regis": 28094, - "register": 3967, - "registered": 10254, - "registering": 33510, - "registr": 29193, - "registration": 7302, - "registrations": 38423, - "registry": 30020, - "rego": 47351, - "regram": 30329, - "regrann": 48802, - "regre": 8627, - "regression": 43733, - "regret": 14374, - "regrets": 23231, - "regu": 3411, - "regui": 46722, - "regul": 11847, - "regular": 14882, - "regular": 6307, - 
"regularly": 17263, - "regulat": 14575, - "regulate": 33494, - "regulated": 31384, - "regulating": 48156, - "regulation": 14267, - "regulations": 16654, - "regulator": 30364, - "regulators": 35837, - "regulatory": 17717, - "reh": 21492, - "reha": 10193, - "rehab": 16973, - "rehabil": 17930, - "rehabilitation": 21042, - "rehear": 7273, - "rehearsal": 11482, - "rehearsals": 17977, - "rehearsing": 23125, - "rehman": 39206, - "rei": 15343, - "rei": 26033, - "reic": 41230, - "reich": 48589, - "reich": 28929, - "reid": 45125, - "reid": 11744, - "reig": 13092, - "reign": 41419, - "reign": 14827, - "reigning": 28409, - "reigns": 21217, - "reiki": 46960, - "reilly": 28120, - "reim": 35421, - "reimagined": 46799, - "reimbur": 39857, - "rein": 9240, - "rein": 45009, - "reina": 43847, - "reinde": 23810, - "reindeer": 25072, - "reinfor": 48161, - "reinforced": 41909, - "reinst": 33969, - "reinvent": 38171, - "reissue": 34042, - "reiter": 35394, - "rejec": 9958, - "reject": 22435, - "rejected": 17505, - "rejection": 32264, - "rejects": 23155, - "rejo": 20150, - "rejoice": 24712, - "rejuven": 26332, - "rek": 47542, - "rek": 19201, - "rel": 1825, - "rel": 5233, - "rela": 4362, - "reland": 15220, - "relat": 27192, - "relatable": 31010, - "relate": 17520, - "related": 5880, - "relates": 36064, - "relating": 27373, - "relation": 4561, - "relation": 16207, - "relations": 10100, - "relationship": 47239, - "relationship": 5837, - "relationships": 10610, - "relative": 17265, - "relatively": 18351, - "relatives": 21981, - "relax": 6777, - "relax": 9035, - "relaxation": 22194, - "relaxed": 18999, - "relaxing": 10256, - "relay": 12403, - "relays": 28404, - "rele": 1602, - "release": 29100, - "release": 2706, - "released": 3410, - "releases": 7393, - "releasethe": 44008, - "releasing": 10321, - "releg": 23378, - "relegated": 45884, - "relegation": 35040, - "relent": 22213, - "relentless": 27207, - "relessly": 33927, - "relev": 9349, - "relevance": 31400, - "relevant": 10568, - "reli": 2674, 
- "reliability": 27220, - "reliable": 13714, - "reliance": 27727, - "relic": 27802, - "relics": 43208, - "relief": 7518, - "relies": 41579, - "relieve": 28623, - "relieved": 36597, - "religi": 4940, - "religion": 8803, - "religions": 31189, - "religious": 8289, - "relish": 35550, - "relive": 23939, - "reliving": 47558, - "rell": 28802, - "rell": 7127, - "rella": 9952, - "relle": 31390, - "reloaded": 38908, - "relocated": 46791, - "relocation": 39198, - "rels": 23320, - "relu": 32058, - "reluct": 32549, - "reluctant": 45552, - "rely": 4158, - "relying": 42168, - "rem": 15098, - "rem": 21637, - "rema": 4569, - "remain": 29144, - "remain": 6415, - "remainder": 41672, - "remained": 23714, - "remaining": 11392, - "remains": 6807, - "remake": 16234, - "remark": 11136, - "remarkable": 12404, - "remarkably": 39087, - "remarks": 15001, - "remastered": 24932, - "rematch": 26473, - "rembrandt": 45972, - "reme": 20071, - "remedi": 18442, - "remedies": 25581, - "remedy": 25794, - "remem": 7966, - "rememb": 7062, - "remember": 22045, - "remember": 2195, - "remembered": 11763, - "remembering": 8135, - "remembers": 12551, - "remembrance": 40321, - "remembrance": 15860, - "remembranceday": 48333, - "rement": 7173, - "rements": 12667, - "remi": 41693, - "remin": 3216, - "remind": 9868, - "reminded": 12309, - "reminder": 5565, - "reminders": 34121, - "reminding": 19976, - "reminds": 8303, - "remington": 43527, - "reminis": 17723, - "reminiscent": 41704, - "reminiscing": 32552, - "remix": 8519, - "remixes": 31011, - "remn": 29127, - "remnants": 39032, - "remo": 4064, - "remo": 33259, - "remodel": 34159, - "remodel": 37495, - "remodeling": 41432, - "remote": 47163, - "remote": 9687, - "remotely": 32375, - "removable": 44095, - "removal": 13679, - "remove": 9709, - "removed": 10289, - "remover": 44267, - "removes": 29018, - "removing": 18504, - "remy": 30434, - "ren": 737, - "ren": 2596, - "rena": 12591, - "renais": 15409, - "renaissance": 16007, - "renal": 36096, - "renamed": 31535, - 
"renault": 17600, - "rence": 19245, - "rence": 1553, - "rences": 8545, - "rend": 33932, - "rend": 22851, - "render": 39752, - "render": 13024, - "rendered": 23652, - "rendering": 21339, - "renders": 39419, - "rendez": 43293, - "rendezvous": 45644, - "rendition": 28891, - "rendon": 46272, - "rendous": 49403, - "rends": 38842, - "rene": 15438, - "rene": 12597, - "renee": 23480, - "reneg": 29909, - "renegade": 41229, - "renergy": 37151, - "renew": 6645, - "renew": 22015, - "renewable": 31269, - "renewable": 15941, - "renewableenergy": 33357, - "renewables": 21619, - "renewal": 21270, - "renewed": 20524, - "renfre": 45043, - "reng": 36795, - "reno": 11520, - "reno": 12831, - "renov": 9984, - "renovated": 23839, - "renovation": 17121, - "renovations": 31311, - "renowned": 14727, - "rens": 18183, - "renshaw": 44445, - "rent": 17377, - "rent": 1609, - "rental": 12193, - "rentals": 24105, - "rented": 35932, - "rential": 31692, - "renting": 37662, - "rently": 2615, - "rents": 31109, - "reo": 15963, - "reo": 26854, - "reon": 15761, - "reopen": 26883, - "reopened": 32868, - "reopening": 36663, - "reopens": 40644, - "rep": 4229, - "rep": 6487, - "repair": 8419, - "repaired": 32953, - "repairing": 38534, - "repairs": 16297, - "repar": 34065, - "repe": 5785, - "repeal": 42622, - "repeal": 23938, - "repeat": 10192, - "repeated": 27904, - "repeatedly": 26630, - "repeating": 33834, - "repeats": 39158, - "repell": 46235, - "repent": 47261, - "reper": 29085, - "repet": 38533, - "repl": 13047, - "replac": 6069, - "replace": 9466, - "replaceable": 47762, - "replaced": 13200, - "replacement": 10835, - "replaces": 27781, - "replacing": 18647, - "replay": 16875, - "repleni": 44839, - "replic": 21651, - "replica": 18125, - "replied": 24238, - "replies": 18808, - "reply": 8965, - "replying": 47599, - "repor": 2628, - "report": 2417, - "reported": 7598, - "reportedly": 10953, - "reporter": 11019, - "reporters": 18454, - "reporting": 9218, - "reports": 4908, - "reposit": 41276, - 
"repository": 46977, - "repost": 33147, - "repost": 7217, - "repostapp": 38388, - "reposting": 20223, - "reppin": 19163, - "repping": 22574, - "repre": 3397, - "represent": 8293, - "represent": 8406, - "representation": 13520, - "representative": 13175, - "representatives": 15591, - "represented": 12299, - "representing": 7561, - "represents": 14433, - "repri": 31854, - "reproduction": 35714, - "reproductive": 25522, - "reps": 14265, - "reptile": 36938, - "reptiles": 38679, - "republic": 6376, - "republic": 7185, - "republican": 9842, - "republicans": 12384, - "repur": 41852, - "req": 42411, - "requ": 10664, - "reque": 9539, - "request": 7813, - "requested": 16199, - "requesting": 33245, - "requests": 17087, - "requi": 4863, - "requiem": 40316, - "require": 14437, - "required": 8500, - "requirement": 27146, - "requirements": 12860, - "requires": 13396, - "requiring": 33425, - "requis": 42602, - "rer": 41295, - "rer": 3407, - "rera": 14301, - "rero": 21860, - "rers": 18869, - "res": 4466, - "res": 934, - "resc": 3956, - "rescheduled": 43553, - "rescu": 8618, - "rescue": 28567, - "rescue": 5718, - "rescued": 11919, - "rescues": 32439, - "rescuing": 43770, - "rese": 13000, - "resear": 6090, - "research": 25694, - "research": 2379, - "researched": 42733, - "researcher": 18334, - "researchers": 9522, - "researching": 24544, - "reseller": 35391, - "resemb": 16916, - "resemblance": 26856, - "resemble": 37230, - "resembles": 35417, - "reser": 16420, - "reserv": 11906, - "reservation": 20289, - "reservations": 19307, - "reserve": 6911, - "reserved": 19796, - "reserves": 19705, - "reservoir": 20574, - "reset": 26250, - "resh": 47432, - "reshi": 39435, - "resi": 2152, - "residen": 22311, - "residence": 11672, - "residences": 38855, - "residency": 18545, - "resident": 9016, - "residente": 44637, - "residentevil": 48393, - "residential": 11002, - "residents": 6008, - "resign": 23584, - "resignation": 24779, - "resigned": 31014, - "resigns": 29738, - "resil": 10932, - "resili": 
39212, - "resilience": 15271, - "resilient": 24694, - "resin": 24156, - "resist": 37345, - "resist": 9587, - "resistance": 7392, - "resistant": 17542, - "resisting": 43679, - "resolution": 9977, - "resolutions": 26816, - "resolve": 20787, - "resolved": 28807, - "reson": 18092, - "resonance": 42310, - "resort": 6594, - "resorts": 18839, - "resource": 43729, - "resource": 9760, - "resources": 6723, - "respec": 7466, - "respect": 31411, - "respect": 4916, - "respected": 19126, - "respectful": 24379, - "respecting": 36172, - "respective": 25817, - "respectively": 28794, - "respects": 23553, - "respir": 20771, - "respiratory": 24483, - "respon": 2421, - "respond": 12355, - "responded": 21121, - "respondents": 49253, - "responders": 25155, - "responding": 18037, - "responds": 17436, - "response": 5399, - "responses": 19006, - "responsi": 5490, - "responsibilities": 30375, - "responsibility": 11272, - "responsible": 8936, - "responsibly": 33675, - "responsive": 21544, - "ress": 34651, - "ress": 13629, - "resso": 15133, - "rest": 10974, - "rest": 2539, - "restart": 37378, - "restaur": 3775, - "restaurant": 41930, - "restaurant": 4489, - "restaurants": 11714, - "rested": 46020, - "resting": 18044, - "restless": 36724, - "restling": 30076, - "resto": 11118, - "resto": 41666, - "restock": 34060, - "restocked": 36966, - "restor": 8984, - "restoration": 11989, - "restorative": 46509, - "restore": 14008, - "restored": 14238, - "restoring": 24406, - "restra": 25424, - "restric": 11036, - "restricted": 27197, - "restriction": 44282, - "restrictions": 19884, - "restroom": 43423, - "restructuring": 43260, - "rests": 33775, - "resu": 10095, - "resul": 2655, - "result": 5659, - "resulted": 26449, - "resulting": 24581, - "results": 3790, - "resume": 15077, - "resumes": 30268, - "resur": 14865, - "resurg": 45962, - "resurgence": 47692, - "resurrec": 18487, - "resurrection": 25811, - "resusc": 47523, - "ret": 20500, - "ret": 10048, - "reta": 20153, - "retail": 14910, - "retail": 6455, - 
"retailer": 22549, - "retailers": 19418, - "retain": 24430, - "retained": 42737, - "retaining": 35571, - "retains": 42583, - "retali": 33101, - "retar": 29964, - "retarded": 44111, - "retention": 26247, - "rethink": 29078, - "rethinking": 42951, - "reti": 4721, - "retin": 31270, - "retina": 36919, - "retire": 18846, - "retired": 11477, - "retirement": 9205, - "retires": 29060, - "retiring": 21200, - "retrac": 32735, - "retreat": 11210, - "retri": 16918, - "retriever": 28394, - "retro": 6535, - "retro": 7755, - "retrogamer": 47220, - "retrogaming": 11316, - "retrospective": 27105, - "rett": 41082, - "rett": 8425, - "rette": 33066, - "return": 43042, - "return": 3458, - "returned": 10476, - "returning": 9290, - "returns": 5020, - "retwee": 48190, - "retweet": 3195, - "retweeted": 12705, - "retweeting": 32345, - "retweets": 10160, - "rety": 41550, - "reu": 20255, - "reu": 40371, - "reuben": 40450, - "reunion": 10247, - "reunite": 26179, - "reunited": 13516, - "reusable": 30395, - "reuse": 26535, - "reut": 15210, - "reuters": 15569, - "rev": 8424, - "rev": 11789, - "revamp": 29819, - "revamped": 36420, - "revan": 45277, - "reve": 3115, - "reveal": 8052, - "revealed": 7171, - "revealing": 21321, - "reveals": 6621, - "revel": 14133, - "revelation": 24053, - "revelations": 36163, - "reven": 10171, - "revenge": 12717, - "revenue": 10637, - "revenues": 33348, - "rever": 14829, - "rever": 41913, - "revere": 44187, - "reverend": 34407, - "revers": 20726, - "reversal": 33367, - "reverse": 12812, - "reversed": 42485, - "reversi": 31601, - "reversible": 34212, - "revi": 8317, - "review": 2268, - "reviewed": 16678, - "reviewer": 36409, - "reviewers": 48195, - "reviewing": 20458, - "reviews": 7227, - "revise": 46801, - "revised": 22806, - "revising": 46882, - "revision": 20335, - "revisit": 26568, - "revisited": 34302, - "revisiting": 33144, - "revit": 26367, - "revitalization": 46923, - "revival": 14142, - "revive": 26450, - "revived": 42912, - "revo": 28660, - "revol": 13447, - 
"revolt": 31697, - "revolu": 4900, - "revolution": 17699, - "revolution": 6644, - "revolutionary": 14734, - "revolver": 38747, - "revolving": 47230, - "revs": 49286, - "revue": 43428, - "rew": 37564, - "rewar": 15857, - "reward": 11223, - "rewarded": 27163, - "rewarding": 23351, - "rewards": 15235, - "rewatch": 35610, - "rewatching": 41287, - "rewind": 26867, - "rewrite": 45218, - "rex": 13002, - "rex": 10904, - "rexperience": 33924, - "rey": 9681, - "rey": 4517, - "reyes": 18255, - "reykja": 47571, - "reyn": 11998, - "reynolds": 14309, - "reys": 48284, - "rez": 27597, - "rez": 15192, - "reza": 35888, - "rf": 35529, - "rf": 16368, - "rfc": 19003, - "rfid": 40204, - "rg": 33055, - "rg": 14897, - "rgb": 36128, - "rgv": 33685, - "rh": 8745, - "rh": 22404, - "rha": 19473, - "rhapso": 32532, - "rhapsody": 35774, - "rhe": 9186, - "rhea": 28612, - "rhetor": 24359, - "rhetoric": 29985, - "rhett": 42984, - "rheu": 42953, - "rhi": 21212, - "rhin": 12269, - "rhine": 22863, - "rhine": 44833, - "rhinestone": 30450, - "rhino": 41744, - "rhino": 20056, - "rhinos": 30671, - "rho": 7637, - "rhode": 39302, - "rhode": 27907, - "rhodes": 17785, - "rhon": 25882, - "rhonda": 46100, - "rhp": 27199, - "rhs": 24551, - "rhu": 23897, - "rhubarb": 30213, - "rhy": 7740, - "rhyme": 37356, - "rhymes": 33143, - "rhys": 28647, - "rhyth": 27069, - "rhythm": 16172, - "rhythmic": 46386, - "rhythms": 40872, - "ri": 553, - "ri": 2574, - "ria": 3650, - "rial": 15200, - "rian": 7788, - "rib": 44634, - "rib": 18298, - "riba": 44992, - "ribb": 10081, - "ribbon": 12114, - "ribbons": 35271, - "ribe": 46115, - "ribs": 17519, - "ric": 920, - "ric": 4798, - "rica": 14230, - "rical": 18109, - "rican": 30958, - "ricardo": 23140, - "ricci": 35783, - "ricciardo": 49282, - "rice": 36362, - "rice": 4741, - "rich": 5223, - "rich": 4021, - "richar": 9350, - "richard": 9080, - "richard": 4470, - "richards": 11372, - "richardson": 15984, - "riche": 23286, - "richer": 34138, - "riches": 37093, - "richest": 25572, - 
"richi": 38934, - "richie": 19797, - "richland": 43079, - "richmond": 34143, - "richmond": 11292, - "richter": 37591, - "rick": 6237, - "rick": 3064, - "ricket": 46161, - "ricket": 23671, - "ricks": 23111, - "ricky": 19188, - "ricky": 12814, - "rico": 37962, - "rico": 11362, - "ricotta": 38473, - "rics": 7353, - "ricul": 6980, - "rid": 18103, - "rid": 9874, - "ridd": 21990, - "ridden": 32025, - "riddle": 31839, - "ride": 15816, - "ride": 2994, - "rider": 31056, - "rider": 9707, - "riders": 10826, - "rides": 11308, - "ridg": 42646, - "ridge": 16580, - "ridge": 6352, - "ridic": 9624, - "ridiculous": 12659, - "ridiculously": 25661, - "ridin": 47869, - "riding": 6765, - "ridley": 27883, - "rie": 14824, - "rie": 5322, - "ried": 7552, - "riel": 26696, - "rien": 35237, - "rier": 40714, - "rier": 13336, - "ries": 28179, - "ries": 3059, - "riesling": 36372, - "rif": 7044, - "riff": 30359, - "rifle": 15354, - "rifles": 25678, - "rift": 26681, - "rig": 18462, - "rig": 13871, - "riga": 36626, - "rigged": 35897, - "rigging": 38160, - "riggs": 40328, - "righ": 15391, - "right": 13341, - "right": 1155, - "righte": 20762, - "righteous": 28169, - "righteousness": 42481, - "rightful": 42601, - "rightly": 42669, - "rights": 3336, - "rigid": 43138, - "rigor": 36788, - "rigorous": 41654, - "rigs": 42893, - "rihanna": 13744, - "rij": 41097, - "rik": 31136, - "rik": 27832, - "rika": 28580, - "ril": 12270, - "ril": 2388, - "riley": 35056, - "riley": 12260, - "rill": 23705, - "rilla": 43956, - "rilla": 18685, - "rim": 28147, - "rim": 12199, - "rime": 27064, - "rimin": 11527, - "rimo": 47817, - "rims": 34327, - "rin": 5859, - "rin": 11739, - "rina": 12869, - "rine": 24952, - "ring": 8318, - "ring": 2540, - "ringed": 44712, - "ringer": 35761, - "ringing": 26035, - "ringo": 38845, - "rings": 5751, - "rington": 12455, - "rink": 21497, - "rinka": 47316, - "rino": 47188, - "rinse": 48320, - "rio": 15681, - "rio": 5782, - "rion": 31623, - "rion": 34046, - "rios": 32814, - "riot": 32636, - "riot": 
14218, - "riots": 24844, - "rious": 6340, - "rip": 10353, - "rip": 4243, - "ripe": 22832, - "ripley": 41589, - "ripp": 25276, - "ripped": 17815, - "ripper": 35347, - "ripping": 29126, - "ripple": 24825, - "rips": 30182, - "rir": 36792, - "ris": 6108, - "ris": 1999, - "rise": 13641, - "rise": 3151, - "risen": 23653, - "risers": 44983, - "rises": 13362, - "riseup": 35760, - "rish": 18378, - "rish": 18927, - "rishi": 48434, - "rising": 30452, - "rising": 5448, - "risis": 37998, - "risk": 27967, - "risk": 4213, - "risking": 48155, - "risks": 12474, - "risky": 27630, - "risotto": 31471, - "rist": 40610, - "rit": 5156, - "rit": 17333, - "rita": 16178, - "ritchie": 30997, - "rite": 39318, - "rite": 18429, - "rites": 36160, - "rith": 48169, - "rith": 48850, - "riti": 32904, - "rito": 19379, - "ritos": 33507, - "ritt": 26092, - "ritter": 34854, - "ritu": 13391, - "ritual": 19712, - "rituals": 31145, - "ritz": 39151, - "ritz": 25627, - "rium": 33884, - "riv": 25113, - "rival": 13412, - "rival": 15629, - "rivalry": 19511, - "rivals": 15135, - "rive": 27588, - "rive": 34917, - "river": 5239, - "river": 2473, - "rivera": 18275, - "riverdale": 28304, - "riverfront": 44439, - "rivers": 10723, - "riverside": 15809, - "riveting": 44024, - "riviera": 25851, - "rix": 43407, - "rix": 9483, - "riya": 36908, - "riyad": 31564, - "riyadh": 33577, - "riz": 18426, - "riz": 35411, - "rizal": 41555, - "rizio": 40191, - "rizz": 34826, - "rizzo": 49076, - "rj": 26016, - "rj": 20949, - "rk": 38725, - "rk": 21422, - "rl": 18041, - "rl": 14590, - "rlly": 43222, - "rly": 25954, - "rm": 20202, - "rm": 8431, - "rmb": 49097, - "rms": 40529, - "rn": 13206, - "rn": 7666, - "rna": 24566, - "rnb": 31556, - "rnc": 35309, - "rnli": 29748, - "ro": 532, - "ro": 2795, - "roa": 8313, - "roach": 31073, - "road": 4370, - "road": 1759, - "roadhouse": 47891, - "roadmap": 30111, - "roads": 6189, - "roadsafety": 39992, - "roadshow": 21168, - "roadside": 26928, - "roadster": 28920, - "roadto": 24681, - "roadtrip": 
15094, - "roadway": 42744, - "roam": 34045, - "roaming": 29240, - "roano": 34184, - "roanoke": 36587, - "roar": 34193, - "roar": 18483, - "roaring": 26428, - "roast": 11404, - "roasted": 10479, - "roasting": 32228, - "rob": 2668, - "rob": 6442, - "robb": 14059, - "robb": 39673, - "robbed": 24163, - "robber": 35545, - "robbers": 40852, - "robbery": 16393, - "robbi": 44898, - "robbie": 37200, - "robbie": 15970, - "robbing": 47569, - "robbins": 23461, - "robby": 44128, - "robe": 23116, - "rober": 4532, - "robert": 8811, - "robert": 3929, - "roberta": 43373, - "roberto": 42645, - "roberto": 16227, - "roberts": 10366, - "robertson": 17643, - "robes": 29304, - "robi": 16743, - "robin": 6681, - "robin": 7988, - "robins": 35502, - "robinson": 8523, - "robles": 47646, - "roblo": 27481, - "roblox": 37798, - "robo": 4672, - "robo": 36057, - "robot": 46089, - "robot": 8797, - "robotic": 23975, - "robotics": 13546, - "robots": 13473, - "robson": 31113, - "robust": 22780, - "robyn": 34533, - "roc": 3268, - "roc": 13776, - "rocco": 30009, - "roch": 23788, - "rochdale": 41880, - "roche": 31776, - "rochelle": 40161, - "rochester": 18057, - "rock": 2640, - "rock": 2172, - "rockab": 39353, - "rockabilly": 45019, - "rocke": 19914, - "rocked": 16116, - "rockefeller": 35476, - "rocker": 29008, - "rockers": 32338, - "rocket": 25435, - "rocket": 8383, - "rockets": 13292, - "rockford": 41039, - "rockies": 20621, - "rockin": 12073, - "rocking": 7081, - "rockn": 24442, - "rocknroll": 27840, - "rocks": 6135, - "rockstar": 23603, - "rockstar": 18000, - "rockstargames": 27516, - "rockstars": 46639, - "rockthe": 49363, - "rockwell": 34747, - "rocky": 33481, - "rocky": 9648, - "rod": 9712, - "rod": 8291, - "roddy": 42332, - "rode": 18449, - "rodeo": 18250, - "rodgers": 17612, - "rodi": 49100, - "rodney": 21753, - "rodri": 11053, - "rodrigo": 33944, - "rodriguez": 14057, - "rods": 28618, - "roe": 27671, - "roe": 9996, - "rof": 33029, - "rofl": 48228, - "roft": 45212, - "rog": 34269, - "rog": 
34017, - "rogen": 23380, - "roger": 13929, - "roger": 7735, - "rogerfederer": 40182, - "rogers": 10661, - "rogue": 32575, - "rogue": 15162, - "roh": 14933, - "roh": 29840, - "rohan": 39848, - "rohing": 23600, - "rohingya": 26146, - "rohit": 44649, - "rohit": 24299, - "roi": 21877, - "rok": 36807, - "rol": 3393, - "rol": 7818, - "roland": 33713, - "roland": 19569, - "role": 18485, - "role": 3414, - "roles": 11871, - "rolex": 21093, - "rolf": 48606, - "roll": 4711, - "roll": 3341, - "rolled": 11982, - "roller": 21034, - "roller": 12342, - "rollercoaster": 38248, - "rollers": 36941, - "rollin": 27545, - "rolling": 24250, - "rolling": 6347, - "rollingstones": 41309, - "rollins": 27724, - "rollout": 47710, - "rollover": 39214, - "rolls": 8614, - "rolltide": 28101, - "rom": 11377, - "rom": 19205, - "roma": 44134, - "roma": 11631, - "romain": 48897, - "roman": 4416, - "roman": 7370, - "romance": 7215, - "romania": 15884, - "romanian": 30866, - "romano": 38409, - "romans": 23066, - "romantic": 41457, - "romantic": 8821, - "rome": 9406, - "rome": 5243, - "romeo": 14429, - "romero": 23694, - "romney": 19287, - "romo": 32248, - "romper": 43699, - "ron": 2393, - "ron": 3372, - "rona": 42385, - "ronal": 46194, - "ronald": 15683, - "ronaldo": 13463, - "ronan": 34971, - "rond": 31935, - "ronda": 37436, - "rondo": 43756, - "rone": 48082, - "rone": 32763, - "roni": 47234, - "ronnie": 45257, - "ronnie": 16421, - "rons": 19536, - "ront": 48881, - "roo": 1249, - "roo": 31227, - "rood": 38007, - "roof": 9120, - "roof": 6449, - "roofing": 24415, - "roofs": 34635, - "rooftop": 16319, - "rook": 35918, - "rookie": 9771, - "rookies": 31917, - "room": 8845, - "room": 1530, - "roomie": 36851, - "roommate": 19825, - "roommates": 37323, - "rooms": 6328, - "rooney": 17712, - "roos": 32938, - "roosevel": 17644, - "roosevelt": 18488, - "rooster": 46263, - "rooster": 30926, - "roosters": 43693, - "root": 25930, - "root": 9728, - "rooted": 30428, - "rooting": 25523, - "roots": 8084, - "rop": 43401, 
- "rope": 9953, - "ropes": 30506, - "ror": 8668, - "ror": 2843, - "rors": 12072, - "rory": 42804, - "rory": 17813, - "ros": 5288, - "ros": 6930, - "rosa": 14393, - "rosal": 30397, - "rosario": 33640, - "rosary": 33098, - "rosberg": 46037, - "rose": 6146, - "rose": 3568, - "roseanne": 47528, - "rosel": 33616, - "rosemary": 19472, - "rosen": 13214, - "rosen": 36424, - "rosenberg": 43558, - "rosenthal": 46990, - "roses": 9061, - "rosetta": 43800, - "rosewood": 38686, - "rosie": 43049, - "rosie": 16888, - "ross": 8801, - "ross": 2158, - "rosse": 11602, - "rossi": 24817, - "rosso": 33023, - "roster": 12487, - "roswell": 45116, - "rosy": 46705, - "rosé": 28006, - "rot": 10055, - "rot": 9643, - "rotar": 45959, - "rotary": 14654, - "rotating": 32265, - "rotation": 18089, - "rotc": 32252, - "roth": 17741, - "roth": 19139, - "rother": 23174, - "rotherham": 37687, - "rothschild": 45089, - "roti": 46940, - "roto": 34698, - "rotor": 42991, - "rots": 16642, - "rott": 34806, - "rotten": 24324, - "rotter": 22614, - "rotterdam": 23422, - "rotun": 42970, - "rou": 2964, - "rou": 34783, - "roud": 28375, - "rouge": 16209, - "rough": 11699, - "rough": 8511, - "roughly": 21910, - "roughs": 37598, - "rouhani": 39912, - "roulette": 39930, - "roun": 5602, - "round": 9403, - "round": 2522, - "roundabout": 29953, - "rounded": 26973, - "rounder": 37024, - "rounding": 40208, - "rounds": 11242, - "roundtable": 19386, - "roundup": 17503, - "roup": 29220, - "rourke": 38753, - "rous": 33645, - "rous": 34531, - "rousey": 46267, - "rout": 7502, - "rout": 41778, - "route": 5261, - "router": 29962, - "routes": 14923, - "routine": 12319, - "routines": 44074, - "routing": 44086, - "roux": 43416, - "rov": 23971, - "rove": 30130, - "rover": 12776, - "rovers": 16373, - "row": 5275, - "row": 1044, - "rowan": 26240, - "rowdy": 32141, - "rowe": 28323, - "rowed": 22615, - "rower": 43345, - "rowers": 41806, - "rowing": 12807, - "rowland": 33037, - "rowley": 48793, - "rowling": 29371, - "rown": 22287, - "rown": 
25060, - "rows": 9409, - "rox": 14111, - "rox": 41033, - "roxy": 28093, - "roy": 2128, - "roy": 6354, - "royal": 6691, - "royal": 3853, - "royale": 20630, - "royalnavy": 41545, - "royals": 13335, - "royalties": 48660, - "royalty": 18296, - "royalwedding": 27461, - "royce": 18444, - "royd": 41476, - "royo": 39357, - "roz": 28989, - "roz": 37250, - "rp": 17305, - "rp": 8174, - "rpa": 41872, - "rpg": 12445, - "rpm": 23715, - "rps": 49215, - "rr": 5311, - "rr": 9126, - "rrp": 36967, - "rrr": 18267, - "rrrr": 25561, - "rrrr": 34444, - "rs": 6978, - "rs": 1724, - "rsa": 29437, - "rsc": 48524, - "rsd": 34426, - "rsi": 39046, - "rsl": 44752, - "rsp": 16381, - "rspb": 38508, - "rspb": 36727, - "rspca": 45643, - "rss": 46466, - "rss": 22350, - "rstats": 38700, - "rsvp": 9774, - "rt": 8959, - "rt": 8991, - "rtc": 31648, - "rte": 33822, - "rte": 23322, - "rtg": 22028, - "rti": 47549, - "rtr": 43999, - "rts": 8496, - "rtw": 34673, - "ru": 681, - "ru": 13735, - "rub": 15862, - "rub": 22586, - "rubb": 19597, - "rubbed": 45239, - "rubber": 31131, - "rubber": 11331, - "rubbing": 41262, - "rubbish": 21108, - "rubble": 42230, - "ruben": 44058, - "ruben": 29722, - "rubi": 27856, - "rubin": 34128, - "rubio": 24244, - "rubs": 43422, - "ruby": 24552, - "ruby": 11493, - "ruck": 27449, - "rucker": 45402, - "rud": 35256, - "rudd": 31836, - "rude": 16548, - "rudi": 48360, - "rudol": 40927, - "rudolf": 46835, - "rudolph": 30119, - "rudy": 38226, - "rudy": 22131, - "rue": 38024, - "rue": 19276, - "rufc": 45084, - "ruff": 28177, - "ruff": 30304, - "rufus": 39322, - "rug": 4217, - "rug": 19220, - "rugby": 15091, - "rugby": 4964, - "rugbyleague": 44419, - "ruger": 48655, - "rugged": 25225, - "rugs": 29946, - "rui": 46974, - "ruin": 16256, - "ruined": 17231, - "ruining": 29952, - "ruins": 16094, - "ruiz": 27873, - "ruk": 46628, - "rukh": 43075, - "rukh": 27631, - "rule": 31643, - "rule": 6175, - "ruled": 16324, - "ruler": 26286, - "rulers": 45328, - "rules": 5272, - "ruling": 14690, - "rum": 9223, 
- "rum": 11233, - "rumb": 42432, - "rumble": 18900, - "rumi": 31428, - "rumor": 22254, - "rumored": 36694, - "rumors": 16160, - "rumour": 34296, - "rumours": 20716, - "rump": 29366, - "run": 1639, - "run": 1934, - "runaway": 28851, - "runchat": 25838, - "rundown": 41100, - "rune": 33882, - "rune": 49244, - "runner": 37370, - "runner": 7913, - "runners": 10571, - "runnin": 43130, - "running": 24451, - "running": 2761, - "runoff": 38564, - "runs": 5586, - "runway": 13927, - "rup": 7996, - "rup": 14980, - "rupaul": 44211, - "rupee": 43916, - "rupees": 44110, - "rupert": 25625, - "rupt": 23055, - "ruption": 35403, - "rural": 28801, - "rural": 8737, - "rus": 35811, - "rus": 5998, - "rush": 12148, - "rush": 6973, - "rushed": 28104, - "rusher": 48745, - "rushes": 47217, - "rushing": 20284, - "russ": 6285, - "russ": 20764, - "russell": 26122, - "russell": 8150, - "russi": 2600, - "russia": 4018, - "russian": 30731, - "russian": 4868, - "russians": 25413, - "russo": 30679, - "rust": 28682, - "rust": 14212, - "rustic": 19822, - "rusty": 43966, - "rusty": 22646, - "rut": 14973, - "rut": 39102, - "rutger": 49029, - "rutgers": 28934, - "ruth": 15798, - "ruth": 12029, - "ruther": 26676, - "rutherford": 31070, - "ruthless": 36063, - "rutland": 46024, - "ruto": 43702, - "ruz": 23275, - "rv": 17135, - "rv": 17951, - "rva": 24278, - "rw": 9085, - "rw": 22926, - "rwa": 47452, - "rwand": 31758, - "rwanda": 15427, - "rwby": 39698, - "rwc": 32321, - "rx": 41188, - "rx": 15945, - "ry": 1511, - "ry": 913, - "ryan": 8682, - "ryan": 4053, - "ryanair": 43526, - "ryder": 43564, - "ryder": 21805, - "rye": 24015, - "rye": 17409, - "rying": 7838, - "ryn": 37728, - "ryo": 24460, - "rys": 21654, - "ryu": 46656, - "ryu": 34604, - "ré": 29106, - "s": 82, - "s": 338, - "sa": 774, - "sa": 1344, - "saa": 13429, - "saab": 27158, - "saad": 36530, - "saas": 25761, - "saat": 33151, - "sab": 3233, - "sab": 23213, - "saba": 38344, - "sabah": 32854, - "saban": 41620, - "sabar": 47102, - "sabbath": 26008, - 
"sabc": 30010, - "sabcnews": 41093, - "saber": 46822, - "saber": 25624, - "sabha": 23431, - "sabi": 47073, - "sabine": 44062, - "sable": 19224, - "sabot": 30700, - "sabotage": 40496, - "sabre": 35110, - "sabres": 29620, - "sabrin": 37029, - "sabrina": 24994, - "sac": 3632, - "sac": 12905, - "sach": 30168, - "sacha": 49010, - "sachin": 47527, - "sachin": 30297, - "sachs": 31451, - "sack": 28964, - "sack": 14979, - "sacked": 27519, - "sacks": 26441, - "sacram": 13334, - "sacramento": 16065, - "sacred": 40612, - "sacred": 12477, - "sacri": 15283, - "sacrif": 12117, - "sacrific": 16919, - "sacrifice": 12556, - "sacrificed": 31116, - "sacrifices": 28858, - "sacrificing": 48146, - "sad": 2810, - "sad": 3719, - "saddened": 27720, - "saddest": 34925, - "saddle": 30469, - "saddle": 20283, - "sade": 27429, - "sadh": 40955, - "sadi": 22207, - "sadie": 30333, - "sadiq": 44107, - "sadler": 45600, - "sadly": 11603, - "sadness": 20399, - "sae": 38633, - "sae": 34883, - "saeed": 29745, - "saf": 2125, - "saf": 25760, - "safar": 23443, - "safari": 14091, - "safarilive": 34816, - "safc": 27998, - "safe": 2901, - "safe": 2996, - "safeguard": 42249, - "safeguarding": 47451, - "safely": 11513, - "safer": 40124, - "safer": 15504, - "safest": 38973, - "safety": 19050, - "safety": 3406, - "safetyfirst": 43608, - "saffron": 27529, - "sag": 6609, - "sag": 30048, - "saga": 15758, - "sagan": 37193, - "sagar": 42518, - "sage": 25800, - "sage": 7509, - "sages": 25979, - "sagin": 47097, - "sagitt": 44685, - "sagu": 44708, - "sah": 30943, - "sah": 26342, - "saha": 36062, - "sahara": 24599, - "saharan": 44255, - "sahi": 24608, - "sahib": 34150, - "sai": 16048, - "sai": 10886, - "said": 40319, - "said": 1946, - "saif": 44164, - "saig": 36328, - "saigon": 41081, - "sail": 7528, - "sail": 12156, - "sailed": 43047, - "sailing": 11003, - "sailor": 28002, - "sailor": 16076, - "sailormoon": 40673, - "sailors": 25355, - "sails": 27526, - "sain": 21226, - "sain": 40378, - "sains": 24860, - "sainsbury": 
45879, - "sainsburys": 36934, - "saint": 11274, - "saint": 5599, - "saints": 8769, - "saintsfc": 31102, - "sair": 46600, - "sair": 30971, - "saire": 28087, - "saison": 33256, - "sait": 48008, - "saj": 33580, - "sak": 11511, - "sak": 35900, - "saka": 33609, - "sake": 12874, - "sakh": 43945, - "saki": 40514, - "saku": 37550, - "sakura": 24162, - "sal": 980, - "sal": 6126, - "sala": 17300, - "salaam": 46773, - "salad": 6188, - "salads": 30948, - "salah": 22516, - "salam": 19007, - "salam": 33963, - "salamat": 44696, - "salami": 46885, - "salaries": 33132, - "salary": 16312, - "salazar": 45988, - "sale": 17786, - "sale": 1690, - "saleh": 38353, - "salem": 48194, - "salem": 16884, - "sales": 13347, - "sales": 3765, - "salesforce": 22680, - "salesman": 37633, - "salford": 25629, - "sali": 15411, - "salim": 42760, - "salinas": 41990, - "saline": 46918, - "salis": 20667, - "salis": 39378, - "salisbury": 24763, - "sall": 27122, - "sall": 20883, - "salle": 23738, - "sally": 29542, - "sally": 13349, - "salman": 13754, - "salman": 16219, - "salmankhan": 15177, - "salmon": 37040, - "salmon": 9137, - "salom": 38268, - "salon": 33916, - "salon": 11105, - "saloon": 26038, - "sals": 16307, - "salsa": 16442, - "salt": 12763, - "salt": 6611, - "salted": 26313, - "saltlife": 47809, - "salts": 40559, - "saltwater": 43616, - "salty": 20678, - "salu": 31711, - "salud": 46867, - "salut": 44998, - "salute": 44908, - "salute": 9747, - "salutes": 32762, - "salv": 8299, - "salvador": 20874, - "salvage": 33131, - "salvation": 19534, - "salvatore": 38772, - "salz": 33594, - "salzburg": 43396, - "sam": 1644, - "sam": 3730, - "sama": 19272, - "samanth": 11465, - "samantha": 15466, - "samanthap": 38266, - "samanthaprabhu": 38643, - "samar": 21820, - "samaritan": 45495, - "samba": 37190, - "same": 23062, - "same": 2208, - "samheughan": 36255, - "sami": 48400, - "sami": 24322, - "sammy": 31091, - "sammy": 16758, - "samo": 30006, - "samoa": 34932, - "samp": 31225, - "sample": 9542, - "sampler": 
40629, - "samples": 13387, - "sampling": 19522, - "sampson": 39983, - "sams": 44667, - "samson": 34659, - "samsun": 47875, - "samsung": 35369, - "samsung": 8115, - "samu": 7646, - "samuel": 30612, - "samuel": 12787, - "samurai": 21739, - "san": 1489, - "san": 2223, - "sana": 19434, - "sanantonio": 34714, - "sanat": 29091, - "sanatomy": 36052, - "sanc": 7398, - "sance": 15930, - "sanchez": 13971, - "sanctioned": 43032, - "sanctions": 17790, - "sanctu": 12712, - "sanctuary": 14044, - "sand": 2147, - "sand": 5094, - "sandal": 36445, - "sandal": 42185, - "sandals": 20731, - "sandalwood": 47502, - "sandeep": 46973, - "sander": 34111, - "sanders": 10429, - "sanderson": 36198, - "sandi": 44249, - "sandiego": 45997, - "sandiego": 15793, - "sandman": 45730, - "sando": 35921, - "sandoval": 44157, - "sandra": 33733, - "sandra": 13415, - "sandro": 42389, - "sands": 5936, - "sandstone": 36796, - "sandwich": 17050, - "sandwich": 8687, - "sandwiches": 19667, - "sandy": 29679, - "sandy": 10355, - "sane": 23419, - "sanford": 32330, - "sanfrancisco": 20254, - "sang": 13235, - "sang": 11684, - "sange": 12466, - "sangria": 42665, - "sani": 39137, - "sani": 34492, - "sanitary": 33842, - "sanitation": 25414, - "saniti": 43987, - "sanity": 30517, - "sanjay": 31712, - "sanjay": 25796, - "sanje": 40405, - "sanjose": 45971, - "sank": 43692, - "sano": 34053, - "sans": 16982, - "sansk": 39689, - "sanskrit": 48083, - "sant": 8356, - "sant": 23120, - "santa": 22175, - "santa": 4555, - "santac": 28876, - "santam": 45627, - "santana": 27033, - "santander": 46476, - "santi": 13856, - "santiago": 16568, - "santo": 29631, - "santo": 18400, - "santor": 28448, - "santorini": 39573, - "santos": 16582, - "sany": 47679, - "sao": 28026, - "sap": 8089, - "sap": 11591, - "sapi": 40016, - "sapp": 13427, - "sapp": 40729, - "sapphire": 22044, - "sar": 1808, - "sar": 9424, - "sara": 37196, - "sara": 10063, - "sarab": 40716, - "sarac": 35722, - "sarah": 9086, - "sarah": 5327, - "saraj": 42592, - "sarajevo": 
48211, - "saras": 20373, - "sarasota": 31990, - "sarato": 24845, - "saratoga": 29496, - "sarawak": 47331, - "sarcasm": 37246, - "sarcastic": 48639, - "sardar": 41786, - "sarde": 43925, - "sardin": 27383, - "sardinia": 41025, - "sare": 13051, - "saree": 30860, - "sargent": 34864, - "sari": 42327, - "sari": 20261, - "saries": 47586, - "sarkar": 30673, - "sarko": 33658, - "sarkodie": 42848, - "sarmy": 20954, - "sart": 33006, - "sary": 15398, - "sas": 3960, - "sas": 5235, - "sash": 35656, - "sasha": 46078, - "sasha": 20894, - "sasia": 44751, - "sask": 47091, - "sask": 30416, - "saskat": 17102, - "saskatchewan": 23899, - "saskatoon": 31128, - "sass": 31351, - "sassy": 20827, - "sat": 1382, - "sat": 3279, - "sata": 41520, - "satan": 19446, - "satanic": 38224, - "satchel": 45908, - "sate": 35749, - "satell": 9031, - "satellite": 10316, - "satellites": 28483, - "sath": 29675, - "sathletics": 30154, - "sati": 7038, - "satin": 21803, - "sation": 23674, - "sations": 31232, - "satire": 29875, - "satis": 9906, - "satisf": 22941, - "satisfaction": 19925, - "satisfied": 18101, - "satisfy": 29444, - "satisfying": 23755, - "sato": 34376, - "satu": 45283, - "satur": 1634, - "saturated": 32466, - "saturday": 12537, - "saturday": 1748, - "saturdaymorning": 29053, - "saturdaymotivation": 40843, - "saturdays": 18930, - "saturn": 17312, - "saty": 39426, - "sau": 2096, - "sau": 19455, - "sauce": 5520, - "saucer": 42272, - "sauces": 40367, - "saucy": 46684, - "saudi": 24511, - "saudi": 8548, - "saudiarabia": 28680, - "sauer": 46333, - "saul": 47623, - "saul": 23252, - "sault": 40361, - "sauna": 35460, - "saunders": 23794, - "saur": 13227, - "saura": 46532, - "saurus": 22118, - "saus": 36121, - "sausage": 11855, - "sausages": 31593, - "sauté": 36290, - "sautéed": 38517, - "sauvi": 30116, - "sauvignon": 32745, - "sav": 2248, - "sav": 26533, - "sava": 40198, - "savag": 43039, - "savage": 11859, - "savannah": 18662, - "save": 5895, - "save": 2673, - "saved": 7137, - "saveour": 33390, - 
"saver": 20987, - "savers": 31416, - "saves": 12907, - "savethe": 18031, - "savi": 14721, - "saving": 28498, - "saving": 6979, - "savings": 10651, - "savior": 24762, - "saviour": 35800, - "savor": 48071, - "savory": 32992, - "savoury": 49071, - "savoy": 39552, - "savvy": 29278, - "saw": 12429, - "saw": 2425, - "sawa": 39613, - "sawards": 29012, - "sawyer": 27726, - "sax": 14169, - "sax": 23766, - "saxon": 31856, - "saxophon": 43760, - "saxophone": 32296, - "say": 3047, - "say": 1451, - "saya": 35170, - "sayang": 46322, - "sayers": 44116, - "sayin": 23662, - "saying": 4455, - "says": 1563, - "saz": 35577, - "sb": 5576, - "sb": 4977, - "sba": 44970, - "sback": 43840, - "sband": 27539, - "sbaseball": 46491, - "sbball": 39190, - "sbc": 31404, - "sberg": 20358, - "sbi": 41369, - "sbk": 39211, - "sboro": 18909, - "sbridge": 49228, - "sbs": 18883, - "sbu": 48075, - "sbu": 46281, - "sburg": 7390, - "sburgh": 48205, - "sbury": 14081, - "sby": 26519, - "sby": 10287, - "sc": 663, - "sc": 3219, - "sca": 11001, - "scab": 31716, - "scaf": 28981, - "scafe": 45574, - "scaffolding": 41687, - "scal": 10859, - "scala": 37997, - "scalable": 44084, - "scale": 37817, - "scale": 5879, - "scaled": 41923, - "scales": 22891, - "scaling": 29116, - "scallo": 19936, - "scallop": 39544, - "scallops": 31430, - "scalp": 38898, - "scam": 17620, - "scam": 13215, - "scamp": 28451, - "scams": 34395, - "scan": 10650, - "scan": 11261, - "scanada": 27121, - "scand": 8110, - "scandal": 35420, - "scandal": 11622, - "scandals": 45490, - "scandin": 32014, - "scandinavian": 35661, - "scanned": 43719, - "scanner": 24185, - "scanning": 24092, - "scans": 31251, - "scap": 35883, - "scape": 36005, - "scape": 12314, - "scapes": 31933, - "scar": 4171, - "scar": 18088, - "scarborough": 24254, - "scarce": 38572, - "scarcity": 45812, - "scare": 33536, - "scare": 15920, - "scarec": 38814, - "scarecrow": 46504, - "scared": 9870, - "scares": 34096, - "scarf": 13365, - "scari": 27050, - "scariest": 37213, - "scarlet": 
20389, - "scarlett": 28325, - "scars": 20747, - "scarves": 29249, - "scary": 9250, - "scat": 13899, - "scattered": 22090, - "scavenger": 36778, - "scc": 19458, - "scd": 48422, - "scen": 2204, - "scenario": 20456, - "scenarios": 31346, - "scence": 33418, - "scene": 3562, - "scenery": 16025, - "scenes": 5415, - "scenic": 15394, - "scent": 36277, - "scent": 7683, - "scented": 27190, - "scenter": 23059, - "scentre": 39371, - "scents": 26336, - "scep": 24439, - "scfc": 38578, - "sch": 844, - "sch": 7542, - "scha": 42809, - "schaf": 45588, - "schaft": 41010, - "schal": 35568, - "schalke": 41029, - "schallenge": 43665, - "schan": 31328, - "schar": 15085, - "schat": 31842, - "schau": 35830, - "sche": 3038, - "sche": 7289, - "schedu": 4207, - "schedule": 5521, - "scheduled": 10986, - "schedules": 28986, - "scheduling": 32216, - "scheer": 26776, - "schel": 39881, - "schel": 38569, - "schem": 17720, - "scheme": 9024, - "schemes": 22958, - "schen": 22738, - "scher": 21925, - "scher": 21299, - "schi": 13731, - "schi": 24984, - "schicago": 46230, - "schiff": 39431, - "schild": 32148, - "schiz": 33230, - "schizoph": 40004, - "schizophre": 41163, - "schle": 32022, - "schmid": 17375, - "schmidt": 18463, - "schnau": 45745, - "schnei": 19941, - "schneider": 22972, - "schnit": 40903, - "scho": 2493, - "schoice": 23860, - "schol": 4498, - "scholar": 7192, - "scholar": 12830, - "scholarly": 41065, - "scholars": 13818, - "scholarship": 9070, - "scholarships": 17866, - "scholastic": 35743, - "schoo": 20721, - "school": 6063, - "school": 1228, - "schooled": 44722, - "schoolers": 31455, - "schooling": 28608, - "schools": 3513, - "schre": 47685, - "schri": 25453, - "schro": 32381, - "schu": 11318, - "schubert": 46939, - "schul": 14945, - "schultz": 30308, - "schulz": 39572, - "schumacher": 39208, - "schumer": 25313, - "schur": 42475, - "schwab": 47602, - "schwar": 13985, - "schwartz": 30617, - "schwarz": 27074, - "schwarzenegger": 33860, - "schwe": 25324, - "sci": 2267, - "sci": 8309, - 
"sciart": 31704, - "scicom": 28606, - "scicomm": 29573, - "scien": 39261, - "science": 10201, - "science": 2497, - "sciencefiction": 39170, - "sciences": 11481, - "scienti": 4338, - "scientific": 9750, - "scientist": 11083, - "scientists": 8045, - "sciento": 36193, - "scientology": 44694, - "scifi": 41862, - "scifi": 12230, - "scion": 47208, - "sciss": 25667, - "scissors": 30867, - "sciutto": 44392, - "sclerosis": 39446, - "sclub": 20017, - "sco": 1065, - "sco": 4763, - "scoe": 31164, - "scol": 13599, - "scoll": 44895, - "scollege": 39536, - "scom": 26407, - "scon": 17163, - "scon": 29272, - "scones": 36443, - "sconf": 39704, - "scoo": 14199, - "scooby": 34469, - "scoop": 13829, - "scoops": 41360, - "scope": 7979, - "scopes": 30328, - "scopic": 23869, - "scopy": 20018, - "scor": 8442, - "score": 12067, - "score": 4431, - "scoreboard": 30104, - "scorecard": 38128, - "scored": 6143, - "scoreless": 33469, - "scorer": 16572, - "scorers": 26699, - "scores": 7039, - "scoring": 9198, - "scorpi": 15445, - "scorpio": 34331, - "scorpion": 28461, - "scorpions": 45401, - "scorsese": 45975, - "scot": 2496, - "scot": 9271, - "scotch": 16687, - "scoti": 46446, - "scotia": 27859, - "scotland": 29174, - "scotland": 4203, - "scots": 17260, - "scotsman": 39612, - "scott": 7775, - "scott": 3664, - "scotti": 6227, - "scottish": 18039, - "scottish": 7442, - "scottsdale": 27817, - "scotty": 39697, - "scotty": 26836, - "scotus": 21720, - "scou": 44909, - "scoun": 16110, - "scouncil": 48787, - "scountry": 40432, - "scour": 46172, - "scout": 32213, - "scout": 10786, - "scouting": 19072, - "scouts": 14837, - "scow": 27929, - "scowboys": 31386, - "scp": 45030, - "scr": 36131, - "scra": 11187, - "scrabble": 39488, - "scram": 17289, - "scramble": 32688, - "scrambled": 39026, - "scran": 41774, - "scranton": 45274, - "scrap": 27950, - "scrap": 21695, - "scrapbook": 48733, - "scrapped": 43325, - "scraps": 40809, - "scrat": 9572, - "scratch": 13258, - "scratched": 48831, - "scratches": 46556, - 
"scratching": 44617, - "scre": 1795, - "scream": 31645, - "scream": 13239, - "screamed": 35427, - "screaming": 12891, - "screams": 23989, - "screen": 5351, - "screen": 3750, - "screened": 31450, - "screening": 6688, - "screenings": 27655, - "screenplay": 30058, - "screens": 12689, - "screenshot": 20637, - "screenshot": 12646, - "screenshots": 26783, - "screenshotsaturday": 21406, - "screenwriter": 37293, - "screenwriting": 35465, - "screw": 25529, - "screw": 14225, - "screwdriver": 48748, - "screwed": 30592, - "screws": 38292, - "scri": 2139, - "scrib": 34259, - "scribe": 36228, - "scribed": 38334, - "scricket": 45947, - "scrim": 21978, - "scrimmage": 25216, - "scrip": 11955, - "script": 8374, - "scripted": 40513, - "scription": 26604, - "scriptions": 39512, - "scripts": 20109, - "scripture": 27186, - "scro": 30768, - "scroll": 24160, - "scrolling": 28889, - "scrolls": 38113, - "scroo": 42263, - "scru": 7589, - "scrub": 23432, - "scrubs": 37919, - "scrum": 29047, - "scrump": 39791, - "scrumptious": 40987, - "scrutiny": 34305, - "scs": 26853, - "sct": 39284, - "scu": 8181, - "scu": 32135, - "scuba": 39053, - "scuba": 20559, - "scubadiving": 49046, - "scue": 25955, - "scul": 4948, - "scully": 36598, - "sculp": 6093, - "sculpt": 45044, - "sculpted": 41296, - "sculpting": 44389, - "sculptor": 29409, - "sculpture": 8757, - "sculptures": 20378, - "scum": 29655, - "scumb": 44525, - "scup": 21506, - "scur": 32742, - "scwx": 41966, - "scy": 27471, - "sd": 3080, - "sd": 4159, - "sda": 25548, - "sdale": 12327, - "sday": 5902, - "sday": 1376, - "sdays": 14491, - "sdc": 40992, - "sdcc": 13246, - "sden": 17241, - "sdf": 34681, - "sdg": 20177, - "sdgs": 16261, - "sdk": 40015, - "sdlive": 34561, - "sdn": 41925, - "sdsu": 41284, - "se": 567, - "se": 611, - "sea": 5970, - "sea": 2102, - "seab": 15728, - "seabir": 42558, - "seac": 35626, - "seaf": 9336, - "seafood": 12472, - "seag": 15730, - "seagu": 38076, - "seagull": 38858, - "seagulls": 42215, - "seahawks": 15341, - "seal": 
21381, - "seal": 10159, - "sealed": 13358, - "sealing": 42992, - "seals": 18179, - "seam": 13710, - "seam": 44201, - "seaman": 47513, - "seamless": 29373, - "seamus": 40175, - "sean": 11406, - "sean": 6077, - "seanhannity": 43316, - "seap": 29983, - "seaport": 46418, - "sear": 1612, - "search": 23129, - "search": 1920, - "searched": 28961, - "searches": 26378, - "searching": 10626, - "seared": 29727, - "sears": 26693, - "seas": 7329, - "seas": 9556, - "seascape": 42593, - "seaside": 18867, - "season": 19288, - "season": 1367, - "seasonal": 14215, - "seasoned": 28399, - "seasoning": 43439, - "seasons": 8635, - "seat": 19670, - "seat": 4922, - "seated": 23953, - "seater": 37543, - "seating": 16240, - "seats": 6944, - "seattle": 24388, - "seattle": 6274, - "seau": 32263, - "seaw": 32658, - "seaweed": 30204, - "seaworld": 27422, - "seb": 35766, - "seb": 25171, - "sebasti": 10324, - "sebastian": 43792, - "sebastian": 13181, - "sebring": 41086, - "sec": 2875, - "sec": 5338, - "seca": 37847, - "secco": 27394, - "sece": 46297, - "seclu": 42392, - "secon": 1846, - "second": 9329, - "second": 2241, - "secondary": 13107, - "seconds": 6541, - "secre": 2460, - "secret": 20710, - "secret": 4145, - "secretari": 29515, - "secretariat": 31767, - "secretary": 6552, - "secretly": 21400, - "secrets": 9735, - "secs": 28665, - "sect": 15772, - "section": 34986, - "section": 4853, - "sectional": 21876, - "sections": 20061, - "sector": 6579, - "sectors": 22173, - "secu": 4894, - "secular": 47483, - "secular": 27560, - "secur": 2557, - "secure": 44763, - "secure": 7515, - "secured": 16848, - "secures": 31567, - "securing": 24759, - "securities": 25080, - "security": 31245, - "security": 2741, - "sed": 14034, - "sed": 1252, - "sedan": 24237, - "sedg": 46926, - "sedge": 45288, - "sedi": 29269, - "sedly": 31771, - "sedona": 46862, - "seduc": 19933, - "seductive": 43721, - "see": 1751, - "see": 862, - "seed": 14064, - "seed": 6488, - "seeded": 33688, - "seeding": 40050, - "seedlings": 47933, - 
"seeds": 9128, - "seeing": 3214, - "seek": 8839, - "seeker": 28011, - "seekers": 20732, - "seeking": 8592, - "seeks": 12594, - "seem": 20043, - "seem": 7523, - "seemed": 17240, - "seemingly": 25917, - "seems": 4453, - "seen": 36273, - "seen": 2041, - "seer": 32486, - "sees": 7594, - "seeyou": 41279, - "sef": 27453, - "seg": 10551, - "sega": 16122, - "segment": 15615, - "segments": 43053, - "segreg": 49117, - "segregation": 39086, - "segu": 33156, - "segun": 43087, - "seh": 27536, - "seh": 41430, - "sehun": 17705, - "sei": 13130, - "sei": 15907, - "sein": 24669, - "seine": 41378, - "seinfeld": 33706, - "seis": 25559, - "seismic": 38459, - "seiz": 22171, - "seize": 26624, - "seized": 15826, - "seizure": 36804, - "seizures": 47199, - "sek": 45515, - "sek": 25880, - "sel": 1000, - "sel": 4098, - "sela": 47006, - "selamat": 37692, - "selangor": 44402, - "selby": 43546, - "selca": 38606, - "selcaday": 35924, - "seldom": 48322, - "sele": 29137, - "selec": 3014, - "select": 8690, - "selected": 6881, - "selecting": 32696, - "selection": 6724, - "selections": 24099, - "selective": 28686, - "selects": 32902, - "selen": 19970, - "selena": 14677, - "selenagomez": 27653, - "seley": 30556, - "self": 10139, - "self": 1322, - "selfcare": 39560, - "selfi": 3007, - "selfie": 26735, - "selfie": 3666, - "selfies": 46058, - "selfies": 10050, - "selfish": 26907, - "selfless": 34236, - "sell": 10279, - "sell": 5119, - "seller": 11779, - "sellers": 16562, - "selling": 4396, - "sells": 14306, - "selma": 36652, - "sels": 42070, - "selves": 4505, - "sely": 8402, - "sem": 8645, - "sem": 17106, - "sema": 31816, - "seman": 29119, - "seman": 28378, - "semana": 41780, - "semb": 36054, - "seme": 10855, - "sement": 10714, - "sements": 31449, - "semester": 11905, - "semi": 11023, - "semi": 6684, - "semic": 26967, - "semicon": 34315, - "semiconduc": 35646, - "semiconductor": 43551, - "semifinal": 22935, - "semifinals": 21863, - "semin": 5595, - "seminar": 7269, - "seminars": 34870, - "seminary": 
31655, - "seminole": 42956, - "semis": 24013, - "semit": 22628, - "semite": 23721, - "semitic": 34894, - "semitism": 25911, - "semper": 47391, - "sen": 1057, - "sen": 2249, - "sena": 21584, - "senate": 30703, - "senate": 6843, - "senator": 20871, - "senator": 8495, - "senators": 16889, - "send": 27684, - "send": 3625, - "sending": 6985, - "sends": 10817, - "sene": 25269, - "seneca": 33419, - "senegal": 28255, - "senew": 49313, - "seng": 43022, - "seng": 29971, - "senior": 19865, - "senior": 3415, - "seniors": 8138, - "senna": 36195, - "senpai": 46562, - "sens": 5218, - "sens": 22837, - "sensation": 19383, - "sensational": 23051, - "sense": 29162, - "sense": 4747, - "sensei": 36158, - "senses": 21809, - "sensi": 38802, - "sensible": 30635, - "sensing": 29236, - "sensiti": 20531, - "sensitive": 13734, - "sensitivity": 27788, - "sensor": 15330, - "sensors": 20356, - "sensory": 21831, - "sensu": 28157, - "sensual": 40860, - "sent": 6200, - "sent": 3676, - "sentence": 12737, - "sentenced": 17773, - "sentences": 25858, - "sentencing": 34394, - "senti": 19042, - "sentim": 25102, - "sentiment": 25949, - "sentimental": 40070, - "sentiments": 47450, - "sentin": 20042, - "sentinel": 23123, - "senting": 3924, - "seo": 24743, - "seo": 8622, - "seok": 34697, - "seok": 22482, - "seokjin": 45584, - "seoul": 13253, - "sep": 3212, - "sep": 10434, - "separ": 6859, - "separate": 13886, - "separated": 22163, - "separately": 41904, - "separates": 45365, - "separati": 39377, - "separating": 43480, - "separation": 22007, - "sephora": 38414, - "sepsis": 40205, - "sept": 5380, - "septe": 3672, - "september": 3707, - "septic": 34690, - "sepul": 47360, - "seq": 44379, - "sequ": 5491, - "seque": 44662, - "sequel": 15701, - "sequence": 18833, - "sequences": 47306, - "sequencing": 33484, - "sequo": 32781, - "sequoia": 42404, - "ser": 803, - "ser": 2771, - "sera": 28250, - "serbia": 19038, - "serbian": 33687, - "sere": 35770, - "seren": 7880, - "serena": 19519, - "serenawilliams": 48316, - 
"serendip": 45805, - "serendipity": 49386, - "serene": 28269, - "serenity": 24187, - "serge": 13477, - "serge": 35700, - "sergeant": 22049, - "sergei": 39870, - "sergey": 35390, - "sergi": 47675, - "sergio": 18359, - "seri": 2763, - "seri": 37509, - "serial": 14216, - "serie": 19752, - "seriea": 32660, - "series": 1857, - "serious": 47421, - "serious": 4770, - "seriously": 4885, - "sermon": 24884, - "sero": 48883, - "serpent": 37084, - "serpent": 35364, - "serra": 39851, - "serrano": 44236, - "sers": 13509, - "serum": 25385, - "serv": 1297, - "serv": 24571, - "servant": 20810, - "servants": 29652, - "serve": 39202, - "serve": 2838, - "served": 4740, - "server": 36458, - "server": 8398, - "serverless": 49243, - "servers": 22262, - "serves": 9915, - "servic": 27115, - "service": 21496, - "service": 2086, - "serviced": 44687, - "services": 3100, - "servicing": 41300, - "serving": 5722, - "sery": 14279, - "ses": 23708, - "ses": 1386, - "sesame": 21706, - "sese": 37128, - "sesh": 24274, - "session": 2550, - "sessions": 6327, - "set": 7965, - "set": 1167, - "setback": 43605, - "seth": 20005, - "seth": 11870, - "sethu": 38933, - "setlist": 33141, - "seton": 43799, - "sets": 4650, - "sett": 4984, - "sett": 17567, - "sette": 14613, - "setter": 23153, - "settes": 44145, - "setti": 45170, - "setting": 5264, - "settings": 18628, - "settle": 15075, - "settled": 18310, - "settlement": 16494, - "settlements": 36605, - "settlers": 35671, - "settles": 41498, - "settling": 22036, - "setup": 11092, - "seu": 31539, - "seul": 48975, - "seum": 18838, - "seun": 24209, - "seung": 32393, - "seung": 33711, - "seungri": 41627, - "seuss": 34441, - "sev": 26585, - "sev": 37600, - "seva": 42604, - "seve": 21458, - "seve": 22468, - "sevel": 17439, - "seven": 7874, - "seven": 5757, - "sevens": 29911, - "sevent": 43048, - "seventeen": 19337, - "seventh": 17568, - "seventy": 47170, - "sever": 3250, - "sever": 45557, - "several": 5560, - "severance": 26194, - "severe": 6215, - "severely": 24417, - 
"severn": 34626, - "severy": 34207, - "sevilla": 24947, - "seville": 34988, - "sew": 28640, - "sewage": 32777, - "sewer": 28294, - "sewing": 15974, - "sewn": 42118, - "sex": 3548, - "sex": 5937, - "sexi": 20562, - "sexiest": 25426, - "sexism": 32059, - "sexist": 33047, - "sexu": 14741, - "sexual": 6749, - "sexuality": 21244, - "sexually": 23032, - "sexy": 21019, - "sexy": 38127, - "sey": 6317, - "sey": 2258, - "seychel": 36809, - "seychelles": 38519, - "seye": 35604, - "seym": 22657, - "seymour": 25850, - "seys": 15081, - "sez": 42377, - "señ": 43368, - "sf": 4435, - "sf": 4915, - "sfa": 32675, - "sfam": 37649, - "sfb": 27930, - "sfc": 14129, - "sfest": 49024, - "sff": 42056, - "sfgiants": 20923, - "sfield": 11801, - "sfo": 39182, - "sfootball": 45259, - "sfor": 9115, - "sford": 28917, - "sforsale": 28888, - "sfw": 18073, - "sfx": 37995, - "sg": 9599, - "sg": 7611, - "sga": 33049, - "sgate": 27558, - "sgh": 47590, - "sgo": 5393, - "sgo": 21044, - "sgt": 13748, - "sh": 552, - "sh": 849, - "sha": 1514, - "sha": 3337, - "shaa": 44221, - "shab": 8323, - "shabbat": 38042, - "shabby": 28838, - "shack": 23866, - "shack": 18785, - "shad": 3182, - "shad": 23874, - "shade": 34554, - "shade": 10097, - "shaded": 43506, - "shades": 46608, - "shades": 9270, - "shadesof": 45180, - "shading": 37348, - "shado": 9325, - "shadow": 15243, - "shadow": 7068, - "shadowhun": 19931, - "shadowhunters": 24834, - "shadowing": 46092, - "shadows": 12971, - "shady": 22158, - "shaf": 12032, - "shaft": 21545, - "shag": 22439, - "shaggy": 42662, - "shah": 13203, - "shah": 8439, - "shahe": 23643, - "shaheed": 30060, - "shaheer": 43969, - "shahi": 46972, - "shahid": 25696, - "shahid": 27138, - "shahidkapoor": 29892, - "shahzad": 45915, - "shai": 47941, - "shaikh": 45712, - "shail": 37603, - "shair": 43135, - "shak": 8385, - "shake": 8206, - "shake": 8251, - "shaken": 38237, - "shaker": 26210, - "shakers": 38411, - "shakes": 19668, - "shakespe": 9890, - "shakespeare": 22499, - "shakespeare": 12488, - 
"shakespearesunday": 32320, - "shaking": 19101, - "shakira": 40795, - "shakti": 48593, - "shakti": 32458, - "shakur": 48915, - "shal": 15056, - "shal": 28175, - "shale": 32864, - "shall": 4742, - "shallow": 23730, - "shalom": 31339, - "sham": 6453, - "sham": 9005, - "shaman": 48727, - "shambles": 40799, - "shame": 14776, - "shame": 7593, - "shameful": 28283, - "shameless": 25380, - "shaming": 40553, - "shampoo": 23944, - "shamrock": 34199, - "shan": 5171, - "shan": 8834, - "shana": 44835, - "shand": 29101, - "shane": 26863, - "shane": 11572, - "shang": 11141, - "shanghai": 12742, - "shani": 46665, - "shank": 24685, - "shankar": 24108, - "shann": 9932, - "shannon": 22842, - "shannon": 13581, - "shant": 36610, - "shap": 5581, - "shape": 26925, - "shape": 6448, - "shaped": 10127, - "shapes": 15377, - "shaping": 18632, - "shapiro": 32110, - "shaq": 46402, - "shaq": 26843, - "shar": 1669, - "shar": 36542, - "shara": 48849, - "sharapo": 36489, - "sharapova": 36671, - "shard": 42207, - "share": 7585, - "share": 1978, - "shared": 5368, - "shareholder": 38241, - "shareholders": 34778, - "sharepoint": 39213, - "shares": 4974, - "sharethe": 49277, - "shareyour": 45890, - "shari": 27738, - "shari": 47390, - "sharia": 37244, - "sharif": 15501, - "sharing": 3567, - "sharjah": 33420, - "shark": 15836, - "shark": 7980, - "sharks": 10047, - "sharkweek": 39571, - "sharma": 10105, - "sharon": 28722, - "sharon": 14138, - "sharp": 17126, - "sharp": 8157, - "sharpe": 34374, - "sharpen": 41465, - "sharpie": 46858, - "sharply": 37185, - "shasta": 46727, - "shat": 12169, - "shat": 44388, - "shatter": 45008, - "shattered": 26820, - "shau": 13750, - "shaun": 23446, - "shaun": 16669, - "shav": 11410, - "shave": 17735, - "shaved": 25571, - "shaving": 24261, - "shaw": 6122, - "shaw": 6805, - "shawa": 46413, - "shawl": 35132, - "shawn": 16677, - "shawn": 10970, - "shawnee": 48060, - "shawnmendes": 27277, - "shawty": 38026, - "shay": 10778, - "shay": 18361, - "shaykh": 47223, - "shaz": 18618, - 
"shazam": 29063, - "shc": 43419, - "shd": 37729, - "she": 1729, - "she": 1043, - "shea": 20407, - "shead": 44287, - "shead": 20434, - "shealth": 41743, - "shealth": 22197, - "shear": 27974, - "shear": 32108, - "shearer": 40505, - "sheath": 45637, - "shed": 16586, - "shed": 1492, - "shedding": 33608, - "sheds": 25921, - "shee": 23450, - "shee": 34321, - "sheed": 26105, - "sheehan": 41809, - "sheen": 25025, - "sheep": 23604, - "sheep": 9629, - "sheer": 17577, - "sheeran": 18561, - "sheet": 7298, - "sheets": 12744, - "shef": 8237, - "sheff": 38844, - "sheff": 43821, - "sheffiel": 26940, - "sheffield": 41763, - "sheffield": 10420, - "sheffieldissuper": 33628, - "sheh": 31667, - "sheikh": 15031, - "sheil": 42765, - "sheila": 25734, - "shek": 33285, - "shel": 3159, - "shelby": 36906, - "shelby": 16885, - "sheldon": 25079, - "shelf": 10955, - "shell": 23374, - "shell": 6648, - "shelley": 22497, - "shelling": 43166, - "shells": 19265, - "shelly": 37461, - "shelter": 8599, - "sheltered": 48070, - "shelters": 24312, - "shelton": 24471, - "shelves": 16225, - "shem": 40299, - "shen": 10154, - "shen": 31098, - "shenan": 20965, - "shenando": 44666, - "shenanigans": 26590, - "shenko": 39751, - "shenmue": 48279, - "shenzhen": 38970, - "shep": 33757, - "shep": 44857, - "shepard": 26810, - "shepher": 11008, - "shepherd": 13242, - "shepherds": 42792, - "sheppard": 37304, - "sher": 3570, - "sher": 4510, - "sheraton": 39400, - "shere": 21507, - "sheri": 9235, - "sheridan": 27085, - "sheriff": 10309, - "sherlock": 17294, - "sherman": 17822, - "sherry": 44348, - "sherry": 24689, - "shers": 14141, - "sherwood": 24527, - "sheryl": 39773, - "shes": 45514, - "shes": 2502, - "shet": 15850, - "shetland": 29595, - "shetty": 25533, - "shev": 45182, - "sheva": 45132, - "shh": 35025, - "shhh": 36932, - "shi": 823, - "shi": 3533, - "shia": 23791, - "shibu": 36177, - "shibuya": 41623, - "shie": 26638, - "shiel": 33413, - "shield": 8670, - "shields": 19085, - "shies": 35312, - "shif": 35317, - 
"shift": 43767, - "shift": 6905, - "shifted": 34429, - "shifter": 48944, - "shifting": 21992, - "shifts": 23957, - "shik": 36980, - "shil": 14370, - "shill": 32121, - "shill": 30090, - "shilpa": 47062, - "shilpa": 40690, - "shim": 11986, - "shim": 32780, - "shima": 14382, - "shimano": 48904, - "shimi": 40517, - "shimmer": 38792, - "shin": 5664, - "shin": 11784, - "shinde": 41516, - "shine": 17582, - "shine": 3780, - "shinee": 19660, - "shines": 16015, - "shing": 38641, - "shing": 1743, - "shining": 10485, - "shino": 43074, - "shiny": 12190, - "ship": 7645, - "ship": 1158, - "shipment": 28553, - "shipp": 34709, - "shipped": 15279, - "shippers": 44789, - "shipping": 5721, - "ships": 3262, - "shipwreck": 48878, - "shipy": 26828, - "shipyard": 31273, - "shir": 1956, - "shiraz": 35618, - "shire": 11975, - "shire": 2968, - "shirehour": 32456, - "shirley": 18189, - "shiro": 26048, - "shirt": 27576, - "shirt": 2523, - "shirtless": 28959, - "shirts": 5803, - "shistory": 34979, - "shiv": 18042, - "shiv": 37121, - "shiva": 33881, - "shiva": 21174, - "shka": 38944, - "shld": 49359, - "shma": 48074, - "shment": 8802, - "shments": 18822, - "sho": 719, - "sho": 13756, - "shock": 19617, - "shock": 8736, - "shocked": 15787, - "shocker": 37971, - "shockey": 22258, - "shocking": 13394, - "shocks": 31886, - "shoe": 16308, - "shoe": 7342, - "shoes": 49391, - "shoes": 4079, - "shol": 21472, - "sholm": 44139, - "shome": 42701, - "shon": 19526, - "shon": 37621, - "shone": 47173, - "shoo": 1975, - "shook": 20730, - "shoops": 29956, - "shoot": 12531, - "shoot": 3704, - "shooter": 13645, - "shooters": 31902, - "shooting": 3992, - "shootings": 26753, - "shootout": 20666, - "shoots": 14144, - "shop": 5738, - "shop": 1557, - "shopify": 47949, - "shoplocal": 21775, - "shopp": 38486, - "shoppe": 38236, - "shopped": 28088, - "shopper": 24346, - "shoppers": 22316, - "shopping": 42101, - "shopping": 4266, - "shops": 6467, - "shopsmall": 35942, - "shor": 3209, - "shore": 14717, - "shore": 5928, - 
"shored": 33140, - "shoreditch": 35042, - "shoreline": 34807, - "shores": 18102, - "short": 6803, - "short": 3005, - "shortage": 19910, - "shortages": 38730, - "shortcuts": 45793, - "shorten": 41711, - "shorter": 20350, - "shortest": 33717, - "shortfilm": 37204, - "shorth": 37397, - "shortlist": 28163, - "shortlisted": 20631, - "shortly": 11967, - "shorts": 9680, - "shorty": 33502, - "shot": 9805, - "shot": 2000, - "shotel": 42365, - "shotgun": 21643, - "shots": 5342, - "shou": 3890, - "shoul": 29847, - "should": 14947, - "should": 1535, - "shoulder": 8476, - "shoulders": 18738, - "shouldn": 9416, - "shour": 20025, - "shouse": 28671, - "shout": 7335, - "shout": 5214, - "shouted": 44397, - "shouting": 26464, - "shoutout": 8274, - "shouts": 26709, - "shovel": 31778, - "show": 2133, - "show": 1080, - "showbiz": 34156, - "showcas": 14290, - "showcase": 7265, - "showcased": 35786, - "showcases": 26266, - "showcasing": 17036, - "showdown": 15576, - "showed": 7150, - "shower": 7777, - "showers": 9893, - "showing": 3649, - "shown": 8506, - "showroom": 16821, - "shows": 2665, - "showtime": 40576, - "showtime": 15442, - "showyour": 46733, - "shp": 38341, - "shq": 21145, - "shr": 10118, - "shra": 21360, - "shradd": 28172, - "shraddha": 35208, - "shraddhakapoor": 40385, - "shre": 12101, - "shred": 19756, - "shred": 33017, - "shredded": 31772, - "shredding": 45534, - "shree": 37410, - "shrek": 35009, - "shrews": 26411, - "shrewsbury": 30921, - "shri": 8838, - "shri": 11424, - "shrimp": 12727, - "shrin": 24865, - "shrine": 16156, - "shrink": 34957, - "shrinking": 41243, - "shrm": 44163, - "shro": 15259, - "shroff": 32081, - "shrop": 22630, - "shropshire": 26344, - "shru": 14911, - "shrub": 41464, - "shrubs": 47975, - "shrun": 46767, - "shs": 16184, - "sht": 44210, - "shti": 38927, - "shu": 2872, - "shu": 17651, - "shua": 33771, - "shub": 40552, - "shud": 45782, - "shuff": 42641, - "shuffle": 21681, - "shui": 45473, - "shuk": 29927, - "shukla": 46829, - "shul": 30721, - "shum": 
37383, - "shun": 24479, - "shun": 39594, - "shur": 41032, - "shut": 8702, - "shut": 8282, - "shutdown": 16051, - "shutout": 24385, - "shuts": 28313, - "shutt": 31866, - "shutter": 36235, - "shutter": 33902, - "shutters": 46894, - "shutting": 31383, - "shuttle": 15842, - "shwar": 41640, - "shy": 22678, - "shy": 9682, - "si": 564, - "si": 2990, - "sia": 2357, - "siam": 29686, - "siam": 48248, - "siamese": 43161, - "sian": 28510, - "sian": 6221, - "sians": 26583, - "sias": 28645, - "siber": 22206, - "siberia": 39969, - "siberian": 34058, - "sibl": 14338, - "sible": 14507, - "sibling": 43060, - "sibling": 23779, - "siblings": 17156, - "sic": 8278, - "sic": 1118, - "sica": 34125, - "sical": 33875, - "sichuan": 48950, - "sicilian": 45292, - "sicily": 23179, - "sick": 11143, - "sick": 5359, - "sickest": 47972, - "sickle": 41459, - "sickness": 28898, - "sics": 26297, - "sid": 10117, - "sid": 15119, - "sidd": 19842, - "siddi": 35227, - "side": 5869, - "side": 1145, - "sided": 21061, - "sidekick": 44683, - "sidel": 43557, - "sideline": 32056, - "sidelines": 31046, - "sider": 30581, - "siders": 41249, - "sides": 7578, - "sideshow": 46789, - "sidewalk": 23278, - "sidewalks": 43583, - "sideways": 35593, - "siding": 38758, - "sidney": 22598, - "sie": 8533, - "sie": 5685, - "sieg": 49203, - "siege": 18460, - "siegel": 48559, - "siem": 18434, - "siemens": 30147, - "siempre": 44030, - "siena": 33336, - "sienna": 40373, - "sier": 10028, - "sier": 7444, - "sierra": 13552, - "siers": 35923, - "sies": 16367, - "siest": 18323, - "sif": 29300, - "sig": 872, - "sig": 19145, - "sigh": 36303, - "sigh": 15505, - "sighs": 44579, - "sight": 16897, - "sight": 6329, - "sighted": 33034, - "sighting": 17507, - "sightings": 30004, - "sights": 17364, - "sightseeing": 34210, - "sigma": 45075, - "sigma": 15697, - "sign": 5538, - "sign": 2292, - "signage": 21156, - "signal": 10781, - "signaling": 38492, - "signalling": 48426, - "signals": 17150, - "signation": 24347, - "signature": 9189, - 
"signatures": 21865, - "signed": 3163, - "signee": 39778, - "signi": 34023, - "signific": 6374, - "significance": 23769, - "significant": 8735, - "significantly": 16187, - "signing": 4401, - "signingday": 40282, - "signings": 27731, - "signs": 4659, - "signup": 40791, - "sigue": 49401, - "sii": 36672, - "sik": 19974, - "sik": 22413, - "sika": 31144, - "sikh": 21829, - "sikhs": 45426, - "sil": 1556, - "sil": 8315, - "sila": 41754, - "sile": 37620, - "silen": 39048, - "silence": 8462, - "silenced": 45415, - "silent": 30352, - "silent": 8487, - "silently": 42640, - "silhou": 20589, - "silhouette": 26149, - "silic": 23830, - "silicon": 32412, - "silicon": 17888, - "silicone": 28221, - "silk": 25891, - "silk": 9743, - "silky": 29554, - "sill": 42468, - "sill": 48024, - "silly": 11883, - "silon": 31841, - "sils": 39708, - "silva": 16489, - "silve": 37697, - "silver": 7525, - "silver": 3467, - "silverado": 46160, - "silverstone": 29666, - "silvia": 37289, - "sim": 5026, - "sim": 10740, - "sima": 35871, - "simba": 39492, - "simcoe": 47148, - "sime": 28329, - "simi": 38073, - "simil": 7202, - "similar": 8547, - "similarities": 34716, - "simm": 13001, - "simmons": 14699, - "simo": 37171, - "simon": 8796, - "simon": 6668, - "simona": 46277, - "simone": 19062, - "simons": 33097, - "simp": 2542, - "simple": 19018, - "simple": 4129, - "simpler": 35489, - "simplest": 39588, - "simpli": 16868, - "simplicity": 21262, - "simplified": 36647, - "simplify": 35479, - "simply": 25637, - "simply": 6151, - "simpson": 41805, - "simpson": 11750, - "simpsons": 21092, - "sims": 14021, - "simul": 9845, - "simulated": 46395, - "simulation": 18610, - "simulator": 20821, - "simultaneous": 48816, - "simultaneously": 28575, - "sin": 1303, - "sin": 3421, - "sina": 19541, - "sinai": 33226, - "sinatra": 27262, - "sinc": 30464, - "since": 1855, - "sincere": 24513, - "sincere": 24886, - "sincerely": 25673, - "sinclair": 23100, - "sind": 39598, - "sind": 30877, - "sindh": 20754, - "sindia": 48038, - 
"sine": 22741, - "sine": 33793, - "sinfo": 47178, - "sing": 1387, - "sing": 1197, - "singapo": 27861, - "singapore": 28879, - "singapore": 6754, - "singer": 33880, - "singer": 5108, - "singers": 15613, - "singersongwriter": 44585, - "singh": 19445, - "singh": 5715, - "singing": 5864, - "single": 19524, - "single": 2688, - "singles": 12025, - "singleton": 46247, - "singly": 16619, - "sings": 13635, - "singul": 34003, - "singular": 44009, - "singularity": 48410, - "sinha": 29416, - "sini": 41781, - "sini": 26319, - "sinister": 31313, - "sink": 37232, - "sink": 14551, - "sinking": 27949, - "sinks": 32710, - "sinn": 36315, - "sinner": 45380, - "sinners": 43436, - "sino": 29759, - "sins": 9345, - "sinthe": 30737, - "sinu": 37351, - "sinus": 47535, - "sio": 10807, - "siob": 40954, - "siology": 46315, - "sion": 5676, - "sion": 1015, - "sional": 14533, - "sionally": 30754, - "sions": 4060, - "sioux": 44695, - "sioux": 24954, - "sip": 16096, - "sipping": 28527, - "sir": 10708, - "sir": 3846, - "sire": 28450, - "siren": 33026, - "sirens": 35907, - "siri": 13986, - "siri": 18394, - "sirius": 23574, - "sirius": 34999, - "siriusxm": 29833, - "sirloin": 46828, - "sis": 18132, - "sis": 2580, - "sisd": 27132, - "sisi": 37892, - "siss": 42929, - "sissy": 27564, - "sist": 20520, - "sista": 37448, - "sister": 17417, - "sister": 3677, - "sisterhood": 37313, - "sisters": 6404, - "sit": 7387, - "sit": 4037, - "sitcom": 30426, - "site": 26792, - "site": 1988, - "sites": 7236, - "sith": 41499, - "sito": 42613, - "sits": 12726, - "sitt": 42988, - "sitter": 40777, - "sittin": 40887, - "sitting": 4919, - "situ": 5562, - "situ": 42536, - "situated": 22030, - "situation": 7144, - "situations": 19096, - "sity": 38177, - "sity": 5477, - "siu": 40174, - "sium": 8090, - "sius": 27595, - "siva": 20991, - "sivan": 36931, - "sive": 23572, - "sive": 1875, - "sively": 10343, - "siveness": 39667, - "sives": 23896, - "sivity": 42738, - "siwon": 29055, - "six": 5968, - "six": 4093, - "sixers": 25941, - 
"sixteen": 28677, - "sixth": 12909, - "sixties": 44948, - "sixty": 32588, - "siya": 44440, - "size": 38377, - "size": 3235, - "sized": 9832, - "sizes": 10253, - "sizing": 28330, - "sizz": 23778, - "sizzle": 47890, - "sizzling": 35799, - "sj": 7536, - "sj": 16010, - "sjo": 42012, - "sk": 909, - "sk": 2058, - "ska": 7495, - "skag": 31948, - "skan": 46772, - "skar": 27587, - "skar": 26835, - "skate": 13740, - "skate": 12745, - "skateboard": 31777, - "skateboarding": 31352, - "skater": 30337, - "skaters": 39824, - "skates": 31479, - "skc": 44551, - "ske": 6261, - "ske": 25516, - "skel": 36564, - "skelet": 27075, - "skeletal": 37369, - "skeleton": 20062, - "skeletons": 48874, - "skell": 40801, - "skep": 27772, - "skeptical": 44934, - "sker": 37640, - "sker": 33600, - "sket": 3744, - "sketch": 11767, - "sketch": 5269, - "sketchbook": 18899, - "sketched": 38581, - "sketches": 17622, - "sketching": 23228, - "sketchy": 41582, - "skey": 37453, - "ski": 3327, - "ski": 3428, - "skid": 36574, - "skid": 32099, - "skier": 42585, - "skies": 7244, - "skiing": 14400, - "skil": 24543, - "skill": 15598, - "skill": 10604, - "skilled": 17535, - "skillet": 40568, - "skills": 4113, - "skim": 33191, - "skin": 5821, - "skin": 3575, - "skincare": 12648, - "skine": 37300, - "sking": 46215, - "skinned": 42199, - "skinner": 30261, - "skinny": 42729, - "skinny": 15457, - "skins": 11594, - "skip": 39793, - "skip": 14296, - "skipped": 40639, - "skipper": 22226, - "skipping": 34867, - "skir": 8919, - "skirt": 12386, - "skirts": 24840, - "skis": 32843, - "skit": 43573, - "skitchen": 42820, - "skittles": 43213, - "sko": 15141, - "sko": 23493, - "skoda": 38668, - "skool": 26743, - "skril": 43149, - "skrillex": 43651, - "sks": 48136, - "sku": 10836, - "skul": 17561, - "skull": 34068, - "skull": 12092, - "skulls": 31804, - "skunk": 42194, - "sky": 3075, - "sky": 2390, - "skybet": 45540, - "skye": 21475, - "skyl": 43554, - "skylar": 45411, - "skyline": 14606, - "skymap": 41734, - "skynews": 40977, - 
"skype": 17069, - "skyrim": 33693, - "skysports": 39845, - "skysports": 46725, - "skywalker": 32936, - "sl": 2621, - "sl": 7489, - "sla": 2725, - "sla": 26707, - "slab": 24241, - "slabs": 42818, - "slack": 37108, - "slack": 30142, - "slade": 33546, - "slain": 35972, - "slalom": 43540, - "slam": 14891, - "slam": 10131, - "slammed": 29772, - "slams": 18907, - "slan": 44663, - "slan": 47193, - "sland": 11294, - "slang": 33655, - "slap": 48830, - "slap": 21751, - "slapped": 38861, - "slaps": 46796, - "slash": 19749, - "slat": 38966, - "slate": 17919, - "slated": 36094, - "slater": 25968, - "slaugh": 26782, - "slaughter": 19815, - "slaughtered": 46615, - "slav": 47292, - "slava": 41797, - "slave": 14029, - "slavery": 15754, - "slaves": 23833, - "slaw": 28178, - "slay": 48319, - "slay": 19380, - "slayed": 44870, - "slayer": 21605, - "slaying": 27812, - "slays": 45648, - "slc": 21972, - "sle": 1709, - "sleague": 23336, - "sled": 28438, - "sledge": 48750, - "slee": 17642, - "slee": 38977, - "sleek": 23187, - "sleep": 4656, - "sleep": 3840, - "sleeper": 28709, - "sleeping": 6982, - "sleepless": 39779, - "sleepover": 39415, - "sleeps": 16610, - "sleepy": 32572, - "sleepy": 14497, - "sleet": 36948, - "sleeve": 35270, - "sleeve": 10536, - "sleeveless": 38049, - "sleeves": 19691, - "sleg": 47650, - "sleigh": 30865, - "slender": 40331, - "slept": 20388, - "sler": 14066, - "sley": 17198, - "sley": 6496, - "sli": 1811, - "sli": 44824, - "slic": 19692, - "slice": 13431, - "sliced": 28121, - "slices": 28424, - "slick": 18341, - "slide": 27828, - "slide": 8837, - "slider": 37861, - "sliders": 40700, - "slides": 15939, - "slideshow": 42817, - "sliding": 21468, - "slife": 15448, - "sliga": 21080, - "slight": 14297, - "slightly": 8456, - "sligo": 30424, - "slike": 38744, - "slim": 35226, - "slim": 12364, - "slime": 29107, - "sling": 28021, - "sling": 32607, - "slinger": 47269, - "slions": 43363, - "slip": 39785, - "slip": 12105, - "slipknot": 41816, - "slipped": 30344, - "slipper": 
39644, - "slippers": 26509, - "slippery": 30814, - "slipping": 36301, - "slips": 30632, - "slist": 33749, - "slit": 47011, - "slive": 31652, - "slo": 4303, - "slo": 36083, - "sloan": 29110, - "sloane": 41553, - "slogan": 23398, - "slogans": 42795, - "slope": 22769, - "slopes": 24066, - "sloppy": 36154, - "slot": 14500, - "sloth": 30007, - "slots": 19238, - "slou": 48493, - "slovak": 23315, - "slovakia": 25994, - "sloven": 17018, - "slovenia": 21037, - "slow": 6674, - "slow": 5444, - "slowdown": 38421, - "slowed": 43793, - "slower": 29181, - "slowing": 29839, - "slowly": 9568, - "slows": 46855, - "slp": 45599, - "slr": 21325, - "sls": 33651, - "slt": 39283, - "sltd": 36388, - "slu": 7224, - "slu": 47456, - "slug": 34190, - "slugger": 48671, - "slum": 46754, - "slumber": 44295, - "slump": 35588, - "slur": 30476, - "slush": 39815, - "slv": 45526, - "sly": 28145, - "sly": 21062, - "sm": 978, - "sm": 2764, - "sma": 4357, - "sma": 11854, - "smack": 21280, - "smack": 30026, - "smackdown": 26138, - "smafia": 47686, - "smag": 32212, - "smal": 48379, - "small": 5244, - "small": 2442, - "smallbiz": 41724, - "smallbiz": 18987, - "smallbusiness": 21316, - "smalle": 18490, - "smaller": 12431, - "smallest": 18686, - "smalls": 41696, - "sman": 9612, - "smar": 3201, - "smart": 5383, - "smart": 4115, - "smartcities": 34822, - "smartcity": 33973, - "smarter": 18990, - "smartest": 37092, - "smarthome": 47726, - "smartphone": 11290, - "smartphones": 22212, - "smartwatch": 35798, - "smash": 17258, - "smash": 10332, - "smashbros": 44897, - "smashed": 18410, - "smashes": 45657, - "smashing": 19632, - "smatter": 16537, - "smb": 30446, - "smc": 31375, - "smc": 28312, - "smd": 34582, - "sme": 11758, - "sme": 15650, - "smear": 37546, - "smel": 28476, - "smell": 9688, - "smelling": 32493, - "smells": 14668, - "smelly": 46145, - "smen": 15961, - "smer": 48526, - "smere": 39629, - "smes": 26141, - "smg": 46876, - "smh": 9623, - "smi": 5655, - "smi": 40049, - "smil": 33937, - "smile": 27641, - 
"smile": 3490, - "smiled": 34362, - "smiles": 8726, - "smiley": 22925, - "smiling": 9200, - "smir": 24667, - "smith": 10527, - "smith": 2915, - "smiths": 27872, - "smithson": 25372, - "smithsonian": 31209, - "smm": 19510, - "smma": 42370, - "smo": 2513, - "smo": 13437, - "smobile": 38923, - "smog": 44425, - "smoke": 20381, - "smoke": 6664, - "smoked": 11161, - "smoker": 32348, - "smokers": 29571, - "smokes": 40336, - "smokey": 23670, - "smokin": 32825, - "smoking": 9038, - "smoky": 25549, - "smol": 29939, - "smol": 40403, - "smoo": 5430, - "smooth": 10958, - "smooth": 8990, - "smoother": 44271, - "smoothie": 16668, - "smoothies": 34458, - "smoothly": 32380, - "smore": 48323, - "smp": 32260, - "smriti": 49227, - "sms": 10409, - "smt": 26672, - "smtown": 26072, - "smu": 10878, - "smu": 30458, - "smug": 41021, - "smugg": 28130, - "smuggling": 34146, - "smur": 24708, - "smusic": 19191, - "smw": 44929, - "smx": 46699, - "smy": 14381, - "smyth": 44822, - "sn": 1672, - "sn": 5844, - "sna": 4032, - "snack": 47548, - "snack": 10039, - "snacking": 46474, - "snacks": 12349, - "snag": 34789, - "snag": 28043, - "snagged": 48534, - "snail": 23132, - "snails": 34928, - "snake": 30133, - "snake": 8798, - "snakes": 19605, - "snap": 4578, - "snap": 7404, - "snapback": 31234, - "snapchat": 7799, - "snapmatic": 45907, - "snapp": 10185, - "snapped": 15543, - "snapper": 31677, - "snapping": 31581, - "snaps": 16890, - "snapshot": 18243, - "snar": 30810, - "snare": 40651, - "snat": 18457, - "snatch": 35302, - "snatched": 44821, - "snation": 14362, - "snazzy": 48963, - "snc": 39918, - "sne": 3791, - "sne": 46503, - "sneak": 27871, - "sneak": 6917, - "sneaker": 31698, - "sneaker": 24781, - "sneakers": 17397, - "sneaking": 34633, - "sneakpeek": 47831, - "sneaks": 40926, - "sneaky": 21293, - "snee": 42095, - "snell": 46410, - "sner": 31424, - "snes": 26667, - "snews": 18623, - "snf": 47651, - "sng": 41549, - "snhl": 43093, - "sni": 7186, - "sni": 35570, - "snickers": 49127, - "sniff": 37841, 
- "snip": 42954, - "sniper": 22157, - "snippet": 37531, - "snippets": 44001, - "snl": 16011, - "sno": 8567, - "sno": 17802, - "snoo": 11352, - "snooker": 25657, - "snoop": 44503, - "snoop": 27754, - "snoopdogg": 48388, - "snoopy": 41967, - "snooze": 40718, - "snor": 16590, - "snoring": 44560, - "snorkel": 44285, - "snorkeling": 48103, - "snow": 3880, - "snow": 2583, - "snowball": 39254, - "snowboard": 33403, - "snowboarding": 32397, - "snowday": 37982, - "snowden": 32154, - "snowdon": 47107, - "snowdonia": 36088, - "snowed": 45073, - "snowfall": 21714, - "snowflake": 33447, - "snowflakes": 38618, - "snowing": 21443, - "snowman": 22668, - "snowstorm": 38777, - "snowy": 14191, - "snp": 15301, - "sns": 36343, - "snsd": 27961, - "snt": 34834, - "snu": 9694, - "snuck": 36522, - "snug": 45169, - "snuggle": 31327, - "snuggles": 48165, - "sny": 17526, - "snyder": 22106, - "snz": 37678, - "so": 759, - "so": 706, - "soa": 39584, - "soak": 24839, - "soaked": 26592, - "soaking": 26750, - "soap": 26086, - "soap": 11088, - "soaps": 40958, - "soar": 48997, - "soar": 22241, - "soaring": 27968, - "soars": 41348, - "sob": 24900, - "sob": 35507, - "sobbing": 36691, - "sober": 30969, - "sober": 24487, - "sobre": 42768, - "sobri": 49308, - "sobs": 43636, - "soc": 3253, - "soc": 7741, - "soca": 49239, - "socal": 46470, - "socal": 20450, - "soccer": 16268, - "soccer": 4233, - "socceroos": 41997, - "socent": 30831, - "sochi": 21014, - "soci": 1720, - "social": 4803, - "social": 2346, - "socialism": 23372, - "socialist": 18450, - "socialists": 43839, - "socially": 24555, - "socialmedi": 23813, - "socialmedia": 9600, - "socialmediamarketing": 31790, - "societal": 40058, - "societies": 25855, - "society": 3757, - "socio": 44319, - "socio": 42790, - "sociology": 32373, - "sock": 29801, - "sock": 18277, - "socket": 28657, - "socks": 8774, - "socorro": 46409, - "socute": 45086, - "sod": 31435, - "soda": 13533, - "sodium": 29070, - "soe": 44136, - "soe": 25498, - "soever": 34024, - "sof": 1571, 
- "sof": 41187, - "sofa": 15723, - "soff": 35290, - "soff": 30684, - "sofficial": 20563, - "sofi": 41537, - "sofia": 18914, - "sofinstagram": 17301, - "soft": 12778, - "soft": 3773, - "softball": 8369, - "softer": 44462, - "softhe": 23127, - "softly": 34958, - "software": 35941, - "software": 5847, - "softwitter": 11311, - "sog": 44775, - "soggy": 41168, - "sohn": 49267, - "soho": 47749, - "soho": 17592, - "soi": 40495, - "soil": 33417, - "soil": 9216, - "soils": 34891, - "soir": 43427, - "sok": 43456, - "sol": 1175, - "sol": 9941, - "sola": 40086, - "solace": 42567, - "solar": 16990, - "solar": 5199, - "solareclipse": 44727, - "sold": 33116, - "sold": 3939, - "soldi": 5098, - "soldier": 9355, - "soldiers": 7547, - "sole": 10519, - "sole": 8576, - "soleil": 33148, - "solely": 27913, - "solent": 47783, - "soles": 22682, - "soli": 3911, - "solic": 19369, - "solicitor": 45647, - "solicitors": 46000, - "solid": 30626, - "solid": 6148, - "solidar": 10415, - "solidarity": 10983, - "solidi": 46136, - "solids": 49070, - "solihull": 45293, - "solit": 37039, - "solitaire": 47257, - "solitary": 33094, - "solitude": 33199, - "solo": 17626, - "solo": 5797, - "soloist": 46391, - "solom": 15768, - "solomon": 19785, - "solos": 44868, - "solst": 20298, - "solstice": 21359, - "solu": 2487, - "solution": 4575, - "solutions": 5140, - "solve": 8917, - "solved": 13451, - "solves": 42740, - "solving": 15581, - "som": 734, - "som": 10672, - "soma": 36170, - "somal": 40281, - "somali": 26231, - "somalia": 17051, - "somaliland": 43315, - "some": 1132, - "some": 836, - "somebody": 8305, - "someday": 17127, - "somehow": 11735, - "someone": 2100, - "somer": 9656, - "somerhalder": 33990, - "somerset": 14926, - "somerville": 41409, - "somes": 38124, - "somethin": 33541, - "something": 28316, - "something": 2006, - "sometime": 21464, - "sometimes": 4237, - "somewhat": 17864, - "somewhere": 8119, - "somm": 42726, - "somme": 30625, - "sommer": 44954, - "somos": 24951, - "son": 1176, - "son": 825, - 
"sona": 21249, - "sonam": 40096, - "sonar": 48235, - "sonata": 37009, - "sone": 29599, - "song": 6868, - "song": 2295, - "songs": 4641, - "songwriter": 13034, - "songwriters": 39583, - "songwriting": 33567, - "songz": 49302, - "soni": 34899, - "soni": 35911, - "sonia": 20409, - "sonic": 23785, - "sonic": 9132, - "sonics": 48511, - "sonja": 46102, - "sonline": 23412, - "sonny": 43000, - "sonny": 20880, - "sono": 44109, - "sonom": 48596, - "sonoma": 26269, - "sons": 5502, - "sonsof": 46676, - "sont": 31063, - "sonthe": 40923, - "sony": 16042, - "sony": 8748, - "sonya": 39172, - "soo": 5517, - "soo": 8602, - "soom": 39771, - "soon": 27559, - "soon": 1745, - "sooner": 18968, - "sooners": 30449, - "sooo": 11526, - "soooo": 13658, - "sooooo": 21199, - "soooooo": 34859, - "soor": 46698, - "soothe": 44424, - "soothing": 27730, - "sop": 3974, - "sop": 19194, - "soph": 34963, - "sophi": 6192, - "sophia": 16790, - "sophie": 38648, - "sophie": 12357, - "sophistic": 17646, - "sophisticated": 20833, - "sophom": 13696, - "sophomore": 15242, - "sophomores": 47645, - "soprano": 28880, - "soproud": 44479, - "sor": 1852, - "sor": 16872, - "sora": 38719, - "sorbet": 39994, - "sore": 43330, - "sore": 15454, - "sored": 6731, - "soren": 38907, - "sorg": 28152, - "sori": 38588, - "sorority": 30059, - "soros": 33248, - "sorren": 44012, - "sorrow": 28020, - "sorrows": 47924, - "sorry": 25745, - "sorry": 3675, - "sorrynotsorry": 37105, - "sort": 8450, - "sorta": 34700, - "sorted": 13221, - "sorting": 19198, - "sorts": 12577, - "sory": 16257, - "sos": 25145, - "sos": 5792, - "sosa": 45433, - "sosfam": 47709, - "sot": 41542, - "sot": 34116, - "sothe": 32145, - "sotho": 45496, - "soto": 27947, - "sotto": 26047, - "sotu": 32286, - "sou": 1101, - "sou": 24293, - "sought": 18874, - "soul": 8701, - "soul": 3755, - "soulful": 30196, - "soulmate": 38130, - "souls": 10951, - "soun": 19474, - "sound": 5236, - "sound": 3608, - "soundcheck": 31394, - "soundcloud": 15190, - "sounded": 28287, - "sounders": 
44933, - "sounding": 21351, - "sounds": 5694, - "soundtrack": 11389, - "soup": 7077, - "soups": 45052, - "sour": 2235, - "sour": 12049, - "source": 23698, - "source": 3634, - "sourced": 23340, - "sources": 5124, - "sourcing": 19574, - "sourdough": 29921, - "souri": 11674, - "sous": 32093, - "sousa": 46296, - "sout": 38156, - "sout": 32732, - "south": 2938, - "south": 2045, - "southafrica": 15184, - "southampton": 15767, - "southbank": 44173, - "southbound": 22932, - "southeast": 13942, - "southeastern": 26813, - "southend": 25583, - "souther": 33330, - "southern": 17704, - "southern": 5036, - "southgate": 47262, - "southkorea": 43552, - "southport": 37446, - "southside": 36436, - "southsudan": 30419, - "southwark": 39098, - "southwe": 46443, - "southwest": 13320, - "southwestern": 30157, - "souven": 20210, - "souvenir": 24811, - "souvenirs": 48460, - "souza": 29424, - "sov": 29737, - "sover": 31876, - "sovere": 17736, - "sovereign": 29418, - "sovereign": 26337, - "sovereignty": 31701, - "soviet": 14274, - "sow": 33089, - "sowe": 36130, - "soweto": 47070, - "sown": 49369, - "sox": 39556, - "sox": 8657, - "soy": 16524, - "soy": 15010, - "soybean": 34606, - "soybeans": 40840, - "soyu": 39578, - "soyuz": 43842, - "sp": 588, - "sp": 4393, - "spa": 7852, - "spa": 6692, - "spac": 10336, - "space": 7857, - "space": 2138, - "spacecraft": 25940, - "spaces": 9006, - "spaceship": 34317, - "spacex": 22511, - "spacey": 48770, - "spacious": 24769, - "spad": 45362, - "spade": 32562, - "spades": 48368, - "spaghetti": 18440, - "spain": 5083, - "spal": 26018, - "spam": 29712, - "spam": 14624, - "span": 4270, - "span": 14537, - "spandex": 41686, - "spani": 16721, - "spaniel": 35435, - "spanish": 29966, - "spanish": 6013, - "spann": 25323, - "spanning": 38638, - "spans": 45407, - "spaper": 34548, - "spar": 3378, - "spar": 34576, - "spare": 12615, - "spares": 39505, - "spark": 9555, - "spark": 11047, - "sparked": 32647, - "sparkle": 18287, - "sparkles": 36410, - "sparkling": 17893, - 
"sparkly": 30542, - "sparks": 15046, - "sparky": 47198, - "sparring": 42161, - "sparrow": 22888, - "spart": 10143, - "sparta": 38401, - "spartan": 26582, - "spartan": 24225, - "spartans": 20457, - "sparty": 36477, - "spas": 31714, - "spati": 19200, - "spatial": 22022, - "spaw": 31605, - "spawn": 29166, - "spay": 40634, - "spc": 20492, - "spca": 37018, - "spd": 37717, - "spd": 28307, - "spdwy": 45981, - "spe": 876, - "spe": 36676, - "speak": 20599, - "speak": 4208, - "speake": 46077, - "speaker": 25764, - "speaker": 4914, - "speakers": 7675, - "speaking": 3714, - "speaks": 5661, - "spear": 23277, - "spear": 30420, - "speare": 43859, - "spears": 20242, - "spec": 1711, - "spec": 18596, - "speci": 1969, - "special": 11422, - "special": 1689, - "specialist": 10630, - "specialists": 21719, - "speciality": 46904, - "specialized": 23265, - "specializes": 48533, - "specially": 4513, - "specials": 11983, - "specialty": 18262, - "species": 6330, - "specific": 10528, - "specifically": 17174, - "specification": 46394, - "specifications": 39705, - "specified": 48114, - "specimen": 30263, - "specimens": 42715, - "specs": 24093, - "spect": 3416, - "spectac": 7242, - "spectacle": 34342, - "spectacular": 8404, - "spectator": 32372, - "spectators": 39306, - "spective": 6633, - "spector": 48676, - "spectral": 45441, - "spectre": 35998, - "spectro": 27646, - "spectrum": 13532, - "specul": 19209, - "speculation": 30898, - "sped": 38813, - "spee": 4050, - "speech": 19556, - "speech": 4902, - "speeches": 25208, - "speechless": 23152, - "speed": 6860, - "speed": 4163, - "speeding": 27264, - "speeds": 22017, - "speedway": 11480, - "speedy": 21603, - "spel": 41887, - "spell": 22784, - "spell": 11230, - "spelled": 24339, - "spelling": 15614, - "spells": 25335, - "spelt": 38316, - "spen": 5087, - "spence": 33324, - "spencer": 27509, - "spencer": 10678, - "spend": 4664, - "spending": 5961, - "spends": 22508, - "spent": 4429, - "speople": 33035, - "sper": 8213, - "sper": 15313, - "sperm": 35781, 
- "sperson": 22687, - "spf": 34973, - "spg": 34623, - "sph": 28909, - "sph": 24684, - "sphe": 33691, - "spher": 18349, - "sphere": 6987, - "spheres": 37478, - "spheric": 21744, - "sphin": 39237, - "sphinx": 46487, - "spho": 20442, - "sphoto": 38594, - "sphy": 43808, - "spi": 3174, - "spi": 37080, - "spic": 17264, - "spice": 29761, - "spice": 10141, - "spiced": 24267, - "spicer": 37627, - "spices": 21194, - "spicy": 10915, - "spide": 36801, - "spider": 11963, - "spider": 7622, - "spiderman": 39808, - "spiderman": 18427, - "spiders": 23141, - "spidey": 41706, - "spie": 28573, - "spie": 28746, - "spied": 43998, - "spiegel": 45351, - "spiel": 28435, - "spiel": 37690, - "spielberg": 37569, - "spies": 25374, - "spieth": 43254, - "spike": 35306, - "spike": 15310, - "spiked": 47014, - "spikes": 29582, - "spil": 47765, - "spill": 43933, - "spill": 18006, - "spilled": 33206, - "spilling": 49006, - "spills": 35796, - "spin": 6288, - "spin": 9226, - "spinach": 14747, - "spinal": 23925, - "spine": 48221, - "spine": 19646, - "sping": 47113, - "spinner": 29924, - "spinning": 13987, - "spino": 40848, - "spinoff": 42513, - "spinrilla": 46064, - "spins": 27243, - "spion": 39604, - "spionage": 41838, - "spir": 3745, - "spiral": 19873, - "spiration": 38126, - "spire": 27439, - "spired": 40650, - "spires": 46938, - "spiri": 4024, - "spirit": 18224, - "spirit": 4071, - "spirited": 34701, - "spirits": 13192, - "spiritu": 7237, - "spiritual": 46076, - "spiritual": 9473, - "spirituality": 22165, - "spiro": 40085, - "spit": 18115, - "spit": 23177, - "spite": 26060, - "spitfire": 31126, - "spitting": 40721, - "spl": 2470, - "spl": 33052, - "spla": 4809, - "splac": 16059, - "splace": 38743, - "splash": 43641, - "splash": 11879, - "splat": 15733, - "splatoon": 22565, - "splay": 3169, - "splen": 18552, - "splend": 29861, - "splendid": 21016, - "splendor": 46262, - "splin": 38090, - "split": 25443, - "split": 9109, - "splits": 34897, - "splitting": 37210, - "splus": 40866, - "spn": 35467, - 
"spn": 19414, - "spnfamily": 38566, - "spo": 1261, - "spo": 21085, - "spock": 43918, - "spoil": 25600, - "spoiled": 21399, - "spoiler": 16512, - "spoilers": 18326, - "spoils": 42436, - "spoilt": 35358, - "spokane": 24528, - "spoke": 13890, - "spoke": 6518, - "spoken": 12979, - "spokesman": 31632, - "spokesperson": 26234, - "spol": 22476, - "spol": 8132, - "spoli": 34301, - "spolice": 37406, - "spon": 1715, - "spon": 48216, - "sponge": 22861, - "sponge": 24345, - "spongebob": 25089, - "spons": 5597, - "sponsor": 10424, - "sponsor": 7574, - "sponsored": 7197, - "sponsoring": 16181, - "sponsors": 11005, - "sponsorship": 17632, - "spontaneous": 32465, - "spoo": 11248, - "spooky": 15369, - "spool": 49152, - "spoon": 27001, - "spoon": 14024, - "spoons": 29661, - "spor": 1475, - "spor": 33746, - "sport": 4379, - "sport": 2364, - "sporting": 32620, - "sporting": 8944, - "sports": 6436, - "sports": 2054, - "sportsc": 40114, - "sportscar": 46931, - "sportscenter": 39157, - "sportsman": 39020, - "sportsmanship": 34858, - "sportsnet": 34144, - "sportswear": 39747, - "sporty": 33346, - "spot": 3223, - "spot": 3049, - "spotify": 7193, - "spotlight": 7901, - "spots": 7670, - "spotted": 4533, - "spotter": 30742, - "spotting": 15885, - "spouse": 24724, - "spout": 48993, - "spp": 47567, - "spr": 1536, - "spr": 19417, - "spra": 12966, - "spraw": 46590, - "spray": 37885, - "spray": 10449, - "sprayed": 40022, - "spraying": 39224, - "spre": 18740, - "spread": 20620, - "spread": 5284, - "spreading": 11821, - "spreads": 27579, - "spree": 21851, - "spri": 35498, - "spride": 26685, - "spring": 5166, - "spring": 2420, - "springbreak": 37753, - "springer": 30117, - "springfield": 16599, - "springs": 7308, - "springst": 32132, - "springsteen": 28367, - "springtime": 28285, - "springtraining": 49364, - "springwatch": 29239, - "sprink": 15817, - "sprinkle": 42897, - "sprinkler": 48754, - "sprinkles": 37326, - "sprint": 29248, - "sprint": 10751, - "sprinter": 36947, - "sprints": 36404, - 
"sprite": 32544, - "spro": 13902, - "spro": 37403, - "sproject": 37802, - "sproud": 37686, - "sprout": 35863, - "sprouts": 25756, - "spru": 17041, - "spruce": 23812, - "sprung": 32968, - "sps": 13869, - "spu": 23566, - "spun": 47922, - "spun": 32852, - "spur": 15206, - "spur": 20361, - "spurs": 10916, - "spursofficial": 45290, - "sput": 47521, - "spx": 20584, - "spy": 13861, - "spy": 6656, - "spyder": 39952, - "spying": 36227, - "sq": 9370, - "sq": 11590, - "sqft": 41912, - "sql": 42759, - "sql": 18938, - "sqm": 47978, - "sqn": 41209, - "squ": 1653, - "squad": 13892, - "squad": 4234, - "squadron": 18579, - "squads": 36590, - "square": 19314, - "square": 3999, - "squared": 32967, - "squares": 26972, - "squash": 13312, - "squat": 44628, - "squat": 30680, - "squats": 40213, - "sque": 9721, - "sque": 8097, - "squee": 14420, - "squeeze": 21684, - "squeezed": 40413, - "squid": 42057, - "squid": 22553, - "squir": 9683, - "squire": 48090, - "squirrel": 14004, - "squirrels": 26623, - "squish": 42607, - "squishy": 47001, - "sr": 3437, - "sr": 5428, - "srbachchan": 32353, - "src": 23445, - "sre": 17748, - "sri": 11051, - "sri": 9276, - "sridevi": 46301, - "srilan": 15559, - "srilanka": 16922, - "srin": 26818, - "srinagar": 33671, - "srini": 41899, - "sriracha": 42743, - "sris": 27851, - "srisri": 32966, - "srk": 44982, - "srk": 11216, - "srl": 33808, - "srp": 43004, - "srs": 41764, - "srsly": 44179, - "srt": 28139, - "sru": 44152, - "srugby": 40526, - "ss": 690, - "ss": 632, - "ssa": 6088, - "ssal": 31330, - "ssal": 35936, - "ssb": 37511, - "ssc": 21692, - "ssc": 20364, - "ssd": 23107, - "sse": 9030, - "sse": 8938, - "ssed": 38755, - "ssed": 1804, - "ssel": 17402, - "ssel": 19373, - "sseldorf": 47792, - "ssell": 42388, - "ssels": 8355, - "ssen": 39408, - "ssen": 22645, - "sser": 20445, - "sses": 1802, - "ssett": 44103, - "ssf": 33239, - "ssg": 40707, - "ssh": 48866, - "ssi": 834, - "ssi": 14953, - "ssia": 22238, - "ssian": 31218, - "ssible": 47099, - "ssic": 27774, - "ssic": 
17077, - "ssie": 7572, - "ssier": 26422, - "ssil": 15026, - "ssin": 42660, - "ssing": 2112, - "ssion": 16050, - "ssion": 1627, - "ssional": 13727, - "ssionism": 24787, - "ssionist": 27682, - "ssions": 4137, - "ssive": 2734, - "ssively": 28060, - "ssl": 32195, - "ssler": 30287, - "ssly": 24904, - "ssn": 39116, - "ssnhq": 47998, - "sso": 25900, - "sso": 7914, - "ssoccer": 32546, - "sson": 36124, - "sson": 7271, - "ssor": 35152, - "ssp": 31101, - "ssr": 39880, - "sss": 11176, - "ssss": 30676, - "ssss": 15880, - "sssss": 24298, - "sst": 40396, - "ssu": 35351, - "ssummit": 49301, - "ssus": 31286, - "ssw": 36937, - "ssy": 22519, - "ssy": 8661, - "st": 522, - "st": 545, - "sta": 1363, - "sta": 2745, - "stab": 7726, - "stab": 29974, - "stabbed": 24534, - "stabbing": 25474, - "stabil": 42576, - "stabili": 23903, - "stability": 16716, - "stable": 44427, - "stable": 10492, - "stables": 34218, - "stac": 10175, - "stacey": 41653, - "stacey": 24262, - "stache": 23616, - "stack": 24723, - "stack": 11257, - "stacked": 24990, - "stacking": 39836, - "stacks": 24734, - "stacy": 26628, - "stad": 15832, - "stad": 16485, - "stade": 38198, - "stadi": 26587, - "stadion": 48815, - "stadium": 3390, - "stadiums": 38852, - "stadt": 22713, - "staf": 2367, - "staff": 31188, - "staff": 2813, - "staffer": 38494, - "staffers": 44994, - "staffing": 32932, - "stafford": 25006, - "staffordshire": 29198, - "staffs": 36098, - "stag": 12088, - "stag": 20277, - "stage": 23182, - "stage": 2170, - "staged": 19906, - "stages": 12297, - "staggering": 37315, - "staging": 27026, - "stagram": 19503, - "stags": 45936, - "stain": 3933, - "stain": 14603, - "stained": 13751, - "staining": 32523, - "stainless": 12320, - "stains": 32008, - "stair": 7240, - "stair": 17662, - "staircase": 22777, - "stairs": 9577, - "stairway": 45559, - "stak": 39144, - "stake": 15955, - "stake": 7937, - "stakeholder": 39122, - "stakeholders": 22968, - "stakes": 7519, - "staking": 47082, - "stal": 3861, - "stal": 5535, - "stale": 42471, 
- "stalert": 25450, - "stalin": 28346, - "stalk": 40826, - "stalk": 14878, - "stalker": 26777, - "stalking": 24721, - "stalks": 45886, - "stall": 24636, - "stall": 12058, - "stalled": 40362, - "stallion": 28273, - "stallions": 44787, - "stallone": 40969, - "stalls": 25427, - "stam": 4663, - "stamatic": 30904, - "stamford": 27843, - "stamina": 48753, - "stamp": 28694, - "stamp": 12771, - "stampcollecting": 42852, - "stamped": 38356, - "stampede": 25384, - "stamps": 13827, - "stan": 2203, - "stan": 2434, - "stana": 33311, - "stanbul": 11231, - "stance": 48900, - "stance": 3542, - "stances": 15054, - "stand": 1819, - "stand": 2087, - "standalone": 44887, - "standard": 35780, - "standard": 5807, - "standardi": 30247, - "standards": 9022, - "standby": 36184, - "standing": 39934, - "standing": 2862, - "standings": 19835, - "standoff": 31821, - "standout": 23131, - "standre": 48309, - "stands": 6446, - "standup": 35108, - "standup": 24964, - "standwith": 19540, - "stanford": 36219, - "stanford": 15087, - "stang": 12536, - "stani": 38228, - "stanis": 37711, - "stanley": 19048, - "stanley": 10079, - "stanleycup": 28662, - "stans": 26564, - "stant": 41576, - "stant": 4906, - "stanton": 25400, - "stap": 10438, - "staple": 22695, - "staples": 23646, - "stapleton": 45228, - "star": 993, - "star": 1565, - "starbuck": 48519, - "starbucks": 9499, - "starch": 47837, - "starcraft": 48871, - "stardom": 44616, - "stardust": 34337, - "stare": 18094, - "stared": 47772, - "stares": 37916, - "starfish": 44283, - "stargate": 41099, - "stargazing": 49328, - "staring": 13800, - "stark": 40446, - "stark": 15353, - "starlight": 32197, - "starling": 46205, - "starmagic": 48023, - "starplus": 37815, - "starr": 19186, - "starred": 24180, - "starrer": 41311, - "starring": 6660, - "starry": 30963, - "stars": 2895, - "starship": 37166, - "start": 17466, - "start": 1572, - "started": 2760, - "starter": 7800, - "starters": 22222, - "starting": 2530, - "startrek": 30642, - "startrek": 15349, - 
"starts": 3105, - "startu": 6996, - "startup": 18049, - "startup": 5882, - "startups": 9056, - "starve": 46957, - "starving": 30473, - "starwar": 17287, - "starwars": 26239, - "starwars": 7887, - "starz": 25928, - "stas": 19866, - "stash": 27711, - "stasy": 45942, - "stat": 3004, - "stat": 15216, - "state": 3492, - "state": 1295, - "statec": 33931, - "stated": 19629, - "statedept": 41458, - "statefair": 40305, - "statement": 5401, - "statements": 19513, - "staten": 38263, - "stateof": 35195, - "states": 22125, - "states": 4218, - "statesman": 35301, - "stateu": 44248, - "statewide": 29561, - "stati": 9622, - "static": 16363, - "stating": 35147, - "station": 13498, - "station": 2631, - "stationary": 29493, - "stationed": 47618, - "stationery": 33851, - "stations": 10051, - "statistical": 29349, - "statistics": 14165, - "stats": 7294, - "statu": 32481, - "statue": 8222, - "statues": 24363, - "status": 6414, - "stau": 28550, - "staur": 3709, - "stav": 20285, - "stax": 32235, - "stay": 4714, - "stay": 2277, - "stayed": 13805, - "staying": 8993, - "stays": 13311, - "staytuned": 39285, - "stc": 29859, - "std": 30477, - "ste": 795, - "ste": 2686, - "stea": 46614, - "stead": 16101, - "stead": 11031, - "steadily": 35049, - "steady": 12937, - "steak": 26955, - "steak": 8913, - "steakhouse": 35031, - "steaks": 30655, - "steal": 37070, - "steal": 10181, - "stealing": 14242, - "steals": 20224, - "stealth": 25327, - "steam": 10962, - "steam": 6972, - "steamboat": 41121, - "steamed": 29007, - "steamer": 49075, - "steaming": 43746, - "steampunk": 24130, - "steamy": 43104, - "stec": 46713, - "stech": 48949, - "stech": 32455, - "sted": 20426, - "sted": 1356, - "stee": 31793, - "steed": 48293, - "steel": 6938, - "steel": 4726, - "steele": 19460, - "steelers": 14430, - "steen": 42851, - "steen": 18625, - "steep": 28648, - "steep": 20714, - "steer": 27612, - "steering": 19833, - "stef": 29158, - "stefan": 15004, - "stefan": 18829, - "stefani": 38319, - "stefano": 30719, - "steff": 
30075, - "stein": 13653, - "stein": 5818, - "steiner": 36314, - "stel": 9102, - "stel": 10798, - "stell": 22355, - "stella": 46178, - "stella": 17869, - "stellar": 13810, - "stellen": 42754, - "stem": 24342, - "stem": 6761, - "stemc": 40486, - "stems": 31503, - "sten": 7652, - "sten": 7877, - "stencil": 47854, - "stennis": 45636, - "step": 15572, - "step": 3348, - "steph": 3522, - "steph": 16251, - "stephan": 37312, - "stephani": 48121, - "stephanie": 14361, - "stephen": 10421, - "stephen": 6078, - "stephenking": 46361, - "stephens": 22256, - "stephenson": 37280, - "stepped": 18384, - "stepping": 15906, - "steps": 5408, - "ster": 1022, - "ster": 881, - "stere": 9229, - "stered": 6935, - "stereo": 15992, - "stereo": 17400, - "stereotypes": 27890, - "steria": 38804, - "stering": 14175, - "sterling": 45790, - "sterling": 9378, - "stern": 36254, - "stern": 2945, - "steroids": 37670, - "sterone": 39418, - "sters": 2132, - "stery": 24232, - "stest": 8556, - "stev": 11640, - "steve": 7412, - "steve": 3803, - "steven": 10973, - "steven": 8016, - "stevens": 13877, - "stevenson": 25091, - "stevie": 42104, - "stevie": 18969, - "stew": 17906, - "stewar": 28453, - "steward": 34980, - "steward": 43355, - "stewards": 49294, - "stewardship": 36720, - "stewart": 8120, - "stfu": 47000, - "stg": 48387, - "stgeorge": 43698, - "sth": 13456, - "sth": 34004, - "sthe": 16491, - "sthel": 42863, - "sti": 860, - "sti": 12439, - "stia": 26492, - "stible": 25835, - "stic": 5868, - "stic": 1561, - "stical": 16660, - "stically": 19041, - "stick": 5483, - "stick": 4987, - "sticker": 11270, - "stickers": 11613, - "sticking": 21021, - "sticks": 10016, - "sticky": 18887, - "stics": 5449, - "stie": 38164, - "stie": 11000, - "stier": 42069, - "sties": 16428, - "stiff": 43471, - "stiff": 21441, - "stig": 4088, - "stig": 42551, - "stigate": 15390, - "stigma": 20619, - "stik": 42247, - "stil": 21790, - "stil": 37519, - "stiles": 33028, - "still": 13209, - "still": 1170, - "stills": 20259, - "stim": 
18269, - "stime": 24711, - "stimul": 16434, - "stimulate": 42380, - "stimulating": 41237, - "stimulation": 39530, - "stimulus": 47283, - "stin": 2588, - "stin": 4025, - "stina": 22359, - "stine": 7098, - "sting": 19868, - "sting": 1271, - "stingly": 49332, - "stingray": 43229, - "stink": 38213, - "stinky": 44957, - "stino": 40658, - "stint": 33531, - "stion": 10812, - "stip": 39869, - "stips": 44756, - "stique": 43305, - "stir": 12416, - "stir": 19564, - "stirling": 23128, - "stirring": 39205, - "stis": 45224, - "stit": 14110, - "stitch": 30003, - "stitch": 14771, - "stitched": 36540, - "stitcher": 48204, - "stitches": 32360, - "stitching": 45208, - "stitu": 14585, - "stitutes": 40479, - "stive": 22426, - "stix": 48829, - "stjohn": 36153, - "stl": 14179, - "stl": 12527, - "stlblues": 44138, - "stlcards": 28644, - "stle": 7698, - "stles": 48638, - "stlouis": 40358, - "stlouis": 39516, - "stm": 28333, - "stn": 27175, - "sto": 928, - "sto": 5723, - "stock": 5899, - "stock": 3206, - "stocked": 23552, - "stockholm": 16024, - "stocki": 42944, - "stocking": 17335, - "stockings": 28040, - "stockmarket": 40359, - "stockport": 35569, - "stocks": 9321, - "stockton": 26130, - "stoday": 22392, - "stok": 43782, - "stoke": 31338, - "stoke": 13550, - "stoked": 13160, - "stokes": 27512, - "stol": 11401, - "stol": 6700, - "stole": 10995, - "stolen": 8704, - "stolic": 45020, - "stom": 2343, - "stom": 38068, - "stoma": 43545, - "stomach": 14722, - "stomp": 40165, - "stomping": 46144, - "ston": 4101, - "ston": 1839, - "stone": 7694, - "stone": 2441, - "stoned": 36248, - "stonehenge": 42417, - "stoner": 35131, - "stoner": 29115, - "stones": 42659, - "stones": 6885, - "stonewall": 39688, - "stoney": 44198, - "stony": 41717, - "stony": 35691, - "stoo": 24505, - "stood": 9151, - "stool": 34413, - "stool": 22314, - "stop": 6005, - "stop": 1691, - "stopbrexit": 48680, - "stopp": 15738, - "stopped": 6015, - "stopper": 32147, - "stoppers": 34457, - "stopping": 10735, - "stops": 9822, - 
"stopthe": 26463, - "stor": 809, - "stor": 17740, - "storage": 6824, - "store": 17769, - "store": 2183, - "stored": 28257, - "stores": 6370, - "storey": 24025, - "storians": 34628, - "stories": 3784, - "storing": 40087, - "stork": 46452, - "storm": 7434, - "storm": 2819, - "stormed": 45939, - "stormhour": 12161, - "storming": 24842, - "storms": 6464, - "stormtrooper": 49218, - "stormy": 20075, - "stors": 7178, - "story": 6512, - "story": 1134, - "storyline": 37079, - "storymonth": 23717, - "storyteller": 35882, - "storytelling": 14457, - "storytime": 44197, - "stos": 19281, - "stou": 37168, - "stour": 37361, - "stour": 21928, - "stout": 16550, - "stove": 21423, - "stow": 44284, - "stow": 17046, - "stowe": 34196, - "stown": 28071, - "stown": 7939, - "stp": 30576, - "stpatrick": 21343, - "stpatricksday": 22747, - "str": 807, - "str": 15913, - "stra": 1894, - "stra": 6253, - "strack": 46861, - "strada": 31134, - "strade": 48968, - "straigh": 31016, - "straight": 22114, - "straight": 4241, - "strain": 16887, - "strains": 38067, - "strait": 22946, - "straits": 41984, - "stral": 23289, - "stralia": 42510, - "stran": 18411, - "strand": 18214, - "strand": 17826, - "stranded": 22975, - "strang": 11138, - "strange": 33380, - "strange": 7288, - "strangely": 37566, - "stranger": 35541, - "stranger": 14149, - "strangers": 20684, - "strangerthings": 43271, - "strangest": 46740, - "strap": 13946, - "strapped": 40922, - "straps": 31213, - "stras": 36814, - "stras": 42125, - "strasbourg": 39576, - "strat": 11345, - "strat": 32925, - "strata": 47278, - "strate": 3532, - "strate": 28758, - "strategi": 49102, - "strategic": 10246, - "strategically": 45706, - "strategies": 9942, - "strategist": 37180, - "strategy": 5637, - "strates": 45724, - "stratford": 23955, - "strath": 21997, - "stration": 3156, - "strato": 28878, - "strauss": 32033, - "strava": 34625, - "stravel": 43494, - "straw": 7430, - "straw": 16438, - "strawberries": 17796, - "strawberry": 10233, - "straws": 33048, - 
"stray": 30784, - "stray": 15712, - "stre": 1079, - "stre": 19652, - "stread": 27797, - "streak": 11749, - "streaks": 42092, - "stream": 8659, - "stream": 3322, - "streamed": 26280, - "streamer": 25178, - "streamers": 19937, - "streaming": 6278, - "streamline": 44917, - "streams": 13545, - "stree": 35082, - "stree": 32438, - "streep": 38701, - "street": 4839, - "street": 2012, - "streetart": 12948, - "streetcar": 34268, - "streetfood": 44486, - "streetphotography": 20786, - "streets": 6058, - "streetstyle": 39118, - "streetwear": 37298, - "strel": 39685, - "stren": 4349, - "streng": 4472, - "strength": 15475, - "strength": 5959, - "strengthen": 16318, - "strengthened": 47131, - "strengthening": 23475, - "strengthens": 40280, - "strengths": 29268, - "stress": 17297, - "stress": 5843, - "stressed": 16497, - "stresses": 32112, - "stressful": 24268, - "stressing": 35917, - "stret": 12265, - "stretch": 10064, - "stretched": 29393, - "stretches": 32231, - "stretching": 24423, - "stri": 1493, - "stri": 27795, - "stria": 39620, - "strial": 30217, - "strian": 12924, - "stric": 2607, - "strick": 25181, - "strickland": 48939, - "strict": 21585, - "strictly": 16475, - "stride": 36024, - "strides": 37355, - "stries": 18171, - "strife": 46473, - "strike": 20774, - "strike": 5767, - "striker": 12448, - "strikers": 33465, - "strikes": 9280, - "striking": 13392, - "string": 25512, - "string": 9696, - "strings": 15699, - "strip": 9317, - "stripe": 19368, - "striped": 22192, - "stripes": 14239, - "stripped": 26602, - "stripper": 45759, - "stripping": 48588, - "strips": 19000, - "strive": 22140, - "striving": 37671, - "stro": 3121, - "stro": 6186, - "stroke": 44621, - "stroke": 10403, - "strokes": 26595, - "strol": 30123, - "stroll": 15924, - "stroller": 47076, - "strolling": 40911, - "strom": 14707, - "stron": 4165, - "strong": 10436, - "strong": 2389, - "stronger": 27760, - "stronger": 9245, - "strongertogether": 38532, - "strongest": 16171, - "strongh": 38678, - "strongly": 15507, 
- "strophy": 47912, - "strou": 48425, - "stroud": 39895, - "strous": 23752, - "stru": 1666, - "struc": 3311, - "struck": 10861, - "struction": 12497, - "structural": 16899, - "structure": 5285, - "structured": 27147, - "structures": 14171, - "structuring": 37496, - "strugg": 5176, - "struggle": 8443, - "struggled": 32921, - "struggles": 17446, - "struggling": 12135, - "struly": 34118, - "strum": 37632, - "strung": 46033, - "strust": 23920, - "strut": 48375, - "stry": 17325, - "stry": 2245, - "sts": 1088, - "stu": 858, - "stu": 23531, - "stuart": 32054, - "stuart": 11723, - "stub": 27066, - "stubborn": 38955, - "stuck": 6596, - "stud": 22368, - "stud": 13319, - "studded": 29153, - "studen": 44156, - "student": 14681, - "student": 2556, - "students": 1712, - "studi": 5691, - "studied": 21369, - "studies": 6426, - "studio": 17798, - "studio": 3155, - "studios": 6231, - "studs": 27571, - "study": 21051, - "study": 3123, - "studyabroad": 45425, - "studying": 8826, - "stuff": 46072, - "stuff": 3487, - "stuffed": 11781, - "stuffing": 31612, - "stuffs": 43455, - "stuk": 32424, - "stumb": 16784, - "stumble": 39045, - "stumbled": 21776, - "stump": 32064, - "stun": 3088, - "stun": 37959, - "stunned": 34034, - "stunner": 29965, - "stunning": 3769, - "stunningly": 47515, - "stuns": 43796, - "stunt": 19905, - "stunts": 40118, - "stupi": 18975, - "stupid": 42600, - "stupid": 8085, - "stupidity": 33766, - "stur": 10676, - "sturdy": 43780, - "stures": 27223, - "sturgeon": 31580, - "sturi": 21747, - "sturridge": 45331, - "stutt": 30444, - "stuttgart": 32219, - "stv": 27060, - "stv": 9708, - "stweet": 46832, - "stweets": 39174, - "stx": 42548, - "sty": 1421, - "sty": 2920, - "style": 12356, - "style": 1844, - "styled": 17974, - "styles": 6948, - "styli": 38577, - "styling": 14597, - "stylish": 10378, - "stylist": 15928, - "styn": 41394, - "su": 605, - "su": 2937, - "sua": 42448, - "suarez": 21437, - "suave": 47305, - "sub": 1783, - "sub": 7765, - "subaru": 21319, - "subjec": 16090, - 
"subject": 10300, - "subjects": 22099, - "subli": 16350, - "sublime": 22367, - "submarine": 19968, - "submer": 27156, - "submerged": 43171, - "submission": 16571, - "submissions": 21566, - "submit": 10423, - "submitted": 15189, - "submitting": 38788, - "subram": 49207, - "subs": 16398, - "subscri": 5838, - "subscribe": 9839, - "subscribed": 44867, - "subscriber": 36292, - "subscribers": 17337, - "subscription": 17979, - "subscriptions": 47162, - "subsequ": 33598, - "subsequent": 44323, - "subsi": 14856, - "subsidi": 45029, - "subsidiary": 45506, - "subsidies": 37685, - "subsidy": 47462, - "substan": 17487, - "substance": 19309, - "substances": 36834, - "substantial": 27171, - "substantially": 47577, - "substitu": 18529, - "substitute": 25340, - "subtitles": 39479, - "subtle": 16536, - "subur": 12517, - "suburb": 37664, - "suburban": 23570, - "suburbs": 25317, - "subway": 12196, - "suc": 1869, - "succe": 7981, - "succeed": 13556, - "succeeded": 41077, - "succes": 39019, - "success": 3695, - "success": 3034, - "successes": 29436, - "successful": 4670, - "successfully": 9934, - "succession": 38491, - "successive": 41319, - "successor": 34774, - "succu": 45253, - "succul": 25671, - "succulent": 35236, - "such": 2046, - "suction": 42786, - "sud": 8067, - "sud": 33714, - "sudan": 31149, - "sudan": 13474, - "sudanese": 42837, - "sudbury": 32488, - "sudden": 10833, - "sudden": 15433, - "suddenly": 11076, - "sue": 14045, - "sue": 6641, - "sued": 22225, - "suede": 21036, - "sues": 17105, - "suf": 21204, - "suf": 22579, - "sufc": 37091, - "suff": 4866, - "suffe": 13510, - "suffer": 13557, - "suffered": 14766, - "suffering": 10140, - "suffers": 22389, - "sufficient": 28410, - "suffol": 13775, - "suffolk": 46408, - "suffolk": 15685, - "suffra": 34596, - "suffrage": 39567, - "sufi": 39756, - "sug": 3189, - "suga": 28757, - "sugar": 12418, - "sugar": 5574, - "sugge": 6345, - "suggest": 13356, - "suggested": 18790, - "suggesti": 15033, - "suggesting": 29792, - "suggestion": 23741, 
- "suggestions": 16052, - "suggests": 13333, - "suho": 32744, - "sui": 24972, - "suici": 16372, - "suicidal": 37165, - "suicide": 31310, - "suicide": 8247, - "suing": 18309, - "suisse": 35964, - "suit": 11887, - "suit": 3940, - "suitable": 17476, - "suitcase": 27792, - "suite": 9346, - "suited": 25919, - "suites": 21523, - "suits": 9949, - "suk": 24820, - "suk": 6886, - "suka": 44017, - "suke": 25590, - "sukh": 46961, - "suki": 32704, - "sul": 1767, - "sul": 19879, - "sula": 34713, - "sula": 26143, - "sullivan": 14477, - "sully": 37752, - "sulph": 37234, - "sulphur": 47659, - "sultan": 35650, - "sultan": 17049, - "sum": 7054, - "sum": 8257, - "suma": 47938, - "sumat": 32640, - "sumatra": 47346, - "sume": 45457, - "sumi": 41248, - "summ": 1309, - "summar": 34657, - "summari": 31993, - "summary": 13435, - "summed": 34912, - "summer": 5500, - "summer": 1673, - "summers": 18254, - "summerslam": 40264, - "summertime": 19025, - "summit": 30011, - "summit": 3768, - "summon": 27622, - "summon": 39782, - "sumner": 46813, - "sumo": 33734, - "sump": 34252, - "sumptuous": 47354, - "sums": 13325, - "sun": 968, - "sun": 2176, - "sunbathing": 46994, - "sunburn": 45767, - "sund": 40735, - "sundae": 38078, - "sundance": 24128, - "sundar": 44936, - "sunday": 6649, - "sunday": 1706, - "sundayfunday": 21565, - "sundaymorning": 24809, - "sundaymotivation": 46227, - "sundays": 15827, - "sundaywith": 26469, - "sundaywithmarsha": 26662, - "sunder": 15097, - "sunderland": 45727, - "sunderland": 18851, - "sundown": 44438, - "sune": 41096, - "sunflower": 21559, - "sunflowers": 39809, - "sung": 16903, - "sung": 6047, - "sunglasses": 12906, - "suni": 17663, - "suni": 47010, - "sunil": 32861, - "sunite": 21382, - "sunited": 35276, - "sunk": 37534, - "sunken": 43473, - "sunlight": 17996, - "sunni": 44315, - "sunny": 15632, - "sunny": 5438, - "sunrise": 5610, - "suns": 18322, - "sunscreen": 29355, - "sunset": 37880, - "sunset": 3424, - "sunsets": 17721, - "sunshine": 32761, - "sunshine": 5385, - 
"suny": 41308, - "sup": 19078, - "sup": 8249, - "supdates": 24177, - "super": 1642, - "super": 1994, - "superb": 8930, - "superbike": 45709, - "superbowl": 47461, - "superbowl": 16467, - "supercar": 27021, - "supercars": 32185, - "supercell": 43227, - "supercharged": 47479, - "supere": 46831, - "superfood": 41715, - "supergirl": 25771, - "superhero": 14049, - "superheroes": 23334, - "superint": 17615, - "superintendent": 19020, - "superior": 13205, - "superjunior": 40475, - "superleague": 45539, - "superman": 11237, - "supermarket": 19897, - "supermarkets": 45106, - "supermodel": 41963, - "supermoon": 36571, - "supernatural": 15484, - "supernova": 39843, - "superrugby": 48717, - "supersonic": 42019, - "supersport": 46319, - "superst": 38202, - "superstar": 32551, - "superstar": 10472, - "superstars": 25797, - "supervis": 12709, - "supervised": 41316, - "supervision": 36234, - "supervisor": 20366, - "supervisors": 37958, - "superyacht": 42714, - "supp": 1023, - "supper": 15727, - "supple": 31431, - "supplement": 19924, - "supplements": 21265, - "supplied": 24106, - "supplier": 18043, - "suppliers": 24196, - "supplies": 9384, - "supply": 25074, - "supply": 6389, - "supplychain": 31224, - "supplying": 32739, - "suppo": 6941, - "suppor": 2104, - "support": 12062, - "support": 1425, - "supported": 8038, - "supporter": 12992, - "supporters": 7403, - "supportindiefilm": 43976, - "supporting": 3976, - "supportive": 18313, - "supportlocal": 43852, - "supports": 8336, - "supportsmall": 30941, - "supportsmallstreamers": 36097, - "suppose": 18924, - "supposed": 9119, - "supposedly": 32302, - "suppre": 20542, - "suppression": 36508, - "supra": 48485, - "supre": 5875, - "supremac": 28643, - "supremacist": 39005, - "supremacy": 28913, - "supreme": 35222, - "supreme": 7468, - "supt": 23625, - "sur": 1090, - "sur": 7123, - "sura": 33412, - "sura": 49125, - "surabaya": 45227, - "surance": 22184, - "surat": 30201, - "sure": 14320, - "sure": 1650, - "sured": 36869, - "surely": 11409, 
- "sures": 12725, - "suresh": 32118, - "suresh": 31464, - "sureshpp": 41924, - "sureshpprabhu": 42050, - "surf": 10176, - "surf": 10322, - "surface": 7744, - "surfaces": 20746, - "surfer": 24925, - "surfers": 34842, - "surfing": 15762, - "surg": 13045, - "surge": 17457, - "surgeon": 16039, - "surgeons": 26000, - "surger": 5122, - "surgeries": 34940, - "surgery": 5344, - "surgical": 16386, - "suri": 14130, - "suri": 33952, - "suring": 16817, - "suriya": 17832, - "surpass": 45494, - "surpassed": 25648, - "surplus": 29413, - "surpri": 3244, - "surprise": 5099, - "surprised": 8949, - "surprises": 16920, - "surprising": 14964, - "surprisingly": 17367, - "surreal": 18408, - "surrealism": 41773, - "surrender": 20964, - "surrendered": 44601, - "surrey": 26489, - "surrey": 14315, - "surro": 47499, - "surroun": 8250, - "surround": 26543, - "surround": 22999, - "surrounded": 13589, - "surrounding": 12544, - "surroundings": 26915, - "surrounds": 39012, - "suru": 49240, - "surve": 8952, - "surveill": 15408, - "surveillance": 15578, - "survey": 45914, - "survey": 6809, - "surveying": 33085, - "surveys": 25096, - "survi": 3440, - "surviv": 12922, - "survival": 10172, - "survive": 10431, - "survived": 13483, - "survives": 30927, - "surviving": 18609, - "survivor": 31934, - "survivor": 10944, - "survivors": 13711, - "surya": 37767, - "sus": 8091, - "sus": 3036, - "susa": 20546, - "susan": 19922, - "susan": 10168, - "suscep": 44270, - "sush": 22298, - "sushi": 11729, - "sushmaswar": 48200, - "susie": 32284, - "susp": 7971, - "suspec": 10298, - "suspect": 9065, - "suspected": 15579, - "suspects": 18265, - "suspen": 10578, - "suspend": 41007, - "suspended": 13126, - "suspends": 39535, - "suspense": 21556, - "suspension": 15417, - "suspici": 25714, - "suspicion": 34910, - "suspicious": 19862, - "sussex": 31244, - "sussex": 13266, - "sustain": 4644, - "sustain": 28156, - "sustainability": 9635, - "sustainable": 23645, - "sustainable": 7078, - "sustained": 22699, - "sustaining": 44418, - 
"sut": 23984, - "sut": 28956, - "sutherland": 27592, - "sutton": 39359, - "sutton": 18564, - "suv": 15985, - "suz": 9957, - "suzanne": 24617, - "suzu": 36289, - "suzuki": 16892, - "suzy": 26552, - "sv": 6508, - "sv": 17083, - "svc": 45065, - "sve": 47637, - "sven": 37786, - "sven": 45183, - "sver": 45923, - "sville": 44580, - "sville": 6741, - "svp": 28465, - "svt": 42014, - "svu": 32123, - "sw": 1220, - "sw": 4457, - "swa": 4707, - "swa": 31916, - "swach": 20862, - "swachhb": 31898, - "swachhbharat": 36927, - "swag": 8852, - "swag": 8177, - "swagg": 47702, - "swagger": 35797, - "swain": 43226, - "swal": 13433, - "swallow": 28979, - "swallowed": 46956, - "swallows": 45124, - "swam": 42539, - "swami": 25021, - "swamp": 41953, - "swamp": 16595, - "swamy": 28445, - "swan": 8215, - "swan": 12530, - "swana": 24699, - "swans": 19516, - "swansea": 16567, - "swanson": 34797, - "swap": 15234, - "swapped": 39077, - "swapping": 44702, - "swaps": 49242, - "swar": 11680, - "swarm": 31577, - "swarovski": 28515, - "swat": 32547, - "swat": 26482, - "swatch": 48053, - "sway": 26443, - "sway": 26617, - "swc": 42231, - "swe": 2350, - "swe": 38070, - "swear": 7406, - "swearing": 32627, - "sweat": 10282, - "sweat": 12663, - "sweater": 11455, - "sweaters": 31303, - "sweating": 33215, - "sweats": 39321, - "sweatshirt": 22442, - "sweaty": 28419, - "sweden": 8760, - "swedish": 11585, - "swee": 1812, - "sweek": 30017, - "sweeney": 27286, - "sweep": 23220, - "sweep": 13669, - "sweeping": 25719, - "sweeps": 26887, - "sweepstakes": 25992, - "sweet": 10957, - "sweet": 2418, - "sweetened": 45577, - "sweeter": 32873, - "sweetest": 15180, - "sweethe": 16316, - "sweetheart": 18079, - "sweetie": 24450, - "sweetness": 29713, - "sweets": 18045, - "swel": 48470, - "swell": 35538, - "swell": 21490, - "swelling": 46578, - "swept": 23311, - "swer": 30514, - "swfc": 30227, - "swfl": 46607, - "swi": 3881, - "swi": 45223, - "swick": 17159, - "swif": 28548, - "swift": 34843, - "swift": 8229, - "swild": 33909, 
- "swild": 38696, - "swildlife": 46818, - "swim": 4928, - "swim": 7681, - "swimmer": 25475, - "swimmers": 27776, - "swimming": 7411, - "swims": 46798, - "swimsuit": 25504, - "swimwear": 31889, - "swin": 14554, - "swin": 40798, - "swindon": 29540, - "swine": 31166, - "swing": 25292, - "swing": 7429, - "swinging": 26760, - "swings": 29141, - "swipe": 31828, - "swire": 42753, - "swirl": 35795, - "swis": 23611, - "swish": 38571, - "swiss": 37917, - "swiss": 9287, - "swit": 3726, - "switch": 22480, - "switch": 5893, - "switched": 22869, - "switches": 33569, - "switching": 21155, - "swith": 17299, - "switzer": 9835, - "switzerland": 9912, - "swivel": 48256, - "swo": 38673, - "swol": 29575, - "swollen": 36129, - "swoo": 29744, - "swood": 24158, - "swoon": 37028, - "swoop": 45661, - "sword": 33294, - "sword": 11356, - "swords": 27181, - "swork": 42722, - "sworld": 33305, - "sworn": 21130, - "sworth": 13322, - "swt": 38878, - "swx": 20597, - "sx": 9402, - "sx": 17806, - "sxsw": 13369, - "sy": 974, - "sy": 2126, - "sya": 35017, - "sycam": 34911, - "sycamore": 43086, - "syd": 4525, - "syd": 22504, - "sydney": 15878, - "sydney": 5278, - "syed": 27624, - "syfy": 32047, - "sykes": 27287, - "syl": 6452, - "sylla": 41708, - "sylvania": 12011, - "sylve": 28369, - "sylvester": 37214, - "sylvia": 25670, - "sym": 3645, - "sym": 40327, - "symb": 22987, - "symbol": 13085, - "symboli": 22019, - "symbolic": 33177, - "symbolism": 44679, - "symbols": 25476, - "symmetry": 31427, - "symp": 11468, - "sympathi": 47493, - "sympathy": 32477, - "symph": 9544, - "symphonic": 42639, - "symphony": 11180, - "sympo": 9730, - "symposium": 9971, - "symptom": 47799, - "symptoms": 12956, - "syn": 3758, - "syn": 36090, - "synago": 30945, - "synagogue": 33518, - "sync": 20081, - "synchron": 23943, - "syndic": 21098, - "syndicate": 28779, - "syndrome": 10927, - "syner": 22283, - "synergy": 32012, - "syno": 31533, - "synod": 47712, - "synopsis": 47018, - "synth": 33841, - "synth": 24462, - "synthe": 22604, - 
"synthesi": 33565, - "synthesis": 21602, - "synthesizer": 44077, - "synthetic": 19917, - "syou": 26742, - "syour": 21718, - "syrac": 17279, - "syracuse": 19640, - "syrah": 45364, - "syri": 18917, - "syria": 5563, - "syrian": 47562, - "syrian": 10041, - "syrians": 41392, - "syrup": 16611, - "sys": 26726, - "syste": 1933, - "system": 47813, - "system": 2422, - "systematic": 28586, - "systemic": 33807, - "systems": 4828, - "sz": 13438, - "sz": 15879, - "sze": 44507, - "szn": 48092, - "são": 45911, - "sé": 37879, - "t": 83, - "t": 339, - "ta": 648, - "ta": 1397, - "taa": 43874, - "tab": 2648, - "tab": 14724, - "tabby": 36145, - "tabern": 48991, - "tability": 15770, - "table": 12108, - "table": 2175, - "tableau": 39723, - "tables": 7822, - "tablet": 12494, - "tabletop": 46843, - "tabletop": 25773, - "tablets": 20436, - "tably": 24440, - "taboo": 38400, - "tabs": 29163, - "tac": 3145, - "tac": 22653, - "tache": 39239, - "tack": 6339, - "tack": 34446, - "tackle": 10294, - "tackled": 47218, - "tackles": 18021, - "tackling": 19628, - "taco": 31924, - "taco": 12436, - "tacoma": 25397, - "tacos": 14090, - "tactic": 40377, - "tactical": 17137, - "tactics": 16410, - "tacular": 48985, - "tad": 15890, - "tad": 19860, - "tado": 40846, - "tae": 15257, - "tae": 15580, - "taehyung": 24642, - "taek": 30753, - "taekwondo": 39963, - "taemin": 30600, - "taeyang": 45802, - "taeyeon": 27389, - "taf": 29660, - "taft": 42141, - "tag": 3456, - "tag": 3640, - "tage": 2669, - "tages": 39902, - "tagged": 12969, - "tagging": 25138, - "tagne": 47467, - "tags": 11606, - "tah": 14822, - "tah": 7090, - "tahit": 45385, - "tahoe": 26140, - "tai": 6511, - "tai": 13040, - "taiji": 30185, - "tail": 7156, - "tail": 4132, - "tailed": 20626, - "tailgate": 23168, - "tailgating": 42625, - "tailo": 27230, - "tailor": 29870, - "tailored": 28275, - "tailoring": 46357, - "tails": 16066, - "tain": 2841, - "tain": 1908, - "taine": 21214, - "taine": 32299, - "tained": 10212, - "taining": 7565, - "tainment": 30063, - 
"tains": 3952, - "tainted": 47211, - "taipei": 24356, - "tair": 29143, - "tairp": 43707, - "tait": 45325, - "taiwan": 36319, - "taiwan": 12626, - "taiwanese": 41416, - "taj": 28937, - "taj": 24805, - "taji": 46358, - "tak": 15070, - "tak": 14458, - "taka": 24070, - "taka": 40968, - "take": 5052, - "take": 1172, - "takeaway": 25737, - "takeaways": 32080, - "takeme": 41748, - "taken": 2807, - "takeoff": 32789, - "takeover": 11863, - "taker": 17939, - "takers": 30775, - "takes": 2633, - "takin": 30890, - "taking": 2019, - "taku": 48168, - "tal": 976, - "tal": 2066, - "tala": 29845, - "talaga": 35349, - "talbot": 30585, - "tale": 33971, - "tale": 7798, - "talent": 30435, - "talent": 5114, - "talented": 5331, - "talents": 16136, - "tales": 9469, - "tali": 12122, - "tali": 45406, - "taliban": 20788, - "talis": 36480, - "tality": 15631, - "talk": 12462, - "talk": 1841, - "talked": 10153, - "talkin": 26040, - "talking": 31463, - "talking": 2578, - "talks": 3237, - "tall": 11664, - "tall": 7771, - "talla": 21528, - "tallade": 44220, - "tallahassee": 37832, - "taller": 23470, - "tallest": 19774, - "tallinn": 45079, - "tally": 16323, - "talon": 47897, - "tam": 2661, - "tam": 12246, - "tama": 45424, - "tamanna": 48055, - "tamar": 22901, - "tamara": 35697, - "tame": 38557, - "tame": 32778, - "tamed": 40575, - "tami": 39429, - "tamil": 23046, - "tamil": 14033, - "tamilnadu": 32371, - "tamine": 42566, - "tammy": 28396, - "tampa": 10906, - "tampab": 37852, - "tamu": 34105, - "tan": 2123, - "tan": 5039, - "tana": 21396, - "tand": 20244, - "tandem": 33756, - "tane": 13344, - "tane": 24923, - "taneous": 22275, - "taneously": 24422, - "tang": 10425, - "tang": 20794, - "tanger": 31844, - "tangerine": 42045, - "tangible": 44823, - "tangle": 36568, - "tangled": 33587, - "tango": 24089, - "tani": 31374, - "tani": 32985, - "tania": 45369, - "tank": 29858, - "tank": 6172, - "tanker": 25020, - "tanks": 14223, - "tann": 19174, - "tanner": 22001, - "tanning": 27985, - "tans": 27332, - "tant": 
41383, - "tant": 41695, - "tante": 48262, - "tanto": 45685, - "tany": 34410, - "tanya": 26800, - "tanz": 47399, - "tanzania": 15711, - "tao": 29084, - "tao": 18923, - "tap": 17923, - "tap": 7888, - "tapas": 27361, - "tape": 18332, - "tape": 5749, - "taped": 33219, - "tapes": 17903, - "tapestry": 33525, - "taping": 24355, - "tapp": 27644, - "tapp": 27764, - "tapped": 26649, - "tapping": 27882, - "tapro": 34415, - "taproom": 40266, - "taps": 23267, - "tar": 2002, - "tar": 6977, - "tara": 15264, - "tarak": 37813, - "taran": 32370, - "tarantino": 41180, - "tarde": 48670, - "tardis": 35410, - "tares": 34587, - "targe": 9620, - "target": 38556, - "target": 5400, - "targeted": 14968, - "targeting": 15818, - "targets": 12468, - "tari": 4238, - "tari": 38012, - "tarian": 11762, - "tarians": 42789, - "taries": 47291, - "tariff": 40220, - "tariffs": 28335, - "tariq": 42526, - "tarmac": 44294, - "taro": 26264, - "tarot": 23702, - "tart": 16707, - "tart": 14120, - "tartan": 35064, - "tarts": 29799, - "tary": 31729, - "tary": 5065, - "tarzan": 45463, - "tas": 6538, - "tas": 10163, - "tash": 35272, - "tasha": 44967, - "task": 39189, - "task": 10549, - "tasks": 19453, - "tasmania": 22429, - "tasmanian": 45102, - "tassel": 49276, - "tast": 10839, - "taste": 14314, - "taste": 5219, - "tasted": 22827, - "tasteof": 38097, - "taster": 29743, - "tastes": 13736, - "tastic": 21337, - "tasting": 7656, - "tastings": 49273, - "tasty": 43390, - "tasty": 8568, - "tat": 2652, - "tat": 21592, - "tata": 19300, - "tate": 44476, - "tate": 13295, - "tath": 27566, - "tati": 31433, - "tatiana": 48837, - "tation": 5280, - "tations": 32324, - "tator": 18791, - "tators": 37206, - "tats": 44557, - "tatt": 9232, - "tatted": 41605, - "tattoo": 15980, - "tattoo": 6325, - "tattooed": 28541, - "tattoos": 14900, - "tatum": 26103, - "tau": 6620, - "tau": 20510, - "taught": 9306, - "taun": 23910, - "taunton": 40681, - "taurus": 32881, - "taver": 37776, - "tavern": 18644, - "taw": 33868, - "taw": 40289, - "tawa": 
29035, - "tawards": 14351, - "tax": 4581, - "tax": 3879, - "taxation": 36847, - "taxes": 11462, - "taxi": 25160, - "taxi": 11380, - "taxider": 47420, - "taxis": 34009, - "taxpay": 17986, - "taxpayer": 30978, - "taxpayers": 25503, - "tay": 6542, - "tay": 15073, - "taya": 38484, - "tayl": 3913, - "taylor": 9044, - "taylor": 3961, - "taylorswift": 18936, - "tayo": 33941, - "taz": 41475, - "taz": 31870, - "tb": 1990, - "tb": 7490, - "tba": 34363, - "tball": 8390, - "tball": 1467, - "tbc": 31807, - "tbd": 45548, - "tbh": 13238, - "tbi": 45868, - "tbl": 42962, - "tbli": 43664, - "tblightning": 44178, - "tbo": 34255, - "tbr": 46643, - "tbs": 37368, - "tbt": 2950, - "tc": 6820, - "tc": 5454, - "tca": 35116, - "tch": 10744, - "tch": 4048, - "tches": 42001, - "tcm": 21501, - "tcm": 26588, - "tcmparty": 24338, - "tcot": 8995, - "tcs": 39107, - "tcu": 26791, - "td": 20578, - "td": 3192, - "tdf": 21844, - "tdi": 45621, - "tdp": 47009, - "tds": 20238, - "tdsb": 29836, - "te": 600, - "te": 756, - "tea": 41053, - "tea": 3274, - "teach": 2043, - "teach": 6865, - "teacher": 18051, - "teacher": 4008, - "teachers": 5069, - "teaches": 17110, - "teaching": 5141, - "teachings": 32119, - "teal": 22821, - "team": 2085, - "team": 1027, - "teamcanada": 46636, - "teamed": 20590, - "teamgb": 40971, - "teaming": 24392, - "teammate": 17900, - "teammates": 13921, - "teams": 3891, - "teamsisd": 34703, - "teamusa": 28625, - "teamwork": 14657, - "teaparty": 33065, - "teapo": 35745, - "teapot": 40749, - "tear": 15802, - "tear": 11862, - "tearful": 46873, - "tearing": 24785, - "tears": 7688, - "teas": 23003, - "teas": 29314, - "tease": 25163, - "teased": 49122, - "teaser": 8982, - "teasers": 48990, - "teases": 28509, - "teasing": 36507, - "teat": 26376, - "teatime": 48948, - "teatro": 35756, - "teau": 24931, - "tebow": 37797, - "tec": 17381, - "tec": 11612, - "tech": 1782, - "tech": 2061, - "techcrunch": 42110, - "techn": 6252, - "technews": 31787, - "technic": 16639, - "technic": 37666, - 
"technical": 49231, - "technical": 7582, - "technically": 23180, - "technician": 22540, - "technicians": 35513, - "techno": 2599, - "techno": 17564, - "technological": 23068, - "technologies": 10040, - "technology": 3089, - "techs": 41353, - "ted": 4841, - "ted": 775, - "tedcruz": 27517, - "teddy": 25758, - "teddy": 11798, - "tedly": 8539, - "tedu": 42517, - "tedx": 17950, - "tedx": 41504, - "tee": 12676, - "tee": 3385, - "teed": 13692, - "teen": 5398, - "teen": 4697, - "teenage": 14069, - "teenager": 19338, - "teenagers": 25989, - "teenchoice": 28203, - "teens": 12375, - "teenth": 20249, - "teenwolf": 40067, - "teeny": 41622, - "teer": 48648, - "tees": 9641, - "teessi": 43295, - "teeth": 8225, - "tega": 29508, - "tegr": 39801, - "teh": 18720, - "teh": 29601, - "tehran": 26399, - "tein": 33223, - "tej": 46724, - "tek": 17489, - "tek": 18294, - "tekken": 29843, - "tel": 4978, - "tel": 2226, - "telang": 23469, - "telangana": 26386, - "tele": 3103, - "tele": 32851, - "telecom": 21057, - "telecommunications": 39900, - "telegram": 26780, - "telegraph": 14713, - "telephone": 17243, - "telescope": 19037, - "telethon": 49266, - "televised": 39470, - "television": 8608, - "telford": 38323, - "tell": 16069, - "tell": 2330, - "teller": 20415, - "tellers": 42707, - "telling": 5507, - "tells": 5217, - "tellu": 42511, - "telly": 31475, - "tels": 43607, - "telugu": 22927, - "tely": 5630, - "tem": 2404, - "tem": 17536, - "tema": 45881, - "teme": 43378, - "temp": 2684, - "temp": 11097, - "tempe": 36723, - "temper": 5981, - "temper": 35521, - "temperature": 9543, - "temperatures": 11575, - "tempered": 40521, - "tempest": 36053, - "templ": 16679, - "template": 18591, - "templates": 30498, - "temple": 21841, - "temple": 5620, - "temples": 24024, - "tempo": 19625, - "tempor": 4858, - "temporal": 43656, - "temporarily": 23189, - "temporary": 6513, - "temps": 11668, - "tempt": 28460, - "temptation": 30118, - "tempted": 26226, - "tempting": 34876, - "ten": 1149, - "ten": 2581, - "tenant": 
16954, - "tenants": 26023, - "tenay": 45384, - "tenberg": 31329, - "tend": 17630, - "tend": 21252, - "tendency": 47277, - "tender": 23020, - "tender": 9838, - "tenderloin": 42750, - "tenders": 44741, - "tending": 35084, - "tendon": 48459, - "tends": 39962, - "tene": 24868, - "tened": 13682, - "tener": 29054, - "teneri": 28000, - "tenerife": 29401, - "teners": 41307, - "teness": 18018, - "teng": 34016, - "teng": 28474, - "tennant": 29310, - "tennes": 9514, - "tennessee": 10053, - "tennis": 31504, - "tennis": 5298, - "tenor": 30521, - "tens": 14062, - "tense": 23518, - "tension": 15221, - "tensions": 24224, - "tenstein": 49139, - "tent": 18505, - "tent": 10782, - "tentative": 48238, - "tenth": 27483, - "tention": 12191, - "tents": 30730, - "tenure": 30739, - "teo": 18665, - "tep": 31806, - "tequ": 17502, - "tequila": 18510, - "ter": 704, - "ter": 652, - "tera": 15155, - "teras": 44830, - "tere": 11329, - "tered": 49272, - "tered": 4389, - "terence": 33806, - "teresa": 19081, - "teri": 30917, - "teria": 22685, - "terie": 42276, - "tering": 7929, - "term": 40991, - "term": 4780, - "termin": 4766, - "terminal": 11816, - "terminals": 44091, - "terminator": 29609, - "terminology": 48896, - "terms": 8663, - "tern": 41572, - "tern": 12959, - "terns": 25251, - "tero": 20727, - "tero": 24697, - "terps": 41471, - "terr": 3921, - "terra": 22366, - "terra": 18816, - "terrac": 28549, - "terrace": 13820, - "terraces": 47508, - "terracotta": 45123, - "terrain": 20184, - "terran": 43726, - "terre": 33888, - "terre": 27537, - "terrell": 39494, - "terrence": 38746, - "terrestrial": 46299, - "terri": 4504, - "terri": 36722, - "terrible": 9741, - "terribly": 34558, - "terrier": 14455, - "terriers": 47047, - "terrific": 13837, - "terrified": 28204, - "terrifying": 18526, - "territ": 10720, - "territorial": 39163, - "territories": 32846, - "territory": 13936, - "terror": 9596, - "terror": 9327, - "terrori": 6836, - "terrorism": 10583, - "terrorist": 10575, - "terrorists": 12835, - 
"terry": 19378, - "terry": 8561, - "ters": 24102, - "ters": 1737, - "terti": 48386, - "tery": 4184, - "tes": 8019, - "tes": 3609, - "tesco": 15434, - "tese": 33320, - "tesla": 12254, - "tess": 21807, - "tess": 20840, - "tessa": 32063, - "test": 7738, - "test": 1628, - "testam": 23477, - "testament": 24609, - "tested": 10576, - "tester": 32707, - "testi": 18373, - "testic": 42364, - "testify": 33088, - "testifying": 46347, - "testim": 12553, - "testimonial": 28834, - "testimony": 18672, - "testing": 4967, - "testo": 42428, - "testosterone": 45168, - "tests": 8715, - "tet": 40468, - "tet": 13275, - "tetra": 40902, - "tetris": 45934, - "teu": 47152, - "teuk": 39979, - "teur": 27120, - "tex": 2056, - "tex": 11728, - "texan": 35287, - "texan": 38386, - "texans": 17580, - "texanscheer": 43717, - "texas": 15713, - "texas": 3403, - "texaste": 46469, - "text": 18169, - "text": 4160, - "textbook": 25952, - "textbooks": 44041, - "texted": 29004, - "textile": 19789, - "textiles": 24326, - "texting": 18600, - "texts": 12767, - "texture": 16505, - "textured": 32168, - "textures": 28063, - "tey": 32395, - "tez": 22664, - "tf": 18828, - "tf": 5001, - "tfc": 30186, - "tfl": 29918, - "tford": 22493, - "tful": 17108, - "tfw": 16741, - "tg": 7665, - "tg": 11981, - "tgif": 14483, - "th": 513, - "th": 640, - "tha": 18470, - "tha": 4715, - "thab": 38219, - "thad": 48339, - "thai": 28054, - "thai": 8825, - "thail": 7258, - "thailand": 7469, - "thak": 22801, - "thakur": 38427, - "thal": 7967, - "thal": 12323, - "thala": 17784, - "thalai": 25206, - "thalaivar": 44918, - "thalap": 39789, - "thalapathy": 45405, - "thalapathy": 23324, - "thall": 36007, - "tham": 11761, - "tham": 8896, - "thames": 43472, - "thames": 15321, - "than": 792, - "than": 1126, - "thand": 44465, - "thane": 21463, - "thang": 24870, - "thani": 31322, - "thank": 2790, - "thank": 1144, - "thanked": 32079, - "thankful": 38839, - "thankful": 6217, - "thankfully": 22089, - "thanking": 21989, - "thanks": 5672, - "thanks": 
1085, - "thanksgiving": 45732, - "thanksgiving": 6167, - "thanku": 45710, - "thankyou": 18050, - "thankyou": 9911, - "thanniversary": 35564, - "thanos": 36709, - "thanx": 25095, - "thar": 14396, - "thar": 38843, - "thard": 43474, - "that": 6303, - "that": 682, - "thatcher": 32496, - "thats": 44636, - "thats": 9254, - "thaw": 26081, - "thaw": 47229, - "thbewithyou": 41067, - "thc": 20091, - "thcentury": 49111, - "thd": 28219, - "thday": 37801, - "the": 599, - "the": 518, - "thea": 15935, - "thea": 25429, - "thead": 25259, - "theal": 45728, - "thealth": 31398, - "thear": 43283, - "theart": 44678, - "theast": 8378, - "theastern": 17877, - "theat": 2263, - "theater": 39438, - "theater": 6128, - "theaters": 14689, - "theatre": 19857, - "theatre": 3292, - "theatres": 21680, - "theatrical": 26833, - "theband": 27695, - "thebeatles": 35645, - "thebest": 40883, - "thebest": 25856, - "thebig": 24732, - "theblack": 47718, - "thec": 48659, - "thed": 31405, - "thedaily": 33550, - "theday": 4408, - "thedream": 39417, - "thee": 44475, - "thee": 15108, - "theeconomist": 44518, - "theellenshow": 35342, - "thefilm": 31665, - "theflash": 25434, - "theforce": 40002, - "theforceawakens": 48033, - "theft": 13286, - "thefuture": 34287, - "thegame": 24428, - "thegood": 28594, - "thegreat": 28721, - "thei": 44522, - "their": 911, - "theirs": 29297, - "thel": 5403, - "thelast": 23495, - "thelastjedi": 47992, - "theless": 27712, - "theli": 15277, - "thelittle": 46872, - "thelo": 47036, - "thelove": 40668, - "thelove": 43200, - "them": 5435, - "them": 1180, - "themasters": 48378, - "theme": 38524, - "theme": 5849, - "themed": 10126, - "themes": 17849, - "themet": 48183, - "themovie": 27062, - "themselves": 6503, - "then": 5929, - "then": 1594, - "thenburg": 45209, - "thene": 17012, - "thenew": 24212, - "thenext": 47881, - "thenight": 43336, - "theno": 37172, - "thenorth": 34338, - "theo": 17043, - "theo": 18084, - "theod": 26653, - "theodore": 30743, - "theological": 41162, - "theology": 
24095, - "theon": 34653, - "theone": 46231, - "theopen": 41438, - "theore": 22690, - "theoretical": 35585, - "theori": 34804, - "theories": 23937, - "theory": 7143, - "thepeople": 33597, - "thepersonal": 29981, - "thepersonalnetwork": 30016, - "thephoto": 18303, - "thephotohour": 18607, - "ther": 1160, - "ther": 743, - "therap": 4499, - "therapeu": 19332, - "therapeutic": 23240, - "therapeutics": 49101, - "therapies": 30179, - "therapist": 20608, - "therapists": 34763, - "therapper": 49340, - "therapy": 5257, - "there": 5283, - "there": 997, - "thereal": 8074, - "thereal": 41140, - "thereby": 43308, - "thered": 10208, - "therefore": 16865, - "theres": 18494, - "theresa": 14126, - "therese": 47996, - "theresistance": 22845, - "theri": 28967, - "theri": 45297, - "therine": 26807, - "therine": 9239, - "thering": 7891, - "therland": 25351, - "thermal": 13689, - "thermo": 22303, - "thermom": 31138, - "thermometer": 38172, - "thermost": 42391, - "thern": 10919, - "thern": 3137, - "thero": 13165, - "theroad": 29807, - "therock": 30036, - "theroy": 38146, - "thers": 1959, - "thes": 40556, - "thes": 6460, - "thescript": 47061, - "these": 40366, - "these": 1071, - "theses": 39388, - "thesimpsons": 45513, - "thesims": 34192, - "thesis": 10673, - "thessal": 41491, - "thessaloni": 41753, - "thest": 35343, - "thesun": 45617, - "theta": 27694, - "thetic": 7954, - "thetimes": 36039, - "thevamp": 33701, - "thevoice": 47206, - "thevoice": 30258, - "thewalkingdead": 18087, - "thewanted": 43008, - "theworld": 44988, - "theworld": 17475, - "thex": 35990, - "they": 15174, - "they": 889, - "theyre": 28266, - "thfc": 17729, - "thi": 2362, - "thi": 9111, - "thia": 17943, - "thiago": 44537, - "thian": 23214, - "thians": 28187, - "thibau": 48351, - "thic": 26107, - "thic": 11794, - "thick": 18417, - "thick": 11006, - "thicker": 43302, - "thickness": 40754, - "thief": 18508, - "thier": 25595, - "thierry": 32929, - "thieves": 17899, - "thigh": 47124, - "thigh": 22877, - "thighs": 30847, - 
"thik": 20512, - "thika": 44619, - "thill": 31266, - "thim": 42331, - "thin": 2178, - "thin": 7847, - "thine": 47192, - "thing": 7499, - "thing": 946, - "things": 30670, - "things": 1739, - "thingsto": 43924, - "thingy": 36888, - "think": 9820, - "think": 1331, - "thinkbig": 26015, - "thinkbigsundaywithmarsha": 26666, - "thinker": 34577, - "thinkers": 32779, - "thinkin": 34443, - "thinking": 3291, - "thinks": 6109, - "thinner": 47247, - "thir": 6030, - "third": 32102, - "third": 3981, - "thirds": 42582, - "thirst": 23563, - "thirsty": 39731, - "thirsty": 17521, - "thirteen": 34209, - "thirty": 20813, - "thiru": 43292, - "this": 4340, - "this": 589, - "thisday": 6532, - "thisdayin": 33641, - "thisdayinhistory": 46913, - "thisi": 7299, - "thisis": 14887, - "thismorning": 36245, - "thistle": 29039, - "thistory": 28904, - "thium": 21804, - "thletics": 17765, - "thm": 10407, - "thman": 30079, - "thms": 19874, - "thn": 44155, - "thn": 45587, - "thnx": 25480, - "tho": 1325, - "tho": 5025, - "thof": 18943, - "thofjuly": 21613, - "thol": 29319, - "thole": 31029, - "tholes": 42465, - "thology": 9881, - "thom": 2585, - "thom": 24094, - "thomas": 12574, - "thomas": 3888, - "thome": 21289, - "thomp": 37274, - "thompson": 42181, - "thompson": 8535, - "thomson": 24151, - "thon": 38776, - "thon": 8924, - "thong": 37058, - "thood": 15623, - "thor": 4130, - "thor": 13691, - "thora": 46866, - "thorn": 12957, - "thorn": 18466, - "thorne": 18025, - "thorns": 33650, - "thornton": 23592, - "thorough": 15294, - "thorough": 34788, - "thoroughbred": 43248, - "thoroughly": 19750, - "thorpe": 18099, - "thos": 41965, - "those": 1753, - "thot": 33736, - "thou": 1513, - "thou": 17781, - "though": 2846, - "thought": 23948, - "thought": 2449, - "thoughtful": 19592, - "thoughts": 3618, - "thour": 27125, - "thousand": 9344, - "thousands": 7089, - "thouse": 40318, - "thouse": 7819, - "thoven": 23078, - "thr": 1111, - "thr": 19138, - "thra": 17761, - "thra": 32797, - "thrash": 38262, - "thre": 1607, - 
"thread": 31108, - "thread": 8815, - "threads": 24957, - "threat": 7527, - "threat": 7212, - "threaten": 26097, - "threatened": 16391, - "threatening": 16400, - "threatens": 20555, - "threats": 12766, - "three": 21615, - "three": 2097, - "thren": 41776, - "thresh": 29779, - "threshold": 33791, - "threw": 12746, - "thri": 8713, - "thrift": 27779, - "thrill": 21023, - "thrilled": 7879, - "thriller": 9653, - "thrilling": 20101, - "thrills": 39829, - "thrive": 17669, - "thriving": 22677, - "thro": 2101, - "thro": 28624, - "throat": 16371, - "thrombo": 47585, - "throne": 15999, - "thrones": 8072, - "throp": 34939, - "throttle": 37139, - "through": 6091, - "through": 1417, - "throughout": 6721, - "throughs": 48278, - "throw": 3315, - "throw": 6293, - "throwback": 6001, - "throwback": 5058, - "throwbackthursday": 6326, - "thrower": 40199, - "throwing": 9734, - "thrown": 15079, - "throws": 14723, - "thru": 23856, - "thru": 6162, - "thrush": 46133, - "thrust": 40202, - "ths": 2079, - "tht": 23554, - "thu": 3837, - "thu": 14153, - "thub": 25660, - "thug": 37212, - "thug": 18137, - "thugs": 27686, - "thul": 28368, - "thulhu": 37560, - "thum": 14679, - "thumb": 19514, - "thumb": 18674, - "thumbnail": 32365, - "thumbs": 17599, - "thun": 32267, - "thunder": 6161, - "thunder": 8951, - "thunderbird": 45131, - "thunderbirds": 44286, - "thunderbolt": 43596, - "thunderstorm": 12005, - "thunderstorms": 19525, - "thunt": 46763, - "thur": 1837, - "thur": 21704, - "thurman": 41291, - "thurs": 9908, - "thursday": 11218, - "thursday": 2221, - "thursdaymotivation": 39375, - "thursdays": 21444, - "thursdaythoughts": 14866, - "thurst": 33970, - "thus": 12457, - "thusi": 9488, - "thwaite": 48469, - "thweeksary": 30871, - "thx": 5913, - "thy": 7804, - "thy": 3362, - "thyme": 29805, - "thyro": 25174, - "thyroid": 32558, - "ti": 555, - "ti": 2605, - "tia": 6709, - "tial": 2826, - "tially": 14503, - "tian": 23011, - "tian": 8125, - "tians": 35182, - "tiara": 38322, - "tib": 47868, - "tibet": 
19927, - "tibet": 22234, - "tibetan": 24057, - "tible": 11453, - "tic": 890, - "tic": 1550, - "tica": 9669, - "tical": 34191, - "tical": 4342, - "tically": 13375, - "ticals": 30861, - "tice": 3122, - "tich": 48769, - "tician": 43358, - "ticism": 26491, - "tick": 24640, - "tick": 15617, - "ticket": 25740, - "ticket": 4500, - "ticketing": 44432, - "tickets": 2015, - "ticking": 35842, - "tickle": 42999, - "ticks": 40269, - "tico": 17670, - "ticon": 45996, - "tics": 2419, - "ticul": 15538, - "ticus": 44277, - "tid": 26002, - "tid": 23727, - "tidal": 21949, - "tide": 15698, - "tide": 9105, - "tides": 25524, - "tidy": 23858, - "tie": 14072, - "tie": 3422, - "tied": 9889, - "tiem": 34762, - "tien": 47538, - "tiene": 43438, - "tier": 14390, - "tier": 6598, - "tierney": 45693, - "tiers": 24604, - "ties": 25556, - "ties": 2499, - "tiest": 18300, - "tiesto": 46367, - "tif": 23216, - "tiff": 11112, - "tiff": 20699, - "tiffany": 30467, - "tiffany": 14446, - "tification": 43923, - "tified": 40854, - "tiful": 29123, - "tify": 6677, - "tig": 31999, - "tiger": 11954, - "tiger": 6531, - "tigers": 6934, - "tigh": 31365, - "tight": 25763, - "tight": 9123, - "tighten": 46653, - "tighter": 48193, - "tightly": 37568, - "tights": 29581, - "tijuana": 45273, - "tik": 24986, - "tik": 32403, - "tiki": 30107, - "til": 6124, - "til": 1763, - "tile": 26217, - "tile": 8227, - "tiles": 10607, - "tility": 38180, - "till": 17462, - "till": 4267, - "tilla": 26063, - "tillerson": 47738, - "tilly": 41199, - "tilt": 23601, - "tim": 1292, - "tim": 3863, - "timate": 4754, - "timb": 26627, - "timber": 14441, - "timber": 16246, - "timberlake": 28274, - "timbers": 39911, - "timberwolves": 41190, - "time": 3764, - "time": 788, - "timed": 32727, - "timehop": 19944, - "timel": 23549, - "timelapse": 48154, - "timeless": 15558, - "timeline": 11492, - "timely": 19250, - "timeout": 41536, - "timer": 19725, - "timers": 44574, - "times": 26445, - "times": 1661, - "timesnow": 45487, - "timesof": 32522, - 
"timesofindia": 44182, - "timetable": 31971, - "timeto": 29187, - "timing": 13624, - "timm": 22444, - "timmy": 33252, - "timo": 13390, - "timo": 33777, - "timothy": 42087, - "timothy": 18560, - "timp": 42166, - "tin": 1310, - "tin": 5420, - "tina": 9257, - "tinder": 24287, - "tine": 22341, - "ting": 7451, - "ting": 694, - "tinged": 44829, - "tings": 35332, - "tini": 26839, - "tink": 39278, - "tinker": 45272, - "tinker": 40910, - "tino": 20538, - "tins": 37359, - "tint": 40497, - "tinted": 42618, - "tiny": 21716, - "tiny": 5591, - "tio": 27562, - "tion": 2274, - "tion": 740, - "tional": 22460, - "tional": 2986, - "tionality": 24514, - "tionally": 12409, - "tionary": 8381, - "tione": 44318, - "tioned": 9083, - "tioning": 15528, - "tionist": 25732, - "tions": 1371, - "tious": 14255, - "tip": 15383, - "tip": 4623, - "tipoff": 44521, - "tipp": 32294, - "tipped": 31878, - "tipper": 38095, - "tipperary": 45612, - "tipping": 27827, - "tips": 3173, - "tipton": 48809, - "tiptuesday": 42112, - "tique": 37772, - "tir": 25467, - "tir": 38462, - "tire": 29128, - "tire": 9362, - "tired": 6533, - "tireless": 39835, - "tirelessly": 41548, - "tires": 15533, - "tiring": 42630, - "tiru": 36033, - "tis": 7839, - "tis": 7394, - "tise": 13745, - "tisgarh": 40538, - "tish": 45148, - "tish": 28784, - "tism": 27113, - "tiss": 28155, - "tissue": 15368, - "tissues": 32172, - "tist": 7902, - "tista": 25580, - "tists": 25944, - "tit": 1991, - "tit": 13202, - "tita": 40936, - "titan": 13496, - "titan": 15516, - "titanic": 20729, - "titanium": 24409, - "titans": 13066, - "titi": 17434, - "titi": 48504, - "title": 28033, - "title": 3644, - "titled": 9939, - "titles": 9780, - "tito": 26838, - "titus": 36102, - "tium": 21975, - "tiv": 1835, - "tiva": 41886, - "tive": 14640, - "tive": 1420, - "tively": 9883, - "tiveness": 20955, - "tives": 7570, - "tivity": 9859, - "tivo": 32162, - "tix": 5835, - "tiz": 19376, - "tj": 18890, - "tj": 18988, - "tk": 22344, - "tk": 20676, - "tko": 37347, - "tks": 38739, 
- "tl": 14325, - "tl": 8190, - "tland": 30697, - "tlap": 41976, - "tlc": 22047, - "tle": 39141, - "tle": 5825, - "tles": 39363, - "tless": 17427, - "tlot": 41080, - "tls": 47367, - "tly": 37483, - "tly": 1646, - "tm": 9430, - "tm": 7789, - "tman": 20796, - "tmc": 35263, - "tment": 26485, - "tml": 39445, - "tmltalk": 42260, - "tmnt": 32444, - "tmobile": 34901, - "tmr": 35906, - "tmrw": 16496, - "tms": 44496, - "tmund": 23801, - "tmw": 45827, - "tmz": 37248, - "tn": 3827, - "tn": 7248, - "tna": 21150, - "tnam": 8079, - "tner": 34922, - "tness": 35212, - "tney": 9523, - "tng": 35898, - "tnt": 20659, - "tnx": 38220, - "to": 580, - "to": 531, - "toa": 17916, - "toad": 26096, - "toast": 24654, - "toast": 10920, - "toasted": 23533, - "toaster": 39061, - "toasty": 44726, - "tob": 24260, - "tobac": 12611, - "tobacco": 13905, - "tobago": 39482, - "tobe": 17534, - "tobe": 28740, - "tober": 18162, - "tober": 2925, - "toberfest": 26249, - "tobi": 40335, - "tobi": 48374, - "tobias": 32464, - "tobin": 42466, - "toby": 29659, - "toby": 18333, - "toc": 41907, - "toc": 30643, - "tock": 25274, - "tod": 38239, - "tod": 33568, - "toda": 47141, - "todas": 36150, - "today": 11800, - "today": 721, - "todayin": 32957, - "todays": 13513, - "todayshow": 29739, - "todd": 10398, - "todd": 9951, - "toddler": 17772, - "toddlers": 36719, - "toddy": 38926, - "todo": 48857, - "todo": 23087, - "todos": 33355, - "toe": 47756, - "toe": 11344, - "toes": 16511, - "tof": 6659, - "toff": 27319, - "toffee": 34880, - "tofficial": 47953, - "tofthe": 23678, - "toftheday": 20566, - "tofu": 24692, - "tog": 45715, - "toge": 1903, - "together": 17858, - "together": 1952, - "togo": 26729, - "tography": 33968, - "toh": 26851, - "toi": 7472, - "toi": 26941, - "toid": 49124, - "toile": 43148, - "toilet": 11071, - "toilets": 24027, - "toire": 39534, - "tok": 16690, - "tok": 27010, - "token": 32634, - "token": 17134, - "tokens": 23562, - "tokyo": 35038, - "tokyo": 6667, - "tol": 4678, - "tol": 32962, - "told": 3527, - 
"tole": 15677, - "toledo": 19812, - "toler": 12150, - "tolerance": 20377, - "tolerant": 38536, - "tolerate": 35556, - "tolkien": 32989, - "toll": 44090, - "toll": 14155, - "tollywood": 42016, - "tology": 34799, - "tom": 999, - "tom": 2435, - "toma": 42360, - "toma": 44710, - "tomas": 35944, - "tomas": 27178, - "tomat": 12041, - "tomato": 9867, - "tomatoes": 13004, - "tomb": 37187, - "tomb": 15582, - "tombs": 48613, - "tombstone": 45729, - "tome": 24137, - "tome": 24283, - "tomi": 46290, - "tomlin": 46649, - "tomlinson": 17484, - "tommorow": 42871, - "tommy": 16573, - "tommy": 8876, - "tomo": 31223, - "tomo": 34434, - "tomor": 1277, - "tomorrow": 19728, - "tomorrow": 1293, - "tomorrowland": 34951, - "tomorrows": 32258, - "tomorrowspaper": 35005, - "tomorrowspaperstoday": 35190, - "tomp": 43544, - "tompkins": 49068, - "toms": 10545, - "tomy": 18730, - "ton": 838, - "ton": 917, - "tona": 13459, - "tone": 32366, - "tone": 8408, - "toned": 29426, - "toner": 40614, - "tones": 14744, - "tong": 21510, - "tonga": 37882, - "tongue": 44820, - "tongue": 13626, - "tongues": 39837, - "toni": 17766, - "toni": 17171, - "tonic": 17808, - "tonics": 34647, - "tonight": 1009, - "tonights": 23312, - "tonite": 13449, - "tonka": 42781, - "tonline": 45867, - "tonne": 42450, - "tonnes": 24813, - "tons": 7555, - "tony": 9150, - "tony": 4767, - "tonyawards": 46068, - "too": 1843, - "too": 1256, - "took": 2280, - "tool": 13718, - "tool": 5999, - "toolbox": 46599, - "toolkit": 29849, - "tools": 5771, - "toom": 27550, - "toon": 24664, - "toon": 19701, - "toonami": 48336, - "toons": 35345, - "toor": 42590, - "tooth": 15316, - "tooth": 12030, - "toothbrush": 36841, - "toothpaste": 37322, - "tooting": 42969, - "top": 5534, - "top": 1253, - "topaz": 46125, - "tope": 32149, - "tope": 42239, - "topeka": 46884, - "topia": 29618, - "topic": 8720, - "topical": 37464, - "topics": 11916, - "topless": 37415, - "topo": 23008, - "topoli": 30152, - "topp": 19529, - "topped": 12588, - "topper": 31780, - 
"toppers": 41651, - "topping": 21071, - "toppings": 47554, - "topps": 20201, - "tops": 8154, - "topshop": 40953, - "topus": 21495, - "tor": 937, - "tor": 1208, - "tora": 45147, - "torah": 37945, - "toral": 45282, - "torch": 31921, - "torch": 15820, - "tore": 38066, - "tore": 19385, - "tored": 38046, - "torg": 33214, - "tori": 17689, - "tori": 17539, - "toria": 23732, - "torial": 28029, - "torian": 48399, - "tories": 14193, - "torino": 29178, - "torio": 34235, - "torn": 8572, - "torn": 18023, - "tornad": 24676, - "tornado": 9062, - "tornadoes": 28254, - "toro": 17892, - "toron": 37407, - "toronto": 16866, - "toronto": 4514, - "torpe": 34093, - "torpedo": 46582, - "torquay": 45738, - "torque": 31940, - "torre": 39563, - "torre": 38009, - "torrent": 42317, - "torrential": 41158, - "torres": 16049, - "tors": 2546, - "tortilla": 32683, - "torto": 24170, - "tortoise": 30178, - "torture": 16013, - "tortured": 29900, - "tory": 29390, - "tory": 4214, - "tos": 6094, - "tosc": 37719, - "tose": 38154, - "tosh": 17109, - "toshi": 31744, - "toss": 19656, - "tossed": 31296, - "tot": 4618, - "tot": 23659, - "total": 13507, - "total": 4445, - "totally": 5440, - "totals": 25772, - "tote": 48145, - "tote": 19031, - "totem": 45376, - "totes": 37199, - "tothe": 12222, - "toto": 39823, - "tots": 24978, - "totten": 14360, - "tottenham": 14889, - "tou": 1879, - "tou": 29261, - "touch": 9480, - "touch": 4526, - "touchdown": 18664, - "touchdowns": 37905, - "touched": 13190, - "touches": 14832, - "touching": 14088, - "touchscreen": 39095, - "tough": 12063, - "tough": 5499, - "tougher": 33722, - "toughest": 23773, - "toughness": 45522, - "toulou": 27145, - "toulouse": 30267, - "tour": 2710, - "tour": 1760, - "tourde": 39247, - "toured": 27654, - "touri": 4224, - "touring": 11853, - "tourism": 23661, - "tourism": 6556, - "tourist": 12123, - "tourists": 15546, - "tournament": 4097, - "tournaments": 23058, - "tourney": 12603, - "tours": 8948, - "tous": 37424, - "tout": 22300, - "touts": 41274, - 
"tov": 28970, - "tow": 11557, - "tow": 18653, - "toward": 8508, - "towards": 4447, - "towed": 45419, - "towel": 15953, - "towels": 26578, - "tower": 26669, - "tower": 4730, - "towering": 39444, - "towers": 12701, - "towie": 44613, - "towin": 45819, - "towing": 36963, - "town": 4068, - "town": 1605, - "townfc": 33981, - "townhall": 33408, - "townhouse": 40178, - "towns": 14173, - "townsend": 26826, - "township": 14622, - "townsville": 47330, - "towork": 48233, - "tox": 7742, - "tox": 16145, - "toxic": 27436, - "toxic": 12348, - "toxicity": 41234, - "toxin": 48899, - "toxins": 36618, - "toy": 14387, - "toy": 5988, - "toya": 37602, - "toyo": 7644, - "toyota": 8908, - "toys": 39508, - "toys": 7162, - "tp": 23760, - "tp": 15188, - "tpp": 29411, - "tps": 35246, - "tq": 43066, - "tr": 635, - "tr": 6337, - "tra": 752, - "tra": 2483, - "trac": 2266, - "trace": 48611, - "trace": 14767, - "traced": 47956, - "traces": 30913, - "tracey": 25558, - "tracing": 27897, - "track": 10887, - "track": 2700, - "tracked": 27049, - "tracker": 18123, - "tracking": 10428, - "tracklist": 39777, - "tracks": 7579, - "tract": 4690, - "traction": 10644, - "tractor": 14607, - "tractors": 37854, - "tracy": 32984, - "tracy": 15508, - "trad": 48716, - "trad": 38037, - "trade": 10457, - "trade": 3629, - "traded": 18860, - "trademark": 25011, - "trader": 17700, - "traders": 19112, - "trades": 18519, - "trading": 40083, - "trading": 6520, - "tradio": 20689, - "tradition": 20838, - "tradition": 8784, - "traditional": 41113, - "traditional": 5604, - "traditionally": 35532, - "traditions": 18016, - "traf": 3227, - "trafal": 32461, - "trafalgar": 36969, - "traff": 31571, - "traffic": 12080, - "traffic": 3399, - "trafficking": 15983, - "trafford": 22912, - "trage": 12430, - "tragedy": 14082, - "tragic": 14828, - "tragically": 39599, - "trail": 11523, - "trail": 4921, - "trailblazer": 41015, - "trailblazers": 35954, - "trailer": 4700, - "trailers": 24862, - "trailing": 37427, - "trails": 10633, - "train": 
9122, - "train": 3231, - "trained": 10874, - "trainee": 25795, - "trainees": 30382, - "trainer": 9767, - "trainers": 18871, - "training": 34508, - "training": 2199, - "trains": 9541, - "trait": 35160, - "traitor": 31760, - "traitors": 42633, - "traits": 25748, - "trajec": 42042, - "trak": 24065, - "tral": 14609, - "tram": 9800, - "tram": 17500, - "tramp": 46289, - "trampol": 32905, - "trampoline": 42800, - "tramrahim": 35220, - "tran": 1357, - "tran": 22031, - "trance": 30584, - "trance": 18671, - "trancefamily": 39630, - "trane": 35779, - "tranqu": 18912, - "tranquil": 35764, - "tranquility": 36688, - "trans": 1826, - "trans": 8126, - "transaction": 24881, - "transactions": 21653, - "transat": 37872, - "transatlantic": 40703, - "transc": 21073, - "transcend": 47087, - "transcript": 39008, - "transcription": 48765, - "transfer": 22659, - "transfer": 7134, - "transferred": 29700, - "transferring": 40924, - "transfers": 21621, - "transform": 8142, - "transform": 12288, - "transformation": 34204, - "transformation": 7832, - "transformational": 47135, - "transformationtuesday": 36511, - "transformative": 38106, - "transformed": 17453, - "transformer": 38235, - "transformers": 17843, - "transforming": 44470, - "transforming": 19251, - "transforms": 30312, - "transgender": 17732, - "transi": 32236, - "transit": 10174, - "transiti": 22939, - "transition": 11391, - "transitional": 41519, - "transitioning": 43586, - "transitions": 39374, - "transl": 12243, - "translate": 22655, - "translated": 20752, - "translates": 36334, - "translating": 42156, - "translation": 12153, - "translations": 41367, - "translator": 36230, - "translucent": 49052, - "transm": 18861, - "transmission": 16103, - "transmitted": 48605, - "transmitter": 40457, - "transp": 11726, - "transpa": 18524, - "transparen": 16108, - "transparency": 16828, - "transparent": 19017, - "transpl": 16038, - "transplant": 41871, - "transplant": 18771, - "transplantation": 45207, - "transpor": 19406, - "transport": 10231, 
- "transport": 7362, - "transportation": 10911, - "transported": 29089, - "transporter": 43568, - "transporting": 42259, - "trap": 36224, - "trap": 9677, - "trape": 42435, - "trapped": 15592, - "traps": 28517, - "tras": 30638, - "trash": 39215, - "trash": 9798, - "traum": 22263, - "trauma": 13846, - "traumati": 46613, - "traumatic": 29958, - "trav": 7586, - "trav": 46955, - "trave": 35357, - "travel": 2824, - "travel": 1949, - "travelblog": 35957, - "travelblogger": 25494, - "travelchat": 46455, - "traveled": 20384, - "traveler": 17794, - "travelers": 20644, - "travelgram": 40069, - "traveling": 9365, - "travelled": 23428, - "traveller": 22546, - "travellers": 29583, - "travelling": 11190, - "travelphotography": 22808, - "travelpics": 32293, - "travels": 11472, - "traveltips": 36260, - "traveltuesday": 16713, - "traverse": 35058, - "travi": 46971, - "travis": 27441, - "travis": 12287, - "traw": 42288, - "trax": 34421, - "tray": 38470, - "tray": 14621, - "trays": 39798, - "trc": 41803, - "tre": 975, - "tre": 6033, - "treach": 46005, - "tread": 26182, - "tread": 35658, - "treadmill": 37780, - "treas": 8591, - "treason": 28103, - "treasure": 9922, - "treasured": 48068, - "treasurer": 26985, - "treasures": 16500, - "treasury": 20956, - "treat": 3968, - "treat": 3901, - "treated": 9772, - "treating": 13842, - "treatment": 4869, - "treatments": 15839, - "treats": 8878, - "treaty": 19967, - "treble": 33194, - "trecht": 33812, - "tree": 13354, - "tree": 2677, - "treehouse": 42387, - "trees": 4682, - "trek": 13236, - "trek": 8136, - "trekking": 25293, - "trell": 35159, - "tremb": 44043, - "tremend": 14659, - "tremendous": 15988, - "tren": 2579, - "trench": 23846, - "trenches": 38723, - "trend": 19986, - "trend": 6643, - "trending": 6087, - "trends": 7015, - "trendsetter": 46666, - "trendy": 23072, - "trent": 45885, - "trent": 15548, - "trenton": 37470, - "tres": 23569, - "tress": 4733, - "tresses": 24273, - "trevor": 23437, - "trevor": 13219, - "trex": 42114, - "trey": 
36670, - "trey": 16939, - "tri": 924, - "tri": 9618, - "triad": 45602, - "trial": 5991, - "trials": 10992, - "triangle": 14615, - "triathlon": 18080, - "trib": 45151, - "tribal": 16629, - "tribe": 19943, - "tribe": 11365, - "tribeca": 35184, - "tribes": 26546, - "tribu": 3028, - "tribun": 14311, - "tribunal": 32911, - "tribune": 18556, - "tribute": 5493, - "tributes": 15537, - "tric": 9511, - "tric": 4081, - "trich": 39519, - "trick": 17177, - "trick": 8172, - "tricks": 13177, - "tricky": 22319, - "trics": 31437, - "trident": 35491, - "tridge": 18722, - "tried": 4554, - "tries": 4315, - "trife": 48962, - "trigge": 30509, - "trigger": 16158, - "triggered": 30924, - "triggers": 37319, - "tright": 29915, - "tril": 40626, - "trill": 39297, - "trilli": 39350, - "trillion": 20160, - "trilo": 15183, - "trilogy": 16862, - "trim": 14182, - "trimmed": 40657, - "trin": 6628, - "trinidad": 26244, - "trinity": 30744, - "trinity": 12267, - "trio": 10263, - "trip": 23421, - "trip": 2529, - "tripad": 37189, - "tripadvisor": 38708, - "triple": 16519, - "triple": 7673, - "triplets": 48601, - "tripod": 36141, - "tripoli": 40095, - "trippin": 43073, - "tripping": 35229, - "trippy": 35137, - "trips": 12292, - "tris": 29690, - "trish": 40511, - "trish": 37179, - "trisha": 39152, - "tristan": 25497, - "trit": 37087, - "triton": 45437, - "triu": 14782, - "trium": 21065, - "triumph": 26507, - "triumph": 15307, - "triumphant": 41918, - "trivi": 21228, - "trivia": 10642, - "triviatuesday": 45499, - "trix": 41017, - "tro": 1046, - "tro": 3332, - "trock": 44368, - "trojan": 30653, - "trojans": 25310, - "trol": 10306, - "troll": 39737, - "troll": 17103, - "trolley": 25124, - "trolling": 28552, - "trolls": 20890, - "tromb": 32390, - "trombone": 44423, - "tron": 19057, - "tron": 10684, - "tronic": 34258, - "tronics": 34397, - "troom": 23691, - "troop": 12492, - "troop": 24054, - "trooper": 18327, - "troopers": 23576, - "troops": 10109, - "trop": 31585, - "trope": 41150, - "trophies": 20998, - 
"trophy": 42676, - "trophy": 6502, - "tropic": 21794, - "tropic": 36736, - "tropical": 41699, - "tropical": 8686, - "tropics": 36940, - "tros": 40456, - "trose": 36022, - "trot": 30453, - "trotter": 38287, - "trou": 5181, - "troubad": 49037, - "trouble": 25669, - "trouble": 7848, - "troubled": 25568, - "troubles": 27254, - "trough": 39761, - "troupe": 34803, - "trous": 19727, - "trousers": 23172, - "trout": 14853, - "trove": 45350, - "trow": 46914, - "troy": 26283, - "troy": 12819, - "trs": 24770, - "tru": 931, - "tru": 25326, - "truck": 14781, - "truck": 4629, - "trucker": 45918, - "truckers": 43404, - "trucking": 26208, - "trucks": 9569, - "trude": 39017, - "trudeau": 15752, - "true": 13096, - "true": 2328, - "truec": 37583, - "truelove": 45711, - "truffle": 23064, - "truffles": 37057, - "truly": 4545, - "trum": 11766, - "trum": 11399, - "truman": 29414, - "trump": 9124, - "trump": 1797, - "trumpet": 23681, - "trumpp": 45550, - "trumprussia": 39135, - "trumps": 29793, - "trumptrain": 43595, - "trun": 16163, - "trun": 46661, - "trunk": 18347, - "trunks": 38531, - "truro": 43507, - "truss": 46080, - "trust": 17691, - "trust": 3876, - "truste": 17356, - "trusted": 16538, - "trustee": 30803, - "trustees": 28853, - "trusting": 33221, - "trusts": 27507, - "trustworthy": 46840, - "trusty": 37955, - "truth": 21335, - "truth": 4319, - "truths": 27179, - "trx": 31620, - "try": 4487, - "try": 1209, - "tryin": 31085, - "trying": 2551, - "tryna": 15702, - "tryout": 43832, - "tryouts": 28053, - "ts": 2290, - "ts": 590, - "tsa": 25977, - "tsal": 20438, - "tsb": 45015, - "tsc": 37437, - "tsch": 38778, - "tsd": 20611, - "tse": 49144, - "tsfor": 42654, - "tsford": 32823, - "tsh": 42872, - "tshirt": 14907, - "tshirts": 29377, - "tsi": 40048, - "tsi": 37867, - "tsk": 43600, - "tsla": 35681, - "tsm": 43452, - "tsman": 20046, - "tsn": 44921, - "tsn": 26896, - "tson": 42353, - "tson": 47140, - "tsp": 34230, - "tsu": 13950, - "tsu": 20175, - "tsun": 19155, - "tsunami": 24286, - 
"tsville": 29080, - "tt": 971, - "tt": 1402, - "tta": 2646, - "ttc": 27668, - "tte": 23105, - "tte": 3070, - "tted": 15163, - "tten": 11351, - "tten": 17479, - "tter": 18691, - "tter": 5165, - "tters": 6318, - "ttes": 9293, - "tti": 5237, - "ttin": 36589, - "tting": 1188, - "ttino": 47389, - "ttip": 46993, - "ttle": 9253, - "ttm": 46838, - "tto": 8759, - "tto": 8105, - "tton": 10562, - "ttot": 12480, - "ttp": 30828, - "ttr": 47589, - "tts": 11570, - "ttt": 17256, - "tttt": 33119, - "ttu": 44006, - "ttv": 24281, - "tty": 11457, - "tty": 1856, - "tu": 764, - "tu": 5760, - "tua": 41344, - "tual": 4799, - "tuan": 37297, - "tub": 34907, - "tub": 15450, - "tube": 38229, - "tube": 3308, - "tuber": 30371, - "tuberculo": 42606, - "tuberculosis": 43129, - "tubes": 22870, - "tubing": 40794, - "tubs": 41705, - "tubular": 48786, - "tuc": 14456, - "tuc": 43871, - "tuck": 22398, - "tucked": 26923, - "tucker": 39703, - "tucker": 15726, - "tucket": 32677, - "tucson": 17250, - "tudor": 24547, - "tue": 17515, - "tues": 2283, - "tues": 12113, - "tuesday": 10209, - "tuesday": 2519, - "tuesdaymotivation": 25432, - "tuesdays": 23195, - "tuesdaythoughts": 17988, - "tuf": 44510, - "tuff": 38868, - "tug": 47032, - "tug": 27902, - "tuition": 21129, - "tuk": 39271, - "tuk": 14993, - "tul": 9069, - "tul": 40837, - "tula": 36332, - "tulane": 44893, - "tulip": 28389, - "tulips": 30886, - "tulsa": 18850, - "tum": 12932, - "tum": 8843, - "tumb": 8831, - "tumble": 38284, - "tumbler": 48790, - "tumbling": 46226, - "tumblr": 11841, - "tummy": 26053, - "tumor": 22616, - "tumors": 39894, - "tumour": 45129, - "tun": 1415, - "tun": 21349, - "tuna": 15037, - "tundra": 39899, - "tune": 11427, - "tune": 3300, - "tuned": 5898, - "tunein": 16809, - "tuner": 42905, - "tunes": 31688, - "tunes": 10810, - "tunesapp": 32550, - "tung": 47940, - "tung": 31092, - "tuni": 16270, - "tunic": 43495, - "tuning": 19585, - "tunisia": 23346, - "tunnel": 11096, - "tunnels": 29814, - "tuous": 28738, - "tup": 37956, - "tup": 
4507, - "tupac": 31506, - "tups": 44855, - "tur": 985, - "tur": 17182, - "tura": 16127, - "tural": 45143, - "tural": 4261, - "turb": 18973, - "turban": 48515, - "turbine": 26880, - "turbines": 38863, - "turbo": 23578, - "turbo": 13668, - "turbul": 31100, - "turbulent": 47871, - "ture": 4321, - "ture": 941, - "tured": 3987, - "turer": 11993, - "turers": 16956, - "tures": 2400, - "turf": 36762, - "turf": 12510, - "turi": 11896, - "turin": 36251, - "turing": 5812, - "turismo": 30202, - "turk": 8254, - "turk": 32507, - "turkey": 35977, - "turkey": 4790, - "turkeys": 37991, - "turkish": 48199, - "turkish": 9278, - "turks": 34344, - "turmeric": 34044, - "turmoil": 37751, - "turn": 5522, - "turn": 2105, - "turnaround": 32719, - "turnbull": 27863, - "turned": 3771, - "turner": 42867, - "turner": 8777, - "turning": 4976, - "turno": 21377, - "turnout": 11654, - "turnover": 30794, - "turnpike": 38301, - "turns": 3185, - "turnt": 28887, - "turntable": 37953, - "turnup": 30591, - "turo": 29224, - "turquo": 19390, - "turquoise": 19899, - "turt": 13716, - "turtle": 35943, - "turtle": 10912, - "turtles": 17862, - "tus": 24828, - "tus": 7079, - "tusc": 17909, - "tuscal": 42638, - "tuscaloosa": 44375, - "tuscan": 42865, - "tuscany": 20885, - "tuss": 31741, - "tut": 35121, - "tutor": 10054, - "tutor": 27858, - "tutorial": 12857, - "tutorials": 30973, - "tutoring": 37532, - "tutti": 46880, - "tutu": 35845, - "tux": 28720, - "tux": 49186, - "tuxedo": 40173, - "tv": 3197, - "tv": 1583, - "tvc": 49190, - "tvd": 25889, - "tvmiaw": 38554, - "tvn": 44232, - "tvs": 27114, - "tvtime": 19947, - "tvxq": 43968, - "tw": 966, - "tw": 12842, - "twa": 46954, - "twain": 30689, - "twal": 48126, - "tware": 5707, - "twc": 41217, - "twd": 29440, - "twd": 19343, - "twdfamily": 38218, - "twe": 18365, - "tweak": 48870, - "tweaks": 42661, - "twee": 1330, - "tweed": 26904, - "tweeps": 14928, - "tweet": 11826, - "tweet": 1842, - "tweeta": 32024, - "tweetapicture": 40596, - "tweeted": 7841, - "tweeter": 32876, 
- "tweeters": 31713, - "tweeting": 8901, - "tweets": 3560, - "tweetyour": 45033, - "twel": 14476, - "twelf": 39443, - "twelfth": 44072, - "twell": 38722, - "twell": 30162, - "twelve": 19694, - "twent": 27027, - "twenti": 35167, - "twenty": 13016, - "twentyon": 39609, - "twentyonepilots": 40007, - "twer": 13923, - "twerk": 28506, - "twi": 5537, - "twice": 6970, - "twick": 34326, - "twickenham": 39619, - "twil": 12804, - "twili": 35754, - "twilight": 46366, - "twilight": 14512, - "twill": 43703, - "twin": 9342, - "twin": 6769, - "twine": 42775, - "twinkle": 36545, - "twinning": 30156, - "twinpeaks": 32042, - "twins": 8040, - "twist": 10589, - "twisted": 18233, - "twister": 45933, - "twists": 34149, - "twit": 1643, - "twit": 18704, - "twitart": 27709, - "twitch": 13251, - "twitch": 9153, - "twitter": 7546, - "twitter": 1989, - "twitterkurds": 32722, - "twitterstorians": 35389, - "two": 17211, - "two": 1237, - "twol": 31964, - "twood": 40404, - "twood": 13245, - "twp": 33283, - "twright": 46778, - "twt": 6825, - "twx": 26830, - "twy": 45861, - "tx": 6636, - "tx": 5200, - "txhsfb": 34757, - "txlege": 26995, - "txst": 40761, - "txt": 24595, - "txwx": 22995, - "ty": 1260, - "ty": 744, - "tya": 41273, - "tycoon": 36803, - "tye": 43097, - "tyfree": 41215, - "tyga": 41952, - "tying": 22559, - "tyl": 47537, - "tyler": 14787, - "tyler": 7058, - "tym": 45772, - "tyne": 27000, - "tyne": 29729, - "tyour": 16823, - "type": 15673, - "type": 3877, - "typed": 40753, - "typeface": 44969, - "types": 7543, - "typewriter": 42180, - "typho": 17486, - "typhoon": 21110, - "typic": 21648, - "typical": 9854, - "typically": 23175, - "typing": 20102, - "typo": 18831, - "typo": 29076, - "typography": 24332, - "tyr": 15590, - "tyran": 46921, - "tyranny": 35402, - "tyre": 38330, - "tyre": 16864, - "tyres": 21376, - "tyrone": 30226, - "tyson": 16616, - "tz": 7710, - "tz": 4983, - "tzer": 45267, - "tzky": 47127, - "tzman": 46032, - "tzu": 34354, - "té": 27208, - "té": 39694, - "u": 84, - "u": 340, - 
"ua": 34075, - "ua": 8441, - "uaap": 46753, - "uaap": 43774, - "uab": 35587, - "uae": 9752, - "ual": 1921, - "ually": 10767, - "uan": 33062, - "uas": 38339, - "uav": 30303, - "ub": 18430, - "ub": 13494, - "uba": 29768, - "ubc": 42479, - "ubc": 29455, - "ube": 30892, - "uber": 25896, - "uber": 10668, - "ubi": 26758, - "ubio": 32867, - "ubiquit": 48129, - "ubis": 28248, - "ubisoft": 32051, - "ubs": 43851, - "ubun": 28184, - "ubuntu": 30791, - "uc": 4903, - "uc": 12438, - "uca": 30942, - "ucc": 44844, - "ucc": 29138, - "ucci": 30746, - "uccino": 30409, - "ucd": 44746, - "ucd": 43514, - "ucf": 24414, - "uch": 19465, - "uch": 22394, - "uchi": 37473, - "uci": 46354, - "uci": 28925, - "uck": 34189, - "ucl": 12013, - "ucl": 13647, - "ucla": 37667, - "ucla": 17259, - "ucn": 49036, - "uconn": 30549, - "ud": 6560, - "ud": 5765, - "uda": 22800, - "udaipur": 49385, - "uddin": 43035, - "ude": 37016, - "ude": 35194, - "ue": 16696, - "ue": 1190, - "uefa": 19189, - "uel": 24231, - "uer": 45951, - "ues": 2526, - "uf": 17777, - "uf": 19230, - "ufc": 20396, - "ufc": 6490, - "uff": 45701, - "ufo": 19443, - "ufos": 48234, - "ug": 3754, - "ug": 16061, - "uga": 16056, - "ugand": 25965, - "uganda": 11125, - "ugandan": 44206, - "ugby": 30658, - "ugh": 39736, - "ugh": 12755, - "ugliest": 43543, - "ugly": 36070, - "ugly": 8159, - "ugu": 18144, - "uh": 17661, - "uh": 9219, - "uhc": 44974, - "uhh": 35938, - "uhhh": 45270, - "uhm": 35614, - "uhur": 29434, - "uhuru": 35690, - "ui": 17326, - "ui": 11458, - "uil": 29395, - "uit": 30696, - "uit": 47584, - "uj": 33266, - "uji": 39672, - "uk": 2294, - "uk": 1432, - "uka": 23294, - "uke": 48836, - "uke": 28577, - "uked": 48987, - "uki": 37435, - "uki": 9009, - "ukin": 34996, - "ukip": 20360, - "uklabour": 36902, - "ukmfg": 38764, - "uko": 33562, - "ukone": 24682, - "ukrain": 15468, - "ukraine": 7768, - "ukrainian": 16927, - "ukrunchat": 34481, - "uku": 29541, - "uku": 36082, - "ukulele": 39094, - "ul": 914, - "ul": 6625, - "ula": 34104, - "ula": 9506, 
- "ular": 4927, - "ulary": 21701, - "ulate": 20467, - "ulation": 32896, - "ule": 35616, - "ules": 26274, - "ulf": 49331, - "uli": 41841, - "uli": 22174, - "ull": 33254, - "ulla": 30577, - "ullah": 45310, - "ullivan": 45252, - "ulls": 37418, - "ulo": 46084, - "ulo": 36738, - "ulous": 42490, - "ulous": 4281, - "ulously": 20167, - "ulster": 29709, - "ulster": 24639, - "ult": 4380, - "ulti": 11925, - "ulties": 21884, - "ultimat": 16522, - "ultimate": 34684, - "ultimate": 5377, - "ultimatefan": 48372, - "ultimatefanlive": 48644, - "ultimately": 23023, - "ultr": 25636, - "ultra": 11398, - "ultra": 8118, - "ultram": 44519, - "ultrasound": 29717, - "ulture": 22272, - "ulty": 8036, - "ulu": 41815, - "ulu": 15659, - "ulum": 17235, - "uly": 33220, - "ulysses": 46114, - "um": 1622, - "um": 1008, - "uma": 29982, - "uma": 9256, - "uman": 27112, - "umar": 25656, - "umass": 39390, - "umatic": 45006, - "umb": 7493, - "umber": 19195, - "umbrel": 34773, - "umbrella": 17143, - "umbrellas": 42782, - "umbria": 39287, - "umc": 39491, - "umd": 42067, - "ume": 38480, - "umen": 42832, - "uments": 25924, - "umer": 23539, - "umes": 21403, - "umi": 48772, - "umi": 15458, - "umich": 41294, - "umin": 31542, - "umm": 26129, - "umm": 21215, - "ummer": 47628, - "ummm": 33665, - "umni": 31739, - "ump": 22224, - "umpire": 36214, - "ums": 8643, - "umu": 39788, - "un": 569, - "un": 2271, - "una": 6385, - "unable": 17793, - "unacceptable": 25234, - "unanim": 20800, - "unanimous": 33520, - "unanimously": 31798, - "unanswered": 43611, - "unarmed": 41541, - "unas": 41366, - "unavailable": 48430, - "unaware": 33347, - "unbeat": 37056, - "unbeatable": 40267, - "unbeaten": 19228, - "unbeliev": 11383, - "unbelievable": 13306, - "unbelievably": 33781, - "unborn": 37257, - "unboxing": 32866, - "unbreakable": 32956, - "unbroken": 49271, - "unc": 24921, - "unc": 15322, - "uncanny": 32556, - "uncertain": 30384, - "uncertainty": 23956, - "unch": 1527, - "unchanged": 34272, - "uncharted": 34560, - "unci": 25521, - 
"unciation": 34117, - "uncle": 31537, - "uncle": 8002, - "unclear": 32955, - "uncles": 45335, - "uncomfortable": 22470, - "uncommon": 34888, - "uncondition": 46561, - "unconditional": 31112, - "unconscious": 34791, - "unconstitutional": 43585, - "unconventional": 39440, - "uncover": 33031, - "uncovered": 28234, - "uncture": 38736, - "uncut": 41056, - "und": 9762, - "und": 9732, - "unda": 39932, - "undant": 25377, - "unday": 29338, - "unde": 45226, - "undead": 40105, - "undecided": 49368, - "undefeated": 15326, - "undeni": 38424, - "under": 1473, - "under": 1798, - "underage": 45669, - "underattack": 35075, - "undercover": 21595, - "underdog": 44266, - "undere": 21675, - "underestim": 23348, - "underestimate": 31794, - "undergo": 31545, - "undergoing": 26419, - "undergrad": 38331, - "undergraduate": 24320, - "underground": 9396, - "undering": 30826, - "underlying": 31812, - "undermine": 42839, - "underneath": 20857, - "underrated": 19494, - "unders": 20376, - "understand": 47582, - "understand": 4600, - "understanding": 7522, - "understands": 21607, - "understatement": 38296, - "understood": 17303, - "undertaker": 40144, - "undertaking": 49067, - "undertale": 48283, - "underthe": 41161, - "underwater": 14760, - "underway": 6273, - "underwear": 21154, - "underwood": 21474, - "underworld": 34760, - "undi": 23845, - "undisclosed": 39334, - "undo": 35454, - "undocumented": 35414, - "undoub": 38836, - "undoubtedly": 42204, - "undp": 26691, - "une": 4522, - "une": 10966, - "unearth": 32716, - "unearthed": 36632, - "unemp": 15139, - "unemployed": 32721, - "unemployment": 19350, - "unes": 6394, - "unesco": 16216, - "uneven": 43204, - "unex": 9484, - "unexpe": 10802, - "unexpec": 31829, - "unexpected": 12293, - "unexpectedly": 35622, - "unf": 29285, - "unfair": 22193, - "unfinished": 26526, - "unfit": 45367, - "unfold": 38681, - "unfollow": 38797, - "unfor": 14010, - "unforgettable": 16173, - "unfortun": 10194, - "unfortunate": 22361, - "unfortunately": 12863, - "unfpa": 
45048, - "ung": 10439, - "ung": 4334, - "unga": 19151, - "ungsoo": 25582, - "unh": 25365, - "unhappy": 26528, - "unhcr": 43451, - "unhealthy": 30994, - "uni": 1107, - "uni": 5926, - "unic": 7648, - "unicef": 38286, - "unicef": 19259, - "unicorn": 15660, - "unicorns": 35183, - "unidenti": 33707, - "unidentified": 35563, - "unification": 45036, - "unified": 20876, - "uniform": 11075, - "uniforms": 17838, - "unil": 32388, - "unilever": 48654, - "uniof": 21218, - "union": 14210, - "union": 3503, - "unions": 18353, - "unis": 30482, - "unis": 39266, - "unisex": 27609, - "unison": 46694, - "unit": 28522, - "unit": 5695, - "unite": 15078, - "unite": 11305, - "uniteblue": 20935, - "united": 10898, - "united": 2690, - "unitedstates": 39636, - "unitedway": 47486, - "unites": 32061, - "uniting": 31318, - "units": 10394, - "unity": 38300, - "unity": 8581, - "univ": 36680, - "univ": 14896, - "univer": 15574, - "univers": 5855, - "universal": 19148, - "universal": 8754, - "universe": 6104, - "universi": 41692, - "universit": 26019, - "universities": 16408, - "university": 40728, - "university": 2182, - "universityof": 46158, - "unk": 5542, - "unknown": 8685, - "unl": 43807, - "unlawful": 42305, - "unle": 19677, - "unlea": 23893, - "unleash": 26706, - "unleashed": 27955, - "unless": 10602, - "unlike": 16694, - "unlikely": 18904, - "unlimited": 11015, - "unlock": 18649, - "unlocked": 16770, - "unlocking": 40810, - "unlucky": 35029, - "unlv": 42283, - "unmanned": 36751, - "unmatched": 46054, - "unn": 38364, - "unnamed": 44985, - "unnecessary": 24100, - "unner": 31481, - "unning": 43282, - "unnoticed": 42807, - "uno": 32446, - "uno": 17078, - "unofficial": 22506, - "unpacking": 43589, - "unpaid": 32811, - "unparalleled": 44396, - "unplugged": 31724, - "unpopular": 40232, - "unprece": 23054, - "unprecedented": 23344, - "unpredictable": 38684, - "unra": 45150, - "unreal": 46980, - "unreal": 15636, - "unrelated": 38644, - "unreleased": 29654, - "unrest": 36452, - "uns": 25908, - 
"unsafe": 32071, - "unsc": 36395, - "unseen": 19069, - "unsigned": 39346, - "unsolved": 40836, - "unsplash": 46196, - "unstable": 34730, - "unstopp": 22105, - "unstoppable": 23484, - "unsuccessful": 47478, - "unsung": 33015, - "unsure": 26396, - "unt": 19654, - "unt": 6537, - "until": 1942, - "untitled": 21309, - "unto": 19801, - "untold": 32206, - "untouch": 44509, - "untouched": 42764, - "unused": 29636, - "unusual": 12613, - "unusually": 36465, - "unve": 6685, - "unveil": 20483, - "unveiled": 13572, - "unveiling": 20327, - "unveils": 15057, - "unwanted": 25285, - "unwind": 34064, - "unya": 37142, - "uo": 30874, - "uo": 36162, - "uof": 11155, - "uoft": 37329, - "uon": 48144, - "uous": 40185, - "up": 1083, - "up": 705, - "upa": 31727, - "upbeat": 39201, - "upcoming": 4196, - "upcycled": 46552, - "upd": 3226, - "update": 2491, - "updated": 5974, - "updates": 4904, - "updating": 22792, - "uper": 38082, - "uper": 33056, - "upfront": 42064, - "upgrade": 10365, - "upgraded": 18577, - "upgrades": 21253, - "upgrading": 34368, - "uph": 14128, - "uphill": 42767, - "uphol": 26195, - "uphold": 43897, - "upholstery": 44556, - "upl": 41939, - "uplift": 45389, - "uplifting": 29546, - "upload": 13968, - "uploaded": 16793, - "uploading": 30145, - "upon": 23524, - "upon": 5067, - "upp": 19549, - "upp": 45946, - "upper": 22465, - "upper": 7067, - "upri": 15982, - "upright": 29818, - "uprising": 26006, - "upro": 28922, - "ups": 6926, - "upscale": 47501, - "upset": 11214, - "upsets": 42637, - "upside": 15362, - "upstairs": 21387, - "upstate": 33335, - "upstream": 45517, - "upthe": 31510, - "upto": 26575, - "upton": 31910, - "uptown": 23807, - "upward": 32526, - "upwards": 34915, - "uq": 39591, - "ur": 565, - "ur": 1775, - "ura": 29337, - "ura": 3544, - "urable": 40194, - "ural": 23547, - "ural": 33948, - "uran": 16197, - "uranium": 29850, - "urban": 7931, - "urban": 5800, - "urbanart": 40834, - "urd": 47880, - "urday": 19742, - "urdu": 29976, - "ure": 5514, - "ure": 726, - "ured": 
4210, - "urer": 20864, - "ures": 2288, - "urg": 35995, - "urge": 14852, - "urged": 23790, - "urgency": 47612, - "urgent": 13693, - "urgently": 34534, - "urges": 16692, - "urging": 27748, - "uri": 11052, - "uri": 8699, - "urie": 46429, - "urin": 45245, - "urine": 28864, - "uring": 1351, - "url": 23464, - "urn": 38075, - "uro": 17343, - "uro": 5925, - "urology": 48585, - "urope": 14918, - "urs": 4794, - "urself": 31942, - "urst": 19181, - "urstruly": 34751, - "urstrulymahesh": 35314, - "ursula": 38390, - "urt": 24309, - "uru": 16322, - "uru": 11768, - "uruguay": 27931, - "urus": 14246, - "urve": 24583, - "ury": 8642, - "ury": 2106, - "us": 904, - "us": 718, - "usa": 9491, - "usa": 2547, - "usability": 46736, - "usable": 22890, - "usaf": 25017, - "usage": 19137, - "usaid": 34507, - "usair": 36742, - "usairforce": 42179, - "usarmy": 19132, - "usatoday": 40263, - "usav": 36056, - "usb": 10281, - "usc": 13346, - "usc": 14995, - "uscg": 43932, - "usd": 7485, - "usda": 25829, - "use": 4419, - "use": 1483, - "used": 32289, - "used": 2026, - "useful": 9784, - "useless": 20154, - "usemb": 39700, - "user": 21248, - "user": 7031, - "username": 28162, - "users": 7433, - "uses": 5282, - "useum": 45189, - "usf": 32385, - "usf": 28942, - "usgs": 35103, - "ush": 12001, - "ush": 18335, - "usher": 27411, - "ushi": 47734, - "usi": 25540, - "usic": 34909, - "usic": 16753, - "using": 1996, - "usky": 45778, - "usl": 42113, - "usm": 40041, - "usmc": 21678, - "usmnt": 30662, - "usn": 40579, - "usnavy": 24500, - "usnews": 43752, - "uso": 21539, - "usopen": 21782, - "usp": 26651, - "usps": 39980, - "usrc": 33274, - "uss": 11545, - "uss": 9260, - "ussia": 29553, - "ussoccer": 42828, - "ussr": 32697, - "ust": 35501, - "ust": 24725, - "usu": 4254, - "usu": 40434, - "usual": 6129, - "usually": 8296, - "usur": 45582, - "uswnt": 35255, - "ut": 1419, - "ut": 3641, - "uta": 42706, - "uta": 25925, - "utah": 27474, - "utah": 9312, - "utc": 18196, - "utd": 10493, - "ute": 16856, - "ute": 3130, - "uten": 
32089, - "uter": 39197, - "utes": 2850, - "uth": 48819, - "uth": 44750, - "uti": 24568, - "util": 28824, - "utili": 17015, - "utilities": 27210, - "utility": 14941, - "utilize": 36861, - "utilized": 47604, - "utilizing": 40212, - "utm": 47853, - "utmost": 42352, - "uto": 18866, - "uto": 13683, - "utopia": 34433, - "utpol": 42605, - "utr": 48726, - "utrecht": 37216, - "uts": 11740, - "utsa": 37528, - "utt": 17096, - "uttar": 40168, - "uttarak": 33755, - "uttarakhand": 35655, - "utter": 18769, - "utter": 24558, - "utterly": 21353, - "utto": 42183, - "utv": 36351, - "utz": 45320, - "uu": 5702, - "uu": 14553, - "uuu": 44355, - "uuu": 27656, - "uuuu": 16720, - "uuuu": 40797, - "uv": 23777, - "uv": 15977, - "uva": 23908, - "uw": 13933, - "uw": 19166, - "uwe": 48785, - "uwu": 35544, - "ux": 9251, - "ux": 6213, - "uy": 31929, - "uy": 48113, - "uz": 19398, - "uz": 36991, - "uzbe": 43007, - "uzbekistan": 45024, - "uzzi": 48210, - "v": 85, - "v": 341, - "va": 4648, - "va": 1892, - "vaa": 37488, - "vable": 23088, - "vac": 3125, - "vac": 34085, - "vaca": 48215, - "vacancies": 26333, - "vacancy": 21247, - "vacant": 25262, - "vacation": 28336, - "vacation": 6561, - "vacations": 29002, - "vacay": 44716, - "vacc": 13342, - "vaccin": 19164, - "vaccinated": 48134, - "vaccination": 32518, - "vaccine": 47780, - "vaccine": 17493, - "vaccines": 25860, - "vach": 46211, - "vacu": 16058, - "vacuum": 18420, - "vad": 11880, - "vada": 46759, - "vader": 21908, - "vae": 39384, - "vag": 13015, - "vague": 42154, - "vah": 26921, - "vai": 26893, - "vai": 36802, - "vail": 21189, - "vain": 25538, - "vais": 28719, - "vaj": 34206, - "vak": 16288, - "vak": 41597, - "val": 1214, - "val": 1560, - "vala": 48525, - "valdez": 40617, - "vale": 35554, - "vale": 10820, - "valedic": 43525, - "valen": 12630, - "valence": 30225, - "valenci": 34183, - "valencia": 16559, - "valent": 3655, - "valent": 15300, - "valentin": 48631, - "valentina": 43741, - "valentine": 11208, - "valentine": 5876, - "valentines": 10259, - 
"valentinesday": 12369, - "valentino": 29624, - "valeri": 31951, - "valerie": 25592, - "valet": 45749, - "vali": 8230, - "valiant": 33804, - "valid": 15126, - "validation": 32536, - "valkyrie": 42326, - "vall": 23523, - "vall": 35295, - "vallarta": 47874, - "valle": 24857, - "valle": 29105, - "valley": 18354, - "valley": 3136, - "valleys": 28649, - "valor": 30930, - "vals": 7431, - "valu": 6291, - "valuable": 10056, - "valuation": 25894, - "value": 41358, - "value": 4602, - "valued": 17801, - "values": 8857, - "valve": 17001, - "valves": 33517, - "vam": 9983, - "vamo": 46718, - "vamos": 30346, - "vamp": 10680, - "vampi": 47017, - "vampire": 47576, - "vampire": 13220, - "vampires": 30868, - "vamps": 44810, - "van": 2446, - "van": 2451, - "vana": 20543, - "vanc": 6320, - "vance": 31447, - "vancou": 6750, - "vancouver": 31904, - "vancouver": 7208, - "vand": 11691, - "vandalism": 45664, - "vander": 16264, - "vanderbilt": 33524, - "vandy": 39268, - "vane": 43828, - "vaness": 13328, - "vanessa": 16836, - "vangogh": 47849, - "vanguard": 27916, - "vani": 15396, - "vani": 26459, - "vania": 10998, - "vanilla": 11974, - "vanished": 43783, - "vanishing": 48296, - "vanity": 48353, - "vanity": 22938, - "vans": 11711, - "vant": 26298, - "vantage": 31749, - "vanu": 42892, - "vanuatu": 48766, - "vap": 10462, - "vape": 25423, - "vape": 20219, - "vaping": 29403, - "vapor": 37167, - "vapor": 30729, - "vapori": 46183, - "var": 3187, - "var": 12998, - "vara": 47492, - "varan": 36585, - "varanasi": 39364, - "vard": 21866, - "vard": 8773, - "vardy": 47371, - "vare": 38159, - "vares": 42895, - "vargas": 32752, - "vari": 3354, - "variable": 26416, - "varian": 34334, - "variant": 20293, - "variants": 38312, - "variation": 26420, - "variations": 29025, - "varied": 32334, - "varies": 32543, - "varieties": 23805, - "variety": 8396, - "various": 7395, - "varsity": 43716, - "varsity": 8574, - "varun": 48120, - "varun": 22069, - "vary": 18855, - "varying": 36456, - "vas": 5669, - "vas": 5995, - 
"vasc": 40995, - "vascular": 19218, - "vase": 20431, - "vasi": 49092, - "vast": 24413, - "vast": 16414, - "vastly": 48257, - "vat": 11588, - "vat": 18363, - "vatican": 21030, - "vation": 37884, - "vau": 6391, - "vaugh": 25158, - "vaughan": 21392, - "vaughn": 29013, - "vaul": 27469, - "vault": 15240, - "vaus": 40217, - "vaux": 27403, - "vauxhall": 29173, - "vaw": 47952, - "vay": 48000, - "vaz": 38142, - "vb": 29365, - "vb": 8778, - "vball": 38329, - "vc": 28670, - "vc": 7952, - "vcs": 43528, - "vcu": 40102, - "vd": 9515, - "vday": 42055, - "ve": 673, - "ve": 563, - "vea": 43798, - "veal": 36616, - "veau": 24419, - "vec": 19912, - "vector": 40453, - "vector": 21533, - "ved": 19515, - "ved": 1102, - "veda": 44401, - "vedere": 45660, - "vedi": 47971, - "vee": 35708, - "vee": 17073, - "veen": 22432, - "veer": 21243, - "veer": 22058, - "veg": 9048, - "veg": 16460, - "vega": 22930, - "vegan": 15705, - "vegan": 5615, - "vegans": 48514, - "vegas": 20288, - "vegas": 4413, - "vege": 6219, - "vegetable": 15725, - "vegetables": 14119, - "vegetarian": 14600, - "vegetation": 33947, - "veggie": 19401, - "veggies": 16767, - "vehic": 3973, - "vehicle": 5299, - "vehicles": 8361, - "veil": 23516, - "vein": 29169, - "veins": 28867, - "veit": 30620, - "vel": 942, - "vel": 1287, - "vela": 34898, - "veld": 34011, - "veled": 15370, - "veli": 49166, - "veling": 37970, - "vell": 21173, - "vell": 32997, - "velo": 14357, - "velo": 33850, - "velocity": 23811, - "vels": 5109, - "velve": 37849, - "velvet": 11063, - "vely": 1708, - "vember": 3477, - "vement": 3129, - "vements": 11104, - "ven": 1240, - "ven": 1638, - "vena": 47442, - "vend": 10851, - "vending": 29202, - "vendor": 21261, - "vendors": 20353, - "vene": 5365, - "veness": 10516, - "venetian": 34336, - "venezia": 34139, - "venezu": 10939, - "venezuela": 12839, - "venezuelan": 34699, - "veng": 31526, - "venge": 27757, - "vengeance": 32057, - "veni": 31142, - "venice": 11010, - "vening": 47532, - "venison": 40037, - "venom": 42491, - 
"venom": 21588, - "vens": 20884, - "vent": 4373, - "vent": 5687, - "ventil": 39522, - "ventilation": 35066, - "venting": 15731, - "vention": 4122, - "vents": 12833, - "ventu": 48217, - "ventura": 20921, - "venture": 37046, - "venture": 12543, - "ventures": 20829, - "venue": 5097, - "venues": 18120, - "venus": 14691, - "ver": 624, - "ver": 667, - "vera": 13350, - "verage": 3725, - "verb": 34952, - "verbal": 26522, - "verbally": 39985, - "verbs": 45687, - "verde": 16935, - "verdi": 42306, - "verdict": 18030, - "vere": 11135, - "vere": 34707, - "vered": 2868, - "verge": 23913, - "veri": 11638, - "verification": 33521, - "verified": 22555, - "verify": 34722, - "vering": 4630, - "veriz": 19707, - "verizon": 21532, - "verma": 41261, - "vermont": 19241, - "vern": 2214, - "vern": 12586, - "verne": 45553, - "vernon": 18348, - "vero": 45217, - "vero": 38208, - "verona": 31819, - "veronic": 39551, - "veronica": 24039, - "vers": 1219, - "vers": 2094, - "versa": 35765, - "versace": 25422, - "versail": 29857, - "versailles": 32129, - "versary": 2940, - "versatile": 18110, - "versatility": 41340, - "verse": 39466, - "verse": 3131, - "verses": 30769, - "versi": 8934, - "version": 3273, - "versions": 16190, - "versity": 1906, - "verst": 42484, - "verstappen": 45064, - "versus": 14548, - "versy": 18522, - "vert": 11742, - "verte": 35158, - "verted": 48173, - "verti": 30459, - "vertical": 14293, - "vertigo": 42477, - "verton": 40632, - "verts": 37265, - "very": 11698, - "very": 1070, - "veryday": 37944, - "verything": 45174, - "ves": 9616, - "ves": 1003, - "vesmatter": 47636, - "vespa": 46029, - "vessel": 16387, - "vessels": 22822, - "vest": 31657, - "vest": 12473, - "vesti": 40349, - "vests": 41906, - "vet": 12294, - "vet": 5951, - "veter": 4330, - "veteran": 20797, - "veteran": 8814, - "veterans": 7092, - "veteransday": 26409, - "veterin": 43959, - "veterinary": 25458, - "veto": 36570, - "vets": 13113, - "vette": 17045, - "vettel": 28700, - "vevo": 35141, - "vex": 36187, - "vex": 
43978, - "vey": 34792, - "vey": 3884, - "vez": 35987, - "vez": 17226, - "vf": 25966, - "vfl": 33726, - "vfx": 30149, - "vg": 40591, - "vg": 22346, - "vh": 46953, - "vh": 23847, - "vhs": 21932, - "vi": 603, - "vi": 4259, - "via": 1048, - "viable": 25752, - "viadu": 37012, - "viaduct": 39113, - "vial": 39951, - "vian": 40487, - "vian": 16124, - "vibe": 37974, - "vibe": 12813, - "vibes": 7764, - "vibr": 9527, - "vibrant": 14270, - "vibration": 37456, - "vibrations": 43660, - "vic": 1555, - "vic": 4412, - "vica": 46168, - "vicar": 43899, - "vice": 43572, - "vice": 6931, - "vicente": 39411, - "vices": 8332, - "vich": 24143, - "vici": 46670, - "vicious": 25177, - "vick": 15116, - "vick": 29704, - "vickers": 48452, - "vicki": 34927, - "vicky": 37176, - "vicky": 25788, - "victi": 6861, - "victim": 9133, - "victims": 7131, - "victor": 2423, - "victor": 10690, - "victori": 17555, - "victoria": 39286, - "victoria": 6127, - "victorian": 12350, - "victorias": 47791, - "victories": 24577, - "victorious": 24033, - "victory": 36668, - "victory": 4127, - "vid": 17233, - "vid": 9284, - "vida": 19015, - "vidal": 36678, - "vide": 1334, - "vide": 45244, - "video": 9478, - "video": 1455, - "videogame": 35097, - "videogames": 21149, - "videos": 6081, - "vids": 23035, - "vidy": 29639, - "vidya": 45264, - "vie": 922, - "vie": 8538, - "vien": 36493, - "vienna": 12670, - "vier": 15352, - "vier": 11987, - "viera": 21114, - "viernes": 33826, - "vies": 22458, - "viest": 31979, - "viet": 17558, - "viet": 13128, - "vietnam": 19558, - "vietnam": 8623, - "vietnamese": 22382, - "view": 12004, - "view": 1093, - "viewed": 7226, - "viewer": 15061, - "viewers": 14275, - "viewing": 7124, - "viewpoint": 41604, - "views": 2758, - "vig": 8549, - "vig": 45083, - "vigil": 21538, - "vigil": 19896, - "vigilant": 43026, - "vigne": 40447, - "vigne": 34581, - "vigo": 44097, - "vigor": 26781, - "vii": 17759, - "viii": 20414, - "vijay": 12014, - "vijay": 10823, - "vijaysethu": 47966, - "vik": 10764, - "vik": 17181, 
- "vika": 39562, - "vikas": 37116, - "viking": 26663, - "viking": 15897, - "vikings": 11713, - "vikram": 41136, - "vikram": 24314, - "viktor": 36101, - "vil": 1338, - "vil": 3000, - "vila": 37505, - "vile": 27247, - "vill": 10481, - "vill": 45698, - "villa": 3203, - "villa": 7754, - "village": 34584, - "village": 4331, - "villagers": 34283, - "villages": 17621, - "villain": 15425, - "villains": 25271, - "villanova": 44025, - "villar": 35164, - "villas": 28907, - "ville": 11110, - "ville": 1930, - "villen": 46177, - "villi": 36907, - "vimeo": 48720, - "vin": 1379, - "vin": 2558, - "vina": 35682, - "vinai": 37396, - "vinaigrette": 39876, - "vinay": 43952, - "vince": 32429, - "vince": 6236, - "vincen": 33402, - "vincent": 29069, - "vincent": 10357, - "vinci": 30199, - "vind": 20275, - "vindic": 39582, - "vine": 8471, - "vine": 7721, - "vinegar": 23834, - "vines": 21268, - "vineyard": 16527, - "vineyards": 23082, - "ving": 5375, - "ving": 903, - "vingne": 42579, - "vings": 22510, - "vini": 48119, - "vinnie": 40885, - "vinny": 36794, - "vino": 14509, - "vinod": 43348, - "vins": 34820, - "vinson": 45945, - "vintag": 10936, - "vintage": 13654, - "vintage": 3266, - "viny": 40990, - "vinyl": 22835, - "vinyl": 5754, - "vio": 11913, - "vio": 20324, - "viol": 3164, - "viola": 27438, - "violate": 44875, - "violated": 38192, - "violating": 37554, - "violation": 22919, - "violations": 21969, - "violence": 5450, - "violent": 11565, - "violently": 47758, - "violet": 16118, - "violets": 42861, - "violin": 17058, - "violinist": 36299, - "vion": 35496, - "vious": 6418, - "viously": 7149, - "vip": 45714, - "vip": 7111, - "viper": 27401, - "vips": 41149, - "vir": 1790, - "vir": 25319, - "vira": 35910, - "viral": 11653, - "virat": 32473, - "virgil": 39076, - "virgin": 5651, - "virgin": 12103, - "virgini": 43426, - "virginia": 6728, - "virgo": 39978, - "viro": 32301, - "viron": 38309, - "virtu": 7977, - "virtual": 18059, - "virtual": 7790, - "virtually": 22475, - "virtualreality": 32608, 
- "virtue": 26860, - "virtues": 42167, - "virtuoso": 47027, - "virus": 11808, - "viruses": 34830, - "vis": 1301, - "vis": 5337, - "visa": 12802, - "visas": 41228, - "vise": 24977, - "vised": 14810, - "vish": 12024, - "vish": 29124, - "vishal": 33648, - "vishnu": 37816, - "visi": 1409, - "visibility": 15921, - "visible": 36658, - "visible": 8626, - "vising": 37439, - "vision": 11147, - "vision": 2515, - "visional": 24627, - "visionary": 22959, - "visions": 13804, - "visit": 3388, - "visit": 1600, - "visitation": 44370, - "visited": 5580, - "visiting": 4680, - "visitor": 13881, - "visitors": 9160, - "visits": 8489, - "visitscotland": 28760, - "visitspain": 48860, - "vism": 15514, - "viso": 46732, - "visor": 24217, - "vist": 21436, - "vista": 13865, - "visu": 7739, - "visual": 17004, - "visual": 7195, - "visualization": 28500, - "visualize": 45057, - "visually": 25743, - "visuals": 21315, - "viswas": 36513, - "viswasam": 47664, - "vit": 4056, - "vit": 35580, - "vita": 15700, - "vital": 32525, - "vital": 10585, - "vitality": 36385, - "vitam": 9856, - "vitamin": 13675, - "vitamins": 22582, - "vito": 36725, - "vity": 4893, - "vitz": 26188, - "vius": 41571, - "viv": 21827, - "viv": 35363, - "viva": 17399, - "vival": 35920, - "vive": 18980, - "vive": 24004, - "vivek": 36243, - "vivi": 11625, - "vivian": 30129, - "vivid": 22984, - "vivo": 28091, - "vivo": 25888, - "vix": 28976, - "vix": 34811, - "vixen": 38757, - "vixx": 32106, - "viz": 28251, - "viz": 31786, - "vj": 45439, - "vj": 30827, - "vk": 41893, - "vl": 37580, - "vl": 36442, - "vla": 23686, - "vlad": 41089, - "vladi": 19320, - "vladimir": 21702, - "vlive": 46797, - "vlog": 18894, - "vm": 16204, - "vm": 20269, - "vma": 35666, - "vmas": 30236, - "vmware": 29615, - "vn": 47098, - "vn": 25076, - "vo": 947, - "vo": 3951, - "voc": 4105, - "voc": 20855, - "vocab": 21346, - "vocabulary": 23804, - "vocal": 34037, - "vocal": 13147, - "vocali": 19134, - "vocalist": 22102, - "vocals": 17666, - "vocation": 20521, - "vocational": 
33751, - "vod": 11820, - "vod": 35854, - "vodaf": 28436, - "vodafone": 38695, - "vodka": 13646, - "vogel": 44960, - "vogue": 24418, - "vogue": 13178, - "voic": 29185, - "voice": 13179, - "voice": 3386, - "voiced": 34352, - "voiceof": 44966, - "voiceover": 41979, - "voices": 9144, - "void": 21561, - "voip": 42762, - "voir": 16036, - "vol": 1343, - "vol": 7945, - "volatile": 41022, - "volatility": 32355, - "volcan": 9916, - "volcanic": 24072, - "volcano": 14581, - "volcanoes": 38055, - "voli": 40138, - "volk": 13432, - "volkswag": 14407, - "volkswagen": 15342, - "volley": 7130, - "volley": 34656, - "volleyball": 7458, - "volo": 44791, - "vols": 20404, - "volt": 26430, - "volta": 29879, - "volta": 33480, - "voltage": 23118, - "voltron": 39314, - "volu": 3563, - "volume": 8284, - "volumes": 22651, - "volun": 3356, - "voluntar": 48823, - "voluntary": 23815, - "volunte": 3556, - "volunteer": 32331, - "volunteer": 7114, - "volunteered": 34000, - "volunteering": 14902, - "volunteers": 5939, - "volution": 24043, - "volved": 42888, - "volvo": 39991, - "volvo": 16906, - "vom": 24198, - "vomit": 46485, - "von": 11269, - "von": 8497, - "voo": 19497, - "voodoo": 26869, - "voor": 34291, - "voor": 34464, - "vor": 8338, - "vor": 5308, - "vore": 18215, - "vortex": 30071, - "vos": 16863, - "vot": 48558, - "vote": 6830, - "vote": 2187, - "voted": 6454, - "votel": 41379, - "voter": 44474, - "voter": 14065, - "voters": 8925, - "votes": 6693, - "voting": 5756, - "vou": 11045, - "voucher": 18190, - "vouchers": 23384, - "vous": 10636, - "vow": 34787, - "vows": 21677, - "vox": 29215, - "vox": 22692, - "voy": 10622, - "voy": 15021, - "voyage": 16299, - "voyager": 29669, - "vp": 32758, - "vp": 3896, - "vpn": 38212, - "vr": 16840, - "vr": 5921, - "vre": 44500, - "vre": 17501, - "vs": 11385, - "vs": 1547, - "vsco": 26752, - "vsco": 32822, - "vscocam": 34694, - "vsky": 37791, - "vss": 31919, - "vt": 31732, - "vt": 10291, - "vu": 8664, - "vu": 13230, - "vue": 43915, - "vue": 19313, - "vuel": 
31312, - "vuelta": 43856, - "vuitton": 26705, - "vul": 6856, - "vulcan": 34767, - "vulner": 11213, - "vulnerability": 28797, - "vulnerable": 14332, - "vulture": 34593, - "vultures": 47197, - "vv": 19264, - "vv": 35686, - "vw": 28650, - "vw": 13250, - "vx": 47644, - "vy": 11566, - "vy": 5157, - "w": 86, - "w": 342, - "wa": 869, - "wa": 2663, - "waa": 35874, - "wab": 19893, - "wab": 36852, - "wac": 27445, - "wac": 37947, - "wack": 22880, - "wack": 38270, - "wacky": 34318, - "waco": 36035, - "wad": 11133, - "wad": 30451, - "wada": 40006, - "wade": 40237, - "wade": 14180, - "wadi": 37253, - "waf": 17638, - "wafc": 49086, - "waff": 13940, - "waffle": 20375, - "waffles": 24205, - "wag": 5764, - "wag": 19177, - "wage": 10716, - "wager": 43430, - "wages": 19114, - "wagner": 18081, - "wagon": 13260, - "wagons": 47944, - "wags": 48580, - "wah": 24812, - "wah": 18014, - "wahl": 27500, - "wahlberg": 35151, - "wahoo": 47995, - "wai": 11469, - "wai": 21569, - "waifu": 46551, - "waikiki": 44907, - "wain": 28358, - "wain": 20120, - "wainwright": 45878, - "waist": 36946, - "waist": 18459, - "wait": 10021, - "wait": 1885, - "waite": 24272, - "waited": 18492, - "waiter": 32946, - "waitin": 44482, - "waiting": 2680, - "waitress": 39760, - "waitrose": 37164, - "waits": 21361, - "waiver": 42866, - "waj": 49367, - "wak": 11172, - "wak": 36015, - "waka": 42696, - "wake": 10501, - "wake": 5731, - "wakefield": 26358, - "wakes": 29108, - "wakeup": 26328, - "wakeup": 35380, - "wakeupamerica": 37474, - "waking": 13025, - "wal": 1056, - "wal": 6903, - "wala": 16468, - "walang": 49180, - "walcott": 45744, - "wald": 46930, - "wald": 15724, - "walden": 39311, - "waldo": 32440, - "waldorf": 38227, - "wale": 41247, - "wale": 20336, - "wales": 25383, - "wales": 5110, - "walgreens": 38490, - "wali": 37576, - "wali": 14768, - "walia": 44455, - "walk": 8588, - "walk": 2374, - "walkaway": 48255, - "walked": 8667, - "walker": 24735, - "walker": 6150, - "walkers": 23366, - "walkin": 45792, - "walking": 
12644, - "walking": 3941, - "walkingdead": 14948, - "walkout": 47470, - "walks": 8192, - "walkway": 36614, - "wall": 4316, - "wall": 2569, - "walla": 26007, - "walla": 39982, - "wallabies": 48926, - "wallace": 12535, - "wallart": 36223, - "walled": 36567, - "waller": 45340, - "wallet": 12154, - "wallets": 38550, - "walleye": 49099, - "wallis": 42206, - "wallpaper": 10560, - "wallpapers": 29841, - "walls": 8258, - "wallstreet": 45341, - "wally": 26024, - "walmart": 11972, - "walnut": 16310, - "walnuts": 38294, - "walsall": 42935, - "walsh": 12856, - "walt": 23535, - "walt": 14312, - "waltdisneyworld": 36505, - "walter": 31156, - "walter": 10645, - "walters": 25532, - "waltham": 42742, - "waltham": 45581, - "walton": 19485, - "waltz": 35982, - "wam": 20503, - "wamy": 46970, - "wan": 2060, - "wan": 4557, - "wana": 30830, - "wand": 14636, - "wand": 28559, - "wanda": 25070, - "wander": 12985, - "wander": 24473, - "wandered": 46593, - "wanderers": 27540, - "wandering": 22597, - "wanderlust": 16129, - "wane": 27459, - "wang": 19731, - "wang": 11900, - "wani": 21674, - "wankers": 42189, - "wann": 23622, - "wanna": 35940, - "wanna": 3836, - "wannabe": 40730, - "wannaone": 44832, - "want": 18356, - "want": 1280, - "wanted": 3146, - "wanting": 12801, - "wants": 3107, - "wap": 27393, - "wap": 30368, - "waq": 47512, - "war": 984, - "war": 2238, - "wara": 21631, - "warbler": 33891, - "warcraft": 13660, - "ward": 7728, - "ward": 1460, - "warden": 27798, - "wardly": 30780, - "wardro": 14247, - "wardrobe": 15020, - "wards": 2593, - "ware": 7416, - "ware": 4476, - "wareagle": 35716, - "warehouse": 13054, - "wareness": 41601, - "wareness": 35870, - "wares": 30692, - "warfare": 15739, - "warhammer": 26832, - "warhol": 27554, - "wari": 20977, - "wark": 46346, - "wark": 15164, - "warlock": 42455, - "warm": 14725, - "warm": 3616, - "warmed": 36695, - "warmer": 14328, - "warmest": 30910, - "warming": 8606, - "warmly": 45322, - "warmongers": 33205, - "warms": 32917, - "warmth": 19636, - 
"warmup": 29904, - "warmups": 44094, - "warn": 19360, - "warned": 16409, - "warner": 28564, - "warner": 13402, - "warning": 4994, - "warnings": 18098, - "warns": 14086, - "waron": 38947, - "warp": 32411, - "warped": 32125, - "warran": 17392, - "warrant": 22554, - "warrants": 45677, - "warranty": 23999, - "warren": 23143, - "warren": 9234, - "warri": 4109, - "warrington": 31203, - "warrior": 18998, - "warrior": 8148, - "warriors": 6421, - "wars": 3931, - "warsaw": 21072, - "warship": 47846, - "wart": 43535, - "wart": 7346, - "wartime": 42998, - "warts": 21781, - "warwick": 23081, - "warwick": 22215, - "warwickshire": 36766, - "wary": 36213, - "was": 3398, - "was": 739, - "wasabi": 47334, - "wash": 3363, - "wash": 7810, - "washed": 14092, - "washer": 24085, - "washes": 38950, - "washing": 13029, - "washington": 16774, - "washington": 4365, - "washingtondc": 40225, - "washingtonpost": 28426, - "wasn": 5044, - "wasnt": 29607, - "wasp": 24889, - "wasps": 35300, - "wassup": 45708, - "wast": 28886, - "waste": 18157, - "waste": 6065, - "wasted": 18278, - "wasteland": 44035, - "wastewater": 34463, - "wasting": 25577, - "wat": 800, - "wat": 10621, - "wata": 42509, - "watch": 7046, - "watch": 1239, - "watchdog": 35303, - "watched": 5775, - "watcher": 35971, - "watchers": 28443, - "watches": 9521, - "watchin": 32432, - "watching": 2113, - "water": 2505, - "water": 1573, - "watercolor": 14211, - "watercolour": 18377, - "waterfall": 16403, - "waterfalls": 26692, - "waterford": 24448, - "waterfront": 16605, - "waterhouse": 45072, - "watering": 19871, - "waterloo": 17465, - "watermelon": 19889, - "waterproof": 17613, - "waters": 7753, - "watershed": 33204, - "waterstones": 45014, - "waterways": 37395, - "watford": 23162, - "watfordfc": 37328, - "wati": 27966, - "watkins": 22539, - "watson": 35490, - "watson": 9294, - "watt": 22899, - "watt": 15805, - "wattpad": 32351, - "watts": 14750, - "wau": 9479, - "wav": 6054, - "wave": 17530, - "wave": 4535, - "waved": 44657, - "waver": 
25997, - "waves": 7882, - "waving": 26545, - "wavy": 31941, - "waw": 22039, - "wawrinka": 48414, - "wawx": 47387, - "wax": 18789, - "wax": 11910, - "waxing": 38781, - "way": 3079, - "way": 923, - "wayback": 47822, - "wayne": 23632, - "wayne": 7003, - "ways": 1248, - "waz": 20889, - "waz": 48835, - "wb": 10726, - "wb": 12377, - "wba": 22675, - "wbb": 14482, - "wbc": 26745, - "wbo": 49053, - "wbz": 35471, - "wc": 4842, - "wc": 5755, - "wcc": 47166, - "wcc": 34926, - "wcpo": 46624, - "wcs": 39916, - "wcvb": 32709, - "wcw": 9041, - "wd": 15998, - "wd": 7494, - "wdw": 40334, - "we": 598, - "we": 649, - "wea": 37146, - "wea": 47301, - "weak": 12128, - "weak": 10128, - "weaker": 39735, - "weakness": 21448, - "weaknesses": 43487, - "weal": 14759, - "wealth": 33150, - "wealth": 7904, - "wealthy": 22617, - "weap": 6156, - "weapon": 42612, - "weapon": 10537, - "weapons": 10007, - "wear": 12206, - "wear": 2839, - "wearab": 22983, - "wearable": 44943, - "wearable": 24973, - "wearables": 30319, - "weare": 4264, - "weare": 27867, - "weareall": 45980, - "wearec": 43620, - "wearen": 45635, - "weareone": 16149, - "weareoneexo": 16448, - "wearethe": 40242, - "wearing": 3309, - "wears": 11869, - "weary": 38766, - "weasel": 44308, - "weather": 8808, - "weather": 2237, - "weathercee": 44980, - "weatherchannel": 42138, - "weav": 22260, - "weave": 22450, - "weaver": 20297, - "weaving": 27131, - "web": 2055, - "web": 4601, - "webb": 15708, - "webber": 34248, - "webcam": 24211, - "webcam": 22589, - "webcamtoy": 27719, - "webcast": 28256, - "webcomic": 34286, - "webcomics": 39811, - "webdesign": 20470, - "webdev": 37000, - "webdevelopment": 47553, - "weber": 20179, - "webin": 8460, - "webinar": 8921, - "webinars": 47755, - "webpage": 46964, - "webs": 32829, - "webseries": 44819, - "website": 3364, - "websites": 19278, - "webster": 19471, - "websummit": 48069, - "wec": 33152, - "wechat": 46124, - "wed": 1687, - "wed": 3478, - "wedd": 7576, - "wedding": 11204, - "wedding": 3101, - "weddings": 
15964, - "wedge": 21446, - "wedges": 33179, - "wedne": 2380, - "wednesday": 9311, - "wednesday": 2689, - "wednesdaymotivation": 37860, - "wednesdays": 24943, - "wednesdaywisdom": 11445, - "wedo": 43432, - "weds": 19107, - "wee": 716, - "wee": 8288, - "weed": 36935, - "weed": 8015, - "weeds": 26326, - "week": 1286, - "week": 994, - "weekday": 29244, - "weekdays": 44330, - "weekend": 17205, - "weekend": 1456, - "weekender": 36547, - "weekends": 14564, - "weekly": 34652, - "weekly": 5885, - "weeknd": 29925, - "weeks": 2898, - "weeksary": 24628, - "ween": 17517, - "ween": 1599, - "weep": 39270, - "weeping": 36629, - "weer": 32491, - "weet": 17742, - "weets": 13454, - "wef": 23313, - "weg": 47867, - "weg": 47561, - "wego": 44784, - "wego": 28220, - "weh": 48458, - "weh": 40313, - "weho": 47798, - "wei": 6958, - "wei": 20952, - "weibo": 20613, - "weigh": 10565, - "weigh": 17346, - "weighed": 33210, - "weighing": 24455, - "weighs": 20481, - "weight": 12723, - "weight": 3868, - "weighted": 43179, - "weightlifting": 36164, - "weightloss": 20359, - "weights": 21374, - "weil": 43720, - "weiler": 42203, - "wein": 29134, - "wein": 37684, - "weiner": 38822, - "weinstein": 34367, - "weir": 11299, - "weir": 25517, - "weird": 27981, - "weird": 5613, - "weirdest": 29482, - "weirdo": 32476, - "weis": 26251, - "weiser": 34833, - "weiss": 24794, - "wel": 1267, - "wel": 8042, - "welch": 25820, - "welcom": 11578, - "welcome": 18318, - "welcome": 1881, - "welcomed": 12590, - "welcomes": 9304, - "welcometo": 47511, - "welcoming": 8775, - "weld": 39776, - "welding": 24956, - "welfare": 12129, - "well": 3277, - "well": 1123, - "wellbeing": 14273, - "weller": 40921, - "welling": 49165, - "wellington": 15389, - "wellness": 40574, - "wellness": 9904, - "wells": 42705, - "wells": 9804, - "welove": 13573, - "welp": 28391, - "wels": 20852, - "welsh": 19173, - "welsh": 10977, - "welt": 38595, - "welter": 37115, - "welterweight": 39617, - "wemb": 15213, - "wembley": 16579, - "wen": 6590, - "wen": 
11278, - "wend": 15166, - "wendell": 42091, - "wendy": 31616, - "wendy": 14074, - "wenger": 21105, - "went": 18633, - "went": 2437, - "wentworth": 36423, - "wentz": 39179, - "wer": 6316, - "wer": 2980, - "were": 15461, - "were": 1365, - "wered": 6605, - "weren": 13611, - "werewolf": 32001, - "werk": 30176, - "werner": 29917, - "wers": 7110, - "wes": 18620, - "wes": 14738, - "wesle": 29606, - "wesley": 17332, - "wesleyan": 32509, - "wesome": 33292, - "wess": 44431, - "west": 2973, - "west": 1593, - "westbound": 29208, - "westbrook": 26948, - "westchester": 36675, - "westcoast": 44610, - "westend": 44815, - "wester": 9846, - "western": 17079, - "western": 4463, - "westfield": 32309, - "westh": 36798, - "westin": 43232, - "westlake": 41535, - "westminster": 15158, - "weston": 22771, - "westside": 33762, - "westwood": 26371, - "westworld": 42287, - "wet": 12406, - "wet": 6682, - "weta": 40946, - "wethenorth": 45281, - "wethepeople": 48030, - "wether": 33794, - "wether": 48405, - "wetland": 37357, - "wetlands": 26547, - "wett": 41971, - "wetter": 43957, - "wewant": 39280, - "wewill": 37241, - "wex": 17234, - "wexford": 29876, - "wexmondays": 49042, - "wey": 30376, - "wey": 19781, - "weymouth": 41433, - "wf": 14576, - "wf": 22313, - "wfa": 44606, - "wfc": 36431, - "wfp": 35193, - "wftv": 47075, - "wg": 21091, - "wg": 25857, - "wga": 32354, - "wgn": 48828, - "wh": 573, - "wh": 13844, - "wha": 18994, - "wha": 25884, - "whal": 38967, - "whale": 37083, - "whale": 11650, - "whales": 17722, - "wham": 42506, - "whar": 15517, - "wharf": 22452, - "wharton": 43320, - "what": 4268, - "what": 768, - "whatcha": 37160, - "whate": 6695, - "whatever": 6743, - "whati": 23500, - "whats": 9263, - "whats": 13084, - "whatsapp": 10119, - "whatsoever": 39928, - "whatson": 35632, - "whatyou": 30508, - "whe": 2009, - "whead": 34583, - "wheat": 20505, - "wheat": 10303, - "wheaton": 46933, - "wheel": 7360, - "wheel": 6744, - "wheelchair": 17713, - "wheeler": 18405, - "wheeling": 34839, - "wheels": 
8025, - "whel": 9792, - "whelan": 40715, - "when": 8753, - "when": 827, - "whenever": 10500, - "where": 7052, - "where": 1234, - "whereabouts": 47808, - "whereas": 42234, - "wheres": 46345, - "wherever": 14103, - "whereyou": 46837, - "whether": 5903, - "whew": 39016, - "whey": 34556, - "whi": 4295, - "whi": 33129, - "which": 1448, - "whiche": 48719, - "whichever": 49138, - "whil": 8499, - "while": 1519, - "whilst": 8596, - "whim": 27766, - "whimsical": 42282, - "whip": 14412, - "whipped": 22323, - "whipping": 41567, - "whir": 20873, - "whirl": 30962, - "whirlwind": 47771, - "whis": 6024, - "whiskey": 41381, - "whiskey": 11610, - "whisky": 37567, - "whisky": 12599, - "whisp": 21986, - "whispe": 30356, - "whisper": 27616, - "whisperer": 41368, - "whispering": 42599, - "whispers": 29133, - "whist": 13640, - "whistle": 23972, - "whistle": 19746, - "whistleblower": 40410, - "whistler": 29633, - "whit": 4398, - "whit": 31498, - "whitaker": 35851, - "whitby": 30858, - "white": 4699, - "white": 1579, - "whiteboard": 40839, - "whitec": 24575, - "whitehall": 42827, - "whitehead": 43560, - "whitehouse": 20776, - "whitening": 35540, - "whitepaper": 42713, - "whites": 35886, - "whites": 18835, - "whitesox": 28816, - "whitewater": 49350, - "whitfield": 48404, - "whitley": 40564, - "whitman": 32394, - "whitney": 43021, - "whitney": 18048, - "whitt": 33784, - "whittaker": 47595, - "whl": 25801, - "who": 2969, - "who": 822, - "whoa": 16943, - "whoever": 11137, - "whois": 41884, - "whole": 10360, - "whole": 2954, - "wholefoods": 42840, - "wholesale": 18306, - "wholesome": 35959, - "whom": 38158, - "whom": 12873, - "whoo": 20003, - "whoo": 49290, - "whoop": 22060, - "whoops": 28433, - "whopping": 34384, - "whore": 31690, - "whos": 41460, - "whos": 27130, - "whose": 6933, - "whouse": 45927, - "whs": 26292, - "wht": 32470, - "whufc": 31695, - "whun": 18272, - "why": 11040, - "why": 1182, - "whyte": 42386, - "wi": 820, - "wi": 5585, - "wib": 45303, - "wic": 7834, - "wich": 9759, - 
"wich": 5238, - "wichita": 22566, - "wick": 6798, - "wick": 6479, - "wicked": 32579, - "wicked": 12825, - "wicker": 38096, - "wicket": 19180, - "wickets": 22110, - "wicklow": 39039, - "wicz": 30121, - "wid": 11886, - "wid": 20886, - "wide": 19341, - "wide": 3184, - "widely": 16195, - "widening": 46598, - "wider": 21263, - "widesp": 20598, - "widespread": 21258, - "widget": 43906, - "wido": 28068, - "widow": 19949, - "widows": 42129, - "width": 23571, - "wie": 21378, - "wie": 9131, - "wielding": 47272, - "wien": 38131, - "wiener": 40567, - "wies": 42788, - "wif": 37572, - "wife": 3607, - "wifey": 35282, - "wifi": 11026, - "wig": 23690, - "wig": 12216, - "wigan": 23130, - "wiggins": 32329, - "wiggle": 47812, - "wight": 41278, - "wight": 15545, - "wigs": 31207, - "wii": 8005, - "wiiu": 40980, - "wiki": 10373, - "wiki": 24265, - "wikileaks": 28731, - "wikipedia": 15176, - "wil": 1352, - "wil": 20581, - "wilbur": 43069, - "wilcox": 43231, - "wild": 2780, - "wild": 3220, - "wildatlantic": 35500, - "wildatlanticway": 35776, - "wildcard": 37360, - "wildcat": 49077, - "wildcat": 25870, - "wildcats": 15909, - "wilde": 23498, - "wilder": 14343, - "wilder": 23499, - "wilderness": 16506, - "wildest": 43028, - "wildfire": 22788, - "wildfires": 29184, - "wildflower": 27628, - "wildflower": 33181, - "wildflowerhour": 31302, - "wildflowers": 29136, - "wildlife": 13298, - "wildlife": 5250, - "wildlifephotography": 32307, - "wildlifewednesday": 48537, - "wildly": 35981, - "wildoz": 40113, - "wiley": 32747, - "wilhelm": 39696, - "wilkes": 39548, - "wilkins": 36986, - "wilkinson": 26797, - "will": 5062, - "will": 751, - "willam": 43276, - "willard": 44920, - "wille": 48739, - "willem": 38044, - "willi": 2256, - "william": 8420, - "william": 4705, - "williams": 38452, - "williams": 4075, - "williamsburg": 30683, - "williamson": 20793, - "willie": 13907, - "willing": 34160, - "willing": 11718, - "willingness": 40573, - "willis": 18491, - "willow": 33887, - "willow": 15665, - "wills": 
26913, - "willy": 34502, - "willy": 19599, - "wilmington": 28052, - "wilms": 47879, - "wilshere": 48359, - "wilson": 23629, - "wilson": 5622, - "wilt": 23394, - "wilt": 47357, - "wilton": 46638, - "wiltshire": 28025, - "wim": 8662, - "wim": 27580, - "wimble": 11752, - "wimbledon": 12229, - "win": 831, - "win": 1225, - "winchester": 20647, - "wind": 6812, - "wind": 3630, - "winder": 44454, - "winder": 46245, - "winding": 22390, - "windmill": 34084, - "windo": 3110, - "window": 26675, - "window": 4879, - "windows": 5437, - "winds": 12668, - "winds": 7012, - "windshield": 33002, - "windsor": 44322, - "windsor": 12884, - "windy": 13446, - "wine": 7375, - "wine": 2604, - "winelover": 26357, - "winemaker": 41588, - "wineoclock": 43846, - "wineries": 49349, - "winery": 15500, - "wines": 8263, - "winetasting": 41288, - "winewednesday": 35447, - "wing": 8141, - "wing": 1340, - "winged": 24993, - "winger": 22727, - "winget": 44578, - "wings": 5178, - "wink": 34455, - "wink": 25859, - "winkle": 36430, - "winn": 38104, - "winne": 46273, - "winner": 32961, - "winner": 2520, - "winners": 4320, - "winni": 13018, - "winnie": 29022, - "winning": 42099, - "winning": 2577, - "winnings": 46490, - "winnipeg": 14369, - "winona": 49202, - "wins": 46839, - "wins": 2718, - "winslow": 39658, - "winston": 14848, - "winter": 7340, - "winter": 2541, - "winters": 21587, - "wintry": 39504, - "wip": 10447, - "wipe": 26761, - "wiped": 31822, - "wipes": 33463, - "wir": 16849, - "wir": 44838, - "wire": 7558, - "wire": 7794, - "wired": 18935, - "wireless": 9103, - "wires": 24311, - "wiring": 36434, - "wirral": 34675, - "wis": 3392, - "wis": 20405, - "wiscon": 9857, - "wisconsin": 10265, - "wisdom": 42474, - "wisdom": 5425, - "wise": 19116, - "wise": 5558, - "wisely": 26173, - "wiser": 44859, - "wish": 11328, - "wish": 2412, - "wished": 25883, - "wishes": 6045, - "wishing": 5307, - "wishlist": 31969, - "wit": 584, - "wit": 8531, - "witch": 20139, - "witch": 10083, - "witchcraft": 35065, - "witcher": 
33684, - "witches": 21673, - "with": 1435, - "with": 593, - "withdra": 24696, - "withdraw": 31670, - "withdrawal": 25765, - "withdrawn": 46687, - "withdraws": 48637, - "wither": 39655, - "witherspoon": 45409, - "within": 4154, - "withme": 44670, - "without": 32836, - "without": 2193, - "withstand": 42236, - "withthe": 36872, - "withus": 30572, - "withyou": 30351, - "witne": 12096, - "witness": 8793, - "witnessed": 20187, - "witnesses": 22778, - "witnessing": 33618, - "wits": 30938, - "witt": 38194, - "witt": 17168, - "witter": 31597, - "witty": 29970, - "witz": 44186, - "witz": 13265, - "wiv": 48925, - "wives": 14378, - "wiwx": 44461, - "wiz": 7730, - "wiz": 23178, - "wizar": 49121, - "wizard": 30490, - "wizard": 14295, - "wizards": 19140, - "wizkid": 40146, - "wj": 19739, - "wj": 35453, - "wk": 11512, - "wk": 11528, - "wkend": 42336, - "wknd": 20851, - "wks": 25508, - "wku": 43377, - "wl": 13299, - "wl": 9613, - "wm": 20268, - "wm": 15790, - "wn": 1186, - "wn": 757, - "wnba": 32358, - "wned": 8628, - "wns": 12950, - "wnt": 22484, - "wny": 24833, - "wo": 1613, - "wo": 11132, - "woah": 17751, - "wob": 35984, - "woc": 39011, - "wod": 41522, - "woes": 27860, - "wof": 45671, - "woj": 48931, - "wok": 28912, - "woke": 9331, - "woken": 43697, - "woking": 43931, - "wol": 2798, - "wol": 48622, - "wold": 42399, - "wolf": 9453, - "wolf": 5916, - "wolfe": 24989, - "wolff": 34369, - "wolfgang": 34061, - "wolfpack": 30887, - "wolve": 45101, - "wolver": 14334, - "wolverhampton": 34518, - "wolverine": 23353, - "wolverines": 42003, - "wolves": 9372, - "wom": 1087, - "womack": 48980, - "woman": 15716, - "woman": 2308, - "womanc": 35630, - "womancrush": 37721, - "womancrushwednesday": 39714, - "womanin": 30562, - "womaninbiz": 36482, - "womb": 37023, - "women": 3648, - "women": 1507, - "womenin": 13062, - "womeninscience": 41343, - "womeninstem": 29380, - "womenintech": 31470, - "womenof": 48421, - "womens": 12822, - "womens": 14408, - "womensart": 38548, - "womensday": 13956, - 
"womenshi": 22887, - "womenshistorymonth": 24982, - "womensmarch": 30102, - "won": 1528, - "won": 1749, - "wonder": 2070, - "wonder": 3936, - "wondercon": 46944, - "wondered": 15550, - "wonderful": 2582, - "wonderfully": 23245, - "wondering": 8360, - "wonderland": 13874, - "wonders": 14048, - "wonderwoman": 31000, - "wondo": 38402, - "wondr": 46771, - "wong": 17876, - "wonka": 43463, - "wont": 43174, - "wont": 15952, - "woo": 1867, - "woo": 9322, - "wood": 3269, - "wood": 1704, - "woodbridge": 49074, - "wooden": 48226, - "wooden": 9057, - "woodland": 44314, - "woodland": 17447, - "woodlands": 32430, - "woodley": 40566, - "woodpecker": 32684, - "woods": 6267, - "woodson": 48967, - "woodstock": 29486, - "woodward": 27419, - "woodwork": 47386, - "woodworking": 29267, - "woody": 38627, - "woody": 17144, - "woof": 34234, - "woof": 24028, - "woohoo": 20172, - "wook": 29192, - "wool": 9967, - "wool": 13283, - "woolf": 43728, - "woolly": 47722, - "woon": 33126, - "wooo": 43217, - "woop": 31884, - "woot": 22466, - "wor": 641, - "worcester": 22172, - "worcester": 19580, - "worcestershire": 38440, - "worcestershirehour": 43644, - "word": 8272, - "word": 2653, - "wordof": 33500, - "wordoftheday": 43594, - "wordpress": 15193, - "words": 31007, - "words": 2709, - "wore": 8953, - "work": 1636, - "work": 951, - "workday": 29735, - "worked": 5410, - "worker": 8098, - "workers": 4795, - "workflow": 28502, - "workforce": 14672, - "workin": 31825, - "workin": 26323, - "working": 20806, - "working": 1699, - "workinprogress": 46086, - "workout": 6773, - "workouts": 22779, - "workplace": 11959, - "workplaces": 47383, - "works": 2322, - "workshop": 3832, - "workshops": 12262, - "workspace": 34470, - "worl": 5221, - "world": 2334, - "world": 1002, - "worlda": 46627, - "worldbank": 36759, - "worldbookday": 31191, - "worldcup": 42525, - "worldcup": 8650, - "worlden": 44668, - "worldenviron": 47115, - "worldenvironmentday": 47522, - "worldly": 36268, - "worldo": 41698, - "worldof": 22636, - 
"worldre": 33951, - "worlds": 7691, - "worldseries": 26695, - "worldtour": 23202, - "worldwater": 41176, - "worldwaterday": 44520, - "worldwide": 6214, - "worm": 33709, - "worm": 10945, - "worms": 20231, - "worn": 9037, - "worried": 11911, - "worries": 17684, - "worry": 7534, - "worrying": 24058, - "worse": 8236, - "worsen": 46344, - "worshi": 31840, - "worship": 46399, - "worship": 9023, - "worst": 5719, - "wort": 30209, - "worth": 10671, - "worth": 2450, - "worthing": 39929, - "worthit": 40830, - "worthless": 44736, - "worths": 44633, - "worthwhile": 36295, - "worthy": 8881, - "worx": 44973, - "wot": 24863, - "wou": 5279, - "would": 39873, - "would": 1311, - "wouldn": 5878, - "wouldnt": 41595, - "wound": 19231, - "wounded": 14859, - "wounds": 21290, - "woven": 19830, - "wow": 22191, - "wow": 2781, - "woz": 44558, - "wozni": 47782, - "wp": 15378, - "wp": 13302, - "wpg": 35048, - "wps": 33386, - "wq": 45195, - "wr": 1189, - "wr": 8028, - "wra": 3852, - "wra": 46004, - "wral": 49050, - "wrangler": 30923, - "wrap": 7094, - "wrapped": 9875, - "wrapping": 15223, - "wraps": 18236, - "wrath": 29783, - "wray": 48943, - "wrc": 16004, - "wre": 3168, - "wreath": 23091, - "wrec": 20879, - "wreck": 28775, - "wreck": 15017, - "wrecked": 32695, - "wreckem": 45676, - "wrecking": 36956, - "wrecks": 45545, - "wren": 20191, - "wren": 31970, - "wrench": 30980, - "wrest": 4177, - "wrestle": 17097, - "wrestle": 28086, - "wrestlemania": 18849, - "wrestler": 19790, - "wrestlers": 25902, - "wrestling": 31292, - "wrestling": 5904, - "wrexham": 34479, - "wri": 7667, - "wri": 42007, - "wright": 28616, - "wright": 6991, - "wrights": 43711, - "wrigley": 33538, - "wrink": 22201, - "wrinkle": 46642, - "wrinkles": 35525, - "wrist": 19243, - "wrist": 16139, - "wristband": 36890, - "wristbands": 44864, - "writ": 2902, - "write": 28874, - "write": 4946, - "writer": 27886, - "writer": 4422, - "writers": 18742, - "writers": 7307, - "writerslife": 25007, - "writes": 8023, - "writing": 16053, - 
"writing": 2979, - "writingcommunity": 39178, - "writings": 36259, - "written": 5231, - "wro": 5447, - "wrong": 18381, - "wrong": 3669, - "wrongly": 45642, - "wrote": 5796, - "wrought": 48125, - "wrs": 45280, - "ws": 6300, - "ws": 799, - "wsb": 30681, - "wsbtv": 38394, - "wsj": 19764, - "wski": 12548, - "wsl": 43706, - "wsoc": 40253, - "wson": 33954, - "wsop": 41231, - "wsu": 44674, - "wsu": 32913, - "wsw": 43285, - "wt": 15873, - "wt": 12255, - "wta": 25984, - "wtc": 39718, - "wtf": 6891, - "wth": 23021, - "wthr": 45269, - "wti": 47345, - "wto": 36406, - "wts": 32159, - "wu": 9710, - "wu": 9837, - "wud": 43870, - "wul": 35154, - "wunder": 36661, - "wur": 24040, - "wurst": 44409, - "wusa": 40021, - "wut": 28590, - "wv": 18920, - "wv": 14743, - "wvu": 44878, - "wvu": 25879, - "ww": 3181, - "ww": 4491, - "wwc": 26505, - "wwdc": 47441, - "wwe": 12112, - "wwe": 5290, - "wwen": 23308, - "wwenetwork": 37228, - "wwenxt": 39898, - "wwer": 32038, - "wwf": 23332, - "wwfc": 42681, - "wwg": 35322, - "wwi": 20194, - "wwii": 10261, - "www": 26074, - "www": 9667, - "wwwbigbaldhead": 30761, - "wwww": 34224, - "wwww": 25200, - "wwwww": 48268, - "wwx": 47431, - "wx": 18192, - "wx": 3561, - "wy": 4665, - "wy": 7625, - "wyatt": 21660, - "wyd": 33113, - "wye": 48436, - "wye": 43751, - "wylie": 49330, - "wyn": 11802, - "wyn": 17504, - "wynn": 36117, - "wynne": 35951, - "wynonna": 41456, - "wynonnaearp": 43755, - "wyoming": 18693, - "x": 87, - "x": 343, - "xa": 24831, - "xan": 45530, - "xander": 45601, - "xavi": 36342, - "xavier": 41044, - "xavier": 18567, - "xb": 33678, - "xbox": 18063, - "xbox": 7748, - "xboxone": 27410, - "xc": 12515, - "xchange": 49132, - "xd": 6380, - "xe": 42886, - "xe": 19183, - "xen": 15568, - "xer": 49005, - "xf": 35274, - "xfactor": 25211, - "xfinity": 35107, - "xford": 34732, - "xh": 45771, - "xham": 25284, - "xi": 2467, - "xi": 7376, - "xia": 19854, - "xia": 20724, - "xian": 42570, - "xiao": 49318, - "xiaomi": 27477, - "xico": 38469, - "xide": 17398, - "xie": 
40122, - "xie": 15976, - "xii": 36525, - "xiii": 28199, - "xim": 11217, - "xin": 27053, - "xin": 41517, - "xing": 14383, - "xion": 24164, - "xis": 35793, - "xit": 5316, - "xiumin": 36563, - "xiv": 16125, - "xj": 42453, - "xl": 36529, - "xl": 8833, - "xley": 38223, - "xm": 18626, - "xma": 48805, - "xmas": 48848, - "xmas": 6425, - "xmen": 28708, - "xn": 25388, - "xo": 26936, - "xo": 9000, - "xon": 29186, - "xon": 8482, - "xox": 11531, - "xox": 34050, - "xoxo": 13313, - "xp": 15651, - "xper": 32200, - "xperia": 37615, - "xpo": 44377, - "xpress": 31809, - "xq": 40606, - "xr": 26276, - "xrp": 26965, - "xs": 16397, - "xt": 1052, - "xtina": 45520, - "xton": 32666, - "xton": 10597, - "xtra": 26969, - "xtre": 27025, - "xtreme": 33483, - "xu": 42063, - "xu": 37198, - "xv": 17768, - "xvi": 44031, - "xx": 5675, - "xx": 3553, - "xxl": 29777, - "xxx": 33923, - "xxx": 8352, - "xxxx": 32035, - "xxxx": 22819, - "xxxxx": 44195, - "xy": 20023, - "xy": 11443, - "y": 88, - "y": 344, - "ya": 5018, - "ya": 1430, - "yaa": 48847, - "yaa": 34498, - "yaan": 34680, - "yab": 27737, - "yach": 9039, - "yacht": 43806, - "yacht": 12859, - "yachts": 29260, - "yad": 13276, - "yad": 40047, - "yadav": 26650, - "yaf": 38019, - "yag": 35081, - "yah": 16170, - "yah": 12381, - "yaho": 37929, - "yahoo": 38152, - "yahoo": 16846, - "yak": 11014, - "yak": 29074, - "yaki": 44677, - "yaku": 29572, - "yakuza": 42628, - "yal": 16198, - "yal": 13418, - "yale": 39926, - "yale": 17157, - "yall": 9210, - "yam": 6666, - "yam": 19318, - "yama": 23512, - "yamaha": 18854, - "yan": 3949, - "yan": 4788, - "yana": 18698, - "yand": 38609, - "yang": 23818, - "yang": 12605, - "yani": 26439, - "yankee": 21554, - "yankees": 11889, - "yann": 40246, - "yann": 38657, - "yao": 45231, - "yap": 48700, - "yap": 34468, - "yar": 6786, - "yar": 23071, - "yard": 20234, - "yard": 4313, - "yards": 7550, - "yarmouth": 45941, - "yarn": 19702, - "yarra": 46824, - "yas": 8168, - "yas": 20570, - "yash": 30216, - "yash": 37836, - "yasi": 37700, - 
"yasss": 23873, - "yat": 29443, - "yat": 34965, - "yates": 27677, - "yatra": 38932, - "yav": 41275, - "yaw": 31989, - "yawn": 48643, - "yay": 20614, - "yay": 6712, - "yaya": 37608, - "yaz": 19348, - "yaz": 42252, - "yb": 41785, - "yb": 27615, - "yc": 11931, - "ycle": 38089, - "yd": 29896, - "yd": 9534, - "yday": 15899, - "yds": 24819, - "ye": 693, - "ye": 4582, - "yea": 13687, - "yeah": 29405, - "yeah": 3908, - "year": 5163, - "year": 935, - "yearbook": 21636, - "yearling": 48392, - "yearly": 24541, - "yearof": 31944, - "yearofthe": 47899, - "years": 30864, - "years": 1151, - "yearsof": 14932, - "yearswith": 45249, - "yeast": 25819, - "yeats": 44903, - "yed": 28137, - "yed": 3301, - "yee": 18114, - "yee": 23108, - "yeezy": 24901, - "yeg": 16854, - "yeg": 11976, - "yegfood": 48711, - "yeh": 21331, - "yel": 3323, - "yel": 48164, - "yell": 30824, - "yelled": 39199, - "yelling": 26581, - "yellow": 12059, - "yellow": 4481, - "yellowstone": 29241, - "yelp": 31674, - "yemen": 29276, - "yemen": 12513, - "yemeni": 44656, - "yemi": 42267, - "yen": 29602, - "yen": 17960, - "yeo": 32292, - "yeo": 43830, - "yeol": 15808, - "yeon": 16602, - "yep": 10964, - "yer": 15491, - "yer": 2371, - "yers": 3722, - "yes": 21620, - "yes": 1958, - "yess": 42778, - "yess": 40189, - "yesss": 36210, - "yessss": 45620, - "yester": 1905, - "yesterday": 1926, - "yesterdays": 36238, - "yesung": 38527, - "yet": 2296, - "yeti": 34228, - "yev": 39855, - "yew": 34660, - "yey": 45447, - "yg": 16396, - "ygk": 44758, - "ygo": 46166, - "yh": 41978, - "yi": 5826, - "yi": 14762, - "yield": 16825, - "yields": 24856, - "yikes": 25094, - "yin": 26476, - "yin": 23543, - "ying": 42933, - "ying": 910, - "yixing": 32120, - "yk": 30965, - "yl": 2656, - "yl": 4045, - "ylan": 41875, - "ylde": 42850, - "yle": 32305, - "yle": 10770, - "ylene": 34239, - "yler": 48081, - "yles": 42860, - "ylon": 22375, - "ylor": 48468, - "ym": 1786, - "ym": 19587, - "yman": 29077, - "ymc": 47101, - "ymca": 22369, - "yment": 8199, - "ymes": 
39968, - "ymi": 5271, - "ymm": 37133, - "ymoun": 41426, - "ymouth": 36429, - "yn": 2823, - "yn": 4100, - "yne": 18238, - "ynes": 18020, - "ynn": 10499, - "ynna": 48292, - "ynwa": 27372, - "yo": 586, - "yo": 3497, - "yoda": 31922, - "yof": 5966, - "yofficial": 21818, - "yofthe": 43983, - "yog": 34985, - "yog": 36539, - "yoga": 25872, - "yoga": 5523, - "yogh": 32626, - "yoghurt": 33491, - "yogi": 22766, - "yogur": 16137, - "yogurt": 16819, - "yoh": 48880, - "yoke": 41969, - "yoko": 25929, - "yoko": 32256, - "yokohama": 42409, - "yol": 19387, - "yol": 35218, - "yolanda": 43845, - "yolo": 20905, - "yom": 34718, - "yom": 44527, - "yon": 10147, - "yon": 7604, - "yong": 27960, - "yong": 20887, - "yonge": 48592, - "yoo": 25842, - "yoo": 20775, - "yoon": 30863, - "yoon": 22113, - "yoona": 32736, - "yoongi": 24037, - "yor": 2028, - "yor": 21132, - "york": 5318, - "york": 2705, - "yorker": 23865, - "yorkers": 41041, - "yorks": 39093, - "yorkshi": 43367, - "yorkshire": 27007, - "yorkshire": 8633, - "yoruba": 46083, - "yos": 35607, - "yosemite": 25893, - "yoshi": 22920, - "yoshi": 25354, - "yot": 22875, - "yotes": 46157, - "yotpo": 26113, - "you": 1562, - "you": 592, - "youare": 33879, - "youcan": 32498, - "youknow": 47919, - "youknow": 41088, - "youn": 1596, - "young": 6939, - "young": 1888, - "younger": 10414, - "youngest": 12316, - "youngjae": 46426, - "youngster": 35881, - "youngsters": 28098, - "younow": 33831, - "your": 2130, - "your": 695, - "youre": 28344, - "youre": 19695, - "yourown": 28583, - "yours": 3834, - "yourself": 3053, - "yourselves": 19747, - "youth": 10743, - "youth": 3281, - "youthful": 37480, - "youths": 23614, - "youts": 22737, - "youtu": 13868, - "youtube": 31258, - "youtube": 3895, - "youtuber": 24720, - "youtubers": 36822, - "youu": 35055, - "youuu": 35324, - "youuuu": 47123, - "yoy": 41865, - "yp": 38370, - "yp": 34734, - "ypg": 37386, - "yql": 46122, - "yqr": 36881, - "yr": 18395, - "yr": 4333, - "yrs": 4822, - "ys": 1971, - "ys": 961, - "yser": 
33121, - "ysis": 4843, - "ysl": 45681, - "ysm": 23842, - "yst": 40528, - "yt": 36777, - "yt": 14779, - "ytd": 47524, - "yte": 48172, - "yu": 3371, - "yu": 8887, - "yuan": 26236, - "yuck": 48282, - "yugo": 48231, - "yuh": 42547, - "yui": 47932, - "yuk": 17037, - "yuk": 24063, - "yuki": 34010, - "yukon": 27094, - "yul": 39832, - "yum": 6869, - "yum": 7259, - "yuma": 47566, - "yummy": 7687, - "yun": 14976, - "yun": 18288, - "yung": 44545, - "yung": 17676, - "yunho": 39748, - "yup": 13231, - "yur": 42533, - "yuri": 23823, - "yusuf": 33222, - "yuv": 36784, - "yves": 33698, - "yvon": 23327, - "yvonne": 32583, - "yvr": 29058, - "yw": 33741, - "yx": 35624, - "yxe": 34240, - "yy": 3433, - "yy": 8321, - "yya": 37444, - "yyc": 27542, - "yyc": 11741, - "yyj": 26203, - "yyy": 11514, - "yyyy": 38749, - "yyyy": 16955, - "yyyyy": 26089, - "yyyyyy": 47055, - "yz": 37579, - "yz": 46451, - "yü": 48232, - "z": 89, - "z": 345, - "za": 3710, - "za": 2186, - "zab": 22982, - "zable": 37002, - "zac": 25501, - "zac": 19159, - "zach": 13401, - "zach": 11815, - "zachary": 32401, - "zack": 30567, - "zack": 19120, - "zad": 47314, - "zad": 27838, - "zada": 34889, - "zaf": 21837, - "zafar": 46668, - "zag": 26091, - "zag": 29346, - "zagre": 34107, - "zagreb": 35355, - "zah": 23258, - "zah": 43297, - "zaha": 44408, - "zai": 44329, - "zai": 27065, - "zain": 34400, - "zain": 45366, - "zak": 13050, - "zak": 20738, - "zaki": 48091, - "zal": 20552, - "zal": 33298, - "zam": 7218, - "zam": 41578, - "zambia": 21671, - "zan": 7284, - "zan": 17835, - "zana": 39643, - "zand": 37712, - "zane": 34786, - "zani": 45373, - "zania": 15059, - "zano": 27637, - "zanzi": 47835, - "zap": 24134, - "zapp": 33504, - "zappa": 46592, - "zar": 5458, - "zar": 16392, - "zara": 24454, - "zardari": 20174, - "zas": 48261, - "zation": 3683, - "zawa": 49281, - "zay": 7102, - "zayed": 36726, - "zayn": 22292, - "zayn": 10308, - "zaynmalik": 25278, - "zazzle": 47857, - "ze": 2254, - "ze": 1298, - "zeal": 44951, - "zealand": 7618, - 
"zeb": 46518, - "zebra": 47394, - "zebra": 22548, - "zed": 21047, - "zed": 1993, - "zedd": 45608, - "zee": 25468, - "zee": 14080, - "zeiss": 47460, - "zeit": 37898, - "zeit": 37906, - "zek": 40829, - "zeke": 47065, - "zel": 10389, - "zel": 12027, - "zelda": 17138, - "zell": 39526, - "zen": 8518, - "zen": 3928, - "zend": 33478, - "zendaya": 35956, - "zenith": 44740, - "zens": 15298, - "zeph": 40726, - "zepp": 22977, - "zeppelin": 25408, - "zer": 6118, - "zer": 3716, - "zero": 14867, - "zero": 5848, - "zers": 9547, - "zes": 4073, - "zest": 37709, - "zet": 34098, - "zeta": 30954, - "zetta": 45993, - "zeus": 32800, - "zey": 46647, - "zh": 33389, - "zh": 41621, - "zhang": 21127, - "zhen": 37374, - "zhen": 33236, - "zhou": 17384, - "zhu": 42049, - "zi": 2651, - "zi": 5819, - "zia": 13764, - "zid": 30235, - "zidane": 34643, - "zie": 29316, - "zie": 8956, - "zieg": 40157, - "ziegler": 46812, - "ziel": 32151, - "zier": 15399, - "zies": 38001, - "ziest": 28159, - "zig": 15950, - "zig": 21345, - "ziggy": 39274, - "zik": 30125, - "zika": 28783, - "zil": 25039, - "zil": 33190, - "zilla": 17879, - "zim": 8112, - "zim": 22577, - "zimbab": 12373, - "zimbabwe": 45668, - "zimbabwe": 13583, - "zimmer": 27452, - "zimmer": 35211, - "zimmerman": 38231, - "zin": 14085, - "zin": 21278, - "zinc": 27458, - "zind": 26206, - "zindabad": 42208, - "zine": 16100, - "zing": 25062, - "zing": 3152, - "zinger": 42027, - "zio": 13906, - "zion": 31763, - "zion": 20963, - "zione": 36161, - "zionist": 33078, - "zip": 26479, - "zip": 16083, - "zipper": 33670, - "zir": 31892, - "zl": 39168, - "zlat": 32489, - "zlatan": 37877, - "zm": 43691, - "zman": 24248, - "zn": 18004, - "zo": 4397, - "zo": 5056, - "zodi": 22660, - "zodiac": 27753, - "zoe": 43114, - "zoe": 16662, - "zoey": 39871, - "zog": 40680, - "zol": 25939, - "zola": 46105, - "zom": 6623, - "zombi": 29452, - "zombie": 11819, - "zombies": 46702, - "zombies": 16517, - "zon": 15109, - "zon": 14618, - "zona": 42134, - "zone": 37197, - "zone": 4442, - 
"zones": 17247, - "zoning": 36790, - "zoo": 8182, - "zoo": 7147, - "zoom": 32671, - "zoom": 13909, - "zor": 17605, - "zou": 38072, - "zr": 39275, - "zs": 35248, - "zshq": 41442, - "zt": 42629, - "zu": 4091, - "zu": 14184, - "zucchini": 29873, - "zucker": 26890, - "zuckerberg": 30066, - "zul": 31146, - "zulu": 32821, - "zum": 35094, - "zuma": 23326, - "zumba": 32976, - "zun": 42440, - "zur": 17128, - "zurich": 21288, - "zw": 42188, - "zx": 31604, - "zy": 6615, - "zy": 2303, - "zyk": 39112, - "zyme": 36472, - "zyn": 45287, - "zz": 1544, - "zz": 4943, - "zza": 14642, - "zzi": 13974, - "zzie": 18635, - "zzle": 7873, - "zzled": 39075, - "zzo": 14036, - "zzy": 21275, - "zzy": 8353, - "zzz": 20055, - "zzzz": 35742, - "zzzz": 43103, - "{": 90, - "{": 346, - "{}": 39025, - "|": 91, - "|#": 31183, - "|": 347, - "|@": 41677, - "||": 7566, - "}": 92, - "}": 348, - "~": 93, - "~!": 31181, - "~\"": 48442, - "~": 349, - "~>": 43291, - "~@": 44247, - "~~": 11461, - "~~": 16671, - "~~~": 32472, - "~~~~": 28295, - "¡": 94, - "¡": 350, - "¡ï¸ı": 15113, - "¡ï¸ı": 4174, - "¡ľ": 43991, - "¢": 95, - "¢": 351, - "£": 96, - "£": 352, - "£ï¸ı": 18446, - "¤": 97, - "¤": 353, - "¥": 98, - "¥": 354, - "¦": 99, - "¦": 355, - "¦Ī": 47615, - "§": 100, - "§": 356, - "¨": 101, - "¨": 357, - "©": 102, - "©": 358, - "ª": 103, - "ª": 359, - "«": 104, - "«": 360, - "¬": 105, - "¬": 361, - "¬ë": 31736, - "®": 106, - "®": 362, - "¯": 107, - "¯": 363, - "°": 108, - "°:": 21787, - "°": 364, - "°ï¸ı": 34777, - "±": 109, - "±": 365, - "±ï¸ı": 41020, - "²": 110, - "²": 366, - "³": 111, - "³": 367, - "³ï¸ı": 22195, - "³ï¸ı": 24706, - "´": 112, - "´": 368, - "µ": 113, - "µ": 369, - "µï¸ı": 27605, - "¶": 114, - "¶": 370, - "·": 115, - "·": 371, - "¸": 116, - "¸": 372, - "¸ë": 19693, - "¹": 117, - "¹": 373, - "º": 118, - "º": 374, - "»": 119, - "»": 375, - "¼": 120, - "¼": 376, - "½": 121, - "½": 377, - "½ï¸ı": 31333, - "¾": 122, - "¾": 378, - "¿": 123, - "¿": 379, - "À": 124, - "À": 380, - "Á": 125, - "Á": 381, 
- "Â": 126, - "Â": 382, - "¡": 26868, - "¡": 10830, - "¡¡": 45505, - "¢": 41359, - "£": 31117, - "£": 1950, - "Â¥": 20199, - "¨": 19957, - "¨¨": 23089, - "¨¨¨¨": 41223, - "©": 31148, - "©": 5811, - "«": 14434, - "®": 30857, - "®": 8436, - "¯": 38682, - "¯": 43593, - "¯\\": 44096, - "¯\\_(": 45115, - "°": 21305, - "°": 6858, - "²": 41175, - "´": 30560, - "´": 12559, - "·": 14844, - "º": 28059, - "»": 31642, - "»": 7599, - "½": 33613, - "¿": 44559, - "¿": 17133, - "ÂŃ": 22618, - "Ã": 127, - "Ã": 383, - "á": 7261, - "á": 22229, - "án": 38340, - "án": 21385, - "â": 26170, - "ã": 19339, - "ão": 21141, - "ä": 10896, - "ä": 47276, - "än": 42787, - "Ã¥": 23176, - "æ": 42495, - "ç": 10067, - "ça": 22711, - "è": 12138, - "è": 37761, - "ère": 30272, - "ès": 41210, - "é": 3459, - "é": 4166, - "éal": 45251, - "ée": 13489, - "és": 20507, - "ê": 27515, - "ë": 29526, - "ë": 40520, - "î": 48704, - "ï": 35689, - "ñ": 6445, - "ña": 17753, - "ño": 16574, - "ños": 40104, - "ó": 8891, - "ó": 27733, - "ón": 13926, - "ô": 26815, - "ö": 7255, - "ö": 37423, - "ör": 31762, - "ø": 17483, - "ø": 45598, - "ú": 17963, - "ú": 36019, - "ü": 6522, - "ü": 47177, - "ür": 26132, - "ÃĹ": 16165, - "Ãł": 36149, - "Ãł": 21259, - "ÃŃ": 8366, - "ÃŃ": 23928, - "ÃŃa": 16609, - "ÃŃn": 33623, - "Ä": 128, - "Ä": 384, - "ı": 18562, - "ı": 41901, - "Äģ": 23134, - "Äĩ": 31719, - "Äį": 45414, - "ÄŁ": 26540, - "Å": 129, - "Å": 385, - "Å¡": 35621, - "ÅĤ": 40419, - "Åį": 41267, - "ÅŁ": 21254, - "ÅŁ": 40706, - "Æ": 130, - "Æ": 386, - "Ç": 131, - "Ç": 387, - "È": 132, - "È": 388, - "É": 133, - "É": 389, - "Ê": 134, - "Ê": 390, - "Ë": 135, - "Ë": 391, - "Ì": 136, - "Ì": 392, - "Ìĩ": 16384, - "Í": 137, - "Í": 393, - "Î": 138, - "Î": 394, - "Ï": 139, - "Ï": 395, - "Ïī": 38065, - "Ð": 140, - "Ð": 396, - "а": 16912, - "а": 27080, - "аÐ": 31090, - "в": 39813, - "е": 22176, - "и": 16701, - "иÐ": 29503, - "к": 27152, - "л": 47611, - "м": 38018, - "н": 22705, - "о": 13506, - "о": 29386, - "оÐ": 20978, - "од": 38416, - "оÑĤ": 
28599, - "п": 26302, - "пÑĢи": 46321, - "пÑĢиÑĢода": 48150, - "Ñ": 141, - "Ñ": 397, - "ÑĢ": 16370, - "ÑĢи": 41092, - "ÑĢод": 47039, - "ÑĢода": 47929, - "Ñģ": 23669, - "ÑĤ": 17875, - "Ñĥ": 39729, - "ÑĦ": 27993, - "ÑĦоÑĤ": 35155, - "ÑĦоÑĤо": 38981, - "Ñĭ": 45001, - "Ò": 142, - "Ò": 398, - "Ó": 143, - "Ó": 399, - "Ô": 144, - "Ô": 400, - "Õ": 145, - "Õ": 401, - "Ö": 146, - "Ö": 402, - "×": 147, - "×": 403, - "Ø": 148, - "Ø": 404, - "ا": 6042, - "ا": 22625, - "اØ": 13189, - "ار": 40137, - "اÙ": 8453, - "اÙĦ": 12973, - "اÙħ": 47626, - "اÙĨ": 42773, - "اÙĨ": 33200, - "ب": 16378, - "ب": 35330, - "Ø©": 20915, - "ت": 18197, - "ت": 44333, - "ج": 26375, - "Ø®": 41495, - "د": 19872, - "د": 35566, - "ر": 10948, - "ر": 24933, - "رÙĬ": 43273, - "ز": 36169, - "س": 17856, - "Ø´": 28770, - "ص": 27271, - "Ø·": 32050, - "ع": 18843, - "غ": 48510, - "ØŃ": 25722, - "Ù": 149, - "Ù": 405, - "Ùģ": 24112, - "ÙĤ": 27585, - "Ùĥ": 33499, - "ÙĦ": 14251, - "ÙĦ": 37899, - "Ùħ": 12986, - "Ùħ": 29945, - "ÙĨ": 16655, - "ÙĨ": 25386, - "Ùĩ": 34274, - "Ùĩ": 31343, - "ÙĪ": 12203, - "ÙĪ": 38310, - "ÙĪØ±": 48242, - "ÙĬ": 12046, - "ÙĬ": 23853, - "Ú": 150, - "Ú": 406, - "Ú©": 26475, - "Û": 151, - "Û": 407, - "Ûģ": 40480, - "ÛĮ": 21452, - "ÛĮ": 32703, - "Ü": 152, - "Ü": 408, - "Ý": 153, - "Ý": 409, - "Þ": 154, - "Þ": 410, - "ß": 155, - "ß": 411, - "à": 156, - "à": 412, - "à¤": 3124, - "त": 27263, - "द": 29552, - "न": 26090, - "प": 44149, - "ब": 43599, - "म": 48254, - "म": 26774, - "य": 37299, - "र": 39136, - "र": 19052, - "ल": 30881, - "व": 39545, - "श": 43181, - "स": 28505, - "ह": 29446, - "ा": 37973, - "ा": 13343, - "ि": 26721, - "à¤Ĥ": 30833, - "à¤ķ": 22067, - "à¤Ĺ": 42598, - "à¤ľ": 39561, - "à¥": 7410, - "à¥Ģ": 45791, - "à¥Ģ": 25751, - "à¥ģ": 39653, - "à¥ĩ": 48612, - "à¥ĩ": 25130, - "à¥ĭ": 34452, - "à¥į": 19389, - "à¦": 11322, - "া": 41532, - "à§": 26339, - "à¨": 15741, - "à©": 32086, - "àª": 22990, - "à«": 48347, - "à¬": 32791, - "à®": 6022, - "த": 34691, - "ன": 43394, - "ப": 47388, - "à®®": 35463, - 
"à®°": 43270, - "ல": 47705, - "ா": 32831, - "ி": 27126, - "à®ķ": 36168, - "à®Ł": 45263, - "à¯": 11259, - "à¯ģ": 33115, - "à¯į": 16631, - "à°": 12100, - "à±": 23550, - "à±į": 46098, - "à²": 9992, - "ಿ": 47797, - "à³": 20745, - "à³į": 36148, - "à´": 15418, - "àµ": 27392, - "àµį": 45266, - "à¶": 29881, - "à·": 30766, - "à¸": 1777, - "ม": 26137, - "ม": 29570, - "ย": 27241, - "ย": 33091, - "ร": 32225, - "ร": 27331, - "ล": 34696, - "ล": 32746, - "ว": 26990, - "ว": 30245, - "ส": 37883, - "ส": 35737, - "ห": 33064, - "ะ": 43920, - "ะ": 49234, - "ั": 14978, - "า": 11529, - "า": 38476, - "าà¸": 12330, - "ิ": 17092, - "ี": 22421, - "ี": 20278, - "ีà¹Ī": 31511, - "ื": 47991, - "ุ": 30524, - "ู": 35273, - "à¸ģ": 30767, - "à¸ģà¸": 31474, - "à¸Ħ": 31757, - "à¸Ħà¸": 39628, - "à¸ĩ": 24603, - "à¸ĩ": 33382, - "à¸Ī": 47608, - "à¸Ĭ": 46324, - "à¸Ķ": 31107, - "à¸Ķ": 38825, - "à¸ķ": 40273, - "à¸ķ": 41108, - "à¸Ĺ": 36171, - "à¸Ļ": 17474, - "à¸Ļ": 17639, - "à¸Ļà¸": 23121, - "à¸ļ": 33859, - "à¸ļ": 39616, - "à¸ŀ": 48171, - "à¸Ń": 13398, - "à¸Ń": 32818, - "à¸Ńà¸": 14649, - "à¸Ńà¸ĩ": 46622, - "à¹": 4484, - "à¹Ģ": 13729, - "à¹Ģà¸": 14076, - "à¹ģà¸": 23916, - "à¹Ĥ": 33118, - "à¹ĥ": 40962, - "à¹Ħà¸": 31718, - "à¹ĩ": 38699, - "à¹Ī": 11722, - "à¹ī": 13123, - "à¹Į": 28353, - "à¼": 46186, - "à½": 39219, - "á": 157, - "á": 413, - "á´": 19036, - "áµ": 17330, - "áĢ": 45932, - "áĥ": 24829, - "áĥ¦": 32193, - "â": 158, - "â": 414, - "â¤": 25087, - "⤵ï¸ı": 36026, - "â¬": 7930, - "â¬ħï¸ı": 42111, - "â¬Ĩ": 27718, - "â¬Ĩï¸ı": 32798, - "â¬ĩ": 10917, - "â¬ĩ": 39370, - "â¬ĩï¸ı": 25621, - "â¬ĩï¸ı": 13984, - "â¬ĩï¸ıâ¬ĩï¸ı": 40159, - "âĢ": 728, - "âĢ¢": 9485, - "âĢ¢": 2701, - "âĢ¢âĢ¢": 15006, - "âĢ¢âĢ¢": 47575, - "âĢ¢âĢ¢âĢ¢âĢ¢": 27502, - "âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢": 48630, - "â̦": 7095, - "â̦\"": 20215, - "â̦..": 47779, - "â̦.": 18615, - "â̦/": 29842, - "â̦": 959, - "â̦â̦": 40066, - "â̲": 32633, - "â̳": 25061, - "â̼": 6578, - "â̼ï¸ı": 15622, - "â̼ï¸ı": 8310, - "â̼ï¸ıâ̼ï¸ı": 33218, - "âĢĭ": 17086, - "âĢĭ": 9844, - 
"âĢį": 4244, - "âĢįâĻ": 5177, - "âĢįâĻĢï¸ı": 18897, - "âĢįâĻĢï¸ı": 9605, - "âĢįâĻĤ": 8832, - "âĢįâĻĤï¸ı": 21779, - "âĢįâĻĤï¸ı": 10613, - "âĢİ": 31001, - "âĢIJ": 34512, - "âĢĵ": 21070, - "âĢĵ": 1224, - "âĢĶ": 6718, - "âĢĶ": 2005, - "âĢĶ>": 26341, - "âĢĶ@": 28470, - "âĢĶâĢĶ": 10037, - "âĢĶâĢĶ": 44800, - "âĢĶâĢĶâĢĶâĢĶ": 17797, - "âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ": 34432, - "âĢķ": 14236, - "âģ": 1667, - "âģ£": 31089, - "âģ£": 16845, - "âģ¦": 2773, - "âģ¦": 34855, - "âģ¦@": 2859, - "âģ¦âģ¦@": 27783, - "âģ©": 20097, - "âģ©,": 48749, - "âģ©.": 35777, - "âģ©": 2918, - "âģīï¸ı": 46534, - "âģł": 23881, - "âģł": 13503, - "âģłâģł": 33488, - "âĤ": 5227, - "âĤ¬": 34919, - "âĤ¬": 6309, - "âĤ¹": 21777, - "âĥ": 2805, - "âĥ£": 11250, - "âĥ£": 3076, - "âĥ£@": 48291, - "âĦ": 8604, - "âĦ¢": 29438, - "âĦ¢": 11675, - "âĦ¹": 45462, - "âĨ": 6059, - "âĨĴ": 7481, - "âĨĵ": 41603, - "âĩ": 27228, - "âĪ": 17788, - "âī": 22684, - "âīĪ": 45451, - "âĮ": 17848, - "âĮļ": 31301, - "âĮļï¸ı": 35931, - "âı": 7960, - "âı©": 40847, - "âı°": 12714, - "âı±": 33149, - "âı³": 47617, - "âĵ": 27400, - "âĶ": 13389, - "âĶĢ": 45139, - "âĶģ": 42022, - "âķ": 17027, - "âķIJ": 48039, - "âĸ": 4168, - "âĸª": 21203, - "âĸª": 36628, - "âĸªï¸ı": 24974, - "âĸ«": 39478, - "âĸ¬": 33798, - "âĸ¬âĸ¬": 36975, - "âĸ¶": 12509, - "âĸ¶": 21126, - "âĸ¶ï¸ı": 14442, - "âĸº": 46061, - "âĸº": 12086, - "âĸ½": 45634, - "âĸł": 36791, - "âĹ": 9323, - "âĹĨ": 48961, - "âĹı": 26999, - "âĺ": 1741, - "âĺ®": 45851, - "âĺ¹": 28811, - "âĺ¹ï¸ı": 39605, - "âĺº": 5010, - "âĺº": 8703, - "âĺºâĺº": 46051, - "âĺºï¸ı": 11506, - "âĺºï¸ı": 7779, - "âĺºï¸ıâĺºï¸ı": 41315, - "âĺ¼": 38877, - "âĺĢ": 32146, - "âĺĢ": 22242, - "âĺĢï¸ı": 12817, - "âĺĢï¸ı": 8219, - "âĺĢï¸ıâĺĢï¸ı": 44550, - "âĺģ": 25195, - "âĺģï¸ı": 35197, - "âĺĥ": 38972, - "âĺħ": 9339, - "âĺħ": 10643, - "âĺħâĺħ": 12681, - "âĺħâĺħ": 36644, - "âĺħâĺħâĺħâĺħ": 34431, - "âĺħâĺħâĺħâĺħ": 44034, - "âĺħâĺħâĺħâĺħâĺħ": 45984, - "âĺĨ": 23941, - "âĺĨ": 13439, - "âĺİ": 24045, - "âĺİ": 45493, - "âĺİï¸ı": 27219, - "âĺij": 
20983, - "âĺij": 42300, - "âĺijï¸ı": 22291, - "âĺĶï¸ı": 31238, - "âĺķ": 11454, - "âĺķ": 26561, - "âĺķï¸ı": 25839, - "âĺķï¸ı": 15499, - "âĺĺ": 23483, - "âĺĺï¸ı": 31454, - "âĺĿ": 21982, - "âĺĿï¸ı": 38891, - "âĺŀ": 31255, - "âĺłï¸ı": 34672, - "âĻ": 1548, - "âĻ¡": 11091, - "âĻ¡": 6251, - "âĻ¡âĻ¡": 22360, - "âĻ¡âĻ¡": 34267, - "âĻ¡âĻ¡âĻ¡": 36611, - "âϤ": 47435, - "âĻ¥": 4622, - "âĻ¥": 3405, - "âĻ¥âĻ¥": 12975, - "âĻ¥âĻ¥": 19604, - "âĻ¥âĻ¥âĻ¥": 23255, - "âĻ¥âĻ¥âĻ¥âĻ¥": 49020, - "âĻ¥ï¸ı": 17774, - "âĻ¥ï¸ı": 10561, - "âĻ¥ï¸ıâĻ¥ï¸ı": 40309, - "âϦ": 32376, - "âϦ": 47547, - "âĻ©": 30339, - "âĻ©âĻ«": 31636, - "âĻª": 27364, - "âĻª": 12382, - "âĻ«": 39217, - "âĻ«": 10814, - "âϬ": 24753, - "âĻ»": 39611, - "âĻ»ï¸ı": 46075, - "âļ": 2234, - "âļ¡": 40098, - "âļ¡": 20712, - "âļ¡ï¸ı": 19500, - "âļ¡ï¸ı": 11605, - "âļ¡ï¸ıâļ¡ï¸ı": 45922, - "âļª": 11922, - "âļª": 36373, - "âļªï¸ı": 22251, - "âļªï¸ı": 17885, - "âļ«": 15374, - "âļ«ï¸ı": 26529, - "âļ«ï¸ı": 24649, - "âļ½": 4867, - "âļ½": 13173, - "âļ½âļ½": 43259, - "âļ½ï¸ı": 11342, - "âļ½ï¸ı": 6768, - "âļ½ï¸ıâļ½ï¸ı": 30358, - "âļ½ï¸ıâļ½ï¸ı": 44148, - "âļ¾": 11314, - "âļ¾": 34717, - "âļ¾ï¸ı": 24727, - "âļ¾ï¸ı": 14858, - "âļĵ": 23522, - "âļĵï¸ı": 35299, - "âļĶï¸ı": 29361, - "âļľ": 47491, - "âļł": 39203, - "âļłï¸ı": 40966, - "âļłï¸ı": 15596, - "âĽ": 7956, - "âĽ³ï¸ı": 29204, - "âĽĦ": 30668, - "âĽĦï¸ı": 45465, - "âľ": 1508, - "⾨": 7181, - "⾨": 3531, - "⾨⾨": 35174, - "⾨⾨": 21985, - "⾨⾨⾨": 39424, - "âľĤ": 38602, - "âľħ": 29544, - "âľħ": 5564, - "âľĪ": 10682, - "âľĪ": 30712, - "âľĪï¸ı": 26176, - "âľĪï¸ı": 13413, - "âľĬ": 12392, - "âľĬ": 17819, - "âľĬðŁı½": 48547, - "âľĬðŁı¾": 41185, - "âľĭ": 39383, - "âľĭ": 30239, - "âľĮ": 6419, - "âľĮ": 12656, - "âľĮï¸ı": 21906, - "âľĮï¸ı": 12239, - "âľĮðŁı»": 30538, - "âľĮðŁı¼": 30588, - "âľį": 20872, - "âľįï¸ı": 30888, - "âľı": 32574, - "âľıï¸ı": 40724, - "âľĵ": 36700, - "âľĶ": 47200, - "âľĶ": 13749, - "âľĶï¸ı": 40544, - "âľĶï¸ı": 9191, - "âľĸï¸ı": 44133, - "âľĿ": 42220, - "âĿ": 1045, - "âĿ£": 37007, - "âĿ£": 25623, 
- "âĿ£ï¸ı": 25240, - "âĿ¤": 1266, - "âĿ¤": 2720, - "âĿ¤âĿ¤": 9033, - "âĿ¤âĿ¤": 14058, - "âĿ¤âĿ¤âĿ¤": 16708, - "âĿ¤âĿ¤âĿ¤âĿ¤": 37918, - "âĿ¤âĿ¤âĿ¤âĿ¤": 43970, - "âĿ¤ï¸ı": 2626, - "âĿ¤ï¸ı#": 30281, - "âĿ¤ï¸ı.": 45326, - "âĿ¤ï¸ı": 1752, - "âĿ¤ï¸ı@": 31187, - "âĿ¤ï¸ıâĿ¤ï¸ı": 6713, - "âĿ¤ï¸ıâĿ¤ï¸ı": 10363, - "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 12282, - "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 39167, - "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 29880, - "âĿ¤ï¸ıðŁĴĻ": 37380, - "âĿ¤ï¸ıðŁĺį": 37272, - "âĿ¤ï¸ıðŁĺĺ": 41800, - "âĿ¤ðŁĺį": 49120, - "âĿ¥": 36914, - "âĿĦ": 8501, - "âĿĦ": 30494, - "âĿĦï¸ı": 16834, - "âĿĦï¸ı": 12402, - "âĿĦï¸ıâĿĦï¸ı": 41626, - "âĿĮ": 44485, - "âĿĮ": 17975, - "âĿĵ": 29791, - "âĿĹ": 12868, - "âĿĹ": 29079, - "âĿĹï¸ı": 28642, - "âĿĹï¸ı": 17391, - "âĿĿ": 46951, - "âŀ": 3257, - "âŀ¡": 12854, - "âŀ¡ï¸ı": 31860, - "âŀ¡ï¸ı": 4956, - "âŀ¤": 18651, - "âŀķ": 46526, - "âŀĸ": 21327, - "âŀĸ": 34902, - "âŀĸâŀĸ": 23316, - "âŀĸâŀĸâŀĸâŀĸ": 40401, - "âŀľ": 23775, - "âł": 5689, - "âłĢ": 9691, - "âłĢ": 8621, - "âłĢâłĢ": 11466, - "âłĢâłĢ": 39092, - "âłĢâłĢâłĢâłĢ": 20976, - "âłĢâłĢâłĢâłĢâłĢâłĢâłĢâłĢ": 46063, - "âŃ": 5527, - "âŃIJ": 6410, - "âŃIJ": 19012, - "âŃIJâŃIJ": 32663, - "âŃIJï¸ı": 12427, - "âŃIJï¸ı": 10251, - "âŃIJï¸ıâŃIJï¸ı": 18640, - "âŃIJï¸ıâŃIJï¸ıâŃIJï¸ı": 40746, - "ã": 159, - "ã": 415, - "ãĢ": 4092, - "ãĢģ": 45262, - "ãĢĤ": 38060, - "ãĢĤ": 38000, - "ãĢĬ": 39920, - "ãĢĭ": 32898, - "ãĢĮ": 18116, - "ãĢį": 19149, - "ãĢİ": 26947, - "ãĢı": 30293, - "ãĢIJ": 12534, - "ãĢij": 12990, - "ãĢľ": 39581, - "ãģ": 4813, - "ãģ¦": 48029, - "ãģ¨": 34671, - "ãģ¨ç¹ĭãģ": 47310, - "ãģ¨ç¹ĭãģĮãĤĬãģŁãģĦ": 48290, - "ãģª": 29104, - "ãģ®": 21575, - "ãģ·": 44130, - "ãģĦ": 33523, - "ãģĦ": 38850, - "ãģĨ": 44235, - "ãģį": 42184, - "ãĤ": 3909, - "ãĤ¢": 26560, - "ãĤ¤": 19319, - "ãĤ¤ãĥ": 36294, - "ãĤ«": 37367, - "ãĤ¯": 31574, - "ãĤ·": 37665, - "ãĤ¸": 32234, - "ãĤ¸ãĥ": 43491, - "ãĤ¹": 22694, - "ãĤ¹": 39220, - "ãĤ¹ãĥ": 32421, - "ãĤ¿": 34941, - "ãĤĬãģ": 40500, - "ãĤĮ": 45211, - "ãĤŃ": 47121, - "ãĥ": 2429, - "ãĥ©": 23007, - "ãĥª": 32115, - 
"ãĥ«": 33257, - "ãĥ¬": 32965, - "ãĥ³": 17671, - "ãĥ³": 26875, - "ãĥ³ãĤ": 45105, - "ãĥ³ãĥ": 25914, - "ãĥ»": 8415, - "ãĥ»": 11158, - "ãĥ»ãĥ»": 13949, - "ãĥ»ãĥ»ãĥ»": 14234, - "ãĥ¼": 13457, - "ãĥ¼": 30391, - "ãĥ¼ãĥ": 18584, - "ãĥĥ": 28902, - "ãĥĦ": 32173, - "ãĥĪ": 42384, - "ãĥİ": 39967, - "ãĥķãĤ": 33371, - "ãĥŀ": 48924, - "ãĥŃ": 35827, - "ãħ": 5947, - "ãħ¤": 21096, - "ãħ¤ãħ¤": 22583, - "ãħ¤ãħ¤ãħ¤ãħ¤": 39329, - "ãħĭ": 13052, - "ãħĭ": 25108, - "ãħĭãħĭ": 16604, - "ãħĭãħĭ": 42581, - "ãħĭãħĭãħĭ": 46407, - "ãħĭãħĭãħĭãħĭ": 39362, - "ãħł": 16089, - "ãħł": 25781, - "ãħłãħł": 22021, - "ãħłãħł": 34398, - "ãħłãħłãħłãħł": 47028, - "ä": 160, - "ä": 416, - "ä¸": 19759, - "ä¹": 41854, - "äº": 21078, - "人": 36839, - "ä»": 37743, - "ä½": 47466, - "å": 161, - "å": 417, - "å¤": 23170, - "å¥": 29290, - "å®": 27047, - "å°": 34720, - "å±": 46096, - "å¸": 42021, - "å¹": 38780, - "åħ": 34314, - "åĨ": 27972, - "åĨĻ": 44653, - "åĪ": 42748, - "åĭ": 47505, - "åı": 34517, - "åIJ": 41673, - "åĽ": 39027, - "åľ": 37746, - "åŃ": 35751, - "æ": 162, - "æ": 418, - "æĸ": 29032, - "æĹ": 22265, - "æĹ¥": 39121, - "æĹ¥": 37156, - "æĺ": 42891, - "æĻ": 48132, - "æľ": 19277, - "æľ¬": 44353, - "æĿ": 27667, - "æĿ±": 48338, - "ç": 163, - "ç": 419, - "ç¥": 26369, - "ç¥Ń": 42557, - "çµ": 37810, - "ç¹": 43431, - "ç¹ĭãģ": 45930, - "çĶ": 20211, - "çĶŁ": 33375, - "çľ": 33440, - "羣": 41570, - "è": 164, - "è": 420, - "èª": 34002, - "èªķ": 41293, - "é": 165, - "é": 421, - "éģ": 44854, - "éĩ": 38283, - "ê": 166, - "ê": 422, - "ê°": 21122, - "ê°ĵ": 41076, - "ê°ĵìĦ¸ë¸IJ": 41689, - "ê°ķ": 45758, - "ê²": 35555, - "ê³": 36216, - "êµ": 31871, - "ê·": 42680, - "ê¸": 32495, - "ê¹": 24531, - "ê¹Ģ": 25203, - "ë": 167, - "ë": 423, - "ë¦": 24621, - "리": 47649, - "ë§": 28024, - "ë§Ī": 40027, - "ëª": 36311, - "ë¯": 19528, - "민": 34442, - "민": 44632, - "ë°": 15810, - "ë°©": 23273, - "ë°©íĥ": 25081, - "ë°©íĥĦ": 25641, - "ë°©íĥĦìĨĮëħĦëĭ": 26068, - "ë°©íĥĦìĨĮëħĦëĭ¨": 27129, - "ë°ķ": 40988, - "ë²": 48267, - "ë³": 44693, - "ë¹": 24193, - "ëĤ": 
27252, - "ëĤĺ": 48484, - "ëĭ": 13094, - "ëĭ¤": 46680, - "ëĭĪ": 33708, - "ëį": 45543, - "ëı": 31972, - "ëĵ": 30850, - "ëĿ": 44317, - "ì": 168, - "ì": 424, - "ì£": 39856, - "주": 45161, - "ì¤": 31153, - "ì§": 16279, - "ì§Ģ": 28836, - "ì§Ħ": 38890, - "ì°": 40742, - "ì¶": 42476, - "ì¶ķ": 46403, - "ì¶ķíķĺ": 47866, - "ì¹": 45088, - "ìĤ": 31061, - "ìĥ": 30587, - "ìĥĿ": 47858, - "ìĦ": 15074, - "ìĦ¸ë": 29254, - "ìĦ¸ë¸": 29658, - "ìĦ¸ë¸IJ": 41415, - "ìĨ": 15115, - "ìĨĮë": 20515, - "ìĨĮëħ": 21391, - "ìĨĮëħĦëĭ": 25887, - "ìĪ": 32757, - "ìĬ": 12125, - "ìĬ¤": 20305, - "ìĬ¤": 23829, - "ìĭ": 23924, - "ìķ": 16071, - "ìķĦ": 23233, - "ìĸ": 31625, - "ìĹ": 13252, - "ìĹIJ": 37622, - "ìĹij": 31036, - "ìĹijìĨ": 42763, - "ìĹijìĨĮ": 45606, - "ìĺ": 21144, - "ìĻ": 39405, - "ìļ": 18541, - "ìļ°": 38415, - "ìļ°": 49344, - "ìĽ": 22543, - "ìĽIJ": 36495, - "ìľ": 20909, - "ìľł": 42890, - "ìĿ": 8276, - "ìĿ´": 12286, - "ìĿ´": 34746, - "ìĿ´ì": 37590, - "ìĿ¼": 43406, - "ìŀ": 20849, - "ìł": 20580, - "ìłķ": 34725, - "í": 169, - "í": 425, - "íģ": 35641, - "íģ¬": 45832, - "íĤ": 43565, - "íĥ": 15012, - "íĥĢ": 41126, - "íĥľ": 37663, - "íĬ": 23215, - "íĬ¸": 48974, - "íĬ¸": 39820, - "íĭ": 34350, - "íĶ": 29450, - "íķ": 15197, - "íķ´": 35286, - "íķĺ": 33992, - "íĺ": 15962, - "íĺ¸": 39657, - "íĺĦ": 34645, - "íĻ": 31882, - "î": 170, - "î": 426, - "îĢ": 36288, - "îĦ": 35368, - "îĮ": 41006, - "îIJ": 16929, - "îIJĴ": 40100, - "ï": 171, - "ï": 427, - "ï¸": 842, - "ï¸İ": 24029, - "ï¸ı": 1392, - "ï¸ı#": 46997, - "ï¸ı:": 32604, - "ï¸ı": 1001, - "ï¸ı@": 34600, - "ï¸ıâĥ£": 17394, - "ï¸ıâĥ£-": 40376, - "ï¸ıâĥ£": 4603, - "ï¿": 27850, - "�": 47356, - "�": 39802, - "ð": 172, - "ð": 428, - "ðĿ": 6874, - "ðĿIJ": 15889, - "ðĿij": 43794, - "ðĿĴ": 43387, - "ðĿĵ": 47110, - "ðĿĹ": 18865, - "ðĿĺ": 26109, - "ðĿĻ": 29415, - "ðŁ": 558, - "ð٤": 1793, - "ðŁ¤£": 9665, - "ðŁ¤£": 9909, - "ðŁ¤£ðŁ¤£": 16430, - "ðŁ¤£ðŁ¤£": 31009, - "ðŁ¤£ðŁ¤£ðŁ¤£": 32262, - "ðŁ¤¤": 39550, - "ðŁ¤¤": 26759, - "ðŁ¤¦": 17186, - "ðŁ¤§": 40983, - "ðŁ¤©": 27351, - "ðŁ¤©": 
16074, - "ðŁ¤ª": 44230, - "ðŁ¤ª": 24920, - "ðŁ¤«": 47671, - "ðŁ¤¯": 37595, - "ðŁ¤·": 13185, - "ðŁ¤·ðŁı»âĢįâĻĢï¸ı": 46770, - "ð٤ij": 34801, - "ð٤ĵ": 36580, - "ð٤ĵ": 18928, - "ð٤Ķ": 12706, - "ð٤Ķ": 6497, - "ð٤Ķð٤Ķ": 28490, - "ð٤Ķð٤Ķð٤Ķ": 43361, - "ð٤ĸ": 46146, - "ð٤Ĺ": 16646, - "ð٤Ĺ": 10465, - "ð٤Ĺð٤Ĺ": 44321, - "ð٤ĺ": 10623, - "ð٤ĺ": 17288, - "ð٤ĺðŁı»": 46449, - "ð٤ĺðŁı»": 30891, - "ð٤ĺðŁı¼": 31458, - "ð٤ĺðŁı½": 49362, - "ð٤Ļ": 23800, - "ð٤Ļ": 39101, - "ð٤Ŀ": 35242, - "ð٤ŀ": 29463, - "ð٤ŀ": 38597, - "ðŁ¤Ł": 48509, - "ðŁ¤ł": 36737, - "ð٤Ń": 47289, - "ðŁ¥": 4156, - "ðŁ¥°": 29246, - "ðŁ¥°": 17597, - "ðŁ¥³": 45823, - "ðŁ¥³": 28055, - "ðŁ¥º": 43380, - "ðŁ¥º": 36858, - "ðŁ¥Ĥ": 43805, - "ðŁ¥Ĥ": 25212, - "ðŁ¥ĥ": 47790, - "ðŁ¥ĩ": 34372, - "ðŁ¥ĩ": 20069, - "ðŁ¥Ī": 35858, - "ðŁ¥ī": 36782, - "ðŁ¥Ĭ": 29275, - "ð٦": 6040, - "ð٦ģ": 36367, - "ð٦ģ": 26056, - "ð٦ĥ": 40184, - "ð٦Ħ": 37659, - "ð٦ħ": 28800, - "ð٦Ī": 48984, - "ð٦ĭ": 49325, - "ð٦ĭ": 28985, - "ð٧": 8792, - "ðŁ§¡": 30996, - "ðŁ§¡": 24578, - "ð٧IJ": 33549, - "ðŁħ": 22010, - "ðŁĨ": 9536, - "ðŁĨķ": 34956, - "ðŁĨĺ": 39868, - "ðŁĨļ": 16325, - "ðŁĩ": 1173, - "ðŁĩ¦": 12469, - "ðŁĩ¦": 28565, - "ðŁĩ¦ðŁĩ": 33196, - "ðŁĩ¦ðŁĩ·": 41629, - "ðŁĩ¦ðŁĩº": 25192, - "ðŁĩ§": 14660, - "ðŁĩ§ðŁĩ": 37342, - "ðŁĩ§ðŁĩª": 38794, - "ðŁĩ§ðŁĩ·": 28182, - "ðŁĩ¨": 8889, - "ðŁĩ¨ðŁĩ": 8989, - "ðŁĩ¨ðŁĩ¦": 34324, - "ðŁĩ¨ðŁĩ¦": 16364, - "ðŁĩ¨ðŁĩ³": 36819, - "ðŁĩ¨ðŁĩŃ": 41119, - "ðŁĩ©": 15222, - "ðŁĩ©ðŁĩ": 36350, - "ðŁĩ©ðŁĩª": 21531, - "ðŁĩª": 11428, - "ðŁĩª": 12331, - "ðŁĩªðŁĩ": 13917, - "ðŁĩªðŁĩ¸": 22177, - "ðŁĩªðŁĩº": 34655, - "ðŁĩ«": 12977, - "ðŁĩ«ðŁĩ·": 39109, - "ðŁĩ«ðŁĩ·": 16223, - "ðŁĩ¬": 8129, - "ðŁĩ¬ðŁĩ": 8354, - "ðŁĩ¬ðŁĩ§": 23762, - "ðŁĩ¬ðŁĩ§": 11559, - "ðŁĩ®": 8268, - "ðŁĩ®ðŁĩ": 8347, - "ðŁĩ®ðŁĩª": 34148, - "ðŁĩ®ðŁĩ³": 47299, - "ðŁĩ®ðŁĩ³": 23602, - "ðŁĩ®ðŁĩ¹": 42034, - "ðŁĩ®ðŁĩ¹": 17070, - "ðŁĩ¯": 20090, - "ðŁĩ¯ðŁĩ": 22924, - "ðŁĩ¯ðŁĩµ": 26527, - "ðŁĩ°": 28232, - "ðŁĩ±": 29533, - "ðŁĩ±ðŁĩ": 40941, - "ðŁĩ²": 16411, - "ðŁĩ²ðŁĩ": 17562, - "ðŁĩ²ðŁĩ½": 
32073, - "ðŁĩ³": 16645, - "ðŁĩ³ðŁĩ": 17747, - "ðŁĩ³ðŁĩ±": 36747, - "ðŁĩµ": 12127, - "ðŁĩµðŁĩ": 13608, - "ðŁĩµðŁĩ°": 37764, - "ðŁĩµðŁĩ¹": 42621, - "ðŁĩµðŁĩŃ": 42777, - "ðŁĩ·": 16026, - "ðŁĩ·": 9869, - "ðŁĩ·ðŁĩº": 37902, - "ðŁĩ¸": 19447, - "ðŁĩ¸ðŁĩ": 33325, - "ðŁĩ¸ðŁĩª": 39260, - "ðŁĩ¹": 21810, - "ðŁĩ¹ðŁĩ": 36250, - "ðŁĩº": 4054, - "ðŁĩº": 17467, - "ðŁĩºðŁĩ": 4131, - "ðŁĩºðŁĩ¸": 8907, - "ðŁĩºðŁĩ¸": 5688, - "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 18739, - "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 41411, - "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 43357, - "ðŁĩ¿": 25520, - "ðŁĩ¿ðŁĩ¦": 36982, - "ðŁĩŃ": 30370, - "ðŁĮ": 1576, - "ðŁĮ±": 35318, - "ðŁĮ±": 20665, - "ðŁĮ²": 34071, - "ðŁĮ²": 28154, - "ðŁĮ³": 44265, - "ðŁĮ³": 28543, - "ðŁĮ´": 20643, - "ðŁĮ´": 15968, - "ðŁĮµ": 40871, - "ðŁĮ·": 32328, - "ðŁĮ·": 24259, - "ðŁĮ¸": 16314, - "ðŁĮ¸": 10980, - "ðŁĮ¸ðŁĮ¸": 46210, - "ðŁĮ¹": 14990, - "ðŁĮ¹": 10662, - "ðŁĮ¹ðŁĮ¹": 37933, - "ðŁĮº": 27608, - "ðŁĮº": 19829, - "ðŁĮ»": 27196, - "ðŁĮ»": 19772, - "ðŁĮ¼": 36484, - "ðŁĮ¼": 26312, - "ðŁĮ¾": 39796, - "ðŁĮ¿": 27736, - "ðŁĮ¿": 18588, - "ðŁĮĢ": 34348, - "ðŁĮħ": 27547, - "ðŁĮĪ": 23038, - "ðŁĮĪ": 13042, - "ðŁĮĬ": 20465, - "ðŁĮĬ": 14302, - "ðŁĮĮ": 43393, - "ðŁĮį": 34931, - "ðŁĮį": 18641, - "ðŁĮİ": 31125, - "ðŁĮİ": 16969, - "ðŁĮı": 31527, - "ðŁĮIJ": 33071, - "ðŁĮĻ": 42330, - "ðŁĮĻ": 23283, - "ðŁĮļ": 49004, - "ðŁĮļ": 27877, - "ðŁĮŀ": 21152, - "ðŁĮŀ": 12980, - "ðŁĮŁ": 13196, - "ðŁĮŁ": 8542, - "ðŁĮŁðŁĮŁ": 26014, - "ðŁį": 2011, - "ðŁį¦": 47375, - "ðŁį¦": 32032, - "ðŁį©": 38379, - "ðŁįª": 38958, - "ðŁį«": 47994, - "ðŁį«": 33401, - "ðŁį°": 43732, - "ðŁį°": 30051, - "ðŁį³": 37441, - "ðŁį´": 41531, - "ðŁį´": 25338, - "ðŁį·": 24445, - "ðŁį·": 18072, - "ðŁį¸": 43058, - "ðŁį¸": 31217, - "ðŁį¹": 35598, - "ðŁįº": 31081, - "ðŁįº": 21590, - "ðŁį»": 22793, - "ðŁį»": 13167, - "ðŁį¾": 27294, - "ðŁį¾": 21656, - "ðŁįĢ": 22865, - "ðŁįĢ": 15764, - "ðŁįģ": 29837, - "ðŁįģ": 23075, - "ðŁįĤ": 35015, - "ðŁįĤ": 25721, - "ðŁįĥ": 27157, - "ðŁįĥ": 20147, - "ðŁįĩ": 48697, - "ðŁįĬ": 35001, - "ðŁįĬ": 28036, - "ðŁįĭ": 39543, - 
"ðŁįĮ": 44987, - "ðŁįį": 48946, - "ðŁįİ": 32069, - "ðŁįij": 32889, - "ðŁįĴ": 33160, - "ðŁįĵ": 44739, - "ðŁįĵ": 33456, - "ðŁįĶ": 46415, - "ðŁįĶ": 36031, - "ðŁįķ": 31469, - "ðŁįķ": 23904, - "ðŁįŃ": 42100, - "ðŁİ": 1165, - "ðŁİ£": 43158, - "ðŁİ¤": 23490, - "ðŁİ¤": 15690, - "ðŁİ¥": 22186, - "ðŁİ¥:": 43640, - "ðŁİ¥": 13233, - "ðŁİ§": 31254, - "ðŁİ§": 14266, - "ðŁİ¨": 31953, - "ðŁİ¨": 13461, - "ðŁİ©": 37701, - "ðŁİ«": 30331, - "ðŁİ¬": 36020, - "ðŁİ¬": 18150, - "ðŁİ®": 29312, - "ðŁİ¯": 23114, - "ðŁİµ": 27435, - "ðŁİµ": 14946, - "ðŁİ¶": 11755, - "ðŁİ¶": 6011, - "ðŁİ¶ðŁİ¶": 36283, - "ðŁİ¸": 29135, - "ðŁİ¸": 22122, - "ðŁİ¹": 43493, - "ðŁİ¼": 34949, - "ðŁİ¼": 23757, - "ðŁİ¾": 41982, - "ðŁİ¾": 24222, - "ðŁİĢ": 34347, - "ðŁİĢ": 20151, - "ðŁİģ": 18368, - "ðŁİģ": 13462, - "ðŁİĤ": 13026, - "ðŁİĤ": 10392, - "ðŁİĤðŁİĤ": 39338, - "ðŁİĥ": 22622, - "ðŁİĥ": 16780, - "ðŁİĦ": 12942, - "ðŁİĦ": 11267, - "ðŁİħ": 17685, - "ðŁİħ": 24276, - "ðŁİĨ": 39222, - "ðŁİĪ": 16142, - "ðŁİĪ": 14448, - "ðŁİĪðŁİī": 48049, - "ðŁİī": 4310, - "ðŁİī:": 17310, - "ðŁİī": 3986, - "ðŁİīðŁİ": 11473, - "ðŁİīðŁİĪ": 40499, - "ðŁİīðŁİĪ": 34008, - "ðŁİīðŁİī": 25159, - "ðŁİīðŁİī": 13450, - "ðŁİīðŁİīðŁİī": 20828, - "ðŁİīðŁİĬ": 31662, - "ðŁİīðŁİĬ": 30781, - "ðŁİĬ": 22763, - "ðŁİĬ": 22425, - "ðŁİĬðŁİī": 48801, - "ðŁİĵ": 28916, - "ðŁİĵ": 18744, - "ðŁİĻ": 29001, - "ðŁİĻ": 29753, - "ðŁİĻï¸ı": 44205, - "ðŁİŁ": 19248, - "ðŁİŁ": 21107, - "ðŁİŁï¸ı": 30243, - "ðŁİŃ": 28856, - "ðŁı": 1109, - "ðŁı¡": 27318, - "ðŁı³ï¸ı": 26844, - "ðŁı³ï¸ıâĢį": 27093, - "ðŁı³ï¸ıâĢįðŁĮĪ": 32610, - "ðŁı´": 39690, - "ðŁı´": 19704, - "ðŁı»": 5042, - "ðŁı»": 3702, - "ðŁı»âĢį": 46250, - "ðŁı»âĢįâĻĢï¸ı": 48391, - "ðŁı»âĢįâĻĢï¸ı": 23595, - "ðŁı»âĢįâĻĤï¸ı": 30984, - "ðŁı¼": 6193, - "ðŁı¼": 4027, - "ðŁı¼âĢįâĻĢï¸ı": 28955, - "ðŁı½": 8514, - "ðŁı½": 6114, - "ðŁı½âĢįâĻĢï¸ı": 37036, - "ðŁı½âĢįâĻĤï¸ı": 43157, - "ðŁı¾": 10230, - "ðŁı¾": 7778, - "ðŁı¾âĢįâĻĤï¸ı": 47189, - "ðŁı¿": 29854, - "ðŁı¿": 21094, - "ðŁıĢ": 13708, - "ðŁıĢ": 8813, - "ðŁıĢðŁıĢ": 43169, - "ðŁıģ": 
29423, - "ðŁıģ": 17473, - "ðŁıĥ": 16820, - "ðŁıĥ": 32751, - "ðŁıħ": 25500, - "ðŁıĨ": 9585, - "ðŁıĨ": 5596, - "ðŁıĨðŁıĨ": 18946, - "ðŁıĨðŁıĨ": 38269, - "ðŁıĨðŁıĨðŁıĨ": 44484, - "ðŁıĩ": 45789, - "ðŁıĩ": 40288, - "ðŁıĪ": 16144, - "ðŁıĪ": 10477, - "ðŁıī": 26020, - "ðŁıĬ": 33061, - "ðŁıĬ": 47830, - "ðŁıĮ": 41116, - "ðŁıı": 32460, - "ðŁıIJ": 46334, - "ðŁıIJ": 29433, - "ðŁıĴ": 37756, - "ðŁıŁ": 35914, - "ðŁıŁ": 26472, - "ðŁıŁï¸ı": 42627, - "ðŁıł": 33727, - "ðŁIJ": 2074, - "ðŁIJ¢": 37049, - "ðŁIJ£": 39597, - "ðŁIJ¥": 42981, - "ðŁIJ¦": 37260, - "ðŁIJ¬": 44238, - "ðŁIJ¯": 34825, - "ðŁIJ¯": 26111, - "ðŁIJ°": 35378, - "ðŁIJ°": 25050, - "ðŁIJ±": 35710, - "ðŁIJ±": 22979, - "ðŁIJ´": 33509, - "ðŁIJ¶": 14466, - "ðŁIJ¶": 10631, - "ðŁIJ·": 38408, - "ðŁIJ¸": 45597, - "ðŁIJ¸": 40298, - "ðŁIJº": 44281, - "ðŁIJº": 31445, - "ðŁIJ»": 30750, - "ðŁIJ»": 25322, - "ðŁIJ¼": 46234, - "ðŁIJ¾": 16057, - "ðŁIJ¾": 11317, - "ðŁIJ¾ðŁIJ¾": 42202, - "ðŁIJī": 46908, - "ðŁIJĬ": 43974, - "ðŁIJį": 48903, - "ðŁIJį": 30177, - "ðŁIJİ": 48281, - "ðŁIJİ": 32726, - "ðŁIJIJ": 47735, - "ðŁIJIJ": 27954, - "ðŁIJij": 49389, - "ðŁIJķ": 41069, - "ðŁIJĺ": 38733, - "ðŁIJĿ": 30619, - "ðŁIJĿ": 20111, - "ðŁIJŁ": 42084, - "ðŁIJŁ": 29989, - "ðŁIJł": 42725, - "ðŁij": 964, - "ðŁij£": 39755, - "ðŁij§": 48938, - "ðŁij¨": 18966, - "ðŁij¨âĢį": 25023, - "ðŁij©": 18800, - "ðŁij©âĢį": 26304, - "ðŁij«": 47106, - "ðŁij«": 35457, - "ðŁij®": 42686, - "ðŁij¯": 25910, - "ðŁij¯": 20582, - "ðŁij¶": 26187, - "ðŁij¶": 33189, - "ðŁij¸": 26268, - "ðŁij¸": 36645, - "ðŁij¹": 46766, - "ðŁij»": 24625, - "ðŁij»": 16243, - "ðŁij¼": 25270, - "ðŁij¼": 31083, - "ðŁij½": 42677, - "ðŁij½": 26257, - "ðŁijĢ": 11524, - "ðŁijĢ": 5908, - "ðŁijĢðŁijĢ": 31561, - "ðŁijģ": 47796, - "ðŁijģ": 45705, - "ðŁijĦ": 47445, - "ðŁijħ": 31833, - "ðŁijħ": 24672, - "ðŁijĨ": 42975, - "ðŁijĨ": 45194, - "ðŁijĩ": 7662, - "ðŁijĩ": 7475, - "ðŁijĩðŁı»": 45811, - "ðŁijĩðŁı»": 32813, - "ðŁijĩðŁı¼": 37504, - "ðŁijĩðŁijĩ": 17915, - "ðŁijĩðŁijĩ": 31891, - "ðŁijĩðŁijĩðŁijĩ": 35627, - "ðŁijĪ": 
32794, - "ðŁijĪ": 20832, - "ðŁijī": 9477, - "ðŁijī": 3988, - "ðŁijīðŁı»": 23481, - "ðŁijīðŁı¼": 27534, - "ðŁijīðŁı½": 38059, - "ðŁijīðŁijī": 41480, - "ðŁijĬ": 8897, - "ðŁijĬ": 9704, - "ðŁijĬðŁı»": 47393, - "ðŁijĬðŁı»": 29152, - "ðŁijĬðŁı¼": 49000, - "ðŁijĬðŁı¼": 30115, - "ðŁijĬðŁijĬ": 46521, - "ðŁijĭ": 19351, - "ðŁijĭ": 17686, - "ðŁijĮ": 4890, - "ðŁijĮ": 4494, - "ðŁijĮðŁı»": 31818, - "ðŁijĮðŁı»": 18606, - "ðŁijĮðŁı¼": 37655, - "ðŁijĮðŁı¼": 20031, - "ðŁijĮðŁı½": 35834, - "ðŁijĮðŁijĮ": 36139, - "ðŁijĮðŁijĮ": 21435, - "ðŁijĮðŁijĮðŁijĮ": 40876, - "ðŁijį": 4686, - "ðŁijį": 4201, - "ðŁijįðŁı»": 25803, - "ðŁijįðŁı»": 15129, - "ðŁijįðŁı¼": 37285, - "ðŁijįðŁı¼": 19689, - "ðŁijįðŁı½": 43722, - "ðŁijįðŁijį": 33012, - "ðŁijįðŁijį": 18997, - "ðŁijįðŁijįðŁijį": 37284, - "ðŁijİ": 39702, - "ðŁijİ": 32568, - "ðŁijı": 3802, - "ðŁijı": 4829, - "ðŁijıðŁı»": 19236, - "ðŁijıðŁı»": 17029, - "ðŁijıðŁı»ðŁijıðŁı»": 35254, - "ðŁijıðŁı¼": 24496, - "ðŁijıðŁı¼": 19979, - "ðŁijıðŁı¼ðŁijıðŁı¼": 46712, - "ðŁijıðŁı½": 40796, - "ðŁijıðŁı½": 33978, - "ðŁijıðŁı¾": 45450, - "ðŁijıðŁijı": 10356, - "ðŁijıðŁijı": 16706, - "ðŁijıðŁijıðŁijı": 17254, - "ðŁijIJ": 40877, - "ðŁijij": 14955, - "ðŁijij": 8717, - "ðŁijijðŁijij": 48532, - "ðŁijķ": 47865, - "ðŁijŁ": 41183, - "ðŁijł": 41264, - "ðŁijŃ": 34175, - "ðŁijŃ": 27943, - "ðŁĴ": 837, - "ðŁĴ¡": 24081, - "ðŁĴ£": 36862, - "ðŁĴ£": 29006, - "ðŁĴ¤": 34706, - "ðŁĴ¤": 25632, - "ðŁĴ¥": 12209, - "ðŁĴ¥": 7347, - "ðŁĴ¥ðŁĴ¥": 27396, - "ðŁĴ¥ðŁĴ¥": 39246, - "ðŁĴ¥ðŁĴ¥ðŁĴ¥": 48890, - "ðŁĴ¦": 21180, - "ðŁĴ¦": 14060, - "ðŁĴ¦ðŁĴ¦": 44469, - "ðŁĴ§": 34095, - "ðŁĴ¨": 27408, - "ðŁĴ¨": 17891, - "ðŁĴ©": 48621, - "ðŁĴ©": 28847, - "ðŁĴª": 5475, - "ðŁĴª": 6440, - "ðŁĴªðŁı»": 31669, - "ðŁĴªðŁı»": 21903, - "ðŁĴªðŁı¼": 32041, - "ðŁĴªðŁı¼": 20759, - "ðŁĴªðŁı½": 46380, - "ðŁĴªðŁı½": 31111, - "ðŁĴªðŁı¾": 39398, - "ðŁĴªðŁĴª": 24747, - "ðŁĴªðŁĴªðŁĴª": 39913, - "ðŁĴ«": 25770, - "ðŁĴ«": 12526, - "ðŁĴ¬": 30947, - "ðŁĴ¯": 10611, - "ðŁĴ¯": 7018, - "ðŁĴ¯ðŁĴ¯": 30234, - "ðŁĴ¯ðŁĴ¯": 44070, - "ðŁĴ°": 
20454, - "ðŁĴ°": 14078, - "ðŁĴ°ðŁĴ°": 41747, - "ðŁĴµ": 47412, - "ðŁĴµ": 38041, - "ðŁĴ¸": 37696, - "ðŁĴ¸": 25957, - "ðŁĴ»": 33433, - "ðŁĴ»": 18135, - "ðŁĴ¿": 39541, - "ðŁĴĢ": 14888, - "ðŁĴĢ": 12158, - "ðŁĴĢðŁĴĢ": 30884, - "ðŁĴģ": 13997, - "ðŁĴģ": 14392, - "ðŁĴĥ": 9947, - "ðŁĴĥ": 14333, - "ðŁĴĥðŁı»": 38624, - "ðŁĴĥðŁĴĥ": 28041, - "ðŁĴĦ": 46116, - "ðŁĴĦ": 34571, - "ðŁĴħ": 27457, - "ðŁĴħ": 32414, - "ðŁĴī": 44316, - "ðŁĴī": 30503, - "ðŁĴĭ": 12217, - "ðŁĴĭ": 7417, - "ðŁĴĭðŁĴĭ": 29214, - "ðŁĴĮ": 40817, - "ðŁĴį": 35850, - "ðŁĴį": 24898, - "ðŁĴİ": 25938, - "ðŁĴİ": 15874, - "ðŁĴIJ": 27375, - "ðŁĴIJ": 20554, - "ðŁĴij": 49404, - "ðŁĴĵ": 20628, - "ðŁĴĵ": 12568, - "ðŁĴĵðŁĴĵ": 43505, - "ðŁĴĶ": 18880, - "ðŁĴĶ": 10704, - "ðŁĴĶðŁĴĶ": 44673, - "ðŁĴķ": 5412, - "ðŁĴķ": 3082, - "ðŁĴķðŁĴķ": 23106, - "ðŁĴķðŁĴķ": 14117, - "ðŁĴķðŁĴķðŁĴķ": 26772, - "ðŁĴĸ": 8466, - "ðŁĴĸ": 5582, - "ðŁĴĸðŁĴĸ": 19562, - "ðŁĴĸðŁĴĸ": 30595, - "ðŁĴĸðŁĴĸðŁĴĸ": 33915, - "ðŁĴĹ": 10148, - "ðŁĴĹ": 6690, - "ðŁĴĹðŁĴĹ": 47158, - "ðŁĴĹðŁĴĹ": 24064, - "ðŁĴĹðŁĴĹðŁĴĹ": 36990, - "ðŁĴĺ": 18223, - "ðŁĴĺ": 10816, - "ðŁĴĺðŁĴĺ": 40464, - "ðŁĴĻ": 5305, - "ðŁĴĻ": 4074, - "ðŁĴĻðŁĴĻ": 17833, - "ðŁĴĻðŁĴĻ": 27101, - "ðŁĴĻðŁĴĻðŁĴĻ": 30698, - "ðŁĴĻðŁĴĽ": 46804, - "ðŁĴĻðŁĴĽ": 26230, - "ðŁĴĻðŁĴľ": 47931, - "ðŁĴĻðŁĴľ": 42541, - "ðŁĴļ": 8102, - "ðŁĴļ": 6521, - "ðŁĴļðŁĴļ": 27497, - "ðŁĴļðŁĴļ": 46209, - "ðŁĴļðŁĴļðŁĴļ": 46182, - "ðŁĴļðŁĴĽ": 41232, - "ðŁĴĽ": 8221, - "ðŁĴĽ": 6233, - "ðŁĴĽðŁĴĻ": 36337, - "ðŁĴĽðŁĴļ": 37994, - "ðŁĴĽðŁĴĽ": 32420, - "ðŁĴľ": 6832, - "ðŁĴľ": 4882, - "ðŁĴľðŁĴľ": 17280, - "ðŁĴľðŁĴľ": 28211, - "ðŁĴľðŁĴľðŁĴľ": 31004, - "ðŁĴĿ": 36761, - "ðŁĴĿ": 22002, - "ðŁĴŀ": 14862, - "ðŁĴŀ": 8988, - "ðŁĴŀðŁĴŀ": 36448, - "ðŁĴŁ": 49394, - "ðŁĴŁ": 28828, - "ðŁĴŃ": 33848, - "ðŁĵ": 1497, - "ðŁĵ¢": 46560, - "ðŁĵ¢": 20901, - "ðŁĵ£": 48841, - "ðŁĵ£": 21282, - "ðŁĵ°:": 28952, - "ðŁĵ°": 14985, - "ðŁĵ±": 36104, - "ðŁĵ±": 20824, - "ðŁĵ²": 19363, - "ðŁĵ·": 6966, - "ðŁĵ·:": 8294, - "ðŁĵ·": 5551, - "ðŁĵ·@": 40032, - "ðŁĵ¸": 8401, - "ðŁĵ¸:": 10379, - 
"ðŁĵ¸": 6074, - "ðŁĵ¸@": 39660, - "ðŁĵ¹": 49251, - "ðŁĵº": 21792, - "ðŁĵº:": 29728, - "ðŁĵº": 10450, - "ðŁĵ»": 32711, - "ðŁĵ»": 15882, - "ðŁĵ½": 45361, - "ðŁĵħ": 21277, - "ðŁĵĨ": 23471, - "ðŁĵĪ": 23359, - "ðŁĵĬ": 22244, - "ðŁĵĭ": 46351, - "ðŁĵĮ": 22289, - "ðŁĵį": 25043, - "ðŁĵį:": 36845, - "ðŁĵį": 8903, - "ðŁĵĸ": 49003, - "ðŁĵĸ": 23043, - "ðŁĵļ": 25433, - "ðŁĵļ": 15566, - "ðŁĵĿ": 31888, - "ðŁĵĿ:": 48398, - "ðŁĵĿ": 15853, - "ðŁĵŀ": 24022, - "ðŁĶ": 1428, - "ðŁĶ¥": 3191, - "ðŁĶ¥#": 44354, - "ðŁĶ¥": 3016, - "ðŁĶ¥ðŁĶ¥": 5692, - "ðŁĶ¥ðŁĶ¥": 11771, - "ðŁĶ¥ðŁĶ¥ðŁĶ¥": 11004, - "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥": 23408, - "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥": 30989, - "ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥ðŁĶ¥": 48401, - "ðŁĶ¥ðŁĶĹ": 35130, - "ðŁĶª": 47078, - "ðŁĶª": 34545, - "ðŁĶ«": 38116, - "ðŁĶ«": 20583, - "ðŁĶ¬": 44227, - "ðŁĶ®": 38077, - "ðŁĶ´": 12408, - "ðŁĶ´": 10854, - "ðŁĶ´âļªï¸ı": 46879, - "ðŁĶ´âļªï¸ı": 40055, - "ðŁĶµ": 17531, - "ðŁĶµ": 17193, - "ðŁĶµâļªï¸ı": 42412, - "ðŁĶ¶": 42880, - "ðŁĶ¶": 36222, - "ðŁĶ·": 37740, - "ðŁĶ¸": 24200, - "ðŁĶ¹": 19995, - "ðŁĶº": 45561, - "ðŁĶģ": 41299, - "ðŁĶĬ": 32580, - "ðŁĶĬ": 20502, - "ðŁĶİ": 44935, - "ðŁĶij": 35127, - "ðŁĶĴ": 44972, - "ðŁĶĶ": 45753, - "ðŁĶĹ": 47475, - "ðŁĶĹ": 14561, - "ðŁĶĺ": 38995, - "ðŁĶľ": 36011, - "ðŁĶĿ": 44387, - "ðŁĶĿ": 29506, - "ðŁķ": 7692, - "ðŁķº": 33958, - "ðŁķĬ": 42624, - "ðŁķĬ": 37760, - "ðŁĸ": 6269, - "ðŁĸ¤": 17603, - "ðŁĸ¤": 10860, - "ðŁĸ¥": 47990, - "ðŁĹ": 7045, - "ðŁĹ£": 33232, - "ðŁĹ£": 18583, - "ðŁĹ£ï¸ı": 37476, - "ðŁĹĵ": 34335, - "ðŁĹĵ": 28773, - "ðŁĹĵï¸ı": 39847, - "ðŁĺ": 668, - "ðŁĺ¡": 21968, - "ðŁĺ¡": 17452, - "ðŁĺ¡ðŁĺ¡": 37223, - "ðŁĺ¢": 14308, - "ðŁĺ¢": 9925, - "ðŁĺ¢ðŁĺ¢": 32923, - "ðŁĺ¢ðŁĺ¢": 47921, - "ðŁĺ£": 32718, - "ðŁĺ¤": 26872, - "ðŁĺ¤": 20740, - "ðŁĺ¥": 38383, - "ðŁĺ¥": 23951, - "ðŁĺ¨": 38080, - "ðŁĺ©": 9051, - "ðŁĺ©": 9494, - "ðŁĺ©ðŁĺ©": 22820, - "ðŁĺ©ðŁĺ©": 38031, - "ðŁĺ©ðŁĺ©ðŁĺ©": 49063, - "ðŁĺª": 38181, - "ðŁĺª": 22243, - "ðŁĺ«": 25141, - "ðŁĺ«": 22340, - "ðŁĺ¬": 23704, - "ðŁĺ¬": 14549, - "ðŁĺ®": 40163, - "ðŁĺ®": 21616, - "ðŁĺ¯": 
37858, - "ðŁĺ°": 34728, - "ðŁĺ±": 10938, - "ðŁĺ±": 9055, - "ðŁĺ±ðŁĺ±": 22061, - "ðŁĺ±ðŁĺ±": 40767, - "ðŁĺ±ðŁĺ±ðŁĺ±": 40909, - "ðŁĺ²": 40460, - "ðŁĺ²": 24620, - "ðŁĺ³": 12047, - "ðŁĺ³": 8223, - "ðŁĺ³ðŁĺ³": 32592, - "ðŁĺ´": 23527, - "ðŁĺ´": 16415, - "ðŁĺ´ðŁĺ´": 49307, - "ðŁĺµ": 39368, - "ðŁĺ¶": 35207, - "ðŁĺ·": 37943, - "ðŁĺ·": 25759, - "ðŁĺ¸": 36912, - "ðŁĺ¹": 26477, - "ðŁĺ¹": 26573, - "ðŁĺ¹ðŁĺ¹": 46287, - "ðŁĺº": 40613, - "ðŁĺ»": 15453, - "ðŁĺ»": 12911, - "ðŁĺ»ðŁĺ»": 34414, - "ðŁĺ¼": 44245, - "ðŁĺ½": 45156, - "ðŁĺĢ": 12832, - "ðŁĺĢ": 7334, - "ðŁĺĢðŁĺĢ": 34503, - "ðŁĺģ": 6967, - "ðŁĺģ": 4821, - "ðŁĺģðŁĺģ": 37900, - "ðŁĺģðŁĺģ": 19213, - "ðŁĺģðŁĺģðŁĺģ": 29083, - "ðŁĺĤ": 1424, - "ðŁĺĤ)": 42643, - "ðŁĺĤ.": 42550, - "ðŁĺĤ": 1558, - "ðŁĺĤâĿ¤ï¸ı": 36412, - "ðŁĺĤðŁijĮ": 42000, - "ðŁĺĤðŁĺĤ": 2286, - "ðŁĺĤðŁĺĤ": 4112, - "ðŁĺĤðŁĺĤðŁĺĤ": 22233, - "ðŁĺĤðŁĺĤðŁĺĤ": 4887, - "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 9936, - "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 11522, - "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 19295, - "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 33415, - "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 48973, - "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 28504, - "ðŁĺĤðŁĺį": 43128, - "ðŁĺĤðŁĺŃ": 28965, - "ðŁĺĤðŁĺŃ": 25802, - "ðŁĺĥ": 14079, - "ðŁĺĥ": 8520, - "ðŁĺĥðŁĺĥ": 38358, - "ðŁĺĦ": 12141, - "ðŁĺĦ": 7624, - "ðŁĺĦðŁĺĦ": 32312, - "ðŁĺħ": 15245, - "ðŁĺħ": 9188, - "ðŁĺħðŁĺħ": 39078, - "ðŁĺĨ": 16541, - "ðŁĺĨ": 10943, - "ðŁĺĨðŁĺĨ": 39503, - "ðŁĺĩ": 21694, - "ðŁĺĩ": 13091, - "ðŁĺĪ": 14377, - "ðŁĺĪ": 9756, - "ðŁĺĪðŁĺĪ": 44473, - "ðŁĺī": 9740, - "ðŁĺī": 4955, - "ðŁĺīðŁĺī": 40430, - "ðŁĺĬ": 4692, - "ðŁĺĬ": 3020, - "ðŁĺĬâĿ¤ï¸ı": 43606, - "ðŁĺĬðŁĺĬ": 12838, - "ðŁĺĬðŁĺĬ": 20842, - "ðŁĺĬðŁĺĬðŁĺĬ": 28685, - "ðŁĺĬðŁĺĬðŁĺĬðŁĺĬ": 35519, - "ðŁĺĭ": 12391, - "ðŁĺĭ": 7203, - "ðŁĺĭðŁĺĭ": 33304, - "ðŁĺĮ": 19221, - "ðŁĺĮ": 12163, - "ðŁĺį": 1796, - "ðŁĺį#": 42357, - "ðŁĺį.": 48579, - "ðŁĺį": 1754, - "ðŁĺįâĿ¤": 29122, - "ðŁĺįâĿ¤ï¸ı": 21945, - "ðŁĺįðŁijĮ": 41005, - "ðŁĺįðŁĴķ": 35946, - "ðŁĺįðŁĶ¥": 46648, - "ðŁĺįðŁĺĤ": 48715, - "ðŁĺįðŁĺį": 3663, - "ðŁĺįðŁĺį": 6471, - "ðŁĺįðŁĺįðŁĺį": 
30614, - "ðŁĺįðŁĺįðŁĺį": 7703, - "ðŁĺįðŁĺįðŁĺįðŁĺį": 16603, - "ðŁĺįðŁĺįðŁĺįðŁĺį": 18925, - "ðŁĺįðŁĺįðŁĺįðŁĺįðŁĺį": 32078, - "ðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺį": 48683, - "ðŁĺįðŁĺĺ": 29646, - "ðŁĺįðŁĺĺ": 19849, - "ðŁĺįðŁĺŃ": 39555, - "ðŁĺİ": 7426, - "ðŁĺİ": 4345, - "ðŁĺİðŁĺİ": 24048, - "ðŁĺİðŁĺİðŁĺİ": 39742, - "ðŁĺı": 11624, - "ðŁĺı": 6909, - "ðŁĺıðŁĺı": 38151, - "ðŁĺIJ": 38586, - "ðŁĺIJ": 19618, - "ðŁĺij": 32469, - "ðŁĺij": 18937, - "ðŁĺĴ": 20792, - "ðŁĺĴ": 11702, - "ðŁĺĵ": 28733, - "ðŁĺĶ": 19532, - "ðŁĺĶ": 11432, - "ðŁĺķ": 45741, - "ðŁĺķ": 20602, - "ðŁĺĸ": 35006, - "ðŁĺĺ": 4240, - "ðŁĺĺ": 3352, - "ðŁĺĺâĿ¤": 48409, - "ðŁĺĺâĿ¤ï¸ı": 39150, - "ðŁĺĺðŁĺį": 38176, - "ðŁĺĺðŁĺĺ": 15663, - "ðŁĺĺðŁĺĺ": 10507, - "ðŁĺĺðŁĺĺðŁĺĺ": 20208, - "ðŁĺĺðŁĺĺðŁĺĺðŁĺĺ": 44892, - "ðŁĺĻ": 36201, - "ðŁĺĻ": 29209, - "ðŁĺļ": 24897, - "ðŁĺļ": 19102, - "ðŁĺĽ": 24550, - "ðŁĺĽ": 15745, - "ðŁĺľ": 13226, - "ðŁĺľ": 7830, - "ðŁĺľðŁĺľ": 43065, - "ðŁĺĿ": 20064, - "ðŁĺĿ": 12970, - "ðŁĺŀ": 40458, - "ðŁĺŀ": 21103, - "ðŁĺŁ": 46947, - "ðŁĺł": 34094, - "ðŁĺŃ": 2962, - "ðŁĺŃ": 3915, - "ðŁĺŃâĿ¤ï¸ı": 29567, - "ðŁĺŃðŁĴķ": 46306, - "ðŁĺŃðŁĺĤ": 38505, - "ðŁĺŃðŁĺį": 36893, - "ðŁĺŃðŁĺŃ": 5300, - "ðŁĺŃðŁĺŃ": 11834, - "ðŁĺŃðŁĺŃðŁĺŃ": 44089, - "ðŁĺŃðŁĺŃðŁĺŃ": 13116, - "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 19793, - "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 27322, - "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 43366, - "ðŁĻ": 1478, - "ðŁĻĢ": 43092, - "ðŁĻĤ": 32006, - "ðŁĻĤ": 14860, - "ðŁĻĥ": 27222, - "ðŁĻĥ": 15652, - "ðŁĻĦ": 20648, - "ðŁĻĦ": 13049, - "ðŁĻħ": 42702, - "ðŁĻĨ": 30050, - "ðŁĻĨ": 35730, - "ðŁĻĪ": 12661, - "ðŁĻĪ": 9516, - "ðŁĻĪðŁĻĪ": 41796, - "ðŁĻĬ": 23684, - "ðŁĻĬ": 16636, - "ðŁĻĭ": 19193, - "ðŁĻĭ": 30274, - "ðŁĻĮ": 4366, - "ðŁĻĮ": 4855, - "ðŁĻĮðŁı»": 26756, - "ðŁĻĮðŁı»": 15799, - "ðŁĻĮðŁı¼": 26584, - "ðŁĻĮðŁı¼": 15364, - "ðŁĻĮðŁı½": 36660, - "ðŁĻĮðŁı½": 22962, - "ðŁĻĮðŁı¾": 38023, - "ðŁĻĮðŁı¾": 26466, - "ðŁĻĮðŁĻĮ": 21202, - "ðŁĻĮðŁĻĮ": 30430, - "ðŁĻĮðŁĻĮðŁĻĮ": 37127, - "ðŁĻı": 4260, - "ðŁĻı": 5503, - "ðŁĻıðŁı»": 25100, - "ðŁĻıðŁı»": 16650, - "ðŁĻıðŁı¼": 31163, - 
"ðŁĻıðŁı¼": 18952, - "ðŁĻıðŁı½": 34103, - "ðŁĻıðŁı½": 21540, - "ðŁĻıðŁı¾": 34277, - "ðŁĻıðŁı¾": 21979, - "ðŁĻıðŁĻı": 18227, - "ðŁĻıðŁĻı": 26510, - "ðŁĻıðŁĻıðŁĻı": 31702, - "ðŁļ": 2730, - "ðŁļ¨": 12198, - "ðŁļ¨": 6056, - "ðŁļ¨ðŁļ¨": 36487, - "ðŁļ¨ðŁļ¨": 21440, - "ðŁļ¨ðŁļ¨ðŁļ¨": 41515, - "ðŁļ©": 44514, - "ðŁļ«": 35291, - "ðŁļ²": 37085, - "ðŁļ´": 30825, - "ðŁļ¶": 46060, - "ðŁļĢ": 22400, - "ðŁļĢ": 13542, - "ðŁļĢðŁļĢ": 49033, - "ðŁļĤ": 38949, - "ðŁļĮ": 46891, - "ðŁļĹ": 33054, - "ðŁļĹ": 22783, - "ðŁļĺ": 35825, - "ðŁļĻ": 48487, - "ðŁĽ": 11306, - "ñ": 173, - "ñ": 429, - "ò": 174, - "ò": 430, - "ó": 175, - "ó": 431, - "ô": 176, - "ô": 432, - "õ": 177, - "õ": 433, - "ö": 178, - "ö": 434, - "÷": 179, - "÷": 435, - "ø": 180, - "ø": 436, - "ù": 181, - "ù": 437, - "ú": 182, - "ú": 438, - "û": 183, - "û": 439, - "ü": 184, - "ü": 440, - "ý": 185, - "ý": 441, - "þ": 186, - "þ": 442, - "ÿ": 187, - "ÿ": 443, - "Ā": 188, - "Ā": 444, - "ā": 189, - "ā": 445, - "Ă": 190, - "Ă": 446, - "ă": 191, - "ă": 447, - "Ą": 192, - "Ą": 448, - "ą": 193, - "ą": 449, - "Ć": 194, - "Ć": 450, - "ć": 195, - "ć": 451, - "Ĉ": 196, - "Ĉ": 452, - "ĉ": 197, - "ĉ": 453, - "Ċ": 198, - "Ċ": 454, - "ċ": 199, - "ċ": 455, - "Č": 200, - "Č": 456, - "č": 201, - "č": 457, - "Ď": 202, - "Ď": 458, - "ď": 203, - "ď": 459, - "Đ": 204, - "Đ": 460, - "đ": 205, - "đ": 461, - "Ē": 206, - "Ē": 462, - "ē": 207, - "ē": 463, - "Ĕ": 208, - "Ĕ": 464, - "ĕ": 209, - "ĕ": 465, - "Ė": 210, - "Ė": 466, - "ė": 211, - "ė": 467, - "Ę": 212, - "Ę": 468, - "ę": 213, - "ę": 469, - "Ě": 214, - "Ě": 470, - "ě": 215, - "ě": 471, - "Ĝ": 216, - "Ĝ": 472, - "ĝ": 217, - "ĝ": 473, - "Ğ": 218, - "Ğ": 474, - "ğ": 219, - "ğ": 475, - "Ġ": 220, - "Ġ": 476, - "ġ": 221, - "ġ": 477, - "Ģ": 222, - "Ģ": 478, - "Ģï¸ı": 9668, - "Ģï¸ı": 5511, - "ģ": 223, - "ģ": 479, - "ģà¸": 15016, - "Ĥ": 224, - "Ĥ": 480, - "Ĥâĸ": 29036, - "ĤâĸĤâĸ": 30832, - "ĥ": 225, - "ĥ": 481, - "Ħ": 226, - "Ħ": 482, - "Ħà¸": 20537, - "Ħë": 34462, - "Ħëĭ": 25170, - "ħ": 227, - "ħ": 483, - 
"ħï¸ı": 33950, - "Ĩ": 228, - "Ĩ": 484, - "ĩ": 229, - "ĩ": 485, - "Ī": 230, - "Ī": 486, - "ī": 231, - "ī": 487, - "īï¸ı": 37463, - "Ĭ": 232, - "Ĭ": 488, - "Ĭãģ": 30294, - "ĭ": 233, - "ĭ": 489, - "ĭãģ": 36218, - "ĭãĤ": 45737, - "Į": 234, - "Į": 490, - "ĮãĤĬãģ": 45969, - "ĮãĤĬãģŁãģĦ": 47021, - "Įë": 17003, - "į": 235, - "į": 491, - "İ": 236, - "İ": 492, - "ı": 237, - "ı": 493, - "IJ": 238, - "IJ": 494, - "ij": 239, - "ij": 495, - "Ĵ": 240, - "Ĵ": 496, - "ĵ": 241, - "ĵ": 497, - "Ķ": 242, - "Ķ": 498, - "Ķë": 37978, - "Ķï¸ı": 24395, - "Ķï¸ı": 7443, - "ķ": 243, - "ķ": 499, - "ķãĤ": 26609, - "ķï¸ı": 44853, - "ĸ": 244, - "ĸ": 500, - "ĸï¸ı": 28877, - "Ĺ": 245, - "Ĺ": 501, - "ĺ": 246, - "ĺ": 502, - "Ļ": 247, - "Ļ": 503, - "ļ": 248, - "ļ": 504, - "Ľ": 249, - "Ľ": 505, - "ľ": 250, - "ľ": 506, - "ľë": 39810, - "Ŀ": 251, - "Ŀ": 507, - "ŀ": 252, - "ŀ": 508, - "Ł": 253, - "Ł": 509, - "ŁãģĦ": 46023, - "ł": 254, - "ł": 510, - "łï¸ı": 27899, - "łï¸ı": 12715, - "łĪ": 43364, - "Ń": 255, - "Ń": 511 -} diff --git a/diffsynth/tokenizer_configs/flux/tokenizer_2/special_tokens_map.json b/diffsynth/tokenizer_configs/flux/tokenizer_2/special_tokens_map.json deleted file mode 100644 index 17ade34..0000000 --- a/diffsynth/tokenizer_configs/flux/tokenizer_2/special_tokens_map.json +++ /dev/null @@ -1,125 +0,0 @@ -{ - "additional_special_tokens": [ - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "" - ], - "eos_token": { - "content": "", - "lstrip": false, - 
"normalized": false, - "rstrip": false, - "single_word": false - }, - "pad_token": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false - }, - "unk_token": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false - } -} diff --git a/diffsynth/tokenizer_configs/flux/tokenizer_2/spiece.model b/diffsynth/tokenizer_configs/flux/tokenizer_2/spiece.model deleted file mode 100644 index 4e28ff6ebdf584f5372d9de68867399142435d9a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 791656 zcmYhEd7zY2AIGh!Ys>Nr2`!XY`!31bZ7;2Ei&DJL%sltbbmz|8=9#&-TcOCllQmhg zhJ=U+QCYKlkz|QimXIV;ct5}A_sqHGuk-yrXZfAq*?(u5k^@TiKDpXUoFcpNJABWQ z{Y&;bxntmF{|hbhVR#SENE$ zAg8`lw~{Cu5GJ4WrCa9g@Pj8ak32tgoh0r1T~YMnWfIb=2Do3z-Wnn+*}5x>C|fS& zBd=XqazM#Gd@E0-({gn=7%6<&deikqf2oXA?YcSS^D=keg`umx9Mj@%=Od7J4_Z`2 z3152b`Cw6m@@!6SxbC5qVMb@)EbA{?97d3mLmDk9CZ11vdAYj!v0|XQY(dVGrJ=`! z{4^2Qz9oAh%r$bj(+THeS zF$FK@OW7Mh(@ZKamo|QGWl5uwJ^YMx9EQ_A#C%yl4`eDIi^}^x{Cx4F7s!wgUMPx# z&^h_K_v$dQw7gV<%cMSDKK^!1F%bGV;JKHJfs$U0FNIfMDSkx{_s@8(__aKjt&&qa zz3#qdWNTNDNmEPulY0QI1oO#MAYF%mH5WO#<6;*s?a4bgfhgK4d35)+p(`hs?1zgA zn95Yh)WdLFym2ohbK>Tyh5S78jbaM5T1N(41CEqaCtX^+8AkSRx$*gTixFh9d0BGM zd!ZocdEtjRN%~*1>dg0xVQYQ4>pBp8ooxFVmnB}H%C>&`!y-(Q#~aE%DnbtdHqCP| zk@Ds9d)F7g#tM0PVXqBEsO9nWQNROB8d`fveLaS3iWS3&Oj7#HfMjePtiP751k<>Zl{7GtKJ*2&oWKP$$S&G<4d z`FT++FP25@+lFo|hOVlUKSqLxwvd)Pb2u#nwaHXGDO;O<8Gc)Jhji$Q(~=yTk)ho| z$k$O$J;}UKTU$;tvRr0f;5?^&>9hc3dDG-u9raaFTrZ9IqX&VDSr4T5UjK6m<>iP@ z4i&29gY!YA(3W!lMS!R}rmB3o>M<}GS$)3r++faBxoqC-e2vvgyI0Gvi_uT;W%uJY zh54wKL)zi8{3Wt7cLYeg%h}h-jEixbd>d!qDxcg2BpKTMCy(Pq3rX=?eNzk_q-4v- zAiR+G|IPch7&z?*^8B9P72&FfGqR(xgDTrO2?uMFSy|Wn`(g&usY+jZZUmEDOd9OD zx%j0vNhfmC31GhTmajXyFEO7ltIh&jk!GnJIe9Jvp|aIU<2x->OL=dfAHwhzGWq~q zriOSywhjfEisRag4g0|1TJ1*9UJo|aWc*sWXBSXalasZ5e=LR!vJ_6)D`4|k;pb&n z*-ypKwLTHQ^t8iM3qf8kd=c!L5+45jiX&>Xg>*u8z5_v)%jBK!Tv&2gC0=$*F)1%o 
zC&TXtBGkV+NX2=UA#jn{KisIP-Y^h9S5Ng_JTM;I+wNf)4 zXkjy1c`N~HT+)cXrP|L{Q?r5@FyxD5R?Wc~*472G^LC?GEs}Xl&B=iC;0rh{vZ3X& z|BE1WS1IFL{aWO!QF12`QBoDVOu`vy08z(Z4z!HbW^ov)xJ=NvFZ0)d zEWTH6PDziSfhKp-Pgl$IMt>G@%%@Zr><`vP*iH^>>X#o$AH*{u`E;tqUYN+$y?aQUhg zIr(jt`|K6U<@a-dCSX^YIU9gln8qeA#z`AiZC%rFw-=%)VqUybRyT7vC@?I4*Ai^S z8zjTtuyrRG6fj-x?SsoQ1FHs)3r;180Jf zOQ-jnJac#G&_Tw{bpDds!OxoqCM)!%=iiFc67XqHyS?Om`ZYAD;V-+-(+a*cUF%?i zx_82R&UapW*XflYT6IiLP~Nz!?sTzok@1Gc6X^W&a&NF&aJl9B^xeW?r!kSZ^q zqD75=0GeVhqzfteafyqjJcS;ozYeCf`pby#aYc298Ptf~_jn885OcOEH8U!J=MY-z5!JtcQN3Ou-EU-BL+1b7Gn+4m(l(_t=Ek<3fU%Px+vai!jI z9%4jVA$R-=VWRbup1--T`D7OTzWh6c)%BQP4wVK{c_zqWA0gje;sW{}ZKLl) zU`t^d#XRvDpk=)}?a@gtei;I3;;gKmTx)Kv!$0r4S5bgKr`aP82V20gp^kp;1ffNc zlVl&9mPQ#Xkky`p*;>}2)j-P%rJk1?r-L>Bq|)wO+!iZiHdCfQ=RlB`D*bxc`So9u zDF(B)%OEVFe$3L}T@Ez8dkiWl`jEFRIHf&YPJRb~gbazNZ@?MJkOmLjWACCm$7bb= zy+EeG?s8uPfO$<)Ipl~V98PDc3hkSLDWd_h&oM680NJ3xWT9E6nPeHV#pxceJ=Skbga9c(^?InL)=5RWjt(5CucR0qpTV{P~VbgS3PpI3%CbSI8 zzO-+78uhlXaYxzk@;xN^wPNp{nqrDOGY6};zn&IyE z*3M(X%X{)^Td?V3thDarKBk$L$^+fOmT$UiW-c@PK$uLOJITO)_1-=DogD)m4m`$z zrKh^!+9g!T_hrsQd5-b%x=X>P<^;3#11@v^gS@W!hhGJOnmzj4_SczP$Enlr#a--s z^R>FTnYkQ_43C!6SKy*LVnTUo9ZoA-uf|KI! 
zpQ1tr%l5{ALreCja`@A${anh=P=Dqg4Qpzt$ZEcl9U-hVf)uiT-4Bc~Q)R#qT$%$q ztJ+bxEw=&dp7$>@PhF(PJY3NjUdPx{B|~n4F~L(+)!AHLyNWG$xbWqyuo^!Fw6Z8q z$;FCDNc)gmp22P6Owgs-dGCO&Z;qM(^`KG1VjgJF%ndFqfhYz-q4(_vHldiyYma|l zGZ>S9BN3?e?*bXlFt)}$Cr8mFMp!yp--cAoP-qsgz+z)_cyW}X>H&2$*c z&H!JoodvSA;*8FnFK{r)X!zB&K&!Nau#W&NDs7wFW(rj?lyL6`7z7?Gy}onb5`~;b(ClZ&lzUYvdY(nUIVPRT=jk|)-w9Lf zD71zrrBRWnr55L8YZC~|m{!0QM*%I@r5U-kwV{}&3aL>tn*2Ym*Lew zDwa?72IrjDtk#*cXM%NXDoElY7mB%%G`$#T^025+P@kI2b72!%%pZr|0Jic_gQ{AC z7AR%PU=2L_W*0@m4^kBwS-HS{ukmE#5|C*!!EEonbwJbBD0z631LLIm9{?SnswLOB zq!`9$7`v<$(26UajbjSfy#s{lBI8fbOOIX-Glj%rTI#~5F*dH705pM5^QFTi_c5QX zmggorIEv}#{PTd8R#2t(#9c~QTIB`sAD4rz)~cB#tjp4IJ|x;NPyzq<0B*~04P(c` zH9+d>K>15u7E?uvZhPN%z?M{1IPXfl2VuJQFp{e6=h9E1OkF*s?kj-BT#6(Qf9<@} zlH)p+Yw-;fi4B+czsF^fmxptKb^GjFOr1_XA?Xrul+l!Smz$1p9ww;ie6JQ@;yP6} zw8o_+QzcV+;*2T`o>(iCoaBO4t2JQYNU#Zs0ZOi(2n;pj%eFLb%gji5tJ;BK(j)+o zLMkqs&c;cyr^&o4aS>FT$@O=c2TJIVdvRIeOMxa)bs?4}hSAGlOwX0RUo9`K z0b8;|B>pnMvYn;E=j4Qs9TDVpLe}te2xReR$gRKPw4^r9zDY*^4b;5kQgY#5`xWa{ zmrl~Q#Gz3cDL()drJ++58Q2}d3U*q-qZ6tVzmKETJfl6=r(;|sBeT+>3}m?&tD{3n zg~Js(YOk6HHrbezOOqRbmS#SytFkk0b3{gG1#-tS7Xd?YrF``a*jhzdURJDfU(0=; zCiWWGqOYdSU;jQZ>IVE+IC}2(F%$(gQBL^Gg(c-c4*T4Breap)7)Xx((os6w((Eq! 
z4a$n;R5^GDAR7Df1v(-OPxtJEvyzSL+72V%pY;sJI(o$!yX{}pQcP!YYjy`yM5^Mp zd*hBeZD!B=f-D8KD(60Sl=%B#Roofs>k$TGBhq;eBg_R51fh{`FTOC0rT$LB* zXL&dGy`L|q^#Dc5<+6!FoDBV`oE2%Dp+nB@<>Cn9$!s<)eNKiktp}80^PxcGVOrwJ ziK79Q{s`*N1dx??HItjX9C(Hcij7|GI}>Q>$5|{Sm@muz9$chQY;Q;~cn*w7gju3i zV%LC80Vm2GHvlXNCad`VUI~++^NBL(HupL01~zL7wydiGGLXxZ(7YvO zd<&dW$+Bq8W~dEUwzSW*%Z}}FMk>uxE=#+BO=(lv)EWaa5i*GIahZb^3{=&pJHG?6 z^6(iT?f>ZIUZ0HH^p~VE(mmf7gR$HP)qXsM6m8CiLdXs+W#g3969N|@XboYPHcOlBvjyL{RhpRm*LC1w) z(@s9izJU&6M=XLv1pWTt1aW3y{lVSMd9RErANw z>D97OiDHaYp}N6ft!%7ehTre?cEexTZA5`UFZIV(BfHtRLjZjs{X!rS49@6qfgUx%W=8|im5bVXpvfHgVEsM1G zS04gdax~348SxxAs;*v*_V)jJ9>y|Von@{mQ(tpIn9`-?w+&!RF05$?J@Zo+3${C5 zynMU8K9F`1mmb)-SV_jqI|l=->gDLJyc~ryrWu@x*}04ScofLAP>a3bn-;*ZY2;*5 zOWY=(P3dZSIl>Dq9iI+@peVY@E_GS%ym&b8y0FZ7PBYl1<*$08I;X~rnkmkQ+T}9R z(t|LS1-c0Py%KDaj?2idD(Ah*S3~Cwvmi+Qc&VF<%Q6v5Rmj@Q9UM%1oO3V|%DRk{5Wr{4rYU9nzq6Z+%ESk_fWUE7~(~yUtEkd=GO0&fdGb$w{ z`LM$b$qD)45paFA$uX)OyUbBkzHl<~%(E^OmOggqR)Z~5Q?oI7<~{d0>CwKw+vGmi zGO1krz55&vjs1UepS?sv`fmYR)@rhO8h!s?od2}WydP0)9&+I{qV0A^l!hkwtP&>A za8^MZesi##^gr0=ps?a-Kh?YuZu6PNVz;Xq(6UeCEZ0G4QGvP*2T_+eSLMNHm1D7Qgs8SgLHukoljMp-CIBrV5 zs!EogiPP*O<1#29#{{lUUiV;45>85x@I&PgCTRbxyq9vHwe1BmYbr#s+UJ-<$@WT^ zsLiIeHq=SUG$^|5b{l6ekdhq0>P{@n$6sfHO;-aja@Cvk5oaTE73Y&Ul`1=p*rN;V(qh!}0Q zR-S1ELH!sdCme^%M$ld|s*CfNrfbBA(FcO8>Wco`Q`|>22xX;a7=%ge=i~C;M4(lS z7ax>b&wZT1@Yc%*nkFi}Oh)=7!KUS^RHl$eStnOI%Af9q#d3YE3q`#S?~ceeo$09u7|T^nEf%Euaetugd*$1sM|RNIPOX#H3!tKY zOvkbd?r=C?$4>f%iqVJG!{zFB<01A&nuo;ha{)p-+v86<9M|TQT?MwHi6?#b055pi z5sA?Lc;;&m6bPH-EQZ$tEQ{sTg4XW=E&l^E()&Y@3ClTyoLsTqLXvQW^x6clgaUd; zU%vVVLOTqiJ@;GOrYdsDViN;c$K4Jo2B`7UY~VHA9fHa_BPB2Hh11gYVzR0c$Qm5x zYu%EWy&sH8!fBX5Zd7cEXTr+;-r+8S0hpSO0a>Xq!_4Kek6qu~g`noqR1R+BPQ0zl2>F2^Qz-DAbe=xe$UO|2x0x%)*O)dtG zbm2q0N+6}99HY|@VK;;&}T^!ywkg~Awm$XuMJkgWCs&s<`mDwsF) zxg57eTDfXjMyC>^uYt0Nv z$kaKDaa*-0t&;1X0GT}4@G`PxCD>%psiDpu?s?q;>vS~tDQ>HSu>xyPt+Xe74rgV` z7FyaN@P53v5)XY*kL_Y5{aXHLE*rQ*b^EH`zO>k$t)I9?U~D3DUp 
zDe5iH*9Y|zDNjbe0Ab_n8S>cM04owQ$<&V#;kS?Ata8SDuaHk+K$gQlbJ3_3nDKuj zgh`MfJEOk=T4kuvGiRBwGW7h;dCD*zY}(=?@#!+t(qyY6^4UDf?eqtPl|x4BC)5_D zOt}ooSSEIYoya=y2QqV)3s&RhQaUnUx|u-m5iRx$;drNLs$nhG%c?-2br|xx_gJkMOwHJoWMZ*SuG(fg;=3Z za>oKq`q2JSPKWzh8#ogy6$JDLGO;sElxMXM9^cJjCMO9w<3zB@6<~L)lZuldELxTl zsqkb-w^A1Z*>q=U)qDLR7JBl$J~9L0I)I&!!n!w43o4Ko)xlKSx*k!2^zA&Y)}Ekg=FJ z6wC1a*W)hgShz`d_%aBSGUsy;BggnV@kuz^+M2skGV~eTR%~p4bB^{|FdgYx;y-7; zwB;{-0k_qmgeUi`a%fzcoc%J$a9c}L) zD`mN$4Cv`D`4Ga2y%N({&yU@AU4#a5)F%)oG{)ONhJEI6mrfW=f7|2&gk7oZyBWgd zADEMaeg=_$J+(AuwD)NA@fXh@7@n*)Yb~Qur|~+b`*<<6yt|{a*ze!8ps<5G1h0@w4~I}m8BpR! zx{slqS=t^0VOn5>&dFD;z$VLR8x{Z6!Gfz%yImI-JRNSl4phRtlb9Oir<`cswPyX+ z2e;Lmk>MFD`h~WqxF~{r9kuPez7WMW&+6v#{{QgcaVlfxP)BfR9(8Y-0$~M#iLxZ_ z!k6ptoSo`$jLCy+QfxxljwhA(%1Y;_R=dNQS+K=8z>|t;AX7P0xNxPuqu>HC__1wP z1GXZaK?mRF9H8l(=7ET-l|bjTb-C<3+$KOgntjUhi(P3l%%T7vJ>G^CA zBJ0`r!bFWi4}jdi*kQH@6Vmlbhx_X)YTLh0}S%F zA`2d4V3O;Ow&3Anap5uMm6=~zN;)?OS!7ub8is@VS*_tv;feD43An7(7?|mm6D(e3 zaR*pRNv(Kabi!@w(k;_Kx}OALn#fk$)PIpurt}f4sqzCI9I5*&gMp^ZoFASoxoId& zSPOFU@o3x@f3l#bKCU<&!USfV*9lTWE|~;p^|4=_{Bjm3Djhv`6o{9AAwxYS=eut@ zo}$`DX&BR0mCt0emtu>ndnajBRsr8=aSYcp~fe z+aXNcDo?i6qfuO>dVp`FJW1LtalShDlH;EQ*&HIJ3yN@#GUq8s%L+%iY@zb-vv5UC zhNk{Zb8vqZC~7M@wUf_QyGWvK7nHxod5Wu*OO9U)LGkKEo-J?UwiLW{mF^yV z`xXpUJ*E0hybrM4b?zoR*MaCl83@X zQs`QGLlbZ`Rij;H!abmxBOqP%RSQ`l+SvMHMav6jt{}_FSLPgc3C-zd1b+*~? z(3)+beRC?bDJw=fE*}ZBte2}HtmPOm^)H+fFCFjv4nUmiCplQdTJw`LfGAZ@lk}gA z+r-n|I$z>bAWXdKY;}R%cxmf7KV?{W*$7KOATOf;qsr9s`p@A;aL)>eu-B(4bW8yv zm5~6_oH-SSmP=7F?wbZSAsG=#^3yzoDQp;Xw1WGb zs?vSot+f!SKk3QX>FyhS19h%*T|I&EJtHy`0%;jnCdsq20jA^xN}fFjXo3vV`>_{Z z0ygC&d=@&KjaYjnl*y(?X*dCYHAJzcU^dmM=Iz(PX_nbaIdq=;&glh?|1eS6?`AlQ zw~A_|$I^P=;=F0^6Pjh#*K^!6V2r@uiUs&PtfVpGb8>@`p*WRYey@v@1syZ6g#3Iz zl*wA{>E!PhC9H5%QVL|sBABRJsR{V{#b8rN7Sq6mkGRj_ykz83a8z4zsTv&|#w~{` zHaP^+3F(5T;mCiC+O%XveH0!k>{?lm^9-{;zW}E2xcu?QYMhpM8AIsiSAZrh+1Ha? 
z1J^o6+cxrl^aczm>4an5ySS|*tIn!nQT4UuJxJ3*j1eRuZ+zg0@|4f6+Y{?tFlGu~ zJ~TBTVPDjG>~wQ_o(eqrsvS)ss-FnIrFB68&wJ4{SIYH zu%j{BWKaGZ5>dG0Il93SMIHA?$3a7&dH3nfY@WB~07#Uo8;aW-?8brJixK_#_BZP9d<(jA^~0tozaWU&AyRkMWaTNWE22 zC*LVyc~1K*sM_^$!PG-49(I%4l{H^geljP+P64AeJ=K0=Kip9}@pEyVXYV(_c`x@E zNnRZYHhGgwvg>5nU@Ld&W<6Srl6u|@0MhO=Ny4WZtsKR+u)0KUsZ9_}cAY~sqrF&6=2u^xqZFYY3U zhlln8IiSLMVw09)l+G!~=$$bd!T6~z7_%q3q(GikDoQI$ZVQ!Q%gIRgk&@Ci4H1rF z97dm81+c8J|EF2mw;IM2L1{_rz_1oUb)Ydv&2W0%Lf;NOnNiE z$+Z{5nDlBcn|_t^kn_V8?YJ9YBK2VLk_|UHA1oB}G>W6NO&>E~WMQtFY)p?Ns-_h1-N;Skij~-@FTBX|g&`%h(Sc4zEJAUgtto zWn((>Uh^r86}21FzfjKN#j>C$65jvZ#gj^+jsw1}_mRx%+~X%@kc&xxocx`+)fAuj z(fJ&l&FaQzgP&k*;^}8^jQ!Vcd5Py@>m>fB1N{XgnT^}&f=u=QOnYl zX)s8DF|0v=(=w>@%fs_PYMsa4^d-}AlQM@{ax!fuz!DmwM$8w^HII5GqU&tjRuxA0 z^5+H4C)e2%^3-MENdFkWDrM_c&O58f!>$II0CCQ+ym-Cytyh=>x$<@h%aQIzbBZM= zP49y-$H**gV?GA5S~q)l9!=V}acD3>^6g*IS)m0GKio^neU_7b{~WMq7(l z(%*D`5eh9ZIguk*<2LzfSj1fWGSKpxNXb@37Pc~#XMvKDwXZlT&+9LtJkh-SP?8 zB+_j(ov_?qPkB||3~grLC`AT!KDOJJxKU3BJtubacL2++E-#Xs?HixkN#Cu2sJiQE z1~msX{RPglrl%ui-8P3O=(N4;Plvf{%3X=Qk96@fAzIP^XuCn)5;j~D$mX}}Z8Qr-c)!XZtiZt`+v}|qULg~~8NoMWuBF1t{+HWgC z$zngE7#|Rjm66_Z)mds@4mk`GRi*Nb(@T#6(1OdGdkn4h(^D5 z$8D8B8VrUo)dbNriW%WhNQ-tf#XQV;@o7|ce)Wvukffz=Uc4{@w-vND5mIkA8iv$c zc92WQ25q6M&Xhw@xx;63D9M&~OcpJ$L*Wj*Co^328hT&U90# zrY}F?Kj5epPN(gpwXK}5q6Gn1c^E1~ zFKJeT%VebnC*-*4V3W0T2l-~U`^r98MoQ;^BWZQ$2;`HwFo<$e$Br`S5_9W_bKljt zO{{W{i*vf^^}#jvJ}#GfsS1v zX-S6NhR*hyJcvir&(JD4cXlzaza3hVDlpUn3vVn6%6#dP(*eFbdOilOMLmZ3@P+63%`6Hc(cCBXaUnM^L2h@ao51T_7yBM3!0J zzq&gQs}|`XcW^HV(`PI@T}t}^EvvL-8lYVDZ|8wiDRN|AAoZQ26IieN15BRiypr59 z0Lmmth2#7B;SeSP9biIUALGL4ri9k3UQ--}%{N@NU#^tta}f5y@WNr+GhIw0bWdrO z2iC!wQ#GVB>@fprVcq+blgjmEqdUT|^l@ zRFszaSHPIadhaffo!2@qvOQvuEl(WM^vm@bv2nLKF9A)i-JL*c zkRCc4ewXuZ_Hf41h02+1MZ0+*ZSRIONvc*ozn1-a;PqA(Uwu3u3`c$II72 zl1>fgNglmaT#CHp8QzSZFpk#*UW`8$c#ycrgk?@K-P< z1?NS>TbtFJp-@A3SUvHR3&HfTfIgRPx6j0Qg7nz|usjW*w;rv~l%yvncxvb`h^TmU 
z+1Fv8qr$ku7ePMX7ht}5*o0Z;^u`coK4H)|yB;-u|AU~(BPW_gXeJDyJ$w~iep_zhjr~HFLeUjX{*UGiZPd}mL7^t$=O^5_eW+})9RRXk&q6`yEf$l^0=ZeD;1lp{i$o7 zl%$=9(Q5U0rpkFxedMLtG)Kgk2PC+31z~kM>`hsk>Uj2G0UB{D7z-BF;x2ZRzPzK* zq}K_O%&l{{mNqFRGyaJy<+GV!qDpeHf7Wc=)+vqTP{xHI6Ms$Cbi2pO6(72f!}M9c^Vfr|lIfc-DNGew8GlyVV&!nVR=Yz#fr&~f zyk*ky(|-_ITH_C2Kv>o}ES!+DHiAu8rL>6WH`V*+G)h_$-#CIopo(0pge5wh%^Bt} zZ~o{Q<|}&Jc(0#eEYk6m)1N=PIP?%c3jG~|szo!U@t>_IDte6Da;{=aK{w-wZ}J~5 zg2CY(?je7PShX@J|(9-$kr zdzaWZeWCQ){anC`f^K`h1Swhe27R8ahWaj6ZV-%-lKutm=Ck(Tvd zXb@WSUOCQDEMYLL`?BbGD6*=DK%P`Lf`;bP4gUg|mN*bB51!zH+FhjS@Maw zo+nlFg|75lOoBxv;X2?Q4`5=lZVpdj9a0Wuqqm;R3Wt^blW-O{Q+a)1p?exe>r)Vy zv#N2MOnx1&D^3HN5>-ic^1?ah7n`;Gp>9$Sx6M1v0a&dgFQ1+VvV`gG^5H%7yU&NS zxbj(j&@Vj~!UU#=V3`Fd_5>7xp-Aa0C`5y826$un5D400MdVMi?vF$iTozGHwAR z=|=AYNl!~PtL~^%^kky&lA;;7

l4{)H$osCy`k*$ zC2o_heS1lN1FEmm)mX^Bg|X}wXNbg5p)3-5mD2Y+7eS9sa?)lOK|s4_Ovr6<%m91y z?G0tDE~Pp5pjNiZ-=QpH`Y2;uxkq=vndz|poTRRf%6Gc(Ilm%2o0I+%iX7=Xn*08R z+p^zJw_4i&4Mx_}uo3ThOtF;NO_#s-0-9n5Yab=6_lB^-@wrx-t@BB)%|4K(6T3kn zKQwgV)O5?dqXa^$7xn%9{c&6FB725p4uvv_)E+0h>Tyihh<}Oup3q0gUTFrl6iW*g zdMx72V_-}$c2%_3V*)j#Ii%IuQl6eZv89WpRtJh(TDvge<8$E?2#t=1L{c_><%AP( zM?EExhF3UOw}rDfS&2xlJjGg}VGU9xqq~AFw>I+RWuj6R?MQaB!)~Z^ zH#p0Ym9IS6(?x^@qYDq=f-T{p^f!0+1r{gAbmLf?f+UNjX8l}5;fXF^yg@Kp^tlQ- zSKTHZ-9TPm9_etcK0vhRD6nM*n>ORmJ!KFkD;Mi+aeLZ0IBOAII!NwxkY%ve=QIeD zg(;I=pzz+hawmFlmcb$R=F(tgsCF&eNryO2OO{C}>x;&|BlIC8p3A6!uvBU|C4kvi zx+EQ?%LA>+4bv`|KEobJHzh2;F} z>hM#X(b?fOr1^RlEF%;9K3I!zr>x|<^B|G2 zxP-2}1Z?_9a9%t-?y}`FILkdVknlB|4VODgM?Z~z_Ek_OE9N*ZgNO5+^~#z`I65hF zuLs)j-by~a89;e=?j%p&g40wrmB$=p_5!fwiY*t`42Rs|2yTf{F*`1FA%+#0>vX;g zZ0e+oQ&a2S_rOrivERw$2VHRb0jhD&#SoUy>LjJEi{VL2;Y9&xgy7_YE zYAB1J2|CKSLJ87-@nN~~AK&PIDLweAvuGUf2MG^{KCP-H5Ab2Ps}C;u^?TE{aLq zQIi?}10#DyK?eNjKI!>=nXlMH%Vbls>nC8;>h!U29xd4dqxxs&xK-UITCJW9UA7e* z={S0@n&sF-+h9#~Hog4pHwcpF0#LKRa1tBmcchiNttAf3%hP|GCtjM|u}AY_wTOi(pO7omX{9xRgGstOpWn+-RQu4Px_ECWlcG{z-jOF7LRd~q*}$J$;(`qG zG6mh*Yc+N{SFweVIQV|E3XY(;Wp)$(QDB4EKtUx4Bslz%9K69acUxEfZ;!m+QXSj z7!J$z$)yK7Kv~%g@bx%wSkv$8>^%C@Q7%p5C#5XaQTDp>+^#Ulqz_?VdLnMLmSXPD ztmv#>KvRjngcZogeIQI{`V5Jj-p~A#;RN?crw#;K1`NJBCA9 zr1|h7*XA-u)Q0DPmyCs=$0}qvv$tgu0P*t71x}3Nw&ZC(X&Ir|y4Ucf)F4S$-5?HW zS`42{*2&nxKAfe=PCSqHrjig=8&Azjoua74($7t*0&2CT_Z&D4wWtmt5t_d)?RJS@~EI8PUIM zc=sXPk*XQ@w<#nc?$c1;1}t_T%UJ_GrqD*NS@O)I?pr@UCEnveD~D+4lfp766MCd4 zv!4XfZBCPaJ%fw9^H{>xl{l@L^yk%DiYtS~=f)RcqiVzf6i%z5&*zmS6gL-_yol4% z(wAhlS(dJbvTSI#`NJCLgBv?)z?5TNfwD-MwbG;Ly^o2nLz1Ia3t5LRSqrdA8H9b- zfp0-XTI9a6T=6y-z3D{ePjy>?>%n}K6?@0|9__I_JMUw#WxszxhHfxKGo{CPcO!^& z^&q<3{T0BX(sRMo*Sj{s6cc9M+W#AeN2}qq1#pO*h&JfOoFye$x>XMN9CE#R?j^|9Hj4)kHUKOjx{ zV}}fsuN9i~3|m-o|8!xPBWCC{$!hP7&kAsOSJ*y2}UVYXEDyl@(ioEP|JVdd9@hGK2 zi<8kvgD7SPQ!5==2N=^@DO2$9ovLFy!dZ+JDQfA=PGGBd zT|3A_T|guszQ31~kGcXQrRl*DI_-VCnGePcE-A<`g%*6^bY3q3S%L%DqY9U0TTgOa 
zWVhq$dS5upHbyAy-#7PnM8M>j+3Ro!6O$)QV{+68uw|}{*(ejU-<2`h`turyPZ2ab z4a$UB^|1693!>G8r=4$C5M^7BdXJn0HVNoOsg;AzfS|50`pTK=w#2y44<55|3^*y;?g*RYMlq;$n%I218n=bLLHz+#6t^Awmg z1NAQM!X++rwe2x5bAAlY3WH%mje4yghhdmJL4JG+m+7T=OF1E9mqSL?5R0nLrPe6c=PsD|@)UB(btb~%7MVWuS zWib=X+zIjSUXJJ=J|_8bi6dyJXl=#5 zj?k5~FR$zmVXBGikn79D17S$R&IK%KY>1JhIxD9h46-bO(JH~)`x z0ZMt3@K_KE(}UA8OaWQe;}-y`z-L%3p^B()uKaInj^dU9qLiUk1o0eSlWk zI7&ld^B~sDf5U~X+?T7>ZP8KjxWLjMY$Egzuccf+z&tY+$khX#=LvcP^`=2!%YpX1 zbbogYb-}r3fnmJOa7To8cgc@-VfY&>UZ#{M#yBr}Tt6eLl|bd8@wH(Yzy#D66{UKd z^IaAmBK>YWL^MN<-epY4-xFOZOl!XEGYL#h3s=I@=?svmHT-7)y4MDip{zxPht8RX zs5xlxS&r6&nQSPBmZ-`Ki;k)~Bqaw{gQ8-~spWjdGzerao??5y7HowOzAdgj_3)W+ zCWG2lb<|3q?Y!xNJyBp7SgM?P<6vocGw0=cZ{dVkpIq2u4&EqI-QT=V-4BU6>JQX8e0%W7|OJ_y4MOlI5+6E;`YzjGCscMS7g<>cF(_bU6JnUXu4 zk5WA@()Uj1gF{!kVji&2`QY3+|C@3b*gCKQDVcLW2o1dboSglT^H`x%imMf)I}J2r zv3Z+XkRzAiw1zZ}HCNkZK+9v^j*354&*-5GH2=|4_%Y>h%9O%MvmAYGxeJKNQY%&b zX&B2bo6dpsc@Av#2OZPBf24$^#n`R)%FcfU#>zB^vr@p*$4VhK!?@i0s*3?h(0Avx z5SIBaCraI$=2Z`H-TW5rVods<9y}HOHk6GC3>$e*U%|;szjvHRJqaJ@USae9T0HvC zOt*c6+l2PQA+=%ygjI4@Pb_)3EwDp56Jx9=TRsI@w%D{%F)#QWCaT@4LW*&>FC5h` z`ajG$N}0a6y_S?^8^KmGTuzq%eFcov$8A!Mkbe(h`LX9lLOc4D&9Igh^Ib0Q(U&M? 
z5mhl_&8+vq5ECwpFICD!jwi#nY8UJ)wghk0DLjS&D@_~*CFa7wEY3vxDveH{uWe34nv4juZ>Or?aCMauhh5Hl< zxmOu9T$7R;4zB;G>;6DqI|Rmbu73!k-R7Z(LRp5nj}k~mTp%{w^uY7f!(pr%j_xmO z6q$cEqS(MX(tYihlD(U`uPnlNlA{IK5{xnC=`4B2KPZpgeNq^HEWT)ry2`#t-Ik*a z6JYws*N$^>GQHv-qS}D9N7bol#lPIgA-WR%L$M_o&F$p(j*imXi#l!Jr<0>-HhL3J z#&mWOF{iGQL%V^km^pZ>Cxa{GsP1qk8*`8h6S;*aTBuYIZg|S`iqSsXqgV9CX_C;w z>A7-p=*i}rfv-~ko_(ca8#YU$>7;6CeP3uxW|Td-`BFdUQM(xa2P?Gh{?s3u)Vg(- zAp>w)u?*J%;!DMm9_Vl@khSYzC~L30c`nsMfGE8N_vB%?t%yfpz*J;$vBFHq$Pr+x zdeP)jrTku5^A(=8Su)CD7Hk|N9S7DzWlj72c-)pF9j+1PjMJTOY*Kl7X`=JNF(Xy> z5of@dOcPS_^;sYjeuNj7{o~HNod}Vs3CHj}r$+En1&pa+aP%zW<4Gu!*$iQdi?OEV zu#}6V%>Dv$+EkcmOmdgQR!xJn7EQgNM&<$tvZ~KVzOHVIvWmNQ3-iFJT-0R#Z9Uda zS2@0x?!?}_6$X6f4Pz-%EdDbHOD4yy*6l9%JQ7{k904&=>iU@ZFs zXiOMXm7p3{rsSh*and%!P1^J40W8`W#$|b;9`k~FAPcXzh-j!x{}HRlO%RqA8!)=+ z|93s3n=VZ0ZlCWW)pz&grv+fuJ#Dz(Z5AQz%VYJNkXg4wn0ECoeXSM$zT5fXcslns zb}3=Hz}g}C``vdA@?&H;;34-t=2ggUi-AR5@pljhC^l`BVu#|Y`oqox2lXnY@e;6Q zDx5CoWWrJyME2PP?YIoLbu*am=_t@k?bPp+{L_6*NTMV178 zqHIuTN$8n`&>&j+vg1xG{!7o(%2`6ydb+9NH5U!DWJ$m7;5dpr_7<>M3G^VgeDI!Q zLR%aAMeoB<&BC{dmPTNP?)Pr~fG?KCEvc% zjJ&Ok6)HAUn&D<8ta-0m#1&<0-px0{nh@9l_=n0b!B+H4H^TXC^RJ;uNB^X?^(New z$+2ucy}H@?W_`>3JeCVTI3Ib==X_TWdAC#^{E@Gg+(dsq`;_uin#r#KMXlq50nSk#R|usv8sG7XzQT7k3HCh(%VT{xlx>-Dqc9(QFd~ZdMZ5J8%X=Zpsdb~_2vG$bz6>E#fGb>4lpFnD~%00xsUqi8|15*GS=Q~I;-1^^E*SD z{PfK9dsW?BxI%UKRCvSgFebuS{R`u(dV-@W$e^Pg;oOtzDgC=>=Dlw!MMYE1%kRAb zmZ#dRoOlYzY8m~vWcoVDMP2^84QRR%$S=xJTJ1Z_{82bfB{7U(i^td(E(ZUuPAZ!o z$bN9Fx%Jjh={Vdb50^Yrx=fm;6kfS(A`ef*ZK>(iMH=(_lVOU*h&;WffK6>`-(V_m zCWOUk{|X&Q?OBcr-`v61r%I7XB}#1U;U)uPN5~oKwAj3Kj0a$b`Vgj^ER^a+oMoGb+rfCm6Z8DE@ej3&A?OLMGL^I5}Lc&G*ddFz9pO0O>sgr{yNR z-ma~z!Pzc?1c$=qa>Z6)RD8}U*U9X2oR{G#?$M7t7aZx`%i-ZXB`l1dQqw~LO=d$` z-dMt`nPJWxn5cpB?{CW$S|mJlPSuW|?}!QEDr<)l(Tq2t^B&z#?t395`s09{w7A&Z zdTaI?bz5}U%4i2IFR_o9=H%H+ahrThRKmTnotHa`9SXKCiqD$IF~IIn<%5BKi%znhoegA zfpy!V&7CgP4BiHjjf$h1VjB}3?{YpEfs^w0-C*>RUA0{PbsugMRR3EikO!48!7!k6 zzl)Kr+<8R<0K9zdtT 
zGjJ*yMLX|h+_p9R&nui<*?f%;QFY^n_r%Jfw_O3(U)Ld1O79++YyC`xBfd zR2O=e<2L|{I@dwwu+Lo3QFwEu{tP6!bJ@%^&kyy{)-b z8M0H&bmHDGaa%)*{?`C&Oxgb+tw{TEhfFtK7JLI`;^;pWW~Ar05GHPnX-%u|fR>8B zwLl~LSsAj{tCQTl8K>pC+~Y8f{NOxej#umah96;~N`xl$AKXs+2?{A!pI;?!tJ^+v z43xW)z8t&-%Hrg3A^&x_6>I`pln4F_VFC{4Hsx-=0j&j}mXdD2gQDI}bEq?umz1*@ z<5^w;t=<7916*+YW2ZS;QLNvE6S3%VDkt9988_PgOBc zovu_=-NJe6RrSQGht**4v=XF&nOTn8C4#)>o0BeS~Ea#a`SD|{B4{k*O)S~RS%E?2jyG2Dmu z^<8>4gS$bS+&X?!OBsBRIK{&03$h3H#ckr)!={0Zsi#ITCV6zvla#W8*Q=({Wxacq zwa9h2^V#q5Wk4xpv3KF9K5thlV`aq4vh>Rr^@E^n!-?G^{oPki^3#A18w_CzqjT*g zj|>A_+U!c`OB!l2KKE4EXp|o+Lq>osQGNHCGHx~!#>$Vwz4FOuAjQ+QyUZJd)5Ku_ zA#*LuT;T8@9{Hz-agO2EgZ7Z`OmKlt(_T$3I~~H}@5l7|r-?3vp1)yVr{iQ8%Lf0k z%Q4?G9Uh%#A%D_;z@~U=RM@ottCW>GCL(QpT#NHuu$*e=uNc?{N(@x)@7)5~Jaz!9HQMs`fRY(i&5e1?X+_ok!0JFe<7F)VRddKFS}g;veY@c6`q2-R4I#cIK8Yq zeWAl6)%tSPMfE{=hDsIj%f(PpnNO#&>DjGWSNwz1H^LZ054#dBs?XT^uDA+lDb?y+ zzVEdT4_dV%C;zz)Y?&F$@p>MDy9vT%=fBKOz14l@H3#mE3|Ig``{>+Lo{pS7JIRf= z!&(-p7);19klqP}g7fV9zo2du>~T*;j$H^aiH3Qs&?*#1666Y8>%X(!15cXsA1HU> z!6K(dh1)kT-VbF$mQ_oKMJ@&oQqbm(dJuxNdEH5#RJSEpp5;v32Z}8=eWX%VyY68Z zKH;e?uIUmB&$&Zs{0MF`{PrVqa^&pXO?o{FYqcbNwUGybmpZ1Dhc%g`^?w2;N?HFA zW7rCZS*_;f^k?g_TI>#Z-r>Qz2e)2v)VH(S6}}j_W+hxSux2yNvck#1Z_1hqd2$cs zw|@Zw{qR3;REgJDf$X?WG}Ku)a`$R@OMp_-D<}!sZw;Ic4~&35lhW`4WbQR+YlcG^ zEjh|VDjzCo$qozuuwlqr2-AwTQ|4bi-+-Yc7@4HOo90e2U)rK>D}`hs2oDM#@sMH+^P)_4$MK>b5#~Dt8FK08z!m z*T+te0AnbZp}yoxvC|Cy7z#D-P%_F7hk;sb0$REp=}*f&itV$W82jIMKqA*3R9b!y zhzgrh@Z{_tAx!I|dCP@5``k}3CR(*8r)&YK8c&q#)M+I?Ahed#Kpv~-nBeNUJRb1e zia*mJLn-4Lk4pUtWvOr@MeA$t?Jlwyi_~0}jO_Rg&N31j0sh8CP#aeKxn7eS(>TS+6N|lJ(?WO zGQLon7Q)7~J4QOs)6=>Vumgm4G*%Yz`Sgt^-;Z4L#R5aItyF`m~mY;^0v zzXz0w;s}-7TOh16p#7@FR3|)_HGx%OPn2)f87Vi&b7M!=AK~JNg>RfpI2yugggu18 zQt`)Q;HW;S@Oj{o&0Qq?!Ke0@XSQ$*I@i_bwaRGrwez`NfknxTikTQ=Xx~l3%_t{5)onhR1IHLFYg;=e{NLa9-SZ!JRXh&^qQP^0&e8>^dIFPR$#@Y4L~0i`m8M!8qgQucp1K(^H9lY6ji~d z_E44|=AI|YpB*d!vznkFZ*+74VyOx`0UjdheOV<1wU1_ z_gli@M6OUFpZQ?xP_#P<#b-8K4r>LghkuVvx}>m{Q}mC{#jPdTLmJLDTHiDst8UYx9(d6st2Ij5_+7l&&T{~B 
zAgywQ|4OeThkr=J=v3fWbmF#;7U&(eq*5B&1 zk*xkEOdw|~ZGwAoY##a+;3DNrQnja(l>;t?iPWxF#_qWcjN0|y<-BWfTgw>D-uiVQ z)2JT2*XITwQOb>>NNMdPGY=Zsx*sR&Zop{?hBNv=j<^xV667ob(|X0`<21d%nwO?G z!amxbc)&LWG;7Ji>+o;UB^-~DUObLX8obH>k{Inx_Y2(nD; zIDsz2y8>Vx-vcrGB8S`96~3pi)2rn`ps2&Ky34@K1s1ylqUQ=|*9Uz;ie{v>%2oM? z9Dx-o-n!aq_t0jR=pU9#!jd1})T+?n@6y8df>Vyy=pQOfK|3Y(2-azj4)L1N{qbt~gpyHdZkE_n!}$wWXGLQ#bEWGJbNlOBPFEA2A}KAuNrH$~Ea z65t9QMXO3rb^amD*`Z#Kt)75%Rj|U+q(d*^ARnyE3SVA$i+up zGBb+L=HDc1?Kq|ckZ(_8{P+1hk>qNhVQcjl^F&z6^pAH+bd$_Aetwz%EfZY@V9oy0 zD|t3}uV!1c&8q;erO7=}DGKSxNb4|#Exx2~md-Q|d>v3@@1UrDlb}n$%Dg51cpKU& zVQM_~-JBYO6(!v>8c+9%t5tZe2HCu%Avv>7QXcE zH-64Pt6^`|9m9Xk@ogNkVVX@yoVw3>fb;tw`6Co#ZAl?XkkszNmb=y+w_%J z_GU?&_;+DuO8zc1%ipsE5aNFBuGn@-2PYI_x`NJu(xi4>jH3lw66K8Y$1at}$yQes zXy~$lmIF_k&;y!EfcAjw+Xa}<_5x=UgO-aK$(=ZBm846Xj*_4)u%-MU#Cuy zuNQ45iicv3&I9XfgTO@ci&7VL>*gOc7H|N1*?Of9WPH~h$E^?I7Tehp^&3N(Y2R?C zT&r#hW*%!%Y-k?YdNY5-ywB(`cnc_VTq!g8A4FRaHRSNqR{8hS5 zaSF2P=*KUv6+v0CmWpS$CF}~esjbu`pX>x^0kWP%`~2I^0M4ro_4nEXAv1v#@RPtJ z)`LJY33L1?yU}Zh0+9a5jCeo8J{gHZpoXY)`_L^1u;sZ5dS7(WMDhD}S*P%qeojOHFP&I*-)O71`uL z^k`w03F9F)8M4~hcMPCcz1b;`++RH-)PiYeHaTo8r0a-F54gGA3)&2g>A|iirz$4J zB2j%^t4~KaeNqlGwV(sgQB7qvV5ZLN^D|J4ev?5hE;czT+yWskFrMf90^m>kftiEB z{i5?gD9f)VyDD?fK|p4s66Kypu}vBQT@Kw>i7Q3Ze3F~R?@ND{$y8|EbpFg*O9%IF z<1hhE5-D)*DDBA98GvR4RqzI_P-Zm^DlOA{qpcmxvfLJHqyZg}(yTjoC7H3QQ z3~fZeJrcx8qvCCeJ&(#`pz`ctk22+G5ND=z#oMuhoY}NOBOO5<`Cos@Q>xe9z|&8F zcIGVP=upR=2%sK`7ntq;N;pe&HXh1p_Cp2ncQ+mOtWyX(11-ogrQ~??3xSeVfg$nU zQz4vsa)lGio(7vI~NJTHFSoYS*3TwNjB33?e3SKR_)-jZ<+9{w;rx)o6MgukHre-L)zJjUbs z?Kc0-02!Am>=b!7f(<}?eFunBl(jXEh*_k+d#68P4-$X5i?ErlmfMvd?}qjYL^oo! 
z6PLBv_v|!1ot9DxcKO)p0Pwzp+xAFbZu!{Lnyq1N#S=Q7%V4TyWxL@I{sUSmxiW+|93%ShKuqx^N^nCp6(?svyQZ{&!z_3`oBIf9-*7oMJa2P9Q41}qQ13*jUg zgE3@#58BzLWvl9IwfDiYs*u-%#&p+y=0{G3!3_&q5iS&SG)-^J#{gz6y;j;7X9}^9 zSe(t`!GxIk36L|VtxS)5MEfIb5t3Evr#WJm!O{5{h&#k6Mx53TKO(lr=fHhc(1qHv z9lLoTwOj7dy`(T@*5irswg~EG6yE&5BIvr|ZeVQrHKbefR91YVh*wq}OMOGooF-G4 zlfQ>{c9}*nE*50w$IygWsXSgn(u%YYEB!zm*jR84eR#MCr^)*bUGaH|u9S*y(og(v z-tgfXCzZ(Yu)4z{{)M>(mqqs<1mQYanMBo++x8_pS1c{V%zlxVjH7NPXtx}O`4(&nczHsY<4lvA zug%whc3!zwG4@;wFq5UaI)yth^6zkN%u&ud(8#S>7jO3yLCGggj)H>b3YSzeV7g&_ z5GR(FM=eCZ< zy8HdYW}q~)Zd8!3XJK9Ze}D5l#Wws=7CHC;aO0|-6dP>?Wv-^zp&Mw3AB9jz-5-s? zTSK_=%V{hB-6sFGz9TN)7Rq9+M9mQIZx4+`>U-iZ0}1}GKZX|iBkGZ-g{L;6sjqo3 z6j`NHq92Aqn3Hq>(;gcS2eB|}xP#UjQv^^(UGsZhVM~}JgycQ!xm|$FPg8+WX!G52 zn#;Z|vBF4br$^UE6dUaU;7-f{gF4+Wa;~u_uzDJb`iP0>?jU0Nm!P8e*r~Jo+eA z!)>DZmt!|rdT&bpudc4du2Z3@4SX`jXocMzE7xF`@IOl*oN_BWa-*8N<(pX5ftu*4T&Fn9IUK}s@eI+t*Dio;}8MPURU?oCVwb|5Bm>^rREW& zC2>tJR#(`S0`JM5X|JPzY@e1M0Ms6feRnZ_)gMlDOhL!XvC;7$mLK*GEInAVY$J>w zk(Qe)odA*53CpyrN@NW~#LFW4T8U;Ls^qcLz$}+?ZnnfXqTOXG%A)QZC`$<=Vh%@N zJ1>te8@xE^LTHL92fqqBrD6yBrQ$`xcf<_sZyjY~rrCzVaH z{^g)q#@r6Y*UXgw8XURe)c-2N<`L_}GEvSKS6_vw675e}s&R+lqpJa&XSzP$5xZOu zk_m$81~mb;#y8}jp?_x=P%glqmD^DB$QwZ{U5ry=(9Mv}7=7`Oc;c4)gCZIQW~SmL4b;<8UT{a#2XUc<&AM%)MOQWZHyX6<%A00Pmm z)L$QPmO!!534HO6N1^ElJwLusUFmmdYL zM08*&#$;Jmo+R2Vk6{J3%fpav+j8ZgYFzozJPkcgMs+_R#O09MkafZ10A>Ymlzqnv zd8CF`Y3omV0>DYzIEK5Xf}Hs%(yesI%Pr zkWMojAmZi^fLx6&)jR=Gh)q7szZs-sj*lRnsg?;X9#A*>I8UiAoz558ujCOK;FFJJ zWc2N4pyo*C4KeR?XjjD4j(A6u8Pc*?#%wdc%s=SjJ=j?PD`<@FN5)`<{OOpMxLK5o z#)(KQ_!<&XZnoKG@wCFOyDB+lijMyPxDjC{)|ejm+2~s^my0Pedy+Y}63T!zZemPS z(2|*r3sI`;U*CbaEZ8FT#+pAsJD)n-G4Xn)0KP``I{znvFv}v4R$TZED(ft=>uHSN z{tJ>-1)B(M=N4N8y+bkmbumI&~VKI{5x8fsp#>mN;-_f(&iR4cQ?gNMym4UGYd3mM5xL6t+~f zD%G8UOIHDMt&_@{t3sK_=^OyXO``p8#i+P!btsb626yW<2%EQAn5xbcY2Gw`^X~L& zYdQi4gg7LQ`68Wixn9}(T(nt{Izg_;uU{*V(#dmGvBuiaWG7$t2P*9JStzI1JJ-z- zb};Qttjg8{DNT7={uN@=4FH{QQ%hHCOcU*D%idkUi`#~IoUuC3{7y7tt*nWW8xf?K 
zom}*!n7eKaX*R|)2GMB8Yzm_KB7Bj;E|Z)b=}Os?g4~3#V!(9f<<0UGu>6-KcG)8T zik>!8H~RJx;aKePjwp5r?NUm7pv{IyW z(#oS0(X=Cx;7l{nd;JJIhbkEZb;Zv@5OGgCUAY+B?VP8mHvus`8VJA$wPj3J$R*|} zx2CvXwA0cU&Oq?I5ZGbbwYqQCAOKYl8;89I6Lw>s(iy{sK)JFiWN(Td|HffJeKJaV ze3*Z5S3qZ4lnunccF*Y=OY+;+>%D^=W$?WLMDzHH}jP$5-*kx+|=XZ6wUf`+rNl-??p?#6Pt05hG&V z$ppP`XF9f<1CBqwnhNZ(TFXToIjRD#p9bn8Vk{|lf4p40FBmdN=X?`IWR;HJJT(rl z2roH^KU@y37}4sl5#Xe%bl+4R<1S(9Hq5`LACiAigGmwIVCw+LhtljGr`AKb>hRIV zx2UiMB|b>0u>m3*D(ZTRaYSP&zD84K$HoGX1(~-0=&gm2Lb~E{f`~qbxg8jDoAM;G zyFYPBGf>vVbhd!iZ6QvY3Fz9bJ8aIVQGG3n1h6eT%jSSTWE|H*3z zeSMeX!PL-(#6BkxRNrJ%eUHKxN+XSgY2(vV{O?TE5!ang*agw*DyE;2(-U+w_sW?$ zjSX%wem)D@snua!a7{55I|sz9>eXhM&<+t~L34Tr@8i$SztRgfDQ@|7Uw$4^(#i_R40 z{A%!Ae6dLLBSnQ4I{P~4 ztP{#G3W4?9kVE3L4O_~01UZ=n7wW}}MFRTd)>fZHzwqy1@R_;rc`G9k1nyyC=#BjA zvid=}xa1}fXU#a>szbVqZUtf{n9bW#&GEP>3WW)HOZ5H&ffleG5aUP5h?ovG8&Cz0XtUzmj(+s6YU$y(K7w$P}(&$+74k$QDcWpKh zk#+ZU9pU6V{jYSvvGQFI7I3Ymz%J1)_qbx0u6dqaLKwufjo$U{JPvz)t-u?FSib3D zL$Uh#eR;}qofJR*6Pm_FFOHM$Cv2YNfv6a7JqVET-^{BQP+8l1Ie;mw->s=^K3>0&| zP=BduCysmu+G)4gvgJb|j)7C6Vr=#-bk-fvX{K$$RSUsfvdSrO@bgeEI1fIuQ(fkT z9H>r{)Uz+;0B-nc%+;>|P?;!x8sqU-A)JGqd6Db$!knh8+lz7h>i|gl{XgFj-zn^( zX|5$p9Trr*;TTvf#*uI4G3-GgS-_p}7N`@|4K1#cz6}5y4f5l%NQ5g0byllhii&sh z4<<9dvau3iacLbwcUkj&5O;)0^yRfafV5=Ts$TI46il;djz<-y^9&ggk9|teIThNm zCtxH${c|8EtdWVP^^_1t;?TN9cXNL%MXJ)aygiz}BvRH!3-x~cyG58;7*!d+mST)n zC+5aU%>0@du*KFSKKzETWm2uzr()`V09^lFGAR2%G!cBCo~_Ke-vXFbw&!xid65u* zGFckrA4Mb9WS-poJp}C3re)WxAN`lE<;C*9=HIckm=nAH25nxrgo(H({};f;lZr7_ z;tRt1D!+ZYG*587!|(a0dvRw6(?0=y#XwRI|B=&8J*;FNTx|Qkys+1js$PKQ%|b0{ ziLP1#$aSk`_vfV`o#g12*0wlu>6|9Vx|p>bw7KQIaTbqEMYDxz$Q&BKl>SafFJgP) ziu|nx($Nz?txnjvwWIldVl7DLR;^{mT5Cfi$BN|b{tZ#)b|O}cQv9v4E|8@sM>u#$ z72rV`J2uYZHL9(-Q67t>ClAklv?+kIEt@zhHrXsED;U&v-2&2F)bmbMj27)oWNXIl zz-ejm+aV z;QcVR+5_54c@0)w1vazEGHT?W{9(T7TRJ~v@>oD;Ih83E#m)~xvNqe^J3AR3Cd@M9buu1N z8b=)RtgVc1_995NG-C}mZ-T?R{W!zi!fp9))kGVs@hiRqJpoC{|qEvZH9S({^c z6mCskg^w0S3AE6QC#FJ>ms;(q(+GQag@bkPLV@oUOnm9BW0`#*oG-S1oU6CTIVH%N 
zGA;uNvef8wX!Azz>omAdS{KXj2SJ`JGNRuBge`dd+>R6F)HU*Jt^K?pBFnGYiwaw2 ztR7lp@dKgZj2oKm@zz0(h_QLxbTHw*bjY8HXYG0*k7*q- zPn7xRk^s}**QHPT9-@tBXA%j{y4Y?k*Pef%LANpH2;fiI3oTvD|7=UIUmYaC>>&^O z1MYjf9lr_l_DgyQd$1^{Fb|6CD@;2T(;dIh&LNqKY0-4doE%~soAgx6%>{Bfu^g)7 z}$K_lXZqChQV0gtHr|JT(XOL|#`+KP^we zuX|FgPGJ@Uh6*wHbV!$aR0qq>>q_)^`5qs0CjY0#&=&1k7$dZJBn!*w^cLsjNlxPC z{y`$m*A#7FW8rxK7F&f58*K3Mve2wGaHsR17ebnmiSjJ7@kKd}EeW?=MfbHW=}{Wv z2SLtA<54m8xFiP{p%K6>mjSr-b18G=6{dVEWwt0M*xexyIK8p-RX`NBi51jx3j0&; z7w#>J<~4L^oT{LP8CfHoaJBJ@=2+s|{DUTZnu&6Ca-C9p-?2jYj!U;K@so%?8KuGI z2LIb44-B1uhj88lcZ@G?f$A&y^fd8(bQ_RU$4XsxWeaZyvG}ARj@#~lw#@b38zR?X zMB#2w5@t(6y*zl&|D>YFJYv^-^HeZD?TFU;*~p z7J-(e6o&D!Xs5|~SJ$gndKH9bS2-c-Ud#W)N>TSC3qqVBa~gBrIj;j`nPg`}Os)c* z2?y3g&TXvJT?dFJA?YZu`IvCV9);RpkA43IaM~;h(i`W^i}E;KytLXFp9pZA8fo9@IbW9`E}*21M%(ZCXB_o1 zldbj#fZGu}J*LdKObAk#z>0FQ9s1IQvGf-E>Wc%pVw$j5r^$1I)gv(LS=cnuyv|!9 zkA^W?whk>@641SWBA#cTTMAN*VHP*~r*hVKvv&mjSZ)P@H#h6w}4LFU`^7 z%khty;G8|Xb+Cg#OHu2l*hw_aiD{N|Ny2KE%%xE+!V#;PZDZLLp{a~1X$^6)D3=Ap zW(|GEtptKV1`m#9S0(6XkqHF2Lu6T2G0H@F0gSJ?54QLplOU20wlh%S8 zIZGty4GH{mn4dkc@M?gY0otkTF%MiD#0gf&A^f-NLR(hTycdsi*8?KmNi}hwg65|V zc8c+F2}q70WB&U2_h_K1ybcdas>uBn?X3YnS6Qv=KgZ%@VH7re>t7= zGM(quYyp(bRvc5QsWaGovo)x>X;cC3*fys}GZn=q+vPM~9O`1KXpaun&GBbZmN{-) zwF10(dk~kn)31gXV46Y$ z3bFADyU+`+u8TkVLAqFMqxzf-&E6T*Y%qEIMUGR2nOP<-6>iV~5Eq`MJnq*9 zOkgJTj-I+0I1C!0a!^8>8Y0S>l%b$uh!Q}FliLm~wuk5c)0xQkG66`ZFqemeM>sB% zY-cO}p?86VKUzymEVdhj1;AQNv*V>BLCkRtXTs?=z(afb6PDvG@wEt7H;M0G24%jn zo$8Dc<iDtDC_uCC>mz4W0>B=ZeWG;?;l6sY&NDtA3r4Egte^qyirT&M z1W;cw@SIQrXrs*ZUE_&iuEzJoJW#aZ)7*b=|GHccMNPz=-^S)0SUJ(?3$xl zgtM&IHRCqX?!)pDD>WFo{z<57dF-e-N|dUW70|f~lUTBg8rccq#Om~DL`xCcC5KsV z{P#wo2guwqoW%LFAiWZ1mDr;jEq;*csyY1O*4fut~O+a+E}+I8C`WGm|1N^QLBqGTO9-Biq!Us`?Lbw z8qkMv_SV5V=*wgCSS+(x2fuzCfKz8HBDF~|j{PebtjyA^Gy8847IFTB8iw8{gVIC? 
z4UZE}$y3md9@p@NxLueFi)KVyo|R9{k;#R0uCvb>AT9&8Yxtzc7WYgb*9b566yiA1 zj)G-(GH;r97Es2oY$i09u6%a>SyL-h(HPOx8gfAQ6q}v{i7-+5#a;?qN>X9-oc6f@ zPR}REr1#Nzc_gm9F|drh5Wsw}aZTKxAjo~fkD4-v(ezFhoYg`HN|u;=5dfJtGIi~G zF=2!@p)&rZpi^vHa9=w48FL8`Qm1RTN6n?#XN1pIn9qg`i7yl+`DERs+Bxtt0Jotn z2gakK$V~d@6D}ug2E9A>Kk4GYZ{vyW`-S0APP+Z@jFaOFd0$QI5~Y-<(HhXUX_; zq5xM&+k!iqbW13jZpkCHw96UZMFPxuYaRNCZEuBkTTg8e%U;y#w*$I3JcTQ1?I^_E zc?#|w2a0qmEX8|cSJ8-%^+$YI`Z(R{_uxHC0hXIY@4-iIaYMm>~yt>b=R4K{8@IM##!#e`Dbj@+T-^}pq&#M zy=Z+L5;={g)cu|y3`e*fY>G)w=J9mhbjzn8ojdBv))+228?D=$P`jiFY`V~_)oDoL z*}oDpp-{(pUaa*ralCZ*iD#yPIPG8HS-tCgJ>6sbbs-=up~@=6zn+J1p4e+@(XWe( ztG)nA2{09na};*rc*`oT$o|43e%zz5#l>f7{CQH4tE8cBE=#|0F9EopF>7E~YBS^m zUIupA%Zlg*k9swyX&uec`Z_cm@Yr>{pm5fw(wx1=8#y4iti_lwz~yILcH zGsv@HC}7)Tz*|5r4hI2oqDX{+V!k)t6p{6d&tS%bv`MRSb)DF1+p>tUvXjky9TyH1F2 z``BUJF~=645+JTj%=tHeTTZx8i-})Cy1wvp(#_4woNg>&k@=oz=dVMTq;5a%n>-FzN9y9P z|3JITkE@OozJ+oIY%61z?;x|@tv5xBafT2xs5_0Sj5*%}x!CD4Mm+yxo=7<}+wDK) zpV+r6SJ!^dA)DwGV^9gg4vU`I9p4Kw`%({dNzZrLFJRdemY(tEfgU>X_P+sB3YC$! zD}Fa+W4*=xFqO`o;wVurKsrOGnLjDWVw@`zvM(0jv9HvvJ#*yXVCoVeS(9b8rX1H? 
zvIL>u6SU1zKyGNc9YEYFklMw;Qv9GWT>4<|CsCGLy%bfk&e8w~A00|3!arUlEvup< zmR-jGu+Qm0Pd8Dd#|KU+`yPS0WLeO@cC2aq#O3m@YFI{{w|xG8atIp_3vjs`J5V64 zuwqW5g%}ijh_?Le3(eduVCWnrjN){%G^kjKFbobI6hA2FyvO$FsC@fX0kUCpj=F!k zl6u{mpl(m?1r8Lrzq=NQWhUn>I93ZOm10>!<-we>%s6DXzSaEX@4~aT+m!=9uABq6sZeQcT zrJ6W;i~M8ynbr^23NZ(5+|Zf6B_z@xH95++b#VBIxN19sS^w10By1eDgZ~{dJQgTu z*=fn2_VK6gn13Pf+jYF^@YcW_lE=|>hkP&)RU*som^u`~rLDp12A6gj8O{@E_P7lk zKb7LJx{^O)9N`jzN8$auZ=Tvbg(GdcvK!M z8Rxb~^Joxrh2ckhD>54ns92sMj^)PWF(!03#@b^covbb>FopNVq7p{KwbFmqc;Zos z!-mIx69}5wDd`;cb^*?X0Rw*kt#M~54)uao^4zNYqfRxtxKKd$5iZOm;6W3C+!s>q zb%=IhunUgIOJwz^(XqlL{>?gW=E?%~{eWsv*Emv3TH<2V3RM3e5l<@UJecTtk9Tiq z7iKCZF&$Bu4B&AabrcovmHl;sT_Mv8xO?Npx*8DY&oHQMi^_0QZJwU}s%$tV|1ce; zqs|f{Q4Wo+sRYgKbj~fM>0w{C4tBLfHJHdDRfX%y(ZMAc@Q5F1ia4ZeQ7pLB)7zdI{@gxRL9yRa(HzFL!ks zqhTg=U-MuHKySQ88;~<&P0xTK-Fat69*-L!8op!iP9Rq)_Zb-ThIi!vb~VVXsvE$W zabnRCyU&7l%4zORtHLh?Qfyu+R>jVr4e5e3vkK)n5QSkc7$s#F&YK}~A)Kw8pS8y> zhXFX31|6(zbvSg^oY@0SI0DioWEe&x!k+3#Akx#c8xM+bdPOu#952oTFbBNqV2iZF zjsnwkz!~qo3cHHuUl9xc-1iU5t)mG$DP2p%b;46e+9@2E0r`ge3c+GHKPY7 zY*GI=C&9GB_U61(pePv!sqsk`W{i&uPX{-nZAJK=v*+;#pm;L3R+R6KAv9Q zY1gc=!uk9G&rPg3PEgqCHuAVZI(k|AA}}+{;tcbFS(oGh4sNjrnkT@e!`d!B5=nx( zA^WYuZltX|=uqtLMXvWvb!Q~&LdQj&04oYIvd>`NNbNxg!v;)DXhslg=B*U+r?P+wwxZV z`)u&hkq;9l zrJ)03*GCB=)dtxv$1S3=!K9f3#{C|HrU)83VzI~l)$rkQt%6xRsr_$%654{)=^bsN z7`HwJ<{H8$QT8g&T2BL-QJhA#G!)|$A!NZ3XmcF!4207yV>uLSJ_`-gx~n}+VgJXw z6K!$zbI|U0qY6FUjAqI8U-Lp>(o1`>9ubykMdhSeDAGdFVsmCpc|HfGC4>6iU+~xT z;X{oVyQNSs|DhDC zphK~!euG#R3w}0b%+eslSxl~s)7~`Y?)cSPP?kgwrVer1+tAK*43{!-cK;p#yy^v( z1qwSd?mW0l{XVp_(wNMOaruuxXboi(;%NmPnNg}MUi%n2KW^2g9lH_MZsR`zca9zE zpHcBC0PLz8HpXtB<$vROj+x+P0a-_5YG9j;Ezy_$giTz391MO9<($iOed0*b6oEsa zxH$`VEx0efEyZI~HY_gtj(9NN)D+h#n2AM3en)-}?UF9I78{=KX!rr7Z+_@Zr{XvM z4}|FydTx9s<>}0p!tJP^pj}wZ_c2R-|K}Xg$rp!qzW_L=-9`a!QaEe9$oQf~kmi0eiV__TzAA`p1`UhT7Tc*W zas!L@UqqTE&J|l(M*OxoNLKhRZcZWO|63A>&xa0);Y$;w0$CrjUY#k*qHIz-m?PTt 
zr(IIAiukiIv)(PeRV-Q-0M>a=A|79XFv&?fSd67t%0Fj{L<`Ny$%vI5g9G6!yW|)8M8GY&WH5_qNN8G2Nxx|WVPTHp~6ow4# z?Js`GV%JQGvNid~D{Giy|AB2YN7zDLa!l9p5WN<(M;QiTZczCm|Gt9Fhm9uV3?XnS zH$H2xP1vPD^EIMhRF(ieM8zTIdinRCHP{dGG7NZq({iSUH z-&`li`Sf(jD0Z2R04#xux^~Ru!Tv4Gt*D$C+mk zY(j*-qPB5^J0=S8Cny&v@7V&f=E1ptau_mWQ!o!;RXwr6W>97xRUBIkbn%<#ANd6m z5_)DUFz1yN>g};mh?5zOGxe>Z5Xao!SbG}>2M>(P6*NC=W9h;3wgqr9*$L#~+d1~c zj@Wbuf))Tvu0qu8!F3Ys-9T|{di2JPmRogpNB`f9^V3F;hXAg`NMXmZo$SYNc`@3yOPNq!9fpK%58PGs8b7zsRI<@_a9nnkv9Do{Nu5y}A;|2(w70%JcW?qoFBm{}Hi=g8qvs zgqyyiDMdf*iRO=Sv_XSn;#h*_n2ntbn=TPTM5!o-jw9?M&h26Lp}VvRa)G$8k2%8C z0{SNSh7K0$+)SFcmlNS&D}GYgS!k8V6g+%92yE8W#>gsyu7COjUcfq2MIJlRF{)~! zWl|QoD!LRjJG@jx?!9rDAWKCPLb}p(uQ0ld?%s6AqkBV{VHrpJ7{k@f@&A{uGO?_*P-Z#Nk43eAfi+oG{Z*qr}_IK-u(VOGX8zOx+Q*Ex){U`V zCy<-mfFb>3Zg-xN^lTh=JFckPQ&F3d@56Q$+GpJoF&>1w|E zi%51Gv??y}&Vit6Fr@jWs_r11^YU6LjnI_6!%(z{y+&9qUmn{7E@9*%3%FUVJ<5}OnS5OvXg;ao@C0( zE8C}nm{m^C>$vLx?UZM-@WxL1EC5N58Fy=i{U>jn@B-C81Z2%i{>QtMTY(kN26e`G zgh&_Y))(YPT-Fd5oRfc+EAx0$_gv7dDYoF3qbuGP;)J+2mr8KVc|Zt!R9Or;pP(gE zn1iKUYC<=bz>cICmTlVQ7Z5X(jtsr!P2#!|R>R8#X^4JKm@5}|J<)z4Bt@@aO?QRD z{)-y|@$*HHu<`f>QFXC{{RhU83c8e>t6q5pRF*3P6(=i=@u)DDu^g9lw2eQ6kkYtO z(eFxvRW<4oL`*PIJG5R_PbDZvj%3;LV>%rVWnmTxmc%2)dUB>bBt-nLL4&>L} zL-DZ!ow00k=~BN4al?@V!X4x08v!iNBATx|MUrTDi>!*q-UQ{@mm$W5V<7(OW?&>W zVptSzA?O@xSl7}co)Kb3Z2K+nKeqz9ywo8buQj^<0fYo+v3)vQVaphMJz2iXNba9P z{n2!sw&5ZoJ`;qHutF%rzePB2x|Qq#I&TND*jOsCJR_CW?*Mb!eQqRaMP~_h+T$?y z)Bv~Yoj^*Pu`1d{SlA797~y?)SDuQTH5qUnBfH>jmqWLZ;;+4llas7~gVcNBKU zu#Y&Ca&pD{LCv&`pqLJqdmu+<>*S#;&V3LB`3~4Io>q|h=#k}Ld?L!SO^+v`Cg8cD zhXCPQZZzYN`4E->mW{0_ziYjcG7fn>Pi1Joc=U-p6>5Zy|MMjP4Ki!$wVotKA1`&9*L7O>sZdKB zdmA3!dMZzcd!JlvY<(KQ+}M=5G4_21$lPH0q)u6k6NMpu?b|L<*bRFUNme}vY0+7! 
zDkJu30-dZ~BXd?K2-#Ov#%2o%k{TwPah1ZB&ve}A@qk$g;1GcP%AO~NE0+C@xaoNz z|AtprW;e!l0?cwvcaK(|#rXV1u&m$OeF&01^d&%z1zKnQD#BmMG8^lmu4s7`%wnWJ z^}TV?`x>ZODEheMv^Vl^?f8tu-+nX4vgIF>&s$)w0C`}D!%H*=n2oXjyZrCQQqdC2 zzXyekCRfHI3Yz_N?L4haCcmG9C@WeUXESm{n?ODqIylzhhZVo)WmP|VL_;;Q{D?=i>;o2`j5C!MhR7A~JP!=5OwgFN2 zCQpL<3(4|h^0z={9CJfXX&CH}5oX5NR!OOLqmVqI!SUJuM-Pn0zstXn4J*2>&%Ot7 z;nM>%apDg-kdy=QjR1{kxV4A}euO|Y@U3;q?;>4RtOHV0T;f+CWYgU;GcHtEpUN6y z^Z!Cnbp!jw4T}i6AxxPVgMWuI(=+sR^p_=?BkSh4aPfY9)iEA{-?ap!r;`S3!t?;U zCWk+k1kM^btf)K1V@rd%iD=)?IHzT4zhyz;3ZJ%JF>W~s*Y#vB#Jnuhyfn%YY~}LM zPL;(lt(e*GJYi-aotd{6nkjlbFW4zfs2mmFiL_{#sTg^?qHcx!L;2ujq)j*O9$OJO zyG+z~(BsAV6D#o#A{#y=s#eaE!-quD$(^?fka-=WQG1hBq1_y~>>-8lq}4zuDeuQM zMR!JUMXx(nT!VjH?JQB5a~59{fUJg%ho2%l{h17A1CD-+v7o5Um>$ zbatcI9gp1@(rsvT2l_UszD_qCV}L?T+ziM&OL_-0O{VNC3U}^hw4OLgv_*ljaAA6! zzXgEHKeq4jqldT5BWVT0#iy+REbLy4Bse+g6haBHuZu-n=Si>-s7n?RW3~Y^i#Q8x z&@H$}w*xXOwNe`VbNf6BMic2|wR#62=aU>H)Wv>6%&%PRG{$TJPGy?9^tGb%$v_v< ziS+4?`B$h0Sq-n%4?trWz53vt31>Bqwg3-Y#aJNBspy)J9)HDmfr5Om}dcohvB2oOs#;q%bjiy#{8f#}Y1}xs#`?QJJC$0T>OHe-PEMRu8{6y# zX+d_j$d75e0JC1x9LIfn@;H?}am1cbeWI5CSzS4NS)kLcVHH}4 zSIcrh-od!VRGud?sWV;_04TTvP)>LF?It-Kj9iV3AdfF;gI1NzC~y zxto|8E)i%^*!AGc0Z3JDG z$*kT`EHB%hBds{d;`ZA{9gbcxIo3Ti|C37vIu9H#gtW?QqDnzaf$@vkXO@7h&N*7s zVQ5pdbOB~{Neeu!2x$hYS{fUf3Rdg}A;n>+iWGEQ41;689!P2_y#Vp?Y)2bc6MM|b zqm5?^I;|HHLDsd;jd==Za&GBjgWer;=K?v$3cqUI{U;E~V_58a1VNV}dCrV;1@K?( z)c8O_MkbcI431^<@|e|WJSi7Iwp}=&_(I_Tn16xfWTDeR47js+qE-olE9k0Y!Gi-IPeJU)+$`#0J(e!TwzWXV-A&m4LJ zqzBb0U2(N2K2G($#YqtPxpxt7A#Q8i{1sdUuZhDHbUh~9!_;NI7nm*U=u22^sRT>> z&0h^097~)`&@!my0*chtI|*{Pn5m7{38LM6rB!5`Sr;3gl4JBtw9agBcMz5@A${ev z5KEp4+UMxO)<|Z3mi}d?-RqV{8n# z!Z><`rWk*v@!YhZbcPhi5sCU1q}K+(jDufux!Xn3i0?zhz1O* z@pFZpMGb>}U0i(=fQ9cj>NJS`cr&0GV>>cCR=fqeZ=h<4##&VrLC#m~2c zI%AgZQjScy0|b$zcg0q^(;ropNAa#aW-@~*#tA|!PmJz3nB_g?dw}3t#yvaVOW0hG zVP}gitpNU%N80iC<=^6UJSm!59|UrT!XHs&8&EQ)W&WSAn`jHdc+(N(ct^vV&af5#qR6vS3-|QK-Ih;GdoDxtSmjKxvM^bd36s4ySmJc!f4cOYCn 
zJP3CB`%o@gxzq)J`w-f>;zO{jzBji07{uag<4lsyP>j=rISuZBkNpJ7tgv`yLvT_F z=t(aM4E>B4ZUla7rw})N0pyG@E7G?4xe~(7I6ZN=!oP_lzR$y4ZmTbyOHU^kEYu~B z{VGo(z4P1{#|eQgl%D95mipQsV1rezJHF8W_?{O{5hvM%-G=FAw~4-s{5UV`ip(*;$l%2Zkeo4+Ir*sUxCDfw!6=W zpnVP4D)#@4pi6@T9$q8f#QC{b@}@?64%1NB(i8H^G@AtM!MQfKJIkju66!Nof@4mHtOX(1_vS z`9j>E(>5)6m3Vq7K$>4)504p3=jqZ^bl3GAA&6kJ&12(b2*R`cBQC!zVZ_v-)A-v& zxD~s*y-)(M*GB)i?(&Y_rgz&95@F`6Fvr5F*$*W|%X=h$@e0JTxG_I&jV)J%hK&&e zqG6@{bG;?Znqc>pK`1*1fNZp$5#^HN=%%fJE*5j9RUC!0H`@PiqMUlQ4juPc723^P zmlF7*F-{TY*37|ltiBqg%UHokR*Yi=WUU?ff4({-0%6}bGky`_|C%@{>sZ78>3}kh z72$-iSD|LMT@%1nzU{y`Lli0oII&6;V3Beo4xJ~%%IWKaxl6Vc+A!x#J$i!;^2D(Goyp*}mk`$=o-TCe z0=U?Qd4fJV2AeI+5gC@SHqyfeQ#JxLE4&iKeE5g}+Ro&%c%k$k2QUn5H*CUxE*P87 zSaDNGq(74;xc}yavv!nJ0%|!k1@=ji_u_RCkD2VFEr2Z$T$uyL23vuUX+>q6rJ#$g z(}-jW)EEP|26g4)cD5xh6m5aY9*}jCDukUpAu@iP55+Hg@asZhVgr zORJm<0uPEb%UnXzrlc!Q+b#b{j>@Q{&34aYsVkzr?Tj6Sk&s`xj-5p!c0J=&K9aE6 zs%dD(tneNIW<$OyDdNIC@+d5AhwT{8m*r7(+(xFSmV=lzHXu?)bj2&9z$}?6c^Fx4 zG&GVKUB;{J1Yw|^ro7D~jBTh)9(k~ke=)P9h+R&jl=*O3!w0vXlRVT;-#xir{ zI43IG$k7V>`^Eyj6&73ea%9w0l<62zSr2P&;gt+XOMxjF&&Nww0b~`exvVP=pOi;w zNar{gR0FttD12weLnXQn6V>>5GXGm9EPL56a$Ej@nmiH8N=v?3fD_^U?z&jK7TSz= z)Td{tV)7Iqv!Z0^Dh~=ll4Vsm74#{Jbb@6^3*)`Vzxw~qWs?m_D?G)g|Lwh(kZC9XAwT_w!b z!ZpeE*sl%R>@r=RlK3^3J^OT!^e! 
zr8OE&a*!~mYx7-A9Z!}bHnHza^L6K+i0B5|l;%x5DGGwa26#0W6`aj(Y7LM+$La zyY-^W4AD-t(8cj3%jo#~y!CYEkwbY|ta=A)QFnC5UVkBqOWKH;TRNRTT^Ov{<=n^@A6kj_|&)_`fElFAYxRDo5SNNWc=X#m zVSE7)Y)TzGM`8cX(JTt#rud=+(J|6OdH%)3BGYaeB7G%-&)Js6oJ$B>-cxGI<13M_ zHWa!YU3&R$kITR;!=et#{)e7`)Z%FUPudJPbnnX{NV{cvZ+v+LVT88eAMyQF1YJ&A z3~PY5uLf|#%=~b&3*P=3K<88`XIN`p2W??=V*ElqbPIu94436&_;iuv(0@?WTu;!P zb9q$#u{UlJ=EC93HjcZ|q}0zy@hXw<&BCfZ9=gQ@H>V15!>tfhW`P}lAvXMnDOT@X z#~vk$EJu%vmG2tn8E|M04Ib;VhM4=gCK6f zobmKzdiQM~0;OO(4UC<#;MkftP(h0|yBm+PQ<%%$oGub=w*Y{Gv}5(rq_DYUC5M@3 z5pPOBuGR_-QENN`ZH~0l;L!c>M?sw8R7MA0C3r0Vgm;QtV(llOVQ%<m zKbc2E!`>NJiuTB93$2zWYv%WUia(qIukLV0Irtd>^S~ZMOUP@4xN;`bAcj8+>4J~y zX=_Ir-xGTYbCkV03UP->C)$(@x|Vw`|9>ir+G`g=W`nOhZ6-BA`wRI;aum=NV_(dH z8jMA|WBN+~mLN7b$$o3smw_xnO}(+3Xh$h)Z(_RV=IX0Jv}XK=G{=Uo<^N+P!;N&d zNP_skL5^CNcmu*M0DTT?pfpI_{wAn%nW$a+jJKf8KjzUaGNjA@Ss;A_0}VRhB2gA2 ztxe<6{%?a=Ml&&W`duW^ZR)qvJ9%`L18j4m;e8M)7W=_dJ|IZN*3X?Cn}0~y1!N7D z)Uxd#0hzneat&7gF|?(RH4_rp=@S65N>9bb2vL1S)ZO27Ww)5cK~;&E7K$QA%>##h zM%aAI9Dmo(AzgNRhMkKHPZo#_wI+?1zJzemrB%TOdqVm=IfXq3_FV2$EiC?Unh5 z5~1X8ioc3*O3>^N&Ri7$d6Z3zp4ADuw4>PoXc<%{VK^F78LugrwJF2|Lo2Tf z;(ezaLl@=UlTyZf;UvJhRBW>zVYh}fge9fZ`RfC^aE12ioN*Q7@eP2oO3=RVKN~~4 z5~>(WzS$Hq6BRCoi}8a1Ct>TFoCip$5}ih0AOWk##~wR~}au0NdDBNq2wkHu?XPxgXjQ zHwZwULk36FwvI{{*KztEB1xAu5ss<1gRnI8ltWy;J+zC0H{c!yh@XUHb+6^)Mms>8 zlga54_$C4zfhABOJ`?REsu=ij|FP1JIl|2}J&Jfv3E?qxb_j9VPQ=No0jr|D`$1aj zjKs-@#_l`+Pb&0W_K_u|S})TV;_&`?D%DIJ!v{bjgejG=U=Tr?N*kO17Ze7rtc=bf z1f7rEtY|BAk04@lX)kUbO4x<1LIJ?joWlTISQZIQarAI#S8G{k)Qo_#!hl-|vbaf# zor?uK6Ag*7tEt^3PiL~u)%MsG+C7T(3Wecit5tW;!2N17LR8iep$S)$x@ADS+>q4j2pN0^r3qo*D4SNJhf1hcuMuYGA4m0_krM@E;z{z3 zFKY8BC=A%ZS5E<;G0g2@#!=Ycqw}E{FHQBI_>qZM6sFNFxRyIZzlxx=>85^MHVw++ zVBSs+m#^3d$VrL6LOj1OfD6e;h$q$kpv_uxN75D>3vrApI$<0y+AV>*opHKIS3j5C z@bJnMv-AOA&SgfAs0@W1j@ia(u$0L-U-(HG-C2LqW69fDEj9qaN) zI*rrD^{IwDA(R8C&^{AjUTU;T&}L*nzPMcMy&;?genyqWx;i3L?TTjY(}a-=L>z>SsKK`0le{f)71 z2ZV;Xs<>1^l~?A~uXI9~eXhasz*QHt*>6B0g5?sIiiGj)psiwqB0);tO3Qpw;cT#A 
zMS&#@vRbCwamy;ANkQViKkwX_*F!jy0JG)aM3N#ez$`b5APlfAL8aTyol$646Kj2T z$MU_dWy#_C!%38P%!vy`S_0`RG0w_+K`gTg9a51#Ccw>!jTJg;4h-hzkaStPZ*;#P zr(1zlRP$jui7|4la(GU%S&4l`I!XFAW>LlX{eK{eps{Aa5yUblZLH%OqP_%7Ow;@5 zd3lT(JvxcLSb({x=79Noky!)Q{bNn|*Z&y|p-q|)Pbf(D_RYRGj)pQ*Qs?3VGFChW z$Q(>cs)GM30j%>l&>40tG5UHlo7KN$bmne2CLYKCW@uj;WI-gEFSsNwI)Px;_8vZ3 zY27Bo+)5;AseIsxVCG=7+`q<#Cjrpx&@SObPlR(%PIylL8#HZ2Cs02s>@RWJjX0M( z8NjT__1J2sK*CDJl=$XUf@XzR8T1NQ@iY*#!tC3~<_y1v!kl1vs`6{k0C4WyIP8tv zM0-x^^N*z#x7L~YcWgS@D(o#lJx32dJS~DcV2G!5&!1(A#k?Is9)f(auR#)fl&&o2O9I8!wfp3B9rB`TR!-QYVo}4ib=Mz_tCDCpv2a?d=P$ zWyCmLkYz)o)LLTS3qTMuFHyxh7ZFB?H65{@g20blQHZk@_DAge*n-4;LYxz-u424! zDKwQKGiAo`{+B_jR%Ob2xd<1_1~C{FJ|(Pg=i12nEscmgIdxbO z*AsTB28`gz`@2gt4Y)fN|GU$fT!Vjif-V4c#RCQDM)Sm*K+TW_5}n0Q5#(mt&UX7E zk06CjaGJ zK;wfJ>Nkt%O9X$jOtwos0p#*a8%Tw%Da1+a-BaMJG67j_a6ozNQ;_Ojx|Dp82&afu z3UgOmv_9kiTjnxpJSc(&-qY9;oBfNhYY1EL`=5n!#<&mC2sCXWhy{T@N6&+(L{AB| zAQ;yRvBe9}E_D;Tje@oVe|<4e3&HT9b(|+8TUqIBs!2WwHGs}|i9g&%TWH64SR&2c zUIy%|XWgRbs*NB^p`62#rr6^Z0B>^oTr)U9;Sw-UfW+6YnW&fL>4-NVT*7+hdK&En zA(mt{bJQkpLL!_d_!|5+Vdtj{-<*sHaJg%nwdD~_J{4Hwzd&IF2#%ZXeg{|~H5P3FeM|_J2n6K;Z zQ0b2N021cq+Pm^Y$3q#}9p8v>Nowd}+-m&DAJQ}RX54WfL&7_+53+7O_mey}7pa&d z{`eFCR%zw5YQ2?n&}Sgz#M?M=p2B7l(+LbR8slz3&Tm>$WIQ5(4qQDU3ZD~nlk(h= zj&6Px>S`!s`+e{ij$b)3ZdcHa3%__=q@ZRV6z24qzA4~B0es%lgwKxdnDZs1Wh*@+ zW5f<$1RpFWoPjFtEz#}L?*DCh z{#R$_t)mCBFz&J9Lxr7cD{evlEz%{M+S0{RjH&gE6~MB!h%R{A#OXqzmT4U?SmP^4 zfGW3zBcfAQgvbVAU47=5pcronbY8q?8>g?7lN0HM7l?F`nP?h&aNG2(AOteHBHmNb z*~x4TQ>2yi=vZXR5pyiF3XnMHW{W&QgfqkTFg_M(hKf9@z2I8Ca@VzL9%n2^(>IAk zMvO~w|7s5FU6eN!wg8eQgO)~lKN6a?_CfLZnowky?s1~#UMu^Y;cHigT`)Crj3VN^ z0CH_HNVdoSi6&=xs9C5mg&w$7e4D}YKDhTfz%D>#?&9WkfvzqbVa`m~Ajhohv=-cm zDr7yUHK1RNTc4oiQO5aKb@mq3y8xI+lq5`o zyZ%oE9zTm01emGv%;$cbu$w>Xd-D3M-JzXT8ILV8n4loJU!W_sttqY_31!Y@0Kv!Csf9x{m-??%3*hw-(B2;Upt=nF8tNAjdN4x_4YE z&@%Gm7b{E!$?~adD@qwrHVw%6@bWP-cu|1k@V-SdYOL5NPe_YRW`!3^2=}}+@3-vh zI0!%{R3N_zu?W}+W8p&L>+KJgwS7IZwYz8p&hnMVy$*mtAhqM-eg$1TIbHtbAV?>L 
zYiFNyo^dc3&6LGq+*{`_%gSOzylz`EpbU!GWHmv#oXM>j-0dz1 zS)@xXD#FAJU}@2WKVKUUy z?I4VZ@g!b|-8&&H|Atlu&mPf;#2uog3yKP;sEPsI1kG_;5HkOk3o^&mC>3Y-JXSvo)-J*%v)6#7&JAGnUJz^a5z4p?h9DmvA-?(CRo8ZH#jd12ea^ z9C^mjKS7(RvB|qwZ|o*WJyxZw5#co0^WZq_|1ove;dND68^>J-7Vc2Ag|@V`*g%>l zZQ7;@OqMf1p-IYaVw5KCCoFI%ZUSdfSB!N9oO%wl z8Tyww9Kx}&@bx0?RdM!2o8{!L>j(%ZS%$upt2swPA@gE#rR6Lk=aJ2<;c@;^8BP>d z?hz(EZ3wXSXuB~h7_S|Z`KwjeKI2%hdjQUF()*Yfi*)w-c@%~kwc2qGLMqlP&%sUh zTBX}{*YTOxx<1zXJ@JSbrwp^#o$=HOnU8#~Nfy_aI}wKZGI~VplR9gs#kMEGrXAHl z>>@<^o6&9EM{b9hBlGcxu#-nSQcJr3WSF#lu?}H?(R^s(Q(#G`tZ~JO0tDFFmWy>y z#qA=H4jZ>+xWp&U5>FISkoXy=Lpjtu8&B;r5mt;##rk7n$tBlQCY%9B;4Bqlw%o*; zyk-2rnE;2%W_7Zk@w+J3ES8J0+dqKL+*FwYsyi#gOikP3bYX(dV=m+~xgB+qY%Ps8 zI2+DYXCe#Gffy~qa%}2~0U=M8^k5PTQlGuvBc3vtP!pfXX>nHY3=6uC>zo7QG)?a$ zOCw5ng!V(eD`9LWHphR3$uieDct`nMfaBk2Yt&gm5<45P*I$6!NlofnEN~%2+Q?AB zntPGI%t_TTmb}=tz;8wmR=T{r4U ztg>$=l8jHQiakpKke;rlI9~*bl|J(5OYLubMZ6+sI!dP2yYsQaWsr`&p-ZQAXNho8 zcWTZrZh6iPAybbQNyIjyAyT*k>-l;j@pbk$<^sVVodMf9Uz*U{OxAn)=N6MT83rJKQ^7 zm;H_5D^~8?Tn|BP-3$nu+<@B=u-hi(_nXByG_2(?Lz{b}y|}ESekcgL1B<$ytoYL^xMmO2|j^{b1tBB`Te> z9?1T!rB{o&!j53m5u3$p{{%XMWdD`$Y94}e1el1Rd+&KT^Hb!*lyZN&^<`qK%>`zm$D}c78tt?Xs_= zU+A1;1p(xgW6b(PZlqaT5##=a)A>gGm&wONuiD@K4DbuQhMOoGbQ6Cm0Rqs&STp5z zHD|I-Z;&iAFBR)F)28WV;+8Z38QzJ}uk*L1hf+;j9Q+2@1u}ld_&DUv?CXXusVT&X zZ^1ZiMK-H?<604(rV!MaZ#o%T3nUpA%zopuWfs z=!x%2ObfP*5X(Qmy$o4e$e-juKnG8EGttbnoC4G&}s^I zwf_KJ!3Iq>(nM|-oAy%l{hG;sEo!jNCHuSImpo}^p#8m5f54NoQ{LXyhrwnMX^~nV zeBB>m$e?WEZShm~Z=H7Gi*eP@SpYiTrQ;JpSuu3dwq6oieW|}``U$C}DO9RJ_U6V<0Z(K)+HT$+1!fpmfK!4kCx z-n5;x#IN{r*7~`NK#5d}ab0AJ*&7S}FAImBFVTOW{09{A_cKA#4s85G1hpY~y>R-U z0GACFHO>~rd@@JECeNE9O zY^e+`K6%p?&Qf7-o|L~N!XG7bgv%ENJ7JR<3f>iT7|9hC=@b357@Wm{DwaCJQy0%Z z8a;eSTvvhY!10lnnK5D6?9<7V65+ozqsD#ojBz4CYlzil6_@UZ_)3Y##oYm0(C(a**|afJMrL zj9CqriqiklaBb%)9sWl&8n{T>E`lVLGae>?Xi0@Jp%?Y+=trw$cs6%)WB96Iw^HhJ zX33&Pv;{tnlZuZ7-N1R>n(H(g$0}EcbT`)66Augd3r4XxX^rg9%Jz7EO%Q!WS$$Nm zg_HKz#suspxt%w}p4?w3t_@{TSF@2A_X*=2OZ2hSI(ExI+YxfRxYaWw*yF|cOLXVZ 
z>0K9`c1kSRnG91Wwq378Ca*~7Y<=zZA)WMERAhb=bkN;g;bir;@CGmz(%@AG4O?r9 zwnrvwsEx&RF-}8bxca@)hpFyG(pN09A>I(jux+A6PJbbtmv}=Mp;ERacg2lBj#&K- zwht1?PZ!PiTW=Z556Ql-mf4>dga<{+i43)O^^KuC8nR8@$%B-Mm5LZA-sM-{5U9&~@J*|kA&4JNSDA)cb zY}VxCQ4y9QZ%pgtc;#W)SDf5ocAtl`L}{pLiUqgCNdnj|oUP@=R>E#{>CRd)9!mXC z=h3|Dh%2|k4~f*lXiq#PL_tm55s1 zNsk9aILjC|iB(1dUFjH;(lzI%C6YxD%4e!m-6F};i?_$z(*L=3u5`-cY}_p}6%1QI z2xjR~FiuROTNIa-2#$>^0(~cVQh8X9cJ!Bay%E!Cn$Ug?L z;@Gjc9nDnj?Z!PK9BM6>jAZBQLs2gFsk_JY?K2O&KiVpj2KS3{>e;wdmp*z2drBr` zdhL23Zru^eF<~;TbyiIMJ4D)KnM9yOg0FK#Iw)+{%2wMQV*DK^`J=XsaXXnn2NgY9 zYVM^XoJS6S^NFtI#yi6~&Ac79uaKj%2Aan3CCVUOn3GY=x~BvuqKkUVIR16r@6=-N zs&X(wL&GKx9FN;cs!X)8uM$BY&b+H9?n*&&r~IvwSC+MHQ|SI0V$jn~_%(qOrj z)jkQg^HI(9HSRWcMvpiuPFqKuBBx7+8>E^%#IKY6HB;mGdo%#;v8$`H12qO zF6=%m->S)>jM)@pr5VsJRu;ZmtuMPPjEjT{lWa@p<1~>Zic^w4CLwR_miggEZ1P?e zZz1e%KeYD|BZtS6f(U5L$f(#8rv*eVlVM_xC<=x-4J+Nfvj5Sx(LIJZa&IV?WEGXI zJ-*l{BRG_1O?1n?5YDuwfL*$ke$;+&77u$k*sY_bVPrc8>*y1+&;-A?sH}s)s)r>1 zYy+f4R%m9Q;)O)cY0mi99&4TV>#U=<#HR;sTD_N&DLp#Q5KM5JtE!n3s z8(xf4L^yP5)3Y}|rWMBVPu2aPXM~XvE5O#6zCVC!&OvJIB{v@R9-|U|sIY_LjS?+Y zt`{@dt~wL7CJr%6-V$4`y|G>!-Ymm{tmi1O7sL@!IZbcncHGX#q^_1ujjfM~N<~bg z(!&+>X3vYYU}z!qz#o=~@p|?)w(r1)t93ae)-s(yCpoz%!~Tbuv}#i+&|lNwzI-<< z)qnI*ZCIAbyCTye8f~2&c=uLA!Qi56LiHG`gk2No*JuIuy5qW+_eW8-EytYhN}Wr0M4m zhjyZ8vg4tng1ban1QSuw*x?9Z+Q#*SL88kwQKbFs%GBa0Xdzh))pQs z7IbWWBq+8#5r&lYbu`5MlW>#d)?T*7E;<=t$<__t-ajS#y01HV*Lx>1_SDxcrFLr6 zX;5i9o5k!$M`Oo`vo}8-!!Cv>=aQg$<1Aq(bOMvCO-=`rton*+ac}A#HezH9Jp-O3 z%1}rA{TzVHi0216Z!H(Wzp8w3%efAMhfw3`^ZW-(+tzqT0P&0+5_8VSX?c`MIjEn5 z0GPB?WzXTr3&4(p{INWkbs+@BfwBgj>)Ar~&g`u-eh{|z2?f?evGzp}Bq>o#IZwc{ z;qtq$PFyd}6jE18!xDaq&wsT+M7c#$5^~Tolb(;|>AGN>gQveuK;G zyCGeKZzJq{qJ<;vfZEGpTqNCCT9RtaM^`{OC0JG@Ep+pYSeIOx(Rv<;Ie5|yzAPFk zj;o4=uf|DX>7Dl7=HgDv1eMT%Xt@T$d1kC;sz#89h;!sD5!oEBG$eUj4UZDQ z?=F4un4DP=qiE;-o0V_1_vC%)$pVghSETl7`X8%>_Q>7Fe`%|wNg=Jm?J(}tx*BA3 zOT*6TcV^x={l>a{oG*qnOf8Gs?!xK%G?4?sxaaN+V>hHXZoDV^*oPbOofv;XPhNS|XJ53R)O*HYx#W?+w=yf-~&z`qoBs=7R%yS3pLl(+&#W?7EVgFn_`5;)8 
zS(E>F1uQKY;L)X833tnfvTs`S;_Ab~X}!|Xi~MZ#2poyv3ZL$4?I}o#QNE0u<#v_R z_7=vQV(WQOX|ZIt?h5hzW6*WfCU&`5k@MV$Qv zKAgbugVu?be=-ZDQH#5!o&u+>g@q%h=Ou#s0`y|fh;Y0Zq$tJ@!p=vHo~xq|n)(cs zlR^iD)|NHgH+&WvVIUZGv|^s1qiUe9EX1cGsQbOiIKBJxAcvb=`{5?VrJ`JRsG%0ANl zuXc9zei!A$*0S}88Q53-DXYgA^%`z!3&xf5aqR1v&vuMLagpIo2sdd?>nnuFgOY0YAsDUMW?)mOxjw{RyFk=8RqfRK13m&){D`d4CW^s=}4D=oL=G~#Iy z77iC|cuhPue8*q2L>ONaV}$%ada0_bzm}MORWt6|yqg883(37P{5=R{R8<*|%IQj( z?Dcv-dJY< zhW7_WIGTn6rc0V){Esk*N-7xV$nDD88aw{vKl+*EH^`l)PHV-E*!|~>Y3Li!s^CO1 z#7x&nRs3qck#?Dj@xNa{PJFI`x}fVfm;D#ciRWH3tB>~B;8&=$rlBx|b^;yRsUj_h zaZIO<`VE-Y6b$*YJ74yD<^g@~DEnXLfx5uDHs**S1ijirx3$b4hODL~`FsBaB7kPp z_7+}n>%ncuWWvyy+8?N02tLcd8s7tAoO->NBWmS^p`6fKRBK-lB;1x?zCOd z8?n*57{uTZp&jyMQPD5f(fCepT(UR}LgnEb4nF1yIT#GlQUR7;0)jYZ>Wci5*~jBs zi!zCJ|57j%r>+deeoJS6=cH)GA-$NvipVA{;$WB>y9_K{#`Nl#D(B#lq;4>4f1cIC zKFh*88CV3Nhj@262$C`H3U=6+$L%u2H*HC~)m0+>1scp$kB?V`u&m0s`~SnrKvJqn zM>MShuoR|uC#rI1i*XZToNS1%NtFc>;tYD^4xsYni;{!!v4+^B3!u{7k#UK-?iYJ zhg?1}#B{S5`=Y68xy_Pv?TkyBXD;6rw~Nc}n`!qmImPXaE!N==gv`B%I7x079aowu z%|zq!NMP z|HL6fR>v~Z)xROoeazOQ;#(m;Zek&bzTA9uXun zV}``Rqj09f1j8U}6k?wvk{mYY(6KySz-eQ0LfP8A0@UE^xp%Tzcg=~;eOdH>=RB)XJ~A)J;>6;@H#?>uf#YwHtyot9e@@t zIzf#sTvz#ONBcOFrAnKSEvq68*PB28#oZJOnXy@$zNz9+RTjJU>2S7C_XDDBH z9MDm;(2eO_V4Nt#gUSiFumzBYrqpxB+dY=Cb@vL~7TY8lIEtkz{U_!(Vn4YZDATwC z#scD0QTETKCmN`~On{(}rdGv}i8x(IQVQYdnX0x}RrUdg%Z#4QlOZgd@r;)D2s(}& zFQsdHbW(LjaVC`*syt^Z6nUYiBZ)Ntmrg?;9sTvfu1qr-7*o?t4Yjb2r>wIx?iA!B zEMBSl#Q&y&kuTxrdt+D~D9tHTPHjYAC5ns<*(_G8&;FH;sBCdIi*jBQP4}ci-YU`w zn;Dj;iT`w-Fw!wD=t07lMrJNS|L~POUJ;C71l=!&^Q zB%-`NZkE$oV}92W9}7EC#aM!4wbqiXk(S*TF6&xYdL3XE$%ieAUroVE~*Oy+a<- zY3}W7hz>!dR#_jX%V}xXChx=d#+#y?WHfBslO6733UHQsWluaPM0q@RITM^F+)j3D zEoTfLG=o(w#z%PzPN$7Ql(Op-k&2E!?BZ6Ss+YE5R&O&v6SFh66>y;3ok4A(9qd%f z?0$h6YgLKTY+Z(4nnV#EFBbR4S{+#sXpE1I9fci4u5%VoK2xo_pqx?Z&?KrkHR2pI zDtR1K2|EwDymVuRc0&*eC$e0&6L6q1Dx#CPgL2TuSU zacwWy!DGCOHvL+On4)8!nA?XB%WN9Ga@EWXQU_v+AgO6(<1!8sNL3egE4!6r4GoH4 z^}{=rI?~jM)wQ#soDLrA<^`4mGn~A^v(-Uh!tIqB+cyHPc<2!7O(157EeAt7<+>5Y 
zjW`h$d|73DB&R=Mxzu14>^}~LqzdW0pfyGv4sy22Te@P^BY?!mjlf(Sd87gLqB%w% z1#sfacnFy8H%=DiZ>y@R;sZgaq?eL!i6xH7K9Rvc^&o4Os60B~GEBVjvG^iN>_2kf z@i+j9pH>^!&B5u!>kx>=8=3n_B%XO#q%$@<0pMb7&-H6+blr(CPC^yta0>CA2*R6K z6C0g`b8uOSq5xG*dzTYMy9BT_REYe^{$fH|Tqh@z(k5qX{3}I%%y6kw`H#F~TuMp= z8wm1N(w?!eTxn&-4hD%RMw>V%tFD!DjfGFkJn1S(ze>05OgJhm%gyMNn{2Yb$`xAy z3xWwP!-|qU^&eRXY%Z_}q}}~4&e3=-&~O%nJGjJ7maM-N)^sd2SY% z*We75jpxBB^T}%@3ta#ryI2*M_J6scg~qdlm=qFL8L}>qs|3i1cJ(&65VxhF@i0-J z*jJpZ%``fy_bviDtbs0G;F)qUIL$7{f8LiLc?l#D^vbU0xdQG4YwF@kA?G@IOg(ND zkyZWpyEOCCz~*}~CSDHVIN2HLh(=*TM2tKke5w$UaR}BHR|$|Cu8_p?SK@YV z*kkGFR-@iRoZAHpxnuBH=Cov2he z*u48#&{?hQi~k5YH2PX*u4~POa8}t7(_Eyg#4v%BRh=#|1R<3_47OhbB5lJ*#nW;+ zv^rj)#rE(GS!mKS;oJRhgdw$$T}%VW-3)MAw%#^w60$7P#aS`l7KitK>6k5k3xJrm z9vXYeX>qd(L^DO$$>u;+)p_Y#OFoz&Ma6AkLh7W-#)Y>792MH?%*puR#Jk|sy(7j4 z@6JB1W)m>hxF_>Z=-$z}R-7}-Ng0FA4pFu9Vlb$CFtV*>E=YC>%Met&gka^xulGOSkK{e+`X9WM(IWozl>t@uM6DXOf9MW4++ z9nYL*W5L1NtbK=@MIf+T@^st-CEpATvF&sIlQmI8Ps|qbx7@kd zvO@sX!g0a6l3LA;ho~nKJF`{;lGP0HEz9zlM`^!*RTZrMKGJQyL?^f8vGeI1r|(ZEu_<_+8yLIdMFE&o_CZW0@fjyK)}IZ@d6 z(HTv|+fa^pqUy{N?|?~UgZhnC1^o4>k+Es|7t1=PobP7eq4rdW(}bO$vf8RR={-ZX z)1(sF{63IGR!psrf6MJ1Ax_Suy<`3D_~PIEVNo|FSCbe20K#dTpqbk255bOO0_&XL z1=9-Kt*z;sK7w%eWPwEk*PEhJiD35=>?!-Q7_vc{e_^)PQi*ddF7>rjMUFw8KQnBvIM{%Zx(?6hF9BpI#0_V+Wj%KPUx0B3@^0EIf{ z+l*+y22x9`@*RZ5MmCdE{3)U=>1lM#{}N2|-N}nS63$!VELrJfVZW&Rdl=W2y0%#P zKOl;{n|^M%+)jh`c#5&W4-n3H69#X2)t-skKui+tF!$%(Y*b>76hn74y*#dxGp&q6 z<2@nE7|RreSn|j0Q#Ryj?J-P@^S}xsZYp`8`_S3foni?4#rxw0Io-420JxL7jYfzHT?`MAlDwOmy;wPKv^woWc6 zC)hF5-z3K;)bus~n}w`;#fnN6WdAPKVw^VbetPKLMUaCj<*~!BINf=nqu9dUHob6* zNHTypI%1{Y0G7u%Bs=;Kpv#nwpu45Dk%#BRr8R+mnNo>sL=glUmF;nx070-VqlwY< zKY>mU);4Ghi!U%L`Jac}v@rQp&{3qS*Twi;9O3oNWHrCaLI&MhY3v~29LS~u8b#FJ z7ercIoMvJpe-W@#xJ#e3C)k=?9Lf?zIY6^r9pB6qYs0=An;=?3^8v{R`X`+{Bo;nQ z|8ogwi)Yi3o5eay0!gR#TH{v%r-IIohv$|8J7eWJh9eqo+$+w0Mh-^>a*L(GuFG`A zvguzZh9DEAtdEw-{%){c#dVg4u{5hu?Q4rCMWpSiJYE-a&O7Ga+1SFM`TL4cj#5=J z{`NPpv(QG@K78fu3!ThKSA0KF)XhHjDL#@o~P~h#;{X{-l7z<9rx7#up`~g%O!5 
zwB%Y@c#~;9662quEc|XHPeUxm7Hh*<5*=K`iDAOTDJu%E%k6x&W3n_KZ;5ccOrG_^ z;8$XhG^;r}$XC|^rJVs2y8&NYIAUE`N5Q#kYqYPIh0@f6j(;)US|5hYp*lpaH^}}* z;Zt%Lx*?23qqT|@97k;gj*!iDHW^ z*etv`&^h6COM1&PF>9mcjh--#tU*V;V=b5nk1LZoQH`mp9#BL+t92r$7AGZoyAUuwx zU^g2HLxCll8e;`4of)i3w=cm4+Qxxf^RKgl9W4s`P7yAY(k!MBQ$}U}YISiX%0_4Y zva@)CUt*h#&NZS#roAZWQKB6U`rn*D^^MK^(S|iyN&GFmEwshWZb}#PTY8{_MLL7@ zTAIB6Z95o?1#MRZqa8?bjv_}bWFN3Z=phB(so5SME?(kH29o1M`6E`rbk-;C05Ldh z^$x1QohKH7Cgz(z67V-=MVZ9hXh#SaNb&^79>PvlWT}^zJ z^6V$%;^x0+!6av3|J*4HhT}9DC7!r5%wLSUvsWkyA>RY_kE7(8DxGNjPmCr3_r)<~ zIO#NaH8LI=hugJb7v6PVV?5AF=L$e~Y$VLzDi9_gn+aNCwLM+?7qG#0Nd=U1&xSiT zn{N=|s4=&Yr^Pl_!nhDQjpm?9HyW-L>7e*7*|NS@lq0WJsjfT$?4T2K+p)C>d&ue1 z;KVAh^RKO5?HcLOm3hV9`o~c=qqRu!(m{@!l##m1$8wvtQ6&;#Xa{FN?zG@gu!=n= zg9p=y@pr8=N|ZDWSOsBE311W@n%af@#tfo)c~~Q)nF2x z+G4yemb6e6+T++pf1x!tm3hUyA%)t7g3-7pLn;#L0`c!=pc9@LTBjB7(gGzxa43mU zW+BI4jSj+p1zpX`wSf@7wL+-M-WxmYpLrd}!tuBqLb`F3|&*=8r^gEa-CYX2fRG`T;Rc3r4v)>gGkNUMP}Um{o{EA8zu+8FzDR zI1`Z8DGk#zOBlVT#+cZjg~d#!Rj2B^iE=`<3y-BvrUdgvTFN=i9b#Zs7VvcG-aIah z)Th_S4g;Cr7V2?N>?*>EDC&;PQnSIXGL9jcZ!U09=8G$11JNpMIqc6i-r<5471MSd zx1oP>h(A%q(=c(mu>D|FinMcd$fVG|~al%@LGEl0IS2`Szj!*g!+X=cX zw;2_03ZjMtBYODhZy<=!<1=;(U8*y8Dp zOOLk)-EG0n!e@e15X=EHyFC%$jA8qLwds)}oF4o)#(gJ&Jz?V1y|tLCtF3Wz=8c=5 zow3R(VAVi0kybr5b)y0~OKyK&o*q3(*zcSU?Mjv^z)+lxXFxjT)G^)>jF-iz8&Tc; zKLMiX<9&qkGjThW{ar2XG4vnyDD|*-S^$sDon7&Na;Gf}GY4cDtGj3U|2o8brrc@% zsn^(MI6EWgGf34BA}n6+-Nw1+WdEb?M(?)Wxh0~NjhQH%2jRRieq+Z<`v;eZBux!0 z$A6cbw(`;u(R@Bmm4#NshX}xzO^*vxUMHLJoPaaS9!XbxD(ro!(xY+;;MfcJ!vgf{ zK=FeZ!q<#EPP!P7c4DbHz)MA?r7e3SYh03f;nD;Bbz|&zse|AomaRGg$B@?*vCg

1IoaV{Q10NSe7G1NL8jQNvKu?B6QNa1lGrQD2;bQke zSwIue=WD}W6ogZV3MC3=B@ajtksMJXIAo-HY`d0wh1 ze8(ej?sk2!?LD^@=Gp6qn6z+t7UN=ZP9O7b%qkuED1_73A*(kJK9+f?$3{_G{8xkv zS9@qYbXtrR9*1+nHT2R}-w;EEV3(3Uto8|z1EnFi#X?VkZ4QQWA9Xd1)(pgK3f-W; z9275g@H7zzV%f))YPp{YyZ(>siOrt@IRn!$@W@lMBB%uHDDpg`0Mg~UaGdk3|5*RS z!!C^DpYtE+V#IIH<4#M>=R>Ez1mk*z3RArHGLRUt&dCFU(V#jTpu-M`8Vvh z6(z(Gayur*w7z&y*y$_y^;Bk!Z^Jou-s4H`2o&CdvKXuC;uayNrFL3P{37TuXX@6} zqVIx9TMuV=2gvQVnrL$-r!GIe2TA`lY)s5}A14LCQPx1L@Na+v+EoV|`+Wc=R%&Fi zBc2oDU-dxh%=h}p|FZSVX7}+zL|VqE{<7STl*LJENacsr1BVV#`7s{ShNgi&dPKNL zSh{NypJceXl|JBJVTZ~t1!k+B{xl1Nmy(j+`y)~K zQO89kF2+uuL6Aq3k%{6rA;h7@d@S=hz}cvzo^&PF_|FyT0;^_clbh{WQ~VOn2}A9$ z5Vs1a%|dp$dCXLdB$>Qi^7~gHWQBR)SngZglttaNSm--DtEa>#ayo>%9J_4;@t^Nu z9EZ;GW8MD)6UJ07kUsk(Zr6^j$Hc2b-kT`<%iaV7!jeDnkL%6IEr&WM5mhl9ok7vSC1#m&w?zcy6yH0(4{t2H|v;m9aW+jUT6dD zasIFPazS;s&c^)ZPQO8r0Vxi&#)#iR6oP6_%n?YdN~=yZt`p$`@UoO7bf z)KnIFwNFJlE$qZFLGO)^{>=Pg`YV@bG~`tYkq%HB1&Mux;R}pTN@aR&OqSEZNVi@) zH?u^!bhsj_!}SBjIF+m+sa8k-<*ObHP?PIr7cBXa1(eu+p)8nYsgC8NSBwjY1B&=o z&}GgQh+@ny{oUHaHhl`&f711srbY0b=2x5jEX;aEQMsnn#>sLfHJi!PxWxdDr-9M; zi4tUX8@~z?5V_UCj{4~f7Kd^T8$CMemjJmklup!$ZddX4#oYH1p|r;8n03UGV5hHw zJ?CcDTi-2}eNZzw)>;~5AvWvwR9qs$UtriGF)X$7G8x4LyIp#1Wujaf7$xACCR)Te zX1&=OM+*;@a9=)|;*44r&aDKa4v7idZr?I~TmSg$CZ0f!1DDG%+fds?=L%V9+&rTO z(L6pAm*$oB8b_=Ic2+tP^L@vPz(W)B3N38*2(S~)R;(-)zExsc`q)e6okTGf=cZf7 zMXP`;jGlSdBsUm-Ts8ABeiBbAfGAlKem4-0tZNVBrp0S=x-@wxr!P@$-fBHKdc$&h z#tqlU?W|LG718TrkPLTGdSd4d04}NtI?~%;*nNkd%hFy;A^JrUUU$nttg<0)XKx%^ zziSCvj!ec`wfDr;qS6{@J3w>ASO#>KoC+kjwO;v)KMv#ga<7f>M?6}tz9=`TOl-lu zEZ`uEg;|`##;ZeM(!POd&N>?d5nfGwl*{RQG9B};LJEe(R163>v%5;+?FeClWz1&F zK(cAw#NJzrth~Muu=n;H+x*XM3MP4pxs=W9-k<%)lQzfgK>~Y*yu783F&`^#VK0o= zagf|3L+4BJs(@w2`i+sa$ZHl*j;exc6^{+Ie~e`g#FoPVE)xdoM6Wg$HJdR`QvbqYj)C; zM2vtSPzGF{vZWTLxS5L?dem(7k^Dbc)tO)9uy@Yw0I^Bs$;D4{yPNszAvsNL)lm*x z6MU@x#Ji)x&NCB!)_r=4v9JxK^IVwKg1P{A3dTT@XUyC#Dkl+jAnym|cJ&xf#&#GB zOiM`i)YOhh+7{bFTLMYjDaJ4{loJCC<3j20rrf-%X|@M#mj&Be!|Q*7PNZHL#E1-4 
zsIWccVE3AA@9|cQID+leJ@7qt05})rQkErS7nCUaE8YO_m~xohj;)w%Lnd>@70Y2QV{BxX=Gh)?!lb{C05+Ly_qJSw znj<$c*Hy+^Q*c@&ZD^h{;n`1&vmx_u&4P|5dApIEwUsE=gh|^zLX_o*aeI=oeRURY zavh>Kt}8L<>1(IxRxy%L19~n?P6ebgE$Gq4MEzvT8i&ictpyvpz#^PpE!-*Or|Ys0 zrBl}_Ge3=S(DclojD8@14QIeOBlTUH&UeK&yFt;uwQW+2YlU10*}Nt05$TMf5;}6r z_)>%ttsQ_Hch5f2dcD{rgCpDR0cinNQgMzDOpBhRF$NamMsdzX8Chd-^}HC0q-K1~ zFMVI#SRU){$v;HMSx;}QFW?ep541hDP5;ycF%B1SS}{(=#QAs;X|-baKe6N(hwKIE z|qcWJ|q+d=`|1Qtc=eoqnWBu>$NmK)l^WL#&e@o*;vE~ouz z*D-$W0y_=UaxH2^=XArkUT`v=8uK1@0Ho^0VYl*&i+fUd*5UP>k1p1i1=6fnnVRGD znJ{ioxjw0E=i@d}j%io!P7u>4Vg~y??F{kZx8jhUHpLe1&jMj%Zl>Bs)hsC2;+o_k ztKCI-0yBIBhf<)y`jrkb=ZnL~qepUj7YiSl`Jfhg5L&s!<*_*4E34x_i6b4l0si$t zxDnsPs(9^SoX!`kjKti)N1`mzelCc_@Ix{$bUDc`>|{|CMz!qGUMb)#jHh0A#6u!1 z7;2ZQLFeHx4w4O$&iF>ynWQwgIs)VZVj!hA()q>WM`oY2^Hg)JE9_Y6*mF z2OH>DfAK^$D#q8PP&D_9>yP3iN6Zs~dma6kFzEo|pc0|&y^g3k1|RM|%IQ_FD#2+? z{iYt9B>_ztWKSP8M;vly9FMEzcD5_EkF(!#U<;dZLwX|djVQe5I31cWC-dIT+gEX% zu;o~HK%erQJ3eD3OJl8*hgD!4Bvp_D2%cI#0m^yYW^_C$avyN5d zrWiSW>WZN!0i4xoNUl}T@@9XPGP_y?AI?V6^bNTkXuX~#{^4Y>Q>uj#GrHYRfg$L| zfyVY|6mVE9q)_oVF8wz#Vs^5C1FK`17554cCZsXmP5rX>#}3u0*?;wgPW$*oe;_tJ17kf#xD75{ZYzs{XQJ{P%NlJLl*W_Uax)P82!fg(WVQ6{F`ou;b@qjbw9# z7=MIP8Iz6nIHTl=R*2Q(SpIxGkwNBM15p%k^_apfODvgOkOh%!R1;6XD3ZkvLcAxp z6}boD3a^*i1G=Q&a7G6_3BqDjL6eN6H50_tFh?9TKLjRx_8 zfD@z3Tgv~ZqS7j(^=OQ`F(Von$ck}|2&V#(v@*r!(ziGZLT?r_bJq}j%T2HjUYj1O z<&TSU@MYSLjFWGMaOOG3;F0EAvXGkTnbmR#f1_JrE%%wMJ4XsSVBXY99(r2sHmJd3 zA2iFo-R+Q0s`Mu3&85uFyEFSXxp~Y2;xSRK%9T^%Z6SZeOoKTJ?QEU9;HVW7s^SDW z(?Y>Mq^4W5@P1RN(bKnw@n2ezR4I{T94?#(5tfX!T$)1vp| z%hKgR^w{(PprcI<#}s4n2eW{ReHg!{O#Us(`C+>adni{u1TolSVd97rWlp*wbQj8 zgQi5WQ$UfwDuh%94~c#kc0y%x6;;gE$6*{j?-KHnCYZN~v@q**KE2EnU?iQ>Ks`gi zU+4<Mj=~D{S2(@bzA_PbT#|p(fxss@NQD ziF-vj4O37JNFbkLmvx6?I<36!Nz`%EiQF!pkK>o4T>D2$vp{5H0WKl)Mx> z^n_^nD|m4^uug7?wf_YsLRN^o7ouLs|BlC^W%QIVTkoUevJ!5f*NA_-ivL8Kvt!BE z9TNQ+C*d0lxp}c2lkDzYB#Nwd7Lp~>zXXw>o*s^I*qHYms+00-VT;@%n zbJy6_t9`Ar#kh)Rn|n0BP2Yxgxi|3wHS1jV!^HVxwgWWY?)wf5X|JXL|NXAr++~Zc 
z-^cA%h*_edUkr@}KY&F}MP?7pah4Eir$5WbXCDJBG`7FnW5g$5mlL;g>E6md%?Or7 zME#ry3x}C&vMXNy8I-f4jlCSBgf@Ae6`drKm7d%rfyWJ5$*=u6$d#U5CFT%~o{1jt z1-xUI24OKy{tM9}ov&ZM#D`sU5pyI< zf0KPYgE3E=zzctyQEHD0?^ID13D?lDCRg%;tyV0nV7K$T%*%MK6@L&$qz$qiF#kUQ zr=f-}gh;0TV1~JzS{ZS4{U408!R4;jJPGaj5$Z2pUaEk%;!n_|zmsTpl$$!$#Qh`W zn@?ezS2kmQ=3iPox1+w6)4?-=(PSwupAY4>Qb+F~G}z4*60^($vH33<=^Ge2JK5pC zkc3)YAJ@p~P#X%`4&=yN6d7+PPFb=;XHw#6iM@9IPiJA>8 zw=h`sJei5^E95e*==Z=>u7j?vfqAwHM1gv2vvOe=oxIfML8n&(3Q;7G50E zoruQg^xou_Vx1S-63bFgX8s|X4Ci}f`skn~K`xHT%#sgX3g~vuydk;gkr1?YNGnP{!Fi*O=! zuBWT{o39F$PWxpGfl5HsZ?BepiIGXxJnO6u;dHl1fA!-v>>)X(RxdZOCWOnPtg}-` z4o8Ve1Vx{!~D$bhNQBEg-=LpzC`c;I}%rr`yP~UEl zF*)Xh%>UNj5XOZ_Q;<1pHnzpNC26Kh@6B3as7T9xe^j>rE=auNtK%#=iB}5l9U3(j z*(mc`*VY=B2+C`1R1U#unW5Rx9j6KN*T`Y9`^Gq3P@KSWoShu~e6dOP0ix@Q?}VLP z9f)#UxfpwI>OksdM8BMb&&el8^6l}uFkm3nLm^05<+ONKPQJoKXLGzIU@0`CJNt(q#eybryf_TED<3wnlZF0+TV@pI4V$&t z9<>#eQk0C(I|>mT=eW|IcxAXDodWk{e=L}Tp5kC6baiUz*>$c*#W*P)bRAk9UN#QOLQk&x=+W%vxKFG-7L!}P z`FOTOr6X&+B#Popp4C32957gY+v6NTCr5fk6vJENVchh2HkpVsZT(UowOZ|_ zVx7LafxHa!ej&yZW7;eohQiW8oFbn8h}&KD6>mrOL(fZgd2pZX){@YA6<*vshfeu zkPZoAhh5+X8<5@!Oy4YLOnaR0Ee_L2s z_Eov{)WJAc6*Tny>VbnPXM8$Fn2#~w8K22b{9IHiU^A&B{v*QQj-y^IF&*f@Wm}qo zb$W@RchN%bOEIoHtPZqk!GdzD8Cf`0^wlQ|I$IUo2v()JT9lJDMXK7D?h2-doOccC zUGce)|Lv5?x7=>vG+kXhK`H3)3LKN@-q*FFEJ_UAvZH+79vOi~7(?0nA}G(UyjHw> z0-S2Ducn56|1FX<&=GGmDi+ualtwa317cs)igL1~njD?N1jz|yd;C|x#m)wh_G&ug zf_<_NbQv~&C}CFgqoR9X{`ZuW#hy3h$;?}#E%#(oi_vaB7^gRTaXzjQ>EzIX7UP{9 z*eT%H(>I5wHo#eOoW#d5d7#C_Y=v*OD`elW&ua57ed6q?8`a8=xU1wTv0bItKTdAK z6ZNQp@+fb{ZBey$?#~W+K29!C{fTiTY$mj1zM2yAA;q|&6$$~eBTdJ!KOpUeG|NqF zG(Xp#k%RB*@nD2VXR4kx)AxcF7caP~4Q$o{;~YrQwHUKS@P*a_H1)d#ExAgWd4Hl4 zY{@CdTzPGbfi5^VS!U4eq9^xEPZsN_Q4x$yx`EC)#;AG7kcYKI*+*s9tk~%Qpa-#J z?vk3C!NbD2h{2^*KWP9I!2&W#C`RMYYawRJ3s2FYgw13kp)_IzU_I$eQ@uUEG9FH;U*7!R8 z3r*0*SnY8BqSVorh|zL8h>^pG#J+--YI6@qTr-88H}>gOUNuL+5H5=>P6y(s?0=d6 zWi7wH7!vX1`3ys~rN5I`H;YkyboPN1gBby@7vm^d-{fM|W5BA1Gh!b(Ek}+Tbh|eG 
zJQj-Jlikkcj>|sf9CjdX5GFI}g{Y}>fKI#?zYGE+j)zH`O6NeLAanl-Q0eO071!#n zZ=DG5WR2J1@#ZIGpLP~7&ql@AL6pNkpbum1#du5%X`fOT@1L6aFT<`Db1v+8LLoYK zq0oeHO3}P)+G${O&fvea-=uU=+04f(XTqhO5mRqA)|kjW_ZJeaFWZ#b>MVS^c44(i z=Sj&ZEY{!GGez~)>Tg6^1itKqQoAT*nj9YAcQzpHhI9u*I-0AW17}%qVK5hK2$K=E z{}99PLKcJW8Itf#&V_M{nt`eNV$3NKyb!~@@(dBKcHm7SAi&NHCMdAsTS z%p=yaVu&CflV$E`0q1WV&kM52!l}+>A{{q}9_+8*Cc=s5J~1nX3&56@jGAz$C|Tvi z4z{qSCZ+&1Wv-`+_Qy;Nxt%YpgzCZD6EDvGWZl&h=UoC$1);++)ZN5LvN}w-N5FaJ z#%{cL84w}5Prvc<>{}^J6WVKH9QA~bhKcc~Fk#Vx88WW`I4sTF4-ur^)nH@xN}Mh~ z)}pFu#h7-L{~k9k?!FqQ)96<+<1sN7*o0)oKYK2O!>(hm)5ZS@;e0gp#Yzv_FSpn`W7UTNZn5m5>qY}MqfqlL(J0A~Ta^f=j6}PN zq6C$AZjWCdfpksB9DAHL&%r-=E=j*v!2h-DHJB7)(n<=v8$O!-E8BmY3Ob*Zx$Mlv z$P(4S387lV)+MUgJHU;;mY81(r-MJm*J%Ljnhec}I z35nI703akktFoI=S~*x+7vrBIvYm;+_ak|Q_(_M67-(N!fH0cS)5ynAAxld_M3)cA z9{m)g1D3i~cU(~-${94<;|>v|ij(p7m|yz0Gf~w)=4t-5kZRe-rr(RpL^;T*yz_)* zpc26@VIQy1JcAFXMC%_tZ9=as(jiTvKhUeK=ZYE}wz_DTtk`;=&Af5HAvtrpM3h^8 zbpfMx`FLH7lOPjM`FKMFnMjP6eOvNBmB%vaYFB%nza?x==<+e}LKYsImD)*L>?IiL z4dVw2HnEwYlZu@-rZ7B5A<8YFAX}BQUje%!kLMozVE?-DtI%o~n$7)Pz~8V(REVcu z16wTg6%4~VA^WaG)?pGty)-&cujA7R;F>s_eY|`w%2}ps=EF5aC{KyO#Z84ASB-Lf zjxaqy8-3K(a=TSb;fDXSf({%jWsF2v+Ij=Z67E1>mG8HEGy7)5RwHBYx3X_oDd=Kp zEd34?F{)v4hE=(f*0z*;qG;BEgX z^HfbPZxwW$95{CM5XJHzXB0P8(FM!L`JxD!%A$>>hXoyRxia^Iu(H>`b1;Rd{nB_}fEvblG-_CE@&(AHkev0? 
z!$iF}yffb{#t8yWUOgAw;yqzUHk;EWmXTj(Auw+6MhjW|+gEU^#Zsq>)xHLiVU3w= zS|6W662QLQ%s0?3W2}|v_QCSs!8qoLg-&|G2Sm7tFtib?eGha> z+0Jc?3Bn|znC#SEE#&Z04e)EkxU?Aad10>*TmO)G%H_HL!JI;jJtZqZ8vIWG2jz4$ zGV90miU|9$;nhO?^dl5CtC?Y==BMm$X2vp}7u7#Qxi%%$wOyx=@v>M=Tv#fTi>#KK*atx`#(9YQTH#mnnV#Jm) z^0NFwxE&>9pKSU!$6ez51ydyLm~Fo>jD^8FRqb)2a4KXb zHmvWT7ekB_nRhR{NalOm0Ofh1utyzDJhV%NW`{`2vnj_@S#j8+5KakG>1z@@Xorg< z-o8{J zan$nmOi^jC^NtmOPFsJW6=nA)M5M<#U97Vkw0>6I{dgt*Aln?3#d<5_woo|k(4f9b zz>uOU9uMt|V}%?l{cQs$^s7LS#HJjlCcL*Tmm#?N;}r71zSVGq*xw_qB1S6(QMHh*$o?^s!aYz_~Df9C!_@u8jo!Nl#ycW(uE_Jg~Zp zUj<25?S$BRJ)9N^Z%$z-QrH=mrTe1hQcsCeV(tcf;FL_$TIiOIz|MtUV4|yCXh;@IT`v|Z zDbamJIcdy%>E=gl3_;e$Pmf#WbfRS8vWXEoze(o3dERAYtwn?*K_yTR6V-2;eZ>H* z*P&w%QTD`rVjIG1l$@k1)N{hakm{BSBOZZrk>?qVV9rI4mo@uh^NIN2$~Gr zC#_g~imkTH!ogfNGrbE$49;lKAM9+_p3OyKsfZ0!niaOPyJl)dtT#OSyt%!LNBJg- zaL=%7Sv)4>Y2e^ABAFm- zx#ynj7c96fq@|0hIpP;~qU&=5Ivi-(4u(Qy{@xu6Zx65#CoubEwJAf{7m9W39LN2W zRd&e2NbG=+aWNKNWqBMRoGtPDgs_sqKjL$v-!Ej1UBd37KR}P!Fr^xMK>R99#;&lG_DQbCF`=Un7&iq%Zs3VUOMc> zn`2D+FLMB98c24!2Dq@i@iuw656ev^XdEuI%gTpb1V z)(%=vC2e%Q-JuabN9oM1u-vhy!=XM*V=mme7tnI9)}C@4 zE&_?siYU;%0ZuR4t2z>DjIrYU1%iY)a9;=)FsB)PasPf`#+ZuwxFDDPg>qqAd{e?4 zVqgzWcu=Z-`zRk*HbA<)j~N;h^B_kzHQA0iK?Idn_qKnK+v)2-A(UsS+v6v3WT2Ci zjkvr3a0xY_eRPGOrIt&~_{6nhiXdXVW$iOeg6<5pQE;n zpXE%;ReJ+^gQLF%&I#ANwh&wFZz8pf)EPU8ASc6kk>I)XPl^Sl2o??7wE0g?KVq)j zB(kV?PM#J>^T7LNjGW(yA(NWL6VD;-Ku1i&pcF3>VIkBqYJ4o{04k+oRfxknV91vi z-97QO&|rhah5<&WsW%ID!IB0Y=fvg$4x*RUBdvVK0WgjqBRu7vSisd3uk6U%JnKW2841_Xs$_?9BA0 zjq`w+X$UMM<2L~}vKfpSYP{?9LpcRVoGm=zG=bdZUwszPWj%aoY?`97`l_0#F-yqB zog5msQFpEq=iqq!vJhJifF0vRCW=zutPn+cH)6f*!`XJD!W54kh}$t&Aj-850=iKo z55dwe6{Ad~=A;wj`qKaG<#C}(=Mgs@%KuK1CYWqMwZ=!{oFsOs2V$|q!NlLhmgm`W zTcoUs+2D_wqcYzd^R!2cuv0}_jc<BlA(Fge@Wv%Z!?s zB_}neU;FP@l{}*VMYr~g2uD#?AEQnH5vDAV?jSecwdj$Qvjr@&7ODSVk^YJ8nbuh7 zMEgXUtQc#Y1R&-KwJ}0YC%BfG`VA)ook-^45c{11LEKhCYZY=yF-2=Z&9N9~i*pFo z%v?S#!EsD2=L?cT?S916Qw^w4#kfBOoPV{%o~QATh#3&xLQujGFAAei+n<4Dg>4rYP!FOa^IV1!C2RE(W^3# 
z-F>LcQafjgvUsPk!aq*Xk<))+vM9a~L*nVv=mW2|Z#og(hwPjSq-wBeVCQ(FYe1GR z|F&aJ1B3M?Zt$IiSmD~tU!rfSDeCItEEQcd<5foy&S`&1}HVB(?qyf8rb*e6z;HFVH`!WXq$IM6E$yz+hEBPw~w1+=M=Ew;psmC zwKr7dKa$(u(s%XB04yWPuD8QFj=Z)I7+XpV2Zfwk#1?nr2a)G|**hi)6CWc}SKKP# zU|E$6UI2YkESa2I9wYDa*Yq8^1_r7L!WJd_Ysu3u`-*bTn@Lw8P7*O#sgg(Adt=N! zaO9yPBie!tX3ISQ zkq!j2y4gvh5B%goIP#eo8rV~Hzb=^kC@e9U?{^Ov4A=HnD`1cc$+ zp7=_@+X7O?ORlsluvH#`Cx3Yir|m0uDpb~OUBxb0u*-=}J2Lc05>s#Ih;wt8q_+Bz zFoK^_6<^6|L9?ZlDE&>F2W6R5SH+b=mSR<1P5dZGQ0V?Ay~*m2W`ECQraDD%unAC6 zSZv%}@-dEm;B6nnhwBm-IMmhhSuW0n?wr%?&jJ=d6YBTG-lzGvWZEEu7Vo2hP53pQs5O5w6 z%Vz4n-zPDkE6VVY*PvYWC-ubbLW2TH zk}~vl7#Dpto$*eBgS9F#R>qv|oHrmHY&~6Rnx|{>C z{F~W_Tx3nA*=?en0$szVV?0BQDwp;lWBs?X|2aHhIUNIHoJ%y#F_$~yZGS)yH)2?{ z2|2(P?4}UJ{1U@1wZ?F~&51^d!Djjbm#@auGa|)Sa(vL6rQT83jX0$mb^`abU*_0{qg&;*cqbw>u#_13% zW~s9viI<4OH;q#|ffs!O^wg`ibc-?mEn5An9@zd?z;bAyRnplH{S?Nvl6wq=*hX01 zi6eFtaGKeK;A(sKXa094uhYdHp92sAYxy|l3%kpv)yFwRTYSiH?dlwXPL%9GHVn zWPdm6#>DZ$PHA1Co!hN(ju@vjeFa(T*x$sediHn4XFuY$EI3YQr#I$9I8I*tn;pZ1 zDRCaC%f~+j93Dq;UDz^Hcn|ynYnkizD7$HNaN_t(in;OgZvdy8GoIM>_Y!1JW9<{JQR{?*M?hm*uE?f}avFk#mZg%{>kiSbbRyJPPfDO|^j1jP$ zvWHTOals<6PN4cJ=Dk}j3PVk^j>xG(mSY|E6%qxfGsLBt=3))&@K|UuD3Xh?nqm_H zic1>*DvL4cpKKeqL_YnKUJeWL?Qx5+B`rOtV$4_^!jeX>J$WWufqWu5?ZK1x5j6@N zwge>6&StQQPX(ORY1+uefKz-X&Y8|NvxCa-K&E3z!XTw`I{~W zljWCmH*kiYkMl%3jp+EHHZ1HkN?oTX9uVP>bRU~fhAa<*G<5V9TM6*}bZa)oSYhXY zqhq4zjKx;WDB4G|jj^sMg3um&Y#@L@lX*;f7inv8&JN>jJRz8-is^8B-2JzV=uPJS z7p`Oi-?U81>?sjMk=N3H+m#Ji9~EMnfODkyDfH=LT%>4-%bEiEfdOa1G{}<06T(hH zHAVqjDKc$+=wl7AZN0@>P)=}SUy3JB#Skvj_MZ4t$Yun<-SE2n%`r zUkJa?dFSbEe*4#)v-76h=`(lk#K_Gd97BE5ILo+E)_iUeb|8fwCIwOx__FMWhM9}Q zx4;jBUiE}ncT1d>vh=>#-D7;-K%{HTII6^pzsrJT;nK|B1SXiaf=km?+JhV))K?E0V0INeAE@&0-WuVY^e;yZDJ5hkL-lT<3cWBCJVZL8Xd!+2zr3m z(e9UkE4eBb2{K=-qVS5h53{9E|p7=%_c|^6Y6zhxz3=QQ&J13=hOtf1-UZ(PVo$6Du&b}TaSA9Hjhsk7j&L-DhqQz1bzH7dpVJHnBPIqbZjyOZ5$K*arWTb@%HwEE&B5tbCT2v!>l zBJs1;eXJ|s%+J)m*6zao7F`A=B0Am|-mhRkcDemDLl)g5%1g}7WO{hmWqj>7U7 
z`G&JFHTX%GUy2%P3mmxc{G=G?5JSdY@n7Lol6uxu=cMgbaJ1ga@o~1CWFyCGVeuyc zOQR*Z=<~V=Qmpr_5$sRZhE&0N_MO(R1v>I%U$7W&h#?R3n^dLWj?4b%s<|%UY$C>e z1#c{N4bD-gxS2>7S9M?fW<1E5qchMvOBXqp8l$YF@dK(+z=(S!~BAdH6-FQ^iFqrRun;FwQ7?%e+6_71h(SPq~nY ziYbSx`Dki@_4HM9%w+Z~np3cN7tbxQBItljGeKXHn7G_# zR`z9UM=OhzO}ik{GGp1svUmS7(>uV)MNf?F!4K&e-~i@exrw^1kZS3jof1^Yaa2(* z4;1l1O7#S2JSX6mlS|h~pNkqAlrnYuSg{w{B`}UF)+Y-(i#2SF55{$eKoD%QJ-c=v zzzx@1C9ID*jTXx{GIhuh(iZ;~L6KCkjN*nyyk9aVtyOyFV~c*U>o<=ejuxUiG)#{{ zIf?A?tJ(29V>ZA^sh!JIKkgS{!Pe_ELR&M>h;!*rDzvu78)ZKz1#!B764i@y@I!Ro zgMU}ZUyP?H;!9zo@9d0UQs<^y$BuJhT|Sk1HK!-~2VqECKaUH?rvjEQ7i_3XygNBB zqnHvTdwsQ{kgaY|(dbVRBpqDaWO97+p&(~ZdZw6u6){vm$@PF_3;w3Vpk2qbD)cqp zMIwhLn60D6v4$ zQmSq*w8m?~PHiP`SaGfFC}WB^>^d5X7;AgtdO4j1a=@F5dZT@_ zV>6P~t;}c8*cTm_g~JK~wLGq59uMV+##P2cWi)zpEOi3^S!7&y^Icr(?E9i!QFw>j z&k3J)BAn~_Og07nC};uJv6^Mzy+Mq#;MXThQGHV8se6DqW>@ShhDIJ6plI0iZ?V+r_R9-sJpXkoIVD zr6HC-C;LIfD$%7mc9t56Eg&nQQ$>i~u@vmhuQ+HZ#z9Pen z&-3EeE5VjcrS42?b^Eb63!|NvG6!SYRS@KYwu@b&y@k?x>W!6eulE1aL?4Ksg#7tr zHsyL^uWKM&tFTCxRJ8e`97bYpqCegjOJV%^)YAKdbzhrK|`*m>0jdeCJW(cHl6x*?oE$oD$n35by>@pvU=E`P49GSXV zN5_qFJHZ?VvUL<6iE)yW>6PYUYhDjWY|WaCydmIn9<{D(xB+&WllTQ}%T+pO1qDEtlzC8cSP6I3X-N zsAC$!PQDq^vS=A#2)Rf&^F&3Er!8)QbgrwDt&eCIgUFbCCb!_IF3o>}a^h#P1Mq;L zqgQFS#um3ikQ;8nw8nsdJvVZMa-X0*S0>uKUy4ZU3X3jj26x{Mg#bnl8-W$b3saO> z=8ETq2%Cr9`{J-W0S>!{rQcVA&LIZ*^4wUrU)joue+&3eo_8$8PwAhsHWnin@Fxv; zT3wteC-tF*?t9VQxE=mZq-5khK=)k#vyUEs5}oFd4s*?WfzD7p69w%stu0D*OmD@O z|JKo&w#g=oB2Rf4$$z~3KP7QM;fO6;IHS1aup%78z8!S!XInoh`c zU+i_iL#HFzA=Z2VU@3Izt>N{Bo%R`OxDNtJXjeyPtoD%IdgWjfx%sS7^_qCD6ZU6S zSOwqa&p_82j{2q6Fhz`$oF`?;Zs7Og)WDOAW`{fsvY2{{ezSAmN1&WI4trYTKw*z7 z)lBA-Y)W1yJnDcrAz=ufx)4N~_h4+Um)p^zuGu2#$I*|$IjwcfI4%?<6--yFo6uPR zrK!+7h1&h)<4|c6kQ!2f_m*Ltu5rBj5`#}>A2Ao?6ao{yPeBoCU(!)+FT@8L4VB<0 zg#FlZW>OlNXg6uZ(^*)aKy*jHsG;`M!3o=gf60hsbcm-!IMvLzbWbGieg?`p?<~;s z==6Mr=Q57n3oa|MZ8J+8(&|KV|9u`eiPm+Sd9m?dGvCQ|P&JaV;+*rz=mo^5!tOKJ zdDM9b0$ch8NDFKlhlk{MIWf)wvnP%MHWxwahBV}~tbe}jsSW)r77`+eYzo^R+VER* 
z5m?=b>>nk-L2A4n7K_q^U95$1#6{NZSfJR|Ovgyd&0m!$w>& znqLB|k#hU)^_R0C&@7<^pZ^MkC7#Uxk@yj>LZ!WZVvL-^j(>=BWot#>e)rch*ubG- zlb}BtdMkr{O_3Jg6qIfW`Hk0M$atGhKtB_3!D17RnGlWtb8$|(HojxQ8<`*85g`Z` zSZ|A?OY3ZlWB-ZM?T8^r+puYa6z~3wHH&H{YXxoz{woWO-HaR;>H1@NajGil9^_*q zA$3^AzSh_;1*~)W<46IgtWJgqo)C7ip=rv5_SCmBqPa`gw=Nc;kdv~zOvu7uGhLVe zmwp?@A@[-|zqhBr(43DLpJ^{F=cclsf(4z-Q9fSP_aK zZ3UUOJzs#@Rbpw?9{T1&O#UPb zqKfl>DovvpJg^ThC8{<-;;XN1h(Wn67fzXz7aQoZt`wVged*QUSJIEVFqu^KZ~k_@ zW2P#T;Dz}5Q#jY|skJks@-v`wOx5A&>{t>02IG_zCJJA^HR)-ICBASllvKsoF(aZ!*m=@69&(&rq%@ShA6VVJXJ^4>x3edD4IJ=Tg{e^SD+hjZ_!t9ul_9E77H0{uiJN ziH#|=0Whr?<3zfmq?h3|A}_JT*dYPqRd9;uxX!fKSAWpbej7X0;D3x)>R=^ zSQ6y4r~1tK*hHLD)|=zl>sDdDAT9>l6_y4e@3F&U6FHrs(G@XI$kK0O)2lN+5aBE^ z`D5#eAvWeO1L+9SbK+o|mh*}@XT6kbi4~Uxy25ZPR(3&m5<>wL*pvN-++>D>$Nu;w z1?Y?x(6!(n7Y!St?C0fUk|>8=bj*HPp|{c2If zJn;DCF}Pe7K9VFmeQ}K_=XpwUo$Bu8Ge3#33hMu_SAeoOCt$=dwidQnCa@`q{cl9@ zsz{YevcP;($T{ibRdH#SzEuu|=S4ZcSP?JIStF%#b{W?3I@x1_YGGVlil+sf0InkU z#J_~e(`-iMUw(rdStBj_0d4bau#y=%CuXENh~9c-7*dlg@>!Qs<%fxKkZo)_#t31{ zfn7P$lNUh(TH1-^S^*;LWxN=>O6IdJS)Pv;T!zV`IJ|AY5T}p@ke@Eye}NR6<4+;+e9iw1a<73~_KL zB1Wy9g_*n>5_^{kR&Q;^=qwX-N?Z>(NrY>F)Godibd=oGOy-%D>p-b%oE~een}y#~ zK>siLML1K5X|mIV)5gkh#{ZvqpR`35K64W{UAi~ z=lbZ42G;j?TvgR1=t)6Kt5#a#{qd?8&)pdRn1c+UJ;nj!2GGRL;TikoC;k@XcdC

kM9h*Ie1)a4tw zEkm2l;ReFSPz2Yf_3S?cT=IOv8S%25M7HpHEG)#@ zn}Zxd9S2Nk=*0?KK)E2s>4Lz?!Y)?yOyU|r*Lv-}GtuLYfH+GKtxwJU_S_OCt&$jq z%g1XXh?XmLXhFR3JCJ13#eUfGTLH*0uf)VuxgFW`hADB9AYVyNJ@LJeqoIvxJ9xuw z{3+%-W2)SQ$y|!wDFt&dc^TgcI5F(^vrcQ+HVdd$+D&~iPmJTOqPO^qpud=~Z~=Q; zpNgTlyZq=<@0d$d-deJ^6^&5zCcsfdFX)s$1-z1t$ zbO@Dc*RkH}eoQnuAV(NKCsx;We<|XT#zo*~P;#n`mL8I{IsC(IZkpzwu zWx-G;strjKcv!T1K5r7US;E6>qhZPA(BgJgVbYU4br`T3&+b zYKtl{#HA$?yK3_V-I|(oheflaTg17KurWXNDc9_lg~a?@Pf~p-hG=IqL-|H-r;(`+ zo44PJa4u(b6#9FZlP|S9RGMtvr{1xQtE7i>k+7veO5zPc2g#&|5#i_Z|CJot?70X3 zyM)=uPSi^Q9@iXiA)*+k$;zOol4#r*~D9FD5RixYzpTh!0v znleKZ&Cv>X9*3WF{y;rS4hwD)=kJo|W=iph7!rd56<0~d8w~U@u+&WeIOV!Wq$OIn zC`7gJx;RTtheX9ImOA1lF(jLtgUK@R!SeSkACqP~W+GmOT0$Y-6tni}>CX(JyO4p@m6LLti3Ye(p0=<~D9sA0vy4-il{?f<)-Y@%W$@LByvR-2U zEIfLFZW_UHe$MOflb2E@HeX3PEA3C4O@N?ldp6Q#+V3S7cffIYW3s&^4ho z(SNQ#5CWl3X^K1KOnWL>Wr$-Bf^a4$atOfQ%JpKh&WZ}aWSwWC56=80_r952x>#S-oOVc}vwHkt*=HI+U(Y}7+%US z>Z&7SpMJ2z*HV*DxL7HU(2@ztxdIk-`GsQAc#U{x6ytu$#=>=?kZLm;rz;HLw!j-O ze6g~ilgv|pbfD2tW;$&&;At_I%Itv{I~(NiXBPaZ>Zmzcn3I`>j1i<26kB+=K{JNK z=0Z5H99op(ph0_JYEon$vm<^K;Z*bv%$r9^tUeEhtfCMyH r;565^6l2rFfKJa2 zBV)Lb+bj>p%#+r2drTJTL`tt#y8_1_o>5%MPNv#R908@;#)DsL%k4CdA0Ukf9tpPm ziL)m$vva&S`=Q_AK4BTp>p*eg&*g8dQ$U^gR-DsLj}R*#1$3!tlFBBV>JFD3#k=QV z_@r@fFT!!5rq&w+!i1v-87bw91(5-R5tF-Tg~&+P!Uge$+|<#j^)d5koDQ^#6aLP4 zrc5v=L@3)FgO8!)XfCX+4qDq{@y)?^6&_z0#nzO`@!o)sq zTr6=sP8Vv4Nd=zU#n|)AA_sqe5>87gHOlv#DDw6A)oPC~p8z5QeiDhb+t(*SILu_r z1ue;w{efN+?TP(_hyi;xvY8u8o|5@bZkF&6w;0Q$p(sh7dTK_n1&z+rxguO*lL+$L z(|}Giry_%~(&^dXn^%m8R|TDiYFSrh?`-umppe1%fmkG`<;eOk-_^lWc-x$racC{+ zeI+zg#W@tTH#+jn2KPM+$_Yi&ot5B4=RhEu4z7mx#Z^N7n?)V5#<>uViU+t^06^?#|=C zB3(Rf>?bjw{?+ecki`yL$9i%)0nO^tl3Bnp>Hm!r;tV;{`l4wX21UhC67-}pt@mT< zcZcCI{yhG1x~EYOrNEdI$z)8! 
zzVUjOX8st2Vq-yOg#}!n^e4X;c1}4+tiKH8xN|Z(7Asy3VZk-B zR-~Oz65|43pGr@E#_^(D0jufKs5-3X%O6Vl8@be6k@;e7&{L8{%a6n%t0ISxvEh{f z=W}10-@}5GN9EL}c=RgV>4b+EP%~3sRk|8B?W>ZlPm;I9HE_-W_Y|-!J6Qx7l)*w2 zxr7`C*MH+OLD$T*-zGcvicG5*mzbrG^Svl!-ZU;oUyIWbG@@C;&Z!7{r#U1V`X~Gm zidZ<2?`QdTr64(BZ#P~~-P=`EU}y!N+|QAg!)yUdx=HFh%#-Ik6b8%ML997n4|H0G zOypwRQY6JWbYgY2@Nz3Gd06~aF4c|XX~|boe}vUsKxyZj^xxJFU8z{=2K+j1Rs$@R z;yf`HEel5uEZMSL_C`3%ZWIS}!vyVxEwnsFgrjj`NTkJS8R4O zz!SpS^1Wet!gl?EFBCre;*SEZme>@zLkIz~UWq54?FY*3zw79hRY5Pm6UyR9r=#lfSG@~Xm0lfng@A=xjoiD_|JWkzk5d=$AE8VC zb@)}3>WC${~GthAd{M^_~QfqlsB!E;vWS`u?)3s`5D-CvhEt|xOW0QZ+i z@#i|2Oo%Y3#ZxRI$$KZ_?_P)-VRIjhb)$fN7kXwD2a|(>iH|{&0a7PpLT9It0%@W% zxL?o}Cs`HhZc!;d6>HJe=pyTk$H5j)9ep4Yp?37)B}m2ImetQGY^wwq`DN(i?I;7Qk-0N+yAM|llFOe{;ORKa?pAF zpX7E5$Fp!{A58>;p%TuG6`uw;xMVhq;MNeu|H+Z~rUGdvRFE}AWKj7`_6?6GaAwKw zS()PKNdsM6^{j(o+SMORJqK_i8k6q%S0Y>>l`N>~A?c*f5ozJ}OI7q?VMpFesLA`~ zD?Ok2k+MiUF6@Rg4mE(a|C)V*a%ys*6-9BDa&J3V&ah1>2KiK?o%V&?o&!jM+Am0v z{%v1?cfb{xxI9zP-5GoFOE1b0FV4RuWUtuljn!TRIzG-IsI4&|hL3H$@p>Tyj8YXl zECQBHX+Y)G{Wl1gNV3~Rausvj~+v*JhD|5Y9R$*Z*Mh)U(cz5e6{nDfLD zU0vnWxIu0bDFrhQ+&%_5n;1jZX3N86MwW?~tE}@$=BE)O@AN#={|!UwF@7X>y1G+L zXIH6RuET{LYH|dEx}q3mvLTlI6sH?m3x%@2pe5;PqYAgV>}RWrouWg~a_KKh|KwN^ z2toG#Vx`XjLk$hR3>wkrqS6m!0Vpo|9Kx{_<`z)tR(ahk(xS3?Vs3$aBhG;(*UQN5 zGG9QYxj{#n4-XRKI>V`mwvv>7m@Z6N>QSr%<#w*JYt*sJSCGh8*I^mmj}>(4lasri z_`MjS;Pk_D%4-%wIEq>x#*E*54NjX*a#OM=TFO)_!zDS4Uqm@zOcYC=#cv!i)u0+{ z1iT&m9|t@>wU4^Ius_LjTb|lBQ;egUSnA6$GR3Q9%FUX2!~c9o(ed4R-}0S1dsa?p z`N(`Cx&2DK#mAW%g}3f^5Ec=m8HZCjE(mTf(y?}Ow5#UXFUm5&AXzS+7Ir{1MHWN< zE_*-~b1+W%9uHK(q|w|h;EcI?#lR04!zsHCs*V-o5Ua5~mXEW_1bU+QcvFPaz@A?& zCj1C?GP%ouN(Ap+iK6r?url60yn22oAMo z8^SKd3nDEYtznp4?Y$%nvGD{pvOiDA|F;&Bo&0yjIQ~h}i`ZtV3^&rB#=gR74W;2| z@9*xVp&WCLWlKAwP)mF*(o(46f;bQBEd$}Gn%QWgCM^(yMCYLgy7scT$!2nA|8)U> zOXcFg^am0Cw!>TZ7yJsw$-pXDqDHmUuc4gsMrjaiE$lYVePEwC6_uO|e^SQ82zU(4+| zu#R!*pEMm`eg#;k74v^s5H8~e)uClpJLYZSqm+%MUdQ{h1ayS zYh;Z%WiJ>2<37?asTVqjh62+~-op8fKZ2=4w~ZCsTrp{fh$dTyw193Dhg^Kh_pwlt 
zLsa&EyHXZFqilQd+S|(ZY%7dE30Y(E?Cz__0{a zf2Q_^S6K~AiM26lI#+I|otOENJ4v^$4&{__DNkE_4~ua*G)jpe>ehfjJm}Amz%>DB zUI)}DC+Q0(TaVMn1OW{M;Yy*!4;dT?`)#haR~)1W;Knk`OlO|PnOts2eZfm$9L1ib{>x*?`;MkTX^SlV=wA=3#Yi@&6m7Kn> z6uStKT4wh$6?RejCpQb}WOvz?Kb^Z8mUMa*`yNpg0ctn#SGg^6ZhFQ3!+@5W4HZa@ z;sKHPXL%Hlr|x!isa})YDXcDPWB!2cAdp0d7FAaWIHy^`y(o?^I96v%iT>fkl%Up* zON_Di4aWzWq^VVHkJBl~Y9jWg`eL^c{yw>?gwhF50%d;CfJw^uU!t6&wn8!96?73u zcaKhGtqK@YsV9A7vOpq>-Z)Zj=Lf5nj1NbNa8Tp0B7UT{1tIfy&2XVKSJU=Wn-b_5q3}8anfF zi8xoKt^rIqQSkTd21QZP;H6l*%kG&ETO}yPiK0}SwGnbo`Tyi`2?A}|ga0in-IpoF zt$Suf9d)Y^--{rGCJZXaW|aVEm9r-8bX_FIsiK2StcJfL3OTlM>8CBWtMd2L&{c@D z1RNc+R=S$^MGVOjozKbDnTI5av8X2VFhTDI466lOl%9+aTj)6ej_l!Fq+j zt*2K+S#aEXYK|o)XF;g`7NSXnzrh0~*%yBW#ScaETRdp<1&wnmN-h@t-WvfLh@hIiXUI5D_ zF=_{TlI|4&=cF6!9n9O}^}V1hPVQqC;{SwQ$TE_}05_rm#seIcj8oy{XsK4LQZ4P# zXv{v!7g}}W;(RfViC!B$_fkAtrZ~To=3uPa1lYon!w}_)b&Gm!Ar*WeIRj*>Zk+H`N+*#w|&v!+TINAvhEzbm`Ng>bDYF{kM#ehAYLSl{Qq`fn5 z?G&kGcZn!ugMJIus5vV4$-Jn+tJj#auf0%0EDu`Z7-46kntrNcKOhN_W@wDqAK+wG z4&*u9oG-#zOok*Dzhy5C$??S22jIn}+{#`RZGz+xloB|L2*kQj9tX{|K3fQ{v2naZ@vUlkKTr z=RiZV9<(8>LHe{<=f0Ks!MJ81xu-AUJ(v{mW?pC9CO{6@bn)@<8Z8h*t(r=Awy?wJ zgG8~M0cW?o1EfuN#B~CWtclU|c0miXhRrNyJCBJW^?h8qU#@`L;bW1Bm&%1zLu3)- zMgctb_728(t+*}QW*KmfVQtx$EwWeBFYI_|KP@a)Z)?v!nb6UNx=B+u5uK9G|)D>o-z5On&xXeKCwd@hFaWHH(iTlWK!taitlaywnsT?4b?FT!c@ z<$ASli2)clF|G31p{KT9E|TbJ*E4Hl((Eh{M4pd*go!{#xxP=`Z7hkFos)evdQ8lj zYhRtLeeWIgKi0BYoFcYn=;il6l>bOOO_!ZR0ZZGpAQzvODc*SGxU@euKMY@<*vZ6v zawCOQH;PU(S4hnK&nq*`X}KR3w~9&oKD0rSQ@bz3S-j)x8{?3}fn<+oJY$~RE;LF% zUyR2^xMfa6t+dpi3Q5&Kvw9yr5ycY9KI&Jxwun= zEjnmqO^qI**wl_OE&d{1m>Q4IhC5FFQ4e%uG835Pw%Jx_wlopTn z@;Ng)6UM=-tC4n0hbZT{3X`QVSjOxy%#PE};(x+tjmS&IX9Fw{bZ0pH?mGt}m8{Mj zdSZbXS15EonUt_?!IkB6VM!R(hGkQY5MeHy&jjyC0TNJ&)lH6&<7HvT!9f9fY1^M? 
z4>aD|xmPUcPM1$goKMGFB9ju+Y;V?; zAeU-&qRB05rSY;@f5;vfmpU}fntBzaV`k-5h`WV}35DO7bq#Kdnt?`2^hb%YZy7g< zi-n!PZXb&!9n$94LL(^h(igJ@5VB@jl;?BT0i8B3#A)>>iQGIN*20rHHSKD~bK+b& zGufYiM=3G@EkH`3h=J8}=RO4Op)7(9>*p?o1k;9SCM#mLxfr$%2OTji_lQjW4fE%=h*(PEC_7F$C?WqghrcA znSWXSZV005WcQ;&ZilZb!y!!iXRma{aO=xoedD92% zIeB^dWC2%6RD~9w*(XeXnFv-t z4B=w6idBi{yP%vt?zP2DWmw15bMr^|*MeftTIaCZIw**BOjv@Ft&N$FIutg9_592+ zf-XK+3- z2i#3h28%~Txo;S+?Hg9F@w><2h^um9)X6!Nh3uu1+r%Q-W;AP_l-pr8Fg;e!ao!V9 zmdbd>iOU4>-OAecak-tMeTqD|{deJ^K3rER*nLAavEoT+i-5h6jY`-pNHv|5c>!md?Ql5>ew{{rC>VsAFb zZL=CNmJ&7qDHSGVFN<@BkXblS^zgQQhCkBmGtc~1(BINrbbYNqR(=+aQo~>YonwWN z6V7u~$qKy`cRdH`RP{5xpl=s}7-#du*@1t}{+yI+DK^K+A{h*8e{urZ(Tw#AF(jdKYE7*3B5tP-4H4#J zhn5NE`&8>|MJU0nSfBkHZi20u7XOjc;*lQxPlB!ibPFt}{1(aTFZts>rm$Pf?XpHS zhY14jcZwnqHUzZL7$ZoglBarK5^(ueX-S@7f38K8jvP;X881%!9G1SNxL1S)K^dWP z5=*@TLyE^uizYcKElh3GW#xs)Rv-6cru`ka!>Q-(yskK1gyZg&t&}^2T?R}G*u6+L ziXSQann;$T>Q#J^{2m?hA0Xi5P8;aY7imF9igE=>m9rpE73rY$+$5(wta##du}%>> z->m4ac@2WFxnI~4FA0!#YD28@I&LQwDLlw+;6 z@*Ky*ap)Vc6h{j-y2--N{{Rvv2YWll$bV*Ea4S!vA!apV>wnn`Og_e>fOp~^idq4u zOXor{LD)r;X#DaXlo+Q}yBwOsE&mpj>pufCOAyL*{M&Hp{3$U+<2|O6MLYa@G*09E zcfe^gDe|TUTjcDbZt*T`Dh+z&S@fRuqNoN;f2ZZ+5Fy9dq`L?7?{U632i>YY-xJ=; zz8E_!+TYLqoueUm_6OiJ8+v#IL9F>96y?*)VE3Wi&Nqj#QsUX*BL~3v#gkP1@!J%W zAU#?YV}+e0Pl)JA4-%J*54z~{w2;F@mqDkfFP9lk^i^d)5aSxu*ee?sUyGn6Os7Vq4 zy%+TeTE9Rlfv!YXXE6O+>L77O3R=4e`g#G^jM)R6eWP2Bd>#_(Tqibo5bJwok8F7* zFP75^R{Ro=WOWV|O08mLVRw8@By#hw{5P^})^`7;!j^kJ*H84*jbdzw2tvI#+UJ|+`M^lkTB`wEw=bv?x7li zf#$B*;6E^As#jOW(5(=3UzgKcLreoX8vYj2k#bK$H)3M*@1Pthi}SebvY7tN{K z%YSpk*%hyrF;`+M;x%D+4jj1adRLOswZDgUA=C^=J7eV^AjtVvTgU2hT4Y#aMyyG7 zSW_$+(ju5`H+IAK6GQEz%qiyTA3@F!0;MYGk-P;Wop?+ow#7nWB!u0Qcv5cXb>D%` zfgaS&Ulv6onx@ANKjCyDqydbEi;e+L6`P*<=zN8V26y#2BY#YPSlqQTdBqD{|INJO zOLsKWtRJR7T=v>u>F7(GWED({V6}?0zM|-XaoNaw!`XZl zXy-%^j3Op~aY5>LzNsHLt&wI!L)igeP|C8PJ&6@L}y#ywT5syJ>n2&aKQMC zsp6@P9|RqjY+{qZs&t7FFeIxw0Tv}=z1fe?*IuhT!dF77luG`dP`E+>?8=y;jP4#w;9=;YTk#9X;P z&~eP-5Rn9w>5RSeM8!w{BJ3}vbg`fH@h-5WRju{)_ 
zwA|2vA_t#{AORdS&52nwc@#ZM+nzurVca5f=FU3$MW%e0f`O^_20?uT#rY$E_Mc{;~#b>hbFO;MDsaH z?3e=(@F&g8wfEfxXhE^a=!%1d`5Wm=LE|Sumo>vVHsTovw%Qer;Al1ZSZX(b1*RQQ zTcX-WBtCQ!5&Ncqy#+Q{1swYXj+6%kiJkW@lSR_byMvKKV#!ex`rA^14y(I3NP?E# z6CZAXtOj%4+7T;L>I(bVCfceJU&yFpg*HQb=bATyJ4XLokoC!-p0ML+D{whNHZ6V>=a00P#@28x z1kG}KRg9L?f~@31CxbGD)hLo`&@!mUC+o+7oT3@5dfyauXxuBO&wfXQbIQ(Ovh*sB zcK|d=v{N3L0CG*mDxzxY>Jwqy&!q;eJgb>LDGN%P^;~U;`pFK8qL^A4R|q*MHceU> zZAA3##xRGKnZ0PcOi&%eDH-KG??)a<@#!{1tUVJU=dOy7@dZzl{^jbl@ zE$kd4KKjCnX%LpkR-?9#T0zUavY(Dgud^Id=ISt@!rUPjec~KzN%yE0l!M{HqnJ^j zg<8cN;5ot6w|e)!y}%Z8wNI7eC{ZfMnmA8RC#jC!{Z>If?qFdn)8>VS>|+e-uzOpI zTbiKARdUBi5j-y1<>g>eLz(XGkHhu`lP%0oXt(VaLH}JRP2=J| z;IzN0kCTPcUX;a+%>HuRUM3qjUX%fhQY;qfaO$bDar(Ys*N`Sz#K1W4elSDRZmt;7 zG3laFLZ^#1H28Y8h}HJmVtFT>Z3S-k^vCu41MQ8W3(I|}k%PO%yC$(*V4UzO zJ&ZX)m^A5JI2uzx4}&Ro0c97Hi4V$6V%ZL6#d~)vD3wk6jMIpAD2uq6osDL}R2)O5 z)C=OA?wPWm#GzkLEZLC-Rn0y#_WoiyQHU)Oj1-*KEc@xg*10q#>Sy66Em5{o&lM!8 zQVu>}ZZgD1?&w&uGxOI)6OPDP%y5_J#-HmC9X68RMua8bplV-=BYGTeVtDFpfwaD%3DOh$6d_zLMcpho zO4!w(NNs~kJ+%@ICdqe#R2Mb{k>Lg<5T!GZ zq4`O(rK4X$a#^`Z%fYpQH?6ynN3F!g|!ndil7^8nkWBbuo;U=_$kQ+_E*G+xX69H| z+#swrQ|OFu<#upYi5AGYM?yGJ$vGOO_>?FTEiLY-Kgs}Ix30KKz;QDZ7}V{N__H_% zQ16EkGi% zaox!vC!v}Z%bh}giQN1h|U|>hk8Kr)9#2ICb_*&uy zVT;ojhp0rKi6a$>?aMgsG!R9H(pr2Vw`!^EY(089z(I2rurpo|=1)xo<9h+8y+Pf^ z895>giwDX+Fu{j_SbLv|4@V%IjHsW1-IQdxL;3XphW~yydh5W@iCTSY73qu7&llv!;X}cyT^b~W?g(2kG zoEV%cx3fNF8m}ilE$pIBj!6?miM`Itf@o#IMb7pXy0Fa5VLO5;!5w0p3M}lg$ISUHngJJOUv|)6F&cg@ zhO*V2;&}LC11wW^#ZH$1TsmZvLm3w?#4^u$%*dMcE)o; ze6r{$4qrbNK*C8)qeXm?{;492QJ3?lMN*qndw%5#2t-D&KCtlm`1wkpbChau>#U>q zDp;p-D)v>hoPAZ4ONj*-&*w5LS}e{2?c(h{D)ld7NT%+pVB_X$5aPv(Nt`aX6T)N0 zoC%z8jR~f^EcX%PSt8Q5C5^Oigb9l}+B6tTT?=xOa!loS6@cD@dG8VPeq8htz?%2lksy(XR&MDiS* zcEutArx;5=UC}t-e_~Xx7=ILS8Z;cSl5i5RbDVl(1EnKYydH0sOAC7F?NKknX=rES zgiZdqs!Yvb_g-s@xi{d;<%a&xQ0`{k2uoge8nH#R-GtkTo1&aQA?%FkUA4cJ|DIXs z>x`9e=I>N6NS`RiiE)WzJCkirs^L-M80e==iS=*6i6lBDrB?-rY@ij*a~j@j>7TMo z9EbmjKhrv$J%>tVIqOz%F6lP>CN{)F4IZ}jMm 
zT5XdkWT2M?Ke-(@F>t6P(>|Nu0dyXD35KpPwh-eunUN>0zU$7+6W4I{An5U8?1>i_ zIH_RI`YtGUewxJCM)*YwAT4n$jqsZD-I=FkyH7Ljs1)g1UE3RH3R%Y7=u|QCc!)TH z;dH*Ju5YohGBme8HoeFH(I%1|)iJ^rVYL+h1Su1yK)vw?0lphGW{0T1H}ls>g?dZy ze;y36wGzERebs#qp{0m^@9y^loVuB4=jl<%=oOc?V`iZLAJMGq_i@<+_^^~U+Rz-& zdJrZpS(N-$O3#)lMmwE<#;+d2m#Z{JMbcBMSo>jE3pIISa9A00g1J-Vg~_Gf_B};z z$E;C})UNqR_BF;@N5sv-E;lV_`WD_vo{xGo`;3uHGnUiEAl6jr^LataOtv{O{;&|_ z6tP2}k57f2?y?p{Z2wrs%2sf4F##1`v6kM*5hFMnT;}nN&^i`Fz-6L2c>;X62n(+( zN86%%ZV=^sR`dP~$54%8T&t>Q#3@29H&r||K=bk36L3@wY=n2mg--&U3{FXq{bM35 z7nU6zvv`3p7hj50ZK7hvHBSMY>wUF++UsdBF`|P;r?ui=Kn}3BZ4d(uTZwSRoH}kw z99V`GBZoy%(9v;&tIIZxE-sTZuz-*ZuCZ7o9nQl0W7IQQ96iZgO`tNIv2+VGAyKol^bg;PY?}v=z-Z`hyq5AQxuH zJfkh(AT>>ojs9vZ8~Uo~e@HPccB(9u_rJjZPEix{wO9mnjOpHKUrZB6_=}ESbW98g zAb{%0ak88ivW8o3)4vEY)C<{Q7T4-U(jc^@)bcWJht4tvolFvR>?>I)v)M1`h{Yo8 zNr#?Vc1Go^4y2K5v1iClNmCu!D}7eTzS*xv|B!L-6>%2Pc&a)gdNcig;hj{4*ZsYf z*n5QBTFOcsG5if!XDzwv8rzGoHy(`|y3%*9SR}*v&>OD`IM&LZL75F%z@PaiFhz z?7J{dBdYeS(uAoKmHWmbIVl`&`En%E{65HqgDyp)nj6b}V9!l8@mo0^GWSgMqRd<| zRF+=ecm9jq7FRb)y*kUS{}9R%PteBKEMX#UWSi+DoTR_KzZ=8%0uGo{UrmMX6GOV5 zx>$D{z7(QhxSpXZIRE3!dllN>7uknA7?W7_ z9Pw55A@?}D5^DpogGh^|O7&z%;dHrKD32W$fro^)SdrR&*6qOF~IQKYUsf^*^N{XT#V$v2}%rBge zNN*EG4rc1f+~)-xFZ~!x)s>cpka% zn`Ba5O6m$n3nMrtLp^b%0ClUDM^fZWYauhMv(wKgxqj_#uC|gtE3u~YVi%2V!KKAow$hN{S$AeBGemmH#!9Rc8>D)CxD@lmJJ6iZR##aK z#`&j?#KwZoqBNi}P*WzFI2Pl@k=5~mkVmssT`PB5uZz5JOK*LED2F$m{gO)sow~#b zR$@Z&s`6*HjMafYSN2&Q+ZwHF7;;*oeXOTam`*Cc7j!Pr7}JQSV6pq%R3Ds@ca7tEu?lAv-P|hz zjBC`#@s-iAp8ukTU=n}*>}ze{F)PtZzCocgI{YC!`)o5f2r{tAY-#QK9`J?D)|NCfjRo2;r)jOoue2 zs?(e&o@lYxNz4C9h+tSqCPVWkzxBVwvcu@uT*!fHjn6~%A{=WIeRpgjjLaB4i*c-g zWzWTyV4!l6fLF~LXTR$ zE{44IYhJeJW&kSOHrs3yKcwzX_R`kf0zU0w(d*ZySt*)CI(F3E>5<|rF{G}$*vv`c zW?O;?SvxN=U%-7AHYPhz&P+PpA4OZ3vUa0s#jk(o&oCy=!+AqsFy9w12)LX$E$0yY z6A_m5Jk~S4TV?-O6?>R(Jg_wc<;Vd_auH{>-4>iq>o`eB zSNC(prX3)PJ|5VX7zRmWuNfC1CnDpHN^B}OIUX^b&4gV9Tq#EE7{{c4A&gv{C*X8f zj2gz{oQ8cYcB#5w2s*}&o>^&(t8E8KG>9%Xk(+4dB*USACD_uAogVgFw%s1e`!{U$ 
zv}$FiH!2wfj@3VwV3YLc;zJScnR=vuAR~T@#Zj*L!a%=tB6{Mw5im|s*TCHL#?{D* zEG&+cSo=*9L9)~%#ohwWVm4Yz5f<>% z38+;%J;fajm~U{aOMvq1B!S!S-A!#|6T~&A7{O_-y(w z{n&)2x+yVvEdM$!OyOve2a2$$nWl7Us*ch8*m7r(<*UO~nzmIV4;AlZG`1Au z;az~Km{OzUTlcYjO9p-09d`%1$?@iJF7_8j3Z>3m z%-GZJc6Qnik=yy;iXPLDf0v13FI!e9zv083Pdg8EF>>4@#$mHjfhlrf!k)&`|NXM> zHg%Le@u?UmtYU{@v0b&H>N`v7CP9Z)CvBTq!p^=-tg&*V3f@wa1%VZx*sT`mG-J}9 zf#S$AQN<*wFOD0R`CuSo@AgI!B!Z0uy*u-!ASHw8SxzT+7!Ps|=XhN>UQF6qqMOl0 z<=ekZNlU1gQ0@`soQ)c>MLZ%%pt|7P8?Otwj4-jG8!n|ddO{W~!!l9jVo{l*H8bz- zkAI0uv&UsaibMp}yq+s86LC69v=pLB&^2uHF>z7(&vA(98$k~Q_0j-ff~9SZ6(&JD zwdkH#> zoNOS_d7LTA@l4ip_VJ|%%cxG)TC|7rl{i<3QW0I`E$hI}F=jgAX~C=&c|oB%xyMv{ zlVK2w=0HJ*!kI%U4o?3jaon?vm8aQva{IX_P8C!?G9@O~<8+dEzg@;W`{Oim7Nmyb zq~g*FPZ#YtCaAxgFH9(VY!YiV;3UzFD$(vZPS8HbGpmg!gc_nGUuCAZNIe*g zTN?3W`ElgJj4U5_i=v$sc`p0q)GZ@UJ2k-*n~u-viH;Zamsl7});E`l;>$K#>y+uZ z9eFk9W1NqiA%=e^a+-9%-2S*xZ&H!{HD*A$*f>DrxNW=$%ZP!(Hw{~6!a0lF&*+WU z%D6j+&csoBXW=$;q@l&=)P10w5cFSl)A@ceZv2?{Dh)D8>5lhAQpae~Makr-@w zcIm!p%-$Em5jXPaQ9Lc|#MMZ!Ty)CJ?>+~TtSE3Dz&nn9M~CQ%E9K-nIi0=Mpspq zZUHk&V%shk#|v26nB-uao+W;~DiTpK^^EsZx2(!sorfoxx-L!KUm)m=r?cokryewc zCLeznB&J?AKVzo?z{!wOQ|v9QQC~IV&{hC3ZZ&%AI7)73ptDDB^t~a%GexZjll=oO zX|=&xW_hyxPeFo{A^upk9pJbc&SXi?d z%SQh2MyGe9NP?Cch)VzXSzxCYJ5MyEOGWTi`V@pG-ph1Ck^pVU7GfnKSHBt_rQx1# zOczD4= zTq2wXL0d|mA$w7rgGlBtRI^XZR6Bi)dhojs!B?6|n+hdEZ;7;sTRYg6ipsun_#A~N zJ69))a~e-3=%_49x(K(CJ(P|K9;ZfGa>Ls1rLfhjYKv_t(_ zYIi}4j6+0`Vb0WZ@v4A7V9SztMjr-2+KGfZvbT_n6@{+uSoUzR^P&w(TeP1g(qSYM zkvO?bAaM@oh97|si-_YCt$QbxnHenONY?RU+`mt>sR_&fbMclGRuOa*j&_!~wC=FC z!uWV31j1@xcqfLWVk<$l4o(%nO5NL4jESnF;GGU))$Q%uQifx9N?dl(Ha;3ZY4c>L zjpk#FRn|G|Q1%`RMts=P<7La^0B+h1nC^a4&}n7sg~=2m{6U--d)ilKIi!aNi2C&7 z`D3WDvAU8S?RZh7Lr7+#s`NjJb0kg7WR^PtNCGqqij4%2L^CftCIjgfA}mJC$`tjc zO;VlS7Vm(olcqrB_~;}!2Rw!GW%S90J#442O5IJ+!fM6#Fh^=%h)KoH{F_I)PJuwq zT^N`eDL2VVW|hYYxb^C^B2B|pVl7%uNOEzPu;b^w*f`--AfZ(Z9~sxloz|%qy5zX^ zGzc&ASxqEwTcS~{inIRd4h8iJbpaa)x}oq`Xi)>|U1iE9fOGM0QN-1$jf^uKKw`|c 
zKMoQkBf450{Q~%73o0H=-8-RbvDBIH7LM;8#YSQ*;Bly!f5pa_zSN*{0J z;&#B&d|=e4EiKL@^l9oy`{N8DWHi7b&eN&8gBo`3AK($q zv}y5`oK7w?aJ{PbjTl$xCZ--eg?7DxUVj0sMWDSWnlO)Li%WIg*zBQW9y+=>{6hY5 zMwqC^nSw67$^rW4xKV^d!+4=A=ak|JagLW7L@&WZ5nb_YIV3h8;=S^RO}yirj7lGi zBR3P8Vy}yE60ht|P`9p4A@f-++#eNmGFU2cZ1cJZmke8OSgZDO^O}n@e+?Wb7h>`y z5bB4dCAp7))5&0*nn8-2#afQ6;^xtFKP84rDRVr1><=f~*MAdf!AxdH@Bai z*==GSeFG}}4+~OTwAkU4_HvN3&|MhN@?y^`U@S27mUbE*bs}6N$4lLOe__YMMfskE z*KrA*!+r^v0I=QIb#f=Gt zI-~3rJO0dnuet`WSxb{1>d#`FyyU=cmuta9wD62LL{9(J)W>ewJHr0BNwy5x_Yh;* z)T8WHmI@g>S(Ix8st!G!r8ZgxJ=?FYhew(yI&gzvTOsF!vgf{F zoRt1gwXd&@Gs^!ru*b#r#J6Hd7Wpd0GB+Bi0mLc-PDEu74?^&0_3EP1>bLNE9Um5A z_)Ty~Lh72yQ2e`_A)L25%r`tGXi4!z1U>(cBAi*K^_{WBE#R~_VKXDS8oB46;G7R? ztUBd=#5kClMdp%mvk0PT)e%Qeye5bod~@{FTS0Ejeh1KF=4qlGNU2lrnJg?5O&Ukz zeGyK4#g4JASfN-v` zWU0Py`8#1qbyuQmvyG4??st;xtn>dnrN{FS!Tr$cpmoWn67gM_Z zMLOdFan3y-#dc2t$&H`1z--%JvQOvMkmI#o8+53O{j?n<+L<80B+?3EPfW{!kovU7Q!0-98B7epfOWKX)soPKJzt0 zdH_Aq{?{xh8FOJ-66c6=vWQt`m^8#ZB-Z(@Z>)(uUI02T%`y@Z z3hjOIdts*z)7lzQMlFJIQ8JP5U@T(@o-7if_i6k2_W~3WhF;=Axrtmghpb&D=-^13 z4tTB;V~KQYvy!>Uo`3WA*Z_=2dGzXWyzt)nD(!d8U8Po6_!&UK|IN5Y2J z0Q+Nah_PT2>mR;E*8g(m52Z_8PIyj?{gpbp*&9t3D9gQ)`NOtOD`U+Te}{2N|Bq?p zQtTt%velzNv40t}tCEj?VZ?;$AdjKG3UFlg$!1Lw*)6Zby6~&HsF3Iw&Uyom3dGUR zeR4Xx$)$0|^qv0!6-t-kEPzsWn@-2cZs29C0lv1&YK{|U$57v zV=oa7G_O~cxj=H4sI2B=d6n*K!?!XHOS;V5xvRgsIDe`ks)Tm*v7#KkY$;JlTfPH> zT zsF@N6lkTlPf_5aRiei^7=8AD7l_)KtAPaUEQrQs8eC&TY|8AF_@?*kILSOQBLMeU_ zMXX(1f9?GQH_2hK$5E;P^>s>J-1=`jr}I#ToDKo=w?%a9CjG11sqD8gUrh=v<2^!?wIeN~~H z+7sJ+fiH4z!(S%|rCrDXhrg(h(+h;`@jA{HK^c20 zsOk88F^-YJjJ-9wZyN1)qFv$hbZ|9`fkSPG(EyXsgHaIa;+n;wl&n%s_!^1=uN+_d z|EN0ez`U#K{{w>TDFXe0h>Q8Dx?tX`3>XB|}8^5ZNG9 zw#cpn!d?QhrwE9EfPid)-|KzfpL`yE$v^kJpZmGv+h~htKiQ!7$HQMjyJ$&Dgo2u6U+)X{*kYF_UC7VA0&z)D{E@W&Yk+J{L{}w! 
zHeZ-Oqda;C`EemmAECIqD#@c)D%e#o;A`3*z>ipNuhFu%=z&7|*S&(^tBxQj`exLG z^rHMTqFf@$4ba$G9w3{EkQ5|+hE8Ewi8@d^#DTOBbD&XH_bacE$7p8+GjMJe;uzR` z!*kIvkVUT$pOmBP`HN(VAoJqYXfnU07}Mv01s-l$2;W+gY5T8am>p37ggLZ72*;BE|8>e zJWr0r@hV`N7?M`sW1_NNm|90BYOAdZn9XdhT(h!W`=kspBD!TKc*AOWELqmc@ydIG zvR=XFmw{J`F{^`RZNMC-ZP^+i)WcKpAPTF)OlbXtrf zX=A;WrN#2uII;}&btJ8$(eF zgn)&S3($WPO=HN6F&-4*4ul&uu1DoB<)lsg1qoK6S{b)}&tK?GrC78Xge%Srj=1A} z0h($S%E{)In?tz@dZ%?*yX38f`IAP?TJe7Z%$Nl%?{R85!pt;tJevXXi;H$wYdkN& zth1hBd~XX7&jhvA^|56}>S-j7z&nVflIddp_HtB=bK*IXE)L~qcG!H&JaJS>wc;OAZ;bc!zcakav3_hxM>Xy@g(^4Do$IZ%XqZ=c?=k)5VG-Xh^< zR5c3)u4;I5Ntj#1#}K9V3xQ;&J^8y^6NcB`6MZO(U#e>wqHbINLYr+pDgGeJ?5fb~ z6YX)E5Gt)rO)`SISAfgS?m9_%*4-Y+vuTz6OC_7%%>+BK9yUmDq*n;V!1ETZ_q+1% z{m94|;tvXO+KsZHMGkQO7$8^IngB}*vGWg{3<~D#eRmb*7$``{%DX*&zav;SVtnnt z;!ggCdd?e;l|;D{G$O5BPh`exMkP)My3-BweZt-1)Sro9?#=*aw5<&^7zcI%bOAKw zW2AyQo3N~Pxl9mLF&4;qj@xZ)>?_h!4<%;rk=3=W;jPZiZYaV35Cswk! zDn*xY7Xei&4mN2;Ub1VB%IsbzaSApv5!402AqZ3Ye+8Jg)44O4xErLi)&7U}zCeh3Hci_iVTw-HmkMT>Ykw8T?+%d_ypQ+V z7!(ZCeI|p_9CF%>vtm&$UmGqPKPZz;uj)L&+|CfV!8V zwpx-L1n-bwZda_3in02h(C(heJwsBq{GmYSjafm=5otz9*oKys04Gw9-{zRP7c{d& zQ$s~utFSrJ)S<;Ot{0L`HIkmB_Sj^fJX#AfC^oCV65>q9?Yd)ZTmhN&FLo6uYw8Sf zk5D4%IK`p=WszjYC;&baY6f0Ca6$X0p zT8ofI2u*a1|;8dnaf$5TIc`EJ6 zIwDgN{oZu&tT>D#`uwS7L~|wkhEIezaVB~;A$!fpfvPkutTPk93`Nv&P?3)w5af^0 zhoV0;)B&ixNa~_p1RUvN=iqUZFpQ1q*m4D zS5sl!AjCa6IpCEn_C;aJRC-;z)kBPSp0P1sq!Z;zlp)t0j|!qFea*?zpeyu>s!=^mx8x5ivG-+y3&0AmRxzq_eHrNT#D%JrM~?@+-itf8C2)~B#(k| z(n78jHryY?rJE|9)SX1TC-*YON1ePxt8g~XO%3rE1)YyP&LCRd;{cFsY2;eeo0UJ8 zL789sl=bufBPNxvs+bjP9Z1-$Mb#Sx>fty^kmrgXYpg=H4&~I%ZJvESi7|hX*r?q?<9!Ur90wox=s|8 zn9X@_c9HO?eF^(7HYMik8T!G@PHH&_3VD~R4baWlD?RP$qTvs&Bh%R?ZV^CnaJrJ@ zIOOmjL9T0Q2zSPV@nty`R3`90)lLlRUP4UB9^mI0$@JNTjr%PkU69<6Ee`GQC&bZA zdX`AHlMFzMu~dKyIz=~N50)b!vSkH)uN+~K3`SP&vow^l^bGnQO=m7 z7i0N^GQd6_CF{opxO$CTA;+tt-Fu_}RoR`BjR=?yER#(LK2Pf5SDGIWkMX|3E*Up0 z>7BY`dV!qw{9>UaP8Ch`T@a96Q2vc)Z+W|3G_|1IGlpZ0zC1CeW>mRxjwp!6w?{gy 
zDQ}k}HL>H<`tXq;=g6LKAkH`#I;#!3>9~*{&1CGZvAHOZG&VkZl~~)Qg!&U*OL3R*_kQ_YL6)0b@u(?y0rB+{i{{r;@*+et3|I|B7T7GL3Y^ zDMJu$CA3(P%RDN;DN6rFu3+|92&7hKB5jT5M7dO`o9P~{H8xrV=H6W+X<(A2%oIpI z?QB}&RS`~B+B$NGzV2cmnxqlG>bENFVsxi^z9E2*+2EAoa}nQ|`4Le><8b~3?%tT+ zuUO(QxOUhr{w0DUcXAb2jJHHNb?St}uq_S&a9btQ_d(I-SySJzo)GCL=o7Mn_#1 ztBpt24p%svZu-&K=P98wvH%lHI56g?PN}Pa+R)ELWZ7B>R*J`jxnIwk9^XGAk1>@K zYBDfuIugv;)HA73qIp6nYWGk}9HFqkpSWABd=wOm+T8lsM?o{AWoV)FlVYJD=U;(W z)U<6qL72JR!h0TZVmS`_ljtFk-?m2+2X^2Vt|JZ=;SZ3D@N(NR{&7%Fiidt~0tM{B z_|35p=KbiS?Cg0T6iVR-rF5>aHHeVh^~fDZS`tdJ?{SV*QB@P|3Y!7ii*iaHdpwBy zN+afsrTF#<0F)2~F5dGN;Sx?^QjDL9&W6uH5_}#fz^uUsanf7(r6A|g)!8cF-glQH zc|XYPQHExB591fQC2CL1)8;W@M%kt(0WkLa7$L|iuMz3ID_Xgr!#`!7QF>RT6or#P zXhBZ57-rlq%4JLI3-j0$Sp+n{SYN&psoF}s`s2vd8|Ab3GxPf!!a?ijC% zbj^|r2F{n|2vd2*bk3C5NX0!fH>|h$=Litl$eGwS*DNokarSQj%$2H9)y23~pg)>TvNh*HQt+L| z?HaWT`=@#~FZ0ib%z7JIYdVuYCWtC<22UFBUzMXs5XhxsY<>Yz+@)spCc*s)WrX7! z=jd4HLgIL?u`dJ|zNIj8hpD&}f6QpVep8BPMN^uz6_JRlJzmU`u@65>wBHtH=Cki< zgRe*_SgTgNC{JFda&f$9R}OO;*1&-{r3~Q+03{SUryyDY?@U8+uZU6p>Ur8p7Xx?> z(+f=L9LzPg&`g_F8Wn7eiYP-f5bxG6&eJ|6v6B%*23Tmf#-}1imjEvF`YFMlmw?eG zNELZsI<69FE@9k^KD=lT$86)2L?}})9sLTW>)vP<>5{Xgad&p10OvGF4`v{v`lDWV&_AcQK)*LZaP z7Q$U}5rX6n7=!hgKzEtxQY%K$>2eSkcwSN(T_C{qULap#YhD3uR&p_#?rFCXdV!|6tNcsq9xq@f=^|>Pwf|T|zxFW^xD~66uOp zv1Ma3#LLyfoLN0H0(%|-E;5gv_rwangLZx`=(_PI(%}Wm;Ae^#3o`Ft9}t;U=5qo;dDW2uB&YrU!-&^>WE@w$QAHpo!es z!YH^_n7e?)jPyR3HTF8dEK^>FkOj^yf?Q{WA*}xKkPyw#SOcQma(zzmEXtrL3WGqc z5cg*UTclD<`+c5Ha+8FhK!{7wlY6>2wHyt@V;y&15aJePGtUZf+YJD2s~TMm-7Z=T zK>H>R`vU|MDZ0~fp~B99QOOf3nyvmV(8-NUx6JPea0U0p_MiUw#El^CW^)-|Gj7Vs z2C_R)q!VcD=b|bW3UGOt-;uv%H04Rpa4vTy3J*im?U>Gd<&qfQaf^~WNXI?AszIb(rI2CxJ#J( zAMc!Jx@J6RaVs#zWU# z(oXSfp_H3b*~D?N-JeV)sY~o8YILw;LdPzNx?L&M1;VPgHSQPf#j232sxU;B|1-GR ziSUvUA8!iD293cTWjb7mm^QAVaE*EezZ2ysSNT^mVIU)~-S?)+zB3lV4D z1?>`N?jz%XyA3t@IvLHAI8%@r+fa&|L^+MA%(YQ}543xcmT5o3#coQl3okXB0l!@I zict8$Bi7n+Pq-J-`TOYG8K(#uEjoLJ-XSm6e-du))Fk&AE8hp;CV*Wi{-{Ow66gfc 
z*+?onCkisZSk)zHx~Lqf5)CvdlsqYjhTud#5G&jd!NF|um>931GeJX=J-iGg&JyUV zp`)4AI1YQjU)5E|Ru2+%S#>ARp-ccA^e(Ng{4-9m*>j&OARCC89w6Ijw042bX=L2} zvE4(^8MjcKkO-+L$T{havSJ+eFaW~{mpbu97Vf|b{0EQZ-?Gq4Juyxe6wmE_LE70WO?ITauA%{W#c0tF6aJ68C6ti_ICe!?~A! za`YZHM`)R}#N@w$QQf{~DP+VYBF#BT8;9c00{l$_!lC5QVdp2n=oai=O3^OD%xLW6 z{P5T8M;(P@npxcxREP6D&WVr=(( zo&*Xs@wP~p3#;+Iu6R#?JKRF88!W3|3Cc!0r%4?-buF^;3&1q)xXELqOJOGiYq+Pp zUx16o;ZUZ??+I~1=Aw}r=e?N6(4muCjLw%poHZ9Cn8&onUM~ZU4){K{vvSPvBY|X$ zzqP1Q*cr0}!??6ofHS5es?Q(!cMv!76f}#M6k+7i!u*-uYQj&C07^Hy2Ypwxld0`a zvgnV5nD_O_6(_s`=~QLbgmwBroGi?xMf)nVfZP5bfN*bQsg`_Mi>Bkj9fHk{dcE24 zsAyLnUDJhWw^ELBkVLGLUd=y2Owtwme1k3`-1&oOSITa=?=Axv6|;mrEx@&^NY4Vt zrLTd&%SFtAlmAIrqs6ZRHHma`O|p1FIoQ85_$q0J)3e{}ItSb7|E+Ph09PT2=jkKY z3Nn}KikOB=MehS)46x+gvG)xVbv3h^;$aa^K>IoDUgCg%0lCC-4$boyXP1!*e0gI# zc99UzJI=BPLKO2%tF&J zKaNn)%%LXQ>Gj5$!d%~KW&l|DmJrybyGLGA&wtjl5;V#V@8^lsqxP$PdpseGL|k>P zk&{0FaowcEg@Tj-=gryMn#k7*bKW3#9W@Y72r<)Ua)gf8MSBcX@+Q>O4J4(J4sNCOWG-lH(-qZA^wwyi&gr0RIu9Eh1B z@=XGF4suS$8g{a9XI#;{h?8rQAbtYqg3r(qul`eLGNEB(NMZkmxUwxy6iqJ8`tTYN zG%sv~xu1yg%x8@f-X8*3OhD8wJs^1N{@_a(HOl(P(-k9L3Z zD==3;+X-3D3U2>8|Fl~^RzDFo7zGD;=bSAt#I2d~_Z(4WqV#QIS zT@U0{?6TwW6+oyavgzjdOoTaw{T(M>L>e*iTY&HwPd7BpYTt(P=#{}79F`%%M4`@C z{!n5M(N0KKPVD8${NQq=TI9rWX*tqRFK&3!nQPL@d`Wp0q23aWA}koH8@Y*TtEHn0 z2`7WLSHEQQZ%YRkxu~aSUk-F4Z#ZHVCxi9qV9Z_>!kt*o68fS;fQwh1J%)|?y+G=T z>M$AnTa?>#CYmGfiKK#hm=VEo9J(5`yEkI5q;Vsk*|Fv7;MvBi^&pvsi#@BZ;jfv6 z8qvFn?M2i1*ulqv3cDuF=(%ylg$6DVJm}Oiak&ULnNBvG*{asezhJk*u^PuAWoTA$ z5FbU0e=S4RauUZX@J~VJoeYlO6-n*|rYRaPVjXB0t*6v@%vF>*wy1NkLk|@( zjecugP!|UiH4b0uK${ArINWM*3IO;-fX9&hI5i7SzeEMit}GGL$annV_E)LM{Oj!Jo&vi7_1O)+=g&1SkoxS+^3=v*8hWfq7nyg{ zO0e+kECPEvB#H4-IX(~S#&(+%A8w4_b!>d7unXFy@wN9B(C(zQy4d%;e3Z~^Rc5=y zA%?4HVXjgQ$DB2`gmjf!3T;df8wj8$ba3ku^F=tDdDEJv#_aFsv5|)&a(P97>md0V z>*|_Y

HQ(g?2>;w;e2=PiM21&~GZId`WBH_Q|kK6HZP)UER*ko9XTxJ-yg+gt=j zI@9AistoG6X|${0@%U}>Z9H-^Zm@x+0 z>w)yHlU|gzJ8i<kx_mXqI%#$NrJLA1R~U{dM;Z zr&HXMLMc(HMVdq%JJ%T1FR(n3K?yxQC~1rXxEA07Ntn} zV8F^=X?#r5Pt?jUvkr=KlFh6=h)upV3B)DlSz*r9w8mP(q^>tVj?RMhQ(~EdH2EUL zQFC@DOdr#AY+NN`w7WB^pDWXKY;t1x$^4%6>U4i7iNhKKJ)GH;Fhl#6qFS(vV~HZ8 z`Xpgy7Y5&~CzlCuZy7&s$9PX91?%kUi|IckJUSkAa+MeL*8~y)OOJRv3lE`Cwc;M& zE;y@TAtsBaR?Vnu#7{+-RdogK#arW_<>=G;78US≦`29}wc4xEBb_W}V~$=~7Wn zo)u{@o)JwkX3d@#|IA`?=tZTXcJDl;Mr+F*FNC5X{X&0u9}{V@ogu<~Wg1ua(w9Wm z*eulS)GNfvjU-tnIO~s{_%hSfeWjvONCxuQ-Q!O`f^s#H+(^tH6(F?!jEZtG+6Q8>@dlSqO*PD=R373iYX%ZtwAqTNy`0iRA zAhKCB_tP=yKjmocOlN@Tm1ms5A1Xqi6){SgSqF%K5 zSkZ>-ZVs$-W&=^9k!8%+ai}O~P*qtSr;Bt33`aH^v1fgbu)*V@+c;c^)5P{7z0q>K zAafEkd|0fqoFUNFW*?!Oq+)EfZyu3J8#{FwvF;+!g>I7B`F5xr<20uSaZ%g48%60mn%?;6ZI50T z%S7b65%~sA!*E^`Fnr^&0nRpm6=f!LX1B0=%>l~X()48~LuycnTlE8z!&u(dIA|VW zGo_jpI%!jUD9rV*l_2*c(eADE4k;b$EI&GnSKSoPDVQ}kx(z%?7tafF z%9Yv^OCq!Oet9%HUaNe&tzU-Z1V#C8Ajp}{n--JG5vFtwCg%-P1i57!D(YfEnQX$} z%ug~BeJ#D>bF4_0gXLG!hJ|sSFlvYgpPPR|(7h4^l-|}f@J0mE7>n5V+^_IxJrL_j zmUPYj{=H)6%$R%tVOJeBq$Ef=U69j6pBgsH#^THa{Z0SCP&}-#BP4&BY1g<;Ay0_2 zZ?_DRHy7kiI&w{~US`^*9I3ciy^aZ2kb6c;VQ5MGpcxwWr6=8DwH7GPM!0FG>>CQ; zXRW#DponrwkkjKrBdL-Kq`%0oKs+EK>u4iO(FkM3F6Q(gb^&o)8#Mf{o)sIlJ27wML^TR}9Grn4EgGl|lB`%v3rI-zw1AAf}a0@z|~$!CNFVP;Ai+;vP4* zepXBq={_;LtGmFVybvD#=$J5f9epsA8P!<8GU>OXU01DKnAFk+ZwqviXj7@< ze-j#6swc~eu}+CtE@gGvimWfdY2(`-rE}4)DzX;Nl<>Ay7&Sq&czm2M!ufFbqb*nb zRuHAN0?aytc?5Y{mcU?g8N&7&N#a19I+Vwfot6BjK4a202wkP+33{k313i+YiK&73!Ra;f`c!46*ZG`H1`*S}tAU9w5l8sHP*%T`Z$5@oT|m1a4F~GQ>JZf@Do5iPHI^ zDf-CLG+*z36r`JM;)ECwWp=ZbvZ~04Fej<~FKDuq-p}UOM9wAsMT2biKyUA!)@vz1qA$MAnMD zX07Yz1i3zq^J4X5iI9zhP70sqw`ze}q#NQ+1AMI{t6Qp&7!1N+9KKoJu;sB<{&b;qZo%&18`yEXMZNVkgKtwZLMIuBmQ^n*FpOrw_KWC0Xn3bz#7 zoKH9#F&qlyWirMH%Nh-=6s-ctsfQJTK6|4`Si|HY$ACz>IdDHCklG-RMssIL^j!e$ z+TfQ!TFpa3oK@NfNtgG0VHv-MQ1n>)!u%_$pabYKAudc^Qoi5$A^-~0gkKbz|9Fu! 
zi42(IN)e8XO#!pr?=J=*eeFi1dOGtG$6h3p|G$WE=FJ5r+o4OL;mb^{N*89~Htxu- zR5+8`%G_0(IIU@GUIy+8HQ=ByozC_VrU_jyAFTFU2zSl;jy{Cwi(|GR*n*Glq?(F` zLWa=D7T^;t*T)5vA{*GAWCDo`lAuj#&!jdI%lxWvoUXP1$j(}W7nEWdw zP3*^oyJuJ6C`g*ZI1gS5=;mS_l>GlRAx^KNmCb(4{v7~S{^sy-Cfx8_xe|1*@xKedmY)Ycbq8Pg;ife@O z85iVmf;nTE09tPB_?_b|h0Sy(CIn;nTfW-yVczs1Su{5YP%EJn6DNzHNcLrLrYLg< z@h>-uy05+O8=%$|loWg76@lt5Y>VE%j<7SWN*o=IuLYSy=~+Ojo9}Rao_OvHKd$~g zpzAzm5QEIuL^~}+FjUdR|qp~|twC8UCrsO@HtxVouiEF*Ij;U&so0)JVgrW;+z4BZCd-bpVUU9CSB$`|yuzW#yO>UK!V)Hbrr9x(d_WIe18 z32l-W-6W87OHxRDMTAorH+jdXT9%XJciky|_(w?RieVz3%dt*02zALh`W3N7`K1u& zBlk((O57>XTuX`>oQ7k;pYl&xA9V}!x)4f(gzHbS;$QNgIY&v(xQ7szZ?4`4zQeHx zaBLOph%iTS-iB9i(QXnp`@KW4K!B$u#xHJiM(@joyJYi5tRzf^Zhu^DEqL3v+psRl~2#aU_RK zg7U?8=W#G$;Qmsy>r_`5X4?9@0CGXGp+A0j4+ILrD0Uv z^y1eB!d!(4E*KaxGlZDKHFFx`mhwk+s7T4ODE7aPC@y%Fgtyz??@#p14^!e&k?tkF z1md=)7*7eM_tQ6b+bQ19DBPmZ4GItNn<^nar0YdE;*{C(FHz1*T`vA7+Qpm7fpMJ& z{Us;DLUf5Rb6Bb9OurN`+TVLQ;cDpofI<-OHlg8q--b|1L7C zD$4ZDj2p83{*#V`o;xc+REVT1ZGHF;IYERIOfsqAr=VSPv;ml2;xr*n6JhG;D;Bp4 z^>_8SsE#9_&gmZM^qwl(wQogKkL$%aU67en!QCm3&lThM!rVb5t7hxK61~PVfG)#y z?he`9?fWc<8D8qp2It$)+NDf7tJvDdEyB#@N+y;+izGQ5hQ%ui!_0!V_@}&N0y$+@d?LcNKsTa| zUGNbv`)7Gyj1~S4;WVp|22Xn>kBuaeg*6@)LIk}Tj!@|tk!0F}&gB~aAe=E@OWpTH z!ybtV)2(BTfKl$Fr_9d&pfZ%++GL)1T@Wnuh1^E3nxYj(E{!HpF5ZN(JH=BX$x$LF zjp6fNgLVcO9i`d%DM7ApBQq|2VBOb2+_*X^EK0-TfHKIP7eWLbJ}(#O_HAM{FBxzR zlMdMW|0IHGZ7*Y6(OHK`cNh}{c$?IkdEzbL4rUL*RD}$UNFEASCAeNwZg`LQpUTt5r&Q`n$ z=*f$I?m;9uN2hQX+Y)Nlbl7{TV0smf1H0ntw;)C*l0Kvl#kft7^B7fWXpJM@2F!TY z$AhM~iZ+kva16G!{tZB7kuP9*x1A{eOy}k2@!8Mmf$!M&o&0B%-k3)37vMbQ78hAE ztKYis0=m)B(A3q^Swfs_MRz|gV16gS#hN3be4;&{WP%O}qW-E_<|?-wB}^=OWBd2= zq>?5~R13(aB4!ID7h=XkLY;x^0phavp=peY#-?~nVV7p1PFV3__T!zm?-&a{;77R3 zMRIGrA!3w!=;x045ZYYleuMiPylV&|J_W=tjfkuppjVih?ZGSp;}9hS5oFEFu0#0D zZjIai3*aGD%hdgpNK$KKNslkfzx5(WtNV!Gs6uCPNgS*&t%ksubLAZ$n`+0MYH$4R z6DU^?-w-TZw+nE8!cl`1%B4~fn|%uG%JE8gYfKmI!Y3~^+Wr4nn46MEY9&b;`3!{J z%&gjYSV8|-sbdsAJU<6Vo4O*6Vywc~_2O<5k`1xx^p8d)kg!0(E3`I^zn 
z*x)P3(F~BrB4QNcG`ot!=u=`JyWZDe;yc6c0EOKFuo9P_g6I`AS}{G%5=RLkmmb-? z_Qsi4J zO=z=P4=Sjn^V<-nX;e9ggb-n9GPpqG@eNik}Rb zZV|yO%c{t7Nm3e`w+S>`Iq@Yg?^~`1^6g$QAh7`wk=( z;fR5%5thWF%}|s~Ulipgl5(<~-)il=a6^AY-z=d!9nXuVPPm4M6TVB>CBoHZnuhn> z2*}LQQJyzFWLZ34s4Fikv0_|X2Bhz}b37)1noLF28f5?);vFICU7TEF#f|+lrKo2v zqg9%OWaA*+t9u+Q5bT_|ey05Gn*dPWx@k>uro#SlMzMF87w*dd$~>rFZL}#d+*79L z7;=wjGg$Yj%uzr79*FDAFiZDcmkXk#cVB|s<7N>sm6BF0iRCtf8tr+SUPZf?v`Xzo zs=((9yCd3^Nyj=Er(Zw9e6JlNJ;KG>b`Sh%+?TQ5?2DlF;{fPVm({J z6nMEH=al>a$ISxV&v4jNSnSrX*#?xx=B6!H+mkoz_l;3>GYap&scv+5n0L8 zz+HV@4h`baPznQ23ZmXfr*Tyz!c+9*vD?H0<4q!Smg&z^B3(Gt)pW1D!-PEk3=Ug^ zA{~Jfnao)Ky(@^>mgdD{Cqk1Ovf07dXSe+87ACH`GKurpD2eo^m3p)O1<`IqREhC~ zQH*0I(yF0X}qQ{PiJw&-QoTl~S%lksey>BUy0pcPzO`X;lUx?PMHNXNCtL~6;6Xx${>Q--=Xm_C+of*}JNkCGu7eBa|N!_kB76>6L9zvxXog*q= zlr#nD)HQx@P$xfb^6tCEiK5+)+VHN`9lP(7egecG-E6&#r0S6yW6yiqbQPzI9w2IG;%Qv_>Bm+5p#W! zS8gzlxuYuoh>M$oE^0Qb269cfv7>A4Ab?bwrS&^m1gWrIGqX!AkjZJ9KxbW#ZGjeu zMnSIVRH^e{Cfcd)JEZ00{u%%ek1Cd-2c|%}bTr{Y7Jnv+Hw3zAWJ1mf^08VFnx-}F zVE!x0AK^!%o7Q4v?KTyVob|Z9lm(}Y_9Q1gS8XaZ2`v`v0&%6y>ishTuztquxJJRO z=c09rc=355u1-aflkbYtpxr6bEGR3Hq1a8Rv$8TMFQ(I1_7?1q@fbfG)uP>9r(m?5 zdf6|ggSj}mO2E=k0L5vV8Glxgfiezf4xeY?8dL_qHN$u#tLc3T`}<1n!Y}}v3E)h* z%;=3hMY~R=mXRxH$|@nWs}y)ry?Tev}qXGJ)feopps=lPr zBQy@8-HFurIzd@|@WS|L-%`;`FmBKaiaUV|Iq1&yFinv-?9K`IG?8$7Gdk+}&X>*FjU zY~k>Hf385c6IwR|u`Hvps$iNd#eQ>~0(wG)I8OvzY3Urfp*OnbLOH);H!Bq*ah>@< z77``b!K1fIL&CF34EqeYqp9sep>Dj2WOxt@09-=(Xmw?$2^^hWk^f4+VTmwj-N2&& z-8Ag;Wr(%;uA?xFsNXM7efNY8F1i*fbcL!ongwvbBuH!EvouR#e$D=}= zL-Nrq1ED<*0HehH9sTjN!v39&99uVb6f-}_4*j-Uvn@To1UERaI@un~j3XH2e zW7}3}KER*JK+Frx&5jnqn^+?;WvC z8;Hvw!(K7w90cTl%aFNF2;uUb4tm z{^y9Mekk{`)W<_2&2AEE>x}J60Imb~8VIdvg1-uLs+HVm;tTrAa;$U~QTxvuBo;O8 zMS*!RRvyaJ;n`v5Jzf}j_e&{gAf|{US?L0$cb#I)LL&^)#+D8#0iRMvIGD=|^UFe9 zKrPA1bHoS2%ss@g-BGm&+T3G@wnWm38G_(^9g}ap!f+lXqt0l`eyFaPAI~V9H9jv| zt+p7_$u{?~S3FNNKVt%qP&PIn&SQ_8v}4Q>X)e(%QD~2wg?N&IxygZ(UI%(yut!fi zaI%<|;>~hmh?1jo32`XyxUm!CT!r06JS)LbW2Hj?Ts_1b(glxcg3Og>bbmS8aa%9U 
z1#0YLCLN3mgkHF$K^G|Dw%_Enh8y;^9^F&s!&9#g?2IE7aX3q@nDW)9; z2^*Or3$ajy%T&>W-*q~~*@9qSdK{TVe<#|X&&Jw*mBS&aV$I_In53|ak4Tc2$eH)w z5JvI?ooxSWehOh8v<=B-?Wbh`FHT(EoFRbdTk;p@lz&AjiE(^~0Ovh^{EqRYNQ#E) zdvY=QjA*k;zS>z(-WB5dR3W%l^%wjM$Ynqkff2m*kpN~VyI598)w)d}Ni(r>HsBUM zqv_L?@{hRvQT*<#7Ir54^ZqizaZAr5oFK$G)uw0J9}@tt#!lWjx{fC7*}Z-^#{N8y zs%fGW*9gcvZD%YK?WP>{Q$*kW{8&)9C7qi1#qkg>68kjm;lC6@eghaPuYUqzC&)!% zS3D-#g=2MOU$xdR0J2ptxdp^(ur<1cl0-8d_M#IBYZk4I+Z2Sax-LYkP3N91e+iUj zKh&y)qFRWv@Wtq10!VD+qTcv}!tN{Z(>?Q1VKi^Dizvq4Cz*uj)tvw5i{$G{9^e{O znA)HLmFBjWMZ*Yo>p05#6-3sFd*K9rO|B3`(Ps_e_}Wp^hWtShq*+x_9S4sRJ-THg*dCmOmXc7K@A|R(32bK}{*J-iM!^PRFJZ1qQNGmYi}rwLf6E(+YRUb}V4=NT zYL?8$M+-E67a|bRrsU%IH*eRSmnT9RLoxk)XmbL;ENYF`GJ=Rh!qmSAF((F! za=OJBeY+eBs~3FE#3~o$DK|=gw@>tF$Kx&tCOj`Fn-~y>;?+{yvz0FdrFe1?xi||C z40p%&7lFG~8@U8+jRP+RFt-uSaKWd$>to7blll{qPE1AxGICfQyJO2sAe<{3yfkp= z@Ouf(8YGEFOR?u=KxEi5%>GsTxkChuRspkqYI&j{3R`Gyjt{c<%qr8-|767cwj5t? zY?4j0pu88!ehjyW!Wb|;dK)rUxE#v0lGi`m>HbV0+0;{ta};LA8MjNUbOk|Li%yk2 z#kN3*C;w_5RTGu1Y7=&hWg=liGtQY~#ifqPV#z_QRg{@q)zQi3>kk54P(0pB?9NNC zzXNmUz+tzfOKV&SLRqSZVoe28S$N_h^@)!&1b+DV9F14yiP~71M4uJrG1Q=uvBTAl zqsxK8&h}1ZrE!EX*JIo+yT(Hzy>Fsh({Fr!e_ybR$K0dIaD!{Uf#7W$4}8~xxCyoJ z<4Yo56=a6%P=*+fcG~Wgm{4M3FixtfWx!h-gC8aZl|{qNq-sEmZC)jb=hg$gg8!Nch>46M$GYN0ZtyXadcDfgm&`n z+BtJgiyURbNd6h`jB#cEq=0-WXu&~&R;mw#x}N@+{ZY1!NJ zZXg$`QBQHiy7vH>5lQbUS%zOJkUF(+BO4Ek@FxwOvT9iOUH}-tb)%##r)4BVUOf`W~_ zjaCbRp^|>+Dq=vi%f*R79^*>!D`8~IEyfJs<`RdweHyVF+jLp$NvAU#~opoo(@9mD5)~+ z_&31O-Z?NZ9uk=bd|hnt1Yyq+vQERsFG-Et1-rP}IX_Mi zL?VmWTfmg3MVcw-OBFf2y(q+uQ%KGZiS-|0X4DkjPp|nTG+ED?RUMZq?9Ru?MhMhu zC;ndvcixzcrfd4Gp8{fqtC=0UWWk;Gjy1fe>v#ba5u>ZL+kYAoCZu<*W0ok# z#y*6Faeo1H92UjasC+j6)i*?~0$iI+C6Kp!gt>(>AII$KE-pvxWfn=kOWrT1#z>=s ztoQEa2ITcZsY%@yfH+qf;~7)^J1Rba+lJgu=3?*BZ9IXRDU8p}kxeC+up zuq(vJFXVA5m~CSEi_q>Hn4HrM(=7XuP|DJQ4#sB+lTTl>Zdo-4AMlb9W)r3`%`eBk z4C0)tq||etXxJ*RHN|*96t$5rA;ich{~a=$@>mcg<6Z9+Ao#qb*um^1f{#fXhxz{y zrH?O7o7i>#k$+sx#SogI@f|_LM>7JSDI16ytx$pU7iv?(5wC)}fXp?>Ek%=IZxI!g 
zCK2$7yUvc@^tJ$xs0brbv@O{EA1lfoqr@A-#kl@8|2#Fx)$bPN=Gt}aHu0WFXLe9= z5qrYdp3sRl6v#rW)9uxxH)JC6HM0j^L_=RxQJ$7e!FM0YV5V!xOF z&La(4gAxJ@Q6?CmmP{JS|D1zYpei+akT8u}_Kx20ie*%YmObk~LPZdCD@DbD!D?xh%2fH)A1~Dgf{z~qpw_ff?qZMQGD{bJ| zOOT5qXt=O_wJO zlcWR_i$u`%xj2#XQ|k(l?zOzj!2?1ZZje#94|{egBx%+<_t z+ph@d3h3^N^7IIy`B-TrO4M27k#eLZo!wFxdrXLHQR1_%_}fYV&a9R_a(ph@5vHZ- zZ}o5I5%AR56J4Sm0Xr!+9v2CKap|q8xK)%3%a*RK$VE_ZJTDM-U>i}0AFK>PRa+>| z1clw@xRObYb)Xu#<(TWfp2Y_pM^ecY2qIasm~tsf)&ySp-xN`1p;CLaxNI zp<>lM@yuWM!$RC&bkmZ=N8?w^6J6NHyO0dVxoZH?h6Uzn^iRLHCL~;2L<8b9BeoYk z+NSNg#Gr{533S=GlVaeP;%q^#91dNS2A+17dXlaUfj|x zU*$SrG}b`plGr~BkDah{^egOQPh~g_igbC>Jc@90SdddwFGhsNWAnn?cV>-TiZ;&4 z0!aQKU9@CBwiml3eeI3&*X2jAOf(J8HdvVRm|%Fw(Wu#*rq$L1v1f98Hc(%DIu0(; zVf6_8Li&A63h|i2=6wm7J;P(}cXC7{2zf-IjBwqj4cRe5VC#^iO&5xA`MIQF{(D#e zY^dn&k#gbNA~oL*#l`Cr^vpNAC%y>(P&XxJZ9tGBb;Hpng`J)ZZ%eU2!02&8n$XFP zYk#3`P23{%#;u~w{J9k3TN^@VoeF<7%upgiw^9B9R)&MIo@gr6&9;-Xbw;J7BTg3O5=`alii%t*#H}ws-syRi zy*38RxQFIMXFMr@BvG+rC;#Fm`6u%8$aWS#V4LQjlsXqJaZ3SICdBo=hJOSiY%jL*X zo993F_6^ZfP7~ndxR6q6%pS4)7N8o0a>B5O2p53sQIx$Q-6#wQEU&QzE<-jp=!`Z& zc`zj&`#{6lIM^;1^T0T?9F1CXdhYA2F2$Igb3S$(}=U@#A^!j z3D+XHFDu5Z?Vw#zsgiJJkK?un%30FF;hFKlnNP6$Fk2ID{$r~#Al~l|m8TUZrb+t2 zOu^cWMP`lwGX*YVWyU&nmN0T2owxaJM3A#!=ZwTPz7%54&0@oI_YQf4TKqkr4Dquc zfS3i%LzMDO(f**KXE-+85z^Juv&51yZ6$~<$ZB7V9Yr|DO4QGo^O}XY>=?!~qRuY^ z=-=R#_qm;%K&5;!4egvKAXO#0&YeP>03K;z)87QZ{RK6#`7Zfy2qoxLr)Gc*SCUrY z8e@quI&4(lq&0qkP$q)pz&1`0#kk{!HeOTM^&K~UrznkcMD8b9u&wK0DJkU3uqUt-%`p(zOu zMdSM|_7a&jOdmseYMDmZJsb_9sS---yrA=-DDuKUF&>i5PI<&Pz5k4-1$ zi8so5^EA;O9onSuY6gqZp#q%_QVx{m2jXHuG{ua@*n77;367!oHWBUmmwXNPo{&)i zkbK#&HP+m)#w2h`uJ$U$nxbGLY9KtmAR^0>y&u<@vArO3y&~C4Y_U6l$3pf9Rw-^5 zMx=Qx`Y1$TBsn=xs0TTqyqmTOasg3^DxhEUQz2$C=F1qc|IoA)b$B!(J`@d8J1{Vd zJqVMlJecWnRR_lL!YDeLm;rVEO+s8XIPP~-Z!TkXBg@d4hj{Mf*ugL?)4_Cqq0V3^S^Ix1fbyjU zbMqfTz;%i=r7r44Ij-K#D8<17oHYjTctmGzze8Bw0g{XKj|IUp4C7Kssw(nySWxNo z2MTac!u?P(SSrLJ0y8;urfb>Ji6O<|!s+ttd9@cjUZnHGLof%@Vr*6kL=`xCb;Uzf 
z`S18UNG`%ARRejhrDOM&q>ReI-BYkz7QuIaY)}K8v4&TJF{BaT&M=E!Nx}bKj#4iv za8yspqhJbNLYh`5g#IudCD?+(t|!Wq196dJIj zn5D2eUWMK+tm=*yVJ=p2F-vm``tCWR=SP?h_nrolafy`;xn3M7$Vtf+Qc`EWQkXe| zJrf;lAPJ)G6-*8&VlC!=i;3$wA?8)BOa<4U4xMihbjhwkyN6KciUDEr+ z*AJ`9=49b{&rC21G^+MWtA8cP(Wc7rq7G!0bzl^dT`HDtA{@br>6~g4Mx!plWAxNn zPLnah6wb~TSFD04%goUQ=57IQ(Mq)8HlOX^`J}BAZH?GXfc6SA;@a|WdatGsYuEFe z^VhS8$-Lk@!pvA)2kY)6ie-rY%rseyUkY-^ki%8H5}q%_V+vbxO|4AiNJ}5t7u@BW zHNPr0X~@Y2Uc0$eq|@OM=w8&`?{5S#v+@3bNOsdE017!JWzANeL?;V#A8D6cmi@~# zI&yvSvgo|ev}JBtHl73O=GA3+(sy8(Hwz|5drrVN(;)&}sP177rKMOZ#QEVqqa%JZ z7up9fUUI`5*pk%47%@xri`mDzD(%f!Z61(0(1!GKSftZJX0wxD z?^TeQTAe@_kC~TE`Xyz+Qtr#jK=M%=O9hxYyaj=AR{Zn;5VsA2+`jm|XeWw)3HEoZ z9|%Balww04PHj|yhlIh=4t(Oo+alal(8l_PNdc zUy8@GNHmJH+@2EY@r9j6a*mfIBjcOF$(=0)+us%lc*!IMbx=l8aAux^MVV_&TFsIo z!4iS)$PMzjsAmAY{+z8}+`A;XSLMGDWNy@>KQ$P4wgND>>^>$wQqa6a3a@L2J&GXa zWuqRY8xT$J*k#=0C@JjNjcK9H^D^{WQJY|`;gvPzLy`(nyEOHzLzX z#{qxcUDP(RPn2uU#$9(>rMOxcZIQdyiU)+i*treyse)w34Lted+#Ax_wGB|oSZmN9 z!vI!L>9&j!;w+}4ebp%kx4pu0PNeqkFGoWsvBZp5ieCsLGd6UZN=_H)svtXWiRnYo zFc(FLLR=-nKh_R)x5ZyXdkCh@9RdfcHylq1r*_jgA!u3QF^PpB8H;gLp}}*oAbM5B zv{|w5qC6pdnfY2to0_6_&Uy{+Zw|)3hXR-+Mi3<{p-R zSwe@k5StyIe~D||?qP=4-vxPaqf92pUfcgPPs{26X?{CMsN>NxdO7YJNM&(Y4r{Jg z92X4FC~4F3h9DQd9UE-?z6qeBFmybxyAA#fl8QDJ7sqJ|6IaiEP>GiKT!5G|sJ9nl z`y(889&=!g!kRF^<9419 zCo4>%Oc*_Jo(Quv^WLP6b+1s0geX%wM~{j0QoxRbdQ+GrO$?0%!!xZQ=*V z6LtmAxU)ZvDZ)rw?(QF9X8->L`n#F(kchd!FF+{3APZh0J{3hLK-p3%$E%-ck}h3`md=_YnR}+r zj2|kh6@wky^IAQrQz;avGGGB`-^QUwSxZo&lIG>{3wBJjV(DyJrGZC(f9Km?y$<$W?EfrkjxO zo(1fz>pErd`17*?oBp9!R!PRmepN5BWySn zD=Rz#vElf88Q@}p%kRnyh(U2Wxs}}P!aPmcvLF>>$rR=^xmVI0SrX!=Z{vw1wic<> z*OVhNGYs)w-wQ%u2N!`dW{b%er!CzTpIs8-_{%NUIs>1I4ePpyvuK)T%mq>O6Jf}D~N*0hhqFV zBP2YC8$`JEQP5KhYTq&Ea!{9cE~lF~;|gdRsFs2Jqow&5GJw!Ym2?uC)dvMD91tlQ zpHxk)iKV|IOjmD1w9D{+BO|pj;0f$auH;uf^&9Y7Gz7T0xe$=A*%?BpAop5ScSw{Q z#Hz1qrTJF@Di5i=^~Q@LsWDSMPrQmycSq*F!qtRbVY_F$`vQ94nnKN0thIT_fnp?# zuNrEq<4^kCMe1QSVQ>155VNR#h#|W88tBmmWvz>M1n_xdQ$u{9us>&E8|01CZLbBP 
zM)m1jwYMnG1>?q!i<3lVJ+5zvDQpnGgPUFl$QZAk6`x-3k6V+i*G|8Oa?VI^sm##= zT$v;>kj>vOg}K@V>9Qkff|GX&btSmU=B*0R8Rw}K=fBNv$dRg!alRHp zO)Ki_}tbZG%(`@X+H_w)$DHnd`nWmS!-4v=KO9rD;lv$s;McgGIW4)gOYmFy`xiyi; zA#>Lhv)-~iBGYtwC_VNO=HjC2DQS9z5c8@7|4GPXbcIuj5#esl{*GY`hLg4`yZinImTF@w^M5O@=Ip}W6QdMP)J z`FZ-qm7Ox`QGH4I>wY;z{*7q*#YG5jaOt{+lrhG}f|XApRc8Y462k3}(N5h~yb zU4$d^coy61IJ}IYUd^3ZJS@Z&>gK$Jv?(6^3y?Vjr%V6{}JMBWD>#Sn}|$)_Ap?!KVg>P zzBXBJfBO+&M#I?Mc8*OIb{$K}Y4Pp?UZ7kb9QC5yyXJLH>5N}xv;=CJ zCeIf=+TBr!=J@-%Aa_8$0g$PuulBf;l~kuW))(bMG@y`!?xz6HY2VP8;1Wl;+21JZ z+}=2$jG!PWySj6PxED(Bnt1x0F!Nuh0A$-#{j9$MQkitK4*h^=7++00zpSwNj3cc0 zXZBNFMMF${!k^IuC7A-Zh^Du&kS&Z;o`e`J;1cb1Z+|LBD&&d15MKz%=6`1EG)>Nb z8q6F(4}`0UvCrf{Ki6z{B@w{VKD{DNRnWCrgt-idDJI5?1Ugljjdtjb&JToXvXJ;C z7Cj5)vQ-Z7$Rr*ng}{MyWk9XZD90$YB1?+uG zK=FpV=92}$nP%?A#1f&i;G;YZBAP z`2wi2oYwO7XCi4g<_;$L6<^6?7ug&Y;=2OqsN=@$9M>qE&F{QOj`DLe3*+5cjA_{C zZ1@jixbF4R0$wQEt>-sW>{slqax|pj#kl2FqEXza=A;ROjNhX}k6BoY@WVXX| zw6i1t8xiPg&qm3Ymo@}Y6WRfZ^u7OtGC!?FL^@9k%Q{n50V!AQ_6D$92n$l_wmc`q zX=~$Fik1Ea0Mn*%6yiKGNhDuT772Xle-{g(=4_|Z^F{v??OIf2N4>f?fn1AZi=K`u?57!vCWpfvs1CbMf9D-!nc)aJN&MT9F;%Q286 z<|pOP+mk!Vv2Qy*Mht^-nFyCk(?0GS`q0=C=yv2zsyo*EH?&KDy&xJk+X|or@Om7N z?R{AOy`B={J4FC9$du~%uY&X-Tq?wQ?+|vnGxd7s??gLYtjHE&sQfN~TMggvuolt$ zHt&JCM)DlMa$F^Z+{ceimMkxnBaAb0?SJx5l3FGn@rAhWqT9*tE*)N97V64ZNGi=q z8jo@B=O5Q$qkw3nAjGA@H6o7U88_>EkSB&SC2id=t;woXz z9k~OS>Du03EzpJL&WTR*djYN$BGJBhL$t@pU{YD9{XYhB5oSqOh0gs6h#M|?y?Ds~%=|KS%VaELacfXv9bq`tvNJuJfG&HNIOVT|r+JC~lfWn{{SOP;Ut{MC86-euNQ@7?8VesFWZa5n5R zGGKAsTev&x6z;|mYH~bYu41%O<0fOmHCeP7QO$$Q9Fb069>l#AZ7GU+SsCc$abv8z zS0IM@iG${(OEOoGdBr|{Fgm^k?E!_bxwCI^Cq7Sj4^=QzUk9D&@hn_ZUlFk)c$O!J zkJi}m|50_`@p+cj|7UMx9t1>LWmKSau*x(^)23~bhNOdnj3;@XjAoK2X_~^9Rb+@D zL-r6EG6mVl5=oIE3dj)IviFv$@_T>o&z<|`mskF{&*!=3x%RovIhO#Z$A)Ym>O{NK zbE&}1zuqv5HCF<4LD;}3qo@+(dAMfeve;K-+J?H_*iSSW@wlUh$o^^g1<6~<@kAcI zfoXrYm9ywW64E#2G*>m0_H;b7Oxmn1f|nux2|~LYR{@*?f}yz zmFFlp6w9s#NO5^-Q!jl*zi9IXF(G@YqXjr6{7p#hHqv$eC&ZO#gqvC`0QN_ci8in0AneJy 
z;uaB36gw%MjqVcSDki2hvE$kRX)CaA&;qqkkc(WU2f)u1o%T^!*_McJ%ki*aCsoz2 zGsd)aGN5v>Lu&6iP+C+#hFuciBBv)zNnD48rbEkSC`SvSp19CMu~~#m$QpKz`p8HLDD3$A4FAK>|uS0M9cCMd8$b)svJz5au){R@@9d+|Cz?Ot<5tHJq9I&n6 z?E+kcX(f4H?uz~mfO6Jy^6&dTh%3jwoQp2zkzzb5)E$v&Mb-g$EZPu^u}68jur=9g zBS`bRwRfNwYZL!0xfT;{zeuN`Jpdw9yrkw)$f_^a-k2~hO-*tZjh_i|Y4k{&?sajY z@>Cj*4zJ5(y7WaqAe>uqHihnt<$nmk)PrxOXwWxZxle+fxHqNX)82bW=a2Z!S+dON zZl`WjkhC7G@4OTM?UCESx<{Vm*O>=GI@KSOHp>Dr9$HGt7O7SsDIt?h44dYNb|D*k zo0)r;+Z@0Rf@=^i#(QFCLG&5?)0X2z5fr?m=l)9zvDFs-N$Q$$fCx&(v-RZ?!m4DI zsoxUNvmt`#I98;SuaUTwz3|vEKyC$lqi6Y@Bh1|pb1kjnOzFc>5KQLny^OzrPp@<&wa5{8W|W0zu@7Td`tn_G1XQ9UUjx zqLyQfKsN*XTb#aC=b>b2$}5AM@h=m|s$NkOH|f*4EMP{^%O|&O_Z_IeL+*J0B*?5q z3)MHePo@}q%)~i`&TStEbN1{IaFNrsJ%Cxkt6;q7v4a!JWr&6NfiSnx^n7Cx=L&XO zHCzC)IOG5d)ST2B#}UGO?W5=@7u+BO4t6Gs#{>EA&6q^cl#ivq>*BE|R@jlyE*#^W zICdgrdSviHD0%jt?)u0ielaUpDq2c%^-i(h5lq2mPLKEW>Gr};BUW7<@z0%r zT-GVRd6>R4h)XexzLV@|QHKzkhLfLxo){1^+N=d{KI&%538N+Z9KN zCY@Q$ye~RfW>&u^4^heP%(RuXmfViv-Vfj)l7<0)G1+~P0!ocTo$zOaX(oJT4;uiAx{$xM~J6$NYOTT*x z3*%vZlYcv-pcEg8G6&flkDlK@70US2d)l$w{o3xMdonLB6Y^1e}Alta^dY><1KAn&Qa^L`oD z=EBXCX)qso;d??{=c-{{G}O%mfaTca@;ZeGhnIdB7g;?*sDNMFV3Sb`P3aI+u;Y7N zlv86f$$|Dqby)(4O!Z9lzCs*?XH>ap{ha_Z?ABr6!IVIJ(-$k(^Nag-UoX}X+le;s z`x4~@t>YR5cjyPR8pg`0HT$T43#3<7SIv%9X7N2bo0Zv`b12_Z7@WkJ8#8Y*#Al}v z;}E}uTyYGAb7-#SFV@(lf;=E9Dy!n(BF!WnxajVE@ue`=2i-Cp9W+6^_1IQZ|NR7{X>j^G zW;Uc-Vqdv=N;X=@=OEJ`<4BU?*9B6yLBzcAwg{)vG-q!7a1Nw%oh?(xokY8a@)L#v zar6uFY&5?k9ueiD*J6rRDo*c-C-O+h54ktvwcEK_Focg5>Fz1a2@SQ!6jAQ*^X$QG z&i{uVgIXz9qbd4LqLS_fEmeJs=Uf}9pJf*i>da|G{+(qTGBh`WF`Fl-%~ zkssYH)Hx(4eD$sPvp@=(*yTjyeo$@*^fB3;&7BWmo*`=K_%4n8D%WKC-8s?r?YuKo zD0j!y{j+4yh(bqemH@L_2V`EgjQcZ9m{7)ELejK}r3Gz2L2fw7>kef%fCUVgQ%|jl2>7)?YTwtfh%Ntu1EE~|X^A(n zUkGs0>5DRPtsr7_4)$<$bcZOHt`5`Wcv|%6jDVa-tNsVV=!I=K#6m{2>_J%&Bs$5V zA?_5WAhI89i)9XmG;7!q$;f1p5clMsQd=A$(%IG&So+@*tx1Wo)OiTsu0%5rZ z;Q82FjBVjZb3n?~$tkNtnB&i#-Vi5>bO|ep5?B0OfY+5W#}Mn3aa%m02<|4+v^1F9 z$267z$sOYX&VWlrIf#5F;jWC8zt{>0JId`i$=0{aqn(>%6Y;h%SD7h&AjY?4VX*ID 
zj-n0D6Xrs4!Ng>8nE(=f2)_)h~oXUuS`r`=!P7CoZhdInY7i3TiZ$(O)vr%UTB~NlPHB<@mV3bCT?B!Uo z_7&>d>RH$*bY*l=y811mso1s?#>Qj%X8OdjOjoSdodv8d(b?yVcDb13Sh;x&BZnk2 zlF{M=c`U?L#rROP8<(!jmMA^QtlZmp)a-)=Cqp8p5dD5Xdy6W z0Ul^K>vIaM;lmmw&xlSNtzDW^%O46rA4g+RvZhBxItO$)+Hm87Yk>YNjYfnLv0fRP z^sCqaF4VVkr#w>c{EZL~nu$AYA9ss%D|TbP6_2JrC!((YSaBeWFU1N@`(uQpB^*?@ znkT?TlJPQ$bi|f}U~bYWY-Z)_wL*|ND2)joh{Y};P7q4=v~u!%#ap6B%d2fpY_%{$ z@S7!bj^7D!k!5)iO+(OhE}j%F#Vw1n;K?P`K-@1REijMJ^~BeC5T3M=mx!gqS&+8g zkvn-MMgS~i7Z_VD<~yB5(q)*o=i$D?90_evh9LoG9uD9d7UV}E>5$QS1SscQ*ia&)-}Kll7+c8RVsg1;)X6R2k#>5} z5enyCCGTrE2`j`)!d#?UjDKS7pPBB&%bkYfQqhjnL}tk<`Kd7X#Kbco{pGCVvSiRB z}Zbs&vT#||=neTXQiOI)ERIi- z&qTS?{xRZmV(@1vviM#ObM5~0$q+` zshM)j72v)~=dm%6PNox20#7;zyQqE?RNeIFg)oJM1*9KZY}zmvs; z@e|`ReN!rC=ziRsQtF*g1tC+ta)|6Cad*`rjA|jnxIK;#;T)y5C1Z!HPXjW45)=J+ zRe-~ACL7ji>~le`Of#0IvNnxP&H&?3v(MOApiloy%OpxWap|wXC|sgzuMKtFC?p+D z2p`$p(6FBgq;&0iGwDkaFg-bW#oRNq@UVEGmsh7zR2S&dGA7ycTqeL%b$zKfuFc7b zledr0M8d9)#l3OgSy|KyJ?IfH3D5+FZ-~dw=9{L`rK}d$RnLJ=nIJVX(R;OCrpH)QdlB-^Bu{0QQDDz27C0f-v<#7US=t zeMUrTr@NJ8!s8jv^Y|g{lhR~CQ%8uq7fI28D1Mx4xPMW8o&|vxT z1zF67!Z2H+*jUxdYqySB20&^wQXxzjk`WM{AV#B1Aj> z>_P{VRy5`aGbX$LI7A5XnGWMTebdT?ESKRws=yZjrt2Xmu6RW{W zSAe>MSWUnS|6zh*(V>IAah1MZKsG+QuXtF9lkO?to8~3aZv27`tz)MvoxrT7*|C?t z{c~lhV}W|=euC)AtvZ}Wt0=R!D>;oN0DTQ{QF~l-RTiB)g~YILi>ram$e!K~xv)dy z;e3H`M1u|nU7nIKe>z5JcZ)Prd!&4!&F2rV0V7{b7_9R2kpR-wc9AD&%!_@mbtn>Q z4VPr`T#DnahoZCub-^wa;D$k@ML&s)g*bJ@=Il088ncd0WGpf1_fCSUv@KnN z9q6PR0BGll<0iyK`gSu-C3iK`or0VL3xoR0(>H-oB$Q}4Bz`T*op2gE!f`ie!TXZ8 zR$?b1E|7EpqF%I_g7^%96v9VtpoDrZm1u^YdQw4d3iX3~Cp=8ds8y^p1iM)BB+k}! 
z(k)rQLJ{{T>AN{Q33m34ywMu>h#rl|wih*wTS46QlB@i9OaN6vMN;$C8zP+oK2s*f zo1&dUbN`@(@o%LO;OVr%;h1zA0el69(P~ngvP^RZVupS(vm{pgwMchJ+~T%#A{vUZ zw>xz^gi+TzN{A!XS60N+B1eZ(51aRaSnUsvL;i_xc|7$9q0BTQb_vB;;f^fCLLNco zV0O9?*9z}(U2$U`VnHvB&w<6t`SY@!6Oq^JZy$h5c z%~aMMi&J9cYW7TLi*Sv38bEsOoaltPB-4ADt*h>ab_M0#h`WrqUXX*Zu=K?TqA7)* z?xc1@_hd2HTw!=uilu_wNR8~=5p;c;BU7bO$O~D2%%b6#j2U$}#t3p>kROjiR10uA z?8jv$|GW^!%K!_)JNl-g4YL|&$IAC+aS_Gh`CvN%t_XJ?iS&4ZAR-{X=#QiG2$DwN(frXcsM#rq%_)s~{1BZ{AR;tMA@=}+2T7vpYGE&!q?W;6=5(FmCH;&Ec+qqhlw zNj#kwt1so7#?wgGjI@sE(OD5GEL%95_z*QY21lpS=+sl>Ipp*_8hg&>F8S*! z$5!_{aYA9rb5$H7I&DC)BCZv{UU*7>ysl3Q)zrtcvw!B>{kVr{Cy1n(^}+|&UtDzl zDG$(uylnFa9DscK_?ZIF5CV&EZI110{xi0m5)NJ_#BEYed|qqT`XC1vb|6zr&d2d! zmh^0Ol7ZMph`W2Oyi8sun!;ix*w3!;z=t5I$ApQKqF>+C7j67PpESI$5a8y)A_{2+ zm)Z{tqiy9#t}{LnMZsH!QTB<4Aw0d-Nl}e|G$7)1!S42noQBr}{{lj$yr&(D^=&4% zj$BPoIzoU8JWtPYjeo>}o0$zK>zl$C7IL9qi~~eEDBc$PuffcS7K5jOIxYn1Y&6vfQY{J+Dx7jLIqTjy`?t ztU~AA3VTF*U$_Ub=C&U088>Vi%!j> zN{V6MSMh2d6-OPME#f~yC_*CKTKx$K*DCRiB~PPO!p!{S9x1tHxm6%ctEi5Zp5&8G z)zhI?xmc7_XvKGMqQiQFFqgLv>tkfP4?hJ$C*s}hcwXP|2nPg-SCy|sJM=8X)l7#Q zJPkq)Ec$)zF(k@46p<|@)=9q*X4XmIr_S)xe*n2};#m^u@6QF0ZgS(3c&)!u2&F*z zC02NbZ`V@FZ%A2jmHeDQC(B&arQTBcPp888pT;f$q$niq$lVA^hD1B#>T;O_-7Nwr zzPwhn#8Oe-6wGFl!3u>7#bo@i^sK|}h#AD>*in=V#7bsm$Jj%l^Wyw65I;$2d~fSD zk*E=E-mpoMDj3qR9ODTd6dyQf+$hY6b5ou8F5_-cAU$DdnX%}YyhybveD9W9=! 
zcm-OJXXe_$Me$}Hr?S*P*vb%mF-x{7x#piEz$L8bF`>J72$%KZ2%+YQB#((mV`&=I z=KM^^Z{=y}v50cKo5z}s7q&jl^c62BD zkd|fv{i_fc1)m;8sUXKHFN4ALLCsG`i!ftse;X&KKlf?IzDfivK_I65?#@HwA4tk# ze-z?&Wn(jPD{6SS!4>FPjrFqw3wG^6%Q5y};PgwL>EhaBf+(|_xiPMP#iYFDkj9g* zLZ(dQA|ZJ)<}G1vvu-qlRic$%19Ho4Rn5W+Cv*LlxLw2ZoEtxf@EL@tIX!7T#3m8inu2)EaLnm@LB-&tXQh3R7aSY+B& zxzN$8b2vs5Xoj}1Wa0m;C$13Y?prbXT8w0sZ>H(-z>XApxBh@&)Emh)$DWkH@9^z$ zS4s>OP(fQN!mS}!4vE+veUc9ynJar7yL|-V@*`s!xi#@6Fc1?2lR<;7M0Xcq#^P$P zC)z~2j0d^g~RWzq0@_eOngvhk-q|QXp z_qkZ8S=S&PN2cdu+#`_Uq0HPDuZnO^tmBvsFd8=f1k6P)>m2m9XpJu&-%I7;nD;3F z!(*VZD2~#%>%;J9Ep>92B*^g_rL(Z!XIT)oKZ(Nn-hxPBn4VRkZ+A5mEV=j`%)?vb zvmB2Iaxw9hQo!K9EuP4OG0$VpSB(AtO)z+b0*w?PUl&P#_hXoIK6gG1y&NNDdh$~N zloN4o(heoju8J~{?pB{5Hx2r`+yck5LR=EIbMin`j5WTPMsG2@ZD&6R`Y+$+l^Rju9yN#qU_LtQN@-5Pl;UR z%GU5U{p4gDG`qbb+QrPCUPr}vB~W#o9#*S8R))y>thQ18alSC~2m=KA>_EIGh~k$L zZI|O#fnwvnZ%M&auPCzA$VOuOT#wIJaFLy{BJMWWKlsYg4jdjyaJyXxln zK!k@mZ?v{_CUWNI*9LT{I(6xS$EkHdJjk@4;&PV-SiA5?^g}v++NBw?+`3tUGuXSz z1pzOA3Zz68$y)^rM45AgeJE9HpmYfnmpJCc1tQ$C5~u0+%JqP#VxdRR!^8rSY1^wD zTngTvhUmsxg%&(^{VYTS_k)V`s32EsZsLbEeh?t)`aA3pKi0Q@O>S%1UI?MFB!kt7 zwn8*z&`l`h&r*`?IGn`=d3>$DFzt#B2yfmdreMjwc-8MaQRYYVZF}N0(PjqXU$$pd z_Df+dit3Ns#a%Z9N#_wt)6U~L03I__CG&Bg+)Su*iUrxT%02^-(tF2 zvMCWKBmkW+19o5~L4pF-$a=+VXdc?7P|WY-wiZ{|#;`yZc*aO|0S3!)Tj zZ|>MTu<^cT3%=7{Bz;%z6w5JrOF)-F29WCiHw$968@V7Z7?VZmOE*otsxHt`e4AE| zdjz=`^bN|X%bPhMbHaf*V{1ZqL&}iXEk4^T`qbmX-4>|GpwYwT^($enMl*Lc9EQu= zWJo=yMC$#m5Dg)3fcDxJ(!H*>mt)$!qTQ>>uD1(^N@i!a#b6%0PjT# zbxE3-x{G>!;ay>_2t7znbCwwo;taXsOT>CB3vxtMQcFCXG923oGjFH!R$x!up93u& z{n#PCHh~asSOgO&WKGNxu4C5IE%Ba$((aNxO&!CNvSb$)bYJtf05v?)ulaD9^@DHd;Q;dr|zUKH&PR7p|R+8vUj=*hA8fe3Q0 z#=pxZ={H)4F+txhVt;AmiotkY0EtKsf^m7s9@+26zR`>r769wfX@wI{h$0W9b(rPv z31QYV@cUztX!j7^_(~@vj?b|~fP?8}{6!dvVAWEJ&G&+EPwVPvO|}4&1bIA8&;7&4 z-hwGfRU%4XB8m!Du;aZ{->y_^;??SE0jcx{Cw|=MS^Be7IF*%>GX?p<-jJ>UH-(+C zr)Zaj{avwR5a(WbkcFkicpMXLo_BE!8m|6HWYRAuRA>|FtrW~x@vPT_Q6?$&zVk}KiA z3vrfg56khSXc&f%fOseWt)|2vx}lQa(q$Ge&{pF9M4;!7k-HGoVIs&~bxjp$I$F^* 
zU`Q_!Lc6ez!_&(|xhWuFbpAaz&j;N5m`G2`?y#LMcrZIPVu|^ z*N)lf5|`sg)%@z}<3pc&b|&5~VU&TbD@U`t@(@LwVjxLF`YsH%&gn^9NYze-G+R6L z1SETnxzikiq1}X|z^7^;>7^(_lw;|12+GVG61|f6|5G&WS6>-*GyD^Ge{3Uq;%w1r z@i^rqJH#u6ksEf;m>ebz_K_g-XSG~CCH^lh|hHUqyR^m(aSUABHcUKMWGoLf1L$FTaKGNIiAort(3UziIxT^ zr`6ocBY?3`fFt+dKZmBCzAuQ&h>l7N%UrD12;^SLh(qNGxom%|pQ9C{UdS-!5lY_G z?97V#cH-KkFtUCx#2l7;D%o$M(UC?OzuUODs)+zDV1-tmxLtrJCLd*c5@ATp1}2+H zxGK3y-{$``oVmq+MAKq&yMl2Mgv+2SJviPgL}Q1`%~G5s%8_9m4=;*#0a&Uj-TL#g z08_g#Mx2n-6L#i}j$1`LLgyei;qey%WIqoLi}=xgS?rnuuPPiOIz2=pZ|FerMC0qX zf=Ps%4mP#g!U}MYX6ZrcgZ1U{`C!x;7a;MXzRk$mmwP0mlt{}j;7tQ}0e<4hFd z`p~|55INbRb_#Y5GdP^b2cjui#r(MJKt4wqOb3q_1-LuUq`~(*D2vK{6#j`*x+`_@ zJV$iOnG&X#GG5*BU@-UDYLBn==>l?OYw7e|@2W*m7pf9j%mR@v0Z+%3V~J==fP}deo0cG4=l+qa z*$G@IV6;Ykv0p2sb4ZPJGy|O})Um4A#G>oomc^ROthj?nvP>3RvOG+5I;uI8Aj0`p zh*p9`uvpy=<@9DJcD9WVQx<3#W?HliT4FL%f3P0IP8L?I0!BJ z;&c&CqCv{`4F0%Zm@jFIgWX7o7RuUBy3C8Zvy`SW3SLf0Ia*@He_bSOu<7&HqFfO6 z_lr9?Uv1L^M9vlURn<{1!fe%xZp1GMA%1eD7+;H`6V}d+alL$|OCV1Pa4*7c<0gSl zpB7ED^d9O1a*|UMS5NWBLqXh9QwA{*T)*EmLV38=&r3O3jQ)}ywu?)JnC&P@up4M8 z12`Yf8Hr%&h#Z<*LPkc}pArO*rF6vDziPm#lzQ~y3G3w}0;n6eE-c?)h;o|9S~$bw zEMd?w78YOccJ^&d$gP6hSR97APGiCQKVf9d{HSZ!qw`pZ)LG8Z(9dH@ zg+aY1?pjDJ^JNBi;PHP$S<2;7oA=1q3UdMQ0+Hx=u)pS-dXC8pD9jbmThv-f5y;9gYbLWg#3F3v;IU_hPDTGvsLd7kDh;as{Er zdP0Nk(!cXmOO+f&cUjEO&bm5zmqzac?JbaGhj8@`r;zzZt03nNLj5S=)xvyS(7}bZ zSFd3Y$3cgIr=0?=*4{Wu0L|IX0k}VI5alVXTP8u=sB#$jn@}@rN{RKd6#o$7nzU-| zd^wK?m(Yetj#jH2p2g#>7NmCZGa-x{h8g12|AR<5)H^JVy=W2SD%Yw&y2Ovx9ApOV z?qdz)#btq{w0JRXS9t`4YbEP|LcDz>fXml_ge2x41?_qz-ieuPdt0MVs0$zm5fc8q zEQplmv4(N6*dMDM4d5xK1`iS2iFA}y;V~MI0b;5{hdb`ow?iY^i?2kQaaGB?XHi;` z#pe-pmX-ear697zLeTTU)dC!g4VR?euYWgI89^OaL&p**y-w}>j(+VF;VvNe5rgs( zu=3AVS!)I1B$ZV*CKVummrE1i)2j0;^9E@CqwX>olbO9rzyHGPe@MP3H1jdnqb)Ou`n-xqSH-;K@@Bg(P_Ha4(O=dDad6l zk|vq2^m7n~OLg+#!fm45U0GHT+e)o#os+V37>=Bl$}wJ$!)c4?i`-pu5M3r*qDY{9 zia^ruNM2vKMwD9uMK;QOivagyc8N37~D}}H(C(k zI!*89>HwXolQX1_B-wE+dkP3WrhzR%ufAOdJRI||?KZ#60ImXffObCtW}LJH=pb>B 
zAeWQF3yew5j7JM~Gj(f%Rd@K?sbDk;^O>Bv)SQ;3g)U4ID}fvf%Rf{+*wmh>U{u1! zucRIivr6z5 zrD#~A=b`?d|HeRF81v8IH}x zt!Ls+ZI538xinL#LX3#^sMm#^UxXy;mF%{p6agAs?Su^i>#@!;^X>f9j zrrM&j9hoD&mbz0#sz;)|9=D5d>FhRwOM-KN%t>iHa!rOaBw?cqv@W47i1HfbQGS+F?kQ^mj24S z!pvYj>=)}?01eM%Js*=qz_aAgUf|GPip7GQF88$zE_T}23v(KEJQW@||tSP3t^&F^G#qyO!fy(Po-pUHsgme~H5Q4wpI%BAJf3P=pgh z-HK)X@)RIOb0Sc9O@Mig0fS#OUF9+`IN3D2I@ZxQW#j2cEM!Db<0iz%$rBBttv!G1r~aJV3xB%*B4lPpg+h5JkWY0LTXq{O4@^XyqhP&{&*MxSuAsH zmPofmCIhryyD%6#Tc_F!M7WR(>3O)@jb97$c%Xix`tRjfa2+_AwrP&xUwk9T=wyqy z9q}^SzXH>Vt0d=t@f#uLziwTU1J+H#C^g=vkUrmJg1NLSYDKt1PUG?fb43Bn5A$lG zRi91?hU$(7CF&e2@LCSMZ;Q2WCLBzhHFavV=$mS>&lv1N6mhL+D$Sh>S#NR+lzECq zEiQ)W@x{1Ws9A@Lx6(qUo!<#^U9rm?z<(tz@0sX!uBY&hN!NLS(F zh7((C`3Fcdu?p#GZ_F3q8cEw+vcdxec?N01(mW0p?IBUk2Bsq=(b&=4394*p|O~FEj<=OcZPU$=}?X#Rm8B?Q$fiaysJ9f?N+K0qf=LBg}0(PiN>@ zD8OBl)05nMOQ<2xHK^;Q2Mon;@_1AA1WVkU0}UOE@lyMofV7~vEsT>!peg9SbE@MH z`gVG}XjjC#>`@`qcjPMBalM`Y&Seo-*RjmfEQZ8{&HeGI5La+2djs{`b?(msRnpSz zI=X~7P-BVfE{<}i2r?H?mF<)Jg?J^8sNEyd+PDAgP>DRqPqM{|4`eZ`c`mqFjx@W4 zIfIIx0VZAU+y!~7kuP1t@vdO=qpypFblV4^;m7p4>bOMTX1e6TGDj-Kat}E|0V6`h z6tTHzGp%`$%>$yo97$Y!q3Q7*1gRb8;X8B0jt*m+he^jxPEIgsngIImRJo_ULxgkT z4P88=#I#31Jd`V%2V<#7*8`QQ$?=tFM@Xt$He&HBZ~kx>j{d(^mvVIcHA`wS%=?{4 zv7n+hD*l!wgSRv$z-SdRx>)SQMQ!p!8?D%&OvyLU1&H70K zobdn;OKJ&yOAzczTqW^@g-DlI_D;#WTk*XovUCvSGQfreI2~ShqqOCSCxIw#Cx@(8 z^gVk1Bau;0Pycz!VcOAwT<&SUU0U|Gh`nQ5A>hA-wEg{z)?=n+bI-tOGACk z*QZONJ%M)HXA5FnCUV!fG!HPFRa-aQ@BJgoq>QU5`2yy^Es!+#+B;TxhEI2wdL0pe zAR5jOO7Cn_5iSfnZYJM-1yJb)F!E4+yZCy<(lgP80-ap5G_-C@BWNl-;h#>8AovS6y=zLNOELN+5C#aQwk z2)%T@E4kW>&OoyCxK;$^6<><+gO{LOMl6gu%<%S7j1lOGC|$Q$&^zKX!JYt7OJBN# z!pD8W%vM=d$BHj!^pp<79~+8x95%+5S3dO$7>%bz8z~viSp;~ZK(i%&@+zdqujJ}n zZ>vof2u-J73)V6G&O;_*WQppFahgzf4laV25Z8VU00t+|SC13n%xbHq#M2@jc&c7^ zjAdU3aOJD}TX|v@1ue?Bk>JtUYqtq;nP?YBW=r^ZV-gjnuE3(w5gWY$@tTq|=dT1fd^g9B zb>4yW8K_Q*wye6y7Z_pZ=?8`aa`Ct@{caw)D}JYMCx~SOoj3u=5lJF~xPf>+4}kp` zGtHjw<}v!@MP*J7ActVidqaS^ikCGlg8viZ_8IJ>uWJCU{azLd*K;@zh&e(WiWP?= 
z2Q$}k!e~Xt_m&gmQc)ftP01rpV%d`S0nNxV&qF2BleYMmPvo$-!52XX{f}@xNUK;ur&SV#e?y;l$Ob^6k(qf ztqy>aBSZWCk0ISns_dt?LoONrE7%p9O~L(q`}9v7l{%xp5Qm9$PI}}#juY*^B=ZJI ziq`uSD6L%YLJiQ!ktrA-u{d2R#K~0ZZ2r7xca{HdeUHk|_#qwf*c7pj>xH)&LY-?( zM-Q__IeslD?K`O5a{v&)&_=oE_i^CAAzjl@YXV7*6rA5UWI|!DXXTQm?%E2zSBy_pwgu%TUWCk7o;hWiJN=@9VPiMYh zkjpe$`p<$WDpowqjL+o(uqaMMooD}-0PY;bs_qj7zz+@-$r@0X z2A~F&^xxL#72shwYjNBniuxiQVvdOq(;wwPy&NBlax+#UM(w~6PARtj)-mwx-N6j; zfJo(sVxT;t%vz>uw96H7*{A6lv`455&UKsSk>3fS`lD(j90Wc~A&T1C&uc8q8EY;} zD5|FoH>zvpxJ-b1bahV~b5wFQ@t{yAk7N=-Mbs|`;vq6&@}%+cm}oav#b9fj9J9y! z!W@g0u*7>S-w5$MQiWCQVar2OMmYJ+epp%RYSVRFNsxmzB(FEK zcWD>qVzMeSyXge ze{eb@DrF#>J$^p3!~_D zy~Q|E-%dbp#iRT2Ga*bE9lG;>U4*lrfnH{OA=)`Fz0+yb#(I!BZ{I=f9B z*mSPieQifUc&wxIm~|k1u*03#sB0s2h28ZF#p0c?xK-b7I@Q~CiZv( zB_2`ua$T^|<<}3iP_C`k19b!PY%x_nOMn}Yu_S?bye-I8n=5<2z1N2(`-=LCxL4oi z3YXTpYWPTqE8d8G*uLS&^af5^gV8HG>mYhcD~=MB_DF4~neF1lG#IxWqt41N5|j?a z`lwcKh%9C(L7qVmA>Uc#WmR z?u7x5Plz;3vKu-i@v36rM`e7Ev;(fvtggNh-OqOl2 z(T@OJxJJDF#VMklRTKUo87OB6VN_3>8t3X$14v#9Hro`!Nws2y+a6m9aG9inkZ3vW zuo;-S)PoTQgMlWUD$sc#D%a3HUWhwcTGTjCkQ;Xf=Mx?lXT}+bO*hX%Hlwwq*G4WE zq8og9oFNdwTfbx1Qd-kJdBJ(pXoN&gT459Hx5ZrcXJ4S~wyU~;Y0w`~?< zexV)HfU^X6w%Q+~P3gwVVHh*Ym1DD@x}4*%_+tRKP8mPHQmlJd5L{qShrv;Qd?}g= zFrUG}*nhh$2Ad^aaYp0zKrS-6#T?NO7T~TSGk)J+Tq6*+OB)5jjwtg%F5hUD5AsO$ zS$}$P2ZGTA3sD?hejML00*}@$F;|2OUdNsSt;#q_ked(B8w0V$jt-Cas>RVQB5f}^ z1}o?iA)XvDdX8U-qW96TAONxjDiG3QyA<$RzV&kCbNyX}tR_U9s8B5rl0 zNgQA2andtfIrf-HFtX*L0ZFCapJbBOZ(__=>;&mzRwcSV(UK#&ccyH=5<*GUt^={} z&QR`l_@v?7#LUj_@m0ZY6N;;Oa2Eg`*-DcHzV7) zmUi6)#-Ko_Q{BZufGg%-=HdF2_d|5SoPeHplV{2XyQEonhJu=la`d5RwBib3aIr@l zsRu;35$oh~=bw257LkRCd}NE=2*Z$_R~==2I`|ZnL*fF_P9OadY19qJox&(_y1c8e zZ9O>)#@)pN>087e!p!=nLN`Mz_7XzDs64ML><;1jh|kQh$U|Zsp^n7SL|e$aa->xT zqFwQh5Oa(dh!al;o9vM#rmJWjiD1M&LfvM#LS}REnE=dEZ7XU+P*Ks??R(td9mL1j5=c-*G<&bDg>O8AiVGz7W^2L$7kc^e=NrdJr*_&AcGLbouNvIzqj zZeawtWU}k#P(zQuPoUFhJD;pR?+T(eBRBQLH~OYO)Yru93O=0&2PGzWj-uxagKLQ@ ze^-pJgmhxlZR;P41*N=VZSRi}0knKkQkO%kAY9)H%o`ZA3x&XGPLIXG*k}rrhfy8= 
zEhmX|`YqBgoJ0Vng z+nvY8!TNRz3^_bwW1`S4jB>DT<7z#Q6zOs#zEre}7X+hf=EtVfvINp=?fw`m%oV9c z33HrCmmpD{DMo`3*P~TACNw4DxvMxxzwtc_5lwF%b0KQGwEaUu+^kjYz3@d!v~@Lr zX=C-F3@M?{2g00bazVfg@Y6wD4_=PcJBjhNAkTS?>0~7puLP#m5@FNHGYI20S%yT31<#9$1&r+;M>~iFjuHU#s|MpjeNq(Z;=~6cJh3q5@g~gljT&|udd|#Pww?fH6B1SK1JFm_ zL>N6#63do&SCp#Oi=*}(8vGM(mta++sB8{Lw20PaoGy^s%gHm{dDnd*UDalJ*r^ar zaufw|y9jrRIz+Iyh)mmuQwEn0&*#XL#C|T;Ys|u6iq7p-GzuZb2KoCR6k%pF(Xt(A zi-m&B1Flla$fia`#09836pmkA4ePjZH};Y z7?e)P?Sd#L@3Z#Aqas}SN;E8b<68l)Dx$|`pS@8~`l%x!5iUi)h|x|t*pDCF!-PGqb5NF!551B4#X1C z=^#%`6Q0lGP+GhYbi}3y6Nc)IT-DtXJBwg6_4a7}i(^E(;OK|w;iEf*IK^%hK)T{S z0j@uzUn;~Lb=%{`JSM6fXsg6(g)Cs=#sLEoSs2U^?Ec2(sa)W(@Au6rqU9?+DBsbD)^jOIK{cVRFwd|tOmG#8e)r?GJL?f1HEz-jvovK{D- z6NET@UUX)PD8x;IU`_gfUZnocOM!_0`u%PzBoIgeS% z#xI(Mkivus6XGO&TXN6Iohx;nLL|RgVL=wHQCds9y)K0I;&q?4=ogVLQS<;wf8){| ztLDgrB?s2Ne5Ws@u(QJ-9Rv+j>&gFeLK&}!4}NIAe|?> zT4}f8!L}z$LGEIte%_ly$l8;0$09+lO!MI4#c`Zymjnk#x|^!(1#$iHlFpL4PhSSq z@Zd&&%n$&pt7(k5Hve@t+qSNFNI=r+?D>#Qbsq}N>^3CfOFS&f6`6*S?&1BAu1M-V z9Y>L6Fjm}&JH{RQBrb!JG4Y5f7}7a%Yg{*wMNBRuHLPzDM%qotT9{tm5owmr?B(G~ zc01n+a!1#zC2h&;fXJr?fvNI>k;~Y{A^kf@fVomZS}b)(3vps{f-A-9lLV!sX4LB$ z?eNRIw0dy)MW#t>g-oMv+<2heFNG>6{D@ zPV9Xch>OA7#f;}h0W>sTtx+#t<5{u;_`Qtq1G%3tT4MUJ;Vz%hX5t}#i{twU< z<($st6ABCi@w!kK1uu>iycBbe0HV+C%<=7Zefwt#Ql)tEtRS;yniL~AO&$s27UTSf z9ph2ZaK>6f+Z+v*_5!W!S`~(c5u+M`fi9BI7EM(X%ZwXSqR6rqw~KHA&}1wupvXra zlO@O?LvT!2+j0q*Tc;JH<*t}3z@?;CQY?B)h?!E+%pL}DV>vcF7LcNH*MVH3JEanB z-Kab+OF}-3x`(4i2%)*AL(b8kQe2bvb;Xf+=o+kH>8EQRPiQKIz8zPFBFv-m5Vk*C zp8)Lw;|r)T7<=bHW&beKPM-izJNsk3zfhzr#8sXS@#o|S0tg8f9u?wDhD)59PCXHt z5)A1H+X+91pjjui{J@mh7fm84S9usAkX|Qh z7T^g>Di!z@LAY3s-ZX(`jwBcB+qr4mf{pV)yeG`PMx$I;{@)0q=5%0uUKiq2QMT2j(1Ga{SlVUhJi_!mp($ zHKDY)m-!Whd4YR47VZ@VxVuiTuZjkdM8yv^rRY8r%KZf8s~%1bI>0`q>Oj;gE?0T?>jlda-l8Ic_5}j_Ob1`8PGH!yMsLhxXIA$;D()z zu^@^+t63`WagfM#B-@4XFVjSHV$eHBarAkPAz3X0>O7Hd=d$($Tb-Y!fKylXin&6_ zB2kIxkH1_1=|T)}lHqBU`2K}JZi3N=)o9_L3T2h5n-_ihbS72m$%{nO(Z=)I&2joB 
zdHJIu)=46%!D9TTx|^MvBKVG#brfL^xyaFXoUlt=k$yMKYKm|5?Y7*XDQov%Lps-4 zowQk{Xmg#(UB^e#IzXT~hKifqfPXB+U4UR61WRlM_PQ8&v{zEHI8uO_upjVJnOf1cGLw$lU zmxOy|ofZBo#CbI05{|2e-)2E5m&6tS5@Mc>J{ZM9KM+G#WKkPXeea4V1h@k@AOM_Of!?t8 z6+n&NcP)r}>vSym5dkG9!ovkSU{#?=@6uKL8oahvh z_Ea2k_Qyp6T#Nad1uJiWPMrbcADo`YIoe*7F49kK;#tZ{>wa7=(0v6rT^&-}r9uhn zUT%fQV!Wz=W=|!}$u_g;MiBKnrU+X6G?C^_H?{$EIR+~B@;3pydAN1!lpcI+Bh1N< zTqnU*Tl5H`mdFY zXLMrh9FG48@f9!E*xFq&0-2&(V)xtl#qEbg5<&HR0ghZ-NZww*K#1qV+3kgR@lyO*2(!%~^XDu2cKK-;e38>K|IK3{0@G_<|I3jI&gNWyJaQND+%z*0RuA0` zNtHR3562P_6rm11io^u@dI6)nEaGWPyu4MQtIQlb(2k-inya@7b(Lo$S!!igyFE>g z-OQZi&ijKrCVE&Ma^_61mF^)Xwc26g#2C=GnLs1aOiKhXY*-uVyQig;6h;T)DpBSj z;=)DowP>@1OPfTCF}@K-b@2)(d%HFN2$?n)xtCj$#{_N=>V#(YwxeTmFSM&&r_DE4 zKUh@@G~>`yilIL_4CijtGkKXsgZ>hMgu(fk+)TxNkTADThqCD+TspLFB-AU$N+V#- z7=xc(VtvuncJRS-xg-2Q6deGSMK(HO)sL3?cgaw=TM%WI*Wu(y!ih@+!OIDB;Dhex zo5HYf(xv_ak!CGs7#x-Ra)2h}ox}G8xHPg8iWUCs#O78v#sYo2^h^gl*1-nh8etC4 zeyJmF6iw-;BsU0;h;sMUB_>CORUZIxd7EUKhCBY-1rbO`Gn;xmC?qW=a%i-+9?W9Z z;qgUdX;=^$A*iNPoGi-4m?vfV9Up>r#z;jq)+-+dp&j%TP?SUvxmUi9W3|6PIlJzT zMX+g#0LMW1}dV4V#1bOy4d5_sP2OANy#Q08a>_Codi}JqDN-JTb99K!CZ+v8Ag6 zJHc~u2#cO%dwF*bHL!E+LUsMQ9AeSX;u#+bqWl$H{lvV-vz%(#HuuMsIl%HyE8Zl) zMedf{p>izycMz(_(64TZHAK0r>?Fx@eF1b$9N=m{+a%JRtDdbB-K94U6XrBA4d`nh zjbA>R%DS!?a#|xn(u$`?4Y~24z01B#8Xn*`w z6!pekR#!YN!tI9VOx8Y)@OaAM{qVv>Q6$%{`||-_)@~8NxvFMyoS;ufXJx`Y7&A%S zFEBm7U?PSY2!0BN<5|I;mKs>^%Dj`rv%Q}QO~;&8bIA+qp3bmlM2N}7{;k5yUe;eq z{)Pb8TTgQ{Mn24=amB$3%tr7(!pu*3S&kL|k1QtkxxiAW#pD2=9}fzo@i;7T=h^=+DEC8DImAHuYl($|X+Epx#OWfl5rq+Z+#|%f z%`NqJ76x16DM7BhZoK<=WRbPuW1+5m$6zx@w`*R>faEFBxIuv9RUw;`XH~}6wy%Pw zlkEVL=Ty<~Z1$}BI91RENkhi!krt79b+jO zO(7Jf$i5w)@S@0DZ=Yza*Nb*nCEJ zY`782y~($ER>uk?7kdW?a!uL_$#Y=G3UOZC>~MF>_Tj`FboR`wN_TP@T(zAk@Hp^mE?@^wRjp0^mcTb1=TrxB0*YLh?e%$?s(l3k&mc zDaO1HL?iU!6o;o_Vko6&CuTyWI9&ATw6j>fhKBeK$Pl%LOEi8!NUFoE(J(hY66NY5 z=7$Nf&PO0FPh+A(N*f9ykQ{%@RJ1J$LYy5kh15Lr;xrg3b3E03(>Wg#%-we0^xE1u zPc+>GZ+|#^%zxyma#UhJ$pW+M+c|1PQ_zZ{g!z9H<$_9d#N_?D5cdt4uOO8!Me)-N 
z;~-1(f!!l4?PwHVTdw{84WureC|7pHplDZzM(in5`f?mD&|@P}fx$HuGu+yrgF079 zVA{3STK9`AI+9g7g$CjJ0^L#2zM%%=^H8{IL)0cl?kdoka4X4rh}fY!_7w`BP@w9H zk3^V>m8cj-@k?m+M_Qs3SBY}%)bn+kC--Lnpz=DYOm`bv)z6@2{seFNqUSRsA=IS~5^bs{xvW)2ZKI@NKl6$kwX z!1dsG+!A++cFKCoSCebponzH)!^n16tv38G;ao%c|3~9J<_dD!Eu6Br{}tfgF%>~4 zf{0IqxJjAk;i`D>{r>?v%hVb2Ks@tp7Ij`C>5tchI24x{h&#yO3t?1FYJ*HC6PB4W z8l)SO)kLmcC5$rADY{1Pk_W0cg_ut$ICQW}*35M7vY@oz++N)deId$Kso{!zI96W{ zz}4mK&gP&P8wzrP8f*o~V-Uj3O_^`Uh-lJDhNE`(e-of30#^qw=$kBYKF0+5q9`ZF zflwZ_7!k{ZnSpi}X~dzB;>cYOFFGldE3^<#C-gQoP+K0i0dXzM%K16M&X#wj;#nb1 zjj=j%r7lgHR>&|m=2~shNfrj5S$VX%{fEf3?GY5}vTOW`4nbe7VkN8)Nxes19mIa= z@01O-inv;&E7q;SUx<}f0)a7&74@-|zR6A-Mi|{8l5y8E*j$KHQ=+$k`qgWPzGGe z^~64_Lb(17T1%L_4qMH?Yk2K9T?_@fXdH{QXx)(q>+6@xW%_8`C(vo@ zT&vmW3qfur=3EK-hGXjLV3da~Gp-xffS^*%$>hT^beaGsf<6Opu6M@o1$h%CHAlN- zEXUV*NL-~Pw}XW>3F+*pnkH**427k$9kyKYf1=&xSjQ#VRF`{GC>^@MhPn{ndK#(s|wE*Of@92ooKIYDj_Hf`+OH&`o6n_C(th{OfguZ6l)iMft$X+_nI+O?u0vlW-V^N<@N>!y8qK!(I$&-TJ#tG4#|d%GxRL^4?}Q;~9u9`e zRoe4H+{Vo#muRY?MR!>j(A6s~(jlxE4+)}rZ6%)Akp<3s0vwTv#5!T|-yCH7ZNYVH zzFwBvG&US^5I0s3qof`Cwe9sy?O}caIZ&HOm~Z>!-->cBRa}6{!6+^Z(-;^UXd+x? zeZsi%(rbYoYYHLxZ6}V6WAsfAt3^F5z7=8KaSOw=$rxTez|3F9S}|%wn{&u_`g#|$ zZCw}$DUu{7jNc_r*LTY8L5_{ham)r;8tg81jaNjw*cDxixksnmv1mg;_Yf&T56AHW z97^xC*=P1yX(+#!3!~o<;M~x?E74}lZUkakJ*)DllR~Zzu8M8-6U@ym!kyW(8lzR;aHo~qhxoe)=QLFw4tiqaO+eH$OM`kv=^K%>4f7|X^EW?) 
zO3&4jRnryaWbRSH?mP@JcHX@8lS4=ixDP7Fx<4Wm8FQiC7aNFh6#S+zoO)uv9IC}= zU#rN*n-VG=ypr_I7Y#pkwZ$rmY*Ij)GtxfF6St&c;0Ldy(6u+%j4-aFl+HOwZZE{K zrgrdl9pb7vg4|8~b{1QqK4Gvzs;Zo_e=d^p>jFC-5s{wkbWNr+0>{6G&4FRpxN+m~ zAK#KviBu%!Z{g%9*tQcU#sMO!OJYp7nDO?E04M1-mhBD?-fao&l5w+!s{vNE1;Wgi znR@TDOSJo0XAdiOd?~;+Y+?jSSMItoSvn(EF@<5S8qWzMUN&3QZIva&#T$#vi2~fa zDo~2KSR@(ig%g?SjHEv&{-yfkt2BDzc}|YcSKONLbiC@?>R3zP@J3F#dg50pRoUB1 zJFdG8za_k7gF0D+3$F`7u7l$=L2kntTnSRJGliH1vvp^2WgdcAg=1vABgBm3EZ*HG z3D7Frf>8!3^l)1&$|0ENt(v!&FsC~t2i&cqopXIJMyJEE{8$i=usXS4OFY#~6X>L5 zNzUxp6^9CQt8lDB6!cF4FmqO7bQGKZ7&2|7ffDwt1F@YTv%8D#bgoEJrAhhK!taf@ z19Pf&{dKtjmr(A@5oWxXB1MF+%;ePjBO$Z{cCpdDeU`vfR7==;2ylg}S+*?|dqSXl zV>Yi-r|~-gn>`aI@3LE*pVMvHCa*V+-MhFd#WnxF% zl>f}_DJo}i-H!ZBJNccXJ4B_kCLK>UR%qd+LC`&Co?3T2LClrp-IMsC0LPi0Xe&zW zQ8fY39OsD0g%SRJ1vwlWqU5}N~PQOlC8kh_MNCB}TUd%%`Y3an% zQtMVH!)pYaT16#uOEFSB~}wiAuWVX49dPIh;C8A%@BM1--vWV z*x<S~5qJ2U6v@+)ltwUy>D@#6KV~?RG=-RXWULg4}x>v76#rdN>{z=1dY( z;qG|lJBS>-mE)Y931hCXL12@5s{r@k1}@vi?UMc8in?(*<_a)l5;Ijs4_m^Eh0;5B zm^dL`(YK4Nu994YNA<25*oc%@TPvRV7D(pFT!ivu9K9QW1~;1UrTCL5mu*0gXT*a7 z+;z#dBQ~21?VR*ft={4&M~y&=!o75i+k{DK@;%Ic@>z8!fOUnEQmX& z05Oj0>*6VWx)Y~Q4kr%3pAqaxZQL~LDrk>tAeSihT~>~hr-GSZHFGPf;$qQm|8l1o z_(={l_UdKmuksj`y~Fr#o<@|{&Y=rkPORlPMOZr4 zlf9)z&o2d1kve+Ii~4p;s|V}OWZ8Xz$c23maLYG zkYgstWKphXZDELWIs3Ce2umlSjzy~E(?Z;&FoKOUjR-tx9}%yAS(FSjrhH~N`zBkW3A)&vqIcdO^vnjxky-E z!Kt=pKHt8{Y##O;oJW5q#3>_)l^YcbakW6ls%jsMEq@B>j>ogvjE^w_oHL>? 
zUN1&GvF?FcoTlk@@qtJR!X7ZO&|2%DEK1@YMgEkx$}x@xYz8?UjT4r3KwY9D(A-&& z3(cNPp6n0G5t>qBi6J2_VjnjXT*e-p<%f(OH(k{ep zgy{*YcP-FP8vB5rme^JR0Xq9fF4L3nHKOTTGO3IGMbI{N@RdWt^XYFW5lxP*iu^{m z>c+doW+f*=`DWKt%#Q6vd(hMm$FD_sI3%iDEWmmiAKMp?`?s1*xireKG|mYlh-Re(E)Hq9lTbm{_eAy}5Aiy5~Ia&;vNL(nx4le)pI zQ?4!!7g80c=-~(0I~9XxTInf~H0t8Tz41)?6Kt$*Ym0A1k~jKpWKFqas29Y=n+sP9 zaj^h*YCK%160Bse=Alrh#>>ljeOV}#1*rwsaSsV}N1s_hlDpQS(5@-w$7-Zqg`}fh zLN0X3elLinV7u+&W_`NSecX{I0=h?pIZZTYc|5oun!2J@C+U2rNN3SBoQO297h*2; zW$zDYkiV-RoHv_)+)I_QqjO?#XIf!#JTn047Uc4q6CCno=JMwRJC7*^ocb1`eh?(p zUQ9GUvZvD;^^3!Db;uidvC~2jSE^PsuqL6hFc$?KNu)%j__?5D%%hxmt_U~ER3;ld zNDl!pxbR(1&pt}jXn~Rh_gduGZMq=4bFm3_}bAS$R^sNJthkf4!f-1e)Yp9v$d-2TPp z$3nPz96-?RkFA8b0{D`|r_!RI0i-j1fBa398Opm#Xgw6-X+cgy^CqS*KRFJ>4c;RA z>64C!cA7I#avsL8EDKP}?RCsOAxok~qSZ35ae+A3rpo9N*XIME9IgxzR7O6lNmU zD#S4Y+-4F*p~OrnPZj7=_SM9%L{ZBB$JCj}*I86=96QM7#1Rn$*Ex7Q!gSDN7Ge@BB$5yeM%q?WVJ8k@aFSH+P;A;@NQl`qs_Q zhIEGQTr<##%c+7gvy)X9Y_)B;K{%2!#9wTD4utEEp9r>)I?OmhARVW5ZhTuoOJ-r9 zSBFx4ENlKL)RKS?9u}16`h$gY;uZz@32U*m$i7Fkxx<63c->`C{ZN=QO=f|~Ug8ty z0aD!}lGdxRKjH+vFKI9~I3LUv#?xCIC(^ZJQ>P09Nz)M^z8(yCuv4J zN(8N&o`Z__L^;`dbf?QMgrpR6>gUDH3iA`wE8}H6F48QXq4zdm@wAJ8NH-n5ohZV; z(<_lzUL+ReHb&v6W7m&=8-(;5(2X9VFzL%(GK1xJv!Ckc)WwI{Pg<8PsEb3tLrjmO ze!|T6!Ya|JADn@XASu$25I5LD-iejXF9kVAs2i+v0I!SJTntUYSNvqfEwTD{A;b{* zB#35FW?;G;pU&=&66m_kWZGuPjX^=~jEgYAM&}~Hjh2o>byDzcVdicVPNbg_>0)3{ zUWgBJxs-q3E<8J0uEC~xYIC7ZxLLao?a=?2W2|>|RDlD(%ZO)YEGUg&;Sn>0 zP|eAca8=eX!UblT#jmI4@kw9=YwdL|cl;9jjQE8J*O`u=-no8Lkdxr0M~1*ht^jb+ zxPZ@XUyiMw7v}F)osRuZkiQ$o(+j%5@A*3x4BS=qaR0b47hkWvGP2xsB?#P;FB5jO zD@D3x+SO&!B=J-hhZllmsndLwKj|xV%bxNQkzSk}#;k2`cE#A}YG9(;2lgmx<`aLubIrzeDQZ7W~1~v8r!$H7Ra?(WwopmwQU8%7umw_v>0V) z^gK?WGhs1WE$9jME@93@@9-cpW6Nr>beoI!FS>Yj3e1f~QkW9&c0GVgLUHv9QFIG3 zBQ!c_X~Kf*?2kt5s%jbdh9DQCo4ZaNTyU;(16bC>&?hl((Qq3IblF%N%T8?*A*;$! 
z?5RYz0C+geQu+jiGalk_ZQ?pM@zhPI*!hP< zL!cLzmX_iU5q!gethRs5vd;=ZF7cU6-4D4L%H>;#z9&XSn?15HqsPQ)f?Sq`rGAdN z&^N^AZvnI@A^KEjUv^5hlP~;z>J3uNV;G3S(nCgdvZH?aOWjHSbvJ z=scnluL#R_7PwU%81f1DI=2BcJ0Ctj>J)T#^W^>|iUORBu2Ckp1vd(FhEfws;Y{bg zRj4bShD8_L@T^^*J zURcQicBEYWT`9oK#6zWgiqi`>yA#l*)<&E?Uw2Hu3(O7ESsmz%uZVVe@s*pb%+AiD zRJas~HX$y^qa3C!DCgVX7v${NoH34|MvrR+dN!3h4VlY~Ptp&Jw+%%+c#X$xis!ad z*Ja_-^`lyHu$wVa_glUr&?UpOV>(Lu)Q^Eyu{@0!lDrLsx|(=^q#Ge0gTkB#M;S8L zSpV)C!U>z661_(V#bN-BcOuXcaOM8fb(dsR#=In2gspz?4;CC>)c;Uw_`-UgQg2GUwb*% zW{std=zc=!5PMIJbsr!|=YHv=OED?fK$Mx=Ot0k#{$3$&0^|XAB%JFn_$ipn!bu~8 z?|6&|qJlgH%kqh(jcB;lSL%=dO%Qgktm|brN!L;5IQ1c57Y$3^Dhp*&Wv;r$#4^~6?8F7aT?$DFGiFkt z{zEVgr4GWW-=;r<%&LZBLW9aNIV5d{w3cTG;-mEbTQP1E>7?htU@6^#mELBmd-v}^oYVV8O-Y=JT3j~SVPji-%wo)VHZ9}>mma{|Z=Go$2=V7*^LTfnpx zOq<@R!rX?853BCCGO-9J^L=5s!jI{HGE_I+`y062w-N?|EG)@MYkYE{e# zW6E#*t2R8kjWr^goaeN+$Eylcvto~2x=16t-jj~q#6yhIGxcONU!zn*FDBYW>t(^! z^Au#(iCAvguLcZySU7cMb=e&UKWzeaW@mJWps)pH$w;&$qsgEowf9w`Eq18uN2Ppy zFORarRJI!%K0_2sbba#2B7^Z)1^PRAI%b0te-UIxF`%*9n)xh%`yYc*rO1MaE+6L! 
zCZ5#T_<8J(D+D;Lc^GS}zdt943iTBE{rUWN_9nP+8Hp`_%kS>4ItoPttDRg`C@HUc z3^lG6;2ty+cm8qqbB;jm=b{7di^p?-IRiDyhXP#LluFtGZ1p^#3n=xSPPht!+&!4A zq)Z=)ZwjN(6ZfF~9eV-7+>jewj7@W(GR|>H^u1V1LeC(io!>RWXm4GnWR3f4kt8CU z`Nbui)gJQu+BfY?`N^H_7lb*d7L0obVo(6J#``4Cag<|eMpNs04aJB4f#0pa^#chd zuJI(cANdl$xb~a}=z0??--BKTbXD~vICtQ<#4BvoXpS;oFK{ZWIl|p8%`jB^YqZwS z38jQ}YCS!8c+@KZ&Zh~VJi7VJ8P=Cq^Cy^z$o zSh^o0A`=DP-IKe4D+Q5@n84ode%z;i^Er`;|328X>FtvDkn2Qy@S|_^@u)PL3lxr7 zN;;KKkG;Po046oa@zG&#LeQ#|yM3$>OGTQiemqpG+G_;5CP;>CeY)e*Z-KcuGm4yR zu@ezOO03K}>6gPI&3*cM+PwcjkP|QPOgP<_75d`>o%lioG(DBn{VylA&!luUwJMT| zCN~k}J^r>oN#`QE8qIt(;~g+JNPT*$^Lzn_)$EyZr-D?tZbnlhx0xVZ;YKV}ZWQTa zGOG8A^*wO%XVG z8jPRikuk&BC)WJ8Gh`RerijjX)_(x#AjNb`VA_A3HcOMVz57x|ql8U zh)TSfBTX#zm>Ay{LjG+3dSdK-2v?Buu>e2r0{}Cjt*9$ABXQ{eYH?baTF2uI0o0Hw zm5xz~OGWz|4mKyQW_x~3{*A7>SK?j)Z>@%0ZOymiEVKv!RHv4^Lm&|JQ(A2PA% zra;{Cz$@QsjVZffIWeWpzbD$2WM*fR5G#eaFX`wv`G=^)qaUlGTF6`38~+nZLGVme zj{R1LaP^xw?;4MX1*kD6u4Yx=8=I~H%`h~tsUbG}xWB{}b39HK;X-g|Q!Nk0%|d7? zy3a1V$DN{FlScNVZ0>2y6#^~bbU+j`Fz-WQ2xnV0c36|3C1;kjD0^k};)y$$OMByV z(c&RfW;`y!!rOv}nlF6<()p#WMzSsWia@F-=Y?r|b&-H<3doGDVCZE+Gr`BLC_2hm zZ!I8KkCogg7EhZAffG|%W}m4r1zB-71~`8dLA~WeW+?ufMO#>RWL&v6zc}6DQg70; zt`tP%cI~5Lqjj7P`=EiiSA^RFB?r2^C7<-K90GDHTZGf1(NRHR*|#oG#$qu1lrhAu zFOZ^PY}XeDi7<~EFkw4dBvs&gT|AV9=eKmknx6vqXbJD|sqd>jQw6)V8NF|nwc2&@{vEAZ*u{=tfyr|c&M-&QP zE$i-HG>LSs?dW;YFfSHjX0iUmRAq?(7o%lfM`N5L+J$HRhgm=U_7Y*#zn$ZouFnuQ zYi1Ai#F--fd0Um0`Z=OuBMZ*KSRulU=5a$E%`@dZEs&P1R5hFaM-+mkackm^bjg`j z=l$!~KE(Ar=SxzmJd%OTdTHy-2C_RQZ9q)t&p5bm%>5`mZT~nvr@6^D5d9kx-yC87 z8)2b%i4ZReWn75Y2Okh#lgCU)kBj4WA!bc88ppKbUwxxmdUGj1o$YcV<}k09qrG`s z04y0pdl@YoL%=@fR$0TGB9h`4Fcj9dZD12(uq6k3dAzuJ;Z>q}Yo&M(ngH zL?&&F4YH_?eT9(=b_k4K(VCHrFg*Jahv#1`||K_y_t*n7Z;}cr|STvY`k}Df--3X)* z8aX5!^F(J=!%CTx8;nPUIjhF>2I6Z1JTz4v{7RV@cD2;md`r;G+JnB5?jcE-@xiHI zV4m)g#D7IFh%T7LBUuF9nR))YN8UdaJ;7O{>!<-!wm3jhEkxX_WXBLQgpeo)xP`b} zgg@aSByRdh&V{+#%;H#l?QI}ki%Oqf8~L&Ty4XHEXLxcJuAe<8mMiRbNuKo$8F6?mMV1NEsV 
zFB3qK*c@U>b%!XYjX3f|QEy!OxmptCA@%(?1h}B>zNoy$4j^Qy6>4lFg0k$#95bx2 zxzBM*AKj=sz9Y;sqDwGCXy8$Sd`~F#tD6`9QP5?Odpe26_>Mqsv0ZqJ>uQm%Rr8Rp z+-Z&M+#l8b;Nv8M>C)qwu#(|ZukxKdt{mf9N#GFKXgy5b^{E@Yb~&O36N zYY4@-dapbM+&gJwzH$-8PwUlf3-LE0)N%$56d&6Mf?Adqm0}Zx-Lk_I*U>l?0sdA; zPz=-8W(dxR8Mryhia!V;wWQ1V?!L7T7D%KkvFFrUnljj`#NGlZpES2DXyT+iLS>*> z?2mHLWG&eRTiXv zgZ%-_JQVDV$T3fd?}HW?!toGLDo$LEIc8s+sGr<#s^v};7kXgtf&*$vHZisq<81*h zhqPN9wWG4G#Q5idX~W4F)t;!Zi?NXVGrUJE!0f>TMS6m7;ekNrA_|>q8Yv$aMznN8 zVkted5Pud#vbqNUuk2?!SP$oEMH+77gKDXu1@27(lQ24+TpSf*f5mX6M!9V%ts)&> zpv!~PMcFyUMS@&r>vQSa!Dv z%i2~}GliHZ#NtFZlgA!S4Mzz@LRhR6;!Y9plcT73H%~w|7I1s-8IH=_gNttwaHNRW zwJsfn-z$I`a?YizdGCnK@?V4>GBix}Ag(i~i~T*NHVPJ*Mb>`eUQjexFdCr(Tk^JFklKb@m&C8PjiMHS11x>!wI zXq6%2S~G!EH#DS4_IDRRk*b{teoTncTst>@ll{QL0CiN`EPn9v^rLI!xL!DQNQYh2 z@J7+FN?zR%(_2Ks^*YoQe^l6lHjIP%@wj?+4ZxBR>;1CYNk_Y*B_S_D2qb}0 zC%xru5oQXVska=TZ33t%hcQpLdO5fbyh`YXI@A8~B_ZxHI>qEtCrhVdyd~IG@8xz? zY|sqt92Rk5qbs%&K>iw#V!DW|w=yH?4D)?KNZ7<3__x3b93vzxxu^xytdLh@a$@5x z%pC!D5?b#*B?M_=;O7O{--~kM9DTAvFUM~4fZzy*nkaoT>!nEoDG_U=p_ng%^6Cju z+Nlx)0zBE4VG$O!BfRv9Zs<4qA?rWv>EdnC&JI6o#VW_iH?#t|gxNX60f*N<>X#k& zA)=Ac;qIPjSD4c44an{|B9Dolv>aa+4F^0y$k?qMujO&t_$X$y5r>-8;aWc$M3`k- zjwO4UBZSexrfwhI?F7lW6GMtVg{jJ_qpi(5Oef1IM$q<*MznYcZaB)KoizQOwr9k7 zMwqxLrPsFeA>8WqG~!55bG}hvbZvtv%FN3?k><`hB_Ey zjgh2xKUy%n!rdSS+RH^z{t;apCGqtaLX&+fCBw+76t@d-fzgg6+l|=d2rxI!EaoTJ zH!DXN57-;TkAygPc;ShGG2rezB8#eC)7-$rlsXR-;o0a>JuJT%Q*~oic@V zto_AW^jU188IjKyLUiu86yh%;EHOj6{FD6s?0OuaYsw(rE$fsxLYR3;t>~;LdnR*?8OSJ{Mymb2D^4b zODyc`c)3V7Spy5`xJxwLU_FeIP>YM}yJ|5wt|-TyqRlUfKkJ>9LRI#?cEKO z4aHi{G7>BE_XU&NVTG<_&lW{!1cJ9cIf^-2lvy$xeaUzX2q3ylDp@ASX`92^-|Ae_9jiqv0YilDrv#%DFd%T=ZQqil0Is1;JliQ$F z15ip7hhuvI7WXE`sC`Ak=1G$#$J{L3k90L>;YJ*zU8Ash!-~FF#%P~jR7)23TO3b! 
zX8;Q?9#N*ACkb%Xcv`cIUVpX_ixr1kOzG+@2lWF|9QF{hX}nvctIuLX20G<buUPdv>>@q2y$)neEB&H4a4&_=V!v&_Sl4uGrW%{JJP+^jiDKJluh%&S1*EPn2 zLnccYNwD>LNTdtLi4;N)8;5~h{|ZKQjFy~Iyf!m6(%tS6NGS2_v+o&ny8UV{ujEUc52x zC~3e*>de)aItr4Fc^l53_Y;sM&AkS8SjgKk0-f{>T->ur>K5Ym;q+AdrFd8ng>Avt z$qNdTb2~gpF54%62|#0>+z2y({Z=#)(nGWwkJmfF5w!Br#J;s?H|NNMUm2XZk}d2F z!Ynr(^IGF=kuEBeJ?=WMLoPlL=%VU)Mt5mX1Tp*T(z{240vrX0OM2_$Tp^4n=v%At z9T6TdID8w9$3?H2dy@hq+4en|r@~r9_vrqVf5^45?%4dxeCXotGG$VO>ABZXh(-Tb`=SX|{!)=HN&8SwAzly-hr4-2>fydbMTum3uO9HIXs0m4 zm&ZSQiV>!O%;VV}LMU}>!_3%EVG4tf?xpcP5iaUXWUo8!6X5!`6foz0RJ4U`MzsfX ziJ^E-ka^lp*LopOq78jwIiCGWEzP;OK4)ieDu7EU1EJ%q@tzQPfawYHvgKEyEN7@W zy0jaO$As1PWI^wb>c)@hRzh42bZNb# zSo%FHh?>%Lc!=+gd0&Gj<&t$PH;BxHzr=(utAm?_Ib9qG=)QCB>Hdf=GjqnAD4zjI znzm6b#95+ia?1eim(*?$?9woHF#dPPt%BT<v-7u~VR*8yGG7FI*giX`>yF_d^E z|M{m^O^G)%l51nsI!^i~v0VX~?Q*0u66XjrSGlgO`@p`takX$)Y_<$BMq;&Zfgmvh zT+*pin05L=+sCO2IsrVZU@wxn^7)2v%Mt5$2B&KUcyXP?wp<+Li3A$^IsM?Qw4Fhq z_c;^9+>mvdjPrX1A*1cOcz3!8r(G$_vf>8K%&VvucrsIjLrZ zzgUQ6C40_LYK41+)<&@GH0^=>V{T_^Yxa;3b8{Ap6Q*OH!aWndrz z-^)k@a86r$Y;&Hog&g2;pb#?!xQy*Oi!F=RNYFMfE>PHf9a<^G z^~UV16WtU3`p&Om`YNqmuFNrAV&lk#)N(AjXe_c){B@3@zN597cjjC`dTtnWQ{2u; zyRadlWEj?{T9wVC=cs1nP#4so6vDKI+np`LEJtg+b!c+Fz zH;z!4q7^z>Onx!{2)i{_&7!k$2>(TD!A-y8bdyIr%=E@ZI|>8%DErb_CdfTtR-f#~ zZpgl=!y~7jE!*m1BDh;(2asM1PCQ&8++}74ug0#IK)dF0B`GIIF(`~NDl@Uv;G+Tf z8Ntp|{)*va{w*p5Itr8Jd%|4Sv0^84RWTaB3*@3`fjYA4apnQxZqB(#_BA@4&}@k+w3nOx4}ctU{Z47JfvSwjem zZNadES0?aVlM$T0-W^M?;ujZ;y#-GlaZU0(VdeyDLuuN7Cd8er1rPn%TY;|#B~@$` zOR?$I5FWEXVsER>wSFKL3)q3U;2H>e3X+NR+>=r5Y5b^spWmohU2|jnC<`~!D}Sjl z-(aT5!n1U(W69;{YYJOJn{>Tkjq9LYaMW9UtkC*$L<<^RC!Q(9qEts8yI-UVBr~OG zS_b77w&fqbw*+OvBj3OALje@Cfa{Ha=0Ec8CR_34*YhKlmF-F_ya9r&xXf5)@%~+r z%!CUY;!{5$Xs%;ohiM0+(rLn6nRXe3Z+N4>*3yb>PY~&HV2jN_be;fLmS-7P8T82-fwjZlLa7MHiR`MrmQk3K zajLt?9sFi4&X74!|3xnUBQh<4rdTzjc!Jnk3d{FvUf)%lAMbGI#B zu9)=W8h}qlMwul7s5x)KVH*GD-B3_?X>VL4kTmLfOJt*;5T>hhtc_R>P)%7JzaKR9~Iftk9tA9L>~#Z_Aksc>|Ow-QZ)%@z5$ zq=mlv0Eo+vfHoQy+4Q 
z9gQvVvcjHiIpi2A;&4j6H{KMEfGsXz8Wy`hTuXzsrC#1B3E_9_#`yi)8P(aTT3;v1 z%*ix;-MnxHmbIsL5^-xDX#t8N78aG*;SnOi9QIR#(UlQnI+}PwMAn<-S8VRjoSY?_ z23aD2^03fYSdGU-xdihDxPK!{)!$_?M!D%V5PuMr4YHWSNB7Tb^h{U~2mS)uTx-=v zqZFqIahgftEs_19FbX`e9O7jWBstupBgoMhf7CxB&!y;mtoAh*zLg@^jDU((_HcMDfeok8K!_T_Mu_svVu!D5v(L@vuNADQy$RNWT)|k#m)u z0+Z^qdAvS9-?irBwJ+rfjAs&d6=D_*>ddU|SI}<4V)7$0eFZ=^w9_3IibPNg7zb^< zlCXO#qhLC?KP`hQXh0agHEmre$il}%$atEMCV;P`kqH}i&3CMO3&0jjYN_>pQ$yN)-|8?S{-&*wXE(l( z$6%*e;vlq|A#4Ps_J(a22r+X}R&jDP5X%L*DJrOCc6kz#BDXJ$;}moT*sb+PZ$>8> zrqGv*HY*pPpkB%ygF;*-(9;=Pw{-sK0WJODPEgM^7nF4au`Ex4$1+&OOWWV`6e*aC zhjH7Q(J=>jrfcyyPpc8bNpXxXC-@`rfiMdzi)k$1sBR@rc^Z@|j1>nsH5iF+iFW;^ z`H=ze3t1Fe0$l^1$etmJSGP>>%wMkcPq7oaHk#vHAKsz>Tq3qV>Bx4BC^BK!#?p1K~Acj7czO)>vAFPM=U%` zTx=YT?+bGq6qq{32c>vj@kc=NTtt&p;;D>EPn*S`MNuKVeaJrS;MZztHsH8xPEtySeNC?^>Ku zya{2YQ;Fk*xnNyLpl)_z5Gv5=W3tcwbToPeQ60P*u)4iO6oM!pG;C6D6lsp&&V`vc zek#P{HNK;iXgYx3`aiXZ8rxa)EfT`e)ldI;Mq#pLw9uJU38UnF{|n?QaL;IRR5Ci< z`=a=#i8gmI04Q|J(bPADt*TPC_K24Rl24g)s5Rbqcoz0EpH~osbva@Uh^(|Mlr(H`fn1=I?B`k;1Kh!R zw02|yD>ORp41s2SGt=))B3;LZVp;3HxJ8f~oeMw%sA*K7p9;13U?)C`dZZF>^T&zjV$c)t0mSvyS{9gNCdJtE8HE2gj;QJJ<=B4BS}bhPlD1~KAVg?yo)fBm zf^e2S_SZW3i4%muBeqnj>uf$Rx$VQU1 zTwzZ_v{WDK6^2{b^onpd$3n(U-k~!s2N%5I6}yTySFx9lTSU6Fc#%Z;!4hPfb->&S zdI~c7|EvJ>87B0hrI$74!9uA7P0H&OY^LK_A#QEFFUwa2(asj=n&}p&YW6@5)m1T* z!h(G3PZG=J$WBEVZH4(u?s}#5$RUDq=|}T}pw#8@x~i}Wf!b=_T5_l{Mwj9=JEjVw zy!h&3RiY8NA|w-CX`0Ryot3}PgJw;(?aOnpSCcjhZkDB=CMH786~Jp$WXf7D_R9|y3qBO%*KWuU^n%HgrpLb(c9EqR>yMs|YCs3)n_ck_0iGl4Fo`1t z*-6I#1ZV7=Ul->EBx$xYS^Z1lteV&svhvt!6A+gFy#}Q)i5rEGTAc^Zz_xuV4R3M&DcyN5>=LwVS65*qwFr(+r++`J3^sE z?=Ol#Q|UAKmG|CkpX$+$s<@5Qk|{=--`3q0fD(4Y{#OysKdL z3!XMs-vQF%t=$Vj6|zdylA&bMlFb+FHr11eC3HKd34-Ok)`ap)gnBMUHidXN z`-vrYHC_{uNmB9%II$N1EI{kTt)EzT@7nJ)9xjOP5|DoH)r7pxBuF!aO`nXNG4f4K$MoMUlc}{o6}wwA1LgKG-IuzacZM!KxTUfM)%`! 
zfB>h#@&Pj=#=u*Ix#$Q4r-=#`!Q|2?6GdMqohryx!irFreG)>k znlTtI}&%Tp>x8%>yn@B1Lkg)M3Un?UoKx!G3vjFGBq6pr7@Atp7YF0BBFyA; zps$6^m@sFQM%YTcDug6b=VZDbKBN{ykKS-Gf4LCnT8GAv+wlV2_8cbg;)raT82Dx# z3hrK{GY^DQ))_|&b!?48Qhg;o)A7P#Ukl5+^Au)AY-x%c6{HGiA4cPD5#}Nu8b;C1 z;^G_sje;rjC{G|UufaXLzpWrj7U8La7B4K!5zH-k;Pl zW9b&akqyvJ6pfP>*q>?yaoJ>=T!Z936W~hW^e^q3|0oE-7}FtZsh5en91XLa5W^p>M3IgW;P1w=bG>5)dDX9rSUMW; zg~9J@$Y})&v)alAP1yDmOM4p7ksCt3%r6cI6oB^oqO($A5*9pOycc&xLZq zHI}2pkDUd%k<<9q>C?0x!C8IEi-yKiZ|M_gv8&Vg@Th1Q!?g^I&3TySuOIy?9R;DS z+oP$L)Y7WfOI*B(L4h=mwK;K?Nc!H~S#fAHLD#I%Eq4=4f`d6m-;zH#b{N83LTpc@ zWS^RvCYLcJoPN&D^%zswa$eUdE$P<AeNY(d$1tEfMg=+5R*pB>N1ZIhHLLJ+U%HxqhA`SEZJXl# zjAE#i-Fa&*(M~jD(t~X!$fKLQ!?K@LH@Qq`#xlJVqjwRm%CQROI$RLKCF0@4gA4tA zg?Le6b1FA*B(2liz+LPf?Q$ydnf4mO4c4?3&k$)ldef`L6(Qznqn>(*)j9yoL2u3R zkvAWR+^ftH4=!-H0prr2D+~)Bx?B(1JSNH?wsD(XynbE~s*Hio%BTc8X}gm#!a586 zsVq*H#37t7~$=mxQDZ$-ndY-i_C@}WyXyH{P|Ign25o0 zA?7v*i)b^`6UqP1;xUqG#-Xy?9!Wf8qksntb_h)(-A!0VvEdi(=0UU1!j1#q%knrw z+MR866mb{{4jbw#$95T^YxuFV2$vJX%7JPeF2LoK=L=?)ULjNy9YdU!N5@p0jp5G) zP&j#l$LjhCk>)Q?HcL$rXMMpjxB{!K`$~~cpUp7`QL*G`5Enr+bXR;!fD=Kh!qhMn z-_PTq(!!<7X2%eRX5|aSY9~=1W?0&;a&{J{2{x-H@4xTVxcOLUYBHyOPOSDt!W6u- z#K}chY%9`*n1MNGPuwHG-?Cg>8jpx}p}Kjdg|`Ocmst|hnb1yu&%fYy;6OZd9A9Mp zM&8t=%DJ#m!&rl+2Mgi`VIFGI0}RaMUGWovWIwZ1modJJSM<3>?rY8{7TU-FDj1i zs=LrPv=ci|7~I6-0|}kE2+Dp z7BasOX^zN7O5X9;8TQw-csV{L!f9h6g01NW0+72Y(VX$oh@^_lTE^(XBO7~j=;u0_9#$Pcu9IqvTBx-mY zjV}o^x6`F$u3s=-9lR9OU8s&BwL7f~4-x1bG=Cv{r{{>ChGEz6Jt356URy()@TFRO zIHbpso)v;{=sbYogpZ%#ALXcqo!@36Su4(Jh{qLlMP_l_P>9hJ0W3jd zc+%0`#?`(I(M58nsuM2a@WNgO1l#y=JlK{<2 z)VK(1OgS0E4bhnSE}&I^C)C4d-A66MV~tbz!9s=FafBPE(9VUskau7(&!G3JKngo1 ztBAe73gNQxY`fH#<#<(CRt%g?v8wr>5YOOJ`7r7xUCTMA)gmq~@z7NvK7uq@7y9L| z5r-lnTioHnnqIVrf_aV2u}ox^7uRax<-J0j*GJp|#RGz2cVkO?taUnJSF>Isvq?_( zJouHaXb|m08}umZ01rZfxP4jYq+5B{%=pUS?#hQ(B+_Y1-b+n;&#&q#xf*bQqaT6afT4LA~Vl0ukDJdH17@i z0U_Uga9Sz@dk|?<2K83TsQW%%nPK=!iajr;Lox|^99DW|O3)#fhk2X0;h;zVC16Mvc1B^Qa*4iB- 
z(88j4R9LnXuT~hMIi!>M_rLN4yi3!-vEBJZcD`i|o1Yi$eBq}2n_n%2A+7_(CW~}Y zY4AWZzaoz;b52C*Z6PGri`67r;Mn{EXbqRkn3B3pH~QS z11BA4+#rBT)(;F}>V8X}3f^3CpG3;LT|^8G<{eS_HbL_Un@=gn9v8yr@+dJF?~8IR zrJs@o;PKx9a)!<5H#s~L;54*B@_(yyqGXt9@ezEnFx3&+fHVVsUqEC z?cD8)=R~{8c7P01*S`eJEYz}xn=*hq3v{uVN-#Yt#~y-QESw&o9ym|{-F)mL7tPZK z_ICZ?#X*Ip#X?TqF&QoF`Q2K&Iz5sR@m+$bOA~%%R=<=md3e~MefJmOs#p40^Yurw z5Lan2Z{$T$v`Z>a5^QDS>?{?UdM1LnG=3|9EQb24(RLYOvp|Jo^u|-Nu&fdMVtD*m zkh5u$i}jy~cI_L}C8p;Ekmhu}PQ0dYmZqHdsS8fO98Aie`LR?%8YMkjHV}7aWFyWM znYE6)f?rA5^BCfLfdJPT69u#rafJ}4KSx&qHJ09z$4c&(i}9K75z7t9?L%5ZYUv?? zo^&wf(E!Chb~H{C?CQ)$?yB*j0N1x(D!r|*ghs05Ix@a20+B_9NzJ|{it6B(Pv@w8 zS3!CvXVtHDfgB7i66zxBh!jOlcl=QpAF@TqGeMkrH6&Ti#adOp<_Xc6Adc#4txSQ} zyatf0iy8r9fhe=4gA2Nx@0}`yG!H|TcrW>@d4$2jBIyBsA;cA9uq;LW_iIV8&Et8p z*!Wry^Mdn9hWMQYxFFiXS=oM|Ko~MM)He`A`6pP|Bg9n!6cdqKxG+u=YU^vVUF0x0X`lF$#2~o0G&Pu z*g6lmR|rx+Wv_kWPYSyG(0#Ikyh!`7@$%Y-6dj@c)J*_zec}$u(}tp)jJh>yQePw|`iRDy1ERaZ>#5c5^KkuKbI2Q-!#a z>8LvXDuA-;hIBR7z6AL*ex-Ov(tD|G_r*>&YieA{*eE?5Y=WJdMiJhwN3K7OB4J(hi0K3>*bvMLqQ~> zhkZEhSnW1QGjevdfC9(}Rn!9{vQvuP^C;*LOIV5&ggB?p5tcu2@Z>xaT-F@^fe^C_ z4-~Y=Cbt8?oXLAlk8Krp3f?CS#S%g0S5-;|{A9#MImUj-Z#(@x|CAMBy1xFFAZJCl zq?tCp1HhbH$ksc)C7P^x;3a;lup=*&$}Bdz6TmFcew)vi3vubvkV|bE?*g(g0WpB< z!4EC~3wB04%)TsspV1AB4v0}~^kaS}<9ZeV%?i7M6U%g~nsM+HVa}TswQNal&XHL% zqQ_O15ErLB3vg-B%rOgi58Lzzu-UG2N2wUUE{HYBf{wU3|8;h{r&o!`1Z7-e z^*~me{tN{E;_D#3{&T|SDH{25EE7#>+s6kxnTo#o3&>2=^qyFnRDP6$jg~)VpVtJs zWfxA|quog{{vyo0(4h``zDEJPgk*5Z{2^hLg9osy^b?iXds-}itdE_YZ!p!hiZIX+z;|dTnxtY_l*j+RY zu*m$}uGex;R7aiq8u`YCT&XFUnQ6FmmRee}Ttx^1D-jMXn8HP~%ES zvKwa77*yEVapj0=o}B$yU)LU|XW@=H^>MAju0>lB?;Uu65|T~(+KLaV`^NMqY6`P;gXxC1+Zsn`s-x<`6<)oZp zk>_r|A(qS5-qnZIeH<+W9&rc+3*XHs4sh9mf9%OxY~Ax`nS7!U=c1jlv8ZbnP*0$?rCTS zCp>KqMz07nGh44|7G05}n5QuPV!&98e+eZsIh97=iBqNBp8;_NhFKZtqF0L`4;R$I zmrdyoZFx-TO663wTM*^yK(;oBO<3gQVdSqND z-|QjgJPS+}d0J~U&JvLgV0zdk8^F#MO!i#;lIkSR7j0HDPe^=U72?|I=w9{)TmBY^ zgfUIQgSH5dz0@hWg(DLE{OspyAI?UVxs-;wE{8a~M6RS907+=yxy{yP7)wg~JLY8&I*mAJgfW40ka+cV<^d@G!EsWNadv(5= 
z_9CPuHQg16W&!@D!jnl{M;nNL3v;iNMj?v7ho-9255UvNBO?64u>5sOckyx-fqC`h zxbhD~u)OrK7kOHwo0$$O{X|-i{5Fr(g1k4pBV!ccny{`Z;hRK@ZF@^p4Fp4pTiD-Ql2=*a9jMF;j?hxW)mE=|jBNt82*9#_L zJKKU`uV(-~U58O4vCSWe;riFJO&sJZXzVHshT*`35e3(GqAjy?rAH~mv@8b6RC{Tv zL^B{^`OBfsf;>LwePw=sR0vFz{XQGKUx}mz`q8(?o`3Rhq}9OT|Na??f?R#9Q=}WO zy*knpKM+k$GOsMeTcTVtZp`RP_}te(NSgggZ?ub``aPvyTc2Jepf;!{e@O7)rYssI z=tDrdW6eL;l5HxCVKpD?2_f6MZfOf{5#{!(6b2ZfZWEA+s!l4k(L3lbPGlyoiJbd! zLM%$HjG?cHbSed2Bq_vy{tB>an9&s+%ht}X1E;Rpi61Z#&Ixl>R1ygIAF>GC$SB24 zZxF%LJuVz2Lb@=^qK)Y@fo^+@($grRea`A{*WwQIl7*fmG1kEHq+ykSnFLd*JgGh zxrP0V5XWLQL53{Swh-pVPNj{b^3P;bkEwZ=5OUYbS7Y3eXcw?Whu4@d9q?W)73n%S zmJk463$j>yTLfHVQ6CbVD02ivWh$s!lxjDkhSnYe45PHD$^cd{jqMQSgi>(^>g!51I=p_RAM2@hZ zmp<#SLOiM+Ue$!I{ahUTK`nc{wMg^UCB!W`r_9u^_3L6`&Qgj|iQ~Oly87s{=FFQ|DU#O>LR_5=oiRtOZUE&TE)7rvE*3!fWz0kU9}t<(8_58FaqP1OC`{mm zSaiW7ahzyAVn-z}nrDb~zMQFOdG?kN_k3QnMRTNU_S<|M&}nP2t%o`3n?DokBBFF+ zn0ZZrQ^T{3>b32fAY`W#8w}*H6zOzOGNr=`PUwFv)C^h3o}G)fc*XhznA1KCbN7MR zNr1~mcO4y~dm_GT0ebjYDqp+ziq2-I+O5?}JR;cTYA&URE8opOtPG*1IAU#LP)=H~ zolExrC(`9)sNlLRE4h)_;FF+ke);^cB_Yd!kF5)C`9vGTCTcYPEXW0usypIS&@LD^ z8F@RD9W-0LNrGK-3{DYu*4k5qIkh>mvL&_Uf?R$%NM{E9VIE0K4(6t~C5yCFCsJuW zyxXVw%sj*$FwHnkfUBitIvxfzfSfKA*=%g9i}v*hyPWLf^e78zg5ekt>~b!qYmLi7 z^(0}|9Iy*v$yAd0h1xWh>nB$sc}Atf-X+KkL5bEGujEnS8Qp;QSOsbHyAJVx;WH36 zoI>DOQ2M0{q;Adi`Guxp0v8L{7P*RratqGh|234rxnX)VN70pa?ykWPmYLP)~X zCYG!{@t`0xU>FT=JS&d|;M z4~^fsx>c^Z(akveUt3( zy#*lboQyK_8HJt6jPx$`ky`?|j!onEmHCqBtQ6xM;Kr>2T+p7OQ5|vZuoVbwz+q)M z&K2SK-3*!|@u!afycmz0T>2<4X`ur&~427L_3-oG?tVm+yL6PmOdH_ z_i0k7T6W)#o;`*U4IZlg;=tT z#Kqe~kUq08YoXMI-uxU8MP*E8Way1`c7S%B=cJpA-xfduxD*+U7e%BA>dp`gmVqGpM0h=^y2phxK*^a{uUHdg~QQ3|6^XuxRYNK#;48$4L6eNxK2i zQ0?50h-*by5aiq%WB9j)I7J?(i=o{i%@M9NILRINa1?aX-OSiKi*yCJKQ}z``S5+>6L(IkG;gYdjV2qZ4#O6Me#k(ojECn2c6l^@Nns9oGOac z=5QJ75rtj+jy_)V9*^fUq&|5w*QDCJH*s8p?4_Mb3<*;ftKMAsKLJh%j}7d0<3=H{ zsaVEu+N*gQoR1-QwA){Wx!SEd0r}6QS`rvsQr6gSGKkx25Ck!ZdO=E6H>Yk8K}|Yw zt@Gn7%&Z=Brhr>^Sdxyh2xRebotAxkgRL+oR5a;9}A*9 
z>_w5Xr$w2`*q1XcfjukC{k0xb6DC1!U(yTyohMc|Aj@s8BTOSI)nU@bFb$`r)1h4* zxmm%e7KZOH5Z(>zJm5bf+$%L?$q5!&uCpJg%UR`;nI&Io#e`2|?SXQ_- z$Ax8W?{4F)I6&!m>tZLXTq(sC`!GPd5f*qt63blv>g*Id&D|4$%nLIcgR_ zDy+>oE-uGaqNy29_bPF-2&AIK$nviP39AKpqN|ifi{^ts%$bF(2ZotlSfvPbshE%O zHImZ}+_;LJ59W6Yz`Ibq844>85#SFd@6UGf;hgT2ea!ZU@H+*PiZ6B)VOGzOu_>4R zIZ&@;QD*DSo*fS*in+%_N9BI@*ry313GP1+#c~lD+jLCuifFe0BYkoN8vhhVy_?(S z#DR5$wXmBN8_lSFszEOrMVq~>LOG3!a*k9mJEsG!R$u$(Fw6;JN6{`G@+>>W=ojQR z!NoVD)(Zm6ShS*OKJ~ieZ4H17hG+<+k69^_I<>3Le-fc_sJ$`XQ`o6#tA}09%o>30 zB}Le1mOpLgy}ei{!sS6fnp&h!kU26em*Z$f;w)jSIyJUz+~_OCl>%M=c3(1Eo=2l) zc#eVbA|4b-$+c0(RQ5HI6qX%tysNO=Q-;oLTy>vfz1g5{BCKymq3b@H6vJT z^v4q-yrx)y!4JmjXv9Wh?>SDu2jdtLiFQHJzsn)&YeHPd7G5kXRQl209tP&ZaN3B~ zOWIgW7fR-Raz(pe9)*QzI$)tK{w*})14AOKu5IUnP{$@LIOi(tWYZip#ynDv!-c|> zY**H{ij;XfATeA?Oe4HU=Y89^|A0SX|wPTHv%ntWvx!6t)A_E{kwOjj-B zSj^~^Q*c*QtCMWC_OwvTBiaPU{+&Z`O+EtzXj10^l4eu zZ_^H$agB@IX{ftHm|J)h`C&r)s}Med`8(iHrn3W*l3|CJl(JjThjt}(T#&r^N0&e| zs7x$wqUc)Rl|sEen^>02I8F$;@REzn?Ba6K zU>w3oiwkQWuAbB{=Fc2VIpn_SX+ah+62AN_!7_&xu{NZ@I9@aXpsn zf}HtyQWDZv|0c|`UFTD6RNs4a1G=R+fsk_ZL?Q4JpQ)t%v?%yjq5H6^dLfUFhX6S~ zZz~ePId$?}Afhg>Q*NTf2cKun^`LHpr z5NIJ}#OAzcheZHxXqK~GddUoy-``*Rc96@P81%3W+G_yNWvMHpmPhh(WPFH2y3u$_ zh*{B~yL+&F^}!m%Sv7adx?^uaE(N~37%{#t!0h6%V`&^%hW3PM9W=tg#PPCVdi0Ez z__Kl(M%J6-OVtTi9|D09*hh`T;UY*7Lp96|2V(1CX!Dk%$K={6CJ9q&v*LLLDK^K0 zQpiZwryLLDk+aafC=L>7Zp!RJ6E7QGfi89f_EXUz+PP2&9LT`6-WEPKLwG};#v&n#xIKQ&Gt zgEmiQYO`AzkGljpscdP*$&4^pxHa9kEb;OpJpOzBxgJ^9)UnADqMBL#*k2Z7GXXAX zdwWwHBT{{eVfq&eBV%=CPUQY9!bRz-4tH@+BHj_?(l$w#J98X5OKRfkSR#_o%ewc~ zy|ngQEA#7FA{mD!r?aF>zLdr6L#MOO2@uXm%Q@83ai<^`8ifMaK3~n_%wSoe<>ecK z$PI1vNNjN;1SLrqeTy+&q`wE@yzXEDW?~vF1~>+qBh0nYLn_gc(NgcTqm8O)OAkI* zF-1zflw<#I>j#e%tmxEpZx&*SNAJlxOv|*G^<_{>xZ>^=w?~%$1Po@# zgR49@A=GTwOL|ciO~E--PKRPiLw8~xsf8U~oGjV}n^9!F*wYhV5#&-}&dX|RnE;o9 zH^6H8oxX1ZyIJiL7qQ&`TVQ4uIzc^{#(dB!&=F{a6khD@tz+qE_`{rng(OX|j{p}~hq~-F4;SJ(rWeN2IqTa3DOeT7>=x(OB4{PUOpdA3 ze1T4fArWs@akdaD&Qr!|fXGM&h(@%6&x>^J8b`{wM$-FcanJ>zZqZhm)Uer~CCCvm 
zqbae>(DZVvP!g}_h=9@N(-+ph(~&p=96Jl5bMb)XP`oYzuA?7XjK6_c?;>a?DhJ)F zBBSX%p{@h!#!ikN286h^=CbXMifHqun-z01wT_Q}8&C^dU6qalzT;0|4Xc~MI8Zbt zWjaBj7Kda%!-nD#W}}&+VQdGAk)`=>1yuYo>AU8*zr;@s;)UK4i=GBRSt#J$426Lp-w6J{iNRwT?XXZYnWj7ceN198rw@YoXmEp z@b~wgF5rVW4J6^)mjC6+(>fKsb9}ni=b4`!l zs3!;xxPl09b?RP|&;pK%QUbbTn0#R0BE%JEHO5AU2JHKuW5HbJZk-UHEW`~c1z|iP znp8_Xe^HE`u7sj(h~Giq{FO+rWjMl_xPyUKHI5yw0(VxORjdOvBON8oY^9}_XwJa` z_G>~tQEMB+DG7J8w8%|AP~9n0r^V%p1n+y0`cf~axufx^tAWgEbt#5|q-)wtuv=*s z_u91qyHb#Ojoojifm!34T0Bl((r#s&Yk}OFD2(Lj@pCzXW`o^l93jNX&&9Gy1KfFn zTvc(9g>`paCCofz5bTPTd7Lp!cvEYfcO7xyYfrjBzf6=Rgq;bNm~G1dsQx?@s3$4x zAKCA-zPMfh-{Mq-33SEvwV&BIvvK;TfUL%FeyE^_zz(?qkOUif-J(xnS`mll1xz2m zEYdZfRqVuWEUp#e3g9@jCmt5{UTe~`An{<1rHNM~(KEsz0fc`eaa`F}~1^p6BmVa*a~25GTE64xYAuJ@+QPPxp_RUMaa(S zV>c6zw&2T_g|WIQHx)eL=bs9oQ*)1(3$RQ9CIr% z+}|2n+T#t85*pt48I1AUYM-M?}){jf3&32=g6l6Q=0#m~=ak8+xpSt4ppO zWC-fUvIghwxFbVkZcn4qMYq1A_LV(d#|=3mlVGH4?K_F%W=c)0o^iS`OT(fnkr*P2 zap{i$VS%rD7GrrvYe2zePdq4^D&T#*6x;q70{$%JRo~>zDekT%nGR?1Sc`L9fo^(6 z%T?wtl~^F$W#TTmIz9)1!pw0UCXB@Wd5Y;JWDX&4b@*;k|HT)|1Lm1xg8G{_Y$@MG!+Xyv4v?puVH{tmf;n_t9i6za^NIWCbMP*S_j9=Ujt?_uT{o?lulNwQN2Q%#f0LvPqF=u+C z(I$xGbdC?p&K0T8vDrB2r-UsSns9O4g)-tOf$ndxjzdzMa`(oUg!0)O%$#mjn7SwH z>i9z*AN3Oovb3suCr2sBVg_H?qt2P&OS8EZGaasG^CE;WQr>gwr09E)aAvoZ-uiVfT9RgB`yI3Q1M=6UypI$(pABoGd2mlQl|F`}*AGj2(ku>cyoBUZI^^^eN zF_Ohd9Q_L?%y)CK-HC@pQ?7|K%dt{nmrJ8Pb^Gn3AV?e<^Iq&4*LV!ljf8S<3{#fD zm?8|uVIfwEn?+c{XCM{vx@fb3nv&g7zXYgBHLk$8<{-$4wpVpF$NKeikAt}+SV5}k zrwO5cELQr`G$v6#P@o&6-lMGq;yYPXs)(mB`8CsICeRt9k|h_6aB>Jgp^2-n2||`8 z&R}GXHGTy}?Pan<5w;iUB=q2w+(WJuqygbJR2qL1;nLt*Lf65hHT*}Gh)mg7YCpD; zNUqs}Qu46>a*kLYW8Ei+V=gW(b&kZIqG_Ya$j;LWo1^2^K^zdgC4eHdcd!ci>aT0R zCe2@*E5K#u!NatWd`S?s@(uI&KqQGT#WV8Zzp2GWCoQcF*VlktDJgh$=EVSfrBJsS z_Nz8(yFnNQ;p)~{toCFrC02esImqQML9SJ^%!@G`Ll#;edc9U#K zvb%Ii`I611d?9rc5TvPy2!9LG6hx#;6Obk%0i;L=K|w@mQlzOM{r`LCy}6e>&pvz4 z`^}v@bLLDt=ggVMOfoacr+U*r=czHv5vfx2%;3S~PUq&71tWU_80Ap)1GBC9u`6kS#xPcKOE~n0bte= 
z-^7YnEPa1Dkj8OAM&LEyA6;2WuB|vm7WCAaLIW!96~Y*Pf3=&*NZ|HwnzDSlb&5VT%C2oLaxqV`x|H`n%agPD%$cT z=NP8?I9w2|La}D=K0CI4%5R{kVbjM2B%)zM5AI0BUL@$D0H<%}#LLy$@4Q}JV$B*i z3GgTI=ty1Wmh2U7^u|OMe-~+9)8%AtEdM=!X9+6^X*FIIWbVua7>W)5ktb`!AQB{L?bN&&83%?cegiaF2cnQ){8QK}Mvkfex>T_T(w&4d=k zPYa=4D>^&l=6~{RcDGFqMaK(}uFuE-;~ZN4m=fkP)_1kz#(&m}Igq^I;u-;7A95E2 zr^RLDq8w|y1n$fiGTQI(GNgBuX+r7b1RbOMu7dqdW~Ow{QT$98MXaf)j<5cUXIG#N zO(pq$O^6xEG{efIJ(`L{L+z6a;L%?sBHMn9_%SHRtvNDAgB&5+HLK<Le5puhJ)h?7F0Y94>p&j0 zfZ8E$6uM2PyeK%)}76_NMv1W;H zWG}Jubb!!56}b$hHrwGX=VGTxjdEWX;t$oy#U~CIz_4eBuv0}*EOe~8*8Mt>G|Uh) z*RKEL*%f1?_Pr4Q6Gn23JXAKm@HV75rdgIqt(c}pqfM|mHZXloeC!=a`UtVxvuO88 zlM@>VH)zeip@90zgMBjjEEVJiWlI2i)xGu==tjT?gzf!%c~-614H%>13_(<7 zN$+@^SNxpr(Lp3uexILR#;T^K_?k$Ur6ea881;8S2%Z+k@v;a$hzcYH<6HRyXg61D zcbcX@C=hIl@P%8CTK>JAn!4$45-g3AFG>TBxUCk7VQu?r@s=gONIY9 z_9`NF3-X82o@(26m=KDF<5UXzah+&thPQ%PI$gYgKe{+m{c#ew5Dlv72--5PsftPA zaI9)kLp-}O&;ASs?LmA!5Br3(#nP(iK0jCm($Yc0qwJvFe=Cd(X3v@(4XftABR!o- z2|Ou`l4+qsIle2BX&AbNnNhS1dp2oE zUtI%G(yZ7!6>CDc-{O1KAw9MB3x^c2s|(DLl`g?BRdF$gk%Wgu({8%1`#n9wKWbjX zX!G&4AhR)J4DTVut?UuT>m)%@G!XBIcJYuLh9qq%He1_?P_8Gh%SqAhd?WFHA}k86 zeUqG$xLg3AL7IIVE;&3P%sl53D=p$)6vBr~gLHbT_oYumQ|_I1*eS;KZ1zr|mc;ok z{v^m>K>*P>-v1c@SEez|MvVUd6z0OCUeLXMe0rVF<_Sk|qF~r1Jhd=0W&wlgERmE2 z%|r4DUm%*&b`s%^Q4SG!DN6yHaE|AM`MOR4-GBo=tu{-D%QG;hyCf*|SA~(URwEjb z&lBw_8Y9KbF`N5@W>xE5fjIt60PJnWpcnsL55ko{!`+0KT}+cU!bhDzie2s{mh3qq+}WxLi;|8t-zLnBT94u* zZZC3CJQu9vIH*1?&_!zDP$6eRHuxflD_6zjuF365L5N&-6Bt)|eksqoy%Q_U4F%oE zV8m^E{(U|>qvW+I+Ig~2Md?a>hc5%s2D%utD{dC$8p?%N(OC!MUV-LmH6~0sB6HEf zS2hRD`U71cE*I?@;4Gwz;~y$gv}qgltY|pJ;$bR2u>}NTNC3h3ik|(d4sW9i-baYb zftOaVZVhdZg9MtnOaU?@W0Vx;QuMb^O~%)@%+s)BZ%&susg~<7F=NH#M?pfKur89oJ*AWu zSezoVwNh6MZ4I39NIf16VS%u8P++dzw;>IT>BDJiFFm`x*piKwmcs-=&Z0`uQ-|Y5 zA=HKS77DE};hvSDuk}CAB9M?I!NP-Bgn2bw50!Ib`NO8X8vMs#N&|>8eYM? 
z3Q^{{+5pFE77I3Nvt}K(maMEgaZjPXGjb&a64HUG_YLOXzNT&YCd`@HZHJ z0<*!E^LCu39f{|JQAkbhQnK8zSNSwb9;D&sqwVFJxi3!Ca*~!rn|TR_bU(gVP<1?`LjTCSoU6|CN{2g zR)k`poo0!e*+mn_jJcl4hG+)DDWA@sQNH^{(ku#R9RJF4kf~}gK3&C6{=}m8z7%<6 zb3yQQ1%2q3dN#|DX%@HWd9D^VGfZi+XLo}Mz8ciBh`?f6vUE#2MIS0nf0VVWFzBHOeBLc7BTV`dtB8duAcfE)nFCb+%7w;r^#0k+hmx zt$ckUiI67tZ{^+?7v)@-A{eC+FhTxs1KQ_{L{g?f)PSF9;28!H6*5gukBWAoU<4kA z34qxgF6)i=^=r0FDB^gI#KDc=S%YbzBKM<5Xnz6sHwN{1I!i(L8nu0_ybmd;je-quJ}ZB{MzLUq3%ZNJD4r4R z?BuoDHyMjtawKzuy-0|Q(AwTV74H-&8g;?JaD1wj6mH0BWKEn{Q;=JSZQ{sSd@ciY z&=`yVa2!{>gmXqZ^>vaU#6uT?4aZHQT()io0|w$Jvpf*X3W7%wA+Jb+5kgCfPb|)h z;%E!wv56qcKTOaO8SA=5J72a>GI+fs#LYqIPZl9EixsIbxoId|ZwaYfC1wr2zPSL8 z6lo!;eE{X2CD^&K&Fesc(-j8_vsBf!_r#GRCD+^<@obTtF2FfHHc#g+&9lIoXG6z@ z!dz^2Sc)w1L4l-uK}zxE{Yc{?wxq>cjQ~E>u&6B#*RwM~J0wf~KeAuT#N37!teeJ zI$FH&E)n3;*5Q_c#er0h38b{Gw9{xjUL+YR>q=lvQH$D1;^N_zkX&mP7ZBU|p(z$x z9}05r3)wsLr4#EP>i~2aD$`IprvTW9BK!*lxE7VVY`#IX#i4R>Tf8FD-^Nx)c!2)c zyA#N~UPmU;<0USQLV-6f$T_k8 zCw9@50`f^ONs|oxHx%iHxh5HXVmIkr|Cv4LX#7ebLbenB*>7jhL*4z+(F5+H=)MV@ zlJ5}WiXuwzYAn`U21HSk8CLeq{WE}0jw2mPyKQ@&qIJf&yte@JrV&$jnshG^21mOH zEyeJ2vq*;MMK$qzJ$eLgDUD>e$i-_}dX2O=5yx^8xq;e02N<8)Dy?)*-grkpEx`FskN>m4jJGl*P5gJC9O+$9R%WgQcx-D4a?B3 z60*l^&3ZLOy77(8|&M#8S7M1$)oFV7P>&Y>1foRu2UQ_f+77L5AMAEEj zh&@m=6_{doepvP!d?(BHI3xS5wW29**RwOLV!>I$528O-In*!Do;N4z^z7QVRMyA; ziFC`irmcAVRfu_~u^wfVfUL7KLMCm@iNpD8G)p@-ZeAz^)-kr@ER<&7B_oC?C()GS(X7(aof@^wKM~334Hu zS!AZBI!qXu&z?Omj?gpXif&XGj3-2yODP;@TCu$^kX9txP&rl|_q(&uSMRN77rI*4 zFL!0XH8L=MY{D<$dvkd_Bf`wA#`MWd^!XJaW+rNEnqo?Ti>@wDUyD-&x#)^kri&De z`MqY8#{Kd2NfJ@GhWe(OSbqw_-_g-YCexo6NF`oUeCSajRJZ~2B?tHqgYst?S-E_v z6xD(<9?0u3>1=xhI_HHFyutXU5I&6hXfXaJ!knlg=3Dd}4o$WkR%eu-z5pxc8s7kd z>2t9|Ez`3*wC>Q>?v`2oR-vvh$6HzZpfNk|2rz$_HMN@PP9ZKa!z!ozmc{FWJoMR@ zmn)Qb_f5h6igcA(80D5DK`Hn2RTS!J5f-H;E|+~*Bwt&u!xmC{#H??EWNm;AGsStE zEzAr`dsybD(YXF7K(b|^N8KgD1=5OQCItPNM>{h$Fl(qJi?zpqxk2kkbv@Pv0{pIa zeVmOS9i!Pgj|C?Giu&r-xL<^MfR+yCq1reO$Su%1h}HgV(HV;;>54X+-&LSk=5d4` 
z>|!o^NhnRz!_l&?Z}H6MXTlVym^ARif?dpHIgAay4WRle&fPW{Ar(rzD#C?hamIWw z8e4w{$XujV**V}3R4dHM=5y>8l}`)_qBXKmEH{WWpE0d0lRw``JSxxy(Nds0HaI>9 zP)Tx{V+R4$rj1+YcGt7TXIy74I-{vb&M{f8kL?gZf6xii?&vRG7?{3sgpvBP0$IxX z8&>~ANE$wNi~8^KJTqVnaZpIlTKyP2%uV!w#WBXeR&6gy@LSBXASAVE5Eo6jtYp zbP-W}=%`zJY)FeZysQX-^H^_TuHPu0rN8ph2Z3&{pF7F=aOYOZW=~h{_|Y*(yIGCDNi}t$o;` zSX-=eN}l;0XS5HqwY*yhlU)7&agQEJzob_~Geak?hCcx^bD9yC4@IgZNut&`6^fQ& zn3#%Y5&Tq{tXX%Q2IUsOb5g4??fM@Un)5aVUysLyxmDP6OUK5jFcN#64h)+d4xuZ? z&VV+%yR{=I#Z5wZae{EhQtNUztF%_1I}_MlL^oOCq{7K9AQy%v#l5gc=R&V35-yu0 zDhP96d!Tyr~J0LQ&M1i7g7a^1m1 ze|DY|o`UqxPy8H!5A~P(knl}&O1-Z~l)qBJmE~&S7&`~dUgc>98j-CnRrd?O;1_=k z7R#sl5FwTiZqrQ#9gl8-^w=Rnt@Y@cHf6<2u#GrQB&|4mcg(MAp9|sa8mJzl)Rscb zh3a;B6HEypD^?frKRwe*y7*J#QvFNQtiNhe^u+0+on2FDLfh#N&I2L4YOaV&i}Pd8 zH;_$dw2J`puWzV{yRsk2k1%Oy1~5mUJ2Xtc5#eBhdt~0q!jX-l^!+|B{T9 zt2~#*&b^pl&3bm~{nHl^6fW8Xx;<%p#vBCVAYm?P3%b#`qewG~@jvOqcDy7{)7p+@ zwO_RJN$MDF4W1Nc$wpAP;)Qq_T=VcN(3xW6d@~rg3$c{W$5%$Gpa+UXwYc^&5FTyGfgKXlTVN)^ffL6x4>|J4M$-Y2|&Oq z?l`SIzAeIKTTC2&3`EZhfu$_i2Vli2mqWU!++n1!Jh4O&C74Y_g?2srtJ&2*K0f+= z;r?n@xqVPei1x; zR*-p}nw!ZAfw=K1P`64Ot0g%g#y!GZ#|33hc(%u5Ldb4J>(Sw)76^;os*@nVk1=nmMu=|Q4mlv>} zBujTdpgB6c011G>`5j5^kpe05lKR&8m7d+6c!?$bA2Tk2;e~s|wfaKm7dXB0;B zbc-+wIDIZ-)Q&eo&>{+xu|q5tHItl9q6TDPY1OpJ=2f5kpw|>Y^#je;Nc`A!f9L0##){ZX z&*sVyj-6!DBE*~==UjUSCdYO`<_RA0rSbTK0M}_m<`S78*0>o=oe+0#3_P_W-LM2G z=#2wK`xV8r#`g*rK4GpxI`_!t8s~L^{yq}bH3oMd7DV9}aysX^;+;ds%31BU{=F0I zID2;NtY^xR%y0N@-va4!q=0Y(qG=qi8AVcmJnh6i81XeEi*$kHgV2jl#c-T>E1+wS zF`Lcx$8Q60RjN^_a9|5{>gVONN9KH202gRU0nig~`oIYZElY+y@gI~r&H zA;;R7O_<|K@pECW1R-YT#Aolw)8ICku9b;x?gVp7rjtM;@jD?dIGjZpxb0m4(upjL z9z8n8N)~xBD%vG!VN4j5iQ;%+)Tw(|!JfY_3K8#@QSxUZXpsi4xw>1=8v8iai(H;@ zH>7i}LImSee}r}^B0 zr|-x4Cq8i>1Z8A-jrsf~QQ5c4LzA)p{rS6GEHl&{?E+NRsdfdpxkM!CC#G*?%It~f z9)Kpd?dQxQQrVUdLQ%=`R5=#w+2fM;>B)-uEumTarAufTk8c#_@)BvEk9_hWfUHnl z>KaQ#d#c9yLTbLWXdV}A@fhKX35nxzg4~;n6@te6^YnwcgkD*@ocoHmnfH?i&ejj- z-_faHS?c7I@~B{c2g6r#7<*liv+KZJoO{C`egwo7Yr`m)4zM)+DNmKu6oeKO0)uAH 
z-ZgH1lxJEI<-t_!{aF6`d@gvvnYlj>5@t5kG62U9MAH-m`jfC-DbmGaW=L0Y5C1ub zsyQ55W?O#LUx3`Pa3+u^GyW$(5=hR23<8j7QLv(-r7<3v`9-k?>*(m2Bb-eFGFOI}(Br$JT|wRE!qT{#-X_rBu4KmPiKhi*B7_-&UX?`X z`68X~aoYVU(ose>Kx}tPqWpPumArwNg6n{?S5wztTUQCpFxh)j@+uHvmU@*l)6&8HsVBv>UF%ctE}`%B_tq zt4mX|%qOe80PNzHDs`l*P5|FtQq!_uj1<30(=ZTkXFv@r1>()I)OwNRZlY8_YM3Br zSwjUTW4_YB%WiHCoYu41T(hDx-WTb#iv}3QqVXjFS7rX#V7i5cMa&YReybMe*E2=B zLGVQ*Zf!>Ew4T;+ABa}3N}eF=ybP7e`Ha^lQ_3aj6Deyr&J}G=F)(+O<8cA{R61y~`s+~c7rMR;qtzxtvh{Np%PVY+ z$rv^g3*X=uw;8G;?y=$C06`WqZh#%`?P5wjc(821XIplvNRvEEzxD_ul}Oq%jJ z!d%rw2=xsjErhuOg$us_2kLxv@@beG)s7HEzB|s|IX?S#{!#;70fkgMOOaB6rs$l+ z3}IF{MQGLnxxG;UjKzRx*Q^btpZten@FQu6;~(c2X{r==P44%134>cK zv8Uo&@8;iW#t~HQaiJi$lcJtuLq#Ql#`Im`9+wu*j3Ke=dwxGz3*>HzyWjgjmcxY% zU43zi01B?yKr9;K4bkutA(xl(J0cOx>2n8CECGi351hd9CAoyj2^hZ=%p4u>?McUC{ve3PXPEYRdD3x{Go6f^(K}ix3x-tF#ckpR5evQnby zj=ze|nuDboL5Nq)-(l_BAyvlOpY%JW!EU<4NIWmvV_J5?PWSf2JAz$nzJ{_OjdZ2e z^8A*TN~l4);$lJmCeC40Xs=J@X}Wt?q;+GLAec4Kz8pTt<$v?ljnK}l{Ip3qAVZi( zQ3Gl4y*xuo#5P86skvN;>&vcjAbz_>{wYS|4koeKWKAHKdnvc+p~gk=@!AsxJ4MkFxR)Iy(ZGxR}3hS4q@wuWA>*(ojnQ$ zmAzI7t(vZpUs(LyiDwaOokN9KG&t}sD||dHh!MOrc-t@9W8$+={?5o)hkRjGnA3&& zJM(4FF2{chgi!QyJvw6@(zrd?W3kM^xB74RU==yeEhn-hU%@=i2ok;L-?JM&mlRP$ccdDU1{fr$=-qBOF0t zj?U7{JBdKo`-SsrZLX<`J=cfuw^-~FL8d+W1i7(v+7NAgSNvWW#T%L=guv%`Hn(sN zpn%DevsZ!gR;Ig%GlcS9MRiTwpl1(r^5bD?%y7K&27p;znVQ9%C4!ue8<2eASv(?; zI^!Sxk8l1oJ{0MGG~ok2>B8Bj4S{J^ofX4RO_U3&m9V}DDQ*5x=l;CMaaH2@ue>oJje$7{U2|6qh;~8I zJ?gA|vK*W$*wyB|s=O59q3lgMX$3bVN6jYA37Y*@cdWZ9w2O)({6O-{*hpB`)ajP= zaRDxqVp}Tu3bUs`FGsZZWdp$0Jw-cScQbx*rKU7uaV0{5w6c%Foa98Lv(@^A{r^C`oF##2EgbCoP^1gff|^$rm-;Q8 zNfMi$I3Oc=fjF=+A<`9;6#JBQ3~mLASh2dAJ}WK}Wu`AJ^#V)_@IcwhGB|D*O*zSP z0ehB{wt;jxEAh!i_DACP!u*{!bfigN$Z=PpZVqhjM0AJ;h4@=7#AWP_CEMni^)O2e zq0NbIVg6o4TVtFd(v?O~aM&M+vjvf{93~j>X|q+o0z#RYV5e`yz%eY^1;X$pXDsOi zXKx2Ov(`p(l5YVz#yf>OwGa2E`Kn=iP#0DKfxDtffJ=gTSVyK~nV?LHIVdV;^4NBV z{JjRwd@qX&1;B6S4g~OhQJMH?e$jaJ@g2bsi=8?l@asjpkZgsA&|JjVv*Z;GIA!SBUsrscG=!`^yTE#RMat;43T&X4 
zSyr(+w-H7u`(V%|cqPRJjgtkKDeV4qsr!9J679?hF|ZR!+yRv<6FeS?hYIw2 zl@2pFAtd8Sm(Bz;jDJ%YLqH$V6k}S1i#x(HzdfE8;EV|{&#}L9};mdUhcQpdo9(fDm&4W~CveW)~oT1*-zaM#=eBXc20XW6+DDDQYVki;d>- z?1JgG`VngS6+z}l1&g2YSXKZ%(oMv10$g-DK1OHp^nl&-*V=nz?z&!pxxgtn9i95U z5Pxit*!a@qJXj>cCN87Io?p!qaT){_Y;Sz!Yd};0AKT=*cSA zU#ErP+5((4dUfo9vhI8YWJNhp!j#z;eM(7*x?~%BL>-` zFjnbU_R~V(4O^u#ToBf&hIDy30nd!Ly8sIbo1A1;9&?cjy4#XAYXez!A#My}x1k&5Oi6B;vjq0IbR9bYvDjDrx73RWV z8z;DKIo=oK0<*@)T(!l59H_t)I2y+b&;nx$mK?PZg6cNvfZA|0H9)##7?U-7e7+IH z?Ng2UF^$+e3Zp7@jWM7{w{N8kUC$LXwwaycEzvHambZ!wSJ4E-_ldc+Yy4bHQ(E3&e2|bfx{}`LwS^Ge^1{^2Spmn_v=a(Njq-)5`y&fabjjT#@e7i<<$uy*Ah^k; zS^e>eB_?WhE^!?Z)S{N@ZPI8?0HodM9JkgGk`A(kXU{RStFzD4u0*ffE@(`ZhI9oKgUsq@Xp=5#Uzm zmCVKw4+zzKOn{_Q~`|WP0%8G~pVplC@Ch>48aOMfQFAcri*%fTz{j*VP8MXg^|RwTOC(=sH|toV7YK0k zTO&<7uhR~eH9!7yOu_Pei(LdGlZ5tX1{Q%B?#60$SkGn)#zib3QdY%Zh4SUZ{uS}8 zNEbM}Hh~WFx>^Nx z=&UT9C}EgcuRG74@bx|ExZv@^TnYTYaK+{tIYAVOZC-E8>VcrNY)Y^fw}^6{+)|-n z051r!k@X`nRFKH3W&GfbTZvjJmCjdta|o$ThgkL&WDy$~!#orF3Gf%>#ZE>gAv9le zO;h|(&oq?A9!xujE{CL~<$ly`*NNaaoU(O7YV$t-wnDCV4I=nWk)sg4!$i7&RqNo$ zk43wz3$!1^b@z%QNeh0`@#}(as;a1nr$uM|s~;bFozyDDp8cS1z{P{CBIURDLt)OG zbr&~Yjl_Qhx&3f(U>hbi;o1X$7Q%(?9kGo_b4DixO9uGeHwDEq{qAhrIpw6&O4kcA z?>Z)jw4ix7OT=J>e*}x5u2^G`ME>>wryp!7+FGD9Bj6&9#Z7pE;O+DU=oIIP$eM|z z{6M@Zz!jAP2b)@yXl1|A+Qv1s2Ms}(#fzF6_KU|wyQ3=h0X|wtWPf}p*p;qb)*HJV zn!nXD)-f61DCjB{i1G7+##0xENzup#!TMv5;XJGEa!=eQlJcjMAuo$CXPPI|O{||A z0dWP!IG)%(g%bNq!dyl+e2LixMGGO4h?${6@4(Qk}fxV&$nk zMYDR`2BP^avO5#q=~O}PbX5!vy@Xg{GkSiOkkK30gF8-R$bUc(xvSR9!gTrfgisz$ zpwgNhahPdds*{!Y_ZgjHGr^ZT59fEYn{LdZK25uC3N$B(h?O)sZwoS)Q0~&djg$`)g>nQb&kvPm#2GB_iQXO=D}^lReL8EWKaPZa+5B#KjTq zPqGPB)OJJgsX=~Rt9+Z^{h>zUnZl6kgqU;EM}S;H|d$bAwIB0>wM{p|5VT?DO9=%rT(p{^;&*JSK_JY?2x`FZ37 znsM~F>|85X11N3Q4Nz>trw`_pQg=aT+!L%!!~b4mtIqyX%U5gwb@f zQGkr;nGb0_#7W-6MY<^LV<-t1{(K+EmCxLi=>7)?^w&{hFxj14q_SOIhuhB-$X2?3 zpLk1;bYm`oxL_oHb5fos*OAK}1JgTC281m#v}v1gsOTBVVerJC?kGWSsSea!Z1Ha{ z5EgkF%%8}zVX$1tz!bCok9SML+J(?IXrCz1RoU+pns(7n@&zH;OePH#>q2bYd!L%4EEUJo 
z4?P?&a$Z!JW&QUvs*g#RX)2@O2-@PkP9vo&(v2OtADz&Vf*2T&lPmxRJR3c$XYjdDfM9~u*_}1d%HLin_xItL zCxo&rYN?66vgg@z=EP-srX)0#F1sKaUG`%Y2i(^=JI|($E8VHm7lh1ojCSnss(O_` z*u`ydY3;TB&!K5Dh31ZZMQC8zdFQxO&z5z3Pg$z*y~=aI%nEd|n0O8r0H>xDHZ{I2 z3O>kXO=nX_988h3e^1cV6)_-r?NKxS? zaCXCAoA=+LpL1%$rA8|rP|;#K>q0>zmwf3tR=-d7Xw-6*U6qL zIg@B9IZt>tarJTy6b_^mYVAvMv^a1Qy9lMV_!5`e#H2_{&e`|4RnKNm`6JQ>-wpPcrMJ6m1h-*vaiYxL=@cNL3^%5a2%K|PB zi{FcOjZm}nq$3>1T?xi)K7VO!^lKi~>+v3pJw!NdvyRvgvgN*3m^mvKA%(YK{1@nt zuo4`Ki?4#FF7#8Dd0MhPadnBr!8&>u(?_%)n?WW?g&AaukP}w&MtK`3Lh&J3Fev+>c8o=P-f!L^f|06ZY%(eEt7Gx z02iLKRa0@FXmaH?4-8byKFLLKt?Tl9SlnaBxlDjxTf!nVt`+U}!@X&oJyO*T`70Q) zk=sTA7F0G;AR}?IAa@-Zy8G}wO!wqoB;4Z*zU}mOw7h)u(jtc{ma`hd{wv6hB$WmM zt}l$;ZUlpIl7YBd1OSzS0>DH?j4H)aQEqh1 zi!B76lgPg07WctfbBles&`J~#1fK32(* z;m4vaujd<(-KF@xHz&;G6PZbC@MuOGFGeW4ya;uuh zZ-MlN8)GJSTIyybJ{WiE2R9&&%EYGzdqfxvYOJr071KPs!dPZGJ~8#Q7^znXZXh`HXgqclzJzXi91D65xhvT z+Lhwoj3(12{3GJsg2n_fJI?uo)9bLDPHcVg4(HZE2-{NZCJL@qbdASCJyZJzsRx$b z36TvHFkcsmF=}mm7pS>rv#F-Ueu2D)lif`*EP^Iv2pgkx&t<=LNYVR}2)99vG-H?i z5!!Xd{TpxJk$6K8b=9fhu~>W$6s23j+Vt$~nXnUarJhN}IKwfKr|yN~`z;#tX59zj zQJ`K!QRWIVA=m|H9g`Vj4j1U}vl&8>!AK@3D})o>mL+i`sU zeiBh?G1s!d7$#)A8*KUAo^B%}w;XW{F*g26a%tdZM z>ma2SiX4G1AKv4G@mlfX2riOoS^B95Nkg@=HJetd>pqm{C1tWBw+;UEl zMyC?ShEcf0-2yEJ>J&Jit1mBp7!>}aKxFB)*&})8X1I1NTM9I9bx1>nI5&f^YwPw1 z>B(*qmXB}4o7MgV?61-}y`JN49ieUuMsl52`23^!M;K(`8%}kCoM@!X%7J@s z9s@DU8nvx|O0@Zkx}4<)n#P#?GoZh?4Cg6zZFb>D33bx3A?C_-cIIe-u30*ZMW6hQ zAh&oOw)u1beC5C7IW}WXkl!HVXS+}rkAYGSP_GJccDiCsyqkJFPsE%pb@?Vw?#iiW30oKDawY`)eFL>1KHz_XtAz zIaWRudp_m#3}LO9z~WBPu84fJaFL)q@pnKzqj92?))&VMG1q;K7(-0kKfuVhO=H2O zBAftoeXoKp#=nIjy8V+QH2dtQp9d>P_E|8PF9A1csoJtW_k zheX4*N*Tc)7eU>nxgCw2{|V*tb(Mz3aLan3K;){;fOpjkr13YroYw?$jX=LxrA^F8 z-2TyI7~FAG<$Cfgnf7Na#u#AV6T+~BG!Q2~KKUY~hlms)6itNo4 z(r72AwZJpUE=}NhNDzdVfXsO9oe?LkWIhxMn~8+33pi0$zMLl;lECY*GETrkNj5V$ z#cu@g0UUrxmQ*0GWoan!0K=MYxAIHUh4ylrId@wDSZ1^yVX;0KYrg{KMyMVph|e9O zU2>E{@RcwpuL4oYo#xJp@95b~M(vilfq(3^JdY)eFR`a++D;=b(G5ho2`026?ux$( 
zA;T)77;O4F&op|!Vh?{MBa)%}L=k?k2YG=vR|>IA4iHvm!~msXJ{uenr_@F7S1K2= zk!aEFWAX-h^bJ5)ovEkH<$1)p5az1t=&H1tpZPZs+#sqqabk!vEm}1hB4kC>|Hr?= z;RW(QJ3+L|GXXF9;?VyBP??5`rs_CC1QLWy$!@>en~?q*U5vO4L^}{f@_C9M^PC7* z7YWox<*K*xHxz4F9QczU%2S`VQ7ioq$~8srnCWac6=+E%u7ZLn@HIk|2zK_0g+YIr z|27EaD|cWH9E)C&mQP|7;WWX@kkR}dP@1ZOt6#6no_E@QPJH!UaDSQwN(zuc*AnPH zQ%SIZ7m73t<+wQDY8I{UIm5QaEpddNEqiptjyOfMGi~MeDu#EeyxRMKGmSpTpnY5c zU@;-ZQvzldy2)El#|duTd#|SoW$g3mH5p7AD zPrPsB_(>r%qCyDz z#9JA`x~xCGyehxARvP!TSK3n$td}uOv8Q5Ofb*_I`F>>fJ1s#`$^8TYl&Zd}H8%RB zU*^Y3-7RFm4Wmn~rLBnVmB>QLYEk+NR1XUU!UINamNQz6ZoQiG9qZ_bBlJuS7-W#b zPeHiQ+WJU5ej>dhi-~)|lnc zKMciFg2eRZidcV5o;O^59jb?#lo2vJ@`1E{O+Qh7Eza?KLOc!@;twok0jR?t{qgMD zfHd0_fxk*|@~5Gw)}&lw&Jw{-HH|e*ZSk@w%Ltu(G&cN`xNnii3cofX51iHyj4g5IMdMq=iiv^PthbL5SN3x4!ZSTIskg!)ecn&!>d>i?|CZb}0*jmDkNd?ZMd+B5IcibgOZkI@xX3T@Q#Fvt=c#2Sv?{ z5UfBIH@P=H6zZ1cPU_e;Ak7w>wQ*S%?ObugNfmrukc-6nTprvs&%*TqY5o-FPZG0C z0Qq{(jX{ylN1Y$tP#h(QPbt!!-gs2B*<@uuX`U8_a8$Hb#2b26kCa%%VV{Fi(<)Tr z6C%uWT^5Be#Y;k5*~<127QQK_#5+Qj9g%E)w*k-2j-AMxqAb4iQxwT9HUw~KIAqWI zmC_t8jMv+0TH_%-yF!R$n*AOXL>}-22Mjvm1{(pGxAQCZiEoK?nL0S>gOXcU!_NOa zC~V+}XP5N)D@1$9Mpa3NF-J{Jf64jt#j8``% zk!!&{j2tv%9lGfkz^KI{7OPY_M7xDr7*ptu385JEZLM*Fe&u;_OGBKkXX;hxM3MMk z3UryWBlvMk_D&NaWzO3y|CvkL`GIfE*K+n`6 z8E6?&N*SG^{<5C=sb~)aEu8#U;2b$y&iayHqbVBXP1jt|BMO|cShO3OJ)2Gx%BnK; zWl+SUQaVD#U^y%U$fjzUBL6-rz)ebPu-VsDEz_F=!dHxKEL1ny0?LeNWmCe~9-9gx zUxos%Z(t~^6`;YKdj_IvO9++}i)(t<6-?y54iv8`+s}?wXtX$lYF3- zIsasl77En79SHR~+vfR6fx-FGcu0^%lo_F`Jit2XEn(Rht8&s zC1dRzcojPi7MwLjsh?|IqC*JfVtRq^9R3&0r)j&2{(cTSJ|n=NW?@HCS;^i8r2%%zfkE6R@ zBa+6c;E2PGdNvyy%Q%U(%!PJwQ@@SV1h^rVurs=^AkpH*>muP%eN$UabnKkx)s5OQ zo)qm$Y0Z)p&~Fu3#ULJ*5`>j@Asyuzr|`S&>I{jT(-*@cC=5O|=w&GRQ35Coac;_5VM9q_tncQl%fL9Q#lH7|#fPc0U+vjmyb**J*$EFKn0vA8iW zKG3rpjcH3ULFwje@9qSRHPvyPo?Tb&sK4zj*z zG=E%p|5YHDyA5Sc94p$*pp{!WzAYdVSnO~dRfKWh5$JNKxX?^h$o54-vpSqE~@qB`^cf}0?82P8q*K+-CQ8SEV%u5HWcB}$)w%oNb82eU3XWftc8!^%m zq)eD=rtN?{O{nksHK6`lBkPNH?j6dLAz`d`2jmoaiBMM#&pN`{#IFj3yW!;-9 
zSsK8u7_(}jT!aB`W@DlwQi(8%$vDBZf<{_25;?fMH$Gj*v%kQBS4??vk&vts>|O}$ z(ib-gl>F?9Ci(Xmoz-c<_0W6NrS74UnmzY|dY6<&5C62@Qrr`{b`-AXev?6hdM4qr1fu#m}vr0jEH(3dw4wJ&e}f z>$iZxMk#s-*22bIfSa{d=DF2cpbr!$V zmV`qETOulSIY!(hfP$ugaICezy9Ano&f9gjIA71MmhM!bqU$aNQRAf@=K@jY)pDW+ zC3R7QFy1NQQG&A%4Btn{%y7vC?+k_X{b`}BUF!CU@xx zL7OcL(A;Y+{E{HEgpp0O*g5>cp1)J9>m>Lc=DRh2HD~^K^6o_Lo{?#7Kd1 zk-UuEGAJG^6iI1Pq= zdAJ!v8pHT{j>v_Q=ux9WoC|k#EgMbt#+L=suXT|b?fr(xY*=Gf?1~)@0iZN9$^g<- z2y){zqcz|ZOmF-_SQ2D;bPTcH>j1Env`L8~TRJu0La@t;ElAp!SwdW0+~rc3_p=0f zC>1NQ-O{pXbJpqvcLlsWQ>4lSntik9&D|}=l*mi0MwUQ_iFAcn_ucl3_@MyUHlWq` zDIyS%il(Mmw~OcetC{tgh1a&i`9PnXa&f;pNVE%#5Ng95-326G%Q*EdL42lN8_&x{ zxZo^XvkRtnE&;k^)tCh*qgDW2ph_0*7!c)pY(9Gqmn1>Dnr&lvd`*jXBQU}xcec2_ zNK;p_q$VB~?Jty9Fb2k-1z46D{aL`K`For0JXvKrO#HflnSP~Xg3=^#KrK&JEf0to zSO(y>tWASyTq(p$9UZYIKo{m>4O%bh2j@tX9IdRTFJ!Rm1)bq|QVDt}IZ(LE-3c$2 z#o-?TaQGzHqX4%-Lx~Hik*w1MnISDi-5l&K^~O(&WQuH}_1t4cGFD@qCAs@FE$3C2 z4n0R-W)V0+sH>Omt|Azg5Pub(V~HP(mj%&En2k|=#%+C&F1e0%69-)YZAu6TmLPu_ zgmRX3oCIWB^6|2NZyuS%*m0RC%L!fsjMh@8TwQ>4{b81J3=&%mkrMGrBklGgTy>6v zqB2A2@=ak#_VkSdvE8Bh?@Rg_m#_lGgfNTO0z8!dEYelO#X<(iSZ6p-(ab)%LZk(- zK}O)mN1*vYeOpC5sb_v08^ME~t?{#>T@MCQF5$-996h5s$eK<1H^!1>V}KN+k4*rx z=A=m2HGPdd#E%}&-xw~L}I3=Z|e zTuH3b(-&f|U3CQzEkb!T&rt182_gT~L2woOx#+yn(+vgdO#)HMeuc4*O+`5YZkEYE za-krXN!v<{#Yus^RIr<$07!~9@b3bt(CPK|*m%l$*T~Ws3q@Hn=}9z9c^Ku`lR{~H zHrt$PI2^(i<(>+>Pf+i-3iI~~x5$AxRNoAhmk9Rv(O)Vs(AYQfmr>yAa=+0ySD0lO z57BaG{HFlYN-pAvJRb~JC=jD0gk~JjyfPWz{73?wsO4S;Auc5wF2+rI!&|~&qvAIX z#6#b7rf6JrG6lik<0uffbpP0jQgn*uBZIwnoCe?D6bTmyS2-HXkA`rKS-EgfResnU zF%|4AxE-h?s*iz|(4qx7RnPv7aKXcIuV~t_+ILte6wu!US&lJeqDYVmErD)t%}O0~ z`&Eu5omockGB%hS32?hnSK5gM;9z`NC>7|X!ZhRdMIwZaJtyt~#F!B3VpXzJi4oCR z`=-+cjB6(g^Y@4*&%pkG09ZLOea*7Lcu>?#mtY@4_;!RRo)ApsTPmvK?|SxE*h%-s z$B%OcatcDXFN%~EZS_&F=d5o1+Wwv)=7G^3H^YCpuy{pCyG?-VZ?eUQI4K)~^0v*=sudN^4I5kqvrWNmMrvUmR+7h(KTmeqinp_;^QGSrX zoR7M=^@l?IWmXcHry0Oc`bnOu(IIz6PX(ci`68+~Po!Ux!&tHxUY{jFM=hJgUZ;`7 z%;suH#@bRW5ac3OjI!X0r2^D(b$J}up_~rs5-VChOvSPOJ%Mzt?y+UVvEi8zZggf* 
z854C{@C!l_diwIPwp?E=QjLt^T*G~PjHlleN=7X53HKzz6<~hUN#m|~w!j*v&+SUm z_(Ap(Dl$Ts{gjs&{U{mHnZGBBuP#FatG@7W(XKGwf%rUe)8x;9s2?NM0Nw1zXF>X- zQcWSYr8rj@xyW3u@L03XhIS1**rcFNlcZfMl(J)VpBw)aVVRYR8Os58&ko1`1ZNDV zJN0v;UFFYn2;~^}zw|_X0WD76iltaC$ZXHXC1&UU73t(NDi@hG&LN$PpKXh{Kvoz{ zs<8zv#$;^y3jhmP3+o)dxU&$K%j-%4coYzehCz0A@hd?dxR%z@d9M}ik1_l850?p6 z_s1d){OyYE&dtBoDi%Fa1<=ME;!tuK0qir~T zCd}_NVEUSjO9fC_tXxRmNNjL{X+AT*Wl>{%S+tqRsI5yg4irKb$;u%uV^5ZZYre}! zAPdTYA`{$Dx?{A+BrT`L;%Xt3jqyt3ZJJ{KT%;nRs)od;Go;rJ>F7n@7UH&H+ldq0 zg{FzNpBxCT7M%$O8#K0LECPh3Zp)nb@r(R|kCJd-rf7IZecS4r;}wy#nyfD+&PyRQ z`(5leNWqn6ebFvD$r<-gp=FCR1bA?yA8|#D=4|c!|DYdSn2`yrh!Vkvg}GC4%QpKG zX0Oipmr!pDta8(aeJ>Q+#j*jhBD9c6QiB6X=qcfaWyal{!cEz>S_nP4|E zn?m$0XJxO^HFQ|%U%%pamn-@0O5`h9&oh#pBnZ`A-_p2Mk8VwgAHK*bMcZ$HT@lV9N)?UIsW7vk35-D= z_3TR50x~CX4-9P+86^SZ77S4oFnNx!6&C5HsX%e5)jCU!*MvIP`Dk~^6VXhdtKY%s z*a5E-H;-?v~#W zqwaw>fm$B$${%I-<}JjOa9X8RM(u$(NhF=8o}1qe{TAZulZqNiU7x~g zd|v|2)Gg0RBdoob3ve3` zNwejHk$9#40USBSMWH>m6G032;2k;V4xat35uLzRi>!4gnA?%n)Af zvC+BY?)+QH4A*YNMefAiNE*kuSE-CuGmz+zajf-6Fmqg82d5C~c8);TjkWPq{3D|g zZ=?xE-IITA3Cn**xz&ZtOsss4k%~PcC7Z7w%$Bw>#fm?q0BAS3@?-`uOVQT}x$>rf z#QCY%<6fR!JXYX30L~C{fItfb&VeWpQZv6Q)L*KY(#B!s`v9l~Uf;S~Gu9GK^OkTk zkIh7odYnzFEN!vD{Z0*6S#C|n4x&jteHGRFwg~qU1#DJ8gZB%FF_{MLiqAYiDz`Xy z3otRnkA#@x9OozxQwL+}UPSL>)cijZ3gdJT zQE|`LeFWOIs9+_U4&i?UlM$9H>`JlZPks*;?lx=I_OYL6Y62$+`ApGY$p8}Q6j)a@ zJW2|4tXYwsOED(IoK6?T5Se?s$ACPRRqLu~EP|Q@HvV&-QeAxlZR6g8%(^N?MvkQd zT=Myu;v&}jOa5Mmq6;G_V}g*lxjXC>8$QmnSwes7VjIE!BkmOn-_$W_>G~(~JQ(az zEU-;2pvv;#a#>mbD~N2dKg7y^<=I7R=r2!p#g+mr*)5n(*+awmJp{Vrv1TsEgGHh? 
zRudScSt7nK5CIs{M)%?;{YJW`6`kT0qWNYsOVHY<^4#z=nT#gUvOU(unt$if#p%QL zKNy<{Fy9ppUPl#X3o<8$6hChOdr5}6$?)ISIk2OZ&NX2P#cb1o3I4I-1B!7t8K3_L zsoaW#r5>UJd2rtOX;pmqK2pn^uUUK8L-uy~#zOGN&ImjEqE&A6qu zX48CvEe>@$X@C67`Mao9CX%%;ek_o7NQduU5}|I-ow8r}ms7Km$h3lJ0rnH1nsZzI#f_pYmX-K9F~x833JB$>s%VV4 zdiEzLW?c3uNvVLbP@rt4Jd7&m2{wO--hpqaqFg>K5H4cH@Y?lh0eqjkbMeJ__0{~H zv?P^Lg!5H^v=he(6;S8{k?JTcEeGPoH=tZ%P0=0gQS~2?84hp}&6%QS<~f~s1Q0yXX6P0?fV)A&Ui&05N3gFcqU2&4;?m90og5G+b<(zOWyi zCO6b+6?&#n*He3Byh7tTL6o_p)W?Ec=alL{%o8bolk^Y=70`UPORSROL_wJtl({9c zH_9JdI3uhTGfR7RR+p^=?DphN4;oK4>$qH~+2Es`DBt29fo4NfFFp)$^v40rhH6H` zxL344T1hk$3j44SzXF4?RIRgeo}`&_GJYK-gvRfjnv4sJUmZswt|*Y^@`Plo_Y-*z z6~lw_ja@DTk?CwFoLd%(;;&gM*1$4-Q1;$gc$X`|LtS0Ig$OPRRXN zPQHtl$$t>RP=k?QYSYiGmA{RXESlE^0{C6a5&G`RYeV`B7I~*zX$X$UKB7NX(@#_k zH4plquGmpDrD~sK>9n^9mw8z-WnEkVBlsBLp8IK1WL=Ey85yqh84&ZDgL>S`!S_$j zFjgV7kVWxgVdkLYO>w?M5T!16Oq2=xwaRB9Ev)kHOp0kr__APsl9g8fp<}W4Iypk< zJDm%=Nr(%W*V_KJbpb3|98n=y1$xEU zV?98Bzlkg12!(#45WY5jA?I_3JgQzTEQ#0b_(o*U99@1;&n}tle(c0YVu$s?oTJW_ z!}tY4ToP0z=tn>Ox%@STQd{Gf2r~!kCRzC-rr!``L8gPEwMZj;LY)xt9#@N8qJ%I* zhu}re-vHVl#9stXea0k(uk=S@6 zzu$!Qhe3i z_`%qGa|1N!B3bZa3jx%PTRYJob;WN*yF_@grhWLIw*c~2o3Sgcvn8afE9*6TM*7_u z0{vPQeN#FAP7qDMtgJJd650O<(PD~DgoD;rQ07Ql19B!9ZSr@7n%7m*({E7g7y)|39thaTZPa7qS?FzaXWn6quv}?gQ#Th5HpKDP`|5gtooiRh^B0;sEw{|d4`GXgsDV)U!+IM zKyV~WtE%VwY+W}HquF8leJv#wjyCt2>8jj`L1G27IQW=lx z(IU}81mQJzfOIcUDz=XJS%JvOA947DknEeR9mx=By-T16u{O4V+#awa0L7lO-Ofy! 
zw~KPViy1rc4%==P2n>QbL`O(d)hmKrdqT0Yu0@JAn4M!7tBEXx=FJAub*!n}OJNXK2eO zt{ZaNk1;`J5>pdXSP!$|bpqY4Xr0m3{6zqt*I8C{sdTJ&K9UtR!Acb8?Ls0d-`v8{ z-c5Feu!xQ0VYc^fkp6O4ubp`Z1(96G8slLR=9QG7X}Ek;n2UyBjNf{UE^WGd9v~7{ zkENN&5|vSuQ%FBY%W<$jIcby;$f9~1V6@M2>6R=fJuB2~^rm1y00Q=G0iaei1 zK4=;Pf+%1AmYpJ6O0OMDxyuGS;b$75k_TsLw6krGKMVGj*Pf2uy~%&1%XM^ppk=7md?gtV3v;D}^DTDGWG$=wZ>$sFllp(WJ(l#{4zz>9)t?cbf8K zgIuh|jv#&{$lp^e2b%HcLYQNg*2e96bU&z8tgU?)<+Lno@k`PEYzHehEfVj|l5l2+ zA*U1@HIu|lX-HlO2MO>yiaEe(t2n(#!$yX4BaaKP^o$WAqF*_)quwXas)HpzG9kca zuO^xUUq7Zm(6y*<{y>ODgY`7iwl9LeLohvZSZ3SzMEpNjX98YlQT=}eWRXpfBZ45v zzO{6P76i@KHf_>|ByDBqCb>y&o8*Sv+_s4#ASj@SAj&Q%JE-g;0-_Q&1zB8J6j2rd z*;H^rLHU2a^L=w)e*g68^StMK=bd-v%$b=pXU?3Vo$A^b#U>p*yUHCnpT{Re`~7AK z#wxEZPZsQIu`|K^@Rd#g_aN-yYv@`qcVsCb5<~(JRuTKxnezv{)GumN7W3$0`EIkVQ7PWR1q@qTLJdP8jO3 zw|XU>5YC5iXX6(1<+WUTme=%!JWm{@WIG{wbGBaQqlk%&!ub6uRPXD1!T{1;b?Hq#Lr97QxBF*n>N+-2sr z>7)QWEXa+T%n=xSW6vYO+^F-lzKE|1Ab(x`C3lyZM?t$qSBV!y z!3(s5ag=zRZC?|@$D1+W%29?i`oSrem4?O!;wu6uQ6pEF{aDYIxK7+w z<8ILi=NMK*7=Dk)jNx)8<#IY9^qm2e)p%ZnN7$^9)AmUps6V)zOXF6?S^WTRnY0qa z#u85mGqD4$UG5o2r#AnPJpYc7M|SW z%94$67@iaKJ+W{l$tK%Wu{4JX1-Rot4Wym>Qb&$e0?qyDb7#+qd4tfdajSGM#>n;@ z;`(4Dcr?xuM1^&uI|}JdBI!td1V$;wi=tc+;xwYtbE)c?LtxZ4`7kj($4dgtvc(Hp z;y)r?j7BMlCzfk@5D^PgC`#A!!ZPWWGaojN<_=$YEnp%eyQ3&tY_W$nZUpDfnPiJQ9LN{K+lz3Onb5NfN_V2S=xo(UJf6SA(p*jUrVtmX z33uZP*S6JwHfFs3_?Q4^<&7&@-V4zp7}jxMEUmAO5S>XYi#eu;SbH@PvPknTIwWR` zqNdywREi@-@OFx~Ou@QEQ#mFbX0YfJ>B>!Q3}M7EOo zYk(Q}rv86_u}nYk#Rj%MHJH6Anz^EGVQfCeBPGMjrW{`r;d;~|P+BK^Pmtw_2>?3; z|KnDHlz6@(su1@+f6c1#X#6Tmk*=cU5UfbPk{l3D8Hb=W{W=^;FHW>ODmE^;9IlQJ zeWv!=j^sJ1`h83wjXZtEelb_i9)Z+Hu)J#$IbA3PYmzfhF)kMg<8Ylp`C&ks@!8s2 zO^&UrVR}4AH5bYYl`Z3hx*c#H!fF;DJ(jo4p3)G50Y%#4bN*mAHU%W=Xpv4K!H~)H zNnt}{A}b~OJkWRi6~RZh6ahF<_mGQp0) zR1wZuYGqZT_oOht!wB0W%kp!AXkTuRqJPGpMfy9dY-XSNMG$0*ZpUT;$Av{ZUjpoL z5S%gp3Sk!MG*>5c@MC#G*2qkb=uA%t<7*|R6OOml%4r` z(Qr#nAY6cO3Y5#=j#D%{|FPgypvlHk47SArEDLoGz%DWRqELScuTe!XyERXzP$8`C zrM~}DC~aEDBL3rF;+b!2pQ0z`isD0@ZQ-B;z}o^y+K#F-`^!B0qwtlb9}7z^9}!A> 
ztY#}NiXwPF%>?Mf@e2VilA~b9FS8`PZr97#@+2dKnqsl|fzzDjBe!CJ+*E{f;Mxlf z+Ea5R+y5ICV*bz*;m>0N%)V2QUJ$@BQlOeubZHz@GNi+BX+rt6&_A4nY>Hp&|nQ{xfQHr$nk&{z(yB5N|?FNsi*-l`;t6a zFC!lTpSV>)n0h?U-G#V96oo*AjXU-1y0PKM6+o{G@Ulcfugeu3e3N3k_67Vv6QDV1 zzu*Cpl%9n@$GS9hKYAeu<*WH@Qoe+dstpqgo5CPZD3#@j@6U~AMY{%-;W0FF&P9LI zNr**}D!GAZC()XVpfRyTkc+*dpndxv37~YeyRI64B!8vc<<-Id7m*@UIdxchCEFKq z)5QiI%7m8{0%}4adPn`TxLc2~s*@G+{@>!+?WOGk1i4;_S`#0_ReE+IDWgu`#QVPu zM41+|dBl0y%|)8COLeawuB|%t+!IIZ2fxaK9urA1P7&m9v0tO}ZZG7C5aLk9T8TZ{YNYZIQ#l{}AR1;|M#VJ)#Z10|fWdEjn@EcTKe^`b#`4iXw6& zfe+Pe@jU>KN%hV1V}VFBhV6EOszjF%iqy1>>l^+nf~=Wx;n~*LLS@pziU_?eih?Yk zba}KC8N%)n2peYZH!Yg3t0h4B%R&VY2&548)pQBKXGCVLQ|;yGNc>EQIV5SqVf@qtTMX~n{Ji1`>iYPRziyV~R;zno76*-)AM~HV5KxJ{g z<2=Pp5YDiGj-+kVT?M({4@gU%E_!lyN3ckSBDYK^)#-I~%{s9J3{H*y#>3B%liwX6>h1Gnfc48ewwHy&N zDP}}!OVOjR7wE=W&H=IbfoOyk8!pQIUbod=!E}{5um10LgXqB7HPi^fy8;n{cE#yD z;tmtTn%ag$xU;n3)B9JEZZ2I3wJNsyQ4O$5*gXPh1oSp;)KCw9R|uK2hp{r^&RY6T zwA@O3Sb&*?3$6#BZwhnu@gI}o6r29onM|KHb$T2q!i8#TZ;LxbQtQ>)X^aU`E+RS| zYQ#&~t8D|Amj0c+itCIv!5036#4u1dMs>l6t3|sd+3sK`+b#KmU>As~26uU`?arar z5=MANuXBD{ONBKx4JAJmR&eUw zPKWJNC4DW5l=aAq53#YH5zw^*kvExX?z+y#N%M^J+tJ zrU>%lU5 z7ZzyRq`Wnwk**cmfUZ8w@2*(~t2k^c0xa^1MqERY&y8yTD0-bFdiMxfEd&HaQ(w@s>g6N_9 zPMyZN!P*P+nQIY{?+S4tk@O1gb@0BCz!F+UuI*u~l$bR9MF7y1M8RY1Yl-eDP@fR)AT@;D=He z`#%BV529ycM=QqhIn<`(C~))3g2)IZftv;3#!Uhc0i75WL+=;ujC)fwpg7`5zXCsS zmRVJb-VEUM6t-Ok`+I*&ib?%xQp1V<-+{V7y3bta`UeC#VbOtT^~rIe*(WIl9?Ulj zu~_qF1@$8hP9J^>SQ;o-)BjA*?tE?ZVMnuA=l6h?=hgzpFcyeL65tv4eLoFB)hP&F z8unBLxPTn9VzXl?F3ZwTSC)ymIJ@N7%fn400vk-e3 zhd%>M8^RIJ>qc25g?Z#wCcqWXLO9kvxI0S;yBg0diFOe99N{caefAk6xp(; zr@^;t1yNfcIIP6>&p}&AdJ5W7-#Z6J6@)RqF2MZh*N)wtqAih%X;&WYp*Nt3z9ZZM zje9Ojc}B!_p9eF;v9O^g$hv1(3X7#|j2Sm7@CnAig)s@I~^AMOvQF{L1o1;2^{eLj4)`?9l#Z zy#N4f8(NxTxt?8(dGd`)J44TB2?jBi{Y%dh6K)@6$DbQeUIZ}HU=CLqh;}7A(LERy z-}e$o)>#>HR*K|vbxRv#|3CBWvNSM#jm2>Ss3D$ig*a7&%iomSg6|gUdbhIrV;eNy z1Eo0`ae6?h+(x~HE~-KQTKDA|mS!!^vd7!PTwGkmhL|Mxe5IB@;kbr01MVltTw#dR 
zi3BWL?E)#P0)@mKB3uz>a)$P`Uj?9+Gp6nv8|s-7Cqru=qfuXU3G!!$2~k0qlgo32 zc#;<*pWhMUJh5L>j0au=KC0vPMY^z8mH!O506QBd*U=f)L;Py-kbF7;^L%(T#MN)|?CKLoYGj<-V4oCZ@nPkHT_=r_uL;$#fcL>TCbo_K4y`_;r3B(E zk~}-xm&B-^T>|XFnxjq>LJP&vc^jdY z43>Z86lY>zVSJGzuF;Y`&zv$X)_bS+9fh@#WAOzgqNpHofQ}88NSIt zOVwY@erhk5<3D<)mMm1#;_1u(^=tKPUw&85ZWT=`vT5+CUkT+?46#F;pFwEe6yma@ zXzSWv_D6)77b$2fyx8u4K$C-Oso0ZtXTS6=AjO>5+!>c=&(jz*CiLw0d)TT|$9+*y z*5iv^F}7CAq?NLX&B-CkgH4olPwEH1I+#VI`^@`*%$^1KPcylEa&0FgJ&j^5Z%h+_;bse5*Mn1%*+*l0ebO&>7 zd~jWUH}C3N8)J@0jArxZ#{qgI+d6H^X{+T80WP2%iBib$-@hNs*%M-8EWW!Qv{{S_ zwyKO7>!uu9&S5$=#9e|svMnm-4o1HcY>vtbAvZy;+*rSsV_UB_LohpS07RKNEg_%2 zPiCaf1+Z>Pq>C^=yGraDrI_qP4iOaB<`}^jSrjuPZ?Pfi%t`z?an;yefVtDr))>=8 z!r1Bi&W_gXnF#oCn4bMfa>7C|8Yc^KUE0_Jz$1tJPtCzX7t?A5=Q|ts)qXxN)AT>p!BLxPu*Z#_6>;sv%vB#;7;n7UJ5rF|4Ndrfm%7c4|gj ziy_g7w7eR$_@n>QH;09~9`qSD=_+wd_99{Ih%Y=6$BCwFtQ81#B*N@zsC_XNs+W8$D4HWyT_uMCJXi0Lp{mJ}rtL7wt03%L-QmhgaCYOi`v`OVT7%u6@xGsRoC1SYxK;! z_g}!|b>W8~T)`F$S?W%A2r?sRb|!o6k~}WZ-k<}M6#Y+cW>6S7$ooGkvMk;ryDAvF5e)+ zxgbLn=?6J5gmPbq-wW_}aU3A-pJ-YY*KCY`5}NAALC8PZW=G=(BHbTX%C%qj@qzdw zP07#e9RZY%)qd=-6VF*2GZ819YmP9ozHS$S5R|r`&%j&7?Vy+fo9wDhijv;wV z4k9ZAgHzDOrkzQd4XSupcd>vT*~R%IBfUB#ITAn3kY=|0nbVln?-zpLa3X^q`VUbq zWr{Z@r<}KiO%?$!gfSgp-fGuc)^gHi2{T2AlQopt$x6D~B?8^@=?+ra!V%sC`!hPR z&YEV-n3K-qQssWOiSEEH^3v=>s|r}^lU4C*A+!{VC(e*PvBhrCW_uk@Dj7gS4YTHf z{8;`w*Mp!|E*9XX#vg>Z%c3=I;;1wlSBc{7iVluqJ;{F5iJ_IT&hGr^LP{Z>+;?gd z?7HI(g1R6De@&h?3(}hye(4^hrO_Fa(YqyB3-$T_um9>^Agev9QpO_Az~;Q;+9p+RR5C$D02hRDM*+>4uvqmDc6Mp<&-ROSHNN zE8GXN-)J+ImZP!p6#q7JzuB=w&s5z}F=&&!1z6O%LP{pdcu^2}6I^5BGA8wzQ=!cr zJiAuHjRitnQKDtgaPBF_ zzeH0GA{(U50GyF#gIKVa7YA5YvjLT3;l#PW*3Qe$QH8_t_G+^5^^c z6_N}xQO9GVNy3Gaj5kAC2v3_+duz!6G5Anv6Xde98_+{YE;d_~jM-&O2C%n9XeiK* za5>(YB+D2sI2k&ZUtKlaLWbjB(asJNyJzWN2(uVApd!+3f1e}BiB^IO_a}|(BSVes z>l+6D=&)nx0qq%>CZBB-A_3Yf`aj~92<)^A6w8GxISEf z_>5&1^1_THeZ%60cuS;0`AZ_ewllq)8IJh7OQ$=f@cXj@x}(36Y11 zJFt#kCBp3F;DnfSs}QQmZji!09@|jMR_+u?cM8uGCkf}piK`1kid0yMzY56&uZ%(# zb&XD(Jzy#@P;5-lf@1$=>1JlmQ4cT 
zmE(+@ZkOlV<)Y1qq&tpqkDrV~59jGx2@4kM%=3qs$5!DuB1LISL78yLsd6;-6hh%r zKWBS8Bbj}fq~i*aE(*a_hvP}nSyT0MP)7>QGdWfnDh%USF&h68<{~yQOJJeIeud_| z)BIX~OhUz2E}EvA&-Q3sE5gjn@4z^8fm1MT^u`xNP<$M9(rJ*MxL!!6lvM4#Ay(Wh z*jeFMQXyy$rsBH=QdOF3`aUz`)hsi}Oi9?anFMs?i62Ir9;rCxk^w(X6YY0fF&~UC zg!E_IIbV-UJ>9YrPYR|w_|p%^|3tW*X?X_Np=ep;T#~bHB@PzpQp!;r%LKaMJuNkm zYoIuVqOF4a1!ik=UHDVG%KRz);ExR7eWLsY<8Oi(bY^17+qRWw3N+4AI?5v8o=)O; zdikjkOLW7StTRsupna{h(a9+E-?K!_!yLFlZT_zS7dshIS$6fr*kV8zB;DwS@Vq9- z!lP~OXj@{MZm5$iak^+;(ZUsNQV#f+33AqCinUJ~)ei+)XdCE43_Q2x2p<>nqbx5P z6SYLXobg{l<{8z2!$%m-tT7Wu`Vl!1x6HarrkOCWOi$Zgz* z{e%cJU=iya$=Er|f!w|7o0r73B3&vh3k)1+;+6Q5P`^hEs}vof`O`q|hqX}NWOu$` zYKf7ZBYoV*o6(f1PPZd%c!+<89xALNE~n_M&9JJE#L0)&0Cp&l!7~K7@tF!Rv#V1I zW7MWoaH;suL@{_QSinhS9?hvne5ujv4irM4Y-ntV20i;@t)&6x^274v3*=He7N_J$ z2RApO{+=qt?KZD@9%qX4Od5-;81XI;Kw-MmT~yx@WmYltry$pl338!W&naHmt63hS zxFMxm?2Ct!gjT}!wHn(UVPa)@xHpDG@Ofk}>7^s0-9U|8HJBYX_`;E({sdm6Wo_gM zp;QcijP~CYWhVAgD>lAI;zLJ)nHvqs^JSg@Sk=+g675Ix>f<*llz4rs+9bi$$7^vCqX9qh<_k~+eMJG zA#yHyR&)a(rrHB5_rxJ0vzAN&>CO~Do_$=+UO+Y3st0;fj_F5^jI}~2xpeE?LMgtU zk$kvuX>)v2WH!vOO^GxX<0@fpP7KiLtm{TaAb)!vTk>NuHv@bco(g_R04y1)N_jc7 z7mBv+NVltt;AO5RDWN@3Nfd@Mq{V!jxNz| zl*%xUjMAYPAh-v!mJ?$+`$MsB34}6@R_{Nr6mN-gReSJ3(;i41vH~oVTxzddWxE8q zKnyW5SYDMUN)b@2!*O4pXg&%rlJ&38H)ut(3XkPM@-{(!%rWssxs zUn2x|^_RGG@p=)w+O&9nyp#Qao7OFFVh zw^Qnkiv_x79lEsWQPHXyPMCkxvxT+2sI~nY*^lkG)O=#t31_ldTBm0>HTGtuY!c#c zWirBOyilOO)w%@lmZE6$rh)yhbQ0x@IhF$F$iMiIV`}M=J5U8N`He8=RbL(FhJX=h z^R$P38yWQ96y#D36*1$jj>bypq^KTzN89%V-|T3*S0Yh%;g@p4}<#wi874@sq~F9G}lfdQy9}H!jLwaQxO% zygkkfaF2sMNWj*gseJ>NCtYjy10m)H!>{J#p?F!Czq@?QN84w97Rb##cYhp~mx`8X z+hOb;kSEk*ESN{}K6a+Lut4X<-v2pg#DGBw24W-8ev=T51Sdyp`++bE zFZTn>v52D;Q;!35p&T4?G^Xd!@@$8qSCC&#zB_DW7UK$mE_@4{+|hD8G=&$}h?UbS zk`^Ssp*&6y!k=Xc*df%W@UKL|LnakWJlmY$+*nB`=bQo2{Mu6GV{y#qp`3Ty;+D8o zq)SZ$%5yxHomhkFhBV<3f>e+Th(r#@H$+S1wUZYQh_Zwg@FHete^3ac)$%2T*E#eg zC#E*@4vC^D^AolebsRsYd;tvUBWBKJNpGAhfc#nCAd8>Ke#V~$DQ7bIS_U-gqBeOE 
zZuCXcy9A8%MT|FJ5n?`fa9)bGL{WJtPlk!9mlv-;nPikEzZ88ao)QL!szi}smi(tk z=e>%M060s2^c0Y+vlMkf>sqG*z{J6cTL@c{Y7k$pz16vz;dLx_$`Bm|cf`x7c^ak} zt??EK;nU6dYE#}7BHcLBUAo3_z!C&!6)-za$}`ZZBh~@m5aPwr>Lgz>sN8~xU7#P# zc6>A0wTA7F33ELL@tWe&!P9C0&VQT-eDNy)d~JyiF>Q7_goOZ$WHI&@O(S(wW%6qh z<;)u7r5Se$pe|Uyz72I{yg>^c0x`QCM9TsC*BT(Av1v>x3 zh#hc5Tp}QA7x^C*<2ypKuFAG42e^k~${AqJegUf+b)F*xx!=o$E3*roDAd`d4Sb@= zoLNJ#yXa!v_8p0{{|CH9uVomB$La1J`{nx=^IbN5f`kW}fhDm2It`|WuI209Ie}iW# zo$g}dgeFqFMv&RZJcN#ON|uIRNqd#p366^enSZP3#oU;M0xrx|?o0ZYigD1{HBin< zqs&U(Q7@1$FuRuV0v(Na1;EidjtD7HcWi%-5xxmn9psRq5X%%Xow@5=G!?94MKTa~ zgbF!&Lh2>7dc6W*7F3hU|YH@{-{S+nxB|VI?e~+bG;Z!qyZO2 zCuL@4ths>atcMJgy^VEYjheT53QMAw331UG ze3H>BZqKpRoa#!;fro`reab+e>WcP@Ow%5PE?h;<5$!S!7ACIc%soo^BLd-P)4|bv zF^{f#6Dvl}y~x2~r*DC}(Cqp4z~vKUD~vX=U%_96h^5OddzQI8goRFe+NV8NtgNU6dGB) zdN~6~%VjsC z6zFmhrAS*E(}cKOElZed^91!$WJcq8ATl>t zR|_=TTG$3Bde~JpU_PQ=jRgYucJm;9`9pD>Nar$nYwb|%bv2+%oZS$!caC&$s)!yp zU5H;y20kp?#h7&sAUvzWOJWEY$4mk0jsM*}~vgGT(A51$#?`{5bIcW~-0#N-Xc|Ggd5^>k1Gvfy$sbiWEwY^5}6T;{`ix25& zirues0;IKZOxFq!0(Q2rY%#}}$=I80X8Q@YNGz&i+l;3L@Ga`j_!2i>U&~6Copkp1 z0bzWL!$GC^g$UuIYIXck_atp zj;r(x`(+~1E_nS9pfwG0!Ywwv6~g7|WdoU`8|aHo0-a$SaX#TkoFd5IRY*##&74yH zA(%PX!7+4Pe*`!)t_~dJNR?d11p3WIGG0l`cvYC2wXHM~=iLS=fk0%&<4IBOOrNAM+F(eEcrbxOSDauU?E# z2y$_|WWi?xGegi*Sq)%AjHiXT6vU@Rtk}UowqDO)?c@eyqK)i5K|;zk8DLr5KTMbf zqlG>AI7>8TV3tO;OH6-5kn?9Al?QEt*Si}SzD%FFZ|taN7qfSOP0VWSCB$`UO@}MS zq9;#8j5H0vgE`XAb$zhq`200u%i#4j8utp~I~|?PariwvTUJK+=J&X_mKLRv;9D>-W(ag+H{wHv>v9hCrZdco1h^`gFPQ%0Bq8pHBNJEkWm3HD zeV~jUx_=>d6yX-9=cK5;F(S-;n~U4yev$riuY7J|z5AU3!%ry|i!i&`cVR}yUMt82 zVLx6=nQMgjD~!bWSF-eaCC8c=i`XkCkhw4yxOoT+(UZ*;dg{P zqYmUpLjI{=09it^5VS@4D_ki!ll`XoOX5D!yoteX6t6@r($;?n$c@;+vAjgWi1*Cu zd~qiwaz2MJ$i+dTU0M8>Z<)9lb?9(m)TO<-Ay(_z?8M6jekNCvbFy5x5)#vRPy9@@ z+rOSw2=^cSEla`C)-sw(Z1ylID4e25^o+(7k(MwNOD6K!0$h#-cnj%Ba~vehJ*b0i zD_o5kFTa;3^l3m6{{B4)v(qIb@v=ad9OI#Hvu6H#r1l+IiIW)YCCn^uRalM`grgFt 
z33lV??0rvpO*}5h+#^EHU_Ab4PB(WpMfsPtSJKI8cCOzR{6aHT<`B8P~%Z4|(Kt)Q)-l^*k04QZ1uIvQ69aWS#~ucejKxCk;#S9Gy4G#Yn40b(|B_#{aZV)DFDO45PGo^HW=Nq~i`eMxg1_#`A> zsFOp_*&>_;sgYt9(PQzXP?w!^!aA6_$!|g6UTKKdg$rAXc8UchYCI6Dh0x733y;Uf zzw_^_)h)Hf659)*6cbmcpyK6j2@Mi7HI`K3P~$b4p2sa;BT_fxmf`-j@A9sOknziS0J|3*Tylc*98 z2=fr2^|W-1G&Q}Rzg&-jYh@hxM-o!0LPaZ$r!$J%+2G*%&-hEasa4m=#_t5UkUCSQ zgU$r(dlt~`fs%k*l3e!Ugy%rve%+9q?~twMq2XnU?NEuIh~(#a92wBDp?gG=b8?eF zF`z~IV#=QY%_J&@!Z9eo9b)=EGj@zCMY|0O_OTG`(qC#x&ejLK;v85Cu26iBLqt142a#My<30fh|9st!_j*Rm z=91yqJtVlyFj7Xb&;BAeA-3Wc@(Yk)AwK|50APY~o1HxXVw=DZH=?~PRZS)F`U zfW--?ZA6Fha_hg;u*K!og`NTjJ+~2N<`Zs*RwPWlAj?NGlwmn3$MyovxFl8%|8$U0 zYQ^5ja4gJ-@&FeTj*2krQH-V7wf_wS7GmVZMJlcr=?}MP$%CXkD2O?v6VK>>>zP8J zNhAkamIhc7-T<5w3tSzz`6xpwWAtX0uidea5OV^_raA`*$i$0lg>{cqlOT#xraxhz zn4hIU=}6&CSQNJAxuCi6>E!}6cww}Qzlz8v9=1@^(W1(mV5tjgN&OvB=3!}QW%6ku z#<4((KU}tv6Te|0Zbr^G4#i(YJLAe2(HmHy$LIbI=JALXnQwig4%_{smb8&Fq(x)B zFv@_UfY0Z*MUiUPW(jpu;zNlwvBxRMw5_8SHLd^b#vRqxd{{=w)jp*`pjNgiMYcx@2rpdVm0JtS;YzrU(Si>4`28=7^i;{tMy~VJ;SD#<57M58p13T-c!}Y-Ue<^xw4? 
zb^lMgRCy0!{y3u>BWGVU3Zi(~$%?i91L@*(TR6(`6alRA>lbjomPa>Md!eTqy`n9R zysbc5{X$?(T}xe@q-SzkHhDq%_&Wgpc(W&vxKWTRSI@1A!?a?&k|&#A9OPgqeg%Ro z0iG))7v0Cgb70XE0(xILr2ZtjVv zM8giEChAo5X7ASC*FGdSKnswRuPUO-JS)l^Y2miCShZHG>)%!vm+RS<4aLOOP+OX~ zisj6j_wfV8u9Qc*;&>6xfpY@0<80Apl45O+av0%}EIoS?{qdX#^IQ`+)4(fhJG<3* zT*MyhKv*(tlvDh=0|fFxjvY^rkE~lup&*WmSzL^Z1e(Deikvxu10}aDUo6_Wmc_B&`)j#MZ*)A{Ed}!3#u7V&9~D8tOPEE`O5yEPA+CDElGe7kLG+|> z5^st=zHB|eMC+iMsJ9+1#Ggk9@bFEmk6xjf)GC@u^a*fpjkl3s+DnCo=AbklOs+%m}f-mN+K((Fi5AgtHh>NPGqZm?qd&A#9N+qx!+T?AC;B?4W3hRd1Jo6{}T?vePsX!ui+=W?tcrZiYrVqFhj{fr?17&4)qEv92OBPf-VK;v%7zl4W@J z48*enTptDNW4T0ot=2eB`sKWFt|+R` z4g*i=ws!>a4~+*}Da7U7Bzn%ifhwA%?d~eBxuIRyQUb<>T2i zYsdJro?Sfr|Ku*OTjSy_!P&P5INB5IZV%w{Vt~?lkyuZV#ZM;cQ8rI1u|gotuMvl_ z@tKTdJZ-_na<3g~*|wKqcpN0ajZ&y6+HAQJRbiGhPR($tMA3_5EJsn9hsvutB-j_{ z33aiO0?u%7sUR1fT{6~a4+*eD&YeCzj{3NNC&cq*Fl#if7m#(5a=&JQ8-+Mm1{mzu zJ+b|cKq##X*@e_2tem)ZFs{_I%etsatQso7g%sZrN<*>nggSMzD7P!7f!_GOXxACx z)NM+m@uxf=sdfze@`#wa6DhqRvMjkH>FOSN{lppi#i}3{Tqd|79JaLAlYVgRSF&d~ z9^X((*RGx$Ch#;oTZo%HiLJ~M2>u&_VG}pN%I3BA&X8o}-5r|pfn5O9ySNfNj@lK% zb)tQG<3`c`Y&r=O4+^kw%=5kvHt#bzqgZ!$AoHAs3Omj?xAerOLe14K)M}i1M`NxqN?GVG-hUn@^Z9!~JA0H=**lly z$nxaG6yri*1NIONr+16gmSk&uX3yH&oJy0uHKNy3UCA2vjywr*L^V0RZ?D=* zi*ZJ!Cp8FhL76vt)75P&g&{EHi(YbvC|6KBSug^{J~rMP6z0_tqhaRcGmehUdN!xq z$^&EScSJ4B!H$0IHx#jvBoKBaFCoUY4X4z;L+EcE{MUp&LnviQCthO9sZg+kKrgzy z^Kg+ad_x(7miARggt_h8F-oU8E~Tv{3~BTsa%WoqT>3r|nBcJ4Gi~ zsluzB_pRmH$y!pyXcXi`b=7qL;fN3lrkNPC*V`G%lrVouTg=~&U#Ul%E@7+eMj6NVAzv!4JkXIWCqE%F;e zUB z+b}xcFUlP1E9$5QYt09RS79KOW54bG&FZMKnU7UfK|_LQeVJS)sR z;wnAnh9er^lW4@(!2U)Ua;y!$SgQ%bpIyYL7q^M_XBSs>CS@f4kVDHbaS(|)o)P4F zvoE<)LE=?=<8DYa3K@A)O6R93giu|a)b;ltJ^s-XqfG>(=e|>C#M63q+RAVN3r{uf znx8{*w`VTxqXX&qK;r@cc#C|}a>rzpj?|CFb)qaI_=zfbzIwqX&CY`6QNYSl ztg{fn#Zk$+ezg)MdxMZfGHV;7YWR2j*NK^B}+a?EA>o9Z8RMg#z#c*rt_sE+|>#|<2N_dwM5rqCq@Ai>xv>N>)gcFr$kvYW=umM zr!0YX{aNVXf`BF?$h`#11)H++VR;YOQSEYw_}HzDl-c@27Xi}AbOHR4Ih-_=KC1{P 
z3$|!$xMmNr8oM40rfGKCoM_Xti&Bq-N?-W0AnJuJaAwSDhoB;iPk2ZjAAV|2lrwFAIkgvYR$!EsC&jn%d=F7D)!+!tfMh&M#|i-i zz$V|t&W_=)N02*7TY*3;BXNunR|nDL;KuO+oIRr`Yf)8an?q|L6NEehc=d6C&cBPB z4tp@Nf9NnE=dW-?><9D;L73^Mo&D7~IU`q3TsA`W-gkH{3t9}-8rO6|W>9wTGHhbZ zSoa8U1cKpr2*+(v&i1ZLXUBlZtv|2@UqRY>S>qydX4ja0qJl~JQ=`6}y^7IR`%PCJ z$?wQTw~h-vlo8DSeX-V2{6g;`mIr4(M&f;_#OtPZzOU zJP^YI;FN5GSg=)!fun(Vi(+LFgolKfoeZ<96nS|<5VgdlCN1+&le@Y2s=k8s=ZsInbzDWme_sl_gT31H-XiO8x7Yide(o^b2 z(Uft#%9?OUF)!k;f-HHc3O!syzcMc9_6KScs$BZ+5@@chBDTd)%qcn*jmEYJ)(rL7 zHi52VM~PeajuxHC59<8xy^vD zeYH&MF)uIghn(c=?ERuD+9FsTlB)>niy{fF5pH(cU!ERB2`(1whSL#5Eiv{fIY*qY z=_!?1E!uTwFOdXa7vOpjcv4C37lM3`a$t-QeOU(ZW;Ncj@r-DHgOModH!liuDVfu~ z4v>j!;}zg;T`l^`$tz?lfv#f9lJ>?pP&91Sjo0+=E|HT};0i{#_)|eHA;;IyP2!xD zHH48Kf0#IV0K`S8Fi+qU<`C=`2&Yd?oi=-Fd^;oNEopC!od)@Za^U_8WAK6(?XPf- zYYhEk$qJdSR~_(lF6HzW|AD_+i# zKGX}=5!;r@$-G%6^XT59{jEZ;Cd61S#NX=2W~AmC$P?AmRoQPnLkKJ_SCi}V%Oc?c zSG)Ab-owrkKNmLqWr;sUNG5ap;%RWTXx>haJmb-Mj9*~#;Kofg>#agu2Ko6)%AOQ7 z+0sK5PW3XLy_BaTN(P>qE8}fJdDf$5F#Srw z{v-x|*@ogSL1cx$A5K9W1)?1ZEzZT5-G(9~a^Qf>DjDENS+x9821P zy3up_o6AZC{Ci{9DsNi6xDpeWFDjsg-3!9u6=A@1waJoIkY+<2KDRIoMgRK((KRNn zWdC)w(^JSr1pPTmq_brKET>^4jLEAg+(oBP%9*jariO4^3|%FLg=8$zd`!cfC&33 zaOcNv0gll$_;teE1(<6_<9jDSn>(mQse%l;n|>bDUuCaElWy!Gh_;?XIH@vc8RgJ@Bf+}MFf<6J?s0ZJX` zKCTdjrnnX;48ynCYjS0_H+v z5*Zmw0n8KV{HlbvV>FQiQbKthy&e%EPzbDBtk<2glk+B}zLo5V#h6fYi*3zx0pjlk zG1$$}G3EnLg@ECMm?`O=O(JQn#T|9=BR#u|jWE*j>)(X9{W{SriT3;@fXVj3K99{Z z4i)Cte04^=Cd6e}#8!i1g#9Z|qiClaHRx1LD;$}iKKIsl!jVMP5b02Qz zWB`LtN}6CNy0}5lE`0+cN>TSe zt%l4iFiKWpp%8y5nR2zV`bVA$2akb?Z^Z^*A(d;*o&mmVL$Qk>GUr&5b~Y)?jMHn0 z6x?4Cds+oing*I+sh$x#ucG=yWdg!Id1Sy!>MH_WVBJiI$v7cSh6MnDQA;JzrC}uI z^v?f*P<_(vT4yOU*G&36-iyUJPnc}sopHAw%{k^;IPpsXly6m`2dn)Dz6#}{bZ~f; zeWrOrvQs&XFt{ZVm|w+dK7$`z9F767eal!N$UUh=X9qVt(@ByF#iUA8Oc4ZYrq7)d z9of(Bq<9StM6V!!UuG@FkF~xA;_q{1dQpKv`4k0TUq6^f#K#@QRfREadx6x3EO(j} z9ip5Ywyu$}atxjY;_4G|3r*pQuh(+vV5Pu_#rXZYKyu+uIOc-JZ`9tQuQI>?PypEy z?gRx_Q6laWLak;`pAl=F&9i1p3Q2`866rEDC}`$j{8EU^fQ=ZY?|BYD#z}M)*^=9G 
zNIS+dgT}d}a((IOoMx6k&kh17dkhK3;6cK&2{P@e6I`60KSxR3JCP}D0Q8%lI8dk) z(}n3@Oj&SXVU{n;rAlFNFxj$C5=JFwO`jI`>)CZ+P^Xy=IKP&UMrAT*eqwnc*o7X} zMNLc!*h2*}dbMau+vEZWc&#NQ1E{F1KBVs^qHeSc^S3z4MtG490cPnix;Z}D143L2 z>EH@_Bvs*`f{{n&N`{h=cuO=*BOi6@o=9ihpyEd3g|&=F9JcaUA(ReNFE*yn<-b=3 zvEhxx1wvduT35kU0bdvBN_3$>r5(YKe6yA{Ry8@|e^m&DVfsKuuCHdZJz=#o(kL9fWfmTreAl33M?~L{rPgwK-Oifu)O? z=zAB}(%~yas0#GVUB3lJTQ{|L#)*1%uWr*xlIKOcFbi0Jjm7TY2B0vCLq5r-1T-_{QbgD-0yM zo%N77>C##vi5RQY(Kt^SW#L4kHjet@@;uQ3X6lmq?Piye$n2P3#4t4)e-wiJYr5cQ zaqZ_i4tflfBQ6KAOrgh4+>vY?y9=Zdusb5j+?XvIo-Y|n+R1Su{Snzt5U*2nM8h*@ z46nX|G#&sr-iYSQ!fqg56Y4h?C{o90^j-*)j-HQVtjKzNic2f&gei~pD~ z%22>C!-Q~>NatVZ(Xv1r330Mew<;85>*!RqIr4 z(P$7zJ~D)67l*Wc2grh1Kd_36??k%*ERW@Tt^wxHLMn^;wEQ@i@k^z1q}X6NuJ%zZhE?MgccvNqiNL(*nTUbF;EomOxk6>hG~ z$D;&?$o~o9YqO`!i0y9US-l`_m+m0SX*uvCSDr0y2l1jb&9Os@gpverNWZvjELN+; zEtH1#1)-ThGvy4JR9hp-!%FEyM-$Gr-`pGK<*S@RvGNXmOL!v5(dQ6|SZ@i^v zGd4Ms48&FwAg;(fVp6d|!02{@KwjzZXNynIeuZr;1)=(-9RoND^Wx# zd8U6)C>3k1ju+{v4~X=83#-KXLI}6L#}EfhSW^^b*Pkbdm*hanKJIt26m&@0V0FId z20=6ic0rj-d*V?62u~X=U5O_|nJwfwN}uS7CxwwI?lXAI#fu_c+*K3TSgEhX-uLD& z%BfqCVP^`1J=jJFGoz?pjCBVJ^x%ted~O^p8g`9wE5})Sb|vL0*@qH+TMjM6tz`rQ z-cJQlg+Fa)nFRi}j-B-()$%H5}Shep5a@z^2YPT;I=3qmA zaTTFWsqm)-X5B#^Ny9|ZJ588nnxo4sVZJATGUy&_4K{CP39P2T=aHY0z;$bo9)JdN zwxBHIe$FY&!}MRm5N=&f9Vh?XnQ|5by>c+_5KT+870bG}=+7BBV|Lm_-0cB=r6tSV zV`yTBiZT;gw1--bbA`AB3mE<|V#S?;VE$~jgnydE)P(wL zgnsB@t1JtlmVVG!m)vjP*2`|hN-y{55g~Fk9)8GR9hV(Qr%h_jBLXvC7x1o^X(nNw z{iqGX?W6e#HlGN6O>3 zc|Ij}=t{BHqa<`WdkYg+Y9iS5m(F?#H^Ie@BAhjYu*|5fLMU;2xht0CzwXX`cC+?~ zBZW|I29rb4{=X^G4a?Q4WAU13iruF3O|8GG{kTM%{&$IXS>~4p6#gL|$&)O@;WT#p zHA!5Y;X;*5!(xdLzobiJweNS5Aj&{YXR-Klk#1DHiSUe|Q#~ro~A%Je3hdQ&<@x|Fp*~KjA z4iVM|mAG7>TcJ=*htl`{9SF;z`OD(-dNeB=m`-3>nyG%C!}P4QKgduY33eA|R~_fu zJGj|Ig?L`+T|1WQJzP6A9A`cS274wh!OXk$?;*@$_FM;a_rh_4vT_l55{=^oA+)p1 zH?n+u@abBLh1x#heqtdWSv(Br6i(bG)STlu2MekF{s2Jh;Tu%u0DYH8|IQqy3t$D9 zPhQtZsra8j7qK}-0vL!7{L%TWE~L0wyNUE`KFqDL@(zK1t%KMcOv>>}_F5k#&HRaH 
zNI?sA9MTc}dUmDf>0)vGPcwu840m`j^FKnIBU*pz74Hi2mkBdLE8(dgi=M5e!#K@) zp%N{E;NUQGrULyRB3f;5OaDMzo&AWKo+1@KD%xdf=Pqwb`hpMyau{B4UV?OZ}TMW*q#Pry+4t}5~q_kTDPn7o+;Rs zMzf&WJ+a9PKoowi4ye5%0wy*Pz;^Q&{k$4aqh0k(Q)6G_nwL0BBsJ)(YB2fgOHgFm zG&tN#I7twN!pMhO7q5tP>#?vYazhifTl`AxwFR7aVUH`$7UXoqU*n*O zG=j}v1$5U!A(rY>jID&ZPzwuvZ0p5dLSPS0_3;fod$=Ul*l-;78nm-T%}?6$KzvRh zMeHphVTHKhbx5~lT^gI76yTSd);RinF*^SO=v7CXO?cePmfV)oFP_4Z(M0p0R!|S> z4VMMga7KLOuh3>z0|(ZmR((nkmEjU@F2a(7@>lXiw2?Mf;|)PBAGeltaYpBF05miW zNRO+CBHMm0{gU4d(BKG!-0$s-8GNzpSM7u7ScUav> z%|)92AY8pe3!GT>CKL?h)UwKa$v+?|fvlvN!t;AU&ZkasVDWjmOtzAQyJYnW;artm zCeVy25XDlO>$U&leGhi6NERk|)PmUY-=J>$mb7UQ`wQ`_+GP|^zbeR8>fo4EJSEyv zk9Lo5h&JR)@v2~?rmkT@eB?hoyHqPCF6+l)!BBL*Kv#i7p_8GXxI;s@i^eQ)b+)y6 zf1ZU*CM8|K5Dy4+>E>w*h7J7gw`-qD16WD>Cdq`WrOJYwkd=~@kXJrV1Nj_xVTA0{JtQ* zG{h|)Sk*^k!*>BFN^&}&Li0qM!}Hh%#bZNK@;Ra8lt#|-n40<$L77_Jjd2efnZJ6S zexR&4>&O5nr?%)`Yw=`b;3m)B$*S1xeL&{HJi;1BooH8)?P7wiGBC9ZL)Pafm!&vX zBurd_GS0bJQBN{gJlCZeRTU3v2hL& z=&Mir5}(bXc^<#wPC@35a?FCp|5&J{yc4}k*E(w?cu6p&XZ?fAMXbLrq)Q_U;0X6& z*oWnO{eV<)qEps1zP#Q0YZ+oMWIQ990$e<3Jz#%*8D1R3*<%wW77DfSw3NEIT=e$> zTsw+`u z!d#B#%vOxvFjXimDYFeO9S3g?>CbW$JG(ylRe_d=cKT;I_S+&)Rb&dlGKu4~Kr^wf zN=yqj*AD(r4XUsOsyK&5K`t;WsVuHVCB_6>f)-XehJ1!-e{wl`33H?XxP!?DRc7}s zq1;9NO{71Rw$KPfEdAC-7Um@^}of$n7K+Xlq^sNG^1?vvw0S7+6v%g(DNrXWu~f86wn)B`E92KfnDRL0LcqFh zA+lyF(7B>Z0KIi7p6D;@8G%dF9NT5;IAh#I3rf*-Qoa-|qFnSwmXm`D<(4pilRl&a zgj;@$RGDa%xR#wzIG+|oqP*x&3!yBSv-{Hm>@5M(BN`iHv+e!d*vw56dZq*`lPm6= zjA~8#)3F(a*@|VtvK>h2l5ne+BFmhYBL#wTAWoZqoHVpda@u7q-4W6qD7$f#PWH8( z04W;U3f^ESbAQSJniGTD>Y@0n0Qf+Ke$ViRV~>Ld z?6^lQrxjcVNN6ho76^RExeSbEIB`!fk6gV-S#~Tlp)N!n>%_4*P09RfLCqzvzpo0T zS_|qH$K!hD1(eoaJfu07mi;(==6Ui=7m*xHtswuMO2Vpm~{6n)|#0WLo`-lj5W z7XOi81|s?$K@2Op-Piu9yX?&m<{QyfIRnY8AA1UvM0R$@r?cl7)Ao%^^z5qO?S~5Z zssPuRjZsXC5{X!Q3Mgfi8q55*kx1lYyvhzeF|~%`79sxVB1R$f3WBWtN}$=I8&}2n z_?6n>tDM(hw)lepOLH$~Qq98epI(C&$Xkm>+d79D z(DY<~*)@k4gVHr0hY8~Qtyq2sVpNnHkKUmB+)`FA2=+RHl|8;&jL@>&ye{0ut?ffc 
zaR#7E$GzT3@BOVHXWKBjvCoL`nqa=2S|xU$Sxba6q9CwI{X0=GT{#_TV!eJ&o-C~{ zJYL4Rg3WqGPEZ>Qap8O)y2NnRto$R`CYcYD6*IxST}Q2mpp6=~S_v%-orR(CoE~GaIxPnq2%JciExxwLzNDm* zT4MAuOzbb(Zz90*b7WjPRiM8|lq$}l_r#Y4S^nlPZRQ{+qx0B7)zfPoh6#M8a+mu-Y)wC zx&R8wjd4MK69Yp1K~5fMRr#kp9b$(oZEEU$4yb+N(GwoIGd?Mr_oU8OQp~tkA?^yT zI!PY~3t&8*K4;%)an>gwT%;bh8f}@nR3NqIhOpJ@7LSRBi&|(C)mkQ?;e!m(ix-rK zu@zS0WI-;njO{b_i0=!5N379wwru}TLe>b^^Ynv@xVS7E{ue(5;AX2Y62%;y zb-Owsvxy}Z)>6c7C=L;7k;6czpz=epLKp*KV_id>q-SS@p&-SpxG~SDPN9axZw|lY z2`OxfN${8;vxXZgrI*8s7X`YKxc^9`cc}+KemN&bXW3O0AD_;NnS=H0YT>oPZCmGL zKi1(ElvkHUAQH4`rPTW;yKagKx>uG zF~}~&N1DL04|C~8G5#XJRlyickI{gB_dG!JrqG4`59l62nXDAXu_J$QK7hZ5*H}4r z5KRK6>$IYc(6c&o3G4UUvu1Hz2HffZhg+S(t{1A0KE6xzcC&WvR z2E=)y=?8tB@Q6R=FA!kJPFm+La=w^H@m$1YGZuFXa%H%w8iW1%EdYo(b{?JTK0#z^ z{y8zd)vwb<$e`52dHdhy5QcgNsqR>7F{#YqLy@s~N;KkB&pnns@o!Pt;Gkn#$UM^W z(Iuef;A*nQhK^(=(40q);gD$Tu4I(019Kv4v^gRvTW685V0yN6ElNj5vy4<8wmHX{L9rK`ZgPFzTRPo_%BH!L<}DU0A}i#`?T4 z^D)ITj}vpC8%ZCDGX+rEc3dW()wAcq_gGWn6~S)zZk?g0${p>7umB;CsmUsW%q*pm zF|ZPAauD|z_$LltERf2#7WxX~tcy17g!XtvOpjsKhFR&i*ALDTm#PtYWsk((0?nF^ zVNOlq{+S~R^@`2;cp->fKOq<6!%HFDw%SY2bioI6R90W^k}R?`NGKe|jY_NSWxQ#b z+5Oq$FN(dF*XR^cXtwBV)GPF){I^%V7vgGHM~^<1dH{5&Bf zD=CgYj@k(s9wJ0Av4ahEJ^j4Sl)jKn(BwBHJGM)mXv^`xy22g)3V z2EFFYoVUNAU({)FWaNvdi*)5NT5(8$mv1`)OhxBRl+?DZkA!w(*|{%8Fl-b`dvcPw z5~ zvsQn5N;KadCzuQeCqLEgpBZb|IGG+_7j1EB9N}h(IIjo5l7Tx#(twl6{yE{U7|Soz zDoqs$bc3nWGv>>0XQW6{b85IWfcbSW&g=zpXF@1f#yc5J;AL(r8H$bj_+2fDucjQ7 z;$s;=IUC1lnP?F}Jsv&b(a%3}V+@OgU5l5r$Jeu;U;)7wwq|^+ObYXfGN73j(5^QX+h$Ju&vVN{G>e?~JdGeb? 
z&|IwO=`F8_f{DK1wkOsfhIWqHZpAzk^90d0SRP<$Tq4r4O+Tb_QQ9#Mb=FXdiUBw6 zGG8Oq{gZJ=x)f_}VSKTZ{jDB&^gq#VHqX!+@Vk$xy{I`eEo$!)MmaEvR~YfeMoh}F z1yR0JBy~&{+j~T~TX9&eH;%4AyByrS%f14-&-$Za{w{XBYy#XsATMIGA7M&}jYXRs z17*9V?Ip+qLB2;v&%8TJ$_7(+zm7%^#owy6Pqe1Hi&0u)tyQ%w)2<2bR)^$Ji}tLT z-A)ojE2Vp+<91OlKli@jQHO}WEQ|{4%7NHwbu9t+U|Bf!5N7ddzrd1lKBEL?>BInS)U+SfG}FAHFtXyvAb&DQX2c4MXJj^jjA zG{RfVh+kwsPUe2vSS|6P^2mEkG&RM(%L?Pq*{>YXN}Ev|j`OQKqQ*c*lVATorp`OC zvZDIqNJj(|ID!QgL}Y=5?FDH`HoKeMWH%()U5b>K;#SLnXz7 z)P_8{11)2FRWARN}vt;P; zhO=n+qj|FB+y=?mO!P2esTHNcd_8-KvAdz<1P>iT0yx!)?P5Ige-M<9vrJ}adT*MI z#~ljl5@BPcD6BC>5d3b%G_%Zw^uNn!GF~R*j+uw?yK7#B%~x+6D8MpHt*Cp^Aqx)| z?uM(E^Y$_Mi;Xgvj9&}LXQ^>_4?)>`>Yzh$i4b!w z1*l?NiD&W`7Y?-dX;QlRM3TAa%&)%kkTLU4!7f@8^NoU^-z&%&E-7dxP7NhI& zLd>%HSSOFfTUipMkM2X<&zSJzN~WlA(>D0*p8&Z(5g2z$JS*DFVaG0i&@Fxn;<_}- zi&fqA(3AZZTQ0opP(X>cC=-^6b4j^{9Vd_iW%GP{d>|lWv*QMHY2GPLL?MwyrsYQk zz@~}V0==MTXUoE)w~GQ1GW^s^E_3NK>4LX+bBqfOT3Lp-#p*wE&g^2fnk`1P0IJGT za;U^v^xs6gIL)?HAB^*V4o3AW*Z0Ocr-Qg5IBn$grYFV;^7;qOBI6zcX?xr*{o(?n z1Cz3h<-!R9&C){qQr5BY!z@qMD>(P1G4o78G`_CZihDC+aqWVZ_>Bm+D@xMALcCUa zdzceS1saHXXOPB{xv*jjiesi=PahmJ%TiVoV||XIC}P!xU*C{X7rja97rOpnG$`p< zyRsC;#}~hPffThy2RToPpx-tpAJO=`NLp`EOKo(W$#XVTV|y26(dGkj!F3+|vJkSD zF^rrplkLPO0^KaQ&LG{7y)dl*gbTmta{*lP+`a*~0uqPLwmx1}RnJu`C02CmLUL zRPwAKK9EAheeoO!s?GJ6NS&e@?IHvwRn;}e(RyaSCHxJu+0CNhCcUAluq@U%7jmp~ z;*4^zXquTrFQz1^^%KWV7EB(v$TAK6OqAKoDv@p;>z!9Yl5!81aUo-Zp=A^iG`P~_ z{7SmzIudR-;&jQZLYrlLoR!Wz=GeU45u029;Bxlkoe>9$HUrD8DC66(R& z(R?A#E+Ut&A(nBx5VsQA64vO&I4Ms=2tmz_p9q1i#9^BomtItPL#A39AfFaQG3wf> zHQL7)M7x}QY>3+{C(ZX1d7=BZjrE0&;N) ziO4!YLHK41bi1K$TN<~C_D2eBnpfiE92qF4vFj^WlZHZ1C&YB0p0jo@aFOmAqAA4? ztI&a%{0k^o38ywj?pJaEwKMkX2VO%87phJx4H?qJ@dBw=AEDObRuO!ysjh`uGv?Rgg1FzDS(z;jk*uHA3~P0E`o^2N`Q~?ywo}p*zPUL<@fchIUDrv*^6=hB^W768c6+j&) z5QHgKiEz%n_zE#zRJH=q!Cem|>3|Ke}NWjIswsz(PjBRG3j(iOSZucJe{o$Cd<tTDpS*x5WaupbZN5z}z|# z*WC_iHgk5|F~rpdeMyhB?2bxm8Eff+vY|L!pqZk{lv}r@DNU#=%WTS_Qh&UrgvdD! 
z0B`5r3FYFl)Tc?}Fd<}=<_nIonU)cj|K#ZkC~nXUef}=L$rP5Huuu&Np}>x5#CWO0 zEh!~-)(>Mcr0~f@u`q**+%J-?8(IXp8}%w8fW+rYVKjX@kfh)3dN;H=2TxcNwhM5z zvBW}!%NlPmE)eQkRdW#mt@(-&I@%0^qQ)loR9+fAoBqIH#+mo`gu)m!HEcus?}c(N zVv%J}9IA4IU`C9IQzpkBvgfHgPK&?m*#(n|sSq372jJRBAIlIo9OH$V4?6mm?s9`) z0nr`MAV@tOA}K=?F0+JB5#><>^N^NBDX3C zfD!COaOS>Al-rXF2s=7P;!h7$5Q34*2Xo6`gJ>wdbJfl92N5*Wq6Kr}Yro-{I&))5 zse>c42c52E1f#FxMTq_M`^$!7_lF=Um&^?brQH*=1^B}{T{Nrc=#YBt36zKi#z+|FEm34<*BNh75~njC+|2tW_zAKQbV-D9DyF3QLrzTCF8t3cTLM*FvX}V=4@`7M@nkCE) zR8}4qe-&y0T8@63g-2hk{WzG%AaoTlx-;7-?W-SL&JsFFdg56^U<}^I93c(G;BTQ3 zB?bV^IBMnR1^Fcm&j-rr_g7Vr+7)P3hhni1m!eydCnQei=BaAR!*ZFqREYV;;t^%W zhXO3vEi8n-^E*hGFmvjJn677jB6yozrx%FytG%TaaHCHE1-yYYbo5F3{#I2M9ENnun3A5Mbgq8 z81E6WLX^u{Gf-HG8Sj$-&XNOfHsx`N5GPrdR5~3`0U!u?Lg30O0v@1HjhpoBf^(Fh zQzNFK*-r!VJ=PGcB5oJu@--+k^u{j>qDdto3S#pw(Jp~pw5j_h&w#iD3vs|A?vMZp zdQ)@!Dtn%Spg*o>awi@dXBTN3y87=c?@AB0EY1<&>|0?E7c2fIM`jHV=nmPx386^o z0z;GpOa1^&9hS&wJ-+xIdl}!EVMie;qU1Ju{cSz`BU32MawS)xJR-D z)alDX!|W(0{27={#lAa5iykvc=5CodQVwhY@UKhzo0|k#W)(19 z_XuM>iK}9k0YmGUB9^1-H-u8^+5%PhR0O3&i^&NwTzU6-|B3+_H}d=if^RWV4tMAP zl8t~Mi_;?FMy3OV*!%@RN{aBXzrp-NEgdfirGaoW;ll8wUNRoITG!mV$zESs4cE63`^4ln4NoL!+M-Z z=S;Y%WJsu^TbG?a~>#<5V>b0N=gUEmX8oaNY5_WBHJBo@K67` z0N>`V^z2`=cqM1N24WV~5G_giFFMtOLR}n5EHRj^kSjH$ zy)K%HvHFF(ABdvXoaF01_|;yA^ye@g#AqsQNH!O00ZkSN+HCw(7)9kW38aE)H69V- zLV5Xxg-gtT!>`Kiii1&&qj8il_)*(X7Z+yFQzlG_OS9*?n)z{A_8tw4x3iT*`@G3} z5_PJ~&|8ev{$+rmjVRg3xmti#Tvb^}rflLpLBF_g_+VzSTh0{d+KvqjgcLtlC@kR< zXj$UY|3JF(`mok(@u@I)$e7KfaNt`|u)=5EI^ypWLJfyGC%RA1{s?=;F~_xEW$(~S zIcUdHA=Y`DbQT=O1-T%_=LPxGEUO4wLe~HOud|*uetLZF9iEZ-m8g{?M0lWW#GbSq zUlZWBYwQO=*t-9H7VE%Ne<13GS#HvuznOjHVS-&AuG&r#w!yoVw=v~KbIY=$Fpq_2zaWUH?w4`p*~bIZVe-?3&yKUl(pJlyLpb+#mm)zdd&rz3G5Y zoq2N+oj6-okroMx!(zT}wS{BlM6FK=bdv>pOu^gw(v|{9l*T{eS%ang_UuWK&r;Y%LB5+*THxcF@9}m*MZSH z#mvFK5zI|WlPQe*c; z)P=RXoZx;Z$ijgDYlkksj(9>ab)PzkOQAmRFSRPjPkR(ZniazX>_Uf_jyW}2AJnzT zY(R>!tuV?g8@99u-cEqZ%wYzm6gy_WGt4yWQjNGmfLn^Y`i8h#4)A|D$jHso-V+J#Ldw*B-O&4mEw%akLgN5 
z7BsBhb&mdGmJ^M!r?jB)QJy3n{gq?wjs245ER>)eCwy0kb8bjsI2nKh!L(U*O|>;G zF+L+%>PhCOh|I*L2g4D@w$3OCWbEtUHXQ;xZvy2SG&)|wNSq+d-)Y89j4m2i34)XO zAjG43CIgOk2u<*kD1RfnvV>jED*`PdjDsuy(h5JJW9ze;vRD?kEw`;-HK!uAo&>b_|Pl|CUSon8N!V!F*?KRa0!S1PwbBMCBLi&&|?kh#&kn{W+urpN#3zAlhXd?(az9L7N1*Y3c|PLt6v#M?lb zk%PE$_r$lp>lbvSXaBtIgt%E~&Q@8yNR_yYV0H8XRQ7{WBa)BSH7<-fdNzM)M*Qy8 z)8`4K`p6|K1SY=rxHEfaR&7;$sAo$c`)PWWWH9#G7SzSZdq@KQUm<20ha$R9VmLk# z=I^1(LkFp2k=4cln%xU^3sD;WL}3Qcw&ahzysW*d*F+YS;I z$)|#%5teOx02itbLs-^<oFYNni3}>le3gbBABgb)WQzJuria^zbTx+idJ4}HPGU#LIFopLx!5k6xAkZV@55C2plE+>AXSmUrWnJ3II>Rdu9fqFsa>>Sn??P!LG z0LYb~PU+ZW_;wShPLO(ubTH8)N~~rugDba-M2IF$of>;g;pqtVXTm9ILqk>EtY?3|pEG0$`|4BtGkqf-)w5aHjN2@lzs}fuDwrjf z8%%IxXyZ&jf$C8@1V_`YG|pCQGztIe(<)G-ydG(cbp#r#W$&Xft8B4~Ca0jaGG63Wo{u%-L@S z&#uuNbmn*^F+05|kOcLF&-qx-Zjn|Fa$?(=(Ee;A-U-P6b3)80wypFE6x}=Q45n^_ zGc2ozO}|%3(pi+si_+dCjE_`f)3nwuPEN1~so58agoRk$&@*61d-Mo$Ggl9zJn{PY zkt`wenshjkM3^RicbX=~a)7{RLLwak&6kMqkxGN!t6h&RB}Z1S7vJ+c`??YnIJGki`cKB0uUgCzHz1ic+6pwPTYPY686#2`p`Ag1-4%R;yR4Q z!qC*4V3!$v04f|+B!Od-SrKQN|*b|BN zhq!g9M{SZIzt&ht&J~5IS2CBK?XoX?AbgNe*n&cqRRHGm-~UW*WrFl@9<@?XmK!3E z0VFn=!YmBMK6Zyloo0-GXW20sEiZJ!_9GTMe`Ta9x17$mkqXB;oo!YHF5yL~;JCtakKxYoM(b?!q_`b~S0Q;Wg) z=Dv`Y@%eoyz9)!=!Gqjtj5(8Ag}SAh+$I?A?FUB2bq$R%V}G709EOCn#ZcTPnzGYeTN9$< zf&(D4F36@*CL1^#2{eBzhlS$r+6RKW1X?p-e|;_ih8Kj9F)YA@0YN&WV1Tk?pT=VAb^$bNEjr@3 zQ-rfvpr{5KOnxoQ)obDy#iyPLw4I66ty0)ou2v~^CgEc)O00MPnP-epvP)$EnAl2E z_dt$=74$Eb2r_qOl?Is;k}>)|c|tUix-={95M+k2(t)pAwgJGJA^dBpY%CS+nlQGo zMqz&WX_kh%56Q&o6w$PC&D@3Ym3F_+Z@6Vlj4edF!*OC>z}~bh#Kk3`5yQ?%oGJ)G zfct4zUW7DL+K3;Qq9QS@55>v)!R@y7L@Zp+5$(e1FrL5^1MzSN7=@`~xAn1}UAubX zY^{uKIx8}JM7N*xd=+^g>}sAg5;wu zbq#p(R^F^dS&6aLb|nxOv$;sfY)w&r6^6)RaZG#FF0G`X36l)OkTA;&e8VD5x{uog zj#@BiPUL3&V0^pA({P+4+VWDRK$>ixMg@^?GS2>E{yU}>Fsc+kK8WAV zrJ8hj+ojDRt@@IF$XcSS$RQZ>OTx0Awu1Ou>Vm95J_C<=25BNsu;qeXk+y-=1LO&B z_?1=O>lroJBQN5&MOgrwain9ZJP3fG^j3AnGCiARi*$S{y=g|5j4q?;@Qb61(Al3XL&8EF8cq*b*IY;($MyRAlC$!0NNJ2uK3ajpc{cro{nqq zhR_6lH*sS|{08H-JjukFlL@)9k|glFw=mMm%^jj( 
zYHdqZ?5bzKEO!hP8S!`y?K65OMeB;^1W|XI5Hkw+{H22dkSNTwx?}5%SatWRyW)Ey z$Pi0hEhDudY8J%X=#3ou^C$skZjG+PEXOO^TXdM_e%W7+#FdATNM#=3v@C8EmCb9k z7`kff72E$0+~rril73cLBZmS}y#@ka_Z`MF;?b@EINSjsk`FUX;%%t1{ac77ZmtwL z(R;W7tVL~yu~NWTe{W@TjP|GZ2Vfou@m4|S^` zN>`6h&|CS>i|ODp0{_wx{7iYO(DBd%v1AtD?%gK`n%SZ)J8TV5o3wJwRVUEjslob< z_*AjKAlS$D1?&8$@&xI!X~xinN0Pv$UBtm4xwHy#P1(%ITxKK=JgSnamq15sj*b!H zR3-eob-6&?CCuOF;Efm>3>|_bGVDe$_F%Vkbmcv{^I?zF9sA^%RW``(jAJT!v35w+ z+woW+*BOl%W3$GlTLilN1g4QC{x^>Up|OeM#BdsYBAtNHJ{(iTNkSwURgH1@@qV7p zF^q58J;!x9EZ1l*`DBmNY1rjTw`CHJj9W>R)*#v~qPNPo z@oj--W}lWih4?5(8Ve&thKO(cxKfxZg|UrY1Y|9sYq|#GRUz<&HNu3M<6@VeR1!30 zwn;)rK%r5K#BYD<1f2TPflwvJ=T8Q4jae-Wbi{Z8$YZ|u=!)$BH4d%0!O#8c#2M3LXFam>!0v$(fsxPv1pz2+b0FX ztfso!7=Nzc8Yp6IWw)I(1-Y=z?6C`RnE-!IeHUK=`tFC>iwf`AgFy=Y>Z0@fBFCnh zcY5NU^8sATHhL;5!DocHkTMHFl@R|CMB~sG*$u?I`8(Api18Wv*##tUr`bn#GwA=~ zC1F_^S7HkguL|&|B&STvKe({+8lGG@6jA82gz+t1m8FHeNXFw;*RjS}_o7M;RaBDk zcb*WXMtQ}oeyhl={||Hd=-r}SF1$O?I55opC(9&R_atM4-(KuYFq@PY;{}&MQchi; zD&56U{7Z@S4cAYeC`oU|O zMjM~FKTgsME^RYb+uf|uqb!UXwaP%0cTW=S?{Gy>3gCE2mPATsMHZNMWhEQwSP(r7 zL4^6W7VOQ~JI7Umlyka9TL^X?O)N{4p>cHMbGu&~a5!CwH zrnq0vbUqpR^~D;$fZ~(M4TZUb<~d(b)+uxbDNlp=oxt=B4yVx948_cA{3c@oZhE?c zHl|+-WY*AY>sD@@y}G6blxja@a?d+p-4c55>;~ zSZ3U&{rB<<1s-HyrRLiCSJ3bh zdlZE*8;ny%0bE+Dj)pGI$dQ5K=w;Uq$eW?aV7DF5Vrh;O)lM2sPWK*FQGPT-BbO~_5 zwE$<+n3gVQWa+p)2djD5b;X0EqbaoQWw-jg$gDqPQAJo9{8Okohyp%oCRck1$n{#T zaNO;&mJo6pET;en+vf?Gr*yeqjS$L+`ia?q+55sgi5AlI-A{!e38?v){w{tP%F|W~ zMOq%}8oivbDBM%Y4}4r}2Ss}vj372oO?O*H{$`9_5)&+f0IiZ$AM} z&jkmAUD)-XBFJCk5V5PwNY02$-e>9uNh|&&l8iWY&q*-bzj-aJ^OWeJN2McqF_2%^?3BY~&#pLKpjFWUHde$IMFDGg7> zxL%km=b*^N_(%|+K?fr(!1%vZ-m2-++L&$jLSgE&c+d5eiB7$L1~62yfM&h+wHHW1 zQ3gs}aP$okmWw&8HDhk+$hj8f{85Oyi0)JB!)vzx)SRJVXk~_^0MOwhnFgu(Hj*(oZ<5u z0$q-|iba4&s1Rq*4jAzg?iGRx?S%Y|fuDf1iV++Pc#7!!#E`M-{8%Z>rC-8639rA`09=2c zwMNJ5Ak-g&Hq<7`{_HYAu2CI^q4A?PoQrP2NJrLp39zu}m>iz5u6kIY8A;#_ZguR7 zPXxip1>8<^=bO$1;g&-n<^5MBJu=uX6N}vhjm_!V89F=lYSzo#6> 
zi8K>yWw;Ti=Kw((Dd3j>sbtDwCoG9mgz!}@Y8Z4L6zMExr}fn)Zy8YRlH`qb<=cKi z-^69<*8f7f$}*3UT@C!);++aQ$VIPGfFRSKLDRmWl;T5grL=AiMQ>*hA1#91u5-Do zEYAH5U2biSKmPACudsh&`g>mxBM?5>z0ve8gn36u7ge()#2=}`JWvu=&J(eklUL%6 zLXd)KOlBLt$Fuu_ZoEkIpvL&`3U@}VLDTv64T8*qG3y$}fhUBz)j7nFati%N0-ftJ z9a-rxNv4n=RMsXdE`=|oW(S5!tid6#Lahqk*EW6*& zB*YdI4YA(7N^jGC3>E{j$@sl!SC6gOAVyuW|Hn?UysNDK90!Yb5;S>?h=eE-q|L}u z=?ShE<$^CQkLc|F&FpugSh4Rp=Tm;C@rctE8?V+lHp0}EITO}t+-}0m&xsSXTsbCB zG@n{)Mu<~{nHM_Z)gF5wt`_Jo&mt@oTAbVSSJ^dijLT-?gU^wQIun9g8|pZ1b!aLy zc4W)M^0A;wU!g}OQ`jkM7}co8{(!Mwde?V^`lJ2B?6`*FD{F!v;T*Ge#=#<7P^M{x zBBU*z5C{tg z0J#`d(jpJWwL<)&oQx6Tk+@!%dAoGDge}Q!0$e<;E7D*Xcja$#XvcWO9Q3R(+PH?b z7t`^&P!zh@#uf>Ubw>7}7or`8M8aXt$I&KYMzS8X%O-nFj4Kv@9wci%g|*TKBvuGC z*O}3}u-w(bMnbcsIz4A}#LPrVog-%DG8=nX2uzpeC=Ob`l0BX`$d&F=9uP>Gv8yHL zI8h{p)K*WYVP|DDaeGqC)pJCf$1EktrGzhG1?nX-9=@RQ?Jqb}^z@3Dv6o1fs|K?! z!p+|$#2m*1nX3OL2Uv7*&i|(@3p9e=wC#=?@PfpX2x&U~Y!nUSv2-FN!j>EPS9TXD z$U380w7J6#E=)Eu8vf*qfbgPIR*4h81Yt3tG1^mHqGyHqtrQ(EognpY4Cv)vZjd6* z{82x^xVphYiQC(s73mD6*U?p=@qCV-eKIx`Sk=S~&ud_iUgR(!m7wSa6n zf>M6WDgQ~q{={OWlC^1TJSPmkFrCq{M&iXhuNFGi@;H1`5>QM8Rywg`L^_Xje}*=} z4+(RTFji$cFUHe?=!%nfoDq8j&u%?*7ShH(EyOQ0;q<}H=&|z4KrTm17dqafM4N3g zdXZK#UEmi#*AKY>b+c4BT@dxJ#tb4|`*fxNjhriFjd86gcK}&@M1Mz1e@DDF$%($KxMS5&$lj>{TuR|w^NNz=2 zigW>)c%{2k4?0()EwwkXJ$4bZqhsKnj5$ zBX`&DB+B_IgK6WUD8x16vT6Dj*2IFmywD+nTN??uMY!|lwiGPAG>LsAkZQEs7if=f zLQ+eHOj#hs|A}@(^rwcnAWJ|`;SO42xqXWSE|K;Z2>s$5;V@?zlf^b$lE%DQz}_9k zC?3x$p?wS0T1uamz0cDE-JHpSRwKfngi%T~!P3LWnp-(5tCPg|=Y;sJ z77V|!x#P<0Z-coQOms}JETKyAq);>-2%v`EeDrZ_~TYmON9TZ|D7SEyhM zAq>e|xM)sXu4i*5MYYp~V?*(RQ0m=>;|T^`+7$nETOij{w#$RD)i|d~`+%g^iEcr} zK-jY;4%ed_l(jYkC(a1*jzA_5T;H%{+P;!Pb{qtnV_z!SbQVaiUE z;*=erDVfaR80-`^{Ot@P$Xj`Z0)rG|qw%CNCzc@HEGM~@NmwSX(*F@nTZqk!=`9?z zkA>`u$@%*YtRE>}d;DidQd2bCWHj;j#oGe1NromIvj+a*PQY~A)*_0V`1}MYH__}x z3t~HwRA)I$5HqAh~`ELbohiJuE{IjcJ8E^H2RWG+fy#_p$cWN||>O@1~*iYriq zrY+by6UoOs;PjU{oa6S7CxOxO3s8P;IN6^@kkrH)#HR`}vl|td>i?!xvf!>PdEF5w 
zSD0(9M&k+zBnGXFV3)d|&(X(SZ?4CtOj2o&_aQKW`DdQno>@jIw`i^z48K4zq&C)V%Xj#2iYVNY(xfH*9de8 z=1?8v{5Bz;s}LYGyU7@4B>pPg73JKHy3==G6y_>1J?rk*csYldeNs@N$MztV+0h`o zDhu@|RiGB8#T>Qt#IDr~sjl}`qdvKN@s#d?SIWE()OPtPOq_l}2>t-RHo_L=x{fl#n)%1#UY?SwgRjv;hXv9Ay}13E((&bZze?b*9XvE%$QXS^@W zmBRCi795?I1Ned$<6!~LeUThO24mcO5RYQq;mXwSAo(xnFvfE2C>hdu^WpWRrdllS zhp;kGcRW!LoT_QAi!V3$dD4U_vEc%q&2_E~Anp)V`Ba!c-?W_C72=gf5c5g*K1eCL za8IDI4yl+|+6MlysgjO;4L<8b@fShPY;TkiU4{5oGYGS42gWRMlZZ^(xw4ATasvD* zCTaNC9$#sxKsY9+t@KucD0bS)X`p7QwaX$feZi@ocDY#Me;*0~nX<#wn6GCSs9Gy! zgtlG~-{vxV?%-;V3q`wTtT(g{xKD^%vITuQMszu_x0dYDwx9E00H~i+-4fS`b_3zW z%osbw`C;s^Kd6gaWyOiy*9Ky$@Jt4YuPe`_8bRcvt+RbBLm zGzSD@AN+GfrliQOEdJl4YwkIY(qgJytNy)W57P zwrb;<21Id#{$!jeeyQdhcVhNji!*wu9X#vd=xEvmJ z$ZRm9|Ep~fnBNF>3)HH~*{wb#$QjY*9i>>e0{~W99VO-#g;1_Z6Q;+}dM1ZK26J@L z8kk-YWFDof4btqpdMBW>rNa{-v|Rw%>i#!;7-*qGg-~KvBsz0GRU{2Y5Ll3JqA8VHSM-*b6oLrpQ4M|NpiAO3)-M<&w#hIfZVe;(i z5avc^`&ewlY7M^e=HE1@9`qjHNrR1mz&T+1{__kmD4rao!&>NB#6$jkit zql{!Lq66^PMY=fLT*0Y1VS^Ty!CdQp8nusGWiMxIlwh|~BXK|(2J8AsNUvXtyKwAy zP$eg-pddU0Fn(TBuJq3|x*I&yaUQ_<>J z*wdb?XTGh|sklgli$>TRjZweIUR_?)CW8QL1EjDV&qYtp%r3yJ>?=Qb1}eSSauA5p zYKc&p*!LI6H)k!ZiOcouOt7S9O8ry-+|y`Y=;Z_i2LEixFI1yz{iU8=E$%dtk*u0~ zyJ+40RqFRShI$GHC@M0!aOuaE850B(3NXybymDZKh;Cx}(>yIuYv9>joHHY6Q4= z(ubs+-xo&Cv$atqB7dgCuhjli1aw2V|$Kkk}kFXFy}$@D=sXSD+0^{d44g-C|z zI9i|!50^1$O@O!lXmPt`xMA<`pDLxfqjMfY94N~E#X@Ps=DPZLQO~YplZ+vlDPI)? 
zqiWgc#WqJk@GaeomiF8W1h`VDOvtsTM>EErf-P!o3d8-ZXjm~gf{R33Y;Ytbd2+m3 zim!^WRB*ydwPM>GnWs~jzL+A!`7OqhCFY6tXK_+t9Wj*7^7c6j*sMTz#`JOM(UrWb zwC*j&j>mwwhI29gQDJ9iP;X)MycFfIFB$|ne|5TaeIRr9LcuNu!T<5G%jjk36?HRe z;aGmRpfN0A3dHbrvM{RNQDCJ#5~Fz%tcax(8i|R=k;Lu7TAY*Wqz(T>sCcG}wua)O z6Do<&U@_zMqBw|O2(?7ik}w5Uj+ajaG#g|bTZs1rxYE7Ef>!X#b$?jNjs;$FhGdPm zwNSsQiKRWNKLgOATBAb%(lT-Cj5t`&{&+HQ)Cwlf70C3?tiTrWz3hFiL9xYW=_GzO z3)u^FN21MX6gDgobcOj~+^A&cBym-}j(A5Hg=;N#r5#9o{zm{vj<(po+G2wYp))y# zeC#iT;;dw-MbjE$2dfU3)UzT zK_z%WSk^6df=uAF0fWw`vA(vdxihv!|ANx^;_()rJ85Mw!M%{R?~{P9Fqm5o`CjvDDD>NYLw*2QvEY%F=6`5n5Ad4 zjYB$0AD0Mme)I8Blt%2bJkcD2{9%pyst^~ham*2DB;FJ1cI8Me3mtsvX@Fugx5TlZ z-0J6$F3h54Rt|@W=5sZy5KyT}qD~hCt5QgjA~HYsbZ3ENYfwE|Bz&qb40m?L1KE$v z*?M`#8O~3u1J-6*X7>v+>t~k~CA)YQfOlp(t-iS%|GXwZ+A41*}7YoQ_MQUdSaJVJMmM{U+6-HwJb4X`)H>9wpc#8|7 z7&^2gBo%|t<>!LHG;}fy%7h0Koj61|?{4EcD2F(@e5^@tq`*O5%b%Ojk*fOSZX~=Wp z90&z^?0y01khG~gPLA1nc43JfdeliH-3x85ysAbtQwj1lm-8`MjVk0|b%+Y?LP&ED<1x zF`+Ss7NaGSX+xhNiivJn`@XGy;hY;$n;^Ez`4U0SS({*PY8ZD7h}oihQnd>!Mq3WD z%qL16<%%1{D8nQsbd#SpaI@bsM}r}+u>NHWad&0BdK<=FXs>!?BEX$ zn+3Ua#6TD%AaLWgAX!Ew?U2tCK$|wRV;qVvURQZx5tbYDlLjGv0Sh*g)Lai>i7sLX zo79kT1DH!gShIA`=w^bLA6MP_yC1~)`R}a+?4qu5X%02AsF0=d6*(lME15~$Cdk~7 z8l9b}ZXy20ji8jt;h|`l*92s9EpZ$*3lbzeOT}n~u@~Y!`6g1ia)+jLm1-9lqvmN+@Ggmbin3?`n;H91-7M_^A5%8zvMkPL^Ul}ioFE5Xqfj7b*Tui z3v)xT@@5l16z>W`hQ0NP@81eZ_0(PQ;Jpnh%QV>w?Igf;tCgUlB2n!4m?hjFn^R=Q zlp5v$Vg48qSkom0-C2Ig6oVyN>~=e8+<^0C$jH5+qLJ-~aE~i1#I50%?))HW;s>dBbbTQb@*LI`rr6uVAsW(S(9XW0BbNN5j-BHqVhJ1!e7sZwPVbH3ZM;j_Cs6g$|(O3K8y<+T}^JnU>%yegmEf z5vQBU42$XVV8QSi$1TmCFNmbJW>HsM{2(-qIb+(i_}4=`yAI7d<6@^9p9sqa(+=@k zSGcbJFd*fTGBef}L5|3KBf7#^Dw-T8O`8&HKf*KP%cLD8aGQzZH9R}H)j|Zxs#+T3 z)9eSukBO;|@`Lm1RCJ9cqR9jE7Y59IvtMc5(K`vuzhCw%J_d07q3qY?Xz)10jvXGW z*8G z7Q&|nY|Pj5J16ipBU*`_F2n__CX^vI=q!966KIQ?rlQV&W#yX&vGj|(G+38x;h$Dw z_=2(=-vUOD&p!b`>r9+JEq2$l(_^x$<#=rLBoIa7ib=&t{p2Y~*S?n9%u`dW_cR#I z=1cp|5k-E@%{X=Fnfy4TWDHJ4TJ0G_$WR?#@^UQRLXhW*DyG7c&I)>BnowAUcRSZ% 
zh{!r2(+eYyfK=+~aoq2na&c`_bDSl@RcOLaMFu9X2r|=Ha!8L(_H+LTNRFIlb)->r zZvlJ{58hkjFcFr_4#mwqUbM4F=FLb#Uz{$`-E&boPA2k`X1N=LyImL1?@>74mLbgj z*ve>De@2MwEdMSm!DHT^DsMJ$F(dQG6+&E)?!IzsT$iOlY1mU9(G0!Sv!rl^8NyiH zbj6;6cxgF9sQN~SXqZ)BRULO`&l4F1PWyA^Rd#hrsraTKw=Zf`hAmk2!{_|^l<^bd z1wFeVs?rfr;rR+MFRiwo5CE$jW(oFK%-u4+vaf2{KYkiSi- zy@A_MnA&;Lcj8NWHZQPcE60yTJB#_{UX~t1amZgQSrlZ=&ZT|vf5IrTb?DsRa)AK9 z$4)g(tRH1*kV?v@tLk6$O90&H$XHSLQst%o!4ay~7V`vIDA=B04I{JPvOt7_Gk(ms zb;|}K!7k%R!l_7I!@_tf%d6avev4U-R&0LRukk%a2Uhq z1IXsL_&3sIor4}-j-Lsza8WQteAQCqfjk+3I|qj2VF6>)3+hl*OB_OS^V8oe`L&nZ znaQ3Iz!*T`8rm?Ydj-<<&Uz~^-7U}s#TsO&j7_%?zqR$mDY4e8kXb1Qq>i1M1AWBH z#wAyP^Id{}V^UK@^FJ#2Hp}~bBszqUk?hH&%#Jq%Se5pfV*<7;xBinKXz1+j0FAKS zuleQ58PzEnQ0iN%T{(LS0b`=Na+qV~>Nh|vmrDzntK&;!%wu8(xPs||M8~T&dj>J+=V_aws;rNY)2!)*tM4c z%D|!sUuW60oG1jwH*v`N^n3mlHq0sv5c@FxB?LaSD|!Np?|+NV_Y>oB5*z3(jg zh;=$Hd!EK^U(-ME%bbs6WKxXfqTQ~9f;7N$O{A{+Pn-?hL@&VsL0N#4 zl=_xo(I4jsA)nS_MHKH5L_w=;rdu3658H)^PeJL2Xm+?ss!tSLn~Fo+!FtY^r0ATz zaclPL^yw2~$JO>sAK+A*rDhV80|c2nDbgP|^hSp;_eQSynGk=^UZd!>NUAhIALXwp zDncKdIAfjxI7r9RSQTexOKB}7Y+My%gSK1ab9{r*tC77Es;SLHx){1rg|_UEDq*e* zMnddx;wM5Z6Azw2KZ>&inDayjP1~{!R|j(8SJJF;qG(oP3kYr;mx*wC)MvxGuKH?0 z{xVY{^DJZM|AfKOi8H6g`fF5vw?wcocxz2CttO@X`B(%sA;7Mz4u;pNq;I89bfZ*> zE7k^cO>iWUp5WI)_(E&BEoQFEbJi_6pBsu10Zu;0Yp&yjxZKMyEt4|ul{^tgNd&%# zw{t}9myB#{tyjs2kPn=9M~9Hgh{AkMe;AJAgd$HJI9d$G19=)|R#M1DKi2*{X((}Z zRZVQJXY*|9i4&|9xLX)(n4PQuvmS{;fBLMdXg zjnSqM_hfG()-a+yZG7W7fv#HGd$W0rzvKyFM~Zqn6n_;+BQe%1Fd8eTSADlGgi2$o^l;}+Y{iW(cS6^4TBOVc?Ot|zR zap7B=0r=C+Sf-)fyg`WNpqWM*l!+w4t2eKpDUc}pUMy%H6q*fF!>u@8X-fF!R{;42 zS5Y%Xp&LCk0}vWT(ZMP{1LRYZntk1`@`_8!HW`I{Pkcv^S%;W$zQnewAdH-wJ2(JR zq`AZM_i%XUo>sI2f?T1!N`-Q7DSn;jr09;Lm(w%HeT`HI5k;zRnH779^!N7}cD(4m zECtGE=Cn9cq*o*?4wCJrcA8J;FLM5<-9YT}by8}e)37Zvse^xLvhh&n{1gF3lN;CNOf}y=$vV zCQRSMD`V$xR{$)hsmIL%U@{R!SxLw4TSMw2Qzu7b-0hq6@$ecLN2&Ro%iXgO? z3h}H&WSoweydAU)*vt`rvO2m|m{~!yq4ieCm$*%+n}dtrbW{?*7DSI>yy%Jlo= z(O^*%V1HIqsO~$9hlIKKbC~3j)Q5%8U?|kJ7dUY|B%kB! 
z?XhA<2p5tS9>1qi@xmO%0h=93vOKv}s9TYq(TigkhtLUh5m*Z`p3_{nDIpz>{>AP` zb~ggdu3GIWM|XmT2@0Kz>R%yYKhB_z0)dd_RMj566Pyhrx-<`kxIlovwG?YzZh#}q zXO7`~j)wt?&$on`1%zoRW8=U6M378o>+0eJkuKo;k^&j6I|)GZUy3?MEa%D4@CdEc zvY0akA{+a(>c}oxzg4hXvJrOxj4R?rL42mWyd0me_}WyH@L|%<6Jtw}yny8es!d#5 zuwWD9k`6OJGCZ~jp(LnWb>l}AvX{t%PK}0uIB8iDppHVRH=dg2e4z@eNA zaLAR$o&};^R)wvRR;pW&KfQ#J6PNc5W`I~0a3n|_g1y-ZLfz_pSTZXTerMbxkcy(j z9qt*7RU+NTh)dh8K1*k!lm1RW_#<3uh{;*}bS8*JfqA?Xw~Kbk@TBUN{loi$+&WFt zLB!;p0n$uAGk%yoPh*quw4R+iv&l-9!FTOa$!!kDAn1YOZb4c3iB5@v38{TXAa7Q& zxntYMRQi$-%7x!qO7YK(u95#E^YQ<(H`ETExW2I~Z&(^Q z`&~jLNnEoO+wKP8%61ersrSWXL9T3v;@n1q09Q%@W*Nbc7Xq{UyI79k0G5$OT>RKj zpDfbSNnf?O2xI)If?Ybwn=G~CK4Fvv7uu0{dMq1tnZD@Vo!|JZR(7M;pcROZGlUu! z21j(<^2a@#2863Z3TP?c7n1dKDl`}yS5<&{V5ar^bl~~sZaC@+}NSEKD z8Wd>8G?z364aPY&6^M6P{`ST*g8a35Io`(kvnqLMkWj25hEsu-FlK6wp+u86ZIWFH z8Q-q0yoaKSb0%r3wx8{E)NZg1l@f*$2MU7o9UaN@;3$#skF%2r@naF@I~|i_ewHpY z)`Nmw#x{J(i}70l%#R%TAZFXnfpW8SFuW)GkA7k1NE5>)=0zwcKNUzVa1u=xZhOqF zyvPwWP0B9$QDN?|M29a9rm-pB7iwN&Plib+Hwfm?J`_^x%XRBWWigx0Cm2aL%@OQ$ zkj^+;hzmwURQBN7f4nY`3J#T5Fm@wIA7lW0lVMUJmd&eVy{K1S)3GW?YB32Tmp=xm6ntFccQ<>+O=KLQDa!so_+Q)*md*LwiZtRo)&J_o8 zqS{x8*DXa^Bcxihmt1#&-)ohBM{Fg^491SJtsHyhz+CA><0=8vV(emD*l?X7vq6EI zIhvr?-Is%dM2%@h3fg0xg`~9f%PwBdb>hr;!OpqgKLAdm_xWRHu=O2Vn`r&?*rP{7DA-pckRX2}i!Gn;YlyVg_z477aYOH+Is`JK8@)zs9Ob?X$(ziN{H?DsbHD26dAc!7_~$zQI7jW_-i=8^p@jK0vIG|-iF1E(eiyrb3w)+%;!sUWSH2ruVD5g}pWj`j21Rd2_Z+JqP{{=a3k&o3z7V9Lh?=ek zo$a{?R^DO^NjJB~e{)PKFSK_(@t!c3xmjmsD`L$8NY=&KGPbcYTVIexE9){csV_xy zD>(IMhSwFLJ( zcq~%9jQFQ$+F9DU_^*hu0fZdl#E$$MLk#qtv3aKv`=+xhitUBC^0HheaE`ozCJA)~ zbi2fZXVFx3!pz_r?8mjA-%}7WfEy;KKwES{Q9}$D*dfFtBFEAr)eN20B_Mx{Ygn>i zeX&p|KIsmsQ~i^Yf_!@+#$pgI~hOScSTaXiU|%ppSaW*LokLXc32$W4Mq{z ztxET(X4pxPnS_-K0Rv7JKpp0wmFS{&Jg5a&;EHbr}c& z!+)}@djO`2hC5AlbLO_haDOFhtcy?#6qDBYOu;TgjaJ%RJs`w&O13xTbf+NKsk+$H zsZJHU9t1|S>d22K+;9Mrj}v?&mG6Dgex<_$1kRg-V0?=hPxNwe;65RA%qmv5M-4d# zViRCbH4xt!hW6~HB_E|lkarO1!ZnN(`mkA!MnUFT?F!vxZ!JcXU{`El^m6H@f1YPJ 
z+^cZpS|Gn7kP=Lq$W?&LDz9KxSs*6nsY2+ZRcLzSnB@=_KTZyb-gKb=SU?-8z*ma& z7r9D}87tQWzvPtup?g1(x9%8&P^Y^?x;!{OnSM|?A{!}G@YgM{Q~@b?e?@#-R$TJgrK6= zKt(LbHEb#q5t(srWt=V09iwX2+*$FcXp2e%_S2ZTKQ6>|X2ok$VJg#i46sX1%ta=y z3k5hQRBFrPPdPnt=A@Z%-?98o!`8OUjTiKcsPeU zOuq8=l_kkPL{r&REl)=~9PbR&C8;SZ*?WaBT;Qs-GS)cJ#8S@o<7XoHblt)QRrPVc zC^r))FEAN;?58}ta5|Vx!_A+Bku9en zQiWMz`l7&0@CHgNS29#3DB~!XL*61s_LuVfIBjB4hHmt4!d#P9*4wNR-WKATu&ZSr zdPe|FHKdrwvDV2@&X`u!9g#z^NscizFq5)j?h)pUIp~(={%Rpe5a$A#Oyb7;tyvtH zr7iU@vos8Mi^~Mked9yk;i@f8S_n;z0|MP(^_t@1>*oNtbgA%c0pc=YmEl8|CMH3ibS`kESJ0N^T8dkR zy6Dx#c2=A1@z!S&%Hs(&CL{j3=T%ssrYt)8vVYxRT$*7t zvc^fBGsItnA!K$+{Nnjg*#NIi4B8J29b$LEuGm89Jf#^Oe*qX}r5DldSaBDkN)X?# zsji8m^lauZH!>mLDZn+Bm6+@W2pzoOLQv<-wt=w$6YG^ZmcMaNg}Q@a1jQ`po|j+?Qc>1}XzY`b`_)SBQqfOjG!t(M zk|5R9#Jkt>3^!R{4Wzxy*RKPh$f#Jb5h8XF9p^Y<6uE9eP5q)-Ts;QqEK}%bX1A@c z2O`UfQ>VwadbTLwErbP794*8zXt$vWX^k5y5E7}_9r3LjfoKuN2JQ`>9&6uhy2$04 zooMs+6m53cvupHnM!aQ0GZyo$q=>)x7BGL0E8<}LP&_Bd$!4>OKpljROIXHV0s?Bi z3i_r{7r&vcMpO5mzpNnA^^xXcArzJIzoZ@CdwB{nmYR9DTS)<5drN2!HM<`zz|E$> zp^QK2h~*Q({+hHA>~4y;foKgvKaNL5kd2H{Q**wQ{aRnQV1Dd;JHNV~H5}$^sd}Iw zN>)`}S6|n%cWiVAwCh+yIk*&FYx0EpMM9ya9dd%qpc;cGlM5n{Uq`9%VrNYize5DLuxm_e)aJ}8fU z1DN>Gl?5Oy4FuGW1AhhK`WJeV9d+z83gi~Vl$D)%+L3(*SzOWG6YD+Tk6^@#&wbjcHvbxkmTuvGr|G}p*&oDQ2hUe+;ttC}djo1s zm_V1dl>l+9$l@nTXGlRWWmHiAUCD*dQD_P| zwm)Pzs1`&n@EpxJ3+dY*1F&c}vI>R<&r2WiIeoo@@~l)j+2so_zOl-bT6x zu|a2@zP(7xj)J^MD-w%@QSZhs1!25Tl*OW2mP2vSZvpHVg<4rbz`P$PO1ge=W^G&} zNleU=F}GO?E1Yf!NeL+g?LN19VXVsEYebRGTC*pf7Ut%{N<7W*tFNkLpUzHI)-MQi z_BsIID3ry?w*>m#jxs|pd*}r@RMjf&!ALxkXQ%a@oE~D2-|?pVNENQt=$;k`q536u z8>}IEVvzu6E+Z}(9dzapMhECpdgHx3BNq8u=db;QGosF|7@w?+Y5}mhs;8e0dzvVJ z89pH&aj_6{bCCF2$su^$lTO9pD18D~7knTjn`e^86PxwHSo5h$Zd{F}VPqR2W)!1x zQ7&d(aYGJbBq%{C0tI?Jxg*B|Ya&@+tgKh|@QWH1GNUT)lT-WmWb54T6AxR2Nm0UQ}QP z7-mMKWRfXil9`ZXW&jnqNp8xd+>o10CPP=Gh#*ahh)9#(q$% z_w(%MK-|X?<|U*B!+EK5nFqy=MOp+= zt)lI*zc6=^~qbb}DprGUjx8w6b&Qjf@7(ZgfDS|z`M~p5PLIPz5WT%6Q!qP3~PK`rex+cI@S)E 
zUblM@$gNPMsg&C>!d!&xY&~#+$P+ICGCbzZo?nf3MK}ej3D4wM;blOoj%3agV#9H} zXu4jZIN(>yE0E@Rdi+*)(V@6qsH@b9jac3=j}*{smPA=RUKd1D>gf}$rd(Ti6~y&U za;pT_tAshP8C=Ifc8eDTQ3pMC&3(|*U(3JJIZ+PfG5Sw1*NfvLG5+aaUWI-g)XalV zGJriJBx5|6a0%_n6kX{JKzAcTur9VG_X*=!^|*P}E{~&|mDbfuIyzRmyE8}+fHDST z_RsH`4N<)~M>N4yX-T|Om;}^*;R~BWya^0HeGqe;QFOX&v zTPVq>Pl`qunmlz{JgeU>qKjD}js5QL0>L!iJeQ^GFCty}#x!0Szdi5e7&lkbSdW8+ znYptTG&ICdMAPZq_{^TtzP0$u-aM>V?+f$F*(|rVxchwr<)umeHykH_0HE6LIyD~C zZ_1D+SvskYL{ml!@f$eX(l2v91aW(C^Bp4g2-m%U+fSLawm& z3H0FYLn!O(>E;n}qh50~q#0RBIg}DR*FiiS(qj1wW#FXESk8trecAoYwF?ELc;#+@o{wQz9t<4HcZAxfo{um0^r8WL_N;hvNGmlg=FE48-y3Yyq^5 zn?gh5m-lB?69`{mJX=XRcitq(Eg&-u_HxYpB*zHKj&d25*S^B2J`oh;b{j8I(d(6Hq(!GrR{xI!A@^u0*T|V6L=|Ux4Z(HW$Lz(|8H!x7qJ0 z+p|h!zo+vS@%k%~#)G_tVp%cq5i7;+g56dt4q4-Bgt5lq#oZHsUm1ehboJ@hWt_PR zq??mu!K5T!~tqbwH`M-bV7;veCLAuv=ETq>8E$K#g}I()My+ z@}C@8(GhXfeno(JEiFGQ?@&w&FtY*+DI0t=2%!~d?O6MwOQfsJ0RkepomxHqkKyl;7qT|b?@JCwP z5gX%#uK>|~s5djZP0gLudVm*Y1hWj@sqUFA*_w+eNQ@VHCS z8I3U%i2Bjf5icr*d$N11e<1$8MP6dJOm#_+ct)W40HByL@sc3g-BcNj|LAvCHqGRX zEyhi(zNJ&t)YZqP`psaNU8%??=j$Tfzcn0}xUGAf5LbiV8H#$N(k`>`)LV_qjms* z--9w)b;Z|r^!FIQ5df!6a%wKr3?b=t#5n>yN@`_7U+F3$+s#~(pX%o z52+W&ZTj#bQQ0uSi-1ugqvMW~L9-!@M9*wwahO_wNiCtpG(Hvx)9Y$lYhs<9AY3QR zHJVH9afdKx#XVAC6P+@P=cxo^@lB^T=BV4 zw3*m~B#LLb09T>8Jcd&qZ%IBa%wtI=0WST&DTK^ZJ95o>OaY}9UCBGing>D74vz+= z7DbE-^9YdrH#fK5A>1`)6JnC5JB1tW>P%TVBr#}H|FnSSAw$aXce$yg%H)}0`)6vx zt}NKiYgf0&?HPcM68!}1ojr{dW(exvUE?0nu5c^QHNx_acvu)cuD7o`;uTTukOu0* zaR6I;|FHl!sjY+v#9;5*J%?m%&`AHDAaY5EXB2pQKxTc&BrIbgU}jF) z6Vxn2O2!&3ZR`mG;XIz(>5bT%MY{sLb3|)z5nu-DNXynVf3F;|&qxdkA?Iw_MkPzm zzYqwUP_Gk2{kq~KbgM)#kE?6)Y#J-esCnEV#QeeJXHV#Rid0JoVhzU<;$|#F<{!UF z3bL1#%xr4yE^F(E zx$#XS1B-bbYl~VCHlJOI%KNx}PYgE%FvOc@1(|(vXX=Xw>Hw&P-Z#~@SUYp#`(5H- z{dRlN>T#a|tbxI;8G#jDM>`TGS>8{Nhtsz{P}hw7b30t3Nd2jfQq$OV8Gh`Zx- z(JY)h>+S6aMY!PPz;fR?ScPKuy>pbHV|sf;gK-a`v;?mX)_2*zYlL|5z$*gx&m^?O zm*xUf&oZioP?kAwDk0;m(txb4H3o5~ITf&>d$Oqa!?c>v}!Hz-ikNk$ZA z!3Vb$G=s>`*l+0b^pA%Hfs}}nt(D~@C8rizX9n?yjdKK;@5IGUY0ekq4_T5r2)K>L 
z`vZaQu{7nmHaZ_kJ|0WgUm3Hk)exC)Dh(p|fN4eV{$u z2xtK>4le+iQ`n9Lurlx_2#t&{HbXLeCr_CkKhbYjgy$WG@C@QolrXn}T_rhBb{KTR z>KK=GhY4Quuo6;o_GD*YJSHF;xOkg#bK;*u>>8s8IB50I)>%D%L<@g8#}W%*)t<_%t1R?yqSeL*i}($f(2Nr7?4ooTXdu3lp$vd} zdzr{Cn9!_DR4Y84L#SD$baUquJ=7}3O9f6$YkWC4bk5p0htkQ6b@2m1pBd~JiW%&6 z`+;U$z`n~<3iU$hGB%bWtvbKkAKKMGs8)zIt_^-0jG>=4a%`fk1d=cM1iN=}0B-Ax ziwgkv21d!w-ywyIn}Pot<9ddWY+R;zM+5OwL1G^69_JSy;2J9x{cS?Tb}lF&;r~-4 z)$ORrST`K|eK*f<{}C=-FviXjSDQ@o%#jS#}sg^zc%9UY(WY2pc$uvAJI_tL-gqhv{U&35M(@l_d0KAbdg#4FDnTlM|BHCPT!PB4=4+}608JnDEh^r|GCZ%y!if2lY zY!XBjrHD1#^7M@oCe;~Z#qp+8OK+9 zAhODHK4LN?#JiS8p^TVV-J(GW4X5j@+4{XuFd4Vww)lj8yZYSjkV|CjvpCPbn^#h( z`_n?q-5KMz%eSOIad#+|_U8X^j^(`wNuB8hqQmqLHJ-nqp)OYFgP`$T@KvO+SX;C^ zh4^h9Xf1T(;fexH;*osx06mb8U9pk^P(Qzh)5B zeci2Vh5cMEjQ4Q`_rzj zSbh}1LvEg|1w@rOT@Zt%rGgK|SC>M#%6_`nw}B56Z2rtF49J&`fqF`|mhich+3_-A zPKgmDg>%On_i~|b1(T=)U+(LRZ|5qWwd|NT33F{5dzQk?LzV&1)Dhn@|BXoJ-NK@Z zo1K&{D<1+%{cGpcG{^cP80_*gl$YBi(QdD7h=fg!-33uf8)k(RGyYituG~ycM99zW zhZ>O~lbJa>5N(HpxM)@*sS%X>;X++uTx8fuZWZ9-nsql8p?2C4In;m!m*aGN?MR@k zPy3L4`;0jIv}&>XvxKa7l(XmZF}=$sbf*9_93^*!Wu_bpzXz5x+#c@(vAH%xmK|=g3lh0O$Q-v{+ z@c64>JQhV8G>;jWi;p+iqOepQvEYZ048i{K^Q&>W2!9naO*tHHI}yk&Vo#pT z(sRBLw+L+nQH_wsFBV4DJMT0tRz8W}YEL&^UX34#Bu{w*B6#YUTK+K*&6qFO6|BRZ zqP;-1+G6COzwZfmNem!@PxlbW@nkTsBAh?(JDb6VgGpQ^m}(|ZNLIrtPed6(WFgQh z@>+cF6cBHBJZp=Iw-=wm)WN6fA7_e_kK8ZP0X4xMg}mA!gGW4~l%AHg6~&j??or~~h( z!dxIv0ZOa=rJsP94f~I1(;SGm1-VEa`A1||eNULnkP(4M@@e;fU$9%H4n()hR-Zs; z+0u>QGo`G42I*XG4H_*9*-;2>@JS{f5Xm>JVv1#?bM_i%0%Z#n@fxTbQc6q_N|(s} zQx&U-X1XbCa&H`2e8USQNLhs1sm7*fk;b)bsB~#NB_P>m0?9}Z7s~;BC(&;8z2|A* zag(C)r}>B2Zi#s*U%rY!K4e+K4U0>?=j1eDRYu}&(X>^EQM7EF8>apY#4Twi8!E;O zkHdtTCwq@yR%H$!A;eY4VuY9DsN&lhS_Z0e3X3a*!o3DeD+9}7mvbRmM`kvl={fK` z2spBYWw{)8h$1Bp>I$m&kVqGbszgeQ$-l^v{YzZ4BYeX7Anw0e+_lhpx~Cv>kjdKK z+a`a4y9BBW>5ED=o-97og(D7X@h?G)W&*w;#!%RW7XVN^4m~M6GEdzJVR5OSH7mY) zA-~Nh#=$U>p=|YM3iXtj$*WP-m~>GN%uraaI8%Tdg=&ipATG)fEguS(i97Sfr11b& z^o8*YumUrcPWUCT+scZgD9G`iAo!=N354aoBu`MoF_~~3Q4z!dm|Gd(1-*gzo@n?` 
zhu)9XYSyKYbjJ8i@MuJYyVRE=SP@Ssu+^q##0r6sqT$6^SCJS-5s@ir zh`(`lo+EZo?V!je-xTN^i(?Rszd^Xh4tqm#W4~QMHet1we&rfy`UxX+wGt}Q+ZkhI_nV>35+b8}xx7di z^&?p2?d-!{@wr<7XhXC7)@5ic$BG%mI7pGRuw*C56+$7G^l_zFwMgeF)f3ZhbN0M^ zGDak4qAUxblTc~mkhIqAP&AF#*8JSciyaRj(iL<2<<#b7t1s4tHFHKYd?LV^zaX{OA*8`#yC2QzEKKs+MMY*f%k z?J+&^RFM#;C`TVy;Ws3tQ8+eYjDl4g3vey^ODXE}lj9&RRxdg!a=-4Yt#VIZM02@) zOqVoxNkyP}REHj*6q^fxAM{F!12#WPfTdCXvej7aUVp(l%;JJrcZ~p79gBm^<@DIE z3veDfh5^{!6?^=al+2K+c*;Dc-x(9oaExC{4G;WXp3|V7;z)MJW%mJQQ)#4gRA-_? z1i3^xYc5GPv$gKe(<$iqFs^qS3UcG-&E%z(xIut9!)!*^VCC!|1)GI!3^8i*ln~dB zE8AVkzN>w5toT{RkHbd-#PI&f#g8o(2iuDjmQ5C;th8ZSO2s70K5GKlx35b zW-IZa2)CVsNHTQ2E(o@19B_6|nxicr$deIqzSNCoVlP3O>`YZ=%_5PMj(FN;lf)rH z%zj1gO_B7De-O+Q6P+b~KLml&YWH*DFs%bgA`O`H1dv_!LLC=2-V@?E&dG%_{t&bq z$7K*=$Dv;<$1Ot5{U%0PDSj=$4WytU&BzxX26Dv~qM=C_B7gSR9OE)6m(@zk;sIg) z6urBAA!Fyi0h#f2(&;h$#^N@Ct_g=Y1z^#nOekN=ev&Rb<6mAROs90Kt3DD+l{rqd zB_ZzXj{v|~v^|A}C69ilmG4_e@H$ z#tZ&{)kL<9jYN77^i|sEnW5N4kS7K&j>EU33ShQc9A{?$Dj`l5RQC86`NX`Jn=X?0 ziZA7v@LDF%`TbCUxGb$3=QpW3mwOpZ!xzON1s^HW9m@@YYEQO093^-{-NQ8EPCsV| zhD-Xx1A6xS`E_x+5_-1c0LUqn5&f(%*R0A@MmYPuSEOp>04p&sCcQ$cY=!VM*HW|A z2sI;W6$3PW`6_^!)vM(nzu?;hx$lT{OV}^%2~Dp7y0c0hnblsZnTLe4o=)chx>)C* z5Dcg$VyoP(-~OPUxXm=uz5Bj!=RP;R*&wMSfo>*Op72y<$4Qu*iIceO03R2hV${br zpnFaK73gFQ3=j-etG}Kj+@_Kod}fCDi6kj<)_j9BbW>|ZgC?F52^VxSBc`^dOM+ab z63epYX)OOH$&5*PkRIL|E->voT%950h%gWIhIzB&|NaGOrq`iyE5)3*TpSaD65I8t zWv3vkc-VuJlX+$oeoO!GbqnilnR>+6-UdMIY#Ed%UbiTBATPTimZLKfWESHGf*eV; z;x2*a(>%6xxbkL!wBy4JgS-Dt8b&MOm?U*EkkWQ^m0$+tedkE7>xnhqfu^nt(PiI+ z1)C>?kXF?hFTBe?)D<;nxlNk-1h^`-r6E+VniL82q^O@aYkqTMO)K$u3{wozT{t)J zn&x}X3@%|pLw|In0B6QLV5TDZ#|;AA{u0+@G)`_RK5Quy4ldSspEPd!_(h5c5u8p+Z!F_@O9Q5r<(m zQI1P2LC*>1>zOl~;ywL_H4K9#B?Z&|%6|ahu%Gpaqd$c-OB~OL!yvcPgi=gxLw)Vs znEPKS*RPwUh!(d9@mGv|lnNHWbegUVZxTyFPFQF>B1HF>G z>HkOrr*sQZ#!lE3J1y6eYKj$7qSwDg(&?I|(pO!$JTxt<<6=44 zBEXf`yT*#5nAY~Mt(+%hmEolRLjkZxfholCXNm+Z-P|UMO;;g-Ti23e;Ke~gJdEWo 
zqJZ{81=NJ6V^4WBZZEz|wk@vueu3{8KK=3(>xd&nQZiDsF0sUVt3rDSh;@5k{1_wtxr)oXi!s3NnY_kF*D5{AYp6n1h)*D!vG9?#yTZ(vUx1keOI% zV}>a{>JNqKv-Uok+7XAYk$Mt zi2!qGHewP5VcBd+ud%ns zbAl*~obVB8Ul)p+qqe{qQ5=zB@kpUAkQD>#h{oxTUjih5?n4q{jpGGuo|9p8v_38^ zX01t>%Vguja`XR#SZQ$1;C6_<^n|OU(`Z5}WU*jshkgJr)$fTi>v>~B;o)8uqM3nz zK9OtI4@j!dv>@bu9}jo4QpIZPfif6a%&`#+$70cZ)H#&in3=RbB-_mNou9Y-~&v8t{oQ_jO|m;(Xjj026W}`Z>L8pw6qt zdK>c(ja$&z6dPr~dlnPYRKHyo%1#cn(yYBzfCQDq?NN*VPMhQ@35bATu6?ILpq9_^ zn+Ti|^F_L2ni%cuy!!|-3lL8_WrjXAON0-fHY;*D=qt`eYaf$gNF+^_?wVr~vI;>; zGYlrpr&rTz{;RRi%4A?_2I zY~TvY8C;46<6ZsZ`lC)|0oM_#c5^^RI`L{)&R-Kny&G&P?})bqP;b3s#0Yz9D@f{@ z#uBbosC`!08pw5G*Q=1p=Y?d6B#)73UqNJp)&M0(oGj7xDRmgPW{cYOzjKzio7YD_BNv@ z>0=}P1|%|Qxx9=gCPlb|w5mzn!lI+|@mII!A1_8VBh~!HuWbdpQA|&papTVgs6Y{f zBH--t!49N!D`qm5(Lwh|`;Iw=MrnW>)Lq6baZB~6Qy|wESX@(LkLHEtp+RhsYE4;L zcyyw0FOT5-dsLBYeR8p5?j0x0{6gu{i=2C6_8}%%h2l|X{GrH;(><;n<#SDjPD0fiQkqy3i3vOwNm&!wSV!(X)lP`CZrxnaRVk<4(?t51Y_? z65J*Z6hty?YznlOmY`=djAR{UJiyxUnRru}Cn?I({@8hEXw8L@P9BMmgG6RyZKSF| zZU_L!W@$Px*E3(P73uMTHUkM)2OAc*H?m}Oh!j@yc5%*F!v+XPeU?ZHCOr?hb;SHB zrZHk9@7TjdyFW@Di*+wFChZF3rZ>uURo%9?F!L2Tiw)`9(+nt<1v=HfSR%j!ZZ6Ru z$z6V&hMhl`-O+0NKM)?jtfGhMW7yHFC@JNrpKh|9MF8eqBhqL^cge3SC>67VNw25l*?A-?FgvDYe=tbu z@HFVoXXu{#!_7dzqUTxThJ-O2i0x9vwSXG`tVq>JI8ju@sJn#0#Kmkr+=_U-NTO$t z5zL3;YkQDHvPg2k8xY0D+S^@;@9DQ|uQgyvj~*?by(ge&VIBJ$9ks6zTCJ6sot5Xr z1-QCs>eV^T+?fRm?WSQbz?g#tQL7!`&Gdyn594;SPBdiz6W@UIK!?O)yX zs;xDWG~A3a^aVm_L1TSOjMh0fMDs4XfhAwofw;>)wZJdn84JAfj@kV~1@ z)2nwWwo)PwWFoc5q?KHQrU+(G=nWJtK2H|OCKgQ%rLWB<88vFfUGZf7_6o>fJ^gLc zI>qk_6ay4eCjx3;w9j$&B$Vzx7K^5*nvwwXm?$r@4nsuvSXPO?*sh*Gq><=A&R=eC z=b@`c6?`PVEZUPc38Znj0Jnc8a+qE^y+V-LgxS=ure0fMxDV*nB_3rf@*T z=9y2PvOO2Aj}%}|5}2orjc~7q9O1N3CK6*$fy}_R&G>mwh!@7j`g!$nT_d!snC{bX ze%feWo(*aWJ$tvu{5+BLr@E%MbrTT$YON?n-7Oi_%}pq_Xi@G5iL1vBYQ*c6oDQpY`;hqT;t^+aB-0q?VC;z%%$H6 zG@}u1nJCQI3!3wyOL^~H2mmb_yVq6eIcGvAE#g`ZGoAD%bLu|6}vuVdKrEZG^ zWkXu~hX>4wi$IykLw@Auwv5z1+0hy6?aRN^QudTl#yhud0O+elS{$NTlxxIbPZI8c 
zATt}j4yV_9f#PDQyUGqRNj|tXd%#8a8Ff%1;V|&yigaWBzjf^|Va&U$a_pFvZ zWX&?N+0U|cbmd(9VmM2jS7e9K#q^N(*Hr@PimL9%NCai?gAp?haw&YRs9OxjIYP{R z9LMDK6-Nki{S-W$ONFHSQ7KQyDLn06337L%zqTEQd6nRd2e=<5Kn_@?oggCpowv-5LX)AJ?2@h z;a}(k^eU}*^HuKP%0;m&w$LB0Iw~M0IUJgR-Bq>SLA1Y){MrL54(Ive0{M~vyQV$wumCH`G~?uF|kav3&f*c z8VhE~Z34~F79CgHv6TEN`;cO0<4_lW7VW0C^Mo2cobnw+a8G|w0tpw?Hq^v7dr3%b zQQu+he@2x5oy&lYH$|JVgXs5=i*D}&ap9OomZa#!tMr3;(CFy`w7s#KASct&IT`=g zEt28C#if~n7!yQ!y6vU~cZ~sP&+geeV|~WMd_jf3%nAgV5m z-iyZydhPc{;Ak+Y*VseCzFrvyf(ublrd0L3GYBf0V}y=ad_tPb)L zP15N51)Sm(5w=#!l(nm*X04e&Z$b3vw`(~>Ku$!0m4`vxlolcw;QkhC3NjOsobS(7 zMGqCFRg*1=?Z18$($(O=k?G-nyc7_oa(F_7W(G3aE)(V@U6PKCrYy2I3wP6+den$& z{9ce*i~B7WpMMCT5onlEAAN0E{&znw_zlHP0^I6amI5TE)eZq6Q_gOL_KLLG)9-c?nJljwy&E;0)9(IDQg_sdF-TjyhrwU-LnN!mk_ZNTS z-_2?0T>;dQjaGJ#JAMQShiHPLK*!7zp4( z#5F(mSLD*912p&6J_GGt9zZ`|jejW>Wua~?5j{5!Jvq;*)Q+h}agpB==1%NK>CH|U z`<)78Hfn>ym41lx>S03NYE}f|8*O+R0M$g?(`}G;krdXH1)@JE`+HtZOI^)A(Rw<6 zyA^t#i;If=#ysqr5tO>p#jljm>jD9GQbbvc9O)0IPQyX(u{#S_f3Rq$cu1jsg=j5qmGSeU=`0Aw&6FuqqD{YDYt}j3e3SI` zu~2i3vmYT`5Cm2{JI9*2y*(tO^}52Ss5}(<_4a62OfQnnz!^ZXHsWew9(-Hwg0IkX zqRmpBCX)0UhyN6i7P15q_bI-54x|~!;|jdC6^%m76|X z_IUZ{`G=NNc{uAlAgaZ-g@j18R{BMrhSS*q8ec=~g}fTenz~d8$XEf3VF?WHi24ii zk2=arRLy8CAX!Rv%6>_Zc}wuYUE3zNRF9NazBX?kYf|4%y=Yrh}=$7Oo zN=3H5*x7NmlJ++q7tL7Nb%$MJ(_ix2t#9PifIM7^cZ9(s%%)Oa($=#s0m-IDvgTG} zlS_d#N43q90vljUX09mhX6Vkdl#pJ2St(BNELVaZrm^9Jt3|D znmNQF=GTY9$P?|c{28{r8ZxUkQLj0R#Ib_>6?+E7HvxfTr8b`U;EmdZsLog7o3 zM}Nit?)xOrA_sBej*A34t7hWIryb`FVJ;Ex2Ss*@pIr}RF3cg64$JLhLTK5n9x{rT zMNwyB4(a4nb3>jSqZy{1c%wiT;)w`rcDT`>^m0f={GTtg?KTVIfO==V>d@ zaY&YI$@t}&U%~Z4=%fbb79-+)k*;x)N0;b+=5Q>3JFwT0(o#+fKM>7upWoUV>)qiT zM%hi!QS@fyR6UzJDJ~G{YM@nDEHcLY;{x4Mda+tr68ql?;{KUMObp*rhJy*uS}|P8 z{2ESCVmchXJQqD+=s5zieJZ~O8`u9DbYd8@I)OrcC%eH_sBPtRD>D)z2{NVr>CKfqg2W3jGM zx~|*;t(FJV>E<}0u4^lrPw9K&1Yuq{M$uzd# zW;ZyTRO95t!E&i!*sbdwymWUzl-t-!(6QmTNC50n=8C89 zZhKxR<87>he~f;+>Wdn1P~Z6Xc^0+YdFzQS1(@e^crJ`n*Bu3dQ@1X`{I>XLR+%$! 
z?Ei=SBc?(dL*;rQ*)$vJ!!-))#_%7(+$Ms6p{MahwgfvV9!W##tbeGIdPFQ3p!aFWAu+3!$n&8m;OeGs-)S7__p<5^AzpeuA!&@Ss?X_>%h}5 zej+5=4Y^EqL%{8r--;=6P8^?h=S_U(-6jIkx|Mo}w8$3a@#y0G=Z-;|Kn25?ikpM78Lr zc{69$#gj!6iI$A}__rXpyNyc@aB(0$dNTj4xm@myU7mtA$Cxe%GwJ+&iO_7e;2WSz z67hql!8`~*8&@|z67D8-@o*jk9VL3lGk|H_rSr(KB7HcM2^AeWp-U9;<^%oVhNY`% zoOIWD7ARY9h!H##=R5~shWB!(tAG4bHcvE$f?X~ntIcxJEdo7CW^pqERol}ULfAVk ztIVbypU*!eIuf2TTndeK{sEYc975(w5MbzkNvNyWNEC^7_SF#NVsVM*xQxcnp!qdR z#$TMiV?8Cz4y5T}0Te47o-ttR4>z!$5E@n983Axt4k}o{S}#Jg{t`19HQQuS6U#$C z5r^UeA!NYZ>FJh_)`dk9jDw15e3y`H$fKubjN&){64*q|WyU}>j}ryb!o>*kxV-lD zRpTr{^c^Eu+L3ETx_rb+JsH3L%OEWE4FnSXs($l5ceiPE5`H#&1<1|8J8N1@73~b> zVZx?KbwX$$+bD7OmWj#+o1@r`#RJ03_*UX>%cE=ESAo3d_=z7G9MV0ldHRO}c&s^7 z0`+7FlVf4>fr;ycP${nZpt|CA_iHX71)7q+q-uy-Vf$sc|3nF(2vNo;n)FYoY%;Rsn((oFUL3*JjUCq8B^q4N!WKU3-ZAWH_$O02URF z2FU80zeNhxg>E?ke!TC%H68N^;tsH3rU%Uc)7xRB=j7Z!}Bg(v6BGUjq-}c?c1Vg8mGOUa!}7=5nnQ-O+@m6=&toLXD+9 zHuwHGOQ~Gj{uFFbGT1o?@?fx5M+4a?dPHYwkZ?F1;>j+| zqXlI|zq(zL$e#pzv>=(GEbfcf1ewn)k15I_r=eI|;@27;Rf_ z1tVB&Bq^}E9wtOnlq<`0^cz_McqtER1hOeBlED3k0}1{)$(y3HKv78{)2B844xw(@ zOzHc@_3wN>$JmNEfHQnMgt=KYnF(${kf&RpE)vB-E9GhARH_SPB|%QZbS6sgo+|^e zOf=4)*BW15#sAVWHcU9Os{l8@34pW6^%-C#(Zx^O(JWG3|U*O>b!`& ziVHIbuB`+(FTU(#eV8P~ZNS!7!sjC92y*lHo_NgR;364^bX0m{ai<_}yLeHu5W+j^ z$1@G9^M@;gnSnrVDbR~yXN&LfWu4qyj(;jCV@mPj^>Ws|Uo_1aOK%-)zlLcP7YK13 zTuOk3b&A}Az5|oAfGpD#5RJ9*0b%ZHPElG~WSxAyNQv;DVwT6TYdZV#Qq9V8{6Vyt zA>Jw=)nC>Ia%T~ec}Y*aEP#>LCs)OexOkoX6T}F9avy?S%~RNfrIUdXZ<&9T1cfLornlYewJrO(+1rDT*v=@Eq$dBe86< z9)PEKjl%1uD^=43Qgj*9m{+qMHvn-%@t%T3c*+VgCsk`^csYJ3%)_OwtcMD4$}dt4 zu^&qFdBBEwW{o_ZDmB3?8|BFuRfMkNB5=aYmhw_XgG~-emv5Y>oQF3~8-}8|?#p1b zirJ1sIGlP{fLk?#>MA7n5Ki@gwzq3MNxd%-Q}0i-=r1>~73+s$HkAaqbe!FBROd$6 zSllkujYc$KZK9(t`)dAihgK5~cW(-U>FgQU>{i_jit6oy?=0@n)%Z#V;H0a8T#6ks zfaM2~mnmNr;BushNb!iivN@RR&tPLq!nrKj+{-04-WHGp34^&@((*il2yZ1v%l5;8o8JWnpdx z+5z1&JY{PTI7$vn#xKAsbBzFZU~@^fvaYyZP{tDnl1`3OA7!6%Xe2^ELvQzONawMV zSx^alQD1?qM%RN2?RVTfC7#7Wb$$?^2|KyGzgNkR4FZ6VG`ezs$A(2fA`erK+UPl~Qd 
zP_V9^wOf0f;^mx@VKAx_AD$$`oGZ*3&%!WEM_(xf77&&pU71YU{a*=mFU=Vpi@QZq zIDRk{%*;dat^l_JZ(ONEV)w~Ftj-*LQvBDqcY<~oNI+qYPSGbm5j??hw$W7Fb~^)7 zd*zO7#u&yaGCNF(TR-r`MhCJue>4l1SgIE&A;u39_Az4Lk;se}z*Jt2=JU z1=K8S4#Lj^pI(q_nvLevoz=&YAt}rKS0vYBRnxkrfJZ?F;aO}E3fGaZttr!EJj5+<7OU?>S48~-mePvDu_X_ErkM1LicO>V`6Q^-O0*=NHE1{SSyke z%lb-nqy)0QE=7{uR}j@jEakoGp*Sr|LiII(6^Y})ED2i_xv)*mk&u#%BPot98BDl%gzmRSV1-w3g|Or&<1*>rFnjzzP;=?j)RUCbSdg|nS; zAAE;gZjN&zH5w!7#D6HnUDLtZB+srZ1!)duL9Xw~K2oc=XOFCx5H}(nXpjM6(C>GIm?c`i1^6ueDMrn&&SC zxmX=8IWzsQK!^{;3)OMp{QNU60!gHZvjy=P9=^EZYCT!03B=WCVTb99s|9%Waw`i{ zRaZ5-_Q^5C2-GbYxql>#+0(#XT%Oz$(B-0O^Nt6t^ccb=1tO;a*3J8bpz2tHOW&5?=C^x5ffnWFhYEAI?|{M> zy0c#)O9*B72{;FLdG)AFh zxG@lGh^Bve5~wxq6p^(;kKD(SZU8Dw{qa{}#+BnaL7wNj@Pt$%pP#upr)hVH_^ZJ<^Mqio%;SOl;bh zr-Y5t+;HPonCp&th{bwoKY(X`O&`%65N(bSwEb_TWD=+)sz4}rO&WI*JyG-ZD+)cn-ZW5SNReihhN!e5@{;D})Z+H;BD zT8--jOca6trXmR^%th2IR8B`y^d!_wYb_6AGVgXmLpZGCgFE)w{FX3`)9K*}mEmLx zOU&UYzBvK}V+R!{8}aV6CD7T-=UxRbkT4p)F`Czn1qWLeM%n4YTsO3aa#mA6t-Tb~ z%}4Olee`r^>2kqtTr*EGC5MBbj^!DkP-;Usk&KhjtZ_IjCnpCb^vARDOy*ezt}Par z+y$lLjSGa)6#B2ZWnoiXESfRWtLnv5qTDcsczTqu92+ci)-pMEBirjd|4(7=s{RV< z%UJOc0FM-`CfM_0kr0wFK&A2=iv6-AoCnbNYIIz9C`nwI2ECZ@rf9Qk5$CNeBKW++ zK;7CIJbp$L^EmqO9GcV9Pk7PJc(#DBho`o#c0`{2QWW7qL z*~1gfy{K_xND19LC~+A2hYE119Gsbjakh|&o|dK(KfbuUNXCN3bFXoy5cklWF+yLi zeYA7dooJ5wL$Q$n=RAx3jvGcd3t=IyM*xqWA3(U6dZMPXcoV7n+s6P>W0uA?8m#ej zydan;=do(scPtd8wrAlnJ{9eTvp*8RH63~mJPy=(GvV5N8?GeK+^E(5i@eh=$lSp4 zKsbdi0cP(k*(bGX!^j@NE@i;)c_^6rfKXPrUS6cVK);&3iY2V!Gq6OcQ_jF*nIa#(Dh%%FiC#SxK+x+Sf@G#!RJ?L(8_Ze`)v4po(N(N@i54+jstxLU5_Tbz0+#hq=e3#!NI_9 zi%$hPHx?A)6vi$mSG1Qf7u+LFJMR(v#=YoD7J zGf!_FaBkV4fCv)I8&&<$DaiFlkBk~EE-8?Oj6<04DJ;JA_#>k4t}7BRu~fNVLwERYhquutf~x0z`7H{mq7 z9p89~0W*YcvQGe2l)Z((Y`a{Rf6$7xWh`b1AV)^rVucfZLiEHm)orU%{Pyx3;(fjj zEZ(uo6+mn@t#VG;L4?`M=0$AO)TC;Zg*z)Rv_tV{K}^rK{?T|#zujc~I(Ydaw!9LA zx{O~o%w}1QJ4KUXw5L~AbE zi(o9TbZ)yY84od;zF0slI#r^u*;$aM9qT`mHhs!$YPt^CUC)+<%18d+hyN-E(;Enj znY(d>K=W|6{q%mHC9~5NHZ=)0w_cwo!}5z{7)>{Tz-kG?m}TNX(Js=lzcrUWDZqK< 
zQJmf-!R&q`DH%{&@1|@Q3vV*XwOrY|?-c3I$YMA!7~)}}&Yfd-I(HCkzDUZ^4$T)L z?r{^%A!TE&DzdD0u}-pn ziFE+gc_frwVR274hn-KA#$$jLK!R36T$}m*B@_bxRT7W0_6kuA#a71Ic6R|gT@wN9 zhND#g746_MCXD!H_P@>_x|9^ncjq58%9s)r0qzCX?|zinq#70I(){$ekI4ykTWF)! z<{=(XUZ%t_1A_qP<0QwORAfqUSrST%b&0?|N2C>pTeC zI*7HZ8hZ-x2-PhnxnK1Nay_|ptpm_bzx7wo6Be(c&3sP*lsA4EOk`oy)E&(*C8Bu< zpaZeAz?!9Y)6J=)ip*Nro_pf7`^d~KBOsIO-V52V;}}t7FJEpJ?3YFI zg;rM!NtF4az--NH!=Khq=6>?09Mu@@L*ixKO=#9}BiPXrlRp1v7gE99m@<1svPn+e zetKM?->x=SNZ1JIibMVa1{b&s!dy<`!(k5?g3bGnmeHaTMfpJ>R-l?zT>iR5(2U8x z|M3@*IhQ20XTgS-dze35+{`2bG7VM~W~MLD8=VM%2_m}(k9y$N2kwluixjw=a~H4@ zn+ozcpPfSBJX@sEQAam2ul}pk3_?%^=Ai(p%*Ca_IPz~0u4B58ERTnzPW`_AaD6z* zN*^BAWe{dKj>3r}{}SX1Xm6>+J6W1k7OP6nP~7AhU>l7vm4{ ziywt%JaB0V5$|MC^v!4sui)snKUa89*-gh{rN;nWZmF-Ug!iz3thzdWvK?k{8{;@g&?AhZ1iA#mgA<)`IJQ$dy1S{hr7p%qxSp6G z;3r4Rr-e~ZHPDX*EP5KmY?wuF@#KyG*e1;#^5JVDor|CB!<=&3Ghklbk>`1|O5=kU zET7Gj;u*wU%D(nEO&DCq=0`xju6X4+Xm884JfVgIl|o6h$NJCb-zLX+Ia_XAUCAt)(oqIebq+Hl9vAJ-K)P1$FBdsUGp;8r;;-2O$YpX1 z(`$OGzLbBvP&+*fVx1rt(Sk*gow*vDznp&zcW|jp6EdM56%A~#Z2lx|R8rSQlL6s0 zS_LsG7U@BS5fL;fT{!QH&0qC@wcXrfm?pwwp=Ci6hx2a=fU`CDO38z0zl>H*2N@i1 ziFRSy54npItGxyU^HUh`q#GUiCx}~4EJ_XqNhI0vb*H1#h;PO)k4ZwDnQnw@Roq38 zdC@o#PU&04hg1QzOzb7Z12z6uHiOh&Cw*YFpnR8OYOMcuo@@pu2Ccih3vy2=0xK`dQmxN`<)yuH+CbJx zpzGRHNr6suR=rZ+djt<~Q!YLb?GX%LrKn%=-+7@;oC|Q9isj!0GINrI#Htt133C(5 zM8+YedrW%Ic?~L(OOo2ggqSm}b86<@DC<$G_}0gu z)LfDTRwub2JS&J#QNidYAFI`JpMbbuac@ICNA&hMK%n!RfeJ5sSLr~Zt}O?n#heK+ zItim&mk`fY&qdu>q`?r&gh5Q(<3D*3HEkRV5TJ*nS0IzLR;x}ajuXii*-~E{y@yO8%0xU8xhdh zo38tx|K|?k_Gb=K^~d1tM>NQbjdcpoaOuZ?BgCC8Z^qJnXEUKb7D%120qEZJ&`2+X zJU%Isvc7Zl({{Dq3M8dOf-rQ(wj$tV1BYZL`1eE_IgJAdXQg^F+r(&P*^dP8aHQJhV7GHWW7&2ywA>Ng!6r(~RoI&~R*)A$UqK zoxADcAFNCoYAI2gi;qu;bS*jH^~iJL&{cpCQ1CfoK*txLs7?0RJi}G4ckc;J;|~+W zur5Atx+)0ywvS(+z3*g^ZiH^7G7fZo{am5ucV~s`pxjfg#`ywWg}E4EXmPBu8jx3~ zw@uKDG*cbhw+)5`5w!hCX#yYbX zxnhJMNo$afruP#vWh{;nm357d0|W6_Aq+Xa+mv2RdQO0A)aDn5+T%rGZn_?wOL92@ z8rIBH(hs%rnoAy){<=7wUp6Y8Hg 
z*wz7bdy&sdXtQ`1uE6ju!sf@sdP7(?tQ2XQCrf{~t|6ZAS;mY;O&5RqQeK4hWtvk7 zbl=P6T^@b^4~!iplOYK?qAaNP9Bg5j&#UpK0Gds=?)=TkvBCO~&Mhr4nWlIz!S32Q ztd>}FuG;{_E#u@=Wj)~`zYW1Moh-*+cz}ZtJ5a*S8f^W#{KZ)!fzAM1XuI4*<5guq z?WhcogN${=hXOn{>Uye#eMlR^>KlPF?C?CmuK%hi*KNiSPG$pf&&K{AXI0J=$9x6K zyq#ShC8WSmeEzFIZYAE4a9d8{9fqA#gQnnctfqvn@5@iT(`y9A#%+#K31;b}DlpyZ%>tk7)GV0|Hk1B3JPTL97}C0?RnV67pN z@(2(i=PHo%g}6nCS2*C{GruK}E2<~6(EAL;*22t91W#uAcLlgKMk97l^}|6$Dy-gU z)=w4S+_#+KqkHXEK<1Ym{8;!{ILc8KOg}8g0f!hk@h|n zd})>=KFX-;>(x7;xg4uaGSWLx!64H`E1wbK?4+*b zz?UpO?+a$=C2h7mcu$`UV8+p*cwZ%5*qS?m!lQYtDSZ*5D9?hbWar~{263x-TpDed zd;k;Zs-lU)vkc8e!mPK!`jDKDRFNzP_dh`-xI#}?6q(uu7|l?eB+%(_1ncE8T%0V3 zHt4~2HoSqjb{7B_jB>!^=D8`K!FZJM29C`e?Fz=YOktgn8K#T&cdb_U<7ro5>IM9f zFhUEXKq5-(Qtwclk!94R2d}8WlT)35Wh4G@Xk)l%TVSYXlCylgG@W$rUK|lP_jKV*)e(E`222a(-zvYE zdqk6+u9o4;g%s8ihkwgysBF!G*7-4gcWAFh^L2ec&K1BMAP{_4JS@UvZ4M6+vvB-P zi2J@)!<3b=6ps|iP#SO~B^Eo$yyq~Ay1ID}72h3RSrP6QvT(A0vh%(v%vRkL(#E4x zirTO2!9Q-NbiCM{&~3EGE`k{qv`PWWwDx{WkohU=G$NIb^xw}CqV3QvK&f#q6GY!= z$HCUMaId@sl-s2t1RcqJU4ls^hZ&asGio5+=uUaql##IGEP>1tydPOf4x9m%HH>AL zaAw2tA7SPm8mji*q4-pY$FuY{924Sy#fK6ad6lOcYt@p>^G4GbYe**Pm-UAkG8dH# zyQ=Q2#U(|0yV~Y&+18Psb|^3zBX#Ob$TXBux@wW1C4kN*a!zk_i!cva@L4TWEZUQW zlEpB7_{{x1XXS~A_mJ$744UocfRZuSK5S*2n*kU+nUnCMU4Z$cfbYoY<=DF(jPXkJ z06qK@^D-KpVp_mQMVsyYyl~*d&Q^N^x|VQ2ac*Vs=@m)^^m4JrWSl2}-dV;HR*Fp; zpeTt^-#|W55$%4U8&I_m$M%g*!bu6?AB&M_Cz*rSByYMHa?vPMlfeY7O0yEWWiTK4 zgiW_y7^S0pLeRRQNP(gfez1%^Ak1SUMJOAHRp;eMi)IFakAyg9sU^`m_QVsyTrKWb zvg!B5JM;4o2P-4UkMVp{fiM`!>dbvW=tUL=3}hn&##3PPML8;SO>8xBxsDO`=^vR-pB?!2IF?Br_+tr=1>XeDuH^8Z7y!_naW>RO*QRr{PU# zhqNF+ufJSQ0|!zz+*b=^rh2I@jyZ@lZi3E%3iKUowt;z!WJUt*plb(^2XR5KO zFb}we*d2+AQGD9SRTgeJDzr)w?}U z!|S@qSv)#`*!$S%DWAIyRvkukf};$3{>TfWq~FXe1?%Bhd$`DnlDI6vYFyU^l*vx% z#76V*GOqfMEGDZftE`}@r%VE zu97b4VqcC6d-DP^w++?(exb~y={roH677BYSMxi@;OJP4334B&sDwH$CCpVtZc9(! 
zAccGe%?`8DStr!9N$!9=J;YFCfA01_8P^8UC*$^WqzbbjBh!-{6S%w35&QRp(}A6i zGxRT!jImap=HOy!H9ivH_H*n)R}uXKAn*-;t)vP&G@}WXdpox9xIUwq%AMu5q4-I~ zS>g$aL`9Kj3UKW@G>OnEe=rC{f7PeUG2cHJ%CmX~bE_)tS^TO1Ekm(z#Ek;UN=|Kv zqms355#(9IqpAmw#NPza!X~2ODggeF|L2I;mEyoZBEZ#|h0>YbIc~1{6P)(tk@&le ztf>+b7U4&vS<)RI+8=|RjjON5|-r947u^biq{Kh5f}ayc(Qpk z&z)nhM1bR#f_PIS)br4UsBp^|Fg!!jVl=lO;?EXghGaQrR$L>*!wV%34`N`k-15*I zt3e`UCRAf9VIFem>v?NK>!?xp!GQWTjjL^oq{Ao-yW1Eg-SI7kG>p0sJ#+q}c0Jnr;kI*7X_!EI?5F`2{vb7%p?KW^} z4iy|hkd6W}Upf^N0UI@|)=5H1$m-LE8Bsgm3xZ&@0%RhgC%eVy_rN>~n~=mY)fWJR zh{(I!97782vwCH&T?B3GmYYlTiE@owNQ(0zqvt$<*^WSD=;XV6zA%r(MI*#1>WjOD zxE)m{pgQK z#RO(N%D%$ z<))|bkNcU>Tg0zr&av4Fb=CTD$dK)IGeORx0f~LIXJ{gV;@0}dTv2GGLGA0?2+Kx* zUUQ5C1-P8FTce;taJx|`_2hB3CF-repX#hoOc7ZejS|>zPRqX}0s)3mEip#nF*JoGv43`)dsB4ro zn>sqxm${8{S5azZ5REqGhE0D)GT2wk)`!u4Ymu&3EAFn`Ac|>%=qyHJ^Q`*1IN;}b zk{0>y_Eh3NL9QqU1LDWWljr5g+zJs#D)H7YKwL?220s_1)^`f9O1Rhb?m(<~J}Etj z>IfP$7V8UerRo^_Z9GAIkRYwKDfr_V*%vHz=}JtTenI{PMk(CT%5lbpKs1za5|YZd zy~r8hg~|{x$g9FARSO>dCh6(5i-Bl^W;3g*v<+zc1-iPk>YH0y zW7;nP%neit%joUBgt#xH%R#0biq-;4uRY4Z@B?A6n*K^p32tyno)gLp={zqLGO;Z( zG7(^;E4favIaJXdJ5I4RmjY2O9`CTq;|2jvlRW|y{}BWid*mM*U6=U_I=V)$^2PlH zfTJ^a#O}YG6wIbMH4P2%vIuwRgq6A!-@gJdtH>||3X|+-pc00GKAu|pd6p{0pzRs# zp+7DbMAq}>FRY0#T$yK$+{ObWIsvY8bq==mES7UN4c}Iv`N4SV>Xa&i8Ju7kxg@iZ zExLweo;PjSka;0hQ6?U~7S!E>7>S(#S@bDkZdEH9UeEP^6bZ4?b8`!c_;q2l6vKp;9Xbu8(K|NaV+YVqt_(q|ugJ+up($qvu1%ni6N-vsEo`j%SS z&^8h3K~?K#XSNh#R?`?nvW`fQ8$|Tuv@b`yl6iQdH$r3ABfu=HK^T%-I=*RfzhJ7` z$o9aV+Hy0btA_ZZj$)xXLZI`~!vK|dSOB%s#kH;!&*EP~XktfZ!aMN)pfhb4@f5HZ zNS81Drv9M%^|&P^pZwvtH-i{}OdNz3{3UL20i)fVX$X>rs#%Dey@)ZXV_e^DK(4D+ zzu_Ki?FO9=;RffG?&w!Kx4e_P856fhDuSI-&qwJLb8-RA$=nA|6XXu4Lw5{|-YF0c zv67Qkxt%mLo^g&~R*HQ@XI<&5Gs^rSp>Ud;A6jxRx+722hUZiYpM$sIouF=0dV{SK zw_k&<0}W!=j$tKpkF@fz=U8lZ7XXb>giZVoqz}D3gJ5fG1;-cSvfS}&rvk#ehzPpv z@kc=(J8ZQQrm@Tym}5TCZ%+wx9TuT7L&}(ZcaF@^v)a77C&V0`!Qj%3v(p5*qQvds z__)S6K-R!^-DpCM5nTdVkeD5%?%44jD9ax3fcgIaF?R0ZRTbCYpOXL)^yCp$t6) 
zSkulzD4fB1U*kZ4EfX533V{}(@_UMFrOSy9CS9k&;ZT85*RQgZ%8fK{rhf{Up!iH-Z7ucf|3 z9j)``nDPjVYA>@)ojveb0;A0sWarX3VX2B=Y2j92>9f%V`}^9`EK6XeFIo0?!mQt6 zH%bluBEWiz6bOhS;qZLP>%dfu5rKnw{!Rsr=2yxjIO!h|a!RM3M2OenybHfWb9O#a z%4G}?e^Xm@G;*bE(7hBP_-lkEG)!E&&yrO4LL0|=bcYaY8+PTXF<_~$c+*fFk;ctQ z39IxsITpL$S-5T9zu;@iqVnI@K*^iN)|(Mt~$P6 z2xZWAjTA@I-z5y|a4JftbF%(U&jEooP42*m%|dAE|g;Q}Mz|?x{Em z%CD!G@KJ!G{tIKF3Dg906v{?Dd4#jy0j2D)f<&4M|9xcXu3acl4QV6+~$LBKmnB$pGfHZy2u0$zyi$X+Tc|&Ui1;gc^8U(x2VP@-a z(?~&H1!u1JI)z9f zpa1fqQD$yrWv?i94yH47v>Hh;`Tr3G12PLTSLy^DYMMT@w zK?n*Na4RG$Vvb7J`o|R2DuX(U@sUvjTVg;+t~=LRC;RORLx5b)9dSzi4+>f`VS|&d z`l6uLVFqU{j0s3n1HT59%zS~fSBDcY%>fZK4-SReLw zwRQ`zanrn#O1!`sVyOnHO*He1m8#XNfL6d;q5xW){*tKR*59ubjz$YD@QeJ442zMi z{S|DEURT(f2RV;BzBuq*<}dAN-yw8rADww{f)IEhKeM2~Z#_2k(L{z}*`|>B$3l&_ z!x4dMwr4*PW+j|mSv)IlfR(Ubn{S^+5$J(-4agw@t;}s6mD_brK?U+B4YQmg$nqM4 z#dj@QG~ZvCB|QdS@YM=iP*1pHr?fNA^bIHQ zuN7CD$E+m6F7xw(jZsIUspmWL2_PlPmKhP8%WbTRW@Vh3tSM$`y<3P;8Ry@@m+=Qt zB&Gu<3GFd_D#XYvIUKriF7iY$tDKHA&Csr>%06GXl|+MT7~*}4Kucf}#gng3|E@4A zO9997QG=az5{Q+hkP~Q+P6<57EH>x43i}MTc=yN7`=bW& z5F-c@DtySne?c^gW@F7rVI{q`Enb#d0HNiK?-7q2OLsmVt~&62&IYgw(VDYZ>d8(g zpyWU^u_GJS^bO|_5hlQyInGYrR*0EkoczRG+@AoYj@OoBx`p8y6!@z!;}QwLsCn7W zd0im6qcMvIhtuB(u+bVhXr9kK*TS-P)-*aT3SSwo2)m2pzR&xplgbPw?kj~D#gJ6y zvEHfiKRYjl&Eupu2O^S&k+p=#T4rv4w<1MdiB&ZRJ0VX1O@9?U$%gfqL>vEGMYo(s zbDta1^ix7e5fcogYsk=hKq^WmyRKZO^q3HudjvxQMEkRX)^5_ytNVQ<2s&ov1E&3d1s`rw`t&Fa;_O8wz510aj)d7E+Kb z6JqQ>g@cO83e5hs_&)YRa3e-Gs_8<`B^LY60*$on@3N5y83JZin8@l(CImedjY^_C zD@_#n(-ovfxeS?pkOD?^77Fz4!2;}i?3aQ+u`EHx1JiOcC;FVWD0B+uvT8r%B1=Y# z{%~%nI09%*?82GCe=Wc&z_~yMMs3bKG}toZs)69lN+h|&QsOk1&N!l!5n!XY769fP zThiBpOH(Owc8_JCZk`RG)n`_+vSyY6Rv$!IP5^O!TpvP@EcSfa@3>t-i&0WpjQKrh zmjux+oQg=!2$T0SLLx3sN_1cdx_yYnqK-PEi8b%rLZZy7oAxw z@T0atZt>>{Z>4-NzuUFO|5%`LUS6VvNzN}4Vm&LH!Bm!OnmlNdt(Wu0YK2l&OHaQ; zkdZlo7mTFuHzF0Ml2Jz2_T3=J@(t6A4N`3&YiSMw$yoJ1ftFfsIE?z30Q#BkC7I_t zO z^ww&lgTi1v*4x^&|1b)&>A3TWa)0TSyp1TPbBnv~Lxz1ypp~fvy+2b4(iE!$@Hgav8MowcxL0bWU?Xs&aAQPn 
zb0IFKJT*R`5z9R>E7-^&YZ`2vQGg-^;mXg~_g36Eo?*3}>K6;6);Lh*h=+f;EelC} z!Br_{IYKS1N%#oR0jX2Q*?019i&~?%g4SGR&)is|xcD_Y-nArXXu1^d^k7zGM;zC8$B1VIrM1*}mc0Dg@qH zVzfOG1f$6;Qc8&bO(B+%U|bLeZ@mhDo-(2y@xlLo8li}PWj`jJL$4o!8IeZdsYpu% zuH>02G-@W@56?j7-w;OP64@mX`Rr>9PytHo@gqJq0)jhu**4uGfNI4Hbs$@X{&YHs z)qS>9PcVJQ9|KX-++^*jxnx^^|=e%Q9)RuMuj+$wcyziPqlhEIl2mV?Ln(cNV2SWyd-w zL?C8B?NhXvFmA2?xgt^&*p}p_bh%$hD;;Se5#=>u*0q8Lz16b$^XO})7-^1RyYymW zS{Av{)N`#Ms8dxr&opqq7C@dTN!gatA~re0zG~Ip04)$?G%BD+ao@>=5)g=9SvV63 z(SNF-MbKb`*W{N=EfpGfRMbgMPXa}i!Le^xo{bP@MI6gMP))qvXSGG6ziEdi;eR2_ z(ywLmL6y6@?ZfDzIRB;~8%3}Rsi@WcK6Kc3X8~Jt%KIfe1Ka*XAPGi}&R8gJr~t8& z8M0g|h%P{>Dl1q|50AnlcWw(8M&u}LrHs$P&0x^|jEd4B`{2tS zd*5&BhzETzv1Le=`C5-TmNHGreOjDqoJ*L}a70SZ6XO1T0mcI(Ie8hJ5j|gT$&m7u z_{8G;NI*~WO^9g8f-T5f zRW|~_zSul)$EklnK`O$oI%g~7m%02V5FjVC z1Y$JEk{7ygqAV_u1}LtspTqd(s}yF2n8MIe=hGJxG76|87|$1f0Ob!{a|!Y>qP4U3?{DzDZcDzy&WPJD+{=<1OKpKhN6xe`pxZFBPcY4n zi3aZbAwb450IRaj*p#Ax^A`&v{oo<&Nq@#|1g&Oh)Un$@&iCU5k`MRUlHo+fTa5>8tEG6m$;m1DH<`&9tO2D1|!PGH?3#F|M`dNR27_!StW zWQIj{^Oq^eM_4mesk_wp+WSC^$MaB+%?f+m3IDJAfvqjCqL_kSm7HeEuzUnu`FmmE3y{rb$sR=*m;S{I?%;-?aHGmWt9dZT3 z2e&3E7J&Isrq^5)0f`EfF(pj<69gDpaU{wtpv>ocMGmrscSRe+&Q)szIWu@zn}h zN=TN{5*7Jpg;{xn>jrwrzXe&JnMBXWBI>t~gOIK*2^iNl3$Q4K$;v9Ms2|<}Vrk2? 
zrjE;gaJ~y>-2?SJSB0VN)h0l|Ne=h^;iSHAR79%|<3P%r`=^4fZ26cvOLjQ!Ng%Zb z$`R=iFHqPD!Z3wvZvXeKAVvdZ6h`=Se{aA@oV#O`R^bZ-L`}rxp+mrTwn1Yt0@DJg z@~KohS=d1*{i)jktcD}Y>PoZ^>o41GkyN>0GBiQ}X|tKhW*3$M#eU7xK=hAE+Jsft zLlmj^cRvFfO?B!_Y`ijw3AB=7>xD3fl=>`?8AAITNS&13N5?F9Gyn<=*d1y=Uujwr~6+VRB|x)?SOseMAsB7s()4ely^w zcDul=p~h%uyml5F)(L;CU}z5eLMnFMx(Tpm$xd)U6iYcFaAHBN^h7fivRX|<2xV~L zq~TEVnA$QIB_Km@{820L1{eL8~PYp(#pNUVOy zam%Zg5MSlei`cdHErP7cBDZ@w!*KatP;0Uh`52htb0Q=v16=#aYs8^8d6J_xtn zD%5(@3|8ntsDN<-Y$-|sP|WJ<4#hM|WN9fHCV~3}MpeRTFyqHrZ-Cg?SIDuqq@Q;b zpnFaIMQ^6QDL{xt=6Fho6+(9vhpi-dG5(fC2JVPMJk2hB2V-=`)xq0N`7gb{JLEx#O4l}7lo7P z45x-$6Cl6qJan-irnr`MFwyka9!Mp}p!ont1Md++6iKo{%)j(q0HbI^*GvZ!uX_X< z<;xNn?69$P=sh57IkOTE8w>$Gxzq8`0xwfY!XikC9rz0GtS(boKGM!D?_07nWVvQn zg6{Z-LDJ7*{-mCH_CJBFW@y|sesR!An5Cut^U6AZSO^u#EaNDvB=zGy0H8mN#xkJX zUmk@xfk2o2gF-NLH7d)}a)0%Q1g%9fv4v%wC-t@ewP34t5&m#c78=ArQd6~ao2Ja) zD2fT{nXO%xnsAAuUKkk}VOZyB)$^4=bDdP0_{)r(>{^jO=Xq6DF0SlZ$vI`yQ5y4gi=zM({+{Y1Ay7oH?l3dE}T1<6VAceqQMd!e4{P551qcRjPj2P<`H1=-w zMajr_`hP&J^GTQ!jf2`C;1DOA00^44A&O~@8?Ts)n%zGrgdDSUT24oqz7{|+QPb$C zMvpHEGH7IvpE}N$D8T1I^yE^u3ZHQp1lsC^j+_uuu@eN50Oy+U?B_31kPKwG$*wohM%c-d4ppy=&`(FvQ zX&7raoh7u(aO&-iRg>%aqU(D?^k_(}*S0c3M!<|{S-8MZUiT?g=ngldR!5z-Fb#k> zY@%VR$NX6A*QXnl7koklp6T7rKGRT%C2z4mPk@>eX90ZDsXaj}5_ah98)#c&x%&Z3|DgIpxXaR0V9Yt7&Ga%j$lR*sa8Ame~p zC(fx38^z*+eo;r{;HW4berV_%;WU1}AS;X< z5C*sWL4jYQ7giW7%GrNn*!x--l+>wjzqcy^E16|7S9Qqm4ho?PFw(}d6jA?mA+$Hm zF)3JSkL>{fnWV&F)Edx}ppDX~r?~o6VNzhV$mA{V+#4YhqT&p|3BTK09Ma_EoAh|E z2r&{cJF+aP^&hoCx{gU6`4*m#`ffxbUJmp8q77l&fzv7ek`q%YjjLoyfo}BilTr{? 
zFySJr02yne4m3kTi)UQRT~b);QXht-E&AL@iJnUKgY1d7C0T>IN3NlJ^dcIBn4_a8 z@bRm?Eu2FeGg|uJ_i0NeILIN+Pfi81;eh6=w7XJ*DR6jW1i`2++}$!?ks*Ax?KV6Y z1%tPux&DSTd255#q&dN9#MrY^NI_z*6p`bEkb?H;Al@Vejl-iG)|k5})nwsOV+6N7 zHU1hw#%HpN#7_vGRn%v>N3!N@1VXe9xXMM5`wIr?x$wJgMH{D^ai1cPF!Zf=Xl^#`E%+Cr2K z6!&r8D+tb~LAg*t0b8X}6)uccMJ(-;&*hbM3dDN`XNJ#e-&ZI(h|*!-Sq2DWvrqbF1dB(qVk%fzNp$6m9fr|+n)pU8fj%kom zufd=cLI=hQWm7#%fJGaNZkKsgJ!!j8=H{v6CyvebPb*~A!>@EDK0qrBGqR1SW06oj z0|E4z=(9QM9C(Fn`%Eh(q-Q_lQcGCZHWc|2hFA>PO)gnz#<(9N3~JAoi$b(fL+yi$ z&%c1%4)0b-O#yRY9rB2Rv?huQ@i*R&7FxywYQgLaq?BI?;*P#j^kJ9?$YFQp_ zR@lD5#)ng3N&ja-#^t5SnL6#zc?1Xyjg@JIzghv3j0^?heqUB9{?%+AXc3YPVstEF z+Z2{VGHez|&N6?fL5G=@OW10M@|bSR1_8ATFpV@3LsXK=R;<>}fkKI0RKu*y1$I}E5>R+=t>Zyg@y;aA9=7qlA6RRI2~giqOj2- zhaC`;v`F$Rr@Jcv`9HG+(XSt&pr z{1nCgX?dW=NbEz%Ukf&e3>o&qii0~-j&RsAI>fiCAPO*#%B}H8C1MREZzw@C{-iJ~ zWle=-`!@y9sJL!r-uHh~&j()@JaRPq1W0R-;UoD09k0`ws6xjBUt2 z+&(49%2dN8AULJNoHl?~{_%wfAK^~vdY@iET%!Wk8BE|I(GEf>4Eq5XBFs~w#|Wah z9EwKG;73m;NNa^59A6WA6gDOz4eS;Dn?fu@?Y5c6C$uk9>_cW!S)BOTRKi9pqRG8e zjnBLa%qoEYNrpl#aQ6$efrQOlM%D6K4N-A6X`R>g3Qb%Y(^o3eIdZokL&JC zls^6|VOBIG=XqH2any{<$gj3_Y1NH zW@KSS*Y_F&a1BdMrB0p>014SVmG`#JKPCtXIo%ZQ=4+0^jD_0#^8ZxW^1vRR$+gxW zjG`c8h8^YqwngC{$|BA<_+zi-D{BZwLQE5Uxe%kksMh;2ZWU&QWgbCM`|Nc9#s{o+ zu$T!V(x!MSu>vL&I?|>>5DLxq2dm!_A1fkEotO*J0{9^c@*1Plup9Ay6gE%j6@E=I z@2zF=_`;kcf0zNZmX)gCIwLFCfE)Qu$EAyRgT!=HGCLpP!~z=uSXL@+t3m;a+Q|FS zLqaLwEXEJm?bixg(?_1X!+QGD1hC4jeRJl_uV)Z8+E-L!`pFdV^AaEE&QkJ7Y!|z^ki3E#HEQdf8V5#Nu(SyVy!=rGQ$kvf>p?b1W`u^sQwvDPCHMJ9z;X zDcq`{o6U}jR8XONU+1(%VuVxIDe^xNX7rXCy^;)nDJU8?H4Ftk>w|6af=4?fzQSta zS#h~<6?YsAM)Q10O)5bw7!Xfu{0)LE!KqMFdH`I6x5bOIX(8u}{}2QfOY7K#x~7f* zRV<2gJVTB;7DNDUev#|xW19q6ojC`N`2>UDx58)_JU}GzqxHkt1fen8>RJ;+lxu|0 z$AVT-Ynx~(5O6CaI>ADjqSabtBl8Td*sk|C>uaNyD2Nk&KVOiQf&l|tGC206xnT4n zcx+5^PSO`fVNNQA9sX@wt@IBHvdp=HqNL6by20Y$qd*#a zbSubX|py15V3>bMg0z(BYG9Xk%6vS!A9D;8sNba+>oS+xh_(KswdBZW45`X_q z#GwYLQ^JPW12+@4!XTru8ULjKvdEOk%3gefy=U$(WZhElyEj?_;Vv>NI5-MtGL7Lm 
z+JC07(L1YBcS}o~H*`LrRc9iu-2)6m2H4_KOC)5fX`s=)fF&KJbJPbu+?YHs1T z381RMJ8@XR9*m;Ypv}^DhQDUTn{q+}>fHg0+sgHh6v7(s?JO0O;fpcp9P656+oEhTKl zprJ9B(MtVXftC`c&|z8Q&ujuy6_e&dw%CslVxO{6LPwyd`!jE~Xf$ylE(cgfo+ZSh zg-hwfsCu42?i%@-h1d*&0236l*!i+w!yYR_xN9mJWL_0Qt~3G@H6!x(?*y=N zj^en;jQCuC{9UPZ&;(U*Br+gTCr3a^Mh>t89Cd&?)OWhs`b2$fSe8y1Xyg2nqY_M% zW31~J3!$y5*m&|EDnLgbS&asa%O+M3q)oJu&E6}fIdtYV0x35blLiaa{JR0**0DRE zz#}&sSN9OM$`_%oq$F<$v9@I!zE17{lK$XI`%I<>uz)}I7xoz=ci5lCenE)Ubq4EK zZkR*ViC{~O)1>UZYm^OfOJj{6^&8*#`j6`^I>UEkBBC(uCvI}kr z=PC~iwyKTC226f%{GN!C_ln|JK>F$d>$Twa1$61 zaxZ9=txnL#0I1DOKhh{5;zvd>R`IN~9s;xGDZrgw6;@5h3W5i5(aZ4%|F^anupw$M zPO`5f2AO9w_0gLdJPr!6q{D?#;)pZXTRJs$+yukDYzi86#>ElSeU}Xe=u~46(+gSw zwl)}=>;3HlEY(a5rNZf>w1>frP{Fw?t3Vc|xf?-STNfwD3{&`=5NMy{Js5o=biQ90 zrQkTL7R(j?m`4Dp3omi-z`){TzXgf3HXOwc17VQw!waK%Hh1inCVgB{>3N(0Lj9TW z+c#NaaCW)u`;$KH(G-$XjP|S8rwgG}q1voyWbZg$7$n!N@E7&>H$Fy?M$wjKg}j&H z7)=mg>CVeC22HJU#yt*Z6vb|a!#BT*0M<^(&QRcg0*qLb*kt0kAuZ_Bx1_$~<|3?x zvISVhBu>)8zY$^|G60J-%*Vd{lc2^dI7FlIaf>g=SbUO{wixSag6kwW8VxkJqxqx6 z=;?)9lVCxPu7;h@Uc#sb6C9c_yx$ILi^M$6xF?xDid0@7nc@%I%2!qv4t$0IY?B}> z^wrQ+hS7LOd7}-jh_jC-4L{C3o+7R>Os!ONoUN@s8pJG!%T4HnzQ1A`aYo@~6&@-R z!iYUm?rksMMt~Y28A3_=j$+mSA8NX8BJ-#ehz>ix6w~5o0s6#`tamtTMU?J{ZjS;{{R(@#4;3m-(xs zu-s4L1kwLhVR$Y+lQ|>l-&4?9W-9ibD9(!gxM#qug!BRgt)%~58^m2b$+G$i-+KqK zDBvu%VzAev^^f}l2n969R}i3aXDV^+A+t|j@BJUaj79LfL|^1w2h8J76mGSfT#Fk3 zpDDmffeQpp1%8Jh>V_?f7}4MTTq;Vql9{i1KMxd*y}=s7Y%EU~Of?z9!|8$q6Y8&D z0D}@xsst}=Zz^c{aZEm3W77LiDTsEK<7GcdkkKoP;YuCVPiu>YnHj6EAd3D>DGpge|A7O#+&wh#2A`S_PygBfQKcmI+sHj2fr?X0Y-Sg#CzL1js%rtSO zF#EbdSD%^7?sIm7MqE*>BVH2fj{+qSm(7zlDu6cP$wpqD#ZBYZUr87#M$pG*v$hz} z1{L9sOX$4tm;Kx{BQw@fmI&M9v4%Xk2 z^Ip*lh#F2Aam8%HKl%oUrK+_Vn{*Iz)LVeop)h#DnQH(nn=GtR&``>La*U$U1{5dd zv2ms_BWj*>>2$SeZ(B4fhy6n$xX~{YMt8(;nn3^=`5nU6_t*rK(q@L(`A6>pTfSwL z%m^HNg(Ke)YI#T$LW1+33nIO+8fW;p`tO9P0b_D7X!q9%AOa`LwBYe~M_~-c=r}e0 zviB`bO_tH6F$qD&V&$lxgY{d$ENjLCPLme-(&$U72PY=yu>)Bq2=+$tB;SrVDM;Sh z9pUmTzg1ymz=;fymHwQ65TZ@;w5($9s?588f>8K52_3_uHhCWa729#w+L 
zCKn@8Snq2Et-P!cCFv*q@dv>uZ%~)W{RXD(AEu(QQLoNB&+ipx>^(A}i!&mT`o@pI zjsA=hXoHxo=J|jc&#=*pFfUW|07jINNdUrZhmZN%x-g1A#s@z^h!rt6$uSzPGWAP8 z1+&x&aSsyiZtzV4kFGIa!@su*HpU!Nj{_Kgn*b{y_G2_`*f_saCSS9$y%MZ3$P=vlQR_U;BpPie;EQtN!3i5b8;@n(>o_y9&QUaMUYX zjmCZF{{dR5vy-?<5*t4w%<5l28gryxWVcl)ZBY;|MWqkK{j-87Zt!cvT8k;*D~kl* zVg`hcj@`i{K+pi_VlG5&_l?CsRt*-;^wa(BsxY!3)5HJT50G@kkP%*3^N$^!H+oV| zwn0`2N$c=7YlD9&0jtDVc2E(`X8FUyjFhZ3D;cn#je*#wj2MO3tYanhCxJwh?HT#P z4sQs7;&`rQvj*Ni5CPi1L~x7y{51Q9W~M)2_~9RmkZ=7Rl{1_$Jn# zFqw0cEz(6~IPqQwK${t;J3c~Lf`l!mZ}oz4JU)X3^1=)P6o@;axWt(u@|9x%th&W0 z9k6(*@ZCFsS#__D*U@6WNQe<{bRoV$ljZ)*E~yXkIAm9~4G=~_F_e%7)vxPHm~kYI zfvkBKk?B0#Yp9eoYAU^Jtxt;s^H2XId@2XzC$XB=EgImCVCBqCYY4x3RN zbk?e>suyr8x0GObMEeMNRY;~U%W~H z^@3?RSQ*ZfgpRiIWN>SXNx>DY-+GFDO08HRRVDoq0Z_T5z6zShdlRw_htk~qe$DfX zgj(U{MA@z*381wq=5+IzH~syJM_Qa7<2Ie&C}a^#puoR~AgdhCf~uMHX8GQIz^sh& zjz(*&6=HP0vaW_1CH+(YqiGfj-C3+u+I}WHeWis6WfxCcxG+rZDi+4I-13RfyT|xLNp2PODM^60^sW^bQ_qCyx2FDh%Bek zXX66@3@{p82N78iYgi5X zhJ&vKQlK2py971I@UsDo12T2@Aa~E60}@Tluwk+|sEIxh3VY%9mUcb%`<6_c<44#L8IBSd1c&uHH|ek%Gw&nM5jA zJdPZdAm8H4)!!fpQqTuv)gA`!TZGYHC)Kl9qI*88uw}s!0Hy^rP=4w8fL6FMs26ns zXt#ktmTQiz8MxI;x`Xb5jbk`cJfjScR{-RxX{y!dMb03@;G&UO=A!k*Z{&qwmTzvI zwo|wv#z4zA7ylS)x^oo|!g2(KOPjt?2=UWz0}J(XL#;t3!uxRcB*ins#lh=M6LBR6tt>ncYt#_*fFZLl2JU&9%b4) zPeF=}b#+*-UOqIHIKx0f+8sI@%SVkql0$|ilE9{^KFL@Y+iNeUSsjKdRKr7ssi zQ_f+17+h);`ni_@S(7s>Rwrj_IU7K0h};CeY%2T>ibkozz1fJ6orhca;tT9r@R>g( zY-B;QMf|`v@GfCw&umvS3w5slxd2nfj%JR-6uxzt{42dN;vmjwwG++~*Z8N~USwC! zf*o6J9Sbk#3#gkT9{>se=@kSiG3TAN*GV0IpJ@@ebfppll9p_X0J|5XN8mv7D?+U1 zd2)mnM(^C5)~_&>tMpGrK`v$5zI(d`tt(8V4H4M=%Om*!h%T=K_fkcCm}?MwNkN~5 zb~SAjkP#HEv8M7;A$-RzN{C{d+r|`U43HIQYLc5q6WktWpk? 
zbwKK|u@;sAwR|PMp)j8y0oTn!l7(v<@_MP;YyJ1efzw1%Q0{yXi|?}q zSQ#w2B=hSyfyi_NFBm$|Z*Gghb{rxil8JwPynV*lCyTy{V%BaxXM%zBFHHC~$FVLN zDbV@~(@m*#->3+WLQ{vK;yzb#DV=V|k?z7T7GPY@>Kp@Y*+AjhWTK_1?ZL<%7ODRu zp?nRqOBYw;e^&rtb+eMoaCgQeKx15q&CCK77(LmS*uAnEgximw0sym3u3%}I^y?HP zN*rUBRB#m*Yi0u8oQ zDXL-6N`ylaPhJIJbc2kuSTISE`X8aNLZMs`kkHht30tm=pm@E~QuM{?fL1yQbt%YF z;{+`3NZ`L>i)df7xxu)Fv7&)SAaX1ZySX7ni-?CCHVZU)UiRZuHn0K5v^mal9gt;% zo`hUbKE(Y;La7^TFFeI)B95n`jl|4>IX{k7Y}aB?%OsBh8r=y~M0bIfqG>#t%vrk$ zCB z8A`F}Y?Jonw?-&5WYE<Vsg4<9*R#pZm?rD`f5k~$evwmWUu`5=ohA;M?3ZdB$uY)$L>ShZw{WClj z`x_exTRnB33YQIPcdKAQ3e}k-)8P1JLaj=OsE9CZ`vuIJIe+5#DWkJ<{7>7W>1uBd zT(f;CEE?~1O$Q?(t5tumU@EV*2qQb?>ADbv@`jm$ZhiS8!f=QBz@ElE^Zf(?MBx4( zw&E)M-o=E)rqYzp^D~Snz`OZ(S}vh~1g!}Qpbi;!KZp?*l?+QlE_u6L7!{HQ zl}^W&D@<;z+gg_u_X{Go@rA+V-JU3zjU2{u;oJx80~jP*C%ibea^)HzTNW^0={gpq zbPbc?g%W?TdR{OW=i+e11>*cpE3FO-f&!#iL$@yT7pygaV-0q6`}PQs8aSMYE_{d> z)RjK1b6psK&WjKwGJ{PIrb!`GLw>|q0X)Mp1 z5j9K8lfk;A>n339qf^=4XI{V*KN$3fH`bW!DL`N5%%Fn`FO0v&B}&-S@;#IY)PWRJ zh=n4;_Z4K3CUUns6Z!>0tf*}62G>&b`>TXfK?=$8sm9M#n8unYlPwkn3R+FXQQXI! 
z^rt@tW;F?pT!O2ITAwMHlyIr4tK!4Mxf06tR>8a=L0D|}|8(0$2=b|v_lxhH2sIXPgmf8N`9}HpHQd_H?3O{Nm z2ql!pGYD;P?Py`vMENvP4aRqzsk~CCQ9`Q#^aJSI#t5`h$)L1Y$Bm*CAvL$3pu%9> zn;AvRmm_f7XE^1L_9f~z7SX|-!OMc6OeIVSgMX$V#U}>W%4sJ4V3$RxVe<@y!S&A( zG}2+SCdwoFCj3o;`A)(m#YbR%J3?BGViJDu^TeUt8o=q8>h)i~00hPHZHqS}a_jmc z0Ax7I3prf#xKOL{m2o*>iXRP4%^6X^MwUTVv`mhEu|^6*^7CyU=Otk!Nlk*NbU{6n zXpuknPXw*aSPDwDeVY)=Fkjwsu<3awf{_00P$F)_1d$9>pUJtGo-f%)G&6eS&`s`X zwir8`*e=7=D+(5%5T~of{%jv`|7Gh3KM-WYKmnovDz1b-Yd4rNL0sqAbe!jCkeyTX zG+OC~n8rj9RCdcjiq`i=7kt<2m?UMX_5B1>YnB31s6+hkMF{no7%#7(C!QmO8e-3d zRO2)Eq)L)iX?H!=`acPTR>6mpT_-pFFF-cd=Caf%=AQK0d10)b{t9obgq)3zu4C~5 zHF9UMYS*cj`(7m?{VZH|#%h1=Udx5Di8r~|#IF(rUEn!RFhzb8-;6-wX)=8x$p1fo zEtP8xp0C3F#~h{+YP2qbDB*nny~3!lL>N{xk1A-SXDC7X=McKzA<#;TF*srk_gB1b zpJkQeG#x|weHH^>FRWq1u=GIe;M#Xkk9iW(g0LyO)dOlP1zY!txuuM4mOPC^#N zZkTHy_JbJ#!^Td~JR2HyTe=@@*+ub8;47`K;+9kM|6;fm9`8|%tu?-@qFOdA@|lkD z!`WS!b%}6sXr&(^1TJFGmguRiRSBu1-m!?ZW>~k)GsM_!1lk5!DM$hh5viS&3zGW% zy68(Ci(t~ZCLSV-d@c-vXno0|0)3u;^gSS$ATbBqQjU@%-L5r^u@r+jGh7d}oEAw@Ad`tVOI9$ic0Ou{e!6o9NS z`DQSDP9dttRD^pHN?zih`jpNoU4#Tc{Ot1SU+x`-u=E z7JVgjqPyE5?$r)@#RmjI75V6AIAVq`^Vj|x%&5X3g}o_boj?0?OHTbr=iOKY4T&HM zH=E6S^bB-9qi{GG zijJiJm-Zgvgpp&OU!yS00T;(PLH(QPwbs>|E{{KK(K&OCZ|q`!x5CyB3lm73+`%DB zQa|i};3jm23o=rNo(uGX^tEYqL4SWlK`4xsyd1%hV$ZKYpwlR-1fwB9BR)QS1dt}J zLfk9%v2X07s^ITH5_jXbAjXCm5uiL_NPX5#YsDZhG)ItlPM9@9W`!I%tLy$*Ac^b3 z+qi#SA!}jj`Ve3J@i9v&x}GgeeJyEVG(ff}Us1wztSKhFoqe2xUkk)l_$>vgO>~)# znn|&YXgOS>S`;kNS#|{6N{2oH^|ch`0kn=6d~UWjwbC#K(Bi+u{5iE3YALXfU58=; zjYapisF;Z(rb*B1PZml&wK)p!$d>5&3?O(VIAX&15+_$D6<(I zb2?fC>UP#8L(liUIuW))Q2{NS*f;jK3Z+ot!T{PKtXFOmXf1Z(1%oagFlf*KA21_* zZc>K<>ilmMjVx(Z1YK6w&j_H5MG20>Acv(LOBh-fpi@Q6HG?r2aCetf`i#KjiZJ_z zFryAihp4~3Cy*SdLmg|qT7OJe04oJk00vZL{$fGInW6*S9OCgqx&c^4bxOR|Ft|tR zOEjk3XH(`+YJ=niQ3odO6lATBSsf(ebd7dwf7BD)8v5wFlCKdAcaATrjnDRjPOvNx zSArWTM&_X>8bk&10?(EC2Lu^$%ahm@Yv(p#R;@BRh%^;z6wQV=EG{&qyxEqPa?q*E zlTNWnOa!~`3+}q72r)8hWnR7OKB%^?Hy|V$F}Yy$xEwTN7oKXd5VOrjLnCi1NLg|I 
zMU!G}V4#(ZjesDwXX@jqKmQaJ0+W}Y6eY;F3R6B>=G4=O$hXYcMcO1#A3PnXQ>XNE z7+!kxO{FtJR~cv-cY`pib#_j{=-hFJSc{C8{E4VUIZFIe#e~<&l2}8rmizh)!c?4r zx~#SiLwzkio6Z6>R^VJMMyT&X0Dw_n)L04%7Gd?@SY@1@dUe_ZF?x1IxHLooi|(dlcs zAak67{dqy+$Q~jf56_U1~HfW-!eIL|{JvQ>B7$9TVBE~h2 zJO`D?Q83h?nV5MQZvrghMUqs|fM45|>J)4d5)4A6f~-FlU_my=|5gCh2|7mElluDx z7-EMnrHWfC#8^`5lWN(j`3~m;QZHOv=(NUa1uagi&Nb=R3WOHO#|iB?CI=CQ@wAZD znnNvohcK&WYqygb(ILT5BwN}PyUYFa=xh1+X7CRZG#E&`ww!9r7;1qxde;Ft}2m4q~Vb1nv@Y|J8vC)%FG74=|CiqyHk3RiiP1t=-# zE+1yq3R}B{DV~b~&bkyx1eHXSkp0QBAs|K?#tK@7i&_n|J~*>(ZY|2KC?csbb7~Hd zmC|kc)-uSGW|%9*17!SMP?o^J^k@W4CX%#2pwGhI_TYg&{RgT1(FmHQj2|G-5=JwC zS`xpT!+?x8#@1s3;?su%SQ$&uY9M?i{W+I|8S#PQ7k5g zu$Y!q{9;b(vmhp+&NO0>?Nga z6)w;P6|d`ays?LzooK72%&SL_%k^b~tTD7`t_xyG07<9_C$nnAW5=bw z!#P2vj{aOH2=-&Kk@Vfi6Ck5ve51-ejelAIZ^tAORsNWK0+z)jZaBpaFEu)@05BT+ z@b{s8jzT2G09#y~mvgs=OqT*^ZI%d!tC7i0)c1x}F>k%Fl41mmVsrSaUb?73ienU# z^)FYdJRIek+7737j6}yzA|fBM@T_AI=I1Fa86G7Ytbc=oRwz!LQ$NjZGo}Di0IkM# zoK1`B{ApmuNE763V?<@;tQ!B6Q2QKZBe&mGmel)gZBemSlkQW5eod&=3gsEZV#U<+ zYA^~X6Gn6~eu=`cRb{=@#N&TtA;e4Uq@GjAvXXs44R#8}zT0#_YxNqu&1%^+MiBYv zkR4ZuAknfC5k@}vJqX4k^RFcg^>n5l^LrZS9}$un$&zO+{!@(5{ z*17MY1kh+F6&U81K1Yz18clp12k2^qSd|%>f(j1qdp?R+Y||TKW9H0MVw~kcbY+|p zM9Ek&)N0MuX%=DY7VHz^qn6H2v|)m&QREz%Rj2?sG+41<*H}i-_y$sGF@-H$IhYk4 z>0Nu;{wYB=;*6%!CR8C>>=bUFC(Tb=wcpkDc~+h2uAdWR^~7`pQJjS;9&9Q=O;S^$ zjDAe}!I@&8>!S?9(C-v&(06MEmYCnAu?Bs&Rv7f% zT7ji)Yo^DI8z|eZMD;)x$HfED!05*3fgBi03}k_ktQb&PQa%71@A!Z@I#^LXu%@cK zmG3T+zGR>d>1s`li>9K)NwL!#{ujHz{U9A~oP62aH(!2L!yo=Sum;>;Q#mdf4Flu=>Lxmzg7PabFO8ZbN6<14Zq@9 z&9k5nANO*uk!N@x*D#nTk0*zxVGuaagT!m%clcH5ZrG@FcSR@H@Waw{7h6OTVlkI~ zn{#PR&b43RT)XAYWvp_p!%F8mZgB3H)y^He(Ya1*o$IpIxz5|2>&DZS|9k9muKNz> zj&E`9xEG!Cd!6gK$GH>tJ9ol9=X$;G+(`$VJLMDSPCn>dpDeaN}fjyQMfVdvsQ zI=TeqEV`nrEBx#jmpG@JD{koON?z~gW_;GoC968S(hEAfS)X-wGhgrQ%13o^Wxcz& z%I?Rxin1 zLo(czpJlkbsth+aJHw6tteqRz+|I3ouKDM+cLn!%aud6^cavQwHz~i9o4WfLH>LZr zZrZ40+*MB=;}-UC?&@L3xCZLqxXQWL$#VeZSddI}^K;T%nr%^#$@ 
zmg~~oMABMMyIsZak9mrD_A73Zblz&`=J$xX_X*Dg|AgNoJnc`Ry8J!{jhgRo=URx@ zd`!$eL3k}?T+BBI-1pr(H=gbOzI0UDyPb!mtpZ=NJm$9WyOF11Ukuui#~;(&huOtx zE5N#t?!QWl(;9nZxcP@-?z7#urZr85uJ6a3=iQP6F&de07wTN^+-%C!$Zx}nm}?#q zb1gk%ZrP5QyAgaFWm)56Zs%Pmx5`6Th zcTV8j6Wm~E{^xC{yO)32%k61C%k6%>pL^wlv)twhF?Sj7*6q)5+xhObZD+ZCFP!aO zfAtLa=9g!?0V>-kl$rFJzij9F5bj63B`wb3Y}Gb@aR8q_gy3hHGlc zaCd>-z9YjeBd=fZ|A+9>y6NN(Pr||t5Am$Y?BH_vW-`xB9( ztD&RDI_HiMr+IgVYi68n-y6PYBHq{zuB9FPUd3-MzgJ|qdk1uL&FeE<%flINF>SeI zayR(Bo9jp$&X09=3kWyRc8!c34^pn%Y2#&|IQKLD-%lNGrCdwPySXO5?Z!8c>$`r= z<*6N@R{_8NRK^*eems462Jp1!Y3NU#c)sSFE|hU$J#!wvjd}3S>XZAk0pAS=8^-VE7x|WSClk)$SwcHa=l6QbaS89&w5KeGX_NKoE&)CY zY}shq`4Ih!@Eqce023eFM7WNp`C<6%1?TotrzJ_}-r=`#JADwk{ax7Bm>b78%hwt| zt>T+WgqKo|wfukYa^_9K{~)gV{$Ke2Ii6>Ent2xFkQQk)@-(cCx#6_Y;_IEej&}#%NwhI{7iTCMVr39LgL{yiXMOE!u5V znk%RL2ci93(ptp-zlH{?;}^vF3D}xFv^97gPxI1tuCb1^c^dU@3+cOdu4Pi1J3{;= zU<>IdxAEH^e(u84yt}JwK9Bmi_O9ic_U>cy`WH|0JzbgS_`Vrl9!4DGhL&IQ`)W7W zGS{${A;fu~cIR2ZGoR;GzFo+#WPnW4Z(7S7vJd*bnBj&Jp1?C&`Cc4zUl6xpKV?1W zT$aYmBb2>`@L|y~!zGCq=V_vj&F92iJ^x?NbAa;e`#1S*m`>aTd=sM#asG$*8b&)e zhmuI+tQ@&f0 z@Y22xZYjS_Uu3w^d~-W<_p*J==cM-~ar%?qSNtwtO*)yR!=rxuB6-Xwojv^S<5{x; z`Cw6olT6Tj2krh&d$;)@Wrxm_N%u!Qah?*MZ%A)#BEucvyXGg;-TKKH?jZk{^X?G8 zF~;hSJcso^*qV*38~9z;uY=3bZ=Q1-DA)XQXo4ItKZbk)omPV_;diTkdmtZdWWM45 z=6uS7jMqrJHM|Hd`PKN}aD=wo8*?j(vyAzoX|QuOq#t+;+Akgey}%aU#9D;%%wNQ} z{NJFth4G>nSQ!5gI9JbiH}iDjS=t49($4GP`Q@a2E7%a|xu##t{gB^H(|8ZAezb$% zjXvgX2OmUwi@_EVzc2sy=UGSFG;|?9cwosO=sB7;>t%EEP~LatyXC~+I03r93Lk$N zbL%sm%j8{ae{TFF-Tj<6t9X`Cwol-jHT>VWi~dhv-@yO#`F}LuEPOFN@W^8FnnqYO zZXk~d{J$j5nu^~fkEh%kt6Ls6`9Sk@7t-m@b3D&7=(dI5r+7B+rJnrO^WBSlw^=-X zLCj^7?v*@ylvcZ#d!9Jg6KCC8=U(CeHMdcxo{UXfX=k2wi&!rbe|~~6@s!8^DXout zyOyt+|G^&F=-fWOUC#f_&oZy?M*iFmz3#|xZ-Y0|{>>-#aV@8(Ijx&M0&97ax&Pq~ zZZ+S$uQ>goG5>GiS@Q|;uOL3M&{o1bcp5vx|NI7gcbZ$z7|@e)DG!aO&DW7{aR+yZ zw0iUGA@1`2F?U31x18qo5&kd#ALRFd()lCtDQhD%Z`jBBm3L?GXzp2%1JClSIcV|p zm^%cm7V>+L-xk)C&7U#m#(KM^`k0F`PToG5IVK)+hk4()|5VrLPjf5DqjCCaZdv1L 
zZb{~8)Sq@MWp2nj%{8t)&GjR%1sixz++}-Cb>|RXvgI`9Cf={)fldpy6JEssO{cj- zq$fG+Uh3S$`@@7ar*@z%C`v;^7ux3t>wErRv+p#wrv6m7hPLa>`;CK7bL--#x^?8Y zn(r49cOAcLz`swv8%clR^C4lwmHP9{?+YD~*%m^VLEwv<=u7a#Qu=Vy!I;}e+P5BoZtpYBQ=X2* zxqUz1K=b8{A&tas(6gHVc@C4;w>+Xj59-*oBIfRGf1f&0+#cBe+Rex3*x(WuH|FKg#`71R%7_)dg9F^y&Qdi60{sjn}gN!G+#qn z{hT|W_SF0~%&(Q?*?5HXp!-VjKtGB@pDxJA{Qm;O+9}8v8E1#$l~h%Px9TvJWKe0DSf+X zOU&J}o%y%FciTv>kJ^7O{r7eB1n`4ouJhL)7!j}HHU*uFz=cc(bwL~aL=qxckRjV zuc{A3ZF#RVyxZ9|Ay+J&%(@4jxP@oY7UX)KB`eSYHlpvP?Cp3mcP@H?1i4|#1~;aoqyeT?5nc?R?U5S}Y|F5@|$r;fM} zQQm(1-b4BO^hIYhIo)OPt{4A%p7`m|qRhFw5?$QST$=kO?YwGj%>AN?ahm6zmGnb? zn@?xnZAaOz;P)DSAzBN%*ye2+ZjmyIu_o<;o|yE0&o_(OJ9j<*Pvd!d5xmhKT|mEX zEi&7rcEoL(PccJ(C5nd{es^){2tmSkC(WzTlGluol4tNConLFACI&c0MM*f2I!B<18E%4d^-v5p^ZJ-W~)Tj9z`ZM)t zXd!&KYiOqhv|mFVZLpc&L;UWIxea`e7}B72-pv0Ew7KT~`8PRtC-3h%K-r+zO5P!! zG)T839aDe)zY`w2YXEu$-YL$YD4mUjhw6WLV?J>g4R++mcL$J zdY+}o_-jcctk1R~|FSL^PkM`y16J|Afj-=L1Uf5S%Go%YveM^U-hu}Ft|iWD9z?f> z7Uti(7*}p1y%x&Z+!}G)!PlMEa%VU50$g`#62= zu9b`}&t67=a_ytul(nUpcyoz&BiKfGnP*XgHeAL36To=xI>5Mutgo@*&RwkI`Tl*< zT-w+8@e}2L5^eB0={@e?d&-NIlZ~7sNTj?|y#U^K{|yJRNyrJR6}|4}N>` zG(SmyLH5vuDxHh?zYn<5S_B`&h`T{_VeW5af>XVHCAw?-0C8dK-kR{5+F+H(c+_c@Is+D+|`fTo;v}`YQh({NJ9ZFY%gR zfF5t9xreDU;(bGq^Mh8S!}AckvDr0DG&y`H1ja}=mR*^HqBH!y(- z6helwX@de4C__cu`E6nQru%kRf0z&ddY-lRK5Or__j-Gtwbnj#Cf26YI3LB!LzJHm z`p78#W5Q=W|6l9tje|DPH;dciw?BSQJ0|_==s8C5(Y?9uSaIEczd8E7xTs8Ldv9;< zlmBKmcJxkPa&Dgb z9TCNke0Wdqef?Ia zvEi?_7)jz57v zMg4&HtSp>t)6etuzkfOE@7?>$q`BD0RzLYe@qGR#`TpPkkbKwpclKApkGs7$bElg> z$_#U*y%3wS^TL)5>;m^Y*7J+z{6_1-HtW+j=_8`&S8$T_j0&PYMmgKG5c>L__b-i} z`}h0`&PP4c<<`LBcx!ml{KnS6=dG*bNN zuY@;uc@7&w80Flz)c^N??_KiytZ>eo^W9riPTgO^JKZ2IIwvNLo_7Dx!+Vd)aiRB@ z5FfqA6#1IG7o7In^UgU5<#4lS5Y^XXj@P0YNAPB?@-L0|=sopp;HT%0b$%ZITXers zIgwt}|NVF~{kM5x;uqClzMiNL+hSb5%X$A=KK7B@_(3j4=}9B%S9~Y_3?fJWK!|}cH6}2UA%Ika1y(2EFzo+SM-C@s>J$|c;?&qpD z!SRoxb3O_Wuj6{r`Ny1hD~k90PSWpvn$B5z?UC$Iqd!g2>&V3U?9gyhU+vzbHl$f! 
zeTcpXt)Bf!d_mqaq>)YjqwxE$Wrlx6KY2M{Sy$%Oo9~O?iRZ;{z$W=T`8&ry$PYgj ze!q?0l&@ja7aJoKxK7b4A>=!@MV}v)Z|-s9PtSGgSHf%Z7xh1eeVbnMJpPXS4~WX| z9x`g*enA#>sKfIAZ=+-G<4`1@V262UuZ>o&v_G?>I5N7=U(vUWWs|()e!k~^&blA@ zFUT;)FlKrGyYA)v%<#ge>AnA$`*~Dd?Jupj9k21b2JO#p$X}D29_B0N{Ah0C{hw?Q zgk~+^xMvR9Z^5@@5%T1+Sf-?dldZm<-y_n08O481`s&Cx=oR?cud~BmeafDv7{=cJo8Mv+yXI@|>5WgO zh9b}BU|Du3oX&p|{r-2EVSj^j7n`%K$_~H!I4issy^G&VSN~bumKAEE^S+ZAj*=_! zE@F>ng;QkZpR&x?xVP*Oeka`cVXu%JniUdX63>pSCx5ce^*5MX|3OwLQr6PHWN)ky zH!Oq;e$(=G@13pC^k-$HOj@2}oA_vMYHKtGd@eJ*B>a9>X84JF`l;iqgnv5NeQGCu zK;OZ3e@&V{H;=vV8NM)a#vYsA|9Yn9A^lIE-|i@#Pw%%r(xZ6kCFxu5SeNiSel7he zGi1|wc|OBjJd6JqZ%y<($MaRcoEa=f+H_vpZ^`#1y80a6QXjtY1fSg?cKjE@rZZ%e z#^2HtzwZ?q*v?TJ=f9O17Kw{~d;Xi!aQ*SZmw&*o@vgiN^lZq>oA@2YZ+^$S{x~!I z@?C!GZ#(Y%ebM=anUVh^9H9U5lgx1WPy9CT_X>M{YYtLe{pDVv)Ng*}9(Mmegzt(= z9|_^_$)Ay{$#vL-sQ$d)_LYz}VE>75+YcWLDSFGF9t*ACd@MAPyTvEb*z{OvArls& z)V;$;@y=u6kZ|o+dxf}iU#*TeWt+eJnt3Ae)hK(xbM*`^(0@h#(lac1O<9@PD>UZy z3cKBBg50;5tv{R1ua511g%5yy8~?B+BOGMgZGAg4q+=oMEeYXgDewPTez({EHoV{T zoc`^(@DF>eCsM8sIQQ*4d@i*aA^t;tHrGvKU!L}x{Drc!zmMP3{}GqJmKm&{3sv9F z3^C8~zPP)1fDf_tJH0{|S?ztL)sLTf?wL>dT~%i2O%{E^r}A;HaJeBfH?Wq1QW!+HECT*qY`#E*@IHs8w#RrGqa;0WGD2{tu6EH@pGdcN-KmN;v+ z!Vd5H4!t8N>&igqTTg_@{`lE8V~8iUBR|x}hO97Siuo(d!8|O$0ne)I^_Rojhcdz< z;Uze{Pd>AE^$LJFy#$?7ie<{-g03VXyFh zG~Y80=+A#NSeX_+j8ar!Q;ojoHF=}|SU*+xO>O13vqGJ4q51Gl7xkxOJx}dWTG?t5 zpT<^osp##jP_OSjDttVOm*4cz?2t4jNSyB#PKayO2e;hn750DYA2SLs^BWqYq#a91 zw+-p=R47q*OZ)W-r<`9n^r>)$JcnYxtJ06h5O+@XVBuf>b!O}jLozE|w{oQ4e|>OB ze{D#(;QTZXaKq&xnTg*G%S?VbH?zw-yClBj9sbVehGceXvs!|2<#&f>rvEfFvpqbK zxhv}*Gv4^%ZpP&oGQ#dh^E0nF{~E62Cel?y!)@{|?&AS6ndiOH7n^HVc!>OSL+6Ep z-~WfO<%|16;T!3&_0j*Nj$8}dzW28ouU&p7<0nsT%-H_>zslHgxg_KDKQ7An>7V~| z#?Ej3G^6J17eZ~u%b{+^-}Ax#eQ22Z_aU+8{|q_OzWn`+Fn}C{A;`rh^(gWu_0&({ zF*sC`84mM@7eD_<_@%XTB^GOyT5$C%ll)~FLqoZNWyQ>=LuJMjq3X_Gg;?ybLOkQi zP<`(Up=^BbP>zb(y+h^n-l2+&vDxE0dWY&Q{P~;I^QU@+ajr7~lQ0GOn1&{P{N^=} zgq97DgjTXmIK{7!HaF0Yn)2S}=X!^_b&rI4G;k~@kX-yoXq-=ap7lqpb(PjAB!~45 
zv&cC}h;MnNcW8BA^Mn^5T`pa;A%$XTFA~25OR)^43q1qz%juOF@;0`2SV>=v>Kl)Q zIAW+mnQN4f%?K6hQDvTXfQ$*p$!hg_t>3K2MiihZk}2;1o6vNCKRu>=m1KlgZEM>a z)>h-n7WdXV zkw0G@P9eP^BebvK_rIY|JMS<`Q4vXTC5Y22*U8%|*QX~@Jz2cAAvRxL(IPI5qd1Ne z=%~sFr^wFl^bTjpbGU%6Z}twC$SX)U3<=lB>$r)cmxqR}+VtD>ySR@B_{I`x)0E19AOh5g6uTX(XR3SD~eP5-%qwtHv!Vu@>VgyEE48~ysnm*FD-3g(kJA~F- zp8qb-A8Bo8J8C-kX;FuIG$3)q`0KpB2Az*ZK0AC_8~*4cVUjebARp5(1G6v(^U(G4 zN5TSfQKY}7OmE8!OXy3H-ZUgEBa`A1SWa(!$uq`E`f9WrW2A*s-d!6S^LmFQ5@;CS zJJe59S5T|m*7WHe+Lh(>+}xoEu_@ z)2m13hI;yb=N-ghlp@JFl|Ti(^?Yt<;rxoz>(Ktn(;@xh(;-E+9mx%4w{k-{Do}~4 ziPFz=K3P3BHzfV01!)|`ah$*@oIw*qyqU4xLbfuD+ZfQPU9N%l9Z!dv`PK(4&JA^B zefQI$!FdViC&@;Jeet@d!#U|(z$IKkY3k`vf@}23ecDcG6}_fU|GG9ozT?B)WArRG zGooi|3sA67`P!EouKP`5oBN)d5pL3N<1Sjps=p(>bM-hyrqMpoeBNyNcW)K$v68G3 zjvY`(jU%cz>yOpN`+olbnR)sh^hFK^ph;P3Uh%MuwQ3XEgj2#%8EaR*Yf7{~G4-FU zPicSX3F#;ID0_>w9~O2FlFkt1VgyQu8S{Amqv(~dWQK|tGs77AI8-k*RuC6M)vC-e zLEI!vK|ZEo24-On{<8kh6JCTR*lO%v^zE!r`1P!?RCpN*v$8|y7c;_gdPkW$Lw3D2 zB&;UaVm&sZ07cl2`a`|KPI5Q)Vn31_j6ZOY-n!8EV{Wf-m|lu@?L->UoM@Z2rfjaY zP4ZMBKT%$*67rUkN3uG`7B71wRQO#Sb%@rEC&^7@G+)uAJ~yMKkN1nu+h=+^YR;PD zy_gy5$of}2f9-JsNqXap_8&CicTb1T=kVBHYv+}99K{Ko!Wo>y1*E6;375#O!oLm? 
z#fNPF5ia^LD{N()U89$7@cwX}UK!<09&gfbqqUyC?nRaq)L?9}ke+rajux zJ7jXFv}z|>gnQHbqWzBcNI0b(X(MyQ4Zt7_K`usM6uJtBg)wA@ap5?!^UqnFjh@@E zhvSzC;>vDmi%`)|+mx-1LadKAOM6(Y9h~HxDd?$xPxK1;^l6xZS!n97Egb9~wVkbK zyJK9yhmj`RKQjK9tu39eOM)&G+0uwR-7aTpaSJ(L|v64@b6uY4~%RNTuBMHba< z9jZ?ru5L_LN5-owr?W$ybLwwph9ud7q$+ayxbO+2$M*{DdB*?)xU|#XrB5L znrs)Yxz#Jw-m|vh2)_XuYPAh1et-n~v%FWhE{&VGjk~yy2gv-2@>@0}q%HdCcq=3H z7VaFXzwM{|u2O!{S6uO(ULl7ZfI{~45VBN#E4kV$5XZnR|&n_;X zvH9|fj&Jl1Bb?Xyr2Q$sek_clkHI)hz$8pTKBi#?W})kce;v}FJRauI=V1Zrz4Jw6 zVxRX*UlPTu&r8V`WubMp@-bieSgd@I?K8c94&FhIFT--IM5(xv`8i=Vy>fF7+aV{c zrLRZzmYfjZm*XAfger2AF>W+36|E!Q=y=yteL?|Qgze~@+Ar)RJHExA6UFltZEfR^ zpzlYSbjqbu@m@}-A3EPD0>X1a!k;g+bTF{C%q>x7Y zoyS9sbqBT6b3z@baXs0v*qR0TOQI2Fb6tO->pQRVfa}X&j2O&a{tZlza^H#PVsy_+Z35bG(N96=K3dHpRAX+1~QRweQ7t6_1QV$f^;t73a%m9 z&pEhGZ|x)PjGSS$2Lmt&L(sHJ z{X`de;3t1vi+p`cUu34I@F_KuKI^08j&lF`j0Zh2r|7Xi+?UNjG~W0Py5k7 zGqhry@C2lHNf&Jy)|_CHxG5-R1Lu<^`s``s43w_R4712NsFcqN#AeGQs@-E8WoPC8 zqGMy(eFEFl;d`MeAw$Dzu2-~p} zUEj$GyUD%Sk1gNc5Dt=u(fM8LBFK&gwjbH`j`B>FYS#)s(Pq<=XhFR`XBnBsQ5;87 zTteS_g5G*aAEtjjML&afHbXk5??PL`x_afW%lfv@%3u21y}~)?6@8+-eUJZ@ehF7_ z4cF26LRPp*cD&AhFXOw9-vfQiL^7H_ulk{k-gHJ-WS#GB|cUki7NM)Bd(sG zVE{P@$(fIYA!IHR)?>7w6(fX4AwAK!U^4%~bmN3P_P;dB^^p~1rSVMF0cr1WjavOL z#`w)ROu!`cwEwl*J4_L7@_d>-i=KYoJ-5+6Z(p^yHK+Bzs5`6wb*#ZTiDu)Ea^nyA zEKcOWbDD#Ww_@dN6}9yYAsti)=p z#d>T+0gBM%oMz{?98yowwo3g+8ttgLq5j`d|51NT{a4o#3FC!YYge&d8auHY#aGRp zkR@bkhj}9UK^#WKNcWfL{>Hk$VeW6B``h5W=osQC{OXS^SkWJSI=-wEm;U%ip@l5^ z+6F#3_eDR7<2Zq`1M=^_Dvn4WRrJ_N>EDt*PC4fc&fx;y3&tPh6Rz1xyB~!q!lRInq9*0-ciK|=49vnD%)oYt!-lYu z-a36_Xu)dwTC~&CNX_49EV9vFl?`FN_>Cw)5lUxo2qoA~uiUaRRA484H>&Az#P)0q zRfyW3`tch-Z-0`vUJZrjCH6WuG2FFJz8YFjzxuEp{axgjdsVyqs&T@r#tE;6va7F# z@;k4FitblKC0TXj)exKL8hIN-HMw6p2XPpss6ZTbNTTWd#?aj1I%q}PReAeJ9`|hw z?Wmc((Z1}Bp$_$ExU(@N?rjW7=QN_&{j^9YjiWe@(!|D4^3to}1if;P{BPSBPSMYx z+I_|4H|7~s;heZA{};$hNFK`wSIBEfoK`O1Q!a5`_$JcIXS@EXO z|FR)esavrO<=;3p8mr#+o4dG=2k7zR{V&?V{^}W;`m6tg)&Jq@KibafmoI9^$aeLp zW@fa;S^ZzA{-a^G`afR%Mtp)t 
z9S`eemADuNIIr+s>yF;fX5aJ*gM<@g@Ux<=7as zhpbSnjTuKTAxp>dZO|uS3MyjmqeT0Va$PcxsC~?LY#L@@7UrPy{a#@n8TnBckX=K2 zg+=5NEX6V`$4Yd3|J5)fs()GPi17L-?yZclk?j0dhDj^W83iNN;SZE|@kQvmyfJJi z>oYRK&M59fd+wge3cKk^?Rz4dFPR?sL|gbg_tOueUB8t+&Ho|IHu1g1t;J!K;!s9b zc&EQ-F*t<7$Ff41{;FIbRq>Jjv0I;WL*H{&A9PXwsQ;|dkJiqAgnx}MbOrzZ8vgx( z{QATA_sOQQ<_9L~hsW!O`{{QF>vxgvqaQ}J*T2GbJP))!~SFV2lsZc`G@xLy&8T#|NG$=hkg|H-}_N0c=3DT#}=I*mG*HI^foFFrFnR6m~`203R4axnsP};$i@}oYO_LN&<@#@FiCn-kUX6g5+zw7pWb>hOCORIrqO4h zy;<6|t})TIFiTwVovbj2oQKjqSz!^m1SK6=+OI6_m$s{{UnqM2u@Jw*uhDIu!`LW> z$o}tm_L0!}RdW&Fdc=Av_5Gsvb=G{aG?rmGR$?{QVm-Rv)*ih3NZ3d(K>b8*B$-H< zQzCaFnqx|T%$n-+(|BZg3yt`khxYI9Gu3#6gVHa*-FOb#3 z3;cF3_TwN9qZAd0qv;O&x0{Vhwm#4PMrtPedlmZ|H3Qkd!`Q!M{Ym!kS+?y(_Al8e zT)d%QsFO|-El8tOdL=kYuRPC&J08_VKYF`;8D;a?|BGE?4?8&~&4jd5u0d7{pOMBn zT)-tYERi9hY_KIhWw6S3OS>wO6@?RqV(yBis|MUbsc|iU>|Kede;kI<{ z;#yQ!$kIId#{+uhi|$Q+GpDONsD8!0iHogquj_I`UvW7YfI%37T#Udd^w=u{Js)G@ zF~Z|80fj$wkJ^XMCm#<}ggY+x3x$DCzD8MnlAl%FG|a#(%)vZ#{rd5+fLw$nSc+v> zj(UCUN;2}jC)e}}tLba89+3~e6>Z9YO8HNJr2p;c6*f9nfFf+iPVB~B>_^!u#JLhXd2Exfflr)4JoA2j-Gwm{XHK< z^ABB~$wAjWjPw)cBi;>sD4x%6`2@&1B+-I2D%@Kodi?L-(QX|vE_ly4!5AoZ)*3eF z6i-)n$rC8mUY#P(pk%)9x{w!e30H6p*KreFg?v3#S)t>%%7}31lNsSIc^?muIm0to z&U%x5k%Iw9cI*E!gdW)>EhxO46>^2!4|(q?WfW~F%htaSduU@;j(uoT$MW>=6ZP+- zdxuijDbU_;zN&4)AZd(1(NOiO#`7J?{ynCzKFQBbwiy?s$h2@fS+kJ+i#lo5qhT`p zcO?54jjmgKpm!K0y)hVv2`EkJ|1pVP>39XE(DPB<;oiE9K~RNh;_BVs400BdH~90( zc}Te5mL2*xLwue58;30Mo26KWVDd;%D*vEnv8rUHT~qjzx<M`;pGmE)DnYCo2OJtsM||7=_x%^roSql-@DaHObB(508!uae5u1{w7Jb zAdRCqjuR->r=B89&RfGsoMnWAio{oHsT>9l<;fmj1!*%qm>yG@*`^>KjH+Lw5quCy0%SY-q8I1v=wk2v~YG<=S zP|r508>{ahX$&w~-{0RDfL^A)mb=diR9erqO4h{XO?~#66O2&MUUD{jR*ta%>Lfp>$wwDDknS1@y{nbJOPI7txoXx{vwq z40F_EmGh!Cznc-+g-ab@hUM7$gudiQ`UCoEti^h4L;;Gh9c3j?hw_*?cCyl(dleZI zj+4~`pAI{nvm1M{9|v(5rKmttKlA^C&HoQK|Bp6yO=R1oIcnOE@V~C|jR4f4e!BVp z=gt4mHvgYz{(qu4YYgJjsY4PikrbCen%9$L|cl)R^J$vb?7ZV zu^jpU3_?_|qVhL{-l~4JD5JUb5oi}qpXVb`-`dDg;>KVcia%18$&!2QZ~7$q6y&4g 
ztoo@vs2cdNjp*_J32%DQb4OJEra5mG3PYB$jWIdB=-0+G-(Zi^7hn;VU@4ZNET;aa z)c-`3zUM#C^B?BeOhAEk@+UwA;ToU1Qc$Cp1-nL$tg zHB+HTwEZ4hCQl9{S(yU#c~}5pJ5zZk?~6U#y=e+k{j4 z__V&geHFX)7`yc(JC>}!$ZjPQ!b!4mow$!43Axf4fx=&VZ@>3$I-DbZl(?RJX7Ini zrrz=Aw99u|I7LR!WsGx*V~>P!GCz3k6(Bi;=<9qe${;WB+KNz z{3ZEc;XLxc&VRK2XNKR+!W_)Q0xZH3H0@IU^o1?Pb**SyrENzV?Wnn>{GslS@~2!j z98>;k2qIKI-rA%&<{hvGoE4 zWD!c0o9*OIl*HKetJEv(7EXW3xP7|5eY|@7l6pK_{XMJxM)qz8AC+^W^*;y6X#LM& zGHQ!T$)5EGFB|`BYbu1}s6+h@V|}s(g_|A=X)>8&18lY~ncm7SXu)y%3AEGGNTCg7 z&#V9Lqr!bfYh|itx~~n!9`ae;9F0Hvg;Rca2Io-ty~n}@@)E9~NqufsuUinU^=nh# zQv*H!{+>T--sAs7owlwX4fF((&TCww{{N8e|8sU&6FKkZ{tICLpbVtqc*U3i#7XX$u|1|#1BAef?e`e>kfYNSg4f0D338zp&HSzDL>%&414| z|IH4bBd%C@9=QOe?Bhk`5|q#@MziZD`nIp|GQ?f4`t;+WYNUJ)H2csx{)anvD+7Nl_$$8iFua0Z

#{;M+;2h{&sb>pl$a#7tNt50g5PTC*RNB;Q3=lK0REJFJG*;LX~|NYhfSM)!K z`shkj(PM}o&`<5@9Tr6Cp#pK#A&C~G(Zr5zzGp13gZ;ZjpXhffGL7~G`}YR>*Ex0F zY!$LWIB}Z&i$>JnV%r)wZ%$~(a5PH)BjEQo+cW>~TL0qxd%vI0|3`U6&-{N%{-xdMUhDf98+~2*M_=S1nIZq; z2hdvw%0C9thoId%O0SatZ23p7xZ>H?kdfEar_$+cMfw<&h_8^>Xbd(^cmjI-I(hQ> zynJFa8#&tJQ}~)avux&~nyF#yYx=!!_6k#+pO0ynfmxV?d02o&==>ue3EA;wwrZFf zmeQADIaXpdwmh2`){^VdRl{dZ7N7{*u@k$I{>{{|mn=A-&LQ&6mGx6UUs8rQsGn=p z&xPtI;(UPBsEKTR)Uo;N(LhfixtRY?+t9Rv|Bo-S1+DypZQ>)}ea||?(e@>|-a#Bj z1vAD(!vwVG2DT?Kh;26xuLN zTyaN!m_g1$DSZw(4<)zc>!N)5cEkeVMTnQn7pf4$PqRmcdih-9*is}%=Z9tFazyvp zvO&J4=ZBTTtC4nJ?J3umuN3(Y8=eaXUz!{a+{$O0KX1-(@)M!#rAI@#dBzGKL#yo5FKiz_F8t)szX=itt`}doEl=N@7aCUOg+}2dSv-7dh)bsqNwlDJ?9@<# zG`;fqsrtsL;VAt$s+*l(JCz@EsNoTXXUJ&Z<2mvIqJ5B;$SY_)rVT=> zSsRq#pY%?ndBkguUB^vyeOp@`^@V&%(SFOW*Yh9t|9{R8w;k^snjh|yk^k>O6#k-d z|D9eTb57W_)pIKPURLN$FI}kq&+QfZ(kt0G7096vK=nvAnD!y2eW=RQ-b?2$201na zxfp>_7=to>O?lLh?9h*F(QoMQV&rH018chV@6WTZQI7^BX0or@-i>G)&OS%WNcQ<; z_W5Y``9SvhF!p)3-;C5Zxy}Sk!W4YA{xF~3%AStaA5NpsKs%c{EiRRi7G{Yne$hHO zavnt8ST}Mp>==rJQ%tc^Bo6jOG^Ncl7`4xJ|}M zn~ebrjv3Q^mp%1{aYnF5MOrJ-blUjuyzyU$@gLfD8UN|G(tC{mw(0+k`D)h~|8Fq< zCmVzl#^y;hqF#T!OggKv7VDARp#K-Yk=|M>?Q++r7a>}^mljSPaxHSZxZ;f7VJEp8 zrN%OQ$^9r1-{V)jWBf0C7(IT)RmOfd9{L$OzQhI)-}w#wM06Z=NTLO49K~^*K(zKf z+JoBlHhXhj2&crILE*c`x61bgdV);i;rO5Z`GWoX<@(2$}P%_jmuH7+4urgh}IQ-zW(`p{lbIjI^XFRGXFQ@ zH1tId2Hj=?xgz$A2i&pY_Q`k9%TVTy1*rePLJ#g$+Vy>hg? 
zjZ|*G<~{u3G0$4w#>ykA_+fh1zq9o#*!-1b6?-^F#)YfNO;>$q@W$hzfSe({d02o& zSc0ar?0tqG``&emW2cxWIKv)TV-qjc*>vdyKl49l?+ zNpT4_`)YdY7HP9_*V5OceVep5y9U{|>+!HrTrodD0a=7n`gU?BO85>c7R%R>$HQ*n zy@(%soL$HMcfT>RjNM;;-g%Sx-%&MMy+B+Yj`siV_nU(_j8bgs_s~`;Nby4nH#w)- zx&Q2c<9}C2(r8DGdRBYqQ~#Sbq~W6apZI70TU;7-sBczgeywb%(@EhLM0vIfXr{G59s`X|G!{AbUbT4 zf^ZH7U=Vtqmp(1>4-65`Mb|GM4#9g{@>~5_l?b~^i?r^X?%_G{Q=)U`sKio z{*iv_QR9oD_M!XD1WdvdG~J25ab*63Y~8`8-|YD#jds*rwg2~q{l8>Ao4BJ$z_OdP>!DWtoZ_B-dV*OH!TQ+87qJu8&yYswSa z-IR8htcq!e_0Mtrb2YiwZ{ny!5-mvMD2}5^pWcj?_q4lcJFgu^`lNRG82`I5KE~^a<&l!Wo=H=^p#PE1UmIJIozEx}WD+e%<0y{f1Ww@$%J}iiQGrTSA%-}r zQKQ|c)n?R@_1cdH`e*Bp8nrD=Z2abl#@>06FUj~L!}vp65$yqP*A~?4AJ4hY1zf@v zB!_E<#TR~KSZGzhqrDB+#ot7`x}6r6LR+oxh|N~c(DgPOZL$42fAlRM2A?N60h2HV(VS^M zISn%~3(?+*67h5BpY7kCM^Bdx4gdB2?XvOqpQ8ems9J1Zdb&CB+2*OqYwo+?q;L91 zAFu279ewF6LeKi2PdwjaxuMy7b_>~h(VRG$5>Aut3+=zpv;RKZ{`-FR-;)i)%&l8L zkfb+ui)+peOQg3H%di|tzfG7MUrBE@U*2*rH>{?wMZ06^^R98)waE42ilwuWEI{dI z^Qq)^loKr={_LzSkE&r&P@4OY_*NERB|75lK`@Md%9|v(5 zrKmt0b!hrX{_nWQZuuwMZn!tsO`{z(!{q;}{EM$A8=RLQlfsQ;{YAQTT9C$3eD?i> z!f$C)hD%#M(_b=o>sY&Rda`SbcP;X>@>jgaS_I{1(~eGxSP%sc=8B zb@I6CVcz0oj~|DBzJ9ZM;JgHRfxLty{WbGVSLm(ERLc$J{;G0+kMHWVdO*MC*mV?2 zBmKHDfWGjiaA&ZF;LlHo+w_j9xuNS1))0K>>2O#0J{}6@}l+4!xb%7daS!K^THuG=HQFcBtF;)NwK;+(x!*3({omXzPDQ8hhs%gAZ4) zjei@AeH+iSQTp3=D2-7VgW@g57i0-piV5^7$VY{AE3ayQy0t&jjidT3dwr(9X1+cf z#oC1Q&$IYrj0-T$?}{#Ghpp(@U#RV_>Tev7t$*pKO(thJXBOt5>klE!BNt#%6n@SB z5ojknK2}GBJBQ|mrDSCPFN?xo)_%#`mhW5(%Y|2BHImx(1lH196A#Cx>**U2*`(>s z`VTVND?UkF0gA94JFy#ku^(j_)^(#|jdk8^?<#$9OrIPtH$Fg(zP^?XR(DVTen(I_UR@ans(ihbM<+G;T_!o83te0kkXPrQDq46)$&zOJvrg1#_A_oI7 z1i9!LZ%$Oc^L!6ZxabXbnB~|UL~G;Yh#gVa-m^ET#M&M43$O@FP4u{FYkNtNY+4WoZNybr!Bw7&7 z>7~h|IF1uYs^ii4`xL!3PyL(df8*%q(BAEQL~8`va6w$hpM6(}?40^YxI$jTb=<^l zq`&uAxKBPn=6vr8ebM!#`J2V+4~nny5vfzrcb{?`8-UV%Y{#<4LJ=QArFNhqrR_PO z4MO!AZ4k=X*5#;Rb4T;!Rp#AebJ_o>Mv-gY#USYoK`usM6vm+GhV}nlet=!hzK_$Zn;#G3 zTw?+zVG8mw4KpwcO?&{&`}hbB@jsx=nt~KRLwY9v18Ug#wQT)5K8kv>K{!#%|A0nB 
z`|sySXC4+{5t8mRF)$}Ap||#zw?2Fn^ul-8d~4)QIOUu+vOrzy*?+GtMgPB3I^A*o z_A=*}czzYLJ=gi3qq)gw{ZL%E+B2ylW6oXfyp>puwOEgh80483kY%n>j*7w3K@~mr zqW6QyKk>PLL7A$*!@ltT6W;4*>r4Kd{;6B)J=#&^8r!iGyRjG1{QiD2vgf03`bFyl z4hkPeDJl?09qMoO3rVsCX&gmzbH9-2=ogODTjv@#%*+fY=%>&#ZV-;gdv!8YRz`gK(6v*W@sW0PnP;Ah|ADVS|flj9xV9S}Od zZmjX>zY5X*yW8TrrW%VpH7MMr7k$HjzJ0A%xKDq8p80=i{TKc};rJF~kYWB)Wr6yJ zzR1A<48jl;Hf4t<&$@YoI!?CsQKz-#J>O@ew`X`i+JRbaL>*c0yoOi2pT*wKeD>e$ zjF2mx5g3Ir7>5bydX`P`>zokH{dau*ao?A;M(FLFFv+oksNdY6-eZbzKGMI@|4z4G zm!2S#v-vXUMPHTQH?oY~+@Ch0t=s*P=??dI)^|?#JQ@yWKN=2P>=)k7do287{9|Fi zy+HeJJQ04L_eA*F+5TZ~?ZB`n`^oUu)q&y7#Ne=d{8M4qlU+fp&T+`2YDD}aP z?>*MXUc{{M){e)_Z}qW<-Tprmz57pLt8=3NV%@aPH&};f1vYdzbj-W(yW)&Lhr{RD z^8>rXFB9G2S3cTVn(>Lc|4As@bI_v1PT>sB;Q}t<3a+7SM^-4`lNBmZS&|j1QduG9*~ITb z#@~Mq*PVA0w{aKu@c^0shxT_}w*4R3_J3rDR@_kvM7IicEs?cDq ze*#H*BZ`M*h4lM>4!xz-7da?hm=#JUW`zOt%Ej&tgXlw0eZU?Y#88D?arOM^BgipG zYH!Ao6Od?r*!Hx}kNn@=Ve7B|9MWuxcD6v9I+7yGF3SH*eY^Rx$`!^O{Eo3b#vsNR z)y>BDJIw#6F9qt$Bxz1TKBi#?nw;D0+!nGmW=xYXwn-V|knOHpV+>P!mVW~EjyH(^ zY<)nZ@GNP}LGfwh9J1t2udskzgwkQX!V+>RD)}-hvfbM%_t?k1k@0oxtuH+imO0+} zf$v?C9bfV-swix)$ZB#e)}!lNzRyDzpePEzYwrkIHoKdDr8`ud?hcixy47u7tUJU} zjZx8W`YX4CmD|(*6m|;lM&S?S5lw3zX#XB){~mb%54`^e-v0yk|AWwe=Rv61)opy< z9qLezhWQVa(FfYU2im^}_Jntbz0xbH(m%2x_tTTBxqwy+ijcu6UUHCF`PdCU%D;*@BWh$NS3{(!xg(Kl>n5yT_^w`9#^te+uRDRe{Rk z;t@j})i~}qCvXa9a1Ix630KgxL;l@Q%O3fU$$!GVHM_@h`A1F119kKP|FgW_dtiNv z{JV#wduViC{lb3=*QAqFpRSWPBmIzb{Ps4zb)>X$mwq4Z{iQwDHPANxLC?Ch;$i>f z&ln!Qqf*y-;JnOl$^VT9q2k2{p%R7P)y|kNj=v&b^W}4meA4?muk+alA&2bvQTN0B ze~sfQadkQUU30&FHv_XU2lKE1 zi_m;e8N8+L%~l7=)brAncJ%F$G`+TuI;fu4tA}+N?(4idc-q<^;b?w7^2aTaPUj!( zUm=&_FYSlr!Yi>F>#-39=z7N3i7ftzEkS;^|8FO~$3EHT8{_n7&tir4U@v_?di-zl z)sxR%zELRdFiKH@IO>o@mvLm_J6Yxk|0%QxcNThw; zC+@K==&dEjCeFD;zk>Dy(m+alTg>?1oNIYMc545J^R89nBmX!1dbIv`q;J{zO~HHi z{~dY6pHT3(3tvZ%|L@P9{SEf@9eulgzm;qgPR)JTw@34`HH+Ej!`axA+2_)27|gat z@*?~Eto;M~eD_3pw{aKuQCe&NK(qY=^vEY4eg7wOp>Y_h{Vw|UKumn}ErPz{>fKik 
zIRHsMhe6~JM1F{#{eLf6<1ah{J^KQL+cub|SY!Twto%=OjXe2xe?9B+r@IEC{ePqU zW(>w*0w!S!@-YofAN@D-zl)DxkM)l`KHdN4UhCXzJ=xGL|723Qk&M>cIbvy1lsEfBv5)xz<~58qK0`Coj0bh@(aoSywX^riHouWDDnYiu&w_-~|l zp6A^UR|XJ$e{`91mZN9?UwABZ>OZ3lY3E0-%lPychbwB!s#{ba&$9_E=rG*N_QHLa2@c-BU z*R%iUtb4=f`+tnxd**+}dQPr!6en;BXK)S|a0yMGV>AB6_}X~8{i^Z5aenP$b#&?5)6e++p#6V0g*)HypWI~R|GOK7 zA5*5ZpZDooU+{f_uUo4?Prm0G7uhb0)PHrq<+M2mdSA4s+`rVsP*B4+2^RD#}F@PpQGkH`}zj^`W{>Q4*Pl!`zv4uD1 zgel^p>lS_JJH>6?`;+}IejXNJ z5lTn9{&4m`z0$o@q}c!TWvK4>^!lz{#lNv!+)Av*TCB%L6rc#(u@k$o7d`XgXT9%> z_PYp|J%}%#3hhKBceSib<#n#d9-$55`7BVhpB(~Y(Gc7xQ=hw>q2%; zRZhuSn1gv(fJNx!ms~f?zgeZ z|2IDP7yIj6qsg&m)(7CT@Ajt;@B!@c8^?FL-fry0enflo5@&s{hu%8;;o5@3 z^is5s)+UNek!=&*m$+hmew?g}&Y>sC7L@FAe8{yF zq6)EX+W%V5703PN1Ww@$&fx+s;R>3JyPNr4TgX=am^LycoF?1V@tQ;0|Csi_MEl>Y z{r9f^`TN@;T$4tDb}w4WZ{jv0KVo53X1Gh=`l!19x^IEgOQW(+W`0}wo2hLl z`yvMekQA4|AbRU)+Fh35aF{(p|}JVgEf0&)@ld;NczwxC=aQ!!CHF;ly+ zP`iLQz1sM_=l>D=XfN5HOQf?D%MpEBvS{c~<4@23mgj%R^WWk5@A3RMd;VxodH#JO z8^GA#dG#Hie*d3LUVZre|CQ2OjkT!PUacn+7x~YO2R70RP=pqFiS{zL>AzExJvYy_ z9i=<$zj1sgO12pLkkOp{UUEO;tCS;Dtx&FBjOwRx1`axQ7^UbaG{z%4|HjxZ3VY^t zQJ9U9{%}}G(p%8Qhg|q)YYYB3G^C^O*B%K+$>TVIQ~0bLouNm2qKm$2J;HB>h6}=% za0STtI>Lb`Ylg~9O(?eAPhlr zn{$v$kJ4=Ecsh)rk3#!<(iWEz*XBB7#1$WTI*cPHp!DR^VG=n7CG?6C`HDRq@`a}% zPOsh}pXKuDSeg0za#Y+i|Botq4Dmeg&Aa}~^}mJR$qX~3F$-JySBl>D{kzk~f9KV) z4&!V5i}k;=)&Ke4=VJAr?AiZ)%lHpH>wnF`&yn6dEWjcpU$I8uMRUIN)?L!xW-L!% zhMx7m!l})!MJ|uxy^~^V0ansWMq4XEu0^S}0_(|*sC17NN0h<$ltr?7gEo#VaI6U1 zu@k$o7yEG#hf#_O#8HP9q$7+OIzBK*ZO*^*!>2>E?>_QvZbmdOa@0AW?f*MYKY>#? 
zgRUCgqAxuM1270lanUz` zhR}QFA4mGmFnt8tjakz8Z2qxqA{%70aSHxh>whNMq%#RqkdJAYfm!JKZEny0 zhmLP6Bf_25p3fr}U=fxeTK~J0T!!UXiO<&muBKOxl{dbXwe|Zu-g6$i9C%^H$xQo`VOJ_IsVn3o|i95Z+L3-;*X`{$EY%3pOJHJGF zxNDGY?)|WHA|FC2S%K0+?wzbd2|vPT-xWv-w;=u!yMBdyu98o($NtZA-ZuG1m9%>P z|A=0VwBH=Xah$*@oWVI=3+E4oSrgNfi_>|If=tW-m*0=P} z$0GY!|E%uBhUuTlYT=HzzYsc~;v;yH{w&|YQ@+XRcY`nloxk<%53bUov| z4@QPD^ypjMvAGX@ebp~1j~VJ?A7yr->&-W&__$vf=e!A+g!Cu%sJ z7y10qbb~*hUGdrX#-jg1O`K&H%3mW{!(Q(3#|zgBN8k6FA)V-dIA)QB-}Fs=avm1o z&_|i!oq_x`)*Bp_-!4AXMdFuWDVAY5wmds3MBgGP2QJr&Sqnm0_+vH zZ^hH$=f|E7zj*Jj!~S(c!htnI!rRCGI{d>u<7ee)*DHg<8@2yAyt(2FVfV=|ggvMK zD!lc|0N628V)*k z7(d-IFznoC?#o`X*Y^wyZ_a%(?4JH)D0Qp?KYP(PFxNa9_V|;&x9$xz_hmjY)-S{z zt3whkNaO8W{>O1ezi^aZoRJj{Z&7CVWrbhv$_l?4{9GtCk5u00xllIxId#)MtPK0E zGQJ$DhD{CCqrYs9=F8faXU%^-s~&yH{MWPmyY|$b{!%zD%@a6)+aK$;-@$mmgd~=F^8+UOZP3G~N=UO9x zRmP>kkJx`uhk7&|;~zQ6KSDOHvnJx~(tYL>9=rlcn3^-te4K~Ek)0OV(uIksSey=vjpbYY$3o_`dh}zO%k>^<59yuugvSoj1)<|EY&sT$>%M{u^ly z-Y{d-!dr;u#IUfK%#zE=a%E1=`o#Qai^59oPW5w~hui(oe`r4!<`LXg_xWuN)}nIG zu&|!2B9jY-g^l!zpR2FmD+;OW%9B&-yerBZ&sI3!$MaTurkZKW8_)Hx=a2YZ9Uc29 zHw$AcN>PTNu){0J-PnsZ&(fauY*pf~jo-CN{jY76CwsUXJY%D0ZaN|UXlWFG|DTgK ztu^cgVf2*`4+qKKUz>M99>sAaQT{1&-^dJ_kVF43eQ#tQ&G+pwNEXH)-YsGm6oohj znibzO{5!XKwoUeCqn}654DUvKyO%sEiz=rL4omr;#zkC4a-TJ&69$K?^xAI!?<|;W z^c$#uX#DlQ@z(>!U$I>|vjb)9?B+f8GIvd!I~UIox*Grv`^HLsW;VQs8gR6+9$Cd*N4z>{loc(WIP9;<)*#^G!jnz85i1=Mn3$F67xnaZbg4za2zUUO!A8MHM|YMZWUQ9p^6p zxpss65ck!u8!aCq|F_C@zdb$AGpTjU+w!vmCmO8+`J zfI+K#pXmSE$HP!^IGSJ8M)~tuVFY~)ijm`&#dvz>`|_WBKaoBOJ=}TjZnEo~e#N-| z&yURQvHmaa|MP4<(>Z(fyM8LBBmds#!%VUS{Vy3SFfKEjUWk9utT30}yg?g=%#QYc zhgb_jUyP;bNccX`g>K{#=OKB!tpBHHk=ycGXmw3e zx>n9830s9V3dz??LMd5>szsh_p65aZ_ikk9^{7J{h4HUNz7I6-doApBO}72Dkel;b z*iWzc0=uC|dyu&XX8FYKa2Wxp3QTnH+a@WNjT5%A}-@9uHgo5;SQelR}T*19(P_lw;Ye?1Nfcs zFL|zRn*1jZX2^Q(LI0a?9z!u4BQOeMP>i;l+S};ZrG1MoZQpL>={;yzqW^7$yh=9T zH1_^bdmK4>YcIbQ@`-RNKBJ!giT(KLh5fs_KNcp@JB8J;&m3d=RK&Bx^5{m_4*ULx zKMa+l%{B1bOeBR}Le54NJ#|69;uZV?}uz2>-!FplCllBian*C6$Z`Ui1LIqof682{&9 
ziMVj`?LLjZ?knDvwz9#743ryAKJg{_0z&RgR?k~ zT)S%;&6%fnYD;wBBK?8ywP!S&$a59!f6p27edTrK=Ih#|NcxYfuDyn;`|8z8;)RO8 zP!4>}xSG1Qeua2$6Hj`zFbew*zWI^%#TN1BuE!1G+`=8)!vj3R00w*Rm3 zbDR86c5~;+!uZo@_CK0-h(B78MGmcqX9qqfoS_(w5y(As&0Tx`&^xb)KgQ6D(Zkly zqk9SaALIGOGn6KhlMwe#o=i?fJVPnYov6ih?wQCW#0#FrQqA#aN2v_?`T}Q=U8`Z|;ys(Ubj;^Z)jB`X8#~pS1FOiS@I>%r+_?u!>%8 z9AXXGA>7Ub!rvqODZ)n&V%}ft_nlvVGpr{!BKg=j1UA!aFDV1g87qkE-;EE~Z&S7* zt}(7VVLhX3N>PRi^f&06_^~-X^u6eNOWy`tq_@pF9Q(2D=XVf?aTLdqM7ebY`7aL( zh5oT=;xo$_E&mLfP&ohZuk-`Z^EipqID@k|kBhjBtGI@K_I`QYFzbJOhunSI2e-&O zxQ7RbbG{#u0~oYgS&g{2aNJ{VD7`TLIL?{?`Uv#g*EU4=1mEhb>i<{GFP~h_sifDG!f z&^?Q>6w9#^tFQ)Z(I%bR_pryv&Lix9dUuRdmi@ma+JEAYCN!TC{}a|0Acs~|J{%s_ z3uhxXV=Iy`ntw2QcqpaEH5nkDeT>x~>-W9bk8)#3c~pGC zxiJ4?t^vQJIF2Mz$e?-``*)xGe?b0k*Dk-HU4DxFi~9TQR@davfqdjG`)ZP>aRzPj zQM-K9fx`J4WcT~>=Uw>|4SU$XbJYI})b(V`3iUsk<8CG6{FSr9Igg9DjNBaUXYGQk z^v)f^p685P`VI7O=MVXheNWmNxA;{qv*wVzhh)NdDftLh%7)Z1@jCUfFu*-%jW}OX z25{Gr=?foozK;I=+r~lUm;N^U7r3UEy;lA`V;VoTuQ7N2mqlR&+5d?UMiqY7??U!{ z)w&LK{yTqd9x``xx3&zKO|$Q-wE5|iFc}@Ms~^xMZ+0*6Zpfa8`v3hl9Wzma*_ey@ z=>NIr_*zlu{i*MeyYD6YT#`$%94k>dMIQ)R{%P}o$+bwnV*T&9L18`p={oO?^sOkY z`+46u&PuT}g<_g3)1Dm-^QSrx5<_EN$)Bk-* z9o4J;N7p`e(-C#l0rTH>ng2FV{fFNTr^quKAs(OXg2Utk#f zonI%q_;usy{=Q|N<)C+4LGH$0?2mppjOx4QFR%46Zwl zBvQz&Q2&n82B3G&5jQkNfAn;V8@dtu`aAeluH#3ZL~^wD40#4stDp28)F$jx$bBA} zhtd^wNTb@lHI2sR=9z!so2Xmi-H;jINIiMcJs+JPaGComt|9-WL}=S#{Ckh_@14fK z53&DuDcif1@mco2Hfh5w_J6PWKWIh^vNw%?voBlGIavO>#MZYC@SV@fv%};seoyE3 z#4~el`L8=D_xn9EcU}HeHawsg_RW0VdK&rwGW7a+^4B8y3xoc5=_4;ZM-D~um~jPi z1mgUlni_jB9JBV(^QJs^`ieD>sK2YN_E zz1@W#dOYi_@qjdtCd~_^0X@4y8laWieqUj#Fs5TBN-!I9F&_)@QTu5z_j0VnJO5Lf zy){<5peW@3=_Tj)JQr5+UxT$+kB!)jttdqqDzF=IAH==nejLQ%=w~eAD0v*o=oV&* zjOzn4WZeI+DY}*Gx#<4tb7A|7AJ)gOh~r~v!2jv|l05w+%D*<&_o4hbByDy{qeapN zJ*b>g9G1G~G|u2GlFRJ>zo%*-4!5}PAkW>yziXs1eEtpB%@4S3egN5QK0phZy=i{HX!8Tm z_KNuh<_&br5)Rq*qWi|010akZVciqv1KjZKJt7A%=&!YdFcj71CDfoW51{+^&KEfH za;RKT9EQ7Y1V&*DlKiSrOs^I9)a%7zJbfbS`DeJ({OhFAN9*57exHn~C~uL*==nt= 
z%;c^xSK}RPy2`uk1W&)z6#%&HzphyT^tU3w(l#$Qhv{%3B8{h5q@x2 z{r;&@;UC7m9FCela^%pTgde>6C(ea4j@JE0;n0jf3g2t|qww8be-IAV{6RQ??`)bJ zzCGhlt$+K|u&?n?!{1N)U&FW36GPuOUk>Mu3-tb#J^AIaod4KMetzoO!Bi+%_9 z@Boi6fI)Bi{xKBWXDJ_sXdk|Bjy!keL*tm*Wyhv`Ed1~T`ETBa(3p5DG!1zxG>_XB zS`yz3*^%E2(}k172>0Yaqn-0S`}tkxD{_xPF~(yejww&A4GopsLU=EpVK>s6>HE)z ze@uKLRFAXY%h&b8d_7cO*Z#Px{ejw7wLhk5f6UVU zAnUm&34b!CVmfA`1hX+0ZI87-_Na$f;_Jqfl_R_9d(pN{{0EEwF!3k5#)>}~=LqzW4M()UHmNsU z*G#r7(zaP4{!7Gv0l&LN_UM!!aTrID+h&a(j?+6Et?gT8y(K+`oG$9Lau3{E?s_I-JjOe+)W z@O1x=GwwNy^SFr1xQc7Ifx`TMHh%1TEbRYrB zdEZ`}e1(KEihGwjJ7UWn}+<*Vmw3(D&=2(EEwF??+MS zH!jfssUrJUJs*nQKOVj1i7=7u`%zsNh~m zF2+(++h?{0sden@)ofPwe45OlehK?`C)@T_X)rPr=D#j?Pm*8N0rT(awY}_LbMRNu z*P#BUd-yN~U^$NYbD{Wo)OMShBY2|S(uU(Jp# z?0+!N^UV?F3SqzQKTyAk4ep*YRA4vuVm}VzFxrkuA9M_pzJsOjSm~RPz9_8!9wvR? zm%f*z?*-}GD1DKuk-qM$JTW{R70z)ak&2REm1{HfTKA{o{q!8-GsUwR(+SV&w+?=J zoWyAq+9T6^Ps;r>+-Gqf<-VCbD*jA6(QaD-HEP;zK?U>1$wa2Z0}B1qHR=a-=d<(akJ&%xruqdP@2g)jwheq8p^&6hfL#s}!|FZ1!ZuK8o$DO`nKEL)# z{R#a5!s$STKEimWSe||or*Q^t>x}OY*2YTcA5b@Ush4BjoPWjmJ{orDUqF+3wt2Pw z1@&+C4dEir4``moHW$u$T*PG*?EeYcT=b{*|26sz#5P4<8g(~5vHx%JtK49&19=b8 z{(nF|Le(4pYX1*#5Bi$4dCdOr{m}luul+wk`+u7EtX*D9*1hVz?)%sEKhL>mD28JM zMxm@*pE3{$Hp4ze)RFJ1kH3aR0mOf4bSeSvGKuJvoI}jPWSnt`GYq z=WEa_{^wi9kK#CzNFjqJ1Mowx=|-A1KwvDGi-frTVl=l~1M0r&8rpX=qqo8XDJ? zhNdm0p?O1TXjxXOZ@o0+T-TZ{4b@9ZL(Lnd=5UmTTGSzp4C*o7{~Q#zi4pp9n1sog zifUojAcb1g?R(qz|F-`3xAnij9j3cxCQ2|Hb1@$au^4ST-`4(rTl@cQ{qJw1|)>?7Gd{x6=v6w^27Z;pD^ihqsQW`0|dd0o1o zkWQ|xrq?WT9ldrNf9ai;-kFEq?_K4-JoV1!)Cuk_Lj`tYFZSbi_Mc%V{G0hd2iX5d z*#Bhn0`~tB-{uPS6B+lPX}+>892CxB9K~_uZf^@&BY^bai3KA>k%*#B~$_U|G6 z1FvgS^Q%0xE#$}!Bx|;XJb4mT^wdc0im}eW(V+GE4@hpQ`el-JXj~bH}yGv7U|R?@n~xBuvIsOvg-=plz7(-xu|};o17? 
z3+}t3ekU801C40fBCR(_|7Fr2xf#-b8oyWV*(02}n2&{cI)7*}J+|39u$;aUzrTL} zqvv18J;7JGrvI03hyE_#FTMAZ%7vfepG(;jZ-=$f?~A2jJ=yo^(r`l>E~VG96;mk8 zYa{E`i}9?O3T5UypHWWlwBL^Fs>sdcRwU{1{GL*J87iynu_lo{+q-JOz;Mw`RD?9`H^ssx5;y98>A%iC5&~{V(kB*1xe{|8ikzb+y zSHC}N-z3<&W3_L@Ez35JXC1a)GXJkp`S=n0qeGa<8uR~-34>nySla|A>E-YFCf?UJ zIiZb0)?IMTPrO6%IPLc{$kk|1kmnKiAuOy5QT`Y9|9akcL67Uk;@Z1zJd6J_@n@UY zX*0xm37M??Ut|2o7|=ywUB*>h!wuZR9sGy$k1qNCj`{x3j20C37~kjnLnV9so^T%E z5eATCn^zeF8uWjP|7v+d{(X)<6!r2;hF^MU*Wpl*o*x* zh{MQ#V_0ZgC;tzYN0kH5>_6^2cMsWc$@nLlT-Ur>`fro|WDc!M?Eh~rR_|Z3`CDQi z;K!fL{~N-7Q~n+GAIFhG22Hr5KFE>%WdB#?&#y>7`}G^SFr1xQc72Ucx?J#s)pcKJI37rrDkc*q{&D$M@L} z``EW=LNlJ(w~g#uwDD^{!cKKhw8OiW8GFam{rQ@Q3=cQ_&n?`+J>;gbkq7Hrpm%O^ zFCNhc(6dC}1G-l~nQK4j>*}8c&M72^BDrCB7*38r)$5*niswf8d)kV~yy&@6H%>fI zT_t|vlp<@}`Cs9GLi`Z-8rXjONk49xv>GMM!utCb>uKr5XnWuLN8Dqj^W2B_oixrL z^8OEa|2w6RcmHhvH}5^mU3mYlYkqZj7%!ZOn1sp5y)ORZIF;V14CvsVPM?V$?z}wQ zO?EY!SIn>SP3y)t(g-F@vy@?Y2Av!nHA)g95c zCNrqdYPY**C01b#)?z(2Vl&#bdH(#L%+TF{1Kfc3e_5DsAAlYsHf0OwT?x_ebDIcZ5tNwq2|DUCOK;{wq z@~iiWA5y4Aow%ftQIFSStuWSOBR1psICbW#-qDF5&q%C;P8X@hlC?Id~8_we)sdnuI%C1J}ewOwjq3X&W7-vIh(?P#tmWr zu}$`H+!X%)`j)WogDqkAijCp#=4>>MCVjWR6!t1>zqNXJc(;9H*h4Ryu{Hb+4*LBt zDmH8m?;P43%7<(XJJ)RvhxNaFU%hhFZ^w~DuXU3*W_ewhavjrv0Z-vu9MO-Z?7NCO)aG9u?9XMufWTh)~}?Ib;%_4ac_0YwoQ) zRveDk7?*7v6W+W3a`?yVe-e`P@}IELzxYyUa!n2$$m1kVqtUvshD(z}voXe|o0CHJ z{>0Fd_;e`$`q*&B?}hcfY`EBm72|)F-+8>#X6*9wW5Z?oRb0aj+`=8)!vj3R00wQ8 zU(vVSdStTqhvwXo{pJpiAV*;giZLD&F$t3q&-a^3_UZqh9^LwAXGV9O@gMDg*f$=Z zum2@u$NXm_-}bUIfaU+2`d_%?8vlx4z7!VHbJ9MmJ+qh|*Bo|WDSbJ5+I^dsp7cR> z-S_>yGB#ZF{B!-b6049rG&WT28ynWpYpn-NVJ&?<>bDsGHm;g}u{hLCv(}tn^SYPA zW^yZXt6vVKWEryjJB+V(qJn!j@)L~P8`JNaVjLgU$BfS-g<8}hjST9s*FF1j5QlLT z$B{$|ZE5oZvi`@|eka+*-Mz&8fJOdif$>dYG@=R3XsI&4pvL?HWB;uOjQ_u4-+XP; zIH%Fa5;8IDalSMDP4wE;#UX{l`oG_(JJ*T-8{)r7{E>4_^P%Fxj3DGoJpjs@a>)ol~@AMnVI$h`0Uop?Fi z@r?KI0FN+$LI0P$f}t3W5r{T^<)X1+6uoN8*pNIhD2$;OV?3U%&%dkweXLBlum1Jz zPx4#uuhnm4-&={oSsXhy$uHZ??crD0f8f2MFr7XVC76x5=zrJP<2>zuq*kckm1A{V 
zgpW+O@Uz+;KO7w9yLKTKBi~gNmXgb{5^b+afA6uwd+j7+pI-L`>3>D~e<1xo(Eh)m z{eQ(6B-z3p>*m~8=}-2S8~^{>%c0NQf!;6M-(Q$(uommF5u33UrMMZ}{l9je!WVtF z+|37+Q{-;!#eU@4{YRs9SoEj+FBi^d)R)n-&$Ef|)BTqxx~6i`;Bb^Yh-4i1jS^Lx z^xN+-ervop#hr<8*Q<}~$Ta(Z`z~_>QFciEul{Uu&C~IN9K8eiC>N-IwF!QI{a^L# z>fZ%V>iB=N|G?_!!b$g@#u=Q&d0fP0Tt(Y5_CGq_6h6APsNa!a{iJ=-a7zFG2m1fX z=1bZq?`xkR=h{|OUa{wzaBkoh?jU)c{eRzHYxJk<{~pjE;oq$P)5pFe+H1;>0lyFW zhBo8(q;cG%f?n|>{dd|QL+QgY0;4bn#VD-*+oS*gi2mIJ+U3gUG&1sR{R#R1hvr$k zZXzaOGNxiWW}I>ZO?O4xj?5#@^=?pxG@@yq{Euh-PxRL4H`4hr!kLY^ zn2+2H-_I2L_|QA&s6Vinz7#!2w2{$`F6?~E8YXqka=)!aa-05qat*3B$yW*a3TwI7 zBXdcP)aXD1$JXE_TwN9<0y_Ji4^+2BJIdn z|2C1aFEB@TAdizcjWall>Nn+oq_%iY)X~#;n*Z~4&xiA_yNJtp)<0RJf0Daxk@R0C z{oj!P(KTBB$20r?-^>3SQtaEsp^ z+(VrE^MHI5^>5fClN_{3d5SpycqsXF{_$}72#i8}hn*|DGqPKIAWz2m$D7?>wat3z zO~b+s`WV+1V>~L?Sr2}|dhjFGgOl~In4iyo5>ni?d(1hwZl6BxshEzL$nneGA0A5R zoyXYkm)NKDx#+24zq7Nu8`MrxR$J&#r`HU-1TI^KK!u#GvECSu^8pX zVwRF^o~a!jGxRT@i{6d=W8?hytv^`7{uWM?@Zwp6EyB-^GyYC*9c}!*>V>deI2E6< zuih8!pGi;htIEC*R?%yFt)V~kLRdpzi~8%D2^kE zw)?(6`MU$1=t4Z(CGPRqgJ8vdHrxj#0LvUpaF_IJa;I<;Fknkq__)`Ogjx1LUC1 z@)-KQpnXsFe#Y7WvQnNJL5{*06eFojjb-0>dR+S)*FaCCPeT0#>4&sy>d@!A>kaw< zKEeLS_QxOAC))j!T|X5wQG(g%cQ(UZa(>jUKZtV<7Sapr|DPWm7SnV5Vjt5|dgpCp z!&pvVi5}^aSKfB(2k0UT^$+{9aPDfidYzqGIA4dXAN}z#-94+Y25Ye%8?hN%(ZSyp!YM-qb|c9zo@=+4 z9?u*~VL$yK>JPC0YuNw#0qV$)mJf&heiX-1{w3eoUm1_1r%>?=_3#&cSM(<2(1ASq zii*NXvbSY$I8C0xS)9j3T*g&g!wvKsXSzk+LGx{U^NJXw`C z-_IPqiS$Xxf5sYpeGIjT-)wrUZ|m*r2Rvydr;- z&9~+6`||f=`I~IzZd1k-<|nzfQ~6Rjf0)c8oDn{z;ant!*_ zOHn^s++XwzWZgLJmp8p16y|68eK+=EKjJ##@~`Q;{7xbqp20 zrzep@dAT#s$tL8`Z)_m{waeLlF&hR&U?)_{hI!L>+9(~ufJye&H80D3A-6BcdcJW&ieM&+uDx%oM|qc3gziL zKQiZ%o;+7#|L>A8=$o9yjT63_BaOLx?t@chy%-Rt>pmV|0?tw9R4*ZJ{} zzh#75kHYwWc?fHTwH_O>8Cy|`wr$csA^p*bE_B;3AU~zVJjW8{uk_y}{ax2gwk(tW zbELm_+e%jNl)nzhU#P9}O>0A@FZjmK$=@jR9~Ib*o=@LM>?QZ(AP%E#iTt%pzFsGP zt(L#G$>(IAyN7JZvVVoyw1fSNmJRYJa%e?d|94b4$B{%l2Pn6M{V~n_KHoy8HhISe z&*!%c;+o=kw#L)_Ynu4Q^?y0C14(%^Po6|oyiYi_v-II}pF!rXaszd)OQX6*{E=#R 
z9qQ<5WM+6**sh#>TK=AO|9Rw{`*WGRing1|Uvy}5b?#KAACb3qc-QF3YJbgRe=T8) ztWf@vzrX)u{~tQzO*n=14?i0mZqUoWtbQT;e_RsokiFzRvhUCAQJ@TY5dE|p9+A0i zgTeqgXp8TClW|(Ic{MwL9F9Ui#2cRLis!oEIWKum<^Bl2jY6gQ-51?IhF*;ENSfDO zr7oCAk7u!`hM8wfpN#sfx=DQ!&uXtDr}CSQnJB?W$2Vqk=f7r5mn@8b8_$_fUx>w6 zit2Xr|B%vtsMU6;8=`L4ZpchA|L>KLhjQ(+<*r+aRak?ySdWcpd*ApIIxew)&uP;< z)V4wXkoF%MR%m~W)BZs7EbR|rW|2#1f1r7%GY*8a6{RRcP8{PJ{R(>Lb!G1%{qOX> z=(%nF0=jRSzi{8Y27U)|7)Nm&Nu<#K_eCK?_Wn%$OZNRzf5|KAi)han@5<}1is$rp z_^nX>u|M+k3fnM`$SU42u{<_Ansil_b@2*|8FwafM2un{XBUQabL;H?MuF3A5^l^0H3u7pTqw>DCG+8yp_!~yi z$DkN-4npln&!^3iZv3$RC!fx{Dy&C$UB$21&0kSQeDe7)iF=K2dono{(=ihzn2ovk zkIKLKu33o1Sc>IXiB(vGwoUT?Ht9@uDl59=vA6~?|GxZxLjKSGUir5~-RylloqrY2 zStz$RLEOuKt?g zj5m4*dPcLYjVpJ%rVDv~m6z<{N1jG<->`6oJc}y#rG!)4?Tk0>i^#BP>)Ez-u1lku zUh@X~7qzIvv-9ofm)&y}*Kh;3a0mDB0BsBGKd{I=e{`a2gZTx>qX!KqesBLh?ksX> zMf1%U!Xx42w*Um987FqEt z^Xq?V?8mhuFbd^gH%3bqqxa{=`^mn)knUvVn=gb(_RxK?!{} z>gjQBhBybK4s-c6Pci0CE=0~Zx0qatthQlX6C2NfS*Ji zu*z?15ZBMICD$X)k1wpBSD$R;j&sQ4+@ClHs7_gt#y=c;J{;XLG#q(zX!yaoPlWF` zP7jASO%I38eJ*^jcWU_Vx~bvd>Z!*1UJKuOqa=KLLrK^_?Z1V68)k;T-!e07KcGB3 zqC7li{NI1{|GD<^H(m&(^uCvzdnYZ+=)FJp{r;8mQhquvy^mcP9)8%aFQ-Nyf#1u2 zR1|W@=)W=_|6huf>-O;A-@QSZq^#*-r{w8-`Q?;dv9F**SyXn#`oqM3vo?aw`Xgoj zi?4-uM!#nL;eQJ|SIi8%wVVEK!ha6?-FFas+oy$Zjr=d+-D&?h?AgKp*nbJ%zBDA{ ze>^%Ij@L8}v$y68;X7OYQ#kO(e+q{OzYxB!J$}@0$B~5YiSPsMq!hjPFZ3soaczGS znL`KiIElvB|Hv5DABE?zUZnLpwT-IRDK!L07xAVaT&#d(d!w zaA>@3{TR+dTIs@z^3BXc#G8{4y(~Q%tuxo{65S8X9WJ>h7VTCOtF^ z^`FBr0;4bn#Tbu?D4egL-IID%`)8i^&jRfq?LQOOvY49$4r!9Hs+#jiS-%g zNOX{$8?;HZ9lGzTf6#-5H2Z&#_J?*(^AYnO=~>t1v>jTtBPy$kerNvUd|@W97lo=* zMPVVmcAhxBsb7h{9Q9do=9fmDaifZ_m|yV;=hXSFd8fXAGM*tG&kkQhUyJql|7su9 zEm4lF(B61Mxq@o-dkw$Be7r^4A9eQ8b^m5;Mcn_slq^F9y5!@S$79}(XMX*De_+c# z?F~GgZ`439|v(5ao@nBWS=@A|JN^tfEP=@~rj|Eu_v^!xGHaMo|FPR4#H}yn9Fa zZ48Q$++h6MIRAKh?SbNu+EpAT(kG#Qnz3D^cNT{_Y+v_bdsf{0e=|BvcFj~w$4pfG zNWS$BOX#yP7rpj$m{0co#Q1UA`1c3?QSz*Gb5}f$~ z=BBX!dk2M!^v;p&e_WrL!qahu zy4S@M)tAJ7ob*C1>X1eT^%!u^pi=n?vHdfY9F7qfgSI`||MFl5Vmqs=+xyopmqww7 
zY`D(;zs>%C$o{{}{=Xz|A}4>gX7&C5v3(|l)AyN|!+3H>Rq)^;Or)2Qh4Bw({Y>JX zjH#H8{;w2=nPdrOW9P4aotR6`N8eYC36QZITTJ%rGgwM4$5MH{{5RGs(pMqwe=~|) zgSA+XSchi$Z=`qDs9TP)k?C8}qYlZVyGmW7Zs~3LcqrxH_f7Q(xm~<>ux-mmdUozY z{{OOamcAGJaS;8_7lp%QrSOlE$B{%TO8-~IKSPgu5~e&)6FrCevHX#qp#R}j&&Ds# zJIs?O<2Bnp9!`^IkTu@W!Tyi){?2lrM?5>IM_pT(2dIysdY1n89sWc8SbIR2?ZU>> z`F|JPa~W504L4BO|62QJd%L}7q(OVP=g6vm|GoTwMg1$Snq1c`UM-000&@BkTK!+; zFnhWQ^A7GI?mP8#{$I?;wQoGJ|0_PB9MxYC_auyG7^KzTaWCXY?rHWN4UmH@LyC6g zbL3FOc|IK*e3x64|J);xpQ2qb%XhlScZz>A{;e!X@00&gk5TR!gJO)wL`=eDOvUf+ zf32;(N#4}9jc0`Q=mY$Z_P?GkjF~7wn3)`k#jQ{$}Wl<+loJu^#!KKOZ)dn^9q}zIUEC{&+(E|3Eo$O8Clx zy7$H3a}?$mNvEx@Ek#+p2fMKs`;q^%=R?~m`JavTZ2tFM>59Vq??cLe-$dgA`JZf7 zH?)vh?zl%^>tp%f`u>B$>QmPrCXeDcl1QPv+SohUgdFk>Y+W+84f5nkB$ZL8$usEt z%Mi|zasI=3vR~ivW%8X*#q$AQ2-|1LpW~#@1oId8H-9iJtRXMr8g3x>nEiiZShz*+ zG-lk97#{A>@1ch~&)rRS=?8eguTP)FBeM55_Od4jZC74mD28JMMqvz!(f^g_!+3Hc zDi;h3lgP=Kis?vhGyeR#a~|l0^Y?bJf9bPPzl;6LohIX6eX;&)o@U(i!}>30%)k6+ z`W#)GH3y*MoPGfm%F&<2a#_E?YW)Ok|L!gJ|1+LnbK4ldvHV)pv8B_|BJ&T>gg8&268&p1o`^#fG!i~Bz8Cif!u&>ofKL1a((E^Fj} z<-=j_qbThAa6tY~`%bIm|LgW=(8m+osBxWMp?__b?|+*7wSukfo+MJp;G_M!P26p- z+W+3Q9r9);x-OVsaz!6J*&|;yyrTa1dy_Dm$rjgVN3svb+W(&H5JveATu)Z+&`u_+ zcFF&Z`t0cypDNOy!dC9}e2+cfb@}tQ=Og1f!Z^;CH-5SE-N%VI?svvL&E#3~Jo3t- z!uX@Hh*&Pgwa9Tz@K6p7Y!j`7gzCti&p;!CF+SuWHm+DRp8kd$g`s-E&(Vbe(;AihZx{Yn-Eh zo!#23u5BS>y&LP{*0JXQt9#qkw;g2XEcLFw_wJGE>lf9-sC>~mv;JozHe)N23)KJf z%nP8`9%CCXv(GQR0`>bo9RIIjD;qcX==lF`zwgC<#PcKaI7mN?!uZ3x)>P4tBZ(BM zk9-)vD)AGSI&m$mZ@3_SABd-Gnvg>W@;HgpIDCUy9}ER}ZZu_d{d^Ow zd!pB#leXG4$D==Dn<>McKJlcjvzcEC88o5)y~l|h*?~OzzWF$DlI;D$ZxW};GdPR$ zxQOQK1Bs9Je_ZC5M+ zVGO0$j`M879ZnyC`d2&~zw|WEI%6O)ieK~1fy5ZH7`X!jiSgt_WchbA4kS9;2NILG zC!=sKj545W-$0@p)nmn9TvOs&EADkahKx{Ee~&A?*Cdr^0&Ilsi+j{HH}>Bfa7a>hCX^OGDp^ zQj|r131Vf??Hon-KY#`I;FoaZTzuae;{)7)+*W`hOt|ve)?8NwV*2+7{$? 
z?dBcEaO0eqv)s)Oop(%D{IzkPzcQ9i&#lnL!DV{qJmm&kmh8$Nh8dZhBl>pJkuUr+2E~JBC=}M_-H{^=cm73H=BBpRF%E zpq^3(*Qqblr_{Tszwg|8_bkUstil?s#d>T++a~t+Hvd6(exQAELA|9u%ac9a4eH)T zVK*INf0Kp%@6?;Ij%`Kd*pC~}`DJ3Wu(qNU$vNi#PqU_hUb{#gH}B)2g1#Gt`G*J8 zZLh22_#d6~F!2vn4--Gw_c(E6!NbJiC65!|U-3Bcy}@@Ahm-{etL`Pf`^MeGch=oc z9GG@LvA_3b;@dkPB>sNMFBAJ#+)Ok!jtvb}V?*<<(V^*eYikmp4lVabhrRx5KMta- zMjkV^73Z2AH||h5cuJLNegOS=cFNQi}MfHi7LT1y8<{|Va{^phb#8F`!M@8@b#5*rONR-dGmDsuG zR$}*-2Z_Jyy_<;p&!zmn_u=Ejx0EUG&bpV_^Txfz{&@q5Zyy;@r#~MmzGmO4#z%?o zJ{%mrv+Pmg01m(LT=@P8@6)|a$e{y2h;1aYIvw`_G{*3O@uw@s6v*^rV+{9=tBkd8 zpleR!49?;_F5)t-qHU9Lnr+6Ph8VX&mwUPs#-GqL%Q(#j<1}bOGg=lI$3YIQ3;4|` z3fF{l1GjJoxot%u%l{s|Gh3u@rzkw2KSIxeBK?m=Y$t6b4DgF**A3b!Zz4HEA0as$ zRs2(Ti$X0%aL2O@Gv|szJ?eUkLK?l+{FOg%{Qon)S^XDdT-VQ@Da`->?66SG-KYI9 zp6vaNIlc1g#CXj=85<%eV=Cg=gVV_@Ig>0wE^D48ITxMAKRf0r8_BL^%0@EJT{*{I zo#HUcHS@6$$!+%VpZ(~;@)_>$-PJ# zd#~cZpI*C1-vA2jkFRMv(=$k;4hLP+?D-FqM-k`$A19N@_UaeNig$M$!&k>3ukMfQ z@}ABAfAPch597pNeCu8n|0UwTLj3g^lvmrg+I=~6#QSj)r*Q^t>iu^1dk)fMxd}R!!s84 zp`75oh=1!5=OmI@?v8HnRXVp-dC&CxtKREK@7X)Yi(P!z@+N2(i9yFlHzOx%#6*UvHjVPE3u!kLK@l(!Xy+2mZz zN84llAFoS)bfOEtJO5h$2b-bk1N|ST^nYB@HXw7{tz_>{+5ew3_un3Vz0cd{Uzm%r z6w9#^tC0V6QCLfA0*C%T$4}%9?y}YTztrc2{;zfH`6K3P@ZXH=0d0?6`ejg2tsVAr z?_&kqMZDtKZE+4|rSkHDceBRzTTzPSGUM;c$1-}YGBSk<`fk+kP&OY@M(hJsl^*`A#LH+-#`k!o`rT!4-&_d6CAe_7MC)yJ7H#*RX-#@?YmN4(&9v+~= zyL?0rV9-12*9Fp7|4B;RYo|$H`9FQ#_!jD8{=YCdJm*H04rV{f52NU{Z^#?I z-!b%J)W0cj@O!%d&v$Xmlz{$n;Y zy}6hEBkdpd{6g-<$f>86lFRXQzD{>mzm;}<4;jxmSn0RkXkU|k&iSkOy76@N=Nj&{ zSdWd^jIAg|87k2KP2+#^K#jbR5~mo~x^v=&4C4If-LBh<{WyrjIEv#)qU}xZ2OUGa zA3R(CM=$LEll^f19~t)wY}w-dB1dn1L;wGe+5Yco`?u)_cZPPJ(Pdhb8>0Wt4F-8k{z zY5aSiabnb_jSFXu3tuq)ea<-XDdW__xQwf~h8wtrJGh6oH;lid1D)tXH}dE~L$~p7 z|IxI^SU1^n#Mm~O<8CFJM;C_&!g+)N4EkH|eTi#k6^G~Oov$1J-clTf(ubpmJHO2M z_afupLZ6KPaQ~k<|3BkD59N8!H&r;(F%u=oiC0$q zX45-&`L=dA2Y@~wJ=}TjxW=z*pMG6_i&44BISypi=;E-PT#1+#bEWAjdhH2g-0yqu zWISUat}TvhjMw;WE!JZrHlzHH?d?yNq6`(-jlI~9!#Ilk*PqxVh4n96taS-R=HIDj 
zzG07fzxO}?a!8WBb<&sY`;~Mhn~*~X@;HgpID`H#SuaSQ$3^tL$BriB{y$gAYq)`1 zxPy46&^_`29$^4Ee%U4V1>7b5#yx5G=6|SN&fW8h?|@(TH0|IS#{Xu>|48kT|98s& z>d!QpL9~$z`#;?`2NA>FUtv!7J4J)T2>K|rdB5$-_ztpjg?BRV!}?zy??GYxf0g<_ zE&m@=|I-WWbNx^2HuFM-QH=4Jh)I}?sp$V(>%X<1dcRa0rgQgw$ypL)31(w1D(5Ia z$%R;qrAYFtQm!qh*UICmdCpa%uR{GR;>IsMOR;D1-B$mSEsxc|#t?F7 zMe`-s{&)RD!rF>b9TYsRPk`i7m>h|$R2K6}Zo+MK7UNj+x4&>3c zQ~dXMMlzmN*hPxEba2B~2T{HT*aGu_& z&hEfP`enpD{qyK{P1gkDB(EDM;C~ItdCwKv;@joL%3kR=(s?@kVqD{yhZ&TA&RR5i zB);nu@;|Zvy?f(65_ib#CF@_w2gs@aDz=+X|6ld3ozj-X(|xsiM#@KcV+2NF42lu^yT_A#pZ4EhXG8in)5xfU>wUA;W97%ezFE}rE3bY&OmzJu zOh&$ISeQyq$4qoBkeB84JmPwzwww9`(Xqla^J{ruS%BOn{e5T{tv*H*y%{CK==&FY ze3Nniz`11X``UpxRz9D5Ar@mfR-#{;ts>W;^2*1;TC(C7aovIQ$LYyK=Kt+8Z;@Wh z9#3I2eJkRaYK&`~^H_&cer2e@ZtTT=97N&#FMA;LzU%wtj_3azC66PC6q>c+GGr5S z=!jCikUgZHpg&#zcanYv&-M>JqO3yoV0IC^Fg2E)G?HD!=1e2QMy|(F_mq9W#%C*U zXEUGXK8p%#8{cV&>23TQ9go?$_u2Ps;qE2uv_Z zJ6D^_|98GiJRASt#(vyn{GXj!GtGZY5r5Pz5%wJafwrJxOoKv$I_{7xPhGq%UZf zJwNDA_rF_A@A=G(u#`;glpk;VW*_=?@pS*YzAsyA@*}pZx@x)WR$>*_U@g{TBQ|3z z`acubyqnKJ@BMRWpOxp|v_>Q$&kqqc8Rz)aPw^l0xGzwdYb&rDd$At}aTx8_eS_#k z2fERP9^}!eu4_=fG*6J{k$puS%rDM8$e}R*M>&_eFMh+~xTrSLV0j90{ohglaU3aR z(1aX1kVj$vpIOrMwzR!2jUP*EGSA&ZHoU6+H;sMgnr5ZSJ<5N&PLBx z<9*X_p|JlCclR;RN+$W;L28crPyJ9wrWdFy$a?k0J-6)v24wttg!T zcTT!qF_(e644H1}S|y#ybT;;X*#G~&wz==7maOyLq%nfg3E(*jNh+RqVF+v> zfn-P^h7C++c7Aow&+hrzJplz2tbhVq(1I3}VHOlnU>0uR25w;Qry7IoWcTiS|M=>8 zPW3s}r_MR`S7 zsu9;BtsBp|e=p@pvD=?u(ug;vBp#G`H(NZ49CD zkY|yQ=Ec&El;_ie3*xwh%eaco)71O<`ZDQT=-Il$aFc!;Ti-4W_sNGS{nE=}+b@(W z+WY->hE2b5j|0iUNDE702)%QH{5Q@x9eo743)~+vW89zLdcQbbzaQJbLAm&{eV&~& z8e=gYC0{U?{%7W?()Vm=vt z1K%XKTzkcuORt1Yw_XY5=8Atm?{ndkdKx#(VUokEGbe*|h8(s|i)=UX|$4m-8-Z~}xXw?@& z*|IN~FZ=oM!-`4a2lGA`cGDa4pA8KIo(;|BB{eO0R^P~Tp(XWPSna>oVm;o$Mr^_1 z(=YSW$_qbRH^cbX%i+)o`9OFagE=hER}}QOK4ky!Pxn31nUC~w?4p7>jUH$g-zP3o3Ukpbq4)k)>wT{ zLt(f;?|s#}1z$3DO}~u3KfDsIlGjoC6Lzrq+|ANf^2z?;rm)+%i}avE{%7(Ez4MN} z?eUP_ZEe?pZz<4t$Tz0dq>^c^P&BJQCKxQ 
zb<(3}ys1EWFV3{^=p&z`cb*WpdbWvP!oMJA|EsezGt|H8*0^_gxp>dT@6I`u`o_mo!=r2ph_`Pdry~9XFAl<{Z!K zHodbzdT^J1AKlBOd4u#9OFte8D_@{3_I+(J#I=(LlS2^u2BPm|ErvNBfn=fdkMW*o zNGGb)&DFk#nhDn5Lfs7CKa%u%jB?FrjKxGu##Ch6j5m{QH`v=-<*Uu|72=+F8TM|r z(Yr#!LTzv~Ij>oJyyXFVoJ^zjto)^3-i$c^H>LZII|Iv|iXd*og8;);l39 zw5coQ=TiC(>_UyaT8lqlAEEb`)+m3*`hC{wiSzU}t&twY@^p{uN`IkW;H&vzKm8#3 z+;d+r7nfd%8YIz#G&+#QQS|*huK$>C?Q`otIPQJMI01^4#QrV((Ei=b{@uy`-OBz=4GDW)a~JpV5FhRT>-Pii z{{Z_x!Tzma|FieI*#4Qt?Eg9J|C#LnlkDHK>|ge5ZvQ)RrZVh*vek9vWX!_@#Wfg1 zP?^vE9W*2iqt^~)|I$n2{1-l0^&8yV8uysT{uMsLIioNd&8bg?v1H0Uq}{`K_dDM4 zM0D`=??jie{U`V59;;TcK~W>lTEsa82_#XE$*!G>>39i6cn!1hI@&y^_MOsCc6t_F z^o;anrR!1upY-37el#Cr{~wWlq|u7o|5aYH^-qMwNw5tlmAhx{;E?R#5yAS zh}J)_SFdySVm}U|J3Jc>la;7JTe19qL>@yYa`o&9c})K6M#EP1-z@omuDnXNES5LP zwBuGXuK$x1PZQGUh;p$y5?OjIQ##a}N9o6r+t+iS{JYzKksk<)>;If0&!AG@!8!5* z;uu)2AMw~z;gaLaNGc2J9oLa@ox$tEdVkHf>TfJw|KFw)?C}SW`unrbm|yPq?C1N3 z+vHu`M_hyRA=z)YXZc(0AbS=Jqz^`~IWR-Wp5J~d3?uuV<)c83!f1@ecud4(OvQA( zgd)6#*?1kX|8HymVSMy_BaY)50lD>K)RVD%nJ=t)ihh2w! zBykOhmGsrft^XyBnIrN)S#^#76>9GBzd{{7fuy>j9#^IFP1hL%3UU9mwe3}*){TlcBX_%w_L(^h)oqDQeuDb1(`VXxrU&GHpJf+xyU8r2a z|6s|0u!kP&?i%c+??-N5BJn0RtHa2H!VaU^n0zIfB5TMb(pNqa*1P{EdS}KQghPA- z`0jP-U+5;YsLE6SkMkd6)RU+~Vv_n_SX}2g?b?s_Z*(}$;%JmFvcEl}cI5U)JR$v_ zMFv@VHyXs%I7RxWN&g_}A1VDv(_0beKbBaBy43v7ZGWSD7jI?a(fr3#^x8Y_?V7Ow z`Z?5JbZ}B^bN(aqPwoSkB8{DCv5s&v~$V97=pGN+Q0I6hjOHIv-U4NL(gXTnJ60? 
zp!v*m$rlT4#kSrgZu9wwd<7=_WO+~EEDF2>Sp56Ht9PoIb<{ztz1IDWNd zC_h!_H9&#`C z;~;YVJNkHHem?9t`?cpo$=9syukW^xPjHVuo}NGP4|v&nKCZ8EP785a&+DvM)=o9pw2HDnIGRQSV!bdk4ln=HnV@Rl5s9^*(F-A1Vm7s5@QY z+bRgjvjw64L_wGy!7Yy5wKvF*ji|H;-lqeI(C{sU+E4_xFwKz7~YKk$J6z%~8@XsEFMe53TCd8hUJ z%go=uWB$H#TFENo-PNd>Wc=P(dfhnV_w*!LFV4&E@hYz4CT`=nZ}~2HAI(=@2oK4A zKh%E4V5D#O4-BDqPLsAV{0!(L(7i_5jH_q-XP5ujvcg!~p%=oYg}%Re=HFMp5Jov~ zG%Dyd%iNc1a`U);@7YUtT-Ts(iTm^%s;0Pq)Xa1qa`W;%m!xazF;+a|F%gq771Qw& ziqJN}_%k|Y$mi31o0H`0)7sLAbG@Fd|L=V_qeYpYLV6_s6Eu6ruZd?iUdJ0q3rk@> zy))l;k2mRy&`r-GgD$)!tlaxuN-jqweI>aX74$gvT#L1i*CR=<$K!FooP9mi_m6k{ z-uG)`V&oQ-{6L@B7yRGn^-27TeotYg&!}(z*_;>p9yBj8$BW#LgE)+|c70r9u#(=X zy%EO(Yv@UI_joq)TjtiIzT$HEzA0XV4rEc;F8?&jGxW#h&vE((h-(7GwE+`T$Mj8CUTneZ%!BI_~-H^Wi4h``s79Z8Ep#>i6{de_4OR7y5_$!XBdU z=hpcD!|2cx@VE8{_=0@`8b*hHKk}VoFos|lMxdmme~9}Fj-sdZFQ$j*h0*k}7>_6Z zV|S$efoDK=-_Yl(uBpx_S5oS~6Y3OoR{}|T{SiLKuAPjjn2wiFgx4?|Pxjx<=YOZq zz6%*-(T#@f>OVBC($~IB|9i3icQTE*|8A+Ymd`L(KwNj}m5cSq)8`}Z0aP=`ec?^V zixBtzsjo0jKqgY`tyPc8{kR6_TYg`PxCZrdawSS%)o%Gl2&?I9u^#VWBevkl{y**h z2X)obkECZ%k5cFCz%J~;UhKy~97daG-j3Wnd$Q~NqqbF6`>LC5_GEvg?dpHD%#;6+ zMl0g@L#23XkVF$6k3Xd8oyF?#b)aX_U8dea#&hbzQDJfX;W+sL;`qZU@(gn058{vO z3YXNmj{oB0ePioJN++r=YX9F*N2rhgVt#GmQ{jSmF5xn=-+Ve;C9mTqZlg_Gza1Ut zwf|3Rli$-eM|Pj~f0_DU8^2LozbUr$<--;-)dbezo=vD*zfPFV=>A(CDsNQO^(HQOvGeNMc?zrS;?28u6~Zb7LV6+jOz~+3GX#F z^BNh~-*`Qq_icUDztRuz^Zf9J0ioU8hpe_&H@fH;W@{@u;~MKgV`c+Po;`^keyC!B-B^v-2$Yxh-2uR-@JwzaU#I<_`q zzAwMQE{g3xdgV3u?z|>c&};M;#4+@=;|?V0aUX-n^ZcupNWXY$ik(N+i9c~i91omF zmhwB><~?Rzdlbj<0Z!ozKDPd&dNqzYWsW%?x%|IV`pcyMfV4{QU+ll@KhBBg0xsb) z((9!E3ZDmh=RE13GbCK6-$eI5aqspYv!q}6ZDHk&)?py;qjJiS@R01cSNc8cntbUR zZhu6_gONOIT+uz&36Fg!Rq}rIBza_<{y+J>j!ZaClJ)Z05Z4UD2#msLj79c4`q;^~ zo$^0hyaP|x*IuW7DOSIb4cFvepQV zkuoTW#4>3WKHE9H!F>9!eal)lp9-%#egpIICJt3U6@K=>Jdq=O8uPV7^g;Kr?R%9; zi~Rl;mZGG8|B%IUdffACCAk`F(W||(p6vOmHV(NFTTqIRjxX$RybF8q{oS7leeaw9 z@mponKbv17toPrCgoET^ls>0l=Lh}6zAK*&KfzD0S!-~{{|E;jScmb}{|NsmKOU4X 
zcTadB{NUtg!Vky25cX(mmsuO|M-%=k{CMQ2?Zx(2;d?#Lgq`D_3A-je6TW|sua&s> zV#kbU!#|Wg8%nM{7q$*~E|gw*E^NE?Y^d~G4Yo{qKK%Xg=fnHe{X^1m6Vm8F7W+^4 z4?i`J>POc1D7*5>uopj0eKH*N+i~n!_;mOo4($%%1IOig?}o$sjNi4t8-Bj~-SAKQ z{w`GZ6o=}~8$#8gzYDe7zY}WmzT=zycBotUolw8^+aY=2+o7@NTcIKETgDIH4owB$ zVjH||jN$EYN}OkK4i|6aTB*uRmlH9KR^v?^$XNp)UG(j{{Zy``rV!LQTzYC zCh!HsA_GjjbO)0E?d?0|zbgT<9w z|JPifA@sO+VoKX+7`-zkZj7LhLia6kYe(ePU#b`wMhh!n_=zx<9FIzEu8HJiR1B3a z=htGYw~j-PdZU{CuP&*{u>aYKb>sB^&C@4`dc5SCBD{v#D0$f$&g2`I zkG2Z-e}aubc4pWFWX5rp%-R1_*#Br^k2a%)4gGk3n$`pC3UMsLTWFp!AS@+Q+T>~N zlI8T3SdET*;zt)dJcBrgwtE@7!f%zE)%OqhEYr)EsYAc^ZYceicf+>#-_?gDElZ@y ze?{NkxQ0jE>wBYfx8T29|9A6NeS#>B=igw1kh`!4d-1XIUq9P_yZ)~>-$&bj`^9k( zhf#^17qv&eX->fBwMTxb-S*kyP$Rt0*j3+i>UVk*(&#|*V$b5s?}jY>D2^lgHN~~S zKcII?e_Wg86#We1p5$3%oYRH!1I1yz@N>9;OQ<|s94b=9;WEAUp8vm99In!@qkj0t zkVL{cb=Wjl`Mp5-jpoLU;ihv^%ic5o{9d?CFMXBYPk;5pwf90Ny6(NFefXX+;P*oJ z$@jut=iJ9YJVd{r&@mW8P_=4fs9wA=)GXK-YPY@@>NdX@5=f#R4Kv>hjc7tMTGqX% z4f*K*TgkS??}heB?}d(W{=d-w(=!GBf4Kj@qwVnEy)exGjKC<2M&+1|*6-gK#?osm z{5QtaC!&6{^zWAb)J9`L8^dH_Q!yPcp$M;GHeN?=y(Rrpv7PdUIXiPdOh!?kba1GAI=57~8qeMj;q2;F4E z2L3;2a$fTlJ}+#)6q!crSz*4Pwc=Ticd!xZeZC)gWeYv#=?;|Acc5GSo1LK!o}w-$ zqitS3k`IyJ_Mq}&zp$6wkBam1+C6y=2OS?qvRJz!*1cq6o$G(qKU4~@LH3vW9myu7 z(SeUXzpUe4$m2~^vn7MzG__``UPCVWn9H|+{A74nqP32?0MC?ndHM5 z_PO^$zkR}#7X!(`7=mFa`PP6if*gg>7>mB=-V5W&iDYg*K#p1WSM2|B*lDLitZQ5XXvpUVSgjc3$t7HtK)uAGVyA&+Q4YsY1S@zaD@4 zg>Q%K`DcDS(m$R44mM&7O0fgGP_>U;ftnlYkq7FHE9?q-ZvCH&>Nj;zV?zB#Mql<8daS#0 z`}@qbuFN|AdBytP$<8_Y2h>#=Wa)8!Uh^z{qW)(u_TwPZbH#zf^xXP4Guh#B{7>g| zcVoO({pa_%{>_%f@^BtIY?kLmuN>rFU6({f)CaileD@dQjp6kAJL>u2(&6{q{I~_q zL+y6yYZRW5zG~;8(ls5(;wX;e1DwJcw9R4vBl^^K;>r9EZR}XqHh4x)p3hFt$Foc! 
zO>f=mnfd*kcrM@)E+e)LQZw}9(>qtF|3_Nqhkg^?i`DDl0_i{d;rPGrwH9^LBWU_ve_W4OR`iJG$ z5Z4bwoFA0^4&MO!6My^d+UHx92?rk8=-JKM=i2uT3$*_iYyXqY?0}Yg+W$z?TYI$2 zKg%wDhF_?-#$r4sVlw*7=bK9Q$Pd%WUVHgv|HwyxUWC^WeR(?A6ise{|)mV%5cn57G z+3l08KQQi*9p82Sk=>p>$&Nq9ZeKMZG_GUEqj?s)eKEVe(E0<@*zpqvgdTGud%mi^ ze^!0pW&Va|y{S5cjpB}L7{)aNw$QhI*ZK<93D`m3g}z^ZBJ};z`dJg63VR&y#eN(_ z<%37pR%~(a*k%?VlmDj=RZ*v+UoC>|Zix|Ch0C{ZE=~^}o&R&=16O z3TJQ*ac)2g7wDbF!8?Wy376<2(7llT&nC{<|LoVx!phmfSIO(Bq~9cOqk>+uK)M#Q zqkjHWxGO9gm`KxM_e$volrJFp9T5a$iYdBl6^opY5B*iS!*?s3Y886UP^r2DY2O4J~UzOQ{MG?8g^ zp!Wy9H?qgNn@7px_yDJH25~Qr&C(qA;wX7;fIT+!nF`N7Yh8P?_cu?4%Vdv!=&NMu zZ?sdsW^H@=P29#^+{Z)odtdt(gV9&V|Nr;>?dLHh3~?Oi{|zHYU=&6pu9-TP9FK{Z zjLL=b|2$v|9v0U?+NupM){{6PtcQO?EgOJp5oX{ z>|5J(|2F9V zM;xPmT|93f`n{FDsGk42Z>4Zxm@h2n=eCY5JxjfZo5?J?Q`$$@q#sSrX+{e@by59)Lj8|8CvvZN_TwN9BOT9Sn^)31 z-FpXW=t*>s6u09Hy0B$<9DmgQS>hcb`VKTXFYZ5FQQ^83(uERzm>n?=dYlXRIGs8B zcc*WpO#R>Po6sJ}`TvmhC$;-G8E5{cJQL&m##7-ac^n_$6x!Cw|Hbkc*(slQotL+e zrFS1u|7Y0$;&0l@{+AD17RZ~6<$to(arFN=Bc75^YP*vc@Yw(768-V|S(oWo(Y;UH zyZr|l*UxGQ((ik}?zfwW{y(?LyU6+f_{MAV`-S_CA0l~Hxpq-GH$0}(e8mIu8wR7V zZeSQf4#NnHLiTshhtcF%OvGgLZ2Cl)O7?!kbJymK&wUG;i&Ek%lPKgOFvs6 zzni;%PyL_{$&C-4SB5vLAJF%+5Q?1l8fN2lyn*?M`<;}0>-o_0YvW&zgb$MYeeLd<6*_KEwPzRvuhVg&nioS7}?d@qcv8F@BG(g~sn^8NZ)v{GM#M zW&Zy?^Z$>S>yMUp~g4;X5EA5Cc1N7;guKFjoCT*@$~+)M&%k-q7dh_zfhleVBjmXs6J#B~OLdc<*ms2>Z!{=;3#HIEKB=2Z5|X5`Ev*4kFW0uk%flSscZ2e1KCp zgLCNno6+F{+4HZX!zHr!`O)Drc^x-#8=F^nZ{BtJ73nC}e@@;NUh-`JP)U!zi}xMZ zZdMK`ZxUy!hi8l(PGkJOA)PZX*b$ejP1IwJc2uXpWLlq#RMeb4@!^nU-S z%*0@{sjG7H|9Adb{pYxQtNKrzAM^iL2d0p|q5iw#_mk%5oi#u2w7!5F`~%qLsR#NG zu4x~jO@Dm*IPHr``VYw5y8UF>*GbEp^zu=tcr(d_u^RMu365bKH>6*NWEf3`9 z&*=|*@l*D<;_D!;rD!`Q|DTlq$06N8tlfaz zIvTs>DPiSo;vM8JRL@`o6;N1Ck`Mj=8r1v0<^T6h zeL{Y_j+?lRySR^RU@N?8?3do}XYvCEV+e*}1V*7;`{ytYDSLk=554L9;~4F?u^111 zfMN6QU~ZWD)A(KW0c*gOnWH_9eO$;sW>+RrkA`CQEnB^509$^N@$DIGSLD`lbN-X{ z0gmvGM^`)B{Q#S)jDI}axVg~!|Ne6_reZqMjpFe8OZ3ib?EiiGA->Bl@ZEQ_&$Gy! 
z7uPNO0yw99gMA1~w3W=0sARLhCTun;PP2cVTbrMJvpxl=-Y@sv=v*j%=}7q!3iKe*>O$DtN=Bi$F0`R;R) zdGg}jf>P|jF6_Zx>_=OH{uj@=!?~SgSD|Mk&&M&I?vwf-7I;64rIXCPpJ|@0^B%vS z=FR!xpm+|W21%s%+5f@bjM=aD51sT5dg*uBtzFvmJ;JRAn7L>FN7rtdqwYDDZ{1k^ zg!D>lk2b|PP;u6GgIZx7j*|+|8MqxCfZ$qk4yNKQ?Z5_z1zxQ)}g7hpN z?_V7EgY5Z&^*=vp4M5}kIe&=L$`6cp{X|U0R7}T9C_>+N`7C_F{E^|>OOChx@_&Ze z9!O`ln&eqN=1CL2zu|Iu`jt$UXJds6+sPW@i2ekWtt zdhxu2jo5_lML8?GyhW=O?rc(7%_j4M=ttX!DU7 z$62yl8*slk58^N?QG;aEeJ4$1)g;e%n&*o+4qtcL|00Qc#Qy)Z^E!~lQKYB1hZ%hO z=$*r*A0N<9p?j6Kgmy<}o%C-oFF;uA|361wK&9|Yt944z#-yHos~b?F2m^2|7QgGc;0RoTOf`-X3>pp?yK~hFNY@Wl2OhZjowf84`a!m z-wzDq$-bbCM^466Ovg(o!fTj~*YO5=f5x65d#a4jl70VfZghcf3(-DWBk<1RO1{M}fgXJ;q91xyXH7Pz$tum{@)qLZO&;&$DCK#|F5wB zUkMpx=e`oUXTA~|66U9;%ujDLKYge9>HEx2M|!*Y=_vPGod0r8To-T&l>>_OT@{7P z^x8GkUp~Wf%)_F%*7)0tNHVuVZU;D0fR9F z!*Ev~89|Oh)ndYU|RiPebh|Fz_Lyo0s^^{;yLznuTKUH>1NCdmJ2Q8%TKrnl-Bi2Z*X#j^#a z*n#vy_JzK^UG&bm>QC&U??v|m`3xC!VZX40IE+g4Jneary;ZM-B)M5TeA5(d>A!s? zmyer-W&hYelS zRq{G+;x-S;Y^oOWd1|(xSfY|@u@8`ZB48{-)!w8H*-{*ZBWY5b*VJzAE zrM$3dg0g71^3r>sC@jwZi#`<5cVM#Psdzm9Z#w<){J)pzMfm?a|1a&H$Ga}q{^rZ` z8htii#~b)h=l|_iN1+@4JM;hMi|0)&!ds|Z@o4_vQu^chf6M7B@#pjZRtsy^cd(XR zkMyligm=h|NU`fW{1*4O+2XhqSvG$+x{hh5kZmjUU!Y^V{tI-eOEZW*cDebBL$wbI zv=7w5&CY94=cbURw=PkC&QZTF_(=OHwky=Z$u;U(%r5tBf297{;U0EjPrR3JvmMIV zsPvrw-H!6wcrrV*bBCUczNOng7S;|JeM$M*eyD-m z6Sq;h;nDoRyY$ENZ|>6{;?L**ZC)jRqhE#RJV3w4*Pjl9=|j*L*VflQ=y~28L&v>; z`a~E(jza0bu)nRX_uu*djdt!>jK@Sw##BtlOZb@o-wDt4yyuE6z559JE2aF)DE~a4 zX8ig5k0NozzJu4u*~mWsbaafWg z`0dOa zd(r)oEsj&pORus1KgzGURyxOVHC_01%FJIQGde78xZz8?;sAN5;aJo32m!BVjqr_{|Cwch4TM2`TxB9 zuPxPxCg(LPcc1M4MsHnU{NNkr5?G%h&h3xwkF+?W-_K@kmN<6N;W+l;XUU^cw^vwf zfAsx^-;Cqj`UC3!Q}kwK%^5OP%zt{FHZ}bME}>(d@)=$1g`9s1*?ohJi6~@MQiyJ!hwq!(1~0qc9p{F&+~!8EseeKj_o% zAUpN%caa&#S+ZN-L~i~6Y3fJyVw`K&B3-%p_tMsy7w6w=8)#2$PH9_VsyHhTS^r=7 zbb77!L|p&B09oNUP18s_g`0I>M6bXy_3gy=Sf6n_$*$+wBY@{;MK1aX70_|_d zZy-Cvb4S-K?}4mxU)98%3hqOOV>)#j8TCB%AsJx~Ay*D6iq{lUHYUJZB z^iq7xe=N`X_`-H!5B4JW-Lgma)8lu1kUWg+=Zxi%Ni?DKU-%Vhf2Zjk$Rf_a@BOU( 
zw&*>-$_vNIz8Cqilc#V7=Wqd+a2c`x?J9X4H*p(}``_-;JLj=Y=IFbiKScETj{DwZ zX0jj1s!9Jv|D!s)UVh5HT@dRJN}1{LzY@uYKtM*PsmG|2IsWqfq*s=kwKqFq%FV<1rE0KRzEO zlYRd_F!cS__glu#$MJN$gd#kizx*1#R-TV@uxHa>NBtdji{rTWd7Oh?waWbSb)!R# zG3;7n+jV8erH%b3>Gj6BN4Vw<%*UHpgtxF1%h9&o_&+-Kng4HWrpwrV2H9fc|LctZ z8}DvBY5bgQzG(cPOgT=It7 zy(V%m_M_6bQc6~(JfE%N*(n~f&a+8)F3AVtaqkVn8_T2*%@xv@aqk~_zq#*Yp6>&3 zEv7#&f1Afi&nJyR`LDzAe~}&|j%ogr-nmBlSNK1A6QWPyll>2tc#gu#$qq7$%4MU& zQSvw{T$@`LL0lg=K84)=2dA|&{BPXk+lcTP4YIr zKV@RLPxk%t{|OJtA1;|3_7wbe*nNIt_`#fs;m7BVm#rHX%JPku9hn?{T3|eF=7_Lw z&WP}n{7;8}bWaC|ekL3+ZukBMZHGhlraWLgs_cdEv+d@%f7hCrpLAbu+Z*#w=D`0e zADMUgglvBpTJruBn)B{^*7rkW-XHmn+zs_TcSADo4H142poMA$pem?3^N z?=zun#;CBrc(i$N`ZdKp44ah|n-=7S_ZR9{^gU#M#U>Fx3ZpR=<1rC^|2#TOCVM_- zj|j5&ZT>~%ODMu?=v<+Fwov}2zmDz#Wfn3c)%9zXIXA57EPOuR#3H2KPfGY(^zwZB z|9Fo}=_|1umFo8j;j8Jj+l^th^NWb%|Li@0k)C1~P^>|Vref>!By0#QM zunT*z7yEG#ADdq=K>P*b_upA`qrsR&BbqiCYd{PAqw^EW{nugfRH6pSs28)phify? zYZLAbX?h3Zo&ZV5(MPflSz$+U93S9G{h;nU<@gLPAh-XpaivT2%eac`sM^W@XP@@} zdF}t0FK=o8Bd#$K*Po8<|C`Rcjk~yyhv-+MPD0yBmAn(=pQ-<^$%^{M|*rP^lo$^gQlIHo3>6v%5(I7;+jgW z+E*>NJV(@MW7RJ39_N@xiR4A^?N}UN@gC2b+vpyLU>HVV6h>n##-sVPdBo&o#QpE4 zlIdH%&wK0$`s4LSUZNM_&(|Nx`9G=yH!V^AqcUaQjq_edMdnf67uO%j&A+d+cOCts z>yP}W`wus2|Dzsp{k=EDH6L$c5#GX5EJxcE^*=fg``Z6v{@-cse>5ScwPmR1k2G4* zV{Ct=czR=B#jBqRtLbt4acvCyg|RX69c)COae&|Jf7ufCZ}K4^cVHLxpt;@LD>7BB z{V$L2rys;@52UDBUH&Od#h_TM`5Cj4HB8YH9Mm>(*Z8ONp9mOUE#&;DFn&2ij2 zEs4Zd_CG#0|8E&v+BqpQ%?{7fbMyb6)dpivce3}pl+77-c$PihO;#;r|LQxaK`rW# zKoa#hDvsm$0H<&U=Wqd+(3Z#kM+Z6!#GfzzY2q)8@%#SWOCx)~iLKu}m;Jwh{lD1# z(OYM-|0kG3AfBtZj+>|)!~Pe3n_kP_uhE}zmwq47cOb3}m>}aGau0>|tCbfq7=541 z4@1ad7=d1P+9v_%9+YJ+0DHWz4^N9owRqpCB3M^e0a9pF};@`p@-v z2j{BgBXSE$u>+ePSnqF;_c1brU5@u4`#16qc@S~_!C`XSAC)2c0xIb>NTLbN(wHVY zki}8NwO!&GXUFNC`dK^HS=Wtz3f-}902yt7F7$l+Q{jy8-ZpboeqsK=ulRgAzJNUj*N4?)!ox&g{{dXlVHpEe9qFQ93p zdSa-0!MQ1<{eSBT{p{y*|zzq{~UKYXKWD$H~DKO-;-qmfRC17qo(H(ZPH z^ofXVhAc8y#3g*PuyXa*RB}2huYDrCL>8ffUQ_*OUjJ*3XCt}KeIvKez@bls*M+@- 
z`FInH@D`S0IUbL%t)$0qWHq@K*?-Fm>&epJ@=Id(zeC@MEinu`(AUsE>>}g%?H;oC z>;1!Caz75@Fsk-_SpVmH9|Pq70`H5=?LRS1{r_D)Mb4{15=}^>16i~kk$=#EPIMuI zEV_|f|9_nPKjuUKKYH|SeO&(>6~}Qj?__6_ao+DK@(j|#&XE_;c}+R6S=}LjccHsS z9U|p6Q?ac!J>Wl5|{6BWKi)Y4;_aM^X&(f39B4w2iQGmig- zKZV=$ySR^s=$BBoVlalFH<$xM_WU+4j37Hw{C+a#BGN}AuI<~6t_n6jIo|i(?3}TF z8;|sEe#zuyq?}WwF04k)LidK;x@7Cz^BVR4DrtKALCF66Kf+Y!Psd9r!fTj~wtLn; zK(3!>jy%0s-kvFsqq~^>KV1D^p#CSDPpJRTD_fDKx1#s6?0;>5xW8HNGXp>7Klr-% z-@tq06dY=Oh+L zqjTb3OG#7}KMd8IANuWKs6`z+D}iMDL+|ckcvC#@U?aAm6g#jBd(f787}_%r^?&?_ z{*V6%UC1DdZZw>D7#h)Z@uBt)8%~@lq<23It!N(lFzgl2ejLPMqz5@C@1eOYzPo&J zqlTVDxBJZ^GvJ{%s(6K!FAyKufy%jlCy%0Hmh0w77fL?=R5&gyIYYWo=e)$shvv5` z|JA`;(CofGa8Am7rq4bMr|6~MQ17Bc{_K=5yX4OdnROiZcRb^qbGU#@xQw1(JP23G z-tYY>TqkehHu_rr6z-Du@euuz^8Wke1KUVKN>Hiz){)YShj;a5pv59QHsQpjo_7^2vH{^x!;+TlZC~todrjiw>AB4)Y z4?@Xro(eM^XZhg7KET=mz8&9kf=oKDCpXWL&u8j`8N{Al%>R7kQ(=~C=Ad$@JpBuP zga4*(&R0hxk*AI>U`LWk*EIC-4?+{-T)q}|d5VuuZhi1Vd;1LFU*Nh9veR{4WX5rp z?9PNR*Z<7J0xZN;WyWH136`PimiECt|6T1KP}lArkfhgF_|KXB!V2fD!Wyi@WB-E< z^vC{t#q`bSo})}~O>9GV$v<0#ZO2aRM&I|I3T5Oz96;~4^lgwmf9u(h)yV$cfY3;m ze4E{(4U_ZlzNcLytW=$`?W+SqJH78!-#OXy^@rgI8R!2UBTwKYPNP}F27Q0%AJ&mo_w;`Z@*hJz|4HJUA?|VhgI-T=a85BcV=Eqy zlSTVsyW_Si?9UtCG1>V*{wFhzvt;)|`QLMFG>+1QX0#yg%N+L@Y(>>+^`H7E=FM7p zD(1}unMD0G`Rtqh!fyXlhJ84IL#RMC+Q!MBlRRItQ-19tbL(@-bKPi|D}RrXzlAsZ zy@gCUKTWp!ujZ|MaDJss5m%f8*hr>@rOH6h?d!E&Ir2&4_3w-GntM`KWXLXgd+YB9 zhv~k_@{{hv`4t!aMjk<>uw)FzYxN2Kl0qcWhLjAg({zn#WeoJZfUo(dPq9$Z0h z%TwW6Jl6icK}KJz&Fg$`#Ub2sdf}MA~Bkw0u z;T^SmH}Y+kKFE3b7>W}8AH&I!C_vw@^THUi=L-YFI5LinMEidNeG;Z%8p^fhXOM-6 zbN^=>D46{LO6t7J6zL`+o}iAM3=`XD!4HWHB~lE4E`NcB2gYZ~%wUTcv-5 z?D<}PXg=ck(GzGy3h86gc$!~6z4HX$e8(C35p?TM$O_B2u8WLw_m2rbfs;6m$^rTx zh7Jj5>2Xd#jpOt5Xp7db`MCeVrg`3lvTw^0<=-)7yKg2%rcdfWcTLX!?^)m83HAZH z&U-${(z~7axL&Rvq%GVo{(SiY3DhCZ`Mn~ZYq)`1xPyDhzG%&h3G)6(c@J%Q@~8Zl zjd>7peqWsP8`u7iYkX7Acq(`ESKs$k$ZPa1lLN>>$VXg*FsUp^ zC==>XOOJc??DSm@_4{y)L;=QN9425AreGRopb#_hq<>akoaMON*vlMp9{P*{Eg(yN 
z)GsU~7h{{YPicQ^iiPLG635HX^Mkywg6!QC!YXnN)*+Sw8^~A&q*vvIV)|xmMXr3l zC-2LX8RXW7%;bgbe%p!N$o_IQSJIJDE6E(^6^{5}-JF%s!z>ezJyLjk>0UEAULj-ii3_i)b!8FhP{zaQHl-{1GA zFu`wAFb%o(hi5Z`UVF{E>ET}x_Xl{NJ$?33+bnTKTfwtExN1=NN9lb(-`bBi28I3R z=KOReA=Y`yh|u;a+Thkr1(Hp_2wu&d|k@crRK!uO`#4mx!GC33r+l*A+=k&!-3{}dn>S@Z9Lia+}O;XmRlsOsg zA9Aa6w__)E<4O6^9zvPpwz=&8#q9r8`qu}s2}V8|d&|!?_t(phsWYJWqK{b}Dx}$YXvxfk`-klQ@mDIFF0yK~=)~04eLZ zqjsF}??V4uU_9E`as4{u-}%PBCmH|tTeGq7mNH}42aHRTt&ZDL`~xz^r^!y?U0aRa zFR(s<`TgBwb3sA4;(xB;25uqk{FLkO&^u4K_S)!hkNyDN8;qA5L(Y(0r;YzlQ0^@( z@J*=)m4TJ_Mu)s+?JrdLy(VGY-hB*m9QO}PPIKRd(joo?S!K+*nqK4HY8!>SCUIW+ zPCD<{=#cNap~(3^e=9Exr;kJd#-MGna`uS&6`klp=7jS7mNp(5q@_{ZIsX7<@Du+p z^+T)Unn|APw1O~BToaIVTwnIQ@6bLB;|lce7^AUA!z8~=LFLebFpZpn3X@Wr7mN;t z^qH82IY^K6{4kf^Np{Q_9p=#&pu5MtsfRM|tt)SISSV~UmZ01|5X;Dlvjyz`g0Ol3L6>|H z^W0hC=W!7|NEgd1$Jia;9-#fp{*jhAH>Z}Ylcu;#;FsFp z*Bsx%9kf-;|9H~>x6r++TeHaZ|DF8E_@DDyw6Ri1(_0VObLjJY9lpr#={xfGbG~zN zK0s;Pqke#rFMT59wfLSf2w8KD^U1zn^Nsiw|A{;O!cfP z|DSR63HaFjgIW6jmasqZcpdRNdIFEv5x?PHCb@13reOvOF%z@UrcQ512Re~k|E5tL zhi){eryA8$O=R-}^*@<%oIbDqSC>Wq!8ziYi+Nap$Nqy0>5uEr#q=fUR-eYTbu(nw zln>YcOXxS9mlARHij^y4(2=1W4aKs6FbOJ@p=^v-I}=8$80JG#~PSz#G; zc|KL`ANoIxbf3tr%U9_6PjjErm~qWV{fmz{K86!GjW%t`cI4KVk*}Y$FO^H(3zR?O z+{Yx(pNwlNwa`-qp8s&q-#1;l$#eO)e&MXR%K3eqCoA}YR^lSP2Uk#YgMa81J|EHd zgG{3Staa?P;jj6<90&Hz53}eV~n1D%`g6Kn=8{bntO>>-W zcq+^ww|zD$r$K-q9d88N`S!cJkkQHGLNpAP%T12}{}dA)+{`J8Wr z>@}CG^atM8pPmklj#Jpg_KSXRTiAc)7kvxDD#Tyue=_tVIEJ|PM=k2MYyTtZ{_5T5 zN$;T0?BG^z!M(4M1~z#Ya{ecChbWx92gLOUM>;S24F5ynHS;_-lzdKKj_~9( z&#lmNoas5DY6hDQH47h|S10ZSlDoYhl&WvGm4qKkvKlS*L!HHnM%zhy6C5O&2}0&U4H2jLWXyGlQX?H)XnoAQRp6LVix9LF6LnY7NU7xeppN{!7{8sdX4o5Hsps@ z^iFkUhcUx7^mXXgzRnJP)W+^oe{K*~K0H4ZlbcaFEkA4}x1*wv?~3}d_5nYh-#ry} z3yWi3_3Fnu^<#ppTBZI^s3R+!mk~}T94E>8V!nFp=*_ntt^b&{hHsfT_Fp41 zo!X~c-QNNCM`j(zwIs_E1Hw+fpTudLMWwI`oTtaR^EEREhKuwb)H_Z(PLOr;28Jub zuHgo5;fZ}TBZNDS@8JP*{%@bv$I~jGVG#0Br4Fu6>7U4a=wC2M-~JT#Kk6s(yRUjG z40Yacj6?y(U>qjkzx999H_$ka{V%TOLiRtI8qWSNVE^kYoFtAOe%DjT-aiZo)5sYp 
z#7xY>9L&W$EWko6M&Iu|bGGi5N%FpSVoduI;c@MN)Di6vdZ)U-LmOcQJ67?&%crv*3m1c7=Osq_fM}~;`t997>eneQ9sY~U*Y*9uGO$rSaU`{ z61fv;<@#>246zRCkk308=Y@Ta4&P;6Jm*_mABC7xK^dw=Mbo%i)mg zDo~9C8j(Uf+7`1nwX^@r^*7FH|6VlyO*XG$|8CGuaDJNJN*4GwGU7Rc*p@v;R!rf~ zf)n(UIE|VD`E$7Y%a=b>+60a4bM1hf{V_=WH!_5?&O47DWS@CCTp>$;@p9PaySqle zfm^tPdw78VZvJ0ho9`8akdL7lj*%$9$ID;&>%8ZBO#VWw>;Gc@-=CGgJwMksB%a>C z^<8XwIg~vAGWkjvCu{;HVG5>U1`06~(f5BAxw+VPwkh^EP>lo{kwV+7SMXFdQRG_3>`&&j#m`k6B1z3p1Sb}9(fmK+8 zIIn0OS*1*=mWCSl|78D@N&d@m^pV-%oMLRoR&2*klr;1YyUDiG(tlq1-CrlVc1!;O z=`WN1?b0u!2U4-He@J@<~3{PERVd!OHL z{~qVJA8YNGvMjFueL~uC4Y5ab?y*9T6id-ke>*tfAS zy!{0G@(lZu?DSig`Yg_Si}T=mh5N+!pHc@d(f^L$SpQ!*ssACxw#?g;quHFUIfQ1m zWy|&A@QB`EonEq6XP>@Wat6A+m;AcGTMNQW;n~P;QZERncglZcf?eJ;)BAOwE$%b6 zsZw9PYh$(T{5Z^Y&U{qjD=fkiEW--)sZaW)Ie<9UG)=~Ivt_O`Uj0)V{V&>|kF-D4 zM_pvMa1Yt*xO#T}D(S32cCq|zUAo4L+JAS8!#Z&-@@Vtb;;@0f32nlyQ`Es^a<;KE z;CID@ljKoe5vz`Om&e?9)uc;B@4O$OOh+Bs-H!vDVLT_5B>yZ`uC@}qS6kt<^J zkUcSOmG(0|u4yo^M7xB3H6A0JJtglPlZVLb;%?$L?qcMe@dM-#9$|Qlwnq%F<{zD0 z>cjAU5Pdq&W4wBQsLuTUIzIm8pM-tKOTtU>Ba8nuypF$5{V2R8hqwJnC>Zp8V>Ct* z>ZHf~T6NZgEGhLJe-ui^H^|>($?<3uH<2tuoCDG9-cxJ*Z!45-!f}tM#3pvzPWC5z z{=@!v-RwAadR+hiscTPh{xrIk~#hP?fd{8 zxA_O|^AC_+!rj~1k?3_^=gUvSLg_5VQY=T$T*t5{SJDT^vcK8ltLbZzW1D7$Gy2Wb zWZwSn_<{Y+c3$tey#0OFbFsl&R(%>ai>oz$qE)!R+3$Haq-$+U)Q%oY-3#j+a)3pC0;A_{sQr;aKJTa1=k@G%buI zt(6dp!hyPiP~|$?umdBH3qmzHWQ^->GM4i-Q5wrq8{#)hOmv$le9{=c!I-zxoozOP>#^1nN`4{2P$>AfLbigD_u{){|E{S6OFpi#aKdO_{sWy45;^V@tzE*Z5X>r>i7ypIs5YhCROPEdPJ! 
z8UMxM;&4OU9o$1tLy7ipNq9gXv?t!cb9--0H~(+4_4_7z&arWPLrF;Ep=0XrmW0RT z6Ex}{eMY`OgY%lD(~>C(uY}*Ab(M0B&6GS<5>g21P*a3aD8U$v!vu_^3&JFFX!oaK zGC91wAWS8vqa3r)={?LL=b-`%(Bpl^K8D5g{Q4gov=8*N9N){`3%?dr{B z6{4LzFj~EjZNfW{RiEaNMh3UMlZE1|u^Tn0#X%fKLf?JfKhVuT@Kl}aoD^E=ZIATt zH?i+=ehK>?U2E9)=%M$bZUo?qi+n(lg7943WQb?j^tMXi%E!v++zqh0Ir23cL)kWVQ^w(apPar*w3%GiN`KEiesQ(ey-+CsU7sy)c{}tJ@%J(z(qwt14xJ%yL{*gp| zr^xxXvUtCKwFN6vmMg(BQmg>BM&RB;mYjEhBi|cP` zc7y|W%EEqY^`G?n`}Ma@(+976f2gCMLyn$B25Fr4|95WEj*jiLRocyaw42Y^Cy0z= zNCu7=W2GIO-ye9NaY&byLE2+!?c?rdq5ZqPEL?HiHQc~0G;S#i4I9eB9eT?VegC*ee}Fc6D^f_}p}5Z5pNGff6ZGge ze@4DQxBl~iv-?HNoJcO3rwn%W}%ocMnuu5VT(E-^*_Jbq_? zf&%^XXh#RmqZ3`l`o2dSf1q!_cdqdVrN$qOcOPZ$gG>u&9vOe|)c6Ck{@52`l4;hEC_lq!w-r|^MjH6FLoAX+S>>=>dUILgTu5;$+VKO-taoodnvK(<7 z#DHfU#4O=C$nMlG-=m#=)_ovx)AQZ+d_%?`Ai2W$1GJ(I^PE$G1z3p1Sc>IXiN5>t zzxpjdzU+u}Pig<-y??iTw)cbc=tS2``Two8!oS8xqCa0??# zzX*}I{?;Agdw77yc)$ME6Z-r0w`!-#yARaAx8;Al-+#03SQ-1+{vc<|E*}?XD71X$Fkqa zS>o!oMdpz65d8!dyQIEOHGOk0Fp>6p$YcGzQeK)HOSE?zBeExOOTclkZq zrn3fr&))!2<1^r^%9 zF)-S7iltj7{};&r$jxQ{%jX~Z{~Y%l_5TLxOXDC8qh5RNDB18Ge~Y^LIQ=9}<2S~~ zbg&%}=f~Ii?Hsbo?pk9k67(e6(S`hYd3__%-t7}k;{pz_7h?Z=Tod?;@HO1PE%a2% zqqsvK)ZQK#TNv)qA0XE+Z+FP!WV)a*JQNql2R$Z-`DdPx&+r1T@CM<6?;4{}f-x9} zk)`a1Rs4VQaI-dY3x2!*BWs`LV$Mt_+_|BzYj#-$83M{}vEJjw}|59?KO8aBF`n&0) zuv~cmX5#{^Be;^@xQAaAtLbr0WAloS!dm)z#5LtwpKD{1$*(;BKKB0+b~2gY|Bs$x z`?q$pliBQZoU;*|QH5>TfokkVUrPVJV+WiwNT&4}XZEns$sGH>y@~xV{=DBi$u7so zwf*zs|H-)jUyXEXaS(^mQ|XxEPs36AAbWmbD|?cD61f%n-?y;;H@FrV^KRV#FW(PD zZ`}21IPJJPG<@Z`W_Yf-`roZxe@_9a-EBR^2?wR}x{QoudtbYE3g$-x;#F#!#5%v z=l@+IuizSPp#J$s;TCxZ_wWFX-^u?&ABBhXmYwoH9@C%TL;H>YFR8ps%uxTJx$@Wk zXa4lqx8J&poq$^BL?7FG|G+chTHnJ9@)i27s^1ao=s~3E8D!O&Ikb;v*Wmm_?E`et zyAjv;??q?#$Kj21%$W~yd|vo(*c-R47{3sZ%sqm|x-44S@K``*I`Isf01xp z8*NmSJ3=TShqO2AuNH(c^l{j?ZC98;HeNLLO#CEz3)y_RAWWuDMO*`qYPNy7%neq;7jZ+K?T5Tj!0{>qHm5`?~pi&#iBRJ~nkf z2Jk!k`}tN6O7k#|Vx#}+IC&DMQHO-KMiZLVeJzjpDBl`GMz*5ub_nMj^TYAQ3E?E# zkzL7eT%#^~@=@p$PU8YD;R>?mU=Hv2I$Wa<_3a5a$XmFBdw76{c#J1_hLHk2hbJ 
zfB6+A3-5ctcTJDuhLRofFNR)etN3kr?H8e(9C=X~W|8}?vwOgN%i-N$hdIJSH3eZF zS%C!@dH9E6A=$aTAS@=AVmVeKuC3C&fgMO6l*i-RU~B2?k((&r`#nRZrxb*Z;x?lS z+pq)G*o~1(-+0-s{pRaXOAbF1E>%wL)ZW+=!a;F|aTN8hr5i?tIOq4I@M$!jQYIye zLmfSi{cY}-?~lp%O>9fD)$bqbfAvcXk~7o|E7TFI)PKv=o6e2>X6MM`s2{So;i;l< zc4ASed(JQEoPNYQdhoJ3>b|;)%p&(({Wn+rU#b2lJ4@C7VL9#w7G9*t3oGR zI`4n8zF^N;YstAz65SpA29Nj+`1;$0yO5>FHGtAD`SIz0|GGN--J@#jN$v`#r|b^D zc)mOQ?S?)2@3x1NcejO~4{ZxS+qEN{*t#Pe$4^sR>>0N?9NV=y94-4V!?#nmhQECD zUxhwtr?G$8=5XM|&QN=5OW23MYS2AhqrK|~>@*MV{PRhn<04-Xe^Hk_ z^;kOHLlgL7CWa^UXLx~E_%J^6ZDDvLTtB`zgv*}$Ap22UbrijEn*N`;#i4}WvV{+2 zLva{GABVOz@{qU`lG}^J1aXs4TT~JzlT$HN#f~M1pZwRMoScO@7)g8_=8+XxfQ4w{ z-%jl1qe;2WDe0b-Za*JQgX=U|Qn#a;~TaK_s!OSi2?15xR+v@ZIJ2KZpp9@$o4Jle|#AKG+)~YJ!7>&u)ofGUs@Wf zrL`L)=RVdC=Y2-~8ovPF#*nz7dBzBtmvB)0VI0MAoWyB#&Xga?I0xe#nLrOcNw#C~ zk$XhuzI#Nxw=9bMwgX-0LmC%w30IJq%|2#7HW#pu@nL*A`|`v1|7q-7c5ElvHHGbt z9`vH`GXDTOI?jO~WK*ZfjBwUk0XcLweHyO0&JEnc9rWCk#@$cDJ$l?TaNzBy;Q>8c z&SrCdHm2d2v}2;(S^xCYkheouXuqBKG(2?dV>Fy`-~H};A3NTa?HXU%tHkh zpidjF|F$}YJwHgMg)=ME1<0XY8XY)4&Hv+n?3&@-x=xSl_3}?Fl*VE#MZNN7IoZ%q z%7(Dd-#ugM`0rPXYyM7IgrqS8DSH66qV2x%EQ{1f%hX4F0QJ7NwekF~?0bv#^!)li zK|MTEJv`0#SSf#zt!Nu>&4D#+fPH@7>v#Ni{@W;x&8WgQ>_9bkqXvDO*!Nqt5y-(k z+5}`qI7{a4^Zz%n0aE+}WT*C07rpz8dgcWGz&dfGJ)?9E;xLY)r&J!rae97zU3T(G z`f23cZ&rTJAg-@lC+-{)NFw$-wv%!1-!3vgE}UHv{Q}XB?h}{B1;p_Oab3uvB zu01`l#<%g34`hjNgN%E`=I!5AY(&JhZj;^W|BU*dY!klZ+&2jSz3(5RP=YZShrTWP z*YUoeKRr>sjBKfTTAyS4ZS}u4!FjUNab3Ig&m*qS)4M_cyWb{AXA&l3DjF-Io_j39yv}O3;kwUWIV*@;We_MTjo3x)-xb7YQ?{?{9j^p<~knj1U^6LZ4*KU~M z{)|h=mbt&Xp8o{@#~Ifk|GDR%Cyfd$z(Op>QY^J&pY(poc61@mMe8He zxPVKzf@`>eTj-SM?vU@t_ur#Gz(Wi~8~u^|`BwgXDt~I5=kVBXPw)&c@Ct7buK3=N zP$o3tL-{~Yp%rat|4RN|CVwxImsiPiyZry?nJ9mwZ-(*#1M^)UX?lKrBEQA4Jo$c~ zvC6I~@767BAV;}w2^wd76vmJf&~lc4g6}WY#P;v!pE$ukfm(eC*~Mkyz()Q5Pt4=x z!9_ouwD|{IH?N4;TT*|(S`qwdlL8OsE7CE%9(|;+Q z^U{lJ-FHpVf9X1Lj(RWZ&wd_`O7A#M;xrl?J~!|2^H4{Red5ihJ`d;U3A8;Rj%ydRBCcH!|EsumR`fk(OA5j(zxUAJkm0K5 zSH47_)8I7o2p9Xmj33L=b2I(-vwZ_7a@;7yu@_mkehGafVeA7r4!P;G+4x_C3G|_P 
zpNC20WK6|$lw%g^uN#L(HfUSUBP-A-ZUMOvac`Zt7f?#OIEgm-Deke;qz#%d2BD?E z@1y-rCMWt`o4S==TU8tuJAWybV};f(63DCLw=nwGJ`a7$f9G4Huw_nXqq-S zx@Q=JFRpjIHaHUQvFWC3qoq{2I-ZSO%r-7yBV)Det;Kq5M%+8Firj`B=yQC3nfE-y zd&Y0??~q@AeUx{J`yZj&BbCv&<%75>Fzd3~mt`vuAX^o^q*iF`;Hii%Tf6J`_ z^6c|)PT^*;N#A(#>0rXsUjr#?mPI&O9d%Mnlrzeo>@C{t` z-(PgEWc}UZuu*&x?dU>dvHG{n*nN77y0@9#o~BRztp9aJ`)`Tyfvb!UTw$!1y8pxa#M87tv;#WTv16n&4ihj5aSVU= zv5&)K`XCv{@K2>rNA4}(1~N!Hu3TLG9^(+mIcU7d#w9DzaNE1Ps{SSy2*>$<@Ap+s zIxf|(uKkXG4$ZHP|3i|V!u$3AYqZh!zSX|D7{Wqn#5KPl#c|ZX)y^gxo|^y1Ry|Fx!#Ol73t}0OL@Xm>+0cd`jxR_! zE{S$zSC)i6a{mSP^;Bg7{Q@rG3a;S>V&6lPeBO)}B<1~-av;C{^88r;mWEr7yMudp zfQNXDC+M3g|I2S5`hVrC{Q58c%Q(&>zy9(H`Cs1eoFV^9vwNESkKXZYdt(ltNoUwt z!WZN##5ovm$nZbP&sCp=A~L)FvoMM*!5EBa3ydRc#```d`aWjr`?H+{ey{6VZ2-iRzchxh9QCf2ckx4OnA_HRs+ObNG=ZT+8x z&CaR9Htaw(cB2Nh=rhN>-yDR2Rj#u|y2c)4%tgo=dyv~;{J~Z8)^YwWUjw@6-RN;# z-2d;OG~zt=9vqJ8Y76Gyr+%T%@zN`ko^-OvIp?Tjj$^=YgQM*qkoqi~6wZ(Txnf@z zdZV}o)X`g(c{ZG*C(uTZa}ZL9a}W~SJwKY!g5(+B-#*_T+Pan1Px$Pe*N!gqA&m>T z6vG>QfB3Nf6g`71a%flfcQp9^&iek)Mek1e{*L+n(D}MFT#?Q-+`ujL6ssrIjd$pS z8~BaX%lGIHklUiYD=xE>{fmd<>W#sFOg=&5SH=^QFVL_;y}C@kRqwtMeuMnHx|#B^ zawCPkPlHKaaX&kAFv}Dm*OcwpKaX9zyfBo|$6y>LAg=p8i5xa>elj@~(@~DjV|@2y zcfa=kp29GPJ`WX$YhDfF{W>IBWoop66N}{kRqhilo7~qP_tl|%K^)%{`~Md>e<2oQ zDVAd;RwKXu(^l_)lls3@ojTF`N0y!|_Wp13|KY>_A3MGOJ>EYV=U)v~%d>OL{qN%k zFwTFiG}mJzHlqsLP>tQF!N@~%|BVf(rPrTfx9A(%%MTXUf~-Al?ACVu|J@&jgMQDi z|Kr*AFZMpK6@?Gue>`^*dHc6PJ;l~)|#)LC^rz3u+TVTUvh<0y{fBu=9a=a66j zcPIOQ4?jJb-pand%)Y+Pz9!qH(;@Bi>bkf;P?!2N)~!A3)W7QG&S~mE=_JvPF7!-s z%v|jc`rsDz?*?@({Q`2U)W67V61P?RLtMSzuaMW!DE?e1 zLcX8FbrMbNe>A(lmJadPJ^yU^4{fNqs;!A{xyc_mKU=L$iih;a_!r0Du8{w59&xRh z-yVM(_auBGo&5a&dj;Vcy{E%BvgqURfriv?kVxJj6dshEy(%)%VZLj@LKAr@mP zmLt2(`goxt3|-Psy-;7-;ZHuSgA%{V5k9be3;jMuo5JzKcmA`me|cp%ppR{>@W?sy zMxtEsQP@asMisVU2dYt9Q4w~NHBrYw4C{|POve82&J~5>DE&B2;xu}8MITUMsG~=l ze_%GBI6Z;)eyNS+2LfttZ-`ltnjxd=7wL)m=jKKofFQS 
znj8MEa$flRx%0wGY4+UyGIU@5GF+l}-uyDWA;bUVdqnQ;mm%x-jNj83qz`=eWfPXt9v;=oGq>lzx=Ku)W56@V;nmU6EF!K7srKmeP{W8vOVKM_ig=RnC!Qy_;$*) z@Ru{Dh3WKi?7uuC9N1ePYA4PJ`|wu zMRUVXC(aG~AL}pXll|G6`Qhgq=7r-g=Y$jVzdc(KezB-BR5*75_U=(mqp*!@2u=c>>qpAm;Q+T^P{lPIg7Cr%drxxu@>vm=eYjy)<3}DH0vK! zT94qZcmDFrkbC-NXy0S~gRiWAfKGIwdztkQ9M?PdN1=XiMc62v&8WgQH14Yi4JRtX z4tfi{xxXS*(|4ooP=&Thg}#%DkVNejem83c)cCCyJ=PF7NFGKveIQjC2CV^cRQNcu zXDUN(inc>TWyqYc{)0J{O=Z>}m}32bO6w0yv;M$L>kp7^YouE$e`w>zw!ul~pGF-L zF|9tfzx>*-Ju)y+{~OZUDj8&5BVMOnS{-vM!+CTTSB5Uv>ORZ%M_hle-h9WTwDR-& zlq>D@#+2tr7ro`V=l5;((bH(##P@jD^WSvdP(`>Pu5-pm;SzZTaqQDI@&@7_W&^j~ z*L`F3gzq4`OSypbW%o(m6ZZgd52c4>!(3za$R}u|KO%D%j*0r@FGFIc^?$eOd)@S| z4>oDMe9CzFR>^W)0wUe?B3d1<(>>tBde9{;T`XuzJpZjASt*(h<_0z@b=F)d# z)^h5$_S^bk@9TRdI~~`xOWlqhdhZ7H?|k*IG4;)-e!c#0Kl>l8ciI29KM9jvV=Csj zkLhGNX2me(p-&s4Ut3~8f5ITr`W!O&u>U}c{m-v(9-aHx|7Y|+Xs7gOuk>ygr!7z+ zodsBk#prQQ-R^xUeNf#x;9i&0S0b)ym|f!;S9n%!jn(4nwMW*H>k;=3+(>RlgL9j; zJ>tCnD&cKtEz|yZshu%JJ7c`(?`Qwx!~S2D%I0bQ+nMTVa))z%=wGQ8-i;b$x0zSC z*Z80#{2%-HKTh#~9OC~_pJdf1Ips|II_(eT=6Q6Y3*FxtUv$&>BINyt4IhR4dU(^{ z<%@&TK8&~rSls(-aEkxGNQHN-|$GyMK(Ocw~xZh8L zoAkb>5oT*KF#Jm+b4eaV|gm`YHSR1bg}n`}&Y|<=F83``G74qJ8fB^xiVa z(sS5*Mtz9&t`*zZ8_CV6!Zz%WeN)t{ZV1=%!`vbdJdSJm6@@$Wdw76{NccvYd^62t%QXK@H7smVjOZyx*=pDs2K$?C5xt+=-WG;)trI?meij-x@vYF`pkeDrA^aQfKV@7GISQ>q@;c(0A1O3{ z<^7|?Z)1=+rT#^8lX?ot{&#-qR(c!8Ip&A;&jjH~n2hWW?XQ&jzgr#2Z!k!vg)@BZ zS>(`umi>?O=tLLdyttkQ^}p*bbl=ej5Z90GqW4ttwM%db~3%^y+TcjO*01M~~5%<(-MM~Ont=;_m`>l@IvG5q*y7ckF=kKJ0&x(ne@-pL^JUZ$1BI&wt(XlU?^cKiMPP zOLjihw=bP4Y{L%p6gy_@$Dx{@pZ_@DymWdEetZ67t+;ys(?Rku8o#oh0a@$2Z&>BI z_IR#E?ngOvQhe(=&n28BQ+`jJV*g(BU7!U?q~zaL`MM2z_0`wlv~$0GZH&NC^Y8ud zb;9S+cUk}Xbv8CRcwf7Kp3%pjeW{&r=GXo6`smN26J3aVfApXiolCS0q?1HDy3ix8 z8-4V_ebPpnegQdp7MT;`a7kQ!sWCI;H8j$1khjo4k9+>K;EwP;kqS?>#+m-0+PM9+Hpo1S9jzpN;Z`I+J`A^;gE9sLS5lC$*t0yb&M%7kOh$ zSr|p`zu$W4Gha$%;#9}PPa;(H^tVLgw{%;Jd)6c#_zxsCl>d5ZY&%Q;!{9FCs*Y$s2*8iQ- z|J|?u8$I-1{rsKu_0rjh%{cD;^^`dVRlFh>Lygm-%FplClKFq(}rJaYgwqOSNcHV9If1l?+;`zzWGoGL97VaT?wZ%H0 
zejZLr=QQeY4n5+!k)RJQ@%&%8FM2z2!dc-AnO-41arI5sIUv($q+cK}p@H69?72#f zV-UWER^c|`k;lE_^Y*v;CGK(8a!i@gt?am| z47sW-xo9k!bCPIB7xMmu4*wVWg!?wIzcGM8Hg$Rv`&%DCmdur@{|_mPz@Q}3k7KUp2Zsdfs!WlAsO!*Z)4UNwp5)=xt-T9BO0wnZ!2aLzrAbKYc3#dMTo7UrPuJLCH? zz!o0dXIwnHIm0&2BG=7UT4ntCI^)mDPRDhTXsTaiN2HCBr2B5$7ai5cl}Y zp?#+3pX>SEZ+?G-@t%L8=O69)QQ!2b4TR;>8ubGo&ptka#+2vBLGdlm-P?EOFHG0J zFo9p}zURN``5$?H@rNDL`BWW19!DJed6GPhxXx$3UwW(WS@;~X^xQiCeY8R831j%1 zjO%ZnBL5?a6yh46ZAdsLiFS11mt*1b;Wp8jjw@>F<6ooEdMu!9{i^Z!G^9l!t8{67qToszCKBTx8_ezxVa@bmkhg?oM*v7Y}=uKs@b>7L&Y zr{-^O4ZCSVdKBf-XKdZd3~h^>JnJ-=S>P5lFCXMcC#JUY>(j_XF8!{2)`jv-V( zVqll{0rKnbO;h)dRR@=<|Cbbpsjf2}<(P%WW#*q@4!vcc`VaHy6=+izwj!=wmc#;a z3$YkWu^cP08fy{X)n3H%a=mcuGpfaAdKD6v)&I9$197k4Hz_qg(=pqy z-`wj1llbH5)!2<1eAs{IkoG^)r{pVS={dB^XC2Dg^JM3D{BO$TST^_2d--KL*YSNw z=O7NFHtn}Fj@ikM7ar{Q|8*3E=w121m$N87)dUbEtef590`oBZ{kLE7kCdU4U z!lqOIp|H8&MPWE9M+FTE+OMSH=M!j4rVh3A)z6n1X^e+tJ*ZvrM^ zGNxiW$}tQ1^?&CS@hkH0KmIt(5jPJNSb&isLwm)2PEaB#^IveOKRNq~vX3 z=PLIsz8%>G+Gk|X9@p6QePJJcaOU@g1IUlhOZp~Dzc0)de_xmx|9xQ^^(Wl3<1XM5 zuAnjVtHOr{=55a;W+6`K=i>* zA{(#1Eo{JKdW-rcj=h~u&*~p<6OLnVQ)FDjvu26!bCv%CasFSq;~L0D%%bP#|BYjV z&(IE^rhHMxeDE*GXLB4g4;5H|g;HVSV>ocotsr=7D?JBMB+ZX0%>8oLqghZ-`@1*#O?|cP4h~xi;#}tPb(iwVI6kd@Z z+SlsrH^Sk6_l|x&{-B6HVvge|@&JGAe(m9U_gW%62IDXRjq20}Orp2!@@(7L$@Hm+ zYX-NzHpbw&xdz%RiDRB0&B)LH>-YQ(oY;`aNVUpnWd*GcC4Kbo|UPQB~@h-)G(md;Wv$4c}}(}tRDYymy?eGE8e zEqy(5Tlw-gz3cl(?=sdvT)k)AOje=M@!QC_262A;f!|^uNY*!5ExvV%=klK?D?R62 z&wg1Q(4p;5HY3{pNwV#we2(4Dsfp>~AP(awj^iYv?caA%-nuJ~kJ2z(&8@Q5~ljzM)coZp;SIw7q3p&elwb_T zVFD&$GNxiW$}tNg*R;={u<5M>G)H)Nf;C9U3M{}vbUrc{gsiRoEG#9L|zHJM*0k9_Ffztyk5_t9rI`uZ%a zl*W4OKdC>?oYDOHe~Z}J%h=gu8X0<)%*DSHT#!6+u6YW&#-^VIM2TK#HV4S zv_?GRX0i&~umjcDjjZ=yL)PLT4rACjilgL^H7DvHSsRdk5~opzMsW=<`7Y>jPC+w! 
zJV8$)&Z%#m|IWveBtPuGx5D$|!}`9IL^oG$bsjUW2gF2wko?0*bk&~?*8?0;mD zL;G0IkMrn6*K^O09`vHqyC_y6&_8$)1bsf5&xp zn`0}TahQNf=$YskZHvkDLH9L)sr2cHdunI>p7DF!Yr9-rTmx?wIS+B~+X`|48dfRO zc6l!BU#yPxEOAY-Hf2kaO!@s^+W%{zbQWVNmLu*3SaXUUn9mn+?2+;xALjqum$$U7 zKCJ)f`8qP5f3Ii7hy53|+v56>e~5d&&JlkU$8i#k#_BZ~ z|96_6U;qDkaj2u8Lz}uNj-gLECW)FA+J9@5e;fEBv>6hPc|ZR?Nzc#!TdDkhs(qmD zO(U+`lfB9Qzpj1Ym@f1ojSIMhkp)HJ3OO{3kDnYqUKDPSw{Qpd@Bq>FeMmmW6FfuX zTkH4Y1-(VTL$kKVEBYI>=?7?Cr~Vh0^jpF`G`W{%#CZb0vH$KFRF|n0^d6XK?JhGCecjIoh_decP`5@Ayjl ze^K-`X#bPl%e4Q=UdK(BMmc67?%O+ue80cOJbDEdAiqC{@*sUi-rFY+IyOhfIlc@1 zz8DSi^-{9_wl!(Vm1w26-GA4nNRiD?<;R=+^)IX|;`hXC{f}ry3zEl-0Yod>jN4o5 znDy9*&8WgQ>_9d8GR8k`GX80+@lRx0J)9x4!a1^CTQ|SH+I8P&6Z^2=7{CT&bj}(B zh?4if>20{;ib%kao<_Hf>_@!=w1h$dPNFfxLiAG2VR7E9B6P zf^dz@kH2^^Da8H%ZV1;a+isC}a1RgA=$mS|YdtA?%SGSTYwIJ^pCG^fpJz@X>6lt^ z-;S4uADQ2eXTmS=3U81Y{e|^utbt$lMQ9oSMM#!f`(FQkE85UL_6y_7z6j^}Pdl%E z;aR>2-50;`eSg9KXD(@~D0^_HXA^b7URU z?Y;KUtLXddw3CW_1L}mql>fZjfA8IA$+#9n;*|W4=2H2;*nd*yzajJUPv2>sTNT69>Z;^MCFV&lG-`NIjG3>W?bRo8X`^Ypd;1crwLf_Z7WxiwK zYq)^}U2*;IqP+j_j_^G^z{q3c;K|2$g5fvDz?1R&e?h*&8-)MMH-pX%`u52ZjKMg( zpZ_y~9&P>sOrlRlZb;dnOvoVif4BFmpK*S)_k*s9-cPajgI@2dkIe6{w$=OHO<-WC_gsIY~Pv% z1lCws5Ov>8c1%%NOkawT65lDg605N`#_jmr_&4+aVjcXIvP_w|Ui|y{KO5;S>s%Z8 z{eLR>?1y|?Z+&A(BC(GRc!d2+wwzM`smtQJxUCs>Vu$g0(%6hDY{L#zV>fEhr;hAT zss9@Ee~(tbPgK7nzy43L`u(Q*59iT|-`*cEt_fHxor5@xqv#pU{w?5ppbw6hHcrw{ zBezT4x?LSxtgc0!xS_hDaE=^at36WcUCi~pRq}0!OQIdM>)DJ|^29TJY)91t;zmwd z|NqD*p>~wIHL%+b8`B`Z@sRezUe7{rIiYQU3-n89<2z{OJ4o%*e!vxRo!@;Nu8}v; zL%&7dK{x%q|L>mg1N@Hvk3HUm{5-`Bg+bKW)Y?tp7S>JJf|G&%s3ICttili|LC8$4Zojx4z=PEj+sB9}E9}!^h$8<`#!D zi~b~>-ts5m7po@fqx(FZT4UVPB5T{M4`Gt@hnIdCCX@9YABA7uwjN^lC!y|;Z>L~h zXdnHhw&h=h^GE(d-@(5MT?JFkE&Pj+omCQ=uKsyQT>SIUVxC&_(7)!t_|KJ5e-_$a ze;HZ}D#F=`e3voZt)JRoE!u^@7tUE<@gMg6erP=Pd;WLh@)z0%ZS=n_{7cl%{>Q?7 zl^Zz&;xc!fT=ZPEgJ({E$_7=qX zrT#PDV1@VvScnmQPK(Jn|8FT7*Z*EluEc7rMcn&yJ-HE^QH7o}?59&7g>Ce~L+qyv z8-ZSp{Q6}pwBgoh%N^7HpHUiii?2Z~4x(|Lc?4@p!(n>M5`Dva*xU4zXgg)?w`10T 
zBaCmfwf}t+`;7@W!0$84@g*38ahQNf=-c;$ z`9EjWX~LN!o{!9Be%=08=be9~{rgtA_f-4$zV@%SL9gGV{Ww`VQ!yRo=uy{qpI}qd z8ONE!Cbt}KU z`Z&)0NzoJ1X+raS?SCZcDYTx^{y!1IO6RP`TCB%LeA^_?cyBf6JLUOP+W!sO|64u( zPS3x`^OHH@cF)`K((|M9vgh~g-JZLr+w-#rhN`u}+10U~JzT-gmVOnou|f2p=lX~?`R4RQb19j?=bKBUoTEOM*^ zF3{uLwdNV-+0(Be?g`L3Q5`i_U8UZ-Chi7q;SPT2zrQE^01uI0f0MuPG5rai;RO;6 zzuw<3!~VXiPQ9ycMJv7SqIQIK&@0EiLHNJ5`7jD47z1;$L;r36KL&l{u}zmb;r~D5 z|0mnkCgvJvC!GnHgvsbxqy4eM*lGIUS#|g_{Q&fG z0PGmF}E)ayRR3Vy;3txp$A}-X6j%dK}|2kF1Elt2eSMt-WxxAS@6bwuazB zaxs=-{~hn=g*o$QD#InuF-P2TtVHAOO8&pfu$tcT+T85tm0>M?J=!K}SB?ELq#TpP zMsb~IzYLqnD)gkj4BN;Z=1{|Nh0gmGAt63LzCeFX zlHOxpeD}+W&`uvTcYdJ5TylCJa%!- z-G5Avd-4v<|0q16KSOR0-ybr&v>(S>S3+F<*~;*We1paY1>QsDG#XOA9mlpTv!gTndsf3|ps8Q}LqHy3z z5xa=}|D655$bW!D1N&25mS6v-U!6ivp;aB!hFWPH$4Q(<9nK+vB>J`+|Bw9se)rkT zm)ZY(>RB@Pmi;fS4t3ml{L}ik>faUWU&Qrq+NIMK(?S|OGt|H8xC`{b?dspH+8^{Q z$g#z<>gLQE_3sAl4{>q*n;YaUG&=SUc@GVn-nCs?)RPZ{AEI@<=ent#G0k(98Bbm6 z-Kjs~_xITEPw))2_xPL0{Wbh|lg;m@zd?B9+r}uAU<`(L6oqkQToZ2sISG?771L3U zS(t-)80jhs732aW*!&;HUo1~MP_ZK>DF_vOER$?{QqHnAG&rTR%Lk=QM z&y*_rXUqR+zbpUayyH4I$^YuAZn6ix>Y%-uLOyVP@A`704?M0-zh1go`L9PEyqR8w zL3Qr{V&7i6gYUmdIY4gnTjLh}?fg)+?AQ7u+LPk;zv54c^EICH6?`YJYY!*MSQf-J z!Kxjz8#SoKK^(@gcYTzM^YD(7ac!WJ}3UVj3O^|8ORzYHh_79ppcU&tcNsnWTTh}P3luu|r!Ff(9m(i#hzXwYEA1_{=vk;ED+R_OcPSpB`f_jCUEex7yPYwx|*UiW9M zP1+1|{c#vPz??x3I`=C%V~{j*Q;*G`|9_UWnIVl}5c3dp;NJ;dpq`c36Y|~K=J)6S zpCZ2Z;WV6qkKr>o57Q?(0}82*|G!2~+Bh3wNvM2_Sw7-B1a<9m5poGEg=O&l|G$=F zZt3OSU?t{j;rnxoop#<8e$ap4&Nw5UaYZ8I3TQJj&VZBLYYo?}h4rujZiP*-1ze0l z+_Q``kRHY<^ZNgcmj_{9|NkuG66hjK_XWl!ql`<&8K)roM{qmL8UAYjt(W=>?j%li zZ$K|&oI%E9o@nBRjHmqNF8qeVa1UYqJCb`vu}1(mEBDIGrkrv{g{nJ$KW;n0ZfESC z$a}#<*dKwm-MklLq1GhclQDj`o-@~Qe*&I@r=c&6`{VvBW=|RSH(V;uV}1dKX1G7^ z;p3iOcoDY)pn(jSToNqV$cg35pCG4Omq;P96e^(x%wU5?6+gyx@WcNVFgu|Q)V&U@ zNB!{qwGzGqGPY2@IoHvJ*%eOulRg8Hr2kpU|BTwN#~o!sL z@1b1X86bVT{pBQXr(pj4kbR5`&;xiM`)L^5$hZK!yC{RuQG||=5#1mQbrAHNpzJ~) zsQ>74U1WSQLm3_+o}*kh&UMH^>_f;i#Q8CN2IoO{AWXhQehp9Y?KVbGzA1O=8CXBe 
zUtVDR0PXNC;pA_WC0GR0J2`I(xfGVcWQNFcMor4DT-#_YJ@iknlwvFGri(OZlb$S4o@>!r1Yv!Q66; z=bB}nAF~bS=O6a*oGCo#37#FdZX@kEvIEq9MGw*i^ZyTGtUrG)A@)~b{=bCV=%?WB zV-Da5Zm+>G9EX!|3f_n5#y~lZoVYbm&LAiGr{l-S&)__K3D$7_zZd;oA7Xxf4>!js z&rhG@Tg)vNc>c4D@jfOW*3zaQCSAvQ{t2ER_y2YO0eR6zyzR)r&zSq;+WGyD*Ya&* zUJA=#Ik-|;e<2SCg2@AF3P(*f%3_C-m{nT$(Y^;gD?d1 z>uUzuKOOBK^L;(Zl%RVtxYJsY}$d??kHl%Z;>w z>KWwCKHeYM>g4??7j2Y}cD0-y{%IHeFZkj7#|!koVE+H>Q|R!YqHjUFmu9E^g9F5^fee^F!nqE}3FZd!kdvF( zBY=KDq3XuBREn&Gd3`_9CgaYP^zm53#mx*h7~0D_fNzv%gYWkPHR9I_R{kOW0>h9_`lcRPGNr^PD3yC<-Qd> zcMoZNiarqbWbBQs|2KvHU-|_b(Jx?azYU#&_K0hkANl{nr~E(Hr()-f6>*O-|HmA+ z2fRhh|G^;4pMOmpoy6G%-Ov++ZUKD%f4%wm`OnStL*YVFNSb;NFZVpoGe)sK5Xt|} ziu}bA8ZPEw)~htDL`RrOFXkhB)1&Owr+=uvlm6e=_Hk~2dd7e2X>{U9s{uQ9-q(Qt zkb(XC;KR)e)=<`bzrSCINI_zxIO0hE*i}+Q_?BeSe=lLwbYQN!eFy0ePTbV93aZDK zN!3)S82zqdj(Mrn?Yl;5Q^LeF6DBo&S4+uMv=oQjEJaCCQV10@AyR%JM9OsQ*}E2v zE&y}jrOcsSxL(o?>m;LWo#-akOJ?{@l9h6kv~u4vo+D@X&5|2+IsZLlKU~-ql7C^D z6og+n|6WG?i+5ioB}L1nluRz0Ss~>!E2Scp^BGS2OUWgyf9>X3$)_5=)AS{jcW~(! 
zORHZ1eF^HjG{$nA6FSV8p$|V3alVQ?3a^0>N1FW>#fv$B@2RSc^|amBDc&OLyG}K~ zsqbi0T-2%a`?0ZGh@*%4ws(a34*IEg=g(uAqCPY+_lwj$^e2Bi;j+G^KjpVroS4sY zk4|J>(eK1yL+|E1{dtzU4ugIf;ldV6?h=s|bkBYUZo&@%&YGfE{N&G=T}g}O4dh99 z560m%u`HOTaY)CmcKy8QWd5l_aXm; zyb*aVVd9XFA^#KkIC2|)&%k!<{PsZeGh_{tP%;u1iY|@+5keMj*%m+1JN+bw_2^t)CXe|CgWXSl zN?ZlV!U;bqI=4`Y`O!oP^BtvxDciMxGUg{0jJqpO_=%Brw~F?+n)mg= zQwQecWs1eUK87{-3Bbg7@&oC?i{96dqsG z%+S3R+SMUEen;Rh@F5(-|1xk{=o^ri{Xo;9c>_PXrke3U@)Wm$K;`G!L+r0wA&M$aRnWW>pRgE?PLFEKVf?=at6&g_JzjLcRb90^pH(4oEtfU zjtPFPNaxW&smI+K7$|1$VTnbrAUseSu-lOKP|myI8LGMdcI0lT9S@YcjRCBaD`FZA zl;7f}2OH14VYR9XPTPjLfV}%0zJNmVTp|A#Ksn!kDfV{C;&1TFg$k&LW)=MY$#-`7 zWqjW-9PlG$q+^3Jd@b@caw$9wk8Dsz@>KIiWn|e#W%#5jZ&QY!zg8JJeuFZ+0rMK% zmuyi+p1wmFUVeu%a;qxil#y3%QHJl@f;m~PUKqTBIIik z%E+4QmEl&nbCWV$8L5oCwpkfoxkedTx<(n!_%YZ27=PToxVIAKYwmw~lQQzcCgld+ z;VaT-58rq;j9j~pYj0GVaa&J%4cmBLGvPN~ue=_BebZ*XAL3gYtBgExyE05-4#u$- zcy7J&4*nkM)Bw`R9k*I}lQ>w4)7WuG4MNj}{0-j?-4XuxEShA@?LPizXS_*q+Ly?y z*jtXSR{Zflg8y~M<4C5(G*Orn!0NYLG2^y^c*CJCVTsi4T7rJa61f3;b$o=t4e;)Ze zV5&ehbh3JJp{)$?uWsJqBv{N9JRkDJ2KQ_vIlO{5n;JzWuFef;KB@!J8GW-)1E} zaI=yTwpr1IZ&orxHY-`7o0aUS&5AyHvyv0BS;>vutmMURRt#~QmHdsHm4X<)N7}}s zvzJM_dcR?wEllK)YJMNxpegh(Vt6i|w?K8{+0(bur= z=3Kjk%S3&@#-`0mlXJ7enHypyj2d5q8g?h+_g?P59=!q8&RSnL`W-#!cf5(cF9`h) zQ2!t9qOx>tBW*S@c8I!6-o}3r-I3u(I9qZG-Hwlm--j;2F}1Cyezj39HLI181C+T^ z%6UMPGV&Q^<|)EW5$-T;!YJo6Kt(5KL-Ky@#Muq~pr(Nb`7YEY{#f}7a!7Tj0UM^z zsD1~3!2T!IZA|3P$iJ%Q6RZQM@&e(8f290PHB*rffBYlmQ`JlwH0|YGR5$WgowM)X z5zJUFSbCYyQRjACyG25;?2TTK5eP;}$yMU5^-{|Wm;eE;+Mf5Ef z@lV}FQj$b{#r}n|@I_Lt<66wajGc{fi=>LaUNzU(aJ`A^hws5}DLSTm7mJ0yQa#cY zxKM1J0lY8kzeraw`uSt_{sSB&*%LvckBn3l>a45bCfEuO!LOmvxmb#}2crKKC?$IWr4-6Am+uXfijY95 zj8We|P^ysC!R)CBMHh~?uNGMsM7!7&B$ip)$&0j;ezc3UjdsF0kPTedh-|{%jBE*` zUDXHi{_6S6{0n$CNILw2q%)QCm6K_wxxObc2;E8AX~Oq&y$k7{h*Sn}^MLn6q~ePU zVt-B$`&fe{eM6vFUgST@od@M5-p7X7eh+;d%zua60|7Eln|p;H?aMs@vJ!bMtO16g zn)N{UQqv`@<$TLO7cnyo)AXHTedqi_`3Uo;FmRgmuVLOLn|YT;{_BKW_u+$L9S)F- 
zn7@H^36Oswb*!h@ucbaFuI&2*BnWvaTm|jXJlhd;L?W2;iCQR318UehN;|+9cJD&@ z5E)K8^Pli2{2ub*ZNhy9frMQJbTKqu-c37xqmn*#qoSFm9L(IP%;!6KpE0qRaS-1< z>75JNpoe^2kle+!c@vZaLy#1^Z&XUQ-k=m3Z&ZqoULxh3`%#g6gHn1lSjtYUQL3<4 zkF8TGk;asT@^h|z25M$6kyK8ULm9c*+`k$%jr=_yLBvm@MA7p;xAoI%yncq3c z{LVq?OFSt3;EF#eZWxF|l$FZzJU50)Zb!9#I z)(Y+5e;rK~_#$5vZ|@_@A2(7KH?tLM9d2+8#$W=@!9|#bpd0CTKr-l{4D8Sgqo7GS zBxz}fL<{MrLy}=QB)ZN+k_lOmJ#k3%kOR51ha_+6kQmM#l6+*r_#r8@ACe;HAt^?d z_zp=avJ889(m|<+J}8x;2gS&HR-HX4)lf5Ykg@qesl{A3bx_RV2gMS3Q0l=NcTj9d zJJ&go4G9OOaqmHCBCcj+3+dX5bYgEqw&ULsd64}Vq(8Eo>wA#Bq;p^V@1=hqXlH_-CZcq42Qdsn`QWUjciqrN>$;f{8i|?1Rv->Fr`=#PU zyi#e|FUGx(DpeQuOZARNm6{X##kA)osZD!WsT8 z!%-LoWAbA88oA~s>M_^}kHHi06lgztNz$KsSq@;1yKLW z0VzRNgB6DF+^&qsc4he6dl`4!r?lYK59eSOLMRvcPzBA<)p|g>?>xX-F7qkyI=lfr z&mNH8fCChWg_34kC|YEC6T0wj`oEA#$3F|Q&n^@_z-_P}UIHy-LOv8j9}K`d@E&{sAHr9#;#S%oSPgeWCgeaf z48W`KAuNbtUIiY6U%_wTCEyGgO(x_(JviW1cpct?ci=<#1U`o^;2W5O+S~XRpjNj; zK0##?dms~Y7G%%(iym??=RzKS2FR!3EnxkvkO5T@|7$5` zov{Q;<1Z!uFD3u!8$)HvrR4vm_kB|=qkPUjsfn3M{9b`flXdn%=kPg@X2j4W@2ET>~A_;db~5+zpSzlkf~2 z1TADjJ`_VaR6#AY!5`sucn98t58y-i1U`o^U=9}0XSftDhZUf4GyVtdG3NguV~qVl zkcl}9vS%3UK@R3z$ivS7`Irl!(9QfG6f+M{0;L;8%3u|7uZDGS3&hcP*-KlL2$5@e zpC=SCsvTwYi_$ntL3?I_q-O_6#>uNiSGqznm$P2MHQ7g2iC&gUP9x{dwT4KZtQAAg zjgs$LCk5Up{tv<2^b4GW$GC5jd139d?5pQGBmJnsyKtv3rG1)vG4A%@KFt_i)v4BI zphLzOa*__>B%Q+{{3h9_@ds5hPX3cBS%>?xdJS{kde^cL2vj`+*ls{JH;S?E-+ z;yZX(4Uc)AGyd`y)$StvV{pH}{8bHq59TikgZXc&{Z`D(_p5umKUMAUcbM!6kbfZm z3IBrWX9L8aG+PMMj|9l{&H%Xt^Odjyd?e%K*8%b>awGOtz*2{1vL-+lMT+KspbgaP z-a`glk9kXgL?Jan_bHhxe?OmATGDZFry{rGZVaK_3MP+RNmDw?hNo$>4l9y-0Lj{5 zZ4_=ThWu;xqN&zk@cgn`H$ zqklHo%J?rTNaoLHWQ|vw#~ASA0MRlR^jq%XI?pw%2OE>vqqPT_!n!eWt7i3ngLiUI z{A-!GoBv+z|MvG`Uy1)R{8?8Syq~zCYMii8cXWYFF2T)<{|x?5v0m{d{ul6PeWng- z$B?6h3m{A+^Wzc3`6A}H@7%O^5gvE--KJ}Jra;1nE)aEpXjL-Lie1aKs{bU^;#L<- zynf$3oBWf)OT0&-q^%7k8u4F!Wap!#>Mxb zEyZp?j}x;;gmg!TlJ%t8edtYK*FMDDdjRE^__`@$#!lw`pQnxHefu{N&cd43t@!gS zEzDu5_UXse`=K+ilyE$|a~ap^c&==9G|xT#5as`QSOaSbOPsYn!t&1Z`N#VC{PlM3aVu`5 
zzmfScH6NzGB`o!l`u=K~7Lxwz{$7UL^Zu3CYr|D_c=YBX*UhJ$T92vUj`|I?;XnNf zaS)$&IdN~|9;AVhdbf&t&Zw5j7tmEETwNsTM7+kcxb-jxMjci)!ZV-Zddjx;l)uap zE{(iV|FLt;4eGNHj_1k1O}iGIl%<~rfrJEsd7!YY(;h+6$vDK zH?nJ-vHvmlXdPy67We2ws%Ja6z%#-eHuwnRg(1QZ5;llygJB2kfe;9V>18kaPs=_2 z6Q_3gPi$&bCLc@jpX}M`@5HYYdch6z|Mz5FPCHKfpG4jxb^FMBKlI_~Yh_0={~5#l ze_WvC(g(|9p3Oi%EFW_L-&0{Pb-j=FA6c@4bEbFm4WJKGj;z35nauZqzDpId8hZ`y zrg*-C?f&$C{ptUrTLATk(PbHDKMZ|pJAG>hvSAkeRQlXa(0qbKJ;s#5BkBi3;i4z01tfs->&cV@O-4fVK@rMU<6LUD2%~4Ou+eT6`4Yw zg>!HLE<(-4+r%_`o6KM?DpX_^*#z5QyDG`!fy7g?9k~Nsowtb_c46KP@sI$!2)hTl z7j|QghXmLQiI4>QAQ@6171BTldN4o{l!0338j*>_bslpP(uCatcJRX<2t_Zb|JM4k z5`=m3)M_Od83LgY2H{|54rw>@15L=o;DmC_C?Ia4ICYl zOOX_J%?cpNHP^#>xCJ)B?Qj>|3p-&#^P+N|zT@PwXB4ewkxZB`QzjN&!Tm2)CYP^L z9wyAK{@m}@A1l)vu2D8)zEia?xm>v$8OgIn0bMlBgAk2*Bhaj99*6y)g>1-&Qm6u| zGtE;FL)chI!u%Nbio?7Ws2(*`4H~K;%}Y=TPeL&~2UG)^lb~S@JS~BCc{lxE+M|pV zbTS|la~5RBFA%+j@r<4EjBlajAq}**`N%K0{xdiY#X-xYgniqk7g&FXa;SjH6SRR) z6}wETku|LAo1iw8wmy6rbv1JoP#wn{1(-0`Km_lg_UDS|&lH|mASIEMaVW#D6e_}* z;{g2yp7HDg$wlVOED%E{x?0!^nifdDn|H%qbB=dIYVp^d;2rVLLS~MsXD6sTTRnqSb_z2#E0Ngr|U&Ct5i=h^?2Ks;|O*03XKvSq` zM$+xj$6k!A#qIJ< z)J5y^uQqe&s>xWqvmec-SCi-(LBxmYs$&Cn?yx4FtV9rlmDg~n} zq)>OY6z$-=^KF+&32U~c$TIBZ$cj|{!G|NQUd?}RFB22gLLHb7UnUl)2P@dz zmxvu4O_xZ+2?U5V`fhn_v`|A)R<)_=hj z!v234pda|Z>BoObTKogPfmslW>$NZu!1s>*ZMYQst#BVa4Tqo{C|;Vc;4;Eq&Nsew z5%n|NfjM>!^C@73*T8)}Yc}v248w7_c`bDzltUE^!Jl9fF2W4V!obO|q<}Ap@ zPY*eX%>O}N3iE%EkGTK}@hgI2`b#Cz^nYUL|4^5gQ>RxTE0Y+jCI4Ih30Z^P6vH@; zab6wLJc6FyDEd#-vsR>ShW-yY&eH#Z#xd$R>P>YIdJDA1qB{-q|M7^UZrny4x#PRD zMY|KI8?pcJf00z`*6s9vV1EB!D*DlT(HGg}5gRn{Z8_o>iJNdP1}Pro0O5T|uRB;a689kXPV60H z!R&1amM-kQ$R7Ork$u=V5$+!N1$2QIiogQLA?;i+m07T)&vHfy=rCtM7I9`qET?P* zOC?mZ#$UyHz9}Ub9m`;;OAKZ_87xJ5&hAJHmJ(wyYc#A$VlPEjqy|el@-*MYwcN)5 z!|(wt;@gadpTK$S-@p>gVXzS%fJb0I+zY>g=RgB_PzqM~0BWEM8o&o9;Y0AdfxZ@8 z4L^lq_?v3x`%Zv8uoun~_9onKhkM{U_!;~X4#M+L4zI#7aKPIz4n44(^0OE|#T<$J z0{Jcc2=kqAFFXW~!*AgbWI`?2p$q;DqwqKI(xtzW;e0_yXy5KX+W_T7}yj-SV31-d!UckH)8sU8) 
z&8M%uLRbpZ@OvK3rNoD7i{=z&)U-4am>1&rYvdB-e&nsl|A1!9eSj(p7-GF;O{sqXwq&O)` zj+5flI4MDvVlTtJ9RCVrCE<-RarCX@q#9X+-GqBB{&h(6xj3<0jFbA=II$va*zLGG z@NYmi622*hedJu$-yPY5y%+bskT~fF*Ty(;!vJ`| z8^V9*VGxEu!~fmWcy}%Do*sOMWQ5)!y6`(B6PblQJLV42$K4@0$Xx7sxEt`#M;2f& zOuj>kI7^@yS%SS3_p-1%q&(;jsesDZJH!Z8n5&@%KNHkqt^@PT9b$oc%vP}BX9ox7 z255}ELzxDkd{osn_d;}Q4>;W%+J{ZJ21ezJ% zAGF)!Bpov1<3tCUn6n@|h5Uyc%(;+^hVmqYx_3xV+Z+xG-I~}l3(Pt73npRo_5lc>xYmo>~0%pD?tS?8BRjBn9wom<$S-8x-pyi2+AR6Vg4E5Dy=^o47MmppnTtIGcOCwj$Smxd zz|Dfbrf8Ar*lQ9tiD`=O=^Wn^vJ$&-+a}fzH%T?J)V+!I!%eIoZesm#lN54Y5!V+Z zOZ;Nx73!c<@By5M{nSsZsV|w5)-X)gJcQYV{4@CA2zA^0v`sJ57QF#M*l&ir;g|3d z6v4Z+Z|5P1u&W^k?tx#xbC3q5@C~2_r&$g+z-_P%9)O?26OagHPzwn>z_U~&ol7~EfC+;xFAupap+YxzeR^SK#^Rc(lo&!TBZ z;bV9g{foMc7cd z)#&SfCO+FV>pGvyknNnfWP3%#E=$Dm8tdh3g7}d57xE5)+~3J0 zO)G1M&NEj?+sBtn`x7D3cz&@otqhXpudkGrZG-^<+d7VgyrJBZ_Uu8HN^|Hi$OFrRb3T&}sCcvOYYG|$-nJca(#InF#pT4&LJMymhraUdIZp#PlA{=`)DUzx*c zf&ce>h#$^(V0=6Of2ow!^8Nnb{&VQRjIuv*oc)Q&AI_g;PH2$0hQLDl7|4e?(&|^F z>FuQB0O|7)TtgbI!tD-Z8~%Z$VHzBThw+~yUB4m?@8z0D$%`1`Ity2k&JA31ko^1! 
zVaoA)iErf%_z2G8=ivI!xo(pD8Y28(;D2y`j%xVCZ{eK7Z|H-WXr?UQ!%*L+AJ;!vTjs_B1>MjCz0e2!p#Gb~4eGx+Jm3W%48qVc_D+xnPm-P=!WVE8`4dfgX-T^W zu)hu!q}c_^%`V*UAq}5~3exzGr28AB<4dId3c`)yR*wuOPa;YGjc_?(?u7>k*9_~3 z_fqb;nfrW7yj_IfO1yc*c?;n_B%gnZdmY4b{Q&+K@E;@27YXw!_Xx%PN%#uxCC)*3 zlsLapzYnf|hieiD{{ZogBVR{8N0=P^2k~p>-fyYz$@QJF*GWeRX%|WwLND_dJc0&K!NR!OT*NtgO~LFF<=g~!FgonP(tzE8Y{I`0_xb-2Bb_PK z$@^HlNo0N&Sro?jJcRLiG~;t*SsdeYsK8zh#&E{>P@Tm19_GjQY3NQRemA~PBYsse zHl6!viC@S4Gdh_cz@CZJKh7``F*cZ&)O)&drf@K|P^GDL|kL1fIu!3}| z$L&|7V=(D{6KQr4IR$?w{jVVXzareNm=_Rk3i~I7Z6;kWBhU5_E{3wEhf(}aU|&s~ zAHls`_bYe?4#GEF^EtAZdpt}Wi?QDZ50Ezx!ox5CH{te2*hpNvArT_U*Nw0bBDvRA z*bX`Wu!?uz425CnYaok5sZ)_3Kv@jsKaTQ`tPH07<5v|<`A61ZH(jLskFS*n zNfUFRpWH$k)cg6#!^j80PMnS)KY0}MgM9_Z#FMLokrCN<82i;T#*}VA(44;MkkI6&(a* z7khZSx2;0YoBx=z7r8Hi|CVF#LH4qz_xt~gH{#b6yh>i;S!?0@|BKgie!3ORU;zg- z%+Ssu?NG&D=xXj+i8Sg~Nv(dB)Ikk)6ZYaj_77iVA5{=##SkmyovRpEGR|O)yv%Q< zhJ%HU4L%9ccB=rEO{~R9#^&jM3urg^}59%HcGc=gky8(7k zb^ltJuy5{U4@cw;qW(*>-N@dJqCjyXTP^7D`Jv0ly^E4#rSKv;e%Q;y(D@5N_YYak zK8>o_KrwDaHxOADj;^UC+5qX@Qc4R44Ycn0(3v9UbP3wCg1oa}MZxS<8k3 z%=uuzKM&p@uQpTue*zD~L3jfc%H%U}IpuO4JPuES1DZjhJiZ63D4(}MI%TWy6UsT) zx@jwXl+jeRoWfoxf(XiL9E4ClBOnfpa0br963XmKxP-W_fd7PFLJ71$HMGMt{1@f- zXYdP1hak#wI6O}Idl)j|O5W{5RZ`BLgo98Ie}vcJ6VMna|7Dba6XhS7fnC>2`S(%& zk=fYwhr=bui2ffkPe=L3EgxAhOZ$&33JaIwBGw-TC9Et&<7vED)R9GxP@=vQFsHcB;QxVPvCBN5T1jN;7Y!Qd*K=AgG>1))x7~a9||0(@-xRNp2_3&SeBOZj^a4$RnKW7Yj;ZpJv{svzGzr@jW z@tpTvMqWZ1+zgMxui-aff>*%>K4^wcnB!W1fQKQX7XuAb4$`KW}+>_lG}vWw6sh8)O+ybG+qLp~HhVG;WO zW$6DiNl-#Or8?H%nK&qi3e1&m*58>ps3P9#PS)SKhY7tnRUfX7Ys`BVie>LY&SvLd zz=`btOJ@JszJ=m|2F#7yIUjfjXYg<1eBiC@`AT5#S3Ktfr^7tYy4q3k~kVgFe$`_F>V#k+tG5$%d5i2M(x{D+hOVdVdq zhz|Xt%u&uGLWe7xaC*Y!%nJJ-IA8HB=XlWe=(>VWv@Fv37;$DZlIa$QAkG)Q*>~BpGu_f+Q>`6Nn#~${% zqW9Do&z{c&#=yHpT6S^H$PWIGvz`Cr#8LiZDgRMBmCop$3detF7Q!6)7>ryDVKDaC zgUU6?NLUSP;YPR>ZUU=?d)s-2BA%s;d-BWH+eP%YjOcBd(Bpz0%pNCtTTSS3aSs>FpGQNzrv7`}o5-1-Jj2i7 zPM})S+yf85W*A$)OSupE5Tx*46MWwrmr}RDY6yptts*xd*TWT15l`BaR%OW21k!vT 
zX`e_Q>?JRt4r=$12OEE;)FkmtOhmb106gG@UEFWb5Fj6Mzm9SG20H?zYa9Ch3G4?z z_G0fteu{sig|_|NR%v$9*4t^*eYEY!Htg+4a}#}oqOGhQ-YM$;6>RYR{}mcA|BE=5 z^W2X>0K^)u}r~?}`K^t^~3;qnp;WJne%339?hr3`qJPf}F z^e;6%@CqD-5%?Sy@!ebnw?Zszg}Z^@!)qP~jg#_Ed#d$O{*f8jb)(e(~pmL z7pebeY5$RV*bTVn<6nR*Bz#dX^*`5_AWN~A?MDAE0sTK@WfJ;-v>8>Y=>H*Wu$yqN z#lH?|b~1+SMgPx7UlM7E(qb9K!r;a1qQygVmJZsz$_@Z zClc;C=m#&nN*sTJa;OGd9O=D{^xlEK0n&`!5=nkUlOM>|RP-%zZ^OSG*@(SqFa5D3 z@&nmJ_}-)Rw?;^BWC!-nNb-a0yOG{m#tOK9e_o*ryL*cKILDjCNe90 z2`{ol(wdftHf)Kc?_#XZ85IV|hXTlsS|a+;C6YtfTx4n75-B78a_(P&Ec9FQ-F_@& z3Dj^u6YjP6*C8vh8>jdlxqmfMlNcarpxqZh{SY7-rU23D0wk0AFAJHCBDx-P4(43^ z@{oomblYYEqyP%_0q7V6(EkmP5-9Bqp#2MA{)_b=XMj|e1&EQw>Z(N6f1oCX^&hB3 z|E-Qa(B@#)f5OqN2xI+c8@d&NtpDsl|0jm^pI-Jjgs}bt&Cn9VeyivJary;tHerCY zhqL}O!un4V>p#hyZ%14`j17Bn??d)ucM*>}j`bg;2fG({AO3^LA?%uq^nYg2(?_P$ zer24a{b&4_30aW6k#QyDKrZCz=>LQ=&J1Uqi7ZS~$EEata4#|ayZ?i|9D9Y2{twcK zy$V^4e+|-vy%t%gqs>jF?M>r*11s3T4i0F5M*1yHI=(jp-y5>k#P?|7dxUoANT&Y- zUG$f_b?pBi36$Q8?Ejx-EPIUdKg0J11GF_Bn4kYU>MtLJP(MRP%|GRO%F2zf2YvJ~2oxsM|KS~qX=h8I)bCsBzd7o^IqJVT>c2TLCeBG! 
z${b_aIjO;Hg4*~wsROfpPAtwj`XF<(k8`w-b7IHrz^wtuQ~%9Lzi&=lx;g5_xXp0@NZk~Tm3nb1l<(+SOS^g|=*mm-^D=x3(V z&*b{~|5~g0aEx;p_R=pUKRRLlKiFLFJ;V@jxM5I;@{ik;Xgs)TSx;B^uUee_j0%f9)_Pn0vv;D_ztp= zFM|zQAP%>k@FE<7OlSZv`~gr=(0m2ca6k2ODrCSV)bU|(IdG($CK9<34nR7Tz%L*X z>c9a{!hN&{55nIue*XdxXkOtk+x z+J8OmKV;LU>uK9_khuoh|1#Qt!sH_huxAs;fAIdrv$X#gY5&jB{^M6pm{|RH>%lv=R|1$o^yQ9peMX&^WHF7!9j$DImM{Yp6kuk_O>Few!9V($3ZseUG1~Ux9 z$M7lq9fC=VrSLmA3>(z+f(odE4!)yKzNapv`X87cWH0tUWPbwlzk8YgNn)NCss0Px zi~I}MZRR-=K@;efvR?04*|3R1vsKn1W+9g%vT~bZIp~g-754D)<($N2~F#d=7h%T|( zsfTG3?TjlOTf1eH_!?;|o64wnP1L){2l#eghucWE+xc!fW~pa&)VqXx8hekCdK1}4 z*nZqxz0|Wl>RqGhYX!r(fQ0uW~W)n!kq0kEBYd{ zl4CL}x%lVdHb4JIT~V-&_mAcM>0cCYWRC!pMx*~S%KP))72~`=>1l+jbM${m&l=Li zM7q_IMs=9YLG*v1o_>$j5B-;L`ahT*xHZr}X(Y}jXyzX3e+*iq(0_rpNc3L_+p&lK z4|FBb|4F3(lT807mHy8@`ahUm+v)$n0MF{#O8+N;{txCs+*JJ+O()|+&@ygKcQZb; zGd^VOnn~M{g*n?uI|Dgov^z!285@{8rv1sMpHV#PiaLdl&I^6RwAOed-12i)Jn&lJR{Qa}N`I|L6Gr zr}+M7{Nxnh)i?}Y%v2g!2m287-{4Kk=NNQT)+hKhQmIq^xtcP0Ql|``)+ztOoEAkH zUPIdg2Jn(LJ{aVkhm2lyfdj=21A6*f&_f#Zj&@4lc&GFerlYesiD z0)JRbodg#k`+CvnxKAQP;TH{KPni||L!cSk$v2AR=pjuGWWi_DgVQ|Qm&nS!q<2V{ ze2aN(gIOt!EmF#MkRRK~j|B27k^F+dly^?;^D;Y&z(V4uGY4=K& z!7JI$5=B4dW&NvA$?f$@UPz&0DDz5wbcYlitx*ckdZp;_Yf`+mP${`!Qc6d?Qbzdl z39nS_C{!x<7AnTnLZ!;>rTi~u{EPm-Z?XKH=dEMH)9gh5Ka%smI0MYe8DO^H#q|Hs ztqwfN8Y;TkM#jI;9L4%Kw4!(Igf`6W&=HURzv+Z@m7Spf`;PQv1VOe4O_`F2zs+rBDXtPyv--962skM~_SO_;IP380Gut{EIQx zaKSwK7xq7lN&VQESdljNfZ0dJ#DQ!G8{@(2h>2eeHXakGTNyzyQU2PDu%z<=W4o zeD5i#NIE5@$*0hRIVDvIr}*#XDfv5o=h-0Ex#6!;be?*EYrBp!?uljXd{?bvjzrfU zvtu{w*K8WIVYesn?*Z1lTl^RY?qMx`7yTX@(FVpGt=OGM75SQYvxfcU{6n0pw{t*> zP6bHymw%C><>=kq`WG?15Fmr$%=&fP=Owrhy_1@j2ng+U-}qd`XmR1T;!*D0kzgHi_N`+QOXT33Kn(C)gAkcN+-%XB}w zOlClRrsc{1j`)!dlzu%cTY#xH>vjwdlPY&xc9nmsd;m z*H=lEEMt5`KO8hXUn}vCET&Jfgt0*|b4~CJ><82)GzZ`iXrTxypbAWIE#>D=$ammd z_!nHtIulcF8m8nl%kjSj$uykiUHT_WnPYwr>5}#0&bwI#vTu?B+MUdrt7P~^>fy?0 z(LHvR^yBtAWD9x->zB#70CZ)jw*Lw%Xuj`+pTi4~3!8YAEpR8?1^2-HV59H16Iq|k 
zIwO5RGyN9}c=rv6uc%K3kwf-Aab0kUJ9$6`jBkhs*|TRrdXomEFJ*xK#DH{!4ba{W z@I4Pmci;g3fE^I$zCOy!fV2~?@q~-@iUHPZ`dF_R5Zk_T#lELpaUdIf1MK&yRxB3= zr2g1|SdR`cA5^87q6at^;0@+O%9TpuHTIS(Rj~t7t?%PJl>zxWf^Q*lfcd0yrI6<+ ziW#84HNgDV0Qy*Oh+(Qv^24f>0?UA8gMMqDRXG14z$^hxK2N_;4RQ=3x&# z8zPN1NsG)@v`seFzZ&Hie78EEO)^cif1%k*wn?YxEjHG_ZIWANle~y*#jr73$wwBP zv(f%Jq-cjtig!0k$yS?`LfM{1DMwbM+N5&3O^leU;%!pB+a@&|ZDNYENo|x(>LP7o z-s2FXkG9(TE*x{A$qKPkKifqg(!~6qT^_;hF)-ksA7qz;Ks)oTcJ!(3 zQXFQNl2AMIpAGT^Vamr&p#Ni+O2Qg1G)Wb*`e>8X%$$&?@ULysDRsD+x7*Ptw@W?o zSU1|mhO~#}D2{kLx+Hp~G1kue>y>8i*WzxH*6ljQ$@8_vosjlkz0#4Ip>%GwOIL~= z{U1B}e|n|&gkI@G_VZk>Jq_YMXP2kBr)R=0Uho}ml0oFNxb-JFq;E7+>5XcZ9^G5g z?QUi);gC*4vveSbB65_$DF=N9y~42{8jrytFCg7+hqx{}&`{}lG&p?Ny`y8ST%v925ni*uj zB?T9oC4ZYk499YmJf1W6LXMKd{q)ItB|FJMW7HsN5e=e^ZD8K5K{9qVNG12m+}^7Aol(nG$`L=Akj`<%W&rUq0#sp}zT|Jh1aQ-f3|2TA?X^X^klp7Rq-UlY7m!&{C76UfV)9Fy$$StA^*!7SpRujhLD;fGrB_siuRb9{V#b+hTbf?m^>xZ zZI-N{R>uDtMQ4|zNOE5Fu zXqAdxW~tm~7UQl~<{iyay~8Xu@n$jMUc22ab=%BhK5Q1th?)McS*)XG+KYOzUo^A+ zMgJe!c*4xL(kji!mcUl}|0l&+)+%j@8l@e#jv%vi5>MAwvvlt>OAoR)zCh^Xg zQ(P%#aUac72DqPRrd7N_8pX%+4W`onznG_Jd>SP!%_3U2h4nuR->pH>g&34fWY&m< z_QWdsXp7{;INASfq5iXo!OtoA$bxVS<0*?2?XgI4ibYD2Em9g{kuu!NgDkAiTBI_~ zDn?}0*-@!R*7#Y(G~tw5WZkHf@&73OOQ+PwrzzG6i`e2VVh^>5gLoPuER6px(iD}i zG;^O8r$K3rvhe-}r7g@N?M+VlmuX68l0~|9j7s-WgVK|vQF?b-IDge5{d+Cc{|3c9 zVNeE;o;Ztm_gTd^Ymvbzrwm=NFlSINU5uMLksaqqGk2TxCDlu>p-p;_o*nfva8awc zgW6cXe}_4fdKvW9i_cvz-i!5O#lJqjO)OjMY1_@xK>j&!vnREQ4cQW3kM2UfG+}Ps zR?nENp1F&9=F;o=&s4pXl(k7Q_M)bIr7*N!DvbF`dB{7Qp;nJBy;i9XtCuR=jnVbg zrRhrDo_eWu<|`&&zEXosPi$lEqF&Mx+UTU#OBQ4XY8Bl~n`G>MM{*PDB`2v~^t@j- z_bIrLujI$niy_&dx0`9EcqstK!9pRh9i zvx;dZPpNfVnU^n6%wbmYzd)(yepX{U`XA|vJWFwoGJ&oWz? 
zS^0j}`z_V`{mS<{b4*oHB^ni|)!0Z3UI|rJRYsT{GspBEwTJHg;->KIoZYjh@)vO* zaU?sXWyE)-jCRrgPv`nL{rMB6f^9N~ z|Ci;{U032Ay;5eGoP~91zYOa4ANauWX^x)(1XD($^Wiu)_J!VOpPu3*-dyCiGN(jS+3e@4>8sn1=m7o>^zPH;mYNx*WFdt!Jcb=d5%-5w>-t2BrPdSv-{vTPx{W zVh&sLi?cif*lp39W_f3BR>B8oc_(95f}{(SX2pML*yg*y`TpP;@f_g$pl_Bwok3|N zc6RZ7@!}b=6E}QrT3#P-w4M02+xGgl9C6O}`d@@?U*~vxz0<}2$!O&F?47m~_d9K` z_Y((+Z*AcJbu{q5J%}3{Y~8zgbcAmw$EuI}fP?5^^b~4AFQPv582aDQ>%XU*M#{NA zYdbMrur-|E_a8YH9q)=bi5r=BeTe^zEGl%+uwafMt^^sjQ!z@GAw&!5a}r{k@NuG7mpQnVC5H@LX_( zq>fy{_rR!R-m2J)$715eD|kok3Yl_EaGhst;sAAI++hwg!7TM1*nb6MoQ!QeSF{Zh zk34vV3=@wLkD{&@X(LzB)&USKyJLZ`5@K?+8yw(}NXTw944Zi7W7!%t+hm zE2Q-!##RngZ0)JM&DV7W9#wuDb#KMy|B&BbI?X(hxzt{(wgv@c+58VT5%*_SP-_4EXFoRIbl6Vib?+xY%(3fQ_+ z0b5Uq`+wi0^r53X6COkT*G|X)8hm+DhK_Lme=uMhc`<|IxOWP4NCV z&wuw$%FOhhn!rGUA=?;-_OforYGP3yc{7O|J)9lIP!U!{p=2zW1H#AJ})yUe%B8A z#ptu!NuBtTO+UCpGWYJF9d_^@_zuZGvIA~-NRwl}?7Km-E2ecL6%Hc`?w)wWA> zYP+E*&SfODFSP%y+Y{r)#@RP<9NRqwEqfU*EhtvYqGu?J{)r zc3EQC$RpRu=)UbTwspIVf0k{YyG~B9d}`-)GJVN)_@Azm+0SjK|6@D%|J%8D-!AdJ z+a>Y9c1a%L{m%!tOZw66k~z3tvdrh0&$F&@^>+Etl#Sj;AE1WNE!;odB90l}e=Xg@ z_}?vz|J}m)&n@D4_ZIP1ZxP@7xA6NV+U^K`R`U4IDRY7e_W?{VQl!s ztun$7*^gFlrT^zv8Bgt$iQG<^e1E4*#kytsWV_77?v&ZoopLqh&+l*L{o%f7`kLZRh^Co%`Q*?tk04|83{~x1IancJ6=MWy!Sv%iCnozD;f* zzu}qdW381LIAnG@UOUY6yUKS*5Y+QwJ|=Nftu9YGEIt{3}9+r)9nHgR6PO%Ain^~m+& z-nWf$m~G^6Kp*VF#jbN_lh{V3P-{CPe8vuzS#n<(j;4sDa>z1z6{Y?Ia_+obKm zZTPRYG5)qqI+^cczMFMDS8wCH^=9e2^w<3Vg`4FVYM8lO?5VqXhPs>QUw;3Bc~@=) z|2w~baf08!_>kYf_=w-X=vtxggx@Q;M?j;1+yH zTks)$McTQ}cf7boI$7Rz?H2BTx&H6{d+8e!;+H8wVQr6VLSL*)17gsp`x5yl2%-_KEe<%LzyS7MzG)dB? 
zKEnStf&UHtX*~7_yph@IP?9iyk`8y(r`VcX5p(w#NUL82^9t|HuE!?~WXO1V1L~ zpTKX526OoT&@j^@X!JDxKQvy$Z;B=_;Q#pu{|_FY8N5ET#B+xiWF8M-4E^c%Z%9>M>E%1l>KgK+=JG50cRL+drZpNLbagifMAAkFgM zBjJ$Qg8l{iuPncuc;E)Uv(Ud{dK>zXbYCI<3-oRDkIa9c_y_2A?qw%P`!v%Gc{DVl zEsF0E;>(D8i91n0nny`AL;6MH*U_8k?@^3ppCWyV`2S-5kLaJ#TU>ws70b`;<#~~^ za;*O{^Pgn--OPUjm00#~h@V1}*ZzV3`^)csjPSc3CpKjE-rwV^MTzY9Y85#s!1Uy*0YCrg||f5Wr~Em6j=Q2bqf=i^d-r{>^) zmKf($`sLr#$Fjyf6u(n*=qvI$>J}mW5E?@n^tWgMEh9(jkMag_!$+K_oQvm~-a!A1 z{wJP;yG#78%tt4rlei0xqmS@AAgCAhp=LY3OLM5kbLyHj#rRzs<~!_vV4Pw@+PdzR ztH|Te(2dmNdgA{D?M7cle~EUo{9)qzI8MGr{55nOu75}TBzhJ-j!v-5LwpPg(xYHTpV;f6M&y#M#U6N1;5=>jhL~ zx`fL6@W-S16HDA*zmC5Ye?3b2@Yka>>oRD(#`yek{Q0N({}dBTGQEU9{{;V^f_Ux% zYxtaDL{{#G{J@Q_7m2oqE*Wx*T*W!KpNth0y|JVDo_-;HeGd_&Z#zoxC7<|tO ze&?czaW2;xIm-Mo=6m15=R@55Nq!dt?@pAsiEY}6J3i!hGKkxlpQLYL>Rx<4#1nh* z^*ljv0nlQ33 z$u~hc^TaXoNMB8#21Sp>A^d)=> z9bJAkK2$W{FFPpSftb z6fHCT3VIziT=HA7f57z*IZ@+h*{`&Xo7hA9dawH}{+Hj1pSBI4AfBuc3e$#>mtT|U z!QayV&3;BL@Bd0#UuM5P!G6B>KT5|Tj*Ca|VeEWGx*y~?xryU~ebC3gIJ%SL1ofX} zzoJ1jgoZz0zoJnzhQ?2FT%bubg{F6MT%cJrhvpw(zoIxwpyYGxSCmE>lzo8X0_89K zCn->e;z8bPrw(Q6P%-iQJnQn@SR^j-oS8q4PYz|5{#J6tX_jYLpCnGPJ|4nn$Nc=s zcky-L!~1||T{O+Q89G0$->KsHbbN_tW8zWLju8*DeuQ|C`61%|3;6!}-j$=o$51co zL*1wcbt3b>6Rh_rh+Cg~SK5f1*|vqeqQp()6()|jxGy0#zDz%L2wd{6c&U^5ZDRMY z_^n~o$g~SNnwIJ3q?5LW?{VorTG#)R90xY~>HbIb8MO6F+}opfSpEg(uV(rz(^aHU zJIg-DvO(fe^q*Pw2UKSIT9)&b)$n&r{~BG#{68`OUzomuX&c}9|H$+wi2nuGa zvHWJ@+tAn1PUb(ux~tG$rvF#szhe2d#6Kf`34MX-KSRw--%8v~{1=M*cGAb0PNV%y z=ZUZ6zF;qD?nL*J=05aWmj45N%nc{-|GtI)_g(sbi5r=BeTe`2BmBR_Ugmu)_p?6m z;bjs$j{h5l`R#!S|EE5B?=_5nUn9-LExaq-O5DbLJ8{R$*GT97W%_@vq5tO^`hTvW z|K}R|fA}Bm=-6ZbhxC`Ok%6Uaxc|KdKg2ck|6C&@#G}lQ5s!a#jZ6?vGCxH;&H5SQ zS?1@6=h;4XFaO{E;5GbC1OMOtqvMi#aNi?Eez`pV-O1 zZhZd_;yTIxM;_$;fc=mBD1d?|+5aevA}IRu0@t+#X@2yAw2)8h2>$;W{QtxqJOgw- z!TVN+dEbh-=VjjiBkp7VDDkm<3-teS|BnXI5E@1!XcUd1@pIh&qd&d>$Mg)EMRVx? 
z^ZtK;_t8)WWgo+rh4QF?il~IjBMVX)SdhGF1KQ;8YP2)cub@@*8|KdtyIAK(Utrv2 z7rGNgnE!L)pP=W_OXwNoKyK8^`ex$8w0Spi7xT|C{dY_cb1o|4qbwUoFS7gy`piD= z^O=8{={WH&<}<|i5!+ezL*k#HXV6Q?#j+ynN12WzAM+uUX8KnwZ({mY;=e_2qBiFL zfp!0gZ2Zo^C(s#|M>)5fIJcXLTi$2?eaQa%)gQ(B|Lqt4D4iS=U99USjrITAkN;83 z?>F5`ei5#%4cueck)y`(gBmaJ{ej#}dytoPKIFgbR}$d32&OoGI8MUE5$5~JV*m}J zAvBCe&?p*1 zwS4ZD?Xp8YFW1Qzx4dIJ3fJ&Asb4xyi+r_j^rFnR_(i++xtL(iiZ&|jk$ z(M#xW&@a#t^h$d%w7rH`}h|TTNIPGGWnfg=X8TYsv~|Oz2OVFl5$NLneJS8Zu#1(S$uc zChX-O>U^;}GfKGLF=E1AuN9gS*L%lJ*gt8)d6V0Q-Q>UF%bBppl)f=sHsOe=+s23~ zchh6iZ+a3Yj1QPFVOpT(F*Q{4hD$p-O&Az9VV|jw9y8_Yc~j5dIi^k6)@Z`c;otRN zoOr*-nmB6O>UTqCtKZF=8vZV4>h`|V`TaR_wA??6OwumYusKFDn}h$YixkcHJnD?z)waiOlLiD-6OmA%I@1__wBO#4%xj=c9RVYt;|fw zLX+iglciPDJ+kLUVSnwpRrc(ZJ-cMjZrQ`ay|U*v*>k(>xkL8slRZ`j7SSHFsYSI{ z_TDJ$SGwT$vi>$JyhAL|y(X2lr*D(nZWMO4g?*b@Z0+sa<@Ote-F>@NDXS@NH!H3E zeuvy~qp;(xhPlHmw)Xu#*>|I`^Q~suXBJ!g|4zB{&b#Ex@)fyD_zCE{(4WiQa`&}z z_ucXrvS0Q;DEr^@wjGwQ%2%J0ugTZ$k*_^_vwV$ZPdzF3$UXPTJwKFteku1Xl~7sk zm9NX!_y0h?A>a6_eB+zHlyBVsn0(`id{e%8zkKr{`R0?t!UJ-^e9(Pz;6XX?kR14d z+$Z0XZ+%t1MR-uYE#JORzWx1sWbj+$Z1pnSAG2`AfN9?*FFTfB!-Bklg=Wx&Qn33aKBE@5*;yknhR&?vd{ulD`(*h2gYuv}Bo7_nV~M~5e^?&=nmqhNdH6^21Np%LJ_qCx`Jwz9`M2_@ z{FVGW`S_RO4rTmzdd_tahNS=89N%@KV#C#5+r^Q5shvi9mk}Q8J zKZWh5kI5l9WIpI;a_AX3^pYIWJWb;t7E+Pu@P0Y`kQ{zk4*y6FKPiU~JuQd% zu-dFQ37%#41vyO1T?}{0GxE%SdFJc#%tP|b!}83}rVY65SE3AEHueNvn;g8LaQva$wIpNnoGi@CLQEFovXGF4q%5RlAuS6z zS;)&mK^BU#P?Cj;EH=oZT^1d(=#<4qS#-&wTNXXC=#@pEEc#_JAd5j+49Q|x79+CQ zEQ>9&*eZ){ve+(*9kSRdi(RtVEsH&}*ei>DvUpS$kI7=cEDp%xpezo_;;<}^$l|Ch zj>+PB1_}4G$BiqvNR=2)3P)pOS7^xCrk6P6qBX6 zEG1+qDN89?N(&DCrK~LFWGOE=@t2CiHx#GUt6_OHDzEm-s{``tl)O48ug=SINS4F0 z9FgUyEH}w=vn;pBa;q%2$#T0acgS+5EO*Irw=DO_a<42OmE~iy+%L-mvOFluL$W+7 z%VV-UF3S_LJSodlvOF!zGqOA@%X6|!5lLB2$#PbfbF!S5<$^31Ww|8FWm&GsN`tJ} zWyK*YPFZP`6_>2IWyK>aURm+UieFX&vJ#Y)kgSAdB_b5!F9S?Q9MZdvJ(m7}t9OjZVEWmHziWMy1dCS+w&R;FZST2^LcWmZ<^WMy7fVzLsK zm4vKhMA+vIqc9Pg3iy>k4R93PP5LvnmXj*klF|Kk&Kd{T~2$?+LE 
zJ}1ZL<#(F?L#lGU=TR^*KadBZMmIOGkdywNCcxa1ADyy1~Iyz+)m-tfyC0eK@RZ-nHH zu)NVLZ}iEV4f3X4-gL^F9(gk;Z-(T}u)NtNZ|3C9lAQF*$)KE!%E=}<*&-)9X@7wlv6`; zYFJK<$f;2|H72Jf<Da%xsi&B>{GITe#raXFQcQ%O0Ml2chZm6Nwde(jLA zl5dTeV9EqDCP>R$WrE*C`TRO#f~*OO1i$H(-!$k+WGdl$!F|zBY&hBB9jS`+3>W>+JeYrZ>ltDsx(<&%Cadw zX1ghq+my*|%H%d>l1`bVQzq$@*_>3%tU7C>Bem)u$qkY78Ze(vR*uM^^|5S zGessABY`z*N`%(rY9f=OKFt&qv!>`-vz65hRwGzVFeh)%^P%U&)IRoh;x$VvrJiOdbR#7b25=qCRcaBw@-Ss4urNPnbLtrfLM%)^#Z6>I;EQSm{lLIY5#o z5rH*p@=h9>dSp_VB9bQ4q_woZhnTL2*f{W7^=d2VaZN^Mx`Ir?sKBhPVYqnll=Ie7-<(M|xnTRcO z^=YQ^e9X3ala}D)h_q(Tq%djoOr37v+$9KG!RaOwMXaDc9W~RY1CGcFn&q^e57`sU zm|)flDC4yKVqo&MQvv}~4n8KO<8&jfOGHY7`XW;|GiN0fsrlh4~@CZHInN$E7j zI5|`a*n;5W$Rsx#I?aZxZ{&NBV8{d`CLo_ilMlhg$ZX1-wSmdJ(K-gLh>Xmf$;y;S zU?sj7Sc&VEHD7$zabf_gf0?2~}#?Ya7d$*aDV zIit;VJ&8Hp>d=tar>Uzo<(AWKla{~~OJp`N5h+Z>mhSo*t30ObYb;K;Db4+M%mi@) zYg;ll+g{9{?>VO7b=Q}YA2S|PrpF{FxHw&(V^fbQ)WdfS*Ofn=V0`l=#bn_#D@fos zn-SEfsk^`4_I^_*zp0bo)X8tMCHOcpTbep?_L_hU{Z zB4--pj8o1u${Dwu@yHpkoC(UAkemt2nI<{YDQ5=c%&?ppl`|7^W=77;$(fj(NywSJ zoGHkevb@tE@7U!XhrHtwu8v$A-|{w9Fj<=s3XXSWDJ?gDT zQ}rlbkFr)YJ#coKl+%M&G~<=CGvqNt9 z(~WvjA37>$bD^`@xSY*Toy~XEqX9Wv2%XKdu*f#0hO^}qk>hMRLlmq>P4%d~9`)3t z*x3>rm56{RDpQqm)Y`nQVep+r;aeCjPV_VHTWg7?pF*UZKAm z*^vV|Q6q98H}W7a@*zJ8pdbpNFp8ikYC_GZ6}6#mWb*DqM^Qf-K!a!qjiCuNg{ILA znniOcj?!|@MZH{<@1i_c2kJy!h`e0nt^`V=l$>+J?S|V8uNxjWJZ^a0)Za~cZt`=Jmz%uY@v4q23ELpRhGP2YuOF3EgVV1J8?87YO zW!Z;W%FMD4v+Tnx%vs9MQs*q(S@vU=GP9JKrR}pYWZAFT9z-3o)FDgTWMRs(PqMU8 zmVJ_CpJdr5S?ZT%pJdr5*>S`^$ zXO`n2n?YHWLwN*qwuoq>Y#CMLTn_#m{5kk@@aN#q!JmUa2Y(L!9Q-->bMWWj&%vLA zKL>vf{v7-{_;c{*;LpLIgFgp<4*necIrww%=itx5pMyUKe-8c}{5kk@@aN#q!OzR# z=W_7p;LpLIgFgp<4*necIrww%=itx5pMyUKe-8c}{5kk@@aN#q!JmUa2R}a|bS?*f z4*necdHD12=i$%8pNBsWe;)ok{CW8E@aN&r!=Hyg4}Tv1Jp6h1^YG{4&%>XGKM#K% z{yhA7`1wLRmxnVCXCBTxoOw9&aOUC6!*0Dl4g z0{jK|3-A}K>1YZfh5`0{Z&v7X}SAwYo4?muAt^|jeN~2~3`-f7GAw0S%CMASDZ^4`pOxV$v(L&fmDy)yxXK)p zW!TEFm0>HxR)(z%TN$=8Y-QNWu$5se!&Zi^3|kqtGHhko%CME$cV+m>?7K3IWf-|Z zIah|W%zi7wT86a(V+F`sU!ndL>R+M$73yE1t`+K9 
zp{^C`S)ra4>RF+l73x``o)zj@p^g>mQ=vW;>QkXU73x!=J{9UxDav~dQPhH3QJl}k z=)aq)&!paKNU_#L)|6vH-gC1#m-+X&%)jR*8+SMAL9_CnCxpT%BJX(}s1bRP4+Rn1 zdD+f8j7HHEpNr9dH&vgRea}m#UK3eUKD)fq`sB~cn>P!8o$;k+GSdl8l7yu*9mK^r(I0jrI1+MIWgvxByB zkh5d(ytC=Nvv}S~gPEDB^Uk)5YtK6q=bhawt*;@?6jMWJ*yy}7U{W}!kC`DYZSSC_ zj!~oYjtLerMZF#4M(1f0$DCCddxm+6bxe4C;*0dFRNV5;Ajf*$maU@-yX_#pj&^R=M6fY<&@R zzF6KY$!6;%&7L>>rd+0|M?-yPjs>KrzQEBerXc3Y2$~Qxrn&xGnHQ&^sW%oo))6yl zu{)f1#F1g4mFtnUm%yStzqQ=t+Q(E9rHo9WRt2mXYp+;E8$gZ}2Sv;}yjzm)*lL7N)f7PdGWhdvUbqaiZoS6!9N(@=08jvizA zE*&i6Gs{QE0cRCtLyE)9N{!<}lE-jLDHHPFE)fu|XhxJD}QToY!0OA4Whdc=K;a)^OJX+y-(fUGr5$yzIF>q7&G>5dLWew`^<>n_V$k4M%<*=B51)+T0A zTGl4xvNoNNwYjjYC5&cJR@PFK&$Im+&-Sb<^aSy%5z3v+KwQJnRu5tgkRxY55RJm8EHcUyCD`%DKW0ixy$|+gp z7^^l;BGS;GQ+2se2(_YNG=fM&pG}oM8yt^l24$q`CcT?{-K6Ixt?F(=?WhA$hMVo( zq^D1&O5aP>%{Cq%3Zh|QU-abTmUh3;DNY&>?l;evd^5KrE>hD2A zD1nkv4X|y12go~+L*x^LEl55=>KUYrApF5BBL5)#Av|u6 zQ!*(W z)VrUyA0WR0+GBuv4U*3w`*x7BhB^`34pH{dEFzB~(hsq(hADrTvWIDxVd^~0_Iwvq zhpE#r$JX%J`V1JuH17x~BcP8^%@LY@gd`(mKSuUr6g5tD$Ju0pz9_K3mFszy)&lKgQKu)nfRg`KPt8_V4=~AlZ66;>FqF1R(4^oxhqbj{dReEiz^vG1{iK)^9Q>Evm zO3z7^-jOQ3BUO4rs`Ow~>7}R^$d+D+D!mO=dK9Yk6jXT%uk!R=<;l9rlXaCR=qgXo z)e`yf4N>Jux5^W2l_%FKPoPzvI;%WARx4ogWLT}xyp^1+^YpOZ&@1cql&m}Ks8!av z?OEprV4aKKIz7?G>-DJ?q;6Hxplq~*4=X`F6*5A>-4g&d(yJb*}v{( zTiRgV+k<9N60yz~KrN_G*8OcLChMH#>wySjd4ReEil{8>LFyDN$$E%RJDNsWS?7#f zZ|^`|h`ie85pC5$-W`<18Modsghph&GrS(B8l6 zX1>Rb$gRgrQ&KOjgUfWim-L*Y>%Fv4FRk7ST^}vmN49;`tdAD$qnM-A;3(;iB@nIO z&vgGWG-*^VM%Iem7|CUQ0PI0_=O7ylk`X8L`f#hPk2qv~l&X)h!8p_76fnuUDdH&# zpQc%+;h3fpGo+sZl{0vKhTS=nK{UfG19x`aIL~ zNm=KjzaATt^*EsMimWGSpaf|XG+%0Dn=vP}}mBbbJo}n&uim%g&y-sKDIvug=c^aCF=sF#e>vTe{)8V*IXW}{?bL(`(t#j(E z)5*3@=h8ZzDC?CeRF;i~9&{AtWW(-9Q)nJ#P)Rnpux{`aw!sb6hLdRqR5rMvZn%7C zRyN$dvO%xKh9@c;9O@e!>Kop1G>OQ|n?h++Ko!}b`(eXJIv;uYNJ|$4F9XO1T?`wX z3LA7OY|xdk5n!HEU?WIAT$naOdD#dzBDN1xb~u1WP+T@5)SrvbMwI0o`WsE8X`)^n z{u@m(*=VM|&8%zgl#Lb`TUggJDjThA)5@~eV`vyr?^f7b$+IH5RmV-gmBpXAd8)ABhx(!bv+J=!%bNp{`rP&yP zb%c8H3oE=K~(?aoQ`+vIKF0dL?L! 
z1e}})8{BkmB-u7eos%#p+5aipGDRDtNS7v`G}~s_uNmsWfYSyyr5g;=Y;cpg!Jx|q zoxU4%>~7FOyFsVvMu~dSS-L@|=mwpj8+2}Na7oxGvtQ`U+oZE;v%!Ues2L?>lbi2N zx@|V?b7&rwQAIW#J`_MRC@z}}SZ+45&eMs8&^VeztoM+Qrzo3V@}~!Hlb*XxFX=ch zHoeDWldiT+y4p6m>D#1xZIfev(@)<1Ce(*wh&=oy*$j-JQP~W#JV-ggv}}f`Yp4Y= zAEvx8=_1sR8?;Ss&^9BzXbe#XH)fkr);F=fsY5nfD7%GaE#%)q9b4OFvu#*5J3@#y z;bv;Hll(i$w-fFzmUl%Fd3KQ}$M$B|lx*_lzS+%m&!BAfh7tUIEboKmD0v(uuVVwU z*-snrCB4ZF*5&|p8=!3msP~WqMG$owI*Ld;L|Y88-7w{iu-ypt8ljEpBHSFMeMYJ4 z74DHXI)8;H~G@p^p7adh4ON^xxP-MpEm9n-WU-_4sD z+B?H?2B0>%o!MlNXOn@9O$IME8Hm_q0AiEdl}$R+H|a#*qyv1DLHSKOy=x6aC@VF- ze`|CT*6eXql$s-gnoz6M=vk@p_+R6Jw&pBJjlu6)V~^DM9Tm zwjR=O%Te=^H^1gy^O0wu19eI**eJCSAU|y2z)Sa=NK^H~DsxwwvYMvk#-M49TiY6gmbE;2F*sSvQ%431YYYU|7znI!J+3jx zRx7YQx3x6})@lr#)wn*_xV5bnsVf6wHI9=SgHbgGpK1&=)flv?F=$g`0H(&kOO1gS z)jKAtZ%)*7M$}BR@IoB2BL{M#M&v?nt^6hTqcgiKy7s1>!LcGQ77 zQ5Wh)J*XG;p`+*+qE7PzXb=sdVKjn95p|s(M-ylgO`&NtgJuyN^YbW%;wXWVD237p zMxJ2Qd=BMN0TodRl~F|$`$xs#kHH^&ze++)^&r}Tl z82mB#WAMk|kHOD(gW}i96#HGp;E%!2FQBOy{4w}r@W&ze++(}z*G$W82mhmDgHZ=;(DoK@W|{M=6{?yD6~ zuZr(%#do$!z@LCW0e=F1zQYyY;VJgc34aoPu3ah#e-i#A{7Lwe@F(Fgc34ap)B>YMElkjt| zr;_j|;ZMR(C#m9DNG0Jgc34ap)B>YMElkg|u=Xpsb;ZMQObCcqpQPG2;Qt+qX zPr;vpKLvjZemryv51qn8r&92z;7`F%kB3UZpMpOHe+vE-{B(q>6#OapQ}CzY=bluh z;OAegs}%ex_*3vRfT>dObHA#%M^GvFQ}A=GP$~F%rc*rADSB2^3jP%Q^suND{M=8d z6#OapQ}CzYPr;vppZg5OGoVVrpMoFrm7-ThrQlD&pMpOPe;WQY{Au{p@bk>5((tF@ zPs5*vKMj8x{xtk)_|x#G;ZMV#hCdB|8vZo=Y53`^R%!Ut@TcKV!=Hvf4L|>8UZvqr z!_WP>O2eOqKMj8x{xtk)_|x#G;ZMV#hCdB|8vZo=Y53Fdr{Pb-PfwHLo=S0TRcZLs z@TcKV!=HwqUMQ7@pL;HqhM#8wm4-h9e+K>x{2BN&@Mqx9z|S*-%D|t2KLbDaYAOSN z2L25E8Td2sXW-AkpMgIEe+K>x{2BN&@MqxX`CHKortn@Wyq5~^rOLpcfuEi+MbDVZ zz@LGi9x_D_naaSQfjx{2BPUZ&chjDtgjX2L25EJmaVg{2BN&@MqxX znO||gp)&Ai;HTG3(d(u%@Y7kZ=&V;f7b!aI6-GRT5l>;nQyB3SMm)vylA;q|(TT4x z;wd`v6-GRT5l>;nQyB3SMm&WPPi5i9h^H{(DU5guBc7tuU(x5FFyg5!{21{R_dY5M zKSn%-5l>;nQyB3SMm&WPPccZKFybkC1{Fp;g%M9-#8VjY6axneBc7rULt(^I81WPX z2?`^g!ic9Z;wc6d6wi2yfd$3Dg2ITWFybkUc#57zg%M9N(4a8lDV_rrMm&WPPhrGU 
zJQFI6cnTw)!ic9Z;wg-H3L~Dvh^KP!W5iP!@f1cpg%M9-#8VjY6h=IS5l>;nQyB3S zMm)tphQf%aFybkUc#1&{g%M9-#8VjY6h=IS5l>;nQyB3SMm&WPPhrGU81YmNevEhu zBc8&Dr!e9v`fe0PJcSWYVZ>APnktNViY`;dJ*C2kr!e9vjCcwoo}wF7VZ>8(r7Dbg z3L~E4zEjcvqcGwr23ZtFJcSWYG1#IQY*85TR33hecnTw)!ic9BfKeFn6h=IS5l>;n zQyB3SMm&WPPcb;7FybkUc#3;kg%M9-#8Y|r^YGKxr1J3R;pcu=F_5D$=qU_(Di1${ zI*NN>MOUxFprMNpWAUxGz^2^b`g?#r?UW3tG_yt$21*81xhdJ%vF}VbD`_ODha|ias@k zK~G`OQyBCV20evAPhrqg1^6-ODF(0<20evAPhrqg1^6-ODGYiFgPx*$TVc>s81xhd zJ%vF}VbD_;^b`g?Mc22&prYir_Y{L|3ge#QSwzuas4(s+ zo=H>@ehhpH1E0dcrx>(U82A(hK2?Oj2tUs)3L~GQk5SRbs4(&=jC_jcUxkrR72(Io zr+5}t82J=NK82A_VdPU7`4mPzg^^ET(eF|frD#4GjPnF=u+^0(LWAIZY_%ZpZ68sqb6oZtC;Rwa^y~60H zF#0KsehQ45%;zD*C|` zLnMkJ62)_+;<-{`22_{<6=pz%8Bmqs#|)_GFH@KS6+2oC zh{6o0Fas*gfC@99qHjdeH=-~EDhz=NL!iPCs2KcJm;x33c&ZFP1Hq~Se+7Pwf{LLY zg;7vd;HRHkVHQ-F1yu!p41=lyKc+#&aFD9NUx6P3p~66@Fc7K={1x~s@M9)a3>hiR zgbFjE!c3?t@K@lkz+ZvC0zU>rg~3o^FjN)zF&QdMh62{oe@)QZ|rJL*84s0($Y9@LBa&{1>@^`ika zh=$NG8bPCI42`1+G>N9rG@3!PXb#Pz7>c6=N}?1>qYTQT9Ll2tDxwl9Blzv`+u^sv zZ-<{BtCe@6`0eo9;irF1kqIe%cN{ea6}jKXyNDhu;oAwm)r$AM2mC!%x4Sw!_c(inhaV zhu;oAea)J_W{oXS)9kr>1~p?dn*M1`|Fm|% z&k&||!0&+H0lx!&2mB8B9q`kKtuYg7`mwbGeh2&x_~~QR4*2P3)DHL^@YCO@9q?mB z(GK_>@G}miF(7IO{0{gX@MA{Q4)`7LJK%S~?|>hVp~g(89q>EgcfgNJP~#HRI0Cf; zeh2&x_#N=$tJ4nn>08&BsWeW8czP&a@ML ztYjLGn0CU?kg#U#OJgq4Sh+NQEsaY{(;utxRB8NDnjvG29ZF-|(b%Ch)+deiNn=LQ z7@jo7CXL}q<6_ddm^AJ!?Svo8lE$c{>DSd5l{7{r?S$V6zY~58OB%zH#;~L@ENKi& z8pD#N4_ISb(oXn!nM(6AmF8tC&DezIWh&hWKQB{hUZ&CvWourh(v9%*GL`0KD$UDO znwP0GFH>n=rqc8$YhI?(jqvkAmF9&i-3Wgp{EhH8!p{pj@bl7@ZiK%PeoQ%j@bf~JZiK%P{zmv4;ctY$5&lN_8H3T7d^9E>?SkJ0zYBhb?KNYR+66x^b7_X~ zHT~q;1-}b^hVwOsAkC1zcERt0-vz%5ei!^0g|rKP7yP^grg;fW^AebL!S90K1-}b^ z7yK^xu?1*e4%56Grg=F`V=~g1j5IHYX^ci1%Yepcr0J*En2j_qhiME)n(!2|V|>yWpR^l( z#uqehb&XqHGv=$^@bi+IcEj(6pO@9N8-6$ZSR%9=emDGX_}%ck;pb&Gjd@CAp3=O~ zrrq#kozT44rg^bVW1-Nz*rplB(3q>V8-5H{+6})OemDGX_}%ck;djIDhTj9f2YwIy z9{4@*d*Ej*MDuc-=H)nzNlSa+_rTBih{kQN8MoGqlW4}SHDe{(13xd%X%GA!_&xCR 
zBAsTOPxCUJ_Q3CfAETG%#X61ML-TT-#_***@O$8AJWzY!_rULg-vdA6gW3Z>V}#lR zzXyKC3bhA*5BwhZc|lKO9np**Y7hJv$TT(*?SbC|zXyH~{2urjU(_D>J@9+r$7-S( zZ`7CtHHI_IxTE&M?}Z=pnZ|~qd5b`M;rGJNn56c??}gtBKjZeAw+l3H7icg1UiiK6 zd*R2prWwD~UiiK6Gmfdf@MC7vUicZ))L!_#@O$C+!p|ED+6zB#DQL`#8b5%>_@?m$ zXgmSh3%?hBEHT;(zZZTl{9gFI@H1wm@eOEv1KJBe<5$`XKgKzYk3f6j_rmXmA5Vey z!O!ar+6TW6eqM9XKKL=%Y5WG-2S2YoXdnDoZ?q47jCa}xzYl)=2-*j~4}NSp+6TW6 zejofk_SI|EA@hoT`{8)Fi4}Kr~KKOm`<7Lo3__rZ^M zLi2it_QUUopVu@ruW4vM{C@a(T|;9o)PDFGYu0}F{qXzY_rvdppYdnyhu;ssAAYP}F|-8LQTQ`2Fzv;rGMuhadBz_QUUo-w(eZen0$v`2Fzv;m6~l zd2K}V+K9&Mq5beP?yddsGxn`9K59Swe)#?H`{DP)?}y(HzaM_a$F(1RKl}ms1Mmmn z55OOQKLCFKehid406+FB9e_Uoe*pdf`~mm_@CV=zz#o7=06+FC9e_UoKjWo30Dl1f z0Q>>?1Mmmn$B?N5@blV>=Cv2iYcHDDUieW;1V3ZE4x@P;M)Nw1=5-hy zfIk3#0R8~{0r&&(2jIu#sRQr_;19r$;ZtMy)B*VM#pnS1*u!)H{viCAgLM%8ApDH& zYK);8W2nYHri1VY;Sa(egg*#>5dI+iLHL942jLIGAA~;$KZa5rgdbC>4#FRVA9JY= z!XJbmPl67@AA~;$e-Qp4{P+}f5dI+iLHL942jLIGkC#UW;Sa(egg*#>5dI+iLHL94 z2jR!$tb_35chEujF|_I+{6Y9Jw(21KSmJaLeoU@91V2Vs9fCguKZaM0;Z=v=55XUT zAM>kbyjzFh55XUTKLmdWeynvGW30woq48E|46+)loesetfuZKA^5TEX^gfy1b+zr5d0zdL-2>-$AGIt@Q2{Xh^s^J z<9X5{_(SlA;19u%2TEi8(;@gV>go{un00j+{xJNQb~Uy@jZaF4;Sa-)UrLAJ$1|nF z@Z+1(Vfe%Fhv5&yABH~+e;EES{9*XR@Q2|K!ykq}41XB@F#KWo!|;dU55tcQQHS9V z!ykq}3_sp19fm&)e;9sjiaHE`82&K)cvo~7{xJMu_`~qy<0)GU44E8z#e+2#r{Fv=^1pWy8cx-e8etc&dL$r>-AAvstKjvtS4O3%~ z))Dye;^+wc5%?qUW17|x_#^N~;K!$3;gTSwuK!XJe{3V#&- zDEv|Q@r39o{89L$@JHc~!XJeni>b!kt)uWq;g7-}g&*&oj=~>>KMH>o{wVxW_@nSg z;g7-}g+B^^6#gjuQTU_q|KA+lNlu+@ux{JF?_DF2e!v(EHmNZ=NT*phX7u5Xa|&+* z9%)Id5@UY<+5eyY|Jnba{r}njpZ$)=#}WDbXa9fp|7X9uruonQ|Li}p|HS?i`%mmY zvH!&W6Z_o)&WZhQpytH>6Z=o>Ke7MB{uBF8>_4&J$?Tlie`5cM{U`RH*neXGiTx+` zpV;rVa8B$$vH!&W6Z=o>Ke7MB{uBF8>_4&p#C}Kb;}&sF?1wex#QqcePwYRj|HOWG zigRNBiTx+`pV)t5zw_NWvH!&W6Z=o>Ke7MBerLUNV*jcAr}m%Pe`^1!{caxT)c#ZZ zPwjXAIH&fX+V4_4;r%>Fa`-H* zpV@zA|C#-6Oy|u0GyBi%KePYL{xkc}?01hhXZD}j?=El7?01_tXZD}j4@bN@!I)e{}20r*zdk|e%SxR{vY=Lu>XhsKkWZu{}20r z*#E=+ANK#S|A+lQ?1!Jmec=4C-;M12u>XhsKkWZu{}20r*zb0Be%SxR{vY=Lu-`51 
z{ILIr{Xgt?S35uK|6xCNf7tKlc7E9J?snYWj??t{VgC>N-Qdm-`+wN~ z!+!U;^TYlh_PfiSANHTye{TP|{pa?d+kbBVx&7z%pWA&+T_^Kj-$J+kbBVx&7z%pWA-R;d!`+wU1(|&kne%k-jekc9&)Bd0K|FqwI-~6=yr~N(cupZ5Q>|EK*w?f+^2Py5k<`Dy=8`+wU1)Bd0K z|Fr+7{Xgw@V>mzU|7kyFFhA}8X+PvMKkff%KkPH^5a*};7{dIt|EK*w?f+^2Py2t` ze_{WH{ZPZpT3;QqZ zzp($p{tNpr?7y)8!u|{UFYLdt|HA$Y`!DRju>ZpT3;QqZzp($p{!9BW?Z34D(*8^P zFYR~FJD2uf+J9;PrTv%oU)q0Z|E2wx_Pb%9OZ(ma4$m={_Fvk6Y5%4Dm-b)Ue`)`v z{q91?&HG&1@9us0kGZt}(*8^PFYUjy|I+?T`!DUkwExonOZzYFzqH?N>0H`>Y5%4D zm-b)Ue`!CsIG6Tc+J9;PrTv%oU)q0Z|E2wx_PbM_U-tj9-~Ijkvj3O;zwG~I|1bN0 z*$<}7FZX|FZv= z{Q%AUvY#WEU-rAXonQ9?zQL2 z{ww>h?7y=A%Kj_+uk63FAJ&;G`>*W3vj58dEBmkPzq0?z{ww>h?1y^h%Kj_+uk63F z|H}R=`>*W3vj58dEBmkPzp~%G_*~h4W&f4^SN8J}b7lXP{a5y1*?(m}Ff>>8yDc8K z#pAYk+!oK3{a5y1*?(m}*gAa0T-$$b|F!+s_Fvn7Z9jB0*Y;oAe{KJ@{nz$i+kb8U zwf)!jU)z6e|F!+s_Fvn7ZU43X*Y;oAe{KJ@{nz$i+kb8Uwf)!jU)z6ezq{vg_dM73 zU)z6eKkqT_py%5DYx}S5zqbF{ehy@=?FXag+Wu?%ukB|;=GuO@({pV}Q$g#{L`oZ|uLZ|Hl3s z`)};OvH!;Y8~bnUzp?+u{u}#m?7y-9#{L`oxstiD|Hl3s`)};OvH#ZoTl;VAzqS9? 
z{#*NR?Z37E*8W@jZ|&y{=GOjO`(fa@wg1-sTl;VAzqS9?{#*NR?Z37E*8W@jZ|!Fk z=GOjO`)}>PwVzoS-eq{LxwZe+{#*NR?Z37E*8W@jZ|%Re|JMFn`)}>Pwg1-sTl;VA z=N<-7o?H8G?Z37E*8W@jZ|%Re|JMFn`)}>Pwg1k3PGautzq9|&er96s?7y@B&i*_5 z@9gI&=FWZqdhYDMv;WTiJNxhKzq9|&{yY2c><2A}yO=xs@9e*`|IYq9`|s@Md*;sm zJNxhKzq9|&{yY2c?7y@B&VFuV?(Dy_|IYq9`|s=*RhT>bVeEmi=g$5+`|s?(v;WTi zJNxhKzq6nHm^=IL?7y@B&VIgV?(M&~|K9$4`|s_)xBuS$d;9P0zqkM1eimu&?dR&| z-u`?0@9k$t=idH%`|s_)xBuS$d;9P0hs5XJ{(JlH?PpNt-hQrW?(M&~pKqFb`|s_) zxBuS$d;9P0zqkM1{(JlH?Z3DG-u`?0@9n?0|K9$4`|s_)xBuS$d;9P0zqkM1{(JlH z?Z3DG-u?&snb&!+|H1wT`}x;-u>ZmS2m2rFf3W|-evWV+?B`?W!TtyPAMAgy|H1wT z`ycFou>ZmS2m2rFf3W|-{s;RX?0>NT!TtyPAMAgy|H1wT`ycFou>Zk+PG=tMf3W|- z{s;RX?0>NT!TtyPAMAgyAH1Ij`ycFou>ZmS2m2rFf3W|-{s;RX?0>NT!Tv}4AMNML z=F$E~`ycIpwExlmNBbY`=O*XT{zv;C?SHiY(f&vKAMJm%|Iz+O`ycIpw4YI%NBbY` zf3%-Bnn(K|?SHhNJ(@@R8Ma}N=F$E~`ycIpwExlmNBbY`f3*M6{zv;C?SHhNSsG!1 zd9+_#U>@y%wExlmNBhMFhK-v?`}w$GK8JCdNBbY`f3*Lv{eSHjC78eV|F!?G{Q?E^ z*Z#lu|F!?G{eSKMYyV&S|Jwi8e&%le+W*)7zxMOeBWf^z?f+~4U;F>sFLE${?H4+j zzxMyN|F8Xj?f+}P_`&?O|F8Xj?f+}P7{c&a!($DrI)Cl|YyV&S|Jwi8{=fGBwg0dE zf9?Nk|6lw6+W*)7zxMyNpL3nR_Oq_@*Z#lu|F!?ger9kO?|HKS$^Iw%pX`6K|H=L* z`=9K8vj55cC;Ol5f3lyI9d(9zvj53`m4Rf*TV$^Iw%pX`6K|H=L*`=9K8vj55cC;OEl=E?pi`=9K8 zvj55cC;Ol5f3p9{{wMqS$YFZtpZ#3#{Ima`{r~KjR~vS6{@MS}{(tuWv!A7$fA;^g zp8*~Qc>dY{&;Eb*|Fi#}{r~Le=jNaN|Lo`J=AZrl?Eh!~Kl}gL|Ihw^_W!g0pZ$#e z{Ima`{r~J|@8+NV|Lp%~|3CX#y!mH8dp!T_|7ZU{`~TVh&;Eb*|Fi#}{r~L$Xa7I@ z|Jnb~ensZ_XFsz%|LlLZ|Ji=NZ=UUcw*T4wXZxS+f42YGe!h90?SHoa+5TtypY4CP z|Ji=Nbe`>hw*T4wXZxS+f42YG{%8B2?SHoa+5TtypY4CP|JnX$`}x#)w*T4wXZxS+ zf42YG{%8B2?SHoa+5TtypY4CPpG%x)`=9N9w*T4wXZxS+f42YG{%8B2?SHoa*?x|3 zp6%yh=f!?qgL$$4#r_xjU+jOe|Hb|n`(NyTvH!*X7yDoAf3g3?{uldS?AJw@7yDoA zf3g3?{uldS?0>QU#eOz?UhIFd|Hb|n`(NyTvH!*X7yDoAf3aWLWM1rlvH!*X7yDoA zf3aUQXQU#r_xjU+fp%o)`OH?0>PJQ=eD+U+sUj z|JD9i`(N#Uwg1)rSNmV>f3^SB{#W~7?SHlZ)&5ueU+sUj|JD9i`(N#Uwg1)rSNmV> zf3^SB{#W~7?SHlZ)&5ueU+sUj|JD9i`_)|L)&5ueU+rg3=hgmK`(N#Uwg1)rSNmV> zf3^SBe%5|o?SHlZ)&5ueU+w4b=hgmK`(N#Uwg1)rH~Zi0f3yG1{x|#I?0>WW&Hgw0 
z-|T<0|IPk4``_$;v;WQhH~Zi0f3yG1{x|#I?0>WW&Hgw0-|T<0|IPk4``_$;v;WQh zH~Zi0f3yG1{x|#I?0>WW&Hgw0-|T<0|IPk4``_$;v;WQhH~Zi0f3yG1{x|#I?0>WW z&Hgw0-|T<0|IPk4``_$;v;WQhcl+P%f4BeL{&)M|?SHrb-Trs`-|c_5|K0v~``_(< zxBuP#cl+P%f4BeL{&)M|?SHrb-Trs`-|c_5|K0v~``_(<>}RRx!~PHZm5k=Y z{tx>{=F|R9`#Cvj5BeFZ;jj*FTsq`@ih}vj59|-H!RP|I7X_`@ih>O@jHd|I2=*srj=1%l zzwH0AU)XNG?AOGZFZ;jj|FZwf{xAE#?EkX=%lzwH0A|I7X_`@ihh(wQ&&zwH0A z|I2=Do%yo=%lzwH0A|I2>OozdKxFZ;jj|FZwfe%+Dzw*T9HRjm28|J!~olKHm( z+x~C+zwOuSnQ!~Q?fn`@ik~wqM0-zU}|E z|J(j=`>>+x~C+zwQ6F|J(j=`@iky|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSm zhyNe`fB665|A+q{{(ty?pV0N}=l_TQAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{ z{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K z{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665 z|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&% z|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe` zfB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yK zAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSm zhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y z;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K; z`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{ z{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K z{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665 z|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&% z|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe` zfB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yK zAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSm zhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y z;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K; z`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{ z{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K z{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665 
z|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&% z|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe` zfB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yK zAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSm zhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y z;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K; z`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{ z{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K z{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665 z|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&% z|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe` zfB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yK zAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSm zhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y z;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K; z`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{ z{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K z{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665 z|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&% z|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe` zfB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yK zAO3&%|Kb0K{~!K;`2XSmhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSm zhyNe`fB665|A+q{{(t!Y;s1yKAO3&%|Kb0K{~!K;`2XSmhyRcNkN=PVkN=PVkN=PV zkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PV zkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PV zkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PV zkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PV zkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PV zkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PVkN=PV 
zkN=PVkN=PVkN@v~?C1aE|KtDT|KtDrAN%?L`2YC-`2YC-`2YC-`2YC-`2YC-`2YC- z`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-{?~r~KmI@dKmI@dzyGzL|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|L?&51N#r`Kd}G6{sa3D>_4#o!2SdK59~j%|G@qO z`w#3tu>ZjR1N#r`Kd}G6{sa3D>_4#o!2SdK59~j%|G@qO`w#3tu%G{r|BwHV|BwHV z|BwHV|L?&51N#r`Kd_(wkN@w${sa3D>_4#o!2SdK59~j%|G@qO`w#3tu>ZjR1N#r` zKd}G6{sa5@|M>s-|M>s-|M>s-|M>q7?LV~t(EdaF`TzL;4(;dvJGB4M{zLl@?LV~t z(EdaF5A8p+|Iq$J`w#6uwExilL;DZyKeYeQ{zLl@?LV~t(EdaF5A8p+|Iq$J`w#6u zwExilL;DZyKeYeQ{zLl@?LV~t(EdaF5A8p+|Iq$J`w#6uwExilL;DZyKeGSG{v-R3 z>_4*q$o?bykL*9PpZ|~lkN@w;{v-R3>_4)f|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwIg*#2YtkL^FU|JeRx`;YBEw*T1v zWBd94`2UXWKeqqa{$u<3|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- 
z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- 
z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s- z|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>s-|M>qN?0>MI|BwHV|BwHV|BwHV z|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwHV|BwIg(f&vK z`TzL;`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC- z`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-`2YC-_zuW(A|GWL~ z_P^WzZvVUe@Akjj|8D=g{U7#!*#BYwhy5S+f7t(F|A+k__J7#_VgHByANGIP|6%`! 
z{U7#!*#BYwhy5S+f7t(F|A+k__J7#_VgHByANGIP|6%`!{U7#!*#BYwhy5S+f7t(F z|A+k__J7zf1CR_rANGIP|6%`!{W1W_03-vD3_vmf$p9n+kPJXF0LcI(1CR_rG62Z{ zBmxnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbH zRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowE zST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTx zfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE1K9tu zUkzZ@09FlP)c{rvVATLt4Pey(Rt;d)09FlP)c{rvVATLt4Pey(Rt;d)09FlP)c{rv zVATLt4Pey(Rt;d)09FlP)c{rvVATLt4PgJ*el>tq16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbH zRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowE zST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTx zfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbH zRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowE zST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTx zfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbH zRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowE zST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTx zfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbH 
zRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowE zST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTx zfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbH zRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowE zST%rE16VbHRRh=q`_%we4Pey(Rt;d)09FlP)c{rvVATLt4Pey(Rt;d)09FlP)c{rv zVATLt4Pey(Rt;d)09FlP)c{rvVATLt4Pey(Rt;d)09FlP)d2R;el>tq16VbHRRdTx zfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%q>vj52bBm0l+R|8lzfK>xnHGn;`|H%F$`;Y8Dvj52bBm0l+KeGSG{v-R3 z>_4*q$o?bykL*9P|H%F$`;Y8Dvj52bBm0l+KeGSG{v-R3>_4*q$o?bykL*9P|H%F$ z`;Y8Dvj52bBm0l+KeGSG{v-R3>_4*q$o?bykL*9T|JeRx`;YBEw*T1vWBZToKeqqa z{$u-(?LW5v*#2YtkL^FU|JeRx`;YBEw*T1vWBZToKeqqa{$u-(?LW5v*#2YtkL_0j zST%rE16VbHRRdTxfK>xnHGowEST%q>w*T0EHGowEST%rE1K4BxkL^FU|JeRx`;YBE zw*T1vWBZToKeqqa{$u-(?LW5v*nTyDRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbH zRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowE zST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTx zfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE z16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xn zHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbH zRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowE zST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRdTxfK>xnHGowEST%rE16VbHRRh@n zzlwXwapH^uFbcb1m^H9R7$7Eq5DbW7cRk!TC?PNkirCR7-6e1gG><{wuClE1L)}$> z-N&H;L<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G z0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLaw zq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V z0Yn3c1`rJ(8bCCFp8NbXfM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks118 
z0MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT z(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G z0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLaw zq5(t$hz1Z1pcg(r4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCF zXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks118 z0MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT z(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G z0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLaw zq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V z0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?W zL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz z1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$ zhz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c z1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh z5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC? 
z4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1 zAR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ( z8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2 zKs1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4Immo zG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4 zfM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCF zXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks118 z0MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT z(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G z0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLaw zq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V z0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?W zL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz z1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$ zhz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c z1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh z5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC? 
z4ImmoG=O=gP$;~)hPQ$`kMNHm`yxCQtmB3Mui>-c(k^)H&quHPY}bv~zirrYnD3AJ z?QlPL-v7Gu@2@)_>^%O@Q?G~owqfUbq#rxiBR&3f_mr<2cTc(7@sxw=)*LK-1;;vA zNY23(X%2?(=U_5;4(8kDU=U=EZUN@#yf;Vx^m8a*#$o=tPg>?$g= zD-X;rPd~dP>+G_6v%A%r-Mz%@7LT(tB?ND6f9H7n^{wqXv}e~&JG*|O*)@>NNd_soX", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 1, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 2, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32000, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32001, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32002, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32003, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32004, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32005, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32006, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32007, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32008, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32009, - "content": "", - "single_word": 
false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32010, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32011, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32012, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32013, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32014, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32015, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32016, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32017, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32018, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32019, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32020, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32021, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32022, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32023, - "content": "", 
- "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32024, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32025, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32026, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32027, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32028, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32029, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32030, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32031, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32032, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32033, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32034, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32035, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32036, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32037, 
- "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32038, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32039, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32040, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32041, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32042, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32043, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32044, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32045, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32046, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32047, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32048, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32049, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32050, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - 
{ - "id": 32051, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32052, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32053, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32054, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32055, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32056, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32057, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32058, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32059, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32060, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32061, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32062, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32063, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32064, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - 
"special": true - }, - { - "id": 32065, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32066, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32067, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32068, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32069, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32070, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32071, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32072, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32073, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32074, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32075, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32076, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32077, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32078, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - 
"normalized": false, - "special": true - }, - { - "id": 32079, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32080, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32081, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32082, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32083, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32084, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32085, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32086, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32087, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32088, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32089, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32090, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32091, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32092, - "content": "", - "single_word": false, - "lstrip": false, - 
"rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32093, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32094, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32095, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32096, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32097, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32098, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 32099, - "content": "", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - } - ], - "normalizer": { - "type": "Sequence", - "normalizers": [ - { - "type": "Precompiled", - "precompiled_charsmap": 
"ALQCAACEAAAAAACAAQAAgMz8AgC4BQAAhyIAgMzkAgC4PQAAeyIAgMzsAgC4BQAAiyIAgMw8AADNvAAAmwkAgJ4JAIChCQCAgx0AAIAZAACBGQAAPR0AgDUdAIBNHQCARR0AgIAxAACBMQAApAkAgIkxAAA9WAMAPEgDAEAKAIA+aAMAAYUAAIQBAQADjQAAAokAAAWVAAAEkQAAB50AAAaZAAAJqQAACKEAAAutAAAKpQAADbkAAAy9AAAPvQAADrkAABHFAAAQwQAAE80AABLJAAAV1QAAFNEAABfdAAAW2QAAGeUAABjhAAAb7QAAGukAAB31AAAc8QAAH/0AAB75AABhOAkAZR0AgGNADgBi8AgAZSgPAGSADgBn2A8AZvAPAGlwDABoMAwAa/AMAGrYDABtSA0AbBwNAG8QEgBubA0ARgoAgHAMEwBzqBMAcuwTAHUoEAB0TBAAd9ARAHYUEAB50BYAePQQAF0dAIB69BYAdR0AgG0dAIB/fQEAhgwAgEGAAgDeCwCAQxgAAELAAABFSAAARGAAAEeQBgBGhAEASSgGAEhsAQBLOAcASvAHAE1wBwBMRAcAT/AEAE7MBACnCQCAUCwFAFOgCgBSEAUAVQAKAFRQCgBX0AgAVhALAFlICABYuAgAhBEAAFo8CACA9QAAgZ0AANgLAIAtHQCAg2kCAIJFAgCBNQIAgDUCAIdtAwCGVQMAgTkAAIRlAgAXDACAigEEAInVAwCI7QMAjwkAAKgLAIApDACAjAkAAC8MAICJMQMAkQkAAMzYAABVHQCAfR0AgL0aAIBMCgCAgGUDAIENAwCGPQAAgx0DAMwQAgDNhAEAgikAAMx0AwCjgQYAxRoAgICxAgCBsQIAzRoAgIEpAAClwQAA1RoAgMzoAwDNYAIAUgoAgKjxAABYCgCAXgoAgGQKAIDdGgCAgWkAAMzcBACCEQEA5RoAgGoKAIDtGgCA/RoAgAUbAID1GgCAswkAgMygBADN3AQAzAgBALYJAIClHQCAhhEBAOEAKwDgfCcA44hIAuIMOAKdHQCAh5EBALUdAICtHQCAgNkBAIE1AADMxAIA6kRkApUdAIANGwCA72hkAoERBwCC8QEA8NCLAolVAACB5QEAFRsAgIfhAQCAbQAAgQ0AAIN5AAB2CgCAgXkAAICVAQDMOAEAzRQBAIzBAQB8CgCAvAkAgKMVAQDDlBcAwpwUAMWEFwDEUBcAx+wXAMaAEgCNHQCAiAoAgMvQFgDK4BYAzRQWADUMAIDPvCAAzpwZANHMJADQ2CUA0+gkALFRAQA7DACAp90HAL0dAIDWvCQA2cgnANjUIgDb+CcALRsAgIftBwCCCgCAzPgEAB0bAIAlHQCAh8kGALAJAICR3QcAuQkAgCUbAIBwCgCANRsAgIUdAICMDACAjPkGAAsMAICA1QYAgcEGAMzEAgDNBAUAglEAAIN1BwCArQYAgbkGAIY1BwCHKQcAhEEAAI4KAICn7QAAPRsAgIjpBwCJzQcAlAoAgI/BBwCM3QcAmgoAgOoLAICnXQYAsJ0AAKAKAICmCgCAo0EGAEUbAIBVGwCAfQwAgE0bAIBdGwCArXEGAGUbAIC/CQCAzPgDAM0sAwDCCQCAo+UAAMUJAICMTQAAsgoAgKfxAAC4CgCAsT0GAIedAACGlQAAqB0HAISJAAC+CgCAgqkAAIHVAACtAQcAygoAgJE9AACCmQEAyAkAgM0MBQDMCAUAgT0AAIeFAQCIvQEAdRsAgMUdAICuCwCAjJEBAEEMAIBHDACAzR0AgID1AQCBhQEAgoEBAIOdAQCEiQEAxAoAgIapAQCHXQAAiG0AAIlNAABtGwCAzBACAIxdAACCDQAA0AoAgI9JAACw6QAAfRsAgPALAICjKQEAgCUBAIFVAQCFGwCApzUBAMykAQDNEAIA1goAgI0bAICBNQAA3AoAgK4JAQDoCgCAzOgBAM0oAgCVGwCAo/EAAIQFAACdGwC
A4goAgK0bAICotQAApRsAgIFdAAC1GwCAzPwBAM3AAQC9GwCAxRsAgIGFAwARDACAgeUDAO4KAICH6QMAywkAgIylAwDNGwCA+goAgKoJAIDVGwCAgZkDAIHdAwCMvQMAzSQBAMwgAQDMEAIAzTACAIH5AACHUQAAgFUAAIFZAAD0CgCAg0kAAIxBAADlGwCA3RsAgM4JAICBfQAAgHEAAMwgAwDNsAMAo30DANEJAICjEQMA7R0AgIEtAQCx/QAApzEDAK1BAwDlHQCAo20DAP0dAID1HQCA7RsAgKdtAwCANQAAgR0AALFtAwCILQAAmAwAgKeVAACBcQAAgFkAAINxAACj9QAAgVEAAK2BAAD1GwCAsQkDAIldAACEPQAAzDgBAISdAQCBGQAAgAkAAIRlAAD9GwCAzNAHAMzwBwAFHACAkYkAAMxMBgDNBAYAzHAGAM10BgDMQAcAmy0PAMyoBwDNrAcAhg0AAIdVDwCEQQ8ACQsAgIIBDACDVQ8AgDUBAIHZAQCkDACAj+kAAIztAACSDACA3R0AgIv1AACIbQ8AiQ0AAA8LAIC0CwCAgiUAAE0MAICBQQAAUwwAgBUeAIANHgCAJR4AgB0eAIAtHgCABR4AgIApAACBKQAA/AsAgA0cAICEeQAAFRwAgIFNAQCAoQEAGAsAgKP9DwDMOAIAzUgDAB0cAICBWQAAzXwCAMykDQAkCwCAWQwAgKjJDwCHOQAA1wkAgImhDwADCwCAkREAAJ4MAIDaCQCAmQsAgF8MAICAuQ8AgbkPANUdAICDjQ8A9gsAgCUcAICEBQAALRwAgB4LAIA1HACAKgsAgIGdDwCHIQAAh7UPAMyoAgDN6AIAzLQMAM3cDACmzQAAp8UAAE0cAICPgQ8AjIkPAKPlAAAwCwCAPRwAgDwLAICxyQAAhwUAAFUcAIBFHACAhz0AAF0cAIBxDACANgsAgKMFDwCB+QAAzKgDAGUcAIBICwCAjEkAAKPxAABtHACAdwwAgEILAICnlQAAfRwAgHUcAIDMrAMAzcgAAN0JAICHaQAA4AkAgIG9AACCeQAA4wkAgIe5AQBOCwCAkaUAAIEdAACdHACAVAsAgIgFAAClHACAm5EAAFoLAIDmCQCAjJEBANILAIDGCwCAwAsAgMwLAICDRQAAgrkBAIG5AQCApQEAPR4AgIZxAABgCwCAhEkAAIsVAACKPQAAiTkAAIhFAACP+QAAZgsAgLoLAICMBQAAp1EBAKZJAQBlDACAsHkAAKNZAQCMqQAAgKkAAIGpAACBlQAAgJUAAK1xAQBrDACAogsAgISNAABNHgCARR4AgKMhAABdHgCAVR4AgGUeAICBbQAAgG0AALEFAQCkOQAANR4AgIUcAIBsCwCAqAUAAJUcAICNHACArQkAAMywAQCBvQMAgL0DAIPNAwCtHACAtRwAgL0cAIDMvAEAzYQBAInpAwDMHAEAgdkCAIDFAgDNOAEAzDwBAMxoAgDNRAIAg00AAMUcAICH2QAAhy0AAIBFAACBEQAAggUAAHILAIDVHACAzRwAgN0cAIDMOAIAiBUAAIjhAACAbQAAgTkAAMyEAgDNUAEAo0UDAIQ5AQDlHACA7RwAgMzcAwDNSAIAbR4AgOkJAIB4CwCAhR4AgKoMAICBbQAA9RwAgH4LAICj0QAAfR4AgHUeAIDMiAQAgXUAAIB1AACBCwCAo7UAAMwABADNVAIA/RwAgIcLAICETQEAjQsAgAUdAIANHQCAzNAOAMwsAQDMAAUAzVwFAOwJAIDvCQCAzJgOAIHBAADMzA8AzDwOAMwIAQDNnA4AzNQPAM14DwDMPA4AzTgOAIHlAQCA5QEAg+UBAILlAQDUCQCAhOUBAIfhAQBBHQCAiaUBAIjZAQCByQcAOR0AgFEdAIBJHQCAzDQBAPUJAICA3QAAgekAAEMKAICD/QAAgM0AAIH5AACBEQcAaR0AgGEdAICJ0QAAzCgBAHkdAIBxHQC
A4QsAgMw0AQDbCwCAgF0AAIFlAACjAQEAg2EAAIFxAACASQAAMR0AgBoMAICrCwCAiVUAACwMAIAyDACAWR0AgIEdAIDBGgCATwoAgIIdAACDeQcAgBkHAIEZBwCGIQAAhykAAISRBwDyCQCAimkAALHZBgCIaQAAifUHAEkKAICP3QcAjNkHAIkMAID4CQCAKR0AgPsJAICRoQcAgEEHAIFBBwCHBQAAyRoAgIKRBwDRGgCA2RoAgKOVBgCGhQcAp+0AAMyQAgDN4AUAsekAAKPBAABVCgCAWwoAgGEKAIBnCgCA/gkAgKVlBwDhGgCAzLgDAKhVBwDpGgCAbQoAgPEaAIABGwCACRsAgPkaAIABCgCAo60AAAQKAICMJQYABwoAgIxNAACpHQCAgm0AAIE9BgCCAQYAgWUAAKEdAICHZQAAuR0AgIcRBgCHrQEAsR0AgMxQAgDNxAIAgeEBAIDJAQCD4QEAkYkAAID9AQCB1QEAmR0AgIydAQCJNQAAcwoAgIB1AACBXQAAhi0AAIc1AACEfQAAERsAgIKFAQCDfQAAgJ0BAIGRAQAZGwCAj+kAAIzhAAB5CgCAfwoAgAoKAICIDQAAifkAAKc5AQCRHQCAiwoAgDgMAICjJQEAPgwAgLBZAACJHQCAggUAAMEdAICtFQEAjwwAgDEbAICGBQAAhQoAgCEbAIApGwCAp2kAAIANAQCBAQEAhzEAAKNJAACxGQEAzBACADkbAIAODACAkQoAgK1RAADM1AEAzfgBAKhBAABBGwCAzTgBAMw8AQCB7QMAlwoAgJ0KAICMDQAA7QsAgKMKAICBxQMAzGgCAKkKAICCxQMASRsAgITJAwCHKQAAhjEAAFkbAICCbQAAgAwAgFEbAICHYQAAYRsAgGkbAIAVHQCAzKgDAM2sAgCB+QAAiC0AAA0KAIAQCgCAEwoAgIw1AAC1CgCAuwoAgLHVAADBCgCAeRsAgMkdAICxCwCAzDABAEQMAIBKDACA0R0AgMwEAQDHCgCAcRsAgKelAADTCgCAo40AAMwUAgCAuQAAgbkAAKeFAAAIDACAgmUAAIEbAICMNQAA8wsAgMzsHADN/AMAiRsAgK6tAADZCgCAkRsAgMzABgDN0AYAsL0BAMyQBwDfCgCAgckBAMwYHQDNIAIAhBEAAOsKAIDNuAYAzKwGAKEbAIDlCgCAgSkAALEbAICpGwCAo+0BAMxAHQDNEAIAuRsAgMEbAICBCQAAyRsAgMxAHQDN0AIAqNkBABQMAIDMkAcAzBwBAMxgBgDNZAYA8QoAgBwKAIDRGwCAkSkBAP0KAICBzR8A2RsAgPcKAIDpGwCA4RsAgMzEBgDNwAYAgTEAAIDZAAAfCgCAIgoAgIK5AQCDRQEAgLkBAIG5AQCGXQEA8R0AgIRdAQDpHQCAzcAAAMzwAACIARwAiXkBAAEeAICPVQEAjGEBAPkdAICB3R4AgRUfAJkbAICBXR8AjIEfAIdBHwDMGAMAzWgDAIBNHwCBpR8AJQoAgIOpHwCMFR8AjNEeACgKAICHtR8AgJUfAIGZHwCBEQAAg70fAICFHwCBiR8A8RsAgIQ9AACbDACAiZkfAPkbAICIBQAABgsAgAEcAICADQAAgf0AAAkcAICj2R8Ao3keAKOFAAAMCwCArTUfAKdhHgCnqR8AoQwAgIQNAACnDACAozUfACsKAICtiR8AhHEAAKchHwCxPR4AsYUfAJUMAIDhHQCAEgsAgLcLAIDMtBwAzbAcAFAMAICxQR8AVgwAgJwLAIAZHgCAER4AgCkeAIAhHgCAgLkeAIG5HgCCIQEAgzUBAIRhAQAxHgCAhokBAIe9AQCIkQEAiekBANkdAICL/QEAjOUBAIINAAAJHgCAj90BAIO5AQCRrQEAgb0BAIC9AQCAoQEAgaEBAPkLAID/CwCAhD0AABEcAICJlQEAm4EBAIHNHgCAzR4AzPwCAM3wAgCB5QAAGRwAgIHtAACjpQA
AzJABAM1cAgCHHQAAGwsAgKj5AAAhHACAJwsAgFwMAIBiDACAKRwAgIQFAAAxHACAo9UAACELAIA5HACAgVEAAMz0AQDN0AEALQsAgIc9AABRHACAMwsAgEEcAIA/CwCAhwUAAFkcAIBJHACAh/EDAIHZAwCBmQMAgZEAAGEcAIB0DACAjPkDAMwkAQCHuQMAgfkDADkLAIDMZAIAgskDAIyZAwBpHACAh9EDAI+RAwCB3QYAkfUDAMwABADN7AMAh2UAABkdAIBLCwCAcRwAgHoMAIBFCwCAzBgBAIg5AACBHACAeRwAgMxcAwCMJQAALgoAgMwsAQCx/QAAozkDADEKAIA0CgCAoRwAgKdZAwDMdAMAiAkAAKNRAwCpHACAXQsAgINtDQCnnQAApq0AAKOdAACxDQMAzCgBANULAICntQAAprUAAMkLAIDMMAEAgdUHAMMLAIDMKAEAzwsAgEEeAIBjCwCArYkAAGkLAICAzQEAgd0BAMxEAQDNnB4AhPUBAL0LAIDMWAEAzUwBAIDtAQCB/QEAg7UAAGgMAICM3QEAbgwAgMwIHgCM8QYAzDgBAM08AQBRHgCAiREAAIEFBgBJHgCAYR4AgFkeAIBpHgCAgz0AAIAhAACBOQAAgDkAAIEhAAA5HgCAiRwAgMwoAQCB2QYAbwsAgIH9BgDMJAEAmRwAgJEcAICxHACAgCEBAIE1AQCjBQAAuRwAgMEcAIDJHACAzIwFAM1AAgC3HAMAdQsAgIfNBwDZHACA0RwAgB0dAIDNiAAAzJAAAIzdBQCjhQAAFgoAgMzgAgDhHACAiNUHAIFNAACATQAAUQsAgOkcAIBXCwCAkTkHADcKAICIxQcApQsAgIrJBwDxHACAmz0AAIflBwBxHgCAgYUHAICFBwA6CgCAgvkHAILVBgCDRQAAgMkGAIHdBgCG4QYAewsAgIRRAACJHgCAipUGAIuZBgCIeQAAiZ0GAK0MAICPWQcAjG0HAPkcAIDMgAMAzSQCALARBwA9CgCAgR4AgCEdAIB5HgCAhAsAgICNAACBnQAAzOwDAM3oBAABHQCAigsAgKNJBwCQCwCACR0AgKO9BwARHQCAGwAAgOcHAIALAACApKUHAOsEAICKBQCAAwAAgKhhBwDZDQCAZQAAgMgDAIAbCQCArWkHAIAtAQCBPQEAgl0BAINRAQCEYQEAuAQAgKwEAICHYQEAiK0BAIm1AQCKvQEAjykVALwFAIAdDACAzHgCAM3YBQCB3QEAgXEAAOQLAICC/QEAhBkAACMMAICH7QEAIAwAgMw0BADNMAQA5wsAgJ9pFQAmDACAjMkBAM34BADM8AIAsUkBACEHAICB1QAAoxUBAKCZFQBzCACARgcAgIT1AADMKAQAzSwEAMMIAICveQEAqH0BADENAICqaQEAUgkAgLQlAQC1KQEAowkBAAIMAIDqBgCA7gYAgLIFAQCzPQEAvPUAAL39AAC+2QAAOAgAgLgBAQC5AQEAugEBADwHAIBDBwCAhgwAALOdAwCyiQMAswgAgIC9AwBpBwCAbAcAgBIJAIDkBgCA5wYAgDUIAICJhQMAzOQHAL+hAwAFDACA1wwAgIxlAADN5AwAzCQMAIlBAACIVQAAi0UAAIpFAACFtQMAhLUDAIeVAwCGgQMAAQ0AgAQNAIAHDQCAmCwAABMAAICmyAAAzYwGAMyoBgCFaQAAFwAAgDEAAIBpAACAzPADAAcAAIA1AACA0QwAgLGVAAAlDQCAs5UAALKVAAA1DQCAOA0AgEANAIA7DQCALg0AgHUAAICmBgCAJQAAgJgJAIAdIQCAv1UDAEMNAIAZIQCAFSEAgGEgAIC4bAAAlGUNAJIAAgCcrQEAnaUBAJqJAQCbiQEAmJkBAJmJAQDMIAYAzQQGAMxABgDNXAYAzDwHAM04BwDMvAcAhXUAAIABDwCBDQ8AaSAAgLqZAQCFBQAAcSAAgFkgAIC+hQEAgSkPAIAlDwBlIAC
AgiEPAIUpAAC0pQEAhREAAG0gAICziQ8AsoUPALHJAQCwAQwAt4EPALbtAQC17QEAtO0BAIFlAQCAZQEAg2EBALi1DwDMPAsAhHkBAIDhDwCB3Q8AdSAAgF0gAIDMyAQAzbgEAIWtAACFFQAAISEAgDkhAIDM6BkAzbQZAKRdAQBGDQCAok0CAKPxDwCgVQEAod0PAH8IAIBuCQCAOwkAgO0eAIBsCQCA9R4AgHcJAIDxHgCAsQgAgJMNAACtHgCA+R4AgITVDACF6Q4AlGkAAIfdDgC1HgCAmbQCAL0eAIDFHgCAsR4AgD0hAIC5HgCAn3QBAMEeAICRGA0AgI0OAIGBDgCGhQ4AlYwDAISJDgCXRAIAghEAAKm4AACA0QAAge0AAMkeAIBJDQCA5R4AgIVZDwCDiQAAoTQNAIFFDgCASQ4A6R4AgKU0AQCFYQ8AzPAUAB0fAIC5xAUAzMgDAM3cAwCA3QAAgcEAACUfAIC/kAUAhREAALHsBwCA9QAAgcEAAKEgAIC1jAYALR8AgLdABgCA3Q4AgekOAMwoAgDNtAIAgM0OAIH5DgCFKQAAg4UBAIB1AQCBsQEAgPEBAIHVAQCpIACANR8AgIUFAACxIACAgJkBAIG9AQCCfQAAk9UBAJThAQCFDQAAmSAAgCEfAICACQAAgRkAACkfAICTrQEAlC0AAKUgAICFDQAAMR8AgIUFAACtIACAOR8AgIUpAACCGQAAhTUAAIDxAACB4QAAtSAAgJ0gAIBBIQCAhQUAAGEhAICDdQEAgO0BAIEpAQDM8AEAzbABAEwNAIBdIQCAWSEAgKMNAIBdHwCAZR8AgIA9AACBDQAAbR8AgHUfAICALQAAgR0AAIIVAABhHwCAzSwBAGkfAIBxHwCAeR8AgIjFAwClIQCAzJACAM28AgCE7QMATw0AgIb5AwCdHwCAgIEDAIH9AwCAPQAAgTUAAIFJAACAQQAAzdwBAIJBAAClHwCAoR8AgKkfAIDNMAEAlJ0DAI0hAIDN8AEAzAwBAIG5AwCAxQMAg6EDAJOlAwCArQAAgdUAAICdAACBqQAAiSEAgFINAICBwQAAgMkAAIC1AACBgQAAhSEAgINpBADMcAMAzbQDAIEhAIDNPAEApg0AgJMBBADNjAIAzPQCAIANAACBNQAAlNkGANEfAIDVHwCA2R8AgMwIAQDNHAEAgREAAIApAACpIQCAghkAAICRAQCBkQEAzWgFAMyUAgDMEAkAzSgWAMxYDgDNeA4AzBQNAM3YCgDMKAwAzYwNAMzgFwDM4AoAzDgLAM30CACFEQAAVQ0AgIBRBwCBUQcA4SAAgM2QDgCFBQAA6SAAgMzYDgDN7AEA8SAAgM0ADgCFGQAAzfAPAM08DgDNVA4AzGgBAM1sAQDZIACAYQgAgJSZBwDMwDsAgGEBAIHZAACFKQAAzWQOAMx4AQDNfAEAga0HAICtBwCFZQAAgp0HAIBRAQCBUQEAlOEHAM3AAACEeQEAk8UHAIZhAQDlIACAiCEBAIUNAADtIACAzRgBAMzYAADNtAAAgN0HAIHNBwCZHwCAhQkAAM0fAID1IACA/R8AgN0gAIAFIACADSAAgBUgAIAJIACAASAAgK0hAIARIACAGSAAgMy4AgDNHAMAgGUAAIF1AACCfQAAHSAAgIUJAACFQQAAASEAgKkNAICAmQYAgSEHAIUZAACDfQAACSEAgIVZAAD9IACA+SAAgIDNAACB2QAAjR4AgIURAACE6QAAlR4AgIblAABBIACAgDUAAIENAACdHgCAhR0AAEkgAIClHgCAhQUAAFEgAICAVQAAgW0AAIJ9AACTRQAAlA0AAIUNAAA5IACAkR4AgIAJAACBEQAAmR4AgIUdAABFIACAoR4AgIUFAABNIACAgOkBAIHxAQCCBQAAqR4AgIUJAACFCQAAVSAAgD0gAICAbQEAgXkBAIIZAACDpQEADSEAgIV1AACFBQAAESEAgAUhAIAhIAC
AzMgCAM3cAgCsDQCAzR4AgIA5AACBOQAA1R4AgN0eAIDRHgCA2R4AgIAdAACBDQAA4R4AgCUgAICAxQAAgdUAAM3AAADMJAIAgNUAAIHFAACFOQAAg8kAACUhAICvDQCAgNUAAIEJAACFBQAALSEAgP0eAICBIACAgAkAAIERAAAFHwCAk5kAAJS5AAANHwCAhWUAAIU9AACJIACAk10AABUfAICFEQAAzXAFAMx0BQCUATwAkSAAgHkgAIDNKAEAhSAAgI0gAICFGQAAlSAAgH0gAIA1IQCAKSEAgCkgAICFJQAAhTkAAMz4AgDNxAMAzTwBALINAICBlQMAgI0DAM3EAQCCpQMAhVEAAIVJAADMKAEAzSwBAM04AQDMPAEAgGk+AIFpPgBJIQCARSEAgM04PADMVDwAgdE8AJOdPgDMSAEAzcgCAM00AQBNIQCAlLk+AFgNAICAoT4AgaE+AIKhPgCIjTwAVSEAgIWtAACALQAAgSEAAIXVPwCVHwCAgO0AAIHxAACGpQAARR8AgISpAADNJAEAzSgBAE0fAICI+T4AhfE/AFUfAIBJHwCAhcU/AM0wAQDNEAEAzfQGAIDdAQCB6QEAzbwGAM1wBgDM4AYAzVwBAMxoBgDNkAYAzWQGAM14BgDMrAcAzagHAMzoBwDNyAcAgk0/AIP9AgCANQIAgekCAFEfAIBZHwCAgAU9AIV9AQBRIQCALSAAgM0UAQApDgCAge0BAIDhAQDNPAEAgs0BAM0sAQCCdQEAgW0BAIBZAQCAZQEAgcUAAIUfAIDNJAEAzTgBAILxAACB+QAAgFkBAIApAACBcQAAzBgBAM18AQDNLAEAjR8AgIEdAACAHQAAiR8AgJEfAIBxIQCAzSQBAMzkPQDNXA8AzegAAMwMAQCA1QEAgckBAIKZAACD5T8ACR8AgBEfAIAZHwCAMSEAgCMOAIB1IQCAPR8AgDEgAIBBHwCALA4AgIBNPwCBQT8AfR8AgGkhAICBHwCAZSEAgIAlPwCBKT8Ak5E/AIN9AAAmDgCAlEEAAMzYAgDNrAIAbSEAgJNVAACACQAAgR0AALUNAIB9IQCAlEEAAK0fAICAnQAAgaEAAIAdAACBEQAAhKUAALUfAICGpQAAvR8AgIjxAACC0QAAgdkAAIDNAACAJQAAgSkAAIIFAADFHwCAsR8AgLkfAIDBHwCAk7EAAJQRAADJHwCAgB0AAIEVAACAJQAAgS0AAII9AAB5IQCAgO0AAIHRAACCFQAAg4EAAIHQPQA1IACAzCACAM3cAQCFeAIAkSEAgC8OAICZIQCAiRgDAN0fAICALQAAgTUAAIAJAACBbQAA5R8AgMEgAICRsQAAkKkAAJPdOwCSAQQAlaUAAJSVOwDtHwCAlqEAAIUJAACTQQAAySAAgPUfAICFBQAA0SAAgJT1AAC5IACAgLkAAIHdAACC5QAA4R8AgOkfAICF6QAAgAkAAIE1AACFBQAAxSAAgPEfAICFHQAAzSAAgPkfAICFBQAA1SAAgLHBBQCwxQMAvSAAgLLFAwC12QUAtM0DAJ0hAICFOQAAuf0DAKEhAICVIQCAuw0AgM0NAIAXDgCAAR8AgAUOAIDTDQCAzIgCAAsOAIDN4D4AzZABAMwkAQBwDQCAjg0AgEEOAIB9DgCAgLEAAM3UPgDN5D4Agw4AgMy8PgDNuD4AgNEDAIHtAwCC/QMAhmkAAD4OAICFnQMAzTwBADgOAIDM6AIAzTw/AIjlAADNGAEAiQ4AgIhBAAA7DgCAdw4AgM0sAQCVDgCAgNUAAJsOAICG4QAAhukAAEcOAIDNJAEAoQ4AgM0QAQCI0QAAiCkAAMz4AgBNDgCAzfgCAMwkAQCnDgCAhS0DAMygPgDNbD4AgNUDAIHNAwCCAQMAg/kDAMxkAwDNzAIARA4AgM0kAQDMDAIAzQgCAIERAADMnAMAzLA+AM20PgDMxD4AzcA+AMyAPgDNuD4ArQ4AgMyEAgDMmD8
AzVA+AMwgPgDNoD4AzQw/AM0wPwDNeD8AzQQ/AIhZAAC/DgCAzfgBAMzEAQBKDgCAxQ4AgMsOAIDMFAIAzAgBAM3IAQCIBQAA0Q4AgNcOAIDMKAIAuQ4AgIgNAACG0QAAgB0BAITNAACI9QAAzDwCAIQ1AQDMRAIAhikBAIAOAICIZQEAhg4AgKdEBQBiDgCAi+0AAIjtAACBDQAAiCUAAIZlAADMcAIAzXQCAMwwAgDN2AUAXA4AgIwOAICAOQAAXw4AgMzgBQB6DgCAzCgBAM0UAQCGJQAAiFUAAAgOAICGhDAAxA0AgIDVBwCG/QcAmA4AgMwkAgCIPQAAng4AgGsOAICIPQAApA4AgMxIAgDNeAIAUA4AgKoOAICXwAUAlnAFAJUYBQCAaQAAk1gFAIE5AACIZQAAkPg8AIZZAACeqAUAhEUAAGgOAIDM1AIAmrQFAIBdAACYrAUAp+wEAIgRAADM2AIAzdwCAKO8BACwDgCAzGACAMIOAIBuDgCAyA4AgK0IBADODgCAq/QEAMwsAgCIBQAA1A4AgLfoAwC2HAQAtSgEAMwAAgCzKAQAi3kAAIh9AACwdAQAhkEAAL6kAwCEdQAAiB0AANoOAIC6TAMAzNwDALj8AwCDqAIAiA0AALwOAICIFQAAh5QCAMw4AgBlDgCAzAQCAIvcAgCPDQAAcQ4AgI8ZAADMIAIAdA4AgI3wAgCIdQAAmCADAJksAwCPDgCAlA0AgMxMAgCWcAMAzCQCAIg9AACSDgCAzCwCAIgFAACzDgCAzCQCAIgNAAC2DgCAh/UAAKjUAwCpxAMA3Q4AgNlgAgDSDwCA1Q8AgNsPAICUNQAAkzEAANloAgDYDwCA2UwCAJQFAADeDwCAlSEAAJQpAABQEACAdBYAgEMXAIDSFgCA2WACADcXAIC12AMAtPADAJQ1AADZWAIAWhcAgJQFAADZVAIAlA0AADEXAIDgdAEAisgAALwVAACIyAAA4IACAIcXAICBoAAApOwCAKTIAgCoXAAAvA0AAJkXAIDghAIAvAUAAJ0XAICk+AIA4PQCALDMAwCV0AAAXRcAgLPgAwCmyAIAp2ACAJLYAABkFwCAvsEAAGsXAICXwQAAchcAgHkXAICAFwCAzXg/AMy8PwC+gA0AixcAgLx4DAC9gA0AuvQMALtUDAC49AwAkhcAgLYXAIC3uAwAuhcAgLWMDACyoAMAs6AMAKEXAICxQAMArnACAK9kAwC4BQMArUgDAKgXAICvFwCAqEQDAKnYAwDaFwCAp9gDAKRoAgCliAMAtjUDALc9AwCSyAIAtT0DAJldAQCYTQEAm2UBAJppAQCdZQEAnGUBAJ+FAQCemQEAh5wCAL6tAACWpQAAl70AAMw0BQDNjDcAzLg4AM2sOACflQEAth0AAJ2ZAQCc9QEAs7EBAK54AgDhFwCAvhcAgJk9AADFFwCAmxkAAJoJAADMFwCA0xcAgOBIAgCeCQAArFwCAK30AgD6FwCA9hcAgP4XAIDoFwCAh2ADAO8XAICvVAIAvhEAAJcFAAACGACA4KwCAAYYAICG+AMAh+wDAOC0AgAOGACAr0gCAK6QAgDgPAIAvg0AAAoYAICXGQAA4NgCAIaEAwCWEQAAvwAMAJ1tAACcYQAAEhgAgLFMAgCzUAIAlQ0AABYYAICGnAMA4MgCALMEAgCCBQAAIhgAgLNQAgCVDQAAJhgAgBoYAIAeGACA4LQCAIaMAwCH3AMAvg0AAJVpAACWeQAAKhgAgLToAgC1UAIAlwUAADIYAIDg1AIAtPQCAL4ZAADgoAIALhgAgODUAgCZjAMAt9QCAIoFAAA2GACAOhgAgIoVAAC3NAIAjx0AAD4YAIBCGACAswUAAEYYAICzBQAAWxgAgJwJAACdCQAATRgAgFQYAICMBQAAYhgAgG0YAIB0GACAexgAgJ9JAACCGACAiRgAgGYYAICQGACAlxgAgNkYAIDPGACA6hgAgOAYAICeGAC
Ag8kBAIH5AQCsGACAsxgAgLoYAIDBGACAyBgAgKUYAICAtAIApYgDAOEIAgCuHQAA8RgAgLwJAACN9QEA9RgAgOEAAgCSlQEA45QQAJNFAACXiQEAhRQAAId4AQCGAAQARjoAgEo6AIBOOgCAUjoAgFY6AICdeQAA74xoAJyhAQBaOgCAXjoAgKKZAABiOgCAZjoAgGo6AIBuOgCAp4kAAHI6AIB2OgCAqUkBAHo6AICsqQAAfjoAgII6AICGOgCAsyUBAIo6AICOOgCAkjoAgLchAQC2OQEAtTEBAJY6AICaOgCAufkAALkRAQC4GQEAnjoAgKI6AICmOgCAqjoAgICwAQCEiAIArjoAgIPIAQCEVAMAhFwEALI6AICEXAUAgN0DAIEtAACCMQAAvjwCALo6AIC+OgCAh4gDAIacBACzLQMAwjoAgMY6AIC+AAQAvhwFALbRAwC12QMAyjoAgLv5AwC68QMAmljTAYTgBwC/xQMAvtkDAL3dAwC83QMAvgAYAKUFAwCmDQMAzjoAgIQcGADSOgCA1joAgKPxAwCsAQMArQEDAK4FAwCvGQMArKQbAq3cGgKqLQMAqyUDAL5MGQC+SBoA2joAgL6AGwC04BoCtdQdArYwHgLvCAIA3joAgOGgAQC6OBoC4/gCALoAAAC9ZBwCvvQcAr8AEAKRBNMBkOT2AeBEAQCSCD4C4joAgOY6AIDqOgCA7joAgL6sHADyOgCA9joAgPo6AID+OgCAAjsAgAY7AIAKOwCAgbBtAICAAQCDHFIAgth3AIUgmgCEkL4AhwjPAIaM5gCJbDcBiOAsAYsYfgGK2BMBjeClAYzwWgGP/OsBjliPAbDVFwCxAWgAso1rALOdawC0SWsAtZVvAA47AIDgcAEAEjsAgBY7AIAaOwCAHjsAgIAZAACBGQAAggUAACI7AIAqOwCAoaUCAKJJBwCjQQcApEEGAKXVGwCm3RsAp8EaAKgBHACp4R8AqkkfAKsBEACs9RMAra0TAK4BFACv+RcAqDEGAKkxBgCqTQYAq0UGAKxNBgCtmQYAro0GAK+FBgCGgAMAhxgDAC47AIAyOwCANjsAgDo7AIA+OwCAQjsAgLhtBwC5dQcAun0HALt1BwC8bQcAvc0HAL75BwC/+QcAsKkGALGFBgCyeQcAs3kHALRpBwC1aQcAtl0HALdVBwC2OgCAs8EGAEY7AIAmOwCAth0GAEo7AIBOOwCAtcEGALppBgC7RQYAUjsAgFY7AIC+qQcAv6kHALypBwC9qQcAo4UGAFo7AIBeOwCAYjsAgGY7AICmWQYApYUGAGo7AICrAQYAqi0GAG47AIByOwCAr+0HAK7tBwCt7QcArO0HAKjBBgCpLQEAqiUBAKs9AQCsJQEArS0BAK4lAQCvlQEAdjsAgHo7AIB+OwCAgjsAgIY7AICCvQAAgb0AAIC9AAC4nQEAua0BALqlAQC7bQAAvHUAAL19AAC+dQAAv20AALD1AQCx/QEAssEBALPBAQC0tQEAtb0BALa1AQC3rQEAijsAgI47AICSOwCAs6EBAJY7AIC1oQEAtqEBAJo7AICGgAEAh8QBALo9AQC7NQEAvBkBAL0ZAQC+fQEAv3UBAKPtAQCeOwCAojsAgKY7AICqOwCApu0BAKXtAQCuOwCAq3kBAKpxAQCyOwCAtjsAgK85AQCuMQEArVUBAKxVAQC6OwCAvjsAgMI7AIDGOwCAyjsAgOGsAQDOOwCA42AGANI7AIDWOwCA2jsAgO9UBgDeOwCA4jsAgL60GgDmOwCA6jsAgO47AICGaBwAh4wDAPI7AID2OwCA+jsAgP47AICAOQAAgTkAAIIFAAACPACACjwAgA48AIASPACAFjwAgKgdAwCpQQMAqkEDAKtBAwCsQQMArUkDAK5xAwCvcQMAhCAdABo8AIAePACAIjwAgCY8AIAqPACALjwAgDI8AIC46QAAufUAALr9AAC78QAAvJEAAL2RAAC+iQA
Av4kAALDhAACx4QAAsuEAALPhAAC04QAAte0AALbZAAC32QAA4wwHAOEgBwDhMAEA4wgHADY8AIA6PACAPjwAgEI8AIBGPACASjwAgE48AIBSPACA75gHAFY8AIBaPACA74gHALOJAgBePACAYjwAgL6AGgBmPACAtokCALWJAgBqPACAu2UBALplAQBuPACAcjwAgL9pAQC+ZQEAvXUBALx1AQC3PQYAtj0GALU9BgC0IQYAszUGALI1BgCxAQYAsAkGAL9ZBgC+UQYAvVkGALxNBgC7bQYAunkGALlxBgC4eQYAgJ0AAIGtAACCpQAAejwAgH48AICCPACAhjwAgIo8AICvcQYArmkGAK1tBgCsbQYAq4EGAKqZBgCpkQYAqJkGAAY8AIB2PACAjjwAgKPFHQCSPACApcUdAKbFHQCWPACAhgADAIdkAwCqKR4AqykeAKw5HgCtOR4ArikeAK8lHgCzOR4AmjwAgJ48AICiPACApjwAgLb9HgC1/R4AqjwAgLvZHgC60R4ArjwAgLI8AIC/aR8AvmEfAL1pHwC8wR4AqPEeAKnxHgCq8R4Aq/EeAKw1HgCtPR4ArjUeAK8tHgC2PACAujwAgL48AIDCPACAxjwAgMo8AIDOPACA0jwAgLjlHwC57R8AuuUfALv5HwC86R8AvZEfAL6RHwC/jR8AsFUeALFdHgCyVR4As/0fALTlHwC17R8AtuUfALfdHwCjeR8A1jwAgNo8AIDePACA4jwAgKa9HwClvR8A5jwAgKuZHwCqkR8AhogAAIdMAQCvKR4AriEeAK0pHgCsgR8AgEkAAIFJAACCWQAAs5keAOo8AIC1iR4AtlEBAO48AIDyPACA9jwAgLotAQC7JQEAvD0BAL0lAQC+JQEAvxUBAKhNHgCpVR4Aql0eAKtVHgCsTR4ArZ0BAK6JAQCvgQEAhKwBAPo8AID+PACAAj0AgAY9AIAKPQCADj0AgBI9AIC4ZQEAuW0BALplAQC7fQEAvGUBAL1tAQC+ZQEAv9kAALClAQCxrQEAsqUBALO9AQC0rQEAtZ0BALaVAQC3XQEAo9UdABY9AIAaPQCAHj0AgCI9AICmHQIApcUdACY9AICraQIAqmECACo9AIAuPQCAr1kCAK5pAgCtaQIArHECADI9AIA2PQCAOj0AgD49AIBCPQCARj0AgEo9AIBOPQCAgDkAAIE5AACCBQAAUj0AgFo9AIBePQCAh0ADAIZcBACETAQAYj0AgGY9AICEBAUA4yABAGo9AIDhqAEAbj0AgO+UGgByPQCAdj0AgHo9AIB+PQCAgj0AgIY9AICKPQCAs6EDAI49AICSPQCAlj0AgJo9AIC2fQMAtX0DAJ49AIC7WQMAulEDAKI9AICmPQCAv/0AAL79AAC9/QAAvEEDAKhRAgCpWQIAqmkCAKtpAgCstQIArb0CAK61AgCvrQIAhKgHAKo9AICuPQCAsj0AgIKpAAC2PQCAgKkAAIGpAAC4aQEAuWkBALoJAQC7CQEAvBkBAL0ZAQC+CQEAvwkBALDVAgCx3QIAstUCALNpAQC0eQEAtXkBALZpAQC3YQEA4bgBAOHUHwDjOB8A4wwbALo9AIC+PQCAwj0AgMo9AIDOPQCA0j0AgNY9AIDaPQCAvjwJAN49AIDvhBsA74QbAKOhAgDiPQCAhugEAIe8BQDmPQCApn0CAKV9AgDqPQCAq1kCAKpRAgDuPQCA8j0AgK/9AQCu/QEArf0BAKxBAgCzhQYAxj0AgPY9AID6PQCA/j0AgLaJBgC1jQYAAj4AgLuRBgC6iQYABj4AgAo+AIC/9QYAvokGAL2BBgC8iQYADj4AgBI+AIAWPgCAGj4AgB4+AIAiPgCAJj4AgO+EHQAqPgCA4QAEAC4+AIDj/AQAgBEAAIEdAACCBQAAMj4AgKjxBgCp8QYAqg0GAKsFBgCsBQYArQkGAK49BgCvNQYANj4AgDo+AICGiAAAhxADAD4+AIBCPgC
ARj4AgEo+AIC4EQYAuRkGALohBgC7IQYAvPUHAL39BwC+9QcAv+kHALBNBgCxVQYAsl0GALNVBgC0TQYAtTEGALYxBgC3MQYAo4UHAE4+AIBSPgCAVj4AgFo+AICmiQcApY0HAF4+AICrkQcAqokHAGI+AIBmPgCAr/UHAK6JBwCtgQcArIkHAGo+AICz4QYAbj4AgHI+AIC25QYAdj4AgHo+AIC18QYAur0GALuNBgB+PgCAgj4AgL59AQC/ZQEAvJUGAL11AQCoHQYAqSUGAKotBgCrJQYArD0GAK0hBgCuXQYAr00GAIY+AICKPgCAjj4AgJI+AICWPgCAgrkDAIGxAwCAuQMAuO0BALmFAQC6jQEAu4UBALydAQC9hQEAvo0BAL+FAQCwPQYAsQ0GALIFBgCz5QEAtP0BALXlAQC25QEAt9UBAKOlBQCaPgCAnj4AgKI+AICqPgCApqEFAKW1BQCuPgCAq8kFAKr5BQCGCAwAhxwDAK8hAgCuOQIArTECAKzRBQCyPgCAs/ECALY+AIC6PgCAtlUDAL4+AIDCPgCAteECALpxAwC7eQMAxj4AgMo+AIC+MQMAvz0DALxRAwC9UQMAqCUCAKk1AgCqPQIAqzUCAKwtAgCtkQMArpEDAK+RAwDOPgCA0j4AgNY+AIDaPgCArAAAAN4+AIDiPgCA5j4AgLiZAwC5rQMAuqUDALttAwC8dQMAvX0DAL51AwC/bQMAsPEDALH5AwCywQMAs8EDALSxAwC1vQMAtrUDALepAwDqPgCA7j4AgPI+AID2PgCA+j4AgP4+AIACPwCA76gaAL5oDADhlAEABj8AgOMcBgCADQAAgXEAAIJxAAAKPwCAo/UDAA4/AIASPwCAhEwCABo/AICmUQIApeUDAB4/AICrfQIAqnUCAIbIDACHLA0ArzkCAK41AgCtVQIArFUCAOFQBgAiPwCA4xQHAITADAAmPwCAKj8AgC4/AIAyPwCANj8AgDo/AIA+PwCAQj8AgEY/AIBKPwCA73gbAL74DwBOPwCAUj8AgFY/AICzjQEAWj8AgLWZAQC2jQEAXj8AgFY9AIBiPwCAuoUBALtNAQC8VQEAvV0BAL5VAQC/SQEAo0EOABY/AIBmPwCAaj8AgG4/AICmQQ4ApVUOAHI/AICrgQ4AqkkOAHY/AIB6PwCAr4UOAK6ZDgCtkQ4ArJkOAIBtAACBCQAAgh0AAH4/AIDvGAkAgj8AgIY/AICKPwCA4zwNAI4/AIDhWAwAkj8AgIbQAACHvAMAlj8AgJo/AICokQ4AqZkOAKrJDgCrxQ4ArN0OAK3BDgCuwQ4Ar/UOAIToAACePwCAoj8AgKY/AICqPwCArj8AgLI/AIC2PwCAuMEPALnBDwC6wQ8Au8EPALzBDwC9wQ8AvsEPAL/1DwCwjQ4AsUUOALJNDgCzRQ4AtF0OALVBDgC2QQ4At0EOAKhRDgCpWQ4Aqo0OAKudDgCshQ4ArY0OAK6FDgCvvQ4Auj8AgL4/AIDCPwCAxj8AgMo/AIDOPwCA0j8AgNY/AIC4kQ4AuZkOALqtDgC7RQEAvF0BAL1FAQC+RQEAv3UBALDFDgCxzQ4AssUOALPdDgC0xQ4AtbUOALa9DgC3tQ4AswUOANo/AIDePwCA4j8AgOY/AIC2DQ4AtQ0OAOo/AIC7CQ4AugEOAO4/AIDyPwCAv3EOAL4BDgC9CQ4AvBEOAIJtAACjQQ4AgFUAAIFlAACmSQ4A+j8AgP4/AIClSQ4AqkUOAKtNDgCGSAAAh3gAAK5FDgCvNQ4ArFUOAK1NDgCoXQIAqWECAKplAgCrdQIArG0CAK2xAgCusQIAr7ECAITsBAACQACABkAAgApAAIAOQACAEkAAgBZAAIAaQACAuHEDALlxAwC6cQMAu3EDALzVAwC93QMAvtUDAL/NAwCw0QIAsdECALLRAgCz0QIAtFEDALVRAwC2UQMAt1EDAB5AAICz6QIAIkAAgL6ABAC2NQI
AJkAAgCpAAIC14QIAuhECALsRAgAuQACAMkAAgL6RAwC/kQMAvAECAL0BAgA2QACAOkAAgKOlAgA+QACApa0CAEJAAIBGQACApnkCAEpAAIBOQACAq10CAKpdAgCtTQIArE0CAK/dAwCu3QMAqNUCAKndAgCqLQEAqyUBAKw9AQCtJQEAri0BAK8lAQBSQACAVkAAgFpAAIBeQACAYkAAgGpAAIBuQACAckAAgLiFAQC5iQEAup0BALuVAQC8sQEAvbEBAL55AAC/eQAAsF0BALHlAQCy4QEAs/kBALTpAQC13QEAttUBALe9AQDh8A4AdkAAgOMUDgB6QACAgb0AAIC9AAB+QACAgq0AAIYABACH7AUAgkAAgIZAAICKQACAjkAAgO9gDgCSQACAlkAAgJpAAICFXH0AnkAAgKJAAIDjZAEApkAAgOG0AQCqQACA76AOAK5AAICmPgCAhPgFALJAAIC2QACAukAAgLMlBgBmQACAvkAAgMJAAIDGQACAtiUGALU1BgDKQACAu6EGALoZBgDOQACA0kAAgL+ZBgC+rQYAva0GALy1BgCCbQAA7zAEAIBVAACBZQAAvlwDANZAAICG+AAAh2wDANpAAIDeQACA4kAAgOZAAIDqQACA40QEAO5AAIDhjAcAo6UGAPJAAID2QACA+kAAgP5AAICmpQYApbUGAAJBAICrIQYAqpkGAAZBAIAKQQCArxkGAK4tBgCtLQYArDUGAA5BAICz+QcAEkEAgBZBAIC2SQcAGkEAgB5BAIC1UQcAulEHALtRBwAiQQCAJkEAgL41BwC/OQcAvEUHAL09BwCoNQYAqT0GAKo1BgCriQYArJ0GAK2NBgCusQYAr7EGACpBAIAuQQCAMkEAgDZBAICADQAAgbEAAIKxAAA6QQCAuKEGALmtBgC6vQYAu7UGALytBgC9XQEAvlUBAL9NAQCw0QYAsdEGALLVBgCzrQYAtLUGALW5BgC2qQYAt6UGAKO9BgA+QQCAQkEAgISEAgC+kAEApg0GAKUVBgBKQQCAqxUGAKoVBgCGCAAAh3wBAK99BgCucQYArXkGAKwBBgBOQQCAs60BAFJBAIBWQQCAtqkBAFpBAIBeQQCAta0BALptAQC7dQEAYkEAgGZBAIC+XQEAvzUBALxlAQC9VQEAqGECAKlhAgCqYQIAq2ECAKxhAgCtbQIArp0CAK+VAgBqQQCAbkEAgHJBAIB2QQCAekEAgH5BAICCQQCAhkEAgLiVAgC5nQIAuqECALuhAgC8cQMAvXEDAL5xAwC/cQMAsO0CALH1AgCy9QIAs8UCALTdAgC1tQIAtrECALexAgCKQQCAjkEAgJJBAICj5QIAlkEAgKXlAgCm4QIAmkEAgJ5BAICiQQCAqiUCAKs9AgCsLQIArR0CAK4VAgCvfQIApkEAgKpBAICuQQCAhEB8AIAVAACBHQAAggUAALJBAIC+7HwAukEAgIZIfQCHCAMAvkEAgMJBAIDGQQCAykEAgKidAgCpxQIAqsECAKvBAgCsxQIArc0CAK7xAgCv8QIAzkEAgNJBAIDWQQCA2kEAgMkAAADeQQCA4kEAgOZBAIC4wQEAucEBALrBAQC73QEAvM0BAL31AQC+/QEAv50BALBBAQCxQQEAskEBALNBAQC0QQEAtUEBALZBAQC3QQEA4TgGAOpBAIDjaAYA7kEAgPJBAID2QQCA+kEAgISUfQC+rHwA/kEAgAJCAIAGQgCAvrh/AApCAIDvEAEADkIAgBJCAIAWQgCAGkIAgB5CAIDhkAEAIkIAgONEAAAqQgCAgS0AAIAtAADvgAAAgjkAAC5CAIAyQgCA9j8AgDZCAIDhsH8AtkEAgOPUfAA6QgCAJkIAgD5CAICGuAAAh9QCAEJCAIBGQgCASkIAgE5CAIBSQgCAVkIAgO8gfABaQgCAs4l9AF5CAIBiQgCAZkIAgGpCAIC2jX0AtY19AG5CAIC7RX4AukV+AHJCAIB2QgC
Av0V+AL5FfgC9VX4AvFV+AKNJfQB6QgCAfkIAgIJCAICGQgCApk19AKVNfQCKQgCAq4V+AKqFfgCOQgCAkkIAgK+FfgCuhX4ArZV+AKyVfgCCbQAAszF+AIBVAACBZQAAtvF/AITcAwCWQgCAtSF+ALrNfwC70X8AhgAEAIfUAAC+dX8Av3l/ALzBfwC9wX8AqOV/AKn1fwCq/X8Aq/V/AKztfwCtNX4Arj1+AK81fgCaQgCAnkIAgKJCAICmQgCAqkIAgK5CAICyQgCAtkIAgLjZfgC54X4AuuF+ALvhfgC85X4Avel+AL6ZfgC/mX4AsE1+ALFRfgCyUX4As1F+ALT1fgC1+X4Atul+ALfpfgCjdX8AukIAgL5CAIDCQgCAxkIAgKa1fgClZX8AykIAgKuVfgCqiX4AzkIAgNJCAICvPX4ArjF+AK2FfgCshX4A1kIAgLMxfgDaQgCA3kIAgLbFAQDiQgCA5kIAgLXRAQC6yQEAu8kBAOpCAIDuQgCAvs0BAL+xAQC8yQEAvckBAKjdfQCp9X0Aqv19AKvxfQCsHQIArQECAK45AgCvOQIA8kIAgPZCAID6QgCA/kIAgIIFAAACQwCAgBEAAIERAAC4EQIAuRkCALohAgC7IQIAvNUCAL3dAgC+1QIAv80CALBJAgCxSQIAslkCALNZAgC0TQIAtTECALYxAgC3MQIAvgADAKNxfQCEiAIAvoAEAKaFAgAKQwCADkMAgKWRAgCqiQIAq4kCAIYoBACHDAMAro0CAK/xAgCsiQIArYkCABJDAICEyAMAhcwFALPlAwAWQwCAteUDALbtAwAaQwCAHkMAgCJDAIC6bQMAu2UDALx9AwC9ZQMAvmUDAL9VAwAmQwCAKkMAgL8ABACjJQIALkMAgKUlAgCmLQIAMkMAgDZDAIA6QwCAqq0CAKulAgCsvQIAraUCAK6lAgCvlQIAPkMAgEJDAIBGQwCASkMAgE5DAIDjzAMAUkMAgOGsAQBWQwCA7xwDAFpDAIBeQwCAYkMAgGZDAIBqQwCAbkMAgOFwfwBGQQCA4wR+AHJDAIB6QwCA4ZQBAH5DAIDjWAEAgNkAAIHZAACCJQAA7+R+AIJDAICGQwCA7+B+AIpDAICzAQEAjkMAgIboBwCHLAQAkkMAgLY1AQC1BQEAlkMAgLvxAAC64QAAmkMAgJ5DAIC/sQAAvtEAAL3ZAAC84QAABkMAgHZDAICiQwCApkMAgKEBBACgEQQAoxkAAKLFBACotQYAqb0GAKrpBgCr/QYArO0GAK3VBgCu3QYArz0HALBFBwCxVQcAslUHALNtBwC0dQcAtRUHALYdBwC3FQcAuC0HALk1BwC6MQcAuw0HALwZBwC9GQcAvgkHAL8JBwCjQQYAqkMAgK5DAICyQwCAtkMAgKZ1BgClRQYAukMAgKuxBwCqoQcAj8ltAL5DAICv8QcArpEHAK2ZBwCsoQcAld11AJTBdACXzXAAli1zAJFdaACQVWgAk9l0AJJNaQCd5XgAnB17AJ9tBwCeuXgAmR1/AJhVcACboXwAmvl8AIJhbACDhWkAwkMAgMZDAICGEXUAhxF1AISVaQCFjWgAij10AIvFcgDKQwCAzkMAgI7dfgCPMX0AjD1xAI2dcQCSGX0Ak716ANJDAIDvkAkAltUGAJdRBQCUXXkAlQl5AJpxBQCbvQUA1kMAgNpDAIDeQwCA4agFAJx5AQDjuAgAoYUBAOJDAICjqQ0AogEMAKUBCACkOQ0Ap6kJAKa9CQCppRUAqAEUAKsBFACq/RUArbkRAKyxEQCvARwArqEQALH9HACw5R0As+kZALIBGAC1ASQAtH0ZAIQUAAC+FAAAgI0AAIGVAACCbQAA6kMAgIZQDwCHZAAA7kMAgPJDAIC61QcAu90HALjBBwC5wQcAvjEEAL8xBAC88QcAvfEHALKtBwCztQcAsK0HALGlBwC2nQcAt/UHALSlBwC1lQcAqmkHAKtpBwCoaQc
AqWkHAK5pBwCvaQcArGkHAK1pBwD2QwCA+kMAgP5DAIACRACABkQAgApEAIAORACAEkQAgKgRBQCpHQUAqjkFAKs5BQCsLQUArVEFAK5JBQCvQQUAFkQAgBpEAIAeRACAIkQAgCZEAIAqRACALkQAgDJEAIC4XQIAuWkCALrBAwC7wQMAvPkDAL35AwC+kQMAv7UDALAJBQCxCQUAsuECALPhAgC0dQIAtX0CALZ1AgC3bQIAs7EEAIQAAgC+BA0ANkQAgDpEAIC20QQAtaUEAD5EAIC7zQQAus0EAEJEAIBGRACAv7kDAL6xAwC9NQMAvDUDAEpEAICj9QQATkQAgFJEAICmlQQAWkQAgF5EAICl4QQAqokEAKuJBACHqA0AhswMAK71AwCv/QMArHEDAK1xAwDhUAYA4TQHAONAAADjWAcAgNEAAIHdAACC1QAAYkQAgGZEAIBqRACAbkQAgHJEAIB2RACAekQAgO+cAADvyAcAfkQAgIJEAICzNQIAhkQAgLW1AQCKRACAjkQAgLa1AQC+7AwAkkQAgLuRAQC6mQEAvVEBALyJAQC/UQEAvlkBAKjtDQCp/Q0AqvUNAKttDgCsdQ4ArX0OAK51DgCvbQ4AVkQAgJZEAICaRACAnkQAgKJEAICmRACAqkQAgK5EAIC49Q4Auf0OALr1DgC7QQ8AvEEPAL1JDwC+cQ8Av3EPALAVDgCxHQ4AshUOALPNDgC01Q4Atd0OALbVDgC3zQ4Ao30NALJEAIC2RACAukQAgL5EAICm/Q4Apf0OAMJEAICr2Q4AqtEOAISoAgDGRACArxkOAK4RDgCtGQ4ArMEOAIBNAACBVQAAglUAALNRDwDKRACAtXEPALZxDwDORACAhuAAAIcEAwC6XQ8Auy0PALw1DwC9OQ8Avi0PAL8lDwCoVQ4AqV0OAKqVDgCrrQ4ArLUOAK29DgCutQ4Ar60OANJEAIDWRACA2kQAgN5EAIDiRACA5kQAgOpEAIDuRACAuGkBALlpAQC6eQEAu3kBALxpAQC9aQEAvt0BAL/VAQCw1Q4AsaUOALKtDgCzoQ4AtKUOALWtDgC2nQ4At1kBAKMdDgDyRACA9kQAgOZDAID6RACApj0OAKU9DgD+RACAq2EOAKoRDgACRQCABkUAgK9pDgCuYQ4ArXUOAKx5DgAKRQCADkUAgBJFAIAWRQCAGkUAgB5FAIAiRQCAJkUAgIANAACBFQAAgh0AACpFAIAuRQCAMkUAgIR4AQC+FAAA4xQPADpFAIDh4A0AhAADAIawBACHFAMAPkUAgEJFAIBGRQCASkUAgE5FAIBSRQCA78APAFZFAIBaRQCAXkUAgGJFAIBmRQCAakUAgLNtAwBuRQCAtX0DALZ1AwByRQCAdkUAgHpFAIC6UQMAu1EDALz1AwC9/QMAvukDAL/hAwB+RQCAgkUAgIZFAICKRQCAjkUAgJJFAICWRQCAmkUAgKhxAgCpeQIAqokDAKuJAwCsmQMArZkDAK6JAwCviQMAsPkDALH5AwCyTQMAs0UDALRBAwC1SQMAtnEDALdxAwC4IQMAuSEDALohAwC7IQMAvCEDAL0hAwC+IQMAvyEDAICdAQCBEQAAghEAAIQEBQDvFAAAnkUAgKJFAIC+EAUA48gAAKpFAIDh0AEArkUAgLJFAIC2RQCAukUAgL5FAICqeQIAq3kCAIboBACHYAUArsECAK/JAgCs3QIArdUCAMJFAICjRQIAxkUAgMpFAICmXQIAzkUAgNJFAIClVQIA1kUAgNpFAIDeRQCA4kUAgOZFAIDqRQCA7kUAgO+EDgC+rAQA4dAOAPJFAIDjFAEA9kUAgPpFAID+RQCAAkYAgLPdAQAGRgCACkYAgA5GAIASRgCAtv0BALX9AQAaRgCAu90BALrdAQCE4AQAHkYAgL+hAQC+vQEAvb0BALy9AQCoBQYAqR0GAKoVBgCrLQYArDUGAK09BgCuNQYArykGAKZFAICC9Qc
AgeUHAIDlBwAWRgCAIkYAgIYcAACHsAMAuCUGALnFBgC6zQYAu8UGALzdBgC9xQYAvs0GAL/FBgCwWQYAsVkGALIpBgCzKQYAtDkGALUlBgC2JQYAtx0GAKOdBgAmRgCAKkYAgC5GAIAyRgCApr0GAKW9BgA2RgCAq50GAKqdBgA6RgCAPkYAgK/hBgCu/QYArf0GAKz9BgBCRgCAs/UHAEZGAIBKRgCAtu0HAE5GAIBSRgCAteUHALqNBwC7kQcAVkYAgFpGAIC+dQcAv30HALyBBwC9fQcAqCUGAKkpBgCqOQYAqzkGAKwpBgCtKQYArnkGAK91BgBeRgCAYkYAgGZGAIBqRgCAbkYAgHJGAIB2RgCAekYAgLjVBgC53QYAuuEGALv9BgC85QYAve0GAL7lBgC/mQYAsA0GALERBgCyEQYAs+0GALT1BgC1/QYAtvUGALftBgCjsQYAgi0AAIEVAACAsQAANkUAgKapBgCloQYAfkYAgKvVBgCqyQYAgkYAgL5oAQCvOQYArjEGAK05BgCsxQYAikYAgLPxAQCGaAAAh3wBALZdAQCORgCAkkYAgLVVAQC6SQEAu0kBAJZGAICaRgCAvj0BAL8hAQC8OQEAvTUBAJ5GAICiRgCAhAQDAL6AHACmRgCA4RwGAKpGAIDjAAYAvwguAK5GAICyRgCA78gHALZGAIC6RgCAvkYAgMJGAIDGRgCAykYAgKN9AgDORgCApdkCANJGAIDWRgCAptECANpGAIDeRgCAq8UCAKrFAgCtuQIArLUCAK+tAgCusQIAqW0FAKhZBQCrDQIAqrkCAK0dAgCsHQIArwUCAK4NAgC+aB0A4kYAgOZGAIDqRgCAgB0AAIEJAACCmQEA7kYAgLnhAwC4KQIAu+EDALrpAwC94QMAvPkDAL/hAwC+6QMAsU0CALBNAgCzIQIAsi0CALUlAgC0OQIAtxECALYlAgCowQIAqdECAKrRAgCr5QIArP0CAK0VAQCuHQEArw0BAPJGAID6RgCA/kYAgAJHAIAGRwCACkcAgA5HAIASRwCAuAUBALkJAQC6HQEAuxUBALwxAQC9MQEAvv0BAL/1AQCweQEAsUEBALJBAQCzXQEAtEUBALVNAQC2RQEAtz0BAIagHQCHxB0AFkcAgO/YAAAaRwCAHkcAgCJHAIDvxAYAhGwcAOH0BgAmRwCA47AGACpHAIDhlAEALkcAgONEBgCzGQIAMkcAgDZHAIA6RwCAhewsALbVAQC1NQIAPkcAgLvFAQC6/QEAQkcAgEZHAIC/yQEAvsEBAL3JAQC81QEAo9kdAPZGAIBKRwCATkcAgFJHAICmFR4ApfUdAFZHAICrBR4Aqj0eAFpHAIBeRwCArwkeAK4BHgCtCR4ArBUeAIBpAACBaQAAggUAAGJHAIBmRwCAakcAgIcQAwCGfAMAbkcAgHJHAIB2RwCAekcAgH5HAICCRwCAhkcAgIpHAICopR8Aqa0fAKqlHwCrvR8ArKUfAK2tHwCupR8ArxUfAI5HAICSRwCAlkcAgJpHAICeRwCAokcAgKZHAICqRwCAuA0fALkZHwC6IR8AuyEfALzZAAC92QAAvskAAL/BAACwcR8AsXEfALJxHwCzRR8AtEEfALVNHwC2PR8AtzUfALMtHgCuRwCAskcAgLZHAIC6RwCAti0eALUtHgC+RwCAu7UeALq1HgDCRwCAxkcAgL+JHgC+hR4AvZEeALylHgCCKQAAo2keAIAdAACBFQAApmkeAMpHAIDORwCApWkeAKrxHgCr8R4A0kcAgITgAQCuwR4Ar80eAKzhHgCt1R4AqNUBAKnlAQCq7QEAq+UBAKz9AQCt5QEAru0BAK/lAQC+oAEAhkYAgNZHAIDaRwCAhhAAAId0AQDeRwCA4kcAgLh9AQC5wQAAusEAALvBAAC8wQAAvckAAL7xAAC/8QAAsJ0BALFFAQCyTQEAs0UBALRdAQC1RQEAtk0BALdFAQDmRwC
A6kcAgO5HAIDyRwCA9kcAgO80AgDv7B4A+kcAgOHwHQDj4AIA4zAeAOGEAQD+RwCAAkgAgAZIAIAKSACAsyUCAJQAAAAOSACAEkgAgBZIAIC2JQIAtTUCABpIAIC7wQIAuhkCAB5IAIAiSACAv8ECAL7ZAgC90QIAvNkCACZIAIAqSACALkgAgKPpAgAySACApfkCAKbpAgA2SACAOkgAgD5IAICq1QIAqw0CAKwVAgCtHQIArhUCAK8NAgCAYQAAgWEAAIIFAABCSACASkgAgIQABAC+FAQATkgAgIbABACHUAMAUkgAgFZIAIBaSACAXkgAgGJIAIBmSACAqK0CAKm9AgCqtQIAqw0BAKwVAQCtHQEArhUBAK8NAQCE7AQAakgAgG5IAIBySACAdkgAgHpIAIB+SACAgkgAgLgdAQC5LQEAuiUBALvNAQC81QEAvd0BAL7JAQC/wQEAsH0BALFVAQCyXQEAs1UBALRNAQC1PQEAtjUBALctAQDhGB4AhkgAgOM4HgCKSACAjkgAgJJIAICWSACAmkgAgJ5IAICiSACAvmAEAKZIAICBdQAAgHUAAO/gHwCCbQAAqkgAgK5IAICG6AQAh3wFALJIAIDhkAEAukgAgOOgAAC+SACAwkgAgMZIAIDvtAAAykgAgM5IAIDSSACA1kgAgLUFBgBGSACAtkgAgLYFBgDaSACA3kgAgLOlBQDiSACAvRkGALwRBgC/YQYAvhEGAOZIAIDqSACAuwkGALohBgCj/QUA7kgAgPJIAID2SACA+kgAgKZdBgClXQYA/kgAgKtRBgCqeQYAAkkAgAZJAICvOQYArkkGAK1BBgCsSQYAqFEGAKlZBgCqYQYAq2EGAKxhBgCtYQYArmEGAK9hBgAKSQCADkkAgBJJAIAWSQCAgA0AAIGxAQCCsQEAGkkAgLhNBwC5VQcAul0HALtVBwC8TQcAvXUHAL59BwC/cQcAsMUHALHNBwCyxQcAs90HALTFBwC1zQcAtsUHALd5BwCz6QcAHkkAgCJJAICEwAEAvtgBALbhBwC16QcAJkkAgLsJBgC6AQYAhogAAIesAQC/CQYAvgEGAL0JBgC8EQYAKkkAgKOtBwAuSQCAMkkAgKalBwA2SQCAOkkAgKWtBwCqRQYAq00GAD5JAIBCSQCArkUGAK9NBgCsVQYArU0GAKhZBgCpZQYAqm0GAKtlBgCsYQYArWEGAK5hBgCvYQYAhKwBAEZJAIBKSQCATkkAgFJJAIBWSQCAWkkAgF5JAIC4kQEAuZkBALqhAQC7oQEAvHEBAL1xAQC+cQEAv3EBALDxAQCx8QEAsvUBALPdAQC0xQEAtbEBALaxAQC3sQEAs+UFAGJJAIBmSQCAakkAgG5JAIC24QUAtekFAHJJAIC7NQIAujUCAHZJAIB6SQCAv3UCAL4BAgC9CQIAvCECAH5JAICjoQUAgkkAgIZJAICmpQUAikkAgI5JAIClrQUAqnECAKtxAgCSSQCAvigDAK5FAgCvMQIArGUCAK1NAgCA1QAAgd0AAILhAACaSQCA4yABAJ5JAIDhqAEAokkAgO80AgCmSQCAhggMAIdoAwCsAAAAqkkAgK5JAICySQCAs40DALZJAIC6SQCAhIAMAL5JAIC2vQMAtYEDAMJJAIC7TQMAuk0DAMZJAIDKSQCAv00DAL5NAwC9TQMAvE0DAKhBAgCpTQIAqkUCAKtZAgCsSQIArX0CAK51AgCvuQIAvmgNAM5JAIDSSQCA1kkAgIRsDADaSQCA3kkAgOJJAIC4TQEAuVUBALpVAQC7ZQEAvH0BAL0VAQC+EQEAvxEBALDJAgCxyQIAstkCALPZAgC0yQIAtckCALZ9AQC3dQEA4XgHAOOYAADjuAYA4VwGAOZJAIDqSQCA7kkAgPJJAID2SQCA+kkAgP5JAIACSgCA7AAAAO9cAADv6AYACkoAgIFpAACAYQAAo4UCAIJhAACliQIADkoAgBJKAICmtQI
AhkAMAIfEDACrRQIAqkUCAK1FAgCsRQIAr0UCAK5FAgCojQ4AqZEOAKqVDgCrqQ4ArKUOAK2tDgCupQ4Ar9kOAAZKAIAWSgCAGkoAgB5KAIAiSgCAJkoAgCpKAIAuSgCAuHUPALl9DwC6dQ8Au90PALzFDwC9zQ8AvsUPAL/9DwCwqQ4AsbUOALK1DgCzhQ4AtJ0OALVRDwC2UQ8At1EPALMdDgAySgCANkoAgDpKAIA+SgCAti0OALUtDgBCSgCAu3EOALptDgBGSgCASkoAgL+VDwC+WQ4AvVEOALxhDgBOSgCAo1kOAFJKAIBWSgCApmkOAFpKAIBeSgCApWkOAKopDgCrNQ4AYkoAgGZKAICuHQ4Ar9EPAKwlDgCtFQ4AqL0OAKnRDgCq0Q4AqykBAKw5AQCtOQEArikBAK8pAQCADQAAgRUAAIIdAABqSgCAbkoAgHJKAIC+dAIAdkoAgLjtAQC5hQEAuoEBALuBAQC8hQEAvY0BAL6xAQC/sQEAsFkBALFZAQCy7QEAs+UBALT9AQC15QEAtuUBALfVAQB6SgCAtqkBALWhAQB+SgCAs0kOAIJKAICGOAAAh9wBAL8xAQC+KQEAvSEBALwpAQC7jQEAuo0BAJZJAICGSgCAoxkOAIpKAICOSgCAkkoAgJZKAICm+QEApfEBAJpKAICr3QEAqt0BAJ5KAICiSgCAr2EBAK55AQCtcQEArHkBAKZKAIDv3A8AqkoAgK5KAICySgCAtkoAgLpKAIC+SgCAwkoAgMZKAIDKSgCAzkoAgNJKAIDj6A4A1koAgOGMDgCAEQAAgREAAIIRAACEQAIA2koAgN5KAIDiSgCAvhADAIbABACHRAMA6koAgO5KAIDySgCA9koAgPpKAID+SgCA7yQCAAJLAIAGSwCACksAgA5LAIASSwCAFksAgBpLAICE7AQAHksAgCJLAIAmSwCA4+wCACpLAIDhOAEALksAgLNVAwAySwCANksAgDpLAIA+SwCAth0DALUdAwBCSwCAuwkDALo5AwBGSwCASksAgL/9AAC+/QAAvfkAALwRAwCogQIAqYkCAKqdAgCrsQIArNUCAK3dAgCu1QIAr80CAIDNAQCBCQAAghkAAE5LAIBSSwCAWksAgL5wBQBeSwCAuFkBALlZAQC6aQEAu2kBALx5AQC9eQEAvmkBAL9lAQCwvQIAsY0CALKFAgCzbQEAtHkBALV5AQC2aQEAt2kBAIYgBACHCAUAYksAgGZLAIBqSwCAbksAgHJLAIDvXAAAhOwEAOFcDgB2SwCA44wOAHpLAIB+SwCAgksAgIZLAICjVQIAiksAgI5LAICSSwCAlksAgKYdAgClHQIAmksAgKsJAgCqOQIAnksAgKJLAICv/QEArv0BAK35AQCsEQIAqGkGAKlpBgCqeQYAq3kGAKxpBgCtaQYArp0GAK+VBgBWSwCApksAgKpLAICuSwCAsksAgLZLAIC6SwCAvksAgLj1BgC5+QYAuo0GALuFBgC8nQYAvYUGAL6FBgC/tQYAsO0GALH1BgCy/QYAs/UGALTtBgC10QYAttEGALfRBgCz8QYAghUAAIG1AACAtQAAwksAgLbpBgC14QYAvtQDALsxBgC6KQYAxksAgMpLAIC/FQYAvikGAL0hBgC8KQYAzksAgKO1BgCGyAAAh8gAAKatBgDSSwCA1ksAgKWlBgCqbQYAq3UGANpLAIDeSwCArm0GAK9RBgCsbQYArWUGAKg1BgCpOQYAqoEGAKuBBgCsgQYArYEGAK6BBgCvtQYA4ksAgOZLAIDqSwCA7ksAgPJLAID2SwCA+ksAgP5LAIC4nQYAua0GALqlBgC7aQEAvHkBAL15AQC+aQEAv2kBALDRBgCx0QYAstEGALPRBgC0tQYAtb0GALa1BgC3rQYAswkGAAJMAIAGTACACkwAgA5MAIC2AQYAtQkGABJMAIC7FQYAuhUGABZMAIAaTACAv3kGAL5xBgC9BQY
AvAUGAB5MAICjTQYAIkwAgOZKAICmRQYAJkwAgCpMAIClTQYAqlEGAKtRBgAuTACAMkwAgK41BgCvPQYArEEGAK1BBgCB6QMAgN0DAISIAwCC4QMAhrA8AIeIAgC+VAMAOkwAgD5MAIBCTACARkwAgEpMAIBOTACAUkwAgFZMAIBaTACA4/AGAF5MAIDhMAYAhAA8AGJMAIBmTACAakwAgG5MAIByTACAhTQ9AHZMAIB6TACA77AHAH5MAICCTACAhkwAgIpMAICOTACAkkwAgL7EPACWTACAgp0BAIGdAQCAnQEAqA0CAKllAgCqfQIAq3UCAKxZAgCtWQIArpkDAK+ZAwCw6QMAsekDALL5AwCz+QMAtOkDALXpAwC2XQMAt1UDALhtAwC5dQMAunUDALtFAwC8XQMAvTUDAL4xAwC/KQMAmkwAgJ5MAICiTACAqkwAgOFgAwDv9AMA40QCAK5MAICyTACA4zwDAO/0NwDh/AEAtkwAgLpMAIC+TACAwkwAgIZkPwCHaD0AhTQhALOZAwDGTACAtb0DALa1AwDKTACAzkwAgNJMAIC6QQIAu0ECALxBAgC9QQIAvkECAL9BAgDWTACA2kwAgN5MAIDiTACA5kwAgOpMAIDuTACA7/gBAIRoPADhPAYA8kwAgOMcBgD2TACA+kwAgP5MAIACTQCAoxUDAAZNAIAKTQCADk0AgBJNAICmOQMApTEDABpNAICrzQIAqs0CAL5kPgAeTQCAr80CAK7NAgCtzQIArM0CAKgdPgCpJT4Aqi0+AKslPgCsPT4ArSU+AK4tPgCvJT4ApkwAgIL1PwCB5T8AgOU/ABZNAIAiTQCAhgAEAIecAwC4LT4AuTE+ALoxPgC7MT4AvNE+AL3RPgC+0T4Av80+ALBdPgCxIT4Asjk+ALM5PgC0KT4AtSk+ALYZPgC3FT4As6U+ACZNAIAqTQCALk0AgDJNAIC2pT4AtbU+ADZNAIC75T4Aupk+ADpNAIA+TQCAv+0+AL7tPgC97T4AvO0+AEJNAICj4T4ARk0AgEpNAICm4T4ATk0AgFJNAICl8T4Aqt0+AKuhPgBWTQCAWk0AgK6pPgCvqT4ArKk+AK2pPgCPBSUAsyU+AF5NAIBiTQCAtik+AGZNAIBqTQCAtSk+ALp9PgC7RT4Abk0AgHJNAIC+tT4Av70+ALxdPgC9vT4An304AJ5lOQCd8TgAnFE0AJtZNQCaUTUAmfEwAJgNMQCXZTEAlsEwAJVZLQCUTS0Ak+EsAJLZKQCRWSkAkPEoALSlGQC13RgAdk0AgIQIAACwkRUAsQEVALIBGACzvRkAgA0AAIGtAwCCpQMAek0AgKNhAACiHT0AoZk9AKBxPACkxQUApUEEAKYBCACn4QkANkwAgKH1AQCi6QEAo90FAKwBEACtxREArtkRAK85EACoZQgAqQEMAKrZDQCrCQ0AijEuAIuhMwB+TQCAgk0AgI65MwCPETYAjB0yAI1NMgCCJSYAg6krAL5kAwCEYAQAhqEvAIcVLgCEGSoAhZEqAJphPgCb7T4AhsgEAIfcAwCKTQCA4Vw+AJyJAwDjAD4Akmk2AJN5NwCOTQCA7xg+AJZNOwCXuT8AlME7AJVdOgCpnT0AqIk9AKu5PQCqrT0Arak9AKyhPQCvyT0ArqE9AL7oBACSTQCAlk0AgJpNAICeTQCAok0AgKZNAICqTQCAuVk9ALhRPQC7eT0AumU9AL1pPQC8YT0Avx09AL5hPQCxgT0AsLk9ALNpPQCyiT0AtXk9ALRxPQC3aT0AtnE9AKMhPACuTQCAsk0AgLZNAIC6TQCApi08AKUtPAC+TQCAq0E8AKp5PADCTQCAxk0AgK+5PACusTwArbk8AKxZPADKTQCAzk0AgLN9AwDSTQCAtdkDANZNAIDaTQCAttEDAN5NAIDiTQCAu8UDALrFAwC9uQMAvLUDAL+tAwC+sQMA5k0AgOpNAIDuTQCA71wDAIAVAACBHQA
AgjEAAO+MPgCE7AQA4fw+APJNAIDjHD4A+k0AgOGUAQD+TQCA4yAAAKP1AwACTgCAh+gEAIZsBAAGTgCAplkDAKVRAwAKTgCAq00DAKpNAwAOTgCAEk4AgK8lAwCuOQMArTEDAKw9AwCGTQCA9k0AgBZOAIAaTgCAHk4AgCJOAIAmTgCAKk4AgKhxBgCpTQYAqo0GAKuFBgCsnQYArYUGAK6NBgCvhQYAsP0GALFBBwCyQQcAs0EHALRBBwC1SQcAtnEHALdxBwC4IQcAuSEHALolBwC7OQcAvCkHAL0VBwC+HQcAv/0HALMlBgAuTgCAMk4AgDZOAIA6TgCAtiUGALU1BgA+TgCAu6UHALoZBgBCTgCARk4AgL+tBwC+pQcAvbUHALy1BwBKTgCAo2EGAE5OAIBSTgCApmEGAFZOAIBaTgCApXEGAKpdBgCr4QcAXk4AgGJOAICu4QcAr+kHAKzxBwCt8QcAqLEGAKm9BgCqzQYAq90GAKzNBgCt/QYArvUGAK8VAQCA+QEAgc0BAILFAQC+ZAIAhpAAAIcAAQBqTgCAbk4AgLjRAQC52QEAuuEBALvhAQC8kQEAvZ0BAL6VAQC/iQEAsG0BALF1AQCyfQEAs3UBALRtAQC18QEAtvEBALfxAQCzRQYAZk4AgHJOAIB2TgCAek4AgLZ9BgC1RQYAfk4AgLuxAQC6qQEAgk4AgIZOAIC/NQEAvqkBAL2hAQC8qQEAik4AgKMBBgCOTgCAkk4AgKY5BgCWTgCAmk4AgKUBBgCq7QEAq/UBAJ5OAICiTgCAru0BAK9xAQCs7QEAreUBAOEoAQCmTgCA41ACAKpOAICuTgCAsk4AgLZOAIC6TgCAvk4AgMJOAIDGTgCAyk4AgIFxAACAGQAA75wCAIJ5AADOTgCA0k4AgITIAgCzxQMA2k4AgLXFAwC2xQMAvhADAIbADACHRAwAuqkDALulAwC8vQMAvaEDAL6hAwC/lQMArhEGAK8ZBgCsAQYArQEGAKqlBgCrEQYAqEU5AKlxOQDeTgCA4k4AgOZOAIDqTgCA7k4AgPJOAID2TgCA+k4AgL7tBwC/TQcAvNEHAL3lBwC63QcAu8EHALg1BgC51QcAtjkGALcNBgC0JQYAtTkGALIxBgCzPQYAsFEGALFRBgCoOQIAqTkCAKqBAgCrgQIArIECAK2JAgCusQIAr7ECAIRsDQD+TgCAvmANAAJPAIAGTwCACk8AgA5PAIASTwCAuE0BALlVAQC6XQEAu1UBALxNAQC9dQEAvn0BAL91AQCwoQIAsa0CALKlAgCzuQIAtKkCALWdAgC2lQIAt3kBAOFUBgDh1AcA4zgGAOOwBwAWTwCAGk8AgB5PAIAiTwCAhOQMACZPAIAqTwCALk8AgDJPAIA2TwCA72wAAO/kBwCjSQIAOk8AgD5PAIBCTwCASk8AgKZJAgClSQIATk8AgKspAgCqJQIAhkgMAIfcDACvGQIAri0CAK0tAgCsMQIAqFEOAKmlDgCqrQ4Aq6UOAKy9DgCtpQ4Arq0OAK+lDgCA5Q8Age0PAILlDwBGTwCAUk8AgFZPAIBaTwCAXk8AgLjVDwC53Q8AutUPALvpDwC8+Q8AvfkPAL7pDwC/6Q8AsN0OALFBDwCyRQ8As10PALRFDwC1TQ8AtkUPALftDwCzJQ4AYk8AgGZPAIBqTwCAbk8AgLYlDgC1NQ4Ack8AgLuFDwC6GQ4Adk8AgHpPAIC/iQ8AvoEPAL2JDwC8kQ8Afk8AgKNhDgCCTwCAhk8AgKZhDgCKTwCAjk8AgKVxDgCqXQ4Aq8EPAJJPAICWTwCArsUPAK/NDwCs1Q8Arc0PAKjRDgCp2Q4AqjkBAKs5AQCsKQEArSkBAK6dAQCvlQEAmk8AgJ5PAICiTwCApk8AgIANAACBtQAAgr0AAKpPAIC4lQEAuZ0BALqhAQC7oQEAvHEAAL1xAAC+cQAAv3EAALDtAQCx9QEAsvUBALPFAQC03QE
AtbUBALaxAQC3sQEArk8AgLJPAICzuQEAvsACALWpAQC2TwCAuk8AgLahAQCGgAEAh8QBALs5AQC6IQEAvRkBALwpAQC/eQEAvhEBAKPxAQC+TwCA1k4AgMJPAIDGTwCApukBAKXhAQDKTwCAq3EBAKppAQDOTwCA0k8AgK8xAQCuWQEArVEBAKxhAQDWTwCA2k8AgN5PAIDiTwCA4agBAOZPAIDjQAIA6k8AgL8oFQDuTwCA73QCAPJPAID2TwCA+k8AgP5PAIACUACABlAAgON0DwCEiAMA4TQOAApQAIAOUACAElAAgBZQAICADQAAgRUAAIIRAAAaUACAHlAAgO+kDwAiUACAKlAAgKgZAwCpQQMAqkUDAKtdAwCsTQMArX0DAK51AwCvnQAAhaQVAL58AwCGCAQAhxwDAC5QAIAyUACANlAAgDpQAIC49QAAuf0AALr1AAC7jQAAvIEAAL2BAAC+gQAAv4EAALDlAACx7QAAsuUAALP5AAC07QAAtdEAALbVAAC3zQAAPlAAgEJQAIBGUACAs8ECAEpQAIC1yQIAtvECAE5QAIBSUACAVlAAgLotAQC7JQEAvD0BAL0hAQC+JQEAvxkBAKapAgCESAIAWlAAgKWRAgBeUACAo5kCAGJQAIBmUACArn0BAK9BAQCsZQEArXkBAKp1AQCrfQEAalAAgG5QAIByUACAdlAAgHpQAIB+UACA7+QAAIJQAICGUACAilAAgOMQDgCOUACA4VgOAJJQAICALQAAgREAAIIVAAC+sAUAs3UBAJpQAICHFAUAhmwEAJ5QAIC21QAAtWUBAKJQAIC7/QAAuvUAAKZQAICqUACAv6EAAL69AAC93QAAvN0AAKh9BgCptQYAqr0GAKu1BgCsrQYArRUHAK4dBwCvFQcAllAAgK5QAICyUACAtlAAgLpQAIC+UACAwlAAgMZQAIC4OQcAuTkHALrJBwC7yQcAvNkHAL3ZBwC+zQcAv8UHALBxBwCxeQcAskkHALNJBwC0OQcAtSUHALYhBwC3IQcAozUGAMpQAIDOUACA0lAAgNZQAICmlQcApSUGANpQAICrvQcAqrUHAN5QAIDiUACAr+EHAK79BwCtnQcArJ0HAOZQAIDqUACA7lAAgPJQAID2UACAgj0AAIE9AACAPQAA+lAAgP5QAIACUQCAhKADAL6kAwAGUQCAhvgAAIfgAACoxQYAqdUGAKrVBgCr5QYArP0GAK0xAQCuMQEArzEBAApRAIAOUQCAElEAgBZRAIAaUQCAHlEAgCJRAIAmUQCAuN0BALntAQC65QEAu40BALyVAQC9nQEAvpUBAL+NAQCwUQEAsVEBALJRAQCzUQEAtPUBALX9AQC29QEAt+0BALNdBgAqUQCALlEAgDJRAIA2UQCAtrEBALV1BgA6UQCAu5UBALqVAQA+UQCAQlEAgL85AQC+MQEAvYUBALyFAQClLQYARlEAgEpRAICm6QEATlEAgFJRAICjBQYAVlEAgK3dAQCs3QEAr2EBAK5pAQBaUQCAJlAAgKvNAQCqzQEAXlEAgGJRAICExAMAvwD0AGZRAICCPQAAgT0AAIA9AABqUQCAblEAgHJRAIC+YAMAelEAgH5RAICCUQCAhlEAgIbgHACHAAMA7wwHAIpRAICOUQCAklEAgJZRAICaUQCAnlEAgKJRAICmUQCAqlEAgOHABgCuUQCA4ywHALJRAIC2UQCAulEAgL5RAIDCUQCAxlEAgMpRAIDOUQCA0lEAgKiBAwCpgQMAqoEDAKuBAwCsgQMArYEDAK6BAwCvgQMAsEUDALFNAwCyRQMAs10DALRNAwC1fQMAtnUDALcZAwC4KQMAuTUDALo9AwC7MQMAvAEDAL31AAC+/QAAv+0AALMpAgDWUQCA2lEAgN5RAIDiUQCAtiECALUpAgCEUB0Au6kCALqhAgDqUQCA7lEAgL+ZAgC+qQIAvakCALyxAgCBTQAAgE0AAO+cAwCCXQA
AhvAcAId4HQC+EB0A8lEAgPZRAID6UQCA/lEAgAJSAIDhkAEABlIAgONgAwAKUgCADlIAgBJSAIAWUgCAGlIAgB5SAIAiUgCAJlIAgO+UAQCE7BwA4XAGACpSAIDjUAEALlIAgDJSAIA2UgCAOlIAgKPpAgA+UgCAQlIAgEZSAIBKUgCApuECAKXpAgBOUgCAq2kCAKphAgBSUgCAvqgcAK9ZAgCuaQIArWkCAKxxAgCoMR4AqTEeAKoxHgCrMR4ArF0eAK1FHgCuTR4Ar0UeAOZRAICCzR8AgfUfAID9HwBWUgCAWlIAgIYcAACH+AMAuMUeALnNHgC6xR4Au90eALzFHgC9zR4AvsUeAL9ZHwCwPR4AsQUeALINHgCzBR4AtB0eALUBHgC2BR4At/0eALO5HgBeUgCAYlIAgGZSAIBqUgCAtsUeALXVHgBuUgCAu8EeALr5HgByUgCAdlIAgL/FHgC+2R4AvdEeALzZHgB6UgCAo/0eAH5SAICCUgCApoEeAIZSAICKUgCApZEeAKq9HgCrhR4AjlIAgJJSAICunR4Ar4EeAKydHgCtlR4AqCkeAKkpHgCqVR4Aq20eAKx1HgCtfR4ArnUeAK9pHgCWUgCAmlIAgJ5SAICiUgCAplIAgKpSAICuUgCAslIAgLjpHgC59R4Auv0eALv1HgC87R4AvZEeAL6RHgC/kR4AsB0eALHlHgCy7R4As+UeALT9HgC15R4Atu0eALflHgCz3R4AtlIAgLpSAIC+UgCAwlIAgLb9HgC1/R4AhFgBALshHgC62R4AvigAAMpSAIC/IR4AvjkeAL0xHgC8OR4AgU0AAIBNAACjlR4Agl0AAKW1HgDGUgCAzlIAgKa1HgB2UQCA0lIAgKtpHgCqkR4ArXkeAKxxHgCvaR4ArnEeAIYABACHRAMAs4ECANZSAIC1gQIA2lIAgN5SAIC2gQIAiAAAAOJSAIC74QIAuu0CAL3lAgC8+QIAv9ECAL7lAgDmUgCA6lIAgIREAwC+jAMA4UgCAO5SAIDjAAIA7/wfAPJSAIDhPB4A79wCAONgHwD2UgCA+lIAgP5SAIACUwCAqQUCAKixAgCrBQIAqgUCAK0NAgCsBQIArzUCAK41AgCEbAUABlMAgApTAIAOUwCAElMAgBZTAIAaUwCAHlMAgLnpAwC44QMAu/kDALrhAwC96QMAvOEDAL9dAwC+4QMAsSkCALAlAgCzPQIAsiECALUZAgC0LQIAt9kDALYRAgAiUwCAJlMAgCpTAICjhQMALlMAgKWFAwCmhQMAMlMAgDpTAIA+UwCAqukDAKvlAwCs/QMAreEDAK7hAwCv1QMAgEkAAIFVAACCVQAAo6kCAL6YBAClQQEApkEBAEJTAICG4AUAh+AFAKotAQCrOQEArBEBAK0FAQCuDQEArwUBAEZTAIBKUwCATlMAgO/cAABSUwCAVlMAgFpTAIDviB4AhCwHAOHsHgBeUwCA4xweAGJTAIDhlAEAZlMAgOMwAACzJQIAhWDmAGpTAIBuUwCAclMAgLbNAQC1zQEAdlMAgLu1AQC6oQEAelMAgH5TAIC/iQEAvoEBAL2JAQC8nQEANlMAgIJTAICGUwCAilMAgI5TAICSUwCAllMAgJpTAICoAQcAqQEHAKp1BwCrrQcArLUHAK29BwCuqQcAr6kHALDZBwCx7QcAsvkHALP1BwC0mQcAtZkHALaJBwC3gQcAuIkHALmJBwC6bQAAu2UAALx9AAC9ZQAAvm0AAL9lAACBCQAAgJkAAJ5TAICCHQAAolMAgKZTAICqUwCArlMAgKgNBQCpfQUAqk0FAKuhBgCspQYAra0GAK6dBgCv/QYAsIUGALGRBgCyqQYAs70GALSlBgC1rQYAtqUGALd5BgC4SQYAuUkGALpZBgC7WQYAvEkGAL1JBgC++QcAv/kHALNdBgCyUwCAhigCAIcsAQC2UwCAtp0GALWdBgC6UwCAu4kGALq9BgC+UwC
AwlMAgL/9BgC+/QYAvYEGALyNBgDGUwCAoxkGAMpTAIDOUwCAptkGANJTAIDWUwCApdkGAKr5BgCrzQYA2lMAgN5TAICuuQYAr7kGAKzJBgCtxQYAqBkBAKkZAQCqjQAAq50AAKyNAACtvQAArrUAAK/dAADiUwCA5lMAgOpTAIDuUwCA8lMAgPZTAID6UwCA/lMAgLhpAAC5aQAAunkAALt5AAC8aQAAvWkAAL7dAwC/1QMAsKkAALGpAACyvQAAs7UAALSZAAC1mQAAtlkAALdZAAC+LAIAAlQAgAZUAIAKVACADlQAgBJUAIAaVACAHlQAgIAtAACBNQAAgj0AACJUAICGkAwAh+gCACZUAIAqVACAs0UDAC5UAIAyVACANlQAgDpUAIC2fQMAtUUDAD5UAIC7LQMAui0DAEJUAIBGVACAvx0DAL4dAwC9IQMAvCkDAKvNAwCqzQMASlQAgE5UAICv/QMArv0DAK3BAwCsyQMAo6UDAFJUAIBWVACAWlQAgF5UAICmnQMApaUDAGJUAIBmVACAalQAgG5UAIByVACAdlQAgII9AACBPQAAgD0AAHpUAIB+VACAglQAgIRgAwCG0AwAhzADAIpUAICOVACAvkQCAJJUAICWVACAmlQAgOEAAACeVACA46gGAKJUAICE7AwAplQAgO/QAwCqVACArlQAgLJUAIC2VACAulQAgLNtAQC+VACAwlQAgMZUAIDKVACAthEBALVlAQDOVACAuz0BALo1AQDSVACA1lQAgL/9AQC+/QEAvRUBALwVAQDaVACA4fwGAN5UAIDjPAcA4lQAgOZUAIDqVACA7lQAgPJUAIC+bAwA+lQAgP5UAIACVQCABlUAgApVAIDvFAYAgV0AAIBdAACj5QEAgm0AAKXtAQAOVQCAElUAgKaZAQCHqAwAhuQMAKu1AQCqvQEArZ0BAKydAQCvdQEArnUBAKgZDgCpGQ4AqiUOAKs1DgCsLQ4ArVEOAK5RDgCvUQ4AhlQAgPZUAIAWVQCAGlUAgB5VAIAiVQCAJlUAgCpVAIC47Q4AufUOALr1DgC7jQ4AvJUOAL2dDgC+lQ4Av40OALAxDgCxOQ4AsgEOALMBDgC0+Q4AtfkOALbdDgC31Q4AqHkOAKl5DgCqjQ8Aq4UPAKydDwCtgQ8AroUPAK+5DwAuVQCAMlUAgDZVAIA6VQCAPlUAgEJVAIBGVQCASlUAgLiRDwC5mQ8AuqEPALuhDwC8UQ8AvV0PAL5JDwC/SQ8AsM0PALHVDwCy3Q8As9UPALTNDwC1sQ8AtrEPALexDwCzBQ4ATlUAgFJVAIBWVQCAWlUAgLYBDgC1FQ4AXlUAgLsRDgC6CQ4AYlUAgISgAQC/dQ4AvgkOAL0BDgC8CQ4AgmkAAKNBDgCAWQAAgVEAAKZFDgC+WAEAZlUAgKVRDgCqTQ4Aq1UOAIbIAACHrAEArk0OAK8xDgCsTQ4ArUUOAGpVAIBuVQCAclUAgHZVAIB6VQCAflUAgBZUAICCVQCAqAkOAKkJDgCqGQ4AqxkOAKwJDgCtYQ4ArmEOAK+VAQCw7QEAsfUBALL9AQCz9QEAtO0BALV1AQC2fQEAt3UBALhNAQC5VQEAul0BALtVAQC8TQEAvfEAAL7xAAC/8QAAhlUAgIpVAICOVQCAklUAgJZVAIDj6A4AmlUAgOE0DgC+AAQA79wPAJ5VAICiVQCAplUAgKpVAICuVQCAslUAgLPxDQC2VQCAulUAgL5VAIDCVQCAtoENALXhDQDGVQCAu1ECALpJAgDKVQCAzlUAgL/RAgC+SQIAvUECALxJAgCjMQ0A0lUAgISIAwDaVQCA3lUAgKZBDQClIQ0A4lUAgKuRAgCqiQIA5lUAgOpVAICvEQIArokCAK2BAgCsiQIAgKkAAIGpAACCTQAA7lUAgOFkEgDjTAIA4wgLAOGsAQDyVQCA7zwCAO8YFgD2VQCAhlAGAIdIAwD6VQCA/lUAgKiBAgCpgQI
AqoECAKuBAgCsgQIArYECAK6FAgCvHQEAAlYAgAZWAIAKVgCADlYAgBJWAIAWVgCAGlYAgIS4BQC4dQEAuX0BALp1AQC7CQEAvBkBAL0ZAQC+CQEAvwEBALBlAQCxbQEAsmUBALN9AQC0aQEAtV0BALZVAQC3TQEAHlYAgCJWAIAmVgCAKlYAgC5WAIAyVgCA7zQAAO/ADgDhXA4A4UwPAOOUAADjnA4ANlYAgIJlAACBfQAAgH0AADpWAIA+VgCAvsQHALNFAgBCVgCAtUUCALZNAgBKVgCAhkAGAIeQBAC67QEAu+UBALz9AQC95QEAvuEBAL/VAQCflQgAngUIAJ3dDQCcPQwAmzEMAJr1DQCZ7RAAmD0QAJfVEQCWsRUAlQUUAJTlFQCTtRkAkjEYAJE5GACQDRwAj2EcANZVAICz1QYATlYAgLX9BgBGVgCAUlYAgLaRBgBWVgCAWlYAgLuVBgC6lQYAvVUHALxVBwC/VQcAvlUHAF5WAIBiVgCAqo0GAKuFBgCsnQYArYUGAK6BBgCvtQYAhKgAAGZWAIBqVgCAoyUFAG5WAIClJQUApi0FAHJWAIB2VgCAelYAgH5WAICCVgCAhlYAgIpWAICOVgCAklYAgJZWAICaVgCAnlYAgKJWAICjqQUAotEEAKHZBACgZQUAgiEdAIM1HQCmVgCAqlYAgIaVGACH3RQAhBkZAIUZGQCKDRUAi7EUAK5WAICyVgCAjsURAI/VDACMzRAAjR0RAJJhDQCTdQ0AvkwAALpWAICWxQkAl80EAJSNDACVXQkAmkEFAJtBBQCGyP8Ah0wAAIFZAACAeQAAnCEEAIJRAAChxQEAvlYAgKMB/ACi2QEApRX9AKS1/QCnufkApgH4AKkJ+AColfkAqwX1AKqt9QCtsfEArAHwAK8d8ACurfEAseHtALAB7ACzAegAsv3sALVd6QC09ekAwlYAgMZWAIDKVgCAzlYAgNJWAIDWVgCA2lYAgN5WAIDiVgCA5lYAgKiNBACplQQAqpUEAKulBACsvQQArdkEAK75BACv8QQAhGz8AOpWAIDuVgCA8lYAgPZWAID6VgCA/lYAgAJXAIC4eQUAucUFALrNBQC7xQUAvN0FAL3FBQC+zQUAv+0FALCZBACxmQQAskkFALNJBQC0WQUAtVkFALZJBQC3SQUAox0EAL7M/AAGVwCAClcAgA5XAICmWQQApTUEABJXAICrXQQAql0EABZXAIAaVwCAr50FAK6dBQCtnQUArJ0FAB5XAICznQIAIlcAgCpXAIC2UQIALlcAgDJXAIC1uQIAukkCALtVAgCGSP0Ah8D8AL41AgC/PQIAvEUCAL09AgCo3QQAqUkDAKpRAwCrbQMArHUDAK2VAwCunQMAr7kDAICNAQCB5QEAguEBADZXAIA6VwCAPlcAgEJXAIBGVwCAuJUDALmdAwC6lQMAu60DALy1AwC9vQMAvrUDAL9VAgCwyQMAsdUDALLVAwCzrQMAtLUDALW9AwC2tQMAt60DAEpXAIBOVwCAo9EDAFJXAICl9QMAVlcAgFpXAICmHQMAXlcAgGJXAICrGQMAqgUDAK1xAwCsCQMAr3EDAK55AwDhKAcAZlcAgOPkBgBqVwCA4SgGAG5XAIDjaAEAclcAgHZXAIB6VwCA71gAAH5XAICCVwCAhlcAgO/IBgCKVwCAqE39AKmB/QCq0f0Aq9H9AKzx/QCt8f0ArvH9AK/x/QAmVwCAghEAAIEZAACA0f8AjlcAgJJXAICEdAMAvnQDALh1/gC5ff4AunX+ALvF/gC83f4AvcX+AL7F/gC/9f4AsJH9ALGR/QCykf0As5H9ALRV/gC1Xf4AtlX+ALdN/gCzWf0AllcAgIasAACHRAMAmlcAgLZx/QC1ef0AnlcAgLtV/QC6Vf0AolcAgKZXAIC/mf4AvpH+AL1F/QC8Rf0AqlcAgKMd/QCuVwCAslcAgKY1/QC2VwCAulcAgKU9/QCqEf0
AqxH9AL5XAIDCVwCArtX+AK/d/gCsAf0ArQH9AKjN/wCp0f8AqtH/AKsh/gCsIf4ArSH+AK4h/gCvIf4AxlcAgMpXAIDOVwCA0lcAgNZXAIDaVwCA3lcAgOJXAIC4jf4AuZH+ALqV/gC7rf4AvLX+AL25/gC+qf4Av6n+ALDh/gCx4f4AsuX+ALP5/gC06f4AtdX+ALbd/gC3uf4As1n/AOZXAIC2VgCA6lcAgO5XAIC2of4Atan+APJXAIC7Jf4AuiX+APZXAID6VwCAvxH+AL4t/gC9Lf4AvDH+AIIZAACjHf8AgGUAAIEZAACm5f4A/lcAgAJYAICl7f4AqmH+AKth/gCEZAEAviAAAK5p/gCvVf4ArHX+AK1p/gAKWACA4zT+AA5YAIDhfP0AhrAEAIcIAwASWACAFlgAgBpYAIAeWACAhCQDAIQkBAAiWACA70j+ACZYAIAqWACAs+kCAC5YAIC+RAQAvkAFADJYAIC2nQIAtZkCADZYAIC7iQIAur0CADpYAIA+WACAv1kDAL5RAwC9WQMAvJECAKkdAgCoFQIAqyUCAKolAgCtWQIArFUCAK9NAgCuUQIAvmQGAEJYAIBGWACASlgAgE5YAIBSWACAVlgAgFpYAIC5+QMAuPEDALtNAwC68QMAvUEDALxZAwC/cQMAvkEDALEJAgCwPQIAs8kDALIBAgC12QMAtNEDALfJAwC20QMA4ZABAF5YAIDj8AAAYlgAgGZYAICCPQAAgT0AAIA9AABqWACAblgAgHJYAIB6WACAflgAgIJYAIDvLAAAhlgAgKPpAwCKWACAhugEAIdgBQCOWACApp0DAKWZAwCSWACAq4kDAKq9AwCWWACAmlgAgK9ZAgCuUQIArVkCAKyRAwCeWACAolgAgKZYAICqWACArlgAgLJYAIC2WACA71gBAISgBADhVP8AulgAgOOEAQC+WACAwlgAgMZYAIDKWACAs9kBAM5YAICFzBkA0lgAgNZYAIC28QEAtfkBANpYAIC7pQEAutkBAN5YAIDiWACAv50BAL6dAQC9pQEAvK0BAKgBBgCpDQYAqhEGAKsRBgCsMQYArTEGAK4pBgCvJQYAdlgAgILJBwCBwQcAgPEHAOZYAIDqWACAhhwAAIf8AwC47QYAufUGALr9BgC79QYAvO0GAL1RBwC+VQcAv00HALBdBgCxIQYAsjkGALMxBgC0GQYAtRkGALbdBgC31QYAo5kGAO5YAIDyWACA9lgAgPpYAICmsQYApbkGAP5YAICr5QYAqpkGAAJZAIAGWQCAr90GAK7dBgCt5QYArO0GAApZAICz8QcADlkAgBJZAIC2gQcAFlkAgBpZAIC1mQcAuo0HALtlBwAeWQCAIlkAgL59BwC/ZQcAvH0HAL11BwCoLQYAqTUGAKo9BgCrMQYArFUGAK1FBgCuRQYAr3UGACZZAIAqWQCALlkAgDJZAIA2WQCAOlkAgD5ZAIBCWQCAuOkGALn1BgC6/QYAu/UGALztBgC9kQYAvpUGAL+NBgCwDQYAseUGALLtBgCz5QYAtP0GALXlBgC27QYAt+UGAKO1BgBGWQCASlkAgE5ZAIBSWQCApsUGAKXdBgAGWACAqyEGAKrJBgBWWQCAWlkAgK8hBgCuOQYArTEGAKw5BgCASQAAgUkAAIJZAACzRQEAXlkAgLVFAQC2RQEAYlkAgIZAAACHZAAAuikBALslAQC8PQEAvSEBAL4hAQC/FQEAZlkAgGpZAICEBAMAvgAMAOMoBgDv4AIA4RAGAG5ZAIDvkAYA4zwCAHJZAIDh1AEAdlkAgHpZAIB+WQCAglkAgIZZAICKWQCAo8ECAI5ZAIClwQIAklkAgJZZAICmwQIAmlkAgJ5ZAICroQIAqq0CAK2lAgCsuQIAr5ECAK6lAgCpBQIAqLECAKsFAgCqBQIArQ0CAKwFAgCvNQIArjUCAISoDACiWQCAplkAgKpZAICuWQCAslkAgLZZAIC6WQC
AuekDALjhAwC7+QMAuuEDAL3pAwC84QMAv10DAL7hAwCxKQIAsCUCALM9AgCyIQIAtRkCALQtAgC32QMAthECAKitAgCp1QIAqtUCAKsNAQCsFQEArQkBAK4xAQCvLQEAvlkAgMJZAIDKWQCAzlkAgNJZAIDWWQCA2lkAgN5ZAIC4IQEAuSEBALrtAQC75QEAvP0BAL3lAQC+7QEAv+UBALBVAQCxXQEAslUBALMtAQC0NQEAtTkBALYtAQC3JQEAgD0BAIGlAACCrQAA79QHAOJZAIDmWQCA6lkAgO8oBwC+LAwA4fQGAO5ZAIDjkAcA8lkAgOGUAQD2WQCA4wwGALMdAgD6WQCAh0QNAIZMDQD+WQCAtskBALXdAQACWgCAu9kBALrRAQAGWgCACloAgL+9AQC+sQEAvbkBALzBAQDGWQCADloAgBJaAIAWWgCAGloAgB5aAIAiWgCAJloAgKgJDwCpCQ8AqhkPAKsZDwCsCQ8ArQkPAK6pDwCvqQ8AsNkPALHtDwCy+Q8As/UPALSVDwC1hQ8AtoUPALe1DwC4jQ8AuWEAALphAAC7YQAAvGEAAL1hAAC+YQAAv2EAAKNdDQCCLQAAgRUAAIAdAAAqWgCApokOAKWdDgAuWgCAq5kOAKqRDgAyWgCANloAgK/9DgCu8Q4ArfkOAKyBDgA6WgCAs/UPAIboAwCHvAMAtu0PAD5aAIBCWgCAteUPALp5DwC7TQ8ARloAgEpaAIC+NQ8AvyUPALxJDwC9RQ8AozEOAE5aAIBSWgCAVloAgFpaAICmKQ4ApSEOAF5aAICriQ4Aqr0OAGJaAIBmWgCAr+EOAK7xDgCtgQ4ArI0OAGpaAIBuWgCAcloAgHZaAIB6WgCAfloAgIJaAICGWgCAiloAgI5aAICSWgCAlloAgIANAACB1QAAgt0AAJpaAICoQQEAqVEBAKpRAQCrZQEArH0BAK2RAACukQAAr5EAAJ5aAICiWgCAhGQBAL5kAQCGkAEAh4QAAKpaAICuWgCAuJEAALmRAAC6kQAAu5EAALyxAAC9sQAAvrEAAL+xAACw8QAAsfkAALLBAACzwQAAtLEAALWxAAC2sQAAt7EAALPZAgCyWgCAvnADAL5EBAC2WgCAthEDALX1AgC6WgCAuz0DALo1AwC+WgCAwloAgL91AwC+dQMAvRUDALwVAwDGWgCAo50CAMpaAIDOWgCAplUDANJaAIDWWgCApbECAKpxAwCreQMA2loAgN5aAICuMQMArzEDAKxRAwCtUQMAqDkDAKk5AwCqjQAAq50AAKyNAACtvQAArrUAAK/dAADiWgCA5loAgOpaAIDuWgCA8loAgPZaAID6WgCA/loAgLhpAAC5aQAAunkAALt5AAC8aQAAvWkAAL7ZAQC/2QEAsKkAALGpAACyvQAAs7UAALSZAAC1mQAAtlkAALdZAAACWwCABlsAgApbAIAOWwCA70QAABJbAICGmAUAh+QCAOOYAACEqAIA4fgBABpbAICAOQAAgTkAAIItAAAeWwCAs0UBACJbAIAmWwCAKlsAgC5bAIC2fQEAtUUBADJbAIC7LQEAui0BADZbAIA6WwCAvx0BAL4dAQC9IQEAvCkBAD5bAIDhUA4AQlsAgOM8DwBGWwCASlsAgE5bAIBSWwCAVlsAgFpbAIDjAAAAXlsAgGJbAIBmWwCAhPQFAO/kDgCuqQEAr6kBAKydAQCtlQEAqpkBAKuZAQBqWwCAblsAgKbJAQByWwCAdlsAgKXxAQCC/QcAo/EBAID9BwCB9QcAFlsAgHpbAIB+WwCAglsAgIZbAICKWwCAhrgDAIeQAwCoDQcAqRkHAKptBwCrZQcArH0HAK1lBwCuZQcAr1UHALAtBwCxxQcAssEHALPdBwC0xQcAtc0HALbFBwC3/QcAuMUHALnJBwC62QcAu9kHALypBwC9qQcAvp0HAL+VBwCzxQcAjlsAgJJbAICWWwCAmlsAgLbFBwC11Qc
AnlsAgLshBwC6yQcAolsAgKZbAIC/KQcAviEHAL0pBwC8NQcAqlsAgKOBBwCuWwCAslsAgKaBBwC2WwCAulsAgKWRBwCqjQcAq2UHAL5bAIDCWwCArmUHAK9tBwCscQcArW0HAKgVAQCpgQEAqoEBAKuBAQCsgQEArYkBAK6xAQCvsQEAxlsAgMpbAIDOWwCA0lsAgNZbAIDaWwCA3lsAgOJbAIC4ZQAAuW0AALplAAC7fQAAvGUAAL1tAAC+ZQAAv90AALChAQCxrQEAsqUBALO5AQC0qQEAtZ0BALaVAQC3XQAA5lsAgIIdAACBHQAAgB0AAOpbAIDuWwCA8lsAgL5YAQCErAIA9lsAgIcIAQCGjAEA+lsAgKZaAID+WwCAAlwAgLNJAQAGXACAClwAgA5cAIASXACAtkkBALVJAQAWXACAuykBALolAQAaXACAHlwAgL8ZAQC+LQEAvS0BALwxAQC+2AMAIlwAgO/4BgAmXACAKlwAgC5cAIDv4AIAMlwAgOGUAQA2XACA43QCADpcAIDhmAUAPlwAgOMMBwBCXACARlwAgEpcAICjwQIAhIwDAKXBAgBOXACAUlwAgKbBAgBWXACAWlwAgKuhAgCqrQIAraUCAKy5AgCvkQIArqUCAKgxAwCpPQMAqjUDAKtJAwCsWQMArVkDAK5JAwCvQQMAgMUAAIEJAACCGQAAXlwAgGJcAIBqXACAh2wDAIYcHAC47QAAufEAALr1AAC7jQAAvJUAAL2BAAC+gQAAv70AALAJAwCxCQMAsu0AALPhAAC04QAAteEAALblAAC32QAAblwAgHJcAIB2XACAs7ECAHpcAIC13QIAttUCAH5cAICCXACAhlwAgLrBAgC7wQIAvDUBAL05AQC+KQEAvykBAKaNAgCKXACAjlwAgKWFAgCSXACAo+kCAJZcAICaXACArnEBAK9xAQCsbQEArWEBAKqZAgCrmQIAnlwAgKJcAICmXACA4YQGAKpcAIDjJAYArlwAgOGUAQCyXACA4ywAAL7oHQC2XACAulwAgO/IAACE/B0AvvAcAL5cAIDvSAcAwlwAgMZcAIDKXACAzlwAgIEdAACAHQAA0lwAgIIFAACGQBwAh8QcANpcAIDeXACA4lwAgOZcAIDqXACA7lwAgKi1HgCpBR8Aqg0fAKsFHwCsAR8ArQkfAK45HwCvOR8A1lwAgPJcAID2XACA+lwAgP5cAIACXQCABl0AgApdAIC4yR8AudUfALrRHwC76R8AvPkfAL3tHwC+mR8Av5kfALAlHwCxLR8AsjkfALM1HwC0LR8AtQ0fALYFHwC3/R8As4UfAA5dAIASXQCAFl0AgBpdAIC2iR8AtYkfAB5dAIC76R8AuuEfACJdAIAmXQCAv8kfAL7pHwC94R8AvO0fACpdAICjwR8ALl0AgDJdAICmzR8ANl0AgDpdAIClzR8AqqUfAKutHwA+XQCAQl0AgK6tHwCvjR8ArKkfAK2lHwCo6R4AqekeAKr5HgCr+R4ArOkeAK3pHgCuPQEArzUBAID5AQCBzQEAgsUBAIRgAgBGXQCASl0AgIdoAQCGnAAAuNEBALnZAQC64QEAu+EBALyRAQC9nQEAvpUBAL+JAQCwTQEAsVUBALJdAQCzVQEAtE0BALXxAQC28QEAt/EBALNxHgBOXQCAUl0AgFZdAIBaXQCAtmkeALVhHgBeXQCAu5EBALqJAQBiXQCAZl0AgL81AQC+iQEAvYEBALyJAQBqXQCAZlwAgKM5HgBuXQCApSkeAHJdAIB2XQCApiEeAHpdAIB+XQCAq9kBAKrBAQCtyQEArMEBAK99AQCuwQEAgl0AgIZdAICKXQCAjl0AgJJdAICWXQCAml0AgJ5dAICiXQCApl0AgKpdAICuXQCAsl0AgLpdAIC+XQCAvnADAOHkHgCESAIA4+gfAIQABACAeQAAgXkAAIJpAADCXQCAhsAEAIdEAwDGXQCAyl0AgM5dAIDSXQC
A7yAfANZdAIDaXQCA3l0AgOJdAIDvSAIA5l0AgOpdAIDuXQCA8l0AgL7oBAD2XQCA+l0AgP5dAIACXgCA4ZABAAZeAIDj6AIAs0kDAApeAIAOXgCAEl4AgBZeAIC2SQMAtUkDABpeAIC7LQMAuiUDAB5eAIAiXgCAvxUDAL4VAwC9IQMAvCkDAKg1AgCpgQIAqoECAKuBAgCsgQIArYkCAK6xAgCvsQIAgP0BAIHNAQCCxQEAKl4AgIaQBACHBAUALl4AgIRwBAC4SQEAuUkBALpZAQC7WQEAvEkBAL1JAQC+eQEAv3kBALChAgCxqQIAsr0CALO1AgC0kQIAtZECALZ5AQC3eQEAMl4AgDZeAIA6XgCAPl4AgEJeAIBGXgCASl4AgO/QHgC+6AQA4VweAE5eAIDjkAAAUl4AgFZeAIBaXgCAXl4AgKNJAgBiXgCAZl4AgGpeAIBuXgCApkkCAKVJAgByXgCAqy0CAKolAgB2XgCAel4AgK8VAgCuFQIArSECAKwpAgCoNQYAqT0GAKpVBgCrZQYArH0GAK1lBgCubQYAr2EGACZeAIB+XgCAgl4AgIZeAICADQAAgbEAAIKxAACKXgCAuOkGALnpBgC6+QYAu/UGALyVBgC9nQYAvpUGAL+NBgCw4QYAseEGALLhBgCz/QYAtOUGALXtBgC25QYAt9kGALPdBgCOXgCAkl4AgJZeAICaXgCAtuUGALX1BgCeXgCAuyUGALolBgCGmAAAh6wAAL8pBgC+IQYAvSkGALw1BgCiXgCAo5kGAKZeAICqXgCApqEGAK5eAICyXgCApbEGAKphBgCrYQYAtl4AgLpeAICuZQYAr20GAKxxBgCtbQYAqC0GAKk9BgCqiQYAq4kGAKyZBgCtmQYArokGAK+JBgC+XgCAwl4AgMZeAIDKXgCAzl4AgNJeAIDWXgCA2l4AgLiNBgC5lQYAupUGALulBgC8vQYAvXEBAL5xAQC/cQEAsPkGALHNBgCy2QYAs9kGALTJBgC1yQYAtr0GALe1BgCzAQYA3l4AgOJeAIDmXgCA6l4AgLYZBgC1EQYA7l4AgLsJBgC6PQYA8l4AgPZeAIC/DQYAvg0GAL0NBgC8DQYA+l4AgKNFBgC2XQCA/l4AgKZdBgACXwCAhFgAAKVVBgCqeQYAq00GAL5oAQAGXwCArkkGAK9JBgCsSQYArUkGAIDBAwCByQMAgt0DAKPNAgAKXwCApdkCAKbNAgAOXwCAhoANAIeUAwCqxQIAqw0DAKwVAwCtHQMArhUDAK8NAwDhnBcA4xgGAOMUAwDhNAYA7xgCABJfAIAWXwCAGl8AgOPQAgAeXwCA4VACACJfAIAmXwCA7ywGAO/kJQAqXwCArE0CAK1RAgCuUQIAr2UCAKgBAgCpCQIAqlkCAKtVAgCE7A0ALl8AgDJfAIA2XwCAvvgNADpfAIA+XwCAQl8AgLxRAwC9WQMAvmEDAL9hAwC47QMAuVEDALpRAwC7UQMAtM0DALXVAwC23QMAt9UDALAdAgCx1QMAst0DALPVAwDjyAAARl8AgOG4AQBKXwCAhFQPAE5fAIBSXwCAVl8AgKHpAgCgFQYAo6UDAKINAwDvIAAAWl8AgF5fAIBiXwCAZl8AgGpfAICFNCYAs40DAG5fAIC1mQMAto0DAHJfAICGwA8Ah5QNALqFAwC7TQIAvFUCAL1dAgC+VQIAv00CAHpfAIB+XwCAgl8AgIZfAICKXwCAjl8AgI/d6wDvxAYAvuAPAOGMBgCSXwCA44AGAID1AACB5QAAguUAAJZfAICZbR8AmMUfAJvJGwCaeRoAnXUaAJzFGwCf+QcAnhkGAJFpFgCQsesAk20XAJLNFwCV0RMAlGkSAJdREgCWzRMAg1XkAIJB5AB2XwCAml8AgIeNHQCGkRgAhTkYAISVGQCLERwAigUcAJ5fAICiXwCAj4UVAI6ZEACNORAAjJUdAJNRFACSRRQApl8AgKpfAICXYQk
AlnUIAJWdCQCU+RUAm0EMAJqtDQCuXwCAsl8AgLZfAIC6XwCAvl8AgJzxDAChbQ0Awl8AgKMBBACihQAApZkEAKSRBACnGTgApsUFAKkJOACoKTgAq4k8AKoBPACtATAArB08AK8pMACunTAAseE0ALABNACzASgAsv00ALXZKAC00SgAxl8AgMpfAIDOXwCA0l8AgNZfAIDaXwCAgB0AAIEJAACC2QEA3l8AgKgRDwCpGQ8Aql0PAKtVDwCsTQ8ArXEPAK51DwCvbQ8A4l8AgOpfAICGiAAAhxABAO5fAIDyXwCA9l8AgPpfAIC4TQ4AuVEOALpRDgC7UQ4AvGUOAL1tDgC+ZQ4Avx0OALAdDwCxwQ8AssEPALPBDwC0xQ8Atc0PALbFDwC3eQ4As9UPAP5fAIACYACABmAAgApgAIC28Q8AtcUPAA5gAIC7BQ8AutkPABJgAIAWYACAvwkPAL4BDwC9FQ8AvBUPABpgAICjkQ8AHmAAgCJgAICmtQ8AJmAAgCpgAIClgQ8Aqp0PAKtBDwAuYACAMmAAgK5FDwCvTQ8ArFEPAK1RDwCogQ0AqYENAKqBDQCrgQ0ArIENAK2BDQCusQ0Ar6ENADZgAIA6YACAPmAAgEJgAIBGYACAgrkAAIG9AACAvQAAuDUCALk9AgC6zQIAu5UCALyNAgC9tQIAvr0CAL+1AgCwbQIAsU0CALJFAgCzJQIAtD0CALUdAgC2FQIAtw0CAEpgAIBOYACAswENAFJgAIC1AQ0AWmAAgISUAwC2CQ0AviwEAF5gAIC7gQIAuqECAL35AgC8mQIAv9ECAL7xAgBiYACAZmAAgGpgAICjRQ0AbmAAgKVFDQCmTQ0AcmAAgIbgBACHpAQAquUCAKvFAgCs3QIArb0CAK61AgCvlQIAqCUCAKk1AgCqPQIAqzUCAKwtAgCtkQIArpECAK+RAgB2YACAemAAgH5gAICCYACAzAAAAIZgAICKYACAjmAAgLiZAgC5rQIAuqUCALttAQC8dQEAvX0BAL51AQC/bQEAsPECALH5AgCywQIAs8ECALSxAgC1vQIAtrUCALepAgCSYACA44QOAJZgAIDh9A4AmmAAgJ5gAICiYACApmAAgIQgBQCqYACArmAAgLJgAIC2YACA7+wOALpgAIC+YACAs/UCAMJgAICG6AQAh4wEAL5cBAC2UQIAteUCAMpgAIC7fQIAunUCAM5gAIDSYACAvzkCAL41AgC9VQIAvFUCAKM1BQBWYACAxmAAgNZgAIDaYACAppEFAKUlBQDeYACAq70FAKq1BQDiYACA5mAAgK/5BQCu9QUArZUFAKyVBQCA+QcAgfkHAIKNBwCzjQYA6mAAgLWdBgC2iQYA7mAAgPJgAID2YACAuk0HALtFBwC8XQcAvUEHAL5BBwC/QQcA+mAAgP5gAIDmXwCAAmEAgAZhAIAKYQCADmEAgBJhAICoNQYAqQEGAKppBgCraQYArHkGAK1lBgCuZQYAr50HALDlBwCx7QcAsuUHALP5BwC06QcAtekHALZZBwC3VQcAuHEHALlxBwC6cQcAu3EHALxVBwC9XQcAvlUHAL9NBwCjwQcAFmEAgBphAIAeYQCAImEAgKbFBwCl0QcAJmEAgKsJBgCqAQYAKmEAgC5hAICvDQYArg0GAK0NBgCsEQYAgGkAAIFpAACCBQAAMmEAgL6YAQCEmAEANmEAgDphAICGADwAh8QBAD5hAIBCYQCARmEAgEphAIBOYQCAUmEAgKhdBgCpbQYAqmUGAKuBAQCsgQEArYkBAK6xAQCvsQEAVmEAgFphAIBeYQCAYmEAgGZhAIBqYQCAbmEAgHJhAIC4VQEAuV0BALpVAQC7yQAAvNkAAL3ZAAC+yQAAv8EAALCxAQCxuQEAsokBALOJAQC0cQEAtXEBALZ1AQC3bQEAs+0FAHZhAIB6YQCAfmEAgIJhAIC2CQIAtQkCAIZhAIC7fQIAunUCAIphAICOYQC
Av7UCAL61AgC9XQIAvF0CAL5gAgCjqQUAkmEAgJZhAICmTQIAmmEAgJ5hAIClTQIAqjECAKs5AgCiYQCAhOADAK7xAgCv8QIArBkCAK0ZAgC+iDwAqmEAgKotAwCrJQMArD0DAK0lAwCuLQMAryUDAID1AACB/QAAgsEAAKPBAwCuYQCApcEDAKbBAwCyYQCAhmA8AIdUAwC2YQCAumEAgL5hAIDjqAIAwmEAgOGkAQDGYQCA71wCAMphAIDOYQCA0mEAgNZhAIDaYQCA3mEAgOJhAIDjjAcA5mEAgOE8BADqYQCA7mEAgPJhAID2YQCAhCACAPphAID+YQCAAmIAgAZiAIDvbAcACmIAgA5iAICzLQIAhEQ9ABJiAIAaYgCAHmIAgLYtAgC1LQIAImIAgLvJAgC6wQIAJmIAgCpiAIC/yQIAvsECAL3JAgC80QIA4XgHAOPAAADjOAYA4VwGAICpAACBqQAAgtEAAC5iAIAyYgCANmIAgL6kPAA6YgCAPmIAgO8cAADvkAYAQmIAgIZgPACHBD0ARmIAgLNxAQBKYgCAtRkBALYJAQBOYgCAUmIAgFZiAIC6AQEAuwEBALwBAQC9AQEAvgEBAL8BAQCohT4AqbU+AKq1PgCrxT4ArN0+AK3FPgCuwT4Ar/0+AFpiAIBeYgCAYmIAgGZiAIBqYgCAbmIAgHJiAIB2YgCAuFE/ALlRPwC6UT8Au1E/ALx1PwC9fT8AvnU/AL9tPwCwiT4AsYk+ALKZPgCzmT4AtIk+ALWJPgC2eT8At3U/AKZhAICjOT4AemIAgBZiAICmQT4AfmIAgIJiAIClUT4Aqkk+AKtJPgCGYgCAimIAgK5JPgCvST4ArEk+AK1JPgCASQAAgVEAAIJRAACzkT8AjmIAgLW5PwC2RT8AkmIAgIZAAACHBAMAukU/ALtdPwC8TT8AvT0/AL4pPwC/IT8AqE0+AKlVPgCqVT4Aq2U+AKx9PgCtiT4Arrk+AK+5PgCWYgCAmmIAgJ5iAICiYgCApmIAgKpiAICuYgCAsmIAgLhhAQC5YQEAumEBALthAQC8YQEAvWEBAL5hAQC/YQEAsM0+ALHVPgCy1T4As6U+ALShPgC1qT4Atpk+ALeZPgCj3T4AtmIAgLpiAIC+YgCAwmIAgKYJPgCl9T4AxmIAgKsRPgCqCT4AymIAgM5iAICvbT4ArmU+AK1xPgCsAT4A0mIAgNZiAIDaYgCA3mIAgOJiAIDmYgCA6mIAgO5iAICAOQAAgTkAAIIFAADyYgCAvrgBAIS4AQD6YgCA/mIAgKitAgCp1QIAqtUCAKstAwCsNQMArT0DAK41AwCvLQMAAmMAgAZjAIAKYwCADmMAgBJjAIAWYwCAGmMAgB5jAIC46QMAuekDALqJAwC7iQMAvJkDAL2ZAwC+iQMAv4kDALBVAwCxXQMAslUDALPpAwC0+QMAtfkDALbpAwC34QMAs10CACJjAICGKAQAh8wDACZjAIC2vQMAtb0DACpjAIC7mQMAupEDAC5jAIAyYwCAvz0DAL49AwC9PQMAvIEDAIUAFACjGQIANmMAgDpjAICm+QMAPmMAgEJjAICl+QMAqtUDAKvdAwBGYwCASmMAgK55AwCveQMArMUDAK15AwDjVD4A4dw/AOHQPgDjPD4ATmMAgO8cAABSYwCAVmMAgFpjAIDjwAAAXmMAgOHUAQDvYD4AYmMAgGpjAIDvRD8AgGEAAIFtAACCfQAAhAAFAIbwBACHnAUAvhAFAG5jAIByYwCAdmMAgHpjAIB+YwCAgmMAgIZjAICKYwCAjmMAgLiJPQC5iT0Aupk9ALuRPQC8uT0Avbk9AL7RPQC/0T0AsAU+ALENPgCyBT4Asx0+ALQFPgC1DT4AtgU+ALe5PQConT4Aqa0+AKqlPgCrvT4ArKU+AK2tPgCupT4Ar30+AISsBAC+rAQAkmMAgJZjAICaYwCAnmMAgKJjAICmYwCAqPkFAKn5BQCqKQY
AqykGAKw5BgCtOQYArikGAK8pBgBmYwCAqmMAgK5jAICyYwCAtmMAgLpjAIC+YwCAwmMAgLiNBgC5kQYAupEGALulBgC8vQYAvUUHAL5BBwC/QQcAsFkGALFZBgCy7QYAs/0GALTtBgC13QYAttUGALe1BgCzoQYAxmMAgMpjAIDOYwCA0mMAgLa5BgC1sQYA2mMAgLudBgC6nQYA1mMAgPZiAIC/GQYAvikGAL0pBgC8OQYAglEAAKPlBgCAQQAAgUEAAKb9BgDeYwCA4mMAgKX1BgCq2QYAq9kGAIZIAACHbAAArm0GAK9dBgCsfQYArW0GAKg5BgCpWQYAqmkGAKtpBgCseQYArXkGAK5pBgCvaQYA5mMAgOpjAIDuYwCA8mMAgPZjAID6YwCA/mMAgAJkAIC4ZQEAuW0BALplAQC7fQEAvGUBAL1tAQC+ZQEAv9kBALAZBgCxGQYAsoEGALOBBgC0gQYAtYEGALaBBgC3gQYAs+EGAAZkAIAKZACADmQAgBJkAIC2+QYAtfEGABZkAIC73QYAut0GABpkAIAeZACAv0UGAL5FBgC9VQYAvFUGACJkAICjpQYAJmQAgCpkAICmvQYALmQAgDJkAICltQYAqpkGAKuZBgA2ZACAOmQAgK4BBgCvAQYArBEGAK0RBgConQIAqdECAKrRAgCrLQMArDUDAK09AwCuNQMAry0DAD5kAIBCZACAvmQCAEpkAIBOZACAUmQAgFZkAIBaZACAuOkDALnpAwC6iQMAu4UDALydAwC9gQMAvoEDAL+1AwCwVQMAsV0DALJVAwCz6QMAtPkDALX5AwC26QMAt+EDAIBtAwCBpQAAgq0AALNVAgBeZACAtbEDALaxAwBiZACAhOACAGZkAIC6nQMAu5UDALyNAwC9MQMAvjEDAL8xAwCjGQIAamQAgIVwaQBuZACAcmQAgKb9AwCl/QMAdmQAgKvZAwCq0QMAhkgMAIe8AwCvfQMArn0DAK19AwCswQMAemQAgH5kAICCZACAhmQAgO+wBgDvxAMAimQAgI5kAIDjfAYA45QDAOG4BwDh3AEAkmQAgJZkAICaZACAnmQAgKJkAICmZACAhEQCAL5YDQCADQAAgTUAAII9AACqZACArmQAgLJkAICGyAwAh1wNALpkAIC+ZACAwmQAgMZkAIDKZACAzmQAgNJkAIDWZACA2mQAgN5kAIDiZACA74AGAISsDQDh7AYA5mQAgONcBgDqZACA7mQAgPJkAID2ZACAs/UBAPpkAID+ZACAAmUAgAZlAIC2RQEAteUBAAplAIC7LQEAuiEBAA5lAIASZQCAv/UAAL71AAC9JQEAvC0BAKgtDgCpNQ4Aqj0OAKs1DgCsLQ4ArYUOAK6FDgCvuQ4AtmQAgBZlAIAaZQCAHmUAgIAZAACBGQAAggUAACJlAIC4WQ8AuVkPALp5DwC7eQ8AvGkPAL1pDwC+GQ8AvxkPALClDgCxqQ4AsrkOALOxDgC0cQ8AtXEPALZxDwC3cQ8Apb0OAL6IAwAqZQCAph0OACZlAIAuZQCAo60OADJlAICtfQ4ArHUOAK+tDwCurQ8ARmQAgDZlAICrdQ4AqnkOALO5DwA6ZQCAhmgAAIcMAwA+ZQCAtlEPALVZDwBCZQCAu3UPALp1DwBGZQCASmUAgL9FDwC+RQ8AvVEPALxlDwCocQ4AqXEOAKpxDgCrcQ4ArJEOAK2RDgCukQ4Ar5EOAE5lAIBSZQCAVmUAgFplAIBeZQCAYmUAgGZlAIBqZQCAuIUOALmNDgC6hQ4Au50OALyNDgC9vQ4AvrUOAL95AQCw8Q4AsfEOALLxDgCzxQ4AtMEOALXBDgC2wQ4At8EOAKP5DgBuZQCAcmUAgHZlAIB6ZQCAphEOAKUZDgB+ZQCAqzUOAKo1DgCCZQCAhmUAgK8FDgCuBQ4ArREOAKwlDgCADQAAgRUAAIIdAACKZQCAjmUAgJJlAICElAEAvpQBAIZABwCH5AA
AmmUAgJ5lAICiZQCApmUAgKplAICuZQCAqIkCAKmRAgCqlQIAq7kCAKzVAgCtxQIArsUCAK/1AgCyZQCAtmUAgLplAIC+ZQCAvnwDAMJlAIDGZQCAymUAgLh9AwC5wQMAusEDALvBAwC8wQMAvckDAL7xAwC/8QMAsI0CALFFAwCyTQMAs0UDALRdAwC1RQMAtk0DALdFAwCzHQIAzmUAgNJlAIDWZQCA2mUAgLZFAgC1XQIA3mUAgLuBAwC6SQIA4mUAgOZlAIC/gQMAvpkDAL2RAwC8mQMA6mUAgKNZAgDuZQCA8mUAgKYBAgD2ZQCA+mUAgKUZAgCqDQIAq8UDAP5lAIACZgCArt0DAK/FAwCs3QMArdUDAIDZAQCB7QEAguUBAO+4DgAKZgCA4cQBAISYAgDj1AAADmYAgL7sBAASZgCA7wgAABZmAIDhxA8AGmYAgONkDgCGAAUAh2gFAB5mAICzvQIAImYAgLWtAgC2pQIAJmYAgCpmAIAuZgCAukEBALtBAQC8RQEAvU0BAL5FAQC/+QEAMmYAgDZmAIA6ZgCAPmYAgEJmAIBGZgCASmYAgO/gAQCEbAQA4dQOAE5mAIDjHA4AUmYAgFZmAIBaZgCAXmYAgKMxAgBiZgCAhCQHAGZmAIBqZgCApikCAKUhAgBuZgCAq80BAKrNAQByZgCAemYAgK91AQCuyQEArcEBAKzJAQCo6QUAqekFAKr5BQCr+QUArOkFAK3pBQCuOQYArzkGAAZmAICCzQcAgfUHAID9BwB2ZgCAfmYAgIYYAwCHkAMAuNEGALnZBgC64QYAu+EGALyRBgC9nQYAvpUGAL+JBgCwSQYAsUkGALJdBgCzVQYAtE0GALXxBgC28QYAt/EGALDhBwCx4QcAsgkHALMJBwC0GQcAtRkHALYJBwC3CQcAuDkHALkNBwC6GQcAuxkHALwJBwC9CQcAvn0HAL9xBwCCZgCAlmUAgIZmAICKZgCAjmYAgJJmAICWZgCAmmYAgKjxBwCpxQcAqsEHAKvdBwCsyQcArb0HAK6pBwCvoQcAsykGAJ5mAICiZgCApmYAgKpmAIC2XQYAtSEGAK5mAIC7RQYAukUGALJmAIC2ZgCAv70GAL69BgC9vQYAvL0GALpmAICjbQYAvmYAgMJmAICmGQYAxmYAgMpmAIClZQYAqgEGAKsBBgDOZgCA0mYAgK75BgCv+QYArPkGAK35BgCobQYAqbEBAKpJAQCrRQEArF0BAK1FAQCuTQEAr0UBANZmAICCHQAAgR0AAIAdAADaZgCA3mYAgOJmAIC+VAEAuIEAALmNAAC6hQAAu5kAALyJAAC9vQAAvrUAAL99AACwPQEAseEAALLhAACz4QAAtOEAALXpAAC20QAAt9EAALsFAwC62QIAhiwCAIcsAwC/DQMAvgUDAL0VAwC8FQMAs+ECAOpmAIDuZgCAhCwDAPJmAIC25QIAtfUCAPZmAICqnQIAq0EDAPpmAID+ZgCArkEDAK9JAwCsUQMArVEDAAJnAICjpQIABmcAgApnAICmoQIADmcAgBJnAIClsQIAqakAAKihAACrtQAAqr0AAK3dAACs3QAAr/EAAK79AAC+LBwAFmcAgBpnAIAeZwCAImcAgCZnAIAqZwCALmcAgLl9AAC4fQAAu80BALrNAQC93QEAvN0BAL/NAQC+zQEAsZUAALCJAACzTQAAspUAALVdAAC0XQAAt00AALZNAAAyZwCANmcAgDpnAIA+ZwCAQmcAgEZnAIBKZwCATmcAgIA5AACBOQAAggUAAFJnAIBaZwCAXmcAgIf4AgCGfB0A4bgEAL7IHADjQAYAYmcAgGZnAIBqZwCAbmcAgHJnAIB2ZwCAemcAgH5nAICCZwCAhmcAgIpnAIDvsAcAjmcAgJJnAICWZwCAmmcAgO/IAACeZwCAomcAgKZnAIDvQAYAqmcAgOH8BgCuZwCA4xwGALJnAIDhlAEAtmcAgONkBgCAEQA
AgRkAAIIpAACz/QEAumcAgLWdAQC2lQEAvmcAgMJnAICEbB0AuoUBALuZAQC8iQEAvVEBAL5RAQC/UQEAozEeAFZnAIDGZwCAymcAgM5nAICmWR4ApVEeANJnAICrVR4AqkkeAIYIAwCHbAMAr50eAK6dHgCtnR4ArEUeANZnAICzCR8A2mcAgN5nAIC2CR8A4mcAgOZnAIC1CR8AugUfALsNHwDqZwCA7mcAgL4FHwC/CR8AvBUfAL0NHwCw5R8Ase0fALLlHwCz/R8AtOUfALXpHwC2GR8AtxkfALgpHwC5NR8Auj0fALs1HwC8ER8AvR0fAL4JHwC/BR8A8mcAgPZnAIDmZgCA+mcAgP5nAIACaACABmgAgApoAICo0R8AqdEfAKqlHwCrvR8ArKUfAK2tHwCupR8Ar50fAKNNHgAOaACAEmgAgBZoAIAaaACApk0eAKVNHgAeaACAq0keAKpBHgAiaACAJmgAgK9NHgCuQR4ArUkeAKxRHgCADQAAgRUAAIIdAAAqaACALmgAgDJoAICEtAEAvrQBAL/oAQA6aACAhkgHAIc0AACEvAYAPmgAgEJoAIC+tAYAqI0BAKmVAQCqlQEAq80BAKzZAQCt2QEArs0BAK/FAQBGaACASmgAgE5oAIBSaACAVmgAgFpoAIBeaACAYmgAgLgdAQC5wQAAusEAALvBAAC8wQAAvckAAL7xAAC/8QAAsIkBALGJAQCyKQEAsykBALQ9AQC1JQEAti0BALclAQC7bQIAum0CAGZoAIBqaACAv8ECAL7ZAgC93QIAvN0CALM9AgBuaACAcmgAgHZoAICE/AYAtnkCALVxAgB6aACAqikCAKspAgB+aACAgmgAgK6dAgCvhQIArJkCAK2ZAgCGaACAo3kCAIpoAICOaACApj0CAJJoAICWaACApTUCAIJtJwCDjSoAhqgFAIdsAwCGmS4Ah80vAIQRLgCFmS4AiiESAIspEgCaaACAnmgAgI6RFgCPHRYAjBESAI0RFgCScRoAk+UaAKJoAIDvlHYAlvEeAJflHgCUSRoAlRkeAJopAgCb4QIAqmgAgK5oAICyaACA4SASAJzxAgDjIBYAnyEfAJ7BHwCdmRsAnC0bAJuhGwCavRcAmTkXAJixFwCXiRMAlqkTAJWpEwCUdS4AkzkvAJIxLwCRsS8AkDUrAI+tJgDjeB8A0gAAAOFcHwCCmQEAtmgAgIDxAQCB8QEAvqgHALpoAIC+aACAwmgAgIS8BgDvLB8AxmgAgMpoAIDhpB4A48wAAON8HgDhvAEAzmgAgNJoAIDWaACAhJwGANpoAIC+bAYA3mgAgOJoAIDmaACA7xAAAO8EHgDqaACA7mgAgPJoAID2aACA+mgAgP5oAIACaQCABmkAgAppAICAPQAAgQkAAILJBwAOaQCAo/kDAKLxAwChMQMAoM0fALBJcQCxAXwAsgl8ALMhfQC0AXgAtRV4ADZoAICmaACAEmkAgL4oDgCGDAAAh4wDABZpAIAaaQCAHmkAgCJpAIAmaQCAoV0AAKJVAACjfQAApAEMAKUVDACm9QwApwEIAKghCACpxQgAqgF0AKsJdACsAXQArR11AK55cACveXAAqOUFAKnxBQCq8QUAqy0FAKw1BQCtPQUArjUFAK8tBQAqaQCALmkAgDJpAIA2aQCAOmkAgD5pAIBCaQCARmkAgLj9BgC5jQYAuoUGALutBgC8uQYAvbkGAL6tBgC/pQYAsFUFALFdBQCyVQUAs+UGALT9BgC10QYAttEGALfRBgCzeQQASmkAgE5pAIBSaQCAVmkAgLa9BAC1vQQAWmkAgLuZBAC6kQQAXmkAgGJpAIC/FQcAvjkHAL0xBwC8gQQAZmkAgKM9BABqaQCAbmkAgKb5BAByaQCAdmkAgKX5BACq1QQAq90EAHppAIB+aQCArn0HAK9RBwCsxQQArXUHAKhpBwCpaQcAqnkHAKvZBgCs9QYArf0GAK71BgCv5QY
AgMkAAIHJAACCBQAAgmkAgIZwDwCHNAAAimkAgI5pAIC4fQYAuQUGALoNBgC7BQYAvB0GAL0FBgC+DQYAvwUGALCdBgCxdQYAsn0GALN1BgC0UQYAtV0GALZVBgC3TQYAs/EEAJJpAICWaQCAmmkAgJ5pAIC2fQUAtX0FAKJpAIC7sQUAulkFAKZpAICqaQCAv5kFAL6VBQC9oQUAvKkFAK5pAICjtQQAsmkAgLZpAICmOQUAumkAgL5pAIClOQUAqh0FAKv1BQDCaQCAxmkAgK7RBQCv3QUArO0FAK3lBQCpuQIAqLECAKvJAgCqsQIArTUCAKw1AgCvNQIArjUCAMppAIDOaQCA0mkAgNZpAIDaaQCA3mkAgOJpAIDmaQCAuekDALjZAwC7iQMAuuEDAL2dAwC8nQMAv4EDAL6JAwCxVQIAsFUCALNVAgCyVQIAtfkDALTxAwC36QMAtvEDALM9AwDqaQCA7mkAgPJpAID6aQCAtrEDALW5AwD+aQCAu5UDALqVAwCGiAwAh6ANAL85AgC+MQIAvYUDALyFAwACagCAo3kDAAZqAIAKagCApvUDAA5qAIASagCApf0DAKrRAwCr0QMAFmoAgBpqAICudQIAr30CAKzBAwCtwQMAgIUAAIGNAACChQAA79AGAOOwBwDj9AQA4QgHAOHsBADvOAYA7yAEAL6kDAAeagCAImoAgOGEAQAmagCA49wGACpqAIAuagCAhMANALPJAQAyagCAtdkBALbJAQA2agCAOmoAgD5qAIC6xQEAu60BALy5AQC9uQEAvq0BAL+lAQCwLQ4AsUUOALJBDgCzQQ4AtEUOALVNDgC2cQ4At3EOALiBDgC5gQ4AuoEOALuBDgC8gQ4AvYEOAL6BDgC/gQ4A9mkAgEJqAIBGagCASmoAgIZpAIBOagCAUmoAgFZqAICo2Q0AqdkNAKptDgCrZQ4ArH0OAK1lDgCuZQ4Ar1UOAKOFDgCCLQAAgRUAAIAdAABaagCApoUOAKWVDgBeagCAq+EOAKqJDgBiagCAZmoAgK/pDgCu4Q4ArfUOAKz1DgBqagCAs4UPAIZoAACHHAMAtoUPAG5qAIByagCAtZEPALqNDwC7SQ8AdmoAgHpqAIC+MQ8AvzEPALxJDwC9RQ8AqBEOAKkZDgCqSQ4Aq0UOAKxdDgCtQQ4ArkEOAK91DgB+agCAgmoAgIZqAICKagCAjmoAgJJqAICWagCAmmoAgLihDgC5oQ4Aug0BALsFAQC8HQEAvQEBAL4BAQC/AQEAsA0OALHJDgCy2Q4As9UOALSxDgC1sQ4AtqkOALehDgCjwQ4AnmoAgKJqAICmagCAqmoAgKbBDgCl1Q4ArmoAgKsNDgCqyQ4AsmoAgLZqAICvdQ4ArnUOAK0BDgCsDQ4AumoAgL5qAIDCagCAxmoAgIANAACBNQAAgj0AAMpqAIDOagCA0moAgISEAQC+hAEAhjAHAIf4AADaagCA3moAgKjBAgCp0QIAqtECAKvlAgCs/QIArTUDAK49AwCvNQMA4moAgOZqAIDqagCA7moAgPJqAID2agCA+moAgP5qAIC40QMAudkDALrhAwC74QMAvJEDAL2RAwC+kQMAv5EDALBNAwCxVQMAsl0DALNVAwC0TQMAtfEDALbxAwC38QMAu7EDALqpAwACawCAvoQDAL8VAwC+qQMAvaEDALypAwCzeQIABmsAgAprAIAOawCAEmsAgLaVAwC1VQIAFmsAgKrtAwCr9QMAGmsAgB5rAICu7QMAr1EDAKztAwCt5QMAImsAgKM9AgAmawCAKmsAgKbRAwAuawCAMmsAgKURAgA2awCAgiEAAIEVAACAFQAA7wQAAISUAgA6awCAPmsAgOPYAABCawCA4fgBAEprAIBOawCAUmsAgFZrAIBaawCAhmAFAIcIBQBeawCAs20BAGJrAIC1fQEAtnUBAGZrAIBqawCAbmsAgLpRAQC7UQEAvPkBAL3RAQC+0QE
Av9EBAHJrAICjpQEAdmsAgHprAICmvQEAfmsAgIJrAICltQEAqpkBAKuZAQCGawCAimsAgK4ZAQCvGQEArDEBAK0ZAQCOawCA4fQOAJJrAIDjFA4A9AAAAOF8DACWawCA41AKAJprAICeawCAviAEAO8wDQCiawCApmsAgIQ0BADvrA4AsDkGALE5BgCygQYAs6kGALS5BgC1uQYAtqkGALehBgC46QYAuekGALrJBgC7xQYAvN0GAL3BBgC+wQYAvz0HAEZrAICCHQAAgR0AAIAdAACqawCArmsAgLJrAIDWagCAqJkFAKmZBQCqSQYAq0kGAKxZBgCtWQYArkkGAK9JBgCorQcAqbUHAKq9BwCrtQcArK0HAK3dBwCuyQcAr8EHALZrAIC6awCAhogDAIcQAwC+awCAwmsAgMZrAIDKawCAuG0HALkFBwC6AQcAuxUHALwxBwC9MQcAvikHAL8pBwCwgQcAsYEHALJpBwCzZQcAtH0HALVhBwC2YQcAt1UHALM1BgDOawCA0msAgNZrAIDaawCAtl0GALUlBgDeawCAu0UGALpFBgDiawCA5msAgL+lBgC+uQYAvbEGALy9BgDqawCAo3EGAO5rAIDyawCAphkGAPZrAID6awCApWEGAKoBBgCrAQYA/msAgAJsAICu/QYAr+EGAKz5BgCt9QYAqCUBAKk1AQCqPQEAqzUBAKwtAQCtkQAArpEAAK+RAAAGbACACmwAgA5sAIASbACAFmwAgIK9AwCBvQMAgL0DALiZAAC5rQAAuqUAALttAAC8dQAAvX0AAL51AAC/bQAAsPEAALH5AACywQAAs8EAALSxAAC1vQAAtrUAALepAAAabACAHmwAgCJsAICEgAIAvhwCACpsAICG+HwAh8wCAISsAwAubACAMmwAgDZsAIA6bACAPmwAgEJsAIBGbACAs/UCAEpsAIBObACAkgAAAFJsAIC2UQMAteUCAFZsAIC7fQMAunUDAFpsAIBebACAvzkDAL41AwC9VQMAvFUDAKM1AgBibACAZmwAgGpsAIBubACAppEDAKUlAgBybACAq70DAKq1AwB2bACAemwAgK/5AwCu9QMArZUDAKyVAwC+wAMAfmwAgIJsAICGbACAgA0AAIE1AACCPQAAimwAgI5sAICSbACAhsh8AIcAAwCabACAnmwAgKJsAICmbACAqmwAgK5sAICybACAtmwAgLpsAIC+bACAwmwAgO/0AwCE7HwA4ZQBAMZsAIDjMAMAymwAgM5sAIDSbACA1mwAgLNpAQDabACA3mwAgOJsAIDmbACAtmEBALVpAQDqbACAuykBALohAQDubACA8mwAgL8dAQC+HQEAvSUBALwtAQD2bACA+mwAgP5sAICjpQEAAm0AgKWlAQCmrQEAvlR8AIaAfACH7HwAqu0BAKvlAQCs4QEArekBAK7RAQCv0QEACm0AgOGcBgCEBH8A4yQGAOPUBgAObQCA4TAEABJtAIDvlAcAgnUAAIFhAACAaQAAFm0AgBptAIAebQCA7+wGALiNfgC5lX4AupV+ALulfgC8vX4AvdF+AL7RfgC/0X4AsGV+ALFtfgCyeX4As3F+ALRZfgC1WX4Atr1+ALe1fgCoVX4AqWF+AKphfgCrYX4ArGF+AK1hfgCuYX4Ar2F+ACJtAICWbACAJmwAgCZtAIAGbQCAKm0AgC5tAIAybQCAqHF+AKlxfgCqcX4Aq3F+AKyRfwCtkX8ArpF/AK+RfwA2bQCAOm0AgD5tAIBCbQCARm0AgEptAIBObQCAUm0AgLiFfwC5jX8AuoV/ALudfwC8jX8Avb1/AL61fwC/XX8AsPF/ALHxfwCy8X8As8V/ALTBfwC1wX8AtsF/ALfBfwCz+X8AVm0AgFptAIBebQCAYm0AgLYRfgC1GX4AZm0AgLs1fgC6NX4Aam0AgG5tAIC/BX4AvgV+AL0RfgC8JX4AghUAAKO9fwCAYQAAgWEAAKZVfgBybQC
AvpABAKVdfgCqcX4Aq3F+AHZtAIB6bQCArkF+AK9BfgCsYX4ArVV+AKhBfgCpUX4AqlV+AKt9fgCsZX4ArW1+AK75AQCv8QEAhgAAAIc0AQB+bQCAgm0AgIZtAICKbQCAjm0AgJJtAIC4dQEAuX0BALp1AQC7yQAAvNkAAL3ZAAC+yQAAv8EAALCVAQCxnQEAspUBALNNAQC0VQEAtV0BALZVAQC3TQEAs919AJZtAICabQCAnm0AgKJtAIC27X0Ate19AKZtAIC7WQIAulECAKptAICubQCAv5kCAL6RAgC9mQIAvEECALJtAICjmX0Atm0AgLptAICmqX0Avm0AgMJtAIClqX0AqhUCAKsdAgDGbQCAym0AgK7VAgCv3QIArAUCAK3dAgDObQCA0m0AgNZtAIDabQCAgB0AAIEJAACCOQAA3m0AgOJtAIC+AAQA6m0AgO5tAIDybQCA9m0AgPptAID+bQCAhIwDAAJuAICHCAMAhuwEAAZuAIDviAIACm4AgA5uAICEbAQA4zQCABJuAIDhVAEAFm4AgBpuAIAebgCAIm4AgKhtAgCprQIAqqUCAKu9AgCspQIAra0CAK6lAgCvGQEAvqwEACZuAIAqbgCALm4AgDJuAIA2bgCAOm4AgD5uAIC4DQEAuREBALoRAQC7JQEAvD0BAL3VAQC+3QEAv9UBALBpAQCxaQEAsnkBALNxAQC0WQEAtVkBALY5AQC3NQEAsy0CAEJuAIBGbgCASm4AgE5uAIC2LQIAtS0CAFJuAIC7rQEAuq0BAFpuAIBebgCAv50BAL6dAQC9pQEAvK0BAIBNAACBVQAAglUAAO9sAABibgCA7+x/AO+8fgBmbgCA4RB/AOPUfwDj2H4A4ex/AGpuAIDhTH4Abm4AgOMkfgDmbQCAVm4AgKsFBgCqBQYArQ0GAKwFBgCvNQYArjUGAIYAAwCHKAMAo4UFAHJuAIClhQUAdm4AgHpuAICmhQUAs/EGAH5uAICCbgCAhm4AgIpuAIC26QYAteEGAI5uAIC7vQYAur0GAJJuAICWbgCAv4kGAL6BBgC9iQYAvJUGAKgpBgCpKQYAqjkGAKs5BgCsKQYArSkGAK5dBgCvTQYAmm4AgJ5uAICibgCApm4AgKpuAICubgCAsm4AgLZuAIC46QcAuekHALr5BwC7+QcAvOkHAL3pBwC+XQcAv1UHALA5BgCxOQYAsgEGALMdBgC0BQYAtQ0GALYFBgC32QcAo7EHAIItAACBFQAAgB0AALpuAICmqQcApaEHAL5uAICr/QcAqv0HAMJuAICEpAIAr8kHAK7BBwCtyQcArNUHAL7MAQCzlQYAxm4AgMpuAIC2qQYAzm4AgNJuAIC1rQYAulkBALshAQCGyAAAhwwBAL4hAQC/KQEAvDEBAL0xAQCoKQYAqSkGAKpZBgCrUQYArGEGAK1tBgCutQEAr6kBAITgAQDWbgCA2m4AgN5uAIDibgCA5m4AgOpuAIDubgCAuGEBALlhAQC6YQEAu2EBALxhAQC9YQEAvmEBAL9hAQCw2QEAsaEBALKhAQCzoQEAtKEBALWpAQC2kQEAt5EBAKPRBQDybgCA9m4AgPpuAID+bgCApu0FAKXpBQACbwCAq2UCAKodAgAGbwCACm8AgK9tAgCuZQIArXUCAKx1AgAObwCAEm8AgBZvAIAabwCAHm8AgCJvAIAmbwCAKm8AgIA9AACBCQAAghkAAC5vAIAybwCAOm8AgL48AwA+bwCAhgAMAIcUAwBCbwCAs9UDAEZvAIC1PQMAtjUDAEpvAIBObwCAv4wKALoRAwC7EQMAvLUAAL29AAC+tQAAv60AAFJvAIDjdAEAVm8AgOG8AQBabwCAXm8AgGJvAIBmbwCAam8AgG5vAIBybwCAdm8AgHpvAIDvdAIAfm8AgIJvAICoTQIAqVECAKpRAgCrqQIArLkCAK25AgCuqQIAr6kCAIRsDQCGbwCAim8AgI5vAICSbwC
Alm8AgJpvAIC+dA0AuG0BALkFAQC6DQEAuwUBALwdAQC9BQEAvg0BAL8FAQCw2QIAsdkCALJtAQCzZQEAtH0BALVlAQC2ZQEAt1UBAOG4AQDhUAcA47QAAON8BwCAqQAAgQkAAII5AACebwCAom8AgKpvAICubwCAsm8AgO4AAAC2bwCA7wAAAO9kBgCGYAwAh+QMAKORAgC6bwCApXkCAL5vAIDCbwCApnECAMZvAIDKbwCAq1UCAKpVAgCt+QEArPEBAK/pAQCu8QEApm8AgDZvAIDObwCA0m8AgNZvAIDabwCA3m8AgOJvAICoVQ4AqVkOAKqhDgCrvQ4ArK0OAK2VDgCu+Q4Ar/UOALCRDgCxkQ4AspEOALORDgC0sQ4AtbEOALaxDgC3sQ4AuJEOALmdDgC6lQ4Au0kPALxZDwC9WQ8AvkkPAL9JDwCzCQ4A5m8AgOpvAIDubwCA8m8AgLY1DgC1BQ4A9m8AgLt1DgC6dQ4A+m8AgP5vAIC/VQ4AvlUOAL1lDgC8ZQ4AAnAAgKNNDgAGcACACnAAgKZxDgAOcACAEnAAgKVBDgCqMQ4AqzEOAISkAwC+pAMArhEOAK8RDgCsIQ4ArSEOAKilDgCprQ4AqqUOAKu5DgCs3Q4ArcEOAK7BDgCv/Q4AgO0BAIHxAQCC8QEAFnAAgIaQAQCHtAEAGnAAgB5wAIC4yQEAuckBALrZAQC70QEAvPkBAL35AQC+mQEAv5UBALCFDgCxbQEAsmUBALN9AQC0ZQEAtW0BALZlAQC3+QEAsy0OACJwAIAmcACAKnAAgC5wAIC2QQ4AtVUOADJwAIC7qQEAukEOADZwAIA6cACAv6kBAL6hAQC9qQEAvLEBAD5wAICjaQ4AQnAAgEZwAICmBQ4ASnAAgE5wAIClEQ4AqgUOAKvtAQBScACAVnAAgK7lAQCv7QEArPUBAK3tAQCoOQMAqTkDAKqNAwCrhQMArJ0DAK2FAwCuhQMAr7UDAFpwAIBecACAYnAAgGZwAIBqcACAbnAAgHJwAIB2cACAuGEAALlhAAC6YQAAu2EAALxhAAC9YQAAvmEAAL9hAACwzQMAsaUDALKhAwCzoQMAtKUDALWtAwC2kQMAt5EDAIANAACBEQAAghEAAHpwAIDv9AIAfnAAgIJwAIC+HAMA4xQCAISIAgDhgAEAinAAgI5wAICScACAh8gDAIY8BAC7AQMAumkDAJZwAICacACAvwkDAL4BAwC9FQMAvBUDALNlAwCecACAonAAgKZwAICqcACAtmUDALV1AwCucACAsnAAgLZwAIC6cACAo4kCAL5wAIClmQIApokCAMJwAICELAIAxnAAgKqFAgCr7QIArPkCAK35AgCu7QIAr+UCAMpwAIDOcACAvkQFAIRMBQDScACA1nAAgNpwAIDecACA4nAAgOZwAIDqcACA7nAAgIAZAACBGQAAggUAAPJwAIDhGA8A4VwOAOO4DgDjdAEA+nAAgP5wAIACcQCABnEAgIYABACHZAUACnEAgA5xAIAScQCAFnEAgO98DgDvqAEAs3UBABpxAIAecQCAInEAgCZxAIC2MQEAtRUBACpxAIC7HQEAuhUBAC5xAIAycQCAv+EAAL79AAC9/QAAvP0AAPZwAIA2cQCAOnEAgD5xAICGcACAQnEAgEZxAIBKcQCAqI0GAKmVBgCqnQYAq+UGAKz9BgCt0QYArtEGAK/RBgCwsQYAsbkGALJJBwCzSQcAtFkHALVFBwC2RQcAt3kHALghBwC5IQcAujkHALs5BwC8KQcAvSkHAL4ZBwC/GQcAozUGAE5xAIBScQCAVnEAgFpxAICmcQYApVUGAF5xAICrXQYAqlUGAGJxAIC+oAMAr6EHAK69BwCtvQcArL0HAIBRAACBWQAAgmEAALNVBwCF9AAAtX0HALZ1BwBmcQCAhgAcAIfkAQC6LQcAuyUHALw9BwC9JQcAviUHAL8VBwCokQYAqZEGAKqRBgCrkQY
ArLkGAK25BgCuqQYAr6kGAGpxAIBucQCAcnEAgHZxAICiIQEAozUBAKA5BQChEQQAuEkBALlJAQC6XQEAu1UBALxNAQC90QEAvtEBAL/RAQCwpQYAsa0GALKlBgCzvQYAtK0GALWdBgC2lQYAt3kBAKMZBgCPnXkAenEAgH5xAICCcQCApjkGAKUxBgCGcQCAq2kGAKphBgCKcQCAjnEAgK9ZBgCuaQYArWkGAKxxBgCeiQgAn8EFAJzJCQCdyQkAmqENAJu9DACYsQ0AmbkNAJahcQCXRXEAlEV1AJWxcQCSoXUAk7V1AJDleQCRzXkAil1yAItFcgCScQCAvoAcAI51DgCPZQ4AjLlyAI11DgCCOXoAgzl6AJZxAICacQCAhnF2AIeZdgCECXoAhW12AJptBwCbVQIAnnEAgKJxAICmcQCA4ZAAAJxZAgDjCBoAkgkPAJNlCgCqcQCA7zgWAJZ1BgCXdQYAlH0KAJU1CwCpjRYAqIUWAKsBEACqMRYArXESAKy1EgCvuS4ArgEsAKF9AgCucQCAo6EeAKKpHgClsRoApPUfAKflGwCmsRoAhMwDAIRMHACycQCAtnEAgLpxAIC+cQCAwnEAgMZxAICxASgAsNkuALONKgCy6SoAtfUmALQBJACEcB0AynEAgID9AQCBFQAAgh0AAL6AHADOcQCA0nEAgIe4AgCGPB0A2nEAgN5xAIDicQCA5nEAgOpxAIDucQCA8nEAgPZxAID6cQCA/nEAgAJyAIAGcgCA44ADAApyAIDhoAEADnIAgO+UAwAScgCAFnIAgBpyAIAecgCAInIAgCZyAIAqcgCALnIAgOE8BgAycgCA49AGADZyAIDhMAcAOnIAgOOsBgCAOQAAgRUAAIIdAADvHAYAPnIAgEJyAIC+uB8A7+gBALPpAgBKcgCAh8QcAIbsHABOcgCAtlkCALVRAgBScgCAu00CALpNAgBWcgCAWnIAgL+5AQC+2QEAvdEBALz1AQCjKR0A1nEAgEZyAIBecgCAYnIAgKaZHQClkR0AZnIAgKuNHQCqjR0AanIAgG5yAICveR4ArhkeAK0RHgCsNR4AcnIAgLNtHwB2cgCAenIAgLZlHwB+cgCAgnIAgLVtHwC6IR8AuyEfAIZyAICKcgCAviUfAL8pHwC8MR8AvTEfAKihHwCpoR8AqqEfAKuhHwCsoR8AraEfAK6hHwCvoR8AjnIAgJJyAICWcgCAmnIAgJ5yAICicgCApnIAgKpyAIC4rR8AubUfALq9HwC7tR8AvK0fAL1VHwC+UR8Av00fALChHwCxoR8AsqEfALOhHwC0pR8AtakfALadHwC3lR8AoykeAIIZAACBGQAAgLEBAK5yAICmIR4ApSkeALJyAICrZR4AqmUeAIaIAACH/AEAr20eAK5hHgCtdR4ArHUeALZyAICzmR4AunIAgL5yAIC2XQEAwnIAgMZyAIC1sR4AukkBALtJAQDKcgCAznIAgL49AQC/IQEAvDkBAL01AQCoRR4AqVUeAKpVHgCrZR4ArH0eAK2ZAQCuiQEAr4EBAISsAADScgCA1nIAgNpyAIDecgCA4nIAgOZyAIDqcgCAuK0BALllAQC6bQEAu2UBALx9AQC9ZQEAvm0BAL9lAQCwyQEAsckBALKpAQCzpQEAtL0BALWhAQC2oQEAt5UBALhpHAC5oRwAusEcALvBHAC8wRwAvcEcAL7BHAC/wRwAsIkfALGJHwCyIRwAswUcALQdHAC1fRwAtnUcALdtHACoYR8AqWEfAKphHwCrYR8ArNkfAK3ZHwCuyR8Ar8EfAO5yAIDycgCA9nIAgPpyAID+cgCAAnMAgAZzAIAKcwCADnMAgBJzAIC+AAQAo1EdABZzAICleR0AppUCABpzAIAecwCAInMAgKqBAgCrgQIArPECAK39AgCu9QIAr+kCACpzAIDh9AEALnMAgON8AQCATQAAgXUAAIJ9AAAycwCAhsAEAIekBAA2cwC
AOnMAgD5zAIBCcwCARnMAgO+MAgCoSQIAqUkCAKpdAgCrVQIArHkCAK15AgCuvQIAr7UCAISgBQBKcwCATnMAgFJzAIC+vAQAVnMAgFpzAIBecwCAuC0BALk1AQC6PQEAuzUBALwtAQC91QEAvt0BAL/NAQCwzQIAsdUCALLdAgCz1QIAtM0CALUVAQC2HQEAtxUBAOGEHgDjbB8A41wfAOFYHgBicwCAZnMAgGpzAIBucwCAcnMAgHZzAIB6cwCAfnMAgOkAAADv9B4A70weAIJzAICzlQIAhnMAgIpzAICOcwCAknMAgLa5AgC1sQIAmnMAgLtRAgC6SQIAhsgEAIesBAC/kQEAvkkCAL1BAgC8SQIAJnMAgKNRBQCecwCAlnMAgKZ9BQCicwCApnMAgKV1BQCqjQUAq5UFAKpzAICucwCAro0FAK9VBgCsjQUArYUFAICJBwCBiQcAgpkHALORBgCycwCAtbkGALapBgC2cwCAunMAgL5zAIC6TQcAu0UHALxdBwC9QQcAvkEHAL9BBwCoQQYAqU0GAKpVBgCrZQYArH0GAK1lBgCubQYAr2UGAMJzAIDGcwCAynMAgM5zAIDScwCA1nMAgNpzAIDecwCAuFkHALlZBwC6aQcAu2kHALx5BwC9eQcAvmUHAL8ZBwCwxQcAsc0HALLFBwCz2QcAtMkHALXJBwC2aQcAt2kHAKPdBwDicwCA5nMAgOpzAIDucwCApuUHAKX1BwDycwCAqwkGAKoBBgD2cwCA+nMAgK8NBgCuDQYArQ0GAKwRBgCAbQAAgQkAAIIZAAD+cwCAAnQAgISYAQC+kAEABnQAgIbAAACH5AEACnQAgA50AIASdACAFnQAgBp0AIAedACAqF0GAKmNAQCqnQEAq5UBAKy5AQCtuQEArskBAK/BAQCEoAAAInQAgCZ0AIAqdACALnQAgDJ0AIA2dACAOnQAgLh5AQC5eQEAus0AALvFAAC83QAAvcUAAL7FAAC/9QAAsIEBALGBAQCySQEAs0kBALRZAQC1WQEAtkkBALdJAQCzFQIAPnQAgEJ0AIBGdACASnQAgLY5AgC1MQIATnQAgLtFAgC6RQIAUnQAgFZ0AIC/nQIAvp0CAL2dAgC8nQIAhXw+AKNRAgBadACAXnQAgKZ9AgBidACAZnQAgKV1AgCqAQIAqwECAGp0AIBudACArtkCAK/ZAgCs2QIArdkCAIDpAACB6QAAggUAAHJ0AIC+AAwAenQAgIeoAwCGvAwAfnQAgIJ0AICGdACAinQAgI50AICSdACAlnQAgJp0AICedACAonQAgKZ0AICqdACA42ABAK50AIDhoAEAsnQAgO+IAgC2dACAunQAgL50AIDCdACAxnQAgMp0AIDOdACAqGkCAKlpAgCqeQIAq3kCAKxpAgCtaQIArr0CAK+1AgC+rAwA0nQAgNZ0AIDadACAgB0AAIEJAACCqQAA3nQAgLhRAQC5WQEAumEBALthAQC8GQEAvRkBAL4NAQC/BQEAsM0CALHVAgCy3QIAs9UCALTNAgC1cQEAtnEBALdxAQDjxAAA4XwHAOF4BgDjvAYA4nQAgIQYDQCGuAwAhzwNAL4sDwDqdACA7nQAgPJ0AIDvEAAA9nQAgPp0AIDvdAYA/nQAgAJ1AIAGdQCAs70CAAp1AIC1rQIAtqUCAA51AIASdQCAFnUAgLpFAgC7XQIAvEUCAL1NAgC+RQIAv/kBAHZ0AIClfQ0ApnUNAOZ0AIAadQCAHnUAgCJ1AICjbQ0ArJUNAK2dDQCulQ0ArykOACZ1AIAqdQCAqpUNAKuNDQCz5Q4ALnUAgDJ1AIA2dQCAOnUAgLblDgC19Q4APnUAgLuhDgC62Q4AQnUAgEZ1AIC/pQ4AvrkOAL2xDgC8uQ4AqBUOAKklDgCqLQ4AqyUOAKw9DgCtJQ4Ari0OAK8lDgCADQAAgRUAAIIdAABKdQCATnUAgFJ1AICEMAMAVnUAgLgpDgC5KQ4
AujkOALs5DgC8KQ4AvSkOAL79DwC/9Q8AsF0OALElDgCyLQ4AsyUOALQ9DgC1IQ4AtiUOALcZDgCjpQ8AWnUAgIYoAQCHTAEAXnUAgKalDwCltQ8AYnUAgKvhDwCqmQ8AZnUAgGp1AICv5Q8ArvkPAK3xDwCs+Q8AbnUAgLPpDgBydQCAdnUAgLaRDgB6dQCAfnUAgLXlDgC6sQ4Au7kOAIJ1AICGdQCAvmEBAL9hAQC8mQ4AvZkOAKglDgCpLQ4AqiUOAKs5DgCsKQ4ArVUOAK5dDgCvVQ4AinUAgI51AICSdQCAlnUAgJp1AICedQCAonUAgKZ1AIC49QEAuYEBALqBAQC7gQEAvIEBAL2JAQC+sQEAv7EBALAxDgCxOQ4AsgkOALMJDgC04QEAteEBALbhAQC3zQEAo60NAKp1AICudQCAsnUAgLZ1AICm1Q0ApaENALp1AICr/Q0AqvUNAL51AIDCdQCAryUCAK4lAgCt3Q0ArN0NAIBdAACBbQAAgmUAALNRAwC+nAMAtXkDALYZAwDKdQCAhOACAM51AIC6PQMAuzUDALwZAwC9GQMAvtkDAL/ZAwCohQMAqZUDAKqVAwCrpQMArL0DAK3VAwCu0QMAr9EDAIYABACHNAMAv6AzANJ1AIDWdQCA2nUAgN51AIDidQCAuHEDALlxAwC6cQMAu3EDALzVAAC93QAAvtUAAL/NAACwtQMAsb0DALKBAwCzgQMAtFEDALVRAwC2UQMAt1EDAO+oAwDmdQCA6nUAgO51AICEHAIA8nUAgPZ1AID6dQCAviwFAP51AIACdgCABnYAgONAAwAKdgCA4SgAAA52AICjXQIAEnYAgBZ2AIAadgCAHnYAgKYVAgCldQIAInYAgKs5AgCqMQIAJnYAgCp2AICv1QIArtUCAK0VAgCsFQIA4ygBAOEADwDhCA4A4wgOAID9AACBCQAAgjkAAC52AIAydgCAOnYAgD52AIBCdgCA7+gOAEZ2AIBKdgCA72QOALNtAQBOdgCAhugEAIcMBQBSdgCAtm0BALVtAQBWdgCAu+0AALrtAABadgCAXnYAgL/VAAC+6QAAveEAALzpAACoXQYAqWEGAKqlBgCrvQYArKUGAK2tBgCupQYArxkHADZ2AIBidgCAZnYAgGp2AIBudgCAcnYAgHZ2AIB6dgCAuHUHALl5BwC6DQcAuwUHALwdBwC9BQcAvgUHAL81BwCwaQcAsWkHALJ9BwCzdQcAtG0HALVRBwC2UQcAt1EHAKMtBgB+dgCAgnYAgIZ2AICKdgCApi0GAKUtBgCOdgCAq60HAKqtBwCSdgCAlnYAgK+VBwCuqQcAraEHAKypBwCADQAAgRUAAIIdAACadgCAnnYAgKJ2AICEVAMAvlwAAKZ2AICqdgCAhugAAIdMAwCudgCAsnYAgLZ2AIC6dgCAvnYAgOMEBADCdgCA4bQFAMZ2AIDKdgCAznYAgNJ2AIDWdgCA2nYAgN52AIDidgCA5nYAgO/sBADqdgCA7nYAgLPtBgDydgCA9nYAgPp2AID+dgCAtpEGALXhBgACdwCAu40GALqNBgAGdwCACncAgL9BAQC+WQEAvVEBALxZAQCoJQYAqS0GAKolBgCrOQYArCkGAK1RBgCuSQYAr0EGAIDNAACBCQAAghkAAA53AIASdwCAhCwBAL40AAAadwCAuP0BALlBAQC6QQEAu0EBALxBAQC9SQEAvnEBAL9xAQCwCQYAsQkGALLNAQCzxQEAtN0BALXFAQC2zQEAt8UBAIagPACHRAMAHncAgKOhBQAidwCApa0FAKbdBQAmdwCAKncAgL4oPACqwQUAq8EFAKwVAgCtHQIArhUCAK8NAgC2QQMALncAgDJ3AIC1sQIANncAgLOhAgA6dwCAPncAgL5FAwC/TQMAvHUDAL1NAwC6ZQMAu20DAEJ3AIBGdwCASncAgE53AIDGdQCAUncAgFZ3AIBadwCAXncAgGJ3AICoRQI
AqVUCAKpdAgCrVQIArE0CAK21AwCusQMAr60DALDVAwCx3QMAstUDALPtAwC09QMAtf0DALb1AwC37QMAuNkDALnZAwC6rQMAu6UDALy9AwC9pQMAvqUDAL+VAwCj9QMAZncAgGp3AIBudwCAcncAgKYVAgCl5QMAdncAgKs5AgCqMQIAencAgH53AICvGQIArhECAK0ZAgCsIQIAgGkAAIFpAACCBQAAgncAgIp3AICOdwCAkncAgO8cAACEbAIA4ZQBAJZ3AIDjyAAAmncAgJ53AICGWDwAh1A9AKJ3AICmdwCAqncAgISEPQCudwCAsncAgLZ3AIDvuAEAvmw8AOF0BgC6dwCA42QBAL53AIDCdwCAxncAgMp3AICz0QEAzncAgNJ3AIDWdwCA2ncAgLaRAQC1+QEA3ncAgLu9AQC6vQEA4ncAgOZ3AIC/dQEAvnUBAL2FAQC8hQEAqL09AKkNPgCqGT4AqxE+AKwxPgCtUT4ArlE+AK9NPgCGdwCAgh0AAIEdAACAHQAA6ncAgO53AIDydwCA9ncAgLjVPgC53T4AutU+ALtJPwC8WT8AvVk/AL5JPwC/QT8AsDk+ALE5PgCyET4AsxE+ALTxPgC18T4AtvU+ALftPgCjkT4A+ncAgIYoAACHwAMA/ncAgKbRPgCluT4AAngAgKv9PgCq/T4ABngAgAp4AICvNT4ArjU+AK3FPgCsxT4ADngAgLOdPwASeACAFngAgLalPwAaeACAHngAgLWtPwC6aT8Au3U/ACJ4AIAmeACAvlk/AL9FPwC8bT8AvWU/ACp4AIAueACAMngAgDZ4AIDjYDwAOngAgOEAPQA+eACA7/w9AEJ4AIBGeACASngAgE54AIBSeACAVngAgFp4AICjGT4AghkAAIEZAACAcQAAXngAgKYhPgClKT4AYngAgKvxPgCq7T4AhCQBAL4kAQCvwT4Art0+AK3hPgCs6T4AqNE+AKnRPgCq0T4Aq+U+AKzhPgCt4T4Arhk+AK8ZPgCGAAAAh4QAAGp4AIBueACAcngAgHZ4AIB6eACAfngAgLh9PgC5AT4AugE+ALsBPgC8AT4AvQk+AL4xPgC/MT4AsGk+ALF1PgCyfT4As3U+ALRZPgC1RT4Atk0+ALdFPgCohQIAqZUCAKqVAgCrpQIArL0CAK3VAgCu0QIAr9ECAIJ4AICGeACAingAgL8k5gGOeACAkngAgJZ4AICaeACAuFUDALlZAwC6bQMAu2UDALx9AwC9ZQMAvm0DAL9lAwCwtQIAsb0CALKBAgCzgQIAtHEDALVxAwC2cQMAt3EDALMdAgCeeACAongAgKZ4AICEiAMAtlUCALU1AgAWdwCAu3kCALpxAgCqeACArngAgL+1AwC+tQMAvVUCALxVAgCyeACAo1kCALZ4AIC6eACAphECAL54AIDCeACApXECAKo1AgCrPQIAxngAgMp4AICu8QMAr/EDAKwRAgCtEQIAqKkCAKmpAgCquQIAq7kCAKypAgCtqQIArjkBAK85AQCAzQEAgQkAAIIZAADOeACA0ngAgL64BQDaeACA3ngAgLjpAQC56QEAuokBALuFAQC8nQEAvYEBAL6BAQC/tQEAsEkBALFVAQCyXQEAs1UBALRNAQC18QEAtvEBALfxAQDvFAAA4ngAgIaoBQCH3AUA5ngAgIRYBADqeACA78Q+AO54AIDhxD4A8ngAgOMwPgDjyAAA9ngAgOEoAQD6eACAtn0CAP54AIACeQCAtXUCAAZ5AICzZQIACnkAgA55AIC+3QEAv2EBALzdAQC91QEAutkBALvFAQASeQCAFnkAgKOxBQDWeACAGnkAgB55AIAieQCApqkFAKWhBQAmeQCAqxEGAKoNBgAqeQCALnkAgK+1BgCuCQYArQEGAKwJBgAyeQCANnkAgDp5AIA+eQCAgBkAAIEZAACCBQAAQnkAgL5sAwBGeQCAhsgAAIccAwBKeQCATnkAgFJ5AIBWeQC
AqLkHAKm5BwCqDQcAqx0HAKwJBwCtNQcArjEHAK8pBwCEqAMAWnkAgF55AIBieQCAZnkAgGp5AIBueQCAcnkAgLjJAAC5yQAAutkAALvRAAC8+QAAvfkAAL6ZAAC/mQAAsF0HALEhBwCyIQcAsz0HALQpBwC1KQcAtgEHALcBBwCzhQYAdnkAgHp5AIB+eQCAgnkAgLa1BgC1gQYAhnkAgLvlBgC6mQYAinkAgI55AIC/7QYAvu0GAL3pBgC89QYAknkAgJZ5AICaeQCAnnkAgKJ5AICmeQCAqnkAgO+QBACueQCA4dwGALJ5AIDj7AUAgCkAAIEVAACCEQAAvnwBAKMFBgC6eQCAhigAAIdMAQC+eQCApjUGAKUBBgDCeQCAq2UGAKoZBgDGeQCAynkAgK9tBgCubQYArWkGAKx1BgDOeQCAs70BANJ5AIDWeQCAtnkBANp5AIDeeQCAtXkBALpVAQC7XQEA4nkAgOZ5AIC++QAAv/kAALxFAQC9+QAAqHECAKlxAgCqcQIAq3ECAKy1AgCtvQIArrUCAK+tAgCE7AwA6nkAgO55AIDyeQCA9nkAgPp5AID+eQCAAnoAgLhpAwC5aQMAugkDALsJAwC8GQMAvRkDAL4JAwC/CQMAsNUCALHdAgCy1QIAs2kDALR5AwC1eQMAtmkDALdhAwAGegCACnoAgA56AICj9QIAEnoAgKUxAgCmMQIAFnoAgBp6AIAeegCAqh0CAKsVAgCsDQIArbEDAK6xAwCvsQMAgGEAAIFhAACCBQAAInoAgIbwDACHYAMAvhAMACp6AIBmeACALnoAgDJ6AIA2egCAOnoAgD56AIBCegCARnoAgKiFAgCplQIAqpUCAKulAgCsvQIArdUCAK7RAgCv0QIASnoAgE56AIBSegCAVnoAgFp6AIBeegCAYnoAgGZ6AIC4dQEAuX0BALp1AQC7zQEAvNUBAL3dAQC+yQEAv8EBALC1AgCxvQIAsoECALOBAgC0VQEAtV0BALZVAQC3TQEA4RAGAIRIDADjDAYAanoAgISYDABuegCAcnoAgHZ6AIB6egCAfnoAgIJ6AICGegCAgXUAAIB1AADvIAEAgnUAAIp6AICOegCAknoAgL7ADACFtA4A4RACAO9cAADjABYA4ZABAJp6AIDjWAEA7zwHAJ56AICiegCAhgAIAIe4DACznQ0AJnoAgKZ6AICqegCArnoAgLbVDQC1tQ0AsnoAgLv5DQC68Q0AtnoAgLp6AIC/GQ4AvhEOAL3VDQC81Q0AvnoAgKPZDQDCegCAxnoAgKaRDQDKegCAznoAgKXxDQCqtQ0Aq70NANJ6AIDWegCArlUOAK9dDgCskQ0ArZENAKhdDgCpYQ4AqmEOAKthDgCsYQ4ArWEOAK5hDgCvYQ4A2noAgN56AIDiegCA5noAgOp6AIDuegCA8noAgPZ6AIC4TQ8AuVEPALpRDwC7UQ8AvHEPAL1xDwC+cQ8Av3EPALDBDwCxwQ8AssEPALPBDwC0wQ8AtcEPALbBDwC3wQ8As+kPAPp6AIC+gAEA/noAgJZ6AIC24Q8AtekPAAJ7AIC7BQ4AugUOAAp7AIAGewCAvwUOAL4FDgC9FQ4AvBUOAIFNAACAQQAA72gNAIJRAACG8AcAh9QBAA57AIASewCAFnsAgIRwAQAaewCAHnsAgOHgDgAiewCA40gNACZ7AICjaQ8AKnsAgC57AIAyewCANnsAgKZhDwClaQ8AOnsAgKuFDgCqhQ4APnsAgEJ7AICvhQ4AroUOAK2VDgCslQ4ARnsAgLMxDgBKewCATnsAgLbBAQBSewCAVnsAgLXRAQC6zQEAu6UBAFp7AIBeewCAvqUBAL+tAQC8sQEAvbEBAI/dJgCj8Q0AYnsAgGZ7AICmAQIAansAgG57AIClEQIAqg0CAKtlAgByewCAviAEAK5lAgCvbQIArHECAK1xAgCfoQwAnnkKAJ1pCgCc0QgAm7E2AJp1NgCZ0TQ
AmOEyAJdtMgCWZTIAlTU/AJRhPgCTcT4AkjU7AJFxOgCQeToAgJUAAIGdAACCoQAAensAgO9EAgDhdA8AfnsAgOMcDwDj1AEAgnsAgOHgAQDvXAEAo7UCAKJBAACh3Q4AoLkOALWpAwCGewCAhMAEALahAwCG8AUAh+QEALOFAwCKewCAvXEDALxpAwC/QQMAvnEDAI57AIC2eQCAu3EDALp5AwCC3ScAgwE7AL6EBwC+wAYAhhE/AIcZPwCEETsAhV06AIp9PgCLJTMAknsAgJZ7AICOuTUAjxU3AIw1MwCNgTMAkqE3AJPZCQC+xBkAmnsAgJaxDQCXUQ8AlHkLAJVhCwCaBQ8Am5EBAJ57AICiewCApnsAgN0AAACcfQMAqnsAgOFIDwCuewCA4xwOALJ7AIC2ewCAunsAgL57AIDCewCAsUEXALChFwCzqesBsgHoAbUB7AG0EesB74wOAMZ7AICpxR8AqAEcAKsBEACqkR8ArdkTAKzREwCv2RcArgUTAKHxAgDKewCAo8kHAKLBAgClARgApGUHAKehGwCm+RsAqCkFAKldBQCqVQUAq20FAKx5BQCteQUArm0FAK9hBQB2ewCAznsAgNJ7AIDWewCAgA0AAIGxAACCsQAA2nsAgLiJBQC5iQUAup0FALuVBQC8uQUAvbkFAL5RBgC/UQYAsOUFALHtBQCy5QUAs/0FALTtBQC13QUAttUFALe9BQCj3QUA3nsAgOJ7AICEDAAA5nsAgKb5BQCl8QUA6nsAgKspBQCqIQUAhpgAAIegAACvGQUArikFAK0pBQCsMQUA7nsAgLNhBgDyewCA9nsAgLYhBgD6ewCA/nsAgLUBBgC6rQcAu40HAAJ8AIAGfACAvo0HAL9xBwC8lQcAvY0HAL65BQC/uQUAvLkFAL25BQC6uQUAu7kFALi5BQC5uQUAtkkFALdJBQC0fQUAtXUFALJ5BQCzeQUAsBUFALF9BQCuXQUAr20FAKxFBQCtXQUAqqUKAKtdBQCovQoAqa0KAAp8AIAOfACAEnwAgBZ8AIAafACAHnwAgCJ8AIAmfACAqA0HAKkdBwCqLQcAq0kHAKxNBwCtZQcArrEGAK+xBgAqfACALnwAgDJ8AIA2fACAOnwAgD58AIBCfACARnwAgLhVBgC5XQYAulUGALtxBgC8NQYAvfEBAL7xAQC/8QEAsK0GALGNBgCyhQYAs50GALSNBgC1cQYAtnUGALdtBgCjpQQAgi0AAIEVAACAHQAASnwAgKblBAClxQQATnwAgKtJBQCqaQUAUnwAgFp8AICvtQUArkkFAK1JBQCsUQUAhmAcAIcIAwBefACAs4UCAGJ8AIC1gQIAtoECAGZ8AIBqfACAbnwAgLoJAwC7CQMAvBkDAL0ZAwC+CQMAvwkDAKxVAgCtXQIArmECAK9hAgCoDQIAqVUCAKpRAgCrUQIAhKwDAHJ8AIB2fACAenwAgIT8HQB+fACAgnwAgIZ8AIC8cQMAvXEDAL5xAwC/cQMAuHEDALlxAwC6cQMAu3EDALSRAwC1kQMAtpEDALeRAwCwkQMAsZEDALKRAwCzkQMAinwAgI58AICSfACAlnwAgJp8AIDhpAEAnnwAgOOAAQC+aBwAonwAgKZ8AIDv2AYAqnwAgK58AICyfACAtnwAgKOJAwCCLQAAgRUAAIAdAAC6fACApo0DAKWNAwC+fACAqwUCAKoFAgDCfACAynwAgK8FAgCuBQIArRUCAKwVAgCGIBwAh8QdAM58AIDSfACA1nwAgNp8AIDefACA72wGAOJ8AIDhbAcA5nwAgON0BwDqfACA7nwAgPJ8AID2fACAs5EBAPp8AID+fACAAn0AgAZ9AIC2sQEAtbkBAAp9AIC7VQEAukkBAA59AIASfQCAv/UAAL71AAC9RQEAvEUBAKNRHgDGfACAFn0AgBp9AIAefQCApnEeAKV5HgAifQCAq5UeAKqJHgAmfQCAKn0AgK81HwCuNR8
ArYUeAKyFHgCAbQAAgRUAAIIdAADv/BkALn0AgDJ9AIA2fQCAOn0AgIbAAACHrAMAPn0AgEJ9AIBGfQCA4SwcAEp9AIDjzBwAqK0eAKnNHgCq2R4Aq9EeAKzxHgCt8R4Arj0eAK81HgCE7AAATn0AgFJ9AIBWfQCAWn0AgF59AIBifQCAZn0AgLjRHwC53R8Auu0fALvlHwC84R8AveEfAL7hHwC/4R8AsE0eALFRHgCyUR4As1EeALTxHwC18R8AtvEfALfxHwCobR4AqY0eAKqFHgCrnR4ArIUeAK2NHgCuuR4Ar7UeAGp9AIBufQCAcn0AgHZ9AIB6fQCAfn0AgIJ9AICGfQCAuJ0eALmtHgC6pR4Au0UBALxdAQC9RQEAvkUBAL91AQCw0R4AsdEeALLRHgCz0R4AtLUeALW9HgC2tR4At60eALMNHgCKfQCAjn0AgJJ9AICWfQCAtg0eALUNHgCafQCAuxUeALoVHgCefQCAon0AgL95HgC+cR4AvQUeALwFHgCCbQAAo0keAIBVAACBZQAApkkeAL6cAQCqfQCApUkeAKpRHgCrUR4Ah3wAAIZMAACuNR4Arz0eAKxBHgCtQR4AqF0CAKltAgCqZQIAq30CAKxpAgCtsQIArrECAK+xAgCE7AQArn0AgLJ9AIC2fQCAun0AgL59AIDCfQCAxn0AgLhxAwC5cQMAunEDALtxAwC81QMAvd0DAL7VAwC/zQMAsNECALHRAgCy0QIAs9ECALRRAwC1UQMAtlEDALdRAwCz7QIAyn0AgM59AIC+gAQA0n0AgLYxAgC14QIA1n0AgLsVAgC6FQIA2n0AgN59AIC/lQMAvpUDAL0FAgC8BQIA4n0AgKOpAgDmfQCA6n0AgKZ1AgDufQCA8n0AgKWlAgCqUQIAq1ECAPZ9AID6fQCArtEDAK/RAwCsQQIArUECAKjZAgCpIQEAqiEBAKshAQCsIQEArSEBAK4hAQCvIQEA/n0AgAJ+AIAGfgCAviAEAAp+AIAOfgCAEn4AgBp+AIC4jQEAuZEBALqRAQC7pQEAvL0BAL11AAC+fQAAv3UAALDlAQCx7QEAsvkBALPxAQC02QEAtdkBALa5AQC3tQEA4RgeAB5+AIDjKB8AIn4AgIGlAACApQAAJn4AgIKlAACGAAQAh/QFACp+AIAufgCAMn4AgDZ+AIDvYB4AOn4AgD5+AIBCfgCAhfD0AUZ+AIBKfgCA42QBAE5+AIDhpAEAUn4AgO/IAABWfgCAWn4AgFZ8AICE/AUAXn4AgGJ+AICzKQYAFn4AgGZ+AIBqfgCAbn4AgLYhBgC1KQYAcn4AgLupBgC6oQYAdn4AgHp+AIC/nQYAvp0GAL2lBgC8rQYA4bQHAH5+AIDjeAQAgn4AgIB9AACBEQAAghUAAIZ+AICGwAAAh1gDAIp+AICOfgCAkn4AgJZ+AIDvDAQAmn4AgKOpBgCefgCAon4AgKZ+AICqfgCApqEGAKWpBgCufgCAqykGAKohBgCyfgCAtn4AgK8dBgCuHQYArSUGAKwtBgC6fgCAs0kHAL5+AIDCfgCAtn0HAMZ+AIDKfgCAtXUHALpdBwC7JQcAzn4AgNJ+AIC+IQcAvy0HALw9BwC9MQcAqD0GAKmBBgCqhQYAq5UGAKy5BgCtuQYArqkGAK+pBgDWfgCA2n4AgN5+AIDifgCA5n4AgIK5AACBsQAAgLkAALitBgC5vQYAurUGALtFAQC8XQEAvUUBAL5FAQC/dQEAsN0GALGlBgCyrQYAs6EGALShBgC1rQYAtpkGALeVBgCjDQYA6n4AgO5+AIDyfgCAhJgCAKY5BgClMQYAvpwBAKthBgCqGQYAhggAAId8AQCvaQYArmUGAK11BgCseQYA+n4AgLO1AQD+fgCAAn8AgLZVAQAGfwCACn8AgLWhAQC6cQEAu3kBAA5/AIASfwCAvjEBAL89AQC8UQEAvVEBAKhpAgCpaQIAqnkCAKt5AgCsbQI
ArZECAK6RAgCvkQIAFn8AgBp/AIAefwCAIn8AgCZ/AIAqfwCALn8AgDJ/AIC4mQIAua0CALqlAgC7bQMAvHUDAL19AwC+dQMAv20DALDxAgCx+QIAssECALPBAgC0sQIAtb0CALa1AgC3qQIANn8AgDp/AIA+fwCAo/0CAEJ/AICl6QIAph0CAEZ/AIBKfwCATn8AgKo5AgCrMQIArBkCAK0ZAgCueQIAr3UCAFJ/AIBWfwCAWn8AgIQADACAGQAAgQkAAII5AABefwCAYn8AgGp/AIBufwCAvuAMAHJ/AIB2fwCAhlgNAIcMAwCowQIAqc0CAKrFAgCr2QIArMkCAK39AgCu9QIArz0BAHp/AIB+fwCAgn8AgIZ/AICKfwCAjn8AgJJ/AIC+MAwAuMUBALnNAQC62QEAu9EBALzxAQC98QEAvpkBAL+ZAQCwRQEAsU0BALJFAQCzXQEAtEUBALVNAQC2RQEAt/0BAOE4BgCWfwCA42wGAJp/AICefwCAon8AgKZ/AICqfwCAhKgNAK5/AICyfwCAtn8AgL6wDwC6fwCA72wGAL5/AIDCfwCApn0AgMZ/AIDKfwCA41AAAM5/AIDhoAEA0n8AgO+EAADafwCAhyANAIZMDwCAPQAAgSEAAIIlAADefwCAs80NAGZ/AIDWfwCA4n8AgOZ/AIC2/Q0AtcENAOp/AIC7CQ4AugEOAO5/AIDyfwCAvwkOAL4BDgC9CQ4AvBEOAPZ/AIDjmAwA+n8AgOH8DwD+fwCAAoAAgAaAAIAKgACADoAAgBKAAIAWgACAGoAAgB6AAIDvYAwAIoAAgCaAAICjTQ0AKoAAgC6AAIAygACANoAAgKZ9DQClQQ0AOoAAgKuJDgCqgQ4APoAAgEKAAICviQ4AroEOAK2JDgCskQ4Agm0AALM1DgCAVQAAgWUAALb1DwCE3AMARoAAgLX9DwC60Q8Au9EPAIYABACH3AAAvn0PAL9lDwC8wQ8AvXkPAKjlDwCp7Q8AqvkPAKv5DwCsMQ4ArTEOAK4xDgCvMQ4ASoAAgE6AAIBSgACAVoAAgFqAAIBegACAYoAAgGaAAIC43Q4AueEOALrhDgC74Q4AvOUOAL3pDgC+mQ4Av5UOALBRDgCxUQ4AslEOALPpDgC0/Q4AteUOALbtDgC35Q4Ao3EPAGqAAIBugACAcoAAgHaAAICmsQ4ApbkOAHqAAICrlQ4AqpUOAH6AAICCgACAryEOAK45DgCtPQ4ArIUOAIaAAICzyQEAioAAgI6AAIC2+QEAkoAAgJaAAIC1wQEAuqkBALu1AQCagACAnoAAgL6tAQC/lQEAvK0BAL2lAQCo5Q0AqfkNAKoFAgCrHQIArA0CAK09AgCuNQIAr10CAKKAAICmgACAqoAAgK6AAICAGQAAgRkAAIIFAACygACAuC0CALk1AgC6MQIAuzECALzVAgC93QIAvtUCAL/NAgCwKQIAsTUCALI9AgCzNQIAtC0CALUVAgC2HQIAtxUCALqAAICEnAIAvoAAgKOBAgDCgACApYkCAKaxAgDGgACAhiAEAIfUAwCq4QIAq/0CAKzlAgCt7QIAruUCAK/dAgC29QMAvkQDAIWM/QG1/QMAyoAAgLP9AwDOgACA0oAAgL59AwC/TQMAvGUDAL19AwC6dQMAu30DANaAAIDagACA3oAAgOKAAICEBAIAoyUCAOaAAIClJQIApi0CAOqAAIDugACA8oAAgKqtAgCrpQIArL0CAK2lAgCupQIAr5UCAPaAAID6gACA/oAAgAKBAIAGgQCA48ADAAqBAIDhrAEADoEAgO9YAwASgQCAFoEAgIANAACB5QAAgu0AABqBAIDhYA8A40ABAOM4DgDheA4AHoEAgCKBAIC+lAUAKoEAgIYABACHZAUALoEAgDKBAIA2gQCA7/wOAO98DgA6gQCAs1EBAD6BAID2fgCAQoEAgEaBAIC2DQEAtQkBAEqBAIC74QAAuhkBAE6BAIBSgQC
Av9EAAL7pAAC96QAAvPkAALaAAIAmgQCAVoEAgFqBAIBegQCAYoEAgGaBAIBqgQCAqKEGAKmtBgCquQYAq7EGAKzhBgCt7QYAruUGAK/FBgCwvQYAsUUHALJNBwCzXQcAtE0HALV1BwC2fQcAtx0HALglBwC5LQcAuiUHALs9BwC8KQcAvRUHAL4RBwC/EQcAoxEGAG6BAIBygQCAdoEAgHqBAICmTQYApUkGAH6BAICroQcAqlkGAIKBAICGgQCAr5EHAK6pBwCtqQcArLkHAIANAACBFQAAgh0AAIqBAICOgQCAkoEAgISUAwC+lAMAloEAgJqBAICGyAAAh4wAAJ6BAICigQCApoEAgKqBAIConQYAqa0GAKqlBgCrvQYArK0GAK3RBgCu1QYAr80GAK6BAICygQCAtoEAgLqBAIC+gQCAwoEAgMaBAIDKgQCAuF0BALnBAQC6wQEAu8EBALzBAQC9yQEAvvEBAL/xAQCwvQYAsY0GALKFBgCzZQEAtH0BALVlAQC2bQEAt2UBALMtBgDOgQCA0oEAgNaBAIDagQCAtlEGALUlBgDegQCAu0kGALp5BgDigQCA5oEAgL+hAQC+uQEAvbEBALxRBgDqgQCAo2kGAO6BAIDygQCAphUGAPaBAID6gQCApWEGAKo9BgCrDQYA/oEAgAKCAICu/QEAr+UBAKwVBgCt9QEAutUHALvdBwC4wQcAucEHAL4xBAC/MQQAvPEHAL3xBwCyrQcAs7UHALCtBwCxpQcAtp0HALf1BwC0pQcAtZUHAKppBwCraQcAqGkHAKlpBwCuaQcAr2kHAKxpBwCtaQcAgLkDAIGNAwCChQMAhKgDAIZQ/AGHCAMAvjQDAAqCAICoZQIAqXUCAKp9AgCrdQIArG0CAK21AwCuvQMAr7UDAA6CAIASggCAFoIAgBqCAIAeggCAIoIAgCaCAIAqggCAuFEDALlZAwC6YQMAu2EDALwRAwC9HQMAvhUDAL8JAwCwzQMAsdUDALLdAwCz1QMAtM0DALVxAwC2cQMAt3EDAC6CAIAyggCAs/0DADaCAIC17QMAOoIAgD6CAIC2PQIAQoIAgEaCAIC7GQIAugECAL0JAgC8AQIAv70CAL4BAgBKggCAToIAgITE/QG+wPwBUoIAgFaCAIBaggCA79wDAF6CAIDhlAEAYoIAgOMQAwBmggCAgu0AAIHtAACA7QAA4TgGAOE8BwDjQAEA45QGAGqCAIBuggCAcoIAgHqCAICGgPwBh+j9AX6CAICCggCAhoIAgIqCAIDvnAEA79wGAKM1AwCOggCAkoIAgJaCAICaggCApvUCAKUlAwCeggCAq9ECAKrJAgCiggCApoIAgK91AgCuyQIArcECAKzJAgB2ggCAqoIAgK6CAICyggCA76T9AbaCAIC6ggCAvoIAgON4/QHCggCA4UD8AcaCAIDKggCAzoIAgNKCAIDWggCAs+X+AYItAACBFQAAgB0AANqCAIC25f4BtfX+Ad6CAIC7Yf8Butn+AeKCAICE5AMAv2n/Ab5h/wG9df8BvHn/Aaj9/gGpJf4Bqi3+Aasl/gGsPf4BrSX+Aa4t/gGvJf4BviwAAOaCAICGiAAAh+wAAOqCAIDuggCA8oIAgPaCAIC4gf8BuYH/AbqZ/wG7mf8BvIn/Ab21/wG+sf8Bv63/AbBd/gGx5f8Bsu3/AbPh/wG05f8Bte3/AbbZ/wG32f8Bo6X/AfqCAID+ggCAAoMAgAaDAICmpf8BpbX/AQqDAICrIf4Bqpn/AQ6DAIASgwCAryn+Aa4h/gGtNf4BrDn+ARaDAICz6f4BGoMAgB6DAIC2lf4BIoMAgCaDAIC16f4BurH+Abu5/gEqgwCALoMAgL51AQC/fQEAvJH+Ab2R/gGoHf4BqS3+Aaol/gGrPf4BrCX+Aa1R/gGuUf4Br1H+ATKDAIA2gwCAOoMAgD6DAIBCgwCARoMAgEqDAIBOgwCAuNkBALnZAQC67QE
Au+EBALzhAQC94QEAvuEBAL/hAQCwMf4BsTn+AbIB/gGzAf4BtPUBALX9AQC29QEAt+kBAKOt/QFSgwCAvkwDAFqDAIBegwCAptH9AaWt/QFigwCAq/39Aar1/QFmgwCAaoMAgK85AgCuMQIArdX9AazV/QGA+QMAgfkDAIJNAACFdCAAboMAgITYAwCE1AQAcoMAgIZABACHVAMAdoMAgHqDAIB+gwCAgoMAgIaDAIC+8AUAqDECAKkxAgCqMQIAqzECAKyVAwCtnQMArpUDAK+NAwCKgwCAjoMAgJKDAICWgwCAhHwHAJqDAICegwCAooMAgLipAwC5qQMAumkDALtpAwC8eQMAvXkDAL5pAwC/aQMAsP0DALHNAwCyxQMAs60DALS5AwC1uQMAtq0DALelAwCmgwCAqoMAgK6DAICygwCAtoMAgLqDAIDv6AMAvoMAgOGQAQDCgwCA42wDAMqDAICAJQAAgSkAAIIdAADOgwCAs/kDANKDAICGaAcAh1wFANaDAIC2XQIAtV0CANqDAIC7SQIAunkCAN6DAIDigwCAvz0CAL49AgC9OQIAvFECAOaDAIDhPP4BvkAGAOPwAQDqgwCA7oMAgPKDAID2gwCA+oMAgP6DAIAChACABoIAgAaEAIAKhACADoQAgO/kAQAShACAFoQAgKNxAwAahACApdUCAB6EAIAihACAptUCACaEAIAqhACAq8ECAKrxAgCtsQIArNkCAK+1AgCutQIA4dz8AcaDAIDjUAQA74gEAID1BwCBCQAAgj0AAC6EAICEJAEAMoQAgDaEAIA6hACAPoQAgOFMBADv5BwA43QEALNdBgBChACAhgAMAIfgAwBGhACAtgUGALV1BgBKhACAuxEGALoJBgBOhACAUoQAgL/VBgC+1QYAvQEGALwJBgCojQYAqZUGAKqVBgCrpQYArL0GAK3FBgCuxQYAr/UGAFaEAIBahACAXoQAgGKEAIBmhACAaoQAgG6EAIByhACAuHUGALl9BgC6dQYAu80HALzVBwC93QcAvtUHAL/NBwCwjQYAsZUGALKdBgCzlQYAtFEGALVRBgC2UQYAt1EGAKMdBwCPFewBdoQAgHqEAIB+hACApkUHAKU1BwCChACAq1EHAKpJBwCGhACAioQAgK+VBwCulQcArUEHAKxJBwCeRfkBn6X5AZyR/QGdTfkBmlX9AZtd/QGYBfEBmZX+AZal8gGXYfEBlG31AZU19QGS4ekBk4X2AZBV7AGRXekBsbEdALClHQCziRkAskEcALUBJAC09RkAjoQAgJKEAICWhACAgqkDAIGhAwCAaQAAohUFAKMFAgCgFQYAob0FAKHFAQCahACAo80NAKLlAQClAQgApN0NAKfRCQCm2QkAqQEUAKilCACrxRQAqs0VAK3REQCsARAArwEcAK51EQCCEe8BgynvAZ6EAICihACAhuH1AYcR9gGEOeoBhY3qAYp59gGL4fEBvqQMAKqEAICO+f0BjzH+AYw98gGNYfIBkkn+AZOd/gGHCAwAhmwMAJax+gGX+QUAlFn6AZVZ+gGaYQYAm8EGAK6EAICyhACAtoQAgLqEAICcyQEAvoQAgKitBQCpuQUAqs0FAKvdBQCszQUArf0FAK71BQCvHQUAwoQAgMaEAIDKhACAzoQAgNKEAIDWhACA2oQAgN6EAIC4dQUAuX0FALoJBQC7CQUAvB0FAL0BBQC+AQUAvz0FALBxBQCxcQUAsnEFALNxBQC0UQUAtVEFALZRBQC3TQUAs0UEAOKEAIDmhACA6oQAgO6EAIC2fQQAtUUEAPKEAIC7tQQAurUEAPaEAID6hACAv5UEAL6VBAC9pQQAvKUEAP6EAICjAQQAAoUAgAaFAICmOQQACoUAgA6FAIClAQQAqvEEAKvxBAAShQCAhOwNAK7RBACv0QQArOEEAK3hBADh0AYAhAwMAOMoBwC+AAwAGoUAgO9EAwCGuAwAhywNAB6FAIDjlAE
AIoUAgOH8AQBWgwCAJoUAgO/IBgAqhQCALoUAgDKFAICzjQMANoUAgLWNAwA6hQCAPoUAgLa1AwBChQCARoUAgLtBAwC6SQMAvUEDALxZAwC/QQMAvkkDAKNFDACmhACAFoUAgEqFAIBOhQCApn0MAKVFDABShQCAq4kMAKqBDABWhQCAWoUAgK+JDACugQwArYkMAKyRDACAFQ8AgR0PAIIhDwCzIQ4AXoUAgLUhDgC2JQ4AYoUAgGaFAIBqhQCAusEOALvBDgC8wQ4AvcEOAL7BDgC/wQ4AqK0OAKntDgCq5Q4Aq/0OAKzlDgCt6Q4ArjkOAK85DgBuhQCAcoUAgHaFAIB6hQCAgB0AAIEJAACCvQEAfoUAgLjNDwC51Q8AutUPALvlDwC8/Q8AvZUPAL6RDwC/kQ8AsEkOALFJDgCyWQ4As1kOALRJDgC1SQ4Atv0PALf1DwCjbQ8AgoUAgL6EAQCKhQCAjoUAgKZpDwClbQ8AkoUAgKuNDwCqjQ8AhogAAIdsAQCvjQ8Aro0PAK2NDwCsjQ8AloUAgLPtDgCahQCAnoUAgLaRDgCihQCApoUAgLXhDgC6tQ4Au70OAKqFAICuhQCAvn0BAL9lAQC8mQ4AvZkOAKgRDgCpJQ4AqiEOAKs5DgCsLQ4ArVUOAK5dDgCvUQ4AhKgAALKFAIC2hQCAuoUAgL6FAIDChQCAxoUAgMqFAIC47QEAuZUBALqVAQC7rQEAvLUBAL11AQC+fQEAv3UBALA1DgCxPQ4AsgkOALMJDgC0/QEAteUBALblAQC31QEAo6kNAM6FAIDShQCA1oUAgNqFAICm1Q0ApaUNAN6FAICr+Q0AqvENAOKFAIDmhQCAryECAK45AgCt3Q0ArN0NAIANAACBFQAAgh0AAOqFAIDuhQCA8oUAgIeQAwCGfAQAvuwEAPqFAID+hQCAAoYAgAaGAIAKhgCADoYAgBKGAICyLQ4AszUOALAtDgCxJQ4Ati0OALedDwC0LQ4AtSUOALq9DwC7jQ8AuKUPALm9DwC+LQ8AvxUPALyVDwC9JQ8AFoYAgBqGAIAehgCAIoYAgCaGAIAqhgCALoYAgDKGAICqpQ4Aq7UOAKjFDgCp3Q4Arp0OAK9VDgCspQ4ArZUOAKgNAgCpFQIAqhUCAKtNAgCsWQIArVkCAK5NAgCvRQIAhKgFADaGAIA6hgCAPoYAgIS4BABChgCARoYAgEqGAIC4/QIAuUEBALpBAQC7QQEAvEEBAL1JAQC+cQEAv3EBALAJAgCxCQIAss0CALPFAgC03QIAtcUCALbNAgC3xQIA4dQPAOMQDgDj9A4A4QwOAE6GAIBShgCAVoYAgFqGAIBehgCAYoYAgL4kBABqhgCA7AAAAO9EAADvzA4AboYAgIJlAACz2QIAgFUAAIFtAAC2nQIAcoYAgHaGAIC1lQIAuokCALuJAgCGqAQAh+AEAL5dAgC/RQIAvF0CAL1VAgCjHQUA9oUAgGaGAIB6hgCAfoYAgKZZBQClUQUAgoYAgKtNBQCqTQUAhoYAgIqGAICvgQUArpkFAK2RBQCsmQUAjoYAgLMpBgCShgCAloYAgLYpBgCahgCAnoYAgLUpBgC6pQYAu60GAKKGAICmhgCAvqUGAL+tBgC8tQYAva0GAKjlBgCp7QYAquUGAKv9BgCs5QYAre0GAK7lBgCvXQYAqoYAgK6GAICyhgCAtoYAgLqGAIC+hgCAwoYAgMaGAIC46QcAuekHALr9BwC79QcAvO0HAL1FBwC+TQcAv0UHALAlBgCxLQYAsiUGALM9BgC0JQYAtS0GALYlBgC32QcAo20HAIItAACBFQAAgB0AAMqGAICmbQcApW0HAM6GAICr6QcAquEHANKGAIC+oAEAr+kHAK7hBwCt6QcArPEHANaGAICzkQYAhugAAIcsAQC2QQEA2oYAgN6GAIC1UQEAuk0BALslAQDihgCA5oYAgL4lAQC/LQEAvDEBAL0xAQCwrQE
AscUBALLBAQCzwQEAtMUBALXNAQC28QEAt/EBALgBAQC5AQEAugEBALsBAQC8AQEAvQEBAL4BAQC/AQEA6oYAgO6GAIDyhgCA9oYAgIaFAID6hgCA/oYAgAKHAICoTQYAqVkGAKo9BgCrNQYArP0BAK3lAQCu5QEAr9UBAKPVBQAGhwCACocAgA6HAIAShwCApgUCAKUVAgAWhwCAq2ECAKoJAgAahwCAHocAgK9pAgCuYQIArXUCAKx1AgAihwCAJocAgCqHAIAuhwCAMocAgOFkBQA2hwCA4+wFAIARAACBEQAAghEAAO/0BgA6hwCAPocAgEKHAIC+MAMAhMQCAEqHAICz4QMAhMAcALVRAwBOhwCAUocAgLZZAwBWhwCAWocAgLtxAwC6eQMAvbUAALxpAwC/tQAAvrUAAF6HAIDhlAEAYocAgONcAgCGcBwAh0QDAGaHAIBqhwCAbocAgHKHAIB2hwCAeocAgH6HAICChwCAhocAgO94AgCoVQIAqV0CAKphAgCrYQIArNECAK3RAgCu0QIAr9ECAIqHAICOhwCAkocAgJaHAICahwCAnocAgKKHAICmhwCAuGkBALlpAQC6CQEAuwkBALwZAQC9GQEAvgkBAL8FAQCwtQIAsb0CALK1AgCzaQEAtHkBALV5AQC2aQEAt2EBAOHEBwDjpAYA47gGAOF8BgCADQAAgTUAAII9AACqhwCArocAgLKHAIC+4B0AuocAgL6HAIDvYAAA7+gGAMKHAICjqQIAxocAgMqHAIDOhwCA0ocAgKYRAgClGQIA1ocAgKs5AgCqMQIAhkgcAIfMHACv/QEArv0BAK39AQCsIQIAqIUeAKmRHgCqkR4Aq60eAKy1HgCt1R4ArtEeAK/FHgC2hwCA2ocAgN6HAIDihwCA5ocAgOqHAIDuhwCA8ocAgLhhHwC5YR8AumEfALthHwC8YR8AvWEfAL5hHwC/YR8AsL0eALGFHgCyjR4As4UeALSdHgC1hR4Ato0eALeFHgCzGR4A9ocAgPqHAID+hwCAAogAgLZVHgC1PR4ABogAgLtBHgC6eR4ACogAgA6IAIC/QR4AvlkeAL1RHgC8WR4AEogAgKNdHgAWiACAGogAgKYRHgAeiACAIogAgKV5HgCqPR4AqwUeAISkAwC+qAMArh0eAK8FHgCsHR4ArRUeAKitHgCptR4AqrUeAKvJHgCs2R4ArdkeAK7JHgCvwR4AgO0BAIHxAQCC8QEAJogAgIaQAACHdAEAKogAgC6IAIC4yQEAuckBALrZAQC70QEAvPkBAL35AQC+mQEAv5UBALBFAQCxTQEAskUBALNdAQC0RQEAtU0BALZFAQC3+QEAsz0eADKIAIA2iACAOogAgD6IAIC2WR4AtVEeAEKIAIC7iQEAuoEBAEaIAIBKiACAv4kBAL6BAQC9iQEAvJEBAE6IAIBSiACAo3UeAFaIAIClGR4AWogAgF6IAICmER4ARocAgGKIAICrwQEAqskBAK3BAQCs2QEAr8EBAK7JAQBmiACAaogAgG6IAIByiACAdogAgIQYAgB6iACAfogAgIKIAICGiACAiogAgI6IAICSiACAmogAgJ6IAIC+cAMAgGkAAIFpAACCeQAAhAAEAIbwBACHdAMAoogAgO8MHwCmiACA4aweAKqIAIDj8B4ArogAgLKIAIC2iACAuogAgL6IAIDCiACAxogAgMqIAIDvVAIAzogAgNKIAIDWiACA46QCANqIAIDhgAEA3ogAgOKIAIDmiACA6ogAgO6IAICzRQMA8ogAgPaIAID6iACA/ogAgLZFAwC1VQMAAokAgLshAwC6SQMAvqAEAAqJAIC/KQMAviEDAL01AwC8OQMAqDkCAKk5AgCqjQIAq4UCAKydAgCthQIAroUCAK+1AgCA7QEAgfUBAIL1AQAOiQCAhpAEAIcEBQASiQCAFokAgLhFAQC5TQEAukUBALtdAQC8SQEAvUkBAL55AQC/eQE
AsM0CALGlAgCyrQIAs6ECALSlAgC1rQIAtp0CALd9AQAaiQCAHokAgCKJAIAmiQCAKokAgC6JAIAyiQCA74gBAITsBADhVB4ANokAgONUAQA6iQCAPokAgEKJAIBGiQCAo0UCAEqJAIBOiQCAUokAgFaJAICmRQIApVUCAFqJAICrIQIAqkkCAF6JAIBiiQCArykCAK4hAgCtNQIArDkCAKg1BgCpPQYAqlEGAKttBgCseQYArWUGAK5tBgCvZQYABokAgGaJAIBqiQCAbokAgIAZAACBGQAAggUAAHKJAIC45QYAuekGALr5BgC7+QYAvOkGAL3pBgC+nQYAv5UGALAdBgCx5QYAsu0GALPlBgC0/QYAteEGALbhBgC34QYAs9kGAL7QAwB2iQCAeokAgH6JAIC25QYAtfEGAIKJAIC7IQYAutkGAIaYAACHeAMAvyUGAL45BgC9MQYAvDkGAIaJAICjnQYAiokAgI6JAICmoQYAkokAgJaJAICltQYAqp0GAKtlBgCaiQCAnokAgK59BgCvYQYArH0GAK11BgCo7QcAqSkGAKoxBgCrMQYArJEGAK2RBgCukQYAr5EGAKKJAICmiQCAqokAgK6JAICyiQCAtokAgLqJAIC+iQCAuIUGALmNBgC6hQYAu50GALyNBgC9vQYAvrUGAL95AQCw8QYAsfEGALLxBgCzxQYAtMEGALXBBgC2wQYAt8EGALO5BgDCiQCAxokAgMqJAIDOiQCAthEGALUZBgDSiQCAuzUGALo1BgDWiQCA2okAgL8FBgC+BQYAvREGALwlBgClQQYA3okAgOKJAICmSQYAgRUAAIB5AACj4QYAghUAAK1JBgCsfQYAr10GAK5dBgCENAEAlogAgKttBgCqbQYAvswDAOqJAICzlQIA7okAgLXZAgDyiQCA9okAgLbRAgCGgAwAhzgDALvFAgC6xQIAvRUDALwVAwC/FQMAvhUDAPqJAID+iQCA71gGAIRAAwACigCABooAgAqKAIAOigCAEooAgBaKAIAaigCAHooAgOE4BgAiigCA4yQGAL5wDACsSQIArUkCAK5dAgCvVQIAqB0CAKkFAgCqBQIAq10CAISoDAAmigCAKooAgC6KAIC+vA0AMooAgDaKAIA6igCAvE0DAL1VAwC+VQMAv2UDALjpAwC56QMAul0DALtVAwC0yQMAtckDALbZAwC32QMAsBkCALEZAgCy2QMAs9kDAD6KAIDj5AAAQooAgOG8AQBGigCAgj0AAIE9AACAPQAASooAgE6KAIBSigCAWooAgF6KAIDvzAMAYooAgGaKAICj3QMAaooAgIboDACHYA0AbooAgKaZAwClkQMAcooAgKuNAwCqjQMAdooAgHqKAICvXQIArl0CAK1dAgCsXQIAfooAgIKKAICGigCAiooAgI6KAICSigCAlooAgO/gAQCEvAwA4YwGAJqKAIDjHAYAnooAgKKKAICmigCAqooAgLPVAQCuigCAsooAgLaKAIC6igCAtpEBALWZAQC+igCAu70BALq9AQDCigCAyooAgL+dAQC+nQEAvZ0BALydAQCoBQ4AqQkOAKodDgCrFQ4ArFEOAK1RDgCuSQ4Ar0kOAFaKAICCzQ8AgfUPAID9DwDGigCAzooAgIYcAACHsAMAuOkOALnpDgC6/Q4Au/UOALztDgC9VQ8AvlEPAL9NDwCwOQ4AsTkOALIJDgCzCQ4AtBkOALUZDgC2DQ4At9kOAKOVDgDSigCA1ooAgNqKAIDeigCAptEOAKXZDgDiigCAq/0OAKr9DgDmigCA6ooAgK/dDgCu3Q4Ard0OAKzdDgDuigCAs/0PAPKKAID2igCAtoEPAPqKAID+igCAtZkPALqNDwC7ZQ8AAosAgAaLAIC+fQ8Av2UPALx9DwC9dQ8AqC0OAKk1DgCqMQ4AqzEOAKxVDgCtRQ4ArkUOAK91DgAKiwCADosAgBKLAIAWiwCAGosAgB6LAIAiiwC
AJosAgLjpDgC59Q4Auv0OALv1DgC87Q4AvZEOAL6RDgC/kQ4AsA0OALHlDgCy7Q4As+UOALT9DgC15Q4Atu0OALflDgCjuQ4Agi0AAIEVAACAHQAAKosAgKbFDgCl3Q4ALosAgKshDgCqyQ4AMosAgL4sAQCvIQ4ArjkOAK0xDgCsOQ4AOosAgLZVAQC1RQEANosAgLNVAQA+iwCAhngAAIdcAAC/OQEAvjEBAL0lAQC8JQEAuzEBALpZAQDmiQCAQosAgEaLAIBKiwCAhAQDAKOJAgBOiwCApZkCAKaJAgBSiwCAvyg5AFaLAICqhQIAq+0CAKz5AgCt+QIAru0CAK/lAgDjWAIA78AOAOGIAQBaiwCAXosAgGKLAIBmiwCAaosAgG6LAIByiwCAdosAgHqLAIDvKAIA4ygOAH6LAIDhRA4AqbUCAKhpDQCrAQIAqgkCAK0BAgCsGQIArzECAK4BAgC+AAQAgosAgIaLAICKiwCAjosAgJKLAICWiwCAmosAgLnlAwC45QMAu+UDALrlAwC95QMAvOUDAL/lAwC+5QMAsSECALBJAgCzJQIAsiUCALUpAgC0IQIAtxUCALYVAgCowQIAqdECAKr1AgCrDQEArBUBAK0FAQCuBQEArzkBAJ6LAICiiwCAqosAgK6LAICyiwCAtosAgLqLAIC+iwCAuC0BALk9AQC67QEAu+UBALz9AQC95QEAvu0BAL/lAQCwLQEAsTUBALI9AQCzNQEAtC0BALUVAQC2HQEAtxUBAIA9AQCBpQAAgq0AAO/YAACGsAUAh9gFAMKLAIDv1A8AhGwEAOH0DgDGiwCA4xwPAMqLAIDhlAEAzosAgOMMDgCzPQIA0osAgNaLAIDaiwCA3osAgLbFAQC13QEA4osAgLuxAQC6qQEA5osAgOqLAIC/kQEAvqkBAL2hAQC8qQEAposAgO6LAICqRQYAq10GAKxFBgCtTQYArkUGAK99BgDyiwCA9osAgPqLAICj0QUA/osAgKUxBgCmKQYAAowAgAaMAICCHQAAgR0AAIAdAAAKjACADowAgBKMAIC+lAMAFowAgBqMAICGSAMAh8wDAB6MAIAijACAJowAgCqMAICoqQcAqakHAKq5BwCruQcArKkHAK2pBwCuAQcArzUHAC6MAIAyjACANowAgDqMAIA+jACAQowAgEaMAIBKjACAuC0HALnBAAC66QAAu+kAALz5AAC95QAAvuUAAL+dAACwUQcAsV0HALItBwCzJQcAtD0HALUlBwC2JQcAtxUHALMxBgBOjACAUowAgFaMAIBajACAtikGALUhBgBejACAu5kGALqVBgBijACAZowAgL/hBgC++QYAvfEGALz5BgBqjACAo3UGAG6MAIByjACApm0GAHaMAIB6jACApWUGAKrRBgCr3QYAfowAgIKMAICuvQYAr6UGAKy9BgCttQYAqOUBAKn1AQCq/QEAq/UBAKztAQCtNQEArj0BAK81AQCA+QAAgc0AAILFAACEYAEAvngBAIqMAICHrAAAhpABALjRAAC52QAAuuEAALvhAAC8kQAAvZ0AAL6VAAC/iQAAsE0BALFVAQCyXQEAs1UBALRNAQC18QAAtvEAALfxAACzdQIAjowAgJKMAICWjACAmowAgLa1AgC1ZQIAnowAgLuRAgC6iQIAoowAgKaMAIC/NQMAvokCAL2BAgC8iQIAqowAgKMxAgCujACAhMADAKbxAgCyjACAtowAgKUhAgCqzQIAq9UCALqMAIC+jACArs0CAK9xAwCszQIArcUCAKuNAACqjQAAqY0AAKg5AwCvvQAArr0AAK2FAACsjQAAqgAAAKsAAADCjACAxowAgMqMAIDOjACA0owAgNaMAIC7fQAAun0AALl9AAC4fQAAv90BAL7dAQC93QEAvN0BALO5AACysQAAsaEAALCtAAC3XQAAtl0AALWVAAC0lQAA2owAgN6MAIDijACA5owAgIE1AACADQA
A6owAgII1AAC+rD0A7owAgPKMAICFaD0A+owAgP6MAICGODwAh8ACALNJAQACjQCA0AAAAAaNAIAKjQCAtkkBALVJAQAOjQCAuykBALolAQASjQCAFo0AgL8dAQC+HQEAvSEBALwpAQDjNDYA4QwGAOGwAgDjPAYAGo0AgB6NAIAijQCAJo0AgIQsPwC+oD8AKo0AgC6NAIDvfDcAMo0AgDaNAIDvGAEAOo0AgD6NAICGaD4Ah8w/AEKNAIBGjQCASo0AgO+UAABOjQCA4ZQBAFKNAIDjUAAAVo0AgILpPwCB6T8AgPE/AKMJPgCPASQA9owAgFqNAIBejQCApgk+AKUJPgBijQCAq2k+AKplPgBmjQCAao0AgK9dPgCuXT4ArWE+AKxpPgCeYTgAn3U4AJzBNACdtTkAmqU1AJt1NACYeTAAmXExAJYhLQCXhTEAlG0sAJVlLACSeSgAk6UtAJBRJACReSgAsQ0UALAFFACzARgAslUUALV5GAC0tRgAbo0AgHKNAIB2jQCAeo0AgH6NAICCjQCAotE8AKMlAQCgdTkAob08AKHJAACGjQCAowEEAKLlAAClHQQApPUEAKf5CACmAQgAqQEMAKhtCACrzQwAqs0MAK3REACsARAAr9URAK7ZEACCBSUAgy0lAIqNAICOjQCAhsEsAIcRLQCEHSkAhRUpAIopLQCLZSwAko0AgJaNAICOHTAAj8E0AIzZMACNHTEAkmE1AJPNNQCajQCAno0AgJZhOQCXmTgAlKE4AJV9OQCaYT0AmwU9AKKNAICmjQCAqo0AgK6NAICc6QAAso0AgLaNAIC6jQCAvo0AgMKNAICGjACAxo0AgMqNAIDOjQCAqJE+AKmRPgCq7T4Aq+E+AKzhPgCt6T4ArtE+AK/RPgCwUT4AsVE+ALJRPgCzUT4AtHk+ALV5PgC2bT4At2U+ALghPgC5IT4Aujk+ALs5PgC8KT4AvRU+AL4RPgC/DT4AgJkDAIGZAwCCBQAA0o0AgL5UAwDhsD0A2o0AgONAPgCEOAIA3o0AgOKNAIDv9D8A5o0AgOqNAICGmAQAhxwDALMFPQCECAQA7o0AgPKNAID2jQCAtgk9ALUJPQD6jQCAu/U9ALr1PQD+jQCAAo4AgL/dPQC+3T0AveU9ALzlPQAGjgCACo4AgKPNPQC+xAQApcE9AA6OAIASjgCApsE9ABaOAIAajgCAqz09AKo9PQCtLT0ArC09AK8VPQCuFT0AtmkCAB6OAIAijgCAtWkCACaOAICzSQIAKo4AgC6OAIC+qQMAv6kDALzBAwC9wQMAuvkDALv5AwAyjgCANo4AgKgtAwCpnQMAqpUDAKutAwCstQMArb0DAK61AwCv2QMAgA0AAIEVAACCHQAAOo4AgD6OAIBCjgCAh7QFAIacBAC4MQIAuTECALo1AgC7zQIAvNUCAL3dAgC+1QIAv8kCALBpAgCxaQIAskECALNBAgC0OQIAtTkCALYRAgC3EQIASo4AgOM0PgBOjgCA4aw+AFKOAIDvfAMAVo4AgFqOAIBejgCA45QDAGKOAIDhfD4AZo4AgO/oPgBqjgCAbo4AgHKOAIB2jgCAo1UDAHqOAICldQMAfo4AgIKOAICmdQMAho4AgIqOAICr5QIAquUCAK3dAgCs3QIAr7UCAK61AgCoGQYAqSEGAKohBgCrPQYArCUGAK1dBgCuVQYAr00GAEaOAICOjgCAko4AgJaOAICajgCAno4AgKKOAICmjgCAuOUGALmBBgC6gQYAu50GALyJBgC9iQYAvqEGAL+hBgCwPQYAsQ0GALIFBgCz7QYAtPUGALXhBgC24QYAt90GALOpBgCCLQAAgRUAAIAdAACqjgCAtt0GALWtBgCujgCAu8kGALr5BgCyjgCAhOADAL8lBgC+MQYAvTkGALzRBgC+iAMAo+0GANaNAIC2jgCAppkGALqOAIC+jgCApekGAKq9BgCrjQYAhkgAAIdsAACudQY
Ar2EGAKyVBgCtfQYAqIEGAKmNBgCqmQYAq5UGAKyNBgCttQYArrEGAK+tBgDCjgCAxo4AgMqOAIDOjgCA0o4AgNaOAIDajgCA3o4AgLilBgC5YQEAumEBALthAQC8YQEAvWEBAL5hAQC/YQEAsNkGALHZBgCyqQYAs6kGALS9BgC1oQYAtqEGALedBgCzEQYA4o4AgOaOAIDqjgCA7o4AgLY1BgC1BQYA8o4AgLsdBgC6HQYA9o4AgPqOAIC/ZQYAvnkGAL19BgC8fQYA/o4AgKNVBgACjwCABo8AgKZxBgAKjwCADo8AgKVBBgCqWQYAq1kGABKPAIAWjwCArj0GAK8hBgCsOQYArTkGAKjVAgCp3QIAqikDAKspAwCsOQMArTkDAK4pAwCvKQMAGo8AgB6PAIAijwCAKo8AgC6PAIAyjwCAvrgDADaPAIC47QMAuYUDALqBAwC7gQMAvIUDAL2NAwC+sQMAv7EDALBZAwCxWQMAsu0DALPlAwC0/QMAteUDALblAwC31QMAgKEAAIGhAACCoQAAvoAMADqPAICEmAIAPo8AgEKPAICGAAwAh/QDAEaPAIBKjwCATo8AgFKPAIBWjwCAhLADALPhAwBajwCAXo8AgGKPAIBmjwCAtvkDALXxAwBqjwCAu90DALrdAwBujwCAco8AgL9hAwC+eQMAvXEDALx5AwB2jwCAeo8AgH6PAICjLQIAgo8AgKU9AgCmNQIAho8AgIqPAICOjwCAqhECAKsRAgCstQIArb0CAK61AgCvrQIA48QDAOMQBwDhuAEA4WwHAIBxAACBcQAAggUAAJKPAICGwAwAh1QNAJqPAICejwCA77ADAO8ABwCijwCApo8AgKqPAICujwCAso8AgLaPAIC6jwCAvo8AgMKPAIDvpAEAhKANAOGABgDGjwCA4xABAMqPAIDOjwCA0o8AgNaPAICz9QEA2o8AgN6PAIDijwCA5o8AgLZNAQC1SQEA6o8AgLtRAQC6SQEA7o8AgPKPAIC/OQEAvjEBAL1BAQC8SQEAqC0OAKk1DgCqPQ4AqzEOAKyBDgCtjQ4AroUOAK+1DgCWjwCA9o8AgPqPAID+jwCAgBkAAIEZAACCBQAAApAAgLidDgC5rQ4AuqUOALtNDwC8VQ8AvV0PAL5JDwC/QQ8AsM0OALHVDgCy3Q4As9UOALS1DgC1vQ4AtrUOALetDgCjtQ4AvogDAAaQAIAKkACADpAAgKYNDgClCQ4AEpAAgKsRDgCqCQ4AhggAAIdsAwCveQ4ArnEOAK0BDgCsCQ4AFpAAgBqQAIAekACAs7UPACKQAIC1VQ8Atl0PACaPAIAmkACAKpAAgLp5DwC7eQ8AvGkPAL1dDwC+SQ8Av0kPAKhpDgCpaQ4AqnEOAKtxDgCskQ4ArZEOAK6RDgCvkQ4ALpAAgDKQAIA2kACAOpAAgD6QAIBCkACARpAAgEqQAIC4hQ4AuY0OALqFDgC7nQ4AvI0OAL29DgC+tQ4Av3kBALDxDgCx8Q4AsvEOALPFDgC0wQ4AtcEOALbBDgC3wQ4Ao/kOAE6QAIBSkACAVpAAgFqQAICmEQ4ApRkOAF6QAICrNQ4AqjUOAGKQAIBmkACArwUOAK4FDgCtEQ4ArCUOAIANAACBFQAAgh0AAGqQAIBukACAcpAAgISUAQC+lAEAhkAHAIf0AAB6kACAfpAAgIKQAICGkACAipAAgI6QAICojQIAqZUCAKqVAgCrzQIArNUCAK3dAgCuyQIAr/0CAJKQAICWkACAmpAAgJ6QAIC/ABQAopAAgKaQAICqkACAuH0DALnBAwC6wQMAu8EDALzBAwC9yQMAvvEDAL/xAwCwhQIAsUUDALJNAwCzRQMAtF0DALVFAwC2TQMAt0UDALMdAgCukACAspAAgLaQAIC6kACAtl0CALVdAgC+kACAu4EDALpBAgDCkACAxpAAgL+BAwC+mQMAvZEDALyZAwDKkACAo1kCAM6QAIDSkAC
AphkCANaQAIDakACApRkCAKoFAgCrxQMA3pAAgOKQAICu3QMAr8UDAKzdAwCt1QMA6pAAgOPMAACEBAIA4bwBAIDJAQCB/QEAgvUBAL4QBQDukACAvigEAPKQAID2kACA+pAAgO8QAAD+kACAApEAgIbgBACH9AIABpEAgAqRAIDj/A8ADpEAgOHgDwASkQCA7xQPABaRAIAakQCAHpEAgCKRAIAmkQCAKpEAgC6RAIAykQCANpEAgDqRAIA+kQCAQpEAgEaRAIBKkQCA7+ABAIUEEgDh3A4ATpEAgOMcDgCAKQAAgR0AAIIFAABSkQCAszECAFqRAICEzAUAXpEAgGKRAIC2KQIAtSECAGaRAIC7zQEAus0BAGqRAIBukQCAv3UBAL7JAQC9wQEAvMkBAKjpBQCp6QUAqvkFAKv5BQCs6QUArekFAK45BgCvOQYA5pAAgFaRAICGiAAAhwADAHKRAIB2kQCAepEAgH6RAIC40QYAudkGALrhBgC74QYAvJEGAL2dBgC+lQYAv4kGALBJBgCxSQYAsl0GALNVBgC0TQYAtfEGALbxBgC38QYAo3EFAIKRAICGkQCAipEAgI6RAICmaQUApWEFAJKRAICrjQYAqo0GAJaRAICakQCArzUGAK6JBgCtgQYArIkGAJ6RAICikQCAs+EHAKaRAIC14QcAqpEAgK6RAIC25QcAdpAAgLKRAIC7vQcAuqEHAL2VBwC8qQcAv5UHAL6VBwCoAQYAqSUGAKohBgCrIQYArCEGAK0tBgCuJQYAr1UGALaRAICCHQAAgR0AAIAdAAC6kQCAvpEAgMKRAIC+MAEAuDkGALk5BgC6yQYAu8kGALzZBgC92QYAvskGAL/JBgCwLQYAsTEGALI1BgCzCQYAtBkGALUZBgC2CQYAtwkGAKOpBgCEjAIAhigfAIdEAQDKkQCApq0GAKWpBgDOkQCAq/UGAKrpBgDSkQCA1pEAgK/dBgCu3QYArd0GAKzhBgDakQCAsxUGAN6RAIDikQCAtj0GAOaRAIDqkQCAtTUGALrZAQC72QEA7pEAgPKRAIC+fQEAv2UBALx9AQC9dQEAqMUFAKnJBQCq2QUAq9EFAKz5BQCt+QUArikCAK8pAgD2kQCA+pEAgP6RAIACkgCAjAAAAAaSAIAKkgCADpIAgLjtAgC5hQIAuo0CALuBAgC8hQIAvY0CAL69AgC/fQMAsFkCALFZAgCy7QIAs+UCALT9AgC15QIAtuUCALfVAgCjUQUAEpIAgBaSAIAakgCAHpIAgKZ5BQClcQUAIpIAgKudAgCqnQIAJpIAgCqSAICvIQIArjkCAK0xAgCsOQIAghEAAC6SAICAZQAAgQkAADKSAIC+mAMAOpIAgD6SAICEJAMAQpIAgIdoAwCGjBwARpIAgEqSAIBOkgCAUpIAgFaSAIBakgCAs6ECAITAHAC10QIAXpIAgGKSAIC21QIAZpIAgGqSAIC7wQIAuvUCAL0RAQC82QIAvxEBAL4ZAQBukgCAcpIAgHaSAIB6kgCAfpIAgIKSAICGkgCA77gGAIqSAIDhnAQAjpIAgON0BgCSkgCAlpIAgJqSAICekgCAgPkAAIH5AACCBQAAopIAgL5YHACEWB8A71wAAO9ABgDhkAEA4fwGAOM8AADjdAYAqpIAgK6SAICGmBwAh/QcAKNpAgC+DB8AspIAgLaSAIC6kgCAph0CAKUZAgC+kgCAqwkCAKo9AgDCkgCAxpIAgK/ZAQCu0QEArdkBAKwRAgCokR0AqZkdAKqhHQCroR0ArNEdAK3dHQCu1R0Ar8kdADaSAICmkgCAypIAgM6SAIDSkgCA1pIAgNqSAIDekgCAuHkeALl5HgC6zR4Au8UeALzdHgC9xR4AvsUeAL/1HgCwuR0AsY0dALKFHQCzTR4AtFUeALVdHgC2VR4At0keALjNHwC51R8Aut0fALvVHwC88R8Avf0fAL7pHwC/6R8AsKUfALGxHwCysR8
As40fALSVHwC19R8Atv0fALf1HwCoGR4AqRkeAKotHgCrPR4ArCUeAK0tHgCuJR4Ar90fAOKSAIDmkgCA6pIAgO6SAIDykgCAxpEAgPaSAID6kgCAs+UfAP6SAIACkwCABpMAgAqTAIC27R8Ate0fAA6TAIC7NR4AuiEeABKTAIAWkwCAv3EeAL4RHgC9GR4AvCUeAIJpAACjoR8AgFkAAIFRAACmqR8AGpMAgB6TAIClqR8AqmUeAKtxHgCGAAQAh+wBAK5VHgCvNR4ArGEeAK1dHgCoMR4AqTEeAKpBHgCrQR4ArEEeAK1JHgCucR4Ar3EeACKTAIAmkwCAKpMAgC6TAIAykwCANpMAgDqTAIA+kwCAuCkBALkpAQC6OQEAuzUBALwtAQC90QAAvtEAAL/RAACwyQEAsckBALLZAQCz2QEAtMkBALXJAQC2GQEAtxkBALPJHQBCkwCARpMAgEqTAIBOkwCAtskdALXJHQBSkwCAuw0CALoNAgBWkwCAWpMAgL8NAgC+DQIAvQ0CALwNAgBekwCAo40dAGKTAIBmkwCApo0dAGqTAIBukwCApY0dAKpJAgCrSQIAcpMAgHaTAICuSQIAr0kCAKxJAgCtSQIAgA0AAIERAACCEQAAepMAgO/MAgB+kwCAgpMAgISQAgDjLAIAvigDAOHYAQCKkwCAhhAEAIfUAwCOkwCAkpMAgLNhAwCWkwCAmpMAgJ6TAICikwCAtnkDALVxAwCmkwCAu10DALpdAwCqkwCArpMAgL/hAAC++QAAvfEAALz5AACjoQIAspMAgLaTAIC6kwCAvpMAgKa5AgClsQIAwpMAgKudAgCqnQIAxpMAgMqTAICvIQEArjkBAK0xAQCsOQEAzpMAgNKTAIDvZB8A1pMAgNqTAIDekwCA4pMAgOaTAICADQAAgREAAIIVAADqkwCA4eAcAO6TAIDjiB8A8pMAgISAAgC+jAUAh0gFAIYsBAD6kwCA/pMAgO+kHgDv9B4A4QAeAOFQHwDjLB4A47AeAAKUAIAGlACACpQAgA6UAIASlACAFpQAgISEBACzcQEAGpQAgLUdAQC2FQEAHpQAgCKUAIAmlACAugEBALsBAQC89QAAvf0AAL71AAC/7QAAqK0GAKm9BgCqtQYAq8kGAKzZBgCt2QYArskGAK/BBgAqlACALpQAgDKUAIA2lACAOpQAgD6UAIBClACARpQAgLhtBwC5BQcAug0HALsBBwC8AQcAvQEHAL4BBwC/AQcAsIkGALGJBgCybQcAs2UHALR9BwC1ZQcAtmUHALdVBwCGkwCAozkGAEqUAID2kwCApl0GAE6UAIBSlACApVUGAKpJBgCrSQYAVpQAgFqUAICuvQcAr6UHAKy9BwCttQcAgG0AAIEJAACCGQAAXpQAgGKUAIC+nAMAZpQAgGqUAICGQAAAh2AAAG6UAIBylACAdpQAgHqUAIB+lACAgpQAgKiRBgCpkQYAqrkGAKu5BgCsqQYArakGAK7ZBgCv2QYAhpQAgIqUAICOlACAkpQAgJaUAICalACAnpQAgKKUAIC4cQEAuXEBALpxAQC7cQEAvNkBAL3BAQC+wQEAv/UBALCxBgCxuQYAsokGALOJBgC0UQEAtVEBALZRAQC3UQEAszEGAKaUAICqlACArpQAgLKUAIC2KQYAtSEGALaUAIC7fQYAunUGALqUAIC+lACAv5UBAL6VAQC9XQYAvF0GAMKUAICjdQYAxpQAgMqUAICmbQYAzpQAgNKUAIClZQYAqjEGAKs5BgCErAEAvqABAK7RAQCv0QEArBkGAK0ZBgCo3QIAqe0CAKrlAgCr/QIArOUCAK3tAgCu5QIArz0DANqUAIDelACA4pQAgL5kDADmlACA6pQAgO6UAIDylACAuMkDALnJAwC62QMAu9EDALz5AwC9+QMAvpkDAL+VAwCwRQMAsU0DALJFAwCzXQMAtEUDALVNAwC2RQMAt/kDAIFVAwCASQM
As2UCAIJVAwC1ZQIA9pQAgPqUAIC2ZQIAhgAMAIfkAwC7gQMAuokDAL2BAwC8mQMAv4EDAL6JAwCjLQIA/pQAgAKVAIAGlQCACpUAgKYtAgClLQIADpUAgKvJAwCqwQMAEpUAgBaVAICvyQMArsEDAK3JAwCs0QMA49gGAOGsBwDhnAYA45wGABqVAICEWA0AHpUAgCKVAIAmlQCAKpUAgC6VAIAylQCA7xwBADaVAIA6lQCA70AGAIB5AACBFQAAghEAAIQADAA+lQCA46wAAEKVAIDhpAEASpUAgO9wAACGyAwAh6QNAE6VAIBSlQCAVpUAgFqVAIC6yQUAu8kFALilBQC5zQUAvvkFAL/5BQC8zQUAvcUFALKlBQCzrQUAsBEGALERBgC2rQUAt50FALS1BQC1rQUAqmEGAKthBgConQYAqZUGAK5hBgCvYQYArHEGAK1xBgBelQCAYpUAgGaVAIBqlQCAbpUAgHKVAIC+sAwAdpUAgKghDgCpIQ4AqiEOAKs9DgCsJQ4ArS0OAK4lDgCviQ4ARpUAgHqVAIB+lQCAgpUAgIaVAICKlQCAjpUAgJKVAIC4UQ8AuV0PALpVDwC7bQ8AvHUPAL19DwC+dQ8Av2kPALD5DgCxoQ4AsqEOALOhDgC0oQ4AtakOALaRDgC3kQ4As6kOAJaVAIDWlACAmpUAgJ6VAIC2rQ4Ata0OAKKVAIC7ZQ4Auj0OAKaVAICqlQCAv20OAL5lDgC9dQ4AvHUOAIIZAACj7Q4AgGUAAIEZAACm6Q4ArpUAgLKVAICl6Q4AqnkOAKshDgC2lQCAupUAgK4hDgCvKQ4ArDEOAK0xDgCoYQ4AqXUOAKp9DgCrdQ4ArG0OAK31DgCu/Q4Ar/UOAIaAAQCHpAEAvpUAgMKVAIDGlQCAypUAgM6VAIDSlQCAuHUBALl9AQC6dQEAu8kBALzdAQC9xQEAvsUBAL/1AQCwjQ4AsZUOALKdDgCzkQ4AtFUBALVdAQC2VQEAt00BALP1DgDWlQCA2pUAgN6VAIDilQCAtnUOALXlDgDmlQCAu1EOALpJDgDqlQCA7pUAgL+ZAQC+kQEAvUUOALxJDgDylQCAo7EOAPaVAID6lQCApjEOAP6VAIAClgCApaEOAKoNDgCrFQ4ABpYAgAqWAICu1QEAr90BAKwNDgCtAQ4AqO0CAKktAwCqJQMAqz0DAKwlAwCtLQMAriUDAK+ZAwAOlgCAEpYAgBaWAIAalgCAHpYAgCKWAIC+dAIAKpYAgLiNAwC5kQMAupEDALulAwC8vQMAvXUAAL59AAC/dQAAsOkDALHpAwCy+QMAs/EDALTZAwC12QMAtrkDALe1AwCArQAAgbUAAIK9AACzoQMALpYAgLWhAwC2oQMAMpYAgITgAgA2lgCAuiEDALshAwC8IQMAvSkDAL4RAwC/EQMAo+0DAIXABACFtG8AOpYAgD6WAICm7QMApe0DAEKWAICrbQMAqm0DAIZIBQCHbAMAr10DAK5dAwCtZQMArG0DAEaWAIDjAA4A71hsAOG0DwBKlgCATpYAgFKWAIBWlgCAoakDAKD9DwCjwQMAog0DAOHgAwDv4A8A4+QDAFqWAIBelgCAYpYAgIQEBAC+BAQAZpYAgO+UAwBqlgCAbpYAgHKWAIDj1AMAdpYAgOFUAAB6lgCAfpYAgIKWAICGlgCAgA0AAIEVAACCHQAAipYAgI6WAICSlgCAj5EbAO+cDgCE4AcA4dQOAJqWAIDj8A4AnpYAgKKWAICGGAcAh5AEAJnlFwCY5RcAm+kLAJo5CwCd/QoAnPELAJ9VDwCeXQ8AkSkfAJDNGwCTJR8Aks0fAJXREwCUKRMAlxkXAJZ1EwCM4RAAjSUQAI4tEACP+QwAJpYAgJaWAICKORQAi5UUAITpGACFBRgAhuUYAIfxFACmlgCAqpYAgIIxHACDFRwAnKkEAK6WAICylgCAtpYAgLqWAIC+lgCAmtEEAJt9BACUTQ0
AleUIAJblCACXtQgAwpYAgMaWAICSWQwAk1kMAKGRAADKlgCAowF8AKKZAACluXwApJF8AKeZeACm4X0AqYF5AKiheACriXQAqgF0AK0BcACsWXQAr4VwAK6dcACx4WwAsAFsALMBaACyHWwAtfVoALT1aADOlgCA0pYAgNaWAIDalgCA3pYAgOKWAIDmlgCA6pYAgO6WAIDylgCAqD0HAKmVBwCqlQcAq6kHAKzdBwCtxQcArsUHAK8dBgD2lgCAgh0AAIEdAACAHQAA+pYAgP6WAIAClwCAvmABALgZBgC5GQYAuikGALslBgC8IQYAvSEGAL4hBgC/IQYAsHEGALFxBgCycQYAs3EGALRNBgC1NQYAtj0GALctBgCzHQcACpcAgIYoAACHqAAADpcAgLZFBwC1VQcAEpcAgLu1BgC6tQYAFpcAgBqXAIC/8QYAvokGAL2lBgC8pQYAHpcAgKNZBwAilwCAJpcAgKYBBwAqlwCALpcAgKURBwCq8QYAq/EGADKXAIA2lwCArs0GAK+1BgCs4QYAreEGAKipBQCptQUAqr0FAKs9AgCsJQIArVECAK5RAgCvUQIAOpcAgD6XAIBClwCARpcAgIQ8AwBKlwCATpcAgFKXAIC4pQIAua0CALqlAgC7vQIAvKUCAL2tAgC+pQIAv30DALAxAgCxMQIAshkCALMZAgC09QIAta0CALalAgC3nQIAVpcAgFqXAIBelwCAszkFAGKXAIC1oQIAtt0CAGaXAIBqlwCAbpcAgLr5AgC7+QIAvMECAL3BAgC+PQIAv2UCAHKXAICmgQIApf0CAHqXAICjZQUAvlh8AIbYfACHnHwArzkCAK5hAgCtnQIArJ0CAKulAgCqpQIAfpcAgIKXAICohQIAqZUCAKqVAgCrpQIArL0CAK3VAgCu0QIAr9ECAIGFAQCAhQEAhpcAgILtAQCKlwCAjpcAgJKXAICWlwCAuHUBALl9AQC6dQEAu80BALzVAQC93QEAvskBAL/BAQCwtQIAsb0CALKBAgCzgQIAtFEBALVRAQC2UQEAt1EBAJqXAICelwCAopcAgKaXAIDhMAYA4WQHAOMoBgDjxAYAhCB9AKqXAIDvbAAA7xgGAK6XAICylwCAtpcAgLqXAICzXQIAvkh8AL6XAIDClwCAxpcAgLYVAgC1dQIAypcAgLs5AgC6MQIAzpcAgNKXAIC/1QEAvtUBAL0VAgC8FQIAo519AHaXAIDWlwCA2pcAgN6XAICm1X0ApbV9AOKXAICr+X0AqvF9AOaXAIDqlwCArxV+AK4VfgCt1X0ArNV9AIBNAACBVQAAglUAALOxfgDulwCAtWV/ALZtfwDylwCAhkADAIcEAwC66X8Au+l/ALz5fwC9+X8Avt1/AL/NfwD2lwCA+pcAgAaXAID+lwCAApgAgAaYAIAKmACADpgAgKhtfgCpXX4AqlV+AKuFfwCsgX8ArYF/AK6BfwCvgX8AsEF/ALFBfwCyQX8As0F/ALR1fwC1ZX8Atm1/ALdlfwC4XX8AuS1/ALolfwC7PX8AvC1/AL0dfwC+FX8Av/UAAKP9fwASmACAFpgAgBqYAIAemACApiF+AKUpfgAimACAq6V+AKqlfgAmmACAKpgAgK+BfgCukX4ArbV+AKy1fgAumACAMpgAgDaYAIA6mACAPpgAgEKYAIBGmACASpgAgIA9AACBCQAAghkAAE6YAIBSmACAhLgBAL6wAQBWmACAqK0BAKnVAQCq1QEAqw0BAKwVAQCtGQEArgkBAK8JAQCGAAQAhwQBAFqYAIBemACAYpgAgGaYAIBqmACAbpgAgLjtAAC5hQAAuo0AALuFAAC8nQAAvYUAAL6NAAC/hQAAsHkBALF5AQCy7QAAs+UAALT9AAC15QAAtuUAALfVAACzXQIAcpgAgHaYAIB6mACAfpgAgLaZAgC1nQIAgpgAgLu9AgC6vQIAhpgAgIqYAIC/IQMAvjkDAL0xAwC8OQM
AvigDAKMZAgCOmACAkpgAgKbdAgCWmACAmpgAgKXZAgCq+QIAq/kCAJ6YAICimACArn0DAK9lAwCsfQMArXUDAL7IBACmmACAqpgAgL7EBQCumACAspgAgLaYAIC6mACAgD0AAIEJAACCGQAAvpgAgMKYAICEOAMAypgAgM6YAIDveAIA0pgAgIZIBACHVAMA1pgAgNqYAIDemACA4pgAgOaYAIDqmACA7pgAgPKYAIDjVAIA9pgAgOFAAQD6mACA/pgAgOMkfwACmQCA4Zx8AAaZAIAKmQCADpkAgBKZAICEbAUAFpkAgBqZAIAemQCAIpkAgO8YfwAmmQCAKpkAgLPxAgAumQCAMpkAgDqZAIA+mQCAtukCALXhAgBCmQCAu3EBALppAQCHoAUAhswEAL85AQC+WQEAvVEBALxhAQDhQH8ARpkAgOM4fgCEwAQAgtkAAO8UAACApQAAgdkAAEqZAIDjwAAATpkAgOHUAQBSmQCAVpkAgO+EfgBamQCAqs0BAKvVAQBemQCAYpkAgK79AQCvnQEArMUBAK31AQBmmQCAo1UCAGqZAIBumQCApk0CAHKZAIB2mQCApUUCAMaYAIA2mQCAepkAgH6ZAICCmQCAhpkAgIqZAICOmQCAqJkGAKmZBgCq7QYAq/0GAKzlBgCt7QYAruUGAK/dBgCwpQYAsa0GALKlBgCzuQYAtK0GALVVBwC2UQcAt00HALh1BwC5fQcAunUHALtJBwC8WQcAvVkHAL5JBwC/RQcAs0UGAJKZAICWmQCAmpkAgJ6ZAIC2TQYAtU0GAKKZAIC7SQYAukEGAIYIAACHjAAAv7EHAL5JBgC9TQYAvFEGAIJdAACjAQYAgEUAAIFdAACmCQYAqpkAgK6ZAIClCQYAqgUGAKsNBgCymQCAtpkAgK4NBgCv9QcArBUGAK0JBgCoTQYAqVUGAKpVBgCriQYArLEGAK29BgCuqQYAr6kGAKaZAIC6mQCAvpkAgMKZAIDGmQCAypkAgM6ZAIDSmQCAuEkBALlJAQC6WQEAu1kBALxJAQC9SQEAvt0BAL/VAQCw3QYAsa0GALKlBgCzjQYAtJkGALWZBgC2jQYAt4UGALPdBgDWmQCA2pkAgN6ZAIDimQCAtj0GALU5BgDmmQCAu2kGALoZBgDqmQCA7pkAgL9dBgC+XQYAvVkGALxxBgDymQCAo5kGAPaZAID6mQCApnkGAP6ZAIACmgCApX0GAKpdBgCrLQYABpoAgAqaAICuGQYArxkGAKw1BgCtHQYAqNUCAKndAgCq4QIAq+ECAKw1AwCtPQMArjUDAK8tAwCAzQMAgQkAAIIZAAAOmgCAEpoAgIQYAgC+dAMAGpoAgLjpAwC56QMAuokDALuFAwC8nQMAvYEDAL6BAwC/tQMAsFUDALFdAwCyVQMAs+kDALT5AwC1+QMAtukDALfhAwCGIAwAhxADAB6aAIAimgCAJpoAgCqaAIAumgCA71wCADKaAIDhFAAANpoAgOOIAgC++AwAOpoAgD6aAIBCmgCAu/kDALrxAwC+gA0ARpoAgL9dAwC+XQMAvV0DALzhAwCzCQIASpoAgE6aAIBSmgCAVpoAgLbdAwC13QMAWpoAgKipBgCpqQYAqrkGAKu5BgCsqQYArakGAK4dBQCvFQUAXpoAgGKaAIBmmgCAapoAgG6aAIBymgCAdpoAgHqaAIC4GQUAuS0FALolBQC7yQUAvNkFAL3FBQC+zQUAv8UFALBtBQCxdQUAsnUFALNFBQC0XQUAtT0FALY1BQC3KQUA4fQGAOFUBwDjFAYA47wGAIEJAACAqQAAfpoAgII5AACE7A0AgpoAgIeIDACGDAwAipoAgI6aAIDvzAcA78QHAKMpAwCSmgCAlpoAgJqaAICemgCApv0CAKX9AgCimgCAq9kCAKrRAgCmmgCAqpoAgK99AgCufQIArX0CAKzBAgCoPQ4AqY0OAKqFDgCrnQ4ArIUOAK2NDgCuuQ4
Ar7UOAIaaAICumgCAspoAgLaaAIC6mgCAvpoAgMKaAIDGmgCAuL0OALllDwC6bQ8Au2UPALx9DwC9ZQ8Avm0PAL9lDwCw1Q4Asd0OALLVDgCzoQ4AtJUOALWdDgC2lQ4At40OALMNDgDKmgCAzpoAgNKaAIDWmgCAtg0OALUNDgDamgCAuxkOALoRDgDemgCAFpoAgL9ZDgC+UQ4AvXUOALwBDgDimgCAo0kOAOaaAIDqmgCApkkOAO6aAIDymgCApUkOAKpVDgCrXQ4AhKQDAPaaAICuFQ4Arx0OAKxFDgCtMQ4AqLEOAKmxDgCqzQ4Aq8UOAKzdDgCtxQ4ArsUOAK/1DgCA7QEAgfEBAILxAQD6mgCAhpABAIe0AQD+mgCAApsAgLjFAQC5zQEAusUBALvdAQC8zQEAvf0BAL6ZAQC/lQEAsI0OALFBAQCyQQEAs0EBALRBAQC1QQEAtkEBALdBAQCzRQ4ABpsAgAqbAIAOmwCAEpsAgLZFDgC1VQ4AFpsAgLuFAQC6SQ4AGpsAgB6bAIC/hQEAvoUBAL2VAQC8lQEAIpsAgKMBDgAmmwCAKpsAgKYBDgAumwCAMpsAgKURDgCqDQ4Aq8EBADabAIA6mwCArsEBAK/BAQCs0QEArdEBAKgtAwCpPQMAqjUDAKuJAwCsmQMArZkDAK6JAwCvgQMAPpsAgEKbAIBGmwCASpsAgE6bAIBSmwCAVpsAgFqbAIC4rQMAuWUAALptAAC7ZQAAvH0AAL1lAAC+bQAAv2UAALDJAwCxyQMAsqkDALOlAwC0vQMAtaEDALahAwC3lQMAgL0AAIEJAACCGQAAXpsAgGKbAIC+2AMAapsAgG6bAICErAIAcpsAgIfoAwCGDAQAdpsAgHqbAIB+mwCAgpsAgLP9AwCGmwCAipsAgI6bAICSmwCAtlkDALVRAwCWmwCAu00DALpNAwCamwCAnpsAgL8lAwC+OQMAvTEDALw9AwCimwCAppsAgKqbAICumwCA71gPALKbAIC2mwCAupsAgOOQDgC+mwCA4bAPAMKbAIDGmwCAypsAgM6bAIDSmwCAgHUAAIF9AACCdQAAhBgFAO88AwDamwCAvhQFAN6bAIDj0AMA4psAgOFAAADmmwCAhtAEAIdYBQDqmwCA7psAgPKbAID2mwCA+psAgP6bAIACnACABpwAgAqcAIDvrA8AhOwEAOEQDgAOnACA41QBABKcAIAWnACAGpwAgB6cAICj/QIAIpwAgCacAIAqnACALpwAgKZZAgClUQIAMpwAgKtNAgCqTQIANpwAgDqcAICvJQIArjkCAK0xAgCsPQIAqJkGAKmZBgCqrQYAq70GAKylBgCtrQYArqUGAK/ZBgDWmwCAghEAAIEZAACAwQcAPpwAgEKcAIC+cAMARpwAgLhJBwC5SQcAul0HALtVBwC8TQcAvXEHAL51BwC/bQcAsKkGALGpBgCyuQYAs7EGALSZBgC1mQYAtnkHALd5BwC1NQYASpwAgE6cAIC2NQYAhjAAAIdcAwCzPQYAUpwAgL19BgC8dQYAv0UGAL5FBgBmmwCAVpwAgLt1BgC6dQYAo2UGAFqcAIBenACAYpwAgGacAICmbQYApW0GAGqcAICrLQYAqi0GAG6cAIBynACArx0GAK4dBgCtJQYArC0GAKhVBgCpWQYAqm0GAKthBgCsaQYArWkGAK6ZBgCvmQYAdpwAgHqcAIB+nACAgpwAgIacAICKnACAjpwAgJKcAIC4+QYAufkGALqNBgC7hQYAvJ0GAL2FBgC+hQYAv7UGALDpBgCx6QYAsvkGALP5BgC06QYAtd0GALbJBgC3yQYAs+UGAJacAICanACAnpwAgKKcAIC26QYAteEGAKacAIC7LQYAui0GAKqcAICunACAvxkGAL4tBgC9LQYAvC0GAIIVAACjoQYAgGEAAIFhAACmrQYAspwAgL6QAQClpQYAqmkGAKtpBgCEpAEAupwAgK5pBgCvXQY
ArGkGAK1pBgCohQIAqY0CAKqVAgCruQIArNUCAK3dAgCu1QIAr80CAIaAHACHZAMAvpwAgL5gAwDCnACAxpwAgMqcAIDOnACAuHUDALl9AwC6dQMAu8kDALzZAwC92QMAvskDAL/BAwCwvQIAsY0CALKFAgCzTQMAtFUDALVdAwC2VQMAt00DALMdAgDSnACAhAgDANacAIDanACAtl0CALVdAgDenACAu0kCALp5AgDinACA5pwAgL+ZAwC+kQMAvZkDALxRAgCwAAAAo1kCAOqcAIDunACAphkCAPKcAID2nACApRkCAKo9AgCrDQIA+pwAgP6cAICu1QMAr90DAKwVAgCt3QMAAp0AgAadAIAKnQCA76wGAA6dAIASnQCAFp0AgBqdAIC+6BwAHp0AgCKdAIAqnQCALp0AgOGABwAynQCA42AGAIBdAACBYQAAgmEAALN9AQA2nQCAtW0BALZlAQA6nQCAhiAdAIdYHQC6+QEAu/EBALzZAQC92QEAvrEBAL+xAQDvoAAAPp0AgEKdAIBGnQCASp0AgE6dAIBSnQCA71wBAIRsHADhzAYAVp0AgOMcBgDjSAAAWp0AgOEwAQBenQCAo/EBAGKdAICFABQAZp0AgGqdAICm6QEApeEBAG6dAICrfQEAqnUBAHKdAIB2nQCArz0BAK49AQCtVQEArFUBAKjtHQCpLR4AqjkeAKs5HgCsKR4ArSkeAK6dHgCvkR4AJp0AgHqdAIB+nQCAgp0AgIadAICC+QAAgfEAAID9AAC4qR4AuakeALpJHwC7SR8AvFkfAL1FHwC+TR8Av0UfALDxHgCx+R4AssEeALPBHgC0uR4AtbkeALatHgC3pR4AsBEfALERHwCyER8AsyUfALQlHwC1KR8Atl0fALdRHwC4cR8AuXkfALpBHwC7QR8AvJUAAL2dAAC+lQAAv40AAIqdAIC2nACAjp0AgJKdAICWnQCAmp0AgIb4AwCH0AAAqM0fAKnVHwCq0R8Aq70fAKytHwCtcR8ArnEfAK9xHwCzOR4Anp0AgKKdAICmnQCAqp0AgLaRHgC1RR4Arp0AgLu1HgC6tR4Asp0AgLadAIC/jR4AvoEeAL2RHgC8pR4Aup0AgKN9HgC+nQCAwp0AgKbVHgDGnQCAyp0AgKUBHgCq8R4Aq/EeAM6dAIDSnQCArsUeAK/JHgCs4R4ArdUeAKhVAQCpgQAAqoEAAKuBAACsgQAArYkAAK6xAACvsQAA1p0AgNqdAIDenQCA4p0AgOadAIDqnQCA7p0AgPKdAIC4ZQAAuW0AALplAAC7fQAAvGUAAL1tAAC+ZQAAv90DALChAACxrQAAsqUAALO5AAC0qQAAtZ0AALaVAAC3XQAA9p0AgIIdAACBHQAAgB0AAPqdAID+nQCAAp4AgL4UAgAKngCAhKgCAA6eAIASngCAFp4AgBqeAIAengCAjwAAALNJAwAingCAhugEAIesAgAmngCAtkkDALVJAwAqngCAuykDALolAwAungCAMp4AgL8ZAwC+LQMAvS0DALwxAwA2ngCAo40DADqeAIA+ngCApo0DAEKeAIBGngCApY0DAKrhAwCr7QMASp4AgE6eAICu6QMAr90DAKz1AwCt6QMAvoQDAFKeAIBWngCAWp4AgF6eAIBingCAZp4AgGqeAICAPQAAgQkAAIIZAABungCAcp4AgHqeAICENAMAfp4AgLMtAQCCngCAh8wCAIZMBQCGngCAti0BALUtAQCKngCAu0kBALp5AQCOngCAkp4AgL+9AQC+vQEAvbkBALxRAQDheB8Alp4AgOPQHwCangCAnp4AgOGUAQCingCA42gDAKaeAICqngCArp4AgO+IAwCyngCAtp4AgO+sHwC6ngCAvp4AgMKeAIDGngCAyp4AgM6eAIDSngCA1p4AgO9EHgDangCA4dweAN6eAIDjHB4A4p4AgOqeAIDungCA8p4AgIFpAACAZQAAo+UBAIJ9AACl5QE
A9p4AgIQUBACm5QEAvigEAPqeAICrgQEAqrEBAK1xAQCsmQEAr3UBAK51AQCoIQYAqS0GAKolBgCrPQYArCUGAK0tBgCuXQYAr00GAHaeAIDmngCAhggDAIeMAwD+ngCAAp8AgAafAIAKnwCAuOkGALnpBgC6jQYAu4UGALydBgC9hQYAvo0GAL+FBgCwPQYAsQ0GALIFBgCz7QYAtPkGALX5BgC27QYAt+UGALDNBwCx1QcAstEHALPtBwC09QcAtf0HALbpBwC36QcAuN0HALklBwC6LQcAuyUHALw9BwC9JQcAvi0HAL8lBwAOnwCAEp8AgAaeAIAWnwCAGp8AgB6fAIAinwCAJp8AgKgVBgCpGQYAqu0HAKv9BwCs7QcArd0HAK7VBwCvuQcAswUGACqfAIAunwCAMp8AgDafAIC2PQYAtQUGADqfAIC7cQYAumkGAD6fAIBCnwCAv1kGAL5RBgC9WQYAvGUGAEafAICjQQYASp8AgE6fAICmeQYAUp8AgIS0AQClQQYAqi0GAKs1BgC+gAEAWp8AgK4VBgCvHQYArCEGAK0dBgCoNQYAqT0GAKo1BgCrWQYArHUGAK2lAQCurQEAr6UBAIDpAACB6QAAgv0AAL8kAQCGMA8Ah+QAAF6fAIBinwCAuMUAALnNAAC6xQAAu90AALzNAAC9/QAAvvUAAL+dAACw3QEAsSUBALItAQCzIQEAtCEBALUhAQC2IQEAtyEBALvBAgC6OQIAZp8AgGqfAIC/xQIAvsUCAL3VAgC82QIAs50FAG6fAIBynwCAdp8AgIwAAAC2BQIAtd0FAHqfAICqfQIAq4UCAH6fAICCnwCAroECAK+BAgCsnQIArZECAIafAICj2QUAip8AgI6fAICmQQIAkp8AgJafAIClmQUAgpFqAIORagCanwCAnp8AgIa5FgCH6RcAhBEWAIWZFgCKoRIAi6ESAKKfAICmnwCAjpEeAI9ZHgCMmRMAjREeAJJxGgCT5RoAqp8AgO/oJACW8QYAlwUGAJTlGgCVGQYAmikCAJvFAgCunwCAsp8AgLafAIDhKBsAnN0CAOMgDwCfIQcAnsEHAJ01GwCcLRsAm6EbAJr5HwCZOR8AmLEfAJcBEgCWIRMAlSkTAJRRFgCTGRcAkjEXAJGxFwCQKWsAj1FrAOOsBwCEBA0A4RwHAIANAACBNQAAgj0AALqfAIC+nwCAwp8AgL4gDQDKnwCAzp8AgO9MBwCGWAwAh2ANANKfAIDWnwCA2p8AgN6fAICEXA8A4p8AgO8IAADvhAYA4ZABAOGwBgDj4AAA42QGAOafAIDqnwCA7p8AgPKfAID2nwCA+p8AgL4ADwCEQA4A/p8AgAKgAIAGoACACqAAgA6gAIASoACAFqAAgBqgAICj1QMAotUDAKExAwCgLQcAVp8AgMafAIAeoACAIqAAgCagAICCmQAAgZEAAICZAACoTQ0AqZ0NAKqVDQCrJQ4ArD0OAK0RDgCuEQ4ArxEOALB9DgCxDQ4AsgUOALMtDgC0OQ4AtTkOALYtDgC3JQ4AuOkOALnpDgC6wQ4Au8EOALy5DgC9nQ4AvpUOAL+NDgCzPQ0AKqAAgC6gAIAyoACANqAAgLaxDgC1lQ4AOqAAgLvpDgC6mQ4AhogAAIfkAAC/3Q4Avt0OAL3ZDgC88Q4APqAAgKN5DQC+hAEAhIAGAKb1DgBCoACARqAAgKXRDgCq3Q4Aq60OAEqgAIBOoACArpkOAK+ZDgCstQ4ArZ0OALIFNQCzGTQAsG0wALENNQBSoACAVqAAgLQBKAC1PSkAWqAAgF6gAIBioACAZqAAgGqgAIBuoACAcqAAgHagAICiRQEAo9UBAHqgAIChTQEAps0FAKcBOACkAQQApX0FAKoBPACrRT0AqEk5AKnlOQCudTEAr30xAKxdPQCtATAAqO0OAKn1DgCqCQ4AqwkOAKwZDgCtGQ4Arg0OAK8tDgB+oACAgqAAgIagAICKoAC
AjqAAgJKgAICWoACAmqAAgLgdDgC5JQ4Aui0OALslDgC8PQ4Avd0BAL7VAQC/zQEAsFUOALFdDgCyVQ4Asy0OALQ1DgC1JQ4Ati0OALclDgCzgQ0AnqAAgKKgAICqoACArqAAgLaZDQC1kQ0AvlQEALuZDQC6kQ0AhogEAIe8AwC/4Q0AvvENAL35DQC8gQ0AgkkAAKPFDQCA9QMAgUkAAKbdDQCyoACAtqAAgKXVDQCq1Q0Aq90NALqgAIC+oACArrUNAK+lDQCsxQ0Arb0NAKgdAgCpRQIAql0CAKtVAgCseQIArXkCAK6JAwCviQMAwqAAgMagAIDKoACAzqAAgIT8BQDSoACA1qAAgNqgAIC4iQMAuWUDALptAwC7ZQMAvH0DAL1lAwC+bQMAv2UDALDBAwCxwQMAssEDALPBAwC0wQMAtcEDALbBAwC3wQMA3qAAgOKgAIDmoACA6qAAgO6gAIDhpAEA8qAAgOPADgC+aAQA9qAAgPqgAIDvHAEA/qAAgAKhAIAGoQCACqEAgLOVAwAOoQCAEqEAgBqhAIAeoQCAtrkDALWxAwAioQCAu0UCALpFAgCGqAQAh6QFAL9FAgC+RQIAvVUCALxVAgDh4A4A4SwMAOMIDgDj1A4AgK0AAIHRAACC0QAAJqEAgCqhAIAuoQCAMqEAgDahAIA6oQCAPqEAgO+IDgDvLA4AoxUDAEKhAICFxCsARqEAgEqhAICmOQMApTEDAE6hAICrxQIAqsUCAFKhAIBWoQCAr8UCAK7FAgCt1QIArNUCAKgNBgCpFQYAql0GAKtVBgCseQYArXkGAK65BgCvuQYAFqEAgFqhAIBeoQCAYqEAgGahAIBqoQCAbqEAgHKhAIC4TQcAuVUHALpRBwC7aQcAvHkHAL1lBwC+bQcAv2UHALDJBgCxyQYAst0GALPVBgC0zQYAtXUHALZ9BwC3dQcAs9UGAHahAIB6oQCAfqEAgIKhAIC2+QYAtfEGAIahAIC7DQYAug0GAIYIAACHLAAAv7EHAL4JBgC9AQYAvAkGAIJRAACjkQYAgEEAAIFBAACmvQYAiqEAgI6hAICltQYAqkkGAKtJBgCSoQCAlqEAgK5NBgCv9QcArE0GAK1FBgCwsQYAsbEGALLNBgCzwQYAtMEGALXJBgC28QYAt/EGALgFAQC5DQEAugUBALsdAQC8BQEAvQ0BAL4FAQC/uQEAmqEAgJ6hAICioQCApqEAgKqhAICuoQCApqAAgLKhAICoLQYAqTUGAKo1BgCr8QYArNEGAK3RBgCu0QYAr9EGALPdBgC2oQCAuqEAgL6hAIDCoQCAtjEGALU5BgDGoQCAuxUGALoVBgDKoQCAzqEAgL9tBgC+ZQYAvXUGALx5BgDSoQCAo5kGANahAIDaoQCApnUGAN6hAIDioQCApX0GAKpRBgCrUQYA5qEAgOqhAICuIQYArykGAKw9BgCtMQYAqNUCAKndAgCq4QIAq+ECAKxRAwCtUQMArlEDAK9RAwDuoQCA8qEAgL7sAwD6oQCA/qEAgAKiAIAGogCACqIAgLjpAwC56QMAuokDALuFAwC8nQMAvYEDAL6BAwC/tQMAsDEDALExAwCyNQMAs+kDALT5AwC1+QMAtukDALfhAwCAbQMAgaUAAIKtAACzZQIADqIAgLXVAwC23QMAEqIAgITgAgAWogCAuvkDALv5AwC87QMAvTEDAL4xAwC/MQMAh+wDAIZkPACyAAAAGqIAgB6iAIDjCAQAIqIAgOHsBgAmogCA7wAGACqiAIAuogCAMqIAgDaiAIA6ogCAPqIAgEKiAIBGogCASqIAgE6iAIDjoAMAUqIAgOGoAQBWogCA7/ADAIIdAACBHQAAgB0AAFqiAIBeogCAYqIAgGqiAIC+TD0AbqIAgKOhAwC+QDwApRECAHKiAIB2ogCAphkCAIRsAgB6ogCAqz0CAKo9AgCt9QIArCkCAK/1AgCu9QIAhkA8AIe0PQB+ogC
AgqIAgIaiAICKogCAjqIAgO9EBgCSogCA4dQGAJaiAIDjDAcAmqIAgJ6iAICiogCApqIAgLP1AQCqogCArqIAgLKiAIC2ogCAtkUBALXlAQC6ogCAuzEBALopAQC+ogCAwqIAgL8dAQC+HQEAvRkBALwlAQCoLT4AqTU+AKo9PgCrNT4ArC0+AK2FPgCuhT4Ar7k+AGaiAIDGogCAyqIAgM6iAICAGQAAgRkAAIIFAADSogCAuLk+ALm5PgC6ST8Au0k/ALxZPwC9WT8Avk0/AL9BPwCwrT4AsbU+ALKxPgCzjT4AtJk+ALWZPgC2iT4At4k+AKO1PgCEjAIA1qIAgNqiAIDeogCApgU+AKWlPgDiogCAq3E+AKppPgCGCAAAh2gDAK9dPgCuXT4ArVk+AKxlPgDmogCAs5E/AOqiAIDuogCAtlk/APKiAID2ogCAtbk/ALp1PwC7fT8A+qIAgP6iAIC+QT8Av0E/ALxZPwC9VT8AsJU+ALGdPgCyqT4As6U+ALShPgC1oT4AtqE+ALehPgC45T4Aue0+ALrlPgC7/T4AvO0+AL3dPgC+1T4AvxkBAAKjAIAGowCACqMAgA6jAIASowCA9qEAgBajAIAaowCAqF0+AKkhPgCqPT4AqzU+AKwVPgCt/T4ArvU+AK/tPgCj1T4AHqMAgCKjAIAmowCAKqMAgKYdPgCl/T4ALqMAgKs5PgCqMT4AMqMAgDajAICvBT4ArgU+AK0RPgCsHT4AgREAAIANAAA6owCAghkAAD6jAIBCowCAhJQBAL4QAACGQAcAhwABAEqjAIBOowCAUqMAgFajAIBaowCAXqMAgKiNAgCplQIAqpUCAKvNAgCs2QIArdkCAK7NAgCvxQIAYqMAgGajAIBqowCAbqMAgIwAAAByowCAdqMAgHqjAIC4HQMAucEDALrBAwC7wQMAvMEDAL3JAwC+8QMAv/EDALCJAgCxiQIAsikDALMpAwC0OQMAtTkDALYpAwC3JQMAsx0CAH6jAICCowCAhqMAgIqjAIC2WQIAtVECAI6jAIC7TQIAuk0CAJKjAICWowCAv/0DAL79AwC9/QMAvP0DAJqjAICeowCAoqMAgKajAIDhDD4AqqMAgOOoPwCuowCAgT0AAIAxAADvUD8Agh0AALKjAIC++AQAhhgFAIdMAwCEDAIA48wAALqjAIDhvAEAvqMAgMKjAIDGowCAyqMAgM6jAICELAUA0qMAgNajAIDaowCA7xAAAN6jAIDiowCAo90DAOajAIDqowCA7qMAgPKjAICmmQMApZEDAPajAICrjQMAqo0DAPqjAID+owCArz0CAK49AgCtPQIArD0CAAKkAIAGpACACqQAgA6kAIASpACAFqQAgBqkAIDvKD4AHqQAgOE8PgAipACA4zgBAIApAACBFQAAghEAACqkAICzMQIAvsgEAITABAAupACAMqQAgLYpAgC1IQIANqQAgLvNAQC6zQEAOqQAgD6kAIC/dQEAvskBAL3BAQC8yQEAqOkFAKnpBQCq+QUAq/kFAKzpBQCt6QUArjkGAK85BgC2owCAJqQAgIaIAACHQAMAQqQAgEakAIBKpACATqQAgLjRBgC52QYAuuEGALvhBgC8kQYAvZEGAL6RBgC/kQYAsEkGALFJBgCyXQYAs1UGALRNBgC18QYAtvEGALfxBgCjcQUAUqQAgFakAIBapACAXqQAgKZpBQClYQUAYqQAgKuNBgCqjQYAZqQAgGqkAICvNQYArokGAK2BBgCsiQYAbqQAgLPRBwBypACAdqQAgLbxBwB6pACAfqQAgLXBBwC60QcAu90HAIKkAICGpACAvrkHAL+5BwC8xQcAvbkHALhpBgC5aQYAuokGALuJBgC8mQYAvZkGAL6JBgC/iQYAsBEGALEdBgCyFQYAs2kGALR5BgC1eQYAtmkGALdhBgCoSQYAqVUGAKpdBgCrVQYArE0GAK11BgCucQYAr3EGAEajAICCHQA
AgR0AAIAdAACKpACAjqQAgJKkAIC+cAEAo5UGAJqkAICGKAAAh0gBAJ6kAICmtQYApYUGAKKkAICrmQYAqpUGAKakAICqpACAr/0GAK79BgCt/QYArIEGAK6kAICzFQYAsqQAgLakAIC2PQYAuqQAgL6kAIC1NQYAutkBALvZAQDCpACAxqQAgL59AQC/ZQEAvH0BAL11AQCovQUAqckFAKrZBQCr0QUArPkFAK35BQCuKQIArykCAMqkAIDOpACA0qQAgNakAICMAAAA2qQAgN6kAIDipACAuO0CALmFAgC6gQIAu4ECALyFAgC9jQIAvrECAL+xAgCwWQIAsVkCALLtAgCz5QIAtP0CALXlAgC25QIAt9UCAKNRBQDmpACA6qQAgO6kAIDypACApnkFAKVxBQD2pACAq50CAKqdAgD6pACA/qQAgK8hAgCuOQIArTECAKw5AgCBbQAAgG0AAAKlAICCBQAAvlwMAAqlAIAOpQCA79AGAITsAwDhHAUAEqUAgOP8BwAWpQCAGqUAgIbYDACHvAwAqIUCAKmVAgCqlQIAq6UCAKy9AgCt1QIArtECAK/RAgAepQCAIqUAgCalAIAqpQCALqUAgDKlAIA2pQCAOqUAgLh1AQC5fQEAunUBALvJAQC82QEAvdkBAL7JAQC/wQEAsLUCALG9AgCygQIAs4ECALRRAQC1UQEAtlEBALdRAQA+pQCAhAQNAEKlAIBGpQCAvhwMAEqlAIDvHAAA76AGAOGQAQDhRAcA43AGAOOYBgBOpQCAUqUAgFalAIBapQCAs10CAF6lAIBipQCAZqUAgGqlAIC2FQIAtXUCAG6lAIC7OQIAujECAHKlAIB6pQCAv9UBAL7VAQC9FQIAvBUCAKOdDQAGpQCAdqUAgH6lAICCpQCAptUNAKW1DQCGpQCAq/kNAKrxDQCGCAMAh2ADAK8VDgCuFQ4ArdUNAKzVDQCAkQ8AgZkPAIKhDwCzpQ4AiqUAgLWhDgC2eQ8AjqUAgJKlAICWpQCAukUPALtdDwC8RQ8AvU0PAL5FDwC//Q8AqFUOAKldDgCqYQ4Aq30OAKxlDgCttQ8Arr0PAK+1DwCapQCAnqUAgKKlAICmpQCAqqUAgK6lAICypQCAtqUAgLhVDwC5dQ8Aun0PALt1DwC8bQ8AvREPAL4RDwC/EQ8AsM0PALHVDwCy3Q8As9UPALTNDwC1dQ8AtnEPALdxDwCj6Q8AuqUAgL6lAIDCpQCAxqUAgKY1DgCl7Q8AyqUAgKsRDgCqCQ4AzqUAgNKlAICvsQ4ArgkOAK0BDgCsCQ4A1qUAgIIdAACBHQAAgB0AANqlAIDepQCA4qUAgL6UAQCErAEA5qUAgIfgAQCGzAAA6qUAgO6lAIDypQCAlqQAgKhtDgCpiQEAqpkBAKuRAQCswQEArckBAK75AQCv+QEAhKAAAPalAID6pQCA/qUAgAKmAIAGpgCACqYAgA6mAIC4xQAAuc0AALrFAAC73QAAvM0AAL39AAC+9QAAv50AALBBAQCxQQEAskEBALNBAQC0QQEAtUEBALZBAQC3QQEAsxECABKmAIAWpgCAGqYAgB6mAIC2SQIAtUkCACKmAIC7hQIAuoUCACamAIAqpgCAv4UCAL6FAgC9lQIAvJUCAIU8GgCjVQIALqYAgDKmAICmDQIANqYAgDqmAIClDQIAqsECAKvBAgA+pgCAQqYAgK7BAgCvwQIArNECAK3RAgCCGQAARqYAgIAZAACBGQAASqYAgE6mAIBSpgCAWqYAgL4ABABepgCAYqYAgGamAIBqpgCAbqYAgHKmAIB2pgCA7+gOAHqmAICG6AQAh1ADAH6mAICCpgCA74ACAIamAIDhlAEAiqYAgONYAQCOpgCA4wAOAJKmAIDhaA0AlqYAgKhxAgCpcQIAqnECAKupAgCsuQIArbkCAK6pAgCvqQIAhKwFAJqmAICepgCAoqYAgKamAICqpgCArqYAgLKmAIC4bQE
AuQ0BALoFAQC7GQEAvAkBAL09AQC+NQEAv9kBALDZAgCx2QIAsm0BALNlAQC0fQEAtWUBALZlAQC3VQEA4WAPAOP0AADjHA4A4bwBALamAICCOQAAgTEAAIA9AAC6pgCAvigEAL6mAIDCpgCAvjwHAO8QAADv0A4AyqYAgIbgBACHyAQAzqYAgLO1AgDSpgCAtX0CALZ1AgDWpgCA2qYAgN6mAIC6UQIAu1ECALz1AQC9/QEAvvUBAL/tAQBWpgCAxqYAgKqxBQCrsQUArBUGAK0dBgCuFQYArw0GAOKmAIDmpgCA6qYAgKNVBQDupgCApZ0FAKaVBQDypgCAs+kGAPamAID6pgCA/qYAgAKnAIC24QYAtekGAAanAIC7sQYAuqEGAAqnAIAOpwCAv50GAL6RBgC9pQYAvKkGAKgdBgCpIQYAqiEGAKshBgCsIQYArSEGAK4hBgCvIQYAEqcAgBanAIAapwCAHqcAgCKnAIAmpwCAKqcAgC6nAIC45QcAue0HALrlBwC7/QcAvOUHAL3tBwC+5QcAv00HALAlBgCxNQYAsj0GALMxBgC0FQYAtRkGALYNBgC3AQYAo6kHAIIVAACBtQEAgLUBADKnAICmoQcApakHADanAICr8QcAquEHAISgAgA6pwCAr90HAK7RBwCt5QcArOkHAD6nAICzlQYAhugAAIcYAQC2tQYAQqcAgEanAIC1vQYAukkBALtVAQBKpwCATqcAgL45AQC/OQEAvEUBAL05AQCoPQYAqU0GAKpZBgCrUQYArHEGAK1xBgCuuQEAr7kBAISsAQBSpwCAVqcAgFqnAIBepwCAYqcAgGanAIBqpwCAuKkBALmpAQC6aQEAu2kBALx5AQC9eQEAvmkBAL9pAQCwyQEAsdUBALLVAQCzqQEAtLkBALW5AQC2qQEAt6EBAKPRBQBupwCAcqcAgHanAIB6pwCApvEFAKX5BQB+pwCAqxECAKoNAgCCpwCAhqcAgK99AgCufQIArX0CAKwBAgCKpwCAjqcAgJKnAICWpwCAgTEAAIANAACapwCAgjkAAJ6nAICipwCAviQDAKqnAICupwCAsqcAgIbYHACHTAMAtqcAgLqnAIC+pwCAhMAcAOMgAQDCpwCA4cgBAManAIDvMAIAyqcAgM6nAIDSpwCA1qcAgNqnAIDepwCA4qcAgLOVAwDmpwCA6qcAgO6nAIDypwCAtrkDALWxAwD2pwCAu1EDALpJAwD6pwCA/qcAgL/1AAC+SQMAvUEDALxJAwCoLQIAqUUCAKpdAgCrVQIArHkCAK15AgCuvQIAr7UCAL5oHQACqACABqgAgAqoAICAHQAAgQkAAIKpAAAOqACAuFEBALlZAQC6YQEAu2EBALwRAQC9EQEAvhEBAL8RAQCwzQIAsdUCALLdAgCz1QIAtM0CALVxAQC2cQEAt3EBAOFYBgDhVAcA47AAAOO8BgASqACAGqgAgIYYHACHVB0AHqgAgCKoAIAmqACAKqgAgL74HAAuqACA7/AGAO/gBgCjlQIAMqgAgDaoAIA6qACAPqgAgKa5AgClsQIAQqgAgKtRAgCqSQIARqgAgEqoAICv9QEArkkCAK1BAgCsSQIAqG0eAKl1HgCqfR4Aq40eAKyVHgCtnR4Aro0eAK+BHgAWqACATqgAgFKoAIBWqACAWqgAgF6oAIBiqACAZqgAgLiJHgC5iR4AupkeALuRHgC8uR4AvbkeAL59HwC/dR8AsMUeALHNHgCyxR4As90eALTFHgC1zR4AtsUeALe5HgCz9R4AaqgAgG6oAIByqACAdqgAgLYdHgC1HR4AeqgAgLsJHgC6AR4AfqgAgIKoAIC/CR4AvgEeAL0JHgC8ER4Agm0AAKOxHgCAVQAAgWUAAKZZHgCEmAMAv9ABAKVZHgCqRR4Aq00eAIYABACHmAEArkUeAK9NHgCsVR4ArU0eAIqoAICOqACAhCQAAJKoAICWqACAmqgAgKanAICGqAC
AqLUeAKmFHgCqjR4Aq4UeAKydHgCtgR4Arv0eAK/1HgCwjR4AsZUeALKVHgCzpR4AtL0eALVxAQC2cQEAt3EBALhRAQC5UQEAulEBALtRAQC89QEAvf0BAL71AQC/7QEAsyUeAL4IBwCeqACAoqgAgKaoAIC2IR4AtTUeAKqoAIC7cR4AumkeAK6oAICyqACAv5UBAL5ZHgC9UR4AvGEeALaoAICjYR4AuqgAgL6oAICmZR4AwqgAgMaoAIClcR4Aqi0eAKs1HgDKqACAzqgAgK4dHgCv0QEArCUeAK0VHgDhVBoA0qgAgONcCgDWqACA2qgAgN6oAIDiqACA5qgAgOqoAIC+qAUA7qgAgPKoAICPMSoA+qgAgO/E+wD+qACAk2EuAJIdLwCR2SoAkEkqAJfZEgCWdRIAlQ0TAJTBLgCbHRsAmkEWAJlJFgCYDRcAn3EeAJ4RGwCdcRoAnHkaAKOhAgCinQMAoZUfAKCJHgDjiAEA4wgeAOFoAADh/B4A79wBAO98HwC1if4AtAH8ALMB+gCylfoAsQH4ALAR9gCv4fYArgH0AK0l8gCs7fIAqwHwAKrpDwCp1Q4AqN0OAKcBDACmyQoApe0KAKQBCACj4QYAovEGAKHlAwACqQCAggErAIMBKwAGqQCACqkAgIYxLwCHiS8AhIkrAIVFLgCKdRIAiwUTAIYIBQCHbAUAjhEXAI8RFwCMsRMAjV0WAJI9GgCTQRsAhMgFAIQABwCWUR8Al1EfAJRRGwCVORoAmn0eAJt9AgAOqQCAEqkAgIFZAQCAVQEAnFkDAIJRAQC+yAcAFqkAgBqpAIAeqQCAIqkAgCapAIAqqQCA79QeAC6pAIDhJB4AMqkAgONoAQA2qQCAOqkAgD6pAIBCqQCAu2kCALpZAgBGqQCASqkAgL8dAgC+HQIAvRkCALxxAgCz7QIATqkAgFKpAIBWqQCAWqkAgLZ9AgC17QIAXqkAgKMNBQD2qACAYqkAgGqpAIBmqQCApp0FAKUNBQBuqQCAq4kFAKq5BQCGCAMAh3wDAK/9BQCu/QUArfkFAKyRBQCAsQcAgbkHAIJBAACzsQYAcqkAgLVZBwC2MQcAdqkAgHqpAIB+qQCAuuEHALvhBwC84QcAveEHAL7hBwC/3QcAqLUGAKm5BgCqdQYAq4UHAKydBwCt/QcArvUHAK8ZBwCCqQCAhqkAgIqpAICOqQCAkqkAgJapAICaqQCAnqkAgLh1BwC5fQcAunUHALsFBwC8HQcAvTEHAL4xBwC/MQcAsGkHALFpBwCyeQcAs3kHALRpBwC1VQcAtlEHALdNBwCj/QcAoqkAgKapAICqqQCArqkAgKZ9BgClFQYAsqkAgKutBgCqrQYAtqkAgLqpAICvkQYArq0GAK2tBgCsrQYAvqkAgMKpAIDGqQCAyqkAgIAdAACBCQAAgjkAAM6pAIDSqQCA2qkAgIbIAACHpAEA3qkAgOKpAIDmqQCA6qkAgKiNAQCpmQEAqtkBAKvRAQCs8QEArfEBAK45AQCvOQEAhKAAAO6pAIDyqQCA9qkAgPqpAID+qQCAAqoAgAaqAIC4zQAAudUAALrVAAC75QAAvP0AAL2VAAC+nQAAv5UAALBJAQCxSQEAslkBALNZAQC0SQEAtUkBALb9AAC39QAAugUEALsJBAC44QcAueEHAL4JBAC/CQQAvAkEAL0JBACyjQcAs+UHALC1BwCxhQcAtuUHALftBwC08QcAtfEHAKpNBwCrVQcAqEkHAKlJBwCu3QcAr8UHAKxNBwCt1QcACqoAgA6qAIASqgCAFqoAgBqqAIAeqgCAIqoAgCaqAICz0QIAKqoAgC6qAIC+AAwAMqoAgLbxAgC1+QIANqoAgLsNAgC6DQIAOqoAgD6qAIC/DQIAvg0CAL0NAgC8DQIAghUAAKOVAgCAYQAAgWEAAKa1AgBCqgCASqoAgKW9AgCqSQIAq0kCAIbIDACHrAwArkkCAK9JAgCsSQI
ArUkCAKhlAgCpdQIAqn0CAKt1AgCsbQIArbECAK6xAgCvsQIAhKANAE6qAIBSqgCAVqoAgFqqAIBeqgCAYqoAgGaqAIC4MQEAuTEBALoxAQC7MQEAvNUBAL3dAQC+yQEAv8EBALDRAgCx0QIAstECALPRAgC0EQEAtREBALYRAQC3EQEA4bAGAGqqAIDj0AYAhEAPAG6qAIDhpAEAcqoAgOPABgB2qgCAeqoAgH6qAIDv1AYA7AAAAIKqAIDvZAcAhqoAgIqqAICOqgCAkqoAgLO5AgCWqgCAtakCALZ9AgCaqgCAnqoAgKKqAIC6WQIAu1kCALxJAgC9SQIAvpkBAL+ZAQCjdQ0ARqoAgKaqAICqqgCArqoAgKaxDQClZQ0AsqoAgKuVDQCqlQ0AvqQDALaqAICvVQ4ArlUOAK2FDQCshQ0AgE0AAIFVAACCVQAAs2UPALqqAIC1ZQ8Atm0PAL6qAICGQAMAhxQDALrtDwC7/Q8AvOkPAL3VDwC+3Q8Av9UPAKhZDgCpoQ8AqqEPAKuhDwCsoQ8AraEPAK6hDwCvoQ8AwqoAgMaqAIDKqgCAzqoAgNKqAIDWqgCA2qoAgN6qAIC4AQ8AuQEPALoBDwC7HQ8AvA0PAL01DwC+PQ8Av9UAALBlDwCxdQ8AsnEPALNNDwC0VQ8AtV0PALZNDwC3QQ8AoykOAOKqAIDmqgCA6qoAgO6qAICmIQ4ApSkOAPKqAICrsQ4AqqEOAPaqAID6qgCAr5kOAK6RDgCtmQ4ArKUOAP6qAIACqwCABqsAgAqrAIDvJA0ADqsAgBKrAIAWqwCA49AOABqrAIDhGA4AHqsAgIAVAACBGQAAggUAACKrAICo0QEAqdkBAKopAQCrKQEArDkBAK05AQCuKQEArykBAL5oAQAqqwCAhsgBAIesAAAuqwCAMqsAgDarAIA6qwCAuO0AALmFAAC6jQAAu4UAALydAAC9gQAAvoEAAL+BAACwWQEAsVkBALLtAACz5QAAtP0AALXlAAC25QAAt9UAALOhAgA+qwCAQqsAgEarAIBKqwCAtrkCALWxAgBOqwCAu50CALqdAgBSqwCAVqsAgL8hAwC+OQMAvTEDALw5AwCF+PUAo+UCAFqrAIBeqwCApv0CAGKrAIBmqwCApfUCAKrZAgCr2QIAaqsAgG6rAICufQMAr2UDAKx9AwCtdQMAuOkAALnpAAC6aQAAu2kAALx5AAC9ZQAAvm0AAL9lAACwsQAAsbkAALKBAACzgQAAtPkAALX5AAC27QAAt+UAAKhlAwCpdQMAqn0DAKt1AwCsbQMArdEAAK7RAACv0QAAcqsAgHarAIB6qwCA1qkAgH6rAICCqwCAhqsAgIqrAICA/QEAgQkAAIIZAACOqwCAkqsAgL5EAgCaqwCAnqsAgISsAgCiqwCAh/gCAIasBQCmqwCAqqsAgK6rAICyqwCAs/UCALarAIC6qwCAvqsAgMKrAIC2UQEAteUCAMarAIC7fQEAunUBAMqrAIDOqwCAvz0BAL49AQC9VQEAvFUBAOFwDwDSqwCA47gOAITABQDvyAAA1qsAgNqrAIDeqwCA4zwOAOKrAIDh0AEA5qsAgIR0BwDqqwCA72gBAO6rAIDyqwCApXkCAKbNAQD2qwCAgCEAAIEhAACC3QcAo2kCAKzJAQCtyQEArqEBAK+hAQD6qwCA/qsAgKrpAQCr4QEAlqsAgAKsAIC+QAIABqwAgIYwAwCHMAMACqwAgA6sAICoOQcAqTkHAKoNBwCrHQcArAUHAK0NBwCuBQcAr3kHALAJBwCxCQcAshkHALMRBwC0OQcAtTkHALbdBwC3yQcAuPkHALn5BwC6zQcAu8EHALzFBwC9yQcAvrkHAL+xBwCzpQcAEqwAgBasAIAarACAHqwAgLatBwC1rQcAIqwAgLvtBwC67QcAJqwAgCqsAIC/3QcAvt0HAL3lBwC87QcALqwAgKPhBwAyrACANqwAgKbpBwA6rAC
APqwAgKXpBwCqqQcAq6kHAEKsAIBGrACArpkHAK+ZBwCsqQcAraEHAEqsAIBOrACAUqwAgFasAIBarACAXqwAgGKsAIBmrACAgREAAIANAABqrACAghkAAG6sAIByrACAvuQBAHasAICG4AAAhxgBAHqsAIB+rACAgqwAgIasAICKrACA77AEAI6sAIDh1AYAkqwAgONcBACWrACAmqwAgJ6sAICirACAqJkBAKmZAQCqDQEAqwUBAKwdAQCtBQEArgUBAK81AQCEiAEApqwAgKqsAICurACAsqwAgLasAIC6rACAvqwAgLjBAAC5wQAAusEAALvBAAC8wQAAvcEAAL7BAAC/wQAAsE0BALElAQCyIQEAsyEBALQlAQC1LQEAthEBALcRAQDCrACAxqwAgLONAgDKrACAtZ0CAM6sAIDSrACAto0CANasAIDarACAu+kCALqBAgC9/QIAvP0CAL/hAgC+6QIA3qwAgKbVAgClxQIAvggDAKPVAgCCLQAAgRkAAIB5AACvuQIArrECAK2lAgCspQIAq7ECAKrZAgDirACA6qwAgO80AgDurACAhxgDAIYs/ADyrACA9qwAgPqsAID+rACAAq0AgAatAIAKrQCADq0AgOMAAQASrQCA4eABABatAIC6tQMAu70DABqtAIAerQCAvnkDAL95AwC8pQMAvXkDACarAICztQMAIq0AgCatAIC2kQMAKq0AgC6tAIC1pQMAqEkCAKlJAgCqWQIAq1kCAKxJAgCtdQIArnECAK9tAgC+aP0AvqT/ADKtAIA2rQCAOq0AgD6tAIBCrQCARq0AgLj5AgC5+QIAukkBALtJAQC8XQEAvUEBAL5BAQC/fQEAsBUCALEdAgCyFQIAs8kCALTZAgC12QIAtskCALfJAgDjIAYA4bAGAOGAAQDjEAYAgA0AAIE1AACCPQAASq0AgE6tAIBSrQCAWq0AgF6tAIDvcAAAYq0AgGatAIDvTAEAhIz9AGqtAICjmQIAbq0AgKWJAgByrQCAdq0AgKa9AgCGwPwAh+T8AKuRAgCqmQIArVUCAKyJAgCvVQIArlUCAKh9/gCpgf4Aqpn+AKuZ/gCsif4ArYn+AK65/gCvuf4AVq0AgHqtAIB+rQCAgq0AgIatAICKrQCAjq0AgJKtAIC4tf4Aub3+ALph/wC7Yf8AvGH/AL1h/wC+Yf8Av2H/ALDJ/gCxyf4Ast3+ALPR/gC0uf4Atbn+ALaR/gC3kf4AsxH+AJatAICarQCAnq0AgKKtAIC2Cf4AtQH+AKatAIC7Df4Aug3+AKqtAICurQCAv33+AL59/gC9Bf4AvAn+ALKtAICjVf4Atq0AgLqtAICmTf4Avq0AgMKtAIClRf4Aqkn+AKtJ/gCEKAMAxq0AgK45/gCvOf4ArE3+AK1B/gCAzQEAgdEBAILRAQCzuf4Ayq0AgLXR/gC21f4Azq0AgIZgAQCHYAEAug0BALsFAQC8HQEAvQUBAL4NAQC/BQEA0q0AgNatAIDarQCA3q0AgOKtAIDhwP0A5q0AgOOM/ADqrQCA7q0AgPKtAIDvtPwA9q0AgPqtAID+rQCAAq4AgKgp/gCpKf4Aqj3+AKs1/gCsVf4ArVn+AK5N/gCvRf4ABq4AgAquAIAOrgCAEq4AgBauAIAargCAHq4AgCKuAIC4SQEAuUkBALpZAQC7UQEAvHkBAL15AQC+GQEAvxUBALDFAQCxzQEAssUBALPdAQC0xQEAtc0BALbFAQC3eQEAJq4AgCquAIAurgCAo7n9ADKuAICl0f0AptX9AITQAwBBrgCAvuACAKoNAgCrBQIArB0CAK0FAgCuDQIArwUCAIFJAACAQQAAowkDAIJdAAClGQMARa4AgEmuAICmEQMAhsAEAIfkAwCrDQMAqg0DAK0BAwCsHQMArwEDAK4JAwCw4QMAseEDALLhAwCz/QMAtOUDALXtAwC25QMAtz0DALgFAwC5DQMAugUDALsdAwC8BQM
AvQ0DAL4FAwC/vQAATa4AgFGuAIBVrgCAWa4AgOasAIBdrgCAYa4AgGWuAICo8QMAqfkDAKqpAwCrqQMArLkDAK25AwCuqQMAr6UDALNBAgBprgCAba4AgHGuAIB1rgCAtlkCALVRAgB5rgCAu0UCALpFAgB9rgCAga4AgL9JAgC+QQIAvUkCALxVAgCFrgCAia4AgI2uAICRrgCA74wDAJWuAICZrgCAna4AgONsAwChrgCA4VAAAKWuAICprgCAvngFALGuAICEcAIAgOUAAIHpAACC+QAAta4AgIawBACHVAUAua4AgO9A/gC9rgCA4Vz+AMGuAIDjVAEAxa4AgMmuAIDNrgCA0a4AgLOZAQDVrgCA2a4AgN2uAIDhrgCAth0BALUdAQDlrgCAuz0BALo9AQDprgCA7a4AgL/hAAC++QAAvfEAALz5AACoIQYAqVEGAKpRBgCrzQYArNUGAK3dBgCu1QYAr8kGAK2uAIDxrgCA9a4AgPmuAID9rgCAAa8AgAWvAIAJrwCAuG0HALkFBwC6DQcAuwUHALwdBwC9AQcAvgEHAL8BBwCwuQYAsbkGALJtBwCzZQcAtH0HALVlBwC2ZQcAt1UHAKPZBgANrwCAEa8AgBWvAIAZrwCApl0GAKVdBgCEnAIAq30GAKp9BgC+JAMAHa8AgK+hBwCuuQcArbEHAKy5BwCASQAAgUkAAIJZAACzVQcAIa8AgLV9BwC2aQcAJa8AgIZAAACHVAMAulUHALspBwC8OQcAvTkHAL4pBwC/IQcAo5kGACmvAIAtrwCAMa8AgDWvAICmpQYApbEGADmvAICr5QYAqpkGAD2vAIBBrwCAr+0GAK7lBgCt9QYArPUGAOE4BQBFrwCA4yQEAEmvAIBNrwCAUa8AgFWvAIBZrwCAXa8AgGGvAIBlrwCAaa8AgG2vAIBxrwCA7/QEAHWvAICo+QYAqQkGAKoRBgCrLQYArDkGAK0lBgCuLQYAryUGAHmvAIB9rwCAga8AgIWvAICAGQAAgRkAAIIFAACJrwCAuOUBALntAQC65QEAu/0BALzlAQC97QEAvuUBAL9ZAQCwXQYAsSEGALIhBgCzIQYAtCEGALUpBgC2EQYAtxEGAKjRAgCp2QIAqg0DAKsFAwCsHQMArQUDAK4FAwCvNQMAvmQCAJGvAICVrwCAma8AgJ2vAIChrwCApa8AgKmvAIC4JQMAuS0DALolAwC7PQMAvCUDAL0pAwC++QMAv/kDALBNAwCxIQMAsiUDALM9AwC0JQMAtS0DALYlAwC3HQMAs4UDAITIAgCtrwCAhAgDALGvAIC2hQMAtZUDALWvAIC75QMAuokDAIYIDACHnAMAv+kDAL7hAwC96QMAvPEDAIXsCgA2rgCAo80DALmvAICl3QMAva8AgMGvAICmzQMAxa8AgMmvAICrrQMAqsEDAK2hAwCsuQMAr6EDAK6pAwDNrwCA0a8AgNWvAIDZrwCA78gDAN2vAIDhrwCA5a8AgOO0AwDprwCA4dABAO2vAICADQAAgXUAAIJ9AADxrwCA9a8AgPmvAICzZQEAvgQCALVlAQABsACABbAAgLZlAQCGQA0Ah1gNALv1AQC6/QEAvaUBALy5AQC/mQEAvqUBAAmwAIANsACAEbAAgIQADAAVsACAGbAAgB2wAIDvzAEAIbAAgOEsBgAlsACA4yABAOwAAAApsACALbAAgDGwAIA1sACAo+kBADmwAIA9sACApukBAEGwAIBFsACApekBAKpxAQCreQEASbAAgE2wAICuKQEArxUBAKw1AQCtKQEAqCUOAKktDgCqJQ4Aqz0OAKwlDgCtLQ4AriUOAK+VDgD9rwCAUbAAgFWwAIBZsACAXbAAgIKdAACBnQAAgJ0AALhFDwC5TQ8AukUPALtZDwC8SQ8AvUkPAL59DwC/cQ8AsPEOALH5DgCypQ4As7kOALSpDgC1lQ4Atp0OALd9DwCo1Q8Aqd0PAKoJDwCrCQ8
ArBkPAK0FDwCuDQ8ArwUPAGGwAIBlsACAabAAgL6gAwBtsACAcbAAgId4AwCGEAAAuBUPALkdDwC6IQ8AuyEPALz1AAC9/QAAvvUAAL/tAACwQQ8AsU0PALJdDwCzVQ8AtE0PALU1DwC2MQ8AtzEPAHWwAIDvsAwAebAAgH2wAICBsACAhbAAgImwAICNsACAkbAAgJWwAICZsACAnbAAgKGwAIDjqA0ApbAAgOGMDQCzwQ4AqbAAgK2wAICxsACAtbAAgLbFDgC10Q4AubAAgLvJDgC6xQ4AvbAAgMGwAIC/sQ4AvskOAL3BDgC8yQ4AowEOAMWwAIDJsACAzbAAgNGwAICmBQ4ApREOANWwAICrCQ4AqgUOANmwAICErAIAr3EOAK4JDgCtAQ4ArAkOAIBRAACBWQAAgmEAALPFAAC+zAEAtcUAALbNAADhsACAhkAHAIcUAQC6yQAAu8kAALzZAAC92QAAvskAAL/FAACrDQMAqg0DAKkJAwCouQIArw0DAK4NAwCtDQMArA0DAL5gAwDlsACA6bAAgO2wAIDxsACA9bAAgPmwAIC+MAUAuykDALoZAwC5GQMAuAEDAL/dAwC+3QMAvd0DALwxAwCzTQMAsk0DALFNAwCwTQMAtzkDALYxAwC1QQMAtE0DAP2wAICmkQMApZkDAAGxAICjmQMABbEAgAmxAIANsQCAr5kDAK6VAwCthQMArIUDAKuVAwCqlQMAja8AgBGxAIAVsQCAGbEAgB2xAIAhsQCAJbEAgCmxAIAtsQCAMbEAgDWxAIA5sQCAPbEAgEGxAICAHQAAgQkAAIL9AQBFsQCAvwgHAEmxAIBRsQCA7yQAAFWxAICElAIAWbEAgF2xAICH4AIAhgQFAL4AGABhsQCAZbEAgOGQAQBpsQCA44AAAG2xAIBxsQCAdbEAgLNlAQB5sQCAtWUBALZtAQB9sQCAgbEAgIWxAIC65QEAu/kBALzpAQC96QEAvsUBAL+9AQCJsQCAjbEAgJGxAIC+xBkAlbEAgJmxAICdsQCA78gBAKGxAIDh3A4ApbEAgOMwDgCpsQCArbEAgLGxAICEMAQAgHkAAIEVAACCFQAAo+UBALWxAICl5QEApu0BALmxAICGQAYAh5AHAKplAQCreQEArGkBAK1pAQCuRQEArz0BAKjdBQCpIQYAqiEGAKshBgCsIQYArSEGAK4hBgCvnQYATbEAgL2xAIDBsQCAhDABAMWxAIDJsQCAzbEAgNGxAIC4jQYAuZUGALqdBgC7lQYAvI0GAL21BgC+vQYAv7UGALDtBgCx8QYAsvEGALPxBgC0zQYAtbUGALa9BgC3tQYAqIkHAKmVBwCqkQcAq5EHAKy9BwCtpQcArqEHAK/dBwDVsQCA2bEAgN2xAIDhsQCA5bEAgOmxAIDtsQCA8bEAgLhJBwC5VQcAul0HALtVBwC8cQcAvX0HAL5pBwC/aQcAsKUHALGtBwCyuQcAs7EHALSRBwC1kQcAtnkHALd5BwD1sQCA+bEAgP2xAIABsgCA78gFAOHACQAFsgCA48AZAOMkBAAJsgCA4dAGAO/cKACinQMAoxUBAKAZBQChjQUAs1kGAA2yAIARsgCAFbIAgBmyAIC2ZQYAtXUGAB2yAIC7KQYAuiEGACGyAIAlsgCAvxUGAL4VBgC9JQYAvC0GAKOZBgCPmfwAKbIAgDGyAIA1sgCApqUGAKW1BgA5sgCAq+kGAKrhBgCGKB8Ah5wAAK/VBgCu1QYAreUGAKztBgCebQkAn30HAJwNCwCd7QkAmvENAJs5DQCY5fAAmQ0PAJbh8QCX6fEAlMX1AJUN8wCSHfcAk/H1AJD9+QCR7fkAgh3/AIMB+gA9sgCAQbIAgIYV9gCHOfYAhAn6AIXx9ACKwfAAiyXyAEWyAIBJsgCAjuEMAI8VDgCMNfIAjQHzAJKtDgCTgQgATbIAgFGyAICW6QQAl3UGAJR5CgCV8QoAmtEGAJvJAABVsgC
AWbIAgIEdAwCAHQMAnFkCAIL1AwCrARAAqpUWAKmNFgCojRYAr5UuAK4BLACt/RIArJkSAKOlHgCipR4AoY0CAN2wAICnGRoAppUaAKUBGACknR8AXbIAgGGyAIBlsgCAabIAgG2yAIBxsgCAdbIAgHmyAICz5SoAsuUqALGtLwCw5S4AfbIAgIGyAIC1ASQAtBEqAKgpAwCpNQMAqj0DAKs1AwCsLQMArbUDAK69AwCvtQMAhbIAgImyAICNsgCAkbIAgIAdAACBCQAAgrkAAJWyAIC4TQIAuV0CALptAgC7CQIAvBkCAL0ZAgC+CQIAvwECALDNAwCx1QMAst0DALPVAwC0zQMAtXUCALZ9AgC3dQIAmbIAgITIHQChsgCAvgwfAKWyAICpsgCA70gGAO9YBwDhWAYA4ZgGAOOUAQDjAAYAhhAcAId8HQC+9B4ArbIAgLGyAIC2ZQMAtfUDALWyAICz5QMAubIAgL2yAIDBsgCAv+ECAL5ZAwC9UQMAvFkDALtBAwC6WQMAxbIAgMmyAIAtsgCAnbIAgM2yAIDRsgCA1bIAgNmyAIDdsgCA4bIAgKitHQCptR0AqrUdAKslHgCsPR4ArR0eAK4VHgCvdR4AsA0eALEtHgCyJR4As40eALSVHgC1nR4AtpUeALeNHgC4tR4Aub0eALq1HgC7nR4AvIUeAL1VHwC+XR8Av1UfALMdHQDlsgCA6bIAgO2yAIDxsgCAtr0eALWVHgD1sgCAu8keALrpHgD5sgCA/bIAgL95HgC+cR4AvXkeALzRHgCCKQAAo1kdAIAdAACBFQAApvkeAAGzAIAFswCApdEeAKqtHgCrjR4ACbMAgITgAwCuNR4Arz0eAKyVHgCtPR4AqIkeAKmVHgCqnR4Aq7EeAKzRHgCt2R4Ars0eAK/FHgANswCAEbMAgIaIAACHbAEAFbMAgBmzAIAdswCAIbMAgLhdAQC5wQEAusEBALvBAQC8wQEAvckBAL7xAQC/8QEAsL0eALGdHgCylR4As2UBALR9AQC1ZQEAtm0BALdlAQCqLR0AqzUdACWzAIApswCAri0dAK+VHACsLR0ArSUdAISMAQCjkR0ALbMAgDGzAICmER0ANbMAgDmzAIClgR0As1UeAD2zAIBBswCARbMAgEmzAIC2GR4AtRkeAE2zAIC7GR4AujkeAFGzAIBVswCAv+EBAL75AQC98QEAvAEeAFmzAIBdswCAYbMAgKOZHQBlswCApdUdAKbVHQBpswCAbbMAgHGzAICq9R0Aq9UdAKzNHQCtPQIArjUCAK8tAgCAZQAAgRUAAIIdAACEAAQAdbMAgHmzAICHcAMAhvwEAIGzAICFswCAibMAgI2zAICRswCAlbMAgJmzAICdswCAvsgEAKGzAIClswCAqbMAgK2zAICxswCAtbMAgO/cHwC5swCA4ZQBAL2zAIDjHAEAwbMAgMWzAIDJswCAzbMAgLt1AwC6aQMAvkgGANGzAIC/HQMAvh0DAL0dAwC8ZQMAs9UDANWzAIDZswCA3bMAgOGzAIC2fQMAtcUDAIRwBQCoJQIAqTUCAKo9AgCrNQIArC0CAK2dAgCulQIAr7UCAIIVAADlswCAgNkBAIEJAADEAAAA6bMAgPGzAID1swCAuKkCALmpAgC6SQEAu0kBALxZAQC9RQEAvkUBAL99AQCwzQIAsdECALLRAgCzqQIAtLkCALW5AgC2qQIAt6ECAOEoHgDhNBwA43QBAOMYHgD5swCA/bMAgIa4BACHVAUAhDgHAAG0AIAFtACACbQAgL6sBwANtACA78weAO/IGgCj9QIAEbQAgBW0AIAZtACAHbQAgKZdAgCl5QIAIbQAgKtVAgCqSQIAJbQAgCm0AICvPQIArj0CAK09AgCsRQIAqGEGAKlhBgCqYQYAq2EGAKxhBgCtYQYArmEGAK9hBgDtswCALbQAgDG0AIA1tACAObQAgD20AIBBtACARbQAgLjxBgC58QY
AuvEGALvxBgC8nQYAvbEGAL6xBgC/sQYAsOUGALHtBgCy5QYAs/0GALTlBgC17QYAttkGALfVBgCz6QYASbQAgE20AIBRtACAVbQAgLbhBgC16QYAWbQAgLspBgC6IQYAXbQAgGG0AIC/KQYAviEGAL0pBgC8MQYAgl0AAKOtBgCARQAAgV0AAKalBgBltACAabQAgKWtBgCqZQYAq20GAIYADACHQAMArmUGAK9tBgCsdQYArW0GAG20AIDvfAUAcbQAgHW0AIB5tACAfbQAgIG0AICFtACAibQAgI20AICRtACAlbQAgJm0AIDjaAUAnbQAgOF4BQCz0QYAobQAgKW0AICptACArbQAgLb9BgC1/QYAsbQAgLupBgC6oQYAtbQAgLm0AIC/mQYAvqkGAL2pBgC8sQYAqLkGAKm5BgCqGQYAqxkGAKw1BgCtPQYArjUGAK8pBgC9tACAgh0AAIEdAACAHQAAwbQAgMW0AIDJtACA0bQAgLjpAQC56QEAuvkBALv5AQC86QEAvekBAL5dAQC/VQEAsCUGALEtBgCyJQYAsz0GALQtBgC1HQYAthUGALfZAQCGgAwAh+QCANW0AICjnQUA2bQAgKWxBQCmsQUA3bQAgOG0AIDltACAqu0FAKvlBQCs/QUAreUFAK7lBQCv1QUAtk0DAOm0AICExAMAtUUDAO20AICzjQIA8bQAgPW0AIC+SQMAv0kDALxJAwC9SQMAumkDALtpAwD5tACA/bQAgAG1AICmiQMApYEDAAW1AICjSQIACbUAgA21AIARtQCAr40DAK6NAwCtjQMArI0DAKutAwCqrQMAfbMAgBW1AIAZtQCAHbUAgIW0PQAhtQCAJbUAgCm1AIAttQCAMbUAgIA9AACBCQAAgh0AADW1AIC+sAMAObUAgIc4AwCG3AwAQbUAgEW1AIBJtQCATbUAgFG1AIDvXAYAVbUAgFm1AIC+6AwA45QGAF21AIDh3AEAYbUAgGW1AIBptQCAbbUAgLNRAQBxtQCAdbUAgHm1AIB9tQCAtnEBALV5AQCBtQCAuz0BALo9AQCFtQCAibUAgL/9AQC+9QEAvQUBALwFAQCNtQCAkbUAgJW1AICEQAwAmbUAgJ21AIChtQCA76wHAKW1AIDhJAYAqbUAgONABwCGkAwAh/wMALG1AIC1tQCAgFkAAIFlAACCYQAAo90BALm1AICl9QEApv0BAL21AIDBtQCAxbUAgKqxAQCrsQEArIkBAK2JAQCueQEAr3EBAM20AIA9tQCAybUAgM21AICttQCA0bUAgNW1AIDZtQCAqJ0NAKktDgCqOQ4AqzEOAKwRDgCtEQ4Arn0OAK9tDgCwGQ4AsRkOALIxDgCzMQ4AtNEOALXZDgC2zQ4At8UOALj9DgC52Q4AuqkOALupDgC8vQ4AvaUOAL6tDgC/pQ4AqIEPAKmBDwCqgQ8Aq4EPAKyBDwCtjQ8AroUPAK+1DwDdtQCA4bUAgOW1AIDptQCA7bUAgPG1AID1tQCA+bUAgLidDwC5rQ8AuqUPALtNDwC8VQ8AvV0PAL5JDwC/SQ8AsNEPALHRDwCy0Q8As9EPALS1DwC1vQ8AtrUPALetDwCzCQ4A/bUAgAG2AIAFtgCACbYAgLYNDgC1CQ4ADbYAgLsVDgC6FQ4AEbYAgBW2AIC/eQ4AvnEOAL0FDgC8BQ4AghUAAKNNDgCAYQAAgWEAAKZJDgAZtgCAvhABAKVNDgCqUQ4Aq1EOAIQkAQAhtgCArjUOAK89DgCsQQ4ArUEOAKg5DgCpOQ4AqlkOAKtRDgCscQ4ArXEOAK6RAQCvkQEAhgAAAIeEAAAltgCAKbYAgC22AIAxtgCANbYAgDm2AIC4dQEAuX0BALp1AQC7yQAAvNkAAL3ZAAC+yQAAv8EAALD1AQCx/QEAsvUBALNNAQC0VQEAtV0BALZVAQC3TQEAuk0PALtVDwC4TQ8AuUUPAL59DwC/tQ8AvEUPAL11DwCyAQ8
AswEPALAxDwCxMQ8AtgEPALcNDwC0EQ8AtREPAKqZDgCrRQ8AqOUOAKmZDgCuQQ8Ar0EPAKxRDwCtUQ8APbYAgEG2AIBFtgCASbYAgE22AIBRtgCAVbYAgFm2AICzUQ0AXbYAgGG2AIBltgCAabYAgLZxDQC1eQ0AbbYAgLu5AgC6sQIAcbYAgHW2AIC/GQIAvhECAL0ZAgC8oQIAebYAgKMVDQB9tgCAgbYAgKY1DQCFtgCAibYAgKU9DQCq9QIAq/0CAIToAwCRtgCArlUCAK9dAgCs5QIArV0CAKhtAgCprQIAqqUCAKu9AgCspQIAra0CAK6lAgCvfQEAgO0BAIHxAQCC8QEAvqAFAJW2AICZtgCAh2gFAIYcBQC4yQEAuckBALrZAQC70QEAvPkBAL35AQC+mQEAv5UBALAFAQCxDQEAsgUBALMdAQC0BQEAtQ0BALYFAQC3+QEA4WQPAOGcDwDjFA4A49QPAJ22AIDhPA4AobYAgOPkAAC+rAQApbYAgKm2AIDvDAAArbYAgLG2AIDvYA4A77QPALW2AIC5tgCAhEQEALNhAgC9tgCAtWECALZhAgDBtgCAxbYAgMm2AIC6jQEAu4UBALydAQC9hQEAvo0BAL+FAQCjrQUAjbYAgM22AIDRtgCA1bYAgKatBQClrQUA2bYAgKtJBgCqQQYA3bYAgOG2AICvSQYArkEGAK1JBgCsUQYA5bYAgOm2AIDttgCA8bYAgIAdAACBCQAAgjkAAPW2AID5tgCA/bYAgIbIAACHIAMAAbcAgAW3AIAJtwCADbcAgKhtBgCptQcAqr0HAKsdBwCsCQcArTEHAK4xBwCvLQcAhKgDABG3AIAVtwCAGbcAgB23AIAhtwCAJbcAgCm3AIC4zQAAudUAALrVAAC75QAAvP0AAL2VAAC+nQAAv5UAALBVBwCxJQcAsi0HALM9BwC0LQcAtRUHALYdBwC39QAALbcAgOG8BgAxtwCA4/QFADW3AIA5twCAPbcAgEG3AIBFtwCASbcAgE23AIBRtwCAVbcAgFm3AIBdtwCA7+gEALN1BgCCLQAAgRUAAIAdAABhtwCAtvEGALXBBgBltwCAu6EGALrRBgBptwCAvmwBAL+RBgC+qQYAvakGALy5BgCjtQYAcbcAgIYoAACHTAEAdbcAgKYxBgClAQYAebcAgKthBgCqEQYAfbcAgIG3AICvUQYArmkGAK1pBgCseQYAhbcAgLO9AQCJtwCAjbcAgLZ5AQCRtwCAlbcAgLV5AQC6VQEAu10BAJm3AICdtwCAvvkAAL/lAAC8RQEAvf0AAKhxAgCpcQIAqnECAKtxAgCstQIArb0CAK61AgCvrQIAhOw8AKG3AICltwCAqbcAgK23AICxtwCAtbcAgLm3AIC4XQMAuWUDALptAwC7ZQMAvH0DAL1lAwC+bQMAv2UDALDVAgCx3QIAstUCALNtAwC0eQMAtWUDALZtAwC3ZQMAHbYAgL23AIDBtwCAo/UCAMW3AIClMQIApjECAMm3AIDNtwCA0bcAgKodAgCrFQIArA0CAK21AwCusQMAr60DAIBlAACBCQAAghkAANW3AIDZtwCA4bcAgL4QPADltwCAhsA8AIcgAwDptwCA7bcAgPG3AID1twCA+bcAgP23AICohQIAqZUCAKqVAgCrpQIArL0CAK3VAgCu0QIAr9ECAAG4AIAFuACACbgAgA24AIARuACAFbgAgBm4AIAduACAuHUBALl9AQC6dQEAu8kBALzZAQC9xQEAvsUBAL/9AQCwtQIAsb0CALKBAgCzgQIAtFUBALVdAQC2VQEAt00BAOGkBgAhuACA41AGAL6APACEHDwAvoA/ACW4AIApuACALbgAgDG4AIA1uACAObgAgD24AIBBuACA7+AGAEW4AICBfQAAgHEAAEm4AICCBQAAUbgAgFW4AIDvTAAAWbgAgOGQAQBduACA41gBAGG4AIBluACAabgAgIZYPwCH/Dw
As509AN23AIBNuACAbbgAgHG4AIC21T0AtbU9AHW4AIC7+T0AuvE9AHm4AIB9uACAvxk+AL4RPgC91T0AvNU9AIG4AICj2T0AhbgAgIm4AICmkT0AjbgAgJG4AICl8T0AqrU9AKu9PQCVuACAmbgAgK5VPgCvXT4ArJE9AK2RPQCoVT4AqVk+AKphPgCrYT4ArGE+AK1hPgCuYT4Ar2E+AISoAwCduACAobgAgKW4AICpuACArbgAgLG4AIC1uACAuEU/ALldPwC6VT8Au20/ALx1PwC9fT8AvnU/AL9tPwCwwT8AscE/ALLBPwCzwT8AtME/ALXBPwC2wT8At8E/AIC5AQCBuQEAggUAALm4AIDhgD4AwbgAgOMoPQDFuACAhoAAAIcEAQDvCD0AybgAgM24AIDRuACA1bgAgNm4AICzqT8AvbgAgN24AIDhuACA5bgAgLahPwC1qT8A6bgAgLtFPgC6RT4A7bgAgPG4AIC/RT4AvkU+AL1VPgC8VT4Ao2k/APW4AID5uACA/bgAgAG5AICmYT8ApWk/AAW5AICrhT4AqoU+AAm5AIANuQCAr4U+AK6FPgCtlT4ArJU+ABG5AICzGT4AFbkAgBm5AIC2IT4AHbkAgCG5AIC1MT4AuvEBALv5AQAluQCAKbkAgL6xAQC/vQEAvNEBAL3RAQCo0T0AqdE9AKrVPQCr6T0ArP09AK3lPQCu7T0ArxECAID5AwCBzQMAgsUDAIQkAwC+AAQAMbkAgIesAwCGvAQAuBkCALktAgC6JQIAu+kCALz5AgC9+QIAvukCAL/pAgCwcQIAsXkCALJBAgCzQQIAtDECALU9AgC2NQIAtykCAKVtPQA1uQCAObkAgKZ9PQA9uQCAbbcAgKNFPQBBuQCArY0CAKyNAgCv4QIAru0CAKwAAABFuQCAq6UCAKqtAgDh+AEASbkAgOP0AgCEwAQATbkAgFG5AIBVuQCAWbkAgF25AIBhuQCAZbkAgGm5AIBtuQCAcbkAgO8wAgB1uQCAqBUCAKkZAgCqJQIAqz0CAKwlAgCtLQIAriUCAK9VAgB5uQCAfbkAgIG5AICFuQCAibkAgI25AICEsAQAkbkAgLjRAgC52QIAuuECALvhAgC8kQIAvZ0CAL6VAgC/iQIAsC0CALE1AgCyNQIAswUCALQdAgC18QIAtvECALfxAgDheD8A4zQBAOMIPgDhbD4AgQkAAICpAACVuQCAgj0AAJm5AIChuQCApbkAgL4gBACpuQCA79g+AO/MPgCtuQCAsbkAgLPpAgCG6AQAh8AEALbpAgC1uQCAubkAgLXpAgC6rQIAu7UCAL25AIDBuQCAvp0CAL9xAgC8pQIAvZUCAC25AICduQCAxbkAgMm5AIDNuQCA0bkAgNW5AIDZuQCAqBUGAKmhBgCqoQYAq70GAKytBgCtgQYArv0GAK/tBgCwlQYAsZ0GALKVBgCzrQYAtLUGALW9BgC2tQYAt60GALiVBgC5mQYAukkHALtJBwC8WQcAvVkHAL5JBwC/SQcArN0FAK3tBQCu5QUArwkFAN25AIDhuQCAqtUFAKvNBQDluQCApZEFAKaRBQDpuQCA7bkAgPG5AID1uQCAo5EFALNJBgD5uQCA/bkAgAG6AIAFugCAtmEGALVFBgAJugCAuzkGALoxBgC+ZAAADboAgL8ZBgC+EQYAvRkGALwhBgCjiQcAgtkBAIHZAQCAwQEAEboAgKahBwClhQcAFboAgKv5BwCq8QcAhggBAId8AQCv2QcArtEHAK3ZBwCs4QcAGboAgLP1BgAdugCAIboAgLaFBgAlugCAKboAgLWdBgC6jQYAu20BAC26AIAxugCAvmUBAL9tAQC8dQEAvW0BAKglBgCpLQYAqjkGAKsxBgCsUQYArUEGAK5BBgCvdQYANboAgDm6AIA9ugCAQboAgEW6AIBJugCATboAgFG6AIC4VQEAuWUBALplAQC7fQEAvGUBAL1tAQC+HQE
AvxUBALANBgCx7QEAsuUBALP9AQC05QEAte0BALblAQC3bQEAo7EFAFW6AIBZugCAvkgDAL5YDACmwQUApdkFAF26AICrKQIAqskFAGG6AIBlugCArykCAK4hAgCtKQIArDECAGm6AIBtugCAcboAgHW6AICAGQAAgRkAAIIFAAB5ugCAhKwDAIG6AICHGAMAhswMAIW6AICJugCAjboAgJG6AICokQMAqZkDAKrJAwCrxQMArN0DAK3BAwCuwQMAr/UDAJW6AICZugCAnboAgKG6AIClugCAqboAgK26AICxugCAuH0DALnBAAC6wQAAu9EAALz5AAC9+QAAvpkAAL+ZAACwjQMAsUUDALJNAwCzRQMAtF0DALVFAwC2TQMAt0UDALNBAgC1ugCAuboAgL8EDwC9ugCAtkECALVVAgDBugCAu4ECALpJAgDFugCAyboAgL+BAgC+mQIAvZECALyZAgDNugCA0boAgNW6AIDZugCA76QDAN26AIDhugCA5boAgOMQAwDpugCA4VgAAIQgDQCAKQAAgSkAAIIdAADxugCA4VAGAOGgBwDjoAYA41AHAIWUDAD1ugCA70gbAPm6AIDhJAIA/boAgONwGgABuwCABbsAgAm7AIDvqAEA7+gGAIagDwCHDA0Ao4kCAA27AIClnQIAEbsAgBW7AICmiQIAGbsAgB27AICrSQIAqoECAK1ZAgCsUQIAr0kCAK5RAgCoZQ4AqXUOAKp9DgCrdQ4ArG0OAK21DgCuvQ4Ar7UOAO26AIAhuwCAJbsAgCm7AIAtuwCAOLsAgDy7AIBAuwCAuF0PALltDwC6ZQ8Auw0PALwVDwC9HQ8AvhUPAL8JDwCwzQ4AsdUOALLdDgCz1Q4AtM0OALVxDwC2cQ8At20PALP1DgBEuwCASLsAgEy7AIBQuwCAtjUOALXlDgBUuwCAuxEOALoJDgBYuwCAXLsAgL+1DwC+CQ4AvQEOALwJDgCCFQAAo7EOAIBhAACBYQAApnEOAGC7AIC+EAEApaEOAKpNDgCrVQ4AaLsAgIQgAQCuTQ4Ar/EPAKxNDgCtRQ4An0UIAJ4NCQCdDQkAnJkLAJt1NQCaETUAmZk3AJgNMQCXJTEAliUxAJWBPQCUDT0Ak4k/AJIVOACRPTkAkD05AI9lJQDvrA0AhgAEAIegAQBsuwCAcLsAgHS7AIDv6AEAeLsAgOE0AgB8uwCA4zQBAIC7AIDjCAwAhLsAgOEIDQChoQEAiLsAgKMJBQCibQMApc0EAKQRBQCnHRkAph0ZAKmhHQCoORkAq+kcAKqpHQCtkREArAEQAK8BFACuUREAsfkVALDlFQCz6WkAsgFoALUBbAC0eWkAjLsAgJC7AICUuwCAmLsAgJy7AICguwCAowkDAKIZDQCh/Q0AoP0NAIIlJgCDBToApLsAgKi7AICGqTwAhzU+AIQdOgCFPTsAiok+AIslMgCsuwCAsLsAgI6xNACPMTYAjD0yAI0tMgCSJTYAk9EIAIREAwC+wAQAlhULAJdVDgCUXQoAlVUKAJplDgCbiQ4AtLsAgLi7AIC8uwCAwLsAgJyBAADEuwCAuLUCALm9AgC6tQIAuwkCALwZAgC9GQIAvgkCAL8BAgCwdQ0AsX0NALJJDQCzSQ0AtJUCALWdAgC2lQIAt40CAKi9DQCpUQ0AqlUNAKtpDQCsfQ0ArWUNAK5tDQCvEQ0AZLsAgILtAQCBHQAAgB0AAMi7AIDMuwCAfboAgL5wBQCznQwAhIwFANC7AIDYuwCA3LsAgLalDAC1tQwA4LsAgLv5DAC68QwAhigFAIcgBQC/GQMAvhEDAL3dDAC83QwA5LsAgKPZDADouwCA7LsAgKbhDADwuwCA9LsAgKXxDACqtQwAq70MAPi7AID8uwCArlUDAK9dAwCsmQwArZkMAAC8AIAEvACACLwAgAy8AIAQvACAFLwAgBi8AIDvvAEAHLwAgOF8DgAgvACA41ABACS8AIAovAC
ALLwAgDC8AICzlQIANLwAgDi8AIA8vACAQLwAgLa9AgC1uQIASLwAgLs5AgC6YQIAhsgEAIesBAC/GQIAvhECAL0ZAgC8IQIAo1UFAILVBwCBxQcAgMUHAEy8AICmfQUApXkFAFC8AICr+QUAqqEFAFS8AIBYvACAr9kFAK7RBQCt2QUArOEFAFy8AICzWQcAYLwAgGS8AIC2HQcAaLwAgGy8AIC1FQcAugkHALsJBwBwvACAdLwAgL75BwC/+QcAvPkHAL35BwDUuwCARLwAgHi8AIB8vACAgLwAgIS8AICIvACAjLwAgKitBwCptQcAqrUHAKvtBwCs+QcArfkHAK7tBwCv5QcAsKkHALGpBwCySQcAs0kHALRZBwC1WQcAtkkHALdJBwC4eQcAuUUHALpBBwC7XQcAvEUHAL1NBwC+RQcAvzkHAKMdBgCQvACAlLwAgJi8AICcvACAplkGAKVRBgCgvACAq00GAKpNBgCkvACAqLwAgK+9BgCuvQYArb0GAKy9BgCAbQAAgQkAAIIZAACsvACAsLwAgISYAQC+kAEAtLwAgIYAHACHxAEAuLwAgLy8AIDAvACAxLwAgMi8AIDMvACAqF0GAKmVAQCqlQEAq6UBAKy9AQCt1QEArtEBAK/RAQDQvACA1LwAgNi8AIDcvACA4LwAgOS8AIDovACA7LwAgLhZAQC5WQEAus0AALvFAAC83QAAvcUAAL7FAAC/9QAAsLUBALG9AQCygQEAs4EBALR5AQC1eQEAtmkBALdpAQCzHQIA8LwAgPS8AIC+gBwA+LwAgLZVAgC1NQIA/LwAgLt5AgC6cQIAAL0AgAS9AIC/vQIAvr0CAL1VAgC8VQIACL0AgKNZAgAMvQCAEL0AgKYRAgAUvQCAGL0AgKVxAgCqNQIAqz0CABy9AIAgvQCArvkCAK/5AgCsEQIArRECACi9AIAsvQCAvgQdAL4AHgAwvQCANL0AgDi9AIA8vQCAgPkAAIHNAACCxQAAhCADAIawHACHlAMAQL0AgES9AIBIvQCATL0AgFC9AIBUvQCA42wCAFi9AIDhoAEAXL0AgO8UAgBgvQCAZL0AgGi9AIBsvQCAcL0AgHS9AIB4vQCA4fAGAOE0BgDjTAAA4xgGAHy9AICAvQCAhL0AgIi9AICAPQAAgQkAAIIZAACMvQCAkL0AgIS8HQDvmAAA7zgHALMxAgDRAAAAh9gdAIZsHACYvQCAtikCALUhAgCcvQCAu80CALrNAgCgvQCApL0AgL/NAgC+zQIAvc0CALzNAgCyXQYAs2UGALANBgCxVQYAtn0GALedBQC0fQYAtXUGALqNBQC7zQUAuKUFALmFBQC+xQUAv8kFALzVBQC9zQUAqL0AgKy9AICwvQCAtL0AgLi9AIC8vQCAwL0AgMS9AICqtQYAq70GAKgBBwCpvQYAroEGAK+NBgCsmQYArZUGAKNxHQDIvQCAzL0AgNC9AIDUvQCApmkdAKVhHQDYvQCAq40dAKqNHQDcvQCA4L0AgK+NHQCujR0ArY0dAKyNHQDkvQCAs9UeAOi9AIDsvQCAts0eAPC9AID0vQCAtcUeALqhHgC7oR4A+L0AgPy9AIC+pR4Av6keALyxHgC9sR4AJL0AgJS9AIAAvgCAhAQDAID5AACB+QAAghEAAAS+AICoIR4AqSEeAKo5HgCrOR4ArCkeAK0pHgCuAR4ArwEeALABHgCxAR4AsgEeALMBHgC0BR4AtQkeALY9HgC3NR4AuA0eALkVHgC6HR4AuxUeALwNHgC95R8Avu0fAL/lHwCjkR8ACL4AgIYoAQCHSAEADL4AgKaJHwClgR8AEL4AgKvlHwCq5R8AFL4AgBi+AICv7R8AruEfAK31HwCs9R8AHL4AgLMtHgAgvgCAJL4AgLaVHgAovgCALL4AgLWdHgC6sR4Au7EeADC+AIA0vgCAvnUBAL99AQC8oR4AvaEeAKjRHgCp2R4AquEeAKvhHgCsUR4
ArVEeAK5RHgCvUR4AOL4AgDy+AIBAvgCARL4AgEi+AIBMvgCAUL4AgFS+AIC43QEAue0BALrlAQC7jQEAvJkBAL2ZAQC+jQEAv4UBALAxHgCxMR4AsjEeALMxHgC09QEAtf0BALb1AQC37QEAo2kdAFi+AIBcvgCAYL4AgGS+AICm0R0ApdkdAGi+AICr9R0AqvUdAGy+AIBwvgCArzkCAK4xAgCt5R0ArOUdAIFpAACAWQAAvgAEAIJhAAB4vgCAfL4AgIC+AICEvgCAhOwDAIi+AICHiAMAhuwEAIy+AICQvgCAlL4AgJi+AICohQMAqZUDAKqVAwCrpQMArL0DAK3VAwCu0QMAr9EDAJy+AICgvgCApL4AgKi+AICsvgCAsL4AgLS+AIC4vgCAuHEDALlxAwC6cQMAu3EDALzVAAC93QAAvtUAAL/NAACwtQMAsb0DALKBAwCzgQMAtFEDALVRAwC2UQMAt1EDAOFUHgDhrB8A45QBAOMoHgDjYAMAvL4AgOEIAADAvgCA75ADAMS+AIDIvgCAzL4AgNC+AIDUvgCA70wfAO9MHwCzXQIA2L4AgNy+AIDgvgCA6L4AgLYVAgC1dQIA7L4AgLs5AgC6MQIAhCQFAL7gBAC/1QIAvtUCAL0VAgC8FQIAuJEdALmZHQC6oR0Au6EdALzRHQC93R0AvtUdAL/JHQCwCR4AsQkeALIZHgCzGR4AtAkeALUJHgC2vR0At7UdAKipHgCpqR4AqrkeAKu5HgCsqR4ArakeAK55HgCveR4AgKUAAIGtAACCpQAA8L4AgIbQBACH+AQA9L4AgPi+AIB0vgCA5L4AgPy+AIAAvwCABL8AgAi/AIAMvwCAEL8AgKhxBgCpcQYAqnEGAKtxBgCsVQYArUUGAK5NBgCvRQYAsD0GALHlBgCy7QYAs+UGALT9BgC15QYAtu0GALflBgC43QYAuXEHALp1BwC7SQcAvFkHAL1ZBwC+SQcAv0kHALPZBgAUvwCAGL8AgBy/AIAgvwCAtuUGALX9BgAkvwCAuwEGALrZBgAovwCALL8AgL8BBgC+GQYAvREGALwZBgAwvwCAo9kFADS/AIA4vwCAppEFADy/AIBAvwCApfEFAKq1BQCrvQUARL8AgEi/AICuUQUAr1EFAKyRBQCtkQUAo1kHAIIZAACBGQAAgOEBAEy/AICmZQcApX0HAFC/AICrgQcAqlkHAISgAgC+rAEAr4EHAK6ZBwCtkQcArJkHAFS/AICzqQYAhugAAIcsAQC2WQEAWL8AgFy/AIC1oQYAunUBALt9AQBgvwCAZL8AgL75AQC/+QEAvGUBAL35AQCo0QYAqdkGAKplBgCrdQYArG0GAK2dAQCulQEAr40BAITsAQBovwCAbL8AgHC/AIB0vwCAeL8AgHy/AICAvwCAuGkBALlpAQC6CQEAuwUBALwdAQC9AQEAvgEBAL81AQCw9QEAsf0BALL1AQCzaQEAtHkBALV5AQC2aQEAt2EBAIS/AICIvwCAjL8AgKPhBQCQvwCApekFAKYRAgCUvwCAmL8AgJy/AICqPQIAqzUCAKwtAgCtsQIArrECAK+xAgCgvwCApL8AgL4EAwCEAAwAqL8AgKy/AICwvwCAtL8AgIANAACBFQAAgh0AALi/AIC8vwCAwL8AgIdEAwCG3AwAs+kDAMi/AIDMvwCA0L8AgNS/AIC2PQMAtT0DANi/AIC7GQMAuhEDANy/AIDgvwCAv7kAAL6xAAC9uQAAvAEDAOS/AIDhlAEA6L8AgON8AQDsvwCA8L8AgPS/AID4vwCA/L8AgADAAIAEwACACMAAgAzAAIAQwACAFMAAgO9MAgCoVQIAqV0CAKphAgCrYQIArLUCAK29AgCutQIAr60CAL5oDQAYwACAHMAAgCDAAIAkwACAgq0AAIGtAACArQAAuGEBALlhAQC6CQEAuwkBALwBAQC9AQEAvgEBAL8BAQCw1QIAsd0CALLVAgCzbQE
AtHUBALV9AQC2aQEAt2EBAOFoBgDh8AcA47AAAOP0BgAowACALMAAgDDAAIA4wACAPMAAgEDAAIBEwACASMAAgL78DABMwACA72wAAO8oBgCjqQIAUMAAgIZoDACHBA0AVMAAgKZ9AgClfQIAWMAAgKtZAgCqUQIAXMAAgGDAAICv+QEArvEBAK35AQCsQQIAqIUOAKmNDgCqhQ4Aq50OAKyNDgCtvQ4ArrUOAK/dDgA0wACAZMAAgGjAAIBswACAcMAAgHTAAIB4wACAfMAAgLitDgC5tQ4Aur0OALu1DgC8dQ8AvX0PAL51DwC/bQ8AsKkOALG1DgCyvQ4As7UOALStDgC1lQ4Atp0OALeVDgCzDQ4AgMAAgITAAICIwACAjMAAgLY9DgC1BQ4AkMAAgLtxDgC6bQ4AlMAAgJjAAIC/UQ4AvmkOAL1hDgC8aQ4AghkAAKNJDgCAZQAAgRkAAKZ5DgCcwACAoMAAgKVBDgCqKQ4AqzUOAIS8AwCkwACAri0OAK8VDgCsLQ4ArSUOAKidDgCppQ4Aqq0OAKulDgCsvQ4AraEOAK7dDgCvzQ4AhiABAIdkAQCowACArMAAgLDAAIC0wACAuMAAgLzAAIC4eQEAuXkBALrNAQC7xQEAvN0BAL3FAQC+xQEAv/UBALC9DgCxjQ4AsoUOALNJAQC0WQEAtVkBALZJAQC3SQEAtS0OAMDAAIDEwACAtjkOAMjAAIDMwACAsz0OANDAAIC9hQEAvEkOAL+FAQC+hQEA1MAAgMS/AIC7UQ4AumEOAKNlDgDYwACA3MAAgODAAIDkwACApmEOAKV1DgDowACAqwkOAKo5DgDswACA8MAAgK/dAQCu3QEArd0BAKwRDgD0wACA+MAAgO/QDwD8wACAAMEAgATBAIAIwQCADMEAgBDBAIC+aAMAGMEAgBzBAIDhVA4AIMEAgONkDgAkwQCAgFkAAIFZAACCaQAAhIwDAIbwBACHFAMAKMEAgCzBAIAwwQCANMEAgDjBAIA8wQCAQMEAgETBAIBIwQCATMEAgFDBAIBUwQCAWMEAgFzBAIBgwQCAZMEAgGjBAIBswQCAqIkDAKmJAwCqmQMAq5kDAKyJAwCtiQMArj0DAK81AwCwUQMAsVEDALJVAwCzfQMAtBUDALUdAwC2FQMAtw0DALg9AwC5DQMAugUDALvtAAC89QAAvfkAAL7pAAC/6QAAcMEAgHTBAIB4wQCAsz0CAHzBAIC1LQIAtiUCAIDBAIC+aAUAiMEAgLq5AgC7uQIAvK0CAL2FAgC+/QIAv/UCAIBJAACBVQAAglUAAIQABQDvjAMAvhgEAId0BQCG/AQA4zwDAIzBAIDhUAAAkMEAgJTBAICYwQCAnMEAgKDBAICkwQCAqMEAgKzBAICwwQCAtMEAgLjBAIC8wQCA79QOAL4oBgDhdA4AwMEAgONUAQDEwQCAyMEAgMzBAIDQwQCAo/ECANTBAIDYwQCA3MEAgODBAICm6QIApeECAOTBAICrdQIAqnUCAOjBAIDswQCArzkCAK4xAgCtSQIArGECAKgpBgCpKQYAqj0GAKsxBgCsSQYArUkGAK55BgCveQYAhMEAgIIVAACBxQcAgMUHAPDBAICEaAMA9MEAgPjBAIC4yQYAuckGALrZBgC72QYAvMkGAL3JBgC+WQcAv1kHALAJBgCxCQYAshkGALMZBgC0CQYAtQkGALb5BgC3+QYAs7UGAPzBAICGrAAAh0ADAADCAIC2yQYAtcEGAATCAIC7zQYAus0GAAjCAIAMwgCAv80GAL7NBgC9zQYAvM0GABDCAICj8QYAFMIAgBjCAICmjQYAHMIAgCDCAIClhQYAqokGAKuJBgAkwgCAKMIAgK6JBgCviQYArIkGAK2JBgCoJQYAqWEGAKplBgCrfQYArGUGAK1tBgCuZQYAr50GACzCAIAwwgCANMIAgDjCAIA8wgCAQMIAgETCAIBIwgCAuPUGALn9BgC69QY
Au4kGALyZBgC9mQYAvokGAL+BBgCw5QYAse0GALLlBgCz/QYAtOUGALXtBgC20QYAt80GAEzCAIC2/QYAtf0GAFDCAICz/QYAVMIAgFjCAIBcwgCAvzkGAL4xBgC9OQYAvCEGALs5BgC6MQYAFMEAgGDCAICjrQYAgnkAAIFVAACAVQAAhFwBAKatBgClrQYAaMIAgKtpBgCqYQYAhkh/AIfkAACvaQYArmEGAK1pBgCscQYAbMIAgO/cBwBwwgCAdMIAgHjCAIB8wgCAgMIAgITCAICIwgCAhKADAIzCAIC/JHkAkMIAgONoBwCUwgCA4XQGALPRAgCYwgCAvgQDAISAfQCcwgCAtvkCALXxAgCgwgCAu7UCALqpAgCkwgCAqMIAgL9RAwC+mQIAvZECALylAgCpBQIAqLkCAKsVAgCqHQIArT0CAKw9AgCvUQIArl0CAL5ofQCswgCAsMIAgLTCAIC4wgCAvMIAgMDCAIDEwgCAufEDALjpAwC78QMAuvkDAL1RAwC86QMAv00DAL5RAwCxNQIAsCkCALMBAgCyNQIAtdEDALQZAgC30QMAttkDAIIpAACjlQMAgB0AAIEVAACmvQMAyMIAgMzCAICltQMAqu0DAKvxAwDQwgCA2MIAgK7dAwCvFQIArOEDAK3VAwCGYH0Ah3h9ALNBAQCEAH8AtUEBANzCAIDgwgCAtkkBAOTCAIDowgCAu0EBALpNAQC9SQEAvEUBAL8pAQC+OQEA7MIAgO/cBgDwwgCA9MIAgPjCAID8wgCAAMMAgO8wBgCELH4A4eAGAATDAIDjiAEACMMAgON0AAAMwwCA4SwBAKPJAQAQwwCAFMMAgIVweQAYwwCApsEBAKXJAQAcwwCAq8kBAKrFAQAgwwCAJMMAgK+hAQCusQEArcEBAKzNAQCo3X0AqQV+AKoBfgCrAX4ArAF+AK0BfgCuAX4ArwF+ANTCAIAowwCALMMAgDDDAIA0wwCAgp0AAIGdAACAnQAAuC1+ALnhfgC64X4Au+F+ALzhfgC94X4AvuF+AL/hfgCwQX4AsU1+ALJZfgCzVX4AtDV+ALUlfgC2JX4AtxV+AKitfwCp0X8AqtF/AKvtfwCs9X8ArRV/AK4RfwCvEX8AOMMAgDzDAIBAwwCARMMAgIbwAwCHuAAASMMAgEzDAIC4EX8AuRl/ALohfwC7IX8AvPUAAL39AAC+9QAAv+0AALBxfwCxcX8AsnF/ALNFfwC0QX8AtU1/ALY9fwC3NX8As1l+AFDDAIBUwwCAWMMAgFzDAIC2lX4AtX1+AGDDAIC7tX4AurV+AGTDAIBowwCAv4l+AL6FfgC9kX4AvKV+AGzDAICjHX4AcMMAgHTDAICm0X4AeMMAgHzDAIClOX4AqvF+AKvxfgCAwwCAhMMAgK7BfgCvzX4ArOF+AK3VfgCwrQAAscUAALLBAACzwQAAtMUAALXNAAC28QAAt/EAALhhAAC5YQAAumEAALt9AAC8ZQAAvW0AAL5lAAC/vQMAiMMAgIzDAICQwwCAZMIAgJTDAICYwwCAnMMAgKDDAICoWQEAqVkBAKrtAACr5QAArP0AAK3lAACu5QAAr9UAAKTDAICCHQAAgR0AAIAdAACowwCArMMAgLDDAIC+VAIAhoAEAIfsAgC4wwCAvMMAgMDDAIDEwwCAyMMAgL54AwDjdH4AzMMAgOG4fQDQwwCA1MMAgNjDAIDcwwCA4MMAgOTDAIDowwCA7MMAgPDDAIDvwH4A9MMAgPjDAID8wwCAs4UDAADEAIAExACACMQAgAzEAIC2hQMAtZUDABDEAIC74QMAuokDAL4kBgAUxACAv+kDAL7hAwC99QMAvPUDAIIpAACjwQMAgB0AAIEVAACmwQMAGMQAgBzEAICl0QMAqs0DAKulAwAgxACAheAFAK6lAwCvrQMArLEDAK2xAwDh+AMAKMQAgONcHwAsxACA7/QDADDEAICGPAcAh6wCAON8fgA0xAC
A4YABADjEAIA8xACAQMQAgO/kEwBExACAs3EBAEjEAIBMxACAUMQAgFTEAIC2EQEAtWEBAFjEAIC7OQEAujEBAFzEAIBgxACAvxkBAL4RAQC9GQEAvCEBAGTEAIBoxACAbMQAgHDEAIB0xACAeMQAgHzEAIDvxH8AgMQAgOH8fgCExACA4/B/AIANAACBdQAAgn0AAIjEAICMxACAkMQAgKP5AQC+AAgApekBAJjEAICcxACAppkBAISoBQCgxACAq7EBAKq5AQCtkQEArKkBAK+RAQCumQEAqCkGAKkpBgCqOQYAqzkGAKwpBgCtUQYArlUGAK9NBgAkxACAhCABAKTEAICUxACAo+EBAKKZBAChGQQAoPEFALg5BgC5OQYAus0GALvFBgC83QYAvcUGAL7FBgC/8QYAsDUGALE9BgCyNQYAsw0GALQVBgC1HQYAthUGALcJBgCPoWwAs5EHAIYoAQCHfAMAtqEHAKjEAICsxACAtbEHALrlBwC77QcAsMQAgLTEAIC+7QcAv90HALz1BwC97QcAn/l4AJ7leACdcXkAnCF8AJvxfACaYX0AmZlxAJjZcACX4XAAlnl0AJVtdACUbXQAk61pAJJxaACReWgAkB1uAIIhbQCD5W8AuMQAgLzEAICGTWgAh5V1AISZaQCFmWkAiqV1AIu5dQDAxACAxMQAgI5xcACPgXwAjDlxAI05cQCSYX0Ak6l9AMjEAIDMxACAlml5AJeZBACU4XgAlX15AJpBBQCbyQUA0MQAgNTEAIDYxACA3MQAgJypAADgxACAo4ENAKKpAQChqQEA5MQAgKexCQCmAQgApU0NAKSZDQCrkRUAqoUVAKkBFACocQkArx0QAK7pEQCtvREArAEQALMBGACy8RwAscEdALDJHQC0wwCA6MQAgLXhGAC0/RkA7MQAgPDEAID0xACA+MQAgIAdAACBCQAAgv0DAPzEAICjFQUAAMUAgIaIDACHPAMACMUAgKYlBQClNQUADMUAgKtpBQCqYQUAEMUAgBTFAICvWQUArmkFAK1pBQCscQUAGMUAgBzFAICEBAwAIMUAgCTFAIDhbAYAKMUAgOPsewAsxQCAMMUAgDTFAIDvqAYAOMUAgDzFAIBAxQCARMUAgKmNBQCogQUAq60FAKqZBQCtoQUArLkFAK+lBQCuqQUAhGgNAEjFAIBMxQCAUMUAgFTFAIBYxQCAXMUAgL70DAC5SQUAuEEFALtZBQC6QQUAvUkFALxBBQC/cQUAvn0FALGpBQCwoQUAs7kFALKhBQC1mQUAtKkFALd5BQC2kQUAqNUEAKndBACq7QQAqyUDAKyFAwCtjQMArrEDAK+xAwBgxQCAZMUAgGjFAIBsxQCAgBkAAIEZAACCBQAAcMUAgLgxAgC5MQIAujUCALvBAgC8hQIAvbUCAL69AgC/tQIAsGkCALFpAgCyQQIAs0ECALQ5AgC1OQIAthECALcRAgCGoAwAh0wNAHjFAIB8xQCA76QGAIDFAICExQCA78wHAOOUAQDhpAYA4TgBAONcBgCIxQCAjMUAgJDFAICUxQCAmMUAgJzFAICzLQQAoMUAgLVFAwCkxQCAqMUAgLZFAwCsxQCAsMUAgLvlAgC65QIAvd0CALzdAgC/tQIAvrUCAATFAIB0xQCAtMUAgLjFAIC8xQCAwMUAgMTFAIDIxQCAqDEOAKk5DgCqAQ4AqwEOAKxxDgCtcQ4ArnUOAK9tDgCwGQ4AsSUOALItDgCzJQ4AtCEOALUhDgC2IQ4AtyEOALjFDgC5zQ4AusUOALvdDgC8xQ4Avc0OAL5ZDwC/WQ8As6kOAMzFAIDQxQCA1MUAgNjFAIC20Q4AtdkOANzFAIC7wQ4Auv0OAODFAIC+LAAAv8UOAL7FDgC90Q4AvNkOAIJpAACj7Q4AgFkAAIFRAACmlQ4A5MUAgOjFAIClnQ4AqrkOAKuFDgCGyAAAh6wAAK6BDgCvgQ4ArJ0OAK2VDgDsxQC
As5EOAPDFAID0xQCAtqUOAPjFAID8xQCAta0OALrhDgC74Q4AAMYAgATGAIC+6Q4Av9UOALz1DgC96Q4Ao6UKAAjGAIAMxgCAEMYAgBTGAICmzQ0Apc0NABjGAICrbQwAqm0MABzGAIAgxgCArz0MAK49DACtVQwArFUMAKgJDgCpCQ4Aqh0OAKsVDgCsIQ4ArSEOAK4hDgCvIQ4AJMYAgCjGAIAsxgCAMMYAgDTGAIA4xgCAPMYAgEDGAIC4zQEAudUBALrdAQC71QEAvM0BAL1RAQC+UQEAv1EBALAhDgCxIQ4AsiUOALM5DgC0KQ4AtRUOALYdDgC39QEARMYAgEjGAIBMxgCAo5kNAFDGAIClpQ0Apq0NAL7cAgCE7AMAWMYAgKrpDQCr6Q0ArP0NAK3hDQCu4Q0Ar90NAIBFAACBTQAAglkAAKNFAwBcxgCApUEDAKZBAwBgxgCAhsAEAIcAAwCqLQMAqyUDAKw9AwCtJQMAriUDAK8VAwCoWQIAqYUDAKqBAwCrgQMArIUDAK2NAwCusQMAr7EDAGTGAIBoxgCAbMYAgHDGAIB0xgCAeMYAgHzGAICAxgCAuGUDALltAwC6ZQMAu30DALxlAwC9bQMAvmUDAL/dAACwpQMAsa0DALKlAwCzvQMAtK0DALWdAwC2lQMAt10DALMJAgCExgCAiMYAgIzGAICQxgCAtg0CALUNAgCUxgCAu2kCALphAgCYxgCAnMYAgL9ZAgC+aQIAvWkCALxxAgCgxgCApMYAgKjGAICsxgCA4aABALDGAIDjaAMAtMYAgIEVAACAFQAA74wDAIIVAAC4xgCAvMYAgMDGAIC+cAUA4RgOAOGUDwDjOA8A49QPAISUAgDIxgCAzMYAgNDGAIDUxgCA2MYAgNzGAIDgxgCA5MYAgOjGAIDv7AEA7/gPAIZgBACHBAUAs5UBAITMBQC1dQEA7MYAgPDGAIC2dQEA9MYAgPjGAIC7UQEAulkBAL31AAC8SQEAv/UAAL71AACoJQYAqVUGAKpVBgCrrQYArLUGAK29BgCutQYAr60GAMTGAID8xgCAAMcAgATHAIAIxwCADMcAgBDHAIAUxwCAuGkHALlpBwC6CQcAuwkHALwZBwC9GQcAvg0HAL8BBwCw1QYAsd0GALLVBgCzaQcAtHkHALV5BwC2aQcAt2EHAKPdBgAYxwCAHMcAgCDHAIAkxwCApj0GAKU9BgAoxwCAqxkGAKoRBgAsxwCAMMcAgK+9BwCuvQcArb0HAKwBBgCAXQAAgW0AAIJlAACzUQcAvtgDALVxBwC2cQcANMcAgIbgAACHFAMAul0HALs5BwC8KQcAvRUHAL4dBwC/2QAAqJUGAKmdBgCqlQYAq60GAKy1BgCtvQYArrUGAK+tBgA4xwCAPMcAgEDHAIBExwCASMcAgEzHAIBQxwCAVMcAgLhxAQC5cQEAunEBALtxAQC81QEAvd0BAL7VAQC/zQEAsNUGALGxBgCysQYAs40GALSVBgC1UQEAtlEBALdRAQBYxwCAoxkGAFzHAIBgxwCApjkGAFTGAIBkxwCApTkGAKoVBgCrcQYAaMcAgGzHAICuVQYAr5EBAKxhBgCtXQYAcMcAgHTHAIB4xwCAfMcAgIDHAICExwCAiMcAgIzHAICQxwCAlMcAgJjHAICcxwCAgBkAAIEZAACCBQAAoMcAgISAAgC+gAMAhwwDAIasHADhaAYAqMcAgOOYBwCsxwCAsMcAgLTHAIDvrAcAuMcAgLzHAIDAxwCAxMcAgMjHAIDMxwCA0McAgNTHAICzZQMA2McAgLVlAwC2bQMA3McAgODHAIDkxwCAuukDALvlAwC8/QMAve0DAL7RAwC/0QMA6McAgOzHAIDwxwCA9McAgPjHAID8xwCAAMgAgATIAICogQMAqYEDAKqBAwCrgQMArIEDAK2BAwCugQMAr4EDALBBAwCxTQMAskUDALNVAwC0eQMAtXkDALYZAwC3GQM
AuCkDALkpAwC6OQMAuzkDALwpAwC9KQMAvhkDAL8ZAwCBGQAAgBEAAKMhAgCCLQAApSECAAjIAIAMyACApikCABDIAIAYyACAq6ECAKqtAgCtqQIArLkCAK+VAgCulQIAhEwCAL5IHQCHZB0AhuwcAONAAwAcyACA4aABACDIAIDvnAMAJMgAgCjIAIAsyACAMMgAgDTIAIA4yACAPMgAgEDIAIBEyACASMgAgEzIAIBQyACAVMgAgFjIAIDvtAEAhKgdAOF8BgBcyACA43AGAGDIAIBkyACAaMgAgGzIAICz4QEAcMgAgHTIAIB4yACAfMgAgLblAQC19QEAgMgAgLuhAQC62QEAvuQcAIjIAIC/rQEAvqUBAL2xAQC8uQEAqBUeAKkZHgCqKR4AqykeAKw9HgCtJR4Ari0eAK8lHgAUyACAgvkfAIH5HwCA4R8AhMgAgIzIAICGHAAAh7ADALjBHgC5wR4AusEeALvBHgC8wR4AvcEeAL7BHgC/wR4AsF0eALElHgCyLR4AsyUeALQhHgC1KR4AthkeALcZHgCjoR4AkMgAgJTIAICYyACAnMgAgKalHgCltR4AoMgAgKvhHgCqmR4ApMgAgKjIAICv7R4AruUeAK3xHgCs+R4ArMgAgLOZHwCwyACAtMgAgLa9HwC4yACAvMgAgLW1HwC6mR8Au5kfAMDIAIDEyACAvnkfAL95HwC8eR8AvXkfAKglHgCpUR4AqlUeAKtpHgCseR4ArXkeAK5pHgCvaR4AyMgAgMzIAIDQyACA1MgAgNjIAIDcyACA4MgAgOTIAIC42R4Aue0eALr5HgC7+R4AvOkeAL3pHgC+nR4Av5UeALAZHgCxGR4AsukeALPpHgC0+R4AtfkeALbpHgC36R4Ao90eAIIpAACBFQAAgB0AAOjIAICm+R4ApfEeAOzIAICr3R4Aqt0eAKTHAIDwyACArz0eAK49HgCtPR4ArD0eAITIAgCzQQEAvgwBAPjIAIC2QQEA/MgAgADJAIC1UQEAuk0BALslAQCGSAAAh1ABAL4lAQC/LQEAvDEBAL0xAQAEyQCACMkAgIQEAwC+gAQADMkAgO+oHwAQyQCAFMkAgL8oMQDjdB8AGMkAgOE4HgAcyQCAIMkAgCTJAIAoyQCALMkAgDDJAICjzQIANMkAgKXdAgA4yQCAPMkAgKbNAgBAyQCARMkAgKupAgCqwQIArb0CAKy9AgCvoQIArqkCAKm1AgCoaR0AqwECAKoJAgCtAQIArBkCAK8xAgCuAQIAhGwFAEjJAIBMyQCAUMkAgFTJAICCnQEAgZ0BAICdAQC55QMAuOUDALvlAwC65QMAveUDALzlAwC/5QMAvuUDALEhAgCwSQIAsyUCALIlAgC1KQIAtCECALcVAgC2FQIAqM0CAKnRAgCq0QIAqw0BAKwVAQCtBQEArgEBAK8BAQBYyQCAXMkAgGDJAIBoyQCAvvgEAGzJAIBwyQCAdMkAgLgVAQC5HQEAuikBALspAQC89QEAvf0BAL71AQC/7QEAsEkBALFVAQCyXQEAs1UBALRNAQC1NQEAtj0BALcxAQCGoAUAh8gFAHjJAIDvvAAAfMkAgIDJAICEyQCA74weAIQsBwDh8B4AiMkAgOMcHgCMyQCA4ZQBAJDJAIDjbAAAsxkCAJTJAICYyQCAnMkAgIQACAC2xQEAtd0BAKDJAIC70QEAus0BAKTJAICoyQCAv7EBAL7JAQC9wQEAvMkBAKPZBQBkyQCArMkAgLDJAIC0yQCApgUGAKUdBgC4yQCAqxEGAKoNBgC8yQCAwMkAgK9xBgCuCQYArQEGAKwJBgDEyQCAgh0AAIEdAACAHQAAyMkAgMzJAIDQyQCA1MkAgIZAAwCHxAMA2MkAgNzJAIDgyQCA5MkAgOjJAIDsyQCAqK0HAKmxBwCqsQcAq7EHAKwZBwCtBQcArg0HAK8FBwDwyQCA9MkAgPjJAID8yQCAAMoAgATKAIAIygC
ADMoAgLgtBwC5zQAAusUAALvdAAC8zQAAvf0AAL71AAC/nQAAsEkHALFVBwCyUQcAsykHALQ5BwC1OQcAtiUHALcVBwCzOQYAEMoAgBTKAIAYygCAHMoAgLaFBgC1kQYAIMoAgLuRBgC6jQYAJMoAgCjKAIC//QYAvv0GAL39BgC8hQYALMoAgKN9BgAwygCANMoAgKbBBgA4ygCAPMoAgKXVBgCqyQYAq9UGAEDKAIC+bAEArrkGAK+5BgCswQYArbkGAKjpAQCp6QEAqvkBAKv5AQCs6QEArekBAK45AQCvOQEAgPUAAIH9AACCwQAARMoAgIYQAACHdAEASMoAgPTIAIC4zQAAudUAALrVAAC75QAAvP0AAL2VAAC+kQAAv5EAALBJAQCxSQEAslkBALNZAQC0SQEAtUkBALb9AAC39QAA7/QGAEzKAIBQygCAVMoAgO8wAgBYygCAXMoAgGDKAIDj4AcAZMoAgOGAAQBoygCA4ygGAGzKAIDhyAUAcMoAgLMxAgB0ygCAeMoAgJYAAAB8ygCAtikCALUhAgCAygCAu80CALrNAgCEygCAiMoAgL/NAgC+zQIAvc0CALzNAgCMygCAkMoAgJTKAICj/QIAmMoAgKXtAgCm5QIAnMoAgKDKAICkygCAqgECAKsBAgCsAQIArQECAK4BAgCvAQIAgA0AAIEVAACCHQAAqMoAgKzKAICwygCAvlQMALjKAICGwAwAhyQDALzKAIDAygCAxMoAgMjKAIDMygCA0MoAgKi5AgCpAQEAqgEBAKsBAQCsBQEArQ0BAK4FAQCvOQEAhKgNANTKAIDYygCA3MoAgODKAIDkygCA6MoAgOzKAIC4LQEAucUBALrNAQC7xQEAvMEBAL3JAQC++QEAv/kBALBNAQCxUQEAslUBALMpAQC0OQEAtSUBALYlAQC3FQEA4RgGAPDKAIDjOAcA9MoAgPjKAIC+WAwA/MoAgADLAICEbA8ABMsAgL5gDwAIywCADMsAgBDLAIDvcAYAFMsAgIAVAACBGQAAgi0AAITMDwDjYAYAGMsAgOGgAQAcywCA73QAACDLAICGyAwAh/wMACjLAIAsywCAMMsAgDTLAICjCQ4AtMoAgCTLAIA4ywCAPMsAgKYNDgClDQ4AQMsAgKsVDgCqCQ4ARMsAgEjLAICvYQ4Arn0OAK19DgCsAQ4ATMsAgLOpDgBQywCAVMsAgLapDgBYywCAXMsAgLWpDgC6SQ8Au0kPAGDLAIBkywCAvkkPAL9JDwC8SQ8AvUkPAKhdDgCpbQ4AqmUOAKt9DgCsZQ4ArW0OAK5lDgCvuQ8AaMsAgGzLAIBwywCAdMsAgHjLAIB8ywCAgMsAgITLAIC4UQ8AuV0PALpVDwC7aQ8AvH0PAL1lDwC+bQ8Av2EPALDJDwCxyQ8AstkPALPZDwC0yQ8AtckPALZ9DwC3cQ8AiMsAgLURDwC2EQ8AjMsAgIARAACBGQAAgikAALMVDwC8HQ8AvWEPAL5hDwC/fQ8AkMsAgJTLAIC6FQ8AuwkPAKOtDwCYywCAhugAAIfIAQCcywCApq0PAKWtDwCgywCAq00OAKpNDgCkywCAqMsAgK9NDgCuTQ4ArU0OAKxNDgCocQ4AqXEOAKpxDgCrcQ4ArJ0BAK2FAQCuhQEAr7UBAL7sAACsywCAsMsAgLTLAIC4ywCAvMsAgMDLAIDEywCAuGEBALlhAQC6YQEAu2EBALxhAQC9YQEAvmEBAL9hAQCwzQEAsaUBALKhAQCzoQEAtKUBALWtAQC2kQEAt5EBALP5DQDIywCAzMsAgNDLAIDUywCAtgUCALUVAgDYywCAu2ECALoJAgDcywCA4MsAgL9pAgC+YQIAvXUCALx1AgDkywCAo70NAOjLAIDsywCApkECAPDLAID0ywCApVECAKpNAgCrJQIA+MsAgPzLAICuJQIAry0CAKwxAgCtMQIAge0AAIDtAADv0AEAgh0AAADMAIAIzAC
AhjgEAIdQAwAMzACAEMwAgBTMAIAYzACA4eABABzMAIDjZA8AIMwAgCTMAIAozACALMwAgLORAwAwzACAtbkDALZ9AwA0zACAOMwAgDzMAIC6WQMAu1kDALxJAwC9SQMAvv0AAL/1AACoRQIAqVUCAKpVAgCrZQIArH0CAK2xAgCusQIAr7ECAL5oBQBAzACARMwAgEjMAIBMzACAUMwAgFTMAIBYzACAuF0BALltAQC6ZQEAuw0BALwZAQC9GQEAvg0BAL8FAQCw0QIAsdECALLRAgCz0QIAtHUBALV9AQC2dQEAt20BAOF4DwDjNA4A47gOAOF8DgBczACAYMwAgGTMAIBozACAbMwAgHDMAIB4zACAfMwAgIDMAIDv5A4A79QOAITMAICjnQIAgmEAAIFpAACAUQAAhJwFAKZxAgCltQIAiMwAgKtVAgCqVQIAhkgEAIfMBACv+QEArvEBAK1FAgCsRQIAqJUGAKmlBgCqrQYAq6UGAKy9BgCtoQYArqUGAK/dBgB0zACAjMwAgJDMAICUzACAmMwAgJzMAICgzACApMwAgLhtBwC5dQcAun0HALt1BwC8bQcAvcUHAL7NBwC/xQcAsKUGALGtBgCyuQYAs7EGALSRBgC1kQYAtl0HALdVBwCzJQYAqMwAgKzMAICwzACAtMwAgLYhBgC1NQYAuMwAgLtpBgC6YQYAvMwAgMDMAIC/VQYAvlUGAL1lBgC8bQYAxMwAgKNhBgDIzACAzMwAgKZlBgDQzACA1MwAgKVxBgCqJQYAqy0GANjMAIDczACArhEGAK8RBgCsKQYArSEGAKipBgCpqQYAqrkGAKuxBgCszQYArTEBAK4xAQCvMQEAgMkBAIHJAQCCBQAA4MwAgL54AgCEeAIA5MwAgOjMAIC43QEAue0BALrlAQC7jQEAvJkBAL2ZAQC+jQEAv4UBALBRAQCxUQEAslEBALNRAQC09QEAtf0BALb1AQC37QEAszEGAOzMAICGKAAAh9wBAPDMAIC2sQEAtUUGAPTMAIC7lQEAupUBAPjMAID8zACAvzkBAL4xAQC9hQEAvIUBAATMAICjdQYAAM0AgATNAICm9QEACM0AgAzNAIClAQYAqtEBAKvRAQAQzQCAFM0AgK51AQCvfQEArMEBAK3BAQAYzQCAHM0AgCDNAIAkzQCAKM0AgCzNAIAwzQCANM0AgDjNAIA8zQCAQM0AgETNAIBIzQCATM0AgFDNAIC+cAMAhQA8AOHEBgCERAIA44wHAIBhAACBYQAAgmEAAO9oAwCFRDwA4RACAFjNAIDj2CsAhlA9AIf0AwBczQCA76QHAGDNAIDvQAIAZM0AgGjNAIBszQCAcM0AgHTNAIB4zQCAhDw8AHzNAICAzQCAhM0AgIjNAIDj7AIAjM0AgOEsAQCzUQMAkM0AgJTNAICYzQCAnM0AgLZ5AwC1cQMAoM0AgLs5AwC6MQMApM0AgKjNAIC/9QAAvvUAAL0VAwC8FQMAqD0CAKmBAgCqmQIAq5ECAKy5AgCtuQIArtECAK/RAgCEqD8Avqg/AKzNAICwzQCAtM0AgLjNAIC8zQCAwM0AgLhRAQC5UQEAulEBALtRAQC8cQEAvXEBAL5xAQC/cQEAsLUCALG9AgCygQIAs4ECALRxAQC1cQEAtnEBALdxAQCAtQAAgb0AAIK1AADIzQCAhrA/AIfgPADMzQCA71QAAL4sPgDhVAYA0M0AgOOIAADUzQCA2M0AgNzNAIDgzQCAo1ECAOTNAIC/2CYA6M0AgOzNAICmeQIApXECAPDNAICrOQIAqjECAPTNAID4zQCAr/UBAK71AQCtFQIArBUCAJAtJACRBSgAkg0oAJPZKACUhS0AlTUsAJbFLACXtTEAmAEwAJkVMACalTUAmyk0AJxtNACdmTUAnj04AJ81OABUzQCAttU+ALXFPgDEzQCAs9E+APzNAIAAzgCABM4AgL/ZPgC+1T4AvcU+ALzFPgC71T4
Auuk+AAjOAICPXSQAqeUJAKgVCACrBQwAqg0MAK0BEACsAQwAr0EQAK69EACh4QAADM4AgKMBBACi4QAApZ0EAKSVBACnuQgApgEIAKD1OQChBT0Aouk8AKP1PQAQzgCAFM4AgBjOAIAczgCAscEUALABFACzARgAsn0UALXVGAC01RgAIM4AgCTOAICCISUAgyklACjOAIAszgCAhsUpAIeBLACEGSkAhRkpAIoBLQCL+S0AMM4AgDjOAICOATEAj4k0AIyRMACNHTEAkkU1AJMZNQCG6AcAh+wBAJZZOQCXYTgAlPU0AJVZOQCaoTwAm0U9ADzOAIBAzgCAgX0AAIB9AACcQTwAglUAAKjpPwCp/T8Aqgk/AKsFPwCsHT8ArQU/AK4NPwCvBT8ARM4AgEjOAIBMzgCAUM4AgFTOAIBYzgCAXM4AgGDOAIC4DT8AuRU/ALoVPwC7JT8AvD0/AL39PgC+9T4Av+0+ALB9PwCxQT8AskE/ALNBPwC0QT8AtU0/ALY9PwC3NT8Ao4E8AGTOAIBozgCAbM4AgHDOAICmhTwApZU8AHTOAICrhTwAqrk8AHjOAIB8zgCAr4k8AK6FPACtlTwArJU8AITIAwCz7T0AgM4AgITOAIC26T0AiM4AgIzOAIC16T0Auq09ALu1PQCQzgCAlM4AgL6dPQC/IQIAvKU9AL2VPQCoDT0AqR09AKohPQCrPT0ArCU9AK0tPQCuJT0Ar1k9AIANAACBFQAAgh0AAJjOAICczgCAoM4AgKjOAIC+uAMAuLkCALlhAgC6GQIAuxkCALwJAgC9CQIAviECAL8hAgCwLT0AsTU9ALI1PQCzBT0AtB09ALWhAgC2oQIAt6ECAKOpPACszgCAhigFAIfsAgCwzgCApq08AKWtPAC0zgCAq/E8AKrpPAC4zgCAvM4AgK9lAwCu2TwArdE8AKzhPADAzgCAsykCAMTOAIDIzgCAtvkCAMzOAIDQzgCAtfkCALrVAgC73QIA1M4AgNjOAIC+eQEAv3kBALzFAgC9eQEA3M4AgODOAICj5QIA5M4AgKU1AgDozgCA7M4AgKY1AgDwzgCA9M4AgKsRAgCqGQIArbUBAKwJAgCvtQEArrUBAOPwPgDhrD8A4UA+AON8PwD4zgCA/M4AgADPAIAEzwCAgA0AAIERAACCEQAACM8AgO+oPgAMzwCAEM8AgO8gPgCoLQUAqW0FAKplBQCrrQUArLUFAK29BQCutQUAr60FAKTOAICE6AMAvuADABTPAICGEAMAh5gDABjPAIAczwCAuGkGALlpBgC6AQYAuwEGALwFBgC9DQYAvjEGAL8xBgCw1QUAsd0FALLVBQCzaQYAtHkGALV5BgC2aQYAt2EGAKg5BgCpgQcAqpkHAKuRBwCsuQcArbkHAK7ZBwCv1QcAIM8AgCTPAIA0zgCAKM8AgCzPAIAwzwCANM8AgDjPAIC4VQcAuV0HALppBwC7aQcAvAEHAL0BBwC+AQcAvwEHALCtBwCxsQcAsrEHALOFBwC0nQcAtXUHALZ9BwC3cQcAsxEGADzPAIBAzwCARM8AgEjPAIC2OQYAtTEGAEzPAIC7dQYAumkGAFDPAIBUzwCAv7EGAL5ZBgC9UQYAvGUGAFjPAICjVQYAXM8AgGDPAICmfQYAZM8AgGjPAICldQYAqi0GAKsxBgBszwCAcM8AgK4dBgCv9QYArCEGAK0VBgCouQEAqbkBAKopAQCrKQEArD0BAK0lAQCuLQEAryUBAHTPAICCHQAAgR0AAIAdAAB4zwCAfM8AgIDPAIC+cAEAuIEAALmNAAC6hQAAu5kAALyJAAC9vQAAvrUAAL99AACwXQEAseEAALLhAACz4QAAtOEAALXpAAC20QAAt9EAAITIAgCzpQIAhzgDAIYoAgC2oQIAiM8AgIzPAIC1sQIAup0CALshAwC+bAMAkM8AgL4hAwC/KQMAvDEDAL0xAwCj4QIAlM8AgJjPAICczwC
AoM8AgKblAgCl9QIApM8AgKtlAwCq2QIAqM8AgKzPAICvbQMArmUDAK11AwCsdQMAqZkAAKiRAACrzQAAqqEAAK3dAACs3QAAr8UAAK7NAAC+LA0AsM8AgLTPAIC4zwCAvM8AgMDPAIDEzwCAyM8AgLnBAQC4eQAAu8EBALrJAQC9wQEAvNkBAL/FAQC+xQEAsY0AALCNAACzQQAAskkAALVBAAC0WQAAt0EAALZJAADMzwCA0M8AgNTPAIDYzwCA3M8AgO9QBwDgzwCA5M8AgL74DwDjdAcA6M8AgOF8BACAGQAAgQkAAIJ5AADszwCA8M8AgLNpAQD4zwCAhMQCALYdAQD8zwCAANAAgLUVAQC6CQEAuwkBAIboDQCH6A0Avt0BAL/FAQC83QEAvdUBAATQAIAI0ACADNAAgBDQAIDv1AAAFNAAgBjQAIDvTAEA47ADAOG0BgDhgAEA45gBABzQAIAg0ACAJNAAgCjQAIAs0ACAMNAAgKPlAQCEwA0ApZkBADTQAIA40ACAppEBADzQAIBA0ACAq4UBAKqFAQCtWQEArFEBAK9JAQCuUQEA9M8AgETQAIBI0ACATNAAgFDQAIBU0ACAWNAAgFzQAICoaQ8AqXEPAKpxDwCrrQ8ArLUPAK29DwCutQ8Ar6kPALDZDwCx9Q8Asv0PALP1DwC07Q8AtZUPALadDwC3iQ8AuLkPALmFDwC6jQ8Au2kAALx5AAC9eQAAvmkAAL9pAACBnQAAgJ0AAGDQAICCBQAAZNAAgGjQAIBs0ACAcNAAgIaAAwCH9AMAdNAAgHjQAIB80ACAgNAAgITQAICEzwCAs5kPAIjQAICM0ACAkNAAgJTQAIC2XQ8AtV0PAJjQAIC7UQ8Aun0PAJzQAICg0ACAvzEPAL5JDwC9QQ8AvEkPAKNZDgCk0ACAqNAAgKzQAICw0ACApp0OAKWdDgC00ACAq5EOAKq9DgC40ACAvNAAgK/xDgCuiQ4ArYEOAKyJDgDA0ACAxNAAgMjQAIDM0ACAgBkAAIEZAACCBQAA0NAAgISgAQDU0ACAh+gBAIYABADY0ACA3NAAgODQAIDk0ACAqBUBAKkdAQCqFQEAqyUBAKw9AQCtJQEAri0BAK8lAQDo0ACA7NAAgPDQAID00ACA+NAAgPzQAIAA0QCABNEAgLjJAAC5yQAAutkAALvRAAC8+QAAvfkAAL6ZAAC/mQAAsCUBALEtAQCyJQEAsz0BALQtAQC1HQEAthUBALf5AAAI0QCADNEAgBDRAICzkQIAFNEAgLW5AgC2qQIAGNEAgBzRAIAg0QCAuu0CALvlAgC8/QIAveUCAL7lAgC/1QIApvECACTRAIAo0QCApeECACzRAICjyQIAMNEAgDTRAICuvQIAr40CAKylAgCtvQIAqrUCAKu9AgA40QCAPNEAgID5AACB+QAAggUAAEDRAIC+yAMAhBgDAEjRAIBM0QCAUNEAgFTRAIBY0QCAXNEAgGDRAIBk0QCAhhgEAIecAwBo0QCAbNEAgHDRAIB00QCAeNEAgHzRAIDvsAIAgNEAgOGUAQCE0QCA42wCAIjRAICM0QCAkNEAgJTRAICY0QCA79APAJzRAICg0QCApNEAgKjRAIDhrAEArNEAgONsAACAMQAAgT0AAIIdAADv9A4A42wOALDRAIDhLA8AvnAFALM5AgCEDAUAhugEAIdgBQDcAAAAtvECALX5AgC40QCAu9UCALrVAgC80QCAwNEAgL91AQC+dQEAvcUCALzFAgDE0QCA4fQOAMjRAIDjUA4AzNEAgNDRAIDU0QCA2NEAgNzRAIDg0QCA5NEAgOjRAIDs0QCA8NEAgPTRAIDv5A8ApmUCAPjRAID80QCApW0CAADSAICjrQIABNIAgAjSAICu4QEAr+EBAKxRAgCtUQIAqkECAKtBAgAM0gCAENIAgKiZBgCpmQYAqqkGAKupBgCsuQYArbkGAK6pBgCvqQYAFNIAgIIdAACBHQA
AgB0AABjSAIAc0gCAINIAgL50AwC4rQYAubUGALq9BgC7tQYAvK0GAL1RBwC+UQcAv1EHALChBgCxoQYAsqEGALOhBgC0oQYAtaEGALalBgC3mQYARNEAgLMlBgCExAMAtNEAgLY9BgAk0gCAKNIAgLU1BgC6YQYAu2EGAIYIAACHiAAAvmEGAL9hBgC8cQYAvXEGAKNhBgAs0gCAMNIAgDTSAIA40gCApnkGAKVxBgA80gCAqyUGAKolBgBA0gCARNIAgK8lBgCuJQYArTUGAKw1BgCoXQYAqW0GAKplBgCrjQYArJkGAK2FBgCujQYAr4UGAEjSAIBM0gCAUNIAgFTSAIBY0gCAXNIAgGDSAIBk0gCAuIUGALmNBgC6mQYAu5UGALyNBgC9rQYAvqUGAL99AQCw/QYAscUGALLNBgCzxQYAtN0GALXFBgC2zQYAt8UGALPtBgBo0gCAbNIAgHDSAIB00gCAtgUGALURBgB40gCAuwEGALo5BgB80gCAgNIAgL8BBgC+GQYAvREGALwZBgCE0gCAo6kGAIjSAICM0gCApkEGAJDSAICElAEApVUGAKp9BgCrRQYAvqABAJjSAICuXQYAr0UGAKxdBgCtVQYAqJkCAKnBAgCqwQIAq8ECAKzBAgCtyQIArvECAK/xAgCB7QMAgO0DAJzSAICC+QMAhpAcAId0AwCg0gCApNIAgLjFAwC5zQMAusUDALvdAwC8zQMAvf0DAL71AwC/nQMAsEEDALFBAwCyQQMAs0EDALRBAwC1QQMAtkEDALdBAwCzSQIAqNIAgKzSAICw0gCAtNIAgLZJAgC1SQIAuNIAgLuFAwC6hQMAvNIAgMDSAIC/hQMAvoUDAL2VAwC8lQMAxNIAgKMNAgDI0gCAzNIAgKYNAgDQ0gCA1NIAgKUNAgCqwQMAq8EDANjSAIDc0gCArsEDAK/BAwCs0QMArdEDAOOYAQDhpAcA4VgGAONYBgDhoAEA4NIAgOPQAADk0gCA6NIAgOzSAIDvOAAA8NIAgO/0AQD00gCA+NIAgO/4BgCAeQAAgRUAAIIdAACEAB0A/NIAgADTAIC+EB0ACNMAgIbAHACHrB0ADNMAgBDTAIAU0wCAGNMAgBzTAIAg0wCAu8UFALqhBQC5qQUAuJEFAL/NBQC+zQUAvckFALzVBQCzHQYAsh0GALEdBgCwHQYAt6EFALa9BQC1vQUAtL0FAKu9BgCqvQYAqb0GAKi9BgCvfQYArn0GAK19BgCsfQYAJNMAgCjTAIAs0wCAMNMAgDTTAIA40wCAPNMAgEDTAICo7R0AqS0eAKoxHgCrMR4ArJUeAK2dHgCulR4Ar40eAATTAIBE0wCASNMAgEzTAIBQ0wCAVNMAgFjTAIBc0wCAuKkeALmpHgC6XR8Au1EfALxxHwC9cR8AvnUfAL9pHwCw/R4Asc0eALLFHgCzrR4AtLkeALW5HgC2rR4At6UeALO5HgBg0wCAZNMAgGjTAICU0gCAth0eALUdHgBs0wCAuwkeALo5HgBw0wCAhOADAL99HgC+fR4AvXkeALwRHgCCaQAAo/0eAIBFAACBUQAAplkeAL6cAwB00wCApVkeAKp9HgCrTR4AhkgAAIdsAACuOR4ArzkeAKxVHgCtPR4AqF0eAKltHgCqZR4Aq30eAKxlHgCtbR4ArmUeAK/9HgB40wCAfNMAgIDTAICE0wCAiNMAgIzTAICQ0wCAlNMAgLhpAQC5aQEAunkBALt5AQC8aQEAvWkBAL7dAQC/1QEAsIUeALGNHgCyhR4As50eALSFHgC1jR4AtoUeALdZAQCz7R4AmNMAgJzTAICg0wCApNMAgLbtHgC17R4AqNMAgLtJHgC6QR4ArNMAgLDTAIC/SR4AvkEeAL1JHgC8UR4AtNMAgKOpHgC40wCAvNMAgKapHgDA0wCAxNMAgKWpHgCqBR4Aqw0eAMjTAIDM0wCArgUeAK8NHgCsFR4ArQ0eAKghAwCpIQM
AqiEDAKshAwCsIQMArSEDAK4hAwCvIQMA0NMAgNTTAIDY0wCAvmACANzTAIDg0wCA6NMAgOzTAIC4iQMAuYkDALqdAwC7lQMAvLkDAL25AwC+eQAAv3kAALDlAwCx7QMAsuUDALP9AwC07QMAtd0DALbVAwC3vQMAgKkAAIG1AACCvQAAs6UDAPDTAIC1pQMAtq0DAPTTAICE4AIA+NMAgLotAwC7JQMAvD0DAL0lAwC+JQMAvxUDAKPpAwD80wCAhmgEAIeAAwAA1ACApuEDAKXpAwAE1ACAq2kDAKphAwAI1ACADNQAgK9ZAwCuaQMArWkDAKxxAwAQ1ACAFNQAgBjUAIAc1ACAINQAgOE8HwAk1ACA40AeACjUAIAs1ACAMNQAgO+MHgA01ACAONQAgDzUAIBA1ACARNQAgIIlAACBEQAAgB0AAEjUAIDj5AMATNQAgOGsAQBQ1ACA77ADAIRkAgC+YAUAhtAEAIdEBQBY1ACAXNQAgGDUAIBk1ACAaNQAgGzUAIBw1ACAdNQAgHjUAIDvsAEAhKQFAOHcHgB81ACA4xABAIDUAICE1ACAiNQAgIzUAICzUQEAkNQAgJTUAICY1ACAnNQAgLYRAQC1fQEAoNQAgLsNAQC6DQEApNQAgKjUAIC//QAAvv0AAL39AAC8/QAAqDkGAKk5BgCqmQYAq5EGAKy1BgCt0QYArskGAK/BBgBU1ACArNQAgLDUAIC01ACAgA0AAIGxAACCsQAAuNQAgLhhBwC5YQcAumEHALt9BwC8ZQcAvW0HAL5lBwC/HQcAsIkGALGJBgCyaQcAs2kHALR5BwC1eQcAtmkHALdlBwCjEQYAvNQAgMDUAIC+gAMAxNQAgKZRBgClPQYAyNQAgKtNBgCqTQYAhggAAId8AwCvvQcArr0HAK29BwCsvQcAzNQAgNDUAICzSQcA1NQAgLVZBwDY1ACA3NQAgLZRBwDg1ACA5NMAgLtBBwC6dQcAvUUHALxFBwC/RQcAvkUHAKh5BgCpeQYAqokGAKuJBgCsmQYArZkGAK6JBgCviQYA5NQAgOjUAIDs1ACA8NQAgPTUAID41ACA/NQAgADVAIC4jQYAuZUGALqVBgC7pQYAvL0GAL1xAQC+cQEAv3EBALD5BgCxzQYAstkGALPZBgC0yQYAtckGALa9BgC3tQYAowEGAATVAIAI1QCADNUAgBDVAICmGQYApREGABTVAICrCQYAqj0GABjVAIAc1QCArw0GAK4NBgCtDQYArA0GACDVAIAk1QCAKNUAgCzVAICAGQAAgRkAAIIFAAAw1QCAhKwBAL6sAQCH6AAAhkwPADjVAIA81QCAQNUAgETVAIConQIAqcUCAKrNAgCrwQIArMUCAK3NAgCu+QIArz0DAEjVAIBM1QCAUNUAgFTVAIC+PAwAWNUAgFzVAIBg1QCAuMkDALnJAwC62QMAu9EDALz5AwC9+QMAvpkDAL+ZAwCwRQMAsU0DALJFAwCzXQMAtEUDALVNAwC2RQMAt/kDALNFAgBk1QCAaNUAgGzVAIBw1QCAtk0CALVNAgB01QCAu4kDALqBAwB41QCAfNUAgL+JAwC+gQMAvYkDALyRAwCA1QCAowECAITVAICI1QCApgkCAIzVAICQ1QCApQkCAKrFAwCrzQMAlNUAgJjVAICuxQMAr80DAKzVAwCtzQMAgO0BAIEVAACCEQAAhAACAJzVAIDhpAEAoNUAgOPsAACo1QCArNUAgLDVAIDvMAAAtNUAgLjVAIC81QCAwNUAgIbgDACH9AIAxNUAgMjVAIDM1QCA0NUAgO/MBgDU1QCA4bAHANjVAIDjEAYA3NUAgODVAIDk1QCA6NUAgOzVAIDw1QCA9NUAgPjVAID81QCAANYAgATWAIAI1gCA7+gBAIUYDwDhzAYADNYAgOMcBgCAKQAAgR0AAIIFAAAQ1gCAszkCAITMDQCGaA8Ah/wMAOHQ0gO28QEAtfkBABjWAIC72QE
AutEBAL7kDAAc1gCAv30BAL59AQC9fQEAvMEBAKjxDQCp8Q0AqvENAKvxDQCsMQ4ArTEOAK4xDgCvMQ4ApNUAgBTWAIAg1gCAJNYAgCjWAIAs1gCAMNYAgDTWAIC46Q4AuekOALqJDgC7hQ4AvJ0OAL2BDgC+gQ4Av7UOALBVDgCxXQ4AslUOALPpDgC0+Q4AtfkOALbpDgC34Q4Ao3kNADjWAIA81gCAQNYAgETWAICmsQ4ApbkOAEjWAICrmQ4AqpEOAEzWAIBQ1gCArz0OAK49DgCtPQ4ArIEOAFTWAICz7Q8AWNYAgFzWAIC26Q8AYNYAgGTWAIC16Q8Auq0PALu1DwA01QCAaNYAgL6VDwC/mQ8AvK0PAL2hDwCoIQ4AqSEOAKohDgCrPQ4ArCUOAK0tDgCuJQ4Ar1UOAGzWAIBw1gCAdNYAgHjWAICAHQAAgQkAAIK9AAB81gCAuDkOALk5DgC6yQ4Au8kOALzZDgC92Q4AvskOAL/JDgCwLQ4AsTUOALI9DgCzMQ4AtBUOALUZDgC2CQ4AtwkOAKOpDgCA1gCAhIACAL6AAQCFAAQApq0OAKWtDgCI1gCAq/EOAKrpDgCGKAcAhxgAAK/dDgCu0Q4AreUOAKzpDgCM1gCAs+0BAJDWAICU1gCAtuUBAJjWAICc1gCAte0BALplAQC7bQEAoNYAgKTWAIC+bQEAv10BALx1AQC9bQEAqN0NAKnpDQCqIQIAqyECAKwhAgCtIQIAriECAK8hAgCo1gCArNYAgLDWAIC01gCAohECAKMRAgCgqQ4AodUCALiJAgC5iQIAup0CALuVAgC8vQIAvXUDAL59AwC/dQMAsOUCALHtAgCy5QIAs/0CALTtAgC13QIAttUCALe9AgCjqQIAj8UaALjWAIC81gCAwNYAgKahAgClqQIAxNYAgKspAgCqIQIAyNYAgMzWAICvGQIArikCAK0pAgCsMQIAniUOAJ/lDgCc6QoAnRUKAJpFFgCbRQoAmFkWAJlRFgCWcRIAl4ETAJRVEgCV7RIAktEeAJPZHgCQtRoAkVUeAISpHwCFJR8AhiUfAIexEwDQ1gCA1NYAgIJZGwCDURsAjEUSAI2lFwCOpRcAj7kXAIA5+wHY1gCAijkTAIutEwCUmQsAlaEPAJZpDwCX3Q8A3NYAgO+cDwCSyQsAk30LAJxFAwDjeA4A4NYAgOGYDADk1gCAhHgCAJqRAwCbXQMA4QQAAL6IBQDj3OoD6NYAgOzWAIDw1gCA7+wAAO+MDgDhcA4A4fwOAOMwAADjeA4AgSEAAIA5AADvtO0DgikAALMJAgD41gCAhmgEAIcsBQD81gCAtg0CALUNAgAA1wCAu8UBALrFAQAE1wCACNcAgL99AQC+fQEAvdUBALzVAQCE1gCA9NYAgAzXAIAQ1wCAFNcAgBjXAIAc1wCAINcAgKi9BQCp5QUAquEFAKvhBQCs5QUAre0FAK7RBQCv0QUAsGEGALFhBgCyYQYAs2EGALTZBgC12QYAtskGALfBBgC4yQYAuckGALp5BwC7eQcAvEUHAL0lBwC+EQcAvw0HAKNJBQAk1wCAKNcAgCzXAIAw1wCApk0FAKVNBQA01wCAq4UGAKqFBgA41wCAPNcAgK89BgCuPQYArZUGAKyVBgBA1wCARNcAgEjXAIBM1wCAUNcAgFTXAIBY1wCAXNcAgIA5AACBOQAAggUAAGDXAIC+uAMAhLgDAGjXAIBs1wCAqMUGAKnVBgCq1QYAq+UGAKz9BgCtHQEArhUBAK8NAQBk1wCAcNcAgIaIAQCHHAEAdNcAgHjXAIB81wCAgNcAgLjpAQC56QEAuokBALuJAQC8mQEAvZkBAL6JAQC/iQEAsHUBALF9AQCydQEAs+kBALT5AQC1+QEAtukBALfhAQCzXQYAhNcAgIjXAICM1wCAhLwBALadAQC1dQYAkNcAgLu5AQC6sQEAlNcAgJjXAIC/PQEAvj0BAL09AQC8oQE
AnNcAgKMZBgCg1wCApNcAgKbZAQCo1wCArNcAgKUxBgCq9QEAq/0BALDXAIC01wCArnkBAK95AQCs5QEArXkBAKj5AgCp+QIAqi0DAKs9AwCsJQMArS0DAK4lAwCvmQMAuNcAgLzXAIDA1wCAxNcAgIANAACBsQAAgrEAAMjXAIC4lQMAuZ0DALqhAwC7oQMAvHEAAL1xAAC+cQAAv3EAALDpAwCx6QMAsvUDALPFAwC03QMAtbUDALaxAwC3sQMAvswDAMzXAIDQ1wCA2NcAgNzXAIDg1wCA5NcAgO/kAgDo1wCA4ZQBAOzXAIDjLAEA8NcAgPTXAICHGAMAhhz8A7tNAwC6TQMA+NcAgPzXAIC/EQMAvnkDAL1xAwC8QQMAs8UDAITo/AMA2ACABNgAgAjYAIC2zQMAtc0DAAzYAICkAfwDpSX/A6bZ/wOnAfgDENgAgKEVAwCiHQMAoz0CAKwR9wOtAfADri3zA68B8wOoEfsDqZn7A6oB9AOrHfcDtAHoA7Vl6wO+xPwDhMT8A7AB7AOxVe8Dsk3vA7Nx7gMU2ACAGNgAgBzYAIAg2ACAJNgAgCjYAIAs2ACAMNgAgOFQBgDhNAQA42wBAOPoBgA02ACAONgAgDzYAIBA2ACAgDUAAIE9AACCNQAASNgAgEzYAIBQ2ACA77ABAO/ABgCj5QIAVNgAgIbo/AOHfP0DWNgAgKbtAgCl7QIAXNgAgKttAgCqbQIAYNgAgGTYAICvMQIArlkCAK1RAgCsYQIAqI3+A6mV/gOqnf4Dq5X+A6yx/gOtvf4Drqn+A6+p/gNE2ACAaNgAgGzYAIBw2ACAdNgAgHjYAIB82ACAgNgAgLgl/wO5Lf8DuiX/A7s9/wO8Jf8DvS3/A74l/wO/zf8DsKn+A7Gp/gOygf4Ds4H+A7SB/gO1if4Dtmn/A7cd/wOE2ACA4SD8A4jYAIDjePwDjNgAgJDYAICU2ACAmNgAgJzYAICg2ACApNgAgKjYAICAHQAAgXEAAIJxAADvDP0Ds1X+A6zYAICw2ACAvkAAALTYAIC2ff4DtXn+A7jYAIC7Lf4Dui3+A4boAACHrAAAvw3+A74F/gO9Ff4DvBX+A6OV/wO82ACAwNgAgMTYAIDI2ACApr3/A6W5/wPM2ACAq+3/A6rt/wPQ2ACA1NgAgK/N/wOuxf8DrdX/A6zV/wPY2ACAs/H+A9zYAIDg2ACAto3+A+TYAIDo2ACAtY3+A7pFAQC7TQEA7NgAgPDYAIC+RQEAv00BALxVAQC9TQEAqC3+A6k1/gOqPf4Dq0n+A6xB/gOtSf4DrnH+A69x/gP02ACA+NgAgPzYAIAA2QCABNkAgAjZAIAM2QCAENkAgLhJAQC5VQEAul0BALtVAQC8TQEAvXUBAL59AQC/dQEAsMUBALHNAQCyxQEAs90BALTFAQC1zQEAtsUBALd9AQCjtf0DFNkAgBjZAICExAMAHNkAgKbJ/QOlyf0DINkAgKsJAgCqAQIAKNkAgL7sAgCvCQIArgECAK0JAgCsEQIAgEkAAIFVAACCVQAAo0UDACzZAIClRQMApkUDADDZAICGwAQAhxQDAKopAwCrJQMArD0DAK0hAwCuIQMArxUDADTZAIA42QCAPNkAgEDZAIBE2QCASNkAgEzZAIBQ2QCAqH0CAKmhAwCqoQMAq6EDAKyhAwCtqQMArpEDAK+RAwCwgQMAsY0DALKFAwCzmQMAtIkDALW9AwC2tQMAt30DALhFAwC5TQMAukUDALtdAwC8RQMAvU0DAL5FAwC/+QAA1NcAgLMNAgBU2QCAWNkAgLYNAgBc2QCAYNkAgLUNAgC6YQIAu20CAGTZAIBo2QCAvmkCAL9dAgC8dQIAvWkCAGzZAIBw2QCAdNkAgHjZAIB82QCA4aQBAIDZAIDjQAMAhNkAgIjZAICM2QCA77gDAIAVAACBHQAAggUAAJDZAICEgAIAvsgFAIcYBQCGLAQAmNkAgJzZAICg2QC
A76gBAKTZAIDhdP4DqNkAgOPw/gOs2QCAsNkAgLTZAIC42QCAvNkAgMDZAIDE2QCAs5EBAMjZAIC1UQEAtlEBAMzZAIDQ2QCA1NkAgLp9AQC7dQEAvG0BAL39AAC+9QAAv+kAAKgpBgCpVQYAqlUGAKuNBgCslQYArZ0GAK6VBgCvjQYAlNkAgNjZAIDc2QCA4NkAgOTZAIDo2QCA7NkAgPDZAIC4bQcAuQUHALoNBwC7BQcAvB0HAL0FBwC+AQcAvz0HALD1BgCx/QYAsvUGALNlBwC0fQcAtWEHALZhBwC3VQcA4xAFAPTZAIDh8AQA+NkAgIAdAACBCQAAgjkAAPzZAIAA2gCAhOgDAL7gAwAE2gCA78wFAAjaAICHOAAAhhgAAKOdBgAM2gCAENoAgBTaAIAY2gCApl0GAKVdBgAc2gCAq3kGAKpxBgAg2gCAJNoAgK/lBwCu+QcArfEHAKxhBgCokQYAqZEGAKqRBgCrrQYArLkGAK2lBgCurQYAr6UGACjaAIAs2gCAMNoAgDTaAIA42gCAPNoAgEDaAIBE2gCAuGUBALltAQC6ZQEAu30BALxlAQC9bQEAvmUBAL/ZAQCw3QYAsaUGALKtBgCzpQYAtKEGALWpBgC2mQYAt5kGALMZBgBI2gCATNoAgFDaAIBU2gCAtiUGALUxBgBY2gCAu2EGALoZBgBc2gCAYNoAgL9tBgC+ZQYAvXEGALx5BgBk2gCAo10GAGjaAIBs2gCApmEGAHDaAICEmAEApXUGAKpdBgCrJQYAvqQBAHjaAICuIQYArykGAKw9BgCtNQYAqcUCAKixAgCrxQIAqsUCAK3NAgCsxQIAr/UCAK71AgB82gCAgNoAgITaAICI2gCAjNoAgJDaAICU2gCAmNoAgLnJAwC4wQMAu9kDALrBAwC9+QMAvMkDAL+ZAwC+8QMAsUUDALBFAwCzRQMAskUDALVFAwC0RQMAt0UDALZFAwCASQMAgUkDAIJdAwCzRQIAvtwMALVFAgC2RQIAnNoAgIYADACH5AMAuokDALuJAwC8mQMAvZkDAL6JAwC/iQMAowkCAKDaAICk2gCAqNoAgKzaAICmCQIApQkCALDaAICrxQMAqsUDALTaAIC42gCAr8UDAK7FAwCt1QMArNUDALzaAIDA2gCAxNoAgCTZAIDvAAAAyNoAgMzaAIDQ2gCA4+gAANTaAIDhjAEA2NoAgNzaAIDg2gCA6NoAgOzaAICAbQAAgXUAAIJ9AACEQAIAhvAMAId4DQDw2gCA9NoAgPjaAID82gCAANsAgATbAIAI2wCADNsAgBDbAIAU2wCAGNsAgBzbAIAg2wCAJNsAgCjbAIAs2wCAMNsAgO/MAQCE7AwA4TAGADTbAIDjGAEAONsAgDzbAIBA2wCARNsAgLPlAQBI2wCAhIQPAEzbAIBQ2wCAtuUBALX1AQBY2wCAu30BALrZAQC+oAwAXNsAgL8hAQC+OQEAvTEBALw5AQCo7Q0AqSUOAKotDgCrJQ4ArD0OAK0lDgCuLQ4AryUOAOTaAICC9Q8AgeUPAIDpDwBU2wCAYNsAgIaYAACHDAMAuK0OALlFDwC6TQ8Au0UPALxFDwC9TQ8AvkUPAL95DwCwXQ4AsfkOALKtDgCzpQ4AtL0OALWlDgC2pQ4At5UOAGTbAIDv7AwAaNsAgGzbAIBw2wCAdNsAgHjbAIB82wCAvugAAIDbAICE2wCAiNsAgIzbAIDj6A0AkNsAgOEEDACj5Q4AlNsAgJjbAICc2wCAoNsAgKblDgCl9Q4ApNsAgKt9DgCq2Q4AqNsAgKzbAICvIQ4ArjkOAK0xDgCsOQ4AqDkOAKk5DgCqUQ4Aq1EOAKxxDgCtcQ4ArnEOAK9xDgCw2wCAtNsAgLjbAIC82wCAgBkAAIEZAACCBQAAwNsAgLjRDgC50Q4AutEOALvlDgC84Q4AveEOAL7hDgC/4Q4AsBEOALERDgCyEQ4AsxEOALTxDgC18Q4
AtvEOALfxDgCz2Q4AyNsAgIYoAACHuAAAzNsAgLbxDgC1+Q4A0NsAgLvVDgC61Q4A1NsAgNjbAIC/NQ4AvjUOAL3FDgC8xQ4A3NsAgKOdDgDg2wCA5NsAgKa1DgDo2wCA7NsAgKW9DgCqkQ4Aq5EOAPDbAID02wCArnEOAK9xDgCsgQ4ArYEOAKjdDQCp6Q0Aqj0CAKuNAgCsmQIArZkCAK6JAgCviQIAvqwEAPjbAID82wCAhCADAADcAIAE3ACACNwAgAzcAIC4iQIAuYkCALqZAgC7kQIAvLkCAL25AgC+eQMAv3kDALD5AgCx+QIAss0CALPFAgC03QIAtcUCALbBAgC3uQIAs7UCABDcAIAU3ACAGNwAgBzcAIC2GQIAtRECACDcAIC7PQIAuj0CACTcAIAo3ACAvwECAL4ZAgC9EQIAvBkCACzcAICj8QIAMNwAgDjcAICmXQIAPNwAgEDcAIClVQIAqnkCAKt5AgCGSAUAh6wEAK5dAgCvRQIArF0CAK1VAgCohQIAqZUCAKqVAgCrpQIArL0CAK3VAgCu0QIAr9ECAETcAIBI3ACATNwAgFDcAICB8QEAgJkBAHTaAICC9QEAuHkBALl5AQC6zQEAu8UBALzdAQC9xQEAvsUBAL/1AQCwtQIAsb0CALKBAgCzgQIAtFUBALVdAQC2SQEAt0kBAFTcAIBY3ACAXNwAgO/UAQCEEAUAYNwAgGTcAIDvjA4AvuwFAOHsDgBo3ACA4xwOAGzcAIDhlAEAcNwAgONkDgCzXQIAdNwAgHjcAIB83ACAgNwAgLYVAgC1dQIAhNwAgLs5AgC6MQIAiNwAgIzcAIC/2QEAvtEBAL0VAgC8FQIAo50FADTcAICQ3ACAlNwAgJjcAICm1QUApbUFAJzcAICr+QUAqvEFAKDcAICk3ACArxkGAK4RBgCt1QUArNUFAIBRAACBWQAAgmEAALOVBgCo3ACAtXEHALZxBwCs3ACAhkADAIdUAwC67QcAu+UHALzlBwC97QcAvtEHAL/NBwCw3ACAtNwAgLjcAIC83ACAwNwAgMTcAIDvQAQAyNwAgOEwBwDM3ACA45QEANDcAIDU3ACA2NwAgNzcAIDg3ACAoxkGAOTcAIDo3ACA7NwAgPDcAICm/QcApf0HAPTcAICraQcAqmEHAPjcAID83ACAr0EHAK5dBwCtYQcArGkHAKjNBwCp0QcAqtEHAKstBgCsNQYArT0GAK41BgCvnQYAAN0AgATdAIAI3QCADN0AgIAZAACBGQAAggUAABDdAIC4iQYAuYkGALqZBgC7kQYAvLkGAL25BgC+UQEAv1EBALDlBgCx7QYAsv0GALP1BgC02QYAtcUGALbBBgC3uQYAqNEBAKnZAQCqCQEAqwkBAKwZAQCtGQEArgkBAK8JAQCEYAEAvnwBAIeoAACGjAEAGN0AgBzdAIAg3QCAJN0AgLgJAQC5CQEAuhkBALsRAQC8OQEAvTkBAL75AAC/+QAAsH0BALFBAQCyRQEAs10BALRFAQC1TQEAtkUBALc5AQAo3QCALN0AgDDdAICzjQIANN0AgLWdAgC2lQIAON0AgDzdAIBA3QCAurUCALuJAgC8nQIAvYUCAL6NAgC/hQIAps0CAETdAIBI3QCApcUCAEzdAICj1QIAUN0AgFTdAICu1QIAr90CAKzFAgCt3QIAqu0CAKvRAgCE9AMAWN0AgKgxAwCpMQMAqjEDAKsxAwCskQAArZEAAK6RAACvjQAAXN0AgGDdAIBk3QCAaN0AgGzdAIBw3QCAdN0AgHjdAIC4vQAAuWUAALptAAC7ZQAAvH0AAL1lAAC+bQAAv2UAALD9AACxxQAAss0AALOpAAC0uQAAtaUAALahAAC3oQAAgL0BAIEJAACCGQAAfN0AgIDdAIC+WAIAhxQdAIacHQCEbB0AxNsAgIjdAICM3QCAvrwcAJDdAICU3QCAmN0AgLP5AgCc3QCAoN0AgKTdAICo3QC
AtlEBALVZAQC+3B8Au0EBALp5AQCs3QCAsN0AgL8hAQC+PQEAvT0BALxZAQDhcAcAtN0AgOMIBgC43QCA78wAALzdAIDA3QCAxN0AgOMQAADI3QCA4dABAMzdAICGkBwAh/QcAO/gBgDQ3QCAo3kCANTdAIDY3QCA3N0AgODdAICm0QEApdkBAOTdAICrwQEAqvkBAOjdAIDs3QCAr6EBAK69AQCtvQEArNkBAITdAICCFQAAgeUfAIDlHwDw3QCA9N0AgPjdAID83QCAqAkfAKkJHwCqHR8AqxUfAKwNHwCtcR8ArnEfAK9xHwCwER8AsS0fALIlHwCzyR8AtN0fALXBHwC2wR8At8EfALjFHwC5yR8AutUfALupHwC8uR8AvbkfAL6pHwC/oR8As7UfAADeAIAE3gCACN4AgAzeAIC20R8AtaUfABDeAIC7yR8AuvUfABTeAIAY3gCAvyUfAL45HwC9PR8AvNEfABzeAIAg3gCAJN4AgCjeAIAs3gCA4WAfADDeAIDjtBwANN4AgDjeAIA83gCA7wAdAEDeAIBE3gCASN4AgEzeAICjNR4AUN4AgFTeAIBY3gCAXN4AgKZRHgClJR4AYN4AgKtJHgCqdR4AhKgCAGTeAICvpR4ArrkeAK29HgCsUR4AgE0AAIFVAACCVQAAs8kBAGjeAIC12QEAtskBAGzeAICGoAAAhwQBALrFAQC7rQEAvLUBAL29AQC+tQEAv60BAKiZAQCpmQEAqg0BAKsFAQCsHQEArQUBAK4FAQCvNQEAcN4AgHTeAIB43gCAfN4AgIDeAICE3gCAiN4AgIzeAIC4JQEAuS0BALo5AQC7OQEAvCkBAL0pAQC+3QAAv9UAALBNAQCxJQEAsi0BALMlAQC0PQEAtSUBALYhAQC3HQEAkN4AgJTeAICY3gCAo4kCAJzeAIClmQIApokCAKDeAICk3gCAqN4AgKqFAgCr7QIArPUCAK39AgCu9QIAr+0CAKzeAICw3gCAtN4AgIRAAgC43gCAvN4AgMDeAIDE3gCAgA0AAIEVAACCHQAAyN4AgMzeAIDQ3gCAh7QDAIbcBAC+zAMA2N4AgNzeAIDg3gCA7+gCAOTeAIDo3gCA7N4AgOP8AgDw3gCA4dABAPTeAID43gCA/N4AgADfAIAE3wCAs2EDAAjfAIAM3wCAEN8AgBTfAIC2eQMAtXEDABjfAIC7XQMAul0DABzfAIAg3wCAv+EAAL79AAC9/QAAvP0AALC5AgCxuQIAsgkBALMJAQC0GQEAtQUBALYFAQC3PQEAuAUBALllAQC6bQEAu2UBALxhAQC9YQEAvmEBAL9hAQCFXAcAJN8AgCjfAIAs3wCAFN0AgDDfAIA03wCAON8AgKgxAgCpOQIAqskCAKvJAgCs2QIArdkCAK7JAgCvyQIAhMwFAOGAHgA83wCA47weAOE4HgBA3wCA46AAAL4QBABI3wCATN8AgO8MHgBQ3wCAVN8AgFjfAIBc3wCA73QeAKNhAgCCUQAAgUEAAICRAABg3wCApnkCAKVxAgBk3wCAq10CAKpdAgCGyAQAhzwFAK/hAQCu/QEArf0BAKz9AQCohQYAqY0GAKqFBgCrmQYArIkGAK2JBgCuvQYAr7EGAETfAIBo3wCAbN8AgHDfAIB03wCAeN8AgHzfAICA3wCAuJ0GALmtBgC6pQYAuwkHALwZBwC9GQcAvg0HAL8FBwCw0QYAsdEGALLRBgCz0QYAtLUGALW9BgC2tQYAt60GALMNBgCE3wCAiN8AgIzfAICQ3wCAtgkGALUBBgCU3wCAuxUGALoVBgCY3wCAnN8AgL95BgC+cQYAvQUGALwFBgCg3wCA4aAEAKTfAIDjXAUAgA0AAIE1AACCPQAAqN8AgKzfAICw3wCAhGADAL5sAAC/8AEAhZAAALTfAIDvmAUAo40HAIQIAACGAAwAh4wAALjfAICmiQcApYEHALzfAICrlQcAqpUHAMDfAIDE3wC
Ar/kHAK7xBwCthQcArIUHAMjfAICz6QYAzN8AgNDfAIC26QYA1N8AgNjfAIC16QYAukUBALtNAQDc3wCA4N8AgL5FAQC/TQEAvFUBAL1NAQCoIQYAqSEGAKolBgCrPQYArCUGAK0tBgCuSQYAr0EGAOTfAIDo3wCA7N8AgPDfAID03wCA+N8AgPzfAIAA4ACAuEkBALlJAQC6WQEAu1EBALx5AQC9eQEAvhkBAL8VAQCwxQEAsc0BALLFAQCz3QEAtMUBALXNAQC2xQEAt3kBAATgAIAI4ACADOAAgKOhBQAQ4ACApaEFAKahBQAU4ACAjyHqAxjgAICqDQIAqwUCAKwdAgCtBQIArg0CAK8FAgCX7RIAlmUSAJVFEQCUnRYAk3EWAJJVFQCReesDkFnqA59hBgCeNQUAnUUaAJxpGgCbVRkAmkUeAJlZHgCYRR0A4WAAABzgAIDjTD4AIOAAgKOxAgCi1QEAobUHAKCJBgCxATgAsAk+ALOVOgCyjToAtbUmALQBJADvaDoAvjAMAKnJNgCowTYAqwEwAKrhNwCtzTMArPUyAK/5PgCuATwAoRkCACjgAICjbQ4Aom0OAKX1CgCkAQgAp4ULAKaZCgCGAA0Ah0QNAIIJ6wODCesDhDHqA4UVFACGORcAh80XAISgDQAs4ACAiiUQAIsNEwCMnRMAjQ0cAI4ZHwCPDR8A1N4AgO8AAwCSbRgAk0kbAJR9GwCVBQQAllkHAJdJBwAw4ACANOAAgJpFBgCbLQAAnFEDAONgAAA44ACA4WwAAIClAQCBAQEAggUBAL4ADAA84ACAQOAAgETgAIDviAEASOAAgOFUBgBM4ACA41QBAFDgAIBU4ACAWOAAgFzgAICz6QIAYOAAgGTgAIBo4ACAbOAAgLadAgC1mQIAcOAAgLuJAgC6vQIAdOAAgHjgAIC/WQIAvlECAL1ZAgC8kQIAoykNAHzgAICA4ACAhOAAgIjgAICmXQ0ApVkNAIzgAICrSQ0Aqn0NAJDgAICY4ACAr5kNAK6RDQCtmQ0ArFENAIBRAACBWQAAgmEAALMtDwCc4ACAtS0PALbJDwCg4ACAhkADAIcIAwC6yQ8Au8UPALzBDwC9wQ8AvsEPAL/BDwAk4ACAlOAAgKTgAICo4ACArOAAgLDgAIC04ACAuOAAgKhFDgCpgQ8AqskPAKvJDwCsyQ8ArSUPAK4tDwCvJQ8AsGEPALFtDwCyeQ8As3kPALRpDwC1aQ8Ath0PALcVDwC4LQ8AuTUPALo1DwC7BQ8AvB0PAL3xAAC+8QAAv/EAAKNhDgC84ACAhMQBAMDgAIDE4ACApoUOAKVhDgDI4ACAq4kOAKqFDgDM4ACA0OAAgK+NDgCujQ4ArY0OAKyNDgDU4ACA2OAAgNzgAIDg4ACA5OAAgOjgAIDs4ACA8OAAgPTgAICCHQAAgR0AAIAdAAD44ACA/OAAgADhAIC+tAEAqK0BAKnVAQCq1QEAqwUBAKwdAQCtBQEArg0BAK8FAQCGgAEAhxgBAAjhAIAM4QCAEOEAgBThAIAY4QCAHOEAgLiFAAC5jQAAuoUAALudAAC8hQAAvY0AAL6FAAC/vQAAsH0BALHhAACy5QAAs/0AALTtAAC13QAAttUAALe9AACzXQIAIOEAgCThAIAo4QCALOEAgLaFAgC1lQIAMOEAgLslAwC6uQIANOEAgDjhAIC/GQMAvikDAL0pAwC8MQMAvswEAKMZAgA84QCAQOEAgKbBAgBE4QCASOEAgKXRAgCq/QIAq2EDAEzhAIBQ4QCArm0DAK9dAwCsdQMArW0DAKgpAwCpKQMAqjkDAKs5AwCsKQMArSkDAK6dAACvlQAAVOEAgFjhAIBc4QCAYOEAgGThAICCqQEAga0BAICtAQC4mQAAua0AALqlAAC7bQAAvHUAAL19AAC+dQAAv20AALDtAACx9QAAsvUAALPFAAC03QAAtb0AALa1AAC3qQAA4XgBAOEcDgDjEAA
A4zwOAGjhAIBs4QCAvhQEAHDhAICErAIAeOEAgId4BQCGDAUAfOEAgIDhAIDvvAAA70gOALPxAgCE4QCAiOEAgIzhAICQ4QCAtukCALXhAgCU4QCAu3EBALppAQCY4QCAhKAEAL85AQC+WQEAvVEBALxhAQCc4QCAhIwEAKDhAICEADgApOEAgKjhAICs4QCAsOEAgKqJDgCriQ4AqLkOAKmxDgCu/Q4Ar+EOAKz5DgCt9Q4Asq0OALNlDgCwkQ4AsaUOALZ9DgC3ZQ4AtH0OALV1DgC6XQ4Au+UNALhdDgC5VQ4AvuENAL/pDQC8/Q0AvfUNAKOxBQB04QCAtOEAgLjhAIC84QCApqkFAKWhBQDA4QCAqzEGAKopBgDE4QCAyOEAgK95BgCuGQYArREGAKwhBgDM4QCA0OEAgNThAIDY4QCAgB0AAIEJAACCOQAA3OEAgODhAIDk4QCAhsgAAIcMAwDo4QCA7OEAgPDhAID04QCAqKUHAKm1BwCqvQcAq8kHAKzZBwCt2QcArskHAK/BBwC+oAAA+OEAgPzhAIAA4gCABOIAgAjiAIAM4gCAEOIAgLjNAAC51QAAutUAALvlAAC8/QAAvZUAAL6dAAC/lQAAsIkHALFlBwCyYQcAs30HALRlBwC1bQcAtmUHALf1AACzNQYAFOIAgBjiAIAc4gCAIOIAgLZZBgC1UQYAJOIAgLuhBgC6TQYAKOIAgCziAIC/qQYAvqEGAL2pBgC8tQYAMOIAgDTiAIDv8AUAOOIAgDziAIBA4gCAROIAgEjiAICAPQAAgQkAAIIdAABM4gCA4cgGAFDiAIDjSAQAVOIAgKO1BgBY4gCAhigAAIdAAQBc4gCAptkGAKXRBgBg4gCAqyEGAKrNBgBk4gCAaOIAgK8pBgCuIQYArSkGAKw1BgBs4gCAs70BAHDiAIB04gCAtnkBAHjiAIB84gCAtXkBALpVAQC7XQEAgOIAgITiAIC++QAAv/kAALxFAQC9+QAAqHECAKlxAgCqcQIAq3ECAKy1AgCtvQIArrUCAK+tAgC+rDwAiOIAgIziAICQ4gCAlOIAgJjiAICc4gCAoOIAgLhpAwC5aQMAugkDALsJAwC8HQMAvQUDAL4NAwC/BQMAsNUCALHdAgCy1QIAs2kDALR5AwC1eQMAtmkDALdhAwCk4gCAqOIAgKziAICj9QIAsOIAgKUxAgCmMQIAtOIAgLjiAIC84gCAqh0CAKsVAgCsDQIArbEDAK6xAwCvsQMA7xgCAIIVAACBbQAAgG0AAMDiAIDI4gCAhvg8AIcYAwDM4gCA0OIAgNTiAIDY4gCA42wHAAThAIDhaAEA3OIAgKiFAgCplQIAqpUCAKulAgCsvQIArdUCAK7RAgCv0QIA4OIAgOTiAIDo4gCA7OIAgPDiAID04gCA+OIAgPziAIC4dQEAuX0BALp1AQC7zQEAvNUBAL3dAQC+yQEAv8EBALC1AgCxvQIAsoECALOBAgC0VQEAtV0BALZVAQC3TQEA4bQGAADjAIDj9AYABOMAgIQYPQAI4wCADOMAgBDjAIAU4wCAGOMAgBzjAIAg4wCAJOMAgCjjAIDvWAYALOMAgIF9AACAcQAAMOMAgIIFAAA44wCAPOMAgO+AAQC+VDwA4ZABAEDjAIDjfAYAROMAgEjjAIBM4wCAhtg8AIf0PACjnT0AxOIAgDTjAIBQ4wCAVOMAgKbVPQCltT0AWOMAgKv5PQCq8T0AXOMAgGDjAICvGT4ArhE+AK3VPQCs1T0AZOMAgLOhPgBo4wCAbOMAgLatPgBw4wCAdOMAgLWxPgC6ST8Au0k/AHjjAIB84wCAvkk/AL9JPwC8ST8AvUk/AKhVPgCpZT4Aqm0+AKtlPgCsfT4ArWk+AK65PwCvuT8AgOMAgITjAICI4wCAjOMAgJDjAICU4wCAmOMAgJzjAIC4VT8AuV0/ALpVPwC7bT8AvHU/AL19PwC+dT8Av20/ALDJPwCxyT8
Astk/ALPZPwC0yT8Atck/ALZ9PwC3cT8AghUAAKPhPwCAsQEAgbEBAKbtPwCg4wCAvtABAKXxPwCqCT4Aqwk+AITkAQCk4wCArgk+AK8JPgCsCT4ArQk+ALPdPACo4wCAhugAAIfMAQCs4wCAtpU8ALX1PACw4wCAu7k8ALqxPAC04wCAuOMAgL9ZPwC+UT8AvZU8ALyVPACoUT4AqVE+AKptPgCrYT4ArGE+AK1hPgCulQEAr40BAISgAQC84wCAwOMAgMTjAIDI4wCAzOMAgNDjAIDU4wCAuKkBALmpAQC6aQEAu2kBALx5AQC9eQEAvmkBAL9pAQCw/QEAsc0BALLFAQCzrQEAtLkBALW5AQC2rQEAt6UBALPlPQDY4wCA3OMAgODjAIDk4wCAtuE9ALXpPQDo4wCAuwkCALo5AgDs4wCA8OMAgL99AgC+fQIAvXkCALwRAgD04wCAo6E9APjjAID84wCApqU9AADkAIAE5ACApa09AKp9AgCrTQIACOQAgAzkAICuOQIArzkCAKxVAgCtPQIAgOkAAIHpAACCHQAAvsADAO/kAgAQ5ACAh1QDAIY8BADjEAEAGOQAgOH4AQAc5ACAIOQAgCTkAIAo5ACALOQAgDDkAIA05ACAOOQAgLORAwA85ACAtbkDALZ9AwBA5ACAROQAgEjkAIC6WQMAu1kDALxJAwC9SQMAvv0AAL/1AACoRQIAqVUCAKpVAgCrZQIArH0CAK2xAgCusQIAr7ECAIRsBQBM5ACAUOQAgFTkAIBY5ACAXOQAgL5wBQBg5ACAuF0BALltAQC6ZQEAuw0BALwZAQC9GQEAvg0BAL8FAQCw0QIAsdECALLRAgCz0QIAtHUBALV9AQC2dQEAt20BAOFAPwDjvAAA4wg+AOFsPgBk5ACAaOQAgGzkAIBw5ACAdOQAgHjkAIB85ACAgOQAgL5sBwDvVAAA75w+AIjkAICjnQIAgmkAAIFhAACAaQAAjOQAgKZxAgCltQIAkOQAgKtVAgCqVQIAhsgEAIfsBACv+QEArvEBAK1FAgCsRQIAqKUGAKmpBgCquQYAq7kGAKypBgCtqQYArtkGAK/ZBgCE5ACAlOQAgJjkAICc5ACAoOQAgKTkAICo5ACArOQAgLhxBwC5cQcAunUHALvdBwC8xQcAvc0HAL7FBwC//QcAsKkGALG1BgCytQYAs40GALSVBgC1UQcAtlEHALdRBwCzMQYAsOQAgLTkAIC45ACAvOQAgLYpBgC1IQYAwOQAgLtxBgC6bQYAxOQAgMjkAIC/lQcAvlEGAL1ZBgC8YQYAzOQAgKN1BgDQ5ACA1OQAgKZtBgDY5ACA3OQAgKVlBgCqKQYAqzUGAODkAIDk5ACArhUGAK/RBwCsJQYArR0GAIANAACBFQAAgh0AAOjkAIDs5ACA8OQAgITcAQD05ACAhoAAAIcgAQD45ACA/OQAgADlAIAE5QCACOUAgAzlAIAQ5QCA43QEABTlAIDhyAUAGOUAgBzlAIAg5QCAJOUAgCjlAIAs5QCAMOUAgDTlAIA45QCA77QEADzlAIBA5QCAqD0GAKlVBgCqVQYAq6kBAKy5AQCtuQEArqkBAK+pAQCErAEAROUAgEjlAIBM5QCAUOUAgFTlAIBY5QCAXOUAgLhtAQC5BQEAugEBALsBAQC8BQEAvQ0BAL4xAQC/MQEAsNkBALHZAQCybQEAs2UBALR9AQC1ZQEAtmUBALdVAQCBvQMAgL0DALPVBQCCGQAAtTkCAGDlAIC+VAMAtjECAGjlAIBs5QCAuxUCALoVAgC9uQIAvLECAL+pAgC+sQIAcOUAgKZpAgClYQIAhAAMAKONBQB05QCAhvgMAId8AwCv8QIArukCAK3hAgCs6QIAq00CAKpNAgB45QCAfOUAgIDlAICE5QCAiOUAgIzlAIDjIAEAkOUAgOGgAQCU5QCA70ACAJjlAICc5QCAoOUAgKTlAICo5QCArOUAgLDlAICz8QM
AtOUAgBTkAIC45QCAvOUAgLbpAwC14QMAwOUAgLu1AwC6tQMAxOUAgMjlAIC/lQMAvpUDAL2lAwC8pQMAqCkCAKkpAgCqOQIAqzkCAKwpAgCtKQIArlkCAK9VAgCAzQEAgQkAAIIZAADM5QCA0OUAgL58DQCHtA0AhhwMALgxAgC5PQIAujUCALvpAgC8+QIAvfkCAL7pAgC/6QIAsDECALExAgCyMQIAszECALQRAgC1EQIAthECALcRAgDY5QCA3OUAgODlAIDk5QCA6OUAgOzlAIDw5QCA79QGAPTlAIDhVAYA+OUAgOOkAACsDBUA/OUAgADmAIAE5gCAo/ECAAjmAIAM5gCAEOYAgBTmAICm6QIApeECABjmAICrtQIAqrUCABzmAIAg5gCAr5UCAK6VAgCtpQIArKUCAKghDgCpIQ4AqkkOAKtZDgCsaQ4ArWkOAK6ZDgCvmQ4A1OUAgCTmAIAo5gCALOYAgDDmAIA05gCAOOYAgDzmAIC49Q4Auf0OALr1DgC7iQ4AvJ0OAL2FDgC+hQ4Av7UOALDpDgCx6Q4Asv0OALPxDgC01Q4Atd0OALbVDgC3zQ4As8EOAIIVAACBtQAAgLUAAEDmAIC26Q4AteEOAL4QAAC7LQ4Aui0OAIRkAwBE5gCAvxkOAL4RDgC9JQ4AvCkOAEjmAICjhQ4AhogAAIdsAwCmrQ4ATOYAgFDmAIClpQ4AqmkOAKtpDgBU5gCAWOYAgK5VDgCvXQ4ArG0OAK1hDgCziQ4AXOYAgGDmAIBk5gCAaOYAgLaBDgC1iQ4AbOYAgLuVDgC6jQ4AcOYAgHTmAIC/+Q4AvvEOAL2FDgC8hQ4AeOYAgHzmAICA5gCAhOYAgOMMDQCI5gCA4RgNAIzmAIDvrAwAkOYAgJTmAICY5gCAnOYAgKDmAICk5gCAqOYAgKgBDgCpAQ4AqgEOAKsBDgCsAQ4ArQEOAK4BDgCvPQ4AgN0AAIEJAACCGQAArOYAgLDmAICEPAEAvnQAALjmAIC4HQ4AuS0OALolDgC76QEAvPkBAL35AQC+6QEAv+kBALBJDgCxUQ4AslEOALNRDgC0NQ4AtT0OALY1DgC3LQ4Ao4kNALzmAICGrAQAhzwDAMDmAICmgQ0ApYkNAMTmAICrlQ0Aqo0NAMjmAIDM5gCAr/kNAK7xDQCthQ0ArIUNANDmAICznQIAhEgDAL5ABAC2VQMA1OYAgNjmAIC1sQIAunEDALt5AwDc5gCA4OYAgL4xAwC/MQMAvFEDAL1RAwCwkQMAsZkDALKhAwCzoQMAtNEDALXRAwC20QMAt9EDALj1AwC5+QMAus0DALvFAwC83QMAvcUDAL7NAwC/xQMA5OYAgOjmAIDs5gCA8OYAgIV8GQD05gCA+OYAgGTlAICoIQIAqTECAKoxAgCrBQIArB0CAK3xAwCu8QMAr/EDAPzmAIAA5wCABOcAgAjnAIDvUAAADOcAgBDnAIAU5wCA44QAABjnAIDh+AEAHOcAgIAVAACBGQAAggUAACDnAICjmQMAKOcAgIZoBACHYAUALOcAgKZRAgCltQMAMOcAgKt9AgCqdQIANOcAgDjnAICvNQIArjUCAK1VAgCsVQIAPOcAgEDnAIBE5wCASOcAgEznAIBQ5wCAVOcAgO/4AQC+bAQA4YAOAFjnAIDjFAEAXOcAgGDnAIBk5wCAaOcAgGznAIBw5wCAdOcAgLPdAQB45wCAtf0BALb1AQB85wCAgOcAgITnAIC6sQEAu4UBALydAQC9NQEAvj0BAL81AQCpBQYAqLkFAKsVBgCqHQYArT0GAKw9BgCvTQYArl0GACTnAICCHQAAgR0AAIAdAACI5wCAjOcAgJDnAICU5wCAuUEHALidBgC7QQcAukkHAL1FBwC8WQcAv0UHAL5FBwCxCQYAsD0GALOpBgCyAQYAtbkGALSxBgC3rQYAtrEGAKORBgCEjAIAhigAAIfAAwCY5wCAprkGAKWxBgCc5wC
Aq8kGAKr9BgCg5wCApOcAgK95BgCucQYArXkGAKzRBgCo5wCAs5kHAKznAICw5wCAtlEHALTnAIC45wCAtbEHALptBwC7dQcAvOcAgMDnAIC+WQcAv0UHALxtBwC9ZQcAxOcAgMjnAIDM5wCA0OcAgNTnAIDY5wCA3OcAgO+oBQDg5wCA4TQFAOTnAIDjdAUA6OcAgOznAIDw5wCA9OcAgKMdBgCCLQAAgRUAAIAdAAD45wCAptUGAKU1BgD85wCAq/EGAKrpBgAA6ACAhCgBAK/BBgCu3QYAreEGAKzpBgCoxQYAqdUGAKrVBgCr5QYArP0GAK0VBgCuHQYArxUGAL7sAQAI6ACAhggAAIcgAAAM6ACAEOgAgBToAIAY6ACAuH0GALkFBgC6DQYAuwUGALwBBgC9CQYAvjkGAL85BgCwbQYAsXUGALJ9BgCzdQYAtFkGALVFBgC2TQYAt0UGAKiRAgCpmQIAqqECAKuhAgCs0QIArd0CAK7VAgCvyQIAHOgAgCDoAIAk6ACAvyweACjoAIAs6ACAMOgAgDToAIC4VQMAuV0DALppAwC7ZQMAvGEDAL1hAwC+YQMAv2EDALC5AgCxjQIAsoUCALNtAwC0dQMAtX0DALZ1AwC3bQMAOOgAgDzoAICzIQIAQOgAgLVRAgCEiAMAROgAgLZVAgC05gCAvigcALtBAgC6dQIAvbEDALxZAgC/sQMAvrkDAKNpAgBI6ACATOgAgFDoAIBU6ACAph0CAKUZAgBY6ACAqwkCAKo9AgBc6ACAYOgAgK/5AwCu8QMArfkDAKwRAgCopQIAqbUCAKq9AgCrtQIArK0CAK01AQCuPQEArzUBAL4sHABk6ACAaOgAgGzoAIBw6ACAeOgAgIdoHQCGHB0AuIUBALmNAQC6hQEAu50BALyNAQC9vQEAvrUBAL95AACwUQEAsVEBALJRAQCzUQEAtPEBALXxAQC29QEAt+UBAO/YAACCtQAAgaUAAIClAAB86ACAgOgAgIToAIDvxAYAiOgAgOH0BgCM6ACA4zgBAOPMAACQ6ACA4SgBAJToAICY6ACAtuUBALV1AgCEQBwAs2UCAJzoAICg6ACApOgAgL9lAQC+ZQEAvdUBALzVAQC7xQEAusUBAKjoAICs6ACAo7UdAHToAICw6ACAtOgAgLjoAICmNR4ApaUdALzoAICrFR4AqhUeAMDoAIDE6ACAr7UeAK61HgCtBR4ArAUeAMjoAIDM6ACA0OgAgNToAICADQAAgTUAAII9AADY6ACA3OgAgODoAIC1BQAAcRoAgOG0AgCs2AIAtQUAAHUaAICotR8AqRUfAKodHwCrFR8ArDEfAK09HwCuLR8AryEfAOG0AgCs2AIAtQUAAHkaAIDhtAIArNgCALUFAAB9GgCAuNEAALnZAAC64QAAu+EAALyRAAC9kQAAvpEAAL+RAACwIR8AsTEfALIxHwCzMR8AtAkfALUJHwC28QAAt/EAAOG0AgCs3AIA71QdALUdAACBGgCA4bwCAKzQAgC1KQAAoyUBAKKRAwChFR0AoA0dAOGAHgCFGgCA47wdAOHEAgCz1R4AtQkAAKzYAgCJGgCA4bwCALb9HgC1+R4ArOACALu1HgC6pR4AtQUAAI0aAIC/jR4Avo0eAL2lHgC8pR4AoxUeAOG8AgCs0AIAtREAAI9pJQCmPR4ApTkeAJEaAICrdR4AqmUeAOG0AgCseAEAr00eAK5NHgCtZR4ArGUeAJvdFACa5RUAmQEXAJjhEACfcR8AnnkZAJ35GQCcARsAk+UtAJIRLwCRbSkAkG0pAJf5EQCW8REAlYUsAJSZLQC1JQAA4ZQCAILxJgCDjSoAhJUqAIXhLACGHS4Ah3kuAKy0AgCVGgCAilUvAIspEgCMORIAjRkTAI7xFACPHRYAtQUAAJkaAICSVRcAk5EYAJRxGgCV+RoAlvkcAJd9HgCC4AMAkwsAgJpVHgCb2QAAnHUCAIMMAICzDAC
AuIkKAKwBBACthQYAroEGAMwQAgDMfAMAtgwAgJ0aAIDCDACAxQwAgMgMAIAACwCAgaUyArwMAIAE6ACAmpUGAJtVIwK8kQYAvbEAAL6RBgC/rQYAuOkGALmVBgC6kQYAoRoAgLTBBgC1zQYAts0GALfdBgCw/QYAseUGALKdAACz5QYAhVTHA6UaAICH/AAAuAEKAK0aAIDpDACAsRoAgIyRcwCNpAEAzPACAL4NAIDBDQCAiRQAALgZCgCLDAAAGg4AgFMOAIC5DACAvwwAgBkKAICRwAEAywwAgLhtCgDODACA1AwAgNoMAIDdDACA4AwAgLUaAIAoDQCA5gwAgLkaAIDhpB4AKw0AgONUHgCvIXMAzCgCAO8MAIDsDACA8gwAgPUMAID4DACAzIACAJS4AwD7DACAkhQCAO9gHgCQAAIA/gwAgAoNAIC48QoADQ0AgJ8LAIAQDQCAiSkLABMNAICpGgCAvDABAL/EAQC+7AEAFg0AgMzsAgC4xQoAukQBAK0JAIAZDQCAygYAgN8GAIDyBgCAHA0AgPoGAIAfDQCACgcAgC0HAIAYBwCA9gcAgC8HAICpDQCAOgcAgK8NAIBKBwCAtXkAAGcHAIC3cSoCcgcAgLFhAAB0BwCAsw0pAo0HAIC96QAAoAcAgPoHAICtBwCAuRkrAsMHAIC7WRQCHwgAgFoJAIA8CACALw4AgFsIAIA5AACAgQgAgHEAAIDHCACAKwAAgCAJAIA9AACAXAkAgEMAAIBeCQCARQgAgGoIAIBJAACAAAgAgFMAAIB5CQCAWQAAgCINAIBfAACAuw0iAtANAIDMFDYCHwAAgL9lAAC+EQAAvW0AAOUHAICAaQEAgXUBAIJxAQCD3SEChGkHAIWBBwCGgQcAh3EBAIihAQCJrQEAirUHAIuNBwCMlQcAjaUBAE8AAICPpQEAkOEBAJHtBwCSsSECk/0HAJSNBwCVUQYAlvEBAJfZAQCY0QEAmXUGAJp9BgCb1QEAnGkGAJ2ZFAKeUQYAn1EGAKB1FAKhuQYAokkBAKOFLQKkIQEApS0BAKZ1FAKntQYAqKERAqlRFAKqlQYAsSEAgMy8NQLNPDUCbQAAgKoDAICsAwCArwMAgL0hAIDEIQCA2yEAgOIhAIDJAACADwAAgLihBgC6BgCAtwYAgMwAAIDOIQCAtQMAgN0FAIAYBgCAugUCALvVAgC46QUAuf0FAL7JAgC/5RcCvA0CAL0BAgCy4QUAs+EFALCNBQCxnQUAtuUFALfpBQC09QUAte0FAKo9BQCrwQUAqD0FAKk1BQCuzQUAr/UFAKzNBQCtxQUAoj0FAKMFBQCg1QIAoTkFAKYdBQCnBQUApB0FAKUVBQC/BgCAm8EFAD4GAIBVBgCAnt0FAJ8xBACcUQIAndUFAHIGAICJBgCApAMAgDAiAIDbAACAoAMAgI8HAIDuBwCA8gcAgJAJAIACCACABggAgJYLAICUCQCArwoAgG8HAICLBwCAlwcAgKIHAICqBwCAqgkAgPsOAIASDwCAHw8AgMwEMwLNsDACzCAzAs3gMALMEDACzGgwAsxYMALNjDACzGgxAs0UMQLM1DECzRQ2AsxwIALN0CcCzDA2AswkMQLMDDwCzWg/AswYPwLNND8CzBg9As3AMgLMRDwCzBg5Asw4MgLNqDICzIgyAs34MwLMfDMCzUAzAswoMwLNCDMCzMghAs0kJgLMrCYCzEA4AsyYJQLNyDoCzBwkAs0QJALMhDsCzag7AsysJQLNvDoCzKw4Asz4JwLM4DgCzXQ4AicPAID2BgCAYQ0AgIgNAIDNICoCzBwrAqoGAIAsIgCAzKQgAs2gJwLMOCYCygQAgMw4OgLNPDsCzBA5As1gPgLMoAMAvj0NAL3tLALWBACAu1UjAgQJAIC5PSICzwYAgNkHAIClBACAoA0AgLIEAIBvBQCA9AYAgL4EAIB1BQCAr70MAK6ZLgKtpQwAwgUAgKvFIgIDBgC
AxAQAgCMGAIDQBACAyAUAgCkGAIBdBgCAowEYAqAEAIAaBwCAHQcAgJ9dDACeUQwAnUUMACcHAICbWSECrwcAgLEHAIC0BwCAuAcAgCoHAIDOBwCA0AcAgJMtJgLTBwCAbAgAgG8IAICPBQwAjnEMAI1lDAB5CACAi0UgAmAJAICJNS8CYwkAgGcJAIB8CACAcAkAgHMJAIC9AwCAACIAgIFdDACAYQwAgAABAIEYAACCAAQABCIAgIQQBwCFFAYAhuQIAIc8AgCILAUAiaQFAIoAeAAIIgCAjCQAAAwiAIAUIgCAECIAgLgRAACRxHsAkkh6AJNMeQAcIgCAzOgCAJbwCQC4OQAAkMAJACQiAICS8AkAzPgCAJS0CQC4DQAAKCIAgMwcAgC4BQAANCIAgMzkAgC4HQAAOCIAgDwiAIBDIgCAWiIAgKiMCACp5HsAYSIAgKvUBgDM5AIAuA0AAGsiAIDMlAIAbyIAgLGAewC4CQAAuBUAAMz8AgC15AgAcyIAgMzYAgB3IgCAuAUAALqcBQC7XAUAvAB8AL30fwC++H0Av/xyAIAJOgKBDToCggE6AoMFOgKEGToChR06AoYROgKHFToCiCk6AoktOgKKIToCiyU6Aow5OgKNPToCjjE6Ao81OgLM8AIAkekPAIMiAIDMzAIAuBkAAH8iAIDM3AIAl+UPALg1AAC4DQAAjyIAgMz8AgC4BQAAkyIAgMwwAgCXIgCAzNACAJsiAICfIgCAzIgCAKQtDwClVQ8Apl0PAMyUAgCoqToCqa06ArjVAACjIgCAuDUAAKciAIDMUAMAr7U6AswsAwCrIgCAzBgDALMFDwC0HQ8AzyIAgLYJDwC3CQ8Avmh9ALhtAAC4RQAAzDgDALwpDwDTIgCAviUPAMxYAwCH5Q4AzOg6Ari9AQC4yQEAzPA1As2kMwLMgCICzXwlAs2UNgLMBCkCzew7AsxkOgK45QEAuMEBAInVDgCI1Q4Al7EOALgNAACvIgCAsyIAgLciAIC4GQAAuyIAgNciAICfaTsC2yIAgL8iAIC4PQAAzMQCAMz4AgDDIgCAxyIAgLjZAADLIgCA3yIAgLjRAADjIgCAuPEAAMzMMwLnIgCAuMkAAMzoMwLrIgCAuNUAAKllAAC4yQAAzNgCAKq5BgC3TQ0Atk0NALU1DgC0NQ4AuFUAABUjAICxGQ8AsCkOAL/1AwC+UQ0AvVkNALw1DAC7XQ0Aul0NALldDQC4XQ0AgL0KAIHFCgCCFQQAg8kKAMx8BQCF3QoAhtUKAIfNCgDMVAUAifEKAIq5CACLDQgAjBEIAI0VCACOtScCj+UKAJBpCACRbQgAknEIAJNtJALMEAUAlR0IAJaFCgDMEAUAzDQFAJk9CACaiQoAmw0IAJwRCACdFQgAzEgFAMwQAgCgZQoAoW0KAKJlCgC4BQcApLEEAMzoAgCmsQQAuA0HAKiBBADM/AIAqpkIAKtdCgCsuQgArakEALglBwCvNQgAsNEIALHxBADMwAIAs40IALQpKAK1IQoAtiEKALchCgC4IQsAuSUIALhBBwC7KQsAvA0dAr3dDwC+MQsAvzELAIDdCgAZIwCAnKF9ANADAIDpAwCAhRkJAIaZCQCHlQkAiOEJAIklJQICBACAGwQAgC4EAIBBBACAVAQAgGcEAICQrQoAkUkFAJJtBQCTYQUAlGEFAJVtBQCWZQUAlxEFAJg1BQCZPQUAmjUFAJsNBQCcFQUAnR0FAJ4VBQCfCQUAoKkJAKH9BQCi9QUAowEFAKQFBQClDQUApgUFAKc9BQCoBQUAqQ0FAKoFBQCrGQUArIkJAK2pBQCutQkAr/0JALABCQCxfQUAsnUFALMBBQC0aQkAtQEFALYFBQC3PQUAuAUFALnhJQK6AQUAuwEFALzRJQK9PQkAvnkJAL9dCQCDMAUAoXgHAJ+xfgB6BACApHgHAKVIBwCNBACA8wQAgIt8BADdAACAEwEAgIhIBAAcAQC
AIAEAgCQBAIAoAQCALAEAgDABAICyAAcAs/wHADQBAIDhAACAtuQHALfwBwDmAACA6wAAgLrgBwC7nAcAvIgHAL2oBwDwAACAs8F+AKPMBAD1AACA+gAAgIMABAD/AACAhXQEAKUgBAAEAQCAiEwEAAkBAIAOAQCAFwEAgK8tBwCNxAcArSEHAKwpBwDNAwCA8AQAgI8FAICwZQcA4gUAgB0GAIBDBgCAWgYAgHcGAICOBgCA0wMAgOwDAIAFBACAHgQAgDEEAIC8fAQAgt0rAoPlKwKA/QoAgfkrAoaZCQCHmQkAhOEKAIXhCgCKiQkAi4kJAIiJCQCJiQkAjoUJAEQEAICM4QgAjY0JAJK5KwKTQScCkJkrApHFCwCWyQsAl3UnApTFDQCV0SQCmskLAJvZKgKYyQsAmXkHAFcEAIBqBACAnP0LAH0EAICQBACA9gQAgKABAICkAQCAqAEAgONkAgCsAQCAsAEAgLQBAIDvvAcAqBEJALgBAIC8AQCAwAEAgMQBAIDIAQCAzAEAgNABAIDUAQCA2AEAgNwBAIDgAQCA5AEAgOgBAIDsAQCA8AEAgPQBAID4AQCA/AEAgAACAICCnH4ABAIAgKD1VAKh2VQCoulUAqP1dQCk7XUApZ12AKaVdgCnvXYAqIV2AKkpfQCqOX0AqwV9AKwdfQCtBX0Arg19AK8FfQCwfX0AsUl+ALJRfgCzUX4AtHV+ALV9fgC2aX4At2l+ALhZfgC5WX4Auil+ALspfgC8IX4AvSF+AL4ZfgC/GX4AkgcAgDkJAIDXBwCATSIAgLQNAAC1NQAAtj0AAKIGAICsBgCArwYAgAMjAIAJIwCAvSV4ALy1WALGMQCALjoAgJkqAIC9KgCAySoAgNkqAIDhKgCA7SoAgPUqAID9KgCACSsAgF0rAIB1KwCAhSsAgJUrAIClKwCAtSsAgNUrAICAeX8AgYF/AIKBfwCDnX8AhI1/AIWxfwCGsX8Ah7F/AIjhfwCJ4X8AiuF/AIv9fwCM5X8Aje1/AI7lfwCP3X8AkKV/AJGtfwCSpX8Ak71/AJSlfwCVrX8Alm1+AJctfgCYFX4AmRl+AJrpfgCb6X4AnPl+AJ35fgCe6X4An+V+AKAdfgChJX4AoiV+AKM9fgCkJX4ApS1+AKYlfgCnXX4AqGV+AKltfgCqZX4Aq31+AKxlfgCtbX4ArmV+AK9dfgCwJX4AsS1+ALIlfgCzPX4AtCV+ALUpfgC2WXcAt9V1ALj9eQC56XUAuvl1ALvZeQC86XUAvdV1AL7RdQC/2XUAgDF2AIE9dgCCSXYAg0V2AIRBdgCFTXYAhvl0AId9dgCIoQIAiU12AIpZdgCLuXoAjEl2AI2degCOsQIAjx16AJCRVgKRKXYAkoF2AJPNdgCU2XYAlel2AJbJdgCX0VkCmKF2AJllWgKa8XYAm01aApzRdgCdYXoAnoFWAp/VdgCgBQIAoY1aAqI1VwKjCXYApCF2AKUtdgCmiVoCp5laAqi5WgKpdXYAql13ANkrAIDdKwCAESwAgDksAIBJLACAUSwAgFUsAIBhLACAfSwAgIEsAICZLACAnSwAgKUsAIC1LACAUS0AgGUtAIClLQCAuS0AgMEtAIDFLQCA1S0AgJl1CgD4LQCAJC4AgDAuAIBQLgCAXC4AgGAuAIBkLgCAgux6AINkewB8LgCAgC4AgIZ0ewCHvHsArC4AgLguAIDALgCAyC4AgNguAIDnLgCA7y4AgBsvAIAfLwCAJy8AgJJwfAArLwCAMy8AgJFMfAA7LwCASy8AgGcvAIDfLwCA8y8AgKvMfACo5HwAqdx8APcvAIB3MACAezAAgI8wAICiwHwAkzAAgJswAICjMACAzEBJAs0ASQLM/EoCzWhLAqswAIC3MACA7TAAgP0wAIARMQCAjjEAgJoxAICqMQCAsqx8ALNAfAC2MQCAwjEAgMoxAIDOMQCAtGx8ALUEfACAlQcAgZ0HAIKVBwCDqQc
AhLkHAIW5BwCG2QcAh9kHAIjpBwCJ6QcAivkHAIv5BwCM6QcAjekHAI7RBwCP0QcAkLEHAJGxBwCSSQEAk0kBAJRZAQCVWQEAlkkBAJdJAQCYeQEAmXkBAJpJAQCbSQEAnFkBAJ1ZAQCeSQEAn0kBAKC5AQChuQEAoskBAKPJAQCk2QEApdkBAKbJAQCnyQEAqPkBAKn5AQCqyQEAq8kBAKzZAQCt2QEArskBAK/JAQCwuQEAsbkBALJJAQCzSQEAtFkBALVZAQC2SQEAt0kBALh5AQC5eQEAukkBALtJAQC8WQEAvVkBAL5JAQC/SQEA0jEAgNYxAIDaMQCAkjIAgNoyAIDmMgCA6jIAgO4yAIDyMgCA+jIAgP4yAIASMwCALjMAgDYzAIB2MwCAejMAgIIzAICGMwCAjjMAgJIzAIC2MwCAujMAgNYzAIDaMwCA3jMAgOIzAID2MwCAGjQAgB40AIAiNACARjQAgIY0AICKNACAqjQAgLo0AIDCNACA4jQAgAY1AIBKNQCAUjUAgGY1AIByNQCAejUAgII1AICGNQCAijUAgKI1AICmNQCAwjUAgMo1AIDSNQCA1jUAgOI1AIDqNQCA7jUAgPI1AID6NQCA/jUAgJ42AICyNgCAnoUMAOY2AIDqNgCA8jYAgIC5AwCBuQMAgskDAIPJAwCE2QMAhdkDAIbJAwCHyQMAiPkDAIn5AwCKyQMAi8kDAIzZAwCN2QMAjs0DAI/FAwCQvQMAkQEMAJJJDgCTSQ4AlFkOAJVZDgCWSQ4Al0kOAJh5DgCZeQ4AmkkOAJtJDgCcWQ4AnVkOAJ5JDgCfSQ4AoLkOAKG5DgCiyQ4Ao8kOAKTZDgCl2Q4ApskOAKfJDgCo+Q4AqfkOAKrJDgCryQ4ArNkOAK3ZDgCuyQ4Ar8kOALC5DgCxuQ4AskkOALNJDgC0WQ4AtVkOALZJDgC3SQ4AuHkOALl5DgC6SQ4Au0kOALxZDgC9WQ4AvkkOAL9JDgC8eQQAvXkEAL6JBAC/nQQAuHUEALl9BAC6aQQAu2kEALRxBAC1cQQAtnEEALdxBACwcQQAsXEEALJxBACzcQQArGkEAK1pBACucQQAr3EEAKhBBACpQQQAqkEEAKtBBACknQUApWEEAKZhBACnYQQAoJ0FAKGFBQCijQUAo4UFAJxdBQCdZQUAnm0FAJ9lBQCYXQUAmUUFAJpNBQCbRQUAlB0FAJVlBQCWbQUAl2UFAJAdBQCRBQUAkg0FAJMFBQCMMQcAjTEHAI4xBwCPMQcAiDEHAIkxBwCKMQcAizEHAIQxBwCFMQcAhjEHAIcxBwCAMQcAgTEHAIIxBwCDMQcAJjcAgC43AIA2NwCAcjcAgHY3AIB+NwCAgjcAgIY3AICyNwCAtjcAgL43AIDSNwCA1jcAgPI3AID6NwCA/jcAgCI4AIBCOACAUjgAgFY4AIBeOACAijgAgI44AICeOACAwjgAgM44AIDeOACA9jgAgP44AIACOQCABjkAgAo5AIAWOQCAGjkAgCI5AIA+OQCAQjkAgEY5AIBeOQCAYjkAgGo5AIB+OQCAgjkAgIY5AICOOQCAkjkAgJY5AICaOQCAnjkAgK45AIDGOQCAyjkAgNY5AIDaOQCA3jkAgOI5AIDqOQCA7jkAgPI5AID+OQCABjoAgA46AIASOgCAGjoAgIC5AQCBuQEAgskBAIPJAQCE2QEAhdkBAIbJAQCHyQEAiPkBAIn5AQCKyQEAi8kBAIzZAQCN2QEAjskBAI/JAQCQuQEAkbkBAJIRAACTEQAAlDEAAJUxAAAeOgCAIjoAgCo6AIAyOgCAPSMAgGUsAIBpLACAJSQAgIJgAgCZ4QAAgIAAAIGYAACC5AYAg4gEAITUGwCFlBoAhhgfALMjAICIxB4AiQAQAIqoEwCLrBEAjAAoAI20KwCOuCoAj7wpAOOwAgC+dAIAnlUAAOMUAgCCbAIAtyMAgJkNAAC+RAIAnjUAAIJoAgCZBQA
AuyMAgO/MAgC+oAAAgoQAAO/YAgDj7AEA4/QBAL8jAIDjCAMAwyMAgOM4AwDHIwCA44gDAMsjAIDv4AMAzyMAgO+IAwDvPAEA78QDANMjAIDv1AMA4+wDAB43AIDXIwCA4+wDAOPsAwDj5AMA2yMAgOO4AwDvXAMA70wDAN8jAIDvSAMA7/QDAOMjAIDnIwCA7zQDAON8AwDjlAQA6yMAgO8jAIDzIwCA47QEAPcjAID7IwCA/yMAgO9sBAADJACAByQAgO9YBADvUAQACyQAgBYkAIAaJACAvQAAgOP4BADCAACAMSQAgB4kAIBtKQCA45wEAAglAIBrJQCAriUAgO9QBADaJQCABCYAgO88BAApJgCAgAlLAoYcdwC+RAIAgnQCAL5QAgA+JgCAmREBAJkNAQCPrAIAggQCAI1oAQCewQIAi3wBAJ49AQCeKQEAvggCAJfQAgCZXQEAldACAJ5VAQCT0AIAmXUBAJHQAgC+SAIAn7gCAEYmAICdtAIAnk0BAJuwAgCZXQEAmbQCAL6EAgCeqQEApowCAGImAICkgAIAmakBAGomAIChSAIAgqwCAK/kAgCCtAIAglwCAJnlAQC+CAIAgnwCAIIABACopAIAnvkBAL5wAgC1HAQAnoUBAL6oBQCyhAIAtrECAL6sBQC4KQkAuYkCALqZAgCCjAUAu+gEAIKcBQByJgCAuPAEAJ5ZBgCZbQYAnmEGAJl5BgC+fAIAnmEGAIJcAgC+QAIAmVkGAJ5dBgCCYAIAmaUGAL58AgCevQYAghwCAL4UAgCZzQYAvkwCAIJMAgCa3QYAnt0GAJ/FBgDjDAIAgrwCAJn5BgC+ZAIA7/QCAJrxBgCe6QYAn+kGAJ7ZBgCf1QYA4wQCAJklBgCaIQYAgngCAJk9BgDjBAIAgkQCAJolBgC+cAIA75wCAJ4FBgCfFQYA7+gCAJp1BgCZBQYAggQCAL5wAgDjcAIAnnUGAJ8NBgCeAQYAvnwCAOM0AgCZDQYAvmACAIJsAgDv8AIAmTUGAIKQAwDv2AIAniEGAIQmAICbxQcAmeUHAL58AgCe7QcAn8UHAOPsAwCdUAIAnNEHAIJsAgDv1AIAmc0HAIJ8AgC+cAIAmd0HAJ7dBwC+AAIA42gCAJ6tBwCZuQcA42gCAIJ8AgDjDAIAvkgCAJmpBwCCWAIA78QCAJ6ZBwC+bAIA77gCAIKUAgCejQcA77gCALsAAACZeQcAuQwAAJ5xBwC/AAAAglQCAL0EAAC+aAIAs9QDAJmxBgCxcAMAggQCALc4AACeoQYAtTQAAL5wAgCrWAMAnqEGAO9cAgCZqQYArxADAIJQAgCtFAMAmYUHAJlpBgC+WAIAnmEGAL58AgCCaAIApqACAOOQAgCZaQYA43wBAOOYAQDjrAEA49ABAOPoAQC+dAIAno0FAOMwAgDvzAIAgmgCAJnRBQDvlAIA71QBAO9wAQDvJAEA7ygBAL58AgCevQUA4wwCAIJ4AgCZrQIAvnQCAJ6lAgDjNAIAgmACAJkZAAC+YAIA7/wCAJ4NAACClAIA79QCAJAmAIDj/AIAmQkAAL5gAgCYJgCAnh0AAOMAAgCwJSoAglgCAJkNAADv9AIAvmQCAK4mAIDvwAIAnhkAAIIYAgCCOAIA43ACAJkRAACaNQAAmSkBAL50AgDsJgCAnyUAAJ4JAACZ6QEAvrQDAL7gAwCazQEA79gCAJ4RAQCC2AMA/SYAgIHEAgDjsAMAHycAgOP8AwC+/AIAhMQCAIIoAgCGEAIAKicAgIg8AgCeIQAAnw0AAHonAIDvKAMAj3QCAO8sAwCCiAIAmXUAAJoVAACSxAMAldADAJktAACa0QAAjicAgL7IAgCYaAMAm3wDAILEAwCeQQAAnykAALAnAICChAIA45ACAL4IAwC+JwCABigAgJ8ZAACe7QAA49ACAJlxAACaFQAAvhQCAO8wAgCZIQAA71gCABQoAICv7AMAggQCALFMHACwABw
AniUAALJMHACeXQAAn2EAAOO8AgCZIQAA+QAAAHEpAIDvlAIAdSkAgL08HACCgB0Av8EfAHkpAIDjtB0AvnQCAJ71HwDj8B0AmQUAAH0pAIC+fAIAngkAAIJgAgCZDQAAiSkAgL5gAgDvzAIAnh0AAOklAIDv3AIA42gCAPkYAIDjPB0AIRoAgP0YAIABGQCAJRoAgCkaAIAtGgCAMRoAgDUaAIA5GgCA76QCAD0aAIDvJB0AQRoAgLHFAAAFGQCAs8UAALLdAAC1yQAAtMEAALcdAAC2wQAAuWUAALhlAAC7zQAAus0AAL3dAAC83QAAv8UAAL7JAAAJGQCADRkAgE0ZAIBhGQCAERkAgBUZAIDvFHgD7wBIA+HYTQPhOKgC41x5A+O0UAOtGQCAsRkAgLUZAIC5GQCAgMkBAIHVAQCC3QEAg20CAITdAQCFcQIAhgEEAIcdBQCIJQUAiTUFAIo9BQCLbQUAjHUFAI1lBQCObQUAj80BAJC1AQCRvQEAkrUBAJNNAwCUVQMAlV0DAJZVAwCXTQMAmHUDAJl9AwCadQMAm00DAJxVAwCdWQMAnkkDAJ9JAwCguQMAobkDAKLBAwCj3QMApMUDAKXNAwCmxQMAp/0DAKjJAwCpyQMAqtEDAKvRAwCsMQMArTEDAK4xAwCvMQMAsFEDALFRAwCyUQMAs1EDALRxAwC1cQMAtnEDALdxAwC4UQMAuVEDALpRAwC7UQMAvDEDAL0xAwC+MQMAvzEDAL0ZAIDBGQCAxRkAgMkZAIDNGQCA0RkAgNUZAIDZGQCA3RkAgOEZAIDwIAIA5RkAgOkZAIDtGQCA8RkAgPUZAICc9TYAnf02APkZAICRkAIA/RkAgKkZAIBFGQCASRkAgEUaAIC6adgASRoAgE0aAIC4sTYAubE2AFEaAIBVGgCAWRoAgF0aAIBRGQCAYRoAgGUaAIBVGQCAWRkAgF0ZAIBlGQCAaRkAgG0ZAIBxGQCAdRkAgHkZAIB9GQCAgRkAgIUZAICJGQCAjRkAgJEZAICVGQCAglgCAJkZAIBpGgCA8FgCAG0aAICdGQCAoRkAgKUZAIABGgCABRoAgJF0AwDhtDsCCRoAgOPYIgINGgCAERoAgBUaAIAZGgCAHRoAgKUqAIBVLQCAqSoAgMEqAICtKgCAljMAgO/IPwK1KgCA4ZTzAuGY0gLjlPcC4xDGAuGUtgLhkJ0C44SiAuMIhwIZGQCAHRkAgO+4swLvOIsCnSoAgOAtAIDvIJcC7+DgAoLkAgBpLQCACAIAgLrF2QAOAgCAFAIAgBoCAIAgAgCAJgIAgCwCAIAyAgCAOAIAgD4CAIBEAgCASgIAgFACAIDhgHgC8OQGAOMUagKCgAgA4aAPAuEIEwLjhA4C4xgeAlYCAIA0AwCA7zQ7Au8wHwI6AwCAQAMAgO8MEgJGAwCAJRkAgCkZAIBMAwCAUgMAgC0ZAIAxGQCAWAMAgF4DAIB2AwCAggMAgIgDAICOAwCAlAMAgJoDAIB8AwCAZAMAgDUZAIA5GQCAbQMAgFwCAIA9GQCAQRkAgHQCAIBoAgCAvAIAgHoCAICYAgCAYgIAgJICAIBuAgCApAIAgNQCAICAUQYAgV0GAIJVBgCDaQYAhHkGAIV5BgCGaQYAh2kGAIhZBgCJoQcAiqUHAIu9BwCMpQcAja0HAI6lBwDyAgCA7AIAgOACAICSCRQAkxUUAJTxBwCV8QcAlvEHAJfxBwCY0QcAmdEHAJo5FACb0QcAnIEHAJ2BBwCefQcAnx0UAJktAQCYLQEAmz0BAJo9AQCdLQEAnC0BACEZAICeVQEAkd0GAJDRBgCTJQEAkiUBAJUtAQCULQEAlx0BAJYdAQCJ8QYAiOkGAIvxBgCK+QYAjbEGAIzpBgCPqQYAjrkGAIHxBgCA7QYAg/EGAIL5BgCF0QYAhOkGAIfRBgCG2QYAua0DALitAwC7vQMAur0DAL2tAwC8rQMAv90DAL7dAwCxrQM
AsK0DALO9AwCyvQMAta0DALStAwC3nQMAtp0DAKm5AQCosQEAq3UBAKqxAQCtFQEArBUBAK/dAwCu3QMAobkBAKCpAQCjiQEAorEBAKWZAQCkkQEAp4kBAKaRAQAuAwCAwgIAgM4CAIDmAgCA2gIAgAQDAICwAgCA+AIAgCIDAIAKAwCAngIAgIACAIC2AgCAyAIAgP4CAICGAgCAKAMAgKoCAIAQAwCAjAIAgBYDAIAcAwCACS0AgOsuAIDKNACAhAcAgAYFAIAVBQCAJAUAgDMFAIBCBQCASwUAgPAsOABUBQCAXQUAgGYFAICSBQCA40huA5sFAIDhTG4DpAUAgO/0AQOnBQCAqgUAgK0FAIBGOgCApkwAgNZVAIA2aACAZnEAgJZ6AID2jACAVp8AgIaoAIDtugCAJMQAgFTNAICE1gCAtN8AgDG7AIA6rgCABqUAgPkqAICJKwCAoSoAgOUqAIBBMQCAATEAgE40AIDVLACABjMAgIo3AIBiNACAHSwAgJI0AICeMwCAEjgAgFkrAICFLACA+jEAgCY5AIAdKwCArSsAgJ4xAIC8LgCAySwAgFksAIA4LgCALC4AgJGgBgDuMwCAGSsAgJ43AIB1LACAzS0AgLAFAIDh1D8D4VgaA+PcLwPjUA4D4RTyA+FA0wPjQOoD40DDA7MFAIC2BQCA73jrA+9c8gO5BQCA5QUAgO9E3gPvmCUD4bSLA+E8lwPjfKID45iLA+EwQQDhUKwD4xx/AOOIRgDoBQCA6wUAgO84ewDv4EEA7gUAgPEFAIDvzIoD7yCHA4DBGACB3RgAgikLAIMpCwCE6Q4AhekOAIYZDwCH8RgAiCUPAIntGgCK5RsAiyEdAIw5HQCN5RsAjmkQAI/VGgCQhRsAkU0PAJJFDwCTXQ8AlEUPAJVNDwCWRQ8Al30PAJhFDwCZTQ8AmkUPAJtpGwCcQQ8AnUEPAJ5BDwCfQQ8AoMEPAKHBDwCiwQ8Ao8EPAKS5CwCluQsApqkLAKfNDwCo9Q8Aqf0PAKr1DwCrzQ8ArNkPAK3ZDwCuyQ8Ar8kPALC5DwCxuQ8AsmkPALNpDwC0YQ8AtWEPALY5DwC3OQ8AuBEPALkRDwC66QEAu+kBALz5AQC9+QEAvukBAL/pAQD0BQCA9wUAgPoFAID9BQCAAAYAgCAGAIDhBACAgAUAgNMFAIAOBgCANAYAgEsGAIBoBgCAfwYAgJYGAIDdAwCA9gMAgA8EAIASBwCAQQgAgD4IAIA/BwCAOSQAgHIkAICjJACAyCQAgLkmAIDEJgCAyCYAgMwmAIDQJgCALygAgG4oAICWKACAmigAgL8oAIDHKACA4ygAgPUoAID5KACA/SgAgLrp0wAVKQCAMCkAgEspAIA9JACASiQAgFckAIBkJACAdiQAgIMkAICVJACApyQAgLckAIDMJACA1iQAgOQkAIDuJACA+yQAgAwlAIAWJQCAbyUAgHYlAIAkJQCAgBkDAIEZAwCCKQMAgykDAIQ5AwCFOQMAhikDAIcpAwCIGQMAiRkDAIppAwCLaQMAjHkDAI15AwCOaQMAj2kDAJAZAwCRGQMAkgEEAJMtAwCUNQMAlVUGAJZdBgCXVQYAmG0GAJl1BgCafQYAm3UGAJxtBgCdNQYAnj0GAJ81BgCgzQYAodUGAKLdBgCj1QYApPkDAKX5AwCm6QMAp+kDAKjZAwCp+QYAqikGAKspBgCsOQYArTkGAK7FAwCvPQMAsEUDALFNAwCyRQMAs10DALRFAwC1TQMAtkUDALd9AwC4SQMAuUkDALpZAwC7fQYAvGUGAL1tBgC+ZQYAgCUAgKkVDwCoAQ8Aq00PAKpNDwCtRQ8ArEUPAK+hDQCuqQ0AoXULAKBhCwCj7QsAoqkLAKXlCwCk5QsApzkPAKZZCAC5oQ0AuJkNALuhDQC6qQ0AvaENALy5DQAxJQCAvqkNALGhDQCw2Q0As6ENALKpDQC1oQ0AtLkNALehDQC2qQ0
AOCUAgEglAIBbJQCAsiUAgLwlAICRJQCAoSUAgNAlAICB7Q0AgO0NAIP9DQCC/Q0Ahe0NAITtDQCH2Q0AhiEYAJlNDQCYTQ0Am1ENAJpdDQCdeQ0AnHUNAJ9pDQCecQ0AkYkNAJCBDQCTmQ0AkoENAJWJDQCUgQ0Al30NAJaBDQDgJACAICUAgI0lAIDMJQCA3iUAgAgmAIAtJgCAQiYAgPAlAID6JQCADCYAgBkmAIAxJgCATiYAgFgmAIB2JgCASiYAgGYmAIBuJgCAgCYAgIwmAICUJgCAoyYAgN4mAICcJgCAsiYAgKcmAIC9JgCA1CYAgOImAIABJwCAEScAgBsnAIBPJwCAkicAgOcnAIBPKQCAXSkAgGEpAIBlKQCA8CYAgC4nAIA+JwCASCcAgCMnAIBTJwCAYycAgH4nAIBwJwCAlicAgMInAIDJJwCApicAgNMnAIDdJwCAtCcAgBgoAIAKKACA6ycAgCUoAIDyJwCA/CcAgDMoAIBAKACASigAgFQoAIBeKACAcigAgH8oAICGKACAnigAgKUoAICyKACAyygAgNUoAIDnKACAASkAgA4pAIAZKQCAIykAgDQpAIA7KQCAUykAgMMDAIDmBACAhQUAgNgFAIATBgCAOQYAgFAGAIBtBgCAhAYAgJsGAIDjAwCA/AMAgBUEAIAoBACAOwQAgE4EAIBhBACAdAQAgIcEAICaBACAAAUAgA8FAIAeBQCALQUAgDwFAIBjCACAJAgAgMEGAID8BwCAHQkAgOMoEwAzCQCAKggAgC0IAIAxCACAJAcAgNwuAIDKMACA2S0AgLswAIBFMQCAJwkAgO/sEwAGCQCA3A0AgM8IAICDCACAMQcAgEwHAID8BgCACggAgJQIAIAqCQCACQkAgOANAIDsDQCA2wgAgJkIAIAVBwCAhggAgFUHAID/BgCApgcAgJEkAIDwDQCA4ggAgCcIAICcCACAWAgAgBUJAID0DQCA5QgAgBQIAICfCACA6AgAgBcIAIDJCACAoggAgOwIAIAbCACAzAgAgKYIAID3CACA/QgAgIgHAICKCACAWQcAgAMHAIA9CQCAQQkAgEkJAIA2CQCAGAkAgPgNAID0CACALQkAgAwJAIDkDQCA0ggAgI4IAIBdBwCAMAkAgA8JAIDoDQCA1QgAgJEIAIBgBwCArQgAgGMHAIDjSBIA4xQSAOP4EwDjuBMA4+wSAOOgEgDjbBIA43gSAO/ADQDv2A0A73QSAO9QEgDvqBIA79wSAO8oEwDvIBMA6QcAgMwGAIAOCACAEQgAgNgGAIDUBgCAIQgAgAcHAIBnCACADAcAgHYIAIA0BwCANwcAgKoIAIC2CACAuQgAgOPYEADjoBAA46AQAON0EQDjNBAA4wgQAOPkEADj9BAA77wQAO/gEADvzBAA7zgQAO8QEADvcBAA73AQAO9MEADjhBMA4+gTAOMwEADjEBAA42ATAONAEwDjpBMA47QTAO/IEwDvtBMA75gTAO98EwDvXBMA70wTAO8UEwDv6BAAgO08AIH1PACC/TwAg/U8AITtPACFFT0Ahh09AIcVPQCILT0AiTU9AIo9PQCLNT0AjC09AI0VPQCOHT0AjxU9AJBtPQCRdT0Akn09AJN1PQCUbT0AlRU9AJYdPQCXFT0AmC09AJk1PQCaPT0AmzU9AJwtPQCdFT0Anh09AJ8VPQCg7T0AofU9AKL9PQCj9T0ApO09AKUVPQCmHT0ApxU9AKgtPQCpNT0Aqj09AKs1PQCsLT0ArRU9AK4dPQCvFT0AsG09ALF1PQCyfT0As3U9ALRtPQC1FT0AthE9ALcRPQC4MT0AuTE9ALoxPQC7MT0AvBE9AL0RPQC+ET0AvxE9AIDxPACB/TwAgvU8AIMNPwCEFT8AhR0/AIYVPwCHDT8AiDU/AIk9PwCKNT8Aiw0/AIwVPwCNHT8AjhU/AI8NPwCQdT8AkX0/AJJ1PwCTDT8AlBU/AJUZPwCWCT8
Alwk/AJg5PwCZOT8Amgk/AJsJPwCcGT8AnRk/AJ4JPwCfCT8AoPk/AKH5PwCiCT8Aowk/AKQZPwClGT8Apgk/AKcJPwCoOT8AqTk/AKoJPwCrCT8ArBk/AK0ZPwCuCT8Arwk/ALB5PwCxeT8Asgk/ALMJPwC0GT8AtRk/ALYJPwC3CT8AuDk/ALk5PwC6CT8Auwk/ALwZPwC9GT8Avgk/AL8JPwCA+TwAgfk8AIJJPQCDST0AhFk9AIVZPQCGST0Ah0k9AIh5PQCJeT0Aikk9AItJPQCMWT0AjVk9AI5JPQCPST0AkDk9AJE5PQCSAQQAk00GAJRVBgCVXQYAllUGAJdNBgCYdQYAmX0GAJp1BgCbTQYAnFUGAJ1dBgCeVQYAn00GAKC1BgChvQYAorUGAKPNBgCk1QYApd0GAKbVBgCnzQYAqPUGAKn9BgCq9QYAq80GAKzVBgCt3QYArtUGAK/NBgCwtQYAsb0GALK1BgCzTQYAtFUGALVdBgC2VQYAt00GALh1BgC5fQYAunUGALtNBgC8VQYAvV0GAL5VBgC/TQYArH0/AK2lPwCurT8Ar6U/AKh9PwCpZT8Aqm0/AKtlPwCkHT8ApUU/AKZNPwCnRT8AoB0/AKEFPwCiDT8AowU/ALydPwC9pT8Avq0/AL+lPwC4nT8AuYU/ALqNPwC7hT8AtN0/ALWlPwC2rT8At6U/ALDdPwCxxT8Ass0/ALPFPwCMZToAjW06AI5lOgCPfToAiEU6AIlNOgCKRToAi306AIRlOgCFbToAhmU6AId9OgCABToAgQ06AIIFOgCDfToAnF04AJ3lPwCe7T8An+U/AJhdOACZRTgAmk04AJtFOACUuTgAlWU4AJZtOACXZTgAkAU6AJENOgCSBToAkwE5AMAIAIDYCACA3ggAgPAIAIB2BwCAIgkAgHkHAICBBwCAVAkAgJ0HAIDLBwCAvQcAgMQGAIDcBACAewUAgM4FAIAJBgCALwYAgEYGAIBjBgCAegYAgJEGAIDXAwCA8AMAgAkEAIAiBACANQQAgEgEAIBbBACAbgQAgIEEAICUBACA+gQAgAkFAIAYBQCAJwUAgDYFAIBFBQCATgUAgFcFAIBgBQCAaQUAgJUFAICeBQCAXQgAgFYOAIBZDgCAOjoAgKwKAIAVCwCANjoAgD46AICcGQAAnRkAAJ45AACfOQAA4wwAgEI6AIB6NwCA8TAAgKI3AIBaMgCAxSoAgLksAICaMDUA7C0AgB0tAIDoLQCA1y8AgJ+ENQDSMwCAnUQpAGI1AICaNgCA1jYAgAo3AIAeOACAdjEAgAIyAICuMgCARjMAgGI2AIBGOACAcjkAgOkqAICNLACAijEAgNIyAICWNgCAwjkAgJQuAIB6MgCAhjYAgBo3AIALMACAvjUAgLSAGgC1hBkAtojmALeM5ACwABwAsZQeALIAGACznBsAvADsAL2k7wC+qO4Av6TtALgA4AC5tOMAurjiALu84QCkwAAApQAMAKbIDgCnAAgA4jYAgAcvAIAFMQCArXwDAKwAEACt5BMArugSAK9gEQCo8AoAqRwJAKr4FgCr/BQAGjIAgB4zAIAqOACAKSsAgMErAIAtLACAczAAgIIxAIDOMgCA8jMAgI42AICmNgCAyjcAgO44AICiOQCAvjkAgC40AIBuNACAvAgAgCY1AIBGNgCAejgAgE43AIChLQCAIy8AgN40AICeNQCAAjMAgDY0AICaNwCA5jgAgJ0tAIBwLgCAejEAgC4yAIBiMgCAFjUAgD41AICmOACAKSwAgJwAAACqNQCAzSsAgMkrAICaNACAKjUAgF42AICuOACAajcAgA8wAIBaNwCA0SoAgEQuAIB7LwCAMjMAgLIzAIBNLACAPjQAgDkrAIBfLwCAsSoAgO4xAICLMACAEjUAgIDpAwCB6QMAgjkvAIP9AwCE5QMAhe0DAIblAwCHfS4AiEEuAIkhAgCKeS8AiyUCAIw9AgCNJQI
AjiECAI8dAgCQZQIAkW0CAJJlAgCTfQIAlGUCAJVtAgCWZQIAlx0CAJglAgCZLQIAmiUCAJs9AgCcJQIAnS0CAJ4lAgCfHQIAoOUCAKHtAgCi5QIAo/0CAKTlAgCl7QIApuUCAKdNAgCodQIAqX0CAKqpAQCrqQEArLkBAK25AQCuqQEAr6kBALDZAQCx2QEAsukBALPpAQC0eSIAtf0BALb1AQC37QEAuNUBALndAQC61QEAu60BALy1AQC9uQEAvqkBAL+pAQChLACAjS0AgP4zAIBmNgCAPjcAgLoxAIDmMQCAHzAAgB42AIA/MACArjMAgAUrAICBKwCAxSsAgFYxAID+NACA9jUAgEo3AIBaOACANSwAgOksAIAXLwCApzAAgH4yAIBCNACAljgAgHo5AIDOOQCA5jkAgOkwAICmMQCA7jcAgOMuAIC/LwCA2y8AgGswAIBuMgCAujIAgGozAICONACAMjUAgJY1AIDeNwCAbjYAgAY4AIB+OACA6SsAgBUsAID9LACAqjIAgPY2AIADLwCAcy8AgDcwAICyMQCA2jQAgCYzAIAVKwCAWS0AgKguAIB/LwCAQjMAgF4zAIBuNQCAgFEBAIEBKgCCXQEAg1UBAIRNAQCFdQEAhn0BAId1AQCITQEAiVUBAIqdKwCLWQEAjEkBAI1JAQCOuQEAj7kBAJDJAQCRyQEAktkBAJPZAQCUyQEAlckBAJb5AQCX+QEAmMkBAJnJAQCa2QEAm9kBAJzJAQCdyQEAnrkBAJ+5AQCgSQEAoZUBAKJFAQCjXQEApEUBAKVNAQCmRQEAp30BAKhFAQCpTQEAqnkPAKtBAQCsQQEArUEBAK5BAQCvQQEAsMEDALHBAwCywQMAs8EDALTBAwC1wQMAtsEDALfBAwC4wQMAucEDALrBAwC7wQMAvMEDAL3BAwC+wQMAv8kMAI41AIBiOACA4jgAgPI4AIAuOQCALSsAgII0AIBOOACAyjgAgJcvAIDxKgCAUSsAgEguAIBoLgCAlzAAgMYyAIDOMwCAejYAgBo4AIDZMACAojgAgA0sAIAlMQCAMTEAgBIyAIBKMgCATjMAgKozAIAqNACADjUAgDo5AIDrLwCAsjgAgEErAICMLgCAMjIAgOI3AIBPLwCAny8AgDkxAIC6OACA8SsAgNksAIB4LgCAwjAAgBUxAIBiMQCA9jEAgEozAIC+MwCAWjUAgPo2AIAGNwCA1jgAgF0sAIBOMgCA3SwAgMoyAIBuMwCAijYAgL44AICqOQCA0jkAgC0xAICxOSMAsBEDALMVAwCyFQMAtTUDALQ1AwC3NQMAtjUDALkVAwC4FQMAuxUDALoVAwC9dQMAvHUDAL91AwC+dQMAoZkNAKCRDQCjqQ0AopENAKW5DQCksQ0Ap6kNAKaxDQCpmQ0AqJENAKtpAwCqkQ0ArXkDAKxxAwCvaQMArnEDAJEZDQCQEQ0Aky0NAJIRDQCVPQ0AlD0NAJctDQCWLQ0AmR0NAJgdDQCbbQ0Amm0NAJ15DQCcgQ4An2kNAJ5xDQCBmQ0AgAkjAIOpDQCCkQ0AhbkNAISxDQCHqQ0AhrENAImZDQCIkQ0Ai2kNAIqRDQCNeQ0AjHENAI9pDQCOcQ0AKjIAgMY1AIDGNACA6jQAgBozAICiMgCAZjcAgA0rAIAuNgCA9SsAgOUrAIDzLgCAEzAAgPY0AIA0LgCABjIAgOUwAIDqNwCAqjgAgA8vAIBhKwCANS0AgIktAIDVMACA0SsAgCIzAIDmMwCASjQAgGY0AIBqNACAfjQAgPo4AIDuNACAkjYAgFY3AIAKOACANjgAgE45AIBSOQCAVjkAgLo5AIAuOACAxjgAgDErAIBVKwCAaSsAgCUsAIAxLACAcSwAgCUtAIBBLQCASS0AgIUtAICRLQCAdC4AgIsvAICzLwCAuy8AgJH4EADTLwCAfzAAgK8wAIDdMACAWjEAgIApAQCBKQEAgjkBAIM5AQCEKQE
AhSkBAIZZAQCHWQEAiNkoAIltAQCKKSUAi2EBAIxhAQCNYQEAHjIAgDoyAICQGQEAajIAgJIVAQC+MgCA3jIAgJU1AQCWPQEAlzUBAJgNAQCZFQEAmh0BAJsVAQCcDQEAnfUBAJ7dKABSMwCAoAUBADI0AICiAQEAVjQAgFI0AIClGQEApgkBAFo0AIBeNACAdjQAgKo9AQCrNQEArC0BAK0VAQCuHQEArxUBALBtAQCxdQEAsn0BALN1AQC0bQEAtRUBALYdAQC3FQEAuC0BALk1AQC6PQEAuzUBALzZLgC9KQEAvhkBAL8ZAQC6eR4Au3keALjNAgC5eR4AvpUeAL+dHgC8QQIAvZ0eALJ9HgCzRR4AsH0eALF1HgC2XR4At0UeALRdHgC1VR4AqgUeAKsNHgCodR4AqQ0eAHo0AICeNACArBUeAK0NHgCiSR4Ao0keAKBJHgChSR4ApkkeAKf5AgCkSR4ApUkeAJqNHgCblR4AmI0eAJmFHgCeiR4An4keAJyNHgCdhR4AkgUDAJP1AACQCQMAkY05AJaxHgCXFQYAlO0AAJUBHACKvQMAi0EDAIiFAwCJnQMAjkEDAI9JAwCMyTkAjVEDAIIVAgCDHQIAgAUCAIEdAgCGzQMAh7EDAIQFAgCFxQMAs/kFALLxBQCx+QUAsOEFALeZKgC2EQMAtRkDALThBQC7NQMAujUDALklAwC4JQMAvxUDAL4VAwC9JQMAvCUDAKP9BQCi/QUAof0FAKD9BQCnnQUApp0FAKWdBQCknQUAq7kFAKqxBQCpJScAqL0FAK+ZBQCukQUArZkFAKyhBQCTAQUAkvkFAJF1OQCQ9QUAlwEFAJYZBQCVEQUAlBkFAJt5CQCaOQUAmTEFAJg5BQCfHQUAnh0FAJ0dBQCcHQUAg4kFAIKBBQCBiQUAgPEFAIeFBQCGhQUAhZUFAISBJgCLhQUAioUFAIm1BQCItQUAj4UFAI6FBQCNlQUAjJUFAM40AIA6NQCAQjUAgFY1AIB+NQCAzjUAgAI2AIBqNgCAEjcAgCo3AIBeNwCAYjcAgKY3AICqNwCAAjgAgNo4AIAeOQCANjkAgIMvAICQ6gCA5jUAgLkqAIC9KwCAfSsAgCUrAIBlKwCAkSsAgCEsAIA9LACAES0AgCEtAIA9LQCAmS0AgOQtAIDwLQCADC4AgBwuAIALLwCAEy8AgEMvAIBjLwCAky8AgKsvAICbLwCAry8AgO8vAIBHMACAUzAAgFswAICDMACACTEAgB0xAIBeMgCAVjIAgIYyAIAWNACA4jIAgBYzAIBiMwCAfjMAgKIzAIDGMwCAyjMAgOozAICAjQEAgZUBAIKdAQCDlQEAhI0BAIW1AQCGvQEAh7UBAIiNAQCJwR0AipkBAIvBHQCMhQEAjY0BAI6FAQCP/QEAkIUBAJEZHQCSkRQAk4UBAJSdAQCViTIAlk0ZAJc9GwCYsQEAmbEBAJotHACbtQEAnD0cAJ2pAQCemQEAn5kBAKDlHQChbQEAomUBAKN9AQCkZQEApW0BAKbxHQCnYQEAqKEDAKmhAwCqoQMAq6EDAKyhAwCttQEArq0DAK+lAwCwYRkAsdkDALLZAQCz7QMAtPUDALX9AwC29QMAt+0DALjFAQC50QMAumEdALvVAwC82QEAvT0XAL7FAwC/0QEA+jMAgA40AIAKNACAOjQAgLY0AIDmNACAHjUAgE41AIAyNgCAWjYAgM42AIAWNwCAIjcAgEI3AIBGNwCAUjcAgG43AIDmNwCAFjgAgEo4AIBqOACAtjgAgA45AIAqOQCAijkAgCfqAIAi6gCAVOoAgOEpAIAJKgCADSoAgNbqAIAD6wCAe+sAgBY6AIAmOgCARwgAgFIIAIBVCACASggAgE4IAIBXCQCA8Q4AgOIOAIDnDgCA9g4AgOwOAICyNACASw8AgMoPAICBDwCALw8AgFoPAIBnDwCAbw8AgJ0PAIDCDwCAuA8AgL0PAICqDwC
AsQ8AgP4OAIADDwCACA8AgIBBAQCBMQMAgk0BAINFAQCEXQEAhUUBAIZNAQCHIQMAiF0fAIl9AQCKaQMAi3EBAIx1AwCNVQEAjlk6AI9ZAQCQKQEAkSkBAJI5AQCTOQEAlCkBAJUpAQCW2QEAl9kBAJjpAQCZ6QEAFQ8AgCIPAIAqDwCAMg8AgDwPAIBBDwCARg8AgFAPAIBVDwCAXQ8AgGoPAIByDwCAdw8AgHwPAICEDwCAiQ8AgJMPAICYDwCAoA8AgKUPAIDFDwCANw8AgBoPAIBiDwCAjg8AgA0PAIDdFgCA5hYAgOkWAIDvFgCA4xYAgOwWAIDgFgCAExcAgBYXAID1FgCA8hYAgPgWAICAmQcAgZkHAPsWAICDrQcAhLUHAAQXAICGsQcAh7EHAIiRBwCJkQcAipEHAIuRBwCM8QcAjfEHAI7xBwCP8QcAkJEHAJGVBwCSnQcAk5kHAJSFBwCVgQcAloEHAJeFBwCYuQcAmb0HAJq1BwCbsQcAnK0HAJ2pBwCemQcAn50HAKBhBwChZQcAom0HAKNpBwCkdQcApXEHAKZxBwCndQcAqEkHAKlNBwCqRQcAq0EHAKxdBwCtWQcArkkHAK9NBwCwMQcAsTUHALI9BwCzOQcAtCUHALUhBwC2IQcAtyUHALgZBwC5HQcAuhUHALsRBwC8DQcAvQkHAL7xAAC/9QAAgAkBAIENAQCCHQEAgxkBAITZAACF3QAAhtUAAIfRAACI8QAAifUAAIr9AACL+QAAjOkAAI3tAACO5QAAj+EAAJCdAACRmQAAkq0AAJOpAACUtQAAlbEAAJaxAACXtQAAmIkAAJmNAACahQAAm4EAAJydAACdmQAAnokAAJ+NAACgdQAAoXEAAKJ9AACjeQAApGlQAqVtUAKmYQAAp2UAAKhZAACpXQAAqlUAAKtRAACsTQAArUkAAK49AwCvOQMAsClQArEtUAIBFwCABxcAgP4WAIANFwCAChcAgBkXAIDZXFICHxcAgCUXAIAiFwCAKBcAgCsXAIA0FwCALhcAgKOhAACipQAAoZEAAKCVAACntQAAprEAAKW9AACkuQAAq40AAKqJAACpgQAAqIUAAK+FAACugQAArYkAAKyNAACz/QAAsvkAALHxAACw9QAAt5kAALadAAC1nQAAtJkAALutAAC6qQAAuaUAALilAAC/ZQEAvmEBAL1tAQC8aQEAHBcAgFcXAIBAFwCAPRcAgEgXAIBOFwCAOhcAgNksUQJLFwCAVBcAgHkWAIDhDwCAMRAAgA4QAIAiEACAHRAAgJNBAAAnEACALBAAgBMQAICXWQAAllUAAJVZAACUXQAAm3EAAJppAACZZQAAmGUAAJ9lAACeYQAAnTFTApxtAAC4gQQAuYEEALqBBAC7gQQAvIEEAFEXAIC+jQQA5g8AgLDdBQCxTQQAskUEALNdBAC0RQQAtU0EALZFBADrDwCAqKEFAKntQQCqrQUAq6UFAKy9BQCtpQUArq0FAK+lBQCgqQUAoZFBAKKpQACjoQUApKEFAKWhBQCmoQUAp6EFAP8PAIAYEACAWBAAgF0QAIBpEACAnVUFAH8QAICfWQUAjhAAgJMQAICeEACAkwUFAJQdBQCVBQUAlg0FAJcFBQC4EACAyxAAgO8QAIAhEQCAJhEAgC4RAIA9EQCATBEAgIBxBQCBcQUAgnEFAINxBQCEUQUAhVEFAIZdBQBREQCAWREAgHwRAICjEQCArxEAgM8RAIDUEQCA2REAgBMSAIAmEgCAMhIAgEoSAIDEEgCAGhMAgDMTAIA4EwCASxMAgFwTAIBuEwCAcxMAgJoTAICiEwCAtxMAgN4TAIDjEwCAPRQAgEIUAIBHFACAUxQAgF8UAIBkFACAbBQAgHgUAICSFACAlxQAgJ8UAICkFACAqRQAgK4UAICzFACAuBQAgMsUAIDQFACA7BQAgAYVAIAgFQCALBUAgEQVAIBJFQCAVhUAgHcVAICaFQC
AtBUAgMAVAIDFFQCAzRUAgO4VAIAIFgCAFxYAgDQWAIA5FgCAQRYAgEYWAIBZFgCAXhYAgICtAQCBtQEAgr0BAIO1AQCErQEAhdUBAIbdAQCH1QEAiO0BAIn1AQCK/QEAi/UBAIztAQCN1QEAjt0BAI/VAQCQrQEAkbUBAJK9AQCTtQEAlK0BAJVVAwCWXQMAl1UDAJhtAwCZdQMAmn0DAJt1AwCcbQMAnVUDAJ5dAwCfVQMAoK0DAKG1AwCivQMAo7UDAKStAwCl1QMAphkOAKfZAwCobQ8AqSEOAKrhAwCr4QMArCkOAK3lAwCuGQ4ArxkOALCVAwCxnQMAsgEOALORAwC0HQ4AtQUOALa5AwC3uQMAuDkOALmNAwC6NQ4AuxEOALyBAQC9gQEAvnkBAL95AQCEFgCAkBYAgJwWAICrFgCAyBYAgM0WAIDuEQCA/xEAgHwWAICBAACAiwAAgJUAAICfAACAqQAAgLMAAID1DwCA+g8AgAQQAIB1EACAehAAgIQQAIDlEACA6hAAgBcRAIAzEQCAOBEAgEIRAIBRFQCADRYAgBIWAIAqFgCAoRYAgKYWAIC+FgCA8A8AgAkQAICJEACAHBEAgNcSAIA/FQCALxYAgGMWAIDDFgCARxEAgGQSAICfEgCAshIAgBEUAIAdFACAKRQAgI0TAICSEwCA0RMAgNYTAID9EwCAAhQAgGkSAIBuEgCAtxIAgLwSAIDCEQCAxxEAgJYRAICbEQCApD0DAKVFAwCmTQMAp0UDAKA9AwChJQMAoi0DAKMlAwCsfQMArUUDAK5NAwCvRQMAqH0DAKllAwCqbQMAq2UDALQ9AwC1xQMAts0DALfFAwCwPQMAsSUDALItAwCzJQMAvP0DAL3FAwC+zQMAv8UDALj9AwC55QMAuu0DALvlAwCEBQwAhQ0MAIYFDACHHQwAgI0MAIGpDACCGQwAg1ENAIxhDACNYQwAjmEMAI9hDACIKQwAiRUMAIodDACLFQwAlD0MAJXFAwCWzQMAl8UDAJABDACRAQwAkgEMAJMBDACc/QMAncUDAJ7NAwCfxQMAmP0DAJnlAwCa7QMAm+UDAIBpBACBaQQAgnEEAINxBACEnQQAhYUEAIaNBACHhQQAiL0EAImNBACKhQQAi50EAIyFBACNqQYAjvkEAI/5BACQiQQAkYkEAJKRBACTkQQAlLEEAJWxBACW+QYAl60EAJiVBACZwQYAmmkGAJtpBgCceQYAnXkGAJ7RBgCf/QsAoA0GAKEdCwCiGQYAo0ULAKQFBgClTQsApjUGAKe1BACoEQYAqREGAKoRBgCrNQQArC0EAK0BBACuXQQArx0GALDNBgCxbQYAsnUGALMNBgC0FQYAtR0GALYVBgC3DQYAuDUGALk9BgC6NQYAuw0GALwVBgC9HQYAvhUGAL8NBgCA9QcAgf0HAIL1BwCD9QAAhO0AAIURAwCGEQMAhxEDAIgxAwCJMQMAijEDAIsxAwCMhQcAjRUDAI4dAwCPFQMAkG0DAJGNBwCShQcAk50HAJSFBwCVjQcAloUHAJe9BwCYhQcAmY0HAJqFBwCbnQcAnIUHAJ2NBwCehQcAn4UAAKB9AAChgQMAooEDAKOBAwCkgQMApYEDAKaBAwCngQMAqBUHAKmFAwCqjQMAq4UDAKydAwCtoQMArqEDAK+hAwCwdQcAsXUHALJxBwCzhQUAtM0FALX1BQC2/QUAt8kDALj5AwC5+QMAuqEFALuhBQC8wQMAvcUDAN4RAIDjEQCAhJz7ACYTAIArEwCAYRMAgGYTAIB2EgCAghIAgJUSAICaEgCARRIAgNwSAIBXEwCASxAAgKMQAIC9EACAxBAAgJB1AACRfQAAknEAAJNxAACUAfwAlVX+AJZd/gCXVf4AmG3+AJlp/gCaef4Am3n+AJxp/gCdaf4Anln+AJ9Z/gCgpf4Aoa3+AKKl/gCjof4ApKH+AKWl/gCmrf4Ap6X+AKiZ/gCpmf4
Aqun+AKvt/gCs9f4ArfH+AK7x/gCv8f4AsI3+ALGV/gCymf4As5n+ALSJ/gC1if4Atrn+ALe9/gC4hf4AuY3+ALqF/gC7nf4AvIX+AL2B/gC+gf4Av4H+AKbZCACnBQcApMEIAKWZBQCi0QgAo9EIAKCJBQChtQgArgEHAK8BBwCsMQcArTEHAKo9BwCrJQcAqD0HAKk1BwC2fQcAtwUHALR9BwC1dQcAsskFALNlBwCwcQcAsXEHAL4BBwC/AQcAvDEHAL0xBwC6IQcAuyEHALg9BwC5MQcAhjkHAIc5BwCELQcAhTkHAIINBwCDNQcAgBEHAIEFBwCOSQcAj0kHAIxNBwCN1QUAisEFAIvBBQCI1QUAiXEHAJbVBQCX2QgAlE0FAJXdBQCSUQUAk9kFAJD5BQCRoQUAnnEIAJ99CACcYQgAnWEIAJpxCACbeQUAmMUIAJl1BQD0EACA+xAAgAIRAICBEQCAuxEAgLQRAIArEgCAGBIAgB8SAIBWEgCATxIAgF0SAIDJEgCAHxMAgIcSAIB7EgCApBIAgKsSAIA9EwCAUBMAgHgTAIB/EwCAhhMAgKcTAIC8EwCAwxMAgOgTAID2EwCA7xMAgEwUAIB9FACAhBQAgAsVAIAZFQCAEhUAgPEUAIAlFQCAMRUAgHwVAICDFQCAkxUAgFsVAIBpFQCAnxUAgKYVAIBiFQCASxYAgFIWAIDzFQCA+hUAgNkVAIDgFQCAIxYAgBwWAICwFgCAbhAAgLEQAICqEACA3hAAgNcQAIAQEQCACREAgI8RAIBeEQCAgIEBAIGBAQCCgQEAg4EBAISdAQCFhQEAhokBAIeJAQCItQEAib0BAIq1AQCLjQEAjJUBAI2dAQCOlQEAj40BAIgRAIA3EgCAkv0BAJP1AQCU7QEAlZUBAJadAQCXlQEAmKkBAJmpAQCauQEAm7kBAJypAQCdrQEAnqUBAJ+dAQCgZQEAoW0BAKJlAQCjfQEApGUBAKVtAQCmZQEAp90AAKjlAACppQMAqq0DAKulAwCsvQMAraUDAK6tAwCvpQMAsN0DALHlAwCy7QMAs+UDALSpAQC1VQEAtvUDALftAwC41QMAud0DALrVAwC7rQMAvM0DAL3BAwC+vQMAv7UDANASAICOEgCARBMAgP8UAIA4FQCAlRYAgIkWAIC3FgCAuRUAgIsUAIABFgCAyhMAgMQUAIDSFQCArRUAgPgUAIC9FACAZREAgKgRAIBwFQCA0BAAgFgUAIBiEACAPhIAgOcVAIATEwCAcRQAgEIQAIA5EACAihUAgOESAID2EQCArhMAgGsWAIDqEgCA8RIAgGwRAIAEEgCApgMAgA0jAIARIwCAoAYAgMcAAIC1BgCAqyMAgK8jAIC5IQCAtSEAgOMHAIB7CQCAfwkAgEEjAICnIwCANSMAgDkjAIAdIwCAISMAgCUjAIApIwCALSMAgDEjAIDbBwCA3wcAgNEAAICATQEAgVEBAIJRAQCDTQEAhE0DAIUhAwCGRQEAh30BANcAAICiAwCAqAMAgN0HAIDTAACA1QAAgL0GAIB5AACABxQAgH0AAICHAACAkQAAgAwUAICbAACAGBQAgKUAAIAkFACArwAAgDAUAIC5AACANRQAgM8PAIBVEACAmBAAgJsQAIArEQCAVhEAgKARAIDMEQCA6BEAgOsRAIDzEQCADRIAgBASAIBzEgCAwRIAgDATAIBrEwCAlxMAgJ8TAICwpQEAsa0BALKlAQCzvQEAtKUBALWtAQC2pQEAt10BALhlAQC5bQEAumUBALt9AQC8ZQEA2xMAgDoUAIBpFACAgAW5AIHhBgCC4QYAg+EGAIThBgCoBgCAswYAgIfpBgCI2QYAifmxAIr1sQCL8bEAjO2xAI31BgCO+QYAj/0GAJDZBgCR2QYAkvWxAJwUAICUiZIClfEGAJb1BgCX9QYAmNkGAJnVsgCa3bIAm6kGAJy5BgCduQYAnqkGAJ+BBgCgoQc
AoaEHAKIhsgCjpQcApIUAAKWNAACmQbMA1RQAgKiNBwCplQcAqp0HAKuVBwBOFQCAyhUAgDYQAIA+FgCAsP0HALGFBwCyjQcAaBYAgLSZBwCBFgCAtpUHALeNBwC4tQcAub0HALq1BwC7jQcAvJUHAL2dBwC+lQcAv40HAIB1BgCBlaACgpmgAoOZoAKEhaAChb2gAoaxoAKHhaACiLmgAomRoAKKnaACi5mgAoyFoAKNjQEAjoEBAI9FBgCQOQYAkT0GAJIxBgCTMQYAlC0GAJXVBgCW2QYAl90GAJjhBgCZ4QYAmu0GAJvpBgCc9QYAnf0GAJ7xBgCf9QYAoAkGAKEJBgCiBQYAowEGAKQdBgClBQYApgkGAKcNBgCoMQYAqTEGAKo9BgCrNQYArCkGAK0pBgCuJQYArx0GALBhBgCxYQYAsm0GALNpBgC0dQYAtX0GALZxBgC3dQYAuEkGALlJBgC6RQYAu0EGALxdBgC9RQYAvkkGAL9NBgCAsQUAgbEFAIK9BQCDuQUAhKUFAIWtBQCGoQUAh6UFAIiZBQCJmQUAipUFAIuRBQCMjQUAjcEFAI7NBQCPyQUAkLUFAJG9BQCSsQUAk7UFAJSpBQCVqQUAlqUFAJehBQCYnQUAmSkCAJolAgCbIQIAnD0CAJ3pAgCe5QIAn+ECAKAdAgChNQIAojkCAKM9AgCkIQIApSECAKYtAgCnKQIAqBUCAKkZAgCqFQIAqxECAKwNAgCteQIArnUCAK8V8ACwafAAsRECALIdAgCzGQIAtAUCALUhAAC2LQAAtyUAALgZAAC54QEAuu0BALvlAQC8+QEA2BQAgN0UAIC/9YYCp2kNAOIUAIDnFACAzwAAgNkAAICzAwCA4QcAgH0JAID7IgCAzNSFAszghQL/IgCAgSkAgDUkAIBuJACAjSQAgLyZBQC9mQUAvqkFAL+ZvAC4mQUAuZkFALqJBQC7iQUAtKEFALXVsQC23bEAt6kFALCxsgCxzQUAssUFALO9BQCfJACAxCQAgMMoAIDfKACA8SgAgIgmAICFKQCAaSkAgCkkAIAtJACA2WSgAoEJAIDZUKAChAkAgI0JAICKCQCAhwkAgOwhAIDvIgCA9CEAgJhlBQCZEbIA/CEAgNkwoAKUOZEClU0FAJZFBQCXXQUAkGkFAJFpBQCSWQUAk1kFAID9vACB1ZwCgmW8AIPFvACEkbwAhZ28AIalvACHjbwAiK2TAonlvACKKZACi7W8AIwRkAKNlbwAji2wAI/FnAKQ6bwAkcHIAJJBkAKT8Z0ClNW8AJXlvACW4bwAl02QAphlkAKZfZACmrm8AJupCgCcbQ8Anb0KAPMiAICfXQ8AoK0PAKElCgCibQoAo2UKAKQNCgClpQ8ApgXUAKepDwComQ8AqZkPAKopDwCrKQ8ArDkPAK05DwCuKQ8ArykPALBZDwCxndEAspXRALOF1gC0sdEAtbHRALbZ1AC32dQAuOnUALnp1AC6+dQAu/nUALzp1AC96dQAvrnUAL+51ACASdUAgUnVAIJZ1QCDWdUAhEnVAIV90ACGddAAh23QAIhV0ACJXdAAinXVAIut1QCMtdUAjb3VAI611QCPQdAAkMHQAJHB0ACSwdAAk8HQAJTB0ACVwdAAlsHQAJfB0ACYwdAAmc3QAJrF0ACb3dAAnOHVAJ3pDgCe2Q4An9kOAKDV2wChwdkAotnZAKPB2QCkxdkApc3ZAKbF2QCnGdkAqGHZAKlh2QCqydkAq8nZAKzZ2QCt2dkArs3ZAK/B2QCwCdkAsRXZALId2QCzrdoAtB3ZALWx2gC2wdwAt93dALjl3QC59d0Auv3dALut3QC8td0AvaXdAL6t3QDwIQCAgvHaAIPx2gD3IgCA5OgAgIYR2ACHEdgAhOHaAIXh2gCKKdgAiynYAK9AEwClKNoAjinYAI8p2ACMKdgAjSnYAJJh2ACTYdgA6egAgO7oAICWZdgAl23YAJR12ACVbdg
Aml3YAJst2ADz6ACA8FwCALEw3wCR8AIAnCnYALLQAwCiOQ0Ao1GeAqAlDQChOQ0AplUNAIS8AgCkJQ0ApV0NAKptDQCrAQQAqGENAKlRAwCuuQAAp3UAAKxhDQCtxQIA+OgAgIfMAwDwVAIAzFC6AJHYBACb9NsAkRgCAJk02wCddAQAvh0AAJ9gBQCejAUAjOwCAI2sBAD96ACAvfWKAqghvwCpLb8Aqi2/AKs9vwCsKb8ArVW/AK5RvwCvTb8AoBkIAKGlvQCiIb8AozGzAKQ9vwClJb8Apg2zAKclvwC46bMAuc3LALppswC7uQkAvH0IAL2tCQC+QQwAv50JALA5vwCxhb0Asgm/ALPtywC0Gb8AtQW/ALbtswC3Bb8AiDG9AIkxvQCKrQgAiyW9AIwJCQCNvQgAjiW+AI+JDAAC6QCAgQ0JAIKlDACDUQkAhIEIAIWBCACGmQgAh60MAJhhvQCZYb0Amm0JAJsVnQKcxQ8AnQ28AJ7BDwCfcQkAkBW+AJERnwKSNZ8Ckw2fApQJvgCVCb4AlnG9AJdxvQCCuAQAl6UHALnEAwDwWAIAkUwCAJLIAgCErAQAsD0AAAzpAIAH6QCAvQUAABHpAIDwTAIAuhEAAJEkAgCN5AQAkqwCAJasAgC4uAMAudADAJb4AgCvDQAAFukAgPB4AgCRXAIAlrACAK8FAAAb6QCAIOkAgCnpAIAy6QCAP+kAgIX4AwBM6QCAh4ADAIbAAgBZ6QCAZukAgHPpAICW6QCAuzkAAHzpAICf6QCAiekAgL8dAAC+HQAAvR0AALwhAACVwB0AlMQfAJfIGgCWABgAkSAAAJDUAQCT2B4AkgAcAJ3gEgCcABAAn+gRAJ7sEwCZ8BkAmPQbAJv4FwCaABQAnnEBAJ9xAQCABQAArOkAgM0KAICwDACAXg0AgGQNAIBqDQCAdg0AgHkNAIB8DQCAfw0AgIINAICRDQCAlw0AgJoNAICdDQCAICIAgMcNAIDWDQCA/A0AgP8NAIAODgCAEQ4AgB0OAIAYIgCAMg4AgDUOAIDXFgCAEBcAgNoWAIC4ACwAuYwvALqILgC6AwCAhpwXAMx4vACEmC0AhVwXALcDAIDKAwCAiAAoAIksFADtBACAjAUAgN8FAIAaBgCAQAYAgFcGAIB0BgCAiwYAgDgBAIA8AQCAQAEAgEQBAIBIAQCATAEAgKR9AQBQAQCAonUBAKNlAQCggQEAoYEBALxxugC9kbYAvnG6AL+ltgC48bgAuXW6ALqZzgC7dboAtGG6ALVtugC2eboAt3W6ALAZugCxEboAsgm6ALMFugCsUboArXG2AK5RugCvbboAqNG4AKldugCqRbYAq1G6AKRxlgKlYZYCpnGWAqe9ugCgzZsCofG6AKLJugCjxboAnHmaAp0tugCeDc4An4WWApgJugCZtZYCmjm6AJuJtgCUMboA+CEAgJZpugCXrZYCkHm6AJE1ugCSMboAkwG6AIxJzgCN5bYAjhmaAo+hugCIoboAiUG2AIqhugCLdbYAhAG4AIWFugCGac4Ah4W6AICxugCBvboAgqm6AIOlugCAgbkAgQ27AIIVtwCDAbsAhAG7AIUhtwCGAbsAhz27AIgJuwCJAbsAihm7AIsVuwCMcbsAjX27AI5puwCPZbsAkKG5AJEluwCSyc8AkyW7AJQhuwCVwbcAliG7AJf1twCY6c8AmUW3AJq5mwKbAbsAnLm7AJ31uwCe8bsAn8G7AKARuwChCZQCokm7AKONlwKkCbsApbWXAqY5uwCnibcAqFmbAqkNuwCqLc8Aq6WXAqwNmgKtMbsArgm7AK8FuwCw0ZcCscGXArLRlwKzHbsAtFG5ALXduwC2xbcAt9G7ALjxuwC50bcAuvG7ALvNuwC82bsAvdG7AL7JuwC/xbsAgJmkAIEliAKCqaQAgxmoAFsNAICFvaQAhp3QAIcViAKInYUCiaGkAIqZpACLlaQAjCGIAo0xiAKOIYg
Cj+2kAJDBpgCRTaQAklWoAJNBpACUQaQAlWGoAJZBpACXfaQAmEmkAJlBpACaWaQAm1WkAJwxpACdPaQAnimkAJ8lpACgYaYAoeWkAKIJ0ACj5aQApOGkAKUBqACm4aQApzWoAKgp0ACphagAqnmEAqvBpACseaQArTWkAK4xpACvAaQAsFGkALFJiwKyCaQAs82IArRJpAC19YgCtnmkALfJqAC4GYQCuU2kALpt0AC75YgCvE2FAr1xpAC+SaQAv0WkAIARiQKBAYkCghGJAoPdpQCEkacAhR2lAFQBAICHEaUAiDGlAIkRqQCKMaUAWAEAgFwBAICNEaUAjgmlAI8FpQCQAaUAkQ2lAJIZpQCTFaUAlLGnAGABAICW2dEAlzWlAJgRpQCZ8akAmhGlAJvFqQCc+dEAZAEAgJ6phQKfEaUAoEmlAKEFpQCiAaUAozGlAKQBpQClGYoCplmlAKediQKoOaUAqYWJAqoJpQCruakArEmFAq0dpQCuPdEAr7WJArB9hAKxQaUAsnmlALN1pQC0wYkCtdGJArbBiQK3DaUAuGGnALntpQBoAQCAu+GlALzhpQC9wakAvuGlAGwBAIC3baYAttWGArUpqgC0hdIAs7mqALJtpgCxjaoAsG2mAL8higK+5aYAvaWJAnABAIC7jaYAdAEAgLm5pgC49aYAeAEAgKZ1pgClbaYAfAEAgIABAICiTaYAhAEAgIgBAICvCaYAruXSAIwBAICsjaQAqymmAKolpgCpMaYAkAEAgJc5pgCWNaYAlQ2mAJQxhwKTmYoCkhHSAJExpgCQZYYCn62mAJ65qgCUAQCAnC2kAJthpgCarYoCmb2KApitigKHfaYAhk2mAIVJpgCEBaYAg72mAIIFhgKB+aoAgFXSAI/1qgCORaYAjcmKAox1pgCL8YoCijWmAIl1iQKIbaYAgCmnAIEhpwCCOacAgzWnAIRRpwCYAQCAhkmnAJwBAIDMSIkCzYiJAoqp0wCLRacAjEGnAI2hqwCOQacAj5WrAJDJ0wBFIwCAkpmHApMhpwCUmacAldWnAJbRpwCX4acAmPGnAJnpiAKaqacAm22LApzppwCdVYsCntmnAJ9pqwCgeYcCoS2nAKIN0wCjhYsCpC2GAqURpwCmKacApyWnAKixiwKpoYsCqrGLAqt9pwCsMaUArb2nAK6lqwCvsacAsNGnALHxqwCy0acAs+2nALT5pwC18acAtumnALflpwC4oacAua2nALq5pwC7tacAvBGlAL2VpwC+edMAv5WnAICRoACBiY8CgsmgAIMNjAKEiaAAhTWMAoa5oACHCawAiNmAAomNoACKrdQAiyWMAoyNgQKNsaAAjomgAI+FoACQUYwCkUGMApJRjAKTnaAAlNGiAJVdoACWRawAl1GgAJhxoACZUawAmnGgAJtNoACcWaAAnVGgAJ5JoACfRaAAoMGgAKHNoACi2aAAo9WgAKRxogCl9aAAphnUAKf1oACo0aAAqTGsAKrRoACrBawArDnUAK2VrACuaYACr9GgALAJoACxRaAAskGgALNxoAC0QaAAtVmPArYZoAC33YwCuHmgALnFjAK6SaAAu/msALwJgAK9XaAAvn3UAL/1jAKAvYACgYGhAIK5oQCDtaEAhAGNAoURjQKGAY0Ch82hAIihowCJLaEAijWtAIshoQCMIaEAjQGtAI4hoQCPHaEAkGmhAJFhoQCSeaEAk3WhAJQRoQCVHaEAlgmhAJcFoQCYgaMAmQWhAJrp1QCbBaEAnAGhAJ3hrQCeAaEAn9WtAKAJ1QChpa0AolmBAqPhoQCkWaEApRWhAKYRoQCnIaEAqDGhAKkpjgKqaaEAq62NAqwpoQCtlY0CrhmhAK+prQCwOYECsW2hALJN1QCzxY0CtG2AArVRoQC2aaEAt2WhALjxjQK54Y0CuvGNArs9oQC8caMAvf2hAL7lrQC/8aEAs2miALKF1gCxaaIAsO2gALe5rgC2baI
AtY2uALRtogC7TaIAuvWCArkJrgC4pdYAv42iAL69ogC9uaIAvPWiAKNNogCiWa4AoUGiAKDNoACncaIApk2iAKVtrgCkTaIAq1miAKpVogCpTaIAqEWiAK8pogCuJaIArTGiAKw9ogCTla4AkiWiAJGpjgKQFaIAl5mOApYR1gCVMaIAlGWCApsZogCaFaIAmS2iAJgRgwKfYaIAnq2OAp29jgKcrY4Cg2muAIK9ogCBXa4AgL2iAIe9ogCGBYIChfmuAIRV1gCLXaIAim2iAIlpogCIJaIAj/GOAo41ogCNdY0CjG2iAIARowCBMa8AghGjAIMtowCEOaMAhTGjAIYpowCHJaMAiGGjAIltowCKeaMAi3WjAIzRoQCNVaMAjrnXAI9VowCQMaMAkdGvAJIxowCT5a8AlNnXAJV1rwCWiYMClzGjAJipowCZ5aMAmuGjAJvRowCc4aMAnfmMAp65owCffY8CoBmjAKGljwKiKaMAo5mvAKRpgwKlPaMAph3XAKeVjwKoHYICqSGjAKoZowCrFaMArKGPAq2xjwKuoY8Cr22jALBBoQCxzaMAstWvALPBowC0waMAteGvALbBowC3/aMAuMmjALnBowC62aMAu9WjALyxowC9vaMAvqmjAL+lowBnDQCA0QYAgG0NAIDIBwCAcw0AgA8HAICFDQCAlAcAgIsNAICaBwCAuA0AgH0HAIDKDQCAxQcAgAIOAIBPBwCAFA4AgFIHAIAgDgCAkB0AAOEGAIAPJACA4iUAgCguAICtLACAyS0AgKpVAACrKQAAMjcAgAErAIDGMACAsjIAgAEsAIBTLwCAmSsAgJ8wAIDtKwCAGjUAgI43AICtLQCA5SwAgGYyAIADMACALzAAgA44AIAjMACA+y8AgHI0AICAIa4AgaWsAIJJ2ACDpawAhKGsAIVBoACGoawAh3WgAIhp2ACJxaAAiv0AAIsxxgCM7QAAjdEAAI7VAACPyQAAgCmhAIFNFACCIQEAg+G4AoQ5qgCFOaoAhhG9AodRFACIEQEAidW4AorNrQCLLbsCjGEUAI3ZjQKObRQAj2UUAJB5AQCRubgCkkm9ApNFuwKUDRQAlTUUAJYZAQCXqbgCmF2qAJkBFACaIQEAmwUUAJx5vQKdhbgCnnm7Ap+JuAKggb0CoXm4AqKZCQCjlRQApFmuAKWJFACmmQEAp70UAKipAQCpvbsCqrkBAKuJFACsmRQArZkUAK6JFACviRQAsNkBALEJrgCy6QEAs9W7ArTNuwK17RQAtpW8ArfhFAC4oRQAuaEUALrBoQC7pRQAvNkBAL0ZuAK+0aoAv9GqAL9FFwC+RRcAvTUXALxBvwK7KRcAugm4ArkBuAK4PQIAt+2tALY9AgC1HRcAtB0XALMdFwCyHRcAsR0XALAtAgCvWbgCrk0CAK1pFwCsTQIAq00XAKqdrQCpQRcAqE0KAK40AIDRLACApX0XAKR9FwCjoa4Aom2CAqF9ggKgbYICnzmuAJ41rgCdDa4AnDGPApuZggKaEdoAmTGuAJhljgKXtaIAlgWuAJWJggKUNa4Ak7GCApJ1rgCRNYECkC2uAI99rgCOTa4AjUmuAIwFrgCLva4AigWOAon5ogCIVdoAh0miAIadrgCFfaIAhJ2uAIOZrgCCddoAgZmuAIAdrADMqIQCzUyGAswguQLNTLkCzECOAkYyAIDMmIUCzTyEAswQgwLNUIMCzKCDAs2MgwLMMIACzSSAAswYgALNhIACmjMAgAUsAIAxLQCAiSMAgE0jAIBXIwCAayMAgJMjAIB1IwCAnSMAgGEjAIB/IwCAzPC5As2EuQLMULgCzay7AoDNAACB1QAAgt0AAIPVAACEzQAAhfUAAIb9AACH9QAAiM0AAFcvAIDBLACA1SoAgM0qAIDdKgCAuekAgCErAICQZQAAkW0AAKiIKgA1KwCAPSsAgEUrAIBJKwCATSsAgKIAMACjzDMAoOg9AKHsPACm8DY
Ap/QoAKQANACl/DUAgFERAIHpiAKCXREAg1URAIQpBACF6b0Chhm4AocVvgKIfREAiUURAIppBACL2b0CjA2vAI1REQCOcQQAj1URAJBJuAKRtb0Ckkm+ApO5vQKUUbgClam9ApZJDACXRREAmKmrAJl5EQCaaQQAm00RAJx5BACdbb4CnmkEAJ9ZEQCgqREAoakRAKK5EQCjuREApIkEAKVZqwCmuQQAp4W+Aqi9vgKpnREAquW5AquREQCs8REArfERAK6RpACv9REAsOkEALEpvQKy4a8As+GvALTZuAK1mREAtukEALctvQK4BagAueW+Arq5EQC7AYgCvKURAL2tEQC+wQQAvwG9AoABuQKBDb8CglUQAINtEACEUQUAheG8AoYlrgCHeRAAiGkFAIlNEACKIbkCi928AowxvwKNwbwCjjm5Ao/BvAKQUQ0AkV0QAJKBqgCTURAAlFEFAJV1EACWUQUAl0W/AphxBQCZQRAAmkEQAJtBEACcQRAAnUEQAJ5hBQCfsaoAoKEFAKGdvwKilb8Co7UQAKTduAKlqRAAptkQAKfZEACoiaUAqe0QAKqBBQCrQbwCrJmuAK2ZrgCusbkCr/EQALDxBQCxNbwCsi2pALPNvwK0gRAAtTmJAraNEAC3hRAAuNkFALkZvAK66bkCu+W/ArytEAC9lRAAvrkFAL8JvAK5La0AuC2tALtFEwC6BboCveG/ArwlBgC/GbwCvvmqALEdEwCwabsCs20TALJtEwC1eRMAtB2mALfVvwK2FQYAqXUTAKh1EwCrhakAqlUGAK1JvAKsdQYAr2ETAK5BvAKhQRMAoGUGAKNxvAKiZQYApVUTAKRlBgCnVRMAplUTAJl1vwKYhbwCm3W/ApqNugKdiRMAnIUOAJ+FEwCeVakAkVW/ApDlBgCTzRMAkpGtAJXZEwCU/QYAl0m/Apa1ugKJmRMAiJETAIs1vwKK9QYAjdm8AozVugKPuRMAjoETAIGtEwCA7boCgxm/AoLdBgCF8bwChBGqAIcVigKGrRMAgD2sAIFhEgCCQQcAg2USAIQZuwKF5b4Chhm9AofpvgKIIbsCidm+AopFEgCLXRIAjSkAgM3pAICOzaoAj8mLApCdiwKRpYsCkrGqAJOxqgCU2akAldmpAJb5qQCX+akAmJWqAJmRiwKatYsCm42LApyJqgCdiaoAnvGpAJ/xqQCgIakAoSGpAKJ9qgCjeYsCpE2LAqV1iwKmYaoAp2GqAKgpqQCpKakAqgmpAKsJqQCsRaoArUGLAq5liwKvXYsCsDmqALE5qgCyQakAs0GpALRxqQC1cakAti2qALcpiwK4PYsCuQWLAroRqgC7EaoAvHmpAL15qQC+WakAv1mpAIKJIwBtKwCAcSsAgI0rAIC+6QCAh5kjAJEpAIB5KwCAyOkAgIu5JACpKwCAifkkAI6VIwCPiSMAsSsAgI2JJACSvSMAESsAgLkrAICR4SMAo+sAgJfFIwCU8SMA4SsAgJkpAICbkSMA+SsAgJndIwD9KwCAnwktAAksAICdjdUAogkjAJ0pAIBBLACAofUjAEUsAICnGSMApCUkAG0sAICq7SQAeSwAgKgdIwCpeSQArhUjAK8JIwCsCSQArQkkALI9IwCJLACAsDEjALFhIwC2VSMAt0UjALRxIwC1XSMAulkjALsRIwCRLACAuV0jAL6JLQCVLACAvI0tANzpAICAuSUAgX0iAIKBIgCDmSIAhK0lAIXZJQCGuSIAh5EiAIiVIgCJ8SUAljIAgIuxJQCMgSUAjYElAI6dIgCPgSIAkLkiAJHpIgCStSIAk9EiAJT5IgCV1SIAlt0iAJfNIgCY+SIAmdUiAJrRIgCbmSIAqSwAgLEsAIDh6QCAvSwAgGUAAACh/SIAogEiAKMZIgDFLACApVklAKY5IgCnESIAqBUiAKlxJQDNLACAqzElAKwBJQCtASUArh0iAK8BIgCwOSIAsWkiALI1IgCzUSI
AtHkiALVVIgC2XSIAt00iALh5IgC5VSIAulEiALsZIgD1LACA4SwAgO0sAIDxLACAgI0vAIGlLwCCrS8Ag70vAISlLwCFrS8AhqUvAIfdLwCI5S8Aie0vAIrlLwD5LACAAS0AgAUtAIANLQCAFS0AgJCRLwCRkS8AkpEvAJORLwCUsS8AlbEvAJa1LwCXRTMAmE0zAJlVMwCaPTMAmxkzAJyZMwCdiTMAnlUwAJ9JMACgwTAAockwAKLZMACj1TAApM0wAKX9MACm5TAApzUwAKi1MQCpuTEAqu0xAKuxmgCs0ZYArbE6AK61OgAZLQCAsEGUALHNlgCy1ZoAs8GWALTBlgC14ZoAtsGWALf9lgC4yZYAucGWALrZlgC71ZYAvLGWAL29lgC+qZYAv6WWAMUAAAChfSAAooEgACktAICkrScALS0AgDktAICnkSAAXS0AgKnxJwCqZScAq7EnAKyBJwCtgScArp0gAK+BIACwuSAAsekgALK1IABhLQCAtPkgALXVIAC23SAAt80gAEUtAIC51SAATS0AgLuZIACpLQCAcS0AgHUtAIB5LQCAgDknAIH9IACCASAAgxkgAG0tAICFWScAhjkgAIcRIACIFSAAiXEnAIrlJwCLMScAjAEnAI0BJwCOHSAAjwEgAJA5IACRaSAAkjUgAJNRIACUeSAAlVUgAJZdIACXTSAAmHkgAJlVIACaUSAAmxkgAJyFLgCdBdYAnoEuAJ+BLgCArT8AgbU/AIK9PwCDtT8AhK0/AIW5yACG1T8Ah80/AIj1PwCJ/T8AipnIAIvxPwCMATsAjQE7AI6NyACPOQQAkEkEAJFJBACSWQQAk1UEAJRNBACV3TwAlnkEAJd1BACYWQQAmSEEAJohBACbNdQAnCEEAJ3Z5gCeJQQAnx0EAKDpBACh9QQAos0/AKP1BACkFQQApfnUAKYhyACnIcgAqNHUAKktBACqOQQAq03CAKwtBACtdcgArh0EAK95BACwKQQAsTEEALI9BACzOQQAtC0EALX9BQC2qQUAt6kFALiZBQC5mQUAunkFALtFBQC8AQUAvQEFAL4BBQC/AQUAgC0HAIE1BwCCPQcAgzUHAIQtBwCFqQcAhqUHAIdl1QCILQYAiTEGAIoxBgCLDQYAjPnJAI15BgCOWQYAj1UGAJBpyQCRNQYAkj0GAJM1BgCULQYAlcUGAJZdAwCXVQMAmG0DAJl1AwCafQMAm3UDAJxtAwCdET0AnlkDAJ9ZAwCgqQMAoakDAKK5AwCjuQMApKkDAKWpAwCm2QMAp9kDAKjpAwCp6QMAqvkDAKv9AwCs5QMAre0DAK7lAwCvbcMAsKEDALGhAwCyoQMAs6EDALShAwC1zeYAtq0DALelAwC4yeYAuZkDALppAwC7aQMAvHkDAL15AwC+aQMAv2kDAIAAAACBLQCAfS0AgJUtAIDm6QCAsS0AgLUtAIC9LQCA0S0AgPQtAIDr6QCA8OkAgAAuAIAELgCACC4AgPwtAIAQLgCAoSkAgKUpAIAYLgCAIC4AgPXpAIA8LgCAQC4AgEwuAID66QCAVC4AgFguAIA3LwCAqSkAgGwuAICILgCAhC4AgATqAICQLgCACeoAgJwuAICYLgCAoC4AgLAuAIC0LgCArSkAgMQuAIDMLgCA0C4AgNQuAICxKQCADuoAgLUpAID3LgCA+y4AgP8uAIDV6wCAGOoAgNo1AIAvLwCAuSkAgDvqAIAN6wCAPy8AgEcvAIC9KQCAWy8AgGsvAICqIfQAq7U/AKilPwCpzecArkXwAK+hPwCsSfAArTH0AKJl4gCjvT8AoLk/AKG5PwCmlT8Ap50/AKSlPwClnT8Augk8AG8vAIC4CTwAuQk8AHcvAICHLwCAxSkAgMEpAICy3T8AswU9ALBN7wCx1T8Atn3wALe55AC0HT0AtWk8AB3qAICPLwCAoy8AgKcvAIC3LwCAyy8AgMMvAIDHLwCAgrX7AM8vAICA/T8
AgfU/AOMvAIDnLwCA/y8AgAcwAICavT8Am/3NAJi9PwCZtT8Anlk/AJ9ZPwCcWT8AnVk/AJKBPwCTaekAkHnkAJGxPwCWgT8Al4H0AJQh5wCVmT8AFzAAgCswAIAs6gCAJzAAgBswAIAzMACAOzAAgE8wAIAx6gCAVzAAgEoAAABLMACAQzAAgMkpAIBfMACAZzAAgG8wAIBjMACAzSkAgIcwAIA26gCAszAAgPUwAIDRMACA2SkAgNUpAIDRKQCAnSsAgKErAID5MACA4TAAgK41AIA9KgCADTEAgCExAIAZMQCAT+oAgN0pAIA1MQCAKTEAgFIxAIBZ6gCAXjEAgD0xAIBmMQCAajEAgG4xAIByMQCAfjEAgF7qAICGMQCA5SkAgJIxAIBj6gCAljEAgOkpAICiMQCArjEAgL4xAIBo6gCA/+kAgG3qAIDeMQCAcuoAgLgJAQC5CQEAuhkBALsZAQC8CQEAvQkBAL45AQC/OQEAsM3FALE1zACymQ4As5kOALSJDgC1iQ4AtjkBALc5AQCo6dkAqckOAKrZDgCrqcUArMUOAK3NDgCuxQ4Ar/kOAKA1DgChPQ4AojUOAKOxxQCk8Q4ApfEOAKbxDgCn8Q4AmGkPAJlpDwCaeQ8Am3kPAJxpDwCdaQ8Ant0OAJ/NDgCQ+eoAkXEPAJJ9DwCTdQ8AlG0PAJVpDwCWWQ8Al1kPAIh5DwCJeQ8AigkPAIsJDwCMGQ8AjRkPAI4NzACPDQ8AgHkPAIF5DwCCSQ8Ag0kPAIRZDwCFWQ8AhkkPAIdJDwCKUQIAi1ECAIj5xgCJQQIAjnECAI/txgCMQQIAjUECAIIVAgCDHQIAgAUCAIEdAgCGdQIAh30CAIQFAgCFfQIAmsUCAJvNAgCYkc8AmYXaAJ7FAgCfzQIAnNUCAJ3NAgCSDQIAkxUCAJANAgCRBQIAlg0CAJf1AgCUDQIAlQUCAKo9AgCrRQIAqD0CAKk1AgCuXQIAr0UCAKxdAgCtVQIAol3GAKMBAgCgNQIAoQ0CAKYBAgCnxdgApBECAKURAgC6OQIAuzkCALg5AgC5OQIAvtkBAL/ZAQC82QEAvdkBALI9AgCzBQIAsD0CALE1AgC2GQIAtxkCALQdAgC16cIA6jEAgPIxAIDiMQCA/jEAgA4yAIAWMgCAIjIAgCYyAIB36gCACjIAgD4yAIBCMgCA7SkAgFIyAIB86gCANjIAgHIyAICB6gCAhuoAgHYyAICKMgCAgjIAgPEpAICOMgCAnjIAgJoyAICmMgCAw+kAgLYyAICL6gCAwjIAgJXqAIDWMgCA9jIAgJrqAIAKMwCADjMAgJ/qAICk6gCAKjMAgDozAID1KQCAPjMAgPkpAIBWMwCAWjMAgGYzAIByMwCA/SkAgIozAICp6gCApjMAgK7qAIAT6gCAwjMAgLPqAIC4AAAAuOoAgL3qAIABKgCABSoAgMfqAIDC6gCAzOoAgIAB3gCB8QcAgvEHAIPxBwCEFQIAhR0CAIYVAgCHEQIAiCXeAIld3gCKOQIAizkCAIwpAgCNKQIAjhkCAI99ygCQTd4AkWECAJJhAgCT7cEAlH0CAJVlAgCWIcAAl2kCAJhZAgCZMcIAmlUCAJstAgCcNQIAnT0CAJ4xAgCfMQIAoNECAKHRAgCi0QIAo9ECAKTxAgCl8QIApvECAKfxAgCo0QIAqdECAKrRAgCr0QIArDECAK0xAgCuMQIArzECALBRAgCxUQIAslECALNRAgC0cQIAtXECALZxAgC3cQIAuFECALlRAgC6+dwAu1UCALxNAgC9NQIAvj0CAL81AgC+7QYAv/UGALztBgC95QYAuskGALvJBgC4xcsAuckGALbtBgC39QYAtO0GALXlBgCyjQYAs/UGALDR3QCxhQYArvEGAK/xBgCs5QYAreEGAKr1BgCr/QYAqMUGAKn9BgCm9QYAp/0GAKTlBgCl/QYAovUGAKP9BgCg+QYAoZ3dAJ75BgCf+QY
AnPkGAJ35BgCa+QYAm/kGAJj5BgCZ+QYAlvkGAJf5BgCUcd0AlfkGAJL9BgCT5QYAkP0GAJH1BgCO/QYAj4UGAIz9BgCN9QYAiuEGAIsB3QCI8QYAifEGAIbBBgCHwQYAhPEGAIXxBgCCkccAg+EGAIDpBgCBxcAAgAAAANHqAIACNACABjQAgBI0AIARKgCAFSoAgNvqAIAmNACAGSoAgODqAIDl6gCA6uoAgJY0AIAdKgCAojQAgKY0AIDv6gCA9OoAgL40AIAhKgCA+eoAgNI0AIDWNACAJSoAgP7qAIDyNACAKSoAgAI1AID6NACACjUAgAjrAIAiNQCALSoAgC41AIA2NQCARjUAgDEqAIAS6wCAF+sAgDUqAIAc6wCAXjUAgCHrAIBqNQCAdjUAgCbrAIAr6wCAkjUAgDDrAICaNQCAQOoAgDkqAICyNQCAtjUAgEEqAIC6NQCAFC4AgDXrAIA66wCAReoAgErqAIDeNQCA9jcAgIDNAQCB1QEAgt0BAIPVAQCEzQEAhfUBAIb9AQCH9QEAiM0BAInVAQCK3QEAi/UJAIzJAQCNyQEAjgEcAI89HwCQRR8AkU0fAJJFHwCTXR8AlEUfAJVNHwCWRR8Al30fAJhBxwCZQR8AmkEfAJtBHwCcQR8AnUEfAJ5BHwCfYd8AoL0fAKHFHwCizR8Ao8UfAKTdHwClxR8Aps0fAKfFHwCo/R8AqcUfAKrNHwCrxR8ArN0fAK3FHwCuzR8Ar8UfALC9HwCxRR8Ask0fALNFHwC0/ckAtVkfALZJHwC3SR8AuHkfALl5HwC6SR8Au8XdALxVHwC9XR8AvlUfAL9NHwAKNgCABjYAgA42AIAZLACAEjYAgBY2AIAaNgCAIjYAgD/rAIAmNgCAOjYAgD42AIAqNgCAQjYAgFY2AIA2NgCASjYAgE42AIBSNgCAROsAgE7rAIBJ6wCASSoAgHI2AIB2NgCAfjYAgGLrAICCNgCAU+sAgE0qAIBRKgCAWOsAgF3rAIBVKgCAojYAgKo2AICuNgCAujYAgLY2AIDCNgCAvjYAgMY2AIDKNgCA0jYAgFkqAIDaNgCA3jYAgF0qAIDuNgCAZ+sAgP42AIACNwCAYSoAgA43AICVKQCAbOsAgHHrAIBlKgCAaSoAgDo3AIB26wCAkjcAgJY3AICuNwCAgLUBAIG9AQCCtQEAg80BAITt9ACF0QEAhtEBAIfRAQCI8QEAifEBAIrxAQCL8QEAjNEBAI3RAQCO0QEAj9EBAJB9wwCRBcMAkl35AJO9AQCUpQEAla0BAJalAQCXXQMAmGUDAJltAwCaZQMAm30DAJxlAwCdbQMAnmUDAJ85wwCgoQMAoaEDAKKhAwCjoQMApKEDAKWhAwCmoQMAp6EDAKjhAwCp4QMAquEDAKvhAwCs4QMAreEDAK7hAwCv4QMAsKEDALGhAwCyoQMAs6EDALShAwC1oQMAtqEDALehAwC4YQMAuWEDALphAwC7YQMAvGEDAL1hAwC+pcMAv6HDALo3AICA6wCA0ukAgMY3AIDCNwCAzjcAgNfpAIDaNwCAhesAgIrrAIAmOACAMjgAgDo4AICP6wCAPjgAgGY4AIByOACAdjgAgG44AICCOACAhjgAgJTrAICSOACAbSoAgJo4AICZ6wCAcSoAgNI4AICkLgCA6jgAgJ7rAICo6wCAdSoAgHkqAIASOQCAresAgH0qAICy6wCAMjkAgLfrAIBKOQCAgSoAgFo5AIBmOQCAbjkAgHY5AICFKgCAvOsAgKY5AICyOQCAiSoAgI0qAIC2OQCAwesAgJEqAIDG6wCAy+sAgNDrAICVKgCA9jkAgPo5AIACOgCACjoAgNrrAICQ1QEAkd0BAJLVAQCT7QEAlPUBAJXB+wCW8QEAl/n7AJjNAQCZ1QEAmt0BAJvVAQCcyfsAnckBAEUqAICPAAAAgNkBAIHZAQCC6QEAg+kBAIT5AQCF+QEAhukBAIfpAQCI2QE
AidkBAIoJwQCLrQEAjLUBAI29AQCOtQEAj60BAKAAAAChAAAAogAAAKMAAACkAAAApQAAAKYAAACnAAAAqAAAAKkAAACqAAAAqwAAAKwAAACtAAAArgAAAK8AAACwAAAAsQAAALIAAACzAAAAtAAAALUAAAC2AAAAtwAAALgAAAC5AAAAugAAALsAAAC8AAAAvQAAAL4AAAC/AAAAACAAIMyBACDMgwAgzIQAIMyFACDMhgAgzIcAIMyIACDMiMyAACDMiMyBACDMiM2CACDMigAgzIsAIMyTACDMk8yAACDMk8yBACDMk82CACDMlAAgzJTMgAAgzJTMgQAgzJTNggAgzKcAIMyoACDMswAgzYIAIM2FACDZiwAg2YwAINmM2ZEAINmNACDZjdmRACDZjgAg2Y7ZkQAg2Y8AINmP2ZEAINmQACDZkNmRACDZkQAg2ZHZsAAg2ZIAIOOCmQAg44KaACEAISEAIT8AIgAjACQAJQAmACcAKAAoMSkAKDEwKQAoMTEpACgxMikAKDEzKQAoMTQpACgxNSkAKDE2KQAoMTcpACgxOCkAKDE5KQAoMikAKDIwKQAoMykAKDQpACg1KQAoNikAKDcpACg4KQAoOSkAKEEpAChCKQAoQykAKEQpAChFKQAoRikAKEcpAChIKQAoSSkAKEopAChLKQAoTCkAKE0pAChOKQAoTykAKFApAChRKQAoUikAKFMpAChUKQAoVSkAKFYpAChXKQAoWCkAKFkpAChaKQAoYSkAKGIpAChjKQAoZCkAKGUpAChmKQAoZykAKGgpAChpKQAoaikAKGspAChsKQAobSkAKG4pAChvKQAocCkAKHEpAChyKQAocykAKHQpACh1KQAodikAKHcpACh4KQAoeSkAKHopACjhhIApACjhhIIpACjhhIMpACjhhIUpACjhhIYpACjhhIcpACjhhIkpACjhhIspACjhhIwpACjhhI4pACjhhI8pACjhhJApACjhhJEpACjhhJIpACjkuIApACjkuIMpACjkuIkpACjkuZ0pACjkuowpACjkupQpACjku6MpACjkvIEpACjkvJEpACjlhaspACjlha0pACjlirQpACjljYEpACjljZQpACjlkI0pACjlkbwpACjlm5spACjlnJ8pACjlraYpACjml6UpACjmnIgpACjmnIkpACjmnKgpACjmoKopACjmsLQpACjngaspACjnibkpACjnm6MpACjnpL4pACjnpZ0pACjnpa0pACjoh6opACjoh7MpACjosqEpACjos4cpACjph5EpACjqsIApACjrgpgpACjri6QpACjrnbwpACjrp4gpACjrsJQpACjsgqwpACjslYQpACjsmKTsoIQpACjsmKTtm4QpACjsnpApACjso7wpACjssKgpACjsubQpACjtg4ApACjtjIwpACjtlZgpACkAKgArACwALQAuAC4uAC4uLgAvADAAMCwAMC4AMOKBhDMAMOeCuQAxADEsADEuADEwADEwLgAxMOaXpQAxMOaciAAxMOeCuQAxMQAxMS4AMTHml6UAMTHmnIgAMTHngrkAMTIAMTIuADEy5pelADEy5pyIADEy54K5ADEzADEzLgAxM+aXpQAxM+eCuQAxNAAxNC4AMTTml6UAMTTngrkAMTUAMTUuADE15pelADE154K5ADE2ADE2LgAxNuaXpQAxNueCuQAxNwAxNy4AMTfml6UAMTfngrkAMTgAMTguADE45pelADE454K5ADE5ADE5LgAxOeaXpQAxOeeCuQAx4oGEADHigYQxMAAx4oGEMgAx4oGEMwAx4oGENAAx4oGENQAx4oGENgAx4oGENwAx4oGEOAAx4oGEOQAx5pelADHmnIgAMeeCuQAyADIsADIuADIwADIwLgAyMOaXpQAyMOeCuQAyMQAyMeaXpQAyMeeCuQAyMgAyMuaXpQAyMueCuQAyMwAyM+aXpQAyM+eCuQAyNAAyNOaXpQAyNOe
CuQAyNQAyNeaXpQAyNgAyNuaXpQAyNwAyN+aXpQAyOAAyOOaXpQAyOQAyOeaXpQAy4oGEMwAy4oGENQAy5pelADLmnIgAMueCuQAzADMsADMuADMwADMw5pelADMxADMx5pelADMyADMzADM0ADM1ADM2ADM3ADM4ADM5ADPigYQ0ADPigYQ1ADPigYQ4ADPml6UAM+aciAAz54K5ADQANCwANC4ANDAANDEANDIANDMANDQANDUANDYANDcANDgANDkANOKBhDUANOaXpQA05pyIADTngrkANQA1LAA1LgA1MAA14oGENgA14oGEOAA15pelADXmnIgANeeCuQA2ADYsADYuADbml6UANuaciAA254K5ADcANywANy4AN+KBhDgAN+aXpQA35pyIADfngrkAOAA4LAA4LgA45pelADjmnIgAOOeCuQA5ADksADkuADnml6UAOeaciAA554K5ADoAOjo9ADsAPAA9AD09AD09PQA+AD8APyEAPz8AQABBAEFVAEHiiJVtAEIAQnEAQwBDRABDby4AQ+KIlWtnAEQAREoARFoARHoARMW9AETFvgBFAEYARkFYAEcAR0IAR0h6AEdQYQBHeQBIAEhQAEhWAEhnAEh6AEkASUkASUlJAElKAElVAElWAElYAEoASwBLQgBLSwBLTQBMAExKAExURABMagBMwrcATQBNQgBNQwBNRABNSHoATVBhAE1WAE1XAE3OqQBOAE5KAE5qAE5vAE8AUABQSABQUE0AUFBWAFBSAFBURQBQYQBRAFIAUnMAUwBTRABTTQBTUwBTdgBUAFRFTABUSHoAVE0AVQBWAFZJAFZJSQBWSUlJAFbiiJVtAFcAV0MAV1oAV2IAWABYSQBYSUkAWQBaAFsAXABdAF4AXwBgAGEAYS5tLgBhL2MAYS9zAGHKvgBiAGJhcgBjAGMvbwBjL3UAY2FsAGNjAGNkAGNtAGNtMgBjbTMAZABkQgBkYQBkbABkbQBkbTIAZG0zAGR6AGTFvgBlAGVWAGVyZwBmAGZmAGZmaQBmZmwAZmkAZmwAZm0AZwBnYWwAaABoUGEAaGEAaQBpaQBpaWkAaWoAaW4AaXYAaXgAagBrAGtBAGtIegBrUGEAa1YAa1cAa2NhbABrZwBrbABrbQBrbTIAa20zAGt0AGvOqQBsAGxqAGxtAGxuAGxvZwBseABswrcAbQBtMgBtMwBtQQBtVgBtVwBtYgBtZwBtaWwAbWwAbW0AbW0yAG1tMwBtb2wAbXMAbeKIlXMAbeKIlXMyAG4AbkEAbkYAblYAblcAbmoAbm0AbnMAbwBvVgBwAHAubS4AcEEAcEYAcFYAcFcAcGMAcHMAcQByAHJhZAByYWTiiJVzAHJhZOKIlXMyAHMAc3IAc3QAdAB1AHYAdmkAdmlpAHZpaWkAdwB4AHhpAHhpaQB5AHoAewB8AH0AwqIAwqMAwqUAwqYAwqwAwrBDAMKwRgDCtwDDgADDgQDDggDDgwDDhADDhQDDhgDDhwDDiADDiQDDigDDiwDDjADDjQDDjgDDjwDDkQDDkgDDkwDDlADDlQDDlgDDmQDDmgDDmwDDnADDnQDDoADDoQDDogDDowDDpADDpQDDpwDDqADDqQDDqgDDqwDDrADDrQDDrgDDrwDDsADDsQDDsgDDswDDtADDtQDDtgDDuQDDugDDuwDDvADDvQDDvwDEgADEgQDEggDEgwDEhADEhQDEhgDEhwDEiADEiQDEigDEiwDEjADEjQDEjgDEjwDEkgDEkwDElADElQDElgDElwDEmADEmQDEmgDEmwDEnADEnQDEngDEnwDEoADEoQDEogDEowDEpADEpQDEpgDEpwDEqADEqQDEqgDEqwDErADErQDErgDErwDEsADEsQDEtADEtQDEtgDEtwDEuQDEugDEuwDEvADEvQDEvgDFgwDFhADFhQDFhgDFhwDFiADFiwDFjADFjQDFjgDFjwDFkADFkQDFkwDFlAD
FlQDFlgDFlwDFmADFmQDFmgDFmwDFnADFnQDFngDFnwDFoADFoQDFogDFowDFpADFpQDFqADFqQDFqgDFqwDFrADFrQDFrgDFrwDFsADFsQDFsgDFswDFtADFtQDFtgDFtwDFuADFuQDFugDFuwDFvADFvQDFvgDGjgDGkADGoADGoQDGqwDGrwDGsADHjQDHjgDHjwDHkADHkQDHkgDHkwDHlADHlQDHlgDHlwDHmADHmQDHmgDHmwDHnADHngDHnwDHoADHoQDHogDHowDHpgDHpwDHqADHqQDHqgDHqwDHrADHrQDHrgDHrwDHsADHtADHtQDHuADHuQDHugDHuwDHvADHvQDHvgDHvwDIgADIgQDIggDIgwDIhADIhQDIhgDIhwDIiADIiQDIigDIiwDIjADIjQDIjgDIjwDIkADIkQDIkgDIkwDIlADIlQDIlgDIlwDImADImQDImgDImwDIngDInwDIogDIpgDIpwDIqADIqQDIqgDIqwDIrADIrQDIrgDIrwDIsADIsQDIsgDIswDItwDJkADJkQDJkgDJlADJlQDJmQDJmwDJnADJnwDJoQDJowDJpQDJpgDJqADJqQDJqgDJqwDJrQDJrwDJsADJsQDJsgDJswDJtADJtQDJuADJuQDJuwDKgQDKggDKgwDKiQDKigDKiwDKjADKkADKkQDKkgDKlQDKnQDKnwDKuQDKvG4AzIAAzIEAzIjMgQDMkwDOhgDOiADOiQDOigDOjADOjgDOjwDOkADOkQDOkgDOkwDOlADOlQDOlgDOlwDOmADOmQDOmgDOmwDOnADOnQDOngDOnwDOoADOoQDOowDOpADOpQDOpgDOpwDOqADOqQDOqgDOqwDOrADOrQDOrgDOrwDOsADOsQDOsgDOswDOtADOtQDOtgDOtwDOuADOuQDOugDOuwDOvADOvEEAzrxGAM68VgDOvFcAzrxnAM68bADOvG0AzrxzAM69AM6+AM6/AM+AAM+BAM+CAM+DAM+EAM+FAM+GAM+HAM+IAM+JAM+KAM+LAM+MAM+NAM+OAM+cAM+dANCAANCBANCDANCHANCMANCNANCOANCZANC5ANC9ANGKANGMANGQANGRANGTANGXANGcANGdANGeANG2ANG3ANOBANOCANOQANORANOSANOTANOWANOXANOaANObANOcANOdANOeANOfANOiANOjANOkANOlANOmANOnANOqANOrANOsANOtANOuANOvANOwANOxANOyANOzANO0ANO1ANO4ANO5ANWl1oIA1bTVpQDVtNWrANW01a0A1bTVtgDVvtW2ANeQANeQ1rcA15DWuADXkNa8ANeQ15wA15EA15HWvADXkda/ANeSANeS1rwA15MA15PWvADXlADXlNa8ANeV1rkA15XWvADXlta8ANeY1rwA15nWtADXmda8ANea1rwA15sA15vWvADXm9a/ANecANec1rwA150A157WvADXoNa8ANeh1rwA16IA16PWvADXpNa8ANek1r8A16bWvADXp9a8ANeoANeo1rwA16nWvADXqda814EA16nWvNeCANep14EA16nXggDXqgDXqta8ANey1rcA2KEA2KIA2KMA2KQA2KUA2KYA2KbYpwDYptisANim2K0A2KbYrgDYptixANim2LIA2KbZhQDYptmGANim2YcA2KbZiADYptmJANim2YoA2KbbhgDYptuHANim24gA2KbbkADYptuVANinANin2YPYqNixANin2YTZhNmHANin2YsA2KfZtADYqADYqNisANio2K0A2KjYrdmKANio2K4A2KjYrtmKANio2LEA2KjYsgDYqNmFANio2YYA2KjZhwDYqNmJANio2YoA2KkA2KoA2KrYrADYqtis2YUA2KrYrNmJANiq2KzZigDYqtitANiq2K3YrADYqtit2YUA2KrYrgDYqtiu2YUA2KrYrtmJANiq2K7ZigDYqtixANiq2LIA2KrZhQDYqtm
F2KwA2KrZhditANiq2YXYrgDYqtmF2YkA2KrZhdmKANiq2YYA2KrZhwDYqtmJANiq2YoA2KsA2KvYrADYq9ixANir2LIA2KvZhQDYq9mGANir2YcA2KvZiQDYq9mKANisANis2K0A2KzYrdmJANis2K3ZigDYrNmEINis2YTYp9mE2YcA2KzZhQDYrNmF2K0A2KzZhdmJANis2YXZigDYrNmJANis2YoA2K0A2K3YrADYrdis2YoA2K3ZhQDYrdmF2YkA2K3ZhdmKANit2YkA2K3ZigDYrgDYrtisANiu2K0A2K7ZhQDYrtmJANiu2YoA2K8A2LAA2LDZsADYsQDYsdiz2YjZhADYsdmwANix24zYp9mEANiyANizANiz2KwA2LPYrNitANiz2KzZiQDYs9itANiz2K3YrADYs9iuANiz2K7ZiQDYs9iu2YoA2LPYsQDYs9mFANiz2YXYrADYs9mF2K0A2LPZhdmFANiz2YcA2LPZiQDYs9mKANi0ANi02KwA2LTYrNmKANi02K0A2LTYrdmFANi02K3ZigDYtNiuANi02LEA2LTZhQDYtNmF2K4A2LTZhdmFANi02YcA2LTZiQDYtNmKANi1ANi12K0A2LXYrditANi12K3ZigDYtdiuANi12LEA2LXZhNi52YUA2LXZhNmJANi12YTZiSDYp9mE2YTZhyDYudmE2YrZhyDZiNiz2YTZhQDYtdmE25IA2LXZhQDYtdmF2YUA2LXZiQDYtdmKANi2ANi22KwA2LbYrQDYttit2YkA2LbYrdmKANi22K4A2LbYrtmFANi22LEA2LbZhQDYttmJANi22YoA2LcA2LfYrQDYt9mFANi32YXYrQDYt9mF2YUA2LfZhdmKANi32YkA2LfZigDYuADYuNmFANi5ANi52KwA2LnYrNmFANi52YTZitmHANi52YUA2LnZhdmFANi52YXZiQDYudmF2YoA2LnZiQDYudmKANi6ANi62KwA2LrZhQDYutmF2YUA2LrZhdmJANi62YXZigDYutmJANi62YoA2YDZiwDZgNmOANmA2Y7ZkQDZgNmPANmA2Y/ZkQDZgNmQANmA2ZDZkQDZgNmRANmA2ZIA2YEA2YHYrADZgditANmB2K4A2YHYrtmFANmB2YUA2YHZhdmKANmB2YkA2YHZigDZggDZgtitANmC2YTbkgDZgtmFANmC2YXYrQDZgtmF2YUA2YLZhdmKANmC2YkA2YLZigDZgwDZg9inANmD2KwA2YPYrQDZg9iuANmD2YQA2YPZhQDZg9mF2YUA2YPZhdmKANmD2YkA2YPZigDZhADZhNiiANmE2KMA2YTYpQDZhNinANmE2KwA2YTYrNisANmE2KzZhQDZhNis2YoA2YTYrQDZhNit2YUA2YTYrdmJANmE2K3ZigDZhNiuANmE2K7ZhQDZhNmFANmE2YXYrQDZhNmF2YoA2YTZhwDZhNmJANmE2YoA2YUA2YXYpwDZhdisANmF2KzYrQDZhdis2K4A2YXYrNmFANmF2KzZigDZhditANmF2K3YrADZhdit2YUA2YXYrdmF2K8A2YXYrdmKANmF2K4A2YXYrtisANmF2K7ZhQDZhdiu2YoA2YXZhQDZhdmF2YoA2YXZiQDZhdmKANmGANmG2KwA2YbYrNitANmG2KzZhQDZhtis2YkA2YbYrNmKANmG2K0A2YbYrdmFANmG2K3ZiQDZhtit2YoA2YbYrgDZhtixANmG2LIA2YbZhQDZhtmF2YkA2YbZhdmKANmG2YYA2YbZhwDZhtmJANmG2YoA2YcA2YfYrADZh9mFANmH2YXYrADZh9mF2YUA2YfZiQDZh9mKANmH2bAA2YgA2YjYs9mE2YUA2YjZtADZiQDZidmwANmKANmK2KwA2YrYrNmKANmK2K0A2YrYrdmKANmK2K4A2YrYsQDZitiyANmK2YUA2YrZhdmFANmK2YXZigDZitmGANmK2YcA2YrZiQDZitmKANm
K2bQA2a4A2a8A2bEA2bkA2boA2bsA2b4A2b8A2oAA2oMA2oQA2oYA2ocA2ogA2owA2o0A2o4A2pEA2pgA2qEA2qQA2qYA2qkA2q0A2q8A2rEA2rMA2roA2rsA2r4A24AA24EA24IA24UA24YA24cA24fZtADbiADbiQDbiwDbjADbkADbkgDbkwDgpJXgpLwA4KSW4KS8AOCkl+CkvADgpJzgpLwA4KSh4KS8AOCkouCkvADgpKkA4KSr4KS8AOCkr+CkvADgpLEA4KS0AOCmoeCmvADgpqLgprwA4Kav4Ka8AOCniwDgp4wA4KiW4Ki8AOCol+CovADgqJzgqLwA4Kir4Ki8AOCosuCovADgqLjgqLwA4Kyh4Ky8AOCsouCsvADgrYgA4K2LAOCtjADgrpQA4K+KAOCviwDgr4wA4LGIAOCzgADgs4cA4LOIAOCzigDgs4sA4LWKAOC1iwDgtYwA4LeaAOC3nADgt50A4LeeAOC5jeC4sgDguqvgupkA4Lqr4LqhAOC7jeC6sgDgvIsA4L2A4L61AOC9guC+twDgvYzgvrcA4L2R4L63AOC9luC+twDgvZvgvrcA4L2x4L2yAOC9seC9tADgvbHgvoAA4L6Q4L61AOC+kuC+twDgvpzgvrcA4L6h4L63AOC+puC+twDgvqvgvrcA4L6y4L2x4L6AAOC+suC+gADgvrPgvbHgvoAA4L6z4L6AAOGApgDhg5wA4YSAAOGEgQDhhIIA4YSDAOGEhADhhIUA4YSGAOGEhwDhhIgA4YSJAOGEigDhhIsA4YSMAOGEjQDhhI4A4YSPAOGEkADhhJEA4YSSAOGElADhhJUA4YSaAOGEnADhhJ0A4YSeAOGEoADhhKEA4YSiAOGEowDhhKcA4YSpAOGEqwDhhKwA4YStAOGErgDhhK8A4YSyAOGEtgDhhYAA4YWHAOGFjADhhZcA4YWYAOGFmQDhhaAA4YWhAOGFogDhhaMA4YWkAOGFpQDhhaYA4YWnAOGFqADhhakA4YWqAOGFqwDhhawA4YWtAOGFrgDhha8A4YWwAOGFsQDhhbIA4YWzAOGFtADhhbUA4YaEAOGGhQDhhogA4YaRAOGGkgDhhpQA4YaeAOGGoQDhhqoA4YasAOGGrQDhhrAA4YaxAOGGsgDhhrMA4Ya0AOGGtQDhh4cA4YeIAOGHjADhh44A4YeTAOGHlwDhh5kA4YedAOGHnwDhh7EA4YeyAOGshgDhrIgA4ayKAOGsjADhrI4A4aySAOGsuwDhrL0A4a2AAOGtgQDhrYMA4bSCAOG0lgDhtJcA4bScAOG0nQDhtKUA4bW7AOG2hQDhuIAA4biBAOG4ggDhuIMA4biEAOG4hQDhuIYA4biHAOG4iADhuIkA4biKAOG4iwDhuIwA4biNAOG4jgDhuI8A4biQAOG4kQDhuJIA4biTAOG4lADhuJUA4biWAOG4lwDhuJgA4biZAOG4mgDhuJsA4bicAOG4nQDhuJ4A4bifAOG4oADhuKEA4biiAOG4owDhuKQA4bilAOG4pgDhuKcA4bioAOG4qQDhuKoA4birAOG4rADhuK0A4biuAOG4rwDhuLAA4bixAOG4sgDhuLMA4bi0AOG4tQDhuLYA4bi3AOG4uADhuLkA4bi6AOG4uwDhuLwA4bi9AOG4vgDhuL8A4bmAAOG5gQDhuYIA4bmDAOG5hADhuYUA4bmGAOG5hwDhuYgA4bmJAOG5igDhuYsA4bmMAOG5jQDhuY4A4bmPAOG5kADhuZEA4bmSAOG5kwDhuZQA4bmVAOG5lgDhuZcA4bmYAOG5mQDhuZoA4bmbAOG5nADhuZ0A4bmeAOG5nwDhuaAA4bmhAOG5ogDhuaMA4bmkAOG5pQDhuaYA4bmnAOG5qADhuakA4bmqAOG5qwDhuawA4bmtAOG5rgDhua8A4bmwAOG5sQDhubIA4bmzAOG5tADhubUA4bm2AOG5twDhubgA4bm5AOG5ugDhubsA4bm
8AOG5vQDhub4A4bm/AOG6gADhuoEA4bqCAOG6gwDhuoQA4bqFAOG6hgDhuocA4bqIAOG6iQDhuooA4bqLAOG6jADhuo0A4bqOAOG6jwDhupAA4bqRAOG6kgDhupMA4bqUAOG6lQDhupYA4bqXAOG6mADhupkA4bqgAOG6oQDhuqIA4bqjAOG6pADhuqUA4bqmAOG6pwDhuqgA4bqpAOG6qgDhuqsA4bqsAOG6rQDhuq4A4bqvAOG6sADhurEA4bqyAOG6swDhurQA4bq1AOG6tgDhurcA4bq4AOG6uQDhuroA4bq7AOG6vADhur0A4bq+AOG6vwDhu4AA4buBAOG7ggDhu4MA4buEAOG7hQDhu4YA4buHAOG7iADhu4kA4buKAOG7iwDhu4wA4buNAOG7jgDhu48A4buQAOG7kQDhu5IA4buTAOG7lADhu5UA4buWAOG7lwDhu5gA4buZAOG7mgDhu5sA4bucAOG7nQDhu54A4bufAOG7oADhu6EA4buiAOG7owDhu6QA4bulAOG7pgDhu6cA4buoAOG7qQDhu6oA4burAOG7rADhu60A4buuAOG7rwDhu7AA4buxAOG7sgDhu7MA4bu0AOG7tQDhu7YA4bu3AOG7uADhu7kA4byAAOG8gQDhvIIA4byDAOG8hADhvIUA4byGAOG8hwDhvIgA4byJAOG8igDhvIsA4byMAOG8jQDhvI4A4byPAOG8kADhvJEA4bySAOG8kwDhvJQA4byVAOG8mADhvJkA4byaAOG8mwDhvJwA4bydAOG8oADhvKEA4byiAOG8owDhvKQA4bylAOG8pgDhvKcA4byoAOG8qQDhvKoA4byrAOG8rADhvK0A4byuAOG8rwDhvLAA4byxAOG8sgDhvLMA4by0AOG8tQDhvLYA4by3AOG8uADhvLkA4by6AOG8uwDhvLwA4by9AOG8vgDhvL8A4b2AAOG9gQDhvYIA4b2DAOG9hADhvYUA4b2IAOG9iQDhvYoA4b2LAOG9jADhvY0A4b2QAOG9kQDhvZIA4b2TAOG9lADhvZUA4b2WAOG9lwDhvZkA4b2bAOG9nQDhvZ8A4b2gAOG9oQDhvaIA4b2jAOG9pADhvaUA4b2mAOG9pwDhvagA4b2pAOG9qgDhvasA4b2sAOG9rQDhva4A4b2vAOG9sADhvbIA4b20AOG9tgDhvbgA4b26AOG9vADhvoAA4b6BAOG+ggDhvoMA4b6EAOG+hQDhvoYA4b6HAOG+iADhvokA4b6KAOG+iwDhvowA4b6NAOG+jgDhvo8A4b6QAOG+kQDhvpIA4b6TAOG+lADhvpUA4b6WAOG+lwDhvpgA4b6ZAOG+mgDhvpsA4b6cAOG+nQDhvp4A4b6fAOG+oADhvqEA4b6iAOG+owDhvqQA4b6lAOG+pgDhvqcA4b6oAOG+qQDhvqoA4b6rAOG+rADhvq0A4b6uAOG+rwDhvrAA4b6xAOG+sgDhvrMA4b60AOG+tgDhvrcA4b64AOG+uQDhvroA4b68AOG/ggDhv4MA4b+EAOG/hgDhv4cA4b+IAOG/igDhv4wA4b+QAOG/kQDhv5IA4b+WAOG/lwDhv5gA4b+ZAOG/mgDhv6AA4b+hAOG/ogDhv6QA4b+lAOG/pgDhv6cA4b+oAOG/qQDhv6oA4b+sAOG/sgDhv7MA4b+0AOG/tgDhv7cA4b+4AOG/ugDhv7wA4oCQAOKAkwDigJQA4oCy4oCyAOKAsuKAsuKAsgDigLLigLLigLLigLIA4oC14oC1AOKAteKAteKAtQDigqkA4oaQAOKGkQDihpIA4oaTAOKGmgDihpsA4oauAOKHjQDih44A4oePAOKIggDiiIQA4oiHAOKIiQDiiIwA4oiRAOKIkgDiiKQA4oimAOKIq+KIqwDiiKviiKviiKsA4oir4oir4oir4oirAOKIruKIrgDiiK7iiK7iiK4A4omBAOKJhADiiYcA4omJAOKJoADiiaIA4omtAOKJrgD
iia8A4omwAOKJsQDiibQA4om1AOKJuADiibkA4oqAAOKKgQDiioQA4oqFAOKKiADiiokA4oqsAOKKrQDiiq4A4oqvAOKLoADii6EA4ouiAOKLowDii6oA4ourAOKLrADii60A4pSCAOKWoADil4sA4qaFAOKmhgDiq53MuADitaEA44CBAOOAggDjgIgA44CJAOOAigDjgIsA44CMAOOAjQDjgI4A44CPAOOAkADjgJEA44CSAOOAlADjgJRT44CVAOOAlOS4ieOAlQDjgJTkuozjgJUA44CU5Yud44CVAOOAlOWuieOAlQDjgJTmiZPjgJUA44CU5pWX44CVAOOAlOacrOOAlQDjgJTngrnjgJUA44CU55uX44CVAOOAlQDjgJYA44CXAOOBjADjgY4A44GQAOOBkgDjgZQA44GWAOOBmADjgZoA44GcAOOBngDjgaAA44GiAOOBpQDjgacA44GpAOOBsADjgbEA44GzAOOBtADjgbYA44G3AOOBuQDjgboA44G744GLAOOBvADjgb0A44KI44KKAOOClADjgpkA44KaAOOCngDjgqEA44KiAOOCouODkeODvOODiADjgqLjg6vjg5XjgqEA44Ki44Oz44Oa44KiAOOCouODvOODqwDjgqMA44KkAOOCpOODi+ODs+OCsADjgqTjg7Pjg4EA44KlAOOCpgDjgqbjgqnjg7MA44KnAOOCqADjgqjjgrnjgq/jg7zjg4kA44Ko44O844Kr44O8AOOCqQDjgqoA44Kq44Oz44K5AOOCquODvOODoADjgqsA44Kr44Kk44OqAOOCq+ODqeODg+ODiADjgqvjg63jg6rjg7wA44KsAOOCrOODreODswDjgqzjg7Pjg54A44KtAOOCreODpeODquODvADjgq3jg60A44Kt44Ot44Kw44Op44OgAOOCreODreODoeODvOODiOODqwDjgq3jg63jg6/jg4Pjg4gA44KuAOOCruOCrADjgq7jg4vjg7wA44Ku44Or44OA44O8AOOCrwDjgq/jg6vjgrzjgqTjg60A44Kv44Ot44O844ONAOOCsADjgrDjg6njg6AA44Kw44Op44Og44OI44OzAOOCsQDjgrHjg7zjgrkA44KyAOOCswDjgrPjgrMA44Kz44OIAOOCs+ODq+ODigDjgrPjg7zjg50A44K0AOOCtQDjgrXjgqTjgq/jg6sA44K144Oz44OB44O844OgAOOCtgDjgrcA44K344Oq44Oz44KwAOOCuADjgrkA44K6AOOCuwDjgrvjg7Pjg4EA44K744Oz44OIAOOCvADjgr0A44K+AOOCvwDjg4AA44OA44O844K5AOODgQDjg4IA44ODAOODhADjg4UA44OGAOODhwDjg4fjgrcA44OIAOODiOODswDjg4kA44OJ44OrAOODigDjg4rjg44A44OLAOODjADjg40A44OOAOODjuODg+ODiADjg48A44OP44Kk44OEAOODkADjg5Djg7zjg6zjg6sA44ORAOODkeODvOOCu+ODs+ODiADjg5Hjg7zjg4QA44OSAOODkwDjg5Pjg6sA44OUAOODlOOCouOCueODiOODqwDjg5Tjgq/jg6sA44OU44KzAOODlQDjg5XjgqHjg6njg4Pjg4kA44OV44Kj44O844OIAOODleODqeODswDjg5YA44OW44OD44K344Kn44OrAOODlwDjg5gA44OY44Kv44K/44O844OrAOODmOODq+ODhADjg5kA44OZ44O844K/AOODmgDjg5rjgr0A44Oa44OL44OSAOODmuODs+OCuQDjg5rjg7zjgrgA44ObAOODm+ODswDjg5vjg7zjg6sA44Ob44O844OzAOODnADjg5zjg6vjg4gA44OdAOODneOCpOODs+ODiADjg53jg7Pjg4kA44OeAOODnuOCpOOCr+ODrQDjg57jgqTjg6sA44Oe44OD44OPAOODnuODq+OCrwDjg57jg7Pjgrf
jg6fjg7MA44OfAOODn+OCr+ODreODswDjg5/jg6oA44Of44Oq44OQ44O844OrAOODoADjg6EA44Oh44KsAOODoeOCrOODiOODswDjg6Hjg7zjg4jjg6sA44OiAOODowDjg6QA44Ok44O844OJAOODpOODvOODqwDjg6UA44OmAOODpuOCouODswDjg6cA44OoAOODqQDjg6oA44Oq44OD44OI44OrAOODquODqQDjg6sA44Or44OU44O8AOODq+ODvOODluODqwDjg6wA44Os44OgAOODrOODs+ODiOOCsuODswDjg60A44OvAOODr+ODg+ODiADjg7AA44OxAOODsgDjg7MA44O0AOODtwDjg7gA44O5AOODugDjg7sA44O8AOODvgDjkp4A45K5AOOSuwDjk58A45SVAOObrgDjm7wA456BAOOgrwDjoaIA46G8AOOjhwDjo6MA46ScAOOkugDjqK4A46msAOOrpADjrIgA46yZAOOtiQDjrp0A47CYAOOxjgDjtLMA47aWAOO6rADjurgA47ybAOO/vADkgIgA5ICYAOSAuQDkgYYA5IKWAOSDowDkhK8A5IiCAOSIpwDkiqAA5IyBAOSMtADkjZkA5I+VAOSPmQDkkIsA5JGrAOSUqwDklZ0A5JWhAOSVqwDkl5cA5Je5AOSYtQDkmr4A5JuHAOSmlQDkp6YA5KmuAOSptgDkqrIA5KyzAOSvjgDks44A5LOtAOSzuADktZYA5LiAAOS4gQDkuIMA5LiJAOS4igDkuIsA5LiNAOS4mQDkuKYA5LioAOS4rQDkuLIA5Li2AOS4uADkuLkA5Li9AOS4vwDkuYEA5LmZAOS5nQDkuoIA5LqFAOS6hgDkuowA5LqUAOS6oADkuqQA5LquAOS6ugDku4AA5LuMAOS7pADkvIEA5LyRAOS9oADkvoAA5L6GAOS+iwDkvq4A5L67AOS+vwDlgIIA5YCrAOWBugDlgpkA5YOPAOWDmgDlg6cA5YSqAOWEvwDlhYAA5YWFAOWFjQDlhZQA5YWkAOWFpQDlhacA5YWoAOWFqQDlhasA5YWtAOWFtwDlhoAA5YaCAOWGjQDlhpIA5YaVAOWGlgDlhpcA5YaZAOWGpADlhqsA5YasAOWGtQDlhrcA5YeJAOWHjADlh5wA5YeeAOWHoADlh7UA5YiAAOWIgwDliIcA5YiXAOWInQDliKkA5Yi6AOWIuwDliYYA5YmNAOWJsgDlibcA5YqJAOWKmwDliqMA5YqzAOWKtADli4cA5YuJAOWLkgDli54A5YukAOWLtQDli7kA5Yu6AOWMhQDljIYA5YyVAOWMlwDljJoA5Yy4AOWMuwDljL8A5Y2BAOWNhADljYUA5Y2JAOWNkQDljZQA5Y2aAOWNnADljakA5Y2wAOWNswDljbUA5Y29AOWNvwDljoIA5Y62AOWPgwDlj4gA5Y+KAOWPjADlj58A5Y+jAOWPpQDlj6sA5Y+vAOWPsQDlj7MA5ZCGAOWQiADlkI0A5ZCPAOWQnQDlkLgA5ZC5AOWRggDlkYgA5ZGoAOWSngDlkqIA5ZK9AOWTtgDllJAA5ZWPAOWVkwDllZUA5ZWjAOWWhADllocA5ZaZAOWWnQDllqsA5ZazAOWWtgDll4AA5ZeCAOWXogDlmIYA5ZmRAOWZqADlmbQA5ZuXAOWbmwDlm7kA5ZyWAOWclwDlnJ8A5ZywAOWeiwDln44A5Z+0AOWgjQDloLEA5aCyAOWhgADloZoA5aGeAOWiqADloqwA5aKzAOWjmADlo58A5aOrAOWjrgDlo7AA5aOyAOWjtwDlpIIA5aSGAOWkigDlpJUA5aSaAOWknADlpKIA5aSnAOWkp+atowDlpKkA5aWEAOWliADlpZEA5aWUAOWlogDlpbMA5aeYAOWnrADlqJsA5ainAOWpogDlqaYA5aq1AOWsiADlrKgA5ay+AOWtkADlrZcA5a2mAOWugADlroUA5a6XAOWvgwDlr5g
A5a+nAOWvrgDlr7MA5a+4AOWvvwDlsIYA5bCPAOWwogDlsLgA5bC/AOWxoADlsaIA5bGkAOWxpQDlsa4A5bGxAOWyjQDls4AA5bSZAOW1gwDltZAA5bWrAOW1rgDltbwA5bayAOW2ugDlt5sA5behAOW3ogDlt6UA5bemAOW3sQDlt70A5be+AOW4qADluL0A5bmpAOW5sgDlubPmiJAA5bm0AOW5ugDlubwA5bm/AOW6pgDlurAA5bqzAOW6tgDlu4kA5buKAOW7kgDlu5MA5buZAOW7rADlu7QA5bu+AOW8hADlvIsA5byTAOW8ogDlvZAA5b2TAOW9oQDlvaIA5b2pAOW9qwDlvbMA5b6LAOW+jADlvpcA5b6aAOW+qQDlvq0A5b+DAOW/jQDlv5cA5b+1AOW/uQDmgJIA5oCcAOaBtQDmgoEA5oKUAOaDhwDmg5gA5oOhAOaEiADmhYQA5oWIAOaFjADmhY4A5oWgAOaFqADmhboA5oaOAOaGkADmhqQA5oavAOaGsgDmh54A5oeyAOaHtgDmiIAA5oiIAOaIkADmiJsA5oiuAOaItADmiLYA5omLAOaJkwDmiZ0A5oqVAOaKsQDmi4kA5ouPAOaLkwDmi5QA5ou8AOaLvgDmjIcA5oy9AOaNkADmjZUA5o2oAOaNuwDmjoMA5o6gAOaOqQDmj4QA5o+FAOaPpADmkJwA5pCiAOaRkgDmkakA5pG3AOaRvgDmkpoA5pKdAOaThADmlK8A5pS0AOaVjwDmlZYA5pWsAOaVuADmlocA5paXAOaWmQDmlqQA5pawAOaWuQDml4UA5pegAOaXogDml6MA5pelAOaYjuayuwDmmJMA5pigAOaYreWSjADmmYkA5pm0AOaaiADmmpEA5pqcAOaatADmm4YA5puwAOabtADmm7gA5pyAAOaciADmnIkA5pyXAOacmwDmnKEA5pyoAOadjgDmnZMA5p2WAOadngDmnbsA5p6FAOaelwDmn7MA5p+6AOaglwDmoJ8A5qCqAOagquW8j+S8muekvgDmoZIA5qKBAOaihQDmoo4A5qKoAOaklADmpYIA5qajAOanqgDmqIIA5qiTAOaqqADmq5MA5qubAOashADmrKAA5qyhAOatlADmraIA5q2jAOatsgDmrbcA5q25AOaunwDmrq4A5q6zAOauugDmrrsA5q+LAOavjQDmr5QA5q+bAOawjwDmsJQA5rC0AOaxjgDmsacA5rKIAOayvwDms4wA5rONAOazpQDms6gA5rSWAOa0mwDmtJ4A5rS0AOa0vgDmtYEA5rWpAOa1qgDmtbcA5rW4AOa2hQDmt4sA5reaAOa3qgDmt7kA5riaAOa4rwDmua4A5rqAAOa6nADmuroA5ruHAOa7iwDmu5EA5rubAOa8jwDmvJQA5ryiAOa8owDmva4A5r+GAOa/qwDmv74A54CbAOeAngDngLkA54GKAOeBqwDngbAA54G3AOeBvQDngpkA54KtAOeDiADng5kA54ShAOeFhQDnhYkA54WuAOeGnADnh44A54eQAOeIkADniJsA54ioAOeIqgDniKsA54i1AOeItgDniLsA54i/AOeJhwDniZAA54mZAOeJmwDniaIA54m5AOeKgADnipUA54qsAOeKrwDni4AA54u8AOeMqgDnjbUA5426AOeOhADnjocA546JAOeOiwDnjqUA546yAOePngDnkIYA55CJAOeQogDnkYcA55GcAOeRqQDnkbEA55KFAOeSiQDnkpgA55OKAOeTnADnk6YA55SGAOeUmADnlJ8A55SkAOeUqADnlLAA55SyAOeUswDnlLcA55S7AOeUvgDnlZkA55WlAOeVsADnlosA55aSAOeXogDnmJAA55idAOeYnwDnmYIA55mpAOeZtgDnmb0A55quAOeavwDnm4oA55ubAOebowDnm6cA55uuAOebtADnnIEA55yeAOecnwDnnYAA552KAOeeiwDnnqcA55+bAOe
fogDnn7MA56GOAOehqwDnoowA56KRAOejigDno4wA56O7AOekqgDnpLoA56S8AOekvgDnpYgA56WJAOelkADnpZYA56WdAOelngDnpaUA56W/AOemgQDnpo0A56aOAOemjwDnpq4A56a4AOemvgDnp4oA56eYAOenqwDnqJwA56mAAOepigDnqY8A56m0AOepugDnqoEA56qxAOeriwDnq64A56u5AOesoADnro8A56+AAOevhgDnr4kA57C+AOexoADnsbMA57G7AOeykgDnsr4A57OSAOezlgDns6MA57OnAOezqADns7gA57SAAOe0kADntKIA57SvAOe1ggDntZsA57WjAOe2oADntr4A57eHAOe3tADnuIIA57iJAOe4twDnuYEA57mFAOe8tgDnvL4A572RAOe9sgDnvbkA5726AOe+hQDnvooA576VAOe+mgDnvr0A57+6AOiAgQDogIUA6ICMAOiAkgDogLMA6IGGAOiBoADoga8A6IGwAOiBvgDogb8A6IKJAOiCiwDogq0A6IKyAOiEgwDohL4A6IeYAOiHowDoh6gA6IeqAOiHrQDoh7MA6Ie8AOiIgQDoiIQA6IiMAOiImADoiJsA6IifAOiJrgDoia8A6ImyAOiJuADoibkA6IqLAOiKkQDoip0A6IqxAOiKswDoir0A6IulAOiLpgDojJ0A6IyjAOiMtgDojZIA6I2TAOiNowDojq0A6I69AOiPiQDoj4oA6I+MAOiPnADoj6cA6I+vAOiPsQDokL0A6JGJAOiRlwDok64A6JOxAOiTswDok7wA6JSWAOiVpADol40A6Je6AOiYhgDomJIA6JitAOiYvwDomY0A6JmQAOiZnADomacA6JmpAOiZqwDomogA6JqpAOibogDonI4A6JyoAOidqwDonbkA6J6GAOieugDon6EA6KCBAOignwDooYAA6KGMAOihoADooaMA6KOCAOijjwDoo5cA6KOeAOijoQDoo7gA6KO6AOikkADopYEA6KWkAOilvgDopoYA6KaLAOimlgDop5IA6KejAOiogADoqqAA6KqqAOiqvwDoq4sA6KuSAOirlgDoq60A6Ku4AOirvgDorIEA6Ky5AOitmADoroAA6K6KAOiwtwDosYYA6LGIAOixlQDosbgA6LKdAOiyoQDosqkA6LKrAOizgQDos4IA6LOHAOiziADos5MA6LSIAOi0mwDotaQA6LWwAOi1twDotrMA6La8AOi3iwDot68A6LewAOi6qwDou4oA6LuUAOi8pgDovKoA6Ly4AOi8uwDovaIA6L6bAOi+ngDovrAA6L61AOi+tgDpgKMA6YC4AOmBigDpgakA6YGyAOmBvADpgo8A6YKRAOmClADpg44A6YOeAOmDsQDpg70A6YSRAOmEmwDphYkA6YWqAOmGmQDphrQA6YeGAOmHjADph48A6YeRAOmItADpiLgA6Ym2AOmJvADpi5cA6YuYAOmMhADpjYoA6Y+5AOmQlQDplbcA6ZaAAOmWiwDplq0A6Za3AOmYnADpmK4A6ZmLAOmZjQDpmbUA6Zm4AOmZvADpmoYA6ZqjAOmatgDpmrcA6Zq4AOmauQDpm4MA6ZuiAOmbowDpm6gA6Zu2AOmbtwDpnKMA6ZyyAOmdiADpnZEA6Z2WAOmdngDpnaIA6Z2pAOmfiwDpn5sA6Z+gAOmfrQDpn7MA6Z+/AOmggQDpoIUA6aCLAOmgmADpoKkA6aC7AOmhngDpoqgA6aObAOmjnwDpo6IA6aOvAOmjvADppKgA6aSpAOmmlgDpppkA6aanAOmmrADpp4IA6aexAOmnvgDpqaoA6aqoAOmrmADpq58A6aySAOmspQDprK8A6ayyAOmsvADprZoA6a2vAOmxgADpsZcA6bOlAOmzvQDptacA6ba0AOm3ugDpuJ4A6bm1AOm5vwDpupcA6bqfAOm6pQDpursA6buDAOm7jQDpu44A6buRAOm7uQDpu70A6bu+AOm
8hQDpvI4A6byPAOm8kwDpvJYA6bygAOm8uwDpvYMA6b2KAOm9kgDpvo0A6b6OAOm+nADpvp8A6b6gAOqcpwDqna8A6qy3AOqtkgDqsIAA6rCBAOqwggDqsIMA6rCEAOqwhQDqsIYA6rCHAOqwiADqsIkA6rCKAOqwiwDqsIwA6rCNAOqwjgDqsI8A6rCQAOqwkQDqsJIA6rCTAOqwlADqsJUA6rCWAOqwlwDqsJgA6rCZAOqwmgDqsJsA6rCcAOqwnQDqsJ4A6rCfAOqwoADqsKEA6rCiAOqwowDqsKQA6rClAOqwpgDqsKcA6rCoAOqwqQDqsKoA6rCrAOqwrADqsK0A6rCuAOqwrwDqsLAA6rCxAOqwsgDqsLMA6rC0AOqwtQDqsLYA6rC3AOqwuADqsLkA6rC6AOqwuwDqsLwA6rC9AOqwvgDqsL8A6rGAAOqxgQDqsYIA6rGDAOqxhADqsYUA6rGGAOqxhwDqsYgA6rGJAOqxigDqsYsA6rGMAOqxjQDqsY4A6rGPAOqxkADqsZEA6rGSAOqxkwDqsZQA6rGVAOqxlgDqsZcA6rGYAOqxmQDqsZoA6rGbAOqxnADqsZ0A6rGeAOqxnwDqsaAA6rGhAOqxogDqsaMA6rGkAOqxpQDqsaYA6rGnAOqxqADqsakA6rGqAOqxqwDqsawA6rGtAOqxrgDqsa8A6rGwAOqxsQDqsbIA6rGzAOqxtADqsbUA6rG2AOqxtwDqsbgA6rG5AOqxugDqsbsA6rG8AOqxvQDqsb4A6rG/AOqygADqsoEA6rKCAOqygwDqsoQA6rKFAOqyhgDqsocA6rKIAOqyiQDqsooA6rKLAOqyjADqso0A6rKOAOqyjwDqspAA6rKRAOqykgDqspMA6rKUAOqylQDqspYA6rKXAOqymADqspkA6rKaAOqymwDqspwA6rKdAOqyngDqsp8A6rKgAOqyoQDqsqIA6rKjAOqypADqsqUA6rKmAOqypwDqsqgA6rKpAOqyqgDqsqsA6rKsAOqyrQDqsq4A6rKvAOqysADqsrEA6rKyAOqyswDqsrQA6rK1AOqytgDqsrcA6rK4AOqyuQDqsroA6rK7AOqyvADqsr0A6rK+AOqyvwDqs4AA6rOBAOqzggDqs4MA6rOEAOqzhQDqs4YA6rOHAOqziADqs4kA6rOKAOqziwDqs4wA6rONAOqzjgDqs48A6rOQAOqzkQDqs5IA6rOTAOqzlADqs5UA6rOWAOqzlwDqs5gA6rOZAOqzmgDqs5sA6rOcAOqznQDqs54A6rOfAOqzoADqs6EA6rOiAOqzowDqs6QA6rOlAOqzpgDqs6cA6rOoAOqzqQDqs6oA6rOrAOqzrADqs60A6rOuAOqzrwDqs7AA6rOxAOqzsgDqs7MA6rO0AOqztQDqs7YA6rO3AOqzuADqs7kA6rO6AOqzuwDqs7wA6rO9AOqzvgDqs78A6rSAAOq0gQDqtIIA6rSDAOq0hADqtIUA6rSGAOq0hwDqtIgA6rSJAOq0igDqtIsA6rSMAOq0jQDqtI4A6rSPAOq0kADqtJEA6rSSAOq0kwDqtJQA6rSVAOq0lgDqtJcA6rSYAOq0mQDqtJoA6rSbAOq0nADqtJ0A6rSeAOq0nwDqtKAA6rShAOq0ogDqtKMA6rSkAOq0pQDqtKYA6rSnAOq0qADqtKkA6rSqAOq0qwDqtKwA6rStAOq0rgDqtK8A6rSwAOq0sQDqtLIA6rSzAOq0tADqtLUA6rS2AOq0twDqtLgA6rS5AOq0ugDqtLsA6rS8AOq0vQDqtL4A6rS/AOq1gADqtYEA6rWCAOq1gwDqtYQA6rWFAOq1hgDqtYcA6rWIAOq1iQDqtYoA6rWLAOq1jADqtY0A6rWOAOq1jwDqtZAA6rWRAOq1kgDqtZMA6rWUAOq1lQDqtZYA6rWXAOq1mADqtZkA6rWaAOq1mwDqtZwA6rWdAOq1ngDqtZ8A6rWgAOq1oQDqtaIA6rWjAOq
1pADqtaUA6rWmAOq1pwDqtagA6rWpAOq1qgDqtasA6rWsAOq1rQDqta4A6rWvAOq1sADqtbEA6rWyAOq1swDqtbQA6rW1AOq1tgDqtbcA6rW4AOq1uQDqtboA6rW7AOq1vADqtb0A6rW+AOq1vwDqtoAA6raBAOq2ggDqtoMA6raEAOq2hQDqtoYA6raHAOq2iADqtokA6raKAOq2iwDqtowA6raNAOq2jgDqto8A6raQAOq2kQDqtpIA6raTAOq2lADqtpUA6raWAOq2lwDqtpgA6raZAOq2mgDqtpsA6racAOq2nQDqtp4A6rafAOq2oADqtqEA6raiAOq2owDqtqQA6ralAOq2pgDqtqcA6raoAOq2qQDqtqoA6rarAOq2rADqtq0A6rauAOq2rwDqtrAA6raxAOq2sgDqtrMA6ra0AOq2tQDqtrYA6ra3AOq2uADqtrkA6ra6AOq2uwDqtrwA6ra9AOq2vgDqtr8A6reAAOq3gQDqt4IA6reDAOq3hADqt4UA6reGAOq3hwDqt4gA6reJAOq3igDqt4sA6reMAOq3jQDqt44A6rePAOq3kADqt5EA6reSAOq3kwDqt5QA6reVAOq3lgDqt5cA6reYAOq3mQDqt5oA6rebAOq3nADqt50A6reeAOq3nwDqt6AA6rehAOq3ogDqt6MA6rekAOq3pQDqt6YA6renAOq3qADqt6kA6reqAOq3qwDqt6wA6retAOq3rgDqt68A6rewAOq3sQDqt7IA6rezAOq3tADqt7UA6re2AOq3twDqt7gA6re5AOq3ugDqt7sA6re8AOq3vQDqt74A6re/AOq4gADquIEA6riCAOq4gwDquIQA6riFAOq4hgDquIcA6riIAOq4iQDquIoA6riLAOq4jADquI0A6riOAOq4jwDquJAA6riRAOq4kgDquJMA6riUAOq4lQDquJYA6riXAOq4mADquJkA6riaAOq4mwDquJwA6ridAOq4ngDquJ8A6rigAOq4oQDquKIA6rijAOq4pADquKUA6rimAOq4pwDquKgA6ripAOq4qgDquKsA6risAOq4rQDquK4A6rivAOq4sADquLEA6riyAOq4swDquLQA6ri1AOq4tgDquLcA6ri4AOq4uQDquLoA6ri7AOq4vADquL0A6ri+AOq4vwDquYAA6rmBAOq5ggDquYMA6rmEAOq5hQDquYYA6rmHAOq5iADquYkA6rmKAOq5iwDquYwA6rmNAOq5jgDquY8A6rmQAOq5kQDquZIA6rmTAOq5lADquZUA6rmWAOq5lwDquZgA6rmZAOq5mgDquZsA6rmcAOq5nQDquZ4A6rmfAOq5oADquaEA6rmiAOq5owDquaQA6rmlAOq5pgDquacA6rmoAOq5qQDquaoA6rmrAOq5rADqua0A6rmuAOq5rwDqubAA6rmxAOq5sgDqubMA6rm0AOq5tQDqubYA6rm3AOq5uADqubkA6rm6AOq5uwDqubwA6rm9AOq5vgDqub8A6rqAAOq6gQDquoIA6rqDAOq6hADquoUA6rqGAOq6hwDquogA6rqJAOq6igDquosA6rqMAOq6jQDquo4A6rqPAOq6kADqupEA6rqSAOq6kwDqupQA6rqVAOq6lgDqupcA6rqYAOq6mQDqupoA6rqbAOq6nADqup0A6rqeAOq6nwDquqAA6rqhAOq6ogDquqMA6rqkAOq6pQDquqYA6rqnAOq6qADquqkA6rqqAOq6qwDquqwA6rqtAOq6rgDquq8A6rqwAOq6sQDqurIA6rqzAOq6tADqurUA6rq2AOq6twDqurgA6rq5AOq6ugDqursA6rq8AOq6vQDqur4A6rq/AOq7gADqu4EA6ruCAOq7gwDqu4QA6ruFAOq7hgDqu4cA6ruIAOq7iQDqu4oA6ruLAOq7jADqu40A6ruOAOq7jwDqu5AA6ruRAOq7kgDqu5MA6ruUAOq7lQDqu5YA6ruXAOq7mADqu5kA6ruaAOq
7mwDqu5wA6rudAOq7ngDqu58A6rugAOq7oQDqu6IA6rujAOq7pADqu6UA6rumAOq7pwDqu6gA6rupAOq7qgDqu6sA6rusAOq7rQDqu64A6ruvAOq7sADqu7EA6ruyAOq7swDqu7QA6ru1AOq7tgDqu7cA6ru4AOq7uQDqu7oA6ru7AOq7vADqu70A6ru+AOq7vwDqvIAA6ryBAOq8ggDqvIMA6ryEAOq8hQDqvIYA6ryHAOq8iADqvIkA6ryKAOq8iwDqvIwA6ryNAOq8jgDqvI8A6ryQAOq8kQDqvJIA6ryTAOq8lADqvJUA6ryWAOq8lwDqvJgA6ryZAOq8mgDqvJsA6rycAOq8nQDqvJ4A6ryfAOq8oADqvKEA6ryiAOq8owDqvKQA6rylAOq8pgDqvKcA6ryoAOq8qQDqvKoA6ryrAOq8rADqvK0A6ryuAOq8rwDqvLAA6ryxAOq8sgDqvLMA6ry0AOq8tQDqvLYA6ry3AOq8uADqvLkA6ry6AOq8uwDqvLwA6ry9AOq8vgDqvL8A6r2AAOq9gQDqvYIA6r2DAOq9hADqvYUA6r2GAOq9hwDqvYgA6r2JAOq9igDqvYsA6r2MAOq9jQDqvY4A6r2PAOq9kADqvZEA6r2SAOq9kwDqvZQA6r2VAOq9lgDqvZcA6r2YAOq9mQDqvZoA6r2bAOq9nADqvZ0A6r2eAOq9nwDqvaAA6r2hAOq9ogDqvaMA6r2kAOq9pQDqvaYA6r2nAOq9qADqvakA6r2qAOq9qwDqvawA6r2tAOq9rgDqva8A6r2wAOq9sQDqvbIA6r2zAOq9tADqvbUA6r22AOq9twDqvbgA6r25AOq9ugDqvbsA6r28AOq9vQDqvb4A6r2/AOq+gADqvoEA6r6CAOq+gwDqvoQA6r6FAOq+hgDqvocA6r6IAOq+iQDqvooA6r6LAOq+jADqvo0A6r6OAOq+jwDqvpAA6r6RAOq+kgDqvpMA6r6UAOq+lQDqvpYA6r6XAOq+mADqvpkA6r6aAOq+mwDqvpwA6r6dAOq+ngDqvp8A6r6gAOq+oQDqvqIA6r6jAOq+pADqvqUA6r6mAOq+pwDqvqgA6r6pAOq+qgDqvqsA6r6sAOq+rQDqvq4A6r6vAOq+sADqvrEA6r6yAOq+swDqvrQA6r61AOq+tgDqvrcA6r64AOq+uQDqvroA6r67AOq+vADqvr0A6r6+AOq+vwDqv4AA6r+BAOq/ggDqv4MA6r+EAOq/hQDqv4YA6r+HAOq/iADqv4kA6r+KAOq/iwDqv4wA6r+NAOq/jgDqv48A6r+QAOq/kQDqv5IA6r+TAOq/lADqv5UA6r+WAOq/lwDqv5gA6r+ZAOq/mgDqv5sA6r+cAOq/nQDqv54A6r+fAOq/oADqv6EA6r+iAOq/owDqv6QA6r+lAOq/pgDqv6cA6r+oAOq/qQDqv6oA6r+rAOq/rADqv60A6r+uAOq/rwDqv7AA6r+xAOq/sgDqv7MA6r+0AOq/tQDqv7YA6r+3AOq/uADqv7kA6r+6AOq/uwDqv7wA6r+9AOq/vgDqv78A64CAAOuAgQDrgIIA64CDAOuAhADrgIUA64CGAOuAhwDrgIgA64CJAOuAigDrgIsA64CMAOuAjQDrgI4A64CPAOuAkADrgJEA64CSAOuAkwDrgJQA64CVAOuAlgDrgJcA64CYAOuAmQDrgJoA64CbAOuAnADrgJ0A64CeAOuAnwDrgKAA64ChAOuAogDrgKMA64CkAOuApQDrgKYA64CnAOuAqADrgKkA64CqAOuAqwDrgKwA64CtAOuArgDrgK8A64CwAOuAsQDrgLIA64CzAOuAtADrgLUA64C2AOuAtwDrgLgA64C5AOuAugDrgLsA64C8AOuAvQDrgL4A64C/AOuBgADrgYEA64GCAOuBgwDrgYQA64GFAOuBhgDrgYcA64GIAOuBiQDrgYoA64GLAOuBjADrgY0A64GOAOuBjwDrgZAA64GRAOu
BkgDrgZMA64GUAOuBlQDrgZYA64GXAOuBmADrgZkA64GaAOuBmwDrgZwA64GdAOuBngDrgZ8A64GgAOuBoQDrgaIA64GjAOuBpADrgaUA64GmAOuBpwDrgagA64GpAOuBqgDrgasA64GsAOuBrQDrga4A64GvAOuBsADrgbEA64GyAOuBswDrgbQA64G1AOuBtgDrgbcA64G4AOuBuQDrgboA64G7AOuBvADrgb0A64G+AOuBvwDrgoAA64KBAOuCggDrgoMA64KEAOuChQDrgoYA64KHAOuCiADrgokA64KKAOuCiwDrgowA64KNAOuCjgDrgo8A64KQAOuCkQDrgpIA64KTAOuClADrgpUA64KWAOuClwDrgpgA64KZAOuCmgDrgpsA64KcAOuCnQDrgp4A64KfAOuCoADrgqEA64KiAOuCowDrgqQA64KlAOuCpgDrgqcA64KoAOuCqQDrgqoA64KrAOuCrADrgq0A64KuAOuCrwDrgrAA64KxAOuCsgDrgrMA64K0AOuCtQDrgrYA64K3AOuCuADrgrkA64K6AOuCuwDrgrwA64K9AOuCvgDrgr8A64OAAOuDgQDrg4IA64ODAOuDhADrg4UA64OGAOuDhwDrg4gA64OJAOuDigDrg4sA64OMAOuDjQDrg44A64OPAOuDkADrg5EA64OSAOuDkwDrg5QA64OVAOuDlgDrg5cA64OYAOuDmQDrg5oA64ObAOuDnADrg50A64OeAOuDnwDrg6AA64OhAOuDogDrg6MA64OkAOuDpQDrg6YA64OnAOuDqADrg6kA64OqAOuDqwDrg6wA64OtAOuDrgDrg68A64OwAOuDsQDrg7IA64OzAOuDtADrg7UA64O2AOuDtwDrg7gA64O5AOuDugDrg7sA64O8AOuDvQDrg74A64O/AOuEgADrhIEA64SCAOuEgwDrhIQA64SFAOuEhgDrhIcA64SIAOuEiQDrhIoA64SLAOuEjADrhI0A64SOAOuEjwDrhJAA64SRAOuEkgDrhJMA64SUAOuElQDrhJYA64SXAOuEmADrhJkA64SaAOuEmwDrhJwA64SdAOuEngDrhJ8A64SgAOuEoQDrhKIA64SjAOuEpADrhKUA64SmAOuEpwDrhKgA64SpAOuEqgDrhKsA64SsAOuErQDrhK4A64SvAOuEsADrhLEA64SyAOuEswDrhLQA64S1AOuEtgDrhLcA64S4AOuEuQDrhLoA64S7AOuEvADrhL0A64S+AOuEvwDrhYAA64WBAOuFggDrhYMA64WEAOuFhQDrhYYA64WHAOuFiADrhYkA64WKAOuFiwDrhYwA64WNAOuFjgDrhY8A64WQAOuFkQDrhZIA64WTAOuFlADrhZUA64WWAOuFlwDrhZgA64WZAOuFmgDrhZsA64WcAOuFnQDrhZ4A64WfAOuFoADrhaEA64WiAOuFowDrhaQA64WlAOuFpgDrhacA64WoAOuFqQDrhaoA64WrAOuFrADrha0A64WuAOuFrwDrhbAA64WxAOuFsgDrhbMA64W0AOuFtQDrhbYA64W3AOuFuADrhbkA64W6AOuFuwDrhbwA64W9AOuFvgDrhb8A64aAAOuGgQDrhoIA64aDAOuGhADrhoUA64aGAOuGhwDrhogA64aJAOuGigDrhosA64aMAOuGjQDrho4A64aPAOuGkADrhpEA64aSAOuGkwDrhpQA64aVAOuGlgDrhpcA64aYAOuGmQDrhpoA64abAOuGnADrhp0A64aeAOuGnwDrhqAA64ahAOuGogDrhqMA64akAOuGpQDrhqYA64anAOuGqADrhqkA64aqAOuGqwDrhqwA64atAOuGrgDrhq8A64awAOuGsQDrhrIA64azAOuGtADrhrUA64a2AOuGtwDrhrgA64a5AOuGugDrhrsA64a8AOuGvQDrhr4A64a/AOuHgADrh4EA64eCAOuHgwDrh4QA64eFAOuHhgDrh4cA64eIAOu
HiQDrh4oA64eLAOuHjADrh40A64eOAOuHjwDrh5AA64eRAOuHkgDrh5MA64eUAOuHlQDrh5YA64eXAOuHmADrh5kA64eaAOuHmwDrh5wA64edAOuHngDrh58A64egAOuHoQDrh6IA64ejAOuHpADrh6UA64emAOuHpwDrh6gA64epAOuHqgDrh6sA64esAOuHrQDrh64A64evAOuHsADrh7EA64eyAOuHswDrh7QA64e1AOuHtgDrh7cA64e4AOuHuQDrh7oA64e7AOuHvADrh70A64e+AOuHvwDriIAA64iBAOuIggDriIMA64iEAOuIhQDriIYA64iHAOuIiADriIkA64iKAOuIiwDriIwA64iNAOuIjgDriI8A64iQAOuIkQDriJIA64iTAOuIlADriJUA64iWAOuIlwDriJgA64iZAOuImgDriJsA64icAOuInQDriJ4A64ifAOuIoADriKEA64iiAOuIowDriKQA64ilAOuIpgDriKcA64ioAOuIqQDriKoA64irAOuIrADriK0A64iuAOuIrwDriLAA64ixAOuIsgDriLMA64i0AOuItQDriLYA64i3AOuIuADriLkA64i6AOuIuwDriLwA64i9AOuIvgDriL8A64mAAOuJgQDriYIA64mDAOuJhADriYUA64mGAOuJhwDriYgA64mJAOuJigDriYsA64mMAOuJjQDriY4A64mPAOuJkADriZEA64mSAOuJkwDriZQA64mVAOuJlgDriZcA64mYAOuJmQDriZoA64mbAOuJnADriZ0A64meAOuJnwDriaAA64mhAOuJogDriaMA64mkAOuJpQDriaYA64mnAOuJqADriakA64mqAOuJqwDriawA64mtAOuJrgDria8A64mwAOuJsQDribIA64mzAOuJtADribUA64m2AOuJtwDribgA64m5AOuJugDribsA64m8AOuJvQDrib4A64m/AOuKgADrioEA64qCAOuKgwDrioQA64qFAOuKhgDriocA64qIAOuKiQDriooA64qLAOuKjADrio0A64qOAOuKjwDripAA64qRAOuKkgDripMA64qUAOuKlQDripYA64qXAOuKmADripkA64qaAOuKmwDripwA64qdAOuKngDrip8A64qgAOuKoQDriqIA64qjAOuKpADriqUA64qmAOuKpwDriqgA64qpAOuKqgDriqsA64qsAOuKrQDriq4A64qvAOuKsADrirEA64qyAOuKswDrirQA64q1AOuKtgDrircA64q4AOuKuQDriroA64q7AOuKvADrir0A64q+AOuKvwDri4AA64uBAOuLggDri4MA64uEAOuLhQDri4YA64uHAOuLiADri4kA64uKAOuLiwDri4wA64uNAOuLjgDri48A64uQAOuLkQDri5IA64uTAOuLlADri5UA64uWAOuLlwDri5gA64uZAOuLmgDri5sA64ucAOuLnQDri54A64ufAOuLoADri6EA64uiAOuLowDri6QA64ulAOuLpgDri6cA64uoAOuLqQDri6oA64urAOuLrADri60A64uuAOuLrwDri7AA64uxAOuLsgDri7MA64u0AOuLtQDri7YA64u3AOuLuADri7kA64u6AOuLuwDri7wA64u9AOuLvgDri78A64yAAOuMgQDrjIIA64yDAOuMhADrjIUA64yGAOuMhwDrjIgA64yJAOuMigDrjIsA64yMAOuMjQDrjI4A64yPAOuMkADrjJEA64ySAOuMkwDrjJQA64yVAOuMlgDrjJcA64yYAOuMmQDrjJoA64ybAOuMnADrjJ0A64yeAOuMnwDrjKAA64yhAOuMogDrjKMA64ykAOuMpQDrjKYA64ynAOuMqADrjKkA64yqAOuMqwDrjKwA64ytAOuMrgDrjK8A64ywAOuMsQDrjLIA64yzAOuMtADrjLUA64y2AOuMtwDrjLgA64y5AOuMugDrjLsA64y8AOuMvQDrjL4A64y/AOu
NgADrjYEA642CAOuNgwDrjYQA642FAOuNhgDrjYcA642IAOuNiQDrjYoA642LAOuNjADrjY0A642OAOuNjwDrjZAA642RAOuNkgDrjZMA642UAOuNlQDrjZYA642XAOuNmADrjZkA642aAOuNmwDrjZwA642dAOuNngDrjZ8A642gAOuNoQDrjaIA642jAOuNpADrjaUA642mAOuNpwDrjagA642pAOuNqgDrjasA642sAOuNrQDrja4A642vAOuNsADrjbEA642yAOuNswDrjbQA6421AOuNtgDrjbcA6424AOuNuQDrjboA6427AOuNvADrjb0A642+AOuNvwDrjoAA646BAOuOggDrjoMA646EAOuOhQDrjoYA646HAOuOiADrjokA646KAOuOiwDrjowA646NAOuOjgDrjo8A646QAOuOkQDrjpIA646TAOuOlADrjpUA646WAOuOlwDrjpgA646ZAOuOmgDrjpsA646cAOuOnQDrjp4A646fAOuOoADrjqEA646iAOuOowDrjqQA646lAOuOpgDrjqcA646oAOuOqQDrjqoA646rAOuOrADrjq0A646uAOuOrwDrjrAA646xAOuOsgDrjrMA6460AOuOtQDrjrYA6463AOuOuADrjrkA6466AOuOuwDrjrwA6469AOuOvgDrjr8A64+AAOuPgQDrj4IA64+DAOuPhADrj4UA64+GAOuPhwDrj4gA64+JAOuPigDrj4sA64+MAOuPjQDrj44A64+PAOuPkADrj5EA64+SAOuPkwDrj5QA64+VAOuPlgDrj5cA64+YAOuPmQDrj5oA64+bAOuPnADrj50A64+eAOuPnwDrj6AA64+hAOuPogDrj6MA64+kAOuPpQDrj6YA64+nAOuPqADrj6kA64+qAOuPqwDrj6wA64+tAOuPrgDrj68A64+wAOuPsQDrj7IA64+zAOuPtADrj7UA64+2AOuPtwDrj7gA64+5AOuPugDrj7sA64+8AOuPvQDrj74A64+/AOuQgADrkIEA65CCAOuQgwDrkIQA65CFAOuQhgDrkIcA65CIAOuQiQDrkIoA65CLAOuQjADrkI0A65COAOuQjwDrkJAA65CRAOuQkgDrkJMA65CUAOuQlQDrkJYA65CXAOuQmADrkJkA65CaAOuQmwDrkJwA65CdAOuQngDrkJ8A65CgAOuQoQDrkKIA65CjAOuQpADrkKUA65CmAOuQpwDrkKgA65CpAOuQqgDrkKsA65CsAOuQrQDrkK4A65CvAOuQsADrkLEA65CyAOuQswDrkLQA65C1AOuQtgDrkLcA65C4AOuQuQDrkLoA65C7AOuQvADrkL0A65C+AOuQvwDrkYAA65GBAOuRggDrkYMA65GEAOuRhQDrkYYA65GHAOuRiADrkYkA65GKAOuRiwDrkYwA65GNAOuRjgDrkY8A65GQAOuRkQDrkZIA65GTAOuRlADrkZUA65GWAOuRlwDrkZgA65GZAOuRmgDrkZsA65GcAOuRnQDrkZ4A65GfAOuRoADrkaEA65GiAOuRowDrkaQA65GlAOuRpgDrkacA65GoAOuRqQDrkaoA65GrAOuRrADrka0A65GuAOuRrwDrkbAA65GxAOuRsgDrkbMA65G0AOuRtQDrkbYA65G3AOuRuADrkbkA65G6AOuRuwDrkbwA65G9AOuRvgDrkb8A65KAAOuSgQDrkoIA65KDAOuShADrkoUA65KGAOuShwDrkogA65KJAOuSigDrkosA65KMAOuSjQDrko4A65KPAOuSkADrkpEA65KSAOuSkwDrkpQA65KVAOuSlgDrkpcA65KYAOuSmQDrkpoA65KbAOuSnADrkp0A65KeAOuSnwDrkqAA65KhAOuSogDrkqMA65KkAOuSpQDrkqYA65KnAOuSqADrkqkA65KqAOuSqwDrkqwA65KtAOuSrgDrkq8A65KwAOuSsQDrkrIA65KzAOuStADrkrUA65K2AOu
StwDrkrgA65K5AOuSugDrkrsA65K8AOuSvQDrkr4A65K/AOuTgADrk4EA65OCAOuTgwDrk4QA65OFAOuThgDrk4cA65OIAOuTiQDrk4oA65OLAOuTjADrk40A65OOAOuTjwDrk5AA65ORAOuTkgDrk5MA65OUAOuTlQDrk5YA65OXAOuTmADrk5kA65OaAOuTmwDrk5wA65OdAOuTngDrk58A65OgAOuToQDrk6IA65OjAOuTpADrk6UA65OmAOuTpwDrk6gA65OpAOuTqgDrk6sA65OsAOuTrQDrk64A65OvAOuTsADrk7EA65OyAOuTswDrk7QA65O1AOuTtgDrk7cA65O4AOuTuQDrk7oA65O7AOuTvADrk70A65O+AOuTvwDrlIAA65SBAOuUggDrlIMA65SEAOuUhQDrlIYA65SHAOuUiADrlIkA65SKAOuUiwDrlIwA65SNAOuUjgDrlI8A65SQAOuUkQDrlJIA65STAOuUlADrlJUA65SWAOuUlwDrlJgA65SZAOuUmgDrlJsA65ScAOuUnQDrlJ4A65SfAOuUoADrlKEA65SiAOuUowDrlKQA65SlAOuUpgDrlKcA65SoAOuUqQDrlKoA65SrAOuUrADrlK0A65SuAOuUrwDrlLAA65SxAOuUsgDrlLMA65S0AOuUtQDrlLYA65S3AOuUuADrlLkA65S6AOuUuwDrlLwA65S9AOuUvgDrlL8A65WAAOuVgQDrlYIA65WDAOuVhADrlYUA65WGAOuVhwDrlYgA65WJAOuVigDrlYsA65WMAOuVjQDrlY4A65WPAOuVkADrlZEA65WSAOuVkwDrlZQA65WVAOuVlgDrlZcA65WYAOuVmQDrlZoA65WbAOuVnADrlZ0A65WeAOuVnwDrlaAA65WhAOuVogDrlaMA65WkAOuVpQDrlaYA65WnAOuVqADrlakA65WqAOuVqwDrlawA65WtAOuVrgDrla8A65WwAOuVsQDrlbIA65WzAOuVtADrlbUA65W2AOuVtwDrlbgA65W5AOuVugDrlbsA65W8AOuVvQDrlb4A65W/AOuWgADrloEA65aCAOuWgwDrloQA65aFAOuWhgDrlocA65aIAOuWiQDrlooA65aLAOuWjADrlo0A65aOAOuWjwDrlpAA65aRAOuWkgDrlpMA65aUAOuWlQDrlpYA65aXAOuWmADrlpkA65aaAOuWmwDrlpwA65adAOuWngDrlp8A65agAOuWoQDrlqIA65ajAOuWpADrlqUA65amAOuWpwDrlqgA65apAOuWqgDrlqsA65asAOuWrQDrlq4A65avAOuWsADrlrEA65ayAOuWswDrlrQA65a1AOuWtgDrlrcA65a4AOuWuQDrlroA65a7AOuWvADrlr0A65a+AOuWvwDrl4AA65eBAOuXggDrl4MA65eEAOuXhQDrl4YA65eHAOuXiADrl4kA65eKAOuXiwDrl4wA65eNAOuXjgDrl48A65eQAOuXkQDrl5IA65eTAOuXlADrl5UA65eWAOuXlwDrl5gA65eZAOuXmgDrl5sA65ecAOuXnQDrl54A65efAOuXoADrl6EA65eiAOuXowDrl6QA65elAOuXpgDrl6cA65eoAOuXqQDrl6oA65erAOuXrADrl60A65euAOuXrwDrl7AA65exAOuXsgDrl7MA65e0AOuXtQDrl7YA65e3AOuXuADrl7kA65e6AOuXuwDrl7wA65e9AOuXvgDrl78A65iAAOuYgQDrmIIA65iDAOuYhADrmIUA65iGAOuYhwDrmIgA65iJAOuYigDrmIsA65iMAOuYjQDrmI4A65iPAOuYkADrmJEA65iSAOuYkwDrmJQA65iVAOuYlgDrmJcA65iYAOuYmQDrmJoA65ibAOuYnADrmJ0A65ieAOuYnwDrmKAA65ihAOuYogDrmKMA65ikAOuYpQDrmKYA65inAOuYqADrmKkA65iqAOuYqwDrmKwA65itAOu
YrgDrmK8A65iwAOuYsQDrmLIA65izAOuYtADrmLUA65i2AOuYtwDrmLgA65i5AOuYugDrmLsA65i8AOuYvQDrmL4A65i/AOuZgADrmYEA65mCAOuZgwDrmYQA65mFAOuZhgDrmYcA65mIAOuZiQDrmYoA65mLAOuZjADrmY0A65mOAOuZjwDrmZAA65mRAOuZkgDrmZMA65mUAOuZlQDrmZYA65mXAOuZmADrmZkA65maAOuZmwDrmZwA65mdAOuZngDrmZ8A65mgAOuZoQDrmaIA65mjAOuZpADrmaUA65mmAOuZpwDrmagA65mpAOuZqgDrmasA65msAOuZrQDrma4A65mvAOuZsADrmbEA65myAOuZswDrmbQA65m1AOuZtgDrmbcA65m4AOuZuQDrmboA65m7AOuZvADrmb0A65m+AOuZvwDrmoAA65qBAOuaggDrmoMA65qEAOuahQDrmoYA65qHAOuaiADrmokA65qKAOuaiwDrmowA65qNAOuajgDrmo8A65qQAOuakQDrmpIA65qTAOualADrmpUA65qWAOualwDrmpgA65qZAOuamgDrmpsA65qcAOuanQDrmp4A65qfAOuaoADrmqEA65qiAOuaowDrmqQA65qlAOuapgDrmqcA65qoAOuaqQDrmqoA65qrAOuarADrmq0A65quAOuarwDrmrAA65qxAOuasgDrmrMA65q0AOuatQDrmrYA65q3AOuauADrmrkA65q6AOuauwDrmrwA65q9AOuavgDrmr8A65uAAOubgQDrm4IA65uDAOubhADrm4UA65uGAOubhwDrm4gA65uJAOubigDrm4sA65uMAOubjQDrm44A65uPAOubkADrm5EA65uSAOubkwDrm5QA65uVAOublgDrm5cA65uYAOubmQDrm5oA65ubAOubnADrm50A65ueAOubnwDrm6AA65uhAOubogDrm6MA65ukAOubpQDrm6YA65unAOubqADrm6kA65uqAOubqwDrm6wA65utAOubrgDrm68A65uwAOubsQDrm7IA65uzAOubtADrm7UA65u2AOubtwDrm7gA65u5AOubugDrm7sA65u8AOubvQDrm74A65u/AOucgADrnIEA65yCAOucgwDrnIQA65yFAOuchgDrnIcA65yIAOuciQDrnIoA65yLAOucjADrnI0A65yOAOucjwDrnJAA65yRAOuckgDrnJMA65yUAOuclQDrnJYA65yXAOucmADrnJkA65yaAOucmwDrnJwA65ydAOucngDrnJ8A65ygAOucoQDrnKIA65yjAOucpADrnKUA65ymAOucpwDrnKgA65ypAOucqgDrnKsA65ysAOucrQDrnK4A65yvAOucsADrnLEA65yyAOucswDrnLQA65y1AOuctgDrnLcA65y4AOucuQDrnLoA65y7AOucvADrnL0A65y+AOucvwDrnYAA652BAOudggDrnYMA652EAOudhQDrnYYA652HAOudiADrnYkA652KAOudiwDrnYwA652NAOudjgDrnY8A652QAOudkQDrnZIA652TAOudlADrnZUA652WAOudlwDrnZgA652ZAOudmgDrnZsA652cAOudnQDrnZ4A652fAOudoADrnaEA652iAOudowDrnaQA652lAOudpgDrnacA652oAOudqQDrnaoA652rAOudrADrna0A652uAOudrwDrnbAA652xAOudsgDrnbMA6520AOudtQDrnbYA6523AOuduADrnbkA6526AOuduwDrnbwA6529AOudvgDrnb8A656AAOuegQDrnoIA656DAOuehADrnoUA656GAOuehwDrnogA656JAOueigDrnosA656MAOuejQDrno4A656PAOuekADrnpEA656SAOuekwDrnpQA656VAOuelgDrnpcA656YAOuemQDrnpoA656bAOuenADrnp0A656eAOuenwDrnqAA656hAOueogDrnqMA656kAOu
epQDrnqYA656nAOueqADrnqkA656qAOueqwDrnqwA656tAOuergDrnq8A656wAOuesQDrnrIA656zAOuetADrnrUA6562AOuetwDrnrgA6565AOueugDrnrsA6568AOuevQDrnr4A656/AOufgADrn4EA65+CAOufgwDrn4QA65+FAOufhgDrn4cA65+IAOufiQDrn4oA65+LAOufjADrn40A65+OAOufjwDrn5AA65+RAOufkgDrn5MA65+UAOuflQDrn5YA65+XAOufmADrn5kA65+aAOufmwDrn5wA65+dAOufngDrn58A65+gAOufoQDrn6IA65+jAOufpADrn6UA65+mAOufpwDrn6gA65+pAOufqgDrn6sA65+sAOufrQDrn64A65+vAOufsADrn7EA65+yAOufswDrn7QA65+1AOuftgDrn7cA65+4AOufuQDrn7oA65+7AOufvADrn70A65++AOufvwDroIAA66CBAOugggDroIMA66CEAOughQDroIYA66CHAOugiADroIkA66CKAOugiwDroIwA66CNAOugjgDroI8A66CQAOugkQDroJIA66CTAOuglADroJUA66CWAOuglwDroJgA66CZAOugmgDroJsA66CcAOugnQDroJ4A66CfAOugoADroKEA66CiAOugowDroKQA66ClAOugpgDroKcA66CoAOugqQDroKoA66CrAOugrADroK0A66CuAOugrwDroLAA66CxAOugsgDroLMA66C0AOugtQDroLYA66C3AOuguADroLkA66C6AOuguwDroLwA66C9AOugvgDroL8A66GAAOuhgQDroYIA66GDAOuhhADroYUA66GGAOuhhwDroYgA66GJAOuhigDroYsA66GMAOuhjQDroY4A66GPAOuhkADroZEA66GSAOuhkwDroZQA66GVAOuhlgDroZcA66GYAOuhmQDroZoA66GbAOuhnADroZ0A66GeAOuhnwDroaAA66GhAOuhogDroaMA66GkAOuhpQDroaYA66GnAOuhqADroakA66GqAOuhqwDroawA66GtAOuhrgDroa8A66GwAOuhsQDrobIA66GzAOuhtADrobUA66G2AOuhtwDrobgA66G5AOuhugDrobsA66G8AOuhvQDrob4A66G/AOuigADrooEA66KCAOuigwDrooQA66KFAOuihgDroocA66KIAOuiiQDroooA66KLAOuijADroo0A66KOAOuijwDropAA66KRAOuikgDropMA66KUAOuilQDropYA66KXAOuimADropkA66KaAOuimwDropwA66KdAOuingDrop8A66KgAOuioQDroqIA66KjAOuipADroqUA66KmAOuipwDroqgA66KpAOuiqgDroqsA66KsAOuirQDroq4A66KvAOuisADrorEA66KyAOuiswDrorQA66K1AOuitgDrorcA66K4AOuiuQDroroA66K7AOuivADror0A66K+AOuivwDro4AA66OBAOujggDro4MA66OEAOujhQDro4YA66OHAOujiADro4kA66OKAOujiwDro4wA66ONAOujjgDro48A66OQAOujkQDro5IA66OTAOujlADro5UA66OWAOujlwDro5gA66OZAOujmgDro5sA66OcAOujnQDro54A66OfAOujoADro6EA66OiAOujowDro6QA66OlAOujpgDro6cA66OoAOujqQDro6oA66OrAOujrADro60A66OuAOujrwDro7AA66OxAOujsgDro7MA66O0AOujtQDro7YA66O3AOujuADro7kA66O6AOujuwDro7wA66O9AOujvgDro78A66SAAOukgQDrpIIA66SDAOukhADrpIUA66SGAOukhwDrpIgA66SJAOukigDrpIsA66SMAOukjQDrpI4A66SPAOukkADrpJEA66SSAOukkwDrpJQA66SVAOuklgDrpJcA66SYAOukmQDrpJoA66SbAOu
knADrpJ0A66SeAOuknwDrpKAA66ShAOukogDrpKMA66SkAOukpQDrpKYA66SnAOukqADrpKkA66SqAOukqwDrpKwA66StAOukrgDrpK8A66SwAOuksQDrpLIA66SzAOuktADrpLUA66S2AOuktwDrpLgA66S5AOukugDrpLsA66S8AOukvQDrpL4A66S/AOulgADrpYEA66WCAOulgwDrpYQA66WFAOulhgDrpYcA66WIAOuliQDrpYoA66WLAOuljADrpY0A66WOAOuljwDrpZAA66WRAOulkgDrpZMA66WUAOullQDrpZYA66WXAOulmADrpZkA66WaAOulmwDrpZwA66WdAOulngDrpZ8A66WgAOuloQDrpaIA66WjAOulpADrpaUA66WmAOulpwDrpagA66WpAOulqgDrpasA66WsAOulrQDrpa4A66WvAOulsADrpbEA66WyAOulswDrpbQA66W1AOultgDrpbcA66W4AOuluQDrpboA66W7AOulvADrpb0A66W+AOulvwDrpoAA66aBAOumggDrpoMA66aEAOumhQDrpoYA66aHAOumiADrpokA66aKAOumiwDrpowA66aNAOumjgDrpo8A66aQAOumkQDrppIA66aTAOumlADrppUA66aWAOumlwDrppgA66aZAOummgDrppsA66acAOumnQDrpp4A66afAOumoADrpqEA66aiAOumowDrpqQA66alAOumpgDrpqcA66aoAOumqQDrpqoA66arAOumrADrpq0A66auAOumrwDrprAA66axAOumsgDrprMA66a0AOumtQDrprYA66a3AOumuADrprkA66a6AOumuwDrprwA66a9AOumvgDrpr8A66eAAOungQDrp4IA66eDAOunhADrp4UA66eGAOunhwDrp4gA66eJAOunigDrp4sA66eMAOunjQDrp44A66ePAOunkADrp5EA66eSAOunkwDrp5QA66eVAOunlgDrp5cA66eYAOunmQDrp5oA66ebAOunnADrp50A66eeAOunnwDrp6AA66ehAOunogDrp6MA66ekAOunpQDrp6YA66enAOunqADrp6kA66eqAOunqwDrp6wA66etAOunrgDrp68A66ewAOunsQDrp7IA66ezAOuntADrp7UA66e2AOuntwDrp7gA66e5AOunugDrp7sA66e8AOunvQDrp74A66e/AOuogADrqIEA66iCAOuogwDrqIQA66iFAOuohgDrqIcA66iIAOuoiQDrqIoA66iLAOuojADrqI0A66iOAOuojwDrqJAA66iRAOuokgDrqJMA66iUAOuolQDrqJYA66iXAOuomADrqJkA66iaAOuomwDrqJwA66idAOuongDrqJ8A66igAOuooQDrqKIA66ijAOuopADrqKUA66imAOuopwDrqKgA66ipAOuoqgDrqKsA66isAOuorQDrqK4A66ivAOuosADrqLEA66iyAOuoswDrqLQA66i1AOuotgDrqLcA66i4AOuouQDrqLoA66i7AOuovADrqL0A66i+AOuovwDrqYAA66mBAOupggDrqYMA66mEAOuphQDrqYYA66mHAOupiADrqYkA66mKAOupiwDrqYwA66mNAOupjgDrqY8A66mQAOupkQDrqZIA66mTAOuplADrqZUA66mWAOuplwDrqZgA66mZAOupmgDrqZsA66mcAOupnQDrqZ4A66mfAOupoADrqaEA66miAOupowDrqaQA66mlAOuppgDrqacA66moAOupqQDrqaoA66mrAOuprADrqa0A66muAOuprwDrqbAA66mxAOupsgDrqbMA66m0AOuptQDrqbYA66m3AOupuADrqbkA66m6AOupuwDrqbwA66m9AOupvgDrqb8A66qAAOuqgQDrqoIA66qDAOuqhADrqoUA66qGAOuqhwDrqogA66qJAOuqigDrqosA66qMAOuqjQDrqo4A66qPAOuqkADrqpEA66qSAOu
qkwDrqpQA66qVAOuqlgDrqpcA66qYAOuqmQDrqpoA66qbAOuqnADrqp0A66qeAOuqnwDrqqAA66qhAOuqogDrqqMA66qkAOuqpQDrqqYA66qnAOuqqADrqqkA66qqAOuqqwDrqqwA66qtAOuqrgDrqq8A66qwAOuqsQDrqrIA66qzAOuqtADrqrUA66q2AOuqtwDrqrgA66q5AOuqugDrqrsA66q8AOuqvQDrqr4A66q/AOurgADrq4EA66uCAOurgwDrq4QA66uFAOurhgDrq4cA66uIAOuriQDrq4oA66uLAOurjADrq40A66uOAOurjwDrq5AA66uRAOurkgDrq5MA66uUAOurlQDrq5YA66uXAOurmADrq5kA66uaAOurmwDrq5wA66udAOurngDrq58A66ugAOuroQDrq6IA66ujAOurpADrq6UA66umAOurpwDrq6gA66upAOurqgDrq6sA66usAOurrQDrq64A66uvAOursADrq7EA66uyAOurswDrq7QA66u1AOurtgDrq7cA66u4AOuruQDrq7oA66u7AOurvADrq70A66u+AOurvwDrrIAA66yBAOusggDrrIMA66yEAOushQDrrIYA66yHAOusiADrrIkA66yKAOusiwDrrIwA66yNAOusjgDrrI8A66yQAOuskQDrrJIA66yTAOuslADrrJUA66yWAOuslwDrrJgA66yZAOusmgDrrJsA66ycAOusnQDrrJ4A66yfAOusoADrrKEA66yiAOusowDrrKQA66ylAOuspgDrrKcA66yoAOusqQDrrKoA66yrAOusrADrrK0A66yuAOusrwDrrLAA66yxAOussgDrrLMA66y0AOustQDrrLYA66y3AOusuADrrLkA66y6AOusuwDrrLwA66y9AOusvgDrrL8A662AAOutgQDrrYIA662DAOuthADrrYUA662GAOuthwDrrYgA662JAOutigDrrYsA662MAOutjQDrrY4A662PAOutkADrrZEA662SAOutkwDrrZQA662VAOutlgDrrZcA662YAOutmQDrrZoA662bAOutnADrrZ0A662eAOutnwDrraAA662hAOutogDrraMA662kAOutpQDrraYA662nAOutqADrrakA662qAOutqwDrrawA662tAOutrgDrra8A662wAOutsQDrrbIA662zAOuttADrrbUA6622AOuttwDrrbgA6625AOutugDrrbsA6628AOutvQDrrb4A662/AOuugADrroEA666CAOuugwDrroQA666FAOuuhgDrrocA666IAOuuiQDrrooA666LAOuujADrro0A666OAOuujwDrrpAA666RAOuukgDrrpMA666UAOuulQDrrpYA666XAOuumADrrpkA666aAOuumwDrrpwA666dAOuungDrrp8A666gAOuuoQDrrqIA666jAOuupADrrqUA666mAOuupwDrrqgA666pAOuuqgDrrqsA666sAOuurQDrrq4A666vAOuusADrrrEA666yAOuuswDrrrQA6661AOuutgDrrrcA6664AOuuuQDrrroA6667AOuuvADrrr0A666+AOuuvwDrr4AA66+BAOuvggDrr4MA66+EAOuvhQDrr4YA66+HAOuviADrr4kA66+KAOuviwDrr4wA66+NAOuvjgDrr48A66+QAOuvkQDrr5IA66+TAOuvlADrr5UA66+WAOuvlwDrr5gA66+ZAOuvmgDrr5sA66+cAOuvnQDrr54A66+fAOuvoADrr6EA66+iAOuvowDrr6QA66+lAOuvpgDrr6cA66+oAOuvqQDrr6oA66+rAOuvrADrr60A66+uAOuvrwDrr7AA66+xAOuvsgDrr7MA66+0AOuvtQDrr7YA66+3AOuvuADrr7kA66+6AOuvuwDrr7wA66+9AOuvvgDrr78A67CAAOuwgQDrsIIA67CDAOuwhADrsIUA67CGAOuwhwDrsIgA67CJAOu
wigDrsIsA67CMAOuwjQDrsI4A67CPAOuwkADrsJEA67CSAOuwkwDrsJQA67CVAOuwlgDrsJcA67CYAOuwmQDrsJoA67CbAOuwnADrsJ0A67CeAOuwnwDrsKAA67ChAOuwogDrsKMA67CkAOuwpQDrsKYA67CnAOuwqADrsKkA67CqAOuwqwDrsKwA67CtAOuwrgDrsK8A67CwAOuwsQDrsLIA67CzAOuwtADrsLUA67C2AOuwtwDrsLgA67C5AOuwugDrsLsA67C8AOuwvQDrsL4A67C/AOuxgADrsYEA67GCAOuxgwDrsYQA67GFAOuxhgDrsYcA67GIAOuxiQDrsYoA67GLAOuxjADrsY0A67GOAOuxjwDrsZAA67GRAOuxkgDrsZMA67GUAOuxlQDrsZYA67GXAOuxmADrsZkA67GaAOuxmwDrsZwA67GdAOuxngDrsZ8A67GgAOuxoQDrsaIA67GjAOuxpADrsaUA67GmAOuxpwDrsagA67GpAOuxqgDrsasA67GsAOuxrQDrsa4A67GvAOuxsADrsbEA67GyAOuxswDrsbQA67G1AOuxtgDrsbcA67G4AOuxuQDrsboA67G7AOuxvADrsb0A67G+AOuxvwDrsoAA67KBAOuyggDrsoMA67KEAOuyhQDrsoYA67KHAOuyiADrsokA67KKAOuyiwDrsowA67KNAOuyjgDrso8A67KQAOuykQDrspIA67KTAOuylADrspUA67KWAOuylwDrspgA67KZAOuymgDrspsA67KcAOuynQDrsp4A67KfAOuyoADrsqEA67KiAOuyowDrsqQA67KlAOuypgDrsqcA67KoAOuyqQDrsqoA67KrAOuyrADrsq0A67KuAOuyrwDrsrAA67KxAOuysgDrsrMA67K0AOuytQDrsrYA67K3AOuyuADrsrkA67K6AOuyuwDrsrwA67K9AOuyvgDrsr8A67OAAOuzgQDrs4IA67ODAOuzhADrs4UA67OGAOuzhwDrs4gA67OJAOuzigDrs4sA67OMAOuzjQDrs44A67OPAOuzkADrs5EA67OSAOuzkwDrs5QA67OVAOuzlgDrs5cA67OYAOuzmQDrs5oA67ObAOuznADrs50A67OeAOuznwDrs6AA67OhAOuzogDrs6MA67OkAOuzpQDrs6YA67OnAOuzqADrs6kA67OqAOuzqwDrs6wA67OtAOuzrgDrs68A67OwAOuzsQDrs7IA67OzAOuztADrs7UA67O2AOuztwDrs7gA67O5AOuzugDrs7sA67O8AOuzvQDrs74A67O/AOu0gADrtIEA67SCAOu0gwDrtIQA67SFAOu0hgDrtIcA67SIAOu0iQDrtIoA67SLAOu0jADrtI0A67SOAOu0jwDrtJAA67SRAOu0kgDrtJMA67SUAOu0lQDrtJYA67SXAOu0mADrtJkA67SaAOu0mwDrtJwA67SdAOu0ngDrtJ8A67SgAOu0oQDrtKIA67SjAOu0pADrtKUA67SmAOu0pwDrtKgA67SpAOu0qgDrtKsA67SsAOu0rQDrtK4A67SvAOu0sADrtLEA67SyAOu0swDrtLQA67S1AOu0tgDrtLcA67S4AOu0uQDrtLoA67S7AOu0vADrtL0A67S+AOu0vwDrtYAA67WBAOu1ggDrtYMA67WEAOu1hQDrtYYA67WHAOu1iADrtYkA67WKAOu1iwDrtYwA67WNAOu1jgDrtY8A67WQAOu1kQDrtZIA67WTAOu1lADrtZUA67WWAOu1lwDrtZgA67WZAOu1mgDrtZsA67WcAOu1nQDrtZ4A67WfAOu1oADrtaEA67WiAOu1owDrtaQA67WlAOu1pgDrtacA67WoAOu1qQDrtaoA67WrAOu1rADrta0A67WuAOu1rwDrtbAA67WxAOu1sgDrtbMA67W0AOu1tQDrtbYA67W3AOu1uADrtbkA67W6AOu1uwDrtbwA67W9AOu1vgDrtb8A67aAAOu
2gQDrtoIA67aDAOu2hADrtoUA67aGAOu2hwDrtogA67aJAOu2igDrtosA67aMAOu2jQDrto4A67aPAOu2kADrtpEA67aSAOu2kwDrtpQA67aVAOu2lgDrtpcA67aYAOu2mQDrtpoA67abAOu2nADrtp0A67aeAOu2nwDrtqAA67ahAOu2ogDrtqMA67akAOu2pQDrtqYA67anAOu2qADrtqkA67aqAOu2qwDrtqwA67atAOu2rgDrtq8A67awAOu2sQDrtrIA67azAOu2tADrtrUA67a2AOu2twDrtrgA67a5AOu2ugDrtrsA67a8AOu2vQDrtr4A67a/AOu3gADrt4EA67eCAOu3gwDrt4QA67eFAOu3hgDrt4cA67eIAOu3iQDrt4oA67eLAOu3jADrt40A67eOAOu3jwDrt5AA67eRAOu3kgDrt5MA67eUAOu3lQDrt5YA67eXAOu3mADrt5kA67eaAOu3mwDrt5wA67edAOu3ngDrt58A67egAOu3oQDrt6IA67ejAOu3pADrt6UA67emAOu3pwDrt6gA67epAOu3qgDrt6sA67esAOu3rQDrt64A67evAOu3sADrt7EA67eyAOu3swDrt7QA67e1AOu3tgDrt7cA67e4AOu3uQDrt7oA67e7AOu3vADrt70A67e+AOu3vwDruIAA67iBAOu4ggDruIMA67iEAOu4hQDruIYA67iHAOu4iADruIkA67iKAOu4iwDruIwA67iNAOu4jgDruI8A67iQAOu4kQDruJIA67iTAOu4lADruJUA67iWAOu4lwDruJgA67iZAOu4mgDruJsA67icAOu4nQDruJ4A67ifAOu4oADruKEA67iiAOu4owDruKQA67ilAOu4pgDruKcA67ioAOu4qQDruKoA67irAOu4rADruK0A67iuAOu4rwDruLAA67ixAOu4sgDruLMA67i0AOu4tQDruLYA67i3AOu4uADruLkA67i6AOu4uwDruLwA67i9AOu4vgDruL8A67mAAOu5gQDruYIA67mDAOu5hADruYUA67mGAOu5hwDruYgA67mJAOu5igDruYsA67mMAOu5jQDruY4A67mPAOu5kADruZEA67mSAOu5kwDruZQA67mVAOu5lgDruZcA67mYAOu5mQDruZoA67mbAOu5nADruZ0A67meAOu5nwDruaAA67mhAOu5ogDruaMA67mkAOu5pQDruaYA67mnAOu5qADruakA67mqAOu5qwDruawA67mtAOu5rgDrua8A67mwAOu5sQDrubIA67mzAOu5tADrubUA67m2AOu5twDrubgA67m5AOu5ugDrubsA67m8AOu5vQDrub4A67m/AOu6gADruoEA67qCAOu6gwDruoQA67qFAOu6hgDruocA67qIAOu6iQDruooA67qLAOu6jADruo0A67qOAOu6jwDrupAA67qRAOu6kgDrupMA67qUAOu6lQDrupYA67qXAOu6mADrupkA67qaAOu6mwDrupwA67qdAOu6ngDrup8A67qgAOu6oQDruqIA67qjAOu6pADruqUA67qmAOu6pwDruqgA67qpAOu6qgDruqsA67qsAOu6rQDruq4A67qvAOu6sADrurEA67qyAOu6swDrurQA67q1AOu6tgDrurcA67q4AOu6uQDruroA67q7AOu6vADrur0A67q+AOu6vwDru4AA67uBAOu7ggDru4MA67uEAOu7hQDru4YA67uHAOu7iADru4kA67uKAOu7iwDru4wA67uNAOu7jgDru48A67uQAOu7kQDru5IA67uTAOu7lADru5UA67uWAOu7lwDru5gA67uZAOu7mgDru5sA67ucAOu7nQDru54A67ufAOu7oADru6EA67uiAOu7owDru6QA67ulAOu7pgDru6cA67uoAOu7qQDru6oA67urAOu7rADru60A67uuAOu7rwDru7AA67uxAOu7sgDru7MA67u0AOu7tQDru7YA67u3AOu
7uADru7kA67u6AOu7uwDru7wA67u9AOu7vgDru78A67yAAOu8gQDrvIIA67yDAOu8hADrvIUA67yGAOu8hwDrvIgA67yJAOu8igDrvIsA67yMAOu8jQDrvI4A67yPAOu8kADrvJEA67ySAOu8kwDrvJQA67yVAOu8lgDrvJcA67yYAOu8mQDrvJoA67ybAOu8nADrvJ0A67yeAOu8nwDrvKAA67yhAOu8ogDrvKMA67ykAOu8pQDrvKYA67ynAOu8qADrvKkA67yqAOu8qwDrvKwA67ytAOu8rgDrvK8A67ywAOu8sQDrvLIA67yzAOu8tADrvLUA67y2AOu8twDrvLgA67y5AOu8ugDrvLsA67y8AOu8vQDrvL4A67y/AOu9gADrvYEA672CAOu9gwDrvYQA672FAOu9hgDrvYcA672IAOu9iQDrvYoA672LAOu9jADrvY0A672OAOu9jwDrvZAA672RAOu9kgDrvZMA672UAOu9lQDrvZYA672XAOu9mADrvZkA672aAOu9mwDrvZwA672dAOu9ngDrvZ8A672gAOu9oQDrvaIA672jAOu9pADrvaUA672mAOu9pwDrvagA672pAOu9qgDrvasA672sAOu9rQDrva4A672vAOu9sADrvbEA672yAOu9swDrvbQA6721AOu9tgDrvbcA6724AOu9uQDrvboA6727AOu9vADrvb0A672+AOu9vwDrvoAA676BAOu+ggDrvoMA676EAOu+hQDrvoYA676HAOu+iADrvokA676KAOu+iwDrvowA676NAOu+jgDrvo8A676QAOu+kQDrvpIA676TAOu+lADrvpUA676WAOu+lwDrvpgA676ZAOu+mgDrvpsA676cAOu+nQDrvp4A676fAOu+oADrvqEA676iAOu+owDrvqQA676lAOu+pgDrvqcA676oAOu+qQDrvqoA676rAOu+rADrvq0A676uAOu+rwDrvrAA676xAOu+sgDrvrMA6760AOu+tQDrvrYA6763AOu+uADrvrkA6766AOu+uwDrvrwA6769AOu+vgDrvr8A67+AAOu/gQDrv4IA67+DAOu/hADrv4UA67+GAOu/hwDrv4gA67+JAOu/igDrv4sA67+MAOu/jQDrv44A67+PAOu/kADrv5EA67+SAOu/kwDrv5QA67+VAOu/lgDrv5cA67+YAOu/mQDrv5oA67+bAOu/nADrv50A67+eAOu/nwDrv6AA67+hAOu/ogDrv6MA67+kAOu/pQDrv6YA67+nAOu/qADrv6kA67+qAOu/qwDrv6wA67+tAOu/rgDrv68A67+wAOu/sQDrv7IA67+zAOu/tADrv7UA67+2AOu/twDrv7gA67+5AOu/ugDrv7sA67+8AOu/vQDrv74A67+/AOyAgADsgIEA7ICCAOyAgwDsgIQA7ICFAOyAhgDsgIcA7ICIAOyAiQDsgIoA7ICLAOyAjADsgI0A7ICOAOyAjwDsgJAA7ICRAOyAkgDsgJMA7ICUAOyAlQDsgJYA7ICXAOyAmADsgJkA7ICaAOyAmwDsgJwA7ICdAOyAngDsgJ8A7ICgAOyAoQDsgKIA7ICjAOyApADsgKUA7ICmAOyApwDsgKgA7ICpAOyAqgDsgKsA7ICsAOyArQDsgK4A7ICvAOyAsADsgLEA7ICyAOyAswDsgLQA7IC1AOyAtgDsgLcA7IC4AOyAuQDsgLoA7IC7AOyAvADsgL0A7IC+AOyAvwDsgYAA7IGBAOyBggDsgYMA7IGEAOyBhQDsgYYA7IGHAOyBiADsgYkA7IGKAOyBiwDsgYwA7IGNAOyBjgDsgY8A7IGQAOyBkQDsgZIA7IGTAOyBlADsgZUA7IGWAOyBlwDsgZgA7IGZAOyBmgDsgZsA7IGcAOyBnQDsgZ4A7IGfAOyBoADsgaEA7IGiAOyBowDsgaQA7IGlAOyBpgDsgacA7IGoAOyBqQDsgaoA7IGrAOyBrADsga0A7IGuAOy
BrwDsgbAA7IGxAOyBsgDsgbMA7IG0AOyBtQDsgbYA7IG3AOyBuADsgbkA7IG6AOyBuwDsgbwA7IG9AOyBvgDsgb8A7IKAAOyCgQDsgoIA7IKDAOyChADsgoUA7IKGAOyChwDsgogA7IKJAOyCigDsgosA7IKMAOyCjQDsgo4A7IKPAOyCkADsgpEA7IKSAOyCkwDsgpQA7IKVAOyClgDsgpcA7IKYAOyCmQDsgpoA7IKbAOyCnADsgp0A7IKeAOyCnwDsgqAA7IKhAOyCogDsgqMA7IKkAOyCpQDsgqYA7IKnAOyCqADsgqkA7IKqAOyCqwDsgqwA7IKtAOyCrgDsgq8A7IKwAOyCsQDsgrIA7IKzAOyCtADsgrUA7IK2AOyCtwDsgrgA7IK5AOyCugDsgrsA7IK8AOyCvQDsgr4A7IK/AOyDgADsg4EA7IOCAOyDgwDsg4QA7IOFAOyDhgDsg4cA7IOIAOyDiQDsg4oA7IOLAOyDjADsg40A7IOOAOyDjwDsg5AA7IORAOyDkgDsg5MA7IOUAOyDlQDsg5YA7IOXAOyDmADsg5kA7IOaAOyDmwDsg5wA7IOdAOyDngDsg58A7IOgAOyDoQDsg6IA7IOjAOyDpADsg6UA7IOmAOyDpwDsg6gA7IOpAOyDqgDsg6sA7IOsAOyDrQDsg64A7IOvAOyDsADsg7EA7IOyAOyDswDsg7QA7IO1AOyDtgDsg7cA7IO4AOyDuQDsg7oA7IO7AOyDvADsg70A7IO+AOyDvwDshIAA7ISBAOyEggDshIMA7ISEAOyEhQDshIYA7ISHAOyEiADshIkA7ISKAOyEiwDshIwA7ISNAOyEjgDshI8A7ISQAOyEkQDshJIA7ISTAOyElADshJUA7ISWAOyElwDshJgA7ISZAOyEmgDshJsA7IScAOyEnQDshJ4A7ISfAOyEoADshKEA7ISiAOyEowDshKQA7ISlAOyEpgDshKcA7ISoAOyEqQDshKoA7ISrAOyErADshK0A7ISuAOyErwDshLAA7ISxAOyEsgDshLMA7IS0AOyEtQDshLYA7IS3AOyEuADshLkA7IS6AOyEuwDshLwA7IS9AOyEvgDshL8A7IWAAOyFgQDshYIA7IWDAOyFhADshYUA7IWGAOyFhwDshYgA7IWJAOyFigDshYsA7IWMAOyFjQDshY4A7IWPAOyFkADshZEA7IWSAOyFkwDshZQA7IWVAOyFlgDshZcA7IWYAOyFmQDshZoA7IWbAOyFnADshZ0A7IWeAOyFnwDshaAA7IWhAOyFogDshaMA7IWkAOyFpQDshaYA7IWnAOyFqADshakA7IWqAOyFqwDshawA7IWtAOyFrgDsha8A7IWwAOyFsQDshbIA7IWzAOyFtADshbUA7IW2AOyFtwDshbgA7IW5AOyFugDshbsA7IW8AOyFvQDshb4A7IW/AOyGgADshoEA7IaCAOyGgwDshoQA7IaFAOyGhgDshocA7IaIAOyGiQDshooA7IaLAOyGjADsho0A7IaOAOyGjwDshpAA7IaRAOyGkgDshpMA7IaUAOyGlQDshpYA7IaXAOyGmADshpkA7IaaAOyGmwDshpwA7IadAOyGngDshp8A7IagAOyGoQDshqIA7IajAOyGpADshqUA7IamAOyGpwDshqgA7IapAOyGqgDshqsA7IasAOyGrQDshq4A7IavAOyGsADshrEA7IayAOyGswDshrQA7Ia1AOyGtgDshrcA7Ia4AOyGuQDshroA7Ia7AOyGvADshr0A7Ia+AOyGvwDsh4AA7IeBAOyHggDsh4MA7IeEAOyHhQDsh4YA7IeHAOyHiADsh4kA7IeKAOyHiwDsh4wA7IeNAOyHjgDsh48A7IeQAOyHkQDsh5IA7IeTAOyHlADsh5UA7IeWAOyHlwDsh5gA7IeZAOyHmgDsh5sA7IecAOyHnQDsh54A7IefAOyHoADsh6EA7IeiAOyHowDsh6QA7IelAOy
HpgDsh6cA7IeoAOyHqQDsh6oA7IerAOyHrADsh60A7IeuAOyHrwDsh7AA7IexAOyHsgDsh7MA7Ie0AOyHtQDsh7YA7Ie3AOyHuADsh7kA7Ie6AOyHuwDsh7wA7Ie9AOyHvgDsh78A7IiAAOyIgQDsiIIA7IiDAOyIhADsiIUA7IiGAOyIhwDsiIgA7IiJAOyIigDsiIsA7IiMAOyIjQDsiI4A7IiPAOyIkADsiJEA7IiSAOyIkwDsiJQA7IiVAOyIlgDsiJcA7IiYAOyImQDsiJoA7IibAOyInADsiJ0A7IieAOyInwDsiKAA7IihAOyIogDsiKMA7IikAOyIpQDsiKYA7IinAOyIqADsiKkA7IiqAOyIqwDsiKwA7IitAOyIrgDsiK8A7IiwAOyIsQDsiLIA7IizAOyItADsiLUA7Ii2AOyItwDsiLgA7Ii5AOyIugDsiLsA7Ii8AOyIvQDsiL4A7Ii/AOyJgADsiYEA7ImCAOyJgwDsiYQA7ImFAOyJhgDsiYcA7ImIAOyJiQDsiYoA7ImLAOyJjADsiY0A7ImOAOyJjwDsiZAA7ImRAOyJkgDsiZMA7ImUAOyJlQDsiZYA7ImXAOyJmADsiZkA7ImaAOyJmwDsiZwA7ImdAOyJngDsiZ8A7ImgAOyJoQDsiaIA7ImjAOyJpADsiaUA7ImmAOyJpwDsiagA7ImpAOyJqgDsiasA7ImsAOyJrQDsia4A7ImvAOyJsADsibEA7ImyAOyJswDsibQA7Im1AOyJtgDsibcA7Im4AOyJuQDsiboA7Im7AOyJvADsib0A7Im+AOyJvwDsioAA7IqBAOyKggDsioMA7IqEAOyKhQDsioYA7IqHAOyKiADsiokA7IqKAOyKiwDsiowA7IqNAOyKjgDsio8A7IqQAOyKkQDsipIA7IqTAOyKlADsipUA7IqWAOyKlwDsipgA7IqZAOyKmgDsipsA7IqcAOyKnQDsip4A7IqfAOyKoADsiqEA7IqiAOyKowDsiqQA7IqlAOyKpgDsiqcA7IqoAOyKqQDsiqoA7IqrAOyKrADsiq0A7IquAOyKrwDsirAA7IqxAOyKsgDsirMA7Iq0AOyKtQDsirYA7Iq3AOyKuADsirkA7Iq6AOyKuwDsirwA7Iq9AOyKvgDsir8A7IuAAOyLgQDsi4IA7IuDAOyLhADsi4UA7IuGAOyLhwDsi4gA7IuJAOyLigDsi4sA7IuMAOyLjQDsi44A7IuPAOyLkADsi5EA7IuSAOyLkwDsi5QA7IuVAOyLlgDsi5cA7IuYAOyLmQDsi5oA7IubAOyLnADsi50A7IueAOyLnwDsi6AA7IuhAOyLogDsi6MA7IukAOyLpQDsi6YA7IunAOyLqADsi6kA7IuqAOyLqwDsi6wA7IutAOyLrgDsi68A7IuwAOyLsQDsi7IA7IuzAOyLtADsi7UA7Iu2AOyLtwDsi7gA7Iu5AOyLugDsi7sA7Iu8AOyLvQDsi74A7Iu/AOyMgADsjIEA7IyCAOyMgwDsjIQA7IyFAOyMhgDsjIcA7IyIAOyMiQDsjIoA7IyLAOyMjADsjI0A7IyOAOyMjwDsjJAA7IyRAOyMkgDsjJMA7IyUAOyMlQDsjJYA7IyXAOyMmADsjJkA7IyaAOyMmwDsjJwA7IydAOyMngDsjJ8A7IygAOyMoQDsjKIA7IyjAOyMpADsjKUA7IymAOyMpwDsjKgA7IypAOyMqgDsjKsA7IysAOyMrQDsjK4A7IyvAOyMsADsjLEA7IyyAOyMswDsjLQA7Iy1AOyMtgDsjLcA7Iy4AOyMuQDsjLoA7Iy7AOyMvADsjL0A7Iy+AOyMvwDsjYAA7I2BAOyNggDsjYMA7I2EAOyNhQDsjYYA7I2HAOyNiADsjYkA7I2KAOyNiwDsjYwA7I2NAOyNjgDsjY8A7I2QAOyNkQDsjZIA7I2TAOyNlADsjZUA7I2WAOyNlwDsjZgA7I2ZAOyNmgDsjZsA7I2cAOy
NnQDsjZ4A7I2fAOyNoADsjaEA7I2iAOyNowDsjaQA7I2lAOyNpgDsjacA7I2oAOyNqQDsjaoA7I2rAOyNrADsja0A7I2uAOyNrwDsjbAA7I2xAOyNsgDsjbMA7I20AOyNtQDsjbYA7I23AOyNuADsjbkA7I26AOyNuwDsjbwA7I29AOyNvgDsjb8A7I6AAOyOgQDsjoIA7I6DAOyOhADsjoUA7I6GAOyOhwDsjogA7I6JAOyOigDsjosA7I6MAOyOjQDsjo4A7I6PAOyOkADsjpEA7I6SAOyOkwDsjpQA7I6VAOyOlgDsjpcA7I6YAOyOmQDsjpoA7I6bAOyOnADsjp0A7I6eAOyOnwDsjqAA7I6hAOyOogDsjqMA7I6kAOyOpQDsjqYA7I6nAOyOqADsjqkA7I6qAOyOqwDsjqwA7I6tAOyOrgDsjq8A7I6wAOyOsQDsjrIA7I6zAOyOtADsjrUA7I62AOyOtwDsjrgA7I65AOyOugDsjrsA7I68AOyOvQDsjr4A7I6/AOyPgADsj4EA7I+CAOyPgwDsj4QA7I+FAOyPhgDsj4cA7I+IAOyPiQDsj4oA7I+LAOyPjADsj40A7I+OAOyPjwDsj5AA7I+RAOyPkgDsj5MA7I+UAOyPlQDsj5YA7I+XAOyPmADsj5kA7I+aAOyPmwDsj5wA7I+dAOyPngDsj58A7I+gAOyPoQDsj6IA7I+jAOyPpADsj6UA7I+mAOyPpwDsj6gA7I+pAOyPqgDsj6sA7I+sAOyPrQDsj64A7I+vAOyPsADsj7EA7I+yAOyPswDsj7QA7I+1AOyPtgDsj7cA7I+4AOyPuQDsj7oA7I+7AOyPvADsj70A7I++AOyPvwDskIAA7JCBAOyQggDskIMA7JCEAOyQhQDskIYA7JCHAOyQiADskIkA7JCKAOyQiwDskIwA7JCNAOyQjgDskI8A7JCQAOyQkQDskJIA7JCTAOyQlADskJUA7JCWAOyQlwDskJgA7JCZAOyQmgDskJsA7JCcAOyQnQDskJ4A7JCfAOyQoADskKEA7JCiAOyQowDskKQA7JClAOyQpgDskKcA7JCoAOyQqQDskKoA7JCrAOyQrADskK0A7JCuAOyQrwDskLAA7JCxAOyQsgDskLMA7JC0AOyQtQDskLYA7JC3AOyQuADskLkA7JC6AOyQuwDskLwA7JC9AOyQvgDskL8A7JGAAOyRgQDskYIA7JGDAOyRhADskYUA7JGGAOyRhwDskYgA7JGJAOyRigDskYsA7JGMAOyRjQDskY4A7JGPAOyRkADskZEA7JGSAOyRkwDskZQA7JGVAOyRlgDskZcA7JGYAOyRmQDskZoA7JGbAOyRnADskZ0A7JGeAOyRnwDskaAA7JGhAOyRogDskaMA7JGkAOyRpQDskaYA7JGnAOyRqADskakA7JGqAOyRqwDskawA7JGtAOyRrgDska8A7JGwAOyRsQDskbIA7JGzAOyRtADskbUA7JG2AOyRtwDskbgA7JG5AOyRugDskbsA7JG8AOyRvQDskb4A7JG/AOySgADskoEA7JKCAOySgwDskoQA7JKFAOyShgDskocA7JKIAOySiQDskooA7JKLAOySjADsko0A7JKOAOySjwDskpAA7JKRAOySkgDskpMA7JKUAOySlQDskpYA7JKXAOySmADskpkA7JKaAOySmwDskpwA7JKdAOySngDskp8A7JKgAOySoQDskqIA7JKjAOySpADskqUA7JKmAOySpwDskqgA7JKpAOySqgDskqsA7JKsAOySrQDskq4A7JKvAOySsADskrEA7JKyAOySswDskrQA7JK1AOyStgDskrcA7JK4AOySuQDskroA7JK7AOySvADskr0A7JK+AOySvwDsk4AA7JOBAOyTggDsk4MA7JOEAOyThQDsk4YA7JOHAOyTiADsk4kA7JOKAOyTiwDsk4wA7JONAOyTjgDsk48A7JOQAOyTkQDsk5IA7JOTAOy
TlADsk5UA7JOWAOyTlwDsk5gA7JOZAOyTmgDsk5sA7JOcAOyTnQDsk54A7JOfAOyToADsk6EA7JOiAOyTowDsk6QA7JOlAOyTpgDsk6cA7JOoAOyTqQDsk6oA7JOrAOyTrADsk60A7JOuAOyTrwDsk7AA7JOxAOyTsgDsk7MA7JO0AOyTtQDsk7YA7JO3AOyTuADsk7kA7JO6AOyTuwDsk7wA7JO9AOyTvgDsk78A7JSAAOyUgQDslIIA7JSDAOyUhADslIUA7JSGAOyUhwDslIgA7JSJAOyUigDslIsA7JSMAOyUjQDslI4A7JSPAOyUkADslJEA7JSSAOyUkwDslJQA7JSVAOyUlgDslJcA7JSYAOyUmQDslJoA7JSbAOyUnADslJ0A7JSeAOyUnwDslKAA7JShAOyUogDslKMA7JSkAOyUpQDslKYA7JSnAOyUqADslKkA7JSqAOyUqwDslKwA7JStAOyUrgDslK8A7JSwAOyUsQDslLIA7JSzAOyUtADslLUA7JS2AOyUtwDslLgA7JS5AOyUugDslLsA7JS8AOyUvQDslL4A7JS/AOyVgADslYEA7JWCAOyVgwDslYQA7JWFAOyVhgDslYcA7JWIAOyViQDslYoA7JWLAOyVjADslY0A7JWOAOyVjwDslZAA7JWRAOyVkgDslZMA7JWUAOyVlQDslZYA7JWXAOyVmADslZkA7JWaAOyVmwDslZwA7JWdAOyVngDslZ8A7JWgAOyVoQDslaIA7JWjAOyVpADslaUA7JWmAOyVpwDslagA7JWpAOyVqgDslasA7JWsAOyVrQDsla4A7JWvAOyVsADslbEA7JWyAOyVswDslbQA7JW1AOyVtgDslbcA7JW4AOyVuQDslboA7JW7AOyVvADslb0A7JW+AOyVvwDsloAA7JaBAOyWggDsloMA7JaEAOyWhQDsloYA7JaHAOyWiADslokA7JaKAOyWiwDslowA7JaNAOyWjgDslo8A7JaQAOyWkQDslpIA7JaTAOyWlADslpUA7JaWAOyWlwDslpgA7JaZAOyWmgDslpsA7JacAOyWnQDslp4A7JafAOyWoADslqEA7JaiAOyWowDslqQA7JalAOyWpgDslqcA7JaoAOyWqQDslqoA7JarAOyWrADslq0A7JauAOyWrwDslrAA7JaxAOyWsgDslrMA7Ja0AOyWtQDslrYA7Ja3AOyWuADslrkA7Ja6AOyWuwDslrwA7Ja9AOyWvgDslr8A7JeAAOyXgQDsl4IA7JeDAOyXhADsl4UA7JeGAOyXhwDsl4gA7JeJAOyXigDsl4sA7JeMAOyXjQDsl44A7JePAOyXkADsl5EA7JeSAOyXkwDsl5QA7JeVAOyXlgDsl5cA7JeYAOyXmQDsl5oA7JebAOyXnADsl50A7JeeAOyXnwDsl6AA7JehAOyXogDsl6MA7JekAOyXpQDsl6YA7JenAOyXqADsl6kA7JeqAOyXqwDsl6wA7JetAOyXrgDsl68A7JewAOyXsQDsl7IA7JezAOyXtADsl7UA7Je2AOyXtwDsl7gA7Je5AOyXugDsl7sA7Je8AOyXvQDsl74A7Je/AOyYgADsmIEA7JiCAOyYgwDsmIQA7JiFAOyYhgDsmIcA7JiIAOyYiQDsmIoA7JiLAOyYjADsmI0A7JiOAOyYjwDsmJAA7JiRAOyYkgDsmJMA7JiUAOyYlQDsmJYA7JiXAOyYmADsmJkA7JiaAOyYmwDsmJwA7JidAOyYngDsmJ8A7JigAOyYoQDsmKIA7JijAOyYpADsmKUA7JimAOyYpwDsmKgA7JipAOyYqgDsmKsA7JisAOyYrQDsmK4A7JivAOyYsADsmLEA7JiyAOyYswDsmLQA7Ji1AOyYtgDsmLcA7Ji4AOyYuQDsmLoA7Ji7AOyYvADsmL0A7Ji+AOyYvwDsmYAA7JmBAOyZggDsmYMA7JmEAOyZhQDsmYYA7JmHAOyZiADsmYkA7JmKAOy
ZiwDsmYwA7JmNAOyZjgDsmY8A7JmQAOyZkQDsmZIA7JmTAOyZlADsmZUA7JmWAOyZlwDsmZgA7JmZAOyZmgDsmZsA7JmcAOyZnQDsmZ4A7JmfAOyZoADsmaEA7JmiAOyZowDsmaQA7JmlAOyZpgDsmacA7JmoAOyZqQDsmaoA7JmrAOyZrADsma0A7JmuAOyZrwDsmbAA7JmxAOyZsgDsmbMA7Jm0AOyZtQDsmbYA7Jm3AOyZuADsmbkA7Jm6AOyZuwDsmbwA7Jm9AOyZvgDsmb8A7JqAAOyagQDsmoIA7JqDAOyahADsmoUA7JqGAOyahwDsmogA7JqJAOyaigDsmosA7JqMAOyajQDsmo4A7JqPAOyakADsmpEA7JqSAOyakwDsmpQA7JqVAOyalgDsmpcA7JqYAOyamQDsmpoA7JqbAOyanADsmp0A7JqeAOyanwDsmqAA7JqhAOyaogDsmqMA7JqkAOyapQDsmqYA7JqnAOyaqADsmqkA7JqqAOyaqwDsmqwA7JqtAOyargDsmq8A7JqwAOyasQDsmrIA7JqzAOyatADsmrUA7Jq2AOyatwDsmrgA7Jq5AOyaugDsmrsA7Jq8AOyavQDsmr4A7Jq/AOybgADsm4EA7JuCAOybgwDsm4QA7JuFAOybhgDsm4cA7JuIAOybiQDsm4oA7JuLAOybjADsm40A7JuOAOybjwDsm5AA7JuRAOybkgDsm5MA7JuUAOyblQDsm5YA7JuXAOybmADsm5kA7JuaAOybmwDsm5wA7JudAOybngDsm58A7JugAOyboQDsm6IA7JujAOybpADsm6UA7JumAOybpwDsm6gA7JupAOybqgDsm6sA7JusAOybrQDsm64A7JuvAOybsADsm7EA7JuyAOybswDsm7QA7Ju1AOybtgDsm7cA7Ju4AOybuQDsm7oA7Ju7AOybvADsm70A7Ju+AOybvwDsnIAA7JyBAOycggDsnIMA7JyEAOychQDsnIYA7JyHAOyciADsnIkA7JyKAOyciwDsnIwA7JyNAOycjgDsnI8A7JyQAOyckQDsnJIA7JyTAOyclADsnJUA7JyWAOyclwDsnJgA7JyZAOycmgDsnJsA7JycAOycnQDsnJ4A7JyfAOycoADsnKEA7JyiAOycowDsnKQA7JylAOycpgDsnKcA7JyoAOycqQDsnKoA7JyrAOycrADsnK0A7JyuAOycrwDsnLAA7JyxAOycsgDsnLMA7Jy0AOyctQDsnLYA7Jy3AOycuADsnLkA7Jy6AOycuwDsnLwA7Jy9AOycvgDsnL8A7J2AAOydgQDsnYIA7J2DAOydhADsnYUA7J2GAOydhwDsnYgA7J2JAOydigDsnYsA7J2MAOydjQDsnY4A7J2PAOydkADsnZEA7J2SAOydkwDsnZQA7J2VAOydlgDsnZcA7J2YAOydmQDsnZoA7J2bAOydnADsnZ0A7J2eAOydnwDsnaAA7J2hAOydogDsnaMA7J2kAOydpQDsnaYA7J2nAOydqADsnakA7J2qAOydqwDsnawA7J2tAOydrgDsna8A7J2wAOydsQDsnbIA7J2zAOydtADsnbUA7J22AOydtwDsnbgA7J25AOydugDsnbsA7J28AOydvQDsnb4A7J2/AOyegADsnoEA7J6CAOyegwDsnoQA7J6FAOyehgDsnocA7J6IAOyeiQDsnooA7J6LAOyejADsno0A7J6OAOyejwDsnpAA7J6RAOyekgDsnpMA7J6UAOyelQDsnpYA7J6XAOyemADsnpkA7J6aAOyemwDsnpwA7J6dAOyengDsnp8A7J6gAOyeoQDsnqIA7J6jAOyepADsnqUA7J6mAOyepwDsnqgA7J6pAOyeqgDsnqsA7J6sAOyerQDsnq4A7J6vAOyesADsnrEA7J6yAOyeswDsnrQA7J61AOyetgDsnrcA7J64AOyeuQDsnroA7J67AOyevADsnr0A7J6+AOyevwDsn4AA7J+BAOy
fggDsn4MA7J+EAOyfhQDsn4YA7J+HAOyfiADsn4kA7J+KAOyfiwDsn4wA7J+NAOyfjgDsn48A7J+QAOyfkQDsn5IA7J+TAOyflADsn5UA7J+WAOyflwDsn5gA7J+ZAOyfmgDsn5sA7J+cAOyfnQDsn54A7J+fAOyfoADsn6EA7J+iAOyfowDsn6QA7J+lAOyfpgDsn6cA7J+oAOyfqQDsn6oA7J+rAOyfrADsn60A7J+uAOyfrwDsn7AA7J+xAOyfsgDsn7MA7J+0AOyftQDsn7YA7J+3AOyfuADsn7kA7J+6AOyfuwDsn7wA7J+9AOyfvgDsn78A7KCAAOyggQDsoIIA7KCDAOyghADsoIUA7KCGAOyghwDsoIgA7KCJAOygigDsoIsA7KCMAOygjQDsoI4A7KCPAOygkADsoJEA7KCSAOygkwDsoJQA7KCVAOyglgDsoJcA7KCYAOygmQDsoJoA7KCbAOygnADsoJ0A7KCeAOygnwDsoKAA7KChAOygogDsoKMA7KCkAOygpQDsoKYA7KCnAOygqADsoKkA7KCqAOygqwDsoKwA7KCtAOygrgDsoK8A7KCwAOygsQDsoLIA7KCzAOygtADsoLUA7KC2AOygtwDsoLgA7KC5AOygugDsoLsA7KC8AOygvQDsoL4A7KC/AOyhgADsoYEA7KGCAOyhgwDsoYQA7KGFAOyhhgDsoYcA7KGIAOyhiQDsoYoA7KGLAOyhjADsoY0A7KGOAOyhjwDsoZAA7KGRAOyhkgDsoZMA7KGUAOyhlQDsoZYA7KGXAOyhmADsoZkA7KGaAOyhmwDsoZwA7KGdAOyhngDsoZ8A7KGgAOyhoQDsoaIA7KGjAOyhpADsoaUA7KGmAOyhpwDsoagA7KGpAOyhqgDsoasA7KGsAOyhrQDsoa4A7KGvAOyhsADsobEA7KGyAOyhswDsobQA7KG1AOyhtgDsobcA7KG4AOyhuQDsoboA7KG7AOyhvADsob0A7KG+AOyhvwDsooAA7KKBAOyiggDsooMA7KKEAOyihQDsooYA7KKHAOyiiADsookA7KKKAOyiiwDsoowA7KKNAOyijgDsoo8A7KKQAOyikQDsopIA7KKTAOyilADsopUA7KKWAOyilwDsopgA7KKZAOyimgDsopsA7KKcAOyinQDsop4A7KKfAOyioADsoqEA7KKiAOyiowDsoqQA7KKlAOyipgDsoqcA7KKoAOyiqQDsoqoA7KKrAOyirADsoq0A7KKuAOyirwDsorAA7KKxAOyisgDsorMA7KK0AOyitQDsorYA7KK3AOyiuADsorkA7KK6AOyiuwDsorwA7KK9AOyivgDsor8A7KOAAOyjgQDso4IA7KODAOyjhADso4UA7KOGAOyjhwDso4gA7KOJAOyjigDso4sA7KOMAOyjjQDso44A7KOPAOyjkADso5EA7KOSAOyjkwDso5QA7KOVAOyjlgDso5cA7KOYAOyjmQDso5oA7KObAOyjnADso50A7KOeAOyjnwDso6AA7KOhAOyjogDso6MA7KOkAOyjpQDso6YA7KOnAOyjqADso6kA7KOqAOyjqwDso6wA7KOtAOyjrgDso68A7KOwAOyjsQDso7IA7KOzAOyjtADso7UA7KO2AOyjtwDso7gA7KO5AOyjugDso7sA7KO8AOyjvOydmADso70A7KO+AOyjvwDspIAA7KSBAOykggDspIMA7KSEAOykhQDspIYA7KSHAOykiADspIkA7KSKAOykiwDspIwA7KSNAOykjgDspI8A7KSQAOykkQDspJIA7KSTAOyklADspJUA7KSWAOyklwDspJgA7KSZAOykmgDspJsA7KScAOyknQDspJ4A7KSfAOykoADspKEA7KSiAOykowDspKQA7KSlAOykpgDspKcA7KSoAOykqQDspKoA7KSrAOykrADspK0A7KSuAOykrwDspLAA7KSxAOyksgDspLMA7KS0AOyktQDspLYA7KS
3AOykuADspLkA7KS6AOykuwDspLwA7KS9AOykvgDspL8A7KWAAOylgQDspYIA7KWDAOylhADspYUA7KWGAOylhwDspYgA7KWJAOyligDspYsA7KWMAOyljQDspY4A7KWPAOylkADspZEA7KWSAOylkwDspZQA7KWVAOyllgDspZcA7KWYAOylmQDspZoA7KWbAOylnADspZ0A7KWeAOylnwDspaAA7KWhAOylogDspaMA7KWkAOylpQDspaYA7KWnAOylqADspakA7KWqAOylqwDspawA7KWtAOylrgDspa8A7KWwAOylsQDspbIA7KWzAOyltADspbUA7KW2AOyltwDspbgA7KW5AOylugDspbsA7KW8AOylvQDspb4A7KW/AOymgADspoEA7KaCAOymgwDspoQA7KaFAOymhgDspocA7KaIAOymiQDspooA7KaLAOymjADspo0A7KaOAOymjwDsppAA7KaRAOymkgDsppMA7KaUAOymlQDsppYA7KaXAOymmADsppkA7KaaAOymmwDsppwA7KadAOymngDspp8A7KagAOymoQDspqIA7KajAOympADspqUA7KamAOympwDspqgA7KapAOymqgDspqsA7KasAOymrQDspq4A7KavAOymsADsprEA7KayAOymswDsprQA7Ka1AOymtgDsprcA7Ka4AOymuQDsproA7Ka7AOymvADspr0A7Ka+AOymvwDsp4AA7KeBAOynggDsp4MA7KeEAOynhQDsp4YA7KeHAOyniADsp4kA7KeKAOyniwDsp4wA7KeNAOynjgDsp48A7KeQAOynkQDsp5IA7KeTAOynlADsp5UA7KeWAOynlwDsp5gA7KeZAOynmgDsp5sA7KecAOynnQDsp54A7KefAOynoADsp6EA7KeiAOynowDsp6QA7KelAOynpgDsp6cA7KeoAOynqQDsp6oA7KerAOynrADsp60A7KeuAOynrwDsp7AA7KexAOynsgDsp7MA7Ke0AOyntQDsp7YA7Ke3AOynuADsp7kA7Ke6AOynuwDsp7wA7Ke9AOynvgDsp78A7KiAAOyogQDsqIIA7KiDAOyohADsqIUA7KiGAOyohwDsqIgA7KiJAOyoigDsqIsA7KiMAOyojQDsqI4A7KiPAOyokADsqJEA7KiSAOyokwDsqJQA7KiVAOyolgDsqJcA7KiYAOyomQDsqJoA7KibAOyonADsqJ0A7KieAOyonwDsqKAA7KihAOyoogDsqKMA7KikAOyopQDsqKYA7KinAOyoqADsqKkA7KiqAOyoqwDsqKwA7KitAOyorgDsqK8A7KiwAOyosQDsqLIA7KizAOyotADsqLUA7Ki2AOyotwDsqLgA7Ki5AOyougDsqLsA7Ki8AOyovQDsqL4A7Ki/AOypgADsqYEA7KmCAOypgwDsqYQA7KmFAOyphgDsqYcA7KmIAOypiQDsqYoA7KmLAOypjADsqY0A7KmOAOypjwDsqZAA7KmRAOypkgDsqZMA7KmUAOyplQDsqZYA7KmXAOypmADsqZkA7KmaAOypmwDsqZwA7KmdAOypngDsqZ8A7KmgAOypoQDsqaIA7KmjAOyppADsqaUA7KmmAOyppwDsqagA7KmpAOypqgDsqasA7KmsAOyprQDsqa4A7KmvAOypsADsqbEA7KmyAOypswDsqbQA7Km1AOyptgDsqbcA7Km4AOypuQDsqboA7Km7AOypvADsqb0A7Km+AOypvwDsqoAA7KqBAOyqggDsqoMA7KqEAOyqhQDsqoYA7KqHAOyqiADsqokA7KqKAOyqiwDsqowA7KqNAOyqjgDsqo8A7KqQAOyqkQDsqpIA7KqTAOyqlADsqpUA7KqWAOyqlwDsqpgA7KqZAOyqmgDsqpsA7KqcAOyqnQDsqp4A7KqfAOyqoADsqqEA7KqiAOyqowDsqqQA7KqlAOyqpgDsqqcA7KqoAOyqqQDsqqoA7KqrAOyqrADsqq0A7Kq
uAOyqrwDsqrAA7KqxAOyqsgDsqrMA7Kq0AOyqtQDsqrYA7Kq3AOyquADsqrkA7Kq6AOyquwDsqrwA7Kq9AOyqvgDsqr8A7KuAAOyrgQDsq4IA7KuDAOyrhADsq4UA7KuGAOyrhwDsq4gA7KuJAOyrigDsq4sA7KuMAOyrjQDsq44A7KuPAOyrkADsq5EA7KuSAOyrkwDsq5QA7KuVAOyrlgDsq5cA7KuYAOyrmQDsq5oA7KubAOyrnADsq50A7KueAOyrnwDsq6AA7KuhAOyrogDsq6MA7KukAOyrpQDsq6YA7KunAOyrqADsq6kA7KuqAOyrqwDsq6wA7KutAOyrrgDsq68A7KuwAOyrsQDsq7IA7KuzAOyrtADsq7UA7Ku2AOyrtwDsq7gA7Ku5AOyrugDsq7sA7Ku8AOyrvQDsq74A7Ku/AOysgADsrIEA7KyCAOysgwDsrIQA7KyFAOyshgDsrIcA7KyIAOysiQDsrIoA7KyLAOysjADsrI0A7KyOAOysjwDsrJAA7KyRAOyskgDsrJMA7KyUAOyslQDsrJYA7KyXAOysmADsrJkA7KyaAOysmwDsrJwA7KydAOysngDsrJ8A7KygAOysoQDsrKIA7KyjAOyspADsrKUA7KymAOyspwDsrKgA7KypAOysqgDsrKsA7KysAOysrQDsrK4A7KyvAOyssADsrLEA7KyyAOysswDsrLQA7Ky1AOystgDsrLcA7Ky4AOysuQDsrLoA7Ky7AOysvADsrL0A7Ky+AOysvwDsrYAA7K2BAOytggDsrYMA7K2EAOythQDsrYYA7K2HAOytiADsrYkA7K2KAOytiwDsrYwA7K2NAOytjgDsrY8A7K2QAOytkQDsrZIA7K2TAOytlADsrZUA7K2WAOytlwDsrZgA7K2ZAOytmgDsrZsA7K2cAOytnQDsrZ4A7K2fAOytoADsraEA7K2iAOytowDsraQA7K2lAOytpgDsracA7K2oAOytqQDsraoA7K2rAOytrADsra0A7K2uAOytrwDsrbAA7K2xAOytsgDsrbMA7K20AOyttQDsrbYA7K23AOytuADsrbkA7K26AOytuwDsrbwA7K29AOytvgDsrb8A7K6AAOyugQDsroIA7K6DAOyuhADsroUA7K6GAOyuhwDsrogA7K6JAOyuigDsrosA7K6MAOyujQDsro4A7K6PAOyukADsrpEA7K6SAOyukwDsrpQA7K6VAOyulgDsrpcA7K6YAOyumQDsrpoA7K6bAOyunADsrp0A7K6eAOyunwDsrqAA7K6hAOyuogDsrqMA7K6kAOyupQDsrqYA7K6nAOyuqADsrqkA7K6qAOyuqwDsrqwA7K6tAOyurgDsrq8A7K6wAOyusQDsrrIA7K6zAOyutADsrrUA7K62AOyutwDsrrgA7K65AOyuugDsrrsA7K68AOyuvQDsrr4A7K6/AOyvgADsr4EA7K+CAOyvgwDsr4QA7K+FAOyvhgDsr4cA7K+IAOyviQDsr4oA7K+LAOyvjADsr40A7K+OAOyvjwDsr5AA7K+RAOyvkgDsr5MA7K+UAOyvlQDsr5YA7K+XAOyvmADsr5kA7K+aAOyvmwDsr5wA7K+dAOyvngDsr58A7K+gAOyvoQDsr6IA7K+jAOyvpADsr6UA7K+mAOyvpwDsr6gA7K+pAOyvqgDsr6sA7K+sAOyvrQDsr64A7K+vAOyvsADsr7EA7K+yAOyvswDsr7QA7K+1AOyvtgDsr7cA7K+4AOyvuQDsr7oA7K+7AOyvvADsr70A7K++AOyvvwDssIAA7LCBAOywggDssIMA7LCEAOywhQDssIYA7LCHAOywiADssIkA7LCKAOywiwDssIwA7LCNAOywjgDssI8A7LCQAOywkQDssJIA7LCTAOywlADssJUA7LCWAOywlwDssJgA7LCZAOywmgDssJsA7LCcAOywnQDssJ4A7LCfAOywoADssKEA7LCiAOywowDssKQA7LC
lAOywpgDssKcA7LCoAOywqQDssKoA7LCrAOywrADssK0A7LCuAOywrwDssLAA7LCxAOywsgDssLMA7LC0AOywtQDssLYA7LC3AOywuADssLjqs6AA7LC5AOywugDssLsA7LC8AOywvQDssL4A7LC/AOyxgADssYEA7LGCAOyxgwDssYQA7LGFAOyxhgDssYcA7LGIAOyxiQDssYoA7LGLAOyxjADssY0A7LGOAOyxjwDssZAA7LGRAOyxkgDssZMA7LGUAOyxlQDssZYA7LGXAOyxmADssZkA7LGaAOyxmwDssZwA7LGdAOyxngDssZ8A7LGgAOyxoQDssaIA7LGjAOyxpADssaUA7LGmAOyxpwDssagA7LGpAOyxqgDssasA7LGsAOyxrQDssa4A7LGvAOyxsADssbEA7LGyAOyxswDssbQA7LG1AOyxtgDssbcA7LG4AOyxuQDssboA7LG7AOyxvADssb0A7LG+AOyxvwDssoAA7LKBAOyyggDssoMA7LKEAOyyhQDssoYA7LKHAOyyiADssokA7LKKAOyyiwDssowA7LKNAOyyjgDsso8A7LKQAOyykQDsspIA7LKTAOyylADsspUA7LKWAOyylwDsspgA7LKZAOyymgDsspsA7LKcAOyynQDssp4A7LKfAOyyoADssqEA7LKiAOyyowDssqQA7LKlAOyypgDssqcA7LKoAOyyqQDssqoA7LKrAOyyrADssq0A7LKuAOyyrwDssrAA7LKxAOyysgDssrMA7LK0AOyytQDssrYA7LK3AOyyuADssrkA7LK6AOyyuwDssrwA7LK9AOyyvgDssr8A7LOAAOyzgQDss4IA7LODAOyzhADss4UA7LOGAOyzhwDss4gA7LOJAOyzigDss4sA7LOMAOyzjQDss44A7LOPAOyzkADss5EA7LOSAOyzkwDss5QA7LOVAOyzlgDss5cA7LOYAOyzmQDss5oA7LObAOyznADss50A7LOeAOyznwDss6AA7LOhAOyzogDss6MA7LOkAOyzpQDss6YA7LOnAOyzqADss6kA7LOqAOyzqwDss6wA7LOtAOyzrgDss68A7LOwAOyzsQDss7IA7LOzAOyztADss7UA7LO2AOyztwDss7gA7LO5AOyzugDss7sA7LO8AOyzvQDss74A7LO/AOy0gADstIEA7LSCAOy0gwDstIQA7LSFAOy0hgDstIcA7LSIAOy0iQDstIoA7LSLAOy0jADstI0A7LSOAOy0jwDstJAA7LSRAOy0kgDstJMA7LSUAOy0lQDstJYA7LSXAOy0mADstJkA7LSaAOy0mwDstJwA7LSdAOy0ngDstJ8A7LSgAOy0oQDstKIA7LSjAOy0pADstKUA7LSmAOy0pwDstKgA7LSpAOy0qgDstKsA7LSsAOy0rQDstK4A7LSvAOy0sADstLEA7LSyAOy0swDstLQA7LS1AOy0tgDstLcA7LS4AOy0uQDstLoA7LS7AOy0vADstL0A7LS+AOy0vwDstYAA7LWBAOy1ggDstYMA7LWEAOy1hQDstYYA7LWHAOy1iADstYkA7LWKAOy1iwDstYwA7LWNAOy1jgDstY8A7LWQAOy1kQDstZIA7LWTAOy1lADstZUA7LWWAOy1lwDstZgA7LWZAOy1mgDstZsA7LWcAOy1nQDstZ4A7LWfAOy1oADstaEA7LWiAOy1owDstaQA7LWlAOy1pgDstacA7LWoAOy1qQDstaoA7LWrAOy1rADsta0A7LWuAOy1rwDstbAA7LWxAOy1sgDstbMA7LW0AOy1tQDstbYA7LW3AOy1uADstbkA7LW6AOy1uwDstbwA7LW9AOy1vgDstb8A7LaAAOy2gQDstoIA7LaDAOy2hADstoUA7LaGAOy2hwDstogA7LaJAOy2igDstosA7LaMAOy2jQDsto4A7LaPAOy2kADstpEA7LaSAOy2kwDstpQA7LaVAOy2lgDstpcA7LaYAOy2mQDstpo
A7LabAOy2nADstp0A7LaeAOy2nwDstqAA7LahAOy2ogDstqMA7LakAOy2pQDstqYA7LanAOy2qADstqkA7LaqAOy2qwDstqwA7LatAOy2rgDstq8A7LawAOy2sQDstrIA7LazAOy2tADstrUA7La2AOy2twDstrgA7La5AOy2ugDstrsA7La8AOy2vQDstr4A7La/AOy3gADst4EA7LeCAOy3gwDst4QA7LeFAOy3hgDst4cA7LeIAOy3iQDst4oA7LeLAOy3jADst40A7LeOAOy3jwDst5AA7LeRAOy3kgDst5MA7LeUAOy3lQDst5YA7LeXAOy3mADst5kA7LeaAOy3mwDst5wA7LedAOy3ngDst58A7LegAOy3oQDst6IA7LejAOy3pADst6UA7LemAOy3pwDst6gA7LepAOy3qgDst6sA7LesAOy3rQDst64A7LevAOy3sADst7EA7LeyAOy3swDst7QA7Le1AOy3tgDst7cA7Le4AOy3uQDst7oA7Le7AOy3vADst70A7Le+AOy3vwDsuIAA7LiBAOy4ggDsuIMA7LiEAOy4hQDsuIYA7LiHAOy4iADsuIkA7LiKAOy4iwDsuIwA7LiNAOy4jgDsuI8A7LiQAOy4kQDsuJIA7LiTAOy4lADsuJUA7LiWAOy4lwDsuJgA7LiZAOy4mgDsuJsA7LicAOy4nQDsuJ4A7LifAOy4oADsuKEA7LiiAOy4owDsuKQA7LilAOy4pgDsuKcA7LioAOy4qQDsuKoA7LirAOy4rADsuK0A7LiuAOy4rwDsuLAA7LixAOy4sgDsuLMA7Li0AOy4tQDsuLYA7Li3AOy4uADsuLkA7Li6AOy4uwDsuLwA7Li9AOy4vgDsuL8A7LmAAOy5gQDsuYIA7LmDAOy5hADsuYUA7LmGAOy5hwDsuYgA7LmJAOy5igDsuYsA7LmMAOy5jQDsuY4A7LmPAOy5kADsuZEA7LmSAOy5kwDsuZQA7LmVAOy5lgDsuZcA7LmYAOy5mQDsuZoA7LmbAOy5nADsuZ0A7LmeAOy5nwDsuaAA7LmhAOy5ogDsuaMA7LmkAOy5pQDsuaYA7LmnAOy5qADsuakA7LmqAOy5qwDsuawA7LmtAOy5rgDsua8A7LmwAOy5sQDsubIA7LmzAOy5tADsubUA7Lm2AOy5twDsubgA7Lm5AOy5ugDsubsA7Lm8AOy5vQDsub4A7Lm/AOy6gADsuoEA7LqCAOy6gwDsuoQA7LqFAOy6hgDsuocA7LqIAOy6iQDsuooA7LqLAOy6jADsuo0A7LqOAOy6jwDsupAA7LqRAOy6kgDsupMA7LqUAOy6lQDsupYA7LqXAOy6mADsupkA7LqaAOy6mwDsupwA7LqdAOy6ngDsup8A7LqgAOy6oQDsuqIA7LqjAOy6pADsuqUA7LqmAOy6pwDsuqgA7LqpAOy6qgDsuqsA7LqsAOy6rQDsuq4A7LqvAOy6sADsurEA7LqyAOy6swDsurQA7Lq1AOy6tgDsurcA7Lq4AOy6uQDsuroA7Lq7AOy6vADsur0A7Lq+AOy6vwDsu4AA7LuBAOy7ggDsu4MA7LuEAOy7hQDsu4YA7LuHAOy7iADsu4kA7LuKAOy7iwDsu4wA7LuNAOy7jgDsu48A7LuQAOy7kQDsu5IA7LuTAOy7lADsu5UA7LuWAOy7lwDsu5gA7LuZAOy7mgDsu5sA7LucAOy7nQDsu54A7LufAOy7oADsu6EA7LuiAOy7owDsu6QA7LulAOy7pgDsu6cA7LuoAOy7qQDsu6oA7LurAOy7rADsu60A7LuuAOy7rwDsu7AA7LuxAOy7sgDsu7MA7Lu0AOy7tQDsu7YA7Lu3AOy7uADsu7kA7Lu6AOy7uwDsu7wA7Lu9AOy7vgDsu78A7LyAAOy8gQDsvIIA7LyDAOy8hADsvIUA7LyGAOy8hwDsvIgA7LyJAOy8igDsvIsA7LyMAOy8jQDsvI4A7LyPAOy8kADsvJE
A7LySAOy8kwDsvJQA7LyVAOy8lgDsvJcA7LyYAOy8mQDsvJoA7LybAOy8nADsvJ0A7LyeAOy8nwDsvKAA7LyhAOy8ogDsvKMA7LykAOy8pQDsvKYA7LynAOy8qADsvKkA7LyqAOy8qwDsvKwA7LytAOy8rgDsvK8A7LywAOy8sQDsvLIA7LyzAOy8tADsvLUA7Ly2AOy8twDsvLgA7Ly5AOy8ugDsvLsA7Ly8AOy8vQDsvL4A7Ly/AOy9gADsvYEA7L2CAOy9gwDsvYQA7L2FAOy9hgDsvYcA7L2IAOy9iQDsvYoA7L2LAOy9jADsvY0A7L2OAOy9jwDsvZAA7L2RAOy9kgDsvZMA7L2UAOy9lQDsvZYA7L2XAOy9mADsvZkA7L2aAOy9mwDsvZwA7L2dAOy9ngDsvZ8A7L2gAOy9oQDsvaIA7L2jAOy9pADsvaUA7L2mAOy9pwDsvagA7L2pAOy9qgDsvasA7L2sAOy9rQDsva4A7L2vAOy9sADsvbEA7L2yAOy9swDsvbQA7L21AOy9tgDsvbcA7L24AOy9uQDsvboA7L27AOy9vADsvb0A7L2+AOy9vwDsvoAA7L6BAOy+ggDsvoMA7L6EAOy+hQDsvoYA7L6HAOy+iADsvokA7L6KAOy+iwDsvowA7L6NAOy+jgDsvo8A7L6QAOy+kQDsvpIA7L6TAOy+lADsvpUA7L6WAOy+lwDsvpgA7L6ZAOy+mgDsvpsA7L6cAOy+nQDsvp4A7L6fAOy+oADsvqEA7L6iAOy+owDsvqQA7L6lAOy+pgDsvqcA7L6oAOy+qQDsvqoA7L6rAOy+rADsvq0A7L6uAOy+rwDsvrAA7L6xAOy+sgDsvrMA7L60AOy+tQDsvrYA7L63AOy+uADsvrkA7L66AOy+uwDsvrwA7L69AOy+vgDsvr8A7L+AAOy/gQDsv4IA7L+DAOy/hADsv4UA7L+GAOy/hwDsv4gA7L+JAOy/igDsv4sA7L+MAOy/jQDsv44A7L+PAOy/kADsv5EA7L+SAOy/kwDsv5QA7L+VAOy/lgDsv5cA7L+YAOy/mQDsv5oA7L+bAOy/nADsv50A7L+eAOy/nwDsv6AA7L+hAOy/ogDsv6MA7L+kAOy/pQDsv6YA7L+nAOy/qADsv6kA7L+qAOy/qwDsv6wA7L+tAOy/rgDsv68A7L+wAOy/sQDsv7IA7L+zAOy/tADsv7UA7L+2AOy/twDsv7gA7L+5AOy/ugDsv7sA7L+8AOy/vQDsv74A7L+/AO2AgADtgIEA7YCCAO2AgwDtgIQA7YCFAO2AhgDtgIcA7YCIAO2AiQDtgIoA7YCLAO2AjADtgI0A7YCOAO2AjwDtgJAA7YCRAO2AkgDtgJMA7YCUAO2AlQDtgJYA7YCXAO2AmADtgJkA7YCaAO2AmwDtgJwA7YCdAO2AngDtgJ8A7YCgAO2AoQDtgKIA7YCjAO2ApADtgKUA7YCmAO2ApwDtgKgA7YCpAO2AqgDtgKsA7YCsAO2ArQDtgK4A7YCvAO2AsADtgLEA7YCyAO2AswDtgLQA7YC1AO2AtgDtgLcA7YC4AO2AuQDtgLoA7YC7AO2AvADtgL0A7YC+AO2AvwDtgYAA7YGBAO2BggDtgYMA7YGEAO2BhQDtgYYA7YGHAO2BiADtgYkA7YGKAO2BiwDtgYwA7YGNAO2BjgDtgY8A7YGQAO2BkQDtgZIA7YGTAO2BlADtgZUA7YGWAO2BlwDtgZgA7YGZAO2BmgDtgZsA7YGcAO2BnQDtgZ4A7YGfAO2BoADtgaEA7YGiAO2BowDtgaQA7YGlAO2BpgDtgacA7YGoAO2BqQDtgaoA7YGrAO2BrADtga0A7YGuAO2BrwDtgbAA7YGxAO2BsgDtgbMA7YG0AO2BtQDtgbYA7YG3AO2BuADtgbkA7YG6AO2BuwDtgbwA7YG9AO2BvgDtgb8A7YKAAO2CgQDtgoIA7YKDAO2ChADtgoUA7YKGAO2ChwDtgog
A7YKJAO2CigDtgosA7YKMAO2CjQDtgo4A7YKPAO2CkADtgpEA7YKSAO2CkwDtgpQA7YKVAO2ClgDtgpcA7YKYAO2CmQDtgpoA7YKbAO2CnADtgp0A7YKeAO2CnwDtgqAA7YKhAO2CogDtgqMA7YKkAO2CpQDtgqYA7YKnAO2CqADtgqkA7YKqAO2CqwDtgqwA7YKtAO2CrgDtgq8A7YKwAO2CsQDtgrIA7YKzAO2CtADtgrUA7YK2AO2CtwDtgrgA7YK5AO2CugDtgrsA7YK8AO2CvQDtgr4A7YK/AO2DgADtg4EA7YOCAO2DgwDtg4QA7YOFAO2DhgDtg4cA7YOIAO2DiQDtg4oA7YOLAO2DjADtg40A7YOOAO2DjwDtg5AA7YORAO2DkgDtg5MA7YOUAO2DlQDtg5YA7YOXAO2DmADtg5kA7YOaAO2DmwDtg5wA7YOdAO2DngDtg58A7YOgAO2DoQDtg6IA7YOjAO2DpADtg6UA7YOmAO2DpwDtg6gA7YOpAO2DqgDtg6sA7YOsAO2DrQDtg64A7YOvAO2DsADtg7EA7YOyAO2DswDtg7QA7YO1AO2DtgDtg7cA7YO4AO2DuQDtg7oA7YO7AO2DvADtg70A7YO+AO2DvwDthIAA7YSBAO2EggDthIMA7YSEAO2EhQDthIYA7YSHAO2EiADthIkA7YSKAO2EiwDthIwA7YSNAO2EjgDthI8A7YSQAO2EkQDthJIA7YSTAO2ElADthJUA7YSWAO2ElwDthJgA7YSZAO2EmgDthJsA7YScAO2EnQDthJ4A7YSfAO2EoADthKEA7YSiAO2EowDthKQA7YSlAO2EpgDthKcA7YSoAO2EqQDthKoA7YSrAO2ErADthK0A7YSuAO2ErwDthLAA7YSxAO2EsgDthLMA7YS0AO2EtQDthLYA7YS3AO2EuADthLkA7YS6AO2EuwDthLwA7YS9AO2EvgDthL8A7YWAAO2FgQDthYIA7YWDAO2FhADthYUA7YWGAO2FhwDthYgA7YWJAO2FigDthYsA7YWMAO2FjQDthY4A7YWPAO2FkADthZEA7YWSAO2FkwDthZQA7YWVAO2FlgDthZcA7YWYAO2FmQDthZoA7YWbAO2FnADthZ0A7YWeAO2FnwDthaAA7YWhAO2FogDthaMA7YWkAO2FpQDthaYA7YWnAO2FqADthakA7YWqAO2FqwDthawA7YWtAO2FrgDtha8A7YWwAO2FsQDthbIA7YWzAO2FtADthbUA7YW2AO2FtwDthbgA7YW5AO2FugDthbsA7YW8AO2FvQDthb4A7YW/AO2GgADthoEA7YaCAO2GgwDthoQA7YaFAO2GhgDthocA7YaIAO2GiQDthooA7YaLAO2GjADtho0A7YaOAO2GjwDthpAA7YaRAO2GkgDthpMA7YaUAO2GlQDthpYA7YaXAO2GmADthpkA7YaaAO2GmwDthpwA7YadAO2GngDthp8A7YagAO2GoQDthqIA7YajAO2GpADthqUA7YamAO2GpwDthqgA7YapAO2GqgDthqsA7YasAO2GrQDthq4A7YavAO2GsADthrEA7YayAO2GswDthrQA7Ya1AO2GtgDthrcA7Ya4AO2GuQDthroA7Ya7AO2GvADthr0A7Ya+AO2GvwDth4AA7YeBAO2HggDth4MA7YeEAO2HhQDth4YA7YeHAO2HiADth4kA7YeKAO2HiwDth4wA7YeNAO2HjgDth48A7YeQAO2HkQDth5IA7YeTAO2HlADth5UA7YeWAO2HlwDth5gA7YeZAO2HmgDth5sA7YecAO2HnQDth54A7YefAO2HoADth6EA7YeiAO2HowDth6QA7YelAO2HpgDth6cA7YeoAO2HqQDth6oA7YerAO2HrADth60A7YeuAO2HrwDth7AA7YexAO2HsgDth7MA7Ye0AO2HtQDth7YA7Ye3AO2HuADth7kA7Ye6AO2HuwDth7wA7Ye9AO2HvgDth78
A7YiAAO2IgQDtiIIA7YiDAO2IhADtiIUA7YiGAO2IhwDtiIgA7YiJAO2IigDtiIsA7YiMAO2IjQDtiI4A7YiPAO2IkADtiJEA7YiSAO2IkwDtiJQA7YiVAO2IlgDtiJcA7YiYAO2ImQDtiJoA7YibAO2InADtiJ0A7YieAO2InwDtiKAA7YihAO2IogDtiKMA7YikAO2IpQDtiKYA7YinAO2IqADtiKkA7YiqAO2IqwDtiKwA7YitAO2IrgDtiK8A7YiwAO2IsQDtiLIA7YizAO2ItADtiLUA7Yi2AO2ItwDtiLgA7Yi5AO2IugDtiLsA7Yi8AO2IvQDtiL4A7Yi/AO2JgADtiYEA7YmCAO2JgwDtiYQA7YmFAO2JhgDtiYcA7YmIAO2JiQDtiYoA7YmLAO2JjADtiY0A7YmOAO2JjwDtiZAA7YmRAO2JkgDtiZMA7YmUAO2JlQDtiZYA7YmXAO2JmADtiZkA7YmaAO2JmwDtiZwA7YmdAO2JngDtiZ8A7YmgAO2JoQDtiaIA7YmjAO2JpADtiaUA7YmmAO2JpwDtiagA7YmpAO2JqgDtiasA7YmsAO2JrQDtia4A7YmvAO2JsADtibEA7YmyAO2JswDtibQA7Ym1AO2JtgDtibcA7Ym4AO2JuQDtiboA7Ym7AO2JvADtib0A7Ym+AO2JvwDtioAA7YqBAO2KggDtioMA7YqEAO2KhQDtioYA7YqHAO2KiADtiokA7YqKAO2KiwDtiowA7YqNAO2KjgDtio8A7YqQAO2KkQDtipIA7YqTAO2KlADtipUA7YqWAO2KlwDtipgA7YqZAO2KmgDtipsA7YqcAO2KnQDtip4A7YqfAO2KoADtiqEA7YqiAO2KowDtiqQA7YqlAO2KpgDtiqcA7YqoAO2KqQDtiqoA7YqrAO2KrADtiq0A7YquAO2KrwDtirAA7YqxAO2KsgDtirMA7Yq0AO2KtQDtirYA7Yq3AO2KuADtirkA7Yq6AO2KuwDtirwA7Yq9AO2KvgDtir8A7YuAAO2LgQDti4IA7YuDAO2LhADti4UA7YuGAO2LhwDti4gA7YuJAO2LigDti4sA7YuMAO2LjQDti44A7YuPAO2LkADti5EA7YuSAO2LkwDti5QA7YuVAO2LlgDti5cA7YuYAO2LmQDti5oA7YubAO2LnADti50A7YueAO2LnwDti6AA7YuhAO2LogDti6MA7YukAO2LpQDti6YA7YunAO2LqADti6kA7YuqAO2LqwDti6wA7YutAO2LrgDti68A7YuwAO2LsQDti7IA7YuzAO2LtADti7UA7Yu2AO2LtwDti7gA7Yu5AO2LugDti7sA7Yu8AO2LvQDti74A7Yu/AO2MgADtjIEA7YyCAO2MgwDtjIQA7YyFAO2MhgDtjIcA7YyIAO2MiQDtjIoA7YyLAO2MjADtjI0A7YyOAO2MjwDtjJAA7YyRAO2MkgDtjJMA7YyUAO2MlQDtjJYA7YyXAO2MmADtjJkA7YyaAO2MmwDtjJwA7YydAO2MngDtjJ8A7YygAO2MoQDtjKIA7YyjAO2MpADtjKUA7YymAO2MpwDtjKgA7YypAO2MqgDtjKsA7YysAO2MrQDtjK4A7YyvAO2MsADtjLEA7YyyAO2MswDtjLQA7Yy1AO2MtgDtjLcA7Yy4AO2MuQDtjLoA7Yy7AO2MvADtjL0A7Yy+AO2MvwDtjYAA7Y2BAO2NggDtjYMA7Y2EAO2NhQDtjYYA7Y2HAO2NiADtjYkA7Y2KAO2NiwDtjYwA7Y2NAO2NjgDtjY8A7Y2QAO2NkQDtjZIA7Y2TAO2NlADtjZUA7Y2WAO2NlwDtjZgA7Y2ZAO2NmgDtjZsA7Y2cAO2NnQDtjZ4A7Y2fAO2NoADtjaEA7Y2iAO2NowDtjaQA7Y2lAO2NpgDtjacA7Y2oAO2NqQDtjaoA7Y2rAO2NrADtja0A7Y2uAO2NrwDtjbAA7Y2xAO2NsgDtjbMA7Y20AO2NtQDtjbY
A7Y23AO2NuADtjbkA7Y26AO2NuwDtjbwA7Y29AO2NvgDtjb8A7Y6AAO2OgQDtjoIA7Y6DAO2OhADtjoUA7Y6GAO2OhwDtjogA7Y6JAO2OigDtjosA7Y6MAO2OjQDtjo4A7Y6PAO2OkADtjpEA7Y6SAO2OkwDtjpQA7Y6VAO2OlgDtjpcA7Y6YAO2OmQDtjpoA7Y6bAO2OnADtjp0A7Y6eAO2OnwDtjqAA7Y6hAO2OogDtjqMA7Y6kAO2OpQDtjqYA7Y6nAO2OqADtjqkA7Y6qAO2OqwDtjqwA7Y6tAO2OrgDtjq8A7Y6wAO2OsQDtjrIA7Y6zAO2OtADtjrUA7Y62AO2OtwDtjrgA7Y65AO2OugDtjrsA7Y68AO2OvQDtjr4A7Y6/AO2PgADtj4EA7Y+CAO2PgwDtj4QA7Y+FAO2PhgDtj4cA7Y+IAO2PiQDtj4oA7Y+LAO2PjADtj40A7Y+OAO2PjwDtj5AA7Y+RAO2PkgDtj5MA7Y+UAO2PlQDtj5YA7Y+XAO2PmADtj5kA7Y+aAO2PmwDtj5wA7Y+dAO2PngDtj58A7Y+gAO2PoQDtj6IA7Y+jAO2PpADtj6UA7Y+mAO2PpwDtj6gA7Y+pAO2PqgDtj6sA7Y+sAO2PrQDtj64A7Y+vAO2PsADtj7EA7Y+yAO2PswDtj7QA7Y+1AO2PtgDtj7cA7Y+4AO2PuQDtj7oA7Y+7AO2PvADtj70A7Y++AO2PvwDtkIAA7ZCBAO2QggDtkIMA7ZCEAO2QhQDtkIYA7ZCHAO2QiADtkIkA7ZCKAO2QiwDtkIwA7ZCNAO2QjgDtkI8A7ZCQAO2QkQDtkJIA7ZCTAO2QlADtkJUA7ZCWAO2QlwDtkJgA7ZCZAO2QmgDtkJsA7ZCcAO2QnQDtkJ4A7ZCfAO2QoADtkKEA7ZCiAO2QowDtkKQA7ZClAO2QpgDtkKcA7ZCoAO2QqQDtkKoA7ZCrAO2QrADtkK0A7ZCuAO2QrwDtkLAA7ZCxAO2QsgDtkLMA7ZC0AO2QtQDtkLYA7ZC3AO2QuADtkLkA7ZC6AO2QuwDtkLwA7ZC9AO2QvgDtkL8A7ZGAAO2RgQDtkYIA7ZGDAO2RhADtkYUA7ZGGAO2RhwDtkYgA7ZGJAO2RigDtkYsA7ZGMAO2RjQDtkY4A7ZGPAO2RkADtkZEA7ZGSAO2RkwDtkZQA7ZGVAO2RlgDtkZcA7ZGYAO2RmQDtkZoA7ZGbAO2RnADtkZ0A7ZGeAO2RnwDtkaAA7ZGhAO2RogDtkaMA7ZGkAO2RpQDtkaYA7ZGnAO2RqADtkakA7ZGqAO2RqwDtkawA7ZGtAO2RrgDtka8A7ZGwAO2RsQDtkbIA7ZGzAO2RtADtkbUA7ZG2AO2RtwDtkbgA7ZG5AO2RugDtkbsA7ZG8AO2RvQDtkb4A7ZG/AO2SgADtkoEA7ZKCAO2SgwDtkoQA7ZKFAO2ShgDtkocA7ZKIAO2SiQDtkooA7ZKLAO2SjADtko0A7ZKOAO2SjwDtkpAA7ZKRAO2SkgDtkpMA7ZKUAO2SlQDtkpYA7ZKXAO2SmADtkpkA7ZKaAO2SmwDtkpwA7ZKdAO2SngDtkp8A7ZKgAO2SoQDtkqIA7ZKjAO2SpADtkqUA7ZKmAO2SpwDtkqgA7ZKpAO2SqgDtkqsA7ZKsAO2SrQDtkq4A7ZKvAO2SsADtkrEA7ZKyAO2SswDtkrQA7ZK1AO2StgDtkrcA7ZK4AO2SuQDtkroA7ZK7AO2SvADtkr0A7ZK+AO2SvwDtk4AA7ZOBAO2TggDtk4MA7ZOEAO2ThQDtk4YA7ZOHAO2TiADtk4kA7ZOKAO2TiwDtk4wA7ZONAO2TjgDtk48A7ZOQAO2TkQDtk5IA7ZOTAO2TlADtk5UA7ZOWAO2TlwDtk5gA7ZOZAO2TmgDtk5sA7ZOcAO2TnQDtk54A7ZOfAO2ToADtk6EA7ZOiAO2TowDtk6QA7ZOlAO2TpgDtk6cA7ZOoAO2TqQDtk6oA7ZOrAO2TrADtk60
A7ZOuAO2TrwDtk7AA7ZOxAO2TsgDtk7MA7ZO0AO2TtQDtk7YA7ZO3AO2TuADtk7kA7ZO6AO2TuwDtk7wA7ZO9AO2TvgDtk78A7ZSAAO2UgQDtlIIA7ZSDAO2UhADtlIUA7ZSGAO2UhwDtlIgA7ZSJAO2UigDtlIsA7ZSMAO2UjQDtlI4A7ZSPAO2UkADtlJEA7ZSSAO2UkwDtlJQA7ZSVAO2UlgDtlJcA7ZSYAO2UmQDtlJoA7ZSbAO2UnADtlJ0A7ZSeAO2UnwDtlKAA7ZShAO2UogDtlKMA7ZSkAO2UpQDtlKYA7ZSnAO2UqADtlKkA7ZSqAO2UqwDtlKwA7ZStAO2UrgDtlK8A7ZSwAO2UsQDtlLIA7ZSzAO2UtADtlLUA7ZS2AO2UtwDtlLgA7ZS5AO2UugDtlLsA7ZS8AO2UvQDtlL4A7ZS/AO2VgADtlYEA7ZWCAO2VgwDtlYQA7ZWFAO2VhgDtlYcA7ZWIAO2ViQDtlYoA7ZWLAO2VjADtlY0A7ZWOAO2VjwDtlZAA7ZWRAO2VkgDtlZMA7ZWUAO2VlQDtlZYA7ZWXAO2VmADtlZkA7ZWaAO2VmwDtlZwA7ZWdAO2VngDtlZ8A7ZWgAO2VoQDtlaIA7ZWjAO2VpADtlaUA7ZWmAO2VpwDtlagA7ZWpAO2VqgDtlasA7ZWsAO2VrQDtla4A7ZWvAO2VsADtlbEA7ZWyAO2VswDtlbQA7ZW1AO2VtgDtlbcA7ZW4AO2VuQDtlboA7ZW7AO2VvADtlb0A7ZW+AO2VvwDtloAA7ZaBAO2WggDtloMA7ZaEAO2WhQDtloYA7ZaHAO2WiADtlokA7ZaKAO2WiwDtlowA7ZaNAO2WjgDtlo8A7ZaQAO2WkQDtlpIA7ZaTAO2WlADtlpUA7ZaWAO2WlwDtlpgA7ZaZAO2WmgDtlpsA7ZacAO2WnQDtlp4A7ZafAO2WoADtlqEA7ZaiAO2WowDtlqQA7ZalAO2WpgDtlqcA7ZaoAO2WqQDtlqoA7ZarAO2WrADtlq0A7ZauAO2WrwDtlrAA7ZaxAO2WsgDtlrMA7Za0AO2WtQDtlrYA7Za3AO2WuADtlrkA7Za6AO2WuwDtlrwA7Za9AO2WvgDtlr8A7ZeAAO2XgQDtl4IA7ZeDAO2XhADtl4UA7ZeGAO2XhwDtl4gA7ZeJAO2XigDtl4sA7ZeMAO2XjQDtl44A7ZePAO2XkADtl5EA7ZeSAO2XkwDtl5QA7ZeVAO2XlgDtl5cA7ZeYAO2XmQDtl5oA7ZebAO2XnADtl50A7ZeeAO2XnwDtl6AA7ZehAO2XogDtl6MA7ZekAO2XpQDtl6YA7ZenAO2XqADtl6kA7ZeqAO2XqwDtl6wA7ZetAO2XrgDtl68A7ZewAO2XsQDtl7IA7ZezAO2XtADtl7UA7Ze2AO2XtwDtl7gA7Ze5AO2XugDtl7sA7Ze8AO2XvQDtl74A7Ze/AO2YgADtmIEA7ZiCAO2YgwDtmIQA7ZiFAO2YhgDtmIcA7ZiIAO2YiQDtmIoA7ZiLAO2YjADtmI0A7ZiOAO2YjwDtmJAA7ZiRAO2YkgDtmJMA7ZiUAO2YlQDtmJYA7ZiXAO2YmADtmJkA7ZiaAO2YmwDtmJwA7ZidAO2YngDtmJ8A7ZigAO2YoQDtmKIA7ZijAO2YpADtmKUA7ZimAO2YpwDtmKgA7ZipAO2YqgDtmKsA7ZisAO2YrQDtmK4A7ZivAO2YsADtmLEA7ZiyAO2YswDtmLQA7Zi1AO2YtgDtmLcA7Zi4AO2YuQDtmLoA7Zi7AO2YvADtmL0A7Zi+AO2YvwDtmYAA7ZmBAO2ZggDtmYMA7ZmEAO2ZhQDtmYYA7ZmHAO2ZiADtmYkA7ZmKAO2ZiwDtmYwA7ZmNAO2ZjgDtmY8A7ZmQAO2ZkQDtmZIA7ZmTAO2ZlADtmZUA7ZmWAO2ZlwDtmZgA7ZmZAO2ZmgDtmZsA7ZmcAO2ZnQDtmZ4A7ZmfAO2ZoADtmaEA7ZmiAO2ZowDtmaQ
A7ZmlAO2ZpgDtmacA7ZmoAO2ZqQDtmaoA7ZmrAO2ZrADtma0A7ZmuAO2ZrwDtmbAA7ZmxAO2ZsgDtmbMA7Zm0AO2ZtQDtmbYA7Zm3AO2ZuADtmbkA7Zm6AO2ZuwDtmbwA7Zm9AO2ZvgDtmb8A7ZqAAO2agQDtmoIA7ZqDAO2ahADtmoUA7ZqGAO2ahwDtmogA7ZqJAO2aigDtmosA7ZqMAO2ajQDtmo4A7ZqPAO2akADtmpEA7ZqSAO2akwDtmpQA7ZqVAO2algDtmpcA7ZqYAO2amQDtmpoA7ZqbAO2anADtmp0A7ZqeAO2anwDtmqAA7ZqhAO2aogDtmqMA7ZqkAO2apQDtmqYA7ZqnAO2aqADtmqkA7ZqqAO2aqwDtmqwA7ZqtAO2argDtmq8A7ZqwAO2asQDtmrIA7ZqzAO2atADtmrUA7Zq2AO2atwDtmrgA7Zq5AO2augDtmrsA7Zq8AO2avQDtmr4A7Zq/AO2bgADtm4EA7ZuCAO2bgwDtm4QA7ZuFAO2bhgDtm4cA7ZuIAO2biQDtm4oA7ZuLAO2bjADtm40A7ZuOAO2bjwDtm5AA7ZuRAO2bkgDtm5MA7ZuUAO2blQDtm5YA7ZuXAO2bmADtm5kA7ZuaAO2bmwDtm5wA7ZudAO2bngDtm58A7ZugAO2boQDtm6IA7ZujAO2bpADtm6UA7ZumAO2bpwDtm6gA7ZupAO2bqgDtm6sA7ZusAO2brQDtm64A7ZuvAO2bsADtm7EA7ZuyAO2bswDtm7QA7Zu1AO2btgDtm7cA7Zu4AO2buQDtm7oA7Zu7AO2bvADtm70A7Zu+AO2bvwDtnIAA7ZyBAO2cggDtnIMA7ZyEAO2chQDtnIYA7ZyHAO2ciADtnIkA7ZyKAO2ciwDtnIwA7ZyNAO2cjgDtnI8A7ZyQAO2ckQDtnJIA7ZyTAO2clADtnJUA7ZyWAO2clwDtnJgA7ZyZAO2cmgDtnJsA7ZycAO2cnQDtnJ4A7ZyfAO2coADtnKEA7ZyiAO2cowDtnKQA7ZylAO2cpgDtnKcA7ZyoAO2cqQDtnKoA7ZyrAO2crADtnK0A7ZyuAO2crwDtnLAA7ZyxAO2csgDtnLMA7Zy0AO2ctQDtnLYA7Zy3AO2cuADtnLkA7Zy6AO2cuwDtnLwA7Zy9AO2cvgDtnL8A7Z2AAO2dgQDtnYIA7Z2DAO2dhADtnYUA7Z2GAO2dhwDtnYgA7Z2JAO2digDtnYsA7Z2MAO2djQDtnY4A7Z2PAO2dkADtnZEA7Z2SAO2dkwDtnZQA7Z2VAO2dlgDtnZcA7Z2YAO2dmQDtnZoA7Z2bAO2dnADtnZ0A7Z2eAO2dnwDtnaAA7Z2hAO2dogDtnaMA7Z2kAO2dpQDtnaYA7Z2nAO2dqADtnakA7Z2qAO2dqwDtnawA7Z2tAO2drgDtna8A7Z2wAO2dsQDtnbIA7Z2zAO2dtADtnbUA7Z22AO2dtwDtnbgA7Z25AO2dugDtnbsA7Z28AO2dvQDtnb4A7Z2/AO2egADtnoEA7Z6CAO2egwDtnoQA7Z6FAO2ehgDtnocA7Z6IAO2eiQDtnooA7Z6LAO2ejADtno0A7Z6OAO2ejwDtnpAA7Z6RAO2ekgDtnpMA7Z6UAO2elQDtnpYA7Z6XAO2emADtnpkA7Z6aAO2emwDtnpwA7Z6dAO2engDtnp8A7Z6gAO2eoQDtnqIA7Z6jAPCRgpoA8JGCnADwkYKrAPCRhK4A8JGErwDwkY2LAPCRjYwA8JGSuwDwkZK8APCRkr4A8JGWugDwkZa7APCdhZfwnYWlAPCdhZjwnYWlAPCdhZjwnYWl8J2FrgDwnYWY8J2FpfCdha8A8J2FmPCdhaXwnYWwAPCdhZjwnYWl8J2FsQDwnYWY8J2FpfCdhbIA8J2GufCdhaUA8J2GufCdhaXwnYWuAPCdhrnwnYWl8J2FrwDwnYa68J2FpQDwnYa68J2FpfCdha4A8J2GuvCdhaXwnYWvAPCghKIA8KCUnAD
woJSlAPCglYsA8KCYugDwoKCEAPCgo54A8KCorADwoK2jAPChk6QA8KGaqADwoZuqAPChp4gA8KGsmADwobSLAPCht6QA8KG3pgDwooaDAPCihp8A8KKMsQDwopuUAPCioYQA8KKhigDwoqyMAPCir7EA8KOAigDwo4q4APCjjZ8A8KOOkwDwo46cAPCjj4MA8KOPlQDwo5GtAPCjmqMA8KOipwDwo6qNAPCjq7oA8KOyvADwo7SeAPCju5EA8KO9ngDwo76OAPCkiaMA8KSLrgDwpI6rAPCkmIgA8KSctQDwpKCUAPCksLYA8KSykgDwpL6hAPCkvrgA8KWBhADwpYOyAPClg7MA8KWEmQDwpYSzAPCliYkA8KWQnQDwpZimAPClmpoA8KWbhQDwpaW8APClqqcA8KWuqwDwpbKAAPCls5AA8KW+hgDwpoeaAPCmiKgA8KaJhwDwpouZAPCmjL4A8KaTmgDwppSjAPCmlqgA8KaepwDwpp61APCmrLwA8KawtgDwprOVAPCmtasA8Ka8rADwpr6xAPCng5IA8KePigDwp5mnAPCnoq4A8KelpgDwp7KoAPCnu5MA8Ke8rwDwqJeSAPCol60A8KicrgDwqK+6APCotbcA8KmFhQDwqYefAPCpiJoA8KmQigDwqZKWAPCplrYA8KmssADwqoOOAPCqhIUA8KqIjgDwqoqRAPCqjpIA8KqYgAA=" - }, - { - "type": "Strip", - "strip_left": false, - "strip_right": true - }, - { - "type": "Replace", - "pattern": { - "Regex": " {2,}" - }, - "content": "▁" - } - ] - }, - "pre_tokenizer": { - "type": "Metaspace", - "replacement": "▁", - "prepend_scheme": "always", - "split": true - }, - "post_processor": { - "type": "TemplateProcessing", - "single": [ - { - "Sequence": { - "id": "A", - "type_id": 0 - } - }, - { - "SpecialToken": { - "id": "", - "type_id": 0 - } - } - ], - "pair": [ - { - "Sequence": { - "id": "A", - "type_id": 0 - } - }, - { - "SpecialToken": { - "id": "", - "type_id": 0 - } - }, - { - "Sequence": { - "id": "B", - "type_id": 0 - } - }, - { - "SpecialToken": { - "id": "", - "type_id": 0 - } - } - ], - "special_tokens": { - "": { - "id": "", - "ids": [ - 1 - ], - "tokens": [ - "" - ] - } - } - }, - "decoder": { - "type": "Metaspace", - "replacement": "▁", - "prepend_scheme": "always", - "split": true - }, - "model": { - "type": "Unigram", - "unk_id": 2, - "vocab": [ - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "▁", - -2.0122928619384766 - ], - [ - "X", - -2.486478805541992 - ], - [ - ".", - -3.5449328422546387 - ], - [ - ",", - -3.649247407913208 - ], - [ - "s", - -3.9033992290496826 - ], - [ - "▁the", - 
-3.9598512649536133 - ], - [ - "a", - -4.097104549407959 - ], - [ - ":", - -4.414328098297119 - ], - [ - "▁and", - -4.420670986175537 - ], - [ - "▁to", - -4.4523234367370605 - ], - [ - "▁of", - -4.572070121765137 - ], - [ - "▁fill", - -4.575019836425781 - ], - [ - "e", - -4.674920082092285 - ], - [ - "▁in", - -4.812063694000244 - ], - [ - "t", - -5.063905715942383 - ], - [ - "-", - -5.129043102264404 - ], - [ - "▁is", - -5.283425331115723 - ], - [ - "▁de", - -5.344141960144043 - ], - [ - "▁for", - -5.3930158615112305 - ], - [ - "’", - -5.4228339195251465 - ], - [ - "i", - -5.469857692718506 - ], - [ - "▁that", - -5.576240539550781 - ], - [ - "▁you", - -5.596375465393066 - ], - [ - "d", - -5.6047282218933105 - ], - [ - "▁I", - -5.6640448570251465 - ], - [ - "▁with", - -5.703730583190918 - ], - [ - "n", - -5.737886905670166 - ], - [ - "▁on", - -5.784142971038818 - ], - [ - "'", - -5.828996181488037 - ], - [ - "o", - -5.925558090209961 - ], - [ - "▁are", - -5.931313991546631 - ], - [ - "▁it", - -5.939518928527832 - ], - [ - "en", - -5.9465556144714355 - ], - [ - "▁be", - -5.9556708335876465 - ], - [ - "▁The", - -5.990020751953125 - ], - [ - "▁as", - -6.057407379150391 - ], - [ - "▁your", - -6.132311820983887 - ], - [ - "l", - -6.139498710632324 - ], - [ - "▁(", - -6.184796333312988 - ], - [ - "▁or", - -6.241950035095215 - ], - [ - "▁have", - -6.27459192276001 - ], - [ - "▁at", - -6.327472686767578 - ], - [ - "▁from", - -6.349645137786865 - ], - [ - "▁an", - -6.350090980529785 - ], - [ - "▁was", - -6.350385665893555 - ], - [ - "▁this", - -6.352563381195068 - ], - [ - "er", - -6.3604278564453125 - ], - [ - "▁la", - -6.3624043464660645 - ], - [ - "m", - -6.375206470489502 - ], - [ - "r", - -6.376530170440674 - ], - [ - "ing", - -6.3778581619262695 - ], - [ - "▁can", - -6.387146472930908 - ], - [ - "!", - -6.421379566192627 - ], - [ - "▁will", - -6.423982620239258 - ], - [ - "▁by", - -6.44155216217041 - ], - [ - "?", - -6.585887432098389 - ], - [ - "▁not", - 
-6.5959086418151855 - ], - [ - "re", - -6.620072364807129 - ], - [ - ")", - -6.63656759262085 - ], - [ - "▁we", - -6.643022060394287 - ], - [ - "y", - -6.654535293579102 - ], - [ - "▁und", - -6.741473197937012 - ], - [ - "▁has", - -6.7602033615112305 - ], - [ - "▁all", - -6.768176555633545 - ], - [ - "▁die", - -6.8641204833984375 - ], - [ - "▁but", - -6.906830310821533 - ], - [ - "▁our", - -6.909878730773926 - ], - [ - "▁their", - -6.91325044631958 - ], - [ - "▁A", - -6.915814399719238 - ], - [ - "▁more", - -6.918668746948242 - ], - [ - "▁un", - -6.924930572509766 - ], - [ - "▁der", - -6.925402641296387 - ], - [ - "c", - -6.925714015960693 - ], - [ - "u", - -6.932939052581787 - ], - [ - "in", - -6.934063911437988 - ], - [ - "▁so", - -6.947050094604492 - ], - [ - "▁they", - -6.989297866821289 - ], - [ - "▁one", - -7.012735843658447 - ], - [ - "▁about", - -7.071486473083496 - ], - [ - "▁my", - -7.072140693664551 - ], - [ - "ul", - -7.076492786407471 - ], - [ - "▁which", - -7.097039222717285 - ], - [ - "à", - -7.099997520446777 - ], - [ - "▁In", - -7.100254535675049 - ], - [ - "/", - -7.100865840911865 - ], - [ - "he", - -7.104752540588379 - ], - [ - "f", - -7.110044002532959 - ], - [ - "▁le", - -7.112937927246094 - ], - [ - "▁out", - -7.128556728363037 - ], - [ - "▁also", - -7.133583068847656 - ], - [ - "▁des", - -7.156766414642334 - ], - [ - "▁It", - -7.162121295928955 - ], - [ - "▁up", - -7.1723432540893555 - ], - [ - "▁\"", - -7.172809600830078 - ], - [ - "▁time", - -7.178046703338623 - ], - [ - "ă", - -7.183253765106201 - ], - [ - "if", - -7.185171127319336 - ], - [ - "▁This", - -7.191652297973633 - ], - [ - "▁We", - -7.223267078399658 - ], - [ - "p", - -7.224130153656006 - ], - [ - "▁do", - -7.228212356567383 - ], - [ - "–", - -7.235409736633301 - ], - [ - "▁“", - -7.238142013549805 - ], - [ - "on", - -7.240827560424805 - ], - [ - "h", - -7.2543206214904785 - ], - [ - "▁si", - -7.276725769042969 - ], - [ - "le", - -7.2994256019592285 - ], - [ - "▁les", - 
-7.312957286834717 - ], - [ - "▁în", - -7.314571857452393 - ], - [ - "▁his", - -7.324767112731934 - ], - [ - "▁who", - -7.35105562210083 - ], - [ - "▁like", - -7.371364116668701 - ], - [ - "b", - -7.375369071960449 - ], - [ - "▁when", - -7.380199432373047 - ], - [ - ";", - -7.380846977233887 - ], - [ - "▁been", - -7.38668966293335 - ], - [ - "▁other", - -7.388518333435059 - ], - [ - "ly", - -7.394660949707031 - ], - [ - "\"", - -7.407205104827881 - ], - [ - "g", - -7.407997131347656 - ], - [ - "▁cu", - -7.415276527404785 - ], - [ - "▁care", - -7.432408332824707 - ], - [ - "▁what", - -7.433043003082275 - ], - [ - "▁new", - -7.4370903968811035 - ], - [ - "or", - -7.445409774780273 - ], - [ - "▁some", - -7.461953639984131 - ], - [ - "▁get", - -7.479001998901367 - ], - [ - "▁were", - -7.491549491882324 - ], - [ - "▁just", - -7.492495536804199 - ], - [ - "▁there", - -7.493194103240967 - ], - [ - "▁would", - -7.494382381439209 - ], - [ - "S", - -7.4974141120910645 - ], - [ - "▁them", - -7.513596057891846 - ], - [ - "▁any", - -7.520544052124023 - ], - [ - ").", - -7.521052360534668 - ], - [ - "al", - -7.523056983947754 - ], - [ - "▁into", - -7.527902603149414 - ], - [ - "▁me", - -7.528337001800537 - ], - [ - "▁had", - -7.532425403594971 - ], - [ - "▁se", - -7.5451483726501465 - ], - [ - "▁make", - -7.5827131271362305 - ], - [ - "at", - -7.589433670043945 - ], - [ - "▁than", - -7.592360019683838 - ], - [ - "▁du", - -7.595852375030518 - ], - [ - "▁over", - -7.6078782081604 - ], - [ - "▁You", - -7.626111030578613 - ], - [ - "▁how", - -7.635554313659668 - ], - [ - "▁no", - -7.63729190826416 - ], - [ - "▁people", - -7.639947414398193 - ], - [ - "an", - -7.64084005355835 - ], - [ - "”", - -7.644528865814209 - ], - [ - "é", - -7.646921157836914 - ], - [ - "it", - -7.648641109466553 - ], - [ - "▁If", - -7.648687839508057 - ], - [ - "k", - -7.6605634689331055 - ], - [ - "▁pe", - -7.662139415740967 - ], - [ - "is", - -7.66726016998291 - ], - [ - "▁her", - -7.6733808517456055 - ], - 
[ - "▁work", - -7.680386543273926 - ], - [ - "ve", - -7.687412738800049 - ], - [ - "▁only", - -7.69785737991333 - ], - [ - "▁may", - -7.702393531799316 - ], - [ - "▁its", - -7.702449798583984 - ], - [ - "▁first", - -7.704373836517334 - ], - [ - "▁most", - -7.708309173583984 - ], - [ - "▁well", - -7.708758354187012 - ], - [ - "▁use", - -7.715085983276367 - ], - [ - "▁zu", - -7.718777656555176 - ], - [ - "▁pour", - -7.736708164215088 - ], - [ - "z", - -7.745654106140137 - ], - [ - "il", - -7.745913982391357 - ], - [ - "▁need", - -7.74778938293457 - ], - [ - "▁these", - -7.763317584991455 - ], - [ - "▁din", - -7.769891262054443 - ], - [ - "▁den", - -7.775663375854492 - ], - [ - "▁us", - -7.778133869171143 - ], - [ - "able", - -7.779712200164795 - ], - [ - "▁S", - -7.781893730163574 - ], - [ - "▁mit", - -7.792516231536865 - ], - [ - "▁very", - -7.79970645904541 - ], - [ - "▁am", - -7.814100742340088 - ], - [ - "&", - -7.829529285430908 - ], - [ - "▁au", - -7.83012056350708 - ], - [ - "▁many", - -7.83834171295166 - ], - [ - "▁mai", - -7.84363317489624 - ], - [ - "A", - -7.849830150604248 - ], - [ - "th", - -7.855541229248047 - ], - [ - "▁through", - -7.859585285186768 - ], - [ - "▁pentru", - -7.86391544342041 - ], - [ - "▁two", - -7.873607158660889 - ], - [ - "▁von", - -7.874959945678711 - ], - [ - "▁way", - -7.887117385864258 - ], - [ - "ll", - -7.887749195098877 - ], - [ - "I", - -7.891303539276123 - ], - [ - "▁ce", - -7.9015631675720215 - ], - [ - "▁și", - -7.904444694519043 - ], - [ - "▁help", - -7.907405853271484 - ], - [ - "▁best", - -7.907911777496338 - ], - [ - "),", - -7.908212184906006 - ], - [ - "un", - -7.925017833709717 - ], - [ - "▁years", - -7.925964832305908 - ], - [ - "▁2", - -7.9282684326171875 - ], - [ - "▁C", - -7.936962604522705 - ], - [ - "▁nu", - -7.939520835876465 - ], - [ - "▁good", - -7.943995952606201 - ], - [ - "v", - -7.94746732711792 - ], - [ - "▁1", - -7.94765567779541 - ], - [ - "w", - -7.947978496551514 - ], - [ - "▁das", - 
-7.960538864135742 - ], - [ - "▁ca", - -7.962430477142334 - ], - [ - "▁where", - -7.964908123016357 - ], - [ - "▁know", - -7.96622896194458 - ], - [ - "▁year", - -7.971063613891602 - ], - [ - "▁He", - -7.974609375 - ], - [ - "▁see", - -7.980011463165283 - ], - [ - "▁für", - -7.984004497528076 - ], - [ - "▁auf", - -7.984249114990234 - ], - [ - "▁3", - -7.984433650970459 - ], - [ - "de", - -7.985401153564453 - ], - [ - "est", - -8.002091407775879 - ], - [ - "▁back", - -8.007022857666016 - ], - [ - "▁such", - -8.008523941040039 - ], - [ - "▁should", - -8.011754989624023 - ], - [ - "x", - -8.015050888061523 - ], - [ - "▁after", - -8.01761245727539 - ], - [ - "▁could", - -8.019674301147461 - ], - [ - "▁ist", - -8.020784378051758 - ], - [ - "▁now", - -8.022845268249512 - ], - [ - "▁much", - -8.023111343383789 - ], - [ - "and", - -8.02390193939209 - ], - [ - "...", - -8.030110359191895 - ], - [ - "▁home", - -8.036273956298828 - ], - [ - "to", - -8.03821086883545 - ], - [ - "▁ein", - -8.04833984375 - ], - [ - "▁even", - -8.048656463623047 - ], - [ - "▁que", - -8.049829483032227 - ], - [ - "▁day", - -8.051553726196289 - ], - [ - "▁take", - -8.054189682006836 - ], - [ - "▁want", - -8.054435729980469 - ], - [ - "▁For", - -8.06217098236084 - ], - [ - "▁said", - -8.063249588012695 - ], - [ - "▁sur", - -8.073471069335938 - ], - [ - "▁une", - -8.077030181884766 - ], - [ - "▁să", - -8.082921028137207 - ], - [ - "▁dans", - -8.084549903869629 - ], - [ - "▁great", - -8.088057518005371 - ], - [ - "▁este", - -8.08947467803955 - ], - [ - "▁because", - -8.094311714172363 - ], - [ - "▁information", - -8.104085922241211 - ], - [ - "ului", - -8.105451583862305 - ], - [ - "▁find", - -8.112174987792969 - ], - [ - "C", - -8.119946479797363 - ], - [ - "▁she", - -8.125317573547363 - ], - [ - "▁im", - -8.126056671142578 - ], - [ - "ation", - -8.130115509033203 - ], - [ - "▁then", - -8.13021469116211 - ], - [ - "▁est", - -8.13099479675293 - ], - [ - "▁par", - -8.138585090637207 - ], - [ - "▁used", 
- -8.141871452331543 - ], - [ - "▁E", - -8.146790504455566 - ], - [ - "▁made", - -8.149978637695312 - ], - [ - "▁So", - -8.15785026550293 - ], - [ - "am", - -8.16288948059082 - ], - [ - "▁eine", - -8.165464401245117 - ], - [ - "▁şi", - -8.168368339538574 - ], - [ - "▁business", - -8.17335033416748 - ], - [ - "▁right", - -8.173593521118164 - ], - [ - "▁here", - -8.176125526428223 - ], - [ - "▁being", - -8.184967041015625 - ], - [ - "▁B", - -8.185355186462402 - ], - [ - "▁those", - -8.185736656188965 - ], - [ - "▁before", - -8.194721221923828 - ], - [ - "▁And", - -8.199501037597656 - ], - [ - "▁P", - -8.200712203979492 - ], - [ - "ers", - -8.200922012329102 - ], - [ - "▁don", - -8.204029083251953 - ], - [ - "B", - -8.20487117767334 - ], - [ - "▁life", - -8.206265449523926 - ], - [ - "▁go", - -8.209736824035645 - ], - [ - "▁As", - -8.210551261901855 - ], - [ - "▁M", - -8.221170425415039 - ], - [ - "▁each", - -8.22955322265625 - ], - [ - "▁qui", - -8.23323917388916 - ], - [ - "▁place", - -8.236248970031738 - ], - [ - "com", - -8.237479209899902 - ], - [ - "ant", - -8.252915382385254 - ], - [ - "▁sich", - -8.255932807922363 - ], - [ - "▁There", - -8.261948585510254 - ], - [ - "ar", - -8.264991760253906 - ], - [ - "▁Sie", - -8.273868560791016 - ], - [ - "▁own", - -8.277531623840332 - ], - [ - "▁part", - -8.279440879821777 - ], - [ - "ent", - -8.281047821044922 - ], - [ - "▁world", - -8.28173542022705 - ], - [ - "ment", - -8.282004356384277 - ], - [ - "▁while", - -8.294474601745605 - ], - [ - "▁But", - -8.295366287231445 - ], - [ - "▁around", - -8.300799369812012 - ], - [ - "▁L", - -8.301082611083984 - ], - [ - "us", - -8.304039001464844 - ], - [ - "▁plus", - -8.313054084777832 - ], - [ - "▁To", - -8.313691139221191 - ], - [ - "▁5", - -8.31412410736084 - ], - [ - "▁high", - -8.31862735748291 - ], - [ - "▁long", - -8.319378852844238 - ], - [ - "D", - -8.320075035095215 - ], - [ - "▁D", - -8.320279121398926 - ], - [ - "▁really", - -8.322924613952637 - ], - [ - "▁nicht", - 
-8.332040786743164 - ], - [ - "▁Le", - -8.335328102111816 - ], - [ - "▁service", - -8.3412504196167 - ], - [ - "▁4", - -8.342093467712402 - ], - [ - "▁different", - -8.342538833618164 - ], - [ - "▁Die", - -8.348092079162598 - ], - [ - "▁think", - -8.353771209716797 - ], - [ - "—", - -8.355998039245605 - ], - [ - "▁auch", - -8.357160568237305 - ], - [ - "▁look", - -8.362202644348145 - ], - [ - "▁both", - -8.366817474365234 - ], - [ - "lor", - -8.36687183380127 - ], - [ - "▁down", - -8.367999076843262 - ], - [ - "ten", - -8.368885040283203 - ], - [ - "▁La", - -8.378066062927246 - ], - [ - "▁off", - -8.380044937133789 - ], - [ - "▁vous", - -8.380541801452637 - ], - [ - "▁They", - -8.381462097167969 - ], - [ - "M", - -8.383248329162598 - ], - [ - "▁pas", - -8.384513854980469 - ], - [ - "▁data", - -8.385709762573242 - ], - [ - "▁T", - -8.386754989624023 - ], - [ - "▁love", - -8.388101577758789 - ], - [ - "▁every", - -8.390009880065918 - ], - [ - "▁10", - -8.391179084777832 - ], - [ - "▁last", - -8.392083168029785 - ], - [ - "▁same", - -8.393481254577637 - ], - [ - "▁using", - -8.395487785339355 - ], - [ - "▁free", - -8.408831596374512 - ], - [ - "▁dem", - -8.40894889831543 - ], - [ - "▁still", - -8.409984588623047 - ], - [ - "ate", - -8.410931587219238 - ], - [ - "ist", - -8.415611267089844 - ], - [ - "▁between", - -8.420283317565918 - ], - [ - "P", - -8.420982360839844 - ], - [ - "be", - -8.428167343139648 - ], - [ - "▁available", - -8.429443359375 - ], - [ - "man", - -8.432978630065918 - ], - [ - "▁company", - -8.439678192138672 - ], - [ - "▁G", - -8.441640853881836 - ], - [ - "▁experience", - -8.444950103759766 - ], - [ - "▁going", - -8.449073791503906 - ], - [ - "▁site", - -8.453832626342773 - ], - [ - "j", - -8.455142974853516 - ], - [ - "are", - -8.456900596618652 - ], - [ - "▁set", - -8.470661163330078 - ], - [ - "2", - -8.473684310913086 - ], - [ - "▁system", - -8.474678039550781 - ], - [ - "▁important", - -8.476791381835938 - ], - [ - "▁few", - 
-8.482437133789062 - ], - [ - "▁fi", - -8.482551574707031 - ], - [ - "ich", - -8.483301162719727 - ], - [ - "▁What", - -8.488649368286133 - ], - [ - "▁services", - -8.502433776855469 - ], - [ - "▁under", - -8.502569198608398 - ], - [ - "▁When", - -8.50308895111084 - ], - [ - "▁online", - -8.50699520111084 - ], - [ - "▁New", - -8.51494312286377 - ], - [ - "▁come", - -8.524871826171875 - ], - [ - "▁provide", - -8.525650024414062 - ], - [ - "F", - -8.526449203491211 - ], - [ - "▁team", - -8.52782154083252 - ], - [ - "▁always", - -8.529409408569336 - ], - [ - "▁De", - -8.530412673950195 - ], - [ - "▁că", - -8.532517433166504 - ], - [ - "▁him", - -8.53586196899414 - ], - [ - "▁F", - -8.538305282592773 - ], - [ - "▁things", - -8.550079345703125 - ], - [ - "▁including", - -8.550943374633789 - ], - [ - "▁support", - -8.552608489990234 - ], - [ - "▁number", - -8.554113388061523 - ], - [ - "T", - -8.557183265686035 - ], - [ - "▁during", - -8.55886459350586 - ], - [ - "▁family", - -8.560463905334473 - ], - [ - "▁little", - -8.561317443847656 - ], - [ - "▁three", - -8.567726135253906 - ], - [ - "▁water", - -8.56810188293457 - ], - [ - "▁man", - -8.569759368896484 - ], - [ - "▁An", - -8.57192611694336 - ], - [ - "based", - -8.572155952453613 - ], - [ - "▁R", - -8.57442855834961 - ], - [ - "▁sau", - -8.574433326721191 - ], - [ - "▁avec", - -8.576035499572754 - ], - [ - "▁better", - -8.576830863952637 - ], - [ - "▁„", - -8.582253456115723 - ], - [ - "▁too", - -8.58635425567627 - ], - [ - "ge", - -8.586719512939453 - ], - [ - "▁must", - -8.589736938476562 - ], - [ - "▁per", - -8.589916229248047 - ], - [ - "ele", - -8.590399742126465 - ], - [ - "▁oder", - -8.59264850616455 - ], - [ - "au", - -8.59555435180664 - ], - [ - "▁aus", - -8.595727920532227 - ], - [ - "▁werden", - -8.598653793334961 - ], - [ - "▁does", - -8.599140167236328 - ], - [ - "▁without", - -8.599270820617676 - ], - [ - "▁ou", - -8.599929809570312 - ], - [ - "▁design", - -8.60101318359375 - ], - [ - "▁va", - 
-8.605440139770508 - ], - [ - "▁did", - -8.615679740905762 - ], - [ - "▁O", - -8.619062423706055 - ], - [ - "▁U", - -8.623565673828125 - ], - [ - "up", - -8.62901496887207 - ], - [ - "▁end", - -8.63367748260498 - ], - [ - "▁local", - -8.636231422424316 - ], - [ - "▁next", - -8.638967514038086 - ], - [ - "▁sure", - -8.64098072052002 - ], - [ - "▁lot", - -8.64644718170166 - ], - [ - "▁Re", - -8.647016525268555 - ], - [ - "▁top", - -8.647642135620117 - ], - [ - "▁Our", - -8.656886100769043 - ], - [ - "▁small", - -8.656978607177734 - ], - [ - "▁full", - -8.659418106079102 - ], - [ - "▁something", - -8.662886619567871 - ], - [ - "ung", - -8.666722297668457 - ], - [ - "▁vor", - -8.673250198364258 - ], - [ - "E", - -8.673337936401367 - ], - [ - "▁give", - -8.67603588104248 - ], - [ - "▁might", - -8.67660903930664 - ], - [ - "▁another", - -8.679330825805664 - ], - [ - "▁6", - -8.680779457092285 - ], - [ - "▁All", - -8.681318283081055 - ], - [ - "▁process", - -8.681672096252441 - ], - [ - "L", - -8.682575225830078 - ], - [ - "▁found", - -8.68941593170166 - ], - [ - "▁sind", - -8.690044403076172 - ], - [ - "▁since", - -8.69528865814209 - ], - [ - "▁With", - -8.695560455322266 - ], - [ - "K", - -8.696988105773926 - ], - [ - "um", - -8.701016426086426 - ], - [ - "▁within", - -8.701669692993164 - ], - [ - "▁post", - -8.706608772277832 - ], - [ - "▁car", - -8.709365844726562 - ], - [ - "une", - -8.714099884033203 - ], - [ - "▁N", - -8.715041160583496 - ], - [ - "▁J", - -8.715597152709961 - ], - [ - "ic", - -8.71823787689209 - ], - [ - "R", - -8.722309112548828 - ], - [ - "ter", - -8.727437019348145 - ], - [ - "ur", - -8.728265762329102 - ], - [ - "▁She", - -8.73131275177002 - ], - [ - "▁public", - -8.732009887695312 - ], - [ - "▁keep", - -8.735784530639648 - ], - [ - "▁H", - -8.736178398132324 - ], - [ - "▁order", - -8.740762710571289 - ], - [ - "▁start", - -8.742195129394531 - ], - [ - "ez", - -8.74746322631836 - ], - [ - "▁‘", - -8.749832153320312 - ], - [ - "uri", - 
-8.751104354858398 - ], - [ - "▁20", - -8.752482414245605 - ], - [ - "▁On", - -8.753515243530273 - ], - [ - "▁offer", - -8.763005256652832 - ], - [ - "▁quality", - -8.764988899230957 - ], - [ - "▁working", - -8.769987106323242 - ], - [ - "▁No", - -8.770307540893555 - ], - [ - "▁That", - -8.775156021118164 - ], - [ - "▁game", - -8.7863187789917 - ], - [ - "▁bei", - -8.786642074584961 - ], - [ - "▁today", - -8.788661003112793 - ], - [ - "▁never", - -8.794586181640625 - ], - [ - "▁week", - -8.79587173461914 - ], - [ - "▁St", - -8.797786712646484 - ], - [ - "▁feel", - -8.799317359924316 - ], - [ - "▁put", - -8.801899909973145 - ], - [ - "▁website", - -8.80322265625 - ], - [ - "Y", - -8.804483413696289 - ], - [ - "▁days", - -8.804709434509277 - ], - [ - "▁program", - -8.805448532104492 - ], - [ - "▁looking", - -8.810463905334473 - ], - [ - "▁K", - -8.810808181762695 - ], - [ - "▁students", - -8.811436653137207 - ], - [ - "▁create", - -8.811800956726074 - ], - [ - "▁change", - -8.812616348266602 - ], - [ - "▁book", - -8.812932014465332 - ], - [ - "ity", - -8.813761711120605 - ], - [ - "▁At", - -8.815207481384277 - ], - [ - "▁possible", - -8.815670013427734 - ], - [ - "▁sunt", - -8.81651496887207 - ], - [ - "▁7", - -8.818120002746582 - ], - [ - "▁real", - -8.823369026184082 - ], - [ - "▁al", - -8.824172019958496 - ], - [ - "▁making", - -8.825371742248535 - ], - [ - "▁Be", - -8.825761795043945 - ], - [ - "▁products", - -8.82592487335205 - ], - [ - "▁case", - -8.82653522491455 - ], - [ - "▁school", - -8.8272066116333 - ], - [ - "▁say", - -8.830352783203125 - ], - [ - "area", - -8.832084655761719 - ], - [ - "▁My", - -8.833836555480957 - ], - [ - "▁point", - -8.834731101989746 - ], - [ - "▁als", - -8.83560848236084 - ], - [ - "▁children", - -8.836194038391113 - ], - [ - "▁course", - -8.844061851501465 - ], - [ - "▁show", - -8.847993850708008 - ], - [ - "▁8", - -8.849273681640625 - ], - [ - "▁These", - -8.849345207214355 - ], - [ - "▁18", - -8.851140975952148 - ], - [ - 
"▁large", - -8.851323127746582 - ], - [ - "co", - -8.854362487792969 - ], - [ - "▁über", - -8.854788780212402 - ], - [ - "▁second", - -8.856559753417969 - ], - [ - "▁market", - -8.859807014465332 - ], - [ - "▁fost", - -8.86048698425293 - ], - [ - "▁easy", - -8.863983154296875 - ], - [ - "▁plan", - -8.864302635192871 - ], - [ - "▁project", - -8.864927291870117 - ], - [ - "G", - -8.865178108215332 - ], - [ - "W", - -8.869574546813965 - ], - [ - "3", - -8.871939659118652 - ], - [ - "▁son", - -8.873332023620605 - ], - [ - "la", - -8.879053115844727 - ], - [ - "▁face", - -8.88137435913086 - ], - [ - "▁needs", - -8.88148021697998 - ], - [ - "ch", - -8.883138656616211 - ], - [ - "▁personal", - -8.88343620300293 - ], - [ - "me", - -8.886031150817871 - ], - [ - "▁sont", - -8.887377738952637 - ], - [ - "▁je", - -8.894930839538574 - ], - [ - "▁non", - -8.895471572875977 - ], - [ - "▁got", - -8.896591186523438 - ], - [ - "▁Do", - -8.897382736206055 - ], - [ - "the", - -8.89765453338623 - ], - [ - "▁health", - -8.89908504486084 - ], - [ - "▁special", - -8.90555477142334 - ], - [ - ".\"", - -8.907710075378418 - ], - [ - "1", - -8.907852172851562 - ], - [ - "den", - -8.908616065979004 - ], - [ - "▁state", - -8.909355163574219 - ], - [ - "▁open", - -8.91019058227539 - ], - [ - "▁money", - -8.91053581237793 - ], - [ - "▁again", - -8.913084983825684 - ], - [ - "▁food", - -8.913167953491211 - ], - [ - "▁page", - -8.914595603942871 - ], - [ - "▁together", - -8.91628360748291 - ], - [ - "age", - -8.919108390808105 - ], - [ - "▁qu", - -8.921928405761719 - ], - [ - "hat", - -8.922386169433594 - ], - [ - "▁ver", - -8.926993370056152 - ], - [ - "▁W", - -8.927785873413086 - ], - [ - "▁away", - -8.928759574890137 - ], - [ - "▁wird", - -8.931641578674316 - ], - [ - "▁until", - -8.934249877929688 - ], - [ - "V", - -8.934935569763184 - ], - [ - "▁pre", - -8.935851097106934 - ], - [ - "▁One", - -8.936429977416992 - ], - [ - "▁product", - -8.936561584472656 - ], - [ - "▁often", - 
-8.939326286315918 - ], - [ - "▁wir", - -8.944111824035645 - ], - [ - "▁nach", - -8.945127487182617 - ], - [ - "▁include", - -8.946555137634277 - ], - [ - "▁um", - -8.948204040527344 - ], - [ - "▁room", - -8.953709602355957 - ], - [ - "▁group", - -8.953767776489258 - ], - [ - "▁name", - -8.954949378967285 - ], - [ - "ce", - -8.955448150634766 - ], - [ - "H", - -8.956180572509766 - ], - [ - "N", - -8.958139419555664 - ], - [ - "▁person", - -8.958183288574219 - ], - [ - "▁social", - -8.958606719970703 - ], - [ - "▁list", - -8.963666915893555 - ], - [ - "▁How", - -8.964127540588379 - ], - [ - "▁why", - -8.96571159362793 - ], - [ - "▁community", - -8.965995788574219 - ], - [ - "▁contact", - -8.973031044006348 - ], - [ - "­", - -8.9755859375 - ], - [ - "▁co", - -8.979683876037598 - ], - [ - "▁play", - -8.983960151672363 - ], - [ - "▁having", - -8.984169960021973 - ], - [ - "▁power", - -8.986917495727539 - ], - [ - "▁call", - -8.991690635681152 - ], - [ - "▁against", - -8.991816520690918 - ], - [ - "▁become", - -8.997780799865723 - ], - [ - "▁cost", - -9.003793716430664 - ], - [ - "▁V", - -9.004593849182129 - ], - [ - "▁research", - -9.006913185119629 - ], - [ - "▁12", - -9.007307052612305 - ], - [ - "▁wie", - -9.008277893066406 - ], - [ - "der", - -9.008386611938477 - ], - [ - "▁thing", - -9.014028549194336 - ], - [ - "▁along", - -9.017301559448242 - ], - [ - "4", - -9.017330169677734 - ], - [ - "▁access", - -9.020391464233398 - ], - [ - "▁level", - -9.020505905151367 - ], - [ - "▁price", - -9.022817611694336 - ], - [ - "▁einen", - -9.023714065551758 - ], - [ - "▁side", - -9.026359558105469 - ], - [ - "▁Un", - -9.026851654052734 - ], - [ - "▁means", - -9.030416488647461 - ], - [ - "(", - -9.032341957092285 - ], - [ - "▁big", - -9.034374237060547 - ], - [ - "▁God", - -9.036499977111816 - ], - [ - "▁dass", - -9.037314414978027 - ], - [ - "im", - -9.037374496459961 - ], - [ - "▁30", - -9.037432670593262 - ], - [ - "▁event", - -9.041665077209473 - ], - [ - "▁development", - 
-9.042060852050781 - ], - [ - "▁form", - -9.04226303100586 - ], - [ - "▁read", - -9.042579650878906 - ], - [ - "▁hand", - -9.043194770812988 - ], - [ - "▁control", - -9.04446792602539 - ], - [ - "▁However", - -9.046320915222168 - ], - [ - "▁done", - -9.048060417175293 - ], - [ - "▁job", - -9.051692008972168 - ], - [ - "▁hard", - -9.056619644165039 - ], - [ - "▁war", - -9.057538032531738 - ], - [ - "▁area", - -9.0584135055542 - ], - [ - "▁add", - -9.0586576461792 - ], - [ - "▁votre", - -9.0593900680542 - ], - [ - "▁live", - -9.059494018554688 - ], - [ - "▁range", - -9.060099601745605 - ], - [ - "▁After", - -9.060164451599121 - ], - [ - "▁Les", - -9.060513496398926 - ], - [ - "▁far", - -9.064413070678711 - ], - [ - "ver", - -9.064727783203125 - ], - [ - "▁old", - -9.069576263427734 - ], - [ - "▁perfect", - -9.06976318359375 - ], - [ - "▁15", - -9.070429801940918 - ], - [ - "▁space", - -9.073654174804688 - ], - [ - "▁house", - -9.074068069458008 - ], - [ - "ine", - -9.07408618927002 - ], - [ - "▁enough", - -9.074334144592285 - ], - [ - "0", - -9.075824737548828 - ], - [ - "▁several", - -9.077119827270508 - ], - [ - "The", - -9.081155776977539 - ], - [ - "mm", - -9.085619926452637 - ], - [ - "▁University", - -9.08637523651123 - ], - [ - "▁diese", - -9.087566375732422 - ], - [ - "▁Co", - -9.088335990905762 - ], - [ - "▁comes", - -9.088497161865234 - ], - [ - "▁across", - -9.088857650756836 - ], - [ - "▁already", - -9.090097427368164 - ], - [ - ",”", - -9.090341567993164 - ], - [ - "▁body", - -9.09276294708252 - ], - [ - "▁Das", - -9.094594955444336 - ], - [ - "▁einer", - -9.095956802368164 - ], - [ - "▁left", - -9.09921646118164 - ], - [ - "▁future", - -9.105711936950684 - ], - [ - "▁times", - -9.106670379638672 - ], - [ - "▁dar", - -9.109651565551758 - ], - [ - "▁simple", - -9.110408782958984 - ], - [ - "ry", - -9.112407684326172 - ], - [ - "▁getting", - -9.113155364990234 - ], - [ - "▁try", - -9.115362167358398 - ], - [ - "ți", - -9.116897583007812 - ], - [ - "ness", 
- -9.120043754577637 - ], - [ - "▁makes", - -9.120377540588379 - ], - [ - "▁past", - -9.120619773864746 - ], - [ - "ca", - -9.12130069732666 - ], - [ - "▁light", - -9.122207641601562 - ], - [ - "▁Der", - -9.122997283935547 - ], - [ - "▁run", - -9.125843048095703 - ], - [ - "▁four", - -9.126943588256836 - ], - [ - "ance", - -9.130500793457031 - ], - [ - "▁ever", - -9.131503105163574 - ], - [ - "▁einem", - -9.131816864013672 - ], - [ - "▁below", - -9.133723258972168 - ], - [ - "O", - -9.134073257446289 - ], - [ - "▁9", - -9.137282371520996 - ], - [ - "▁learn", - -9.14004135131836 - ], - [ - "out", - -9.140358924865723 - ], - [ - "▁video", - -9.143178939819336 - ], - [ - "▁etc", - -9.146929740905762 - ], - [ - "▁«", - -9.148795127868652 - ], - [ - "▁zum", - -9.149712562561035 - ], - [ - "▁kann", - -9.1504487991333 - ], - [ - "▁minutes", - -9.151180267333984 - ], - [ - "▁example", - -9.154194831848145 - ], - [ - "▁nous", - -9.154619216918945 - ], - [ - "▁Se", - -9.157441139221191 - ], - [ - "▁sie", - -9.159955024719238 - ], - [ - "▁industry", - -9.161614418029785 - ], - [ - "▁problem", - -9.162016868591309 - ], - [ - "J", - -9.162480354309082 - ], - [ - "▁country", - -9.163366317749023 - ], - [ - "▁fact", - -9.164189338684082 - ], - [ - "▁type", - -9.164190292358398 - ], - [ - "ner", - -9.164238929748535 - ], - [ - "▁companies", - -9.165864944458008 - ], - [ - "▁line", - -9.169849395751953 - ], - [ - "▁city", - -9.172713279724121 - ], - [ - "▁check", - -9.173710823059082 - ], - [ - "▁doing", - -9.174406051635742 - ], - [ - "elle", - -9.175037384033203 - ], - [ - "▁fun", - -9.176549911499023 - ], - [ - "▁En", - -9.177546501159668 - ], - [ - "▁Your", - -9.178601264953613 - ], - [ - "ling", - -9.181450843811035 - ], - [ - "▁share", - -9.18185806274414 - ], - [ - "ile", - -9.182005882263184 - ], - [ - "▁actually", - -9.187544822692871 - ], - [ - "▁value", - -9.187751770019531 - ], - [ - "zi", - -9.188661575317383 - ], - [ - "▁ab", - -9.1898832321167 - ], - [ - "▁offers", - 
-9.1905517578125 - ], - [ - "▁less", - -9.190573692321777 - ], - [ - "▁night", - -9.193560600280762 - ], - [ - "▁Dr", - -9.19518756866455 - ], - [ - "▁started", - -9.195454597473145 - ], - [ - "▁least", - -9.198020935058594 - ], - [ - "▁short", - -9.198562622070312 - ], - [ - "▁main", - -9.201143264770508 - ], - [ - "▁single", - -9.202939987182617 - ], - [ - "▁though", - -9.203780174255371 - ], - [ - "▁prin", - -9.203930854797363 - ], - [ - "time", - -9.20531177520752 - ], - [ - "▁hours", - -9.206608772277832 - ], - [ - "▁others", - -9.206849098205566 - ], - [ - "▁called", - -9.20730209350586 - ], - [ - "▁visit", - -9.208869934082031 - ], - [ - "▁bit", - -9.209009170532227 - ], - [ - "ée", - -9.210821151733398 - ], - [ - "▁customers", - -9.211383819580078 - ], - [ - "▁music", - -9.212000846862793 - ], - [ - "▁members", - -9.217191696166992 - ], - [ - "ies", - -9.21743392944336 - ], - [ - "▁pay", - -9.219176292419434 - ], - [ - "nd", - -9.219744682312012 - ], - [ - "▁once", - -9.221125602722168 - ], - [ - "gen", - -9.2217378616333 - ], - [ - "▁können", - -9.222976684570312 - ], - [ - "▁low", - -9.223771095275879 - ], - [ - "▁durch", - -9.227394104003906 - ], - [ - "▁story", - -9.228075981140137 - ], - [ - "▁understand", - -9.22953987121582 - ], - [ - "“", - -9.229856491088867 - ], - [ - "▁Am", - -9.231831550598145 - ], - [ - "▁didn", - -9.234603881835938 - ], - [ - "▁content", - -9.237217903137207 - ], - [ - "son", - -9.24180793762207 - ], - [ - "▁building", - -9.242242813110352 - ], - [ - "▁result", - -9.242605209350586 - ], - [ - "▁aux", - -9.243107795715332 - ], - [ - "▁complete", - -9.244999885559082 - ], - [ - "▁doesn", - -9.24510669708252 - ], - [ - "▁haben", - -9.246070861816406 - ], - [ - "▁questions", - -9.24661636352539 - ], - [ - "line", - -9.247077941894531 - ], - [ - "▁technology", - -9.247429847717285 - ], - [ - "▁Pro", - -9.247976303100586 - ], - [ - "▁current", - -9.248504638671875 - ], - [ - "▁won", - -9.248883247375488 - ], - [ - "▁let", - 
-9.250710487365723 - ], - [ - "▁features", - -9.251978874206543 - ], - [ - "▁please", - -9.258262634277344 - ], - [ - "5", - -9.258519172668457 - ], - [ - "▁above", - -9.259394645690918 - ], - [ - "ive", - -9.262128829956055 - ], - [ - "▁management", - -9.262394905090332 - ], - [ - "▁lui", - -9.262539863586426 - ], - [ - "her", - -9.263057708740234 - ], - [ - "▁training", - -9.265711784362793 - ], - [ - "▁everything", - -9.2665433883667 - ], - [ - "▁noch", - -9.266846656799316 - ], - [ - "▁came", - -9.267708778381348 - ], - [ - "▁web", - -9.272823333740234 - ], - [ - "▁ensure", - -9.272987365722656 - ], - [ - "▁months", - -9.273130416870117 - ], - [ - "▁art", - -9.27313232421875 - ], - [ - "▁sub", - -9.274359703063965 - ], - [ - "▁million", - -9.274559020996094 - ], - [ - "▁professional", - -9.275035858154297 - ], - [ - "▁results", - -9.278368949890137 - ], - [ - "▁kind", - -9.278395652770996 - ], - [ - "▁season", - -9.279285430908203 - ], - [ - "▁unique", - -9.281067848205566 - ], - [ - "ze", - -9.284360885620117 - ], - [ - "▁enjoy", - -9.28487777709961 - ], - [ - "▁early", - -9.287765502929688 - ], - [ - "▁major", - -9.288202285766602 - ], - [ - "▁yet", - -9.29152774810791 - ], - [ - "▁Ver", - -9.293331146240234 - ], - [ - "one", - -9.296777725219727 - ], - [ - "▁media", - -9.29719352722168 - ], - [ - "▁[", - -9.30095100402832 - ], - [ - "▁property", - -9.302969932556152 - ], - [ - "▁beautiful", - -9.304466247558594 - ], - [ - "▁given", - -9.305286407470703 - ], - [ - "▁due", - -9.306716918945312 - ], - [ - "▁government", - -9.307181358337402 - ], - [ - "▁nur", - -9.30881404876709 - ], - [ - "▁email", - -9.309103012084961 - ], - [ - "▁total", - -9.311080932617188 - ], - [ - "▁natural", - -9.311264038085938 - ], - [ - "▁test", - -9.311450004577637 - ], - [ - "▁provides", - -9.311640739440918 - ], - [ - "▁various", - -9.312631607055664 - ], - [ - "▁American", - -9.315605163574219 - ], - [ - "▁moment", - -9.318109512329102 - ], - [ - "▁air", - -9.318952560424805 - 
], - [ - "▁idea", - -9.319236755371094 - ], - [ - "▁known", - -9.319981575012207 - ], - [ - "▁Il", - -9.320504188537598 - ], - [ - "▁friends", - -9.320576667785645 - ], - [ - "▁final", - -9.320919036865234 - ], - [ - "▁buy", - -9.32139778137207 - ], - [ - "▁specific", - -9.322234153747559 - ], - [ - "▁issues", - -9.32454776763916 - ], - [ - "▁took", - -9.325233459472656 - ], - [ - "▁mind", - -9.326258659362793 - ], - [ - "▁study", - -9.32675838470459 - ], - [ - "▁addition", - -9.328418731689453 - ], - [ - "▁size", - -9.332446098327637 - ], - [ - "▁pro", - -9.334047317504883 - ], - [ - "▁film", - -9.33545970916748 - ], - [ - "▁pot", - -9.335636138916016 - ], - [ - "▁thought", - -9.338120460510254 - ], - [ - "▁tell", - -9.33890438079834 - ], - [ - "▁While", - -9.339675903320312 - ], - [ - "▁head", - -9.339983940124512 - ], - [ - "▁clients", - -9.340429306030273 - ], - [ - "▁performance", - -9.346199989318848 - ], - [ - "▁question", - -9.346835136413574 - ], - [ - "▁whether", - -9.347925186157227 - ], - [ - "▁certain", - -9.34826946258545 - ], - [ - "▁model", - -9.348764419555664 - ], - [ - "▁following", - -9.350926399230957 - ], - [ - "▁energy", - -9.354207992553711 - ], - [ - "▁office", - -9.354207992553711 - ], - [ - "▁whole", - -9.356687545776367 - ], - [ - "▁bring", - -9.356956481933594 - ], - [ - "▁required", - -9.35726261138916 - ], - [ - "ţi", - -9.358223915100098 - ], - [ - "▁date", - -9.358695030212402 - ], - [ - "_", - -9.358983039855957 - ], - [ - "que", - -9.359789848327637 - ], - [ - "▁da", - -9.360264778137207 - ], - [ - "▁US", - -9.36120319366455 - ], - [ - "▁taking", - -9.36143684387207 - ], - [ - "go", - -9.362788200378418 - ], - [ - "▁living", - -9.36341667175293 - ], - [ - "▁someone", - -9.363489151000977 - ], - [ - "▁heart", - -9.365120887756348 - ], - [ - "▁key", - -9.365775108337402 - ], - [ - "▁areas", - -9.366238594055176 - ], - [ - "▁says", - -9.367013931274414 - ], - [ - "▁2018", - -9.369132041931152 - ], - [ - "▁month", - -9.37012767791748 
- ], - [ - "▁Er", - -9.371354103088379 - ], - [ - "ste", - -9.375077247619629 - ], - [ - "▁11", - -9.375179290771484 - ], - [ - "▁front", - -9.37528133392334 - ], - [ - "▁Now", - -9.37669563293457 - ], - [ - "▁class", - -9.376946449279785 - ], - [ - "▁choose", - -9.377082824707031 - ], - [ - "pe", - -9.37808609008789 - ], - [ - "▁further", - -9.379021644592285 - ], - [ - "▁believe", - -9.37936019897461 - ], - [ - "of", - -9.379590034484863 - ], - [ - "▁among", - -9.380990982055664 - ], - [ - "sch", - -9.381686210632324 - ], - [ - "▁child", - -9.382609367370605 - ], - [ - "▁aber", - -9.38376235961914 - ], - [ - "▁Please", - -9.386269569396973 - ], - [ - "rea", - -9.387248992919922 - ], - [ - "▁later", - -9.387272834777832 - ], - [ - "▁amount", - -9.388760566711426 - ], - [ - "ice", - -9.390128135681152 - ], - [ - "▁National", - -9.390177726745605 - ], - [ - "▁style", - -9.390748977661133 - ], - [ - "▁tout", - -9.391490936279297 - ], - [ - "▁staff", - -9.392939567565918 - ], - [ - "▁white", - -9.397933959960938 - ], - [ - "▁ge", - -9.399179458618164 - ], - [ - "▁five", - -9.400984764099121 - ], - [ - "▁blog", - -9.40109920501709 - ], - [ - "▁designed", - -9.40125846862793 - ], - [ - "▁went", - -9.402216911315918 - ], - [ - "▁Da", - -9.40268611907959 - ], - [ - "▁general", - -9.403801918029785 - ], - [ - "▁rest", - -9.403874397277832 - ], - [ - "▁zur", - -9.40579891204834 - ], - [ - "▁quite", - -9.405948638916016 - ], - [ - "per", - -9.40687084197998 - ], - [ - "▁customer", - -9.408379554748535 - ], - [ - "▁close", - -9.408747673034668 - ], - [ - "▁Some", - -9.41054630279541 - ], - [ - "▁women", - -9.41075611114502 - ], - [ - "▁move", - -9.410761833190918 - ], - [ - "▁software", - -9.411357879638672 - ], - [ - "▁Ein", - -9.413651466369629 - ], - [ - "▁Ab", - -9.413823127746582 - ], - [ - "▁history", - -9.413864135742188 - ], - [ - "▁either", - -9.41564655303955 - ], - [ - "▁seen", - -9.417396545410156 - ], - [ - "▁card", - -9.419726371765137 - ], - [ - "▁City", - 
-9.421541213989258 - ], - [ - "▁hope", - -9.421769142150879 - ], - [ - "▁16", - -9.422072410583496 - ], - [ - "és", - -9.422825813293457 - ], - [ - "va", - -9.423294067382812 - ], - [ - "▁Al", - -9.423827171325684 - ], - [ - "▁especially", - -9.424827575683594 - ], - [ - "▁view", - -9.426136016845703 - ], - [ - "men", - -9.427363395690918 - ], - [ - "▁account", - -9.427489280700684 - ], - [ - "▁needed", - -9.429777145385742 - ], - [ - "▁United", - -9.429789543151855 - ], - [ - "]", - -9.432387351989746 - ], - [ - "▁yourself", - -9.432788848876953 - ], - [ - "▁100", - -9.433059692382812 - ], - [ - "▁receive", - -9.433417320251465 - ], - [ - "▁ideas", - -9.43369197845459 - ], - [ - "▁writing", - -9.434585571289062 - ], - [ - "▁simply", - -9.434741973876953 - ], - [ - "▁present", - -9.435087203979492 - ], - [ - "▁continue", - -9.436107635498047 - ], - [ - "▁application", - -9.44115161895752 - ], - [ - "▁build", - -9.44187068939209 - ], - [ - "▁turn", - -9.44249439239502 - ], - [ - "ated", - -9.442923545837402 - ], - [ - "▁everyone", - -9.443060874938965 - ], - [ - "cette", - -9.443114280700684 - ], - [ - "▁bien", - -9.444964408874512 - ], - [ - "less", - -9.445222854614258 - ], - [ - "▁Si", - -9.445359230041504 - ], - [ - "▁original", - -9.446867942810059 - ], - [ - "8", - -9.44794750213623 - ], - [ - "▁individual", - -9.448895454406738 - ], - [ - "tre", - -9.449433326721191 - ], - [ - "▁works", - -9.45171070098877 - ], - [ - "▁options", - -9.451821327209473 - ], - [ - "▁May", - -9.454456329345703 - ], - [ - "▁Not", - -9.454940795898438 - ], - [ - "▁report", - -9.455467224121094 - ], - [ - "mer", - -9.457239151000977 - ], - [ - "▁human", - -9.459118843078613 - ], - [ - "▁provided", - -9.459603309631348 - ], - [ - "▁By", - -9.460925102233887 - ], - [ - "▁series", - -9.462006568908691 - ], - [ - "7", - -9.46226692199707 - ], - [ - "▁modern", - -9.463875770568848 - ], - [ - "▁meet", - -9.463921546936035 - ], - [ - "▁50", - -9.464119911193848 - ], - [ - "▁25", - 
-9.46969985961914 - ], - [ - "▁color", - -9.470091819763184 - ], - [ - "▁download", - -9.470109939575195 - ], - [ - "▁Here", - -9.471144676208496 - ], - [ - "6", - -9.471323013305664 - ], - [ - "▁poate", - -9.471449851989746 - ], - [ - "▁În", - -9.472321510314941 - ], - [ - "▁phone", - -9.473695755004883 - ], - [ - "▁likely", - -9.474374771118164 - ], - [ - "▁table", - -9.476469993591309 - ], - [ - "▁ma", - -9.476551055908203 - ], - [ - "▁Or", - -9.479181289672852 - ], - [ - "Z", - -9.48026180267334 - ], - [ - "▁19", - -9.482215881347656 - ], - [ - "▁insurance", - -9.482544898986816 - ], - [ - "▁anything", - -9.483808517456055 - ], - [ - "▁search", - -9.485033988952637 - ], - [ - "▁Ge", - -9.48520565032959 - ], - [ - "▁issue", - -9.485564231872559 - ], - [ - "▁includes", - -9.485688209533691 - ], - [ - "▁clear", - -9.487342834472656 - ], - [ - "les", - -9.488021850585938 - ], - [ - "▁almost", - -9.488259315490723 - ], - [ - "ilor", - -9.48935317993164 - ], - [ - "▁14", - -9.490717887878418 - ], - [ - "by", - -9.494056701660156 - ], - [ - "▁Du", - -9.49624252319336 - ], - [ - "▁mais", - -9.497303009033203 - ], - [ - "ier", - -9.499163627624512 - ], - [ - "▁law", - -9.49924087524414 - ], - [ - "▁added", - -9.500134468078613 - ], - [ - "▁con", - -9.500962257385254 - ], - [ - ",\"", - -9.501530647277832 - ], - [ - "▁ago", - -9.502127647399902 - ], - [ - "▁His", - -9.504697799682617 - ], - [ - "▁points", - -9.504981994628906 - ], - [ - "▁mult", - -9.505581855773926 - ], - [ - "▁financial", - -9.506216049194336 - ], - [ - "▁problems", - -9.506428718566895 - ], - [ - "▁however", - -9.50648307800293 - ], - [ - "▁events", - -9.50675106048584 - ], - [ - "▁half", - -9.507889747619629 - ], - [ - "ard", - -9.511183738708496 - ], - [ - "▁ask", - -9.51156997680664 - ], - [ - "▁version", - -9.511631965637207 - ], - [ - "end", - -9.512478828430176 - ], - [ - "▁created", - -9.512639999389648 - ], - [ - "▁lead", - -9.512917518615723 - ], - [ - "▁focus", - -9.513853073120117 - ], - [ 
- "▁increase", - -9.515096664428711 - ], - [ - "ex", - -9.515118598937988 - ], - [ - "▁allow", - -9.515798568725586 - ], - [ - "▁extra", - -9.516464233398438 - ], - [ - "▁24", - -9.516692161560059 - ], - [ - "▁credit", - -9.516772270202637 - ], - [ - "▁production", - -9.516801834106445 - ], - [ - "zu", - -9.517256736755371 - ], - [ - "▁black", - -9.51754093170166 - ], - [ - "▁systems", - -9.518040657043457 - ], - [ - "▁17", - -9.518178939819336 - ], - [ - "▁opportunity", - -9.518531799316406 - ], - [ - "▁bis", - -9.519219398498535 - ], - [ - "▁fast", - -9.519807815551758 - ], - [ - "ring", - -9.521166801452637 - ], - [ - "▁Don", - -9.522114753723145 - ], - [ - "▁via", - -9.52242660522461 - ], - [ - "fer", - -9.5225248336792 - ], - [ - "▁comme", - -9.522799491882324 - ], - [ - "▁popular", - -9.523722648620605 - ], - [ - "▁South", - -9.524491310119629 - ], - [ - "ating", - -9.525003433227539 - ], - [ - "▁State", - -9.525198936462402 - ], - [ - "ator", - -9.525679588317871 - ], - [ - "▁common", - -9.525968551635742 - ], - [ - "con", - -9.526727676391602 - ], - [ - "▁throughout", - -9.527557373046875 - ], - [ - "▁risk", - -9.52774715423584 - ], - [ - "▁young", - -9.528532028198242 - ], - [ - "▁Je", - -9.528688430786133 - ], - [ - "▁image", - -9.52928352355957 - ], - [ - "ha", - -9.529376983642578 - ], - [ - "▁third", - -9.529587745666504 - ], - [ - "▁taken", - -9.530049324035645 - ], - [ - "▁Z", - -9.5314302444458 - ], - [ - "▁dis", - -9.5316162109375 - ], - [ - "▁From", - -9.533575057983398 - ], - [ - "▁details", - -9.534862518310547 - ], - [ - "▁games", - -9.53516674041748 - ], - [ - "▁practice", - -9.536040306091309 - ], - [ - "che", - -9.536151885986328 - ], - [ - "▁security", - -9.537364959716797 - ], - [ - "▁medical", - -9.537653923034668 - ], - [ - "▁learning", - -9.537806510925293 - ], - [ - "▁material", - -9.538509368896484 - ], - [ - "▁international", - -9.540703773498535 - ], - [ - "▁forward", - -9.541245460510254 - ], - [ - "▁paper", - -9.541247367858887 - 
], - [ - "▁action", - -9.541348457336426 - ], - [ - "▁file", - -9.542378425598145 - ], - [ - "▁oil", - -9.543096542358398 - ], - [ - "▁self", - -9.54377555847168 - ], - [ - "▁private", - -9.545247077941895 - ], - [ - "▁interest", - -9.545559883117676 - ], - [ - "bar", - -9.546065330505371 - ], - [ - "▁sale", - -9.547115325927734 - ], - [ - "▁stay", - -9.547348976135254 - ], - [ - "ke", - -9.548089981079102 - ], - [ - "▁San", - -9.549053192138672 - ], - [ - "▁matter", - -9.549870491027832 - ], - [ - "▁reason", - -9.550254821777344 - ], - [ - "ted", - -9.55147647857666 - ], - [ - "▁potential", - -9.551742553710938 - ], - [ - "▁brand", - -9.552441596984863 - ], - [ - "▁field", - -9.55315113067627 - ], - [ - "▁treatment", - -9.553420066833496 - ], - [ - "▁period", - -9.553516387939453 - ], - [ - "▁York", - -9.553890228271484 - ], - [ - "▁Park", - -9.554738998413086 - ], - [ - "▁acest", - -9.556009292602539 - ], - [ - "ou", - -9.556926727294922 - ], - [ - "▁Ce", - -9.557014465332031 - ], - [ - "▁ready", - -9.558111190795898 - ], - [ - "▁rather", - -9.55860424041748 - ], - [ - "▁outside", - -9.560086250305176 - ], - [ - "▁standard", - -9.560121536254883 - ], - [ - "▁located", - -9.560770034790039 - ], - [ - "▁marketing", - -9.562313079833984 - ], - [ - "cu", - -9.564041137695312 - ], - [ - "▁Can", - -9.564562797546387 - ], - [ - "▁education", - -9.566105842590332 - ], - [ - "use", - -9.566640853881836 - ], - [ - "▁role", - -9.566828727722168 - ], - [ - "▁men", - -9.571505546569824 - ], - [ - "▁probably", - -9.571550369262695 - ], - [ - "▁store", - -9.57221508026123 - ], - [ - "▁John", - -9.572355270385742 - ], - [ - "▁rate", - -9.573956489562988 - ], - [ - "▁code", - -9.573994636535645 - ], - [ - "▁kids", - -9.574408531188965 - ], - [ - "▁currently", - -9.57552719116211 - ], - [ - "▁near", - -9.576475143432617 - ], - [ - "▁sales", - -9.576716423034668 - ], - [ - "▁usually", - -9.577012062072754 - ], - [ - "▁activities", - -9.577242851257324 - ], - [ - "▁party", - 
-9.577371597290039 - ], - [ - "▁leur", - -9.577434539794922 - ], - [ - "▁particular", - -9.577627182006836 - ], - [ - "▁mehr", - -9.577707290649414 - ], - [ - "ill", - -9.578757286071777 - ], - [ - "▁percent", - -9.579113006591797 - ], - [ - "▁fait", - -9.579537391662598 - ], - [ - "▁happy", - -9.579904556274414 - ], - [ - "▁inside", - -9.58005428314209 - ], - [ - "▁save", - -9.580510139465332 - ], - [ - "▁skills", - -9.580765724182129 - ], - [ - "▁consider", - -9.581025123596191 - ], - [ - "▁recent", - -9.58161735534668 - ], - [ - "▁strong", - -9.581781387329102 - ], - [ - "▁position", - -9.582076072692871 - ], - [ - "▁knowledge", - -9.582303047180176 - ], - [ - "▁tax", - -9.583868980407715 - ], - [ - "▁users", - -9.584261894226074 - ], - [ - "und", - -9.585564613342285 - ], - [ - "▁coming", - -9.585904121398926 - ], - [ - "▁article", - -9.585923194885254 - ], - [ - "min", - -9.586345672607422 - ], - [ - "▁sein", - -9.586555480957031 - ], - [ - "▁travel", - -9.586871147155762 - ], - [ - "▁changes", - -9.58765983581543 - ], - [ - "▁impact", - -9.588181495666504 - ], - [ - "▁wanted", - -9.588460922241211 - ], - [ - "▁address", - -9.5885591506958 - ], - [ - "▁soon", - -9.58873462677002 - ], - [ - "▁North", - -9.588915824890137 - ], - [ - "ată", - -9.589237213134766 - ], - [ - "▁trying", - -9.58985424041748 - ], - [ - "▁app", - -9.590612411499023 - ], - [ - "▁School", - -9.592510223388672 - ], - [ - "▁Es", - -9.592548370361328 - ], - [ - "we", - -9.59261703491211 - ], - [ - "▁conditions", - -9.59292984008789 - ], - [ - "▁digital", - -9.593293190002441 - ], - [ - "▁similar", - -9.594805717468262 - ], - [ - "▁solution", - -9.59514331817627 - ], - [ - "▁location", - -9.595183372497559 - ], - [ - "▁Of", - -9.595418930053711 - ], - [ - "▁follow", - -9.595842361450195 - ], - [ - "▁red", - -9.597526550292969 - ], - [ - "▁review", - -9.599202156066895 - ], - [ - "▁skin", - -9.599575996398926 - ], - [ - "▁pretty", - -9.600369453430176 - ], - [ - "day", - -9.600558280944824 - 
], - [ - "▁dé", - -9.602072715759277 - ], - [ - "▁cause", - -9.602169036865234 - ], - [ - "▁Sa", - -9.602463722229004 - ], - [ - "▁user", - -9.602520942687988 - ], - [ - "▁Man", - -9.603377342224121 - ], - [ - "”.", - -9.604146003723145 - ], - [ - "▁Just", - -9.604366302490234 - ], - [ - "▁faire", - -9.604475021362305 - ], - [ - "▁member", - -9.605619430541992 - ], - [ - "▁iar", - -9.606892585754395 - ], - [ - "▁higher", - -9.607715606689453 - ], - [ - "▁step", - -9.607887268066406 - ], - [ - "▁wide", - -9.608185768127441 - ], - [ - "▁uns", - -9.608920097351074 - ], - [ - "▁World", - -9.609135627746582 - ], - [ - "▁additional", - -9.61176586151123 - ], - [ - "ber", - -9.613197326660156 - ], - [ - "▁easily", - -9.613990783691406 - ], - [ - "▁deal", - -9.615070343017578 - ], - [ - "▁ways", - -9.615514755249023 - ], - [ - "▁mobile", - -9.616837501525879 - ], - [ - "▁national", - -9.616913795471191 - ], - [ - "▁couple", - -9.617389678955078 - ], - [ - "▁ihre", - -9.61939811706543 - ], - [ - "▁choice", - -9.619612693786621 - ], - [ - "for", - -9.619686126708984 - ], - [ - "ous", - -9.62070083618164 - ], - [ - "▁Google", - -9.620855331420898 - ], - [ - "▁environment", - -9.622426986694336 - ], - [ - "urile", - -9.623322486877441 - ], - [ - "▁Center", - -9.626680374145508 - ], - [ - "mp", - -9.628592491149902 - ], - [ - "▁»", - -9.629727363586426 - ], - [ - "qui", - -9.630680084228516 - ], - [ - "▁growth", - -9.631048202514648 - ], - [ - "ler", - -9.633174896240234 - ], - [ - "▁improve", - -9.63360595703125 - ], - [ - "▁items", - -9.6336669921875 - ], - [ - "▁Nu", - -9.63393783569336 - ], - [ - "▁leave", - -9.634074211120605 - ], - [ - "▁true", - -9.634805679321289 - ], - [ - "▁wurde", - -9.63487434387207 - ], - [ - "▁cannot", - -9.635004043579102 - ], - [ - "▁13", - -9.635096549987793 - ], - [ - "▁running", - -9.636015892028809 - ], - [ - "▁anti", - -9.636177062988281 - ], - [ - "▁option", - -9.636306762695312 - ], - [ - "▁reading", - -9.63657283782959 - ], - [ - "▁Car", 
- -9.636698722839355 - ], - [ - "▁Wir", - -9.638110160827637 - ], - [ - "▁April", - -9.63975715637207 - ], - [ - "▁behind", - -9.640642166137695 - ], - [ - "▁client", - -9.640750885009766 - ], - [ - "▁cover", - -9.641012191772461 - ], - [ - "▁stop", - -9.641090393066406 - ], - [ - "ja", - -9.641277313232422 - ], - [ - "▁built", - -9.641307830810547 - ], - [ - "▁Con", - -9.641313552856445 - ], - [ - "ement", - -9.641366004943848 - ], - [ - "▁projects", - -9.641828536987305 - ], - [ - "▁variety", - -9.641840934753418 - ], - [ - "▁Ihre", - -9.642666816711426 - ], - [ - "ș", - -9.64302921295166 - ], - [ - "▁unter", - -9.64385986328125 - ], - [ - "▁longer", - -9.646577835083008 - ], - [ - "year", - -9.647161483764648 - ], - [ - "▁photo", - -9.648370742797852 - ], - [ - "▁Also", - -9.64933967590332 - ], - [ - "▁received", - -9.651098251342773 - ], - [ - "▁return", - -9.652676582336426 - ], - [ - "00", - -9.653081893920898 - ], - [ - "▁bar", - -9.653343200683594 - ], - [ - "ary", - -9.654427528381348 - ], - [ - "elor", - -9.655137062072754 - ], - [ - "▁Home", - -9.656189918518066 - ], - [ - "our", - -9.656298637390137 - ], - [ - "▁Me", - -9.65771198272705 - ], - [ - "▁held", - -9.659111022949219 - ], - [ - "▁click", - -9.66014289855957 - ], - [ - "▁ex", - -9.660178184509277 - ], - [ - "▁cum", - -9.661561965942383 - ], - [ - "▁takes", - -9.66395378112793 - ], - [ - "▁computer", - -9.665796279907227 - ], - [ - "▁told", - -9.668192863464355 - ], - [ - "+", - -9.670648574829102 - ], - [ - "▁patients", - -9.670809745788574 - ], - [ - "ting", - -9.672165870666504 - ], - [ - "▁direct", - -9.672248840332031 - ], - [ - "▁quickly", - -9.672410011291504 - ], - [ - "tic", - -9.672877311706543 - ], - [ - "▁vom", - -9.673723220825195 - ], - [ - "▁di", - -9.67381477355957 - ], - [ - "▁kitchen", - -9.674022674560547 - ], - [ - "▁network", - -9.675640106201172 - ], - [ - "▁2015", - -9.676688194274902 - ], - [ - "▁effective", - -9.677227020263672 - ], - [ - "▁collection", - 
-9.677703857421875 - ], - [ - "▁2017", - -9.677751541137695 - ], - [ - "▁words", - -9.678145408630371 - ], - [ - "▁cele", - -9.678857803344727 - ], - [ - "▁student", - -9.678862571716309 - ], - [ - "▁amazing", - -9.678932189941406 - ], - [ - "eur", - -9.680419921875 - ], - [ - ".”", - -9.68227481842041 - ], - [ - "▁ale", - -9.682716369628906 - ], - [ - "”,", - -9.68414306640625 - ], - [ - "▁purchase", - -9.684350967407227 - ], - [ - "▁mean", - -9.68477725982666 - ], - [ - "▁West", - -9.686846733093262 - ], - [ - "▁nice", - -9.6889066696167 - ], - [ - "▁age", - -9.689131736755371 - ], - [ - "▁base", - -9.68923568725586 - ], - [ - "▁summer", - -9.68928337097168 - ], - [ - "▁multi", - -9.689496994018555 - ], - [ - "▁allows", - -9.689573287963867 - ], - [ - "▁latest", - -9.689604759216309 - ], - [ - "▁global", - -9.68992805480957 - ], - [ - "▁chance", - -9.690792083740234 - ], - [ - "▁sense", - -9.690872192382812 - ], - [ - "ieren", - -9.692789077758789 - ], - [ - "▁difficult", - -9.693133354187012 - ], - [ - "ité", - -9.694750785827637 - ], - [ - "ka", - -9.694792747497559 - ], - [ - "du", - -9.69483757019043 - ], - [ - "▁providing", - -9.695744514465332 - ], - [ - "▁Art", - -9.696940422058105 - ], - [ - "▁drive", - -9.698554992675781 - ], - [ - "▁Go", - -9.698877334594727 - ], - [ - "▁très", - -9.699414253234863 - ], - [ - "U", - -9.699579238891602 - ], - [ - "▁Pre", - -9.699846267700195 - ], - [ - "▁shows", - -9.700040817260742 - ], - [ - "▁hair", - -9.701324462890625 - ], - [ - "▁success", - -9.701513290405273 - ], - [ - "▁UK", - -9.703169822692871 - ], - [ - "red", - -9.703241348266602 - ], - [ - "ü", - -9.703370094299316 - ], - [ - "ish", - -9.703631401062012 - ], - [ - "▁weeks", - -9.704839706420898 - ], - [ - "▁solutions", - -9.7055025100708 - ], - [ - "▁Pe", - -9.7057523727417 - ], - [ - "▁equipment", - -9.706141471862793 - ], - [ - "și", - -9.706482887268066 - ], - [ - "▁worked", - -9.707073211669922 - ], - [ - "\".", - -9.708627700805664 - ], - [ - "▁legal", 
- -9.708720207214355 - ], - [ - "▁bad", - -9.70892333984375 - ], - [ - "▁40", - -9.709561347961426 - ], - [ - "▁Internet", - -9.709798812866211 - ], - [ - "▁included", - -9.709976196289062 - ], - [ - "▁upon", - -9.710977554321289 - ], - [ - "▁excellent", - -9.71106243133545 - ], - [ - "▁goal", - -9.71130084991455 - ], - [ - "▁El", - -9.711408615112305 - ], - [ - "▁Mo", - -9.711703300476074 - ], - [ - "▁policy", - -9.71319580078125 - ], - [ - "▁aussi", - -9.713537216186523 - ], - [ - "▁weight", - -9.713687896728516 - ], - [ - "ici", - -9.715133666992188 - ], - [ - "▁approach", - -9.715584754943848 - ], - [ - "▁six", - -9.71579647064209 - ], - [ - "▁entire", - -9.715911865234375 - ], - [ - "9", - -9.71633529663086 - ], - [ - "▁send", - -9.716832160949707 - ], - [ - "▁1.", - -9.718971252441406 - ], - [ - "▁wenn", - -9.719056129455566 - ], - [ - "▁photos", - -9.71993637084961 - ], - [ - "://", - -9.721014022827148 - ], - [ - "ger", - -9.72281551361084 - ], - [ - "▁favorite", - -9.723104476928711 - ], - [ - "ley", - -9.723477363586426 - ], - [ - "▁else", - -9.72463321685791 - ], - [ - "▁types", - -9.72468376159668 - ], - [ - "▁link", - -9.725333213806152 - ], - [ - "▁recently", - -9.72584056854248 - ], - [ - "▁Mit", - -9.72631549835205 - ], - [ - "▁hot", - -9.726548194885254 - ], - [ - "tra", - -9.726597785949707 - ], - [ - "ş", - -9.727307319641113 - ], - [ - "▁according", - -9.728511810302734 - ], - [ - "▁necessary", - -9.728511810302734 - ], - [ - "▁multiple", - -9.729269027709961 - ], - [ - "▁Im", - -9.729510307312012 - ], - [ - "▁sehr", - -9.729660034179688 - ], - [ - "▁sign", - -9.732263565063477 - ], - [ - "▁anyone", - -9.73283576965332 - ], - [ - "▁land", - -9.733613014221191 - ], - [ - "▁States", - -9.734037399291992 - ], - [ - "▁unsere", - -9.734119415283203 - ], - [ - "ées", - -9.734639167785645 - ], - [ - "We", - -9.735671043395996 - ], - [ - "▁nothing", - -9.735845565795898 - ], - [ - "▁commercial", - -9.736858367919922 - ], - [ - "ful", - 
-9.737265586853027 - ], - [ - "▁seems", - -9.739325523376465 - ], - [ - "▁International", - -9.740097045898438 - ], - [ - "▁March", - -9.74163818359375 - ], - [ - "▁Thanks", - -9.743307113647461 - ], - [ - "▁County", - -9.74365234375 - ], - [ - "▁books", - -9.744638442993164 - ], - [ - "▁Ca", - -9.7451753616333 - ], - [ - "▁mi", - -9.746304512023926 - ], - [ - "▁meeting", - -9.746662139892578 - ], - [ - "▁tools", - -9.747593879699707 - ], - [ - "▁cut", - -9.747650146484375 - ], - [ - "▁related", - -9.74765682220459 - ], - [ - "▁lives", - -9.748003005981445 - ], - [ - "way", - -9.748501777648926 - ], - [ - "▁develop", - -9.748651504516602 - ], - [ - "▁sound", - -9.748723983764648 - ], - [ - "▁safe", - -9.748950958251953 - ], - [ - "▁Her", - -9.74937629699707 - ], - [ - "▁average", - -9.751277923583984 - ], - [ - "▁clean", - -9.75174331665039 - ], - [ - "▁talk", - -9.752362251281738 - ], - [ - "▁peut", - -9.75241756439209 - ], - [ - "▁dann", - -9.752546310424805 - ], - [ - "▁terms", - -9.753265380859375 - ], - [ - "▁foarte", - -9.753512382507324 - ], - [ - "▁super", - -9.754284858703613 - ], - [ - "▁programs", - -9.754853248596191 - ], - [ - "▁decision", - -9.75540828704834 - ], - [ - "▁costs", - -9.756058692932129 - ], - [ - "▁être", - -9.756291389465332 - ], - [ - "▁2019", - -9.757674217224121 - ], - [ - "led", - -9.759482383728027 - ], - [ - "▁parents", - -9.759617805480957 - ], - [ - "▁Mr", - -9.761702537536621 - ], - [ - "▁lower", - -9.762362480163574 - ], - [ - "▁door", - -9.762978553771973 - ], - [ - "▁été", - -9.763933181762695 - ], - [ - "▁box", - -9.764954566955566 - ], - [ - "▁record", - -9.765517234802246 - ], - [ - "▁win", - -9.765650749206543 - ], - [ - "ster", - -9.766402244567871 - ], - [ - "▁America", - -9.766748428344727 - ], - [ - "▁immer", - -9.768763542175293 - ], - [ - "▁road", - -9.76996898651123 - ], - [ - "▁leading", - -9.772759437561035 - ], - [ - "▁section", - -9.772838592529297 - ], - [ - "▁Facebook", - -9.772990226745605 - ], - [ - 
"▁Most", - -9.7738676071167 - ], - [ - "iert", - -9.77435302734375 - ], - [ - "▁morning", - -9.774497032165527 - ], - [ - "▁asked", - -9.775190353393555 - ], - [ - "▁involved", - -9.77551555633545 - ], - [ - "▁hier", - -9.777607917785645 - ], - [ - "▁images", - -9.77821159362793 - ], - [ - "▁House", - -9.778263092041016 - ], - [ - "▁highly", - -9.780763626098633 - ], - [ - "▁Bar", - -9.781620979309082 - ], - [ - "▁Service", - -9.782510757446289 - ], - [ - "▁attention", - -9.784318923950195 - ], - [ - "▁normal", - -9.784571647644043 - ], - [ - "▁plans", - -9.785883903503418 - ], - [ - "▁source", - -9.785930633544922 - ], - [ - "▁Aus", - -9.788092613220215 - ], - [ - "▁benefits", - -9.788655281066895 - ], - [ - "▁ses", - -9.789348602294922 - ], - [ - "des", - -9.789867401123047 - ], - [ - "▁internet", - -9.789949417114258 - ], - [ - "▁materials", - -9.790080070495605 - ], - [ - "▁même", - -9.791318893432617 - ], - [ - "▁fine", - -9.791522026062012 - ], - [ - "▁fit", - -9.792226791381836 - ], - [ - "▁21", - -9.792612075805664 - ], - [ - "▁itself", - -9.793739318847656 - ], - [ - "▁wieder", - -9.793972969055176 - ], - [ - "▁Many", - -9.795313835144043 - ], - [ - "▁nature", - -9.795402526855469 - ], - [ - "▁pain", - -9.795467376708984 - ], - [ - "▁device", - -9.796183586120605 - ], - [ - "art", - -9.796989440917969 - ], - [ - "pro", - -9.7971830368042 - ], - [ - "▁France", - -9.797271728515625 - ], - [ - "lich", - -9.797314643859863 - ], - [ - "▁2014", - -9.799542427062988 - ], - [ - "▁inter", - -9.799964904785156 - ], - [ - "▁Li", - -9.800453186035156 - ], - [ - "▁career", - -9.801136016845703 - ], - [ - "▁looks", - -9.80145263671875 - ], - [ - "▁ré", - -9.802245140075684 - ], - [ - "▁ability", - -9.802556991577148 - ], - [ - "▁situation", - -9.803154945373535 - ], - [ - "ville", - -9.803157806396484 - ], - [ - "▁2016", - -9.80319595336914 - ], - [ - "tes", - -9.803462982177734 - ], - [ - "▁remember", - -9.803879737854004 - ], - [ - "▁TV", - -9.803998947143555 - ], - [ 
- "▁levels", - -9.805853843688965 - ], - [ - "▁subject", - -9.807723999023438 - ], - [ - "ally", - -9.80844497680664 - ], - [ - "▁reduce", - -9.810232162475586 - ], - [ - "▁*", - -9.8108491897583 - ], - [ - "▁Day", - -9.810867309570312 - ], - [ - "▁write", - -9.812152862548828 - ], - [ - "▁pick", - -9.814252853393555 - ], - [ - "ence", - -9.815399169921875 - ], - [ - "▁fresh", - -9.816520690917969 - ], - [ - "▁traditional", - -9.816662788391113 - ], - [ - "chi", - -9.817692756652832 - ], - [ - "▁machine", - -9.818047523498535 - ], - [ - "▁resources", - -9.819125175476074 - ], - [ - "â", - -9.819502830505371 - ], - [ - "▁countries", - -9.820009231567383 - ], - [ - "▁Even", - -9.820342063903809 - ], - [ - "▁green", - -9.821283340454102 - ], - [ - "▁Free", - -9.821910858154297 - ], - [ - "▁daily", - -9.822112083435059 - ], - [ - "▁respect", - -9.823013305664062 - ], - [ - "▁instead", - -9.823714256286621 - ], - [ - "▁Once", - -9.82418155670166 - ], - [ - "▁word", - -9.824407577514648 - ], - [ - "▁construction", - -9.82489013671875 - ], - [ - "▁huge", - -9.825064659118652 - ], - [ - "▁feature", - -9.825220108032227 - ], - [ - "▁themselves", - -9.826369285583496 - ], - [ - "▁loss", - -9.82919692993164 - ], - [ - "%", - -9.830063819885254 - ], - [ - "▁safety", - -9.830256462097168 - ], - [ - "▁economic", - -9.831406593322754 - ], - [ - "▁require", - -9.831945419311523 - ], - [ - "30", - -9.83255386352539 - ], - [ - "▁planning", - -9.833393096923828 - ], - [ - "▁mal", - -9.834482192993164 - ], - [ - "▁directly", - -9.835214614868164 - ], - [ - "ure", - -9.835719108581543 - ], - [ - "▁track", - -9.835734367370605 - ], - [ - "▁tool", - -9.836135864257812 - ], - [ - "▁positive", - -9.836392402648926 - ], - [ - "▁piece", - -9.837076187133789 - ], - [ - "▁parts", - -9.837140083312988 - ], - [ - "ang", - -9.83740520477295 - ], - [ - "▁trip", - -9.837453842163086 - ], - [ - "▁organization", - -9.837935447692871 - ], - [ - "▁sites", - -9.838274002075195 - ], - [ - "▁fire", - 
-9.83831787109375 - ], - [ - "▁China", - -9.838876724243164 - ], - [ - "▁Pour", - -9.839289665222168 - ], - [ - "▁plant", - -9.84011459350586 - ], - [ - "▁board", - -9.840341567993164 - ], - [ - "▁interesting", - -9.841227531433105 - ], - [ - "gar", - -9.841713905334473 - ], - [ - "▁fie", - -9.841752052307129 - ], - [ - "▁late", - -9.842166900634766 - ], - [ - "▁wall", - -9.842294692993164 - ], - [ - "▁walk", - -9.842741966247559 - ], - [ - "ham", - -9.843868255615234 - ], - [ - "▁Ne", - -9.845427513122559 - ], - [ - "▁First", - -9.845462799072266 - ], - [ - "▁double", - -9.845701217651367 - ], - [ - "▁budget", - -9.847657203674316 - ], - [ - "▁cases", - -9.847670555114746 - ], - [ - "cal", - -9.849738121032715 - ], - [ - "old", - -9.849796295166016 - ], - [ - "▁Bo", - -9.849822998046875 - ], - [ - "▁spend", - -9.850439071655273 - ], - [ - "port", - -9.850828170776367 - ], - [ - "▁worth", - -9.850934028625488 - ], - [ - "ique", - -9.851308822631836 - ], - [ - "nes", - -9.85190486907959 - ], - [ - "cul", - -9.852272033691406 - ], - [ - "era", - -9.85296630859375 - ], - [ - "▁text", - -9.853032112121582 - ], - [ - "▁decided", - -9.854948997497559 - ], - [ - "▁floor", - -9.855036735534668 - ], - [ - "▁requirements", - -9.85529899597168 - ], - [ - "▁cel", - -9.855361938476562 - ], - [ - "▁effect", - -9.855412483215332 - ], - [ - "▁gibt", - -9.856159210205078 - ], - [ - "▁news", - -9.859238624572754 - ], - [ - "▁vos", - -9.859931945800781 - ], - [ - "▁players", - -9.86057186126709 - ], - [ - "▁saw", - -9.862728118896484 - ], - [ - "▁auto", - -9.863056182861328 - ], - [ - "▁town", - -9.863207817077637 - ], - [ - "▁myself", - -9.864106178283691 - ], - [ - "▁lost", - -9.864988327026367 - ], - [ - "▁$", - -9.865124702453613 - ], - [ - "▁June", - -9.86609172821045 - ], - [ - "▁significant", - -9.866196632385254 - ], - [ - "▁giving", - -9.866230010986328 - ], - [ - "▁stand", - -9.866744041442871 - ], - [ - "▁stock", - -9.867657661437988 - ], - [ - "▁hold", - 
-9.867766380310059 - ], - [ - "▁Are", - -9.869078636169434 - ], - [ - "▁shall", - -9.86923599243164 - ], - [ - "▁ideal", - -9.869279861450195 - ], - [ - "▁London", - -9.87080192565918 - ], - [ - "▁answer", - -9.870853424072266 - ], - [ - "▁Vor", - -9.87157917022705 - ], - [ - "▁gives", - -9.873115539550781 - ], - [ - "ative", - -9.87316608428955 - ], - [ - "▁timp", - -9.873167991638184 - ], - [ - "▁center", - -9.87362289428711 - ], - [ - "▁Group", - -9.874580383300781 - ], - [ - "▁sans", - -9.875143051147461 - ], - [ - "▁Ar", - -9.875466346740723 - ], - [ - "▁Ma", - -9.875568389892578 - ], - [ - "▁reach", - -9.876279830932617 - ], - [ - "ren", - -9.876652717590332 - ], - [ - "▁More", - -9.877446174621582 - ], - [ - "mit", - -9.878068923950195 - ], - [ - "▁guide", - -9.87833309173584 - ], - [ - "▁fully", - -9.878828048706055 - ], - [ - "▁Since", - -9.878952980041504 - ], - [ - "▁Inc", - -9.87923812866211 - ], - [ - "▁culture", - -9.879780769348145 - ], - [ - "eat", - -9.880531311035156 - ], - [ - "▁written", - -9.880722999572754 - ], - [ - "▁Ho", - -9.881338119506836 - ], - [ - "▁India", - -9.881625175476074 - ], - [ - "▁Well", - -9.881708145141602 - ], - [ - "back", - -9.881752967834473 - ], - [ - "▁goes", - -9.882170677185059 - ], - [ - "▁completely", - -9.88217544555664 - ], - [ - "▁tour", - -9.883081436157227 - ], - [ - "▁began", - -9.883196830749512 - ], - [ - "▁picture", - -9.883255958557129 - ], - [ - "▁mare", - -9.88353157043457 - ], - [ - "▁playing", - -9.884223937988281 - ], - [ - "▁trebuie", - -9.884926795959473 - ], - [ - "ils", - -9.884940147399902 - ], - [ - "chen", - -9.885220527648926 - ], - [ - "▁hit", - -9.885416984558105 - ], - [ - "▁complex", - -9.88591480255127 - ], - [ - "▁Thank", - -9.886140823364258 - ], - [ - "▁Let", - -9.886350631713867 - ], - [ - "▁applications", - -9.887116432189941 - ], - [ - "▁friend", - -9.888312339782715 - ], - [ - "▁English", - -9.889549255371094 - ], - [ - "▁charge", - -9.890040397644043 - ], - [ - "▁recommend", - 
-9.893453598022461 - ], - [ - "▁message", - -9.893672943115234 - ], - [ - "In", - -9.893722534179688 - ], - [ - "▁Mar", - -9.894762992858887 - ], - [ - "pp", - -9.895845413208008 - ], - [ - "▁method", - -9.89692497253418 - ], - [ - "▁successful", - -9.897004127502441 - ], - [ - "tion", - -9.898880958557129 - ], - [ - "▁release", - -9.899920463562012 - ], - [ - "▁creating", - -9.900403022766113 - ], - [ - "▁despre", - -9.90141773223877 - ], - [ - "esc", - -9.902434349060059 - ], - [ - "▁eye", - -9.902752876281738 - ], - [ - "▁apply", - -9.905945777893066 - ], - [ - "net", - -9.906000137329102 - ], - [ - "side", - -9.906539916992188 - ], - [ - "▁ar", - -9.906949996948242 - ], - [ - "▁platform", - -9.90713882446289 - ], - [ - "▁touch", - -9.907329559326172 - ], - [ - "▁towards", - -9.90785026550293 - ], - [ - "▁match", - -9.908224105834961 - ], - [ - "▁Black", - -9.909344673156738 - ], - [ - "▁fall", - -9.90961742401123 - ], - [ - "▁ground", - -9.910234451293945 - ], - [ - "▁High", - -9.910740852355957 - ], - [ - "▁Q", - -9.911155700683594 - ], - [ - "▁schon", - -9.911709785461426 - ], - [ - "▁hotel", - -9.911751747131348 - ], - [ - "▁prices", - -9.912031173706055 - ], - [ - "▁developed", - -9.913411140441895 - ], - [ - "uk", - -9.913476943969727 - ], - [ - "ide", - -9.91367244720459 - ], - [ - "▁September", - -9.91370964050293 - ], - [ - "ized", - -9.914202690124512 - ], - [ - "▁War", - -9.914704322814941 - ], - [ - "!!", - -9.916285514831543 - ], - [ - "▁grow", - -9.916997909545898 - ], - [ - "▁watch", - -9.917067527770996 - ], - [ - "▁storage", - -9.917412757873535 - ], - [ - "eau", - -9.917513847351074 - ], - [ - "can", - -9.918373107910156 - ], - [ - "▁Get", - -9.919524192810059 - ], - [ - "▁See", - -9.91953182220459 - ], - [ - "▁European", - -9.919703483581543 - ], - [ - "▁language", - -9.91982650756836 - ], - [ - "ează", - -9.920175552368164 - ], - [ - "▁court", - -9.920334815979004 - ], - [ - "▁Why", - -9.921106338500977 - ], - [ - "▁hear", - 
-9.921342849731445 - ], - [ - "▁doar", - -9.921804428100586 - ], - [ - "lan", - -9.92330265045166 - ], - [ - "▁Christmas", - -9.923810958862305 - ], - [ - "▁Web", - -9.923871994018555 - ], - [ - "vo", - -9.92405891418457 - ], - [ - "▁sent", - -9.924983024597168 - ], - [ - "▁businesses", - -9.925868034362793 - ], - [ - "▁Red", - -9.926278114318848 - ], - [ - "tel", - -9.926375389099121 - ], - [ - "▁Ha", - -9.926508903503418 - ], - [ - "▁wonderful", - -9.926653861999512 - ], - [ - "ations", - -9.926738739013672 - ], - [ - "za", - -9.92748737335205 - ], - [ - "▁22", - -9.928659439086914 - ], - [ - "▁thinking", - -9.92941665649414 - ], - [ - "▁became", - -9.929733276367188 - ], - [ - "▁cool", - -9.929835319519043 - ], - [ - "▁speed", - -9.930370330810547 - ], - [ - "mar", - -9.930426597595215 - ], - [ - "▁--", - -9.931743621826172 - ], - [ - "▁groups", - -9.931920051574707 - ], - [ - "▁interested", - -9.93198299407959 - ], - [ - "ak", - -9.93218994140625 - ], - [ - "▁60", - -9.932672500610352 - ], - [ - "▁screen", - -9.93370246887207 - ], - [ - "▁Design", - -9.933789253234863 - ], - [ - "▁limited", - -9.935648918151855 - ], - [ - "▁expected", - -9.935959815979004 - ], - [ - "▁opportunities", - -9.936376571655273 - ], - [ - "▁regular", - -9.936870574951172 - ], - [ - "off", - -9.93702220916748 - ], - [ - "▁Best", - -9.937298774719238 - ], - [ - "Re", - -9.938436508178711 - ], - [ - "▁ihr", - -9.938719749450684 - ], - [ - "▁Great", - -9.938907623291016 - ], - [ - "▁employees", - -9.93924617767334 - ], - [ - "▁custom", - -9.939679145812988 - ], - [ - "▁multe", - -9.940123558044434 - ], - [ - "let", - -9.940876007080078 - ], - [ - "▁benefit", - -9.942487716674805 - ], - [ - "▁term", - -9.942623138427734 - ], - [ - "▁bine", - -9.942869186401367 - ], - [ - "▁deep", - -9.944526672363281 - ], - [ - "▁August", - -9.94526481628418 - ], - [ - "▁President", - -9.945381164550781 - ], - [ - "▁Auf", - -9.945854187011719 - ], - [ - "▁wish", - -9.946924209594727 - ], - [ - 
"▁sometimes", - -9.947274208068848 - ], - [ - "ari", - -9.947793960571289 - ], - [ - "▁pressure", - -9.948184967041016 - ], - [ - "▁ani", - -9.94859504699707 - ], - [ - "▁trade", - -9.949930191040039 - ], - [ - "▁firm", - -9.950027465820312 - ], - [ - "▁comment", - -9.95003604888916 - ], - [ - "▁November", - -9.950242042541504 - ], - [ - "▁expect", - -9.951102256774902 - ], - [ - "▁2012", - -9.952491760253906 - ], - [ - "▁Ich", - -9.95328140258789 - ], - [ - "▁relationship", - -9.95363998413086 - ], - [ - "▁active", - -9.954682350158691 - ], - [ - "org", - -9.954710960388184 - ], - [ - "▁heat", - -9.956732749938965 - ], - [ - "▁wood", - -9.95678997039795 - ], - [ - "▁notre", - -9.957921028137207 - ], - [ - "▁function", - -9.958330154418945 - ], - [ - "▁2.", - -9.95909309387207 - ], - [ - "▁wedding", - -9.960049629211426 - ], - [ - "▁starting", - -9.961235046386719 - ], - [ - "▁Health", - -9.961249351501465 - ], - [ - "\",", - -9.961713790893555 - ], - [ - "▁death", - -9.962173461914062 - ], - [ - "▁pages", - -9.962764739990234 - ], - [ - "▁vehicle", - -9.96293830871582 - ], - [ - "▁request", - -9.963874816894531 - ], - [ - "▁helps", - -9.963916778564453 - ], - [ - "▁blue", - -9.964017868041992 - ], - [ - "▁analysis", - -9.964414596557617 - ], - [ - "▁posted", - -9.964544296264648 - ], - [ - "▁healthy", - -9.964814186096191 - ], - [ - "▁contract", - -9.964988708496094 - ], - [ - "▁•", - -9.965263366699219 - ], - [ - "▁Each", - -9.965293884277344 - ], - [ - "▁Fa", - -9.966179847717285 - ], - [ - "▁dintre", - -9.966221809387207 - ], - [ - "▁Friday", - -9.967202186584473 - ], - [ - "▁considered", - -9.967992782592773 - ], - [ - "cher", - -9.96826457977295 - ], - [ - "▁quick", - -9.968731880187988 - ], - [ - "▁understanding", - -9.96916389465332 - ], - [ - "▁condition", - -9.969378471374512 - ], - [ - "ization", - -9.971049308776855 - ], - [ - "▁document", - -9.971664428710938 - ], - [ - "▁prevent", - -9.971890449523926 - ], - [ - "▁growing", - -9.9725341796875 - ], - [ 
- "▁protection", - -9.972620964050293 - ], - [ - "▁cat", - -9.974002838134766 - ], - [ - "▁#", - -9.975058555603027 - ], - [ - "10", - -9.975275039672852 - ], - [ - "▁join", - -9.9759521484375 - ], - [ - "▁serve", - -9.976580619812012 - ], - [ - "▁blood", - -9.977095603942871 - ], - [ - "▁July", - -9.977341651916504 - ], - [ - "▁region", - -9.977787971496582 - ], - [ - "car", - -9.97933578491211 - ], - [ - "▁entre", - -9.979788780212402 - ], - [ - "▁physical", - -9.981287002563477 - ], - [ - "▁cash", - -9.9813232421875 - ], - [ - "aux", - -9.981823921203613 - ], - [ - "ng", - -9.982654571533203 - ], - [ - "▁stage", - -9.98281478881836 - ], - [ - "▁seem", - -9.983034133911133 - ], - [ - "▁definitely", - -9.983795166015625 - ], - [ - "▁investment", - -9.983827590942383 - ], - [ - "▁purpose", - -9.985441207885742 - ], - [ - "▁begin", - -9.985486030578613 - ], - [ - "®", - -9.985495567321777 - ], - [ - "▁break", - -9.985701560974121 - ], - [ - "itate", - -9.987293243408203 - ], - [ - "▁moving", - -9.989288330078125 - ], - [ - "▁met", - -9.990678787231445 - ], - [ - "ize", - -9.990833282470703 - ], - [ - "▁select", - -9.991165161132812 - ], - [ - "▁tous", - -9.991310119628906 - ], - [ - "▁Europe", - -9.991639137268066 - ], - [ - "@", - -9.992724418640137 - ], - [ - "▁individuals", - -9.993392944335938 - ], - [ - "▁Zeit", - -9.993524551391602 - ], - [ - "gu", - -9.995670318603516 - ], - [ - "▁unit", - -9.995753288269043 - ], - [ - "▁noi", - -9.996089935302734 - ], - [ - "▁places", - -9.996171951293945 - ], - [ - "all", - -9.99632453918457 - ], - [ - "▁wait", - -9.996755599975586 - ], - [ - "▁difference", - -9.997234344482422 - ], - [ - "▁round", - -9.998015403747559 - ], - [ - "50", - -9.99953842163086 - ], - [ - "rie", - -9.999545097351074 - ], - [ - "▁Et", - -9.999933242797852 - ], - [ - "20", - -10.000725746154785 - ], - [ - "▁activity", - -10.000792503356934 - ], - [ - "е", - -10.000866889953613 - ], - [ - "▁Windows", - -10.001087188720703 - ], - [ - "▁produce", - 
-10.001385688781738 - ], - [ - "▁keine", - -10.00212574005127 - ], - [ - "▁Air", - -10.002567291259766 - ], - [ - "▁January", - -10.004890441894531 - ], - [ - "▁deux", - -10.005081176757812 - ], - [ - "▁entry", - -10.005208015441895 - ], - [ - "king", - -10.006500244140625 - ], - [ - "▁goals", - -10.006736755371094 - ], - [ - "▁previous", - -10.0077543258667 - ], - [ - "▁+", - -10.008035659790039 - ], - [ - "▁Business", - -10.008259773254395 - ], - [ - "ont", - -10.008552551269531 - ], - [ - "▁Sunday", - -10.008694648742676 - ], - [ - "▁offering", - -10.010359764099121 - ], - [ - "▁response", - -10.011018753051758 - ], - [ - "▁surface", - -10.011393547058105 - ], - [ - "▁Department", - -10.01212215423584 - ], - [ - "▁exactly", - -10.012190818786621 - ], - [ - "▁Online", - -10.012577056884766 - ], - [ - "dem", - -10.013803482055664 - ], - [ - "ischen", - -10.014006614685059 - ], - [ - "▁hands", - -10.015100479125977 - ], - [ - "▁hour", - -10.016197204589844 - ], - [ - "▁dog", - -10.016946792602539 - ], - [ - "▁damage", - -10.017006874084473 - ], - [ - "▁capital", - -10.018792152404785 - ], - [ - "▁toate", - -10.020488739013672 - ], - [ - "▁wrong", - -10.020674705505371 - ], - [ - "unui", - -10.022201538085938 - ], - [ - "tri", - -10.023979187011719 - ], - [ - "▁sell", - -10.023999214172363 - ], - [ - "▁published", - -10.024175643920898 - ], - [ - "▁families", - -10.024675369262695 - ], - [ - "▁avoid", - -10.025490760803223 - ], - [ - "▁Ko", - -10.025506019592285 - ], - [ - "▁mod", - -10.026697158813477 - ], - [ - "rat", - -10.027653694152832 - ], - [ - "▁Make", - -10.0299654006958 - ], - [ - "▁October", - -10.030153274536133 - ], - [ - "▁former", - -10.031285285949707 - ], - [ - "▁Services", - -10.03281021118164 - ], - [ - "▁felt", - -10.033045768737793 - ], - [ - "▁selection", - -10.033309936523438 - ], - [ - "eaza", - -10.034177780151367 - ], - [ - "gel", - -10.034422874450684 - ], - [ - "▁Good", - -10.035792350769043 - ], - [ - "▁actual", - -10.0364351272583 - ], 
- [ - "▁gut", - -10.036853790283203 - ], - [ - "▁gas", - -10.03708553314209 - ], - [ - "15", - -10.038182258605957 - ], - [ - "▁structure", - -10.038285255432129 - ], - [ - "▁act", - -10.0386381149292 - ], - [ - "▁Zu", - -10.038654327392578 - ], - [ - "▁creative", - -10.039134979248047 - ], - [ - "▁Vi", - -10.039159774780273 - ], - [ - "▁shop", - -10.04066276550293 - ], - [ - "▁Lo", - -10.040735244750977 - ], - [ - "şi", - -10.042192459106445 - ], - [ - "▁mis", - -10.042224884033203 - ], - [ - "ungen", - -10.042301177978516 - ], - [ - "▁fan", - -10.04240608215332 - ], - [ - "▁|", - -10.043391227722168 - ], - [ - "▁Bei", - -10.044037818908691 - ], - [ - "▁protect", - -10.04454517364502 - ], - [ - "▁Na", - -10.0447998046875 - ], - [ - "q", - -10.045693397521973 - ], - [ - "ok", - -10.04710578918457 - ], - [ - "▁California", - -10.047263145446777 - ], - [ - "▁political", - -10.047301292419434 - ], - [ - "25", - -10.047530174255371 - ], - [ - "▁feeling", - -10.047913551330566 - ], - [ - "▁ces", - -10.048321723937988 - ], - [ - "▁display", - -10.048857688903809 - ], - [ - "▁essential", - -10.04964542388916 - ], - [ - "ând", - -10.049971580505371 - ], - [ - "▁seine", - -10.050551414489746 - ], - [ - "▁soft", - -10.050915718078613 - ], - [ - "ach", - -10.05102252960205 - ], - [ - "▁happen", - -10.051118850708008 - ], - [ - "▁Paul", - -10.053346633911133 - ], - [ - "▁Cu", - -10.054024696350098 - ], - [ - "house", - -10.055376052856445 - ], - [ - "ante", - -10.05582046508789 - ], - [ - "▁easier", - -10.056551933288574 - ], - [ - "▁sort", - -10.0567045211792 - ], - [ - "▁Post", - -10.057138442993164 - ], - [ - "▁accept", - -10.05730152130127 - ], - [ - "field", - -10.057648658752441 - ], - [ - "zen", - -10.057741165161133 - ], - [ - "▁character", - -10.057848930358887 - ], - [ - "▁beginning", - -10.058433532714844 - ], - [ - "▁Jesus", - -10.058760643005371 - ], - [ - "▁weekend", - -10.059663772583008 - ], - [ - "▁certainly", - -10.06114387512207 - ], - [ - "▁THE", - 
-10.061254501342773 - ], - [ - "▁alle", - -10.06189250946045 - ], - [ - "▁transport", - -10.062220573425293 - ], - [ - "▁Saturday", - -10.063043594360352 - ], - [ - "▁basic", - -10.064136505126953 - ], - [ - "▁loved", - -10.06431770324707 - ], - [ - "ros", - -10.065333366394043 - ], - [ - "▁offered", - -10.065996170043945 - ], - [ - "▁camera", - -10.067024230957031 - ], - [ - "▁Green", - -10.06789779663086 - ], - [ - "ology", - -10.069480895996094 - ], - [ - "ä", - -10.069646835327148 - ], - [ - "▁manage", - -10.070416450500488 - ], - [ - "▁paid", - -10.070881843566895 - ], - [ - "▁advice", - -10.071617126464844 - ], - [ - "▁patient", - -10.072234153747559 - ], - [ - "▁spent", - -10.072272300720215 - ], - [ - "▁mir", - -10.072366714477539 - ], - [ - "▁baby", - -10.072400093078613 - ], - [ - "ö", - -10.073193550109863 - ], - [ - "▁basis", - -10.073338508605957 - ], - [ - "▁cancer", - -10.073765754699707 - ], - [ - "▁Although", - -10.07400894165039 - ], - [ - "▁gift", - -10.074336051940918 - ], - [ - "▁3.", - -10.074871063232422 - ], - [ - "dieser", - -10.075157165527344 - ], - [ - "▁overall", - -10.07520580291748 - ], - [ - "▁Sch", - -10.075265884399414 - ], - [ - "▁Ex", - -10.076258659362793 - ], - [ - "▁December", - -10.07689094543457 - ], - [ - "▁released", - -10.078214645385742 - ], - [ - "▁prior", - -10.07900333404541 - ], - [ - "▁sowie", - -10.081072807312012 - ], - [ - "▁club", - -10.081326484680176 - ], - [ - "▁Street", - -10.081535339355469 - ], - [ - "▁College", - -10.08254623413086 - ], - [ - "▁î", - -10.083059310913086 - ], - [ - "over", - -10.083159446716309 - ], - [ - "▁gave", - -10.08454704284668 - ], - [ - "▁truly", - -10.084784507751465 - ], - [ - "par", - -10.084806442260742 - ], - [ - "▁Canada", - -10.084888458251953 - ], - [ - "▁existing", - -10.085420608520508 - ], - [ - "lie", - -10.086335182189941 - ], - [ - "▁ganz", - -10.086658477783203 - ], - [ - "▁setting", - -10.087109565734863 - ], - [ - "▁supply", - -10.08739185333252 - ], - [ - 
"▁college", - -10.087540626525879 - ], - [ - "▁communication", - -10.088407516479492 - ], - [ - "▁23", - -10.088834762573242 - ], - [ - "▁pass", - -10.091546058654785 - ], - [ - "▁devices", - -10.091872215270996 - ], - [ - "▁glass", - -10.092083930969238 - ], - [ - "▁experienced", - -10.092395782470703 - ], - [ - "▁grand", - -10.093363761901855 - ], - [ - "▁Po", - -10.093396186828613 - ], - [ - "▁beyond", - -10.094029426574707 - ], - [ - "▁format", - -10.094165802001953 - ], - [ - "▁mon", - -10.09461498260498 - ], - [ - "▁perform", - -10.094635009765625 - ], - [ - "sten", - -10.095130920410156 - ], - [ - "▁1,", - -10.096270561218262 - ], - [ - "▁Per", - -10.096640586853027 - ], - [ - "▁sold", - -10.097247123718262 - ], - [ - "▁rates", - -10.0972900390625 - ], - [ - "▁regarding", - -10.097782135009766 - ], - [ - "▁Paris", - -10.098291397094727 - ], - [ - "▁Dar", - -10.099579811096191 - ], - [ - "▁challenge", - -10.099649429321289 - ], - [ - "▁feet", - -10.100564002990723 - ], - [ - "▁Su", - -10.102017402648926 - ], - [ - "je", - -10.102593421936035 - ], - [ - "▁Bank", - -10.102627754211426 - ], - [ - "ven", - -10.103126525878906 - ], - [ - "jo", - -10.103290557861328 - ], - [ - "▁band", - -10.10348892211914 - ], - [ - "▁delivery", - -10.104915618896484 - ], - [ - "Vous", - -10.104924201965332 - ], - [ - "tele", - -10.10495376586914 - ], - [ - "▁East", - -10.105379104614258 - ], - [ - "▁pictures", - -10.106067657470703 - ], - [ - "▁useful", - -10.106481552124023 - ], - [ - "*", - -10.107648849487305 - ], - [ - "▁increased", - -10.107746124267578 - ], - [ - "▁stories", - -10.108119010925293 - ], - [ - "sion", - -10.108280181884766 - ], - [ - "bra", - -10.108345985412598 - ], - [ - "▁brought", - -10.108466148376465 - ], - [ - "▁effort", - -10.109898567199707 - ], - [ - "▁payment", - -10.11058235168457 - ], - [ - "▁heard", - -10.110925674438477 - ], - [ - "▁played", - -10.111245155334473 - ], - [ - "▁White", - -10.111417770385742 - ], - [ - "▁metal", - 
-10.111721992492676 - ], - [ - "tal", - -10.111754417419434 - ], - [ - "▁engine", - -10.112006187438965 - ], - [ - "▁Club", - -10.11218547821045 - ], - [ - "ical", - -10.114581108093262 - ], - [ - "▁effects", - -10.115421295166016 - ], - [ - "▁degree", - -10.115763664245605 - ], - [ - "▁bed", - -10.1159086227417 - ], - [ - "ette", - -10.115991592407227 - ], - [ - "▁David", - -10.116386413574219 - ], - [ - "°", - -10.117666244506836 - ], - [ - "▁Au", - -10.117938041687012 - ], - [ - "▁Company", - -10.11845874786377 - ], - [ - "▁player", - -10.11938190460205 - ], - [ - "▁Today", - -10.120569229125977 - ], - [ - "▁maintain", - -10.12093448638916 - ], - [ - "▁minute", - -10.121193885803223 - ], - [ - "mail", - -10.122172355651855 - ], - [ - "▁race", - -10.122366905212402 - ], - [ - "▁comfortable", - -10.123887062072754 - ], - [ - "▁responsible", - -10.124085426330566 - ], - [ - "vor", - -10.124622344970703 - ], - [ - "▁associated", - -10.124695777893066 - ], - [ - "▁weather", - -10.124701499938965 - ], - [ - "▁$1", - -10.125639915466309 - ], - [ - "▁tried", - -10.126176834106445 - ], - [ - "▁Check", - -10.127649307250977 - ], - [ - "▁solid", - -10.127864837646484 - ], - [ - "▁movie", - -10.128364562988281 - ], - [ - "▁coffee", - -10.12874698638916 - ], - [ - "board", - -10.129073143005371 - ], - [ - "▁po", - -10.12946605682373 - ], - [ - "▁warm", - -10.129583358764648 - ], - [ - "▁connect", - -10.131733894348145 - ], - [ - "▁Ad", - -10.133807182312012 - ], - [ - "work", - -10.133859634399414 - ], - [ - "mal", - -10.13397216796875 - ], - [ - "▁Act", - -10.134634971618652 - ], - [ - "▁achieve", - -10.134769439697266 - ], - [ - "▁Nach", - -10.136604309082031 - ], - [ - "www", - -10.136669158935547 - ], - [ - "term", - -10.13672161102295 - ], - [ - "▁claim", - -10.137251853942871 - ], - [ - "▁particularly", - -10.138245582580566 - ], - [ - "▁cas", - -10.138396263122559 - ], - [ - "▁furniture", - -10.138461112976074 - ], - [ - "▁finish", - -10.13896369934082 - ], - [ - 
"▁temps", - -10.139026641845703 - ], - [ - "▁disease", - -10.139115333557129 - ], - [ - "▁lots", - -10.139196395874023 - ], - [ - "▁ball", - -10.139307975769043 - ], - [ - "▁sun", - -10.14010238647461 - ], - [ - "▁strategy", - -10.140498161315918 - ], - [ - "bre", - -10.140518188476562 - ], - [ - "▁mine", - -10.141541481018066 - ], - [ - "▁Click", - -10.141743659973145 - ], - [ - "ran", - -10.141983032226562 - ], - [ - "▁Will", - -10.142234802246094 - ], - [ - "▁garden", - -10.142974853515625 - ], - [ - "▁stuff", - -10.14359188079834 - ], - [ - "▁limit", - -10.144641876220703 - ], - [ - "▁bottom", - -10.14494800567627 - ], - [ - "▁shown", - -10.144962310791016 - ], - [ - "ship", - -10.145271301269531 - ], - [ - "▁habe", - -10.145858764648438 - ], - [ - "▁Super", - -10.146219253540039 - ], - [ - "▁completed", - -10.146971702575684 - ], - [ - "▁wine", - -10.146979331970215 - ], - [ - "ische", - -10.147262573242188 - ], - [ - "▁largest", - -10.147466659545898 - ], - [ - "▁appropriate", - -10.148261070251465 - ], - [ - "▁immediately", - -10.150248527526855 - ], - [ - "▁Hi", - -10.152358055114746 - ], - [ - "▁trust", - -10.152767181396484 - ], - [ - "ability", - -10.154254913330078 - ], - [ - "▁powerful", - -10.155101776123047 - ], - [ - "▁helping", - -10.155620574951172 - ], - [ - "▁schedule", - -10.155688285827637 - ], - [ - "▁correct", - -10.155707359313965 - ], - [ - "▁transfer", - -10.156496047973633 - ], - [ - "pre", - -10.15665340423584 - ], - [ - "▁journey", - -10.15688419342041 - ], - [ - "pm", - -10.157002449035645 - ], - [ - "don", - -10.158435821533203 - ], - [ - "▁highest", - -10.159249305725098 - ], - [ - "▁finally", - -10.15999698638916 - ], - [ - "form", - -10.160258293151855 - ], - [ - "▁extremely", - -10.160404205322266 - ], - [ - "▁window", - -10.160501480102539 - ], - [ - "▁Over", - -10.162222862243652 - ], - [ - "▁remove", - -10.162469863891602 - ], - [ - "wood", - -10.162479400634766 - ], - [ - "▁2013", - -10.163631439208984 - ], - [ - "▁mother", - 
-10.164072036743164 - ], - [ - "▁Auto", - -10.16436767578125 - ], - [ - "▁annual", - -10.164615631103516 - ], - [ - "▁Star", - -10.164834976196289 - ], - [ - "▁Di", - -10.166138648986816 - ], - [ - "о", - -10.16711139678955 - ], - [ - "▁gold", - -10.167129516601562 - ], - [ - "tar", - -10.167352676391602 - ], - [ - "ju", - -10.167750358581543 - ], - [ - "▁Use", - -10.169474601745605 - ], - [ - "▁thanks", - -10.16960334777832 - ], - [ - "▁centre", - -10.170127868652344 - ], - [ - "▁Australia", - -10.170358657836914 - ], - [ - "▁estate", - -10.170504570007324 - ], - [ - "▁eyes", - -10.1714448928833 - ], - [ - "▁force", - -10.171592712402344 - ], - [ - "▁income", - -10.17395305633545 - ], - [ - "▁science", - -10.174036026000977 - ], - [ - "ori", - -10.174230575561523 - ], - [ - "▁enter", - -10.174851417541504 - ], - [ - "▁28", - -10.175408363342285 - ], - [ - "ire", - -10.17568302154541 - ], - [ - "▁schools", - -10.175797462463379 - ], - [ - "▁restaurant", - -10.176088333129883 - ], - [ - "▁Council", - -10.177032470703125 - ], - [ - "aus", - -10.177885055541992 - ], - [ - "▁agree", - -10.17905330657959 - ], - [ - "▁campaign", - -10.179192543029785 - ], - [ - "▁Ta", - -10.179428100585938 - ], - [ - "▁letter", - -10.179814338684082 - ], - [ - "▁central", - -10.179931640625 - ], - [ - "▁Because", - -10.180054664611816 - ], - [ - "▁path", - -10.180349349975586 - ], - [ - "▁loc", - -10.180882453918457 - ], - [ - "▁files", - -10.182587623596191 - ], - [ - "▁population", - -10.182705879211426 - ], - [ - "▁explore", - -10.182723999023438 - ], - [ - "▁mid", - -10.182734489440918 - ], - [ - "▁concept", - -10.182748794555664 - ], - [ - "▁church", - -10.183015823364258 - ], - [ - "80", - -10.183026313781738 - ], - [ - "▁einfach", - -10.185834884643555 - ], - [ - "▁reasons", - -10.186690330505371 - ], - [ - "▁determine", - -10.186755180358887 - ], - [ - "▁February", - -10.187095642089844 - ], - [ - "▁evidence", - -10.18797779083252 - ], - [ - "▁sleep", - -10.188036918640137 - ], - 
[ - "▁Board", - -10.188652992248535 - ], - [ - "▁maybe", - -10.189635276794434 - ], - [ - "▁wasn", - -10.189701080322266 - ], - [ - "▁Monday", - -10.190101623535156 - ], - [ - "▁director", - -10.190481185913086 - ], - [ - "well", - -10.190974235534668 - ], - [ - "During", - -10.191001892089844 - ], - [ - "▁sweet", - -10.191061973571777 - ], - [ - "▁assist", - -10.19124984741211 - ], - [ - "▁police", - -10.191511154174805 - ], - [ - "▁repair", - -10.191729545593262 - ], - [ - "▁techniques", - -10.191733360290527 - ], - [ - "▁served", - -10.191808700561523 - ], - [ - "vi", - -10.192037582397461 - ], - [ - "▁sports", - -10.192331314086914 - ], - [ - "▁opening", - -10.192401885986328 - ], - [ - "▁ones", - -10.192731857299805 - ], - [ - "▁notice", - -10.193460464477539 - ], - [ - "▁PC", - -10.193547248840332 - ], - [ - "▁alte", - -10.194242477416992 - ], - [ - "▁Bi", - -10.194340705871582 - ], - [ - "▁cold", - -10.195606231689453 - ], - [ - "▁billion", - -10.195794105529785 - ], - [ - "▁balance", - -10.196361541748047 - ], - [ - "cer", - -10.196417808532715 - ], - [ - "▁nearly", - -10.196725845336914 - ], - [ - "▁wear", - -10.197259902954102 - ], - [ - "free", - -10.19760799407959 - ], - [ - "▁Have", - -10.197748184204102 - ], - [ - "▁comfort", - -10.199211120605469 - ], - [ - "▁studies", - -10.199225425720215 - ], - [ - "▁traffic", - -10.199540138244629 - ], - [ - "▁item", - -10.200214385986328 - ], - [ - "▁teaching", - -10.200467109680176 - ], - [ - "▁turned", - -10.201326370239258 - ], - [ - "isation", - -10.201354026794434 - ], - [ - "12", - -10.202038764953613 - ], - [ - "▁greater", - -10.202167510986328 - ], - [ - "▁knew", - -10.20233154296875 - ], - [ - "▁Association", - -10.203333854675293 - ], - [ - "▁Office", - -10.203802108764648 - ], - [ - "▁established", - -10.204085350036621 - ], - [ - "45", - -10.204170227050781 - ], - [ - "▁Love", - -10.204318046569824 - ], - [ - "▁changed", - -10.204882621765137 - ], - [ - "▁pan", - -10.205184936523438 - ], - [ - "van", 
- -10.20565414428711 - ], - [ - "▁Mi", - -10.205663681030273 - ], - [ - "▁tend", - -10.20637321472168 - ], - [ - "▁connection", - -10.206522941589355 - ], - [ - "▁lack", - -10.206954002380371 - ], - [ - "▁bank", - -10.208464622497559 - ], - [ - "cat", - -10.208720207214355 - ], - [ - "▁helped", - -10.209071159362793 - ], - [ - "▁spot", - -10.209417343139648 - ], - [ - "▁spring", - -10.20974063873291 - ], - [ - "▁Wi", - -10.210912704467773 - ], - [ - "▁Mac", - -10.211682319641113 - ], - [ - "▁Christ", - -10.212015151977539 - ], - [ - "▁saying", - -10.212835311889648 - ], - [ - "▁General", - -10.213062286376953 - ], - [ - "▁port", - -10.213099479675293 - ], - [ - "▁Mal", - -10.213156700134277 - ], - [ - "▁System", - -10.213486671447754 - ], - [ - "▁According", - -10.2152738571167 - ], - [ - "▁chiar", - -10.21568489074707 - ], - [ - "log", - -10.21576976776123 - ], - [ - "▁mix", - -10.215974807739258 - ], - [ - "▁Lake", - -10.216042518615723 - ], - [ - "▁intr", - -10.216590881347656 - ], - [ - "▁deliver", - -10.216793060302734 - ], - [ - "mon", - -10.216931343078613 - ], - [ - "▁Ro", - -10.217060089111328 - ], - [ - "▁Management", - -10.217504501342773 - ], - [ - "bri", - -10.218718528747559 - ], - [ - "▁pieces", - -10.218774795532227 - ], - [ - "▁announced", - -10.218926429748535 - ], - [ - "▁Yes", - -10.219268798828125 - ], - [ - "▁dark", - -10.220884323120117 - ], - [ - "val", - -10.221765518188477 - ], - [ - "▁rights", - -10.22309684753418 - ], - [ - "▁Diese", - -10.223100662231445 - ], - [ - "ki", - -10.223350524902344 - ], - [ - "vent", - -10.22375774383545 - ], - [ - "▁born", - -10.22380542755127 - ], - [ - "▁muss", - -10.224031448364258 - ], - [ - "compared", - -10.224660873413086 - ], - [ - "▁demand", - -10.224669456481934 - ], - [ - "▁handle", - -10.225493431091309 - ], - [ - "▁mode", - -10.226058006286621 - ], - [ - "lic", - -10.226137161254883 - ], - [ - "▁ahead", - -10.226436614990234 - ], - [ - "▁sharing", - -10.227599143981934 - ], - [ - "▁micro", - 
-10.227779388427734 - ], - [ - "▁Par", - -10.228626251220703 - ], - [ - "▁Every", - -10.22950553894043 - ], - [ - "▁bag", - -10.229736328125 - ], - [ - "▁daca", - -10.22974967956543 - ], - [ - "▁Apple", - -10.23022174835205 - ], - [ - "▁Mark", - -10.230239868164062 - ], - [ - "▁larger", - -10.231284141540527 - ], - [ - "eze", - -10.231978416442871 - ], - [ - "▁progress", - -10.232234001159668 - ], - [ - "▁stress", - -10.232929229736328 - ], - [ - "▁cards", - -10.233663558959961 - ], - [ - "▁driving", - -10.233738899230957 - ], - [ - "▁dry", - -10.233970642089844 - ], - [ - "▁relevant", - -10.234556198120117 - ], - [ - "▁Jo", - -10.234825134277344 - ], - [ - "▁tree", - -10.235036849975586 - ], - [ - "▁reported", - -10.235770225524902 - ], - [ - "ities", - -10.23577880859375 - ], - [ - "▁tea", - -10.235806465148926 - ], - [ - "▁although", - -10.236145973205566 - ], - [ - "▁Research", - -10.236261367797852 - ], - [ - "▁pool", - -10.23691463470459 - ], - [ - "▁fin", - -10.237163543701172 - ], - [ - "▁Und", - -10.238130569458008 - ], - [ - "▁decide", - -10.239217758178711 - ], - [ - "▁expert", - -10.239344596862793 - ], - [ - "rate", - -10.239428520202637 - ], - [ - "zeit", - -10.239971160888672 - ], - [ - "▁26", - -10.24040412902832 - ], - [ - "▁Ka", - -10.24056339263916 - ], - [ - "▁fix", - -10.240666389465332 - ], - [ - "igen", - -10.240713119506836 - ], - [ - "▁direction", - -10.241188049316406 - ], - [ - "▁star", - -10.241661071777344 - ], - [ - "▁middle", - -10.241889953613281 - ], - [ - "▁Ja", - -10.241962432861328 - ], - [ - "▁Land", - -10.24207878112793 - ], - [ - "ken", - -10.242605209350586 - ], - [ - "▁button", - -10.242630004882812 - ], - [ - "▁rules", - -10.242656707763672 - ], - [ - "▁également", - -10.242706298828125 - ], - [ - "▁viel", - -10.243158340454102 - ], - [ - "▁welcome", - -10.243682861328125 - ], - [ - "că", - -10.243932723999023 - ], - [ - "▁Top", - -10.245308876037598 - ], - [ - "▁allowed", - -10.245487213134766 - ], - [ - "▁tip", - 
-10.245584487915039 - ], - [ - "▁cei", - -10.245768547058105 - ], - [ - "▁Nous", - -10.246004104614258 - ], - [ - "té", - -10.246850967407227 - ], - [ - "▁unei", - -10.246903419494629 - ], - [ - "▁efforts", - -10.247260093688965 - ], - [ - "▁note", - -10.247719764709473 - ], - [ - "▁title", - -10.247977256774902 - ], - [ - "ric", - -10.248047828674316 - ], - [ - "berg", - -10.248252868652344 - ], - [ - "▁ainsi", - -10.248576164245605 - ], - [ - "▁led", - -10.248713493347168 - ], - [ - "▁alone", - -10.248786926269531 - ], - [ - "ward", - -10.249215126037598 - ], - [ - "▁vie", - -10.249323844909668 - ], - [ - "▁brain", - -10.249427795410156 - ], - [ - "light", - -10.250100135803223 - ], - [ - "▁Court", - -10.250598907470703 - ], - [ - "set", - -10.250869750976562 - ], - [ - "▁steps", - -10.251251220703125 - ], - [ - "pri", - -10.251391410827637 - ], - [ - "Q", - -10.251654624938965 - ], - [ - "sti", - -10.251938819885254 - ], - [ - "▁voice", - -10.252121925354004 - ], - [ - "▁models", - -10.252705574035645 - ], - [ - "▁parties", - -10.25442886352539 - ], - [ - "▁radio", - -10.255270957946777 - ], - [ - "▁mission", - -10.25545883178711 - ], - [ - "▁methods", - -10.255658149719238 - ], - [ - "▁Te", - -10.256019592285156 - ], - [ - "air", - -10.256489753723145 - ], - [ - "▁essay", - -10.256719589233398 - ], - [ - "my", - -10.256826400756836 - ], - [ - "▁competition", - -10.257049560546875 - ], - [ - "ses", - -10.257447242736816 - ], - [ - "▁serious", - -10.258724212646484 - ], - [ - "▁Ti", - -10.258733749389648 - ], - [ - "▁Hand", - -10.259561538696289 - ], - [ - "not", - -10.25958251953125 - ], - [ - "▁winter", - -10.261277198791504 - ], - [ - "24", - -10.261724472045898 - ], - [ - "▁vision", - -10.26174545288086 - ], - [ - "▁technical", - -10.262110710144043 - ], - [ - "▁cross", - -10.262799263000488 - ], - [ - "▁update", - -10.262947082519531 - ], - [ - "▁Team", - -10.263564109802246 - ], - [ - "▁evening", - -10.264286041259766 - ], - [ - "▁experts", - 
-10.26435661315918 - ], - [ - "part", - -10.264640808105469 - ], - [ - "▁wo", - -10.265190124511719 - ], - [ - "▁App", - -10.265729904174805 - ], - [ - "▁peu", - -10.266267776489258 - ], - [ - "▁mich", - -10.26630687713623 - ], - [ - "▁reports", - -10.267001152038574 - ], - [ - "▁km", - -10.267594337463379 - ], - [ - "▁print", - -10.2678804397583 - ], - [ - "▁Hotel", - -10.268101692199707 - ], - [ - "▁earlier", - -10.268235206604004 - ], - [ - "▁uses", - -10.26826286315918 - ], - [ - "▁menu", - -10.268416404724121 - ], - [ - "▁miles", - -10.26845645904541 - ], - [ - "▁classes", - -10.268463134765625 - ], - [ - "▁mo", - -10.268525123596191 - ], - [ - "▁loan", - -10.2691011428833 - ], - [ - "▁host", - -10.269192695617676 - ], - [ - "▁author", - -10.269274711608887 - ], - [ - "-1", - -10.269434928894043 - ], - [ - "▁bun", - -10.269940376281738 - ], - [ - "19", - -10.270011901855469 - ], - [ - "uch", - -10.270670890808105 - ], - [ - "ble", - -10.270813941955566 - ], - [ - "▁holiday", - -10.270859718322754 - ], - [ - "los", - -10.271894454956055 - ], - [ - "▁looked", - -10.272663116455078 - ], - [ - "▁Test", - -10.272759437561035 - ], - [ - "▁moved", - -10.273000717163086 - ], - [ - "▁numbers", - -10.273306846618652 - ], - [ - "▁covered", - -10.273405075073242 - ], - [ - "ker", - -10.273696899414062 - ], - [ - "TM", - -10.273768424987793 - ], - [ - "▁album", - -10.274727821350098 - ], - [ - "▁27", - -10.27476692199707 - ], - [ - "▁când", - -10.27523422241211 - ], - [ - "▁shopping", - -10.275248527526855 - ], - [ - "▁Ihr", - -10.27531623840332 - ], - [ - "▁requires", - -10.275786399841309 - ], - [ - "▁USA", - -10.275909423828125 - ], - [ - "000", - -10.275951385498047 - ], - [ - "▁official", - -10.276010513305664 - ], - [ - "▁states", - -10.276346206665039 - ], - [ - "▁tips", - -10.276570320129395 - ], - [ - "ible", - -10.277321815490723 - ], - [ - "▁Lu", - -10.27756404876709 - ], - [ - "ces", - -10.278343200683594 - ], - [ - "▁figure", - -10.27839469909668 - ], - [ - 
"▁Take", - -10.278576850891113 - ], - [ - "▁după", - -10.278687477111816 - ], - [ - "▁teams", - -10.278980255126953 - ], - [ - "▁song", - -10.279138565063477 - ], - [ - "▁master", - -10.279386520385742 - ], - [ - "ED", - -10.279841423034668 - ], - [ - "▁cleaning", - -10.280523300170898 - ], - [ - "▁drop", - -10.280651092529297 - ], - [ - "▁primary", - -10.2808837890625 - ], - [ - "▁Life", - -10.28108024597168 - ], - [ - "▁carry", - -10.281129837036133 - ], - [ - "▁initial", - -10.281270980834961 - ], - [ - "▁encore", - -10.281617164611816 - ], - [ - "▁Add", - -10.281670570373535 - ], - [ - "▁woman", - -10.282076835632324 - ], - [ - "▁Water", - -10.282219886779785 - ], - [ - "▁advantage", - -10.28277587890625 - ], - [ - "see", - -10.283234596252441 - ], - [ - "ré", - -10.283341407775879 - ], - [ - "▁motor", - -10.283479690551758 - ], - [ - "mel", - -10.2838716506958 - ], - [ - "▁finding", - -10.284419059753418 - ], - [ - "▁plastic", - -10.286365509033203 - ], - [ - "▁IT", - -10.286602973937988 - ], - [ - "▁Church", - -10.286916732788086 - ], - [ - "▁shape", - -10.287345886230469 - ], - [ - "▁gets", - -10.287763595581055 - ], - [ - "▁followed", - -10.288186073303223 - ], - [ - "▁100%", - -10.288315773010254 - ], - [ - "▁Program", - -10.28912353515625 - ], - [ - "▁Another", - -10.28934383392334 - ], - [ - "▁zwei", - -10.289522171020508 - ], - [ - "▁father", - -10.289839744567871 - ], - [ - "▁rich", - -10.290282249450684 - ], - [ - "où", - -10.290810585021973 - ], - [ - "▁lines", - -10.290934562683105 - ], - [ - "▁distance", - -10.291757583618164 - ], - [ - "▁cell", - -10.291876792907715 - ], - [ - "▁parte", - -10.292072296142578 - ], - [ - "bit", - -10.292445182800293 - ], - [ - "▁perhaps", - -10.292749404907227 - ], - [ - "rii", - -10.293590545654297 - ], - [ - "▁session", - -10.294137954711914 - ], - [ - "▁Pentru", - -10.294528007507324 - ], - [ - "ING", - -10.295049667358398 - ], - [ - "ants", - -10.295478820800781 - ], - [ - "▁remain", - -10.295543670654297 - ], - 
[ - "13", - -10.295588493347168 - ], - [ - "▁finished", - -10.295763969421387 - ], - [ - "bel", - -10.298725128173828 - ], - [ - "▁organizations", - -10.299455642700195 - ], - [ - "▁Any", - -10.299896240234375 - ], - [ - "▁taste", - -10.300277709960938 - ], - [ - "Whether", - -10.300600051879883 - ], - [ - "ram", - -10.300874710083008 - ], - [ - "like", - -10.301307678222656 - ], - [ - "▁artist", - -10.301319122314453 - ], - [ - "aire", - -10.303369522094727 - ], - [ - "▁French", - -10.303386688232422 - ], - [ - "▁donc", - -10.303634643554688 - ], - [ - "ow", - -10.30386734008789 - ], - [ - "▁200", - -10.303993225097656 - ], - [ - "▁paint", - -10.304465293884277 - ], - [ - "▁Open", - -10.304535865783691 - ], - [ - "▁appear", - -10.304722785949707 - ], - [ - "▁Washington", - -10.304765701293945 - ], - [ - "▁target", - -10.30491828918457 - ], - [ - "pir", - -10.305578231811523 - ], - [ - "▁generally", - -10.305987358093262 - ], - [ - "▁British", - -10.306790351867676 - ], - [ - "▁seven", - -10.306937217712402 - ], - [ - "▁bio", - -10.307162284851074 - ], - [ - "▁sector", - -10.307358741760254 - ], - [ - "90", - -10.30777359008789 - ], - [ - "▁fapt", - -10.307881355285645 - ], - [ - "▁prefer", - -10.308316230773926 - ], - [ - "▁partner", - -10.308427810668945 - ], - [ - "ăm", - -10.308547973632812 - ], - [ - "▁diverse", - -10.308610916137695 - ], - [ - "▁onto", - -10.309283256530762 - ], - [ - "▁refer", - -10.309828758239746 - ], - [ - "▁Law", - -10.310302734375 - ], - [ - "▁Ri", - -10.310596466064453 - ], - [ - "▁critical", - -10.310735702514648 - ], - [ - "▁copy", - -10.310897827148438 - ], - [ - "ck", - -10.311517715454102 - ], - [ - "ix", - -10.311732292175293 - ], - [ - "tag", - -10.311793327331543 - ], - [ - "▁Road", - -10.311936378479004 - ], - [ - "▁concern", - -10.312053680419922 - ], - [ - "▁maximum", - -10.312095642089844 - ], - [ - "▁train", - -10.312148094177246 - ], - [ - "▁într", - -10.312189102172852 - ], - [ - "ura", - -10.313023567199707 - ], - [ - 
"▁Qu", - -10.313481330871582 - ], - [ - "▁links", - -10.313538551330566 - ], - [ - "▁audience", - -10.313969612121582 - ], - [ - "▁foot", - -10.314554214477539 - ], - [ - "▁Blue", - -10.314605712890625 - ], - [ - "ification", - -10.315386772155762 - ], - [ - "▁developing", - -10.315847396850586 - ], - [ - "▁interior", - -10.315876007080078 - ], - [ - "=", - -10.316556930541992 - ], - [ - "▁aceasta", - -10.31698989868164 - ], - [ - "▁dedicated", - -10.317373275756836 - ], - [ - "▁movement", - -10.317383766174316 - ], - [ - "sta", - -10.318868637084961 - ], - [ - "▁challenges", - -10.319018363952637 - ], - [ - "inte", - -10.319074630737305 - ], - [ - "▁Euro", - -10.319075584411621 - ], - [ - "▁classic", - -10.320341110229492 - ], - [ - "▁Um", - -10.320767402648926 - ], - [ - "▁alternative", - -10.321407318115234 - ], - [ - "mann", - -10.321614265441895 - ], - [ - "▁Une", - -10.322278022766113 - ], - [ - "qu", - -10.322415351867676 - ], - [ - "▁heavy", - -10.322434425354004 - ], - [ - "▁install", - -10.322484970092773 - ], - [ - "▁fiind", - -10.322504043579102 - ], - [ - "▁leaders", - -10.323003768920898 - ], - [ - "▁views", - -10.323019981384277 - ], - [ - "▁www", - -10.323084831237793 - ], - [ - "▁standards", - -10.323270797729492 - ], - [ - "ong", - -10.323580741882324 - ], - [ - "40", - -10.323833465576172 - ], - [ - "▁cm", - -10.323848724365234 - ], - [ - "▁park", - -10.324324607849121 - ], - [ - "▁himself", - -10.324419021606445 - ], - [ - "▁People", - -10.324649810791016 - ], - [ - "▁separate", - -10.324843406677246 - ], - [ - "▁secure", - -10.325018882751465 - ], - [ - "sie", - -10.325084686279297 - ], - [ - "▁maintenance", - -10.325199127197266 - ], - [ - "▁encourage", - -10.32766056060791 - ], - [ - "ein", - -10.328139305114746 - ], - [ - "▁reviews", - -10.328202247619629 - ], - [ - "▁Michael", - -10.328210830688477 - ], - [ - "▁background", - -10.328283309936523 - ], - [ - "▁therefore", - -10.328433990478516 - ], - [ - "▁server", - -10.328487396240234 - ], 
- [ - "▁dream", - -10.328742027282715 - ], - [ - "ping", - -10.329025268554688 - ], - [ - "▁block", - -10.329855918884277 - ], - [ - "▁2009", - -10.330734252929688 - ], - [ - "▁facilities", - -10.330931663513184 - ], - [ - "▁II", - -10.331367492675781 - ], - [ - "▁attend", - -10.33156967163086 - ], - [ - "▁cap", - -10.33224105834961 - ], - [ - "35", - -10.332416534423828 - ], - [ - "▁steel", - -10.332796096801758 - ], - [ - "▁shared", - -10.333391189575195 - ], - [ - "▁doctor", - -10.333939552307129 - ], - [ - "▁River", - -10.33411693572998 - ], - [ - "▁Bay", - -10.334456443786621 - ], - [ - "▁length", - -10.335005760192871 - ], - [ - "▁jobs", - -10.335466384887695 - ], - [ - "▁Plus", - -10.335992813110352 - ], - [ - "▁station", - -10.336140632629395 - ], - [ - "▁elements", - -10.336268424987793 - ], - [ - "▁rock", - -10.336668014526367 - ], - [ - "▁professionals", - -10.336670875549316 - ], - [ - "cle", - -10.336777687072754 - ], - [ - "▁dont", - -10.336873054504395 - ], - [ - "urilor", - -10.337142944335938 - ], - [ - "▁gain", - -10.337271690368652 - ], - [ - "▁programme", - -10.337540626525879 - ], - [ - "▁Cor", - -10.338377952575684 - ], - [ - "▁leader", - -10.338542938232422 - ], - [ - "ării", - -10.33876895904541 - ], - [ - "▁>", - -10.339137077331543 - ], - [ - "▁task", - -10.339471817016602 - ], - [ - "▁seeing", - -10.339943885803223 - ], - [ - "▁statement", - -10.34045696258545 - ], - [ - "vin", - -10.341094017028809 - ], - [ - "▁fish", - -10.341700553894043 - ], - [ - "▁advanced", - -10.342403411865234 - ], - [ - "▁discuss", - -10.342494010925293 - ], - [ - "die", - -10.342904090881348 - ], - [ - "isch", - -10.342944145202637 - ], - [ - "▁plenty", - -10.342947959899902 - ], - [ - "▁Hall", - -10.343120574951172 - ], - [ - "▁Other", - -10.343339920043945 - ], - [ - "▁homes", - -10.344944953918457 - ], - [ - "▁Ni", - -10.345016479492188 - ], - [ - "▁testing", - -10.345102310180664 - ], - [ - "▁Last", - -10.345392227172852 - ], - [ - "▁Note", - 
-10.345595359802246 - ], - [ - "▁talking", - -10.345934867858887 - ], - [ - "▁exchange", - -10.347042083740234 - ], - [ - "▁exercise", - -10.347189903259277 - ], - [ - "▁cea", - -10.347546577453613 - ], - [ - "▁wife", - -10.34820556640625 - ], - [ - "▁Für", - -10.348480224609375 - ], - [ - "▁Texas", - -10.34981918334961 - ], - [ - "▁fr", - -10.35065746307373 - ], - [ - "▁speak", - -10.350894927978516 - ], - [ - "17", - -10.351007461547852 - ], - [ - "70", - -10.351462364196777 - ], - [ - "▁promote", - -10.351851463317871 - ], - [ - "tul", - -10.351990699768066 - ], - [ - "apos", - -10.35208511352539 - ], - [ - "▁Jahr", - -10.35214900970459 - ], - [ - "▁Trump", - -10.352204322814941 - ], - [ - "▁ohne", - -10.352357864379883 - ], - [ - "▁learned", - -10.353700637817383 - ], - [ - "▁Sp", - -10.353803634643555 - ], - [ - "▁owner", - -10.354275703430176 - ], - [ - "mor", - -10.354422569274902 - ], - [ - "▁fois", - -10.354452133178711 - ], - [ - "▁meaning", - -10.35518741607666 - ], - [ - "▁dacă", - -10.355249404907227 - ], - [ - "nic", - -10.355484008789062 - ], - [ - "а", - -10.355525970458984 - ], - [ - "14", - -10.355767250061035 - ], - [ - "▁driver", - -10.356258392333984 - ], - [ - "▁Amazon", - -10.3567533493042 - ], - [ - "▁flow", - -10.358469009399414 - ], - [ - "▁shot", - -10.358726501464844 - ], - [ - "▁sous", - -10.35914421081543 - ], - [ - "▁Gold", - -10.359339714050293 - ], - [ - "▁straight", - -10.359562873840332 - ], - [ - "▁conference", - -10.359610557556152 - ], - [ - "▁peste", - -10.359662055969238 - ], - [ - "whose", - -10.36030101776123 - ], - [ - "▁installation", - -10.36050796508789 - ], - [ - "▁produced", - -10.360607147216797 - ], - [ - "▁independent", - -10.36192512512207 - ], - [ - "▁Institute", - -10.362021446228027 - ], - [ - "▁James", - -10.362373352050781 - ], - [ - "▁mental", - -10.362601280212402 - ], - [ - "ara", - -10.362798690795898 - ], - [ - "ium", - -10.363021850585938 - ], - [ - "▁husband", - -10.36306095123291 - ], - [ - "▁guests", 
- -10.363907814025879 - ], - [ - "27", - -10.364319801330566 - ], - [ - "▁Che", - -10.364651679992676 - ], - [ - "▁Indian", - -10.364694595336914 - ], - [ - "zer", - -10.36478042602539 - ], - [ - "▁minimum", - -10.364962577819824 - ], - [ - "500", - -10.365096092224121 - ], - [ - "▁sit", - -10.36561393737793 - ], - [ - "put", - -10.36656379699707 - ], - [ - "▁avea", - -10.36665153503418 - ], - [ - "▁ride", - -10.367088317871094 - ], - [ - "gan", - -10.367152214050293 - ], - [ - "▁Ke", - -10.36747932434082 - ], - [ - "book", - -10.367515563964844 - ], - [ - "ages", - -10.368019104003906 - ], - [ - "▁presented", - -10.368157386779785 - ], - [ - "▁Com", - -10.368927955627441 - ], - [ - "▁Call", - -10.369053840637207 - ], - [ - "▁fee", - -10.369847297668457 - ], - [ - "ări", - -10.369905471801758 - ], - [ - "▁putea", - -10.37072467803955 - ], - [ - "▁Public", - -10.371030807495117 - ], - [ - "▁pa", - -10.371152877807617 - ], - [ - "28", - -10.371233940124512 - ], - [ - "▁Director", - -10.37126350402832 - ], - [ - "▁contains", - -10.3717622756958 - ], - [ - "▁factors", - -10.372554779052734 - ], - [ - "▁famous", - -10.372614860534668 - ], - [ - "▁bathroom", - -10.373040199279785 - ], - [ - "▁core", - -10.37353229522705 - ], - [ - "▁viele", - -10.373610496520996 - ], - [ - "▁acum", - -10.374361991882324 - ], - [ - "▁animal", - -10.374407768249512 - ], - [ - "▁Ihnen", - -10.374425888061523 - ], - [ - "▁Find", - -10.374545097351074 - ], - [ - "▁Fall", - -10.374861717224121 - ], - [ - "ford", - -10.376051902770996 - ], - [ - "▁coverage", - -10.3765287399292 - ], - [ - "▁smart", - -10.376830101013184 - ], - [ - "ries", - -10.376893997192383 - ], - [ - "▁memory", - -10.3772554397583 - ], - [ - "▁dance", - -10.377443313598633 - ], - [ - "11", - -10.37746810913086 - ], - [ - "▁communities", - -10.377655982971191 - ], - [ - "eurs", - -10.378050804138184 - ], - [ - "▁Florida", - -10.378463745117188 - ], - [ - "▁sport", - -10.379366874694824 - ], - [ - "▁bus", - -10.37992000579834 
- ], - [ - "▁colors", - -10.379969596862793 - ], - [ - "▁affect", - -10.380044937133789 - ], - [ - "▁score", - -10.380183219909668 - ], - [ - "▁properties", - -10.38050365447998 - ], - [ - "18", - -10.380593299865723 - ], - [ - "▁astfel", - -10.381312370300293 - ], - [ - "▁beach", - -10.382407188415527 - ], - [ - "▁friendly", - -10.382795333862305 - ], - [ - "izing", - -10.38288688659668 - ], - [ - "▁buying", - -10.383146286010742 - ], - [ - "▁forget", - -10.383195877075195 - ], - [ - "este", - -10.383198738098145 - ], - [ - "▁capacity", - -10.38360595703125 - ], - [ - "▁lose", - -10.383692741394043 - ], - [ - "▁listed", - -10.38407039642334 - ], - [ - "ica", - -10.384084701538086 - ], - [ - "han", - -10.384085655212402 - ], - [ - "▁selbst", - -10.384390830993652 - ], - [ - "▁values", - -10.384391784667969 - ], - [ - "▁Power", - -10.384559631347656 - ], - [ - "▁comments", - -10.384831428527832 - ], - [ - "eux", - -10.385346412658691 - ], - [ - "ați", - -10.385419845581055 - ], - [ - "▁context", - -10.385710716247559 - ], - [ - "liche", - -10.385944366455078 - ], - [ - "▁keeping", - -10.38620662689209 - ], - [ - "▁2008", - -10.38647174835205 - ], - [ - "▁su", - -10.386670112609863 - ], - [ - "▁biggest", - -10.386838912963867 - ], - [ - "▁fiecare", - -10.387356758117676 - ], - [ - "ight", - -10.38845157623291 - ], - [ - "▁toute", - -10.389808654785156 - ], - [ - "▁dinner", - -10.389827728271484 - ], - [ - "bau", - -10.390706062316895 - ], - [ - "▁Mai", - -10.390762329101562 - ], - [ - "▁status", - -10.390776634216309 - ], - [ - "rez", - -10.391340255737305 - ], - [ - "▁selected", - -10.391549110412598 - ], - [ - "▁cells", - -10.392601013183594 - ], - [ - "▁eight", - -10.393319129943848 - ], - [ - "▁package", - -10.393320083618164 - ], - [ - "▁scale", - -10.39333724975586 - ], - [ - "din", - -10.39336109161377 - ], - [ - "▁Who", - -10.393381118774414 - ], - [ - "▁century", - -10.393399238586426 - ], - [ - "▁bi", - -10.393516540527344 - ], - [ - "▁Africa", - 
-10.39384937286377 - ], - [ - "▁http", - -10.394133567810059 - ], - [ - "▁named", - -10.394230842590332 - ], - [ - "▁adding", - -10.394901275634766 - ], - [ - "▁mention", - -10.395039558410645 - ], - [ - "▁casino", - -10.395421981811523 - ], - [ - "▁couldn", - -10.395624160766602 - ], - [ - "▁outdoor", - -10.395912170410156 - ], - [ - "▁sugar", - -10.3960542678833 - ], - [ - "▁prepared", - -10.396124839782715 - ], - [ - "21", - -10.396528244018555 - ], - [ - "▁Ba", - -10.396632194519043 - ], - [ - "vers", - -10.396697998046875 - ], - [ - "ration", - -10.396773338317871 - ], - [ - "▁ja", - -10.397035598754883 - ], - [ - "▁aspect", - -10.397224426269531 - ], - [ - "▁31", - -10.397462844848633 - ], - [ - "▁treat", - -10.397475242614746 - ], - [ - "tru", - -10.397841453552246 - ], - [ - "▁flat", - -10.397890090942383 - ], - [ - "32", - -10.397989273071289 - ], - [ - "▁reality", - -10.398238182067871 - ], - [ - "▁waste", - -10.39876937866211 - ], - [ - "▁King", - -10.399649620056152 - ], - [ - "▁drug", - -10.399870872497559 - ], - [ - "▁operations", - -10.400120735168457 - ], - [ - "▁aim", - -10.40042495727539 - ], - [ - "▁fans", - -10.400444984436035 - ], - [ - "▁vers", - -10.400891304016113 - ], - [ - "▁plants", - -10.400971412658691 - ], - [ - "▁Dis", - -10.401477813720703 - ], - [ - "▁Daten", - -10.401510238647461 - ], - [ - "être", - -10.40267276763916 - ], - [ - "▁placed", - -10.40326976776123 - ], - [ - "▁bon", - -10.403977394104004 - ], - [ - "beim", - -10.4041109085083 - ], - [ - "▁slow", - -10.40501880645752 - ], - [ - "cri", - -10.405512809753418 - ], - [ - "▁Care", - -10.405691146850586 - ], - [ - "mes", - -10.406211853027344 - ], - [ - "26", - -10.406257629394531 - ], - [ - "box", - -10.406330108642578 - ], - [ - "▁helpful", - -10.406362533569336 - ], - [ - "▁documents", - -10.406543731689453 - ], - [ - "▁visitors", - -10.406773567199707 - ], - [ - "ture", - -10.406862258911133 - ], - [ - "▁Menschen", - -10.406891822814941 - ], - [ - "▁Chi", - 
-10.406975746154785 - ], - [ - "▁recipe", - -10.40764045715332 - ], - [ - "▁kept", - -10.407693862915039 - ], - [ - "▁Grand", - -10.407915115356445 - ], - [ - "▁operating", - -10.408178329467773 - ], - [ - "point", - -10.408329010009766 - ], - [ - "▁bin", - -10.40837287902832 - ], - [ - "▁Tri", - -10.40845775604248 - ], - [ - "Be", - -10.408512115478516 - ], - [ - "▁experiences", - -10.40856647491455 - ], - [ - "▁academic", - -10.408608436584473 - ], - [ - "▁finden", - -10.40870475769043 - ], - [ - "▁sera", - -10.409092903137207 - ], - [ - "act", - -10.410541534423828 - ], - [ - "▁Pa", - -10.410907745361328 - ], - [ - "▁society", - -10.411056518554688 - ], - [ - "▁combination", - -10.411237716674805 - ], - [ - "5%", - -10.41182804107666 - ], - [ - "▁owners", - -10.41188907623291 - ], - [ - "▁poor", - -10.412039756774902 - ], - [ - "▁Robert", - -10.412378311157227 - ], - [ - "▁military", - -10.412964820861816 - ], - [ - "▁economy", - -10.413033485412598 - ], - [ - "▁aware", - -10.413055419921875 - ], - [ - "rot", - -10.413443565368652 - ], - [ - "mie", - -10.413544654846191 - ], - [ - "▁Thursday", - -10.414399147033691 - ], - [ - "▁2011", - -10.41490650177002 - ], - [ - "▁fantastic", - -10.41554069519043 - ], - [ - "▁numerous", - -10.415921211242676 - ], - [ - "▁fair", - -10.4165620803833 - ], - [ - "med", - -10.416753768920898 - ], - [ - "▁welche", - -10.416893005371094 - ], - [ - "▁fruit", - -10.41712760925293 - ], - [ - "ku", - -10.417325019836426 - ], - [ - "▁Social", - -10.417583465576172 - ], - [ - "▁funds", - -10.418157577514648 - ], - [ - "▁atunci", - -10.418214797973633 - ], - [ - "▁Part", - -10.418238639831543 - ], - [ - "▁Big", - -10.418301582336426 - ], - [ - "▁2010", - -10.419414520263672 - ], - [ - "▁detail", - -10.419889450073242 - ], - [ - "▁Peter", - -10.419942855834961 - ], - [ - "ani", - -10.420196533203125 - ], - [ - "▁Wie", - -10.420795440673828 - ], - [ - "▁Tu", - -10.421649932861328 - ], - [ - "ear", - -10.421706199645996 - ], - [ - "▁Wenn", - 
-10.421941757202148 - ], - [ - "▁manager", - -10.42199993133545 - ], - [ - "▁Dan", - -10.422409057617188 - ], - [ - "▁Pi", - -10.42257308959961 - ], - [ - "▁wants", - -10.422652244567871 - ], - [ - "▁Data", - -10.42322826385498 - ], - [ - "pos", - -10.42387580871582 - ], - [ - "▁older", - -10.423946380615234 - ], - [ - "▁Download", - -10.424071311950684 - ], - [ - "▁Was", - -10.424107551574707 - ], - [ - "▁corner", - -10.424195289611816 - ], - [ - "▁president", - -10.424199104309082 - ], - [ - "mas", - -10.424248695373535 - ], - [ - "▁smaller", - -10.424361228942871 - ], - [ - "▁bright", - -10.424459457397461 - ], - [ - "▁proper", - -10.424582481384277 - ], - [ - "▁Kinder", - -10.424637794494629 - ], - [ - "▁Two", - -10.424668312072754 - ], - [ - "▁award", - -10.42471694946289 - ], - [ - "▁premier", - -10.425211906433105 - ], - [ - "▁seek", - -10.425646781921387 - ], - [ - "▁thank", - -10.425662994384766 - ], - [ - "▁proud", - -10.426509857177734 - ], - [ - "▁workers", - -10.426774024963379 - ], - [ - "▁2000", - -10.426970481872559 - ], - [ - "▁gone", - -10.427482604980469 - ], - [ - "▁medium", - -10.427693367004395 - ], - [ - "▁grade", - -10.42777156829834 - ], - [ - "▁Ru", - -10.427800178527832 - ], - [ - "cro", - -10.427851676940918 - ], - [ - "▁interview", - -10.428311347961426 - ], - [ - "23", - -10.428787231445312 - ], - [ - "▁mari", - -10.429442405700684 - ], - [ - "▁80", - -10.429756164550781 - ], - [ - "▁Ga", - -10.430047035217285 - ], - [ - "▁90", - -10.431839942932129 - ], - [ - "▁anderen", - -10.432605743408203 - ], - [ - "▁cultural", - -10.433018684387207 - ], - [ - "but", - -10.433144569396973 - ], - [ - "rum", - -10.433300018310547 - ], - [ - "get", - -10.43338680267334 - ], - [ - "▁pop", - -10.433582305908203 - ], - [ - "▁Information", - -10.433594703674316 - ], - [ - "▁press", - -10.434972763061523 - ], - [ - "▁Project", - -10.435359001159668 - ], - [ - "▁excited", - -10.435755729675293 - ], - [ - "▁Saint", - -10.436088562011719 - ], - [ - 
"▁England", - -10.436192512512207 - ], - [ - "▁beauty", - -10.43643856048584 - ], - [ - "▁agreement", - -10.436464309692383 - ], - [ - "▁Like", - -10.437565803527832 - ], - [ - "▁strength", - -10.437664985656738 - ], - [ - "▁waiting", - -10.438165664672852 - ], - [ - "и", - -10.438270568847656 - ], - [ - "Le", - -10.438329696655273 - ], - [ - "▁residents", - -10.43835735321045 - ], - [ - "▁Ben", - -10.438603401184082 - ], - [ - "▁mentioned", - -10.439260482788086 - ], - [ - "▁etwas", - -10.43930721282959 - ], - [ - "▁rooms", - -10.439347267150879 - ], - [ - "▁neue", - -10.439501762390137 - ], - [ - "▁Microsoft", - -10.439726829528809 - ], - [ - "▁passed", - -10.440205574035645 - ], - [ - "▁sea", - -10.440893173217773 - ], - [ - "▁electric", - -10.441244125366211 - ], - [ - "▁forms", - -10.441384315490723 - ], - [ - "▁Central", - -10.441597938537598 - ], - [ - "▁Lord", - -10.442625999450684 - ], - [ - "ute", - -10.442763328552246 - ], - [ - "▁pré", - -10.442790031433105 - ], - [ - "▁square", - -10.44308090209961 - ], - [ - "itatea", - -10.443451881408691 - ], - [ - "▁debt", - -10.443757057189941 - ], - [ - "▁street", - -10.443975448608398 - ], - [ - "▁pi", - -10.444917678833008 - ], - [ - "▁happened", - -10.445326805114746 - ], - [ - "▁Tuesday", - -10.445592880249023 - ], - [ - "recht", - -10.446094512939453 - ], - [ - "▁Eine", - -10.44627857208252 - ], - [ - "▁Set", - -10.446768760681152 - ], - [ - "▁federal", - -10.4468412399292 - ], - [ - "CC", - -10.446905136108398 - ], - [ - "....", - -10.446938514709473 - ], - [ - "lig", - -10.447463035583496 - ], - [ - "▁Christian", - -10.44870662689209 - ], - [ - "▁truth", - -10.449213981628418 - ], - [ - "▁map", - -10.449728012084961 - ], - [ - "▁secret", - -10.449979782104492 - ], - [ - "▁Chinese", - -10.450844764709473 - ], - [ - "hol", - -10.450895309448242 - ], - [ - "▁wrote", - -10.451505661010742 - ], - [ - "▁hospital", - -10.451783180236816 - ], - [ - "▁Island", - -10.451870918273926 - ], - [ - "▁frame", - 
-10.451946258544922 - ], - [ - "▁sources", - -10.452117919921875 - ], - [ - "pan", - -10.453242301940918 - ], - [ - "▁29", - -10.453530311584473 - ], - [ - "▁changing", - -10.454547882080078 - ], - [ - "▁Where", - -10.454627990722656 - ], - [ - "▁negative", - -10.45471477508545 - ], - [ - "▁processes", - -10.45491886138916 - ], - [ - "▁leadership", - -10.455029487609863 - ], - [ - "▁nos", - -10.455195426940918 - ], - [ - "▁info", - -10.455780029296875 - ], - [ - "▁Gu", - -10.45595645904541 - ], - [ - "▁CO", - -10.45605182647705 - ], - [ - "▁reference", - -10.456884384155273 - ], - [ - "▁corporate", - -10.457097053527832 - ], - [ - "▁characters", - -10.457563400268555 - ], - [ - "▁dining", - -10.4577054977417 - ], - [ - "▁becoming", - -10.459708213806152 - ], - [ - "▁4.", - -10.460311889648438 - ], - [ - "▁Science", - -10.460626602172852 - ], - [ - "▁Education", - -10.461943626403809 - ], - [ - "▁camp", - -10.46207046508789 - ], - [ - "fall", - -10.462146759033203 - ], - [ - "▁Auch", - -10.462471961975098 - ], - [ - "▁topic", - -10.462519645690918 - ], - [ - "▁influence", - -10.463460922241211 - ], - [ - "▁70", - -10.463892936706543 - ], - [ - "▁identify", - -10.464459419250488 - ], - [ - "▁(19", - -10.464646339416504 - ], - [ - "care", - -10.465216636657715 - ], - [ - "ions", - -10.466215133666992 - ], - [ - "ray", - -10.4663724899292 - ], - [ - "▁Both", - -10.466577529907227 - ], - [ - "▁collect", - -10.466997146606445 - ], - [ - "▁practices", - -10.467667579650879 - ], - [ - "▁fight", - -10.468058586120605 - ], - [ - "▁injury", - -10.46873664855957 - ], - [ - "▁nici", - -10.46905517578125 - ], - [ - "▁depuis", - -10.469563484191895 - ], - [ - "▁actions", - -10.469609260559082 - ], - [ - "▁Wednesday", - -10.47089958190918 - ], - [ - "▁bill", - -10.471086502075195 - ], - [ - "▁cheap", - -10.471318244934082 - ], - [ - "lui", - -10.471719741821289 - ], - [ - "▁awesome", - -10.471731185913086 - ], - [ - "tig", - -10.472554206848145 - ], - [ - "▁expensive", - 
-10.472636222839355 - ], - [ - "ceea", - -10.472834587097168 - ], - [ - "▁exact", - -10.472907066345215 - ], - [ - "22", - -10.473462104797363 - ], - [ - "▁avant", - -10.47352123260498 - ], - [ - "▁fat", - -10.47353744506836 - ], - [ - "▁spending", - -10.474353790283203 - ], - [ - "▁designs", - -10.47608470916748 - ], - [ - "▁damit", - -10.4761323928833 - ], - [ - "▁comp", - -10.47619342803955 - ], - [ - "▁whatever", - -10.476434707641602 - ], - [ - "▁Light", - -10.476442337036133 - ], - [ - "▁quarter", - -10.47680377960205 - ], - [ - "hand", - -10.477301597595215 - ], - [ - "▁connected", - -10.477584838867188 - ], - [ - "▁technologies", - -10.47772216796875 - ], - [ - "ges", - -10.477808952331543 - ], - [ - "▁shower", - -10.478998184204102 - ], - [ - "▁500", - -10.47923469543457 - ], - [ - "▁Time", - -10.479436874389648 - ], - [ - "▁zone", - -10.480525970458984 - ], - [ - "▁vote", - -10.480624198913574 - ], - [ - "▁andere", - -10.480871200561523 - ], - [ - "▁otherwise", - -10.480988502502441 - ], - [ - "tur", - -10.481294631958008 - ], - [ - "▁happens", - -10.481504440307617 - ], - [ - "hin", - -10.481597900390625 - ], - [ - "▁volume", - -10.482161521911621 - ], - [ - "▁thousands", - -10.482391357421875 - ], - [ - "war", - -10.482551574707031 - ], - [ - "▁Play", - -10.482900619506836 - ], - [ - "▁temperature", - -10.48371410369873 - ], - [ - "▁industrial", - -10.483830451965332 - ], - [ - "▁fuel", - -10.483915328979492 - ], - [ - "100", - -10.48409366607666 - ], - [ - "top", - -10.484210014343262 - ], - [ - "kin", - -10.484312057495117 - ], - [ - "▁efficient", - -10.484414100646973 - ], - [ - "teil", - -10.484525680541992 - ], - [ - "alt", - -10.484578132629395 - ], - [ - "▁monde", - -10.48483657836914 - ], - [ - "▁Ra", - -10.484899520874023 - ], - [ - "▁bedroom", - -10.485103607177734 - ], - [ - "▁showing", - -10.485316276550293 - ], - [ - "▁continued", - -10.485490798950195 - ], - [ - "▁Plan", - -10.48552131652832 - ], - [ - "▁assistance", - -10.486014366149902 
- ], - [ - "▁discover", - -10.48622989654541 - ], - [ - "▁Year", - -10.486238479614258 - ], - [ - "▁applied", - -10.486433029174805 - ], - [ - "▁audio", - -10.48755931854248 - ], - [ - "▁thus", - -10.487645149230957 - ], - [ - "▁permet", - -10.48806095123291 - ], - [ - "▁fashion", - -10.488532066345215 - ], - [ - "cra", - -10.488645553588867 - ], - [ - "ious", - -10.488700866699219 - ], - [ - "▁focused", - -10.489258766174316 - ], - [ - "16", - -10.48930549621582 - ], - [ - "▁arm", - -10.489364624023438 - ], - [ - "▁Their", - -10.489789962768555 - ], - [ - "▁Foundation", - -10.49022388458252 - ], - [ - "▁majority", - -10.49022388458252 - ], - [ - "▁wind", - -10.490785598754883 - ], - [ - "▁bought", - -10.491056442260742 - ], - [ - "▁factor", - -10.491918563842773 - ], - [ - "▁opened", - -10.49213695526123 - ], - [ - "tern", - -10.492374420166016 - ], - [ - "▁cars", - -10.492597579956055 - ], - [ - "▁exciting", - -10.492691040039062 - ], - [ - "▁affordable", - -10.493510246276855 - ], - [ - "ches", - -10.493563652038574 - ], - [ - "▁panel", - -10.493720054626465 - ], - [ - "▁caused", - -10.493793487548828 - ], - [ - "▁travail", - -10.493998527526855 - ], - [ - "▁roof", - -10.494073867797852 - ], - [ - "▁enable", - -10.494202613830566 - ], - [ - "▁toward", - -10.494491577148438 - ], - [ - "▁Development", - -10.494688987731934 - ], - [ - "▁foreign", - -10.495308876037598 - ], - [ - "avi", - -10.495320320129395 - ], - [ - "long", - -10.495328903198242 - ], - [ - "De", - -10.49578857421875 - ], - [ - "▁Mon", - -10.49588394165039 - ], - [ - "▁Va", - -10.495942115783691 - ], - [ - "AP", - -10.496097564697266 - ], - [ - "▁asta", - -10.49720573425293 - ], - [ - "▁prepare", - -10.497220993041992 - ], - [ - "▁German", - -10.497261047363281 - ], - [ - "▁Centre", - -10.497325897216797 - ], - [ - "ère", - -10.497367858886719 - ], - [ - "▁fear", - -10.497537612915039 - ], - [ - "▁Este", - -10.497878074645996 - ], - [ - "▁Des", - -10.49793529510498 - ], - [ - "▁Kon", - 
-10.499308586120605 - ], - [ - "á", - -10.499866485595703 - ], - [ - "stand", - -10.500805854797363 - ], - [ - "▁Real", - -10.500842094421387 - ], - [ - "lichen", - -10.50098705291748 - ], - [ - "▁Beach", - -10.501455307006836 - ], - [ - "▁expertise", - -10.50185775756836 - ], - [ - "▁route", - -10.502445220947266 - ], - [ - "▁nation", - -10.502551078796387 - ], - [ - "▁snow", - -10.503022193908691 - ], - [ - "▁articles", - -10.503127098083496 - ], - [ - "▁Wood", - -10.504426956176758 - ], - [ - "▁operation", - -10.50494384765625 - ], - [ - "▁passion", - -10.505215644836426 - ], - [ - "▁cand", - -10.505690574645996 - ], - [ - "haus", - -10.505701065063477 - ], - [ - "OR", - -10.505711555480957 - ], - [ - "▁senior", - -10.506511688232422 - ], - [ - "▁becomes", - -10.506546020507812 - ], - [ - "▁sounds", - -10.506878852844238 - ], - [ - "▁enjoyed", - -10.50704574584961 - ], - [ - "▁gegen", - -10.507533073425293 - ], - [ - "▁courses", - -10.507919311523438 - ], - [ - "▁absolutely", - -10.508257865905762 - ], - [ - "tim", - -10.508264541625977 - ], - [ - "uff", - -10.508516311645508 - ], - [ - "▁moins", - -10.50860595703125 - ], - [ - "▁TO", - -10.509060859680176 - ], - [ - "▁fabric", - -10.509267807006836 - ], - [ - "poli", - -10.509326934814453 - ], - [ - "▁Bre", - -10.509761810302734 - ], - [ - "▁bo", - -10.509916305541992 - ], - [ - "▁Elle", - -10.510469436645508 - ], - [ - "bu", - -10.512336730957031 - ], - [ - "▁participants", - -10.512401580810547 - ], - [ - "stone", - -10.512794494628906 - ], - [ - "ties", - -10.51366138458252 - ], - [ - "▁listen", - -10.513700485229492 - ], - [ - "▁Spiel", - -10.513752937316895 - ], - [ - "pot", - -10.513872146606445 - ], - [ - "▁selling", - -10.514358520507812 - ], - [ - "▁geht", - -10.514680862426758 - ], - [ - "▁mini", - -10.515146255493164 - ], - [ - "▁trans", - -10.515408515930176 - ], - [ - "▁ingredients", - -10.515642166137695 - ], - [ - "auf", - -10.515671730041504 - ], - [ - "▁orice", - -10.51595401763916 - ], - [ - 
"▁Next", - -10.516300201416016 - ], - [ - "▁cream", - -10.516756057739258 - ], - [ - "▁edge", - -10.516973495483398 - ], - [ - "▁recommended", - -10.517022132873535 - ], - [ - "▁Form", - -10.517277717590332 - ], - [ - "▁processing", - -10.51746940612793 - ], - [ - "vert", - -10.517709732055664 - ], - [ - "▁described", - -10.518362998962402 - ], - [ - "▁installed", - -10.51884937286377 - ], - [ - "▁managed", - -10.518952369689941 - ], - [ - "▁electronic", - -10.518966674804688 - ], - [ - "▁performed", - -10.519064903259277 - ], - [ - "▁raise", - -10.519098281860352 - ], - [ - "▁imagine", - -10.519281387329102 - ], - [ - "down", - -10.51952838897705 - ], - [ - "▁fond", - -10.519978523254395 - ], - [ - "▁Inter", - -10.520434379577637 - ], - [ - "▁Mc", - -10.520550727844238 - ], - [ - "▁Dans", - -10.520679473876953 - ], - [ - "istic", - -10.520966529846191 - ], - [ - "▁miss", - -10.521052360534668 - ], - [ - "sur", - -10.521062850952148 - ], - [ - "▁Col", - -10.521879196166992 - ], - [ - "cut", - -10.522021293640137 - ], - [ - "▁dupa", - -10.522160530090332 - ], - [ - "▁Twitter", - -10.522604942321777 - ], - [ - "▁bowl", - -10.523721694946289 - ], - [ - "▁remains", - -10.5237455368042 - ], - [ - "▁Jan", - -10.524046897888184 - ], - [ - "▁smooth", - -10.524162292480469 - ], - [ - "▁fees", - -10.524415969848633 - ], - [ - "▁aid", - -10.524494171142578 - ], - [ - "▁presence", - -10.524827003479004 - ], - [ - "▁Android", - -10.52499771118164 - ], - [ - "▁decisions", - -10.52539348602295 - ], - [ - "▁names", - -10.5254487991333 - ], - [ - "▁Music", - -10.525546073913574 - ], - [ - "▁innovative", - -10.525578498840332 - ], - [ - "▁Tom", - -10.525997161865234 - ], - [ - "▁spread", - -10.526165962219238 - ], - [ - "▁lovely", - -10.526222229003906 - ], - [ - "▁daughter", - -10.526397705078125 - ], - [ - "US", - -10.527050971984863 - ], - [ - "▁facility", - -10.52710247039795 - ], - [ - "▁peace", - -10.527105331420898 - ], - [ - "▁department", - -10.527277946472168 - ], - [ - 
"▁weiter", - -10.527591705322266 - ], - [ - "▁Sun", - -10.527756690979004 - ], - [ - "▁fund", - -10.527772903442383 - ], - [ - "▁2018.", - -10.52792739868164 - ], - [ - "▁discussion", - -10.528186798095703 - ], - [ - "75", - -10.528799057006836 - ], - [ - "EC", - -10.529126167297363 - ], - [ - "▁lunch", - -10.529144287109375 - ], - [ - "▁videos", - -10.52927017211914 - ], - [ - "05", - -10.531253814697266 - ], - [ - "ige", - -10.531266212463379 - ], - [ - "▁parking", - -10.531564712524414 - ], - [ - "▁relationships", - -10.531732559204102 - ], - [ - "▁George", - -10.532986640930176 - ], - [ - "▁teachers", - -10.53299617767334 - ], - [ - "room", - -10.533458709716797 - ], - [ - "▁Tra", - -10.533605575561523 - ], - [ - "▁Sam", - -10.533651351928711 - ], - [ - "▁properly", - -10.535590171813965 - ], - [ - "▁Book", - -10.535629272460938 - ], - [ - "▁CA", - -10.536957740783691 - ], - [ - "▁calls", - -10.53756046295166 - ], - [ - "▁stat", - -10.538175582885742 - ], - [ - "ux", - -10.538220405578613 - ], - [ - "▁soit", - -10.538439750671387 - ], - [ - "▁Community", - -10.538684844970703 - ], - [ - "▁Jahren", - -10.538714408874512 - ], - [ - "▁increasing", - -10.539575576782227 - ], - [ - "▁civil", - -10.540184020996094 - ], - [ - "app", - -10.540573120117188 - ], - [ - "▁35", - -10.540589332580566 - ], - [ - "▁rise", - -10.540600776672363 - ], - [ - "▁dabei", - -10.540989875793457 - ], - [ - "▁studio", - -10.541803359985352 - ], - [ - "▁policies", - -10.542054176330566 - ], - [ - "▁agent", - -10.542055130004883 - ], - [ - "▁Before", - -10.542601585388184 - ], - [ - "▁Cal", - -10.543017387390137 - ], - [ - "▁2005", - -10.543404579162598 - ], - [ - "▁sample", - -10.543777465820312 - ], - [ - "▁manner", - -10.545186996459961 - ], - [ - "wing", - -10.54521369934082 - ], - [ - "stra", - -10.545552253723145 - ], - [ - "▁fel", - -10.545793533325195 - ], - [ - "▁Show", - -10.545952796936035 - ], - [ - "▁scene", - -10.54656982421875 - ], - [ - "mic", - -10.546764373779297 - ], - [ 
- "nom", - -10.546995162963867 - ], - [ - "▁typically", - -10.547088623046875 - ], - [ - "▁pair", - -10.547104835510254 - ], - [ - "▁detailed", - -10.547394752502441 - ], - [ - "▁Work", - -10.547422409057617 - ], - [ - "▁cities", - -10.547451972961426 - ], - [ - "▁Rock", - -10.54749584197998 - ], - [ - "▁Gar", - -10.547906875610352 - ], - [ - "▁serving", - -10.548352241516113 - ], - [ - "▁machen", - -10.548521995544434 - ], - [ - "▁trees", - -10.54888916015625 - ], - [ - "▁accident", - -10.549199104309082 - ], - [ - "▁cloud", - -10.54920482635498 - ], - [ - "▁animals", - -10.549297332763672 - ], - [ - "▁Den", - -10.549897193908691 - ], - [ - "▁Wa", - -10.54990291595459 - ], - [ - "▁suggest", - -10.550220489501953 - ], - [ - "putting", - -10.550407409667969 - ], - [ - "▁suite", - -10.550434112548828 - ], - [ - "▁clearly", - -10.550849914550781 - ], - [ - "▁net", - -10.551287651062012 - ], - [ - "▁funding", - -10.551506996154785 - ], - [ - "▁salt", - -10.551935195922852 - ], - [ - "▁Men", - -10.552119255065918 - ], - [ - "ped", - -10.552419662475586 - ], - [ - "▁Food", - -10.553142547607422 - ], - [ - "▁leaving", - -10.553544998168945 - ], - [ - "▁Government", - -10.554243087768555 - ], - [ - "ick", - -10.554381370544434 - ], - [ - "▁seat", - -10.555121421813965 - ], - [ - "▁Los", - -10.555183410644531 - ], - [ - "▁teacher", - -10.555587768554688 - ], - [ - "▁iPhone", - -10.555693626403809 - ], - [ - "▁300", - -10.556120872497559 - ], - [ - "▁commitment", - -10.556180000305176 - ], - [ - "▁aspects", - -10.556498527526855 - ], - [ - "▁previously", - -10.55711555480957 - ], - [ - "▁cent", - -10.5572509765625 - ], - [ - "▁Vo", - -10.557341575622559 - ], - [ - "▁artists", - -10.557963371276855 - ], - [ - "▁runs", - -10.558130264282227 - ], - [ - ">", - -10.558155059814453 - ], - [ - "▁Gi", - -10.558273315429688 - ], - [ - "▁mar", - -10.5585355758667 - ], - [ - "!!!", - -10.558544158935547 - ], - [ - "▁Media", - -10.558943748474121 - ], - [ - "▁feedback", - 
-10.559109687805176 - ], - [ - "▁resolution", - -10.559117317199707 - ], - [ - "IN", - -10.55915641784668 - ], - [ - "▁wurden", - -10.55952262878418 - ], - [ - "▁busy", - -10.559832572937012 - ], - [ - "▁adult", - -10.5600004196167 - ], - [ - "29", - -10.560487747192383 - ], - [ - "elles", - -10.561375617980957 - ], - [ - "▁closed", - -10.561762809753418 - ], - [ - "▁trouble", - -10.561767578125 - ], - [ - "▁rent", - -10.561984062194824 - ], - [ - "lot", - -10.56224536895752 - ], - [ - "▁importance", - -10.562314987182617 - ], - [ - "▁units", - -10.56257438659668 - ], - [ - "Pro", - -10.562713623046875 - ], - [ - "▁provider", - -10.563005447387695 - ], - [ - "▁visual", - -10.563288688659668 - ], - [ - "IT", - -10.563385009765625 - ], - [ - "▁diet", - -10.563733100891113 - ], - [ - "▁appearance", - -10.563932418823242 - ], - [ - "pin", - -10.564576148986816 - ], - [ - "▁Din", - -10.564760208129883 - ], - [ - "▁eating", - -10.565516471862793 - ], - [ - "Fi", - -10.565762519836426 - ], - [ - "ball", - -10.565765380859375 - ], - [ - "är", - -10.565861701965332 - ], - [ - "ney", - -10.565878868103027 - ], - [ - "▁records", - -10.566070556640625 - ], - [ - "▁Fi", - -10.566180229187012 - ], - [ - "▁faut", - -10.566329002380371 - ], - [ - "▁CD", - -10.566803932189941 - ], - [ - "ign", - -10.566930770874023 - ], - [ - "▁vă", - -10.566996574401855 - ], - [ - "▁agency", - -10.567153930664062 - ], - [ - "ierung", - -10.567323684692383 - ], - [ - "▁Back", - -10.567361831665039 - ], - [ - "▁windows", - -10.567545890808105 - ], - [ - "▁pull", - -10.567888259887695 - ], - [ - "ash", - -10.567959785461426 - ], - [ - "▁profit", - -10.568593978881836 - ], - [ - "▁brings", - -10.568605422973633 - ], - [ - "▁Committee", - -10.569122314453125 - ], - [ - "▁girl", - -10.569174766540527 - ], - [ - "▁vehicles", - -10.569372177124023 - ], - [ - "▁Hier", - -10.569567680358887 - ], - [ - "ES", - -10.569639205932617 - ], - [ - "până", - -10.569880485534668 - ], - [ - "▁Kunden", - 
-10.570380210876465 - ], - [ - "pen", - -10.570462226867676 - ], - [ - "▁explain", - -10.570505142211914 - ], - [ - "▁cadru", - -10.570760726928711 - ], - [ - "▁attack", - -10.571100234985352 - ], - [ - "▁markets", - -10.571115493774414 - ], - [ - "▁claims", - -10.571340560913086 - ], - [ - "▁walking", - -10.571385383605957 - ], - [ - "▁pouv", - -10.571528434753418 - ], - [ - "low", - -10.571642875671387 - ], - [ - "▁showed", - -10.572114944458008 - ], - [ - "▁principal", - -10.57211971282959 - ], - [ - "▁lucru", - -10.572144508361816 - ], - [ - "▁precum", - -10.572712898254395 - ], - [ - "TA", - -10.573094367980957 - ], - [ - "▁partners", - -10.573104858398438 - ], - [ - "▁exist", - -10.573136329650879 - ], - [ - "▁internal", - -10.57334041595459 - ], - [ - "hen", - -10.573945045471191 - ], - [ - "▁Master", - -10.573966979980469 - ], - [ - "unless", - -10.574013710021973 - ], - [ - "▁doubt", - -10.574721336364746 - ], - [ - "$", - -10.574785232543945 - ], - [ - "▁Long", - -10.574888229370117 - ], - [ - "▁leaves", - -10.574907302856445 - ], - [ - "allowing", - -10.575063705444336 - ], - [ - "pol", - -10.575272560119629 - ], - [ - "▁Up", - -10.575491905212402 - ], - [ - "▁Contact", - -10.576093673706055 - ], - [ - "▁practical", - -10.57708740234375 - ], - [ - "▁suit", - -10.57758903503418 - ], - [ - "▁Site", - -10.577656745910645 - ], - [ - "▁formation", - -10.57768726348877 - ], - [ - "▁signal", - -10.578215599060059 - ], - [ - "▁approximately", - -10.578414916992188 - ], - [ - "▁ourselves", - -10.578497886657715 - ], - [ - "▁colour", - -10.578519821166992 - ], - [ - "▁species", - -10.578530311584473 - ], - [ - "▁advance", - -10.578753471374512 - ], - [ - "▁PM", - -10.57891845703125 - ], - [ - "ans", - -10.579121589660645 - ], - [ - "▁locations", - -10.579397201538086 - ], - [ - "vous", - -10.579601287841797 - ], - [ - "▁updated", - -10.579636573791504 - ], - [ - "▁faith", - -10.579673767089844 - ], - [ - "mus", - -10.579740524291992 - ], - [ - "▁stores", - 
-10.579863548278809 - ], - [ - "heim", - -10.580127716064453 - ], - [ - "▁suitable", - -10.580558776855469 - ], - [ - "▁continues", - -10.580703735351562 - ], - [ - "▁fac", - -10.581133842468262 - ], - [ - "ever", - -10.581156730651855 - ], - [ - "▁Bill", - -10.581195831298828 - ], - [ - "▁chose", - -10.58121109008789 - ], - [ - "▁inform", - -10.581228256225586 - ], - [ - "▁environmental", - -10.581427574157715 - ], - [ - "▁responsibility", - -10.58188533782959 - ], - [ - "99", - -10.582542419433594 - ], - [ - "▁competitive", - -10.583723068237305 - ], - [ - "▁strategies", - -10.583903312683105 - ], - [ - "▁toujours", - -10.584270477294922 - ], - [ - "tive", - -10.58430290222168 - ], - [ - "▁automatically", - -10.585600852966309 - ], - [ - "▁dress", - -10.585609436035156 - ], - [ - "▁Minister", - -10.585624694824219 - ], - [ - "har", - -10.586076736450195 - ], - [ - "▁Start", - -10.586249351501465 - ], - [ - "▁=", - -10.586563110351562 - ], - [ - "▁pattern", - -10.58659553527832 - ], - [ - "tier", - -10.58676528930664 - ], - [ - "▁pays", - -10.587034225463867 - ], - [ - "▁profile", - -10.58725357055664 - ], - [ - "▁raised", - -10.587263107299805 - ], - [ - "ange", - -10.587288856506348 - ], - [ - "▁drink", - -10.587762832641602 - ], - [ - "▁element", - -10.588042259216309 - ], - [ - "▁landscape", - -10.58875560760498 - ], - [ - "▁Tag", - -10.589073181152344 - ], - [ - "▁cheese", - -10.589590072631836 - ], - [ - "ific", - -10.590009689331055 - ], - [ - "▁Stadt", - -10.590181350708008 - ], - [ - "39", - -10.591398239135742 - ], - [ - "▁launch", - -10.592113494873047 - ], - [ - "▁wouldn", - -10.592150688171387 - ], - [ - "AS", - -10.592202186584473 - ], - [ - "▁push", - -10.593059539794922 - ], - [ - "▁mill", - -10.593452453613281 - ], - [ - "▁mass", - -10.593647003173828 - ], - [ - "▁category", - -10.593790054321289 - ], - [ - "sondern", - -10.594050407409668 - ], - [ - "col", - -10.594111442565918 - ], - [ - "▁climate", - -10.594313621520996 - ], - [ - "lier", - 
-10.594437599182129 - ], - [ - "▁slightly", - -10.595514297485352 - ], - [ - "95", - -10.596519470214844 - ], - [ - "ace", - -10.596612930297852 - ], - [ - "▁domain", - -10.597633361816406 - ], - [ - "kan", - -10.598306655883789 - ], - [ - "▁feed", - -10.598485946655273 - ], - [ - "▁Live", - -10.598837852478027 - ], - [ - "▁Mais", - -10.599113464355469 - ], - [ - "▁après", - -10.599365234375 - ], - [ - "▁village", - -10.59941577911377 - ], - [ - "▁hatte", - -10.59968090057373 - ], - [ - "▁joined", - -10.599881172180176 - ], - [ - "▁Museum", - -10.600311279296875 - ], - [ - "head", - -10.600855827331543 - ], - [ - "▁draw", - -10.6009521484375 - ], - [ - "▁concerns", - -10.600966453552246 - ], - [ - "ER", - -10.601505279541016 - ], - [ - "▁technique", - -10.601648330688477 - ], - [ - "▁Bio", - -10.601861000061035 - ], - [ - "▁Sea", - -10.601881980895996 - ], - [ - "▁@", - -10.601927757263184 - ], - [ - "wer", - -10.6021146774292 - ], - [ - "▁battery", - -10.602462768554688 - ], - [ - "▁mostly", - -10.60267448425293 - ], - [ - "▁familiar", - -10.602680206298828 - ], - [ - "▁Sub", - -10.602689743041992 - ], - [ - "▁delicious", - -10.603222846984863 - ], - [ - "doch", - -10.60326099395752 - ], - [ - "60", - -10.603395462036133 - ], - [ - "▁carte", - -10.603611946105957 - ], - [ - "▁avut", - -10.604146957397461 - ], - [ - "▁premium", - -10.60460376739502 - ], - [ - "▁attempt", - -10.604704856872559 - ], - [ - "▁Über", - -10.60473346710205 - ], - [ - "▁combined", - -10.604935646057129 - ], - [ - "lement", - -10.604947090148926 - ], - [ - "▁voi", - -10.605031967163086 - ], - [ - "▁wonder", - -10.605376243591309 - ], - [ - "▁failure", - -10.606106758117676 - ], - [ - "which", - -10.606147766113281 - ], - [ - "esti", - -10.606316566467285 - ], - [ - "31", - -10.606547355651855 - ], - [ - "▁sta", - -10.606734275817871 - ], - [ - "▁transform", - -10.60673999786377 - ], - [ - "▁license", - -10.606743812561035 - ], - [ - "▁depending", - -10.606758117675781 - ], - [ - 
"▁specifically", - -10.606782913208008 - ], - [ - "▁OF", - -10.60693645477295 - ], - [ - "band", - -10.606959342956543 - ], - [ - "▁Sport", - -10.60731315612793 - ], - [ - "list", - -10.607434272766113 - ], - [ - "▁Tour", - -10.60753059387207 - ], - [ - "▁Israel", - -10.607564926147461 - ], - [ - "▁filled", - -10.607722282409668 - ], - [ - "▁manual", - -10.60776138305664 - ], - [ - "▁watching", - -10.608621597290039 - ], - [ - "▁rule", - -10.608877182006836 - ], - [ - "mat", - -10.60901927947998 - ], - [ - "▁notes", - -10.609585762023926 - ], - [ - "▁Oh", - -10.60960578918457 - ], - [ - "▁bereits", - -10.609634399414062 - ], - [ - "▁foundation", - -10.609916687011719 - ], - [ - "▁vital", - -10.610146522521973 - ], - [ - "▁lassen", - -10.610747337341309 - ], - [ - "▁cât", - -10.611162185668945 - ], - [ - "▁shipping", - -10.611433029174805 - ], - [ - "▁registered", - -10.611513137817383 - ], - [ - "▁jour", - -10.612669944763184 - ], - [ - "▁island", - -10.61276626586914 - ], - [ - "▁sets", - -10.613068580627441 - ], - [ - "▁football", - -10.613683700561523 - ], - [ - "▁EU", - -10.613860130310059 - ], - [ - "▁stone", - -10.614019393920898 - ], - [ - "▁Press", - -10.614699363708496 - ], - [ - "▁adapt", - -10.615066528320312 - ], - [ - "ised", - -10.615425109863281 - ], - [ - "▁thoughts", - -10.615434646606445 - ], - [ - "▁doors", - -10.615851402282715 - ], - [ - "€", - -10.615954399108887 - ], - [ - "▁components", - -10.616040229797363 - ], - [ - "rig", - -10.616332054138184 - ], - [ - "▁generation", - -10.616585731506348 - ], - [ - "▁guess", - -10.616700172424316 - ], - [ - "cker", - -10.61694049835205 - ], - [ - "▁realize", - -10.617207527160645 - ], - [ - "▁Roman", - -10.617310523986816 - ], - [ - "▁contre", - -10.617693901062012 - ], - [ - "▁Out", - -10.617938995361328 - ], - [ - "▁IN", - -10.619051933288574 - ], - [ - "cip", - -10.619085311889648 - ], - [ - "59", - -10.619330406188965 - ], - [ - "▁enhance", - -10.619768142700195 - ], - [ - "▁battle", - 
-10.61982250213623 - ], - [ - "▁monitor", - -10.619863510131836 - ], - [ - "▁Martin", - -10.62045955657959 - ], - [ - "▁websites", - -10.620461463928223 - ], - [ - "▁DE", - -10.620599746704102 - ], - [ - "▁Festival", - -10.620951652526855 - ], - [ - "ân", - -10.62131118774414 - ], - [ - "▁Place", - -10.621419906616211 - ], - [ - "▁rare", - -10.621554374694824 - ], - [ - "această", - -10.621726989746094 - ], - [ - "▁sollte", - -10.621731758117676 - ], - [ - "▁Read", - -10.621816635131836 - ], - [ - "ware", - -10.622169494628906 - ], - [ - "Those", - -10.622671127319336 - ], - [ - "ende", - -10.623543739318848 - ], - [ - "▁prix", - -10.623835563659668 - ], - [ - "▁roman", - -10.624101638793945 - ], - [ - "▁creation", - -10.624224662780762 - ], - [ - "▁confidence", - -10.624552726745605 - ], - [ - "▁Japan", - -10.624638557434082 - ], - [ - "▁rain", - -10.624942779541016 - ], - [ - "▁guys", - -10.62518310546875 - ], - [ - "▁south", - -10.625236511230469 - ], - [ - "▁trading", - -10.625646591186523 - ], - [ - "▁€", - -10.626100540161133 - ], - [ - "▁Film", - -10.626341819763184 - ], - [ - "▁pana", - -10.627065658569336 - ], - [ - "▁asemenea", - -10.627066612243652 - ], - [ - "36", - -10.627190589904785 - ], - [ - "▁instance", - -10.627884864807129 - ], - [ - "cou", - -10.629385948181152 - ], - [ - "▁nun", - -10.630074501037598 - ], - [ - "▁Pass", - -10.630390167236328 - ], - [ - "Cette", - -10.630579948425293 - ], - [ - "▁Network", - -10.630876541137695 - ], - [ - "▁prime", - -10.631010055541992 - ], - [ - "▁spiritual", - -10.632098197937012 - ], - [ - "▁tough", - -10.633030891418457 - ], - [ - "▁AND", - -10.633086204528809 - ], - [ - "▁Cat", - -10.633601188659668 - ], - [ - "▁boat", - -10.633611679077148 - ], - [ - "▁leads", - -10.634864807128906 - ], - [ - "▁Germany", - -10.63509750366211 - ], - [ - "▁valuable", - -10.635635375976562 - ], - [ - "57", - -10.635892868041992 - ], - [ - "lect", - -10.636148452758789 - ], - [ - "▁distribution", - -10.636445045471191 - ], - 
[ - "dar", - -10.636518478393555 - ], - [ - "▁Manager", - -10.637701988220215 - ], - [ - "cha", - -10.637725830078125 - ], - [ - "▁obtain", - -10.637741088867188 - ], - [ - "GB", - -10.637908935546875 - ], - [ - "▁unor", - -10.638079643249512 - ], - [ - "schaft", - -10.638603210449219 - ], - [ - "▁zwischen", - -10.638723373413086 - ], - [ - "▁winning", - -10.639172554016113 - ], - [ - "▁suis", - -10.639811515808105 - ], - [ - "58", - -10.640130996704102 - ], - [ - "▁Party", - -10.640372276306152 - ], - [ - "▁ceva", - -10.640416145324707 - ], - [ - "▁comprehensive", - -10.640684127807617 - ], - [ - "▁aceste", - -10.640726089477539 - ], - [ - "▁committed", - -10.640726089477539 - ], - [ - "▁Hu", - -10.641382217407227 - ], - [ - "ţ", - -10.64149284362793 - ], - [ - "▁north", - -10.642021179199219 - ], - [ - "werk", - -10.642542839050293 - ], - [ - "▁interface", - -10.642794609069824 - ], - [ - "▁Valley", - -10.64281177520752 - ], - [ - "▁anywhere", - -10.64281177520752 - ], - [ - "▁Only", - -10.642851829528809 - ], - [ - "TE", - -10.643295288085938 - ], - [ - "hui", - -10.6436767578125 - ], - [ - "bus", - -10.643951416015625 - ], - [ - "vis", - -10.6439790725708 - ], - [ - "▁Society", - -10.645116806030273 - ], - [ - "▁reliable", - -10.64556884765625 - ], - [ - "▁quelques", - -10.64563274383545 - ], - [ - "tech", - -10.646187782287598 - ], - [ - "ual", - -10.646377563476562 - ], - [ - "▁educational", - -10.646418571472168 - ], - [ - "serv", - -10.646490097045898 - ], - [ - "▁opinion", - -10.646628379821777 - ], - [ - "▁appears", - -10.646702766418457 - ], - [ - "▁count", - -10.646795272827148 - ], - [ - "irea", - -10.646981239318848 - ], - [ - "ban", - -10.647504806518555 - ], - [ - "▁45", - -10.647530555725098 - ], - [ - "▁contain", - -10.647661209106445 - ], - [ - "ost", - -10.647663116455078 - ], - [ - "▁anul", - -10.647706031799316 - ], - [ - "rien", - -10.648159980773926 - ], - [ - "gra", - -10.648360252380371 - ], - [ - "▁counter", - -10.648946762084961 - ], - [ 
- "-3", - -10.650411605834961 - ], - [ - "▁resource", - -10.650463104248047 - ], - [ - "▁Wo", - -10.6505126953125 - ], - [ - "▁posts", - -10.650618553161621 - ], - [ - "▁employee", - -10.651320457458496 - ], - [ - "rol", - -10.651863098144531 - ], - [ - "▁ended", - -10.651969909667969 - ], - [ - "met", - -10.653080940246582 - ], - [ - "▁meine", - -10.653165817260742 - ], - [ - "▁reached", - -10.653368949890137 - ], - [ - "gri", - -10.653716087341309 - ], - [ - "▁Bra", - -10.65374755859375 - ], - [ - "▁conduct", - -10.654294967651367 - ], - [ - "▁housing", - -10.654422760009766 - ], - [ - "▁tickets", - -10.654792785644531 - ], - [ - "▁database", - -10.655674934387207 - ], - [ - "IL", - -10.656150817871094 - ], - [ - "▁perspective", - -10.656359672546387 - ], - [ - "▁Har", - -10.656404495239258 - ], - [ - "▁error", - -10.656549453735352 - ], - [ - "▁meal", - -10.656569480895996 - ], - [ - "▁hearing", - -10.657238006591797 - ], - [ - "▁transition", - -10.657302856445312 - ], - [ - "▁browser", - -10.657609939575195 - ], - [ - "▁supported", - -10.657609939575195 - ], - [ - "▁starts", - -10.658814430236816 - ], - [ - "țe", - -10.658902168273926 - ], - [ - "▁adults", - -10.658905029296875 - ], - [ - "▁România", - -10.65917682647705 - ], - [ - "dra", - -10.659884452819824 - ], - [ - "▁worry", - -10.660222053527832 - ], - [ - "▁avoir", - -10.660497665405273 - ], - [ - "▁regional", - -10.660507202148438 - ], - [ - "▁min", - -10.660722732543945 - ], - [ - "▁Does", - -10.660806655883789 - ], - [ - "▁Keep", - -10.661200523376465 - ], - [ - "rom", - -10.661237716674805 - ], - [ - "sco", - -10.661320686340332 - ], - [ - "tem", - -10.661898612976074 - ], - [ - "▁Old", - -10.661954879760742 - ], - [ - "▁Under", - -10.662552833557129 - ], - [ - "▁Commission", - -10.662557601928711 - ], - [ - "▁Bau", - -10.6632661819458 - ], - [ - "▁News", - -10.663358688354492 - ], - [ - "▁mois", - -10.663444519042969 - ], - [ - "▁respond", - -10.66356372833252 - ], - [ - "▁alles", - 
-10.663878440856934 - ], - [ - "▁chair", - -10.664475440979004 - ], - [ - "▁ho", - -10.664854049682617 - ], - [ - "right", - -10.664908409118652 - ], - [ - "▁totally", - -10.665532112121582 - ], - [ - "gle", - -10.665534973144531 - ], - [ - "▁32", - -10.665604591369629 - ], - [ - "66", - -10.665664672851562 - ], - [ - "town", - -10.665902137756348 - ], - [ - "Ch", - -10.666261672973633 - ], - [ - "▁gr", - -10.66629695892334 - ], - [ - "▁garage", - -10.666328430175781 - ], - [ - "ții", - -10.666495323181152 - ], - [ - "▁Union", - -10.667136192321777 - ], - [ - "ică", - -10.667343139648438 - ], - [ - "▁2,", - -10.668437004089355 - ], - [ - "▁reflect", - -10.669163703918457 - ], - [ - "▁retail", - -10.669388771057129 - ], - [ - "▁unde", - -10.669605255126953 - ], - [ - "▁accessible", - -10.670262336730957 - ], - [ - "water", - -10.67059326171875 - ], - [ - "▁regard", - -10.670710563659668 - ], - [ - "▁logo", - -10.671489715576172 - ], - [ - "▁inspired", - -10.671518325805664 - ], - [ - "▁Wall", - -10.671859741210938 - ], - [ - "▁Ste", - -10.672093391418457 - ], - [ - "▁asking", - -10.672179222106934 - ], - [ - "▁Journal", - -10.673028945922852 - ], - [ - "▁Teil", - -10.674042701721191 - ], - [ - "▁collaboration", - -10.674185752868652 - ], - [ - "▁acid", - -10.674266815185547 - ], - [ - "▁Fund", - -10.674382209777832 - ], - [ - "▁spirit", - -10.6744384765625 - ], - [ - "despite", - -10.674457550048828 - ], - [ - "▁delivered", - -10.674821853637695 - ], - [ - "▁girls", - -10.675374984741211 - ], - [ - "▁Look", - -10.675896644592285 - ], - [ - "rant", - -10.675949096679688 - ], - [ - "▁District", - -10.676460266113281 - ], - [ - "▁rental", - -10.676709175109863 - ], - [ - "▁spune", - -10.676733016967773 - ], - [ - "els", - -10.677544593811035 - ], - [ - "▁permanent", - -10.677659034729004 - ], - [ - "▁iron", - -10.677709579467773 - ], - [ - "▁Thomas", - -10.677745819091797 - ], - [ - "EL", - -10.678071022033691 - ], - [ - "▁except", - -10.678074836730957 - ], - [ - 
"▁catch", - -10.678366661071777 - ], - [ - "▁providers", - -10.678375244140625 - ], - [ - "▁2006", - -10.678435325622559 - ], - [ - "▁chat", - -10.679931640625 - ], - [ - "▁emergency", - -10.680281639099121 - ], - [ - "gre", - -10.68030834197998 - ], - [ - "site", - -10.680888175964355 - ], - [ - "▁missing", - -10.68089485168457 - ], - [ - "abil", - -10.680914878845215 - ], - [ - "▁Hill", - -10.68099594116211 - ], - [ - "urs", - -10.681312561035156 - ], - [ - "▁plusieurs", - -10.681716918945312 - ], - [ - "▁birthday", - -10.681726455688477 - ], - [ - "DS", - -10.682019233703613 - ], - [ - "ersten", - -10.682381629943848 - ], - [ - "▁5.", - -10.68252944946289 - ], - [ - "▁library", - -10.68333911895752 - ], - [ - "▁earth", - -10.683515548706055 - ], - [ - "CI", - -10.683645248413086 - ], - [ - "▁lighting", - -10.684442520141602 - ], - [ - "▁fixed", - -10.684879302978516 - ], - [ - "tori", - -10.684891700744629 - ], - [ - "▁replace", - -10.684995651245117 - ], - [ - "▁administration", - -10.685074806213379 - ], - [ - "leurs", - -10.685229301452637 - ], - [ - "▁meat", - -10.686142921447754 - ], - [ - "▁songs", - -10.686662673950195 - ], - [ - "▁confirm", - -10.686866760253906 - ], - [ - "▁rapid", - -10.68698787689209 - ], - [ - "▁Special", - -10.686995506286621 - ], - [ - "▁holding", - -10.687115669250488 - ], - [ - "▁honor", - -10.687271118164062 - ], - [ - "▁Market", - -10.687409400939941 - ], - [ - "La", - -10.687535285949707 - ], - [ - "▁measure", - -10.687760353088379 - ], - [ - "▁guarantee", - -10.68785572052002 - ], - [ - "▁switch", - -10.68813419342041 - ], - [ - "▁extensive", - -10.688294410705566 - ], - [ - "▁Neu", - -10.688674926757812 - ], - [ - "avez", - -10.688901901245117 - ], - [ - "▁protein", - -10.688984870910645 - ], - [ - "▁infrastructure", - -10.689454078674316 - ], - [ - "▁functions", - -10.689494132995605 - ], - [ - "▁cont", - -10.689496040344238 - ], - [ - "row", - -10.689760208129883 - ], - [ - "star", - -10.689773559570312 - ], - [ - "▁Port", 
- -10.690192222595215 - ], - [ - "Using", - -10.690336227416992 - ], - [ - "▁faster", - -10.690557479858398 - ], - [ - "44", - -10.691168785095215 - ], - [ - "▁measures", - -10.691615104675293 - ], - [ - "▁celor", - -10.69186019897461 - ], - [ - "▁exam", - -10.69189739227295 - ], - [ - "200", - -10.69202995300293 - ], - [ - "î", - -10.692545890808105 - ], - [ - "▁conversation", - -10.692832946777344 - ], - [ - "▁brands", - -10.692959785461426 - ], - [ - "▁Code", - -10.69359016418457 - ], - [ - "▁Website", - -10.693748474121094 - ], - [ - "OS", - -10.693782806396484 - ], - [ - "▁alors", - -10.693822860717773 - ], - [ - "▁organ", - -10.694032669067383 - ], - [ - "▁removed", - -10.694823265075684 - ], - [ - "▁Head", - -10.694905281066895 - ], - [ - "▁Cha", - -10.694908142089844 - ], - [ - "▁visiting", - -10.694928169250488 - ], - [ - "▁wild", - -10.694928169250488 - ], - [ - "▁seit", - -10.694962501525879 - ], - [ - "49", - -10.695109367370605 - ], - [ - "▁organic", - -10.69539737701416 - ], - [ - "aţi", - -10.695775032043457 - ], - [ - "▁kit", - -10.695947647094727 - ], - [ - "68", - -10.695959091186523 - ], - [ - "▁flowers", - -10.696124076843262 - ], - [ - "▁appreciate", - -10.697006225585938 - ], - [ - "▁dead", - -10.697439193725586 - ], - [ - "▁Fire", - -10.697539329528809 - ], - [ - "▁cela", - -10.697591781616211 - ], - [ - "▁Ph", - -10.697633743286133 - ], - [ - "▁arrive", - -10.697921752929688 - ], - [ - "▁purposes", - -10.698213577270508 - ], - [ - "▁qualité", - -10.698226928710938 - ], - [ - "▁restaurants", - -10.698478698730469 - ], - [ - "▁advertising", - -10.698541641235352 - ], - [ - "cur", - -10.69855785369873 - ], - [ - "▁ça", - -10.698973655700684 - ], - [ - "▁introduced", - -10.699088096618652 - ], - [ - "▁returned", - -10.699111938476562 - ], - [ - "▁desire", - -10.699511528015137 - ], - [ - "▁soul", - -10.699983596801758 - ], - [ - "▁Technology", - -10.699994087219238 - ], - [ - ");", - -10.700163841247559 - ], - [ - "▁Royal", - -10.700282096862793 
- ], - [ - "tant", - -10.70068645477295 - ], - [ - "▁possibly", - -10.700702667236328 - ], - [ - "▁consumers", - -10.700812339782715 - ], - [ - "▁doua", - -10.70097541809082 - ], - [ - "ified", - -10.70097827911377 - ], - [ - "▁Award", - -10.70114803314209 - ], - [ - "toutes", - -10.70130443572998 - ], - [ - "▁meant", - -10.701325416564941 - ], - [ - "ezi", - -10.701616287231445 - ], - [ - "▁plu", - -10.701766014099121 - ], - [ - "ţii", - -10.7021484375 - ], - [ - "▁talent", - -10.702789306640625 - ], - [ - "▁Security", - -10.703309059143066 - ], - [ - "arii", - -10.703352928161621 - ], - [ - "▁zi", - -10.703455924987793 - ], - [ - "▁Shop", - -10.703667640686035 - ], - [ - "▁breakfast", - -10.704107284545898 - ], - [ - "▁trial", - -10.704485893249512 - ], - [ - "ami", - -10.704936981201172 - ], - [ - "▁register", - -10.705301284790039 - ], - [ - "unserer", - -10.705646514892578 - ], - [ - "▁solar", - -10.705697059631348 - ], - [ - "▁deals", - -10.70591926574707 - ], - [ - "▁Ku", - -10.7059326171875 - ], - [ - "To", - -10.706186294555664 - ], - [ - "bat", - -10.70680046081543 - ], - [ - "MC", - -10.707010269165039 - ], - [ - "▁Global", - -10.707018852233887 - ], - [ - "у", - -10.707405090332031 - ], - [ - "▁nor", - -10.707818984985352 - ], - [ - "▁milk", - -10.707868576049805 - ], - [ - "▁choices", - -10.708206176757812 - ], - [ - "»", - -10.7086763381958 - ], - [ - "▁Sur", - -10.708695411682129 - ], - [ - "more", - -10.708739280700684 - ], - [ - "48", - -10.709024429321289 - ], - [ - "67", - -10.709375381469727 - ], - [ - "▁replacement", - -10.709942817687988 - ], - [ - "34", - -10.710440635681152 - ], - [ - "▁chocolate", - -10.710485458374023 - ], - [ - "▁Family", - -10.71059513092041 - ], - [ - "This", - -10.71122932434082 - ], - [ - "▁novel", - -10.711435317993164 - ], - [ - "▁Chicago", - -10.711563110351562 - ], - [ - "▁participate", - -10.71166706085205 - ], - [ - "▁trei", - -10.712727546691895 - ], - [ - "▁monthly", - -10.713729858398438 - ], - [ - "▁survey", 
- -10.713977813720703 - ], - [ - "▁End", - -10.714285850524902 - ], - [ - "▁Medical", - -10.71442699432373 - ], - [ - "autres", - -10.714678764343262 - ], - [ - "rich", - -10.714698791503906 - ], - [ - "▁bike", - -10.714703559875488 - ], - [ - "▁eventually", - -10.714717864990234 - ], - [ - "▁HD", - -10.714722633361816 - ], - [ - "bil", - -10.714744567871094 - ], - [ - "cent", - -10.714902877807617 - ], - [ - "▁afin", - -10.715676307678223 - ], - [ - "▁surgery", - -10.716160774230957 - ], - [ - "▁sin", - -10.716455459594727 - ], - [ - "▁manufacturing", - -10.716955184936523 - ], - [ - "▁consumer", - -10.717245101928711 - ], - [ - "system", - -10.717306137084961 - ], - [ - "▁object", - -10.717400550842285 - ], - [ - "▁Ju", - -10.717422485351562 - ], - [ - "ered", - -10.7178373336792 - ], - [ - "rac", - -10.718070030212402 - ], - [ - "▁clinical", - -10.718664169311523 - ], - [ - "▁dollars", - -10.719761848449707 - ], - [ - "▁chain", - -10.71994686126709 - ], - [ - "▁afternoon", - -10.720196723937988 - ], - [ - "▁ligne", - -10.720422744750977 - ], - [ - "▁accounts", - -10.721806526184082 - ], - [ - "ving", - -10.722037315368652 - ], - [ - "▁Australian", - -10.72240924835205 - ], - [ - "38", - -10.722542762756348 - ], - [ - "▁persoane", - -10.72258472442627 - ], - [ - "▁grande", - -10.722668647766113 - ], - [ - "▁Report", - -10.723472595214844 - ], - [ - "▁revenue", - -10.723649024963379 - ], - [ - "▁spre", - -10.723760604858398 - ], - [ - "▁cutting", - -10.7239990234375 - ], - [ - "▁approved", - -10.724133491516113 - ], - [ - "▁glad", - -10.724188804626465 - ], - [ - "chaque", - -10.724395751953125 - ], - [ - "win", - -10.724435806274414 - ], - [ - "▁waren", - -10.724733352661133 - ], - [ - "▁launched", - -10.725071907043457 - ], - [ - "▁layer", - -10.725645065307617 - ], - [ - "▁airport", - -10.725716590881348 - ], - [ - "▁effectively", - -10.72572135925293 - ], - [ - "▁coach", - -10.725946426391602 - ], - [ - "dé", - -10.726130485534668 - ], - [ - "LE", - 
-10.72627067565918 - ], - [ - "▁müssen", - -10.726386070251465 - ], - [ - "plan", - -10.726641654968262 - ], - [ - "dan", - -10.726705551147461 - ], - [ - "55", - -10.726786613464355 - ], - [ - "bringing", - -10.726895332336426 - ], - [ - "▁$2", - -10.726995468139648 - ], - [ - "nce", - -10.727181434631348 - ], - [ - "▁inspiration", - -10.728177070617676 - ], - [ - "You", - -10.728657722473145 - ], - [ - "▁soll", - -10.729095458984375 - ], - [ - "▁seemed", - -10.729595184326172 - ], - [ - "▁flight", - -10.729687690734863 - ], - [ - "▁prima", - -10.729883193969727 - ], - [ - "▁Welt", - -10.730123519897461 - ], - [ - "▁jetzt", - -10.730315208435059 - ], - [ - "ky", - -10.730428695678711 - ], - [ - "▁Western", - -10.73054027557373 - ], - [ - "▁label", - -10.730600357055664 - ], - [ - "▁möglich", - -10.73081111907959 - ], - [ - "▁input", - -10.730862617492676 - ], - [ - "▁laws", - -10.730995178222656 - ], - [ - "▁personnes", - -10.731708526611328 - ], - [ - "▁paying", - -10.731731414794922 - ], - [ - "▁Uhr", - -10.73173713684082 - ], - [ - "▁Mary", - -10.731745719909668 - ], - [ - "pur", - -10.73190689086914 - ], - [ - "▁covers", - -10.732133865356445 - ], - [ - "▁throw", - -10.732522964477539 - ], - [ - "▁Tor", - -10.733281135559082 - ], - [ - "▁bat", - -10.73355484008789 - ], - [ - "▁Gr", - -10.73373031616211 - ], - [ - "▁farm", - -10.73376178741455 - ], - [ - "▁improved", - -10.733843803405762 - ], - [ - "▁fără", - -10.734286308288574 - ], - [ - "▁theme", - -10.73437213897705 - ], - [ - "pens", - -10.734865188598633 - ], - [ - "▁Cup", - -10.734975814819336 - ], - [ - "▁settings", - -10.735114097595215 - ], - [ - "▁hire", - -10.735234260559082 - ], - [ - "▁massive", - -10.735248565673828 - ], - [ - "▁generate", - -10.735405921936035 - ], - [ - "▁earn", - -10.735837936401367 - ], - [ - "▁tab", - -10.736431121826172 - ], - [ - "For", - -10.736616134643555 - ], - [ - "gang", - -10.736891746520996 - ], - [ - "▁hin", - -10.73709487915039 - ], - [ - "▁roll", - 
-10.737113952636719 - ], - [ - "▁engagement", - -10.737157821655273 - ], - [ - "▁signed", - -10.737177848815918 - ], - [ - "▁League", - -10.737323760986328 - ], - [ - "▁registration", - -10.737931251525879 - ], - [ - "▁première", - -10.738763809204102 - ], - [ - "isse", - -10.73896598815918 - ], - [ - "▁university", - -10.739027976989746 - ], - [ - "ell", - -10.739157676696777 - ], - [ - "▁nou", - -10.739169120788574 - ], - [ - "rog", - -10.739191055297852 - ], - [ - "▁sitting", - -10.739206314086914 - ], - [ - "▁cazul", - -10.739571571350098 - ], - [ - "▁surrounding", - -10.73983383178711 - ], - [ - "▁Asia", - -10.740357398986816 - ], - [ - "▁bath", - -10.740825653076172 - ], - [ - "hal", - -10.740923881530762 - ], - [ - "▁plate", - -10.741026878356934 - ], - [ - "▁tests", - -10.741151809692383 - ], - [ - "▁presentation", - -10.741156578063965 - ], - [ - "▁chicken", - -10.741501808166504 - ], - [ - "▁Val", - -10.741586685180664 - ], - [ - "ably", - -10.74166488647461 - ], - [ - "▁magazine", - -10.741697311401367 - ], - [ - "▁Maybe", - -10.74187183380127 - ], - [ - "▁sauce", - -10.742673873901367 - ], - [ - "TC", - -10.742887496948242 - ], - [ - "▁exclusive", - -10.74296760559082 - ], - [ - "86", - -10.74306869506836 - ], - [ - "▁teeth", - -10.743474960327148 - ], - [ - "▁regularly", - -10.743524551391602 - ], - [ - "sed", - -10.743824005126953 - ], - [ - "gro", - -10.744174003601074 - ], - [ - "He", - -10.744211196899414 - ], - [ - "▁2017.", - -10.744302749633789 - ], - [ - "▁template", - -10.74489688873291 - ], - [ - "▁gleich", - -10.744938850402832 - ], - [ - "bal", - -10.745061874389648 - ], - [ - "▁African", - -10.74511432647705 - ], - [ - "în", - -10.745231628417969 - ], - [ - "▁rep", - -10.74543571472168 - ], - [ - "▁beat", - -10.74588394165039 - ], - [ - "▁deck", - -10.746064186096191 - ], - [ - "▁intended", - -10.746221542358398 - ], - [ - "▁para", - -10.746513366699219 - ], - [ - "▁IP", - -10.746712684631348 - ], - [ - "▁bra", - -10.746881484985352 - ], - 
[ - "▁forces", - -10.746966361999512 - ], - [ - "▁routine", - -10.747184753417969 - ], - [ - "▁Jahre", - -10.747758865356445 - ], - [ - "▁Bad", - -10.74797534942627 - ], - [ - "▁drivers", - -10.748074531555176 - ], - [ - "▁updates", - -10.748095512390137 - ], - [ - "▁elegant", - -10.748279571533203 - ], - [ - "▁external", - -10.748444557189941 - ], - [ - "▁engineering", - -10.748819351196289 - ], - [ - "ender", - -10.749544143676758 - ], - [ - "table", - -10.749755859375 - ], - [ - "inter", - -10.749878883361816 - ], - [ - "▁Romania", - -10.749948501586914 - ], - [ - "▁zile", - -10.750468254089355 - ], - [ - "▁luxury", - -10.750570297241211 - ], - [ - "▁calling", - -10.750750541687012 - ], - [ - "▁cooking", - -10.75101375579834 - ], - [ - "▁component", - -10.75114631652832 - ], - [ - "wan", - -10.75121021270752 - ], - [ - "schen", - -10.751212120056152 - ], - [ - "▁birth", - -10.751242637634277 - ], - [ - "asupra", - -10.751349449157715 - ], - [ - "Co", - -10.751471519470215 - ], - [ - "▁opt", - -10.75153923034668 - ], - [ - "▁discovered", - -10.751860618591309 - ], - [ - "▁teach", - -10.752084732055664 - ], - [ - "▁Son", - -10.75234317779541 - ], - [ - "▁guest", - -10.752384185791016 - ], - [ - "▁dogs", - -10.752695083618164 - ], - [ - "▁2003", - -10.752745628356934 - ], - [ - "▁behavior", - -10.752750396728516 - ], - [ - "pé", - -10.7529935836792 - ], - [ - "63", - -10.75316333770752 - ], - [ - "▁Human", - -10.753702163696289 - ], - [ - "▁expression", - -10.754800796508789 - ], - [ - "▁nevoie", - -10.754936218261719 - ], - [ - "▁recherche", - -10.75528621673584 - ], - [ - "ging", - -10.755767822265625 - ], - [ - "related", - -10.755948066711426 - ], - [ - "▁discount", - -10.756040573120117 - ], - [ - "▁Brown", - -10.756054878234863 - ], - [ - "▁Such", - -10.756107330322266 - ], - [ - "▁Ve", - -10.757149696350098 - ], - [ - "▁height", - -10.757265090942383 - ], - [ - "clo", - -10.757414817810059 - ], - [ - "▁incredible", - -10.757912635803223 - ], - [ - "▁bas", - 
-10.757916450500488 - ], - [ - "▁mă", - -10.75798225402832 - ], - [ - "▁purchased", - -10.758240699768066 - ], - [ - "▁compte", - -10.75831127166748 - ], - [ - "▁instructions", - -10.758537292480469 - ], - [ - "▁Instead", - -10.75866985321045 - ], - [ - "▁output", - -10.758706092834473 - ], - [ - "▁mom", - -10.758886337280273 - ], - [ - "DR", - -10.759828567504883 - ], - [ - "89", - -10.760168075561523 - ], - [ - "▁reduced", - -10.760621070861816 - ], - [ - "98", - -10.7606840133667 - ], - [ - "▁constant", - -10.760879516601562 - ], - [ - "▁therapy", - -10.762417793273926 - ], - [ - "▁capable", - -10.762757301330566 - ], - [ - "mark", - -10.763265609741211 - ], - [ - "▁Sometimes", - -10.76332950592041 - ], - [ - "▁joy", - -10.763419151306152 - ], - [ - "▁perfectly", - -10.763589859008789 - ], - [ - "▁painting", - -10.763704299926758 - ], - [ - "avait", - -10.763765335083008 - ], - [ - "▁Sha", - -10.764384269714355 - ], - [ - "▁dat", - -10.764463424682617 - ], - [ - "▁produits", - -10.764479637145996 - ], - [ - "tric", - -10.76456356048584 - ], - [ - "ierte", - -10.765153884887695 - ], - [ - "▁Smith", - -10.765836715698242 - ], - [ - "▁trebui", - -10.766264915466309 - ], - [ - "▁beaucoup", - -10.766630172729492 - ], - [ - "▁chosen", - -10.767189025878906 - ], - [ - "▁cre", - -10.76732063293457 - ], - [ - "▁complet", - -10.767341613769531 - ], - [ - "▁Ltd", - -10.767599105834961 - ], - [ - "▁recovery", - -10.76781940460205 - ], - [ - "▁district", - -10.768423080444336 - ], - [ - "78", - -10.768640518188477 - ], - [ - "▁Unter", - -10.76872730255127 - ], - [ - "▁schnell", - -10.768729209899902 - ], - [ - "▁apart", - -10.768943786621094 - ], - [ - "▁phase", - -10.76894760131836 - ], - [ - "▁seeking", - -10.769091606140137 - ], - [ - "▁mark", - -10.769148826599121 - ], - [ - "▁pet", - -10.769233703613281 - ], - [ - "▁PDF", - -10.769296646118164 - ], - [ - "▁efficiency", - -10.769577980041504 - ], - [ - "▁buildings", - -10.769611358642578 - ], - [ - "69", - 
-10.769723892211914 - ], - [ - "▁sens", - -10.769858360290527 - ], - [ - "▁Video", - -10.770115852355957 - ], - [ - "▁destination", - -10.770181655883789 - ], - [ - "▁female", - -10.770319938659668 - ], - [ - "▁supporting", - -10.770674705505371 - ], - [ - "▁signs", - -10.77077865600586 - ], - [ - "▁appeal", - -10.770784378051758 - ], - [ - "76", - -10.77110481262207 - ], - [ - "▁favourite", - -10.771612167358398 - ], - [ - "ock", - -10.771702766418457 - ], - [ - "▁readers", - -10.771757125854492 - ], - [ - "▁Did", - -10.771868705749512 - ], - [ - "rou", - -10.772045135498047 - ], - [ - "PA", - -10.77222728729248 - ], - [ - "▁Jean", - -10.772480964660645 - ], - [ - "▁Em", - -10.772586822509766 - ], - [ - "pass", - -10.77280330657959 - ], - [ - "▁Zi", - -10.773090362548828 - ], - [ - "▁între", - -10.773261070251465 - ], - [ - "▁fly", - -10.773427963256836 - ], - [ - "mos", - -10.773666381835938 - ], - [ - "▁emotional", - -10.773860931396484 - ], - [ - "asse", - -10.774768829345703 - ], - [ - "▁sessions", - -10.775086402893066 - ], - [ - "▁symptoms", - -10.77564811706543 - ], - [ - "▁died", - -10.776217460632324 - ], - [ - "▁seconds", - -10.776628494262695 - ], - [ - "▁procedure", - -10.777206420898438 - ], - [ - "▁express", - -10.777420997619629 - ], - [ - "▁două", - -10.777885437011719 - ], - [ - "▁valid", - -10.778393745422363 - ], - [ - "▁euro", - -10.7788667678833 - ], - [ - "▁interests", - -10.779032707214355 - ], - [ - "Having", - -10.779237747192383 - ], - [ - "▁hundreds", - -10.779669761657715 - ], - [ - "grad", - -10.780023574829102 - ], - [ - "▁neuen", - -10.780084609985352 - ], - [ - "▁cook", - -10.780552864074707 - ], - [ - "▁pur", - -10.780834197998047 - ], - [ - "▁charges", - -10.781024932861328 - ], - [ - "sche", - -10.78118896484375 - ], - [ - "▁smile", - -10.781468391418457 - ], - [ - "▁festival", - -10.781611442565918 - ], - [ - "cho", - -10.781672477722168 - ], - [ - "▁£", - -10.781937599182129 - ], - [ - "cht", - -10.78201675415039 - ], - [ - 
"▁macht", - -10.782021522521973 - ], - [ - "▁Wasser", - -10.782028198242188 - ], - [ - "▁Cap", - -10.78226375579834 - ], - [ - "▁Learn", - -10.78274154663086 - ], - [ - "▁load", - -10.783162117004395 - ], - [ - "▁aici", - -10.783225059509277 - ], - [ - "▁Ch", - -10.784143447875977 - ], - [ - "▁cycle", - -10.784223556518555 - ], - [ - "▁carried", - -10.784337997436523 - ], - [ - "▁jusqu", - -10.784517288208008 - ], - [ - "stein", - -10.78505802154541 - ], - [ - "ski", - -10.78513240814209 - ], - [ - "cap", - -10.78579330444336 - ], - [ - "▁Bal", - -10.785852432250977 - ], - [ - "▁minor", - -10.786053657531738 - ], - [ - "77", - -10.786175727844238 - ], - [ - "▁considering", - -10.78632640838623 - ], - [ - "innen", - -10.78644847869873 - ], - [ - "▁greatest", - -10.787055015563965 - ], - [ - "▁Training", - -10.787137031555176 - ], - [ - "08", - -10.787307739257812 - ], - [ - "▁significantly", - -10.787607192993164 - ], - [ - "gé", - -10.787728309631348 - ], - [ - "▁dumpster", - -10.788351058959961 - ], - [ - "▁allem", - -10.788930892944336 - ], - [ - "▁bonus", - -10.7889404296875 - ], - [ - "▁guy", - -10.789036750793457 - ], - [ - "fel", - -10.78904914855957 - ], - [ - "▁lifestyle", - -10.789241790771484 - ], - [ - "▁Bro", - -10.78961181640625 - ], - [ - "▁implement", - -10.789687156677246 - ], - [ - "lock", - -10.790046691894531 - ], - [ - "▁Earth", - -10.790142059326172 - ], - [ - "kar", - -10.790733337402344 - ], - [ - "▁invest", - -10.790833473205566 - ], - [ - "▁river", - -10.790933609008789 - ], - [ - "▁accurate", - -10.791494369506836 - ], - [ - "▁mu", - -10.791579246520996 - ], - [ - "▁celebrate", - -10.792119979858398 - ], - [ - "▁ran", - -10.79256820678711 - ], - [ - "▁bigger", - -10.792988777160645 - ], - [ - "▁Mer", - -10.793476104736328 - ], - [ - "▁millions", - -10.793486595153809 - ], - [ - "▁partie", - -10.793563842773438 - ], - [ - "▁dazu", - -10.793951988220215 - ], - [ - "▁Full", - -10.794130325317383 - ], - [ - "gie", - -10.794207572937012 - ], - 
[ - "bot", - -10.794373512268066 - ], - [ - "roll", - -10.79472827911377 - ], - [ - "▁Women", - -10.795303344726562 - ], - [ - "▁compare", - -10.796135902404785 - ], - [ - "▁van", - -10.796503067016602 - ], - [ - "▁apps", - -10.796521186828613 - ], - [ - "PC", - -10.797050476074219 - ], - [ - "▁drei", - -10.79736042022705 - ], - [ - "▁maison", - -10.797588348388672 - ], - [ - "▁knows", - -10.797712326049805 - ], - [ - "rid", - -10.797972679138184 - ], - [ - "62", - -10.798396110534668 - ], - [ - "class", - -10.798508644104004 - ], - [ - "▁chez", - -10.798669815063477 - ], - [ - "char", - -10.798828125 - ], - [ - "88", - -10.798989295959473 - ], - [ - "▁cast", - -10.79948902130127 - ], - [ - "▁examples", - -10.79973030090332 - ], - [ - "▁Therefore", - -10.799823760986328 - ], - [ - "▁topics", - -10.799941062927246 - ], - [ - "with", - -10.80013656616211 - ], - [ - "▁Anti", - -10.800555229187012 - ], - [ - "how", - -10.800620079040527 - ], - [ - "▁whom", - -10.80094051361084 - ], - [ - "▁Deutschland", - -10.801124572753906 - ], - [ - "tine", - -10.80113697052002 - ], - [ - "▁CEO", - -10.801224708557129 - ], - [ - "▁truck", - -10.801350593566895 - ], - [ - "▁Which", - -10.8015718460083 - ], - [ - "erie", - -10.802017211914062 - ], - [ - "fect", - -10.802069664001465 - ], - [ - "bou", - -10.8026762008667 - ], - [ - "▁(1", - -10.802818298339844 - ], - [ - "sum", - -10.802980422973633 - ], - [ - "▁bonne", - -10.803068161010742 - ], - [ - "▁remaining", - -10.80321216583252 - ], - [ - "▁equal", - -10.803543090820312 - ], - [ - "▁engage", - -10.803561210632324 - ], - [ - "▁RE", - -10.803849220275879 - ], - [ - "style", - -10.804182052612305 - ], - [ - "▁urma", - -10.804337501525879 - ], - [ - "▁Grund", - -10.80496883392334 - ], - [ - "ür", - -10.8051176071167 - ], - [ - "▁font", - -10.805353164672852 - ], - [ - "▁assets", - -10.805916786193848 - ], - [ - "AL", - -10.806102752685547 - ], - [ - "▁rear", - -10.80635929107666 - ], - [ - "▁contemporary", - -10.80646800994873 - 
], - [ - "▁occur", - -10.8067045211792 - ], - [ - "rated", - -10.806941986083984 - ], - [ - "▁tight", - -10.807088851928711 - ], - [ - "▁machines", - -10.807921409606934 - ], - [ - "▁0.", - -10.808456420898438 - ], - [ - "▁Aber", - -10.808470726013184 - ], - [ - "sol", - -10.808517456054688 - ], - [ - "rü", - -10.80858039855957 - ], - [ - "▁2007", - -10.809479713439941 - ], - [ - "gg", - -10.809488296508789 - ], - [ - "▁unul", - -10.809691429138184 - ], - [ - "▁était", - -10.809908866882324 - ], - [ - "▁capture", - -10.809980392456055 - ], - [ - "▁command", - -10.810037612915039 - ], - [ - "▁wire", - -10.810425758361816 - ], - [ - "▁shift", - -10.810762405395508 - ], - [ - "▁bread", - -10.81084156036377 - ], - [ - "▁causes", - -10.810937881469727 - ], - [ - "PI", - -10.810938835144043 - ], - [ - "SC", - -10.811086654663086 - ], - [ - "▁lights", - -10.811190605163574 - ], - [ - "▁lived", - -10.811293601989746 - ], - [ - "mul", - -10.811446189880371 - ], - [ - "▁Cur", - -10.811917304992676 - ], - [ - "▁Richard", - -10.811973571777344 - ], - [ - "37", - -10.812638282775879 - ], - [ - "▁cup", - -10.812737464904785 - ], - [ - "▁fields", - -10.812983512878418 - ], - [ - "▁crusher", - -10.813389778137207 - ], - [ - "65", - -10.813774108886719 - ], - [ - "avons", - -10.813822746276855 - ], - [ - "▁gear", - -10.813835144042969 - ], - [ - "▁standing", - -10.813844680786133 - ], - [ - "▁thick", - -10.81445026397705 - ], - [ - "aff", - -10.815132141113281 - ], - [ - "ments", - -10.815434455871582 - ], - [ - "▁conflict", - -10.815728187561035 - ], - [ - "ität", - -10.815825462341309 - ], - [ - "▁worse", - -10.816295623779297 - ], - [ - "SE", - -10.816332817077637 - ], - [ - "imi", - -10.816459655761719 - ], - [ - "▁dating", - -10.817033767700195 - ], - [ - "Do", - -10.817073822021484 - ], - [ - "▁flexible", - -10.817093849182129 - ], - [ - "ologie", - -10.817131996154785 - ], - [ - "SU", - -10.817200660705566 - ], - [ - "▁contribute", - -10.817306518554688 - ], - [ - "▁denn", - 
-10.817428588867188 - ], - [ - "▁appointment", - -10.81746768951416 - ], - [ - "▁ticket", - -10.817523002624512 - ], - [ - "bed", - -10.817892074584961 - ], - [ - "▁2019.", - -10.817936897277832 - ], - [ - "▁tasks", - -10.81871223449707 - ], - [ - "▁carbon", - -10.818734169006348 - ], - [ - "▁situations", - -10.819400787353516 - ], - [ - "MA", - -10.819402694702148 - ], - [ - "▁portion", - -10.819498062133789 - ], - [ - "▁urban", - -10.819585800170898 - ], - [ - "▁Canadian", - -10.819805145263672 - ], - [ - "▁Bur", - -10.819937705993652 - ], - [ - "▁pack", - -10.81995964050293 - ], - [ - "▁effet", - -10.819992065429688 - ], - [ - "▁Ball", - -10.82008171081543 - ], - [ - "▁timpul", - -10.82014274597168 - ], - [ - "▁owned", - -10.820211410522461 - ], - [ - "▁surprise", - -10.820413589477539 - ], - [ - "▁Mu", - -10.820582389831543 - ], - [ - "▁decades", - -10.821001052856445 - ], - [ - "▁affected", - -10.821728706359863 - ], - [ - "▁proven", - -10.821732521057129 - ], - [ - "▁Fe", - -10.821990966796875 - ], - [ - "zy", - -10.822042465209961 - ], - [ - "42", - -10.822175979614258 - ], - [ - "▁trend", - -10.8223876953125 - ], - [ - "▁autres", - -10.82262897491455 - ], - [ - "No", - -10.823028564453125 - ], - [ - "▁nine", - -10.823565483093262 - ], - [ - "ON", - -10.82376480102539 - ], - [ - "NE", - -10.823953628540039 - ], - [ - "oli", - -10.824359893798828 - ], - [ - "▁Daniel", - -10.824434280395508 - ], - [ - "▁spa", - -10.824939727783203 - ], - [ - "▁messages", - -10.825084686279297 - ], - [ - "PS", - -10.825183868408203 - ], - [ - "47", - -10.825703620910645 - ], - [ - "▁doch", - -10.826032638549805 - ], - [ - "▁improvement", - -10.826187133789062 - ], - [ - "▁mountain", - -10.826350212097168 - ], - [ - "▁Room", - -10.826451301574707 - ], - [ - "▁edition", - -10.826546669006348 - ], - [ - "▁musical", - -10.826712608337402 - ], - [ - "CP", - -10.827024459838867 - ], - [ - "▁Mill", - -10.827027320861816 - ], - [ - "▁steht", - -10.827740669250488 - ], - [ - 
"▁determined", - -10.828083038330078 - ], - [ - "you", - -10.828392028808594 - ], - [ - "weg", - -10.828554153442383 - ], - [ - "▁Digital", - -10.828624725341797 - ], - [ - "▁filter", - -10.828903198242188 - ], - [ - "▁youth", - -10.829047203063965 - ], - [ - "▁assessment", - -10.829301834106445 - ], - [ - "▁butter", - -10.829370498657227 - ], - [ - "▁Watch", - -10.829427719116211 - ], - [ - "▁zusammen", - -10.829471588134766 - ], - [ - "▁View", - -10.829606056213379 - ], - [ - "09", - -10.829649925231934 - ], - [ - "▁sole", - -10.829816818237305 - ], - [ - ".00", - -10.830018997192383 - ], - [ - "33", - -10.83015251159668 - ], - [ - "▁export", - -10.830229759216309 - ], - [ - "ery", - -10.830373764038086 - ], - [ - "▁zurück", - -10.830426216125488 - ], - [ - "▁walls", - -10.83048152923584 - ], - [ - "▁recognize", - -10.8306884765625 - ], - [ - "law", - -10.830801963806152 - ], - [ - "▁parent", - -10.830863952636719 - ], - [ - "ST", - -10.831357955932617 - ], - [ - "▁description", - -10.831669807434082 - ], - [ - "MS", - -10.831887245178223 - ], - [ - "SM", - -10.83189582824707 - ], - [ - "▁Finally", - -10.831940650939941 - ], - [ - "▁hardware", - -10.831965446472168 - ], - [ - "ident", - -10.832464218139648 - ], - [ - "▁brown", - -10.832566261291504 - ], - [ - "▁kinds", - -10.832950592041016 - ], - [ - "▁Arts", - -10.83297061920166 - ], - [ - "▁concert", - -10.83341121673584 - ], - [ - "▁sec", - -10.83342456817627 - ], - [ - "▁represent", - -10.833512306213379 - ], - [ - "▁institutions", - -10.833597183227539 - ], - [ - "▁fur", - -10.833998680114746 - ], - [ - "▁Support", - -10.83403205871582 - ], - [ - "87", - -10.834076881408691 - ], - [ - "▁ease", - -10.834178924560547 - ], - [ - "▁feels", - -10.834218978881836 - ], - [ - "▁sheet", - -10.834342002868652 - ], - [ - "▁Though", - -10.83437442779541 - ], - [ - "▁propose", - -10.834381103515625 - ], - [ - "▁personnel", - -10.834409713745117 - ], - [ - "bie", - -10.834794044494629 - ], - [ - "▁contest", - 
-10.834836959838867 - ], - [ - "▁successfully", - -10.835152626037598 - ], - [ - "▁direkt", - -10.835397720336914 - ], - [ - "bietet", - -10.835597038269043 - ], - [ - "▁submit", - -10.835888862609863 - ], - [ - "▁sicher", - -10.835919380187988 - ], - [ - "▁Personal", - -10.83607006072998 - ], - [ - "94", - -10.836341857910156 - ], - [ - "61", - -10.836400985717773 - ], - [ - "▁Very", - -10.836540222167969 - ], - [ - "bol", - -10.836603164672852 - ], - [ - "▁ha", - -10.837089538574219 - ], - [ - "▁channel", - -10.8372220993042 - ], - [ - "mut", - -10.837289810180664 - ], - [ - "▁mouth", - -10.837342262268066 - ], - [ - "▁vast", - -10.837395668029785 - ], - [ - "▁Ob", - -10.837569236755371 - ], - [ - "lit", - -10.83763313293457 - ], - [ - "▁poly", - -10.837878227233887 - ], - [ - "▁trained", - -10.838102340698242 - ], - [ - "▁specialist", - -10.838122367858887 - ], - [ - "UL", - -10.83822250366211 - ], - [ - "▁seiner", - -10.838336944580078 - ], - [ - "SS", - -10.838627815246582 - ], - [ - "▁vacation", - -10.838672637939453 - ], - [ - "▁resume", - -10.839157104492188 - ], - [ - "▁constantly", - -10.839717864990234 - ], - [ - "▁treated", - -10.83986759185791 - ], - [ - "▁150", - -10.840936660766602 - ], - [ - "▁native", - -10.841246604919434 - ], - [ - "▁Russian", - -10.841329574584961 - ], - [ - "▁patterns", - -10.841371536254883 - ], - [ - "▁knowing", - -10.841670989990234 - ], - [ - "▁Pan", - -10.841682434082031 - ], - [ - "peri", - -10.841848373413086 - ], - [ - "aci", - -10.841864585876465 - ], - [ - "▁answers", - -10.842114448547363 - ], - [ - "▁heute", - -10.842985153198242 - ], - [ - "93", - -10.843056678771973 - ], - [ - "▁Winter", - -10.844083786010742 - ], - [ - "▁yes", - -10.844173431396484 - ], - [ - "SP", - -10.844185829162598 - ], - [ - "].", - -10.844388008117676 - ], - [ - "▁kein", - -10.844862937927246 - ], - [ - "▁introduce", - -10.8450927734375 - ], - [ - "-4", - -10.84555435180664 - ], - [ - "▁shoot", - -10.845762252807617 - ], - [ - "AR", - 
-10.84576416015625 - ], - [ - "▁receiving", - -10.845864295959473 - ], - [ - "▁intre", - -10.84702205657959 - ], - [ - "▁appeared", - -10.84708023071289 - ], - [ - "▁brother", - -10.847321510314941 - ], - [ - "▁extend", - -10.847765922546387 - ], - [ - "▁fara", - -10.848737716674805 - ], - [ - "▁kommt", - -10.848876953125 - ], - [ - "ali", - -10.848913192749023 - ], - [ - "▁numai", - -10.849047660827637 - ], - [ - "▁scientific", - -10.84913158416748 - ], - [ - "▁virtual", - -10.849145889282227 - ], - [ - "▁Ac", - -10.849513053894043 - ], - [ - "▁procedures", - -10.849631309509277 - ], - [ - "▁silver", - -10.849821090698242 - ], - [ - "▁leather", - -10.849979400634766 - ], - [ - "DA", - -10.85014820098877 - ], - [ - "▁executive", - -10.850263595581055 - ], - [ - "▁officials", - -10.850496292114258 - ], - [ - "▁agencies", - -10.850503921508789 - ], - [ - "▁Software", - -10.850540161132812 - ], - [ - "▁cor", - -10.850690841674805 - ], - [ - "Con", - -10.850741386413574 - ], - [ - "▁log", - -10.851066589355469 - ], - [ - "ț", - -10.851147651672363 - ], - [ - "02", - -10.851195335388184 - ], - [ - "▁7.", - -10.85245132446289 - ], - [ - "▁accepted", - -10.852483749389648 - ], - [ - "▁Berlin", - -10.852538108825684 - ], - [ - "ID", - -10.852582931518555 - ], - [ - "cot", - -10.852788925170898 - ], - [ - "▁employment", - -10.852799415588379 - ], - [ - "run", - -10.853020668029785 - ], - [ - "▁identified", - -10.853178977966309 - ], - [ - "96", - -10.853887557983398 - ], - [ - "▁déjà", - -10.853944778442383 - ], - [ - "▁cuisine", - -10.853952407836914 - ], - [ - "turi", - -10.854070663452148 - ], - [ - "▁Japanese", - -10.854316711425781 - ], - [ - "▁golf", - -10.854514122009277 - ], - [ - "▁Ki", - -10.854787826538086 - ], - [ - "▁carefully", - -10.854863166809082 - ], - [ - "▁remote", - -10.854973793029785 - ], - [ - "▁2018,", - -10.855148315429688 - ], - [ - "▁sus", - -10.855154991149902 - ], - [ - "tique", - -10.855293273925781 - ], - [ - "▁residential", - 
-10.855695724487305 - ], - [ - "97", - -10.855809211730957 - ], - [ - "▁Spring", - -10.855908393859863 - ], - [ - "▁Marketing", - -10.856186866760254 - ], - [ - "▁Control", - -10.85630989074707 - ], - [ - "var", - -10.856344223022461 - ], - [ - "▁historical", - -10.8563814163208 - ], - [ - "▁freedom", - -10.856423377990723 - ], - [ - "sure", - -10.856426239013672 - ], - [ - "▁broken", - -10.856796264648438 - ], - [ - "▁criminal", - -10.856949806213379 - ], - [ - "▁innovation", - -10.857075691223145 - ], - [ - "▁Italian", - -10.857192039489746 - ], - [ - "sper", - -10.857282638549805 - ], - [ - "▁cake", - -10.857653617858887 - ], - [ - "▁candidates", - -10.857894897460938 - ], - [ - "▁sizes", - -10.858267784118652 - ], - [ - "pel", - -10.858366966247559 - ], - [ - "▁frequently", - -10.85889720916748 - ], - [ - "▁planet", - -10.859138488769531 - ], - [ - "▁writer", - -10.859519958496094 - ], - [ - "1,", - -10.859569549560547 - ], - [ - "uvent", - -10.85959529876709 - ], - [ - "▁awareness", - -10.859807968139648 - ], - [ - "name", - -10.859954833984375 - ], - [ - "▁Children", - -10.859980583190918 - ], - [ - "▁relatively", - -10.860311508178711 - ], - [ - "▁pu", - -10.860321998596191 - ], - [ - "▁quiet", - -10.86038875579834 - ], - [ - "▁planned", - -10.860716819763184 - ], - [ - "▁election", - -10.861419677734375 - ], - [ - "▁6.", - -10.861761093139648 - ], - [ - "▁broad", - -10.861772537231445 - ], - [ - "▁skill", - -10.861835479736328 - ], - [ - "▁reasonable", - -10.862037658691406 - ], - [ - "▁Fort", - -10.862283706665039 - ], - [ - "▁aceea", - -10.862407684326172 - ], - [ - "▁arrived", - -10.86263370513916 - ], - [ - "▁payments", - -10.862680435180664 - ], - [ - "ack", - -10.862700462341309 - ], - [ - "▁Ort", - -10.863354682922363 - ], - [ - "▁investors", - -10.863364219665527 - ], - [ - "▁operate", - -10.86351203918457 - ], - [ - "ME", - -10.863556861877441 - ], - [ - "dic", - -10.863683700561523 - ], - [ - "▁foods", - -10.863731384277344 - ], - [ - "▁stick", - 
-10.863831520080566 - ], - [ - "▁agents", - -10.86412525177002 - ], - [ - "▁crowd", - -10.864175796508789 - ], - [ - "▁Students", - -10.864480972290039 - ], - [ - "▁concerned", - -10.864609718322754 - ], - [ - "test", - -10.864740371704102 - ], - [ - "▁designer", - -10.865334510803223 - ], - [ - "▁Conference", - -10.865593910217285 - ], - [ - "▁saving", - -10.866105079650879 - ], - [ - "▁recorded", - -10.866422653198242 - ], - [ - "▁proposed", - -10.866564750671387 - ], - [ - "▁ship", - -10.86657428741455 - ], - [ - "▁cred", - -10.867274284362793 - ], - [ - "▁Ci", - -10.867440223693848 - ], - [ - "RE", - -10.867619514465332 - ], - [ - "▁tradition", - -10.867753982543945 - ], - [ - "▁worldwide", - -10.867779731750488 - ], - [ - "64", - -10.867944717407227 - ], - [ - "▁television", - -10.867989540100098 - ], - [ - "▁projet", - -10.868102073669434 - ], - [ - "ency", - -10.868487358093262 - ], - [ - "▁struggle", - -10.868514060974121 - ], - [ - "▁twice", - -10.868955612182617 - ], - [ - "▁Off", - -10.869234085083008 - ], - [ - "▁begins", - -10.869577407836914 - ], - [ - "key", - -10.869794845581055 - ], - [ - "▁Table", - -10.869963645935059 - ], - [ - "▁demande", - -10.870177268981934 - ], - [ - "▁liquid", - -10.870441436767578 - ], - [ - "meter", - -10.870684623718262 - ], - [ - "▁2001", - -10.871190071105957 - ], - [ - "▁willing", - -10.871660232543945 - ], - [ - "▁medicine", - -10.871707916259766 - ], - [ - "▁expand", - -10.871747970581055 - ], - [ - "▁2004", - -10.871804237365723 - ], - [ - "▁2002", - -10.872016906738281 - ], - [ - "▁accord", - -10.872292518615723 - ], - [ - "▁Chris", - -10.872446060180664 - ], - [ - "▁prove", - -10.872543334960938 - ], - [ - "ston", - -10.872740745544434 - ], - [ - "mettre", - -10.872800827026367 - ], - [ - "▁moments", - -10.873537063598633 - ], - [ - "tik", - -10.87368392944336 - ], - [ - "such", - -10.874055862426758 - ], - [ - "2.", - -10.874431610107422 - ], - [ - "▁UN", - -10.874561309814453 - ], - [ - "▁jump", - 
-10.874737739562988 - ], - [ - "▁dish", - -10.87539291381836 - ], - [ - "▁Key", - -10.875663757324219 - ], - [ - "▁challenging", - -10.875975608825684 - ], - [ - "▁domestic", - -10.876410484313965 - ], - [ - "▁impressive", - -10.876752853393555 - ], - [ - "iger", - -10.877022743225098 - ], - [ - "▁Ram", - -10.877157211303711 - ], - [ - "▁doit", - -10.877263069152832 - ], - [ - "▁concrete", - -10.87734317779541 - ], - [ - "▁Unternehmen", - -10.877397537231445 - ], - [ - "▁LED", - -10.877429008483887 - ], - [ - "▁trouver", - -10.877533912658691 - ], - [ - "▁fundamental", - -10.877875328063965 - ], - [ - "▁implementation", - -10.878121376037598 - ], - [ - "85", - -10.878247261047363 - ], - [ - "▁hosting", - -10.87856388092041 - ], - [ - "▁Game", - -10.878691673278809 - ], - [ - "▁taught", - -10.878981590270996 - ], - [ - "tung", - -10.879016876220703 - ], - [ - "ront", - -10.87940502166748 - ], - [ - "▁shoes", - -10.879639625549316 - ], - [ - "79", - -10.8797607421875 - ], - [ - "▁stunning", - -10.879778861999512 - ], - [ - "▁Congress", - -10.880142211914062 - ], - [ - "▁Ent", - -10.880278587341309 - ], - [ - "▁Wer", - -10.880607604980469 - ], - [ - "▁alt", - -10.880608558654785 - ], - [ - "ör", - -10.880699157714844 - ], - [ - "▁calm", - -10.8808012008667 - ], - [ - "46", - -10.881132125854492 - ], - [ - "▁Daca", - -10.881404876708984 - ], - [ - "71", - -10.881938934326172 - ], - [ - "▁Dec", - -10.882392883300781 - ], - [ - "▁Fo", - -10.882437705993652 - ], - [ - "▁defense", - -10.88313102722168 - ], - [ - "▁expectations", - -10.883166313171387 - ], - [ - "▁Alle", - -10.88318920135498 - ], - [ - "▁brief", - -10.883691787719727 - ], - [ - "▁Hospital", - -10.883975982666016 - ], - [ - "▁sides", - -10.884121894836426 - ], - [ - "▁yellow", - -10.884140014648438 - ], - [ - "lei", - -10.88451862335205 - ], - [ - "▁speaking", - -10.884589195251465 - ], - [ - "▁crucial", - -10.885198593139648 - ], - [ - "▁Town", - -10.8854341506958 - ], - [ - "▁married", - 
-10.885574340820312 - ], - [ - "▁acesta", - -10.885583877563477 - ], - [ - "▁noted", - -10.885611534118652 - ], - [ - "▁Word", - -10.885659217834473 - ], - [ - "▁conducted", - -10.885963439941406 - ], - [ - "▁decor", - -10.886249542236328 - ], - [ - "kon", - -10.886565208435059 - ], - [ - "▁supplies", - -10.8866605758667 - ], - [ - "▁adventure", - -10.886691093444824 - ], - [ - "▁exhibition", - -10.887163162231445 - ], - [ - "heit", - -10.887300491333008 - ], - [ - "▁36", - -10.88744831085205 - ], - [ - "eria", - -10.887505531311035 - ], - [ - "ines", - -10.887551307678223 - ], - [ - "ological", - -10.887582778930664 - ], - [ - "quel", - -10.88806438446045 - ], - [ - "▁Van", - -10.88825511932373 - ], - [ - "-19", - -10.88853645324707 - ], - [ - "2,", - -10.888566970825195 - ], - [ - "▁Band", - -10.888989448547363 - ], - [ - "▁soil", - -10.889184951782227 - ], - [ - "▁Tim", - -10.889599800109863 - ], - [ - "▁NOT", - -10.88968563079834 - ], - [ - "▁pilot", - -10.889753341674805 - ], - [ - "▁Sh", - -10.889774322509766 - ], - [ - "Ho", - -10.890361785888672 - ], - [ - "CA", - -10.890509605407715 - ], - [ - "▁Eu", - -10.890745162963867 - ], - [ - "▁committee", - -10.890829086303711 - ], - [ - "▁Store", - -10.891075134277344 - ], - [ - "▁joint", - -10.89111614227295 - ], - [ - "▁Op", - -10.891315460205078 - ], - [ - "▁Jack", - -10.891985893249512 - ], - [ - "quality", - -10.89216423034668 - ], - [ - "▁Has", - -10.892489433288574 - ], - [ - "▁wenig", - -10.892507553100586 - ], - [ - "hood", - -10.892545700073242 - ], - [ - "▁Class", - -10.892582893371582 - ], - [ - "rus", - -10.892773628234863 - ], - [ - "▁grown", - -10.89294719696045 - ], - [ - "▁About", - -10.893518447875977 - ], - [ - "▁sum", - -10.893942832946777 - ], - [ - "▁Fair", - -10.893946647644043 - ], - [ - "SA", - -10.894149780273438 - ], - [ - "92", - -10.894185066223145 - ], - [ - "▁fourth", - -10.894354820251465 - ], - [ - "▁featured", - -10.894384384155273 - ], - [ - "▁Pen", - -10.89444637298584 - ], - [ 
- "▁natürlich", - -10.894885063171387 - ], - [ - "ched", - -10.894901275634766 - ], - [ - "▁ban", - -10.895112991333008 - ], - [ - "anne", - -10.89522647857666 - ], - [ - "▁theory", - -10.895413398742676 - ], - [ - "bin", - -10.895438194274902 - ], - [ - "iers", - -10.895819664001465 - ], - [ - "▁strategic", - -10.895903587341309 - ], - [ - "▁jours", - -10.895956039428711 - ], - [ - "▁communicate", - -10.896124839782715 - ], - [ - "▁pin", - -10.896320343017578 - ], - [ - "▁Bon", - -10.89721393585205 - ], - [ - "kom", - -10.897290229797363 - ], - [ - "-5", - -10.898177146911621 - ], - [ - "▁degrees", - -10.898643493652344 - ], - [ - "▁entertainment", - -10.899014472961426 - ], - [ - "ară", - -10.899248123168945 - ], - [ - "ales", - -10.899425506591797 - ], - [ - "▁pendant", - -10.89954662322998 - ], - [ - "▁Series", - -10.899575233459473 - ], - [ - "▁holds", - -10.899592399597168 - ], - [ - "▁Mini", - -10.899828910827637 - ], - [ - "▁Obama", - -10.899898529052734 - ], - [ - "▁conform", - -10.900163650512695 - ], - [ - "-10", - -10.900216102600098 - ], - [ - "▁preparation", - -10.9009370803833 - ], - [ - "▁autre", - -10.90105152130127 - ], - [ - "▁mortgage", - -10.901155471801758 - ], - [ - "▁Kan", - -10.901508331298828 - ], - [ - "▁typical", - -10.901538848876953 - ], - [ - "01", - -10.901711463928223 - ], - [ - "▁Review", - -10.901862144470215 - ], - [ - "▁laptop", - -10.902127265930176 - ], - [ - "CR", - -10.902610778808594 - ], - [ - "▁thread", - -10.90265941619873 - ], - [ - "BS", - -10.902661323547363 - ], - [ - "▁upper", - -10.902700424194336 - ], - [ - "▁searching", - -10.902932167053223 - ], - [ - "▁pen", - -10.903214454650879 - ], - [ - "▁Middle", - -10.90333080291748 - ], - [ - "73", - -10.903359413146973 - ], - [ - "▁leg", - -10.903650283813477 - ], - [ - "onic", - -10.904272079467773 - ], - [ - "IS", - -10.904356956481934 - ], - [ - "▁Kar", - -10.904623985290527 - ], - [ - "anz", - -10.9046630859375 - ], - [ - "▁circuit", - -10.904901504516602 - ], - [ - 
"▁Casino", - -10.905384063720703 - ], - [ - "07", - -10.90584659576416 - ], - [ - "▁petit", - -10.905906677246094 - ], - [ - "TV", - -10.905978202819824 - ], - [ - "level", - -10.906311988830566 - ], - [ - "▁Point", - -10.906312942504883 - ], - [ - "rau", - -10.906474113464355 - ], - [ - "▁cabinet", - -10.906991958618164 - ], - [ - "▁failed", - -10.907042503356934 - ], - [ - "▁stated", - -10.907126426696777 - ], - [ - "LA", - -10.907461166381836 - ], - [ - "▁privacy", - -10.907596588134766 - ], - [ - "vol", - -10.907901763916016 - ], - [ - "ativ", - -10.908151626586914 - ], - [ - "▁matters", - -10.908210754394531 - ], - [ - "▁Mor", - -10.908555030822754 - ], - [ - "▁Ur", - -10.90860652923584 - ], - [ - "view", - -10.908968925476074 - ], - [ - "▁consultation", - -10.90921688079834 - ], - [ - "TS", - -10.909296989440918 - ], - [ - "▁apartment", - -10.909412384033203 - ], - [ - "▁integrated", - -10.909425735473633 - ], - [ - "74", - -10.909669876098633 - ], - [ - "▁Through", - -10.909710884094238 - ], - [ - "▁kick", - -10.909798622131348 - ], - [ - "▁perioada", - -10.90993881225586 - ], - [ - "▁entirely", - -10.909953117370605 - ], - [ - "▁impossible", - -10.91015911102295 - ], - [ - "▁consideration", - -10.910268783569336 - ], - [ - "▁Alt", - -10.91054916381836 - ], - [ - "▁Come", - -10.911089897155762 - ], - [ - "▁outstanding", - -10.911276817321777 - ], - [ - "83", - -10.911727905273438 - ], - [ - "▁prezent", - -10.911859512329102 - ], - [ - "▁Local", - -10.911993980407715 - ], - [ - "▁Camp", - -10.912056922912598 - ], - [ - "▁bear", - -10.912067413330078 - ], - [ - "enden", - -10.912262916564941 - ], - [ - "life", - -10.91236686706543 - ], - [ - "▁Haus", - -10.912516593933105 - ], - [ - "▁William", - -10.912644386291504 - ], - [ - "“,", - -10.912665367126465 - ], - [ - "▁Instagram", - -10.91285514831543 - ], - [ - "▁solve", - -10.913195610046387 - ], - [ - "▁Ze", - -10.913431167602539 - ], - [ - "▁everyday", - -10.91357135772705 - ], - [ - "bla", - 
-10.913615226745605 - ], - [ - "eng", - -10.913662910461426 - ], - [ - "ough", - -10.914246559143066 - ], - [ - "84", - -10.914483070373535 - ], - [ - "?\"", - -10.914599418640137 - ], - [ - "rely", - -10.91476821899414 - ], - [ - "TH", - -10.914841651916504 - ], - [ - "lang", - -10.91511058807373 - ], - [ - "82", - -10.915817260742188 - ], - [ - "▁removal", - -10.91589641571045 - ], - [ - "ală", - -10.915956497192383 - ], - [ - "▁circumstances", - -10.916097640991211 - ], - [ - "ente", - -10.91622257232666 - ], - [ - "▁lieu", - -10.91645336151123 - ], - [ - "▁2016.", - -10.91710376739502 - ], - [ - "▁ales", - -10.917342185974121 - ], - [ - "▁pure", - -10.917482376098633 - ], - [ - "▁choosing", - -10.917590141296387 - ], - [ - "▁Russia", - -10.917698860168457 - ], - [ - "amp", - -10.917703628540039 - ], - [ - "▁Santa", - -10.91788387298584 - ], - [ - "▁happening", - -10.918203353881836 - ], - [ - "▁crew", - -10.91822338104248 - ], - [ - "▁lei", - -10.91855239868164 - ], - [ - "IP", - -10.91858196258545 - ], - [ - "RO", - -10.919425964355469 - ], - [ - "▁resort", - -10.919514656066895 - ], - [ - "ened", - -10.919689178466797 - ], - [ - "MB", - -10.920031547546387 - ], - [ - "▁styles", - -10.920052528381348 - ], - [ - "▁dernier", - -10.920533180236816 - ], - [ - "uck", - -10.920699119567871 - ], - [ - "▁Guide", - -10.920710563659668 - ], - [ - "fic", - -10.92096996307373 - ], - [ - "▁fitness", - -10.921977996826172 - ], - [ - "▁healthcare", - -10.92223072052002 - ], - [ - "mol", - -10.92237663269043 - ], - [ - "▁vis", - -10.922721862792969 - ], - [ - "▁atmosphere", - -10.922972679138184 - ], - [ - "▁motion", - -10.922989845275879 - ], - [ - "▁closer", - -10.923114776611328 - ], - [ - "▁SA", - -10.92335319519043 - ], - [ - "▁default", - -10.923371315002441 - ], - [ - "▁architecture", - -10.923471450805664 - ], - [ - "iile", - -10.923528671264648 - ], - [ - "zel", - -10.923675537109375 - ], - [ - "cla", - -10.92387866973877 - ], - [ - "OP", - -10.924382209777832 - ], - 
[ - "▁west", - -10.924965858459473 - ], - [ - "▁Energy", - -10.925613403320312 - ], - [ - "▁positions", - -10.925777435302734 - ], - [ - "▁contrast", - -10.925885200500488 - ], - [ - "▁serves", - -10.92605972290039 - ], - [ - "cup", - -10.926340103149414 - ], - [ - "▁rose", - -10.926485061645508 - ], - [ - "pers", - -10.92664623260498 - ], - [ - "▁noise", - -10.926846504211426 - ], - [ - "mont", - -10.92690658569336 - ], - [ - "#", - -10.927061080932617 - ], - [ - "lies", - -10.927326202392578 - ], - [ - "pat", - -10.927718162536621 - ], - [ - "IC", - -10.927956581115723 - ], - [ - "arc", - -10.927989959716797 - ], - [ - "▁winner", - -10.928524017333984 - ], - [ - "tent", - -10.928732872009277 - ], - [ - "▁Preis", - -10.929106712341309 - ], - [ - "▁vin", - -10.929254531860352 - ], - [ - "blo", - -10.92929458618164 - ], - [ - "ție", - -10.929520606994629 - ], - [ - "▁OR", - -10.930315017700195 - ], - [ - "▁Buch", - -10.930798530578613 - ], - [ - "▁nearby", - -10.931190490722656 - ], - [ - "▁meetings", - -10.931290626525879 - ], - [ - "▁48", - -10.931465148925781 - ], - [ - "▁quand", - -10.93152904510498 - ], - [ - "▁usual", - -10.931936264038086 - ], - [ - "▁weitere", - -10.932539939880371 - ], - [ - "▁caught", - -10.932571411132812 - ], - [ - "▁issued", - -10.932626724243164 - ], - [ - "ști", - -10.932896614074707 - ], - [ - "upcoming", - -10.933232307434082 - ], - [ - "▁agreed", - -10.933233261108398 - ], - [ - "place", - -10.933353424072266 - ], - [ - "▁Brand", - -10.93344497680664 - ], - [ - "▁relation", - -10.933969497680664 - ], - [ - "▁atât", - -10.934090614318848 - ], - [ - "▁Tre", - -10.934176445007324 - ], - [ - "▁lors", - -10.934438705444336 - ], - [ - "▁adopt", - -10.934452056884766 - ], - [ - "▁celui", - -10.93458366394043 - ], - [ - "cken", - -10.93505859375 - ], - [ - "▁partnership", - -10.935284614562988 - ], - [ - "?”", - -10.935376167297363 - ], - [ - "▁ba", - -10.935746192932129 - ], - [ - "▁ID", - -10.935832023620605 - ], - [ - "▁consistent", - 
-10.935835838317871 - ], - [ - "▁Ya", - -10.935941696166992 - ], - [ - "▁Academy", - -10.936182022094727 - ], - [ - "cial", - -10.936230659484863 - ], - [ - "1%", - -10.936366081237793 - ], - [ - "▁mise", - -10.936684608459473 - ], - [ - "▁gute", - -10.936728477478027 - ], - [ - "gli", - -10.936939239501953 - ], - [ - "▁Bu", - -10.937679290771484 - ], - [ - "▁reduction", - -10.937917709350586 - ], - [ - "acy", - -10.938126564025879 - ], - [ - "aga", - -10.938161849975586 - ], - [ - "▁Sc", - -10.938273429870605 - ], - [ - "▁Informationen", - -10.938308715820312 - ], - [ - "▁kommen", - -10.938352584838867 - ], - [ - "press", - -10.93837833404541 - ], - [ - "▁bridge", - -10.938379287719727 - ], - [ - "▁qualified", - -10.938671112060547 - ], - [ - "position", - -10.938821792602539 - ], - [ - "▁combat", - -10.938933372497559 - ], - [ - "!\"", - -10.938993453979492 - ], - [ - "eva", - -10.939217567443848 - ], - [ - "oase", - -10.939380645751953 - ], - [ - "▁inner", - -10.939410209655762 - ], - [ - "▁loans", - -10.939720153808594 - ], - [ - "made", - -10.939786911010742 - ], - [ - "▁Mexico", - -10.93993091583252 - ], - [ - "▁formal", - -10.940092086791992 - ], - [ - "▁fell", - -10.94021987915039 - ], - [ - "91", - -10.940524101257324 - ], - [ - "▁campus", - -10.9407320022583 - ], - [ - "ienne", - -10.940869331359863 - ], - [ - "▁framework", - -10.94105339050293 - ], - [ - "ncing", - -10.941157341003418 - ], - [ - "▁Para", - -10.941222190856934 - ], - [ - "▁password", - -10.941298484802246 - ], - [ - "▁sei", - -10.941422462463379 - ], - [ - "▁Cross", - -10.941532135009766 - ], - [ - "▁Ten", - -10.941873550415039 - ], - [ - "bank", - -10.941887855529785 - ], - [ - "▁gun", - -10.942000389099121 - ], - [ - "ient", - -10.942021369934082 - ], - [ - "▁usage", - -10.942176818847656 - ], - [ - "▁(2", - -10.942278861999512 - ], - [ - "Gra", - -10.942320823669434 - ], - [ - "▁prea", - -10.94253158569336 - ], - [ - "▁Als", - -10.942619323730469 - ], - [ - "▁finance", - 
-10.942638397216797 - ], - [ - "tate", - -10.942665100097656 - ], - [ - "ition", - -10.942703247070312 - ], - [ - "▁regulations", - -10.942741394042969 - ], - [ - "▁Professional", - -10.943001747131348 - ], - [ - "▁pl", - -10.94336986541748 - ], - [ - "▁SEO", - -10.943472862243652 - ], - [ - "▁trecut", - -10.943487167358398 - ], - [ - "▁aller", - -10.943509101867676 - ], - [ - "▁violence", - -10.943986892700195 - ], - [ - "▁membership", - -10.944117546081543 - ], - [ - "▁picked", - -10.944162368774414 - ], - [ - "▁collected", - -10.9443359375 - ], - [ - "▁extended", - -10.944449424743652 - ], - [ - "▁religious", - -10.944661140441895 - ], - [ - "▁salle", - -10.944767951965332 - ], - [ - "RA", - -10.944781303405762 - ], - [ - "▁blend", - -10.945232391357422 - ], - [ - "▁Min", - -10.94532299041748 - ], - [ - "kal", - -10.945887565612793 - ], - [ - "▁featuring", - -10.945902824401855 - ], - [ - "▁researchers", - -10.946263313293457 - ], - [ - "▁Search", - -10.946558952331543 - ], - [ - "CE", - -10.946675300598145 - ], - [ - "▁recognized", - -10.94682502746582 - ], - [ - "▁semi", - -10.94692611694336 - ], - [ - "▁exposure", - -10.94718074798584 - ], - [ - "grew", - -10.947466850280762 - ], - [ - "▁candidate", - -10.948250770568848 - ], - [ - "▁shares", - -10.948908805847168 - ], - [ - "▁edit", - -10.949745178222656 - ], - [ - "CS", - -10.949905395507812 - ], - [ - "▁Cl", - -10.950240135192871 - ], - [ - "▁Enjoy", - -10.951438903808594 - ], - [ - "▁hurt", - -10.951482772827148 - ], - [ - "▁bottle", - -10.951593399047852 - ], - [ - "▁Buy", - -10.95159912109375 - ], - [ - "▁superior", - -10.952286720275879 - ], - [ - "▁missed", - -10.952424049377441 - ], - [ - "▁workshop", - -10.952433586120605 - ], - [ - "action", - -10.952437400817871 - ], - [ - "ple", - -10.952699661254883 - ], - [ - "▁Schul", - -10.952814102172852 - ], - [ - "▁houses", - -10.953080177307129 - ], - [ - "▁2017,", - -10.953569412231445 - ], - [ - "▁killed", - -10.953750610351562 - ], - [ - "▁calendar", - 
-10.954306602478027 - ], - [ - "▁Mike", - -10.954597473144531 - ], - [ - "FA", - -10.954627990722656 - ], - [ - "nut", - -10.95487117767334 - ], - [ - "▁establish", - -10.955140113830566 - ], - [ - "▁alcohol", - -10.95514965057373 - ], - [ - "▁closely", - -10.955170631408691 - ], - [ - "▁MA", - -10.955381393432617 - ], - [ - "pul", - -10.955389022827148 - ], - [ - "▁defined", - -10.955666542053223 - ], - [ - "aires", - -10.955692291259766 - ], - [ - "▁Shi", - -10.955703735351562 - ], - [ - "▁plays", - -10.956303596496582 - ], - [ - "▁sister", - -10.95690631866455 - ], - [ - "▁cable", - -10.957179069519043 - ], - [ - "▁desk", - -10.957215309143066 - ], - [ - "▁apoi", - -10.957738876342773 - ], - [ - "▁identity", - -10.95785140991211 - ], - [ - "▁stars", - -10.957931518554688 - ], - [ - "▁fata", - -10.958008766174316 - ], - [ - "▁obvious", - -10.958330154418945 - ], - [ - "▁dental", - -10.95843505859375 - ], - [ - "AM", - -10.958802223205566 - ], - [ - "▁sharp", - -10.95881175994873 - ], - [ - "duc", - -10.959053993225098 - ], - [ - "▁manufacturer", - -10.95914077758789 - ], - [ - "!)", - -10.959270477294922 - ], - [ - "▁objects", - -10.959720611572266 - ], - [ - "▁Ag", - -10.959989547729492 - ], - [ - "referred", - -10.960195541381836 - ], - [ - "▁Ak", - -10.960308074951172 - ], - [ - "burg", - -10.960360527038574 - ], - [ - "▁nouveau", - -10.960854530334473 - ], - [ - "▁Pal", - -10.960994720458984 - ], - [ - "▁Arbeits", - -10.961280822753906 - ], - [ - "▁personally", - -10.961288452148438 - ], - [ - "▁Dé", - -10.961292266845703 - ], - [ - "▁import", - -10.961688041687012 - ], - [ - "▁justice", - -10.961913108825684 - ], - [ - "▁photography", - -10.962705612182617 - ], - [ - "▁portfolio", - -10.962841987609863 - ], - [ - "56", - -10.96314525604248 - ], - [ - "▁nouvelle", - -10.963293075561523 - ], - [ - "▁oven", - -10.964197158813477 - ], - [ - "▁400", - -10.964272499084473 - ], - [ - "▁mixed", - -10.964395523071289 - ], - [ - "▁relax", - -10.964427947998047 - ], - 
[ - "▁imp", - -10.964703559875488 - ], - [ - "▁».", - -10.964734077453613 - ], - [ - "▁mail", - -10.964777946472168 - ], - [ - "rage", - -10.964861869812012 - ], - [ - "nos", - -10.964974403381348 - ], - [ - "▁drugs", - -10.965195655822754 - ], - [ - "▁jede", - -10.965211868286133 - ], - [ - "▁einige", - -10.965232849121094 - ], - [ - "▁8.", - -10.965325355529785 - ], - [ - "ters", - -10.965412139892578 - ], - [ - "▁electrical", - -10.965432167053223 - ], - [ - "▁puis", - -10.965836524963379 - ], - [ - "▁films", - -10.965903282165527 - ], - [ - "41", - -10.966036796569824 - ], - [ - "▁moral", - -10.966398239135742 - ], - [ - "lage", - -10.966402053833008 - ], - [ - "▁spaces", - -10.966415405273438 - ], - [ - "▁Ed", - -10.966462135314941 - ], - [ - "▁classroom", - -10.966588020324707 - ], - [ - "▁große", - -10.966588973999023 - ], - [ - "▁baza", - -10.966887474060059 - ], - [ - "face", - -10.967308044433594 - ], - [ - "▁informed", - -10.967333793640137 - ], - [ - "▁improving", - -10.967477798461914 - ], - [ - "▁guidance", - -10.967880249023438 - ], - [ - "▁gallery", - -10.96800708770752 - ], - [ - "cular", - -10.968046188354492 - ], - [ - "53", - -10.968094825744629 - ], - [ - "Despite", - -10.968238830566406 - ], - [ - "▁forme", - -10.968304634094238 - ], - [ - "▁système", - -10.968415260314941 - ], - [ - "▁Win", - -10.968494415283203 - ], - [ - "▁Small", - -10.968537330627441 - ], - [ - "▁Mobile", - -10.968564987182617 - ], - [ - "▁tape", - -10.968606948852539 - ], - [ - "▁erhalten", - -10.968914985656738 - ], - [ - "▁movies", - -10.968928337097168 - ], - [ - "▁Unfortunately", - -10.968963623046875 - ], - [ - "▁Looking", - -10.96945858001709 - ], - [ - "▁guard", - -10.969584465026855 - ], - [ - "▁pr", - -10.969820976257324 - ], - [ - "▁confident", - -10.96988582611084 - ], - [ - "BA", - -10.970229148864746 - ], - [ - "bas", - -10.970272064208984 - ], - [ - "hum", - -10.97050666809082 - ], - [ - "ular", - -10.9705171585083 - ], - [ - "▁Still", - -10.970593452453613 
- ], - [ - "▁flavor", - -10.970656394958496 - ], - [ - "▁boost", - -10.970773696899414 - ], - [ - "▁division", - -10.970842361450195 - ], - [ - "ising", - -10.971006393432617 - ], - [ - "▁monitoring", - -10.971044540405273 - ], - [ - "▁Sen", - -10.97105884552002 - ], - [ - "▁https", - -10.971527099609375 - ], - [ - "mainly", - -10.971735000610352 - ], - [ - "play", - -10.972251892089844 - ], - [ - "▁dynamic", - -10.972357749938965 - ], - [ - "▁coup", - -10.972370147705078 - ], - [ - "▁carpet", - -10.972561836242676 - ], - [ - "iner", - -10.972846984863281 - ], - [ - "ral", - -10.97325611114502 - ], - [ - "iser", - -10.973320007324219 - ], - [ - "RC", - -10.9739990234375 - ], - [ - "▁definition", - -10.97475814819336 - ], - [ - "▁Za", - -10.974767684936523 - ], - [ - "friendly", - -10.974883079528809 - ], - [ - "43", - -10.975123405456543 - ], - [ - "link", - -10.975180625915527 - ], - [ - "▁Multi", - -10.97519302368164 - ], - [ - "▁einmal", - -10.975272178649902 - ], - [ - "▁stopped", - -10.975394248962402 - ], - [ - "vel", - -10.975456237792969 - ], - [ - "▁ongoing", - -10.975565910339355 - ], - [ - "▁ancient", - -10.976259231567383 - ], - [ - "take", - -10.976301193237305 - ], - [ - "cia", - -10.976432800292969 - ], - [ - "▁USB", - -10.976545333862305 - ], - [ - "▁attorney", - -10.976866722106934 - ], - [ - "▁slot", - -10.976866722106934 - ], - [ - "▁Line", - -10.97693157196045 - ], - [ - "rice", - -10.977087020874023 - ], - [ - "ify", - -10.977520942687988 - ], - [ - "ó", - -10.978260040283203 - ], - [ - "▁flash", - -10.978483200073242 - ], - [ - "▁extension", - -10.978555679321289 - ], - [ - "▁Ende", - -10.979022979736328 - ], - [ - "▁powder", - -10.979114532470703 - ], - [ - "ească", - -10.979143142700195 - ], - [ - "03", - -10.979327201843262 - ], - [ - "▁normally", - -10.979416847229004 - ], - [ - "▁pun", - -10.980108261108398 - ], - [ - "viewed", - -10.980138778686523 - ], - [ - "ssen", - -10.980896949768066 - ], - [ - "ache", - -10.981121063232422 - ], - [ 
- "ește", - -10.98122787475586 - ], - [ - "▁PA", - -10.981266021728516 - ], - [ - "FI", - -10.981945991516113 - ], - [ - "▁Frank", - -10.98198127746582 - ], - [ - "▁apa", - -10.98242473602295 - ], - [ - "▁coast", - -10.982614517211914 - ], - [ - "▁boy", - -10.982665061950684 - ], - [ - "lim", - -10.982902526855469 - ], - [ - "▁putin", - -10.983194351196289 - ], - [ - "▁script", - -10.983332633972168 - ], - [ - "▁noticed", - -10.9837007522583 - ], - [ - "▁dealing", - -10.983922004699707 - ], - [ - "▁Trans", - -10.984100341796875 - ], - [ - "▁border", - -10.984447479248047 - ], - [ - "▁reputation", - -10.984657287597656 - ], - [ - "-2", - -10.984662055969238 - ], - [ - "HS", - -10.984707832336426 - ], - [ - "▁supports", - -10.984724998474121 - ], - [ - "▁horse", - -10.985146522521973 - ], - [ - "nik", - -10.98520565032959 - ], - [ - "▁clothes", - -10.985234260559082 - ], - [ - "▁Card", - -10.985612869262695 - ], - [ - "▁relief", - -10.98595905303955 - ], - [ - "▁Visit", - -10.986259460449219 - ], - [ - "▁luni", - -10.986593246459961 - ], - [ - "81", - -10.986693382263184 - ], - [ - "qua", - -10.986945152282715 - ], - [ - "▁Comp", - -10.98697280883789 - ], - [ - "▁investigation", - -10.987137794494629 - ], - [ - "▁depth", - -10.987598419189453 - ], - [ - "▁earned", - -10.987709045410156 - ], - [ - "▁Ren", - -10.988090515136719 - ], - [ - "▁Dumnezeu", - -10.988107681274414 - ], - [ - "▁Joe", - -10.988210678100586 - ], - [ - "▁goods", - -10.988288879394531 - ], - [ - "▁Vol", - -10.988686561584473 - ], - [ - "▁certified", - -10.989118576049805 - ], - [ - "▁favor", - -10.989326477050781 - ], - [ - "▁Scott", - -10.989599227905273 - ], - [ - "▁protest", - -10.989802360534668 - ], - [ - "▁pace", - -10.989803314208984 - ], - [ - "▁Angeles", - -10.990368843078613 - ], - [ - "inch", - -10.99050521850586 - ], - [ - "▁charged", - -10.99052619934082 - ], - [ - "code", - -10.990968704223633 - ], - [ - "▁convenient", - -10.99138355255127 - ], - [ - "▁Nord", - -10.991556167602539 - 
], - [ - "▁yesterday", - -10.991691589355469 - ], - [ - "Dacă", - -10.99169635772705 - ], - [ - "▁Travel", - -10.991786003112793 - ], - [ - "▁kid", - -10.991941452026367 - ], - [ - "ction", - -10.991986274719238 - ], - [ - "▁groupe", - -10.992770195007324 - ], - [ - "pu", - -10.993056297302246 - ], - [ - "bzw", - -10.993196487426758 - ], - [ - "▁mixture", - -10.993513107299805 - ], - [ - "▁Farm", - -10.993715286254883 - ], - [ - "▁acces", - -10.993939399719238 - ], - [ - "matic", - -10.993950843811035 - ], - [ - "▁comparison", - -10.994006156921387 - ], - [ - "reich", - -10.994095802307129 - ], - [ - "pet", - -10.994502067565918 - ], - [ - "▁lit", - -10.994685173034668 - ], - [ - "▁organized", - -10.99476432800293 - ], - [ - "just", - -10.995564460754395 - ], - [ - "▁fellow", - -10.996004104614258 - ], - [ - "Ver", - -10.996209144592285 - ], - [ - "▁trends", - -10.99622631072998 - ], - [ - "▁evaluation", - -10.99626636505127 - ], - [ - "feld", - -10.99639892578125 - ], - [ - "▁Pu", - -10.99671459197998 - ], - [ - "▁equipped", - -10.99727725982666 - ], - [ - "▁catre", - -10.997278213500977 - ], - [ - "eck", - -10.997369766235352 - ], - [ - "▁facing", - -10.997998237609863 - ], - [ - "▁instrument", - -10.998361587524414 - ], - [ - "▁pleased", - -10.998507499694824 - ], - [ - "▁tap", - -10.998818397521973 - ], - [ - "dom", - -10.998826026916504 - ], - [ - "▁pump", - -10.999384880065918 - ], - [ - "▁functional", - -10.999429702758789 - ], - [ - "▁authority", - -10.999455451965332 - ], - [ - "▁experiment", - -10.999478340148926 - ], - [ - "LO", - -10.999529838562012 - ], - [ - "▁scheduled", - -10.999552726745605 - ], - [ - "halt", - -10.999604225158691 - ], - [ - "▁ceiling", - -10.999761581420898 - ], - [ - "▁Step", - -11.000310897827148 - ], - [ - "▁orders", - -11.00032901763916 - ], - [ - "▁speech", - -11.001046180725098 - ], - [ - "▁stands", - -11.001119613647461 - ], - [ - "▁disc", - -11.001920700073242 - ], - [ - "▁rec", - -11.001935958862305 - ], - [ - "▁Text", - 
-11.00243854522705 - ], - [ - "▁banks", - -11.00294017791748 - ], - [ - "▁oameni", - -11.003045082092285 - ], - [ - "▁communications", - -11.003194808959961 - ], - [ - "trag", - -11.003307342529297 - ], - [ - "▁trail", - -11.003803253173828 - ], - [ - "AN", - -11.00426197052002 - ], - [ - "▁Federal", - -11.004467964172363 - ], - [ - "▁quote", - -11.00455093383789 - ], - [ - "▁spus", - -11.004620552062988 - ], - [ - "▁managing", - -11.004990577697754 - ], - [ - "▁booking", - -11.00505256652832 - ], - [ - "▁Blog", - -11.005669593811035 - ], - [ - "▁tank", - -11.005681991577148 - ], - [ - "pon", - -11.005804061889648 - ], - [ - "GE", - -11.00582218170166 - ], - [ - "▁fiscal", - -11.005871772766113 - ], - [ - "▁satisfaction", - -11.006044387817383 - ], - [ - "cre", - -11.00614070892334 - ], - [ - "▁protected", - -11.006494522094727 - ], - [ - "▁enfants", - -11.006782531738281 - ], - [ - "▁dort", - -11.007554054260254 - ], - [ - "▁Mel", - -11.008041381835938 - ], - [ - "▁turns", - -11.00804615020752 - ], - [ - "▁savings", - -11.008106231689453 - ], - [ - "▁voir", - -11.008358001708984 - ], - [ - "▁Boston", - -11.008394241333008 - ], - [ - "▁debate", - -11.008469581604004 - ], - [ - "▁SO", - -11.008857727050781 - ], - [ - "▁tables", - -11.009193420410156 - ], - [ - "▁honest", - -11.009210586547852 - ], - [ - "mate", - -11.009283065795898 - ], - [ - "▁chart", - -11.0094633102417 - ], - [ - "decât", - -11.009682655334473 - ], - [ - "▁Radio", - -11.009685516357422 - ], - [ - "54", - -11.00986385345459 - ], - [ - "▁vol", - -11.010008811950684 - ], - [ - "last", - -11.010148048400879 - ], - [ - "▁tall", - -11.010408401489258 - ], - [ - "▁Should", - -11.010489463806152 - ], - [ - "▁sink", - -11.010525703430176 - ], - [ - "▁Right", - -11.010527610778809 - ], - [ - "▁male", - -11.010720252990723 - ], - [ - "▁Modern", - -11.010753631591797 - ], - [ - "▁indeed", - -11.010886192321777 - ], - [ - "▁Garden", - -11.011139869689941 - ], - [ - "▁Mod", - -11.011307716369629 - ], - [ - 
"▁turning", - -11.0115327835083 - ], - [ - "▁inches", - -11.011557579040527 - ], - [ - "▁Police", - -11.01183795928955 - ], - [ - "▁Pay", - -11.012016296386719 - ], - [ - "UE", - -11.0126371383667 - ], - [ - "mé", - -11.012652397155762 - ], - [ - "EE", - -11.013046264648438 - ], - [ - "▁cookies", - -11.013116836547852 - ], - [ - "rip", - -11.013351440429688 - ], - [ - "▁Motor", - -11.01352310180664 - ], - [ - "▁lung", - -11.01379680633545 - ], - [ - "▁Ap", - -11.013995170593262 - ], - [ - "▁sustainable", - -11.014066696166992 - ], - [ - "▁instant", - -11.014240264892578 - ], - [ - "▁Rose", - -11.014464378356934 - ], - [ - "▁Carolina", - -11.014906883239746 - ], - [ - "▁Help", - -11.014969825744629 - ], - [ - "IE", - -11.01535701751709 - ], - [ - "▁Jersey", - -11.015522956848145 - ], - [ - "▁Spanish", - -11.015586853027344 - ], - [ - "▁wheel", - -11.015660285949707 - ], - [ - "▁fishing", - -11.0158109664917 - ], - [ - "gram", - -11.015937805175781 - ], - [ - "▁ST", - -11.016227722167969 - ], - [ - "▁Nov", - -11.01632022857666 - ], - [ - "▁reporting", - -11.016362190246582 - ], - [ - "ked", - -11.016467094421387 - ], - [ - "▁Leben", - -11.016557693481445 - ], - [ - "▁organisation", - -11.016843795776367 - ], - [ - "▁tiny", - -11.017144203186035 - ], - [ - "▁Alex", - -11.017236709594727 - ], - [ - "▁obtained", - -11.017255783081055 - ], - [ - "▁Acest", - -11.017367362976074 - ], - [ - "▁dangerous", - -11.01749038696289 - ], - [ - "utter", - -11.017624855041504 - ], - [ - "▁rev", - -11.01801586151123 - ], - [ - "Un", - -11.018242835998535 - ], - [ - "▁revealed", - -11.018356323242188 - ], - [ - "▁decade", - -11.018709182739258 - ], - [ - "▁possibility", - -11.01945686340332 - ], - [ - "service", - -11.019577980041504 - ], - [ - "è", - -11.01966667175293 - ], - [ - "▁Chief", - -11.019674301147461 - ], - [ - "▁Durch", - -11.019795417785645 - ], - [ - "▁cadre", - -11.019843101501465 - ], - [ - "▁wearing", - -11.019845008850098 - ], - [ - "sized", - -11.01988410949707 - ], 
- [ - "LY", - -11.01989459991455 - ], - [ - "▁unser", - -11.019963264465332 - ], - [ - "▁2016,", - -11.019988059997559 - ], - [ - "▁fail", - -11.020028114318848 - ], - [ - "iques", - -11.020115852355957 - ], - [ - "▁Angel", - -11.020315170288086 - ], - [ - "▁transportation", - -11.020364761352539 - ], - [ - "▁dates", - -11.020395278930664 - ], - [ - "▁danger", - -11.020731925964355 - ], - [ - "▁forum", - -11.020828247070312 - ], - [ - "zug", - -11.020885467529297 - ], - [ - "▁filed", - -11.021199226379395 - ], - [ - "loc", - -11.021201133728027 - ], - [ - "éri", - -11.021234512329102 - ], - [ - "tribu", - -11.021393775939941 - ], - [ - "▁entered", - -11.021639823913574 - ], - [ - "▁porte", - -11.021928787231445 - ], - [ - "▁arts", - -11.021979331970215 - ], - [ - "▁reform", - -11.022001266479492 - ], - [ - "▁Main", - -11.022101402282715 - ], - [ - "▁dir", - -11.022111892700195 - ], - [ - "▁approval", - -11.022465705871582 - ], - [ - "▁juice", - -11.022750854492188 - ], - [ - "vier", - -11.022771835327148 - ], - [ - "▁nivel", - -11.02318000793457 - ], - [ - "▁returns", - -11.023423194885254 - ], - [ - "▁formed", - -11.023723602294922 - ], - [ - "▁combine", - -11.02436351776123 - ], - [ - "▁cours", - -11.024392127990723 - ], - [ - "▁Standard", - -11.024463653564453 - ], - [ - "▁certification", - -11.024677276611328 - ], - [ - "escu", - -11.024996757507324 - ], - [ - "▁achieved", - -11.025278091430664 - ], - [ - "▁Model", - -11.025280952453613 - ], - [ - "rul", - -11.025404930114746 - ], - [ - "▁Tage", - -11.025530815124512 - ], - [ - "▁injuries", - -11.02560806274414 - ], - [ - "▁Sal", - -11.025671005249023 - ], - [ - "▁expenses", - -11.025887489318848 - ], - [ - "▁cet", - -11.026009559631348 - ], - [ - "▁taxes", - -11.026028633117676 - ], - [ - "diesen", - -11.02626895904541 - ], - [ - "▁fairly", - -11.026638984680176 - ], - [ - "▁Access", - -11.026866912841797 - ], - [ - "wind", - -11.027122497558594 - ], - [ - "IM", - -11.027252197265625 - ], - [ - "ense", - 
-11.027548789978027 - ], - [ - "▁hang", - -11.027957916259766 - ], - [ - "▁citizens", - -11.028020858764648 - ], - [ - "3%", - -11.028101921081543 - ], - [ - "lum", - -11.028268814086914 - ], - [ - "▁discussed", - -11.028326034545898 - ], - [ - "AC", - -11.02841854095459 - ], - [ - "‘", - -11.0286865234375 - ], - [ - "▁Sol", - -11.028698921203613 - ], - [ - "06", - -11.028816223144531 - ], - [ - "stellen", - -11.029170989990234 - ], - [ - "▁participation", - -11.02917194366455 - ], - [ - "▁Box", - -11.029200553894043 - ], - [ - "▁bieten", - -11.029687881469727 - ], - [ - "▁Louis", - -11.029730796813965 - ], - [ - "▁lessons", - -11.029789924621582 - ], - [ - "▁visible", - -11.029966354370117 - ], - [ - "▁Cam", - -11.030128479003906 - ], - [ - "▁Ban", - -11.03053092956543 - ], - [ - "▁Far", - -11.03060245513916 - ], - [ - "▁travers", - -11.030759811401367 - ], - [ - "▁telling", - -11.030808448791504 - ], - [ - "▁magic", - -11.030855178833008 - ], - [ - "▁Night", - -11.031316757202148 - ], - [ - "▁judge", - -11.031400680541992 - ], - [ - "▁Pat", - -11.031482696533203 - ], - [ - "▁Southern", - -11.031901359558105 - ], - [ - "OL", - -11.031929969787598 - ], - [ - "fully", - -11.032191276550293 - ], - [ - "▁acestea", - -11.03223705291748 - ], - [ - "▁Order", - -11.032383918762207 - ], - [ - "▁facut", - -11.032523155212402 - ], - [ - "▁Matt", - -11.032600402832031 - ], - [ - "registr", - -11.03278923034668 - ], - [ - "▁Yet", - -11.032811164855957 - ], - [ - "ß", - -11.033596992492676 - ], - [ - "▁făcut", - -11.033618927001953 - ], - [ - "▁versions", - -11.033780097961426 - ], - [ - "▁Force", - -11.03396224975586 - ], - [ - "rick", - -11.034153938293457 - ], - [ - "▁rund", - -11.034563064575195 - ], - [ - "ike", - -11.034658432006836 - ], - [ - "▁Young", - -11.034675598144531 - ], - [ - "▁ski", - -11.034927368164062 - ], - [ - "CU", - -11.035385131835938 - ], - [ - "▁Second", - -11.035510063171387 - ], - [ - "▁graduate", - -11.03554916381836 - ], - [ - "▁Bible", - 
-11.036049842834473 - ], - [ - "▁vary", - -11.036060333251953 - ], - [ - "▁celebration", - -11.036151885986328 - ], - [ - "▁risks", - -11.036210060119629 - ], - [ - "erii", - -11.036327362060547 - ], - [ - "rance", - -11.036577224731445 - ], - [ - "▁MP", - -11.036787986755371 - ], - [ - "▁tale", - -11.036788940429688 - ], - [ - "▁Ford", - -11.037044525146484 - ], - [ - "▁attached", - -11.037278175354004 - ], - [ - "▁Sy", - -11.037312507629395 - ], - [ - "▁Ly", - -11.03765869140625 - ], - [ - "stellung", - -11.037687301635742 - ], - [ - "▁trop", - -11.0377197265625 - ], - [ - "▁années", - -11.037736892700195 - ], - [ - "▁linked", - -11.03792667388916 - ], - [ - "pit", - -11.038352012634277 - ], - [ - "So", - -11.03835391998291 - ], - [ - "ţe", - -11.038473129272461 - ], - [ - "▁origin", - -11.038509368896484 - ], - [ - "▁boys", - -11.039263725280762 - ], - [ - "holder", - -11.039352416992188 - ], - [ - "read", - -11.039461135864258 - ], - [ - "▁relative", - -11.03950023651123 - ], - [ - "▁industries", - -11.03958511352539 - ], - [ - "making", - -11.039688110351562 - ], - [ - "▁tun", - -11.039917945861816 - ], - [ - "▁forced", - -11.041061401367188 - ], - [ - "▁Welcome", - -11.041086196899414 - ], - [ - "▁explained", - -11.041138648986816 - ], - [ - "MP", - -11.041389465332031 - ], - [ - "▁Three", - -11.041613578796387 - ], - [ - "aza", - -11.041768074035645 - ], - [ - "▁1999", - -11.041924476623535 - ], - [ - "▁erst", - -11.042237281799316 - ], - [ - "RS", - -11.042623519897461 - ], - [ - "▁attractive", - -11.04279899597168 - ], - [ - "▁visited", - -11.042805671691895 - ], - [ - "▁nom", - -11.042874336242676 - ], - [ - "▁drum", - -11.042933464050293 - ], - [ - "cast", - -11.043068885803223 - ], - [ - "ogen", - -11.043105125427246 - ], - [ - "▁tech", - -11.04360294342041 - ], - [ - "▁Comment", - -11.043664932250977 - ], - [ - "▁Little", - -11.04405689239502 - ], - [ - "▁suggested", - -11.044086456298828 - ], - [ - "▁gar", - -11.044205665588379 - ], - [ - "▁crack", - 
-11.04458999633789 - ], - [ - "▁shooting", - -11.044676780700684 - ], - [ - "▁Try", - -11.044759750366211 - ], - [ - "▁Remember", - -11.045008659362793 - ], - [ - "▁folks", - -11.045217514038086 - ], - [ - "▁MS", - -11.045512199401855 - ], - [ - "▁Dia", - -11.04584789276123 - ], - [ - "3)", - -11.046561241149902 - ], - [ - "arbeit", - -11.04697036743164 - ], - [ - "▁pepper", - -11.047065734863281 - ], - [ - "zz", - -11.047107696533203 - ], - [ - "▁extreme", - -11.047235488891602 - ], - [ - "▁extrem", - -11.047367095947266 - ], - [ - "▁severe", - -11.047768592834473 - ], - [ - "▁networks", - -11.047882080078125 - ], - [ - "păr", - -11.047910690307617 - ], - [ - "sent", - -11.047933578491211 - ], - [ - "▁structures", - -11.048048973083496 - ], - [ - "▁Join", - -11.048078536987305 - ], - [ - "▁privind", - -11.048255920410156 - ], - [ - "▁marriage", - -11.04865837097168 - ], - [ - "▁liegt", - -11.048918724060059 - ], - [ - "eben", - -11.048995971679688 - ], - [ - "▁produse", - -11.049076080322266 - ], - [ - "▁tested", - -11.049090385437012 - ], - [ - "▁Queen", - -11.049134254455566 - ], - [ - "▁Tax", - -11.049687385559082 - ], - [ - "rian", - -11.049710273742676 - ], - [ - "▁Problem", - -11.050151824951172 - ], - [ - "izat", - -11.05023193359375 - ], - [ - "udi", - -11.050324440002441 - ], - [ - "▁LA", - -11.050718307495117 - ], - [ - "▁afford", - -11.051108360290527 - ], - [ - "▁percentage", - -11.05121898651123 - ], - [ - "▁cute", - -11.051547050476074 - ], - [ - "▁gorgeous", - -11.051891326904297 - ], - [ - "▁indoor", - -11.05190372467041 - ], - [ - "▁configuration", - -11.052103042602539 - ], - [ - "▁immediate", - -11.052303314208984 - ], - [ - "▁exemple", - -11.052450180053711 - ], - [ - "▁Being", - -11.052550315856934 - ], - [ - "▁introduction", - -11.052591323852539 - ], - [ - "ella", - -11.053206443786621 - ], - [ - "bare", - -11.053521156311035 - ], - [ - "▁besser", - -11.053539276123047 - ], - [ - "▁Put", - -11.053740501403809 - ], - [ - "gon", - 
-11.054248809814453 - ], - [ - "▁Italy", - -11.054259300231934 - ], - [ - "▁Thus", - -11.05435562133789 - ], - [ - "tari", - -11.054437637329102 - ], - [ - "0.000", - -11.054460525512695 - ], - [ - "▁Price", - -11.054651260375977 - ], - [ - "▁Trust", - -11.054824829101562 - ], - [ - "▁contra", - -11.054863929748535 - ], - [ - "▁layout", - -11.05504035949707 - ], - [ - "▁Ireland", - -11.055187225341797 - ], - [ - "ctor", - -11.055344581604004 - ], - [ - "atoare", - -11.055540084838867 - ], - [ - "pra", - -11.055729866027832 - ], - [ - "rent", - -11.055892944335938 - ], - [ - "▁Seite", - -11.05605411529541 - ], - [ - "▁ori", - -11.056280136108398 - ], - [ - "spiel", - -11.056541442871094 - ], - [ - "▁Times", - -11.056883811950684 - ], - [ - "primarily", - -11.056974411010742 - ], - [ - "nov", - -11.05703067779541 - ], - [ - "▁desired", - -11.057061195373535 - ], - [ - "▁Would", - -11.057072639465332 - ], - [ - "PL", - -11.057225227355957 - ], - [ - "▁originally", - -11.057367324829102 - ], - [ - "▁Ana", - -11.057463645935059 - ], - [ - "EN", - -11.05754566192627 - ], - [ - "▁occasion", - -11.05755615234375 - ], - [ - "▁grant", - -11.057572364807129 - ], - [ - "igkeit", - -11.057975769042969 - ], - [ - "▁scheme", - -11.058146476745605 - ], - [ - "▁2015.", - -11.058621406555176 - ], - [ - "izare", - -11.058778762817383 - ], - [ - "gate", - -11.058792114257812 - ], - [ - "▁poker", - -11.058899879455566 - ], - [ - "pping", - -11.058998107910156 - ], - [ - "▁Wild", - -11.059511184692383 - ], - [ - "▁YouTube", - -11.059995651245117 - ], - [ - "▁assume", - -11.060284614562988 - ], - [ - "с", - -11.060614585876465 - ], - [ - "▁rapport", - -11.060623168945312 - ], - [ - "▁labor", - -11.060996055603027 - ], - [ - "teur", - -11.061041831970215 - ], - [ - "▁genre", - -11.06116008758545 - ], - [ - "▁plat", - -11.061745643615723 - ], - [ - "▁listening", - -11.061750411987305 - ], - [ - "sky", - -11.061777114868164 - ], - [ - "▁neighborhood", - -11.061782836914062 - ], - [ - "▁3-", 
- -11.062150001525879 - ], - [ - "▁Library", - -11.062162399291992 - ], - [ - "agit", - -11.062249183654785 - ], - [ - "▁platforms", - -11.062849998474121 - ], - [ - "bei", - -11.062882423400879 - ], - [ - "AB", - -11.062897682189941 - ], - [ - "▁manufacturers", - -11.06295394897461 - ], - [ - "▁printing", - -11.063141822814941 - ], - [ - "▁crisis", - -11.063326835632324 - ], - [ - "▁Smart", - -11.06335163116455 - ], - [ - "▁drawing", - -11.063406944274902 - ], - [ - "MO", - -11.06348991394043 - ], - [ - "▁durable", - -11.063569068908691 - ], - [ - "chant", - -11.0636625289917 - ], - [ - "▁chemical", - -11.063764572143555 - ], - [ - "▁savoir", - -11.063776016235352 - ], - [ - "▁Max", - -11.063802719116211 - ], - [ - "gestellt", - -11.06380844116211 - ], - [ - "▁rural", - -11.063854217529297 - ], - [ - "52", - -11.064105033874512 - ], - [ - "▁invited", - -11.064169883728027 - ], - [ - "▁fil", - -11.0642728805542 - ], - [ - "▁Rob", - -11.064284324645996 - ], - [ - "▁Bell", - -11.064387321472168 - ], - [ - "▁neck", - -11.064831733703613 - ], - [ - "pac", - -11.064879417419434 - ], - [ - "wal", - -11.06491470336914 - ], - [ - "▁là", - -11.064922332763672 - ], - [ - "▁Virginia", - -11.065081596374512 - ], - [ - "▁applicable", - -11.06509017944336 - ], - [ - "▁abuse", - -11.065153121948242 - ], - [ - "aide", - -11.065321922302246 - ], - [ - "▁increases", - -11.065396308898926 - ], - [ - "▁moi", - -11.065568923950195 - ], - [ - "▁Non", - -11.065577507019043 - ], - [ - "▁Produkt", - -11.065627098083496 - ], - [ - "FC", - -11.065644264221191 - ], - [ - "▁shops", - -11.065677642822266 - ], - [ - "▁prendre", - -11.065923690795898 - ], - [ - "atul", - -11.065990447998047 - ], - [ - "▁sal", - -11.066137313842773 - ], - [ - "▁société", - -11.06627082824707 - ], - [ - "▁Hot", - -11.066329002380371 - ], - [ - "rim", - -11.066587448120117 - ], - [ - "gue", - -11.06661605834961 - ], - [ - "▁enterprise", - -11.066624641418457 - ], - [ - "▁33", - -11.067329406738281 - ], - [ - 
"mittel", - -11.067395210266113 - ], - [ - "ged", - -11.067439079284668 - ], - [ - "▁formula", - -11.06777286529541 - ], - [ - "▁spin", - -11.067784309387207 - ], - [ - "als", - -11.067826271057129 - ], - [ - "2%", - -11.06785774230957 - ], - [ - "bon", - -11.068192481994629 - ], - [ - "▁Executive", - -11.068323135375977 - ], - [ - "▁wirklich", - -11.068427085876465 - ], - [ - "îl", - -11.068608283996582 - ], - [ - "1.", - -11.068917274475098 - ], - [ - "▁Arm", - -11.069157600402832 - ], - [ - "▁rid", - -11.069358825683594 - ], - [ - "aries", - -11.069727897644043 - ], - [ - "▁incident", - -11.06982421875 - ], - [ - "▁copii", - -11.070008277893066 - ], - [ - "▁Charles", - -11.070141792297363 - ], - [ - "▁meals", - -11.070147514343262 - ], - [ - "▁wireless", - -11.070237159729004 - ], - [ - "Ex", - -11.070364952087402 - ], - [ - "▁Financial", - -11.070540428161621 - ], - [ - "▁AM", - -11.070615768432617 - ], - [ - "▁fest", - -11.070645332336426 - ], - [ - "▁Ol", - -11.071410179138184 - ], - [ - "oir", - -11.071447372436523 - ], - [ - "300", - -11.071893692016602 - ], - [ - "▁punct", - -11.072138786315918 - ], - [ - "▁Mad", - -11.07283878326416 - ], - [ - "▁Ali", - -11.072907447814941 - ], - [ - "lag", - -11.073214530944824 - ], - [ - "▁ocean", - -11.073314666748047 - ], - [ - "▁mirror", - -11.073326110839844 - ], - [ - "▁Additionally", - -11.073869705200195 - ], - [ - "alia", - -11.073884963989258 - ], - [ - "▁county", - -11.073899269104004 - ], - [ - "▁hip", - -11.074305534362793 - ], - [ - "dale", - -11.074395179748535 - ], - [ - "▁Stra", - -11.074429512023926 - ], - [ - "▁drag", - -11.074575424194336 - ], - [ - "▁Sand", - -11.074851036071777 - ], - [ - "▁historic", - -11.074980735778809 - ], - [ - "ière", - -11.075427055358887 - ], - [ - "▁examine", - -11.075624465942383 - ], - [ - "soci", - -11.075634002685547 - ], - [ - "ime", - -11.076088905334473 - ], - [ - "▁Insurance", - -11.07621955871582 - ], - [ - "▁crime", - -11.076736450195312 - ], - [ - "▁pare", - 
-11.076945304870605 - ], - [ - "▁craft", - -11.077105522155762 - ], - [ - "▁Building", - -11.077279090881348 - ], - [ - "mission", - -11.077534675598145 - ], - [ - "▁Americans", - -11.077573776245117 - ], - [ - "▁mg", - -11.077799797058105 - ], - [ - "▁passage", - -11.077938079833984 - ], - [ - "▁deposit", - -11.078346252441406 - ], - [ - "▁widely", - -11.078444480895996 - ], - [ - "nch", - -11.078453063964844 - ], - [ - "▁Coast", - -11.078756332397461 - ], - [ - "▁recipes", - -11.078784942626953 - ], - [ - "▁Ziel", - -11.07951545715332 - ], - [ - "▁duty", - -11.079646110534668 - ], - [ - "▁gerne", - -11.079704284667969 - ], - [ - "most", - -11.080034255981445 - ], - [ - "▁argument", - -11.080158233642578 - ], - [ - "▁root", - -11.08021354675293 - ], - [ - "▁consult", - -11.08024787902832 - ], - [ - "▁muscle", - -11.080255508422852 - ], - [ - "▁spoke", - -11.08038330078125 - ], - [ - "▁Cum", - -11.080950736999512 - ], - [ - "▁orange", - -11.081033706665039 - ], - [ - "▁reader", - -11.081123352050781 - ], - [ - "schw", - -11.081151008605957 - ], - [ - "▁commission", - -11.081332206726074 - ], - [ - "histoire", - -11.081811904907227 - ], - [ - "▁represents", - -11.082064628601074 - ], - [ - "▁meilleur", - -11.082343101501465 - ], - [ - "▁10.", - -11.082358360290527 - ], - [ - "HA", - -11.082427024841309 - ], - [ - "▁Systems", - -11.082573890686035 - ], - [ - "▁blind", - -11.082603454589844 - ], - [ - "▁HP", - -11.083221435546875 - ], - [ - "▁doi", - -11.083307266235352 - ], - [ - "▁signature", - -11.083404541015625 - ], - [ - "▁invite", - -11.083505630493164 - ], - [ - "▁Samsung", - -11.083802223205566 - ], - [ - "▁liber", - -11.083942413330078 - ], - [ - "▁letters", - -11.0840482711792 - ], - [ - "▁primul", - -11.084186553955078 - ], - [ - "▁losing", - -11.084328651428223 - ], - [ - "resulting", - -11.084467887878418 - ], - [ - "▁Computer", - -11.08474063873291 - ], - [ - "▁poll", - -11.0847749710083 - ], - [ - "rile", - -11.085102081298828 - ], - [ - "TI", - 
-11.085142135620117 - ], - [ - "▁cur", - -11.08566951751709 - ], - [ - "▁fonction", - -11.085833549499512 - ], - [ - "gat", - -11.086359977722168 - ], - [ - "AA", - -11.086480140686035 - ], - [ - "tiv", - -11.086692810058594 - ], - [ - "▁Str", - -11.087076187133789 - ], - [ - "ești", - -11.087677955627441 - ], - [ - "▁officer", - -11.0877046585083 - ], - [ - "reducing", - -11.08772087097168 - ], - [ - "▁gifts", - -11.08780288696289 - ], - [ - "▁performing", - -11.08788776397705 - ], - [ - "▁»,", - -11.088349342346191 - ], - [ - "▁guitar", - -11.08838939666748 - ], - [ - "▁segment", - -11.088580131530762 - ], - [ - "▁Tar", - -11.08861255645752 - ], - [ - "▁ultimately", - -11.088805198669434 - ], - [ - "▁cam", - -11.088960647583008 - ], - [ - "▁Arbeit", - -11.089076042175293 - ], - [ - "▁accessories", - -11.089418411254883 - ], - [ - "bad", - -11.089820861816406 - ], - [ - "home", - -11.0899019241333 - ], - [ - "▁clip", - -11.08995532989502 - ], - [ - "range", - -11.090432167053223 - ], - [ - "CM", - -11.090867042541504 - ], - [ - "▁printed", - -11.090883255004883 - ], - [ - "▁Pet", - -11.091177940368652 - ], - [ - "▁attract", - -11.091333389282227 - ], - [ - "date", - -11.091501235961914 - ], - [ - "▁Senior", - -11.091503143310547 - ], - [ - "▁genau", - -11.092177391052246 - ], - [ - "num", - -11.092435836791992 - ], - [ - "▁attended", - -11.092674255371094 - ], - [ - "▁Turn", - -11.092824935913086 - ], - [ - "▁History", - -11.092830657958984 - ], - [ - "some", - -11.092852592468262 - ], - [ - "▁describe", - -11.09308910369873 - ], - [ - "▁Lee", - -11.093143463134766 - ], - [ - "▁Fre", - -11.093314170837402 - ], - [ - "▁league", - -11.093345642089844 - ], - [ - "new", - -11.093505859375 - ], - [ - "tors", - -11.093535423278809 - ], - [ - "▁storm", - -11.094005584716797 - ], - [ - "▁Beispiel", - -11.094197273254395 - ], - [ - "▁index", - -11.094344139099121 - ], - [ - "▁awarded", - -11.094613075256348 - ], - [ - "state", - -11.094625473022461 - ], - [ - "▁1990", - 
-11.094874382019043 - ], - [ - "▁ends", - -11.094902992248535 - ], - [ - "kor", - -11.095070838928223 - ], - [ - "far", - -11.095418930053711 - ], - [ - "▁Page", - -11.095541000366211 - ], - [ - "▁promotion", - -11.095610618591309 - ], - [ - "▁weekly", - -11.095726013183594 - ], - [ - "400", - -11.095966339111328 - ], - [ - "iuni", - -11.096365928649902 - ], - [ - "▁Summer", - -11.096376419067383 - ], - [ - "▁thin", - -11.096627235412598 - ], - [ - "▁dafür", - -11.09669303894043 - ], - [ - "51", - -11.096769332885742 - ], - [ - "PR", - -11.096978187561035 - ], - [ - "▁Hy", - -11.097001075744629 - ], - [ - "gas", - -11.097013473510742 - ], - [ - "▁atat", - -11.097166061401367 - ], - [ - "▁mining", - -11.097347259521484 - ], - [ - "▁principles", - -11.09741497039795 - ], - [ - "gent", - -11.097545623779297 - ], - [ - "ika", - -11.097685813903809 - ], - [ - "▁religion", - -11.097787857055664 - ], - [ - "▁ordered", - -11.098284721374512 - ], - [ - "▁developers", - -11.098298072814941 - ], - [ - "▁pleasure", - -11.098456382751465 - ], - [ - "vit", - -11.098505020141602 - ], - [ - "mers", - -11.0988130569458 - ], - [ - "▁Section", - -11.098873138427734 - ], - [ - "▁por", - -11.098960876464844 - ], - [ - "▁Name", - -11.099200248718262 - ], - [ - "▁pink", - -11.099260330200195 - ], - [ - "dig", - -11.09934139251709 - ], - [ - "▁eligible", - -11.099397659301758 - ], - [ - "▁Happy", - -11.09941577911377 - ], - [ - "▁fo", - -11.099480628967285 - ], - [ - "▁availability", - -11.099541664123535 - ], - [ - "GO", - -11.099583625793457 - ], - [ - "▁Europa", - -11.099637985229492 - ], - [ - "▁Unit", - -11.099656105041504 - ], - [ - "▁1000", - -11.099837303161621 - ], - [ - "▁Berg", - -11.099846839904785 - ], - [ - "fini", - -11.099853515625 - ], - [ - "▁$3", - -11.100565910339355 - ], - [ - "iza", - -11.100749969482422 - ], - [ - "▁promo", - -11.100830078125 - ], - [ - "▁Low", - -11.101234436035156 - ], - [ - "abord", - -11.101326942443848 - ], - [ - "äh", - -11.101485252380371 - 
], - [ - "▁Professor", - -11.101570129394531 - ], - [ - "▁array", - -11.101579666137695 - ], - [ - "▁hate", - -11.101594924926758 - ], - [ - "▁recording", - -11.101601600646973 - ], - [ - "RI", - -11.101649284362793 - ], - [ - "▁proof", - -11.101710319519043 - ], - [ - "lay", - -11.10185718536377 - ], - [ - "DE", - -11.102007865905762 - ], - [ - "▁surprised", - -11.102066040039062 - ], - [ - "▁boxes", - -11.102193832397461 - ], - [ - "▁noastre", - -11.102386474609375 - ], - [ - "zie", - -11.102387428283691 - ], - [ - "▁însă", - -11.10254192352295 - ], - [ - "▁ajuta", - -11.102783203125 - ], - [ - "▁weil", - -11.1028413772583 - ], - [ - "▁whenever", - -11.103026390075684 - ], - [ - "shi", - -11.103194236755371 - ], - [ - "satz", - -11.103605270385742 - ], - [ - "▁remind", - -11.10401725769043 - ], - [ - "▁consist", - -11.10412311553955 - ], - [ - "▁motiv", - -11.104240417480469 - ], - [ - "▁PS", - -11.1043062210083 - ], - [ - "▁trois", - -11.104543685913086 - ], - [ - "pad", - -11.10477352142334 - ], - [ - "▁besten", - -11.104904174804688 - ], - [ - "▁Stone", - -11.105140686035156 - ], - [ - "itz", - -11.105157852172852 - ], - [ - "fit", - -11.105164527893066 - ], - [ - "▁Mountain", - -11.105178833007812 - ], - [ - "OC", - -11.10519027709961 - ], - [ - "▁depends", - -11.105228424072266 - ], - [ - "▁Cover", - -11.105387687683105 - ], - [ - "▁bags", - -11.106058120727539 - ], - [ - "▁Bel", - -11.106199264526367 - ], - [ - "▁Engineering", - -11.106304168701172 - ], - [ - "▁flower", - -11.106647491455078 - ], - [ - "▁gratuit", - -11.106670379638672 - ], - [ - "▁smartphone", - -11.106780052185059 - ], - [ - "stan", - -11.107197761535645 - ], - [ - "spect", - -11.10726261138916 - ], - [ - "SL", - -11.107282638549805 - ], - [ - "sho", - -11.10738754272461 - ], - [ - "▁Ser", - -11.10791301727295 - ], - [ - "▁Perhaps", - -11.108247756958008 - ], - [ - "▁codes", - -11.108342170715332 - ], - [ - "▁Wind", - -11.10849666595459 - ], - [ - "aient", - -11.108757019042969 - ], - [ - 
"▁Prin", - -11.108802795410156 - ], - [ - "▁(1)", - -11.109090805053711 - ], - [ - "▁figures", - -11.109450340270996 - ], - [ - "▁ausge", - -11.10972785949707 - ], - [ - "▁episode", - -11.110050201416016 - ], - [ - "▁Spa", - -11.110370635986328 - ], - [ - "▁Silver", - -11.110386848449707 - ], - [ - "▁Sky", - -11.110396385192871 - ], - [ - "▁capabilities", - -11.1107177734375 - ], - [ - "▁Uni", - -11.11073112487793 - ], - [ - "▁încă", - -11.110876083374023 - ], - [ - "TO", - -11.111289978027344 - ], - [ - "▁Hal", - -11.111358642578125 - ], - [ - "ghi", - -11.111414909362793 - ], - [ - "▁sofa", - -11.111438751220703 - ], - [ - "hard", - -11.11150074005127 - ], - [ - "▁FOR", - -11.111587524414062 - ], - [ - "▁Ber", - -11.111820220947266 - ], - [ - "▁firms", - -11.11187744140625 - ], - [ - "▁memories", - -11.111883163452148 - ], - [ - "▁lift", - -11.11214542388916 - ], - [ - "▁sending", - -11.11214542388916 - ], - [ - "▁narrow", - -11.112646102905273 - ], - [ - "▁Steve", - -11.112784385681152 - ], - [ - "▁integration", - -11.112905502319336 - ], - [ - "known", - -11.113122940063477 - ], - [ - "▁nostru", - -11.113237380981445 - ], - [ - "iţi", - -11.113422393798828 - ], - [ - "▁Georgia", - -11.113759994506836 - ], - [ - "▁slowly", - -11.114026069641113 - ], - [ - "iere", - -11.114028930664062 - ], - [ - "aka", - -11.114255905151367 - ], - [ - "PE", - -11.114320755004883 - ], - [ - "▁venue", - -11.11468505859375 - ], - [ - "jar", - -11.11474609375 - ], - [ - "buch", - -11.114755630493164 - ], - [ - "rad", - -11.114858627319336 - ], - [ - "▁resistance", - -11.114899635314941 - ], - [ - "▁stehen", - -11.114914894104004 - ], - [ - "chin", - -11.11504077911377 - ], - [ - "▁weak", - -11.11535358428955 - ], - [ - "▁DVD", - -11.115598678588867 - ], - [ - "▁bodies", - -11.115856170654297 - ], - [ - "▁split", - -11.115884780883789 - ], - [ - "What", - -11.116231918334961 - ], - [ - "setzen", - -11.116467475891113 - ], - [ - "▁loves", - -11.116561889648438 - ], - [ - "▁kleine", - 
-11.117077827453613 - ], - [ - "▁increasingly", - -11.11746883392334 - ], - [ - "▁alert", - -11.117583274841309 - ], - [ - "▁AC", - -11.117647171020508 - ], - [ - "▁partir", - -11.117974281311035 - ], - [ - "▁ratio", - -11.11807918548584 - ], - [ - "▁keeps", - -11.118539810180664 - ], - [ - "▁Area", - -11.118544578552246 - ], - [ - "▁données", - -11.119071960449219 - ], - [ - "▁flag", - -11.119254112243652 - ], - [ - "▁NO", - -11.119277000427246 - ], - [ - "▁hotels", - -11.119336128234863 - ], - [ - "▁debut", - -11.119365692138672 - ], - [ - "▁suffer", - -11.119368553161621 - ], - [ - "▁hidden", - -11.119810104370117 - ], - [ - "▁clothing", - -11.120074272155762 - ], - [ - "▁household", - -11.120235443115234 - ], - [ - "medi", - -11.120268821716309 - ], - [ - "▁reste", - -11.120274543762207 - ], - [ - "bro", - -11.120381355285645 - ], - [ - "▁Bus", - -11.120405197143555 - ], - [ - "▁Ken", - -11.120572090148926 - ], - [ - "IR", - -11.120758056640625 - ], - [ - "▁suffering", - -11.121212005615234 - ], - [ - "▁publication", - -11.121246337890625 - ], - [ - "▁Mat", - -11.121360778808594 - ], - [ - "▁impression", - -11.121509552001953 - ], - [ - "▁founded", - -11.121562957763672 - ], - [ - "▁stable", - -11.121566772460938 - ], - [ - "▁promise", - -11.121719360351562 - ], - [ - "▁Cloud", - -11.121770858764648 - ], - [ - "▁prison", - -11.122099876403809 - ], - [ - "cor", - -11.122355461120605 - ], - [ - "▁Sports", - -11.122716903686523 - ], - [ - "▁erste", - -11.122745513916016 - ], - [ - "shire", - -11.122757911682129 - ], - [ - "▁recommendations", - -11.122916221618652 - ], - [ - "▁permit", - -11.123100280761719 - ], - [ - "▁tomorrow", - -11.123126983642578 - ], - [ - "▁lucky", - -11.123422622680664 - ], - [ - "▁realized", - -11.123449325561523 - ], - [ - "▁famille", - -11.123473167419434 - ], - [ - "▁Zealand", - -11.123542785644531 - ], - [ - "▁wooden", - -11.123601913452148 - ], - [ - "▁east", - -11.124269485473633 - ], - [ - "▁Bereich", - -11.12458324432373 - ], - [ 
- "während", - -11.124653816223145 - ], - [ - "rite", - -11.124836921691895 - ], - [ - "▁fla", - -11.124902725219727 - ], - [ - "platz", - -11.124991416931152 - ], - [ - "▁zero", - -11.125292778015137 - ], - [ - "▁priority", - -11.12535572052002 - ], - [ - "▁Airport", - -11.125506401062012 - ], - [ - "▁Kauf", - -11.125590324401855 - ], - [ - "▁ultimate", - -11.12601375579834 - ], - [ - "▁chest", - -11.126175880432129 - ], - [ - "▁tone", - -11.126376152038574 - ], - [ - "▁Kal", - -11.126431465148926 - ], - [ - "▁supposed", - -11.12669849395752 - ], - [ - "▁vedere", - -11.126846313476562 - ], - [ - "▁50%", - -11.126872062683105 - ], - [ - "▁Ger", - -11.127785682678223 - ], - [ - "pack", - -11.127849578857422 - ], - [ - "▁priv", - -11.128241539001465 - ], - [ - "▁Kit", - -11.128263473510742 - ], - [ - "▁tent", - -11.128457069396973 - ], - [ - "▁guidelines", - -11.128461837768555 - ], - [ - "▁Republic", - -11.128824234008789 - ], - [ - "including", - -11.129239082336426 - ], - [ - "▁chief", - -11.129615783691406 - ], - [ - "▁Living", - -11.129766464233398 - ], - [ - "keit", - -11.1298189163208 - ], - [ - "▁convert", - -11.129831314086914 - ], - [ - "tail", - -11.129928588867188 - ], - [ - "orient", - -11.129960060119629 - ], - [ - "eigenen", - -11.130245208740234 - ], - [ - "▁soup", - -11.130587577819824 - ], - [ - "▁zona", - -11.130661010742188 - ], - [ - "▁composition", - -11.130690574645996 - ], - [ - "▁Bob", - -11.130831718444824 - ], - [ - "▁exception", - -11.131170272827148 - ], - [ - "▁cr", - -11.131287574768066 - ], - [ - "▁str", - -11.131482124328613 - ], - [ - "▁Fl", - -11.13178825378418 - ], - [ - "AT", - -11.131909370422363 - ], - [ - "kel", - -11.132002830505371 - ], - [ - "▁pricing", - -11.132189750671387 - ], - [ - "▁Mass", - -11.132258415222168 - ], - [ - "vir", - -11.132333755493164 - ], - [ - "leg", - -11.132448196411133 - ], - [ - "▁rating", - -11.132455825805664 - ], - [ - "▁Sale", - -11.132628440856934 - ], - [ - "▁somewhere", - -11.132866859436035 
- ], - [ - "▁submitted", - -11.133084297180176 - ], - [ - "▁Pop", - -11.133296012878418 - ], - [ - "▁papers", - -11.13330364227295 - ], - [ - "▁authorities", - -11.133326530456543 - ], - [ - "▁Person", - -11.133381843566895 - ], - [ - "▁kill", - -11.133512496948242 - ], - [ - "▁suggestions", - -11.133548736572266 - ], - [ - "-6", - -11.133644104003906 - ], - [ - "▁dust", - -11.133750915527344 - ], - [ - "taire", - -11.133805274963379 - ], - [ - "▁recognition", - -11.133870124816895 - ], - [ - "3.", - -11.134047508239746 - ], - [ - "▁Mont", - -11.134230613708496 - ], - [ - "▁produit", - -11.13430118560791 - ], - [ - "▁transmission", - -11.134340286254883 - ], - [ - "▁Th", - -11.13475513458252 - ], - [ - "▁passing", - -11.134928703308105 - ], - [ - "▁Partner", - -11.135161399841309 - ], - [ - "▁dire", - -11.135205268859863 - ], - [ - "▁DC", - -11.135432243347168 - ], - [ - "▁sky", - -11.135659217834473 - ], - [ - "▁Kitchen", - -11.135890007019043 - ], - [ - "▁fluid", - -11.135929107666016 - ], - [ - "▁scored", - -11.136005401611328 - ], - [ - "▁chapter", - -11.136100769042969 - ], - [ - "If", - -11.136231422424316 - ], - [ - "letzten", - -11.136275291442871 - ], - [ - "▁officers", - -11.13641357421875 - ], - [ - "▁avem", - -11.136631965637207 - ], - [ - "ister", - -11.136666297912598 - ], - [ - "▁involves", - -11.136688232421875 - ], - [ - "ico", - -11.136898040771484 - ], - [ - "bur", - -11.137056350708008 - ], - [ - "▁mieux", - -11.137064933776855 - ], - [ - "▁Photo", - -11.1371431350708 - ], - [ - "▁Cro", - -11.137228012084961 - ], - [ - "▁professor", - -11.137245178222656 - ], - [ - "▁besonders", - -11.137313842773438 - ], - [ - "д", - -11.137367248535156 - ], - [ - "▁alongside", - -11.137382507324219 - ], - [ - "▁stored", - -11.13770580291748 - ], - [ - "▁activ", - -11.137849807739258 - ], - [ - "▁setup", - -11.138169288635254 - ], - [ - "▁extract", - -11.138627052307129 - ], - [ - "▁accent", - -11.138633728027344 - ], - [ - "▁replaced", - -11.138638496398926 - 
], - [ - "tec", - -11.138800621032715 - ], - [ - "▁Natur", - -11.138848304748535 - ], - [ - "▁Pacific", - -11.138887405395508 - ], - [ - "▁NY", - -11.139485359191895 - ], - [ - "▁Capital", - -11.139583587646484 - ], - [ - "▁forest", - -11.13969898223877 - ], - [ - "incredibly", - -11.14006233215332 - ], - [ - "▁choix", - -11.14021110534668 - ], - [ - "▁seriously", - -11.140281677246094 - ], - [ - "▁konnte", - -11.14030933380127 - ], - [ - "▁2014.", - -11.140443801879883 - ], - [ - "ensuring", - -11.140534400939941 - ], - [ - "▁handling", - -11.140661239624023 - ], - [ - "▁9.", - -11.140715599060059 - ], - [ - "▁relations", - -11.140876770019531 - ], - [ - "▁Kom", - -11.141045570373535 - ], - [ - "▁Hol", - -11.141282081604004 - ], - [ - "▁none", - -11.141515731811523 - ], - [ - "rob", - -11.141718864440918 - ], - [ - "▁Forum", - -11.141759872436523 - ], - [ - "hour", - -11.141776084899902 - ], - [ - "ème", - -11.141809463500977 - ], - [ - "▁Space", - -11.141986846923828 - ], - [ - "▁Ham", - -11.142992973327637 - ], - [ - "rap", - -11.143169403076172 - ], - [ - "▁Michigan", - -11.14317512512207 - ], - [ - "km", - -11.143202781677246 - ], - [ - "▁utilize", - -11.143548965454102 - ], - [ - "lov", - -11.143775939941406 - ], - [ - "▁luck", - -11.144388198852539 - ], - [ - "lä", - -11.144824981689453 - ], - [ - "▁healing", - -11.145010948181152 - ], - [ - "▁neu", - -11.145182609558105 - ], - [ - "aging", - -11.145251274108887 - ], - [ - "▁compliance", - -11.145583152770996 - ], - [ - "▁vertical", - -11.145675659179688 - ], - [ - "▁FREE", - -11.145729064941406 - ], - [ - "▁differences", - -11.146014213562012 - ], - [ - "▁Server", - -11.146252632141113 - ], - [ - "▁estimated", - -11.146378517150879 - ], - [ - "schutz", - -11.146692276000977 - ], - [ - "▁notamment", - -11.146736145019531 - ], - [ - "▁120", - -11.146919250488281 - ], - [ - "72", - -11.147282600402832 - ], - [ - "▁heating", - -11.147347450256348 - ], - [ - "late", - -11.14756965637207 - ], - [ - "▁younger", - 
-11.14783000946045 - ], - [ - "▁Intel", - -11.148171424865723 - ], - [ - "▁salad", - -11.148362159729004 - ], - [ - "▁commonly", - -11.148563385009766 - ], - [ - "▁treatments", - -11.148682594299316 - ], - [ - "▁speaker", - -11.148770332336426 - ], - [ - "▁producing", - -11.149120330810547 - ], - [ - "▁eggs", - -11.149367332458496 - ], - [ - "▁Spirit", - -11.149892807006836 - ], - [ - "▁beide", - -11.149918556213379 - ], - [ - "▁transaction", - -11.150283813476562 - ], - [ - "▁Machine", - -11.150464057922363 - ], - [ - "▁Games", - -11.150527000427246 - ], - [ - "▁niveau", - -11.150687217712402 - ], - [ - "▁Need", - -11.15082836151123 - ], - [ - "radi", - -11.150959968566895 - ], - [ - "mir", - -11.15096664428711 - ], - [ - "causing", - -11.151000022888184 - ], - [ - "▁début", - -11.151042938232422 - ], - [ - "▁rencontre", - -11.151063919067383 - ], - [ - "▁threat", - -11.151153564453125 - ], - [ - "▁enjoying", - -11.151320457458496 - ], - [ - "Com", - -11.151386260986328 - ], - [ - "▁Johnson", - -11.151555061340332 - ], - [ - "▁tournament", - -11.15156364440918 - ], - [ - "▁Micro", - -11.151582717895508 - ], - [ - "▁Drive", - -11.151667594909668 - ], - [ - "▁Cre", - -11.151866912841797 - ], - [ - "▁Lebens", - -11.151930809020996 - ], - [ - "▁categories", - -11.152358055114746 - ], - [ - "5,000", - -11.15261173248291 - ], - [ - "▁confirmed", - -11.152617454528809 - ], - [ - "pli", - -11.152763366699219 - ], - [ - "▁Francisco", - -11.153139114379883 - ], - [ - "▁raw", - -11.153157234191895 - ], - [ - "▁managers", - -11.153223991394043 - ], - [ - "ţie", - -11.153365135192871 - ], - [ - "UR", - -11.153368949890137 - ], - [ - "▁aproape", - -11.154065132141113 - ], - [ - "via", - -11.154606819152832 - ], - [ - "▁engaged", - -11.154646873474121 - ], - [ - "▁parti", - -11.154741287231445 - ], - [ - "▁posting", - -11.15517807006836 - ], - [ - "CO", - -11.155484199523926 - ], - [ - "▁bois", - -11.155815124511719 - ], - [ - "▁inch", - -11.15590763092041 - ], - [ - "vie", - 
-11.156068801879883 - ], - [ - "▁aside", - -11.156314849853516 - ], - [ - "▁exceptional", - -11.15658950805664 - ], - [ - "▁vintage", - -11.156668663024902 - ], - [ - "▁Him", - -11.156795501708984 - ], - [ - "▁expansion", - -11.156806945800781 - ], - [ - "▁Weg", - -11.157122611999512 - ], - [ - "▁authors", - -11.157535552978516 - ], - [ - "▁deine", - -11.15764045715332 - ], - [ - "▁Prime", - -11.158016204833984 - ], - [ - "▁scan", - -11.158055305480957 - ], - [ - "▁reg", - -11.158112525939941 - ], - [ - "ția", - -11.158141136169434 - ], - [ - "riv", - -11.158258438110352 - ], - [ - "selon", - -11.158440589904785 - ], - [ - "▁Studio", - -11.158571243286133 - ], - [ - "▁dich", - -11.158658027648926 - ], - [ - "▁vi", - -11.158745765686035 - ], - [ - "▁sequence", - -11.159016609191895 - ], - [ - "▁Four", - -11.159046173095703 - ], - [ - "RT", - -11.159050941467285 - ], - [ - "▁ihn", - -11.159072875976562 - ], - [ - "▁employ", - -11.159223556518555 - ], - [ - "umb", - -11.159659385681152 - ], - [ - "ită", - -11.159818649291992 - ], - [ - "▁Station", - -11.159950256347656 - ], - [ - "▁upload", - -11.159972190856934 - ], - [ - "▁upgrade", - -11.160445213317871 - ], - [ - "▁exterior", - -11.160528182983398 - ], - [ - "▁writers", - -11.160531997680664 - ], - [ - "▁plot", - -11.160543441772461 - ], - [ - "▁Gen", - -11.16068172454834 - ], - [ - "TER", - -11.160821914672852 - ], - [ - "-12", - -11.160930633544922 - ], - [ - "http", - -11.162168502807617 - ], - [ - "▁smell", - -11.1621732711792 - ], - [ - "post", - -11.162522315979004 - ], - [ - "von", - -11.162790298461914 - ], - [ - "mili", - -11.16280746459961 - ], - [ - "8%", - -11.162972450256348 - ], - [ - "▁Andrew", - -11.163065910339355 - ], - [ - "▁spun", - -11.16321086883545 - ], - [ - "▁grass", - -11.163444519042969 - ], - [ - "unter", - -11.163474082946777 - ], - [ - "▁burn", - -11.16356086730957 - ], - [ - "▁Gegen", - -11.163601875305176 - ], - [ - "fest", - -11.163721084594727 - ], - [ - "▁Northern", - 
-11.163738250732422 - ], - [ - "▁consumption", - -11.163775444030762 - ], - [ - "▁bird", - -11.164069175720215 - ], - [ - "▁Miss", - -11.164369583129883 - ], - [ - "anti", - -11.16447925567627 - ], - [ - "▁viata", - -11.164583206176758 - ], - [ - "bereich", - -11.164602279663086 - ], - [ - "▁Change", - -11.164871215820312 - ], - [ - "▁pouvoir", - -11.165255546569824 - ], - [ - "▁demonstrate", - -11.165435791015625 - ], - [ - "▁requirement", - -11.165483474731445 - ], - [ - "BI", - -11.16577434539795 - ], - [ - "ied", - -11.166099548339844 - ], - [ - "▁spray", - -11.166358947753906 - ], - [ - "▁calitate", - -11.166379928588867 - ], - [ - "▁souvent", - -11.1665620803833 - ], - [ - "▁samples", - -11.166682243347168 - ], - [ - "▁compete", - -11.166930198669434 - ], - [ - "ank", - -11.166946411132812 - ], - [ - "année", - -11.167037963867188 - ], - [ - "wick", - -11.167183876037598 - ], - [ - "iff", - -11.167254447937012 - ], - [ - "noi", - -11.167255401611328 - ], - [ - "ography", - -11.167450904846191 - ], - [ - "▁SE", - -11.167508125305176 - ], - [ - "▁250", - -11.16779899597168 - ], - [ - "▁wealth", - -11.167884826660156 - ], - [ - "4%", - -11.168235778808594 - ], - [ - "▁swimming", - -11.168269157409668 - ], - [ - "enne", - -11.168338775634766 - ], - [ - "Qu", - -11.168400764465332 - ], - [ - "▁connections", - -11.168476104736328 - ], - [ - "onne", - -11.16852855682373 - ], - [ - "▁Way", - -11.168676376342773 - ], - [ - "voll", - -11.168793678283691 - ], - [ - "▁extent", - -11.169041633605957 - ], - [ - "▁objective", - -11.169572830200195 - ], - [ - "▁clinic", - -11.169581413269043 - ], - [ - "NA", - -11.169848442077637 - ], - [ - "▁Hope", - -11.170098304748535 - ], - [ - "▁coat", - -11.170331954956055 - ], - [ - "▁depend", - -11.170393943786621 - ], - [ - "▁tine", - -11.170463562011719 - ], - [ - "acc", - -11.170486450195312 - ], - [ - "▁editor", - -11.170598983764648 - ], - [ - "▁Jim", - -11.170690536499023 - ], - [ - "600", - -11.171262741088867 - ], - [ - 
"▁module", - -11.171302795410156 - ], - [ - "▁deja", - -11.171821594238281 - ], - [ - "atur", - -11.171841621398926 - ], - [ - "▁maintaining", - -11.171918869018555 - ], - [ - "▁hoch", - -11.172059059143066 - ], - [ - "▁covering", - -11.17239761352539 - ], - [ - "vielen", - -11.172450065612793 - ], - [ - "hem", - -11.172531127929688 - ], - [ - "▁illegal", - -11.172656059265137 - ], - [ - "▁certificate", - -11.17329216003418 - ], - [ - "▁collective", - -11.173357963562012 - ], - [ - "▁blow", - -11.17343807220459 - ], - [ - "▁programming", - -11.17343807220459 - ], - [ - "HE", - -11.173727989196777 - ], - [ - "▁Division", - -11.173842430114746 - ], - [ - "▁ceux", - -11.174081802368164 - ], - [ - "▁saved", - -11.174202919006348 - ], - [ - "▁worst", - -11.17426586151123 - ], - [ - "▁arms", - -11.17430305480957 - ], - [ - "▁Officer", - -11.17463493347168 - ], - [ - "▁association", - -11.174838066101074 - ], - [ - "ington", - -11.1749906539917 - ], - [ - "▁belle", - -11.175024032592773 - ], - [ - "tting", - -11.17537784576416 - ], - [ - "▁attacks", - -11.175446510314941 - ], - [ - "▁vei", - -11.17546558380127 - ], - [ - "▁gerade", - -11.175470352172852 - ], - [ - "▁strain", - -11.175748825073242 - ], - [ - "▁offices", - -11.1759672164917 - ], - [ - "EM", - -11.17627239227295 - ], - [ - "EST", - -11.176509857177734 - ], - [ - "-8", - -11.176758766174316 - ], - [ - "▁faculty", - -11.176998138427734 - ], - [ - "▁Plant", - -11.177046775817871 - ], - [ - "pla", - -11.177295684814453 - ], - [ - "card", - -11.177618980407715 - ], - [ - "▁loose", - -11.177982330322266 - ], - [ - "▁PR", - -11.178044319152832 - ], - [ - "profit", - -11.178071022033691 - ], - [ - "▁channels", - -11.178119659423828 - ], - [ - "ATE", - -11.178257942199707 - ], - [ - "atic", - -11.178304672241211 - ], - [ - "wegen", - -11.178404808044434 - ], - [ - "word", - -11.178621292114258 - ], - [ - "▁sehen", - -11.178659439086914 - ], - [ - "▁nombre", - -11.178744316101074 - ], - [ - "▁DO", - 
-11.178763389587402 - ], - [ - "▁hoping", - -11.178949356079102 - ], - [ - "▁wollen", - -11.179091453552246 - ], - [ - "▁decat", - -11.179244995117188 - ], - [ - "IF", - -11.179386138916016 - ], - [ - "▁permission", - -11.179396629333496 - ], - [ - "▁Williams", - -11.179936408996582 - ], - [ - "▁beer", - -11.179962158203125 - ], - [ - "▁dernière", - -11.180052757263184 - ], - [ - "▁purchasing", - -11.18025016784668 - ], - [ - "▁pride", - -11.180416107177734 - ], - [ - "solv", - -11.180598258972168 - ], - [ - "ego", - -11.180691719055176 - ], - [ - "▁Oil", - -11.18079662322998 - ], - [ - "▁dishes", - -11.18102741241455 - ], - [ - "▁Baby", - -11.181109428405762 - ], - [ - "▁Roll", - -11.181137084960938 - ], - [ - "vez", - -11.18134593963623 - ], - [ - "▁drept", - -11.181367874145508 - ], - [ - "lly", - -11.18148136138916 - ], - [ - "▁potrivit", - -11.181495666503906 - ], - [ - "person", - -11.181961059570312 - ], - [ - "▁interactive", - -11.182269096374512 - ], - [ - "▁brilliant", - -11.182304382324219 - ], - [ - "▁000", - -11.182357788085938 - ], - [ - "▁giant", - -11.182657241821289 - ], - [ - "▁plain", - -11.182945251464844 - ], - [ - "▁lock", - -11.183197975158691 - ], - [ - "▁inspection", - -11.183762550354004 - ], - [ - "▁symbol", - -11.18392276763916 - ], - [ - "▁Gal", - -11.183953285217285 - ], - [ - "▁concepts", - -11.1840181350708 - ], - [ - "▁venture", - -11.18411922454834 - ], - [ - "▁Tr", - -11.184402465820312 - ], - [ - "▁Color", - -11.184469223022461 - ], - [ - "▁behalf", - -11.184635162353516 - ], - [ - "ink", - -11.184715270996094 - ], - [ - "atii", - -11.1848726272583 - ], - [ - "wie", - -11.184907913208008 - ], - [ - "▁stream", - -11.18514347076416 - ], - [ - "▁buyers", - -11.185192108154297 - ], - [ - "legen", - -11.185526847839355 - ], - [ - "iness", - -11.18578815460205 - ], - [ - "▁absolute", - -11.185945510864258 - ], - [ - "▁council", - -11.186067581176758 - ], - [ - "▁displayed", - -11.186172485351562 - ], - [ - "▁Bun", - -11.186405181884766 
- ], - [ - "▁darauf", - -11.186585426330566 - ], - [ - "▁rod", - -11.186829566955566 - ], - [ - "▁repeat", - -11.186898231506348 - ], - [ - "quelle", - -11.187023162841797 - ], - [ - "lation", - -11.187433242797852 - ], - [ - "gul", - -11.18774700164795 - ], - [ - "▁compensation", - -11.188064575195312 - ], - [ - "▁string", - -11.1881685256958 - ], - [ - "▁joining", - -11.188251495361328 - ], - [ - "▁Pra", - -11.188429832458496 - ], - [ - "hab", - -11.188936233520508 - ], - [ - "▁plane", - -11.189024925231934 - ], - [ - "▁conversion", - -11.189078330993652 - ], - [ - "▁lesson", - -11.189361572265625 - ], - [ - "bound", - -11.1893949508667 - ], - [ - "▁seats", - -11.18946361541748 - ], - [ - "voc", - -11.189902305603027 - ], - [ - "▁Disney", - -11.190120697021484 - ], - [ - "esse", - -11.190277099609375 - ], - [ - "▁awards", - -11.190279006958008 - ], - [ - "▁initiative", - -11.190483093261719 - ], - [ - "UM", - -11.19050407409668 - ], - [ - "▁intelligence", - -11.190763473510742 - ], - [ - "▁laser", - -11.191128730773926 - ], - [ - "än", - -11.191228866577148 - ], - [ - "▁generated", - -11.191231727600098 - ], - [ - "▁allen", - -11.19186782836914 - ], - [ - "▁Aug", - -11.19261360168457 - ], - [ - "lini", - -11.192968368530273 - ], - [ - "▁Update", - -11.193015098571777 - ], - [ - "▁grab", - -11.193095207214355 - ], - [ - "▁Bridge", - -11.193219184875488 - ], - [ - "rock", - -11.193289756774902 - ], - [ - "hold", - -11.193461418151855 - ], - [ - "seinen", - -11.193643569946289 - ], - [ - "▁false", - -11.193758010864258 - ], - [ - "type", - -11.193792343139648 - ], - [ - "▁outcome", - -11.193906784057617 - ], - [ - "▁crazy", - -11.194161415100098 - ], - [ - "▁Platz", - -11.194281578063965 - ], - [ - "▁believed", - -11.194426536560059 - ], - [ - "▁adjust", - -11.194503784179688 - ], - [ - "▁entrance", - -11.194644927978516 - ], - [ - "▁Colorado", - -11.194751739501953 - ], - [ - "▁concentration", - -11.194865226745605 - ], - [ - "aid", - -11.194958686828613 - ], - [ - 
"▁regardless", - -11.195035934448242 - ], - [ - "▁mici", - -11.195063591003418 - ], - [ - "▁potentially", - -11.195109367370605 - ], - [ - "▁Custom", - -11.195867538452148 - ], - [ - "rag", - -11.196009635925293 - ], - [ - "▁employer", - -11.19604206085205 - ], - [ - "tagged", - -11.196158409118652 - ], - [ - "▁34", - -11.196271896362305 - ], - [ - "fro", - -11.196895599365234 - ], - [ - "▁Pas", - -11.197010040283203 - ], - [ - "▁AS", - -11.197013854980469 - ], - [ - "PP", - -11.197031021118164 - ], - [ - "stru", - -11.19741439819336 - ], - [ - "grâce", - -11.198037147521973 - ], - [ - "▁anyway", - -11.198240280151367 - ], - [ - "▁streets", - -11.1986083984375 - ], - [ - "▁Region", - -11.199190139770508 - ], - [ - "▁newly", - -11.199280738830566 - ], - [ - "▁assistant", - -11.199461936950684 - ], - [ - "▁requests", - -11.199618339538574 - ], - [ - "▁Ohio", - -11.199705123901367 - ], - [ - "▁continuing", - -11.200072288513184 - ], - [ - "▁îm", - -11.200136184692383 - ], - [ - "7%", - -11.20031452178955 - ], - [ - "▁basically", - -11.200325965881348 - ], - [ - "gabe", - -11.200334548950195 - ], - [ - "▁ultra", - -11.200355529785156 - ], - [ - "pic", - -11.200571060180664 - ], - [ - "▁jeder", - -11.200939178466797 - ], - [ - "▁Cook", - -11.201225280761719 - ], - [ - "▁tie", - -11.201227188110352 - ], - [ - "▁yard", - -11.20151424407959 - ], - [ - "▁wash", - -11.20152759552002 - ], - [ - "▁3,", - -11.20194149017334 - ], - [ - "▁exista", - -11.202128410339355 - ], - [ - "▁egg", - -11.202342987060547 - ], - [ - "▁marché", - -11.202616691589355 - ], - [ - "kommen", - -11.202630996704102 - ], - [ - "▁Select", - -11.202999114990234 - ], - [ - "geben", - -11.203126907348633 - ], - [ - "▁Joseph", - -11.203531265258789 - ], - [ - "▁Ces", - -11.203642845153809 - ], - [ - "▁hundred", - -11.203676223754883 - ], - [ - "even", - -11.203792572021484 - ], - [ - "gal", - -11.204232215881348 - ], - [ - "800", - -11.20443058013916 - ], - [ - "▁Jones", - -11.204599380493164 - ], - [ - 
"ova", - -11.204681396484375 - ], - [ - "▁careful", - -11.204727172851562 - ], - [ - "▁alarm", - -11.205070495605469 - ], - [ - "NI", - -11.205113410949707 - ], - [ - "▁residence", - -11.205327987670898 - ], - [ - "▁wäre", - -11.20590877532959 - ], - [ - "▁Dor", - -11.205986976623535 - ], - [ - "▁amounts", - -11.206369400024414 - ], - [ - "▁mistake", - -11.206687927246094 - ], - [ - "ates", - -11.206796646118164 - ], - [ - "▁bune", - -11.206951141357422 - ], - [ - "▁vegetables", - -11.207124710083008 - ], - [ - "▁Ann", - -11.207204818725586 - ], - [ - "logical", - -11.20776081085205 - ], - [ - "stadt", - -11.207806587219238 - ], - [ - "▁chances", - -11.207921981811523 - ], - [ - "%)", - -11.208030700683594 - ], - [ - "▁minimal", - -11.20810604095459 - ], - [ - "▁naturally", - -11.20817756652832 - ], - [ - "▁Geld", - -11.20822525024414 - ], - [ - "▁Yu", - -11.208361625671387 - ], - [ - "▁wrap", - -11.20840072631836 - ], - [ - "rest", - -11.208674430847168 - ], - [ - "▁legs", - -11.208758354187012 - ], - [ - "PM", - -11.208806991577148 - ], - [ - "▁Heart", - -11.208888053894043 - ], - [ - "▁suspect", - -11.209020614624023 - ], - [ - "Go", - -11.209098815917969 - ], - [ - "▁Fil", - -11.209175109863281 - ], - [ - "▁YOU", - -11.209175109863281 - ], - [ - "▁victory", - -11.209245681762695 - ], - [ - "pun", - -11.20960807800293 - ], - [ - "▁Zo", - -11.209632873535156 - ], - [ - "CT", - -11.209640502929688 - ], - [ - "▁trim", - -11.20969009399414 - ], - [ - "▁stuck", - -11.209836959838867 - ], - [ - "ators", - -11.209877014160156 - ], - [ - "▁Ideas", - -11.210016250610352 - ], - [ - "▁voyage", - -11.210166931152344 - ], - [ - "▁Restaurant", - -11.210205078125 - ], - [ - "▁pat", - -11.210234642028809 - ], - [ - "▁bond", - -11.210521697998047 - ], - [ - "▁Del", - -11.210552215576172 - ], - [ - "▁fighting", - -11.210705757141113 - ], - [ - "▁concerning", - -11.210867881774902 - ], - [ - "▁etwa", - -11.211141586303711 - ], - [ - "▁Thema", - -11.211237907409668 - ], - [ - 
"▁preferred", - -11.211423873901367 - ], - [ - "▁pitch", - -11.211465835571289 - ], - [ - "▁Singapore", - -11.211971282958984 - ], - [ - "▁tub", - -11.212018013000488 - ], - [ - "FT", - -11.212053298950195 - ], - [ - "▁Product", - -11.21212100982666 - ], - [ - "▁applying", - -11.212285995483398 - ], - [ - "▁Fr", - -11.212340354919434 - ], - [ - "ţa", - -11.212599754333496 - ], - [ - "▁iPad", - -11.212861061096191 - ], - [ - "PD", - -11.2129545211792 - ], - [ - "▁comun", - -11.212995529174805 - ], - [ - "▁pie", - -11.213286399841309 - ], - [ - "rank", - -11.21364688873291 - ], - [ - "tron", - -11.213677406311035 - ], - [ - "▁pest", - -11.213906288146973 - ], - [ - "▁herself", - -11.213936805725098 - ], - [ - "▁intense", - -11.213964462280273 - ], - [ - "foot", - -11.21413803100586 - ], - [ - "▁1998", - -11.2141695022583 - ], - [ - "▁anxiety", - -11.214616775512695 - ], - [ - "▁portable", - -11.214674949645996 - ], - [ - "▁harm", - -11.214735984802246 - ], - [ - "▁admit", - -11.214885711669922 - ], - [ - "sted", - -11.214900016784668 - ], - [ - "▁regions", - -11.215450286865234 - ], - [ - "cie", - -11.215556144714355 - ], - [ - "▁robust", - -11.21577262878418 - ], - [ - "▁stem", - -11.215982437133789 - ], - [ - "▁roles", - -11.216024398803711 - ], - [ - "▁Latin", - -11.216224670410156 - ], - [ - "▁Ré", - -11.216378211975098 - ], - [ - "▁ref", - -11.216381072998047 - ], - [ - "isme", - -11.216426849365234 - ], - [ - "▁contribution", - -11.216776847839355 - ], - [ - "▁forever", - -11.217447280883789 - ], - [ - "▁frei", - -11.21754264831543 - ], - [ - "▁mont", - -11.217818260192871 - ], - [ - "that", - -11.217999458312988 - ], - [ - "▁sensitive", - -11.218116760253906 - ], - [ - "▁wider", - -11.218175888061523 - ], - [ - "AF", - -11.218234062194824 - ], - [ - "▁liability", - -11.218748092651367 - ], - [ - "ţiei", - -11.219043731689453 - ], - [ - "▁Cho", - -11.219260215759277 - ], - [ - "aria", - -11.21960735321045 - ], - [ - "rang", - -11.21977710723877 - ], - [ - 
"▁Account", - -11.21986198425293 - ], - [ - "▁III", - -11.219941139221191 - ], - [ - "▁tooth", - -11.220222473144531 - ], - [ - "▁factory", - -11.220240592956543 - ], - [ - "▁dropped", - -11.220495223999023 - ], - [ - "horn", - -11.220780372619629 - ], - [ - "RP", - -11.221110343933105 - ], - [ - "▁container", - -11.22118091583252 - ], - [ - "fran", - -11.221474647521973 - ], - [ - "▁lawyer", - -11.221842765808105 - ], - [ - "▁Image", - -11.221907615661621 - ], - [ - "HO", - -11.22195816040039 - ], - [ - "▁incorporate", - -11.221992492675781 - ], - [ - "▁lume", - -11.22226333618164 - ], - [ - "GA", - -11.222331047058105 - ], - [ - "itati", - -11.222370147705078 - ], - [ - "autre", - -11.222665786743164 - ], - [ - "ierten", - -11.222688674926758 - ], - [ - "[", - -11.222746849060059 - ], - [ - "▁packages", - -11.222758293151855 - ], - [ - "▁Simon", - -11.22290325164795 - ], - [ - "▁somewhat", - -11.223734855651855 - ], - [ - "mbo", - -11.223737716674805 - ], - [ - "lite", - -11.223844528198242 - ], - [ - "▁eliminate", - -11.22395133972168 - ], - [ - "▁decrease", - -11.224117279052734 - ], - [ - "▁geben", - -11.224214553833008 - ], - [ - "▁approaches", - -11.224482536315918 - ], - [ - "▁tissue", - -11.224940299987793 - ], - [ - "▁personne", - -11.225192070007324 - ], - [ - "ional", - -11.225587844848633 - ], - [ - "unable", - -11.2256498336792 - ], - [ - "▁Case", - -11.225736618041992 - ], - [ - "hill", - -11.225744247436523 - ], - [ - "och", - -11.225862503051758 - ], - [ - "▁minister", - -11.225920677185059 - ], - [ - "▁Rad", - -11.226285934448242 - ], - [ - "▁yoga", - -11.226390838623047 - ], - [ - "▁encounter", - -11.22661018371582 - ], - [ - "text", - -11.22670841217041 - ], - [ - "▁OS", - -11.226719856262207 - ], - [ - "▁opera", - -11.22673225402832 - ], - [ - "▁loving", - -11.226977348327637 - ], - [ - "▁birds", - -11.227363586425781 - ], - [ - "▁prim", - -11.227389335632324 - ], - [ - "easca", - -11.227432250976562 - ], - [ - "park", - -11.227453231811523 - 
], - [ - "fü", - -11.227797508239746 - ], - [ - "▁champion", - -11.227824211120605 - ], - [ - "▁warning", - -11.228245735168457 - ], - [ - "DC", - -11.228271484375 - ], - [ - "▁yield", - -11.228310585021973 - ], - [ - "raum", - -11.228334426879883 - ], - [ - "▁Student", - -11.228434562683105 - ], - [ - "▁Rev", - -11.22848892211914 - ], - [ - "▁Fu", - -11.228501319885254 - ], - [ - "▁intra", - -11.22854232788086 - ], - [ - "▁proces", - -11.228585243225098 - ], - [ - "▁margin", - -11.228621482849121 - ], - [ - "lands", - -11.228816986083984 - ], - [ - "04", - -11.228952407836914 - ], - [ - "▁Steel", - -11.229897499084473 - ], - [ - "▁besoin", - -11.230081558227539 - ], - [ - "şti", - -11.230561256408691 - ], - [ - "▁39", - -11.230635643005371 - ], - [ - "▁outcomes", - -11.230677604675293 - ], - [ - "wert", - -11.230719566345215 - ], - [ - "3,", - -11.23080062866211 - ], - [ - "▁hole", - -11.230888366699219 - ], - [ - "▁Create", - -11.23096752166748 - ], - [ - "▁hall", - -11.231266975402832 - ], - [ - "nach", - -11.231595039367676 - ], - [ - "▁indicate", - -11.232311248779297 - ], - [ - "cum", - -11.232604026794434 - ], - [ - "▁Mann", - -11.232690811157227 - ], - [ - "▁reaction", - -11.232828140258789 - ], - [ - "▁empty", - -11.23289680480957 - ], - [ - "▁Sign", - -11.232941627502441 - ], - [ - "▁pm", - -11.23300838470459 - ], - [ - "erung", - -11.23322582244873 - ], - [ - "▁würde", - -11.233592987060547 - ], - [ - "▁declarat", - -11.233602523803711 - ], - [ - "6%", - -11.23371410369873 - ], - [ - "▁Client", - -11.23377513885498 - ], - [ - "vil", - -11.234295845031738 - ], - [ - "▁electricity", - -11.234469413757324 - ], - [ - "▁75", - -11.234505653381348 - ], - [ - "▁buna", - -11.234505653381348 - ], - [ - "eşte", - -11.23473834991455 - ], - [ - "▁prop", - -11.234792709350586 - ], - [ - "▁journal", - -11.234883308410645 - ], - [ - "▁meu", - -11.23495101928711 - ], - [ - "▁chef", - -11.235034942626953 - ], - [ - "▁Ever", - -11.235102653503418 - ], - [ - "▁feelings", - 
-11.235466003417969 - ], - [ - "PT", - -11.23551082611084 - ], - [ - "▁proposal", - -11.235651969909668 - ], - [ - "▁Its", - -11.235709190368652 - ], - [ - "▁2013.", - -11.235795974731445 - ], - [ - "▁Bundes", - -11.23595142364502 - ], - [ - "▁droit", - -11.236333847045898 - ], - [ - "▁10%", - -11.236671447753906 - ], - [ - "gard", - -11.236772537231445 - ], - [ - "information", - -11.236814498901367 - ], - [ - "FE", - -11.237309455871582 - ], - [ - "▁Dun", - -11.237340927124023 - ], - [ - "▁Stock", - -11.237472534179688 - ], - [ - "ație", - -11.2374849319458 - ], - [ - "▁mag", - -11.237603187561035 - ], - [ - "▁br", - -11.237665176391602 - ], - [ - "▁sight", - -11.237772941589355 - ], - [ - "phone", - -11.237796783447266 - ], - [ - "▁Cy", - -11.237811088562012 - ], - [ - "▁opposite", - -11.238035202026367 - ], - [ - "ically", - -11.238235473632812 - ], - [ - "großen", - -11.238388061523438 - ], - [ - "▁Without", - -11.23845100402832 - ], - [ - "espace", - -11.238515853881836 - ], - [ - "▁chairs", - -11.238595008850098 - ], - [ - "▁matches", - -11.238685607910156 - ], - [ - "ateur", - -11.238697052001953 - ], - [ - "▁Cost", - -11.238699913024902 - ], - [ - "▁WordPress", - -11.238880157470703 - ], - [ - "▁Opera", - -11.239195823669434 - ], - [ - "walked", - -11.239234924316406 - ], - [ - "▁transactions", - -11.239521026611328 - ], - [ - "▁nuclear", - -11.239579200744629 - ], - [ - "ways", - -11.239594459533691 - ], - [ - "▁Oct", - -11.239738464355469 - ], - [ - "▁bomb", - -11.239835739135742 - ], - [ - "▁tracking", - -11.239879608154297 - ], - [ - "▁photograph", - -11.240066528320312 - ], - [ - "bio", - -11.240309715270996 - ], - [ - "▁branch", - -11.240363121032715 - ], - [ - "▁$5", - -11.240684509277344 - ], - [ - "▁diagram", - -11.240986824035645 - ], - [ - "▁Hard", - -11.241218566894531 - ], - [ - "bach", - -11.241232872009277 - ], - [ - "▁42", - -11.241249084472656 - ], - [ - "logy", - -11.241472244262695 - ], - [ - "▁tile", - -11.241593360900879 - ], - [ - 
"▁API", - -11.241833686828613 - ], - [ - "seront", - -11.24204158782959 - ], - [ - "ENT", - -11.242156982421875 - ], - [ - "▁accommodation", - -11.242409706115723 - ], - [ - "▁fiber", - -11.242438316345215 - ], - [ - "▁Give", - -11.242792129516602 - ], - [ - "▁Gas", - -11.242916107177734 - ], - [ - "▁Spain", - -11.243086814880371 - ], - [ - "▁listing", - -11.24312686920166 - ], - [ - "▁blocks", - -11.24349308013916 - ], - [ - "▁constitu", - -11.243762969970703 - ], - [ - "▁convenience", - -11.243797302246094 - ], - [ - "▁prize", - -11.243823051452637 - ], - [ - "▁aircraft", - -11.24404239654541 - ], - [ - "containing", - -11.244124412536621 - ], - [ - "▁vice", - -11.244247436523438 - ], - [ - "▁organisations", - -11.244304656982422 - ], - [ - "▁complicated", - -11.244588851928711 - ], - [ - "rons", - -11.244647979736328 - ], - [ - "▁bars", - -11.244670867919922 - ], - [ - "était", - -11.244705200195312 - ], - [ - "▁checking", - -11.245287895202637 - ], - [ - "vant", - -11.245542526245117 - ], - [ - "▁couch", - -11.245657920837402 - ], - [ - "▁brush", - -11.245870590209961 - ], - [ - "▁printer", - -11.245922088623047 - ], - [ - "▁Rat", - -11.246051788330078 - ], - [ - "▁announce", - -11.246057510375977 - ], - [ - "▁salari", - -11.246200561523438 - ], - [ - "▁Sk", - -11.246356964111328 - ], - [ - "pal", - -11.246383666992188 - ], - [ - "▁yards", - -11.24658203125 - ], - [ - "▁flexibility", - -11.246652603149414 - ], - [ - "▁jamais", - -11.24670696258545 - ], - [ - "UC", - -11.246740341186523 - ], - [ - "▁4,", - -11.246793746948242 - ], - [ - "▁Made", - -11.247078895568848 - ], - [ - "▁solche", - -11.247113227844238 - ], - [ - "▁tri", - -11.247237205505371 - ], - [ - "▁outfit", - -11.247243881225586 - ], - [ - "м", - -11.247267723083496 - ], - [ - "▁encouraged", - -11.247477531433105 - ], - [ - "trac", - -11.247552871704102 - ], - [ - "▁genetic", - -11.24755859375 - ], - [ - "▁beneficial", - -11.247747421264648 - ], - [ - "mă", - -11.247849464416504 - ], - [ - 
"involving", - -11.247879028320312 - ], - [ - "▁knee", - -11.247879028320312 - ], - [ - "▁respective", - -11.248316764831543 - ], - [ - "▁controlled", - -11.248350143432617 - ], - [ - "▁Rück", - -11.24837589263916 - ], - [ - "LC", - -11.248592376708984 - ], - [ - "▁highlight", - -11.248634338378906 - ], - [ - "chem", - -11.248797416687012 - ], - [ - "▁Bis", - -11.24956226348877 - ], - [ - "▁graphics", - -11.249592781066895 - ], - [ - "▁posibil", - -11.249672889709473 - ], - [ - "orul", - -11.249682426452637 - ], - [ - "imagin", - -11.249836921691895 - ], - [ - "▁draft", - -11.250006675720215 - ], - [ - "shaped", - -11.250219345092773 - ], - [ - "▁suggests", - -11.250221252441406 - ], - [ - "uvre", - -11.250509262084961 - ], - [ - "page", - -11.250545501708984 - ], - [ - "▁sentiment", - -11.250685691833496 - ], - [ - "▁loop", - -11.251015663146973 - ], - [ - "▁Quality", - -11.251839637756348 - ], - [ - "▁volunteers", - -11.251869201660156 - ], - [ - "▁representation", - -11.251923561096191 - ], - [ - "▁examination", - -11.252134323120117 - ], - [ - "▁(2)", - -11.252225875854492 - ], - [ - "assi", - -11.252435684204102 - ], - [ - "▁till", - -11.252486228942871 - ], - [ - "▁Catholic", - -11.252618789672852 - ], - [ - "▁2020", - -11.252726554870605 - ], - [ - "▁random", - -11.252764701843262 - ], - [ - "tage", - -11.253146171569824 - ], - [ - "▁baking", - -11.253690719604492 - ], - [ - "▁Musik", - -11.253852844238281 - ], - [ - "▁SC", - -11.253867149353027 - ], - [ - "▁möchte", - -11.254390716552734 - ], - [ - "▁gene", - -11.254411697387695 - ], - [ - "▁kam", - -11.254928588867188 - ], - [ - "▁inspire", - -11.254974365234375 - ], - [ - "unk", - -11.255097389221191 - ], - [ - "▁Final", - -11.255477905273438 - ], - [ - "▁jeden", - -11.255497932434082 - ], - [ - "▁LLC", - -11.255962371826172 - ], - [ - "▁sistem", - -11.25613784790039 - ], - [ - "▁stages", - -11.256441116333008 - ], - [ - "▁texture", - -11.256613731384277 - ], - [ - "rib", - -11.256739616394043 - ], - [ - 
"lung", - -11.256782531738281 - ], - [ - "▁breath", - -11.256814002990723 - ], - [ - "▁hosted", - -11.256844520568848 - ], - [ - "▁Kingdom", - -11.257079124450684 - ], - [ - "▁politics", - -11.257121086120605 - ], - [ - "▁mood", - -11.257122993469238 - ], - [ - "cam", - -11.257285118103027 - ], - [ - "▁liked", - -11.257287979125977 - ], - [ - "▁Credit", - -11.257304191589355 - ], - [ - "tisch", - -11.257527351379395 - ], - [ - "▁everywhere", - -11.257692337036133 - ], - [ - "▁poti", - -11.257915496826172 - ], - [ - "▁fruits", - -11.258264541625977 - ], - [ - "oire", - -11.258322715759277 - ], - [ - "▁mesure", - -11.258586883544922 - ], - [ - "▁Studies", - -11.258838653564453 - ], - [ - "▁provision", - -11.25888729095459 - ], - [ - "▁Maria", - -11.258927345275879 - ], - [ - "▁necessarily", - -11.259103775024414 - ], - [ - "▁Net", - -11.259212493896484 - ], - [ - "▁scar", - -11.259307861328125 - ], - [ - "▁tracks", - -11.259424209594727 - ], - [ - "▁ads", - -11.259856224060059 - ], - [ - "termin", - -11.259861946105957 - ], - [ - "▁Yo", - -11.26022720336914 - ], - [ - "atory", - -11.260252952575684 - ], - [ - "itoare", - -11.26025676727295 - ], - [ - "▁colours", - -11.260563850402832 - ], - [ - "▁correctly", - -11.260817527770996 - ], - [ - "▁Trade", - -11.26090145111084 - ], - [ - "▁Week", - -11.261052131652832 - ], - [ - "▁Premier", - -11.261499404907227 - ], - [ - "▁designers", - -11.261600494384766 - ], - [ - "▁BE", - -11.261879920959473 - ], - [ - "▁desktop", - -11.261929512023926 - ], - [ - "▁lifetime", - -11.262046813964844 - ], - [ - "▁Kind", - -11.26213264465332 - ], - [ - "▁divers", - -11.262246131896973 - ], - [ - "rain", - -11.262260437011719 - ], - [ - "▁Von", - -11.262263298034668 - ], - [ - "▁bal", - -11.262568473815918 - ], - [ - "▁shots", - -11.262624740600586 - ], - [ - "▁accommodate", - -11.262767791748047 - ], - [ - "▁Paper", - -11.263001441955566 - ], - [ - "▁interaction", - -11.263191223144531 - ], - [ - "▁acquisition", - -11.263233184814453 - 
], - [ - "▁neuro", - -11.26378345489502 - ], - [ - "▁institution", - -11.26391887664795 - ], - [ - "▁automatic", - -11.26403522491455 - ], - [ - "▁assess", - -11.264177322387695 - ], - [ - "▁manifest", - -11.264199256896973 - ], - [ - "▁audit", - -11.264202117919922 - ], - [ - "▁câte", - -11.264406204223633 - ], - [ - "▁insight", - -11.264533996582031 - ], - [ - "▁lange", - -11.264781951904297 - ], - [ - "▁retirement", - -11.264795303344727 - ], - [ - "sons", - -11.264864921569824 - ], - [ - "▁Asian", - -11.26492691040039 - ], - [ - "▁rail", - -11.264978408813477 - ], - [ - "▁Awards", - -11.264982223510742 - ], - [ - "Avec", - -11.265035629272461 - ], - [ - "SO", - -11.26511287689209 - ], - [ - "para", - -11.265304565429688 - ], - [ - "▁tant", - -11.265562057495117 - ], - [ - "▁strike", - -11.265693664550781 - ], - [ - "▁transformation", - -11.265742301940918 - ], - [ - "▁leicht", - -11.26586627960205 - ], - [ - "л", - -11.265996932983398 - ], - [ - "fat", - -11.26629638671875 - ], - [ - "▁Qui", - -11.266626358032227 - ], - [ - "▁chip", - -11.26663589477539 - ], - [ - "titude", - -11.266640663146973 - ], - [ - "▁Projekt", - -11.266998291015625 - ], - [ - "▁statt", - -11.267010688781738 - ], - [ - "▁findet", - -11.267184257507324 - ], - [ - "▁telephone", - -11.267251968383789 - ], - [ - "▁staying", - -11.267267227172852 - ], - [ - "▁Mess", - -11.267353057861328 - ], - [ - "▁patio", - -11.267382621765137 - ], - [ - "▁afla", - -11.267890930175781 - ], - [ - "▁administrative", - -11.267910957336426 - ], - [ - "▁gemeinsam", - -11.268129348754883 - ], - [ - "▁suppliers", - -11.268136024475098 - ], - [ - "ark", - -11.268181800842285 - ], - [ - "▁rice", - -11.268397331237793 - ], - [ - "▁stretch", - -11.268439292907715 - ], - [ - "▁compact", - -11.268651008605957 - ], - [ - "fire", - -11.268756866455078 - ], - [ - "в", - -11.268963813781738 - ], - [ - "vision", - -11.269035339355469 - ], - [ - "▁Mag", - -11.269368171691895 - ], - [ - "▁dreams", - -11.269472122192383 - ], - 
[ - "▁funny", - -11.26968765258789 - ], - [ - "▁lässt", - -11.270216941833496 - ], - [ - "cade", - -11.270448684692383 - ], - [ - "▁drama", - -11.270484924316406 - ], - [ - "▁schimb", - -11.270767211914062 - ], - [ - "PO", - -11.270785331726074 - ], - [ - "▁Sim", - -11.270806312561035 - ], - [ - "▁motivation", - -11.271045684814453 - ], - [ - "▁presents", - -11.27138614654541 - ], - [ - "▁1997", - -11.271828651428223 - ], - [ - "agi", - -11.271883010864258 - ], - [ - "▁optimal", - -11.27198314666748 - ], - [ - "▁folder", - -11.271995544433594 - ], - [ - "stro", - -11.272034645080566 - ], - [ - "▁Han", - -11.272072792053223 - ], - [ - "▁Ei", - -11.27220344543457 - ], - [ - "▁pus", - -11.272356986999512 - ], - [ - "▁Learning", - -11.272531509399414 - ], - [ - "oop", - -11.272603034973145 - ], - [ - "▁Type", - -11.272658348083496 - ], - [ - "space", - -11.272665023803711 - ], - [ - "▁define", - -11.273098945617676 - ], - [ - "▁plug", - -11.273098945617676 - ], - [ - "yard", - -11.273188591003418 - ], - [ - "▁utility", - -11.273297309875488 - ], - [ - "über", - -11.273561477661133 - ], - [ - "▁commun", - -11.273627281188965 - ], - [ - "▁directed", - -11.273842811584473 - ], - [ - "▁consent", - -11.273893356323242 - ], - [ - "▁DNA", - -11.274068832397461 - ], - [ - "▁statements", - -11.274130821228027 - ], - [ - "real", - -11.274298667907715 - ], - [ - "active", - -11.274430274963379 - ], - [ - "school", - -11.274965286254883 - ], - [ - "▁mic", - -11.275360107421875 - ], - [ - "▁acestui", - -11.275467872619629 - ], - [ - "scale", - -11.27550220489502 - ], - [ - "▁Mid", - -11.275628089904785 - ], - [ - "▁Chair", - -11.275874137878418 - ], - [ - "к", - -11.275936126708984 - ], - [ - "▁Bas", - -11.27630615234375 - ], - [ - "▁38", - -11.276379585266113 - ], - [ - "erin", - -11.276461601257324 - ], - [ - "▁Everyone", - -11.27686882019043 - ], - [ - "COM", - -11.276907920837402 - ], - [ - "▁chronic", - -11.277079582214355 - ], - [ - "▁doctors", - -11.277222633361816 - ], - [ 
- "▁sh", - -11.277276039123535 - ], - [ - "sport", - -11.27740478515625 - ], - [ - "▁volunteer", - -11.277512550354004 - ], - [ - "▁drinking", - -11.277839660644531 - ], - [ - "▁Mas", - -11.277868270874023 - ], - [ - "▁pursue", - -11.2780122756958 - ], - [ - "▁exposed", - -11.278536796569824 - ], - [ - "exe", - -11.278660774230957 - ], - [ - "hung", - -11.278841972351074 - ], - [ - "▁Tier", - -11.278921127319336 - ], - [ - "▁plac", - -11.279121398925781 - ], - [ - "▁proiect", - -11.279136657714844 - ], - [ - "▁literally", - -11.279288291931152 - ], - [ - "▁acolo", - -11.279412269592285 - ], - [ - "▁User", - -11.279485702514648 - ], - [ - "UT", - -11.279598236083984 - ], - [ - "▁hyper", - -11.279623985290527 - ], - [ - "▁seed", - -11.279794692993164 - ], - [ - "▁literature", - -11.2802734375 - ], - [ - "▁Holy", - -11.280373573303223 - ], - [ - "▁jeu", - -11.280396461486816 - ], - [ - "▁licensed", - -11.280896186828613 - ], - [ - "station", - -11.280900955200195 - ], - [ - "▁criteria", - -11.281292915344238 - ], - [ - "▁sufficient", - -11.281292915344238 - ], - [ - "▁gestion", - -11.281512260437012 - ], - [ - "▁pic", - -11.281549453735352 - ], - [ - "▁64", - -11.28170108795166 - ], - [ - "▁facts", - -11.281905174255371 - ], - [ - "▁Bild", - -11.282098770141602 - ], - [ - "obi", - -11.28212833404541 - ], - [ - "▁nie", - -11.282362937927246 - ], - [ - "▁Jewish", - -11.282756805419922 - ], - [ - "bor", - -11.28281307220459 - ], - [ - "▁1980", - -11.28286361694336 - ], - [ - "▁Fach", - -11.282917976379395 - ], - [ - "craft", - -11.283047676086426 - ], - [ - "▁Pakistan", - -11.283408164978027 - ], - [ - "▁Mos", - -11.283621788024902 - ], - [ - "▁toilet", - -11.283844947814941 - ], - [ - "partea", - -11.28391170501709 - ], - [ - "case", - -11.284221649169922 - ], - [ - "▁clock", - -11.28430461883545 - ], - [ - "▁parc", - -11.284602165222168 - ], - [ - "▁legislation", - -11.284692764282227 - ], - [ - "▁icon", - -11.284933090209961 - ], - [ - "etz", - -11.285178184509277 - 
], - [ - "ept", - -11.285270690917969 - ], - [ - "▁Corporation", - -11.28585433959961 - ], - [ - "▁requested", - -11.285983085632324 - ], - [ - "▁column", - -11.286088943481445 - ], - [ - "rier", - -11.286120414733887 - ], - [ - "uß", - -11.2861967086792 - ], - [ - "▁wohl", - -11.286418914794922 - ], - [ - "tell", - -11.286569595336914 - ], - [ - "gno", - -11.286608695983887 - ], - [ - "▁diseases", - -11.286726951599121 - ], - [ - "Sch", - -11.286762237548828 - ], - [ - "▁colon", - -11.287075996398926 - ], - [ - "▁Based", - -11.28709602355957 - ], - [ - "▁flu", - -11.28725528717041 - ], - [ - "▁vocal", - -11.287408828735352 - ], - [ - "▁virus", - -11.287693977355957 - ], - [ - "▁traveling", - -11.287750244140625 - ], - [ - "bul", - -11.287837982177734 - ], - [ - "т", - -11.28794002532959 - ], - [ - "city", - -11.287961959838867 - ], - [ - "AU", - -11.287991523742676 - ], - [ - "wide", - -11.288037300109863 - ], - [ - "▁solo", - -11.288061141967773 - ], - [ - "▁functionality", - -11.288214683532715 - ], - [ - "▁reveal", - -11.28831672668457 - ], - [ - "sign", - -11.288952827453613 - ], - [ - "▁closing", - -11.288971900939941 - ], - [ - "▁peak", - -11.289087295532227 - ], - [ - "▁practic", - -11.289398193359375 - ], - [ - "than", - -11.289473533630371 - ], - [ - "▁driven", - -11.289484977722168 - ], - [ - "êtes", - -11.289548873901367 - ], - [ - "high", - -11.290016174316406 - ], - [ - "power", - -11.290226936340332 - ], - [ - "▁Lin", - -11.29028606414795 - ], - [ - "▁dose", - -11.29034423828125 - ], - [ - "▁pocket", - -11.290650367736816 - ], - [ - "▁Classic", - -11.29067611694336 - ], - [ - "▁packaging", - -11.290792465209961 - ], - [ - "▁distinct", - -11.290800094604492 - ], - [ - "▁côté", - -11.291094779968262 - ], - [ - "▁breast", - -11.29127025604248 - ], - [ - "▁folosit", - -11.29133129119873 - ], - [ - "▁drinks", - -11.291353225708008 - ], - [ - "▁Dog", - -11.291529655456543 - ], - [ - "ailleurs", - -11.291658401489258 - ], - [ - "▁caz", - -11.291804313659668 
- ], - [ - "▁escape", - -11.29188346862793 - ], - [ - "▁warranty", - -11.291902542114258 - ], - [ - "▁pulled", - -11.291996955871582 - ], - [ - "data", - -11.292088508605957 - ], - [ - "▁facilitate", - -11.292213439941406 - ], - [ - "É", - -11.292335510253906 - ], - [ - "▁SP", - -11.292403221130371 - ], - [ - "lant", - -11.292557716369629 - ], - [ - "AD", - -11.29256534576416 - ], - [ - "▁Print", - -11.292802810668945 - ], - [ - "mond", - -11.292863845825195 - ], - [ - "▁strange", - -11.292875289916992 - ], - [ - "▁Hor", - -11.293227195739746 - ], - [ - "▁Collection", - -11.293328285217285 - ], - [ - "arm", - -11.29346752166748 - ], - [ - "cas", - -11.293691635131836 - ], - [ - "arrow", - -11.29379940032959 - ], - [ - "▁carrying", - -11.293927192687988 - ], - [ - "▁wave", - -11.294661521911621 - ], - [ - "setzt", - -11.294907569885254 - ], - [ - "▁construct", - -11.29514217376709 - ], - [ - "▁acts", - -11.295269966125488 - ], - [ - "▁Action", - -11.295342445373535 - ], - [ - "▁Kim", - -11.295354843139648 - ], - [ - "oxid", - -11.295459747314453 - ], - [ - "fish", - -11.295519828796387 - ], - [ - "▁damaged", - -11.295660018920898 - ], - [ - "▁Greek", - -11.295747756958008 - ], - [ - "▁belt", - -11.295772552490234 - ], - [ - "▁Prior", - -11.295778274536133 - ], - [ - "▁marks", - -11.295936584472656 - ], - [ - "▁lumea", - -11.296183586120605 - ], - [ - "▁twenty", - -11.296196937561035 - ], - [ - "▁locul", - -11.296360969543457 - ], - [ - "▁Army", - -11.296524047851562 - ], - [ - "apt", - -11.296602249145508 - ], - [ - "▁limits", - -11.296733856201172 - ], - [ - "▁cruise", - -11.296966552734375 - ], - [ - "▁List", - -11.296998023986816 - ], - [ - "utilisation", - -11.29753589630127 - ], - [ - "▁personality", - -11.297622680664062 - ], - [ - "▁sections", - -11.297759056091309 - ], - [ - "▁drawn", - -11.29797649383545 - ], - [ - "▁mold", - -11.298277854919434 - ], - [ - "▁Think", - -11.298333168029785 - ], - [ - "▁holidays", - -11.298355102539062 - ], - [ - "▁critic", - 
-11.298545837402344 - ], - [ - "grade", - -11.298660278320312 - ], - [ - "▁sick", - -11.299074172973633 - ], - [ - "▁characteristics", - -11.299237251281738 - ], - [ - "▁echipa", - -11.299272537231445 - ], - [ - "▁Fast", - -11.29929256439209 - ], - [ - "▁Br", - -11.299600601196289 - ], - [ - "▁Reise", - -11.299734115600586 - ], - [ - "teen", - -11.299749374389648 - ], - [ - "uci", - -11.299949645996094 - ], - [ - "!”", - -11.300180435180664 - ], - [ - "ppe", - -11.300532341003418 - ], - [ - "▁talked", - -11.301164627075195 - ], - [ - "▁gap", - -11.301473617553711 - ], - [ - "homme", - -11.301778793334961 - ], - [ - "▁interact", - -11.301934242248535 - ], - [ - "▁dollar", - -11.302276611328125 - ], - [ - "▁bone", - -11.302309036254883 - ], - [ - "▁Einsatz", - -11.302343368530273 - ], - [ - "▁sad", - -11.302434921264648 - ], - [ - "any", - -11.302445411682129 - ], - [ - "tation", - -11.302666664123535 - ], - [ - "▁Haupt", - -11.302748680114746 - ], - [ - "iva", - -11.302781105041504 - ], - [ - "▁Schu", - -11.302916526794434 - ], - [ - "▁evaluate", - -11.3036470413208 - ], - [ - "▁variant", - -11.303807258605957 - ], - [ - "▁IS", - -11.303879737854004 - ], - [ - "▁PRO", - -11.303947448730469 - ], - [ - "▁vine", - -11.303959846496582 - ], - [ - "rut", - -11.304062843322754 - ], - [ - "▁existence", - -11.30443286895752 - ], - [ - "-7", - -11.304525375366211 - ], - [ - "ancy", - -11.304702758789062 - ], - [ - "▁Want", - -11.305023193359375 - ], - [ - "alism", - -11.305127143859863 - ], - [ - "ranging", - -11.30550765991211 - ], - [ - "preis", - -11.305551528930664 - ], - [ - "All", - -11.305620193481445 - ], - [ - "▁reception", - -11.30565071105957 - ], - [ - "mai", - -11.305730819702148 - ], - [ - "▁lease", - -11.30577278137207 - ], - [ - "▁finest", - -11.30578899383545 - ], - [ - "▁evident", - -11.305874824523926 - ], - [ - "▁Easy", - -11.306075096130371 - ], - [ - "▁gilt", - -11.306085586547852 - ], - [ - "▁trips", - -11.306344985961914 - ], - [ - "▁skilled", - 
-11.306368827819824 - ], - [ - "consists", - -11.306456565856934 - ], - [ - "front", - -11.306635856628418 - ], - [ - "rati", - -11.306652069091797 - ], - [ - "▁Following", - -11.30678653717041 - ], - [ - "▁Medicine", - -11.307161331176758 - ], - [ - "▁pune", - -11.30729866027832 - ], - [ - "▁errors", - -11.307354927062988 - ], - [ - "arian", - -11.307613372802734 - ], - [ - "lib", - -11.30811882019043 - ], - [ - "SR", - -11.308351516723633 - ], - [ - "ML", - -11.308568000793457 - ], - [ - "▁Safety", - -11.308823585510254 - ], - [ - "▁clar", - -11.309355735778809 - ], - [ - "New", - -11.309764862060547 - ], - [ - "▁37", - -11.309773445129395 - ], - [ - "▁Administration", - -11.309823036193848 - ], - [ - "▁2.0", - -11.310120582580566 - ], - [ - "▁obviously", - -11.310196876525879 - ], - [ - "▁Mitarbeiter", - -11.310254096984863 - ], - [ - "▁improvements", - -11.31043529510498 - ], - [ - "▁Cut", - -11.310630798339844 - ], - [ - "▁Natural", - -11.310672760009766 - ], - [ - "▁arrival", - -11.311182975769043 - ], - [ - "▁pizza", - -11.311339378356934 - ], - [ - "eşti", - -11.311570167541504 - ], - [ - "cept", - -11.311654090881348 - ], - [ - "▁livre", - -11.311686515808105 - ], - [ - "▁nombreux", - -11.312195777893066 - ], - [ - "▁authentic", - -11.312231063842773 - ], - [ - "▁gemacht", - -11.312472343444824 - ], - [ - "▁broadcast", - -11.312478065490723 - ], - [ - "▁stronger", - -11.312545776367188 - ], - [ - "UP", - -11.31257152557373 - ], - [ - "▁centers", - -11.312614440917969 - ], - [ - "▁petite", - -11.312617301940918 - ], - [ - "▁spots", - -11.312626838684082 - ], - [ - "▁crystal", - -11.312756538391113 - ], - [ - "▁salon", - -11.313044548034668 - ], - [ - "▁gained", - -11.313098907470703 - ], - [ - "▁Mus", - -11.313215255737305 - ], - [ - "▁lens", - -11.313223838806152 - ], - [ - "▁ihm", - -11.313231468200684 - ], - [ - "minute", - -11.313573837280273 - ], - [ - "▁greatly", - -11.313587188720703 - ], - [ - "LP", - -11.31361198425293 - ], - [ - "rait", - 
-11.314027786254883 - ], - [ - "▁bid", - -11.314154624938965 - ], - [ - "▁cit", - -11.314203262329102 - ], - [ - "entreprise", - -11.31435775756836 - ], - [ - "▁55", - -11.314533233642578 - ], - [ - "▁respectively", - -11.314536094665527 - ], - [ - "▁lo", - -11.314638137817383 - ], - [ - "▁cons", - -11.314743995666504 - ], - [ - "▁Energie", - -11.315169334411621 - ], - [ - "▁OK", - -11.31521224975586 - ], - [ - "▁grill", - -11.315338134765625 - ], - [ - "▁heading", - -11.31549072265625 - ], - [ - "▁sollten", - -11.315491676330566 - ], - [ - "▁Fragen", - -11.315528869628906 - ], - [ - "▁Poli", - -11.315556526184082 - ], - [ - "▁studying", - -11.315723419189453 - ], - [ - "▁développement", - -11.315882682800293 - ], - [ - "▁foam", - -11.316035270690918 - ], - [ - "▁1996", - -11.316511154174805 - ], - [ - "▁disaster", - -11.31662654876709 - ], - [ - "▁cafe", - -11.317262649536133 - ], - [ - "▁moves", - -11.317267417907715 - ], - [ - "focuses", - -11.317712783813477 - ], - [ - "▁Avenue", - -11.317834854125977 - ], - [ - "▁humans", - -11.31784439086914 - ], - [ - "▁(3", - -11.318021774291992 - ], - [ - "▁région", - -11.318347930908203 - ], - [ - "▁DJ", - -11.318608283996582 - ], - [ - "shop", - -11.318819046020508 - ], - [ - "▁acting", - -11.318843841552734 - ], - [ - "▁Justice", - -11.318967819213867 - ], - [ - "▁trouve", - -11.319010734558105 - ], - [ - "▁Estate", - -11.319040298461914 - ], - [ - "▁strict", - -11.319231986999512 - ], - [ - "▁talks", - -11.319283485412598 - ], - [ - "▁mat", - -11.319290161132812 - ], - [ - "▁completion", - -11.319327354431152 - ], - [ - "delivering", - -11.31943416595459 - ], - [ - "CD", - -11.31973934173584 - ], - [ - "0%", - -11.319960594177246 - ], - [ - "▁creativity", - -11.320253372192383 - ], - [ - "BR", - -11.320272445678711 - ], - [ - "▁occurred", - -11.320357322692871 - ], - [ - "Car", - -11.320590019226074 - ], - [ - "▁rising", - -11.320761680603027 - ], - [ - "gger", - -11.32086181640625 - ], - [ - "▁Gene", - 
-11.320901870727539 - ], - [ - "▁workplace", - -11.320914268493652 - ], - [ - "phy", - -11.321065902709961 - ], - [ - "▁Bla", - -11.32107162475586 - ], - [ - "▁trailer", - -11.32120418548584 - ], - [ - "▁Forest", - -11.321205139160156 - ], - [ - "▁profession", - -11.321246147155762 - ], - [ - "▁Father", - -11.32137680053711 - ], - [ - "flu", - -11.321487426757812 - ], - [ - "tone", - -11.321489334106445 - ], - [ - "▁sexual", - -11.321736335754395 - ], - [ - "▁Map", - -11.321805953979492 - ], - [ - "OT", - -11.3218412399292 - ], - [ - "▁Us", - -11.321878433227539 - ], - [ - "tôt", - -11.321892738342285 - ], - [ - "▁Wert", - -11.321901321411133 - ], - [ - "preparing", - -11.322121620178223 - ], - [ - "isé", - -11.322243690490723 - ], - [ - "▁lake", - -11.322461128234863 - ], - [ - "eed", - -11.32270336151123 - ], - [ - "jun", - -11.322888374328613 - ], - [ - "▁implemented", - -11.323014259338379 - ], - [ - "vid", - -11.323116302490234 - ], - [ - "igne", - -11.323201179504395 - ], - [ - "▁follows", - -11.323214530944824 - ], - [ - "▁Eric", - -11.323430061340332 - ], - [ - "body", - -11.323530197143555 - ], - [ - "▁contained", - -11.323585510253906 - ], - [ - "▁massage", - -11.323715209960938 - ], - [ - "AV", - -11.323725700378418 - ], - [ - "▁insa", - -11.323850631713867 - ], - [ - "▁observed", - -11.323892593383789 - ], - [ - "▁marque", - -11.324137687683105 - ], - [ - "lines", - -11.324451446533203 - ], - [ - "▁Frage", - -11.324482917785645 - ], - [ - "largely", - -11.324647903442383 - ], - [ - "gegeben", - -11.32473087310791 - ], - [ - "▁colleagues", - -11.324762344360352 - ], - [ - "pha", - -11.32494068145752 - ], - [ - "▁representative", - -11.325217247009277 - ], - [ - "▁shut", - -11.325650215148926 - ], - [ - "▁secondary", - -11.325779914855957 - ], - [ - "▁exhibit", - -11.325927734375 - ], - [ - "1)", - -11.325932502746582 - ], - [ - "mid", - -11.326109886169434 - ], - [ - "▁Due", - -11.326229095458984 - ], - [ - "▁initiatives", - -11.326457023620605 - ], - [ 
- "▁occurs", - -11.326458930969238 - ], - [ - "lent", - -11.326478958129883 - ], - [ - "▁façon", - -11.326778411865234 - ], - [ - "▁iOS", - -11.326803207397461 - ], - [ - "▁exploring", - -11.327000617980957 - ], - [ - "▁stations", - -11.327103614807129 - ], - [ - "nton", - -11.327234268188477 - ], - [ - "▁Country", - -11.32729721069336 - ], - [ - "▁shouldn", - -11.327406883239746 - ], - [ - "▁casual", - -11.327611923217773 - ], - [ - "-18", - -11.32769775390625 - ], - [ - "▁maintained", - -11.32772445678711 - ], - [ - "▁cart", - -11.327790260314941 - ], - [ - "▁propre", - -11.327836036682129 - ], - [ - "▁asset", - -11.327948570251465 - ], - [ - "firm", - -11.32803726196289 - ], - [ - "gla", - -11.328231811523438 - ], - [ - "viv", - -11.3282470703125 - ], - [ - "▁scientists", - -11.328873634338379 - ], - [ - "▁Nor", - -11.328936576843262 - ], - [ - "ites", - -11.329320907592773 - ], - [ - "▁engaging", - -11.329933166503906 - ], - [ - "My", - -11.330178260803223 - ], - [ - "▁workshops", - -11.330282211303711 - ], - [ - "ffer", - -11.3303804397583 - ], - [ - "activité", - -11.33047103881836 - ], - [ - "▁tension", - -11.330567359924316 - ], - [ - "▁dual", - -11.330668449401855 - ], - [ - "uer", - -11.33084774017334 - ], - [ - "900", - -11.330941200256348 - ], - [ - "SF", - -11.33108139038086 - ], - [ - "▁kannst", - -11.331146240234375 - ], - [ - "▁bur", - -11.33115291595459 - ], - [ - "▁visitor", - -11.331156730651855 - ], - [ - "▁granted", - -11.331178665161133 - ], - [ - "▁union", - -11.331355094909668 - ], - [ - "▁tablet", - -11.331461906433105 - ], - [ - "▁Choose", - -11.33146858215332 - ], - [ - "ibil", - -11.331551551818848 - ], - [ - "▁settlement", - -11.331830978393555 - ], - [ - "genommen", - -11.331892967224121 - ], - [ - "▁marked", - -11.332956314086914 - ], - [ - "▁diagnostic", - -11.333370208740234 - ], - [ - "▁prayer", - -11.333529472351074 - ], - [ - "▁Toronto", - -11.334035873413086 - ], - [ - "trans", - -11.334146499633789 - ], - [ - "▁respectiv", - 
-11.334160804748535 - ], - [ - "▁2012.", - -11.334207534790039 - ], - [ - "icul", - -11.334394454956055 - ], - [ - "▁satisfied", - -11.334527969360352 - ], - [ - "▁Fla", - -11.334596633911133 - ], - [ - "▁estimate", - -11.334638595581055 - ], - [ - "▁Agency", - -11.33466911315918 - ], - [ - "OD", - -11.334708213806152 - ], - [ - "▁McC", - -11.334746360778809 - ], - [ - "bert", - -11.334748268127441 - ], - [ - "▁seal", - -11.334771156311035 - ], - [ - "aine", - -11.334839820861816 - ], - [ - "▁cauza", - -11.334848403930664 - ], - [ - "▁wallpaper", - -11.335081100463867 - ], - [ - "▁alb", - -11.33536434173584 - ], - [ - "▁Sound", - -11.335681915283203 - ], - [ - "worth", - -11.33572769165039 - ], - [ - "chten", - -11.335858345031738 - ], - [ - "programm", - -11.335896492004395 - ], - [ - "▁pounds", - -11.336215019226074 - ], - [ - "▁coaching", - -11.336278915405273 - ], - [ - "▁Furthermore", - -11.336454391479492 - ], - [ - "▁Korea", - -11.336471557617188 - ], - [ - "▁flour", - -11.336530685424805 - ], - [ - "▁sommes", - -11.33657169342041 - ], - [ - "▁Repair", - -11.33661937713623 - ], - [ - "”)", - -11.336642265319824 - ], - [ - "itch", - -11.336675643920898 - ], - [ - "blu", - -11.336786270141602 - ], - [ - "zar", - -11.336882591247559 - ], - [ - "▁diferite", - -11.33745002746582 - ], - [ - "▁Golf", - -11.337685585021973 - ], - [ - "arch", - -11.33772087097168 - ], - [ - "▁panels", - -11.337799072265625 - ], - [ - "jan", - -11.337956428527832 - ], - [ - "“.", - -11.338240623474121 - ], - [ - "izarea", - -11.338324546813965 - ], - [ - "▁golden", - -11.33854866027832 - ], - [ - "▁flying", - -11.338550567626953 - ], - [ - "▁museum", - -11.338700294494629 - ], - [ - "▁equivalent", - -11.338759422302246 - ], - [ - "▁Lang", - -11.339032173156738 - ], - [ - "schi", - -11.339539527893066 - ], - [ - "MI", - -11.339595794677734 - ], - [ - "▁faci", - -11.339838027954102 - ], - [ - "▁Rahmen", - -11.339988708496094 - ], - [ - "▁attending", - -11.340130805969238 - ], - [ - 
"′′", - -11.340483665466309 - ], - [ - "▁Tro", - -11.341070175170898 - ], - [ - "▁gaming", - -11.341447830200195 - ], - [ - "▁aujourd", - -11.341479301452637 - ], - [ - "▁Wochen", - -11.341526985168457 - ], - [ - "▁entering", - -11.341535568237305 - ], - [ - "its", - -11.34155559539795 - ], - [ - "▁Private", - -11.341866493225098 - ], - [ - "▁Ocean", - -11.34188175201416 - ], - [ - "▁01", - -11.342098236083984 - ], - [ - "▁coloring", - -11.342188835144043 - ], - [ - "ător", - -11.34253215789795 - ], - [ - "▁flooring", - -11.342548370361328 - ], - [ - "▁downtown", - -11.34276294708252 - ], - [ - "rab", - -11.342998504638672 - ], - [ - "HI", - -11.343221664428711 - ], - [ - "▁illness", - -11.343234062194824 - ], - [ - "▁whil", - -11.343307495117188 - ], - [ - "▁diamond", - -11.34333324432373 - ], - [ - "Mail", - -11.343419075012207 - ], - [ - "▁Dream", - -11.34344482421875 - ], - [ - "▁Golden", - -11.344099044799805 - ], - [ - "▁rein", - -11.344220161437988 - ], - [ - "▁hi", - -11.344283103942871 - ], - [ - "▁expressed", - -11.344489097595215 - ], - [ - "▁luat", - -11.344511985778809 - ], - [ - "▁Share", - -11.34453010559082 - ], - [ - "▁Programm", - -11.344706535339355 - ], - [ - "▁Sales", - -11.344707489013672 - ], - [ - "▁prof", - -11.344890594482422 - ], - [ - "▁MO", - -11.34505844116211 - ], - [ - "▁Short", - -11.345088958740234 - ], - [ - "▁charm", - -11.345290184020996 - ], - [ - "▁Cer", - -11.345373153686523 - ], - [ - "▁Run", - -11.34553337097168 - ], - [ - "▁tutorial", - -11.345589637756348 - ], - [ - "oul", - -11.34561824798584 - ], - [ - "▁Fest", - -11.345794677734375 - ], - [ - "▁uniform", - -11.345929145812988 - ], - [ - "aß", - -11.346014976501465 - ], - [ - "▁pipe", - -11.346076965332031 - ], - [ - "▁Square", - -11.346283912658691 - ], - [ - "▁Kosten", - -11.346365928649902 - ], - [ - "▁checked", - -11.346590042114258 - ], - [ - "▁65", - -11.346626281738281 - ], - [ - "▁Adam", - -11.346686363220215 - ], - [ - "cel", - -11.346700668334961 - ], - [ - 
"ello", - -11.346965789794922 - ], - [ - "▁Res", - -11.347023963928223 - ], - [ - "▁drain", - -11.34708309173584 - ], - [ - "ză", - -11.347129821777344 - ], - [ - "▁Tech", - -11.34739875793457 - ], - [ - "▁strive", - -11.34749698638916 - ], - [ - "cycl", - -11.347506523132324 - ], - [ - "▁stark", - -11.347541809082031 - ], - [ - "load", - -11.34754753112793 - ], - [ - "▁Stat", - -11.347589492797852 - ], - [ - "▁Rec", - -11.347622871398926 - ], - [ - "ians", - -11.347716331481934 - ], - [ - "▁Tin", - -11.347738265991211 - ], - [ - "▁Agreement", - -11.347840309143066 - ], - [ - "▁pret", - -11.348027229309082 - ], - [ - "-9", - -11.348326683044434 - ], - [ - "▁sentence", - -11.348380088806152 - ], - [ - "▁Direct", - -11.348426818847656 - ], - [ - "▁Rep", - -11.348465919494629 - ], - [ - "▁Prozent", - -11.348799705505371 - ], - [ - "▁invitation", - -11.34882640838623 - ], - [ - "▁refund", - -11.349113464355469 - ], - [ - "▁Kids", - -11.349287986755371 - ], - [ - "stock", - -11.349383354187012 - ], - [ - "TP", - -11.349400520324707 - ], - [ - "▁tau", - -11.34941291809082 - ], - [ - "from", - -11.349421501159668 - ], - [ - "▁Ash", - -11.349451065063477 - ], - [ - "store", - -11.349535942077637 - ], - [ - "▁Common", - -11.34958553314209 - ], - [ - "▁Qualität", - -11.34968376159668 - ], - [ - "▁strongly", - -11.349727630615234 - ], - [ - "▁importante", - -11.34979248046875 - ], - [ - "ome", - -11.349912643432617 - ], - [ - "▁surtout", - -11.349946022033691 - ], - [ - "enables", - -11.35020637512207 - ], - [ - "▁decent", - -11.350221633911133 - ], - [ - "▁neutral", - -11.350237846374512 - ], - [ - "▁produs", - -11.350356101989746 - ], - [ - "bury", - -11.350451469421387 - ], - [ - "▁Level", - -11.350618362426758 - ], - [ - "▁interes", - -11.350699424743652 - ], - [ - "mov", - -11.350797653198242 - ], - [ - "▁backup", - -11.350939750671387 - ], - [ - "même", - -11.351094245910645 - ], - [ - "doc", - -11.351119041442871 - ], - [ - "▁#1", - -11.35130786895752 - ], - [ - 
"▁specified", - -11.351495742797852 - ], - [ - "▁founder", - -11.351655960083008 - ], - [ - "And", - -11.352090835571289 - ], - [ - "isten", - -11.352149963378906 - ], - [ - "▁lecture", - -11.352729797363281 - ], - [ - "▁wake", - -11.352895736694336 - ], - [ - "▁vraiment", - -11.352980613708496 - ], - [ - "▁swing", - -11.353188514709473 - ], - [ - "▁addresses", - -11.353275299072266 - ], - [ - "▁Verfügung", - -11.353504180908203 - ], - [ - "▁deadline", - -11.353761672973633 - ], - [ - "н", - -11.353791236877441 - ], - [ - "▁Content", - -11.353970527648926 - ], - [ - "▁Gre", - -11.354111671447754 - ], - [ - "▁Experience", - -11.354378700256348 - ], - [ - "tura", - -11.354458808898926 - ], - [ - "▁exit", - -11.354642868041992 - ], - [ - "▁Britain", - -11.354652404785156 - ], - [ - "▁Sunt", - -11.354684829711914 - ], - [ - "▁documentation", - -11.354690551757812 - ], - [ - "▁showcase", - -11.3547945022583 - ], - [ - "▁photographs", - -11.354822158813477 - ], - [ - "qué", - -11.35483169555664 - ], - [ - "zin", - -11.354909896850586 - ], - [ - "pres", - -11.354933738708496 - ], - [ - "▁decline", - -11.354955673217773 - ], - [ - "▁Large", - -11.355030059814453 - ], - [ - "▁bills", - -11.355141639709473 - ], - [ - "▁entitled", - -11.355222702026367 - ], - [ - "▁passionate", - -11.355393409729004 - ], - [ - "▁workout", - -11.355413436889648 - ], - [ - "▁Again", - -11.35560417175293 - ], - [ - "▁Haut", - -11.35582160949707 - ], - [ - "▁guaranteed", - -11.35599136352539 - ], - [ - "▁vue", - -11.35600471496582 - ], - [ - "▁farmers", - -11.356224060058594 - ], - [ - "▁admission", - -11.356500625610352 - ], - [ - "▁manière", - -11.357080459594727 - ], - [ - "▁reverse", - -11.357121467590332 - ], - [ - "▁FL", - -11.357142448425293 - ], - [ - "▁terminal", - -11.357206344604492 - ], - [ - "GI", - -11.35731029510498 - ], - [ - "▁speakers", - -11.35739803314209 - ], - [ - "▁responses", - -11.357398986816406 - ], - [ - "▁Doch", - -11.357457160949707 - ], - [ - "▁2013,", - 
-11.357717514038086 - ], - [ - "▁phones", - -11.357789993286133 - ], - [ - "ential", - -11.357851028442383 - ], - [ - "▁operator", - -11.357916831970215 - ], - [ - "▁steam", - -11.358036994934082 - ], - [ - "burn", - -11.358091354370117 - ], - [ - "▁seul", - -11.35815715789795 - ], - [ - "▁unusual", - -11.358322143554688 - ], - [ - "▁educate", - -11.358403205871582 - ], - [ - "▁Que", - -11.358680725097656 - ], - [ - "▁believes", - -11.359137535095215 - ], - [ - "▁succeed", - -11.359344482421875 - ], - [ - "▁delay", - -11.359533309936523 - ], - [ - "▁deeper", - -11.359633445739746 - ], - [ - "▁reaching", - -11.359890937805176 - ], - [ - "▁objectives", - -11.360086441040039 - ], - [ - "▁temporary", - -11.36028003692627 - ], - [ - "▁artistic", - -11.360421180725098 - ], - [ - "▁sou", - -11.360471725463867 - ], - [ - "▁transparent", - -11.36062240600586 - ], - [ - "There", - -11.360798835754395 - ], - [ - "ception", - -11.360836029052734 - ], - [ - "▁excess", - -11.360939979553223 - ], - [ - "▁gathering", - -11.361008644104004 - ], - [ - "▁Save", - -11.361095428466797 - ], - [ - "ază", - -11.361166000366211 - ], - [ - "▁français", - -11.361197471618652 - ], - [ - "▁laid", - -11.361210823059082 - ], - [ - "▁modul", - -11.361394882202148 - ], - [ - "avoir", - -11.361465454101562 - ], - [ - "under", - -11.362113952636719 - ], - [ - "dding", - -11.362226486206055 - ], - [ - "▁falls", - -11.362232208251953 - ], - [ - "▁Möglichkeit", - -11.362369537353516 - ], - [ - "▁ceremony", - -11.362370491027832 - ], - [ - "rai", - -11.36237621307373 - ], - [ - "▁Bor", - -11.362709045410156 - ], - [ - "▁Below", - -11.362750053405762 - ], - [ - "4)", - -11.362759590148926 - ], - [ - "▁Field", - -11.362833023071289 - ], - [ - "wear", - -11.362935066223145 - ], - [ - "motion", - -11.362948417663574 - ], - [ - "print", - -11.363311767578125 - ], - [ - "game", - -11.363360404968262 - ], - [ - "▁Irish", - -11.363458633422852 - ], - [ - "▁Las", - -11.363458633422852 - ], - [ - "Among", - 
-11.363570213317871 - ], - [ - "atori", - -11.363580703735352 - ], - [ - "▁ajuns", - -11.363837242126465 - ], - [ - "▁alive", - -11.363860130310059 - ], - [ - "▁retour", - -11.363900184631348 - ], - [ - "▁smoke", - -11.3640775680542 - ], - [ - "▁math", - -11.364285469055176 - ], - [ - "▁Ye", - -11.364337921142578 - ], - [ - "▁Denn", - -11.36436653137207 - ], - [ - "▁1995", - -11.364412307739258 - ], - [ - "▁bani", - -11.364644050598145 - ], - [ - "raz", - -11.364998817443848 - ], - [ - "world", - -11.365026473999023 - ], - [ - "▁engines", - -11.365140914916992 - ], - [ - "nehmen", - -11.365192413330078 - ], - [ - "stor", - -11.365328788757324 - ], - [ - "▁interpret", - -11.365403175354004 - ], - [ - "▁Ven", - -11.365489959716797 - ], - [ - "▁cotton", - -11.365622520446777 - ], - [ - "▁represented", - -11.366004943847656 - ], - [ - "▁fabulous", - -11.366166114807129 - ], - [ - "▁gender", - -11.366301536560059 - ], - [ - "Mar", - -11.366668701171875 - ], - [ - "vic", - -11.366991996765137 - ], - [ - "▁newsletter", - -11.367432594299316 - ], - [ - "sburg", - -11.367574691772461 - ], - [ - "pond", - -11.36838436126709 - ], - [ - "▁Carl", - -11.368454933166504 - ], - [ - "▁bunch", - -11.368714332580566 - ], - [ - "▁tower", - -11.368847846984863 - ], - [ - "▁trigger", - -11.368976593017578 - ], - [ - "▁explanation", - -11.369091033935547 - ], - [ - "Man", - -11.369114875793457 - ], - [ - "iunea", - -11.369168281555176 - ], - [ - "▁announcement", - -11.369492530822754 - ], - [ - "▁seeds", - -11.36952018737793 - ], - [ - "▁shell", - -11.369865417480469 - ], - [ - "▁Working", - -11.36989688873291 - ], - [ - "viz", - -11.370267868041992 - ], - [ - "▁Simply", - -11.370329856872559 - ], - [ - "sub", - -11.37037181854248 - ], - [ - "▁Village", - -11.37060832977295 - ], - [ - "▁falling", - -11.370742797851562 - ], - [ - "▁fits", - -11.37084674835205 - ], - [ - "▁wichtig", - -11.37088394165039 - ], - [ - "▁Down", - -11.37108039855957 - ], - [ - "bble", - -11.371573448181152 - ], 
- [ - "▁Orange", - -11.37165641784668 - ], - [ - "promoting", - -11.371932029724121 - ], - [ - "▁rapidly", - -11.37217903137207 - ], - [ - "▁translation", - -11.372330665588379 - ], - [ - "nig", - -11.3723726272583 - ], - [ - "fusion", - -11.37240982055664 - ], - [ - "kosten", - -11.372611045837402 - ], - [ - "2)", - -11.372783660888672 - ], - [ - "▁Express", - -11.372958183288574 - ], - [ - "▁Sw", - -11.373003959655762 - ], - [ - "▁frequency", - -11.373086929321289 - ], - [ - "▁diversity", - -11.373348236083984 - ], - [ - "MT", - -11.373452186584473 - ], - [ - "▁bekannt", - -11.373530387878418 - ], - [ - "lion", - -11.373871803283691 - ], - [ - "▁cop", - -11.37393856048584 - ], - [ - "▁Customer", - -11.374072074890137 - ], - [ - "▁demands", - -11.374427795410156 - ], - [ - "▁corn", - -11.374516487121582 - ], - [ - "▁Hamburg", - -11.374551773071289 - ], - [ - "SD", - -11.374628067016602 - ], - [ - "▁Rome", - -11.374677658081055 - ], - [ - "▁Pur", - -11.374750137329102 - ], - [ - "▁stamp", - -11.374885559082031 - ], - [ - "▁grateful", - -11.374967575073242 - ], - [ - "RM", - -11.37511157989502 - ], - [ - "▁Pl", - -11.37511920928955 - ], - [ - "▁Tele", - -11.375154495239258 - ], - [ - "▁plugin", - -11.375492095947266 - ], - [ - "▁maxim", - -11.375675201416016 - ], - [ - "▁Hoch", - -11.37574577331543 - ], - [ - "igung", - -11.375823020935059 - ], - [ - "▁Entwicklung", - -11.375858306884766 - ], - [ - "▁File", - -11.375931739807129 - ], - [ - "▁Eastern", - -11.376070022583008 - ], - [ - "▁scrap", - -11.376331329345703 - ], - [ - "▁acquired", - -11.376338958740234 - ], - [ - "sau", - -11.376364707946777 - ], - [ - "▁Klein", - -11.376452445983887 - ], - [ - "▁milioane", - -11.376492500305176 - ], - [ - "▁Stand", - -11.376693725585938 - ], - [ - "▁childhood", - -11.37671184539795 - ], - [ - "▁artificial", - -11.376752853393555 - ], - [ - "▁substantial", - -11.376851081848145 - ], - [ - "druck", - -11.377315521240234 - ], - [ - "▁Kra", - -11.377562522888184 - ], - [ - 
"▁performances", - -11.377645492553711 - ], - [ - "▁row", - -11.377824783325195 - ], - [ - "NT", - -11.377899169921875 - ], - [ - "mod", - -11.377904891967773 - ], - [ - "remained", - -11.378399848937988 - ], - [ - "▁nimic", - -11.378462791442871 - ], - [ - "▁Limited", - -11.378555297851562 - ], - [ - "▁cookie", - -11.378718376159668 - ], - [ - "▁retain", - -11.378816604614258 - ], - [ - "▁600", - -11.379144668579102 - ], - [ - "▁eigene", - -11.379158020019531 - ], - [ - "▁tune", - -11.379209518432617 - ], - [ - "NS", - -11.379256248474121 - ], - [ - "▁dad", - -11.379284858703613 - ], - [ - "Moreover", - -11.379415512084961 - ], - [ - "ès", - -11.379434585571289 - ], - [ - "▁worship", - -11.379439353942871 - ], - [ - "▁Material", - -11.3794584274292 - ], - [ - "▁verb", - -11.379528045654297 - ], - [ - "ziehen", - -11.37957763671875 - ], - [ - "lton", - -11.379645347595215 - ], - [ - "▁boot", - -11.379982948303223 - ], - [ - "plo", - -11.380118370056152 - ], - [ - "CF", - -11.380212783813477 - ], - [ - "GM", - -11.380215644836426 - ], - [ - "▁Mix", - -11.38046932220459 - ], - [ - "▁Front", - -11.380474090576172 - ], - [ - "▁repairs", - -11.380655288696289 - ], - [ - "▁proportion", - -11.381068229675293 - ], - [ - "▁habit", - -11.381132125854492 - ], - [ - "▁hide", - -11.38156509399414 - ], - [ - "focusing", - -11.381707191467285 - ], - [ - "▁Annual", - -11.381717681884766 - ], - [ - "▁twin", - -11.3817777633667 - ], - [ - "▁acord", - -11.381780624389648 - ], - [ - "ehr", - -11.381814956665039 - ], - [ - "month", - -11.382303237915039 - ], - [ - "venir", - -11.382535934448242 - ], - [ - "Or", - -11.38254165649414 - ], - [ - "awa", - -11.382600784301758 - ], - [ - "lass", - -11.382735252380371 - ], - [ - "ffe", - -11.383048057556152 - ], - [ - "iți", - -11.383074760437012 - ], - [ - "NO", - -11.3831148147583 - ], - [ - "▁scope", - -11.383295059204102 - ], - [ - "▁lowest", - -11.383527755737305 - ], - [ - "▁afraid", - -11.383572578430176 - ], - [ - "▁subjects", - 
-11.383578300476074 - ], - [ - "▁templates", - -11.383586883544922 - ], - [ - "▁jos", - -11.383604049682617 - ], - [ - "DM", - -11.383687973022461 - ], - [ - "ensemble", - -11.383792877197266 - ], - [ - "▁Ski", - -11.383941650390625 - ], - [ - "DP", - -11.384099960327148 - ], - [ - "▁grip", - -11.384171485900879 - ], - [ - "2-", - -11.38436222076416 - ], - [ - "▁sécurité", - -11.384743690490723 - ], - [ - "▁mono", - -11.384749412536621 - ], - [ - "▁controls", - -11.384854316711426 - ], - [ - "SV", - -11.384879112243652 - ], - [ - "install", - -11.384970664978027 - ], - [ - "berry", - -11.385042190551758 - ], - [ - "nial", - -11.385120391845703 - ], - [ - "shed", - -11.385462760925293 - ], - [ - "▁celle", - -11.385830879211426 - ], - [ - "FR", - -11.385936737060547 - ], - [ - "äng", - -11.385950088500977 - ], - [ - "▁gaz", - -11.385984420776367 - ], - [ - "êt", - -11.386184692382812 - ], - [ - "▁viewing", - -11.386412620544434 - ], - [ - "▁asigura", - -11.386524200439453 - ], - [ - "bling", - -11.3865327835083 - ], - [ - "master", - -11.386919975280762 - ], - [ - "▁Fin", - -11.387160301208496 - ], - [ - "VC", - -11.387365341186523 - ], - [ - "▁patent", - -11.387715339660645 - ], - [ - "▁Clean", - -11.38773250579834 - ], - [ - "▁1970", - -11.387789726257324 - ], - [ - "▁Char", - -11.387971878051758 - ], - [ - "thi", - -11.388010025024414 - ], - [ - "bli", - -11.388141632080078 - ], - [ - "▁haut", - -11.388307571411133 - ], - [ - "tica", - -11.38836669921875 - ], - [ - "▁venit", - -11.388578414916992 - ], - [ - "▁compatible", - -11.388678550720215 - ], - [ - "▁hanging", - -11.388690948486328 - ], - [ - "UN", - -11.388842582702637 - ], - [ - "▁forth", - -11.388911247253418 - ], - [ - "▁painted", - -11.388912200927734 - ], - [ - "lip", - -11.389031410217285 - ], - [ - "▁deeply", - -11.389089584350586 - ], - [ - "▁participating", - -11.389242172241211 - ], - [ - "▁Iran", - -11.38968276977539 - ], - [ - "▁conventional", - -11.389769554138184 - ], - [ - "ARE", - 
-11.38985824584961 - ], - [ - "▁accuracy", - -11.389896392822266 - ], - [ - "▁Familie", - -11.389955520629883 - ], - [ - "▁Dir", - -11.39001178741455 - ], - [ - "▁gehen", - -11.390127182006836 - ], - [ - "▁moderne", - -11.39022159576416 - ], - [ - "▁Iraq", - -11.39050579071045 - ], - [ - "▁vente", - -11.390582084655762 - ], - [ - "▁Donald", - -11.390998840332031 - ], - [ - "▁passer", - -11.391051292419434 - ], - [ - "▁mehrere", - -11.391267776489258 - ], - [ - "▁Everything", - -11.391291618347168 - ], - [ - "▁studied", - -11.391307830810547 - ], - [ - "▁acquire", - -11.391312599182129 - ], - [ - "für", - -11.391477584838867 - ], - [ - "▁gal", - -11.391502380371094 - ], - [ - "▁headed", - -11.391809463500977 - ], - [ - "▁screening", - -11.391865730285645 - ], - [ - "▁findings", - -11.392303466796875 - ], - [ - "▁nutrition", - -11.392305374145508 - ], - [ - "▁Secretary", - -11.392308235168457 - ], - [ - "duct", - -11.392431259155273 - ], - [ - "born", - -11.392436027526855 - ], - [ - "«", - -11.39261531829834 - ], - [ - "▁statistics", - -11.392616271972656 - ], - [ - "▁Sydney", - -11.392800331115723 - ], - [ - "▁Prof", - -11.392829895019531 - ], - [ - "▁dialogue", - -11.39327621459961 - ], - [ - "▁gather", - -11.393425941467285 - ], - [ - "valu", - -11.393746376037598 - ], - [ - "▁currency", - -11.394073486328125 - ], - [ - "▁Kat", - -11.394092559814453 - ], - [ - "gotten", - -11.394189834594727 - ], - [ - "main", - -11.39432144165039 - ], - [ - "▁coin", - -11.394340515136719 - ], - [ - "▁Nick", - -11.394380569458008 - ], - [ - "vă", - -11.394658088684082 - ], - [ - "▁Victoria", - -11.394832611083984 - ], - [ - "▁conclusion", - -11.3949613571167 - ], - [ - "▁lemon", - -11.394998550415039 - ], - [ - "▁Article", - -11.39516830444336 - ], - [ - "▁necesar", - -11.39516830444336 - ], - [ - "mag", - -11.395180702209473 - ], - [ - "▁riding", - -11.39537239074707 - ], - [ - "▁Eli", - -11.395599365234375 - ], - [ - "▁cord", - -11.395635604858398 - ], - [ - "wä", - 
-11.39572811126709 - ], - [ - "ußerdem", - -11.395737648010254 - ], - [ - "▁Bed", - -11.395759582519531 - ], - [ - "▁layers", - -11.395833015441895 - ], - [ - "▁harder", - -11.395975112915039 - ], - [ - "▁processor", - -11.396040916442871 - ], - [ - "▁Ils", - -11.39613151550293 - ], - [ - "▁Edition", - -11.39615535736084 - ], - [ - "▁Link", - -11.396393775939941 - ], - [ - "éré", - -11.396461486816406 - ], - [ - "▁nume", - -11.396576881408691 - ], - [ - "▁Boy", - -11.39659595489502 - ], - [ - "▁equally", - -11.396646499633789 - ], - [ - "▁Regel", - -11.397119522094727 - ], - [ - "▁hopes", - -11.397185325622559 - ], - [ - "odor", - -11.397311210632324 - ], - [ - "▁initially", - -11.397430419921875 - ], - [ - "▁$4", - -11.3974609375 - ], - [ - "▁exemplu", - -11.397537231445312 - ], - [ - "▁vari", - -11.397565841674805 - ], - [ - "schl", - -11.397698402404785 - ], - [ - "▁southern", - -11.39809799194336 - ], - [ - "▁mein", - -11.39818000793457 - ], - [ - "▁1994", - -11.398300170898438 - ], - [ - "▁importantly", - -11.398401260375977 - ], - [ - "▁succes", - -11.398526191711426 - ], - [ - "▁developer", - -11.398598670959473 - ], - [ - "▁lips", - -11.39889144897461 - ], - [ - "▁attitude", - -11.39900016784668 - ], - [ - "▁Age", - -11.399541854858398 - ], - [ - "▁corps", - -11.399713516235352 - ], - [ - "▁clicking", - -11.39976978302002 - ], - [ - "▁putem", - -11.399832725524902 - ], - [ - "▁journée", - -11.40003776550293 - ], - [ - "boy", - -11.4002103805542 - ], - [ - "▁injured", - -11.40028190612793 - ], - [ - "▁watched", - -11.400433540344238 - ], - [ - "▁flights", - -11.40079116821289 - ], - [ - "turn", - -11.400980949401855 - ], - [ - "▁stainless", - -11.401562690734863 - ], - [ - "▁besondere", - -11.40156364440918 - ], - [ - "▁Tur", - -11.401596069335938 - ], - [ - "▁hiring", - -11.401650428771973 - ], - [ - "▁roads", - -11.401727676391602 - ], - [ - "ificat", - -11.401785850524902 - ], - [ - "▁Flor", - -11.402045249938965 - ], - [ - "▁puternic", - 
-11.402215003967285 - ], - [ - "▁unexpected", - -11.40223503112793 - ], - [ - "▁Est", - -11.40238094329834 - ], - [ - "▁adopted", - -11.40253734588623 - ], - [ - "▁Fox", - -11.402647972106934 - ], - [ - "▁contributions", - -11.402870178222656 - ], - [ - "sec", - -11.402968406677246 - ], - [ - "IO", - -11.403059959411621 - ], - [ - "▁santé", - -11.403432846069336 - ], - [ - "▁Tree", - -11.403763771057129 - ], - [ - "▁scurt", - -11.40381908416748 - ], - [ - "▁Products", - -11.403848648071289 - ], - [ - "▁forecast", - -11.403998374938965 - ], - [ - "▁actor", - -11.404143333435059 - ], - [ - "▁Gallery", - -11.404149055480957 - ], - [ - "▁continuous", - -11.404163360595703 - ], - [ - "▁Hat", - -11.404291152954102 - ], - [ - "▁slip", - -11.404501914978027 - ], - [ - "9%", - -11.404960632324219 - ], - [ - "▁depression", - -11.405043601989746 - ], - [ - "UI", - -11.405229568481445 - ], - [ - "abile", - -11.405648231506348 - ], - [ - "▁merit", - -11.405671119689941 - ], - [ - "▁Fer", - -11.405805587768555 - ], - [ - "▁robot", - -11.405888557434082 - ], - [ - "▁gel", - -11.40589427947998 - ], - [ - "▁gentle", - -11.406017303466797 - ], - [ - "▁wanting", - -11.406071662902832 - ], - [ - "▁understood", - -11.406157493591309 - ], - [ - "▁terrain", - -11.406161308288574 - ], - [ - "▁associate", - -11.406176567077637 - ], - [ - "▁discussions", - -11.40632152557373 - ], - [ - "▁Job", - -11.406365394592285 - ], - [ - "spec", - -11.406440734863281 - ], - [ - "Dabei", - -11.406475067138672 - ], - [ - "etic", - -11.406517028808594 - ], - [ - "gol", - -11.40654468536377 - ], - [ - "▁20%", - -11.406584739685059 - ], - [ - "▁grup", - -11.406606674194336 - ], - [ - "▁Doctor", - -11.406813621520996 - ], - [ - "verse", - -11.407246589660645 - ], - [ - "▁victim", - -11.407258033752441 - ], - [ - "ță", - -11.407302856445312 - ], - [ - "▁scores", - -11.407544136047363 - ], - [ - "▁Policy", - -11.407634735107422 - ], - [ - "▁Anna", - -11.407736778259277 - ], - [ - "IV", - -11.407804489135742 - 
], - [ - "▁mineral", - -11.408202171325684 - ], - [ - "live", - -11.40821647644043 - ], - [ - "▁grey", - -11.408368110656738 - ], - [ - "struct", - -11.40852165222168 - ], - [ - "▁emails", - -11.408738136291504 - ], - [ - "▁anymore", - -11.409114837646484 - ], - [ - "▁productivity", - -11.409387588500977 - ], - [ - "▁Dark", - -11.409463882446289 - ], - [ - "▁neither", - -11.409481048583984 - ], - [ - "▁quotes", - -11.409611701965332 - ], - [ - "LS", - -11.410368919372559 - ], - [ - "▁Arizona", - -11.41040325164795 - ], - [ - "night", - -11.410497665405273 - ], - [ - "élé", - -11.411019325256348 - ], - [ - "▁assigned", - -11.411153793334961 - ], - [ - "▁satellite", - -11.411328315734863 - ], - [ - "▁stability", - -11.411665916442871 - ], - [ - "▁networking", - -11.41172981262207 - ], - [ - "▁Transport", - -11.411847114562988 - ], - [ - "▁persons", - -11.411856651306152 - ], - [ - "fund", - -11.412043571472168 - ], - [ - "▁pratique", - -11.41213321685791 - ], - [ - "▁inca", - -11.412134170532227 - ], - [ - "iller", - -11.412349700927734 - ], - [ - "▁packed", - -11.41239070892334 - ], - [ - "▁Vegas", - -11.412484169006348 - ], - [ - "▁offre", - -11.412493705749512 - ], - [ - "▁Bin", - -11.412518501281738 - ], - [ - "stop", - -11.412609100341797 - ], - [ - "mini", - -11.412860870361328 - ], - [ - "▁jam", - -11.412877082824707 - ], - [ - "cord", - -11.41289234161377 - ], - [ - "▁Beautiful", - -11.412996292114258 - ], - [ - "▁trash", - -11.413012504577637 - ], - [ - "▁wise", - -11.413092613220215 - ], - [ - "▁accounting", - -11.413178443908691 - ], - [ - "▁différents", - -11.413182258605957 - ], - [ - "▁stil", - -11.413214683532715 - ], - [ - "suit", - -11.413951873779297 - ], - [ - "▁vier", - -11.414209365844727 - ], - [ - "▁permis", - -11.414224624633789 - ], - [ - "flow", - -11.414238929748535 - ], - [ - "▁col", - -11.414749145507812 - ], - [ - "ected", - -11.414960861206055 - ], - [ - "▁singer", - -11.414999008178711 - ], - [ - "▁GmbH", - -11.415038108825684 - ], - [ 
- "tics", - -11.415094375610352 - ], - [ - "▁ser", - -11.415159225463867 - ], - [ - "On", - -11.415315628051758 - ], - [ - "▁insights", - -11.415605545043945 - ], - [ - "BB", - -11.415946960449219 - ], - [ - "▁differ", - -11.415959358215332 - ], - [ - "▁Glass", - -11.416131973266602 - ], - [ - "▁Six", - -11.416482925415039 - ], - [ - "▁subscription", - -11.416584968566895 - ], - [ - "BC", - -11.416606903076172 - ], - [ - "▁returning", - -11.416664123535156 - ], - [ - "kleinen", - -11.416693687438965 - ], - [ - "▁advantages", - -11.416747093200684 - ], - [ - "omme", - -11.416852951049805 - ], - [ - "lus", - -11.417071342468262 - ], - [ - "now", - -11.417141914367676 - ], - [ - "▁Pack", - -11.417253494262695 - ], - [ - "▁leak", - -11.417333602905273 - ], - [ - "▁muscles", - -11.41748332977295 - ], - [ - "▁davon", - -11.417492866516113 - ], - [ - "mph", - -11.417858123779297 - ], - [ - "▁temple", - -11.417868614196777 - ], - [ - "▁Après", - -11.417901039123535 - ], - [ - "▁Illinois", - -11.41801643371582 - ], - [ - "▁variable", - -11.418065071105957 - ], - [ - "▁judgment", - -11.418389320373535 - ], - [ - "gran", - -11.41861629486084 - ], - [ - "▁pose", - -11.418621063232422 - ], - [ - "das", - -11.418647766113281 - ], - [ - "ures", - -11.418673515319824 - ], - [ - "▁Championship", - -11.418689727783203 - ], - [ - "ebenfalls", - -11.41872501373291 - ], - [ - "▁hydro", - -11.418753623962402 - ], - [ - "▁angle", - -11.419268608093262 - ], - [ - "▁5-", - -11.41940975189209 - ], - [ - "▁gest", - -11.419547080993652 - ], - [ - "▁Frau", - -11.420233726501465 - ], - [ - "▁knock", - -11.420275688171387 - ], - [ - "FS", - -11.420442581176758 - ], - [ - "spi", - -11.420577049255371 - ], - [ - "▁Regional", - -11.420717239379883 - ], - [ - "lets", - -11.421098709106445 - ], - [ - "▁Date", - -11.42115592956543 - ], - [ - "▁Finance", - -11.421211242675781 - ], - [ - "▁Dann", - -11.421320915222168 - ], - [ - "Star", - -11.421380043029785 - ], - [ - "▁Creek", - -11.421393394470215 - 
], - [ - "▁fu", - -11.421648979187012 - ], - [ - "wohn", - -11.422141075134277 - ], - [ - "▁anniversary", - -11.422219276428223 - ], - [ - "▁investments", - -11.422292709350586 - ], - [ - "▁universal", - -11.422601699829102 - ], - [ - "▁pit", - -11.422745704650879 - ], - [ - "ște", - -11.422784805297852 - ], - [ - "▁lab", - -11.422822952270508 - ], - [ - "dienst", - -11.422884941101074 - ], - [ - "▁pal", - -11.422889709472656 - ], - [ - "▁graphic", - -11.42289924621582 - ], - [ - "▁bearing", - -11.422900199890137 - ], - [ - "▁stylish", - -11.423087120056152 - ], - [ - "▁mé", - -11.42319393157959 - ], - [ - "▁există", - -11.42326545715332 - ], - [ - "▁découvrir", - -11.423477172851562 - ], - [ - "comp", - -11.423606872558594 - ], - [ - "ridge", - -11.423667907714844 - ], - [ - "▁heads", - -11.423765182495117 - ], - [ - "▁consequences", - -11.423835754394531 - ], - [ - "self", - -11.423842430114746 - ], - [ - "fried", - -11.423870086669922 - ], - [ - "▁inventory", - -11.424199104309082 - ], - [ - "▁strip", - -11.42422866821289 - ], - [ - "▁Civil", - -11.42424488067627 - ], - [ - "bell", - -11.424307823181152 - ], - [ - "▁neben", - -11.424444198608398 - ], - [ - "▁Perfect", - -11.424470901489258 - ], - [ - "▁Notre", - -11.424478530883789 - ], - [ - "▁fraud", - -11.424630165100098 - ], - [ - "▁employers", - -11.424656867980957 - ], - [ - "▁Jackson", - -11.42470645904541 - ], - [ - "▁probleme", - -11.424915313720703 - ], - [ - "▁richtig", - -11.424957275390625 - ], - [ - "▁Method", - -11.425009727478027 - ], - [ - "▁tired", - -11.425010681152344 - ], - [ - "dies", - -11.425031661987305 - ], - [ - "▁Number", - -11.425315856933594 - ], - [ - "rland", - -11.425652503967285 - ], - [ - "▁latter", - -11.426031112670898 - ], - [ - "rendre", - -11.426064491271973 - ], - [ - "▁cameras", - -11.426095962524414 - ], - [ - "▁euch", - -11.426630020141602 - ], - [ - "▁Description", - -11.427038192749023 - ], - [ - "Spec", - -11.427061080932617 - ], - [ - "▁mile", - -11.427437782287598 
- ], - [ - "▁Challenge", - -11.427474021911621 - ], - [ - "▁Solutions", - -11.427504539489746 - ], - [ - "▁trusted", - -11.427509307861328 - ], - [ - "▁einge", - -11.427515029907227 - ], - [ - "rück", - -11.427528381347656 - ], - [ - "▁Ober", - -11.427635192871094 - ], - [ - "kes", - -11.42764949798584 - ], - [ - "▁Log", - -11.427684783935547 - ], - [ - "▁dessert", - -11.427776336669922 - ], - [ - "▁murder", - -11.428033828735352 - ], - [ - "▁1/2", - -11.428311347961426 - ], - [ - "▁Provide", - -11.42872142791748 - ], - [ - "nivelul", - -11.428800582885742 - ], - [ - "nici", - -11.428818702697754 - ], - [ - "▁observe", - -11.42889404296875 - ], - [ - "▁prescription", - -11.429162979125977 - ], - [ - "▁Sau", - -11.429170608520508 - ], - [ - "▁genuine", - -11.42919635772705 - ], - [ - "▁operated", - -11.429231643676758 - ], - [ - "▁generous", - -11.429267883300781 - ], - [ - "▁weapons", - -11.429458618164062 - ], - [ - "▁belief", - -11.4295015335083 - ], - [ - "▁consum", - -11.429584503173828 - ], - [ - "▁unknown", - -11.430116653442383 - ], - [ - "deoarece", - -11.430135726928711 - ], - [ - "Art", - -11.430147171020508 - ], - [ - "▁kurz", - -11.430183410644531 - ], - [ - "▁Gut", - -11.430258750915527 - ], - [ - "▁medication", - -11.430522918701172 - ], - [ - "▁Mau", - -11.43058967590332 - ], - [ - "▁divorce", - -11.430678367614746 - ], - [ - "▁claimed", - -11.430811882019043 - ], - [ - "halten", - -11.430848121643066 - ], - [ - "▁Cons", - -11.43089485168457 - ], - [ - "▁operational", - -11.430975914001465 - ], - [ - "▁Hong", - -11.431081771850586 - ], - [ - "VI", - -11.431143760681152 - ], - [ - "▁Blick", - -11.431485176086426 - ], - [ - "▁lamp", - -11.431706428527832 - ], - [ - "pati", - -11.431853294372559 - ], - [ - "▁4-", - -11.43192195892334 - ], - [ - "▁interven", - -11.431964874267578 - ], - [ - "ques", - -11.43201732635498 - ], - [ - "▁Talk", - -11.432096481323242 - ], - [ - "▁zeigt", - -11.432318687438965 - ], - [ - "▁targeted", - -11.432390213012695 - ], - 
[ - "round", - -11.432640075683594 - ], - [ - "enfant", - -11.432748794555664 - ], - [ - "▁Reg", - -11.432836532592773 - ], - [ - "▁instruments", - -11.432872772216797 - ], - [ - "▁calcul", - -11.433363914489746 - ], - [ - "▁Henry", - -11.4335298538208 - ], - [ - "▁Cla", - -11.433616638183594 - ], - [ - "▁rack", - -11.433661460876465 - ], - [ - "sehen", - -11.43375301361084 - ], - [ - "▁ending", - -11.433754920959473 - ], - [ - "▁resolve", - -11.434130668640137 - ], - [ - "▁advise", - -11.434178352355957 - ], - [ - "▁sociale", - -11.434386253356934 - ], - [ - "▁cabin", - -11.434536933898926 - ], - [ - "▁involve", - -11.43480396270752 - ], - [ - "gă", - -11.434889793395996 - ], - [ - "▁automat", - -11.435132026672363 - ], - [ - "▁consultant", - -11.435258865356445 - ], - [ - "Bu", - -11.435370445251465 - ], - [ - "▁safely", - -11.435466766357422 - ], - [ - "état", - -11.435478210449219 - ], - [ - "▁pros", - -11.435657501220703 - ], - [ - "▁lies", - -11.435659408569336 - ], - [ - "▁Brian", - -11.435914993286133 - ], - [ - "▁talented", - -11.435954093933105 - ], - [ - "pus", - -11.43599796295166 - ], - [ - "▁hub", - -11.436060905456543 - ], - [ - "▁Ji", - -11.436066627502441 - ], - [ - "▁sought", - -11.436102867126465 - ], - [ - "▁energie", - -11.436210632324219 - ], - [ - "▁möchten", - -11.43634033203125 - ], - [ - "▁11.", - -11.436558723449707 - ], - [ - "▁Kong", - -11.436662673950195 - ], - [ - "▁grave", - -11.43666934967041 - ], - [ - "▁lists", - -11.436800956726074 - ], - [ - "tati", - -11.436809539794922 - ], - [ - "verschiedenen", - -11.43692398071289 - ], - [ - "dam", - -11.437061309814453 - ], - [ - "▁charity", - -11.437249183654785 - ], - [ - "▁breaking", - -11.43735122680664 - ], - [ - "kins", - -11.43747329711914 - ], - [ - "▁könnte", - -11.437517166137695 - ], - [ - "▁appointed", - -11.437532424926758 - ], - [ - "roc", - -11.4376859664917 - ], - [ - "▁Senate", - -11.437979698181152 - ], - [ - "wit", - -11.438002586364746 - ], - [ - "▁emerging", - 
-11.438162803649902 - ], - [ - "▁année", - -11.438288688659668 - ], - [ - "▁Cool", - -11.438365936279297 - ], - [ - "▁sensor", - -11.43842887878418 - ], - [ - "How", - -11.438488960266113 - ], - [ - "▁Ryan", - -11.438626289367676 - ], - [ - "▁computers", - -11.43871784210205 - ], - [ - "▁fault", - -11.4388427734375 - ], - [ - "▁présent", - -11.438843727111816 - ], - [ - "ulation", - -11.439149856567383 - ], - [ - "▁stir", - -11.439348220825195 - ], - [ - "lauf", - -11.439703941345215 - ], - [ - "▁AI", - -11.440389633178711 - ], - [ - "▁Bri", - -11.440438270568848 - ], - [ - "▁bain", - -11.441011428833008 - ], - [ - "▁5,", - -11.441287994384766 - ], - [ - "schein", - -11.44157886505127 - ], - [ - "▁weiß", - -11.441596031188965 - ], - [ - "▁possibilities", - -11.44235610961914 - ], - [ - "gur", - -11.442413330078125 - ], - [ - "▁hinter", - -11.442647933959961 - ], - [ - "Innen", - -11.442755699157715 - ], - [ - "▁vorba", - -11.442992210388184 - ], - [ - "fahren", - -11.443008422851562 - ], - [ - "▁Cell", - -11.443072319030762 - ], - [ - "univers", - -11.443137168884277 - ], - [ - "▁Follow", - -11.443424224853516 - ], - [ - "▁emotions", - -11.44360637664795 - ], - [ - "▁Ministry", - -11.443694114685059 - ], - [ - "▁curriculum", - -11.443694114685059 - ], - [ - "Je", - -11.443764686584473 - ], - [ - "▁gab", - -11.444080352783203 - ], - [ - "▁sigur", - -11.444270133972168 - ], - [ - "rise", - -11.444416999816895 - ], - [ - "Pri", - -11.44466495513916 - ], - [ - "▁stabil", - -11.444781303405762 - ], - [ - "▁superb", - -11.445100784301758 - ], - [ - "▁Oak", - -11.44510269165039 - ], - [ - "▁rubber", - -11.445286750793457 - ], - [ - "▁tag", - -11.445306777954102 - ], - [ - "PG", - -11.445361137390137 - ], - [ - "▁Heat", - -11.445477485656738 - ], - [ - "▁thousand", - -11.445504188537598 - ], - [ - "▁meets", - -11.445521354675293 - ], - [ - "▁faced", - -11.445578575134277 - ], - [ - "▁reserve", - -11.445640563964844 - ], - [ - "cateva", - -11.445767402648926 - ], - [ - 
"▁gym", - -11.445771217346191 - ], - [ - "▁vitamin", - -11.445960998535156 - ], - [ - "▁Rest", - -11.446457862854004 - ], - [ - "▁Single", - -11.446535110473633 - ], - [ - "▁Stephen", - -11.446623802185059 - ], - [ - "▁trick", - -11.446824073791504 - ], - [ - "DU", - -11.44694709777832 - ], - [ - "▁telefon", - -11.44711685180664 - ], - [ - "▁gând", - -11.447120666503906 - ], - [ - "▁primit", - -11.447345733642578 - ], - [ - "▁Connect", - -11.447351455688477 - ], - [ - "▁führt", - -11.447440147399902 - ], - [ - "▁Info", - -11.447500228881836 - ], - [ - "▁recall", - -11.447848320007324 - ], - [ - "▁restore", - -11.447885513305664 - ], - [ - "lege", - -11.44792652130127 - ], - [ - "▁franchise", - -11.448189735412598 - ], - [ - "▁seulement", - -11.44856071472168 - ], - [ - "reci", - -11.448598861694336 - ], - [ - "▁2019,", - -11.44864273071289 - ], - [ - "▁Ring", - -11.448663711547852 - ], - [ - "▁assembly", - -11.448678970336914 - ], - [ - "intérieur", - -11.448775291442871 - ], - [ - "▁shade", - -11.44887924194336 - ], - [ - "▁meaningful", - -11.448881149291992 - ], - [ - "bag", - -11.448989868164062 - ], - [ - "ONE", - -11.449249267578125 - ], - [ - "▁globe", - -11.449287414550781 - ], - [ - "▁WA", - -11.449406623840332 - ], - [ - "▁intervention", - -11.449495315551758 - ], - [ - "öl", - -11.449531555175781 - ], - [ - "▁Marine", - -11.45029067993164 - ], - [ - "▁Angebot", - -11.450512886047363 - ], - [ - "▁align", - -11.450618743896484 - ], - [ - "▁temperatures", - -11.450634956359863 - ], - [ - "ifier", - -11.45091724395752 - ], - [ - "▁Nigeria", - -11.451189041137695 - ], - [ - "▁survive", - -11.451216697692871 - ], - [ - "ounce", - -11.451275825500488 - ], - [ - "▁placement", - -11.451416969299316 - ], - [ - "▁deci", - -11.451528549194336 - ], - [ - "▁Taylor", - -11.451759338378906 - ], - [ - "step", - -11.45190715789795 - ], - [ - "▁Geschichte", - -11.452054023742676 - ], - [ - "▁Bet", - -11.452169418334961 - ], - [ - "▁Nature", - -11.45224380493164 - ], - [ - 
"▁FC", - -11.452256202697754 - ], - [ - "▁ownership", - -11.452286720275879 - ], - [ - "▁behaviour", - -11.452474594116211 - ], - [ - "▁deutlich", - -11.452532768249512 - ], - [ - "▁wondering", - -11.452798843383789 - ], - [ - "▁cleaner", - -11.453295707702637 - ], - [ - "uring", - -11.4534912109375 - ], - [ - "rä", - -11.453496932983398 - ], - [ - "▁ga", - -11.454296112060547 - ], - [ - "ador", - -11.454482078552246 - ], - [ - "▁artwork", - -11.454564094543457 - ], - [ - "ologic", - -11.45457649230957 - ], - [ - "▁eigentlich", - -11.454848289489746 - ], - [ - "▁hell", - -11.45522403717041 - ], - [ - "source", - -11.455251693725586 - ], - [ - "▁gem", - -11.455265045166016 - ], - [ - "▁boss", - -11.455307006835938 - ], - [ - "▁arise", - -11.455460548400879 - ], - [ - "about", - -11.455711364746094 - ], - [ - "▁SI", - -11.455951690673828 - ], - [ - "▁ME", - -11.45610237121582 - ], - [ - "akt", - -11.456191062927246 - ], - [ - "▁Style", - -11.456259727478027 - ], - [ - "▁Körper", - -11.456493377685547 - ], - [ - "gui", - -11.456799507141113 - ], - [ - "▁navigate", - -11.456819534301758 - ], - [ - "▁Meanwhile", - -11.456977844238281 - ], - [ - "▁așa", - -11.457111358642578 - ], - [ - "▁bulk", - -11.457298278808594 - ], - [ - "▁directions", - -11.457310676574707 - ], - [ - "▁brick", - -11.457747459411621 - ], - [ - "▁Poly", - -11.457752227783203 - ], - [ - "▁politique", - -11.457772254943848 - ], - [ - "▁patch", - -11.457777976989746 - ], - [ - "ра", - -11.457816123962402 - ], - [ - "commerce", - -11.457844734191895 - ], - [ - "▁înainte", - -11.457884788513184 - ], - [ - "▁intelligent", - -11.45823860168457 - ], - [ - "▁infection", - -11.458426475524902 - ], - [ - "▁Tru", - -11.458494186401367 - ], - [ - "▁raising", - -11.458504676818848 - ], - [ - "tragen", - -11.458539009094238 - ], - [ - "▁portrait", - -11.45858383178711 - ], - [ - "▁meisten", - -11.458783149719238 - ], - [ - "▁organize", - -11.45893669128418 - ], - [ - "metric", - -11.458962440490723 - ], - [ - 
"▁Season", - -11.459036827087402 - ], - [ - "▁enforcement", - -11.459259033203125 - ], - [ - "origine", - -11.459836959838867 - ], - [ - "▁Ros", - -11.460065841674805 - ], - [ - "▁Mount", - -11.460083961486816 - ], - [ - "have", - -11.460237503051758 - ], - [ - "▁romantic", - -11.460258483886719 - ], - [ - "▁comic", - -11.460810661315918 - ], - [ - "▁greu", - -11.461116790771484 - ], - [ - "ET", - -11.46133041381836 - ], - [ - "▁hook", - -11.461407661437988 - ], - [ - "▁mort", - -11.461411476135254 - ], - [ - "▁indicated", - -11.461583137512207 - ], - [ - "▁7,", - -11.461982727050781 - ], - [ - "▁Neben", - -11.46204662322998 - ], - [ - "yer", - -11.46214485168457 - ], - [ - "▁momentul", - -11.46214771270752 - ], - [ - "note", - -11.462313652038574 - ], - [ - "▁baz", - -11.46231460571289 - ], - [ - "▁abroad", - -11.462320327758789 - ], - [ - "nite", - -11.462464332580566 - ], - [ - "▁bass", - -11.462701797485352 - ], - [ - "▁norm", - -11.462714195251465 - ], - [ - "▁É", - -11.462788581848145 - ], - [ - "4.", - -11.462881088256836 - ], - [ - "▁province", - -11.463004112243652 - ], - [ - "▁merge", - -11.463419914245605 - ], - [ - "arbeiten", - -11.463438987731934 - ], - [ - "-20", - -11.463574409484863 - ], - [ - "▁Nicht", - -11.463674545288086 - ], - [ - "spo", - -11.463783264160156 - ], - [ - "size", - -11.463815689086914 - ], - [ - "▁assure", - -11.463849067687988 - ], - [ - "charge", - -11.463987350463867 - ], - [ - "▁olive", - -11.464017868041992 - ], - [ - "▁Pot", - -11.46408462524414 - ], - [ - "▁Figure", - -11.4642333984375 - ], - [ - "clair", - -11.464336395263672 - ], - [ - "▁discipline", - -11.464600563049316 - ], - [ - "elli", - -11.464639663696289 - ], - [ - "▁tackle", - -11.465169906616211 - ], - [ - "▁buyer", - -11.465237617492676 - ], - [ - "▁loud", - -11.465479850769043 - ], - [ - "▁180", - -11.465534210205078 - ], - [ - "▁căt", - -11.465587615966797 - ], - [ - "▁Palm", - -11.465738296508789 - ], - [ - "away", - -11.46593189239502 - ], - [ - 
"▁Mother", - -11.46607494354248 - ], - [ - "onia", - -11.466240882873535 - ], - [ - "▁Protection", - -11.466416358947754 - ], - [ - "auto", - -11.466547966003418 - ], - [ - "▁Version", - -11.466583251953125 - ], - [ - "▁Nice", - -11.466714859008789 - ], - [ - "▁12.", - -11.46682071685791 - ], - [ - "▁0,", - -11.466835021972656 - ], - [ - "ATION", - -11.466911315917969 - ], - [ - "▁Produkte", - -11.466955184936523 - ], - [ - "▁tube", - -11.467084884643555 - ], - [ - "▁Houston", - -11.467106819152832 - ], - [ - "chu", - -11.467500686645508 - ], - [ - "pas", - -11.467717170715332 - ], - [ - "▁Ele", - -11.467801094055176 - ], - [ - "▁mountains", - -11.467835426330566 - ], - [ - "PH", - -11.467937469482422 - ], - [ - "▁languages", - -11.468672752380371 - ], - [ - "▁servicii", - -11.468722343444824 - ], - [ - "▁Stay", - -11.468999862670898 - ], - [ - "fil", - -11.469138145446777 - ], - [ - "▁propos", - -11.469801902770996 - ], - [ - "▁coll", - -11.469825744628906 - ], - [ - "▁mor", - -11.470197677612305 - ], - [ - "▁arrange", - -11.470410346984863 - ], - [ - "▁sorry", - -11.470475196838379 - ], - [ - "▁instruction", - -11.470723152160645 - ], - [ - "▁holes", - -11.47077465057373 - ], - [ - "letting", - -11.471046447753906 - ], - [ - "▁wa", - -11.471074104309082 - ], - [ - "▁Feb", - -11.471227645874023 - ], - [ - "omb", - -11.471232414245605 - ], - [ - "▁prise", - -11.471290588378906 - ], - [ - "VO", - -11.471305847167969 - ], - [ - "week", - -11.471349716186523 - ], - [ - "▁Event", - -11.471427917480469 - ], - [ - "▁AT", - -11.471485137939453 - ], - [ - "ket", - -11.471492767333984 - ], - [ - "haft", - -11.471579551696777 - ], - [ - "▁hits", - -11.47159194946289 - ], - [ - "foli", - -11.471681594848633 - ], - [ - "this", - -11.471948623657227 - ], - [ - "GP", - -11.471970558166504 - ], - [ - "▁Pin", - -11.472332954406738 - ], - [ - "▁Stein", - -11.472503662109375 - ], - [ - "thing", - -11.472512245178223 - ], - [ - "▁emphasis", - -11.472556114196777 - ], - [ - "▁Mur", - 
-11.472631454467773 - ], - [ - "▁Bag", - -11.472647666931152 - ], - [ - "cons", - -11.47273063659668 - ], - [ - "tons", - -11.472835540771484 - ], - [ - "lash", - -11.472987174987793 - ], - [ - "▁Grant", - -11.473104476928711 - ], - [ - "▁pris", - -11.473175048828125 - ], - [ - "▁bună", - -11.47323989868164 - ], - [ - "▁buc", - -11.473699569702148 - ], - [ - "▁passe", - -11.473746299743652 - ], - [ - "▁jewelry", - -11.474213600158691 - ], - [ - "iens", - -11.474342346191406 - ], - [ - "▁forma", - -11.47453784942627 - ], - [ - "▁Med", - -11.474651336669922 - ], - [ - "laufen", - -11.474778175354004 - ], - [ - "▁hunt", - -11.474977493286133 - ], - [ - "stayed", - -11.475086212158203 - ], - [ - "party", - -11.475152015686035 - ], - [ - "▁fra", - -11.47529411315918 - ], - [ - "▁scenes", - -11.475305557250977 - ], - [ - "▁absorb", - -11.47535228729248 - ], - [ - "▁abilities", - -11.475377082824707 - ], - [ - "lug", - -11.475507736206055 - ], - [ - "▁Sarah", - -11.475693702697754 - ], - [ - "mpf", - -11.47570514678955 - ], - [ - "▁fle", - -11.4757080078125 - ], - [ - "accès", - -11.475872993469238 - ], - [ - "▁solicit", - -11.475926399230957 - ], - [ - "pie", - -11.476278305053711 - ], - [ - "▁Zum", - -11.476296424865723 - ], - [ - "▁universe", - -11.476390838623047 - ], - [ - "▁exists", - -11.476449012756348 - ], - [ - "oane", - -11.476597785949707 - ], - [ - "IVE", - -11.47668743133545 - ], - [ - "▁2011.", - -11.476906776428223 - ], - [ - "▁specialists", - -11.477072715759277 - ], - [ - "▁mess", - -11.477309226989746 - ], - [ - "fach", - -11.477402687072754 - ], - [ - "▁Recht", - -11.477404594421387 - ], - [ - "▁hack", - -11.47755241394043 - ], - [ - "▁jacket", - -11.477564811706543 - ], - [ - "HC", - -11.47769832611084 - ], - [ - "▁substance", - -11.477728843688965 - ], - [ - "▁signing", - -11.477775573730469 - ], - [ - "▁allerdings", - -11.478032112121582 - ], - [ - "▁publish", - -11.478139877319336 - ], - [ - "▁Lab", - -11.478157043457031 - ], - [ - "▁agenda", - 
-11.478249549865723 - ], - [ - "lane", - -11.478299140930176 - ], - [ - "stream", - -11.478620529174805 - ], - [ - "schau", - -11.47879409790039 - ], - [ - "▁realizat", - -11.478971481323242 - ], - [ - "▁supplier", - -11.479019165039062 - ], - [ - "▁moderate", - -11.47902774810791 - ], - [ - "▁tours", - -11.479212760925293 - ], - [ - "▁narrative", - -11.479220390319824 - ], - [ - "ația", - -11.479279518127441 - ], - [ - "▁maps", - -11.479423522949219 - ], - [ - "treten", - -11.479447364807129 - ], - [ - "▁mars", - -11.479706764221191 - ], - [ - "▁moon", - -11.479745864868164 - ], - [ - "rose", - -11.479751586914062 - ], - [ - "▁exp", - -11.479766845703125 - ], - [ - "zahl", - -11.480154037475586 - ], - [ - "psych", - -11.480195999145508 - ], - [ - "▁gehört", - -11.48024845123291 - ], - [ - "▁bound", - -11.4803466796875 - ], - [ - "▁submission", - -11.480451583862305 - ], - [ - "▁clubs", - -11.480722427368164 - ], - [ - "Am", - -11.480755805969238 - ], - [ - "tenir", - -11.480782508850098 - ], - [ - "▁boast", - -11.480851173400879 - ], - [ - "▁boards", - -11.4810791015625 - ], - [ - "▁Geschäfts", - -11.481216430664062 - ], - [ - "zing", - -11.48126220703125 - ], - [ - "wort", - -11.48137092590332 - ], - [ - "lid", - -11.481417655944824 - ], - [ - "▁contractor", - -11.481528282165527 - ], - [ - "▁donner", - -11.481672286987305 - ], - [ - "▁coupon", - -11.481974601745605 - ], - [ - "adresse", - -11.482004165649414 - ], - [ - "colo", - -11.48210334777832 - ], - [ - "▁perception", - -11.482124328613281 - ], - [ - "NC", - -11.48222541809082 - ], - [ - "▁abge", - -11.482245445251465 - ], - [ - "▁cheaper", - -11.482268333435059 - ], - [ - "▁grace", - -11.482312202453613 - ], - [ - "▁resident", - -11.482718467712402 - ], - [ - "kla", - -11.4828462600708 - ], - [ - "▁bug", - -11.4828462600708 - ], - [ - "▁Available", - -11.482893943786621 - ], - [ - "▁BA", - -11.483323097229004 - ], - [ - "▁Met", - -11.483601570129395 - ], - [ - "▁climb", - -11.48365592956543 - ], - [ - 
"▁expanded", - -11.484349250793457 - ], - [ - "ying", - -11.484426498413086 - ], - [ - "▁matching", - -11.484469413757324 - ], - [ - "▁suffered", - -11.484733581542969 - ], - [ - "▁employed", - -11.484755516052246 - ], - [ - "pper", - -11.484843254089355 - ], - [ - "▁experiencing", - -11.484884262084961 - ], - [ - "ddy", - -11.484953880310059 - ], - [ - "▁philosophy", - -11.484955787658691 - ], - [ - "▁utilisé", - -11.485008239746094 - ], - [ - "▁Jane", - -11.485079765319824 - ], - [ - "LI", - -11.485087394714355 - ], - [ - "▁elected", - -11.485185623168945 - ], - [ - "▁MI", - -11.485264778137207 - ], - [ - "▁ISO", - -11.485340118408203 - ], - [ - "winning", - -11.48537540435791 - ], - [ - "▁vot", - -11.485424041748047 - ], - [ - "▁generic", - -11.485519409179688 - ], - [ - "▁Bol", - -11.485650062561035 - ], - [ - "▁copies", - -11.48568058013916 - ], - [ - "▁mechanical", - -11.48568058013916 - ], - [ - "günstig", - -11.485682487487793 - ], - [ - "roy", - -11.485770225524902 - ], - [ - "Astfel", - -11.485808372497559 - ], - [ - "media", - -11.485868453979492 - ], - [ - "▁shoulder", - -11.4859037399292 - ], - [ - "▁directory", - -11.486000061035156 - ], - [ - "▁banking", - -11.486016273498535 - ], - [ - "▁mistakes", - -11.486040115356445 - ], - [ - "▁Fran", - -11.486425399780273 - ], - [ - "▁Jon", - -11.486544609069824 - ], - [ - "▁spare", - -11.486579895019531 - ], - [ - "metri", - -11.486668586730957 - ], - [ - "▁mask", - -11.486879348754883 - ], - [ - "▁consistently", - -11.48695182800293 - ], - [ - "▁Columbia", - -11.487278938293457 - ], - [ - "roid", - -11.48774242401123 - ], - [ - "essen", - -11.487935066223145 - ], - [ - "▁(“", - -11.48798656463623 - ], - [ - "▁série", - -11.488212585449219 - ], - [ - "▁Phil", - -11.488249778747559 - ], - [ - "▁usor", - -11.488249778747559 - ], - [ - "▁stood", - -11.488279342651367 - ], - [ - "▁racing", - -11.488335609436035 - ], - [ - "▁Comme", - -11.488555908203125 - ], - [ - "▁exceed", - -11.488565444946289 - ], - [ - "на", 
- -11.488618850708008 - ], - [ - "▁activate", - -11.48873233795166 - ], - [ - "▁circle", - -11.488836288452148 - ], - [ - "▁bold", - -11.488956451416016 - ], - [ - "▁handy", - -11.48909854888916 - ], - [ - "merely", - -11.489114761352539 - ], - [ - "▁Edward", - -11.489147186279297 - ], - [ - "▁contracts", - -11.489530563354492 - ], - [ - "ê", - -11.489595413208008 - ], - [ - "▁campaigns", - -11.489673614501953 - ], - [ - "▁ought", - -11.489733695983887 - ], - [ - "▁nursing", - -11.489781379699707 - ], - [ - "▁Jr", - -11.489917755126953 - ], - [ - "▁rarely", - -11.490032196044922 - ], - [ - "▁Mir", - -11.490050315856934 - ], - [ - "▁diagnosis", - -11.490379333496094 - ], - [ - "▁Theatre", - -11.490394592285156 - ], - [ - "▁producer", - -11.490407943725586 - ], - [ - "Currently", - -11.490492820739746 - ], - [ - "▁fitting", - -11.490580558776855 - ], - [ - "▁ajunge", - -11.490618705749512 - ], - [ - "minte", - -11.490754127502441 - ], - [ - "▁termen", - -11.490838050842285 - ], - [ - "▁Linux", - -11.491013526916504 - ], - [ - "▁1-", - -11.491068840026855 - ], - [ - "▁hätte", - -11.491202354431152 - ], - [ - "▁Resort", - -11.49129867553711 - ], - [ - "image", - -11.491527557373047 - ], - [ - "▁Rod", - -11.49189281463623 - ], - [ - "▁Fly", - -11.491924285888672 - ], - [ - "try", - -11.492317199707031 - ], - [ - "▁expense", - -11.49245834350586 - ], - [ - "▁Interior", - -11.492799758911133 - ], - [ - "▁fence", - -11.492920875549316 - ], - [ - "▁Kontakt", - -11.493063926696777 - ], - [ - "▁ALL", - -11.493142127990723 - ], - [ - "VA", - -11.493229866027832 - ], - [ - "▁Exchange", - -11.493316650390625 - ], - [ - "ranked", - -11.493558883666992 - ], - [ - "▁Performance", - -11.493621826171875 - ], - [ - "prim", - -11.493635177612305 - ], - [ - "▁basket", - -11.493694305419922 - ], - [ - "▁Vice", - -11.493703842163086 - ], - [ - "phan", - -11.4937105178833 - ], - [ - "▁broke", - -11.494003295898438 - ], - [ - "voir", - -11.49431324005127 - ], - [ - "arg", - 
-11.494512557983398 - ], - [ - "ART", - -11.494529724121094 - ], - [ - "▁floors", - -11.494856834411621 - ], - [ - "pression", - -11.495025634765625 - ], - [ - "▁possession", - -11.49507999420166 - ], - [ - "▁domaine", - -11.49510669708252 - ], - [ - "▁valeur", - -11.495132446289062 - ], - [ - "▁suddenly", - -11.495282173156738 - ], - [ - "▁mild", - -11.495304107666016 - ], - [ - "▁aflat", - -11.495431900024414 - ], - [ - "▁Tea", - -11.495731353759766 - ], - [ - "tritt", - -11.495767593383789 - ], - [ - "▁Mittel", - -11.495773315429688 - ], - [ - "▁regulatory", - -11.49580192565918 - ], - [ - "▁spectacular", - -11.495905876159668 - ], - [ - "fahrt", - -11.495949745178223 - ], - [ - "GS", - -11.496026039123535 - ], - [ - "MM", - -11.4961576461792 - ], - [ - "▁environments", - -11.496203422546387 - ], - [ - "▁Raum", - -11.496381759643555 - ], - [ - "▁lay", - -11.496664047241211 - ], - [ - "▁cré", - -11.496713638305664 - ], - [ - "▁Selbst", - -11.496726989746094 - ], - [ - "▁opposition", - -11.496821403503418 - ], - [ - "two", - -11.49729061126709 - ], - [ - "▁Clark", - -11.497822761535645 - ], - [ - "▁Netz", - -11.497845649719238 - ], - [ - "bald", - -11.497983932495117 - ], - [ - "▁Innovation", - -11.4982271194458 - ], - [ - "▁overcome", - -11.49825382232666 - ], - [ - "quot", - -11.499013900756836 - ], - [ - "▁Sin", - -11.499106407165527 - ], - [ - "▁Sto", - -11.499320983886719 - ], - [ - "▁grain", - -11.499560356140137 - ], - [ - "▁collections", - -11.499724388122559 - ], - [ - "▁applies", - -11.49986743927002 - ], - [ - "mach", - -11.499934196472168 - ], - [ - "▁wheels", - -11.499958992004395 - ], - [ - "▁universities", - -11.500049591064453 - ], - [ - "▁Ray", - -11.500182151794434 - ], - [ - "lina", - -11.500238418579102 - ], - [ - "▁arrangements", - -11.500393867492676 - ], - [ - "▁western", - -11.500728607177734 - ], - [ - "rous", - -11.500768661499023 - ], - [ - "aise", - -11.500784873962402 - ], - [ - "▁highlights", - -11.50112533569336 - ], - [ - "▁intend", 
- -11.501265525817871 - ], - [ - "aimed", - -11.501358032226562 - ], - [ - "▁Scotland", - -11.501360893249512 - ], - [ - "▁acestei", - -11.501466751098633 - ], - [ - "graf", - -11.50150203704834 - ], - [ - "duction", - -11.501517295837402 - ], - [ - "path", - -11.50156021118164 - ], - [ - "▁evil", - -11.501633644104004 - ], - [ - "▁scris", - -11.501791000366211 - ], - [ - "▁disposition", - -11.501927375793457 - ], - [ - "▁designing", - -11.5020751953125 - ], - [ - "zwar", - -11.502172470092773 - ], - [ - "▁Retrieve", - -11.50217342376709 - ], - [ - "▁aggressive", - -11.502374649047852 - ], - [ - "▁Glen", - -11.502411842346191 - ], - [ - "▁daher", - -11.502473831176758 - ], - [ - "▁Quick", - -11.502494812011719 - ], - [ - "▁recover", - -11.502632141113281 - ], - [ - "▁prominent", - -11.50288200378418 - ], - [ - "▁visits", - -11.503198623657227 - ], - [ - "▁Mis", - -11.503376960754395 - ], - [ - "▁edited", - -11.503456115722656 - ], - [ - "▁distributed", - -11.503564834594727 - ], - [ - "▁dés", - -11.503580093383789 - ], - [ - "▁alter", - -11.5035982131958 - ], - [ - "▁cooked", - -11.503697395324707 - ], - [ - "embl", - -11.503706932067871 - ], - [ - "Univers", - -11.503715515136719 - ], - [ - "▁Minuten", - -11.504156112670898 - ], - [ - "▁compris", - -11.504179954528809 - ], - [ - "rais", - -11.504182815551758 - ], - [ - "essentially", - -11.504199028015137 - ], - [ - "▁rel", - -11.504340171813965 - ], - [ - "▁appel", - -11.504570007324219 - ], - [ - "▁trace", - -11.504788398742676 - ], - [ - "relating", - -11.504830360412598 - ], - [ - "dès", - -11.504937171936035 - ], - [ - "aste", - -11.504961013793945 - ], - [ - "▁raison", - -11.504963874816895 - ], - [ - "▁frequent", - -11.505281448364258 - ], - [ - "▁beds", - -11.505316734313965 - ], - [ - "▁Miami", - -11.505511283874512 - ], - [ - "▁vibrant", - -11.50564193725586 - ], - [ - "▁Kam", - -11.505721092224121 - ], - [ - "▁klar", - -11.505861282348633 - ], - [ - "▁Tan", - -11.50598430633545 - ], - [ - "▁vidéo", - 
-11.506032943725586 - ], - [ - "▁Kur", - -11.506115913391113 - ], - [ - "▁themes", - -11.506134033203125 - ], - [ - "▁struggling", - -11.506440162658691 - ], - [ - "▁Magazine", - -11.506444931030273 - ], - [ - "maker", - -11.506476402282715 - ], - [ - "veni", - -11.506564140319824 - ], - [ - "▁Groß", - -11.506732940673828 - ], - [ - "▁streaming", - -11.506772994995117 - ], - [ - "▁analyze", - -11.506876945495605 - ], - [ - "▁titles", - -11.506982803344727 - ], - [ - "pier", - -11.507316589355469 - ], - [ - "▁participant", - -11.507347106933594 - ], - [ - "aims", - -11.507607460021973 - ], - [ - "▁convention", - -11.507638931274414 - ], - [ - "▁flood", - -11.507780075073242 - ], - [ - "▁nights", - -11.507842063903809 - ], - [ - "▁titre", - -11.50792407989502 - ], - [ - "▁voul", - -11.508010864257812 - ], - [ - "weit", - -11.50816822052002 - ], - [ - "where", - -11.508213996887207 - ], - [ - "▁Seiten", - -11.508286476135254 - ], - [ - "▁relaxing", - -11.508628845214844 - ], - [ - "▁piano", - -11.50883674621582 - ], - [ - "▁Pick", - -11.508842468261719 - ], - [ - "▁Sony", - -11.508955001831055 - ], - [ - "▁enhanced", - -11.509017944335938 - ], - [ - "▁visa", - -11.50915241241455 - ], - [ - "CH", - -11.50930118560791 - ], - [ - "▁instantly", - -11.50930404663086 - ], - [ - "▁Fan", - -11.509721755981445 - ], - [ - "▁diabetes", - -11.509988784790039 - ], - [ - "▁popul", - -11.50999641418457 - ], - [ - "Ang", - -11.510232925415039 - ], - [ - "▁Ask", - -11.510295867919922 - ], - [ - "cate", - -11.510650634765625 - ], - [ - "▁simplu", - -11.510666847229004 - ], - [ - "nahme", - -11.510685920715332 - ], - [ - "▁dentist", - -11.510842323303223 - ], - [ - "ubi", - -11.510920524597168 - ], - [ - "article", - -11.511030197143555 - ], - [ - "▁graph", - -11.511094093322754 - ], - [ - "▁rival", - -11.51121711730957 - ], - [ - "jahr", - -11.5113525390625 - ], - [ - "▁bloc", - -11.511370658874512 - ], - [ - "fern", - -11.511427879333496 - ], - [ - "▁dispar", - -11.511516571044922 - 
], - [ - "▁servers", - -11.511582374572754 - ], - [ - "▁patru", - -11.511610984802246 - ], - [ - "▁Within", - -11.511634826660156 - ], - [ - "▁situated", - -11.511896133422852 - ], - [ - "▁HR", - -11.511981964111328 - ], - [ - "▁leaf", - -11.511981964111328 - ], - [ - "▁curs", - -11.512049674987793 - ], - [ - "antes", - -11.512325286865234 - ], - [ - "lux", - -11.512406349182129 - ], - [ - "▁1993", - -11.512463569641113 - ], - [ - "stance", - -11.512650489807129 - ], - [ - "▁northern", - -11.512683868408203 - ], - [ - "lves", - -11.512718200683594 - ], - [ - "▁contractors", - -11.512882232666016 - ], - [ - "▁dimensions", - -11.512920379638672 - ], - [ - "▁rolling", - -11.513068199157715 - ], - [ - "▁automobile", - -11.513211250305176 - ], - [ - "▁cru", - -11.51342487335205 - ], - [ - "▁displays", - -11.513570785522461 - ], - [ - "web", - -11.513812065124512 - ], - [ - "had", - -11.513850212097168 - ], - [ - "▁Never", - -11.513893127441406 - ], - [ - "▁2-", - -11.513932228088379 - ], - [ - "vine", - -11.51393985748291 - ], - [ - "▁Wahl", - -11.513975143432617 - ], - [ - "▁Markt", - -11.514166831970215 - ], - [ - "▁Double", - -11.514227867126465 - ], - [ - "▁acknowledge", - -11.514229774475098 - ], - [ - "stal", - -11.514288902282715 - ], - [ - "▁equity", - -11.514620780944824 - ], - [ - "▁ministry", - -11.514823913574219 - ], - [ - "▁Lor", - -11.514875411987305 - ], - [ - "▁sud", - -11.514968872070312 - ], - [ - "idée", - -11.515044212341309 - ], - [ - "▁measured", - -11.515448570251465 - ], - [ - "▁editing", - -11.515609741210938 - ], - [ - "▁singur", - -11.515620231628418 - ], - [ - "▁coal", - -11.515623092651367 - ], - [ - "▁dramatic", - -11.516212463378906 - ], - [ - "AG", - -11.516251564025879 - ], - [ - "asca", - -11.516280174255371 - ], - [ - "▁crash", - -11.516321182250977 - ], - [ - "ischer", - -11.516597747802734 - ], - [ - "▁Pla", - -11.516871452331543 - ], - [ - "▁psycho", - -11.517054557800293 - ], - [ - "piece", - -11.517118453979492 - ], - [ - 
"▁finger", - -11.517121315002441 - ], - [ - "▁Hollywood", - -11.517123222351074 - ], - [ - "▁Cr", - -11.517345428466797 - ], - [ - "▁locally", - -11.517622947692871 - ], - [ - "▁mouse", - -11.517792701721191 - ], - [ - "▁Base", - -11.517867088317871 - ], - [ - "uite", - -11.518095016479492 - ], - [ - "▁detect", - -11.518099784851074 - ], - [ - "cea", - -11.518150329589844 - ], - [ - "▁bull", - -11.518194198608398 - ], - [ - "▁curve", - -11.518208503723145 - ], - [ - "été", - -11.518218994140625 - ], - [ - "ddle", - -11.51839542388916 - ], - [ - "▁span", - -11.518523216247559 - ], - [ - "WS", - -11.518878936767578 - ], - [ - "CL", - -11.519017219543457 - ], - [ - "▁officially", - -11.519042015075684 - ], - [ - "▁corect", - -11.519168853759766 - ], - [ - "▁Artikel", - -11.5193510055542 - ], - [ - "▁customized", - -11.520099639892578 - ], - [ - "▁intellectual", - -11.52018928527832 - ], - [ - "▁heures", - -11.520334243774414 - ], - [ - "schule", - -11.520444869995117 - ], - [ - "▁investing", - -11.520585060119629 - ], - [ - "▁parallel", - -11.521227836608887 - ], - [ - "▁loi", - -11.521263122558594 - ], - [ - "ările", - -11.521566390991211 - ], - [ - "р", - -11.521679878234863 - ], - [ - "▁bench", - -11.521724700927734 - ], - [ - "▁principle", - -11.521756172180176 - ], - [ - "▁Galaxy", - -11.521829605102539 - ], - [ - "ța", - -11.522237777709961 - ], - [ - "▁(4", - -11.522418975830078 - ], - [ - "▁bedrooms", - -11.522578239440918 - ], - [ - "née", - -11.52273941040039 - ], - [ - "▁surely", - -11.52275276184082 - ], - [ - "very", - -11.522927284240723 - ], - [ - "stelle", - -11.523200988769531 - ], - [ - "activ", - -11.523216247558594 - ], - [ - "cite", - -11.523551940917969 - ], - [ - "▁Original", - -11.523553848266602 - ], - [ - "▁palm", - -11.523665428161621 - ], - [ - "▁losses", - -11.523934364318848 - ], - [ - "▁newspaper", - -11.524153709411621 - ], - [ - "ciu", - -11.52436351776123 - ], - [ - "▁Hold", - -11.524392127990723 - ], - [ - "BO", - -11.524422645568848 
- ], - [ - "▁CON", - -11.524598121643066 - ], - [ - "▁modified", - -11.524624824523926 - ], - [ - "▁stake", - -11.524735450744629 - ], - [ - "▁Ton", - -11.524798393249512 - ], - [ - "▁luna", - -11.524968147277832 - ], - [ - "▁Mind", - -11.525094985961914 - ], - [ - "lap", - -11.525150299072266 - ], - [ - "▁opinions", - -11.525247573852539 - ], - [ - "▁Jordan", - -11.525351524353027 - ], - [ - "div", - -11.52537727355957 - ], - [ - "indi", - -11.525418281555176 - ], - [ - "▁Story", - -11.525476455688477 - ], - [ - "▁affiliate", - -11.52585506439209 - ], - [ - "▁matière", - -11.525918960571289 - ], - [ - "▁fifth", - -11.526399612426758 - ], - [ - "▁sheets", - -11.52645492553711 - ], - [ - "▁puțin", - -11.526909828186035 - ], - [ - "ush", - -11.526947021484375 - ], - [ - "geführt", - -11.526993751525879 - ], - [ - "▁Falls", - -11.527168273925781 - ], - [ - "legi", - -11.527295112609863 - ], - [ - "▁auction", - -11.527326583862305 - ], - [ - "▁cooperation", - -11.52735424041748 - ], - [ - "▁Fee", - -11.527474403381348 - ], - [ - "▁Daily", - -11.52774715423584 - ], - [ - "pies", - -11.527853965759277 - ], - [ - "▁basketball", - -11.527976036071777 - ], - [ - "removing", - -11.528056144714355 - ], - [ - "Besides", - -11.528294563293457 - ], - [ - "▁Body", - -11.528355598449707 - ], - [ - "▁AD", - -11.528369903564453 - ], - [ - "RU", - -11.528435707092285 - ], - [ - "ţia", - -11.52894401550293 - ], - [ - "▁Extra", - -11.528986930847168 - ], - [ - "▁Practice", - -11.52900218963623 - ], - [ - "▁Jeff", - -11.529017448425293 - ], - [ - "▁început", - -11.529253005981445 - ], - [ - "ching", - -11.529269218444824 - ], - [ - "▁Gift", - -11.529281616210938 - ], - [ - "kk", - -11.529295921325684 - ], - [ - "\")", - -11.529349327087402 - ], - [ - "▁Austin", - -11.529651641845703 - ], - [ - "thro", - -11.529766082763672 - ], - [ - "▁camping", - -11.529810905456543 - ], - [ - "▁theatre", - -11.529850959777832 - ], - [ - "école", - -11.529916763305664 - ], - [ - "vient", - 
-11.530159950256348 - ], - [ - "▁faces", - -11.530226707458496 - ], - [ - "▁constructed", - -11.530437469482422 - ], - [ - "▁overnight", - -11.530472755432129 - ], - [ - "▁locale", - -11.530574798583984 - ], - [ - "▁roots", - -11.530611038208008 - ], - [ - "▁bu", - -11.530662536621094 - ], - [ - "4,", - -11.530683517456055 - ], - [ - "▁Enterprise", - -11.530865669250488 - ], - [ - "screen", - -11.530935287475586 - ], - [ - "▁Chef", - -11.53096866607666 - ], - [ - "▁Along", - -11.531298637390137 - ], - [ - "▁MD", - -11.531431198120117 - ], - [ - "▁Supreme", - -11.531597137451172 - ], - [ - "En", - -11.531655311584473 - ], - [ - "▁verwendet", - -11.532015800476074 - ], - [ - "▁processed", - -11.532425880432129 - ], - [ - "▁vendors", - -11.532549858093262 - ], - [ - "▁FA", - -11.532651901245117 - ], - [ - "▁44", - -11.532716751098633 - ], - [ - "▁beautifully", - -11.532933235168457 - ], - [ - "▁eficient", - -11.533092498779297 - ], - [ - "▁Wil", - -11.533117294311523 - ], - [ - "▁Member", - -11.533121109008789 - ], - [ - "▁damages", - -11.5332670211792 - ], - [ - "▁mutual", - -11.533288955688477 - ], - [ - "SN", - -11.533506393432617 - ], - [ - "▁Dave", - -11.533665657043457 - ], - [ - "??", - -11.533998489379883 - ], - [ - "stat", - -11.534090995788574 - ], - [ - "▁tourist", - -11.534374237060547 - ], - [ - "fie", - -11.534425735473633 - ], - [ - "şte", - -11.534754753112793 - ], - [ - "▁donne", - -11.534764289855957 - ], - [ - "▁shadow", - -11.53493881225586 - ], - [ - "▁dough", - -11.534993171691895 - ], - [ - "▁Gro", - -11.535002708435059 - ], - [ - "▁Mah", - -11.535066604614258 - ], - [ - "RF", - -11.535126686096191 - ], - [ - "▁mechanism", - -11.535163879394531 - ], - [ - "▁2011,", - -11.535179138183594 - ], - [ - "▁Alter", - -11.53530502319336 - ], - [ - "▁opposed", - -11.53538990020752 - ], - [ - "▁Fri", - -11.535501480102539 - ], - [ - "▁remarkable", - -11.535572052001953 - ], - [ - "oral", - -11.535635948181152 - ], - [ - "▁verschiedene", - 
-11.535653114318848 - ], - [ - "▁difficulty", - -11.535691261291504 - ], - [ - "▁Application", - -11.535840034484863 - ], - [ - "▁Hay", - -11.535888671875 - ], - [ - "▁continua", - -11.535935401916504 - ], - [ - "EP", - -11.53609848022461 - ], - [ - "▁Pr", - -11.53617000579834 - ], - [ - "▁Lady", - -11.53631591796875 - ], - [ - "▁interval", - -11.536457061767578 - ], - [ - "▁Mil", - -11.536504745483398 - ], - [ - "▁2010.", - -11.537042617797852 - ], - [ - "VE", - -11.537074089050293 - ], - [ - "integr", - -11.537360191345215 - ], - [ - "▁création", - -11.537415504455566 - ], - [ - "weed", - -11.537456512451172 - ], - [ - "EG", - -11.53760051727295 - ], - [ - "▁6,", - -11.537784576416016 - ], - [ - "▁god", - -11.537866592407227 - ], - [ - "▁accomplish", - -11.537947654724121 - ], - [ - "▁thoroughly", - -11.538019180297852 - ], - [ - "2019", - -11.538228988647461 - ], - [ - "izer", - -11.538246154785156 - ], - [ - "▁Wal", - -11.538300514221191 - ], - [ - "ifying", - -11.538701057434082 - ], - [ - "▁Wohn", - -11.539227485656738 - ], - [ - "▁Holz", - -11.539474487304688 - ], - [ - "▁Advanced", - -11.539528846740723 - ], - [ - "▁honey", - -11.539626121520996 - ], - [ - "proof", - -11.539634704589844 - ], - [ - "▁saison", - -11.540029525756836 - ], - [ - "ându", - -11.540035247802734 - ], - [ - "▁Kevin", - -11.540116310119629 - ], - [ - "▁shelter", - -11.540199279785156 - ], - [ - "▁discut", - -11.540257453918457 - ], - [ - "▁hike", - -11.540257453918457 - ], - [ - "ités", - -11.540461540222168 - ], - [ - "▁boutique", - -11.540672302246094 - ], - [ - "▁Email", - -11.54067611694336 - ], - [ - "▁cosmetic", - -11.540830612182617 - ], - [ - "dian", - -11.540916442871094 - ], - [ - "▁hohe", - -11.540940284729004 - ], - [ - "▁absence", - -11.541071891784668 - ], - [ - "axi", - -11.541136741638184 - ], - [ - "nah", - -11.541178703308105 - ], - [ - "▁Frauen", - -11.541236877441406 - ], - [ - "▁actively", - -11.541278839111328 - ], - [ - "bind", - -11.541468620300293 - ], - [ - 
"▁everybody", - -11.541740417480469 - ], - [ - "▁controller", - -11.541802406311035 - ], - [ - "▁1.5", - -11.5418062210083 - ], - [ - "erau", - -11.541842460632324 - ], - [ - "gehen", - -11.541988372802734 - ], - [ - "▁scenario", - -11.542038917541504 - ], - [ - "▁odd", - -11.542083740234375 - ], - [ - "▁Ultra", - -11.542089462280273 - ], - [ - "▁finishing", - -11.542366981506348 - ], - [ - "▁cuts", - -11.542383193969727 - ], - [ - "▁financing", - -11.542515754699707 - ], - [ - "▁Chance", - -11.542579650878906 - ], - [ - "surrounded", - -11.542818069458008 - ], - [ - "▁joc", - -11.542903900146484 - ], - [ - "▁shelf", - -11.543004035949707 - ], - [ - "tief", - -11.54308032989502 - ], - [ - "▁Sir", - -11.543146133422852 - ], - [ - "▁Agent", - -11.543197631835938 - ], - [ - "▁scratch", - -11.543560981750488 - ], - [ - "2,000", - -11.54360294342041 - ], - [ - "nutri", - -11.54365348815918 - ], - [ - "nier", - -11.544063568115234 - ], - [ - "▁Dur", - -11.544175148010254 - ], - [ - "▁grid", - -11.544268608093262 - ], - [ - "road", - -11.544413566589355 - ], - [ - "▁pets", - -11.544429779052734 - ], - [ - "stud", - -11.54448127746582 - ], - [ - "OM", - -11.544569969177246 - ], - [ - "Die", - -11.544877052307129 - ], - [ - "▁800", - -11.54496955871582 - ], - [ - "▁arrangement", - -11.545088768005371 - ], - [ - "▁Sri", - -11.545185089111328 - ], - [ - "▁Patrick", - -11.545187950134277 - ], - [ - "ava", - -11.545212745666504 - ], - [ - "▁pension", - -11.54523754119873 - ], - [ - "dung", - -11.545353889465332 - ], - [ - "▁Chapter", - -11.545475006103516 - ], - [ - "▁Property", - -11.545475006103516 - ], - [ - "▁structural", - -11.545571327209473 - ], - [ - "▁overview", - -11.545731544494629 - ], - [ - "2015", - -11.545917510986328 - ], - [ - "▁lawn", - -11.545924186706543 - ], - [ - "▁Vin", - -11.546219825744629 - ], - [ - "lik", - -11.546402931213379 - ], - [ - "dus", - -11.546418190002441 - ], - [ - "Several", - -11.54654598236084 - ], - [ - "▁Bou", - -11.546670913696289 - 
], - [ - "▁copper", - -11.546703338623047 - ], - [ - "▁duration", - -11.546867370605469 - ], - [ - "inate", - -11.546982765197754 - ], - [ - "▁podcast", - -11.547204971313477 - ], - [ - "▁Self", - -11.547208786010742 - ], - [ - "▁Construction", - -11.547491073608398 - ], - [ - "achat", - -11.54768180847168 - ], - [ - "???", - -11.547683715820312 - ], - [ - "▁Electric", - -11.547974586486816 - ], - [ - "▁Mrs", - -11.54799747467041 - ], - [ - "▁CT", - -11.548019409179688 - ], - [ - "▁proceed", - -11.548324584960938 - ], - [ - "▁Course", - -11.548333168029785 - ], - [ - "▁Frei", - -11.548699378967285 - ], - [ - "▁heavily", - -11.548868179321289 - ], - [ - "rique", - -11.548872947692871 - ], - [ - "version", - -11.549016952514648 - ], - [ - "▁representatives", - -11.549118041992188 - ], - [ - "▁tourism", - -11.549182891845703 - ], - [ - "▁shirt", - -11.5494966506958 - ], - [ - "▁rough", - -11.549507141113281 - ], - [ - "▁weniger", - -11.549735069274902 - ], - [ - "▁keyboard", - -11.550058364868164 - ], - [ - "▁heritage", - -11.550149917602539 - ], - [ - "kat", - -11.550535202026367 - ], - [ - "assez", - -11.550567626953125 - ], - [ - "▁cabinets", - -11.550591468811035 - ], - [ - "▁Komm", - -11.550762176513672 - ], - [ - "▁impressed", - -11.55078411102295 - ], - [ - "▁Oregon", - -11.550788879394531 - ], - [ - "▁Davis", - -11.55081558227539 - ], - [ - "specialized", - -11.55097770690918 - ], - [ - "▁gross", - -11.550999641418457 - ], - [ - "Located", - -11.551044464111328 - ], - [ - "ttle", - -11.551044464111328 - ], - [ - "▁2010,", - -11.551224708557129 - ], - [ - "chan", - -11.551253318786621 - ], - [ - "mine", - -11.551305770874023 - ], - [ - "▁aduce", - -11.551637649536133 - ], - [ - "▁subsequent", - -11.551729202270508 - ], - [ - "▁demo", - -11.551851272583008 - ], - [ - "aba", - -11.552209854125977 - ], - [ - "▁shock", - -11.552389144897461 - ], - [ - "▁theater", - -11.552854537963867 - ], - [ - "▁engineers", - -11.55294418334961 - ], - [ - "▁feu", - 
-11.553037643432617 - ], - [ - "▁Rot", - -11.553058624267578 - ], - [ - "▁addressed", - -11.553155899047852 - ], - [ - "▁Letter", - -11.553431510925293 - ], - [ - "gré", - -11.553448677062988 - ], - [ - "▁quantity", - -11.553449630737305 - ], - [ - "▁Seit", - -11.553640365600586 - ], - [ - "▁bacteria", - -11.553681373596191 - ], - [ - "kg", - -11.55408000946045 - ], - [ - "▁conservation", - -11.554191589355469 - ], - [ - "▁entreprises", - -11.55420207977295 - ], - [ - "▁pleasant", - -11.554207801818848 - ], - [ - "armed", - -11.554228782653809 - ], - [ - "dorf", - -11.554286003112793 - ], - [ - "fact", - -11.554320335388184 - ], - [ - "▁Much", - -11.554388046264648 - ], - [ - "▁laugh", - -11.55482006072998 - ], - [ - "▁blade", - -11.554835319519043 - ], - [ - "amine", - -11.554838180541992 - ], - [ - "▁insert", - -11.55493450164795 - ], - [ - "▁toys", - -11.555326461791992 - ], - [ - "▁в", - -11.555726051330566 - ], - [ - "cell", - -11.555747985839844 - ], - [ - "▁strengthen", - -11.555864334106445 - ], - [ - "GR", - -11.555882453918457 - ], - [ - "▁autor", - -11.556114196777344 - ], - [ - "▁LI", - -11.556147575378418 - ], - [ - "▁oamenii", - -11.556184768676758 - ], - [ - "▁Modell", - -11.556222915649414 - ], - [ - "▁sophisticated", - -11.556225776672363 - ], - [ - "▁Write", - -11.556283950805664 - ], - [ - "eți", - -11.556295394897461 - ], - [ - "say", - -11.556641578674316 - ], - [ - "▁nutzen", - -11.556783676147461 - ], - [ - "▁amenities", - -11.556979179382324 - ], - [ - "chel", - -11.557068824768066 - ], - [ - "Unlike", - -11.55720043182373 - ], - [ - "▁Bilder", - -11.557208061218262 - ], - [ - "fertig", - -11.55722713470459 - ], - [ - "PER", - -11.557244300842285 - ], - [ - "▁apparently", - -11.557282447814941 - ], - [ - "▁pointed", - -11.557332992553711 - ], - [ - "lop", - -11.557435989379883 - ], - [ - "▁commande", - -11.557848930358887 - ], - [ - "▁NEW", - -11.557923316955566 - ], - [ - "▁primi", - -11.55798625946045 - ], - [ - "▁aluminum", - 
-11.558046340942383 - ], - [ - "ificare", - -11.558063507080078 - ], - [ - "open", - -11.55815315246582 - ], - [ - "▁establishment", - -11.558305740356445 - ], - [ - "▁blanc", - -11.558349609375 - ], - [ - "▁1960", - -11.558454513549805 - ], - [ - "▁parameters", - -11.55856990814209 - ], - [ - "schluss", - -11.558685302734375 - ], - [ - "▁jet", - -11.55879020690918 - ], - [ - "gam", - -11.55902099609375 - ], - [ - "▁oral", - -11.559290885925293 - ], - [ - "▁tons", - -11.559348106384277 - ], - [ - "▁AL", - -11.55935001373291 - ], - [ - "▁intention", - -11.55947494506836 - ], - [ - "ives", - -11.55974292755127 - ], - [ - "▁BMW", - -11.559837341308594 - ], - [ - "gun", - -11.559967041015625 - ], - [ - "leben", - -11.560046195983887 - ], - [ - "▁Fresh", - -11.56010913848877 - ], - [ - "▁tuturor", - -11.560193061828613 - ], - [ - "▁marine", - -11.560208320617676 - ], - [ - "mile", - -11.560260772705078 - ], - [ - "▁alta", - -11.560271263122559 - ], - [ - "nnen", - -11.56050968170166 - ], - [ - "▁courts", - -11.560530662536621 - ], - [ - "▁Hello", - -11.560791015625 - ], - [ - "BL", - -11.560895919799805 - ], - [ - "▁reply", - -11.560962677001953 - ], - [ - "environnement", - -11.560975074768066 - ], - [ - "American", - -11.560995101928711 - ], - [ - "▁Tell", - -11.561040878295898 - ], - [ - "▁chic", - -11.56148624420166 - ], - [ - "bir", - -11.561542510986328 - ], - [ - "▁singing", - -11.561788558959961 - ], - [ - "▁earnings", - -11.561819076538086 - ], - [ - "▁ensemble", - -11.562082290649414 - ], - [ - "▁($", - -11.562169075012207 - ], - [ - "▁Tout", - -11.562192916870117 - ], - [ - "▁Abs", - -11.562264442443848 - ], - [ - "▁describes", - -11.562322616577148 - ], - [ - "▁navigation", - -11.5625 - ], - [ - "▁destul", - -11.562532424926758 - ], - [ - "legate", - -11.562586784362793 - ], - [ - "tral", - -11.562599182128906 - ], - [ - "aţie", - -11.562753677368164 - ], - [ - "▁supplied", - -11.562775611877441 - ], - [ - "▁paar", - -11.562911987304688 - ], - [ - "ionat", - 
-11.563241958618164 - ], - [ - "9.", - -11.563263893127441 - ], - [ - "▁41", - -11.563348770141602 - ], - [ - "▁Track", - -11.563451766967773 - ], - [ - "▁happiness", - -11.563636779785156 - ], - [ - "▁Personen", - -11.563680648803711 - ], - [ - "▁sac", - -11.56373119354248 - ], - [ - "▁shapes", - -11.563774108886719 - ], - [ - "eld", - -11.56393051147461 - ], - [ - "bett", - -11.563963890075684 - ], - [ - "tile", - -11.56400203704834 - ], - [ - "▁divided", - -11.564035415649414 - ], - [ - "▁13.", - -11.56403923034668 - ], - [ - "market", - -11.564109802246094 - ], - [ - "crafted", - -11.564115524291992 - ], - [ - "▁periods", - -11.564120292663574 - ], - [ - "uş", - -11.564568519592285 - ], - [ - "▁trainer", - -11.56460952758789 - ], - [ - "▁Licht", - -11.564871788024902 - ], - [ - "▁advisor", - -11.564948081970215 - ], - [ - "▁Herr", - -11.564980506896973 - ], - [ - "▁Halloween", - -11.565147399902344 - ], - [ - "alter", - -11.565154075622559 - ], - [ - "▁radical", - -11.565155029296875 - ], - [ - "▁nose", - -11.56527042388916 - ], - [ - "▁Sat", - -11.565323829650879 - ], - [ - "▁Mom", - -11.565372467041016 - ], - [ - "moni", - -11.565377235412598 - ], - [ - "▁semn", - -11.565397262573242 - ], - [ - "vé", - -11.565672874450684 - ], - [ - "identifie", - -11.56570053100586 - ], - [ - "▁hatten", - -11.565957069396973 - ], - [ - "completing", - -11.565959930419922 - ], - [ - "▁gust", - -11.565963745117188 - ], - [ - "▁creat", - -11.56601333618164 - ], - [ - "ché", - -11.566075325012207 - ], - [ - "pay", - -11.566216468811035 - ], - [ - "▁Money", - -11.566229820251465 - ], - [ - "IG", - -11.566243171691895 - ], - [ - "▁Cash", - -11.566327095031738 - ], - [ - "altă", - -11.566420555114746 - ], - [ - "▁bekommen", - -11.566620826721191 - ], - [ - "▁43", - -11.56662654876709 - ], - [ - "▁supplement", - -11.566637992858887 - ], - [ - "▁Early", - -11.566754341125488 - ], - [ - "▁mattress", - -11.56692123413086 - ], - [ - "▁worn", - -11.567182540893555 - ], - [ - "rov", - 
-11.567197799682617 - ], - [ - "▁pray", - -11.56733226776123 - ], - [ - "▁beans", - -11.567673683166504 - ], - [ - "▁passé", - -11.567782402038574 - ], - [ - "▁facilit", - -11.56782054901123 - ], - [ - "▁meters", - -11.56784439086914 - ], - [ - "cke", - -11.568163871765137 - ], - [ - "▁Villa", - -11.568199157714844 - ], - [ - "▁Diego", - -11.568217277526855 - ], - [ - "▁chips", - -11.568244934082031 - ], - [ - "▁mes", - -11.568349838256836 - ], - [ - "▁Seattle", - -11.568421363830566 - ], - [ - "BU", - -11.568621635437012 - ], - [ - "▁nevoi", - -11.568714141845703 - ], - [ - "▁lets", - -11.568737030029297 - ], - [ - "▁hopefully", - -11.56894302368164 - ], - [ - "▁AG", - -11.568954467773438 - ], - [ - "liable", - -11.568999290466309 - ], - [ - "pound", - -11.569067001342773 - ], - [ - "près", - -11.569085121154785 - ], - [ - "arul", - -11.56920337677002 - ], - [ - "isiert", - -11.569281578063965 - ], - [ - "▁Expert", - -11.569297790527344 - ], - [ - "▁particulier", - -11.569367408752441 - ], - [ - "stoff", - -11.569952964782715 - ], - [ - "▁interpretation", - -11.56999397277832 - ], - [ - "După", - -11.57007884979248 - ], - [ - "sait", - -11.57011604309082 - ], - [ - "▁nouvelles", - -11.570173263549805 - ], - [ - "▁Ok", - -11.570175170898438 - ], - [ - "tap", - -11.570301055908203 - ], - [ - "▁targets", - -11.570327758789062 - ], - [ - "rung", - -11.57052230834961 - ], - [ - "▁stare", - -11.570576667785645 - ], - [ - "▁efficiently", - -11.570908546447754 - ], - [ - "EV", - -11.571003913879395 - ], - [ - "évit", - -11.571310997009277 - ], - [ - "▁Moldova", - -11.571542739868164 - ], - [ - "▁Face", - -11.571663856506348 - ], - [ - "▁flo", - -11.57168960571289 - ], - [ - "▁acestora", - -11.5717134475708 - ], - [ - "▁Victor", - -11.57183837890625 - ], - [ - "▁breed", - -11.57198429107666 - ], - [ - "morph", - -11.572230339050293 - ], - [ - "sley", - -11.572274208068848 - ], - [ - "mot", - -11.57234001159668 - ], - [ - "▁URL", - -11.572395324707031 - ], - [ - "ellen", - 
-11.572502136230469 - ], - [ - "▁resist", - -11.572781562805176 - ], - [ - "zon", - -11.57282829284668 - ], - [ - "ndel", - -11.572967529296875 - ], - [ - "will", - -11.572989463806152 - ], - [ - "▁alege", - -11.573076248168945 - ], - [ - "▁Easter", - -11.573114395141602 - ], - [ - "▁Bat", - -11.573190689086914 - ], - [ - "▁Höhe", - -11.573223114013672 - ], - [ - "▁fascinating", - -11.573387145996094 - ], - [ - "▁Know", - -11.5735445022583 - ], - [ - "illon", - -11.573602676391602 - ], - [ - "flex", - -11.57363224029541 - ], - [ - "who", - -11.573701858520508 - ], - [ - "▁Always", - -11.573729515075684 - ], - [ - "▁Bush", - -11.573777198791504 - ], - [ - "ICE", - -11.574009895324707 - ], - [ - "verein", - -11.57448673248291 - ], - [ - "▁später", - -11.57448959350586 - ], - [ - "▁cherch", - -11.574575424194336 - ], - [ - "makers", - -11.574753761291504 - ], - [ - "versus", - -11.574790954589844 - ], - [ - "▁Clear", - -11.574846267700195 - ], - [ - "▁Pennsylvania", - -11.574912071228027 - ], - [ - "Dieser", - -11.575041770935059 - ], - [ - "▁picking", - -11.575072288513184 - ], - [ - "▁restoration", - -11.57513427734375 - ], - [ - "▁interviews", - -11.575201988220215 - ], - [ - "pressed", - -11.575210571289062 - ], - [ - "nnerhalb", - -11.575674057006836 - ], - [ - "▁connecting", - -11.575834274291992 - ], - [ - "jou", - -11.575943946838379 - ], - [ - "▁react", - -11.576189041137695 - ], - [ - "▁Merci", - -11.576223373413086 - ], - [ - "▁Phone", - -11.576356887817383 - ], - [ - "▁1)", - -11.57652473449707 - ], - [ - "▁victims", - -11.576618194580078 - ], - [ - "▁Spo", - -11.576685905456543 - ], - [ - "atului", - -11.576735496520996 - ], - [ - "▁Harry", - -11.576837539672852 - ], - [ - "▁Sala", - -11.576875686645508 - ], - [ - "Pol", - -11.577075958251953 - ], - [ - "▁Clo", - -11.577167510986328 - ], - [ - "▁Erfolg", - -11.577211380004883 - ], - [ - "autour", - -11.577308654785156 - ], - [ - "▁Template", - -11.577314376831055 - ], - [ - "▁invention", - 
-11.57754898071289 - ], - [ - "▁schwer", - -11.57761287689209 - ], - [ - "vac", - -11.577625274658203 - ], - [ - "▁Trail", - -11.577627182006836 - ], - [ - "▁Vietnam", - -11.577638626098633 - ], - [ - "▁Size", - -11.577689170837402 - ], - [ - "▁Bern", - -11.577783584594727 - ], - [ - "▁emp", - -11.577845573425293 - ], - [ - "▁shake", - -11.57787799835205 - ], - [ - "▁Ave", - -11.57794189453125 - ], - [ - "▁productive", - -11.578009605407715 - ], - [ - "▁apple", - -11.578015327453613 - ], - [ - "▁portal", - -11.578052520751953 - ], - [ - "▁ceramic", - -11.578082084655762 - ], - [ - "▁pad", - -11.578110694885254 - ], - [ - "▁Syn", - -11.578316688537598 - ], - [ - "Ab", - -11.57845401763916 - ], - [ - "▁syn", - -11.578761100769043 - ], - [ - "find", - -11.578888893127441 - ], - [ - "▁settle", - -11.578909873962402 - ], - [ - "▁général", - -11.578965187072754 - ], - [ - "▁okay", - -11.579032897949219 - ], - [ - "▁receipt", - -11.57906436920166 - ], - [ - "orii", - -11.579117774963379 - ], - [ - "▁Mission", - -11.579122543334961 - ], - [ - "entrée", - -11.579304695129395 - ], - [ - "▁besteht", - -11.579394340515137 - ], - [ - "▁wisdom", - -11.57950210571289 - ], - [ - "▁heraus", - -11.579645156860352 - ], - [ - "▁balanced", - -11.579753875732422 - ], - [ - "▁habits", - -11.579773902893066 - ], - [ - "tang", - -11.579888343811035 - ], - [ - "ură", - -11.580151557922363 - ], - [ - "▁winners", - -11.580182075500488 - ], - [ - "ç", - -11.580215454101562 - ], - [ - "▁folosi", - -11.580242156982422 - ], - [ - "aliment", - -11.5802583694458 - ], - [ - "▁fiction", - -11.580373764038086 - ], - [ - "▁Spe", - -11.580534934997559 - ], - [ - "▁elsewhere", - -11.580663681030273 - ], - [ - "▁dependent", - -11.580808639526367 - ], - [ - "▁Anne", - -11.581167221069336 - ], - [ - "▁excellence", - -11.581695556640625 - ], - [ - "▁Feel", - -11.581753730773926 - ], - [ - "lieb", - -11.581811904907227 - ], - [ - "▁sectors", - -11.581865310668945 - ], - [ - "▁expir", - -11.581886291503906 - 
], - [ - "▁surfaces", - -11.58191204071045 - ], - [ - "▁minim", - -11.581937789916992 - ], - [ - "▁tumor", - -11.58204460144043 - ], - [ - "▁paragraph", - -11.582289695739746 - ], - [ - "▁disk", - -11.58232307434082 - ], - [ - "▁tonight", - -11.582379341125488 - ], - [ - "▁precious", - -11.582794189453125 - ], - [ - "▁console", - -11.58288288116455 - ], - [ - "Th", - -11.582939147949219 - ], - [ - "neu", - -11.583020210266113 - ], - [ - "effective", - -11.5839262008667 - ], - [ - "▁Republican", - -11.583944320678711 - ], - [ - "format", - -11.584297180175781 - ], - [ - "▁preserve", - -11.58436107635498 - ], - [ - "▁wiring", - -11.584599494934082 - ], - [ - "▁exercises", - -11.584757804870605 - ], - [ - "▁pregnancy", - -11.584774017333984 - ], - [ - "tries", - -11.58481502532959 - ], - [ - "▁jeunes", - -11.584883689880371 - ], - [ - "▁publishing", - -11.584932327270508 - ], - [ - "▁nehmen", - -11.584935188293457 - ], - [ - "▁capability", - -11.5849609375 - ], - [ - "▁prompt", - -11.584965705871582 - ], - [ - "▁Further", - -11.58497428894043 - ], - [ - "▁semaine", - -11.585173606872559 - ], - [ - "abo", - -11.585216522216797 - ], - [ - "▁evolution", - -11.585319519042969 - ], - [ - "▁Sud", - -11.585403442382812 - ], - [ - "▁frais", - -11.585525512695312 - ], - [ - "LT", - -11.585619926452637 - ], - [ - "▁stack", - -11.58581829071045 - ], - [ - "▁Inside", - -11.585854530334473 - ], - [ - "▁programmes", - -11.585997581481934 - ], - [ - "▁passes", - -11.586196899414062 - ], - [ - "mü", - -11.586474418640137 - ], - [ - "▁progressive", - -11.586518287658691 - ], - [ - "▁calculator", - -11.58658218383789 - ], - [ - "▁Core", - -11.586655616760254 - ], - [ - "BT", - -11.586956977844238 - ], - [ - "core", - -11.586996078491211 - ], - [ - "▁Moon", - -11.587004661560059 - ], - [ - "▁tender", - -11.587040901184082 - ], - [ - "durch", - -11.58721923828125 - ], - [ - "▁commune", - -11.587453842163086 - ], - [ - "▁Prince", - -11.587594032287598 - ], - [ - "▁demonstrated", - 
-11.587693214416504 - ], - [ - "▁conversations", - -11.587890625 - ], - [ - "▁fri", - -11.587984085083008 - ], - [ - "igh", - -11.587992668151855 - ], - [ - "being", - -11.588334083557129 - ], - [ - "pause", - -11.58853530883789 - ], - [ - "▁Bear", - -11.58871841430664 - ], - [ - "ayant", - -11.588875770568848 - ], - [ - "▁Industry", - -11.588967323303223 - ], - [ - "▁sponsor", - -11.589012145996094 - ], - [ - "▁numele", - -11.589098930358887 - ], - [ - "▁VA", - -11.589167594909668 - ], - [ - "▁Sommer", - -11.589366912841797 - ], - [ - "TB", - -11.589380264282227 - ], - [ - "▁optional", - -11.589505195617676 - ], - [ - "▁Landes", - -11.589812278747559 - ], - [ - "coli", - -11.589963912963867 - ], - [ - "empt", - -11.59018325805664 - ], - [ - "▁Iron", - -11.590620040893555 - ], - [ - "▁1992", - -11.59090518951416 - ], - [ - "▁attempts", - -11.59090518951416 - ], - [ - "halb", - -11.590960502624512 - ], - [ - "▁photographer", - -11.59097671508789 - ], - [ - "▁witness", - -11.59097957611084 - ], - [ - "bru", - -11.591073989868164 - ], - [ - "▁Ras", - -11.59107780456543 - ], - [ - "▁burden", - -11.591142654418945 - ], - [ - "▁kaufen", - -11.591256141662598 - ], - [ - "▁vu", - -11.591362953186035 - ], - [ - "▁Wedding", - -11.591601371765137 - ], - [ - "▁Kla", - -11.591604232788086 - ], - [ - "occasion", - -11.591915130615234 - ], - [ - "▁keys", - -11.592131614685059 - ], - [ - "▁oferi", - -11.592279434204102 - ], - [ - "▁puzzle", - -11.592302322387695 - ], - [ - "eaux", - -11.59254264831543 - ], - [ - "▁Eco", - -11.592805862426758 - ], - [ - "▁52", - -11.592817306518555 - ], - [ - "▁Elizabeth", - -11.59284496307373 - ], - [ - "▁dispose", - -11.593144416809082 - ], - [ - "▁cluster", - -11.59326171875 - ], - [ - "iki", - -11.593283653259277 - ], - [ - "▁Guys", - -11.593595504760742 - ], - [ - "▁Economic", - -11.593632698059082 - ], - [ - "▁apar", - -11.593677520751953 - ], - [ - "▁ziua", - -11.593688011169434 - ], - [ - "▁integral", - -11.593740463256836 - ], - [ - 
"▁tac", - -11.59376335144043 - ], - [ - "▁restrictions", - -11.593778610229492 - ], - [ - "▁nerve", - -11.593794822692871 - ], - [ - "▁Stop", - -11.59386157989502 - ], - [ - "burger", - -11.593897819519043 - ], - [ - "explo", - -11.593944549560547 - ], - [ - "lö", - -11.593958854675293 - ], - [ - "NP", - -11.594077110290527 - ], - [ - "▁Brook", - -11.59418773651123 - ], - [ - "▁Close", - -11.594278335571289 - ], - [ - "▁representing", - -11.59446907043457 - ], - [ - "▁certaine", - -11.594767570495605 - ], - [ - "▁discovery", - -11.594836235046387 - ], - [ - "▁rece", - -11.594964981079102 - ], - [ - "FF", - -11.594970703125 - ], - [ - "▁salary", - -11.595069885253906 - ], - [ - "▁Wolf", - -11.595137596130371 - ], - [ - "▁deserve", - -11.595166206359863 - ], - [ - "ţele", - -11.595417976379395 - ], - [ - "gathered", - -11.595934867858887 - ], - [ - "▁comply", - -11.59599494934082 - ], - [ - "lagen", - -11.596034049987793 - ], - [ - "ătoare", - -11.596192359924316 - ], - [ - "▁relate", - -11.596410751342773 - ], - [ - "▁Roger", - -11.59656810760498 - ], - [ - "▁blame", - -11.596575736999512 - ], - [ - "▁Jen", - -11.596914291381836 - ], - [ - "▁army", - -11.596936225891113 - ], - [ - "▁$10", - -11.597129821777344 - ], - [ - "▁Cabinet", - -11.597185134887695 - ], - [ - "Gu", - -11.597367286682129 - ], - [ - "▁wildlife", - -11.597452163696289 - ], - [ - "▁Memorial", - -11.597643852233887 - ], - [ - "▁Holiday", - -11.597742080688477 - ], - [ - "▁curat", - -11.598291397094727 - ], - [ - "iilor", - -11.598299026489258 - ], - [ - "▁fleet", - -11.598408699035645 - ], - [ - "▁reviewed", - -11.59843635559082 - ], - [ - "cet", - -11.598450660705566 - ], - [ - "▁virtually", - -11.598487854003906 - ], - [ - "▁Crusher", - -11.59852409362793 - ], - [ - "▁slide", - -11.59858226776123 - ], - [ - "▁générale", - -11.598604202270508 - ], - [ - "▁sensation", - -11.598630905151367 - ], - [ - "▁garlic", - -11.598638534545898 - ], - [ - "5)", - -11.598657608032227 - ], - [ - "▁batteries", - 
-11.598756790161133 - ], - [ - "SH", - -11.59876823425293 - ], - [ - "▁seller", - -11.59882926940918 - ], - [ - "design", - -11.598871231079102 - ], - [ - "5.", - -11.598944664001465 - ], - [ - "▁Overall", - -11.598969459533691 - ], - [ - "▁investigate", - -11.599058151245117 - ], - [ - "max", - -11.599064826965332 - ], - [ - "▁attach", - -11.599166870117188 - ], - [ - "▁Future", - -11.599209785461426 - ], - [ - "OUR", - -11.599284172058105 - ], - [ - "▁LE", - -11.59968090057373 - ], - [ - "▁bite", - -11.599811553955078 - ], - [ - "tige", - -11.599874496459961 - ], - [ - "▁twist", - -11.59987735748291 - ], - [ - "hole", - -11.600180625915527 - ], - [ - "▁Tony", - -11.600510597229004 - ], - [ - "LU", - -11.600598335266113 - ], - [ - "▁Organization", - -11.600617408752441 - ], - [ - "▁invit", - -11.600632667541504 - ], - [ - "▁Ant", - -11.600739479064941 - ], - [ - "NR", - -11.600788116455078 - ], - [ - "sorgt", - -11.600854873657227 - ], - [ - "▁Lan", - -11.600860595703125 - ], - [ - "▁Manchester", - -11.60091495513916 - ], - [ - "schrift", - -11.601066589355469 - ], - [ - "▁kg", - -11.601150512695312 - ], - [ - "▁aroma", - -11.60132884979248 - ], - [ - "▁Source", - -11.601388931274414 - ], - [ - "▁permite", - -11.601445198059082 - ], - [ - "▁Consider", - -11.601457595825195 - ], - [ - "▁Artist", - -11.601627349853516 - ], - [ - "▁transmit", - -11.601783752441406 - ], - [ - "oasa", - -11.601834297180176 - ], - [ - "▁Zen", - -11.60198974609375 - ], - [ - "ANT", - -11.602235794067383 - ], - [ - "▁consulting", - -11.602404594421387 - ], - [ - "▁commence", - -11.6025390625 - ], - [ - "▁quilt", - -11.60261058807373 - ], - [ - "owned", - -11.602642059326172 - ], - [ - "▁bro", - -11.602689743041992 - ], - [ - "▁integrate", - -11.602715492248535 - ], - [ - "▁Ontario", - -11.602775573730469 - ], - [ - "TF", - -11.602832794189453 - ], - [ - "▁Study", - -11.602887153625488 - ], - [ - "▁ensuite", - -11.603155136108398 - ], - [ - "itatii", - -11.603180885314941 - ], - [ - "Mon", 
- -11.603235244750977 - ], - [ - "-11", - -11.603299140930176 - ], - [ - "what", - -11.603384017944336 - ], - [ - "▁Things", - -11.60361385345459 - ], - [ - "▁Eye", - -11.603819847106934 - ], - [ - "▁présente", - -11.603828430175781 - ], - [ - "tention", - -11.603915214538574 - ], - [ - "|", - -11.603957176208496 - ], - [ - "stall", - -11.603963851928711 - ], - [ - "▁beef", - -11.603992462158203 - ], - [ - "figur", - -11.604005813598633 - ], - [ - "▁cancel", - -11.604146003723145 - ], - [ - "▁domeniul", - -11.604252815246582 - ], - [ - "▁360", - -11.604290008544922 - ], - [ - "▁sleeping", - -11.6045560836792 - ], - [ - "▁traitement", - -11.604580879211426 - ], - [ - "ühl", - -11.604769706726074 - ], - [ - "▁Environmental", - -11.604835510253906 - ], - [ - "cier", - -11.604894638061523 - ], - [ - "▁NC", - -11.604907035827637 - ], - [ - "pub", - -11.604925155639648 - ], - [ - "▁addiction", - -11.605071067810059 - ], - [ - "▁nest", - -11.605128288269043 - ], - [ - "▁ON", - -11.605395317077637 - ], - [ - "▁discrimin", - -11.605396270751953 - ], - [ - "▁proved", - -11.605517387390137 - ], - [ - "▁occasions", - -11.605864524841309 - ], - [ - "OH", - -11.606184959411621 - ], - [ - "▁lawyers", - -11.606203079223633 - ], - [ - "own", - -11.606290817260742 - ], - [ - "▁Meeting", - -11.606596946716309 - ], - [ - "▁Industrial", - -11.606704711914062 - ], - [ - "owed", - -11.606736183166504 - ], - [ - "▁Cel", - -11.606793403625488 - ], - [ - "legt", - -11.60706615447998 - ], - [ - "ily", - -11.607085227966309 - ], - [ - "▁wins", - -11.607155799865723 - ], - [ - "▁strap", - -11.607367515563965 - ], - [ - "digit", - -11.607441902160645 - ], - [ - "▁hinaus", - -11.607504844665527 - ], - [ - "mple", - -11.607712745666504 - ], - [ - "▁(5", - -11.607797622680664 - ], - [ - "▁pdf", - -11.607894897460938 - ], - [ - "▁eco", - -11.607915878295898 - ], - [ - "▁junior", - -11.608172416687012 - ], - [ - "DB", - -11.608556747436523 - ], - [ - "gelegt", - -11.608636856079102 - ], - [ - "ION", 
- -11.608678817749023 - ], - [ - "▁competitors", - -11.60880184173584 - ], - [ - "▁Arab", - -11.60898208618164 - ], - [ - "▁Secret", - -11.609148979187012 - ], - [ - "▁Kunst", - -11.609283447265625 - ], - [ - "▁worried", - -11.609297752380371 - ], - [ - "meiner", - -11.609378814697266 - ], - [ - "▁Magic", - -11.609450340270996 - ], - [ - "▁groß", - -11.609537124633789 - ], - [ - "▁travaux", - -11.609748840332031 - ], - [ - "▁sollen", - -11.609772682189941 - ], - [ - "▁Sciences", - -11.609850883483887 - ], - [ - "▁athletes", - -11.610055923461914 - ], - [ - "▁discounts", - -11.610079765319824 - ], - [ - "kit", - -11.610211372375488 - ], - [ - "lind", - -11.610305786132812 - ], - [ - "▁enjoyable", - -11.610421180725098 - ], - [ - "ground", - -11.610489845275879 - ], - [ - "▁Tat", - -11.610529899597168 - ], - [ - "▁passengers", - -11.610576629638672 - ], - [ - "▁Dami", - -11.610677719116211 - ], - [ - "▁Major", - -11.61070728302002 - ], - [ - "watch", - -11.610796928405762 - ], - [ - "working", - -11.610908508300781 - ], - [ - "arrêt", - -11.610923767089844 - ], - [ - "▁subtle", - -11.611069679260254 - ], - [ - "▁epi", - -11.611197471618652 - ], - [ - "▁Jahres", - -11.61128044128418 - ], - [ - "▁cooling", - -11.61141586303711 - ], - [ - "▁makeup", - -11.611427307128906 - ], - [ - "jet", - -11.611495018005371 - ], - [ - "▁Given", - -11.611519813537598 - ], - [ - "plex", - -11.61158275604248 - ], - [ - "▁exploit", - -11.611590385437012 - ], - [ - "rine", - -11.611604690551758 - ], - [ - "▁delivers", - -11.612122535705566 - ], - [ - "▁summary", - -11.612236022949219 - ], - [ - "▁beaches", - -11.612459182739258 - ], - [ - "lift", - -11.612550735473633 - ], - [ - "▁Suite", - -11.612554550170898 - ], - [ - "▁Assistant", - -11.612688064575195 - ], - [ - "▁taxi", - -11.61273193359375 - ], - [ - "▁peaceful", - -11.612805366516113 - ], - [ - "▁Mode", - -11.612980842590332 - ], - [ - "▁Fun", - -11.613059043884277 - ], - [ - "▁diameter", - -11.613142967224121 - ], - [ - 
"▁phrase", - -11.613150596618652 - ], - [ - "ACT", - -11.613265037536621 - ], - [ - "▁différentes", - -11.613322257995605 - ], - [ - "▁14.", - -11.613417625427246 - ], - [ - "▁CE", - -11.61352825164795 - ], - [ - "▁2)", - -11.613739013671875 - ], - [ - "▁Nat", - -11.613785743713379 - ], - [ - "▁delete", - -11.61388111114502 - ], - [ - "other", - -11.613930702209473 - ], - [ - "hang", - -11.613985061645508 - ], - [ - "▁sujet", - -11.614117622375488 - ], - [ - "▁precise", - -11.614212989807129 - ], - [ - "▁Total", - -11.614290237426758 - ], - [ - "▁chambre", - -11.614483833312988 - ], - [ - "sati", - -11.614666938781738 - ], - [ - "▁Metal", - -11.614995956420898 - ], - [ - "rust", - -11.615038871765137 - ], - [ - "▁Brazil", - -11.615508079528809 - ], - [ - "▁hybrid", - -11.615636825561523 - ], - [ - "ops", - -11.615691184997559 - ], - [ - "▁electro", - -11.615789413452148 - ], - [ - "utz", - -11.61608600616455 - ], - [ - "▁quoi", - -11.616246223449707 - ], - [ - "▁adoption", - -11.616331100463867 - ], - [ - "3.5", - -11.616518020629883 - ], - [ - "50,000", - -11.616599082946777 - ], - [ - "veti", - -11.616630554199219 - ], - [ - "hir", - -11.616957664489746 - ], - [ - "▁adequate", - -11.617067337036133 - ], - [ - "ologist", - -11.617109298706055 - ], - [ - "torii", - -11.617295265197754 - ], - [ - "wasser", - -11.617355346679688 - ], - [ - "▁Authority", - -11.617362976074219 - ], - [ - "▁donation", - -11.617364883422852 - ], - [ - "700", - -11.617375373840332 - ], - [ - "▁somehow", - -11.617375373840332 - ], - [ - "▁kostenlos", - -11.617425918579102 - ], - [ - "▁generations", - -11.617537498474121 - ], - [ - "▁Turkey", - -11.617711067199707 - ], - [ - "rata", - -11.617819786071777 - ], - [ - "▁animation", - -11.618206024169922 - ], - [ - "▁CH", - -11.618281364440918 - ], - [ - "ending", - -11.618317604064941 - ], - [ - "welt", - -11.618376731872559 - ], - [ - "bac", - -11.618380546569824 - ], - [ - "MG", - -11.618460655212402 - ], - [ - "▁parks", - 
-11.618468284606934 - ], - [ - "▁placing", - -11.618870735168457 - ], - [ - "sort", - -11.61915111541748 - ], - [ - "▁Bitcoin", - -11.619163513183594 - ], - [ - "▁disorder", - -11.619282722473145 - ], - [ - "MAN", - -11.619302749633789 - ], - [ - "aught", - -11.619412422180176 - ], - [ - "▁guides", - -11.61956787109375 - ], - [ - "▁circul", - -11.619651794433594 - ], - [ - "▁Steven", - -11.619954109191895 - ], - [ - "rrière", - -11.619976997375488 - ], - [ - "▁Arch", - -11.61999225616455 - ], - [ - "▁plates", - -11.620091438293457 - ], - [ - "MR", - -11.620118141174316 - ], - [ - "▁cow", - -11.620142936706543 - ], - [ - "▁integrity", - -11.620210647583008 - ], - [ - "▁(18", - -11.620217323303223 - ], - [ - "▁totul", - -11.62024211883545 - ], - [ - "jack", - -11.620373725891113 - ], - [ - "▁privire", - -11.620588302612305 - ], - [ - "▁terme", - -11.620752334594727 - ], - [ - "▁execution", - -11.620781898498535 - ], - [ - "▁organism", - -11.620838165283203 - ], - [ - "▁führen", - -11.620853424072266 - ], - [ - "▁patron", - -11.620940208435059 - ], - [ - "▁appreciated", - -11.62096881866455 - ], - [ - "liant", - -11.62100601196289 - ], - [ - "▁Solar", - -11.621055603027344 - ], - [ - "▁vinyl", - -11.621134757995605 - ], - [ - "▁treasure", - -11.621137619018555 - ], - [ - "▁retro", - -11.621167182922363 - ], - [ - "▁bout", - -11.621174812316895 - ], - [ - "lab", - -11.621183395385742 - ], - [ - "▁dimension", - -11.621394157409668 - ], - [ - "called", - -11.62146282196045 - ], - [ - "▁intern", - -11.621479034423828 - ], - [ - "issement", - -11.62173843383789 - ], - [ - "▁Erst", - -11.621837615966797 - ], - [ - "▁stellen", - -11.621920585632324 - ], - [ - "▁familia", - -11.622069358825684 - ], - [ - "▁notion", - -11.622176170349121 - ], - [ - "▁Could", - -11.622322082519531 - ], - [ - "Getting", - -11.622323036193848 - ], - [ - "▁drives", - -11.622397422790527 - ], - [ - "▁Israeli", - -11.622520446777344 - ], - [ - "▁nations", - -11.622546195983887 - ], - [ - "▁duties", 
- -11.622700691223145 - ], - [ - "▁personalized", - -11.622788429260254 - ], - [ - "▁weren", - -11.62282657623291 - ], - [ - "▁chemicals", - -11.622847557067871 - ], - [ - "▁killing", - -11.622913360595703 - ], - [ - "▁masa", - -11.622994422912598 - ], - [ - "▁parce", - -11.623026847839355 - ], - [ - "▁lady", - -11.623178482055664 - ], - [ - "ides", - -11.623221397399902 - ], - [ - "▁execut", - -11.62340259552002 - ], - [ - "▁floral", - -11.62341594696045 - ], - [ - "▁Child", - -11.623428344726562 - ], - [ - "▁medal", - -11.623503684997559 - ], - [ - "▁casa", - -11.623603820800781 - ], - [ - "▁enabled", - -11.623650550842285 - ], - [ - "12.", - -11.624239921569824 - ], - [ - "nger", - -11.624266624450684 - ], - [ - "▁vent", - -11.624297142028809 - ], - [ - "▁urmă", - -11.624727249145508 - ], - [ - "▁Herz", - -11.624835968017578 - ], - [ - "▁Jay", - -11.624916076660156 - ], - [ - ".....", - -11.624942779541016 - ], - [ - "▁Kris", - -11.62499713897705 - ], - [ - "kenn", - -11.625001907348633 - ], - [ - "ress", - -11.625027656555176 - ], - [ - "weight", - -11.62519359588623 - ], - [ - "▁indicates", - -11.625198364257812 - ], - [ - "▁mentor", - -11.625328063964844 - ], - [ - "using", - -11.625386238098145 - ], - [ - "▁femmes", - -11.625460624694824 - ], - [ - "▁Jung", - -11.625528335571289 - ], - [ - "▁Send", - -11.625574111938477 - ], - [ - "▁seasons", - -11.625906944274902 - ], - [ - "▁aesthetic", - -11.625964164733887 - ], - [ - "▁Block", - -11.626086235046387 - ], - [ - "▁babies", - -11.626150131225586 - ], - [ - "zig", - -11.626242637634277 - ], - [ - "edge", - -11.626428604125977 - ], - [ - "▁alike", - -11.626458168029785 - ], - [ - "▁immune", - -11.626609802246094 - ], - [ - "▁magical", - -11.626710891723633 - ], - [ - "▁Snow", - -11.626748085021973 - ], - [ - "▁spacious", - -11.627058982849121 - ], - [ - "▁Melbourne", - -11.62706184387207 - ], - [ - "order", - -11.627081871032715 - ], - [ - "▁timing", - -11.627176284790039 - ], - [ - "▁inainte", - 
-11.627220153808594 - ], - [ - "▁width", - -11.627327919006348 - ], - [ - "bild", - -11.627386093139648 - ], - [ - "Tra", - -11.627429008483887 - ], - [ - "▁appliances", - -11.627449989318848 - ], - [ - "▁dirt", - -11.627498626708984 - ], - [ - "▁Rent", - -11.627689361572266 - ], - [ - "responsibilities", - -11.627747535705566 - ], - [ - "▁blogs", - -11.62778377532959 - ], - [ - "nächsten", - -11.627799034118652 - ], - [ - "▁argue", - -11.627928733825684 - ], - [ - "▁Resume", - -11.627985954284668 - ], - [ - "▁Michel", - -11.628044128417969 - ], - [ - "▁terrible", - -11.628092765808105 - ], - [ - "graph", - -11.628151893615723 - ], - [ - "bird", - -11.628202438354492 - ], - [ - "▁Simple", - -11.628457069396973 - ], - [ - "nning", - -11.628658294677734 - ], - [ - "▁coconut", - -11.628683090209961 - ], - [ - "▁comprise", - -11.628787994384766 - ], - [ - "heure", - -11.628918647766113 - ], - [ - "▁nichts", - -11.628921508789062 - ], - [ - "▁manufacture", - -11.628966331481934 - ], - [ - "▁Sar", - -11.629011154174805 - ], - [ - "green", - -11.629014015197754 - ], - [ - "lining", - -11.62910270690918 - ], - [ - "▁tremendous", - -11.629128456115723 - ], - [ - "▁Wine", - -11.629164695739746 - ], - [ - "gir", - -11.629290580749512 - ], - [ - "▁Nothing", - -11.629562377929688 - ], - [ - "▁Miller", - -11.62957763671875 - ], - [ - "▁Schwe", - -11.629712104797363 - ], - [ - "zone", - -11.629942893981934 - ], - [ - "▁cunoscut", - -11.629964828491211 - ], - [ - "rupt", - -11.630166053771973 - ], - [ - "kle", - -11.630187034606934 - ], - [ - "▁Bucuresti", - -11.630510330200195 - ], - [ - "▁Abend", - -11.630574226379395 - ], - [ - "▁aura", - -11.630583763122559 - ], - [ - "▁Dance", - -11.63073444366455 - ], - [ - "▁Wilson", - -11.63086986541748 - ], - [ - "icide", - -11.630901336669922 - ], - [ - "bai", - -11.630910873413086 - ], - [ - "oriented", - -11.63103199005127 - ], - [ - "▁celebrated", - -11.631421089172363 - ], - [ - "schlag", - -11.631531715393066 - ], - [ - "▁10-", - 
-11.631600379943848 - ], - [ - "Unsere", - -11.63167667388916 - ], - [ - "énergie", - -11.632009506225586 - ], - [ - "▁qualify", - -11.63205623626709 - ], - [ - "▁contenu", - -11.632177352905273 - ], - [ - "▁Lauf", - -11.63220500946045 - ], - [ - "▁einzelne", - -11.632360458374023 - ], - [ - "▁Youth", - -11.632415771484375 - ], - [ - "explains", - -11.632601737976074 - ], - [ - "grat", - -11.632782936096191 - ], - [ - "▁72", - -11.632804870605469 - ], - [ - "labor", - -11.632885932922363 - ], - [ - "2018", - -11.632940292358398 - ], - [ - "▁Dank", - -11.633149147033691 - ], - [ - "▁Hey", - -11.633523941040039 - ], - [ - "▁refuse", - -11.633536338806152 - ], - [ - "▁graduated", - -11.633599281311035 - ], - [ - "▁României", - -11.633627891540527 - ], - [ - "punkt", - -11.633807182312012 - ], - [ - "▁regulation", - -11.633834838867188 - ], - [ - "Bru", - -11.633842468261719 - ], - [ - "▁Side", - -11.633891105651855 - ], - [ - "▁sol", - -11.633970260620117 - ], - [ - "▁extraordinary", - -11.634182929992676 - ], - [ - "▁ging", - -11.634247779846191 - ], - [ - "▁Creative", - -11.634299278259277 - ], - [ - "▁expanding", - -11.634349822998047 - ], - [ - "▁problème", - -11.63444995880127 - ], - [ - "▁Reserve", - -11.63459300994873 - ], - [ - "auteur", - -11.634642601013184 - ], - [ - "sphere", - -11.634657859802246 - ], - [ - "season", - -11.634716987609863 - ], - [ - "frei", - -11.634756088256836 - ], - [ - "▁8,", - -11.634765625 - ], - [ - "▁filing", - -11.634810447692871 - ], - [ - "▁Complete", - -11.635017395019531 - ], - [ - "▁revolution", - -11.635035514831543 - ], - [ - "▁unele", - -11.63520622253418 - ], - [ - "/8", - -11.635272979736328 - ], - [ - "istes", - -11.635310173034668 - ], - [ - "backed", - -11.635400772094727 - ], - [ - "shirt", - -11.635554313659668 - ], - [ - "▁Details", - -11.635673522949219 - ], - [ - "rod", - -11.635695457458496 - ], - [ - "▁pod", - -11.63582992553711 - ], - [ - "▁operators", - -11.635921478271484 - ], - [ - "was", - 
-11.635930061340332 - ], - [ - "hou", - -11.63594913482666 - ], - [ - "▁Coach", - -11.636075019836426 - ], - [ - "irii", - -11.636138916015625 - ], - [ - "▁ordinary", - -11.636186599731445 - ], - [ - "Institut", - -11.63620662689209 - ], - [ - "▁Flash", - -11.63633918762207 - ], - [ - "0-", - -11.636537551879883 - ], - [ - "▁flavour", - -11.6367769241333 - ], - [ - "specific", - -11.636906623840332 - ], - [ - "▁landing", - -11.636930465698242 - ], - [ - "▁geo", - -11.636935234069824 - ], - [ - "▁legend", - -11.636983871459961 - ], - [ - "vari", - -11.63703441619873 - ], - [ - "rop", - -11.637084007263184 - ], - [ - "▁Excel", - -11.6370849609375 - ], - [ - "▁Flu", - -11.637203216552734 - ], - [ - "▁intent", - -11.637582778930664 - ], - [ - "▁Deep", - -11.637594223022461 - ], - [ - "▁Kor", - -11.63763427734375 - ], - [ - "▁Philadelphia", - -11.637914657592773 - ], - [ - "▁rând", - -11.63800048828125 - ], - [ - "▁USD", - -11.638033866882324 - ], - [ - "laden", - -11.63803482055664 - ], - [ - "▁Hin", - -11.638047218322754 - ], - [ - "hap", - -11.638197898864746 - ], - [ - "▁thorough", - -11.638227462768555 - ], - [ - "▁oferit", - -11.63826847076416 - ], - [ - "kind", - -11.63831615447998 - ], - [ - "▁Cancer", - -11.638428688049316 - ], - [ - "apo", - -11.638596534729004 - ], - [ - "▁valve", - -11.638650894165039 - ], - [ - "▁encouraging", - -11.63884449005127 - ], - [ - "▁sûr", - -11.638904571533203 - ], - [ - "shing", - -11.638981819152832 - ], - [ - "▁49", - -11.639132499694824 - ], - [ - "gov", - -11.639142990112305 - ], - [ - "▁Five", - -11.63933277130127 - ], - [ - "▁stroke", - -11.639344215393066 - ], - [ - "▁apă", - -11.639398574829102 - ], - [ - "▁gambling", - -11.639543533325195 - ], - [ - "▁nord", - -11.63963508605957 - ], - [ - "onal", - -11.639691352844238 - ], - [ - "▁captured", - -11.63979721069336 - ], - [ - "▁lucruri", - -11.640068054199219 - ], - [ - "serait", - -11.640192985534668 - ], - [ - "▁Members", - -11.640265464782715 - ], - [ - "ital", - 
-11.640275955200195 - ], - [ - "▁mounted", - -11.640475273132324 - ], - [ - "▁opens", - -11.640792846679688 - ], - [ - "▁Marie", - -11.640861511230469 - ], - [ - "Tech", - -11.640902519226074 - ], - [ - "▁wishes", - -11.641016006469727 - ], - [ - "▁regards", - -11.641073226928711 - ], - [ - "going", - -11.641156196594238 - ], - [ - "Opti", - -11.641250610351562 - ], - [ - "▁femei", - -11.641331672668457 - ], - [ - "▁Fish", - -11.64142894744873 - ], - [ - "▁mount", - -11.641800880432129 - ], - [ - "▁Hunt", - -11.641887664794922 - ], - [ - "▁probabil", - -11.64205265045166 - ], - [ - "▁assured", - -11.642191886901855 - ], - [ - "pho", - -11.642230033874512 - ], - [ - "▁manufactured", - -11.642313003540039 - ], - [ - "▁realistic", - -11.642437934875488 - ], - [ - "ații", - -11.642580032348633 - ], - [ - "▁Planning", - -11.642598152160645 - ], - [ - "▁român", - -11.642645835876465 - ], - [ - "ggy", - -11.642669677734375 - ], - [ - "▁produces", - -11.642696380615234 - ], - [ - "▁reminder", - -11.64284896850586 - ], - [ - "TION", - -11.642868041992188 - ], - [ - "▁brake", - -11.642909049987793 - ], - [ - "▁pla", - -11.643172264099121 - ], - [ - "▁Premium", - -11.643270492553711 - ], - [ - "▁carb", - -11.643310546875 - ], - [ - "▁shine", - -11.643390655517578 - ], - [ - "▁carrier", - -11.643492698669434 - ], - [ - "▁poverty", - -11.64350414276123 - ], - [ - "▁effectiveness", - -11.6436128616333 - ], - [ - "administr", - -11.643655776977539 - ], - [ - "▁Chamber", - -11.643658638000488 - ], - [ - "▁suntem", - -11.64376163482666 - ], - [ - "▁noastră", - -11.643855094909668 - ], - [ - "▁sofort", - -11.643877983093262 - ], - [ - "▁moisture", - -11.644058227539062 - ], - [ - "limb", - -11.6441011428833 - ], - [ - "entre", - -11.644328117370605 - ], - [ - "▁SD", - -11.644330978393555 - ], - [ - "▁BC", - -11.644539833068848 - ], - [ - "▁selecting", - -11.6445951461792 - ], - [ - "achieving", - -11.644673347473145 - ], - [ - "info", - -11.644735336303711 - ], - [ - "▁membres", - 
-11.644983291625977 - ], - [ - "▁shoe", - -11.645014762878418 - ], - [ - "▁locate", - -11.645065307617188 - ], - [ - "▁assignment", - -11.645085334777832 - ], - [ - "lern", - -11.645283699035645 - ], - [ - "▁defeat", - -11.645406723022461 - ], - [ - "▁endless", - -11.645458221435547 - ], - [ - "▁Stunden", - -11.645523071289062 - ], - [ - "то", - -11.645561218261719 - ], - [ - "▁mur", - -11.645586013793945 - ], - [ - "▁wissen", - -11.645844459533691 - ], - [ - "aime", - -11.645915031433105 - ], - [ - "1-2", - -11.646056175231934 - ], - [ - "▁femme", - -11.646212577819824 - ], - [ - "robe", - -11.646468162536621 - ], - [ - "▁embrace", - -11.64647102355957 - ], - [ - "▁baseball", - -11.646614074707031 - ], - [ - "▁hunting", - -11.64663314819336 - ], - [ - "betrieb", - -11.646790504455566 - ], - [ - "▁gardens", - -11.647045135498047 - ], - [ - "▁risc", - -11.647096633911133 - ], - [ - "▁Cri", - -11.647263526916504 - ], - [ - "best", - -11.647506713867188 - ], - [ - "▁Audio", - -11.647621154785156 - ], - [ - "▁intens", - -11.647659301757812 - ], - [ - "▁Round", - -11.647744178771973 - ], - [ - "▁fireplace", - -11.6478271484375 - ], - [ - "▁dozen", - -11.647912979125977 - ], - [ - "▁hospitals", - -11.64802360534668 - ], - [ - "▁profits", - -11.648076057434082 - ], - [ - "▁Mail", - -11.64811897277832 - ], - [ - "obtenir", - -11.648191452026367 - ], - [ - "▁Ross", - -11.648241996765137 - ], - [ - "bun", - -11.648573875427246 - ], - [ - "polar", - -11.648688316345215 - ], - [ - "▁reflection", - -11.648873329162598 - ], - [ - "▁fut", - -11.648992538452148 - ], - [ - "phon", - -11.649017333984375 - ], - [ - "deck", - -11.649094581604004 - ], - [ - "renowned", - -11.649188041687012 - ], - [ - "▁cate", - -11.649308204650879 - ], - [ - "▁decorative", - -11.6494722366333 - ], - [ - "ieri", - -11.64957332611084 - ], - [ - "▁Tap", - -11.64958381652832 - ], - [ - "▁Dallas", - -11.649600982666016 - ], - [ - "rik", - -11.649665832519531 - ], - [ - "▁pied", - -11.649727821350098 - ], - 
[ - "rés", - -11.649821281433105 - ], - [ - "ppy", - -11.650137901306152 - ], - [ - "▁bitte", - -11.650188446044922 - ], - [ - "▁cave", - -11.650257110595703 - ], - [ - "▁rescue", - -11.650559425354004 - ], - [ - "▁Hilfe", - -11.650714874267578 - ], - [ - "▁Jason", - -11.650786399841309 - ], - [ - "▁Nations", - -11.650838851928711 - ], - [ - "▁profil", - -11.650938987731934 - ], - [ - "▁Atlantic", - -11.651105880737305 - ], - [ - "▁rub", - -11.651126861572266 - ], - [ - "▁collaborative", - -11.65113353729248 - ], - [ - "étude", - -11.651150703430176 - ], - [ - "▁Workshop", - -11.651389122009277 - ], - [ - "nez", - -11.651628494262695 - ], - [ - "▁chacun", - -11.651714324951172 - ], - [ - "▁Too", - -11.65211296081543 - ], - [ - "App", - -11.652313232421875 - ], - [ - "▁conseil", - -11.652399063110352 - ], - [ - "▁signals", - -11.652474403381348 - ], - [ - "▁Dead", - -11.652497291564941 - ], - [ - "▁Austria", - -11.652522087097168 - ], - [ - "▁slots", - -11.652579307556152 - ], - [ - "▁Dies", - -11.652623176574707 - ], - [ - "raj", - -11.652629852294922 - ], - [ - "stick", - -11.652833938598633 - ], - [ - "▁jaw", - -11.653030395507812 - ], - [ - "▁lounge", - -11.653059005737305 - ], - [ - "curi", - -11.653359413146973 - ], - [ - "nem", - -11.653456687927246 - ], - [ - "▁Cluj", - -11.653512954711914 - ], - [ - "▁rapide", - -11.653584480285645 - ], - [ - "▁companion", - -11.653716087341309 - ], - [ - "▁WE", - -11.653879165649414 - ], - [ - "▁bord", - -11.65389347076416 - ], - [ - "ody", - -11.654045104980469 - ], - [ - "gru", - -11.654057502746582 - ], - [ - "▁46", - -11.654410362243652 - ], - [ - "kra", - -11.654717445373535 - ], - [ - "eller", - -11.65477180480957 - ], - [ - "naire", - -11.65511703491211 - ], - [ - "hose", - -11.655253410339355 - ], - [ - "▁Atlanta", - -11.655254364013672 - ], - [ - "▁violent", - -11.65530776977539 - ], - [ - "▁imagination", - -11.655352592468262 - ], - [ - "▁reward", - -11.655389785766602 - ], - [ - "▁Korean", - -11.655441284179688 
- ], - [ - "▁branches", - -11.655501365661621 - ], - [ - "▁GPS", - -11.655625343322754 - ], - [ - "glo", - -11.655633926391602 - ], - [ - "▁condo", - -11.655705451965332 - ], - [ - "▁Investment", - -11.655765533447266 - ], - [ - "▁involvement", - -11.655813217163086 - ], - [ - "▁trap", - -11.655829429626465 - ], - [ - "▁schön", - -11.655872344970703 - ], - [ - "▁ofera", - -11.655933380126953 - ], - [ - "▁unterschiedlich", - -11.65596866607666 - ], - [ - "Net", - -11.655987739562988 - ], - [ - "▁predict", - -11.656113624572754 - ], - [ - "identifying", - -11.656309127807617 - ], - [ - "▁noir", - -11.6566162109375 - ], - [ - "kos", - -11.656816482543945 - ], - [ - "poz", - -11.656816482543945 - ], - [ - "▁11,", - -11.65698528289795 - ], - [ - "▁fitted", - -11.657384872436523 - ], - [ - "MU", - -11.657469749450684 - ], - [ - "TT", - -11.657645225524902 - ], - [ - "▁vrea", - -11.657846450805664 - ], - [ - "▁wound", - -11.657864570617676 - ], - [ - "lac", - -11.657971382141113 - ], - [ - "▁purchases", - -11.658409118652344 - ], - [ - "▁Cape", - -11.65843677520752 - ], - [ - "▁Foto", - -11.658537864685059 - ], - [ - "▁acres", - -11.65865707397461 - ], - [ - "▁nec", - -11.658677101135254 - ], - [ - "▁burning", - -11.659050941467285 - ], - [ - "conf", - -11.659457206726074 - ], - [ - "▁browse", - -11.659486770629883 - ], - [ - "ural", - -11.659762382507324 - ], - [ - "▁Ah", - -11.659841537475586 - ], - [ - "▁stellt", - -11.65992259979248 - ], - [ - "▁ratings", - -11.660012245178223 - ], - [ - "▁Bowl", - -11.660027503967285 - ], - [ - "▁grav", - -11.660289764404297 - ], - [ - "titi", - -11.66048526763916 - ], - [ - "▁prêt", - -11.66075325012207 - ], - [ - "▁fallen", - -11.660818099975586 - ], - [ - "▁nombreuses", - -11.660940170288086 - ], - [ - "train", - -11.660953521728516 - ], - [ - "ène", - -11.661009788513184 - ], - [ - "Aceasta", - -11.661091804504395 - ], - [ - "▁drill", - -11.661421775817871 - ], - [ - "▁Exam", - -11.661477088928223 - ], - [ - "▁Furniture", - 
-11.661651611328125 - ], - [ - "eanu", - -11.661919593811035 - ], - [ - "étant", - -11.66230297088623 - ], - [ - "sville", - -11.662391662597656 - ], - [ - "▁swim", - -11.662796020507812 - ], - [ - "▁routes", - -11.662826538085938 - ], - [ - "INE", - -11.662860870361328 - ], - [ - "▁Por", - -11.662976264953613 - ], - [ - "ither", - -11.663168907165527 - ], - [ - "▁optim", - -11.663180351257324 - ], - [ - "▁lua", - -11.66331958770752 - ], - [ - "▁myth", - -11.663491249084473 - ], - [ - "▁Bett", - -11.6635103225708 - ], - [ - "chim", - -11.66355037689209 - ], - [ - "▁cyber", - -11.663553237915039 - ], - [ - "▁engineer", - -11.663825035095215 - ], - [ - "▁exploration", - -11.663918495178223 - ], - [ - "arranged", - -11.663973808288574 - ], - [ - "▁aged", - -11.663993835449219 - ], - [ - "▁beau", - -11.664024353027344 - ], - [ - "OUT", - -11.66402530670166 - ], - [ - "▁Minnesota", - -11.664031982421875 - ], - [ - "tress", - -11.664407730102539 - ], - [ - "▁Commercial", - -11.664509773254395 - ], - [ - "▁inspiring", - -11.66462516784668 - ], - [ - "▁Mare", - -11.664725303649902 - ], - [ - "apa", - -11.665140151977539 - ], - [ - "▁ignore", - -11.6651611328125 - ], - [ - "▁gros", - -11.665186882019043 - ], - [ - "▁measurement", - -11.66531753540039 - ], - [ - "ager", - -11.665395736694336 - ], - [ - "intele", - -11.665966987609863 - ], - [ - "▁suspension", - -11.666180610656738 - ], - [ - "▁cultures", - -11.666211128234863 - ], - [ - "▁Wow", - -11.666231155395508 - ], - [ - "▁pushing", - -11.666363716125488 - ], - [ - "▁bands", - -11.666438102722168 - ], - [ - "nage", - -11.666450500488281 - ], - [ - "▁Math", - -11.666515350341797 - ], - [ - "comb", - -11.66658878326416 - ], - [ - "▁créer", - -11.66658878326416 - ], - [ - "▁Lewis", - -11.666685104370117 - ], - [ - "▁VI", - -11.66678524017334 - ], - [ - "emploi", - -11.666791915893555 - ], - [ - "▁elections", - -11.666890144348145 - ], - [ - "▁logic", - -11.666982650756836 - ], - [ - "▁unlike", - -11.667122840881348 - ], - 
[ - "▁Matthew", - -11.66743278503418 - ], - [ - "▁pă", - -11.667486190795898 - ], - [ - "oxy", - -11.667620658874512 - ], - [ - "équipe", - -11.667717933654785 - ], - [ - "▁worden", - -11.668088912963867 - ], - [ - "dev", - -11.668258666992188 - ], - [ - "▁Massachusetts", - -11.668691635131836 - ], - [ - "▁Return", - -11.668695449829102 - ], - [ - "▁Friends", - -11.66891098022461 - ], - [ - "▁movements", - -11.66894245147705 - ], - [ - "chie", - -11.668964385986328 - ], - [ - "rak", - -11.669017791748047 - ], - [ - "▁Fit", - -11.66904354095459 - ], - [ - "▁copil", - -11.669113159179688 - ], - [ - "iunii", - -11.669188499450684 - ], - [ - "▁intensive", - -11.669234275817871 - ], - [ - "▁rug", - -11.669452667236328 - ], - [ - "lichkeit", - -11.669686317443848 - ], - [ - "kov", - -11.669724464416504 - ], - [ - "▁pense", - -11.66978645324707 - ], - [ - "pop", - -11.66978931427002 - ], - [ - "▁closet", - -11.669865608215332 - ], - [ - "▁prevention", - -11.669920921325684 - ], - [ - "▁Deb", - -11.670256614685059 - ], - [ - "▁devant", - -11.670430183410645 - ], - [ - "▁construit", - -11.670440673828125 - ], - [ - "▁breaks", - -11.67082405090332 - ], - [ - "otic", - -11.670886993408203 - ], - [ - "▁dig", - -11.67088794708252 - ], - [ - "▁près", - -11.670930862426758 - ], - [ - "chte", - -11.671029090881348 - ], - [ - "▁Chat", - -11.671029090881348 - ], - [ - "wel", - -11.671219825744629 - ], - [ - "▁edges", - -11.671272277832031 - ], - [ - "▁keen", - -11.671419143676758 - ], - [ - "▁infant", - -11.671716690063477 - ], - [ - "▁Hills", - -11.6719388961792 - ], - [ - "▁grounds", - -11.671969413757324 - ], - [ - "▁hab", - -11.672039031982422 - ], - [ - "▁Mun", - -11.67215347290039 - ], - [ - "▁references", - -11.672215461730957 - ], - [ - "▁hearts", - -11.672446250915527 - ], - [ - "exprim", - -11.672487258911133 - ], - [ - "▁tratament", - -11.672553062438965 - ], - [ - "LD", - -11.67258358001709 - ], - [ - "ssel", - -11.67275333404541 - ], - [ - "cover", - -11.672782897949219 
- ], - [ - "bridge", - -11.672837257385254 - ], - [ - "▁Wein", - -11.672924995422363 - ], - [ - "▁voiture", - -11.673035621643066 - ], - [ - "▁Gemeinde", - -11.67313289642334 - ], - [ - "AI", - -11.673169136047363 - ], - [ - "▁renovation", - -11.673264503479004 - ], - [ - "bid", - -11.673285484313965 - ], - [ - "▁Reading", - -11.673481941223145 - ], - [ - "▁Gor", - -11.673490524291992 - ], - [ - "fur", - -11.673527717590332 - ], - [ - "▁Yoga", - -11.673544883728027 - ], - [ - "▁exclusively", - -11.673630714416504 - ], - [ - "▁emissions", - -11.67385482788086 - ], - [ - "ète", - -11.673905372619629 - ], - [ - "▁glasses", - -11.674055099487305 - ], - [ - "▁organizat", - -11.674135208129883 - ], - [ - "▁washing", - -11.67415714263916 - ], - [ - "▁Audi", - -11.674173355102539 - ], - [ - "▁Labor", - -11.674331665039062 - ], - [ - "▁legacy", - -11.674381256103516 - ], - [ - "▁abstract", - -11.674519538879395 - ], - [ - "▁knowledgeable", - -11.674601554870605 - ], - [ - "▁Glo", - -11.674795150756836 - ], - [ - "▁pregnant", - -11.67481803894043 - ], - [ - "liter", - -11.674851417541504 - ], - [ - "▁paintings", - -11.67522144317627 - ], - [ - "▁tête", - -11.675244331359863 - ], - [ - "voy", - -11.675626754760742 - ], - [ - "▁Jacob", - -11.675667762756348 - ], - [ - "▁dressing", - -11.675679206848145 - ], - [ - "▁provisions", - -11.675768852233887 - ], - [ - "bahn", - -11.675870895385742 - ], - [ - "▁depict", - -11.675875663757324 - ], - [ - "AW", - -11.676068305969238 - ], - [ - "▁bleibt", - -11.676163673400879 - ], - [ - "AND", - -11.676292419433594 - ], - [ - "▁fünf", - -11.676386833190918 - ], - [ - "▁hosts", - -11.676426887512207 - ], - [ - "vas", - -11.676708221435547 - ], - [ - "DO", - -11.67674732208252 - ], - [ - "▁max", - -11.676753997802734 - ], - [ - "▁contributed", - -11.676774978637695 - ], - [ - "roz", - -11.676796913146973 - ], - [ - "▁deschis", - -11.676800727844238 - ], - [ - "itaire", - -11.676809310913086 - ], - [ - "tube", - -11.676959991455078 - ], - [ 
- "▁Beck", - -11.676959991455078 - ], - [ - "▁curious", - -11.677130699157715 - ], - [ - "▁waves", - -11.677178382873535 - ], - [ - "▁regret", - -11.677248001098633 - ], - [ - "FO", - -11.677326202392578 - ], - [ - "droit", - -11.67734146118164 - ], - [ - "rö", - -11.677565574645996 - ], - [ - "▁Panel", - -11.677624702453613 - ], - [ - "▁pile", - -11.677660942077637 - ], - [ - "▁installing", - -11.677674293518066 - ], - [ - "▁Intr", - -11.677797317504883 - ], - [ - "nung", - -11.677823066711426 - ], - [ - "▁Outdoor", - -11.677855491638184 - ], - [ - "▁generator", - -11.67786693572998 - ], - [ - "▁zahlreiche", - -11.677868843078613 - ], - [ - "▁Third", - -11.67813491821289 - ], - [ - "frac", - -11.678180694580078 - ], - [ - "ovi", - -11.678236961364746 - ], - [ - "▁Casa", - -11.678374290466309 - ], - [ - "▁stomach", - -11.678393363952637 - ], - [ - "▁Lincoln", - -11.67844009399414 - ], - [ - "▁Electronic", - -11.678584098815918 - ], - [ - "coding", - -11.67895221710205 - ], - [ - "2017", - -11.67900276184082 - ], - [ - "▁friendship", - -11.679238319396973 - ], - [ - "ried", - -11.679250717163086 - ], - [ - "но", - -11.679265022277832 - ], - [ - "▁tail", - -11.679267883300781 - ], - [ - "▁petits", - -11.679308891296387 - ], - [ - "▁réseau", - -11.679696083068848 - ], - [ - "▁churches", - -11.679999351501465 - ], - [ - "▁marketplace", - -11.680062294006348 - ], - [ - "▁Pool", - -11.680318832397461 - ], - [ - "▁popularity", - -11.680455207824707 - ], - [ - "▁sprijin", - -11.680496215820312 - ], - [ - "▁Od", - -11.680527687072754 - ], - [ - "▁Transfer", - -11.680562973022461 - ], - [ - "▁fake", - -11.680791854858398 - ], - [ - "▁9,", - -11.681007385253906 - ], - [ - "▁weit", - -11.681264877319336 - ], - [ - "▁relaxed", - -11.681415557861328 - ], - [ - "pig", - -11.68161678314209 - ], - [ - "▁Lauren", - -11.68166732788086 - ], - [ - "gesetzt", - -11.681669235229492 - ], - [ - "▁Clar", - -11.681694984436035 - ], - [ - "▁unlikely", - -11.681731224060059 - ], - [ - "color", 
- -11.681832313537598 - ], - [ - "▁spouse", - -11.681843757629395 - ], - [ - "▁facile", - -11.681859970092773 - ], - [ - "▁Speed", - -11.681872367858887 - ], - [ - "KE", - -11.682230949401855 - ], - [ - "▁PO", - -11.68231201171875 - ], - [ - "▁Channel", - -11.682321548461914 - ], - [ - "argent", - -11.682356834411621 - ], - [ - "▁Making", - -11.682430267333984 - ], - [ - "▁Coll", - -11.682585716247559 - ], - [ - "cci", - -11.682721138000488 - ], - [ - "corresponding", - -11.68300724029541 - ], - [ - "▁heaven", - -11.683160781860352 - ], - [ - "ţă", - -11.68319320678711 - ], - [ - "▁darüber", - -11.683236122131348 - ], - [ - "acted", - -11.683420181274414 - ], - [ - "only", - -11.683460235595703 - ], - [ - "▁slight", - -11.683465003967285 - ], - [ - "lian", - -11.68348503112793 - ], - [ - "flă", - -11.683510780334473 - ], - [ - "▁vulnerable", - -11.683530807495117 - ], - [ - "▁creator", - -11.68356704711914 - ], - [ - "▁protecting", - -11.68360424041748 - ], - [ - "writing", - -11.68360710144043 - ], - [ - "▁Ter", - -11.68387222290039 - ], - [ - "▁barb", - -11.683987617492676 - ], - [ - "▁dată", - -11.683995246887207 - ], - [ - "▁Screen", - -11.684052467346191 - ], - [ - "▁BBC", - -11.684082984924316 - ], - [ - "Col", - -11.684206008911133 - ], - [ - "fung", - -11.684453964233398 - ], - [ - "▁dreptul", - -11.684494972229004 - ], - [ - "derived", - -11.684538841247559 - ], - [ - "▁designated", - -11.684553146362305 - ], - [ - "▁interactions", - -11.684617042541504 - ], - [ - "SG", - -11.684621810913086 - ], - [ - "▁häufig", - -11.684625625610352 - ], - [ - "▁Mega", - -11.684638023376465 - ], - [ - "▁jazz", - -11.684660911560059 - ], - [ - "lbs", - -11.684797286987305 - ], - [ - "▁Manual", - -11.68484115600586 - ], - [ - "pushed", - -11.685017585754395 - ], - [ - "▁analytics", - -11.685234069824219 - ], - [ - "▁lawsuit", - -11.68533706665039 - ], - [ - "▁gray", - -11.685364723205566 - ], - [ - "shirts", - -11.685401916503906 - ], - [ - "▁hill", - -11.685508728027344 - 
], - [ - "▁1991", - -11.68550968170166 - ], - [ - "▁obligations", - -11.685568809509277 - ], - [ - "▁Dubai", - -11.68580436706543 - ], - [ - "()", - -11.685808181762695 - ], - [ - "▁acceptable", - -11.685810089111328 - ], - [ - "therapist", - -11.685877799987793 - ], - [ - "inger", - -11.6860990524292 - ], - [ - "▁territory", - -11.686208724975586 - ], - [ - "▁sang", - -11.6862211227417 - ], - [ - "ät", - -11.686224937438965 - ], - [ - "▁Zukunft", - -11.686238288879395 - ], - [ - "TU", - -11.68657398223877 - ], - [ - "▁horizontal", - -11.68665599822998 - ], - [ - "▁entrepreneurs", - -11.686710357666016 - ], - [ - "▁Eltern", - -11.687017440795898 - ], - [ - "▁presentations", - -11.687129974365234 - ], - [ - "▁confirmation", - -11.687173843383789 - ], - [ - "▁technological", - -11.687432289123535 - ], - [ - "▁1989", - -11.687530517578125 - ], - [ - "EF", - -11.687640190124512 - ], - [ - "ponent", - -11.687663078308105 - ], - [ - "NET", - -11.687699317932129 - ], - [ - "750", - -11.687772750854492 - ], - [ - "▁desert", - -11.687891960144043 - ], - [ - "▁contribu", - -11.687932968139648 - ], - [ - "▁Gun", - -11.687944412231445 - ], - [ - "▁Juli", - -11.688091278076172 - ], - [ - "ERS", - -11.688261985778809 - ], - [ - "▁inceput", - -11.688261985778809 - ], - [ - "▁answered", - -11.688369750976562 - ], - [ - "▁basement", - -11.688410758972168 - ], - [ - "film", - -11.688434600830078 - ], - [ - "▁taille", - -11.688593864440918 - ], - [ - "▁survival", - -11.688655853271484 - ], - [ - "ihnen", - -11.68869400024414 - ], - [ - "▁Bird", - -11.688840866088867 - ], - [ - "speed", - -11.689336776733398 - ], - [ - "▁journalist", - -11.68941879272461 - ], - [ - "▁Indonesia", - -11.689626693725586 - ], - [ - "▁15.", - -11.689973831176758 - ], - [ - "▁19.", - -11.690025329589844 - ], - [ - "étaient", - -11.690114974975586 - ], - [ - "▁tennis", - -11.69024658203125 - ], - [ - "▁aproximativ", - -11.69039249420166 - ], - [ - "▁Hans", - -11.690650939941406 - ], - [ - "▁Remove", - 
-11.69067096710205 - ], - [ - "▁cats", - -11.691022872924805 - ], - [ - "▁calories", - -11.691052436828613 - ], - [ - "▁limitations", - -11.69119644165039 - ], - [ - "▁subscribe", - -11.691198348999023 - ], - [ - "▁Dem", - -11.691339492797852 - ], - [ - "lust", - -11.691370010375977 - ], - [ - "▁adresa", - -11.691394805908203 - ], - [ - "▁sais", - -11.69140911102295 - ], - [ - "...\"", - -11.691473960876465 - ], - [ - "▁Luft", - -11.691485404968262 - ], - [ - "DL", - -11.691597938537598 - ], - [ - "▁estimates", - -11.691600799560547 - ], - [ - "▁protocol", - -11.691603660583496 - ], - [ - "▁Namen", - -11.691776275634766 - ], - [ - "▁grands", - -11.691901206970215 - ], - [ - "▁voter", - -11.691970825195312 - ], - [ - "▁vacuum", - -11.692075729370117 - ], - [ - "▁versch", - -11.692103385925293 - ], - [ - "▁Democratic", - -11.692107200622559 - ], - [ - "▁Books", - -11.692170143127441 - ], - [ - "▁frames", - -11.692727088928223 - ], - [ - "▁Bee", - -11.692864418029785 - ], - [ - "▁helfen", - -11.692934036254883 - ], - [ - "▁dive", - -11.692963600158691 - ], - [ - "▁physician", - -11.693037033081055 - ], - [ - "▁powered", - -11.693131446838379 - ], - [ - "▁zones", - -11.693337440490723 - ], - [ - "▁regime", - -11.69345474243164 - ], - [ - "check", - -11.693578720092773 - ], - [ - "11.", - -11.693793296813965 - ], - [ - "▁plaisir", - -11.693793296813965 - ], - [ - "▁physically", - -11.693811416625977 - ], - [ - "▁Pul", - -11.694245338439941 - ], - [ - "▁jardin", - -11.694294929504395 - ], - [ - "▁Nur", - -11.694417953491211 - ], - [ - "WC", - -11.694425582885742 - ], - [ - "▁Lock", - -11.694506645202637 - ], - [ - "▁économique", - -11.694530487060547 - ], - [ - "user", - -11.694536209106445 - ], - [ - "▁commit", - -11.694731712341309 - ], - [ - "▁oldest", - -11.694764137268066 - ], - [ - "▁fulfill", - -11.694780349731445 - ], - [ - "▁nervous", - -11.69482135772705 - ], - [ - "▁SH", - -11.695014953613281 - ], - [ - "SK", - -11.695150375366211 - ], - [ - "▁plein", - 
-11.695291519165039 - ], - [ - "show", - -11.695354461669922 - ], - [ - "▁disability", - -11.695356369018555 - ], - [ - "papier", - -11.69544506072998 - ], - [ - "▁Corp", - -11.695611000061035 - ], - [ - "ători", - -11.695676803588867 - ], - [ - "nţă", - -11.695813179016113 - ], - [ - "▁overseas", - -11.696009635925293 - ], - [ - "▁struck", - -11.69603157043457 - ], - [ - "astic", - -11.69607162475586 - ], - [ - "▁advised", - -11.696088790893555 - ], - [ - "BE", - -11.696161270141602 - ], - [ - "▁UV", - -11.696218490600586 - ], - [ - "patient", - -11.69626235961914 - ], - [ - "▁texte", - -11.696344375610352 - ], - [ - "▁timely", - -11.696444511413574 - ], - [ - "used", - -11.696471214294434 - ], - [ - "▁occasionally", - -11.696524620056152 - ], - [ - "▁entries", - -11.696550369262695 - ], - [ - "underlying", - -11.6967191696167 - ], - [ - "01.", - -11.696748733520508 - ], - [ - "▁automated", - -11.696791648864746 - ], - [ - "yes", - -11.696828842163086 - ], - [ - "▁Staff", - -11.697057723999023 - ], - [ - "▁Einzel", - -11.697546005249023 - ], - [ - "quit", - -11.697687149047852 - ], - [ - "▁Cela", - -11.697951316833496 - ], - [ - "▁snap", - -11.698298454284668 - ], - [ - "▁followers", - -11.698330879211426 - ], - [ - "CN", - -11.698709487915039 - ], - [ - "▁Cooper", - -11.698892593383789 - ], - [ - "ô", - -11.698921203613281 - ], - [ - "▁memorable", - -11.698965072631836 - ], - [ - "▁jur", - -11.698996543884277 - ], - [ - "▁ajutorul", - -11.69905948638916 - ], - [ - "▁Enter", - -11.6991548538208 - ], - [ - "Often", - -11.699294090270996 - ], - [ - "▁dintr", - -11.699341773986816 - ], - [ - "-30", - -11.699419975280762 - ], - [ - "ESS", - -11.699454307556152 - ], - [ - "▁weird", - -11.699462890625 - ], - [ - "▁Animal", - -11.699706077575684 - ], - [ - "▁complement", - -11.699719429016113 - ], - [ - "▁Bot", - -11.699756622314453 - ], - [ - "▁darf", - -11.699764251708984 - ], - [ - "yed", - -11.699808120727539 - ], - [ - "▁Mul", - -11.699872016906738 - ], - [ - 
"lick", - -11.700080871582031 - ], - [ - "▁Cambridge", - -11.700216293334961 - ], - [ - "adore", - -11.700407981872559 - ], - [ - "▁Dutch", - -11.700420379638672 - ], - [ - "▁Castle", - -11.700431823730469 - ], - [ - "igi", - -11.700563430786133 - ], - [ - "▁enemy", - -11.70071029663086 - ], - [ - "accompanied", - -11.700725555419922 - ], - [ - "▁teren", - -11.701102256774902 - ], - [ - "▁ET", - -11.701498985290527 - ], - [ - "ffle", - -11.701557159423828 - ], - [ - "-15", - -11.701651573181152 - ], - [ - "▁Geo", - -11.701680183410645 - ], - [ - "▁attractions", - -11.701730728149414 - ], - [ - "iker", - -11.70185661315918 - ], - [ - "▁bă", - -11.701990127563477 - ], - [ - "▁heal", - -11.701995849609375 - ], - [ - "weisen", - -11.702144622802734 - ], - [ - "▁spectrum", - -11.702186584472656 - ], - [ - "meld", - -11.702394485473633 - ], - [ - "▁eveniment", - -11.70247745513916 - ], - [ - "arra", - -11.702478408813477 - ], - [ - "rete", - -11.70250129699707 - ], - [ - "▁Had", - -11.70250415802002 - ], - [ - "looking", - -11.702692031860352 - ], - [ - "isierung", - -11.702805519104004 - ], - [ - "▁moyen", - -11.703129768371582 - ], - [ - "▁gesamte", - -11.703202247619629 - ], - [ - "▁destroy", - -11.703407287597656 - ], - [ - "125", - -11.703518867492676 - ], - [ - "▁suivant", - -11.703913688659668 - ], - [ - "▁declared", - -11.703925132751465 - ], - [ - "▁Urban", - -11.704131126403809 - ], - [ - "▁16.", - -11.704168319702148 - ], - [ - "▁Beg", - -11.704168319702148 - ], - [ - "▁canal", - -11.704225540161133 - ], - [ - "▁Pres", - -11.70431137084961 - ], - [ - "▁geeignet", - -11.704339981079102 - ], - [ - "▁strat", - -11.704365730285645 - ], - [ - "UB", - -11.704395294189453 - ], - [ - "▁Alexander", - -11.704424858093262 - ], - [ - "cycle", - -11.704666137695312 - ], - [ - "▁Var", - -11.704802513122559 - ], - [ - "▁domin", - -11.704805374145508 - ], - [ - "▁lasting", - -11.704939842224121 - ], - [ - "terio", - -11.705262184143066 - ], - [ - "▁Battle", - 
-11.705339431762695 - ], - [ - "▁publications", - -11.705647468566895 - ], - [ - "▁implica", - -11.705886840820312 - ], - [ - "▁NA", - -11.705963134765625 - ], - [ - "▁stocks", - -11.706036567687988 - ], - [ - "Plat", - -11.70611572265625 - ], - [ - "▁excitement", - -11.706149101257324 - ], - [ - "▁Muslim", - -11.706524848937988 - ], - [ - "▁Mari", - -11.706530570983887 - ], - [ - "▁Ul", - -11.706647872924805 - ], - [ - "nächst", - -11.706757545471191 - ], - [ - "▁trait", - -11.706833839416504 - ], - [ - "▁(3)", - -11.706852912902832 - ], - [ - "▁Attorney", - -11.706894874572754 - ], - [ - "▁Malaysia", - -11.70689582824707 - ], - [ - "▁slab", - -11.706960678100586 - ], - [ - "▁dam", - -11.707113265991211 - ], - [ - "▁Bir", - -11.707226753234863 - ], - [ - "▁sing", - -11.70738410949707 - ], - [ - "▁Culture", - -11.7073974609375 - ], - [ - "UD", - -11.707417488098145 - ], - [ - "▁Mes", - -11.707443237304688 - ], - [ - "ități", - -11.707615852355957 - ], - [ - "▁possess", - -11.708173751831055 - ], - [ - "enabling", - -11.70820426940918 - ], - [ - "▁settled", - -11.708335876464844 - ], - [ - "▁sagen", - -11.708492279052734 - ], - [ - "▁erfolgt", - -11.708564758300781 - ], - [ - "dog", - -11.708600997924805 - ], - [ - "ndu", - -11.708732604980469 - ], - [ - "ității", - -11.708745002746582 - ], - [ - "▁Islam", - -11.708930015563965 - ], - [ - "▁catalog", - -11.708931922912598 - ], - [ - "▁simt", - -11.709102630615234 - ], - [ - "tische", - -11.709150314331055 - ], - [ - "▁Mach", - -11.709334373474121 - ], - [ - "▁EP", - -11.709359169006348 - ], - [ - "▁Certified", - -11.709386825561523 - ], - [ - "▁Resources", - -11.70945930480957 - ], - [ - "▁Past", - -11.709607124328613 - ], - [ - "▁Termin", - -11.709755897521973 - ], - [ - "▁lightweight", - -11.709755897521973 - ], - [ - "▁championship", - -11.70994758605957 - ], - [ - "gebiet", - -11.710122108459473 - ], - [ - "▁jurisdiction", - -11.710135459899902 - ], - [ - "▁euros", - -11.710169792175293 - ], - [ - "▁Familien", - 
-11.710554122924805 - ], - [ - "▁GT", - -11.710677146911621 - ], - [ - "▁dvs", - -11.71081256866455 - ], - [ - "▁nouveaux", - -11.710838317871094 - ], - [ - "▁chill", - -11.710916519165039 - ], - [ - "▁ridicat", - -11.710920333862305 - ], - [ - "his", - -11.711079597473145 - ], - [ - "▁Indi", - -11.711159706115723 - ], - [ - "▁arrested", - -11.71116828918457 - ], - [ - "ităţii", - -11.711170196533203 - ], - [ - "onul", - -11.711274147033691 - ], - [ - "appar", - -11.711296081542969 - ], - [ - "▁Bachelor", - -11.711297988891602 - ], - [ - "▁erfolgreich", - -11.711426734924316 - ], - [ - "▁versatile", - -11.71163558959961 - ], - [ - "▁nécessaire", - -11.711761474609375 - ], - [ - "▁facial", - -11.712160110473633 - ], - [ - "▁Bull", - -11.712226867675781 - ], - [ - "Comm", - -11.712237358093262 - ], - [ - "atte", - -11.712307929992676 - ], - [ - "hom", - -11.7123384475708 - ], - [ - "start", - -11.712576866149902 - ], - [ - "▁roughly", - -11.712936401367188 - ], - [ - "▁bay", - -11.712984085083008 - ], - [ - "▁american", - -11.712986946105957 - ], - [ - "▁Wisconsin", - -11.713135719299316 - ], - [ - "▁Clinton", - -11.713142395019531 - ], - [ - "appareil", - -11.713153839111328 - ], - [ - "▁liberal", - -11.713455200195312 - ], - [ - "▁dau", - -11.713519096374512 - ], - [ - "ech", - -11.713521957397461 - ], - [ - "2014", - -11.713624000549316 - ], - [ - "▁lip", - -11.713645935058594 - ], - [ - "▁maintenant", - -11.713762283325195 - ], - [ - "▁Sil", - -11.713805198669434 - ], - [ - "rben", - -11.713891983032227 - ], - [ - "▁contents", - -11.713980674743652 - ], - [ - "▁magnetic", - -11.714111328125 - ], - [ - "▁terre", - -11.714151382446289 - ], - [ - "▁Rights", - -11.714475631713867 - ], - [ - "lose", - -11.714570045471191 - ], - [ - "▁crown", - -11.71468448638916 - ], - [ - "▁oils", - -11.7147216796875 - ], - [ - "▁entertaining", - -11.714841842651367 - ], - [ - "▁Option", - -11.714848518371582 - ], - [ - "▁Previous", - -11.714916229248047 - ], - [ - "▁vrai", - 
-11.714930534362793 - ], - [ - "▁Auswahl", - -11.715056419372559 - ], - [ - "▁horses", - -11.715106010437012 - ], - [ - "▁Author", - -11.71533489227295 - ], - [ - "▁Writing", - -11.715461730957031 - ], - [ - "▁travelling", - -11.715522766113281 - ], - [ - "▁350", - -11.715567588806152 - ], - [ - "daten", - -11.71560287475586 - ], - [ - "zan", - -11.715765953063965 - ], - [ - "▁sweat", - -11.715924263000488 - ], - [ - "▁Junior", - -11.715970993041992 - ], - [ - "markt", - -11.71609878540039 - ], - [ - "after", - -11.716105461120605 - ], - [ - "▁admitted", - -11.716262817382812 - ], - [ - "▁1950", - -11.716347694396973 - ], - [ - "▁Sche", - -11.71648120880127 - ], - [ - "▁dorit", - -11.716818809509277 - ], - [ - "▁transferred", - -11.716958045959473 - ], - [ - "utilise", - -11.717194557189941 - ], - [ - "sitz", - -11.717301368713379 - ], - [ - "gio", - -11.717320442199707 - ], - [ - "▁bisher", - -11.717473983764648 - ], - [ - "RD", - -11.717491149902344 - ], - [ - "▁Wales", - -11.717747688293457 - ], - [ - "▁smoking", - -11.717904090881348 - ], - [ - "dire", - -11.717939376831055 - ], - [ - "▁seating", - -11.717979431152344 - ], - [ - "▁constat", - -11.718056678771973 - ], - [ - "▁Hub", - -11.718324661254883 - ], - [ - "▁sieht", - -11.718345642089844 - ], - [ - "▁prospect", - -11.718378067016602 - ], - [ - "▁RO", - -11.718413352966309 - ], - [ - "▁Wars", - -11.718423843383789 - ], - [ - "eek", - -11.718496322631836 - ], - [ - "▁Bring", - -11.718646049499512 - ], - [ - "▁bleiben", - -11.718696594238281 - ], - [ - "arri", - -11.718826293945312 - ], - [ - "inal", - -11.718904495239258 - ], - [ - "▁Maryland", - -11.718932151794434 - ], - [ - "▁Process", - -11.719145774841309 - ], - [ - "They", - -11.719154357910156 - ], - [ - "▁Oxford", - -11.719176292419434 - ], - [ - "▁neat", - -11.719330787658691 - ], - [ - "▁cinema", - -11.719597816467285 - ], - [ - "▁Ist", - -11.719620704650879 - ], - [ - "▁vegan", - -11.719682693481445 - ], - [ - "wall", - -11.719708442687988 - ], 
- [ - "▁motive", - -11.72010612487793 - ], - [ - "▁mature", - -11.720544815063477 - ], - [ - "▁Dragon", - -11.720653533935547 - ], - [ - "▁google", - -11.720677375793457 - ], - [ - "blick", - -11.72110652923584 - ], - [ - "▁Cod", - -11.721220970153809 - ], - [ - "▁suffi", - -11.721319198608398 - ], - [ - "▁terrorist", - -11.721478462219238 - ], - [ - "Posted", - -11.721484184265137 - ], - [ - "▁Schi", - -11.72157096862793 - ], - [ - "▁Marc", - -11.721597671508789 - ], - [ - "▁operates", - -11.721661567687988 - ], - [ - "gress", - -11.721805572509766 - ], - [ - "has", - -11.721899032592773 - ], - [ - "sole", - -11.722108840942383 - ], - [ - "▁Buck", - -11.722122192382812 - ], - [ - "impl", - -11.722160339355469 - ], - [ - "▁Ron", - -11.722172737121582 - ], - [ - "▁handled", - -11.722346305847168 - ], - [ - "▁Apr", - -11.722347259521484 - ], - [ - "▁Storage", - -11.722467422485352 - ], - [ - "▁temp", - -11.722512245178223 - ], - [ - "▁differently", - -11.722614288330078 - ], - [ - "▁wherever", - -11.722670555114746 - ], - [ - "matched", - -11.722695350646973 - ], - [ - "rios", - -11.72276496887207 - ], - [ - "▁surprising", - -11.722846031188965 - ], - [ - "teilen", - -11.722867965698242 - ], - [ - "▁difficulties", - -11.72294807434082 - ], - [ - "tab", - -11.723064422607422 - ], - [ - "▁Leader", - -11.723128318786621 - ], - [ - "implementing", - -11.723372459411621 - ], - [ - "▁workforce", - -11.723384857177734 - ], - [ - "▁bereit", - -11.723503112792969 - ], - [ - "vig", - -11.72352123260498 - ], - [ - "▁LOVE", - -11.723580360412598 - ], - [ - "▁instances", - -11.723954200744629 - ], - [ - "▁frumos", - -11.723960876464844 - ], - [ - "▁Java", - -11.723974227905273 - ], - [ - "▁arrest", - -11.723977088928223 - ], - [ - "▁apparent", - -11.724152565002441 - ], - [ - "▁hence", - -11.724200248718262 - ], - [ - "▁entwickelt", - -11.72437572479248 - ], - [ - "▁Fra", - -11.724471092224121 - ], - [ - "▁prend", - -11.724486351013184 - ], - [ - "ließ", - -11.724522590637207 - 
], - [ - "▁drawer", - -11.724671363830566 - ], - [ - "ARD", - -11.724926948547363 - ], - [ - "▁caring", - -11.72499942779541 - ], - [ - "▁wollte", - -11.725024223327637 - ], - [ - "▁vielleicht", - -11.72511100769043 - ], - [ - "▁iconic", - -11.725324630737305 - ], - [ - "äch", - -11.72552490234375 - ], - [ - "abel", - -11.725639343261719 - ], - [ - "▁génér", - -11.72570514678955 - ], - [ - "ault", - -11.725727081298828 - ], - [ - "▁alternatives", - -11.725909233093262 - ], - [ - "think", - -11.726025581359863 - ], - [ - "ро", - -11.726055145263672 - ], - [ - "whereas", - -11.726058006286621 - ], - [ - "erei", - -11.726366996765137 - ], - [ - "▁Eagle", - -11.726766586303711 - ], - [ - "situé", - -11.72704792022705 - ], - [ - "▁laboratory", - -11.727157592773438 - ], - [ - "▁Nutzung", - -11.727256774902344 - ], - [ - "▁Bathroom", - -11.72728157043457 - ], - [ - "▁loaded", - -11.727293968200684 - ], - [ - "niste", - -11.727408409118652 - ], - [ - "som", - -11.727429389953613 - ], - [ - "▁aucun", - -11.727666854858398 - ], - [ - "gebracht", - -11.727676391601562 - ], - [ - "▁tomb", - -11.727771759033203 - ], - [ - "▁Ty", - -11.727785110473633 - ], - [ - "▁afaceri", - -11.727971076965332 - ], - [ - "tex", - -11.72803783416748 - ], - [ - "ality", - -11.728147506713867 - ], - [ - "▁identification", - -11.728150367736816 - ], - [ - "▁cultiv", - -11.728255271911621 - ], - [ - "Not", - -11.728326797485352 - ], - [ - "▁acestor", - -11.72846508026123 - ], - [ - "▁PhD", - -11.728466033935547 - ], - [ - "nell", - -11.728470802307129 - ], - [ - "▁dial", - -11.728594779968262 - ], - [ - "chro", - -11.728673934936523 - ], - [ - "▁specifications", - -11.728682518005371 - ], - [ - "anii", - -11.72877025604248 - ], - [ - "▁cloth", - -11.728836059570312 - ], - [ - "▁highway", - -11.728914260864258 - ], - [ - "▁Vitamin", - -11.729118347167969 - ], - [ - "▁indication", - -11.729349136352539 - ], - [ - "80%", - -11.72959041595459 - ], - [ - "▁Lion", - -11.729681015014648 - ], - [ - 
"▁10,", - -11.729693412780762 - ], - [ - "▁Werk", - -11.72974967956543 - ], - [ - "▁combin", - -11.729803085327148 - ], - [ - "▁releases", - -11.7298583984375 - ], - [ - "LL", - -11.730006217956543 - ], - [ - "ktor", - -11.730186462402344 - ], - [ - "ufgrund", - -11.73018741607666 - ], - [ - "calc", - -11.73034381866455 - ], - [ - "▁accomplished", - -11.730606079101562 - ], - [ - "▁los", - -11.730619430541992 - ], - [ - "▁distant", - -11.730688095092773 - ], - [ - "▁secteur", - -11.73068904876709 - ], - [ - "logue", - -11.730781555175781 - ], - [ - "▁betting", - -11.730792999267578 - ], - [ - "elf", - -11.731180191040039 - ], - [ - "puteti", - -11.73123550415039 - ], - [ - "▁Moment", - -11.731236457824707 - ], - [ - "▁scoring", - -11.731548309326172 - ], - [ - "▁freuen", - -11.731572151184082 - ], - [ - "▁fastest", - -11.731873512268066 - ], - [ - "▁directors", - -11.732080459594727 - ], - [ - "▁fame", - -11.732234954833984 - ], - [ - "▁complaint", - -11.732239723205566 - ], - [ - "▁Ep", - -11.732314109802246 - ], - [ - "▁delicate", - -11.732329368591309 - ], - [ - "annonce", - -11.73240852355957 - ], - [ - "ext", - -11.732454299926758 - ], - [ - "▁quit", - -11.732473373413086 - ], - [ - "▁Cop", - -11.73253345489502 - ], - [ - "prop", - -11.732565879821777 - ], - [ - "365", - -11.732742309570312 - ], - [ - "▁Say", - -11.732879638671875 - ], - [ - "▁internationale", - -11.733064651489258 - ], - [ - "cott", - -11.733213424682617 - ], - [ - "▁Whatever", - -11.733261108398438 - ], - [ - "▁admir", - -11.733261108398438 - ], - [ - "▁bucur", - -11.733549118041992 - ], - [ - "▁entity", - -11.733779907226562 - ], - [ - "▁dancing", - -11.733837127685547 - ], - [ - "▁printre", - -11.733892440795898 - ], - [ - "▁meditation", - -11.734396934509277 - ], - [ - "▁avis", - -11.734416961669922 - ], - [ - "▁1988", - -11.73447036743164 - ], - [ - "10.", - -11.734506607055664 - ], - [ - "▁worker", - -11.734638214111328 - ], - [ - "▁$100", - -11.734784126281738 - ], - [ - "▁contrôle", - 
-11.7349853515625 - ], - [ - "▁insist", - -11.734997749328613 - ], - [ - "ements", - -11.73505973815918 - ], - [ - "izate", - -11.735163688659668 - ], - [ - "▁tied", - -11.735332489013672 - ], - [ - "▁correspond", - -11.735396385192871 - ], - [ - "▁apartments", - -11.735547065734863 - ], - [ - "▁2009.", - -11.735599517822266 - ], - [ - "▁tiles", - -11.735624313354492 - ], - [ - "▁boots", - -11.735639572143555 - ], - [ - "▁laundry", - -11.735673904418945 - ], - [ - "▁Coffee", - -11.735674858093262 - ], - [ - "▁CV", - -11.735727310180664 - ], - [ - "▁composed", - -11.736035346984863 - ], - [ - "atom", - -11.73622989654541 - ], - [ - "▁shore", - -11.736270904541016 - ], - [ - "▁marijuana", - -11.736312866210938 - ], - [ - "plic", - -11.73648452758789 - ], - [ - "▁Zahl", - -11.736649513244629 - ], - [ - "depth", - -11.73682689666748 - ], - [ - "▁Egypt", - -11.736854553222656 - ], - [ - "▁NFL", - -11.736906051635742 - ], - [ - "▁12,", - -11.736922264099121 - ], - [ - "▁pollution", - -11.736964225769043 - ], - [ - "▁Vergleich", - -11.73704719543457 - ], - [ - "û", - -11.737109184265137 - ], - [ - "▁nurse", - -11.737153053283691 - ], - [ - "▁Susan", - -11.737173080444336 - ], - [ - "▁verify", - -11.737393379211426 - ], - [ - "▁kon", - -11.737504959106445 - ], - [ - "▁ulei", - -11.7376127243042 - ], - [ - "▁Sept", - -11.737699508666992 - ], - [ - "▁Location", - -11.737908363342285 - ], - [ - "▁frozen", - -11.737991333007812 - ], - [ - "good", - -11.73802661895752 - ], - [ - "▁cine", - -11.738066673278809 - ], - [ - "forming", - -11.738181114196777 - ], - [ - "▁Near", - -11.738391876220703 - ], - [ - "▁Tab", - -11.738545417785645 - ], - [ - "▁Alexandr", - -11.738600730895996 - ], - [ - "ст", - -11.73863697052002 - ], - [ - "CK", - -11.738656044006348 - ], - [ - "▁loads", - -11.738948822021484 - ], - [ - "▁disorders", - -11.738957405090332 - ], - [ - "hip", - -11.739596366882324 - ], - [ - "▁blessing", - -11.73987102508545 - ], - [ - "▁vechi", - -11.73997688293457 - ], - [ - 
"▁Bookmark", - -11.740296363830566 - ], - [ - "SON", - -11.74036979675293 - ], - [ - "books", - -11.740428924560547 - ], - [ - "▁tropical", - -11.740438461303711 - ], - [ - "▁Garten", - -11.740447044372559 - ], - [ - "ôt", - -11.740760803222656 - ], - [ - "tures", - -11.740827560424805 - ], - [ - "▁obligation", - -11.741010665893555 - ], - [ - "▁admin", - -11.741011619567871 - ], - [ - "▁sélection", - -11.741106986999512 - ], - [ - "disp", - -11.741172790527344 - ], - [ - "▁Anyone", - -11.741225242614746 - ], - [ - "keeper", - -11.74138355255127 - ], - [ - "▁konnten", - -11.741521835327148 - ], - [ - "▁existe", - -11.741615295410156 - ], - [ - "▁Rund", - -11.741798400878906 - ], - [ - "▁retailers", - -11.74184799194336 - ], - [ - "folg", - -11.741948127746582 - ], - [ - "▁urmare", - -11.742019653320312 - ], - [ - "▁Liebe", - -11.742321014404297 - ], - [ - "▁actors", - -11.742422103881836 - ], - [ - "▁Druck", - -11.742618560791016 - ], - [ - "lien", - -11.742752075195312 - ], - [ - "sian", - -11.742847442626953 - ], - [ - "▁partid", - -11.74304485321045 - ], - [ - "▁loin", - -11.743114471435547 - ], - [ - "AZ", - -11.743119239807129 - ], - [ - "oasă", - -11.743501663208008 - ], - [ - "▁inclusiv", - -11.743656158447266 - ], - [ - "TD", - -11.743680953979492 - ], - [ - "▁anului", - -11.743766784667969 - ], - [ - "poc", - -11.743844985961914 - ], - [ - "▁musique", - -11.743972778320312 - ], - [ - "▁Hart", - -11.743997573852539 - ], - [ - "Sh", - -11.744283676147461 - ], - [ - "html", - -11.744290351867676 - ], - [ - "▁serial", - -11.744318008422852 - ], - [ - "țele", - -11.744369506835938 - ], - [ - "inning", - -11.744544982910156 - ], - [ - "▁Bureau", - -11.744555473327637 - ], - [ - "▁rush", - -11.744626998901367 - ], - [ - "▁deosebit", - -11.744637489318848 - ], - [ - "▁Wort", - -11.744648933410645 - ], - [ - "▁Thailand", - -11.744688987731934 - ], - [ - "▁Language", - -11.745193481445312 - ], - [ - "▁Governor", - -11.745213508605957 - ], - [ - "▁Later", - 
-11.74525260925293 - ], - [ - "rilor", - -11.745282173156738 - ], - [ - "▁activités", - -11.745372772216797 - ], - [ - "schaffen", - -11.745598793029785 - ], - [ - "▁harvest", - -11.74567985534668 - ], - [ - "▁municipal", - -11.745783805847168 - ], - [ - "einander", - -11.74600601196289 - ], - [ - "▁fingers", - -11.746383666992188 - ], - [ - "▁sculpture", - -11.74638843536377 - ], - [ - "▁Bien", - -11.746390342712402 - ], - [ - "▁departments", - -11.746562957763672 - ], - [ - "▁période", - -11.746746063232422 - ], - [ - "▁jeune", - -11.746960639953613 - ], - [ - "▁governments", - -11.74710750579834 - ], - [ - "uter", - -11.747179985046387 - ], - [ - "Aceste", - -11.747220039367676 - ], - [ - "▁Deal", - -11.747243881225586 - ], - [ - "▁Equipment", - -11.74726390838623 - ], - [ - "nous", - -11.747300148010254 - ], - [ - "▁gate", - -11.747315406799316 - ], - [ - "▁meta", - -11.747447967529297 - ], - [ - "▁stiu", - -11.747474670410156 - ], - [ - "fold", - -11.747486114501953 - ], - [ - "▁seule", - -11.747523307800293 - ], - [ - "▁varied", - -11.747541427612305 - ], - [ - "hit", - -11.747635841369629 - ], - [ - "▁DIY", - -11.74768352508545 - ], - [ - "▁lemn", - -11.747685432434082 - ], - [ - "OB", - -11.747865676879883 - ], - [ - "▁colorful", - -11.748095512390137 - ], - [ - "▁câ", - -11.74826431274414 - ], - [ - "▁semester", - -11.74830150604248 - ], - [ - "▁dealer", - -11.748575210571289 - ], - [ - "nett", - -11.748788833618164 - ], - [ - "▁shortly", - -11.748932838439941 - ], - [ - "▁Driver", - -11.748983383178711 - ], - [ - "culture", - -11.749052047729492 - ], - [ - "▁permitted", - -11.749072074890137 - ], - [ - "▁sorts", - -11.749432563781738 - ], - [ - "▁crop", - -11.74999713897705 - ], - [ - "▁valoare", - -11.75046157836914 - ], - [ - "▁analog", - -11.750576972961426 - ], - [ - "▁excuse", - -11.750588417053223 - ], - [ - "▁modèle", - -11.750657081604004 - ], - [ - "When", - -11.75068473815918 - ], - [ - "▁march", - -11.750744819641113 - ], - [ - "haz", - 
-11.750978469848633 - ], - [ - "▁minimize", - -11.750992774963379 - ], - [ - "traction", - -11.751028060913086 - ], - [ - "▁caracter", - -11.752382278442383 - ], - [ - "▁modules", - -11.7523832321167 - ], - [ - "clu", - -11.75244426727295 - ], - [ - "ţional", - -11.752482414245605 - ], - [ - "▁breach", - -11.752562522888184 - ], - [ - "▁priced", - -11.752614974975586 - ], - [ - "▁attorneys", - -11.752644538879395 - ], - [ - "▁implant", - -11.752645492553711 - ], - [ - "▁ANY", - -11.752655029296875 - ], - [ - "dition", - -11.752707481384277 - ], - [ - "▁trials", - -11.752838134765625 - ], - [ - "▁Nas", - -11.75293254852295 - ], - [ - "Pre", - -11.752970695495605 - ], - [ - "lorsque", - -11.752979278564453 - ], - [ - "plin", - -11.753050804138184 - ], - [ - "Er", - -11.753056526184082 - ], - [ - "▁Dom", - -11.753067970275879 - ], - [ - "▁tire", - -11.753190040588379 - ], - [ - "sili", - -11.753233909606934 - ], - [ - "▁coins", - -11.753350257873535 - ], - [ - "▁rend", - -11.753470420837402 - ], - [ - "▁reliability", - -11.753503799438477 - ], - [ - "▁Analysis", - -11.753508567810059 - ], - [ - "▁trails", - -11.753692626953125 - ], - [ - "trägt", - -11.753762245178223 - ], - [ - "▁Kansas", - -11.753908157348633 - ], - [ - "▁responsive", - -11.75390911102295 - ], - [ - "▁disappear", - -11.753988265991211 - ], - [ - "▁stakeholders", - -11.754022598266602 - ], - [ - "▁aplica", - -11.754164695739746 - ], - [ - "▁imi", - -11.754180908203125 - ], - [ - "▁Laura", - -11.754369735717773 - ], - [ - "▁Terms", - -11.75440788269043 - ], - [ - "450", - -11.754460334777832 - ], - [ - "▁voltage", - -11.754483222961426 - ], - [ - "▁Gel", - -11.754544258117676 - ], - [ - "▁qualities", - -11.754549026489258 - ], - [ - "▁qualifi", - -11.754603385925293 - ], - [ - "▁Mé", - -11.754735946655273 - ], - [ - "bereit", - -11.754829406738281 - ], - [ - "gleich", - -11.754875183105469 - ], - [ - "▁voting", - -11.754961013793945 - ], - [ - "▁trademark", - -11.755128860473633 - ], - [ - "▁2.5", - 
-11.75515079498291 - ], - [ - "ND", - -11.755438804626465 - ], - [ - "▁Kelly", - -11.755470275878906 - ], - [ - "▁weiteren", - -11.755559921264648 - ], - [ - "▁filters", - -11.75562572479248 - ], - [ - "▁coût", - -11.75562858581543 - ], - [ - "jur", - -11.755765914916992 - ], - [ - "acre", - -11.755804061889648 - ], - [ - "▁retired", - -11.756022453308105 - ], - [ - "▁Engine", - -11.756205558776855 - ], - [ - "▁président", - -11.756264686584473 - ], - [ - "ajul", - -11.756307601928711 - ], - [ - "▁GA", - -11.756425857543945 - ], - [ - "rät", - -11.75666332244873 - ], - [ - "▁instructor", - -11.756669998168945 - ], - [ - "▁Allen", - -11.75668716430664 - ], - [ - "▁Delhi", - -11.756771087646484 - ], - [ - "▁cure", - -11.756844520568848 - ], - [ - "seite", - -11.756898880004883 - ], - [ - "coming", - -11.756914138793945 - ], - [ - "▁mixing", - -11.756963729858398 - ], - [ - "▁Kno", - -11.757041931152344 - ], - [ - "▁Sure", - -11.757079124450684 - ], - [ - "▁hired", - -11.757102012634277 - ], - [ - "▁participated", - -11.757196426391602 - ], - [ - "Count", - -11.757320404052734 - ], - [ - "treffen", - -11.757355690002441 - ], - [ - "▁54", - -11.75735855102539 - ], - [ - "▁rings", - -11.75735855102539 - ], - [ - "▁Thor", - -11.757359504699707 - ], - [ - "éro", - -11.75744915008545 - ], - [ - "▁buttons", - -11.757488250732422 - ], - [ - "▁47", - -11.757539749145508 - ], - [ - "▁Tel", - -11.757694244384766 - ], - [ - "▁suport", - -11.757776260375977 - ], - [ - "▁rhythm", - -11.75782585144043 - ], - [ - "▁Theater", - -11.758113861083984 - ], - [ - "▁informatii", - -11.758121490478516 - ], - [ - "hält", - -11.758201599121094 - ], - [ - "▁ouvert", - -11.758238792419434 - ], - [ - "fewer", - -11.75828742980957 - ], - [ - "▁alumni", - -11.758466720581055 - ], - [ - "▁valley", - -11.758508682250977 - ], - [ - "tial", - -11.75860595703125 - ], - [ - "***", - -11.758782386779785 - ], - [ - "kri", - -11.75905704498291 - ], - [ - "▁accidents", - -11.759113311767578 - ], - [ - 
"▁barrel", - -11.759170532226562 - ], - [ - "mobil", - -11.759310722351074 - ], - [ - "etti", - -11.759437561035156 - ], - [ - "▁immigration", - -11.759515762329102 - ], - [ - "▁poveste", - -11.759528160095215 - ], - [ - "hren", - -11.759669303894043 - ], - [ - "hydr", - -11.759719848632812 - ], - [ - "▁tweet", - -11.759744644165039 - ], - [ - "▁zip", - -11.759872436523438 - ], - [ - "▁Bonus", - -11.760189056396484 - ], - [ - "ordnung", - -11.760287284851074 - ], - [ - "liber", - -11.76046085357666 - ], - [ - "▁Navy", - -11.760591506958008 - ], - [ - "▁agreements", - -11.760612487792969 - ], - [ - "▁detection", - -11.7607421875 - ], - [ - "DF", - -11.760762214660645 - ], - [ - "hur", - -11.760774612426758 - ], - [ - "0.00", - -11.760798454284668 - ], - [ - "▁07", - -11.760866165161133 - ], - [ - "etta", - -11.760884284973145 - ], - [ - "▁13,", - -11.760887145996094 - ], - [ - "rolled", - -11.760970115661621 - ], - [ - "▁injection", - -11.761002540588379 - ], - [ - "mig", - -11.761017799377441 - ], - [ - "wach", - -11.761107444763184 - ], - [ - "▁choisir", - -11.761515617370605 - ], - [ - "▁professionnels", - -11.76159954071045 - ], - [ - "▁Tower", - -11.76169490814209 - ], - [ - "▁neighbor", - -11.76170539855957 - ], - [ - "deutschen", - -11.76187801361084 - ], - [ - "▁luxurious", - -11.76201057434082 - ], - [ - "▁walks", - -11.762033462524414 - ], - [ - "reti", - -11.762046813964844 - ], - [ - "▁Pad", - -11.762085914611816 - ], - [ - "wise", - -11.762297630310059 - ], - [ - "▁exhaust", - -11.762307167053223 - ], - [ - "▁demonstration", - -11.762582778930664 - ], - [ - "▁agricultural", - -11.762667655944824 - ], - [ - "Upon", - -11.762885093688965 - ], - [ - "▁Blu", - -11.76292610168457 - ], - [ - "atorul", - -11.762967109680176 - ], - [ - "amour", - -11.762984275817871 - ], - [ - "issant", - -11.763004302978516 - ], - [ - "▁delighted", - -11.763031959533691 - ], - [ - "rita", - -11.763113021850586 - ], - [ - "requiring", - -11.763195037841797 - ], - [ - "ivity", - 
-11.763216972351074 - ], - [ - "▁Unser", - -11.763306617736816 - ], - [ - "FP", - -11.763379096984863 - ], - [ - "fait", - -11.763533592224121 - ], - [ - "dite", - -11.763562202453613 - ], - [ - "kul", - -11.763716697692871 - ], - [ - "arth", - -11.76376724243164 - ], - [ - "▁Ker", - -11.763815879821777 - ], - [ - "torilor", - -11.763816833496094 - ], - [ - "stage", - -11.763866424560547 - ], - [ - "▁HTML", - -11.76398754119873 - ], - [ - "▁Wheel", - -11.764005661010742 - ], - [ - "▁quelque", - -11.76414680480957 - ], - [ - "▁Ou", - -11.764196395874023 - ], - [ - "▁considerable", - -11.764277458190918 - ], - [ - "▁Sco", - -11.76458740234375 - ], - [ - "▁donations", - -11.76481819152832 - ], - [ - "dessen", - -11.765002250671387 - ], - [ - "▁pourquoi", - -11.765039443969727 - ], - [ - "▁Bow", - -11.765189170837402 - ], - [ - "▁Dupa", - -11.76522445678711 - ], - [ - "ska", - -11.765707015991211 - ], - [ - "hot", - -11.765732765197754 - ], - [ - "▁drove", - -11.765849113464355 - ], - [ - "▁oppos", - -11.766018867492676 - ], - [ - "▁hiking", - -11.766035079956055 - ], - [ - "▁Boot", - -11.766081809997559 - ], - [ - "One", - -11.766087532043457 - ], - [ - "▁guvern", - -11.766094207763672 - ], - [ - "▁15,", - -11.766400337219238 - ], - [ - "scheid", - -11.766437530517578 - ], - [ - "▁Miet", - -11.766458511352539 - ], - [ - "▁Technical", - -11.766767501831055 - ], - [ - "▁Dal", - -11.7669038772583 - ], - [ - "▁Metro", - -11.766966819763184 - ], - [ - "▁Baker", - -11.767215728759766 - ], - [ - "▁trece", - -11.767252922058105 - ], - [ - "tained", - -11.767302513122559 - ], - [ - "block", - -11.76738452911377 - ], - [ - "▁wander", - -11.767401695251465 - ], - [ - "▁penalty", - -11.76742172241211 - ], - [ - "▁shipped", - -11.767509460449219 - ], - [ - "▁30%", - -11.767518043518066 - ], - [ - "group", - -11.767541885375977 - ], - [ - "▁brothers", - -11.767701148986816 - ], - [ - "▁comanda", - -11.767777442932129 - ], - [ - "▁retreat", - -11.767789840698242 - ], - [ - "▁Movie", 
- -11.767802238464355 - ], - [ - "PU", - -11.76787281036377 - ], - [ - "▁Jun", - -11.767885208129883 - ], - [ - "▁$6", - -11.767969131469727 - ], - [ - "▁Fal", - -11.768054962158203 - ], - [ - "▁Palestinian", - -11.768075942993164 - ], - [ - "▁soccer", - -11.768217086791992 - ], - [ - "▁Autor", - -11.768254280090332 - ], - [ - "▁chamber", - -11.768266677856445 - ], - [ - "nement", - -11.768463134765625 - ], - [ - "▁offense", - -11.768610954284668 - ], - [ - "▁gig", - -11.768631935119629 - ], - [ - "▁abandon", - -11.768691062927246 - ], - [ - "▁Kraft", - -11.768783569335938 - ], - [ - "▁Medicare", - -11.768784523010254 - ], - [ - "▁soap", - -11.768835067749023 - ], - [ - "▁Fur", - -11.768990516662598 - ], - [ - "▁conditioning", - -11.769103050231934 - ], - [ - "rained", - -11.769132614135742 - ], - [ - "▁puts", - -11.769134521484375 - ], - [ - "▁cod", - -11.76930046081543 - ], - [ - "lassen", - -11.76941967010498 - ], - [ - "FL", - -11.769600868225098 - ], - [ - "▁komplett", - -11.769664764404297 - ], - [ - "▁entscheiden", - -11.769665718078613 - ], - [ - "▁Hour", - -11.769691467285156 - ], - [ - "?!", - -11.770040512084961 - ], - [ - "Stream", - -11.770145416259766 - ], - [ - "▁Grad", - -11.770209312438965 - ], - [ - "▁gently", - -11.770231246948242 - ], - [ - "▁poetry", - -11.770429611206055 - ], - [ - "▁secured", - -11.770438194274902 - ], - [ - "oph", - -11.770466804504395 - ], - [ - "hop", - -11.770561218261719 - ], - [ - "handel", - -11.770634651184082 - ], - [ - "▁besoins", - -11.770658493041992 - ], - [ - "got", - -11.770824432373047 - ], - [ - "▁Chrome", - -11.77088737487793 - ], - [ - "ILL", - -11.770930290222168 - ], - [ - "▁Schritt", - -11.771014213562012 - ], - [ - "▁spell", - -11.771063804626465 - ], - [ - "▁grinding", - -11.771334648132324 - ], - [ - "▁ramp", - -11.77144718170166 - ], - [ - "▁mama", - -11.7716064453125 - ], - [ - "▁bottles", - -11.77180290222168 - ], - [ - "▁canvas", - -11.771906852722168 - ], - [ - "▁ecosystem", - -11.77194595336914 
- ], - [ - "aţii", - -11.771967887878418 - ], - [ - "cellular", - -11.772085189819336 - ], - [ - "▁Spin", - -11.772164344787598 - ], - [ - "▁Discover", - -11.772217750549316 - ], - [ - "-17", - -11.772322654724121 - ], - [ - "▁feeding", - -11.77246379852295 - ], - [ - "▁stops", - -11.7725191116333 - ], - [ - "▁haute", - -11.772552490234375 - ], - [ - "▁Entscheidung", - -11.7725830078125 - ], - [ - "▁semble", - -11.772590637207031 - ], - [ - "▁acele", - -11.772857666015625 - ], - [ - "▁Walk", - -11.773154258728027 - ], - [ - "▁joke", - -11.773180961608887 - ], - [ - "▁Fed", - -11.773294448852539 - ], - [ - "climat", - -11.773306846618652 - ], - [ - "▁Lot", - -11.773460388183594 - ], - [ - "runner", - -11.773551940917969 - ], - [ - "▁flip", - -11.773786544799805 - ], - [ - "▁werde", - -11.773818016052246 - ], - [ - "▁Deck", - -11.77417278289795 - ], - [ - "bala", - -11.774296760559082 - ], - [ - "▁sacrifice", - -11.774375915527344 - ], - [ - "cid", - -11.774388313293457 - ], - [ - "him", - -11.774569511413574 - ], - [ - "zahlen", - -11.774587631225586 - ], - [ - "▁heater", - -11.774596214294434 - ], - [ - "formed", - -11.774619102478027 - ], - [ - "plus", - -11.774711608886719 - ], - [ - "▁util", - -11.774742126464844 - ], - [ - "rama", - -11.775019645690918 - ], - [ - "(4)", - -11.7750244140625 - ], - [ - "▁knife", - -11.775111198425293 - ], - [ - "▁traditions", - -11.77520751953125 - ], - [ - "▁dip", - -11.775357246398926 - ], - [ - "kill", - -11.775405883789062 - ], - [ - "▁Rich", - -11.775418281555176 - ], - [ - "▁DI", - -11.775555610656738 - ], - [ - "▁containers", - -11.775677680969238 - ], - [ - "▁locuri", - -11.775728225708008 - ], - [ - "▁continent", - -11.775797843933105 - ], - [ - "teilung", - -11.776005744934082 - ], - [ - "▁vreme", - -11.776028633117676 - ], - [ - "organisation", - -11.776126861572266 - ], - [ - "serie", - -11.776135444641113 - ], - [ - "▁Diamond", - -11.776204109191895 - ], - [ - "magazin", - -11.77627944946289 - ], - [ - "▁poster", - 
-11.776455879211426 - ], - [ - "▁passenger", - -11.7765474319458 - ], - [ - "▁soldiers", - -11.776552200317383 - ], - [ - "▁urgent", - -11.776616096496582 - ], - [ - "▁Lip", - -11.77680778503418 - ], - [ - "▁aşa", - -11.776972770690918 - ], - [ - "▁BO", - -11.777024269104004 - ], - [ - "▁somebody", - -11.777076721191406 - ], - [ - "▁silence", - -11.777132034301758 - ], - [ - "cop", - -11.777359962463379 - ], - [ - "▁Burn", - -11.77749252319336 - ], - [ - "▁stopping", - -11.777544021606445 - ], - [ - "▁essence", - -11.777568817138672 - ], - [ - "▁hitting", - -11.777762413024902 - ], - [ - "▁producers", - -11.777801513671875 - ], - [ - "▁fibre", - -11.777894020080566 - ], - [ - "▁seasonal", - -11.777960777282715 - ], - [ - "▁tara", - -11.778096199035645 - ], - [ - "▁Jose", - -11.778099060058594 - ], - [ - "▁Better", - -11.77825927734375 - ], - [ - "▁steep", - -11.778295516967773 - ], - [ - "Alors", - -11.778353691101074 - ], - [ - "▁collecting", - -11.778507232666016 - ], - [ - "vre", - -11.778635025024414 - ], - [ - "▁disabled", - -11.77863883972168 - ], - [ - "▁voters", - -11.778679847717285 - ], - [ - "consuming", - -11.779092788696289 - ], - [ - "deemed", - -11.779115676879883 - ], - [ - "éra", - -11.779227256774902 - ], - [ - "opération", - -11.779273986816406 - ], - [ - "▁roller", - -11.779305458068848 - ], - [ - "Rather", - -11.779321670532227 - ], - [ - "▁leider", - -11.779370307922363 - ], - [ - "▁IV", - -11.779434204101562 - ], - [ - "▁erreichen", - -11.779473304748535 - ], - [ - "▁charging", - -11.779657363891602 - ], - [ - "tions", - -11.77973747253418 - ], - [ - "tiques", - -11.779861450195312 - ], - [ - "▁formats", - -11.779876708984375 - ], - [ - "▁painful", - -11.78000545501709 - ], - [ - "▁eager", - -11.780061721801758 - ], - [ - "generation", - -11.780137062072754 - ], - [ - "anna", - -11.780235290527344 - ], - [ - "▁races", - -11.780323028564453 - ], - [ - "force", - -11.780357360839844 - ], - [ - "▁ferm", - -11.780522346496582 - ], - [ - 
"▁breathing", - -11.780618667602539 - ], - [ - "▁offen", - -11.780648231506348 - ], - [ - "▁minds", - -11.780805587768555 - ], - [ - "▁musste", - -11.780832290649414 - ], - [ - "▁Vision", - -11.780888557434082 - ], - [ - "▁Installation", - -11.780988693237305 - ], - [ - "▁hesitate", - -11.781002044677734 - ], - [ - "▁somit", - -11.781023979187012 - ], - [ - "hôtel", - -11.781044006347656 - ], - [ - "cab", - -11.781235694885254 - ], - [ - "-16", - -11.781312942504883 - ], - [ - "▁Visual", - -11.781418800354004 - ], - [ - "intérêt", - -11.781524658203125 - ], - [ - "▁apel", - -11.781831741333008 - ], - [ - "therapy", - -11.782089233398438 - ], - [ - "volt", - -11.78225040435791 - ], - [ - "▁Rou", - -11.782439231872559 - ], - [ - "▁efficace", - -11.782464027404785 - ], - [ - "▁architectural", - -11.782605171203613 - ], - [ - "▁privilege", - -11.782670974731445 - ], - [ - "▁treating", - -11.782711029052734 - ], - [ - "▁Tam", - -11.782722473144531 - ], - [ - "tsch", - -11.782744407653809 - ], - [ - "building", - -11.782750129699707 - ], - [ - "▁associations", - -11.782929420471191 - ], - [ - "▁Consumer", - -11.783424377441406 - ], - [ - "▁Lim", - -11.783496856689453 - ], - [ - "newest", - -11.7835054397583 - ], - [ - "▁față", - -11.783675193786621 - ], - [ - "▁ships", - -11.783732414245605 - ], - [ - "lev", - -11.78373908996582 - ], - [ - "raft", - -11.783817291259766 - ], - [ - "▁variations", - -11.783845901489258 - ], - [ - "▁noua", - -11.78386402130127 - ], - [ - "▁Cab", - -11.784063339233398 - ], - [ - "1.2", - -11.78409481048584 - ], - [ - "▁ocazi", - -11.784347534179688 - ], - [ - "▁recommendation", - -11.784449577331543 - ], - [ - "titled", - -11.78445053100586 - ], - [ - "▁invoice", - -11.78459644317627 - ], - [ - "▁noastra", - -11.784647941589355 - ], - [ - "kur", - -11.784700393676758 - ], - [ - "issent", - -11.784758567810059 - ], - [ - "base", - -11.784778594970703 - ], - [ - "hä", - -11.7848482131958 - ], - [ - "888", - -11.784914016723633 - ], - [ - 
"▁declar", - -11.784941673278809 - ], - [ - "▁Football", - -11.7850341796875 - ], - [ - "▁Indeed", - -11.785293579101562 - ], - [ - "▁weapon", - -11.785333633422852 - ], - [ - "▁destroyed", - -11.785457611083984 - ], - [ - "▁enormous", - -11.785594940185547 - ], - [ - "▁blanket", - -11.7857084274292 - ], - [ - "▁aktiv", - -11.785759925842285 - ], - [ - "raw", - -11.785791397094727 - ], - [ - "▁computing", - -11.785823822021484 - ], - [ - "6)", - -11.785955429077148 - ], - [ - "▁Dam", - -11.786152839660645 - ], - [ - "▁confort", - -11.786174774169922 - ], - [ - "▁Gla", - -11.786198616027832 - ], - [ - "hardly", - -11.786242485046387 - ], - [ - "▁annually", - -11.786269187927246 - ], - [ - "▁destinations", - -11.786401748657227 - ], - [ - "▁guilty", - -11.786404609680176 - ], - [ - "▁scholarship", - -11.786439895629883 - ], - [ - "▁harmful", - -11.786453247070312 - ], - [ - "▁2-3", - -11.786616325378418 - ], - [ - "▁Race", - -11.786638259887695 - ], - [ - "▁hypo", - -11.78671646118164 - ], - [ - "▁shorter", - -11.786733627319336 - ], - [ - "quest", - -11.78675651550293 - ], - [ - "uze", - -11.786812782287598 - ], - [ - "izi", - -11.787005424499512 - ], - [ - "OO", - -11.787095069885254 - ], - [ - "▁Schutz", - -11.787097930908203 - ], - [ - "▁Teilnehmer", - -11.787185668945312 - ], - [ - "▁profiles", - -11.787199020385742 - ], - [ - "▁sustainability", - -11.78747272491455 - ], - [ - "▁emb", - -11.787489891052246 - ], - [ - "▁Augen", - -11.787516593933105 - ], - [ - "▁outdoors", - -11.787542343139648 - ], - [ - "▁Individual", - -11.787548065185547 - ], - [ - "▁pou", - -11.78757095336914 - ], - [ - "▁Together", - -11.787575721740723 - ], - [ - "HT", - -11.787674903869629 - ], - [ - "suited", - -11.787755012512207 - ], - [ - "▁tro", - -11.787782669067383 - ], - [ - "▁Strom", - -11.787805557250977 - ], - [ - "▁achievement", - -11.78799819946289 - ], - [ - "▁Range", - -11.78815746307373 - ], - [ - "tory", - -11.78817081451416 - ], - [ - "▁distribute", - -11.788250923156738 
- ], - [ - "▁letzte", - -11.788276672363281 - ], - [ - "incorporated", - -11.788287162780762 - ], - [ - "▁Kir", - -11.788325309753418 - ], - [ - "ruf", - -11.78839111328125 - ], - [ - "▁disappointed", - -11.788543701171875 - ], - [ - "▁referral", - -11.788602828979492 - ], - [ - "flam", - -11.788687705993652 - ], - [ - "▁excessive", - -11.7886962890625 - ], - [ - "▁rapidement", - -11.788743019104004 - ], - [ - "▁Rio", - -11.78875732421875 - ], - [ - "aţia", - -11.788951873779297 - ], - [ - "▁meuble", - -11.78912353515625 - ], - [ - "▁2008.", - -11.789135932922363 - ], - [ - "▁Gall", - -11.78915023803711 - ], - [ - "▁française", - -11.789369583129883 - ], - [ - "▁ladies", - -11.789695739746094 - ], - [ - "ailed", - -11.789746284484863 - ], - [ - "El", - -11.789834976196289 - ], - [ - "▁wines", - -11.789868354797363 - ], - [ - "▁beispielsweise", - -11.789876937866211 - ], - [ - "▁gamme", - -11.790193557739258 - ], - [ - "▁guided", - -11.79028034210205 - ], - [ - "▁plin", - -11.790339469909668 - ], - [ - "Î", - -11.790390968322754 - ], - [ - "▁True", - -11.790498733520508 - ], - [ - "▁Temple", - -11.790507316589355 - ], - [ - "▁Pic", - -11.790520668029785 - ], - [ - "permalink", - -11.790547370910645 - ], - [ - "▁vedea", - -11.790656089782715 - ], - [ - "▁rank", - -11.790922164916992 - ], - [ - "▁Grill", - -11.791025161743164 - ], - [ - "clin", - -11.791070938110352 - ], - [ - "▁Hab", - -11.791089057922363 - ], - [ - "▁odds", - -11.791125297546387 - ], - [ - "▁anytime", - -11.791146278381348 - ], - [ - "▁Thanksgiving", - -11.791265487670898 - ], - [ - "guard", - -11.791300773620605 - ], - [ - "▁essays", - -11.791389465332031 - ], - [ - "▁PE", - -11.79139518737793 - ], - [ - "▁Rechts", - -11.791494369506836 - ], - [ - "mals", - -11.791751861572266 - ], - [ - "achi", - -11.791762351989746 - ], - [ - "▁Anthony", - -11.791765213012695 - ], - [ - "▁réponse", - -11.792036056518555 - ], - [ - "standing", - -11.79227352142334 - ], - [ - "▁Mol", - -11.792427062988281 - ], - [ 
- "▁Canon", - -11.792474746704102 - ], - [ - "▁silk", - -11.792515754699707 - ], - [ - "▁pourrait", - -11.79278564453125 - ], - [ - "▁raport", - -11.79280948638916 - ], - [ - "▁Woche", - -11.792889595031738 - ], - [ - "fallen", - -11.79293155670166 - ], - [ - "sting", - -11.79310131072998 - ], - [ - "▁circulation", - -11.793102264404297 - ], - [ - "▁skirt", - -11.7931547164917 - ], - [ - "▁Title", - -11.793187141418457 - ], - [ - "▁17.", - -11.79331111907959 - ], - [ - "▁Touch", - -11.793486595153809 - ], - [ - "▁utilizat", - -11.79352855682373 - ], - [ - "▁Organisation", - -11.793569564819336 - ], - [ - "▁mereu", - -11.793848991394043 - ], - [ - "▁oxygen", - -11.793953895568848 - ], - [ - "lique", - -11.793985366821289 - ], - [ - "▁consume", - -11.794100761413574 - ], - [ - "▁Barb", - -11.794102668762207 - ], - [ - "1.1", - -11.794105529785156 - ], - [ - "▁nicely", - -11.79419231414795 - ], - [ - "▁psychological", - -11.794227600097656 - ], - [ - "▁refrigerator", - -11.794478416442871 - ], - [ - "▁fantasy", - -11.79481029510498 - ], - [ - "▁dispute", - -11.79494571685791 - ], - [ - "▁IBM", - -11.794954299926758 - ], - [ - "▁Nation", - -11.794971466064453 - ], - [ - "▁mobil", - -11.795063972473145 - ], - [ - "▁density", - -11.795201301574707 - ], - [ - "ske", - -11.795230865478516 - ], - [ - "▁intimate", - -11.795313835144043 - ], - [ - "▁tailored", - -11.795319557189941 - ], - [ - "▁outline", - -11.795472145080566 - ], - [ - "TN", - -11.79554557800293 - ], - [ - "mur", - -11.795634269714355 - ], - [ - "GC", - -11.795662879943848 - ], - [ - "they", - -11.795992851257324 - ], - [ - "pag", - -11.796161651611328 - ], - [ - "▁Kultur", - -11.796246528625488 - ], - [ - "grün", - -11.796281814575195 - ], - [ - "voted", - -11.796529769897461 - ], - [ - "▁donné", - -11.796546936035156 - ], - [ - "▁Să", - -11.796629905700684 - ], - [ - "enberg", - -11.796648979187012 - ], - [ - "▁wi", - -11.79686450958252 - ], - [ - "▁Francis", - -11.797057151794434 - ], - [ - "▁Rick", - 
-11.797157287597656 - ], - [ - "accord", - -11.797403335571289 - ], - [ - "▁Zusammen", - -11.797415733337402 - ], - [ - "▁nonprofit", - -11.797456741333008 - ], - [ - "▁listings", - -11.797615051269531 - ], - [ - "6,", - -11.797908782958984 - ], - [ - "▁maximize", - -11.798253059387207 - ], - [ - "bud", - -11.798345565795898 - ], - [ - "▁promotional", - -11.798486709594727 - ], - [ - "cina", - -11.798646926879883 - ], - [ - "▁potatoes", - -11.79869556427002 - ], - [ - "▁mot", - -11.798871040344238 - ], - [ - "carries", - -11.799384117126465 - ], - [ - "▁stabilit", - -11.799458503723145 - ], - [ - "▁Door", - -11.799574851989746 - ], - [ - "▁downloaded", - -11.799574851989746 - ], - [ - "▁experimental", - -11.799724578857422 - ], - [ - "HD", - -11.7997407913208 - ], - [ - "▁parfois", - -11.79980182647705 - ], - [ - "▁zeigen", - -11.800092697143555 - ], - [ - "▁proposé", - -11.80030632019043 - ], - [ - "▁Verein", - -11.800636291503906 - ], - [ - "▁amestec", - -11.800676345825195 - ], - [ - "▁entreprise", - -11.800718307495117 - ], - [ - "▁PSD", - -11.800841331481934 - ], - [ - "▁bake", - -11.800897598266602 - ], - [ - "▁Rh", - -11.800904273986816 - ], - [ - "▁Mehr", - -11.800922393798828 - ], - [ - "▁purple", - -11.801074028015137 - ], - [ - "▁recipient", - -11.80109691619873 - ], - [ - "rare", - -11.801166534423828 - ], - [ - "egi", - -11.80117130279541 - ], - [ - "ancien", - -11.801176071166992 - ], - [ - "▁risque", - -11.80118465423584 - ], - [ - "▁mystery", - -11.80157470703125 - ], - [ - "mac", - -11.801697731018066 - ], - [ - "ibility", - -11.80182933807373 - ], - [ - "▁Moore", - -11.801881790161133 - ], - [ - "▁flavors", - -11.801911354064941 - ], - [ - "▁trauma", - -11.801966667175293 - ], - [ - "▁automotive", - -11.802112579345703 - ], - [ - "▁Anyway", - -11.802197456359863 - ], - [ - "▁simulation", - -11.802253723144531 - ], - [ - "▁crafts", - -11.802525520324707 - ], - [ - "▁measurements", - -11.80257511138916 - ], - [ - "▁cour", - -11.80257797241211 - ], - 
[ - "▁tard", - -11.802600860595703 - ], - [ - "nnie", - -11.802881240844727 - ], - [ - "▁Production", - -11.803388595581055 - ], - [ - "▁Cleaning", - -11.803567886352539 - ], - [ - "5,", - -11.803644180297852 - ], - [ - "▁Islamic", - -11.803766250610352 - ], - [ - "▁Gate", - -11.80378532409668 - ], - [ - "bay", - -11.803814888000488 - ], - [ - "HR", - -11.803990364074707 - ], - [ - "▁Offer", - -11.80399227142334 - ], - [ - "▁acceptance", - -11.804107666015625 - ], - [ - "▁Erfahrung", - -11.80412769317627 - ], - [ - "▁environ", - -11.804193496704102 - ], - [ - "▁fancy", - -11.804218292236328 - ], - [ - "▁bullet", - -11.80437183380127 - ], - [ - "organ", - -11.804466247558594 - ], - [ - "▁Peace", - -11.804520606994629 - ], - [ - "▁detalii", - -11.80461597442627 - ], - [ - "▁promised", - -11.804715156555176 - ], - [ - "▁wellness", - -11.804746627807617 - ], - [ - "▁satisfy", - -11.80481243133545 - ], - [ - "▁grants", - -11.805212020874023 - ], - [ - "accueil", - -11.80522346496582 - ], - [ - "▁oben", - -11.805412292480469 - ], - [ - "▁prospects", - -11.80543327331543 - ], - [ - "▁Events", - -11.805513381958008 - ], - [ - "2013", - -11.805569648742676 - ], - [ - "gesehen", - -11.805685997009277 - ], - [ - "▁£1", - -11.805727005004883 - ], - [ - "▁handelt", - -11.805798530578613 - ], - [ - "▁Spieler", - -11.805876731872559 - ], - [ - "▁Virtual", - -11.806145668029785 - ], - [ - "▁bubble", - -11.806239128112793 - ], - [ - "▁Trend", - -11.806254386901855 - ], - [ - "▁sistemul", - -11.806315422058105 - ], - [ - "▁Morgan", - -11.806320190429688 - ], - [ - "▁pole", - -11.806503295898438 - ], - [ - "▁spielen", - -11.806533813476562 - ], - [ - "tür", - -11.806571006774902 - ], - [ - "SCO", - -11.806572914123535 - ], - [ - "▁informative", - -11.806678771972656 - ], - [ - "▁affirm", - -11.806755065917969 - ], - [ - "▁Aqua", - -11.806818008422852 - ], - [ - "▁AR", - -11.806888580322266 - ], - [ - "richten", - -11.807071685791016 - ], - [ - "▁rewards", - -11.807122230529785 - ], - 
[ - "lub", - -11.807235717773438 - ], - [ - "shot", - -11.807236671447754 - ], - [ - "LM", - -11.807540893554688 - ], - [ - "Up", - -11.807586669921875 - ], - [ - "▁absolut", - -11.807737350463867 - ], - [ - "▁Mart", - -11.807806968688965 - ], - [ - "erweise", - -11.807812690734863 - ], - [ - "BP", - -11.807977676391602 - ], - [ - "▁difficile", - -11.808152198791504 - ], - [ - "▁Document", - -11.808159828186035 - ], - [ - "▁Sweet", - -11.8082914352417 - ], - [ - "▁indicator", - -11.808338165283203 - ], - [ - "▁Boden", - -11.808389663696289 - ], - [ - "mates", - -11.808477401733398 - ], - [ - "▁supporters", - -11.808504104614258 - ], - [ - "▁begun", - -11.808600425720215 - ], - [ - "▁blogging", - -11.808611869812012 - ], - [ - "▁CL", - -11.808663368225098 - ], - [ - "gres", - -11.808692932128906 - ], - [ - "▁preferences", - -11.808738708496094 - ], - [ - "▁screw", - -11.808756828308105 - ], - [ - "▁tutor", - -11.808858871459961 - ], - [ - "▁Additional", - -11.80891227722168 - ], - [ - "▁Bitte", - -11.808976173400879 - ], - [ - "utilizing", - -11.808998107910156 - ], - [ - "▁expérience", - -11.809073448181152 - ], - [ - "▁dur", - -11.809146881103516 - ], - [ - "▁precisely", - -11.809178352355957 - ], - [ - "▁janvier", - -11.809394836425781 - ], - [ - "AGE", - -11.80987548828125 - ], - [ - "moto", - -11.810007095336914 - ], - [ - "▁counsel", - -11.810195922851562 - ], - [ - "▁110", - -11.810226440429688 - ], - [ - "nick", - -11.810245513916016 - ], - [ - "licit", - -11.810540199279785 - ], - [ - "technik", - -11.810659408569336 - ], - [ - "▁collaborate", - -11.810736656188965 - ], - [ - "▁neighbors", - -11.810794830322266 - ], - [ - "tered", - -11.810922622680664 - ], - [ - "▁excel", - -11.811025619506836 - ], - [ - "▁Route", - -11.811059951782227 - ], - [ - "steuer", - -11.81109619140625 - ], - [ - "▁pioneer", - -11.811607360839844 - ], - [ - "nuit", - -11.81169319152832 - ], - [ - "▁skip", - -11.811963081359863 - ], - [ - "▁destruction", - -11.811997413635254 - ], - 
[ - "▁thesis", - -11.812249183654785 - ], - [ - "▁libre", - -11.812317848205566 - ], - [ - "▁petition", - -11.81234073638916 - ], - [ - "▁steady", - -11.812456130981445 - ], - [ - "▁medications", - -11.812458992004395 - ], - [ - "▁audiences", - -11.812623023986816 - ], - [ - "▁coaches", - -11.812689781188965 - ], - [ - "aller", - -11.812704086303711 - ], - [ - "3,000", - -11.812705993652344 - ], - [ - "▁anger", - -11.812785148620605 - ], - [ - "▁striking", - -11.812844276428223 - ], - [ - "▁shades", - -11.81291675567627 - ], - [ - "▁Sitz", - -11.812994956970215 - ], - [ - "▁gluten", - -11.813162803649902 - ], - [ - "▁egal", - -11.813222885131836 - ], - [ - "ania", - -11.813223838806152 - ], - [ - "▁defend", - -11.813241004943848 - ], - [ - "gut", - -11.81382942199707 - ], - [ - "▁reserves", - -11.813895225524902 - ], - [ - "▁advocate", - -11.814053535461426 - ], - [ - "▁Cit", - -11.814082145690918 - ], - [ - "▁technicians", - -11.814105033874512 - ], - [ - "▁cater", - -11.814138412475586 - ], - [ - "leitung", - -11.814190864562988 - ], - [ - "▁towns", - -11.814335823059082 - ], - [ - "▁Costa", - -11.814364433288574 - ], - [ - "▁confront", - -11.814567565917969 - ], - [ - "mount", - -11.814652442932129 - ], - [ - "▁nationale", - -11.814706802368164 - ], - [ - "▁adverse", - -11.814932823181152 - ], - [ - "▁couleur", - -11.815112113952637 - ], - [ - "▁delight", - -11.815169334411621 - ], - [ - "▁promises", - -11.815224647521973 - ], - [ - "▁silent", - -11.81550121307373 - ], - [ - "richtet", - -11.815556526184082 - ], - [ - "▁Companies", - -11.815614700317383 - ], - [ - "▁Charlotte", - -11.815620422363281 - ], - [ - "▁labels", - -11.815652847290039 - ], - [ - "▁Süd", - -11.815656661987305 - ], - [ - "▁Honor", - -11.81567096710205 - ], - [ - "▁complaints", - -11.815710067749023 - ], - [ - "▁siècle", - -11.815752029418945 - ], - [ - "▁suits", - -11.815792083740234 - ], - [ - "▁Bath", - -11.815827369689941 - ], - [ - "mise", - -11.815926551818848 - ], - [ - "▁acela", - 
-11.8159818649292 - ], - [ - "▁candidat", - -11.816011428833008 - ], - [ - "Flo", - -11.816207885742188 - ], - [ - "▁conservative", - -11.816215515136719 - ], - [ - "DD", - -11.816314697265625 - ], - [ - "▁changement", - -11.816414833068848 - ], - [ - "▁login", - -11.816492080688477 - ], - [ - "▁Fashion", - -11.816585540771484 - ], - [ - "reichen", - -11.816672325134277 - ], - [ - "through", - -11.816751480102539 - ], - [ - "aki", - -11.817240715026855 - ], - [ - "gna", - -11.817547798156738 - ], - [ - "▁verse", - -11.817551612854004 - ], - [ - "▁threats", - -11.817622184753418 - ], - [ - "▁Song", - -11.817770004272461 - ], - [ - "▁funded", - -11.81792163848877 - ], - [ - "langen", - -11.818023681640625 - ], - [ - "▁distribu", - -11.818195343017578 - ], - [ - "édition", - -11.818316459655762 - ], - [ - "▁royal", - -11.818562507629395 - ], - [ - "▁bevor", - -11.818829536437988 - ], - [ - "▁02", - -11.818854331970215 - ], - [ - "straße", - -11.818938255310059 - ], - [ - "edit", - -11.81904125213623 - ], - [ - "▁energetic", - -11.81922721862793 - ], - [ - "▁Carr", - -11.819757461547852 - ], - [ - "viol", - -11.819937705993652 - ], - [ - "▁niche", - -11.820054054260254 - ], - [ - "avais", - -11.820099830627441 - ], - [ - "▁backyard", - -11.82010269165039 - ], - [ - "▁Saudi", - -11.820158958435059 - ], - [ - "▁Zwei", - -11.820207595825195 - ], - [ - "▁Legal", - -11.82027530670166 - ], - [ - "accessed", - -11.820277214050293 - ], - [ - "▁choisi", - -11.820340156555176 - ], - [ - "▁GDP", - -11.820343971252441 - ], - [ - "oferă", - -11.820352554321289 - ], - [ - "hlen", - -11.820490837097168 - ], - [ - "▁Wor", - -11.820520401000977 - ], - [ - "▁cheer", - -11.820586204528809 - ], - [ - "▁barely", - -11.820625305175781 - ], - [ - "cost", - -11.820646286010742 - ], - [ - "▁Really", - -11.820661544799805 - ], - [ - "kol", - -11.820721626281738 - ], - [ - "▁binding", - -11.821045875549316 - ], - [ - "euer", - -11.821136474609375 - ], - [ - "▁optimization", - -11.821158409118652 
- ], - [ - "▁Designer", - -11.8211669921875 - ], - [ - "▁measuring", - -11.82117748260498 - ], - [ - "ncy", - -11.821516036987305 - ], - [ - "weise", - -11.821520805358887 - ], - [ - "DER", - -11.821850776672363 - ], - [ - "▁$7", - -11.821949005126953 - ], - [ - "▁Anfang", - -11.821954727172852 - ], - [ - "material", - -11.821967124938965 - ], - [ - "▁antique", - -11.822281837463379 - ], - [ - "▁Certificate", - -11.822294235229492 - ], - [ - "▁modest", - -11.822370529174805 - ], - [ - "ției", - -11.822427749633789 - ], - [ - "▁praise", - -11.82245922088623 - ], - [ - "▁Springs", - -11.822660446166992 - ], - [ - "▁organiza", - -11.823041915893555 - ], - [ - "jurul", - -11.823047637939453 - ], - [ - "▁plumbing", - -11.82341194152832 - ], - [ - "▁foster", - -11.823490142822266 - ], - [ - "▁Wy", - -11.823491096496582 - ], - [ - "▁Sab", - -11.823503494262695 - ], - [ - "▁overwhelming", - -11.823677062988281 - ], - [ - "▁matin", - -11.823812484741211 - ], - [ - "▁responded", - -11.82408332824707 - ], - [ - "▁confused", - -11.824150085449219 - ], - [ - "▁blessed", - -11.824280738830566 - ], - [ - "▁160", - -11.824295997619629 - ], - [ - "▁ingredient", - -11.824360847473145 - ], - [ - "▁confer", - -11.82448673248291 - ], - [ - "▁Gesundheit", - -11.824530601501465 - ], - [ - "▁bucket", - -11.824555397033691 - ], - [ - "kraft", - -11.824565887451172 - ], - [ - "lange", - -11.824630737304688 - ], - [ - "▁Kopf", - -11.824678421020508 - ], - [ - "▁Prize", - -11.824678421020508 - ], - [ - "▁authorized", - -11.824779510498047 - ], - [ - "▁tick", - -11.824803352355957 - ], - [ - "▁steal", - -11.824910163879395 - ], - [ - "Depending", - -11.824918746948242 - ], - [ - "Depuis", - -11.824952125549316 - ], - [ - "▁functie", - -11.82499885559082 - ], - [ - "▁developments", - -11.825053215026855 - ], - [ - "▁Christians", - -11.825311660766602 - ], - [ - "▁calculated", - -11.8256254196167 - ], - [ - "▁Leave", - -11.825672149658203 - ], - [ - "▁Jam", - -11.82573413848877 - ], - [ - 
"▁habitat", - -11.825760841369629 - ], - [ - "▁Sorry", - -11.825801849365234 - ], - [ - "▁oficial", - -11.825944900512695 - ], - [ - "▁allein", - -11.826079368591309 - ], - [ - "▁concentrate", - -11.82608413696289 - ], - [ - "dica", - -11.826302528381348 - ], - [ - "▁Convention", - -11.826476097106934 - ], - [ - "illes", - -11.826550483703613 - ], - [ - "▁fum", - -11.82664680480957 - ], - [ - "▁Tal", - -11.826651573181152 - ], - [ - "Europe", - -11.826899528503418 - ], - [ - "▁attachment", - -11.826949119567871 - ], - [ - "▁sensibil", - -11.826995849609375 - ], - [ - "▁clue", - -11.82715892791748 - ], - [ - "▁specialty", - -11.827203750610352 - ], - [ - "▁Cou", - -11.827229499816895 - ], - [ - "▁liste", - -11.827278137207031 - ], - [ - "▁Penn", - -11.827465057373047 - ], - [ - "TRA", - -11.827559471130371 - ], - [ - "▁Themen", - -11.827561378479004 - ], - [ - "▁motivated", - -11.827906608581543 - ], - [ - "▁camere", - -11.828017234802246 - ], - [ - "▁14,", - -11.828393936157227 - ], - [ - "▁attendance", - -11.828557968139648 - ], - [ - "atorii", - -11.828581809997559 - ], - [ - "chemistry", - -11.82873821258545 - ], - [ - "▁roofing", - -11.828959465026855 - ], - [ - "▁Links", - -11.829048156738281 - ], - [ - "▁trou", - -11.829103469848633 - ], - [ - "▁trucks", - -11.829136848449707 - ], - [ - "hilfe", - -11.829557418823242 - ], - [ - "▁(6", - -11.829599380493164 - ], - [ - "vapor", - -11.82964038848877 - ], - [ - "mad", - -11.829668045043945 - ], - [ - "▁Albert", - -11.829877853393555 - ], - [ - "▁FIG", - -11.830073356628418 - ], - [ - "▁Rand", - -11.830187797546387 - ], - [ - "▁Constitution", - -11.830219268798828 - ], - [ - "ambi", - -11.830294609069824 - ], - [ - "▁Syria", - -11.830307006835938 - ], - [ - "▁Fond", - -11.830477714538574 - ], - [ - "▁gouvernement", - -11.830594062805176 - ], - [ - "▁Active", - -11.830705642700195 - ], - [ - "▁prints", - -11.830801963806152 - ], - [ - "▁weigh", - -11.8308687210083 - ], - [ - "▁Craft", - -11.831069946289062 - ], - [ 
- "▁projets", - -11.831247329711914 - ], - [ - "▁paste", - -11.831377029418945 - ], - [ - "anci", - -11.83139705657959 - ], - [ - "kie", - -11.831411361694336 - ], - [ - "▁gains", - -11.83165168762207 - ], - [ - "▁Record", - -11.831942558288574 - ], - [ - "▁beliefs", - -11.831954956054688 - ], - [ - "countless", - -11.831957817077637 - ], - [ - "▁tomatoes", - -11.831997871398926 - ], - [ - "arie", - -11.832082748413086 - ], - [ - "▁140", - -11.83211612701416 - ], - [ - "▁ethical", - -11.832229614257812 - ], - [ - "objectif", - -11.832279205322266 - ], - [ - "▁acestuia", - -11.832283973693848 - ], - [ - "▁Bluetooth", - -11.832398414611816 - ], - [ - "▁agriculture", - -11.832746505737305 - ], - [ - "uré", - -11.833027839660645 - ], - [ - "▁cale", - -11.833072662353516 - ], - [ - "▁articol", - -11.833073616027832 - ], - [ - "▁gum", - -11.833319664001465 - ], - [ - "▁vendor", - -11.833490371704102 - ], - [ - "ifié", - -11.833527565002441 - ], - [ - "▁peer", - -11.833662033081055 - ], - [ - "pod", - -11.834036827087402 - ], - [ - "▁utilized", - -11.834113121032715 - ], - [ - "▁Mü", - -11.834207534790039 - ], - [ - "owohl", - -11.834208488464355 - ], - [ - "hilst", - -11.834233283996582 - ], - [ - "frame", - -11.834260940551758 - ], - [ - "▁fridge", - -11.834822654724121 - ], - [ - "▁query", - -11.835108757019043 - ], - [ - "▁Survey", - -11.835227012634277 - ], - [ - "▁Hell", - -11.835247993469238 - ], - [ - "▁notification", - -11.83530044555664 - ], - [ - "TR", - -11.83538818359375 - ], - [ - "▁ultima", - -11.835505485534668 - ], - [ - "▁radiation", - -11.835631370544434 - ], - [ - "▁musicians", - -11.835821151733398 - ], - [ - "CAN", - -11.83595085144043 - ], - [ - "▁grocery", - -11.83607292175293 - ], - [ - "▁Sicherheit", - -11.83611011505127 - ], - [ - "▁Highway", - -11.836276054382324 - ], - [ - "▁Break", - -11.836285591125488 - ], - [ - "TED", - -11.836345672607422 - ], - [ - "ön", - -11.836352348327637 - ], - [ - "▁biological", - -11.836352348327637 - ], - [ - 
"qual", - -11.836397171020508 - ], - [ - "250", - -11.83641242980957 - ], - [ - "▁modify", - -11.836651802062988 - ], - [ - "▁Hit", - -11.836698532104492 - ], - [ - "▁Iar", - -11.836838722229004 - ], - [ - "aged", - -11.836884498596191 - ], - [ - "...)", - -11.83688735961914 - ], - [ - "▁contrat", - -11.836928367614746 - ], - [ - "▁centres", - -11.836956977844238 - ], - [ - "griff", - -11.836987495422363 - ], - [ - "Our", - -11.837233543395996 - ], - [ - "▁determination", - -11.837300300598145 - ], - [ - "▁variables", - -11.83742904663086 - ], - [ - "▁nuts", - -11.837472915649414 - ], - [ - "échange", - -11.837577819824219 - ], - [ - "extérieur", - -11.837631225585938 - ], - [ - "▁suflet", - -11.83764362335205 - ], - [ - "▁Scha", - -11.837752342224121 - ], - [ - "stück", - -11.837774276733398 - ], - [ - "▁Tau", - -11.837821960449219 - ], - [ - "▁participa", - -11.838008880615234 - ], - [ - "▁mad", - -11.838034629821777 - ], - [ - "▁relie", - -11.838051795959473 - ], - [ - "▁Fine", - -11.83808422088623 - ], - [ - "▁grape", - -11.838118553161621 - ], - [ - "▁wage", - -11.838141441345215 - ], - [ - "▁startup", - -11.838193893432617 - ], - [ - "▁blank", - -11.838194847106934 - ], - [ - "▁physique", - -11.838199615478516 - ], - [ - "▁punch", - -11.838233947753906 - ], - [ - "▁contacts", - -11.838321685791016 - ], - [ - "▁dezvolt", - -11.83835220336914 - ], - [ - "cross", - -11.838639259338379 - ], - [ - "▁TR", - -11.838652610778809 - ], - [ - "▁gener", - -11.838754653930664 - ], - [ - "▁indem", - -11.838823318481445 - ], - [ - "▁Stan", - -11.838839530944824 - ], - [ - "▁azi", - -11.838930130004883 - ], - [ - "▁Sel", - -11.838958740234375 - ], - [ - "▁Tot", - -11.83924674987793 - ], - [ - "vra", - -11.839341163635254 - ], - [ - "▁recruit", - -11.839482307434082 - ], - [ - "▁Yeah", - -11.839494705200195 - ], - [ - "/10", - -11.839507102966309 - ], - [ - "▁nail", - -11.83956241607666 - ], - [ - "▁Ky", - -11.839611053466797 - ], - [ - "▁beloved", - -11.839760780334473 - ], 
- [ - "operative", - -11.839823722839355 - ], - [ - "▁Tickets", - -11.83983325958252 - ], - [ - "▁tear", - -11.840229988098145 - ], - [ - "▁amp", - -11.840352058410645 - ], - [ - "▁04", - -11.840361595153809 - ], - [ - "▁illustrate", - -11.840361595153809 - ], - [ - "▁mac", - -11.840400695800781 - ], - [ - "▁receiver", - -11.840482711791992 - ], - [ - "atrice", - -11.840508460998535 - ], - [ - "▁souhait", - -11.840572357177734 - ], - [ - "▁Gewinn", - -11.840619087219238 - ], - [ - "▁Vit", - -11.840808868408203 - ], - [ - "roch", - -11.841202735900879 - ], - [ - "▁arata", - -11.841262817382812 - ], - [ - "▁Indiana", - -11.841364860534668 - ], - [ - "child", - -11.841516494750977 - ], - [ - "▁invested", - -11.84157657623291 - ], - [ - "▁Excellent", - -11.841625213623047 - ], - [ - "gori", - -11.841769218444824 - ], - [ - "▁thermal", - -11.841813087463379 - ], - [ - "Str", - -11.841973304748535 - ], - [ - "▁liver", - -11.84201717376709 - ], - [ - "miss", - -11.842035293579102 - ], - [ - "▁utiliser", - -11.842120170593262 - ], - [ - "▁prest", - -11.842445373535156 - ], - [ - "2016", - -11.842506408691406 - ], - [ - "isée", - -11.842508316040039 - ], - [ - "▁Index", - -11.842559814453125 - ], - [ - "▁arch", - -11.842639923095703 - ], - [ - "▁Toyota", - -11.842748641967773 - ], - [ - "▁YOUR", - -11.842782020568848 - ], - [ - "▁Mexican", - -11.842891693115234 - ], - [ - "▁gegenüber", - -11.842940330505371 - ], - [ - "▁cannabis", - -11.843033790588379 - ], - [ - "bis", - -11.843077659606934 - ], - [ - "vage", - -11.843083381652832 - ], - [ - "hall", - -11.843091011047363 - ], - [ - "fax", - -11.843137741088867 - ], - [ - "▁spoken", - -11.843232154846191 - ], - [ - "▁Zimmer", - -11.843544960021973 - ], - [ - "kauf", - -11.8436279296875 - ], - [ - "▁couleurs", - -11.843705177307129 - ], - [ - "▁NJ", - -11.844026565551758 - ], - [ - "▁Heritage", - -11.844318389892578 - ], - [ - "▁Pflege", - -11.844321250915527 - ], - [ - "luc", - -11.844361305236816 - ], - [ - "▁56", - 
-11.844489097595215 - ], - [ - "VP", - -11.844542503356934 - ], - [ - "▁cuvinte", - -11.844594955444336 - ], - [ - "▁Alliance", - -11.844614028930664 - ], - [ - "▁coco", - -11.844615936279297 - ], - [ - "▁leverage", - -11.844762802124023 - ], - [ - "auch", - -11.844844818115234 - ], - [ - "▁Cart", - -11.84506607055664 - ], - [ - "taux", - -11.84532642364502 - ], - [ - "east", - -11.84560775756836 - ], - [ - "▁decorating", - -11.84565258026123 - ], - [ - "tip", - -11.84565544128418 - ], - [ - "▁Communications", - -11.845780372619629 - ], - [ - "ACE", - -11.84580135345459 - ], - [ - "▁Consul", - -11.845993041992188 - ], - [ - "▁Swiss", - -11.846197128295898 - ], - [ - "inci", - -11.846230506896973 - ], - [ - "▁Fact", - -11.846312522888184 - ], - [ - "▁ajung", - -11.846321105957031 - ], - [ - "▁airline", - -11.846325874328613 - ], - [ - "▁kidney", - -11.846379280090332 - ], - [ - "▁Records", - -11.84642505645752 - ], - [ - "▁Olympic", - -11.846747398376465 - ], - [ - "▁dried", - -11.84719467163086 - ], - [ - "oivent", - -11.847333908081055 - ], - [ - "▁Adobe", - -11.847467422485352 - ], - [ - "▁powers", - -11.847748756408691 - ], - [ - "lande", - -11.847834587097168 - ], - [ - "▁relieve", - -11.847858428955078 - ], - [ - "ţine", - -11.847898483276367 - ], - [ - "▁gradually", - -11.847945213317871 - ], - [ - "mud", - -11.84811019897461 - ], - [ - "▁30,", - -11.848116874694824 - ], - [ - "▁plante", - -11.848133087158203 - ], - [ - "▁Hug", - -11.848225593566895 - ], - [ - "▁Focus", - -11.84853458404541 - ], - [ - "▁distinctive", - -11.848594665527344 - ], - [ - "▁Bab", - -11.848662376403809 - ], - [ - "tata", - -11.848679542541504 - ], - [ - "▁Nun", - -11.848797798156738 - ], - [ - "▁Eve", - -11.848811149597168 - ], - [ - "▁déc", - -11.848881721496582 - ], - [ - "▁Beitrag", - -11.84900951385498 - ], - [ - "▁devenit", - -11.849042892456055 - ], - [ - "driven", - -11.849250793457031 - ], - [ - "▁offerings", - -11.84933853149414 - ], - [ - "▁exc", - -11.84941577911377 - ], 
- [ - "encies", - -11.849576950073242 - ], - [ - "▁Neuro", - -11.849588394165039 - ], - [ - "scher", - -11.849604606628418 - ], - [ - "map", - -11.849703788757324 - ], - [ - "pending", - -11.849783897399902 - ], - [ - "▁courage", - -11.849799156188965 - ], - [ - "axe", - -11.849894523620605 - ], - [ - "▁Gesellschaft", - -11.849900245666504 - ], - [ - "▁ears", - -11.85000991821289 - ], - [ - "▁aider", - -11.850403785705566 - ], - [ - "▁Cast", - -11.85042667388916 - ], - [ - "fast", - -11.850442886352539 - ], - [ - "▁departe", - -11.850502014160156 - ], - [ - "▁oak", - -11.850507736206055 - ], - [ - "▁batch", - -11.850730895996094 - ], - [ - "▁Corporate", - -11.850762367248535 - ], - [ - "▁Ost", - -11.850895881652832 - ], - [ - "-14", - -11.850897789001465 - ], - [ - "▁Pie", - -11.85115909576416 - ], - [ - "▁ranking", - -11.851273536682129 - ], - [ - "clusion", - -11.851316452026367 - ], - [ - "▁costume", - -11.851347923278809 - ], - [ - "▁Knight", - -11.851449966430664 - ], - [ - "▁privat", - -11.851577758789062 - ], - [ - "▁Engineer", - -11.851593971252441 - ], - [ - "▁gens", - -11.8517427444458 - ], - [ - "physics", - -11.85176944732666 - ], - [ - "generating", - -11.851773262023926 - ], - [ - "directement", - -11.851786613464355 - ], - [ - "▁confidential", - -11.851810455322266 - ], - [ - "▁poet", - -11.851937294006348 - ], - [ - "▁monster", - -11.851944923400879 - ], - [ - "▁suppose", - -11.851984977722168 - ], - [ - "său", - -11.851996421813965 - ], - [ - "▁balls", - -11.852103233337402 - ], - [ - "▁substitute", - -11.852137565612793 - ], - [ - "▁simultaneously", - -11.852238655090332 - ], - [ - "▁specify", - -11.852272033691406 - ], - [ - "wald", - -11.852287292480469 - ], - [ - "▁collapse", - -11.852352142333984 - ], - [ - "dessus", - -11.852458953857422 - ], - [ - "▁vitr", - -11.852516174316406 - ], - [ - "▁recruitment", - -11.852607727050781 - ], - [ - "denken", - -11.852632522583008 - ], - [ - "▁candy", - -11.852691650390625 - ], - [ - "▁tourists", - 
-11.852721214294434 - ], - [ - "dimensional", - -11.852782249450684 - ], - [ - "conce", - -11.852814674377441 - ], - [ - "wechsel", - -11.852822303771973 - ], - [ - "▁passende", - -11.852971076965332 - ], - [ - "industrie", - -11.85299301147461 - ], - [ - "agne", - -11.853127479553223 - ], - [ - "▁warehouse", - -11.853233337402344 - ], - [ - "▁Jugend", - -11.853277206420898 - ], - [ - "▁Weise", - -11.853357315063477 - ], - [ - "▁Zone", - -11.853528022766113 - ], - [ - "▁licence", - -11.853550910949707 - ], - [ - "▁broker", - -11.853630065917969 - ], - [ - "▁Rolle", - -11.85365104675293 - ], - [ - "pton", - -11.853789329528809 - ], - [ - "▁preference", - -11.853846549987793 - ], - [ - "▁homeowners", - -11.853861808776855 - ], - [ - "▁Lum", - -11.85387134552002 - ], - [ - "▁Chairman", - -11.853879928588867 - ], - [ - "▁Pages", - -11.853998184204102 - ], - [ - "▁beam", - -11.854005813598633 - ], - [ - "▁coordinate", - -11.854158401489258 - ], - [ - "▁Tool", - -11.854212760925293 - ], - [ - "▁complexity", - -11.854272842407227 - ], - [ - "▁checks", - -11.854339599609375 - ], - [ - "▁Bedroom", - -11.854405403137207 - ], - [ - "minded", - -11.854538917541504 - ], - [ - "▁copiii", - -11.854694366455078 - ], - [ - "▁celebrating", - -11.85470199584961 - ], - [ - "zimmer", - -11.854759216308594 - ], - [ - "▁Imagine", - -11.854759216308594 - ], - [ - "▁decoration", - -11.854830741882324 - ], - [ - "team", - -11.855354309082031 - ], - [ - "▁împreună", - -11.855369567871094 - ], - [ - "▁publicly", - -11.855391502380371 - ], - [ - "▁centuries", - -11.855514526367188 - ], - [ - "▁Islands", - -11.855644226074219 - ], - [ - "▁ethnic", - -11.855663299560547 - ], - [ - "still", - -11.85576057434082 - ], - [ - "stieg", - -11.855823516845703 - ], - [ - "emia", - -11.855904579162598 - ], - [ - "tags", - -11.856026649475098 - ], - [ - "▁marche", - -11.856062889099121 - ], - [ - "▁migration", - -11.856096267700195 - ], - [ - "▁banner", - -11.85616683959961 - ], - [ - "▁macro", - 
-11.856378555297852 - ], - [ - "▁Edit", - -11.856379508972168 - ], - [ - "tran", - -11.85656452178955 - ], - [ - "ça", - -11.856597900390625 - ], - [ - "▁recycling", - -11.856670379638672 - ], - [ - "▁1,000", - -11.856673240661621 - ], - [ - "▁Quelle", - -11.856891632080078 - ], - [ - "▁Vel", - -11.85700511932373 - ], - [ - "▁Rit", - -11.857025146484375 - ], - [ - "▁Spaß", - -11.857046127319336 - ], - [ - "▁Corn", - -11.857074737548828 - ], - [ - "tracted", - -11.857177734375 - ], - [ - "cited", - -11.857185363769531 - ], - [ - "▁tablets", - -11.857202529907227 - ], - [ - "▁Display", - -11.857337951660156 - ], - [ - "▁persoana", - -11.857392311096191 - ], - [ - "Term", - -11.857410430908203 - ], - [ - "▁Vancouver", - -11.857537269592285 - ], - [ - "▁Gäste", - -11.857550621032715 - ], - [ - "determining", - -11.857608795166016 - ], - [ - "▁populations", - -11.85778522491455 - ], - [ - "aison", - -11.857873916625977 - ], - [ - "▁surgical", - -11.858072280883789 - ], - [ - "tale", - -11.858160018920898 - ], - [ - "ivi", - -11.858283042907715 - ], - [ - "▁Zur", - -11.858388900756836 - ], - [ - "esprit", - -11.858574867248535 - ], - [ - "▁Edge", - -11.858665466308594 - ], - [ - "dach", - -11.858760833740234 - ], - [ - "phi", - -11.858773231506348 - ], - [ - "▁suc", - -11.858841896057129 - ], - [ - "▁scrie", - -11.858848571777344 - ], - [ - "▁Ausbildung", - -11.858885765075684 - ], - [ - "▁51", - -11.85892391204834 - ], - [ - "ologi", - -11.858938217163086 - ], - [ - "▁correction", - -11.859049797058105 - ], - [ - "▁Wald", - -11.859078407287598 - ], - [ - "▁additionally", - -11.859131813049316 - ], - [ - "▁proche", - -11.859353065490723 - ], - [ - "▁classical", - -11.859477996826172 - ], - [ - "▁bringen", - -11.859490394592285 - ], - [ - "▁(10", - -11.859611511230469 - ], - [ - "▁Mile", - -11.859809875488281 - ], - [ - "lace", - -11.859885215759277 - ], - [ - "▁premi", - -11.85988712310791 - ], - [ - "▁constitute", - -11.860029220581055 - ], - [ - "▁bitter", - 
-11.860078811645508 - ], - [ - "▁Inform", - -11.860295295715332 - ], - [ - "▁corporations", - -11.860334396362305 - ], - [ - "▁Lisa", - -11.860494613647461 - ], - [ - "▁obligat", - -11.860685348510742 - ], - [ - "Throughout", - -11.860738754272461 - ], - [ - "▁Rs", - -11.860769271850586 - ], - [ - "▁Hair", - -11.860916137695312 - ], - [ - "▁supplements", - -11.86099624633789 - ], - [ - "▁motorcycle", - -11.861054420471191 - ], - [ - "escent", - -11.861132621765137 - ], - [ - "▁investi", - -11.861222267150879 - ], - [ - "▁continuously", - -11.861265182495117 - ], - [ - "▁Essen", - -11.861334800720215 - ], - [ - "▁precision", - -11.8613862991333 - ], - [ - "▁deficit", - -11.861461639404297 - ], - [ - "▁wallet", - -11.861481666564941 - ], - [ - "▁Bürger", - -11.861531257629395 - ], - [ - "chir", - -11.861574172973633 - ], - [ - "9)", - -11.86161994934082 - ], - [ - "▁Programme", - -11.861716270446777 - ], - [ - "▁simplement", - -11.86193561553955 - ], - [ - "MD", - -11.862093925476074 - ], - [ - "▁rouge", - -11.862096786499023 - ], - [ - "usion", - -11.862133979797363 - ], - [ - "▁stove", - -11.862208366394043 - ], - [ - "▁prospective", - -11.862224578857422 - ], - [ - "▁corp", - -11.86234188079834 - ], - [ - "▁impacts", - -11.862401008605957 - ], - [ - "▁bride", - -11.86266803741455 - ], - [ - "0.0", - -11.862788200378418 - ], - [ - "hid", - -11.862833976745605 - ], - [ - "▁warrant", - -11.862930297851562 - ], - [ - "▁Ice", - -11.8631010055542 - ], - [ - "▁sensible", - -11.863151550292969 - ], - [ - "▁vreo", - -11.863166809082031 - ], - [ - "spekt", - -11.863249778747559 - ], - [ - "▁appreciation", - -11.8633394241333 - ], - [ - "▁automation", - -11.863377571105957 - ], - [ - "Luc", - -11.86341381072998 - ], - [ - "teaches", - -11.863471031188965 - ], - [ - "▁fold", - -11.863506317138672 - ], - [ - "deutsche", - -11.863523483276367 - ], - [ - "▁assisted", - -11.86380386352539 - ], - [ - "▁straightforward", - -11.863932609558105 - ], - [ - "▁mechanic", - 
-11.864068031311035 - ], - [ - "observ", - -11.864169120788574 - ], - [ - "▁Schau", - -11.864195823669434 - ], - [ - "▁Recently", - -11.864301681518555 - ], - [ - "kers", - -11.86435604095459 - ], - [ - "▁Soft", - -11.864455223083496 - ], - [ - "muni", - -11.864537239074707 - ], - [ - "▁lie", - -11.864617347717285 - ], - [ - "▁Fat", - -11.864728927612305 - ], - [ - "cream", - -11.86476993560791 - ], - [ - "▁snack", - -11.864909172058105 - ], - [ - "▁juin", - -11.865068435668945 - ], - [ - "▁competent", - -11.865134239196777 - ], - [ - "▁Drug", - -11.865141868591309 - ], - [ - "▁Row", - -11.865302085876465 - ], - [ - "▁needle", - -11.865852355957031 - ], - [ - "▁convey", - -11.865900039672852 - ], - [ - "▁voie", - -11.86600399017334 - ], - [ - "▁Hon", - -11.866190910339355 - ], - [ - "▁ebook", - -11.866194725036621 - ], - [ - "▁veteran", - -11.866209030151367 - ], - [ - "▁statistical", - -11.866217613220215 - ], - [ - "190", - -11.866312980651855 - ], - [ - "▁munca", - -11.866402626037598 - ], - [ - "▁venues", - -11.866438865661621 - ], - [ - "▁Viel", - -11.866604804992676 - ], - [ - "▁décor", - -11.866799354553223 - ], - [ - "▁répond", - -11.8670015335083 - ], - [ - "▁produsele", - -11.86700439453125 - ], - [ - "ruc", - -11.867009162902832 - ], - [ - "▁drops", - -11.867011070251465 - ], - [ - "▁autant", - -11.867311477661133 - ], - [ - "▁Fahrzeug", - -11.867313385009766 - ], - [ - "▁hills", - -11.86735725402832 - ], - [ - "ference", - -11.867414474487305 - ], - [ - "▁Glück", - -11.86742115020752 - ], - [ - "▁Pac", - -11.867480278015137 - ], - [ - "▁permettr", - -11.867568969726562 - ], - [ - "▁mouvement", - -11.867713928222656 - ], - [ - "établissement", - -11.867859840393066 - ], - [ - "▁Parc", - -11.867874145507812 - ], - [ - "▁solving", - -11.867900848388672 - ], - [ - "▁jail", - -11.867972373962402 - ], - [ - "▁junk", - -11.867980003356934 - ], - [ - "▁jeux", - -11.868091583251953 - ], - [ - "▁rôle", - -11.868107795715332 - ], - [ - "▁cache", - 
-11.868124961853027 - ], - [ - "▁Answer", - -11.86832046508789 - ], - [ - "wir", - -11.868706703186035 - ], - [ - "option", - -11.868732452392578 - ], - [ - "▁Tiger", - -11.868739128112793 - ], - [ - "▁Ble", - -11.868793487548828 - ], - [ - "Mitglied", - -11.868797302246094 - ], - [ - "▁partial", - -11.868819236755371 - ], - [ - "▁Mercedes", - -11.86888313293457 - ], - [ - "tire", - -11.869001388549805 - ], - [ - "MENT", - -11.869091987609863 - ], - [ - "▁transit", - -11.869230270385742 - ], - [ - "▁cineva", - -11.869285583496094 - ], - [ - "▁Andrea", - -11.869294166564941 - ], - [ - "▁boundaries", - -11.869497299194336 - ], - [ - "script", - -11.870061874389648 - ], - [ - "▁Medi", - -11.870123863220215 - ], - [ - "schreiben", - -11.870203018188477 - ], - [ - "▁lobby", - -11.87035846710205 - ], - [ - "▁defendant", - -11.870406150817871 - ], - [ - "▁sq", - -11.870467185974121 - ], - [ - "▁forgotten", - -11.870569229125977 - ], - [ - "stimmung", - -11.870651245117188 - ], - [ - "hus", - -11.870665550231934 - ], - [ - "RY", - -11.870728492736816 - ], - [ - "▁Anderson", - -11.870748519897461 - ], - [ - "▁Dental", - -11.870828628540039 - ], - [ - "ject", - -11.87110710144043 - ], - [ - "▁Nutzer", - -11.871377944946289 - ], - [ - "▁Portland", - -11.871540069580078 - ], - [ - "scription", - -11.871636390686035 - ], - [ - "▁angel", - -11.871695518493652 - ], - [ - "▁monument", - -11.871748924255371 - ], - [ - "▁număr", - -11.871784210205078 - ], - [ - "▁Lane", - -11.871800422668457 - ], - [ - "▁Bai", - -11.871894836425781 - ], - [ - "But", - -11.871909141540527 - ], - [ - "▁calculate", - -11.872315406799316 - ], - [ - "▁provoca", - -11.87247371673584 - ], - [ - "▁votes", - -11.872493743896484 - ], - [ - "RNA", - -11.872503280639648 - ], - [ - "though", - -11.87259292602539 - ], - [ - "spor", - -11.872631072998047 - ], - [ - "▁connaissance", - -11.872695922851562 - ], - [ - "▁Anwendung", - -11.872932434082031 - ], - [ - "▁Kate", - -11.873123168945312 - ], - [ - "lob", - 
-11.87315845489502 - ], - [ - "▁Conf", - -11.873180389404297 - ], - [ - "bung", - -11.873212814331055 - ], - [ - "ander", - -11.873282432556152 - ], - [ - "▁functioning", - -11.873297691345215 - ], - [ - "▁sponsored", - -11.873324394226074 - ], - [ - "rav", - -11.873734474182129 - ], - [ - "▁resistant", - -11.873797416687012 - ], - [ - "tră", - -11.873916625976562 - ], - [ - "▁costly", - -11.873923301696777 - ], - [ - "▁Mars", - -11.873991012573242 - ], - [ - "▁tir", - -11.874075889587402 - ], - [ - "▁writes", - -11.874134063720703 - ], - [ - "▁Greg", - -11.874267578125 - ], - [ - "▁Question", - -11.874714851379395 - ], - [ - "▁corporation", - -11.87485408782959 - ], - [ - "▁lire", - -11.874991416931152 - ], - [ - "locked", - -11.875048637390137 - ], - [ - "8,", - -11.875092506408691 - ], - [ - "▁sagt", - -11.875301361083984 - ], - [ - "gaining", - -11.87536907196045 - ], - [ - "▁Pierre", - -11.875688552856445 - ], - [ - "verb", - -11.875725746154785 - ], - [ - "▁Barcelona", - -11.87578296661377 - ], - [ - "werte", - -11.876474380493164 - ], - [ - "▁disponible", - -11.87651538848877 - ], - [ - "▁urge", - -11.876521110534668 - ], - [ - "▁expecting", - -11.876572608947754 - ], - [ - "▁Girl", - -11.87662124633789 - ], - [ - "▁unlimited", - -11.876761436462402 - ], - [ - "watt", - -11.876788139343262 - ], - [ - "▁Möglichkeiten", - -11.876813888549805 - ], - [ - "▁schöne", - -11.876847267150879 - ], - [ - "rium", - -11.877076148986816 - ], - [ - "That", - -11.877272605895996 - ], - [ - "▁socio", - -11.877296447753906 - ], - [ - "▁Democrats", - -11.877351760864258 - ], - [ - "guten", - -11.877422332763672 - ], - [ - "▁Lou", - -11.877425193786621 - ], - [ - "ităţi", - -11.877559661865234 - ], - [ - "▁possibilité", - -11.877717018127441 - ], - [ - "▁adjustable", - -11.877938270568848 - ], - [ - "▁Salt", - -11.877967834472656 - ], - [ - "Thr", - -11.878021240234375 - ], - [ - "▁biseric", - -11.878056526184082 - ], - [ - "ieux", - -11.87808895111084 - ], - [ - "▁procur", - 
-11.8782377243042 - ], - [ - "▁credits", - -11.878250122070312 - ], - [ - "▁Netflix", - -11.878585815429688 - ], - [ - "doi", - -11.878605842590332 - ], - [ - "▁Jews", - -11.878663063049316 - ], - [ - "▁Ukraine", - -11.87873363494873 - ], - [ - "▁adevărat", - -11.878785133361816 - ], - [ - "▁Apply", - -11.878813743591309 - ], - [ - "▁coupons", - -11.878859519958496 - ], - [ - "▁Detroit", - -11.878881454467773 - ], - [ - "▁rue", - -11.878889083862305 - ], - [ - "anumite", - -11.878926277160645 - ], - [ - "ished", - -11.878973960876465 - ], - [ - "▁withdrawal", - -11.87915325164795 - ], - [ - "▁replacing", - -11.87917709350586 - ], - [ - "catching", - -11.879385948181152 - ], - [ - "▁climbing", - -11.879612922668457 - ], - [ - "▁Basic", - -11.879770278930664 - ], - [ - "▁inclus", - -11.879783630371094 - ], - [ - "scope", - -11.879887580871582 - ], - [ - "▁facem", - -11.879892349243164 - ], - [ - "▁plec", - -11.879904747009277 - ], - [ - "mäßig", - -11.879980087280273 - ], - [ - "▁tasty", - -11.880064010620117 - ], - [ - "▁tunnel", - -11.880074501037598 - ], - [ - "figured", - -11.88032341003418 - ], - [ - "gged", - -11.880390167236328 - ], - [ - "▁conditii", - -11.880599975585938 - ], - [ - "▁homework", - -11.880631446838379 - ], - [ - "volle", - -11.88063907623291 - ], - [ - "▁Gott", - -11.880807876586914 - ], - [ - "▁95", - -11.880969047546387 - ], - [ - "▁elect", - -11.881020545959473 - ], - [ - "▁blast", - -11.881043434143066 - ], - [ - "▁easiest", - -11.881248474121094 - ], - [ - "USE", - -11.881462097167969 - ], - [ - "concentr", - -11.881475448608398 - ], - [ - "orial", - -11.881596565246582 - ], - [ - "▁scroll", - -11.881638526916504 - ], - [ - "stead", - -11.881691932678223 - ], - [ - "▁hormone", - -11.881710052490234 - ], - [ - "▁starter", - -11.88179874420166 - ], - [ - "▁cald", - -11.881878852844238 - ], - [ - "▁wax", - -11.881895065307617 - ], - [ - "▁ridic", - -11.881900787353516 - ], - [ - "ously", - -11.881982803344727 - ], - [ - "maschine", - 
-11.882101058959961 - ], - [ - "licher", - -11.882399559020996 - ], - [ - "▁16,", - -11.882452964782715 - ], - [ - "▁hassle", - -11.882469177246094 - ], - [ - "semnat", - -11.882535934448242 - ], - [ - "▁pub", - -11.88260555267334 - ], - [ - "240", - -11.882800102233887 - ], - [ - "▁kits", - -11.882871627807617 - ], - [ - "▁Generation", - -11.88293743133545 - ], - [ - "▁merchant", - -11.883052825927734 - ], - [ - "▁Erd", - -11.883068084716797 - ], - [ - "▁café", - -11.883077621459961 - ], - [ - "hoff", - -11.88314151763916 - ], - [ - "▁WITH", - -11.883376121520996 - ], - [ - "▁gesch", - -11.883515357971191 - ], - [ - "▁Editor", - -11.883557319641113 - ], - [ - "▁treats", - -11.883609771728516 - ], - [ - "▁harsh", - -11.883711814880371 - ], - [ - "rome", - -11.883729934692383 - ], - [ - "▁Foreign", - -11.883928298950195 - ], - [ - "▁denied", - -11.883968353271484 - ], - [ - "▁Valentine", - -11.884014129638672 - ], - [ - "▁healthier", - -11.88408088684082 - ], - [ - "▁readily", - -11.884138107299805 - ], - [ - "nac", - -11.884190559387207 - ], - [ - "▁intake", - -11.884191513061523 - ], - [ - "▁puncte", - -11.884230613708496 - ], - [ - "erne", - -11.884431838989258 - ], - [ - "file", - -11.884668350219727 - ], - [ - "▁continually", - -11.884688377380371 - ], - [ - "door", - -11.884699821472168 - ], - [ - "▁imediat", - -11.884822845458984 - ], - [ - "▁accused", - -11.884833335876465 - ], - [ - "chy", - -11.884854316711426 - ], - [ - "▁wrapped", - -11.884861946105957 - ], - [ - "IES", - -11.884878158569336 - ], - [ - "▁terrace", - -11.884883880615234 - ], - [ - "mouth", - -11.884897232055664 - ], - [ - "▁defensive", - -11.884991645812988 - ], - [ - "▁Luci", - -11.88508129119873 - ], - [ - "▁significance", - -11.885107040405273 - ], - [ - "▁2007,", - -11.885213851928711 - ], - [ - "▁inclusion", - -11.885221481323242 - ], - [ - "▁rotation", - -11.885248184204102 - ], - [ - "hos", - -11.885283470153809 - ], - [ - "▁crea", - -11.885357856750488 - ], - [ - "üß", - 
-11.885903358459473 - ], - [ - "▁Install", - -11.885988235473633 - ], - [ - "▁dump", - -11.885998725891113 - ], - [ - "▁informations", - -11.886114120483398 - ], - [ - "▁Thi", - -11.886117935180664 - ], - [ - "▁85", - -11.886252403259277 - ], - [ - "dox", - -11.886283874511719 - ], - [ - "track", - -11.886436462402344 - ], - [ - "▁couples", - -11.886571884155273 - ], - [ - "▁Assembly", - -11.886594772338867 - ], - [ - "wagen", - -11.88672161102295 - ], - [ - "▁Hil", - -11.886723518371582 - ], - [ - "ières", - -11.886833190917969 - ], - [ - "▁Gabriel", - -11.886903762817383 - ], - [ - "▁patience", - -11.887053489685059 - ], - [ - "▁colored", - -11.887147903442383 - ], - [ - "▁separately", - -11.88715934753418 - ], - [ - "▁deployment", - -11.887166023254395 - ], - [ - "scape", - -11.887306213378906 - ], - [ - "▁Acum", - -11.8875150680542 - ], - [ - "▁länger", - -11.887518882751465 - ], - [ - "▁screens", - -11.887598991394043 - ], - [ - "▁prezenta", - -11.887630462646484 - ], - [ - "▁obicei", - -11.887638092041016 - ], - [ - "▁crisp", - -11.887758255004883 - ], - [ - "▁mechanisms", - -11.887771606445312 - ], - [ - "▁thirty", - -11.887786865234375 - ], - [ - "▁individually", - -11.887989044189453 - ], - [ - "▁internationally", - -11.887991905212402 - ], - [ - "lling", - -11.888050079345703 - ], - [ - "▁bureau", - -11.88843059539795 - ], - [ - "▁erfahren", - -11.88844108581543 - ], - [ - "TY", - -11.888553619384766 - ], - [ - "PF", - -11.888607025146484 - ], - [ - "wid", - -11.888752937316895 - ], - [ - "sell", - -11.888835906982422 - ], - [ - "▁Luke", - -11.888879776000977 - ], - [ - "▁Must", - -11.888916969299316 - ], - [ - "▁identical", - -11.888927459716797 - ], - [ - "▁Netherlands", - -11.888980865478516 - ], - [ - "▁investor", - -11.88905143737793 - ], - [ - "▁squad", - -11.889073371887207 - ], - [ - "▁21,", - -11.889143943786621 - ], - [ - "iko", - -11.889230728149414 - ], - [ - "▁departure", - -11.88937759399414 - ], - [ - "ega", - -11.889384269714355 - ], - [ - 
"uzi", - -11.889408111572266 - ], - [ - "▁lasa", - -11.889458656311035 - ], - [ - "bian", - -11.889525413513184 - ], - [ - "▁Madrid", - -11.889623641967773 - ], - [ - "▁Iowa", - -11.889806747436523 - ], - [ - "▁Yellow", - -11.890026092529297 - ], - [ - "conom", - -11.89004898071289 - ], - [ - "▁hint", - -11.890098571777344 - ], - [ - "NOW", - -11.890111923217773 - ], - [ - "dress", - -11.890204429626465 - ], - [ - "▁Stück", - -11.890267372131348 - ], - [ - "echt", - -11.890424728393555 - ], - [ - "rial", - -11.89045238494873 - ], - [ - "▁Initiative", - -11.890474319458008 - ], - [ - "▁magnificent", - -11.890474319458008 - ], - [ - "▁pipeline", - -11.890543937683105 - ], - [ - "▁08", - -11.890806198120117 - ], - [ - "▁écrit", - -11.890889167785645 - ], - [ - "KA", - -11.891085624694824 - ], - [ - "arile", - -11.891151428222656 - ], - [ - "▁unfortunately", - -11.891352653503418 - ], - [ - "dose", - -11.891355514526367 - ], - [ - "▁counts", - -11.891427993774414 - ], - [ - "deciding", - -11.891549110412598 - ], - [ - "WA", - -11.89167308807373 - ], - [ - "▁doresc", - -11.891685485839844 - ], - [ - "NY", - -11.892008781433105 - ], - [ - "olin", - -11.892112731933594 - ], - [ - "▁Urlaub", - -11.892133712768555 - ], - [ - "▁alătur", - -11.892317771911621 - ], - [ - "▁Vic", - -11.892515182495117 - ], - [ - "▁fier", - -11.89269733428955 - ], - [ - "EU", - -11.892772674560547 - ], - [ - "▁triple", - -11.892871856689453 - ], - [ - "▁compliment", - -11.89310359954834 - ], - [ - "▁vegetable", - -11.89334487915039 - ], - [ - "member", - -11.893743515014648 - ], - [ - "atiei", - -11.893793106079102 - ], - [ - "▁toxic", - -11.893835067749023 - ], - [ - "▁converted", - -11.893888473510742 - ], - [ - "▁Pink", - -11.893999099731445 - ], - [ - "▁fragment", - -11.894020080566406 - ], - [ - "presenting", - -11.894027709960938 - ], - [ - "▁garantie", - -11.894031524658203 - ], - [ - "▁31,", - -11.894052505493164 - ], - [ - "▁puisqu", - -11.894105911254883 - ], - [ - "aching", - 
-11.894107818603516 - ], - [ - "▁Shan", - -11.894119262695312 - ], - [ - "▁Affairs", - -11.894368171691895 - ], - [ - "üsse", - -11.894405364990234 - ], - [ - "▁CBD", - -11.894428253173828 - ], - [ - "▁quatre", - -11.894588470458984 - ], - [ - "▁horror", - -11.894651412963867 - ], - [ - "▁culoare", - -11.894661903381348 - ], - [ - "▁welcoming", - -11.894673347473145 - ], - [ - "▁headache", - -11.894808769226074 - ], - [ - "▁septembre", - -11.894820213317871 - ], - [ - "▁Tür", - -11.894862174987793 - ], - [ - "lateral", - -11.89507007598877 - ], - [ - "▁termin", - -11.895228385925293 - ], - [ - "▁Aid", - -11.895291328430176 - ], - [ - "second", - -11.895308494567871 - ], - [ - "▁Philip", - -11.895310401916504 - ], - [ - "berries", - -11.895347595214844 - ], - [ - "▁Slot", - -11.895431518554688 - ], - [ - "ка", - -11.895442962646484 - ], - [ - "▁consecutive", - -11.895590782165527 - ], - [ - "value", - -11.895705223083496 - ], - [ - "▁islands", - -11.8958101272583 - ], - [ - "▁posibilitatea", - -11.895928382873535 - ], - [ - "0.5", - -11.896341323852539 - ], - [ - "▁Dumpster", - -11.896471977233887 - ], - [ - "▁Gran", - -11.89647388458252 - ], - [ - "▁restricted", - -11.8967924118042 - ], - [ - "▁discussing", - -11.896921157836914 - ], - [ - "cock", - -11.896966934204102 - ], - [ - "Serie", - -11.896989822387695 - ], - [ - "▁crushing", - -11.896998405456543 - ], - [ - "RB", - -11.897034645080566 - ], - [ - "▁Gy", - -11.897068977355957 - ], - [ - "normal", - -11.897098541259766 - ], - [ - "DT", - -11.897180557250977 - ], - [ - "▁concurs", - -11.897181510925293 - ], - [ - "▁Beratung", - -11.897231101989746 - ], - [ - "▁handful", - -11.897235870361328 - ], - [ - "▁loading", - -11.897237777709961 - ], - [ - "▁WI", - -11.897269248962402 - ], - [ - "▁Fitness", - -11.897283554077148 - ], - [ - "▁RAM", - -11.897302627563477 - ], - [ - "▁Twi", - -11.89730453491211 - ], - [ - "adurch", - -11.897345542907715 - ], - [ - "▁obiectiv", - -11.897366523742676 - ], - [ - "BM", - 
-11.897635459899902 - ], - [ - "▁amendment", - -11.8976469039917 - ], - [ - "whi", - -11.897652626037598 - ], - [ - "▁Besonder", - -11.897871017456055 - ], - [ - "ALL", - -11.898003578186035 - ], - [ - "▁earning", - -11.898090362548828 - ], - [ - "▁nutrients", - -11.898580551147461 - ], - [ - "pru", - -11.898633003234863 - ], - [ - "▁offensive", - -11.898696899414062 - ], - [ - "▁shelves", - -11.898711204528809 - ], - [ - "▁încâ", - -11.898726463317871 - ], - [ - "▁execute", - -11.898923873901367 - ], - [ - "▁cauz", - -11.898966789245605 - ], - [ - "exist", - -11.899179458618164 - ], - [ - "▁Meter", - -11.899191856384277 - ], - [ - "there", - -11.899201393127441 - ], - [ - "▁réaliser", - -11.899249076843262 - ], - [ - "blog", - -11.899362564086914 - ], - [ - "▁résultats", - -11.89937973022461 - ], - [ - "baren", - -11.899391174316406 - ], - [ - "▁lang", - -11.899425506591797 - ], - [ - "▁mere", - -11.899870872497559 - ], - [ - "▁toti", - -11.900079727172852 - ], - [ - "DN", - -11.90017032623291 - ], - [ - "Hi", - -11.900310516357422 - ], - [ - "▁merg", - -11.900359153747559 - ], - [ - "▁Camera", - -11.90054988861084 - ], - [ - "▁parfum", - -11.900697708129883 - ], - [ - "CG", - -11.900701522827148 - ], - [ - "posed", - -11.900713920593262 - ], - [ - "▁proposals", - -11.900732040405273 - ], - [ - "▁incorrect", - -11.900811195373535 - ], - [ - "▁Denver", - -11.901168823242188 - ], - [ - "▁noapte", - -11.901397705078125 - ], - [ - "▁VPN", - -11.901436805725098 - ], - [ - "▁Oklahoma", - -11.90159797668457 - ], - [ - "horizon", - -11.901647567749023 - ], - [ - "▁villa", - -11.901668548583984 - ], - [ - "duce", - -11.901812553405762 - ], - [ - "Dienst", - -11.902042388916016 - ], - [ - "▁oversee", - -11.902511596679688 - ], - [ - "astr", - -11.902548789978027 - ], - [ - "brand", - -11.902713775634766 - ], - [ - "▁Safe", - -11.902746200561523 - ], - [ - "▁competing", - -11.902812004089355 - ], - [ - "▁subiect", - -11.902812004089355 - ], - [ - "▁équipe", - 
-11.903091430664062 - ], - [ - "▁Dress", - -11.903095245361328 - ], - [ - "▁Juni", - -11.903139114379883 - ], - [ - "▁repeated", - -11.90317153930664 - ], - [ - "2012", - -11.903226852416992 - ], - [ - "▁départ", - -11.903234481811523 - ], - [ - "immer", - -11.903335571289062 - ], - [ - "▁mondial", - -11.903374671936035 - ], - [ - "▁datelor", - -11.903703689575195 - ], - [ - "▁surgeon", - -11.903782844543457 - ], - [ - "▁demanding", - -11.903812408447266 - ], - [ - "▁concluded", - -11.903878211975098 - ], - [ - "țiile", - -11.903950691223145 - ], - [ - "marin", - -11.903999328613281 - ], - [ - "▁estim", - -11.904206275939941 - ], - [ - "▁Loan", - -11.904361724853516 - ], - [ - "sculpt", - -11.904373168945312 - ], - [ - "▁99", - -11.904391288757324 - ], - [ - "void", - -11.904400825500488 - ], - [ - "▁Empire", - -11.904499053955078 - ], - [ - "▁Brit", - -11.90450382232666 - ], - [ - "▁véhicule", - -11.904777526855469 - ], - [ - "▁dividend", - -11.905069351196289 - ], - [ - "▁refused", - -11.905077934265137 - ], - [ - "▁speaks", - -11.905156135559082 - ], - [ - "▁Morris", - -11.905282020568848 - ], - [ - "dict", - -11.905349731445312 - ], - [ - "▁funeral", - -11.905556678771973 - ], - [ - "▁Behandlung", - -11.905763626098633 - ], - [ - "▁Revolution", - -11.905905723571777 - ], - [ - "▁Sum", - -11.905935287475586 - ], - [ - "einigen", - -11.906030654907227 - ], - [ - "RES", - -11.906070709228516 - ], - [ - "▁vite", - -11.906071662902832 - ], - [ - "▁Captain", - -11.906190872192383 - ], - [ - "▁assurance", - -11.9061918258667 - ], - [ - "uga", - -11.906500816345215 - ], - [ - "▁conserv", - -11.906583786010742 - ], - [ - "▁therapeutic", - -11.906641006469727 - ], - [ - "▁Sweden", - -11.906753540039062 - ], - [ - "▁Lead", - -11.906888961791992 - ], - [ - "ément", - -11.907071113586426 - ], - [ - "▁53", - -11.90709114074707 - ], - [ - "▁fraction", - -11.9071683883667 - ], - [ - "▁magnet", - -11.907170295715332 - ], - [ - "assurer", - -11.907184600830078 - ], - [ - 
"▁Steuer", - -11.90733814239502 - ], - [ - "▁flori", - -11.90735149383545 - ], - [ - "▁charming", - -11.907588958740234 - ], - [ - "▁athletic", - -11.907621383666992 - ], - [ - "▁membri", - -11.907706260681152 - ], - [ - "▁Sep", - -11.907726287841797 - ], - [ - "ogue", - -11.907800674438477 - ], - [ - "▁familie", - -11.907800674438477 - ], - [ - "▁SW", - -11.90796947479248 - ], - [ - "▁diagnosed", - -11.908023834228516 - ], - [ - "RR", - -11.908143997192383 - ], - [ - "▁Fern", - -11.908233642578125 - ], - [ - "▁rational", - -11.908281326293945 - ], - [ - "▁talents", - -11.90828800201416 - ], - [ - "ziert", - -11.908317565917969 - ], - [ - "▁chemin", - -11.908459663391113 - ], - [ - "sheet", - -11.908562660217285 - ], - [ - "▁outer", - -11.908565521240234 - ], - [ - "▁Kap", - -11.908591270446777 - ], - [ - "▁HERE", - -11.908656120300293 - ], - [ - "▁uman", - -11.908824920654297 - ], - [ - "▁accompany", - -11.908880233764648 - ], - [ - "▁varieties", - -11.908881187438965 - ], - [ - "▁sensors", - -11.908957481384277 - ], - [ - "▁25%", - -11.90919017791748 - ], - [ - "▁tray", - -11.909354209899902 - ], - [ - "▁critique", - -11.909459114074707 - ], - [ - "▁puţin", - -11.909515380859375 - ], - [ - "▁Schüler", - -11.90953540802002 - ], - [ - "▁repar", - -11.909744262695312 - ], - [ - "▁overlook", - -11.909931182861328 - ], - [ - "▁surf", - -11.910048484802246 - ], - [ - "▁tasting", - -11.910118103027344 - ], - [ - "bog", - -11.91027545928955 - ], - [ - "▁Payment", - -11.910289764404297 - ], - [ - "▁Helen", - -11.91049575805664 - ], - [ - "▁Refer", - -11.910694122314453 - ], - [ - "application", - -11.910698890686035 - ], - [ - "lection", - -11.910856246948242 - ], - [ - "▁avril", - -11.911042213439941 - ], - [ - "▁Grace", - -11.911109924316406 - ], - [ - "▁kau", - -11.911274909973145 - ], - [ - "▁libraries", - -11.911319732666016 - ], - [ - "▁closest", - -11.911347389221191 - ], - [ - "▁coating", - -11.911351203918457 - ], - [ - "▁suicide", - -11.911364555358887 - ], - [ 
- "▁undergraduate", - -11.911449432373047 - ], - [ - "▁stitch", - -11.91149616241455 - ], - [ - "▁reset", - -11.911593437194824 - ], - [ - "▁Greece", - -11.911626815795898 - ], - [ - "▁Fred", - -11.91197681427002 - ], - [ - "▁18.", - -11.912047386169434 - ], - [ - "▁nuit", - -11.912087440490723 - ], - [ - "▁lying", - -11.912199974060059 - ], - [ - "▁cottage", - -11.91232681274414 - ], - [ - "bone", - -11.912477493286133 - ], - [ - "▁milieu", - -11.912480354309082 - ], - [ - "management", - -11.912623405456543 - ], - [ - "▁Freund", - -11.912724494934082 - ], - [ - "▁specially", - -11.912841796875 - ], - [ - "veut", - -11.912961959838867 - ], - [ - "▁necesare", - -11.912999153137207 - ], - [ - "▁cert", - -11.913081169128418 - ], - [ - "articul", - -11.913151741027832 - ], - [ - "150", - -11.913174629211426 - ], - [ - "rounded", - -11.913180351257324 - ], - [ - "▁longue", - -11.913193702697754 - ], - [ - "▁Quel", - -11.913240432739258 - ], - [ - "Until", - -11.913322448730469 - ], - [ - "▁700", - -11.913398742675781 - ], - [ - "▁installations", - -11.913423538208008 - ], - [ - "▁boats", - -11.913467407226562 - ], - [ - "Fig", - -11.913609504699707 - ], - [ - "▁cocktail", - -11.913613319396973 - ], - [ - "▁rocks", - -11.91366958618164 - ], - [ - "meinen", - -11.91374683380127 - ], - [ - "entrepreneur", - -11.913780212402344 - ], - [ - "schwarz", - -11.913924217224121 - ], - [ - "▁diesel", - -11.91392993927002 - ], - [ - "▁villages", - -11.913969039916992 - ], - [ - "▁cups", - -11.914076805114746 - ], - [ - "▁stairs", - -11.914241790771484 - ], - [ - "▁Match", - -11.914350509643555 - ], - [ - "Taking", - -11.914437294006348 - ], - [ - "prin", - -11.914469718933105 - ], - [ - "▁penal", - -11.91472053527832 - ], - [ - "partner", - -11.914867401123047 - ], - [ - "wave", - -11.91497802734375 - ], - [ - "▁baie", - -11.91515064239502 - ], - [ - "LAN", - -11.915151596069336 - ], - [ - "fix", - -11.915202140808105 - ], - [ - "▁surveillance", - -11.915295600891113 - ], - [ - 
"▁Register", - -11.915343284606934 - ], - [ - "oara", - -11.915536880493164 - ], - [ - "▁Phoenix", - -11.915602684020996 - ], - [ - "aktuellen", - -11.915613174438477 - ], - [ - "▁livres", - -11.915618896484375 - ], - [ - "▁entities", - -11.916102409362793 - ], - [ - "▁Regard", - -11.916112899780273 - ], - [ - "▁Jazz", - -11.91614055633545 - ], - [ - "▁flame", - -11.91616153717041 - ], - [ - "▁independence", - -11.916215896606445 - ], - [ - "▁Adventure", - -11.916341781616211 - ], - [ - "▁assign", - -11.916399955749512 - ], - [ - "▁Adult", - -11.916579246520996 - ], - [ - "kehr", - -11.916666984558105 - ], - [ - "▁ordering", - -11.916850090026855 - ], - [ - "▁charts", - -11.91687297821045 - ], - [ - "▁Român", - -11.916936874389648 - ], - [ - "bauen", - -11.916982650756836 - ], - [ - "▁Floor", - -11.917065620422363 - ], - [ - "▁Meet", - -11.917101860046387 - ], - [ - "▁compromise", - -11.917158126831055 - ], - [ - "regarded", - -11.917171478271484 - ], - [ - "02.", - -11.917215347290039 - ], - [ - "▁granite", - -11.917299270629883 - ], - [ - "▁Judge", - -11.917314529418945 - ], - [ - "opti", - -11.917373657226562 - ], - [ - "liste", - -11.917379379272461 - ], - [ - "▁capacité", - -11.917427062988281 - ], - [ - "▁criticism", - -11.917450904846191 - ], - [ - "LES", - -11.918198585510254 - ], - [ - "▁Century", - -11.918211936950684 - ], - [ - "▁mobility", - -11.918252944946289 - ], - [ - "▁variation", - -11.918622016906738 - ], - [ - "▁Utah", - -11.91867446899414 - ], - [ - "▁seminar", - -11.918678283691406 - ], - [ - "▁experiments", - -11.918803215026855 - ], - [ - "midst", - -11.918943405151367 - ], - [ - "▁Psycho", - -11.919002532958984 - ], - [ - "▁choses", - -11.919121742248535 - ], - [ - "▁Karl", - -11.919175148010254 - ], - [ - "▁ruling", - -11.919286727905273 - ], - [ - "▁Voice", - -11.919404983520508 - ], - [ - "▁împotriv", - -11.919442176818848 - ], - [ - "▁mesaj", - -11.919500350952148 - ], - [ - "▁vrei", - -11.919594764709473 - ], - [ - "fan", - 
-11.919601440429688 - ], - [ - "parent", - -11.919648170471191 - ], - [ - "▁oraș", - -11.919770240783691 - ], - [ - "▁printable", - -11.919777870178223 - ], - [ - "▁diver", - -11.919859886169434 - ], - [ - "▁ochi", - -11.919949531555176 - ], - [ - "▁teenager", - -11.920125961303711 - ], - [ - "▁Death", - -11.920150756835938 - ], - [ - "▁manque", - -11.920289993286133 - ], - [ - "ască", - -11.920345306396484 - ], - [ - "▁prob", - -11.9203519821167 - ], - [ - "▁télé", - -11.920354843139648 - ], - [ - "cursul", - -11.920378684997559 - ], - [ - "pion", - -11.92052173614502 - ], - [ - "▁dedication", - -11.920644760131836 - ], - [ - "▁opr", - -11.920687675476074 - ], - [ - "führung", - -11.920761108398438 - ], - [ - "▁cognitive", - -11.920827865600586 - ], - [ - "soft", - -11.920868873596191 - ], - [ - "▁19,", - -11.9209623336792 - ], - [ - "▁24-", - -11.921197891235352 - ], - [ - "▁legitimate", - -11.921220779418945 - ], - [ - "▁comedy", - -11.921277046203613 - ], - [ - "▁violation", - -11.921327590942383 - ], - [ - "▁disposal", - -11.921472549438477 - ], - [ - "▁liegen", - -11.921605110168457 - ], - [ - "ко", - -11.921878814697266 - ], - [ - "▁martie", - -11.921931266784668 - ], - [ - "▁Vas", - -11.92212200164795 - ], - [ - "rash", - -11.922134399414062 - ], - [ - "▁hadn", - -11.922174453735352 - ], - [ - "▁connu", - -11.922204971313477 - ], - [ - "▁regelmäßig", - -11.922216415405273 - ], - [ - "▁Webseite", - -11.922224998474121 - ], - [ - "▁failing", - -11.922273635864258 - ], - [ - "explique", - -11.922449111938477 - ], - [ - "▁Player", - -11.922513961791992 - ], - [ - "vul", - -11.922560691833496 - ], - [ - "camp", - -11.922992706298828 - ], - [ - "▁erreicht", - -11.922996520996094 - ], - [ - "▁tags", - -11.922998428344727 - ], - [ - "▁headline", - -11.923210144042969 - ], - [ - "▁banc", - -11.923253059387207 - ], - [ - "▁Mayor", - -11.923309326171875 - ], - [ - "trop", - -11.923395156860352 - ], - [ - "AK", - -11.9235258102417 - ], - [ - "▁lighter", - 
-11.923602104187012 - ], - [ - "▁syndrome", - -11.923604965209961 - ], - [ - "▁Adrian", - -11.92365550994873 - ], - [ - "▁EUR", - -11.923759460449219 - ], - [ - "▁Missouri", - -11.923916816711426 - ], - [ - "▁Chan", - -11.924108505249023 - ], - [ - "topped", - -11.924233436584473 - ], - [ - "▁nationwide", - -11.924276351928711 - ], - [ - "▁6-", - -11.924302101135254 - ], - [ - "final", - -11.924408912658691 - ], - [ - "ttes", - -11.924485206604004 - ], - [ - "▁FO", - -11.924537658691406 - ], - [ - "▁legi", - -11.924556732177734 - ], - [ - "▁Hum", - -11.924575805664062 - ], - [ - "vita", - -11.924662590026855 - ], - [ - "▁Regen", - -11.924695014953613 - ], - [ - "▁confusion", - -11.92498779296875 - ], - [ - "▁valori", - -11.925142288208008 - ], - [ - "mill", - -11.92516803741455 - ], - [ - "did", - -11.925237655639648 - ], - [ - "pid", - -11.925253868103027 - ], - [ - "▁implications", - -11.925284385681152 - ], - [ - "▁Value", - -11.92552375793457 - ], - [ - "lângă", - -11.925666809082031 - ], - [ - "▁véritable", - -11.92577075958252 - ], - [ - "▁Stick", - -11.925814628601074 - ], - [ - "zol", - -11.925835609436035 - ], - [ - "▁ebenso", - -11.925863265991211 - ], - [ - "west", - -11.925895690917969 - ], - [ - "▁auszu", - -11.92600154876709 - ], - [ - "▁adorable", - -11.926016807556152 - ], - [ - "▁clarity", - -11.92605209350586 - ], - [ - "▁Wash", - -11.926335334777832 - ], - [ - "▁alien", - -11.926423072814941 - ], - [ - "usement", - -11.926626205444336 - ], - [ - "▁bones", - -11.9266357421875 - ], - [ - "▁Beau", - -11.926726341247559 - ], - [ - "▁Jet", - -11.926727294921875 - ], - [ - "▁visibility", - -11.927034378051758 - ], - [ - "impose", - -11.927063941955566 - ], - [ - "food", - -11.927133560180664 - ], - [ - "▁duce", - -11.927361488342285 - ], - [ - "▁Format", - -11.927386283874512 - ], - [ - "▁durability", - -11.927424430847168 - ], - [ - "▁Prim", - -11.927614212036133 - ], - [ - "▁mele", - -11.927629470825195 - ], - [ - "▁dürfen", - -11.927631378173828 - 
], - [ - "▁Angebote", - -11.92765998840332 - ], - [ - "▁discharge", - -11.927745819091797 - ], - [ - "▁Justin", - -11.928055763244629 - ], - [ - "▁shame", - -11.928228378295898 - ], - [ - "▁heated", - -11.928282737731934 - ], - [ - "ères", - -11.92856216430664 - ], - [ - "human", - -11.928810119628906 - ], - [ - "4.5", - -11.928831100463867 - ], - [ - "▁lien", - -11.928955078125 - ], - [ - "▁Alan", - -11.92896556854248 - ], - [ - "▁transmis", - -11.929130554199219 - ], - [ - "▁Bul", - -11.929137229919434 - ], - [ - "plu", - -11.929169654846191 - ], - [ - "acul", - -11.929337501525879 - ], - [ - "merk", - -11.929434776306152 - ], - [ - "▁altfel", - -11.929566383361816 - ], - [ - "deli", - -11.929689407348633 - ], - [ - "▁Cru", - -11.930001258850098 - ], - [ - "▁hommes", - -11.930127143859863 - ], - [ - "aurait", - -11.930137634277344 - ], - [ - "cca", - -11.930187225341797 - ], - [ - "▁Path", - -11.930208206176758 - ], - [ - "astronom", - -11.930241584777832 - ], - [ - "▁détail", - -11.930276870727539 - ], - [ - "▁blocked", - -11.930394172668457 - ], - [ - "iding", - -11.93044376373291 - ], - [ - "schä", - -11.930500030517578 - ], - [ - "▁30-", - -11.930624008178711 - ], - [ - "diction", - -11.930813789367676 - ], - [ - "▁pulling", - -11.930868148803711 - ], - [ - "▁Sample", - -11.930924415588379 - ], - [ - "▁renewable", - -11.930997848510742 - ], - [ - "▁Pinterest", - -11.93106746673584 - ], - [ - "▁Tages", - -11.93106746673584 - ], - [ - "▁shed", - -11.931171417236328 - ], - [ - "▁hart", - -11.931188583374023 - ], - [ - "▁serie", - -11.931200981140137 - ], - [ - "▁documentary", - -11.931208610534668 - ], - [ - "gebaut", - -11.931220054626465 - ], - [ - "▁Hause", - -11.931272506713867 - ], - [ - "share", - -11.931303977966309 - ], - [ - "▁inflation", - -11.93138599395752 - ], - [ - "▁gall", - -11.931504249572754 - ], - [ - "▁adjacent", - -11.931673049926758 - ], - [ - "jer", - -11.93173885345459 - ], - [ - "▁Universal", - -11.931946754455566 - ], - [ - 
"▁disabilities", - -11.931984901428223 - ], - [ - "▁proposition", - -11.93204116821289 - ], - [ - "Work", - -11.932293891906738 - ], - [ - "▁closure", - -11.932306289672852 - ], - [ - "▁separated", - -11.932496070861816 - ], - [ - "▁soda", - -11.932549476623535 - ], - [ - "▁elite", - -11.93263053894043 - ], - [ - "appro", - -11.93265438079834 - ], - [ - "▁acute", - -11.93266487121582 - ], - [ - "utton", - -11.932938575744629 - ], - [ - "▁facă", - -11.933053016662598 - ], - [ - "▁collector", - -11.933121681213379 - ], - [ - "▁unlock", - -11.933249473571777 - ], - [ - "▁Alpha", - -11.933267593383789 - ], - [ - "▁Used", - -11.933267593383789 - ], - [ - "▁applicants", - -11.933302879333496 - ], - [ - "▁înseamn", - -11.933387756347656 - ], - [ - "▁inclu", - -11.933414459228516 - ], - [ - "▁disclosure", - -11.933544158935547 - ], - [ - "▁Fahr", - -11.933995246887207 - ], - [ - "AST", - -11.934061050415039 - ], - [ - "▁vivre", - -11.934069633483887 - ], - [ - "»,", - -11.934167861938477 - ], - [ - "laud", - -11.93430233001709 - ], - [ - "▁soir", - -11.934365272521973 - ], - [ - "▁barrier", - -11.934405326843262 - ], - [ - "înd", - -11.934470176696777 - ], - [ - "▁ambition", - -11.93451976776123 - ], - [ - "asta", - -11.934550285339355 - ], - [ - "occupied", - -11.934747695922852 - ], - [ - "▁Gau", - -11.934774398803711 - ], - [ - "four", - -11.93481159210205 - ], - [ - "▁nap", - -11.934887886047363 - ], - [ - "iez", - -11.934922218322754 - ], - [ - "endra", - -11.935242652893066 - ], - [ - "gaben", - -11.935464859008789 - ], - [ - "▁Carol", - -11.935481071472168 - ], - [ - "▁Switzerland", - -11.935575485229492 - ], - [ - "▁Bond", - -11.935617446899414 - ], - [ - "▁crossing", - -11.935630798339844 - ], - [ - "▁Palace", - -11.9359769821167 - ], - [ - "NG", - -11.935986518859863 - ], - [ - "▁Budget", - -11.93622875213623 - ], - [ - "▁lid", - -11.936372756958008 - ], - [ - "bab", - -11.936393737792969 - ], - [ - "▁polish", - -11.936416625976562 - ], - [ - "▁herbs", - 
-11.93673038482666 - ], - [ - "▁dear", - -11.936747550964355 - ], - [ - "▁devrai", - -11.936846733093262 - ], - [ - "walk", - -11.936864852905273 - ], - [ - "▁humanity", - -11.936897277832031 - ], - [ - "▁tires", - -11.936978340148926 - ], - [ - "égal", - -11.936994552612305 - ], - [ - "▁bow", - -11.937032699584961 - ], - [ - "▁debris", - -11.937201499938965 - ], - [ - "▁keywords", - -11.937273025512695 - ], - [ - "irk", - -11.937345504760742 - ], - [ - "▁suspend", - -11.937360763549805 - ], - [ - "▁pourra", - -11.93738079071045 - ], - [ - "migran", - -11.937454223632812 - ], - [ - "thereby", - -11.937570571899414 - ], - [ - "▁Harris", - -11.937943458557129 - ], - [ - "ateurs", - -11.937956809997559 - ], - [ - "▁fal", - -11.938271522521973 - ], - [ - "alleged", - -11.938355445861816 - ], - [ - "noch", - -11.938494682312012 - ], - [ - "▁observation", - -11.938506126403809 - ], - [ - "▁București", - -11.93855094909668 - ], - [ - "▁SQL", - -11.938624382019043 - ], - [ - "▁Phase", - -11.938760757446289 - ], - [ - "▁adventures", - -11.93881607055664 - ], - [ - "▁Kol", - -11.938885688781738 - ], - [ - "▁professionnel", - -11.938916206359863 - ], - [ - "crit", - -11.939026832580566 - ], - [ - "LR", - -11.939313888549805 - ], - [ - "▁preview", - -11.939464569091797 - ], - [ - "▁highlighted", - -11.939942359924316 - ], - [ - "▁Stud", - -11.939949035644531 - ], - [ - "▁labour", - -11.939956665039062 - ], - [ - "MV", - -11.9399995803833 - ], - [ - "click", - -11.940049171447754 - ], - [ - "approche", - -11.94016170501709 - ], - [ - "tian", - -11.940183639526367 - ], - [ - "cité", - -11.940192222595215 - ], - [ - "▁Rain", - -11.94028377532959 - ], - [ - "typ", - -11.94032096862793 - ], - [ - "Usually", - -11.940435409545898 - ], - [ - "▁outlet", - -11.940513610839844 - ], - [ - "logging", - -11.940814018249512 - ], - [ - "▁Temperatur", - -11.940906524658203 - ], - [ - "▁Scottish", - -11.94090747833252 - ], - [ - "iga", - -11.940942764282227 - ], - [ - "▁glory", - 
-11.941086769104004 - ], - [ - "▁Rom", - -11.941242218017578 - ], - [ - "zeug", - -11.941337585449219 - ], - [ - "establishing", - -11.941339492797852 - ], - [ - "▁imaging", - -11.941926002502441 - ], - [ - "▁Beauty", - -11.942015647888184 - ], - [ - "igan", - -11.942042350769043 - ], - [ - "après", - -11.94224739074707 - ], - [ - "Adresse", - -11.942267417907715 - ], - [ - "cliff", - -11.942349433898926 - ], - [ - "▁unnecessary", - -11.943267822265625 - ], - [ - "▁slim", - -11.943324089050293 - ], - [ - "dir", - -11.943490982055664 - ], - [ - "▁leisure", - -11.943660736083984 - ], - [ - "▁principale", - -11.94368839263916 - ], - [ - "▁Viele", - -11.943770408630371 - ], - [ - "▁2007.", - -11.943802833557129 - ], - [ - "Hopefully", - -11.943829536437988 - ], - [ - "cola", - -11.943851470947266 - ], - [ - "▁Planet", - -11.943927764892578 - ], - [ - "▁orientation", - -11.943933486938477 - ], - [ - "▁angry", - -11.94419002532959 - ], - [ - "MIT", - -11.944234848022461 - ], - [ - "▁Kenya", - -11.944265365600586 - ], - [ - "▁bless", - -11.94435977935791 - ], - [ - "▁Fill", - -11.944524765014648 - ], - [ - "▁compar", - -11.944664001464844 - ], - [ - "▁curtain", - -11.94473934173584 - ], - [ - "ţei", - -11.944754600524902 - ], - [ - "▁Az", - -11.94482421875 - ], - [ - "▁Rang", - -11.944908142089844 - ], - [ - "▁dominant", - -11.944974899291992 - ], - [ - "race", - -11.944985389709473 - ], - [ - "▁Target", - -11.944987297058105 - ], - [ - "▁manually", - -11.944987297058105 - ], - [ - "objet", - -11.945024490356445 - ], - [ - "thrown", - -11.945131301879883 - ], - [ - "NF", - -11.945149421691895 - ], - [ - "durant", - -11.945185661315918 - ], - [ - "rect", - -11.945302963256836 - ], - [ - "▁Größe", - -11.945320129394531 - ], - [ - "VM", - -11.9453763961792 - ], - [ - "▁aprilie", - -11.945476531982422 - ], - [ - "▁Welche", - -11.945639610290527 - ], - [ - "▁verde", - -11.946157455444336 - ], - [ - "▁Portugal", - -11.946266174316406 - ], - [ - "▁algorithm", - 
-11.94627571105957 - ], - [ - "ăț", - -11.946328163146973 - ], - [ - "▁Grey", - -11.946371078491211 - ], - [ - "▁cleaned", - -11.94644832611084 - ], - [ - "▁modes", - -11.946463584899902 - ], - [ - "▁relaxation", - -11.946599006652832 - ], - [ - "mbr", - -11.946786880493164 - ], - [ - "étique", - -11.946821212768555 - ], - [ - "Her", - -11.946904182434082 - ], - [ - "▁beta", - -11.946952819824219 - ], - [ - "▁nobody", - -11.94699764251709 - ], - [ - "▁aplic", - -11.947060585021973 - ], - [ - "present", - -11.947080612182617 - ], - [ - "emis", - -11.947197914123535 - ], - [ - "éléments", - -11.947257995605469 - ], - [ - "▁lately", - -11.947303771972656 - ], - [ - "fab", - -11.94732666015625 - ], - [ - "▁aluminiu", - -11.947373390197754 - ], - [ - "▁vest", - -11.947524070739746 - ], - [ - "▁statue", - -11.947558403015137 - ], - [ - "▁publice", - -11.947586059570312 - ], - [ - "▁merchandise", - -11.9476900100708 - ], - [ - "▁relat", - -11.947810173034668 - ], - [ - "git", - -11.94796371459961 - ], - [ - "▁interne", - -11.948281288146973 - ], - [ - "▁Tokyo", - -11.948325157165527 - ], - [ - "chal", - -11.948348045349121 - ], - [ - "contacted", - -11.948430061340332 - ], - [ - "▁tras", - -11.948455810546875 - ], - [ - "▁Clinic", - -11.948626518249512 - ], - [ - "▁unbe", - -11.948633193969727 - ], - [ - "▁dumneavoastra", - -11.948798179626465 - ], - [ - "float", - -11.949078559875488 - ], - [ - "isson", - -11.94909381866455 - ], - [ - "▁vessel", - -11.949126243591309 - ], - [ - "attempting", - -11.949161529541016 - ], - [ - "▁doute", - -11.94918441772461 - ], - [ - "▁Leadership", - -11.949322700500488 - ], - [ - "▁sustain", - -11.94947338104248 - ], - [ - "▁textile", - -11.949666023254395 - ], - [ - "auer", - -11.949702262878418 - ], - [ - "▁90%", - -11.949899673461914 - ], - [ - "garten", - -11.949911117553711 - ], - [ - "▁adauga", - -11.949991226196289 - ], - [ - "▁Kil", - -11.950061798095703 - ], - [ - "▁troops", - -11.950420379638672 - ], - [ - "▁pale", - 
-11.950568199157715 - ], - [ - "host", - -11.950743675231934 - ], - [ - "▁cry", - -11.950757026672363 - ], - [ - "▁Alb", - -11.950793266296387 - ], - [ - "▁Brad", - -11.95089340209961 - ], - [ - "▁bicycle", - -11.951054573059082 - ], - [ - "▁24/7", - -11.951217651367188 - ], - [ - "▁с", - -11.951228141784668 - ], - [ - "▁stimul", - -11.951401710510254 - ], - [ - "gler", - -11.951445579528809 - ], - [ - "▁notwendig", - -11.951496124267578 - ], - [ - "▁cousin", - -11.95158863067627 - ], - [ - "cheie", - -11.951600074768066 - ], - [ - "hay", - -11.951751708984375 - ], - [ - "▁rezolv", - -11.952134132385254 - ], - [ - "▁THIS", - -11.952143669128418 - ], - [ - "ordre", - -11.952157974243164 - ], - [ - "iști", - -11.952173233032227 - ], - [ - "▁conclude", - -11.952310562133789 - ], - [ - "▁Lage", - -11.952327728271484 - ], - [ - "▁Entertainment", - -11.952454566955566 - ], - [ - "▁valued", - -11.952478408813477 - ], - [ - "ktion", - -11.95253849029541 - ], - [ - "▁priorities", - -11.95268440246582 - ], - [ - "▁1986", - -11.952770233154297 - ], - [ - "▁fatal", - -11.952934265136719 - ], - [ - "▁accurately", - -11.952988624572754 - ], - [ - "▁1987", - -11.953022956848145 - ], - [ - "▁folk", - -11.953073501586914 - ], - [ - "7)", - -11.953163146972656 - ], - [ - "führer", - -11.95360279083252 - ], - [ - "▁knot", - -11.953612327575684 - ], - [ - "haltung", - -11.953720092773438 - ], - [ - "▁Charlie", - -11.953733444213867 - ], - [ - "âge", - -11.95376205444336 - ], - [ - "▁threshold", - -11.954041481018066 - ], - [ - "▁assault", - -11.954130172729492 - ], - [ - "▁meist", - -11.954141616821289 - ], - [ - "bine", - -11.954155921936035 - ], - [ - "surprisingly", - -11.954171180725098 - ], - [ - "▁Protect", - -11.954180717468262 - ], - [ - "▁Hack", - -11.954258918762207 - ], - [ - "▁Quant", - -11.954537391662598 - ], - [ - "▁Cet", - -11.954782485961914 - ], - [ - "▁convinced", - -11.95481014251709 - ], - [ - "▁muncă", - -11.955033302307129 - ], - [ - "dging", - 
-11.955066680908203 - ], - [ - "▁Millionen", - -11.955129623413086 - ], - [ - "zahlung", - -11.955148696899414 - ], - [ - "▁anticipated", - -11.955192565917969 - ], - [ - "▁brass", - -11.9552001953125 - ], - [ - "KO", - -11.955244064331055 - ], - [ - "▁culori", - -11.955286979675293 - ], - [ - "▁Aero", - -11.955326080322266 - ], - [ - "▁intermediu", - -11.955373764038086 - ], - [ - "▁Philippines", - -11.955381393432617 - ], - [ - "▁jury", - -11.955387115478516 - ], - [ - "▁Funktion", - -11.95569896697998 - ], - [ - "▁probe", - -11.955704689025879 - ], - [ - "TL", - -11.955748558044434 - ], - [ - "1.0", - -11.955804824829102 - ], - [ - "ELL", - -11.95581340789795 - ], - [ - "She", - -11.956001281738281 - ], - [ - "▁Blood", - -11.956073760986328 - ], - [ - "▁Dean", - -11.956111907958984 - ], - [ - "▁scène", - -11.9561185836792 - ], - [ - "volu", - -11.95621395111084 - ], - [ - "▁Epi", - -11.95621395111084 - ], - [ - "▁séjour", - -11.95627498626709 - ], - [ - "▁Smartphone", - -11.956306457519531 - ], - [ - "▁fired", - -11.956357955932617 - ], - [ - "beat", - -11.95650577545166 - ], - [ - "▁pockets", - -11.956506729125977 - ], - [ - "▁serviciu", - -11.956624031066895 - ], - [ - "▁affairs", - -11.95678424835205 - ], - [ - "▁Ry", - -11.956842422485352 - ], - [ - "▁Stadium", - -11.956954956054688 - ], - [ - "▁snacks", - -11.957182884216309 - ], - [ - "▁efectu", - -11.957221031188965 - ], - [ - "▁Richtung", - -11.957273483276367 - ], - [ - "▁dresses", - -11.957352638244629 - ], - [ - "▁Medien", - -11.95744800567627 - ], - [ - "writer", - -11.95759105682373 - ], - [ - "changing", - -11.957655906677246 - ], - [ - "▁supportive", - -11.957849502563477 - ], - [ - "▁beneath", - -11.957873344421387 - ], - [ - "paid", - -11.958078384399414 - ], - [ - "▁customize", - -11.958155632019043 - ], - [ - "▁Ferr", - -11.958187103271484 - ], - [ - "reaches", - -11.958338737487793 - ], - [ - "arma", - -11.958401679992676 - ], - [ - "ción", - -11.958598136901855 - ], - [ - "▁elderly", - 
-11.959243774414062 - ], - [ - "▁modification", - -11.95934009552002 - ], - [ - "▁perfection", - -11.959381103515625 - ], - [ - "▁Allow", - -11.959492683410645 - ], - [ - "▁belonging", - -11.959542274475098 - ], - [ - "▁compound", - -11.959589004516602 - ], - [ - "▁Results", - -11.959681510925293 - ], - [ - "▁astăzi", - -11.959793090820312 - ], - [ - "▁Liber", - -11.959818840026855 - ], - [ - "jor", - -11.959850311279297 - ], - [ - "▁Nin", - -11.959980964660645 - ], - [ - "▁lumina", - -11.959992408752441 - ], - [ - "▁130", - -11.960073471069336 - ], - [ - "▁Platform", - -11.960121154785156 - ], - [ - "▁SMS", - -11.960221290588379 - ], - [ - "▁medic", - -11.96024227142334 - ], - [ - "hör", - -11.960315704345703 - ], - [ - "▁Kas", - -11.96038818359375 - ], - [ - "▁tomato", - -11.960403442382812 - ], - [ - "▁logiciel", - -11.960505485534668 - ], - [ - "php", - -11.960654258728027 - ], - [ - "▁premises", - -11.96071720123291 - ], - [ - "▁Communication", - -11.96072769165039 - ], - [ - "▁reprezintă", - -11.960762023925781 - ], - [ - "▁Partners", - -11.960866928100586 - ], - [ - "▁RV", - -11.961090087890625 - ], - [ - "▁pants", - -11.961197853088379 - ], - [ - "▁envie", - -11.961256980895996 - ], - [ - "▁commerce", - -11.961263656616211 - ], - [ - "▁tears", - -11.961298942565918 - ], - [ - "▁cooler", - -11.961494445800781 - ], - [ - "strand", - -11.961556434631348 - ], - [ - "▁Gil", - -11.961588859558105 - ], - [ - "▁référence", - -11.961641311645508 - ], - [ - "▁electronics", - -11.961681365966797 - ], - [ - "exposition", - -11.961700439453125 - ], - [ - "▁Caribbean", - -11.96171760559082 - ], - [ - "▁compelling", - -11.96171760559082 - ], - [ - "luci", - -11.961723327636719 - ], - [ - "▁Brooklyn", - -11.961892127990723 - ], - [ - "▁Thai", - -11.961950302124023 - ], - [ - "dler", - -11.96198844909668 - ], - [ - "▁supra", - -11.962016105651855 - ], - [ - "centered", - -11.962026596069336 - ], - [ - "▁metro", - -11.962081909179688 - ], - [ - "▁03", - -11.962299346923828 - 
], - [ - "▁enrich", - -11.962437629699707 - ], - [ - "▁adevarat", - -11.962594985961914 - ], - [ - "5000", - -11.962961196899414 - ], - [ - "▁bell", - -11.96297550201416 - ], - [ - "▁sine", - -11.962996482849121 - ], - [ - "▁appealing", - -11.963088989257812 - ], - [ - "clam", - -11.963116645812988 - ], - [ - "▁vorhanden", - -11.963165283203125 - ], - [ - "▁pickup", - -11.963268280029297 - ], - [ - "▁Alaska", - -11.963269233703613 - ], - [ - "▁Nacht", - -11.963300704956055 - ], - [ - "borough", - -11.9633207321167 - ], - [ - "▁Blanc", - -11.96340274810791 - ], - [ - "▁apare", - -11.963616371154785 - ], - [ - "▁Works", - -11.963798522949219 - ], - [ - "mettent", - -11.963801383972168 - ], - [ - "atter", - -11.96389389038086 - ], - [ - "terra", - -11.963946342468262 - ], - [ - "▁Bit", - -11.964105606079102 - ], - [ - "RL", - -11.964131355285645 - ], - [ - "▁Wander", - -11.964262962341309 - ], - [ - "▁Hawk", - -11.964595794677734 - ], - [ - "▁Probleme", - -11.964665412902832 - ], - [ - "regel", - -11.964729309082031 - ], - [ - "hne", - -11.964739799499512 - ], - [ - "fass", - -11.96486759185791 - ], - [ - "▁Andy", - -11.965014457702637 - ], - [ - "▁befinde", - -11.965179443359375 - ], - [ - "boo", - -11.965265274047852 - ], - [ - "▁connectivity", - -11.965304374694824 - ], - [ - "▁spielt", - -11.965418815612793 - ], - [ - "zweiten", - -11.96547794342041 - ], - [ - "ţilor", - -11.965526580810547 - ], - [ - "▁confi", - -11.96561336517334 - ], - [ - "▁schlecht", - -11.965773582458496 - ], - [ - "▁Beginn", - -11.96581745147705 - ], - [ - "▁floating", - -11.965903282165527 - ], - [ - "nimmt", - -11.966071128845215 - ], - [ - "▁arbeiten", - -11.96611213684082 - ], - [ - "pillar", - -11.966131210327148 - ], - [ - "sterreich", - -11.966347694396973 - ], - [ - "▁Schule", - -11.966446876525879 - ], - [ - "▁durée", - -11.966521263122559 - ], - [ - "▁honestly", - -11.96653938293457 - ], - [ - "▁acel", - -11.9666166305542 - ], - [ - "▁Prozess", - -11.96662425994873 - ], - [ - 
"Min", - -11.966629028320312 - ], - [ - "enii", - -11.966632843017578 - ], - [ - "DAY", - -11.966758728027344 - ], - [ - "▁Blo", - -11.966806411743164 - ], - [ - "▁bolt", - -11.966946601867676 - ], - [ - "sicher", - -11.967070579528809 - ], - [ - "▁17,", - -11.967122077941895 - ], - [ - "▁anchor", - -11.967215538024902 - ], - [ - "▁consistency", - -11.967241287231445 - ], - [ - "▁relatives", - -11.967263221740723 - ], - [ - "▁lac", - -11.967385292053223 - ], - [ - "105", - -11.967432975769043 - ], - [ - "▁Craig", - -11.967534065246582 - ], - [ - "▁mandate", - -11.967598915100098 - ], - [ - "▁bedeutet", - -11.967674255371094 - ], - [ - "▁Soviet", - -11.967680931091309 - ], - [ - "▁arguments", - -11.967938423156738 - ], - [ - "▁Gebäude", - -11.967997550964355 - ], - [ - "▁Parliament", - -11.968005180358887 - ], - [ - "▁Kha", - -11.968087196350098 - ], - [ - "nica", - -11.968130111694336 - ], - [ - "▁Amazing", - -11.968162536621094 - ], - [ - "gründe", - -11.968179702758789 - ], - [ - "▁Ott", - -11.968269348144531 - ], - [ - "Exp", - -11.968314170837402 - ], - [ - "▁ianuarie", - -11.96848201751709 - ], - [ - "riot", - -11.968571662902832 - ], - [ - "▁futur", - -11.968626976013184 - ], - [ - "▁Honda", - -11.968647956848145 - ], - [ - "!!!!", - -11.96865177154541 - ], - [ - "▁citit", - -11.968689918518066 - ], - [ - "▁22,", - -11.968708992004395 - ], - [ - "țional", - -11.968711853027344 - ], - [ - "▁lovers", - -11.968732833862305 - ], - [ - "▁Current", - -11.968835830688477 - ], - [ - "▁drone", - -11.96927261352539 - ], - [ - "▁promising", - -11.969335556030273 - ], - [ - "devoted", - -11.969443321228027 - ], - [ - "▁Born", - -11.969520568847656 - ], - [ - "▁viitor", - -11.969589233398438 - ], - [ - "▁ritual", - -11.969614028930664 - ], - [ - "▁Guard", - -11.969681739807129 - ], - [ - "09.", - -11.969828605651855 - ], - [ - "▁Py", - -11.970260620117188 - ], - [ - "▁finds", - -11.970380783081055 - ], - [ - "▁boli", - -11.970394134521484 - ], - [ - "▁Mitglieder", - 
-11.970697402954102 - ], - [ - "ogni", - -11.97107982635498 - ], - [ - "▁stones", - -11.97118854522705 - ], - [ - "rox", - -11.971210479736328 - ], - [ - "▁dock", - -11.971390724182129 - ], - [ - "▁onion", - -11.97144889831543 - ], - [ - "▁classified", - -11.971538543701172 - ], - [ - "big", - -11.971833229064941 - ], - [ - "RG", - -11.971857070922852 - ], - [ - "influenced", - -11.971955299377441 - ], - [ - "▁sudden", - -11.971988677978516 - ], - [ - "▁ample", - -11.97204303741455 - ], - [ - "án", - -11.972095489501953 - ], - [ - "▁ornament", - -11.972122192382812 - ], - [ - "datele", - -11.972227096557617 - ], - [ - "▁Dad", - -11.97225284576416 - ], - [ - "BER", - -11.972278594970703 - ], - [ - "gerecht", - -11.972380638122559 - ], - [ - "kett", - -11.972536087036133 - ], - [ - "▁Antonio", - -11.972572326660156 - ], - [ - "Nu", - -11.972834587097168 - ], - [ - "dium", - -11.97284984588623 - ], - [ - "CAD", - -11.972850799560547 - ], - [ - "▁bundle", - -11.972916603088379 - ], - [ - "▁Vari", - -11.97301197052002 - ], - [ - "▁thrive", - -11.973020553588867 - ], - [ - "▁Seminar", - -11.973071098327637 - ], - [ - "wire", - -11.973084449768066 - ], - [ - "▁contributing", - -11.973114967346191 - ], - [ - "▁Bour", - -11.97320556640625 - ], - [ - "▁dori", - -11.973206520080566 - ], - [ - "▁packing", - -11.97343921661377 - ], - [ - "▁colleges", - -11.973459243774414 - ], - [ - "▁garbage", - -11.97366714477539 - ], - [ - "▁vector", - -11.973837852478027 - ], - [ - "▁suggestion", - -11.973897933959961 - ], - [ - "borne", - -11.973904609680176 - ], - [ - "▁Listen", - -11.973938941955566 - ], - [ - "▁Prix", - -11.973957061767578 - ], - [ - "viennent", - -11.974162101745605 - ], - [ - "insbesondere", - -11.97426700592041 - ], - [ - "▁fonctionne", - -11.974435806274414 - ], - [ - "▁mainstream", - -11.974485397338867 - ], - [ - "▁merci", - -11.974574089050293 - ], - [ - "oko", - -11.97460651397705 - ], - [ - "▁Commerce", - -11.97493839263916 - ], - [ - "▁droits", - 
-11.975115776062012 - ], - [ - "▁muzica", - -11.975141525268555 - ], - [ - "▁profesor", - -11.9751558303833 - ], - [ - "▁epic", - -11.97518253326416 - ], - [ - "▁intuitive", - -11.975186347961426 - ], - [ - "▁aggregate", - -11.975223541259766 - ], - [ - "▁vaccine", - -11.97529411315918 - ], - [ - "▁dank", - -11.975459098815918 - ], - [ - "▁situ", - -11.975578308105469 - ], - [ - "▁Cand", - -11.975593566894531 - ], - [ - "▁Ganz", - -11.97562313079834 - ], - [ - "▁Crystal", - -11.97578239440918 - ], - [ - "▁discretion", - -11.975825309753418 - ], - [ - "mug", - -11.975997924804688 - ], - [ - "▁anzu", - -11.976144790649414 - ], - [ - "▁cement", - -11.97616958618164 - ], - [ - "▁priest", - -11.97625732421875 - ], - [ - "▁rejected", - -11.976298332214355 - ], - [ - "▁Summit", - -11.976325988769531 - ], - [ - "▁Sara", - -11.976424217224121 - ], - [ - "▁palette", - -11.976527214050293 - ], - [ - "▁continuare", - -11.976569175720215 - ], - [ - "uge", - -11.976676940917969 - ], - [ - "ryl", - -11.976844787597656 - ], - [ - "▁Solid", - -11.977142333984375 - ], - [ - "▁meilleure", - -11.977177619934082 - ], - [ - "▁Tennessee", - -11.977248191833496 - ], - [ - "rail", - -11.977326393127441 - ], - [ - "▁attributes", - -11.9773530960083 - ], - [ - "▁vessels", - -11.977840423583984 - ], - [ - "cylinder", - -11.977900505065918 - ], - [ - "▁parfait", - -11.977916717529297 - ], - [ - "abb", - -11.97801399230957 - ], - [ - "▁Julie", - -11.97806167602539 - ], - [ - "▁pièces", - -11.978120803833008 - ], - [ - "▁proiecte", - -11.978142738342285 - ], - [ - "médi", - -11.978273391723633 - ], - [ - "▁décembre", - -11.9783935546875 - ], - [ - "Per", - -11.97841739654541 - ], - [ - "1/", - -11.978520393371582 - ], - [ - "regulated", - -11.978601455688477 - ], - [ - "▁Dy", - -11.978633880615234 - ], - [ - "▁23,", - -11.978694915771484 - ], - [ - "beck", - -11.978763580322266 - ], - [ - "tură", - -11.97885513305664 - ], - [ - "▁Chiar", - -11.978931427001953 - ], - [ - "▁isolated", - 
-11.979012489318848 - ], - [ - "▁kennen", - -11.979259490966797 - ], - [ - "Du", - -11.979260444641113 - ], - [ - "reflected", - -11.979482650756836 - ], - [ - "▁belong", - -11.979571342468262 - ], - [ - "▁welcomed", - -11.97969913482666 - ], - [ - "▁Rate", - -11.979776382446289 - ], - [ - "prestigious", - -11.979859352111816 - ], - [ - "▁1/4", - -11.979930877685547 - ], - [ - "▁distinction", - -11.979966163635254 - ], - [ - "▁boring", - -11.980001449584961 - ], - [ - "▁booked", - -11.980369567871094 - ], - [ - "▁citizen", - -11.980441093444824 - ], - [ - "▁comprises", - -11.980498313903809 - ], - [ - "▁aufge", - -11.98051929473877 - ], - [ - "GL", - -11.980566024780273 - ], - [ - "▁nearest", - -11.980616569519043 - ], - [ - "▁printr", - -11.980692863464355 - ], - [ - "▁département", - -11.981318473815918 - ], - [ - "▁planner", - -11.981510162353516 - ], - [ - "▁Rai", - -11.981817245483398 - ], - [ - "▁Broad", - -11.981934547424316 - ], - [ - "▁pastor", - -11.981947898864746 - ], - [ - "▁reservation", - -11.982243537902832 - ], - [ - "▁decembrie", - -11.982315063476562 - ], - [ - "▁suficient", - -11.982501983642578 - ], - [ - "geld", - -11.982560157775879 - ], - [ - "training", - -11.982620239257812 - ], - [ - "deshalb", - -11.982634544372559 - ], - [ - "▁chaud", - -11.982651710510254 - ], - [ - "Cor", - -11.982662200927734 - ], - [ - "▁Grade", - -11.982769966125488 - ], - [ - "▁faţă", - -11.982809066772461 - ], - [ - "story", - -11.982839584350586 - ], - [ - "gericht", - -11.98286247253418 - ], - [ - "▁Got", - -11.982954025268555 - ], - [ - "particulièrement", - -11.982976913452148 - ], - [ - "▁bump", - -11.983051300048828 - ], - [ - "▁fatigue", - -11.983160018920898 - ], - [ - "Activ", - -11.983250617980957 - ], - [ - "▁numéro", - -11.983302116394043 - ], - [ - "▁stranger", - -11.983312606811523 - ], - [ - "▁Skin", - -11.983327865600586 - ], - [ - "add", - -11.98344898223877 - ], - [ - "Ainsi", - -11.98357105255127 - ], - [ - "▁assists", - -11.983684539794922 - 
], - [ - "▁zusätzlich", - -11.983943939208984 - ], - [ - "▁vede", - -11.983979225158691 - ], - [ - "RON", - -11.984108924865723 - ], - [ - "▁seemingly", - -11.984126091003418 - ], - [ - "▁NU", - -11.98417854309082 - ], - [ - "geb", - -11.984273910522461 - ], - [ - "▁Release", - -11.984353065490723 - ], - [ - "▁throwing", - -11.984427452087402 - ], - [ - "▁Alabama", - -11.984447479248047 - ], - [ - "▁Something", - -11.984590530395508 - ], - [ - "▁Cuba", - -11.98464584350586 - ], - [ - "▁Verbindung", - -11.984649658203125 - ], - [ - "▁Cir", - -11.984654426574707 - ], - [ - "your", - -11.984713554382324 - ], - [ - "-13", - -11.984748840332031 - ], - [ - "▁Delta", - -11.984801292419434 - ], - [ - "▁Twin", - -11.98504638671875 - ], - [ - "▁governance", - -11.985156059265137 - ], - [ - "▁groom", - -11.985310554504395 - ], - [ - "▁conception", - -11.98533821105957 - ], - [ - "▁governor", - -11.985383033752441 - ], - [ - "▁Spar", - -11.985416412353516 - ], - [ - "▁coastal", - -11.985652923583984 - ], - [ - "▁Seven", - -11.985856056213379 - ], - [ - "▁inclusive", - -11.986002922058105 - ], - [ - "cili", - -11.986035346984863 - ], - [ - "▁Ridge", - -11.986100196838379 - ], - [ - "teller", - -11.986224174499512 - ], - [ - "▁Kin", - -11.986247062683105 - ], - [ - "leiter", - -11.986279487609863 - ], - [ - "stern", - -11.986364364624023 - ], - [ - "change", - -11.986404418945312 - ], - [ - "▁presidential", - -11.986433982849121 - ], - [ - "▁composer", - -11.986544609069824 - ], - [ - "Stu", - -11.986560821533203 - ], - [ - "▁Frankfurt", - -11.986584663391113 - ], - [ - "prä", - -11.986639976501465 - ], - [ - "▁Ideal", - -11.986644744873047 - ], - [ - "▁linear", - -11.986857414245605 - ], - [ - "▁bloom", - -11.986879348754883 - ], - [ - "▁grades", - -11.986881256103516 - ], - [ - "mettant", - -11.98692512512207 - ], - [ - "▁finishes", - -11.986952781677246 - ], - [ - "holz", - -11.987086296081543 - ], - [ - "▁dirty", - -11.987317085266113 - ], - [ - "▁Roh", - -11.987386703491211 
- ], - [ - "▁Praxis", - -11.987408638000488 - ], - [ - "tempo", - -11.987433433532715 - ], - [ - "▁attempted", - -11.987433433532715 - ], - [ - "▁primar", - -11.987434387207031 - ], - [ - "▁pomp", - -11.987528800964355 - ], - [ - "▁tolle", - -11.987614631652832 - ], - [ - "▁adres", - -11.988011360168457 - ], - [ - "▁Between", - -11.988066673278809 - ], - [ - "▁ruin", - -11.988432884216309 - ], - [ - "▁matériel", - -11.988561630249023 - ], - [ - "MER", - -11.988913536071777 - ], - [ - "Nevertheless", - -11.989055633544922 - ], - [ - "▁corruption", - -11.989119529724121 - ], - [ - "spire", - -11.989180564880371 - ], - [ - "▁mou", - -11.989208221435547 - ], - [ - "ROM", - -11.989278793334961 - ], - [ - "▁underground", - -11.98935604095459 - ], - [ - "▁relativ", - -11.989389419555664 - ], - [ - "waited", - -11.989462852478027 - ], - [ - "▁speeds", - -11.989468574523926 - ], - [ - "▁adjusted", - -11.989486694335938 - ], - [ - "▁Flat", - -11.989514350891113 - ], - [ - "UND", - -11.98965835571289 - ], - [ - "▁individuelle", - -11.989744186401367 - ], - [ - "▁anybody", - -11.98978042602539 - ], - [ - "EO", - -11.989790916442871 - ], - [ - "->", - -11.989791870117188 - ], - [ - "▁Spend", - -11.989876747131348 - ], - [ - "aktion", - -11.990011215209961 - ], - [ - "édit", - -11.99006462097168 - ], - [ - "▁quest", - -11.990078926086426 - ], - [ - "rind", - -11.990541458129883 - ], - [ - "▁mediu", - -11.99057388305664 - ], - [ - "▁barriers", - -11.99062442779541 - ], - [ - "▁répondre", - -11.990633010864258 - ], - [ - "▁novembre", - -11.990708351135254 - ], - [ - "▁champ", - -11.990736961364746 - ], - [ - "saw", - -11.990757942199707 - ], - [ - "▁fed", - -11.990804672241211 - ], - [ - "▁favorites", - -11.990939140319824 - ], - [ - "▁shield", - -11.991055488586426 - ], - [ - "▁Wide", - -11.991146087646484 - ], - [ - "▁problema", - -11.991445541381836 - ], - [ - "▁Asta", - -11.991525650024414 - ], - [ - "▁refreshing", - -11.99168872833252 - ], - [ - "hey", - -11.991692543029785 - 
], - [ - "obtaining", - -11.991788864135742 - ], - [ - "▁parler", - -11.992072105407715 - ], - [ - "▁Cele", - -11.992134094238281 - ], - [ - "frage", - -11.992136001586914 - ], - [ - "écran", - -11.992324829101562 - ], - [ - "▁cleared", - -11.992448806762695 - ], - [ - "zehn", - -11.992594718933105 - ], - [ - "parmi", - -11.992647171020508 - ], - [ - "änder", - -11.992691993713379 - ], - [ - "▁Defense", - -11.992693901062012 - ], - [ - "tatea", - -11.992696762084961 - ], - [ - "▁reasonably", - -11.992939949035645 - ], - [ - "▁Idee", - -11.992985725402832 - ], - [ - "nehm", - -11.993000030517578 - ], - [ - "technologie", - -11.993020057678223 - ], - [ - "atura", - -11.993048667907715 - ], - [ - "▁slope", - -11.993332862854004 - ], - [ - "Hence", - -11.993351936340332 - ], - [ - "▁40%", - -11.993391990661621 - ], - [ - "▁jewe", - -11.993448257446289 - ], - [ - "▁queries", - -11.993470191955566 - ], - [ - "▁$8", - -11.994096755981445 - ], - [ - "▁Parker", - -11.994107246398926 - ], - [ - "▁publique", - -11.994488716125488 - ], - [ - "quant", - -11.994529724121094 - ], - [ - "issue", - -11.994690895080566 - ], - [ - "▁Cleveland", - -11.994847297668457 - ], - [ - "4,000", - -11.995071411132812 - ], - [ - "IDE", - -11.995145797729492 - ], - [ - "▁Barbara", - -11.995233535766602 - ], - [ - "udge", - -11.995477676391602 - ], - [ - "corn", - -11.99554443359375 - ], - [ - "veți", - -11.995588302612305 - ], - [ - "▁proteins", - -11.995707511901855 - ], - [ - "▁trăi", - -11.995793342590332 - ], - [ - "▁mijloc", - -11.995842933654785 - ], - [ - "logie", - -11.995884895324707 - ], - [ - "▁Walter", - -11.995884895324707 - ], - [ - "heißt", - -11.99593448638916 - ], - [ - "search", - -11.995946884155273 - ], - [ - "▁hochwertige", - -11.996010780334473 - ], - [ - "▁încerc", - -11.996014595031738 - ], - [ - "▁administrator", - -11.99608039855957 - ], - [ - "tension", - -11.996133804321289 - ], - [ - "▁homemade", - -11.996438026428223 - ], - [ - "▁$20", - -11.99651050567627 - ], - [ 
- "▁leben", - -11.996662139892578 - ], - [ - "netz", - -11.996665954589844 - ], - [ - "▁intensity", - -11.996882438659668 - ], - [ - "▁clever", - -11.996891975402832 - ], - [ - "▁installer", - -11.996999740600586 - ], - [ - "▁Wand", - -11.997087478637695 - ], - [ - "meister", - -11.997130393981934 - ], - [ - "ziel", - -11.99744701385498 - ], - [ - "▁architect", - -11.99748706817627 - ], - [ - "▁crede", - -11.997512817382812 - ], - [ - "▁Sleep", - -11.997675895690918 - ], - [ - "▁demonstr", - -11.997745513916016 - ], - [ - "cake", - -11.997781753540039 - ], - [ - "▁Cheap", - -11.997783660888672 - ], - [ - "pool", - -11.9979829788208 - ], - [ - "▁gadget", - -11.998004913330078 - ], - [ - "▁Anbieter", - -11.998005867004395 - ], - [ - "▁Jonathan", - -11.998170852661133 - ], - [ - "ül", - -11.998492240905762 - ], - [ - "▁Harvard", - -11.998503684997559 - ], - [ - "▁1985", - -11.998773574829102 - ], - [ - "HP", - -11.998839378356934 - ], - [ - "▁afara", - -11.99893569946289 - ], - [ - "▁halten", - -11.999008178710938 - ], - [ - "▁Technik", - -11.999042510986328 - ], - [ - "▁dressed", - -11.999149322509766 - ], - [ - "weis", - -11.999165534973145 - ], - [ - "▁donated", - -11.9993314743042 - ], - [ - "also", - -11.99938678741455 - ], - [ - "▁EN", - -11.999405860900879 - ], - [ - "▁imprim", - -11.99942398071289 - ], - [ - "▁onions", - -11.999458312988281 - ], - [ - "Par", - -11.99950122833252 - ], - [ - "▁donate", - -11.99958324432373 - ], - [ - "▁mice", - -11.999610900878906 - ], - [ - "referring", - -11.999897956848145 - ], - [ - "▁restored", - -12.00003433227539 - ], - [ - "▁amateur", - -12.0000581741333 - ], - [ - "▁Switch", - -12.000075340270996 - ], - [ - "appel", - -12.00013542175293 - ], - [ - "▁idéal", - -12.0001859664917 - ], - [ - "▁wheat", - -12.000199317932129 - ], - [ - "▁lime", - -12.000240325927734 - ], - [ - "REA", - -12.00027084350586 - ], - [ - "riti", - -12.000357627868652 - ], - [ - "ţiile", - -12.00058364868164 - ], - [ - "▁machinery", - 
-12.00064754486084 - ], - [ - "UNE", - -12.00089168548584 - ], - [ - "▁Cont", - -12.000971794128418 - ], - [ - "▁attendees", - -12.001014709472656 - ], - [ - "▁aparat", - -12.001080513000488 - ], - [ - "freundlich", - -12.00117301940918 - ], - [ - "▁zilnic", - -12.001175880432129 - ], - [ - "▁spark", - -12.001421928405762 - ], - [ - "▁Gast", - -12.001459121704102 - ], - [ - "▁Issue", - -12.00147533416748 - ], - [ - "▁scam", - -12.001566886901855 - ], - [ - "▁bonds", - -12.001618385314941 - ], - [ - "owner", - -12.001641273498535 - ], - [ - "▁empfehlen", - -12.001673698425293 - ], - [ - "elia", - -12.001749992370605 - ], - [ - "cic", - -12.001757621765137 - ], - [ - "▁honored", - -12.001800537109375 - ], - [ - "▁castle", - -12.001846313476562 - ], - [ - "avand", - -12.002058982849121 - ], - [ - "rough", - -12.002108573913574 - ], - [ - "▁Address", - -12.002116203308105 - ], - [ - "angle", - -12.00217342376709 - ], - [ - "leton", - -12.002259254455566 - ], - [ - "▁locked", - -12.002392768859863 - ], - [ - "▁consolid", - -12.00248908996582 - ], - [ - "▁voucher", - -12.003011703491211 - ], - [ - "ației", - -12.003201484680176 - ], - [ - "wachsen", - -12.003211975097656 - ], - [ - "▁magazines", - -12.003287315368652 - ], - [ - "▁Schools", - -12.003318786621094 - ], - [ - "▁voices", - -12.003362655639648 - ], - [ - "▁Dry", - -12.003479957580566 - ], - [ - "▁tricks", - -12.00349235534668 - ], - [ - "schließlich", - -12.003546714782715 - ], - [ - "▁loyalty", - -12.003687858581543 - ], - [ - "risk", - -12.003764152526855 - ], - [ - "▁Vers", - -12.003786087036133 - ], - [ - "chester", - -12.003802299499512 - ], - [ - "▁decorated", - -12.003830909729004 - ], - [ - "▁copiilor", - -12.003969192504883 - ], - [ - "riz", - -12.003994941711426 - ], - [ - "03.", - -12.004013061523438 - ], - [ - "▁Hur", - -12.004016876220703 - ], - [ - "▁archive", - -12.004021644592285 - ], - [ - "▁Continue", - -12.004042625427246 - ], - [ - "▁Nähe", - -12.004043579101562 - ], - [ - "jit", - 
-12.004090309143066 - ], - [ - "gekommen", - -12.004301071166992 - ], - [ - "▁conjunction", - -12.004349708557129 - ], - [ - "combining", - -12.004404067993164 - ], - [ - "▁Unterstützung", - -12.004517555236816 - ], - [ - "oza", - -12.004593849182129 - ], - [ - "▁sketch", - -12.004720687866211 - ], - [ - "▁arată", - -12.004731178283691 - ], - [ - "▁Mining", - -12.004765510559082 - ], - [ - "uous", - -12.004791259765625 - ], - [ - "▁devis", - -12.004834175109863 - ], - [ - "Almost", - -12.004862785339355 - ], - [ - "Hu", - -12.005037307739258 - ], - [ - "▁Om", - -12.005366325378418 - ], - [ - "MF", - -12.00544548034668 - ], - [ - "liz", - -12.005451202392578 - ], - [ - "▁fails", - -12.005456924438477 - ], - [ - "▁comparable", - -12.005459785461426 - ], - [ - "▁vein", - -12.005547523498535 - ], - [ - "▁Vis", - -12.00561809539795 - ], - [ - "▁viagra", - -12.005654335021973 - ], - [ - "▁farming", - -12.005678176879883 - ], - [ - "▁Late", - -12.005765914916992 - ], - [ - "geschrieben", - -12.006033897399902 - ], - [ - "hrew", - -12.006103515625 - ], - [ - "▁melt", - -12.006120681762695 - ], - [ - "lager", - -12.006168365478516 - ], - [ - "halte", - -12.006240844726562 - ], - [ - "▁Hotels", - -12.006266593933105 - ], - [ - "▁facebook", - -12.0064058303833 - ], - [ - "▁défi", - -12.006550788879395 - ], - [ - "shore", - -12.006802558898926 - ], - [ - "▁membrane", - -12.006866455078125 - ], - [ - "▁sixth", - -12.006903648376465 - ], - [ - "api", - -12.007003784179688 - ], - [ - "▁Owner", - -12.007222175598145 - ], - [ - "▁(\"", - -12.007234573364258 - ], - [ - "▁$50", - -12.007280349731445 - ], - [ - "▁protective", - -12.007420539855957 - ], - [ - "/2", - -12.007548332214355 - ], - [ - "▁Girls", - -12.007562637329102 - ], - [ - "Gri", - -12.00769329071045 - ], - [ - "▁nouă", - -12.007708549499512 - ], - [ - "▁infections", - -12.007813453674316 - ], - [ - "rân", - -12.007868766784668 - ], - [ - "▁Geb", - -12.0078763961792 - ], - [ - "▁Conseil", - -12.007905006408691 - ], - [ 
- "▁imagini", - -12.007909774780273 - ], - [ - "▁promotions", - -12.00794792175293 - ], - [ - "▁enforce", - -12.00795841217041 - ], - [ - "▁applicant", - -12.007965087890625 - ], - [ - "▁Apart", - -12.008087158203125 - ], - [ - "▁progression", - -12.008151054382324 - ], - [ - "▁careers", - -12.008511543273926 - ], - [ - "▁litigation", - -12.008533477783203 - ], - [ - "▁Menge", - -12.00866413116455 - ], - [ - "▁Contract", - -12.00871753692627 - ], - [ - "▁Kel", - -12.0087308883667 - ], - [ - "▁réserve", - -12.008769035339355 - ], - [ - "▁Cold", - -12.008870124816895 - ], - [ - "▁larg", - -12.009040832519531 - ], - [ - "▁microwave", - -12.009090423583984 - ], - [ - "▁Whit", - -12.009212493896484 - ], - [ - "▁Technologies", - -12.009381294250488 - ], - [ - "OU", - -12.00949478149414 - ], - [ - "itudine", - -12.00959587097168 - ], - [ - "▁handles", - -12.009895324707031 - ], - [ - "▁proceedings", - -12.009982109069824 - ], - [ - "▁prizes", - -12.010043144226074 - ], - [ - "▁unterstützen", - -12.010062217712402 - ], - [ - "▁piele", - -12.010090827941895 - ], - [ - "▁profound", - -12.010153770446777 - ], - [ - "schließen", - -12.0101957321167 - ], - [ - "▁trafic", - -12.01025104522705 - ], - [ - "▁Nar", - -12.010441780090332 - ], - [ - "▁Gesamt", - -12.0106201171875 - ], - [ - "▁bugs", - -12.010720252990723 - ], - [ - "▁Amy", - -12.010764122009277 - ], - [ - "▁eastern", - -12.010775566101074 - ], - [ - "nice", - -12.010784149169922 - ], - [ - "▁Besuch", - -12.010835647583008 - ], - [ - "▁synth", - -12.010892868041992 - ], - [ - "▁clasa", - -12.011194229125977 - ], - [ - "Book", - -12.01134204864502 - ], - [ - "▁ribbon", - -12.011415481567383 - ], - [ - "▁neues", - -12.011431694030762 - ], - [ - "ZE", - -12.011504173278809 - ], - [ - "▁peers", - -12.011613845825195 - ], - [ - "leistung", - -12.011730194091797 - ], - [ - "▁internship", - -12.011808395385742 - ], - [ - "count", - -12.011850357055664 - ], - [ - "nam", - -12.01193618774414 - ], - [ - "▁12-", - 
-12.012072563171387 - ], - [ - "acked", - -12.012146949768066 - ], - [ - "gonna", - -12.012146949768066 - ], - [ - "▁Dinge", - -12.01215648651123 - ], - [ - "Time", - -12.012299537658691 - ], - [ - "▁twelve", - -12.01242446899414 - ], - [ - "eye", - -12.012432098388672 - ], - [ - "▁avantaj", - -12.01253604888916 - ], - [ - "▁Glas", - -12.012731552124023 - ], - [ - "aucune", - -12.0127534866333 - ], - [ - "▁boil", - -12.012763977050781 - ], - [ - "▁Gray", - -12.012773513793945 - ], - [ - "adapt", - -12.01288890838623 - ], - [ - "occ", - -12.012895584106445 - ], - [ - "▁prieten", - -12.012897491455078 - ], - [ - "▁trai", - -12.01296615600586 - ], - [ - "▁Scal", - -12.013009071350098 - ], - [ - "▁conscious", - -12.013057708740234 - ], - [ - "▁charter", - -12.013093948364258 - ], - [ - "KS", - -12.013242721557617 - ], - [ - "▁Barr", - -12.013404846191406 - ], - [ - "▁summit", - -12.013411521911621 - ], - [ - "▁inflammation", - -12.013439178466797 - ], - [ - "tungs", - -12.013440132141113 - ], - [ - "ovic", - -12.013449668884277 - ], - [ - "▁conduit", - -12.013465881347656 - ], - [ - "▁Alice", - -12.013702392578125 - ], - [ - "▁veterans", - -12.013850212097168 - ], - [ - "Während", - -12.013944625854492 - ], - [ - "▁maximal", - -12.014013290405273 - ], - [ - "▁Hawaii", - -12.014037132263184 - ], - [ - "▁Pine", - -12.01432991027832 - ], - [ - "acelasi", - -12.014391899108887 - ], - [ - "hyp", - -12.014424324035645 - ], - [ - "sensitivity", - -12.01445198059082 - ], - [ - "pour", - -12.014481544494629 - ], - [ - "ре", - -12.014493942260742 - ], - [ - "▁Kentucky", - -12.015129089355469 - ], - [ - "▁badge", - -12.015276908874512 - ], - [ - "affecting", - -12.015310287475586 - ], - [ - "▁chairman", - -12.015311241149902 - ], - [ - "▁München", - -12.015467643737793 - ], - [ - "▁Hersteller", - -12.015469551086426 - ], - [ - "▁urmat", - -12.015615463256836 - ], - [ - "tels", - -12.015654563903809 - ], - [ - "▁FM", - -12.015701293945312 - ], - [ - "▁Basis", - -12.015732765197754 
- ], - [ - "▁erklärt", - -12.015809059143066 - ], - [ - "▁changer", - -12.015859603881836 - ], - [ - "tischen", - -12.0159330368042 - ], - [ - "▁brave", - -12.015960693359375 - ], - [ - "▁siguranta", - -12.015986442565918 - ], - [ - "▁partnerships", - -12.015989303588867 - ], - [ - "ților", - -12.015999794006348 - ], - [ - "▁breathe", - -12.016141891479492 - ], - [ - "rink", - -12.016551971435547 - ], - [ - "▁footage", - -12.016654014587402 - ], - [ - "▁transformed", - -12.016658782958984 - ], - [ - "▁prep", - -12.016866683959961 - ], - [ - "▁upset", - -12.016901969909668 - ], - [ - "▁Native", - -12.017059326171875 - ], - [ - "▁Prima", - -12.017154693603516 - ], - [ - "▁jersey", - -12.017163276672363 - ], - [ - "230", - -12.017182350158691 - ], - [ - "▁lucrurile", - -12.017393112182617 - ], - [ - "▁divine", - -12.017502784729004 - ], - [ - "▁Pit", - -12.017593383789062 - ], - [ - "RIS", - -12.01765251159668 - ], - [ - "▁Cultural", - -12.017672538757324 - ], - [ - "▁exotic", - -12.017786979675293 - ], - [ - "▁tastes", - -12.017881393432617 - ], - [ - "▁bargain", - -12.017913818359375 - ], - [ - "▁optimize", - -12.017985343933105 - ], - [ - "▁électrique", - -12.018012046813965 - ], - [ - "deuxième", - -12.018030166625977 - ], - [ - "▁Gary", - -12.018085479736328 - ], - [ - "▁projection", - -12.018122673034668 - ], - [ - "▁sliding", - -12.018195152282715 - ], - [ - "club", - -12.018216133117676 - ], - [ - "association", - -12.01823902130127 - ], - [ - "▁LG", - -12.018259048461914 - ], - [ - "▁capsule", - -12.018291473388672 - ], - [ - "▁politicians", - -12.018397331237793 - ], - [ - "▁thumb", - -12.018423080444336 - ], - [ - "▁globally", - -12.018743515014648 - ], - [ - "positioned", - -12.018796920776367 - ], - [ - "▁Hamilton", - -12.018861770629883 - ], - [ - "arme", - -12.018881797790527 - ], - [ - "▁efectuat", - -12.018881797790527 - ], - [ - "zip", - -12.019111633300781 - ], - [ - "▁welfare", - -12.019201278686523 - ], - [ - "Leistung", - -12.019230842590332 - ], 
- [ - "▁Bac", - -12.019316673278809 - ], - [ - "▁fizic", - -12.019338607788086 - ], - [ - "OK", - -12.019454002380371 - ], - [ - "▁limba", - -12.019545555114746 - ], - [ - "▁wardrobe", - -12.019549369812012 - ], - [ - "▁offline", - -12.019627571105957 - ], - [ - "▁fortune", - -12.019665718078613 - ], - [ - "▁dialog", - -12.019681930541992 - ], - [ - "▁dramatically", - -12.01997184753418 - ], - [ - "▁NYC", - -12.020045280456543 - ], - [ - "▁Rem", - -12.02017593383789 - ], - [ - "▁bronze", - -12.020455360412598 - ], - [ - "▁pulse", - -12.02053451538086 - ], - [ - "Fortunately", - -12.020562171936035 - ], - [ - "▁glue", - -12.020596504211426 - ], - [ - "▁Expo", - -12.020720481872559 - ], - [ - "▁profitable", - -12.020776748657227 - ], - [ - "▁distributor", - -12.020845413208008 - ], - [ - "abilité", - -12.020869255065918 - ], - [ - "▁lyrics", - -12.020913124084473 - ], - [ - "▁mesh", - -12.02114486694336 - ], - [ - "▁organizational", - -12.021157264709473 - ], - [ - "▁vanilla", - -12.021249771118164 - ], - [ - "▁foc", - -12.021355628967285 - ], - [ - "▁1984", - -12.02147388458252 - ], - [ - "▁créé", - -12.02172565460205 - ], - [ - "▁servi", - -12.022027969360352 - ], - [ - "▁underneath", - -12.022095680236816 - ], - [ - "▁surveys", - -12.022143363952637 - ], - [ - "▁genes", - -12.022238731384277 - ], - [ - "▁limite", - -12.02224349975586 - ], - [ - "oder", - -12.022247314453125 - ], - [ - "▁mandatory", - -12.022269248962402 - ], - [ - "▁hospitality", - -12.022303581237793 - ], - [ - "▁bikes", - -12.022309303283691 - ], - [ - "▁Quote", - -12.022358894348145 - ], - [ - "glu", - -12.02241039276123 - ], - [ - "▁activitatea", - -12.022513389587402 - ], - [ - "preventing", - -12.022584915161133 - ], - [ - "▁Kh", - -12.02259635925293 - ], - [ - "économie", - -12.022616386413574 - ], - [ - "▁visite", - -12.022757530212402 - ], - [ - "▁spectacle", - -12.022778511047363 - ], - [ - "▁tract", - -12.022860527038574 - ], - [ - "▁quant", - -12.022862434387207 - ], - [ - "▁evolu", - 
-12.022866249084473 - ], - [ - "▁invata", - -12.023070335388184 - ], - [ - "▁homo", - -12.02311897277832 - ], - [ - "▁Users", - -12.02344799041748 - ], - [ - "introducing", - -12.023632049560547 - ], - [ - "hibi", - -12.023661613464355 - ], - [ - "▁Instrument", - -12.023805618286133 - ], - [ - "▁ép", - -12.023839950561523 - ], - [ - "▁Raj", - -12.023869514465332 - ], - [ - "▁executives", - -12.023881912231445 - ], - [ - "atoire", - -12.023885726928711 - ], - [ - "▁erforderlich", - -12.02397346496582 - ], - [ - "male", - -12.024211883544922 - ], - [ - "umble", - -12.024271011352539 - ], - [ - "erson", - -12.024277687072754 - ], - [ - "▁Treatment", - -12.024286270141602 - ], - [ - "▁Representative", - -12.024314880371094 - ], - [ - "▁corners", - -12.024409294128418 - ], - [ - "▁Petit", - -12.024599075317383 - ], - [ - "8)", - -12.02464771270752 - ], - [ - "▁Walker", - -12.024714469909668 - ], - [ - "▁Stir", - -12.02476692199707 - ], - [ - "/19", - -12.024767875671387 - ], - [ - "▁Stelle", - -12.024979591369629 - ], - [ - "ără", - -12.025009155273438 - ], - [ - "osse", - -12.025166511535645 - ], - [ - "2000", - -12.025189399719238 - ], - [ - "▁McG", - -12.025580406188965 - ], - [ - "DV", - -12.025773048400879 - ], - [ - "▁Firm", - -12.025862693786621 - ], - [ - "▁packet", - -12.025904655456543 - ], - [ - "Toate", - -12.02640438079834 - ], - [ - "▁institutional", - -12.026479721069336 - ], - [ - "rug", - -12.026663780212402 - ], - [ - "DG", - -12.026837348937988 - ], - [ - "fine", - -12.026837348937988 - ], - [ - "bringen", - -12.026856422424316 - ], - [ - "▁Horse", - -12.026921272277832 - ], - [ - "▁premiere", - -12.026937484741211 - ], - [ - "▁Că", - -12.027026176452637 - ], - [ - "acheter", - -12.02703857421875 - ], - [ - "▁Afghanistan", - -12.027053833007812 - ], - [ - "▁Prop", - -12.027085304260254 - ], - [ - "ühr", - -12.02715015411377 - ], - [ - "▁braucht", - -12.027398109436035 - ], - [ - "▁sunny", - -12.027424812316895 - ], - [ - "▁Sach", - -12.027461051940918 
- ], - [ - "▁volumes", - -12.02753734588623 - ], - [ - "tinut", - -12.02759838104248 - ], - [ - "▁Sho", - -12.027722358703613 - ], - [ - "▁winds", - -12.027735710144043 - ], - [ - "▁Mall", - -12.027873992919922 - ], - [ - "ledge", - -12.027937889099121 - ], - [ - "▁sciences", - -12.027997016906738 - ], - [ - "plication", - -12.028024673461914 - ], - [ - "VR", - -12.028068542480469 - ], - [ - "destin", - -12.028234481811523 - ], - [ - "▁früh", - -12.02833366394043 - ], - [ - "▁tongue", - -12.028359413146973 - ], - [ - "▁Jennifer", - -12.028425216674805 - ], - [ - "▁bracket", - -12.028427124023438 - ], - [ - "▁episodes", - -12.02845287322998 - ], - [ - "breite", - -12.028461456298828 - ], - [ - "▁stoc", - -12.028635025024414 - ], - [ - "ilia", - -12.028728485107422 - ], - [ - "▁Gulf", - -12.02874755859375 - ], - [ - "▁transparency", - -12.028768539428711 - ], - [ - "Industrie", - -12.028853416442871 - ], - [ - "▁viewers", - -12.028916358947754 - ], - [ - "AIN", - -12.029129981994629 - ], - [ - "▁Registration", - -12.029149055480957 - ], - [ - "/4", - -12.029309272766113 - ], - [ - "▁fera", - -12.029337882995605 - ], - [ - "▁06", - -12.029351234436035 - ], - [ - "▁einzu", - -12.029391288757324 - ], - [ - "enburg", - -12.02944278717041 - ], - [ - "▁eff", - -12.029449462890625 - ], - [ - "▁Stage", - -12.029558181762695 - ], - [ - "▁Cour", - -12.029685020446777 - ], - [ - "indu", - -12.029836654663086 - ], - [ - "▁Tools", - -12.029909133911133 - ], - [ - "IST", - -12.029921531677246 - ], - [ - "grund", - -12.030105590820312 - ], - [ - "seitig", - -12.030153274536133 - ], - [ - "pai", - -12.030250549316406 - ], - [ - "▁waist", - -12.030350685119629 - ], - [ - "▁Therapy", - -12.03049373626709 - ], - [ - "▁nomination", - -12.030599594116211 - ], - [ - "▁seama", - -12.030790328979492 - ], - [ - "▁analyse", - -12.030975341796875 - ], - [ - "▁emerge", - -12.031044006347656 - ], - [ - "▁adjustment", - -12.031106948852539 - ], - [ - "▁stroll", - -12.031106948852539 - ], - [ - 
"▁Beyond", - -12.031174659729004 - ], - [ - "▁legally", - -12.03122615814209 - ], - [ - "▁gauge", - -12.03123664855957 - ], - [ - "▁26,", - -12.031360626220703 - ], - [ - "Tex", - -12.031390190124512 - ], - [ - "economic", - -12.031488418579102 - ], - [ - "stoffe", - -12.031532287597656 - ], - [ - "Wir", - -12.031559944152832 - ], - [ - "ffen", - -12.031601905822754 - ], - [ - "▁acoperi", - -12.031609535217285 - ], - [ - "▁finale", - -12.031792640686035 - ], - [ - "▁theoretical", - -12.031864166259766 - ], - [ - "1.3", - -12.031875610351562 - ], - [ - "anim", - -12.031888008117676 - ], - [ - "▁separation", - -12.031928062438965 - ], - [ - "agence", - -12.031937599182129 - ], - [ - "▁réalisé", - -12.032069206237793 - ], - [ - "sprech", - -12.03215503692627 - ], - [ - "▁embedded", - -12.032208442687988 - ], - [ - "▁defence", - -12.032242774963379 - ], - [ - "éni", - -12.032569885253906 - ], - [ - "▁Norman", - -12.032613754272461 - ], - [ - "▁insgesamt", - -12.032621383666992 - ], - [ - "▁reminde", - -12.032631874084473 - ], - [ - "▁timeline", - -12.032703399658203 - ], - [ - "▁symbols", - -12.032770156860352 - ], - [ - "▁booth", - -12.032783508300781 - ], - [ - "▁Window", - -12.032788276672363 - ], - [ - "▁Titan", - -12.032910346984863 - ], - [ - "înt", - -12.033021926879883 - ], - [ - "▁langa", - -12.033021926879883 - ], - [ - "isant", - -12.03303337097168 - ], - [ - "hart", - -12.033113479614258 - ], - [ - "broader", - -12.033266067504883 - ], - [ - "▁stays", - -12.033288955688477 - ], - [ - "dur", - -12.033488273620605 - ], - [ - "▁Actually", - -12.033514022827148 - ], - [ - "works", - -12.03351879119873 - ], - [ - "▁réussi", - -12.03357219696045 - ], - [ - "▁performant", - -12.033658981323242 - ], - [ - "▁banana", - -12.033788681030273 - ], - [ - "▁baked", - -12.033870697021484 - ], - [ - "▁Parlament", - -12.033931732177734 - ], - [ - "▁Legend", - -12.033967018127441 - ], - [ - "toata", - -12.034172058105469 - ], - [ - "platte", - -12.03419017791748 - ], - [ - 
"▁Mou", - -12.034192085266113 - ], - [ - "HL", - -12.034235000610352 - ], - [ - "▁(8", - -12.034290313720703 - ], - [ - "▁accepting", - -12.034313201904297 - ], - [ - "▁Senator", - -12.034340858459473 - ], - [ - "▁consciousness", - -12.034396171569824 - ], - [ - "▁conducting", - -12.0344820022583 - ], - [ - "▁panic", - -12.034833908081055 - ], - [ - "▁FDA", - -12.035112380981445 - ], - [ - "▁(7", - -12.035163879394531 - ], - [ - "tool", - -12.035300254821777 - ], - [ - "▁Shipping", - -12.03538703918457 - ], - [ - "▁hop", - -12.035545349121094 - ], - [ - "▁conferences", - -12.03564167022705 - ], - [ - "▁pork", - -12.035661697387695 - ], - [ - "▁spam", - -12.035730361938477 - ], - [ - "▁interesant", - -12.035815238952637 - ], - [ - "▁Tagen", - -12.03581714630127 - ], - [ - "sig", - -12.035886764526367 - ], - [ - "étro", - -12.036044120788574 - ], - [ - "▁legendary", - -12.036449432373047 - ], - [ - "▁Alternative", - -12.036643981933594 - ], - [ - "iana", - -12.036704063415527 - ], - [ - "▁responsable", - -12.036888122558594 - ], - [ - "▁Mihai", - -12.037237167358398 - ], - [ - "▁decreased", - -12.037345886230469 - ], - [ - "▁organised", - -12.037485122680664 - ], - [ - "▁Lamp", - -12.037589073181152 - ], - [ - "litz", - -12.037622451782227 - ], - [ - "ohn", - -12.037622451782227 - ], - [ - "▁moteur", - -12.0376615524292 - ], - [ - "III", - -12.03768539428711 - ], - [ - "▁Montag", - -12.037755012512207 - ], - [ - "▁naturel", - -12.037814140319824 - ], - [ - "▁Hus", - -12.037842750549316 - ], - [ - "▁Schl", - -12.037884712219238 - ], - [ - "ains", - -12.037968635559082 - ], - [ - "▁dying", - -12.0380859375 - ], - [ - "▁HIV", - -12.038115501403809 - ], - [ - "],", - -12.038164138793945 - ], - [ - "alität", - -12.03818416595459 - ], - [ - "▁institute", - -12.038249015808105 - ], - [ - "mix", - -12.038433074951172 - ], - [ - "▁Regulation", - -12.038453102111816 - ], - [ - "▁pagina", - -12.03857707977295 - ], - [ - "▁Awesome", - -12.03860092163086 - ], - [ - "▁Official", - 
-12.03860092163086 - ], - [ - "▁Minute", - -12.038601875305176 - ], - [ - "▁dairy", - -12.038787841796875 - ], - [ - "▁carti", - -12.038881301879883 - ], - [ - "isk", - -12.039091110229492 - ], - [ - "▁thrilled", - -12.039138793945312 - ], - [ - "▁german", - -12.039172172546387 - ], - [ - "▁frustration", - -12.039228439331055 - ], - [ - "▁forums", - -12.03927230834961 - ], - [ - "command", - -12.039361000061035 - ], - [ - "▁router", - -12.039399147033691 - ], - [ - "▁Lösung", - -12.039423942565918 - ], - [ - "white", - -12.039470672607422 - ], - [ - "▁synthetic", - -12.039487838745117 - ], - [ - "▁retrouver", - -12.039554595947266 - ], - [ - "alle", - -12.039621353149414 - ], - [ - "daran", - -12.039653778076172 - ], - [ - "▁wahr", - -12.039697647094727 - ], - [ - "▁paths", - -12.039875984191895 - ], - [ - "▁unver", - -12.039962768554688 - ], - [ - "▁Environment", - -12.0400972366333 - ], - [ - "▁médecin", - -12.040510177612305 - ], - [ - "crypt", - -12.040572166442871 - ], - [ - "▁pursuit", - -12.040595054626465 - ], - [ - "flat", - -12.040611267089844 - ], - [ - "bron", - -12.040698051452637 - ], - [ - "▁Specialist", - -12.040852546691895 - ], - [ - "▁Vent", - -12.041157722473145 - ], - [ - "Gen", - -12.04132080078125 - ], - [ - "▁attraction", - -12.04132080078125 - ], - [ - "▁piese", - -12.041372299194336 - ], - [ - "CHE", - -12.041665077209473 - ], - [ - "fähig", - -12.04172420501709 - ], - [ - "▁28,", - -12.041773796081543 - ], - [ - "defender", - -12.041810989379883 - ], - [ - "▁stupid", - -12.04181957244873 - ], - [ - "enfin", - -12.04185962677002 - ], - [ - "▁composite", - -12.04207706451416 - ], - [ - "fragen", - -12.042202949523926 - ], - [ - "Part", - -12.042232513427734 - ], - [ - "may", - -12.042238235473633 - ], - [ - "▁Bucureşti", - -12.042248725891113 - ], - [ - "▁février", - -12.042248725891113 - ], - [ - "RED", - -12.042417526245117 - ], - [ - "▁makers", - -12.042462348937988 - ], - [ - "▁guns", - -12.042594909667969 - ], - [ - "▁pasta", - 
-12.042706489562988 - ], - [ - "STR", - -12.04271125793457 - ], - [ - "▁worthy", - -12.042760848999023 - ], - [ - "Poate", - -12.042783737182617 - ], - [ - "▁101", - -12.04286003112793 - ], - [ - "▁souhaitez", - -12.04299545288086 - ], - [ - "GN", - -12.043449401855469 - ], - [ - "drive", - -12.043499946594238 - ], - [ - "▁aveti", - -12.043582916259766 - ], - [ - "▁eventual", - -12.043591499328613 - ], - [ - "▁américain", - -12.043642044067383 - ], - [ - "▁Mine", - -12.043678283691406 - ], - [ - "▁sunset", - -12.043729782104492 - ], - [ - "▁Choice", - -12.043844223022461 - ], - [ - "▁offset", - -12.043944358825684 - ], - [ - "APP", - -12.04410457611084 - ], - [ - "▁suchen", - -12.044130325317383 - ], - [ - "▁aduc", - -12.044228553771973 - ], - [ - "▁Unternehmens", - -12.044342041015625 - ], - [ - "▁//", - -12.044651985168457 - ], - [ - "▁astept", - -12.044678688049316 - ], - [ - "▁Birthday", - -12.045061111450195 - ], - [ - "▁barn", - -12.045083999633789 - ], - [ - "apport", - -12.045105934143066 - ], - [ - "▁collar", - -12.045212745666504 - ], - [ - "▁gefunden", - -12.045294761657715 - ], - [ - "▁Hai", - -12.045429229736328 - ], - [ - "▁Soul", - -12.045441627502441 - ], - [ - "ismus", - -12.045654296875 - ], - [ - "letzt", - -12.045754432678223 - ], - [ - "▁maker", - -12.045841217041016 - ], - [ - "▁executed", - -12.045857429504395 - ], - [ - "▁Forschung", - -12.045915603637695 - ], - [ - "▁täglich", - -12.045958518981934 - ], - [ - "▁tailor", - -12.045960426330566 - ], - [ - "▁headquarters", - -12.0460844039917 - ], - [ - "▁physicians", - -12.046112060546875 - ], - [ - "▁Scout", - -12.046126365661621 - ], - [ - "folgen", - -12.046175003051758 - ], - [ - "▁cycling", - -12.046184539794922 - ], - [ - "mindestens", - -12.04620361328125 - ], - [ - "▁joli", - -12.046216011047363 - ], - [ - "▁classification", - -12.046225547790527 - ], - [ - "▁Führung", - -12.046258926391602 - ], - [ - "▁peau", - -12.04629135131836 - ], - [ - "INT", - -12.046502113342285 - ], - [ - 
"▁Garage", - -12.046664237976074 - ], - [ - "teile", - -12.046714782714844 - ], - [ - "util", - -12.046716690063477 - ], - [ - "▁petrec", - -12.046751022338867 - ], - [ - "▁Nevada", - -12.046826362609863 - ], - [ - "▁laisser", - -12.04706859588623 - ], - [ - "▁territoire", - -12.047131538391113 - ], - [ - "▁fichier", - -12.047154426574707 - ], - [ - "▁Formula", - -12.047343254089355 - ], - [ - "scopul", - -12.047379493713379 - ], - [ - "▁Tee", - -12.047486305236816 - ], - [ - "▁Monte", - -12.047529220581055 - ], - [ - "▁pumpkin", - -12.04757022857666 - ], - [ - "▁picnic", - -12.047589302062988 - ], - [ - "▁occupation", - -12.047652244567871 - ], - [ - "▁numérique", - -12.047831535339355 - ], - [ - "linie", - -12.04786491394043 - ], - [ - "▁masina", - -12.048117637634277 - ], - [ - "▁Prä", - -12.048173904418945 - ], - [ - "▁dezvoltare", - -12.048177719116211 - ], - [ - "▁vient", - -12.048291206359863 - ], - [ - "▁ranks", - -12.048295021057129 - ], - [ - "▁Bruce", - -12.048420906066895 - ], - [ - "▁seara", - -12.048433303833008 - ], - [ - "▁hungry", - -12.048563003540039 - ], - [ - "▁resolved", - -12.048650741577148 - ], - [ - "paired", - -12.048735618591309 - ], - [ - "▁Congratulations", - -12.048881530761719 - ], - [ - "▁religi", - -12.048918724060059 - ], - [ - "sätze", - -12.04897689819336 - ], - [ - "▁Eat", - -12.049172401428223 - ], - [ - "▁dense", - -12.049442291259766 - ], - [ - "▁slice", - -12.049447059631348 - ], - [ - "▁mulți", - -12.049463272094727 - ], - [ - "▁vorbe", - -12.049517631530762 - ], - [ - "▁terminate", - -12.049779891967773 - ], - [ - "worm", - -12.049880981445312 - ], - [ - "ignon", - -12.0499267578125 - ], - [ - "▁Howard", - -12.049992561340332 - ], - [ - "▁toddler", - -12.050017356872559 - ], - [ - "▁waters", - -12.050033569335938 - ], - [ - "▁graduates", - -12.0501708984375 - ], - [ - "▁fundraising", - -12.050298690795898 - ], - [ - "06.", - -12.05031967163086 - ], - [ - "▁scent", - -12.050346374511719 - ], - [ - "▁CPU", - 
-12.050406455993652 - ], - [ - "▁Kid", - -12.05045223236084 - ], - [ - "▁Years", - -12.050460815429688 - ], - [ - "▁Oktober", - -12.05063533782959 - ], - [ - "filled", - -12.050726890563965 - ], - [ - "▁Laser", - -12.05079460144043 - ], - [ - "▁tut", - -12.051032066345215 - ], - [ - "ively", - -12.051101684570312 - ], - [ - "▁WiFi", - -12.051161766052246 - ], - [ - "standen", - -12.051176071166992 - ], - [ - "▁publié", - -12.051243782043457 - ], - [ - "▁explaining", - -12.051279067993164 - ], - [ - "trieb", - -12.051288604736328 - ], - [ - "▁Rapid", - -12.0513334274292 - ], - [ - "▁unterstützt", - -12.051352500915527 - ], - [ - "▁Sonnen", - -12.051401138305664 - ], - [ - "▁lenses", - -12.05141544342041 - ], - [ - "▁pressing", - -12.051477432250977 - ], - [ - "▁respected", - -12.051657676696777 - ], - [ - "adapted", - -12.051706314086914 - ], - [ - "Don", - -12.051726341247559 - ], - [ - "▁mun", - -12.051733016967773 - ], - [ - "MAR", - -12.05180835723877 - ], - [ - "▁seam", - -12.051852226257324 - ], - [ - "chev", - -12.052140235900879 - ], - [ - "▁Sozial", - -12.052424430847168 - ], - [ - "▁Arabia", - -12.052485466003418 - ], - [ - "▁equation", - -12.05257511138916 - ], - [ - "▁elevi", - -12.052780151367188 - ], - [ - "▁piata", - -12.052868843078613 - ], - [ - "JA", - -12.052873611450195 - ], - [ - "▁wholesale", - -12.052887916564941 - ], - [ - "▁faithful", - -12.05296516418457 - ], - [ - "legal", - -12.053092002868652 - ], - [ - "▁Brexit", - -12.053095817565918 - ], - [ - "vention", - -12.053120613098145 - ], - [ - "▁adhere", - -12.053221702575684 - ], - [ - "▁Associate", - -12.053257942199707 - ], - [ - "▁decorations", - -12.053272247314453 - ], - [ - "▁crois", - -12.053359985351562 - ], - [ - "buck", - -12.053370475769043 - ], - [ - "▁smartphones", - -12.053421020507812 - ], - [ - "Regardless", - -12.053427696228027 - ], - [ - "center", - -12.053434371948242 - ], - [ - "eiß", - -12.053481101989746 - ], - [ - "▁emotion", - -12.053584098815918 - ], - [ - 
"▁Gespräch", - -12.053797721862793 - ], - [ - "▁Avi", - -12.053963661193848 - ], - [ - "▁loft", - -12.054059982299805 - ], - [ - "▁Wissen", - -12.054391860961914 - ], - [ - "▁orchestra", - -12.05439567565918 - ], - [ - "▁gehören", - -12.054421424865723 - ], - [ - "▁Reich", - -12.054532051086426 - ], - [ - "▁abandoned", - -12.054548263549805 - ], - [ - "▁Lanka", - -12.054586410522461 - ], - [ - "pala", - -12.054832458496094 - ], - [ - "▁Stell", - -12.054838180541992 - ], - [ - "logged", - -12.054924964904785 - ], - [ - "terie", - -12.054935455322266 - ], - [ - "▁educa", - -12.054954528808594 - ], - [ - "1).", - -12.055097579956055 - ], - [ - "▁disponibil", - -12.055119514465332 - ], - [ - "IND", - -12.055197715759277 - ], - [ - "▁Pont", - -12.055288314819336 - ], - [ - "▁téléphone", - -12.055398941040039 - ], - [ - "▁rope", - -12.055595397949219 - ], - [ - "ève", - -12.055622100830078 - ], - [ - "▁Trainer", - -12.056062698364258 - ], - [ - "▁présence", - -12.0560941696167 - ], - [ - "▁Oscar", - -12.056121826171875 - ], - [ - "▁VR", - -12.056342124938965 - ], - [ - "▁Besucher", - -12.056357383728027 - ], - [ - "▁disponibles", - -12.056447982788086 - ], - [ - "▁gelten", - -12.056604385375977 - ], - [ - "▁ports", - -12.056645393371582 - ], - [ - "Invest", - -12.056693077087402 - ], - [ - "ésormais", - -12.056795120239258 - ], - [ - "schauen", - -12.056880950927734 - ], - [ - "▁Command", - -12.056958198547363 - ], - [ - "▁alternate", - -12.05709171295166 - ], - [ - "citation", - -12.05713939666748 - ], - [ - "évolution", - -12.05714225769043 - ], - [ - "▁Maine", - -12.057145118713379 - ], - [ - "pflege", - -12.057174682617188 - ], - [ - "2011", - -12.057343482971191 - ], - [ - "▁Ground", - -12.057364463806152 - ], - [ - "▁ghost", - -12.057418823242188 - ], - [ - "lebt", - -12.057530403137207 - ], - [ - "▁scenarios", - -12.057595252990723 - ], - [ - "▁mall", - -12.057634353637695 - ], - [ - "▁Kings", - -12.057653427124023 - ], - [ - "▁15%", - -12.057848930358887 - ], - [ 
- "▁Paint", - -12.057848930358887 - ], - [ - "FD", - -12.057849884033203 - ], - [ - "ugg", - -12.058011054992676 - ], - [ - "▁Leon", - -12.058023452758789 - ], - [ - "▁grows", - -12.058135032653809 - ], - [ - "▁pharmacy", - -12.058384895324707 - ], - [ - "▁situat", - -12.0584135055542 - ], - [ - "20,000", - -12.05855941772461 - ], - [ - "▁10,000", - -12.058760643005371 - ], - [ - "▁membre", - -12.058771133422852 - ], - [ - "▁facilement", - -12.058806419372559 - ], - [ - "▁Analytics", - -12.058915138244629 - ], - [ - "▁Marvel", - -12.058930397033691 - ], - [ - "▁survived", - -12.059097290039062 - ], - [ - "▁conviction", - -12.059124946594238 - ], - [ - "▁Produktion", - -12.059260368347168 - ], - [ - "▁professionally", - -12.059293746948242 - ], - [ - "▁contributor", - -12.059486389160156 - ], - [ - "▁Kurs", - -12.059503555297852 - ], - [ - "▁humor", - -12.059549331665039 - ], - [ - "▁cinci", - -12.059609413146973 - ], - [ - "▁Different", - -12.059670448303223 - ], - [ - "▁Verarbeitung", - -12.059800148010254 - ], - [ - "▁inexpensive", - -12.059800148010254 - ], - [ - "▁sortie", - -12.05980110168457 - ], - [ - "▁thankful", - -12.059951782226562 - ], - [ - "▁vacances", - -12.059978485107422 - ], - [ - "▁vergangen", - -12.059979438781738 - ], - [ - "▁wings", - -12.05998420715332 - ], - [ - "▁nano", - -12.06003475189209 - ], - [ - "▁touches", - -12.060088157653809 - ], - [ - "▁Notice", - -12.060348510742188 - ], - [ - "▁reprezinta", - -12.060466766357422 - ], - [ - "▁rewarding", - -12.060555458068848 - ], - [ - "▁Kurz", - -12.060580253601074 - ], - [ - "▁mega", - -12.060611724853516 - ], - [ - "▁secrets", - -12.060646057128906 - ], - [ - "▁vorher", - -12.060667037963867 - ], - [ - "▁crescut", - -12.06074333190918 - ], - [ - "▁coordination", - -12.060754776000977 - ], - [ - "▁dissertation", - -12.060863494873047 - ], - [ - "▁header", - -12.060873985290527 - ], - [ - "existent", - -12.061070442199707 - ], - [ - "thal", - -12.061185836791992 - ], - [ - "▁translate", - 
-12.061214447021484 - ], - [ - "vertrag", - -12.06124210357666 - ], - [ - "GU", - -12.06126594543457 - ], - [ - "▁Arthur", - -12.061315536499023 - ], - [ - "wahl", - -12.061534881591797 - ], - [ - "▁octobre", - -12.061573028564453 - ], - [ - "▁bother", - -12.06157398223877 - ], - [ - "▁pencil", - -12.061580657958984 - ], - [ - "▁Dyna", - -12.061604499816895 - ], - [ - "▁complimentary", - -12.061651229858398 - ], - [ - "écoute", - -12.061676979064941 - ], - [ - "PB", - -12.061722755432129 - ], - [ - "▁independently", - -12.061759948730469 - ], - [ - "▁targeting", - -12.061840057373047 - ], - [ - "fought", - -12.061944961547852 - ], - [ - "mental", - -12.062112808227539 - ], - [ - "▁Veranstaltung", - -12.062300682067871 - ], - [ - "▁tatsächlich", - -12.062314987182617 - ], - [ - "▁Features", - -12.0625 - ], - [ - "▁1920", - -12.062554359436035 - ], - [ - "▁Domain", - -12.062885284423828 - ], - [ - "▁rally", - -12.062901496887207 - ], - [ - "▁iunie", - -12.063036918640137 - ], - [ - "▁fabrics", - -12.063070297241211 - ], - [ - "▁mint", - -12.063331604003906 - ], - [ - "▁antioxidant", - -12.063347816467285 - ], - [ - "hut", - -12.063432693481445 - ], - [ - "EPA", - -12.063496589660645 - ], - [ - "▁rigid", - -12.063498497009277 - ], - [ - "▁evit", - -12.063549995422363 - ], - [ - "▁personnage", - -12.063977241516113 - ], - [ - "▁garanti", - -12.0640287399292 - ], - [ - "▁Hä", - -12.064042091369629 - ], - [ - "▁Days", - -12.064048767089844 - ], - [ - "boarding", - -12.064050674438477 - ], - [ - "jemand", - -12.064166069030762 - ], - [ - "▁Pos", - -12.064262390136719 - ], - [ - "▁wool", - -12.064288139343262 - ], - [ - "▁boom", - -12.064349174499512 - ], - [ - "▁wichtige", - -12.06447982788086 - ], - [ - "▁emerged", - -12.064517974853516 - ], - [ - "▁smoothly", - -12.064802169799805 - ], - [ - "▁Interview", - -12.064942359924316 - ], - [ - "gemäß", - -12.06505012512207 - ], - [ - "▁suivi", - -12.065064430236816 - ], - [ - "▁missions", - -12.065129280090332 - ], - [ - 
"▁Kreis", - -12.065328598022461 - ], - [ - "century", - -12.065348625183105 - ], - [ - "▁tuned", - -12.065370559692383 - ], - [ - "isieren", - -12.065407752990723 - ], - [ - "▁Branch", - -12.065427780151367 - ], - [ - "▁Russell", - -12.065483093261719 - ], - [ - "▁**", - -12.065519332885742 - ], - [ - "▁Lehr", - -12.065617561340332 - ], - [ - "▁perspectives", - -12.065690040588379 - ], - [ - "▁handed", - -12.06570816040039 - ], - [ - "▁apporte", - -12.065743446350098 - ], - [ - "unta", - -12.065959930419922 - ], - [ - "▁contemplat", - -12.066255569458008 - ], - [ - "riel", - -12.06633472442627 - ], - [ - "▁freely", - -12.066341400146484 - ], - [ - "▁loyal", - -12.066451072692871 - ], - [ - "▁evolved", - -12.066518783569336 - ], - [ - "▁Cafe", - -12.066548347473145 - ], - [ - "▁assignments", - -12.066598892211914 - ], - [ - "▁Cream", - -12.066718101501465 - ], - [ - "▁Build", - -12.066731452941895 - ], - [ - "▁exams", - -12.066746711730957 - ], - [ - "▁graduation", - -12.066765785217285 - ], - [ - "▁Dining", - -12.066773414611816 - ], - [ - "inne", - -12.06684398651123 - ], - [ - "▁propriu", - -12.067055702209473 - ], - [ - "▁accordingly", - -12.067241668701172 - ], - [ - "▁seniors", - -12.067484855651855 - ], - [ - "▁sisters", - -12.067505836486816 - ], - [ - "formerly", - -12.067658424377441 - ], - [ - "▁fleur", - -12.067702293395996 - ], - [ - "▁alten", - -12.067802429199219 - ], - [ - "▁Gefühl", - -12.06797981262207 - ], - [ - "▁freeze", - -12.068222045898438 - ], - [ - "▁structured", - -12.068312644958496 - ], - [ - "▁reserved", - -12.068367004394531 - ], - [ - "stellt", - -12.068638801574707 - ], - [ - "▁foto", - -12.068668365478516 - ], - [ - "linger", - -12.06871223449707 - ], - [ - "▁profiter", - -12.068737030029297 - ], - [ - "▁trup", - -12.068862915039062 - ], - [ - "▁Hunter", - -12.068974494934082 - ], - [ - "▁widespread", - -12.069050788879395 - ], - [ - "entretien", - -12.069242477416992 - ], - [ - "▁Truck", - -12.06958293914795 - ], - [ - "Can", - 
-12.069656372070312 - ], - [ - "péri", - -12.06976318359375 - ], - [ - "▁>>", - -12.069926261901855 - ], - [ - "▁trains", - -12.070141792297363 - ], - [ - "▁faca", - -12.070149421691895 - ], - [ - "▁Patienten", - -12.070170402526855 - ], - [ - "▁scor", - -12.070361137390137 - ], - [ - "▁perceived", - -12.070384979248047 - ], - [ - "setzung", - -12.070393562316895 - ], - [ - "▁Robin", - -12.070558547973633 - ], - [ - "▁geboren", - -12.07060718536377 - ], - [ - "lons", - -12.070687294006348 - ], - [ - "inţa", - -12.070836067199707 - ], - [ - "glob", - -12.070887565612793 - ], - [ - "subsequently", - -12.07111930847168 - ], - [ - "▁vet", - -12.071170806884766 - ], - [ - "▁Holland", - -12.071328163146973 - ], - [ - "▁Clinical", - -12.071370124816895 - ], - [ - "▁uncertainty", - -12.071381568908691 - ], - [ - "hohen", - -12.071386337280273 - ], - [ - "uza", - -12.071431159973145 - ], - [ - "▁kleiner", - -12.071518898010254 - ], - [ - "▁substances", - -12.07155704498291 - ], - [ - "ados", - -12.071627616882324 - ], - [ - "wheel", - -12.07178020477295 - ], - [ - "▁cone", - -12.071990966796875 - ], - [ - "▁castig", - -12.072218894958496 - ], - [ - "▁Conditions", - -12.072242736816406 - ], - [ - "minus", - -12.072643280029297 - ], - [ - "▁permits", - -12.07265853881836 - ], - [ - "fond", - -12.072784423828125 - ], - [ - "▁reactions", - -12.07278823852539 - ], - [ - "▁Mario", - -12.072819709777832 - ], - [ - "▁materiale", - -12.07291030883789 - ], - [ - "AH", - -12.072924613952637 - ], - [ - "▁juillet", - -12.073172569274902 - ], - [ - "▁juridic", - -12.073182106018066 - ], - [ - "▁dropping", - -12.073200225830078 - ], - [ - "expérience", - -12.073225021362305 - ], - [ - "▁depot", - -12.073345184326172 - ], - [ - "▁plea", - -12.073490142822266 - ], - [ - "dezvoltarea", - -12.073512077331543 - ], - [ - "▁Independent", - -12.07363224029541 - ], - [ - "▁Homes", - -12.073674201965332 - ], - [ - "▁crust", - -12.073808670043945 - ], - [ - "▁pillow", - -12.073899269104004 - ], - [ 
- "kreis", - -12.073920249938965 - ], - [ - "▁boiler", - -12.073928833007812 - ], - [ - "latin", - -12.073978424072266 - ], - [ - "▁stet", - -12.074131965637207 - ], - [ - "GH", - -12.074143409729004 - ], - [ - "▁absent", - -12.074334144592285 - ], - [ - "▁Directors", - -12.074501037597656 - ], - [ - "zwischen", - -12.07462215423584 - ], - [ - "▁comprendre", - -12.07465648651123 - ], - [ - "▁25,", - -12.074832916259766 - ], - [ - "▁pharmaceutical", - -12.075145721435547 - ], - [ - "▁placeholder", - -12.075174331665039 - ], - [ - "KI", - -12.075176239013672 - ], - [ - "▁români", - -12.07540225982666 - ], - [ - "▁Dollar", - -12.075509071350098 - ], - [ - "▁Operations", - -12.075525283813477 - ], - [ - "▁Dublin", - -12.075550079345703 - ], - [ - "▁drawings", - -12.0756196975708 - ], - [ - "▁respir", - -12.075769424438477 - ], - [ - "▁haul", - -12.0758056640625 - ], - [ - "Obviously", - -12.075864791870117 - ], - [ - "▁Beat", - -12.075864791870117 - ], - [ - "▁jeans", - -12.07590103149414 - ], - [ - "▁Masters", - -12.075927734375 - ], - [ - "▁bits", - -12.076213836669922 - ], - [ - "poți", - -12.076226234436035 - ], - [ - "▁asigur", - -12.076228141784668 - ], - [ - "▁intampla", - -12.076228141784668 - ], - [ - "▁marc", - -12.076282501220703 - ], - [ - "......", - -12.076404571533203 - ], - [ - "▁districts", - -12.076437950134277 - ], - [ - "cru", - -12.076457023620605 - ], - [ - "nav", - -12.076608657836914 - ], - [ - "huile", - -12.076644897460938 - ], - [ - "▁limitation", - -12.076647758483887 - ], - [ - "boat", - -12.076712608337402 - ], - [ - "IRE", - -12.076720237731934 - ], - [ - "Unis", - -12.07675838470459 - ], - [ - "dated", - -12.0769624710083 - ], - [ - "▁consultants", - -12.07699203491211 - ], - [ - "▁Josh", - -12.077007293701172 - ], - [ - "tanz", - -12.077184677124023 - ], - [ - "launching", - -12.0772066116333 - ], - [ - "▁browsing", - -12.077310562133789 - ], - [ - "▁incerc", - -12.077314376831055 - ], - [ - "▁27,", - -12.077375411987305 - ], - [ - 
"не", - -12.077398300170898 - ], - [ - "wig", - -12.077415466308594 - ], - [ - "▁spar", - -12.077458381652832 - ], - [ - "▁token", - -12.077547073364258 - ], - [ - "▁09", - -12.077548027038574 - ], - [ - "spa", - -12.07766056060791 - ], - [ - "ometer", - -12.07772159576416 - ], - [ - "▁riders", - -12.077869415283203 - ], - [ - "▁Drop", - -12.077898979187012 - ], - [ - "RN", - -12.078103065490723 - ], - [ - "▁pairs", - -12.07815933227539 - ], - [ - "▁psychology", - -12.078420639038086 - ], - [ - "▁Douglas", - -12.078437805175781 - ], - [ - "▁verwenden", - -12.078516960144043 - ], - [ - "▁(9", - -12.07857894897461 - ], - [ - "▁Rental", - -12.078728675842285 - ], - [ - "▁délai", - -12.078847885131836 - ], - [ - "▁sooner", - -12.078882217407227 - ], - [ - "▁bankruptcy", - -12.079109191894531 - ], - [ - "04.", - -12.079110145568848 - ], - [ - "abend", - -12.079194068908691 - ], - [ - "çon", - -12.079237937927246 - ], - [ - "▁Ple", - -12.079243659973145 - ], - [ - "fug", - -12.079337120056152 - ], - [ - "▁Wohnung", - -12.079410552978516 - ], - [ - "▁Preise", - -12.079424858093262 - ], - [ - "▁Kay", - -12.079427719116211 - ], - [ - "▁notify", - -12.079474449157715 - ], - [ - "▁Brain", - -12.079534530639648 - ], - [ - "▁optical", - -12.079580307006836 - ], - [ - "▁modifications", - -12.079727172851562 - ], - [ - "▁repos", - -12.07999324798584 - ], - [ - "▁worksheet", - -12.0800142288208 - ], - [ - "continu", - -12.08005428314209 - ], - [ - "▁assumed", - -12.08059024810791 - ], - [ - "varying", - -12.080626487731934 - ], - [ - "feier", - -12.080643653869629 - ], - [ - "▁Freedom", - -12.080717086791992 - ], - [ - "▁Inhalte", - -12.080740928649902 - ], - [ - "▁observations", - -12.080755233764648 - ], - [ - "▁Gruppe", - -12.080791473388672 - ], - [ - "▁Cyber", - -12.080883979797363 - ], - [ - "hort", - -12.080889701843262 - ], - [ - "▁langue", - -12.080915451049805 - ], - [ - "führen", - -12.08110523223877 - ], - [ - "ganze", - -12.081254005432129 - ], - [ - "▁forte", - 
-12.081327438354492 - ], - [ - "▁Stefan", - -12.081376075744629 - ], - [ - "▁Jetzt", - -12.081463813781738 - ], - [ - "mehr", - -12.081489562988281 - ], - [ - "trip", - -12.081549644470215 - ], - [ - "▁poem", - -12.081583976745605 - ], - [ - "▁practitioners", - -12.081720352172852 - ], - [ - "▁connector", - -12.08177661895752 - ], - [ - "ECT", - -12.081794738769531 - ], - [ - "▁inseamna", - -12.081820487976074 - ], - [ - "addressing", - -12.081867218017578 - ], - [ - "▁beliebt", - -12.081908226013184 - ], - [ - "▁Mama", - -12.082002639770508 - ], - [ - "▁fade", - -12.08204460144043 - ], - [ - "messen", - -12.08205509185791 - ], - [ - "▁Visa", - -12.082080841064453 - ], - [ - "▁Meta", - -12.082154273986816 - ], - [ - "lene", - -12.082188606262207 - ], - [ - "▁remembered", - -12.082334518432617 - ], - [ - "/3", - -12.082337379455566 - ], - [ - "apte", - -12.082347869873047 - ], - [ - "▁uncomfortable", - -12.082364082336426 - ], - [ - "▁romance", - -12.08253002166748 - ], - [ - "▁réalis", - -12.082601547241211 - ], - [ - "▁Vincent", - -12.082706451416016 - ], - [ - "▁ABC", - -12.08275318145752 - ], - [ - "▁handicap", - -12.082756042480469 - ], - [ - "▁Shin", - -12.082801818847656 - ], - [ - "▁Hunde", - -12.082847595214844 - ], - [ - "▁Ach", - -12.083131790161133 - ], - [ - "▁Questions", - -12.083136558532715 - ], - [ - "▁particles", - -12.083226203918457 - ], - [ - "usch", - -12.083230018615723 - ], - [ - "▁SUV", - -12.083279609680176 - ], - [ - "▁Tous", - -12.083301544189453 - ], - [ - "▁empower", - -12.08336067199707 - ], - [ - "▁Yi", - -12.083446502685547 - ], - [ - "▁LinkedIn", - -12.083453178405762 - ], - [ - "▁Profile", - -12.083507537841797 - ], - [ - "▁surround", - -12.083553314208984 - ], - [ - "▁wh", - -12.083560943603516 - ], - [ - "▁Weiter", - -12.083577156066895 - ], - [ - "▁Weight", - -12.083672523498535 - ], - [ - "▁creatures", - -12.083807945251465 - ], - [ - "Especially", - -12.08381462097168 - ], - [ - "▁repede", - -12.08383560180664 - ], - [ - 
"▁albums", - -12.083885192871094 - ], - [ - "▁compatibil", - -12.0839204788208 - ], - [ - "▁Interesse", - -12.083929061889648 - ], - [ - "abili", - -12.084062576293945 - ], - [ - "▁roast", - -12.084310531616211 - ], - [ - "▁unii", - -12.084310531616211 - ], - [ - "▁Glad", - -12.084421157836914 - ], - [ - "▁enthusiasm", - -12.084539413452148 - ], - [ - "▁whisk", - -12.084547996520996 - ], - [ - "▁freezer", - -12.084712982177734 - ], - [ - "▁stolen", - -12.084715843200684 - ], - [ - "▁neighbour", - -12.084883689880371 - ], - [ - "▁sake", - -12.084967613220215 - ], - [ - "▁Effect", - -12.0850191116333 - ], - [ - "▁fighter", - -12.085044860839844 - ], - [ - "▁tranquil", - -12.085084915161133 - ], - [ - "▁organizer", - -12.085199356079102 - ], - [ - "pixel", - -12.085306167602539 - ], - [ - "▁Guest", - -12.085338592529297 - ], - [ - "▁Philipp", - -12.085369110107422 - ], - [ - "kunft", - -12.085382461547852 - ], - [ - "▁Meer", - -12.085409164428711 - ], - [ - "▁inviting", - -12.085432052612305 - ], - [ - "gänge", - -12.085450172424316 - ], - [ - "▁Position", - -12.085627555847168 - ], - [ - "giving", - -12.085693359375 - ], - [ - "▁marble", - -12.085807800292969 - ], - [ - "▁neg", - -12.085813522338867 - ], - [ - "▁Haar", - -12.085914611816406 - ], - [ - "Ein", - -12.086039543151855 - ], - [ - "▁buses", - -12.086187362670898 - ], - [ - "▁Lodge", - -12.086188316345215 - ], - [ - "soare", - -12.086319923400879 - ], - [ - "▁Barn", - -12.086409568786621 - ], - [ - "▁captain", - -12.086527824401855 - ], - [ - "▁Fix", - -12.08657169342041 - ], - [ - "ulate", - -12.086629867553711 - ], - [ - "ență", - -12.086709022521973 - ], - [ - "▁finances", - -12.086770057678223 - ], - [ - "▁VIP", - -12.086800575256348 - ], - [ - "▁Adams", - -12.086801528930664 - ], - [ - "▁spécialisé", - -12.086960792541504 - ], - [ - "▁fortunate", - -12.087236404418945 - ], - [ - "ility", - -12.087345123291016 - ], - [ - "▁democracy", - -12.08749771118164 - ], - [ - "shu", - -12.087580680847168 - ], - [ 
- "▁consiste", - -12.087624549865723 - ], - [ - "▁tort", - -12.087692260742188 - ], - [ - "▁branding", - -12.087793350219727 - ], - [ - "▁porch", - -12.08780288696289 - ], - [ - "UNI", - -12.087867736816406 - ], - [ - "▁placut", - -12.087915420532227 - ], - [ - "▁coupled", - -12.088058471679688 - ], - [ - "▁ministre", - -12.088187217712402 - ], - [ - "▁minerals", - -12.088335037231445 - ], - [ - "▁safer", - -12.088335990905762 - ], - [ - "▁outlets", - -12.088438034057617 - ], - [ - "▁caution", - -12.08864688873291 - ], - [ - "▁lightly", - -12.0886869430542 - ], - [ - "▁utilizator", - -12.088700294494629 - ], - [ - "▁Pala", - -12.088959693908691 - ], - [ - "▁doll", - -12.088961601257324 - ], - [ - "(1)", - -12.089065551757812 - ], - [ - "chol", - -12.089120864868164 - ], - [ - "▁Left", - -12.08919620513916 - ], - [ - "▁roulant", - -12.089277267456055 - ], - [ - "▁propune", - -12.089301109313965 - ], - [ - "▁Cred", - -12.089339256286621 - ], - [ - "▁negotiations", - -12.089362144470215 - ], - [ - "amba", - -12.089393615722656 - ], - [ - "▁grasp", - -12.089420318603516 - ], - [ - "▁Amsterdam", - -12.089451789855957 - ], - [ - "▁Zweck", - -12.08945369720459 - ], - [ - "▁conven", - -12.089563369750977 - ], - [ - "▁organizing", - -12.089574813842773 - ], - [ - "section", - -12.089618682861328 - ], - [ - "▁endeavor", - -12.089634895324707 - ], - [ - "▁basics", - -12.089722633361816 - ], - [ - "jud", - -12.089874267578125 - ], - [ - "▁yarn", - -12.090049743652344 - ], - [ - "▁shout", - -12.09009075164795 - ], - [ - "fällt", - -12.090285301208496 - ], - [ - "▁dragoste", - -12.09054946899414 - ], - [ - "▁Rein", - -12.090594291687012 - ], - [ - "Cal", - -12.090688705444336 - ], - [ - "▁deaths", - -12.090729713439941 - ], - [ - "▁24,", - -12.0907564163208 - ], - [ - "▁măr", - -12.090773582458496 - ], - [ - "server", - -12.090825080871582 - ], - [ - "▁explic", - -12.09085464477539 - ], - [ - "▁sufer", - -12.090903282165527 - ], - [ - "▁lucrări", - -12.091097831726074 - ], - [ - 
"▁Disease", - -12.091126441955566 - ], - [ - "▁prescribed", - -12.091194152832031 - ], - [ - "prozess", - -12.091285705566406 - ], - [ - "▁dessin", - -12.091343879699707 - ], - [ - "▁refuge", - -12.091473579406738 - ], - [ - "▁cope", - -12.091631889343262 - ], - [ - "pole", - -12.09196949005127 - ], - [ - "▁vacant", - -12.091984748840332 - ], - [ - "▁sezon", - -12.092035293579102 - ], - [ - "▁Carbon", - -12.092227935791016 - ], - [ - "▁goût", - -12.092233657836914 - ], - [ - "Ste", - -12.092320442199707 - ], - [ - "▁surroundings", - -12.092754364013672 - ], - [ - "definite", - -12.09284496307373 - ], - [ - "▁adaptation", - -12.093358993530273 - ], - [ - "cteur", - -12.0933837890625 - ], - [ - "System", - -12.093442916870117 - ], - [ - "▁Burg", - -12.093550682067871 - ], - [ - "▁retention", - -12.093579292297363 - ], - [ - "examen", - -12.093618392944336 - ], - [ - "▁adjustments", - -12.093668937683105 - ], - [ - "nies", - -12.094213485717773 - ], - [ - "▁RSS", - -12.094215393066406 - ], - [ - "▁Umwelt", - -12.094259262084961 - ], - [ - "▁strengths", - -12.094326972961426 - ], - [ - "loom", - -12.094401359558105 - ], - [ - "▁pics", - -12.094404220581055 - ], - [ - "phase", - -12.09443187713623 - ], - [ - "▁Poland", - -12.094472885131836 - ], - [ - "▁practicing", - -12.094558715820312 - ], - [ - "monetary", - -12.094756126403809 - ], - [ - "▁embodiment", - -12.094756126403809 - ], - [ - "▁jocuri", - -12.094846725463867 - ], - [ - "▁impreuna", - -12.094939231872559 - ], - [ - "▁Lyon", - -12.094985961914062 - ], - [ - "keeping", - -12.095157623291016 - ], - [ - "▁Starting", - -12.095202445983887 - ], - [ - "▁începe", - -12.095357894897461 - ], - [ - "▁clay", - -12.095440864562988 - ], - [ - "bildung", - -12.095444679260254 - ], - [ - "Technologie", - -12.095513343811035 - ], - [ - "toxic", - -12.095624923706055 - ], - [ - "▁gasit", - -12.095819473266602 - ], - [ - "rott", - -12.095870018005371 - ], - [ - "brook", - -12.095935821533203 - ], - [ - "▁wann", - 
-12.096029281616211 - ], - [ - "▁lined", - -12.09610366821289 - ], - [ - "▁Chelsea", - -12.096223831176758 - ], - [ - "▁Orlando", - -12.096224784851074 - ], - [ - "▁Otherwise", - -12.096267700195312 - ], - [ - "▁debit", - -12.096273422241211 - ], - [ - "▁entsprechend", - -12.09648323059082 - ], - [ - "nism", - -12.09654426574707 - ], - [ - "issen", - -12.09664535522461 - ], - [ - "▁rendez", - -12.096646308898926 - ], - [ - "▁processus", - -12.096745491027832 - ], - [ - "mbi", - -12.096890449523926 - ], - [ - "▁Graduate", - -12.096960067749023 - ], - [ - "▁cozy", - -12.097119331359863 - ], - [ - "▁Freunde", - -12.097320556640625 - ], - [ - "▁teme", - -12.097389221191406 - ], - [ - "▁bias", - -12.097548484802246 - ], - [ - "102", - -12.09756851196289 - ], - [ - "terrorism", - -12.09770679473877 - ], - [ - "threatening", - -12.097756385803223 - ], - [ - "ни", - -12.097776412963867 - ], - [ - "▁Sonntag", - -12.098062515258789 - ], - [ - "▁efect", - -12.098116874694824 - ], - [ - "▁prayers", - -12.098134994506836 - ], - [ - "▁backpack", - -12.09841537475586 - ], - [ - "?)", - -12.098489761352539 - ], - [ - "▁searches", - -12.098788261413574 - ], - [ - "ouverture", - -12.09880256652832 - ], - [ - "▁sustained", - -12.098865509033203 - ], - [ - "hawk", - -12.098869323730469 - ], - [ - "messe", - -12.098958969116211 - ], - [ - "▁prototype", - -12.098989486694336 - ], - [ - "▁stră", - -12.09903335571289 - ], - [ - "▁Neo", - -12.099040985107422 - ], - [ - "▁29,", - -12.099109649658203 - ], - [ - "izo", - -12.099306106567383 - ], - [ - "▁Anton", - -12.099333763122559 - ], - [ - "SIS", - -12.099564552307129 - ], - [ - "pendant", - -12.099617958068848 - ], - [ - "▁passive", - -12.099813461303711 - ], - [ - "▁Aaron", - -12.099824905395508 - ], - [ - "▁Karen", - -12.099831581115723 - ], - [ - "▁Bildung", - -12.09994888305664 - ], - [ - "ario", - -12.099949836730957 - ], - [ - "▁regulator", - -12.100006103515625 - ], - [ - "gruppe", - -12.100032806396484 - ], - [ - "stepped", - 
-12.100053787231445 - ], - [ - "▁interventions", - -12.10014533996582 - ], - [ - "▁rounds", - -12.100149154663086 - ], - [ - "▁Khan", - -12.10020637512207 - ], - [ - "▁railway", - -12.10028076171875 - ], - [ - "▁souvenir", - -12.100296974182129 - ], - [ - "▁Plans", - -12.100336074829102 - ], - [ - "aille", - -12.100372314453125 - ], - [ - "▁billing", - -12.100473403930664 - ], - [ - "▁Spiele", - -12.100541114807129 - ], - [ - "▁supermarket", - -12.100556373596191 - ], - [ - "▁flows", - -12.100625991821289 - ], - [ - "▁PayPal", - -12.100641250610352 - ], - [ - "▁tribe", - -12.10067081451416 - ], - [ - "anni", - -12.100780487060547 - ], - [ - "▁rides", - -12.100934982299805 - ], - [ - "▁Orleans", - -12.101009368896484 - ], - [ - "▁evaluated", - -12.101021766662598 - ], - [ - "founder", - -12.10106372833252 - ], - [ - "▁Feld", - -12.101212501525879 - ], - [ - "▁altele", - -12.10122299194336 - ], - [ - "▁thermo", - -12.101290702819824 - ], - [ - "ugh", - -12.101330757141113 - ], - [ - "▁adus", - -12.101375579833984 - ], - [ - "▁Taiwan", - -12.101396560668945 - ], - [ - "▁clause", - -12.101409912109375 - ], - [ - "oxi", - -12.101465225219727 - ], - [ - "alcool", - -12.101495742797852 - ], - [ - "▁Noi", - -12.101531982421875 - ], - [ - "rub", - -12.101540565490723 - ], - [ - "▁dosar", - -12.101582527160645 - ], - [ - "▁Nelson", - -12.101751327514648 - ], - [ - "fassung", - -12.102316856384277 - ], - [ - "▁Kill", - -12.102489471435547 - ], - [ - "▁Standards", - -12.102490425109863 - ], - [ - "▁upward", - -12.102653503417969 - ], - [ - "▁Coloring", - -12.102664947509766 - ], - [ - "Designed", - -12.102754592895508 - ], - [ - "▁Nou", - -12.10281753540039 - ], - [ - "▁borrow", - -12.102940559387207 - ], - [ - "▁Poll", - -12.10321044921875 - ], - [ - "▁antibiotic", - -12.103277206420898 - ], - [ - "▁fabrication", - -12.103388786315918 - ], - [ - "quo", - -12.103432655334473 - ], - [ - "▁crimes", - -12.103464126586914 - ], - [ - "▁nahe", - -12.103484153747559 - ], - [ - 
"▁aplicat", - -12.103565216064453 - ], - [ - "OST", - -12.1035737991333 - ], - [ - "▁Beijing", - -12.103599548339844 - ], - [ - "fight", - -12.103612899780273 - ], - [ - "▁lodge", - -12.103612899780273 - ], - [ - "dreh", - -12.103922843933105 - ], - [ - "▁harness", - -12.104036331176758 - ], - [ - "▁noiembrie", - -12.104151725769043 - ], - [ - "ounded", - -12.104161262512207 - ], - [ - "▁Imp", - -12.1041841506958 - ], - [ - "▁nächste", - -12.104275703430176 - ], - [ - "funktion", - -12.104476928710938 - ], - [ - "exploitation", - -12.104569435119629 - ], - [ - "▁Ready", - -12.10457706451416 - ], - [ - "▁Plate", - -12.104598999023438 - ], - [ - "▁octombrie", - -12.104706764221191 - ], - [ - "▁considerat", - -12.104982376098633 - ], - [ - "▁Xbox", - -12.105067253112793 - ], - [ - "mind", - -12.105107307434082 - ], - [ - "▁Lind", - -12.105111122131348 - ], - [ - "runde", - -12.105352401733398 - ], - [ - "mination", - -12.105374336242676 - ], - [ - "▁memori", - -12.105377197265625 - ], - [ - "▁cere", - -12.105389595031738 - ], - [ - "barkeit", - -12.105517387390137 - ], - [ - "▁găsi", - -12.105761528015137 - ], - [ - "2.1", - -12.105863571166992 - ], - [ - "▁Finding", - -12.105891227722168 - ], - [ - "▁static", - -12.106405258178711 - ], - [ - "court", - -12.106439590454102 - ], - [ - "▁Gem", - -12.106489181518555 - ], - [ - "▁pièce", - -12.106494903564453 - ], - [ - "▁reel", - -12.10651969909668 - ], - [ - "▁manuscript", - -12.106560707092285 - ], - [ - "▁complications", - -12.106578826904297 - ], - [ - "▁controlling", - -12.106585502624512 - ], - [ - "▁favour", - -12.106738090515137 - ], - [ - "▁advancement", - -12.106739044189453 - ], - [ - "▁Radi", - -12.106870651245117 - ], - [ - "▁faites", - -12.107076644897461 - ], - [ - "▁ordin", - -12.107131958007812 - ], - [ - "sorted", - -12.107152938842773 - ], - [ - "▁1982", - -12.10715389251709 - ], - [ - "▁brutal", - -12.107154846191406 - ], - [ - "▁Guy", - -12.107226371765137 - ], - [ - "▁accomplishment", - 
-12.107248306274414 - ], - [ - "▁wer", - -12.107329368591309 - ], - [ - "▁withdraw", - -12.107460975646973 - ], - [ - "abilitate", - -12.1075439453125 - ], - [ - "▁NBA", - -12.107625961303711 - ], - [ - "▁Benefit", - -12.107675552368164 - ], - [ - "▁divide", - -12.107824325561523 - ], - [ - "induced", - -12.107913970947266 - ], - [ - "▁văzut", - -12.108049392700195 - ], - [ - "▁peel", - -12.10807991027832 - ], - [ - "▁joints", - -12.108160972595215 - ], - [ - "▁enthalten", - -12.108301162719727 - ], - [ - "▁spy", - -12.108397483825684 - ], - [ - "▁occasional", - -12.108437538146973 - ], - [ - "warm", - -12.108514785766602 - ], - [ - "ême", - -12.108542442321777 - ], - [ - "▁Betriebs", - -12.108551979064941 - ], - [ - "▁Ioan", - -12.1087064743042 - ], - [ - "▁balloon", - -12.108809471130371 - ], - [ - "▁leap", - -12.108869552612305 - ], - [ - "pelled", - -12.109000205993652 - ], - [ - "▁realise", - -12.109073638916016 - ], - [ - "▁Retail", - -12.109118461608887 - ], - [ - "▁Farben", - -12.109151840209961 - ], - [ - "▁Kennedy", - -12.10916519165039 - ], - [ - "▁Firma", - -12.109196662902832 - ], - [ - "▁tineri", - -12.10934066772461 - ], - [ - "tub", - -12.109354019165039 - ], - [ - "PORT", - -12.109381675720215 - ], - [ - "▁stiff", - -12.109416007995605 - ], - [ - "▁notable", - -12.109476089477539 - ], - [ - "tler", - -12.109498023986816 - ], - [ - "▁utile", - -12.10958480834961 - ], - [ - "▁jouer", - -12.109674453735352 - ], - [ - "▁Primary", - -12.109735488891602 - ], - [ - "▁retailer", - -12.109764099121094 - ], - [ - "▁jederzeit", - -12.109808921813965 - ], - [ - "▁amend", - -12.109817504882812 - ], - [ - "▁sagte", - -12.109845161437988 - ], - [ - "atch", - -12.10995864868164 - ], - [ - "ution", - -12.110008239746094 - ], - [ - "once", - -12.110018730163574 - ], - [ - "ended", - -12.1100435256958 - ], - [ - "▁literary", - -12.11013126373291 - ], - [ - "▁wrist", - -12.110281944274902 - ], - [ - "vii", - -12.11036205291748 - ], - [ - "scriere", - 
-12.110367774963379 - ], - [ - "▁compassion", - -12.110443115234375 - ], - [ - "▁Milan", - -12.110474586486816 - ], - [ - "▁Dach", - -12.110490798950195 - ], - [ - "▁problèmes", - -12.110630989074707 - ], - [ - "▁Pré", - -12.110687255859375 - ], - [ - "▁Feder", - -12.110759735107422 - ], - [ - "Dr", - -12.110814094543457 - ], - [ - "Spr", - -12.110908508300781 - ], - [ - "▁né", - -12.110969543457031 - ], - [ - "François", - -12.111023902893066 - ], - [ - "▁Shu", - -12.111115455627441 - ], - [ - "▁poison", - -12.111154556274414 - ], - [ - "zier", - -12.111176490783691 - ], - [ - "▁attain", - -12.11124038696289 - ], - [ - "▁switching", - -12.111310958862305 - ], - [ - "▁vibration", - -12.111348152160645 - ], - [ - "▁Tablet", - -12.11136531829834 - ], - [ - "▁Lern", - -12.11148452758789 - ], - [ - "offrir", - -12.111660957336426 - ], - [ - "123", - -12.11168098449707 - ], - [ - "cheapest", - -12.11173152923584 - ], - [ - "▁numărul", - -12.111764907836914 - ], - [ - "break", - -12.11180305480957 - ], - [ - "cyto", - -12.111836433410645 - ], - [ - "▁Mississippi", - -12.111955642700195 - ], - [ - "▁dragon", - -12.11207389831543 - ], - [ - "fir", - -12.112176895141602 - ], - [ - "▁fête", - -12.112180709838867 - ], - [ - "▁Wait", - -12.112350463867188 - ], - [ - "buy", - -12.112359046936035 - ], - [ - "având", - -12.112391471862793 - ], - [ - "▁Scar", - -12.112517356872559 - ], - [ - "▁Hund", - -12.112586975097656 - ], - [ - "bug", - -12.112807273864746 - ], - [ - "▁classique", - -12.112811088562012 - ], - [ - "▁tenant", - -12.112860679626465 - ], - [ - "▁Walt", - -12.11296272277832 - ], - [ - "▁timber", - -12.11296272277832 - ], - [ - "inscription", - -12.11300277709961 - ], - [ - "BD", - -12.113016128540039 - ], - [ - "▁Commissioner", - -12.113018989562988 - ], - [ - "▁casinos", - -12.11306095123291 - ], - [ - "▁prochain", - -12.113168716430664 - ], - [ - "▁rustic", - -12.11349868774414 - ], - [ - "▁Kent", - -12.113607406616211 - ], - [ - "▁Deci", - -12.113761901855469 - 
], - [ - "ли", - -12.113855361938477 - ], - [ - "▁crossed", - -12.113861083984375 - ], - [ - "▁delightful", - -12.113869667053223 - ], - [ - "▁metres", - -12.113872528076172 - ], - [ - "▁scandal", - -12.113906860351562 - ], - [ - "▁activitate", - -12.113986015319824 - ], - [ - "▁nimeni", - -12.114009857177734 - ], - [ - "ease", - -12.11402416229248 - ], - [ - "▁revenues", - -12.1140775680542 - ], - [ - "▁partially", - -12.114187240600586 - ], - [ - "AE", - -12.114263534545898 - ], - [ - "nique", - -12.114410400390625 - ], - [ - "▁fixtures", - -12.114426612854004 - ], - [ - "▁pupils", - -12.114694595336914 - ], - [ - "Lib", - -12.11471176147461 - ], - [ - "analyse", - -12.114739418029785 - ], - [ - "▁Oracle", - -12.114767074584961 - ], - [ - "troph", - -12.114859580993652 - ], - [ - "▁detected", - -12.114879608154297 - ], - [ - "▁servant", - -12.11507797241211 - ], - [ - "▁badly", - -12.115121841430664 - ], - [ - "comparing", - -12.115150451660156 - ], - [ - "abs", - -12.115238189697266 - ], - [ - "▁fotografi", - -12.115443229675293 - ], - [ - "▁Million", - -12.115541458129883 - ], - [ - "▁Gordon", - -12.11557388305664 - ], - [ - "▁Smok", - -12.115592002868652 - ], - [ - "▁Essay", - -12.11565113067627 - ], - [ - "eptic", - -12.115665435791016 - ], - [ - "▁Transportation", - -12.115728378295898 - ], - [ - "/2019", - -12.115767478942871 - ], - [ - "▁alignment", - -12.115778923034668 - ], - [ - "▁laut", - -12.11578369140625 - ], - [ - "stände", - -12.115791320800781 - ], - [ - "▁concerts", - -12.115811347961426 - ], - [ - "▁weekends", - -12.11589241027832 - ], - [ - "▁obstacles", - -12.115941047668457 - ], - [ - "wür", - -12.115964889526367 - ], - [ - "▁Fisher", - -12.116219520568848 - ], - [ - "▁supervisor", - -12.116242408752441 - ], - [ - "▁traders", - -12.116262435913086 - ], - [ - "▁scary", - -12.116484642028809 - ], - [ - "▁Grove", - -12.116538047790527 - ], - [ - "▁expose", - -12.116583824157715 - ], - [ - "▁enemies", - -12.116630554199219 - ], - [ - "▁Lux", - 
-12.11667537689209 - ], - [ - "▁Berufs", - -12.11672306060791 - ], - [ - "▁Sheet", - -12.116780281066895 - ], - [ - "▁Natürlich", - -12.116819381713867 - ], - [ - "▁examined", - -12.116886138916016 - ], - [ - "pursuing", - -12.116920471191406 - ], - [ - "▁pools", - -12.116923332214355 - ], - [ - "▁Thompson", - -12.117005348205566 - ], - [ - "▁SAP", - -12.117010116577148 - ], - [ - "claiming", - -12.117053985595703 - ], - [ - "buried", - -12.117055892944336 - ], - [ - "assurance", - -12.117138862609863 - ], - [ - "▁sandwich", - -12.117195129394531 - ], - [ - "uber", - -12.117310523986816 - ], - [ - "▁laisse", - -12.117321968078613 - ], - [ - "peak", - -12.117348670959473 - ], - [ - "spring", - -12.1173677444458 - ], - [ - "▁august", - -12.117369651794434 - ], - [ - "▁benötigt", - -12.11738109588623 - ], - [ - "▁achievements", - -12.117470741271973 - ], - [ - "coala", - -12.117478370666504 - ], - [ - "▁scr", - -12.117842674255371 - ], - [ - "gesagt", - -12.118122100830078 - ], - [ - "▁envelope", - -12.118141174316406 - ], - [ - "▁mapping", - -12.118169784545898 - ], - [ - "▁Suche", - -12.118298530578613 - ], - [ - "first", - -12.118329048156738 - ], - [ - "▁Quin", - -12.118447303771973 - ], - [ - "räu", - -12.118561744689941 - ], - [ - "▁răs", - -12.118583679199219 - ], - [ - "chemical", - -12.118597984313965 - ], - [ - "dad", - -12.118927955627441 - ], - [ - "formation", - -12.118983268737793 - ], - [ - "▁cushion", - -12.119026184082031 - ], - [ - "▁Maß", - -12.119046211242676 - ], - [ - "07.", - -12.119184494018555 - ], - [ - "▁perioadă", - -12.119257926940918 - ], - [ - "▁Wunsch", - -12.11925983428955 - ], - [ - "▁joi", - -12.119423866271973 - ], - [ - "▁$25", - -12.119482040405273 - ], - [ - "▁uploaded", - -12.11952018737793 - ], - [ - "▁hobby", - -12.119633674621582 - ], - [ - "▁septembrie", - -12.119633674621582 - ], - [ - "▁Dimension", - -12.119634628295898 - ], - [ - "▁domeniu", - -12.119661331176758 - ], - [ - "▁Tourism", - -12.119747161865234 - ], - [ - 
"▁fais", - -12.119800567626953 - ], - [ - "aches", - -12.119919776916504 - ], - [ - "neck", - -12.119969367980957 - ], - [ - "▁Chip", - -12.119982719421387 - ], - [ - "▁Tisch", - -12.1199951171875 - ], - [ - "▁Pai", - -12.120006561279297 - ], - [ - "▁Butter", - -12.120083808898926 - ], - [ - "▁altor", - -12.120133399963379 - ], - [ - "cultural", - -12.120182991027832 - ], - [ - "▁bases", - -12.12028980255127 - ], - [ - "▁Christopher", - -12.120396614074707 - ], - [ - "Kindle", - -12.120401382446289 - ], - [ - "▁bathrooms", - -12.12049388885498 - ], - [ - "▁civilian", - -12.12052059173584 - ], - [ - "▁Architecture", - -12.12058162689209 - ], - [ - "heiten", - -12.120641708374023 - ], - [ - "otte", - -12.120763778686523 - ], - [ - "ри", - -12.120784759521484 - ], - [ - "wash", - -12.120792388916016 - ], - [ - "▁evenimente", - -12.12086296081543 - ], - [ - "lade", - -12.121132850646973 - ], - [ - "▁ermöglicht", - -12.121140480041504 - ], - [ - "Port", - -12.121149063110352 - ], - [ - "▁Horn", - -12.12119197845459 - ], - [ - "▁Housing", - -12.121232032775879 - ], - [ - "▁Profit", - -12.121304512023926 - ], - [ - "▁stressed", - -12.12136459350586 - ], - [ - "▁70%", - -12.121431350708008 - ], - [ - "laying", - -12.121458053588867 - ], - [ - "▁specialize", - -12.121490478515625 - ], - [ - "▁Published", - -12.121519088745117 - ], - [ - "corp", - -12.121554374694824 - ], - [ - "▁revision", - -12.121611595153809 - ], - [ - "▁sail", - -12.121804237365723 - ], - [ - "courtesy", - -12.121909141540527 - ], - [ - "tax", - -12.1219482421875 - ], - [ - "▁perfekt", - -12.122018814086914 - ], - [ - "▁Risk", - -12.122088432312012 - ], - [ - "▁chaleur", - -12.122129440307617 - ], - [ - "ych", - -12.122132301330566 - ], - [ - "▁spine", - -12.12218189239502 - ], - [ - "▁holders", - -12.122264862060547 - ], - [ - "▁Speaking", - -12.122271537780762 - ], - [ - "▁Bernard", - -12.122400283813477 - ], - [ - "incarc", - -12.122532844543457 - ], - [ - "shalb", - -12.122639656066895 - ], - [ - 
"Potrivit", - -12.12264633178711 - ], - [ - "arising", - -12.122654914855957 - ], - [ - "▁kingdom", - -12.122665405273438 - ], - [ - "▁potato", - -12.122766494750977 - ], - [ - "▁promoted", - -12.122814178466797 - ], - [ - "▁judges", - -12.1228609085083 - ], - [ - "▁naturelle", - -12.122992515563965 - ], - [ - "▁Kindern", - -12.123022079467773 - ], - [ - "schicht", - -12.123047828674316 - ], - [ - "▁Drag", - -12.123066902160645 - ], - [ - "atta", - -12.123132705688477 - ], - [ - "soient", - -12.123249053955078 - ], - [ - "INS", - -12.12336540222168 - ], - [ - "▁legislative", - -12.123642921447754 - ], - [ - "▁teens", - -12.123785018920898 - ], - [ - "▁Fotos", - -12.123842239379883 - ], - [ - "▁illustrations", - -12.12392520904541 - ], - [ - "möglichkeiten", - -12.12415599822998 - ], - [ - "Votre", - -12.124194145202637 - ], - [ - "▁tarif", - -12.124195098876953 - ], - [ - "cli", - -12.124488830566406 - ], - [ - "▁landlord", - -12.12473201751709 - ], - [ - "cine", - -12.124743461608887 - ], - [ - "▁bot", - -12.124798774719238 - ], - [ - "enhancing", - -12.12491226196289 - ], - [ - "▁März", - -12.12491226196289 - ], - [ - "▁succès", - -12.125106811523438 - ], - [ - "▁disclose", - -12.125120162963867 - ], - [ - "▁Geräte", - -12.125321388244629 - ], - [ - "▁Magn", - -12.125422477722168 - ], - [ - "dessous", - -12.12580680847168 - ], - [ - "▁miracle", - -12.125862121582031 - ], - [ - "▁travailler", - -12.125933647155762 - ], - [ - "▁herb", - -12.125945091247559 - ], - [ - "-01", - -12.126049041748047 - ], - [ - "litre", - -12.126104354858398 - ], - [ - "▁tău", - -12.126120567321777 - ], - [ - "ACC", - -12.126190185546875 - ], - [ - "▁diminu", - -12.126275062561035 - ], - [ - "itzer", - -12.126317024230957 - ], - [ - "▁personenbezogen", - -12.126395225524902 - ], - [ - "▁Pure", - -12.126436233520508 - ], - [ - "▁influences", - -12.12668228149414 - ], - [ - "ană", - -12.126765251159668 - ], - [ - "▁proposer", - -12.126856803894043 - ], - [ - "▁longest", - 
-12.12692642211914 - ], - [ - "euses", - -12.127080917358398 - ], - [ - "/1", - -12.127487182617188 - ], - [ - "hafte", - -12.127716064453125 - ], - [ - "▁Dich", - -12.127761840820312 - ], - [ - "▁candle", - -12.128026962280273 - ], - [ - "ouche", - -12.128191947937012 - ], - [ - "installation", - -12.128241539001465 - ], - [ - "▁Includes", - -12.128280639648438 - ], - [ - "▁entfernt", - -12.12831974029541 - ], - [ - "traf", - -12.128499031066895 - ], - [ - "▁None", - -12.128508567810059 - ], - [ - "▁produc", - -12.128510475158691 - ], - [ - "held", - -12.128519058227539 - ], - [ - "graphic", - -12.128531455993652 - ], - [ - "▁demographic", - -12.128584861755371 - ], - [ - "ingham", - -12.1287841796875 - ], - [ - "schul", - -12.128812789916992 - ], - [ - "▁sneak", - -12.128843307495117 - ], - [ - "laub", - -12.128889083862305 - ], - [ - "▁thickness", - -12.12911605834961 - ], - [ - "▁killer", - -12.129297256469727 - ], - [ - "▁entsprechende", - -12.129344940185547 - ], - [ - "▁theft", - -12.129396438598633 - ], - [ - "▁Jerusalem", - -12.129457473754883 - ], - [ - "Adapt", - -12.129495620727539 - ], - [ - "▁updating", - -12.129497528076172 - ], - [ - "tete", - -12.12954330444336 - ], - [ - "▁warming", - -12.129701614379883 - ], - [ - "anlage", - -12.129739761352539 - ], - [ - "▁lenders", - -12.129814147949219 - ], - [ - "mobile", - -12.130008697509766 - ], - [ - "▁Package", - -12.130080223083496 - ], - [ - "▁Volume", - -12.130152702331543 - ], - [ - "---", - -12.130167007446289 - ], - [ - "▁Others", - -12.130173683166504 - ], - [ - "content", - -12.130188941955566 - ], - [ - "tement", - -12.130253791809082 - ], - [ - "bildet", - -12.13027572631836 - ], - [ - "▁washer", - -12.13053035736084 - ], - [ - "▁freelance", - -12.130623817443848 - ], - [ - "▁fein", - -12.130753517150879 - ], - [ - "▁catering", - -12.130851745605469 - ], - [ - "▁warmth", - -12.130911827087402 - ], - [ - "▁Month", - -12.131103515625 - ], - [ - "▁Federation", - -12.131134033203125 - ], - [ - 
"▁editorial", - -12.13121223449707 - ], - [ - "▁Shopping", - -12.131241798400879 - ], - [ - "▁efort", - -12.131296157836914 - ], - [ - "▁damp", - -12.131314277648926 - ], - [ - "▁declined", - -12.131332397460938 - ], - [ - "▁1978", - -12.13135051727295 - ], - [ - "6,000", - -12.131355285644531 - ], - [ - "location", - -12.131551742553711 - ], - [ - "▁blogger", - -12.131572723388672 - ], - [ - "▁goodness", - -12.131826400756836 - ], - [ - "▁Purchase", - -12.132119178771973 - ], - [ - "▁suspended", - -12.132159233093262 - ], - [ - "▁assessed", - -12.132201194763184 - ], - [ - "rada", - -12.132286071777344 - ], - [ - "▁Lac", - -12.132291793823242 - ], - [ - "▁angeboten", - -12.13235092163086 - ], - [ - "▁Wetter", - -12.132370948791504 - ], - [ - "ores", - -12.13243579864502 - ], - [ - "▁fourni", - -12.132476806640625 - ], - [ - "▁retire", - -12.13269329071045 - ], - [ - "▁Baptist", - -12.132741928100586 - ], - [ - "▁Saison", - -12.13277530670166 - ], - [ - "Bar", - -12.132794380187988 - ], - [ - "▁dossier", - -12.132979393005371 - ], - [ - "brow", - -12.133044242858887 - ], - [ - "▁Kaffee", - -12.133071899414062 - ], - [ - "-25", - -12.133463859558105 - ], - [ - "▁festivals", - -12.133599281311035 - ], - [ - "▁sellers", - -12.133716583251953 - ], - [ - "Ü", - -12.13393783569336 - ], - [ - "▁publisher", - -12.133960723876953 - ], - [ - "▁Designs", - -12.133970260620117 - ], - [ - "▁putut", - -12.13400936126709 - ], - [ - "▁Built", - -12.134417533874512 - ], - [ - "▁recreational", - -12.134476661682129 - ], - [ - "▁european", - -12.134514808654785 - ], - [ - "▁binary", - -12.134631156921387 - ], - [ - "▁Nieder", - -12.134764671325684 - ], - [ - "taking", - -12.1348237991333 - ], - [ - "▁Lots", - -12.13494873046875 - ], - [ - "▁recognised", - -12.135031700134277 - ], - [ - "ssant", - -12.135063171386719 - ], - [ - "ITE", - -12.135271072387695 - ], - [ - "oom", - -12.135298728942871 - ], - [ - "▁Kre", - -12.135310173034668 - ], - [ - "▁pipes", - -12.135631561279297 - ], - 
[ - "▁hinge", - -12.135653495788574 - ], - [ - "▁enterprises", - -12.135664939880371 - ], - [ - "▁texts", - -12.13583755493164 - ], - [ - "Organiz", - -12.136080741882324 - ], - [ - "▁suivre", - -12.136124610900879 - ], - [ - "noc", - -12.136157989501953 - ], - [ - "fair", - -12.136194229125977 - ], - [ - "▁darkness", - -12.136305809020996 - ], - [ - "▁Whi", - -12.13631534576416 - ], - [ - "natural", - -12.136321067810059 - ], - [ - "Bas", - -12.136422157287598 - ], - [ - "▁tribute", - -12.136443138122559 - ], - [ - "▁Naţional", - -12.136573791503906 - ], - [ - "hara", - -12.136622428894043 - ], - [ - "▁catégorie", - -12.136697769165039 - ], - [ - "▁Schedule", - -12.136698722839355 - ], - [ - "▁lernen", - -12.13671875 - ], - [ - "▁Plastic", - -12.136725425720215 - ], - [ - "▁giveaway", - -12.13675594329834 - ], - [ - "▁Ideen", - -12.136906623840332 - ], - [ - "▁circa", - -12.13718032836914 - ], - [ - "▁lice", - -12.137242317199707 - ], - [ - "▁Meinung", - -12.137264251708984 - ], - [ - "▁beside", - -12.137566566467285 - ], - [ - "▁vazut", - -12.137673377990723 - ], - [ - "strom", - -12.137749671936035 - ], - [ - "boro", - -12.137775421142578 - ], - [ - "▁Soon", - -12.137796401977539 - ], - [ - "dozens", - -12.137896537780762 - ], - [ - "▁Arena", - -12.137943267822266 - ], - [ - "▁viața", - -12.137989044189453 - ], - [ - "▁Impact", - -12.138082504272461 - ], - [ - "current", - -12.138106346130371 - ], - [ - "FM", - -12.138117790222168 - ], - [ - "▁coil", - -12.138657569885254 - ], - [ - "gold", - -12.138679504394531 - ], - [ - "▁spate", - -12.138679504394531 - ], - [ - "1.4", - -12.13875675201416 - ], - [ - "solution", - -12.138769149780273 - ], - [ - "▁Wayne", - -12.138835906982422 - ], - [ - "▁queen", - -12.138898849487305 - ], - [ - "illion", - -12.139022827148438 - ], - [ - "greifen", - -12.139127731323242 - ], - [ - "▁Bil", - -12.139174461364746 - ], - [ - "rote", - -12.139185905456543 - ], - [ - "END", - -12.13918685913086 - ], - [ - "äl", - 
-12.139206886291504 - ], - [ - "▁reçu", - -12.139378547668457 - ], - [ - "flower", - -12.139495849609375 - ], - [ - "▁draws", - -12.139519691467285 - ], - [ - "plant", - -12.139605522155762 - ], - [ - "2010", - -12.139702796936035 - ], - [ - "▁oper", - -12.139762878417969 - ], - [ - "▁conserve", - -12.139777183532715 - ], - [ - "▁sprinkle", - -12.13984203338623 - ], - [ - "mode", - -12.139924049377441 - ], - [ - "▁lifting", - -12.139941215515137 - ], - [ - "▁Institution", - -12.139951705932617 - ], - [ - "Când", - -12.14001750946045 - ], - [ - "Aus", - -12.140048027038574 - ], - [ - "▁fears", - -12.140054702758789 - ], - [ - "▁appointments", - -12.140079498291016 - ], - [ - "oarele", - -12.140162467956543 - ], - [ - "▁duck", - -12.140193939208984 - ], - [ - "▁stadium", - -12.140213012695312 - ], - [ - "▁vezi", - -12.140227317810059 - ], - [ - "▁lap", - -12.140315055847168 - ], - [ - "▁proceeds", - -12.140382766723633 - ], - [ - "geschlossen", - -12.140412330627441 - ], - [ - "▁tren", - -12.140478134155273 - ], - [ - "VS", - -12.140536308288574 - ], - [ - "▁vais", - -12.140800476074219 - ], - [ - "ținut", - -12.140859603881836 - ], - [ - "▁Concert", - -12.140928268432617 - ], - [ - "▁planting", - -12.141008377075195 - ], - [ - "▁honour", - -12.141069412231445 - ], - [ - "▁gras", - -12.141071319580078 - ], - [ - "woo", - -12.141092300415039 - ], - [ - "▁Hero", - -12.141282081604004 - ], - [ - "▁stimulate", - -12.14134407043457 - ], - [ - "▁überhaupt", - -12.141426086425781 - ], - [ - "▁bounce", - -12.14148235321045 - ], - [ - "oodle", - -12.14151382446289 - ], - [ - "▁packs", - -12.141576766967773 - ], - [ - "▁Poker", - -12.14158821105957 - ], - [ - "▁acea", - -12.141684532165527 - ], - [ - "▁parish", - -12.141754150390625 - ], - [ - "-24", - -12.141766548156738 - ], - [ - "▁iTunes", - -12.141874313354492 - ], - [ - "▁lumière", - -12.141948699951172 - ], - [ - "third", - -12.142024993896484 - ], - [ - "▁dynamics", - -12.142038345336914 - ], - [ - "Unless", - 
-12.142162322998047 - ], - [ - "▁immense", - -12.142416000366211 - ], - [ - "▁Sec", - -12.142781257629395 - ], - [ - "lois", - -12.143009185791016 - ], - [ - "époque", - -12.14302921295166 - ], - [ - "NB", - -12.143139839172363 - ], - [ - "written", - -12.143210411071777 - ], - [ - "▁logement", - -12.143226623535156 - ], - [ - "submitting", - -12.143295288085938 - ], - [ - "▁Quand", - -12.14331340789795 - ], - [ - "▁foi", - -12.143322944641113 - ], - [ - "▁catalogue", - -12.143351554870605 - ], - [ - "nova", - -12.14343547821045 - ], - [ - "▁prezentat", - -12.143527030944824 - ], - [ - "▁tart", - -12.143877983093262 - ], - [ - "те", - -12.143912315368652 - ], - [ - "hack", - -12.143916130065918 - ], - [ - "▁Politic", - -12.144003868103027 - ], - [ - "▁18,", - -12.144048690795898 - ], - [ - "▁ignored", - -12.144145965576172 - ], - [ - "▁spoon", - -12.144245147705078 - ], - [ - "▁Joy", - -12.144280433654785 - ], - [ - "▁reside", - -12.144482612609863 - ], - [ - ".99", - -12.144488334655762 - ], - [ - "lytic", - -12.144625663757324 - ], - [ - "▁bogat", - -12.144643783569336 - ], - [ - "▁nurses", - -12.144845008850098 - ], - [ - "▁funcţi", - -12.145029067993164 - ], - [ - "▁produselor", - -12.145038604736328 - ], - [ - "▁Associates", - -12.145069122314453 - ], - [ - "Est", - -12.14511489868164 - ], - [ - "▁peanut", - -12.145187377929688 - ], - [ - "▁résultat", - -12.145257949829102 - ], - [ - "08.", - -12.145424842834473 - ], - [ - "▁Astro", - -12.145439147949219 - ], - [ - "▁personnelle", - -12.145527839660645 - ], - [ - "320", - -12.145668983459473 - ], - [ - "▁Grab", - -12.145748138427734 - ], - [ - "éco", - -12.145801544189453 - ], - [ - "▁clasic", - -12.145857810974121 - ], - [ - "offre", - -12.14588451385498 - ], - [ - "▁idee", - -12.14589786529541 - ], - [ - "▁cheat", - -12.146259307861328 - ], - [ - "▁Flug", - -12.146286964416504 - ], - [ - "▁1500", - -12.146413803100586 - ], - [ - "▁kurze", - -12.14643383026123 - ], - [ - "With", - -12.146512985229492 - ], - [ 
- "▁Half", - -12.146575927734375 - ], - [ - "▁disciplines", - -12.146642684936523 - ], - [ - "sorption", - -12.14669132232666 - ], - [ - "▁greutate", - -12.146927833557129 - ], - [ - "mä", - -12.146940231323242 - ], - [ - "▁Literatur", - -12.146956443786621 - ], - [ - "3/", - -12.147016525268555 - ], - [ - "4.0", - -12.147095680236816 - ], - [ - "▁déco", - -12.147119522094727 - ], - [ - "▁Fuß", - -12.147233963012695 - ], - [ - "▁Deutsche", - -12.147289276123047 - ], - [ - "▁abundance", - -12.14746379852295 - ], - [ - "▁Luther", - -12.14750862121582 - ], - [ - "▁nutritional", - -12.147562980651855 - ], - [ - "▁Jude", - -12.147687911987305 - ], - [ - "AY", - -12.14786148071289 - ], - [ - "▁chore", - -12.147916793823242 - ], - [ - "▁Kro", - -12.148006439208984 - ], - [ - "▁alin", - -12.14801025390625 - ], - [ - "lösung", - -12.148030281066895 - ], - [ - "▁geworden", - -12.148238182067871 - ], - [ - "▁sociaux", - -12.148255348205566 - ], - [ - "▁Spark", - -12.1486177444458 - ], - [ - "▁phenomenon", - -12.148624420166016 - ], - [ - "ICA", - -12.148805618286133 - ], - [ - "▁Ran", - -12.148836135864258 - ], - [ - "▁Schwarz", - -12.148959159851074 - ], - [ - "▁1983", - -12.148985862731934 - ], - [ - "ет", - -12.148990631103516 - ], - [ - "möglich", - -12.149084091186523 - ], - [ - "vocation", - -12.149087905883789 - ], - [ - "▁Organic", - -12.14926815032959 - ], - [ - "Oh", - -12.149408340454102 - ], - [ - "▁blockchain", - -12.149422645568848 - ], - [ - "▁Bă", - -12.149515151977539 - ], - [ - "▁Bass", - -12.14953899383545 - ], - [ - "enie", - -12.149687767028809 - ], - [ - "▁rêve", - -12.149807929992676 - ], - [ - "▁Rap", - -12.149986267089844 - ], - [ - "▁democratic", - -12.150044441223145 - ], - [ - "▁Chart", - -12.150167465209961 - ], - [ - "▁Voi", - -12.150189399719238 - ], - [ - "process", - -12.150263786315918 - ], - [ - "▁preach", - -12.150389671325684 - ], - [ - "tient", - -12.150456428527832 - ], - [ - "▁Train", - -12.150468826293945 - ], - [ - "▁Reihe", - 
-12.150472640991211 - ], - [ - "help", - -12.150514602661133 - ], - [ - "1.6", - -12.150547981262207 - ], - [ - "▁cazuri", - -12.150547981262207 - ], - [ - "▁chap", - -12.150559425354004 - ], - [ - "aktiv", - -12.150632858276367 - ], - [ - "▁2006.", - -12.15079116821289 - ], - [ - "iene", - -12.150849342346191 - ], - [ - "▁BBQ", - -12.150969505310059 - ], - [ - "dauer", - -12.151028633117676 - ], - [ - "2).", - -12.151226997375488 - ], - [ - "▁Monat", - -12.151277542114258 - ], - [ - "Generally", - -12.151285171508789 - ], - [ - "▁bracelet", - -12.151336669921875 - ], - [ - "▁cartoon", - -12.151349067687988 - ], - [ - "▁pui", - -12.151488304138184 - ], - [ - "temp", - -12.151506423950195 - ], - [ - "▁Particip", - -12.151555061340332 - ], - [ - "▁dumneavoastră", - -12.151725769042969 - ], - [ - "▁Gin", - -12.151824951171875 - ], - [ - "iunile", - -12.151829719543457 - ], - [ - "reise", - -12.151849746704102 - ], - [ - "▁einzige", - -12.15189266204834 - ], - [ - "ANCE", - -12.15192985534668 - ], - [ - "▁humble", - -12.151951789855957 - ], - [ - "claim", - -12.152093887329102 - ], - [ - "LV", - -12.152143478393555 - ], - [ - "▁confiance", - -12.152270317077637 - ], - [ - "▁Trading", - -12.152535438537598 - ], - [ - "▁Fabric", - -12.152770042419434 - ], - [ - "▁Duke", - -12.152851104736328 - ], - [ - "spieler", - -12.152937889099121 - ], - [ - "▁reject", - -12.152987480163574 - ], - [ - "▁crise", - -12.153170585632324 - ], - [ - "▁borders", - -12.153196334838867 - ], - [ - "▁Vehicle", - -12.153279304504395 - ], - [ - "zeiten", - -12.153481483459473 - ], - [ - "enrolled", - -12.153514862060547 - ], - [ - "venue", - -12.153555870056152 - ], - [ - "▁forests", - -12.153564453125 - ], - [ - "vascular", - -12.15358829498291 - ], - [ - "▁phrases", - -12.153661727905273 - ], - [ - "▁receptor", - -12.15368366241455 - ], - [ - "schied", - -12.153687477111816 - ], - [ - "▁soirée", - -12.153785705566406 - ], - [ - "▁partener", - -12.153987884521484 - ], - [ - "▁Jobs", - 
-12.15417194366455 - ], - [ - "▁segments", - -12.154216766357422 - ], - [ - "▁violate", - -12.154438972473145 - ], - [ - "▁viable", - -12.154500007629395 - ], - [ - "▁encountered", - -12.154533386230469 - ], - [ - "▁travelers", - -12.154552459716797 - ], - [ - "▁împ", - -12.154679298400879 - ], - [ - "▁convince", - -12.154693603515625 - ], - [ - "▁mailing", - -12.154693603515625 - ], - [ - "▁Zahn", - -12.154698371887207 - ], - [ - "attend", - -12.15477466583252 - ], - [ - "▁eBay", - -12.154836654663086 - ], - [ - "▁Emergency", - -12.154844284057617 - ], - [ - "wirtschaft", - -12.154882431030273 - ], - [ - "▁scholars", - -12.154947280883789 - ], - [ - "▁considerably", - -12.155118942260742 - ], - [ - "▁combo", - -12.1551513671875 - ], - [ - "hiver", - -12.155198097229004 - ], - [ - "▁mysterious", - -12.15522575378418 - ], - [ - "▁Degree", - -12.155234336853027 - ], - [ - "▁fate", - -12.155242919921875 - ], - [ - "▁transplant", - -12.155281066894531 - ], - [ - "▁samedi", - -12.155400276184082 - ], - [ - "unit", - -12.155519485473633 - ], - [ - "▁moyenne", - -12.155611991882324 - ], - [ - "▁Liverpool", - -12.155614852905273 - ], - [ - "▁Champions", - -12.155728340148926 - ], - [ - "zzle", - -12.155824661254883 - ], - [ - "▁arena", - -12.156228065490723 - ], - [ - "▁Pipe", - -12.15633487701416 - ], - [ - "▁waterproof", - -12.156356811523438 - ], - [ - "▁eternal", - -12.156463623046875 - ], - [ - "Whenever", - -12.156503677368164 - ], - [ - "▁Hop", - -12.156535148620605 - ], - [ - "▁Betrieb", - -12.156816482543945 - ], - [ - "gne", - -12.15692138671875 - ], - [ - "▁spe", - -12.156975746154785 - ], - [ - "▁Corner", - -12.157078742980957 - ], - [ - "▁devenir", - -12.157118797302246 - ], - [ - "ambiance", - -12.157144546508789 - ], - [ - "▁Graham", - -12.157200813293457 - ], - [ - "▁desires", - -12.157289505004883 - ], - [ - "▁Applications", - -12.157291412353516 - ], - [ - "▁genutzt", - -12.157477378845215 - ], - [ - "tek", - -12.157612800598145 - ], - [ - "▁Career", - 
-12.157641410827637 - ], - [ - "▁staple", - -12.157695770263672 - ], - [ - "▁Dodge", - -12.157817840576172 - ], - [ - "▁strictly", - -12.157889366149902 - ], - [ - "▁Gruppen", - -12.157952308654785 - ], - [ - "▁Finanz", - -12.157981872558594 - ], - [ - "▁sporting", - -12.15809440612793 - ], - [ - "▁Wieder", - -12.158127784729004 - ], - [ - "anny", - -12.158208847045898 - ], - [ - "▁bucura", - -12.158233642578125 - ], - [ - "▁Pest", - -12.15824031829834 - ], - [ - "▁circles", - -12.158246994018555 - ], - [ - "▁richtige", - -12.158309936523438 - ], - [ - "▁cycles", - -12.158379554748535 - ], - [ - "static", - -12.15845012664795 - ], - [ - "lasting", - -12.15847396850586 - ], - [ - "▁calcium", - -12.158549308776855 - ], - [ - "▁digest", - -12.158697128295898 - ], - [ - "Enfin", - -12.158865928649902 - ], - [ - "▁stressful", - -12.158951759338379 - ], - [ - "▁schemes", - -12.158981323242188 - ], - [ - "▁décision", - -12.158987045288086 - ], - [ - "▁comercial", - -12.15907096862793 - ], - [ - "işti", - -12.159098625183105 - ], - [ - "▁Comic", - -12.15910816192627 - ], - [ - "▁extensions", - -12.159140586853027 - ], - [ - "▁Sieg", - -12.159168243408203 - ], - [ - "▁pine", - -12.15919017791748 - ], - [ - "ieß", - -12.159272193908691 - ], - [ - "▁Images", - -12.159427642822266 - ], - [ - "▁Mensch", - -12.159668922424316 - ], - [ - "Pap", - -12.159773826599121 - ], - [ - "▁crops", - -12.15994930267334 - ], - [ - "▁sheep", - -12.159996032714844 - ], - [ - "▁istoric", - -12.160001754760742 - ], - [ - "▁Assessment", - -12.160035133361816 - ], - [ - "▁mounting", - -12.16035270690918 - ], - [ - "wirken", - -12.160469055175781 - ], - [ - "▁augment", - -12.160469055175781 - ], - [ - "▁picioare", - -12.160542488098145 - ], - [ - "organisme", - -12.160590171813965 - ], - [ - "▁Monitor", - -12.16060733795166 - ], - [ - "▁celles", - -12.160642623901367 - ], - [ - "▁Maison", - -12.160709381103516 - ], - [ - "notified", - -12.160783767700195 - ], - [ - "▁chew", - -12.160831451416016 - 
], - [ - "▁bleu", - -12.16083812713623 - ], - [ - "dow", - -12.160844802856445 - ], - [ - "▁Grav", - -12.16097354888916 - ], - [ - "▁curtains", - -12.160975456237793 - ], - [ - "▁Campus", - -12.161076545715332 - ], - [ - "▁controversial", - -12.161087036132812 - ], - [ - "▁soutien", - -12.161189079284668 - ], - [ - "▁Dell", - -12.1613187789917 - ], - [ - "▁instrumental", - -12.161431312561035 - ], - [ - "▁Nan", - -12.161514282226562 - ], - [ - "▁prom", - -12.161520957946777 - ], - [ - "▁spatial", - -12.161523818969727 - ], - [ - "Similarly", - -12.161558151245117 - ], - [ - "▁Gala", - -12.161601066589355 - ], - [ - "ultimul", - -12.16162109375 - ], - [ - "▁Vom", - -12.161761283874512 - ], - [ - "▁Foot", - -12.161784172058105 - ], - [ - "bike", - -12.1618013381958 - ], - [ - "▁acids", - -12.161979675292969 - ], - [ - "entend", - -12.162002563476562 - ], - [ - "ivă", - -12.162040710449219 - ], - [ - "▁Weitere", - -12.162124633789062 - ], - [ - "▁vitamins", - -12.162131309509277 - ], - [ - "▁enhancement", - -12.16234016418457 - ], - [ - "▁Cruise", - -12.162367820739746 - ], - [ - "assemble", - -12.162385940551758 - ], - [ - "▁spécifique", - -12.162459373474121 - ], - [ - "affaires", - -12.16261100769043 - ], - [ - "▁indispensable", - -12.1626558303833 - ], - [ - "▁logistics", - -12.16283130645752 - ], - [ - "▁manche", - -12.162919044494629 - ], - [ - "▁dealt", - -12.16297435760498 - ], - [ - "▁favorable", - -12.163036346435547 - ], - [ - "▁unwanted", - -12.163047790527344 - ], - [ - "▁handmade", - -12.163065910339355 - ], - [ - "▁Regi", - -12.163102149963379 - ], - [ - "safe", - -12.163134574890137 - ], - [ - "persoanele", - -12.163202285766602 - ], - [ - "▁destinat", - -12.163252830505371 - ], - [ - "▁Maxi", - -12.163299560546875 - ], - [ - "▁salmon", - -12.163454055786133 - ], - [ - "wag", - -12.163578033447266 - ], - [ - "210", - -12.163769721984863 - ], - [ - "▁warned", - -12.163865089416504 - ], - [ - "läuft", - -12.16386604309082 - ], - [ - "agging", - 
-12.163931846618652 - ], - [ - "▁responsabil", - -12.16398811340332 - ], - [ - "▁presse", - -12.164271354675293 - ], - [ - "▁amis", - -12.164305686950684 - ], - [ - "▁rolls", - -12.164377212524414 - ], - [ - "control", - -12.164405822753906 - ], - [ - "▁Manufacturer", - -12.164422988891602 - ], - [ - "hnen", - -12.164449691772461 - ], - [ - "▁buget", - -12.164546012878418 - ], - [ - "OW", - -12.16467571258545 - ], - [ - "etro", - -12.164745330810547 - ], - [ - "▁communauté", - -12.164837837219238 - ], - [ - "unci", - -12.164944648742676 - ], - [ - "▁Chine", - -12.164952278137207 - ], - [ - "combines", - -12.16501235961914 - ], - [ - "▁learners", - -12.165046691894531 - ], - [ - "STE", - -12.165055274963379 - ], - [ - "ckel", - -12.16511344909668 - ], - [ - "Service", - -12.165169715881348 - ], - [ - "▁veröffentlicht", - -12.165209770202637 - ], - [ - "besides", - -12.165266036987305 - ], - [ - "getragen", - -12.165349960327148 - ], - [ - "▁opponent", - -12.165521621704102 - ], - [ - "▁volum", - -12.165533065795898 - ], - [ - "▁confusing", - -12.165802001953125 - ], - [ - "invasive", - -12.165813446044922 - ], - [ - "▁conseils", - -12.165881156921387 - ], - [ - "▁vibe", - -12.165928840637207 - ], - [ - "View", - -12.166062355041504 - ], - [ - "oară", - -12.166086196899414 - ], - [ - "Link", - -12.166261672973633 - ], - [ - "▁holy", - -12.166261672973633 - ], - [ - "▁crema", - -12.16629409790039 - ], - [ - "▁Michelle", - -12.166303634643555 - ], - [ - "▁Wien", - -12.166383743286133 - ], - [ - "▁undertake", - -12.166404724121094 - ], - [ - "▁Photograph", - -12.166421890258789 - ], - [ - "humain", - -12.16645336151123 - ], - [ - "▁Hang", - -12.166545867919922 - ], - [ - "designed", - -12.16657829284668 - ], - [ - "▁analyses", - -12.166614532470703 - ], - [ - "▁compose", - -12.166653633117676 - ], - [ - "▁substantially", - -12.166765213012695 - ], - [ - "▁marking", - -12.166772842407227 - ], - [ - "▁campagne", - -12.166826248168945 - ], - [ - "▁$15", - 
-12.166828155517578 - ], - [ - "pharma", - -12.166972160339355 - ], - [ - "▁playoff", - -12.1669921875 - ], - [ - "▁momentum", - -12.167091369628906 - ], - [ - "Temp", - -12.16714096069336 - ], - [ - "▁vinegar", - -12.167143821716309 - ], - [ - "▁descriptions", - -12.167581558227539 - ], - [ - "christ", - -12.167656898498535 - ], - [ - "wore", - -12.16773509979248 - ], - [ - "ITY", - -12.167768478393555 - ], - [ - "stehen", - -12.167771339416504 - ], - [ - "▁insulation", - -12.1677827835083 - ], - [ - "grav", - -12.167842864990234 - ], - [ - "2.2", - -12.167887687683105 - ], - [ - "▁Explore", - -12.168028831481934 - ], - [ - "▁dye", - -12.168127059936523 - ], - [ - "stair", - -12.168155670166016 - ], - [ - "artisan", - -12.168207168579102 - ], - [ - "▁zoom", - -12.168285369873047 - ], - [ - "▁turkey", - -12.168573379516602 - ], - [ - "▁locksmith", - -12.168577194213867 - ], - [ - "▁sewing", - -12.168610572814941 - ], - [ - "▁modeling", - -12.168627738952637 - ], - [ - "lied", - -12.16870403289795 - ], - [ - "adel", - -12.168773651123047 - ], - [ - "▁Going", - -12.168785095214844 - ], - [ - "WH", - -12.168798446655273 - ], - [ - "▁deserves", - -12.168919563293457 - ], - [ - "▁arriving", - -12.168960571289062 - ], - [ - "OFF", - -12.169039726257324 - ], - [ - "torului", - -12.169109344482422 - ], - [ - "ucked", - -12.16921615600586 - ], - [ - "▁approached", - -12.169351577758789 - ], - [ - "▁élevé", - -12.169354438781738 - ], - [ - "▁quotidien", - -12.169416427612305 - ], - [ - "▁derzeit", - -12.16942024230957 - ], - [ - "nutzt", - -12.169656753540039 - ], - [ - "science", - -12.169729232788086 - ], - [ - "▁Emma", - -12.169841766357422 - ], - [ - "▁builds", - -12.169879913330078 - ], - [ - "▁Logo", - -12.169949531555176 - ], - [ - "▁clouds", - -12.170061111450195 - ], - [ - "inflammatory", - -12.170141220092773 - ], - [ - "țiuni", - -12.170199394226074 - ], - [ - "▁Cisco", - -12.17025089263916 - ], - [ - "▁würden", - -12.170254707336426 - ], - [ - "▁Shaw", - 
-12.170256614685059 - ], - [ - "▁Ell", - -12.170266151428223 - ], - [ - "avance", - -12.1703519821167 - ], - [ - "anglais", - -12.170365333557129 - ], - [ - "weil", - -12.170368194580078 - ], - [ - "▁singura", - -12.170464515686035 - ], - [ - "ACK", - -12.170489311218262 - ], - [ - "likewise", - -12.170522689819336 - ], - [ - "ographie", - -12.170646667480469 - ], - [ - "liegen", - -12.17088508605957 - ], - [ - "▁Crow", - -12.170964241027832 - ], - [ - "▁unic", - -12.171187400817871 - ], - [ - "▁Ale", - -12.171241760253906 - ], - [ - "▁păstr", - -12.17125129699707 - ], - [ - "▁informal", - -12.171337127685547 - ], - [ - "650", - -12.17136287689209 - ], - [ - "Benz", - -12.171489715576172 - ], - [ - "▁antenna", - -12.171540260314941 - ], - [ - "▁pagini", - -12.171552658081055 - ], - [ - "▁lansat", - -12.171561241149902 - ], - [ - "▁Fans", - -12.171576499938965 - ], - [ - "taine", - -12.171822547912598 - ], - [ - "JO", - -12.171853065490723 - ], - [ - "▁Tips", - -12.172091484069824 - ], - [ - "cir", - -12.172130584716797 - ], - [ - "nou", - -12.172384262084961 - ], - [ - "▁planted", - -12.17241382598877 - ], - [ - "▁steering", - -12.172423362731934 - ], - [ - "▁Waren", - -12.172475814819336 - ], - [ - "▁clearance", - -12.172515869140625 - ], - [ - "▁Moscow", - -12.172516822814941 - ], - [ - "▁Faith", - -12.172534942626953 - ], - [ - "▁Pizza", - -12.172572135925293 - ], - [ - "▁Tank", - -12.17273998260498 - ], - [ - "QUE", - -12.172783851623535 - ], - [ - "▁studii", - -12.172804832458496 - ], - [ - "éné", - -12.172829627990723 - ], - [ - "▁guerre", - -12.1728515625 - ], - [ - "▁celebr", - -12.173083305358887 - ], - [ - "▁Factory", - -12.173111915588379 - ], - [ - "▁Browse", - -12.173198699951172 - ], - [ - "▁Request", - -12.17323112487793 - ], - [ - "▁taxpayer", - -12.173311233520508 - ], - [ - "▁assert", - -12.173562049865723 - ], - [ - "unternehmen", - -12.173588752746582 - ], - [ - "▁Ergebnis", - -12.173687934875488 - ], - [ - "▁Antwort", - -12.173727035522461 - ], 
- [ - "▁Photography", - -12.173808097839355 - ], - [ - "▁plă", - -12.173866271972656 - ], - [ - "IME", - -12.173982620239258 - ], - [ - "▁prochaine", - -12.174074172973633 - ], - [ - "ajouter", - -12.174103736877441 - ], - [ - "▁buffet", - -12.174227714538574 - ], - [ - "▁pixels", - -12.174239158630371 - ], - [ - "▁pledge", - -12.174250602722168 - ], - [ - "▁Inhalt", - -12.17435359954834 - ], - [ - "▁chase", - -12.174384117126465 - ], - [ - "Flow", - -12.174493789672852 - ], - [ - "▁melodi", - -12.174872398376465 - ], - [ - "▁Abu", - -12.174991607666016 - ], - [ - "▁1979", - -12.175042152404785 - ], - [ - "▁Photos", - -12.175042152404785 - ], - [ - "▁qualifications", - -12.175148963928223 - ], - [ - "▁zis", - -12.175213813781738 - ], - [ - "IAL", - -12.175354957580566 - ], - [ - "▁lender", - -12.175390243530273 - ], - [ - "▁indiferent", - -12.175494194030762 - ], - [ - "▁behaviors", - -12.175506591796875 - ], - [ - "▁flowing", - -12.175531387329102 - ], - [ - "▁zweite", - -12.1756010055542 - ], - [ - "abl", - -12.175765037536621 - ], - [ - "Schw", - -12.176004409790039 - ], - [ - "opi", - -12.176030158996582 - ], - [ - "ggi", - -12.176164627075195 - ], - [ - "▁depart", - -12.176314353942871 - ], - [ - "▁garde", - -12.17640209197998 - ], - [ - "▁tuition", - -12.176490783691406 - ], - [ - "fälle", - -12.17650032043457 - ], - [ - "▁determina", - -12.17652702331543 - ], - [ - "▁spice", - -12.176627159118652 - ], - [ - "▁petites", - -12.176777839660645 - ], - [ - "kot", - -12.176973342895508 - ], - [ - "▁intersection", - -12.177242279052734 - ], - [ - "hak", - -12.177248001098633 - ], - [ - "▁autumn", - -12.177284240722656 - ], - [ - "▁verbunden", - -12.177284240722656 - ], - [ - "▁ferme", - -12.177287101745605 - ], - [ - "PN", - -12.17733097076416 - ], - [ - "▁insurer", - -12.177390098571777 - ], - [ - "arten", - -12.177401542663574 - ], - [ - "▁Turkish", - -12.177715301513672 - ], - [ - "▁shoulders", - -12.177732467651367 - ], - [ - "=>", - -12.177742004394531 - ], - 
[ - "▁Nike", - -12.177760124206543 - ], - [ - "uire", - -12.177763938903809 - ], - [ - "▁Chile", - -12.177811622619629 - ], - [ - "jon", - -12.177842140197754 - ], - [ - "▁fragrance", - -12.177884101867676 - ], - [ - "▁bean", - -12.177908897399902 - ], - [ - "ips", - -12.178108215332031 - ], - [ - "assuming", - -12.178191184997559 - ], - [ - "liens", - -12.178215026855469 - ], - [ - "tocmai", - -12.178267478942871 - ], - [ - "▁60%", - -12.178301811218262 - ], - [ - "ipped", - -12.178384780883789 - ], - [ - "DIS", - -12.178473472595215 - ], - [ - "▁predicted", - -12.178537368774414 - ], - [ - "▁Picture", - -12.178555488586426 - ], - [ - "Bahn", - -12.178796768188477 - ], - [ - "104", - -12.178854942321777 - ], - [ - "tended", - -12.178958892822266 - ], - [ - "▁approve", - -12.179031372070312 - ], - [ - "▁magasin", - -12.17908000946045 - ], - [ - "▁mindset", - -12.179208755493164 - ], - [ - "rase", - -12.179363250732422 - ], - [ - "grand", - -12.179469108581543 - ], - [ - "▁Principal", - -12.17947769165039 - ], - [ - "▁informații", - -12.17959976196289 - ], - [ - "▁legătur", - -12.179628372192383 - ], - [ - "▁Farb", - -12.179692268371582 - ], - [ - "▁Dieu", - -12.179710388183594 - ], - [ - "▁alliance", - -12.180378913879395 - ], - [ - "weiligen", - -12.180397987365723 - ], - [ - "▁Câ", - -12.18048095703125 - ], - [ - "▁counseling", - -12.180521011352539 - ], - [ - "▁traveled", - -12.180533409118652 - ], - [ - "▁translated", - -12.180558204650879 - ], - [ - "▁carne", - -12.180679321289062 - ], - [ - "aked", - -12.180707931518555 - ], - [ - "▁LCD", - -12.180868148803711 - ], - [ - "▁Folge", - -12.180909156799316 - ], - [ - "▁Erfahrungen", - -12.18093204498291 - ], - [ - "▁1981", - -12.18106460571289 - ], - [ - "▁răspuns", - -12.181075096130371 - ], - [ - "itori", - -12.18117618560791 - ], - [ - "▁elementary", - -12.181200981140137 - ], - [ - "▁vorbei", - -12.18127727508545 - ], - [ - "▁cargo", - -12.181361198425293 - ], - [ - "disciplinary", - -12.18140983581543 - ], - 
[ - "WR", - -12.181492805480957 - ], - [ - "▁counterpart", - -12.18162727355957 - ], - [ - "family", - -12.181641578674316 - ], - [ - "▁viață", - -12.181644439697266 - ], - [ - "▁Definition", - -12.18167495727539 - ], - [ - "▁Cow", - -12.18171501159668 - ], - [ - "fällig", - -12.182003021240234 - ], - [ - "▁Sicht", - -12.182025909423828 - ], - [ - "▁mum", - -12.182145118713379 - ], - [ - "▁Mediterranean", - -12.182275772094727 - ], - [ - "nev", - -12.182278633117676 - ], - [ - "bü", - -12.182293891906738 - ], - [ - "▁slave", - -12.182293891906738 - ], - [ - "schnitt", - -12.18233871459961 - ], - [ - "▁firme", - -12.182430267333984 - ], - [ - "▁spill", - -12.182454109191895 - ], - [ - "▁wages", - -12.182592391967773 - ], - [ - "▁refine", - -12.182615280151367 - ], - [ - "▁upgraded", - -12.182632446289062 - ], - [ - "▁gospel", - -12.182698249816895 - ], - [ - "▁quartier", - -12.182744979858398 - ], - [ - "▁#2", - -12.182772636413574 - ], - [ - "▁Situation", - -12.18298625946045 - ], - [ - "▁suggesting", - -12.183075904846191 - ], - [ - "▁acne", - -12.183113098144531 - ], - [ - "▁Murray", - -12.183337211608887 - ], - [ - "▁Ian", - -12.183469772338867 - ], - [ - "hören", - -12.183489799499512 - ], - [ - "bia", - -12.183603286743164 - ], - [ - "▁Bewegung", - -12.183684349060059 - ], - [ - "▁abzu", - -12.18379020690918 - ], - [ - "reveals", - -12.183795928955078 - ], - [ - "friend", - -12.184025764465332 - ], - [ - "▁Connecticut", - -12.18407917022705 - ], - [ - "▁Testament", - -12.184151649475098 - ], - [ - "▁Lit", - -12.184199333190918 - ], - [ - "▁Ship", - -12.184209823608398 - ], - [ - "▁minunat", - -12.184344291687012 - ], - [ - "▁Moving", - -12.184346199035645 - ], - [ - "▁Device", - -12.184486389160156 - ], - [ - "▁Bake", - -12.18453598022461 - ], - [ - "▁qualification", - -12.184633255004883 - ], - [ - "▁challenged", - -12.184640884399414 - ], - [ - "▁Hinweis", - -12.184721946716309 - ], - [ - "▁sechs", - -12.184769630432129 - ], - [ - "та", - -12.184903144836426 
- ], - [ - "120", - -12.184904098510742 - ], - [ - "licht", - -12.184940338134766 - ], - [ - "▁supervision", - -12.185022354125977 - ], - [ - "▁milestone", - -12.18503189086914 - ], - [ - "zeig", - -12.185050964355469 - ], - [ - "▁emphasize", - -12.185224533081055 - ], - [ - "▁complain", - -12.185232162475586 - ], - [ - "sack", - -12.185341835021973 - ], - [ - "▁rebuild", - -12.185445785522461 - ], - [ - "projekt", - -12.18548583984375 - ], - [ - "▁saint", - -12.185644149780273 - ], - [ - "lette", - -12.185752868652344 - ], - [ - "rade", - -12.18580150604248 - ], - [ - "▁pacient", - -12.185893058776855 - ], - [ - "signed", - -12.186169624328613 - ], - [ - "▁mil", - -12.186261177062988 - ], - [ - "cali", - -12.186266899108887 - ], - [ - "▁brochure", - -12.186487197875977 - ], - [ - "▁Bulgaria", - -12.186488151550293 - ], - [ - "Har", - -12.186623573303223 - ], - [ - "DH", - -12.186697006225586 - ], - [ - "▁jumping", - -12.186712265014648 - ], - [ - "ären", - -12.186732292175293 - ], - [ - "▁tactics", - -12.186911582946777 - ], - [ - "▁soleil", - -12.187030792236328 - ], - [ - "lessness", - -12.18705940246582 - ], - [ - "steigen", - -12.187085151672363 - ], - [ - "▁Brief", - -12.187117576599121 - ], - [ - "▁Oz", - -12.18718433380127 - ], - [ - "credit", - -12.187239646911621 - ], - [ - "glass", - -12.187241554260254 - ], - [ - "▁Baltimore", - -12.187292098999023 - ], - [ - "varies", - -12.187445640563965 - ], - [ - "sourced", - -12.187575340270996 - ], - [ - "▁documented", - -12.187604904174805 - ], - [ - "▁devine", - -12.187664985656738 - ], - [ - "möglichst", - -12.187732696533203 - ], - [ - "▁früher", - -12.187756538391113 - ], - [ - "outefois", - -12.18790054321289 - ], - [ - "▁Engagement", - -12.187934875488281 - ], - [ - "▁anumit", - -12.18806266784668 - ], - [ - "▁1930", - -12.188186645507812 - ], - [ - "▁Aufgaben", - -12.188214302062988 - ], - [ - "▁lineup", - -12.188227653503418 - ], - [ - "▁Cad", - -12.188349723815918 - ], - [ - "améliorer", - 
-12.188437461853027 - ], - [ - "▁februarie", - -12.188499450683594 - ], - [ - "▁cancellation", - -12.188529968261719 - ], - [ - "▁locks", - -12.188577651977539 - ], - [ - "▁modèles", - -12.188711166381836 - ], - [ - "▁breakdown", - -12.188748359680176 - ], - [ - "Ticket", - -12.188810348510742 - ], - [ - "▁Chen", - -12.188855171203613 - ], - [ - "▁Competition", - -12.188910484313965 - ], - [ - "▁median", - -12.18896770477295 - ], - [ - "rische", - -12.189159393310547 - ], - [ - "▁multipli", - -12.189269065856934 - ], - [ - "▁Belgium", - -12.189305305480957 - ], - [ - "▁Physical", - -12.189308166503906 - ], - [ - "▁parameter", - -12.189432144165039 - ], - [ - "▁carrot", - -12.189435005187988 - ], - [ - "▁mandat", - -12.189617156982422 - ], - [ - "▁towel", - -12.189697265625 - ], - [ - "▁insured", - -12.189825057983398 - ], - [ - "PRI", - -12.189868927001953 - ], - [ - "etter", - -12.189915657043457 - ], - [ - "▁Oder", - -12.190083503723145 - ], - [ - "argued", - -12.190171241760254 - ], - [ - "FB", - -12.190196990966797 - ], - [ - "versicherung", - -12.190197944641113 - ], - [ - "abila", - -12.190251350402832 - ], - [ - "▁Coin", - -12.190324783325195 - ], - [ - "around", - -12.19050121307373 - ], - [ - "▁Lorsqu", - -12.190773963928223 - ], - [ - "valent", - -12.190918922424316 - ], - [ - "▁weltweit", - -12.19092082977295 - ], - [ - "Mod", - -12.191039085388184 - ], - [ - "▁defect", - -12.191044807434082 - ], - [ - "ibly", - -12.191136360168457 - ], - [ - "▁Juan", - -12.191153526306152 - ], - [ - "▁Jur", - -12.191171646118164 - ], - [ - "large", - -12.191307067871094 - ], - [ - "▁indicators", - -12.191461563110352 - ], - [ - "invest", - -12.19168472290039 - ], - [ - "▁rehabilitation", - -12.191705703735352 - ], - [ - "nag", - -12.191823959350586 - ], - [ - "▁Grundlage", - -12.191829681396484 - ], - [ - "▁Strategy", - -12.192131042480469 - ], - [ - "▁supérieur", - -12.192173957824707 - ], - [ - "▁orbit", - -12.192281723022461 - ], - [ - "▁Auftrag", - 
-12.192360877990723 - ], - [ - "▁Verb", - -12.192441940307617 - ], - [ - "ANA", - -12.19256591796875 - ], - [ - "▁trimis", - -12.192611694335938 - ], - [ - "▁Rub", - -12.192704200744629 - ], - [ - "institu", - -12.192732810974121 - ], - [ - "▁inspect", - -12.1927490234375 - ], - [ - "▁Princess", - -12.192757606506348 - ], - [ - "especially", - -12.192777633666992 - ], - [ - "▁combinations", - -12.192793846130371 - ], - [ - "▁gaze", - -12.192842483520508 - ], - [ - "elemente", - -12.192970275878906 - ], - [ - "deal", - -12.192980766296387 - ], - [ - "polis", - -12.193157196044922 - ], - [ - "shaw", - -12.193168640136719 - ], - [ - "▁Republicans", - -12.193203926086426 - ], - [ - "aded", - -12.193244934082031 - ], - [ - "▁Louisiana", - -12.193364143371582 - ], - [ - "▁Ville", - -12.193368911743164 - ], - [ - "▁afterwards", - -12.193389892578125 - ], - [ - "ONG", - -12.193608283996582 - ], - [ - "▁dryer", - -12.193636894226074 - ], - [ - "▁Manhattan", - -12.19374942779541 - ], - [ - "▁recomanda", - -12.19412612915039 - ], - [ - "▁juca", - -12.194253921508789 - ], - [ - "▁Crown", - -12.194260597229004 - ], - [ - "▁flesh", - -12.194347381591797 - ], - [ - "sichtig", - -12.194358825683594 - ], - [ - "▁rempli", - -12.19437026977539 - ], - [ - "▁deposits", - -12.19438362121582 - ], - [ - "▁Voll", - -12.194599151611328 - ], - [ - "▁analysts", - -12.194672584533691 - ], - [ - "▁Krieg", - -12.19484806060791 - ], - [ - "▁Rosa", - -12.19495964050293 - ], - [ - "▁Supply", - -12.194964408874512 - ], - [ - "GF", - -12.19497013092041 - ], - [ - "idad", - -12.195098876953125 - ], - [ - "▁flush", - -12.195103645324707 - ], - [ - "▁circular", - -12.195355415344238 - ], - [ - "▁național", - -12.195379257202148 - ], - [ - "▁lorsqu", - -12.195441246032715 - ], - [ - "▁analyst", - -12.195459365844727 - ], - [ - "▁Jahrhundert", - -12.195586204528809 - ], - [ - "▁biology", - -12.195713996887207 - ], - [ - "copy", - -12.195733070373535 - ], - [ - "▁bringt", - -12.195765495300293 - ], - [ - 
"▁Gospel", - -12.195780754089355 - ], - [ - "▁sorgen", - -12.195842742919922 - ], - [ - "zeichnung", - -12.196181297302246 - ], - [ - "chair", - -12.196197509765625 - ], - [ - "EB", - -12.19636344909668 - ], - [ - "▁Beth", - -12.1964111328125 - ], - [ - "115", - -12.196416854858398 - ], - [ - "▁Neue", - -12.196479797363281 - ], - [ - "▁faible", - -12.196599960327148 - ], - [ - "▁methodology", - -12.196603775024414 - ], - [ - "spiele", - -12.196647644042969 - ], - [ - "▁cherry", - -12.196727752685547 - ], - [ - "▁Mak", - -12.196802139282227 - ], - [ - "▁volet", - -12.196982383728027 - ], - [ - "funk", - -12.197196006774902 - ], - [ - "▁aktuelle", - -12.197372436523438 - ], - [ - "▁Yahoo", - -12.197408676147461 - ], - [ - "▁Zusammenarbeit", - -12.197669982910156 - ], - [ - "▁Serve", - -12.197754859924316 - ], - [ - "▁simpler", - -12.197978019714355 - ], - [ - "intégr", - -12.197990417480469 - ], - [ - "ndlich", - -12.198083877563477 - ], - [ - "▁actress", - -12.198320388793945 - ], - [ - "▁reuse", - -12.198332786560059 - ], - [ - "▁reviewing", - -12.198405265808105 - ], - [ - "statt", - -12.198457717895508 - ], - [ - "▁diving", - -12.198469161987305 - ], - [ - "▁Național", - -12.198677062988281 - ], - [ - "voi", - -12.19873332977295 - ], - [ - "Disc", - -12.198812484741211 - ], - [ - "▁Mineral", - -12.19886302947998 - ], - [ - "▁emit", - -12.199007034301758 - ], - [ - "witz", - -12.199078559875488 - ], - [ - "▁forgot", - -12.19909954071045 - ], - [ - "▁dim", - -12.199115753173828 - ], - [ - "upper", - -12.19947624206543 - ], - [ - "sichtlich", - -12.19949722290039 - ], - [ - "▁parcours", - -12.199670791625977 - ], - [ - "8:00", - -12.199697494506836 - ], - [ - "▁keyword", - -12.199701309204102 - ], - [ - "▁upgrades", - -12.199763298034668 - ], - [ - "kunden", - -12.200177192687988 - ], - [ - "▁Seg", - -12.200257301330566 - ], - [ - "▁Circle", - -12.200289726257324 - ], - [ - "▁ginger", - -12.200336456298828 - ], - [ - "mment", - -12.200516700744629 - ], - [ - 
"▁expenditure", - -12.200655937194824 - ], - [ - "▁parle", - -12.200693130493164 - ], - [ - "▁Counsel", - -12.200722694396973 - ], - [ - "▁Gui", - -12.200722694396973 - ], - [ - "resident", - -12.20103645324707 - ], - [ - "▁benchmark", - -12.20103931427002 - ], - [ - "▁Elektro", - -12.201064109802246 - ], - [ - "▁réalité", - -12.201064109802246 - ], - [ - "▁ridiculous", - -12.201067924499512 - ], - [ - "▁necklace", - -12.20108699798584 - ], - [ - "nian", - -12.201117515563965 - ], - [ - "▁Move", - -12.20113468170166 - ], - [ - "▁elevated", - -12.201204299926758 - ], - [ - "WE", - -12.201281547546387 - ], - [ - "▁Drum", - -12.20132064819336 - ], - [ - "▁Delivery", - -12.201350212097168 - ], - [ - "indicating", - -12.201452255249023 - ], - [ - "▁Benjamin", - -12.201472282409668 - ], - [ - "▁Samuel", - -12.2014741897583 - ], - [ - "bene", - -12.201666831970215 - ], - [ - "▁experienta", - -12.201676368713379 - ], - [ - "▁rocket", - -12.201839447021484 - ], - [ - "▁fossil", - -12.201883316040039 - ], - [ - "▁festive", - -12.20193099975586 - ], - [ - "▁conscience", - -12.201964378356934 - ], - [ - "▁bacon", - -12.202136993408203 - ], - [ - "▁aero", - -12.202159881591797 - ], - [ - "public", - -12.202187538146973 - ], - [ - "▁zic", - -12.202218055725098 - ], - [ - "ombre", - -12.202356338500977 - ], - [ - "▁Drain", - -12.202550888061523 - ], - [ - "7.5", - -12.202672004699707 - ], - [ - "▁Deutschen", - -12.202703475952148 - ], - [ - "reportedly", - -12.202754974365234 - ], - [ - "▁Français", - -12.203105926513672 - ], - [ - "▁enzyme", - -12.203106880187988 - ], - [ - "▁inquiry", - -12.203117370605469 - ], - [ - "▁presque", - -12.203193664550781 - ], - [ - "▁Airlines", - -12.203228950500488 - ], - [ - "▁Salon", - -12.203237533569336 - ], - [ - "▁Volunteer", - -12.203310012817383 - ], - [ - "▁modular", - -12.203349113464355 - ], - [ - "ón", - -12.203364372253418 - ], - [ - "NH", - -12.203449249267578 - ], - [ - "▁souhaite", - -12.203516960144043 - ], - [ - "social", - 
-12.203659057617188 - ], - [ - "▁Include", - -12.203729629516602 - ], - [ - "▁Decor", - -12.2037992477417 - ], - [ - "dded", - -12.203965187072754 - ], - [ - "▁Außen", - -12.203969955444336 - ], - [ - "rendu", - -12.20412540435791 - ], - [ - "▁MBA", - -12.204150199890137 - ], - [ - "▁columns", - -12.204155921936035 - ], - [ - "▁Wing", - -12.204436302185059 - ], - [ - "▁landmark", - -12.204442977905273 - ], - [ - "schritt", - -12.204594612121582 - ], - [ - "▁désir", - -12.204630851745605 - ], - [ - "(5)", - -12.204680442810059 - ], - [ - "▁réseaux", - -12.204693794250488 - ], - [ - "income", - -12.204710960388184 - ], - [ - "▁revised", - -12.204819679260254 - ], - [ - "HY", - -12.204863548278809 - ], - [ - "▁Explorer", - -12.204873085021973 - ], - [ - "▁Lam", - -12.204877853393555 - ], - [ - "▁almond", - -12.204910278320312 - ], - [ - "▁faux", - -12.204910278320312 - ], - [ - "opt", - -12.204923629760742 - ], - [ - "Out", - -12.204939842224121 - ], - [ - "▁virtue", - -12.205025672912598 - ], - [ - "▁Chocolate", - -12.205151557922363 - ], - [ - "▁spannend", - -12.205305099487305 - ], - [ - "▁spices", - -12.205327033996582 - ], - [ - "▁Climate", - -12.205560684204102 - ], - [ - "▁Residential", - -12.205560684204102 - ], - [ - "gung", - -12.205700874328613 - ], - [ - "▁filtr", - -12.20606803894043 - ], - [ - "circ", - -12.206123352050781 - ], - [ - "sisted", - -12.206172943115234 - ], - [ - "▁dedicat", - -12.206243515014648 - ], - [ - "▁foil", - -12.206387519836426 - ], - [ - "▁uita", - -12.206392288208008 - ], - [ - "▁lié", - -12.206402778625488 - ], - [ - "▁Demo", - -12.206409454345703 - ], - [ - "▁spoil", - -12.2064208984375 - ], - [ - "Cu", - -12.206448554992676 - ], - [ - "naut", - -12.206525802612305 - ], - [ - "▁configured", - -12.206535339355469 - ], - [ - "UK", - -12.206543922424316 - ], - [ - "▁disagree", - -12.20656967163086 - ], - [ - "Medic", - -12.206767082214355 - ], - [ - "cosm", - -12.207074165344238 - ], - [ - "Toute", - -12.207109451293945 - ], - [ - 
"▁beneficia", - -12.207170486450195 - ], - [ - "fassen", - -12.207327842712402 - ], - [ - "▁bail", - -12.207337379455566 - ], - [ - "igue", - -12.207439422607422 - ], - [ - "▁Mă", - -12.20744800567627 - ], - [ - "▁strips", - -12.20748519897461 - ], - [ - "▁Dritte", - -12.207537651062012 - ], - [ - "▁putere", - -12.207597732543945 - ], - [ - "Play", - -12.20763111114502 - ], - [ - "▁Samstag", - -12.207632064819336 - ], - [ - "▁households", - -12.207791328430176 - ], - [ - "▁persistent", - -12.207914352416992 - ], - [ - "uben", - -12.207942962646484 - ], - [ - "Web", - -12.20809555053711 - ], - [ - "▁scenery", - -12.20820140838623 - ], - [ - "▁défini", - -12.208257675170898 - ], - [ - "news", - -12.208337783813477 - ], - [ - "eira", - -12.208428382873535 - ], - [ - "▁Mumbai", - -12.208438873291016 - ], - [ - "▁Ward", - -12.208558082580566 - ], - [ - "▁ladder", - -12.2086181640625 - ], - [ - "▁plaque", - -12.208623886108398 - ], - [ - "nés", - -12.208639144897461 - ], - [ - "▁condamn", - -12.20864486694336 - ], - [ - "▁attribute", - -12.208687782287598 - ], - [ - "atti", - -12.20873737335205 - ], - [ - "▁Emily", - -12.208953857421875 - ], - [ - "▁pleine", - -12.20896053314209 - ], - [ - "▁automatisch", - -12.209004402160645 - ], - [ - "ifies", - -12.209052085876465 - ], - [ - "onna", - -12.209104537963867 - ], - [ - "▁inject", - -12.209157943725586 - ], - [ - "▁evolve", - -12.209297180175781 - ], - [ - "▁breeze", - -12.209299087524414 - ], - [ - "▁montre", - -12.209415435791016 - ], - [ - "▁memorial", - -12.209425926208496 - ], - [ - "ämlich", - -12.209465026855469 - ], - [ - "NBC", - -12.209589958190918 - ], - [ - "▁1940", - -12.209836959838867 - ], - [ - "▁trouvé", - -12.209892272949219 - ], - [ - "when", - -12.209914207458496 - ], - [ - "▁Büro", - -12.209959983825684 - ], - [ - "▁probability", - -12.209978103637695 - ], - [ - "cute", - -12.21006965637207 - ], - [ - "▁sturdy", - -12.210078239440918 - ], - [ - "AMP", - -12.210165023803711 - ], - [ - "▁Constantin", - 
-12.210283279418945 - ], - [ - "▁batter", - -12.21037483215332 - ], - [ - "▁bist", - -12.210470199584961 - ], - [ - "▁streams", - -12.210528373718262 - ], - [ - "rushing", - -12.21057415008545 - ], - [ - "▁shaft", - -12.21065902709961 - ], - [ - "▁proprii", - -12.210722923278809 - ], - [ - "émi", - -12.21074390411377 - ], - [ - "online", - -12.210817337036133 - ], - [ - "▁vanity", - -12.210870742797852 - ], - [ - "▁mural", - -12.210878372192383 - ], - [ - "▁distinguish", - -12.210905075073242 - ], - [ - "▁niciun", - -12.211191177368164 - ], - [ - "▁européenne", - -12.211252212524414 - ], - [ - "▁secretary", - -12.211289405822754 - ], - [ - "▁gaps", - -12.211492538452148 - ], - [ - "▁realm", - -12.211499214172363 - ], - [ - "▁elastic", - -12.211504936218262 - ], - [ - "▁Avoid", - -12.211519241333008 - ], - [ - "▁mauvais", - -12.211931228637695 - ], - [ - "▁innovations", - -12.212663650512695 - ], - [ - "▁suprem", - -12.212776184082031 - ], - [ - "▁vederea", - -12.212817192077637 - ], - [ - "wenden", - -12.212892532348633 - ], - [ - "-40", - -12.213075637817383 - ], - [ - "prenant", - -12.213155746459961 - ], - [ - "utilisateur", - -12.213210105895996 - ], - [ - "▁Oliver", - -12.213228225708008 - ], - [ - "111", - -12.21326732635498 - ], - [ - "▁manifestation", - -12.213382720947266 - ], - [ - "▁Rachel", - -12.213458061218262 - ], - [ - "agog", - -12.21348762512207 - ], - [ - "▁seamless", - -12.213534355163574 - ], - [ - "▁Employee", - -12.213576316833496 - ], - [ - "▁dimanche", - -12.213582038879395 - ], - [ - "▁banii", - -12.213631629943848 - ], - [ - "▁Ruth", - -12.213781356811523 - ], - [ - "▁Roy", - -12.21385383605957 - ], - [ - "▁homeless", - -12.2139253616333 - ], - [ - "▁Lower", - -12.213932037353516 - ], - [ - "health", - -12.21393871307373 - ], - [ - "▁atenti", - -12.2140474319458 - ], - [ - "▁touched", - -12.214183807373047 - ], - [ - "May", - -12.214195251464844 - ], - [ - "▁Buc", - -12.214225769042969 - ], - [ - "▁explored", - -12.214393615722656 - ], - 
[ - "▁declare", - -12.214461326599121 - ], - [ - "▁garment", - -12.214469909667969 - ], - [ - "▁buzz", - -12.214483261108398 - ], - [ - "▁rappel", - -12.214662551879883 - ], - [ - "▁uscat", - -12.214903831481934 - ], - [ - "▁Hyper", - -12.214914321899414 - ], - [ - "Etat", - -12.215007781982422 - ], - [ - "▁Titel", - -12.215035438537598 - ], - [ - "product", - -12.215191841125488 - ], - [ - "woman", - -12.215280532836914 - ], - [ - "▁Gab", - -12.215450286865234 - ], - [ - "▁advances", - -12.215615272521973 - ], - [ - "2/", - -12.215753555297852 - ], - [ - "prone", - -12.215770721435547 - ], - [ - "kö", - -12.215986251831055 - ], - [ - "▁counting", - -12.21599292755127 - ], - [ - "Sollte", - -12.216043472290039 - ], - [ - "▁Konzept", - -12.216063499450684 - ], - [ - "▁backgrounds", - -12.216153144836426 - ], - [ - "jährige", - -12.216154098510742 - ], - [ - "▁Alltag", - -12.216187477111816 - ], - [ - "▁metrics", - -12.21619701385498 - ], - [ - "▁illustrated", - -12.216222763061523 - ], - [ - "▁Charge", - -12.21631908416748 - ], - [ - "▁thoughtful", - -12.216423034667969 - ], - [ - "gesetz", - -12.216527938842773 - ], - [ - "pfen", - -12.216611862182617 - ], - [ - "▁déroul", - -12.216713905334473 - ], - [ - "▁checkout", - -12.216876029968262 - ], - [ - "quette", - -12.216936111450195 - ], - [ - "▁pierdut", - -12.2170991897583 - ], - [ - "▁Seat", - -12.217140197753906 - ], - [ - "▁linen", - -12.217193603515625 - ], - [ - "archiv", - -12.217245101928711 - ], - [ - "arna", - -12.217254638671875 - ], - [ - "importe", - -12.21742057800293 - ], - [ - "▁PHP", - -12.217496871948242 - ], - [ - "▁Parents", - -12.217503547668457 - ], - [ - "▁Birmingham", - -12.217513084411621 - ], - [ - "▁Integr", - -12.217588424682617 - ], - [ - "▁Mason", - -12.217607498168945 - ], - [ - "zieht", - -12.217781066894531 - ], - [ - "▁camps", - -12.217803001403809 - ], - [ - "OG", - -12.21786117553711 - ], - [ - "▁syrup", - -12.217927932739258 - ], - [ - "▁Cookies", - -12.217928886413574 - ], - [ 
- "▁Comfort", - -12.217955589294434 - ], - [ - "ută", - -12.217976570129395 - ], - [ - "abia", - -12.217979431152344 - ], - [ - "zeci", - -12.218003273010254 - ], - [ - "▁Gardens", - -12.218009948730469 - ], - [ - "▁incidents", - -12.218149185180664 - ], - [ - "▁participat", - -12.218235969543457 - ], - [ - "▁glimpse", - -12.218342781066895 - ], - [ - "5.5", - -12.218437194824219 - ], - [ - "▁dealers", - -12.218469619750977 - ], - [ - "▁Grande", - -12.218565940856934 - ], - [ - "▁raid", - -12.218944549560547 - ], - [ - "owing", - -12.21903133392334 - ], - [ - "▁contrary", - -12.219109535217285 - ], - [ - "Earlier", - -12.219138145446777 - ], - [ - "tien", - -12.21916389465332 - ], - [ - "drop", - -12.219169616699219 - ], - [ - "▁angajat", - -12.219359397888184 - ], - [ - "▁procesul", - -12.219515800476074 - ], - [ - "▁focal", - -12.219564437866211 - ], - [ - "▁impart", - -12.219703674316406 - ], - [ - "▁Abschluss", - -12.219749450683594 - ], - [ - "carui", - -12.219830513000488 - ], - [ - "insul", - -12.220277786254883 - ], - [ - "▁creamy", - -12.220283508300781 - ], - [ - "eille", - -12.22032356262207 - ], - [ - "suppl", - -12.220335960388184 - ], - [ - "▁Heaven", - -12.220471382141113 - ], - [ - "éna", - -12.220667839050293 - ], - [ - "▁swap", - -12.220739364624023 - ], - [ - "▁vreau", - -12.220762252807617 - ], - [ - "▁Bryan", - -12.220809936523438 - ], - [ - "▁Zug", - -12.220815658569336 - ], - [ - "▁glance", - -12.220848083496094 - ], - [ - "▁elimin", - -12.220900535583496 - ], - [ - "▁yeux", - -12.221084594726562 - ], - [ - "wehr", - -12.221238136291504 - ], - [ - "2.5", - -12.221287727355957 - ], - [ - "▁poses", - -12.221364974975586 - ], - [ - "▁parcel", - -12.221585273742676 - ], - [ - "▁Apartment", - -12.221749305725098 - ], - [ - "▁NASA", - -12.221768379211426 - ], - [ - "▁bénéfici", - -12.22187614440918 - ], - [ - "▁Umgebung", - -12.221890449523926 - ], - [ - "asia", - -12.221946716308594 - ], - [ - "abi", - -12.221967697143555 - ], - [ - "coup", - 
-12.222002983093262 - ], - [ - "synchron", - -12.222017288208008 - ], - [ - "▁Sicherheits", - -12.222029685974121 - ], - [ - "bic", - -12.222076416015625 - ], - [ - "▁distract", - -12.222148895263672 - ], - [ - "▁rentals", - -12.222163200378418 - ], - [ - "constru", - -12.222290992736816 - ], - [ - "curs", - -12.222345352172852 - ], - [ - "genannten", - -12.222386360168457 - ], - [ - "▁Shanghai", - -12.222501754760742 - ], - [ - "▁vague", - -12.222504615783691 - ], - [ - "▁Leather", - -12.22250747680664 - ], - [ - "▁Vintage", - -12.222532272338867 - ], - [ - "pointing", - -12.22259521484375 - ], - [ - "avant", - -12.22268295288086 - ], - [ - "gues", - -12.222949028015137 - ], - [ - "sweise", - -12.22302532196045 - ], - [ - "▁Greater", - -12.223065376281738 - ], - [ - "fig", - -12.22310733795166 - ], - [ - "▁Blut", - -12.223217964172363 - ], - [ - "▁Stellen", - -12.22326946258545 - ], - [ - "▁isolation", - -12.22337818145752 - ], - [ - "▁overhead", - -12.22338581085205 - ], - [ - "▁wondered", - -12.223508834838867 - ], - [ - "essai", - -12.223609924316406 - ], - [ - "aves", - -12.2236328125 - ], - [ - "▁Shore", - -12.223637580871582 - ], - [ - "▁INC", - -12.223709106445312 - ], - [ - "rufen", - -12.223980903625488 - ], - [ - "▁magnifique", - -12.224069595336914 - ], - [ - "▁intéressant", - -12.224072456359863 - ], - [ - "▁tanks", - -12.224075317382812 - ], - [ - "▁Tun", - -12.224367141723633 - ], - [ - "▁approaching", - -12.224390029907227 - ], - [ - "▁relay", - -12.224479675292969 - ], - [ - "▁Küche", - -12.224529266357422 - ], - [ - "describing", - -12.224587440490723 - ], - [ - "▁Certification", - -12.224588394165039 - ], - [ - "▁Breakfast", - -12.224597930908203 - ], - [ - "▁Frame", - -12.224891662597656 - ], - [ - "▁Stoff", - -12.224909782409668 - ], - [ - "▁victime", - -12.224924087524414 - ], - [ - "Observ", - -12.224943161010742 - ], - [ - "▁gutter", - -12.224989891052246 - ], - [ - "standard", - -12.225220680236816 - ], - [ - "▁Sci", - -12.225244522094727 - 
], - [ - "▁sept", - -12.225377082824707 - ], - [ - "▁Potter", - -12.225423812866211 - ], - [ - "letter", - -12.22577953338623 - ], - [ - "▁tobacco", - -12.225852012634277 - ], - [ - "▁threatened", - -12.22591781616211 - ], - [ - "MW", - -12.225936889648438 - ], - [ - "▁Cher", - -12.225944519042969 - ], - [ - "0.1", - -12.225957870483398 - ], - [ - "mitted", - -12.22596263885498 - ], - [ - "zustellen", - -12.225967407226562 - ], - [ - "dominated", - -12.226165771484375 - ], - [ - "/16", - -12.22623348236084 - ], - [ - "POS", - -12.226317405700684 - ], - [ - "▁Zin", - -12.226373672485352 - ], - [ - "▁Okay", - -12.226381301879883 - ], - [ - "▁projected", - -12.226405143737793 - ], - [ - "▁selber", - -12.226548194885254 - ], - [ - "▁proiectului", - -12.2266206741333 - ], - [ - "▁Shell", - -12.226683616638184 - ], - [ - "▁cartridge", - -12.226706504821777 - ], - [ - "Message", - -12.2267484664917 - ], - [ - "haben", - -12.226799964904785 - ], - [ - "▁slides", - -12.226829528808594 - ], - [ - "▁gleichzeitig", - -12.226886749267578 - ], - [ - "▁Racing", - -12.227051734924316 - ], - [ - "▁20,", - -12.227070808410645 - ], - [ - "▁separat", - -12.227094650268555 - ], - [ - "▁repeatedly", - -12.227110862731934 - ], - [ - "▁casting", - -12.22728157043457 - ], - [ - "▁sacred", - -12.227283477783203 - ], - [ - "verfahren", - -12.227387428283691 - ], - [ - "▁echilibr", - -12.227514266967773 - ], - [ - "▁rebel", - -12.2277250289917 - ], - [ - "säu", - -12.227794647216797 - ], - [ - "ummy", - -12.227815628051758 - ], - [ - "▁backing", - -12.227889060974121 - ], - [ - "▁sponsors", - -12.227912902832031 - ], - [ - "▁Stress", - -12.22802448272705 - ], - [ - "▁Rules", - -12.228083610534668 - ], - [ - "▁render", - -12.228241920471191 - ], - [ - "▁funktioniert", - -12.228384971618652 - ], - [ - "▁Pearl", - -12.228472709655762 - ], - [ - "▁Scho", - -12.228527069091797 - ], - [ - "schwer", - -12.228595733642578 - ], - [ - "▁descoperit", - -12.228702545166016 - ], - [ - "holen", - 
-12.228720664978027 - ], - [ - "imposed", - -12.228960990905762 - ], - [ - "▁appearing", - -12.228968620300293 - ], - [ - "▁höher", - -12.229082107543945 - ], - [ - "▁Victorian", - -12.229111671447754 - ], - [ - "▁founding", - -12.229155540466309 - ], - [ - "▁Polish", - -12.229239463806152 - ], - [ - "▁anume", - -12.229248046875 - ], - [ - "Box", - -12.229488372802734 - ], - [ - "▁intrat", - -12.229598999023438 - ], - [ - "▁Inspiration", - -12.229610443115234 - ], - [ - "▁Canyon", - -12.229625701904297 - ], - [ - "▁Franklin", - -12.22974681854248 - ], - [ - "▁susceptible", - -12.22982120513916 - ], - [ - "trap", - -12.229839324951172 - ], - [ - "▁Roma", - -12.23000717163086 - ], - [ - "▁ethics", - -12.230009078979492 - ], - [ - "▁Privat", - -12.230027198791504 - ], - [ - "▁journalists", - -12.230090141296387 - ], - [ - "▁Universität", - -12.230246543884277 - ], - [ - "▁conditioner", - -12.230308532714844 - ], - [ - "folge", - -12.230327606201172 - ], - [ - "kirche", - -12.230416297912598 - ], - [ - "gehalten", - -12.230530738830566 - ], - [ - "midi", - -12.230570793151855 - ], - [ - "▁radar", - -12.230619430541992 - ], - [ - "▁Yard", - -12.230775833129883 - ], - [ - "▁professionnelle", - -12.230863571166992 - ], - [ - "▁Orchestra", - -12.230870246887207 - ], - [ - "▁immigrants", - -12.230870246887207 - ], - [ - "▁refined", - -12.230929374694824 - ], - [ - "▁Bishop", - -12.231036186218262 - ], - [ - "string", - -12.231095314025879 - ], - [ - "▁majoritatea", - -12.231231689453125 - ], - [ - "▁workflow", - -12.23123836517334 - ], - [ - "▁întreg", - -12.231306076049805 - ], - [ - "went", - -12.231563568115234 - ], - [ - "▁trat", - -12.231689453125 - ], - [ - "felul", - -12.23176383972168 - ], - [ - "▁hardwood", - -12.231821060180664 - ], - [ - "▁Task", - -12.231867790222168 - ], - [ - "branded", - -12.231921195983887 - ], - [ - "▁cinq", - -12.231966018676758 - ], - [ - "▁curb", - -12.232041358947754 - ], - [ - "▁Discount", - -12.232043266296387 - ], - [ - "▁Episode", - 
-12.232131958007812 - ], - [ - "▁Knowledge", - -12.232144355773926 - ], - [ - "▁tricky", - -12.232173919677734 - ], - [ - "▁characteristic", - -12.232233047485352 - ], - [ - "▁plata", - -12.23226261138916 - ], - [ - "▁Labour", - -12.23232650756836 - ], - [ - "▁Tha", - -12.232372283935547 - ], - [ - "▁Liefer", - -12.232430458068848 - ], - [ - "▁Reader", - -12.232471466064453 - ], - [ - "▁Linda", - -12.232521057128906 - ], - [ - "ittlerweile", - -12.232552528381348 - ], - [ - "defining", - -12.232564926147461 - ], - [ - "▁delayed", - -12.232635498046875 - ], - [ - "▁Bewertung", - -12.232674598693848 - ], - [ - "▁Unique", - -12.232791900634766 - ], - [ - "▁Champion", - -12.232866287231445 - ], - [ - "2008", - -12.232897758483887 - ], - [ - "▁conclu", - -12.232934951782227 - ], - [ - "▁câștig", - -12.2329740524292 - ], - [ - "▁scheduling", - -12.2329740524292 - ], - [ - "▁sailing", - -12.233116149902344 - ], - [ - "▁Storm", - -12.23318862915039 - ], - [ - "▁Stil", - -12.23320198059082 - ], - [ - "▁Album", - -12.233211517333984 - ], - [ - "▁ultime", - -12.233343124389648 - ], - [ - "url", - -12.233369827270508 - ], - [ - "▁terrific", - -12.23339557647705 - ], - [ - "▁remedy", - -12.233396530151367 - ], - [ - "▁Around", - -12.233592987060547 - ], - [ - "▁Kni", - -12.233756065368652 - ], - [ - "etty", - -12.23376750946045 - ], - [ - "Managing", - -12.233809471130371 - ], - [ - "▁Bedeutung", - -12.233816146850586 - ], - [ - "▁earthquake", - -12.233817100524902 - ], - [ - "▁Telefon", - -12.233818054199219 - ], - [ - "▁Upper", - -12.233869552612305 - ], - [ - "▁validation", - -12.233892440795898 - ], - [ - "-22", - -12.233997344970703 - ], - [ - "▁queue", - -12.23401165008545 - ], - [ - "tinde", - -12.234025001525879 - ], - [ - "built", - -12.234047889709473 - ], - [ - "▁voix", - -12.234125137329102 - ], - [ - "▁Resource", - -12.234126091003418 - ], - [ - "ţiuni", - -12.234143257141113 - ], - [ - "▁satisfying", - -12.234299659729004 - ], - [ - "▁Kohl", - -12.234441757202148 
- ], - [ - "▁Materials", - -12.234618186950684 - ], - [ - "▁esp", - -12.234732627868652 - ], - [ - "enseignement", - -12.234773635864258 - ], - [ - "danach", - -12.234883308410645 - ], - [ - "peux", - -12.234932899475098 - ], - [ - "▁deployed", - -12.235113143920898 - ], - [ - "▁1976", - -12.235126495361328 - ], - [ - "ușor", - -12.235334396362305 - ], - [ - "élection", - -12.235380172729492 - ], - [ - "ettes", - -12.235437393188477 - ], - [ - "▁Madison", - -12.235506057739258 - ], - [ - "108", - -12.235685348510742 - ], - [ - "berger", - -12.235696792602539 - ], - [ - "▁pedal", - -12.235702514648438 - ], - [ - "▁quasi", - -12.235820770263672 - ], - [ - "▁lend", - -12.235843658447266 - ], - [ - "VER", - -12.235940933227539 - ], - [ - "▁chapters", - -12.236002922058105 - ], - [ - "▁idei", - -12.23600959777832 - ], - [ - "Deine", - -12.236034393310547 - ], - [ - "▁endure", - -12.236092567443848 - ], - [ - "▁Studios", - -12.236259460449219 - ], - [ - "structure", - -12.236274719238281 - ], - [ - "▁puiss", - -12.236370086669922 - ], - [ - "▁Morning", - -12.236443519592285 - ], - [ - "guide", - -12.236462593078613 - ], - [ - "▁Wave", - -12.236617088317871 - ], - [ - "▁banque", - -12.236879348754883 - ], - [ - "änd", - -12.236912727355957 - ], - [ - "oubli", - -12.237070083618164 - ], - [ - "▁mixer", - -12.237125396728516 - ], - [ - "▁remedi", - -12.237210273742676 - ], - [ - "▁scop", - -12.237421989440918 - ], - [ - "▁Rosen", - -12.237561225891113 - ], - [ - "▁spital", - -12.23773193359375 - ], - [ - "blau", - -12.237811088562012 - ], - [ - "▁financiar", - -12.237865447998047 - ], - [ - "avour", - -12.237871170043945 - ], - [ - "Def", - -12.238025665283203 - ], - [ - "▁socket", - -12.238076210021973 - ], - [ - "▁occurring", - -12.238360404968262 - ], - [ - "▁munci", - -12.238368034362793 - ], - [ - "▁realiza", - -12.238426208496094 - ], - [ - "▁beating", - -12.2384614944458 - ], - [ - "▁Phillip", - -12.238490104675293 - ], - [ - "▁courant", - -12.238509178161621 - ], - 
[ - "Auto", - -12.238608360290527 - ], - [ - "▁Lager", - -12.238685607910156 - ], - [ - "▁folos", - -12.238696098327637 - ], - [ - "▁moyens", - -12.238770484924316 - ], - [ - "▁Ec", - -12.238780975341797 - ], - [ - "▁Strip", - -12.238788604736328 - ], - [ - "sparen", - -12.238848686218262 - ], - [ - "▁Nintendo", - -12.238886833190918 - ], - [ - "▁Murphy", - -12.238912582397461 - ], - [ - "▁flux", - -12.239034652709961 - ], - [ - "▁mots", - -12.239034652709961 - ], - [ - "▁rechts", - -12.239045143127441 - ], - [ - "▁cardio", - -12.239142417907715 - ], - [ - "avoiding", - -12.239343643188477 - ], - [ - "érer", - -12.239453315734863 - ], - [ - "hiel", - -12.239461898803711 - ], - [ - "▁rezistent", - -12.239521980285645 - ], - [ - "close", - -12.23954963684082 - ], - [ - "hésitez", - -12.239596366882324 - ], - [ - "Hz", - -12.239631652832031 - ], - [ - "▁elaborate", - -12.239689826965332 - ], - [ - "▁permanently", - -12.239709854125977 - ], - [ - "▁Pittsburgh", - -12.239734649658203 - ], - [ - "▁counties", - -12.239819526672363 - ], - [ - "▁bookmark", - -12.239919662475586 - ], - [ - "▁Label", - -12.239965438842773 - ], - [ - "▁Freude", - -12.239974021911621 - ], - [ - "▁preferat", - -12.239986419677734 - ], - [ - "▁Mein", - -12.239995002746582 - ], - [ - "▁Crew", - -12.240218162536621 - ], - [ - "▁clips", - -12.240253448486328 - ], - [ - "8,000", - -12.240263938903809 - ], - [ - "▁recognise", - -12.240311622619629 - ], - [ - "ință", - -12.240365028381348 - ], - [ - "▁prieteni", - -12.240447044372559 - ], - [ - "Heute", - -12.240522384643555 - ], - [ - "ancienne", - -12.240534782409668 - ], - [ - "▁annoying", - -12.240583419799805 - ], - [ - "▁awful", - -12.240704536437988 - ], - [ - "▁Comments", - -12.240774154663086 - ], - [ - "▁musician", - -12.240830421447754 - ], - [ - "▁Elite", - -12.241023063659668 - ], - [ - "▁patri", - -12.241024017333984 - ], - [ - "▁Coupon", - -12.241037368774414 - ], - [ - "▁Farbe", - -12.241097450256348 - ], - [ - "▁contribui", - 
-12.241110801696777 - ], - [ - "hari", - -12.241294860839844 - ], - [ - "▁activitati", - -12.24161148071289 - ], - [ - "▁Traum", - -12.2416410446167 - ], - [ - "1.8", - -12.24170207977295 - ], - [ - "▁Healthcare", - -12.24172306060791 - ], - [ - "▁refresh", - -12.241943359375 - ], - [ - "▁Maha", - -12.242060661315918 - ], - [ - "▁dép", - -12.242082595825195 - ], - [ - "▁Studien", - -12.242314338684082 - ], - [ - "▁spectacol", - -12.242378234863281 - ], - [ - "impro", - -12.24254035949707 - ], - [ - "▁commentaire", - -12.242544174194336 - ], - [ - "ported", - -12.242570877075195 - ], - [ - "▁reclam", - -12.242612838745117 - ], - [ - "▁Verkauf", - -12.242634773254395 - ], - [ - "▁newspapers", - -12.242661476135254 - ], - [ - "▁iubit", - -12.242838859558105 - ], - [ - "▁Kenne", - -12.242844581604004 - ], - [ - "▁Consultant", - -12.242958068847656 - ], - [ - "▁stau", - -12.242986679077148 - ], - [ - "TON", - -12.243057250976562 - ], - [ - "▁Fehler", - -12.243070602416992 - ], - [ - "▁lettre", - -12.243167877197266 - ], - [ - "▁investigator", - -12.243172645568848 - ], - [ - "▁quantities", - -12.243184089660645 - ], - [ - "ogram", - -12.243208885192871 - ], - [ - "avaient", - -12.24323844909668 - ], - [ - "▁reducere", - -12.243265151977539 - ], - [ - "Lite", - -12.243402481079102 - ], - [ - "kurs", - -12.243443489074707 - ], - [ - "pré", - -12.24383544921875 - ], - [ - "pap", - -12.243898391723633 - ], - [ - "▁Männer", - -12.243983268737793 - ], - [ - "▁gauche", - -12.244022369384766 - ], - [ - "▁ähnlich", - -12.244027137756348 - ], - [ - "▁sunlight", - -12.244063377380371 - ], - [ - "▁rester", - -12.24422550201416 - ], - [ - "jumped", - -12.244586944580078 - ], - [ - "▁exclusiv", - -12.24463176727295 - ], - [ - "▁electoral", - -12.244640350341797 - ], - [ - "▁Portal", - -12.244650840759277 - ], - [ - "ulent", - -12.244688987731934 - ], - [ - "▁sonst", - -12.24474048614502 - ], - [ - "entraîne", - -12.24483585357666 - ], - [ - "▁repas", - -12.244837760925293 - ], - [ - 
"▁redus", - -12.244858741760254 - ], - [ - "aku", - -12.244866371154785 - ], - [ - "▁Graphic", - -12.245251655578613 - ], - [ - "▁geringe", - -12.24539566040039 - ], - [ - "plätze", - -12.245474815368652 - ], - [ - "Trebuie", - -12.245479583740234 - ], - [ - "▁rezultate", - -12.245479583740234 - ], - [ - "▁configure", - -12.245683670043945 - ], - [ - "▁PV", - -12.245834350585938 - ], - [ - "▁insect", - -12.246109962463379 - ], - [ - "▁Reviews", - -12.246129035949707 - ], - [ - "releasing", - -12.246186256408691 - ], - [ - "▁appliance", - -12.246246337890625 - ], - [ - "▁oferte", - -12.246482849121094 - ], - [ - "▁WILL", - -12.246484756469727 - ], - [ - "rion", - -12.246499061584473 - ], - [ - "▁Cole", - -12.246582984924316 - ], - [ - "▁1975", - -12.246650695800781 - ], - [ - "Admin", - -12.24677848815918 - ], - [ - "▁parade", - -12.246800422668457 - ], - [ - "▁mélange", - -12.24692153930664 - ], - [ - "▁shortage", - -12.247007369995117 - ], - [ - "▁Measure", - -12.247400283813477 - ], - [ - "anchmal", - -12.24742603302002 - ], - [ - "▁transfers", - -12.247432708740234 - ], - [ - "▁sistemului", - -12.247573852539062 - ], - [ - "▁deschide", - -12.247819900512695 - ], - [ - "▁Künstler", - -12.247821807861328 - ], - [ - "▁Plain", - -12.247848510742188 - ], - [ - "▁messaging", - -12.247855186462402 - ], - [ - "▁metabolism", - -12.247879981994629 - ], - [ - "fill", - -12.248031616210938 - ], - [ - "▁Bomb", - -12.24814224243164 - ], - [ - "usine", - -12.248208045959473 - ], - [ - "▁restart", - -12.248233795166016 - ], - [ - "▁Discussion", - -12.248336791992188 - ], - [ - "smith", - -12.248472213745117 - ], - [ - "▁Bh", - -12.248607635498047 - ], - [ - "▁sap", - -12.248689651489258 - ], - [ - "Moo", - -12.248714447021484 - ], - [ - "▁indirect", - -12.248785972595215 - ], - [ - "▁eingesetzt", - -12.248863220214844 - ], - [ - "▁Hip", - -12.248870849609375 - ], - [ - "▁iulie", - -12.249113082885742 - ], - [ - "▁atac", - -12.249201774597168 - ], - [ - "▁passport", - 
-12.2492036819458 - ], - [ - "▁Egyptian", - -12.249290466308594 - ], - [ - "▁soluți", - -12.249349594116211 - ], - [ - "▁cakes", - -12.249356269836426 - ], - [ - "▁Fellow", - -12.24949836730957 - ], - [ - "▁collision", - -12.249533653259277 - ], - [ - "▁abundant", - -12.249961853027344 - ], - [ - "▁Wonder", - -12.24997329711914 - ], - [ - "▁theories", - -12.249991416931152 - ], - [ - "landed", - -12.250046730041504 - ], - [ - "▁meantime", - -12.2500638961792 - ], - [ - "schlüsse", - -12.25022029876709 - ], - [ - "▁helicopter", - -12.25039005279541 - ], - [ - "Voici", - -12.250479698181152 - ], - [ - "▁Honey", - -12.25049877166748 - ], - [ - "▁deleted", - -12.250511169433594 - ], - [ - "▁Projekte", - -12.250523567199707 - ], - [ - "▁gasi", - -12.2506742477417 - ], - [ - "applique", - -12.25068473815918 - ], - [ - "TAL", - -12.250699043273926 - ], - [ - "notch", - -12.250699996948242 - ], - [ - "▁Response", - -12.250818252563477 - ], - [ - "▁deveni", - -12.250818252563477 - ], - [ - "▁regulate", - -12.250829696655273 - ], - [ - "▁vegetarian", - -12.25083065032959 - ], - [ - "▁Pastor", - -12.250880241394043 - ], - [ - "▁Strong", - -12.250940322875977 - ], - [ - "▁élèves", - -12.251055717468262 - ], - [ - "▁alimente", - -12.25113582611084 - ], - [ - "graphy", - -12.251181602478027 - ], - [ - "▁spirits", - -12.251266479492188 - ], - [ - "▁Cau", - -12.251282691955566 - ], - [ - "determin", - -12.251304626464844 - ], - [ - "arilor", - -12.251382827758789 - ], - [ - "▁masura", - -12.251470565795898 - ], - [ - "RAN", - -12.251500129699707 - ], - [ - "marked", - -12.251564979553223 - ], - [ - "cuba", - -12.251602172851562 - ], - [ - "omni", - -12.251609802246094 - ], - [ - "▁detox", - -12.251662254333496 - ], - [ - "▁quartz", - -12.251741409301758 - ], - [ - "▁Bug", - -12.25177001953125 - ], - [ - "▁Sugar", - -12.25185775756836 - ], - [ - "▁opponents", - -12.25197982788086 - ], - [ - "▁solved", - -12.25207805633545 - ], - [ - "semn", - -12.252257347106934 - ], - [ - 
"▁Prepare", - -12.252558708190918 - ], - [ - "ffel", - -12.252586364746094 - ], - [ - "▁Highlight", - -12.252608299255371 - ], - [ - "▁curent", - -12.252618789672852 - ], - [ - "▁praktisch", - -12.252626419067383 - ], - [ - "▁lending", - -12.252676963806152 - ], - [ - "▁minority", - -12.252752304077148 - ], - [ - "Free", - -12.252970695495605 - ], - [ - "business", - -12.252997398376465 - ], - [ - "▁outlook", - -12.253097534179688 - ], - [ - "▁assessments", - -12.253168106079102 - ], - [ - "▁Brother", - -12.253266334533691 - ], - [ - "▁partager", - -12.25326919555664 - ], - [ - "▁Brun", - -12.25329303741455 - ], - [ - "▁pedestrian", - -12.25339412689209 - ], - [ - "anța", - -12.253413200378418 - ], - [ - "▁recycled", - -12.253457069396973 - ], - [ - "▁quicker", - -12.253626823425293 - ], - [ - "▁lamps", - -12.253683090209961 - ], - [ - "▁nationally", - -12.253813743591309 - ], - [ - "▁Supplier", - -12.253823280334473 - ], - [ - "ograph", - -12.253936767578125 - ], - [ - "engage", - -12.253981590270996 - ], - [ - "▁Marg", - -12.254131317138672 - ], - [ - "▁aplicare", - -12.254181861877441 - ], - [ - "▁scared", - -12.254194259643555 - ], - [ - "▁accredited", - -12.254255294799805 - ], - [ - "▁outils", - -12.25436019897461 - ], - [ - "▁bâtiment", - -12.254446029663086 - ], - [ - "▁existed", - -12.254586219787598 - ], - [ - "gegangen", - -12.254619598388672 - ], - [ - "▁elevation", - -12.25463581085205 - ], - [ - "▁Tradition", - -12.254670143127441 - ], - [ - "▁Gericht", - -12.254677772521973 - ], - [ - "hub", - -12.254680633544922 - ], - [ - "strahl", - -12.25473690032959 - ], - [ - "build", - -12.254796981811523 - ], - [ - "▁Customers", - -12.25487232208252 - ], - [ - "klasse", - -12.254890441894531 - ], - [ - "▁pierre", - -12.254895210266113 - ], - [ - "(2)", - -12.255006790161133 - ], - [ - "Life", - -12.255125999450684 - ], - [ - "▁bachelor", - -12.25513744354248 - ], - [ - "▁quad", - -12.255195617675781 - ], - [ - "▁dispozitiv", - -12.25523567199707 - ], - [ - 
"106", - -12.255266189575195 - ], - [ - "▁suburb", - -12.255495071411133 - ], - [ - "▁1977", - -12.255586624145508 - ], - [ - "▁Alzheimer", - -12.255973815917969 - ], - [ - "▁spicy", - -12.255988121032715 - ], - [ - "▁spreading", - -12.256002426147461 - ], - [ - "nötigen", - -12.256078720092773 - ], - [ - "▁novels", - -12.256104469299316 - ], - [ - "▁responsabilité", - -12.256141662597656 - ], - [ - "▁Bud", - -12.256332397460938 - ], - [ - "▁desirable", - -12.256407737731934 - ], - [ - "TOR", - -12.256444931030273 - ], - [ - "five", - -12.256547927856445 - ], - [ - "▁Firmen", - -12.256860733032227 - ], - [ - "oeuvre", - -12.257075309753418 - ], - [ - "grass", - -12.257233619689941 - ], - [ - "▁practically", - -12.257277488708496 - ], - [ - "▁runners", - -12.257281303405762 - ], - [ - "▁mothers", - -12.257341384887695 - ], - [ - "Shop", - -12.257345199584961 - ], - [ - "▁Chicken", - -12.257408142089844 - ], - [ - "▁License", - -12.257593154907227 - ], - [ - "▁Bach", - -12.25765323638916 - ], - [ - "earliest", - -12.257729530334473 - ], - [ - "▁replica", - -12.25774097442627 - ], - [ - "▁haunt", - -12.257833480834961 - ], - [ - "▁materi", - -12.257854461669922 - ], - [ - "▁Finland", - -12.257893562316895 - ], - [ - "▁europene", - -12.257919311523438 - ], - [ - "abilă", - -12.257944107055664 - ], - [ - "cati", - -12.258007049560547 - ], - [ - "▁cholesterol", - -12.258132934570312 - ], - [ - "...).", - -12.258151054382324 - ], - [ - "cardi", - -12.25838565826416 - ], - [ - "▁(12", - -12.258387565612793 - ], - [ - "analyzed", - -12.258506774902344 - ], - [ - "▁respondents", - -12.258591651916504 - ], - [ - "▁höchste", - -12.258646011352539 - ], - [ - "▁Kern", - -12.258647918701172 - ], - [ - "▁knapp", - -12.258781433105469 - ], - [ - "▁Someone", - -12.258955001831055 - ], - [ - "▁équipé", - -12.258997917175293 - ], - [ - "credited", - -12.259106636047363 - ], - [ - "▁numar", - -12.259163856506348 - ], - [ - "▁Ace", - -12.259185791015625 - ], - [ - "zentrum", - 
-12.2592191696167 - ], - [ - "nehmer", - -12.259270668029785 - ], - [ - "arrivée", - -12.259282112121582 - ], - [ - "ELE", - -12.259291648864746 - ], - [ - "clean", - -12.259418487548828 - ], - [ - "Boost", - -12.259538650512695 - ], - [ - "call", - -12.259575843811035 - ], - [ - "▁Polizei", - -12.259659767150879 - ], - [ - "▁Januar", - -12.259663581848145 - ], - [ - "▁Tile", - -12.259681701660156 - ], - [ - "▁traduc", - -12.259744644165039 - ], - [ - "▁promptly", - -12.259773254394531 - ], - [ - "limit", - -12.259809494018555 - ], - [ - "▁recharge", - -12.2598237991333 - ], - [ - "▁wipe", - -12.259862899780273 - ], - [ - "▁Norway", - -12.26001262664795 - ], - [ - "▁Municipal", - -12.260077476501465 - ], - [ - "▁medieval", - -12.260117530822754 - ], - [ - "▁Treat", - -12.26021671295166 - ], - [ - "Orient", - -12.260283470153809 - ], - [ - "▁Stewart", - -12.260294914245605 - ], - [ - "▁lol", - -12.26039981842041 - ], - [ - "appartement", - -12.260522842407227 - ], - [ - "▁payer", - -12.260655403137207 - ], - [ - "▁splash", - -12.260723114013672 - ], - [ - "doubtedly", - -12.260726928710938 - ], - [ - "dry", - -12.260846138000488 - ], - [ - "▁Forex", - -12.260939598083496 - ], - [ - "▁Edinburgh", - -12.260943412780762 - ], - [ - "▁Traditional", - -12.261032104492188 - ], - [ - "▁1968", - -12.261134147644043 - ], - [ - "▁glow", - -12.261248588562012 - ], - [ - "Alternatively", - -12.261265754699707 - ], - [ - "▁partly", - -12.261354446411133 - ], - [ - "égi", - -12.261401176452637 - ], - [ - "▁Prices", - -12.261640548706055 - ], - [ - "haupt", - -12.261651992797852 - ], - [ - "▁sentences", - -12.261711120605469 - ], - [ - "ouvre", - -12.261735916137695 - ], - [ - "▁Liter", - -12.261746406555176 - ], - [ - "▁Important", - -12.2620267868042 - ], - [ - "▁Collins", - -12.262077331542969 - ], - [ - "▁reproduce", - -12.262106895446777 - ], - [ - "▁selten", - -12.262124061584473 - ], - [ - "▁Mitte", - -12.262170791625977 - ], - [ - "OA", - -12.262174606323242 - ], - [ - 
"▁Sister", - -12.262358665466309 - ], - [ - "▁responding", - -12.262385368347168 - ], - [ - "▁ballot", - -12.262455940246582 - ], - [ - "▁Nutrition", - -12.262460708618164 - ], - [ - "occurrence", - -12.26246452331543 - ], - [ - "Atunci", - -12.262604713439941 - ], - [ - "▁hockey", - -12.262680053710938 - ], - [ - "▁undertaking", - -12.262697219848633 - ], - [ - "▁educators", - -12.262885093688965 - ], - [ - "▁Swedish", - -12.262893676757812 - ], - [ - "▁Recovery", - -12.262894630432129 - ], - [ - "▁circum", - -12.262910842895508 - ], - [ - "▁chains", - -12.263084411621094 - ], - [ - "▁genug", - -12.263113021850586 - ], - [ - "▁Pil", - -12.263227462768555 - ], - [ - "▁farms", - -12.263265609741211 - ], - [ - "▁simplicity", - -12.263336181640625 - ], - [ - "-21", - -12.263399124145508 - ], - [ - "▁partition", - -12.263493537902832 - ], - [ - "▁Relations", - -12.26360034942627 - ], - [ - "zentrale", - -12.263794898986816 - ], - [ - "lapse", - -12.263855934143066 - ], - [ - "▁toast", - -12.263862609863281 - ], - [ - "▁citi", - -12.263946533203125 - ], - [ - "▁longtemps", - -12.263984680175781 - ], - [ - "maj", - -12.264448165893555 - ], - [ - "▁Cin", - -12.264483451843262 - ], - [ - "zeichen", - -12.264504432678223 - ], - [ - "▁Zoo", - -12.264567375183105 - ], - [ - "▁frisch", - -12.264570236206055 - ], - [ - "▁permettra", - -12.264595031738281 - ], - [ - "▁Liberty", - -12.264642715454102 - ], - [ - "▁playground", - -12.264873504638672 - ], - [ - "▁Mate", - -12.265031814575195 - ], - [ - "▁evolving", - -12.265066146850586 - ], - [ - "national", - -12.265207290649414 - ], - [ - "▁signifie", - -12.265279769897461 - ], - [ - "▁Related", - -12.265292167663574 - ], - [ - "NES", - -12.265337944030762 - ], - [ - "euil", - -12.265473365783691 - ], - [ - "▁struggles", - -12.265542030334473 - ], - [ - "▁instinct", - -12.265628814697266 - ], - [ - "arbre", - -12.26608943939209 - ], - [ - "▁commands", - -12.266222953796387 - ], - [ - "▁frumoase", - -12.26637077331543 - ], - [ - 
"▁watches", - -12.266779899597168 - ], - [ - "NM", - -12.266804695129395 - ], - [ - "▁influential", - -12.266807556152344 - ], - [ - "▁gewesen", - -12.266901969909668 - ], - [ - "▁Pictures", - -12.267224311828613 - ], - [ - "▁HVAC", - -12.267242431640625 - ], - [ - "▁skate", - -12.26732063293457 - ], - [ - "▁Robot", - -12.267327308654785 - ], - [ - "▁Boys", - -12.267404556274414 - ], - [ - "▁Mutter", - -12.267425537109375 - ], - [ - "▁marques", - -12.267539024353027 - ], - [ - "utiliser", - -12.267793655395508 - ], - [ - "▁amazed", - -12.267799377441406 - ], - [ - "ächtig", - -12.26783275604248 - ], - [ - "▁Success", - -12.267870903015137 - ], - [ - "gramm", - -12.267956733703613 - ], - [ - "▁1972", - -12.267956733703613 - ], - [ - "▁marina", - -12.268269538879395 - ], - [ - "▁lou", - -12.268321990966797 - ], - [ - "▁précis", - -12.268380165100098 - ], - [ - "ographic", - -12.268482208251953 - ], - [ - "people", - -12.26848316192627 - ], - [ - "fahr", - -12.268547058105469 - ], - [ - "▁Contemporary", - -12.268550872802734 - ], - [ - "▁frustrating", - -12.26858139038086 - ], - [ - "chide", - -12.268704414367676 - ], - [ - "1.5", - -12.268807411193848 - ], - [ - "▁ankle", - -12.268850326538086 - ], - [ - "▁proximity", - -12.268986701965332 - ], - [ - "▁Leute", - -12.269006729125977 - ], - [ - "UA", - -12.269031524658203 - ], - [ - "union", - -12.269131660461426 - ], - [ - "▁recovered", - -12.269133567810059 - ], - [ - "▁sword", - -12.269216537475586 - ], - [ - "▁Mut", - -12.26923942565918 - ], - [ - "▁Rin", - -12.269360542297363 - ], - [ - "▁lectures", - -12.26942253112793 - ], - [ - "▁licensing", - -12.269423484802246 - ], - [ - "MAC", - -12.269498825073242 - ], - [ - "▁commute", - -12.269776344299316 - ], - [ - "Acesta", - -12.269858360290527 - ], - [ - "▁Koch", - -12.270088195800781 - ], - [ - "▁depozit", - -12.270119667053223 - ], - [ - "▁erstmal", - -12.270163536071777 - ], - [ - "arhi", - -12.270271301269531 - ], - [ - "▁Normal", - -12.270462036132812 - ], - [ 
- "EZ", - -12.270464897155762 - ], - [ - "ărilor", - -12.270986557006836 - ], - [ - "▁favoris", - -12.271041870117188 - ], - [ - "▁$9", - -12.271050453186035 - ], - [ - "▁Lawrence", - -12.271172523498535 - ], - [ - "▁fixing", - -12.271200180053711 - ], - [ - "▁researching", - -12.271288871765137 - ], - [ - "▁Pant", - -12.271467208862305 - ], - [ - "▁candid", - -12.271490097045898 - ], - [ - "▁Arkansas", - -12.27160930633545 - ], - [ - "▁bitcoin", - -12.271612167358398 - ], - [ - "ва", - -12.271645545959473 - ], - [ - "▁Finger", - -12.271692276000977 - ], - [ - "▁SRL", - -12.271718978881836 - ], - [ - "Arg", - -12.271797180175781 - ], - [ - "trade", - -12.271903991699219 - ], - [ - "▁extraction", - -12.271941184997559 - ], - [ - "▁footprint", - -12.2720308303833 - ], - [ - "▁folosite", - -12.272085189819336 - ], - [ - "▁Flex", - -12.272184371948242 - ], - [ - "▁dys", - -12.272294998168945 - ], - [ - "▁Wright", - -12.272343635559082 - ], - [ - "▁multitude", - -12.272378921508789 - ], - [ - "▁Chu", - -12.272494316101074 - ], - [ - "▁Jerry", - -12.27249526977539 - ], - [ - "▁notebook", - -12.272722244262695 - ], - [ - "▁SIM", - -12.272932052612305 - ], - [ - "dietary", - -12.272963523864746 - ], - [ - "▁polished", - -12.272984504699707 - ], - [ - "▁carriers", - -12.272993087768555 - ], - [ - "▁cardiac", - -12.27299976348877 - ], - [ - "▁burned", - -12.273038864135742 - ], - [ - "▁sealed", - -12.273062705993652 - ], - [ - "▁pumps", - -12.273224830627441 - ], - [ - "▁consumed", - -12.273233413696289 - ], - [ - "▁Teaching", - -12.273446083068848 - ], - [ - "▁daughters", - -12.27348518371582 - ], - [ - "serviciile", - -12.273600578308105 - ], - [ - "▁Teams", - -12.273690223693848 - ], - [ - "▁avoided", - -12.273903846740723 - ], - [ - "▁compagnie", - -12.274019241333008 - ], - [ - "▁mașin", - -12.274024963378906 - ], - [ - "▁Sean", - -12.27418041229248 - ], - [ - "▁arunc", - -12.274208068847656 - ], - [ - "kräfte", - -12.274238586425781 - ], - [ - "vani", - 
-12.274255752563477 - ], - [ - "Metall", - -12.27437973022461 - ], - [ - "2009", - -12.274449348449707 - ], - [ - "moi", - -12.274688720703125 - ], - [ - "▁THAT", - -12.274700164794922 - ], - [ - "▁Ny", - -12.274809837341309 - ], - [ - "▁countertops", - -12.274860382080078 - ], - [ - "Pod", - -12.274938583374023 - ], - [ - "amente", - -12.274943351745605 - ], - [ - "▁offshore", - -12.275001525878906 - ], - [ - "luti", - -12.275087356567383 - ], - [ - "parked", - -12.275160789489746 - ], - [ - "ajout", - -12.275247573852539 - ], - [ - "Shirt", - -12.275328636169434 - ], - [ - "▁3/4", - -12.275389671325684 - ], - [ - "▁gratuite", - -12.27543830871582 - ], - [ - "mètres", - -12.27557373046875 - ], - [ - "▁Wish", - -12.2755765914917 - ], - [ - "▁holistic", - -12.27558422088623 - ], - [ - "gren", - -12.275607109069824 - ], - [ - "compiled", - -12.275660514831543 - ], - [ - "▁innocent", - -12.275779724121094 - ], - [ - "▁sorte", - -12.275787353515625 - ], - [ - "▁insulin", - -12.275792121887207 - ], - [ - "▁Academic", - -12.275996208190918 - ], - [ - "▁acrylic", - -12.27600383758545 - ], - [ - "▁hinzu", - -12.27616024017334 - ], - [ - "▁compression", - -12.27619457244873 - ], - [ - "▁viral", - -12.276220321655273 - ], - [ - "▁stereo", - -12.2764892578125 - ], - [ - "▁Concept", - -12.276542663574219 - ], - [ - "▁Margaret", - -12.276659965515137 - ], - [ - "▁consolidation", - -12.276875495910645 - ], - [ - "Figure", - -12.277058601379395 - ], - [ - "zzo", - -12.277061462402344 - ], - [ - "▁Egg", - -12.277098655700684 - ], - [ - "weiterhin", - -12.277213096618652 - ], - [ - "▁Vista", - -12.277252197265625 - ], - [ - "▁necessity", - -12.277316093444824 - ], - [ - "▁kayak", - -12.277490615844727 - ], - [ - "▁consensus", - -12.277535438537598 - ], - [ - "▁Katz", - -12.277602195739746 - ], - [ - "▁Warren", - -12.277640342712402 - ], - [ - "▁custody", - -12.277755737304688 - ], - [ - "++", - -12.277759552001953 - ], - [ - "▁paiement", - -12.277782440185547 - ], - [ - "▁foul", - 
-12.277878761291504 - ], - [ - "Chaque", - -12.277934074401855 - ], - [ - "▁Syrian", - -12.277998924255371 - ], - [ - "▁photographers", - -12.278056144714355 - ], - [ - "▁dismiss", - -12.278270721435547 - ], - [ - "▁Gaz", - -12.278526306152344 - ], - [ - "▁développer", - -12.278529167175293 - ], - [ - "▁Dakota", - -12.27863883972168 - ], - [ - "▁cardiovascular", - -12.278642654418945 - ], - [ - "▁tattoo", - -12.278858184814453 - ], - [ - "▁Lighting", - -12.278918266296387 - ], - [ - "▁nowhere", - -12.278940200805664 - ], - [ - "vada", - -12.27895450592041 - ], - [ - "▁Favor", - -12.279084205627441 - ], - [ - "ruled", - -12.2791748046875 - ], - [ - "▁Dating", - -12.2793550491333 - ], - [ - "gain", - -12.279963493347168 - ], - [ - "rism", - -12.28016471862793 - ], - [ - "coloured", - -12.280169486999512 - ], - [ - "▁refugees", - -12.280184745788574 - ], - [ - "▁Schm", - -12.2803955078125 - ], - [ - "▁happily", - -12.280402183532715 - ], - [ - "▁specification", - -12.280607223510742 - ], - [ - "WM", - -12.280736923217773 - ], - [ - "▁intro", - -12.280823707580566 - ], - [ - "rack", - -12.28097915649414 - ], - [ - "characterized", - -12.28107738494873 - ], - [ - "▁externe", - -12.281136512756348 - ], - [ - "▁arrives", - -12.28114128112793 - ], - [ - "WO", - -12.281181335449219 - ], - [ - "bericht", - -12.281233787536621 - ], - [ - "▁delays", - -12.281242370605469 - ], - [ - "▁Flight", - -12.281256675720215 - ], - [ - "1-3", - -12.281524658203125 - ], - [ - "▁Singh", - -12.281548500061035 - ], - [ - "▁shifting", - -12.281651496887207 - ], - [ - "▁dashboard", - -12.281729698181152 - ], - [ - "▁lieux", - -12.281781196594238 - ], - [ - "▁validate", - -12.281901359558105 - ], - [ - "▁uniquement", - -12.281963348388672 - ], - [ - "clip", - -12.28199291229248 - ], - [ - "cov", - -12.282132148742676 - ], - [ - "▁tendance", - -12.282215118408203 - ], - [ - "èle", - -12.282258033752441 - ], - [ - "▁incepe", - -12.282261848449707 - ], - [ - "▁chunk", - -12.282585144042969 - ], - 
[ - "▁Nr", - -12.28266716003418 - ], - [ - "▁Montana", - -12.282674789428711 - ], - [ - "▁sticks", - -12.28277587890625 - ], - [ - "▁caps", - -12.28309154510498 - ], - [ - "▁Jimmy", - -12.283167839050293 - ], - [ - "▁Levi", - -12.283285140991211 - ], - [ - "▁cables", - -12.28345012664795 - ], - [ - "▁SB", - -12.283550262451172 - ], - [ - "▁thème", - -12.2836275100708 - ], - [ - "ADA", - -12.283672332763672 - ], - [ - "▁garant", - -12.283686637878418 - ], - [ - "▁Joint", - -12.283820152282715 - ], - [ - "▁partage", - -12.28398323059082 - ], - [ - "schreib", - -12.284119606018066 - ], - [ - "ether", - -12.28420352935791 - ], - [ - "▁Klima", - -12.284303665161133 - ], - [ - "▁medicines", - -12.284317016601562 - ], - [ - "▁pH", - -12.284320831298828 - ], - [ - "Architect", - -12.284378051757812 - ], - [ - "știi", - -12.284396171569824 - ], - [ - "▁retrouve", - -12.284700393676758 - ], - [ - "▁posture", - -12.284753799438477 - ], - [ - "Feature", - -12.284773826599121 - ], - [ - "▁drying", - -12.284884452819824 - ], - [ - "trifft", - -12.28488826751709 - ], - [ - "ibi", - -12.285079002380371 - ], - [ - "▁rezerv", - -12.285116195678711 - ], - [ - "▁Vă", - -12.28518009185791 - ], - [ - "▁Speaker", - -12.285282135009766 - ], - [ - "▁illustration", - -12.285319328308105 - ], - [ - "oooo", - -12.285419464111328 - ], - [ - "▁initiated", - -12.285518646240234 - ], - [ - "PK", - -12.285545349121094 - ], - [ - "▁algorithms", - -12.285630226135254 - ], - [ - "▁zice", - -12.285757064819336 - ], - [ - "WI", - -12.28581428527832 - ], - [ - "urgence", - -12.285823822021484 - ], - [ - "▁bloggers", - -12.285887718200684 - ], - [ - "▁realitate", - -12.285894393920898 - ], - [ - "eks", - -12.28598690032959 - ], - [ - "▁cushions", - -12.286149024963379 - ], - [ - "▁Kri", - -12.286224365234375 - ], - [ - "▁réalisation", - -12.286396026611328 - ], - [ - "▁Photoshop", - -12.286407470703125 - ], - [ - "cret", - -12.286462783813477 - ], - [ - "faire", - -12.286613464355469 - ], - [ - "▁Cei", - 
-12.286782264709473 - ], - [ - "ICO", - -12.286789894104004 - ], - [ - "Contin", - -12.28681755065918 - ], - [ - "▁Builder", - -12.286916732788086 - ], - [ - "look", - -12.28698444366455 - ], - [ - "▁tenants", - -12.287023544311523 - ], - [ - "▁gloves", - -12.287113189697266 - ], - [ - "Day", - -12.287169456481934 - ], - [ - "firmly", - -12.28725814819336 - ], - [ - "CIA", - -12.287352561950684 - ], - [ - "▁TVA", - -12.28741455078125 - ], - [ - "▁notifications", - -12.287446975708008 - ], - [ - "▁Higher", - -12.287459373474121 - ], - [ - "▁Weihnachts", - -12.287491798400879 - ], - [ - "▁blur", - -12.287755012512207 - ], - [ - "ов", - -12.288087844848633 - ], - [ - "feder", - -12.288159370422363 - ], - [ - "▁explosion", - -12.288171768188477 - ], - [ - "▁Fenster", - -12.288189888000488 - ], - [ - "▁junge", - -12.288225173950195 - ], - [ - "▁Highland", - -12.288230895996094 - ], - [ - "▁Lü", - -12.288290023803711 - ], - [ - "▁Alba", - -12.28832721710205 - ], - [ - "▁Dort", - -12.288338661193848 - ], - [ - "▁recruiting", - -12.28835391998291 - ], - [ - "▁Multiple", - -12.288549423217773 - ], - [ - "▁animated", - -12.288604736328125 - ], - [ - "▁Virgin", - -12.288637161254883 - ], - [ - "1000", - -12.288676261901855 - ], - [ - "▁resin", - -12.288700103759766 - ], - [ - "▁matrix", - -12.288826942443848 - ], - [ - "irri", - -12.289011001586914 - ], - [ - "▁chiffre", - -12.28904914855957 - ], - [ - "▁Corps", - -12.289252281188965 - ], - [ - "▁advocacy", - -12.28927230834961 - ], - [ - "▁pozitiv", - -12.289274215698242 - ], - [ - "▁pouss", - -12.289451599121094 - ], - [ - "événement", - -12.28950309753418 - ], - [ - "▁pielii", - -12.289717674255371 - ], - [ - "onnais", - -12.289750099182129 - ], - [ - "▁Statement", - -12.289754867553711 - ], - [ - "crimin", - -12.289868354797363 - ], - [ - "hidrat", - -12.289942741394043 - ], - [ - "▁Jugendliche", - -12.290057182312012 - ], - [ - "TRI", - -12.290223121643066 - ], - [ - "erra", - -12.290240287780762 - ], - [ - "chat", - 
-12.290321350097656 - ], - [ - "▁traits", - -12.290359497070312 - ], - [ - "▁incentives", - -12.29038143157959 - ], - [ - "▁accelerate", - -12.290568351745605 - ], - [ - "woven", - -12.290633201599121 - ], - [ - "UST", - -12.290688514709473 - ], - [ - "▁premiers", - -12.290717124938965 - ], - [ - "▁Ferien", - -12.290755271911621 - ], - [ - "▁mariage", - -12.290796279907227 - ], - [ - "▁financially", - -12.290801048278809 - ], - [ - "gesellschaft", - -12.290863037109375 - ], - [ - "▁situaţi", - -12.290865898132324 - ], - [ - "▁quoted", - -12.291373252868652 - ], - [ - "▁periodic", - -12.291421890258789 - ], - [ - "▁chaos", - -12.291543960571289 - ], - [ - "▁remodel", - -12.29159927368164 - ], - [ - "▁Contractor", - -12.291641235351562 - ], - [ - "▁recuper", - -12.291729927062988 - ], - [ - "▁driveway", - -12.291755676269531 - ], - [ - "▁entertain", - -12.291765213012695 - ], - [ - "▁condus", - -12.291769027709961 - ], - [ - "▁chefs", - -12.29184341430664 - ], - [ - "pak", - -12.291866302490234 - ], - [ - "▁possède", - -12.291948318481445 - ], - [ - "▁outreach", - -12.291984558105469 - ], - [ - "▁navig", - -12.292036056518555 - ], - [ - "▁renewal", - -12.292071342468262 - ], - [ - "▁Rice", - -12.292309761047363 - ], - [ - "▁Czech", - -12.292398452758789 - ], - [ - "▁entstehen", - -12.292445182800293 - ], - [ - "▁droite", - -12.292448997497559 - ], - [ - "▁Investor", - -12.292497634887695 - ], - [ - "▁Soci", - -12.29250431060791 - ], - [ - "▁scalp", - -12.292622566223145 - ], - [ - "▁politiques", - -12.292815208435059 - ], - [ - "▁plaintiff", - -12.292841911315918 - ], - [ - "extending", - -12.29287052154541 - ], - [ - "▁paperwork", - -12.29300594329834 - ], - [ - "vizi", - -12.293142318725586 - ], - [ - "assisting", - -12.29317569732666 - ], - [ - "local", - -12.293272972106934 - ], - [ - "▁Wear", - -12.293323516845703 - ], - [ - "▁descend", - -12.293340682983398 - ], - [ - "▁Wikipedia", - -12.293513298034668 - ], - [ - "▁Consiliului", - -12.293516159057617 - ], - [ 
- "▁Nokia", - -12.293540000915527 - ], - [ - "▁facult", - -12.293560028076172 - ], - [ - "▁altogether", - -12.293851852416992 - ], - [ - "▁rankings", - -12.29391860961914 - ], - [ - "▁downloading", - -12.293953895568848 - ], - [ - "QU", - -12.294007301330566 - ], - [ - "▁Olive", - -12.294041633605957 - ], - [ - "▁backdrop", - -12.294110298156738 - ], - [ - "▁recomandat", - -12.294116020202637 - ], - [ - "▁Faculty", - -12.294184684753418 - ], - [ - "ANS", - -12.294220924377441 - ], - [ - "▁fracture", - -12.294225692749023 - ], - [ - "job", - -12.29448127746582 - ], - [ - "▁anticipate", - -12.294525146484375 - ], - [ - "▁drift", - -12.294543266296387 - ], - [ - "▁Marco", - -12.294632911682129 - ], - [ - "▁witnessed", - -12.294700622558594 - ], - [ - "▁comprend", - -12.294974327087402 - ], - [ - "▁bulb", - -12.29504680633545 - ], - [ - "▁shallow", - -12.295059204101562 - ], - [ - "stärke", - -12.295063972473145 - ], - [ - "▁Jessica", - -12.295080184936523 - ], - [ - "▁démarche", - -12.29508113861084 - ], - [ - "▁traditionally", - -12.29508113861084 - ], - [ - "Deputy", - -12.295093536376953 - ], - [ - "▁rivers", - -12.295260429382324 - ], - [ - "▁livraison", - -12.29531192779541 - ], - [ - "▁lacking", - -12.295421600341797 - ], - [ - "▁remodeling", - -12.295426368713379 - ], - [ - "▁acesteia", - -12.295514106750488 - ], - [ - "▁grosse", - -12.295669555664062 - ], - [ - "▁propus", - -12.295833587646484 - ], - [ - "lessly", - -12.29587459564209 - ], - [ - "▁Kredit", - -12.295931816101074 - ], - [ - "reputable", - -12.295981407165527 - ], - [ - "▁Sell", - -12.2960205078125 - ], - [ - "▁Crime", - -12.296111106872559 - ], - [ - "Ent", - -12.296310424804688 - ], - [ - "finity", - -12.296422004699707 - ], - [ - "▁Complex", - -12.296500205993652 - ], - [ - "easing", - -12.296638488769531 - ], - [ - "dynamic", - -12.296670913696289 - ], - [ - "▁eaten", - -12.296727180480957 - ], - [ - "gezogen", - -12.296734809875488 - ], - [ - "▁2004,", - -12.296774864196777 - ], - [ - 
"▁Muslims", - -12.296822547912598 - ], - [ - "▁Sprache", - -12.296883583068848 - ], - [ - "▁Truth", - -12.296927452087402 - ], - [ - "▁guarantees", - -12.296928405761719 - ], - [ - "/5", - -12.29712963104248 - ], - [ - "”).", - -12.297135353088379 - ], - [ - "▁Medium", - -12.2972993850708 - ], - [ - "▁décidé", - -12.297445297241211 - ], - [ - "▁balcony", - -12.29747200012207 - ], - [ - "leuchte", - -12.297502517700195 - ], - [ - "hik", - -12.297849655151367 - ], - [ - "▁Agriculture", - -12.298221588134766 - ], - [ - "▁securities", - -12.298221588134766 - ], - [ - "Probably", - -12.298224449157715 - ], - [ - "▁macar", - -12.29824161529541 - ], - [ - "▁Signal", - -12.298399925231934 - ], - [ - "lake", - -12.298677444458008 - ], - [ - "▁compétences", - -12.298726081848145 - ], - [ - "▁proprietary", - -12.298812866210938 - ], - [ - "allons", - -12.298850059509277 - ], - [ - "▁belongs", - -12.298916816711426 - ], - [ - "▁missile", - -12.298958778381348 - ], - [ - "țiune", - -12.298999786376953 - ], - [ - "▁Integration", - -12.299116134643555 - ], - [ - "▁testimony", - -12.299120903015137 - ], - [ - "▁wesentlich", - -12.299142837524414 - ], - [ - "▁donors", - -12.299152374267578 - ], - [ - "▁pivot", - -12.299202919006348 - ], - [ - "▁Uber", - -12.299219131469727 - ], - [ - "▁databases", - -12.299281120300293 - ], - [ - "▁studi", - -12.299317359924316 - ], - [ - "totdeauna", - -12.299351692199707 - ], - [ - "▁briefly", - -12.299449920654297 - ], - [ - "▁livr", - -12.29952335357666 - ], - [ - "▁CRM", - -12.299581527709961 - ], - [ - "gone", - -12.299697875976562 - ], - [ - "10)", - -12.299761772155762 - ], - [ - "▁zilele", - -12.299920082092285 - ], - [ - "Basically", - -12.300008773803711 - ], - [ - "▁medie", - -12.300041198730469 - ], - [ - "spotted", - -12.30006217956543 - ], - [ - "▁troubles", - -12.30009937286377 - ], - [ - "▁acknowledged", - -12.300176620483398 - ], - [ - "350", - -12.300185203552246 - ], - [ - "LB", - -12.300273895263672 - ], - [ - "Phy", - 
-12.30038833618164 - ], - [ - "natal", - -12.300397872924805 - ], - [ - "illé", - -12.300445556640625 - ], - [ - "bilder", - -12.300625801086426 - ], - [ - "▁apples", - -12.300636291503906 - ], - [ - "graphical", - -12.300889015197754 - ], - [ - "organiser", - -12.301024436950684 - ], - [ - "▁ochii", - -12.301040649414062 - ], - [ - "glas", - -12.301178932189941 - ], - [ - "CAP", - -12.301180839538574 - ], - [ - "▁Doors", - -12.301331520080566 - ], - [ - "▁Eis", - -12.30156135559082 - ], - [ - "tipuri", - -12.301590919494629 - ], - [ - "▁Worth", - -12.301684379577637 - ], - [ - "izează", - -12.301719665527344 - ], - [ - "nunț", - -12.30180549621582 - ], - [ - "▁Trip", - -12.30186653137207 - ], - [ - "ISS", - -12.301976203918457 - ], - [ - "efficient", - -12.30201530456543 - ], - [ - "Luckily", - -12.302099227905273 - ], - [ - "▁vase", - -12.302133560180664 - ], - [ - "▁gay", - -12.302343368530273 - ], - [ - "▁certificates", - -12.302434921264648 - ], - [ - "riad", - -12.302549362182617 - ], - [ - "stab", - -12.302570343017578 - ], - [ - "affiche", - -12.302604675292969 - ], - [ - "▁iPod", - -12.302645683288574 - ], - [ - "▁aștept", - -12.302726745605469 - ], - [ - "▁$500", - -12.302751541137695 - ], - [ - "▁Catherine", - -12.302952766418457 - ], - [ - "▁Circuit", - -12.302957534790039 - ], - [ - "▁ranch", - -12.303045272827148 - ], - [ - "▁consequence", - -12.303118705749512 - ], - [ - "listened", - -12.303131103515625 - ], - [ - "▁Options", - -12.303187370300293 - ], - [ - "feed", - -12.30318832397461 - ], - [ - "▁adviser", - -12.303248405456543 - ], - [ - "▁présenter", - -12.30333423614502 - ], - [ - "substant", - -12.30337905883789 - ], - [ - "▁Flag", - -12.303604125976562 - ], - [ - "▁Keith", - -12.30366325378418 - ], - [ - "▁inima", - -12.303709983825684 - ], - [ - "▁substrate", - -12.30373764038086 - ], - [ - "▁charger", - -12.303803443908691 - ], - [ - "▁reporter", - -12.303844451904297 - ], - [ - "ütz", - -12.304068565368652 - ], - [ - "▁unten", - 
-12.30417537689209 - ], - [ - "▁sympa", - -12.304542541503906 - ], - [ - "▁defeated", - -12.304600715637207 - ], - [ - "ändig", - -12.304644584655762 - ], - [ - "individu", - -12.304747581481934 - ], - [ - "▁Straßen", - -12.304774284362793 - ], - [ - "▁Nepal", - -12.304791450500488 - ], - [ - "million", - -12.304803848266602 - ], - [ - "▁Cake", - -12.30499267578125 - ], - [ - "▁investigations", - -12.30526065826416 - ], - [ - "▁inspector", - -12.3054780960083 - ], - [ - "▁Campbell", - -12.305486679077148 - ], - [ - "▁consommation", - -12.305489540100098 - ], - [ - "▁Ministerul", - -12.305628776550293 - ], - [ - "Advisory", - -12.305749893188477 - ], - [ - "▁Leistungs", - -12.305939674377441 - ], - [ - "▁Pull", - -12.306157112121582 - ], - [ - "▁lover", - -12.306194305419922 - ], - [ - "▁trunk", - -12.306380271911621 - ], - [ - "▁folosesc", - -12.30639934539795 - ], - [ - "pom", - -12.306558609008789 - ], - [ - "wunder", - -12.306794166564941 - ], - [ - "▁happier", - -12.306801795959473 - ], - [ - "▁embark", - -12.30689525604248 - ], - [ - "▁mediul", - -12.3069486618042 - ], - [ - "riff", - -12.306973457336426 - ], - [ - "▁copilul", - -12.307039260864258 - ], - [ - "ommage", - -12.307126998901367 - ], - [ - "rechnung", - -12.307218551635742 - ], - [ - "NU", - -12.307220458984375 - ], - [ - "▁fellowship", - -12.307395935058594 - ], - [ - "▁Mental", - -12.307403564453125 - ], - [ - "▁fever", - -12.3074312210083 - ], - [ - "▁silly", - -12.307547569274902 - ], - [ - "Object", - -12.30756664276123 - ], - [ - "NV", - -12.307591438293457 - ], - [ - "от", - -12.30774974822998 - ], - [ - "▁Strand", - -12.307762145996094 - ], - [ - "▁Exist", - -12.30777359008789 - ], - [ - "warum", - -12.307832717895508 - ], - [ - "CY", - -12.307848930358887 - ], - [ - "kä", - -12.307856559753418 - ], - [ - "!!!!!", - -12.307869911193848 - ], - [ - "▁moarte", - -12.30793571472168 - ], - [ - "▁waterfall", - -12.308024406433105 - ], - [ - "left", - -12.30815601348877 - ], - [ - "▁Nursing", - 
-12.308225631713867 - ], - [ - "▁invalid", - -12.30826187133789 - ], - [ - "struktur", - -12.308385848999023 - ], - [ - "Allerdings", - -12.30838680267334 - ], - [ - "étranger", - -12.30838680267334 - ], - [ - "▁prost", - -12.308517456054688 - ], - [ - "▁Parent", - -12.308562278747559 - ], - [ - "▁întreag", - -12.308611869812012 - ], - [ - "▁compensate", - -12.308871269226074 - ], - [ - "▁sometime", - -12.308955192565918 - ], - [ - "graduate", - -12.308968544006348 - ], - [ - "▁Carter", - -12.30898380279541 - ], - [ - "▁crap", - -12.308998107910156 - ], - [ - "▁mathematics", - -12.309067726135254 - ], - [ - "resemble", - -12.309069633483887 - ], - [ - "Dame", - -12.309152603149414 - ], - [ - "▁Swa", - -12.309198379516602 - ], - [ - "▁celebrity", - -12.309239387512207 - ], - [ - "▁verified", - -12.309338569641113 - ], - [ - "▁Behind", - -12.309349060058594 - ], - [ - "carbon", - -12.309432983398438 - ], - [ - "▁gateway", - -12.309490203857422 - ], - [ - "▁ambitious", - -12.30952262878418 - ], - [ - "▁Wellness", - -12.30966567993164 - ], - [ - "30,000", - -12.30968189239502 - ], - [ - "defined", - -12.309929847717285 - ], - [ - "specializes", - -12.310121536254883 - ], - [ - "▁Chase", - -12.310199737548828 - ], - [ - "HF", - -12.310233116149902 - ], - [ - "ABLE", - -12.310348510742188 - ], - [ - "▁Ehr", - -12.310467720031738 - ], - [ - "▁régime", - -12.310480117797852 - ], - [ - "▁awake", - -12.310487747192383 - ], - [ - "▁seafood", - -12.310487747192383 - ], - [ - "leading", - -12.310554504394531 - ], - [ - "▁Rule", - -12.310602188110352 - ], - [ - "verkehr", - -12.310726165771484 - ], - [ - "erem", - -12.310737609863281 - ], - [ - "▁1973", - -12.310795783996582 - ], - [ - "personal", - -12.311171531677246 - ], - [ - "ența", - -12.311330795288086 - ], - [ - "apprend", - -12.311396598815918 - ], - [ - "faisant", - -12.311420440673828 - ], - [ - "▁Sounds", - -12.31151008605957 - ], - [ - "▁Launch", - -12.31151294708252 - ], - [ - "half", - -12.311636924743652 - ], - [ 
- "▁verre", - -12.311859130859375 - ], - [ - "▁Regular", - -12.31207275390625 - ], - [ - "▁Nancy", - -12.312142372131348 - ], - [ - "quelles", - -12.312161445617676 - ], - [ - "▁erhält", - -12.312169075012207 - ], - [ - "▁socks", - -12.3121919631958 - ], - [ - "lamp", - -12.312387466430664 - ], - [ - "▁durchgeführt", - -12.312472343444824 - ], - [ - "▁advertise", - -12.31260871887207 - ], - [ - "powered", - -12.312653541564941 - ], - [ - "▁concur", - -12.312699317932129 - ], - [ - "▁ressources", - -12.31293773651123 - ], - [ - "▁allocation", - -12.312986373901367 - ], - [ - "chon", - -12.313041687011719 - ], - [ - "▁Larry", - -12.313177108764648 - ], - [ - "lässig", - -12.313254356384277 - ], - [ - "OLD", - -12.313493728637695 - ], - [ - "itty", - -12.313599586486816 - ], - [ - "▁immuno", - -12.313645362854004 - ], - [ - "▁(+", - -12.313651084899902 - ], - [ - "▁Essential", - -12.313674926757812 - ], - [ - "▁semaines", - -12.313719749450684 - ], - [ - "Ru", - -12.31375503540039 - ], - [ - "▁Gear", - -12.313764572143555 - ], - [ - "völlig", - -12.313850402832031 - ], - [ - "liga", - -12.31391716003418 - ], - [ - "▁Neg", - -12.314082145690918 - ], - [ - "▁gratitude", - -12.31408977508545 - ], - [ - "aventure", - -12.314108848571777 - ], - [ - "▁frustrated", - -12.314115524291992 - ], - [ - "▁retrait", - -12.31422233581543 - ], - [ - "▁statut", - -12.314231872558594 - ], - [ - "550", - -12.31434440612793 - ], - [ - "ла", - -12.314428329467773 - ], - [ - "risto", - -12.314448356628418 - ], - [ - "WAY", - -12.314607620239258 - ], - [ - "▁pigment", - -12.314652442932129 - ], - [ - "Selon", - -12.314715385437012 - ], - [ - "stil", - -12.3148775100708 - ], - [ - "▁Marin", - -12.315055847167969 - ], - [ - "ashi", - -12.315085411071777 - ], - [ - "▁contine", - -12.31519889831543 - ], - [ - "▁Economics", - -12.315200805664062 - ], - [ - "both", - -12.3152437210083 - ], - [ - "▁Dou", - -12.31527328491211 - ], - [ - "Fel", - -12.315373420715332 - ], - [ - "UNT", - 
-12.315434455871582 - ], - [ - "▁grandmother", - -12.31548023223877 - ], - [ - "▁domicile", - -12.315678596496582 - ], - [ - "▁buffer", - -12.31574535369873 - ], - [ - "▁fuse", - -12.315815925598145 - ], - [ - "▁dosage", - -12.315821647644043 - ], - [ - "▁Nici", - -12.315839767456055 - ], - [ - "▁worries", - -12.315908432006836 - ], - [ - "▁Rail", - -12.3159818649292 - ], - [ - "uneori", - -12.315990447998047 - ], - [ - "▁Sierra", - -12.316030502319336 - ], - [ - "▁porni", - -12.316032409667969 - ], - [ - "▁NOTE", - -12.316056251525879 - ], - [ - "▁tendency", - -12.316065788269043 - ], - [ - "Set", - -12.316256523132324 - ], - [ - "▁Hof", - -12.31629753112793 - ], - [ - "▁Ruhe", - -12.316300392150879 - ], - [ - "harm", - -12.316360473632812 - ], - [ - "▁Developer", - -12.316367149353027 - ], - [ - "suing", - -12.316400527954102 - ], - [ - "persönlichen", - -12.31658935546875 - ], - [ - "▁agréable", - -12.316596031188965 - ], - [ - "commissioned", - -12.316696166992188 - ], - [ - "▁1974", - -12.31672191619873 - ], - [ - "▁1969", - -12.316758155822754 - ], - [ - "▁regl", - -12.316996574401855 - ], - [ - "▁terror", - -12.317042350769043 - ], - [ - "▁température", - -12.317051887512207 - ], - [ - "▁Archiv", - -12.31706714630127 - ], - [ - "▁Military", - -12.317140579223633 - ], - [ - "▁König", - -12.317290306091309 - ], - [ - "▁forex", - -12.31737232208252 - ], - [ - "wiki", - -12.31745719909668 - ], - [ - "thetic", - -12.317506790161133 - ], - [ - "alaturi", - -12.317974090576172 - ], - [ - "▁montant", - -12.3179931640625 - ], - [ - "▁maladie", - -12.318044662475586 - ], - [ - "gust", - -12.318151473999023 - ], - [ - "▁demander", - -12.318164825439453 - ], - [ - "avocat", - -12.318191528320312 - ], - [ - "▁sci", - -12.318192481994629 - ], - [ - "▁Wireless", - -12.318214416503906 - ], - [ - "▁Dein", - -12.318220138549805 - ], - [ - "▁trio", - -12.3183012008667 - ], - [ - "▁Same", - -12.318395614624023 - ], - [ - "Datei", - -12.318464279174805 - ], - [ - "▁alerg", - 
-12.318578720092773 - ], - [ - "crowded", - -12.318657875061035 - ], - [ - "▁Punkt", - -12.318853378295898 - ], - [ - "▁sanctions", - -12.318864822387695 - ], - [ - "stating", - -12.318922996520996 - ], - [ - "▁discusse", - -12.318949699401855 - ], - [ - "▁Eigen", - -12.319068908691406 - ], - [ - "▁sănătate", - -12.31911563873291 - ], - [ - "▁correspondence", - -12.319211959838867 - ], - [ - "cred", - -12.319331169128418 - ], - [ - "VG", - -12.319347381591797 - ], - [ - "▁différence", - -12.319347381591797 - ], - [ - "▁Montreal", - -12.319391250610352 - ], - [ - "▁masini", - -12.319398880004883 - ], - [ - "iata", - -12.319487571716309 - ], - [ - "▁sampling", - -12.319574356079102 - ], - [ - "▁Gib", - -12.319831848144531 - ], - [ - "▁sheer", - -12.319944381713867 - ], - [ - "330", - -12.319947242736816 - ], - [ - "CHI", - -12.319990158081055 - ], - [ - "▁damn", - -12.320030212402344 - ], - [ - "▁Advisor", - -12.320201873779297 - ], - [ - "Typically", - -12.320302963256836 - ], - [ - "ssé", - -12.320352554321289 - ], - [ - "quart", - -12.320361137390137 - ], - [ - "chete", - -12.320385932922363 - ], - [ - "▁Puerto", - -12.32049560546875 - ], - [ - "2-1", - -12.32050609588623 - ], - [ - "NN", - -12.320674896240234 - ], - [ - "▁styling", - -12.320707321166992 - ], - [ - "rud", - -12.320777893066406 - ], - [ - "од", - -12.320856094360352 - ], - [ - "▁Hydro", - -12.320941925048828 - ], - [ - "▁Cable", - -12.320961952209473 - ], - [ - "video", - -12.320974349975586 - ], - [ - "▁Wirkung", - -12.321194648742676 - ], - [ - "▁noble", - -12.321270942687988 - ], - [ - "▁Sonder", - -12.32129192352295 - ], - [ - "mati", - -12.321317672729492 - ], - [ - "850", - -12.321395874023438 - ], - [ - "▁Richmond", - -12.32143497467041 - ], - [ - "▁niciodată", - -12.321442604064941 - ], - [ - "AO", - -12.321527481079102 - ], - [ - "▁altered", - -12.321648597717285 - ], - [ - "▁(15", - -12.32168960571289 - ], - [ - "▁Motiv", - -12.322052001953125 - ], - [ - "AKE", - -12.322089195251465 - ], 
- [ - "▁bestimmte", - -12.322172164916992 - ], - [ - "6.5", - -12.322176933288574 - ], - [ - "hectare", - -12.322333335876465 - ], - [ - "atorită", - -12.322335243225098 - ], - [ - "▁phases", - -12.322447776794434 - ], - [ - "▁Nova", - -12.322566032409668 - ], - [ - "ordinateur", - -12.322579383850098 - ], - [ - "▁corrupt", - -12.322813034057617 - ], - [ - "error", - -12.322895050048828 - ], - [ - "▁attacked", - -12.323005676269531 - ], - [ - "▁Kirche", - -12.323019981384277 - ], - [ - "heir", - -12.323040962219238 - ], - [ - "Das", - -12.323254585266113 - ], - [ - "▁anxious", - -12.323258399963379 - ], - [ - "▁Doc", - -12.323386192321777 - ], - [ - "▁Roth", - -12.323415756225586 - ], - [ - "▁Cine", - -12.32388687133789 - ], - [ - "▁auditor", - -12.324418067932129 - ], - [ - "▁beverage", - -12.324586868286133 - ], - [ - "▁précédent", - -12.324637413024902 - ], - [ - "▁deploy", - -12.324837684631348 - ], - [ - "▁accessibility", - -12.324843406677246 - ], - [ - "▁cage", - -12.324885368347168 - ], - [ - "▁Contra", - -12.324934005737305 - ], - [ - "Best", - -12.324952125549316 - ], - [ - "iji", - -12.324972152709961 - ], - [ - "▁père", - -12.325060844421387 - ], - [ - "▁scenic", - -12.32511043548584 - ], - [ - "synthesis", - -12.325165748596191 - ], - [ - "ßen", - -12.32534408569336 - ], - [ - "▁Videos", - -12.325482368469238 - ], - [ - "▁refus", - -12.325484275817871 - ], - [ - "stimmen", - -12.3255615234375 - ], - [ - "▁sleek", - -12.325577735900879 - ], - [ - "artige", - -12.32563591003418 - ], - [ - "mari", - -12.32568359375 - ], - [ - "▁excelent", - -12.325740814208984 - ], - [ - "▁negativ", - -12.325806617736816 - ], - [ - "▁blocking", - -12.32590103149414 - ], - [ - "spricht", - -12.326001167297363 - ], - [ - "▁discomfort", - -12.32602310180664 - ], - [ - "▁stratégie", - -12.32602310180664 - ], - [ - "▁Datenschutz", - -12.326078414916992 - ], - [ - "curg", - -12.326128005981445 - ], - [ - "▁lapte", - -12.326432228088379 - ], - [ - "▁acasă", - -12.326491355895996 
- ], - [ - "▁ausschließlich", - -12.32653522491455 - ], - [ - "▁unbedingt", - -12.326802253723145 - ], - [ - "▁Linie", - -12.32689380645752 - ], - [ - "▁subscribers", - -12.327019691467285 - ], - [ - "109", - -12.32702350616455 - ], - [ - "▁Waste", - -12.32712173461914 - ], - [ - "▁Planung", - -12.327231407165527 - ], - [ - "▁visually", - -12.32734489440918 - ], - [ - "utilizarea", - -12.327370643615723 - ], - [ - "uba", - -12.327381134033203 - ], - [ - "▁fifteen", - -12.327411651611328 - ], - [ - "▁légère", - -12.327411651611328 - ], - [ - "ința", - -12.327446937561035 - ], - [ - "▁tolerance", - -12.327460289001465 - ], - [ - "▁piscine", - -12.327536582946777 - ], - [ - "▁nails", - -12.327569007873535 - ], - [ - "▁accus", - -12.327693939208984 - ], - [ - "▁coeur", - -12.327773094177246 - ], - [ - "freie", - -12.327849388122559 - ], - [ - "enţă", - -12.32812213897705 - ], - [ - "▁glucose", - -12.328336715698242 - ], - [ - "▁Jar", - -12.32838249206543 - ], - [ - "▁commencer", - -12.328387260437012 - ], - [ - "▁eliminating", - -12.328414916992188 - ], - [ - "▁mutation", - -12.32844352722168 - ], - [ - "▁afirma", - -12.328444480895996 - ], - [ - "▁Consulting", - -12.328454971313477 - ], - [ - "adia", - -12.328543663024902 - ], - [ - "zog", - -12.328604698181152 - ], - [ - "▁pielea", - -12.328658103942871 - ], - [ - "rton", - -12.328706741333008 - ], - [ - "exercice", - -12.3287935256958 - ], - [ - "namely", - -12.328847885131836 - ], - [ - "▁ajutor", - -12.3289155960083 - ], - [ - "▁markers", - -12.328917503356934 - ], - [ - "▁gardening", - -12.328932762145996 - ], - [ - "Karte", - -12.329038619995117 - ], - [ - "▁Pump", - -12.329142570495605 - ], - [ - "▁Dual", - -12.329169273376465 - ], - [ - "▁pratiques", - -12.329349517822266 - ], - [ - "▁behavioral", - -12.329358100891113 - ], - [ - "▁construire", - -12.329511642456055 - ], - [ - "▁Leonard", - -12.329596519470215 - ], - [ - "ediglich", - -12.329630851745605 - ], - [ - "ubbed", - -12.3297758102417 - ], - [ - "NK", 
- -12.329792022705078 - ], - [ - "shell", - -12.329912185668945 - ], - [ - "▁persönliche", - -12.329996109008789 - ], - [ - "ecuring", - -12.329998970031738 - ], - [ - "beaten", - -12.33000373840332 - ], - [ - "ALE", - -12.330053329467773 - ], - [ - "▁puppy", - -12.33023452758789 - ], - [ - "▁capac", - -12.33027458190918 - ], - [ - "▁seventh", - -12.330394744873047 - ], - [ - "▁nursery", - -12.330400466918945 - ], - [ - "▁Rum", - -12.330419540405273 - ], - [ - "▁exquisite", - -12.330423355102539 - ], - [ - "▁Legi", - -12.330483436584473 - ], - [ - "▁persist", - -12.330497741699219 - ], - [ - "bacterial", - -12.330548286437988 - ], - [ - "▁cereal", - -12.330572128295898 - ], - [ - "▁principe", - -12.330693244934082 - ], - [ - "chip", - -12.330766677856445 - ], - [ - "rush", - -12.330832481384277 - ], - [ - "▁funnel", - -12.330904006958008 - ], - [ - "▁calitatea", - -12.331024169921875 - ], - [ - "ibă", - -12.33104419708252 - ], - [ - "▁reign", - -12.331086158752441 - ], - [ - "▁congregation", - -12.331120491027832 - ], - [ - "▁obtine", - -12.331270217895508 - ], - [ - "▁découverte", - -12.331286430358887 - ], - [ - "▁gama", - -12.331315040588379 - ], - [ - "▁judec", - -12.33132553100586 - ], - [ - "Plan", - -12.331351280212402 - ], - [ - "▁gesture", - -12.331539154052734 - ], - [ - "öffentlichen", - -12.331644058227539 - ], - [ - "▁imported", - -12.331693649291992 - ], - [ - "▁rotate", - -12.331747055053711 - ], - [ - "blown", - -12.331756591796875 - ], - [ - "▁Protein", - -12.331827163696289 - ], - [ - "parfaitement", - -12.331832885742188 - ], - [ - "ondo", - -12.331868171691895 - ], - [ - "ologists", - -12.331890106201172 - ], - [ - "▁neighborhoods", - -12.331989288330078 - ], - [ - "▁Pope", - -12.33202075958252 - ], - [ - "▁museums", - -12.332194328308105 - ], - [ - "▁porter", - -12.332330703735352 - ], - [ - "▁kiss", - -12.332335472106934 - ], - [ - "pdf", - -12.332354545593262 - ], - [ - "sided", - -12.332359313964844 - ], - [ - "▁gern", - -12.332395553588867 
- ], - [ - "bedingungen", - -12.332496643066406 - ], - [ - "▁Ride", - -12.332582473754883 - ], - [ - "Apoi", - -12.332584381103516 - ], - [ - "▁bestehen", - -12.332603454589844 - ], - [ - "5\"", - -12.33285903930664 - ], - [ - "bob", - -12.332862854003906 - ], - [ - "ficient", - -12.33303165435791 - ], - [ - "premise", - -12.333086967468262 - ], - [ - "▁Clip", - -12.333112716674805 - ], - [ - "▁concours", - -12.333213806152344 - ], - [ - "olar", - -12.333281517028809 - ], - [ - "▁Centr", - -12.333356857299805 - ], - [ - "outlined", - -12.333429336547852 - ], - [ - "▁observa", - -12.333511352539062 - ], - [ - "▁negotiate", - -12.333537101745605 - ], - [ - "▁Partnership", - -12.33358383178711 - ], - [ - "clock", - -12.333662033081055 - ], - [ - "roasted", - -12.333755493164062 - ], - [ - "Pourquoi", - -12.33391284942627 - ], - [ - "▁Marshall", - -12.334005355834961 - ], - [ - "▁Gerade", - -12.334052085876465 - ], - [ - "▁pachet", - -12.334160804748535 - ], - [ - "▁preliminary", - -12.334162712097168 - ], - [ - "▁tragic", - -12.334200859069824 - ], - [ - "author", - -12.334268569946289 - ], - [ - "▁Gov", - -12.334309577941895 - ], - [ - "▁comunic", - -12.334403991699219 - ], - [ - "▁coordinator", - -12.334410667419434 - ], - [ - "YA", - -12.33445930480957 - ], - [ - "▁Steam", - -12.33476734161377 - ], - [ - "▁Nag", - -12.334796905517578 - ], - [ - "▁Kara", - -12.334851264953613 - ], - [ - "▁Gang", - -12.334858894348145 - ], - [ - "aurez", - -12.334868431091309 - ], - [ - "▁horrible", - -12.334869384765625 - ], - [ - "▁Luxury", - -12.335076332092285 - ], - [ - "▁encouragement", - -12.335169792175293 - ], - [ - "▁conceptual", - -12.335250854492188 - ], - [ - "▁constituent", - -12.335431098937988 - ], - [ - "nvelop", - -12.335494041442871 - ], - [ - "ucc", - -12.335500717163086 - ], - [ - "▁conçu", - -12.335542678833008 - ], - [ - "pfel", - -12.33559513092041 - ], - [ - "special", - -12.335700988769531 - ], - [ - "▁Growth", - -12.335834503173828 - ], - [ - "cada", - 
-12.335916519165039 - ], - [ - "▁oamenilor", - -12.335976600646973 - ], - [ - "▁vendredi", - -12.336021423339844 - ], - [ - "▁coupe", - -12.336055755615234 - ], - [ - "▁Danke", - -12.336134910583496 - ], - [ - "reflects", - -12.336181640625 - ], - [ - "▁girlfriend", - -12.336273193359375 - ], - [ - "▁diffuse", - -12.336325645446777 - ], - [ - "HER", - -12.336328506469727 - ], - [ - "storing", - -12.336464881896973 - ], - [ - "ailing", - -12.336591720581055 - ], - [ - "▁Desi", - -12.336601257324219 - ], - [ - "stitution", - -12.336832046508789 - ], - [ - "▁adun", - -12.336844444274902 - ], - [ - "▁Partie", - -12.336869239807129 - ], - [ - "▁tissues", - -12.336958885192871 - ], - [ - "▁discovering", - -12.337154388427734 - ], - [ - "Jacques", - -12.337178230285645 - ], - [ - "lungs", - -12.33724594116211 - ], - [ - "▁Handy", - -12.337261199951172 - ], - [ - "centric", - -12.337285995483398 - ], - [ - "slav", - -12.337442398071289 - ], - [ - "▁sights", - -12.337560653686523 - ], - [ - "▁Category", - -12.337644577026367 - ], - [ - "▁Einrichtung", - -12.337957382202148 - ], - [ - "▁Robinson", - -12.33804702758789 - ], - [ - "▁Terra", - -12.338150978088379 - ], - [ - "▁creep", - -12.338167190551758 - ], - [ - "▁Lob", - -12.338184356689453 - ], - [ - "001", - -12.33820629119873 - ], - [ - "kop", - -12.338208198547363 - ], - [ - "Emb", - -12.338292121887207 - ], - [ - "▁forgive", - -12.338391304016113 - ], - [ - "▁icons", - -12.33847427368164 - ], - [ - "electric", - -12.3385009765625 - ], - [ - "▁faucet", - -12.338516235351562 - ], - [ - "▁invisible", - -12.3386812210083 - ], - [ - "sprach", - -12.338801383972168 - ], - [ - "▁beachten", - -12.33881664276123 - ], - [ - "rahm", - -12.338833808898926 - ], - [ - "▁Teacher", - -12.338919639587402 - ], - [ - "Fab", - -12.339070320129395 - ], - [ - "▁joue", - -12.339101791381836 - ], - [ - "▁Popular", - -12.339120864868164 - ], - [ - "▁Februar", - -12.339171409606934 - ], - [ - "sound", - -12.339251518249512 - ], - [ - "▁(0", - 
-12.339317321777344 - ], - [ - "▁Compare", - -12.33938980102539 - ], - [ - "▁pads", - -12.339455604553223 - ], - [ - "270", - -12.339498519897461 - ], - [ - "ousse", - -12.339548110961914 - ], - [ - "▁UAE", - -12.339786529541016 - ], - [ - "izări", - -12.339787483215332 - ], - [ - "▁bonuses", - -12.33993911743164 - ], - [ - "▁switches", - -12.3400239944458 - ], - [ - "▁Brothers", - -12.340166091918945 - ], - [ - "▁environmentally", - -12.340171813964844 - ], - [ - "vista", - -12.340264320373535 - ], - [ - "▁intentions", - -12.3402738571167 - ], - [ - "▁Terri", - -12.340301513671875 - ], - [ - "▁diabet", - -12.34030532836914 - ], - [ - "▁prese", - -12.340333938598633 - ], - [ - "▁parcurs", - -12.340389251708984 - ], - [ - "Warum", - -12.340449333190918 - ], - [ - "▁credentials", - -12.340455055236816 - ], - [ - "▁PLA", - -12.34046459197998 - ], - [ - "▁instruct", - -12.340470314025879 - ], - [ - "▁benefic", - -12.340633392333984 - ], - [ - "write", - -12.340675354003906 - ], - [ - "▁poids", - -12.340773582458496 - ], - [ - "▁Anspruch", - -12.340923309326172 - ], - [ - "▁avocado", - -12.340923309326172 - ], - [ - "▁inevitable", - -12.340923309326172 - ], - [ - "▁poorly", - -12.340950965881348 - ], - [ - "karte", - -12.340994834899902 - ], - [ - "▁Publishing", - -12.340999603271484 - ], - [ - "odată", - -12.341140747070312 - ], - [ - "▁scientifique", - -12.341157913208008 - ], - [ - "▁lăsa", - -12.341262817382812 - ], - [ - "▁secol", - -12.34131908416748 - ], - [ - "▁nevertheless", - -12.341392517089844 - ], - [ - "SAT", - -12.341597557067871 - ], - [ - "280", - -12.341651916503906 - ], - [ - "▁prevederi", - -12.341670989990234 - ], - [ - "▁chrome", - -12.342002868652344 - ], - [ - "institut", - -12.342267036437988 - ], - [ - "richtigen", - -12.34228515625 - ], - [ - "▁grief", - -12.342338562011719 - ], - [ - "▁penalties", - -12.342373847961426 - ], - [ - "▁Bayern", - -12.34238052368164 - ], - [ - "▁caramel", - -12.342473983764648 - ], - [ - "Now", - 
-12.342495918273926 - ], - [ - "Stiftung", - -12.342576026916504 - ], - [ - "country", - -12.342737197875977 - ], - [ - "dication", - -12.34278678894043 - ], - [ - "▁Chor", - -12.342801094055176 - ], - [ - "▁rămâne", - -12.342936515808105 - ], - [ - "▁TOP", - -12.34300708770752 - ], - [ - "▁complète", - -12.34301471710205 - ], - [ - "▁Marian", - -12.34302806854248 - ], - [ - "▁Avant", - -12.343121528625488 - ], - [ - "▁Shower", - -12.343156814575195 - ], - [ - "treu", - -12.34316349029541 - ], - [ - "▁chop", - -12.34321403503418 - ], - [ - "▁comfortably", - -12.343220710754395 - ], - [ - "▁autism", - -12.34323787689209 - ], - [ - "▁Sind", - -12.34328556060791 - ], - [ - "▁(20", - -12.343340873718262 - ], - [ - "▁Cinema", - -12.343414306640625 - ], - [ - "compania", - -12.343606948852539 - ], - [ - "▁Lex", - -12.343622207641602 - ], - [ - "▁Sofa", - -12.343716621398926 - ], - [ - "dru", - -12.343753814697266 - ], - [ - "▁verification", - -12.343770027160645 - ], - [ - "▁Immer", - -12.343825340270996 - ], - [ - "lomb", - -12.343829154968262 - ], - [ - "meric", - -12.34385871887207 - ], - [ - "▁slower", - -12.34398365020752 - ], - [ - "▁propag", - -12.344090461730957 - ], - [ - "Inter", - -12.344097137451172 - ], - [ - "selling", - -12.34418773651123 - ], - [ - "▁Bright", - -12.344269752502441 - ], - [ - "condition", - -12.344280242919922 - ], - [ - "PDF", - -12.344291687011719 - ], - [ - "oyez", - -12.344391822814941 - ], - [ - "▁Fried", - -12.344420433044434 - ], - [ - "▁Nazi", - -12.34443187713623 - ], - [ - "▁Buffalo", - -12.344447135925293 - ], - [ - "▁Sue", - -12.344449043273926 - ], - [ - "▁Rhein", - -12.34468936920166 - ], - [ - "▁Klaus", - -12.344889640808105 - ], - [ - "▁indiqu", - -12.344963073730469 - ], - [ - "echte", - -12.344996452331543 - ], - [ - "▁frecvent", - -12.345165252685547 - ], - [ - "▁conveniently", - -12.345187187194824 - ], - [ - "▁Moi", - -12.345197677612305 - ], - [ - "▁greenhouse", - -12.345220565795898 - ], - [ - "▁rédui", - 
-12.34524154663086 - ], - [ - "▁lengthy", - -12.34542179107666 - ], - [ - "verband", - -12.345534324645996 - ], - [ - "inţă", - -12.345622062683105 - ], - [ - "▁rigorous", - -12.345625877380371 - ], - [ - "▁Finish", - -12.34580135345459 - ], - [ - "▁FBI", - -12.346052169799805 - ], - [ - "cultura", - -12.346083641052246 - ], - [ - "▁compartment", - -12.346110343933105 - ], - [ - "▁pretend", - -12.346117973327637 - ], - [ - "▁assembled", - -12.346212387084961 - ], - [ - "▁Nie", - -12.34639835357666 - ], - [ - "fession", - -12.34640884399414 - ], - [ - "▁£2", - -12.34642219543457 - ], - [ - "algré", - -12.3468017578125 - ], - [ - "▁anterior", - -12.346817970275879 - ], - [ - "▁Wissenschaft", - -12.34683609008789 - ], - [ - "▁Harbor", - -12.346923828125 - ], - [ - "lix", - -12.346985816955566 - ], - [ - "=\"", - -12.347049713134766 - ], - [ - "▁breathtaking", - -12.34705638885498 - ], - [ - "▁Stern", - -12.34708309173584 - ], - [ - "▁Internetseite", - -12.347132682800293 - ], - [ - "▁locker", - -12.347216606140137 - ], - [ - "▁feather", - -12.34726619720459 - ], - [ - "Serv", - -12.347297668457031 - ], - [ - "▁snake", - -12.347332000732422 - ], - [ - "▁Border", - -12.347396850585938 - ], - [ - "▁undergo", - -12.347518920898438 - ], - [ - "▁petrol", - -12.347558975219727 - ], - [ - "▁dealership", - -12.3475923538208 - ], - [ - "▁commander", - -12.347596168518066 - ], - [ - "▁Monate", - -12.347599983215332 - ], - [ - "▁Guardian", - -12.347665786743164 - ], - [ - "▁Todd", - -12.347774505615234 - ], - [ - "Ann", - -12.347825050354004 - ], - [ - "ibilité", - -12.347918510437012 - ], - [ - "▁Quarter", - -12.347987174987793 - ], - [ - "▁portray", - -12.348097801208496 - ], - [ - "▁Tai", - -12.34813404083252 - ], - [ - "▁strikes", - -12.348224639892578 - ], - [ - "illage", - -12.348381042480469 - ], - [ - "▁IRS", - -12.348417282104492 - ], - [ - "▁lupta", - -12.348455429077148 - ], - [ - "▁Sper", - -12.348493576049805 - ], - [ - "PRO", - -12.348530769348145 - ], - [ - 
"▁Export", - -12.348549842834473 - ], - [ - "▁crypto", - -12.348587989807129 - ], - [ - "▁barbecue", - -12.348692893981934 - ], - [ - "▁portions", - -12.348787307739258 - ], - [ - "▁explicit", - -12.348793983459473 - ], - [ - "▁angenehm", - -12.348834037780762 - ], - [ - "▁marathon", - -12.348946571350098 - ], - [ - "▁apartament", - -12.348982810974121 - ], - [ - "▁Eva", - -12.349079132080078 - ], - [ - "plate", - -12.349181175231934 - ], - [ - "viel", - -12.34925365447998 - ], - [ - "FIN", - -12.34926986694336 - ], - [ - "dependent", - -12.34935188293457 - ], - [ - "▁cercet", - -12.34942626953125 - ], - [ - "▁midnight", - -12.349499702453613 - ], - [ - "copie", - -12.349563598632812 - ], - [ - "▁companii", - -12.349621772766113 - ], - [ - "▁tenu", - -12.349660873413086 - ], - [ - "1/2", - -12.349662780761719 - ], - [ - "2.4", - -12.349693298339844 - ], - [ - "abri", - -12.349699974060059 - ], - [ - "▁warn", - -12.34980297088623 - ], - [ - "▁luggage", - -12.349875450134277 - ], - [ - "numarul", - -12.349968910217285 - ], - [ - "▁contour", - -12.350014686584473 - ], - [ - "▁Ghost", - -12.350016593933105 - ], - [ - "Angaben", - -12.35012435913086 - ], - [ - "▁unemployment", - -12.350296020507812 - ], - [ - "▁rău", - -12.350380897521973 - ], - [ - "▁dispatch", - -12.350445747375488 - ], - [ - "investissement", - -12.350547790527344 - ], - [ - "▁passt", - -12.35057258605957 - ], - [ - "▁Germania", - -12.350578308105469 - ], - [ - "▁webpage", - -12.350651741027832 - ], - [ - "▁reservations", - -12.350688934326172 - ], - [ - "▁Kai", - -12.350743293762207 - ], - [ - "▁Cav", - -12.350890159606934 - ], - [ - "▁Patient", - -12.351109504699707 - ], - [ - "ер", - -12.351213455200195 - ], - [ - "▁Belle", - -12.351236343383789 - ], - [ - "▁Nashville", - -12.351296424865723 - ], - [ - "▁Talent", - -12.351332664489746 - ], - [ - "ouvrage", - -12.351364135742188 - ], - [ - "▁bekommt", - -12.351365089416504 - ], - [ - "USA", - -12.351430892944336 - ], - [ - "CES", - 
-12.351432800292969 - ], - [ - "▁Peru", - -12.351499557495117 - ], - [ - "▁erkennen", - -12.35153579711914 - ], - [ - "prinde", - -12.351569175720215 - ], - [ - "▁constitution", - -12.351922035217285 - ], - [ - "itatile", - -12.351998329162598 - ], - [ - "bah", - -12.352147102355957 - ], - [ - "▁avail", - -12.352148056030273 - ], - [ - "▁disponibile", - -12.352149963378906 - ], - [ - "hér", - -12.352258682250977 - ], - [ - "ол", - -12.352411270141602 - ], - [ - "▁startups", - -12.352435111999512 - ], - [ - "▁carton", - -12.352485656738281 - ], - [ - "▁Newsletter", - -12.35251235961914 - ], - [ - "éti", - -12.352560997009277 - ], - [ - "▁investigating", - -12.352779388427734 - ], - [ - "itul", - -12.352925300598145 - ], - [ - "touch", - -12.352962493896484 - ], - [ - "Sport", - -12.353137016296387 - ], - [ - "AME", - -12.353203773498535 - ], - [ - "MIN", - -12.353222846984863 - ], - [ - "metry", - -12.353371620178223 - ], - [ - "icy", - -12.353492736816406 - ], - [ - "▁Luna", - -12.35351848602295 - ], - [ - "▁asthma", - -12.353614807128906 - ], - [ - "▁conduc", - -12.35365104675293 - ], - [ - "▁Ari", - -12.35369873046875 - ], - [ - "trust", - -12.353832244873047 - ], - [ - "▁defines", - -12.353894233703613 - ], - [ - "▁Blend", - -12.353927612304688 - ], - [ - "azo", - -12.353989601135254 - ], - [ - "▁sweep", - -12.354169845581055 - ], - [ - "lope", - -12.354331016540527 - ], - [ - "ţinut", - -12.35439682006836 - ], - [ - "WD", - -12.354503631591797 - ], - [ - "▁appetite", - -12.354619979858398 - ], - [ - "▁Seed", - -12.354753494262695 - ], - [ - "Friend", - -12.354854583740234 - ], - [ - "▁repet", - -12.354876518249512 - ], - [ - "▁throat", - -12.354936599731445 - ], - [ - "philosoph", - -12.355141639709473 - ], - [ - "▁connaître", - -12.355156898498535 - ], - [ - "▁Counter", - -12.355299949645996 - ], - [ - "▁Anforderungen", - -12.35533332824707 - ], - [ - "▁Polit", - -12.355363845825195 - ], - [ - "▁Weather", - -12.3554048538208 - ], - [ - "bow", - 
-12.355423927307129 - ], - [ - "▁recreation", - -12.355484008789062 - ], - [ - "▁culinary", - -12.355571746826172 - ], - [ - "▁plage", - -12.355609893798828 - ], - [ - "▁Cruz", - -12.355659484863281 - ], - [ - "▁equip", - -12.355668067932129 - ], - [ - "▁Recent", - -12.355697631835938 - ], - [ - "LED", - -12.355767250061035 - ], - [ - "▁steak", - -12.355772972106934 - ], - [ - "▁belly", - -12.355880737304688 - ], - [ - "photo", - -12.356130599975586 - ], - [ - "▁lakes", - -12.35623836517334 - ], - [ - "▁intact", - -12.356287956237793 - ], - [ - "▁spiral", - -12.356386184692383 - ], - [ - "▁Billy", - -12.356468200683594 - ], - [ - "▁Understanding", - -12.356534957885742 - ], - [ - "▁Lay", - -12.356558799743652 - ], - [ - "▁roster", - -12.356632232666016 - ], - [ - "▁admire", - -12.356647491455078 - ], - [ - "▁android", - -12.356732368469238 - ], - [ - "▁technician", - -12.356734275817871 - ], - [ - "gène", - -12.356818199157715 - ], - [ - "motiv", - -12.356954574584961 - ], - [ - "▁Boat", - -12.356988906860352 - ], - [ - "▁genießen", - -12.357000350952148 - ], - [ - "▁Geschmack", - -12.357001304626465 - ], - [ - "▁heroes", - -12.3570556640625 - ], - [ - "▁1800", - -12.357137680053711 - ], - [ - "numeroase", - -12.35776138305664 - ], - [ - "▁anschließend", - -12.357802391052246 - ], - [ - "▁Spur", - -12.357813835144043 - ], - [ - "▁clarify", - -12.35784912109375 - ], - [ - "▁warmer", - -12.357889175415039 - ], - [ - "▁Ranch", - -12.357955932617188 - ], - [ - "▁simti", - -12.358024597167969 - ], - [ - "Thank", - -12.35838508605957 - ], - [ - "▁freight", - -12.358434677124023 - ], - [ - "▁administrators", - -12.358453750610352 - ], - [ - "Reg", - -12.358588218688965 - ], - [ - "Această", - -12.358670234680176 - ], - [ - "▁legume", - -12.358741760253906 - ], - [ - "▁utilizare", - -12.358786582946777 - ], - [ - "CON", - -12.358904838562012 - ], - [ - "urgi", - -12.358917236328125 - ], - [ - "▁Gesicht", - -12.358920097351074 - ], - [ - "▁counselor", - -12.358954429626465 
- ], - [ - "▁mondiale", - -12.359009742736816 - ], - [ - "helm", - -12.359137535095215 - ], - [ - "▁Promo", - -12.359156608581543 - ], - [ - "▁Schweiz", - -12.35917854309082 - ], - [ - "Ich", - -12.35929012298584 - ], - [ - "▁intalni", - -12.359295845031738 - ], - [ - "▁Bloom", - -12.359318733215332 - ], - [ - "▁Score", - -12.359362602233887 - ], - [ - "▁Fruit", - -12.35944652557373 - ], - [ - "▁constraints", - -12.359447479248047 - ], - [ - "▁farmer", - -12.359745979309082 - ], - [ - "▁précise", - -12.359807014465332 - ], - [ - "evaluating", - -12.359868049621582 - ], - [ - "▁Period", - -12.359891891479492 - ], - [ - "byte", - -12.359893798828125 - ], - [ - "wah", - -12.360025405883789 - ], - [ - "Mac", - -12.360123634338379 - ], - [ - "iron", - -12.360197067260742 - ], - [ - "′", - -12.360337257385254 - ], - [ - "▁tehnic", - -12.360539436340332 - ], - [ - "▁legat", - -12.36054515838623 - ], - [ - "▁Pilot", - -12.360574722290039 - ], - [ - "▁Carpet", - -12.36064624786377 - ], - [ - "TEN", - -12.360812187194824 - ], - [ - "▁shareholders", - -12.36082649230957 - ], - [ - "vină", - -12.360880851745605 - ], - [ - "▁parole", - -12.360939979553223 - ], - [ - "ătă", - -12.360984802246094 - ], - [ - "bbing", - -12.361000061035156 - ], - [ - "▁switched", - -12.361002922058105 - ], - [ - "▁Petro", - -12.361010551452637 - ], - [ - "▁Vertrags", - -12.36111831665039 - ], - [ - "cham", - -12.361178398132324 - ], - [ - "wang", - -12.361284255981445 - ], - [ - "▁Bean", - -12.36139965057373 - ], - [ - "minister", - -12.361442565917969 - ], - [ - "▁Wu", - -12.361522674560547 - ], - [ - "▁Olympics", - -12.361539840698242 - ], - [ - "tipul", - -12.361542701721191 - ], - [ - "▁Citi", - -12.36166763305664 - ], - [ - "▁Fold", - -12.361873626708984 - ], - [ - "▁Partei", - -12.361940383911133 - ], - [ - "▁centrale", - -12.361984252929688 - ], - [ - "île", - -12.362032890319824 - ], - [ - "pflicht", - -12.362175941467285 - ], - [ - "heli", - -12.362398147583008 - ], - [ - "▁erwartet", - 
-12.362414360046387 - ], - [ - "▁oferta", - -12.362458229064941 - ], - [ - "▁NHS", - -12.36246395111084 - ], - [ - "annon", - -12.362570762634277 - ], - [ - "▁Rud", - -12.362701416015625 - ], - [ - "▁Stuttgart", - -12.362737655639648 - ], - [ - "▁rămas", - -12.362746238708496 - ], - [ - "▁eliminated", - -12.36275577545166 - ], - [ - "▁hiding", - -12.362797737121582 - ], - [ - "▁cadeau", - -12.362832069396973 - ], - [ - "▁mock", - -12.363115310668945 - ], - [ - "▁elder", - -12.363333702087402 - ], - [ - "▁Liz", - -12.363364219665527 - ], - [ - "aji", - -12.363544464111328 - ], - [ - "▁endlich", - -12.363653182983398 - ], - [ - "sufficient", - -12.363668441772461 - ], - [ - "▁zusätzliche", - -12.363712310791016 - ], - [ - "scient", - -12.363757133483887 - ], - [ - "▁Adjust", - -12.363883972167969 - ], - [ - "▁incentive", - -12.363945007324219 - ], - [ - "▁Papa", - -12.364012718200684 - ], - [ - "▁Pharma", - -12.364041328430176 - ], - [ - "▁conflicts", - -12.364107131958008 - ], - [ - "zählen", - -12.364113807678223 - ], - [ - "▁chien", - -12.364118576049805 - ], - [ - "KB", - -12.36413288116455 - ], - [ - "ultimi", - -12.364188194274902 - ], - [ - "▁Jul", - -12.36421012878418 - ], - [ - "▁Male", - -12.36422061920166 - ], - [ - "▁viewer", - -12.36427116394043 - ], - [ - "▁Sector", - -12.364328384399414 - ], - [ - "▁REAL", - -12.364344596862793 - ], - [ - "▁arbitr", - -12.36436939239502 - ], - [ - "resistant", - -12.364399909973145 - ], - [ - "▁Bristol", - -12.364423751831055 - ], - [ - "▁shy", - -12.364540100097656 - ], - [ - "SW", - -12.364593505859375 - ], - [ - "▁Kirk", - -12.36460018157959 - ], - [ - "centrul", - -12.364653587341309 - ], - [ - "▁Venezuela", - -12.364657402038574 - ], - [ - "▁communicating", - -12.364657402038574 - ], - [ - "▁Chemical", - -12.364663124084473 - ], - [ - "▁surprises", - -12.364843368530273 - ], - [ - "▁Jamie", - -12.364933967590332 - ], - [ - "▁Heavy", - -12.364965438842773 - ], - [ - "▁turnover", - -12.36498737335205 - ], - [ - 
"▁étudiants", - -12.365114212036133 - ], - [ - "welcher", - -12.365124702453613 - ], - [ - "▁preturi", - -12.365200996398926 - ], - [ - "▁Mono", - -12.365283966064453 - ], - [ - "▁paddle", - -12.365309715270996 - ], - [ - "▁accountability", - -12.365364074707031 - ], - [ - "OUS", - -12.365592956542969 - ], - [ - "▁marketers", - -12.365762710571289 - ], - [ - "fection", - -12.365900993347168 - ], - [ - "▁Outside", - -12.365921020507812 - ], - [ - "▁Jefferson", - -12.366114616394043 - ], - [ - "oaie", - -12.36617660522461 - ], - [ - "tenue", - -12.366275787353516 - ], - [ - "HU", - -12.366329193115234 - ], - [ - "Très", - -12.36639404296875 - ], - [ - "valoarea", - -12.36642837524414 - ], - [ - "103", - -12.366482734680176 - ], - [ - "▁Privacy", - -12.366580963134766 - ], - [ - "▁Leistungen", - -12.366598129272461 - ], - [ - "(3)", - -12.36662483215332 - ], - [ - "▁études", - -12.366734504699707 - ], - [ - "sko", - -12.366750717163086 - ], - [ - "drum", - -12.366822242736816 - ], - [ - "▁lamb", - -12.366842269897461 - ], - [ - "▁nicio", - -12.367094993591309 - ], - [ - "▁NATO", - -12.367104530334473 - ], - [ - "▁Freitag", - -12.367178916931152 - ], - [ - "▁precedent", - -12.367178916931152 - ], - [ - "▁partenaires", - -12.367202758789062 - ], - [ - "▁companiei", - -12.367234230041504 - ], - [ - "▁Plaza", - -12.367249488830566 - ], - [ - "▁disruption", - -12.367274284362793 - ], - [ - "▁violations", - -12.367338180541992 - ], - [ - "▁Reference", - -12.367446899414062 - ], - [ - "▁habitants", - -12.36770248413086 - ], - [ - "▁compost", - -12.36776351928711 - ], - [ - "▁citoyen", - -12.367785453796387 - ], - [ - "▁Historical", - -12.367857933044434 - ], - [ - "vollen", - -12.36793327331543 - ], - [ - "▁Eck", - -12.36815357208252 - ], - [ - "▁lumii", - -12.368180274963379 - ], - [ - "▁reusit", - -12.368278503417969 - ], - [ - "genic", - -12.368307113647461 - ], - [ - "Why", - -12.368436813354492 - ], - [ - "ASE", - -12.368474006652832 - ], - [ - "▁athlete", - 
-12.36854076385498 - ], - [ - "▁Spitze", - -12.368559837341309 - ], - [ - "▁schimbat", - -12.368566513061523 - ], - [ - "▁anonymous", - -12.368850708007812 - ], - [ - "jedes", - -12.368856430053711 - ], - [ - "exclu", - -12.368874549865723 - ], - [ - "factor", - -12.369199752807617 - ], - [ - "▁Dezember", - -12.369231224060059 - ], - [ - "▁scientist", - -12.369373321533203 - ], - [ - "▁likelihood", - -12.36947250366211 - ], - [ - "▁Rhode", - -12.369488716125488 - ], - [ - "▁Balance", - -12.369521141052246 - ], - [ - "istoria", - -12.36959457397461 - ], - [ - "▁Neil", - -12.369780540466309 - ], - [ - "▁bush", - -12.369919776916504 - ], - [ - "▁Ergebnisse", - -12.369935989379883 - ], - [ - "▁Sinn", - -12.369956016540527 - ], - [ - "▁spezielle", - -12.370128631591797 - ], - [ - "▁jucat", - -12.37015438079834 - ], - [ - "▁spite", - -12.370179176330566 - ], - [ - "▁Ultimate", - -12.370365142822266 - ], - [ - "▁fructe", - -12.370401382446289 - ], - [ - "▁asleep", - -12.370441436767578 - ], - [ - "▁Goal", - -12.370539665222168 - ], - [ - "▁PAR", - -12.370631217956543 - ], - [ - "▁rows", - -12.370705604553223 - ], - [ - "▁Fol", - -12.3709135055542 - ], - [ - "▁durata", - -12.370945930480957 - ], - [ - "▁traditionnel", - -12.37100887298584 - ], - [ - "▁tema", - -12.37122917175293 - ], - [ - "▁crédit", - -12.371232986450195 - ], - [ - "smallest", - -12.371358871459961 - ], - [ - "▁amino", - -12.371358871459961 - ], - [ - "▁elephant", - -12.371405601501465 - ], - [ - "▁tubes", - -12.371685028076172 - ], - [ - "▁Verwendung", - -12.371719360351562 - ], - [ - "▁Excellence", - -12.371889114379883 - ], - [ - "▁utilities", - -12.371962547302246 - ], - [ - "frau", - -12.372111320495605 - ], - [ - "▁poze", - -12.3721342086792 - ], - [ - "août", - -12.372307777404785 - ], - [ - "ango", - -12.372514724731445 - ], - [ - "give", - -12.372532844543457 - ], - [ - "▁appelé", - -12.372576713562012 - ], - [ - "▁yeast", - -12.372671127319336 - ], - [ - "▁enrollment", - -12.372676849365234 - ], 
- [ - "organiz", - -12.3727445602417 - ], - [ - "▁asociat", - -12.372753143310547 - ], - [ - "▁cattle", - -12.372772216796875 - ], - [ - "▁Solution", - -12.372798919677734 - ], - [ - "evoke", - -12.372807502746582 - ], - [ - "▁Hampshire", - -12.372857093811035 - ], - [ - "▁yeah", - -12.372878074645996 - ], - [ - "▁Argentina", - -12.372928619384766 - ], - [ - "▁abnormal", - -12.373022079467773 - ], - [ - "▁Heights", - -12.373082160949707 - ], - [ - "▁Mitchell", - -12.373099327087402 - ], - [ - "▁Quad", - -12.373350143432617 - ], - [ - "▁textures", - -12.373382568359375 - ], - [ - "▁coalition", - -12.373384475708008 - ], - [ - "▁dataset", - -12.37338924407959 - ], - [ - "World", - -12.373438835144043 - ], - [ - "ständ", - -12.373456001281738 - ], - [ - "▁groove", - -12.373476028442383 - ], - [ - "▁emotionally", - -12.373562812805176 - ], - [ - "▁preciz", - -12.373636245727539 - ], - [ - "kte", - -12.373741149902344 - ], - [ - "berechtigt", - -12.373828887939453 - ], - [ - "▁1971", - -12.373888969421387 - ], - [ - "grandes", - -12.373907089233398 - ], - [ - "▁Broadway", - -12.37391185760498 - ], - [ - "▁comunicat", - -12.373994827270508 - ], - [ - "nui", - -12.37402629852295 - ], - [ - "GER", - -12.374079704284668 - ], - [ - "pick", - -12.374125480651855 - ], - [ - "inscrit", - -12.37414264678955 - ], - [ - "▁Gross", - -12.374258995056152 - ], - [ - "▁McDonald", - -12.374310493469238 - ], - [ - "▁Zero", - -12.374330520629883 - ], - [ - "▁Halb", - -12.374341011047363 - ], - [ - "▁caractère", - -12.374553680419922 - ], - [ - "▁doctrine", - -12.374553680419922 - ], - [ - "▁Sinne", - -12.37458610534668 - ], - [ - "MLS", - -12.374594688415527 - ], - [ - "▁réel", - -12.374759674072266 - ], - [ - "▁Ful", - -12.37476921081543 - ], - [ - "limiting", - -12.37483024597168 - ], - [ - "▁Gan", - -12.374870300292969 - ], - [ - "▁exclude", - -12.37490463256836 - ], - [ - "imba", - -12.374974250793457 - ], - [ - "rolul", - -12.374991416931152 - ], - [ - "▁veggies", - 
-12.375059127807617 - ], - [ - "▁fasci", - -12.375092506408691 - ], - [ - "▁oval", - -12.375173568725586 - ], - [ - "▁contacter", - -12.375221252441406 - ], - [ - "▁linking", - -12.375279426574707 - ], - [ - "▁knit", - -12.375308990478516 - ], - [ - "▁enroll", - -12.375504493713379 - ], - [ - "▁dédié", - -12.375533103942871 - ], - [ - "▁renting", - -12.375541687011719 - ], - [ - "▁genera", - -12.37567138671875 - ], - [ - "citing", - -12.375691413879395 - ], - [ - "▁bend", - -12.375700950622559 - ], - [ - "guin", - -12.375752449035645 - ], - [ - "▁caregiver", - -12.375768661499023 - ], - [ - "▁könnt", - -12.375791549682617 - ], - [ - "▁Scripture", - -12.375795364379883 - ], - [ - "▁Mic", - -12.375899314880371 - ], - [ - "▁Denmark", - -12.37590217590332 - ], - [ - "▁qualifying", - -12.375917434692383 - ], - [ - "▁costumes", - -12.375958442687988 - ], - [ - "▁dwelling", - -12.37601375579834 - ], - [ - "▁recrut", - -12.376099586486816 - ], - [ - "▁bedding", - -12.37618637084961 - ], - [ - "gesprochen", - -12.376253128051758 - ], - [ - "▁editors", - -12.376386642456055 - ], - [ - "/12", - -12.37657642364502 - ], - [ - "▁cumparat", - -12.376583099365234 - ], - [ - "fiction", - -12.376730918884277 - ], - [ - "▁spinal", - -12.376740455627441 - ], - [ - "▁pathway", - -12.376799583435059 - ], - [ - "▁vârst", - -12.37683391571045 - ], - [ - "mba", - -12.376874923706055 - ], - [ - "▁enthusiastic", - -12.37692642211914 - ], - [ - "▁Watt", - -12.37697982788086 - ], - [ - "symptom", - -12.376992225646973 - ], - [ - "▁pup", - -12.37712287902832 - ], - [ - "▁glorious", - -12.377225875854492 - ], - [ - "▁fața", - -12.377228736877441 - ], - [ - "▁prohibited", - -12.377256393432617 - ], - [ - "vergleich", - -12.377286911010742 - ], - [ - "▁suspected", - -12.377334594726562 - ], - [ - "▁Railway", - -12.377381324768066 - ], - [ - "▁Aujourd", - -12.377469062805176 - ], - [ - "▁Patients", - -12.377476692199707 - ], - [ - "▁séance", - -12.377501487731934 - ], - [ - "▁contraire", - 
-12.377503395080566 - ], - [ - "▁cuvânt", - -12.37771224975586 - ], - [ - "▁trotzdem", - -12.37773609161377 - ], - [ - "émission", - -12.377795219421387 - ], - [ - "▁bore", - -12.37782096862793 - ], - [ - "▁safeguard", - -12.377851486206055 - ], - [ - "▁galleries", - -12.37820053100586 - ], - [ - "cron", - -12.378268241882324 - ], - [ - "▁Rica", - -12.378335952758789 - ], - [ - "fläche", - -12.37839126586914 - ], - [ - "▁Slow", - -12.37842082977295 - ], - [ - "▁vara", - -12.378549575805664 - ], - [ - "▁Swan", - -12.378564834594727 - ], - [ - "▁compounds", - -12.378564834594727 - ], - [ - "▁Slo", - -12.378621101379395 - ], - [ - "▁accommodations", - -12.378621101379395 - ], - [ - "▁Putin", - -12.378708839416504 - ], - [ - "▁undertaken", - -12.378767967224121 - ], - [ - "▁prépar", - -12.37879467010498 - ], - [ - "▁gandi", - -12.37881088256836 - ], - [ - "sediul", - -12.378924369812012 - ], - [ - "▁Nathan", - -12.379143714904785 - ], - [ - "▁fountain", - -12.379173278808594 - ], - [ - "▁mère", - -12.379194259643555 - ], - [ - "fatty", - -12.379201889038086 - ], - [ - "▁concentrated", - -12.379241943359375 - ], - [ - "richtung", - -12.379300117492676 - ], - [ - "▁appropriately", - -12.37955379486084 - ], - [ - "107", - -12.379631996154785 - ], - [ - "▁shark", - -12.379735946655273 - ], - [ - "▁Topic", - -12.379867553710938 - ], - [ - "▁Ausstellung", - -12.379880905151367 - ], - [ - "▁SUA", - -12.380267143249512 - ], - [ - "SER", - -12.380359649658203 - ], - [ - "▁Nicole", - -12.38039779663086 - ], - [ - "▁utilisateurs", - -12.380620956420898 - ], - [ - "▁Brazilian", - -12.380753517150879 - ], - [ - "▁continut", - -12.380865097045898 - ], - [ - "▁sanatate", - -12.380881309509277 - ], - [ - "faudra", - -12.380882263183594 - ], - [ - "nahm", - -12.380938529968262 - ], - [ - "▁Specific", - -12.381153106689453 - ], - [ - "aiba", - -12.381199836730957 - ], - [ - "cepând", - -12.381296157836914 - ], - [ - "▁Beer", - -12.381366729736328 - ], - [ - "roni", - -12.381616592407227 
- ], - [ - "kay", - -12.381636619567871 - ], - [ - "▁gravity", - -12.381844520568848 - ], - [ - "▁verfügt", - -12.381856918334961 - ], - [ - "7:30", - -12.381878852844238 - ], - [ - "▁Players", - -12.381945610046387 - ], - [ - "▁Industries", - -12.38198184967041 - ], - [ - "punkte", - -12.382119178771973 - ], - [ - "▁yacht", - -12.382135391235352 - ], - [ - "-04", - -12.382149696350098 - ], - [ - "onné", - -12.382192611694336 - ], - [ - "▁Cards", - -12.382221221923828 - ], - [ - "▁fete", - -12.382420539855957 - ], - [ - "breaking", - -12.38257884979248 - ], - [ - "baum", - -12.382621765136719 - ], - [ - "nada", - -12.382651329040527 - ], - [ - "▁geplant", - -12.382750511169434 - ], - [ - "genuinely", - -12.382766723632812 - ], - [ - "talk", - -12.382871627807617 - ], - [ - "▁disadvantage", - -12.382920265197754 - ], - [ - "▁shutter", - -12.383003234863281 - ], - [ - "virus", - -12.38302230834961 - ], - [ - "▁cricket", - -12.38308048248291 - ], - [ - "▁comenzi", - -12.383102416992188 - ], - [ - "hier", - -12.383170127868652 - ], - [ - "▁aufzu", - -12.383198738098145 - ], - [ - "▁Rez", - -12.38321304321289 - ], - [ - "▁conclusions", - -12.383329391479492 - ], - [ - "▁Wang", - -12.383509635925293 - ], - [ - "Darüber", - -12.383524894714355 - ], - [ - "▁CSS", - -12.383573532104492 - ], - [ - "CW", - -12.383780479431152 - ], - [ - "▁Chr", - -12.383790969848633 - ], - [ - "▁traded", - -12.383843421936035 - ], - [ - "▁Schon", - -12.384265899658203 - ], - [ - "mped", - -12.38429069519043 - ], - [ - "▁alloy", - -12.384385108947754 - ], - [ - "AVE", - -12.38451099395752 - ], - [ - "▁imagery", - -12.384542465209961 - ], - [ - "▁resurse", - -12.38479995727539 - ], - [ - "▁Thunder", - -12.384834289550781 - ], - [ - "▁schimbare", - -12.384860038757324 - ], - [ - "▁Youtube", - -12.38499927520752 - ], - [ - "▁Monster", - -12.385189056396484 - ], - [ - "phil", - -12.385234832763672 - ], - [ - "▁bébé", - -12.385284423828125 - ], - [ - "Creating", - -12.385428428649902 - ], - [ - 
"ănă", - -12.385466575622559 - ], - [ - "▁Staat", - -12.385504722595215 - ], - [ - "adică", - -12.385531425476074 - ], - [ - "▁boyfriend", - -12.385552406311035 - ], - [ - "▁Winner", - -12.385594367980957 - ], - [ - "▁disputes", - -12.385653495788574 - ], - [ - "▁lush", - -12.3856840133667 - ], - [ - "▁CMS", - -12.385719299316406 - ], - [ - "▁locaux", - -12.385725021362305 - ], - [ - "▁Verfahren", - -12.38576889038086 - ], - [ - "▁Café", - -12.385786056518555 - ], - [ - "▁Vorstand", - -12.385870933532715 - ], - [ - "▁lucrat", - -12.385960578918457 - ], - [ - "▁Root", - -12.38602352142334 - ], - [ - "▁decis", - -12.386059761047363 - ], - [ - "▁Shadow", - -12.386062622070312 - ], - [ - "▁countryside", - -12.386067390441895 - ], - [ - "▁analiza", - -12.386114120483398 - ], - [ - "obos", - -12.38616943359375 - ], - [ - "opera", - -12.386175155639648 - ], - [ - "actu", - -12.386207580566406 - ], - [ - "▁Songs", - -12.3864164352417 - ], - [ - "reifen", - -12.38648509979248 - ], - [ - "▁hilft", - -12.386650085449219 - ], - [ - "region", - -12.386727333068848 - ], - [ - "▁categoria", - -12.387001991271973 - ], - [ - "capturing", - -12.38701343536377 - ], - [ - "▁1967", - -12.387025833129883 - ], - [ - "▁optimized", - -12.387032508850098 - ], - [ - "▁Dim", - -12.387353897094727 - ], - [ - "▁adapté", - -12.387447357177734 - ], - [ - "zeichnet", - -12.387524604797363 - ], - [ - "▁strada", - -12.387625694274902 - ], - [ - "fulness", - -12.38774585723877 - ], - [ - "▁technically", - -12.38774585723877 - ], - [ - "▁marker", - -12.387757301330566 - ], - [ - "▁vizita", - -12.387808799743652 - ], - [ - "▁imperative", - -12.387986183166504 - ], - [ - "▁pensé", - -12.38802719116211 - ], - [ - "▁drilling", - -12.388030052185059 - ], - [ - "ISA", - -12.38818073272705 - ], - [ - "▁Massage", - -12.388201713562012 - ], - [ - "▁Terry", - -12.388238906860352 - ], - [ - "▁pourtant", - -12.38835334777832 - ], - [ - "▁declaration", - -12.388440132141113 - ], - [ - "▁instructors", - 
-12.388453483581543 - ], - [ - "Eventually", - -12.38847827911377 - ], - [ - "▁banned", - -12.38847827911377 - ], - [ - "MAT", - -12.388520240783691 - ], - [ - "▁medici", - -12.38856315612793 - ], - [ - "▁Warm", - -12.388615608215332 - ], - [ - "▁trec", - -12.388731002807617 - ], - [ - "▁ecran", - -12.388763427734375 - ], - [ - "▁goat", - -12.388838768005371 - ], - [ - "▁manipulation", - -12.388850212097168 - ], - [ - "▁mayor", - -12.388898849487305 - ], - [ - "▁unterwegs", - -12.388975143432617 - ], - [ - "▁journals", - -12.3890380859375 - ], - [ - "▁hedge", - -12.389239311218262 - ], - [ - "Merc", - -12.389300346374512 - ], - [ - "▁joueurs", - -12.389411926269531 - ], - [ - "▁Religion", - -12.3894624710083 - ], - [ - "▁Mountains", - -12.389477729797363 - ], - [ - "▁renewed", - -12.389497756958008 - ], - [ - "▁Limit", - -12.389543533325195 - ], - [ - "ikea", - -12.389771461486816 - ], - [ - "▁utiliza", - -12.38977336883545 - ], - [ - "sogenannte", - -12.389808654785156 - ], - [ - "0.2", - -12.389836311340332 - ], - [ - "▁Organ", - -12.38987922668457 - ], - [ - "▁Shakespeare", - -12.389952659606934 - ], - [ - "▁Maintenance", - -12.38995361328125 - ], - [ - "▁Wärme", - -12.389954566955566 - ], - [ - "▁Northwest", - -12.390060424804688 - ], - [ - "▁numit", - -12.390106201171875 - ], - [ - "▁mica", - -12.390165328979492 - ], - [ - "turm", - -12.390168190002441 - ], - [ - "▁motivate", - -12.390250205993652 - ], - [ - "▁Staats", - -12.390355110168457 - ], - [ - "optimum", - -12.390487670898438 - ], - [ - "▁sortir", - -12.390546798706055 - ], - [ - "▁Asset", - -12.390555381774902 - ], - [ - "▁hervorragend", - -12.390692710876465 - ], - [ - "▁commentary", - -12.39071273803711 - ], - [ - "▁actuellement", - -12.390732765197754 - ], - [ - "NER", - -12.390765190124512 - ], - [ - "NL", - -12.390789985656738 - ], - [ - "ritt", - -12.390803337097168 - ], - [ - "▁Wirtschafts", - -12.390813827514648 - ], - [ - "träger", - -12.390840530395508 - ], - [ - "▁Versand", - 
-12.390870094299316 - ], - [ - "▁nostri", - -12.390953063964844 - ], - [ - "▁enorm", - -12.391227722167969 - ], - [ - "▁whale", - -12.391260147094727 - ], - [ - "▁Aufgabe", - -12.391277313232422 - ], - [ - "▁unfair", - -12.391291618347168 - ], - [ - "▁Cord", - -12.391315460205078 - ], - [ - "incorporating", - -12.39134693145752 - ], - [ - "luck", - -12.39157772064209 - ], - [ - "Afrique", - -12.39168643951416 - ], - [ - "▁coated", - -12.391857147216797 - ], - [ - "▁india", - -12.391908645629883 - ], - [ - "▁temporarily", - -12.39193058013916 - ], - [ - "▁ciuda", - -12.392097473144531 - ], - [ - "▁coral", - -12.392184257507324 - ], - [ - "▁wirkt", - -12.392203330993652 - ], - [ - "▁folding", - -12.392309188842773 - ], - [ - "wichtigsten", - -12.392398834228516 - ], - [ - "impacted", - -12.392422676086426 - ], - [ - "▁wählen", - -12.392423629760742 - ], - [ - "▁differentiate", - -12.392492294311523 - ], - [ - "▁froid", - -12.392544746398926 - ], - [ - "▁hug", - -12.39255142211914 - ], - [ - "▁construi", - -12.39255428314209 - ], - [ - "▁membru", - -12.392603874206543 - ], - [ - "▁masculin", - -12.392667770385742 - ], - [ - "partisan", - -12.392711639404297 - ], - [ - "▁schimba", - -12.392725944519043 - ], - [ - "▁economies", - -12.392827987670898 - ], - [ - "▁Abraham", - -12.392914772033691 - ], - [ - "wesen", - -12.393013954162598 - ], - [ - "enia", - -12.393026351928711 - ], - [ - "▁answering", - -12.393080711364746 - ], - [ - "▁activități", - -12.39309024810791 - ], - [ - "▁mémoire", - -12.393160820007324 - ], - [ - "▁versucht", - -12.393305778503418 - ], - [ - "ember", - -12.39333438873291 - ], - [ - "▁instala", - -12.39334774017334 - ], - [ - "▁eligibility", - -12.393407821655273 - ], - [ - "▁enjoyment", - -12.393409729003906 - ], - [ - "▁Arme", - -12.39350414276123 - ], - [ - "although", - -12.393534660339355 - ], - [ - "▁encompass", - -12.393596649169922 - ], - [ - "▁zufrieden", - -12.393658638000488 - ], - [ - "Script", - -12.393691062927246 - ], - [ - "KG", 
- -12.39385986328125 - ], - [ - "▁adhesive", - -12.393902778625488 - ], - [ - "▁Verkehrs", - -12.393908500671387 - ], - [ - "▁monitored", - -12.394103050231934 - ], - [ - "▁Conservation", - -12.394148826599121 - ], - [ - "hav", - -12.394156455993652 - ], - [ - "▁Above", - -12.394174575805664 - ], - [ - "▁Former", - -12.394241333007812 - ], - [ - "▁Certain", - -12.394250869750977 - ], - [ - "saving", - -12.394311904907227 - ], - [ - "▁Pun", - -12.394390106201172 - ], - [ - "▁awkward", - -12.394397735595703 - ], - [ - "▁Pretty", - -12.394410133361816 - ], - [ - "▁scanning", - -12.394417762756348 - ], - [ - "layer", - -12.394527435302734 - ], - [ - "motor", - -12.39453125 - ], - [ - "▁beginnt", - -12.39455795288086 - ], - [ - "▁affiliated", - -12.394681930541992 - ], - [ - "▁archives", - -12.394686698913574 - ], - [ - "▁sunshine", - -12.394892692565918 - ], - [ - "kha", - -12.394988059997559 - ], - [ - "▁investigated", - -12.395149230957031 - ], - [ - "▁fantas", - -12.395277976989746 - ], - [ - "▁united", - -12.395355224609375 - ], - [ - "allegedly", - -12.395373344421387 - ], - [ - "▁Eugen", - -12.3955078125 - ], - [ - "▁proprie", - -12.395843505859375 - ], - [ - "uca", - -12.396183013916016 - ], - [ - "DES", - -12.396187782287598 - ], - [ - "ştii", - -12.396190643310547 - ], - [ - "▁Running", - -12.39620590209961 - ], - [ - "lbstverständlich", - -12.396248817443848 - ], - [ - "index", - -12.396300315856934 - ], - [ - "▁studiu", - -12.396512031555176 - ], - [ - "URE", - -12.396553039550781 - ], - [ - "gültig", - -12.396627426147461 - ], - [ - "▁lundi", - -12.396649360656738 - ], - [ - "▁Zucker", - -12.396650314331055 - ], - [ - "▁positively", - -12.396721839904785 - ], - [ - "folgenden", - -12.396758079528809 - ], - [ - "anță", - -12.396800994873047 - ], - [ - "▁clan", - -12.396866798400879 - ], - [ - "▁literacy", - -12.396879196166992 - ], - [ - "▁ober", - -12.39699935913086 - ], - [ - "John", - -12.397003173828125 - ], - [ - "greg", - -12.39700984954834 - ], - [ - 
"▁titlu", - -12.397049903869629 - ], - [ - "▁ţări", - -12.39707088470459 - ], - [ - "Bra", - -12.397100448608398 - ], - [ - "▁Evans", - -12.397164344787598 - ], - [ - "modern", - -12.397172927856445 - ], - [ - "▁hauteur", - -12.397353172302246 - ], - [ - "refers", - -12.397416114807129 - ], - [ - "▁plasma", - -12.397575378417969 - ], - [ - "▁optic", - -12.397595405578613 - ], - [ - "▁shampoo", - -12.397619247436523 - ], - [ - "▁cheek", - -12.397727966308594 - ], - [ - "opted", - -12.397741317749023 - ], - [ - "▁persönlich", - -12.397832870483398 - ], - [ - "▁1945", - -12.398118019104004 - ], - [ - "ICI", - -12.398193359375 - ], - [ - "biotic", - -12.398222923278809 - ], - [ - "▁Beruf", - -12.398372650146484 - ], - [ - "▁trez", - -12.398383140563965 - ], - [ - "▁diploma", - -12.398388862609863 - ], - [ - "nahmen", - -12.398421287536621 - ], - [ - "▁curl", - -12.398625373840332 - ], - [ - "▁agricole", - -12.398824691772461 - ], - [ - "▁recomand", - -12.398844718933105 - ], - [ - "▁pediatric", - -12.398862838745117 - ], - [ - "Fiecare", - -12.39887523651123 - ], - [ - "Anlage", - -12.398906707763672 - ], - [ - "weiß", - -12.398974418640137 - ], - [ - "elecommunication", - -12.39898681640625 - ], - [ - "hog", - -12.399184226989746 - ], - [ - "▁Stamp", - -12.399364471435547 - ], - [ - "▁Tipp", - -12.399369239807129 - ], - [ - "▁kindness", - -12.399415969848633 - ], - [ - "▁Marina", - -12.399577140808105 - ], - [ - "▁Gleich", - -12.39963436126709 - ], - [ - "▁grij", - -12.39970588684082 - ], - [ - "▁desperate", - -12.39974594116211 - ], - [ - "▁recordings", - -12.399842262268066 - ], - [ - "▁neglect", - -12.399861335754395 - ], - [ - "▁inherent", - -12.400035858154297 - ], - [ - "▁Rezept", - -12.400138854980469 - ], - [ - "▁soins", - -12.400164604187012 - ], - [ - "▁brut", - -12.400250434875488 - ], - [ - "▁revolutionary", - -12.400495529174805 - ], - [ - "▁liberté", - -12.400530815124512 - ], - [ - "cours", - -12.400945663452148 - ], - [ - "▁Similar", - 
-12.401247024536133 - ], - [ - "▁cheveux", - -12.40136432647705 - ], - [ - "▁ieftin", - -12.401599884033203 - ], - [ - "▁promovare", - -12.40160846710205 - ], - [ - "▁grains", - -12.401729583740234 - ], - [ - "ти", - -12.401749610900879 - ], - [ - "▁fonctionnement", - -12.401789665222168 - ], - [ - "▁Coming", - -12.401832580566406 - ], - [ - "▁analytical", - -12.401847839355469 - ], - [ - "▁simplify", - -12.401856422424316 - ], - [ - "▁chambres", - -12.401893615722656 - ], - [ - "▁fifty", - -12.401930809020996 - ], - [ - "jour", - -12.402070999145508 - ], - [ - "▁(17", - -12.402194023132324 - ], - [ - "cărui", - -12.402292251586914 - ], - [ - "▁harmony", - -12.402352333068848 - ], - [ - "grin", - -12.402355194091797 - ], - [ - "▁drunk", - -12.402359962463379 - ], - [ - "260", - -12.402374267578125 - ], - [ - "3-5", - -12.40243148803711 - ], - [ - "▁articole", - -12.402442932128906 - ], - [ - "▁flooding", - -12.402482986450195 - ], - [ - "halle", - -12.402580261230469 - ], - [ - "▁defects", - -12.40276050567627 - ], - [ - "▁rifle", - -12.402839660644531 - ], - [ - "▁Boc", - -12.402843475341797 - ], - [ - "▁Athletic", - -12.40284538269043 - ], - [ - "▁acordat", - -12.40292739868164 - ], - [ - "AIR", - -12.402969360351562 - ], - [ - "▁entwickeln", - -12.403104782104492 - ], - [ - "▁Advance", - -12.403188705444336 - ], - [ - "▁Heil", - -12.403216361999512 - ], - [ - "Stainless", - -12.403345108032227 - ], - [ - "▁Psychology", - -12.40337085723877 - ], - [ - "▁omul", - -12.403435707092285 - ], - [ - "▁Arbeiten", - -12.403494834899902 - ], - [ - "▁rabbit", - -12.403495788574219 - ], - [ - "▁méta", - -12.40351390838623 - ], - [ - "ismul", - -12.403534889221191 - ], - [ - "▁Herausforderung", - -12.403594970703125 - ], - [ - "▁Euch", - -12.403654098510742 - ], - [ - "geschichte", - -12.40390682220459 - ], - [ - "▁Milk", - -12.404057502746582 - ], - [ - "▁pregăt", - -12.404065132141113 - ], - [ - "▁Standort", - -12.404141426086426 - ], - [ - "Val", - -12.404180526733398 - ], 
- [ - "▁Ronald", - -12.404350280761719 - ], - [ - "▁Werbe", - -12.404558181762695 - ], - [ - "▁restrict", - -12.404658317565918 - ], - [ - "▁tablespoon", - -12.404844284057617 - ], - [ - "▁Amendment", - -12.404845237731934 - ], - [ - "▁Johnny", - -12.404914855957031 - ], - [ - "▁lively", - -12.404938697814941 - ], - [ - "ORD", - -12.405147552490234 - ], - [ - "▁mulţi", - -12.40523624420166 - ], - [ - "èrent", - -12.405241012573242 - ], - [ - "Every", - -12.405277252197266 - ], - [ - "eignet", - -12.405296325683594 - ], - [ - "GD", - -12.40546989440918 - ], - [ - "▁Ghana", - -12.405628204345703 - ], - [ - "▁wealthy", - -12.40576171875 - ], - [ - "▁advocates", - -12.405818939208984 - ], - [ - "▁Campaign", - -12.40584659576416 - ], - [ - "▁posters", - -12.405964851379395 - ], - [ - "flug", - -12.406011581420898 - ], - [ - "▁métier", - -12.406139373779297 - ], - [ - "kir", - -12.406148910522461 - ], - [ - "bond", - -12.406176567077637 - ], - [ - "datorita", - -12.406188011169434 - ], - [ - "▁Hochzeit", - -12.406230926513672 - ], - [ - "▁effectué", - -12.406271934509277 - ], - [ - "▁angles", - -12.40654182434082 - ], - [ - "▁Electrical", - -12.406705856323242 - ], - [ - "▁Administrator", - -12.40674114227295 - ], - [ - "▁spur", - -12.407389640808105 - ], - [ - "▁größere", - -12.407444953918457 - ], - [ - "woke", - -12.407515525817871 - ], - [ - "▁gewinnen", - -12.407689094543457 - ], - [ - "▁ajută", - -12.407712936401367 - ], - [ - "▁ventilation", - -12.407853126525879 - ], - [ - "▁viaţa", - -12.407853126525879 - ], - [ - "▁Dinner", - -12.408079147338867 - ], - [ - "respond", - -12.408095359802246 - ], - [ - "▁OEM", - -12.408120155334473 - ], - [ - "▁affair", - -12.4081392288208 - ], - [ - "▁öffentlich", - -12.408143043518066 - ], - [ - "ENS", - -12.408209800720215 - ], - [ - "▁Cent", - -12.408224105834961 - ], - [ - "▁făc", - -12.408267974853516 - ], - [ - "▁Doppel", - -12.408285140991211 - ], - [ - "▁fericit", - -12.408363342285156 - ], - [ - "▁coordon", - 
-12.40845775604248 - ], - [ - "geht", - -12.408547401428223 - ], - [ - "▁perfekte", - -12.408610343933105 - ], - [ - "▁sportive", - -12.408700942993164 - ], - [ - "▁proiectul", - -12.40870189666748 - ], - [ - "▁deadly", - -12.408804893493652 - ], - [ - "Geschäft", - -12.408822059631348 - ], - [ - "▁inspirational", - -12.408854484558105 - ], - [ - "+1", - -12.409013748168945 - ], - [ - "▁pearl", - -12.409022331237793 - ], - [ - "▁scrub", - -12.409036636352539 - ], - [ - "▁scheint", - -12.409079551696777 - ], - [ - "poo", - -12.409147262573242 - ], - [ - "▁Pier", - -12.409220695495605 - ], - [ - "▁commented", - -12.409285545349121 - ], - [ - "lute", - -12.409302711486816 - ], - [ - "▁cancelled", - -12.409488677978516 - ], - [ - "Win", - -12.409605979919434 - ], - [ - "▁payroll", - -12.409781455993652 - ], - [ - "▁varsta", - -12.409881591796875 - ], - [ - "stuffed", - -12.410097122192383 - ], - [ - "▁beads", - -12.410138130187988 - ], - [ - "▁poems", - -12.410356521606445 - ], - [ - "pokesman", - -12.410399436950684 - ], - [ - "▁checklist", - -12.410523414611816 - ], - [ - "▁Mich", - -12.410636901855469 - ], - [ - "GEN", - -12.410676002502441 - ], - [ - "▁Lau", - -12.410783767700195 - ], - [ - "▁stie", - -12.410965919494629 - ], - [ - "▁Lovely", - -12.4110107421875 - ], - [ - "▁Anschluss", - -12.411062240600586 - ], - [ - "▁personaj", - -12.41108226776123 - ], - [ - "▁ausgestattet", - -12.411121368408203 - ], - [ - "▁beginners", - -12.411163330078125 - ], - [ - "▁noon", - -12.411189079284668 - ], - [ - "▁celule", - -12.41128921508789 - ], - [ - "Trans", - -12.411324501037598 - ], - [ - "boot", - -12.411331176757812 - ], - [ - "▁drumul", - -12.41136646270752 - ], - [ - "gruppen", - -12.41140079498291 - ], - [ - "étend", - -12.41140365600586 - ], - [ - "▁risques", - -12.411405563354492 - ], - [ - "acclaimed", - -12.411447525024414 - ], - [ - "▁celelalte", - -12.411617279052734 - ], - [ - "▁condiţii", - -12.411620140075684 - ], - [ - "▁skiing", - -12.411685943603516 - ], 
- [ - "▁optimale", - -12.411689758300781 - ], - [ - "technology", - -12.411773681640625 - ], - [ - "▁renew", - -12.411784172058105 - ], - [ - "Cloud", - -12.41179084777832 - ], - [ - "▁damaging", - -12.411905288696289 - ], - [ - "GT", - -12.412219047546387 - ], - [ - "▁Reform", - -12.41230583190918 - ], - [ - "vedem", - -12.412349700927734 - ], - [ - "▁indicat", - -12.412461280822754 - ], - [ - "▁Maker", - -12.412467002868652 - ], - [ - "▁lichid", - -12.412582397460938 - ], - [ - "3.1", - -12.412614822387695 - ], - [ - "păt", - -12.412620544433594 - ], - [ - "lumina", - -12.41264820098877 - ], - [ - "▁Situ", - -12.412806510925293 - ], - [ - "▁Archives", - -12.412857055664062 - ], - [ - "▁allergies", - -12.41287899017334 - ], - [ - "▁Cameron", - -12.412883758544922 - ], - [ - "▁Immun", - -12.412899017333984 - ], - [ - "wissenschaftlich", - -12.41301441192627 - ], - [ - "▁supplémentaire", - -12.413128852844238 - ], - [ - "▁puterea", - -12.413261413574219 - ], - [ - "Lab", - -12.413331985473633 - ], - [ - "inspired", - -12.413384437561035 - ], - [ - "▁shrink", - -12.413403511047363 - ], - [ - "▁voit", - -12.413426399230957 - ], - [ - "▁chopped", - -12.413467407226562 - ], - [ - "▁Franz", - -12.413537979125977 - ], - [ - "oku", - -12.413652420043945 - ], - [ - "▁suppress", - -12.413673400878906 - ], - [ - "▁impress", - -12.413751602172852 - ], - [ - "▁Liga", - -12.413755416870117 - ], - [ - "▁Eight", - -12.41378402709961 - ], - [ - "720", - -12.413795471191406 - ], - [ - "▁securely", - -12.413870811462402 - ], - [ - "KU", - -12.413934707641602 - ], - [ - "modell", - -12.413992881774902 - ], - [ - "Ensure", - -12.414154052734375 - ], - [ - "größte", - -12.414204597473145 - ], - [ - "▁réuni", - -12.414215087890625 - ], - [ - "▁Internal", - -12.41423225402832 - ], - [ - "▁Punkte", - -12.414320945739746 - ], - [ - "▁replicate", - -12.414412498474121 - ], - [ - "▁spreadsheet", - -12.414434432983398 - ], - [ - "▁Hindu", - -12.414549827575684 - ], - [ - "▁Cham", - 
-12.414578437805176 - ], - [ - "nati", - -12.414670944213867 - ], - [ - "imply", - -12.414679527282715 - ], - [ - "funded", - -12.414894104003906 - ], - [ - "▁charitable", - -12.414896011352539 - ], - [ - "▁imagined", - -12.415014266967773 - ], - [ - "hausen", - -12.41517448425293 - ], - [ - "Keeping", - -12.415239334106445 - ], - [ - "▁attitudes", - -12.415287971496582 - ], - [ - "esque", - -12.415365219116211 - ], - [ - "▁Tennis", - -12.415409088134766 - ], - [ - "Jeremy", - -12.415410041809082 - ], - [ - "▁majeur", - -12.415475845336914 - ], - [ - "▁stii", - -12.4155912399292 - ], - [ - "▁herbal", - -12.415790557861328 - ], - [ - "▁cauta", - -12.41580867767334 - ], - [ - "▁voluntary", - -12.415828704833984 - ], - [ - "wohl", - -12.415877342224121 - ], - [ - "▁ideea", - -12.41588306427002 - ], - [ - "▁WW", - -12.415899276733398 - ], - [ - "▁erneut", - -12.416010856628418 - ], - [ - "größten", - -12.416094779968262 - ], - [ - "Grâce", - -12.416159629821777 - ], - [ - "▁Köln", - -12.416193008422852 - ], - [ - "▁mobilier", - -12.416199684143066 - ], - [ - "▁fool", - -12.416254043579102 - ], - [ - "▁Calcul", - -12.416295051574707 - ], - [ - "attaque", - -12.41637897491455 - ], - [ - "▁digestive", - -12.41656494140625 - ], - [ - "performance", - -12.416647911071777 - ], - [ - "▁homeowner", - -12.41675853729248 - ], - [ - "▁hunger", - -12.4169282913208 - ], - [ - "2.3", - -12.41696834564209 - ], - [ - "▁Sort", - -12.417085647583008 - ], - [ - "▁Dennis", - -12.41723918914795 - ], - [ - "▁certificat", - -12.417250633239746 - ], - [ - "▁Canal", - -12.417337417602539 - ], - [ - "▁Yesterday", - -12.417424201965332 - ], - [ - "▁sausage", - -12.417499542236328 - ], - [ - "▁perdu", - -12.417736053466797 - ], - [ - "ösen", - -12.417741775512695 - ], - [ - "▁preserved", - -12.417750358581543 - ], - [ - "▁trendy", - -12.4177885055542 - ], - [ - "▁iubire", - -12.417935371398926 - ], - [ - "▁grandfather", - -12.417961120605469 - ], - [ - "▁shoppers", - -12.41820240020752 - ], - [ - 
"▁verschieden", - -12.418252944946289 - ], - [ - "▁gagner", - -12.41826343536377 - ], - [ - "▁lucra", - -12.418437004089355 - ], - [ - "metru", - -12.418464660644531 - ], - [ - "buz", - -12.418469429016113 - ], - [ - "▁flourish", - -12.418484687805176 - ], - [ - "affin", - -12.418523788452148 - ], - [ - "▁Pflanzen", - -12.41858196258545 - ], - [ - "agh", - -12.418588638305664 - ], - [ - "▁Gill", - -12.418660163879395 - ], - [ - "▁Kä", - -12.418671607971191 - ], - [ - "▁Wege", - -12.41876220703125 - ], - [ - "▁Liberal", - -12.418929100036621 - ], - [ - "▁Glasgow", - -12.418944358825684 - ], - [ - "Objekt", - -12.4189453125 - ], - [ - "▁Huawei", - -12.4189453125 - ], - [ - "appropri", - -12.418986320495605 - ], - [ - "▁genius", - -12.419037818908691 - ], - [ - "▁brokers", - -12.419068336486816 - ], - [ - "▁themed", - -12.41918659210205 - ], - [ - "▁barre", - -12.419210433959961 - ], - [ - "1.7", - -12.419219017028809 - ], - [ - "▁Electro", - -12.419303894042969 - ], - [ - "▁umbrella", - -12.419333457946777 - ], - [ - "▁advisory", - -12.419417381286621 - ], - [ - "▁comport", - -12.419421195983887 - ], - [ - "▁neuer", - -12.419452667236328 - ], - [ - "▁Wick", - -12.419568061828613 - ], - [ - "wak", - -12.419618606567383 - ], - [ - "▁Woman", - -12.419695854187012 - ], - [ - "▁lesser", - -12.419843673706055 - ], - [ - "▁replied", - -12.419987678527832 - ], - [ - "▁représente", - -12.420050621032715 - ], - [ - "▁thé", - -12.420135498046875 - ], - [ - "Deutsch", - -12.420428276062012 - ], - [ - "Cat", - -12.420483589172363 - ], - [ - "▁équipes", - -12.420534133911133 - ], - [ - "▁spider", - -12.420578956604004 - ], - [ - "▁Gaming", - -12.420589447021484 - ], - [ - "▁Liste", - -12.420592308044434 - ], - [ - "▁affection", - -12.420639038085938 - ], - [ - "lipsa", - -12.420982360839844 - ], - [ - "▁Spider", - -12.420987129211426 - ], - [ - "▁Julia", - -12.421034812927246 - ], - [ - "anlagen", - -12.421159744262695 - ], - [ - "Kon", - -12.421363830566406 - ], - [ - "nței", - 
-12.421368598937988 - ], - [ - "▁Verwaltung", - -12.421483993530273 - ], - [ - "▁raspuns", - -12.421489715576172 - ], - [ - "samt", - -12.421491622924805 - ], - [ - "▁creștere", - -12.421512603759766 - ], - [ - "▁decorate", - -12.421701431274414 - ], - [ - "▁Chain", - -12.422021865844727 - ], - [ - "ów", - -12.422050476074219 - ], - [ - "0-0", - -12.422104835510254 - ], - [ - "▁Cran", - -12.422407150268555 - ], - [ - "▁streak", - -12.42242431640625 - ], - [ - "ор", - -12.422517776489258 - ], - [ - "▁căuta", - -12.422754287719727 - ], - [ - "wende", - -12.422801971435547 - ], - [ - "▁haine", - -12.42280387878418 - ], - [ - "▁landscaping", - -12.423009872436523 - ], - [ - "▁historian", - -12.423016548156738 - ], - [ - "▁grandchildren", - -12.423033714294434 - ], - [ - "▁crawl", - -12.423056602478027 - ], - [ - "▁Cub", - -12.423239707946777 - ], - [ - "▁nécessaires", - -12.423515319824219 - ], - [ - "▁swift", - -12.42352294921875 - ], - [ - "▁calculation", - -12.423656463623047 - ], - [ - "▁acteurs", - -12.423715591430664 - ], - [ - "VT", - -12.423752784729004 - ], - [ - "▁Hristos", - -12.423778533935547 - ], - [ - "▁slices", - -12.423850059509277 - ], - [ - "See", - -12.424203872680664 - ], - [ - "▁Bran", - -12.424233436584473 - ], - [ - "Symbol", - -12.424449920654297 - ], - [ - "▁allowance", - -12.424492835998535 - ], - [ - "▁Effective", - -12.424537658691406 - ], - [ - "▁Wünsche", - -12.424539566040039 - ], - [ - "▁shiny", - -12.424569129943848 - ], - [ - "▁professionalism", - -12.424715995788574 - ], - [ - "/6", - -12.424970626831055 - ], - [ - "▁terrasse", - -12.425087928771973 - ], - [ - "▁researcher", - -12.425156593322754 - ], - [ - "▁fragile", - -12.425203323364258 - ], - [ - "▁greeting", - -12.425274848937988 - ], - [ - "freien", - -12.4253511428833 - ], - [ - "▁valuation", - -12.425372123718262 - ], - [ - "▁incur", - -12.425386428833008 - ], - [ - "▁Zwischen", - -12.425559997558594 - ], - [ - "▁comfy", - -12.425569534301758 - ], - [ - "▁méthode", - 
-12.42569351196289 - ], - [ - "▁Pirate", - -12.425816535949707 - ], - [ - "▁Moto", - -12.425822257995605 - ], - [ - "(6)", - -12.425823211669922 - ], - [ - "▁devin", - -12.42582893371582 - ], - [ - "▁civic", - -12.425837516784668 - ], - [ - "usage", - -12.425889015197754 - ], - [ - "▁istorie", - -12.425945281982422 - ], - [ - "▁piste", - -12.425955772399902 - ], - [ - "▁Rug", - -12.426091194152832 - ], - [ - "pä", - -12.426129341125488 - ], - [ - "▁matur", - -12.426148414611816 - ], - [ - "CAS", - -12.426155090332031 - ], - [ - "TIC", - -12.42618465423584 - ], - [ - "▁Reduce", - -12.426234245300293 - ], - [ - "▁commemorat", - -12.426321983337402 - ], - [ - "▁cease", - -12.42653751373291 - ], - [ - "unterschiedliche", - -12.42656421661377 - ], - [ - "▁cinnamon", - -12.426581382751465 - ], - [ - "▁Font", - -12.426583290100098 - ], - [ - "▁justify", - -12.426751136779785 - ], - [ - "deteriorat", - -12.426797866821289 - ], - [ - "▁Schön", - -12.42684555053711 - ], - [ - "plain", - -12.426993370056152 - ], - [ - "frist", - -12.427002906799316 - ], - [ - "▁helmet", - -12.42712116241455 - ], - [ - "▁statute", - -12.42721939086914 - ], - [ - "accept", - -12.427236557006836 - ], - [ - "▁1,5", - -12.42724323272705 - ], - [ - "▁recon", - -12.42724323272705 - ], - [ - "▁Möbel", - -12.427348136901855 - ], - [ - "▁idées", - -12.427367210388184 - ], - [ - "automat", - -12.427552223205566 - ], - [ - "Team", - -12.42758846282959 - ], - [ - "▁performers", - -12.427688598632812 - ], - [ - "▁microphone", - -12.427722930908203 - ], - [ - "impotriva", - -12.427775382995605 - ], - [ - "▁pillows", - -12.42780876159668 - ], - [ - "▁accountable", - -12.427812576293945 - ], - [ - "▁strings", - -12.42782974243164 - ], - [ - "hydrate", - -12.427835464477539 - ], - [ - "▁Yan", - -12.427865028381348 - ], - [ - "starea", - -12.427918434143066 - ], - [ - "▁présenté", - -12.42793083190918 - ], - [ - "▁extensively", - -12.428048133850098 - ], - [ - "äst", - -12.428114891052246 - ], - [ - 
"▁correlation", - -12.428115844726562 - ], - [ - "bespoke", - -12.428119659423828 - ], - [ - "▁creste", - -12.428196907043457 - ], - [ - "▁Armenia", - -12.428248405456543 - ], - [ - "nose", - -12.428426742553711 - ], - [ - "▁strengthening", - -12.428604125976562 - ], - [ - "▁Horizon", - -12.428627014160156 - ], - [ - "▁obesity", - -12.428627967834473 - ], - [ - "seasoned", - -12.428686141967773 - ], - [ - "▁screenshot", - -12.428736686706543 - ], - [ - "girl", - -12.42875862121582 - ], - [ - "▁hardest", - -12.428826332092285 - ], - [ - "▁weakness", - -12.428855895996094 - ], - [ - "effectuer", - -12.429012298583984 - ], - [ - "▁Florence", - -12.429034233093262 - ], - [ - "▁Europene", - -12.429062843322754 - ], - [ - "triggered", - -12.429333686828613 - ], - [ - "Apparently", - -12.42939567565918 - ], - [ - "▁diagnose", - -12.42943286895752 - ], - [ - "rushed", - -12.429494857788086 - ], - [ - "▁trotz", - -12.429516792297363 - ], - [ - "▁spécial", - -12.429680824279785 - ], - [ - "▁lumi", - -12.429783821105957 - ], - [ - "7:00", - -12.429877281188965 - ], - [ - "▁publicat", - -12.429903984069824 - ], - [ - "ос", - -12.430086135864258 - ], - [ - "▁hue", - -12.430136680603027 - ], - [ - "▁termination", - -12.430139541625977 - ], - [ - "▁Nam", - -12.430240631103516 - ], - [ - "Well", - -12.430376052856445 - ], - [ - "▁Extract", - -12.430441856384277 - ], - [ - "atiile", - -12.43062686920166 - ], - [ - "▁vivid", - -12.43076229095459 - ], - [ - "hrs", - -12.430858612060547 - ], - [ - "▁povesti", - -12.430984497070312 - ], - [ - "stehenden", - -12.430988311767578 - ], - [ - "▁informieren", - -12.431070327758789 - ], - [ - "employed", - -12.431133270263672 - ], - [ - "▁armor", - -12.431180953979492 - ], - [ - "▁Columbus", - -12.431191444396973 - ], - [ - "Registr", - -12.431200981140137 - ], - [ - "▁Kamera", - -12.431203842163086 - ], - [ - "▁ugly", - -12.431203842163086 - ], - [ - "outil", - -12.431234359741211 - ], - [ - "▁evenly", - -12.43134593963623 - ], - [ - 
"lungul", - -12.431349754333496 - ], - [ - "koch", - -12.431439399719238 - ], - [ - "▁Dig", - -12.431450843811035 - ], - [ - "purely", - -12.431489944458008 - ], - [ - "▁Surf", - -12.431560516357422 - ], - [ - "rilla", - -12.431628227233887 - ], - [ - "▁Watson", - -12.43171215057373 - ], - [ - "trug", - -12.431719779968262 - ], - [ - "figuring", - -12.431784629821777 - ], - [ - "▁competitor", - -12.431807518005371 - ], - [ - "▁humid", - -12.431889533996582 - ], - [ - "▁Lawyer", - -12.43189811706543 - ], - [ - "Added", - -12.43205451965332 - ], - [ - "▁salva", - -12.432056427001953 - ], - [ - "▁drainage", - -12.4321870803833 - ], - [ - "Featuring", - -12.432220458984375 - ], - [ - "▁Pel", - -12.43234634399414 - ], - [ - "▁acasa", - -12.432611465454102 - ], - [ - "▁expectation", - -12.43265438079834 - ], - [ - "gibt", - -12.432663917541504 - ], - [ - "▁marginal", - -12.432831764221191 - ], - [ - "ceni", - -12.433028221130371 - ], - [ - "▁européen", - -12.433065414428711 - ], - [ - "clav", - -12.433090209960938 - ], - [ - "▁Shot", - -12.433167457580566 - ], - [ - "commun", - -12.43322467803955 - ], - [ - "▁Calendar", - -12.433247566223145 - ], - [ - "▁trek", - -12.433348655700684 - ], - [ - "rechtliche", - -12.433406829833984 - ], - [ - "▁Perry", - -12.43342399597168 - ], - [ - "▁surge", - -12.433484077453613 - ], - [ - "geschäft", - -12.433504104614258 - ], - [ - "paced", - -12.433793067932129 - ], - [ - "depend", - -12.433871269226074 - ], - [ - "▁Sache", - -12.433947563171387 - ], - [ - "▁Example", - -12.433998107910156 - ], - [ - "▁lider", - -12.434118270874023 - ], - [ - "▁nochmal", - -12.434240341186523 - ], - [ - "▁Present", - -12.434243202209473 - ], - [ - "KW", - -12.434335708618164 - ], - [ - "prompted", - -12.434350967407227 - ], - [ - "logique", - -12.434444427490234 - ], - [ - "Université", - -12.434466361999512 - ], - [ - "lith", - -12.434489250183105 - ], - [ - "▁Gefahr", - -12.434579849243164 - ], - [ - "▁Acid", - -12.434625625610352 - ], - [ - 
"objets", - -12.434791564941406 - ], - [ - "▁societies", - -12.434791564941406 - ], - [ - "▁distraction", - -12.434816360473633 - ], - [ - "▁puissance", - -12.434934616088867 - ], - [ - "▁alleviat", - -12.435026168823242 - ], - [ - "▁Capitol", - -12.435050010681152 - ], - [ - "▁Heim", - -12.435129165649414 - ], - [ - "judicial", - -12.435230255126953 - ], - [ - "▁nowadays", - -12.435309410095215 - ], - [ - "▁Hammer", - -12.435317039489746 - ], - [ - "▁metallic", - -12.435327529907227 - ], - [ - "▁distr", - -12.435388565063477 - ], - [ - "▁dispos", - -12.435397148132324 - ], - [ - "profile", - -12.435408592224121 - ], - [ - "▁Nicolas", - -12.435602188110352 - ], - [ - "▁presa", - -12.435760498046875 - ], - [ - "augh", - -12.43578052520752 - ], - [ - "schuss", - -12.435787200927734 - ], - [ - "▁Diana", - -12.436062812805176 - ], - [ - "4-5", - -12.436097145080566 - ], - [ - "▁Chapel", - -12.43612003326416 - ], - [ - "▁zahar", - -12.436150550842285 - ], - [ - "âmb", - -12.4362154006958 - ], - [ - "▁Tarif", - -12.436264991760254 - ], - [ - "▁devastating", - -12.436339378356934 - ], - [ - "6:00", - -12.4364013671875 - ], - [ - "▁100,000", - -12.43645191192627 - ], - [ - "NIC", - -12.436580657958984 - ], - [ - "▁Lucas", - -12.436612129211426 - ], - [ - "▁bequem", - -12.436662673950195 - ], - [ - "▁Motion", - -12.436698913574219 - ], - [ - "7,000", - -12.436701774597168 - ], - [ - "▁malware", - -12.436708450317383 - ], - [ - "▁avenue", - -12.436723709106445 - ], - [ - "▁manger", - -12.436747550964355 - ], - [ - "▁Queensland", - -12.436857223510742 - ], - [ - "▁Papier", - -12.436861991882324 - ], - [ - "▁Increase", - -12.436880111694336 - ], - [ - "▁implies", - -12.436954498291016 - ], - [ - "▁äußer", - -12.43697452545166 - ], - [ - "▁Meine", - -12.436980247497559 - ], - [ - "Reuters", - -12.437155723571777 - ], - [ - "▁Belt", - -12.437232971191406 - ], - [ - "Educat", - -12.437251091003418 - ], - [ - "▁Aktion", - -12.437355041503906 - ], - [ - "schläge", - 
-12.437372207641602 - ], - [ - "▁înregistrat", - -12.437426567077637 - ], - [ - "▁Ortho", - -12.43756103515625 - ], - [ - "▁bulbs", - -12.437761306762695 - ], - [ - "kap", - -12.437793731689453 - ], - [ - "▁peinture", - -12.437901496887207 - ], - [ - "▁Lounge", - -12.437907218933105 - ], - [ - "▁Tampa", - -12.438008308410645 - ], - [ - "ifiziert", - -12.438100814819336 - ], - [ - "kinder", - -12.438172340393066 - ], - [ - "▁comparativ", - -12.438281059265137 - ], - [ - "häuser", - -12.438323974609375 - ], - [ - "incarn", - -12.438363075256348 - ], - [ - "▁amazon", - -12.438464164733887 - ], - [ - "▁Southeast", - -12.438505172729492 - ], - [ - "▁economical", - -12.438667297363281 - ], - [ - "▁broth", - -12.438697814941406 - ], - [ - "▁Secure", - -12.438750267028809 - ], - [ - "damals", - -12.438875198364258 - ], - [ - "▁Elementary", - -12.438921928405762 - ], - [ - "▁Wildlife", - -12.438995361328125 - ], - [ - "▁Jewel", - -12.439001083374023 - ], - [ - "▁protocols", - -12.439297676086426 - ], - [ - "▁zbor", - -12.4393892288208 - ], - [ - "▁enthusiasts", - -12.439398765563965 - ], - [ - "▁Mirror", - -12.439444541931152 - ], - [ - "▁soak", - -12.439537048339844 - ], - [ - "▁Sad", - -12.439574241638184 - ], - [ - "▁dishwasher", - -12.439957618713379 - ], - [ - "▁vollständig", - -12.440186500549316 - ], - [ - "▁Vermont", - -12.440407752990723 - ], - [ - "▁caut", - -12.440449714660645 - ], - [ - "▁fournisseur", - -12.440475463867188 - ], - [ - "▁Concrete", - -12.44047737121582 - ], - [ - "▁Instant", - -12.440595626831055 - ], - [ - "▁reveni", - -12.440597534179688 - ], - [ - "▁Surface", - -12.44059944152832 - ], - [ - "zumindest", - -12.440713882446289 - ], - [ - "▁feast", - -12.440725326538086 - ], - [ - "▁stretching", - -12.440803527832031 - ], - [ - "ERA", - -12.440997123718262 - ], - [ - "▁Scholarship", - -12.441020965576172 - ], - [ - "▁vineyard", - -12.4410400390625 - ], - [ - "▁régulièrement", - -12.441083908081055 - ], - [ - "▁patches", - -12.441093444824219 - ], 
- [ - "▁Gamb", - -12.44113540649414 - ], - [ - "▁Vereins", - -12.441152572631836 - ], - [ - "ège", - -12.441372871398926 - ], - [ - "▁constitutional", - -12.441411018371582 - ], - [ - "erreur", - -12.441413879394531 - ], - [ - "▁Colombia", - -12.441514015197754 - ], - [ - "UF", - -12.441618919372559 - ], - [ - "aider", - -12.441665649414062 - ], - [ - "cision", - -12.44180965423584 - ], - [ - "▁publishers", - -12.441913604736328 - ], - [ - "▁prelua", - -12.441967964172363 - ], - [ - "▁keiner", - -12.441990852355957 - ], - [ - "▁amid", - -12.442020416259766 - ], - [ - "▁quantitative", - -12.442031860351562 - ], - [ - "▁decay", - -12.442058563232422 - ], - [ - "▁distinguished", - -12.4420747756958 - ], - [ - "▁Gründe", - -12.442209243774414 - ], - [ - "▁statului", - -12.442362785339355 - ], - [ - "CAT", - -12.442436218261719 - ], - [ - "allow", - -12.442481994628906 - ], - [ - "▁mathematical", - -12.442550659179688 - ], - [ - "▁tragedy", - -12.44255542755127 - ], - [ - "▁heels", - -12.442609786987305 - ], - [ - "opia", - -12.44265365600586 - ], - [ - "▁merger", - -12.4428071975708 - ], - [ - "dispositif", - -12.442813873291016 - ], - [ - "▁pneu", - -12.44283390045166 - ], - [ - "elte", - -12.443058013916016 - ], - [ - "▁Introduction", - -12.443070411682129 - ], - [ - "▁biscuit", - -12.443134307861328 - ], - [ - "▁leftover", - -12.443275451660156 - ], - [ - "▁tester", - -12.443314552307129 - ], - [ - "▁Terre", - -12.443380355834961 - ], - [ - "▁Oui", - -12.44338321685791 - ], - [ - "▁rar", - -12.443520545959473 - ], - [ - "▁beverages", - -12.443666458129883 - ], - [ - "▁parenting", - -12.443892478942871 - ], - [ - "1-0", - -12.444053649902344 - ], - [ - "▁Barry", - -12.44417667388916 - ], - [ - "▁Lynn", - -12.444209098815918 - ], - [ - "▁Tyler", - -12.444262504577637 - ], - [ - "▁fotbal", - -12.44437026977539 - ], - [ - "dron", - -12.444475173950195 - ], - [ - "▁donor", - -12.44455623626709 - ], - [ - "▁drape", - -12.444558143615723 - ], - [ - "▁positioning", - 
-12.444963455200195 - ], - [ - "▁Tang", - -12.445006370544434 - ], - [ - "▁overwhelmed", - -12.445161819458008 - ], - [ - "▁perte", - -12.445192337036133 - ], - [ - "▁blender", - -12.445302963256836 - ], - [ - "TG", - -12.445467948913574 - ], - [ - "GHz", - -12.445490837097168 - ], - [ - "▁administrat", - -12.445719718933105 - ], - [ - "▁glaube", - -12.445771217346191 - ], - [ - "Char", - -12.445947647094727 - ], - [ - "impression", - -12.44627571105957 - ], - [ - "proving", - -12.446297645568848 - ], - [ - "▁Inner", - -12.446434020996094 - ], - [ - "root", - -12.446501731872559 - ], - [ - "▁Gedanken", - -12.446508407592773 - ], - [ - "▁underway", - -12.446596145629883 - ], - [ - "coat", - -12.44660758972168 - ], - [ - "▁thereof", - -12.446663856506348 - ], - [ - "rius", - -12.446700096130371 - ], - [ - "▁intermediate", - -12.446751594543457 - ], - [ - "gmail", - -12.446869850158691 - ], - [ - "114", - -12.446893692016602 - ], - [ - "▁interfere", - -12.446908950805664 - ], - [ - "▁Found", - -12.446930885314941 - ], - [ - "LF", - -12.447071075439453 - ], - [ - "▁equality", - -12.447099685668945 - ], - [ - "▁concurrent", - -12.44710636138916 - ], - [ - "akh", - -12.447107315063477 - ], - [ - "▁touching", - -12.44715690612793 - ], - [ - "▁curiosity", - -12.447235107421875 - ], - [ - "▁rendering", - -12.447263717651367 - ], - [ - "▁1964", - -12.447442054748535 - ], - [ - "sorge", - -12.447468757629395 - ], - [ - "ARC", - -12.447505950927734 - ], - [ - "▁Desktop", - -12.44752311706543 - ], - [ - "▁Tak", - -12.44760799407959 - ], - [ - "filtration", - -12.447651863098145 - ], - [ - "▁gates", - -12.4478759765625 - ], - [ - "Sehr", - -12.44791316986084 - ], - [ - "▁spatiu", - -12.44798755645752 - ], - [ - "▁Leg", - -12.448103904724121 - ], - [ - "▁aviation", - -12.448277473449707 - ], - [ - "wandel", - -12.44827938079834 - ], - [ - "▁Shar", - -12.448323249816895 - ], - [ - "▁Volks", - -12.448409080505371 - ], - [ - "maz", - -12.448698997497559 - ], - [ - "governmental", - 
-12.44874095916748 - ], - [ - "euros", - -12.448819160461426 - ], - [ - "avantage", - -12.448823928833008 - ], - [ - "sitzt", - -12.448856353759766 - ], - [ - "IER", - -12.448920249938965 - ], - [ - "▁Theory", - -12.44894027709961 - ], - [ - "Cependant", - -12.44907283782959 - ], - [ - "▁Teachers", - -12.449080467224121 - ], - [ - "anspruch", - -12.449095726013184 - ], - [ - "▁afecta", - -12.449139595031738 - ], - [ - "enko", - -12.449193000793457 - ], - [ - "▁breeding", - -12.449198722839355 - ], - [ - "▁Peak", - -12.449457168579102 - ], - [ - "▁găsit", - -12.449516296386719 - ], - [ - "▁măsuri", - -12.4495267868042 - ], - [ - "edia", - -12.449625968933105 - ], - [ - "biz", - -12.449640274047852 - ], - [ - "zum", - -12.449776649475098 - ], - [ - "▁schwierig", - -12.449847221374512 - ], - [ - "Sense", - -12.450050354003906 - ], - [ - "▁Jump", - -12.450081825256348 - ], - [ - "▁cocktails", - -12.450108528137207 - ], - [ - "abhängig", - -12.45012378692627 - ], - [ - "realised", - -12.450140953063965 - ], - [ - "▁programul", - -12.450214385986328 - ], - [ - "▁prévu", - -12.450238227844238 - ], - [ - "▁twitter", - -12.450372695922852 - ], - [ - "Union", - -12.450400352478027 - ], - [ - "▁Marathon", - -12.45040225982666 - ], - [ - "▁Christianity", - -12.450432777404785 - ], - [ - "▁Alberta", - -12.450811386108398 - ], - [ - "einheit", - -12.45097827911377 - ], - [ - "▁wellbeing", - -12.450982093811035 - ], - [ - "phen", - -12.451166152954102 - ], - [ - "▁Charleston", - -12.451180458068848 - ], - [ - "▁uncover", - -12.451323509216309 - ], - [ - "▁humaine", - -12.451464653015137 - ], - [ - "▁bleeding", - -12.451531410217285 - ], - [ - "▁manipul", - -12.451532363891602 - ], - [ - "▁humidity", - -12.451570510864258 - ], - [ - "▁Puis", - -12.451748847961426 - ], - [ - "▁aktuell", - -12.451922416687012 - ], - [ - "▁Nissan", - -12.451943397521973 - ], - [ - "▁Eisen", - -12.45202922821045 - ], - [ - "treiben", - -12.452059745788574 - ], - [ - "cios", - -12.452073097229004 - ], 
- [ - "ikh", - -12.452381134033203 - ], - [ - "acquiring", - -12.452466011047363 - ], - [ - "▁Wallpaper", - -12.452488899230957 - ], - [ - "▁rond", - -12.452558517456055 - ], - [ - "▁Doug", - -12.45267391204834 - ], - [ - "sourcing", - -12.452696800231934 - ], - [ - "▁1900", - -12.452825546264648 - ], - [ - "▁buni", - -12.452913284301758 - ], - [ - "vest", - -12.452916145324707 - ], - [ - "▁Bangladesh", - -12.452990531921387 - ], - [ - "Home", - -12.453160285949707 - ], - [ - "▁wrinkle", - -12.453252792358398 - ], - [ - "rado", - -12.453290939331055 - ], - [ - "▁Pain", - -12.45334243774414 - ], - [ - "▁herzlich", - -12.453354835510254 - ], - [ - "MRI", - -12.453426361083984 - ], - [ - "UG", - -12.453631401062012 - ], - [ - "▁Desk", - -12.453679084777832 - ], - [ - "▁remarc", - -12.453718185424805 - ], - [ - "▁sodium", - -12.453857421875 - ], - [ - "▁Jede", - -12.453892707824707 - ], - [ - "▁réelle", - -12.453959465026855 - ], - [ - "▁Polar", - -12.454068183898926 - ], - [ - "▁activists", - -12.454273223876953 - ], - [ - "lasted", - -12.454300880432129 - ], - [ - "Some", - -12.45432186126709 - ], - [ - "ISE", - -12.454338073730469 - ], - [ - "▁peine", - -12.454671859741211 - ], - [ - "▁crude", - -12.454852104187012 - ], - [ - "Maur", - -12.454916954040527 - ], - [ - "▁forcing", - -12.454933166503906 - ], - [ - "▁politici", - -12.454970359802246 - ], - [ - "▁condiții", - -12.454988479614258 - ], - [ - "▁Saving", - -12.454999923706055 - ], - [ - "▁descoperi", - -12.455020904541016 - ], - [ - "avenir", - -12.455055236816406 - ], - [ - "Akt", - -12.455069541931152 - ], - [ - "▁vocabulary", - -12.45509147644043 - ], - [ - "▁pont", - -12.455168724060059 - ], - [ - "West", - -12.45518970489502 - ], - [ - "lenk", - -12.455278396606445 - ], - [ - "▁Verbraucher", - -12.455367088317871 - ], - [ - "affects", - -12.455448150634766 - ], - [ - "▁Flower", - -12.455543518066406 - ], - [ - "▁Nebraska", - -12.455617904663086 - ], - [ - "▁assortment", - -12.455618858337402 - ], - [ - 
"hock", - -12.455619812011719 - ], - [ - "▁discounted", - -12.455803871154785 - ], - [ - "▁Sensor", - -12.455840110778809 - ], - [ - "Lie", - -12.45588207244873 - ], - [ - "▁Volkswagen", - -12.455887794494629 - ], - [ - "isseur", - -12.455888748168945 - ], - [ - "indice", - -12.455936431884766 - ], - [ - "▁scanner", - -12.455986022949219 - ], - [ - "fashioned", - -12.456040382385254 - ], - [ - "▁postal", - -12.456141471862793 - ], - [ - "ouvrir", - -12.45615291595459 - ], - [ - "▁seminars", - -12.45622444152832 - ], - [ - "ioase", - -12.456232070922852 - ], - [ - "▁Stanley", - -12.456260681152344 - ], - [ - "Various", - -12.456335067749023 - ], - [ - "essentiel", - -12.45650577545166 - ], - [ - "▁administered", - -12.456693649291992 - ], - [ - "▁concession", - -12.456748008728027 - ], - [ - "▁mould", - -12.456789016723633 - ], - [ - "▁strongest", - -12.456826210021973 - ], - [ - "Erlebnis", - -12.456933975219727 - ], - [ - "▁ehemalige", - -12.456933975219727 - ], - [ - "▁Tale", - -12.457234382629395 - ], - [ - "▁Buyer", - -12.457353591918945 - ], - [ - "ück", - -12.457578659057617 - ], - [ - "▁Kommentar", - -12.457720756530762 - ], - [ - "▁Schrift", - -12.457756996154785 - ], - [ - "Design", - -12.457792282104492 - ], - [ - "▁stirring", - -12.457937240600586 - ], - [ - "▁towels", - -12.457987785339355 - ], - [ - "▁$30", - -12.458101272583008 - ], - [ - "sprache", - -12.458279609680176 - ], - [ - "▁Regierung", - -12.458346366882324 - ], - [ - "▁nachhaltig", - -12.458406448364258 - ], - [ - "▁électronique", - -12.458515167236328 - ], - [ - "▁Andrei", - -12.458587646484375 - ], - [ - "because", - -12.458647727966309 - ], - [ - "informatique", - -12.458650588989258 - ], - [ - "IGHT", - -12.4586820602417 - ], - [ - "stepping", - -12.4586820602417 - ], - [ - "▁gris", - -12.458748817443848 - ], - [ - "vious", - -12.458773612976074 - ], - [ - "▁upside", - -12.4591064453125 - ], - [ - "▁Examples", - -12.459108352661133 - ], - [ - "IU", - -12.459110260009766 - ], - [ - 
"▁princess", - -12.459111213684082 - ], - [ - "spielen", - -12.45921516418457 - ], - [ - "legung", - -12.45950984954834 - ], - [ - "▁reflecting", - -12.4597806930542 - ], - [ - "▁Processing", - -12.459939002990723 - ], - [ - "▁jungle", - -12.460033416748047 - ], - [ - "▁insects", - -12.46006965637207 - ], - [ - "▁Sibiu", - -12.460220336914062 - ], - [ - "160", - -12.460259437561035 - ], - [ - "▁interessante", - -12.460267066955566 - ], - [ - "▁multimedia", - -12.460455894470215 - ], - [ - "essel", - -12.46049690246582 - ], - [ - "/18", - -12.460647583007812 - ], - [ - "nière", - -12.460683822631836 - ], - [ - "ministru", - -12.46072006225586 - ], - [ - "▁implants", - -12.460826873779297 - ], - [ - "▁Settings", - -12.461360931396484 - ], - [ - "▁invaluable", - -12.461432456970215 - ], - [ - "stains", - -12.461448669433594 - ], - [ - "onym", - -12.461518287658691 - ], - [ - "▁searched", - -12.461570739746094 - ], - [ - "▁disappointment", - -12.461628913879395 - ], - [ - "▁Iranian", - -12.461630821228027 - ], - [ - "▁questionnaire", - -12.461630821228027 - ], - [ - "Founder", - -12.46178913116455 - ], - [ - "▁Bericht", - -12.461792945861816 - ], - [ - "▁youngest", - -12.461896896362305 - ], - [ - "▁Automatic", - -12.461956024169922 - ], - [ - "▁plecat", - -12.46203327178955 - ], - [ - "geber", - -12.462119102478027 - ], - [ - "soweit", - -12.462124824523926 - ], - [ - "▁unfold", - -12.462236404418945 - ], - [ - "▁befinden", - -12.462274551391602 - ], - [ - "▁susţin", - -12.462637901306152 - ], - [ - "▁Mack", - -12.462675094604492 - ], - [ - "▁dificil", - -12.462757110595703 - ], - [ - "enseigne", - -12.463038444519043 - ], - [ - "▁vitamine", - -12.463047981262207 - ], - [ - "▁Memory", - -12.463092803955078 - ], - [ - "ripping", - -12.463129043579102 - ], - [ - "drin", - -12.463146209716797 - ], - [ - "3.2", - -12.463278770446777 - ], - [ - "▁verstehen", - -12.463287353515625 - ], - [ - "▁scaun", - -12.46341323852539 - ], - [ - "▁procédure", - -12.46380615234375 - ], - 
[ - "▁molecules", - -12.463911056518555 - ], - [ - "▁Anzahl", - -12.46391487121582 - ], - [ - "▁yogurt", - -12.464071273803711 - ], - [ - "▁Dominic", - -12.464113235473633 - ], - [ - "▁shocked", - -12.464156150817871 - ], - [ - "▁zilei", - -12.464269638061523 - ], - [ - "▁Heiz", - -12.464412689208984 - ], - [ - "▁Educational", - -12.464571952819824 - ], - [ - "BN", - -12.464577674865723 - ], - [ - "analyzing", - -12.464601516723633 - ], - [ - "hair", - -12.464676856994629 - ], - [ - "spiegel", - -12.464871406555176 - ], - [ - "▁illusion", - -12.464889526367188 - ], - [ - "BG", - -12.46505355834961 - ], - [ - "deductible", - -12.46513557434082 - ], - [ - "▁adj", - -12.4651460647583 - ], - [ - "▁accessory", - -12.465166091918945 - ], - [ - "▁Draw", - -12.465167999267578 - ], - [ - "▁airlines", - -12.46518611907959 - ], - [ - "▁satisfai", - -12.46536636352539 - ], - [ - "▁architects", - -12.465447425842285 - ], - [ - "istische", - -12.465508460998535 - ], - [ - "▁Healthy", - -12.465539932250977 - ], - [ - "großer", - -12.465669631958008 - ], - [ - "▁comunicare", - -12.465764999389648 - ], - [ - "▁Meyer", - -12.46577262878418 - ], - [ - "▁reproduction", - -12.465882301330566 - ], - [ - "▁Manufacturing", - -12.465929985046387 - ], - [ - "immobilier", - -12.465930938720703 - ], - [ - "▁Unterschied", - -12.465958595275879 - ], - [ - "▁cumpara", - -12.466029167175293 - ], - [ - "▁duplicate", - -12.466094017028809 - ], - [ - "▁(16", - -12.466096878051758 - ], - [ - "▁detector", - -12.466279983520508 - ], - [ - "▁observat", - -12.466387748718262 - ], - [ - "▁1965", - -12.466682434082031 - ], - [ - "▁Fantasy", - -12.466728210449219 - ], - [ - "▁brauchen", - -12.466728210449219 - ], - [ - "▁Participants", - -12.466780662536621 - ], - [ - "▁décide", - -12.466817855834961 - ], - [ - "▁kicke", - -12.466819763183594 - ], - [ - "▁SSL", - -12.466885566711426 - ], - [ - "360", - -12.466989517211914 - ], - [ - "Anim", - -12.467019081115723 - ], - [ - "▁cupcake", - -12.467031478881836 
- ], - [ - "▁Lamb", - -12.467107772827148 - ], - [ - "▁Sä", - -12.467155456542969 - ], - [ - "ntă", - -12.46738052368164 - ], - [ - "▁Pig", - -12.467421531677246 - ], - [ - "1,000", - -12.467677116394043 - ], - [ - "nhof", - -12.467782020568848 - ], - [ - "▁discret", - -12.467947959899902 - ], - [ - "▁deloc", - -12.467991828918457 - ], - [ - "▁Bücher", - -12.467999458312988 - ], - [ - "chor", - -12.468042373657227 - ], - [ - "course", - -12.468070030212402 - ], - [ - "▁cough", - -12.468076705932617 - ], - [ - "▁erstellt", - -12.468087196350098 - ], - [ - "▁Than", - -12.468097686767578 - ], - [ - "stätte", - -12.46812915802002 - ], - [ - "▁exceptionally", - -12.468162536621094 - ], - [ - "▁semnal", - -12.468186378479004 - ], - [ - "▁Interessen", - -12.468329429626465 - ], - [ - "ле", - -12.468356132507324 - ], - [ - "xx", - -12.468402862548828 - ], - [ - "▁Veterans", - -12.468422889709473 - ], - [ - "▁Kreuz", - -12.468683242797852 - ], - [ - "▁Nachricht", - -12.468701362609863 - ], - [ - "treated", - -12.468894004821777 - ], - [ - "▁tide", - -12.469230651855469 - ], - [ - "▁nonetheless", - -12.469390869140625 - ], - [ - "▁Subject", - -12.469439506530762 - ], - [ - "▁Stau", - -12.469440460205078 - ], - [ - "▁stickers", - -12.469463348388672 - ], - [ - "Alp", - -12.46950912475586 - ], - [ - "▁flagship", - -12.469541549682617 - ], - [ - "▁trimite", - -12.469619750976562 - ], - [ - "▁polyester", - -12.469664573669434 - ], - [ - "▁locui", - -12.469671249389648 - ], - [ - "▁chili", - -12.46968936920166 - ], - [ - "▁Browser", - -12.469808578491211 - ], - [ - "sieg", - -12.469809532165527 - ], - [ - "▁Arabic", - -12.469876289367676 - ], - [ - "blich", - -12.47001838684082 - ], - [ - "▁wunderbar", - -12.470090866088867 - ], - [ - "▁furnishings", - -12.470210075378418 - ], - [ - "rtie", - -12.470243453979492 - ], - [ - "8.5", - -12.470742225646973 - ], - [ - "▁Sponsor", - -12.471016883850098 - ], - [ - "▁glitter", - -12.471280097961426 - ], - [ - "▁piaț", - 
-12.471402168273926 - ], - [ - "▁interviewed", - -12.471519470214844 - ], - [ - "▁Statistics", - -12.471529006958008 - ], - [ - "▁cerc", - -12.47154712677002 - ], - [ - "augmentation", - -12.47155475616455 - ], - [ - "▁Navi", - -12.471558570861816 - ], - [ - "▁Begriff", - -12.47156047821045 - ], - [ - "▁știu", - -12.471596717834473 - ], - [ - "▁unabhängig", - -12.471778869628906 - ], - [ - "▁könnten", - -12.471978187561035 - ], - [ - "▁travaille", - -12.472000122070312 - ], - [ - "▁companie", - -12.472027778625488 - ], - [ - "▁Scientific", - -12.472061157226562 - ], - [ - "▁Outlook", - -12.472091674804688 - ], - [ - "▁fairy", - -12.472158432006836 - ], - [ - "zam", - -12.472282409667969 - ], - [ - "bak", - -12.472448348999023 - ], - [ - "▁Traffic", - -12.472596168518066 - ], - [ - "gerät", - -12.472671508789062 - ], - [ - "▁freezing", - -12.472701072692871 - ], - [ - "▁broadband", - -12.4727201461792 - ], - [ - "110", - -12.47279167175293 - ], - [ - "▁revenu", - -12.472887992858887 - ], - [ - "listed", - -12.472900390625 - ], - [ - "▁Rico", - -12.472941398620605 - ], - [ - "Laure", - -12.472990036010742 - ], - [ - "ATA", - -12.473112106323242 - ], - [ - "▁participer", - -12.47313117980957 - ], - [ - "▁sponsorship", - -12.473235130310059 - ], - [ - "▁distress", - -12.473286628723145 - ], - [ - "▁Brisbane", - -12.47339916229248 - ], - [ - "schönen", - -12.473437309265137 - ], - [ - "▁fizice", - -12.473465919494629 - ], - [ - "▁Political", - -12.47362232208252 - ], - [ - "uhr", - -12.473657608032227 - ], - [ - "▁procedura", - -12.473713874816895 - ], - [ - "▁hervor", - -12.473770141601562 - ], - [ - "melted", - -12.473776817321777 - ], - [ - "▁Emp", - -12.47384262084961 - ], - [ - "▁Ernährung", - -12.4739351272583 - ], - [ - "▁Pendant", - -12.473944664001465 - ], - [ - "▁recipients", - -12.474047660827637 - ], - [ - "Claude", - -12.474133491516113 - ], - [ - "▁regimen", - -12.47415828704834 - ], - [ - "expo", - -12.474346160888672 - ], - [ - "adevăr", - 
-12.47437858581543 - ], - [ - "▁critically", - -12.474440574645996 - ], - [ - "▁grabbe", - -12.474468231201172 - ], - [ - "▁Kann", - -12.474474906921387 - ], - [ - "▁directeur", - -12.474613189697266 - ], - [ - "gator", - -12.474908828735352 - ], - [ - "problem", - -12.474910736083984 - ], - [ - "scribe", - -12.474913597106934 - ], - [ - "▁exig", - -12.474920272827148 - ], - [ - "Tri", - -12.474969863891602 - ], - [ - "▁aqua", - -12.475631713867188 - ], - [ - "appréci", - -12.47569465637207 - ], - [ - "▁viaţă", - -12.47571849822998 - ], - [ - "▁dominate", - -12.475865364074707 - ], - [ - "disc", - -12.475889205932617 - ], - [ - "▁conseiller", - -12.47603988647461 - ], - [ - "▁shuttle", - -12.476180076599121 - ], - [ - "▁Status", - -12.47623062133789 - ], - [ - "▁ausreichend", - -12.476371765136719 - ], - [ - "▁spät", - -12.476411819458008 - ], - [ - "▁remainder", - -12.476417541503906 - ], - [ - "wett", - -12.476430892944336 - ], - [ - "schlossen", - -12.476491928100586 - ], - [ - "PAC", - -12.476505279541016 - ], - [ - "▁suprafata", - -12.476617813110352 - ], - [ - "5.000", - -12.476673126220703 - ], - [ - "supplying", - -12.47673225402832 - ], - [ - "▁uniquely", - -12.476905822753906 - ], - [ - "▁retard", - -12.476929664611816 - ], - [ - "▁Bang", - -12.477006912231445 - ], - [ - "ieuse", - -12.477087020874023 - ], - [ - "▁Ted", - -12.477248191833496 - ], - [ - "▁ermöglichen", - -12.47732925415039 - ], - [ - "▁builders", - -12.477380752563477 - ], - [ - "▁proximité", - -12.477423667907715 - ], - [ - "▁unforgettable", - -12.477423667907715 - ], - [ - "256", - -12.477446556091309 - ], - [ - "fähigkeit", - -12.477550506591797 - ], - [ - "▁procurement", - -12.477561950683594 - ], - [ - "▁Gewicht", - -12.477693557739258 - ], - [ - "▁potentiel", - -12.47778606414795 - ], - [ - "▁topping", - -12.478300094604492 - ], - [ - "▁canada", - -12.478304862976074 - ], - [ - "▁Destin", - -12.478355407714844 - ], - [ - "▁Knowing", - -12.478411674499512 - ], - [ - "▁retained", - 
-12.478426933288574 - ], - [ - "▁zinc", - -12.478470802307129 - ], - [ - "▁worrying", - -12.478655815124512 - ], - [ - "faţa", - -12.478676795959473 - ], - [ - "▁initi", - -12.478837966918945 - ], - [ - "ORI", - -12.4788818359375 - ], - [ - "▁refuz", - -12.478921890258789 - ], - [ - "bruch", - -12.479202270507812 - ], - [ - "▁impun", - -12.479233741760254 - ], - [ - "▁persoană", - -12.479308128356934 - ], - [ - "EAR", - -12.479347229003906 - ], - [ - "bedarf", - -12.479368209838867 - ], - [ - "▁Gebiet", - -12.47940731048584 - ], - [ - "▁Roof", - -12.479436874389648 - ], - [ - "▁negligence", - -12.47957706451416 - ], - [ - "security", - -12.479618072509766 - ], - [ - "▁accesorii", - -12.479641914367676 - ], - [ - "▁unclear", - -12.479667663574219 - ], - [ - "▁securitate", - -12.479848861694336 - ], - [ - "▁spotlight", - -12.479896545410156 - ], - [ - "▁speziell", - -12.479923248291016 - ], - [ - "▁mentally", - -12.479942321777344 - ], - [ - "▁preservation", - -12.48011589050293 - ], - [ - "▁Promotion", - -12.480156898498535 - ], - [ - "partnered", - -12.480274200439453 - ], - [ - "▁Hinter", - -12.48031997680664 - ], - [ - "▁punishment", - -12.480359077453613 - ], - [ - "▁grease", - -12.480713844299316 - ], - [ - "▁NW", - -12.480714797973633 - ], - [ - "▁curse", - -12.480897903442383 - ], - [ - "ckle", - -12.48101806640625 - ], - [ - "▁Hire", - -12.481043815612793 - ], - [ - "▁Whole", - -12.481088638305664 - ], - [ - "▁basse", - -12.481289863586426 - ], - [ - "▁DNS", - -12.481427192687988 - ], - [ - "flamm", - -12.481560707092285 - ], - [ - "▁scoop", - -12.481574058532715 - ], - [ - "Norm", - -12.481663703918457 - ], - [ - "▁Surgery", - -12.481735229492188 - ], - [ - "▁widget", - -12.481741905212402 - ], - [ - "connected", - -12.481863021850586 - ], - [ - "autorité", - -12.481961250305176 - ], - [ - "▁utilis", - -12.482096672058105 - ], - [ - "▁formă", - -12.482185363769531 - ], - [ - "▁clearing", - -12.482307434082031 - ], - [ - "▁jumătate", - -12.482815742492676 - 
], - [ - "größe", - -12.482831954956055 - ], - [ - "▁Tief", - -12.482852935791016 - ], - [ - "épi", - -12.482939720153809 - ], - [ - "zunehmen", - -12.483174324035645 - ], - [ - "▁touchdown", - -12.48318099975586 - ], - [ - "▁scholarships", - -12.483236312866211 - ], - [ - "▁dementia", - -12.483319282531738 - ], - [ - "▁Jeder", - -12.48333740234375 - ], - [ - "▁nightmare", - -12.483379364013672 - ], - [ - "▁Raw", - -12.48342514038086 - ], - [ - "absorbed", - -12.483468055725098 - ], - [ - "lohnt", - -12.483484268188477 - ], - [ - "quent", - -12.483580589294434 - ], - [ - "interest", - -12.483626365661621 - ], - [ - "OSS", - -12.483649253845215 - ], - [ - "▁Leaf", - -12.483667373657227 - ], - [ - "▁timeless", - -12.48381519317627 - ], - [ - "DY", - -12.483865737915039 - ], - [ - "▁Remote", - -12.483907699584961 - ], - [ - "chner", - -12.483938217163086 - ], - [ - "▁Pam", - -12.484014511108398 - ], - [ - "urban", - -12.484060287475586 - ], - [ - "во", - -12.484146118164062 - ], - [ - "▁Kunde", - -12.484166145324707 - ], - [ - "▁Laptop", - -12.484169006347656 - ], - [ - "finder", - -12.484336853027344 - ], - [ - "▁Pole", - -12.484567642211914 - ], - [ - "2.8", - -12.484588623046875 - ], - [ - "finished", - -12.484670639038086 - ], - [ - "▁prophet", - -12.484697341918945 - ], - [ - "mailed", - -12.484758377075195 - ], - [ - "2-0", - -12.4849214553833 - ], - [ - "▁disciples", - -12.484949111938477 - ], - [ - "▁intriguing", - -12.484980583190918 - ], - [ - "IRA", - -12.485033988952637 - ], - [ - "petit", - -12.485077857971191 - ], - [ - "▁Membership", - -12.485097885131836 - ], - [ - "▁provincial", - -12.485177040100098 - ], - [ - "▁Prüfung", - -12.485292434692383 - ], - [ - "-50", - -12.485450744628906 - ], - [ - "▁cryptocurrency", - -12.485522270202637 - ], - [ - "▁journalism", - -12.485536575317383 - ], - [ - "▁Downtown", - -12.485593795776367 - ], - [ - "inserted", - -12.485655784606934 - ], - [ - "▁Direction", - -12.485718727111816 - ], - [ - "lipid", - 
-12.485732078552246 - ], - [ - "▁Sebastian", - -12.485793113708496 - ], - [ - "fordert", - -12.48591136932373 - ], - [ - "Originally", - -12.485989570617676 - ], - [ - "tipp", - -12.486048698425293 - ], - [ - "verantwortlich", - -12.486064910888672 - ], - [ - "▁wheelchair", - -12.486085891723633 - ], - [ - "▁structura", - -12.48609733581543 - ], - [ - "▁Danny", - -12.486138343811035 - ], - [ - "999", - -12.486284255981445 - ], - [ - "▁Schiff", - -12.486380577087402 - ], - [ - "formally", - -12.486408233642578 - ], - [ - "focused", - -12.486428260803223 - ], - [ - "▁Vater", - -12.486478805541992 - ], - [ - "▁Dear", - -12.486599922180176 - ], - [ - "▁reinforce", - -12.486794471740723 - ], - [ - "proprietar", - -12.48690414428711 - ], - [ - "▁Kyle", - -12.487004280090332 - ], - [ - "În", - -12.487015724182129 - ], - [ - "▁servir", - -12.487268447875977 - ], - [ - "length", - -12.48730754852295 - ], - [ - "▁showroom", - -12.48735237121582 - ], - [ - "reli", - -12.487473487854004 - ], - [ - "▁Brü", - -12.487529754638672 - ], - [ - "▁Schle", - -12.487634658813477 - ], - [ - "▁profond", - -12.487773895263672 - ], - [ - "▁Superior", - -12.487826347351074 - ], - [ - "▁lifted", - -12.487844467163086 - ], - [ - "highlighting", - -12.487850189208984 - ], - [ - "▁Connection", - -12.48793888092041 - ], - [ - "▁similarly", - -12.487998962402344 - ], - [ - "▁diferit", - -12.488005638122559 - ], - [ - "▁sweater", - -12.488014221191406 - ], - [ - "État", - -12.48803997039795 - ], - [ - "rooted", - -12.488069534301758 - ], - [ - "▁sleeves", - -12.488236427307129 - ], - [ - "де", - -12.488264083862305 - ], - [ - "▁Laboratory", - -12.488265991210938 - ], - [ - "ündig", - -12.488719940185547 - ], - [ - "▁Viking", - -12.488741874694824 - ], - [ - "▁Origin", - -12.48878002166748 - ], - [ - "▁vibr", - -12.488812446594238 - ], - [ - "199", - -12.488974571228027 - ], - [ - "▁yummy", - -12.489001274108887 - ], - [ - "STAR", - -12.489140510559082 - ], - [ - "▁repro", - -12.489152908325195 - ], 
- [ - "▁Kirchen", - -12.489229202270508 - ], - [ - "hopper", - -12.48925495147705 - ], - [ - "zza", - -12.489335060119629 - ], - [ - "▁vitesse", - -12.48934555053711 - ], - [ - "▁minimalist", - -12.489412307739258 - ], - [ - "▁Election", - -12.489420890808105 - ], - [ - "draw", - -12.489501953125 - ], - [ - "▁candles", - -12.48959732055664 - ], - [ - "▁Mund", - -12.489615440368652 - ], - [ - "urged", - -12.489901542663574 - ], - [ - "▁cânt", - -12.489917755126953 - ], - [ - "Ultimately", - -12.49002742767334 - ], - [ - "▁Lift", - -12.490124702453613 - ], - [ - "loaded", - -12.490334510803223 - ], - [ - "demand", - -12.490508079528809 - ], - [ - "▁aleg", - -12.490621566772461 - ], - [ - "▁Discovery", - -12.490755081176758 - ], - [ - "▁Vienna", - -12.490960121154785 - ], - [ - "▁Kategorie", - -12.490961074829102 - ], - [ - "▁Cotton", - -12.490962028503418 - ], - [ - "▁$200", - -12.491043090820312 - ], - [ - "▁Drei", - -12.491052627563477 - ], - [ - "▁reicht", - -12.491168975830078 - ], - [ - "speicher", - -12.491231918334961 - ], - [ - "▁Immobilien", - -12.491483688354492 - ], - [ - "gefühl", - -12.491509437561035 - ], - [ - "make", - -12.491525650024414 - ], - [ - "pell", - -12.49155044555664 - ], - [ - "▁dull", - -12.491598129272461 - ], - [ - "▁arbeitet", - -12.491681098937988 - ], - [ - "retaining", - -12.491700172424316 - ], - [ - "losen", - -12.491707801818848 - ], - [ - "match", - -12.491876602172852 - ], - [ - "-60", - -12.491880416870117 - ], - [ - "▁ecological", - -12.492000579833984 - ], - [ - "▁vend", - -12.492051124572754 - ], - [ - "▁grammar", - -12.492061614990234 - ], - [ - "▁1:1", - -12.492225646972656 - ], - [ - "grilled", - -12.492279052734375 - ], - [ - "geordnet", - -12.492321014404297 - ], - [ - "▁Pav", - -12.49236011505127 - ], - [ - "▁Depot", - -12.492368698120117 - ], - [ - "▁Walking", - -12.492372512817383 - ], - [ - "teamed", - -12.492402076721191 - ], - [ - "▁torque", - -12.492537498474121 - ], - [ - "▁Venture", - -12.492659568786621 - ], 
- [ - "▁beginner", - -12.49269962310791 - ], - [ - "▁Monaten", - -12.492712020874023 - ], - [ - "▁Pune", - -12.493054389953613 - ], - [ - "connect", - -12.493075370788574 - ], - [ - "▁textbook", - -12.493132591247559 - ], - [ - "▁unprecedented", - -12.49314022064209 - ], - [ - "▁implied", - -12.493168830871582 - ], - [ - "▁cubic", - -12.493668556213379 - ], - [ - "enthält", - -12.493696212768555 - ], - [ - "▁Brenn", - -12.49388313293457 - ], - [ - "▁Expect", - -12.49394416809082 - ], - [ - "▁lever", - -12.4939603805542 - ], - [ - "veux", - -12.49399185180664 - ], - [ - "▁Claire", - -12.494112968444824 - ], - [ - "Acc", - -12.49432373046875 - ], - [ - "▁Typ", - -12.494478225708008 - ], - [ - "▁smoothie", - -12.494501113891602 - ], - [ - "▁Idaho", - -12.494780540466309 - ], - [ - "▁spati", - -12.494802474975586 - ], - [ - "▁bénéficier", - -12.49488353729248 - ], - [ - "▁Kle", - -12.495161056518555 - ], - [ - "▁serviciilor", - -12.495169639587402 - ], - [ - "▁prohibit", - -12.495267868041992 - ], - [ - "EAD", - -12.495417594909668 - ], - [ - "▁Turner", - -12.495418548583984 - ], - [ - "▁elibera", - -12.49543571472168 - ], - [ - "▁payday", - -12.495464324951172 - ], - [ - "▁prolong", - -12.495466232299805 - ], - [ - "▁sued", - -12.495481491088867 - ], - [ - "▁Devil", - -12.495536804199219 - ], - [ - "▁Skills", - -12.495552062988281 - ], - [ - "▁Marcel", - -12.495553970336914 - ], - [ - "▁silhouette", - -12.495601654052734 - ], - [ - "▁preț", - -12.495742797851562 - ], - [ - "▁Gö", - -12.495747566223145 - ], - [ - "▁Creator", - -12.495774269104004 - ], - [ - "fed", - -12.4959077835083 - ], - [ - "Cap", - -12.495997428894043 - ], - [ - "▁dedicate", - -12.496042251586914 - ], - [ - "0000", - -12.496124267578125 - ], - [ - "▁VAT", - -12.496259689331055 - ], - [ - "▁Firefox", - -12.496443748474121 - ], - [ - "▁therapies", - -12.496477127075195 - ], - [ - "▁screws", - -12.496662139892578 - ], - [ - "▁Province", - -12.496697425842285 - ], - [ - "▁problematic", - 
-12.496871948242188 - ], - [ - "▁Vid", - -12.496915817260742 - ], - [ - "▁Lost", - -12.496950149536133 - ], - [ - "▁elegance", - -12.497520446777344 - ], - [ - "▁Elegant", - -12.497525215148926 - ], - [ - "ignant", - -12.497573852539062 - ], - [ - "▁darin", - -12.497649192810059 - ], - [ - "▁anonym", - -12.497669219970703 - ], - [ - "▁vegeta", - -12.49767780303955 - ], - [ - "incoming", - -12.497762680053711 - ], - [ - "▁pills", - -12.497846603393555 - ], - [ - "governing", - -12.497893333435059 - ], - [ - "▁Haven", - -12.497920989990234 - ], - [ - "paper", - -12.497947692871094 - ], - [ - "räume", - -12.497979164123535 - ], - [ - "paw", - -12.498099327087402 - ], - [ - "▁spelling", - -12.498283386230469 - ], - [ - "ambele", - -12.498318672180176 - ], - [ - "▁reprezentat", - -12.498371124267578 - ], - [ - "▁mâ", - -12.49853515625 - ], - [ - "wirtschaftliche", - -12.498558044433594 - ], - [ - "▁valabil", - -12.498579025268555 - ], - [ - "▁konkret", - -12.498618125915527 - ], - [ - "▁financier", - -12.498619079589844 - ], - [ - "▁irre", - -12.499135971069336 - ], - [ - "▁Silicon", - -12.499171257019043 - ], - [ - "Viv", - -12.499181747436523 - ], - [ - "▁viruses", - -12.49927043914795 - ], - [ - "▁CNN", - -12.499324798583984 - ], - [ - "▁erleben", - -12.499482154846191 - ], - [ - "gina", - -12.499492645263672 - ], - [ - "punctul", - -12.49951457977295 - ], - [ - "▁Sfânt", - -12.499753952026367 - ], - [ - "▁Manage", - -12.499811172485352 - ], - [ - "▁payable", - -12.499984741210938 - ], - [ - "▁practitioner", - -12.500143051147461 - ], - [ - "▁conférence", - -12.50026798248291 - ], - [ - "▁drought", - -12.50027084350586 - ], - [ - "▁devote", - -12.500361442565918 - ], - [ - "wertung", - -12.500420570373535 - ], - [ - "stabil", - -12.5004301071167 - ], - [ - "▁balcon", - -12.500553131103516 - ], - [ - "▁Lebensmittel", - -12.500603675842285 - ], - [ - "COL", - -12.500950813293457 - ], - [ - "▁Domnul", - -12.501093864440918 - ], - [ - "carved", - -12.501359939575195 - ], 
- [ - "▁preparat", - -12.5014009475708 - ], - [ - "101", - -12.501537322998047 - ], - [ - "▁specimen", - -12.501580238342285 - ], - [ - "urgeon", - -12.501596450805664 - ], - [ - "LIC", - -12.50163459777832 - ], - [ - "Plattform", - -12.501643180847168 - ], - [ - "▁ramas", - -12.501739501953125 - ], - [ - "▁copilului", - -12.501791954040527 - ], - [ - "bacter", - -12.501812934875488 - ], - [ - "körper", - -12.501940727233887 - ], - [ - "▁Kru", - -12.501981735229492 - ], - [ - "▁Employ", - -12.502055168151855 - ], - [ - "office", - -12.502080917358398 - ], - [ - "▁simmer", - -12.502120018005371 - ], - [ - "qualität", - -12.502137184143066 - ], - [ - "▁freshly", - -12.502215385437012 - ], - [ - "▁Nine", - -12.50223159790039 - ], - [ - "▁tonnes", - -12.50223445892334 - ], - [ - "boden", - -12.502236366271973 - ], - [ - "enquête", - -12.50240707397461 - ], - [ - "▁Colour", - -12.502481460571289 - ], - [ - "▁Diagram", - -12.502495765686035 - ], - [ - "▁gewählt", - -12.502516746520996 - ], - [ - "▁viitoare", - -12.502538681030273 - ], - [ - "▁reporters", - -12.502913475036621 - ], - [ - "guer", - -12.502991676330566 - ], - [ - "▁Kombination", - -12.503021240234375 - ], - [ - "▁qualitative", - -12.50302505493164 - ], - [ - "Centrul", - -12.503131866455078 - ], - [ - "avy", - -12.503170013427734 - ], - [ - "▁Eng", - -12.503175735473633 - ], - [ - "▁sufletul", - -12.50327205657959 - ], - [ - "▁germ", - -12.503412246704102 - ], - [ - "▁prevented", - -12.503448486328125 - ], - [ - "appelle", - -12.503533363342285 - ], - [ - "gins", - -12.503556251525879 - ], - [ - "▁Skype", - -12.503585815429688 - ], - [ - "conditioned", - -12.503617286682129 - ], - [ - "▁clutch", - -12.503641128540039 - ], - [ - "environ", - -12.503694534301758 - ], - [ - "3.3", - -12.503774642944336 - ], - [ - "▁webinar", - -12.503866195678711 - ], - [ - "▁forty", - -12.504104614257812 - ], - [ - "▁Medicaid", - -12.504127502441406 - ], - [ - "▁dismissed", - -12.504167556762695 - ], - [ - "▁siblings", - 
-12.504168510437012 - ], - [ - "▁Jaw", - -12.504196166992188 - ], - [ - "guiding", - -12.504220962524414 - ], - [ - "cigarette", - -12.504374504089355 - ], - [ - "▁Shah", - -12.504681587219238 - ], - [ - "▁Lehrer", - -12.504684448242188 - ], - [ - "▁muscular", - -12.504694938659668 - ], - [ - "spatele", - -12.504796981811523 - ], - [ - "▁réduction", - -12.504836082458496 - ], - [ - "▁fixes", - -12.504851341247559 - ], - [ - "Span", - -12.50511646270752 - ], - [ - "▁Hudson", - -12.505231857299805 - ], - [ - "development", - -12.505250930786133 - ], - [ - "▁excluded", - -12.50525951385498 - ], - [ - "Democrat", - -12.505260467529297 - ], - [ - "▁nominal", - -12.505317687988281 - ], - [ - "purpose", - -12.50540828704834 - ], - [ - "▁bored", - -12.505500793457031 - ], - [ - "espèce", - -12.50550651550293 - ], - [ - "▁(30", - -12.5055570602417 - ], - [ - "Neither", - -12.505608558654785 - ], - [ - "hänge", - -12.505610466003418 - ], - [ - "square", - -12.505728721618652 - ], - [ - "voller", - -12.505736351013184 - ], - [ - "▁pertinent", - -12.505783081054688 - ], - [ - "▁Wool", - -12.50595474243164 - ], - [ - "settling", - -12.50607681274414 - ], - [ - "fangen", - -12.506148338317871 - ], - [ - "▁Testing", - -12.506152153015137 - ], - [ - "distin", - -12.506196022033691 - ], - [ - "▁Marken", - -12.506227493286133 - ], - [ - "▁Beta", - -12.506300926208496 - ], - [ - "▁fulfilling", - -12.506339073181152 - ], - [ - "Leider", - -12.506357192993164 - ], - [ - "black", - -12.506389617919922 - ], - [ - "occupe", - -12.50658893585205 - ], - [ - "itățile", - -12.506688117980957 - ], - [ - "Pay", - -12.506887435913086 - ], - [ - "▁bandwidth", - -12.506890296936035 - ], - [ - "▁neighbourhood", - -12.506918907165527 - ], - [ - "▁Gutschein", - -12.506922721862793 - ], - [ - "degree", - -12.507055282592773 - ], - [ - "ivité", - -12.507116317749023 - ], - [ - "4.1", - -12.507169723510742 - ], - [ - "▁tätig", - -12.507170677185059 - ], - [ - "topic", - -12.507242202758789 - ], - [ - 
"ätz", - -12.507243156433105 - ], - [ - "these", - -12.50733470916748 - ], - [ - "▁propriété", - -12.507438659667969 - ], - [ - "▁innings", - -12.507458686828613 - ], - [ - "▁Prevention", - -12.50754165649414 - ], - [ - "▁Saw", - -12.507585525512695 - ], - [ - "▁opener", - -12.507752418518066 - ], - [ - "entwicklung", - -12.507824897766113 - ], - [ - "▁Johann", - -12.507865905761719 - ], - [ - "▁statistic", - -12.507881164550781 - ], - [ - "oids", - -12.507966995239258 - ], - [ - "▁Delaware", - -12.508000373840332 - ], - [ - "▁Isle", - -12.508001327514648 - ], - [ - "▁accompagn", - -12.508028984069824 - ], - [ - "▁Risiko", - -12.508079528808594 - ], - [ - "▁Conform", - -12.508268356323242 - ], - [ - "zeichnen", - -12.508395195007324 - ], - [ - "▁acuz", - -12.508479118347168 - ], - [ - "▁Mort", - -12.508524894714355 - ], - [ - "Fällen", - -12.50853157043457 - ], - [ - "▁blended", - -12.50871467590332 - ], - [ - "found", - -12.50872802734375 - ], - [ - "▁gestalten", - -12.50874137878418 - ], - [ - "▁Découvrez", - -12.508830070495605 - ], - [ - "▁Wett", - -12.508956909179688 - ], - [ - "▁débat", - -12.508990287780762 - ], - [ - "▁Tire", - -12.509007453918457 - ], - [ - "benz", - -12.509037017822266 - ], - [ - "Yes", - -12.509074211120605 - ], - [ - "▁pierde", - -12.509110450744629 - ], - [ - "▁niciodata", - -12.509121894836426 - ], - [ - "▁precipit", - -12.509145736694336 - ], - [ - "▁lazy", - -12.509334564208984 - ], - [ - "▁creature", - -12.509370803833008 - ], - [ - "Wettbewerb", - -12.509385108947754 - ], - [ - "▁Explo", - -12.509496688842773 - ], - [ - "wolf", - -12.509657859802246 - ], - [ - "▁conséquence", - -12.509662628173828 - ], - [ - "▁jewellery", - -12.509662628173828 - ], - [ - "▁Extension", - -12.509735107421875 - ], - [ - "▁transmitted", - -12.509872436523438 - ], - [ - "▁darker", - -12.509973526000977 - ], - [ - "▁simbol", - -12.510065078735352 - ], - [ - "kim", - -12.510069847106934 - ], - [ - "▁proteja", - -12.510098457336426 - ], - [ - "▁Copper", - 
-12.510189056396484 - ], - [ - "mitglied", - -12.510218620300293 - ], - [ - "▁explosive", - -12.510222434997559 - ], - [ - "▁Nicolae", - -12.510223388671875 - ], - [ - "▁intricate", - -12.510231971740723 - ], - [ - "lati", - -12.510313034057617 - ], - [ - "Mark", - -12.510334014892578 - ], - [ - "▁Porsche", - -12.510339736938477 - ], - [ - "▁Revenue", - -12.510479927062988 - ], - [ - "4.2", - -12.510613441467285 - ], - [ - "certain", - -12.510836601257324 - ], - [ - "▁Coaching", - -12.510879516601562 - ], - [ - "▁allocated", - -12.510879516601562 - ], - [ - "▁optimiz", - -12.511017799377441 - ], - [ - "▁heel", - -12.511205673217773 - ], - [ - "▁indigenous", - -12.511330604553223 - ], - [ - "▁vineri", - -12.511396408081055 - ], - [ - "▁Inspector", - -12.51145076751709 - ], - [ - "▁colleague", - -12.5115327835083 - ], - [ - "ANG", - -12.511649131774902 - ], - [ - "éducation", - -12.511887550354004 - ], - [ - "▁Geschenk", - -12.51188850402832 - ], - [ - "channel", - -12.511899948120117 - ], - [ - "▁trapped", - -12.511954307556152 - ], - [ - "BF", - -12.511974334716797 - ], - [ - "▁firing", - -12.512086868286133 - ], - [ - "▁chlor", - -12.512103080749512 - ], - [ - "▁Carlos", - -12.512115478515625 - ], - [ - "▁proxy", - -12.512128829956055 - ], - [ - "▁pinch", - -12.512167930603027 - ], - [ - "▁Pete", - -12.512201309204102 - ], - [ - "phospho", - -12.512458801269531 - ], - [ - "▁waiver", - -12.51246452331543 - ], - [ - "▁Croatia", - -12.512480735778809 - ], - [ - "▁behave", - -12.51258373260498 - ], - [ - "▁frig", - -12.512676239013672 - ], - [ - "▁Vorteil", - -12.51279067993164 - ], - [ - "▁wichtiger", - -12.512837409973145 - ], - [ - "........", - -12.512929916381836 - ], - [ - "▁flick", - -12.513007164001465 - ], - [ - "▁Stanford", - -12.51306438446045 - ], - [ - "öse", - -12.513096809387207 - ], - [ - "▁Fernseh", - -12.513099670410156 - ], - [ - "▁vélo", - -12.51322078704834 - ], - [ - "reisen", - -12.513304710388184 - ], - [ - "residing", - -12.513504981994629 - 
], - [ - "▁Taste", - -12.513580322265625 - ], - [ - "▁disappeared", - -12.513630867004395 - ], - [ - "▁Hood", - -12.513776779174805 - ], - [ - "▁fabriqu", - -12.514046669006348 - ], - [ - "▁Jake", - -12.514470100402832 - ], - [ - "Lastly", - -12.51462173461914 - ], - [ - "▁furnace", - -12.514673233032227 - ], - [ - "▁Ottawa", - -12.51473331451416 - ], - [ - "▁dictate", - -12.514742851257324 - ], - [ - "zece", - -12.514817237854004 - ], - [ - "protect", - -12.514932632446289 - ], - [ - "FU", - -12.51495361328125 - ], - [ - "Stack", - -12.514954566955566 - ], - [ - "▁teilweise", - -12.515018463134766 - ], - [ - "▁Publisher", - -12.51506233215332 - ], - [ - "▁lutte", - -12.515159606933594 - ], - [ - "202", - -12.515178680419922 - ], - [ - "psy", - -12.515190124511719 - ], - [ - "▁wünschen", - -12.515238761901855 - ], - [ - "▁pathways", - -12.515356063842773 - ], - [ - "ivitate", - -12.515559196472168 - ], - [ - "▁continuă", - -12.515658378601074 - ], - [ - "ziemlich", - -12.515791893005371 - ], - [ - "verted", - -12.515812873840332 - ], - [ - "▁sequel", - -12.515839576721191 - ], - [ - "tinct", - -12.51599407196045 - ], - [ - "vette", - -12.516020774841309 - ], - [ - "▁exceeding", - -12.516032218933105 - ], - [ - "▁Yorkshire", - -12.51607608795166 - ], - [ - "▁cleanse", - -12.51613998413086 - ], - [ - "Sadly", - -12.516159057617188 - ], - [ - "▁präsentiert", - -12.516164779663086 - ], - [ - "angled", - -12.516311645507812 - ], - [ - "tude", - -12.516339302062988 - ], - [ - "chain", - -12.516371726989746 - ], - [ - "▁Oakland", - -12.51639175415039 - ], - [ - "xia", - -12.516514778137207 - ], - [ - "▁foremost", - -12.51653003692627 - ], - [ - "▁incomplete", - -12.516786575317383 - ], - [ - "▁restriction", - -12.516905784606934 - ], - [ - "▁whatsoever", - -12.516908645629883 - ], - [ - "▁shipment", - -12.517017364501953 - ], - [ - "**", - -12.517059326171875 - ], - [ - "Aici", - -12.517110824584961 - ], - [ - "PART", - -12.517247200012207 - ], - [ - "▁grams", - 
-12.517251014709473 - ], - [ - "▁Folk", - -12.517457008361816 - ], - [ - "▁encryption", - -12.517467498779297 - ], - [ - "▁Alfred", - -12.517748832702637 - ], - [ - "▁Veränderung", - -12.517749786376953 - ], - [ - "▁privately", - -12.517817497253418 - ], - [ - "£", - -12.517909049987793 - ], - [ - "▁Sonne", - -12.51799201965332 - ], - [ - "kow", - -12.518117904663086 - ], - [ - "▁CBS", - -12.518172264099121 - ], - [ - "▁Feuer", - -12.518198013305664 - ], - [ - "▁crushed", - -12.518230438232422 - ], - [ - "▁cazare", - -12.518270492553711 - ], - [ - "▁beraten", - -12.518401145935059 - ], - [ - "envoi", - -12.518423080444336 - ], - [ - "▁genannt", - -12.51843547821045 - ], - [ - "▁Lok", - -12.518472671508789 - ], - [ - "nox", - -12.518569946289062 - ], - [ - "wishing", - -12.518759727478027 - ], - [ - "▁freak", - -12.518759727478027 - ], - [ - "rasi", - -12.51879596710205 - ], - [ - "▁calculations", - -12.518888473510742 - ], - [ - "▁sprechen", - -12.51890754699707 - ], - [ - "5:00", - -12.519062042236328 - ], - [ - "▁Gam", - -12.519074440002441 - ], - [ - "▁invasion", - -12.519159317016602 - ], - [ - "ZA", - -12.519230842590332 - ], - [ - "aiming", - -12.519327163696289 - ], - [ - "▁näher", - -12.519404411315918 - ], - [ - "▁Maßnahmen", - -12.519433975219727 - ], - [ - "▁măsură", - -12.519490242004395 - ], - [ - "▁Bestellung", - -12.519610404968262 - ], - [ - "▁gown", - -12.519665718078613 - ], - [ - "▁oblige", - -12.519747734069824 - ], - [ - "länder", - -12.51977825164795 - ], - [ - "posi", - -12.519853591918945 - ], - [ - "▁Earn", - -12.51988410949707 - ], - [ - "▁dubl", - -12.51999282836914 - ], - [ - "▁sticky", - -12.520100593566895 - ], - [ - "▁litter", - -12.520181655883789 - ], - [ - "▁Salz", - -12.520257949829102 - ], - [ - "▁Matter", - -12.520272254943848 - ], - [ - "▁Driving", - -12.520275115966797 - ], - [ - "▁pursu", - -12.520285606384277 - ], - [ - "ographer", - -12.520390510559082 - ], - [ - "▁touring", - -12.520400047302246 - ], - [ - "opter", - 
-12.520444869995117 - ], - [ - "▁fierce", - -12.520475387573242 - ], - [ - "▁Audit", - -12.520480155944824 - ], - [ - "▁imperi", - -12.520755767822266 - ], - [ - "▁positiv", - -12.520780563354492 - ], - [ - "règles", - -12.520849227905273 - ], - [ - "▁bouton", - -12.520990371704102 - ], - [ - "▁victorie", - -12.520990371704102 - ], - [ - "▁manuel", - -12.521015167236328 - ], - [ - "▁await", - -12.52103042602539 - ], - [ - "▁transformer", - -12.521041870117188 - ], - [ - "▁cupboard", - -12.52108383178711 - ], - [ - "▁Hag", - -12.521117210388184 - ], - [ - "naj", - -12.521214485168457 - ], - [ - "▁annoncé", - -12.52139663696289 - ], - [ - "▁scolaire", - -12.521401405334473 - ], - [ - "▁étape", - -12.521482467651367 - ], - [ - "▁pirate", - -12.521761894226074 - ], - [ - "▁Rated", - -12.521794319152832 - ], - [ - "LOT", - -12.521846771240234 - ], - [ - "▁natura", - -12.521944046020508 - ], - [ - "oga", - -12.522336959838867 - ], - [ - "Read", - -12.522388458251953 - ], - [ - "idio", - -12.522444725036621 - ], - [ - "▁recession", - -12.522698402404785 - ], - [ - "veţi", - -12.522761344909668 - ], - [ - "▁blossom", - -12.523082733154297 - ], - [ - "▁lunar", - -12.523141860961914 - ], - [ - "▁inhibit", - -12.52316951751709 - ], - [ - "gemein", - -12.523219108581543 - ], - [ - "▁Historic", - -12.523262023925781 - ], - [ - "▁HTTP", - -12.523370742797852 - ], - [ - "misiune", - -12.5234956741333 - ], - [ - "▁Manda", - -12.523601531982422 - ], - [ - "▁Hurricane", - -12.523643493652344 - ], - [ - "Strat", - -12.523646354675293 - ], - [ - "▁populaire", - -12.523756980895996 - ], - [ - "▁useless", - -12.523762702941895 - ], - [ - "▁Leipzig", - -12.523924827575684 - ], - [ - "▁Krankheit", - -12.52392578125 - ], - [ - "▁Bonne", - -12.52397346496582 - ], - [ - "▁tissu", - -12.52399730682373 - ], - [ - "▁Baum", - -12.523998260498047 - ], - [ - "▁BUT", - -12.524152755737305 - ], - [ - "▁Mondial", - -12.52423095703125 - ], - [ - "▁triangle", - -12.524242401123047 - ], - [ - "▁Tesla", 
- -12.524250984191895 - ], - [ - "▁pământ", - -12.52430534362793 - ], - [ - "▁aminte", - -12.524726867675781 - ], - [ - "▁vehicul", - -12.524770736694336 - ], - [ - "▁cerut", - -12.52482795715332 - ], - [ - "▁respiratory", - -12.524836540222168 - ], - [ - "▁rayon", - -12.524993896484375 - ], - [ - "▁gestaltet", - -12.525067329406738 - ], - [ - "310", - -12.525139808654785 - ], - [ - "pfl", - -12.525239944458008 - ], - [ - "▁shrimp", - -12.525337219238281 - ], - [ - "▁reconnu", - -12.525409698486328 - ], - [ - "ologique", - -12.525476455688477 - ], - [ - "▁unity", - -12.525674819946289 - ], - [ - "Speicher", - -12.52569580078125 - ], - [ - "▁Movement", - -12.525794982910156 - ], - [ - "ddling", - -12.52581787109375 - ], - [ - "OE", - -12.525818824768066 - ], - [ - "▁Resolution", - -12.525863647460938 - ], - [ - "esteem", - -12.525898933410645 - ], - [ - "▁Teen", - -12.526288986206055 - ], - [ - "▁believing", - -12.526463508605957 - ], - [ - "▁Tipps", - -12.526481628417969 - ], - [ - "jpg", - -12.526494026184082 - ], - [ - "▁obs", - -12.526519775390625 - ], - [ - "SHA", - -12.526702880859375 - ], - [ - "▁quietly", - -12.526907920837402 - ], - [ - "setting", - -12.52712345123291 - ], - [ - "▁elevator", - -12.527185440063477 - ], - [ - "phor", - -12.527194023132324 - ], - [ - "Just", - -12.52725887298584 - ], - [ - "▁legatura", - -12.52739143371582 - ], - [ - "elected", - -12.527414321899414 - ], - [ - "▁disclosed", - -12.527419090270996 - ], - [ - "quarter", - -12.52743148803711 - ], - [ - "zzy", - -12.527461051940918 - ], - [ - "▁gata", - -12.527491569519043 - ], - [ - "SAN", - -12.527532577514648 - ], - [ - "▁Cathedral", - -12.527592658996582 - ], - [ - "192", - -12.527656555175781 - ], - [ - "▁RBI", - -12.527726173400879 - ], - [ - "▁Seller", - -12.527798652648926 - ], - [ - "▁urine", - -12.527807235717773 - ], - [ - "▁Hardware", - -12.527966499328613 - ], - [ - "▁steadi", - -12.527993202209473 - ], - [ - "percussion", - -12.528158187866211 - ], - [ - "▁francez", - 
-12.528172492980957 - ], - [ - "▁rude", - -12.528202056884766 - ], - [ - "bod", - -12.528223037719727 - ], - [ - "cession", - -12.528249740600586 - ], - [ - "▁HTC", - -12.528372764587402 - ], - [ - "HB", - -12.528576850891113 - ], - [ - "▁descent", - -12.528644561767578 - ], - [ - "▁Painting", - -12.528681755065918 - ], - [ - "119", - -12.528684616088867 - ], - [ - "sagen", - -12.52877426147461 - ], - [ - "▁salvation", - -12.52880573272705 - ], - [ - "arro", - -12.528814315795898 - ], - [ - "0.3", - -12.52886962890625 - ], - [ - "▁Duck", - -12.52890396118164 - ], - [ - "Mit", - -12.529052734375 - ], - [ - "да", - -12.52927017211914 - ], - [ - "▁Diesel", - -12.529322624206543 - ], - [ - "▁Medal", - -12.529413223266602 - ], - [ - "▁interim", - -12.529439926147461 - ], - [ - "▁montagne", - -12.529439926147461 - ], - [ - "▁Pixel", - -12.529631614685059 - ], - [ - "LINE", - -12.529806137084961 - ], - [ - "▁dureri", - -12.529938697814941 - ], - [ - "▁Bengal", - -12.529990196228027 - ], - [ - "Legea", - -12.530080795288086 - ], - [ - "▁Strecke", - -12.530094146728516 - ], - [ - "▁schneller", - -12.53012752532959 - ], - [ - "▁Karten", - -12.5301513671875 - ], - [ - "cion", - -12.530241966247559 - ], - [ - "▁Coco", - -12.53037166595459 - ], - [ - "troisième", - -12.53052806854248 - ], - [ - "401", - -12.530616760253906 - ], - [ - "▁sandwiches", - -12.530704498291016 - ], - [ - "▁folosind", - -12.530920028686523 - ], - [ - "▁Folgen", - -12.530953407287598 - ], - [ - "▁triumph", - -12.530991554260254 - ], - [ - "▁Hintergrund", - -12.530996322631836 - ], - [ - "▁revelation", - -12.531084060668945 - ], - [ - "ôme", - -12.531222343444824 - ], - [ - "▁Nex", - -12.531245231628418 - ], - [ - "jährigen", - -12.531295776367188 - ], - [ - "▁militant", - -12.531296730041504 - ], - [ - "▁fabricant", - -12.531671524047852 - ], - [ - "iano", - -12.531713485717773 - ], - [ - "▁formulation", - -12.53188705444336 - ], - [ - "integrating", - -12.532050132751465 - ], - [ - "▁Items", - 
-12.532142639160156 - ], - [ - "▁contractual", - -12.532320976257324 - ], - [ - "AIDS", - -12.532424926757812 - ], - [ - "▁pitcher", - -12.532610893249512 - ], - [ - "▁Snap", - -12.532623291015625 - ], - [ - "▁systematic", - -12.532663345336914 - ], - [ - "▁referendum", - -12.532694816589355 - ], - [ - "gau", - -12.53281021118164 - ], - [ - "administration", - -12.532917022705078 - ], - [ - "▁speci", - -12.532981872558594 - ], - [ - "ieni", - -12.532998085021973 - ], - [ - "prox", - -12.533186912536621 - ], - [ - "▁bouquet", - -12.533241271972656 - ], - [ - "▁sinnvoll", - -12.533270835876465 - ], - [ - "▁Fleisch", - -12.533309936523438 - ], - [ - "ktuell", - -12.533381462097168 - ], - [ - "▁mushrooms", - -12.533408164978027 - ], - [ - "▁Straf", - -12.533470153808594 - ], - [ - "▁cresc", - -12.533491134643555 - ], - [ - "TEM", - -12.533502578735352 - ], - [ - "▁vindec", - -12.53352165222168 - ], - [ - "▁Drama", - -12.533540725708008 - ], - [ - "chief", - -12.533550262451172 - ], - [ - "▁müsst", - -12.533614158630371 - ], - [ - "▁Warner", - -12.533662796020508 - ], - [ - "118", - -12.533761024475098 - ], - [ - "▁saptamana", - -12.533831596374512 - ], - [ - "▁animaux", - -12.53412914276123 - ], - [ - "▁Directory", - -12.534146308898926 - ], - [ - "▁entgegen", - -12.53415584564209 - ], - [ - "▁deduction", - -12.534156799316406 - ], - [ - "▁Strategic", - -12.53426456451416 - ], - [ - "▁rats", - -12.534419059753418 - ], - [ - "▁Moses", - -12.534448623657227 - ], - [ - "eko", - -12.534564971923828 - ], - [ - "strict", - -12.534590721130371 - ], - [ - "▁Ashley", - -12.534603118896484 - ], - [ - "mik", - -12.534622192382812 - ], - [ - "▁relocate", - -12.534668922424316 - ], - [ - "▁whip", - -12.534738540649414 - ], - [ - "central", - -12.534750938415527 - ], - [ - "mack", - -12.534892082214355 - ], - [ - "stufe", - -12.534961700439453 - ], - [ - "▁Metropolitan", - -12.5349702835083 - ], - [ - "▁croissance", - -12.534974098205566 - ], - [ - "▁celebrities", - 
-12.535021781921387 - ], - [ - "▁Geh", - -12.53507137298584 - ], - [ - "▁verifica", - -12.535196304321289 - ], - [ - "▁satisfac", - -12.535211563110352 - ], - [ - "▁Julian", - -12.535271644592285 - ], - [ - "▁remotely", - -12.535432815551758 - ], - [ - "▁Safari", - -12.535542488098145 - ], - [ - "▁Chic", - -12.53557014465332 - ], - [ - "▁clamp", - -12.535818099975586 - ], - [ - "▁Schnee", - -12.535918235778809 - ], - [ - "grown", - -12.536069869995117 - ], - [ - "▁Character", - -12.536110877990723 - ], - [ - "▁charities", - -12.536137580871582 - ], - [ - "Thankfully", - -12.536625862121582 - ], - [ - "▁țară", - -12.53681468963623 - ], - [ - "IZ", - -12.536816596984863 - ], - [ - "Vielleicht", - -12.536999702453613 - ], - [ - "▁Pon", - -12.537108421325684 - ], - [ - "gegen", - -12.53711986541748 - ], - [ - "chez", - -12.537185668945312 - ], - [ - "Black", - -12.537544250488281 - ], - [ - "▁alimentare", - -12.537555694580078 - ], - [ - "▁verloren", - -12.537562370300293 - ], - [ - "▁predictions", - -12.537657737731934 - ], - [ - "Founded", - -12.53795337677002 - ], - [ - "▁femeie", - -12.538022994995117 - ], - [ - "wahrscheinlich", - -12.538107872009277 - ], - [ - "▁squeeze", - -12.53819465637207 - ], - [ - "▁verfügbar", - -12.538259506225586 - ], - [ - "▁hygiene", - -12.538393020629883 - ], - [ - "voire", - -12.538667678833008 - ], - [ - "▁birou", - -12.538901329040527 - ], - [ - "▁initiate", - -12.538921356201172 - ], - [ - "▁Patriot", - -12.539009094238281 - ], - [ - "▁Income", - -12.539159774780273 - ], - [ - "▁marry", - -12.539310455322266 - ], - [ - "lokal", - -12.539336204528809 - ], - [ - "logic", - -12.53940486907959 - ], - [ - "▁Abstract", - -12.53966236114502 - ], - [ - "▁grundsätzlich", - -12.539822578430176 - ], - [ - "▁tariff", - -12.539886474609375 - ], - [ - "▁definitiv", - -12.539892196655273 - ], - [ - "paz", - -12.53989315032959 - ], - [ - "Result", - -12.539921760559082 - ], - [ - "1:30", - -12.54005241394043 - ], - [ - "▁Latest", - 
-12.540075302124023 - ], - [ - "▁Dauer", - -12.540155410766602 - ], - [ - "Med", - -12.540275573730469 - ], - [ - "gewicht", - -12.540348052978516 - ], - [ - "▁Gaza", - -12.540430068969727 - ], - [ - "▁Newton", - -12.540769577026367 - ], - [ - "Dokument", - -12.540897369384766 - ], - [ - "formular", - -12.540945053100586 - ], - [ - "ILE", - -12.540964126586914 - ], - [ - "▁surse", - -12.541040420532227 - ], - [ - "MH", - -12.54116153717041 - ], - [ - "▁Arctic", - -12.541255950927734 - ], - [ - "▁ISBN", - -12.541274070739746 - ], - [ - "▁quarterback", - -12.541315078735352 - ], - [ - "▁absurd", - -12.541555404663086 - ], - [ - "▁Zusammenhang", - -12.541561126708984 - ], - [ - "▁Module", - -12.54156494140625 - ], - [ - "mented", - -12.541667938232422 - ], - [ - "worthy", - -12.541797637939453 - ], - [ - "▁célèbre", - -12.541828155517578 - ], - [ - "▁maritime", - -12.541836738586426 - ], - [ - "▁Reed", - -12.541938781738281 - ], - [ - "▁threaten", - -12.542037010192871 - ], - [ - "▁Satz", - -12.542095184326172 - ], - [ - "▁sticking", - -12.542203903198242 - ], - [ - "▁transcript", - -12.542372703552246 - ], - [ - "▁Morgen", - -12.542425155639648 - ], - [ - "▁Förder", - -12.542435646057129 - ], - [ - "▁Gottes", - -12.542572021484375 - ], - [ - "▁Coordinator", - -12.542648315429688 - ], - [ - "LOG", - -12.54265022277832 - ], - [ - "EAN", - -12.542677879333496 - ], - [ - "▁préparation", - -12.54273509979248 - ], - [ - "▁Brass", - -12.542799949645996 - ], - [ - "Așa", - -12.542853355407715 - ], - [ - "▁Utiliz", - -12.54294490814209 - ], - [ - "framed", - -12.542973518371582 - ], - [ - "▁asphalt", - -12.543050765991211 - ], - [ - "116", - -12.543061256408691 - ], - [ - "▁historically", - -12.54310417175293 - ], - [ - "▁doamn", - -12.543176651000977 - ], - [ - "Air", - -12.543293952941895 - ], - [ - "▁economist", - -12.543838500976562 - ], - [ - "fresh", - -12.54384994506836 - ], - [ - "engine", - -12.543906211853027 - ], - [ - "▁Rücken", - -12.543919563293457 - ], - [ - 
"▁worthwhile", - -12.544124603271484 - ], - [ - "▁Therapie", - -12.544140815734863 - ], - [ - "▁Joshua", - -12.544151306152344 - ], - [ - "sicherheit", - -12.544175148010254 - ], - [ - "▁scena", - -12.544254302978516 - ], - [ - "ifiant", - -12.54433822631836 - ], - [ - "/20", - -12.54442024230957 - ], - [ - "fehl", - -12.544469833374023 - ], - [ - "karten", - -12.544515609741211 - ], - [ - "501", - -12.544656753540039 - ], - [ - "▁vide", - -12.544673919677734 - ], - [ - "▁miliarde", - -12.544699668884277 - ], - [ - "▁trillion", - -12.54470157623291 - ], - [ - "oudre", - -12.544761657714844 - ], - [ - "nderung", - -12.544803619384766 - ], - [ - "▁inquiries", - -12.544992446899414 - ], - [ - "▁echipe", - -12.545034408569336 - ], - [ - "▁investiga", - -12.545040130615234 - ], - [ - "▁detailing", - -12.545042991638184 - ], - [ - "VIS", - -12.545086860656738 - ], - [ - "▁geographical", - -12.545157432556152 - ], - [ - "▁authentication", - -12.54519271850586 - ], - [ - "▁Schwa", - -12.545201301574707 - ], - [ - "▁Scri", - -12.545230865478516 - ], - [ - "▁discourage", - -12.54527473449707 - ], - [ - "Pass", - -12.54529094696045 - ], - [ - "▁scattered", - -12.54529857635498 - ], - [ - "▁langsam", - -12.545300483703613 - ], - [ - "telles", - -12.545380592346191 - ], - [ - "▁ramane", - -12.5454740524292 - ], - [ - "▁inhibitor", - -12.545486450195312 - ], - [ - "▁Habit", - -12.54556941986084 - ], - [ - "▁10:00", - -12.545577049255371 - ], - [ - "▁rezultat", - -12.545595169067383 - ], - [ - "äck", - -12.545943260192871 - ], - [ - ",000.", - -12.545979499816895 - ], - [ - "▁remedies", - -12.546103477478027 - ], - [ - "▁comportament", - -12.546195983886719 - ], - [ - "namen", - -12.546229362487793 - ], - [ - "▁#3", - -12.546327590942383 - ], - [ - "enstein", - -12.546493530273438 - ], - [ - "▁relevance", - -12.546516418457031 - ], - [ - "▁présentation", - -12.54655933380127 - ], - [ - "MHz", - -12.546648979187012 - ], - [ - "EMA", - -12.546661376953125 - ], - [ - "▁palace", - 
-12.546709060668945 - ], - [ - "▁vizibil", - -12.546723365783691 - ], - [ - "▁griev", - -12.546820640563965 - ], - [ - "▁severely", - -12.54688549041748 - ], - [ - "expert", - -12.546942710876465 - ], - [ - "▁ravi", - -12.54696273803711 - ], - [ - "▁feasible", - -12.547002792358398 - ], - [ - "▁Wholesale", - -12.547009468078613 - ], - [ - "▁graduat", - -12.547077178955078 - ], - [ - "Kü", - -12.547094345092773 - ], - [ - "▁quotation", - -12.547157287597656 - ], - [ - "/11", - -12.54716968536377 - ], - [ - "lutter", - -12.547415733337402 - ], - [ - "▁dice", - -12.547467231750488 - ], - [ - "modal", - -12.547749519348145 - ], - [ - "ggling", - -12.547819137573242 - ], - [ - "▁considér", - -12.547986030578613 - ], - [ - "▁Insel", - -12.548097610473633 - ], - [ - "▁Database", - -12.5483980178833 - ], - [ - "icism", - -12.548508644104004 - ], - [ - "▁quarterly", - -12.54851245880127 - ], - [ - "▁formule", - -12.548558235168457 - ], - [ - "▁renouvel", - -12.54873275756836 - ], - [ - "▁Treasure", - -12.548737525939941 - ], - [ - "▁1962", - -12.548844337463379 - ], - [ - "▁republic", - -12.549111366271973 - ], - [ - "▁États", - -12.549254417419434 - ], - [ - "▁salut", - -12.549356460571289 - ], - [ - "HK", - -12.54941463470459 - ], - [ - "▁Bali", - -12.549427032470703 - ], - [ - "▁Rechnung", - -12.549447059631348 - ], - [ - "fruit", - -12.54945182800293 - ], - [ - "lays", - -12.549467086791992 - ], - [ - "LAS", - -12.54951000213623 - ], - [ - "inclin", - -12.549708366394043 - ], - [ - "▁Cré", - -12.549813270568848 - ], - [ - "▁compt", - -12.54985237121582 - ], - [ - "țiilor", - -12.550056457519531 - ], - [ - "heft", - -12.550111770629883 - ], - [ - "▁Comisi", - -12.55024242401123 - ], - [ - "▁Nurse", - -12.550516128540039 - ], - [ - "loid", - -12.550540924072266 - ], - [ - "grove", - -12.550761222839355 - ], - [ - "▁Copy", - -12.550867080688477 - ], - [ - "▁Kampf", - -12.550873756408691 - ], - [ - "izată", - -12.550945281982422 - ], - [ - "würdig", - -12.551244735717773 - 
], - [ - "-2018", - -12.551305770874023 - ], - [ - "ozo", - -12.551350593566895 - ], - [ - "▁integriert", - -12.551397323608398 - ], - [ - "▁réunion", - -12.551448822021484 - ], - [ - "▁mică", - -12.551520347595215 - ], - [ - "▁Chau", - -12.551595687866211 - ], - [ - "▁allegations", - -12.551626205444336 - ], - [ - "▁shaping", - -12.551640510559082 - ], - [ - "▁transcription", - -12.551671981811523 - ], - [ - "▁Monica", - -12.551711082458496 - ], - [ - "▁torture", - -12.551795959472656 - ], - [ - "▁cooperative", - -12.551962852478027 - ], - [ - "▁invité", - -12.551987648010254 - ], - [ - "▁bamboo", - -12.552204132080078 - ], - [ - "▁Thinking", - -12.55232048034668 - ], - [ - "▁gratis", - -12.552392959594727 - ], - [ - "117", - -12.55267333984375 - ], - [ - "renz", - -12.55279541015625 - ], - [ - "▁Fußball", - -12.552823066711426 - ], - [ - "▁Gram", - -12.552873611450195 - ], - [ - "sprung", - -12.55290412902832 - ], - [ - "▁Schluss", - -12.55308723449707 - ], - [ - "▁Diploma", - -12.553345680236816 - ], - [ - "▁apparatus", - -12.553363800048828 - ], - [ - "notably", - -12.553483963012695 - ], - [ - "▁exercit", - -12.553532600402832 - ], - [ - "ământ", - -12.553536415100098 - ], - [ - "▁masses", - -12.553610801696777 - ], - [ - "▁preuve", - -12.553642272949219 - ], - [ - "great", - -12.553754806518555 - ], - [ - "▁Drink", - -12.553792953491211 - ], - [ - "islam", - -12.553828239440918 - ], - [ - "ARM", - -12.553914070129395 - ], - [ - "indre", - -12.554404258728027 - ], - [ - "DW", - -12.554410934448242 - ], - [ - "▁Flowers", - -12.554500579833984 - ], - [ - "▁pill", - -12.554574966430664 - ], - [ - "▁objectifs", - -12.554594039916992 - ], - [ - "▁Bezug", - -12.554659843444824 - ], - [ - "▁assumptions", - -12.55466365814209 - ], - [ - "▁vesti", - -12.554742813110352 - ], - [ - "route", - -12.554783821105957 - ], - [ - "▁Bangkok", - -12.554815292358398 - ], - [ - "▁seamlessly", - -12.55482006072998 - ], - [ - "config", - -12.554882049560547 - ], - [ - "▁username", - 
-12.554890632629395 - ], - [ - "unsure", - -12.555024147033691 - ], - [ - "▁poser", - -12.555129051208496 - ], - [ - "▁impozit", - -12.555246353149414 - ], - [ - "▁metode", - -12.555333137512207 - ], - [ - "defending", - -12.555347442626953 - ], - [ - "▁Nic", - -12.555431365966797 - ], - [ - "▁Vertrag", - -12.555508613586426 - ], - [ - "▁plăcut", - -12.55552864074707 - ], - [ - "▁Pou", - -12.555675506591797 - ], - [ - "UCH", - -12.555785179138184 - ], - [ - "▁Fein", - -12.555903434753418 - ], - [ - "reading", - -12.555994987487793 - ], - [ - "snip", - -12.55604076385498 - ], - [ - "▁Livre", - -12.556401252746582 - ], - [ - "lander", - -12.556509971618652 - ], - [ - "▁hydraulic", - -12.556559562683105 - ], - [ - "veiled", - -12.556563377380371 - ], - [ - "intr", - -12.556609153747559 - ], - [ - "▁Domnului", - -12.556641578674316 - ], - [ - "▁$0.", - -12.556713104248047 - ], - [ - "▁kilometers", - -12.556753158569336 - ], - [ - "spann", - -12.556870460510254 - ], - [ - "▁credibility", - -12.556892395019531 - ], - [ - "▁eBook", - -12.556953430175781 - ], - [ - "VERY", - -12.556994438171387 - ], - [ - "▁Charm", - -12.557122230529785 - ], - [ - "Evangeli", - -12.557193756103516 - ], - [ - "▁anderer", - -12.557193756103516 - ], - [ - "▁Entry", - -12.557195663452148 - ], - [ - "ffy", - -12.5573148727417 - ], - [ - "▁Exc", - -12.55737018585205 - ], - [ - "▁Omega", - -12.557446479797363 - ], - [ - "▁Funktionen", - -12.557455062866211 - ], - [ - "▁Gay", - -12.55752182006836 - ], - [ - "▁acht", - -12.557608604431152 - ], - [ - "colored", - -12.557615280151367 - ], - [ - "itude", - -12.557634353637695 - ], - [ - "▁accompagné", - -12.557645797729492 - ], - [ - "▁unfortunate", - -12.557981491088867 - ], - [ - "▁DIN", - -12.558091163635254 - ], - [ - "▁installment", - -12.558252334594727 - ], - [ - "▁indépendant", - -12.558307647705078 - ], - [ - "These", - -12.558364868164062 - ], - [ - "mitten", - -12.558394432067871 - ], - [ - "thank", - -12.558470726013184 - ], - [ - "▁Trek", 
- -12.558721542358398 - ], - [ - "üchte", - -12.55874252319336 - ], - [ - "▁cuir", - -12.55875015258789 - ], - [ - "▁turbo", - -12.558802604675293 - ], - [ - "Table", - -12.558847427368164 - ], - [ - "▁Extrem", - -12.558866500854492 - ], - [ - "▁advertisements", - -12.55915355682373 - ], - [ - "▁chaîne", - -12.559206008911133 - ], - [ - "▁corridor", - -12.559473991394043 - ], - [ - "▁râ", - -12.559651374816895 - ], - [ - "▁Opening", - -12.559718132019043 - ], - [ - "Get", - -12.559747695922852 - ], - [ - "▁storytelling", - -12.55976676940918 - ], - [ - "▁severity", - -12.559771537780762 - ], - [ - "4\"", - -12.559956550598145 - ], - [ - "▁parasit", - -12.559967994689941 - ], - [ - "angebot", - -12.56002426147461 - ], - [ - "Data", - -12.56005573272705 - ], - [ - "listen", - -12.560086250305176 - ], - [ - "▁vârstă", - -12.560094833374023 - ], - [ - "▁swallow", - -12.56025505065918 - ], - [ - "TRE", - -12.560321807861328 - ], - [ - "▁daunting", - -12.56035041809082 - ], - [ - "▁Oli", - -12.560481071472168 - ], - [ - "▁definitive", - -12.56066608428955 - ], - [ - "▁rezerva", - -12.560667037963867 - ], - [ - "/15", - -12.560807228088379 - ], - [ - "▁Landschaft", - -12.560887336730957 - ], - [ - "▁Automotive", - -12.560934066772461 - ], - [ - "▁convers", - -12.56113052368164 - ], - [ - "▁thru", - -12.561139106750488 - ], - [ - "▁Township", - -12.561140060424805 - ], - [ - "▁tilt", - -12.56119441986084 - ], - [ - "▁Criminal", - -12.561227798461914 - ], - [ - "riez", - -12.561407089233398 - ], - [ - "▁Parking", - -12.561440467834473 - ], - [ - "▁humanitarian", - -12.561518669128418 - ], - [ - "▁Kilometer", - -12.561529159545898 - ], - [ - "controlled", - -12.56189250946045 - ], - [ - "▁Klick", - -12.561910629272461 - ], - [ - "support", - -12.56199836730957 - ], - [ - "handed", - -12.562005996704102 - ], - [ - "ämtliche", - -12.562104225158691 - ], - [ - "access", - -12.562232971191406 - ], - [ - "▁eleven", - -12.562232971191406 - ], - [ - "▁ferry", - -12.56229305267334 - 
], - [ - "zieren", - -12.562620162963867 - ], - [ - "▁Gebrauch", - -12.562688827514648 - ], - [ - "▁vigoare", - -12.562689781188965 - ], - [ - "MON", - -12.562756538391113 - ], - [ - "fox", - -12.562886238098145 - ], - [ - "bestimmten", - -12.562894821166992 - ], - [ - "▁Gur", - -12.563069343566895 - ], - [ - "▁Mannschaft", - -12.563146591186523 - ], - [ - "▁patrol", - -12.563173294067383 - ], - [ - "▁casă", - -12.563376426696777 - ], - [ - "▁Stories", - -12.563380241394043 - ], - [ - "▁robotic", - -12.563425064086914 - ], - [ - "tiri", - -12.563576698303223 - ], - [ - "gewiesen", - -12.5636568069458 - ], - [ - "CV", - -12.563722610473633 - ], - [ - "▁parinti", - -12.563899040222168 - ], - [ - "▁Owen", - -12.563931465148926 - ], - [ - "▁Katie", - -12.564116477966309 - ], - [ - "▁Combine", - -12.56422233581543 - ], - [ - "enfalls", - -12.56442928314209 - ], - [ - "▁financière", - -12.564447402954102 - ], - [ - "▁parliament", - -12.564549446105957 - ], - [ - "▁Weekend", - -12.564616203308105 - ], - [ - "▁Sonic", - -12.564757347106934 - ], - [ - "▁fixture", - -12.56479263305664 - ], - [ - "majorité", - -12.56497573852539 - ], - [ - "▁gravel", - -12.565028190612793 - ], - [ - "realizate", - -12.565109252929688 - ], - [ - "examining", - -12.565113067626953 - ], - [ - "▁grim", - -12.5653657913208 - ], - [ - "▁stabili", - -12.565458297729492 - ], - [ - "▁Wochenende", - -12.56551456451416 - ], - [ - "▁Hebrew", - -12.565597534179688 - ], - [ - "▁Harrison", - -12.565799713134766 - ], - [ - "▁boundary", - -12.565858840942383 - ], - [ - "40,000", - -12.565902709960938 - ], - [ - "▁Ambassador", - -12.566208839416504 - ], - [ - "▁scoate", - -12.566229820251465 - ], - [ - "ffin", - -12.56623363494873 - ], - [ - "▁crème", - -12.566269874572754 - ], - [ - "▁obiecte", - -12.566378593444824 - ], - [ - "enţa", - -12.566763877868652 - ], - [ - "▁subsidiary", - -12.566797256469727 - ], - [ - "▁Franco", - -12.56688404083252 - ], - [ - "▁visuel", - -12.567042350769043 - ], - [ - "▁uitat", 
- -12.56708812713623 - ], - [ - "▁revisit", - -12.567122459411621 - ], - [ - "▁Camping", - -12.567150115966797 - ], - [ - "▁Divine", - -12.567304611206055 - ], - [ - "4-6", - -12.567323684692383 - ], - [ - "▁Brandon", - -12.567378997802734 - ], - [ - "ма", - -12.567450523376465 - ], - [ - "sofern", - -12.56745433807373 - ], - [ - "ntweder", - -12.56748104095459 - ], - [ - "▁Shoot", - -12.567618370056152 - ], - [ - "étais", - -12.56771183013916 - ], - [ - "SPEC", - -12.567930221557617 - ], - [ - "▁dreapta", - -12.567973136901855 - ], - [ - "▁repaired", - -12.568055152893066 - ], - [ - "pyr", - -12.568136215209961 - ], - [ - "▁warranties", - -12.568175315856934 - ], - [ - "▁représent", - -12.568263053894043 - ], - [ - "ADE", - -12.568293571472168 - ], - [ - "▁selective", - -12.56836223602295 - ], - [ - "▁Banking", - -12.568441390991211 - ], - [ - "▁ergonomic", - -12.568562507629395 - ], - [ - "...”", - -12.568602561950684 - ], - [ - "▁willingness", - -12.56867790222168 - ], - [ - "isser", - -12.568784713745117 - ], - [ - "▁confection", - -12.568961143493652 - ], - [ - "admi", - -12.569009780883789 - ], - [ - "▁Freizeit", - -12.569023132324219 - ], - [ - "▁illuminate", - -12.569151878356934 - ], - [ - "▁Repeat", - -12.569170951843262 - ], - [ - "▁Zeitpunkt", - -12.56933879852295 - ], - [ - "claimed", - -12.569439888000488 - ], - [ - "▁erhältlich", - -12.569480895996094 - ], - [ - "▁paysage", - -12.569537162780762 - ], - [ - "▁Atom", - -12.569890022277832 - ], - [ - "▁Graf", - -12.570086479187012 - ], - [ - "▁firmware", - -12.570093154907227 - ], - [ - "▁Swift", - -12.570180892944336 - ], - [ - "▁cercetare", - -12.57018756866455 - ], - [ - "▁internațional", - -12.570330619812012 - ], - [ - "▁zombie", - -12.570330619812012 - ], - [ - "▁Spread", - -12.57050609588623 - ], - [ - "ECO", - -12.57056999206543 - ], - [ - "▁Gestaltung", - -12.570758819580078 - ], - [ - "rast", - -12.570858001708984 - ], - [ - "▁perfume", - -12.5709228515625 - ], - [ - "▁roulette", - 
-12.570924758911133 - ], - [ - "▁distill", - -12.57096004486084 - ], - [ - "▁Produkten", - -12.570992469787598 - ], - [ - "225", - -12.571310043334961 - ], - [ - "facing", - -12.571371078491211 - ], - [ - "▁paradigm", - -12.571514129638672 - ], - [ - "▁Rah", - -12.571532249450684 - ], - [ - "▁Renault", - -12.571846961975098 - ], - [ - "willig", - -12.571864128112793 - ], - [ - "▁Vet", - -12.571890830993652 - ], - [ - "▁reprezenta", - -12.572126388549805 - ], - [ - "stoß", - -12.572185516357422 - ], - [ - "▁Weiß", - -12.5722074508667 - ], - [ - "▁Solo", - -12.572210311889648 - ], - [ - "▁Jin", - -12.572646141052246 - ], - [ - "▁Brussels", - -12.572693824768066 - ], - [ - "▁Tournament", - -12.572693824768066 - ], - [ - "▁proced", - -12.572710037231445 - ], - [ - "▁Rabbi", - -12.572835922241211 - ], - [ - "▁gameplay", - -12.572851181030273 - ], - [ - "▁ATM", - -12.572901725769043 - ], - [ - "▁firearm", - -12.572906494140625 - ], - [ - "revealing", - -12.573003768920898 - ], - [ - "schütz", - -12.57310676574707 - ], - [ - "▁Absolutely", - -12.573288917541504 - ], - [ - "▁interference", - -12.573433876037598 - ], - [ - "▁Employment", - -12.573558807373047 - ], - [ - "▁chord", - -12.57356071472168 - ], - [ - "▁oportun", - -12.573585510253906 - ], - [ - "▁frontier", - -12.573770523071289 - ], - [ - "▁Lunch", - -12.573891639709473 - ], - [ - "bread", - -12.57397174835205 - ], - [ - "▁rendered", - -12.573976516723633 - ], - [ - "5.1", - -12.573984146118164 - ], - [ - "▁motif", - -12.574066162109375 - ], - [ - "▁Schlag", - -12.574227333068848 - ], - [ - "113", - -12.574264526367188 - ], - [ - "▁Deux", - -12.574288368225098 - ], - [ - "▁surplus", - -12.574309349060059 - ], - [ - "ALS", - -12.574417114257812 - ], - [ - "▁abortion", - -12.574472427368164 - ], - [ - "▁airplane", - -12.574475288391113 - ], - [ - "▁migrants", - -12.574501991271973 - ], - [ - "kli", - -12.574539184570312 - ], - [ - "▁crochet", - -12.57454776763916 - ], - [ - "fahrer", - -12.574671745300293 - ], - [ 
- "▁reconstruction", - -12.57471752166748 - ], - [ - "▁difer", - -12.574752807617188 - ], - [ - "▁Conserv", - -12.57478141784668 - ], - [ - "▁NSW", - -12.57479476928711 - ], - [ - "▁regim", - -12.574844360351562 - ], - [ - "▁Except", - -12.574904441833496 - ], - [ - "▁trage", - -12.574978828430176 - ], - [ - "▁Consiliul", - -12.575058937072754 - ], - [ - "▁Bedarf", - -12.575064659118652 - ], - [ - "▁additive", - -12.5750732421875 - ], - [ - "know", - -12.5751371383667 - ], - [ - "▁sauna", - -12.57517147064209 - ], - [ - "▁mortality", - -12.575201034545898 - ], - [ - "kräftig", - -12.575358390808105 - ], - [ - "▁Own", - -12.575445175170898 - ], - [ - "nzo", - -12.575519561767578 - ], - [ - "▁villes", - -12.575543403625488 - ], - [ - "▁recette", - -12.575749397277832 - ], - [ - "▁attacking", - -12.575799942016602 - ], - [ - "beruf", - -12.57608699798584 - ], - [ - "▁integrat", - -12.57612419128418 - ], - [ - "realizarea", - -12.576201438903809 - ], - [ - "▁exemption", - -12.57628345489502 - ], - [ - "GW", - -12.576285362243652 - ], - [ - "▁Nano", - -12.576395034790039 - ], - [ - "SCH", - -12.576440811157227 - ], - [ - "▁honesty", - -12.576457023620605 - ], - [ - "▁Arriv", - -12.576515197753906 - ], - [ - "▁gland", - -12.576542854309082 - ], - [ - "▁proactive", - -12.576746940612793 - ], - [ - "▁agile", - -12.576837539672852 - ], - [ - "▁kernel", - -12.576844215393066 - ], - [ - "▁nurture", - -12.576860427856445 - ], - [ - "▁Patent", - -12.576963424682617 - ], - [ - "▁excursi", - -12.577189445495605 - ], - [ - "pulsion", - -12.577326774597168 - ], - [ - "stellte", - -12.577351570129395 - ], - [ - "ständige", - -12.577421188354492 - ], - [ - "▁Rebecca", - -12.577436447143555 - ], - [ - "▁Securities", - -12.577436447143555 - ], - [ - "mètre", - -12.577446937561035 - ], - [ - "LOW", - -12.577469825744629 - ], - [ - "▁consilier", - -12.577537536621094 - ], - [ - "▁Architekt", - -12.577733993530273 - ], - [ - "▁china", - -12.57777214050293 - ], - [ - "älfte", - 
-12.577778816223145 - ], - [ - "▁Combin", - -12.577795028686523 - ], - [ - "480", - -12.577999114990234 - ], - [ - "liv", - -12.578021049499512 - ], - [ - "▁peur", - -12.578067779541016 - ], - [ - "keep", - -12.57822322845459 - ], - [ - "▁Verhalten", - -12.578324317932129 - ], - [ - "▁peek", - -12.578446388244629 - ], - [ - "▁dient", - -12.578550338745117 - ], - [ - "▁prevazut", - -12.578625679016113 - ], - [ - "Emmanuel", - -12.57862663269043 - ], - [ - "▁incidence", - -12.57862663269043 - ], - [ - "▁Framework", - -12.578715324401855 - ], - [ - "dass", - -12.578816413879395 - ], - [ - "artiste", - -12.578874588012695 - ], - [ - "▁Accept", - -12.578971862792969 - ], - [ - "▁plunge", - -12.579073905944824 - ], - [ - "chauff", - -12.579118728637695 - ], - [ - "▁guilt", - -12.579156875610352 - ], - [ - "▁senator", - -12.57945442199707 - ], - [ - "▁disable", - -12.579776763916016 - ], - [ - "▁partout", - -12.579901695251465 - ], - [ - "JC", - -12.580045700073242 - ], - [ - "▁Highly", - -12.580150604248047 - ], - [ - "▁beneficii", - -12.58021068572998 - ], - [ - "fibro", - -12.580347061157227 - ], - [ - "interpreted", - -12.580550193786621 - ], - [ - "▁genauso", - -12.58056354522705 - ], - [ - "▁basil", - -12.580601692199707 - ], - [ - "▁Angst", - -12.580697059631348 - ], - [ - "rzte", - -12.580933570861816 - ], - [ - "Master", - -12.58112907409668 - ], - [ - "▁french", - -12.581324577331543 - ], - [ - "▁Duration", - -12.581343650817871 - ], - [ - "HM", - -12.581402778625488 - ], - [ - "▁Bert", - -12.581518173217773 - ], - [ - "▁1963", - -12.581534385681152 - ], - [ - "▁warrior", - -12.581604957580566 - ], - [ - "2007", - -12.581696510314941 - ], - [ - "▁recycle", - -12.581722259521484 - ], - [ - "▁fertiliz", - -12.581808090209961 - ], - [ - "▁hatch", - -12.581809997558594 - ], - [ - "ISH", - -12.581811904907227 - ], - [ - "luft", - -12.582321166992188 - ], - [ - "▁crying", - -12.582452774047852 - ], - [ - "▁activist", - -12.5824613571167 - ], - [ - "schränkt", - 
-12.582500457763672 - ], - [ - "▁diff", - -12.582500457763672 - ], - [ - "▁Demand", - -12.58262825012207 - ], - [ - "▁transported", - -12.582669258117676 - ], - [ - "▁Remodel", - -12.582686424255371 - ], - [ - "▁Etats", - -12.582704544067383 - ], - [ - "ANI", - -12.582777976989746 - ], - [ - "▁spéciale", - -12.582804679870605 - ], - [ - "▁Konzert", - -12.582805633544922 - ], - [ - "▁Bedürfnisse", - -12.58281135559082 - ], - [ - "▁overlooked", - -12.582864761352539 - ], - [ - "▁cutter", - -12.582974433898926 - ], - [ - "klär", - -12.58311939239502 - ], - [ - "▁Materialien", - -12.583135604858398 - ], - [ - "▁gewisse", - -12.583388328552246 - ], - [ - "bull", - -12.583499908447266 - ], - [ - "Good", - -12.583513259887695 - ], - [ - "Gig", - -12.583616256713867 - ], - [ - "Logic", - -12.583736419677734 - ], - [ - "▁Schlaf", - -12.583970069885254 - ], - [ - "▁Yankee", - -12.583996772766113 - ], - [ - "▁Batman", - -12.584020614624023 - ], - [ - "▁funcție", - -12.584166526794434 - ], - [ - "▁partenariat", - -12.584294319152832 - ], - [ - "▁Antrag", - -12.584348678588867 - ], - [ - "▁Pill", - -12.584519386291504 - ], - [ - "▁tram", - -12.584637641906738 - ], - [ - "▁Minor", - -12.58465576171875 - ], - [ - "pertaining", - -12.584678649902344 - ], - [ - "▁apropiere", - -12.584843635559082 - ], - [ - "▁Barack", - -12.584965705871582 - ], - [ - "schön", - -12.585174560546875 - ], - [ - "▁Sandy", - -12.585182189941406 - ], - [ - "kilometre", - -12.585192680358887 - ], - [ - "▁diy", - -12.585234642028809 - ], - [ - "▁1966", - -12.585453987121582 - ], - [ - "gelassen", - -12.585485458374023 - ], - [ - "▁Trial", - -12.585592269897461 - ], - [ - "▁Bauer", - -12.585603713989258 - ], - [ - "▁assumption", - -12.585648536682129 - ], - [ - "birth", - -12.585668563842773 - ], - [ - "rechnen", - -12.585861206054688 - ], - [ - "▁meci", - -12.585867881774902 - ], - [ - "▁gloss", - -12.585906982421875 - ], - [ - "▁sewer", - -12.58593463897705 - ], - [ - "▁Stimme", - -12.585955619812012 - ], 
- [ - "▁Fortune", - -12.585967063903809 - ], - [ - "▁Lösungen", - -12.586007118225098 - ], - [ - "▁impresi", - -12.586074829101562 - ], - [ - "schlaf", - -12.586089134216309 - ], - [ - "prüfung", - -12.586097717285156 - ], - [ - "▁instalat", - -12.586198806762695 - ], - [ - "▁picturesque", - -12.586233139038086 - ], - [ - "vait", - -12.586240768432617 - ], - [ - "8.1", - -12.58629035949707 - ], - [ - "▁călători", - -12.586392402648926 - ], - [ - "▁dix", - -12.586400032043457 - ], - [ - "▁furnished", - -12.586411476135254 - ], - [ - "▁dolari", - -12.586445808410645 - ], - [ - "▁regener", - -12.586562156677246 - ], - [ - "▁astazi", - -12.586621284484863 - ], - [ - "▁Sprach", - -12.586750030517578 - ], - [ - "delà", - -12.586846351623535 - ], - [ - "avec", - -12.58694076538086 - ], - [ - "▁Buddhist", - -12.586990356445312 - ], - [ - "▁alphabet", - -12.586990356445312 - ], - [ - "▁berichtet", - -12.587201118469238 - ], - [ - "ideally", - -12.587209701538086 - ], - [ - "▁annuel", - -12.587421417236328 - ], - [ - "▁laughing", - -12.587532997131348 - ], - [ - "▁Zustand", - -12.587639808654785 - ], - [ - "cini", - -12.587692260742188 - ], - [ - "solid", - -12.587724685668945 - ], - [ - "▁Broker", - -12.587868690490723 - ], - [ - "▁developmental", - -12.5879545211792 - ], - [ - "▁Summary", - -12.588191032409668 - ], - [ - "▁Trinity", - -12.58819580078125 - ], - [ - "▁sucre", - -12.58821964263916 - ], - [ - "▁sandal", - -12.588231086730957 - ], - [ - "PEN", - -12.588274955749512 - ], - [ - "gewinn", - -12.588486671447754 - ], - [ - "olé", - -12.588555335998535 - ], - [ - "matric", - -12.58865737915039 - ], - [ - "xton", - -12.588695526123047 - ], - [ - "werten", - -12.588740348815918 - ], - [ - "▁Dust", - -12.588765144348145 - ], - [ - "▁Journey", - -12.588791847229004 - ], - [ - "▁Rush", - -12.588793754577637 - ], - [ - "▁NCAA", - -12.588839530944824 - ], - [ - "▁allgemeine", - -12.588926315307617 - ], - [ - "▁Universe", - -12.589007377624512 - ], - [ - "▁connais", - 
-12.589099884033203 - ], - [ - "▁quantité", - -12.58912467956543 - ], - [ - "▁Kab", - -12.589150428771973 - ], - [ - "▁purse", - -12.589150428771973 - ], - [ - "Health", - -12.589210510253906 - ], - [ - "▁apărut", - -12.589288711547852 - ], - [ - "▁bypass", - -12.589313507080078 - ], - [ - "pronounced", - -12.58936595916748 - ], - [ - "▁magnitude", - -12.589393615722656 - ], - [ - "▁Walmart", - -12.589394569396973 - ], - [ - "ède", - -12.589409828186035 - ], - [ - "▁serum", - -12.589590072631836 - ], - [ - "▁baseline", - -12.589765548706055 - ], - [ - "STER", - -12.589932441711426 - ], - [ - "▁ONLY", - -12.590052604675293 - ], - [ - "▁individuell", - -12.590086936950684 - ], - [ - "▁Ghi", - -12.590139389038086 - ], - [ - "▁Ruby", - -12.59020709991455 - ], - [ - "▁Chal", - -12.590241432189941 - ], - [ - "▁Vier", - -12.590261459350586 - ], - [ - "5.0", - -12.5903902053833 - ], - [ - "▁fog", - -12.590519905090332 - ], - [ - "esel", - -12.590557098388672 - ], - [ - "▁Python", - -12.590598106384277 - ], - [ - "▁urmează", - -12.590608596801758 - ], - [ - "▁trustworthy", - -12.590639114379883 - ], - [ - "hört", - -12.590729713439941 - ], - [ - "▁tâche", - -12.59078311920166 - ], - [ - "Patri", - -12.590799331665039 - ], - [ - "▁grind", - -12.590928077697754 - ], - [ - "▁Raven", - -12.590934753417969 - ], - [ - "▁poursuiv", - -12.590951919555664 - ], - [ - "▁simpli", - -12.591140747070312 - ], - [ - "▁echo", - -12.591165542602539 - ], - [ - "▁Attention", - -12.591313362121582 - ], - [ - "Against", - -12.591402053833008 - ], - [ - "GET", - -12.59148120880127 - ], - [ - "▁turistic", - -12.591535568237305 - ], - [ - "▁tenure", - -12.59158992767334 - ], - [ - "▁alimentaire", - -12.591651916503906 - ], - [ - "Who", - -12.59172248840332 - ], - [ - "▁ändern", - -12.591729164123535 - ], - [ - "▁rebound", - -12.591778755187988 - ], - [ - "grenze", - -12.591849327087402 - ], - [ - "▁Fame", - -12.592093467712402 - ], - [ - "▁Kick", - -12.592215538024902 - ], - [ - "▁Detail", - 
-12.59228801727295 - ], - [ - "▁Push", - -12.592308044433594 - ], - [ - "production", - -12.592430114746094 - ], - [ - "▁Candidates", - -12.59244441986084 - ], - [ - "▁reușit", - -12.592484474182129 - ], - [ - "istischen", - -12.592525482177734 - ], - [ - "lassung", - -12.592649459838867 - ], - [ - "▁Hann", - -12.592713356018066 - ], - [ - "espère", - -12.592965126037598 - ], - [ - "▁vergessen", - -12.593008041381836 - ], - [ - "▁smiling", - -12.593010902404785 - ], - [ - "▁devotion", - -12.593016624450684 - ], - [ - "▁pastry", - -12.593071937561035 - ], - [ - "Add", - -12.593390464782715 - ], - [ - "▁authorization", - -12.593494415283203 - ], - [ - "▁Suisse", - -12.593568801879883 - ], - [ - "▁Berkeley", - -12.593611717224121 - ], - [ - "▁Guild", - -12.593660354614258 - ], - [ - "▁choir", - -12.593748092651367 - ], - [ - "learning", - -12.593802452087402 - ], - [ - "▁Tanz", - -12.593894004821777 - ], - [ - "mardi", - -12.594076156616211 - ], - [ - "▁rezultatele", - -12.594191551208496 - ], - [ - "▁earrings", - -12.594218254089355 - ], - [ - "▁turbine", - -12.594223976135254 - ], - [ - "▁jeudi", - -12.594284057617188 - ], - [ - "terapie", - -12.594576835632324 - ], - [ - "regain", - -12.59461498260498 - ], - [ - "SET", - -12.594643592834473 - ], - [ - "▁Hände", - -12.594681739807129 - ], - [ - "▁Globe", - -12.594683647155762 - ], - [ - "frag", - -12.594775199890137 - ], - [ - "▁Treasury", - -12.594820976257324 - ], - [ - "▁hazardous", - -12.594820976257324 - ], - [ - "▁Fahrt", - -12.594928741455078 - ], - [ - "▁fulfilled", - -12.594966888427734 - ], - [ - "▁manga", - -12.594987869262695 - ], - [ - "▁composé", - -12.595067977905273 - ], - [ - "▁ABS", - -12.595132827758789 - ], - [ - "▁preced", - -12.595197677612305 - ], - [ - "▁beauté", - -12.595233917236328 - ], - [ - "▁interessant", - -12.59526252746582 - ], - [ - "▁lieber", - -12.595324516296387 - ], - [ - "▁Kö", - -12.595378875732422 - ], - [ - "EMS", - -12.595410346984863 - ], - [ - "FER", - -12.595413208007812 
- ], - [ - "▁eure", - -12.595427513122559 - ], - [ - "▁plumber", - -12.595427513122559 - ], - [ - "Love", - -12.595463752746582 - ], - [ - "▁Marcus", - -12.595635414123535 - ], - [ - "▁registry", - -12.595637321472168 - ], - [ - "▁uncle", - -12.595696449279785 - ], - [ - "▁neuf", - -12.595728874206543 - ], - [ - "▁Fläche", - -12.59575080871582 - ], - [ - "▁restaur", - -12.595815658569336 - ], - [ - "▁noticeable", - -12.595833778381348 - ], - [ - "▁riches", - -12.595871925354004 - ], - [ - "occupy", - -12.596031188964844 - ], - [ - "▁hurricane", - -12.596031188964844 - ], - [ - "▁gespeichert", - -12.596033096313477 - ], - [ - "▁Bordeaux", - -12.596039772033691 - ], - [ - "▁Maj", - -12.59637451171875 - ], - [ - "Applied", - -12.596439361572266 - ], - [ - "▁compter", - -12.596575736999512 - ], - [ - "impact", - -12.59663200378418 - ], - [ - "▁Improve", - -12.596758842468262 - ], - [ - "▁Calif", - -12.596832275390625 - ], - [ - "▁desfășur", - -12.596939086914062 - ], - [ - "▁packaged", - -12.597001075744629 - ], - [ - "180", - -12.59703540802002 - ], - [ - "devenu", - -12.597042083740234 - ], - [ - "▁Battery", - -12.597243309020996 - ], - [ - "▁objection", - -12.597254753112793 - ], - [ - "▁anual", - -12.597305297851562 - ], - [ - "▁Landscape", - -12.59731674194336 - ], - [ - "IQ", - -12.597403526306152 - ], - [ - "grès", - -12.597586631774902 - ], - [ - "▁witnesses", - -12.597750663757324 - ], - [ - "enţial", - -12.597764015197754 - ], - [ - "▁plateau", - -12.597779273986816 - ], - [ - "▁bilete", - -12.59783935546875 - ], - [ - "▁Bronze", - -12.59786605834961 - ], - [ - "▁Kiss", - -12.597946166992188 - ], - [ - "▁Serge", - -12.598093032836914 - ], - [ - "atomic", - -12.598145484924316 - ], - [ - "▁renovated", - -12.59817886352539 - ], - [ - "player", - -12.598212242126465 - ], - [ - "▁dirig", - -12.598291397094727 - ], - [ - "▁Îm", - -12.598296165466309 - ], - [ - "▁plimb", - -12.59843635559082 - ], - [ - "▁ambassador", - -12.598455429077148 - ], - [ - "▁apropiat", - 
-12.598455429077148 - ], - [ - "▁adaug", - -12.598602294921875 - ], - [ - "ogenic", - -12.59872055053711 - ], - [ - "kämpfe", - -12.598779678344727 - ], - [ - "▁Hillary", - -12.598907470703125 - ], - [ - "yak", - -12.598942756652832 - ], - [ - "General", - -12.59925365447998 - ], - [ - "▁Zugang", - -12.599400520324707 - ], - [ - "▁fertil", - -12.599457740783691 - ], - [ - "incat", - -12.599536895751953 - ], - [ - "assessing", - -12.599587440490723 - ], - [ - "▁Cincinnati", - -12.59967041015625 - ], - [ - "▁convincing", - -12.599685668945312 - ], - [ - "sadly", - -12.59974479675293 - ], - [ - "kunde", - -12.599801063537598 - ], - [ - "ambul", - -12.599913597106934 - ], - [ - "▁familii", - -12.599974632263184 - ], - [ - "juri", - -12.60007095336914 - ], - [ - "ionen", - -12.600102424621582 - ], - [ - "▁Wirtschaft", - -12.600130081176758 - ], - [ - "contract", - -12.600135803222656 - ], - [ - "punem", - -12.600151062011719 - ], - [ - "handlung", - -12.600394248962402 - ], - [ - "▁fournir", - -12.600455284118652 - ], - [ - "▁Ambi", - -12.600663185119629 - ], - [ - "▁Isaac", - -12.600663185119629 - ], - [ - "▁praying", - -12.6007719039917 - ], - [ - "▁Italien", - -12.600848197937012 - ], - [ - "233", - -12.600850105285645 - ], - [ - "spawn", - -12.600913047790527 - ], - [ - "▁legii", - -12.60092544555664 - ], - [ - "▁zuvor", - -12.601018905639648 - ], - [ - "▁comune", - -12.601030349731445 - ], - [ - "official", - -12.601165771484375 - ], - [ - "144", - -12.601290702819824 - ], - [ - "izeaza", - -12.601329803466797 - ], - [ - "▁Keller", - -12.601372718811035 - ], - [ - "ORE", - -12.601378440856934 - ], - [ - "122", - -12.601485252380371 - ], - [ - "incurred", - -12.60150146484375 - ], - [ - "CHA", - -12.601579666137695 - ], - [ - "▁Herzen", - -12.601590156555176 - ], - [ - "▁reasoning", - -12.6016263961792 - ], - [ - "affaire", - -12.601849555969238 - ], - [ - "ooth", - -12.601890563964844 - ], - [ - "155", - -12.601998329162598 - ], - [ - "▁invented", - 
-12.602113723754883 - ], - [ - "▁Comun", - -12.602140426635742 - ], - [ - "zähl", - -12.602179527282715 - ], - [ - "geliefert", - -12.602212905883789 - ], - [ - "explorer", - -12.602213859558105 - ], - [ - "nect", - -12.602326393127441 - ], - [ - "▁mercredi", - -12.602408409118652 - ], - [ - "▁volonté", - -12.602408409118652 - ], - [ - "easy", - -12.602453231811523 - ], - [ - "▁feat", - -12.602490425109863 - ], - [ - "rented", - -12.602580070495605 - ], - [ - "▁converter", - -12.602592468261719 - ], - [ - "Verhältnis", - -12.602713584899902 - ], - [ - "▁Iceland", - -12.602792739868164 - ], - [ - "▁pretul", - -12.602933883666992 - ], - [ - "▁Vorstellung", - -12.602960586547852 - ], - [ - "▁hydrogen", - -12.603096008300781 - ], - [ - "▁pouvai", - -12.603097915649414 - ], - [ - "▁dawn", - -12.603153228759766 - ], - [ - "▁Georg", - -12.603269577026367 - ], - [ - "▁cautious", - -12.603367805480957 - ], - [ - "▁Pattern", - -12.603464126586914 - ], - [ - "▁Ox", - -12.603602409362793 - ], - [ - "▁decizie", - -12.603676795959473 - ], - [ - "REC", - -12.603889465332031 - ], - [ - "▁Mortgage", - -12.60393238067627 - ], - [ - "attributed", - -12.603973388671875 - ], - [ - "floor", - -12.603992462158203 - ], - [ - "▁Wichtig", - -12.604207992553711 - ], - [ - "enseignant", - -12.604265213012695 - ], - [ - "▁civilization", - -12.604302406311035 - ], - [ - "▁dispozitie", - -12.60450553894043 - ], - [ - "▁geographic", - -12.604543685913086 - ], - [ - "▁Kun", - -12.604607582092285 - ], - [ - "LIN", - -12.604679107666016 - ], - [ - "▁auzit", - -12.604707717895508 - ], - [ - "except", - -12.604761123657227 - ], - [ - "▁superbe", - -12.604904174804688 - ], - [ - "▁installé", - -12.605000495910645 - ], - [ - "▁Peninsula", - -12.605154037475586 - ], - [ - "▁norme", - -12.605164527893066 - ], - [ - "elul", - -12.60517406463623 - ], - [ - "▁Experten", - -12.605256080627441 - ], - [ - "expression", - -12.605295181274414 - ], - [ - "Christ", - -12.605320930480957 - ], - [ - "▁Fuel", - 
-12.605369567871094 - ], - [ - "▁muffin", - -12.605485916137695 - ], - [ - "▁lecteur", - -12.605521202087402 - ], - [ - "▁gifted", - -12.605589866638184 - ], - [ - "▁Japon", - -12.605602264404297 - ], - [ - "▁SSD", - -12.605644226074219 - ], - [ - "▁Calgary", - -12.605765342712402 - ], - [ - "▁hooked", - -12.605876922607422 - ], - [ - "▁Joan", - -12.605896949768066 - ], - [ - "▁tangible", - -12.606083869934082 - ], - [ - "FW", - -12.606225967407227 - ], - [ - "olli", - -12.6062593460083 - ], - [ - "▁Platinum", - -12.606376647949219 - ], - [ - "▁miniature", - -12.606392860412598 - ], - [ - "▁lump", - -12.606608390808105 - ], - [ - "ologische", - -12.60689926147461 - ], - [ - "▁Istanbul", - -12.606987953186035 - ], - [ - "▁Compar", - -12.607060432434082 - ], - [ - "tropic", - -12.607256889343262 - ], - [ - "KING", - -12.607279777526855 - ], - [ - "Präsident", - -12.607297897338867 - ], - [ - "▁fotografii", - -12.607303619384766 - ], - [ - "hoped", - -12.607451438903809 - ], - [ - "▁pâte", - -12.607601165771484 - ], - [ - "▁mercy", - -12.60760498046875 - ], - [ - "▁quiz", - -12.607619285583496 - ], - [ - "demonstrating", - -12.607678413391113 - ], - [ - "▁douce", - -12.607832908630371 - ], - [ - "▁Vest", - -12.607841491699219 - ], - [ - "▁Harvey", - -12.6082181930542 - ], - [ - "▁breit", - -12.608227729797363 - ], - [ - "▁Bereits", - -12.608291625976562 - ], - [ - "▁breakthrough", - -12.608316421508789 - ], - [ - "▁masterpiece", - -12.608320236206055 - ], - [ - "▁Chester", - -12.60838794708252 - ], - [ - "▁indiqué", - -12.608451843261719 - ], - [ - "hook", - -12.60857105255127 - ], - [ - "statutory", - -12.608596801757812 - ], - [ - "▁Direkt", - -12.608617782592773 - ], - [ - "▁specs", - -12.608708381652832 - ], - [ - "Drive", - -12.608725547790527 - ], - [ - "▁survivors", - -12.608826637268066 - ], - [ - "▁jackpot", - -12.608840942382812 - ], - [ - "▁garder", - -12.608872413635254 - ], - [ - "▁Geburtstag", - -12.60887336730957 - ], - [ - "145", - -12.608963966369629 
- ], - [ - "▁Clay", - -12.609028816223145 - ], - [ - "▁WHO", - -12.60906982421875 - ], - [ - "▁Ellen", - -12.609393119812012 - ], - [ - "▁bonheur", - -12.609440803527832 - ], - [ - "▁hazards", - -12.609440803527832 - ], - [ - "▁Kaiser", - -12.609488487243652 - ], - [ - "▁tightly", - -12.609506607055664 - ], - [ - "Universitatea", - -12.609529495239258 - ], - [ - "▁rinse", - -12.609533309936523 - ], - [ - "▁passant", - -12.609640121459961 - ], - [ - "▁sânge", - -12.609832763671875 - ], - [ - "▁peuple", - -12.60983657836914 - ], - [ - "jungen", - -12.609975814819336 - ], - [ - "▁inappropriate", - -12.610054969787598 - ], - [ - "▁mitigate", - -12.610066413879395 - ], - [ - "MID", - -12.610221862792969 - ], - [ - "▁telecom", - -12.610297203063965 - ], - [ - "▁plaj", - -12.610316276550293 - ], - [ - "▁presupune", - -12.610361099243164 - ], - [ - "acco", - -12.61038875579834 - ], - [ - "expressing", - -12.610654830932617 - ], - [ - "▁Symphony", - -12.61066722869873 - ], - [ - "temperatur", - -12.610710144042969 - ], - [ - "▁activităţi", - -12.610800743103027 - ], - [ - "▁amended", - -12.610847473144531 - ], - [ - "▁rehab", - -12.610909461975098 - ], - [ - "▁sportiv", - -12.611004829406738 - ], - [ - "hotel", - -12.611031532287598 - ], - [ - "branche", - -12.61103630065918 - ], - [ - "▁Noch", - -12.611079216003418 - ], - [ - "▁1961", - -12.611238479614258 - ], - [ - "release", - -12.611359596252441 - ], - [ - "blaze", - -12.611381530761719 - ], - [ - "Adv", - -12.61139965057373 - ], - [ - "Line", - -12.611671447753906 - ], - [ - "▁financiare", - -12.61184310913086 - ], - [ - "▁chauffage", - -12.611919403076172 - ], - [ - "мо", - -12.61192512512207 - ], - [ - "schuhe", - -12.612035751342773 - ], - [ - "blé", - -12.612040519714355 - ], - [ - "▁Echo", - -12.612468719482422 - ], - [ - "▁remarks", - -12.61253547668457 - ], - [ - "scriu", - -12.612629890441895 - ], - [ - "Vir", - -12.612701416015625 - ], - [ - "War", - -12.61271858215332 - ], - [ - "atifs", - 
-12.613006591796875 - ], - [ - "RING", - -12.613082885742188 - ], - [ - "▁Instruction", - -12.613150596618652 - ], - [ - "▁verlassen", - -12.613155364990234 - ], - [ - "▁ergänz", - -12.613234519958496 - ], - [ - "▁Emil", - -12.613248825073242 - ], - [ - "▁empire", - -12.613263130187988 - ], - [ - "▁Einkauf", - -12.613306999206543 - ], - [ - "utigen", - -12.613329887390137 - ], - [ - "▁audition", - -12.613390922546387 - ], - [ - "travelled", - -12.61347484588623 - ], - [ - "ло", - -12.613579750061035 - ], - [ - "▁infinite", - -12.613720893859863 - ], - [ - "▁Lieblings", - -12.613749504089355 - ], - [ - "▁vân", - -12.613754272460938 - ], - [ - "▁spinning", - -12.613778114318848 - ], - [ - "converting", - -12.614031791687012 - ], - [ - "▁uncertain", - -12.61415958404541 - ], - [ - "restul", - -12.614168167114258 - ], - [ - "▁colourful", - -12.61420726776123 - ], - [ - "▁accountant", - -12.614338874816895 - ], - [ - "bourg", - -12.614532470703125 - ], - [ - "▁structuri", - -12.614538192749023 - ], - [ - "▁Booking", - -12.61465835571289 - ], - [ - "intéresse", - -12.614683151245117 - ], - [ - "▁coordinated", - -12.614753723144531 - ], - [ - "▁precaution", - -12.61497688293457 - ], - [ - "▁Cheese", - -12.615015983581543 - ], - [ - "▁surfing", - -12.615192413330078 - ], - [ - "▁souffr", - -12.61524486541748 - ], - [ - "▁Menu", - -12.615447998046875 - ], - [ - "▁arthritis", - -12.615593910217285 - ], - [ - "▁headphones", - -12.615601539611816 - ], - [ - "▁upgrading", - -12.615602493286133 - ], - [ - "▁apparel", - -12.615653038024902 - ], - [ - "▁Haushalt", - -12.61572551727295 - ], - [ - "▁Personally", - -12.615815162658691 - ], - [ - "▁insane", - -12.615950584411621 - ], - [ - "▁fonduri", - -12.616083145141602 - ], - [ - "▁entier", - -12.616239547729492 - ], - [ - "▁Herbst", - -12.616264343261719 - ], - [ - "▁cyclist", - -12.616331100463867 - ], - [ - "▁filmmaker", - -12.616741180419922 - ], - [ - "▁Portuguese", - -12.616829872131348 - ], - [ - "▁nominee", - 
-12.616851806640625 - ], - [ - "▁Yang", - -12.616857528686523 - ], - [ - "▁slate", - -12.616943359375 - ], - [ - "▁entièrement", - -12.616974830627441 - ], - [ - "▁Umgang", - -12.617049217224121 - ], - [ - "shifted", - -12.617135047912598 - ], - [ - "▁défaut", - -12.617138862609863 - ], - [ - "heiz", - -12.617246627807617 - ], - [ - "▁Seal", - -12.617379188537598 - ], - [ - "▁servicing", - -12.617451667785645 - ], - [ - "marketing", - -12.617562294006348 - ], - [ - "▁demandé", - -12.617755889892578 - ], - [ - "TING", - -12.617841720581055 - ], - [ - "▁modifier", - -12.617907524108887 - ], - [ - "lysis", - -12.617966651916504 - ], - [ - "▁suplimentare", - -12.618117332458496 - ], - [ - "OTHER", - -12.618359565734863 - ], - [ - "Graph", - -12.618379592895508 - ], - [ - "▁coincide", - -12.618448257446289 - ], - [ - "governed", - -12.618598937988281 - ], - [ - "▁locking", - -12.618638038635254 - ], - [ - "▁Properties", - -12.618685722351074 - ], - [ - "▁Panama", - -12.61876392364502 - ], - [ - "▁Coupe", - -12.618846893310547 - ], - [ - "songwriter", - -12.618978500366211 - ], - [ - "exhibited", - -12.618988990783691 - ], - [ - "▁semnificativ", - -12.618995666503906 - ], - [ - "▁purchaser", - -12.619004249572754 - ], - [ - "▁puff", - -12.619097709655762 - ], - [ - "Back", - -12.619105339050293 - ], - [ - "fragt", - -12.61919116973877 - ], - [ - "▁deputy", - -12.619362831115723 - ], - [ - "▁revien", - -12.619556427001953 - ], - [ - "▁Christine", - -12.619558334350586 - ], - [ - "▁Cities", - -12.619573593139648 - ], - [ - "▁Charakter", - -12.61961555480957 - ], - [ - "atteindre", - -12.619625091552734 - ], - [ - "▁fou", - -12.619635581970215 - ], - [ - "▁obligatoire", - -12.619643211364746 - ], - [ - "INA", - -12.619791030883789 - ], - [ - "etc", - -12.6198148727417 - ], - [ - "▁newborn", - -12.620091438293457 - ], - [ - "▁explicitly", - -12.620116233825684 - ], - [ - "simplest", - -12.620203018188477 - ], - [ - "▁plateforme", - -12.62023639678955 - ], - [ - "ordinate", - 
-12.620291709899902 - ], - [ - "displaying", - -12.620346069335938 - ], - [ - "▁messy", - -12.620464324951172 - ], - [ - "gespielt", - -12.620466232299805 - ], - [ - "▁electron", - -12.62061882019043 - ], - [ - "▁Dreh", - -12.620796203613281 - ], - [ - "▁ambient", - -12.620976448059082 - ], - [ - "340", - -12.620979309082031 - ], - [ - "▁directive", - -12.62109375 - ], - [ - "▁Vall", - -12.621152877807617 - ], - [ - "ookie", - -12.621206283569336 - ], - [ - "▁wasted", - -12.621304512023926 - ], - [ - "CIS", - -12.621367454528809 - ], - [ - "lude", - -12.621378898620605 - ], - [ - "rach", - -12.621472358703613 - ], - [ - "▁gasest", - -12.62150764465332 - ], - [ - "▁miros", - -12.62150764465332 - ], - [ - "transforming", - -12.621536254882812 - ], - [ - "▁Milwaukee", - -12.621787071228027 - ], - [ - "▁uncommon", - -12.621789932250977 - ], - [ - "▁tableau", - -12.621841430664062 - ], - [ - "geräte", - -12.621952056884766 - ], - [ - "ophil", - -12.622139930725098 - ], - [ - "▁Jeep", - -12.62220287322998 - ], - [ - "▁wreck", - -12.622422218322754 - ], - [ - "LAND", - -12.622434616088867 - ], - [ - "attach", - -12.622566223144531 - ], - [ - "▁Panther", - -12.622634887695312 - ], - [ - "9:30", - -12.622777938842773 - ], - [ - "▁induce", - -12.622974395751953 - ], - [ - "▁privest", - -12.623006820678711 - ], - [ - "Ident", - -12.623047828674316 - ], - [ - "▁illnesses", - -12.623076438903809 - ], - [ - "▁inhabitants", - -12.623138427734375 - ], - [ - "▁fehlen", - -12.623357772827148 - ], - [ - "obtenu", - -12.623391151428223 - ], - [ - "▁gegründet", - -12.623655319213867 - ], - [ - "ARA", - -12.623711585998535 - ], - [ - "3-2", - -12.623835563659668 - ], - [ - "▁milliards", - -12.623968124389648 - ], - [ - "▁Bü", - -12.624001502990723 - ], - [ - "▁angegeben", - -12.624102592468262 - ], - [ - "TUR", - -12.624143600463867 - ], - [ - "▁arab", - -12.624166488647461 - ], - [ - "▁Scientist", - -12.624275207519531 - ], - [ - "▁minut", - -12.624394416809082 - ], - [ - "▁beast", - 
-12.624481201171875 - ], - [ - "▁accidentally", - -12.624573707580566 - ], - [ - "WN", - -12.624579429626465 - ], - [ - "▁Ralph", - -12.624588966369629 - ], - [ - "hängt", - -12.62462329864502 - ], - [ - "▁Erik", - -12.624639511108398 - ], - [ - "▁différent", - -12.624711990356445 - ], - [ - "▁conformitate", - -12.624842643737793 - ], - [ - "thriving", - -12.624900817871094 - ], - [ - "▁Piece", - -12.625123023986816 - ], - [ - "plasm", - -12.625152587890625 - ], - [ - "▁erwarten", - -12.62520980834961 - ], - [ - "owski", - -12.62523365020752 - ], - [ - "prayed", - -12.625293731689453 - ], - [ - "three", - -12.625542640686035 - ], - [ - "▁soundtrack", - -12.625651359558105 - ], - [ - "guru", - -12.625709533691406 - ], - [ - "▁cracked", - -12.625710487365723 - ], - [ - "▁adh", - -12.625823020935059 - ], - [ - "▁maître", - -12.625834465026855 - ], - [ - "▁Oberfläche", - -12.62585735321045 - ], - [ - "▁crab", - -12.625886917114258 - ], - [ - "▁Foster", - -12.625944137573242 - ], - [ - "▁gemütlich", - -12.626145362854004 - ], - [ - "SIC", - -12.626226425170898 - ], - [ - "ième", - -12.626298904418945 - ], - [ - "▁Few", - -12.626330375671387 - ], - [ - "gérer", - -12.626360893249512 - ], - [ - "2006", - -12.626456260681152 - ], - [ - "cool", - -12.626498222351074 - ], - [ - "▁dispune", - -12.626523971557617 - ], - [ - "recevoir", - -12.626577377319336 - ], - [ - "▁Bak", - -12.626585960388184 - ], - [ - "▁steer", - -12.62659740447998 - ], - [ - "ICS", - -12.626733779907227 - ], - [ - "▁Brett", - -12.626733779907227 - ], - [ - "▁downside", - -12.626751899719238 - ], - [ - "▁residency", - -12.62678050994873 - ], - [ - "important", - -12.626991271972656 - ], - [ - "ubb", - -12.627073287963867 - ], - [ - "mony", - -12.627259254455566 - ], - [ - "▁leasing", - -12.627341270446777 - ], - [ - "▁Gir", - -12.62735366821289 - ], - [ - "▁Biology", - -12.627364158630371 - ], - [ - "▁Colin", - -12.627463340759277 - ], - [ - "▁complicat", - -12.627775192260742 - ], - [ - "▁regroup", - 
-12.627899169921875 - ], - [ - "SPA", - -12.627950668334961 - ], - [ - "▁Veranstaltungen", - -12.627986907958984 - ], - [ - "convicted", - -12.628019332885742 - ], - [ - "▁Wonderful", - -12.628636360168457 - ], - [ - "züge", - -12.628799438476562 - ], - [ - "yton", - -12.628813743591309 - ], - [ - "EMENT", - -12.628887176513672 - ], - [ - "▁bent", - -12.62893009185791 - ], - [ - "heben", - -12.629231452941895 - ], - [ - "▁Sustainable", - -12.62926959991455 - ], - [ - "▁Newcastle", - -12.629276275634766 - ], - [ - "mother", - -12.629507064819336 - ], - [ - "▁eighth", - -12.629572868347168 - ], - [ - "▁atmosfer", - -12.629582405090332 - ], - [ - "expériment", - -12.629584312438965 - ], - [ - "▁Interest", - -12.629608154296875 - ], - [ - "▁successes", - -12.62964153289795 - ], - [ - "▁preschool", - -12.629802703857422 - ], - [ - "▁Funeral", - -12.629900932312012 - ], - [ - "blast", - -12.630083084106445 - ], - [ - "▁dimensiuni", - -12.630125999450684 - ], - [ - "▁Dow", - -12.630167007446289 - ], - [ - "▁pulp", - -12.63022518157959 - ], - [ - "▁Heather", - -12.630356788635254 - ], - [ - "▁erstellen", - -12.63044261932373 - ], - [ - "locating", - -12.630470275878906 - ], - [ - "direct", - -12.630475997924805 - ], - [ - "▁tractor", - -12.630494117736816 - ], - [ - "growing", - -12.630576133728027 - ], - [ - "▁inventor", - -12.630587577819824 - ], - [ - "ASA", - -12.63060188293457 - ], - [ - "insta", - -12.630732536315918 - ], - [ - "yana", - -12.63082504272461 - ], - [ - "▁squash", - -12.630839347839355 - ], - [ - "▁Basketball", - -12.630853652954102 - ], - [ - "AMA", - -12.631041526794434 - ], - [ - "insel", - -12.631093978881836 - ], - [ - "▁Fisch", - -12.631138801574707 - ], - [ - "▁metaphor", - -12.631221771240234 - ], - [ - "TES", - -12.631304740905762 - ], - [ - "▁conduce", - -12.631308555603027 - ], - [ - "stehende", - -12.631370544433594 - ], - [ - "▁FAQ", - -12.631475448608398 - ], - [ - "▁bezeichnet", - -12.631658554077148 - ], - [ - "wendung", - 
-12.631706237792969 - ], - [ - "▁Commonwealth", - -12.631776809692383 - ], - [ - "▁bait", - -12.631793975830078 - ], - [ - "▁Umsetzung", - -12.631834030151367 - ], - [ - "▁Equi", - -12.632063865661621 - ], - [ - "▁validity", - -12.632109642028809 - ], - [ - "Off", - -12.63222599029541 - ], - [ - "▁produsul", - -12.632314682006836 - ], - [ - "▁sensory", - -12.632363319396973 - ], - [ - "▁Imperial", - -12.632501602172852 - ], - [ - "▁Dick", - -12.632542610168457 - ], - [ - "kampf", - -12.632596969604492 - ], - [ - "▁Arzt", - -12.63267993927002 - ], - [ - "▁Reason", - -12.63267993927002 - ], - [ - "ITS", - -12.63270092010498 - ], - [ - "URL", - -12.632720947265625 - ], - [ - "demonstrates", - -12.632725715637207 - ], - [ - "▁dépend", - -12.632753372192383 - ], - [ - "NAS", - -12.632970809936523 - ], - [ - "▁funcți", - -12.633031845092773 - ], - [ - "▁vulnerability", - -12.633085250854492 - ], - [ - "2.7", - -12.633143424987793 - ], - [ - "layered", - -12.633152961730957 - ], - [ - "escence", - -12.633206367492676 - ], - [ - "▁République", - -12.633346557617188 - ], - [ - "▁Lust", - -12.633377075195312 - ], - [ - "▁sute", - -12.633381843566895 - ], - [ - "▁autonomous", - -12.633661270141602 - ], - [ - "Biserica", - -12.633662223815918 - ], - [ - "▁Chuck", - -12.633749961853027 - ], - [ - "▁protéger", - -12.6339750289917 - ], - [ - "rrell", - -12.634061813354492 - ], - [ - "▁Schaden", - -12.634062767028809 - ], - [ - "prennent", - -12.634100914001465 - ], - [ - "maß", - -12.6343412399292 - ], - [ - "OV", - -12.634453773498535 - ], - [ - "▁Wake", - -12.63450813293457 - ], - [ - "produire", - -12.634635925292969 - ], - [ - "▁Elder", - -12.634749412536621 - ], - [ - "Max", - -12.634839057922363 - ], - [ - "▁Chemistry", - -12.634918212890625 - ], - [ - "▁gourmet", - -12.634918212890625 - ], - [ - "erri", - -12.634967803955078 - ], - [ - "ени", - -12.635085105895996 - ], - [ - "▁Gru", - -12.635147094726562 - ], - [ - "▁vorbit", - -12.635408401489258 - ], - [ - "▁precede", - 
-12.635455131530762 - ], - [ - "▁randomly", - -12.635489463806152 - ], - [ - "▁efecte", - -12.63563060760498 - ], - [ - "▁calatori", - -12.635668754577637 - ], - [ - "▁Poor", - -12.635765075683594 - ], - [ - "List", - -12.635781288146973 - ], - [ - "▁regula", - -12.635964393615723 - ], - [ - "▁organisé", - -12.636028289794922 - ], - [ - "Div", - -12.636076927185059 - ], - [ - "▁volunteering", - -12.636423110961914 - ], - [ - "▁horr", - -12.636449813842773 - ], - [ - "9.99", - -12.636487007141113 - ], - [ - "▁UPS", - -12.636513710021973 - ], - [ - "▁englez", - -12.63652229309082 - ], - [ - "▁Eden", - -12.636523246765137 - ], - [ - "GG", - -12.63659954071045 - ], - [ - "▁typing", - -12.63664722442627 - ], - [ - "Likewise", - -12.636700630187988 - ], - [ - "▁stabilize", - -12.636737823486328 - ], - [ - "physio", - -12.636747360229492 - ], - [ - "ми", - -12.636785507202148 - ], - [ - "▁protagonist", - -12.636808395385742 - ], - [ - "▁velvet", - -12.636812210083008 - ], - [ - "schrank", - -12.636861801147461 - ], - [ - "▁Allah", - -12.63693618774414 - ], - [ - "▁forefront", - -12.636968612670898 - ], - [ - "▁salaries", - -12.637001037597656 - ], - [ - "▁prediction", - -12.637041091918945 - ], - [ - "▁Advent", - -12.637182235717773 - ], - [ - "politik", - -12.637280464172363 - ], - [ - "▁Heimat", - -12.637350082397461 - ], - [ - "ducted", - -12.637380599975586 - ], - [ - "ASH", - -12.637386322021484 - ], - [ - "▁Mold", - -12.637773513793945 - ], - [ - "▁publi", - -12.63784122467041 - ], - [ - "▁Vil", - -12.637892723083496 - ], - [ - "▁stu", - -12.637925148010254 - ], - [ - "INTE", - -12.638032913208008 - ], - [ - "▁fave", - -12.638151168823242 - ], - [ - "▁grounded", - -12.638175010681152 - ], - [ - "▁Anything", - -12.638184547424316 - ], - [ - "vik", - -12.638481140136719 - ], - [ - "Bank", - -12.63853645324707 - ], - [ - "deserved", - -12.638550758361816 - ], - [ - "machen", - -12.63874626159668 - ], - [ - "▁rugged", - -12.638751029968262 - ], - [ - "▁Nest", - 
-12.638901710510254 - ], - [ - "▁profund", - -12.639043807983398 - ], - [ - "▁quantum", - -12.639067649841309 - ], - [ - "▁funcționa", - -12.639118194580078 - ], - [ - "klu", - -12.639158248901367 - ], - [ - "▁consulter", - -12.63917350769043 - ], - [ - "MED", - -12.639286994934082 - ], - [ - "▁câştig", - -12.639334678649902 - ], - [ - "▁săptămâni", - -12.639334678649902 - ], - [ - "questioned", - -12.639517784118652 - ], - [ - "▁Trop", - -12.639530181884766 - ], - [ - "▁convo", - -12.639533042907715 - ], - [ - "▁sparkling", - -12.639533996582031 - ], - [ - "▁specialise", - -12.639566421508789 - ], - [ - "▁pancake", - -12.639726638793945 - ], - [ - "habitude", - -12.639727592468262 - ], - [ - "phal", - -12.640009880065918 - ], - [ - "▁Roche", - -12.640158653259277 - ], - [ - "▁personalities", - -12.640250205993652 - ], - [ - "▁Venice", - -12.640308380126953 - ], - [ - "▁comerciale", - -12.640379905700684 - ], - [ - "▁wounded", - -12.64075756072998 - ], - [ - "▁oraş", - -12.640864372253418 - ], - [ - "▁Pepper", - -12.641044616699219 - ], - [ - "▁Tourist", - -12.641094207763672 - ], - [ - "▁Mull", - -12.64116382598877 - ], - [ - "▁dignity", - -12.641234397888184 - ], - [ - "▁Fixed", - -12.641291618347168 - ], - [ - "çant", - -12.64130687713623 - ], - [ - "▁spectator", - -12.641402244567871 - ], - [ - "▁somn", - -12.641685485839844 - ], - [ - "▁ständig", - -12.641820907592773 - ], - [ - "▁resilience", - -12.641866683959961 - ], - [ - "▁Malta", - -12.642251014709473 - ], - [ - "▁problemele", - -12.642253875732422 - ], - [ - "▁Martha", - -12.642254829406738 - ], - [ - "▁extern", - -12.642267227172852 - ], - [ - "embre", - -12.642379760742188 - ], - [ - "▁médical", - -12.642526626586914 - ], - [ - "fordern", - -12.64256477355957 - ], - [ - "nji", - -12.642592430114746 - ], - [ - "▁aboard", - -12.642740249633789 - ], - [ - "▁sidewalk", - -12.642759323120117 - ], - [ - "WIN", - -12.642775535583496 - ], - [ - "▁Bobby", - -12.642842292785645 - ], - [ - "▁umfangreiche", - 
-12.642876625061035 - ], - [ - "leid", - -12.64292049407959 - ], - [ - "▁compens", - -12.642967224121094 - ], - [ - "▁juge", - -12.64299488067627 - ], - [ - "gerufen", - -12.64311408996582 - ], - [ - "▁médicament", - -12.643135070800781 - ], - [ - "▁1918", - -12.643155097961426 - ], - [ - "▁blanche", - -12.643163681030273 - ], - [ - "▁pleasing", - -12.643220901489258 - ], - [ - "▁propria", - -12.643471717834473 - ], - [ - "ergebnisse", - -12.643503189086914 - ], - [ - "▁retrouv", - -12.643571853637695 - ], - [ - "urteil", - -12.643592834472656 - ], - [ - "▁Draft", - -12.64361572265625 - ], - [ - "▁concluzi", - -12.643671035766602 - ], - [ - "centralized", - -12.643789291381836 - ], - [ - "▁Hannah", - -12.64382266998291 - ], - [ - "grija", - -12.64392375946045 - ], - [ - "▁Exercise", - -12.643972396850586 - ], - [ - "RAL", - -12.644001960754395 - ], - [ - "creme", - -12.64408016204834 - ], - [ - "High", - -12.644126892089844 - ], - [ - "clude", - -12.644131660461426 - ], - [ - "Considering", - -12.644208908081055 - ], - [ - "▁Guarantee", - -12.644404411315918 - ], - [ - "▁cuptor", - -12.644436836242676 - ], - [ - "ivität", - -12.64468002319336 - ], - [ - "▁Southwest", - -12.644882202148438 - ], - [ - "▁vivant", - -12.644890785217285 - ], - [ - "Your", - -12.64498519897461 - ], - [ - "▁Stunde", - -12.645003318786621 - ], - [ - "▁Ethernet", - -12.645040512084961 - ], - [ - "angebote", - -12.645078659057617 - ], - [ - "▁Sage", - -12.645271301269531 - ], - [ - "▁Boeing", - -12.645295143127441 - ], - [ - "▁$300", - -12.645381927490234 - ], - [ - "2-4", - -12.64546012878418 - ], - [ - "▁nécessit", - -12.645516395568848 - ], - [ - "▁ferment", - -12.645599365234375 - ], - [ - "▁Anmeldung", - -12.64567756652832 - ], - [ - "▁exhausted", - -12.645758628845215 - ], - [ - "▁Schloss", - -12.645772933959961 - ], - [ - "▁Replacement", - -12.645859718322754 - ], - [ - "▁Aussi", - -12.645933151245117 - ], - [ - "jection", - -12.646127700805664 - ], - [ - "978", - -12.64615535736084 - 
], - [ - "▁siège", - -12.646258354187012 - ], - [ - "crest", - -12.646310806274414 - ], - [ - "▁jumatate", - -12.646312713623047 - ], - [ - "effizient", - -12.646317481994629 - ], - [ - "▁colaborare", - -12.6464262008667 - ], - [ - "HQ", - -12.646615028381348 - ], - [ - "130", - -12.646695137023926 - ], - [ - "culaire", - -12.646907806396484 - ], - [ - "▁Jamaica", - -12.646952629089355 - ], - [ - "▁cardboard", - -12.64731216430664 - ], - [ - "▁technische", - -12.64731502532959 - ], - [ - "▁cereri", - -12.647507667541504 - ], - [ - "▁contradict", - -12.647570610046387 - ], - [ - "▁irrigation", - -12.647586822509766 - ], - [ - "Nume", - -12.64765739440918 - ], - [ - "▁Bier", - -12.647714614868164 - ], - [ - "▁livrare", - -12.647903442382812 - ], - [ - "▁reservoir", - -12.647906303405762 - ], - [ - "vâr", - -12.648130416870117 - ], - [ - "▁galben", - -12.648213386535645 - ], - [ - "▁Geneva", - -12.648303985595703 - ], - [ - "▁lightning", - -12.648418426513672 - ], - [ - "wished", - -12.64842414855957 - ], - [ - "▁Blind", - -12.648481369018555 - ], - [ - "Interested", - -12.648499488830566 - ], - [ - "▁Primări", - -12.648627281188965 - ], - [ - "anthropo", - -12.648954391479492 - ], - [ - "▁Transaction", - -12.648961067199707 - ], - [ - "▁marcat", - -12.648971557617188 - ], - [ - "▁gelegen", - -12.649077415466309 - ], - [ - "▁contemporain", - -12.649182319641113 - ], - [ - "▁politică", - -12.649182319641113 - ], - [ - "▁1948", - -12.64928150177002 - ], - [ - "▁Mik", - -12.649287223815918 - ], - [ - "▁preţ", - -12.649310111999512 - ], - [ - "moor", - -12.649312973022461 - ], - [ - "ANN", - -12.649432182312012 - ], - [ - "▁constructive", - -12.649454116821289 - ], - [ - "konzept", - -12.649502754211426 - ], - [ - "▁entendu", - -12.649511337280273 - ], - [ - "▁Genesis", - -12.649541854858398 - ], - [ - "arzt", - -12.649581909179688 - ], - [ - "▁Allgemein", - -12.64970874786377 - ], - [ - "▁Derby", - -12.649725914001465 - ], - [ - "Class", - -12.649762153625488 - ], - [ - 
"▁$12", - -12.649770736694336 - ], - [ - "▁Tube", - -12.6498441696167 - ], - [ - "▁Contribu", - -12.649847030639648 - ], - [ - "▁HAVE", - -12.649860382080078 - ], - [ - "▁oxide", - -12.64986515045166 - ], - [ - "▁producator", - -12.649941444396973 - ], - [ - "▁Bench", - -12.650132179260254 - ], - [ - "▁comprehend", - -12.650139808654785 - ], - [ - "▁Damen", - -12.650494575500488 - ], - [ - "▁Garant", - -12.65056037902832 - ], - [ - "▁disappointing", - -12.650614738464355 - ], - [ - "▁réalisée", - -12.650693893432617 - ], - [ - "▁comportement", - -12.65072250366211 - ], - [ - "▁clash", - -12.650753021240234 - ], - [ - "▁curry", - -12.65076732635498 - ], - [ - "▁Lebanon", - -12.65078067779541 - ], - [ - "▁Romaniei", - -12.650784492492676 - ], - [ - "▁reprise", - -12.650840759277344 - ], - [ - "▁perceive", - -12.65095329284668 - ], - [ - "▁weaknesses", - -12.65101146697998 - ], - [ - "▁aminti", - -12.651057243347168 - ], - [ - "▁Concern", - -12.651103973388672 - ], - [ - "shadow", - -12.651310920715332 - ], - [ - "▁basin", - -12.651311874389648 - ], - [ - "moral", - -12.652063369750977 - ], - [ - "▁Hughes", - -12.652101516723633 - ], - [ - "Psych", - -12.652266502380371 - ], - [ - "▁Lieferung", - -12.65227222442627 - ], - [ - "▁serrurier", - -12.652379035949707 - ], - [ - "ussi", - -12.652386665344238 - ], - [ - "▁timpului", - -12.6524658203125 - ], - [ - "üm", - -12.652629852294922 - ], - [ - "▁Vladimir", - -12.652701377868652 - ], - [ - "▁Jag", - -12.65279483795166 - ], - [ - "▁verific", - -12.652849197387695 - ], - [ - "▁Pru", - -12.652894020080566 - ], - [ - "▁Laut", - -12.653285026550293 - ], - [ - "ITA", - -12.653287887573242 - ], - [ - "usually", - -12.653294563293457 - ], - [ - "▁carrière", - -12.65341854095459 - ], - [ - "▁extracted", - -12.653663635253906 - ], - [ - "kultur", - -12.653679847717285 - ], - [ - "öpfe", - -12.653932571411133 - ], - [ - "▁rejection", - -12.654016494750977 - ], - [ - "▁Hydr", - -12.654062271118164 - ], - [ - "▁informaţii", - 
-12.654098510742188 - ], - [ - "▁tolerate", - -12.654122352600098 - ], - [ - "▁cinéma", - -12.654302597045898 - ], - [ - "traumatic", - -12.654305458068848 - ], - [ - "produkt", - -12.654450416564941 - ], - [ - "▁Contest", - -12.654560089111328 - ], - [ - "lotte", - -12.654570579528809 - ], - [ - "▁Pension", - -12.65461254119873 - ], - [ - "▁Advertising", - -12.654623985290527 - ], - [ - "▁payout", - -12.654772758483887 - ], - [ - "▁Amanda", - -12.65481185913086 - ], - [ - "Elect", - -12.65485668182373 - ], - [ - "▁interiorul", - -12.654996871948242 - ], - [ - "stay", - -12.655348777770996 - ], - [ - "▁feminine", - -12.655352592468262 - ], - [ - "▁întâmplă", - -12.655437469482422 - ], - [ - "▁insult", - -12.65562915802002 - ], - [ - "▁chocolat", - -12.65567398071289 - ], - [ - "▁noroc", - -12.655750274658203 - ], - [ - "▁centr", - -12.655781745910645 - ], - [ - "▁Bühne", - -12.655858039855957 - ], - [ - "mighty", - -12.6558837890625 - ], - [ - "▁Buddha", - -12.655908584594727 - ], - [ - "▁parental", - -12.655997276306152 - ], - [ - "storm", - -12.656451225280762 - ], - [ - "recurring", - -12.6565523147583 - ], - [ - "▁luxe", - -12.656588554382324 - ], - [ - "niște", - -12.656728744506836 - ], - [ - "cuit", - -12.656839370727539 - ], - [ - "▁ausgewählt", - -12.656880378723145 - ], - [ - "▁dumb", - -12.657047271728516 - ], - [ - "IPS", - -12.657127380371094 - ], - [ - "▁Thir", - -12.65717887878418 - ], - [ - "Definitely", - -12.657195091247559 - ], - [ - "▁hilarious", - -12.657195091247559 - ], - [ - "▁rainbow", - -12.657231330871582 - ], - [ - "▁Bravo", - -12.657251358032227 - ], - [ - "▁entstanden", - -12.657259941101074 - ], - [ - "itorul", - -12.657269477844238 - ], - [ - "▁prosperity", - -12.657299041748047 - ], - [ - "▁Bord", - -12.657336235046387 - ], - [ - "▁familiei", - -12.657363891601562 - ], - [ - "▁scade", - -12.657425880432129 - ], - [ - "wöhn", - -12.657426834106445 - ], - [ - "▁ingrediente", - -12.65743637084961 - ], - [ - "RAD", - -12.657441139221191 
- ], - [ - "▁tăi", - -12.657472610473633 - ], - [ - "bours", - -12.65747356414795 - ], - [ - "ATI", - -12.657540321350098 - ], - [ - "▁Blake", - -12.65761661529541 - ], - [ - "▁Implement", - -12.657712936401367 - ], - [ - "▁Beziehung", - -12.657838821411133 - ], - [ - "finanz", - -12.657953262329102 - ], - [ - "intestin", - -12.658513069152832 - ], - [ - "ließen", - -12.658535957336426 - ], - [ - "▁récent", - -12.658594131469727 - ], - [ - "▁laminate", - -12.658692359924316 - ], - [ - "▁Hör", - -12.65876579284668 - ], - [ - "▁personnalisé", - -12.658804893493652 - ], - [ - "edel", - -12.65890121459961 - ], - [ - "▁advertisement", - -12.658902168273926 - ], - [ - "▁pinterest", - -12.658921241760254 - ], - [ - "185", - -12.659058570861816 - ], - [ - "identité", - -12.65938949584961 - ], - [ - "▁Brick", - -12.659408569335938 - ], - [ - "Glu", - -12.65941047668457 - ], - [ - "▁attendant", - -12.659571647644043 - ], - [ - "▁Flip", - -12.659614562988281 - ], - [ - "attracting", - -12.659662246704102 - ], - [ - "functional", - -12.659703254699707 - ], - [ - "conceived", - -12.659772872924805 - ], - [ - "▁summarize", - -12.659773826599121 - ], - [ - "adjusting", - -12.659809112548828 - ], - [ - "CAL", - -12.660041809082031 - ], - [ - "▁Operating", - -12.660076141357422 - ], - [ - "zzi", - -12.66008472442627 - ], - [ - "▁Rover", - -12.6603364944458 - ], - [ - "▁versuchen", - -12.6603364944458 - ], - [ - "▁articulate", - -12.660600662231445 - ], - [ - "▁privé", - -12.660614013671875 - ], - [ - "▁consequent", - -12.660663604736328 - ], - [ - "EAT", - -12.660690307617188 - ], - [ - "▁Marsh", - -12.660696983337402 - ], - [ - "▁teenage", - -12.660717964172363 - ], - [ - "▁Renaissance", - -12.660740852355957 - ], - [ - "▁furnizor", - -12.660883903503418 - ], - [ - "▁Desert", - -12.660894393920898 - ], - [ - "unicipiului", - -12.66104793548584 - ], - [ - "▁ulterior", - -12.661065101623535 - ], - [ - "▁Ebene", - -12.661280632019043 - ], - [ - "▁monkey", - -12.661351203918457 - ], - 
[ - "▁enclosed", - -12.661389350891113 - ], - [ - "▁profitability", - -12.66139030456543 - ], - [ - "▁Evolution", - -12.661628723144531 - ], - [ - "▁adica", - -12.661670684814453 - ], - [ - "▁Structure", - -12.661709785461426 - ], - [ - "▁primer", - -12.661761283874512 - ], - [ - "▁asigură", - -12.662001609802246 - ], - [ - "▁Manuel", - -12.662220001220703 - ], - [ - "polita", - -12.662267684936523 - ], - [ - "▁Portable", - -12.662286758422852 - ], - [ - "fecți", - -12.662413597106934 - ], - [ - "▁obscure", - -12.662424087524414 - ], - [ - "▁Atlas", - -12.662436485290527 - ], - [ - "fährt", - -12.662679672241211 - ], - [ - "▁clinician", - -12.662837982177734 - ], - [ - "fuhr", - -12.66310977935791 - ], - [ - "▁matériaux", - -12.663113594055176 - ], - [ - "écrire", - -12.663142204284668 - ], - [ - "▁suspicious", - -12.6632080078125 - ], - [ - "pore", - -12.663263320922852 - ], - [ - "▁outdated", - -12.663304328918457 - ], - [ - "▁Mädchen", - -12.663328170776367 - ], - [ - "rcis", - -12.663420677185059 - ], - [ - "nicht", - -12.663463592529297 - ], - [ - "holding", - -12.663561820983887 - ], - [ - "▁heavier", - -12.66366195678711 - ], - [ - "ezimal", - -12.663960456848145 - ], - [ - "▁silicone", - -12.66397476196289 - ], - [ - "punerea", - -12.664108276367188 - ], - [ - "▁begeistert", - -12.664237976074219 - ], - [ - "2004", - -12.664283752441406 - ], - [ - "▁predecessor", - -12.664299011230469 - ], - [ - "▁overlap", - -12.664369583129883 - ], - [ - "▁digging", - -12.664376258850098 - ], - [ - "▁Upgrade", - -12.664407730102539 - ], - [ - "▁interesat", - -12.664543151855469 - ], - [ - "▁spinach", - -12.66456127166748 - ], - [ - "▁politice", - -12.664626121520996 - ], - [ - "activity", - -12.664831161499023 - ], - [ - "▁Rating", - -12.66484546661377 - ], - [ - "▁serrure", - -12.664846420288086 - ], - [ - "▁tânăr", - -12.664959907531738 - ], - [ - "▁WHAT", - -12.664970397949219 - ], - [ - "▁railroad", - -12.664989471435547 - ], - [ - "▁avid", - -12.665081024169922 - ], 
- [ - "▁Sophie", - -12.665084838867188 - ], - [ - "preferably", - -12.665173530578613 - ], - [ - "▁Fourth", - -12.665431022644043 - ], - [ - "kommenden", - -12.665452003479004 - ], - [ - "QUI", - -12.665478706359863 - ], - [ - "lohn", - -12.665505409240723 - ], - [ - "▁promis", - -12.665611267089844 - ], - [ - "▁shrub", - -12.665621757507324 - ], - [ - "nummer", - -12.66579818725586 - ], - [ - "▁dinosaur", - -12.665922164916992 - ], - [ - "▁Lucky", - -12.665937423706055 - ], - [ - "relates", - -12.666038513183594 - ], - [ - "▁FROM", - -12.666049003601074 - ], - [ - "▁racism", - -12.66610336303711 - ], - [ - "physical", - -12.66611385345459 - ], - [ - "alcoholic", - -12.666119575500488 - ], - [ - "▁reef", - -12.666126251220703 - ], - [ - "▁centru", - -12.66618824005127 - ], - [ - "université", - -12.66622257232666 - ], - [ - "▁visage", - -12.666232109069824 - ], - [ - "ităţile", - -12.666253089904785 - ], - [ - "▁Gent", - -12.666345596313477 - ], - [ - "zugeben", - -12.66643238067627 - ], - [ - "▁paradise", - -12.66646957397461 - ], - [ - "fuel", - -12.666505813598633 - ], - [ - "ografie", - -12.666568756103516 - ], - [ - "▁TIP", - -12.666730880737305 - ], - [ - "schreibung", - -12.66683292388916 - ], - [ - "▁bark", - -12.666840553283691 - ], - [ - "accéder", - -12.666895866394043 - ], - [ - "▁contamination", - -12.666937828063965 - ], - [ - "▁swelling", - -12.666950225830078 - ], - [ - "▁optimistic", - -12.666974067687988 - ], - [ - "▁differential", - -12.667015075683594 - ], - [ - "▁Arad", - -12.667030334472656 - ], - [ - "toxins", - -12.667075157165527 - ], - [ - "▁übernehmen", - -12.667091369628906 - ], - [ - "▁anime", - -12.667143821716309 - ], - [ - "actuel", - -12.667462348937988 - ], - [ - "▁bientôt", - -12.667525291442871 - ], - [ - "▁Patio", - -12.66761302947998 - ], - [ - "▁baisse", - -12.667630195617676 - ], - [ - "▁sprint", - -12.66773796081543 - ], - [ - "▁bilden", - -12.66811466217041 - ], - [ - "VAL", - -12.668132781982422 - ], - [ - "▁réflexion", - 
-12.668220520019531 - ], - [ - "hopping", - -12.668242454528809 - ], - [ - "genesis", - -12.66834545135498 - ], - [ - "achtet", - -12.668435096740723 - ], - [ - "▁chinois", - -12.668525695800781 - ], - [ - "▁dezvoltat", - -12.668795585632324 - ], - [ - "arguably", - -12.66884708404541 - ], - [ - "▁Protocol", - -12.66884708404541 - ], - [ - "▁Sterling", - -12.668862342834473 - ], - [ - "▁Cave", - -12.668975830078125 - ], - [ - "▁Condo", - -12.66921615600586 - ], - [ - "▁erhöht", - -12.669235229492188 - ], - [ - "typische", - -12.669416427612305 - ], - [ - "merged", - -12.669439315795898 - ], - [ - "▁accumulation", - -12.669560432434082 - ], - [ - "sicherlich", - -12.669569969177246 - ], - [ - "kW", - -12.669620513916016 - ], - [ - "▁schriftlich", - -12.669757843017578 - ], - [ - "▁Vorteile", - -12.669918060302734 - ], - [ - "▁Northeast", - -12.669922828674316 - ], - [ - "frunt", - -12.669941902160645 - ], - [ - "istik", - -12.670003890991211 - ], - [ - "erster", - -12.670035362243652 - ], - [ - "▁Assistance", - -12.670150756835938 - ], - [ - "▁Fantastic", - -12.670150756835938 - ], - [ - "▁bărbat", - -12.670150756835938 - ], - [ - "▁Grinding", - -12.670151710510254 - ], - [ - "▁diffusion", - -12.670161247253418 - ], - [ - "▁vreun", - -12.670331954956055 - ], - [ - "▁Butler", - -12.670342445373535 - ], - [ - "▁Cherry", - -12.670352935791016 - ], - [ - "▁visualization", - -12.670540809631348 - ], - [ - "Paket", - -12.670572280883789 - ], - [ - "blin", - -12.670619010925293 - ], - [ - "▁cadou", - -12.670705795288086 - ], - [ - "▁Celtic", - -12.670754432678223 - ], - [ - "alegerea", - -12.670894622802734 - ], - [ - "▁Dorf", - -12.671035766601562 - ], - [ - "▁Noir", - -12.671185493469238 - ], - [ - "payment", - -12.67126750946045 - ], - [ - "▁Caroline", - -12.671334266662598 - ], - [ - "▁Berry", - -12.671359062194824 - ], - [ - "▁professeur", - -12.67147445678711 - ], - [ - "▁gratuitement", - -12.671503067016602 - ], - [ - "Suntem", - -12.671523094177246 - ], - [ - 
"IAN", - -12.671738624572754 - ], - [ - "▁fingerprint", - -12.671780586242676 - ], - [ - "▁controversy", - -12.671781539916992 - ], - [ - "▁fled", - -12.671875 - ], - [ - "▁Pokémon", - -12.67210865020752 - ], - [ - "excluding", - -12.67211627960205 - ], - [ - "▁friction", - -12.672161102294922 - ], - [ - "therapie", - -12.67225456237793 - ], - [ - "/7", - -12.672398567199707 - ], - [ - "▁designation", - -12.672442436218262 - ], - [ - "▁Belgia", - -12.672704696655273 - ], - [ - "▁cursuri", - -12.672836303710938 - ], - [ - "model", - -12.672840118408203 - ], - [ - "super", - -12.672987937927246 - ], - [ - "▁réduit", - -12.673028945922852 - ], - [ - "▁implicit", - -12.673177719116211 - ], - [ - "athlon", - -12.673227310180664 - ], - [ - "anniversaire", - -12.673416137695312 - ], - [ - "▁teaspoon", - -12.673416137695312 - ], - [ - "▁corrosion", - -12.673418998718262 - ], - [ - "▁überzeugt", - -12.673418998718262 - ], - [ - "▁flawless", - -12.673421859741211 - ], - [ - "▁vegetation", - -12.673477172851562 - ], - [ - "▁iarna", - -12.673507690429688 - ], - [ - "▁psychologist", - -12.673591613769531 - ], - [ - "hora", - -12.673625946044922 - ], - [ - "gab", - -12.67387580871582 - ], - [ - "▁soothing", - -12.674084663391113 - ], - [ - "▁stew", - -12.674141883850098 - ], - [ - "▁wager", - -12.674172401428223 - ], - [ - "▁tinere", - -12.674322128295898 - ], - [ - "▁baut", - -12.674323081970215 - ], - [ - "ecunoscut", - -12.674352645874023 - ], - [ - "gearbeitet", - -12.674422264099121 - ], - [ - "▁functi", - -12.674480438232422 - ], - [ - "▁dürfte", - -12.674724578857422 - ], - [ - "▁média", - -12.674724578857422 - ], - [ - "▁campanie", - -12.67475700378418 - ], - [ - "▁Distribu", - -12.674817085266113 - ], - [ - "▁mentoring", - -12.674959182739258 - ], - [ - "▁criz", - -12.675020217895508 - ], - [ - "findest", - -12.675056457519531 - ], - [ - "▁Vasile", - -12.675058364868164 - ], - [ - "▁compassionate", - -12.675115585327148 - ], - [ - "▁Tudor", - -12.675140380859375 - ], - 
[ - "▁flare", - -12.675260543823242 - ], - [ - "intreaga", - -12.675283432006836 - ], - [ - "gaz", - -12.6753511428833 - ], - [ - "▁porcelain", - -12.675379753112793 - ], - [ - "▁expedition", - -12.675520896911621 - ], - [ - "▁Azure", - -12.67553997039795 - ], - [ - "räumen", - -12.675549507141113 - ], - [ - "eiro", - -12.675567626953125 - ], - [ - "variante", - -12.675804138183594 - ], - [ - "▁Lucy", - -12.675825119018555 - ], - [ - "ôle", - -12.675909996032715 - ], - [ - "▁revenir", - -12.67602252960205 - ], - [ - "▁stained", - -12.676040649414062 - ], - [ - "▁falsch", - -12.676166534423828 - ], - [ - "▁incorpor", - -12.676166534423828 - ], - [ - "merkt", - -12.676187515258789 - ], - [ - "▁achten", - -12.6762056350708 - ], - [ - "▁hello", - -12.676290512084961 - ], - [ - "selben", - -12.676422119140625 - ], - [ - "ifty", - -12.676525115966797 - ], - [ - "▁Feier", - -12.67653751373291 - ], - [ - "1.000", - -12.676557540893555 - ], - [ - "▁Patch", - -12.676583290100098 - ], - [ - "peptid", - -12.676846504211426 - ], - [ - "▁recovering", - -12.676898956298828 - ], - [ - "Symptom", - -12.677020072937012 - ], - [ - "▁Auckland", - -12.677020072937012 - ], - [ - "▁retrieve", - -12.677328109741211 - ], - [ - "▁800-", - -12.67733097076416 - ], - [ - "schlagen", - -12.677473068237305 - ], - [ - "▁lourd", - -12.677562713623047 - ], - [ - "▁Purple", - -12.67760181427002 - ], - [ - "▁mittels", - -12.677776336669922 - ], - [ - "▁Düsseldorf", - -12.67800521850586 - ], - [ - "▁getaway", - -12.67803955078125 - ], - [ - "▁Cedar", - -12.678061485290527 - ], - [ - "▁Function", - -12.678241729736328 - ], - [ - "▁bizarre", - -12.67833423614502 - ], - [ - "4.3", - -12.67849063873291 - ], - [ - "▁fundraiser", - -12.67866325378418 - ], - [ - "geared", - -12.678780555725098 - ], - [ - "▁privée", - -12.678781509399414 - ], - [ - "▁Bonjour", - -12.67894458770752 - ], - [ - "Gar", - -12.67895793914795 - ], - [ - "▁Lloyd", - -12.678991317749023 - ], - [ - "▁Reinigung", - -12.6790132522583 - 
], - [ - "▁Geno", - -12.679155349731445 - ], - [ - "▁Teilnahme", - -12.67919635772705 - ], - [ - "pian", - -12.679362297058105 - ], - [ - "sammelt", - -12.679368019104004 - ], - [ - "Pad", - -12.679755210876465 - ], - [ - "▁Troy", - -12.67976188659668 - ], - [ - "HG", - -12.679943084716797 - ], - [ - "▁klein", - -12.679962158203125 - ], - [ - "▁lettuce", - -12.679978370666504 - ], - [ - "▁patrimoine", - -12.679978370666504 - ], - [ - "▁cooker", - -12.680055618286133 - ], - [ - "▁accesibil", - -12.680137634277344 - ], - [ - "▁Spray", - -12.680201530456543 - ], - [ - "▁negotiation", - -12.68047046661377 - ], - [ - "▁jewel", - -12.680480003356934 - ], - [ - "▁dynamique", - -12.68063735961914 - ], - [ - "▁plastique", - -12.68067741394043 - ], - [ - "▁Limo", - -12.680682182312012 - ], - [ - "▁Funk", - -12.68069076538086 - ], - [ - "▁omului", - -12.680702209472656 - ], - [ - "title", - -12.680768013000488 - ], - [ - "curved", - -12.68082046508789 - ], - [ - "▁Lemon", - -12.680851936340332 - ], - [ - "förder", - -12.680891990661621 - ], - [ - "▁bewusst", - -12.681112289428711 - ], - [ - "inevitably", - -12.681296348571777 - ], - [ - "▁derivative", - -12.681297302246094 - ], - [ - "2:30", - -12.681300163269043 - ], - [ - "komfort", - -12.681305885314941 - ], - [ - "original", - -12.681480407714844 - ], - [ - "sanct", - -12.681540489196777 - ], - [ - "▁matte", - -12.6815767288208 - ], - [ - "empêche", - -12.681628227233887 - ], - [ - "▁jucător", - -12.681634902954102 - ], - [ - "▁attentive", - -12.681640625 - ], - [ - "▁recunoscut", - -12.681674003601074 - ], - [ - "▁Brush", - -12.68167495727539 - ], - [ - "▁consommateur", - -12.68183422088623 - ], - [ - "érence", - -12.682063102722168 - ], - [ - "typical", - -12.682084083557129 - ], - [ - "strategie", - -12.682205200195312 - ], - [ - "Effekt", - -12.682290077209473 - ], - [ - "▁Alcohol", - -12.682292938232422 - ], - [ - "oji", - -12.682333946228027 - ], - [ - "▁ruler", - -12.682357788085938 - ], - [ - "▁Norwegian", - 
-12.682615280151367 - ], - [ - "▁PlayStation", - -12.682615280151367 - ], - [ - "▁Hook", - -12.682747840881348 - ], - [ - "▁viewpoint", - -12.682759284973145 - ], - [ - "THER", - -12.682841300964355 - ], - [ - "420", - -12.682888984680176 - ], - [ - "Consequently", - -12.68294620513916 - ], - [ - "▁entschieden", - -12.68294620513916 - ], - [ - "▁Trag", - -12.68295669555664 - ], - [ - "▁Dawn", - -12.683003425598145 - ], - [ - "▁fuss", - -12.68301773071289 - ], - [ - "*****", - -12.683040618896484 - ], - [ - "▁Bullet", - -12.683140754699707 - ], - [ - "CAM", - -12.683155059814453 - ], - [ - "▁wonderfully", - -12.683201789855957 - ], - [ - "▁parlamentar", - -12.683263778686523 - ], - [ - "▁geometric", - -12.683307647705078 - ], - [ - "talement", - -12.683321952819824 - ], - [ - "/2018", - -12.683577537536621 - ], - [ - "▁oversight", - -12.684036254882812 - ], - [ - "kindly", - -12.684080123901367 - ], - [ - "therm", - -12.684305191040039 - ], - [ - "▁treaba", - -12.6846342086792 - ], - [ - "▁Trim", - -12.68471908569336 - ], - [ - "▁intelege", - -12.684842109680176 - ], - [ - "cino", - -12.685032844543457 - ], - [ - "▁straw", - -12.68508529663086 - ], - [ - "Tru", - -12.685251235961914 - ], - [ - "▁Television", - -12.68530559539795 - ], - [ - "Trader", - -12.68538761138916 - ], - [ - "▁Passion", - -12.685394287109375 - ], - [ - "rescu", - -12.685622215270996 - ], - [ - "Nicol", - -12.685635566711426 - ], - [ - "luj", - -12.685805320739746 - ], - [ - "▁mijloace", - -12.685921669006348 - ], - [ - "▁Removal", - -12.685922622680664 - ], - [ - "▁1944", - -12.686034202575684 - ], - [ - "▁shortcut", - -12.686159133911133 - ], - [ - "▁Fett", - -12.686258316040039 - ], - [ - "largement", - -12.686371803283691 - ], - [ - "▁altern", - -12.686446189880371 - ], - [ - "▁cleansing", - -12.686562538146973 - ], - [ - "▁Qatar", - -12.686692237854004 - ], - [ - "▁Ceci", - -12.686826705932617 - ], - [ - "▁weave", - -12.686848640441895 - ], - [ - "schmerz", - -12.686878204345703 - ], - [ - 
"▁dots", - -12.686888694763184 - ], - [ - "Télécharger", - -12.68691635131836 - ], - [ - "▁Conduct", - -12.686944007873535 - ], - [ - "bekannten", - -12.687325477600098 - ], - [ - "▁lungime", - -12.687344551086426 - ], - [ - "▁Ferrari", - -12.687390327453613 - ], - [ - "▁totusi", - -12.687605857849121 - ], - [ - "▁Anniversary", - -12.687911033630371 - ], - [ - "▁wilderness", - -12.687911987304688 - ], - [ - "▁Christoph", - -12.687939643859863 - ], - [ - "▁Nikon", - -12.688112258911133 - ], - [ - "▁Digi", - -12.68818473815918 - ], - [ - "▁Blumen", - -12.688190460205078 - ], - [ - "▁altul", - -12.688249588012695 - ], - [ - "▁Parish", - -12.688321113586426 - ], - [ - "czy", - -12.688393592834473 - ], - [ - "▁temper", - -12.688401222229004 - ], - [ - "▁Powder", - -12.688576698303223 - ], - [ - "▁Arnold", - -12.688577651977539 - ], - [ - "capacitatea", - -12.688687324523926 - ], - [ - "nderungen", - -12.688787460327148 - ], - [ - "▁utilization", - -12.688859939575195 - ], - [ - "99%", - -12.688942909240723 - ], - [ - "▁Fear", - -12.689099311828613 - ], - [ - "JE", - -12.689165115356445 - ], - [ - "▁Simpson", - -12.689239501953125 - ], - [ - "▁Podcast", - -12.68924617767334 - ], - [ - "▁Cardinal", - -12.689290046691895 - ], - [ - "▁Distribution", - -12.689315795898438 - ], - [ - "▁Drawing", - -12.689373970031738 - ], - [ - "▁tint", - -12.689412117004395 - ], - [ - "▁hran", - -12.68945598602295 - ], - [ - "▁Slide", - -12.68960189819336 - ], - [ - "▁Vertrauen", - -12.689654350280762 - ], - [ - "cloth", - -12.68971061706543 - ], - [ - "▁redirect", - -12.689728736877441 - ], - [ - "126", - -12.689842224121094 - ], - [ - "▁constituie", - -12.68985652923584 - ], - [ - "Mai", - -12.690070152282715 - ], - [ - "▁idol", - -12.690088272094727 - ], - [ - "▁tehnice", - -12.690163612365723 - ], - [ - "dip", - -12.690393447875977 - ], - [ - "▁soldier", - -12.690400123596191 - ], - [ - "▁Ordin", - -12.690409660339355 - ], - [ - "wobe", - -12.69050407409668 - ], - [ - "▁Brent", - 
-12.69058895111084 - ], - [ - "▁Sudan", - -12.690597534179688 - ], - [ - "6000", - -12.690619468688965 - ], - [ - "turism", - -12.690689086914062 - ], - [ - "▁Rocky", - -12.690744400024414 - ], - [ - "naming", - -12.69092082977295 - ], - [ - "▁entrepreneurial", - -12.690925598144531 - ], - [ - "hearted", - -12.690962791442871 - ], - [ - "ayne", - -12.69097900390625 - ], - [ - "▁hover", - -12.691081047058105 - ], - [ - "▁skull", - -12.691279411315918 - ], - [ - "▁tribal", - -12.691407203674316 - ], - [ - "▁crafting", - -12.691543579101562 - ], - [ - "bewertungen", - -12.691569328308105 - ], - [ - "▁decizii", - -12.691625595092773 - ], - [ - "obwohl", - -12.691655158996582 - ], - [ - "▁compromised", - -12.691875457763672 - ], - [ - "▁quelqu", - -12.69195556640625 - ], - [ - "▁Hilton", - -12.692075729370117 - ], - [ - "▁maturity", - -12.692095756530762 - ], - [ - "gelesen", - -12.692100524902344 - ], - [ - "▁harbor", - -12.69210433959961 - ], - [ - "▁maple", - -12.692326545715332 - ], - [ - "▁développ", - -12.6924409866333 - ], - [ - "▁Nobody", - -12.692517280578613 - ], - [ - "équipement", - -12.69255542755127 - ], - [ - "121", - -12.69274616241455 - ], - [ - "140", - -12.692827224731445 - ], - [ - "▁artistes", - -12.692914962768555 - ], - [ - "▁depune", - -12.692941665649414 - ], - [ - "▁erase", - -12.693129539489746 - ], - [ - "▁erzählt", - -12.693197250366211 - ], - [ - "▁Hyundai", - -12.69323444366455 - ], - [ - "▁impairment", - -12.69323444366455 - ], - [ - "▁conving", - -12.693279266357422 - ], - [ - "chasing", - -12.693426132202148 - ], - [ - "▁Claus", - -12.693438529968262 - ], - [ - "▁adaptée", - -12.693687438964844 - ], - [ - "▁Raz", - -12.693740844726562 - ], - [ - "rugs", - -12.693796157836914 - ], - [ - "▁urme", - -12.69387435913086 - ], - [ - "Nonetheless", - -12.693902015686035 - ], - [ - "▁Cemetery", - -12.693902969360352 - ], - [ - "umps", - -12.693906784057617 - ], - [ - "ACA", - -12.694003105163574 - ], - [ - "▁perioade", - -12.694235801696777 - ], 
- [ - "▁slogan", - -12.694263458251953 - ], - [ - "▁downward", - -12.694441795349121 - ], - [ - "eidig", - -12.694446563720703 - ], - [ - "RAC", - -12.69444751739502 - ], - [ - "▁inaugur", - -12.694496154785156 - ], - [ - "се", - -12.694588661193848 - ], - [ - "▁înțeleg", - -12.694608688354492 - ], - [ - "▁hopeful", - -12.694635391235352 - ], - [ - "▁customization", - -12.6946439743042 - ], - [ - "▁prisoners", - -12.694708824157715 - ], - [ - "▁Rau", - -12.695270538330078 - ], - [ - "▁Pitt", - -12.695389747619629 - ], - [ - "ături", - -12.695542335510254 - ], - [ - "▁metabolic", - -12.695842742919922 - ], - [ - "▁Zach", - -12.695868492126465 - ], - [ - "▁umfassende", - -12.695914268493652 - ], - [ - "▁révél", - -12.695950508117676 - ], - [ - "131", - -12.696052551269531 - ], - [ - "ismului", - -12.696062088012695 - ], - [ - "▁Sac", - -12.696076393127441 - ], - [ - "efficacité", - -12.69624137878418 - ], - [ - "cruci", - -12.69625473022461 - ], - [ - "bisschen", - -12.69632339477539 - ], - [ - "▁Oster", - -12.696324348449707 - ], - [ - "lowered", - -12.6964693069458 - ], - [ - "▁Ausland", - -12.69674015045166 - ], - [ - "▁Pub", - -12.696794509887695 - ], - [ - "▁Marseille", - -12.696925163269043 - ], - [ - "▁Charter", - -12.696959495544434 - ], - [ - "howcasing", - -12.697010040283203 - ], - [ - "risti", - -12.6971435546875 - ], - [ - "▁thermostat", - -12.697151184082031 - ], - [ - "▁Clin", - -12.697233200073242 - ], - [ - "▁entsteht", - -12.697246551513672 - ], - [ - "Choosing", - -12.697248458862305 - ], - [ - "▁Schmerz", - -12.697284698486328 - ], - [ - "▁Till", - -12.697307586669922 - ], - [ - "▁Polo", - -12.697399139404297 - ], - [ - "▁proceduri", - -12.697402000427246 - ], - [ - "▁Believe", - -12.697444915771484 - ], - [ - "▁playful", - -12.697514533996582 - ], - [ - "▁verändert", - -12.697588920593262 - ], - [ - "▁pairing", - -12.697654724121094 - ], - [ - "MAG", - -12.69784927368164 - ], - [ - "leiste", - -12.69788932800293 - ], - [ - "▁testimonial", - 
-12.697916030883789 - ], - [ - "▁Economy", - -12.697916984558105 - ], - [ - "▁Wechsel", - -12.697918891906738 - ], - [ - "wirkung", - -12.69801139831543 - ], - [ - "▁exceeded", - -12.698030471801758 - ], - [ - "South", - -12.698067665100098 - ], - [ - "create", - -12.698221206665039 - ], - [ - "▁davantage", - -12.698270797729492 - ], - [ - "Log", - -12.69831657409668 - ], - [ - "▁irregular", - -12.698587417602539 - ], - [ - "VB", - -12.698691368103027 - ], - [ - "▁Rö", - -12.698741912841797 - ], - [ - "▁intreb", - -12.698881149291992 - ], - [ - "▁penser", - -12.698920249938965 - ], - [ - "▁déclaré", - -12.698923110961914 - ], - [ - "▁Tommy", - -12.699026107788086 - ], - [ - "2,500", - -12.699163436889648 - ], - [ - "▁Uganda", - -12.699260711669922 - ], - [ - "contacting", - -12.699445724487305 - ], - [ - "▁apreciat", - -12.699485778808594 - ], - [ - "▁beginnen", - -12.6995210647583 - ], - [ - "▁Gain", - -12.699580192565918 - ], - [ - "Office", - -12.69969654083252 - ], - [ - "ermittlung", - -12.699710845947266 - ], - [ - "▁Admission", - -12.699727058410645 - ], - [ - "▁Earl", - -12.6997652053833 - ], - [ - "▁Aviation", - -12.699833869934082 - ], - [ - "▁apologize", - -12.699929237365723 - ], - [ - "▁enclosure", - -12.699929237365723 - ], - [ - "▁Lack", - -12.69998836517334 - ], - [ - "wife", - -12.699995994567871 - ], - [ - "▁rotating", - -12.700016975402832 - ], - [ - "▁hergestellt", - -12.700020790100098 - ], - [ - "▁repository", - -12.70002269744873 - ], - [ - "TK", - -12.700149536132812 - ], - [ - "▁lectur", - -12.700190544128418 - ], - [ - "▁reflex", - -12.700286865234375 - ], - [ - "▁Harmon", - -12.700401306152344 - ], - [ - "▁vrem", - -12.700479507446289 - ], - [ - "▁Strange", - -12.70055103302002 - ], - [ - "▁champagne", - -12.700615882873535 - ], - [ - "▁oscil", - -12.700647354125977 - ], - [ - "sensitive", - -12.700677871704102 - ], - [ - "▁Sheriff", - -12.700841903686523 - ], - [ - "PRES", - -12.700956344604492 - ], - [ - "▁vow", - -12.70123291015625 - 
], - [ - "▁dioxide", - -12.701276779174805 - ], - [ - "ен", - -12.701374053955078 - ], - [ - "▁corpului", - -12.701376914978027 - ], - [ - "▁prevăzut", - -12.70160961151123 - ], - [ - "India", - -12.701827049255371 - ], - [ - "hausse", - -12.70189094543457 - ], - [ - "▁clienți", - -12.701957702636719 - ], - [ - "▁entour", - -12.70202350616455 - ], - [ - "▁Sharp", - -12.70209789276123 - ], - [ - "▁teatru", - -12.702285766601562 - ], - [ - "▁Grow", - -12.702327728271484 - ], - [ - "▁caravan", - -12.70234203338623 - ], - [ - "▁sieben", - -12.702420234680176 - ], - [ - "▁cunosc", - -12.702502250671387 - ], - [ - "Bereichen", - -12.702527046203613 - ], - [ - "▁Benutzer", - -12.702619552612305 - ], - [ - "▁Ethiopia", - -12.702619552612305 - ], - [ - "▁Physics", - -12.702619552612305 - ], - [ - "preserving", - -12.70263385772705 - ], - [ - "ал", - -12.702712059020996 - ], - [ - "▁aerial", - -12.70272159576416 - ], - [ - "▁nouvel", - -12.702741622924805 - ], - [ - "▁stamped", - -12.702954292297363 - ], - [ - "▁inaugural", - -12.702970504760742 - ], - [ - "▁medicinal", - -12.702999114990234 - ], - [ - "Quite", - -12.703028678894043 - ], - [ - "accumulated", - -12.703165054321289 - ], - [ - "register", - -12.703271865844727 - ], - [ - "▁Falcon", - -12.70327377319336 - ], - [ - "▁boiling", - -12.703301429748535 - ], - [ - "▁advertised", - -12.703339576721191 - ], - [ - "collect", - -12.703362464904785 - ], - [ - "albeit", - -12.703418731689453 - ], - [ - "▁Organis", - -12.703473091125488 - ], - [ - "luate", - -12.703536033630371 - ], - [ - "▁préféré", - -12.70369815826416 - ], - [ - "▁frumoasa", - -12.703968048095703 - ], - [ - "▁truc", - -12.704092979431152 - ], - [ - "▁Fä", - -12.704154968261719 - ], - [ - "▁dome", - -12.704180717468262 - ], - [ - "Mobile", - -12.704191207885742 - ], - [ - "▁redeem", - -12.704198837280273 - ], - [ - "IONS", - -12.70422077178955 - ], - [ - "▁țări", - -12.704235076904297 - ], - [ - "▁singular", - -12.704385757446289 - ], - [ - "▁livestock", - 
-12.704425811767578 - ], - [ - "▁démont", - -12.704427719116211 - ], - [ - "clés", - -12.704527854919434 - ], - [ - "music", - -12.704561233520508 - ], - [ - "▁explicat", - -12.704602241516113 - ], - [ - "▁Fellowship", - -12.704703330993652 - ], - [ - "▁electrode", - -12.704760551452637 - ], - [ - "129", - -12.704977035522461 - ], - [ - "▁Rescue", - -12.704983711242676 - ], - [ - "▁Rocket", - -12.705159187316895 - ], - [ - "OSE", - -12.705301284790039 - ], - [ - "▁Sacramento", - -12.705317497253418 - ], - [ - "▁Haiti", - -12.705357551574707 - ], - [ - "▁Erwachsene", - -12.705390930175781 - ], - [ - "▁Terminal", - -12.70541000366211 - ], - [ - "URI", - -12.705453872680664 - ], - [ - "▁Rural", - -12.70549201965332 - ], - [ - "▁achizitiona", - -12.70552921295166 - ], - [ - "▁identifiable", - -12.705655097961426 - ], - [ - "▁gekauft", - -12.705659866333008 - ], - [ - "▁improper", - -12.705673217773438 - ], - [ - "lashes", - -12.705751419067383 - ], - [ - "vorbim", - -12.705751419067383 - ], - [ - "▁hinder", - -12.705862045288086 - ], - [ - "▁Grenz", - -12.705878257751465 - ], - [ - "Nav", - -12.705955505371094 - ], - [ - "alimentation", - -12.705972671508789 - ], - [ - "▁Cottage", - -12.7059965133667 - ], - [ - "▁nötig", - -12.706197738647461 - ], - [ - "▁cuprinde", - -12.70622444152832 - ], - [ - "session", - -12.706256866455078 - ], - [ - "▁Separat", - -12.70634651184082 - ], - [ - "▁besuchen", - -12.706672668457031 - ], - [ - "▁noodles", - -12.706684112548828 - ], - [ - "▁ballet", - -12.706696510314941 - ], - [ - "WG", - -12.706731796264648 - ], - [ - "▁Duty", - -12.706871032714844 - ], - [ - "▁porc", - -12.706944465637207 - ], - [ - "▁booster", - -12.70698356628418 - ], - [ - "galerie", - -12.707056045532227 - ], - [ - "▁Lance", - -12.707119941711426 - ], - [ - "▁déplac", - -12.707178115844727 - ], - [ - "▁rugby", - -12.707240104675293 - ], - [ - "▁upholstery", - -12.707345962524414 - ], - [ - "▁bustl", - -12.70736312866211 - ], - [ - "▁Dealer", - 
-12.70740032196045 - ], - [ - "▁genome", - -12.707414627075195 - ], - [ - "▁citizenship", - -12.707466125488281 - ], - [ - "rora", - -12.707515716552734 - ], - [ - "ARK", - -12.707776069641113 - ], - [ - "▁Semi", - -12.707820892333984 - ], - [ - "▁Improvement", - -12.707892417907715 - ], - [ - "▁negru", - -12.708142280578613 - ], - [ - "▁Bruxelles", - -12.70836067199707 - ], - [ - "flüge", - -12.70837688446045 - ], - [ - "▁Technique", - -12.708392143249512 - ], - [ - "▁Obst", - -12.708413124084473 - ], - [ - "2020", - -12.708560943603516 - ], - [ - "▁gek", - -12.708593368530273 - ], - [ - "▁drepturi", - -12.708600997924805 - ], - [ - "▁Logan", - -12.708605766296387 - ], - [ - "gelöst", - -12.70863151550293 - ], - [ - "▁grandparents", - -12.708702087402344 - ], - [ - "phin", - -12.708950996398926 - ], - [ - "▁dwell", - -12.709037780761719 - ], - [ - "▁Nobel", - -12.709151268005371 - ], - [ - "dial", - -12.70927906036377 - ], - [ - "▁spontan", - -12.709344863891602 - ], - [ - "advancing", - -12.70937728881836 - ], - [ - "starring", - -12.70947551727295 - ], - [ - "▁astea", - -12.709498405456543 - ], - [ - "igueur", - -12.709638595581055 - ], - [ - "▁Ancient", - -12.709700584411621 - ], - [ - "filter", - -12.70971965789795 - ], - [ - "Doar", - -12.709758758544922 - ], - [ - "▁Workers", - -12.709759712219238 - ], - [ - "Certainly", - -12.709906578063965 - ], - [ - "▁commencé", - -12.709914207458496 - ], - [ - "▁zipper", - -12.710001945495605 - ], - [ - "▁Selection", - -12.710070610046387 - ], - [ - "▁succ", - -12.710280418395996 - ], - [ - "headed", - -12.710345268249512 - ], - [ - "RIA", - -12.710350036621094 - ], - [ - "▁papa", - -12.710366249084473 - ], - [ - "▁profesionale", - -12.710394859313965 - ], - [ - "▁Zeichen", - -12.710402488708496 - ], - [ - "▁artisans", - -12.710489273071289 - ], - [ - "▁Geist", - -12.710585594177246 - ], - [ - "practic", - -12.710741996765137 - ], - [ - "▁ministrul", - -12.71076488494873 - ], - [ - "viens", - -12.710912704467773 - ], - 
[ - "prezintă", - -12.710919380187988 - ], - [ - "Integrated", - -12.710981369018555 - ], - [ - "▁rooftop", - -12.710989952087402 - ], - [ - "▁successor", - -12.710991859436035 - ], - [ - "OTO", - -12.711012840270996 - ], - [ - "liés", - -12.711027145385742 - ], - [ - "▁Diver", - -12.71121597290039 - ], - [ - "Specifically", - -12.711297988891602 - ], - [ - "▁calibr", - -12.711301803588867 - ], - [ - "KK", - -12.711341857910156 - ], - [ - "▁défense", - -12.711414337158203 - ], - [ - "▁english", - -12.711414337158203 - ], - [ - "verbrauch", - -12.711418151855469 - ], - [ - "▁attire", - -12.711433410644531 - ], - [ - "▁Recipe", - -12.711441040039062 - ], - [ - "équilibre", - -12.711457252502441 - ], - [ - "accumul", - -12.71157169342041 - ], - [ - "▁financement", - -12.71169662475586 - ], - [ - "rij", - -12.711962699890137 - ], - [ - "▁prince", - -12.711999893188477 - ], - [ - "▁préparer", - -12.7120361328125 - ], - [ - "surviving", - -12.71211051940918 - ], - [ - "operation", - -12.712233543395996 - ], - [ - "▁judet", - -12.71242904663086 - ], - [ - "▁Verantwortung", - -12.712433815002441 - ], - [ - "▁Vinyl", - -12.712536811828613 - ], - [ - "DEN", - -12.712584495544434 - ], - [ - "▁Tail", - -12.712589263916016 - ], - [ - "yearly", - -12.712590217590332 - ], - [ - "▁comisi", - -12.712613105773926 - ], - [ - "lava", - -12.71261978149414 - ], - [ - "▁succession", - -12.71264934539795 - ], - [ - "▁Whisk", - -12.713030815124512 - ], - [ - "▁precizat", - -12.713096618652344 - ], - [ - "▁unmittelbar", - -12.713117599487305 - ], - [ - "ICH", - -12.713139533996582 - ], - [ - "▁atteint", - -12.713199615478516 - ], - [ - "▁hometown", - -12.713268280029297 - ], - [ - "▁Zip", - -12.71328353881836 - ], - [ - "▁Weekly", - -12.71336841583252 - ], - [ - "▁crashes", - -12.713401794433594 - ], - [ - "▁Turbo", - -12.713421821594238 - ], - [ - "▁susține", - -12.713468551635742 - ], - [ - "▁Venus", - -12.713587760925293 - ], - [ - "▁finalement", - -12.713595390319824 - ], - [ - 
"rewarded", - -12.713693618774414 - ], - [ - "▁principau", - -12.713899612426758 - ], - [ - "▁régional", - -12.713979721069336 - ], - [ - "▁1958", - -12.714178085327148 - ], - [ - "▁Musical", - -12.714189529418945 - ], - [ - "▁stylist", - -12.714251518249512 - ], - [ - "cetate", - -12.714282035827637 - ], - [ - "gorge", - -12.71433162689209 - ], - [ - "▁espresso", - -12.714493751525879 - ], - [ - "überall", - -12.714576721191406 - ], - [ - "▁NHL", - -12.714593887329102 - ], - [ - "▁Dock", - -12.71472454071045 - ], - [ - "▁mosquito", - -12.71481704711914 - ], - [ - "▁forthcoming", - -12.714852333068848 - ], - [ - "▁Visitors", - -12.714881896972656 - ], - [ - "kro", - -12.714882850646973 - ], - [ - "_______", - -12.715048789978027 - ], - [ - "▁STEM", - -12.715105056762695 - ], - [ - "9.5", - -12.715141296386719 - ], - [ - "accompagne", - -12.715177536010742 - ], - [ - "▁Trick", - -12.715202331542969 - ], - [ - "▁endorsement", - -12.715400695800781 - ], - [ - "▁amplifier", - -12.715498924255371 - ], - [ - "▁malicious", - -12.715499877929688 - ], - [ - "▁roam", - -12.71552848815918 - ], - [ - "▁kennt", - -12.715635299682617 - ], - [ - "Connor", - -12.715690612792969 - ], - [ - "▁dysfunction", - -12.715828895568848 - ], - [ - "▁zuverlässig", - -12.715840339660645 - ], - [ - "▁corpul", - -12.71595573425293 - ], - [ - "▁boule", - -12.715967178344727 - ], - [ - "otti", - -12.715991973876953 - ], - [ - "440", - -12.716050148010254 - ], - [ - "▁mimic", - -12.716056823730469 - ], - [ - "farben", - -12.716129302978516 - ], - [ - "▁Wagner", - -12.716214179992676 - ], - [ - "Kom", - -12.7162504196167 - ], - [ - "▁miteinander", - -12.716269493103027 - ], - [ - "▁String", - -12.716296195983887 - ], - [ - "▁Ellis", - -12.716313362121582 - ], - [ - "▁Perth", - -12.716337203979492 - ], - [ - "▁temperatura", - -12.716381072998047 - ], - [ - "umbling", - -12.716397285461426 - ], - [ - "▁Medizin", - -12.716554641723633 - ], - [ - "▁KY", - -12.71660327911377 - ], - [ - "apei", - 
-12.716642379760742 - ], - [ - "counter", - -12.716647148132324 - ], - [ - "strich", - -12.71665096282959 - ], - [ - "▁Între", - -12.716652870178223 - ], - [ - "▁Cliff", - -12.716785430908203 - ], - [ - "▁foreclosure", - -12.716864585876465 - ], - [ - "................", - -12.716878890991211 - ], - [ - "Clearly", - -12.717028617858887 - ], - [ - "AJ", - -12.717057228088379 - ], - [ - "ndro", - -12.717180252075195 - ], - [ - "▁Arsenal", - -12.717206001281738 - ], - [ - "▁Recherche", - -12.717216491699219 - ], - [ - "Guests", - -12.717225074768066 - ], - [ - "▁besucht", - -12.717242240905762 - ], - [ - "wissen", - -12.717266082763672 - ], - [ - "fekt", - -12.717414855957031 - ], - [ - "hottest", - -12.717414855957031 - ], - [ - "▁Tomorrow", - -12.717547416687012 - ], - [ - "▁Signature", - -12.717557907104492 - ], - [ - "127", - -12.717583656311035 - ], - [ - "▁competence", - -12.71766471862793 - ], - [ - "Einige", - -12.717686653137207 - ], - [ - "patented", - -12.71782112121582 - ], - [ - "▁Exhibition", - -12.717889785766602 - ], - [ - "▁verbessern", - -12.717889785766602 - ], - [ - "▁Garcia", - -12.718043327331543 - ], - [ - "▁inquire", - -12.718278884887695 - ], - [ - "coping", - -12.718353271484375 - ], - [ - "▁linguri", - -12.71842098236084 - ], - [ - "▁trivia", - -12.718433380126953 - ], - [ - "▁începutul", - -12.718489646911621 - ], - [ - "▁parteneriat", - -12.7186279296875 - ], - [ - "tagen", - -12.718636512756348 - ], - [ - "▁engagé", - -12.718916893005371 - ], - [ - "▁chalk", - -12.718944549560547 - ], - [ - "▁fashionable", - -12.719416618347168 - ], - [ - "0.8", - -12.719635009765625 - ], - [ - "▁sticker", - -12.719751358032227 - ], - [ - "▁desperately", - -12.719765663146973 - ], - [ - "höhe", - -12.719903945922852 - ], - [ - "▁fericire", - -12.71994400024414 - ], - [ - "évaluation", - -12.719948768615723 - ], - [ - "▁Divide", - -12.719959259033203 - ], - [ - "▁indulge", - -12.719979286193848 - ], - [ - "fett", - -12.720014572143555 - ], - [ - 
"▁communal", - -12.72017765045166 - ], - [ - "▁mindful", - -12.720187187194824 - ], - [ - "dauert", - -12.720192909240723 - ], - [ - "▁veille", - -12.720263481140137 - ], - [ - "▁vér", - -12.720330238342285 - ], - [ - "▁Baseball", - -12.720373153686523 - ], - [ - "▁succeeded", - -12.720418930053711 - ], - [ - "▁Terrasse", - -12.720420837402344 - ], - [ - "irgend", - -12.720500946044922 - ], - [ - "▁Munich", - -12.720556259155273 - ], - [ - "weisung", - -12.72067642211914 - ], - [ - "metre", - -12.720916748046875 - ], - [ - "▁Raymond", - -12.721015930175781 - ], - [ - "▁chute", - -12.72102165222168 - ], - [ - "▁Accounting", - -12.721075057983398 - ], - [ - "▁pantry", - -12.721122741699219 - ], - [ - "▁underwater", - -12.721181869506836 - ], - [ - "ARI", - -12.721222877502441 - ], - [ - "lowed", - -12.721245765686035 - ], - [ - "numbered", - -12.721430778503418 - ], - [ - "REN", - -12.72148609161377 - ], - [ - "▁industriel", - -12.721489906311035 - ], - [ - "wäh", - -12.721531867980957 - ], - [ - "kenntnis", - -12.721631050109863 - ], - [ - "▁govern", - -12.721635818481445 - ], - [ - "strained", - -12.721661567687988 - ], - [ - "▁rythme", - -12.721689224243164 - ], - [ - "ин", - -12.72169303894043 - ], - [ - "▁burner", - -12.721723556518555 - ], - [ - "▁zählt", - -12.721790313720703 - ], - [ - "▁verte", - -12.721883773803711 - ], - [ - "▁Catalog", - -12.721896171569824 - ], - [ - "▁Bruno", - -12.721988677978516 - ], - [ - "0.7", - -12.721997261047363 - ], - [ - "▁litig", - -12.72207260131836 - ], - [ - "▁greet", - -12.722129821777344 - ], - [ - "▁stool", - -12.722393035888672 - ], - [ - "gression", - -12.722457885742188 - ], - [ - "▁Klassen", - -12.722491264343262 - ], - [ - "▁neon", - -12.722661018371582 - ], - [ - "▁Tall", - -12.722734451293945 - ], - [ - "▁satin", - -12.722895622253418 - ], - [ - "▁Bend", - -12.722915649414062 - ], - [ - "▁soluţi", - -12.723077774047852 - ], - [ - "▁styl", - -12.723196983337402 - ], - [ - "▁Siri", - -12.723358154296875 - ], - [ - 
"▁Sanders", - -12.723464012145996 - ], - [ - "▁spike", - -12.723499298095703 - ], - [ - "pinion", - -12.723854064941406 - ], - [ - "▁purta", - -12.724122047424316 - ], - [ - "CARE", - -12.724224090576172 - ], - [ - "▁creştere", - -12.724311828613281 - ], - [ - "▁fry", - -12.724374771118164 - ], - [ - "▁Schweizer", - -12.724400520324707 - ], - [ - "durchschnittlich", - -12.724411010742188 - ], - [ - "celaşi", - -12.724446296691895 - ], - [ - "▁deceased", - -12.724474906921387 - ], - [ - "▁Nerv", - -12.724668502807617 - ], - [ - "2-2", - -12.7247314453125 - ], - [ - "▁Stahl", - -12.724753379821777 - ], - [ - "▁workload", - -12.724834442138672 - ], - [ - "erhielt", - -12.724984169006348 - ], - [ - "▁hypothesis", - -12.725103378295898 - ], - [ - "bib", - -12.725110054016113 - ], - [ - "▁ţară", - -12.725116729736328 - ], - [ - "vaut", - -12.725122451782227 - ], - [ - "prehensi", - -12.725184440612793 - ], - [ - "▁Offering", - -12.725188255310059 - ], - [ - "▁dislike", - -12.725252151489258 - ], - [ - "▁firewall", - -12.725252151489258 - ], - [ - "mania", - -12.725255966186523 - ], - [ - "195", - -12.725278854370117 - ], - [ - "▁Champ", - -12.725324630737305 - ], - [ - "▁philosophical", - -12.725343704223633 - ], - [ - "länge", - -12.72553539276123 - ], - [ - "advisable", - -12.725785255432129 - ], - [ - "negotiating", - -12.725785255432129 - ], - [ - "Providing", - -12.725791931152344 - ], - [ - "▁1959", - -12.725801467895508 - ], - [ - "▁spyware", - -12.725831031799316 - ], - [ - "sharing", - -12.725837707519531 - ], - [ - "▁prévoi", - -12.725905418395996 - ], - [ - "▁jaune", - -12.7260103225708 - ], - [ - "schoss", - -12.726028442382812 - ], - [ - "▁obține", - -12.726129531860352 - ], - [ - "▁attraktiv", - -12.726489067077637 - ], - [ - "gemeinschaft", - -12.7265043258667 - ], - [ - "BV", - -12.726505279541016 - ], - [ - "Top", - -12.726617813110352 - ], - [ - "▁Sharon", - -12.726625442504883 - ], - [ - "bok", - -12.726675033569336 - ], - [ - "▁résist", - 
-12.726811408996582 - ], - [ - "Napoca", - -12.726822853088379 - ], - [ - "▁Uncategorized", - -12.726898193359375 - ], - [ - "▁trustee", - -12.726936340332031 - ], - [ - "▁remise", - -12.727025985717773 - ], - [ - "▁aştept", - -12.727165222167969 - ], - [ - "▁allergic", - -12.727206230163574 - ], - [ - "èvre", - -12.727211952209473 - ], - [ - "LAR", - -12.72734546661377 - ], - [ - "1.9", - -12.727497100830078 - ], - [ - "▁outbreak", - -12.727520942687988 - ], - [ - "▁trocken", - -12.727568626403809 - ], - [ - "▁laughter", - -12.727724075317383 - ], - [ - "▁Attend", - -12.727785110473633 - ], - [ - "jung", - -12.727822303771973 - ], - [ - "racking", - -12.727934837341309 - ], - [ - "ORS", - -12.728178024291992 - ], - [ - "▁rasp", - -12.728527069091797 - ], - [ - "VF", - -12.728551864624023 - ], - [ - "▁Tamil", - -12.72860050201416 - ], - [ - "124", - -12.728602409362793 - ], - [ - "▁Fiber", - -12.728714942932129 - ], - [ - "▁launches", - -12.728755950927734 - ], - [ - "Post", - -12.728777885437012 - ], - [ - "▁bucks", - -12.729072570800781 - ], - [ - "▁Nicholas", - -12.72923755645752 - ], - [ - "▁cărți", - -12.729255676269531 - ], - [ - "emper", - -12.729681968688965 - ], - [ - "Point", - -12.729689598083496 - ], - [ - "fraction", - -12.729753494262695 - ], - [ - "▁BIG", - -12.729804992675781 - ], - [ - "▁lancer", - -12.729829788208008 - ], - [ - "EVER", - -12.72997760772705 - ], - [ - "trend", - -12.73000431060791 - ], - [ - "▁remerci", - -12.730076789855957 - ], - [ - "▁prevalent", - -12.730168342590332 - ], - [ - "370", - -12.730290412902832 - ], - [ - "▁bestellen", - -12.730327606201172 - ], - [ - "Buying", - -12.730341911315918 - ], - [ - "▁Aufbau", - -12.730416297912598 - ], - [ - "▁opini", - -12.730416297912598 - ], - [ - "▁regiune", - -12.730663299560547 - ], - [ - "▁martial", - -12.73069953918457 - ], - [ - "LK", - -12.730754852294922 - ], - [ - "▁Feuerwehr", - -12.730974197387695 - ], - [ - "screened", - -12.73099422454834 - ], - [ - "Blue", - 
-12.73120403289795 - ], - [ - "▁analize", - -12.731237411499023 - ], - [ - "▁lure", - -12.731247901916504 - ], - [ - "▁internally", - -12.731283187866211 - ], - [ - "father", - -12.731322288513184 - ], - [ - "▁diplomatic", - -12.731343269348145 - ], - [ - "▁Activity", - -12.731464385986328 - ], - [ - "▁cliqu", - -12.73156452178955 - ], - [ - "▁adequately", - -12.731809616088867 - ], - [ - "▁Elena", - -12.73183822631836 - ], - [ - "▁Citizens", - -12.732102394104004 - ], - [ - "▁Länge", - -12.732295989990234 - ], - [ - "▁respectful", - -12.732300758361816 - ], - [ - "▁zuständig", - -12.73248291015625 - ], - [ - "▁réception", - -12.732584953308105 - ], - [ - "▁headset", - -12.732686996459961 - ], - [ - "▁awhile", - -12.732705116271973 - ], - [ - "▁speculation", - -12.732707977294922 - ], - [ - "▁WhatsApp", - -12.732714653015137 - ], - [ - "▁tulbur", - -12.732731819152832 - ], - [ - "▁voluntar", - -12.732758522033691 - ], - [ - "▁Studium", - -12.73277473449707 - ], - [ - "▁protector", - -12.732833862304688 - ], - [ - "▁Wrap", - -12.732840538024902 - ], - [ - "staat", - -12.732951164245605 - ], - [ - "▁judgement", - -12.733396530151367 - ], - [ - "unauthorized", - -12.733397483825684 - ], - [ - "Rank", - -12.733487129211426 - ], - [ - "pră", - -12.733503341674805 - ], - [ - "▁Paw", - -12.733627319335938 - ], - [ - "▁relev", - -12.733664512634277 - ], - [ - "▁arbor", - -12.733830451965332 - ], - [ - "stretches", - -12.733885765075684 - ], - [ - "nook", - -12.733906745910645 - ], - [ - "▁Tunis", - -12.733907699584961 - ], - [ - "▁shocking", - -12.734036445617676 - ], - [ - "▁oppress", - -12.73414421081543 - ], - [ - "10.1", - -12.7341890335083 - ], - [ - "▁ERP", - -12.734310150146484 - ], - [ - "wolle", - -12.7343168258667 - ], - [ - "▁Catch", - -12.734352111816406 - ], - [ - "Plus", - -12.734368324279785 - ], - [ - "Market", - -12.734445571899414 - ], - [ - "scribed", - -12.734536170959473 - ], - [ - "▁décoration", - -12.734594345092773 - ], - [ - "▁chanson", - 
-12.734607696533203 - ], - [ - "▁Midwest", - -12.734763145446777 - ], - [ - "▁Spencer", - -12.734795570373535 - ], - [ - "▁societate", - -12.734807968139648 - ], - [ - "curated", - -12.735087394714355 - ], - [ - "▁canopy", - -12.735135078430176 - ], - [ - "ат", - -12.735142707824707 - ], - [ - "Sig", - -12.73514461517334 - ], - [ - "▁witch", - -12.735153198242188 - ], - [ - "envoyer", - -12.735175132751465 - ], - [ - "▁$1,000", - -12.735230445861816 - ], - [ - "▁peripheral", - -12.735482215881348 - ], - [ - "nnouncing", - -12.735509872436523 - ], - [ - "perfect", - -12.73559284210205 - ], - [ - "▁warten", - -12.735748291015625 - ], - [ - "ELI", - -12.735822677612305 - ], - [ - "▁recap", - -12.735912322998047 - ], - [ - "dün", - -12.735978126525879 - ], - [ - "▁Spre", - -12.736029624938965 - ], - [ - "2005", - -12.736153602600098 - ], - [ - "▁réparation", - -12.73617935180664 - ], - [ - "▁extraordinar", - -12.736196517944336 - ], - [ - "existence", - -12.736337661743164 - ], - [ - "oanele", - -12.736467361450195 - ], - [ - "▁reprezentant", - -12.736474990844727 - ], - [ - "▁attacker", - -12.736490249633789 - ], - [ - "▁Berliner", - -12.73657512664795 - ], - [ - "experience", - -12.736649513244629 - ], - [ - "▁Monde", - -12.736800193786621 - ], - [ - "intervention", - -12.736956596374512 - ], - [ - "▁Einstellung", - -12.736977577209473 - ], - [ - "▁Valentin", - -12.737011909484863 - ], - [ - "▁zonă", - -12.737200736999512 - ], - [ - "occupant", - -12.737223625183105 - ], - [ - "▁mobilis", - -12.737260818481445 - ], - [ - "metall", - -12.737261772155762 - ], - [ - "evangeli", - -12.73729133605957 - ], - [ - "Adding", - -12.737326622009277 - ], - [ - "▁Roland", - -12.73735237121582 - ], - [ - "ENCE", - -12.737462043762207 - ], - [ - "▁Insul", - -12.737478256225586 - ], - [ - "tellement", - -12.737497329711914 - ], - [ - "▁Blogger", - -12.737499237060547 - ], - [ - "▁prote", - -12.737504005432129 - ], - [ - "▁Minimum", - -12.737574577331543 - ], - [ - "▁termic", - 
-12.737624168395996 - ], - [ - "▁Sachen", - -12.737859725952148 - ], - [ - "▁Maschinen", - -12.737863540649414 - ], - [ - "▁Dragnea", - -12.737926483154297 - ], - [ - "▁overtime", - -12.737967491149902 - ], - [ - "calorie", - -12.737968444824219 - ], - [ - "▁jene", - -12.73814868927002 - ], - [ - "▁Satan", - -12.738153457641602 - ], - [ - "▁currencies", - -12.73827075958252 - ], - [ - "▁echipamente", - -12.738329887390137 - ], - [ - "▁forgiveness", - -12.73843765258789 - ], - [ - "▁Pause", - -12.738479614257812 - ], - [ - "▁Witt", - -12.738529205322266 - ], - [ - "STOR", - -12.738632202148438 - ], - [ - "▁actuelle", - -12.738703727722168 - ], - [ - "▁Ard", - -12.738853454589844 - ], - [ - "▁Constitu", - -12.738880157470703 - ], - [ - "ghan", - -12.7388916015625 - ], - [ - "Make", - -12.738906860351562 - ], - [ - "▁garne", - -12.738947868347168 - ], - [ - "▁Hitler", - -12.738956451416016 - ], - [ - "▁rubbish", - -12.738973617553711 - ], - [ - "6.0", - -12.739025115966797 - ], - [ - "▁Giving", - -12.739177703857422 - ], - [ - "▁persever", - -12.73937702178955 - ], - [ - "wirk", - -12.7394380569458 - ], - [ - "liegenden", - -12.739455223083496 - ], - [ - "▁morceau", - -12.73946762084961 - ], - [ - "atty", - -12.73961067199707 - ], - [ - "▁Quebec", - -12.739669799804688 - ], - [ - "harmonie", - -12.739705085754395 - ], - [ - "Nummer", - -12.739721298217773 - ], - [ - "▁splendid", - -12.739747047424316 - ], - [ - "▁halfway", - -12.739808082580566 - ], - [ - "▁periodically", - -12.740071296691895 - ], - [ - "▁Ländern", - -12.740077018737793 - ], - [ - "▁AAA", - -12.740083694458008 - ], - [ - "▁Frost", - -12.740198135375977 - ], - [ - "▁heroin", - -12.740289688110352 - ], - [ - "▁bucurie", - -12.7403564453125 - ], - [ - "▁Pradesh", - -12.74036693572998 - ], - [ - "zusetzen", - -12.740405082702637 - ], - [ - "raising", - -12.740425109863281 - ], - [ - "▁furniz", - -12.740567207336426 - ], - [ - "▁convi", - -12.740575790405273 - ], - [ - "pictured", - -12.740911483764648 - 
], - [ - "▁inadequate", - -12.741065979003906 - ], - [ - "▁aprobat", - -12.741069793701172 - ], - [ - "▁exercising", - -12.741083145141602 - ], - [ - "▁faisai", - -12.741138458251953 - ], - [ - "▁prosecution", - -12.741231918334961 - ], - [ - "380", - -12.741402626037598 - ], - [ - "▁Potential", - -12.74145793914795 - ], - [ - "▁Magi", - -12.741523742675781 - ], - [ - "From", - -12.741752624511719 - ], - [ - "batterie", - -12.74181079864502 - ], - [ - "▁poisson", - -12.74185562133789 - ], - [ - "▁Probe", - -12.741950988769531 - ], - [ - "▁pastel", - -12.741998672485352 - ], - [ - "▁tracked", - -12.742410659790039 - ], - [ - "▁advertisers", - -12.74251937866211 - ], - [ - "adevar", - -12.742537498474121 - ], - [ - "ит", - -12.742776870727539 - ], - [ - "▁Herren", - -12.742815971374512 - ], - [ - "EAM", - -12.742820739746094 - ], - [ - "▁scooter", - -12.742822647094727 - ], - [ - "requesting", - -12.742841720581055 - ], - [ - "dynamis", - -12.742949485778809 - ], - [ - "▁dahin", - -12.742961883544922 - ], - [ - "▁tweak", - -12.743061065673828 - ], - [ - "▁hail", - -12.743101119995117 - ], - [ - "▁întotdeauna", - -12.743160247802734 - ], - [ - "▁Publikum", - -12.743167877197266 - ], - [ - "▁panoramic", - -12.743167877197266 - ], - [ - "▁PRE", - -12.74331283569336 - ], - [ - "▁thrill", - -12.743361473083496 - ], - [ - "Open", - -12.743366241455078 - ], - [ - "▁Layer", - -12.74345588684082 - ], - [ - "▁Bosch", - -12.743459701538086 - ], - [ - "hull", - -12.743511199951172 - ], - [ - "▁născut", - -12.743518829345703 - ], - [ - "tausch", - -12.743559837341309 - ], - [ - "▁autoturism", - -12.743577003479004 - ], - [ - "▁crank", - -12.743701934814453 - ], - [ - "CLE", - -12.743735313415527 - ], - [ - "▁Frederick", - -12.74386978149414 - ], - [ - "mog", - -12.743887901306152 - ], - [ - "behalten", - -12.74396800994873 - ], - [ - "▁aunt", - -12.744050979614258 - ], - [ - "▁Triple", - -12.744141578674316 - ], - [ - "▁Ark", - -12.744242668151855 - ], - [ - "AUD", - 
-12.744440078735352 - ], - [ - "▁Candy", - -12.744505882263184 - ], - [ - "tama", - -12.744515419006348 - ], - [ - "▁Evaluation", - -12.744571685791016 - ], - [ - "▁Memphis", - -12.744571685791016 - ], - [ - "▁stellar", - -12.74457836151123 - ], - [ - "▁fabricat", - -12.744632720947266 - ], - [ - "▁terminat", - -12.744868278503418 - ], - [ - "▁domnul", - -12.744913101196289 - ], - [ - "▁keynote", - -12.744925498962402 - ], - [ - "▁dentistry", - -12.744951248168945 - ], - [ - "rift", - -12.745052337646484 - ], - [ - "▁bilan", - -12.745119094848633 - ], - [ - "2.6", - -12.745125770568848 - ], - [ - "undergoing", - -12.745210647583008 - ], - [ - "▁pseudo", - -12.745274543762207 - ], - [ - "▁maşin", - -12.745280265808105 - ], - [ - "▁munte", - -12.74555492401123 - ], - [ - "▁VW", - -12.745932579040527 - ], - [ - "▁Rab", - -12.74593448638916 - ], - [ - "▁sustine", - -12.745972633361816 - ], - [ - "▁Bedingungen", - -12.745977401733398 - ], - [ - "▁învăţ", - -12.745980262756348 - ], - [ - "▁pyramid", - -12.745983123779297 - ], - [ - "HEN", - -12.746020317077637 - ], - [ - "▁citrus", - -12.746058464050293 - ], - [ - "Code", - -12.746064186096191 - ], - [ - "▁Beginning", - -12.746164321899414 - ], - [ - "▁discourse", - -12.746249198913574 - ], - [ - "▁miercuri", - -12.746329307556152 - ], - [ - "▁producător", - -12.74637508392334 - ], - [ - "▁analys", - -12.746397972106934 - ], - [ - "▁Evan", - -12.7467041015625 - ], - [ - "138", - -12.746987342834473 - ], - [ - "▁târziu", - -12.74703311920166 - ], - [ - "▁relocation", - -12.747052192687988 - ], - [ - "decizia", - -12.74708080291748 - ], - [ - "tollen", - -12.74714183807373 - ], - [ - "TRO", - -12.747180938720703 - ], - [ - "▁runway", - -12.74719524383545 - ], - [ - "illet", - -12.747270584106445 - ], - [ - "▁serveur", - -12.747387886047363 - ], - [ - "bezogen", - -12.747427940368652 - ], - [ - "▁believers", - -12.747668266296387 - ], - [ - "determined", - -12.747711181640625 - ], - [ - "▁reinforced", - -12.74791431427002 - 
], - [ - "▁wedge", - -12.748006820678711 - ], - [ - "methyl", - -12.74807357788086 - ], - [ - "MES", - -12.748188018798828 - ], - [ - "vpn", - -12.748374938964844 - ], - [ - "▁consta", - -12.74837875366211 - ], - [ - "▁vizitat", - -12.748420715332031 - ], - [ - "modul", - -12.748455047607422 - ], - [ - "▁routing", - -12.748528480529785 - ], - [ - "tempted", - -12.748540878295898 - ], - [ - "URS", - -12.748785018920898 - ], - [ - "apprentissage", - -12.748795509338379 - ], - [ - "▁Hungary", - -12.748796463012695 - ], - [ - "Previously", - -12.74880313873291 - ], - [ - "▁translator", - -12.748804092407227 - ], - [ - "▁resonate", - -12.748830795288086 - ], - [ - "201", - -12.748851776123047 - ], - [ - "3-0", - -12.749029159545898 - ], - [ - "▁reunion", - -12.749090194702148 - ], - [ - "▁palate", - -12.749096870422363 - ], - [ - "0.4", - -12.749171257019043 - ], - [ - "reheat", - -12.74924373626709 - ], - [ - "Roo", - -12.749261856079102 - ], - [ - "200,000", - -12.74940013885498 - ], - [ - "Bro", - -12.749431610107422 - ], - [ - "▁estimation", - -12.749468803405762 - ], - [ - "schneiden", - -12.749499320983887 - ], - [ - "▁Inspired", - -12.749506950378418 - ], - [ - "▁lottery", - -12.749539375305176 - ], - [ - "▁Friedrich", - -12.749887466430664 - ], - [ - "FIT", - -12.749913215637207 - ], - [ - "0.6", - -12.7499418258667 - ], - [ - "▁dagegen", - -12.74997615814209 - ], - [ - "▁Reb", - -12.750115394592285 - ], - [ - "▁Eigenschaften", - -12.75020694732666 - ], - [ - "▁molding", - -12.750361442565918 - ], - [ - "▁Harper", - -12.750548362731934 - ], - [ - "verwaltung", - -12.75055980682373 - ], - [ - "▁Schlüssel", - -12.75055980682373 - ], - [ - "▁desfasura", - -12.75055980682373 - ], - [ - "▁rencontrer", - -12.75055980682373 - ], - [ - "▁negoci", - -12.750581741333008 - ], - [ - "▁Leading", - -12.750615119934082 - ], - [ - "▁necesita", - -12.750652313232422 - ], - [ - "▁biking", - -12.750683784484863 - ], - [ - "▁jointly", - -12.75069808959961 - ], - [ - "▁crush", - 
-12.750702857971191 - ], - [ - "Vol", - -12.750768661499023 - ], - [ - "▁ebay", - -12.750836372375488 - ], - [ - "▁Shri", - -12.750991821289062 - ], - [ - "▁AMD", - -12.751029968261719 - ], - [ - "FG", - -12.751032829284668 - ], - [ - "Argentin", - -12.75120735168457 - ], - [ - "▁incercat", - -12.751431465148926 - ], - [ - "▁tidy", - -12.751628875732422 - ], - [ - "▁provoqu", - -12.751635551452637 - ], - [ - "▁Written", - -12.751649856567383 - ], - [ - "▁Kooperation", - -12.751666069030762 - ], - [ - "▁scripture", - -12.751952171325684 - ], - [ - "▁Pflicht", - -12.751974105834961 - ], - [ - "ficial", - -12.752013206481934 - ], - [ - "vremea", - -12.752013206481934 - ], - [ - "▁Growing", - -12.752115249633789 - ], - [ - "▁redesign", - -12.752119064331055 - ], - [ - "▁obstacle", - -12.752214431762695 - ], - [ - "▁rugam", - -12.752235412597656 - ], - [ - "▁SPD", - -12.752243995666504 - ], - [ - "165", - -12.752270698547363 - ], - [ - "fiz", - -12.752284049987793 - ], - [ - "▁startet", - -12.752326011657715 - ], - [ - "▁Principle", - -12.752327919006348 - ], - [ - "▁abdominal", - -12.752327919006348 - ], - [ - "▁podium", - -12.752528190612793 - ], - [ - "duty", - -12.752616882324219 - ], - [ - "bonne", - -12.752679824829102 - ], - [ - "▁Serbia", - -12.752687454223633 - ], - [ - "▁brunch", - -12.752839088439941 - ], - [ - "▁Personne", - -12.752975463867188 - ], - [ - "▁Idea", - -12.753034591674805 - ], - [ - "forementioned", - -12.753036499023438 - ], - [ - "▁chassis", - -12.753037452697754 - ], - [ - "gebühr", - -12.753050804138184 - ], - [ - "ucun", - -12.753061294555664 - ], - [ - "▁Maz", - -12.7531156539917 - ], - [ - "1-4", - -12.75318431854248 - ], - [ - "kleid", - -12.753273963928223 - ], - [ - "▁Volvo", - -12.753337860107422 - ], - [ - "brechen", - -12.753378868103027 - ], - [ - "▁homepage", - -12.753472328186035 - ], - [ - "fuz", - -12.753509521484375 - ], - [ - "▁abgeschlossen", - -12.753595352172852 - ], - [ - "▁gelungen", - -12.753658294677734 - ], - [ - 
"▁booklet", - -12.753711700439453 - ], - [ - "▁Ukrainian", - -12.753745079040527 - ], - [ - "▁Melissa", - -12.753746032714844 - ], - [ - "CENT", - -12.75379467010498 - ], - [ - "▁intégré", - -12.753806114196777 - ], - [ - "weighing", - -12.753827095031738 - ], - [ - "▁crumbl", - -12.753894805908203 - ], - [ - "▁bunk", - -12.754167556762695 - ], - [ - "krieg", - -12.754207611083984 - ], - [ - "▁freshman", - -12.754307746887207 - ], - [ - "alaya", - -12.754339218139648 - ], - [ - "Avem", - -12.754353523254395 - ], - [ - "▁Kne", - -12.754423141479492 - ], - [ - "▁upstairs", - -12.75448226928711 - ], - [ - "AIL", - -12.754508972167969 - ], - [ - "țul", - -12.75478744506836 - ], - [ - "▁Lecture", - -12.754817962646484 - ], - [ - "▁entdecken", - -12.754843711853027 - ], - [ - "▁GMT", - -12.754912376403809 - ], - [ - "▁Leitung", - -12.754937171936035 - ], - [ - "▁inclined", - -12.755170822143555 - ], - [ - "▁skillet", - -12.75555419921875 - ], - [ - "FN", - -12.755742073059082 - ], - [ - "▁Perform", - -12.755821228027344 - ], - [ - "shift", - -12.75583267211914 - ], - [ - "recognizing", - -12.755873680114746 - ], - [ - "▁concise", - -12.755873680114746 - ], - [ - "▁obsessed", - -12.755873680114746 - ], - [ - "▁removable", - -12.755873680114746 - ], - [ - "▁Relax", - -12.755888938903809 - ], - [ - "delegates", - -12.75605583190918 - ], - [ - "▁expedi", - -12.756074905395508 - ], - [ - "▁Schä", - -12.756138801574707 - ], - [ - "iete", - -12.756211280822754 - ], - [ - "▁reciproc", - -12.756229400634766 - ], - [ - "▁neutr", - -12.75625228881836 - ], - [ - "lactic", - -12.756314277648926 - ], - [ - "▁Nah", - -12.756328582763672 - ], - [ - "scene", - -12.7565279006958 - ], - [ - "▁Helm", - -12.756563186645508 - ], - [ - "▁Bewerbung", - -12.756671905517578 - ], - [ - "▁Cassi", - -12.75667953491211 - ], - [ - "▁Gelegenheit", - -12.756939888000488 - ], - [ - "▁reflective", - -12.757140159606934 - ], - [ - "▁încredere", - -12.757149696350098 - ], - [ - "▁cigarettes", - 
-12.75717544555664 - ], - [ - "▁Zusätzlich", - -12.757295608520508 - ], - [ - "▁intercept", - -12.75731372833252 - ], - [ - "▁Finn", - -12.757468223571777 - ], - [ - "▁ignor", - -12.757661819458008 - ], - [ - "gian", - -12.75766372680664 - ], - [ - "BRA", - -12.757740020751953 - ], - [ - "leader", - -12.757957458496094 - ], - [ - "nius", - -12.757981300354004 - ], - [ - "▁skies", - -12.757987022399902 - ], - [ - "▁nunta", - -12.758023262023926 - ], - [ - "▁grec", - -12.758041381835938 - ], - [ - "arranging", - -12.75816822052002 - ], - [ - "wartet", - -12.758231163024902 - ], - [ - "▁kostet", - -12.758377075195312 - ], - [ - "▁Entre", - -12.758541107177734 - ], - [ - "Mag", - -12.758575439453125 - ], - [ - "▁radiator", - -12.758598327636719 - ], - [ - "übrigens", - -12.758689880371094 - ], - [ - "Internet", - -12.758706092834473 - ], - [ - "▁connexion", - -12.758718490600586 - ], - [ - "▁prolonged", - -12.758854866027832 - ], - [ - "▁capabil", - -12.75914192199707 - ], - [ - "▁feeder", - -12.759217262268066 - ], - [ - "Initially", - -12.759223937988281 - ], - [ - "Green", - -12.75926685333252 - ], - [ - "▁passiert", - -12.759272575378418 - ], - [ - "▁courtyard", - -12.759299278259277 - ], - [ - "▁judeţ", - -12.759320259094238 - ], - [ - "▁Coalition", - -12.759431838989258 - ], - [ - "▁atmospheric", - -12.759431838989258 - ], - [ - "▁velocity", - -12.759431838989258 - ], - [ - "▁Frühstück", - -12.759432792663574 - ], - [ - "vacancies", - -12.759438514709473 - ], - [ - "unified", - -12.759538650512695 - ], - [ - "▁Ahmed", - -12.759538650512695 - ], - [ - "poured", - -12.759550094604492 - ], - [ - "▁Mikro", - -12.75959587097168 - ], - [ - "▁Klar", - -12.759661674499512 - ], - [ - "kommt", - -12.759681701660156 - ], - [ - "seated", - -12.759744644165039 - ], - [ - "musik", - -12.75976848602295 - ], - [ - "▁stimulation", - -12.759841918945312 - ], - [ - "▁solicitat", - -12.759880065917969 - ], - [ - "▁politically", - -12.760165214538574 - ], - [ - "restoring", - 
-12.760322570800781 - ], - [ - "▁Rag", - -12.760435104370117 - ], - [ - "▁officielle", - -12.760468482971191 - ], - [ - "▁Annie", - -12.760479927062988 - ], - [ - "▁tourne", - -12.760634422302246 - ], - [ - "▁Joel", - -12.760642051696777 - ], - [ - "blieben", - -12.760666847229004 - ], - [ - "▁repayment", - -12.760736465454102 - ], - [ - "▁Strategi", - -12.760781288146973 - ], - [ - "▁prietenii", - -12.760804176330566 - ], - [ - "▁Montgomery", - -12.760858535766602 - ], - [ - "▁résidence", - -12.760858535766602 - ], - [ - "▁sunglasses", - -12.760858535766602 - ], - [ - "▁1956", - -12.760882377624512 - ], - [ - "MEN", - -12.76093578338623 - ], - [ - "pouvant", - -12.760997772216797 - ], - [ - "375", - -12.761061668395996 - ], - [ - "directed", - -12.761173248291016 - ], - [ - "▁grinder", - -12.76120662689209 - ], - [ - "rträge", - -12.761279106140137 - ], - [ - "▁nickel", - -12.761299133300781 - ], - [ - "▁Maintain", - -12.761313438415527 - ], - [ - "▁Holmes", - -12.761392593383789 - ], - [ - "▁obtinut", - -12.76157283782959 - ], - [ - "▁walnut", - -12.761585235595703 - ], - [ - "▁consultancy", - -12.761640548706055 - ], - [ - "cooled", - -12.761651039123535 - ], - [ - "▁Brig", - -12.761711120605469 - ], - [ - "▁Produc", - -12.761873245239258 - ], - [ - "street", - -12.76187515258789 - ], - [ - "▁Einfach", - -12.761897087097168 - ], - [ - "North", - -12.762149810791016 - ], - [ - "▁PET", - -12.76220989227295 - ], - [ - "▁Président", - -12.762288093566895 - ], - [ - "▁produsului", - -12.762457847595215 - ], - [ - "literatur", - -12.762483596801758 - ], - [ - "133", - -12.762561798095703 - ], - [ - "▁recours", - -12.762591361999512 - ], - [ - "▁verpflichtet", - -12.76264476776123 - ], - [ - "▁Wur", - -12.762733459472656 - ], - [ - "▁psiholog", - -12.762796401977539 - ], - [ - "Veg", - -12.762871742248535 - ], - [ - "▁hype", - -12.762930870056152 - ], - [ - "augmenter", - -12.762974739074707 - ], - [ - "▁Welsh", - -12.763012886047363 - ], - [ - "mounted", - 
-12.763158798217773 - ], - [ - "▁Wann", - -12.763425827026367 - ], - [ - "▁gezeigt", - -12.763620376586914 - ], - [ - "▁memo", - -12.763631820678711 - ], - [ - "veterinary", - -12.763717651367188 - ], - [ - "▁Olympia", - -12.763717651367188 - ], - [ - "▁handsome", - -12.763871192932129 - ], - [ - "yama", - -12.763911247253418 - ], - [ - "studio", - -12.763912200927734 - ], - [ - "sozial", - -12.764020919799805 - ], - [ - "▁reap", - -12.764104843139648 - ], - [ - "▁didactic", - -12.764111518859863 - ], - [ - "▁Cookie", - -12.764126777648926 - ], - [ - "▁cooper", - -12.764230728149414 - ], - [ - "▁discern", - -12.76441478729248 - ], - [ - "▁Ubuntu", - -12.764433860778809 - ], - [ - "domain", - -12.76443862915039 - ], - [ - "▁plasa", - -12.764460563659668 - ], - [ - "hong", - -12.764585494995117 - ], - [ - "▁Freiheit", - -12.764662742614746 - ], - [ - "▁Gateway", - -12.764678001403809 - ], - [ - "▁poke", - -12.764796257019043 - ], - [ - "▁niedrig", - -12.76484203338623 - ], - [ - "▁corrected", - -12.764899253845215 - ], - [ - "▁predator", - -12.76490306854248 - ], - [ - "QA", - -12.76507568359375 - ], - [ - "Physio", - -12.765101432800293 - ], - [ - "MAS", - -12.765108108520508 - ], - [ - "▁sanctuary", - -12.765151023864746 - ], - [ - "▁aferent", - -12.76523494720459 - ], - [ - "▁perdre", - -12.765268325805664 - ], - [ - "▁recherch", - -12.765397071838379 - ], - [ - "ready", - -12.76559829711914 - ], - [ - "without", - -12.76560115814209 - ], - [ - "▁locuitori", - -12.765628814697266 - ], - [ - "▁Memo", - -12.765636444091797 - ], - [ - "▁Laden", - -12.765646934509277 - ], - [ - "danken", - -12.76577377319336 - ], - [ - "▁CNC", - -12.765861511230469 - ], - [ - "▁jealous", - -12.765881538391113 - ], - [ - "▁Background", - -12.765951156616211 - ], - [ - "▁Marx", - -12.765999794006348 - ], - [ - "▁Heli", - -12.766039848327637 - ], - [ - "▁osteo", - -12.766057968139648 - ], - [ - "▁rassembl", - -12.766162872314453 - ], - [ - "▁altceva", - -12.766226768493652 - ], - [ - 
"▁beschäftigt", - -12.766226768493652 - ], - [ - "▁accru", - -12.766266822814941 - ], - [ - "üft", - -12.766273498535156 - ], - [ - "▁sprout", - -12.766288757324219 - ], - [ - "endorf", - -12.76647663116455 - ], - [ - "▁specialitate", - -12.766483306884766 - ], - [ - "éanmoins", - -12.766586303710938 - ], - [ - "▁poign", - -12.766663551330566 - ], - [ - "▁mânca", - -12.766668319702148 - ], - [ - "▁stretched", - -12.766752243041992 - ], - [ - "fensiv", - -12.76677131652832 - ], - [ - "▁Auction", - -12.76683235168457 - ], - [ - "hints", - -12.766944885253906 - ], - [ - "▁typo", - -12.766983032226562 - ], - [ - "▁Rare", - -12.767003059387207 - ], - [ - "▁interruption", - -12.767043113708496 - ], - [ - "▁Mean", - -12.76709270477295 - ], - [ - "privileged", - -12.767108917236328 - ], - [ - "▁purtat", - -12.767129898071289 - ], - [ - "studie", - -12.767229080200195 - ], - [ - "offres", - -12.767248153686523 - ], - [ - "▁flap", - -12.76729679107666 - ], - [ - "▁rhetoric", - -12.767304420471191 - ], - [ - "▁snapshot", - -12.767325401306152 - ], - [ - "▁Conservative", - -12.767367362976074 - ], - [ - "▁taie", - -12.767416954040527 - ], - [ - "Game", - -12.767499923706055 - ], - [ - "▁naissance", - -12.767663955688477 - ], - [ - "Prof", - -12.767704963684082 - ], - [ - "qualified", - -12.767745971679688 - ], - [ - "▁suppression", - -12.767749786376953 - ], - [ - "▁răspunde", - -12.767765045166016 - ], - [ - "▁1/3", - -12.767803192138672 - ], - [ - "▁lieben", - -12.767858505249023 - ], - [ - "ù", - -12.767898559570312 - ], - [ - "america", - -12.767955780029297 - ], - [ - "▁Mum", - -12.768182754516602 - ], - [ - "▁Researchers", - -12.76827335357666 - ], - [ - "quip", - -12.768308639526367 - ], - [ - "▁fenomen", - -12.768383026123047 - ], - [ - "stools", - -12.768387794494629 - ], - [ - "▁commodity", - -12.768742561340332 - ], - [ - "▁rejuvenat", - -12.768745422363281 - ], - [ - "▁ausgezeichnet", - -12.76876449584961 - ], - [ - "▁păcate", - -12.768784523010254 - ], - [ - 
"3.6", - -12.76882553100586 - ], - [ - "zwei", - -12.768904685974121 - ], - [ - "accounted", - -12.768982887268066 - ], - [ - "▁Cycle", - -12.76900863647461 - ], - [ - "politischen", - -12.769031524658203 - ], - [ - "Normally", - -12.76904010772705 - ], - [ - "▁transcend", - -12.769158363342285 - ], - [ - "▁Classes", - -12.769268989562988 - ], - [ - "▁vene", - -12.769363403320312 - ], - [ - "protein", - -12.76942253112793 - ], - [ - "formulaire", - -12.76944351196289 - ], - [ - "▁endurance", - -12.769463539123535 - ], - [ - "▁Census", - -12.769464492797852 - ], - [ - "▁census", - -12.7694673538208 - ], - [ - "▁conțin", - -12.76952838897705 - ], - [ - "▁multinational", - -12.769563674926758 - ], - [ - "▁consomm", - -12.769572257995605 - ], - [ - "▁Porter", - -12.769762992858887 - ], - [ - "▁marvel", - -12.769777297973633 - ], - [ - "▁probable", - -12.769824028015137 - ], - [ - "dependable", - -12.770044326782227 - ], - [ - "▁crore", - -12.77015495300293 - ], - [ - "▁6:30", - -12.770224571228027 - ], - [ - "▁Bradley", - -12.77032470703125 - ], - [ - "molecule", - -12.770400047302246 - ], - [ - "inclusiv", - -12.770516395568848 - ], - [ - "▁privilégi", - -12.770543098449707 - ], - [ - "▁cerere", - -12.770611763000488 - ], - [ - "ouille", - -12.770696640014648 - ], - [ - "▁âgé", - -12.770787239074707 - ], - [ - "▁ghid", - -12.770801544189453 - ], - [ - "▁Controller", - -12.77082347869873 - ], - [ - "▁incredere", - -12.770988464355469 - ], - [ - "▁hostel", - -12.771015167236328 - ], - [ - "wissenschaft", - -12.771121978759766 - ], - [ - "▁cooperate", - -12.771183967590332 - ], - [ - "ки", - -12.771202087402344 - ], - [ - "▁Küchen", - -12.771384239196777 - ], - [ - "▁BIO", - -12.771406173706055 - ], - [ - "▁deliveries", - -12.771458625793457 - ], - [ - "▁urmări", - -12.771553993225098 - ], - [ - "▁überzeugen", - -12.771631240844727 - ], - [ - "Roofing", - -12.771703720092773 - ], - [ - "▁Adel", - -12.771737098693848 - ], - [ - "▁navy", - -12.77181339263916 - ], - [ - 
"▁cider", - -12.772101402282715 - ], - [ - "▁dulce", - -12.772109985351562 - ], - [ - "▁inspirat", - -12.772163391113281 - ], - [ - "allez", - -12.772164344787598 - ], - [ - "HH", - -12.77221965789795 - ], - [ - "▁Danish", - -12.7722749710083 - ], - [ - "CDC", - -12.7722806930542 - ], - [ - "▁Milch", - -12.772303581237793 - ], - [ - "▁Hockey", - -12.772346496582031 - ], - [ - "▁Smooth", - -12.772347450256348 - ], - [ - "▁FIFA", - -12.772361755371094 - ], - [ - "▁Devon", - -12.772364616394043 - ], - [ - "chung", - -12.772379875183105 - ], - [ - "▁villain", - -12.772420883178711 - ], - [ - "▁musée", - -12.772441864013672 - ], - [ - "tiennent", - -12.772557258605957 - ], - [ - "chou", - -12.772732734680176 - ], - [ - "kopf", - -12.772809982299805 - ], - [ - "printed", - -12.77281379699707 - ], - [ - "▁Depression", - -12.773076057434082 - ], - [ - "▁opioid", - -12.773082733154297 - ], - [ - "nomie", - -12.773098945617676 - ], - [ - "▁footwear", - -12.773211479187012 - ], - [ - "▁Cause", - -12.773260116577148 - ], - [ - "SEL", - -12.773515701293945 - ], - [ - "▁Roller", - -12.773523330688477 - ], - [ - "▁einzigartige", - -12.773589134216309 - ], - [ - "desea", - -12.773597717285156 - ], - [ - "▁nasty", - -12.773792266845703 - ], - [ - "formulated", - -12.773877143859863 - ], - [ - "breaker", - -12.773958206176758 - ], - [ - "▁goodies", - -12.773961067199707 - ], - [ - "▁sandy", - -12.774189949035645 - ], - [ - "method", - -12.77425479888916 - ], - [ - "▁Maple", - -12.774308204650879 - ], - [ - "gefragt", - -12.774435997009277 - ], - [ - "▁decreasing", - -12.774515151977539 - ], - [ - "ceşti", - -12.774555206298828 - ], - [ - "▁DUI", - -12.774563789367676 - ], - [ - "▁pierdere", - -12.774574279785156 - ], - [ - "▁brushes", - -12.77466869354248 - ], - [ - "▁Fully", - -12.774712562561035 - ], - [ - "filtered", - -12.774789810180664 - ], - [ - "ruins", - -12.774988174438477 - ], - [ - "Save", - -12.775114059448242 - ], - [ - "sweeping", - -12.7752046585083 - ], - [ - "PCR", 
- -12.775334358215332 - ], - [ - "▁folded", - -12.775337219238281 - ], - [ - "▁urca", - -12.775444030761719 - ], - [ - "▁clic", - -12.775484085083008 - ], - [ - "▁spécialiste", - -12.775614738464355 - ], - [ - "▁durfte", - -12.775686264038086 - ], - [ - "tuși", - -12.775871276855469 - ], - [ - "▁diligent", - -12.77596378326416 - ], - [ - "▁verdict", - -12.775972366333008 - ], - [ - "▁chaise", - -12.776039123535156 - ], - [ - "▁cleanup", - -12.776068687438965 - ], - [ - "▁Guitar", - -12.776076316833496 - ], - [ - "▁Dip", - -12.776142120361328 - ], - [ - "vru", - -12.776260375976562 - ], - [ - "▁cogn", - -12.776373863220215 - ], - [ - "something", - -12.776529312133789 - ], - [ - "hidr", - -12.776535034179688 - ], - [ - "ENG", - -12.776607513427734 - ], - [ - "Paul", - -12.776679039001465 - ], - [ - "▁reboot", - -12.776687622070312 - ], - [ - "savvy", - -12.776688575744629 - ], - [ - "▁Macron", - -12.776710510253906 - ], - [ - "▁Kino", - -12.77682876586914 - ], - [ - "232", - -12.776832580566406 - ], - [ - "▁gravit", - -12.776861190795898 - ], - [ - "ANC", - -12.776883125305176 - ], - [ - "▁petrecut", - -12.776944160461426 - ], - [ - "▁signage", - -12.776959419250488 - ], - [ - "odia", - -12.776987075805664 - ], - [ - "▁GRA", - -12.77712631225586 - ], - [ - "▁alegeril", - -12.777129173278809 - ], - [ - "leger", - -12.77717399597168 - ], - [ - "▁medicamente", - -12.777174949645996 - ], - [ - "pentru", - -12.777249336242676 - ], - [ - "▁collectif", - -12.777251243591309 - ], - [ - "▁Sohn", - -12.777298927307129 - ], - [ - "205", - -12.777313232421875 - ], - [ - "▁Reach", - -12.77733039855957 - ], - [ - "RAM", - -12.777400970458984 - ], - [ - "3.4", - -12.777405738830566 - ], - [ - "▁bleach", - -12.777409553527832 - ], - [ - "▁diligence", - -12.777414321899414 - ], - [ - "▁MORE", - -12.777440071105957 - ], - [ - "▁Critical", - -12.777471542358398 - ], - [ - "▁singură", - -12.77767276763916 - ], - [ - "▁adversar", - -12.777791023254395 - ], - [ - "▁Buzz", - 
-12.7778902053833 - ], - [ - "▁demeure", - -12.778063774108887 - ], - [ - "▁nephew", - -12.778141021728516 - ], - [ - "▁Boom", - -12.77817440032959 - ], - [ - "▁shining", - -12.77819538116455 - ], - [ - "▁sponge", - -12.778206825256348 - ], - [ - "liest", - -12.77841854095459 - ], - [ - "rseits", - -12.778690338134766 - ], - [ - "▁capita", - -12.778823852539062 - ], - [ - "esthesia", - -12.778867721557617 - ], - [ - "500,000", - -12.77895736694336 - ], - [ - "▁Pressure", - -12.77898120880127 - ], - [ - "ifikation", - -12.779021263122559 - ], - [ - "▁acceleration", - -12.779181480407715 - ], - [ - "▁Pfarr", - -12.779282569885254 - ], - [ - "▁imobil", - -12.779304504394531 - ], - [ - "▁pericol", - -12.779326438903809 - ], - [ - "▁flock", - -12.779454231262207 - ], - [ - "▁Scholar", - -12.77962875366211 - ], - [ - "▁Fusion", - -12.779630661010742 - ], - [ - "▁revolve", - -12.779637336730957 - ], - [ - "Plugin", - -12.779664993286133 - ], - [ - "▁Ruf", - -12.779691696166992 - ], - [ - "▁tehnici", - -12.780024528503418 - ], - [ - "voice", - -12.78005313873291 - ], - [ - "▁anomal", - -12.780203819274902 - ], - [ - "▁gefallen", - -12.780252456665039 - ], - [ - "▁Wyoming", - -12.780322074890137 - ], - [ - "▁9:00", - -12.780354499816895 - ], - [ - "packed", - -12.780461311340332 - ], - [ - "▁Zimbabwe", - -12.780686378479004 - ], - [ - "▁glücklich", - -12.780766487121582 - ], - [ - "ethanol", - -12.78077220916748 - ], - [ - "▁effektiv", - -12.780936241149902 - ], - [ - "▁saptamani", - -12.781049728393555 - ], - [ - "▁umfasst", - -12.781052589416504 - ], - [ - "▁Werbung", - -12.781103134155273 - ], - [ - "▁undermine", - -12.781164169311523 - ], - [ - "▁Lego", - -12.781322479248047 - ], - [ - "▁Rac", - -12.781323432922363 - ], - [ - "educating", - -12.781441688537598 - ], - [ - "leiten", - -12.781451225280762 - ], - [ - "derma", - -12.781518936157227 - ], - [ - "hängen", - -12.781597137451172 - ], - [ - "Lumin", - -12.781846046447754 - ], - [ - "▁PNL", - -12.781913757324219 - 
], - [ - "▁volcano", - -12.782064437866211 - ], - [ - "▁Anfrage", - -12.782066345214844 - ], - [ - "▁resp", - -12.782124519348145 - ], - [ - "leigh", - -12.78217601776123 - ], - [ - "▁addict", - -12.782176971435547 - ], - [ - "WORK", - -12.782312393188477 - ], - [ - "▁FY", - -12.782322883605957 - ], - [ - "▁maneuver", - -12.782513618469238 - ], - [ - "flächen", - -12.782525062561035 - ], - [ - "zweck", - -12.782527923583984 - ], - [ - "tolerant", - -12.782609939575195 - ], - [ - "Davidson", - -12.78272533416748 - ], - [ - "▁meteor", - -12.782849311828613 - ], - [ - "▁Stephanie", - -12.78291130065918 - ], - [ - "▁plafon", - -12.783126831054688 - ], - [ - "technischen", - -12.78316879272461 - ], - [ - "unused", - -12.783193588256836 - ], - [ - "▁voulai", - -12.783228874206543 - ], - [ - "▁fehlt", - -12.783447265625 - ], - [ - "möglichen", - -12.783955574035645 - ], - [ - "▁Twenty", - -12.783968925476074 - ], - [ - "composing", - -12.783979415893555 - ], - [ - "▁rebate", - -12.78400707244873 - ], - [ - "Italie", - -12.784036636352539 - ], - [ - "▁goodbye", - -12.784058570861816 - ], - [ - "wild", - -12.784061431884766 - ], - [ - "▁lancé", - -12.784077644348145 - ], - [ - "▁wunderschöne", - -12.784083366394043 - ], - [ - "▁Frontier", - -12.784139633178711 - ], - [ - "▁murit", - -12.784313201904297 - ], - [ - "▁scump", - -12.78464412689209 - ], - [ - "OVER", - -12.784682273864746 - ], - [ - "▁meme", - -12.784709930419922 - ], - [ - "Super", - -12.784733772277832 - ], - [ - "▁Crack", - -12.784849166870117 - ], - [ - "rennen", - -12.784907341003418 - ], - [ - "▁interessiert", - -12.784941673278809 - ], - [ - "▁relaţi", - -12.784942626953125 - ], - [ - "▁factories", - -12.784975051879883 - ], - [ - "▁[...]", - -12.785066604614258 - ], - [ - "▁vizite", - -12.785075187683105 - ], - [ - "▁erfolgen", - -12.785199165344238 - ], - [ - "▁Hosting", - -12.785244941711426 - ], - [ - "▁localitate", - -12.78528118133545 - ], - [ - "▁chasse", - -12.785415649414062 - ], - [ - "▁Meadow", 
- -12.785465240478516 - ], - [ - "▁expansive", - -12.785513877868652 - ], - [ - "hov", - -12.785874366760254 - ], - [ - "Phil", - -12.785978317260742 - ], - [ - "illian", - -12.786107063293457 - ], - [ - "▁manipulate", - -12.786107063293457 - ], - [ - "informationen", - -12.786130905151367 - ], - [ - "▁profesionist", - -12.786162376403809 - ], - [ - "risen", - -12.786252975463867 - ], - [ - "frem", - -12.786300659179688 - ], - [ - "Act", - -12.78640079498291 - ], - [ - "supervised", - -12.786491394042969 - ], - [ - "▁capul", - -12.786506652832031 - ], - [ - "▁Craiova", - -12.786528587341309 - ], - [ - "▁victoire", - -12.786528587341309 - ], - [ - "▁guitarist", - -12.786680221557617 - ], - [ - "▁identific", - -12.786684036254883 - ], - [ - "democrat", - -12.786864280700684 - ], - [ - "Authentic", - -12.786894798278809 - ], - [ - "▁Autumn", - -12.786894798278809 - ], - [ - "▁bodi", - -12.787014961242676 - ], - [ - "April", - -12.787044525146484 - ], - [ - "▁Burger", - -12.787049293518066 - ], - [ - "▁BEST", - -12.787490844726562 - ], - [ - "▁torrent", - -12.78749942779541 - ], - [ - "UV", - -12.787567138671875 - ], - [ - "▁renal", - -12.787676811218262 - ], - [ - "founded", - -12.787693977355957 - ], - [ - "203", - -12.787956237792969 - ], - [ - "▁Flooring", - -12.78799057006836 - ], - [ - "▁kilogram", - -12.787994384765625 - ], - [ - "▁garantiert", - -12.788139343261719 - ], - [ - "▁fulfil", - -12.788204193115234 - ], - [ - "303", - -12.788330078125 - ], - [ - "▁schafft", - -12.788363456726074 - ], - [ - "▁butterfly", - -12.788365364074707 - ], - [ - "▁Stuart", - -12.788382530212402 - ], - [ - "▁Versuch", - -12.788392066955566 - ], - [ - "▁liking", - -12.788412094116211 - ], - [ - "▁chercher", - -12.788508415222168 - ], - [ - "▁wrapping", - -12.788527488708496 - ], - [ - "schrieb", - -12.788652420043945 - ], - [ - "▁abuz", - -12.788718223571777 - ], - [ - "▁maîtrise", - -12.788772583007812 - ], - [ - "EQ", - -12.788887977600098 - ], - [ - "▁Erinnerung", - 
-12.789095878601074 - ], - [ - "▁bridal", - -12.78909969329834 - ], - [ - "Rock", - -12.789118766784668 - ], - [ - "▁copied", - -12.789193153381348 - ], - [ - "Met", - -12.789206504821777 - ], - [ - "▁incep", - -12.789233207702637 - ], - [ - "▁sinus", - -12.789336204528809 - ], - [ - "▁Felix", - -12.789831161499023 - ], - [ - "▁Deluxe", - -12.789837837219238 - ], - [ - "▁GPU", - -12.789848327636719 - ], - [ - "Sie", - -12.790164947509766 - ], - [ - "lowering", - -12.790262222290039 - ], - [ - "▁Trotz", - -12.790282249450684 - ], - [ - "333", - -12.790417671203613 - ], - [ - "withstand", - -12.79055118560791 - ], - [ - "▁Aufenthalt", - -12.790566444396973 - ], - [ - "▁unhealthy", - -12.790567398071289 - ], - [ - "▁urbain", - -12.790573120117188 - ], - [ - "▁LOL", - -12.790702819824219 - ], - [ - "▁Ballet", - -12.79074478149414 - ], - [ - "▁Decoration", - -12.79083251953125 - ], - [ - "weist", - -12.790839195251465 - ], - [ - "▁Residence", - -12.790932655334473 - ], - [ - "▁Leeds", - -12.791055679321289 - ], - [ - "▁Genau", - -12.791084289550781 - ], - [ - "Imagin", - -12.791136741638184 - ], - [ - "▁suspicion", - -12.791300773620605 - ], - [ - "▁pêche", - -12.791301727294922 - ], - [ - "▁Soccer", - -12.791306495666504 - ], - [ - "▁protectie", - -12.791553497314453 - ], - [ - "ATS", - -12.791796684265137 - ], - [ - "stocked", - -12.791838645935059 - ], - [ - "▁gymnas", - -12.79184627532959 - ], - [ - "ASP", - -12.792027473449707 - ], - [ - "▁Independence", - -12.792037010192871 - ], - [ - "▁Wizard", - -12.792037963867188 - ], - [ - "▁nitrogen", - -12.79204273223877 - ], - [ - "amerikanische", - -12.7920503616333 - ], - [ - "▁Indianapolis", - -12.79205322265625 - ], - [ - "catches", - -12.792131423950195 - ], - [ - "stria", - -12.792275428771973 - ], - [ - "schätze", - -12.79235553741455 - ], - [ - "▁Räume", - -12.792387962341309 - ], - [ - "▁Interesting", - -12.792403221130371 - ], - [ - "bürger", - -12.79240608215332 - ], - [ - "sweet", - -12.792410850524902 - ], - 
[ - "Identify", - -12.792632102966309 - ], - [ - "EEN", - -12.792651176452637 - ], - [ - "▁£3", - -12.792654991149902 - ], - [ - "interacting", - -12.7926664352417 - ], - [ - "NYSE", - -12.792762756347656 - ], - [ - "▁Dynamics", - -12.79277515411377 - ], - [ - "▁modificări", - -12.792777061462402 - ], - [ - "▁Kumar", - -12.792936325073242 - ], - [ - "chette", - -12.79313850402832 - ], - [ - "▁presiune", - -12.79316234588623 - ], - [ - "arni", - -12.793164253234863 - ], - [ - "▁vielfältig", - -12.793221473693848 - ], - [ - "KC", - -12.793259620666504 - ], - [ - "▁Cuisine", - -12.793513298034668 - ], - [ - "▁australia", - -12.793885231018066 - ], - [ - "▁încet", - -12.794026374816895 - ], - [ - "▁caracteristic", - -12.794257164001465 - ], - [ - "▁cookbook", - -12.794501304626465 - ], - [ - "▁douleur", - -12.79453182220459 - ], - [ - "AVI", - -12.794593811035156 - ], - [ - "artikel", - -12.794740676879883 - ], - [ - "feta", - -12.79493522644043 - ], - [ - "▁fréquent", - -12.794987678527832 - ], - [ - "▁Prophet", - -12.795051574707031 - ], - [ - "▁dépense", - -12.795202255249023 - ], - [ - "▁Smile", - -12.795235633850098 - ], - [ - "▁lawmakers", - -12.79525375366211 - ], - [ - "▁Kollegen", - -12.795391082763672 - ], - [ - "▁Pir", - -12.79555606842041 - ], - [ - "serez", - -12.79561710357666 - ], - [ - "▁consumator", - -12.795656204223633 - ], - [ - "▁playlist", - -12.795730590820312 - ], - [ - "▁envisage", - -12.795733451843262 - ], - [ - "swept", - -12.795780181884766 - ], - [ - "▁Grim", - -12.795825004577637 - ], - [ - "▁widow", - -12.795836448669434 - ], - [ - "authorised", - -12.795886039733887 - ], - [ - "▁(...)", - -12.796035766601562 - ], - [ - "▁photographic", - -12.796060562133789 - ], - [ - "▁libertate", - -12.796173095703125 - ], - [ - "▁principalement", - -12.796201705932617 - ], - [ - "umming", - -12.796260833740234 - ], - [ - "▁Montréal", - -12.796465873718262 - ], - [ - "▁compilation", - -12.796468734741211 - ], - [ - "▁erlaubt", - -12.79647159576416 - 
], - [ - "▁biblical", - -12.796518325805664 - ], - [ - "volume", - -12.796561241149902 - ], - [ - "5-7", - -12.796809196472168 - ], - [ - "▁Versch", - -12.79689884185791 - ], - [ - "▁Shark", - -12.796957015991211 - ], - [ - "ologne", - -12.796969413757324 - ], - [ - "4.4", - -12.797086715698242 - ], - [ - "decken", - -12.797112464904785 - ], - [ - "▁frequencies", - -12.797205924987793 - ], - [ - "▁inferior", - -12.79720687866211 - ], - [ - "visible", - -12.797321319580078 - ], - [ - "▁educator", - -12.797394752502441 - ], - [ - "▁soziale", - -12.797420501708984 - ], - [ - "▁billet", - -12.797523498535156 - ], - [ - "folosirea", - -12.797574996948242 - ], - [ - "▁aufgenommen", - -12.797590255737305 - ], - [ - "▁Thread", - -12.797649383544922 - ], - [ - "registering", - -12.797694206237793 - ], - [ - "▁Loop", - -12.797747611999512 - ], - [ - "innovation", - -12.79783821105957 - ], - [ - "▁elimination", - -12.797857284545898 - ], - [ - "136", - -12.797883987426758 - ], - [ - "▁fluctu", - -12.797892570495605 - ], - [ - "▁Mercury", - -12.79794692993164 - ], - [ - "▁bouche", - -12.797955513000488 - ], - [ - "▁hurdle", - -12.7979736328125 - ], - [ - "▁Bennett", - -12.798040390014648 - ], - [ - "STI", - -12.79818344116211 - ], - [ - "▁théâtre", - -12.798316955566406 - ], - [ - "▁confortable", - -12.798359870910645 - ], - [ - "▁Automobil", - -12.79838752746582 - ], - [ - "▁Donna", - -12.798399925231934 - ], - [ - "▁foyer", - -12.79841136932373 - ], - [ - "▁hollow", - -12.798465728759766 - ], - [ - "▁règlement", - -12.79861068725586 - ], - [ - "effi", - -12.798616409301758 - ], - [ - "▁sediment", - -12.79869270324707 - ], - [ - "▁Mä", - -12.798774719238281 - ], - [ - "▁faint", - -12.798833847045898 - ], - [ - "feti", - -12.79890251159668 - ], - [ - "▁Concord", - -12.798959732055664 - ], - [ - "▁Ladies", - -12.798990249633789 - ], - [ - "▁pregatit", - -12.799052238464355 - ], - [ - "▁Ensemble", - -12.79905891418457 - ], - [ - "▁Ingredient", - -12.79905891418457 - ], - [ - 
"▁Respond", - -12.79914379119873 - ], - [ - "▁impaired", - -12.799356460571289 - ], - [ - "▁Feedback", - -12.799430847167969 - ], - [ - "▁ultrasound", - -12.799461364746094 - ], - [ - "▁Guvernului", - -12.799617767333984 - ], - [ - "▁Unterricht", - -12.799654006958008 - ], - [ - "▁prosecut", - -12.799662590026855 - ], - [ - "spend", - -12.799732208251953 - ], - [ - "▁capitol", - -12.799800872802734 - ], - [ - "USD", - -12.799822807312012 - ], - [ - "observing", - -12.799947738647461 - ], - [ - "▁effortlessly", - -12.800045013427734 - ], - [ - "▁Setting", - -12.80010986328125 - ], - [ - "▁spontaneous", - -12.80020809173584 - ], - [ - "▁LEGO", - -12.800238609313965 - ], - [ - "initiative", - -12.800299644470215 - ], - [ - "▁Sak", - -12.800299644470215 - ], - [ - "Interestingly", - -12.800326347351074 - ], - [ - "▁Yale", - -12.800352096557617 - ], - [ - "▁größer", - -12.80038070678711 - ], - [ - "RIC", - -12.800406455993652 - ], - [ - "▁distracted", - -12.800436973571777 - ], - [ - "drafted", - -12.800484657287598 - ], - [ - "▁Brenda", - -12.800522804260254 - ], - [ - "monopol", - -12.800551414489746 - ], - [ - "städt", - -12.800580024719238 - ], - [ - "▁altar", - -12.80058765411377 - ], - [ - "▁Hannover", - -12.800596237182617 - ], - [ - "▁Spiritual", - -12.800702095031738 - ], - [ - "▁thriller", - -12.800747871398926 - ], - [ - "▁Schneider", - -12.800760269165039 - ], - [ - "▁accumulate", - -12.800817489624023 - ], - [ - "▁mediului", - -12.800822257995605 - ], - [ - "▁Mathematics", - -12.800914764404297 - ], - [ - "▁paradox", - -12.800986289978027 - ], - [ - "▁Sham", - -12.801230430603027 - ], - [ - "▁SITE", - -12.801375389099121 - ], - [ - "▁echipei", - -12.801508903503418 - ], - [ - "▁staircase", - -12.801660537719727 - ], - [ - "▁întrebări", - -12.801705360412598 - ], - [ - "Commerce", - -12.802020072937012 - ], - [ - "▁selfie", - -12.802353858947754 - ], - [ - "▁Pocket", - -12.802404403686523 - ], - [ - "▁niemand", - -12.80263614654541 - ], - [ - "Tool", - 
-12.802678108215332 - ], - [ - "igma", - -12.802695274353027 - ], - [ - "utilisant", - -12.802915573120117 - ], - [ - "▁negatively", - -12.80295181274414 - ], - [ - "Secondly", - -12.802955627441406 - ], - [ - "▁ROI", - -12.8030366897583 - ], - [ - "Arch", - -12.803121566772461 - ], - [ - "▁continuity", - -12.80318546295166 - ], - [ - "▁Prayer", - -12.803235054016113 - ], - [ - "inverse", - -12.803241729736328 - ], - [ - "▁Himmel", - -12.803336143493652 - ], - [ - "prinz", - -12.803478240966797 - ], - [ - "wichtigen", - -12.803496360778809 - ], - [ - "étage", - -12.803522109985352 - ], - [ - "summe", - -12.8036527633667 - ], - [ - "▁Zeitung", - -12.80366039276123 - ], - [ - "▁realization", - -12.803897857666016 - ], - [ - "▁influent", - -12.804291725158691 - ], - [ - "▁Valid", - -12.804357528686523 - ], - [ - "▁publicity", - -12.804439544677734 - ], - [ - "▁vertreten", - -12.804447174072266 - ], - [ - "▁Shoes", - -12.804609298706055 - ], - [ - "▁Diabetes", - -12.80463695526123 - ], - [ - "▁anticipation", - -12.804670333862305 - ], - [ - "▁Blank", - -12.8047456741333 - ], - [ - "asked", - -12.804899215698242 - ], - [ - "Power", - -12.804938316345215 - ], - [ - "arrelage", - -12.805140495300293 - ], - [ - "▁appraisal", - -12.80538272857666 - ], - [ - "▁harassment", - -12.805542945861816 - ], - [ - "Anzeige", - -12.805682182312012 - ], - [ - "liners", - -12.80584716796875 - ], - [ - "Firstly", - -12.805851936340332 - ], - [ - "transferring", - -12.805951118469238 - ], - [ - "▁Diane", - -12.806012153625488 - ], - [ - "▁1/2\"", - -12.80606746673584 - ], - [ - "▁adrenal", - -12.806131362915039 - ], - [ - "▁Prague", - -12.806208610534668 - ], - [ - "insertion", - -12.80635929107666 - ], - [ - "▁Fahrer", - -12.806465148925781 - ], - [ - "▁divin", - -12.806585311889648 - ], - [ - "▁douche", - -12.80673885345459 - ], - [ - "▁meticulous", - -12.806879043579102 - ], - [ - "▁IEEE", - -12.806981086730957 - ], - [ - "▁Rabatt", - -12.807259559631348 - ], - [ - "Runner", - 
-12.807342529296875 - ], - [ - "▁Leder", - -12.807429313659668 - ], - [ - "project", - -12.80745792388916 - ], - [ - "▁Split", - -12.807562828063965 - ], - [ - "Gold", - -12.807600021362305 - ], - [ - "5.00", - -12.807629585266113 - ], - [ - "iola", - -12.807655334472656 - ], - [ - "standardized", - -12.807890892028809 - ], - [ - "ordination", - -12.807984352111816 - ], - [ - "▁Egal", - -12.808158874511719 - ], - [ - "▁ruhig", - -12.808241844177246 - ], - [ - "▁judiciar", - -12.80837345123291 - ], - [ - "▁Nowadays", - -12.808374404907227 - ], - [ - "▁whistle", - -12.808374404907227 - ], - [ - "▁superhero", - -12.808379173278809 - ], - [ - "▁PowerPoint", - -12.808408737182617 - ], - [ - "flop", - -12.808420181274414 - ], - [ - "olph", - -12.808460235595703 - ], - [ - "▁pallet", - -12.808916091918945 - ], - [ - "posons", - -12.809005737304688 - ], - [ - "▁Listing", - -12.809032440185547 - ], - [ - "Tag", - -12.809075355529785 - ], - [ - "introductory", - -12.809122085571289 - ], - [ - "▁Profil", - -12.809123992919922 - ], - [ - "symmetric", - -12.809126853942871 - ], - [ - "▁aisle", - -12.809138298034668 - ], - [ - "▁ajouté", - -12.809147834777832 - ], - [ - "opathy", - -12.809149742126465 - ], - [ - "prezentate", - -12.809155464172363 - ], - [ - "▁hurry", - -12.809165000915527 - ], - [ - "Auth", - -12.809310913085938 - ], - [ - "▁Homepage", - -12.809435844421387 - ], - [ - "ashes", - -12.809489250183105 - ], - [ - "▁inklusive", - -12.809496879577637 - ], - [ - "populated", - -12.809502601623535 - ], - [ - "▁nein", - -12.809554100036621 - ], - [ - "▁syndicat", - -12.809690475463867 - ], - [ - "▁développé", - -12.809842109680176 - ], - [ - "▁Domestic", - -12.809877395629883 - ], - [ - "essay", - -12.809967994689941 - ], - [ - "Atelier", - -12.809980392456055 - ], - [ - "▁proceeding", - -12.810006141662598 - ], - [ - "▁SAS", - -12.810038566589355 - ], - [ - "task", - -12.810063362121582 - ], - [ - "▁blackjack", - -12.810114860534668 - ], - [ - "Key", - 
-12.810186386108398 - ], - [ - "thérapie", - -12.810247421264648 - ], - [ - "▁Cohen", - -12.810397148132324 - ], - [ - "Direct", - -12.810510635375977 - ], - [ - "▁Estimat", - -12.810517311096191 - ], - [ - "élève", - -12.810616493225098 - ], - [ - "cind", - -12.810640335083008 - ], - [ - "▁prezenț", - -12.810701370239258 - ], - [ - "▁notorious", - -12.810725212097168 - ], - [ - "climbed", - -12.810816764831543 - ], - [ - "▁flexibil", - -12.810830116271973 - ], - [ - "▁entlang", - -12.810855865478516 - ], - [ - "longed", - -12.81103515625 - ], - [ - "▁elbow", - -12.811078071594238 - ], - [ - "BH", - -12.811296463012695 - ], - [ - "▁Radu", - -12.811376571655273 - ], - [ - "▁lonely", - -12.811378479003906 - ], - [ - "ALA", - -12.811405181884766 - ], - [ - "Variante", - -12.811639785766602 - ], - [ - "▁Influen", - -12.81169319152832 - ], - [ - "▁Budapest", - -12.811747550964355 - ], - [ - "▁Gemüse", - -12.811747550964355 - ], - [ - "▁continental", - -12.811750411987305 - ], - [ - "ippo", - -12.811771392822266 - ], - [ - "▁Affordable", - -12.81212329864502 - ], - [ - "▁niece", - -12.812187194824219 - ], - [ - "oscopic", - -12.812190055847168 - ], - [ - "▁Grid", - -12.81222152709961 - ], - [ - "sliced", - -12.812270164489746 - ], - [ - "▁voici", - -12.812294006347656 - ], - [ - "aveam", - -12.812471389770508 - ], - [ - "▁Lars", - -12.812612533569336 - ], - [ - "APA", - -12.812657356262207 - ], - [ - "▁particulière", - -12.812858581542969 - ], - [ - "sorb", - -12.8128662109375 - ], - [ - "▁1955", - -12.812887191772461 - ], - [ - "▁solutii", - -12.812942504882812 - ], - [ - "loch", - -12.812960624694824 - ], - [ - "▁summon", - -12.813212394714355 - ], - [ - "wurf", - -12.813271522521973 - ], - [ - "▁protecți", - -12.813288688659668 - ], - [ - "2001", - -12.813499450683594 - ], - [ - "▁sophomore", - -12.813627243041992 - ], - [ - "▁Schwerpunkt", - -12.813628196716309 - ], - [ - "▁diplomat", - -12.813687324523926 - ], - [ - "▁artistique", - -12.813726425170898 - ], - [ - 
"▁accueille", - -12.813739776611328 - ], - [ - "Disp", - -12.813746452331543 - ], - [ - "inherited", - -12.813764572143555 - ], - [ - "▁COMP", - -12.813889503479004 - ], - [ - "▁envoyé", - -12.814046859741211 - ], - [ - "▁tuning", - -12.814056396484375 - ], - [ - "▁entspricht", - -12.814062118530273 - ], - [ - "▁exerc", - -12.81406307220459 - ], - [ - "▁accessoires", - -12.8140869140625 - ], - [ - "▁Automat", - -12.814348220825195 - ], - [ - "importance", - -12.814408302307129 - ], - [ - "▁travellers", - -12.814432144165039 - ], - [ - "seiten", - -12.814474105834961 - ], - [ - "▁slider", - -12.814481735229492 - ], - [ - "effect", - -12.814591407775879 - ], - [ - "▁siding", - -12.814669609069824 - ], - [ - "▁Crit", - -12.814780235290527 - ], - [ - "▁sportif", - -12.814827919006348 - ], - [ - "▁Accessories", - -12.81513500213623 - ], - [ - "▁Anteil", - -12.815184593200684 - ], - [ - "▁limbi", - -12.81519603729248 - ], - [ - "▁vendre", - -12.815269470214844 - ], - [ - "borg", - -12.815435409545898 - ], - [ - "▁Deposit", - -12.815508842468262 - ], - [ - "▁Hö", - -12.815717697143555 - ], - [ - "employé", - -12.8157320022583 - ], - [ - "▁Bangalore", - -12.815887451171875 - ], - [ - "▁itinerary", - -12.815888404846191 - ], - [ - "▁Deliver", - -12.816008567810059 - ], - [ - "dik", - -12.816024780273438 - ], - [ - "▁advent", - -12.816100120544434 - ], - [ - "▁Turk", - -12.81614875793457 - ], - [ - "▁Nico", - -12.816154479980469 - ], - [ - "organizarea", - -12.816161155700684 - ], - [ - "▁remport", - -12.816166877746582 - ], - [ - "▁tribunal", - -12.816266059875488 - ], - [ - "▁Rusia", - -12.8162841796875 - ], - [ - "glazed", - -12.816339492797852 - ], - [ - "▁destiné", - -12.816502571105957 - ], - [ - "304", - -12.816533088684082 - ], - [ - "album", - -12.816650390625 - ], - [ - "▁junction", - -12.81665325164795 - ], - [ - "▁Fleet", - -12.816664695739746 - ], - [ - "venant", - -12.81667423248291 - ], - [ - "▁buddy", - -12.816694259643555 - ], - [ - "▁neglected", - 
-12.816694259643555 - ], - [ - "▁Mask", - -12.816783905029297 - ], - [ - "▁testament", - -12.816844940185547 - ], - [ - "▁Basil", - -12.81690788269043 - ], - [ - "masă", - -12.816922187805176 - ], - [ - "▁racist", - -12.81692886352539 - ], - [ - "640", - -12.816990852355957 - ], - [ - "▁Standing", - -12.817028045654297 - ], - [ - "▁MUST", - -12.817266464233398 - ], - [ - "situation", - -12.817327499389648 - ], - [ - "▁informiert", - -12.817337036132812 - ], - [ - "ABA", - -12.817353248596191 - ], - [ - "▁Timothy", - -12.817397117614746 - ], - [ - "▁generosity", - -12.817397117614746 - ], - [ - "▁erscheint", - -12.817402839660645 - ], - [ - "▁verarbeitet", - -12.81740665435791 - ], - [ - "▁burial", - -12.817444801330566 - ], - [ - "▁limestone", - -12.817458152770996 - ], - [ - "▁1953", - -12.817480087280273 - ], - [ - "▁Lucr", - -12.817506790161133 - ], - [ - "small", - -12.817633628845215 - ], - [ - "aveau", - -12.81763744354248 - ], - [ - "versiune", - -12.81773567199707 - ], - [ - "▁inkl", - -12.81775951385498 - ], - [ - "▁Minneapolis", - -12.81777572631836 - ], - [ - "Spiel", - -12.81781005859375 - ], - [ - "▁encode", - -12.817895889282227 - ], - [ - "▁beforehand", - -12.818021774291992 - ], - [ - "▁Vital", - -12.818086624145508 - ], - [ - "▁socialist", - -12.818228721618652 - ], - [ - "inho", - -12.81824779510498 - ], - [ - "▁chapel", - -12.81825065612793 - ], - [ - "▁Monitoring", - -12.81838607788086 - ], - [ - "▁quotidienne", - -12.818404197692871 - ], - [ - "cloud", - -12.818506240844727 - ], - [ - "▁desfăşur", - -12.818531036376953 - ], - [ - "▁1952", - -12.818638801574707 - ], - [ - "▁Rü", - -12.818690299987793 - ], - [ - "▁Sigma", - -12.818804740905762 - ], - [ - "134", - -12.818835258483887 - ], - [ - "Sullivan", - -12.818909645080566 - ], - [ - "▁Bevölkerung", - -12.818909645080566 - ], - [ - "▁sufficiently", - -12.818953514099121 - ], - [ - "Check", - -12.818992614746094 - ], - [ - "rnie", - -12.8190336227417 - ], - [ - "contamin", - 
-12.819132804870605 - ], - [ - "▁gewonnen", - -12.81928825378418 - ], - [ - "▁bugetul", - -12.819376945495605 - ], - [ - "▁mustard", - -12.819414138793945 - ], - [ - "132", - -12.819478988647461 - ], - [ - "0.9", - -12.819535255432129 - ], - [ - "▁tratat", - -12.81957721710205 - ], - [ - "▁dilemma", - -12.819666862487793 - ], - [ - "▁versatility", - -12.819666862487793 - ], - [ - "▁clutter", - -12.819670677185059 - ], - [ - "▁Musk", - -12.81973934173584 - ], - [ - "▁Beide", - -12.819750785827637 - ], - [ - "hurst", - -12.819758415222168 - ], - [ - "atsu", - -12.819767951965332 - ], - [ - "absence", - -12.819784164428711 - ], - [ - "rebounds", - -12.819881439208984 - ], - [ - "6.1", - -12.820029258728027 - ], - [ - "Dia", - -12.820046424865723 - ], - [ - "▁siguranță", - -12.820060729980469 - ], - [ - "▁Blade", - -12.820072174072266 - ], - [ - "▁disrupt", - -12.820074081420898 - ], - [ - "▁visiteurs", - -12.820169448852539 - ], - [ - "tested", - -12.820282936096191 - ], - [ - "▁Lup", - -12.820353507995605 - ], - [ - "▁Rouge", - -12.820371627807617 - ], - [ - "▁asbestos", - -12.82042407989502 - ], - [ - "▁moisturize", - -12.820427894592285 - ], - [ - "▁acknowledg", - -12.82045841217041 - ], - [ - "▁procent", - -12.820467948913574 - ], - [ - "▁swear", - -12.82050895690918 - ], - [ - "▁911", - -12.820647239685059 - ], - [ - "präsent", - -12.820724487304688 - ], - [ - "▁cohort", - -12.82072639465332 - ], - [ - "▁intimid", - -12.820830345153809 - ], - [ - "JS", - -12.820849418640137 - ], - [ - "îm", - -12.82096004486084 - ], - [ - "▁Kunststoff", - -12.820963859558105 - ], - [ - "rison", - -12.820972442626953 - ], - [ - "▁praf", - -12.82097339630127 - ], - [ - "▁convient", - -12.821019172668457 - ], - [ - "▁partenaire", - -12.821088790893555 - ], - [ - "▁Verantwortlich", - -12.821182250976562 - ], - [ - "▁semiconductor", - -12.821182250976562 - ], - [ - "▁kürz", - -12.821187019348145 - ], - [ - "▁Bottom", - -12.821187973022461 - ], - [ - "▁tratamentul", - 
-12.82127571105957 - ], - [ - "Source", - -12.821331024169922 - ], - [ - "authored", - -12.82172679901123 - ], - [ - "robo", - -12.821867942810059 - ], - [ - "▁turf", - -12.82194709777832 - ], - [ - "▁liebe", - -12.821971893310547 - ], - [ - "▁Fotografi", - -12.821995735168457 - ], - [ - "Big", - -12.822064399719238 - ], - [ - "▁fireworks", - -12.822081565856934 - ], - [ - "▁presă", - -12.822135925292969 - ], - [ - "▁conceal", - -12.822269439697266 - ], - [ - "▁originated", - -12.82227897644043 - ], - [ - "▁biciclet", - -12.822319984436035 - ], - [ - "acești", - -12.822577476501465 - ], - [ - "▁mortar", - -12.822585105895996 - ], - [ - "▁Wunder", - -12.822626113891602 - ], - [ - "ionist", - -12.822696685791016 - ], - [ - "KM", - -12.822871208190918 - ], - [ - "▁Marion", - -12.822918891906738 - ], - [ - "produkte", - -12.822933197021484 - ], - [ - "▁Sprint", - -12.822999000549316 - ], - [ - "▁Nachde", - -12.8230619430542 - ], - [ - "▁verfüge", - -12.823100090026855 - ], - [ - "Marea", - -12.823177337646484 - ], - [ - "▁compressor", - -12.823253631591797 - ], - [ - "Arm", - -12.823290824890137 - ], - [ - "Auf", - -12.823311805725098 - ], - [ - "▁Polyester", - -12.823461532592773 - ], - [ - "▁Sheffield", - -12.823461532592773 - ], - [ - "illiard", - -12.823494911193848 - ], - [ - "▁misleading", - -12.82353401184082 - ], - [ - "multi", - -12.823749542236328 - ], - [ - "ripped", - -12.82381820678711 - ], - [ - "▁Cosmetic", - -12.82383918762207 - ], - [ - "▁Regal", - -12.823890686035156 - ], - [ - "▁authenticity", - -12.82414436340332 - ], - [ - "▁customizable", - -12.824219703674316 - ], - [ - "▁bathtub", - -12.824275016784668 - ], - [ - "▁Average", - -12.824292182922363 - ], - [ - "▁Muster", - -12.824522018432617 - ], - [ - "290", - -12.824529647827148 - ], - [ - "▁Ersatz", - -12.824570655822754 - ], - [ - "▁Might", - -12.824588775634766 - ], - [ - "published", - -12.82461929321289 - ], - [ - "▁Interpret", - -12.824640274047852 - ], - [ - "▁încep", - -12.82480239868164 
- ], - [ - "▁proto", - -12.824851036071777 - ], - [ - "▁disque", - -12.824889183044434 - ], - [ - "▁Palestine", - -12.824980735778809 - ], - [ - "Over", - -12.824981689453125 - ], - [ - "▁verbessert", - -12.824983596801758 - ], - [ - "▁liefern", - -12.825017929077148 - ], - [ - "▁Handlung", - -12.825095176696777 - ], - [ - "▁Handels", - -12.825150489807129 - ], - [ - "▁eater", - -12.825201988220215 - ], - [ - "▁$40", - -12.825251579284668 - ], - [ - "illard", - -12.825334548950195 - ], - [ - "▁apariti", - -12.825413703918457 - ], - [ - "▁gag", - -12.825422286987305 - ], - [ - "▁chimic", - -12.825541496276855 - ], - [ - "▁Guru", - -12.825594902038574 - ], - [ - "▁Toilet", - -12.82571792602539 - ], - [ - "▁Tochter", - -12.825748443603516 - ], - [ - "▁Aurora", - -12.82579231262207 - ], - [ - "contro", - -12.825922966003418 - ], - [ - "▁GOP", - -12.825995445251465 - ], - [ - "Provence", - -12.826130867004395 - ], - [ - "▁Frieden", - -12.82614803314209 - ], - [ - "ăci", - -12.826216697692871 - ], - [ - "portée", - -12.826268196105957 - ], - [ - "▁upright", - -12.826300621032715 - ], - [ - "▁Physician", - -12.82650375366211 - ], - [ - "▁juridique", - -12.82650375366211 - ], - [ - "▁territorial", - -12.82650375366211 - ], - [ - "▁kindergarten", - -12.826505661010742 - ], - [ - "aéroport", - -12.826510429382324 - ], - [ - "▁whisper", - -12.826513290405273 - ], - [ - "▁capacities", - -12.826562881469727 - ], - [ - "dichte", - -12.826641082763672 - ], - [ - "▁Grenzen", - -12.826822280883789 - ], - [ - "▁Riv", - -12.82710075378418 - ], - [ - "épreuve", - -12.827266693115234 - ], - [ - "▁Scheme", - -12.827290534973145 - ], - [ - "mesures", - -12.827330589294434 - ], - [ - "▁Einfluss", - -12.827333450317383 - ], - [ - "appui", - -12.827713966369629 - ], - [ - "▁apuc", - -12.827827453613281 - ], - [ - "▁radiat", - -12.82794189453125 - ], - [ - "▁allergy", - -12.828035354614258 - ], - [ - "▁spear", - -12.828038215637207 - ], - [ - "▁Luxembourg", - -12.828086853027344 - ], - [ - 
"▁Registered", - -12.828115463256836 - ], - [ - "▁Shape", - -12.828198432922363 - ], - [ - "genie", - -12.828328132629395 - ], - [ - "nsonsten", - -12.828385353088379 - ], - [ - "▁Symposium", - -12.828412055969238 - ], - [ - "forderung", - -12.828474998474121 - ], - [ - "▁personalizat", - -12.82866096496582 - ], - [ - "▁ştiu", - -12.82875919342041 - ], - [ - "blatt", - -12.828804016113281 - ], - [ - "▁geometry", - -12.828807830810547 - ], - [ - "▁8:30", - -12.828831672668457 - ], - [ - "▁Fahrrad", - -12.828861236572266 - ], - [ - "After", - -12.828927040100098 - ], - [ - "▁ventilat", - -12.829072952270508 - ], - [ - "▁nylon", - -12.829190254211426 - ], - [ - "▁verkauft", - -12.829304695129395 - ], - [ - "öß", - -12.829345703125 - ], - [ - "▁Kath", - -12.829523086547852 - ], - [ - "▁Nuclear", - -12.829558372497559 - ], - [ - "▁Verizon", - -12.829560279846191 - ], - [ - "▁spokesperson", - -12.829560279846191 - ], - [ - "▁vietii", - -12.829560279846191 - ], - [ - "▁prescri", - -12.829629898071289 - ], - [ - "ру", - -12.829666137695312 - ], - [ - "6.2", - -12.829801559448242 - ], - [ - "▁spațiu", - -12.830018997192383 - ], - [ - "▁solvent", - -12.83006763458252 - ], - [ - ",000,000", - -12.830142974853516 - ], - [ - "reuen", - -12.830185890197754 - ], - [ - "plast", - -12.830245018005371 - ], - [ - "▁Activities", - -12.830334663391113 - ], - [ - "▁domni", - -12.83056926727295 - ], - [ - "▁trophy", - -12.830572128295898 - ], - [ - "▁saddle", - -12.830657958984375 - ], - [ - "▁renovat", - -12.830708503723145 - ], - [ - "▁bumper", - -12.830717086791992 - ], - [ - "▁penny", - -12.830741882324219 - ], - [ - "omato", - -12.830743789672852 - ], - [ - "AQ", - -12.83083438873291 - ], - [ - "kunst", - -12.830843925476074 - ], - [ - "hydrat", - -12.830860137939453 - ], - [ - "minder", - -12.830931663513184 - ], - [ - "trecerea", - -12.830949783325195 - ], - [ - "brush", - -12.831185340881348 - ], - [ - "TEC", - -12.83121395111084 - ], - [ - "Please", - -12.831253051757812 - ], - 
[ - "hydrated", - -12.831483840942383 - ], - [ - "ICAL", - -12.831636428833008 - ], - [ - "trauen", - -12.831639289855957 - ], - [ - "9,000", - -12.83175277709961 - ], - [ - "▁2030", - -12.831830024719238 - ], - [ - "▁Chennai", - -12.831854820251465 - ], - [ - "▁empirical", - -12.831854820251465 - ], - [ - "▁Subscribe", - -12.83206844329834 - ], - [ - "▁vorgestellt", - -12.832120895385742 - ], - [ - "▁Springfield", - -12.832159996032715 - ], - [ - "▁continuu", - -12.832311630249023 - ], - [ - "208", - -12.832351684570312 - ], - [ - "▁Bearing", - -12.83240795135498 - ], - [ - "2003", - -12.832572937011719 - ], - [ - "cheta", - -12.832608222961426 - ], - [ - "▁empathy", - -12.832623481750488 - ], - [ - "▁Alert", - -12.832817077636719 - ], - [ - "▁recreate", - -12.832879066467285 - ], - [ - "PJ", - -12.833159446716309 - ], - [ - "Name", - -12.83323860168457 - ], - [ - "▁Mouse", - -12.833405494689941 - ], - [ - "▁disturbing", - -12.833443641662598 - ], - [ - "▁leichter", - -12.83344841003418 - ], - [ - "▁cruel", - -12.833507537841797 - ], - [ - "▁detective", - -12.833531379699707 - ], - [ - "▁reimbursement", - -12.833626747131348 - ], - [ - "▁Gemeinschaft", - -12.833772659301758 - ], - [ - "▁adolescents", - -12.833772659301758 - ], - [ - "▁Reality", - -12.833954811096191 - ], - [ - "▁Stockholm", - -12.83415699005127 - ], - [ - "▁Gründen", - -12.834304809570312 - ], - [ - "▁Reflect", - -12.83432388305664 - ], - [ - "▁Palmer", - -12.834336280822754 - ], - [ - "▁treac", - -12.8343505859375 - ], - [ - "▁tentative", - -12.834497451782227 - ], - [ - "▁surrender", - -12.834677696228027 - ], - [ - "▁broadly", - -12.834734916687012 - ], - [ - "▁județ", - -12.834814071655273 - ], - [ - "▁Thu", - -12.834845542907715 - ], - [ - "wärts", - -12.834961891174316 - ], - [ - "▁crește", - -12.835074424743652 - ], - [ - "▁déplacement", - -12.835208892822266 - ], - [ - "blanc", - -12.835268020629883 - ], - [ - "▁£5", - -12.835308074951172 - ], - [ - "▁confidentiality", - 
-12.835320472717285 - ], - [ - "veraging", - -12.835444450378418 - ], - [ - "unité", - -12.835609436035156 - ], - [ - "clar", - -12.83564567565918 - ], - [ - "rigg", - -12.835693359375 - ], - [ - "honneur", - -12.835694313049316 - ], - [ - "▁adventurous", - -12.835694313049316 - ], - [ - "▁Nutzen", - -12.835758209228516 - ], - [ - "▁Kabel", - -12.835800170898438 - ], - [ - "empowering", - -12.836040496826172 - ], - [ - "verhalten", - -12.836042404174805 - ], - [ - "▁prevail", - -12.8361234664917 - ], - [ - "mashed", - -12.836138725280762 - ], - [ - "▁1947", - -12.83616828918457 - ], - [ - "function", - -12.836292266845703 - ], - [ - "niveaux", - -12.83633041381836 - ], - [ - "▁territories", - -12.836463928222656 - ], - [ - "▁Permanent", - -12.836465835571289 - ], - [ - "▁christmas", - -12.836471557617188 - ], - [ - "arguing", - -12.836490631103516 - ], - [ - "zukünftig", - -12.836654663085938 - ], - [ - "▁Eindruck", - -12.836817741394043 - ], - [ - "personalised", - -12.836854934692383 - ], - [ - "▁vecin", - -12.837211608886719 - ], - [ - "▁Affiliate", - -12.837234497070312 - ], - [ - "▁Silk", - -12.837249755859375 - ], - [ - "▁Tub", - -12.837440490722656 - ], - [ - "▁remont", - -12.837493896484375 - ], - [ - "▁sauber", - -12.837530136108398 - ], - [ - "gehörig", - -12.837562561035156 - ], - [ - "Maritime", - -12.83771800994873 - ], - [ - "▁Bö", - -12.837973594665527 - ], - [ - "▁1957", - -12.83800220489502 - ], - [ - "▁unparalleled", - -12.838005065917969 - ], - [ - "▁fulfillment", - -12.838042259216309 - ], - [ - "▁collage", - -12.838179588317871 - ], - [ - "fenders", - -12.838248252868652 - ], - [ - "▁neige", - -12.838275909423828 - ], - [ - "▁gamers", - -12.838325500488281 - ], - [ - "tefan", - -12.838339805603027 - ], - [ - "▁wifi", - -12.838349342346191 - ], - [ - "▁leisten", - -12.83835506439209 - ], - [ - "▁Verbesserung", - -12.838390350341797 - ], - [ - "▁composant", - -12.838400840759277 - ], - [ - "▁LORD", - -12.8384370803833 - ], - [ - "arrive", - 
-12.838472366333008 - ], - [ - "▁conquer", - -12.838562965393066 - ], - [ - "▁lentil", - -12.838767051696777 - ], - [ - "▁Sprech", - -12.838995933532715 - ], - [ - "▁substitution", - -12.839015007019043 - ], - [ - ".05.", - -12.839020729064941 - ], - [ - "FORM", - -12.839144706726074 - ], - [ - "cădere", - -12.839154243469238 - ], - [ - "▁canyon", - -12.839430809020996 - ], - [ - "▁capacitate", - -12.839442253112793 - ], - [ - "▁menace", - -12.839461326599121 - ], - [ - "▁Antique", - -12.839519500732422 - ], - [ - "▁dizaine", - -12.839550971984863 - ], - [ - "▁Saturn", - -12.839578628540039 - ], - [ - "▁gastro", - -12.83962631225586 - ], - [ - "▁Vand", - -12.839641571044922 - ], - [ - "▁africa", - -12.839682579040527 - ], - [ - "▁hackers", - -12.839702606201172 - ], - [ - "▁Bailey", - -12.839736938476562 - ], - [ - "ouette", - -12.839822769165039 - ], - [ - "hoch", - -12.839885711669922 - ], - [ - "étudiant", - -12.839973449707031 - ], - [ - "▁1600", - -12.840004920959473 - ], - [ - "utiliz", - -12.840167999267578 - ], - [ - "reinigung", - -12.840263366699219 - ], - [ - "▁mileage", - -12.84029483795166 - ], - [ - "▁consacré", - -12.840309143066406 - ], - [ - "▁Norfolk", - -12.840327262878418 - ], - [ - "stacked", - -12.840659141540527 - ], - [ - "anbieter", - -12.840731620788574 - ], - [ - "▁gewünschte", - -12.84073543548584 - ], - [ - "▁silicon", - -12.840761184692383 - ], - [ - "Ensuite", - -12.840794563293457 - ], - [ - "▁vendu", - -12.840850830078125 - ], - [ - "▁viteza", - -12.840851783752441 - ], - [ - "▁evaluare", - -12.840913772583008 - ], - [ - "▁contient", - -12.841036796569824 - ], - [ - "▁Viagra", - -12.841100692749023 - ], - [ - "▁circumstance", - -12.841283798217773 - ], - [ - "walker", - -12.841383934020996 - ], - [ - "▁Aluminium", - -12.84148120880127 - ], - [ - "ço", - -12.841556549072266 - ], - [ - "▁Kli", - -12.841643333435059 - ], - [ - "▁deliberately", - -12.841649055480957 - ], - [ - "▁gamble", - -12.841893196105957 - ], - [ - "▁nourri", - 
-12.841903686523438 - ], - [ - "▁sealing", - -12.84194278717041 - ], - [ - "▁Atmosphäre", - -12.842255592346191 - ], - [ - "▁erschien", - -12.842260360717773 - ], - [ - "▁brightness", - -12.842340469360352 - ], - [ - "autonomie", - -12.84251594543457 - ], - [ - "▁propel", - -12.842525482177734 - ], - [ - "▁Infrastructure", - -12.842642784118652 - ], - [ - "▁război", - -12.842642784118652 - ], - [ - "▁jelly", - -12.842684745788574 - ], - [ - "scalable", - -12.84280776977539 - ], - [ - "regal", - -12.84296703338623 - ], - [ - "▁sarcini", - -12.843031883239746 - ], - [ - "▁Dienstag", - -12.84304428100586 - ], - [ - "▁Receive", - -12.8430814743042 - ], - [ - "▁mango", - -12.843356132507324 - ], - [ - "▁compétition", - -12.84341812133789 - ], - [ - "▁Monument", - -12.843428611755371 - ], - [ - "▁mast", - -12.844159126281738 - ], - [ - "▁instructed", - -12.84425163269043 - ], - [ - "▁aventur", - -12.844277381896973 - ], - [ - "139", - -12.844298362731934 - ], - [ - "▁Parmi", - -12.84435749053955 - ], - [ - "confined", - -12.844416618347168 - ], - [ - "acious", - -12.844441413879395 - ], - [ - "▁simptome", - -12.844581604003906 - ], - [ - "▁Fischer", - -12.844897270202637 - ], - [ - "störung", - -12.844985008239746 - ], - [ - "▁bilateral", - -12.84504508972168 - ], - [ - "preşedintele", - -12.845274925231934 - ], - [ - "accueillir", - -12.845357894897461 - ], - [ - "▁Schmidt", - -12.845359802246094 - ], - [ - "litis", - -12.845373153686523 - ], - [ - "WL", - -12.8454008102417 - ], - [ - "▁Rise", - -12.845436096191406 - ], - [ - "▁streamline", - -12.845556259155273 - ], - [ - "sozialen", - -12.845585823059082 - ], - [ - "▁Emirates", - -12.845746040344238 - ], - [ - "▁encrypted", - -12.845746040344238 - ], - [ - "▁unfamiliar", - -12.845746040344238 - ], - [ - "established", - -12.84577751159668 - ], - [ - "▁Tätigkeit", - -12.845818519592285 - ], - [ - "▁unaware", - -12.845913887023926 - ], - [ - "2:00", - -12.8460054397583 - ], - [ - "macher", - -12.846013069152832 - ], - [ 
- "NSA", - -12.8461275100708 - ], - [ - "▁rutier", - -12.846177101135254 - ], - [ - "▁Trent", - -12.846212387084961 - ], - [ - "▁sickness", - -12.846277236938477 - ], - [ - "▁advert", - -12.846417427062988 - ], - [ - "▁Kranken", - -12.846426963806152 - ], - [ - "▁Sandra", - -12.846443176269531 - ], - [ - "▁Recreation", - -12.846449851989746 - ], - [ - "▁Evidence", - -12.846524238586426 - ], - [ - "▁Immigration", - -12.846524238586426 - ], - [ - "▁carriage", - -12.846524238586426 - ], - [ - "▁justified", - -12.84655475616455 - ], - [ - "▁veche", - -12.846579551696777 - ], - [ - "PGA", - -12.846604347229004 - ], - [ - "▁Carmen", - -12.846735000610352 - ], - [ - "▁Faites", - -12.846750259399414 - ], - [ - "▁erfüllt", - -12.84691333770752 - ], - [ - "▁voilà", - -12.846931457519531 - ], - [ - "▁împlin", - -12.846959114074707 - ], - [ - "deposited", - -12.84721565246582 - ], - [ - "▁decisiv", - -12.847241401672363 - ], - [ - "CSA", - -12.847249031066895 - ], - [ - "pathy", - -12.84726619720459 - ], - [ - "▁erweitert", - -12.847302436828613 - ], - [ - "▁liquor", - -12.847302436828613 - ], - [ - "▁resilient", - -12.847302436828613 - ], - [ - "▁walmart", - -12.847302436828613 - ], - [ - "▁fencing", - -12.847308158874512 - ], - [ - "▁dépasse", - -12.84731388092041 - ], - [ - "KT", - -12.847354888916016 - ], - [ - "▁fries", - -12.847368240356445 - ], - [ - "vadă", - -12.847421646118164 - ], - [ - "▁Spania", - -12.847478866577148 - ], - [ - "▁complètement", - -12.847725868225098 - ], - [ - "▁lucrari", - -12.84777545928955 - ], - [ - "▁Lieb", - -12.847908973693848 - ], - [ - "leistungen", - -12.847943305969238 - ], - [ - "198", - -12.847979545593262 - ], - [ - "▁Schnell", - -12.847997665405273 - ], - [ - "▁radius", - -12.84814453125 - ], - [ - "▁beneficiaries", - -12.848151206970215 - ], - [ - "▁northwest", - -12.848174095153809 - ], - [ - "▁#4", - -12.848223686218262 - ], - [ - "▁embryo", - -12.848492622375488 - ], - [ - "▁ditch", - -12.848791122436523 - ], - [ - "▁Seriously", 
- -12.848859786987305 - ], - [ - "oppel", - -12.848941802978516 - ], - [ - "▁stalk", - -12.849053382873535 - ], - [ - "écriture", - -12.849066734313965 - ], - [ - "512", - -12.84912109375 - ], - [ - "wiesen", - -12.849271774291992 - ], - [ - "▁Consum", - -12.849321365356445 - ], - [ - "▁lună", - -12.849405288696289 - ], - [ - "▁lantern", - -12.849441528320312 - ], - [ - "▁italian", - -12.849629402160645 - ], - [ - "▁achiziți", - -12.849639892578125 - ], - [ - "▁catalyst", - -12.849639892578125 - ], - [ - "▁Arbeitgeber", - -12.849662780761719 - ], - [ - "▁researched", - -12.8496675491333 - ], - [ - "▁drastically", - -12.849679946899414 - ], - [ - "versammlung", - -12.849735260009766 - ], - [ - "410", - -12.849800109863281 - ], - [ - "▁impus", - -12.850153923034668 - ], - [ - "▁interchange", - -12.850173950195312 - ], - [ - "▁pharmacie", - -12.850215911865234 - ], - [ - "Live", - -12.850354194641113 - ], - [ - "dents", - -12.850384712219238 - ], - [ - "▁charcoal", - -12.850419998168945 - ], - [ - "▁odihn", - -12.850420951843262 - ], - [ - "▁pistol", - -12.850444793701172 - ], - [ - "▁complaining", - -12.850576400756836 - ], - [ - "manager", - -12.850578308105469 - ], - [ - "themed", - -12.850578308105469 - ], - [ - "▁Chang", - -12.850650787353516 - ], - [ - "▁rookie", - -12.85070514678955 - ], - [ - "Great", - -12.850706100463867 - ], - [ - "▁smoker", - -12.850733757019043 - ], - [ - "▁Container", - -12.850812911987305 - ], - [ - "▁bancaire", - -12.850852966308594 - ], - [ - "▁Actual", - -12.850966453552246 - ], - [ - "füllen", - -12.850982666015625 - ], - [ - "forum", - -12.850985527038574 - ], - [ - "bleib", - -12.851073265075684 - ], - [ - "▁combi", - -12.851079940795898 - ], - [ - "smoked", - -12.851137161254883 - ], - [ - "difficultés", - -12.851161003112793 - ], - [ - "▁tactical", - -12.851240158081055 - ], - [ - "▁sichtbar", - -12.851483345031738 - ], - [ - "▁dreptate", - -12.851598739624023 - ], - [ - "ERT", - -12.85168743133545 - ], - [ - "▁Pond", - 
-12.85177993774414 - ], - [ - "▁Holly", - -12.851844787597656 - ], - [ - "erfolg", - -12.8518705368042 - ], - [ - "▁Nordic", - -12.851896286010742 - ], - [ - "évènement", - -12.851983070373535 - ], - [ - "embracing", - -12.851984024047852 - ], - [ - "▁Maximum", - -12.851984024047852 - ], - [ - "▁défend", - -12.85205078125 - ], - [ - "▁fruct", - -12.852056503295898 - ], - [ - "▁Conditioning", - -12.852099418640137 - ], - [ - "LG", - -12.852127075195312 - ], - [ - "exigence", - -12.852166175842285 - ], - [ - "amide", - -12.852187156677246 - ], - [ - "▁darunter", - -12.852208137512207 - ], - [ - "▁EVERY", - -12.852420806884766 - ], - [ - "▁comparat", - -12.85244083404541 - ], - [ - "boosting", - -12.852452278137207 - ], - [ - "▁Hawaiian", - -12.852553367614746 - ], - [ - "▁Geburt", - -12.852752685546875 - ], - [ - "deci", - -12.852782249450684 - ], - [ - "▁Apollo", - -12.852803230285645 - ], - [ - "▁schützen", - -12.852821350097656 - ], - [ - "tragere", - -12.852893829345703 - ], - [ - "Online", - -12.852904319763184 - ], - [ - "▁neural", - -12.852913856506348 - ], - [ - "▁lucrez", - -12.853188514709473 - ], - [ - "▁phenomenal", - -12.853253364562988 - ], - [ - "▁Height", - -12.853368759155273 - ], - [ - "coordinating", - -12.853548049926758 - ], - [ - "geschnitten", - -12.853631019592285 - ], - [ - "auront", - -12.853641510009766 - ], - [ - "▁administer", - -12.853644371032715 - ], - [ - "▁contend", - -12.853707313537598 - ], - [ - "▁crispy", - -12.853784561157227 - ], - [ - "chuck", - -12.854011535644531 - ], - [ - "▁Condition", - -12.8540678024292 - ], - [ - "gestaltung", - -12.854324340820312 - ], - [ - "▁Blvd", - -12.854331970214844 - ], - [ - "▁subjective", - -12.854470252990723 - ], - [ - "▁événements", - -12.854708671569824 - ], - [ - "▁Jenny", - -12.855131149291992 - ], - [ - "▁cumpăra", - -12.85519027709961 - ], - [ - "constructing", - -12.855262756347656 - ], - [ - "▁instructional", - -12.85539436340332 - ], - [ - "▁sterling", - -12.855446815490723 - ], - [ 
- "scrise", - -12.855470657348633 - ], - [ - "▁Boulevard", - -12.855551719665527 - ], - [ - "pipe", - -12.855620384216309 - ], - [ - "▁Pride", - -12.855748176574707 - ], - [ - "▁Kau", - -12.855751991271973 - ], - [ - "▁overhaul", - -12.855924606323242 - ], - [ - "▁Recruitment", - -12.855925559997559 - ], - [ - "▁thrilling", - -12.856218338012695 - ], - [ - "living", - -12.856302261352539 - ], - [ - "▁rămân", - -12.85645866394043 - ], - [ - "▁MOD", - -12.85661792755127 - ], - [ - "▁Newport", - -12.856675148010254 - ], - [ - "▁infectious", - -12.856688499450684 - ], - [ - "6-3", - -12.856860160827637 - ], - [ - "▁Apache", - -12.856976509094238 - ], - [ - "▁dependence", - -12.85698413848877 - ], - [ - "nutzung", - -12.857199668884277 - ], - [ - "praised", - -12.857211112976074 - ], - [ - "▁craving", - -12.857346534729004 - ], - [ - "▁cramp", - -12.857397079467773 - ], - [ - "▁mancare", - -12.857455253601074 - ], - [ - "▁entdeckt", - -12.857474327087402 - ], - [ - "▁Pioneer", - -12.857484817504883 - ], - [ - "▁Adelaide", - -12.857490539550781 - ], - [ - "2.0", - -12.857503890991211 - ], - [ - "168", - -12.857526779174805 - ], - [ - "▁Decorating", - -12.857611656188965 - ], - [ - "▁unpleasant", - -12.857854843139648 - ], - [ - "▁déclaration", - -12.857865333557129 - ], - [ - "▁Grafik", - -12.857908248901367 - ], - [ - "5-2", - -12.857937812805176 - ], - [ - "căci", - -12.857940673828125 - ], - [ - "▁invade", - -12.858171463012695 - ], - [ - "▁internaţional", - -12.858259201049805 - ], - [ - "▁fraudulent", - -12.858281135559082 - ], - [ - "▁crestere", - -12.858441352844238 - ], - [ - "ografic", - -12.858729362487793 - ], - [ - "plină", - -12.859140396118164 - ], - [ - "sunteti", - -12.859150886535645 - ], - [ - "/04", - -12.859176635742188 - ], - [ - "▁admis", - -12.85935115814209 - ], - [ - "▁mediation", - -12.859403610229492 - ], - [ - "ICC", - -12.859424591064453 - ], - [ - "roș", - -12.859660148620605 - ], - [ - "▁Aroma", - -12.8596773147583 - ], - [ - "1:00", - 
-12.859792709350586 - ], - [ - "gasesc", - -12.859822273254395 - ], - [ - "▁Defence", - -12.859850883483887 - ], - [ - "▁dictionary", - -12.859856605529785 - ], - [ - "▁Batterie", - -12.859865188598633 - ], - [ - "▁gesunde", - -12.85997486114502 - ], - [ - "146", - -12.860099792480469 - ], - [ - "▁mortal", - -12.860129356384277 - ], - [ - "▁Flughafen", - -12.860230445861816 - ], - [ - "hhh", - -12.860284805297852 - ], - [ - "▁novice", - -12.860342025756836 - ], - [ - "▁Develop", - -12.86043930053711 - ], - [ - "▁accidental", - -12.860516548156738 - ], - [ - "Muzeul", - -12.86054515838623 - ], - [ - "▁Jupiter", - -12.86062240600586 - ], - [ - "supposedly", - -12.860662460327148 - ], - [ - "energy", - -12.860758781433105 - ], - [ - "▁montrer", - -12.860764503479004 - ], - [ - "recalled", - -12.860795021057129 - ], - [ - "Press", - -12.860801696777344 - ], - [ - "▁postcard", - -12.86080265045166 - ], - [ - "target", - -12.86081600189209 - ], - [ - "▁vêtements", - -12.860881805419922 - ], - [ - "▁particle", - -12.860888481140137 - ], - [ - "professional", - -12.8608980178833 - ], - [ - "▁1949", - -12.860917091369629 - ], - [ - "yah", - -12.860980033874512 - ], - [ - "▁Spiegel", - -12.861017227172852 - ], - [ - "▁Jeffrey", - -12.861023902893066 - ], - [ - "fahrzeug", - -12.861027717590332 - ], - [ - "▁Plug", - -12.861051559448242 - ], - [ - "▁violin", - -12.861150741577148 - ], - [ - "▁condemn", - -12.861381530761719 - ], - [ - "▁conducere", - -12.861398696899414 - ], - [ - "▁Chevrolet", - -12.861412048339844 - ], - [ - "▁conceput", - -12.861461639404297 - ], - [ - "▁Merri", - -12.861493110656738 - ], - [ - "judging", - -12.861559867858887 - ], - [ - "embraced", - -12.86168098449707 - ], - [ - "▁Compact", - -12.861715316772461 - ], - [ - "▁château", - -12.861807823181152 - ], - [ - "etch", - -12.861945152282715 - ], - [ - "bedroom", - -12.861995697021484 - ], - [ - "People", - -12.862038612365723 - ], - [ - "25,000", - -12.86209774017334 - ], - [ - "ocyte", - 
-12.862146377563477 - ], - [ - "▁Lenovo", - -12.862205505371094 - ], - [ - "▁Hampton", - -12.862241744995117 - ], - [ - "5.2", - -12.862244606018066 - ], - [ - "▁progres", - -12.862266540527344 - ], - [ - "hoc", - -12.862288475036621 - ], - [ - "▁complementary", - -12.86241340637207 - ], - [ - "turned", - -12.862485885620117 - ], - [ - "mangel", - -12.862508773803711 - ], - [ - "▁Drew", - -12.862592697143555 - ], - [ - "épisode", - -12.86259651184082 - ], - [ - "▁Versorgung", - -12.86259651184082 - ], - [ - "▁ausdrücklich", - -12.86259651184082 - ], - [ - "ciune", - -12.862788200378418 - ], - [ - "▁sfârșit", - -12.862990379333496 - ], - [ - "Agricultural", - -12.862991333007812 - ], - [ - "▁caffeine", - -12.862991333007812 - ], - [ - "▁emergencies", - -12.862991333007812 - ], - [ - "▁unhappy", - -12.862991333007812 - ], - [ - "(7)", - -12.863043785095215 - ], - [ - "▁inlocui", - -12.863059043884277 - ], - [ - "▁Rochester", - -12.863153457641602 - ], - [ - "183", - -12.863155364990234 - ], - [ - "niz", - -12.863285064697266 - ], - [ - "tasche", - -12.863462448120117 - ], - [ - "▁Salle", - -12.86347484588623 - ], - [ - "cît", - -12.863478660583496 - ], - [ - "▁Singer", - -12.863489151000977 - ], - [ - "▁economically", - -12.863506317138672 - ], - [ - "▁ieși", - -12.863525390625 - ], - [ - "▁façade", - -12.86378288269043 - ], - [ - "Ohne", - -12.863801956176758 - ], - [ - "▁edible", - -12.863842964172363 - ], - [ - "Rob", - -12.863851547241211 - ], - [ - "▁(2014)", - -12.863859176635742 - ], - [ - "▁Zar", - -12.863919258117676 - ], - [ - "▁obey", - -12.863995552062988 - ], - [ - "Pack", - -12.864087104797363 - ], - [ - "▁Omni", - -12.864198684692383 - ], - [ - "▁Gilbert", - -12.864212036132812 - ], - [ - "▁Vlad", - -12.86429500579834 - ], - [ - "▁pauvre", - -12.864333152770996 - ], - [ - "▁secular", - -12.864383697509766 - ], - [ - "Center", - -12.864415168762207 - ], - [ - "▁Prospect", - -12.864457130432129 - ], - [ - "▁Noah", - -12.86450481414795 - ], - [ - 
"▁Interactive", - -12.86471176147461 - ], - [ - "▁centaine", - -12.86485767364502 - ], - [ - "▁cerebral", - -12.864971160888672 - ], - [ - "▁Novel", - -12.865013122558594 - ], - [ - "▁Käufer", - -12.865039825439453 - ], - [ - "werfen", - -12.865056991577148 - ], - [ - "▁reluctant", - -12.865143775939941 - ], - [ - "ес", - -12.86520004272461 - ], - [ - "Look", - -12.86521053314209 - ], - [ - "Erkrankung", - -12.86536693572998 - ], - [ - "▁cucumber", - -12.86536693572998 - ], - [ - "/2017", - -12.865399360656738 - ], - [ - "▁flank", - -12.865405082702637 - ], - [ - "opportunité", - -12.865667343139648 - ], - [ - "zugleich", - -12.865766525268555 - ], - [ - "RAT", - -12.865840911865234 - ], - [ - "▁avantages", - -12.865880012512207 - ], - [ - "▁außer", - -12.866008758544922 - ], - [ - "GV", - -12.866090774536133 - ], - [ - "▁Continental", - -12.866159439086914 - ], - [ - "▁affiliation", - -12.866159439086914 - ], - [ - "▁ursprünglich", - -12.86618423461914 - ], - [ - "▁hardship", - -12.866349220275879 - ], - [ - "âme", - -12.86647891998291 - ], - [ - "▁hallway", - -12.866576194763184 - ], - [ - "▁afară", - -12.866578102111816 - ], - [ - "western", - -12.866714477539062 - ], - [ - "▁Jacket", - -12.866802215576172 - ], - [ - "▁culturelle", - -12.866876602172852 - ], - [ - "▁glaci", - -12.866995811462402 - ], - [ - "metoda", - -12.867036819458008 - ], - [ - "▁clerk", - -12.867045402526855 - ], - [ - "▁ordinance", - -12.867185592651367 - ], - [ - "▁Initial", - -12.867197036743164 - ], - [ - "waking", - -12.86722469329834 - ], - [ - "▁Secondary", - -12.867366790771484 - ], - [ - "▁Solomon", - -12.867411613464355 - ], - [ - "glomer", - -12.867488861083984 - ], - [ - "SYS", - -12.867530822753906 - ], - [ - "▁Florin", - -12.867596626281738 - ], - [ - "ffentlich", - -12.867670059204102 - ], - [ - "▁Printer", - -12.867674827575684 - ], - [ - "▁dimineata", - -12.86774730682373 - ], - [ - "▁stripes", - -12.867748260498047 - ], - [ - "plugged", - -12.86776065826416 - ], - [ - 
"öhl", - -12.867836952209473 - ], - [ - "infused", - -12.867875099182129 - ], - [ - "▁Rubber", - -12.867895126342773 - ], - [ - "paved", - -12.867898941040039 - ], - [ - "▁Devi", - -12.867995262145996 - ], - [ - "▁subway", - -12.8681640625 - ], - [ - "▁gases", - -12.868306159973145 - ], - [ - "▁reguli", - -12.868371963500977 - ], - [ - "▁Rebel", - -12.868413925170898 - ], - [ - "▁destructive", - -12.868546485900879 - ], - [ - "▁oferind", - -12.868664741516113 - ], - [ - "9001", - -12.868876457214355 - ], - [ - "CRA", - -12.868912696838379 - ], - [ - "why", - -12.868932723999023 - ], - [ - "sensul", - -12.869036674499512 - ], - [ - "guter", - -12.869277000427246 - ], - [ - "Empfehlung", - -12.869338035583496 - ], - [ - "▁convertible", - -12.86953353881836 - ], - [ - "▁predominantly", - -12.869637489318848 - ], - [ - "▁Mentor", - -12.869649887084961 - ], - [ - "Practic", - -12.869720458984375 - ], - [ - "▁echipă", - -12.869754791259766 - ], - [ - "onsite", - -12.869853019714355 - ], - [ - "▁zunehmend", - -12.86994743347168 - ], - [ - "▁Harbour", - -12.870016098022461 - ], - [ - "▁pineapple", - -12.870133399963379 - ], - [ - "▁gasoline", - -12.870139122009277 - ], - [ - "▁Jaguar", - -12.870158195495605 - ], - [ - "kno", - -12.870259284973145 - ], - [ - "▁heap", - -12.870448112487793 - ], - [ - "▁fictional", - -12.870481491088867 - ], - [ - "fiinta", - -12.870753288269043 - ], - [ - "▁Amber", - -12.87081241607666 - ], - [ - "▁Exclusive", - -12.870929718017578 - ], - [ - "▁Pharmaceutical", - -12.870929718017578 - ], - [ - "▁unterscheide", - -12.871044158935547 - ], - [ - "▁1942", - -12.871116638183594 - ], - [ - "▁Ceiling", - -12.87115478515625 - ], - [ - "developed", - -12.871228218078613 - ], - [ - "▁consacr", - -12.87132453918457 - ], - [ - "▁Membr", - -12.871411323547363 - ], - [ - "erton", - -12.871447563171387 - ], - [ - "habitation", - -12.871685981750488 - ], - [ - "▁longevity", - -12.871726989746094 - ], - [ - "▁Starbucks", - -12.871728897094727 - ], - [ - 
"▁poat", - -12.871771812438965 - ], - [ - "▁commissioner", - -12.871794700622559 - ], - [ - "pedia", - -12.871938705444336 - ], - [ - "popped", - -12.872468948364258 - ], - [ - "versorgung", - -12.872525215148926 - ], - [ - "▁Aktivitäten", - -12.872525215148926 - ], - [ - "▁Betreuung", - -12.872525215148926 - ], - [ - "▁afacere", - -12.872968673706055 - ], - [ - "▁Mechanical", - -12.873323440551758 - ], - [ - "▁Leiter", - -12.873346328735352 - ], - [ - "▁scaling", - -12.873427391052246 - ], - [ - "▁Slim", - -12.87350082397461 - ], - [ - "▁temperaturi", - -12.873516082763672 - ], - [ - "ACH", - -12.873558044433594 - ], - [ - "▁jährlich", - -12.873682022094727 - ], - [ - "▁photographie", - -12.873722076416016 - ], - [ - "▁préalable", - -12.873725891113281 - ], - [ - "▁părinți", - -12.87372875213623 - ], - [ - "▁Farmers", - -12.873873710632324 - ], - [ - "▁Printable", - -12.873905181884766 - ], - [ - "Früh", - -12.873908996582031 - ], - [ - "approved", - -12.87398624420166 - ], - [ - "otro", - -12.874094009399414 - ], - [ - "▁veneer", - -12.874099731445312 - ], - [ - "▁Warriors", - -12.874122619628906 - ], - [ - "▁Approach", - -12.874149322509766 - ], - [ - "Share", - -12.874238967895508 - ], - [ - "▁buds", - -12.874252319335938 - ], - [ - "▁Într", - -12.874330520629883 - ], - [ - "glichen", - -12.87452507019043 - ], - [ - "▁anbieten", - -12.87452507019043 - ], - [ - "MET", - -12.874539375305176 - ], - [ - "amélioration", - -12.87468147277832 - ], - [ - "ländische", - -12.87468433380127 - ], - [ - "nsgesamt", - -12.874764442443848 - ], - [ - "einiger", - -12.874822616577148 - ], - [ - "▁Förderung", - -12.874876022338867 - ], - [ - "destroying", - -12.874910354614258 - ], - [ - "▁accreditation", - -12.874922752380371 - ], - [ - "reminiscent", - -12.875094413757324 - ], - [ - "▁retriev", - -12.87528133392334 - ], - [ - "▁Flü", - -12.875306129455566 - ], - [ - "▁Monsieur", - -12.875322341918945 - ], - [ - "German", - -12.87536334991455 - ], - [ - "Orice", - 
-12.875443458557129 - ], - [ - "künftig", - -12.875523567199707 - ], - [ - "▁vorbi", - -12.875639915466309 - ], - [ - "▁intentionally", - -12.875733375549316 - ], - [ - "▁îngrij", - -12.875743865966797 - ], - [ - "▁laughed", - -12.875850677490234 - ], - [ - "▁Fiction", - -12.875913619995117 - ], - [ - "▁inteligent", - -12.875914573669434 - ], - [ - "▁Translation", - -12.875953674316406 - ], - [ - "greete", - -12.875983238220215 - ], - [ - "▁énergétique", - -12.876123428344727 - ], - [ - "uncovered", - -12.876248359680176 - ], - [ - "▁évidemment", - -12.876523971557617 - ], - [ - "▁Vietnamese", - -12.876535415649414 - ], - [ - "▁Libya", - -12.876675605773926 - ], - [ - "▁Trailer", - -12.876734733581543 - ], - [ - "▁Wohl", - -12.876871109008789 - ], - [ - "▁Congo", - -12.87698745727539 - ], - [ - "▁freut", - -12.877002716064453 - ], - [ - "zauber", - -12.877090454101562 - ], - [ - "▁Pân", - -12.877142906188965 - ], - [ - "▁mentine", - -12.877333641052246 - ], - [ - "▁welding", - -12.877335548400879 - ], - [ - "▁Mircea", - -12.8773775100708 - ], - [ - "▁optimism", - -12.877455711364746 - ], - [ - "VEL", - -12.877504348754883 - ], - [ - "oilea", - -12.877540588378906 - ], - [ - "▁thereafter", - -12.877612113952637 - ], - [ - "▁André", - -12.877710342407227 - ], - [ - "forschung", - -12.877799987792969 - ], - [ - "running", - -12.878022193908691 - ], - [ - "▁hostile", - -12.878059387207031 - ], - [ - "Homme", - -12.87811279296875 - ], - [ - "▁Satellite", - -12.878129005432129 - ], - [ - "▁collagen", - -12.87841796875 - ], - [ - "▁concedi", - -12.878518104553223 - ], - [ - "▁produziert", - -12.87852954864502 - ], - [ - "▁virgin", - -12.878540992736816 - ], - [ - "frant", - -12.87857723236084 - ], - [ - "▁teammates", - -12.878744125366211 - ], - [ - "▁faceti", - -12.878802299499512 - ], - [ - "▁Restoration", - -12.87893295288086 - ], - [ - "▁detached", - -12.878935813903809 - ], - [ - "▁Instructor", - -12.878950119018555 - ], - [ - "montag", - -12.879227638244629 - ], - [ 
- "▁borrowing", - -12.879375457763672 - ], - [ - "▁Retro", - -12.879446983337402 - ], - [ - "▁behandelt", - -12.879536628723145 - ], - [ - "▁Aussage", - -12.879715919494629 - ], - [ - "▁snorkel", - -12.879734992980957 - ], - [ - "▁Proceedings", - -12.879754066467285 - ], - [ - "▁Judy", - -12.879776000976562 - ], - [ - "▁Wendy", - -12.879783630371094 - ], - [ - "artă", - -12.879920959472656 - ], - [ - "▁Vergangenheit", - -12.88013744354248 - ], - [ - "▁Gegner", - -12.880139350891113 - ], - [ - "▁ulcer", - -12.880166053771973 - ], - [ - "wirksam", - -12.880553245544434 - ], - [ - "▁închis", - -12.880560874938965 - ], - [ - "▁emission", - -12.88068962097168 - ], - [ - "ulescu", - -12.880754470825195 - ], - [ - "▁bancar", - -12.880819320678711 - ], - [ - "compromising", - -12.880924224853516 - ], - [ - "▁Priest", - -12.881156921386719 - ], - [ - "▁Progress", - -12.881318092346191 - ], - [ - "▁punish", - -12.88144588470459 - ], - [ - "▁Afin", - -12.881450653076172 - ], - [ - "▁Bog", - -12.881514549255371 - ], - [ - "lunii", - -12.881525039672852 - ], - [ - "▁ressembl", - -12.881570816040039 - ], - [ - "▁Creation", - -12.881644248962402 - ], - [ - "effet", - -12.881668090820312 - ], - [ - "Versicherung", - -12.881671905517578 - ], - [ - "médias", - -12.881672859191895 - ], - [ - "▁Kritik", - -12.881793975830078 - ], - [ - "idia", - -12.881896018981934 - ], - [ - "▁Wasch", - -12.881929397583008 - ], - [ - "UAL", - -12.882059097290039 - ], - [ - "Approximately", - -12.882149696350098 - ], - [ - "izari", - -12.882152557373047 - ], - [ - "▁Dortmund", - -12.882152557373047 - ], - [ - "▁contul", - -12.882343292236328 - ], - [ - "▁Airways", - -12.882408142089844 - ], - [ - "sicherung", - -12.882535934448242 - ], - [ - "échelle", - -12.882560729980469 - ], - [ - "ADD", - -12.882582664489746 - ], - [ - "DIA", - -12.88259506225586 - ], - [ - "kabel", - -12.882621765136719 - ], - [ - "Media", - -12.88268756866455 - ], - [ - "ampli", - -12.882894515991211 - ], - [ - "▁quarry", - 
-12.88295841217041 - ], - [ - "▁acoper", - -12.883072853088379 - ], - [ - "halter", - -12.883326530456543 - ], - [ - "▁solicitor", - -12.883684158325195 - ], - [ - "phosphat", - -12.883763313293457 - ], - [ - "▁drown", - -12.883773803710938 - ], - [ - "congratulat", - -12.884047508239746 - ], - [ - "▁uneven", - -12.884087562561035 - ], - [ - "▁rupe", - -12.884154319763184 - ], - [ - "▁heureux", - -12.88417911529541 - ], - [ - "caractéristiques", - -12.884221076965332 - ], - [ - "60,000", - -12.884283065795898 - ], - [ - "ambigu", - -12.884340286254883 - ], - [ - "224", - -12.884417533874512 - ], - [ - "dov", - -12.88454532623291 - ], - [ - "▁Naturally", - -12.884629249572754 - ], - [ - "▁Ernst", - -12.884634017944336 - ], - [ - "Camp", - -12.884757995605469 - ], - [ - "▁Worldwide", - -12.884909629821777 - ], - [ - "▁antrenament", - -12.885042190551758 - ], - [ - "▁jocul", - -12.88521671295166 - ], - [ - "▁broccoli", - -12.88537883758545 - ], - [ - "▁fascinated", - -12.88537883758545 - ], - [ - "▁Abbey", - -12.885387420654297 - ], - [ - "▁aquarium", - -12.885390281677246 - ], - [ - "HAN", - -12.885458946228027 - ], - [ - "chaffung", - -12.885480880737305 - ], - [ - "137", - -12.885503768920898 - ], - [ - "rumors", - -12.885515213012695 - ], - [ - "reliance", - -12.885557174682617 - ], - [ - "▁vaccination", - -12.8856782913208 - ], - [ - "responsabilitate", - -12.885777473449707 - ], - [ - "▁legislati", - -12.885782241821289 - ], - [ - "ATT", - -12.885826110839844 - ], - [ - "206", - -12.885896682739258 - ], - [ - "▁miere", - -12.885967254638672 - ], - [ - "▁rezultatul", - -12.885988235473633 - ], - [ - "părea", - -12.88599681854248 - ], - [ - "zuführen", - -12.886159896850586 - ], - [ - "▁Kompetenz", - -12.886187553405762 - ], - [ - "▁nickname", - -12.886195182800293 - ], - [ - "pilot", - -12.88620376586914 - ], - [ - "▁ninth", - -12.886252403259277 - ], - [ - "▁Tyr", - -12.886446952819824 - ], - [ - "▁misuse", - -12.886469841003418 - ], - [ - "▁SUP", - 
-12.886514663696289 - ], - [ - "▁Attack", - -12.88667106628418 - ], - [ - "Smart", - -12.88669490814209 - ], - [ - "▁Philosoph", - -12.886930465698242 - ], - [ - "▁Alege", - -12.886931419372559 - ], - [ - "▁femeile", - -12.886967658996582 - ], - [ - "▁Heating", - -12.88698673248291 - ], - [ - "▁Cricket", - -12.886999130249023 - ], - [ - "▁scholar", - -12.887049674987793 - ], - [ - "Model", - -12.887073516845703 - ], - [ - "▁stimulating", - -12.887182235717773 - ], - [ - "▁industrielle", - -12.887189865112305 - ], - [ - "▁phenomena", - -12.887303352355957 - ], - [ - "▁Nahrung", - -12.887414932250977 - ], - [ - "▁Conditioner", - -12.887433052062988 - ], - [ - "führ", - -12.887489318847656 - ], - [ - "▁révolution", - -12.88757610321045 - ], - [ - "plastic", - -12.887595176696777 - ], - [ - "▁approximate", - -12.887596130371094 - ], - [ - "▁dienen", - -12.887624740600586 - ], - [ - "▁obsession", - -12.887807846069336 - ], - [ - "▁rectangular", - -12.887807846069336 - ], - [ - "Allemagne", - -12.887808799743652 - ], - [ - "▁Tanzania", - -12.887824058532715 - ], - [ - "border", - -12.887884140014648 - ], - [ - "▁crashed", - -12.887958526611328 - ], - [ - "visor", - -12.887974739074707 - ], - [ - "▁autorizat", - -12.888072967529297 - ], - [ - "▁Champagne", - -12.888222694396973 - ], - [ - "längst", - -12.888238906860352 - ], - [ - "▁realities", - -12.888314247131348 - ], - [ - "▁Keyword", - -12.88831615447998 - ], - [ - "▁GUI", - -12.888495445251465 - ], - [ - "▁simplified", - -12.88865852355957 - ], - [ - "▁Rack", - -12.888681411743164 - ], - [ - "▁Zahlen", - -12.888693809509277 - ], - [ - "growth", - -12.888897895812988 - ], - [ - "▁rehearsal", - -12.888991355895996 - ], - [ - "▁Epic", - -12.888999938964844 - ], - [ - "▁réussite", - -12.889195442199707 - ], - [ - "▁politician", - -12.889263153076172 - ], - [ - "▁emoți", - -12.889378547668457 - ], - [ - "▁delegation", - -12.889449119567871 - ], - [ - "▁со", - -12.889464378356934 - ], - [ - "oversized", - 
-12.889477729797363 - ], - [ - "▁Motto", - -12.889481544494629 - ], - [ - "1860", - -12.889788627624512 - ], - [ - "▁defective", - -12.889803886413574 - ], - [ - "brewing", - -12.889852523803711 - ], - [ - "linguistic", - -12.890243530273438 - ], - [ - "▁Hopkins", - -12.890265464782715 - ], - [ - "▁(2012)", - -12.89030933380127 - ], - [ - "crease", - -12.890436172485352 - ], - [ - "▁Versicherungs", - -12.89052677154541 - ], - [ - "▁Noble", - -12.890752792358398 - ], - [ - "▁Bekannt", - -12.890896797180176 - ], - [ - "▁vorstellen", - -12.89095401763916 - ], - [ - "▁suburban", - -12.890970230102539 - ], - [ - "DAC", - -12.890995025634766 - ], - [ - "▁scatter", - -12.89103889465332 - ], - [ - "▁Artificial", - -12.8910551071167 - ], - [ - "▁reactor", - -12.891073226928711 - ], - [ - "▁modelling", - -12.89108943939209 - ], - [ - "▁Holder", - -12.891148567199707 - ], - [ - "athon", - -12.891149520874023 - ], - [ - "147", - -12.891190528869629 - ], - [ - "▁stagn", - -12.891257286071777 - ], - [ - "ARY", - -12.891261100769043 - ], - [ - "Space", - -12.89126968383789 - ], - [ - "▁Gibson", - -12.891718864440918 - ], - [ - "▁Investigator", - -12.89173698425293 - ], - [ - "▁1914", - -12.891818046569824 - ], - [ - "▁Muhammad", - -12.891868591308594 - ], - [ - "▁shove", - -12.892073631286621 - ], - [ - "▁erklären", - -12.892276763916016 - ], - [ - "▁abdomen", - -12.892277717590332 - ], - [ - "▁Mazda", - -12.892349243164062 - ], - [ - "▁hemo", - -12.892364501953125 - ], - [ - "National", - -12.892455101013184 - ], - [ - "starken", - -12.89267635345459 - ], - [ - "▁Cyprus", - -12.892683982849121 - ], - [ - "▁tread", - -12.892721176147461 - ], - [ - "▁sweetness", - -12.892725944519043 - ], - [ - "stunden", - -12.892790794372559 - ], - [ - "▁couverture", - -12.893059730529785 - ], - [ - "▁Successful", - -12.893060684204102 - ], - [ - "▁oublier", - -12.893171310424805 - ], - [ - "▁esential", - -12.893203735351562 - ], - [ - "estival", - -12.89321231842041 - ], - [ - "gnac", - 
-12.893280029296875 - ], - [ - "▁Basement", - -12.893457412719727 - ], - [ - "presumably", - -12.893497467041016 - ], - [ - "▁mourn", - -12.893561363220215 - ], - [ - "armée", - -12.893677711486816 - ], - [ - "148", - -12.893845558166504 - ], - [ - "▁residue", - -12.894006729125977 - ], - [ - "▁metalic", - -12.89404296875 - ], - [ - "▁Zell", - -12.89425277709961 - ], - [ - "Build", - -12.894280433654785 - ], - [ - "▁prevalence", - -12.894312858581543 - ], - [ - "▁wrestling", - -12.894312858581543 - ], - [ - "▁ascuns", - -12.894325256347656 - ], - [ - "Sacred", - -12.894340515136719 - ], - [ - "Tec", - -12.89438533782959 - ], - [ - "▁Kindergarten", - -12.894389152526855 - ], - [ - "bindung", - -12.894464492797852 - ], - [ - "▁ritm", - -12.894545555114746 - ], - [ - "▁triste", - -12.894651412963867 - ], - [ - "▁introdus", - -12.894758224487305 - ], - [ - "/2016", - -12.894824028015137 - ], - [ - "▁română", - -12.894899368286133 - ], - [ - "▁bibli", - -12.89490032196045 - ], - [ - "▁cigar", - -12.894913673400879 - ], - [ - "Rie", - -12.894990921020508 - ], - [ - "▁intentional", - -12.894999504089355 - ], - [ - "▁cuprins", - -12.895098686218262 - ], - [ - "remarkably", - -12.895129203796387 - ], - [ - "▁printemps", - -12.895133972167969 - ], - [ - "▁declining", - -12.895171165466309 - ], - [ - "Magazin", - -12.89552116394043 - ], - [ - "▁săptămână", - -12.895537376403809 - ], - [ - "▁vérifier", - -12.895549774169922 - ], - [ - "▁Speise", - -12.895584106445312 - ], - [ - "▁reteta", - -12.8956298828125 - ], - [ - "heed", - -12.895772933959961 - ], - [ - "▁Compliance", - -12.895946502685547 - ], - [ - "▁embroidery", - -12.895946502685547 - ], - [ - "cried", - -12.896025657653809 - ], - [ - "▁(„", - -12.896282196044922 - ], - [ - "▁heck", - -12.89629077911377 - ], - [ - "▁sadness", - -12.896501541137695 - ], - [ - "▁impulse", - -12.896585464477539 - ], - [ - "ATH", - -12.896740913391113 - ], - [ - "▁lavender", - -12.896773338317871 - ], - [ - "uiesc", - -12.896790504455566 
- ], - [ - "▁Disorder", - -12.896876335144043 - ], - [ - "stroke", - -12.896991729736328 - ], - [ - "▁piaţ", - -12.8970365524292 - ], - [ - "ournée", - -12.897049903869629 - ], - [ - "▁Barnes", - -12.8971586227417 - ], - [ - "▁scăzut", - -12.897172927856445 - ], - [ - "▁équipements", - -12.89725112915039 - ], - [ - "OND", - -12.897375106811523 - ], - [ - "▁Compet", - -12.897424697875977 - ], - [ - "▁Bestell", - -12.89748477935791 - ], - [ - "▁immédiatement", - -12.897587776184082 - ], - [ - "aparut", - -12.89759635925293 - ], - [ - "▁rainfall", - -12.897882461547852 - ], - [ - "oreille", - -12.89797306060791 - ], - [ - "▁ministère", - -12.898014068603516 - ], - [ - "iris", - -12.898140907287598 - ], - [ - "dyna", - -12.898279190063477 - ], - [ - "drücken", - -12.898343086242676 - ], - [ - "▁détect", - -12.89834976196289 - ], - [ - "▁fonctionnalité", - -12.89840030670166 - ], - [ - "▁imbalance", - -12.89840030670166 - ], - [ - "▁unpredictable", - -12.89840030670166 - ], - [ - "▁literar", - -12.89846134185791 - ], - [ - "▁Windsor", - -12.898472785949707 - ], - [ - "▁Unlimited", - -12.898481369018555 - ], - [ - "colour", - -12.898674964904785 - ], - [ - "▁Portfolio", - -12.898810386657715 - ], - [ - "149", - -12.898883819580078 - ], - [ - "volution", - -12.898890495300293 - ], - [ - "▁folgende", - -12.899078369140625 - ], - [ - "▁arbitration", - -12.899105072021484 - ], - [ - "kicking", - -12.89913558959961 - ], - [ - "zügig", - -12.89923095703125 - ], - [ - "▁1941", - -12.899311065673828 - ], - [ - "▁Drake", - -12.89955997467041 - ], - [ - "▁ausführlich", - -12.899630546569824 - ], - [ - "▁chaussure", - -12.899630546569824 - ], - [ - "▁intestinal", - -12.89976692199707 - ], - [ - "▁pilgrim", - -12.900040626525879 - ], - [ - "▁Bark", - -12.900142669677734 - ], - [ - "between", - -12.900157928466797 - ], - [ - "disposed", - -12.900175094604492 - ], - [ - "▁Dylan", - -12.900218963623047 - ], - [ - "ств", - -12.900253295898438 - ], - [ - "NOR", - -12.900287628173828 - ], 
- [ - "traces", - -12.90038776397705 - ], - [ - "▁moindre", - -12.900500297546387 - ], - [ - "▁$10,000", - -12.900552749633789 - ], - [ - "212", - -12.900599479675293 - ], - [ - "wusste", - -12.900659561157227 - ], - [ - "▁predictable", - -12.900671005249023 - ], - [ - "poţi", - -12.900679588317871 - ], - [ - "▁Celsius", - -12.900860786437988 - ], - [ - "gebunden", - -12.90086841583252 - ], - [ - "▁Legacy", - -12.900891304016113 - ], - [ - "movers", - -12.90090274810791 - ], - [ - "▁concret", - -12.90098762512207 - ], - [ - "▁simpla", - -12.901050567626953 - ], - [ - "rechnet", - -12.901103973388672 - ], - [ - "▁certainty", - -12.901144981384277 - ], - [ - "entrepreneurship", - -12.901153564453125 - ], - [ - "kohl", - -12.901289939880371 - ], - [ - "▁curte", - -12.901311874389648 - ], - [ - "▁Forbes", - -12.901411056518555 - ], - [ - "▁Zusatz", - -12.901535987854004 - ], - [ - "blending", - -12.90163803100586 - ], - [ - "▁variat", - -12.901642799377441 - ], - [ - "▁galaxy", - -12.90168285369873 - ], - [ - "▁safari", - -12.90168571472168 - ], - [ - "▁municipalities", - -12.9017972946167 - ], - [ - "▁Drept", - -12.90180778503418 - ], - [ - "aufnahme", - -12.902128219604492 - ], - [ - "▁endorse", - -12.902223587036133 - ], - [ - "einrichtung", - -12.902244567871094 - ], - [ - "Sync", - -12.902270317077637 - ], - [ - "abide", - -12.902323722839355 - ], - [ - "brushed", - -12.902350425720215 - ], - [ - "▁actiune", - -12.902410507202148 - ], - [ - "quaint", - -12.902498245239258 - ], - [ - "▁volatility", - -12.902504920959473 - ], - [ - "▁repetitive", - -12.902505874633789 - ], - [ - "▁découvr", - -12.902560234069824 - ], - [ - "Totodat", - -12.902585983276367 - ], - [ - "▁românesc", - -12.902682304382324 - ], - [ - "▁tempting", - -12.902772903442383 - ], - [ - "thesis", - -12.902947425842285 - ], - [ - "secure", - -12.903013229370117 - ], - [ - "delt", - -12.903019905090332 - ], - [ - "▁şef", - -12.903167724609375 - ], - [ - "▁epidemic", - -12.903326988220215 - ], - [ - 
"▁Appliance", - -12.903327941894531 - ], - [ - "cearcă", - -12.903331756591797 - ], - [ - "▁lodging", - -12.903361320495605 - ], - [ - "▁photographed", - -12.903507232666016 - ], - [ - "geschlagen", - -12.903794288635254 - ], - [ - "▁Methodist", - -12.90380859375 - ], - [ - "▁Transit", - -12.90389347076416 - ], - [ - "▁Länder", - -12.903934478759766 - ], - [ - "villa", - -12.903986930847168 - ], - [ - "▁toilette", - -12.904031753540039 - ], - [ - "anno", - -12.904074668884277 - ], - [ - "▁Aufnahme", - -12.904091835021973 - ], - [ - "▁Coral", - -12.904099464416504 - ], - [ - "pourraient", - -12.904129981994629 - ], - [ - "▁digestion", - -12.904245376586914 - ], - [ - "▁Vacation", - -12.904274940490723 - ], - [ - "▁Rugby", - -12.904275894165039 - ], - [ - "MIC", - -12.904311180114746 - ], - [ - "▁choc", - -12.904417991638184 - ], - [ - "2002", - -12.904492378234863 - ], - [ - "gestion", - -12.904674530029297 - ], - [ - "▁Zoom", - -12.904745101928711 - ], - [ - "essor", - -12.904763221740723 - ], - [ - "weighed", - -12.904793739318848 - ], - [ - "▁dispus", - -12.904987335205078 - ], - [ - "▁redemption", - -12.90502643585205 - ], - [ - "▁plaster", - -12.905071258544922 - ], - [ - "▁Quilt", - -12.90507698059082 - ], - [ - "▁teritoriul", - -12.905088424682617 - ], - [ - "ndern", - -12.905097961425781 - ], - [ - "▁expired", - -12.905105590820312 - ], - [ - "▁Tribunal", - -12.905122756958008 - ], - [ - "occupation", - -12.9052152633667 - ], - [ - "▁woodland", - -12.905248641967773 - ], - [ - "vieux", - -12.905254364013672 - ], - [ - "▁Midland", - -12.905465126037598 - ], - [ - "gât", - -12.90571117401123 - ], - [ - "électricité", - -12.905800819396973 - ], - [ - "▁vanzare", - -12.905811309814453 - ], - [ - "biologi", - -12.905961036682129 - ], - [ - "▁vive", - -12.906060218811035 - ], - [ - "▁Alarm", - -12.906097412109375 - ], - [ - "▁experiență", - -12.9061279296875 - ], - [ - "▁Loch", - -12.906133651733398 - ], - [ - "▁Pedro", - -12.906194686889648 - ], - [ - 
"▁detergent", - -12.906217575073242 - ], - [ - "language", - -12.906554222106934 - ], - [ - "▁sedan", - -12.906655311584473 - ], - [ - "▁Brady", - -12.906736373901367 - ], - [ - "▁compus", - -12.906976699829102 - ], - [ - "▁landfill", - -12.906982421875 - ], - [ - "giu", - -12.907039642333984 - ], - [ - "beziehung", - -12.9070405960083 - ], - [ - "▁picior", - -12.907184600830078 - ], - [ - "ALI", - -12.907235145568848 - ], - [ - "▁Commander", - -12.907256126403809 - ], - [ - "EPS", - -12.907303810119629 - ], - [ - "▁Textil", - -12.907320022583008 - ], - [ - "▁industria", - -12.907339096069336 - ], - [ - "lox", - -12.907365798950195 - ], - [ - "▁eclectic", - -12.907453536987305 - ], - [ - "▁gracious", - -12.907477378845215 - ], - [ - "Uniunea", - -12.907525062561035 - ], - [ - "bps", - -12.90754222869873 - ], - [ - "▁entertained", - -12.907634735107422 - ], - [ - "depinde", - -12.907767295837402 - ], - [ - "▁daylight", - -12.907893180847168 - ], - [ - "▁résistance", - -12.907995223999023 - ], - [ - "ARN", - -12.908194541931152 - ], - [ - "▁unavailable", - -12.908201217651367 - ], - [ - "Curtea", - -12.908390045166016 - ], - [ - "▁pores", - -12.908502578735352 - ], - [ - "▁Tonight", - -12.908649444580078 - ], - [ - "▁datori", - -12.90869426727295 - ], - [ - "▁gezielt", - -12.908703804016113 - ], - [ - "▁rupture", - -12.90875244140625 - ], - [ - "▁disput", - -12.908848762512207 - ], - [ - "▁sonstige", - -12.908895492553711 - ], - [ - "▁Ordnung", - -12.90910816192627 - ], - [ - "▁beschrieben", - -12.909114837646484 - ], - [ - "▁Rainbow", - -12.90911865234375 - ], - [ - "▁Werkzeug", - -12.909136772155762 - ], - [ - "GIN", - -12.909354209899902 - ], - [ - "facilitating", - -12.909490585327148 - ], - [ - "hunt", - -12.90955638885498 - ], - [ - "▁Serving", - -12.909673690795898 - ], - [ - "Writ", - -12.909692764282227 - ], - [ - "requisite", - -12.909798622131348 - ], - [ - "▁Kerry", - -12.90989875793457 - ], - [ - "▁riesig", - -12.909957885742188 - ], - [ - "▁Healing", - 
-12.91030502319336 - ], - [ - "▁1954", - -12.910365104675293 - ], - [ - "▁mousse", - -12.910428047180176 - ], - [ - "▁Positive", - -12.910764694213867 - ], - [ - "embodie", - -12.910772323608398 - ], - [ - "▁penetrate", - -12.910774230957031 - ], - [ - "endorsed", - -12.910882949829102 - ], - [ - "▁situatia", - -12.910927772521973 - ], - [ - "▁Unity", - -12.911083221435547 - ], - [ - "142", - -12.911102294921875 - ], - [ - "▁farmhouse", - -12.911138534545898 - ], - [ - "▁Handbook", - -12.911368370056152 - ], - [ - "▁symbolic", - -12.911378860473633 - ], - [ - "pristine", - -12.911439895629883 - ], - [ - "moitié", - -12.911595344543457 - ], - [ - "▁Sessions", - -12.912017822265625 - ], - [ - "technisch", - -12.912116050720215 - ], - [ - "▁lesquel", - -12.912148475646973 - ], - [ - "▁electronically", - -12.912208557128906 - ], - [ - "▁modificat", - -12.912240982055664 - ], - [ - "▁adjoin", - -12.912242889404297 - ], - [ - "actualité", - -12.912256240844727 - ], - [ - "vati", - -12.91229248046875 - ], - [ - "VENT", - -12.912299156188965 - ], - [ - "▁salsa", - -12.912333488464355 - ], - [ - "acupunctur", - -12.912424087524414 - ], - [ - "▁Opportunity", - -12.912424087524414 - ], - [ - "▁Inspection", - -12.912425994873047 - ], - [ - "▁vereinbart", - -12.912425994873047 - ], - [ - "▁Residents", - -12.912426948547363 - ], - [ - "▁perennial", - -12.91242790222168 - ], - [ - "CHAN", - -12.912555694580078 - ], - [ - "Search", - -12.912572860717773 - ], - [ - "UTE", - -12.912696838378906 - ], - [ - "▁Lens", - -12.912703514099121 - ], - [ - "▁Banner", - -12.91281509399414 - ], - [ - "aménagement", - -12.912839889526367 - ], - [ - "▁Decision", - -12.91286849975586 - ], - [ - "▁ferr", - -12.912869453430176 - ], - [ - "▁Transformation", - -12.912878036499023 - ], - [ - "▁Stamm", - -12.912955284118652 - ], - [ - "▁Galerie", - -12.913003921508789 - ], - [ - "onny", - -12.913126945495605 - ], - [ - "▁caption", - -12.913195610046387 - ], - [ - "▁viitorul", - -12.91323471069336 - ], - 
[ - "▁professionelle", - -12.913281440734863 - ], - [ - "drepturile", - -12.913294792175293 - ], - [ - "ylon", - -12.913345336914062 - ], - [ - "Société", - -12.913387298583984 - ], - [ - "AIS", - -12.913456916809082 - ], - [ - "March", - -12.91350269317627 - ], - [ - "▁Rav", - -12.91357707977295 - ], - [ - "▁1946", - -12.913691520690918 - ], - [ - "accompagnement", - -12.913713455200195 - ], - [ - "Liviu", - -12.913716316223145 - ], - [ - "▁Appeal", - -12.913826942443848 - ], - [ - "▁sentir", - -12.913952827453613 - ], - [ - "▁Indigenous", - -12.914087295532227 - ], - [ - "▁wizard", - -12.914087295532227 - ], - [ - "▁collateral", - -12.914127349853516 - ], - [ - "▁Proof", - -12.914324760437012 - ], - [ - "▁prze", - -12.914398193359375 - ], - [ - "▁obținut", - -12.91450309753418 - ], - [ - "COP", - -12.914629936218262 - ], - [ - "▁obiect", - -12.914681434631348 - ], - [ - "▁isolate", - -12.914685249328613 - ], - [ - "▁nieder", - -12.914793014526367 - ], - [ - "TECH", - -12.914953231811523 - ], - [ - "▁Sharing", - -12.914998054504395 - ], - [ - "Ideally", - -12.915008544921875 - ], - [ - "▁naked", - -12.915059089660645 - ], - [ - "horaire", - -12.915130615234375 - ], - [ - "▁prelucrare", - -12.915180206298828 - ], - [ - "▁forcément", - -12.915349006652832 - ], - [ - "▁ESPN", - -12.915403366088867 - ], - [ - "▁southwest", - -12.9154634475708 - ], - [ - "▁Timber", - -12.915682792663574 - ], - [ - "kleidung", - -12.915748596191406 - ], - [ - "MJ", - -12.915854454040527 - ], - [ - "Ped", - -12.915889739990234 - ], - [ - "▁lymph", - -12.916181564331055 - ], - [ - "wärme", - -12.916399002075195 - ], - [ - "▁Olivia", - -12.916610717773438 - ], - [ - "Ziua", - -12.916705131530762 - ], - [ - "reihe", - -12.916747093200684 - ], - [ - "▁selfish", - -12.916752815246582 - ], - [ - "▁geography", - -12.916814804077148 - ], - [ - "▁etaj", - -12.916924476623535 - ], - [ - "▁acquis", - -12.91698932647705 - ], - [ - "▁rejoin", - -12.91701602935791 - ], - [ - "7.1", - 
-12.917097091674805 - ], - [ - "▁paix", - -12.91713809967041 - ], - [ - "tirer", - -12.917284965515137 - ], - [ - "▁clase", - -12.91745662689209 - ], - [ - "▁blink", - -12.917572021484375 - ], - [ - "▁Interface", - -12.917611122131348 - ], - [ - "nado", - -12.917655944824219 - ], - [ - "RIT", - -12.91777515411377 - ], - [ - "ESC", - -12.918120384216309 - ], - [ - "▁carving", - -12.918190002441406 - ], - [ - "▁articolul", - -12.918194770812988 - ], - [ - "▁wreath", - -12.918258666992188 - ], - [ - "▁propaganda", - -12.918266296386719 - ], - [ - "▁Pair", - -12.918267250061035 - ], - [ - "▁pamant", - -12.91831111907959 - ], - [ - "▁venituri", - -12.918357849121094 - ], - [ - "rtz", - -12.91835880279541 - ], - [ - "uddle", - -12.918529510498047 - ], - [ - "uille", - -12.918543815612793 - ], - [ - "▁embed", - -12.918654441833496 - ], - [ - "0.05", - -12.918655395507812 - ], - [ - "▁Brighton", - -12.918718338012695 - ], - [ - "estens", - -12.918742179870605 - ], - [ - "▁occupational", - -12.918862342834473 - ], - [ - "ем", - -12.918890953063965 - ], - [ - "wünsche", - -12.919081687927246 - ], - [ - "▁Poetry", - -12.91909408569336 - ], - [ - "▁visualize", - -12.919109344482422 - ], - [ - "Across", - -12.919121742248535 - ], - [ - "▁essentielle", - -12.919123649597168 - ], - [ - "beratung", - -12.919143676757812 - ], - [ - "▁Guidelines", - -12.91919231414795 - ], - [ - "▁Fehl", - -12.919198036193848 - ], - [ - "▁liberty", - -12.91921329498291 - ], - [ - "▁Investigation", - -12.91922378540039 - ], - [ - "▁sunrise", - -12.919266700744629 - ], - [ - "▁12:00", - -12.919541358947754 - ], - [ - "venind", - -12.919583320617676 - ], - [ - "▁lotion", - -12.919655799865723 - ], - [ - "conscious", - -12.91968822479248 - ], - [ - "logists", - -12.91973876953125 - ], - [ - "▁judecător", - -12.919893264770508 - ], - [ - "▁Ecuador", - -12.919928550720215 - ], - [ - "▁ambulance", - -12.91994857788086 - ], - [ - "▁Already", - -12.920026779174805 - ], - [ - "▁eröffnet", - 
-12.920090675354004 - ], - [ - "▁naval", - -12.92010498046875 - ], - [ - "▁imposibil", - -12.92011547088623 - ], - [ - "▁Merry", - -12.92011833190918 - ], - [ - "▁Duncan", - -12.920272827148438 - ], - [ - "▁léger", - -12.9203519821167 - ], - [ - "▁delta", - -12.920391082763672 - ], - [ - "▁Machinery", - -12.920578002929688 - ], - [ - "▁craftsmanship", - -12.920766830444336 - ], - [ - "▁angezeigt", - -12.9207763671875 - ], - [ - "▁formidable", - -12.9207763671875 - ], - [ - "▁Startup", - -12.920878410339355 - ], - [ - "venus", - -12.920969009399414 - ], - [ - "▁tannin", - -12.921019554138184 - ], - [ - "collaborating", - -12.921128273010254 - ], - [ - "▁abrupt", - -12.921152114868164 - ], - [ - "emergence", - -12.921171188354492 - ], - [ - "Dienstleistungen", - -12.921197891235352 - ], - [ - "▁liefert", - -12.921217918395996 - ], - [ - "engagement", - -12.921222686767578 - ], - [ - "▁maximise", - -12.921304702758789 - ], - [ - "modeled", - -12.9214448928833 - ], - [ - "▁crane", - -12.92148208618164 - ], - [ - "▁effortless", - -12.921540260314941 - ], - [ - "▁Buffet", - -12.92160701751709 - ], - [ - "8000", - -12.921648979187012 - ], - [ - "▁Überblick", - -12.921687126159668 - ], - [ - "micro", - -12.921981811523438 - ], - [ - "▁vergleichen", - -12.92204475402832 - ], - [ - "143", - -12.922080993652344 - ], - [ - "5.6", - -12.922094345092773 - ], - [ - "▁odata", - -12.922131538391113 - ], - [ - "▁interviu", - -12.922162055969238 - ], - [ - "▁poliţi", - -12.922375679016113 - ], - [ - "plated", - -12.922383308410645 - ], - [ - "Roman", - -12.922406196594238 - ], - [ - "▁satisfactory", - -12.922453880310059 - ], - [ - "▁unanimous", - -12.922459602355957 - ], - [ - "▁întâln", - -12.922464370727539 - ], - [ - "nonsense", - -12.922558784484863 - ], - [ - "▁HOW", - -12.922616004943848 - ], - [ - "prezinta", - -12.922639846801758 - ], - [ - "▁măsura", - -12.9226655960083 - ], - [ - "▁Fuji", - -12.92275619506836 - ], - [ - "▁Meaning", - -12.92278003692627 - ], - [ - 
"aspiring", - -12.922850608825684 - ], - [ - "▁Suceava", - -12.922863006591797 - ], - [ - "arba", - -12.922983169555664 - ], - [ - "pressive", - -12.922988891601562 - ], - [ - "▁creek", - -12.92301082611084 - ], - [ - "trakt", - -12.923023223876953 - ], - [ - "▁fluffy", - -12.923303604125977 - ], - [ - "▁bateau", - -12.923371315002441 - ], - [ - "ме", - -12.923545837402344 - ], - [ - "UNG", - -12.923609733581543 - ], - [ - "motifs", - -12.923907279968262 - ], - [ - "Type", - -12.923958778381348 - ], - [ - "perçu", - -12.924132347106934 - ], - [ - "singurul", - -12.924139022827148 - ], - [ - "▁(2011)", - -12.92418384552002 - ], - [ - "▁hemp", - -12.924263954162598 - ], - [ - "betroffenen", - -12.92431640625 - ], - [ - "▁sermon", - -12.924369812011719 - ], - [ - "AID", - -12.924545288085938 - ], - [ - "3.7", - -12.924627304077148 - ], - [ - "▁heiß", - -12.92463207244873 - ], - [ - "▁bolnav", - -12.924982070922852 - ], - [ - "First", - -12.924995422363281 - ], - [ - "▁interrupt", - -12.925040245056152 - ], - [ - "phag", - -12.925106048583984 - ], - [ - "235", - -12.925201416015625 - ], - [ - "▁discoveries", - -12.925262451171875 - ], - [ - "▁Wellington", - -12.925263404846191 - ], - [ - "▁wechseln", - -12.925298690795898 - ], - [ - "▁strategically", - -12.925379753112793 - ], - [ - "▁iphone", - -12.925440788269043 - ], - [ - "geteilt", - -12.925646781921387 - ], - [ - "generative", - -12.925748825073242 - ], - [ - "▁Monroe", - -12.925806045532227 - ], - [ - "▁Execut", - -12.925863265991211 - ], - [ - "▁knitting", - -12.925931930541992 - ], - [ - "▁Couple", - -12.925939559936523 - ], - [ - "▁Shade", - -12.926020622253418 - ], - [ - "▁Taj", - -12.926060676574707 - ], - [ - "950", - -12.926077842712402 - ], - [ - "boiled", - -12.92609977722168 - ], - [ - "▁mixes", - -12.926130294799805 - ], - [ - "betroffene", - -12.926156044006348 - ], - [ - "▁continuation", - -12.926169395446777 - ], - [ - "▁begleitet", - -12.926226615905762 - ], - [ - "▁numerical", - 
-12.926281929016113 - ], - [ - "▁(2013)", - -12.92630386352539 - ], - [ - "▁nourish", - -12.926399230957031 - ], - [ - "oricar", - -12.926485061645508 - ], - [ - "focus", - -12.926486015319824 - ], - [ - "▁Crazy", - -12.926651000976562 - ], - [ - "▁ascend", - -12.926671028137207 - ], - [ - "▁vinde", - -12.926855087280273 - ], - [ - "roar", - -12.926874160766602 - ], - [ - "Vac", - -12.926929473876953 - ], - [ - "▁Zuschauer", - -12.927068710327148 - ], - [ - "izeze", - -12.927179336547852 - ], - [ - "▁Mindest", - -12.92721939086914 - ], - [ - "lingual", - -12.927229881286621 - ], - [ - "▁violet", - -12.927264213562012 - ], - [ - "▁Opfer", - -12.927299499511719 - ], - [ - "ARS", - -12.927431106567383 - ], - [ - "4.7", - -12.92744255065918 - ], - [ - "millennial", - -12.927492141723633 - ], - [ - "▁striv", - -12.927639961242676 - ], - [ - "▁bishop", - -12.927680015563965 - ], - [ - "▁Durham", - -12.927708625793457 - ], - [ - "opathic", - -12.927817344665527 - ], - [ - "Where", - -12.927999496459961 - ], - [ - "▁Rider", - -12.928030014038086 - ], - [ - "▁Reid", - -12.928030967712402 - ], - [ - "stumbled", - -12.928156852722168 - ], - [ - "deep", - -12.92827320098877 - ], - [ - "▁11:00", - -12.928340911865234 - ], - [ - "▁Essex", - -12.928380966186523 - ], - [ - "▁Analyst", - -12.928397178649902 - ], - [ - "feel", - -12.928546905517578 - ], - [ - "▁rave", - -12.928601264953613 - ], - [ - "▁Eddie", - -12.928631782531738 - ], - [ - "▁communiqué", - -12.928756713867188 - ], - [ - "[/", - -12.928791046142578 - ], - [ - "▁Tho", - -12.929011344909668 - ], - [ - "ffentlichkeit", - -12.929019927978516 - ], - [ - "instrument", - -12.929126739501953 - ], - [ - "▁metropolitan", - -12.929179191589355 - ], - [ - "▁experienţ", - -12.929181098937988 - ], - [ - "East", - -12.929198265075684 - ], - [ - "Compared", - -12.929434776306152 - ], - [ - "worn", - -12.929484367370605 - ], - [ - "berufliche", - -12.92966365814209 - ], - [ - "▁Umstände", - -12.929710388183594 - ], - [ - 
"individuellen", - -12.929901123046875 - ], - [ - "siehe", - -12.929912567138672 - ], - [ - "▁sfarsit", - -12.929969787597656 - ], - [ - "▁Strength", - -12.929999351501465 - ], - [ - "▁prejudice", - -12.930024147033691 - ], - [ - "▁shutdown", - -12.930159568786621 - ], - [ - "chatting", - -12.93022346496582 - ], - [ - "▁Gerne", - -12.930227279663086 - ], - [ - "▁Yum", - -12.930305480957031 - ], - [ - "▁coastline", - -12.930387496948242 - ], - [ - "▁headboard", - -12.930623054504395 - ], - [ - "▁politische", - -12.930768966674805 - ], - [ - "Sub", - -12.930838584899902 - ], - [ - "▁Henderson", - -12.930870056152344 - ], - [ - "▁astonishing", - -12.930870056152344 - ], - [ - "▁Dresden", - -12.930871963500977 - ], - [ - "▁strawberry", - -12.93088436126709 - ], - [ - "prenez", - -12.930889129638672 - ], - [ - "▁Monaco", - -12.930912971496582 - ], - [ - "▁empowered", - -12.930953025817871 - ], - [ - "fäl", - -12.93109130859375 - ], - [ - "▁creier", - -12.931120872497559 - ], - [ - "▁Equ", - -12.931300163269043 - ], - [ - "▁Selling", - -12.931379318237305 - ], - [ - "▁$35", - -12.931483268737793 - ], - [ - "konto", - -12.931503295898438 - ], - [ - "▁Procedure", - -12.931715965270996 - ], - [ - "▁reduziert", - -12.931715965270996 - ], - [ - "▁royalty", - -12.931740760803223 - ], - [ - "wyn", - -12.931756019592285 - ], - [ - "▁Unfall", - -12.932141304016113 - ], - [ - "NAT", - -12.932161331176758 - ], - [ - "▁grafic", - -12.93251895904541 - ], - [ - "▁Collective", - -12.932563781738281 - ], - [ - "▁Computing", - -12.932564735412598 - ], - [ - "▁Established", - -12.932594299316406 - ], - [ - "▁zest", - -12.932598114013672 - ], - [ - "venez", - -12.932611465454102 - ], - [ - "follow", - -12.9326171875 - ], - [ - "▁Motivation", - -12.932640075683594 - ], - [ - "▁dictator", - -12.932755470275879 - ], - [ - "whichever", - -12.93281078338623 - ], - [ - "▁întâmpl", - -12.93293285369873 - ], - [ - "Flüchtling", - -12.932987213134766 - ], - [ - "EMI", - -12.933015823364258 - ], - [ 
- "404", - -12.933019638061523 - ], - [ - "ICK", - -12.93302059173584 - ], - [ - "emplacement", - -12.933191299438477 - ], - [ - "complete", - -12.933349609375 - ], - [ - "advising", - -12.933412551879883 - ], - [ - "▁Administrative", - -12.933481216430664 - ], - [ - "▁deviation", - -12.933496475219727 - ], - [ - "▁experienț", - -12.933500289916992 - ], - [ - "lethor", - -12.933996200561523 - ], - [ - "▁compress", - -12.934081077575684 - ], - [ - "rival", - -12.934173583984375 - ], - [ - "reprendre", - -12.934186935424805 - ], - [ - "ugi", - -12.934266090393066 - ], - [ - "▁Invitation", - -12.934267044067383 - ], - [ - "▁retina", - -12.934332847595215 - ], - [ - "▁farther", - -12.934335708618164 - ], - [ - "▁fenêtre", - -12.934799194335938 - ], - [ - "6-7", - -12.934815406799316 - ], - [ - "zhou", - -12.934834480285645 - ], - [ - "▁Piano", - -12.934840202331543 - ], - [ - "▁Congrats", - -12.935114860534668 - ], - [ - "▁Configur", - -12.935131072998047 - ], - [ - "▁superficial", - -12.935179710388184 - ], - [ - "▁melting", - -12.935315132141113 - ], - [ - "▁raspunde", - -12.935626983642578 - ], - [ - "▁drip", - -12.93564224243164 - ], - [ - "östlich", - -12.9358491897583 - ], - [ - "189", - -12.935925483703613 - ], - [ - "▁Ludwig", - -12.935959815979004 - ], - [ - "▁keto", - -12.935985565185547 - ], - [ - "▁Bogdan", - -12.936013221740723 - ], - [ - "▁contracted", - -12.936029434204102 - ], - [ - "▁revive", - -12.936100006103516 - ], - [ - "▁cristal", - -12.936232566833496 - ], - [ - "▁mailbox", - -12.936257362365723 - ], - [ - "președintele", - -12.936559677124023 - ], - [ - "▁seekers", - -12.936627388000488 - ], - [ - "func", - -12.936904907226562 - ], - [ - "▁Markus", - -12.93691349029541 - ], - [ - "Unter", - -12.936923027038574 - ], - [ - "▁übertragen", - -12.937003135681152 - ], - [ - "▁adaptive", - -12.937024116516113 - ], - [ - "caster", - -12.937051773071289 - ], - [ - "▁geek", - -12.937164306640625 - ], - [ - "▁réservation", - -12.937236785888672 - ], - [ - 
"▁irritation", - -12.937240600585938 - ], - [ - "▁HDMI", - -12.937346458435059 - ], - [ - "Seeing", - -12.937485694885254 - ], - [ - "▁genul", - -12.937569618225098 - ], - [ - "▁catastrophe", - -12.937662124633789 - ], - [ - "▁Tweet", - -12.937665939331055 - ], - [ - "TZ", - -12.937729835510254 - ], - [ - "▁credible", - -12.937946319580078 - ], - [ - "▁cobor", - -12.938064575195312 - ], - [ - "▁realizeaz", - -12.938159942626953 - ], - [ - "journal", - -12.938274383544922 - ], - [ - "▁shaking", - -12.938532829284668 - ], - [ - "3-6", - -12.938572883605957 - ], - [ - "▁beneficiaz", - -12.938605308532715 - ], - [ - "▁Frankreich", - -12.938633918762207 - ], - [ - "committing", - -12.9386568069458 - ], - [ - "AMS", - -12.938835144042969 - ], - [ - "▁Feli", - -12.939007759094238 - ], - [ - "▁Producer", - -12.939023971557617 - ], - [ - "▁übrig", - -12.93940544128418 - ], - [ - "gemeinde", - -12.939593315124512 - ], - [ - "should", - -12.939799308776855 - ], - [ - "▁neurons", - -12.939799308776855 - ], - [ - "▁Agenda", - -12.939833641052246 - ], - [ - "▁hashtag", - -12.939896583557129 - ], - [ - "▁confortabil", - -12.939897537231445 - ], - [ - "520", - -12.940008163452148 - ], - [ - "bonded", - -12.940033912658691 - ], - [ - "▁următoare", - -12.940191268920898 - ], - [ - "▁volatile", - -12.940223693847656 - ], - [ - "infamous", - -12.940225601196289 - ], - [ - "seară", - -12.940229415893555 - ], - [ - "▁Sorge", - -12.940346717834473 - ], - [ - "▁Beiträge", - -12.940420150756836 - ], - [ - "▁îndeplin", - -12.940449714660645 - ], - [ - "gespräch", - -12.940649032592773 - ], - [ - "▁joueur", - -12.940701484680176 - ], - [ - "▁outsourcing", - -12.940701484680176 - ], - [ - "▁Guvernul", - -12.940814018249512 - ], - [ - "6-2", - -12.940818786621094 - ], - [ - "▁prioritize", - -12.941068649291992 - ], - [ - "▁duminică", - -12.941076278686523 - ], - [ - "▁resignation", - -12.941076278686523 - ], - [ - "▁Converter", - -12.941079139709473 - ], - [ - "hereby", - -12.941155433654785 - 
], - [ - "▁stresses", - -12.941299438476562 - ], - [ - "▁brun", - -12.941415786743164 - ], - [ - "▁elev", - -12.941423416137695 - ], - [ - "▁Skip", - -12.941479682922363 - ], - [ - "540", - -12.941499710083008 - ], - [ - "TURE", - -12.941603660583496 - ], - [ - "▁Lynch", - -12.941635131835938 - ], - [ - "▁preveni", - -12.941643714904785 - ], - [ - "compatible", - -12.941692352294922 - ], - [ - "surveyed", - -12.941702842712402 - ], - [ - "▁Ausnahme", - -12.941713333129883 - ], - [ - "▁medicul", - -12.941812515258789 - ], - [ - "▁subtil", - -12.941865921020508 - ], - [ - "▁Quali", - -12.941890716552734 - ], - [ - "▁techno", - -12.941900253295898 - ], - [ - "presently", - -12.94193172454834 - ], - [ - "▁Müller", - -12.941934585571289 - ], - [ - "DIRECT", - -12.941937446594238 - ], - [ - "schuld", - -12.941944122314453 - ], - [ - "▁Bloomberg", - -12.941994667053223 - ], - [ - "feuer", - -12.942181587219238 - ], - [ - "▁Pharmacy", - -12.942270278930664 - ], - [ - "▁Schnitt", - -12.942301750183105 - ], - [ - "186", - -12.942333221435547 - ], - [ - "peaks", - -12.942355155944824 - ], - [ - "▁Gemeinsam", - -12.94235897064209 - ], - [ - "▁récemment", - -12.94235897064209 - ], - [ - "▁Pascal", - -12.942490577697754 - ], - [ - "filmed", - -12.942523956298828 - ], - [ - "RCA", - -12.942548751831055 - ], - [ - "▁virtuelle", - -12.942622184753418 - ], - [ - "▁dotat", - -12.942630767822266 - ], - [ - "logisch", - -12.942717552185059 - ], - [ - "▁Luck", - -12.943005561828613 - ], - [ - "cosy", - -12.943132400512695 - ], - [ - "▁Awareness", - -12.943216323852539 - ], - [ - "▁gesetzlich", - -12.943263053894043 - ], - [ - "padded", - -12.943306922912598 - ], - [ - "▁Lotus", - -12.943395614624023 - ], - [ - "urging", - -12.9434175491333 - ], - [ - "▁mushroom", - -12.943426132202148 - ], - [ - "▁adultes", - -12.943527221679688 - ], - [ - "▁Coca", - -12.943571090698242 - ], - [ - "▁recev", - -12.943586349487305 - ], - [ - "▁mantra", - -12.943610191345215 - ], - [ - "▁practise", - 
-12.943644523620605 - ], - [ - "▁acceler", - -12.943663597106934 - ], - [ - "bolster", - -12.943756103515625 - ], - [ - "▁compressed", - -12.943818092346191 - ], - [ - "TIN", - -12.943899154663086 - ], - [ - "▁aromatic", - -12.944236755371094 - ], - [ - "geleitet", - -12.944408416748047 - ], - [ - "▁fibr", - -12.944443702697754 - ], - [ - "exécut", - -12.94444751739502 - ], - [ - "▁unconscious", - -12.94456958770752 - ], - [ - "HAR", - -12.944607734680176 - ], - [ - "▁Gregory", - -12.944661140441895 - ], - [ - "▁Manila", - -12.944738388061523 - ], - [ - "ozitate", - -12.944756507873535 - ], - [ - "exemplary", - -12.944803237915039 - ], - [ - "éventuel", - -12.944906234741211 - ], - [ - "▁Craciun", - -12.944930076599121 - ], - [ - "▁tehnologii", - -12.944931030273438 - ], - [ - "▁Despre", - -12.945138931274414 - ], - [ - "▁1917", - -12.945141792297363 - ], - [ - "▁upfront", - -12.945146560668945 - ], - [ - "▁Iulia", - -12.945280075073242 - ], - [ - "▁erwähnt", - -12.945359230041504 - ], - [ - "▁magnesium", - -12.945359230041504 - ], - [ - "▁descriptive", - -12.94536304473877 - ], - [ - "▁consumul", - -12.945364952087402 - ], - [ - "▁10-15", - -12.945423126220703 - ], - [ - "▁erfüllen", - -12.945611953735352 - ], - [ - "gig", - -12.945657730102539 - ], - [ - "430", - -12.945765495300293 - ], - [ - "▁Migration", - -12.945789337158203 - ], - [ - "bră", - -12.94579029083252 - ], - [ - "▁réforme", - -12.945863723754883 - ], - [ - "▁york", - -12.94610595703125 - ], - [ - "dritten", - -12.946109771728516 - ], - [ - "cumva", - -12.946182250976562 - ], - [ - "▁Alumni", - -12.946218490600586 - ], - [ - "▁Ceramic", - -12.946222305297852 - ], - [ - "▁rappelle", - -12.946236610412598 - ], - [ - "▁pianist", - -12.946248054504395 - ], - [ - "twisted", - -12.946306228637695 - ], - [ - "earned", - -12.946432113647461 - ], - [ - "▁Hose", - -12.946514129638672 - ], - [ - "156", - -12.946610450744629 - ], - [ - "▁Salmon", - -12.946687698364258 - ], - [ - "Level", - -12.946913719177246 
- ], - [ - "▁swirl", - -12.947052001953125 - ], - [ - "erfahrung", - -12.947061538696289 - ], - [ - "▁liabilities", - -12.947078704833984 - ], - [ - "praxis", - -12.9470853805542 - ], - [ - "IPO", - -12.947089195251465 - ], - [ - "▁screaming", - -12.947092056274414 - ], - [ - "emphasized", - -12.947200775146484 - ], - [ - "DEA", - -12.947260856628418 - ], - [ - "▁dermatolog", - -12.947351455688477 - ], - [ - "▁pacate", - -12.947498321533203 - ], - [ - "▁ansamblu", - -12.947507858276367 - ], - [ - "▁beteiligt", - -12.947509765625 - ], - [ - "▁Needles", - -12.947574615478516 - ], - [ - "▁organisiert", - -12.947607040405273 - ], - [ - "Pacific", - -12.947639465332031 - ], - [ - "actual", - -12.947823524475098 - ], - [ - "prindere", - -12.94801139831543 - ], - [ - "▁Indoor", - -12.948348045349121 - ], - [ - "▁Gewalt", - -12.948431015014648 - ], - [ - "▁rezid", - -12.948507308959961 - ], - [ - "censor", - -12.948522567749023 - ], - [ - "▁unlawful", - -12.94882869720459 - ], - [ - "▁Explain", - -12.948873519897461 - ], - [ - "▁Flame", - -12.948897361755371 - ], - [ - "▁brachte", - -12.948941230773926 - ], - [ - "▁Mustang", - -12.94899845123291 - ], - [ - "ectomy", - -12.949044227600098 - ], - [ - "▁deliberate", - -12.949064254760742 - ], - [ - "▁sparkle", - -12.949225425720215 - ], - [ - "▁inchis", - -12.94926929473877 - ], - [ - "▁Cristian", - -12.949289321899414 - ], - [ - "▁facture", - -12.949291229248047 - ], - [ - "▁Grundstück", - -12.949292182922363 - ], - [ - "außerhalb", - -12.949300765991211 - ], - [ - "coast", - -12.949321746826172 - ], - [ - "anilor", - -12.949396133422852 - ], - [ - "255", - -12.94952392578125 - ], - [ - "nterdisciplinary", - -12.949576377868652 - ], - [ - "▁Isabel", - -12.949655532836914 - ], - [ - "▁Städte", - -12.949701309204102 - ], - [ - "▁cicl", - -12.949837684631348 - ], - [ - "▁Zeug", - -12.949905395507812 - ], - [ - "▁Muskel", - -12.949951171875 - ], - [ - "▁indirectly", - -12.950051307678223 - ], - [ - "▁Vorbereitung", - 
-12.950093269348145 - ], - [ - "MMA", - -12.95012378692627 - ], - [ - "▁pudding", - -12.950197219848633 - ], - [ - "rax", - -12.950389862060547 - ], - [ - "▁Stimmung", - -12.95052433013916 - ], - [ - "▁hierarchy", - -12.95052433013916 - ], - [ - "partie", - -12.950597763061523 - ], - [ - "▁elevate", - -12.950685501098633 - ], - [ - "▁Persian", - -12.950690269470215 - ], - [ - "forensic", - -12.95077896118164 - ], - [ - "Become", - -12.950854301452637 - ], - [ - "leicht", - -12.9508695602417 - ], - [ - "▁staging", - -12.950942039489746 - ], - [ - "▁fühlt", - -12.950965881347656 - ], - [ - "fenster", - -12.950979232788086 - ], - [ - "▁unbelievable", - -12.951089859008789 - ], - [ - "„", - -12.951260566711426 - ], - [ - "▁Guatemala", - -12.951387405395508 - ], - [ - "LET", - -12.95141315460205 - ], - [ - "▁buff", - -12.951454162597656 - ], - [ - "▁Primul", - -12.951626777648926 - ], - [ - "▁mainland", - -12.951702117919922 - ], - [ - "campus", - -12.951923370361328 - ], - [ - "▁gefällt", - -12.952075958251953 - ], - [ - "BAN", - -12.952153205871582 - ], - [ - "finish", - -12.952229499816895 - ], - [ - "accustomed", - -12.952251434326172 - ], - [ - "▁Businesses", - -12.95234203338623 - ], - [ - "▁întreb", - -12.95239543914795 - ], - [ - "▁recomandă", - -12.952425956726074 - ], - [ - "▁pellet", - -12.952474594116211 - ], - [ - "▁GST", - -12.952507972717285 - ], - [ - "SEA", - -12.952601432800293 - ], - [ - "▁categorie", - -12.952631950378418 - ], - [ - "▁convainc", - -12.95268440246582 - ], - [ - "▁considéré", - -12.952739715576172 - ], - [ - "rois", - -12.952853202819824 - ], - [ - "▁thrust", - -12.952898979187012 - ], - [ - "ijk", - -12.953001022338867 - ], - [ - "gefüllt", - -12.953118324279785 - ], - [ - "▁situatii", - -12.953327178955078 - ], - [ - "▁Jacksonville", - -12.95337200164795 - ], - [ - "▁bakery", - -12.953473091125488 - ], - [ - "▁Accident", - -12.953554153442383 - ], - [ - "▁urmeaza", - -12.953572273254395 - ], - [ - "▁crib", - -12.953593254089355 - ], 
- [ - "getroffen", - -12.953707695007324 - ], - [ - "Based", - -12.953877449035645 - ], - [ - "Including", - -12.95398235321045 - ], - [ - "▁Morocco", - -12.95398235321045 - ], - [ - "▁casserole", - -12.95398235321045 - ], - [ - "▁enquiry", - -12.953983306884766 - ], - [ - "▁pahar", - -12.954017639160156 - ], - [ - "▁Unternehmer", - -12.954025268554688 - ], - [ - "électro", - -12.954068183898926 - ], - [ - "Marie", - -12.95413589477539 - ], - [ - "▁Sno", - -12.954153060913086 - ], - [ - "▁prostate", - -12.954168319702148 - ], - [ - "▁Wallace", - -12.95426082611084 - ], - [ - "empre", - -12.954402923583984 - ], - [ - "▁Multumesc", - -12.954415321350098 - ], - [ - "White", - -12.954675674438477 - ], - [ - "brief", - -12.954751014709473 - ], - [ - "▁kitten", - -12.954751014709473 - ], - [ - "füh", - -12.954780578613281 - ], - [ - "▁mankind", - -12.954821586608887 - ], - [ - "ENE", - -12.95483112335205 - ], - [ - "▁Ethics", - -12.954848289489746 - ], - [ - "▁Realty", - -12.954946517944336 - ], - [ - "▁Emerg", - -12.954988479614258 - ], - [ - "7-8", - -12.955055236816406 - ], - [ - "museum", - -12.955096244812012 - ], - [ - "BRE", - -12.95518970489502 - ], - [ - "▁kilometri", - -12.955282211303711 - ], - [ - "oyaume", - -12.955286026000977 - ], - [ - "▁Cambodia", - -12.955288887023926 - ], - [ - "▁bruit", - -12.955304145812988 - ], - [ - "▁sépar", - -12.955334663391113 - ], - [ - "mastered", - -12.9554443359375 - ], - [ - "shake", - -12.955608367919922 - ], - [ - "▁liaison", - -12.955718994140625 - ], - [ - "▁Boulder", - -12.955719947814941 - ], - [ - "▁tortilla", - -12.955720901489258 - ], - [ - "▁Fokus", - -12.955731391906738 - ], - [ - "▁Blair", - -12.95573902130127 - ], - [ - "▁disturbance", - -12.955775260925293 - ], - [ - "geladen", - -12.955843925476074 - ], - [ - "▁sunscreen", - -12.955886840820312 - ], - [ - "▁reuș", - -12.955896377563477 - ], - [ - "▁Braun", - -12.956155776977539 - ], - [ - "▁existente", - -12.956157684326172 - ], - [ - "stift", - 
-12.956242561340332 - ], - [ - "▁preot", - -12.956387519836426 - ], - [ - "▁doved", - -12.956445693969727 - ], - [ - "sexual", - -12.956488609313965 - ], - [ - "meanwhile", - -12.956583976745605 - ], - [ - "▁legislature", - -12.956583976745605 - ], - [ - "▁vermeiden", - -12.956583976745605 - ], - [ - "▁inequality", - -12.95687484741211 - ], - [ - "▁turc", - -12.956881523132324 - ], - [ - "ви", - -12.95698070526123 - ], - [ - "▁Kontrolle", - -12.95702075958252 - ], - [ - "▁Ursache", - -12.95704174041748 - ], - [ - "▁confess", - -12.95704174041748 - ], - [ - "▁poetic", - -12.957109451293945 - ], - [ - "attention", - -12.957236289978027 - ], - [ - "textured", - -12.957386016845703 - ], - [ - "GES", - -12.957586288452148 - ], - [ - "6-4", - -12.957637786865234 - ], - [ - "Ray", - -12.957696914672852 - ], - [ - "chromat", - -12.957745552062988 - ], - [ - "▁insightful", - -12.957775115966797 - ], - [ - "▁Navigation", - -12.957887649536133 - ], - [ - "▁destiny", - -12.957887649536133 - ], - [ - "▁ergeben", - -12.957892417907715 - ], - [ - "▁versteh", - -12.958090782165527 - ], - [ - "301", - -12.958209037780762 - ], - [ - "▁Exterior", - -12.958321571350098 - ], - [ - "église", - -12.958322525024414 - ], - [ - "▁Failure", - -12.958322525024414 - ], - [ - "▁Patricia", - -12.958324432373047 - ], - [ - "▁geschützt", - -12.958328247070312 - ], - [ - "intrarea", - -12.95833969116211 - ], - [ - "▁Forward", - -12.958368301391602 - ], - [ - "▁Portrait", - -12.95844841003418 - ], - [ - "▁enregistré", - -12.958480834960938 - ], - [ - "▁wagon", - -12.958620071411133 - ], - [ - "stealing", - -12.958879470825195 - ], - [ - "▁Numero", - -12.958880424499512 - ], - [ - "▁tradui", - -12.958986282348633 - ], - [ - "▁klassische", - -12.959033966064453 - ], - [ - "▁profitieren", - -12.959043502807617 - ], - [ - "▁laboratories", - -12.95919132232666 - ], - [ - "▁reconnaissance", - -12.95919132232666 - ], - [ - "ку", - -12.959314346313477 - ], - [ - "▁Petersburg", - -12.959359169006348 - ], - [ 
- "▁fertility", - -12.959421157836914 - ], - [ - "▁Understand", - -12.959516525268555 - ], - [ - "dehors", - -12.959746360778809 - ], - [ - "▁Knox", - -12.959762573242188 - ], - [ - "software", - -12.959797859191895 - ], - [ - "▁Celebration", - -12.959823608398438 - ], - [ - "4.6", - -12.959897994995117 - ], - [ - "quino", - -12.959930419921875 - ], - [ - "▁endeavour", - -12.960073471069336 - ], - [ - "▁temptation", - -12.960136413574219 - ], - [ - "▁Registry", - -12.96035385131836 - ], - [ - "IMP", - -12.960502624511719 - ], - [ - "bedingt", - -12.960625648498535 - ], - [ - "▁$60", - -12.960846900939941 - ], - [ - "▁Kriterien", - -12.96093463897705 - ], - [ - "▁strawberries", - -12.960943222045898 - ], - [ - "▁conspiracy", - -12.96094799041748 - ], - [ - "▁pouch", - -12.960976600646973 - ], - [ - "▁Alexandria", - -12.961017608642578 - ], - [ - "▁Mick", - -12.961102485656738 - ], - [ - "extra", - -12.961114883422852 - ], - [ - "▁Operator", - -12.961151123046875 - ], - [ - "enduring", - -12.96132755279541 - ], - [ - "▁smash", - -12.961359024047852 - ], - [ - "Euro", - -12.961360931396484 - ], - [ - "▁Nouvelle", - -12.961370468139648 - ], - [ - "▁Raspberry", - -12.961370468139648 - ], - [ - "▁präsentieren", - -12.961380004882812 - ], - [ - "▁electrician", - -12.961404800415039 - ], - [ - "▁cheerful", - -12.961472511291504 - ], - [ - "▁chargé", - -12.961508750915527 - ], - [ - "▁Diskussion", - -12.961511611938477 - ], - [ - "▁surpass", - -12.961604118347168 - ], - [ - "▁Acces", - -12.961701393127441 - ], - [ - "tausend", - -12.961771011352539 - ], - [ - "▁vigorous", - -12.961808204650879 - ], - [ - "▁tava", - -12.961810111999512 - ], - [ - "CHO", - -12.96193790435791 - ], - [ - "▁1951", - -12.961941719055176 - ], - [ - "▁Umsatz", - -12.962019920349121 - ], - [ - "▁slavery", - -12.962055206298828 - ], - [ - "travel", - -12.962294578552246 - ], - [ - "▁correspondent", - -12.962297439575195 - ], - [ - "▁$150", - -12.962307929992676 - ], - [ - "▁stärker", - 
-12.962594985961914 - ], - [ - "Alb", - -12.96264362335205 - ], - [ - "▁Lopez", - -12.962682723999023 - ], - [ - "▁longueur", - -12.962767601013184 - ], - [ - "▁successive", - -12.962772369384766 - ], - [ - "▁(2015)", - -12.96278190612793 - ], - [ - "teig", - -12.962790489196777 - ], - [ - "custom", - -12.962944984436035 - ], - [ - "TIM", - -12.963099479675293 - ], - [ - "▁Escape", - -12.963174819946289 - ], - [ - "▁Sekunden", - -12.963349342346191 - ], - [ - "tiré", - -12.963444709777832 - ], - [ - "▁chantier", - -12.963489532470703 - ], - [ - "▁saturated", - -12.963555335998535 - ], - [ - "▁confrontation", - -12.963804244995117 - ], - [ - "▁biography", - -12.963805198669434 - ], - [ - "zuerst", - -12.9639892578125 - ], - [ - "▁rencontré", - -12.963991165161133 - ], - [ - "▁harmless", - -12.96412181854248 - ], - [ - "Branche", - -12.964139938354492 - ], - [ - "▁QR", - -12.964380264282227 - ], - [ - "▁Ereignis", - -12.964430809020996 - ], - [ - "▁verkaufen", - -12.96444320678711 - ], - [ - "0:00", - -12.96451187133789 - ], - [ - "Association", - -12.96469783782959 - ], - [ - "▁Santiago", - -12.964865684509277 - ], - [ - "Control", - -12.964993476867676 - ], - [ - "▁Angriff", - -12.9650297164917 - ], - [ - "lase", - -12.96505069732666 - ], - [ - "▁sfaturi", - -12.965224266052246 - ], - [ - "▁Comprehensive", - -12.965304374694824 - ], - [ - "▁Shepherd", - -12.965304374694824 - ], - [ - "▁exponential", - -12.965304374694824 - ], - [ - "▁penetration", - -12.965304374694824 - ], - [ - "▁comble", - -12.965394973754883 - ], - [ - "ionar", - -12.965557098388672 - ], - [ - "slept", - -12.965563774108887 - ], - [ - "▁Spice", - -12.965633392333984 - ], - [ - "mAh", - -12.965688705444336 - ], - [ - "▁Vertreter", - -12.965747833251953 - ], - [ - "fehler", - -12.965752601623535 - ], - [ - "▁Scroll", - -12.96599292755127 - ], - [ - "▁WARRANT", - -12.966179847717285 - ], - [ - "▁minimise", - -12.966326713562012 - ], - [ - "▁Dept", - -12.966474533081055 - ], - [ - "▁urinar", - 
-12.96661376953125 - ], - [ - "établir", - -12.966619491577148 - ], - [ - "verhältnis", - -12.966713905334473 - ], - [ - "▁glowing", - -12.966979026794434 - ], - [ - "kulturelle", - -12.966984748840332 - ], - [ - "▁Pediatric", - -12.967057228088379 - ], - [ - "▁inconvenience", - -12.967057228088379 - ], - [ - "Antoine", - -12.967121124267578 - ], - [ - "▁Heck", - -12.967164993286133 - ], - [ - "▁couches", - -12.967265129089355 - ], - [ - "▁1938", - -12.967331886291504 - ], - [ - "maybe", - -12.967333793640137 - ], - [ - "ETA", - -12.9673433303833 - ], - [ - "▁solaire", - -12.96748161315918 - ], - [ - "▁Zürich", - -12.967495918273926 - ], - [ - "computer", - -12.967545509338379 - ], - [ - "milk", - -12.96756362915039 - ], - [ - "он", - -12.967585563659668 - ], - [ - "modalitate", - -12.967608451843262 - ], - [ - "spanning", - -12.967655181884766 - ], - [ - "▁Crypto", - -12.96774959564209 - ], - [ - "▁Spotify", - -12.967935562133789 - ], - [ - "mycin", - -12.967944145202637 - ], - [ - "▁similarities", - -12.96811294555664 - ], - [ - "▁eclipse", - -12.968377113342285 - ], - [ - "Map", - -12.968610763549805 - ], - [ - "double", - -12.96861743927002 - ], - [ - "corporate", - -12.968734741210938 - ], - [ - "▁Hindi", - -12.968853950500488 - ], - [ - "battling", - -12.968866348266602 - ], - [ - "▁habituel", - -12.969098091125488 - ], - [ - "▁Transition", - -12.969196319580078 - ], - [ - "▁luptă", - -12.96920394897461 - ], - [ - "▁trainee", - -12.969219207763672 - ], - [ - "LIS", - -12.96922492980957 - ], - [ - "▁Vatican", - -12.969254493713379 - ], - [ - "Archived", - -12.9692964553833 - ], - [ - "Connect", - -12.969305038452148 - ], - [ - "▁prealabil", - -12.969307899475098 - ], - [ - "▁Chambre", - -12.969327926635742 - ], - [ - "stuhl", - -12.969440460205078 - ], - [ - "▁arrivé", - -12.969557762145996 - ], - [ - "▁Urteil", - -12.969575881958008 - ], - [ - "▁scrutiny", - -12.969818115234375 - ], - [ - "▁memoir", - -12.969854354858398 - ], - [ - "▁innovant", - 
-12.9699068069458 - ], - [ - "▁sublime", - -12.969943046569824 - ], - [ - "children", - -12.970004081726074 - ], - [ - "▁Handwerk", - -12.970056533813477 - ], - [ - "▁campuses", - -12.970268249511719 - ], - [ - "▁durabil", - -12.970502853393555 - ], - [ - "▁immersive", - -12.970632553100586 - ], - [ - "▁Magnet", - -12.970732688903809 - ], - [ - "läufe", - -12.970808029174805 - ], - [ - "▁Techno", - -12.970837593078613 - ], - [ - "MAP", - -12.9710693359375 - ], - [ - "7.2", - -12.971145629882812 - ], - [ - "▁Schwimm", - -12.971181869506836 - ], - [ - "BOOK", - -12.971186637878418 - ], - [ - "188", - -12.971441268920898 - ], - [ - "▁Supervisor", - -12.971498489379883 - ], - [ - "prévue", - -12.971691131591797 - ], - [ - "needed", - -12.971813201904297 - ], - [ - "▁creditors", - -12.971822738647461 - ], - [ - "▁brin", - -12.971837043762207 - ], - [ - "▁Neck", - -12.971900939941406 - ], - [ - "▁Salut", - -12.971988677978516 - ], - [ - "▁despair", - -12.972105979919434 - ], - [ - "▁Sauce", - -12.972261428833008 - ], - [ - "▁Westminster", - -12.972335815429688 - ], - [ - "▁langfristig", - -12.972335815429688 - ], - [ - "▁northeast", - -12.972365379333496 - ], - [ - "▁încercat", - -12.972399711608887 - ], - [ - "▁nausea", - -12.972408294677734 - ], - [ - "▁Paypal", - -12.972440719604492 - ], - [ - "▁Arrow", - -12.972469329833984 - ], - [ - "▁Travis", - -12.972633361816406 - ], - [ - "(2009)", - -12.972713470458984 - ], - [ - "▁Rising", - -12.972719192504883 - ], - [ - "termes", - -12.973097801208496 - ], - [ - "Australie", - -12.973154067993164 - ], - [ - "▁scarf", - -12.973187446594238 - ], - [ - "klassischen", - -12.97337818145752 - ], - [ - "▁boug", - -12.973466873168945 - ], - [ - "DOT", - -12.97360610961914 - ], - [ - "▁Trink", - -12.97361946105957 - ], - [ - "▁bestätigt", - -12.97365951538086 - ], - [ - "▁officiel", - -12.97370433807373 - ], - [ - "Produkt", - -12.973873138427734 - ], - [ - "DNA", - -12.974140167236328 - ], - [ - "▁*******", - -12.97426700592041 - 
], - [ - "GAR", - -12.974271774291992 - ], - [ - "therapeut", - -12.974377632141113 - ], - [ - "187", - -12.974420547485352 - ], - [ - "▁Louisville", - -12.974493026733398 - ], - [ - "▁geöffnet", - -12.97462272644043 - ], - [ - "Watch", - -12.974640846252441 - ], - [ - "85%", - -12.974678993225098 - ], - [ - "▁Candida", - -12.974698066711426 - ], - [ - "▁Kathy", - -12.974703788757324 - ], - [ - "▁Animation", - -12.974711418151855 - ], - [ - "planung", - -12.974715232849121 - ], - [ - "woche", - -12.974730491638184 - ], - [ - "Video", - -12.974966049194336 - ], - [ - "▁Automation", - -12.97507095336914 - ], - [ - "▁foliage", - -12.97507381439209 - ], - [ - "▁evenimentului", - -12.975175857543945 - ], - [ - "SEN", - -12.975362777709961 - ], - [ - "▁Dialog", - -12.975372314453125 - ], - [ - "▁ZIP", - -12.975372314453125 - ], - [ - "▁vieții", - -12.97537612915039 - ], - [ - "▁passionné", - -12.975425720214844 - ], - [ - "▁WOW", - -12.97544002532959 - ], - [ - "ectiv", - -12.975464820861816 - ], - [ - "▁vorbesc", - -12.975482940673828 - ], - [ - "▁computational", - -12.975533485412598 - ], - [ - "▁idiot", - -12.97557258605957 - ], - [ - "▁stigma", - -12.97567081451416 - ], - [ - "▁multumesc", - -12.975870132446289 - ], - [ - "▁sărbători", - -12.975870132446289 - ], - [ - "▁Advantage", - -12.975906372070312 - ], - [ - "▁alegeri", - -12.976024627685547 - ], - [ - "▁philosopher", - -12.976031303405762 - ], - [ - "RIE", - -12.976117134094238 - ], - [ - "refundable", - -12.976221084594727 - ], - [ - "▁Sofia", - -12.97623348236084 - ], - [ - "▁încheiat", - -12.976313591003418 - ], - [ - "meilleures", - -12.976473808288574 - ], - [ - "critical", - -12.976744651794434 - ], - [ - "▁cavity", - -12.976766586303711 - ], - [ - "▁ressort", - -12.976792335510254 - ], - [ - "strong", - -12.976798057556152 - ], - [ - "▁Backup", - -12.976948738098145 - ], - [ - "▁Zeitraum", - -12.977023124694824 - ], - [ - "▁Szene", - -12.977027893066406 - ], - [ - "▁Candle", - -12.977173805236816 - ], - 
[ - "▁ciocolat", - -12.977198600769043 - ], - [ - "etched", - -12.977227210998535 - ], - [ - "ан", - -12.977302551269531 - ], - [ - "▁Anchor", - -12.977365493774414 - ], - [ - "equate", - -12.977470397949219 - ], - [ - "▁bulg", - -12.977476119995117 - ], - [ - "▁motorist", - -12.977524757385254 - ], - [ - "träglich", - -12.977736473083496 - ], - [ - "please", - -12.977936744689941 - ], - [ - "different", - -12.978011131286621 - ], - [ - "▁Accel", - -12.97813606262207 - ], - [ - "Proiectul", - -12.97829818725586 - ], - [ - "▁cabbage", - -12.97852897644043 - ], - [ - "▁télécharger", - -12.97852897644043 - ], - [ - "▁Presentation", - -12.97856330871582 - ], - [ - "▁Struktur", - -12.978621482849121 - ], - [ - "bücher", - -12.978650093078613 - ], - [ - "▁flatter", - -12.978672981262207 - ], - [ - "emprunt", - -12.979074478149414 - ], - [ - "▁oriental", - -12.979111671447754 - ], - [ - "▁Turnier", - -12.979166984558105 - ], - [ - "brücke", - -12.97917366027832 - ], - [ - "▁légumes", - -12.979416847229004 - ], - [ - "gerechnet", - -12.979595184326172 - ], - [ - "flooded", - -12.979621887207031 - ], - [ - "LER", - -12.979679107666016 - ], - [ - "üben", - -12.97973918914795 - ], - [ - "internaute", - -12.979888916015625 - ], - [ - "▁Austausch", - -12.979935646057129 - ], - [ - "gefordert", - -12.980034828186035 - ], - [ - "▁adoptat", - -12.980277061462402 - ], - [ - "▁erinnern", - -12.980305671691895 - ], - [ - "▁dolphin", - -12.980307579040527 - ], - [ - "▁Parkinson", - -12.980308532714844 - ], - [ - "büro", - -12.980310440063477 - ], - [ - "▁Crest", - -12.980368614196777 - ], - [ - "▁Ikea", - -12.980437278747559 - ], - [ - "▁ecologic", - -12.980470657348633 - ], - [ - "mplă", - -12.98065185546875 - ], - [ - "▁șef", - -12.980655670166016 - ], - [ - "coop", - -12.980868339538574 - ], - [ - "▁Carson", - -12.980900764465332 - ], - [ - "▁uşor", - -12.981054306030273 - ], - [ - "▁exert", - -12.981070518493652 - ], - [ - "▁countertop", - -12.981114387512207 - ], - [ - "ntended", 
- -12.981136322021484 - ], - [ - "▁Civic", - -12.981313705444336 - ], - [ - "▁attentes", - -12.98133373260498 - ], - [ - "gesetzlichen", - -12.981356620788574 - ], - [ - "frischen", - -12.981475830078125 - ], - [ - "▁Bottle", - -12.981636047363281 - ], - [ - "▁cautare", - -12.982080459594727 - ], - [ - "▁waterfront", - -12.982226371765137 - ], - [ - "▁centerpiece", - -12.982312202453613 - ], - [ - "▁Castel", - -12.982441902160645 - ], - [ - "510", - -12.98270034790039 - ], - [ - "capped", - -12.982709884643555 - ], - [ - "▁mattresses", - -12.982850074768066 - ], - [ - "▁readiness", - -12.982865333557129 - ], - [ - "diag", - -12.982970237731934 - ], - [ - "▁geändert", - -12.982980728149414 - ], - [ - "▁complained", - -12.983051300048828 - ], - [ - "▁diary", - -12.983073234558105 - ], - [ - "▁ceremonies", - -12.983144760131836 - ], - [ - "▁următor", - -12.983181953430176 - ], - [ - "▁Engel", - -12.983270645141602 - ], - [ - "▁disconnect", - -12.9832763671875 - ], - [ - "▁Silvi", - -12.983282089233398 - ], - [ - "▁eingerichtet", - -12.9834566116333 - ], - [ - "medizin", - -12.983512878417969 - ], - [ - "▁majestic", - -12.983869552612305 - ], - [ - "▁Random", - -12.983943939208984 - ], - [ - "▁Equity", - -12.984046936035156 - ], - [ - "▁Echipa", - -12.984111785888672 - ], - [ - "са", - -12.984163284301758 - ], - [ - "316", - -12.984179496765137 - ], - [ - "▁Formation", - -12.984183311462402 - ], - [ - "inland", - -12.98421859741211 - ], - [ - "appuy", - -12.984301567077637 - ], - [ - "TAN", - -12.984481811523438 - ], - [ - "slipped", - -12.984918594360352 - ], - [ - "Certains", - -12.985247611999512 - ], - [ - "▁Silber", - -12.98525333404541 - ], - [ - "▁reçoi", - -12.985257148742676 - ], - [ - "▁Monthly", - -12.985323905944824 - ], - [ - "calculating", - -12.985494613647461 - ], - [ - "▁scratches", - -12.98554515838623 - ], - [ - "▁concurrence", - -12.985654830932617 - ], - [ - "▁Stärke", - -12.985662460327148 - ], - [ - "▁intermediar", - -12.985751152038574 - ], - [ 
- "▁erlebt", - -12.98579216003418 - ], - [ - "gesellschaftlich", - -12.986037254333496 - ], - [ - "▁Volk", - -12.986041069030762 - ], - [ - "▁Ansprüche", - -12.986101150512695 - ], - [ - "▁cumulative", - -12.986103057861328 - ], - [ - "▁Randy", - -12.986183166503906 - ], - [ - "▁instituții", - -12.98622989654541 - ], - [ - "together", - -12.986489295959473 - ], - [ - "▁Sap", - -12.986539840698242 - ], - [ - "▁modificari", - -12.986551284790039 - ], - [ - "▁erosion", - -12.986572265625 - ], - [ - "▁wicked", - -12.986577033996582 - ], - [ - "soaked", - -12.986613273620605 - ], - [ - "▁cellar", - -12.9866361618042 - ], - [ - "ignoring", - -12.986726760864258 - ], - [ - "▁scarce", - -12.986815452575684 - ], - [ - "ueuse", - -12.98697280883789 - ], - [ - "▁bibliothèque", - -12.986995697021484 - ], - [ - "critères", - -12.987017631530762 - ], - [ - "▁overlay", - -12.987166404724121 - ], - [ - "IPA", - -12.98737907409668 - ], - [ - "director", - -12.987393379211426 - ], - [ - "▁Krishna", - -12.987444877624512 - ], - [ - "▁methodologies", - -12.987451553344727 - ], - [ - "iocese", - -12.987513542175293 - ], - [ - "▁saucepan", - -12.987713813781738 - ], - [ - "184", - -12.987948417663574 - ], - [ - "275", - -12.987981796264648 - ], - [ - "▁précieu", - -12.988165855407715 - ], - [ - "▁academy", - -12.9883394241333 - ], - [ - "460", - -12.988438606262207 - ], - [ - "ERN", - -12.988679885864258 - ], - [ - "▁emoti", - -12.988725662231445 - ], - [ - "▁télévision", - -12.988823890686035 - ], - [ - "EDIT", - -12.988901138305664 - ], - [ - "▁Valeri", - -12.989045143127441 - ], - [ - "▁Charity", - -12.98911190032959 - ], - [ - "Voilà", - -12.989297866821289 - ], - [ - "▁lipsit", - -12.989356994628906 - ], - [ - "▁unleash", - -12.989373207092285 - ], - [ - "▁suferit", - -12.989506721496582 - ], - [ - "▁Lifestyle", - -12.98953914642334 - ], - [ - "▁Edel", - -12.989603996276855 - ], - [ - "▁Derek", - -12.989643096923828 - ], - [ - "▁Manga", - -12.989801406860352 - ], - [ - 
"▁increment", - -12.989990234375 - ], - [ - "▁plötzlich", - -12.990133285522461 - ], - [ - "▁5:30", - -12.990208625793457 - ], - [ - "▁Republicii", - -12.990246772766113 - ], - [ - "▁capitalism", - -12.990293502807617 - ], - [ - "ROW", - -12.990510940551758 - ], - [ - "▁Paar", - -12.990523338317871 - ], - [ - "allée", - -12.99057674407959 - ], - [ - "▁motto", - -12.990610122680664 - ], - [ - "Schäden", - -12.990630149841309 - ], - [ - "▁£10", - -12.99063491821289 - ], - [ - "RIP", - -12.990728378295898 - ], - [ - "courir", - -12.990761756896973 - ], - [ - "rocky", - -12.990944862365723 - ], - [ - "▁Sunshine", - -12.991031646728516 - ], - [ - "▁chimney", - -12.991044998168945 - ], - [ - "▁préfér", - -12.991153717041016 - ], - [ - "▁relaxare", - -12.991189956665039 - ], - [ - "▁colabora", - -12.99134349822998 - ], - [ - "liefer", - -12.99142837524414 - ], - [ - "▁ordentlich", - -12.991486549377441 - ], - [ - "▁dauerhaft", - -12.991535186767578 - ], - [ - "kammer", - -12.991572380065918 - ], - [ - "▁Basket", - -12.991579055786133 - ], - [ - "Site", - -12.991657257080078 - ], - [ - "▁Regina", - -12.991716384887695 - ], - [ - "▁simulate", - -12.991868019104004 - ], - [ - "▁wrestle", - -12.991939544677734 - ], - [ - "wertig", - -12.991986274719238 - ], - [ - "▁Christie", - -12.992018699645996 - ], - [ - "download", - -12.992033004760742 - ], - [ - "▁torch", - -12.992213249206543 - ], - [ - "riya", - -12.992216110229492 - ], - [ - "▁Grie", - -12.992247581481934 - ], - [ - "bitten", - -12.992356300354004 - ], - [ - "▁spezialisiert", - -12.99238109588623 - ], - [ - "▁Parade", - -12.992408752441406 - ], - [ - "▁migraine", - -12.992830276489258 - ], - [ - "▁Armstrong", - -12.992846488952637 - ], - [ - "▁cutie", - -12.9928560256958 - ], - [ - "▁bullying", - -12.992889404296875 - ], - [ - "▁Estonia", - -12.99293041229248 - ], - [ - "▁harvested", - -12.992948532104492 - ], - [ - "▁Hunger", - -12.992971420288086 - ], - [ - "▁frapp", - -12.992999076843262 - ], - [ - "REM", - 
-12.993117332458496 - ], - [ - "sensor", - -12.993189811706543 - ], - [ - "▁GREAT", - -12.993293762207031 - ], - [ - "▁thyroid", - -12.993302345275879 - ], - [ - "▁mărturi", - -12.993335723876953 - ], - [ - "ocupă", - -12.993809700012207 - ], - [ - "▁Wealth", - -12.993812561035156 - ], - [ - "▁convins", - -12.993841171264648 - ], - [ - "141", - -12.993876457214355 - ], - [ - "▁vingt", - -12.993901252746582 - ], - [ - "▁revel", - -12.994054794311523 - ], - [ - "▁Adri", - -12.994083404541016 - ], - [ - "▁remix", - -12.994207382202148 - ], - [ - "▁fermentation", - -12.99425220489502 - ], - [ - "▁achiziti", - -12.994352340698242 - ], - [ - "dream", - -12.994426727294922 - ], - [ - "▁contemporan", - -12.994632720947266 - ], - [ - "▁youngsters", - -12.994685173034668 - ], - [ - "▁Hartford", - -12.994745254516602 - ], - [ - "▁Wagen", - -12.994988441467285 - ], - [ - "▁Celebr", - -12.995214462280273 - ], - [ - "leveraging", - -12.99527645111084 - ], - [ - "▁Iasi", - -12.99549674987793 - ], - [ - "tackling", - -12.9955415725708 - ], - [ - "▁intrinsic", - -12.995553970336914 - ], - [ - "▁Macedon", - -12.995603561401367 - ], - [ - "NIA", - -12.995784759521484 - ], - [ - "▁bliss", - -12.995905876159668 - ], - [ - "▁gradual", - -12.995908737182617 - ], - [ - "▁inregistrat", - -12.995981216430664 - ], - [ - "▁volleyball", - -12.995986938476562 - ], - [ - "▁offiziell", - -12.996054649353027 - ], - [ - "▁carré", - -12.99611759185791 - ], - [ - "Mostly", - -12.996174812316895 - ], - [ - "▁Harley", - -12.996193885803223 - ], - [ - "▁locati", - -12.996216773986816 - ], - [ - "▁Klo", - -12.996223449707031 - ], - [ - "▁Equal", - -12.996238708496094 - ], - [ - "▁citat", - -12.996369361877441 - ], - [ - "▁argint", - -12.996478080749512 - ], - [ - "prüft", - -12.996528625488281 - ], - [ - "▁Fence", - -12.996600151062012 - ], - [ - "positive", - -12.996988296508789 - ], - [ - "▁Kaz", - -12.997245788574219 - ], - [ - "▁distortion", - -12.997342109680176 - ], - [ - "▁sâmbătă", - 
-12.997342109680176 - ], - [ - "▁frontière", - -12.997346878051758 - ], - [ - "▁revanch", - -12.997394561767578 - ], - [ - "▁Held", - -12.997465133666992 - ], - [ - "▁Hobb", - -12.99776554107666 - ], - [ - "▁reuşit", - -12.997796058654785 - ], - [ - "deem", - -12.997880935668945 - ], - [ - "▁dorint", - -12.997902870178223 - ], - [ - "▁Anlagen", - -12.997908592224121 - ], - [ - "▁cheval", - -12.997973442077637 - ], - [ - "630", - -12.99806022644043 - ], - [ - "▁implementare", - -12.99808406829834 - ], - [ - "▁curator", - -12.99821662902832 - ], - [ - "▁legislator", - -12.998247146606445 - ], - [ - "▁potassium", - -12.998247146606445 - ], - [ - "▁veterinarian", - -12.998247146606445 - ], - [ - "▁domenii", - -12.998273849487305 - ], - [ - "▁revue", - -12.998310089111328 - ], - [ - "Vielen", - -12.998333930969238 - ], - [ - "africain", - -12.998570442199707 - ], - [ - "before", - -12.998680114746094 - ], - [ - "▁Bestandteil", - -12.998702049255371 - ], - [ - "▁(2010)", - -12.998767852783203 - ], - [ - "▁Arlington", - -12.999153137207031 - ], - [ - "▁Gründung", - -12.999153137207031 - ], - [ - "▁Sprinkle", - -12.999153137207031 - ], - [ - "▁Princeton", - -12.999186515808105 - ], - [ - "chirurg", - -12.999228477478027 - ], - [ - "▁laissé", - -12.999357223510742 - ], - [ - "whoever", - -12.999384880065918 - ], - [ - "▁pasture", - -12.999431610107422 - ], - [ - "ajute", - -12.999436378479004 - ], - [ - "▁joyful", - -12.999494552612305 - ], - [ - "etapa", - -12.999905586242676 - ], - [ - "ESP", - -13.000017166137695 - ], - [ - "▁Iohannis", - -13.000059127807617 - ], - [ - "▁10:30", - -13.000127792358398 - ], - [ - "▁Kingston", - -13.000140190124512 - ], - [ - "▁contender", - -13.000164031982422 - ], - [ - "▁Damage", - -13.000177383422852 - ], - [ - "▁schreibt", - -13.000482559204102 - ], - [ - "sstisch", - -13.000631332397461 - ], - [ - "Associated", - -13.00072956085205 - ], - [ - "▁disposable", - -13.000782012939453 - ], - [ - "veranstaltung", - -13.00096607208252 - ], - 
[ - "▁puppet", - -13.00100040435791 - ], - [ - "pong", - -13.001093864440918 - ], - [ - "▁Chronicle", - -13.001176834106445 - ], - [ - "222", - -13.001286506652832 - ], - [ - "intuit", - -13.001396179199219 - ], - [ - "inscrire", - -13.001429557800293 - ], - [ - "▁speeches", - -13.001431465148926 - ], - [ - "▁Eingang", - -13.001775741577148 - ], - [ - "▁Adidas", - -13.001875877380371 - ], - [ - "▁cemetery", - -13.001877784729004 - ], - [ - "▁juicy", - -13.001885414123535 - ], - [ - "▁wertvolle", - -13.0018892288208 - ], - [ - "▁militari", - -13.001917839050293 - ], - [ - "China", - -13.00196361541748 - ], - [ - "ecția", - -13.002041816711426 - ], - [ - "luster", - -13.002063751220703 - ], - [ - "auftrag", - -13.00234317779541 - ], - [ - "▁Marius", - -13.002523422241211 - ], - [ - "▁crossover", - -13.002555847167969 - ], - [ - "▁enthusiast", - -13.002555847167969 - ], - [ - "▁cantitate", - -13.002630233764648 - ], - [ - "▁animat", - -13.002634048461914 - ], - [ - "Park", - -13.002793312072754 - ], - [ - "▁unchanged", - -13.00279426574707 - ], - [ - "russia", - -13.00281810760498 - ], - [ - "instant", - -13.002833366394043 - ], - [ - "ţiunea", - -13.002835273742676 - ], - [ - "▁franchi", - -13.002920150756836 - ], - [ - "▁mobiliz", - -13.002963066101074 - ], - [ - "athlet", - -13.003013610839844 - ], - [ - "▁Cardio", - -13.0031099319458 - ], - [ - "▁supus", - -13.003119468688965 - ], - [ - "▁Griff", - -13.003137588500977 - ], - [ - "flakes", - -13.003217697143555 - ], - [ - "soluble", - -13.003250122070312 - ], - [ - "Known", - -13.003693580627441 - ], - [ - "leaking", - -13.003741264343262 - ], - [ - "▁Holocaust", - -13.004148483276367 - ], - [ - "gift", - -13.004197120666504 - ], - [ - "▁tradiţi", - -13.004359245300293 - ], - [ - "▁southeast", - -13.004498481750488 - ], - [ - "▁correspondant", - -13.00460147857666 - ], - [ - "Isaiah", - -13.004603385925293 - ], - [ - "▁diagonal", - -13.004606246948242 - ], - [ - "▁Probabil", - -13.004680633544922 - ], - [ - 
"▁dégust", - -13.004791259765625 - ], - [ - "▁Naval", - -13.004802703857422 - ], - [ - "▁cultivation", - -13.004839897155762 - ], - [ - "▁Vertrieb", - -13.004849433898926 - ], - [ - "▁pony", - -13.004854202270508 - ], - [ - "▁Throw", - -13.0050048828125 - ], - [ - "little", - -13.005010604858398 - ], - [ - "▁remarque", - -13.005074501037598 - ], - [ - "▁parcare", - -13.005085945129395 - ], - [ - "3.8", - -13.00518798828125 - ], - [ - "▁renunt", - -13.005330085754395 - ], - [ - "▁Rewards", - -13.005487442016602 - ], - [ - "▁Thur", - -13.005496978759766 - ], - [ - "▁underestimate", - -13.005515098571777 - ], - [ - "▁frankly", - -13.005516052246094 - ], - [ - "Bretagne", - -13.005517959594727 - ], - [ - "axial", - -13.005537986755371 - ], - [ - "▁identities", - -13.0055570602417 - ], - [ - "▁Harvest", - -13.00561237335205 - ], - [ - "▁skippe", - -13.00561237335205 - ], - [ - "▁Boutique", - -13.005670547485352 - ], - [ - "▁intuition", - -13.005746841430664 - ], - [ - "▁Rotary", - -13.00581169128418 - ], - [ - "▁SERVICE", - -13.005875587463379 - ], - [ - "▁refill", - -13.005915641784668 - ], - [ - "▁arcade", - -13.006060600280762 - ], - [ - "▁komme", - -13.006386756896973 - ], - [ - "▁irrelevant", - -13.006427764892578 - ], - [ - "▁Sortiment", - -13.006429672241211 - ], - [ - "▁scriitor", - -13.006488800048828 - ], - [ - "▁clicked", - -13.006516456604004 - ], - [ - "▁ciel", - -13.006610870361328 - ], - [ - "▁Caesar", - -13.00680160522461 - ], - [ - "hound", - -13.006803512573242 - ], - [ - "whipped", - -13.006843566894531 - ], - [ - "licate", - -13.006867408752441 - ], - [ - "▁formatting", - -13.006986618041992 - ], - [ - "▁mosaic", - -13.007028579711914 - ], - [ - "(2017)", - -13.007122039794922 - ], - [ - "777", - -13.007257461547852 - ], - [ - "▁Messenger", - -13.007342338562012 - ], - [ - "dulci", - -13.007369041442871 - ], - [ - "▁(2016)", - -13.007420539855957 - ], - [ - "▁popcorn", - -13.007425308227539 - ], - [ - "▁Presidential", - -13.007497787475586 - ], - [ - 
"▁brokerage", - -13.007564544677734 - ], - [ - "dachte", - -13.00762939453125 - ], - [ - "verkauf", - -13.00768756866455 - ], - [ - "▁pomme", - -13.007721900939941 - ], - [ - "▁fret", - -13.007822036743164 - ], - [ - "▁revere", - -13.007894515991211 - ], - [ - "▁Canvas", - -13.008092880249023 - ], - [ - "▁Nottingham", - -13.008255004882812 - ], - [ - "▁Refuge", - -13.008257865905762 - ], - [ - "▁injustice", - -13.008259773254395 - ], - [ - "▁External", - -13.008264541625977 - ], - [ - "dincolo", - -13.008304595947266 - ], - [ - "directing", - -13.008511543273926 - ], - [ - "▁Toulouse", - -13.008710861206055 - ], - [ - "▁cheltuieli", - -13.008746147155762 - ], - [ - "▁distrus", - -13.008816719055176 - ], - [ - "impôt", - -13.008912086486816 - ], - [ - "landschaft", - -13.008964538574219 - ], - [ - "passion", - -13.00897216796875 - ], - [ - "▁Hobby", - -13.009099006652832 - ], - [ - "significant", - -13.009115219116211 - ], - [ - "▁Guinea", - -13.009209632873535 - ], - [ - "pecializing", - -13.009237289428711 - ], - [ - "pozitie", - -13.009245872497559 - ], - [ - "bourne", - -13.009295463562012 - ], - [ - "▁mâini", - -13.00933837890625 - ], - [ - "▁CFR", - -13.009395599365234 - ], - [ - "▁Konflikt", - -13.009626388549805 - ], - [ - "▁Vodafone", - -13.009626388549805 - ], - [ - "OUG", - -13.009681701660156 - ], - [ - "▁Übersicht", - -13.009735107421875 - ], - [ - "negotiated", - -13.009903907775879 - ], - [ - "▁gliss", - -13.010042190551758 - ], - [ - "▁Kapital", - -13.010111808776855 - ], - [ - "QC", - -13.0101318359375 - ], - [ - "▁gentleman", - -13.01024341583252 - ], - [ - "Inde", - -13.010514259338379 - ], - [ - "▁immensely", - -13.010639190673828 - ], - [ - "Business", - -13.010702133178711 - ], - [ - "▁04/2", - -13.010882377624512 - ], - [ - "societatea", - -13.010973930358887 - ], - [ - "fluoxetine", - -13.011000633239746 - ], - [ - "▁Wachstum", - -13.011000633239746 - ], - [ - "▁récit", - -13.011011123657227 - ], - [ - "▁Preisvergleich", - -13.011034965515137 
- ], - [ - "▁Mohammed", - -13.011460304260254 - ], - [ - "gefangen", - -13.011462211608887 - ], - [ - "▁calibration", - -13.011608123779297 - ], - [ - "bekam", - -13.011728286743164 - ], - [ - "▁FUN", - -13.011758804321289 - ], - [ - "wasting", - -13.011839866638184 - ], - [ - "▁prosper", - -13.011862754821777 - ], - [ - "▁Afghan", - -13.011919021606445 - ], - [ - "▁Heroes", - -13.011921882629395 - ], - [ - "▁VMware", - -13.011927604675293 - ], - [ - "exception", - -13.011969566345215 - ], - [ - "▁înlocui", - -13.01244831085205 - ], - [ - "Neu", - -13.01246452331543 - ], - [ - "initiation", - -13.01250171661377 - ], - [ - "▁Peel", - -13.01281452178955 - ], - [ - "▁cunoaste", - -13.012836456298828 - ], - [ - "▁menschliche", - -13.012849807739258 - ], - [ - "▁poarta", - -13.012852668762207 - ], - [ - "▁congestion", - -13.012930870056152 - ], - [ - "▁îmbunătăț", - -13.013103485107422 - ], - [ - "EUR", - -13.013171195983887 - ], - [ - "▁sushi", - -13.01326847076416 - ], - [ - "Jährige", - -13.01329517364502 - ], - [ - "espoir", - -13.013423919677734 - ], - [ - "inspected", - -13.013444900512695 - ], - [ - "▁etape", - -13.013677597045898 - ], - [ - "▁pharmacist", - -13.013754844665527 - ], - [ - "flect", - -13.013840675354004 - ], - [ - "Changing", - -13.013932228088379 - ], - [ - "▁radiant", - -13.014046669006348 - ], - [ - "Daddy", - -13.014275550842285 - ], - [ - "▁categorii", - -13.014360427856445 - ], - [ - "quête", - -13.014628410339355 - ], - [ - "▁skincare", - -13.014657020568848 - ], - [ - "hébergement", - -13.014674186706543 - ], - [ - "840", - -13.01477336883545 - ], - [ - "awaiting", - -13.014822006225586 - ], - [ - "▁murdered", - -13.014841079711914 - ], - [ - "▁proficient", - -13.014863967895508 - ], - [ - "▁chauffe", - -13.014899253845215 - ], - [ - "▁contur", - -13.014937400817871 - ], - [ - "▁rejoindre", - -13.015145301818848 - ], - [ - "▁foloseste", - -13.01521110534668 - ], - [ - "▁Grup", - -13.01535701751709 - ], - [ - "152", - -13.01541519165039 - 
], - [ - "▁workspace", - -13.015438079833984 - ], - [ - "▁primitive", - -13.015546798706055 - ], - [ - "▁Ginger", - -13.015557289123535 - ], - [ - "▁chemotherapy", - -13.015595436096191 - ], - [ - "▁platinum", - -13.015596389770508 - ], - [ - "▁sarcina", - -13.01559829711914 - ], - [ - "▁revival", - -13.015820503234863 - ], - [ - "▁Meditation", - -13.016111373901367 - ], - [ - "▁Vogel", - -13.0161714553833 - ], - [ - "IMA", - -13.016359329223633 - ], - [ - "▁handset", - -13.016486167907715 - ], - [ - "▁Nachmittag", - -13.01651668548584 - ], - [ - "▁déchets", - -13.016517639160156 - ], - [ - "▁Cornwall", - -13.0165433883667 - ], - [ - "▁Curry", - -13.016605377197266 - ], - [ - "▁cuplu", - -13.016607284545898 - ], - [ - "▁Birth", - -13.016822814941406 - ], - [ - "forward", - -13.016936302185059 - ], - [ - "Dezvoltare", - -13.016977310180664 - ], - [ - "▁irgendwie", - -13.016980171203613 - ], - [ - "▁erzielt", - -13.016993522644043 - ], - [ - "LOS", - -13.01700496673584 - ], - [ - "▁overload", - -13.01708984375 - ], - [ - "▁repay", - -13.01713752746582 - ], - [ - "urlaub", - -13.017155647277832 - ], - [ - "7.0", - -13.01716423034668 - ], - [ - "▁Wheat", - -13.01748275756836 - ], - [ - "▁degrab", - -13.017488479614258 - ], - [ - "▁Brock", - -13.017491340637207 - ], - [ - "▁inhabit", - -13.0176362991333 - ], - [ - "▁Speech", - -13.017834663391113 - ], - [ - "directional", - -13.017862319946289 - ], - [ - "▁Mandel", - -13.017909049987793 - ], - [ - "▁erscheinen", - -13.01791763305664 - ], - [ - "consciously", - -13.018059730529785 - ], - [ - "▁sunet", - -13.0182523727417 - ], - [ - "▁stole", - -13.018259048461914 - ], - [ - "▁Utilis", - -13.018349647521973 - ], - [ - "▁obstruction", - -13.01852798461914 - ], - [ - "▁mindfulness", - -13.0186767578125 - ], - [ - "partnering", - -13.01868724822998 - ], - [ - "CSI", - -13.018819808959961 - ], - [ - "204", - -13.01905632019043 - ], - [ - "▁squirrel", - -13.019286155700684 - ], - [ - "▁Rwanda", - -13.01975154876709 - ], - [ - 
"▁hunters", - -13.019850730895996 - ], - [ - "▁revitaliz", - -13.02022647857666 - ], - [ - "▁avansat", - -13.020232200622559 - ], - [ - "▁Yamaha", - -13.020294189453125 - ], - [ - "foto", - -13.020435333251953 - ], - [ - "▁Vegan", - -13.020469665527344 - ], - [ - "▁pitched", - -13.02053165435791 - ], - [ - "▁Vortrag", - -13.020540237426758 - ], - [ - "traditional", - -13.020809173583984 - ], - [ - "offrent", - -13.021024703979492 - ], - [ - "▁Expression", - -13.021315574645996 - ], - [ - "▁apprécié", - -13.021354675292969 - ], - [ - "▁Christina", - -13.021408081054688 - ], - [ - "eilig", - -13.021464347839355 - ], - [ - "▁verhindern", - -13.021599769592285 - ], - [ - "culturii", - -13.021607398986816 - ], - [ - "Aşa", - -13.021703720092773 - ], - [ - "▁enamel", - -13.021756172180176 - ], - [ - "▁fördern", - -13.021771430969238 - ], - [ - "▁acheté", - -13.021798133850098 - ], - [ - "▁eventuell", - -13.021842956542969 - ], - [ - "▁Sino", - -13.021873474121094 - ], - [ - "▁totodat", - -13.022008895874023 - ], - [ - "accelerated", - -13.022202491760254 - ], - [ - "▁strengthened", - -13.02245044708252 - ], - [ - "corro", - -13.022482872009277 - ], - [ - "4,5", - -13.02253246307373 - ], - [ - "▁Beverly", - -13.022533416748047 - ], - [ - "ulevard", - -13.022615432739258 - ], - [ - "▁hamper", - -13.022644996643066 - ], - [ - "▁Tempe", - -13.02268123626709 - ], - [ - "▁Yacht", - -13.022799491882324 - ], - [ - "▁LGBT", - -13.022871017456055 - ], - [ - "▁fingertips", - -13.022991180419922 - ], - [ - "▁Auftraggeber", - -13.02299976348877 - ], - [ - "▁harbour", - -13.0230131149292 - ], - [ - "blew", - -13.0230712890625 - ], - [ - "▁ideology", - -13.023115158081055 - ], - [ - "▁covenant", - -13.023170471191406 - ], - [ - "▁faction", - -13.023419380187988 - ], - [ - "▁animé", - -13.023481369018555 - ], - [ - "energie", - -13.023515701293945 - ], - [ - "iterführende", - -13.02369499206543 - ], - [ - "▁MAI", - -13.023784637451172 - ], - [ - "▁pluie", - -13.023905754089355 - ], - [ 
- "▁cathedral", - -13.023919105529785 - ], - [ - "▁chiropractic", - -13.023919105529785 - ], - [ - "monies", - -13.023968696594238 - ], - [ - "▁contraction", - -13.024054527282715 - ], - [ - "pvc", - -13.024202346801758 - ], - [ - "staff", - -13.024209022521973 - ], - [ - "BIT", - -13.024216651916504 - ], - [ - "EET", - -13.024514198303223 - ], - [ - "▁sanction", - -13.024575233459473 - ], - [ - "▁Reiki", - -13.024709701538086 - ], - [ - "Trying", - -13.024772644042969 - ], - [ - "▁endangered", - -13.024847984313965 - ], - [ - "▁Emperor", - -13.024849891662598 - ], - [ - "▁empfi", - -13.024909973144531 - ], - [ - "animation", - -13.024998664855957 - ], - [ - "207", - -13.025029182434082 - ], - [ - "separating", - -13.02512264251709 - ], - [ - "▁lucrative", - -13.025148391723633 - ], - [ - "▁ortho", - -13.02524185180664 - ], - [ - "variété", - -13.025266647338867 - ], - [ - "hésit", - -13.025287628173828 - ], - [ - "nuances", - -13.025289535522461 - ], - [ - "▁$250", - -13.025394439697266 - ], - [ - "▁drumuri", - -13.025435447692871 - ], - [ - "▁unsafe", - -13.025446891784668 - ], - [ - "▁1943", - -13.025477409362793 - ], - [ - "▁automatique", - -13.025524139404297 - ], - [ - "billed", - -13.025585174560547 - ], - [ - "▁rectangle", - -13.02578067779541 - ], - [ - "▁Spannung", - -13.025781631469727 - ], - [ - "▁dévoil", - -13.025790214538574 - ], - [ - "▁perimeter", - -13.02580738067627 - ], - [ - "▁imaginative", - -13.02581787109375 - ], - [ - "actifs", - -13.025851249694824 - ], - [ - "neuve", - -13.0259428024292 - ], - [ - "leagă", - -13.026269912719727 - ], - [ - "gehende", - -13.026700973510742 - ], - [ - "▁Gorgeous", - -13.026708602905273 - ], - [ - "▁impeccable", - -13.026708602905273 - ], - [ - "▁Curtain", - -13.026718139648438 - ], - [ - "▁presume", - -13.026731491088867 - ], - [ - "surpassed", - -13.02687931060791 - ], - [ - "schiff", - -13.026927947998047 - ], - [ - "Allied", - -13.02699089050293 - ], - [ - "fanden", - -13.027080535888672 - ], - [ - 
"▁célébr", - -13.027174949645996 - ], - [ - "▁phénomène", - -13.027174949645996 - ], - [ - "▁Powell", - -13.027413368225098 - ], - [ - "jean", - -13.027631759643555 - ], - [ - "▁peculiar", - -13.027640342712402 - ], - [ - "▁Antarctic", - -13.027641296386719 - ], - [ - "▁gradient", - -13.027663230895996 - ], - [ - "▁brainstorm", - -13.027704238891602 - ], - [ - "échapp", - -13.027726173400879 - ], - [ - "Bot", - -13.027738571166992 - ], - [ - "cita", - -13.027743339538574 - ], - [ - "▁lumber", - -13.027752876281738 - ], - [ - "weichen", - -13.027852058410645 - ], - [ - "▁Halte", - -13.028024673461914 - ], - [ - "▁noștri", - -13.028107643127441 - ], - [ - "construction", - -13.028165817260742 - ], - [ - "DOC", - -13.028236389160156 - ], - [ - "▁aluat", - -13.028319358825684 - ], - [ - "streamlined", - -13.028462409973145 - ], - [ - "Bio", - -13.028494834899902 - ], - [ - "▁nutritious", - -13.028573036193848 - ], - [ - "▁délicat", - -13.0286283493042 - ], - [ - "▁sticla", - -13.028656959533691 - ], - [ - "OVE", - -13.028721809387207 - ], - [ - "▁panneau", - -13.028793334960938 - ], - [ - "▁hetero", - -13.028801918029785 - ], - [ - "▁annul", - -13.028839111328125 - ], - [ - "IDA", - -13.028935432434082 - ], - [ - "▁pitches", - -13.028960227966309 - ], - [ - "▁Edmonton", - -13.029040336608887 - ], - [ - "mediated", - -13.029136657714844 - ], - [ - "AFP", - -13.029139518737793 - ], - [ - "▁Tibetan", - -13.029228210449219 - ], - [ - "intégration", - -13.02934455871582 - ], - [ - "▁Rox", - -13.0294771194458 - ], - [ - "energia", - -13.02950668334961 - ], - [ - "▁reconnaît", - -13.029509544372559 - ], - [ - "▁ține", - -13.029525756835938 - ], - [ - "▁ignition", - -13.029534339904785 - ], - [ - "Foarte", - -13.029541015625 - ], - [ - "▁HOME", - -13.029545783996582 - ], - [ - "▁MLB", - -13.029545783996582 - ], - [ - "▁Wähle", - -13.029590606689453 - ], - [ - "▁Merkel", - -13.029658317565918 - ], - [ - "poarte", - -13.029664993286133 - ], - [ - "ALT", - -13.02979850769043 - ], 
- [ - "jenigen", - -13.029985427856445 - ], - [ - "▁conflit", - -13.029987335205078 - ], - [ - "▁buckle", - -13.029996871948242 - ], - [ - "▁cacao", - -13.030035018920898 - ], - [ - "▁représentation", - -13.030076026916504 - ], - [ - "incepand", - -13.030267715454102 - ], - [ - "▁Carroll", - -13.030306816101074 - ], - [ - "▁clientilor", - -13.030370712280273 - ], - [ - "▁immunity", - -13.030441284179688 - ], - [ - "oût", - -13.03044319152832 - ], - [ - "▁Witch", - -13.030488014221191 - ], - [ - "▁Wolfgang", - -13.030532836914062 - ], - [ - "▁prudent", - -13.030701637268066 - ], - [ - "fotograf", - -13.03084945678711 - ], - [ - "paar", - -13.030871391296387 - ], - [ - "ergeti", - -13.030927658081055 - ], - [ - "▁empowerment", - -13.031112670898438 - ], - [ - "▁Admir", - -13.03122329711914 - ], - [ - "▁complémentaire", - -13.031340599060059 - ], - [ - "▁angepasst", - -13.031376838684082 - ], - [ - "▁flirt", - -13.031376838684082 - ], - [ - "▁elektronische", - -13.031388282775879 - ], - [ - "▁stereotype", - -13.03140640258789 - ], - [ - "SIL", - -13.031465530395508 - ], - [ - "▁Realtor", - -13.031471252441406 - ], - [ - "Edit", - -13.031774520874023 - ], - [ - "requête", - -13.03181266784668 - ], - [ - "▁Herstellung", - -13.031815528869629 - ], - [ - "▁cyst", - -13.031947135925293 - ], - [ - "syndic", - -13.031994819641113 - ], - [ - "leni", - -13.032007217407227 - ], - [ - "▁fringe", - -13.032020568847656 - ], - [ - "▁Jardin", - -13.032032012939453 - ], - [ - "▁Vezi", - -13.032052993774414 - ], - [ - "▁Ausstattung", - -13.032312393188477 - ], - [ - "▁glide", - -13.032590866088867 - ], - [ - "▁Andere", - -13.032758712768555 - ], - [ - "▁Haftung", - -13.032781600952148 - ], - [ - "maßnahmen", - -13.032788276672363 - ], - [ - "▁recommandé", - -13.032790184020996 - ], - [ - "▁nave", - -13.032793998718262 - ], - [ - "viziune", - -13.033051490783691 - ], - [ - "▁stimulus", - -13.033098220825195 - ], - [ - "faulty", - -13.0331449508667 - ], - [ - "▁vicinity", - 
-13.033249855041504 - ], - [ - "▁turnaround", - -13.033445358276367 - ], - [ - "stammt", - -13.033846855163574 - ], - [ - "▁problemlos", - -13.033856391906738 - ], - [ - "▁Establish", - -13.03415298461914 - ], - [ - "▁Silva", - -13.034172058105469 - ], - [ - "▁muzică", - -13.034187316894531 - ], - [ - "▁theatrical", - -13.03421401977539 - ], - [ - "▁braid", - -13.034242630004883 - ], - [ - "▁blieb", - -13.034276962280273 - ], - [ - "158", - -13.034296989440918 - ], - [ - "▁ignorance", - -13.034330368041992 - ], - [ - "onset", - -13.034416198730469 - ], - [ - "zeitlich", - -13.034523963928223 - ], - [ - "▁Sink", - -13.034523963928223 - ], - [ - "▁caractéris", - -13.034594535827637 - ], - [ - "▁kreative", - -13.03465747833252 - ], - [ - "behörde", - -13.034677505493164 - ], - [ - "repairing", - -13.034680366516113 - ], - [ - "▁tumble", - -13.034757614135742 - ], - [ - "zione", - -13.034871101379395 - ], - [ - "▁Evil", - -13.03494644165039 - ], - [ - "▁popping", - -13.034952163696289 - ], - [ - "▁mutant", - -13.035025596618652 - ], - [ - "emme", - -13.035030364990234 - ], - [ - "▁Pleasant", - -13.035125732421875 - ], - [ - "▁appetizer", - -13.035125732421875 - ], - [ - "▁PLEASE", - -13.035126686096191 - ], - [ - "▁physiological", - -13.035128593444824 - ], - [ - "▁Facility", - -13.035131454467773 - ], - [ - "▁quirky", - -13.035131454467773 - ], - [ - "▁colectiv", - -13.035154342651367 - ], - [ - "151", - -13.035181999206543 - ], - [ - "August", - -13.03531551361084 - ], - [ - "▁Jewelry", - -13.035327911376953 - ], - [ - "▁ziar", - -13.035481452941895 - ], - [ - "▁puissant", - -13.035489082336426 - ], - [ - "▁Argument", - -13.035595893859863 - ], - [ - "▁Betracht", - -13.035621643066406 - ], - [ - "▁TRANS", - -13.035636901855469 - ], - [ - "Exception", - -13.036011695861816 - ], - [ - "nosti", - -13.036083221435547 - ], - [ - "▁Geographic", - -13.036155700683594 - ], - [ - "amazingly", - -13.036173820495605 - ], - [ - "▁météo", - -13.036181449890137 - ], - [ - 
"streit", - -13.036314010620117 - ], - [ - "▁idle", - -13.036439895629883 - ], - [ - "179", - -13.036441802978516 - ], - [ - "▁Bremen", - -13.036534309387207 - ], - [ - "▁Kläger", - -13.03653621673584 - ], - [ - "▁Grammy", - -13.036598205566406 - ], - [ - "▁Philosophy", - -13.036613464355469 - ], - [ - "▁utilizeaz", - -13.036779403686523 - ], - [ - "Accord", - -13.036897659301758 - ], - [ - "▁USDA", - -13.036986351013184 - ], - [ - "Continuing", - -13.037010192871094 - ], - [ - "geschenk", - -13.037178039550781 - ], - [ - "kredit", - -13.037248611450195 - ], - [ - "Laugh", - -13.037297248840332 - ], - [ - "oaring", - -13.037406921386719 - ], - [ - "▁Richter", - -13.037460327148438 - ], - [ - "▁Figur", - -13.037938117980957 - ], - [ - "▁inconsistent", - -13.037947654724121 - ], - [ - "cresterea", - -13.038069725036621 - ], - [ - "▁regeneration", - -13.038130760192871 - ], - [ - "speaking", - -13.03818416595459 - ], - [ - "▁nasal", - -13.03824234008789 - ], - [ - "▁partagé", - -13.038259506225586 - ], - [ - "▁Warranty", - -13.038419723510742 - ], - [ - "▁Mueller", - -13.038501739501953 - ], - [ - "formează", - -13.038734436035156 - ], - [ - "hundert", - -13.038745880126953 - ], - [ - "gemeldet", - -13.038893699645996 - ], - [ - "▁excursions", - -13.038912773132324 - ], - [ - "▁linii", - -13.039066314697266 - ], - [ - "gefährlich", - -13.039067268371582 - ], - [ - "▁schema", - -13.03907299041748 - ], - [ - "nişte", - -13.039131164550781 - ], - [ - "▁roadway", - -13.039132118225098 - ], - [ - "▁regression", - -13.039135932922363 - ], - [ - "▁mână", - -13.039366722106934 - ], - [ - "5.3", - -13.039373397827148 - ], - [ - "▁Spät", - -13.039734840393066 - ], - [ - "▁stubborn", - -13.039833068847656 - ], - [ - "efectele", - -13.040030479431152 - ], - [ - "▁atenţi", - -13.040136337280273 - ], - [ - "▁dovedit", - -13.04018497467041 - ], - [ - "▁Agile", - -13.040190696716309 - ], - [ - "denying", - -13.04023265838623 - ], - [ - "fluss", - -13.040620803833008 - ], - [ - 
"▁Calvin", - -13.04066276550293 - ], - [ - "Sculpt", - -13.04083251953125 - ], - [ - "égalité", - -13.040884971618652 - ], - [ - "ticket", - -13.040977478027344 - ], - [ - "marketed", - -13.041044235229492 - ], - [ - "holic", - -13.041173934936523 - ], - [ - "▁eCommerce", - -13.041346549987793 - ], - [ - "▁Slip", - -13.041369438171387 - ], - [ - "▁degradation", - -13.041736602783203 - ], - [ - "écart", - -13.041742324829102 - ], - [ - "AGR", - -13.041807174682617 - ], - [ - "▁burglar", - -13.041837692260742 - ], - [ - "▁conjug", - -13.041903495788574 - ], - [ - "LLP", - -13.04194164276123 - ], - [ - "couvrir", - -13.041997909545898 - ], - [ - "▁Hearing", - -13.042001724243164 - ], - [ - "▁canton", - -13.042006492614746 - ], - [ - "▁sixteen", - -13.042068481445312 - ], - [ - "▁Verlust", - -13.042097091674805 - ], - [ - "allied", - -13.042268753051758 - ], - [ - "Performing", - -13.042393684387207 - ], - [ - "▁évoqu", - -13.042519569396973 - ], - [ - "▁bookstore", - -13.042574882507324 - ], - [ - "▁intrebari", - -13.042627334594727 - ], - [ - "▁Hyderabad", - -13.042668342590332 - ], - [ - "▁repertoire", - -13.042668342590332 - ], - [ - "▁cablu", - -13.042678833007812 - ], - [ - "▁Costume", - -13.04269790649414 - ], - [ - "▁Shannon", - -13.042713165283203 - ], - [ - "▁glossy", - -13.042800903320312 - ], - [ - "▁cible", - -13.042876243591309 - ], - [ - "Saint", - -13.042984008789062 - ], - [ - "▁Ultima", - -13.043042182922363 - ], - [ - "▁teint", - -13.0432767868042 - ], - [ - "▁envision", - -13.043477058410645 - ], - [ - "▁thinner", - -13.043478965759277 - ], - [ - "ис", - -13.043609619140625 - ], - [ - "▁bladder", - -13.043615341186523 - ], - [ - "▁Prairie", - -13.043618202209473 - ], - [ - "▁puppies", - -13.043633460998535 - ], - [ - "▁overweight", - -13.043729782104492 - ], - [ - "destined", - -13.043925285339355 - ], - [ - "▁addictive", - -13.043935775756836 - ], - [ - "▁posé", - -13.043993949890137 - ], - [ - "▁mecanism", - -13.044112205505371 - ], - [ - 
"▁chorus", - -13.044466972351074 - ], - [ - "weder", - -13.044528007507324 - ], - [ - "▁begrüß", - -13.044562339782715 - ], - [ - "▁unsuccessful", - -13.044562339782715 - ], - [ - "executing", - -13.044564247131348 - ], - [ - "▁metadata", - -13.044611930847168 - ], - [ - "traiter", - -13.044620513916016 - ], - [ - "▁borrowed", - -13.044649124145508 - ], - [ - "▁aeroport", - -13.044679641723633 - ], - [ - "▁Bibli", - -13.044761657714844 - ], - [ - "▁youthful", - -13.044902801513672 - ], - [ - "▁Herbert", - -13.044913291931152 - ], - [ - "client", - -13.04500961303711 - ], - [ - "merci", - -13.04520034790039 - ], - [ - "▁Beast", - -13.045210838317871 - ], - [ - "▁Entrepreneur", - -13.045230865478516 - ], - [ - "▁Gelände", - -13.045256614685059 - ], - [ - "▁Packers", - -13.045268058776855 - ], - [ - "formarea", - -13.045469284057617 - ], - [ - "▁Kündigung", - -13.045511245727539 - ], - [ - "▁verdient", - -13.045515060424805 - ], - [ - "▁solutie", - -13.045530319213867 - ], - [ - "figuration", - -13.045611381530762 - ], - [ - "voluntarily", - -13.045622825622559 - ], - [ - "Gregor", - -13.045742988586426 - ], - [ - "▁Uncle", - -13.04589557647705 - ], - [ - "tarifs", - -13.045907020568848 - ], - [ - "▁écologique", - -13.045987129211426 - ], - [ - "▁Investition", - -13.045991897583008 - ], - [ - "exemplar", - -13.046127319335938 - ], - [ - "▁prevede", - -13.046144485473633 - ], - [ - "▁waive", - -13.046147346496582 - ], - [ - "▁Legion", - -13.046156883239746 - ], - [ - "similar", - -13.046247482299805 - ], - [ - "▁shareholder", - -13.04626750946045 - ], - [ - "▁oyster", - -13.046476364135742 - ], - [ - "▁Lightning", - -13.046530723571777 - ], - [ - "experimenting", - -13.04662799835205 - ], - [ - "▁replies", - -13.04663372039795 - ], - [ - "80,000", - -13.046757698059082 - ], - [ - "▁adept", - -13.04692554473877 - ], - [ - "▁Crăciun", - -13.046935081481934 - ], - [ - "▁sanatos", - -13.046935081481934 - ], - [ - "305", - -13.04699993133545 - ], - [ - "specialised", - 
-13.047069549560547 - ], - [ - "▁drummer", - -13.047189712524414 - ], - [ - "Applicants", - -13.04741096496582 - ], - [ - "objekt", - -13.04741096496582 - ], - [ - "▁Fifth", - -13.047446250915527 - ], - [ - "rgic", - -13.047567367553711 - ], - [ - "theater", - -13.047635078430176 - ], - [ - "▁terminé", - -13.047852516174316 - ], - [ - "▁Englisch", - -13.047894477844238 - ], - [ - "▁Oradea", - -13.047898292541504 - ], - [ - "possesses", - -13.0479097366333 - ], - [ - "illiers", - -13.047986030578613 - ], - [ - "▁refurbish", - -13.048110961914062 - ], - [ - "graphie", - -13.04814338684082 - ], - [ - "▁Booth", - -13.048174858093262 - ], - [ - "▁Ausdruck", - -13.048192977905273 - ], - [ - "▁Marriage", - -13.048361778259277 - ], - [ - "▁knives", - -13.048362731933594 - ], - [ - "▁Relief", - -13.048368453979492 - ], - [ - "▁Clerk", - -13.048392295837402 - ], - [ - "wait", - -13.048501014709473 - ], - [ - "▁probablement", - -13.048698425292969 - ], - [ - "▁suplimentar", - -13.048701286315918 - ], - [ - "dollar", - -13.048797607421875 - ], - [ - "English", - -13.04898452758789 - ], - [ - "866", - -13.049300193786621 - ], - [ - "▁Savannah", - -13.049314498901367 - ], - [ - "▁aftermath", - -13.049318313598633 - ], - [ - "phé", - -13.04932689666748 - ], - [ - "▁Plum", - -13.049417495727539 - ], - [ - "264", - -13.049566268920898 - ], - [ - "2.000", - -13.049582481384277 - ], - [ - "niei", - -13.049603462219238 - ], - [ - "ATP", - -13.049803733825684 - ], - [ - "mila", - -13.04985523223877 - ], - [ - "▁glut", - -13.049887657165527 - ], - [ - "gotta", - -13.049891471862793 - ], - [ - "schütt", - -13.049893379211426 - ], - [ - "klick", - -13.049996376037598 - ], - [ - "whether", - -13.050090789794922 - ], - [ - "▁Wade", - -13.050163269042969 - ], - [ - "▁Riley", - -13.050280570983887 - ], - [ - "Chancellor", - -13.050288200378418 - ], - [ - "▁nebun", - -13.050300598144531 - ], - [ - "▁aufgebaut", - -13.050374984741211 - ], - [ - "steigt", - -13.050423622131348 - ], - [ - 
"▁entirety", - -13.050494194030762 - ], - [ - "▁telefoane", - -13.05074691772461 - ], - [ - "▁Roulette", - -13.050763130187988 - ], - [ - "1700", - -13.050787925720215 - ], - [ - "▁lycée", - -13.050856590270996 - ], - [ - "rotary", - -13.051128387451172 - ], - [ - "benefited", - -13.051170349121094 - ], - [ - "▁Bisericii", - -13.051220893859863 - ], - [ - "▁Rehabilitation", - -13.051220893859863 - ], - [ - "▁lithium", - -13.051228523254395 - ], - [ - "imposing", - -13.051279067993164 - ], - [ - "176", - -13.051329612731934 - ], - [ - "▁thunder", - -13.051527976989746 - ], - [ - "ăsesc", - -13.052000045776367 - ], - [ - "▁Einblick", - -13.052010536193848 - ], - [ - "oiled", - -13.052151679992676 - ], - [ - "SSA", - -13.052181243896484 - ], - [ - "apparition", - -13.05224609375 - ], - [ - "▁Impress", - -13.052273750305176 - ], - [ - "▁Aboriginal", - -13.052297592163086 - ], - [ - "loos", - -13.052383422851562 - ], - [ - "▁Bread", - -13.052440643310547 - ], - [ - "177", - -13.052619934082031 - ], - [ - "VERS", - -13.052638053894043 - ], - [ - "▁Respect", - -13.05271053314209 - ], - [ - "▁Practical", - -13.053047180175781 - ], - [ - "drafting", - -13.05306339263916 - ], - [ - "си", - -13.053099632263184 - ], - [ - "▁faza", - -13.053109169006348 - ], - [ - "▁sovereign", - -13.053123474121094 - ], - [ - "▁Untersuchung", - -13.05314826965332 - ], - [ - "▁Niveau", - -13.053154945373535 - ], - [ - "transport", - -13.053182601928711 - ], - [ - "▁downstream", - -13.053293228149414 - ], - [ - "▁Milton", - -13.053383827209473 - ], - [ - "▁knob", - -13.053390502929688 - ], - [ - "employeur", - -13.053499221801758 - ], - [ - "▁furnish", - -13.053544044494629 - ], - [ - "weather", - -13.053564071655273 - ], - [ - "LAB", - -13.053646087646484 - ], - [ - "166", - -13.053853988647461 - ], - [ - "▁salaire", - -13.053937911987305 - ], - [ - "▁Carnival", - -13.054088592529297 - ], - [ - "4-0", - -13.054168701171875 - ], - [ - "▁Angle", - -13.054291725158691 - ], - [ - "▁José", - 
-13.054399490356445 - ], - [ - "architecture", - -13.054475784301758 - ], - [ - "▁Sunset", - -13.054574966430664 - ], - [ - "▁Absolut", - -13.054694175720215 - ], - [ - "▁herrlich", - -13.05470085144043 - ], - [ - "12%", - -13.054703712463379 - ], - [ - "▁Indo", - -13.054823875427246 - ], - [ - "▁Komfort", - -13.055049896240234 - ], - [ - "▁acțiuni", - -13.05505084991455 - ], - [ - "energize", - -13.055085182189941 - ], - [ - "▁Warning", - -13.055171966552734 - ], - [ - "▁Sunny", - -13.055216789245605 - ], - [ - "▁razor", - -13.055489540100098 - ], - [ - "▁psychic", - -13.055490493774414 - ], - [ - "▁convivial", - -13.055525779724121 - ], - [ - "Voraussetzungen", - -13.05555534362793 - ], - [ - "IMO", - -13.055622100830078 - ], - [ - "opérateur", - -13.055743217468262 - ], - [ - "▁langjährige", - -13.05575942993164 - ], - [ - "▁Spanie", - -13.055901527404785 - ], - [ - "pulmonary", - -13.056004524230957 - ], - [ - "▁Bingo", - -13.056050300598145 - ], - [ - "▁confession", - -13.056096076965332 - ], - [ - "▁Petru", - -13.056100845336914 - ], - [ - "▁prerequisite", - -13.056164741516113 - ], - [ - "▁dodge", - -13.056352615356445 - ], - [ - "▁McN", - -13.056436538696289 - ], - [ - "▁originate", - -13.056577682495117 - ], - [ - "▁nettoy", - -13.056612014770508 - ], - [ - "▁$14", - -13.056645393371582 - ], - [ - "▁Bride", - -13.05669116973877 - ], - [ - "▁noisy", - -13.05673885345459 - ], - [ - "▁Worcester", - -13.056963920593262 - ], - [ - "▁Surrey", - -13.056982040405273 - ], - [ - "harmonis", - -13.057110786437988 - ], - [ - "▁représentant", - -13.057304382324219 - ], - [ - "organisée", - -13.057475090026855 - ], - [ - "truction", - -13.057513236999512 - ], - [ - "injected", - -13.057597160339355 - ], - [ - "▁Suzuki", - -13.057924270629883 - ], - [ - "▁japonais", - -13.057924270629883 - ], - [ - "▁turquoise", - -13.057924270629883 - ], - [ - "▁Peut", - -13.058004379272461 - ], - [ - "▁Sequ", - -13.058028221130371 - ], - [ - "slated", - -13.058037757873535 - ], - [ - 
"▁Alma", - -13.058215141296387 - ], - [ - "▁gebraucht", - -13.05827522277832 - ], - [ - "gängig", - -13.058281898498535 - ], - [ - "▁commis", - -13.058377265930176 - ], - [ - "ACS", - -13.05856990814209 - ], - [ - "pressure", - -13.058664321899414 - ], - [ - "cured", - -13.05874252319336 - ], - [ - "▁Jackie", - -13.058757781982422 - ], - [ - "▁Kashmir", - -13.05888557434082 - ], - [ - "▁recruited", - -13.059000968933105 - ], - [ - "▁vécu", - -13.059011459350586 - ], - [ - "▁opus", - -13.059052467346191 - ], - [ - "kWh", - -13.05927562713623 - ], - [ - "▁tapping", - -13.059292793273926 - ], - [ - "▁tehnologie", - -13.05931282043457 - ], - [ - "▁Gentle", - -13.059365272521973 - ], - [ - "▁bombard", - -13.059372901916504 - ], - [ - "▁caméra", - -13.059427261352539 - ], - [ - "züglich", - -13.059431076049805 - ], - [ - "▁bingo", - -13.059453010559082 - ], - [ - "private", - -13.059496879577637 - ], - [ - "▁mediator", - -13.059642791748047 - ], - [ - "▁carbohydrates", - -13.059847831726074 - ], - [ - "▁workmanship", - -13.059849739074707 - ], - [ - "▁Combat", - -13.059853553771973 - ], - [ - "▁Mickey", - -13.059901237487793 - ], - [ - "▁distressed", - -13.059908866882324 - ], - [ - "lucrează", - -13.059924125671387 - ], - [ - "treatment", - -13.06007194519043 - ], - [ - "▁Einwohner", - -13.060330390930176 - ], - [ - "▁glaze", - -13.060386657714844 - ], - [ - "scholarly", - -13.06043529510498 - ], - [ - "ROC", - -13.060750007629395 - ], - [ - "▁Darwin", - -13.060774803161621 - ], - [ - "drückt", - -13.060775756835938 - ], - [ - "▁treadmill", - -13.060819625854492 - ], - [ - "ntz", - -13.060830116271973 - ], - [ - "620", - -13.061087608337402 - ], - [ - "surface", - -13.061148643493652 - ], - [ - "▁vieţii", - -13.0612211227417 - ], - [ - "990", - -13.061296463012695 - ], - [ - "▁doigt", - -13.061341285705566 - ], - [ - "▁explor", - -13.061450004577637 - ], - [ - "▁asistent", - -13.061670303344727 - ], - [ - "coloriage", - -13.061734199523926 - ], - [ - "▁Martinez", - 
-13.061758041381836 - ], - [ - "▁antibodies", - -13.061775207519531 - ], - [ - "Schülerinnen", - -13.061779975891113 - ], - [ - "Honestly", - -13.06178092956543 - ], - [ - "grabbing", - -13.061871528625488 - ], - [ - "▁Cardiff", - -13.061897277832031 - ], - [ - "▁Trophy", - -13.062084197998047 - ], - [ - "▁pupil", - -13.062117576599121 - ], - [ - "▁invoke", - -13.062161445617676 - ], - [ - "bezüglich", - -13.062193870544434 - ], - [ - "Anschließend", - -13.062275886535645 - ], - [ - "perks", - -13.062360763549805 - ], - [ - "530", - -13.062373161315918 - ], - [ - "▁emblem", - -13.062431335449219 - ], - [ - "770", - -13.062543869018555 - ], - [ - "clairement", - -13.062590599060059 - ], - [ - "▁sublinia", - -13.062597274780273 - ], - [ - "▁1910", - -13.062719345092773 - ], - [ - "▁Embassy", - -13.062740325927734 - ], - [ - "▁Valencia", - -13.062740325927734 - ], - [ - "▁catastrophic", - -13.062740325927734 - ], - [ - "▁simulator", - -13.06274700164795 - ], - [ - "Pierre", - -13.062766075134277 - ], - [ - "▁doorstep", - -13.062806129455566 - ], - [ - "▁rallie", - -13.062881469726562 - ], - [ - "▁șans", - -13.062891960144043 - ], - [ - "▁crosses", - -13.06300163269043 - ], - [ - "▁zodi", - -13.06312084197998 - ], - [ - "Next", - -13.06314754486084 - ], - [ - "▁rebuilt", - -13.063152313232422 - ], - [ - "▁panorama", - -13.063222885131836 - ], - [ - "196", - -13.06324291229248 - ], - [ - "▁erinnert", - -13.06370735168457 - ], - [ - "lism", - -13.06371784210205 - ], - [ - "opened", - -13.06383228302002 - ], - [ - "▁breakout", - -13.064126014709473 - ], - [ - "▁mosque", - -13.064153671264648 - ], - [ - "boc", - -13.064507484436035 - ], - [ - "▁grout", - -13.064568519592285 - ], - [ - "▁Gather", - -13.064582824707031 - ], - [ - "▁vampire", - -13.06467342376709 - ], - [ - "▁tandem", - -13.064684867858887 - ], - [ - "▁pastra", - -13.064702033996582 - ], - [ - "▁lösen", - -13.064794540405273 - ], - [ - "▁discontinu", - -13.064826965332031 - ], - [ - "fuses", - 
-13.064885139465332 - ], - [ - "▁identitate", - -13.064947128295898 - ], - [ - "BAC", - -13.064964294433594 - ], - [ - "▁$100,000", - -13.065122604370117 - ], - [ - "Finder", - -13.06515121459961 - ], - [ - "▁Leicester", - -13.065157890319824 - ], - [ - "▁1933", - -13.065159797668457 - ], - [ - "informatiile", - -13.065234184265137 - ], - [ - "lädt", - -13.065309524536133 - ], - [ - "iggle", - -13.065399169921875 - ], - [ - "▁Discuss", - -13.065462112426758 - ], - [ - "distributing", - -13.065470695495605 - ], - [ - "▁disappoint", - -13.065475463867188 - ], - [ - "ecţia", - -13.065611839294434 - ], - [ - "▁condiment", - -13.065640449523926 - ], - [ - "▁Marriott", - -13.065642356872559 - ], - [ - "▁entspannt", - -13.065644264221191 - ], - [ - "arbitrary", - -13.06564998626709 - ], - [ - "rühren", - -13.06574821472168 - ], - [ - "Intensiv", - -13.065771102905273 - ], - [ - "eliminare", - -13.065895080566406 - ], - [ - "muster", - -13.06594467163086 - ], - [ - "▁komplexe", - -13.066130638122559 - ], - [ - "▁(2008)", - -13.066184997558594 - ], - [ - "absolument", - -13.066349029541016 - ], - [ - "aloo", - -13.066420555114746 - ], - [ - "cererea", - -13.06655216217041 - ], - [ - "▁imobiliar", - -13.066696166992188 - ], - [ - "▁paramount", - -13.066705703735352 - ], - [ - "▁Vince", - -13.066723823547363 - ], - [ - "pov", - -13.067076683044434 - ], - [ - "▁conveyor", - -13.067549705505371 - ], - [ - "▁Natalie", - -13.067583084106445 - ], - [ - "▁Comedy", - -13.067623138427734 - ], - [ - "Developing", - -13.0678129196167 - ], - [ - "disputed", - -13.067878723144531 - ], - [ - "164", - -13.067911148071289 - ], - [ - "▁Communist", - -13.067949295043945 - ], - [ - "▁Bahnhof", - -13.06806468963623 - ], - [ - "dokument", - -13.068145751953125 - ], - [ - "▁Somali", - -13.06828498840332 - ], - [ - "▁Strasbourg", - -13.068503379821777 - ], - [ - "▁Technician", - -13.068550109863281 - ], - [ - "▁subsidies", - -13.068633079528809 - ], - [ - "judeţul", - -13.068723678588867 - ], - [ 
- "▁bible", - -13.068769454956055 - ], - [ - "gefahren", - -13.068855285644531 - ], - [ - "▁literal", - -13.068882942199707 - ], - [ - "▁diminish", - -13.068940162658691 - ], - [ - "Sfântul", - -13.0689697265625 - ], - [ - "▁doreșt", - -13.068978309631348 - ], - [ - "▁Xiaomi", - -13.069036483764648 - ], - [ - "▁planète", - -13.069130897521973 - ], - [ - "▁LTD", - -13.069175720214844 - ], - [ - "▁Zugriff", - -13.069196701049805 - ], - [ - "beginn", - -13.06921672821045 - ], - [ - "▁Einführung", - -13.069294929504395 - ], - [ - "▁coronar", - -13.069393157958984 - ], - [ - "lomi", - -13.0693941116333 - ], - [ - "▁Accueil", - -13.0695219039917 - ], - [ - "scanned", - -13.069528579711914 - ], - [ - "▁Banque", - -13.06952953338623 - ], - [ - "▁réaction", - -13.069531440734863 - ], - [ - "▁Hoffman", - -13.069546699523926 - ], - [ - "▁merveille", - -13.069637298583984 - ], - [ - "navigating", - -13.069719314575195 - ], - [ - "schalten", - -13.06984806060791 - ], - [ - "▁ieşi", - -13.070136070251465 - ], - [ - "1-6", - -13.070175170898438 - ], - [ - "▁frustr", - -13.070670127868652 - ], - [ - "▁réfléchi", - -13.0709810256958 - ], - [ - "▁difuz", - -13.071100234985352 - ], - [ - "▁freue", - -13.07121753692627 - ], - [ - "besuch", - -13.071349143981934 - ], - [ - "153", - -13.071386337280273 - ], - [ - "▁butterflies", - -13.071467399597168 - ], - [ - "▁terrifying", - -13.071467399597168 - ], - [ - "▁încuraj", - -13.071468353271484 - ], - [ - "▁Château", - -13.071470260620117 - ], - [ - "▁contingent", - -13.071474075317383 - ], - [ - "▁abusive", - -13.0714750289917 - ], - [ - "▁SharePoint", - -13.07148551940918 - ], - [ - "▁skating", - -13.071573257446289 - ], - [ - "▁militaire", - -13.07166576385498 - ], - [ - "▁Vig", - -13.071690559387207 - ], - [ - "omics", - -13.071840286254883 - ], - [ - "▁Blockchain", - -13.07197093963623 - ], - [ - "▁principii", - -13.071975708007812 - ], - [ - "▁permitting", - -13.071979522705078 - ], - [ - "optimisation", - -13.072270393371582 - ], - 
[ - "▁maintien", - -13.072328567504883 - ], - [ - "▁Aluminum", - -13.072442054748535 - ], - [ - "▁Plymouth", - -13.072443008422852 - ], - [ - "▁Weiterbildung", - -13.072457313537598 - ], - [ - "▁Finanzierung", - -13.072505950927734 - ], - [ - "▁Kerala", - -13.072514533996582 - ], - [ - "insulated", - -13.072668075561523 - ], - [ - "▁loaf", - -13.072802543640137 - ], - [ - "▁Sammlung", - -13.072929382324219 - ], - [ - "▁îndepărt", - -13.072930335998535 - ], - [ - "▁Gewerbe", - -13.072942733764648 - ], - [ - "udel", - -13.072988510131836 - ], - [ - "▁coursework", - -13.073104858398438 - ], - [ - "▁Darstellung", - -13.073246002197266 - ], - [ - "▁indeplin", - -13.073433876037598 - ], - [ - "▁Gandhi", - -13.073434829711914 - ], - [ - "tossed", - -13.07361888885498 - ], - [ - "ewed", - -13.073844909667969 - ], - [ - "▁classement", - -13.073884963989258 - ], - [ - "▁Protestant", - -13.073905944824219 - ], - [ - "▁frumoasă", - -13.073905944824219 - ], - [ - "▁pantalon", - -13.073906898498535 - ], - [ - "▁rivet", - -13.073966979980469 - ], - [ - "▁Echt", - -13.0741605758667 - ], - [ - "erviciului", - -13.07421588897705 - ], - [ - "fabricated", - -13.074322700500488 - ], - [ - "Compania", - -13.074372291564941 - ], - [ - "▁juvenile", - -13.074394226074219 - ], - [ - "▁souligne", - -13.07444953918457 - ], - [ - "▁chrono", - -13.07447338104248 - ], - [ - "▁VII", - -13.074594497680664 - ], - [ - "▁Kirch", - -13.074714660644531 - ], - [ - "catcher", - -13.075014114379883 - ], - [ - "salv", - -13.075263023376465 - ], - [ - "▁Enforcement", - -13.075370788574219 - ], - [ - "▁Penguin", - -13.075410842895508 - ], - [ - "kowski", - -13.075465202331543 - ], - [ - "▁2:1", - -13.075470924377441 - ], - [ - "gesundheit", - -13.075475692749023 - ], - [ - "▁unveil", - -13.075519561767578 - ], - [ - "bending", - -13.075531959533691 - ], - [ - "▁conecta", - -13.075579643249512 - ], - [ - "▁faim", - -13.075885772705078 - ], - [ - "▁MacBook", - -13.075969696044922 - ], - [ - "versuch", - 
-13.07600212097168 - ], - [ - "▁regiuni", - -13.076029777526855 - ], - [ - "▁Willow", - -13.076184272766113 - ], - [ - "▁finanziell", - -13.076303482055664 - ], - [ - "▁nurturing", - -13.076354026794434 - ], - [ - "impuls", - -13.076370239257812 - ], - [ - "▁funktionieren", - -13.076371192932129 - ], - [ - "▁rezult", - -13.076554298400879 - ], - [ - "▁spui", - -13.076593399047852 - ], - [ - "▁walkway", - -13.076653480529785 - ], - [ - "▁Rauch", - -13.076708793640137 - ], - [ - "169", - -13.076793670654297 - ], - [ - "610", - -13.076863288879395 - ], - [ - "▁scazut", - -13.0773286819458 - ], - [ - "▁Garrett", - -13.077329635620117 - ], - [ - "▁necesită", - -13.077352523803711 - ], - [ - "Articolul", - -13.077364921569824 - ], - [ - "numită", - -13.077371597290039 - ], - [ - "Coastal", - -13.077383041381836 - ], - [ - "▁canned", - -13.077421188354492 - ], - [ - "▁Friendly", - -13.077499389648438 - ], - [ - "dissolved", - -13.0775728225708 - ], - [ - "seid", - -13.077674865722656 - ], - [ - "▁feminin", - -13.077685356140137 - ], - [ - "▁fetch", - -13.077710151672363 - ], - [ - "▁Accent", - -13.077767372131348 - ], - [ - "phrase", - -13.077771186828613 - ], - [ - "effekt", - -13.077775955200195 - ], - [ - "▁Progressive", - -13.077777862548828 - ], - [ - "▁canadien", - -13.077820777893066 - ], - [ - "iety", - -13.077839851379395 - ], - [ - "eignen", - -13.077984809875488 - ], - [ - "paraître", - -13.07812213897705 - ], - [ - "▁asylum", - -13.07833194732666 - ], - [ - "▁Albany", - -13.078362464904785 - ], - [ - "▁remis", - -13.078386306762695 - ], - [ - "▁Joyce", - -13.078664779663086 - ], - [ - "schätzt", - -13.078784942626953 - ], - [ - "▁begleiten", - -13.078801155090332 - ], - [ - "▁Siemens", - -13.079007148742676 - ], - [ - "▁schlimm", - -13.079061508178711 - ], - [ - "▁Libra", - -13.079254150390625 - ], - [ - "▁Composite", - -13.079290390014648 - ], - [ - "▁écr", - -13.079315185546875 - ], - [ - "disciplina", - -13.079379081726074 - ], - [ - "▁premature", - 
-13.079630851745605 - ], - [ - "▁scopuri", - -13.079681396484375 - ], - [ - "ffnung", - -13.079715728759766 - ], - [ - "7000", - -13.079726219177246 - ], - [ - "▁conséquent", - -13.079780578613281 - ], - [ - "▁côte", - -13.079787254333496 - ], - [ - "celul", - -13.079872131347656 - ], - [ - "▁fourteen", - -13.079940795898438 - ], - [ - "▁Riverside", - -13.080077171325684 - ], - [ - "gemacht", - -13.08013916015625 - ], - [ - "▁volcanic", - -13.080272674560547 - ], - [ - "▁Salesforce", - -13.080315589904785 - ], - [ - "▁Granite", - -13.080317497253418 - ], - [ - "▁Zentral", - -13.080329895019531 - ], - [ - "▁Female", - -13.080341339111328 - ], - [ - "▁culmin", - -13.08047103881836 - ], - [ - "▁urmatoare", - -13.080547332763672 - ], - [ - "toxicity", - -13.080560684204102 - ], - [ - "▁mâna", - -13.080678939819336 - ], - [ - "▁Umfang", - -13.080764770507812 - ], - [ - "▁Encore", - -13.08077621459961 - ], - [ - "▁Edgar", - -13.080831527709961 - ], - [ - "▁négoci", - -13.080852508544922 - ], - [ - "njeux", - -13.080873489379883 - ], - [ - "▁variance", - -13.080917358398438 - ], - [ - "▁Functional", - -13.080973625183105 - ], - [ - "172", - -13.081046104431152 - ], - [ - "▁dissolve", - -13.0811185836792 - ], - [ - "förderung", - -13.081188201904297 - ], - [ - "▁Brilliant", - -13.081254959106445 - ], - [ - "▁comprehension", - -13.081254959106445 - ], - [ - "▁soybean", - -13.081254959106445 - ], - [ - "▁standalone", - -13.081255912780762 - ], - [ - "▁Communi", - -13.081303596496582 - ], - [ - "▁ajut", - -13.081313133239746 - ], - [ - "▁lavish", - -13.081338882446289 - ], - [ - "Ouest", - -13.081384658813477 - ], - [ - "▁Maggie", - -13.081385612487793 - ], - [ - "▁evolutionary", - -13.081550598144531 - ], - [ - "bowel", - -13.081575393676758 - ], - [ - "▁glyco", - -13.081626892089844 - ], - [ - "▁Happi", - -13.081706047058105 - ], - [ - "organising", - -13.081710815429688 - ], - [ - "▁übernimm", - -13.081727027893066 - ], - [ - "▁snowboard", - -13.081793785095215 - ], - [ - 
"▁prévention", - -13.081830024719238 - ], - [ - "▁Celebrate", - -13.082160949707031 - ], - [ - "▁pottery", - -13.082254409790039 - ], - [ - "▁Outstanding", - -13.082328796386719 - ], - [ - "▁toamna", - -13.082331657409668 - ], - [ - "▁graceful", - -13.082548141479492 - ], - [ - "197", - -13.082559585571289 - ], - [ - "strecke", - -13.082598686218262 - ], - [ - "▁medizinische", - -13.082733154296875 - ], - [ - "216", - -13.082839965820312 - ], - [ - "▁prune", - -13.082868576049805 - ], - [ - "Pourtant", - -13.083000183105469 - ], - [ - "▁Difference", - -13.083224296569824 - ], - [ - "▁factura", - -13.083830833435059 - ], - [ - "Mass", - -13.084161758422852 - ], - [ - "▁Enhanc", - -13.084190368652344 - ], - [ - "upholstered", - -13.084209442138672 - ], - [ - "▁übernommen", - -13.084209442138672 - ], - [ - "▁mitigation", - -13.084210395812988 - ], - [ - "▁Hidden", - -13.084219932556152 - ], - [ - "▁Häuser", - -13.084234237670898 - ], - [ - "▁Pavel", - -13.084403991699219 - ], - [ - "▁congress", - -13.084512710571289 - ], - [ - "▁antibody", - -13.084598541259766 - ], - [ - "▁stitches", - -13.084811210632324 - ], - [ - "▁colonies", - -13.084820747375488 - ], - [ - "Into", - -13.084900856018066 - ], - [ - "▁démo", - -13.084924697875977 - ], - [ - "▁MVP", - -13.085041046142578 - ], - [ - "▁replay", - -13.085062026977539 - ], - [ - "▁usoara", - -13.08522891998291 - ], - [ - "▁Breast", - -13.085278511047363 - ], - [ - "ooney", - -13.085336685180664 - ], - [ - "▁außen", - -13.085663795471191 - ], - [ - "▁Motorola", - -13.085695266723633 - ], - [ - "▁spalat", - -13.08578109741211 - ], - [ - "euillez", - -13.086088180541992 - ], - [ - "▁jeunesse", - -13.086170196533203 - ], - [ - "▁pastoral", - -13.086174011230469 - ], - [ - "▁Sussex", - -13.086185455322266 - ], - [ - "▁stencil", - -13.08619213104248 - ], - [ - "▁organismului", - -13.086504936218262 - ], - [ - "seized", - -13.086649894714355 - ], - [ - "▁întrebare", - -13.086865425109863 - ], - [ - "cliquez", - 
-13.086874961853027 - ], - [ - "5.7", - -13.086984634399414 - ], - [ - "▁Yama", - -13.087080955505371 - ], - [ - "painted", - -13.08708667755127 - ], - [ - "▁Swimming", - -13.087176322937012 - ], - [ - "Rhythm", - -13.087202072143555 - ], - [ - "▁sorrow", - -13.087210655212402 - ], - [ - "▁Movers", - -13.08731460571289 - ], - [ - "renforcer", - -13.08735466003418 - ], - [ - "▁Wach", - -13.087381362915039 - ], - [ - "0,00", - -13.087390899658203 - ], - [ - "▁glove", - -13.08753490447998 - ], - [ - "▁stâng", - -13.087669372558594 - ], - [ - "rgendwann", - -13.087687492370605 - ], - [ - "▁Philippine", - -13.08769416809082 - ], - [ - "▁anunțat", - -13.087716102600098 - ], - [ - "▁Coleman", - -13.087723731994629 - ], - [ - "affir", - -13.087918281555176 - ], - [ - "uleiul", - -13.08808422088623 - ], - [ - "▁Coconut", - -13.088197708129883 - ], - [ - "▁Supplement", - -13.088210105895996 - ], - [ - "haudiere", - -13.088293075561523 - ], - [ - "▁kettle", - -13.088313102722168 - ], - [ - "▁3,5", - -13.088370323181152 - ], - [ - "refurbished", - -13.088425636291504 - ], - [ - "esthétique", - -13.088665962219238 - ], - [ - "performing", - -13.088667869567871 - ], - [ - "▁Engag", - -13.088762283325195 - ], - [ - "Group", - -13.088801383972168 - ], - [ - "▁viande", - -13.088887214660645 - ], - [ - "▁oricum", - -13.088888168334961 - ], - [ - "Spitalul", - -13.089093208312988 - ], - [ - "▁cesse", - -13.089110374450684 - ], - [ - "▁contradiction", - -13.089130401611328 - ], - [ - "▁Chrysler", - -13.089154243469238 - ], - [ - "▁poultry", - -13.089154243469238 - ], - [ - "▁thirteen", - -13.089154243469238 - ], - [ - "▁sightseeing", - -13.089155197143555 - ], - [ - "▁Miguel", - -13.089158058166504 - ], - [ - "▁terminology", - -13.089334487915039 - ], - [ - "▁Genetic", - -13.089553833007812 - ], - [ - "commercial", - -13.08963394165039 - ], - [ - "gehoben", - -13.08965015411377 - ], - [ - "RIGHT", - -13.08995532989502 - ], - [ - "▁proprietate", - -13.089990615844727 - ], - [ - 
"▁Cannes", - -13.090012550354004 - ], - [ - "▁klicken", - -13.090023040771484 - ], - [ - "▁Belgique", - -13.0901460647583 - ], - [ - "tapped", - -13.09034538269043 - ], - [ - "kinetic", - -13.090569496154785 - ], - [ - "▁feuilles", - -13.090673446655273 - ], - [ - "whitening", - -13.090760231018066 - ], - [ - "Any", - -13.090946197509766 - ], - [ - "Manager", - -13.091099739074707 - ], - [ - "▁constatat", - -13.091106414794922 - ], - [ - "▁Myanmar", - -13.091140747070312 - ], - [ - "▁Examination", - -13.091142654418945 - ], - [ - "▁règle", - -13.091208457946777 - ], - [ - "▁umgesetzt", - -13.09128475189209 - ], - [ - "211", - -13.091336250305176 - ], - [ - "▁Herald", - -13.091449737548828 - ], - [ - "Alex", - -13.091680526733398 - ], - [ - "▁drauf", - -13.091707229614258 - ], - [ - "logger", - -13.091714859008789 - ], - [ - "▁pictur", - -13.09186840057373 - ], - [ - "▁Divi", - -13.09196949005127 - ], - [ - "▁furnizat", - -13.092089653015137 - ], - [ - "▁verzichten", - -13.092132568359375 - ], - [ - "▁Sergi", - -13.092199325561523 - ], - [ - "contaminated", - -13.09223747253418 - ], - [ - "▁Buddy", - -13.092243194580078 - ], - [ - "▁chilled", - -13.092268943786621 - ], - [ - "▁vorlieg", - -13.092317581176758 - ], - [ - "▁Claudia", - -13.092632293701172 - ], - [ - "▁miserable", - -13.092653274536133 - ], - [ - "▁sketches", - -13.092683792114258 - ], - [ - "schicken", - -13.092814445495605 - ], - [ - "since", - -13.0928373336792 - ], - [ - "2.9", - -13.092840194702148 - ], - [ - "▁sitzen", - -13.092928886413574 - ], - [ - "ceapa", - -13.093396186828613 - ], - [ - "respectarea", - -13.093438148498535 - ], - [ - "▁handheld", - -13.093448638916016 - ], - [ - "popular", - -13.093527793884277 - ], - [ - "calming", - -13.093603134155273 - ], - [ - "Govern", - -13.093632698059082 - ], - [ - "▁omega", - -13.093645095825195 - ], - [ - "▁Planner", - -13.093791007995605 - ], - [ - "enriched", - -13.093850135803223 - ], - [ - "154", - -13.093976974487305 - ], - [ - "▁autorisé", - 
-13.093989372253418 - ], - [ - "▁cadouri", - -13.09407901763916 - ], - [ - "▁vulnerabilities", - -13.094143867492676 - ], - [ - "▁Arbeitnehmer", - -13.094158172607422 - ], - [ - "éditeur", - -13.094234466552734 - ], - [ - "▁Anleitung", - -13.094317436218262 - ], - [ - "rubbing", - -13.094343185424805 - ], - [ - "▁autovehicul", - -13.094621658325195 - ], - [ - "▁öffnen", - -13.094621658325195 - ], - [ - "▁Napoleon", - -13.094622611999512 - ], - [ - "▁cliché", - -13.094637870788574 - ], - [ - "▁Schaf", - -13.09469985961914 - ], - [ - "regulating", - -13.094894409179688 - ], - [ - "▁Kühl", - -13.09490966796875 - ], - [ - "▁blush", - -13.094913482666016 - ], - [ - "▁discard", - -13.094992637634277 - ], - [ - "▁confine", - -13.095027923583984 - ], - [ - "▁Rodriguez", - -13.09511947631836 - ], - [ - "▁ADHD", - -13.095165252685547 - ], - [ - "▁Madame", - -13.09516716003418 - ], - [ - "▁résolution", - -13.095319747924805 - ], - [ - "▁flair", - -13.095369338989258 - ], - [ - "▁claw", - -13.095422744750977 - ], - [ - "▁1929", - -13.095643043518066 - ], - [ - "ETH", - -13.095672607421875 - ], - [ - "nähe", - -13.095804214477539 - ], - [ - "▁soothe", - -13.0958251953125 - ], - [ - "4.9", - -13.095833778381348 - ], - [ - "montée", - -13.095925331115723 - ], - [ - "confirming", - -13.095989227294922 - ], - [ - "continent", - -13.09613037109375 - ], - [ - "reiz", - -13.09643840789795 - ], - [ - "john", - -13.096577644348145 - ], - [ - "IONAL", - -13.096588134765625 - ], - [ - "▁exported", - -13.0966215133667 - ], - [ - "▁Prison", - -13.096651077270508 - ], - [ - "possessed", - -13.096952438354492 - ], - [ - "▁placebo", - -13.096991539001465 - ], - [ - "▁biodiversity", - -13.097116470336914 - ], - [ - "▁combustion", - -13.097116470336914 - ], - [ - "▁Plumbing", - -13.09711742401123 - ], - [ - "ixie", - -13.097124099731445 - ], - [ - "▁repetition", - -13.09715461730957 - ], - [ - "▁soumis", - -13.097372055053711 - ], - [ - "▁reduc", - -13.097671508789062 - ], - [ - "▁constrain", - 
-13.097759246826172 - ], - [ - "Anti", - -13.097760200500488 - ], - [ - "consolidated", - -13.097817420959473 - ], - [ - "214", - -13.098095893859863 - ], - [ - "▁breaches", - -13.098108291625977 - ], - [ - "infringement", - -13.098115921020508 - ], - [ - "▁drizzle", - -13.098115921020508 - ], - [ - "▁erhöhen", - -13.098116874694824 - ], - [ - "▁Somerset", - -13.098118782043457 - ], - [ - "▁blonde", - -13.098132133483887 - ], - [ - "▁Funny", - -13.09813404083252 - ], - [ - "tuşi", - -13.098149299621582 - ], - [ - "▁reinvent", - -13.098162651062012 - ], - [ - "▁sérieux", - -13.098247528076172 - ], - [ - "▁croire", - -13.098308563232422 - ], - [ - "general", - -13.098315238952637 - ], - [ - "▁Distance", - -13.098319053649902 - ], - [ - "▁VoIP", - -13.098348617553711 - ], - [ - "▁adăugat", - -13.098406791687012 - ], - [ - "matik", - -13.098546028137207 - ], - [ - "▁avatar", - -13.098647117614746 - ], - [ - "▁superstar", - -13.098804473876953 - ], - [ - "8.0", - -13.098814010620117 - ], - [ - "lusieurs", - -13.098982810974121 - ], - [ - "▁Judeţean", - -13.099117279052734 - ], - [ - "offenen", - -13.099128723144531 - ], - [ - "RAF", - -13.099133491516113 - ], - [ - "▁restroom", - -13.099207878112793 - ], - [ - "enfance", - -13.099348068237305 - ], - [ - "▁garnish", - -13.099499702453613 - ], - [ - "▁vermittelt", - -13.099631309509277 - ], - [ - "Histoire", - -13.099634170532227 - ], - [ - "cyan", - -13.100628852844238 - ], - [ - "Talk", - -13.100666046142578 - ], - [ - "▁Varianten", - -13.10069465637207 - ], - [ - "▁Lille", - -13.10085678100586 - ], - [ - "▁offenbar", - -13.10098934173584 - ], - [ - "▁rénovation", - -13.10112190246582 - ], - [ - "▁comentarii", - -13.101249694824219 - ], - [ - "▁Bedford", - -13.10130500793457 - ], - [ - "▁cercetări", - -13.101325988769531 - ], - [ - "▁précision", - -13.101337432861328 - ], - [ - "MRC", - -13.101358413696289 - ], - [ - "alterations", - -13.101476669311523 - ], - [ - "▁discours", - -13.101531028747559 - ], - [ - "äger", - 
-13.101577758789062 - ], - [ - "▁antreprenor", - -13.101622581481934 - ], - [ - "▁Oriental", - -13.101849555969238 - ], - [ - "conducerea", - -13.101868629455566 - ], - [ - "CBC", - -13.101932525634766 - ], - [ - "▁mince", - -13.101985931396484 - ], - [ - "▁presidency", - -13.10212516784668 - ], - [ - "▁lipstick", - -13.102167129516602 - ], - [ - "▁SERVICES", - -13.102237701416016 - ], - [ - "productive", - -13.10237979888916 - ], - [ - "Assad", - -13.102400779724121 - ], - [ - "▁efectiv", - -13.102540969848633 - ], - [ - "▁gestern", - -13.102596282958984 - ], - [ - "▁RGB", - -13.102606773376465 - ], - [ - "▁Transilvania", - -13.102627754211426 - ], - [ - "▁Raleigh", - -13.102670669555664 - ], - [ - "DOM", - -13.102702140808105 - ], - [ - "▁iesit", - -13.102806091308594 - ], - [ - "▁anuntat", - -13.102810859680176 - ], - [ - "▁automatiquement", - -13.102901458740234 - ], - [ - "▁proliferation", - -13.103130340576172 - ], - [ - "▁Maroc", - -13.103156089782715 - ], - [ - "▁prezenţ", - -13.10323429107666 - ], - [ - "▁Filipino", - -13.103296279907227 - ], - [ - "▁Traian", - -13.103351593017578 - ], - [ - "▁swimmer", - -13.10356616973877 - ], - [ - "▁Slovenia", - -13.103632926940918 - ], - [ - "phobia", - -13.103724479675293 - ], - [ - "curricular", - -13.103734016418457 - ], - [ - "jurnal", - -13.103825569152832 - ], - [ - "▁vorne", - -13.103870391845703 - ], - [ - "▁asuma", - -13.103875160217285 - ], - [ - "defended", - -13.104104995727539 - ], - [ - "▁imminent", - -13.104140281677246 - ], - [ - "favored", - -13.10417366027832 - ], - [ - "▁innovator", - -13.104179382324219 - ], - [ - "▁Salzburg", - -13.104289054870605 - ], - [ - "5.4", - -13.104452133178711 - ], - [ - "Safe", - -13.104597091674805 - ], - [ - "▁inteleg", - -13.104744911193848 - ], - [ - "▁charisma", - -13.104781150817871 - ], - [ - "nature", - -13.104784965515137 - ], - [ - "4.8", - -13.104942321777344 - ], - [ - "argues", - -13.105104446411133 - ], - [ - "▁dimensiune", - -13.105142593383789 - ], - [ - 
"▁subdivision", - -13.105142593383789 - ], - [ - "▁embarrassing", - -13.105144500732422 - ], - [ - "▁confuse", - -13.105207443237305 - ], - [ - "DIC", - -13.105460166931152 - ], - [ - "rubrique", - -13.10549545288086 - ], - [ - "dépendance", - -13.105598449707031 - ], - [ - "INCLUD", - -13.10565185546875 - ], - [ - "▁Griffin", - -13.10574722290039 - ], - [ - "157", - -13.105751037597656 - ], - [ - "▁revamp", - -13.105839729309082 - ], - [ - "▁umgehen", - -13.10595989227295 - ], - [ - "▁mențin", - -13.106231689453125 - ], - [ - "▁1937", - -13.106695175170898 - ], - [ - "eklagte", - -13.106766700744629 - ], - [ - "▁clientèle", - -13.106801986694336 - ], - [ - "▁campsite", - -13.10708999633789 - ], - [ - "▁florist", - -13.107144355773926 - ], - [ - "▁Ferguson", - -13.107159614562988 - ], - [ - "▁demolition", - -13.107160568237305 - ], - [ - "▁McCain", - -13.107254981994629 - ], - [ - "▁reckon", - -13.10733413696289 - ], - [ - "striped", - -13.107414245605469 - ], - [ - "▁sonore", - -13.107481002807617 - ], - [ - "migrated", - -13.107548713684082 - ], - [ - "▁fluorescent", - -13.107664108276367 - ], - [ - "▁Colegi", - -13.107762336730957 - ], - [ - "ianu", - -13.107860565185547 - ], - [ - "cruising", - -13.107882499694824 - ], - [ - "LINK", - -13.107965469360352 - ], - [ - "▁Cutting", - -13.108001708984375 - ], - [ - "ABILITY", - -13.108168601989746 - ], - [ - "▁Categories", - -13.108168601989746 - ], - [ - "▁erhoben", - -13.108168601989746 - ], - [ - "▁Cocktail", - -13.108169555664062 - ], - [ - "▁Generator", - -13.108177185058594 - ], - [ - "▁gesucht", - -13.108186721801758 - ], - [ - "▁telescope", - -13.10818862915039 - ], - [ - "KET", - -13.108192443847656 - ], - [ - "▁hilfreich", - -13.108192443847656 - ], - [ - "▁beneficiary", - -13.108585357666016 - ], - [ - "▁Winston", - -13.108636856079102 - ], - [ - "Auswirkungen", - -13.108675956726074 - ], - [ - "portrayed", - -13.108705520629883 - ], - [ - "▁Aspekte", - -13.108743667602539 - ], - [ - "ffected", - 
-13.108901023864746 - ], - [ - "eutic", - -13.108905792236328 - ], - [ - "International", - -13.109021186828613 - ], - [ - "attente", - -13.109078407287598 - ], - [ - "mentioning", - -13.109119415283203 - ], - [ - "launch", - -13.109129905700684 - ], - [ - "▁EURO", - -13.109152793884277 - ], - [ - "▁Fraser", - -13.109344482421875 - ], - [ - "▁Johannes", - -13.109408378601074 - ], - [ - "▁felicit", - -13.109477043151855 - ], - [ - "▁plâng", - -13.109522819519043 - ], - [ - "izant", - -13.10971736907959 - ], - [ - "▁reţe", - -13.109846115112305 - ], - [ - "Mech", - -13.109954833984375 - ], - [ - "▁algebra", - -13.110193252563477 - ], - [ - "▁surgeries", - -13.110257148742676 - ], - [ - "▁semifinal", - -13.110262870788574 - ], - [ - "▁intimidating", - -13.110288619995117 - ], - [ - "▁exkl", - -13.110604286193848 - ], - [ - "asigurarea", - -13.110918998718262 - ], - [ - "Tek", - -13.111136436462402 - ], - [ - "▁Einladung", - -13.111205101013184 - ], - [ - "▁similaire", - -13.111205101013184 - ], - [ - "▁bebelus", - -13.111221313476562 - ], - [ - "▁déclin", - -13.111400604248047 - ], - [ - "▁Console", - -13.111495018005371 - ], - [ - "RET", - -13.111573219299316 - ], - [ - "appli", - -13.111586570739746 - ], - [ - "45%", - -13.111663818359375 - ], - [ - "Evenimentul", - -13.111811637878418 - ], - [ - "sincerely", - -13.111812591552734 - ], - [ - "sammlung", - -13.112098693847656 - ], - [ - "Amérique", - -13.112220764160156 - ], - [ - "▁1919", - -13.112326622009277 - ], - [ - "regulation", - -13.112367630004883 - ], - [ - "gebäude", - -13.112726211547852 - ], - [ - "▁Perspektive", - -13.112726211547852 - ], - [ - "Espagne", - -13.112744331359863 - ], - [ - "▁Underground", - -13.11283016204834 - ], - [ - "secret", - -13.112833976745605 - ], - [ - "▁Aussicht", - -13.112874031066895 - ], - [ - "Photo", - -13.112977027893066 - ], - [ - "▁Brust", - -13.113144874572754 - ], - [ - "▁Sustainability", - -13.11323356628418 - ], - [ - "▁clădiri", - -13.11323356628418 - ], - [ - 
"▁librarian", - -13.11323356628418 - ], - [ - "▁HBO", - -13.113235473632812 - ], - [ - "▁Parallel", - -13.113240242004395 - ], - [ - "▁shimmer", - -13.113283157348633 - ], - [ - "▁schlicht", - -13.113292694091797 - ], - [ - "▁anticipat", - -13.113311767578125 - ], - [ - "▁foolish", - -13.11335563659668 - ], - [ - "▁Ability", - -13.11347484588623 - ], - [ - "▁ceremoni", - -13.11358642578125 - ], - [ - "▁Ablauf", - -13.11359977722168 - ], - [ - "icrobial", - -13.113606452941895 - ], - [ - "▁actiuni", - -13.11362361907959 - ], - [ - "▁Wilhelm", - -13.113761901855469 - ], - [ - "▁nennen", - -13.113775253295898 - ], - [ - "▁botez", - -13.113832473754883 - ], - [ - "Alpes", - -13.113912582397461 - ], - [ - "▁libér", - -13.11392593383789 - ], - [ - "▁sneakers", - -13.114052772521973 - ], - [ - "geschafft", - -13.114252090454102 - ], - [ - "▁downstairs", - -13.114261627197266 - ], - [ - "▁wrench", - -13.114294052124023 - ], - [ - "▁erheblich", - -13.11442756652832 - ], - [ - "▁alimentar", - -13.114710807800293 - ], - [ - "▁suger", - -13.11474323272705 - ], - [ - "analysis", - -13.114883422851562 - ], - [ - "öhn", - -13.114891052246094 - ], - [ - "▁Nantes", - -13.114895820617676 - ], - [ - "▁Arbor", - -13.114899635314941 - ], - [ - "ooze", - -13.115150451660156 - ], - [ - "▁facade", - -13.115229606628418 - ], - [ - "▁MySQL", - -13.115266799926758 - ], - [ - "▁Salvador", - -13.115266799926758 - ], - [ - "▁Schlafzimmer", - -13.115279197692871 - ], - [ - "▁autentic", - -13.115320205688477 - ], - [ - "▁prezint", - -13.115348815917969 - ], - [ - "▁campground", - -13.115397453308105 - ], - [ - "Query", - -13.11540412902832 - ], - [ - "bekannt", - -13.115598678588867 - ], - [ - "arcinia", - -13.115632057189941 - ], - [ - "▁stunt", - -13.115825653076172 - ], - [ - "▁informare", - -13.115830421447754 - ], - [ - "▁interzis", - -13.11584186553955 - ], - [ - "▁Burke", - -13.115995407104492 - ], - [ - "certified", - -13.11601734161377 - ], - [ - "▁clove", - -13.11605167388916 - ], - [ - 
"java", - -13.116271018981934 - ], - [ - "▁Vielfalt", - -13.116284370422363 - ], - [ - "gebung", - -13.116329193115234 - ], - [ - "▁9/11", - -13.116497993469238 - ], - [ - "▁disruptive", - -13.11650562286377 - ], - [ - "visual", - -13.116693496704102 - ], - [ - "▁anunţat", - -13.11679458618164 - ], - [ - "▁Plätze", - -13.116799354553223 - ], - [ - "▁reduceri", - -13.116920471191406 - ], - [ - "autorisation", - -13.116950035095215 - ], - [ - "▁ligament", - -13.11705207824707 - ], - [ - "▁învăța", - -13.117081642150879 - ], - [ - "läufig", - -13.117303848266602 - ], - [ - "▁Copenhagen", - -13.117303848266602 - ], - [ - "▁commodities", - -13.117303848266602 - ], - [ - "▁eindeutig", - -13.117313385009766 - ], - [ - "▁catheter", - -13.117321014404297 - ], - [ - "erklärung", - -13.117720603942871 - ], - [ - "▁intelectual", - -13.117814064025879 - ], - [ - "▁municipality", - -13.117891311645508 - ], - [ - "▁1936", - -13.11798095703125 - ], - [ - "rruption", - -13.118217468261719 - ], - [ - "▁Lafayette", - -13.118324279785156 - ], - [ - "▁berühmte", - -13.118324279785156 - ], - [ - "▁idylli", - -13.118325233459473 - ], - [ - "▁caldura", - -13.118447303771973 - ], - [ - "▁tablette", - -13.118535995483398 - ], - [ - "▁liquidity", - -13.118728637695312 - ], - [ - "NGOs", - -13.118885040283203 - ], - [ - "▁supliment", - -13.11889934539795 - ], - [ - "contact", - -13.119075775146484 - ], - [ - "lustig", - -13.119219779968262 - ], - [ - "▁watercolor", - -13.119319915771484 - ], - [ - "▁Tiffany", - -13.119344711303711 - ], - [ - "▁Glauben", - -13.119365692138672 - ], - [ - "Immobilie", - -13.119406700134277 - ], - [ - "▁stripped", - -13.119549751281738 - ], - [ - "▁Beatles", - -13.119601249694824 - ], - [ - "ани", - -13.119770050048828 - ], - [ - "▁lifespan", - -13.119986534118652 - ], - [ - "▁profondeur", - -13.120251655578613 - ], - [ - "▁durere", - -13.120329856872559 - ], - [ - "▁Lithuania", - -13.120367050170898 - ], - [ - "▁resurrection", - -13.120367050170898 - ], - [ - 
"▁suitcase", - -13.120535850524902 - ], - [ - "▁Plumber", - -13.120545387268066 - ], - [ - "criticized", - -13.120595932006836 - ], - [ - "feared", - -13.120756149291992 - ], - [ - "▁Aunt", - -13.120929718017578 - ], - [ - "otwithstanding", - -13.121068000793457 - ], - [ - "verständlich", - -13.12115478515625 - ], - [ - "fiber", - -13.121248245239258 - ], - [ - "headquartered", - -13.121390342712402 - ], - [ - "▁Perspective", - -13.121391296386719 - ], - [ - "▁semantic", - -13.121413230895996 - ], - [ - "VIEW", - -13.121431350708008 - ], - [ - "▁Ersatzteile", - -13.121567726135254 - ], - [ - "▁disgust", - -13.121685981750488 - ], - [ - "rrington", - -13.121834754943848 - ], - [ - "ässe", - -13.121922492980957 - ], - [ - "▁anerkannt", - -13.121956825256348 - ], - [ - "meaning", - -13.12203598022461 - ], - [ - "178", - -13.122039794921875 - ], - [ - "▁grupuri", - -13.1221284866333 - ], - [ - "ciones", - -13.122267723083496 - ], - [ - "▁Mobility", - -13.122414588928223 - ], - [ - "▁unstable", - -13.122422218322754 - ], - [ - "▁FULL", - -13.122456550598145 - ], - [ - "austausch", - -13.122491836547852 - ], - [ - "▁culminat", - -13.122549057006836 - ], - [ - "▁Roast", - -13.122742652893066 - ], - [ - "existant", - -13.122940063476562 - ], - [ - "167", - -13.123008728027344 - ], - [ - "tinerii", - -13.123040199279785 - ], - [ - "September", - -13.123115539550781 - ], - [ - "▁haircut", - -13.123274803161621 - ], - [ - "▁Tutorial", - -13.123440742492676 - ], - [ - "▁enquiries", - -13.123440742492676 - ], - [ - "▁livelihood", - -13.123440742492676 - ], - [ - "▁proficiency", - -13.123440742492676 - ], - [ - "▁pavement", - -13.123443603515625 - ], - [ - "▁Reservation", - -13.123445510864258 - ], - [ - "aimerai", - -13.123491287231445 - ], - [ - "▁laboratoire", - -13.123492240905762 - ], - [ - "leihen", - -13.123501777648926 - ], - [ - "ministerium", - -13.123518943786621 - ], - [ - "▁Concentr", - -13.12366008758545 - ], - [ - "▁swipe", - -13.12368106842041 - ], - [ - 
"extrêmement", - -13.123687744140625 - ], - [ - "cultivated", - -13.123708724975586 - ], - [ - "▁Converse", - -13.123845100402832 - ], - [ - "▁paycheck", - -13.123863220214844 - ], - [ - "olltest", - -13.123995780944824 - ], - [ - "▁Bauch", - -13.124022483825684 - ], - [ - "▁autobuz", - -13.124067306518555 - ], - [ - "attack", - -13.124094009399414 - ], - [ - "While", - -13.124311447143555 - ], - [ - "Retrouvez", - -13.124320983886719 - ], - [ - "▁Dolphin", - -13.124466896057129 - ], - [ - "▁Shelby", - -13.124480247497559 - ], - [ - "▁Diagnostic", - -13.124486923217773 - ], - [ - "▁reconcil", - -13.124558448791504 - ], - [ - "▁Iaşi", - -13.124733924865723 - ], - [ - "▁iubesc", - -13.124979972839355 - ], - [ - "▁Bestseller", - -13.124985694885254 - ], - [ - "▁antrenor", - -13.125035285949707 - ], - [ - "▁Imaging", - -13.125089645385742 - ], - [ - "▁priorité", - -13.125295639038086 - ], - [ - "▁brewery", - -13.125494003295898 - ], - [ - "▁residual", - -13.125494003295898 - ], - [ - "▁intermittent", - -13.125494956970215 - ], - [ - "Kollekt", - -13.125585556030273 - ], - [ - "▁Walsh", - -13.12558650970459 - ], - [ - "▁marvelous", - -13.125653266906738 - ], - [ - "canceled", - -13.125686645507812 - ], - [ - "174", - -13.125761985778809 - ], - [ - "normes", - -13.125837326049805 - ], - [ - "▁Tempo", - -13.125996589660645 - ], - [ - "▁Târgu", - -13.126008987426758 - ], - [ - "877", - -13.126165390014648 - ], - [ - "5-8", - -13.126190185546875 - ], - [ - "960", - -13.126486778259277 - ], - [ - "▁Scandinavia", - -13.1265230178833 - ], - [ - "▁prolific", - -13.126526832580566 - ], - [ - "lasi", - -13.126916885375977 - ], - [ - "glück", - -13.127097129821777 - ], - [ - "▁immersion", - -13.127204895019531 - ], - [ - "RSA", - -13.127323150634766 - ], - [ - "▁Polk", - -13.127340316772461 - ], - [ - "▁transmitter", - -13.12747859954834 - ], - [ - "▁Kleidung", - -13.12755298614502 - ], - [ - "▁Cosmo", - -13.127676963806152 - ], - [ - "▁1935", - -13.127788543701172 - ], - [ - 
"höhere", - -13.127906799316406 - ], - [ - "▁Tatsache", - -13.128074645996094 - ], - [ - "▁Outlet", - -13.1282377243042 - ], - [ - "▁canalisation", - -13.12824821472168 - ], - [ - "Mbps", - -13.128433227539062 - ], - [ - "▁skeptical", - -13.128582954406738 - ], - [ - "mplification", - -13.128617286682129 - ], - [ - "▁Advice", - -13.128618240356445 - ], - [ - "▁détaillé", - -13.128676414489746 - ], - [ - "660", - -13.128701210021973 - ], - [ - "▁eyebrow", - -13.128722190856934 - ], - [ - "▁HIGH", - -13.128898620605469 - ], - [ - "hnlich", - -13.129073143005371 - ], - [ - "▁depăș", - -13.12910270690918 - ], - [ - "▁procurori", - -13.129140853881836 - ], - [ - "▁refrain", - -13.129212379455566 - ], - [ - "▁geschaffen", - -13.12952995300293 - ], - [ - "justement", - -13.129663467407227 - ], - [ - "exposing", - -13.129700660705566 - ], - [ - "243", - -13.1298828125 - ], - [ - "sectorul", - -13.130104064941406 - ], - [ - "▁courrier", - -13.130180358886719 - ], - [ - "▁carcas", - -13.130199432373047 - ], - [ - "sitter", - -13.13022518157959 - ], - [ - "▁Schreiben", - -13.130335807800293 - ], - [ - "▁malfunction", - -13.130358695983887 - ], - [ - "poartă", - -13.130522727966309 - ], - [ - "raisons", - -13.130565643310547 - ], - [ - "▁HOT", - -13.130650520324707 - ], - [ - "▁refreshed", - -13.130730628967285 - ], - [ - "mânt", - -13.130744934082031 - ], - [ - "▁coefficient", - -13.13097858428955 - ], - [ - "▁instituţii", - -13.131194114685059 - ], - [ - "▁sanguin", - -13.131202697753906 - ], - [ - "▁ceci", - -13.131213188171387 - ], - [ - "▁garçon", - -13.131232261657715 - ], - [ - "deluxe", - -13.131237030029297 - ], - [ - "▁rectif", - -13.131311416625977 - ], - [ - "920", - -13.131364822387695 - ], - [ - "Exista", - -13.131428718566895 - ], - [ - "▁magnif", - -13.131568908691406 - ], - [ - "efficiencies", - -13.131681442260742 - ], - [ - "▁Mitsubishi", - -13.131681442260742 - ], - [ - "▁consortium", - -13.131681442260742 - ], - [ - "▁baggage", - -13.131683349609375 - ], - 
[ - "▁guild", - -13.131736755371094 - ], - [ - "▁sixty", - -13.13193130493164 - ], - [ - "▁Retreat", - -13.13245677947998 - ], - [ - "batting", - -13.132473945617676 - ], - [ - "470", - -13.132708549499512 - ], - [ - "▁Britanie", - -13.132718086242676 - ], - [ - "displaced", - -13.132734298706055 - ], - [ - "▁spați", - -13.132794380187988 - ], - [ - "▁exceptionnelle", - -13.13281536102295 - ], - [ - "▁authorize", - -13.132906913757324 - ], - [ - "▁prescribe", - -13.133187294006348 - ], - [ - "▁dépannage", - -13.133234024047852 - ], - [ - "▁sexuelle", - -13.133234024047852 - ], - [ - "valid", - -13.133275032043457 - ], - [ - "▁hymn", - -13.133752822875977 - ], - [ - "▁histories", - -13.133757591247559 - ], - [ - "▁oriunde", - -13.133764266967773 - ], - [ - "Pop", - -13.133785247802734 - ], - [ - "▁dispoziţi", - -13.133800506591797 - ], - [ - "ADI", - -13.133819580078125 - ], - [ - "Google", - -13.133830070495605 - ], - [ - "▁Autism", - -13.133918762207031 - ], - [ - "▁aggr", - -13.134354591369629 - ], - [ - "bleed", - -13.134618759155273 - ], - [ - "▁displacement", - -13.13478946685791 - ], - [ - "▁hobbies", - -13.13478946685791 - ], - [ - "▁anatomy", - -13.134799003601074 - ], - [ - "▁Klinik", - -13.134821891784668 - ], - [ - "▁CCTV", - -13.1348237991333 - ], - [ - "readable", - -13.134886741638184 - ], - [ - "ulph", - -13.134982109069824 - ], - [ - "metabol", - -13.135035514831543 - ], - [ - "▁rugăm", - -13.135037422180176 - ], - [ - "▁Scotia", - -13.135087013244629 - ], - [ - "▁Einheit", - -13.135211944580078 - ], - [ - "▁troupe", - -13.13581371307373 - ], - [ - "▁Practitioner", - -13.135828018188477 - ], - [ - "▁oarec", - -13.135909080505371 - ], - [ - "Appel", - -13.135998725891113 - ], - [ - "situația", - -13.136096000671387 - ], - [ - "▁Yemen", - -13.136353492736816 - ], - [ - "piping", - -13.136515617370605 - ], - [ - "blood", - -13.136772155761719 - ], - [ - "engraved", - -13.136866569519043 - ], - [ - "▁Cristina", - -13.136866569519043 - ], - [ - 
"▁inaccurate", - -13.136866569519043 - ], - [ - "savory", - -13.136878967285156 - ], - [ - "atism", - -13.136919021606445 - ], - [ - "▁dependency", - -13.137007713317871 - ], - [ - "▁assertion", - -13.137015342712402 - ], - [ - "▁intersect", - -13.137201309204102 - ], - [ - "DATA", - -13.137224197387695 - ], - [ - "▁britanic", - -13.1373872756958 - ], - [ - "▁sanitaire", - -13.137393951416016 - ], - [ - "▁PLUS", - -13.137436866760254 - ], - [ - "▁platter", - -13.137730598449707 - ], - [ - "▁reconsider", - -13.137802124023438 - ], - [ - "▁Swim", - -13.13786792755127 - ], - [ - "▁Scene", - -13.137896537780762 - ], - [ - "▁Reynolds", - -13.137907028198242 - ], - [ - "▁gesund", - -13.137922286987305 - ], - [ - "international", - -13.137959480285645 - ], - [ - "government", - -13.13804817199707 - ], - [ - "▁gemstone", - -13.138052940368652 - ], - [ - "▁reproductive", - -13.1381196975708 - ], - [ - "▁expressive", - -13.13820743560791 - ], - [ - "▁tranche", - -13.13842487335205 - ], - [ - "▁Niagara", - -13.138427734375 - ], - [ - "▁Studierende", - -13.138434410095215 - ], - [ - "▁crave", - -13.138607025146484 - ], - [ - "pathetic", - -13.138739585876465 - ], - [ - "▁1916", - -13.138858795166016 - ], - [ - "▁Thousand", - -13.138873100280762 - ], - [ - "uffed", - -13.138893127441406 - ], - [ - "▁Lancaster", - -13.138960838317871 - ], - [ - "▁revenge", - -13.138972282409668 - ], - [ - "▁melody", - -13.1389741897583 - ], - [ - "Suitable", - -13.138991355895996 - ], - [ - "▁beacon", - -13.139082908630371 - ], - [ - "▁MAY", - -13.139205932617188 - ], - [ - "livré", - -13.139216423034668 - ], - [ - "Virus", - -13.139391899108887 - ], - [ - "▁collaborator", - -13.139413833618164 - ], - [ - "produktion", - -13.139480590820312 - ], - [ - "▁iluminat", - -13.139593124389648 - ], - [ - "facets", - -13.13975715637207 - ], - [ - "▁expus", - -13.139784812927246 - ], - [ - "▁baptism", - -13.13999080657959 - ], - [ - "▁urgency", - -13.140016555786133 - ], - [ - "artery", - 
-13.14030647277832 - ], - [ - "▁eingeladen", - -13.14043140411377 - ], - [ - "▁entfernen", - -13.14051342010498 - ], - [ - "soaking", - -13.140555381774902 - ], - [ - "▁irré", - -13.140557289123535 - ], - [ - "▁purity", - -13.140700340270996 - ], - [ - "▁adăug", - -13.140731811523438 - ], - [ - "historischen", - -13.140777587890625 - ], - [ - "crezi", - -13.140793800354004 - ], - [ - "▁tarziu", - -13.141035079956055 - ], - [ - "▁Mozart", - -13.141040802001953 - ], - [ - "▁trimming", - -13.141056060791016 - ], - [ - "▁violat", - -13.141056060791016 - ], - [ - "▁Vermögen", - -13.14108943939209 - ], - [ - "▁Theorie", - -13.141114234924316 - ], - [ - "scheibe", - -13.14114761352539 - ], - [ - "Partidul", - -13.141324996948242 - ], - [ - "▁childcare", - -13.14133071899414 - ], - [ - "ajele", - -13.141345977783203 - ], - [ - "▁Punjab", - -13.141390800476074 - ], - [ - "6.3", - -13.14156436920166 - ], - [ - "▁recount", - -13.141571044921875 - ], - [ - "▁repel", - -13.141799926757812 - ], - [ - "vantage", - -13.1419095993042 - ], - [ - "6.4", - -13.141953468322754 - ], - [ - "▁comedian", - -13.142087936401367 - ], - [ - "▁snappe", - -13.142256736755371 - ], - [ - "PLE", - -13.142271041870117 - ], - [ - "▁rapper", - -13.142439842224121 - ], - [ - "▁Belfast", - -13.142657279968262 - ], - [ - "▁predictive", - -13.14271068572998 - ], - [ - "dépôt", - -13.1427583694458 - ], - [ - "flavored", - -13.142769813537598 - ], - [ - "chließlich", - -13.14293098449707 - ], - [ - "▁stump", - -13.142955780029297 - ], - [ - "▁lakh", - -13.142963409423828 - ], - [ - "3:30", - -13.143021583557129 - ], - [ - "▁cetățeni", - -13.1431245803833 - ], - [ - "▁Milliarden", - -13.143125534057617 - ], - [ - "Assurance", - -13.143128395080566 - ], - [ - "▁Marketplace", - -13.143329620361328 - ], - [ - "equipped", - -13.143423080444336 - ], - [ - "▁russe", - -13.143462181091309 - ], - [ - "Exactly", - -13.143651008605957 - ], - [ - "▁Venez", - -13.144125938415527 - ], - [ - "▁Pavilion", - 
-13.144171714782715 - ], - [ - "▁incontournable", - -13.144171714782715 - ], - [ - "▁slaughter", - -13.14417839050293 - ], - [ - "asteptam", - -13.144190788269043 - ], - [ - "▁Fighter", - -13.144196510314941 - ], - [ - "▁Landkreis", - -13.144278526306152 - ], - [ - "▁lumini", - -13.144312858581543 - ], - [ - "▁connaît", - -13.144615173339844 - ], - [ - "▁Breite", - -13.144674301147461 - ], - [ - "▁Disability", - -13.144774436950684 - ], - [ - "▁Alfa", - -13.144786834716797 - ], - [ - "▁poise", - -13.144895553588867 - ], - [ - "▁Alpen", - -13.144898414611816 - ], - [ - "betont", - -13.145031929016113 - ], - [ - "159", - -13.145161628723145 - ], - [ - "▁geprägt", - -13.145219802856445 - ], - [ - "▁intrigued", - -13.145219802856445 - ], - [ - "▁sympathy", - -13.145220756530762 - ], - [ - "societal", - -13.145225524902344 - ], - [ - "▁sédui", - -13.145243644714355 - ], - [ - "▁differentiation", - -13.145384788513184 - ], - [ - "▁aprobare", - -13.145744323730469 - ], - [ - "schirm", - -13.14585018157959 - ], - [ - "sagt", - -13.145956039428711 - ], - [ - "7.3", - -13.146101951599121 - ], - [ - "Bib", - -13.146263122558594 - ], - [ - "europäischen", - -13.146268844604492 - ], - [ - "▁Innovative", - -13.146268844604492 - ], - [ - "▁autonome", - -13.146330833435059 - ], - [ - "▁Objective", - -13.146400451660156 - ], - [ - "▁refusal", - -13.146551132202148 - ], - [ - "▁exposé", - -13.146719932556152 - ], - [ - "▁cetăţeni", - -13.146793365478516 - ], - [ - "▁stimmt", - -13.146798133850098 - ], - [ - "acordul", - -13.147162437438965 - ], - [ - "▁hormonal", - -13.147254943847656 - ], - [ - "intermédiaire", - -13.147319793701172 - ], - [ - "▁doubl", - -13.147374153137207 - ], - [ - "▁flute", - -13.147509574890137 - ], - [ - "▁Balkon", - -13.147523880004883 - ], - [ - "▁Florian", - -13.147607803344727 - ], - [ - "737", - -13.147614479064941 - ], - [ - "▁dritte", - -13.147639274597168 - ], - [ - "spitze", - -13.147685050964355 - ], - [ - "donnent", - -13.14778995513916 - ], - [ - 
"▁Zuhause", - -13.147850036621094 - ], - [ - "▁VIII", - -13.147852897644043 - ], - [ - "familien", - -13.148151397705078 - ], - [ - "▁sécurisé", - -13.148313522338867 - ], - [ - "▁glamour", - -13.148370742797852 - ], - [ - "▁societati", - -13.148370742797852 - ], - [ - "typique", - -13.1483793258667 - ], - [ - "▁addicted", - -13.148421287536621 - ], - [ - "▁Providence", - -13.148500442504883 - ], - [ - "▁Extended", - -13.148506164550781 - ], - [ - "▁Barbie", - -13.148513793945312 - ], - [ - "zustand", - -13.148516654968262 - ], - [ - "▁Sauna", - -13.148638725280762 - ], - [ - "▁propane", - -13.148663520812988 - ], - [ - "europa", - -13.148894309997559 - ], - [ - "glued", - -13.148940086364746 - ], - [ - "▁Mystery", - -13.148941993713379 - ], - [ - "▁travaillé", - -13.149106979370117 - ], - [ - "riol", - -13.149251937866211 - ], - [ - "fleisch", - -13.149288177490234 - ], - [ - "▁Eintritt", - -13.149327278137207 - ], - [ - "▁Syndrome", - -13.149422645568848 - ], - [ - "▁petroleum", - -13.149426460266113 - ], - [ - "▁genial", - -13.149433135986328 - ], - [ - "sponsored", - -13.149436950683594 - ], - [ - "▁Cindy", - -13.149436950683594 - ], - [ - "▁courier", - -13.149600982666016 - ], - [ - "▁Scrap", - -13.149640083312988 - ], - [ - "▁conţin", - -13.149724006652832 - ], - [ - "(2007)", - -13.149764060974121 - ], - [ - "▁gewährleisten", - -13.149949073791504 - ], - [ - "▁proprietor", - -13.15011215209961 - ], - [ - "▁cheque", - -13.15046215057373 - ], - [ - "maternity", - -13.150477409362793 - ], - [ - "▁Gustav", - -13.15048599243164 - ], - [ - "▁arterial", - -13.150497436523438 - ], - [ - "▁whiskey", - -13.150510787963867 - ], - [ - "▁concealed", - -13.150525093078613 - ], - [ - "thèque", - -13.150553703308105 - ], - [ - "felony", - -13.150579452514648 - ], - [ - "▁tweeted", - -13.150613784790039 - ], - [ - "OTA", - -13.150619506835938 - ], - [ - "nsel", - -13.150664329528809 - ], - [ - "▁coarse", - -13.150664329528809 - ], - [ - "▁identificat", - -13.150707244873047 
- ], - [ - "▁variability", - -13.150716781616211 - ], - [ - "civ", - -13.150843620300293 - ], - [ - "▁drastic", - -13.150956153869629 - ], - [ - "▁hatred", - -13.151090621948242 - ], - [ - "▁Bürgermeister", - -13.151237487792969 - ], - [ - "▁utilizatorilor", - -13.15124225616455 - ], - [ - "OULD", - -13.15137004852295 - ], - [ - "rmaßen", - -13.151383399963379 - ], - [ - "▁windshield", - -13.151530265808105 - ], - [ - "▁Particular", - -13.151531219482422 - ], - [ - "▁Tunnel", - -13.151638984680176 - ], - [ - "▁litri", - -13.15164852142334 - ], - [ - "extrême", - -13.15180492401123 - ], - [ - "▁Schalt", - -13.151944160461426 - ], - [ - "paket", - -13.152159690856934 - ], - [ - "berlin", - -13.152169227600098 - ], - [ - "▁slujb", - -13.152193069458008 - ], - [ - "facilitated", - -13.152206420898438 - ], - [ - "Congressional", - -13.152510643005371 - ], - [ - "▁honeymoon", - -13.152585983276367 - ], - [ - "▁Provision", - -13.152697563171387 - ], - [ - "▁Outfit", - -13.152779579162598 - ], - [ - "udder", - -13.152814865112305 - ], - [ - "▁chandelier", - -13.153002738952637 - ], - [ - "donating", - -13.153132438659668 - ], - [ - "historic", - -13.15333080291748 - ], - [ - "organized", - -13.153508186340332 - ], - [ - "(8)", - -13.15356731414795 - ], - [ - "▁touristique", - -13.153610229492188 - ], - [ - "▁Roosevelt", - -13.153643608093262 - ], - [ - "▁Verständnis", - -13.153643608093262 - ], - [ - "▁prilej", - -13.153655052185059 - ], - [ - "Vanity", - -13.153806686401367 - ], - [ - "chilly", - -13.153964042663574 - ], - [ - "loyer", - -13.154031753540039 - ], - [ - "▁Zhang", - -13.154053688049316 - ], - [ - "▁Nouveau", - -13.154193878173828 - ], - [ - "Soft", - -13.154326438903809 - ], - [ - "▁motherboard", - -13.15441608428955 - ], - [ - "▁Erklärung", - -13.154701232910156 - ], - [ - "▁Tasmania", - -13.154702186584473 - ], - [ - "▁verändern", - -13.154703140258789 - ], - [ - "▁seldom", - -13.154711723327637 - ], - [ - "▁Karriere", - -13.154714584350586 - ], - [ - 
"▁Mixed", - -13.154902458190918 - ], - [ - "umfang", - -13.154970169067383 - ], - [ - "▁Strategies", - -13.155035972595215 - ], - [ - "CHAR", - -13.155051231384277 - ], - [ - "olitary", - -13.155075073242188 - ], - [ - "▁Persoan", - -13.1550874710083 - ], - [ - "bewegung", - -13.155242919921875 - ], - [ - "▁Ernest", - -13.155367851257324 - ], - [ - "withdrawn", - -13.155855178833008 - ], - [ - "▁stationary", - -13.155881881713867 - ], - [ - "▁bland", - -13.155939102172852 - ], - [ - "▁Replace", - -13.156059265136719 - ], - [ - "▁Londres", - -13.156290054321289 - ], - [ - "▁plural", - -13.156290054321289 - ], - [ - "▁concentrat", - -13.156515121459961 - ], - [ - "Maschine", - -13.156675338745117 - ], - [ - "▁Advocate", - -13.156820297241211 - ], - [ - "▁vermitteln", - -13.156824111938477 - ], - [ - "▁dispenser", - -13.156827926635742 - ], - [ - "▁tedious", - -13.15695858001709 - ], - [ - "▁Straight", - -13.15705394744873 - ], - [ - "▁Corona", - -13.157061576843262 - ], - [ - "▁monumental", - -13.157073020935059 - ], - [ - "▁migrate", - -13.15720272064209 - ], - [ - "▁verlieren", - -13.157366752624512 - ], - [ - "▁Lub", - -13.157482147216797 - ], - [ - "▁reinforcement", - -13.157827377319336 - ], - [ - "▁cherish", - -13.157843589782715 - ], - [ - "Veterinary", - -13.157881736755371 - ], - [ - "geschwindigkeit", - -13.157881736755371 - ], - [ - "▁féminin", - -13.157881736755371 - ], - [ - "▁Facilities", - -13.157964706420898 - ], - [ - "▁urmari", - -13.158050537109375 - ], - [ - "▁Vertical", - -13.158098220825195 - ], - [ - "echoe", - -13.158188819885254 - ], - [ - "toured", - -13.158548355102539 - ], - [ - "Served", - -13.158772468566895 - ], - [ - "más", - -13.158853530883789 - ], - [ - "license", - -13.158893585205078 - ], - [ - "misunderstanding", - -13.158944129943848 - ], - [ - "▁glamorous", - -13.158944129943848 - ], - [ - "BJP", - -13.158973693847656 - ], - [ - "▁découvert", - -13.159173965454102 - ], - [ - "schönsten", - -13.159517288208008 - ], - [ - 
"▁(2018)", - -13.159577369689941 - ], - [ - "▁orasului", - -13.159581184387207 - ], - [ - "328", - -13.159674644470215 - ], - [ - "thighs", - -13.159801483154297 - ], - [ - "éclairage", - -13.160008430480957 - ], - [ - "Oamenii", - -13.160009384155273 - ], - [ - "▁Transmission", - -13.16014575958252 - ], - [ - "▁transpir", - -13.16015911102295 - ], - [ - "▁președinte", - -13.160321235656738 - ], - [ - "finalists", - -13.160327911376953 - ], - [ - "genügend", - -13.160524368286133 - ], - [ - "▁Aufmerksamkeit", - -13.160539627075195 - ], - [ - "▁unglaublich", - -13.160539627075195 - ], - [ - "▁descarc", - -13.160604476928711 - ], - [ - "▁Couch", - -13.160683631896973 - ], - [ - "eaucoup", - -13.160788536071777 - ], - [ - "▁adidas", - -13.161075592041016 - ], - [ - "▁1-800-", - -13.161077499389648 - ], - [ - "▁Communities", - -13.161102294921875 - ], - [ - "▁Einkommen", - -13.161102294921875 - ], - [ - "▁Reagan", - -13.16114330291748 - ], - [ - "▁Stoke", - -13.161260604858398 - ], - [ - "▁Snapchat", - -13.161269187927246 - ], - [ - "éclat", - -13.161272048950195 - ], - [ - "▁auseinander", - -13.161367416381836 - ], - [ - "▁richesse", - -13.16137409210205 - ], - [ - "▁toggle", - -13.161396026611328 - ], - [ - "▁Zutaten", - -13.161606788635254 - ], - [ - "▁député", - -13.16161060333252 - ], - [ - "▁battlefield", - -13.161611557006836 - ], - [ - "▁spirituel", - -13.161611557006836 - ], - [ - "▁Shuttle", - -13.161632537841797 - ], - [ - "▁Aktien", - -13.161665916442871 - ], - [ - "hormon", - -13.161819458007812 - ], - [ - "connection", - -13.16187858581543 - ], - [ - "▁vizitatori", - -13.16191577911377 - ], - [ - "érité", - -13.161971092224121 - ], - [ - "truck", - -13.1619873046875 - ], - [ - "▁yourselves", - -13.162139892578125 - ], - [ - "▁Logistics", - -13.162140846252441 - ], - [ - "coveted", - -13.16215705871582 - ], - [ - "▁şedinţ", - -13.162671089172363 - ], - [ - "▁messenger", - -13.162703514099121 - ], - [ - "▁țar", - -13.162918090820312 - ], - [ - "▁Grau", - 
-13.163025856018066 - ], - [ - "chirurgie", - -13.163138389587402 - ], - [ - "▁Ressourcen", - -13.16320514678955 - ], - [ - "▁Jésus", - -13.163207054138184 - ], - [ - "▁acțiune", - -13.163208961486816 - ], - [ - "▁Bundesliga", - -13.163249015808105 - ], - [ - "Lizenz", - -13.163379669189453 - ], - [ - "ELLE", - -13.163908958435059 - ], - [ - "vraie", - -13.1639986038208 - ], - [ - "ruined", - -13.164018630981445 - ], - [ - "▁Marble", - -13.164109230041504 - ], - [ - "▁Zambia", - -13.164308547973633 - ], - [ - "▁Finnish", - -13.164366722106934 - ], - [ - "▁trackback", - -13.164488792419434 - ], - [ - "héros", - -13.16451644897461 - ], - [ - "▁réclam", - -13.164534568786621 - ], - [ - "locurile", - -13.164706230163574 - ], - [ - "tägliche", - -13.164753913879395 - ], - [ - "IFF", - -13.164824485778809 - ], - [ - "▁contextual", - -13.164938926696777 - ], - [ - "▁Elvis", - -13.165084838867188 - ], - [ - "▁Batch", - -13.165183067321777 - ], - [ - "▁appris", - -13.16519546508789 - ], - [ - "intensive", - -13.165404319763184 - ], - [ - "▁întâmplat", - -13.16565990447998 - ], - [ - "▁prelucr", - -13.16576099395752 - ], - [ - "flore", - -13.165873527526855 - ], - [ - "▁Alkohol", - -13.165877342224121 - ], - [ - "Konzern", - -13.165895462036133 - ], - [ - "Delete", - -13.166082382202148 - ], - [ - "öck", - -13.16612720489502 - ], - [ - "▁clientii", - -13.16614818572998 - ], - [ - "▁innovate", - -13.166224479675293 - ], - [ - "▁ASAP", - -13.166345596313477 - ], - [ - "crumbs", - -13.166425704956055 - ], - [ - "reusable", - -13.166489601135254 - ], - [ - "▁Beaver", - -13.166507720947266 - ], - [ - "▁rosii", - -13.166643142700195 - ], - [ - "Arr", - -13.166704177856445 - ], - [ - "▁Zubehör", - -13.166948318481445 - ], - [ - "▁stolz", - -13.166952133178711 - ], - [ - "▁$75", - -13.16695499420166 - ], - [ - "▁Frühling", - -13.166967391967773 - ], - [ - "▁disagreement", - -13.166988372802734 - ], - [ - "▁formulate", - -13.167381286621094 - ], - [ - "braking", - -13.167522430419922 
- ], - [ - "▁submarine", - -13.167535781860352 - ], - [ - "▁identificare", - -13.167652130126953 - ], - [ - "lansarea", - -13.167659759521484 - ], - [ - "covered", - -13.167753219604492 - ], - [ - "benso", - -13.167859077453613 - ], - [ - "▁situatie", - -13.167989730834961 - ], - [ - "hilf", - -13.1681547164917 - ], - [ - "▁Southampton", - -13.168557167053223 - ], - [ - "▁intéressé", - -13.168557167053223 - ], - [ - "▁congressional", - -13.168572425842285 - ], - [ - "65%", - -13.168595314025879 - ], - [ - "▁Allison", - -13.168627738952637 - ], - [ - "Mainland", - -13.168726921081543 - ], - [ - "▁touchscreen", - -13.16882038116455 - ], - [ - "leitet", - -13.168922424316406 - ], - [ - "mnului", - -13.16958999633789 - ], - [ - "▁engagiert", - -13.169631004333496 - ], - [ - "joacă", - -13.16964340209961 - ], - [ - "▁$5,000", - -13.169652938842773 - ], - [ - "upscale", - -13.1697359085083 - ], - [ - "▁vérité", - -13.16983413696289 - ], - [ - "flüssig", - -13.170167922973633 - ], - [ - "Richtlinie", - -13.170169830322266 - ], - [ - "▁positif", - -13.170169830322266 - ], - [ - "▁diferenta", - -13.170175552368164 - ], - [ - "▁întâi", - -13.170707702636719 - ], - [ - "ethylene", - -13.170791625976562 - ], - [ - "kreuz", - -13.170913696289062 - ], - [ - "Surely", - -13.170990943908691 - ], - [ - "puneti", - -13.171002388000488 - ], - [ - "europe", - -13.171142578125 - ], - [ - "▁comunist", - -13.171271324157715 - ], - [ - "unterricht", - -13.171302795410156 - ], - [ - "▁Füll", - -13.171304702758789 - ], - [ - "▁Aberdeen", - -13.171792030334473 - ], - [ - "▁DSLR", - -13.171792030334473 - ], - [ - "▁functioneaza", - -13.171799659729004 - ], - [ - "▁benches", - -13.171807289123535 - ], - [ - "▁Alpine", - -13.171866416931152 - ], - [ - "phthal", - -13.172003746032715 - ], - [ - "▁counselling", - -13.17219066619873 - ], - [ - "▁erzielen", - -13.172323226928711 - ], - [ - "▁părinţi", - -13.172329902648926 - ], - [ - "▁besitzen", - -13.17236614227295 - ], - [ - "heavenly", - 
-13.172389030456543 - ], - [ - "▁masque", - -13.17281723022461 - ], - [ - "▁Legislature", - -13.172859191894531 - ], - [ - "▁Recycling", - -13.172861099243164 - ], - [ - "▁Derma", - -13.172883987426758 - ], - [ - "reunite", - -13.172926902770996 - ], - [ - "recettes", - -13.17310619354248 - ], - [ - "converge", - -13.173262596130371 - ], - [ - "▁compoziti", - -13.17327880859375 - ], - [ - "▁Nürnberg", - -13.173398971557617 - ], - [ - "760", - -13.173545837402344 - ], - [ - "▁entière", - -13.173674583435059 - ], - [ - "▁parchment", - -13.173944473266602 - ], - [ - "▁Aufwand", - -13.173945426940918 - ], - [ - "▁antivirus", - -13.174087524414062 - ], - [ - "▁remettr", - -13.17409610748291 - ], - [ - "▁NEVER", - -13.174243927001953 - ], - [ - "▁restrictive", - -13.174266815185547 - ], - [ - "▁beurre", - -13.174283027648926 - ], - [ - "▁frigider", - -13.174478530883789 - ], - [ - "acquisition", - -13.174642562866211 - ], - [ - "▁Correct", - -13.174866676330566 - ], - [ - "▁immortal", - -13.175017356872559 - ], - [ - "▁occupancy", - -13.175017356872559 - ], - [ - "▁Tucson", - -13.175019264221191 - ], - [ - "▁Dhabi", - -13.175025939941406 - ], - [ - "obligation", - -13.175033569335938 - ], - [ - "▁warfare", - -13.175037384033203 - ], - [ - "▁syntax", - -13.175045013427734 - ], - [ - "APS", - -13.175106048583984 - ], - [ - "мен", - -13.175209999084473 - ], - [ - "▁diferenț", - -13.175251960754395 - ], - [ - "wordpress", - -13.17549991607666 - ], - [ - "▁Wohnzimmer", - -13.175593376159668 - ], - [ - "oppo", - -13.175736427307129 - ], - [ - "▁miscare", - -13.175762176513672 - ], - [ - "companiilor", - -13.17581558227539 - ], - [ - "▁bezahlt", - -13.17584228515625 - ], - [ - "Sterne", - -13.175864219665527 - ], - [ - "inability", - -13.175898551940918 - ], - [ - "▁Hoffnung", - -13.176156044006348 - ], - [ - "▁românească", - -13.176176071166992 - ], - [ - "document", - -13.176177024841309 - ], - [ - "borrowers", - -13.17625904083252 - ], - [ - "▁rasa", - -13.176301956176758 - 
], - [ - "▁bénéfice", - -13.176445960998535 - ], - [ - "▁Panda", - -13.17645263671875 - ], - [ - "▁cărţi", - -13.176730155944824 - ], - [ - "▁Vorgehen", - -13.17690658569336 - ], - [ - "▁afecteaz", - -13.176956176757812 - ], - [ - "▁diagnos", - -13.177050590515137 - ], - [ - "▁Dentistry", - -13.177180290222168 - ], - [ - "▁staggering", - -13.177180290222168 - ], - [ - "präsident", - -13.177181243896484 - ], - [ - "▁vocational", - -13.177239418029785 - ], - [ - "Combined", - -13.177287101745605 - ], - [ - "stère", - -13.177306175231934 - ], - [ - "▁frunze", - -13.177478790283203 - ], - [ - "OLI", - -13.177525520324707 - ], - [ - "▁răc", - -13.177752494812012 - ], - [ - "▁changé", - -13.177754402160645 - ], - [ - "▁reprezentanți", - -13.177757263183594 - ], - [ - "▁ausgeschlossen", - -13.177777290344238 - ], - [ - "Windows", - -13.177891731262207 - ], - [ - "sometimes", - -13.177898406982422 - ], - [ - "▁dargestellt", - -13.178120613098145 - ], - [ - "provoking", - -13.178263664245605 - ], - [ - "terribly", - -13.178264617919922 - ], - [ - "▁speculate", - -13.178274154663086 - ], - [ - "▁complément", - -13.178305625915527 - ], - [ - "▁(2006)", - -13.178306579589844 - ], - [ - "zulegen", - -13.178668022155762 - ], - [ - "▁définitive", - -13.178876876831055 - ], - [ - "considerare", - -13.17911148071289 - ], - [ - "▁Subaru", - -13.179354667663574 - ], - [ - "WAN", - -13.179390907287598 - ], - [ - "guessed", - -13.179417610168457 - ], - [ - "spannung", - -13.179479598999023 - ], - [ - "▁supernatural", - -13.179515838623047 - ], - [ - "▁Interstate", - -13.17957878112793 - ], - [ - "▁redundant", - -13.179891586303711 - ], - [ - "▁HUG", - -13.179893493652344 - ], - [ - "▁restauration", - -13.180006980895996 - ], - [ - "repute", - -13.180011749267578 - ], - [ - "coagul", - -13.180028915405273 - ], - [ - "tehnologia", - -13.18043327331543 - ], - [ - "warded", - -13.180444717407227 - ], - [ - "▁lobster", - -13.180469512939453 - ], - [ - "▁Hafen", - -13.180542945861816 - ], - 
[ - "▁Guess", - -13.18056583404541 - ], - [ - "seraient", - -13.181038856506348 - ], - [ - "▁trench", - -13.181156158447266 - ], - [ - "▁piept", - -13.181283950805664 - ], - [ - "categorized", - -13.181396484375 - ], - [ - "softer", - -13.1815185546875 - ], - [ - "▁feasibility", - -13.181519508361816 - ], - [ - "▁restructuring", - -13.181519508361816 - ], - [ - "▁GOOD", - -13.181537628173828 - ], - [ - "▁inspiré", - -13.181610107421875 - ], - [ - "▁spéci", - -13.18163013458252 - ], - [ - "▁Mattress", - -13.181686401367188 - ], - [ - "▁biologique", - -13.181702613830566 - ], - [ - "▁Crema", - -13.182043075561523 - ], - [ - "▁korrekt", - -13.182063102722168 - ], - [ - "▁imperfect", - -13.182205200195312 - ], - [ - "▁advantageous", - -13.182329177856445 - ], - [ - "9.00", - -13.182390213012695 - ], - [ - "PAL", - -13.182557106018066 - ], - [ - "▁Illustration", - -13.182607650756836 - ], - [ - "▁Katherine", - -13.182607650756836 - ], - [ - "▁cervical", - -13.182607650756836 - ], - [ - "▁hectic", - -13.182611465454102 - ], - [ - "▁Belastung", - -13.182615280151367 - ], - [ - "▁Laguna", - -13.182628631591797 - ], - [ - "▁Burton", - -13.182761192321777 - ], - [ - "nettoyage", - -13.182875633239746 - ], - [ - "Toward", - -13.183072090148926 - ], - [ - "continuare", - -13.183072090148926 - ], - [ - "▁acumulat", - -13.183106422424316 - ], - [ - "▁déposé", - -13.183216094970703 - ], - [ - "▁prestige", - -13.183269500732422 - ], - [ - "▁LNG", - -13.183525085449219 - ], - [ - "▁Dacia", - -13.183662414550781 - ], - [ - "▁concede", - -13.183691024780273 - ], - [ - "▁reconciliation", - -13.183822631835938 - ], - [ - "Sistemul", - -13.183877944946289 - ], - [ - "Speed", - -13.183937072753906 - ], - [ - "▁Implant", - -13.183977127075195 - ], - [ - "▁möchtest", - -13.184020042419434 - ], - [ - "▁Norton", - -13.184064865112305 - ], - [ - "▁cosmic", - -13.184181213378906 - ], - [ - "enregistrement", - -13.184247016906738 - ], - [ - "țării", - -13.18433952331543 - ], - [ - 
"Veröffentlichung", - -13.184786796569824 - ], - [ - "erlebnis", - -13.184786796569824 - ], - [ - "▁Carpenter", - -13.184786796569824 - ], - [ - "▁INFORMATION", - -13.184786796569824 - ], - [ - "invites", - -13.18481731414795 - ], - [ - "▁gewan", - -13.1849365234375 - ], - [ - "▁réservé", - -13.184986114501953 - ], - [ - "▁aquatic", - -13.184988021850586 - ], - [ - "▁Seoul", - -13.18507194519043 - ], - [ - "▁älter", - -13.185185432434082 - ], - [ - "▁classmates", - -13.185223579406738 - ], - [ - "gelangen", - -13.185253143310547 - ], - [ - "▁Camill", - -13.185285568237305 - ], - [ - "simo", - -13.185291290283203 - ], - [ - "▁dormitor", - -13.185333251953125 - ], - [ - "wahren", - -13.185354232788086 - ], - [ - "▁incremental", - -13.185357093811035 - ], - [ - "▁caci", - -13.185494422912598 - ], - [ - "mittlere", - -13.185752868652344 - ], - [ - "▁condominium", - -13.185877799987793 - ], - [ - "▁rainforest", - -13.185877799987793 - ], - [ - "▁championnat", - -13.185891151428223 - ], - [ - "▁interrupted", - -13.185921669006348 - ], - [ - "▁tactile", - -13.185930252075195 - ], - [ - "▁unconditional", - -13.185945510864258 - ], - [ - "▁reactive", - -13.186041831970215 - ], - [ - "▁Stretch", - -13.1861572265625 - ], - [ - "▁serene", - -13.18624210357666 - ], - [ - "570", - -13.186318397521973 - ], - [ - "igte", - -13.186376571655273 - ], - [ - "Louis", - -13.186410903930664 - ], - [ - "▁Mittelpunkt", - -13.186493873596191 - ], - [ - "EEP", - -13.18651294708252 - ], - [ - "▁vault", - -13.186552047729492 - ], - [ - "absolu", - -13.186893463134766 - ], - [ - "▁solidarity", - -13.186971664428711 - ], - [ - "CLICK", - -13.18708324432373 - ], - [ - "▁hustle", - -13.187090873718262 - ], - [ - "▁microscope", - -13.187105178833008 - ], - [ - "▁Recommended", - -13.187111854553223 - ], - [ - "âche", - -13.18716812133789 - ], - [ - "▁flashlight", - -13.187286376953125 - ], - [ - "modificarea", - -13.18754768371582 - ], - [ - "izaţi", - -13.18773078918457 - ], - [ - "planned", - 
-13.187899589538574 - ], - [ - "Download", - -13.187906265258789 - ], - [ - "▁gourmand", - -13.188064575195312 - ], - [ - "▁subsidiaries", - -13.188064575195312 - ], - [ - "orthodox", - -13.188135147094727 - ], - [ - "▁Auburn", - -13.188323020935059 - ], - [ - "▁exprimat", - -13.188336372375488 - ], - [ - "procédé", - -13.18861198425293 - ], - [ - "▁ressenti", - -13.188648223876953 - ], - [ - "▁stint", - -13.188678741455078 - ], - [ - "Essentially", - -13.189072608947754 - ], - [ - "▁Savior", - -13.189164161682129 - ], - [ - "▁Flood", - -13.189168930053711 - ], - [ - "▁neurological", - -13.189249038696289 - ], - [ - "▁strig", - -13.189340591430664 - ], - [ - "scended", - -13.189421653747559 - ], - [ - "▁Shiva", - -13.189483642578125 - ], - [ - "▁Sketch", - -13.189544677734375 - ], - [ - "▁monarch", - -13.18956184387207 - ], - [ - "▁Preview", - -13.189632415771484 - ], - [ - "▁bewegt", - -13.189811706542969 - ], - [ - "mapped", - -13.189818382263184 - ], - [ - "énorme", - -13.189962387084961 - ], - [ - "▁définition", - -13.189963340759277 - ], - [ - "▁nécessité", - -13.189984321594238 - ], - [ - "▁antren", - -13.190027236938477 - ], - [ - "▁Infant", - -13.190072059631348 - ], - [ - "▁incumbent", - -13.190255165100098 - ], - [ - "▁pavilion", - -13.190255165100098 - ], - [ - "▁Taliban", - -13.19025707244873 - ], - [ - "Easily", - -13.19025993347168 - ], - [ - "▁verteilt", - -13.19030475616455 - ], - [ - "▁Biblical", - -13.190320014953613 - ], - [ - "Christian", - -13.190333366394043 - ], - [ - "județul", - -13.190436363220215 - ], - [ - "Learning", - -13.19046688079834 - ], - [ - "▁Expand", - -13.19054126739502 - ], - [ - "▁Attach", - -13.19056224822998 - ], - [ - "consideră", - -13.190573692321777 - ], - [ - "einsatz", - -13.190574645996094 - ], - [ - "Numai", - -13.190585136413574 - ], - [ - "▁Eintrag", - -13.190597534179688 - ], - [ - "▁üblich", - -13.190607070922852 - ], - [ - "▁cumpără", - -13.19062614440918 - ], - [ - "escaped", - -13.190693855285645 - ], - [ - 
"▁Ortodox", - -13.190804481506348 - ], - [ - "▁obţinut", - -13.190805435180664 - ], - [ - "ecluded", - -13.191036224365234 - ], - [ - "▁brownie", - -13.191089630126953 - ], - [ - "▁regulament", - -13.191253662109375 - ], - [ - "▁Chaos", - -13.191302299499512 - ], - [ - "▁masiv", - -13.19132137298584 - ], - [ - "▁Gerald", - -13.191376686096191 - ], - [ - "▁Sigur", - -13.191380500793457 - ], - [ - "▁wavelength", - -13.191380500793457 - ], - [ - "▁retiring", - -13.191396713256836 - ], - [ - "▁exactement", - -13.191819190979004 - ], - [ - "ntino", - -13.191823959350586 - ], - [ - "▁Krebs", - -13.19194221496582 - ], - [ - "▁monatlich", - -13.191956520080566 - ], - [ - "▁aranj", - -13.192011833190918 - ], - [ - "▁priveşt", - -13.192099571228027 - ], - [ - "▁mecanic", - -13.192109107971191 - ], - [ - "money", - -13.192233085632324 - ], - [ - "parliamentary", - -13.1922607421875 - ], - [ - "▁probation", - -13.192427635192871 - ], - [ - "embroidered", - -13.192451477050781 - ], - [ - "▁amenajat", - -13.192451477050781 - ], - [ - "▁remnant", - -13.192451477050781 - ], - [ - "▁senzati", - -13.192472457885742 - ], - [ - "▁Declaration", - -13.192483901977539 - ], - [ - "farbe", - -13.192506790161133 - ], - [ - "▁skinny", - -13.19260311126709 - ], - [ - "Energi", - -13.192648887634277 - ], - [ - "verhältnisse", - -13.19288158416748 - ], - [ - "Recruit", - -13.192972183227539 - ], - [ - "frying", - -13.193161010742188 - ], - [ - "925", - -13.193294525146484 - ], - [ - "nstruire", - -13.193302154541016 - ], - [ - "toasted", - -13.193424224853516 - ], - [ - "▁nicotine", - -13.193551063537598 - ], - [ - "recessed", - -13.193570137023926 - ], - [ - "▁dialect", - -13.193572044372559 - ], - [ - "▁confisc", - -13.193575859069824 - ], - [ - "▁bubbl", - -13.193643569946289 - ], - [ - "▁Precision", - -13.193682670593262 - ], - [ - "▁sollicit", - -13.193842887878418 - ], - [ - "▁Moral", - -13.193977355957031 - ], - [ - "▁renseignements", - -13.194112777709961 - ], - [ - "UMP", - 
-13.194116592407227 - ], - [ - "ijn", - -13.194183349609375 - ], - [ - "▁fermeture", - -13.194320678710938 - ], - [ - "▁blueprint", - -13.19462776184082 - ], - [ - "▁groceries", - -13.194652557373047 - ], - [ - "möbel", - -13.194655418395996 - ], - [ - "▁Plenty", - -13.194657325744629 - ], - [ - "▁forfeit", - -13.194719314575195 - ], - [ - "méthodes", - -13.194915771484375 - ], - [ - "paving", - -13.19493293762207 - ], - [ - "outheastern", - -13.194979667663574 - ], - [ - "▁Overview", - -13.19503116607666 - ], - [ - "▁observers", - -13.195171356201172 - ], - [ - "▁Timișoara", - -13.19520378112793 - ], - [ - "noticing", - -13.195332527160645 - ], - [ - "▁Owl", - -13.195381164550781 - ], - [ - "▁1925", - -13.195517539978027 - ], - [ - "▁prüfen", - -13.195755004882812 - ], - [ - "▁Bewohner", - -13.195756912231445 - ], - [ - "▁Latvia", - -13.195770263671875 - ], - [ - "▁Tuscan", - -13.19577407836914 - ], - [ - "▁apprenticeship", - -13.195789337158203 - ], - [ - "▁courteous", - -13.1958646774292 - ], - [ - "adult", - -13.196023941040039 - ], - [ - "Licensed", - -13.196029663085938 - ], - [ - "abused", - -13.196762084960938 - ], - [ - "confidence", - -13.19678020477295 - ], - [ - "▁revolt", - -13.196782112121582 - ], - [ - "conference", - -13.196861267089844 - ], - [ - "genoss", - -13.196914672851562 - ], - [ - "▁răni", - -13.196944236755371 - ], - [ - "▁Intervention", - -13.196949005126953 - ], - [ - "▁primesc", - -13.196969985961914 - ], - [ - "trays", - -13.197041511535645 - ], - [ - "nozzle", - -13.197216033935547 - ], - [ - "▁splitting", - -13.197443962097168 - ], - [ - "▁könne", - -13.197507858276367 - ], - [ - "▁peisaj", - -13.197943687438965 - ], - [ - "▁academia", - -13.197962760925293 - ], - [ - "▁chakra", - -13.197979927062988 - ], - [ - "▁Abdul", - -13.1981201171875 - ], - [ - "▁Beschreibung", - -13.198225021362305 - ], - [ - "Regeln", - -13.19831371307373 - ], - [ - "eezy", - -13.198314666748047 - ], - [ - "▁problématique", - -13.198515892028809 - ], - [ - 
"▁Ausführung", - -13.198524475097656 - ], - [ - "▁reconnect", - -13.19868278503418 - ], - [ - "▁telefonic", - -13.198966026306152 - ], - [ - "▁Ethereum", - -13.199069023132324 - ], - [ - "▁Winnipeg", - -13.199069023132324 - ], - [ - "▁misconception", - -13.199069023132324 - ], - [ - "▁Verpackung", - -13.199070930480957 - ], - [ - "▁erzeugt", - -13.199097633361816 - ], - [ - "▁Identity", - -13.199104309082031 - ], - [ - "▁dunkle", - -13.199109077453613 - ], - [ - "sustaining", - -13.19916820526123 - ], - [ - "▁pereche", - -13.199178695678711 - ], - [ - "▁neîn", - -13.199239730834961 - ], - [ - "directorul", - -13.199291229248047 - ], - [ - "▁élabor", - -13.199584007263184 - ], - [ - "▁Hollow", - -13.19960880279541 - ], - [ - "▁getestet", - -13.199751853942871 - ], - [ - "▁Promote", - -13.199797630310059 - ], - [ - "agriculture", - -13.199920654296875 - ], - [ - "▁deosebir", - -13.199934005737305 - ], - [ - "▁neam", - -13.199999809265137 - ], - [ - "aufbau", - -13.200042724609375 - ], - [ - "▁susținut", - -13.200079917907715 - ], - [ - "fueled", - -13.200119018554688 - ], - [ - "▁impresionant", - -13.200177192687988 - ], - [ - "innate", - -13.20026969909668 - ], - [ - "grenzt", - -13.200340270996094 - ], - [ - "rescued", - -13.200514793395996 - ], - [ - "bestand", - -13.200559616088867 - ], - [ - "▁adjunct", - -13.200729370117188 - ], - [ - "▁Mischung", - -13.200754165649414 - ], - [ - "▁Lease", - -13.201258659362793 - ], - [ - "espagnol", - -13.201284408569336 - ], - [ - "▁Kickstarter", - -13.201284408569336 - ], - [ - "▁buzunar", - -13.201284408569336 - ], - [ - "▁buddies", - -13.20129108428955 - ], - [ - "käufe", - -13.201485633850098 - ], - [ - "cevoir", - -13.201582908630371 - ], - [ - "▁creşte", - -13.201675415039062 - ], - [ - "▁Cluster", - -13.201825141906738 - ], - [ - "▁obișnui", - -13.201838493347168 - ], - [ - "▁cassette", - -13.201889038085938 - ], - [ - "▁optisch", - -13.201947212219238 - ], - [ - "manned", - -13.20200252532959 - ], - [ - "schneid", - 
-13.202362060546875 - ], - [ - "Württemberg", - -13.202393531799316 - ], - [ - "shredded", - -13.202393531799316 - ], - [ - "▁botanical", - -13.20239543914795 - ], - [ - "characterization", - -13.202445983886719 - ], - [ - "▁Durchführung", - -13.202452659606934 - ], - [ - "▁tireless", - -13.20250129699707 - ], - [ - "lässlich", - -13.20254135131836 - ], - [ - "▁Merchant", - -13.202570915222168 - ], - [ - "joutez", - -13.20259952545166 - ], - [ - "▁amélior", - -13.202676773071289 - ], - [ - "fixed", - -13.202741622924805 - ], - [ - "kho", - -13.202760696411133 - ], - [ - "▁televizor", - -13.202948570251465 - ], - [ - "▁Davies", - -13.202964782714844 - ], - [ - "enceinte", - -13.203118324279785 - ], - [ - "▁Panorama", - -13.20350456237793 - ], - [ - "▁maternal", - -13.203507423400879 - ], - [ - "diversified", - -13.203513145446777 - ], - [ - "▁Jü", - -13.203570365905762 - ], - [ - "▁naz", - -13.203730583190918 - ], - [ - "▁plonge", - -13.2039213180542 - ], - [ - "geschickt", - -13.203944206237793 - ], - [ - "MIS", - -13.204215049743652 - ], - [ - "ragged", - -13.204553604125977 - ], - [ - "▁diarrhea", - -13.20461654663086 - ], - [ - "▁tsunami", - -13.20461654663086 - ], - [ - "▁Nikola", - -13.204625129699707 - ], - [ - "▁festivities", - -13.20464038848877 - ], - [ - "potting", - -13.20479965209961 - ], - [ - "▁telefonisch", - -13.204874038696289 - ], - [ - "TAR", - -13.204971313476562 - ], - [ - "▁schimbări", - -13.205023765563965 - ], - [ - "▁occidental", - -13.205172538757324 - ], - [ - "schloss", - -13.205179214477539 - ], - [ - "Print", - -13.205284118652344 - ], - [ - "▁autoritățil", - -13.205361366271973 - ], - [ - "idos", - -13.20556640625 - ], - [ - "mediocr", - -13.20559310913086 - ], - [ - "▁Decla", - -13.205686569213867 - ], - [ - "▁Elliott", - -13.205729484558105 - ], - [ - "▁pinpoint", - -13.205734252929688 - ], - [ - "▁disciple", - -13.20579719543457 - ], - [ - "▁Cairo", - -13.2058744430542 - ], - [ - "▁15-20", - -13.2059326171875 - ], - [ - "▁limbaj", 
- -13.20611572265625 - ], - [ - "▁retenu", - -13.206154823303223 - ], - [ - "▁Blüte", - -13.20628833770752 - ], - [ - "▁MINI", - -13.206467628479004 - ], - [ - "▁lumină", - -13.206567764282227 - ], - [ - "▁flawed", - -13.206846237182617 - ], - [ - "▁Belarus", - -13.207067489624023 - ], - [ - "Totul", - -13.207207679748535 - ], - [ - "hôte", - -13.207273483276367 - ], - [ - "▁verbringen", - -13.207315444946289 - ], - [ - "▁simultaneous", - -13.207344055175781 - ], - [ - "▁competiți", - -13.207402229309082 - ], - [ - "▁lancement", - -13.207413673400879 - ], - [ - "▁proprietati", - -13.207432746887207 - ], - [ - "▁angajator", - -13.207465171813965 - ], - [ - "▁ignorant", - -13.207674026489258 - ], - [ - "▁indicative", - -13.207700729370117 - ], - [ - "▁Bearbeitung", - -13.207961082458496 - ], - [ - "▁Ungaria", - -13.207961082458496 - ], - [ - "▁Sfint", - -13.208015441894531 - ], - [ - "▁Trojan", - -13.20804214477539 - ], - [ - "▁1911", - -13.208100318908691 - ], - [ - "▁reliabl", - -13.2081937789917 - ], - [ - "6-0", - -13.20827865600586 - ], - [ - "obst", - -13.208523750305176 - ], - [ - "▁relève", - -13.208579063415527 - ], - [ - "▁standpoint", - -13.208874702453613 - ], - [ - "ridden", - -13.208918571472168 - ], - [ - "▁Pdf", - -13.209005355834961 - ], - [ - "tatewide", - -13.209051132202148 - ], - [ - "Water", - -13.209062576293945 - ], - [ - "▁Pricing", - -13.209089279174805 - ], - [ - "▁protecţi", - -13.209168434143066 - ], - [ - "November", - -13.209615707397461 - ], - [ - "▁televiziune", - -13.20964241027832 - ], - [ - "Sodium", - -13.209881782531738 - ], - [ - "douceur", - -13.209942817687988 - ], - [ - "▁Flasche", - -13.210183143615723 - ], - [ - "3.9", - -13.210193634033203 - ], - [ - "▁electromagnetic", - -13.210195541381836 - ], - [ - "▁mitochondria", - -13.210195541381836 - ], - [ - "Suddenly", - -13.210199356079102 - ], - [ - "▁Drupal", - -13.210201263427734 - ], - [ - "▁supraveghere", - -13.210211753845215 - ], - [ - "▁cornea", - -13.210288047790527 - 
], - [ - "räumt", - -13.210309982299805 - ], - [ - "▁healed", - -13.210410118103027 - ], - [ - "Roc", - -13.210649490356445 - ], - [ - "▁temporar", - -13.210707664489746 - ], - [ - "▁amaze", - -13.210770606994629 - ], - [ - "▁confrunta", - -13.210833549499512 - ], - [ - "Afterward", - -13.210836410522461 - ], - [ - "▁festgelegt", - -13.21084213256836 - ], - [ - "▁Kuchen", - -13.210844993591309 - ], - [ - "▁perpetual", - -13.210858345031738 - ], - [ - "systematically", - -13.211000442504883 - ], - [ - "▁coloan", - -13.211006164550781 - ], - [ - "▁extensi", - -13.211058616638184 - ], - [ - "▁Județean", - -13.211315155029297 - ], - [ - "▁amelior", - -13.211315155029297 - ], - [ - "▁illustrator", - -13.211315155029297 - ], - [ - "▁titanium", - -13.211344718933105 - ], - [ - "SMEs", - -13.211384773254395 - ], - [ - "taxable", - -13.211578369140625 - ], - [ - "▁Borough", - -13.211607933044434 - ], - [ - "verlust", - -13.211772918701172 - ], - [ - "ductive", - -13.21233081817627 - ], - [ - "▁Küste", - -13.212335586547852 - ], - [ - "▁végétal", - -13.212410926818848 - ], - [ - "▁breastfeeding", - -13.212435722351074 - ], - [ - "▁captivating", - -13.212435722351074 - ], - [ - "▁Chevy", - -13.212443351745605 - ], - [ - "▁aerospace", - -13.212469100952148 - ], - [ - "pozitia", - -13.213095664978027 - ], - [ - "Tutor", - -13.213199615478516 - ], - [ - "▁spum", - -13.213312149047852 - ], - [ - "curând", - -13.213419914245605 - ], - [ - "iscus", - -13.213458061218262 - ], - [ - "October", - -13.213495254516602 - ], - [ - "▁Reparatur", - -13.213557243347168 - ], - [ - "▁Servicii", - -13.213574409484863 - ], - [ - "▁Gonz", - -13.21357536315918 - ], - [ - "▁cybersecurity", - -13.21357536315918 - ], - [ - "▁UCLA", - -13.213678359985352 - ], - [ - "rissa", - -13.213835716247559 - ], - [ - "▁Kemp", - -13.213850021362305 - ], - [ - "▁piston", - -13.214046478271484 - ], - [ - "▁révèle", - -13.214118957519531 - ], - [ - "▁posséd", - -13.21412181854248 - ], - [ - "▁versehen", - 
-13.214129447937012 - ], - [ - "▁scrutin", - -13.214226722717285 - ], - [ - "donnant", - -13.21436882019043 - ], - [ - "▁Geschwindigkeit", - -13.214680671691895 - ], - [ - "▁Panasonic", - -13.214680671691895 - ], - [ - "audio", - -13.214700698852539 - ], - [ - "▁Packaging", - -13.214771270751953 - ], - [ - "phra", - -13.2147798538208 - ], - [ - "▁Letzte", - -13.214954376220703 - ], - [ - "insicht", - -13.215141296386719 - ], - [ - "▁sammeln", - -13.215243339538574 - ], - [ - "▁extins", - -13.215259552001953 - ], - [ - "▁collège", - -13.215266227722168 - ], - [ - "ancies", - -13.215343475341797 - ], - [ - "▁întâlnit", - -13.215350151062012 - ], - [ - "▁Servi", - -13.215392112731934 - ], - [ - "stattet", - -13.215493202209473 - ], - [ - "▁abstraction", - -13.215566635131836 - ], - [ - "▁candidature", - -13.215592384338379 - ], - [ - "ONU", - -13.215676307678223 - ], - [ - "▁raffle", - -13.215826988220215 - ], - [ - "▁Soldier", - -13.215834617614746 - ], - [ - "▁stipulate", - -13.215883255004883 - ], - [ - "▁vizual", - -13.215950012207031 - ], - [ - "lucht", - -13.216007232666016 - ], - [ - "▁circus", - -13.216068267822266 - ], - [ - "▁decree", - -13.216259002685547 - ], - [ - "immeuble", - -13.216367721557617 - ], - [ - "Store", - -13.216426849365234 - ], - [ - "randul", - -13.216622352600098 - ], - [ - "▁narration", - -13.216933250427246 - ], - [ - "implication", - -13.216958045959473 - ], - [ - "▁discontinued", - -13.216971397399902 - ], - [ - "▁Pilates", - -13.216989517211914 - ], - [ - "▁biais", - -13.21701431274414 - ], - [ - "panel", - -13.217325210571289 - ], - [ - "▁mower", - -13.217458724975586 - ], - [ - "▁Castro", - -13.21753978729248 - ], - [ - "pregătire", - -13.217641830444336 - ], - [ - "▁denomination", - -13.218062400817871 - ], - [ - "▁throttle", - -13.21806526184082 - ], - [ - "▁finition", - -13.218086242675781 - ], - [ - "▁clarification", - -13.218286514282227 - ], - [ - "laut", - -13.218366622924805 - ], - [ - "▁wastewater", - -13.2184419631958 - 
], - [ - "▁Sanchez", - -13.218770980834961 - ], - [ - "▁Umfeld", - -13.2189359664917 - ], - [ - "▁consili", - -13.218997955322266 - ], - [ - "extrait", - -13.219013214111328 - ], - [ - "ionism", - -13.2190523147583 - ], - [ - "▁Cannabis", - -13.219186782836914 - ], - [ - "▁misconduct", - -13.219186782836914 - ], - [ - "▁shepherd", - -13.219186782836914 - ], - [ - "▁feminist", - -13.21919059753418 - ], - [ - "▁criterii", - -13.219212532043457 - ], - [ - "America", - -13.219219207763672 - ], - [ - "▁Telephone", - -13.219270706176758 - ], - [ - "▁Fritz", - -13.219438552856445 - ], - [ - "▁cheltui", - -13.219794273376465 - ], - [ - "▁Übung", - -13.219857215881348 - ], - [ - "făcută", - -13.22006893157959 - ], - [ - "▁străzi", - -13.220170021057129 - ], - [ - "influencing", - -13.220315933227539 - ], - [ - "▁Democracy", - -13.220321655273438 - ], - [ - "atorium", - -13.220376014709473 - ], - [ - "▁Stufe", - -13.220465660095215 - ], - [ - "▁Cornell", - -13.220660209655762 - ], - [ - "zugehen", - -13.22074031829834 - ], - [ - "▁coton", - -13.220804214477539 - ], - [ - "▁beinhaltet", - -13.220881462097168 - ], - [ - "▁kritisch", - -13.220884323120117 - ], - [ - "▁Kalender", - -13.22105884552002 - ], - [ - "▁Teig", - -13.221253395080566 - ], - [ - "cooked", - -13.221264839172363 - ], - [ - "▁diversité", - -13.221390724182129 - ], - [ - "recognizable", - -13.221446990966797 - ], - [ - "▁Dictionary", - -13.221446990966797 - ], - [ - "attribution", - -13.22145938873291 - ], - [ - "▁Teresa", - -13.221471786499023 - ], - [ - "▁Ahmad", - -13.221487998962402 - ], - [ - "HAM", - -13.221627235412598 - ], - [ - "▁floss", - -13.221668243408203 - ], - [ - "génie", - -13.2218599319458 - ], - [ - "▁Espa", - -13.221989631652832 - ], - [ - "hersteller", - -13.221993446350098 - ], - [ - "Musée", - -13.222001075744629 - ], - [ - "▁Crawford", - -13.222579002380371 - ], - [ - "▁Phantom", - -13.222579002380371 - ], - [ - "▁Jenkins", - -13.222640037536621 - ], - [ - "genauer", - 
-13.222774505615234 - ], - [ - "▁acţiuni", - -13.222885131835938 - ], - [ - "▁meciuri", - -13.22322940826416 - ], - [ - "▁verstärkt", - -13.22326374053955 - ], - [ - "▁troop", - -13.22341251373291 - ], - [ - "räder", - -13.223483085632324 - ], - [ - "Putting", - -13.223536491394043 - ], - [ - "NASDAQ", - -13.223712921142578 - ], - [ - "▁Buddhism", - -13.223712921142578 - ], - [ - "▁Religious", - -13.223712921142578 - ], - [ - "▁accommodating", - -13.223712921142578 - ], - [ - "▁lendemain", - -13.223712921142578 - ], - [ - "▁plywood", - -13.223714828491211 - ], - [ - "▁inflatable", - -13.223724365234375 - ], - [ - "▁sèche", - -13.223731994628906 - ], - [ - "▁fragil", - -13.223845481872559 - ], - [ - "▁Filip", - -13.224115371704102 - ], - [ - "▁Terrace", - -13.224274635314941 - ], - [ - "Biblio", - -13.22432804107666 - ], - [ - "resides", - -13.22448444366455 - ], - [ - "▁varf", - -13.22451114654541 - ], - [ - "Bildern", - -13.224528312683105 - ], - [ - "loß", - -13.224685668945312 - ], - [ - "555", - -13.224702835083008 - ], - [ - "▁astounding", - -13.224847793579102 - ], - [ - "▁brillant", - -13.224857330322266 - ], - [ - "▁Railroad", - -13.224871635437012 - ], - [ - "minimizing", - -13.224907875061035 - ], - [ - "▁Benedict", - -13.225019454956055 - ], - [ - "▁$400", - -13.225068092346191 - ], - [ - "▁schematic", - -13.225217819213867 - ], - [ - "Canada", - -13.225371360778809 - ], - [ - "▁psihic", - -13.225415229797363 - ], - [ - "▁avertiz", - -13.225497245788574 - ], - [ - "▁Breed", - -13.225550651550293 - ], - [ - "▁gradina", - -13.225606918334961 - ], - [ - "▁Liege", - -13.225822448730469 - ], - [ - "▁Retirement", - -13.225983619689941 - ], - [ - "▁pergola", - -13.226005554199219 - ], - [ - "▁Kuwait", - -13.2260103225708 - ], - [ - "▁logistic", - -13.22629451751709 - ], - [ - "▁captive", - -13.22651481628418 - ], - [ - "prepared", - -13.226568222045898 - ], - [ - "▁prononc", - -13.226568222045898 - ], - [ - "Celui", - -13.226676940917969 - ], - [ - 
"deutschland", - -13.227120399475098 - ], - [ - "▁devreme", - -13.227124214172363 - ], - [ - "▁părți", - -13.227270126342773 - ], - [ - "▁1934", - -13.227517127990723 - ], - [ - "▁ersetzt", - -13.227560997009277 - ], - [ - "▁frightening", - -13.227689743041992 - ], - [ - "▁fiecărui", - -13.227819442749023 - ], - [ - "correct", - -13.22799015045166 - ], - [ - "6.6", - -13.228057861328125 - ], - [ - "▁Manitoba", - -13.228259086608887 - ], - [ - "Chartered", - -13.228416442871094 - ], - [ - "▁părăs", - -13.228543281555176 - ], - [ - "Powered", - -13.228697776794434 - ], - [ - "impede", - -13.22876262664795 - ], - [ - "agonist", - -13.22878646850586 - ], - [ - "▁stratégique", - -13.228829383850098 - ], - [ - "▁vigilant", - -13.228830337524414 - ], - [ - "faceted", - -13.228930473327637 - ], - [ - "available", - -13.229308128356934 - ], - [ - "▁Promise", - -13.229388236999512 - ], - [ - "▁humorous", - -13.229446411132812 - ], - [ - "treibt", - -13.229449272155762 - ], - [ - "▁Patrol", - -13.229514122009277 - ], - [ - "huh", - -13.229523658752441 - ], - [ - "ztlich", - -13.229804039001465 - ], - [ - "▁rejet", - -13.2299165725708 - ], - [ - "odeur", - -13.229935646057129 - ], - [ - "usziehbar", - -13.22996997833252 - ], - [ - "▁gespannt", - -13.229972839355469 - ], - [ - "church", - -13.230018615722656 - ], - [ - "▁Popescu", - -13.230109214782715 - ], - [ - "▁einmalig", - -13.230518341064453 - ], - [ - "diluted", - -13.230551719665527 - ], - [ - "lighted", - -13.231070518493652 - ], - [ - "▁stattfinden", - -13.23111343383789 - ], - [ - "▁Reaktion", - -13.231183052062988 - ], - [ - "▁délivr", - -13.23134994506836 - ], - [ - "▁Helfer", - -13.231407165527344 - ], - [ - "Fiind", - -13.23142147064209 - ], - [ - "rmând", - -13.231507301330566 - ], - [ - "▁Beweis", - -13.231671333312988 - ], - [ - "▁Violet", - -13.231733322143555 - ], - [ - "kamera", - -13.231764793395996 - ], - [ - "▁Romney", - -13.231779098510742 - ], - [ - "▁Bradford", - -13.231800079345703 - ], - [ - 
"stellbar", - -13.231852531433105 - ], - [ - "▁roadmap", - -13.231921195983887 - ], - [ - "▁subconscious", - -13.23204231262207 - ], - [ - "contrasting", - -13.232138633728027 - ], - [ - "mécanisme", - -13.232254981994629 - ], - [ - "kämpft", - -13.232255935668945 - ], - [ - "▁Preston", - -13.232719421386719 - ], - [ - "▁Anliegen", - -13.232802391052246 - ], - [ - "▁necessities", - -13.232827186584473 - ], - [ - "▁detrimental", - -13.232828140258789 - ], - [ - "▁sprawl", - -13.232830047607422 - ], - [ - "▁Erfüllung", - -13.23287582397461 - ], - [ - "▁massacre", - -13.2329683303833 - ], - [ - "▁pietre", - -13.232987403869629 - ], - [ - "▁situații", - -13.233027458190918 - ], - [ - "vêtement", - -13.233080863952637 - ], - [ - "Listed", - -13.233144760131836 - ], - [ - "▁extravagant", - -13.233399391174316 - ], - [ - "▁axle", - -13.233525276184082 - ], - [ - "OTT", - -13.233663558959961 - ], - [ - "wildly", - -13.233744621276855 - ], - [ - "70,000", - -13.233797073364258 - ], - [ - "▁chauffeur", - -13.23384952545166 - ], - [ - "▁Brasov", - -13.233972549438477 - ], - [ - "▁Fähigkeiten", - -13.233972549438477 - ], - [ - "▁staatlich", - -13.234025001525879 - ], - [ - "outlines", - -13.234034538269043 - ], - [ - "▁aufmerksam", - -13.234545707702637 - ], - [ - "▁Relation", - -13.234749794006348 - ], - [ - "▁Stephan", - -13.234947204589844 - ], - [ - "yland", - -13.23494815826416 - ], - [ - "proclaimed", - -13.235086441040039 - ], - [ - "Wallet", - -13.235100746154785 - ], - [ - "verarbeitung", - -13.235118865966797 - ], - [ - "▁überraschen", - -13.235118865966797 - ], - [ - "▁Injury", - -13.235125541687012 - ], - [ - "▁horsepower", - -13.235237121582031 - ], - [ - "▁Tropical", - -13.23523998260498 - ], - [ - "▁wives", - -13.235459327697754 - ], - [ - "adherence", - -13.235677719116211 - ], - [ - "schätzung", - -13.235692977905273 - ], - [ - "▁coherent", - -13.235708236694336 - ], - [ - "parlament", - -13.23574161529541 - ], - [ - "▁stup", - -13.235852241516113 - ], - [ - 
"▁resonance", - -13.23626708984375 - ], - [ - "▁inheritance", - -13.236355781555176 - ], - [ - "commenced", - -13.23645305633545 - ], - [ - "▁supervise", - -13.236475944519043 - ], - [ - "▁facilitator", - -13.236488342285156 - ], - [ - "fares", - -13.236678123474121 - ], - [ - "▁Tibet", - -13.23672866821289 - ], - [ - "communication", - -13.236787796020508 - ], - [ - "yog", - -13.236806869506836 - ], - [ - "▁WLAN", - -13.236842155456543 - ], - [ - "▁Chili", - -13.23685073852539 - ], - [ - "▁Harold", - -13.2369966506958 - ], - [ - "▁Guerre", - -13.237005233764648 - ], - [ - "▁Femme", - -13.237146377563477 - ], - [ - "▁Lisbon", - -13.237231254577637 - ], - [ - "▁mulțumi", - -13.237415313720703 - ], - [ - "▁vorbereitet", - -13.237415313720703 - ], - [ - "▁aperture", - -13.237422943115234 - ], - [ - "▁Universities", - -13.237442016601562 - ], - [ - "▁reckless", - -13.237471580505371 - ], - [ - "▁Botschaft", - -13.237533569335938 - ], - [ - "▁Squad", - -13.238022804260254 - ], - [ - "▁buoy", - -13.238061904907227 - ], - [ - "participarea", - -13.238236427307129 - ], - [ - "stiinta", - -13.238389015197754 - ], - [ - "▁repeal", - -13.238415718078613 - ], - [ - "drilled", - -13.238489151000977 - ], - [ - "▁Conversation", - -13.238567352294922 - ], - [ - "▁subsid", - -13.238615036010742 - ], - [ - "anstalt", - -13.238741874694824 - ], - [ - "faktor", - -13.23874282836914 - ], - [ - "▁swamp", - -13.238790512084961 - ], - [ - "pflichtig", - -13.238921165466309 - ], - [ - "▁camion", - -13.238970756530762 - ], - [ - "▁gouvern", - -13.239032745361328 - ], - [ - "▁archaeological", - -13.239141464233398 - ], - [ - "▁glitch", - -13.239198684692383 - ], - [ - "average", - -13.239294052124023 - ], - [ - "▁coffre", - -13.239481925964355 - ], - [ - "▁Insert", - -13.239513397216797 - ], - [ - "▁colonne", - -13.2395601272583 - ], - [ - "▁Assess", - -13.23962116241455 - ], - [ - "▁batches", - -13.239716529846191 - ], - [ - "▁ammunition", - -13.239717483520508 - ], - [ - "▁scissors", - 
-13.239717483520508 - ], - [ - "▁Locksmith", - -13.239740371704102 - ], - [ - "▁Bollywood", - -13.239991188049316 - ], - [ - "expédi", - -13.240288734436035 - ], - [ - "▁descendants", - -13.24039363861084 - ], - [ - "▁unwilling", - -13.240506172180176 - ], - [ - "▁Noise", - -13.240649223327637 - ], - [ - "▁Directive", - -13.240660667419434 - ], - [ - "ATOR", - -13.240765571594238 - ], - [ - "▁Rajasthan", - -13.240870475769043 - ], - [ - "▁chaotic", - -13.240888595581055 - ], - [ - "▁NEED", - -13.24093246459961 - ], - [ - "▁părere", - -13.24095344543457 - ], - [ - "▁begonnen", - -13.241448402404785 - ], - [ - "▁Reef", - -13.241504669189453 - ], - [ - "▁vorgesehen", - -13.24161434173584 - ], - [ - "▁allocate", - -13.241826057434082 - ], - [ - "▁exceptionnel", - -13.241936683654785 - ], - [ - "▁gefertigt", - -13.24203872680664 - ], - [ - "fading", - -13.242072105407715 - ], - [ - "▁interpersonal", - -13.242178916931152 - ], - [ - "▁occupie", - -13.242204666137695 - ], - [ - "▁Teatr", - -13.242579460144043 - ], - [ - "▁kilomètres", - -13.242603302001953 - ], - [ - "▁verbinden", - -13.242608070373535 - ], - [ - "▁Frucht", - -13.242643356323242 - ], - [ - "augmented", - -13.242720603942871 - ], - [ - "▁twentieth", - -13.243181228637695 - ], - [ - "▁aggression", - -13.243183135986328 - ], - [ - "▁Miracle", - -13.243184089660645 - ], - [ - "▁peninsula", - -13.243184089660645 - ], - [ - "▁Fernando", - -13.243185043334961 - ], - [ - "▁autorităţil", - -13.243203163146973 - ], - [ - "▁Iisus", - -13.243217468261719 - ], - [ - "▁puck", - -13.243423461914062 - ], - [ - "titel", - -13.243454933166504 - ], - [ - "▁remake", - -13.243562698364258 - ], - [ - "freiheit", - -13.243563652038574 - ], - [ - "▁Belize", - -13.243590354919434 - ], - [ - "▁secundar", - -13.243779182434082 - ], - [ - "▁perpetrat", - -13.243786811828613 - ], - [ - "jedenfalls", - -13.243797302246094 - ], - [ - "linked", - -13.243820190429688 - ], - [ - "▁dégag", - -13.243918418884277 - ], - [ - "LAY", - 
-13.243926048278809 - ], - [ - "behandlung", - -13.244172096252441 - ], - [ - "▁1928", - -13.244193077087402 - ], - [ - "▁Nickel", - -13.244205474853516 - ], - [ - "rophy", - -13.244256973266602 - ], - [ - "▁autonomy", - -13.244338989257812 - ], - [ - "▁Treffen", - -13.244402885437012 - ], - [ - "▁groundbreaking", - -13.24445915222168 - ], - [ - "politisch", - -13.244484901428223 - ], - [ - "▁Vector", - -13.244553565979004 - ], - [ - "oricine", - -13.244684219360352 - ], - [ - "utilisées", - -13.244684219360352 - ], - [ - "plete", - -13.244771003723145 - ], - [ - "droht", - -13.244918823242188 - ], - [ - "▁alternativ", - -13.245104789733887 - ], - [ - "▁Bernie", - -13.245213508605957 - ], - [ - "▁embellish", - -13.245260238647461 - ], - [ - "▁Curriculum", - -13.24549674987793 - ], - [ - "herrscht", - -13.245525360107422 - ], - [ - "escalier", - -13.246126174926758 - ], - [ - "hian", - -13.246333122253418 - ], - [ - "ertaining", - -13.246387481689453 - ], - [ - "hitter", - -13.246430397033691 - ], - [ - "▁kompetente", - -13.24665641784668 - ], - [ - "▁trekking", - -13.246760368347168 - ], - [ - "EACH", - -13.246841430664062 - ], - [ - "▁Bedien", - -13.2470703125 - ], - [ - "starred", - -13.247169494628906 - ], - [ - "▁săptămâna", - -13.247236251831055 - ], - [ - "▁Gratuit", - -13.247239112854004 - ], - [ - "▁Jahrzehnte", - -13.247241020202637 - ], - [ - "ingénieur", - -13.24731731414795 - ], - [ - "▁Huang", - -13.24736213684082 - ], - [ - "Music", - -13.247401237487793 - ], - [ - "misiei", - -13.247544288635254 - ], - [ - "▁masuri", - -13.247733116149902 - ], - [ - "▁Achievement", - -13.247817039489746 - ], - [ - "▁Dorothy", - -13.247817039489746 - ], - [ - "blätter", - -13.247817993164062 - ], - [ - "éloign", - -13.247817993164062 - ], - [ - "▁Anglia", - -13.247990608215332 - ], - [ - "brach", - -13.248013496398926 - ], - [ - "▁Optimization", - -13.248085021972656 - ], - [ - "6.7", - -13.248170852661133 - ], - [ - "winkel", - -13.248210906982422 - ], - [ - 
"contenan", - -13.248347282409668 - ], - [ - "Astăzi", - -13.248398780822754 - ], - [ - "wiped", - -13.248441696166992 - ], - [ - "granting", - -13.248665809631348 - ], - [ - "▁plăti", - -13.248859405517578 - ], - [ - "▁Compensation", - -13.248979568481445 - ], - [ - "▁Verkäufer", - -13.248979568481445 - ], - [ - "▁angajați", - -13.248980522155762 - ], - [ - "▁diminished", - -13.24902057647705 - ], - [ - "employment", - -13.249250411987305 - ], - [ - "yahoo", - -13.249435424804688 - ], - [ - "▁détrui", - -13.249698638916016 - ], - [ - "▁suffisant", - -13.24982738494873 - ], - [ - "▁Moldovei", - -13.250144004821777 - ], - [ - "▁Pokemon", - -13.250144004821777 - ], - [ - "▁Malcolm", - -13.250144958496094 - ], - [ - "▁mysteries", - -13.250147819519043 - ], - [ - "▁Diversity", - -13.250149726867676 - ], - [ - "▁clinique", - -13.250327110290527 - ], - [ - "landais", - -13.250344276428223 - ], - [ - "▁campanii", - -13.250399589538574 - ], - [ - "▁témoignage", - -13.250439643859863 - ], - [ - "▁paralel", - -13.250467300415039 - ], - [ - "▁travailleurs", - -13.250576972961426 - ], - [ - "▁salvage", - -13.250580787658691 - ], - [ - "▁crayon", - -13.250732421875 - ], - [ - "immédiat", - -13.25085163116455 - ], - [ - "hopped", - -13.250958442687988 - ], - [ - "▁senzor", - -13.25102710723877 - ], - [ - "▁imbunatati", - -13.251073837280273 - ], - [ - "▁capitalize", - -13.2511568069458 - ], - [ - "▁Elephant", - -13.25130844116211 - ], - [ - "▁insomnia", - -13.25131607055664 - ], - [ - "▁Ansicht", - -13.251325607299805 - ], - [ - "▁lupte", - -13.251556396484375 - ], - [ - "▁genomic", - -13.251557350158691 - ], - [ - "▁Grape", - -13.251769065856934 - ], - [ - "MONT", - -13.25197982788086 - ], - [ - "métiers", - -13.252004623413086 - ], - [ - "▁Pierce", - -13.252123832702637 - ], - [ - "consulted", - -13.252388954162598 - ], - [ - "▁Responsible", - -13.252474784851074 - ], - [ - "symmetry", - -13.252476692199707 - ], - [ - "▁sulfur", - -13.252487182617188 - ], - [ - "▁înapoi", - 
-13.252510070800781 - ], - [ - "▁Junction", - -13.252549171447754 - ], - [ - "▁trilogy", - -13.252622604370117 - ], - [ - "▁unkompliziert", - -13.253059387207031 - ], - [ - "▁zugänglich", - -13.253059387207031 - ], - [ - "▁préfèr", - -13.253153800964355 - ], - [ - "oarelor", - -13.253361701965332 - ], - [ - "langage", - -13.253460884094238 - ], - [ - "admired", - -13.253589630126953 - ], - [ - "platform", - -13.253595352172852 - ], - [ - "▁pluralit", - -13.253616333007812 - ], - [ - "▁betrachtet", - -13.253643035888672 - ], - [ - "▁reproduc", - -13.253790855407715 - ], - [ - "exemple", - -13.25385570526123 - ], - [ - "▁conspir", - -13.254347801208496 - ], - [ - "▁pelvi", - -13.25437068939209 - ], - [ - "leased", - -13.254551887512207 - ], - [ - "▁souffle", - -13.254570960998535 - ], - [ - "▁approprié", - -13.254705429077148 - ], - [ - "absorbing", - -13.254817962646484 - ], - [ - "dividing", - -13.254855155944824 - ], - [ - "herently", - -13.255147933959961 - ], - [ - "▁blister", - -13.255179405212402 - ], - [ - "löst", - -13.255182266235352 - ], - [ - "Apotheke", - -13.255398750305176 - ], - [ - "▁Asociaţi", - -13.255424499511719 - ], - [ - "education", - -13.255904197692871 - ], - [ - "▁retract", - -13.255982398986816 - ], - [ - "▁appraise", - -13.255990982055664 - ], - [ - "▁Debbie", - -13.256075859069824 - ], - [ - "▁arhitect", - -13.256193161010742 - ], - [ - "▁Mohamed", - -13.256568908691406 - ], - [ - "▁îndrept", - -13.256568908691406 - ], - [ - "▁exhaustive", - -13.256753921508789 - ], - [ - "▁Notebook", - -13.257004737854004 - ], - [ - "crashing", - -13.257068634033203 - ], - [ - "▁Betreiber", - -13.257155418395996 - ], - [ - "▁présidentielle", - -13.257159233093262 - ], - [ - "▁Träger", - -13.257172584533691 - ], - [ - "▁noteworthy", - -13.257259368896484 - ], - [ - "▁séparé", - -13.257729530334473 - ], - [ - "▁doppelt", - -13.257795333862305 - ], - [ - "tină", - -13.258066177368164 - ], - [ - "Quelques", - -13.258085250854492 - ], - [ - "culoarea", - 
-13.258100509643555 - ], - [ - "▁ethic", - -13.258166313171387 - ], - [ - "▁cohesive", - -13.258329391479492 - ], - [ - "▁congratulations", - -13.258334159851074 - ], - [ - "▁sovereignty", - -13.25833797454834 - ], - [ - "▁Aplica", - -13.258413314819336 - ], - [ - "▁Covenant", - -13.25851058959961 - ], - [ - "▁multicultural", - -13.258591651916504 - ], - [ - "assemblée", - -13.258955001831055 - ], - [ - "▁petals", - -13.258974075317383 - ], - [ - "erode", - -13.259026527404785 - ], - [ - "▁porumb", - -13.259035110473633 - ], - [ - "▁Barrier", - -13.259050369262695 - ], - [ - "▁WWE", - -13.259085655212402 - ], - [ - "Etwa", - -13.259175300598145 - ], - [ - "▁recunosc", - -13.259271621704102 - ], - [ - "▁turtle", - -13.259415626525879 - ], - [ - "▁vârf", - -13.259444236755371 - ], - [ - "▁Ranking", - -13.259448051452637 - ], - [ - "▁sympathetic", - -13.259514808654785 - ], - [ - "exploded", - -13.2595796585083 - ], - [ - "▁influenț", - -13.259591102600098 - ], - [ - "▁Fireplace", - -13.25972843170166 - ], - [ - "▁Nachwuchs", - -13.260090827941895 - ], - [ - "▁empfohlen", - -13.260090827941895 - ], - [ - "Voir", - -13.260661125183105 - ], - [ - "▁Vimeo", - -13.26069164276123 - ], - [ - "▁weaving", - -13.260967254638672 - ], - [ - "beneficiar", - -13.261198043823242 - ], - [ - "▁balade", - -13.261216163635254 - ], - [ - "▁Mercy", - -13.261566162109375 - ], - [ - "3.000", - -13.26181697845459 - ], - [ - "Immediately", - -13.261857032775879 - ], - [ - "▁frosting", - -13.261868476867676 - ], - [ - "▁Fiscal", - -13.261882781982422 - ], - [ - "downloadable", - -13.26188850402832 - ], - [ - "▁Hwy", - -13.261902809143066 - ], - [ - "évoluer", - -13.261951446533203 - ], - [ - "▁vieille", - -13.2620210647583 - ], - [ - "heißen", - -13.262436866760254 - ], - [ - "▁étrangère", - -13.262446403503418 - ], - [ - "▁incapable", - -13.262490272521973 - ], - [ - "volunteered", - -13.262520790100098 - ], - [ - "fortunately", - -13.262564659118652 - ], - [ - "company", - 
-13.262738227844238 - ], - [ - "denkt", - -13.2627592086792 - ], - [ - "▁citesc", - -13.262818336486816 - ], - [ - "▁intrebare", - -13.262896537780762 - ], - [ - "pleasantly", - -13.262990951538086 - ], - [ - "▁Minecraft", - -13.263079643249512 - ], - [ - "▁Schmuck", - -13.26308536529541 - ], - [ - "▁maghiar", - -13.263099670410156 - ], - [ - "conductive", - -13.263339042663574 - ], - [ - "décrit", - -13.263534545898438 - ], - [ - "provide", - -13.26353931427002 - ], - [ - "▁depăş", - -13.263628959655762 - ], - [ - "ituated", - -13.263657569885254 - ], - [ - "▁trumpet", - -13.264216423034668 - ], - [ - "▁nastere", - -13.2642240524292 - ], - [ - "▁Région", - -13.264245986938477 - ], - [ - "Occupational", - -13.264411926269531 - ], - [ - "▁Grecia", - -13.264415740966797 - ], - [ - "▁Conclusion", - -13.26449203491211 - ], - [ - "▁collaborateurs", - -13.264927864074707 - ], - [ - "▁Alibaba", - -13.265398025512695 - ], - [ - "▁amplasat", - -13.265398979187012 - ], - [ - "▁Plastik", - -13.265992164611816 - ], - [ - "▁stash", - -13.266023635864258 - ], - [ - "▁Bonnie", - -13.266045570373535 - ], - [ - "▁ehrlich", - -13.266156196594238 - ], - [ - "▁contention", - -13.266193389892578 - ], - [ - "▁Oslo", - -13.266263008117676 - ], - [ - "englische", - -13.266319274902344 - ], - [ - "measurable", - -13.266439437866211 - ], - [ - "loppy", - -13.266470909118652 - ], - [ - "▁Refrigerat", - -13.266579627990723 - ], - [ - "▁remboursement", - -13.266580581665039 - ], - [ - "▁societăţi", - -13.266580581665039 - ], - [ - "translates", - -13.266607284545898 - ], - [ - "ichtigkeit", - -13.266685485839844 - ], - [ - "agentur", - -13.266741752624512 - ], - [ - "▁compute", - -13.266800880432129 - ], - [ - "berater", - -13.266921043395996 - ], - [ - "▁Georgetown", - -13.266945838928223 - ], - [ - "wolves", - -13.266951560974121 - ], - [ - "ceased", - -13.266959190368652 - ], - [ - "▁Binary", - -13.267030715942383 - ], - [ - "▁kontrolliert", - -13.267172813415527 - ], - [ - "informer", - 
-13.267416000366211 - ], - [ - "lehrer", - -13.267578125 - ], - [ - "lieferung", - -13.267709732055664 - ], - [ - "▁definit", - -13.267742156982422 - ], - [ - "chèque", - -13.267765045166016 - ], - [ - "▁clergy", - -13.267765045166016 - ], - [ - "▁ministries", - -13.267767906188965 - ], - [ - "▁plague", - -13.267779350280762 - ], - [ - "▁Jedi", - -13.267805099487305 - ], - [ - "▁Blackjack", - -13.268025398254395 - ], - [ - "▁subsection", - -13.26807689666748 - ], - [ - "▁Sachsen", - -13.268121719360352 - ], - [ - "valorile", - -13.268146514892578 - ], - [ - "molded", - -13.26816463470459 - ], - [ - "▁betroffen", - -13.268183708190918 - ], - [ - "▁adecvat", - -13.268229484558105 - ], - [ - "▁collègue", - -13.26835823059082 - ], - [ - "▁chinez", - -13.268392562866211 - ], - [ - "emelle", - -13.268695831298828 - ], - [ - "▁körperliche", - -13.268902778625488 - ], - [ - "▁titan", - -13.26891040802002 - ], - [ - "▁sophistication", - -13.268951416015625 - ], - [ - "▁provoke", - -13.268957138061523 - ], - [ - "▁pensii", - -13.269042015075684 - ], - [ - "▁Tucker", - -13.269377708435059 - ], - [ - "▁motoare", - -13.26943302154541 - ], - [ - "supported", - -13.269536972045898 - ], - [ - "▁Sicil", - -13.269697189331055 - ], - [ - "▁Ausgangs", - -13.26987361907959 - ], - [ - "▁verletzt", - -13.269908905029297 - ], - [ - "Ligue", - -13.269996643066406 - ], - [ - "▁organizatori", - -13.270026206970215 - ], - [ - "▁apprentice", - -13.270099639892578 - ], - [ - "▁Potato", - -13.270183563232422 - ], - [ - "▁Duft", - -13.27039623260498 - ], - [ - "▁medicament", - -13.270566940307617 - ], - [ - "Hôtel", - -13.270740509033203 - ], - [ - "▁Triangle", - -13.270842552185059 - ], - [ - "buted", - -13.271100044250488 - ], - [ - "▁Bentley", - -13.271336555480957 - ], - [ - "următoarele", - -13.271389961242676 - ], - [ - "animate", - -13.271404266357422 - ], - [ - "megapixel", - -13.271404266357422 - ], - [ - "einfachen", - -13.271514892578125 - ], - [ - "▁performanț", - -13.271544456481934 
- ], - [ - "lurry", - -13.27184009552002 - ], - [ - "suffisamment", - -13.27192211151123 - ], - [ - "▁Weihnachten", - -13.27192211151123 - ], - [ - "▁Detective", - -13.27194595336914 - ], - [ - "▁lovit", - -13.272049903869629 - ], - [ - "▁blouse", - -13.27213191986084 - ], - [ - "▁hartie", - -13.272163391113281 - ], - [ - "vro", - -13.27225112915039 - ], - [ - "▁disastrous", - -13.272517204284668 - ], - [ - "vermutlich", - -13.2725191116333 - ], - [ - "▁Stafford", - -13.272527694702148 - ], - [ - "ehlt", - -13.272628784179688 - ], - [ - "▁vielseitig", - -13.272643089294434 - ], - [ - "Manifest", - -13.273274421691895 - ], - [ - "homage", - -13.27354907989502 - ], - [ - "menée", - -13.273566246032715 - ], - [ - "▁erläuter", - -13.27370834350586 - ], - [ - "▁volontaire", - -13.273709297180176 - ], - [ - "wrought", - -13.27371597290039 - ], - [ - "▁Naples", - -13.273719787597656 - ], - [ - "recommending", - -13.273759841918945 - ], - [ - "▁thermique", - -13.273774147033691 - ], - [ - "▁subtitle", - -13.273787498474121 - ], - [ - "▁Slam", - -13.273809432983398 - ], - [ - "▁necesitate", - -13.273809432983398 - ], - [ - "trimmed", - -13.274099349975586 - ], - [ - "urmatoarele", - -13.274178504943848 - ], - [ - "▁Sorin", - -13.274245262145996 - ], - [ - "▁compromis", - -13.274300575256348 - ], - [ - "overcoming", - -13.274477005004883 - ], - [ - "▁Samantha", - -13.274901390075684 - ], - [ - "dazzling", - -13.27490234375 - ], - [ - "▁Pearson", - -13.274903297424316 - ], - [ - "▁glazing", - -13.274911880493164 - ], - [ - "Revelation", - -13.274921417236328 - ], - [ - "destinée", - -13.275156021118164 - ], - [ - "öffnet", - -13.27515983581543 - ], - [ - "CERT", - -13.275327682495117 - ], - [ - "▁Sneak", - -13.275503158569336 - ], - [ - "proiectele", - -13.275605201721191 - ], - [ - "▁longitudinal", - -13.27609634399414 - ], - [ - "▁cocaine", - -13.276098251342773 - ], - [ - "▁universitar", - -13.276108741760254 - ], - [ - "▁refreshments", - -13.276166915893555 - ], - [ - 
"▁instanţ", - -13.276243209838867 - ], - [ - "▁kostenfrei", - -13.276397705078125 - ], - [ - "▁comédie", - -13.276451110839844 - ], - [ - "▁Locat", - -13.276725769042969 - ], - [ - "▁Albania", - -13.276732444763184 - ], - [ - "▁mécanique", - -13.276776313781738 - ], - [ - "messung", - -13.27683162689209 - ], - [ - "issus", - -13.277260780334473 - ], - [ - "pinned", - -13.277328491210938 - ], - [ - "▁sanft", - -13.277335166931152 - ], - [ - "▁geprüft", - -13.277435302734375 - ], - [ - "▁procè", - -13.277442932128906 - ], - [ - "▁Üb", - -13.277765274047852 - ], - [ - "5-0", - -13.277802467346191 - ], - [ - "▁Catering", - -13.277957916259766 - ], - [ - "▁prosperous", - -13.27801513671875 - ], - [ - "▁replication", - -13.278098106384277 - ], - [ - "▁obese", - -13.278441429138184 - ], - [ - "clerosis", - -13.278489112854004 - ], - [ - "▁Carnegie", - -13.278489112854004 - ], - [ - "▁Incredible", - -13.278489112854004 - ], - [ - "▁Teppich", - -13.278489112854004 - ], - [ - "▁crunchy", - -13.278489112854004 - ], - [ - "▁vomiting", - -13.278529167175293 - ], - [ - "▁sourire", - -13.278619766235352 - ], - [ - "publish", - -13.278948783874512 - ], - [ - "▁exterioar", - -13.279094696044922 - ], - [ - "▁forehead", - -13.279107093811035 - ], - [ - "▁climatique", - -13.279313087463379 - ], - [ - "▁conservator", - -13.279458999633789 - ], - [ - "▁Russland", - -13.279687881469727 - ], - [ - "▁kombiniert", - -13.279687881469727 - ], - [ - "▁Thrones", - -13.279688835144043 - ], - [ - "▁Griffith", - -13.27968978881836 - ], - [ - "▁fragrant", - -13.279695510864258 - ], - [ - "▁RSVP", - -13.279698371887207 - ], - [ - "klima", - -13.279751777648926 - ], - [ - "▁situație", - -13.279808044433594 - ], - [ - "deschiderea", - -13.280009269714355 - ], - [ - "▁moale", - -13.280033111572266 - ], - [ - "▁Trevor", - -13.280112266540527 - ], - [ - "ménager", - -13.28011417388916 - ], - [ - "deploying", - -13.280428886413574 - ], - [ - "▁Loft", - -13.280500411987305 - ], - [ - "▁Willkommen", - 
-13.28059196472168 - ], - [ - "▁Bezirks", - -13.280887603759766 - ], - [ - "▁Himself", - -13.280975341796875 - ], - [ - "▁quarant", - -13.28101634979248 - ], - [ - "▁1901", - -13.281079292297363 - ], - [ - "▁tripod", - -13.28136920928955 - ], - [ - "▁récolt", - -13.281553268432617 - ], - [ - "natură", - -13.281631469726562 - ], - [ - "School", - -13.281649589538574 - ], - [ - "contested", - -13.281773567199707 - ], - [ - "bwohl", - -13.281784057617188 - ], - [ - "Darren", - -13.281830787658691 - ], - [ - "medicine", - -13.281903266906738 - ], - [ - "▁Impuls", - -13.282041549682617 - ], - [ - "prevailing", - -13.282057762145996 - ], - [ - "▁orthodontic", - -13.282089233398438 - ], - [ - "▁sequential", - -13.282089233398438 - ], - [ - "▁Kolkata", - -13.28209114074707 - ], - [ - "▁séch", - -13.282100677490234 - ], - [ - "▁diaper", - -13.28212833404541 - ], - [ - "▁simplifie", - -13.282144546508789 - ], - [ - "▁reflux", - -13.282163619995117 - ], - [ - "▁Hypo", - -13.282242774963379 - ], - [ - "imprimer", - -13.282251358032227 - ], - [ - "▁Folosi", - -13.282401084899902 - ], - [ - "Info", - -13.282570838928223 - ], - [ - "▁Investiga", - -13.282801628112793 - ], - [ - "stabilirea", - -13.282845497131348 - ], - [ - "élis", - -13.283149719238281 - ], - [ - "ccessed", - -13.28320026397705 - ], - [ - "▁recyclable", - -13.283293724060059 - ], - [ - "▁forbidden", - -13.283295631408691 - ], - [ - "▁Colonel", - -13.283297538757324 - ], - [ - "▁nisip", - -13.28330135345459 - ], - [ - "▁Fundamental", - -13.283303260803223 - ], - [ - "▁nouveauté", - -13.283308029174805 - ], - [ - "khi", - -13.283357620239258 - ], - [ - "▁ecology", - -13.28339672088623 - ], - [ - "▁filament", - -13.283540725708008 - ], - [ - "▁relentless", - -13.283559799194336 - ], - [ - "▁Behavior", - -13.283669471740723 - ], - [ - "titulaire", - -13.283900260925293 - ], - [ - "▁administrativ", - -13.28404426574707 - ], - [ - "▁Vorlage", - -13.284209251403809 - ], - [ - "zeigte", - -13.28427791595459 - ], - [ - 
"▁Bäume", - -13.284497261047363 - ], - [ - "▁Kartoffel", - -13.284497261047363 - ], - [ - "▁Possible", - -13.284500122070312 - ], - [ - "▁perturb", - -13.28466510772705 - ], - [ - "▁Grigor", - -13.284717559814453 - ], - [ - "▁streng", - -13.284759521484375 - ], - [ - "▁vânzare", - -13.285101890563965 - ], - [ - "concentrating", - -13.285698890686035 - ], - [ - "▁rechtzeitig", - -13.2857027053833 - ], - [ - "▁eternity", - -13.28570556640625 - ], - [ - "▁Puzzle", - -13.28575611114502 - ], - [ - "▁malade", - -13.285775184631348 - ], - [ - "▁Metallic", - -13.285776138305664 - ], - [ - "▁Unterhaltung", - -13.285783767700195 - ], - [ - "▁4:00", - -13.285820960998535 - ], - [ - "▁magique", - -13.285908699035645 - ], - [ - "▁cellphone", - -13.285975456237793 - ], - [ - "▁inhibition", - -13.286023139953613 - ], - [ - "▁remplacement", - -13.286025047302246 - ], - [ - "▁WWII", - -13.286089897155762 - ], - [ - "Eff", - -13.286258697509766 - ], - [ - "kontakt", - -13.286832809448242 - ], - [ - "Update", - -13.286869049072266 - ], - [ - "▁Emerald", - -13.286910057067871 - ], - [ - "▁hammock", - -13.286910057067871 - ], - [ - "POWER", - -13.286917686462402 - ], - [ - "automne", - -13.286917686462402 - ], - [ - "▁(2004)", - -13.286961555480957 - ], - [ - "▁participanți", - -13.287012100219727 - ], - [ - "1998)", - -13.287014961242676 - ], - [ - "▁deletion", - -13.287186622619629 - ], - [ - "▁Proiect", - -13.287226676940918 - ], - [ - "IDENT", - -13.287504196166992 - ], - [ - "▁precis", - -13.287623405456543 - ], - [ - "▁limp", - -13.287676811218262 - ], - [ - "▁Pompe", - -13.287686347961426 - ], - [ - "▁ménage", - -13.28780746459961 - ], - [ - "▁Wahrheit", - -13.288119316101074 - ], - [ - "▁Intelligent", - -13.28812026977539 - ], - [ - "▁instability", - -13.2881441116333 - ], - [ - "insurance", - -13.288346290588379 - ], - [ - "▁Nursery", - -13.288352966308594 - ], - [ - "▁synonym", - -13.288427352905273 - ], - [ - "▁ignite", - -13.28848934173584 - ], - [ - "▁Vernon", - 
-13.28849983215332 - ], - [ - "purchase", - -13.288524627685547 - ], - [ - "▁disponibilité", - -13.288662910461426 - ], - [ - "▁producţi", - -13.28909969329834 - ], - [ - "▁Pentagon", - -13.289329528808594 - ], - [ - "▁illumination", - -13.289329528808594 - ], - [ - "▁obsolete", - -13.289329528808594 - ], - [ - "▁unacceptable", - -13.28933048248291 - ], - [ - "Gleichzeitig", - -13.289938926696777 - ], - [ - "rutsch", - -13.290071487426758 - ], - [ - "viziuni", - -13.290409088134766 - ], - [ - "▁Nicaragua", - -13.29054069519043 - ], - [ - "▁hesitation", - -13.290541648864746 - ], - [ - "▁nascut", - -13.290545463562012 - ], - [ - "▁Warehouse", - -13.29055404663086 - ], - [ - "geboten", - -13.290558815002441 - ], - [ - "▁Lagos", - -13.290844917297363 - ], - [ - "produced", - -13.290874481201172 - ], - [ - "cativa", - -13.291309356689453 - ], - [ - "▁Tracy", - -13.291326522827148 - ], - [ - "Projekt", - -13.291468620300293 - ], - [ - "▁malaria", - -13.291692733764648 - ], - [ - "▁Baldwin", - -13.291755676269531 - ], - [ - "Take", - -13.291791915893555 - ], - [ - "▁fluctuations", - -13.291844367980957 - ], - [ - "▁titular", - -13.29194450378418 - ], - [ - "bmw", - -13.291976928710938 - ], - [ - "▁brevet", - -13.29202651977539 - ], - [ - "étapes", - -13.292173385620117 - ], - [ - "wikipedia", - -13.292373657226562 - ], - [ - "▁corporal", - -13.292424201965332 - ], - [ - "▁Schönheit", - -13.2926664352417 - ], - [ - "utilizatorii", - -13.292695999145508 - ], - [ - "INFO", - -13.292807579040527 - ], - [ - "▁formularul", - -13.292900085449219 - ], - [ - "femi", - -13.292959213256836 - ], - [ - "Konferenz", - -13.29296875 - ], - [ - "▁carnival", - -13.29296875 - ], - [ - "▁Kräuter", - -13.292969703674316 - ], - [ - "▁gelernt", - -13.292981147766113 - ], - [ - "▁Sherman", - -13.293017387390137 - ], - [ - "▁persistence", - -13.293289184570312 - ], - [ - "▁Behörden", - -13.293577194213867 - ], - [ - "▁Frühjahr", - -13.293578147888184 - ], - [ - "▁Guvern", - -13.293649673461914 - 
], - [ - "interpreting", - -13.293878555297852 - ], - [ - "▁nommé", - -13.294021606445312 - ], - [ - "consult", - -13.294035911560059 - ], - [ - "▁obligaţi", - -13.294184684753418 - ], - [ - "▁Newspaper", - -13.2942476272583 - ], - [ - "(2005)", - -13.294515609741211 - ], - [ - "pumped", - -13.294614791870117 - ], - [ - "▁autoritati", - -13.294634819030762 - ], - [ - "▁aplicatii", - -13.294644355773926 - ], - [ - "▁verhindert", - -13.294794082641602 - ], - [ - "▁évident", - -13.294794082641602 - ], - [ - "▁getrennt", - -13.294795036315918 - ], - [ - "▁Encourage", - -13.295403480529785 - ], - [ - "▁lurk", - -13.295432090759277 - ], - [ - "▁condemned", - -13.295455932617188 - ], - [ - "▁4:30", - -13.295502662658691 - ], - [ - "labelled", - -13.29576587677002 - ], - [ - "ordinea", - -13.295899391174316 - ], - [ - "▁pantofi", - -13.296012878417969 - ], - [ - "Default", - -13.296042442321777 - ], - [ - "▁beruh", - -13.296120643615723 - ], - [ - "/01/", - -13.296268463134766 - ], - [ - "league", - -13.296503067016602 - ], - [ - "▁couvert", - -13.296524047851562 - ], - [ - "▁competencies", - -13.296622276306152 - ], - [ - "▁mozzarella", - -13.296622276306152 - ], - [ - "jihad", - -13.29662799835205 - ], - [ - "▁gossip", - -13.29662799835205 - ], - [ - "▁Omaha", - -13.296628952026367 - ], - [ - "▁coincidence", - -13.296669960021973 - ], - [ - "▁Pinot", - -13.296710968017578 - ], - [ - "dotted", - -13.296789169311523 - ], - [ - "schilder", - -13.297197341918945 - ], - [ - "▁Munte", - -13.297224998474121 - ], - [ - "▁Vermieter", - -13.297232627868652 - ], - [ - "▁britannique", - -13.297232627868652 - ], - [ - "▁comentariu", - -13.297235488891602 - ], - [ - "abonnement", - -13.29725456237793 - ], - [ - "▁inventive", - -13.29727840423584 - ], - [ - "complie", - -13.297279357910156 - ], - [ - "composée", - -13.29734992980957 - ], - [ - "▁glatt", - -13.297684669494629 - ], - [ - "adorned", - -13.297842979431152 - ], - [ - "▁Opportunities", - -13.297842979431152 - ], - [ - 
"▁equilibrium", - -13.297842979431152 - ], - [ - "▁persuasive", - -13.297842979431152 - ], - [ - "▁achiziţi", - -13.297843933105469 - ], - [ - "▁déterminer", - -13.297843933105469 - ], - [ - "▁fleece", - -13.297857284545898 - ], - [ - "▁ivory", - -13.29786205291748 - ], - [ - "▁Genuss", - -13.297900199890137 - ], - [ - "Thousands", - -13.297930717468262 - ], - [ - "▁izolat", - -13.297965049743652 - ], - [ - "▁symbolize", - -13.298033714294434 - ], - [ - "gâteau", - -13.298051834106445 - ], - [ - "▁relații", - -13.298062324523926 - ], - [ - "▁Classroom", - -13.298144340515137 - ], - [ - "settlers", - -13.298155784606934 - ], - [ - "▁vremuri", - -13.298195838928223 - ], - [ - "▁Serial", - -13.29838752746582 - ], - [ - "▁boite", - -13.298399925231934 - ], - [ - "équivalent", - -13.298453330993652 - ], - [ - "▁benutzen", - -13.298454284667969 - ], - [ - "▁Recomand", - -13.298462867736816 - ], - [ - "▁Sinai", - -13.298968315124512 - ], - [ - "▁Advertise", - -13.29906940460205 - ], - [ - "▁Thermal", - -13.299206733703613 - ], - [ - "fiance", - -13.299471855163574 - ], - [ - "▁universitaire", - -13.299683570861816 - ], - [ - "▁rivière", - -13.299793243408203 - ], - [ - "▁reimburse", - -13.299907684326172 - ], - [ - "ţara", - -13.299932479858398 - ], - [ - "tician", - -13.30002498626709 - ], - [ - "intelligence", - -13.300041198730469 - ], - [ - "▁abgestimmt", - -13.300288200378418 - ], - [ - "▁compliqué", - -13.300288200378418 - ], - [ - "▁succulent", - -13.300297737121582 - ], - [ - "opéra", - -13.300395011901855 - ], - [ - "7-9", - -13.300456047058105 - ], - [ - "▁pierderi", - -13.300654411315918 - ], - [ - "extinction", - -13.30090045928955 - ], - [ - "▁Zweifel", - -13.30103874206543 - ], - [ - "ATCH", - -13.30112361907959 - ], - [ - "10,000", - -13.301222801208496 - ], - [ - "▁uninterrupted", - -13.301513671875 - ], - [ - "▁Eigentum", - -13.301517486572266 - ], - [ - "▁Utility", - -13.301517486572266 - ], - [ - "ско", - -13.301529884338379 - ], - [ - "▁tornado", - 
-13.301544189453125 - ], - [ - "▁Güte", - -13.301727294921875 - ], - [ - "▁pertain", - -13.301923751831055 - ], - [ - "painters", - -13.301993370056152 - ], - [ - "Help", - -13.3021240234375 - ], - [ - "▁străinătate", - -13.30212688446045 - ], - [ - "▁stammen", - -13.302170753479004 - ], - [ - "opposition", - -13.302229881286621 - ], - [ - "▁rhino", - -13.302233695983887 - ], - [ - "intervenir", - -13.302427291870117 - ], - [ - "▁hyperlink", - -13.302441596984863 - ], - [ - "höchst", - -13.302518844604492 - ], - [ - "roach", - -13.302627563476562 - ], - [ - "wSt", - -13.302687644958496 - ], - [ - "▁monastery", - -13.302740097045898 - ], - [ - "▁algae", - -13.302754402160645 - ], - [ - "▁shaving", - -13.302757263183594 - ], - [ - "présentent", - -13.302804946899414 - ], - [ - "Africa", - -13.302860260009766 - ], - [ - "eigener", - -13.303047180175781 - ], - [ - "▁glace", - -13.303153991699219 - ], - [ - "▁discurs", - -13.303179740905762 - ], - [ - "▁autograph", - -13.303204536437988 - ], - [ - "▁Conflict", - -13.303359031677246 - ], - [ - "▁școli", - -13.303411483764648 - ], - [ - "▁excerpt", - -13.303617477416992 - ], - [ - "correlated", - -13.303628921508789 - ], - [ - "empel", - -13.303841590881348 - ], - [ - "cryptocurrencies", - -13.30396842956543 - ], - [ - "▁symposium", - -13.30396842956543 - ], - [ - "▁gewohnt", - -13.303994178771973 - ], - [ - "PTSD", - -13.304070472717285 - ], - [ - "▁harmonic", - -13.304166793823242 - ], - [ - "discarded", - -13.304282188415527 - ], - [ - "▁Flint", - -13.304359436035156 - ], - [ - "Russia", - -13.304422378540039 - ], - [ - "▁ședinț", - -13.304583549499512 - ], - [ - "▁accusations", - -13.304727554321289 - ], - [ - "▁încălc", - -13.304827690124512 - ], - [ - "sendung", - -13.305152893066406 - ], - [ - "▁Chiropractic", - -13.305197715759277 - ], - [ - "▁excepți", - -13.305201530456543 - ], - [ - "▁proclaim", - -13.305201530456543 - ], - [ - "▁Flexible", - -13.305295944213867 - ], - [ - "▁Hüt", - -13.30538272857666 - ], - [ 
- "▁Baltic", - -13.30539608001709 - ], - [ - "▁inaltime", - -13.30553913116455 - ], - [ - "▁montré", - -13.305868148803711 - ], - [ - "exécution", - -13.305898666381836 - ], - [ - "partei", - -13.305961608886719 - ], - [ - "▁specifie", - -13.306072235107422 - ], - [ - "▁Jackpot", - -13.306105613708496 - ], - [ - "▁stumble", - -13.306134223937988 - ], - [ - "▁individuel", - -13.306161880493164 - ], - [ - "▁Veteran", - -13.306217193603516 - ], - [ - "▁Supplies", - -13.306428909301758 - ], - [ - "▁excavation", - -13.306428909301758 - ], - [ - "▁Libraries", - -13.306469917297363 - ], - [ - "▁prénom", - -13.306476593017578 - ], - [ - "WOOD", - -13.30650806427002 - ], - [ - "meciul", - -13.306917190551758 - ], - [ - "Chef", - -13.306938171386719 - ], - [ - "▁SUPER", - -13.306940078735352 - ], - [ - "Appeals", - -13.30696964263916 - ], - [ - "terapia", - -13.307113647460938 - ], - [ - "▁relatii", - -13.30713939666748 - ], - [ - "modifying", - -13.30748462677002 - ], - [ - "▁Regulament", - -13.307662010192871 - ], - [ - "▁bănci", - -13.307662963867188 - ], - [ - "▁agility", - -13.307666778564453 - ], - [ - "▁Magnetic", - -13.307674407958984 - ], - [ - "▁piatra", - -13.30767822265625 - ], - [ - "▁Governance", - -13.307680130004883 - ], - [ - "▁clown", - -13.30772876739502 - ], - [ - "▁Choir", - -13.308337211608887 - ], - [ - "aujourd", - -13.308548927307129 - ], - [ - "▁vendeur", - -13.308732032775879 - ], - [ - "ndererseits", - -13.308859825134277 - ], - [ - "▁Bahrain", - -13.3088960647583 - ], - [ - "▁Timisoara", - -13.3088960647583 - ], - [ - "▁exklusive", - -13.3088960647583 - ], - [ - "▁Population", - -13.309001922607422 - ], - [ - "▁nepo", - -13.309073448181152 - ], - [ - "▁relish", - -13.309085845947266 - ], - [ - "▁Pumpkin", - -13.309571266174316 - ], - [ - "▁détente", - -13.309784889221191 - ], - [ - "▁episcop", - -13.309860229492188 - ], - [ - "patterned", - -13.309929847717285 - ], - [ - "▁THANK", - -13.310132026672363 - ], - [ - "▁Widerspruch", - 
-13.310132026672363 - ], - [ - "▁Crisis", - -13.310189247131348 - ], - [ - "▁goose", - -13.310226440429688 - ], - [ - "▁couture", - -13.310307502746582 - ], - [ - "▁hinweg", - -13.310446739196777 - ], - [ - "supplemental", - -13.310486793518066 - ], - [ - "shingles", - -13.31060791015625 - ], - [ - "investir", - -13.310635566711426 - ], - [ - "▁steriliz", - -13.310759544372559 - ], - [ - "tractors", - -13.310761451721191 - ], - [ - "cellules", - -13.31078815460205 - ], - [ - "▁Gloria", - -13.310888290405273 - ], - [ - "▁teilnehmen", - -13.311092376708984 - ], - [ - "companiile", - -13.311248779296875 - ], - [ - "surfacing", - -13.311279296875 - ], - [ - "▁nostalgic", - -13.311368942260742 - ], - [ - "▁Badezimmer", - -13.311369895935059 - ], - [ - "▁conjoint", - -13.311370849609375 - ], - [ - "vacancy", - -13.31145191192627 - ], - [ - "▁homeland", - -13.311582565307617 - ], - [ - "▁Abschnitt", - -13.311625480651855 - ], - [ - "Cartea", - -13.311653137207031 - ], - [ - "SIA", - -13.311782836914062 - ], - [ - "▁explode", - -13.311786651611328 - ], - [ - "fostering", - -13.311959266662598 - ], - [ - "▁ceilalti", - -13.31198787689209 - ], - [ - "▁gentil", - -13.31214714050293 - ], - [ - "oplasty", - -13.31218433380127 - ], - [ - "bodied", - -13.312424659729004 - ], - [ - "▁1906", - -13.312499046325684 - ], - [ - "▁BlackBerry", - -13.312607765197754 - ], - [ - "▁Presbyterian", - -13.312607765197754 - ], - [ - "▁berücksichtigt", - -13.312607765197754 - ], - [ - "▁compartiment", - -13.312607765197754 - ], - [ - "▁compulsory", - -13.312607765197754 - ], - [ - "Millennial", - -13.312609672546387 - ], - [ - "▁sanitar", - -13.312638282775879 - ], - [ - "▁stink", - -13.312975883483887 - ], - [ - "lius", - -13.313047409057617 - ], - [ - "thankfully", - -13.313136100769043 - ], - [ - "modalité", - -13.313173294067383 - ], - [ - "▁cunoaște", - -13.313226699829102 - ], - [ - "Infrastruktur", - -13.313227653503418 - ], - [ - "▁studenți", - -13.313253402709961 - ], - [ - "Bref", - 
-13.313270568847656 - ], - [ - "London", - -13.31360149383545 - ], - [ - "▁Arduino", - -13.313847541809082 - ], - [ - "▁cilantro", - -13.313847541809082 - ], - [ - "▁Rafael", - -13.313848495483398 - ], - [ - "▁untersucht", - -13.313861846923828 - ], - [ - "▁martyr", - -13.31389331817627 - ], - [ - "▁Mormon", - -13.313984870910645 - ], - [ - "▁wicket", - -13.313996315002441 - ], - [ - "cherished", - -13.314335823059082 - ], - [ - "liquid", - -13.314417839050293 - ], - [ - "▁dorinț", - -13.314571380615234 - ], - [ - "lehnt", - -13.314717292785645 - ], - [ - "meisterschaft", - -13.31493091583252 - ], - [ - "fondateur", - -13.314971923828125 - ], - [ - "câble", - -13.315078735351562 - ], - [ - "▁erreichbar", - -13.315091133117676 - ], - [ - "▁footsteps", - -13.315094947814941 - ], - [ - "▁Kloster", - -13.31519889831543 - ], - [ - "▁multiplayer", - -13.315218925476074 - ], - [ - "▁substitu", - -13.315276145935059 - ], - [ - "▁Frisch", - -13.315526962280273 - ], - [ - "▁arsenal", - -13.315712928771973 - ], - [ - "explication", - -13.315866470336914 - ], - [ - "▁conexiun", - -13.315986633300781 - ], - [ - "muddy", - -13.316045761108398 - ], - [ - "▁Reifen", - -13.316120147705078 - ], - [ - "auraient", - -13.316132545471191 - ], - [ - "▁biologic", - -13.316136360168457 - ], - [ - "▁acquainted", - -13.316332817077637 - ], - [ - "▁shelving", - -13.316341400146484 - ], - [ - "Stunning", - -13.316373825073242 - ], - [ - "▁Clothing", - -13.316394805908203 - ], - [ - "▁kidding", - -13.316431999206543 - ], - [ - "excellent", - -13.316452026367188 - ], - [ - "▁susțin", - -13.316487312316895 - ], - [ - "bătut", - -13.316502571105957 - ], - [ - "elusive", - -13.3165283203125 - ], - [ - "werbung", - -13.316743850708008 - ], - [ - "slipping", - -13.316813468933105 - ], - [ - "▁configura", - -13.316926956176758 - ], - [ - "▁proaspat", - -13.31695556640625 - ], - [ - "▁apporté", - -13.317120552062988 - ], - [ - "▁démarr", - -13.317328453063965 - ], - [ - "Spezialist", - 
-13.317578315734863 - ], - [ - "▁obligați", - -13.317578315734863 - ], - [ - "▁societăți", - -13.317578315734863 - ], - [ - "▁malpractice", - -13.31757926940918 - ], - [ - "Hundreds", - -13.317609786987305 - ], - [ - "▁3:1", - -13.318138122558594 - ], - [ - "▁computation", - -13.31817626953125 - ], - [ - "▁Heilig", - -13.318528175354004 - ], - [ - "▁Helsinki", - -13.318824768066406 - ], - [ - "▁firefighters", - -13.318824768066406 - ], - [ - "▁obedience", - -13.318824768066406 - ], - [ - "▁evacuate", - -13.318825721740723 - ], - [ - "▁Floyd", - -13.318840026855469 - ], - [ - "▁Disneyland", - -13.318859100341797 - ], - [ - "Cathy", - -13.319069862365723 - ], - [ - "▁Broken", - -13.319278717041016 - ], - [ - "cript", - -13.319952011108398 - ], - [ - "▁Gewähr", - -13.320073127746582 - ], - [ - "▁embarrassed", - -13.320073127746582 - ], - [ - "▁Leicht", - -13.32007884979248 - ], - [ - "▁témoign", - -13.320379257202148 - ], - [ - "▁viteze", - -13.3206148147583 - ], - [ - "▁hallmark", - -13.320731163024902 - ], - [ - "uploads", - -13.32082462310791 - ], - [ - "▁Submission", - -13.320929527282715 - ], - [ - "▁croissant", - -13.321049690246582 - ], - [ - "awning", - -13.32105827331543 - ], - [ - "detecting", - -13.321198463439941 - ], - [ - "▁Bahamas", - -13.321322441101074 - ], - [ - "▁Kathleen", - -13.321325302124023 - ], - [ - "▁latch", - -13.321377754211426 - ], - [ - "▁pronounce", - -13.321380615234375 - ], - [ - "▁choke", - -13.321428298950195 - ], - [ - "▁$50,000", - -13.3215970993042 - ], - [ - "▁historische", - -13.321642875671387 - ], - [ - "jugé", - -13.321829795837402 - ], - [ - "▁MasterCard", - -13.321949005126953 - ], - [ - "▁Horror", - -13.321955680847168 - ], - [ - "spoiled", - -13.321958541870117 - ], - [ - "▁apariți", - -13.32202434539795 - ], - [ - "geschaltet", - -13.3225736618042 - ], - [ - "▁Londra", - -13.322578430175781 - ], - [ - "viction", - -13.322580337524414 - ], - [ - "▁Disaster", - -13.322593688964844 - ], - [ - "▁desigur", - 
-13.322601318359375 - ], - [ - "▁substanț", - -13.322601318359375 - ], - [ - "▁compiler", - -13.322613716125488 - ], - [ - "▁vanzari", - -13.32262897491455 - ], - [ - "▁Simulation", - -13.322669982910156 - ], - [ - "Occasionally", - -13.322842597961426 - ], - [ - "Seite", - -13.322884559631348 - ], - [ - "Linked", - -13.322938919067383 - ], - [ - "Roll", - -13.323015213012695 - ], - [ - "▁trajet", - -13.323244094848633 - ], - [ - "Molecular", - -13.323834419250488 - ], - [ - "▁pragmatic", - -13.323843002319336 - ], - [ - "judecată", - -13.323915481567383 - ], - [ - "ров", - -13.32400894165039 - ], - [ - "serrurerie", - -13.324024200439453 - ], - [ - "▁reconstruct", - -13.324129104614258 - ], - [ - "▁heureuse", - -13.324179649353027 - ], - [ - "▁knight", - -13.32422924041748 - ], - [ - "knowingly", - -13.324431419372559 - ], - [ - "▁perspectiva", - -13.324453353881836 - ], - [ - "ordinary", - -13.324604034423828 - ], - [ - "▁chaudière", - -13.324721336364746 - ], - [ - "Neill", - -13.324727058410645 - ], - [ - "cellulose", - -13.325080871582031 - ], - [ - "▁Delicious", - -13.325080871582031 - ], - [ - "▁incearca", - -13.325080871582031 - ], - [ - "▁retrospective", - -13.325080871582031 - ], - [ - "▁mundane", - -13.325081825256348 - ], - [ - "▁definiert", - -13.32508659362793 - ], - [ - "▁cockpit", - -13.325088500976562 - ], - [ - "Aktionen", - -13.325363159179688 - ], - [ - "▁distanț", - -13.325654029846191 - ], - [ - "▁diplôme", - -13.325708389282227 - ], - [ - "prepaid", - -13.325737953186035 - ], - [ - "▁Tabellen", - -13.325758934020996 - ], - [ - "▁economie", - -13.325770378112793 - ], - [ - "December", - -13.325826644897461 - ], - [ - "Punkten", - -13.32613754272461 - ], - [ - "▁Punch", - -13.32614517211914 - ], - [ - "Martin", - -13.326154708862305 - ], - [ - "▁Espresso", - -13.326314926147461 - ], - [ - "▁ubiquitous", - -13.326335906982422 - ], - [ - "▁Mongolia", - -13.326337814331055 - ], - [ - "▁collabor", - -13.326635360717773 - ], - [ - "▁Vordergrund", - 
-13.32696533203125 - ], - [ - "cameră", - -13.327091217041016 - ], - [ - "represented", - -13.327268600463867 - ], - [ - "▁AUTO", - -13.327446937561035 - ], - [ - "▁Ofert", - -13.327542304992676 - ], - [ - "neig", - -13.327593803405762 - ], - [ - "▁Hazard", - -13.327595710754395 - ], - [ - "▁Constanta", - -13.327596664428711 - ], - [ - "▁tumour", - -13.32759952545166 - ], - [ - "▁Neighborhood", - -13.327603340148926 - ], - [ - "▁detaliat", - -13.327619552612305 - ], - [ - "▁extraordinaire", - -13.327665328979492 - ], - [ - "▁Therapeutic", - -13.327686309814453 - ], - [ - "predicting", - -13.327693939208984 - ], - [ - "▁institutii", - -13.32776165008545 - ], - [ - "ifizierung", - -13.327797889709473 - ], - [ - "wählt", - -13.328207015991211 - ], - [ - "▁remarquable", - -13.32822322845459 - ], - [ - "Invent", - -13.328512191772461 - ], - [ - "▁foloseșt", - -13.328514099121094 - ], - [ - "öfte", - -13.328703880310059 - ], - [ - "▁discreet", - -13.328853607177734 - ], - [ - "▁Flickr", - -13.32885456085205 - ], - [ - "▁trésor", - -13.328856468200684 - ], - [ - "▁steroids", - -13.328872680664062 - ], - [ - "▁personnalité", - -13.328953742980957 - ], - [ - "▁Krankenhaus", - -13.32901668548584 - ], - [ - "▁affordability", - -13.329218864440918 - ], - [ - "deuten", - -13.329398155212402 - ], - [ - "Detailed", - -13.329412460327148 - ], - [ - "Walk", - -13.329444885253906 - ], - [ - "▁parallèle", - -13.329483032226562 - ], - [ - "thèse", - -13.329649925231934 - ], - [ - "▁gefördert", - -13.330117225646973 - ], - [ - "Greeting", - -13.33014965057373 - ], - [ - "gelistet", - -13.330172538757324 - ], - [ - "▁chlorine", - -13.330392837524414 - ], - [ - "behält", - -13.33039665222168 - ], - [ - "emption", - -13.330435752868652 - ], - [ - "▁mobilité", - -13.330601692199707 - ], - [ - "▁randonnée", - -13.330668449401855 - ], - [ - "habitant", - -13.330718040466309 - ], - [ - "zilla", - -13.331082344055176 - ], - [ - "▁Lili", - -13.331160545349121 - ], - [ - "▁répét", - 
-13.331341743469238 - ], - [ - "trucât", - -13.331376075744629 - ], - [ - "▁Hospice", - -13.331376075744629 - ], - [ - "▁grassroots", - -13.331377029418945 - ], - [ - "▁affiché", - -13.331393241882324 - ], - [ - "pears", - -13.331470489501953 - ], - [ - "▁linistit", - -13.331497192382812 - ], - [ - "▁Patron", - -13.331552505493164 - ], - [ - "▁Stalin", - -13.331626892089844 - ], - [ - "▁închiri", - -13.331751823425293 - ], - [ - "▁Apostol", - -13.332018852233887 - ], - [ - "▁poudre", - -13.332246780395508 - ], - [ - "▁piscin", - -13.332419395446777 - ], - [ - "merlin", - -13.33259391784668 - ], - [ - "limited", - -13.33260726928711 - ], - [ - "▁métallique", - -13.332639694213867 - ], - [ - "gazebo", - -13.33267879486084 - ], - [ - "weilige", - -13.332718849182129 - ], - [ - "prosecutors", - -13.33278751373291 - ], - [ - "Expert", - -13.33314323425293 - ], - [ - "Assemblée", - -13.333271980285645 - ], - [ - "▁fauna", - -13.333285331726074 - ], - [ - "▁Turtle", - -13.333353996276855 - ], - [ - "▁Consortium", - -13.333905220031738 - ], - [ - "▁assemblies", - -13.333905220031738 - ], - [ - "▁trajectory", - -13.333905220031738 - ], - [ - "▁Vineyard", - -13.333906173706055 - ], - [ - "▁Mehrwert", - -13.334037780761719 - ], - [ - "▁sunflower", - -13.334043502807617 - ], - [ - "develop", - -13.334060668945312 - ], - [ - "▁heroic", - -13.334100723266602 - ], - [ - "▁riscuri", - -13.334151268005371 - ], - [ - "oeuf", - -13.334300994873047 - ], - [ - "influence", - -13.334452629089355 - ], - [ - "▁Voraussetzung", - -13.334500312805176 - ], - [ - "utoritatea", - -13.334518432617188 - ], - [ - "Produsul", - -13.334654808044434 - ], - [ - "▁gewährleistet", - -13.335171699523926 - ], - [ - "▁brûl", - -13.335175514221191 - ], - [ - "▁Column", - -13.335184097290039 - ], - [ - "▁trousers", - -13.335209846496582 - ], - [ - "▁posterior", - -13.33521556854248 - ], - [ - "glyph", - -13.335251808166504 - ], - [ - "▁Happen", - -13.335280418395996 - ], - [ - "▁créateur", - 
-13.335667610168457 - ], - [ - "▁apostle", - -13.335898399353027 - ], - [ - "▁padding", - -13.335907936096191 - ], - [ - "▁Digitalisierung", - -13.335908889770508 - ], - [ - "▁Laurie", - -13.335915565490723 - ], - [ - "▁Erwerb", - -13.336065292358398 - ], - [ - "▁bătrân", - -13.336440086364746 - ], - [ - "▁harmonious", - -13.336441040039062 - ], - [ - "▁ailments", - -13.336456298828125 - ], - [ - "▁Venue", - -13.33650016784668 - ], - [ - "▁Motorcycle", - -13.336523056030273 - ], - [ - "▁cortex", - -13.336551666259766 - ], - [ - "▁Sunrise", - -13.336636543273926 - ], - [ - "Software", - -13.336775779724121 - ], - [ - "▁advocat", - -13.336934089660645 - ], - [ - "essentiellement", - -13.337422370910645 - ], - [ - "•", - -13.337494850158691 - ], - [ - "părut", - -13.337522506713867 - ], - [ - "▁Suffolk", - -13.337711334228516 - ], - [ - "▁righteousness", - -13.337711334228516 - ], - [ - "▁Shirley", - -13.337712287902832 - ], - [ - "▁Famous", - -13.337749481201172 - ], - [ - "▁emulate", - -13.337788581848145 - ], - [ - "vermögen", - -13.33788776397705 - ], - [ - "generated", - -13.337963104248047 - ], - [ - "Ecole", - -13.337977409362793 - ], - [ - "▁managerial", - -13.338086128234863 - ], - [ - "believe", - -13.338091850280762 - ], - [ - "▁récupére", - -13.338348388671875 - ], - [ - "▁recens", - -13.338531494140625 - ], - [ - "▁Barrett", - -13.338778495788574 - ], - [ - "▁courageous", - -13.338814735412598 - ], - [ - "9.95", - -13.338961601257324 - ], - [ - "▁Odyssey", - -13.338982582092285 - ], - [ - "▁Violence", - -13.338982582092285 - ], - [ - "▁concasseur", - -13.338982582092285 - ], - [ - "▁evacuation", - -13.338982582092285 - ], - [ - "▁kontinuierlich", - -13.338982582092285 - ], - [ - "▁epidemi", - -13.3389892578125 - ], - [ - "▁disconnected", - -13.339197158813477 - ], - [ - "frucht", - -13.339339256286621 - ], - [ - "Trustees", - -13.339348793029785 - ], - [ - "▁Massiv", - -13.339459419250488 - ], - [ - "gebucht", - -13.339473724365234 - ], - [ - "stütze", - 
-13.339526176452637 - ], - [ - "▁febr", - -13.339741706848145 - ], - [ - "honoured", - -13.339743614196777 - ], - [ - "▁digitiz", - -13.340079307556152 - ], - [ - "Image", - -13.34021282196045 - ], - [ - "▁Brunswick", - -13.34025764465332 - ], - [ - "▁Therapist", - -13.34026050567627 - ], - [ - "accessoire", - -13.340264320373535 - ], - [ - "▁croqu", - -13.340291023254395 - ], - [ - "Pflanz", - -13.34052848815918 - ], - [ - "dragging", - -13.340536117553711 - ], - [ - "▁Facilit", - -13.340750694274902 - ], - [ - "soucis", - -13.340765953063965 - ], - [ - "Asadar", - -13.34081745147705 - ], - [ - "▁Thames", - -13.341021537780762 - ], - [ - "▁cariera", - -13.341116905212402 - ], - [ - "▁mercury", - -13.341530799865723 - ], - [ - "▁Blessed", - -13.341533660888672 - ], - [ - "▁Whitney", - -13.341630935668945 - ], - [ - "▁géant", - -13.341926574707031 - ], - [ - "▁coordonnée", - -13.342217445373535 - ], - [ - "oidal", - -13.342623710632324 - ], - [ - "Wohnungen", - -13.342696189880371 - ], - [ - "▁Spectrum", - -13.34280776977539 - ], - [ - "▁Avengers", - -13.342808723449707 - ], - [ - "▁Gloucester", - -13.342808723449707 - ], - [ - "▁nützlich", - -13.342811584472656 - ], - [ - "▁toothbrush", - -13.342830657958984 - ], - [ - "▁Vanessa", - -13.342843055725098 - ], - [ - "Saxon", - -13.342947959899902 - ], - [ - "▁comunități", - -13.343165397644043 - ], - [ - "reprezentanţi", - -13.343175888061523 - ], - [ - "▁întâlnire", - -13.343225479125977 - ], - [ - "delve", - -13.343234062194824 - ], - [ - "▁technologique", - -13.343452453613281 - ], - [ - "Describe", - -13.343466758728027 - ], - [ - "▁constient", - -13.343501091003418 - ], - [ - "gestalt", - -13.343600273132324 - ], - [ - "▁Tribune", - -13.344090461730957 - ], - [ - "▁fiberglass", - -13.34412956237793 - ], - [ - "verbindung", - -13.344210624694824 - ], - [ - "sacrificing", - -13.344351768493652 - ], - [ - "▁Pablo", - -13.344470024108887 - ], - [ - "▁adanc", - -13.34525203704834 - ], - [ - "omia", - 
-13.345309257507324 - ], - [ - "hâte", - -13.345317840576172 - ], - [ - "▁Sanctuary", - -13.345366477966309 - ], - [ - "▁accolade", - -13.345368385314941 - ], - [ - "▁Wurzel", - -13.345398902893066 - ], - [ - "▁spacing", - -13.345433235168457 - ], - [ - "▁bedeutend", - -13.345481872558594 - ], - [ - "▁biased", - -13.345499992370605 - ], - [ - "randomized", - -13.345747947692871 - ], - [ - "▁agenți", - -13.345856666564941 - ], - [ - "▁excepţi", - -13.346012115478516 - ], - [ - "▁fișier", - -13.346028327941895 - ], - [ - "▁fisier", - -13.34664535522461 - ], - [ - "irrespective", - -13.346648216247559 - ], - [ - "▁Gardner", - -13.34665584564209 - ], - [ - "▁aprecia", - -13.346884727478027 - ], - [ - "▁Klu", - -13.347082138061523 - ], - [ - "▁apropie", - -13.347535133361816 - ], - [ - "▁echival", - -13.347784042358398 - ], - [ - "tauchen", - -13.347862243652344 - ], - [ - "▁hauptsächlich", - -13.347930908203125 - ], - [ - "▁pollutants", - -13.347930908203125 - ], - [ - "▁mammals", - -13.347931861877441 - ], - [ - "▁Landwirtschaft", - -13.347936630249023 - ], - [ - "▁stăpân", - -13.34793758392334 - ], - [ - "▁Prüf", - -13.347990989685059 - ], - [ - "▁Motorsport", - -13.34807300567627 - ], - [ - "Leaving", - -13.348352432250977 - ], - [ - "schädigung", - -13.348573684692383 - ], - [ - "▁calendrier", - -13.348573684692383 - ], - [ - "plikation", - -13.348655700683594 - ], - [ - "▁DOE", - -13.348655700683594 - ], - [ - "ред", - -13.348966598510742 - ], - [ - "Jahr", - -13.34913444519043 - ], - [ - "▁entitlement", - -13.34921646118164 - ], - [ - "schuldig", - -13.349217414855957 - ], - [ - "▁Münster", - -13.349218368530273 - ], - [ - "pository", - -13.349451065063477 - ], - [ - "▁numero", - -13.350220680236816 - ], - [ - "▁entsprechen", - -13.350383758544922 - ], - [ - "▁astronaut", - -13.350502967834473 - ], - [ - "▁hexagon", - -13.350502967834473 - ], - [ - "▁DAMAGE", - -13.350503921508789 - ], - [ - "▁Quartz", - -13.350504875183105 - ], - [ - "▁rédaction", - 
-13.350504875183105 - ], - [ - "▁replenish", - -13.350508689880371 - ], - [ - "▁amoureux", - -13.350523948669434 - ], - [ - "▁opțiun", - -13.350616455078125 - ], - [ - "Custom", - -13.350622177124023 - ], - [ - "▁Telekom", - -13.350639343261719 - ], - [ - "▁RFID", - -13.351163864135742 - ], - [ - "▁Scorpio", - -13.351264953613281 - ], - [ - "▁thirst", - -13.35152816772461 - ], - [ - "▁Kosovo", - -13.351791381835938 - ], - [ - "▁precursor", - -13.351794242858887 - ], - [ - "▁sarbatori", - -13.351810455322266 - ], - [ - "▁Daisy", - -13.351828575134277 - ], - [ - "▁Dropbox", - -13.351898193359375 - ], - [ - "Smith", - -13.351949691772461 - ], - [ - "contabil", - -13.352191925048828 - ], - [ - "▁monnaie", - -13.352437973022461 - ], - [ - "capsul", - -13.352577209472656 - ], - [ - "treff", - -13.352760314941406 - ], - [ - "beauftragte", - -13.352761268615723 - ], - [ - "industrial", - -13.353006362915039 - ], - [ - "responsables", - -13.353010177612305 - ], - [ - "▁FIRST", - -13.353080749511719 - ], - [ - "▁crezut", - -13.35308837890625 - ], - [ - "▁reseller", - -13.353107452392578 - ], - [ - "▁direcți", - -13.353154182434082 - ], - [ - "mouvoir", - -13.353294372558594 - ], - [ - "▁Invite", - -13.353431701660156 - ], - [ - "▁constructii", - -13.353440284729004 - ], - [ - "▁oublié", - -13.353577613830566 - ], - [ - "găseșt", - -13.353687286376953 - ], - [ - "▁végét", - -13.353755950927734 - ], - [ - "idine", - -13.35385799407959 - ], - [ - "▁Ajout", - -13.353951454162598 - ], - [ - "▁Shelf", - -13.354195594787598 - ], - [ - "HALL", - -13.35422420501709 - ], - [ - "▁nostalgia", - -13.35437297821045 - ], - [ - "▁ottoman", - -13.35437297821045 - ], - [ - "▁ambalaj", - -13.354398727416992 - ], - [ - "municipiul", - -13.354405403137207 - ], - [ - "NOVA", - -13.354500770568848 - ], - [ - "▁disregard", - -13.354997634887695 - ], - [ - "▁bijuterii", - -13.355018615722656 - ], - [ - "▁sorgfältig", - -13.355018615722656 - ], - [ - "vraient", - -13.355307579040527 - ], - [ - 
"▁backsplash", - -13.355669975280762 - ], - [ - "▁nuisance", - -13.355679512023926 - ], - [ - "▁Territory", - -13.35568618774414 - ], - [ - "▁surprins", - -13.355693817138672 - ], - [ - "enchanting", - -13.35571002960205 - ], - [ - "trospecti", - -13.355847358703613 - ], - [ - "▁dvd", - -13.356199264526367 - ], - [ - "Totally", - -13.356329917907715 - ], - [ - "▁Edelstahl", - -13.35696029663086 - ], - [ - "▁sequencing", - -13.356961250305176 - ], - [ - "▁Circus", - -13.35696792602539 - ], - [ - "▁ashamed", - -13.35696792602539 - ], - [ - "▁horrific", - -13.357028007507324 - ], - [ - "▁taiat", - -13.357033729553223 - ], - [ - "▁Angehörige", - -13.357125282287598 - ], - [ - "Michel", - -13.357256889343262 - ], - [ - "▁communion", - -13.357298851013184 - ], - [ - "▁psiho", - -13.357378959655762 - ], - [ - "losigkeit", - -13.357405662536621 - ], - [ - "dipping", - -13.357512474060059 - ], - [ - "▁profesională", - -13.357608795166016 - ], - [ - "Indiferent", - -13.357609748840332 - ], - [ - "▁crestin", - -13.357723236083984 - ], - [ - "wholesome", - -13.357796669006348 - ], - [ - "▁Welfare", - -13.358257293701172 - ], - [ - "▁plentiful", - -13.358257293701172 - ], - [ - "▁Triumph", - -13.358258247375488 - ], - [ - "▁fascination", - -13.358260154724121 - ], - [ - "▁vicious", - -13.358291625976562 - ], - [ - "▁Höchst", - -13.358294486999512 - ], - [ - "▁Dunkel", - -13.358386039733887 - ], - [ - "▁harass", - -13.358406066894531 - ], - [ - "ambogia", - -13.358475685119629 - ], - [ - "▁synonymous", - -13.358598709106445 - ], - [ - "bottom", - -13.35879898071289 - ], - [ - "▁bénévole", - -13.358906745910645 - ], - [ - "▁suprafaț", - -13.358906745910645 - ], - [ - "▁umplut", - -13.358997344970703 - ], - [ - "▁Teddy", - -13.359162330627441 - ], - [ - "breathable", - -13.359292984008789 - ], - [ - "▁Toshiba", - -13.3595552444458 - ], - [ - "▁seismic", - -13.359569549560547 - ], - [ - "▁dringend", - -13.359583854675293 - ], - [ - "▁cultură", - -13.359585762023926 - ], - [ - 
"▁Waffen", - -13.359665870666504 - ], - [ - "▁Bubble", - -13.359702110290527 - ], - [ - "▁Brigade", - -13.359759330749512 - ], - [ - "▁Blatt", - -13.36012077331543 - ], - [ - "▁scénario", - -13.36020565032959 - ], - [ - "allah", - -13.360396385192871 - ], - [ - "▁superintendent", - -13.360855102539062 - ], - [ - "pflanzen", - -13.360856056213379 - ], - [ - "▁kurzfristig", - -13.360856056213379 - ], - [ - "▁raspberry", - -13.360876083374023 - ], - [ - "▁Evident", - -13.360904693603516 - ], - [ - "▁inutile", - -13.361076354980469 - ], - [ - "prouvé", - -13.361104011535645 - ], - [ - "▁obtien", - -13.36141300201416 - ], - [ - "▁Matthias", - -13.361506462097168 - ], - [ - "▁déclench", - -13.361506462097168 - ], - [ - "Situationen", - -13.361529350280762 - ], - [ - "▁Disclaimer", - -13.362156867980957 - ], - [ - "▁loneliness", - -13.362156867980957 - ], - [ - "▁Gothic", - -13.362164497375488 - ], - [ - "▁humility", - -13.362165451049805 - ], - [ - "▁machiaj", - -13.362175941467285 - ], - [ - "▁Sophia", - -13.362178802490234 - ], - [ - "▁Forecast", - -13.362265586853027 - ], - [ - "IBLE", - -13.362456321716309 - ], - [ - "ivism", - -13.362480163574219 - ], - [ - "israel", - -13.36278247833252 - ], - [ - "▁kümmern", - -13.362809181213379 - ], - [ - "▁verbreitet", - -13.362825393676758 - ], - [ - "▁capacitor", - -13.362832069396973 - ], - [ - "deprived", - -13.3634614944458 - ], - [ - "unbiased", - -13.3634614944458 - ], - [ - "▁Dominique", - -13.3634614944458 - ], - [ - "▁Bamboo", - -13.363462448120117 - ], - [ - "▁Heinrich", - -13.363465309143066 - ], - [ - "individualized", - -13.363550186157227 - ], - [ - "▁ansprechen", - -13.363776206970215 - ], - [ - "ordinaire", - -13.363801002502441 - ], - [ - "▁Ucraina", - -13.364112854003906 - ], - [ - "▁militare", - -13.364115715026855 - ], - [ - "massif", - -13.364352226257324 - ], - [ - "▁emisiuni", - -13.364501953125 - ], - [ - "maladies", - -13.364622116088867 - ], - [ - "▁pneumonia", - -13.364765167236328 - ], - [ - 
"▁graffiti", - -13.364767074584961 - ], - [ - "▁Determine", - -13.3648099899292 - ], - [ - "▁Northwestern", - -13.364893913269043 - ], - [ - "▁grasimi", - -13.364897727966309 - ], - [ - "▁lebendig", - -13.364920616149902 - ], - [ - "▁cifre", - -13.364946365356445 - ], - [ - "▁accelerator", - -13.36533260345459 - ], - [ - "▁nib", - -13.365374565124512 - ], - [ - "▁Jocuri", - -13.365400314331055 - ], - [ - "▁außergewöhnlich", - -13.365402221679688 - ], - [ - "▁orchid", - -13.36542797088623 - ], - [ - "zugreifen", - -13.365530967712402 - ], - [ - "utilisent", - -13.365662574768066 - ], - [ - "▁nineteenth", - -13.366071701049805 - ], - [ - "improvisation", - -13.366072654724121 - ], - [ - "▁Disclosure", - -13.366072654724121 - ], - [ - "▁Überraschung", - -13.366072654724121 - ], - [ - "▁Casual", - -13.366093635559082 - ], - [ - "▁Witness", - -13.366093635559082 - ], - [ - "teacher", - -13.366125106811523 - ], - [ - "Printed", - -13.366129875183105 - ], - [ - "▁prețuri", - -13.366189956665039 - ], - [ - "rues", - -13.366216659545898 - ], - [ - "▁cerinte", - -13.366338729858398 - ], - [ - "rouvent", - -13.36662483215332 - ], - [ - "assembling", - -13.36673355102539 - ], - [ - "▁atenție", - -13.366769790649414 - ], - [ - "▁amintiri", - -13.366782188415527 - ], - [ - "▁sustinut", - -13.366805076599121 - ], - [ - "Digital", - -13.367257118225098 - ], - [ - "▁Deborah", - -13.36738109588623 - ], - [ - "gesichts", - -13.367382049560547 - ], - [ - "▁temperament", - -13.367440223693848 - ], - [ - "▁competency", - -13.367447853088379 - ], - [ - "▁dwarf", - -13.367515563964844 - ], - [ - "▁dureaz", - -13.367539405822754 - ], - [ - "habilit", - -13.367764472961426 - ], - [ - "leaned", - -13.3679838180542 - ], - [ - "▁illicit", - -13.368348121643066 - ], - [ - "Availability", - -13.368691444396973 - ], - [ - "▁Brașov", - -13.368691444396973 - ], - [ - "▁Pyramid", - -13.368691444396973 - ], - [ - "▁achievable", - -13.368691444396973 - ], - [ - "▁judiciaire", - -13.368691444396973 - 
], - [ - "Übrigen", - -13.368693351745605 - ], - [ - "▁activism", - -13.368795394897461 - ], - [ - "▁boycott", - -13.368839263916016 - ], - [ - "Desigur", - -13.368927001953125 - ], - [ - "klingt", - -13.369264602661133 - ], - [ - "▁Leidenschaft", - -13.369346618652344 - ], - [ - "▁Richtig", - -13.369701385498047 - ], - [ - "▁Airbnb", - -13.370002746582031 - ], - [ - "▁învățământ", - -13.370002746582031 - ], - [ - "Kampagne", - -13.370004653930664 - ], - [ - "▁thumbnail", - -13.370014190673828 - ], - [ - "Bestimmungen", - -13.370016098022461 - ], - [ - "▁vollkommen", - -13.37001895904541 - ], - [ - "▁biomass", - -13.370027542114258 - ], - [ - "▁escalate", - -13.370030403137207 - ], - [ - "wächst", - -13.370085716247559 - ], - [ - "▁scăpa", - -13.370098114013672 - ], - [ - "▁résult", - -13.37014389038086 - ], - [ - "▁shrine", - -13.370217323303223 - ], - [ - "maximizing", - -13.370370864868164 - ], - [ - "avoue", - -13.370492935180664 - ], - [ - "dirigeants", - -13.370665550231934 - ], - [ - "▁cerveau", - -13.370672225952148 - ], - [ - "▁proast", - -13.370955467224121 - ], - [ - "▁contaminants", - -13.371325492858887 - ], - [ - "effectue", - -13.37151050567627 - ], - [ - "ediție", - -13.371539115905762 - ], - [ - "monetiz", - -13.371772766113281 - ], - [ - "▁deplasare", - -13.371976852416992 - ], - [ - "▁Sfant", - -13.37209415435791 - ], - [ - "ROOM", - -13.372113227844238 - ], - [ - "bushes", - -13.372151374816895 - ], - [ - "mairie", - -13.37251091003418 - ], - [ - "obligate", - -13.372528076171875 - ], - [ - "▁tug", - -13.372573852539062 - ], - [ - "▁Collector", - -13.372632026672363 - ], - [ - "▁annoyed", - -13.372633934020996 - ], - [ - "▁aerobic", - -13.372654914855957 - ], - [ - "▁integer", - -13.372830390930176 - ], - [ - "▁Upload", - -13.373249053955078 - ], - [ - "▁impartial", - -13.37346076965332 - ], - [ - "▁discuţi", - -13.373623847961426 - ], - [ - "gastrointestinal", - -13.37394905090332 - ], - [ - "▁chiropractor", - -13.37394905090332 - ], - [ - 
"▁treptat", - -13.373950004577637 - ], - [ - "▁fishermen", - -13.37395191192627 - ], - [ - "levitra", - -13.3739595413208 - ], - [ - "Gruppe", - -13.373964309692383 - ], - [ - "▁Apostle", - -13.373970985412598 - ], - [ - "▁conseillé", - -13.374068260192871 - ], - [ - "Isra", - -13.37421703338623 - ], - [ - "▁Persönlichkeit", - -13.374431610107422 - ], - [ - "▁cantitati", - -13.374459266662598 - ], - [ - "▁incredibil", - -13.374614715576172 - ], - [ - "▁Berater", - -13.374800682067871 - ], - [ - "▁propuneri", - -13.374835014343262 - ], - [ - "MEDIA", - -13.375236511230469 - ], - [ - "▁opaque", - -13.37526798248291 - ], - [ - "▁Nielsen", - -13.375269889831543 - ], - [ - "▁cartofi", - -13.375277519226074 - ], - [ - "▁Whale", - -13.37533950805664 - ], - [ - "erzeugen", - -13.375890731811523 - ], - [ - "▁knack", - -13.375931739807129 - ], - [ - "Kandidat", - -13.375936508178711 - ], - [ - "▁tradițional", - -13.375937461853027 - ], - [ - "zählige", - -13.375983238220215 - ], - [ - "▁Petroleum", - -13.376588821411133 - ], - [ - "▁deficiencies", - -13.376588821411133 - ], - [ - "▁persecution", - -13.376588821411133 - ], - [ - "▁zgomot", - -13.376588821411133 - ], - [ - "▁reiterate", - -13.376592636108398 - ], - [ - "▁Slice", - -13.376670837402344 - ], - [ - "▁envy", - -13.376704216003418 - ], - [ - "▁stomac", - -13.376851081848145 - ], - [ - "Donnell", - -13.376914978027344 - ], - [ - "▁primordial", - -13.377249717712402 - ], - [ - "reclining", - -13.377274513244629 - ], - [ - "PASS", - -13.377861976623535 - ], - [ - "▁Resistance", - -13.377910614013672 - ], - [ - "▁Widerruf", - -13.377911567687988 - ], - [ - "▁vodka", - -13.377911567687988 - ], - [ - "▁yolk", - -13.377912521362305 - ], - [ - "ollywood", - -13.377915382385254 - ], - [ - "▁truffle", - -13.377933502197266 - ], - [ - "▁Sänger", - -13.377955436706543 - ], - [ - "▁Kenntnis", - -13.377968788146973 - ], - [ - "▁Kiel", - -13.37803840637207 - ], - [ - "▁Mutual", - -13.378044128417969 - ], - [ - "▁saliva", - 
-13.37816047668457 - ], - [ - "▁renforce", - -13.378411293029785 - ], - [ - "▁mulch", - -13.378680229187012 - ], - [ - "▁reviste", - -13.378875732421875 - ], - [ - "lucrarea", - -13.378978729248047 - ], - [ - "▁multiply", - -13.379130363464355 - ], - [ - "▁marshmallow", - -13.379234313964844 - ], - [ - "▁Durchschnitt", - -13.379288673400879 - ], - [ - "▁Authorities", - -13.379426002502441 - ], - [ - "▁greed", - -13.379521369934082 - ], - [ - "Visiting", - -13.379638671875 - ], - [ - "Carlton", - -13.379727363586426 - ], - [ - "▁splend", - -13.37975025177002 - ], - [ - "▁Erkenntnisse", - -13.379898071289062 - ], - [ - "▁Russie", - -13.379916191101074 - ], - [ - "Agence", - -13.38007926940918 - ], - [ - "schickt", - -13.380288124084473 - ], - [ - "##", - -13.3804931640625 - ], - [ - "▁Erweiterung", - -13.380560874938965 - ], - [ - "▁Franchise", - -13.380560874938965 - ], - [ - "Dedicated", - -13.380563735961914 - ], - [ - "▁Wisdom", - -13.380569458007812 - ], - [ - "▁gagnant", - -13.380592346191406 - ], - [ - "planetary", - -13.380598068237305 - ], - [ - "▁affinity", - -13.380619049072266 - ], - [ - "▁préférence", - -13.380739212036133 - ], - [ - "▁intellect", - -13.380810737609863 - ], - [ - "▁Translat", - -13.380830764770508 - ], - [ - "▁Sultan", - -13.38089370727539 - ], - [ - "▁birouri", - -13.38101577758789 - ], - [ - "▁Academie", - -13.381224632263184 - ], - [ - "▁consequential", - -13.38138484954834 - ], - [ - "▁festgestellt", - -13.381402015686035 - ], - [ - "▁Chanel", - -13.381444931030273 - ], - [ - "▁soutenu", - -13.381875038146973 - ], - [ - "▁Montessori", - -13.381888389587402 - ], - [ - "▁equitable", - -13.381892204284668 - ], - [ - "▁théorie", - -13.381893157958984 - ], - [ - "▁primavara", - -13.3818941116333 - ], - [ - "▁Daughter", - -13.38189697265625 - ], - [ - "▁Dixon", - -13.381898880004883 - ], - [ - "▁unravel", - -13.38190746307373 - ], - [ - "Olimp", - -13.381915092468262 - ], - [ - "▁disturbed", - -13.381916999816895 - ], - [ - "▁novelty", - 
-13.382004737854004 - ], - [ - "synchronous", - -13.382113456726074 - ], - [ - "relevant", - -13.382166862487793 - ], - [ - "bourgeois", - -13.38251781463623 - ], - [ - "▁Parfum", - -13.38255500793457 - ], - [ - "▁Polonia", - -13.382563591003418 - ], - [ - "▁monoton", - -13.382781028747559 - ], - [ - "tratare", - -13.38302230834961 - ], - [ - "dumping", - -13.38318157196045 - ], - [ - "▁Bibliothek", - -13.383217811584473 - ], - [ - "▁Saskatchewan", - -13.383217811584473 - ], - [ - "▁experiential", - -13.383217811584473 - ], - [ - "▁verursacht", - -13.383217811584473 - ], - [ - "intègre", - -13.383218765258789 - ], - [ - "▁Intermediate", - -13.383275032043457 - ], - [ - "Israel", - -13.383476257324219 - ], - [ - "lucreaza", - -13.383495330810547 - ], - [ - "▁quantify", - -13.383862495422363 - ], - [ - "▁zahăr", - -13.383882522583008 - ], - [ - "▁încadr", - -13.383902549743652 - ], - [ - "Personalized", - -13.383946418762207 - ], - [ - "▁Chronic", - -13.384309768676758 - ], - [ - "hôpital", - -13.384549140930176 - ], - [ - "▁diskutiert", - -13.384549140930176 - ], - [ - "electrique", - -13.3848876953125 - ], - [ - "ethos", - -13.384978294372559 - ], - [ - "Nase", - -13.385059356689453 - ], - [ - "atmosphère", - -13.385214805603027 - ], - [ - "▁ungefähr", - -13.385215759277344 - ], - [ - "évaluer", - -13.385251998901367 - ], - [ - "▁scuz", - -13.385321617126465 - ], - [ - "haltige", - -13.38533878326416 - ], - [ - "January", - -13.38557243347168 - ], - [ - "▁Sharma", - -13.385603904724121 - ], - [ - "▁seizures", - -13.385881423950195 - ], - [ - "▁zucchini", - -13.385881423950195 - ], - [ - "▁Stadi", - -13.385885238647461 - ], - [ - "▁eccentric", - -13.385885238647461 - ], - [ - "▁offensichtlich", - -13.385909080505371 - ], - [ - "▁Irvine", - -13.385920524597168 - ], - [ - "cuprinse", - -13.38601303100586 - ], - [ - "▁Arbitr", - -13.386157035827637 - ], - [ - "Buenos", - -13.386183738708496 - ], - [ - "▁Shelter", - -13.386210441589355 - ], - [ - "CEPT", - 
-13.386454582214355 - ], - [ - "ouvri", - -13.386455535888672 - ], - [ - "acryl", - -13.386539459228516 - ], - [ - "▁Gourmet", - -13.38654899597168 - ], - [ - "scented", - -13.386595726013184 - ], - [ - "doubling", - -13.38659954071045 - ], - [ - "▁rafina", - -13.386608123779297 - ], - [ - "▁Vereinbarung", - -13.38721752166748 - ], - [ - "▁Dashboard", - -13.387218475341797 - ], - [ - "▁Sandwich", - -13.387218475341797 - ], - [ - "▁Riviera", - -13.387226104736328 - ], - [ - "échec", - -13.387237548828125 - ], - [ - "Giro", - -13.387253761291504 - ], - [ - "▁oasis", - -13.38725757598877 - ], - [ - "▁apology", - -13.3872709274292 - ], - [ - "▁YEAR", - -13.387272834777832 - ], - [ - "▁realtor", - -13.387504577636719 - ], - [ - "acheteur", - -13.38754653930664 - ], - [ - "▁larva", - -13.387613296508789 - ], - [ - "▁invitați", - -13.388097763061523 - ], - [ - "exhibiting", - -13.38830852508545 - ], - [ - "modernen", - -13.388331413269043 - ], - [ - "▁Collaboration", - -13.38855266571045 - ], - [ - "▁dezvălui", - -13.38855266571045 - ], - [ - "▁kiosk", - -13.38855266571045 - ], - [ - "▁Bermuda", - -13.388553619384766 - ], - [ - "Copiii", - -13.388564109802246 - ], - [ - "▁goddess", - -13.388581275939941 - ], - [ - "uplifting", - -13.388609886169434 - ], - [ - "▁simultan", - -13.388808250427246 - ], - [ - "▁episod", - -13.388884544372559 - ], - [ - "▁Braşov", - -13.38922119140625 - ], - [ - "cunoscută", - -13.389634132385254 - ], - [ - "▁Cherokee", - -13.389890670776367 - ], - [ - "▁Kazakhstan", - -13.389890670776367 - ], - [ - "▁Lauderdale", - -13.389890670776367 - ], - [ - "▁închisoare", - -13.389898300170898 - ], - [ - "▁Christchurch", - -13.389934539794922 - ], - [ - "▁influenţ", - -13.389982223510742 - ], - [ - "▁Meghan", - -13.390019416809082 - ], - [ - "▁Dienstleistung", - -13.390557289123535 - ], - [ - "▁cladiri", - -13.390564918518066 - ], - [ - "▁evrei", - -13.391148567199707 - ], - [ - "▁oatmeal", - -13.391230583190918 - ], - [ - "▁chronique", - 
-13.3912353515625 - ], - [ - "▁associée", - -13.391264915466309 - ], - [ - "▁Goose", - -13.391283988952637 - ], - [ - "gänz", - -13.391855239868164 - ], - [ - "▁Blätter", - -13.391901969909668 - ], - [ - "▁jurnalist", - -13.392212867736816 - ], - [ - "cedat", - -13.392263412475586 - ], - [ - "nommée", - -13.392315864562988 - ], - [ - "écrivain", - -13.392572402954102 - ], - [ - "▁epoxy", - -13.392577171325684 - ], - [ - "▁verlangt", - -13.392590522766113 - ], - [ - "Störung", - -13.392708778381348 - ], - [ - "▁Doyle", - -13.392729759216309 - ], - [ - "▁Philharmoni", - -13.392844200134277 - ], - [ - "▁déclare", - -13.393044471740723 - ], - [ - "effort", - -13.393045425415039 - ], - [ - "ström", - -13.393118858337402 - ], - [ - "▁cunoaşte", - -13.393244743347168 - ], - [ - "▁gigantic", - -13.3932466506958 - ], - [ - "któ", - -13.393378257751465 - ], - [ - "▁ilustr", - -13.393529891967773 - ], - [ - "▁frec", - -13.39371109008789 - ], - [ - "▁Syracuse", - -13.393916130065918 - ], - [ - "▁Einwilligung", - -13.393917083740234 - ], - [ - "▁miraculous", - -13.393917083740234 - ], - [ - "▁ökologisch", - -13.393917083740234 - ], - [ - "▁Simmons", - -13.393922805786133 - ], - [ - "▁albastru", - -13.393926620483398 - ], - [ - "besser", - -13.393962860107422 - ], - [ - "▁interioare", - -13.394006729125977 - ], - [ - "▁Trocken", - -13.394068717956543 - ], - [ - "niveau", - -13.39406967163086 - ], - [ - "▁Torah", - -13.394122123718262 - ], - [ - "▁beobachten", - -13.3945894241333 - ], - [ - "▁behandeln", - -13.394637107849121 - ], - [ - "staffed", - -13.394742965698242 - ], - [ - "hütte", - -13.394824028015137 - ], - [ - "Central", - -13.394939422607422 - ], - [ - "▁Freiburg", - -13.395198822021484 - ], - [ - "▁Netanyahu", - -13.395261764526367 - ], - [ - "▁Lexington", - -13.395302772521973 - ], - [ - "▁insotit", - -13.395492553710938 - ], - [ - "▁depasi", - -13.39560604095459 - ], - [ - "sewage", - -13.395853996276855 - ], - [ - "erkrankung", - -13.395951271057129 - ], - [ - 
"▁părţi", - -13.396234512329102 - ], - [ - "▁Nixon", - -13.39661693572998 - ], - [ - "Byron", - -13.396905899047852 - ], - [ - "▁varietat", - -13.39724063873291 - ], - [ - "▁Bildschirm", - -13.397299766540527 - ], - [ - "▁accompli", - -13.397424697875977 - ], - [ - "affirmed", - -13.397525787353516 - ], - [ - "▁phyto", - -13.397533416748047 - ], - [ - "sectiune", - -13.397592544555664 - ], - [ - "abteilung", - -13.397932052612305 - ], - [ - "▁voastre", - -13.397957801818848 - ], - [ - "GitHub", - -13.397958755493164 - ], - [ - "▁Jorge", - -13.39796257019043 - ], - [ - "ACTION", - -13.397972106933594 - ], - [ - "voastra", - -13.397984504699707 - ], - [ - "▁Peanut", - -13.397987365722656 - ], - [ - "▁bilingual", - -13.398011207580566 - ], - [ - "▁nourriture", - -13.39803695678711 - ], - [ - "▁Asphalt", - -13.398640632629395 - ], - [ - "emballage", - -13.399310111999512 - ], - [ - "▁sanitation", - -13.399310111999512 - ], - [ - "▁Dessert", - -13.399313926696777 - ], - [ - "intitulé", - -13.399322509765625 - ], - [ - "▁acţiune", - -13.399374008178711 - ], - [ - "▁Übersetzung", - -13.399402618408203 - ], - [ - "destinate", - -13.39941692352295 - ], - [ - "▁Goddess", - -13.399504661560059 - ], - [ - "poziție", - -13.399576187133789 - ], - [ - "denumirea", - -13.400002479553223 - ], - [ - "cantitatea", - -13.40002727508545 - ], - [ - "▁Stereo", - -13.400223731994629 - ], - [ - "object", - -13.400373458862305 - ], - [ - "▁décè", - -13.40058708190918 - ], - [ - "▁Handeln", - -13.400665283203125 - ], - [ - "▁ambience", - -13.400697708129883 - ], - [ - "▁Lindsay", - -13.4006986618042 - ], - [ - "▁tensiune", - -13.400781631469727 - ], - [ - "▁thrift", - -13.400788307189941 - ], - [ - "▁Optimiz", - -13.400843620300293 - ], - [ - "▁beantworten", - -13.401338577270508 - ], - [ - "▁magistrat", - -13.401342391967773 - ], - [ - "évidence", - -13.402016639709473 - ], - [ - "▁Eclipse", - -13.402016639709473 - ], - [ - "▁Ribbon", - -13.402016639709473 - ], - [ - "▁condensation", - 
-13.402016639709473 - ], - [ - "▁innocence", - -13.402018547058105 - ], - [ - "▁mascara", - -13.402023315429688 - ], - [ - "▁seventeen", - -13.402290344238281 - ], - [ - "▁compétent", - -13.402694702148438 - ], - [ - "bewertet", - -13.402717590332031 - ], - [ - "▁Muzic", - -13.40285587310791 - ], - [ - "complexities", - -13.402928352355957 - ], - [ - "ddington", - -13.403324127197266 - ], - [ - "Entwickler", - -13.403372764587402 - ], - [ - "masonry", - -13.4033784866333 - ], - [ - "Führer", - -13.403386116027832 - ], - [ - "▁awakening", - -13.403388977050781 - ], - [ - "▁lovitur", - -13.403806686401367 - ], - [ - "gebrochen", - -13.404068946838379 - ], - [ - "indexed", - -13.404478073120117 - ], - [ - "campania", - -13.404515266418457 - ], - [ - "▁Fountain", - -13.404730796813965 - ], - [ - "▁Joomla", - -13.404730796813965 - ], - [ - "▁Superintendent", - -13.404730796813965 - ], - [ - "▁Dahl", - -13.404742240905762 - ], - [ - "▁Benefici", - -13.404863357543945 - ], - [ - "optimiser", - -13.404919624328613 - ], - [ - "bursting", - -13.405380249023438 - ], - [ - "diplom", - -13.405427932739258 - ], - [ - "microsoft", - -13.405621528625488 - ], - [ - "▁correlate", - -13.405776977539062 - ], - [ - "▁arhitectura", - -13.405848503112793 - ], - [ - "▁lunette", - -13.40611743927002 - ], - [ - "Statistical", - -13.406147003173828 - ], - [ - "▁iarnă", - -13.406201362609863 - ], - [ - "▁importanț", - -13.406932830810547 - ], - [ - "sistence", - -13.407366752624512 - ], - [ - "associated", - -13.407402992248535 - ], - [ - "Occident", - -13.407452583312988 - ], - [ - "▁Heidelberg", - -13.407452583312988 - ], - [ - "▁acquaintance", - -13.407452583312988 - ], - [ - "Introducing", - -13.407453536987305 - ], - [ - "▁ripple", - -13.407480239868164 - ], - [ - "▁Childhood", - -13.407563209533691 - ], - [ - "drywall", - -13.407577514648438 - ], - [ - "Vreau", - -13.40771770477295 - ], - [ - "▁compétence", - -13.407967567443848 - ], - [ - "▁asteapta", - -13.408135414123535 - ], - [ - 
"▁duhovnic", - -13.408135414123535 - ], - [ - "▁învăţământ", - -13.408141136169434 - ], - [ - "encompassing", - -13.40829849243164 - ], - [ - "1997)", - -13.408370018005371 - ], - [ - "▁atractiv", - -13.408515930175781 - ], - [ - "Majoritatea", - -13.408775329589844 - ], - [ - "▁bungalow", - -13.40881633758545 - ], - [ - "▁Introduce", - -13.408817291259766 - ], - [ - "▁culprit", - -13.408817291259766 - ], - [ - "▁malheureusement", - -13.408817291259766 - ], - [ - "▁voudrai", - -13.408817291259766 - ], - [ - "Europäische", - -13.408825874328613 - ], - [ - "wunsch", - -13.408880233764648 - ], - [ - "▁înțeles", - -13.408892631530762 - ], - [ - "▁infestation", - -13.40889835357666 - ], - [ - "Bringing", - -13.409186363220215 - ], - [ - "▁Mehrheit", - -13.409229278564453 - ], - [ - "ски", - -13.409456253051758 - ], - [ - "▁procéder", - -13.409499168395996 - ], - [ - "grupului", - -13.409504890441895 - ], - [ - "▁dispoziti", - -13.40964412689209 - ], - [ - "▁snug", - -13.409950256347656 - ], - [ - "▁Afrika", - -13.41018295288086 - ], - [ - "▁Madagascar", - -13.41018295288086 - ], - [ - "Părinte", - -13.410195350646973 - ], - [ - "▁Clayton", - -13.410223960876465 - ], - [ - "▁antagonist", - -13.410239219665527 - ], - [ - "termeni", - -13.410250663757324 - ], - [ - "▁Literary", - -13.410391807556152 - ], - [ - "▁Babylon", - -13.410452842712402 - ], - [ - "▁überprüfen", - -13.410865783691406 - ], - [ - "▁duminica", - -13.410879135131836 - ], - [ - "farbig", - -13.410970687866211 - ], - [ - "nennt", - -13.411064147949219 - ], - [ - "annual", - -13.411487579345703 - ], - [ - "▁Qualcomm", - -13.41154956817627 - ], - [ - "▁Slovakia", - -13.41154956817627 - ], - [ - "▁plictis", - -13.411552429199219 - ], - [ - "▁prairie", - -13.411554336547852 - ], - [ - "▁Schatten", - -13.411622047424316 - ], - [ - "▁compléter", - -13.41223430633545 - ], - [ - "inauguration", - -13.412376403808594 - ], - [ - "▁apărare", - -13.412407875061035 - ], - [ - "▁întăr", - -13.412412643432617 - ], - [ - 
"▁pronunciation", - -13.412919044494629 - ], - [ - "▁bewährt", - -13.412919998168945 - ], - [ - "▁Viertel", - -13.413084983825684 - ], - [ - "▁Heidi", - -13.413252830505371 - ], - [ - "▁Gummi", - -13.413507461547852 - ], - [ - "▁veggie", - -13.413552284240723 - ], - [ - "▁monsieur", - -13.413604736328125 - ], - [ - "éveil", - -13.413630485534668 - ], - [ - "shipments", - -13.413928985595703 - ], - [ - "▁Medikamente", - -13.414290428161621 - ], - [ - "▁Johannesburg", - -13.414314270019531 - ], - [ - "▁ermittelt", - -13.414321899414062 - ], - [ - "▁bataille", - -13.414440155029297 - ], - [ - "extrem", - -13.414609909057617 - ], - [ - "▁1:2", - -13.414671897888184 - ], - [ - "Array", - -13.414725303649902 - ], - [ - "▁portail", - -13.414857864379883 - ], - [ - "▁găzdui", - -13.414977073669434 - ], - [ - "▁Calcium", - -13.41497802734375 - ], - [ - "▁Correction", - -13.415104866027832 - ], - [ - "bureaux", - -13.41528034210205 - ], - [ - "bestselling", - -13.415338516235352 - ], - [ - "Übungen", - -13.415420532226562 - ], - [ - "paramètres", - -13.415633201599121 - ], - [ - "▁Provincial", - -13.415663719177246 - ], - [ - "▁outrageous", - -13.415680885314941 - ], - [ - "▁Giveaway", - -13.415775299072266 - ], - [ - "▁LGBTQ", - -13.41589641571045 - ], - [ - "geklärt", - -13.416854858398438 - ], - [ - "▁Karlsruhe", - -13.417038917541504 - ], - [ - "▁esențial", - -13.417038917541504 - ], - [ - "avancée", - -13.41703987121582 - ], - [ - "hesitant", - -13.417040824890137 - ], - [ - "enlarged", - -13.417069435119629 - ], - [ - "▁inherit", - -13.417121887207031 - ], - [ - "Food", - -13.4171724319458 - ], - [ - "bucuria", - -13.417181015014648 - ], - [ - "▁BTW", - -13.417400360107422 - ], - [ - "associe", - -13.417579650878906 - ], - [ - "▁Möchte", - -13.417742729187012 - ], - [ - "demokrat", - -13.417789459228516 - ], - [ - "Turcia", - -13.417964935302734 - ], - [ - "forged", - -13.418370246887207 - ], - [ - "▁Zhao", - -13.418442726135254 - ], - [ - "▁cherries", - 
-13.418556213378906 - ], - [ - "▁evangelical", - -13.418631553649902 - ], - [ - "▁jüng", - -13.418792724609375 - ], - [ - "spans", - -13.41880989074707 - ], - [ - "▁străluc", - -13.41888427734375 - ], - [ - "▁geschie", - -13.41893196105957 - ], - [ - "▁Tattoo", - -13.419112205505371 - ], - [ - "sanitary", - -13.419114112854004 - ], - [ - "▁biopsy", - -13.419353485107422 - ], - [ - "▁imprumut", - -13.419795036315918 - ], - [ - "▁unreasonable", - -13.419795036315918 - ], - [ - "Funktion", - -13.419800758361816 - ], - [ - "▁prohibition", - -13.419904708862305 - ], - [ - "▁Prezent", - -13.419939041137695 - ], - [ - "boosted", - -13.419967651367188 - ], - [ - "▁chalet", - -13.420382499694824 - ], - [ - "▁tanar", - -13.420450210571289 - ], - [ - "Faktoren", - -13.420489311218262 - ], - [ - "▁Mozilla", - -13.420550346374512 - ], - [ - "▁Lambert", - -13.420760154724121 - ], - [ - "▁Cruci", - -13.420927047729492 - ], - [ - "▁Flugzeug", - -13.421198844909668 - ], - [ - "reassure", - -13.421205520629883 - ], - [ - "envisioned", - -13.421542167663574 - ], - [ - "Traditionally", - -13.421773910522461 - ], - [ - "▁parametri", - -13.42185115814209 - ], - [ - "▁unicorn", - -13.421891212463379 - ], - [ - "▁adéquat", - -13.421894073486328 - ], - [ - "▁Colonial", - -13.421915054321289 - ], - [ - "▁Kwa", - -13.422097206115723 - ], - [ - "▁SERV", - -13.422333717346191 - ], - [ - "tourism", - -13.422627449035645 - ], - [ - "▁Kiev", - -13.422974586486816 - ], - [ - "heightened", - -13.42309284210205 - ], - [ - "circulating", - -13.423099517822266 - ], - [ - "▁Kreditkarte", - -13.42310619354248 - ], - [ - "gedruckt", - -13.423110008239746 - ], - [ - "▁Depend", - -13.423120498657227 - ], - [ - "Style", - -13.423196792602539 - ], - [ - "▁Rettungs", - -13.42325496673584 - ], - [ - "wrongful", - -13.423418998718262 - ], - [ - "▁devour", - -13.423453330993652 - ], - [ - "▁manevr", - -13.423582077026367 - ], - [ - "carora", - -13.423628807067871 - ], - [ - "erfolgreichen", - -13.423723220825195 
- ], - [ - "überwiegend", - -13.423942565917969 - ], - [ - "▁Sauvignon", - -13.423942565917969 - ], - [ - "händler", - -13.423944473266602 - ], - [ - "▁annotation", - -13.424009323120117 - ], - [ - "▁expans", - -13.424020767211914 - ], - [ - "▁recital", - -13.424080848693848 - ], - [ - "inhabited", - -13.424367904663086 - ], - [ - "OnePlus", - -13.424549102783203 - ], - [ - "Gästen", - -13.424588203430176 - ], - [ - "beliebig", - -13.424613952636719 - ], - [ - "▁Anonymous", - -13.424635887145996 - ], - [ - "▁Ansprechpartner", - -13.424635887145996 - ], - [ - "▁tamb", - -13.42464542388916 - ], - [ - "estimating", - -13.424670219421387 - ], - [ - "frequent", - -13.424769401550293 - ], - [ - "▁disciplin", - -13.425241470336914 - ], - [ - "▁plombier", - -13.425329208374023 - ], - [ - "▁teoretic", - -13.42533016204834 - ], - [ - "greift", - -13.425339698791504 - ], - [ - "▁Einschränkung", - -13.42537784576416 - ], - [ - "obscur", - -13.426115989685059 - ], - [ - "architecte", - -13.426233291625977 - ], - [ - "▁détour", - -13.42647647857666 - ], - [ - "▁spaghetti", - -13.426717758178711 - ], - [ - "croft", - -13.42693042755127 - ], - [ - "▁Grammar", - -13.426953315734863 - ], - [ - "▁investitii", - -13.427062034606934 - ], - [ - "▁glorif", - -13.427067756652832 - ], - [ - "architekt", - -13.427412033081055 - ], - [ - "Oricum", - -13.427451133728027 - ], - [ - "▁bruise", - -13.427692413330078 - ], - [ - "▁McCarthy", - -13.428107261657715 - ], - [ - "▁Uruguay", - -13.428107261657715 - ], - [ - "Produsele", - -13.428109169006348 - ], - [ - "▁Comparison", - -13.42811107635498 - ], - [ - "▁fondamental", - -13.42811107635498 - ], - [ - "▁stradă", - -13.428115844726562 - ], - [ - "▁Countries", - -13.428131103515625 - ], - [ - "▁guéri", - -13.42825698852539 - ], - [ - "▁bâti", - -13.428339004516602 - ], - [ - "▁blunt", - -13.428515434265137 - ], - [ - "▁Sistem", - -13.428645133972168 - ], - [ - "▁Betroffenen", - -13.428803443908691 - ], - [ - "efectuare", - -13.428823471069336 - 
], - [ - "▁scharf", - -13.428899765014648 - ], - [ - "naps", - -13.429057121276855 - ], - [ - "▁plaid", - -13.429163932800293 - ], - [ - "▁investiții", - -13.429367065429688 - ], - [ - "evenimentele", - -13.42948055267334 - ], - [ - "▁Phuket", - -13.429499626159668 - ], - [ - "▁testosterone", - -13.429499626159668 - ], - [ - "▁scaffold", - -13.429500579833984 - ], - [ - "▁rasch", - -13.430022239685059 - ], - [ - "▁adânc", - -13.430076599121094 - ], - [ - "atteinte", - -13.430228233337402 - ], - [ - "▁educație", - -13.430320739746094 - ], - [ - "▁leopard", - -13.430893898010254 - ], - [ - "▁superioare", - -13.430893898010254 - ], - [ - "▁téléchargement", - -13.430893898010254 - ], - [ - "▁Weapon", - -13.431103706359863 - ], - [ - "favourable", - -13.431336402893066 - ], - [ - "nourishing", - -13.43143367767334 - ], - [ - "▁verfolgt", - -13.43160629272461 - ], - [ - "▁tablou", - -13.431633949279785 - ], - [ - "Algérie", - -13.431657791137695 - ], - [ - "Islam", - -13.431700706481934 - ], - [ - "faser", - -13.431825637817383 - ], - [ - "rhythm", - -13.432214736938477 - ], - [ - "▁Anthropolog", - -13.432291030883789 - ], - [ - "▁clôtur", - -13.432291030883789 - ], - [ - "spüren", - -13.432291984558105 - ], - [ - "▁Architectural", - -13.432294845581055 - ], - [ - "▁imaginary", - -13.432368278503418 - ], - [ - "cône", - -13.432456016540527 - ], - [ - "▁snuggl", - -13.432744026184082 - ], - [ - "disadvantaged", - -13.432745933532715 - ], - [ - "radically", - -13.4329195022583 - ], - [ - "Première", - -13.433011054992676 - ], - [ - "▁combinaison", - -13.433027267456055 - ], - [ - "▁Algeria", - -13.43303108215332 - ], - [ - "▁Wände", - -13.43317985534668 - ], - [ - "aesthetically", - -13.43336009979248 - ], - [ - "▁McKe", - -13.433368682861328 - ], - [ - "interroge", - -13.433473587036133 - ], - [ - "exclusive", - -13.433475494384766 - ], - [ - "▁Thomson", - -13.433688163757324 - ], - [ - "▁Gujarat", - -13.43368911743164 - ], - [ - "irgendwo", - -13.433690071105957 - ], - [ 
- "Severin", - -13.433767318725586 - ], - [ - "▁imitation", - -13.433926582336426 - ], - [ - "constructed", - -13.434194564819336 - ], - [ - "▁Montpellier", - -13.434388160705566 - ], - [ - "cedent", - -13.434539794921875 - ], - [ - "accelerating", - -13.434563636779785 - ], - [ - "dommages", - -13.4346284866333 - ], - [ - "lideri", - -13.434730529785156 - ], - [ - "▁Millennium", - -13.435089111328125 - ], - [ - "▁imprisonment", - -13.435089111328125 - ], - [ - "machining", - -13.435111999511719 - ], - [ - "▁anxiet", - -13.43521499633789 - ], - [ - "Contains", - -13.435298919677734 - ], - [ - "pleade", - -13.435563087463379 - ], - [ - "DOWN", - -13.43564510345459 - ], - [ - "geschehen", - -13.435797691345215 - ], - [ - "restaurant", - -13.435811996459961 - ], - [ - "Totusi", - -13.435839653015137 - ], - [ - "amintesc", - -13.436158180236816 - ], - [ - "▁Crisp", - -13.436233520507812 - ], - [ - "aduse", - -13.436278343200684 - ], - [ - "▁imposé", - -13.436351776123047 - ], - [ - "Jubiläum", - -13.436490058898926 - ], - [ - "▁Plaintiff", - -13.436491012573242 - ], - [ - "▁authoritative", - -13.436491966247559 - ], - [ - "▁rendition", - -13.436633110046387 - ], - [ - "Royce", - -13.436707496643066 - ], - [ - "1996)", - -13.436724662780762 - ], - [ - "Asociația", - -13.437192916870117 - ], - [ - "▁Gluten", - -13.437264442443848 - ], - [ - "feature", - -13.43741226196289 - ], - [ - "Behavioral", - -13.437454223632812 - ], - [ - "tearing", - -13.437763214111328 - ], - [ - "▁Entfernung", - -13.437894821166992 - ], - [ - "▁Responsibility", - -13.437894821166992 - ], - [ - "▁negligent", - -13.437894821166992 - ], - [ - "▁syllabus", - -13.437894821166992 - ], - [ - "▁Cycling", - -13.437895774841309 - ], - [ - "generell", - -13.438114166259766 - ], - [ - "customised", - -13.438392639160156 - ], - [ - "Management", - -13.43850326538086 - ], - [ - "▁timid", - -13.438518524169922 - ], - [ - "Tagged", - -13.438730239868164 - ], - [ - "▁susţinut", - -13.438809394836426 - ], - [ - 
"anchored", - -13.43892765045166 - ], - [ - "alternating", - -13.439055442810059 - ], - [ - "▁obligatoriu", - -13.439300537109375 - ], - [ - "▁reinstate", - -13.439456939697266 - ], - [ - "Können", - -13.43946361541748 - ], - [ - "▁Paol", - -13.439596176147461 - ], - [ - "öhr", - -13.439603805541992 - ], - [ - "▁Asociati", - -13.439876556396484 - ], - [ - "▁commenc", - -13.440285682678223 - ], - [ - "reinigt", - -13.440293312072754 - ], - [ - "commended", - -13.440350532531738 - ], - [ - "▁Proceed", - -13.440675735473633 - ], - [ - "beutel", - -13.440702438354492 - ], - [ - "▁Experimental", - -13.44070816040039 - ], - [ - "▁constellation", - -13.44070816040039 - ], - [ - "▁gepflegt", - -13.44070816040039 - ], - [ - "▁Ergänzung", - -13.440709114074707 - ], - [ - "Judith", - -13.440713882446289 - ], - [ - "▁Quartet", - -13.440720558166504 - ], - [ - "complemented", - -13.440742492675781 - ], - [ - "ausbildung", - -13.440750122070312 - ], - [ - "▁uncertainties", - -13.44077205657959 - ], - [ - "▁humiliat", - -13.440914154052734 - ], - [ - "luta", - -13.441121101379395 - ], - [ - "▁complexion", - -13.441482543945312 - ], - [ - "Serviciul", - -13.441612243652344 - ], - [ - "▁Toast", - -13.441722869873047 - ], - [ - "ummies", - -13.442425727844238 - ], - [ - "▁irit", - -13.442463874816895 - ], - [ - "producing", - -13.442585945129395 - ], - [ - "amenajare", - -13.442825317382812 - ], - [ - "▁béton", - -13.442828178405762 - ], - [ - "▁serpent", - -13.442851066589355 - ], - [ - "▁vizită", - -13.442996978759766 - ], - [ - "▁Beamte", - -13.443017959594727 - ], - [ - "▁Füße", - -13.443166732788086 - ], - [ - "▁Norwich", - -13.443531036376953 - ], - [ - "▁acronym", - -13.443531036376953 - ], - [ - "▁eradicate", - -13.443531036376953 - ], - [ - "▁solidarité", - -13.44353199005127 - ], - [ - "▁eggplant", - -13.443582534790039 - ], - [ - "▁sailors", - -13.443619728088379 - ], - [ - "waschen", - -13.444538116455078 - ], - [ - "Editura", - -13.444757461547852 - ], - [ - 
"▁erwerben", - -13.444944381713867 - ], - [ - "▁unconventional", - -13.444944381713867 - ], - [ - "▁boulder", - -13.444948196411133 - ], - [ - "Diplom", - -13.445013046264648 - ], - [ - "influx", - -13.446162223815918 - ], - [ - "▁Twelve", - -13.446361541748047 - ], - [ - "▁Sexual", - -13.44636344909668 - ], - [ - "numite", - -13.446369171142578 - ], - [ - "▁kontaktieren", - -13.446370124816895 - ], - [ - "▁strâns", - -13.44637680053711 - ], - [ - "▁précisément", - -13.446382522583008 - ], - [ - "empfindlich", - -13.446405410766602 - ], - [ - "▁divulg", - -13.446490287780762 - ], - [ - "▁delicat", - -13.446539878845215 - ], - [ - "compete", - -13.446542739868164 - ], - [ - "▁implique", - -13.446616172790527 - ], - [ - "implantation", - -13.44672966003418 - ], - [ - "frères", - -13.447328567504883 - ], - [ - "shedding", - -13.44758415222168 - ], - [ - "découvrez", - -13.447657585144043 - ], - [ - "rith", - -13.447735786437988 - ], - [ - "▁réglementation", - -13.447778701782227 - ], - [ - "▁transistor", - -13.447785377502441 - ], - [ - "inflated", - -13.447792053222656 - ], - [ - "▁Bluff", - -13.447887420654297 - ], - [ - "▁Aquarium", - -13.448526382446289 - ], - [ - "▁mananc", - -13.448638916015625 - ], - [ - "▁disinfect", - -13.448700904846191 - ], - [ - "tuft", - -13.448740005493164 - ], - [ - "Public", - -13.449081420898438 - ], - [ - "conceivabl", - -13.449197769165039 - ], - [ - "▁Cadillac", - -13.449197769165039 - ], - [ - "Assassin", - -13.449199676513672 - ], - [ - "issuance", - -13.449252128601074 - ], - [ - "▁Achtung", - -13.449287414550781 - ], - [ - "▁grundlegend", - -13.449909210205078 - ], - [ - "▁Băsescu", - -13.449910163879395 - ], - [ - "schaden", - -13.45014476776123 - ], - [ - "coached", - -13.450409889221191 - ], - [ - "▁betreffend", - -13.45046329498291 - ], - [ - "ergebnis", - -13.450541496276855 - ], - [ - "▁Lieutenant", - -13.4506196975708 - ], - [ - "WORLD", - -13.450620651245117 - ], - [ - "▁Moroccan", - -13.450620651245117 - ], - [ - 
"▁Butterfly", - -13.450621604919434 - ], - [ - "would", - -13.450737953186035 - ], - [ - "▁Metropol", - -13.451025009155273 - ], - [ - "lexic", - -13.451192855834961 - ], - [ - "comunitatea", - -13.45124340057373 - ], - [ - "vapeur", - -13.451456069946289 - ], - [ - "4.000", - -13.451559066772461 - ], - [ - "Pentru", - -13.451581954956055 - ], - [ - "üblichen", - -13.451613426208496 - ], - [ - "▁Général", - -13.451770782470703 - ], - [ - "▁Versailles", - -13.452046394348145 - ], - [ - "▁engraving", - -13.452046394348145 - ], - [ - "▁pédagogique", - -13.452192306518555 - ], - [ - "▁Policies", - -13.452759742736816 - ], - [ - "descending", - -13.453235626220703 - ], - [ - "stärkt", - -13.453349113464355 - ], - [ - "▁démocratie", - -13.453470230102539 - ], - [ - "▁granddaughter", - -13.453470230102539 - ], - [ - "▁buffalo", - -13.453474998474121 - ], - [ - "Datorita", - -13.45347785949707 - ], - [ - "hydroxy", - -13.453537940979004 - ], - [ - "▁ganduri", - -13.453566551208496 - ], - [ - "▁hijack", - -13.453624725341797 - ], - [ - "zahn", - -13.453699111938477 - ], - [ - "poziția", - -13.45406436920166 - ], - [ - "▁Zähne", - -13.454184532165527 - ], - [ - "▁grossesse", - -13.454296112060547 - ], - [ - "embassy", - -13.4548978805542 - ], - [ - "▁cérémonie", - -13.4548978805542 - ], - [ - "Rhône", - -13.454898834228516 - ], - [ - "▁Cabernet", - -13.454898834228516 - ], - [ - "▁Namibia", - -13.454902648925781 - ], - [ - "▁pedestal", - -13.454902648925781 - ], - [ - "▁Fighting", - -13.45490550994873 - ], - [ - "▁Threat", - -13.454962730407715 - ], - [ - "▁ideological", - -13.455047607421875 - ], - [ - "▁restitu", - -13.455183029174805 - ], - [ - "gelangt", - -13.455510139465332 - ], - [ - "Mitgliedern", - -13.455537796020508 - ], - [ - "acquérir", - -13.455613136291504 - ], - [ - "▁inferioar", - -13.45561695098877 - ], - [ - "Thierry", - -13.455619812011719 - ], - [ - "▁Entspannung", - -13.455638885498047 - ], - [ - "frequency", - -13.45566177368164 - ], - [ - "▁Fluid", - 
-13.455686569213867 - ], - [ - "▁betreut", - -13.455901145935059 - ], - [ - "Biological", - -13.455965995788574 - ], - [ - "▁Constanţa", - -13.456328392028809 - ], - [ - "▁beschäftigen", - -13.456328392028809 - ], - [ - "▁undesirable", - -13.456328392028809 - ], - [ - "▁protégé", - -13.456365585327148 - ], - [ - "▁nautical", - -13.456474304199219 - ], - [ - "▁sniff", - -13.456507682800293 - ], - [ - "Decizi", - -13.456510543823242 - ], - [ - "▁căldur", - -13.45706558227539 - ], - [ - "▁ideologi", - -13.457335472106934 - ], - [ - "Fraktion", - -13.457545280456543 - ], - [ - "collegiate", - -13.45776081085205 - ], - [ - "▁sănătos", - -13.45776081085205 - ], - [ - "▁Observatory", - -13.45776653289795 - ], - [ - "▁saturation", - -13.457769393920898 - ], - [ - "organizate", - -13.457771301269531 - ], - [ - "mergem", - -13.458321571350098 - ], - [ - "Publish", - -13.458451271057129 - ], - [ - "▁rattle", - -13.458460807800293 - ], - [ - "▁întâlniri", - -13.458663940429688 - ], - [ - "emporte", - -13.458741188049316 - ], - [ - "▁înscris", - -13.459046363830566 - ], - [ - "▁Patterson", - -13.459195137023926 - ], - [ - "▁ehrenamtlich", - -13.459195137023926 - ], - [ - "linux", - -13.459213256835938 - ], - [ - "conduire", - -13.45921802520752 - ], - [ - "▁absolven", - -13.459223747253418 - ], - [ - "▁einzigartig", - -13.459598541259766 - ], - [ - "▁_____", - -13.459803581237793 - ], - [ - "▁Beschäftigung", - -13.459912300109863 - ], - [ - "▁erfasst", - -13.459927558898926 - ], - [ - "▁Datum", - -13.459992408752441 - ], - [ - "raportul", - -13.460284233093262 - ], - [ - "ennemi", - -13.460460662841797 - ], - [ - "default", - -13.460643768310547 - ], - [ - "icillin", - -13.46066951751709 - ], - [ - "▁diamant", - -13.460671424865723 - ], - [ - "amerika", - -13.460684776306152 - ], - [ - "▁pescuit", - -13.46070384979248 - ], - [ - "▁grappl", - -13.460797309875488 - ], - [ - "▁Homeland", - -13.46082592010498 - ], - [ - "▁tromb", - -13.46112060546875 - ], - [ - "▁reduzieren", - 
-13.461349487304688 - ], - [ - "▁Statut", - -13.461593627929688 - ], - [ - "booming", - -13.461670875549316 - ], - [ - "fenced", - -13.461723327636719 - ], - [ - "measure", - -13.461888313293457 - ], - [ - "témoin", - -13.462069511413574 - ], - [ - "▁Inventory", - -13.462069511413574 - ], - [ - "▁circonstance", - -13.462069511413574 - ], - [ - "▁téléphonique", - -13.462069511413574 - ], - [ - "▁împiedic", - -13.46207046508789 - ], - [ - "▁Settlement", - -13.462072372436523 - ], - [ - "kannte", - -13.462076187133789 - ], - [ - "▁substantive", - -13.462385177612305 - ], - [ - "miterea", - -13.462642669677734 - ], - [ - "▁noştri", - -13.462790489196777 - ], - [ - "▁plăcere", - -13.462791442871094 - ], - [ - "▁eticheta", - -13.462823867797852 - ], - [ - "quickest", - -13.462993621826172 - ], - [ - "▁pasageri", - -13.463089942932129 - ], - [ - "▁Publi", - -13.463495254516602 - ], - [ - "▁Suzanne", - -13.463509559631348 - ], - [ - "▁bucătări", - -13.463509559631348 - ], - [ - "Regulatory", - -13.463510513305664 - ], - [ - "▁Mandarin", - -13.463647842407227 - ], - [ - "surgical", - -13.463947296142578 - ], - [ - "▁Smash", - -13.463950157165527 - ], - [ - "▁mândr", - -13.46403694152832 - ], - [ - "▁Unterkunft", - -13.464315414428711 - ], - [ - "moos", - -13.464374542236328 - ], - [ - "Camere", - -13.464510917663574 - ], - [ - "/03/", - -13.464651107788086 - ], - [ - "▁ethno", - -13.464677810668945 - ], - [ - "▁Eröffnung", - -13.46495246887207 - ], - [ - "▁Snyder", - -13.46495246887207 - ], - [ - "▁Wilmington", - -13.46495246887207 - ], - [ - "▁Canberra", - -13.464953422546387 - ], - [ - "▁Tahoe", - -13.464953422546387 - ], - [ - "▁slippery", - -13.464953422546387 - ], - [ - "▁Snake", - -13.464957237243652 - ], - [ - "▁turmeric", - -13.464963912963867 - ], - [ - "▁Cartoon", - -13.46499252319336 - ], - [ - "▁scrisoare", - -13.46500015258789 - ], - [ - "▁reprend", - -13.465425491333008 - ], - [ - "▁Konkurrenz", - -13.46567440032959 - ], - [ - "▁raisins", - -13.465693473815918 
- ], - [ - "▁Werkstatt", - -13.465713500976562 - ], - [ - "▁agresiv", - -13.465795516967773 - ], - [ - "hugs", - -13.46615219116211 - ], - [ - "cazurile", - -13.46618938446045 - ], - [ - "spirited", - -13.466232299804688 - ], - [ - "▁britisch", - -13.466307640075684 - ], - [ - "spritz", - -13.466367721557617 - ], - [ - "auxiliary", - -13.46639633178711 - ], - [ - "interprétation", - -13.46639633178711 - ], - [ - "▁verbindet", - -13.46639633178711 - ], - [ - "▁fuzzy", - -13.466429710388184 - ], - [ - "▁turmoil", - -13.466432571411133 - ], - [ - "▁redefine", - -13.466819763183594 - ], - [ - "▁Kiwi", - -13.466890335083008 - ], - [ - "oiseaux", - -13.46712875366211 - ], - [ - "▁pamper", - -13.467146873474121 - ], - [ - "▁desfaso", - -13.46719741821289 - ], - [ - "▁pragu", - -13.467576026916504 - ], - [ - "prevenirea", - -13.467730522155762 - ], - [ - "▁convergence", - -13.467846870422363 - ], - [ - "tufted", - -13.467878341674805 - ], - [ - "brewed", - -13.467981338500977 - ], - [ - "villagers", - -13.468003273010254 - ], - [ - "▁Irving", - -13.468170166015625 - ], - [ - "nigsten", - -13.468660354614258 - ], - [ - "▁embod", - -13.468742370605469 - ], - [ - "Alicia", - -13.468938827514648 - ], - [ - "probably", - -13.469009399414062 - ], - [ - "divider", - -13.46904468536377 - ], - [ - "Attempt", - -13.469223022460938 - ], - [ - "▁Cognitive", - -13.469292640686035 - ], - [ - "▁Recognition", - -13.469292640686035 - ], - [ - "▁concierge", - -13.469292640686035 - ], - [ - "▁Semester", - -13.4692964553833 - ], - [ - "Economie", - -13.469417572021484 - ], - [ - "sortiment", - -13.469460487365723 - ], - [ - "shortest", - -13.46961498260498 - ], - [ - "üchtig", - -13.469650268554688 - ], - [ - "▁conveyanc", - -13.469978332519531 - ], - [ - "▁Ferdinand", - -13.470017433166504 - ], - [ - "▁permanence", - -13.470019340515137 - ], - [ - "▁incadr", - -13.470145225524902 - ], - [ - "▁estrogen", - -13.470290184020996 - ], - [ - "February", - -13.470661163330078 - ], - [ - "gedeckt", 
- -13.470704078674316 - ], - [ - "▁reagieren", - -13.470743179321289 - ], - [ - "▁meditate", - -13.470980644226074 - ], - [ - "simulated", - -13.471010208129883 - ], - [ - "▁supprimer", - -13.471468925476074 - ], - [ - "▁bumbac", - -13.47146987915039 - ], - [ - "▁vânzări", - -13.471477508544922 - ], - [ - "▁Kapitel", - -13.471478462219238 - ], - [ - "▁Weltkrieg", - -13.471513748168945 - ], - [ - "déposer", - -13.471674919128418 - ], - [ - "Asus", - -13.4718017578125 - ], - [ - "▁Communicat", - -13.471851348876953 - ], - [ - "Finished", - -13.47188949584961 - ], - [ - "▁Telegraph", - -13.472054481506348 - ], - [ - "▁Competitive", - -13.472196578979492 - ], - [ - "▁collectivités", - -13.472197532653809 - ], - [ - "▁protège", - -13.472199440002441 - ], - [ - "▁scallop", - -13.472219467163086 - ], - [ - "Happy", - -13.472335815429688 - ], - [ - "tehnică", - -13.472352981567383 - ], - [ - "▁Gestalt", - -13.47270393371582 - ], - [ - "▁benign", - -13.47295093536377 - ], - [ - "kraut", - -13.473149299621582 - ], - [ - "louer", - -13.473221778869629 - ], - [ - "▁Printr", - -13.47326946258545 - ], - [ - "mputation", - -13.473346710205078 - ], - [ - "▁dicke", - -13.473429679870605 - ], - [ - "▁Halifax", - -13.473650932312012 - ], - [ - "▁bounty", - -13.473650932312012 - ], - [ - "▁cauliflower", - -13.473650932312012 - ], - [ - "▁Survival", - -13.473654747009277 - ], - [ - "▁Chandler", - -13.473684310913086 - ], - [ - "▁bemüh", - -13.473760604858398 - ], - [ - "phro", - -13.473855972290039 - ], - [ - "Friday", - -13.474018096923828 - ], - [ - "particularly", - -13.474032402038574 - ], - [ - "arteries", - -13.474197387695312 - ], - [ - "Lösung", - -13.474771499633789 - ], - [ - "▁causal", - -13.474817276000977 - ], - [ - "▁recueilli", - -13.475075721740723 - ], - [ - "Stylish", - -13.47510814666748 - ], - [ - "schränke", - -13.47510814666748 - ], - [ - "▁francophone", - -13.47510814666748 - ], - [ - "▁limousine", - -13.47510814666748 - ], - [ - "▁statistiques", - 
-13.47510814666748 - ], - [ - "▁Kleider", - -13.475111961364746 - ], - [ - "▁dunkel", - -13.475127220153809 - ], - [ - "tätigkeit", - -13.475190162658691 - ], - [ - "▁punished", - -13.475257873535156 - ], - [ - "▁implică", - -13.475539207458496 - ], - [ - "▁inițial", - -13.475568771362305 - ], - [ - "▁Eminescu", - -13.475837707519531 - ], - [ - "▁expliqué", - -13.475837707519531 - ], - [ - "▁Eduard", - -13.475839614868164 - ], - [ - "▁psychologique", - -13.475870132446289 - ], - [ - "▁protejeaz", - -13.476580619812012 - ], - [ - "spül", - -13.476709365844727 - ], - [ - "▁Virtu", - -13.477021217346191 - ], - [ - "▁régulière", - -13.477044105529785 - ], - [ - "▁Outreach", - -13.477130889892578 - ], - [ - "▁Apprentice", - -13.47729778289795 - ], - [ - "▁compréhension", - -13.47729778289795 - ], - [ - "▁zwölf", - -13.47729778289795 - ], - [ - "Surgical", - -13.477315902709961 - ], - [ - "latéral", - -13.477417945861816 - ], - [ - "▁Ceremony", - -13.47803020477295 - ], - [ - "▁Shampoo", - -13.47803783416748 - ], - [ - "Global", - -13.478239059448242 - ], - [ - "▁paradis", - -13.478302955627441 - ], - [ - "Developed", - -13.478493690490723 - ], - [ - "▁figurine", - -13.478549003601074 - ], - [ - "sujets", - -13.478574752807617 - ], - [ - "▁Naomi", - -13.478772163391113 - ], - [ - "financed", - -13.478838920593262 - ], - [ - "forestry", - -13.478896141052246 - ], - [ - "▁Anregung", - -13.479494094848633 - ], - [ - "▁spectateur", - -13.479804039001465 - ], - [ - "▁exercitii", - -13.479815483093262 - ], - [ - "▁russisch", - -13.479888916015625 - ], - [ - "gefunden", - -13.479988098144531 - ], - [ - "schleunig", - -13.480225563049316 - ], - [ - "▁géographique", - -13.480225563049316 - ], - [ - "▁Delphi", - -13.480317115783691 - ], - [ - "Freddie", - -13.4806489944458 - ], - [ - "▁muzici", - -13.480958938598633 - ], - [ - "▁Edmund", - -13.48095989227295 - ], - [ - "finanzielle", - -13.481032371520996 - ], - [ - "(2003)", - -13.481319427490234 - ], - [ - "accentuate", - 
-13.481437683105469 - ], - [ - "overlapping", - -13.48151969909668 - ], - [ - "▁Pluto", - -13.481595993041992 - ], - [ - "românii", - -13.481683731079102 - ], - [ - "▁Timişoara", - -13.48169231414795 - ], - [ - "▁poivr", - -13.481754302978516 - ], - [ - "▁repris", - -13.481852531433105 - ], - [ - "▁Geschlecht", - -13.482426643371582 - ], - [ - "▁thieves", - -13.482426643371582 - ], - [ - "▁Transformer", - -13.482431411743164 - ], - [ - "▁shortcomings", - -13.482438087463379 - ], - [ - "▁aptitude", - -13.48244571685791 - ], - [ - "pitfalls", - -13.482468605041504 - ], - [ - "▁manicure", - -13.482577323913574 - ], - [ - "mystical", - -13.482723236083984 - ], - [ - "▁abolish", - -13.482833862304688 - ], - [ - "▁Zielgruppe", - -13.482873916625977 - ], - [ - "▁naţionale", - -13.483160972595215 - ], - [ - "▁trandafir", - -13.483160972595215 - ], - [ - "▁matematic", - -13.483193397521973 - ], - [ - "▁Hirsch", - -13.483257293701172 - ], - [ - "Fahr", - -13.483458518981934 - ], - [ - "connaissent", - -13.483476638793945 - ], - [ - "browned", - -13.483846664428711 - ], - [ - "▁bearbeitet", - -13.483881950378418 - ], - [ - "▁usturoi", - -13.483896255493164 - ], - [ - "▁Surprise", - -13.48389720916748 - ], - [ - "▁Tehran", - -13.483899116516113 - ], - [ - "▁BLACK", - -13.483901023864746 - ], - [ - "▁abonament", - -13.483904838562012 - ], - [ - "▁mêl", - -13.483972549438477 - ], - [ - "Angebot", - -13.484091758728027 - ], - [ - "ajungi", - -13.48410415649414 - ], - [ - "▁Woodland", - -13.48420524597168 - ], - [ - "▁gradini", - -13.484305381774902 - ], - [ - "▁Marilyn", - -13.48464584350586 - ], - [ - "kilometer", - -13.484880447387695 - ], - [ - "tempered", - -13.485230445861816 - ], - [ - "▁intimacy", - -13.485371589660645 - ], - [ - "▁thunderstorm", - -13.485373497009277 - ], - [ - "▁Uttar", - -13.485413551330566 - ], - [ - "▁varnish", - -13.485535621643066 - ], - [ - "opathie", - -13.485982894897461 - ], - [ - "▁școlar", - -13.48611068725586 - ], - [ - "▁raisonnable", - 
-13.486114501953125 - ], - [ - "proactively", - -13.486490249633789 - ], - [ - "▁gib", - -13.486536979675293 - ], - [ - "▁hospice", - -13.48684310913086 - ], - [ - "▁constă", - -13.486896514892578 - ], - [ - "▁Crescent", - -13.48690128326416 - ], - [ - "▁ambasad", - -13.486933708190918 - ], - [ - "hotărâre", - -13.486969947814941 - ], - [ - "▁fraîche", - -13.48709774017334 - ], - [ - "▁bundesweit", - -13.487581253051758 - ], - [ - "nsbesondere", - -13.487812042236328 - ], - [ - "▁intoarce", - -13.487863540649414 - ], - [ - "▁Schokolade", - -13.488319396972656 - ], - [ - "▁adjective", - -13.488319396972656 - ], - [ - "▁incalzire", - -13.488319396972656 - ], - [ - "▁Qualification", - -13.488320350646973 - ], - [ - "▁Bolivia", - -13.488324165344238 - ], - [ - "▁cruelty", - -13.488334655761719 - ], - [ - "pläne", - -13.48834228515625 - ], - [ - "▁solitude", - -13.488354682922363 - ], - [ - "▁Bosnia", - -13.488568305969238 - ], - [ - "rohr", - -13.488643646240234 - ], - [ - "▁regrette", - -13.48877239227295 - ], - [ - "zusammengestellt", - -13.48924732208252 - ], - [ - "▁Kardashian", - -13.489798545837402 - ], - [ - "▁Picasso", - -13.489798545837402 - ], - [ - "▁unverbindlich", - -13.489798545837402 - ], - [ - "▁Headquarters", - -13.489799499511719 - ], - [ - "métrage", - -13.4898099899292 - ], - [ - "▁Magento", - -13.489816665649414 - ], - [ - "▁exhibitors", - -13.489898681640625 - ], - [ - "utty", - -13.490381240844727 - ], - [ - "▁Fünf", - -13.490538597106934 - ], - [ - "▁Peugeot", - -13.490538597106934 - ], - [ - "▁verdienen", - -13.490538597106934 - ], - [ - "▁absolviert", - -13.49053955078125 - ], - [ - "schutzerklärung", - -13.490679740905762 - ], - [ - "sistemele", - -13.49089241027832 - ], - [ - "▁concrète", - -13.491279602050781 - ], - [ - "▁rhyme", - -13.491279602050781 - ], - [ - "▁Continuous", - -13.49128246307373 - ], - [ - "versprechen", - -13.491312026977539 - ], - [ - "▁Melanie", - -13.49202823638916 - ], - [ - "▁clienţi", - -13.492046356201172 - ], - [ 
- "luckily", - -13.492205619812012 - ], - [ - "▁counterfeit", - -13.492762565612793 - ], - [ - "▁locomotive", - -13.492889404296875 - ], - [ - "▁reacți", - -13.492908477783203 - ], - [ - "ampered", - -13.493005752563477 - ], - [ - "atenția", - -13.493011474609375 - ], - [ - "Suppose", - -13.493062973022461 - ], - [ - "hinweis", - -13.493464469909668 - ], - [ - "verletzung", - -13.493504524230957 - ], - [ - "▁mănânc", - -13.493504524230957 - ], - [ - "▁provoac", - -13.493507385253906 - ], - [ - "▁regizor", - -13.493511199951172 - ], - [ - "kundig", - -13.49352741241455 - ], - [ - "embarqu", - -13.493584632873535 - ], - [ - "Radio", - -13.493690490722656 - ], - [ - "Ministrul", - -13.493896484375 - ], - [ - "weakened", - -13.494214057922363 - ], - [ - "▁translucent", - -13.494247436523438 - ], - [ - "George", - -13.494380950927734 - ], - [ - "▁bacterii", - -13.494402885437012 - ], - [ - "intervalul", - -13.494803428649902 - ], - [ - "▁vizualiz", - -13.494832038879395 - ], - [ - "▁Feuchtigkeit", - -13.494991302490234 - ], - [ - "▁choisissez", - -13.494991302490234 - ], - [ - "▁plausible", - -13.494991302490234 - ], - [ - "▁perpetu", - -13.495122909545898 - ], - [ - "▁bucati", - -13.495194435119629 - ], - [ - "▁Giovanni", - -13.495735168457031 - ], - [ - "▁bluetooth", - -13.495736122131348 - ], - [ - "▁translating", - -13.49573802947998 - ], - [ - "▁Kyoto", - -13.495739936828613 - ], - [ - "▁homosexual", - -13.495745658874512 - ], - [ - "treabă", - -13.495820045471191 - ], - [ - "ntrepid", - -13.495983123779297 - ], - [ - "▁fachlich", - -13.496664047241211 - ], - [ - "Vaccin", - -13.496774673461914 - ], - [ - "▁Treib", - -13.497248649597168 - ], - [ - "varsity", - -13.497272491455078 - ], - [ - "▁Tavern", - -13.497278213500977 - ], - [ - "▁ensue", - -13.497330665588379 - ], - [ - "flexibel", - -13.497971534729004 - ], - [ - "retrieved", - -13.498102188110352 - ], - [ - "traditionellen", - -13.498230934143066 - ], - [ - "▁circulati", - -13.498546600341797 - ], - [ - 
"▁Diagnose", - -13.498717308044434 - ], - [ - "▁Strawberry", - -13.498717308044434 - ], - [ - "Societatea", - -13.49871826171875 - ], - [ - "expertise", - -13.498849868774414 - ], - [ - "▁naturii", - -13.499464988708496 - ], - [ - "▁4:1", - -13.499515533447266 - ], - [ - "Frequently", - -13.500210762023926 - ], - [ - "disproportionate", - -13.500210762023926 - ], - [ - "▁LIMITED", - -13.500210762023926 - ], - [ - "▁ancestral", - -13.500227928161621 - ], - [ - "▁Logistik", - -13.500237464904785 - ], - [ - "▁recolt", - -13.50042724609375 - ], - [ - "▁liebevoll", - -13.500436782836914 - ], - [ - "importing", - -13.500452041625977 - ], - [ - "aparatul", - -13.500458717346191 - ], - [ - "poziţia", - -13.500564575195312 - ], - [ - "facerilor", - -13.500658988952637 - ], - [ - "Submitted", - -13.50086784362793 - ], - [ - "ografia", - -13.501221656799316 - ], - [ - "onformément", - -13.50168228149414 - ], - [ - "▁dissemination", - -13.501708030700684 - ], - [ - "afli", - -13.501834869384766 - ], - [ - "luminous", - -13.502154350280762 - ], - [ - "▁draußen", - -13.502456665039062 - ], - [ - "▁Zauber", - -13.502535820007324 - ], - [ - "▁Ibrahim", - -13.503207206726074 - ], - [ - "▁eruption", - -13.503216743469238 - ], - [ - "écrite", - -13.50357723236084 - ], - [ - "avril", - -13.503898620605469 - ], - [ - "Increasing", - -13.504171371459961 - ], - [ - "hingeg", - -13.504411697387695 - ], - [ - "fidelity", - -13.504707336425781 - ], - [ - "étonnant", - -13.504707336425781 - ], - [ - "▁créativité", - -13.504707336425781 - ], - [ - "▁Required", - -13.504708290100098 - ], - [ - "▁Edison", - -13.504719734191895 - ], - [ - "▁Stuhl", - -13.504719734191895 - ], - [ - "outhwestern", - -13.506060600280762 - ], - [ - "▁Beschwerden", - -13.506210327148438 - ], - [ - "▁angajaţi", - -13.506210327148438 - ], - [ - "▁Currency", - -13.506211280822754 - ], - [ - "▁reagiert", - -13.506214141845703 - ], - [ - "Science", - -13.506229400634766 - ], - [ - "hospital", - -13.506253242492676 - ], - 
[ - "professionellen", - -13.50649356842041 - ], - [ - "▁Trouve", - -13.506768226623535 - ], - [ - "▁utopi", - -13.50683307647705 - ], - [ - "gypte", - -13.506928443908691 - ], - [ - "▁Konsequenz", - -13.506962776184082 - ], - [ - "▁pacienți", - -13.506962776184082 - ], - [ - "▁orizont", - -13.506988525390625 - ], - [ - "Corey", - -13.506999015808105 - ], - [ - "▁quartet", - -13.507009506225586 - ], - [ - "▁Sherlock", - -13.50710678100586 - ], - [ - "▁gagné", - -13.507237434387207 - ], - [ - "▁Jusqu", - -13.50732707977295 - ], - [ - "▁Clickfunnel", - -13.507465362548828 - ], - [ - "Survivor", - -13.507716178894043 - ], - [ - "▁Beethoven", - -13.507716178894043 - ], - [ - "▁Exemplar", - -13.507716178894043 - ], - [ - "▁Gonzalez", - -13.507716178894043 - ], - [ - "▁Illustrator", - -13.507716178894043 - ], - [ - "▁Verpflichtung", - -13.507718086242676 - ], - [ - "Possibly", - -13.507719993591309 - ], - [ - "Maintenant", - -13.507721900939941 - ], - [ - "▁incendiu", - -13.507721900939941 - ], - [ - "▁poêl", - -13.507747650146484 - ], - [ - "▁aşez", - -13.507757186889648 - ], - [ - "phenol", - -13.508248329162598 - ], - [ - "▁magician", - -13.508421897888184 - ], - [ - "éventuellement", - -13.508512496948242 - ], - [ - "▁amortiz", - -13.508736610412598 - ], - [ - "bouchage", - -13.50873851776123 - ], - [ - "▁Accommodation", - -13.509223937988281 - ], - [ - "▁Significant", - -13.509223937988281 - ], - [ - "▁rejoice", - -13.509223937988281 - ], - [ - "▁Lorraine", - -13.509224891662598 - ], - [ - "▁Necklace", - -13.509234428405762 - ], - [ - "▁hamburger", - -13.509273529052734 - ], - [ - "Enhanced", - -13.5095796585083 - ], - [ - "▁Audrey", - -13.509978294372559 - ], - [ - "▁considère", - -13.509986877441406 - ], - [ - "hafen", - -13.51050853729248 - ], - [ - "acordare", - -13.510509490966797 - ], - [ - "▁ediți", - -13.51075553894043 - ], - [ - "▁militia", - -13.510767936706543 - ], - [ - "captivate", - -13.510771751403809 - ], - [ - "▁rebellion", - -13.510777473449707 - 
], - [ - "▁veranstalte", - -13.510844230651855 - ], - [ - "▁matelas", - -13.510859489440918 - ], - [ - "originating", - -13.510873794555664 - ], - [ - "Typical", - -13.51092529296875 - ], - [ - "▁législat", - -13.511360168457031 - ], - [ - "▁Kräfte", - -13.511488914489746 - ], - [ - "▁Eigentümer", - -13.511489868164062 - ], - [ - "▁gonfl", - -13.511608123779297 - ], - [ - "dispoziție", - -13.512028694152832 - ], - [ - "▁Fabulous", - -13.512246131896973 - ], - [ - "▁Guillaume", - -13.512246131896973 - ], - [ - "▁Genuine", - -13.512247085571289 - ], - [ - "selbe", - -13.512449264526367 - ], - [ - "(2002)", - -13.512616157531738 - ], - [ - "Einen", - -13.512908935546875 - ], - [ - "▁Snapdragon", - -13.513002395629883 - ], - [ - "▁plagiarism", - -13.513002395629883 - ], - [ - "▁Rendez", - -13.513019561767578 - ], - [ - "▁înregistrare", - -13.513033866882324 - ], - [ - "probiert", - -13.513081550598145 - ], - [ - "gestiegen", - -13.513153076171875 - ], - [ - "Teatrul", - -13.513370513916016 - ], - [ - "trove", - -13.513469696044922 - ], - [ - "ntsprechend", - -13.513566017150879 - ], - [ - "Städten", - -13.513691902160645 - ], - [ - "unforeseen", - -13.513760566711426 - ], - [ - "▁Meridian", - -13.513761520385742 - ], - [ - "▁Ministries", - -13.513763427734375 - ], - [ - "plaît", - -13.513769149780273 - ], - [ - "▁Telefonnummer", - -13.513772010803223 - ], - [ - "welded", - -13.513788223266602 - ], - [ - "pondere", - -13.513976097106934 - ], - [ - "▁funcţiona", - -13.514012336730957 - ], - [ - "▁politicieni", - -13.514187812805176 - ], - [ - "fleck", - -13.514240264892578 - ], - [ - "▁Nitro", - -13.514264106750488 - ], - [ - "wettbewerb", - -13.514518737792969 - ], - [ - "▁ingrijire", - -13.514518737792969 - ], - [ - "▁Gehirn", - -13.514521598815918 - ], - [ - "sigură", - -13.514904022216797 - ], - [ - "400,000", - -13.515237808227539 - ], - [ - "▁cataract", - -13.515277862548828 - ], - [ - "outskirt", - -13.515280723571777 - ], - [ - "▁Identification", - 
-13.515287399291992 - ], - [ - "▁imperfections", - -13.515317916870117 - ], - [ - "▁Dokumentation", - -13.515474319458008 - ], - [ - "Engine", - -13.515851974487305 - ], - [ - "extindere", - -13.516046524047852 - ], - [ - "bijoux", - -13.516797065734863 - ], - [ - "▁dărui", - -13.516802787780762 - ], - [ - "▁Moderator", - -13.516913414001465 - ], - [ - "biblio", - -13.517024040222168 - ], - [ - "енн", - -13.517024040222168 - ], - [ - "▁Relevan", - -13.51728630065918 - ], - [ - "ansprüche", - -13.517557144165039 - ], - [ - "épaisseur", - -13.517580032348633 - ], - [ - "▁emoţi", - -13.517677307128906 - ], - [ - "exacerbate", - -13.518318176269531 - ], - [ - "▁Wimbledon", - -13.518318176269531 - ], - [ - "▁Pandora", - -13.518319129943848 - ], - [ - "perhaps", - -13.518725395202637 - ], - [ - "certify", - -13.518762588500977 - ], - [ - "Strukturen", - -13.5189208984375 - ], - [ - "▁Kreativität", - -13.519079208374023 - ], - [ - "schlägt", - -13.51908016204834 - ], - [ - "▁certifié", - -13.51911735534668 - ], - [ - "/09/", - -13.519211769104004 - ], - [ - "▁suprafaţ", - -13.519493103027344 - ], - [ - "verständnis", - -13.519841194152832 - ], - [ - "presedintele", - -13.519842147827148 - ], - [ - "▁orthopedic", - -13.519842147827148 - ], - [ - "▁superioara", - -13.519843101501465 - ], - [ - "älteste", - -13.519903182983398 - ], - [ - "▁conducător", - -13.520153999328613 - ], - [ - "supplementary", - -13.520243644714355 - ], - [ - "wetlands", - -13.520438194274902 - ], - [ - "▁suprafete", - -13.520605087280273 - ], - [ - "▁aparțin", - -13.520951271057129 - ], - [ - "analiză", - -13.521014213562012 - ], - [ - "Uneori", - -13.52115535736084 - ], - [ - "Toujours", - -13.521368026733398 - ], - [ - "▁Nairobi", - -13.521368026733398 - ], - [ - "▁asparagus", - -13.521368026733398 - ], - [ - "▁crowdfunding", - -13.521368026733398 - ], - [ - "gutachten", - -13.521369934082031 - ], - [ - "smelling", - -13.521659851074219 - ], - [ - "▁elektrisch", - -13.521718978881836 - ], - [ - 
"begging", - -13.522055625915527 - ], - [ - "▁Renewable", - -13.522896766662598 - ], - [ - "▁Trouble", - -13.522896766662598 - ], - [ - "▁devastated", - -13.522896766662598 - ], - [ - "▁remplacé", - -13.522896766662598 - ], - [ - "▁schmeckt", - -13.522896766662598 - ], - [ - "▁exerciți", - -13.523005485534668 - ], - [ - "▁vermute", - -13.523650169372559 - ], - [ - "▁Constanța", - -13.523661613464355 - ], - [ - "expunere", - -13.523693084716797 - ], - [ - "▁Fitzgerald", - -13.52442741394043 - ], - [ - "▁Mechanism", - -13.524429321289062 - ], - [ - "▁underscore", - -13.524484634399414 - ], - [ - "poziţie", - -13.524901390075684 - ], - [ - "stöbern", - -13.525193214416504 - ], - [ - "▁littérature", - -13.525193214416504 - ], - [ - "▁împrumut", - -13.525193214416504 - ], - [ - "Vision", - -13.525771141052246 - ], - [ - "▁overwhelm", - -13.525773048400879 - ], - [ - "▁erweitern", - -13.525959968566895 - ], - [ - "skeletal", - -13.525960922241211 - ], - [ - "▁terrified", - -13.525960922241211 - ], - [ - "aggravate", - -13.525962829589844 - ], - [ - "▁Malawi", - -13.525969505310059 - ], - [ - "▁neuroscience", - -13.526009559631348 - ], - [ - "trecută", - -13.526097297668457 - ], - [ - "▁maestr", - -13.52634334564209 - ], - [ - "нов", - -13.526555061340332 - ], - [ - "▁Cobb", - -13.52667236328125 - ], - [ - "▁Schwangerschaft", - -13.526727676391602 - ], - [ - "▁internationaux", - -13.526727676391602 - ], - [ - "▁entspannen", - -13.526729583740234 - ], - [ - "▁Früchte", - -13.52676773071289 - ], - [ - "mâine", - -13.526805877685547 - ], - [ - "stützt", - -13.526938438415527 - ], - [ - "flipped", - -13.527076721191406 - ], - [ - "Palatul", - -13.527252197265625 - ], - [ - "▁Gérard", - -13.527496337890625 - ], - [ - "▁Kensington", - -13.527498245239258 - ], - [ - "chargée", - -13.52807331085205 - ], - [ - "iolo", - -13.528203964233398 - ], - [ - "▁excesiv", - -13.52904987335205 - ], - [ - "▁Gymnas", - -13.52962875366211 - ], - [ - "▁optimise", - -13.529678344726562 - ], - [ - 
"possibilités", - -13.529717445373535 - ], - [ - "▁periculoas", - -13.529810905456543 - ], - [ - "mechanical", - -13.529839515686035 - ], - [ - "▁confruntă", - -13.529868125915527 - ], - [ - "quatrième", - -13.530573844909668 - ], - [ - "▁Preservation", - -13.530573844909668 - ], - [ - "▁Juventus", - -13.530574798583984 - ], - [ - "vorsitzende", - -13.5305757522583 - ], - [ - "électora", - -13.530586242675781 - ], - [ - "▁fascinant", - -13.53061580657959 - ], - [ - "▁lagoon", - -13.530671119689941 - ], - [ - "referencing", - -13.53079605102539 - ], - [ - "appointed", - -13.530988693237305 - ], - [ - "Audible", - -13.531112670898438 - ], - [ - "sighted", - -13.531612396240234 - ], - [ - "▁gewünscht", - -13.532061576843262 - ], - [ - "▁Expedition", - -13.532115936279297 - ], - [ - "▁genunchi", - -13.532115936279297 - ], - [ - "▁PROVIDE", - -13.53211784362793 - ], - [ - "▁rosemary", - -13.532118797302246 - ], - [ - "▁cleanliness", - -13.532130241394043 - ], - [ - "commanded", - -13.53223991394043 - ], - [ - "ältere", - -13.532530784606934 - ], - [ - "ност", - -13.532547950744629 - ], - [ - "kühlen", - -13.532917976379395 - ], - [ - "mettez", - -13.533548355102539 - ], - [ - "connaitre", - -13.533661842346191 - ], - [ - "Qaeda", - -13.533662796020508 - ], - [ - "▁traumhaft", - -13.53366470336914 - ], - [ - "kommst", - -13.533666610717773 - ], - [ - "▁Abbott", - -13.533669471740723 - ], - [ - "▁Fool", - -13.533686637878418 - ], - [ - "▁médaill", - -13.533687591552734 - ], - [ - "▁genotyp", - -13.533693313598633 - ], - [ - "▁Fälle", - -13.53375244140625 - ], - [ - "▁actuator", - -13.533843994140625 - ], - [ - "CLASS", - -13.534042358398438 - ], - [ - "progressively", - -13.534421920776367 - ], - [ - "negative", - -13.53469467163086 - ], - [ - "bundled", - -13.535009384155273 - ], - [ - "▁dezbatere", - -13.535208702087402 - ], - [ - "kamagra", - -13.535237312316895 - ], - [ - "gardinen", - -13.535250663757324 - ], - [ - "unsecured", - -13.535271644592285 - ], - [ - 
"Assisted", - -13.535298347473145 - ], - [ - "Gymnasium", - -13.535386085510254 - ], - [ - "▁brusc", - -13.535591125488281 - ], - [ - "prinzip", - -13.535655975341797 - ], - [ - "Torrent", - -13.535964965820312 - ], - [ - "Presented", - -13.535967826843262 - ], - [ - "▁impressionnant", - -13.53628921508789 - ], - [ - "charakter", - -13.536758422851562 - ], - [ - "▁Acoustic", - -13.536762237548828 - ], - [ - "▁appartient", - -13.536763191223145 - ], - [ - "gesteuert", - -13.536879539489746 - ], - [ - "▁condiți", - -13.537089347839355 - ], - [ - "authentic", - -13.537313461303711 - ], - [ - "▁Erholung", - -13.537534713745117 - ], - [ - "▁Veranstalter", - -13.537534713745117 - ], - [ - "▁Filial", - -13.537665367126465 - ], - [ - "ruhigen", - -13.537714958190918 - ], - [ - "symptôme", - -13.538311004638672 - ], - [ - "▁Efficiency", - -13.538311004638672 - ], - [ - "▁stunned", - -13.538311004638672 - ], - [ - "▁sympathique", - -13.538311004638672 - ], - [ - "Uploaded", - -13.538352966308594 - ], - [ - "▁geistig", - -13.538453102111816 - ], - [ - "Pläne", - -13.538509368896484 - ], - [ - "▁Apartament", - -13.53855037689209 - ], - [ - "▁ușoar", - -13.539119720458984 - ], - [ - "▁locuinț", - -13.539122581481934 - ], - [ - "épouse", - -13.539166450500488 - ], - [ - "îngrijire", - -13.539215087890625 - ], - [ - "Obtain", - -13.539261817932129 - ], - [ - "Detect", - -13.539590835571289 - ], - [ - "▁Dumitru", - -13.539865493774414 - ], - [ - "▁refrigeration", - -13.539865493774414 - ], - [ - "ärztliche", - -13.539881706237793 - ], - [ - "efficiency", - -13.540032386779785 - ], - [ - "▁snail", - -13.540328979492188 - ], - [ - "gelände", - -13.540419578552246 - ], - [ - "expected", - -13.540620803833008 - ], - [ - "kompetenz", - -13.540643692016602 - ], - [ - "▁sfânt", - -13.540643692016602 - ], - [ - "océan", - -13.540685653686523 - ], - [ - "▁Plasma", - -13.540717124938965 - ], - [ - "▁vulgar", - -13.54075813293457 - ], - [ - "▁slump", - -13.541083335876465 - ], - [ - 
"autoimmune", - -13.541422843933105 - ], - [ - "▁Cynthia", - -13.541422843933105 - ], - [ - "▁dimineaţ", - -13.541422843933105 - ], - [ - "▁whimsical", - -13.541422843933105 - ], - [ - "▁evaporate", - -13.541488647460938 - ], - [ - "▁calorii", - -13.54186725616455 - ], - [ - "portion", - -13.54187297821045 - ], - [ - "crowned", - -13.5419282913208 - ], - [ - "▁întâmpin", - -13.54220199584961 - ], - [ - "▁Centenar", - -13.542620658874512 - ], - [ - "▁Genehmigung", - -13.54298210144043 - ], - [ - "▁Wahrscheinlich", - -13.54298210144043 - ], - [ - "▁accompaniment", - -13.54298210144043 - ], - [ - "▁Negoti", - -13.542984962463379 - ], - [ - "▁Vanilla", - -13.543000221252441 - ], - [ - "▁Receiv", - -13.543014526367188 - ], - [ - "▁bestseller", - -13.543052673339844 - ], - [ - "tendons", - -13.543069839477539 - ], - [ - "Reilly", - -13.543192863464355 - ], - [ - "▁refroidi", - -13.543731689453125 - ], - [ - "▁überrascht", - -13.543763160705566 - ], - [ - "Gitarre", - -13.543828964233398 - ], - [ - "wände", - -13.544173240661621 - ], - [ - "veniturile", - -13.544321060180664 - ], - [ - "▁portofoliu", - -13.54454517364502 - ], - [ - "▁temporaire", - -13.54454517364502 - ], - [ - "▁Dawson", - -13.544546127319336 - ], - [ - "foreseeable", - -13.544547080993652 - ], - [ - "▁Gastgeber", - -13.545344352722168 - ], - [ - "Access", - -13.545432090759277 - ], - [ - "▁Defender", - -13.545537948608398 - ], - [ - "▁Quarry", - -13.546109199523926 - ], - [ - "▁trolley", - -13.546110153198242 - ], - [ - "▁carburant", - -13.546111106872559 - ], - [ - "▁titluri", - -13.54631233215332 - ], - [ - "comparatively", - -13.546327590942383 - ], - [ - "nachfolgend", - -13.54659652709961 - ], - [ - "anfang", - -13.546740531921387 - ], - [ - "▁faszinieren", - -13.546891212463379 - ], - [ - "trăiesc", - -13.547082901000977 - ], - [ - "▁Travail", - -13.547159194946289 - ], - [ - "Contact", - -13.547235488891602 - ], - [ - "fashion", - -13.547245025634766 - ], - [ - "▁épais", - -13.547585487365723 - 
], - [ - "plattform", - -13.547676086425781 - ], - [ - "ventricular", - -13.547677040100098 - ], - [ - "▁Portsmouth", - -13.547677993774414 - ], - [ - "▁împărat", - -13.54767894744873 - ], - [ - "▁vândut", - -13.547698020935059 - ], - [ - "▁evidenț", - -13.547708511352539 - ], - [ - "Purchasing", - -13.547877311706543 - ], - [ - "discerning", - -13.54804801940918 - ], - [ - "odonti", - -13.548080444335938 - ], - [ - "distilled", - -13.548316955566406 - ], - [ - "saveur", - -13.548447608947754 - ], - [ - "▁récompense", - -13.54845905303955 - ], - [ - "confortul", - -13.548552513122559 - ], - [ - "arbeitete", - -13.548787117004395 - ], - [ - "partenerii", - -13.549064636230469 - ], - [ - "mirrored", - -13.54908561706543 - ], - [ - "Dienstleister", - -13.549243927001953 - ], - [ - "▁Jakarta", - -13.549243927001953 - ], - [ - "▁WEBSITE", - -13.549243927001953 - ], - [ - "▁Acquisition", - -13.549262046813965 - ], - [ - "▁Miranda", - -13.549287796020508 - ], - [ - "Syndic", - -13.549356460571289 - ], - [ - "▁stadiu", - -13.549450874328613 - ], - [ - "▁Parchet", - -13.549498558044434 - ], - [ - "Générale", - -13.54954719543457 - ], - [ - "▁jpl", - -13.549579620361328 - ], - [ - "attainable", - -13.549949645996094 - ], - [ - "École", - -13.550041198730469 - ], - [ - "Sphere", - -13.550538063049316 - ], - [ - "obtainable", - -13.550592422485352 - ], - [ - "▁Sapphire", - -13.55081558227539 - ], - [ - "▁aérienne", - -13.55081558227539 - ], - [ - "▁bărbați", - -13.55081558227539 - ], - [ - "▁irritating", - -13.55081558227539 - ], - [ - "▁ultraviolet", - -13.550816535949707 - ], - [ - "untouched", - -13.550817489624023 - ], - [ - "▁Ramsey", - -13.550819396972656 - ], - [ - "titres", - -13.551087379455566 - ], - [ - "▁Coordinat", - -13.551218032836914 - ], - [ - "believable", - -13.551358222961426 - ], - [ - "▁Grundsätzlich", - -13.551602363586426 - ], - [ - "▁konsequent", - -13.551602363586426 - ], - [ - "▁Cerceta", - -13.551909446716309 - ], - [ - "dirigé", - 
-13.552116394042969 - ], - [ - "▁disturb", - -13.552151679992676 - ], - [ - "conciliation", - -13.552210807800293 - ], - [ - "▁gelöscht", - -13.552390098571777 - ], - [ - "▁sauvegarde", - -13.552391052246094 - ], - [ - "▁cavities", - -13.552393913269043 - ], - [ - "stunde", - -13.55241584777832 - ], - [ - "▁foloseasc", - -13.552430152893066 - ], - [ - "▁simpati", - -13.552873611450195 - ], - [ - "Chacun", - -13.553032875061035 - ], - [ - "adversaire", - -13.553178787231445 - ], - [ - "Eigentlich", - -13.55319881439209 - ], - [ - "defense", - -13.553593635559082 - ], - [ - "consider", - -13.553672790527344 - ], - [ - "▁Trinidad", - -13.553966522216797 - ], - [ - "▁strategist", - -13.553966522216797 - ], - [ - "distorted", - -13.553967475891113 - ], - [ - "▁hypothetical", - -13.553967475891113 - ], - [ - "▁ramburs", - -13.55396842956543 - ], - [ - "▁Mallorca", - -13.553970336914062 - ], - [ - "▁Domino", - -13.554018020629883 - ], - [ - "arrondissement", - -13.554756164550781 - ], - [ - "konferenz", - -13.554756164550781 - ], - [ - "▁Beleuchtung", - -13.554756164550781 - ], - [ - "aggregat", - -13.55484676361084 - ], - [ - "subsidize", - -13.554896354675293 - ], - [ - "shri", - -13.555503845214844 - ], - [ - "Kaufentscheidung", - -13.555545806884766 - ], - [ - "▁Hernandez", - -13.555545806884766 - ], - [ - "▁Upholster", - -13.555546760559082 - ], - [ - "atlantic", - -13.555614471435547 - ], - [ - "▁locuinte", - -13.555652618408203 - ], - [ - "integrates", - -13.55583381652832 - ], - [ - "ewusst", - -13.555878639221191 - ], - [ - "▁Avocado", - -13.556337356567383 - ], - [ - "Decorative", - -13.557014465332031 - ], - [ - "▁Corinthians", - -13.557127952575684 - ], - [ - "▁clădire", - -13.557127952575684 - ], - [ - "▁plomberie", - -13.557127952575684 - ], - [ - "vases", - -13.557143211364746 - ], - [ - "▁crippl", - -13.557247161865234 - ], - [ - "cluttered", - -13.557487487792969 - ], - [ - "departed", - -13.557807922363281 - ], - [ - "▁entscheidet", - -13.5579195022583 - 
], - [ - "Certaine", - -13.558243751525879 - ], - [ - "honda", - -13.558294296264648 - ], - [ - "triggering", - -13.558527946472168 - ], - [ - "▁Erdogan", - -13.558712005615234 - ], - [ - "▁Widerstand", - -13.558712005615234 - ], - [ - "▁Bhutan", - -13.558713912963867 - ], - [ - "▁ascunde", - -13.558736801147461 - ], - [ - "▁shading", - -13.558748245239258 - ], - [ - "behavioural", - -13.559172630310059 - ], - [ - "▁transfér", - -13.55960750579834 - ], - [ - "versichert", - -13.559623718261719 - ], - [ - "▁vinovat", - -13.559646606445312 - ], - [ - "▁airfare", - -13.560142517089844 - ], - [ - "▁simplistic", - -13.56030559539795 - ], - [ - "▁Asigura", - -13.560320854187012 - ], - [ - "Chauffe", - -13.560480117797852 - ], - [ - "scrisă", - -13.560585975646973 - ], - [ - "trouvez", - -13.560702323913574 - ], - [ - "greasy", - -13.560709953308105 - ], - [ - "bottled", - -13.560809135437012 - ], - [ - "grouped", - -13.560934066772461 - ], - [ - "▁beeinflussen", - -13.561092376708984 - ], - [ - "▁chronological", - -13.561114311218262 - ], - [ - "(2000)", - -13.56127643585205 - ], - [ - "sheltered", - -13.561298370361328 - ], - [ - "Historically", - -13.561931610107422 - ], - [ - "piled", - -13.562012672424316 - ], - [ - "publicate", - -13.562378883361816 - ], - [ - "▁étudié", - -13.56268310546875 - ], - [ - "▁vertraut", - -13.562688827514648 - ], - [ - "▁Anpassung", - -13.562697410583496 - ], - [ - "cifra", - -13.562705993652344 - ], - [ - "▁recueil", - -13.562762260437012 - ], - [ - "enforceable", - -13.563183784484863 - ], - [ - "Distinguished", - -13.56347942352295 - ], - [ - "Empfänger", - -13.56347942352295 - ], - [ - "▁Acrylic", - -13.56347942352295 - ], - [ - "▁Encyclopedia", - -13.56347942352295 - ], - [ - "▁proaspete", - -13.56347942352295 - ], - [ - "▁unrealistic", - -13.56347942352295 - ], - [ - "▁Assignment", - -13.563481330871582 - ], - [ - "▁incubator", - -13.563491821289062 - ], - [ - "▁unilateral", - -13.563501358032227 - ], - [ - "elasticity", - 
-13.564398765563965 - ], - [ - "amintim", - -13.564475059509277 - ], - [ - "fournit", - -13.564553260803223 - ], - [ - "semblent", - -13.564763069152832 - ], - [ - "▁$69.", - -13.56496524810791 - ], - [ - "▁prominence", - -13.56507396697998 - ], - [ - "Übertragung", - -13.565075874328613 - ], - [ - "▁2014-11-", - -13.565075874328613 - ], - [ - "▁Giurgiu", - -13.565104484558105 - ], - [ - "étendue", - -13.565123558044434 - ], - [ - "ceputul", - -13.565187454223633 - ], - [ - "Schwierigkeiten", - -13.565872192382812 - ], - [ - "▁subtract", - -13.565881729125977 - ], - [ - "▁gesichert", - -13.56589126586914 - ], - [ - "▁uimit", - -13.565925598144531 - ], - [ - "▁mensuel", - -13.565967559814453 - ], - [ - "Vorgaben", - -13.566215515136719 - ], - [ - "▁legitimacy", - -13.566670417785645 - ], - [ - "▁Kendall", - -13.566673278808594 - ], - [ - "▁détach", - -13.566790580749512 - ], - [ - "▁kennenlernen", - -13.567469596862793 - ], - [ - "▁gewöhnlich", - -13.56747055053711 - ], - [ - "Octav", - -13.567917823791504 - ], - [ - "responsive", - -13.568169593811035 - ], - [ - "▁Mängel", - -13.568269729614258 - ], - [ - "▁mișcare", - -13.568269729614258 - ], - [ - "▁ludique", - -13.568270683288574 - ], - [ - "▁Exeter", - -13.568324089050293 - ], - [ - "▁respins", - -13.569114685058594 - ], - [ - "oraşului", - -13.569173812866211 - ], - [ - "▁sfârşit", - -13.56949520111084 - ], - [ - "BUSINESS", - -13.56987190246582 - ], - [ - "illustrating", - -13.56987190246582 - ], - [ - "▁Tottenham", - -13.56987190246582 - ], - [ - "▁pruning", - -13.569886207580566 - ], - [ - "▁Înainte", - -13.569904327392578 - ], - [ - "▁interesel", - -13.570096969604492 - ], - [ - "discovered", - -13.57031536102295 - ], - [ - "(0)", - -13.570572853088379 - ], - [ - "▁Bewerber", - -13.570673942565918 - ], - [ - "▁DESIGN", - -13.570673942565918 - ], - [ - "▁Orientierung", - -13.570686340332031 - ], - [ - "library", - -13.571041107177734 - ], - [ - "cheltuielile", - -13.571419715881348 - ], - [ - "▁Canterbury", 
- -13.571475982666016 - ], - [ - "▁intellectuelle", - -13.571477890014648 - ], - [ - "▁amalgam", - -13.571497917175293 - ], - [ - "▁Toledo", - -13.57150650024414 - ], - [ - "gezahlt", - -13.571531295776367 - ], - [ - "Veronica", - -13.571659088134766 - ], - [ - "deleting", - -13.571946144104004 - ], - [ - "▁Merlin", - -13.572442054748535 - ], - [ - "▁opérationnel", - -13.572554588317871 - ], - [ - "schmutz", - -13.572568893432617 - ], - [ - "hyroid", - -13.57279109954834 - ], - [ - "▁Compatible", - -13.57308292388916 - ], - [ - "▁Leopard", - -13.57308292388916 - ], - [ - "▁cylindrical", - -13.57308292388916 - ], - [ - "▁terrestrial", - -13.57308292388916 - ], - [ - "conferencing", - -13.573088645935059 - ], - [ - "▁Variety", - -13.573097229003906 - ], - [ - "▁Screw", - -13.573164939880371 - ], - [ - "character", - -13.573637962341309 - ], - [ - "shortened", - -13.573643684387207 - ], - [ - "▁întrerup", - -13.573736190795898 - ], - [ - "freude", - -13.573884010314941 - ], - [ - "▁dezbateri", - -13.573887825012207 - ], - [ - "viteză", - -13.574563026428223 - ], - [ - "formațiile", - -13.574600219726562 - ], - [ - "▁responsibly", - -13.574692726135254 - ], - [ - "Dimensiuni", - -13.574695587158203 - ], - [ - "Arrangement", - -13.57469654083252 - ], - [ - "▁Leisure", - -13.574712753295898 - ], - [ - "escaping", - -13.5750732421875 - ], - [ - "flexion", - -13.575104713439941 - ], - [ - "▁religieuse", - -13.575308799743652 - ], - [ - "crystalline", - -13.575457572937012 - ], - [ - "▁clasp", - -13.575520515441895 - ], - [ - "festigt", - -13.57554817199707 - ], - [ - "▁trouvai", - -13.57596206665039 - ], - [ - "cutaneous", - -13.576305389404297 - ], - [ - "▁carcinoma", - -13.576305389404297 - ], - [ - "▁juxtapos", - -13.576305389404297 - ], - [ - "assemblage", - -13.576306343078613 - ], - [ - "▁Messiah", - -13.576306343078613 - ], - [ - "▁Sleeve", - -13.576306343078613 - ], - [ - "▁șofer", - -13.576386451721191 - ], - [ - "/05/", - -13.57666301727295 - ], - [ - 
"▁expoziți", - -13.576703071594238 - ], - [ - "▁pătrun", - -13.577343940734863 - ], - [ - "▁Lydia", - -13.57739543914795 - ], - [ - "▁grădini", - -13.577919006347656 - ], - [ - "▁toothpaste", - -13.577919960021973 - ], - [ - "ordained", - -13.577921867370605 - ], - [ - "▁Renovation", - -13.577922821044922 - ], - [ - "voicing", - -13.578327178955078 - ], - [ - "président", - -13.578595161437988 - ], - [ - "▁gestartet", - -13.578728675842285 - ], - [ - "Multi", - -13.579121589660645 - ], - [ - "itinéraire", - -13.579537391662598 - ], - [ - "▁influenza", - -13.579537391662598 - ], - [ - "▁psychiatrist", - -13.579537391662598 - ], - [ - "▁schizophrenia", - -13.579537391662598 - ], - [ - "▁Magnolia", - -13.57953929901123 - ], - [ - "▁Scottsdale", - -13.579541206359863 - ], - [ - "▁interessieren", - -13.579548835754395 - ], - [ - "▁asfalt", - -13.579643249511719 - ], - [ - "▁Journalism", - -13.57977294921875 - ], - [ - "Multe", - -13.580089569091797 - ], - [ - "Westfalen", - -13.580347061157227 - ], - [ - "▁Vorschriften", - -13.580348014831543 - ], - [ - "Angleterre", - -13.58034896850586 - ], - [ - "sustainable", - -13.580354690551758 - ], - [ - "▁Retour", - -13.580589294433594 - ], - [ - "▁pâr", - -13.5809965133667 - ], - [ - "steigert", - -13.581120491027832 - ], - [ - "▁AMAZING", - -13.581157684326172 - ], - [ - "▁turbulent", - -13.581157684326172 - ], - [ - "costing", - -13.58155345916748 - ], - [ - "▁Carolyn", - -13.581634521484375 - ], - [ - "utti", - -13.581802368164062 - ], - [ - "dürftig", - -13.581968307495117 - ], - [ - "Keep", - -13.582038879394531 - ], - [ - "▁Théâtre", - -13.582780838012695 - ], - [ - "▁combustibil", - -13.582780838012695 - ], - [ - "▁halloween", - -13.582780838012695 - ], - [ - "▁emulator", - -13.582785606384277 - ], - [ - "▁povești", - -13.582785606384277 - ], - [ - "broyeur", - -13.582810401916504 - ], - [ - "▁émerg", - -13.582927703857422 - ], - [ - "overwhelmingly", - -13.583025932312012 - ], - [ - "regulă", - -13.583124160766602 - ], 
- [ - "goutte", - -13.583125114440918 - ], - [ - "▁Fertigung", - -13.583593368530273 - ], - [ - "constituted", - -13.584304809570312 - ], - [ - "▁QuickBooks", - -13.584406852722168 - ], - [ - "▁genealogy", - -13.584407806396484 - ], - [ - "▁laundering", - -13.584432601928711 - ], - [ - "▁échéan", - -13.584491729736328 - ], - [ - "Account", - -13.584601402282715 - ], - [ - "oyons", - -13.584792137145996 - ], - [ - "nitro", - -13.584905624389648 - ], - [ - "▁corespund", - -13.585219383239746 - ], - [ - "▁suggér", - -13.58527660369873 - ], - [ - "manipulated", - -13.585348129272461 - ], - [ - "deseori", - -13.585817337036133 - ], - [ - "permeabil", - -13.585912704467773 - ], - [ - "Australia", - -13.58594799041748 - ], - [ - "▁Erasmus", - -13.586034774780273 - ], - [ - "▁disrespect", - -13.586034774780273 - ], - [ - "▁trimestre", - -13.586038589477539 - ], - [ - "▁emanat", - -13.586103439331055 - ], - [ - "Schraub", - -13.58624267578125 - ], - [ - "distinctly", - -13.586319923400879 - ], - [ - "Germain", - -13.586637496948242 - ], - [ - "▁pedepse", - -13.5868501663208 - ], - [ - "réglage", - -13.5868558883667 - ], - [ - "făcute", - -13.587308883666992 - ], - [ - "▁garanteaz", - -13.587434768676758 - ], - [ - "▁unterlieg", - -13.587701797485352 - ], - [ - "▁cheddar", - -13.587712287902832 - ], - [ - "▁refugi", - -13.587756156921387 - ], - [ - "▁inférieur", - -13.587836265563965 - ], - [ - "dimension", - -13.588440895080566 - ], - [ - "▁erkennt", - -13.588570594787598 - ], - [ - "amitié", - -13.588632583618164 - ], - [ - "▁predominant", - -13.588680267333984 - ], - [ - "nourishe", - -13.588800430297852 - ], - [ - "exerce", - -13.588907241821289 - ], - [ - "▁disguise", - -13.589225769042969 - ], - [ - "▁traditi", - -13.589289665222168 - ], - [ - "▁Intellectual", - -13.5892972946167 - ], - [ - "▁imunitar", - -13.589299201965332 - ], - [ - "▁Cushion", - -13.589300155639648 - ], - [ - "▁erwachsene", - -13.589517593383789 - ], - [ - "▁Internațional", - -13.590115547180176 - 
], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - 
[ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ], - [ - "", - 0.0 - ] - ], - "byte_fallback": false - } -} \ No newline at end of file diff --git a/diffsynth/tokenizer_configs/flux/tokenizer_2/tokenizer_config.json b/diffsynth/tokenizer_configs/flux/tokenizer_2/tokenizer_config.json deleted file mode 100644 index b336fa2..0000000 --- a/diffsynth/tokenizer_configs/flux/tokenizer_2/tokenizer_config.json +++ /dev/null @@ -1,940 +0,0 @@ -{ - "add_prefix_space": true, - "added_tokens_decoder": { - "0": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "1": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "2": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32000": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32001": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32002": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32003": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32004": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32005": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32006": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32007": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - 
"single_word": false, - "special": true - }, - "32008": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32009": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32010": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32011": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32012": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32013": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32014": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32015": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32016": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32017": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32018": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32019": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32020": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32021": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32022": { - 
"content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32023": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32024": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32025": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32026": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32027": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32028": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32029": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32030": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32031": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32032": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32033": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32034": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32035": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32036": { - "content": "", - "lstrip": false, - "normalized": false, - 
"rstrip": false, - "single_word": false, - "special": true - }, - "32037": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32038": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32039": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32040": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32041": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32042": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32043": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32044": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32045": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32046": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32047": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32048": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32049": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32050": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - 
"32051": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32052": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32053": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32054": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32055": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32056": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32057": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32058": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32059": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32060": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32061": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32062": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32063": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32064": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32065": { - "content": "", - "lstrip": false, - "normalized": 
false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32066": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32067": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32068": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32069": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32070": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32071": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32072": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32073": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32074": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32075": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32076": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32077": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32078": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32079": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true 
- }, - "32080": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32081": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32082": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32083": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32084": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32085": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32086": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32087": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32088": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32089": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32090": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32091": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32092": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32093": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32094": { - "content": "", - "lstrip": false, - 
"normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32095": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32096": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32097": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32098": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32099": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - } - }, - "additional_special_tokens": [ - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "" - ], - "clean_up_tokenization_spaces": true, - "eos_token": "", - "extra_ids": 100, - "legacy": true, - "model_max_length": 512, - "pad_token": "", - "sp_model_kwargs": {}, - "tokenizer_class": "T5Tokenizer", - "unk_token": "" -} diff --git a/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer/special_tokens_map.json b/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer/special_tokens_map.json deleted file mode 100644 index a8b3208..0000000 --- a/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer/special_tokens_map.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "cls_token": "[CLS]", - 
"mask_token": "[MASK]", - "pad_token": "[PAD]", - "sep_token": "[SEP]", - "unk_token": "[UNK]" -} diff --git a/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer/tokenizer_config.json b/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer/tokenizer_config.json deleted file mode 100644 index a143560..0000000 --- a/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer/tokenizer_config.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "cls_token": "[CLS]", - "do_basic_tokenize": true, - "do_lower_case": true, - "mask_token": "[MASK]", - "name_or_path": "hfl/chinese-roberta-wwm-ext", - "never_split": null, - "pad_token": "[PAD]", - "sep_token": "[SEP]", - "special_tokens_map_file": "/home/chenweifeng/.cache/huggingface/hub/models--hfl--chinese-roberta-wwm-ext/snapshots/5c58d0b8ec1d9014354d691c538661bf00bfdb44/special_tokens_map.json", - "strip_accents": null, - "tokenize_chinese_chars": true, - "tokenizer_class": "BertTokenizer", - "unk_token": "[UNK]", - "model_max_length": 77 -} diff --git a/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer/vocab.txt b/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer/vocab.txt deleted file mode 100644 index 6246906..0000000 --- a/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer/vocab.txt +++ /dev/null @@ -1,47020 +0,0 @@ -[PAD] -[unused1] -[unused2] -[unused3] -[unused4] -[unused5] -[unused6] -[unused7] -[unused8] -[unused9] -[unused10] -[unused11] -[unused12] -[unused13] -[unused14] -[unused15] -[unused16] -[unused17] -[unused18] -[unused19] -[unused20] -[unused21] -[unused22] -[unused23] -[unused24] -[unused25] -[unused26] -[unused27] -[unused28] -[unused29] -[unused30] -[unused31] -[unused32] -[unused33] -[unused34] -[unused35] -[unused36] -[unused37] -[unused38] -[unused39] -[unused40] -[unused41] -[unused42] -[unused43] -[unused44] -[unused45] -[unused46] -[unused47] -[unused48] -[unused49] -[unused50] -[unused51] -[unused52] -[unused53] -[unused54] -[unused55] -[unused56] -[unused57] -[unused58] -[unused59] -[unused60] -[unused61] -[unused62] 
-[unused63] -[unused64] -[unused65] -[unused66] -[unused67] -[unused68] -[unused69] -[unused70] -[unused71] -[unused72] -[unused73] -[unused74] -[unused75] -[unused76] -[unused77] -[unused78] -[unused79] -[unused80] -[unused81] -[unused82] -[unused83] -[unused84] -[unused85] -[unused86] -[unused87] -[unused88] -[unused89] -[unused90] -[unused91] -[unused92] -[unused93] -[unused94] -[unused95] -[unused96] -[unused97] -[unused98] -[unused99] -[UNK] -[CLS] -[SEP] -[MASK] - - -! -" -# -$ -% -& -' -( -) -* -+ -, -- -. -/ -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -: -; -< -= -> -? -@ -[ -\ -] -^ -_ -a -b -c -d -e -f -g -h -i -j -k -l -m -n -o -p -q -r -s -t -u -v -w -x -y -z -{ -| -} -~ -£ -¤ -¥ -§ -© -« -® -° -± -² -³ -µ -· -¹ -º -» -¼ -× -ß -æ -÷ -ø -đ -ŋ -ɔ -ə -ɡ -ʰ -ˇ -ˈ -ˊ -ˋ -ˍ -ː -˙ -˚ -ˢ -α -β -γ -δ -ε -η -θ -ι -κ -λ -μ -ν -ο -π -ρ -ς -σ -τ -υ -φ -χ -ψ -ω -а -б -в -г -д -е -ж -з -и -к -л -м -н -о -п -р -с -т -у -ф -х -ц -ч -ш -ы -ь -я -і -ا -ب -ة -ت -د -ر -س -ع -ل -م -ن -ه -و -ي -۩ -ก -ง -น -ม -ย -ร -อ -า -เ -๑ -་ -ღ -ᄀ -ᄁ -ᄂ -ᄃ -ᄅ -ᄆ -ᄇ -ᄈ -ᄉ -ᄋ -ᄌ -ᄎ -ᄏ -ᄐ -ᄑ -ᄒ -ᅡ -ᅢ -ᅣ -ᅥ -ᅦ -ᅧ -ᅨ -ᅩ -ᅪ -ᅬ -ᅭ -ᅮ -ᅯ -ᅲ -ᅳ -ᅴ -ᅵ -ᆨ -ᆫ -ᆯ -ᆷ -ᆸ -ᆺ -ᆻ -ᆼ -ᗜ -ᵃ -ᵉ -ᵍ -ᵏ -ᵐ -ᵒ -ᵘ -‖ -„ -† -• -‥ -‧ -
 -‰ -′ -″ -‹ -› -※ -‿ -⁄ -ⁱ -⁺ -ⁿ -₁ -₂ -₃ -₄ -€ -℃ -№ -™ -ⅰ -ⅱ -ⅲ -ⅳ -ⅴ -← -↑ -→ -↓ -↔ -↗ -↘ -⇒ -∀ -− -∕ -∙ -√ -∞ -∟ -∠ -∣ -∥ -∩ -∮ -∶ -∼ -∽ -≈ -≒ -≡ -≤ -≥ -≦ -≧ -≪ -≫ -⊙ -⋅ -⋈ -⋯ -⌒ -① -② -③ -④ -⑤ -⑥ -⑦ -⑧ -⑨ -⑩ -⑴ -⑵ -⑶ -⑷ -⑸ -⒈ -⒉ -⒊ -⒋ -ⓒ -ⓔ -ⓘ -─ -━ -│ -┃ -┅ -┆ -┊ -┌ -└ -├ -┣ -═ -║ -╚ -╞ -╠ -╭ -╮ -╯ -╰ -╱ -╳ -▂ -▃ -▅ -▇ -█ -▉ -▋ -▌ -▍ -▎ -■ -□ -▪ -▫ -▬ -▲ -△ -▶ -► -▼ -▽ -◆ -◇ -○ -◎ -● -◕ -◠ -◢ -◤ -☀ -★ -☆ -☕ -☞ -☺ -☼ -♀ -♂ -♠ -♡ -♣ -♥ -♦ -♪ -♫ -♬ -✈ -✔ -✕ -✖ -✦ -✨ -✪ -✰ -✿ -❀ -❤ -➜ -➤ -⦿ -、 -。 -〃 -々 -〇 -〈 -〉 -《 -》 -「 -」 -『 -』 -【 -】 -〓 -〔 -〕 -〖 -〗 -〜 -〝 -〞 -ぁ -あ -ぃ -い -う -ぇ -え -お -か -き -く -け -こ -さ -し -す -せ -そ -た -ち -っ -つ -て -と -な -に -ぬ -ね -の -は -ひ -ふ -へ -ほ -ま -み -む -め -も -ゃ -や -ゅ -ゆ -ょ -よ -ら -り -る -れ -ろ -わ -を -ん -゜ -ゝ -ァ -ア -ィ -イ -ゥ -ウ -ェ -エ -ォ -オ -カ -キ -ク -ケ -コ -サ -シ -ス -セ -ソ -タ -チ -ッ -ツ -テ -ト -ナ -ニ -ヌ -ネ -ノ -ハ -ヒ -フ -ヘ -ホ -マ -ミ -ム -メ -モ -ャ -ヤ -ュ -ユ -ョ -ヨ -ラ -リ -ル -レ -ロ -ワ -ヲ -ン -ヶ -・ -ー -ヽ -ㄅ -ㄆ -ㄇ -ㄉ -ㄋ -ㄌ -ㄍ -ㄎ -ㄏ -ㄒ -ㄚ -ㄛ -ㄞ -ㄟ -ㄢ -ㄤ -ㄥ -ㄧ -ㄨ -ㆍ -㈦ -㊣ -㎡ -㗎 -一 -丁 -七 -万 -丈 -三 -上 -下 -不 -与 -丐 -丑 -专 -且 -丕 -世 -丘 -丙 -业 -丛 -东 -丝 -丞 -丟 -両 -丢 -两 -严 -並 -丧 -丨 -个 -丫 -中 -丰 -串 -临 -丶 -丸 -丹 -为 -主 -丼 -丽 -举 -丿 -乂 -乃 -久 -么 -义 -之 -乌 -乍 -乎 -乏 -乐 -乒 -乓 -乔 -乖 -乗 -乘 -乙 -乜 -九 -乞 -也 -习 -乡 -书 -乩 -买 -乱 -乳 -乾 -亀 -亂 -了 -予 -争 -事 -二 -于 -亏 -云 -互 -五 -井 -亘 -亙 -亚 -些 -亜 -亞 -亟 -亡 -亢 -交 -亥 -亦 -产 -亨 -亩 -享 -京 -亭 -亮 -亲 -亳 -亵 -人 -亿 -什 -仁 -仃 -仄 -仅 -仆 -仇 -今 -介 -仍 -从 -仏 -仑 -仓 -仔 -仕 -他 -仗 -付 -仙 -仝 -仞 -仟 -代 -令 -以 -仨 -仪 -们 -仮 -仰 -仲 -件 -价 -任 -份 -仿 -企 -伉 -伊 -伍 -伎 -伏 -伐 -休 -伕 -众 -优 -伙 -会 -伝 -伞 -伟 -传 -伢 -伤 -伦 -伪 -伫 -伯 -估 -伴 -伶 -伸 -伺 -似 -伽 -佃 -但 -佇 -佈 -位 -低 -住 -佐 -佑 -体 -佔 -何 -佗 -佘 -余 -佚 -佛 -作 -佝 -佞 -佟 -你 -佢 -佣 -佤 -佥 -佩 -佬 -佯 -佰 -佳 -併 -佶 -佻 -佼 -使 -侃 -侄 -來 -侈 -例 -侍 -侏 -侑 -侖 -侗 -供 -依 -侠 -価 -侣 -侥 -侦 -侧 -侨 -侬 -侮 -侯 -侵 -侶 -侷 -便 -係 -促 -俄 -俊 -俎 -俏 -俐 -俑 -俗 -俘 -俚 -保 -俞 -俟 -俠 -信 -俨 -俩 -俪 -俬 -俭 -修 -俯 -俱 -俳 -俸 -俺 -俾 -倆 -倉 -個 -倌 -倍 -倏 -們 -倒 -倔 -倖 -倘 -候 -倚 -倜 -借 -倡 -値 -倦 -倩 -倪 -倫 -倬 -倭 -倶 -债 -值 -倾 -偃 -假 -偈 -偉 -偌 -偎 -偏 -偕 -做 -停 -健 -側 -偵 -偶 -偷 -偻 -偽 -偿 -傀 -傅 -傍 -傑 -傘 -備 -傚 -傢 -傣 -傥 -储 -傩 -催 -傭 -傲 -傳 -債 -傷 -傻 -傾 -僅 -働 -像 -僑 
-僕 -僖 -僚 -僥 -僧 -僭 -僮 -僱 -僵 -價 -僻 -儀 -儂 -億 -儆 -儉 -儋 -儒 -儕 -儘 -償 -儡 -優 -儲 -儷 -儼 -儿 -兀 -允 -元 -兄 -充 -兆 -兇 -先 -光 -克 -兌 -免 -児 -兑 -兒 -兔 -兖 -党 -兜 -兢 -入 -內 -全 -兩 -八 -公 -六 -兮 -兰 -共 -兲 -关 -兴 -兵 -其 -具 -典 -兹 -养 -兼 -兽 -冀 -内 -円 -冇 -冈 -冉 -冊 -册 -再 -冏 -冒 -冕 -冗 -写 -军 -农 -冠 -冢 -冤 -冥 -冨 -冪 -冬 -冯 -冰 -冲 -决 -况 -冶 -冷 -冻 -冼 -冽 -冾 -净 -凄 -准 -凇 -凈 -凉 -凋 -凌 -凍 -减 -凑 -凛 -凜 -凝 -几 -凡 -凤 -処 -凪 -凭 -凯 -凰 -凱 -凳 -凶 -凸 -凹 -出 -击 -函 -凿 -刀 -刁 -刃 -分 -切 -刈 -刊 -刍 -刎 -刑 -划 -列 -刘 -则 -刚 -创 -初 -删 -判 -別 -刨 -利 -刪 -别 -刮 -到 -制 -刷 -券 -刹 -刺 -刻 -刽 -剁 -剂 -剃 -則 -剉 -削 -剋 -剌 -前 -剎 -剐 -剑 -剔 -剖 -剛 -剜 -剝 -剣 -剤 -剥 -剧 -剩 -剪 -副 -割 -創 -剷 -剽 -剿 -劃 -劇 -劈 -劉 -劊 -劍 -劏 -劑 -力 -劝 -办 -功 -加 -务 -劣 -动 -助 -努 -劫 -劭 -励 -劲 -劳 -労 -劵 -効 -劾 -势 -勁 -勃 -勇 -勉 -勋 -勐 -勒 -動 -勖 -勘 -務 -勛 -勝 -勞 -募 -勢 -勤 -勧 -勳 -勵 -勸 -勺 -勻 -勾 -勿 -匀 -包 -匆 -匈 -匍 -匐 -匕 -化 -北 -匙 -匝 -匠 -匡 -匣 -匪 -匮 -匯 -匱 -匹 -区 -医 -匾 -匿 -區 -十 -千 -卅 -升 -午 -卉 -半 -卍 -华 -协 -卑 -卒 -卓 -協 -单 -卖 -南 -単 -博 -卜 -卞 -卟 -占 -卡 -卢 -卤 -卦 -卧 -卫 -卮 -卯 -印 -危 -即 -却 -卵 -卷 -卸 -卻 -卿 -厂 -厄 -厅 -历 -厉 -压 -厌 -厕 -厘 -厚 -厝 -原 -厢 -厥 -厦 -厨 -厩 -厭 -厮 -厲 -厳 -去 -县 -叁 -参 -參 -又 -叉 -及 -友 -双 -反 -収 -发 -叔 -取 -受 -变 -叙 -叛 -叟 -叠 -叡 -叢 -口 -古 -句 -另 -叨 -叩 -只 -叫 -召 -叭 -叮 -可 -台 -叱 -史 -右 -叵 -叶 -号 -司 -叹 -叻 -叼 -叽 -吁 -吃 -各 -吆 -合 -吉 -吊 -吋 -同 -名 -后 -吏 -吐 -向 -吒 -吓 -吕 -吖 -吗 -君 -吝 -吞 -吟 -吠 -吡 -否 -吧 -吨 -吩 -含 -听 -吭 -吮 -启 -吱 -吳 -吴 -吵 -吶 -吸 -吹 -吻 -吼 -吽 -吾 -呀 -呂 -呃 -呆 -呈 -告 -呋 -呎 -呐 -呓 -呕 -呗 -员 -呛 -呜 -呢 -呤 -呦 -周 -呱 -呲 -味 -呵 -呷 -呸 -呻 -呼 -命 -咀 -咁 -咂 -咄 -咆 -咋 -和 -咎 -咏 -咐 -咒 -咔 -咕 -咖 -咗 -咘 -咙 -咚 -咛 -咣 -咤 -咦 -咧 -咨 -咩 -咪 -咫 -咬 -咭 -咯 -咱 -咲 -咳 -咸 -咻 -咽 -咿 -哀 -品 -哂 -哄 -哆 -哇 -哈 -哉 -哋 -哌 -响 -哎 -哏 -哐 -哑 -哒 -哔 -哗 -哟 -員 -哥 -哦 -哧 -哨 -哩 -哪 -哭 -哮 -哲 -哺 -哼 -哽 -唁 -唄 -唆 -唇 -唉 -唏 -唐 -唑 -唔 -唠 -唤 -唧 -唬 -售 -唯 -唰 -唱 -唳 -唷 -唸 -唾 -啃 -啄 -商 -啉 -啊 -問 -啓 -啕 -啖 -啜 -啞 -啟 -啡 -啤 -啥 -啦 -啧 -啪 -啫 -啬 -啮 -啰 -啱 -啲 -啵 -啶 -啷 -啸 -啻 -啼 -啾 -喀 -喂 -喃 -善 -喆 -喇 -喉 -喊 -喋 -喎 -喏 -喔 -喘 -喙 -喚 -喜 -喝 -喟 -喧 -喪 -喫 -喬 -單 -喰 -喱 -喲 -喳 -喵 -営 -喷 -喹 -喺 -喻 -喽 -嗅 -嗆 -嗇 -嗎 -嗑 -嗒 -嗓 -嗔 -嗖 -嗚 -嗜 -嗝 -嗟 -嗡 -嗣 -嗤 -嗦 -嗨 -嗪 -嗬 -嗯 -嗰 -嗲 -嗳 -嗶 -嗷 -嗽 -嘀 -嘅 -嘆 -嘈 -嘉 -嘌 -嘍 -嘎 -嘔 -嘖 -嘗 -嘘 -嘚 -嘛 -嘜 -嘞 -嘟 -嘢 -嘣 -嘤 -嘧 -嘩 -嘭 -嘮 -嘯 -嘰 -嘱 -嘲 -嘴 -嘶 -嘸 
-嘹 -嘻 -嘿 -噁 -噌 -噎 -噓 -噔 -噗 -噙 -噜 -噠 -噢 -噤 -器 -噩 -噪 -噬 -噱 -噴 -噶 -噸 -噹 -噻 -噼 -嚀 -嚇 -嚎 -嚏 -嚐 -嚓 -嚕 -嚟 -嚣 -嚥 -嚨 -嚮 -嚴 -嚷 -嚼 -囂 -囉 -囊 -囍 -囑 -囔 -囗 -囚 -四 -囝 -回 -囟 -因 -囡 -团 -団 -囤 -囧 -囪 -囫 -园 -困 -囱 -囲 -図 -围 -囹 -固 -国 -图 -囿 -圃 -圄 -圆 -圈 -國 -圍 -圏 -園 -圓 -圖 -團 -圜 -土 -圣 -圧 -在 -圩 -圭 -地 -圳 -场 -圻 -圾 -址 -坂 -均 -坊 -坍 -坎 -坏 -坐 -坑 -块 -坚 -坛 -坝 -坞 -坟 -坠 -坡 -坤 -坦 -坨 -坪 -坯 -坳 -坵 -坷 -垂 -垃 -垄 -型 -垒 -垚 -垛 -垠 -垢 -垣 -垦 -垩 -垫 -垭 -垮 -垵 -埂 -埃 -埋 -城 -埔 -埕 -埗 -域 -埠 -埤 -埵 -執 -埸 -培 -基 -埼 -堀 -堂 -堃 -堅 -堆 -堇 -堑 -堕 -堙 -堡 -堤 -堪 -堯 -堰 -報 -場 -堵 -堺 -堿 -塊 -塌 -塑 -塔 -塗 -塘 -塚 -塞 -塢 -塩 -填 -塬 -塭 -塵 -塾 -墀 -境 -墅 -墉 -墊 -墒 -墓 -増 -墘 -墙 -墜 -增 -墟 -墨 -墩 -墮 -墳 -墻 -墾 -壁 -壅 -壆 -壇 -壊 -壑 -壓 -壕 -壘 -壞 -壟 -壢 -壤 -壩 -士 -壬 -壮 -壯 -声 -売 -壳 -壶 -壹 -壺 -壽 -处 -备 -変 -复 -夏 -夔 -夕 -外 -夙 -多 -夜 -够 -夠 -夢 -夥 -大 -天 -太 -夫 -夭 -央 -夯 -失 -头 -夷 -夸 -夹 -夺 -夾 -奂 -奄 -奇 -奈 -奉 -奋 -奎 -奏 -奐 -契 -奔 -奕 -奖 -套 -奘 -奚 -奠 -奢 -奥 -奧 -奪 -奬 -奮 -女 -奴 -奶 -奸 -她 -好 -如 -妃 -妄 -妆 -妇 -妈 -妊 -妍 -妒 -妓 -妖 -妘 -妙 -妝 -妞 -妣 -妤 -妥 -妨 -妩 -妪 -妮 -妲 -妳 -妹 -妻 -妾 -姆 -姉 -姊 -始 -姍 -姐 -姑 -姒 -姓 -委 -姗 -姚 -姜 -姝 -姣 -姥 -姦 -姨 -姪 -姫 -姬 -姹 -姻 -姿 -威 -娃 -娄 -娅 -娆 -娇 -娉 -娑 -娓 -娘 -娛 -娜 -娟 -娠 -娣 -娥 -娩 -娱 -娲 -娴 -娶 -娼 -婀 -婁 -婆 -婉 -婊 -婕 -婚 -婢 -婦 -婧 -婪 -婭 -婴 -婵 -婶 -婷 -婺 -婿 -媒 -媚 -媛 -媞 -媧 -媲 -媳 -媽 -媾 -嫁 -嫂 -嫉 -嫌 -嫑 -嫔 -嫖 -嫘 -嫚 -嫡 -嫣 -嫦 -嫩 -嫲 -嫵 -嫻 -嬅 -嬉 -嬌 -嬗 -嬛 -嬢 -嬤 -嬪 -嬰 -嬴 -嬷 -嬸 -嬿 -孀 -孃 -子 -孑 -孔 -孕 -孖 -字 -存 -孙 -孚 -孛 -孜 -孝 -孟 -孢 -季 -孤 -学 -孩 -孪 -孫 -孬 -孰 -孱 -孳 -孵 -學 -孺 -孽 -孿 -宁 -它 -宅 -宇 -守 -安 -宋 -完 -宏 -宓 -宕 -宗 -官 -宙 -定 -宛 -宜 -宝 -实 -実 -宠 -审 -客 -宣 -室 -宥 -宦 -宪 -宫 -宮 -宰 -害 -宴 -宵 -家 -宸 -容 -宽 -宾 -宿 -寂 -寄 -寅 -密 -寇 -富 -寐 -寒 -寓 -寛 -寝 -寞 -察 -寡 -寢 -寥 -實 -寧 -寨 -審 -寫 -寬 -寮 -寰 -寵 -寶 -寸 -对 -寺 -寻 -导 -対 -寿 -封 -専 -射 -将 -將 -專 -尉 -尊 -尋 -對 -導 -小 -少 -尔 -尕 -尖 -尘 -尚 -尝 -尤 -尧 -尬 -就 -尴 -尷 -尸 -尹 -尺 -尻 -尼 -尽 -尾 -尿 -局 -屁 -层 -屄 -居 -屆 -屈 -屉 -届 -屋 -屌 -屍 -屎 -屏 -屐 -屑 -展 -屜 -属 -屠 -屡 -屢 -層 -履 -屬 -屯 -山 -屹 -屿 -岀 -岁 -岂 -岌 -岐 -岑 -岔 -岖 -岗 -岘 -岙 -岚 -岛 -岡 -岩 -岫 -岬 -岭 -岱 -岳 -岷 -岸 -峇 -峋 -峒 -峙 -峡 -峤 -峥 -峦 -峨 -峪 -峭 -峯 -峰 -峴 -島 -峻 -峽 -崁 -崂 -崆 -崇 -崎 -崑 -崔 -崖 -崗 -崙 -崛 -崧 -崩 -崭 -崴 -崽 -嵇 -嵊 -嵋 -嵌 -嵐 -嵘 -嵩 -嵬 -嵯 -嶂 -嶄 -嶇 -嶋 -嶙 -嶺 -嶼 -嶽 -巅 -巍 -巒 -巔 -巖 -川 -州 -巡 -巢 -工 -左 -巧 
-巨 -巩 -巫 -差 -己 -已 -巳 -巴 -巷 -巻 -巽 -巾 -巿 -币 -市 -布 -帅 -帆 -师 -希 -帐 -帑 -帕 -帖 -帘 -帚 -帛 -帜 -帝 -帥 -带 -帧 -師 -席 -帮 -帯 -帰 -帳 -帶 -帷 -常 -帼 -帽 -幀 -幂 -幄 -幅 -幌 -幔 -幕 -幟 -幡 -幢 -幣 -幫 -干 -平 -年 -并 -幸 -幹 -幺 -幻 -幼 -幽 -幾 -广 -庁 -広 -庄 -庆 -庇 -床 -序 -庐 -库 -应 -底 -庖 -店 -庙 -庚 -府 -庞 -废 -庠 -度 -座 -庫 -庭 -庵 -庶 -康 -庸 -庹 -庾 -廁 -廂 -廃 -廈 -廉 -廊 -廓 -廖 -廚 -廝 -廟 -廠 -廢 -廣 -廬 -廳 -延 -廷 -建 -廿 -开 -弁 -异 -弃 -弄 -弈 -弊 -弋 -式 -弑 -弒 -弓 -弔 -引 -弗 -弘 -弛 -弟 -张 -弥 -弦 -弧 -弩 -弭 -弯 -弱 -張 -強 -弹 -强 -弼 -弾 -彅 -彆 -彈 -彌 -彎 -归 -当 -录 -彗 -彙 -彝 -形 -彤 -彥 -彦 -彧 -彩 -彪 -彫 -彬 -彭 -彰 -影 -彷 -役 -彻 -彼 -彿 -往 -征 -径 -待 -徇 -很 -徉 -徊 -律 -後 -徐 -徑 -徒 -従 -徕 -得 -徘 -徙 -徜 -從 -徠 -御 -徨 -復 -循 -徬 -微 -徳 -徴 -徵 -德 -徹 -徼 -徽 -心 -必 -忆 -忌 -忍 -忏 -忐 -忑 -忒 -忖 -志 -忘 -忙 -応 -忠 -忡 -忤 -忧 -忪 -快 -忱 -念 -忻 -忽 -忿 -怀 -态 -怂 -怅 -怆 -怎 -怏 -怒 -怔 -怕 -怖 -怙 -怜 -思 -怠 -怡 -急 -怦 -性 -怨 -怪 -怯 -怵 -总 -怼 -恁 -恃 -恆 -恋 -恍 -恐 -恒 -恕 -恙 -恚 -恢 -恣 -恤 -恥 -恨 -恩 -恪 -恫 -恬 -恭 -息 -恰 -恳 -恵 -恶 -恸 -恺 -恻 -恼 -恿 -悄 -悅 -悉 -悌 -悍 -悔 -悖 -悚 -悟 -悠 -患 -悦 -您 -悩 -悪 -悬 -悯 -悱 -悲 -悴 -悵 -悶 -悸 -悻 -悼 -悽 -情 -惆 -惇 -惊 -惋 -惑 -惕 -惘 -惚 -惜 -惟 -惠 -惡 -惦 -惧 -惨 -惩 -惫 -惬 -惭 -惮 -惯 -惰 -惱 -想 -惴 -惶 -惹 -惺 -愁 -愆 -愈 -愉 -愍 -意 -愕 -愚 -愛 -愜 -感 -愣 -愤 -愧 -愫 -愷 -愿 -慄 -慈 -態 -慌 -慎 -慑 -慕 -慘 -慚 -慟 -慢 -慣 -慧 -慨 -慫 -慮 -慰 -慳 -慵 -慶 -慷 -慾 -憂 -憊 -憋 -憎 -憐 -憑 -憔 -憚 -憤 -憧 -憨 -憩 -憫 -憬 -憲 -憶 -憾 -懂 -懇 -懈 -應 -懊 -懋 -懑 -懒 -懦 -懲 -懵 -懶 -懷 -懸 -懺 -懼 -懾 -懿 -戀 -戈 -戊 -戌 -戍 -戎 -戏 -成 -我 -戒 -戕 -或 -战 -戚 -戛 -戟 -戡 -戦 -截 -戬 -戮 -戰 -戲 -戳 -戴 -戶 -户 -戸 -戻 -戾 -房 -所 -扁 -扇 -扈 -扉 -手 -才 -扎 -扑 -扒 -打 -扔 -払 -托 -扛 -扣 -扦 -执 -扩 -扪 -扫 -扬 -扭 -扮 -扯 -扰 -扱 -扳 -扶 -批 -扼 -找 -承 -技 -抄 -抉 -把 -抑 -抒 -抓 -投 -抖 -抗 -折 -抚 -抛 -抜 -択 -抟 -抠 -抡 -抢 -护 -报 -抨 -披 -抬 -抱 -抵 -抹 -押 -抽 -抿 -拂 -拄 -担 -拆 -拇 -拈 -拉 -拋 -拌 -拍 -拎 -拐 -拒 -拓 -拔 -拖 -拗 -拘 -拙 -拚 -招 -拜 -拟 -拡 -拢 -拣 -拥 -拦 -拧 -拨 -择 -括 -拭 -拮 -拯 -拱 -拳 -拴 -拷 -拼 -拽 -拾 -拿 -持 -挂 -指 -挈 -按 -挎 -挑 -挖 -挙 -挚 -挛 -挝 -挞 -挟 -挠 -挡 -挣 -挤 -挥 -挨 -挪 -挫 -振 -挲 -挹 -挺 -挽 -挾 -捂 -捅 -捆 -捉 -捋 -捌 -捍 -捎 -捏 -捐 -捕 -捞 -损 -捡 -换 -捣 -捧 -捨 -捩 -据 -捱 -捲 -捶 -捷 -捺 -捻 -掀 -掂 -掃 -掇 -授 -掉 -掌 -掏 -掐 -排 -掖 -掘 -掙 -掛 -掠 -採 -探 -掣 -接 -控 -推 -掩 -措 -掬 -掰 -掲 -掳 -掴 -掷 -掸 -掺 -揀 -揃 -揄 -揆 -揉 -揍 -描 -提 -插 -揖 -揚 -換 -握 -揣 -揩 -揪 -揭 -揮 -援 -揶 -揸 -揹 -揽 -搀 -搁 
-搂 -搅 -損 -搏 -搐 -搓 -搔 -搖 -搗 -搜 -搞 -搡 -搪 -搬 -搭 -搵 -搶 -携 -搽 -摀 -摁 -摄 -摆 -摇 -摈 -摊 -摒 -摔 -摘 -摞 -摟 -摧 -摩 -摯 -摳 -摸 -摹 -摺 -摻 -撂 -撃 -撅 -撇 -撈 -撐 -撑 -撒 -撓 -撕 -撚 -撞 -撤 -撥 -撩 -撫 -撬 -播 -撮 -撰 -撲 -撵 -撷 -撸 -撻 -撼 -撿 -擀 -擁 -擂 -擄 -擅 -擇 -擊 -擋 -操 -擎 -擒 -擔 -擘 -據 -擞 -擠 -擡 -擢 -擦 -擬 -擰 -擱 -擲 -擴 -擷 -擺 -擼 -擾 -攀 -攏 -攒 -攔 -攘 -攙 -攜 -攝 -攞 -攢 -攣 -攤 -攥 -攪 -攫 -攬 -支 -收 -攸 -改 -攻 -放 -政 -故 -效 -敌 -敍 -敎 -敏 -救 -敕 -敖 -敗 -敘 -教 -敛 -敝 -敞 -敢 -散 -敦 -敬 -数 -敲 -整 -敵 -敷 -數 -斂 -斃 -文 -斋 -斌 -斎 -斐 -斑 -斓 -斗 -料 -斛 -斜 -斟 -斡 -斤 -斥 -斧 -斩 -斫 -斬 -断 -斯 -新 -斷 -方 -於 -施 -旁 -旃 -旅 -旋 -旌 -旎 -族 -旖 -旗 -无 -既 -日 -旦 -旧 -旨 -早 -旬 -旭 -旮 -旱 -时 -旷 -旺 -旻 -昀 -昂 -昆 -昇 -昉 -昊 -昌 -明 -昏 -易 -昔 -昕 -昙 -星 -映 -春 -昧 -昨 -昭 -是 -昱 -昴 -昵 -昶 -昼 -显 -晁 -時 -晃 -晉 -晋 -晌 -晏 -晒 -晓 -晔 -晕 -晖 -晗 -晚 -晝 -晞 -晟 -晤 -晦 -晨 -晩 -普 -景 -晰 -晴 -晶 -晷 -智 -晾 -暂 -暄 -暇 -暈 -暉 -暌 -暐 -暑 -暖 -暗 -暝 -暢 -暧 -暨 -暫 -暮 -暱 -暴 -暸 -暹 -曄 -曆 -曇 -曉 -曖 -曙 -曜 -曝 -曠 -曦 -曬 -曰 -曲 -曳 -更 -書 -曹 -曼 -曾 -替 -最 -會 -月 -有 -朋 -服 -朐 -朔 -朕 -朗 -望 -朝 -期 -朦 -朧 -木 -未 -末 -本 -札 -朮 -术 -朱 -朴 -朵 -机 -朽 -杀 -杂 -权 -杆 -杈 -杉 -李 -杏 -材 -村 -杓 -杖 -杜 -杞 -束 -杠 -条 -来 -杨 -杭 -杯 -杰 -東 -杳 -杵 -杷 -杼 -松 -板 -极 -构 -枇 -枉 -枋 -析 -枕 -林 -枚 -果 -枝 -枢 -枣 -枪 -枫 -枭 -枯 -枰 -枱 -枳 -架 -枷 -枸 -柄 -柏 -某 -柑 -柒 -染 -柔 -柘 -柚 -柜 -柞 -柠 -柢 -查 -柩 -柬 -柯 -柱 -柳 -柴 -柵 -査 -柿 -栀 -栃 -栄 -栅 -标 -栈 -栉 -栋 -栎 -栏 -树 -栓 -栖 -栗 -校 -栩 -株 -样 -核 -根 -格 -栽 -栾 -桀 -桁 -桂 -桃 -桅 -框 -案 -桉 -桌 -桎 -桐 -桑 -桓 -桔 -桜 -桠 -桡 -桢 -档 -桥 -桦 -桧 -桨 -桩 -桶 -桿 -梁 -梅 -梆 -梏 -梓 -梗 -條 -梟 -梢 -梦 -梧 -梨 -梭 -梯 -械 -梳 -梵 -梶 -检 -棂 -棄 -棉 -棋 -棍 -棒 -棕 -棗 -棘 -棚 -棟 -棠 -棣 -棧 -森 -棱 -棲 -棵 -棹 -棺 -椁 -椅 -椋 -植 -椎 -椒 -検 -椪 -椭 -椰 -椹 -椽 -椿 -楂 -楊 -楓 -楔 -楚 -楝 -楞 -楠 -楣 -楨 -楫 -業 -楮 -極 -楷 -楸 -楹 -楼 -楽 -概 -榄 -榆 -榈 -榉 -榔 -榕 -榖 -榛 -榜 -榨 -榫 -榭 -榮 -榱 -榴 -榷 -榻 -槁 -槃 -構 -槌 -槍 -槎 -槐 -槓 -様 -槛 -槟 -槤 -槭 -槲 -槳 -槻 -槽 -槿 -樁 -樂 -樊 -樑 -樓 -標 -樞 -樟 -模 -樣 -権 -横 -樫 -樯 -樱 -樵 -樸 -樹 -樺 -樽 -樾 -橄 -橇 -橋 -橐 -橘 -橙 -機 -橡 -橢 -橫 -橱 -橹 -橼 -檀 -檄 -檎 -檐 -檔 -檗 -檜 -檢 -檬 -檯 -檳 -檸 -檻 -櫃 -櫚 -櫛 -櫥 -櫸 -櫻 -欄 -權 -欒 -欖 -欠 -次 -欢 -欣 -欧 -欲 -欸 -欺 -欽 -款 -歆 -歇 -歉 -歌 -歎 -歐 -歓 -歙 -歛 -歡 -止 -正 -此 -步 -武 -歧 -歩 -歪 -歯 -歲 -歳 -歴 -歷 -歸 -歹 -死 -歼 -殁 -殃 -殆 -殇 -殉 -殊 -残 -殒 -殓 -殖 -殘 -殞 -殡 -殤 -殭 -殯 -殲 -殴 -段 -殷 -殺 -殼 -殿 -毀 -毁 
-毂 -毅 -毆 -毋 -母 -毎 -每 -毒 -毓 -比 -毕 -毗 -毘 -毙 -毛 -毡 -毫 -毯 -毽 -氈 -氏 -氐 -民 -氓 -气 -氖 -気 -氙 -氛 -氟 -氡 -氢 -氣 -氤 -氦 -氧 -氨 -氪 -氫 -氮 -氯 -氰 -氲 -水 -氷 -永 -氹 -氾 -汀 -汁 -求 -汆 -汇 -汉 -汎 -汐 -汕 -汗 -汙 -汛 -汝 -汞 -江 -池 -污 -汤 -汨 -汩 -汪 -汰 -汲 -汴 -汶 -汹 -決 -汽 -汾 -沁 -沂 -沃 -沅 -沈 -沉 -沌 -沏 -沐 -沒 -沓 -沖 -沙 -沛 -沟 -没 -沢 -沣 -沥 -沦 -沧 -沪 -沫 -沭 -沮 -沱 -河 -沸 -油 -治 -沼 -沽 -沾 -沿 -況 -泄 -泉 -泊 -泌 -泓 -法 -泗 -泛 -泞 -泠 -泡 -波 -泣 -泥 -注 -泪 -泫 -泮 -泯 -泰 -泱 -泳 -泵 -泷 -泸 -泻 -泼 -泽 -泾 -洁 -洄 -洋 -洒 -洗 -洙 -洛 -洞 -津 -洩 -洪 -洮 -洱 -洲 -洵 -洶 -洸 -洹 -活 -洼 -洽 -派 -流 -浃 -浄 -浅 -浆 -浇 -浊 -测 -济 -浏 -浑 -浒 -浓 -浔 -浙 -浚 -浜 -浣 -浦 -浩 -浪 -浬 -浮 -浯 -浴 -海 -浸 -涂 -涅 -涇 -消 -涉 -涌 -涎 -涓 -涔 -涕 -涙 -涛 -涝 -涞 -涟 -涠 -涡 -涣 -涤 -润 -涧 -涨 -涩 -涪 -涮 -涯 -液 -涵 -涸 -涼 -涿 -淀 -淄 -淅 -淆 -淇 -淋 -淌 -淑 -淒 -淖 -淘 -淙 -淚 -淞 -淡 -淤 -淦 -淨 -淩 -淪 -淫 -淬 -淮 -深 -淳 -淵 -混 -淹 -淺 -添 -淼 -清 -済 -渉 -渊 -渋 -渍 -渎 -渐 -渔 -渗 -渙 -渚 -減 -渝 -渠 -渡 -渣 -渤 -渥 -渦 -温 -測 -渭 -港 -渲 -渴 -游 -渺 -渾 -湃 -湄 -湊 -湍 -湖 -湘 -湛 -湟 -湧 -湫 -湮 -湯 -湳 -湾 -湿 -満 -溃 -溅 -溉 -溏 -源 -準 -溜 -溝 -溟 -溢 -溥 -溧 -溪 -溫 -溯 -溱 -溴 -溶 -溺 -溼 -滁 -滂 -滄 -滅 -滇 -滋 -滌 -滑 -滓 -滔 -滕 -滙 -滚 -滝 -滞 -滟 -满 -滢 -滤 -滥 -滦 -滨 -滩 -滬 -滯 -滲 -滴 -滷 -滸 -滾 -滿 -漁 -漂 -漆 -漉 -漏 -漓 -演 -漕 -漠 -漢 -漣 -漩 -漪 -漫 -漬 -漯 -漱 -漲 -漳 -漸 -漾 -漿 -潆 -潇 -潋 -潍 -潑 -潔 -潘 -潛 -潜 -潞 -潟 -潢 -潤 -潦 -潧 -潭 -潮 -潰 -潴 -潸 -潺 -潼 -澀 -澄 -澆 -澈 -澍 -澎 -澗 -澜 -澡 -澤 -澧 -澱 -澳 -澹 -激 -濁 -濂 -濃 -濑 -濒 -濕 -濘 -濛 -濟 -濠 -濡 -濤 -濫 -濬 -濮 -濯 -濱 -濺 -濾 -瀅 -瀆 -瀉 -瀋 -瀏 -瀑 -瀕 -瀘 -瀚 -瀛 -瀝 -瀞 -瀟 -瀧 -瀨 -瀬 -瀰 -瀾 -灌 -灏 -灑 -灘 -灝 -灞 -灣 -火 -灬 -灭 -灯 -灰 -灵 -灶 -灸 -灼 -災 -灾 -灿 -炀 -炁 -炅 -炉 -炊 -炎 -炒 -炔 -炕 -炖 -炙 -炜 -炫 -炬 -炭 -炮 -炯 -炳 -炷 -炸 -点 -為 -炼 -炽 -烁 -烂 -烃 -烈 -烊 -烏 -烘 -烙 -烛 -烟 -烤 -烦 -烧 -烨 -烩 -烫 -烬 -热 -烯 -烷 -烹 -烽 -焉 -焊 -焕 -焖 -焗 -焘 -焙 -焚 -焜 -無 -焦 -焯 -焰 -焱 -然 -焼 -煅 -煉 -煊 -煌 -煎 -煒 -煖 -煙 -煜 -煞 -煤 -煥 -煦 -照 -煨 -煩 -煮 -煲 -煸 -煽 -熄 -熊 -熏 -熒 -熔 -熙 -熟 -熠 -熨 -熬 -熱 -熵 -熹 -熾 -燁 -燃 -燄 -燈 -燉 -燊 -燎 -燒 -燔 -燕 -燙 -燜 -營 -燥 -燦 -燧 -燭 -燮 -燴 -燻 -燼 -燿 -爆 -爍 -爐 -爛 -爪 -爬 -爭 -爰 -爱 -爲 -爵 -父 -爷 -爸 -爹 -爺 -爻 -爽 -爾 -牆 -片 -版 -牌 -牍 -牒 -牙 -牛 -牝 -牟 -牠 -牡 -牢 -牦 -牧 -物 -牯 -牲 -牴 -牵 -特 -牺 -牽 -犀 -犁 -犄 -犊 -犍 -犒 -犢 -犧 -犬 -犯 -状 -犷 -犸 -犹 -狀 -狂 -狄 -狈 -狎 -狐 -狒 -狗 -狙 -狞 -狠 -狡 -狩 -独 -狭 -狮 -狰 -狱 -狸 -狹 -狼 -狽 -猎 -猕 -猖 -猗 -猙 -猛 -猜 
-猝 -猥 -猩 -猪 -猫 -猬 -献 -猴 -猶 -猷 -猾 -猿 -獄 -獅 -獎 -獐 -獒 -獗 -獠 -獣 -獨 -獭 -獰 -獲 -獵 -獷 -獸 -獺 -獻 -獼 -獾 -玄 -率 -玉 -王 -玑 -玖 -玛 -玟 -玠 -玥 -玩 -玫 -玮 -环 -现 -玲 -玳 -玷 -玺 -玻 -珀 -珂 -珅 -珈 -珉 -珊 -珍 -珏 -珐 -珑 -珙 -珞 -珠 -珣 -珥 -珩 -珪 -班 -珮 -珲 -珺 -現 -球 -琅 -理 -琇 -琉 -琊 -琍 -琏 -琐 -琛 -琢 -琥 -琦 -琨 -琪 -琬 -琮 -琰 -琲 -琳 -琴 -琵 -琶 -琺 -琼 -瑀 -瑁 -瑄 -瑋 -瑕 -瑗 -瑙 -瑚 -瑛 -瑜 -瑞 -瑟 -瑠 -瑣 -瑤 -瑩 -瑪 -瑯 -瑰 -瑶 -瑾 -璀 -璁 -璃 -璇 -璉 -璋 -璎 -璐 -璜 -璞 -璟 -璧 -璨 -環 -璽 -璿 -瓊 -瓏 -瓒 -瓜 -瓢 -瓣 -瓤 -瓦 -瓮 -瓯 -瓴 -瓶 -瓷 -甄 -甌 -甕 -甘 -甙 -甚 -甜 -生 -產 -産 -甥 -甦 -用 -甩 -甫 -甬 -甭 -甯 -田 -由 -甲 -申 -电 -男 -甸 -町 -画 -甾 -畀 -畅 -界 -畏 -畑 -畔 -留 -畜 -畝 -畢 -略 -畦 -番 -畫 -異 -畲 -畳 -畴 -當 -畸 -畹 -畿 -疆 -疇 -疊 -疏 -疑 -疔 -疖 -疗 -疙 -疚 -疝 -疟 -疡 -疣 -疤 -疥 -疫 -疮 -疯 -疱 -疲 -疳 -疵 -疸 -疹 -疼 -疽 -疾 -痂 -病 -症 -痈 -痉 -痊 -痍 -痒 -痔 -痕 -痘 -痙 -痛 -痞 -痠 -痢 -痣 -痤 -痧 -痨 -痪 -痫 -痰 -痱 -痴 -痹 -痺 -痼 -痿 -瘀 -瘁 -瘋 -瘍 -瘓 -瘘 -瘙 -瘟 -瘠 -瘡 -瘢 -瘤 -瘦 -瘧 -瘩 -瘪 -瘫 -瘴 -瘸 -瘾 -療 -癇 -癌 -癒 -癖 -癜 -癞 -癡 -癢 -癣 -癥 -癫 -癬 -癮 -癱 -癲 -癸 -発 -登 -發 -白 -百 -皂 -的 -皆 -皇 -皈 -皋 -皎 -皑 -皓 -皖 -皙 -皚 -皮 -皰 -皱 -皴 -皺 -皿 -盂 -盃 -盅 -盆 -盈 -益 -盎 -盏 -盐 -监 -盒 -盔 -盖 -盗 -盘 -盛 -盜 -盞 -盟 -盡 -監 -盤 -盥 -盧 -盪 -目 -盯 -盱 -盲 -直 -相 -盹 -盼 -盾 -省 -眈 -眉 -看 -県 -眙 -眞 -真 -眠 -眦 -眨 -眩 -眯 -眶 -眷 -眸 -眺 -眼 -眾 -着 -睁 -睇 -睏 -睐 -睑 -睛 -睜 -睞 -睡 -睢 -督 -睥 -睦 -睨 -睪 -睫 -睬 -睹 -睽 -睾 -睿 -瞄 -瞅 -瞇 -瞋 -瞌 -瞎 -瞑 -瞒 -瞓 -瞞 -瞟 -瞠 -瞥 -瞧 -瞩 -瞪 -瞬 -瞭 -瞰 -瞳 -瞻 -瞼 -瞿 -矇 -矍 -矗 -矚 -矛 -矜 -矢 -矣 -知 -矩 -矫 -短 -矮 -矯 -石 -矶 -矽 -矾 -矿 -码 -砂 -砌 -砍 -砒 -研 -砖 -砗 -砚 -砝 -砣 -砥 -砧 -砭 -砰 -砲 -破 -砷 -砸 -砺 -砼 -砾 -础 -硅 -硐 -硒 -硕 -硝 -硫 -硬 -确 -硯 -硼 -碁 -碇 -碉 -碌 -碍 -碎 -碑 -碓 -碗 -碘 -碚 -碛 -碟 -碣 -碧 -碩 -碰 -碱 -碳 -碴 -確 -碼 -碾 -磁 -磅 -磊 -磋 -磐 -磕 -磚 -磡 -磨 -磬 -磯 -磲 -磷 -磺 -礁 -礎 -礙 -礡 -礦 -礪 -礫 -礴 -示 -礼 -社 -祀 -祁 -祂 -祇 -祈 -祉 -祎 -祐 -祕 -祖 -祗 -祚 -祛 -祜 -祝 -神 -祟 -祠 -祢 -祥 -票 -祭 -祯 -祷 -祸 -祺 -祿 -禀 -禁 -禄 -禅 -禍 -禎 -福 -禛 -禦 -禧 -禪 -禮 -禱 -禹 -禺 -离 -禽 -禾 -禿 -秀 -私 -秃 -秆 -秉 -秋 -种 -科 -秒 -秘 -租 -秣 -秤 -秦 -秧 -秩 -秭 -积 -称 -秸 -移 -秽 -稀 -稅 -程 -稍 -税 -稔 -稗 -稚 -稜 -稞 -稟 -稠 -稣 -種 -稱 -稲 -稳 -稷 -稹 -稻 -稼 -稽 -稿 -穀 -穂 -穆 -穌 -積 -穎 -穗 -穢 -穩 -穫 -穴 -究 -穷 -穹 -空 -穿 -突 -窃 -窄 -窈 -窍 -窑 -窒 -窓 -窕 -窖 -窗 -窘 -窜 -窝 -窟 -窠 -窥 -窦 -窨 -窩 -窪 -窮 -窯 -窺 -窿 -竄 -竅 -竇 -竊 -立 -竖 -站 -竜 -竞 -竟 -章 -竣 -童 -竭 -端 -競 -竹 -竺 -竽 -竿 -笃 
-笆 -笈 -笋 -笏 -笑 -笔 -笙 -笛 -笞 -笠 -符 -笨 -第 -笹 -笺 -笼 -筆 -等 -筊 -筋 -筍 -筏 -筐 -筑 -筒 -答 -策 -筛 -筝 -筠 -筱 -筲 -筵 -筷 -筹 -签 -简 -箇 -箋 -箍 -箏 -箐 -箔 -箕 -算 -箝 -管 -箩 -箫 -箭 -箱 -箴 -箸 -節 -篁 -範 -篆 -篇 -築 -篑 -篓 -篙 -篝 -篠 -篡 -篤 -篩 -篪 -篮 -篱 -篷 -簇 -簌 -簍 -簡 -簦 -簧 -簪 -簫 -簷 -簸 -簽 -簾 -簿 -籁 -籃 -籌 -籍 -籐 -籟 -籠 -籤 -籬 -籮 -籲 -米 -类 -籼 -籽 -粄 -粉 -粑 -粒 -粕 -粗 -粘 -粟 -粤 -粥 -粧 -粪 -粮 -粱 -粲 -粳 -粵 -粹 -粼 -粽 -精 -粿 -糅 -糊 -糍 -糕 -糖 -糗 -糙 -糜 -糞 -糟 -糠 -糧 -糬 -糯 -糰 -糸 -系 -糾 -紀 -紂 -約 -紅 -紉 -紊 -紋 -納 -紐 -紓 -純 -紗 -紘 -紙 -級 -紛 -紜 -素 -紡 -索 -紧 -紫 -紮 -累 -細 -紳 -紹 -紺 -終 -絃 -組 -絆 -経 -結 -絕 -絞 -絡 -絢 -給 -絨 -絮 -統 -絲 -絳 -絵 -絶 -絹 -綁 -綏 -綑 -經 -継 -続 -綜 -綠 -綢 -綦 -綫 -綬 -維 -綱 -網 -綴 -綵 -綸 -綺 -綻 -綽 -綾 -綿 -緊 -緋 -総 -緑 -緒 -緘 -線 -緝 -緞 -締 -緣 -編 -緩 -緬 -緯 -練 -緹 -緻 -縁 -縄 -縈 -縛 -縝 -縣 -縫 -縮 -縱 -縴 -縷 -總 -績 -繁 -繃 -繆 -繇 -繋 -織 -繕 -繚 -繞 -繡 -繩 -繪 -繫 -繭 -繳 -繹 -繼 -繽 -纂 -續 -纍 -纏 -纓 -纔 -纖 -纜 -纠 -红 -纣 -纤 -约 -级 -纨 -纪 -纫 -纬 -纭 -纯 -纰 -纱 -纲 -纳 -纵 -纶 -纷 -纸 -纹 -纺 -纽 -纾 -线 -绀 -练 -组 -绅 -细 -织 -终 -绊 -绍 -绎 -经 -绑 -绒 -结 -绔 -绕 -绘 -给 -绚 -绛 -络 -绝 -绞 -统 -绡 -绢 -绣 -绥 -绦 -继 -绩 -绪 -绫 -续 -绮 -绯 -绰 -绳 -维 -绵 -绶 -绷 -绸 -绻 -综 -绽 -绾 -绿 -缀 -缄 -缅 -缆 -缇 -缈 -缉 -缎 -缓 -缔 -缕 -编 -缘 -缙 -缚 -缜 -缝 -缠 -缢 -缤 -缥 -缨 -缩 -缪 -缭 -缮 -缰 -缱 -缴 -缸 -缺 -缽 -罂 -罄 -罌 -罐 -网 -罔 -罕 -罗 -罚 -罡 -罢 -罩 -罪 -置 -罰 -署 -罵 -罷 -罹 -羁 -羅 -羈 -羊 -羌 -美 -羔 -羚 -羞 -羟 -羡 -羣 -群 -羥 -羧 -羨 -義 -羯 -羲 -羸 -羹 -羽 -羿 -翁 -翅 -翊 -翌 -翎 -習 -翔 -翘 -翟 -翠 -翡 -翦 -翩 -翰 -翱 -翳 -翹 -翻 -翼 -耀 -老 -考 -耄 -者 -耆 -耋 -而 -耍 -耐 -耒 -耕 -耗 -耘 -耙 -耦 -耨 -耳 -耶 -耷 -耸 -耻 -耽 -耿 -聂 -聆 -聊 -聋 -职 -聒 -联 -聖 -聘 -聚 -聞 -聪 -聯 -聰 -聲 -聳 -聴 -聶 -職 -聽 -聾 -聿 -肃 -肄 -肅 -肆 -肇 -肉 -肋 -肌 -肏 -肓 -肖 -肘 -肚 -肛 -肝 -肠 -股 -肢 -肤 -肥 -肩 -肪 -肮 -肯 -肱 -育 -肴 -肺 -肽 -肾 -肿 -胀 -胁 -胃 -胄 -胆 -背 -胍 -胎 -胖 -胚 -胛 -胜 -胝 -胞 -胡 -胤 -胥 -胧 -胫 -胭 -胯 -胰 -胱 -胳 -胴 -胶 -胸 -胺 -能 -脂 -脅 -脆 -脇 -脈 -脉 -脊 -脍 -脏 -脐 -脑 -脓 -脖 -脘 -脚 -脛 -脣 -脩 -脫 -脯 -脱 -脲 -脳 -脸 -脹 -脾 -腆 -腈 -腊 -腋 -腌 -腎 -腐 -腑 -腓 -腔 -腕 -腥 -腦 -腩 -腫 -腭 -腮 -腰 -腱 -腳 -腴 -腸 -腹 -腺 -腻 -腼 -腾 -腿 -膀 -膈 -膊 -膏 -膑 -膘 -膚 -膛 -膜 -膝 -膠 -膦 -膨 -膩 -膳 -膺 -膻 -膽 -膾 -膿 -臀 -臂 -臃 -臆 -臉 -臊 -臍 -臓 -臘 -臟 -臣 -臥 -臧 -臨 -自 -臬 -臭 -至 -致 -臺 -臻 -臼 -臾 -舀 -舂 -舅 -舆 -與 -興 -舉 -舊 -舌 -舍 -舎 -舐 -舒 -舔 -舖 -舗 -舛 -舜 -舞 -舟 -航 -舫 -般 -舰 -舱 -舵 -舶 -舷 -舸 -船 -舺 
-舾 -艇 -艋 -艘 -艙 -艦 -艮 -良 -艰 -艱 -色 -艳 -艷 -艹 -艺 -艾 -节 -芃 -芈 -芊 -芋 -芍 -芎 -芒 -芙 -芜 -芝 -芡 -芥 -芦 -芩 -芪 -芫 -芬 -芭 -芮 -芯 -花 -芳 -芷 -芸 -芹 -芻 -芽 -芾 -苁 -苄 -苇 -苋 -苍 -苏 -苑 -苒 -苓 -苔 -苕 -苗 -苛 -苜 -苞 -苟 -苡 -苣 -若 -苦 -苫 -苯 -英 -苷 -苹 -苻 -茁 -茂 -范 -茄 -茅 -茉 -茎 -茏 -茗 -茜 -茧 -茨 -茫 -茬 -茭 -茯 -茱 -茲 -茴 -茵 -茶 -茸 -茹 -茼 -荀 -荃 -荆 -草 -荊 -荏 -荐 -荒 -荔 -荖 -荘 -荚 -荞 -荟 -荠 -荡 -荣 -荤 -荥 -荧 -荨 -荪 -荫 -药 -荳 -荷 -荸 -荻 -荼 -荽 -莅 -莆 -莉 -莊 -莎 -莒 -莓 -莖 -莘 -莞 -莠 -莢 -莧 -莪 -莫 -莱 -莲 -莴 -获 -莹 -莺 -莽 -莿 -菀 -菁 -菅 -菇 -菈 -菊 -菌 -菏 -菓 -菖 -菘 -菜 -菟 -菠 -菡 -菩 -華 -菱 -菲 -菸 -菽 -萁 -萃 -萄 -萊 -萋 -萌 -萍 -萎 -萘 -萝 -萤 -营 -萦 -萧 -萨 -萩 -萬 -萱 -萵 -萸 -萼 -落 -葆 -葉 -著 -葚 -葛 -葡 -董 -葦 -葩 -葫 -葬 -葭 -葯 -葱 -葳 -葵 -葷 -葺 -蒂 -蒋 -蒐 -蒔 -蒙 -蒜 -蒞 -蒟 -蒡 -蒨 -蒲 -蒸 -蒹 -蒻 -蒼 -蒿 -蓁 -蓄 -蓆 -蓉 -蓋 -蓑 -蓓 -蓖 -蓝 -蓟 -蓦 -蓬 -蓮 -蓼 -蓿 -蔑 -蔓 -蔔 -蔗 -蔘 -蔚 -蔡 -蔣 -蔥 -蔫 -蔬 -蔭 -蔵 -蔷 -蔺 -蔻 -蔼 -蔽 -蕁 -蕃 -蕈 -蕉 -蕊 -蕎 -蕙 -蕤 -蕨 -蕩 -蕪 -蕭 -蕲 -蕴 -蕻 -蕾 -薄 -薅 -薇 -薈 -薊 -薏 -薑 -薔 -薙 -薛 -薦 -薨 -薩 -薪 -薬 -薯 -薰 -薹 -藉 -藍 -藏 -藐 -藓 -藕 -藜 -藝 -藤 -藥 -藩 -藹 -藻 -藿 -蘆 -蘇 -蘊 -蘋 -蘑 -蘚 -蘭 -蘸 -蘼 -蘿 -虎 -虏 -虐 -虑 -虔 -處 -虚 -虛 -虜 -虞 -號 -虢 -虧 -虫 -虬 -虱 -虹 -虻 -虽 -虾 -蚀 -蚁 -蚂 -蚊 -蚌 -蚓 -蚕 -蚜 -蚝 -蚣 -蚤 -蚩 -蚪 -蚯 -蚱 -蚵 -蛀 -蛆 -蛇 -蛊 -蛋 -蛎 -蛐 -蛔 -蛙 -蛛 -蛟 -蛤 -蛭 -蛮 -蛰 -蛳 -蛹 -蛻 -蛾 -蜀 -蜂 -蜃 -蜆 -蜇 -蜈 -蜊 -蜍 -蜒 -蜓 -蜕 -蜗 -蜘 -蜚 -蜜 -蜡 -蜢 -蜥 -蜱 -蜴 -蜷 -蜻 -蜿 -蝇 -蝈 -蝉 -蝌 -蝎 -蝕 -蝗 -蝙 -蝟 -蝠 -蝦 -蝨 -蝴 -蝶 -蝸 -蝼 -螂 -螃 -融 -螞 -螢 -螨 -螯 -螳 -螺 -蟀 -蟄 -蟆 -蟋 -蟎 -蟑 -蟒 -蟠 -蟬 -蟲 -蟹 -蟻 -蟾 -蠅 -蠍 -蠔 -蠕 -蠛 -蠟 -蠡 -蠢 -蠣 -蠱 -蠶 -蠹 -蠻 -血 -衄 -衅 -衆 -行 -衍 -術 -衔 -街 -衙 -衛 -衝 -衞 -衡 -衢 -衣 -补 -表 -衩 -衫 -衬 -衮 -衰 -衲 -衷 -衹 -衾 -衿 -袁 -袂 -袄 -袅 -袈 -袋 -袍 -袒 -袖 -袜 -袞 -袤 -袪 -被 -袭 -袱 -裁 -裂 -装 -裆 -裊 -裏 -裔 -裕 -裘 -裙 -補 -裝 -裟 -裡 -裤 -裨 -裱 -裳 -裴 -裸 -裹 -製 -裾 -褂 -複 -褐 -褒 -褓 -褔 -褚 -褥 -褪 -褫 -褲 -褶 -褻 -襁 -襄 -襟 -襠 -襪 -襬 -襯 -襲 -西 -要 -覃 -覆 -覇 -見 -規 -覓 -視 -覚 -覦 -覧 -親 -覬 -観 -覷 -覺 -覽 -觀 -见 -观 -规 -觅 -视 -览 -觉 -觊 -觎 -觐 -觑 -角 -觞 -解 -觥 -触 -觸 -言 -訂 -計 -訊 -討 -訓 -訕 -訖 -託 -記 -訛 -訝 -訟 -訣 -訥 -訪 -設 -許 -訳 -訴 -訶 -診 -註 -証 -詆 -詐 -詔 -評 -詛 -詞 -詠 -詡 -詢 -詣 -試 -詩 -詫 -詬 -詭 -詮 -詰 -話 -該 -詳 -詹 -詼 -誅 -誇 -誉 -誌 -認 -誓 -誕 -誘 -語 -誠 -誡 -誣 -誤 -誥 -誦 -誨 -說 -説 -読 -誰 -課 -誹 -誼 -調 -諄 -談 -請 -諏 -諒 -論 -諗 -諜 -諡 -諦 -諧 -諫 -諭 -諮 -諱 -諳 -諷 -諸 -諺 -諾 -謀 -謁 -謂 -謄 -謊 -謎 -謐 
-謔 -謗 -謙 -講 -謝 -謠 -謨 -謬 -謹 -謾 -譁 -證 -譎 -譏 -識 -譙 -譚 -譜 -警 -譬 -譯 -議 -譲 -譴 -護 -譽 -讀 -變 -讓 -讚 -讞 -计 -订 -认 -讥 -讧 -讨 -让 -讪 -讫 -训 -议 -讯 -记 -讲 -讳 -讴 -讶 -讷 -许 -讹 -论 -讼 -讽 -设 -访 -诀 -证 -诃 -评 -诅 -识 -诈 -诉 -诊 -诋 -词 -诏 -译 -试 -诗 -诘 -诙 -诚 -诛 -话 -诞 -诟 -诠 -诡 -询 -诣 -诤 -该 -详 -诧 -诩 -诫 -诬 -语 -误 -诰 -诱 -诲 -说 -诵 -诶 -请 -诸 -诺 -读 -诽 -课 -诿 -谀 -谁 -调 -谄 -谅 -谆 -谈 -谊 -谋 -谌 -谍 -谎 -谏 -谐 -谑 -谒 -谓 -谔 -谕 -谗 -谘 -谙 -谚 -谛 -谜 -谟 -谢 -谣 -谤 -谥 -谦 -谧 -谨 -谩 -谪 -谬 -谭 -谯 -谱 -谲 -谴 -谶 -谷 -豁 -豆 -豇 -豈 -豉 -豊 -豌 -豎 -豐 -豔 -豚 -象 -豢 -豪 -豫 -豬 -豹 -豺 -貂 -貅 -貌 -貓 -貔 -貘 -貝 -貞 -負 -財 -貢 -貧 -貨 -販 -貪 -貫 -責 -貯 -貰 -貳 -貴 -貶 -買 -貸 -費 -貼 -貽 -貿 -賀 -賁 -賂 -賃 -賄 -資 -賈 -賊 -賑 -賓 -賜 -賞 -賠 -賡 -賢 -賣 -賤 -賦 -質 -賬 -賭 -賴 -賺 -購 -賽 -贅 -贈 -贊 -贍 -贏 -贓 -贖 -贛 -贝 -贞 -负 -贡 -财 -责 -贤 -败 -账 -货 -质 -贩 -贪 -贫 -贬 -购 -贮 -贯 -贰 -贱 -贲 -贴 -贵 -贷 -贸 -费 -贺 -贻 -贼 -贾 -贿 -赁 -赂 -赃 -资 -赅 -赈 -赊 -赋 -赌 -赎 -赏 -赐 -赓 -赔 -赖 -赘 -赚 -赛 -赝 -赞 -赠 -赡 -赢 -赣 -赤 -赦 -赧 -赫 -赭 -走 -赳 -赴 -赵 -赶 -起 -趁 -超 -越 -趋 -趕 -趙 -趟 -趣 -趨 -足 -趴 -趵 -趸 -趺 -趾 -跃 -跄 -跆 -跋 -跌 -跎 -跑 -跖 -跚 -跛 -距 -跟 -跡 -跤 -跨 -跩 -跪 -路 -跳 -践 -跷 -跹 -跺 -跻 -踉 -踊 -踌 -踏 -踐 -踝 -踞 -踟 -踢 -踩 -踪 -踮 -踱 -踴 -踵 -踹 -蹂 -蹄 -蹇 -蹈 -蹉 -蹊 -蹋 -蹑 -蹒 -蹙 -蹟 -蹣 -蹤 -蹦 -蹩 -蹬 -蹭 -蹲 -蹴 -蹶 -蹺 -蹼 -蹿 -躁 -躇 -躉 -躊 -躋 -躍 -躏 -躪 -身 -躬 -躯 -躲 -躺 -軀 -車 -軋 -軌 -軍 -軒 -軟 -転 -軸 -軼 -軽 -軾 -較 -載 -輒 -輓 -輔 -輕 -輛 -輝 -輟 -輩 -輪 -輯 -輸 -輻 -輾 -輿 -轄 -轅 -轆 -轉 -轍 -轎 -轟 -车 -轧 -轨 -轩 -转 -轭 -轮 -软 -轰 -轲 -轴 -轶 -轻 -轼 -载 -轿 -较 -辄 -辅 -辆 -辇 -辈 -辉 -辊 -辍 -辐 -辑 -输 -辕 -辖 -辗 -辘 -辙 -辛 -辜 -辞 -辟 -辣 -辦 -辨 -辩 -辫 -辭 -辮 -辯 -辰 -辱 -農 -边 -辺 -辻 -込 -辽 -达 -迁 -迂 -迄 -迅 -过 -迈 -迎 -运 -近 -返 -还 -这 -进 -远 -违 -连 -迟 -迢 -迤 -迥 -迦 -迩 -迪 -迫 -迭 -述 -迴 -迷 -迸 -迹 -迺 -追 -退 -送 -适 -逃 -逅 -逆 -选 -逊 -逍 -透 -逐 -递 -途 -逕 -逗 -這 -通 -逛 -逝 -逞 -速 -造 -逢 -連 -逮 -週 -進 -逵 -逶 -逸 -逻 -逼 -逾 -遁 -遂 -遅 -遇 -遊 -運 -遍 -過 -遏 -遐 -遑 -遒 -道 -達 -違 -遗 -遙 -遛 -遜 -遞 -遠 -遢 -遣 -遥 -遨 -適 -遭 -遮 -遲 -遴 -遵 -遶 -遷 -選 -遺 -遼 -遽 -避 -邀 -邁 -邂 -邃 -還 -邇 -邈 -邊 -邋 -邏 -邑 -邓 -邕 -邛 -邝 -邢 -那 -邦 -邨 -邪 -邬 -邮 -邯 -邰 -邱 -邳 -邵 -邸 -邹 -邺 -邻 -郁 -郅 -郊 -郎 -郑 -郜 -郝 -郡 -郢 -郤 -郦 -郧 -部 -郫 -郭 -郴 -郵 -郷 -郸 -都 -鄂 -鄉 -鄒 -鄔 -鄙 -鄞 -鄢 -鄧 -鄭 -鄰 -鄱 -鄲 -鄺 -酉 -酊 -酋 -酌 -配 -酐 -酒 -酗 -酚 -酝 -酢 -酣 -酥 -酩 -酪 -酬 -酮 -酯 -酰 -酱 -酵 -酶 -酷 -酸 -酿 -醃 -醇 
-醉 -醋 -醍 -醐 -醒 -醚 -醛 -醜 -醞 -醣 -醪 -醫 -醬 -醮 -醯 -醴 -醺 -釀 -釁 -采 -釉 -释 -釋 -里 -重 -野 -量 -釐 -金 -釗 -釘 -釜 -針 -釣 -釦 -釧 -釵 -鈀 -鈉 -鈍 -鈎 -鈔 -鈕 -鈞 -鈣 -鈦 -鈪 -鈴 -鈺 -鈾 -鉀 -鉄 -鉅 -鉉 -鉑 -鉗 -鉚 -鉛 -鉤 -鉴 -鉻 -銀 -銃 -銅 -銑 -銓 -銖 -銘 -銜 -銬 -銭 -銮 -銳 -銷 -銹 -鋁 -鋅 -鋒 -鋤 -鋪 -鋰 -鋸 -鋼 -錄 -錐 -錘 -錚 -錠 -錢 -錦 -錨 -錫 -錮 -錯 -録 -錳 -錶 -鍊 -鍋 -鍍 -鍛 -鍥 -鍰 -鍵 -鍺 -鍾 -鎂 -鎊 -鎌 -鎏 -鎔 -鎖 -鎗 -鎚 -鎧 -鎬 -鎮 -鎳 -鏈 -鏖 -鏗 -鏘 -鏞 -鏟 -鏡 -鏢 -鏤 -鏽 -鐘 -鐮 -鐲 -鐳 -鐵 -鐸 -鐺 -鑄 -鑊 -鑑 -鑒 -鑣 -鑫 -鑰 -鑲 -鑼 -鑽 -鑾 -鑿 -针 -钉 -钊 -钎 -钏 -钒 -钓 -钗 -钙 -钛 -钜 -钝 -钞 -钟 -钠 -钡 -钢 -钣 -钤 -钥 -钦 -钧 -钨 -钩 -钮 -钯 -钰 -钱 -钳 -钴 -钵 -钺 -钻 -钼 -钾 -钿 -铀 -铁 -铂 -铃 -铄 -铅 -铆 -铉 -铎 -铐 -铛 -铜 -铝 -铠 -铡 -铢 -铣 -铤 -铨 -铩 -铬 -铭 -铮 -铰 -铲 -铵 -银 -铸 -铺 -链 -铿 -销 -锁 -锂 -锄 -锅 -锆 -锈 -锉 -锋 -锌 -锏 -锐 -锑 -错 -锚 -锟 -锡 -锢 -锣 -锤 -锥 -锦 -锭 -键 -锯 -锰 -锲 -锵 -锹 -锺 -锻 -镀 -镁 -镂 -镇 -镉 -镌 -镍 -镐 -镑 -镕 -镖 -镗 -镛 -镜 -镣 -镭 -镯 -镰 -镳 -镶 -長 -长 -門 -閃 -閉 -開 -閎 -閏 -閑 -閒 -間 -閔 -閘 -閡 -関 -閣 -閥 -閨 -閩 -閱 -閲 -閹 -閻 -閾 -闆 -闇 -闊 -闌 -闍 -闔 -闕 -闖 -闘 -關 -闡 -闢 -门 -闪 -闫 -闭 -问 -闯 -闰 -闲 -间 -闵 -闷 -闸 -闹 -闺 -闻 -闽 -闾 -阀 -阁 -阂 -阅 -阆 -阇 -阈 -阉 -阎 -阐 -阑 -阔 -阕 -阖 -阙 -阚 -阜 -队 -阡 -阪 -阮 -阱 -防 -阳 -阴 -阵 -阶 -阻 -阿 -陀 -陂 -附 -际 -陆 -陇 -陈 -陋 -陌 -降 -限 -陕 -陛 -陝 -陞 -陟 -陡 -院 -陣 -除 -陨 -险 -陪 -陰 -陲 -陳 -陵 -陶 -陷 -陸 -険 -陽 -隅 -隆 -隈 -隊 -隋 -隍 -階 -随 -隐 -隔 -隕 -隘 -隙 -際 -障 -隠 -隣 -隧 -隨 -險 -隱 -隴 -隶 -隸 -隻 -隼 -隽 -难 -雀 -雁 -雄 -雅 -集 -雇 -雉 -雋 -雌 -雍 -雎 -雏 -雑 -雒 -雕 -雖 -雙 -雛 -雜 -雞 -離 -難 -雨 -雪 -雯 -雰 -雲 -雳 -零 -雷 -雹 -電 -雾 -需 -霁 -霄 -霆 -震 -霈 -霉 -霊 -霍 -霎 -霏 -霑 -霓 -霖 -霜 -霞 -霧 -霭 -霰 -露 -霸 -霹 -霽 -霾 -靂 -靄 -靈 -青 -靓 -靖 -静 -靚 -靛 -靜 -非 -靠 -靡 -面 -靥 -靦 -革 -靳 -靴 -靶 -靼 -鞅 -鞋 -鞍 -鞏 -鞑 -鞘 -鞠 -鞣 -鞦 -鞭 -韆 -韋 -韌 -韓 -韜 -韦 -韧 -韩 -韬 -韭 -音 -韵 -韶 -韻 -響 -頁 -頂 -頃 -項 -順 -須 -頌 -預 -頑 -頒 -頓 -頗 -領 -頜 -頡 -頤 -頫 -頭 -頰 -頷 -頸 -頹 -頻 -頼 -顆 -題 -額 -顎 -顏 -顔 -願 -顛 -類 -顧 -顫 -顯 -顱 -顴 -页 -顶 -顷 -项 -顺 -须 -顼 -顽 -顾 -顿 -颁 -颂 -预 -颅 -领 -颇 -颈 -颉 -颊 -颌 -颍 -颐 -频 -颓 -颔 -颖 -颗 -题 -颚 -颛 -颜 -额 -颞 -颠 -颡 -颢 -颤 -颦 -颧 -風 -颯 -颱 -颳 -颶 -颼 -飄 -飆 -风 -飒 -飓 -飕 -飘 -飙 -飚 -飛 -飞 -食 -飢 -飨 -飩 -飪 -飯 -飲 -飼 -飽 -飾 -餃 -餅 -餉 -養 -餌 -餐 -餒 -餓 -餘 -餚 -餛 -餞 -餡 -館 -餮 -餵 -餾 -饅 -饈 -饋 -饌 -饍 -饑 -饒 -饕 -饗 -饞 -饥 -饨 -饪 -饬 -饭 -饮 -饯 -饰 -饱 -饲 -饴 -饵 -饶 -饷 -饺 -饼 -饽 -饿 -馀 -馁 -馄 -馅 -馆 -馈 -馋 
-馍 -馏 -馒 -馔 -首 -馗 -香 -馥 -馨 -馬 -馭 -馮 -馳 -馴 -駁 -駄 -駅 -駆 -駐 -駒 -駕 -駛 -駝 -駭 -駱 -駿 -騁 -騎 -騏 -験 -騙 -騨 -騰 -騷 -驀 -驅 -驊 -驍 -驒 -驕 -驗 -驚 -驛 -驟 -驢 -驥 -马 -驭 -驮 -驯 -驰 -驱 -驳 -驴 -驶 -驷 -驸 -驹 -驻 -驼 -驾 -驿 -骁 -骂 -骄 -骅 -骆 -骇 -骈 -骊 -骋 -验 -骏 -骐 -骑 -骗 -骚 -骛 -骜 -骞 -骠 -骡 -骤 -骥 -骧 -骨 -骯 -骰 -骶 -骷 -骸 -骼 -髂 -髅 -髋 -髏 -髒 -髓 -體 -髖 -高 -髦 -髪 -髮 -髯 -髻 -鬃 -鬆 -鬍 -鬓 -鬚 -鬟 -鬢 -鬣 -鬥 -鬧 -鬱 -鬼 -魁 -魂 -魄 -魅 -魇 -魍 -魏 -魔 -魘 -魚 -魯 -魷 -鮑 -鮨 -鮪 -鮭 -鮮 -鯉 -鯊 -鯖 -鯛 -鯨 -鯰 -鯽 -鰍 -鰓 -鰭 -鰲 -鰻 -鰾 -鱈 -鱉 -鱔 -鱗 -鱷 -鱸 -鱼 -鱿 -鲁 -鲈 -鲍 -鲑 -鲛 -鲜 -鲟 -鲢 -鲤 -鲨 -鲫 -鲱 -鲲 -鲶 -鲷 -鲸 -鳃 -鳄 -鳅 -鳌 -鳍 -鳕 -鳖 -鳗 -鳝 -鳞 -鳥 -鳩 -鳳 -鳴 -鳶 -鴉 -鴕 -鴛 -鴦 -鴨 -鴻 -鴿 -鵑 -鵜 -鵝 -鵡 -鵬 -鵰 -鵲 -鶘 -鶩 -鶯 -鶴 -鷗 -鷲 -鷹 -鷺 -鸚 -鸞 -鸟 -鸠 -鸡 -鸢 -鸣 -鸥 -鸦 -鸨 -鸪 -鸭 -鸯 -鸳 -鸵 -鸽 -鸾 -鸿 -鹂 -鹃 -鹄 -鹅 -鹈 -鹉 -鹊 -鹌 -鹏 -鹑 -鹕 -鹘 -鹜 -鹞 -鹤 -鹦 -鹧 -鹫 -鹭 -鹰 -鹳 -鹵 -鹹 -鹼 -鹽 -鹿 -麂 -麋 -麒 -麓 -麗 -麝 -麟 -麥 -麦 -麩 -麴 -麵 -麸 -麺 -麻 -麼 -麽 -麾 -黃 -黄 -黍 -黎 -黏 -黑 -黒 -黔 -默 -黛 -黜 -黝 -點 -黠 -黨 -黯 -黴 -鼋 -鼎 -鼐 -鼓 -鼠 -鼬 -鼹 -鼻 -鼾 -齁 -齊 -齋 -齐 -齒 -齡 -齢 -齣 -齦 -齿 -龄 -龅 -龈 -龊 -龋 -龌 -龍 -龐 -龔 -龕 -龙 -龚 -龛 -龜 -龟 -︰ -︱ -︶ -︿ -﹁ -﹂ -﹍ -﹏ -﹐ -﹑ -﹒ -﹔ -﹕ -﹖ -﹗ -﹙ -﹚ -﹝ -﹞ -﹡ -﹣ -! -" -# -$ -% -& -' -( -) -* -+ -, -- -. -/ -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -: -; -< -= -> -? -@ -[ -\ -] -^ -_ -` -a -b -c -d -e -f -g -h -i -j -k -l -m -n -o -p -q -r -s -t -u -v -w -x -y -z -{ -| -} -~ -。 -「 -」 -、 -・ -ッ -ー -イ -ク -シ -ス -ト -ノ -フ -ラ -ル -ン -゙ -゚ - ̄ -¥ -👍 -🔥 -😂 -😎 -... 
-yam -10 -2017 -12 -11 -2016 -20 -30 -15 -06 -lofter -##s -2015 -by -16 -14 -18 -13 -24 -17 -2014 -21 -##0 -22 -19 -25 -23 -com -100 -00 -05 -2013 -##a -03 -09 -08 -28 -##2 -50 -01 -04 -##1 -27 -02 -2012 -##3 -26 -##e -07 -##8 -##5 -##6 -##4 -##9 -##7 -29 -2011 -40 -##t -2010 -##o -##d -##i -2009 -##n -app -www -the -##m -31 -##c -##l -##y -##r -##g -2008 -60 -http -200 -qq -##p -80 -##f -google -pixnet -90 -cookies -tripadvisor -500 -##er -##k -35 -##h -facebook -2007 -2000 -70 -##b -of -##x -##u -45 -300 -iphone -32 -1000 -2006 -48 -ip -36 -in -38 -3d -##w -##ing -55 -ctrip -##on -##v -33 -##の -to -34 -400 -id -2005 -it -37 -windows -llc -top -99 -42 -39 -000 -led -at -##an -41 -51 -52 -46 -49 -43 -53 -44 -##z -android -58 -and -59 -2004 -56 -vr -##か -5000 -2003 -47 -blogthis -twitter -54 -##le -150 -ok -2018 -57 -75 -cn -no -ios -##in -##mm -##00 -800 -on -te -3000 -65 -2001 -360 -95 -ig -lv -120 -##ng -##を -##us -##に -pc -てす -── -600 -##te -85 -2002 -88 -##ed -html -ncc -wifi -email -64 -blog -is -##10 -##て -mail -online -##al -dvd -##ic -studio -##は -##℃ -##ia -##と -line -vip -72 -##q -98 -##ce -##en -for -##is -##ra -##es -##j -usb -net -cp -1999 -asia -4g -##cm -diy -new -3c -##お -ta -66 -language -vs -apple -tw -86 -web -##ne -ipad -62 -you -##re -101 -68 -##tion -ps -de -bt -pony -atm -##2017 -1998 -67 -##ch -ceo -##or -go -##na -av -pro -cafe -96 -pinterest -97 -63 -pixstyleme3c -##ta -more -said -##2016 -1997 -mp3 -700 -##ll -nba -jun -##20 -92 -tv -1995 -pm -61 -76 -nbsp -250 -##ie -linux -##ma -cd -110 -hd -##17 -78 -##ion -77 -6000 -am -##th -##st -94 -##se -##et -69 -180 -gdp -my -105 -81 -abc -89 -flash -79 -one -93 -1990 -1996 -##ck -gps -##も -##ly -web885 -106 -2020 -91 -##ge -4000 -1500 -xd -boss -isbn -1994 -org -##ry -me -love -##11 -0fork -73 -##12 -3g -##ter -##ar -71 -82 -##la -hotel -130 -1970 -pk -83 -87 -140 -ie -##os -##30 -##el -74 -##50 -seo -cpu -##ml -p2p -84 -may -##る -sun -tue -internet -cc -posted -youtube -##at -##ン -##man -ii 
-##ル -##15 -abs -nt -pdf -yahoo -ago -1980 -##it -news -mac -104 -##てす -##me -##り -java -1992 -spa -##de -##nt -hk -all -plus -la -1993 -##mb -##16 -##ve -west -##da -160 -air -##い -##ps -から -##to -1989 -logo -htc -php -https -fi -momo -##son -sat -##ke -##80 -ebd -suv -wi -day -apk -##88 -##um -mv -galaxy -wiki -or -brake -##ス -1200 -する -this -1991 -mon -##こ -❤2017 -po -##ない -javascript -life -home -june -##ss -system -900 -##ー -##0 -pp -1988 -world -fb -4k -br -##as -ic -ai -leonardo -safari -##60 -live -free -xx -wed -win7 -kiehl -##co -lg -o2o -##go -us -235 -1949 -mm -しい -vfm -kanye -##90 -##2015 -##id -jr -##ey -123 -rss -##sa -##ro -##am -##no -thu -fri -350 -##sh -##ki -103 -comments -name -##のて -##pe -##ine -max -1987 -8000 -uber -##mi -##ton -wordpress -office -1986 -1985 -##ment -107 -bd -win10 -##ld -##li -gmail -bb -dior -##rs -##ri -##rd -##ます -up -cad -##® -dr -して -read -##21 -をお -##io -##99 -url -1984 -pvc -paypal -show -policy -##40 -##ty -##18 -with -##★ -##01 -txt -102 -##ba -dna -from -post -mini -ar -taiwan -john -##ga -privacy -agoda -##13 -##ny -word -##24 -##22 -##by -##ur -##hz -1982 -##ang -265 -cookie -netscape -108 -##ka -##~ -##ad -house -share -note -ibm -code -hello -nike -sim -survey -##016 -1979 -1950 -wikia -##32 -##017 -5g -cbc -##tor -##kg -1983 -##rt -##14 -campaign -store -2500 -os -##ct -##ts -##° -170 -api -##ns -365 -excel -##な -##ao -##ら -##し -~~ -##nd -university -163 -には -518 -##70 -##ya -##il -##25 -pierre -ipo -0020 -897 -##23 -hotels -##ian -のお -125 -years -6606 -##ers -##26 -high -##day -time -##ay -bug -##line -##く -##す -##be -xp -talk2yam -yamservice -10000 -coco -##dy -sony -##ies -1978 -microsoft -david -people -##ha -1960 -instagram -intel -その -##ot -iso -1981 -##va -115 -##mo -##land -xxx -man -co -ltxsw -##ation -baby -220 -##pa -##ol -1945 -7000 -tag -450 -##ue -msn -##31 -oppo -##ト -##ca -control -##om -st -chrome -##ure -##ん -be -##き -lol -##19 -した -##bo -240 -lady -##100 -##way -##から -4600 -##ko -##do -##un 
-4s -corporation -168 -##ni -herme -##28 -cp -978 -##up -##06 -ui -##ds -ppt -admin -three -します -bbc -re -128 -##48 -ca -##015 -##35 -hp -##ee -tpp -##た -##ive -×× -root -##cc -##ました -##ble -##ity -adobe -park -114 -et -oled -city -##ex -##ler -##ap -china -##book -20000 -view -##ice -global -##km -your -hong -##mg -out -##ms -ng -ebay -##29 -menu -ubuntu -##cy -rom -##view -open -ktv -do -server -##lo -if -english -##ね -##5 -##oo -1600 -##02 -step1 -kong -club -135 -july -inc -1976 -mr -hi -##net -touch -##ls -##ii -michael -lcd -##05 -##33 -phone -james -step2 -1300 -ios9 -##box -dc -##2 -##ley -samsung -111 -280 -pokemon -css -##ent -##les -いいえ -##1 -s8 -atom -play -bmw -##said -sa -etf -ctrl -♥yoyo♥ -##55 -2025 -##2014 -##66 -adidas -amazon -1958 -##ber -##ner -visa -##77 -##der -1800 -connectivity -##hi -firefox -109 -118 -hr -so -style -mark -pop -ol -skip -1975 -as -##27 -##ir -##61 -190 -mba -##う -##ai -le -##ver -1900 -cafe2017 -lte -super -113 -129 -##ron -amd -like -##☆ -are -##ster -we -##sk -paul -data -international -##ft -longchamp -ssd -good -##ート -##ti -reply -##my -↓↓↓ -apr -star -##ker -source -136 -js -112 -get -force -photo -##one -126 -##2013 -##ow -link -bbs -1972 -goods -##lin -python -119 -##ip -game -##ics -##ません -blue -##● -520 -##45 -page -itunes -##03 -1955 -260 -1968 -gt -gif -618 -##ff -##47 -group -くたさい -about -bar -ganji -##nce -music -lee -not -1977 -1971 -1973 -##per -an -faq -comment -##って -days -##ock -116 -##bs -1974 -1969 -v1 -player -1956 -xbox -sql -fm -f1 -139 -##ah -210 -##lv -##mp -##000 -melody -1957 -##3 -550 -17life -199 -1966 -xml -market -##au -##71 -999 -##04 -what -gl -##95 -##age -tips -##68 -book -##ting -mysql -can -1959 -230 -##ung -wonderland -watch -10℃ -##ction -9000 -mar -mobile -1946 -1962 -article -##db -part -▲top -party -って -1967 -1964 -1948 -##07 -##ore -##op -この -dj -##78 -##38 -010 -main -225 -1965 -##ong -art -320 -ad -134 -020 -##73 -117 -pm2 -japan -228 -##08 -ts -1963 -##ica -der -sm -##36 -2019 
-##wa -ct -##7 -##や -##64 -1937 -homemesh -search -##85 -##れは -##tv -##di -macbook -##9 -##くたさい -service -##♥ -type -った -750 -##ier -##si -##75 -##います -##ok -best -##ット -goris -lock -##った -cf -3m -big -##ut -ftp -carol -##vi -10 -1961 -happy -sd -##ac -122 -anti -pe -cnn -iii -1920 -138 -##ラ -1940 -esp -jan -tags -##98 -##51 -august -vol -##86 -154 -##™ -##fs -##れ -##sion -design -ac -##ム -press -jordan -ppp -that -key -check -##6 -##tt -##㎡ -1080p -##lt -power -##42 -1952 -##bc -vivi -##ック -he -133 -121 -jpg -##rry -201 -175 -3500 -1947 -nb -##ted -##rn -しています -1954 -usd -##t00 -master -##ンク -001 -model -##58 -al -##09 -1953 -##34 -ram -goo -ても -##ui -127 -1930 -red -##ary -rpg -item -##pm -##41 -270 -##za -project -##2012 -hot -td -blogabstract -##ger -##62 -650 -##44 -gr2 -##します -##m -black -electronic -nfc -year -asus -また -html5 -cindy -##hd -m3 -132 -esc -##od -booking -##53 -fed -tvb -##81 -##ina -mit -165 -##いる -chan -192 -distribution -next -になる -peter -bios -steam -cm -1941 -にも -pk10 -##ix -##65 -##91 -dec -nasa -##ana -icecat -00z -b1 -will -##46 -li -se -##ji -##み -##ard -oct -##ain -jp -##ze -##bi -cio -##56 -smart -h5 -##39 -##port -curve -vpn -##nm -##dia -utc -##あり -12345678910 -##52 -rmvb -chanel -a4 -miss -##and -##im -media -who -##63 -she -girl -5s -124 -vera -##して -class -vivo -king -##フ -##ei -national -ab -1951 -5cm -888 -145 -ipod -ap -1100 -5mm -211 -ms -2756 -##69 -mp4 -msci -##po -##89 -131 -mg -index -380 -##bit -##out -##zz -##97 -##67 -158 -apec -##8 -photoshop -opec -¥799 -ては -##96 -##tes -##ast -2g -○○ -##ール -¥2899 -##ling -##よ -##ory -1938 -##ical -kitty -content -##43 -step3 -##cn -win8 -155 -vc -1400 -iphone7 -robert -##した -tcl -137 -beauty -##87 -en -dollars -##ys -##oc -step -pay -yy -a1 -##2011 -##lly -##ks -##♪ -1939 -188 -download -1944 -sep -exe -ph -います -school -gb -center -pr -street -##board -uv -##37 -##lan -winrar -##que -##ua -##com -1942 -1936 -480 -gpu -##4 -ettoday -fu -tom -##54 -##ren -##via -149 -##72 -b2b -144 
-##79 -##tch -rose -arm -mb -##49 -##ial -##nn -nvidia -step4 -mvp -00㎡ -york -156 -##イ -how -cpi -591 -2765 -gov -kg -joe -##xx -mandy -pa -##ser -copyright -fashion -1935 -don -##け -ecu -##ist -##art -erp -wap -have -##lm -talk -##ek -##ning -##if -ch -##ite -video -1943 -cs -san -iot -look -##84 -##2010 -##ku -october -##ux -trump -##hs -##ide -box -141 -first -##ins -april -##ight -##83 -185 -angel -protected -aa -151 -162 -x1 -m2 -##fe -##× -##ho -size -143 -min -ofo -fun -gomaji -ex -hdmi -food -dns -march -chris -kevin -##のか -##lla -##pp -##ec -ag -ems -6s -720p -##rm -##ham -off -##92 -asp -team -fandom -ed -299 -▌♥ -##ell -info -されています -##82 -sina -4066 -161 -##able -##ctor -330 -399 -315 -dll -rights -ltd -idc -jul -3kg -1927 -142 -ma -surface -##76 -##ク -~~~ -304 -mall -eps -146 -green -##59 -map -space -donald -v2 -sodu -##light -1931 -148 -1700 -まて -310 -reserved -htm -##han -##57 -2d -178 -mod -##ise -##tions -152 -ti -##shi -doc -1933 -icp -055 -wang -##ram -shopping -aug -##pi -##well -now -wam -b2 -からお -##hu -236 -1928 -##gb -266 -f2 -##93 -153 -mix -##ef -##uan -bwl -##plus -##res -core -##ess -tea -5℃ -hktvmall -nhk -##ate -list -##ese -301 -feb -4m -inn -ての -nov -159 -12345 -daniel -##ci -pass -##bet -##nk -coffee -202 -ssl -airbnb -##ute -fbi -woshipm -skype -ea -cg -sp -##fc -##www -yes -edge -alt -007 -##94 -fpga -##ght -##gs -iso9001 -さい -##ile -##wood -##uo -image -lin -icon -american -##em -1932 -set -says -##king -##tive -blogger -##74 -なと -256 -147 -##ox -##zy -##red -##ium -##lf -nokia -claire -##リ -##ding -november -lohas -##500 -##tic -##マ -##cs -##ある -##che -##ire -##gy -##ult -db -january -win -##カ -166 -road -ptt -##ま -##つ -198 -##fa -##mer -anna -pchome -はい -udn -ef -420 -##time -##tte -2030 -##ア -g20 -white -かかります -1929 -308 -garden -eleven -di -##おります -chen -309b -777 -172 -young -cosplay -ちてない -4500 -bat -##123 -##tra -##ては -kindle -npc -steve -etc -##ern -##| -call -xperia -ces -travel -sk -s7 -##ous -1934 -##int -みいたたけます -183 
-edu -file -cho -qr -##car -##our -186 -##ant -##d -eric -1914 -rends -##jo -##する -mastercard -##2000 -kb -##min -290 -##ino -vista -##ris -##ud -jack -2400 -##set -169 -pos -1912 -##her -##ou -taipei -しく -205 -beta -##ませんか -232 -##fi -express -255 -body -##ill -aphojoy -user -december -meiki -##ick -tweet -richard -##av -##ᆫ -iphone6 -##dd -ちてすか -views -##mark -321 -pd -##00 -times -##▲ -level -##ash -10g -point -5l -##ome -208 -koreanmall -##ak -george -q2 -206 -wma -tcp -##200 -スタッフ -full -mlb -##lle -##watch -tm -run -179 -911 -smith -business -##und -1919 -color -##tal -222 -171 -##less -moon -4399 -##rl -update -pcb -shop -499 -157 -little -なし -end -##mhz -van -dsp -easy -660 -##house -##key -history -##o -oh -##001 -##hy -##web -oem -let -was -##2009 -##gg -review -##wan -182 -##°c -203 -uc -title -##val -united -233 -2021 -##ons -doi -trivago -overdope -sbs -##ance -##ち -grand -special -573032185 -imf -216 -wx17house -##so -##ーム -audi -##he -london -william -##rp -##ake -science -beach -cfa -amp -ps4 -880 -##800 -##link -##hp -crm -ferragamo -bell -make -##eng -195 -under -zh -photos -2300 -##style -##ント -via -176 -da -##gi -company -i7 -##ray -thomas -370 -ufo -i5 -##max -plc -ben -back -research -8g -173 -mike -##pc -##ッフ -september -189 -##ace -vps -february -167 -pantos -wp -lisa -1921 -★★ -jquery -night -long -offer -##berg -##news -1911 -##いて -ray -fks -wto -せます -over -164 -340 -##all -##rus -1924 -##888 -##works -blogtitle -loftpermalink -##→ -187 -martin -test -ling -km -##め -15000 -fda -v3 -##ja -##ロ -wedding -かある -outlet -family -##ea -をこ -##top -story -##ness -salvatore -##lu -204 -swift -215 -room -している -oracle -##ul -1925 -sam -b2c -week -pi -rock -##のは -##a -##けと -##ean -##300 -##gle -cctv -after -chinese -##back -powered -x2 -##tan -1918 -##nes -##イン -canon -only -181 -##zi -##las -say -##oe -184 -##sd -221 -##bot -##world -##zo -sky -made -top100 -just -1926 -pmi -802 -234 -gap -##vr -177 -les -174 -▲topoct -ball -vogue -vi -ing -ofweek -cos 
-##list -##ort -▲topmay -##なら -##lon -として -last -##tc -##of -##bus -##gen -real -eva -##コ -a3 -nas -##lie -##ria -##coin -##bt -▲topapr -his -212 -cat -nata -vive -health -⋯⋯ -drive -sir -▲topmar -du -cup -##カー -##ook -##よう -##sy -alex -msg -tour -しました -3ce -##word -193 -ebooks -r8 -block -318 -##より -2200 -nice -pvp -207 -months -1905 -rewards -##ther -1917 -0800 -##xi -##チ -##sc -micro -850 -gg -blogfp -op -1922 -daily -m1 -264 -true -##bb -ml -##tar -##のお -##ky -anthony -196 -253 -##yo -state -218 -##ara -##aa -##rc -##tz -##ston -より -gear -##eo -##ade -ge -see -1923 -##win -##ura -ss -heart -##den -##ita -down -##sm -el -png -2100 -610 -rakuten -whatsapp -bay -dream -add -##use -680 -311 -pad -gucci -mpv -##ode -##fo -island -▲topjun -##▼ -223 -jason -214 -chicago -##❤ -しの -##hone -io -##れる -##ことか -sogo -be2 -##ology -990 -cloud -vcd -##con -2~3 -##ford -##joy -##kb -##こさいます -##rade -but -##ach -docker -##ful -rfid -ul -##ase -hit -ford -##star -580 -##○ -11 -a2 -sdk -reading -edited -##are -cmos -##mc -238 -siri -light -##ella -##ため -bloomberg -##read -pizza -##ison -jimmy -##vm -college -node -journal -ba -18k -##play -245 -##cer -20 -magic -##yu -191 -jump -288 -tt -##ings -asr -##lia -3200 -step5 -network -##cd -mc -いします -1234 -pixstyleme -273 -##600 -2800 -money -★★★★★ -1280 -12 -430 -bl -みの -act -##tus -tokyo -##rial -##life -emba -##ae -saas -tcs -##rk -##wang -summer -##sp -ko -##ving -390 -premium -##その -netflix -##ヒ -uk -mt -##lton -right -frank -two -209 -える -##ple -##cal -021 -##んな -##sen -##ville -hold -nexus -dd -##ius -てお -##mah -##なく -tila -zero -820 -ce -##tin -resort -##ws -charles -old -p10 -5d -report -##360 -##ru -##には -bus -vans -lt -##est -pv -##レ -links -rebecca -##ツ -##dm -azure -##365 -きな -limited -bit -4gb -##mon -1910 -moto -##eam -213 -1913 -var -eos -なとの -226 -blogspot -された -699 -e3 -dos -dm -fc -##ments -##ik -##kw -boy -##bin -##ata -960 -er -##せ -219 -##vin -##tu -##ula -194 -##∥ -station -##ろ -##ature -835 -files -zara -hdr 
-top10 -nature -950 -magazine -s6 -marriott -##シ -avira -case -##っと -tab -##ran -tony -##home -oculus -im -##ral -jean -saint -cry -307 -rosie -##force -##ini -ice -##bert -のある -##nder -##mber -pet -2600 -##◆ -plurk -▲topdec -##sis -00kg -▲topnov -720 -##ence -tim -##ω -##nc -##ても -##name -log -ips -great -ikea -malaysia -unix -##イト -3600 -##ncy -##nie -12000 -akb48 -##ye -##oid -404 -##chi -##いた -oa -xuehai -##1000 -##orm -##rf -275 -さん -##ware -##リー -980 -ho -##pro -text -##era -560 -bob -227 -##ub -##2008 -8891 -scp -avi -##zen -2022 -mi -wu -museum -qvod -apache -lake -jcb -▲topaug -★★★ -ni -##hr -hill -302 -ne -weibo -490 -ruby -##ーシ -##ヶ -##row -4d -▲topjul -iv -##ish -github -306 -mate -312 -##スト -##lot -##ane -andrew -のハイト -##tina -t1 -rf -ed2k -##vel -##900 -way -final -りの -ns -5a -705 -197 -##メ -sweet -bytes -##ene -▲topjan -231 -##cker -##2007 -##px -100g -topapp -229 -helpapp -rs -low -14k -g4g -care -630 -ldquo -あり -##fork -leave -rm -edition -##gan -##zon -##qq -▲topsep -##google -##ism -gold -224 -explorer -##zer -toyota -category -select -visual -##labels -restaurant -##md -posts -s1 -##ico -もっと -angelababy -123456 -217 -sports -s3 -mbc -1915 -してくたさい -shell -x86 -candy -##new -kbs -face -xl -470 -##here -4a -swissinfo -v8 -▲topfeb -dram -##ual -##vice -3a -##wer -sport -q1 -ios10 -public -int -card -##c -ep -au -rt -##れた -1080 -bill -##mll -kim -30 -460 -wan -##uk -##ミ -x3 -298 -0t -scott -##ming -239 -e5 -##3d -h7n9 -worldcat -brown -##あります -##vo -##led -##580 -##ax -249 -410 -##ert -paris -##~6 -polo -925 -##lr -599 -##ナ -capital -##hing -bank -cv -1g -##chat -##s -##たい -adc -##ule -2m -##e -digital -hotmail -268 -##pad -870 -bbq -quot -##ring -before -wali -##まて -mcu -2k -2b -という -costco -316 -north -333 -switch -##city -##p -philips -##mann -management -panasonic -##cl -##vd -##ping -##rge -alice -##lk -##ましょう -css3 -##ney -vision -alpha -##ular -##400 -##tter -lz -にお -##ありません -mode -gre -1916 -pci -##tm -237 -1~2 -##yan -##そ -について -##let -##キ 
-work -war -coach -ah -mary -##ᅵ -huang -##pt -a8 -pt -follow -##berry -1895 -##ew -a5 -ghost -##ション -##wn -##og -south -##code -girls -##rid -action -villa -git -r11 -table -games -##cket -error -##anonymoussaid -##ag -here -##ame -##gc -qa -##■ -##lis -gmp -##gin -vmalife -##cher -yu -wedding -##tis -demo -dragon -530 -soho -social -bye -##rant -river -orz -acer -325 -##↑ -##ース -##ats -261 -del -##ven -440 -ups -##ように -##ター -305 -value -macd -yougou -##dn -661 -##ano -ll -##urt -##rent -continue -script -##wen -##ect -paper -263 -319 -shift -##chel -##フト -##cat -258 -x5 -fox -243 -##さん -car -aaa -##blog -loading -##yn -##tp -kuso -799 -si -sns -イカせるテンマ -ヒンクテンマ3 -rmb -vdc -forest -central -prime -help -ultra -##rmb -##ような -241 -square -688 -##しい -のないフロクに -##field -##reen -##ors -##ju -c1 -start -510 -##air -##map -cdn -##wo -cba -stephen -m8 -100km -##get -opera -##base -##ood -vsa -com™ -##aw -##ail -251 -なのて -count -t2 -##ᅡ -##een -2700 -hop -##gp -vsc -tree -##eg -##ose -816 -285 -##ories -##shop -alphago -v4 -1909 -simon -##ᆼ -fluke62max -zip -スホンサー -##sta -louis -cr -bas -##~10 -bc -##yer -hadoop -##ube -##wi -1906 -0755 -hola -##low -place -centre -5v -d3 -##fer -252 -##750 -##media -281 -540 -0l -exchange -262 -series -##ハー -##san -eb -##bank -##k -q3 -##nge -##mail -take -##lp -259 -1888 -client -east -cache -event -vincent -##ールを -きを -##nse -sui -855 -adchoice -##и -##stry -##なたの -246 -##zone -ga -apps -sea -##ab -248 -cisco -##タ -##rner -kymco -##care -dha -##pu -##yi -minkoff -royal -p1 -への -annie -269 -collection -kpi -playstation -257 -になります -866 -bh -##bar -queen -505 -radio -1904 -andy -armani -##xy -manager -iherb -##ery -##share -spring -raid -johnson -1908 -##ob -volvo -hall -##ball -v6 -our -taylor -##hk -bi -242 -##cp -kate -bo -water -technology -##rie -サイトは -277 -##ona -##sl -hpv -303 -gtx -hip -rdquo -jayz -stone -##lex -##rum -namespace -##やり -620 -##ale -##atic -des -##erson -##ql -##ves -##type -enter -##この -##てきます -d2 -##168 -##mix 
-##bian -との -a9 -jj -ky -##lc -access -movie -##hc -リストに -tower -##ration -##mit -ます -##nch -ua -tel -prefix -##o2 -1907 -##point -1901 -ott -~10 -##http -##ury -baidu -##ink -member -##logy -bigbang -nownews -##js -##shot -##tb -##こと -247 -eba -##tics -##lus -ける -v5 -spark -##ama -there -##ions -god -##lls -##down -hiv -##ress -burberry -day2 -##kv -◆◆ -jeff -related -film -edit -joseph -283 -##ark -cx -32gb -order -g9 -30000 -##ans -##tty -s5 -##bee -かあります -thread -xr -buy -sh -005 -land -spotify -mx -##ari -276 -##verse -×email -sf -why -##ことて -244 -7headlines -nego -sunny -dom -exo -401 -666 -positioning -fit -rgb -##tton -278 -kiss -alexa -adam -lp -みリストを -##g -mp -##ties -##llow -amy -##du -np -002 -institute -271 -##rth -##lar -2345 -590 -##des -sidebar -15 -imax -site -##cky -##kit -##ime -##009 -season -323 -##fun -##ンター -##ひ -gogoro -a7 -pu -lily -fire -twd600 -##ッセーシを -いて -##vis -30ml -##cture -##をお -information -##オ -close -friday -##くれる -yi -nick -てすか -##tta -##tel -6500 -##lock -cbd -economy -254 -かお -267 -tinker -double -375 -8gb -voice -##app -oops -channel -today -985 -##right -raw -xyz -##+ -jim -edm -##cent -7500 -supreme -814 -ds -##its -##asia -dropbox -##てすか -##tti -books -272 -100ml -##tle -##ller -##ken -##more -##boy -sex -309 -##dom -t3 -##ider -##なります -##unch -1903 -810 -feel -5500 -##かった -##put -により -s2 -mo -##gh -men -ka -amoled -div -##tr -##n1 -port -howard -##tags -ken -dnf -##nus -adsense -##а -ide -##へ -buff -thunder -##town -##ique -has -##body -auto -pin -##erry -tee -てした -295 -number -##the -##013 -object -psp -cool -udnbkk -16gb -##mic -miui -##tro -most -r2 -##alk -##nity -1880 -±0 -##いました -428 -s4 -law -version -##oa -n1 -sgs -docomo -##tf -##ack -henry -fc2 -##ded -##sco -##014 -##rite -286 -0mm -linkedin -##ada -##now -wii -##ndy -ucbug -##◎ -sputniknews -legalminer -##ika -##xp -2gb -##bu -q10 -oo -b6 -come -##rman -cheese -ming -maker -##gm -nikon -##fig -ppi -kelly -##ります -jchere -てきます -ted -md -003 -fgo -tech -##tto 
-dan -soc -##gl -##len -hair -earth -640 -521 -img -##pper -##a1 -##てきる -##ロク -acca -##ition -##ference -suite -##ig -outlook -##mond -##cation -398 -##pr -279 -101vip -358 -##999 -282 -64gb -3800 -345 -airport -##over -284 -##おり -jones -##ith -lab -##su -##いるのて -co2 -town -piece -##llo -no1 -vmware -24h -##qi -focus -reader -##admin -##ora -tb -false -##log -1898 -know -lan -838 -##ces -f4 -##ume -motel -stop -##oper -na -flickr -netcomponents -##af -##─ -pose -williams -local -##ound -##cg -##site -##iko -いお -274 -5m -gsm -con -##ath -1902 -friends -##hip -cell -317 -##rey -780 -cream -##cks -012 -##dp -facebooktwitterpinterestgoogle -sso -324 -shtml -song -swiss -##mw -##キンク -lumia -xdd -string -tiffany -522 -marc -られた -insee -russell -sc -dell -##ations -ok -camera -289 -##vs -##flow -##late -classic -287 -##nter -stay -g1 -mtv -512 -##ever -##lab -##nger -qe -sata -ryan -d1 -50ml -cms -##cing -su -292 -3300 -editor -296 -##nap -security -sunday -association -##ens -##700 -##bra -acg -##かり -sofascore -とは -mkv -##ign -jonathan -gary -build -labels -##oto -tesla -moba -qi -gohappy -general -ajax -1024 -##かる -サイト -society -##test -##urs -wps -fedora -##ich -mozilla -328 -##480 -##dr -usa -urn -##lina -##r -grace -##die -##try -##ader -1250 -##なり -elle -570 -##chen -##ᆯ -price -##ten -uhz -##ough -eq -##hen -states -push -session -balance -wow -506 -##cus -##py -when -##ward -##ep -34e -wong -library -prada -##サイト -##cle -running -##ree -313 -ck -date -q4 -##ctive -##ool -##> -mk -##ira -##163 -388 -die -secret -rq -dota -buffet -は1ヶ -e6 -##ez -pan -368 -ha -##card -##cha -2a -##さ -alan -day3 -eye -f3 -##end -france -keep -adi -rna -tvbs -##ala -solo -nova -##え -##tail -##ょう -support -##ries -##なる -##ved -base -copy -iis -fps -##ways -hero -hgih -profile -fish -mu -ssh -entertainment -chang -##wd -click -cake -##ond -pre -##tom -kic -pixel -##ov -##fl -product -6a -##pd -dear -##gate -es -yumi -audio -##² -##sky -echo -bin -where -##ture -329 -##ape -find -sap 
-isis -##なと -nand -##101 -##load -##ream -band -a6 -525 -never -##post -festival -50cm -##we -555 -guide -314 -zenfone -##ike -335 -gd -forum -jessica -strong -alexander -##ould -software -allen -##ious -program -360° -else -lohasthree -##gar -することかてきます -please -##れます -rc -##ggle -##ric -bim -50000 -##own -eclipse -355 -brian -3ds -##side -061 -361 -##other -##ける -##tech -##ator -485 -engine -##ged -##t -plaza -##fit -cia -ngo -westbrook -shi -tbs -50mm -##みませんか -sci -291 -reuters -##ily -contextlink -##hn -af -##cil -bridge -very -##cel -1890 -cambridge -##ize -15g -##aid -##data -790 -frm -##head -award -butler -##sun -meta -##mar -america -ps3 -puma -pmid -##すか -lc -670 -kitchen -##lic -オーフン5 -きなしソフトサーヒス -そして -day1 -future -★★★★ -##text -##page -##rris -pm1 -##ket -fans -##っています -1001 -christian -bot -kids -trackback -##hai -c3 -display -##hl -n2 -1896 -idea -さんも -##sent -airmail -##ug -##men -pwm -けます -028 -##lution -369 -852 -awards -schemas -354 -asics -wikipedia -font -##tional -##vy -c2 -293 -##れている -##dget -##ein -っている -contact -pepper -スキル -339 -##~5 -294 -##uel -##ument -730 -##hang -みてす -q5 -##sue -rain -##ndi -wei -swatch -##cept -わせ -331 -popular -##ste -##tag -p2 -501 -trc -1899 -##west -##live -justin -honda -ping -messenger -##rap -v9 -543 -##とは -unity -appqq -はすへて -025 -leo -##tone -##テ -##ass -uniqlo -##010 -502 -her -jane -memory -moneydj -##tical -human -12306 -していると -##m2 -coc -miacare -##mn -tmt -##core -vim -kk -##may -fan -target -use -too -338 -435 -2050 -867 -737 -fast -##2c -services -##ope -omega -energy -##わ -pinkoi -1a -##なから -##rain -jackson -##ement -##シャンルの -374 -366 -そんな -p9 -rd -##ᆨ -1111 -##tier -##vic -zone -##│ -385 -690 -dl -isofix -cpa -m4 -322 -kimi -めて -davis -##lay -lulu -##uck -050 -weeks -qs -##hop -920 -##n -ae -##ear -~5 -eia -405 -##fly -korea -jpeg -boost -##ship -small -##リア -1860 -eur -297 -425 -valley -##iel -simple -##ude -rn -k2 -##ena -されます -non -patrick -しているから -##ナー -feed -5757 -30g -process -well -qqmei 
-##thing -they -aws -lu -pink -##ters -##kin -または -board -##vertisement -wine -##ien -unicode -##dge -r1 -359 -##tant -いを -##twitter -##3c -cool1 -される -##れて -##l -isp -##012 -standard -45㎡2 -402 -##150 -matt -##fu -326 -##iner -googlemsn -pixnetfacebookyahoo -##ラン -x7 -886 -##uce -メーカー -sao -##ev -##きました -##file -9678 -403 -xddd -shirt -6l -##rio -##hat -3mm -givenchy -ya -bang -##lio -monday -crystal -ロクイン -##abc -336 -head -890 -ubuntuforumwikilinuxpastechat -##vc -##~20 -##rity -cnc -7866 -ipv6 -null -1897 -##ost -yang -imsean -tiger -##fet -##ンス -352 -##= -dji -327 -ji -maria -##come -##んて -foundation -3100 -##beth -##なった -1m -601 -active -##aft -##don -3p -sr -349 -emma -##khz -living -415 -353 -1889 -341 -709 -457 -sas -x6 -##face -pptv -x4 -##mate -han -sophie -##jing -337 -fifa -##mand -other -sale -inwedding -##gn -てきちゃいます -##mmy -##pmlast -bad -nana -nbc -してみてくたさいね -なとはお -##wu -##かあります -##あ -note7 -single -##340 -せからこ -してくたさい♪この -しにはとんとんワークケートを -するとあなたにもっとマッチした -ならワークケートへ -もみつかっちゃうかも -ワークケートの -##bel -window -##dio -##ht -union -age -382 -14 -##ivity -##y -コメント -domain -neo -##isa -##lter -5k -f5 -steven -##cts -powerpoint -tft -self -g2 -ft -##テル -zol -##act -mwc -381 -343 -もう -nbapop -408 -てある -eds -ace -##room -previous -author -tomtom -il -##ets -hu -financial -☆☆☆ -っています -bp -5t -chi -1gb -##hg -fairmont -cross -008 -gay -h2 -function -##けて -356 -also -1b -625 -##ータ -##raph -1894 -3~5 -##ils -i3 -334 -avenue -##host -による -##bon -##tsu -message -navigation -50g -fintech -h6 -##ことを -8cm -##ject -##vas -##firm -credit -##wf -xxxx -form -##nor -##space -huawei -plan -json -sbl -##dc -machine -921 -392 -wish -##120 -##sol -windows7 -edward -##ために -development -washington -##nsis -lo -818 -##sio -##ym -##bor -planet -##~8 -##wt -ieee -gpa -##めて -camp -ann -gm -##tw -##oka -connect -##rss -##work -##atus -wall -chicken -soul -2mm -##times -fa -##ather -##cord -009 -##eep -hitachi -gui -harry -##pan -e1 -disney -##press -##ーション -wind -386 -frigidaire -##tl 
-liu -hsu -332 -basic -von -ev -いた -てきる -スホンサーサイト -learning -##ull -expedia -archives -change -##wei -santa -cut -ins -6gb -turbo -brand -cf1 -508 -004 -return -747 -##rip -h1 -##nis -##をこ -128gb -##にお -3t -application -しており -emc -rx -##oon -384 -quick -412 -15058 -wilson -wing -chapter -##bug -beyond -##cms -##dar -##oh -zoom -e2 -trip -sb -##nba -rcep -342 -aspx -ci -080 -gc -gnu -める -##count -advanced -dance -dv -##url -##ging -367 -8591 -am09 -shadow -battle -346 -##i -##cia -##という -emily -##のてす -##tation -host -ff -techorz -sars -##mini -##mporary -##ering -nc -4200 -798 -##next -cma -##mbps -##gas -##ift -##dot -##ィ -455 -##~17 -amana -##りの -426 -##ros -ir -00㎡1 -##eet -##ible -##↓ -710 -ˋ▽ˊ -##aka -dcs -iq -##v -l1 -##lor -maggie -##011 -##iu -588 -##~1 -830 -##gt -1tb -articles -create -##burg -##iki -database -fantasy -##rex -##cam -dlc -dean -##you -hard -path -gaming -victoria -maps -cb -##lee -##itor -overchicstoretvhome -systems -##xt -416 -p3 -sarah -760 -##nan -407 -486 -x9 -install -second -626 -##ann -##ph -##rcle -##nic -860 -##nar -ec -##とう -768 -metro -chocolate -##rian -~4 -##table -##しています -skin -##sn -395 -mountain -##0mm -inparadise -6m -7x24 -ib -4800 -##jia -eeworld -creative -g5 -g3 -357 -parker -ecfa -village -からの -18000 -sylvia -サーヒス -hbl -##ques -##onsored -##x2 -##きます -##v4 -##tein -ie6 -383 -##stack -389 -ver -##ads -##baby -sound -bbe -##110 -##lone -##uid -ads -022 -gundam -351 -thinkpad -006 -scrum -match -##ave -mems -##470 -##oy -##なりました -##talk -glass -lamigo -span -##eme -job -##a5 -jay -wade -kde -498 -##lace -ocean -tvg -##covery -##r3 -##ners -##rea -junior -think -##aine -cover -##ision -##sia -↓↓ -##bow -msi -413 -458 -406 -##love -711 -801 -soft -z2 -##pl -456 -1840 -mobil -mind -##uy -427 -nginx -##oi -めた -##rr -6221 -##mple -##sson -##ーシてす -371 -##nts -91tv -comhd -crv3000 -##uard -1868 -397 -deep -lost -field -gallery -##bia -rate -spf -redis -traction -930 -icloud -011 -なら -fe -jose -372 -##tory -into -sohu -fx -899 
-379 -kicstart2 -##hia -すく -##~3 -##sit -ra -24 -##walk -##xure -500g -##pact -pacific -xa -natural -carlo -##250 -##walker -1850 -##can -cto -gigi -516 -##サー -pen -##hoo -ob -matlab -##b -##yy -13913459 -##iti -mango -##bbs -sense -c5 -oxford -##ニア -walker -jennifer -##ola -course -##bre -701 -##pus -##rder -lucky -075 -##ぁ -ivy -なお -##nia -sotheby -side -##ugh -joy -##orage -##ush -##bat -##dt -364 -r9 -##2d -##gio -511 -country -wear -##lax -##~7 -##moon -393 -seven -study -411 -348 -lonzo -8k -##ェ -evolution -##イフ -##kk -gs -kd -##レス -arduino -344 -b12 -##lux -arpg -##rdon -cook -##x5 -dark -five -##als -##ida -とても -sign -362 -##ちの -something -20mm -##nda -387 -##posted -fresh -tf -1870 -422 -cam -##mine -##skip -##form -##ssion -education -394 -##tee -dyson -stage -##jie -want -##night -epson -pack -あります -##ppy -テリヘル -##█ -wd -##eh -##rence -left -##lvin -golden -mhz -discovery -##trix -##n2 -loft -##uch -##dra -##sse -speed -~1 -1mdb -sorry -welcome -##urn -wave -gaga -##lmer -teddy -##160 -トラックハック -せよ -611 -##f2016 -378 -rp -##sha -rar -##あなたに -##きた -840 -holiday -##ュー -373 -074 -##vg -##nos -##rail -gartner -gi -6p -##dium -kit -488 -b3 -eco -##ろう -20g -sean -##stone -autocad -nu -##np -f16 -write -029 -m5 -##ias -images -atp -##dk -fsm -504 -1350 -ve -52kb -##xxx -##のに -##cake -414 -unit -lim -ru -1v -##ification -published -angela -16g -analytics -ak -##q -##nel -gmt -##icon -again -##₂ -##bby -ios11 -445 -かこさいます -waze -いてす -##ハ -9985 -##ust -##ティー -framework -##007 -iptv -delete -52sykb -cl -wwdc -027 -30cm -##fw -##ての -1389 -##xon -brandt -##ses -##dragon -tc -vetements -anne -monte -modern -official -##へて -##ere -##nne -##oud -もちろん -50 -etnews -##a2 -##graphy -421 -863 -##ちゃん -444 -##rtex -##てお -l2 -##gma -mount -ccd -たと -archive -morning -tan -ddos -e7 -##ホ -day4 -##ウ -gis -453 -its -495 -factory -bruce -pg -##ito -ってくたさい -guest -cdma -##lling -536 -n3 -しかし -3~4 -mega -eyes -ro -13 -women -dac -church -##jun -singapore -##facebook -6991 -starbucks 
-##tos -##stin -##shine -zen -##mu -tina -20℃ -1893 -##たけて -503 -465 -request -##gence -qt -##っ -1886 -347 -363 -q7 -##zzi -diary -##tore -409 -##ead -468 -cst -##osa -canada -agent -va -##jiang -##ちは -##ーク -##lam -sg -##nix -##sday -##よって -g6 -##master -bing -##zl -charlie -16 -8mm -nb40 -##ーン -thai -##ルフ -ln284ct -##itz -##2f -bonnie -##food -##lent -originals -##stro -##lts -418 -∟∣ -##bscribe -children -ntd -yesstyle -##かも -hmv -##tment -d5 -2cm -arts -sms -##pn -##я -##いい -topios9 -539 -lifestyle -virtual -##ague -xz -##deo -muji -024 -unt -##nnis -##ᅩ -faq1 -1884 -396 -##ette -fly -64㎡ -はしめまして -441 -curry -##pop -のこ -release -##← -##◆◆ -##cast -073 -ありな -500ml -##ews -5c -##stle -ios7 -##ima -787 -dog -lenovo -##r4 -roger -013 -cbs -vornado -100m -417 -##desk -##クok -##ald -1867 -9595 -2900 -##van -oil -##x -some -break -common -##jy -##lines -g7 -twice -419 -ella -nano -belle -にこ -##mes -##self -##note -jb -##ことかてきます -benz -##との -##ova -451 -save -##wing -##ますのて -kai -りは -##hua -##rect -rainer -##unge -448 -##0m -adsl -##かな -guestname -##uma -##kins -##zu -tokichoi -##price -county -##med -##mus -rmk -391 -address -vm -えて -openload -##group -##hin -##iginal -amg -urban -##oz -jobs -emi -##public -beautiful -##sch -album -##dden -##bell -jerry -works -hostel -miller -##drive -##rmin -##10 -376 -boot -828 -##370 -##fx -##cm~ -1885 -##nome -##ctionary -##oman -##lish -##cr -##hm -433 -##how -432 -francis -xi -c919 -b5 -evernote -##uc -vga -##3000 -coupe -##urg -##cca -##uality -019 -6g -れる -multi -##また -##ett -em -hey -##ani -##tax -##rma -inside -than -740 -leonnhurt -##jin -ict -れた -bird -notes -200mm -くの -##dical -##lli -result -442 -iu -ee -438 -smap -gopro -##last -yin -pure -998 -32g -けた -5kg -##dan -##rame -mama -##oot -bean -marketing -##hur -2l -bella -sync -xuite -##ground -515 -discuz -##getrelax -##ince -##bay -##5s -cj -##イス -gmat -apt -##pass -jing -##rix -c4 -rich -##とても -niusnews -##ello -bag -770 -##eting -##mobile -18 -culture -015 -##のてすか 
-377 -1020 -area -##ience -616 -details -gp -universal -silver -dit -はお -private -ddd -u11 -kanshu -##ified -fung -##nny -dx -##520 -tai -475 -023 -##fr -##lean -3s -##pin -429 -##rin -25000 -ly -rick -##bility -usb3 -banner -##baru -##gion -metal -dt -vdf -1871 -karl -qualcomm -bear -1010 -oldid -ian -jo -##tors -population -##ernel -1882 -mmorpg -##mv -##bike -603 -##© -ww -friend -##ager -exhibition -##del -##pods -fpx -structure -##free -##tings -kl -##rley -##copyright -##mma -california -3400 -orange -yoga -4l -canmake -honey -##anda -##コメント -595 -nikkie -##ルハイト -dhl -publishing -##mall -##gnet -20cm -513 -##クセス -##┅ -e88 -970 -##dog -fishbase -##! -##" -### -##$ -##% -##& -##' -##( -##) -##* -##+ -##, -##- -##. -##/ -##: -##; -##< -##= -##> -##? -##@ -##[ -##\ -##] -##^ -##_ -##{ -##| -##} -##~ -##£ -##¤ -##¥ -##§ -##« -##± -##³ -##µ -##· -##¹ -##º -##» -##¼ -##ß -##æ -##÷ -##ø -##đ -##ŋ -##ɔ -##ə -##ɡ -##ʰ -##ˇ -##ˈ -##ˊ -##ˋ -##ˍ -##ː -##˙ -##˚ -##ˢ -##α -##β -##γ -##δ -##ε -##η -##θ -##ι -##κ -##λ -##μ -##ν -##ο -##π -##ρ -##ς -##σ -##τ -##υ -##φ -##χ -##ψ -##б -##в -##г -##д -##е -##ж -##з -##к -##л -##м -##н -##о -##п -##р -##с -##т -##у -##ф -##х -##ц -##ч -##ш -##ы -##ь -##і -##ا -##ب -##ة -##ت -##د -##ر -##س -##ع -##ل -##م -##ن -##ه -##و -##ي -##۩ -##ก -##ง -##น -##ม -##ย -##ร -##อ -##า -##เ -##๑ -##་ -##ღ -##ᄀ -##ᄁ -##ᄂ -##ᄃ -##ᄅ -##ᄆ -##ᄇ -##ᄈ -##ᄉ -##ᄋ -##ᄌ -##ᄎ -##ᄏ -##ᄐ -##ᄑ -##ᄒ -##ᅢ -##ᅣ -##ᅥ -##ᅦ -##ᅧ -##ᅨ -##ᅪ -##ᅬ -##ᅭ -##ᅮ -##ᅯ -##ᅲ -##ᅳ -##ᅴ -##ᆷ -##ᆸ -##ᆺ -##ᆻ -##ᗜ -##ᵃ -##ᵉ -##ᵍ -##ᵏ -##ᵐ -##ᵒ -##ᵘ -##‖ -##„ -##† -##• -##‥ -##‧ -##
 -##‰ -##′ -##″ -##‹ -##› -##※ -##‿ -##⁄ -##ⁱ -##⁺ -##ⁿ -##₁ -##₃ -##₄ -##€ -##№ -##ⅰ -##ⅱ -##ⅲ -##ⅳ -##ⅴ -##↔ -##↗ -##↘ -##⇒ -##∀ -##− -##∕ -##∙ -##√ -##∞ -##∟ -##∠ -##∣ -##∩ -##∮ -##∶ -##∼ -##∽ -##≈ -##≒ -##≡ -##≤ -##≥ -##≦ -##≧ -##≪ -##≫ -##⊙ -##⋅ -##⋈ -##⋯ -##⌒ -##① -##② -##③ -##④ -##⑤ -##⑥ -##⑦ -##⑧ -##⑨ -##⑩ -##⑴ -##⑵ -##⑶ -##⑷ -##⑸ -##⒈ -##⒉ -##⒊ -##⒋ -##ⓒ -##ⓔ -##ⓘ -##━ -##┃ -##┆ -##┊ -##┌ -##└ -##├ -##┣ -##═ -##║ -##╚ -##╞ -##╠ -##╭ -##╮ -##╯ -##╰ -##╱ -##╳ -##▂ -##▃ -##▅ -##▇ -##▉ -##▋ -##▌ -##▍ -##▎ -##□ -##▪ -##▫ -##▬ -##△ -##▶ -##► -##▽ -##◇ -##◕ -##◠ -##◢ -##◤ -##☀ -##☕ -##☞ -##☺ -##☼ -##♀ -##♂ -##♠ -##♡ -##♣ -##♦ -##♫ -##♬ -##✈ -##✔ -##✕ -##✖ -##✦ -##✨ -##✪ -##✰ -##✿ -##❀ -##➜ -##➤ -##⦿ -##、 -##。 -##〃 -##々 -##〇 -##〈 -##〉 -##《 -##》 -##「 -##」 -##『 -##』 -##【 -##】 -##〓 -##〔 -##〕 -##〖 -##〗 -##〜 -##〝 -##〞 -##ぃ -##ぇ -##ぬ -##ふ -##ほ -##む -##ゃ -##ゅ -##ゆ -##ょ -##゜ -##ゝ -##ァ -##ゥ -##エ -##ォ -##ケ -##サ -##セ -##ソ -##ッ -##ニ -##ヌ -##ネ -##ノ -##ヘ -##モ -##ャ -##ヤ -##ュ -##ユ -##ョ -##ヨ -##ワ -##ヲ -##・ -##ヽ -##ㄅ -##ㄆ -##ㄇ -##ㄉ -##ㄋ -##ㄌ -##ㄍ -##ㄎ -##ㄏ -##ㄒ -##ㄚ -##ㄛ -##ㄞ -##ㄟ -##ㄢ -##ㄤ -##ㄥ -##ㄧ -##ㄨ -##ㆍ -##㈦ -##㊣ -##㗎 -##一 -##丁 -##七 -##万 -##丈 -##三 -##上 -##下 -##不 -##与 -##丐 -##丑 -##专 -##且 -##丕 -##世 -##丘 -##丙 -##业 -##丛 -##东 -##丝 -##丞 -##丟 -##両 -##丢 -##两 -##严 -##並 -##丧 -##丨 -##个 -##丫 -##中 -##丰 -##串 -##临 -##丶 -##丸 -##丹 -##为 -##主 -##丼 -##丽 -##举 -##丿 -##乂 -##乃 -##久 -##么 -##义 -##之 -##乌 -##乍 -##乎 -##乏 -##乐 -##乒 -##乓 -##乔 -##乖 -##乗 -##乘 -##乙 -##乜 -##九 -##乞 -##也 -##习 -##乡 -##书 -##乩 -##买 -##乱 -##乳 -##乾 -##亀 -##亂 -##了 -##予 -##争 -##事 -##二 -##于 -##亏 -##云 -##互 -##五 -##井 -##亘 -##亙 -##亚 -##些 -##亜 -##亞 -##亟 -##亡 -##亢 -##交 -##亥 -##亦 -##产 -##亨 -##亩 -##享 -##京 -##亭 -##亮 -##亲 -##亳 -##亵 -##人 -##亿 -##什 -##仁 -##仃 -##仄 -##仅 -##仆 -##仇 -##今 -##介 -##仍 -##从 -##仏 -##仑 -##仓 -##仔 -##仕 -##他 -##仗 -##付 -##仙 -##仝 -##仞 -##仟 -##代 -##令 -##以 -##仨 -##仪 -##们 -##仮 -##仰 -##仲 -##件 -##价 -##任 -##份 -##仿 -##企 -##伉 -##伊 -##伍 -##伎 -##伏 -##伐 -##休 -##伕 -##众 -##优 -##伙 -##会 -##伝 -##伞 -##伟 -##传 -##伢 -##伤 -##伦 -##伪 -##伫 -##伯 -##估 
-##伴 -##伶 -##伸 -##伺 -##似 -##伽 -##佃 -##但 -##佇 -##佈 -##位 -##低 -##住 -##佐 -##佑 -##体 -##佔 -##何 -##佗 -##佘 -##余 -##佚 -##佛 -##作 -##佝 -##佞 -##佟 -##你 -##佢 -##佣 -##佤 -##佥 -##佩 -##佬 -##佯 -##佰 -##佳 -##併 -##佶 -##佻 -##佼 -##使 -##侃 -##侄 -##來 -##侈 -##例 -##侍 -##侏 -##侑 -##侖 -##侗 -##供 -##依 -##侠 -##価 -##侣 -##侥 -##侦 -##侧 -##侨 -##侬 -##侮 -##侯 -##侵 -##侶 -##侷 -##便 -##係 -##促 -##俄 -##俊 -##俎 -##俏 -##俐 -##俑 -##俗 -##俘 -##俚 -##保 -##俞 -##俟 -##俠 -##信 -##俨 -##俩 -##俪 -##俬 -##俭 -##修 -##俯 -##俱 -##俳 -##俸 -##俺 -##俾 -##倆 -##倉 -##個 -##倌 -##倍 -##倏 -##們 -##倒 -##倔 -##倖 -##倘 -##候 -##倚 -##倜 -##借 -##倡 -##値 -##倦 -##倩 -##倪 -##倫 -##倬 -##倭 -##倶 -##债 -##值 -##倾 -##偃 -##假 -##偈 -##偉 -##偌 -##偎 -##偏 -##偕 -##做 -##停 -##健 -##側 -##偵 -##偶 -##偷 -##偻 -##偽 -##偿 -##傀 -##傅 -##傍 -##傑 -##傘 -##備 -##傚 -##傢 -##傣 -##傥 -##储 -##傩 -##催 -##傭 -##傲 -##傳 -##債 -##傷 -##傻 -##傾 -##僅 -##働 -##像 -##僑 -##僕 -##僖 -##僚 -##僥 -##僧 -##僭 -##僮 -##僱 -##僵 -##價 -##僻 -##儀 -##儂 -##億 -##儆 -##儉 -##儋 -##儒 -##儕 -##儘 -##償 -##儡 -##優 -##儲 -##儷 -##儼 -##儿 -##兀 -##允 -##元 -##兄 -##充 -##兆 -##兇 -##先 -##光 -##克 -##兌 -##免 -##児 -##兑 -##兒 -##兔 -##兖 -##党 -##兜 -##兢 -##入 -##內 -##全 -##兩 -##八 -##公 -##六 -##兮 -##兰 -##共 -##兲 -##关 -##兴 -##兵 -##其 -##具 -##典 -##兹 -##养 -##兼 -##兽 -##冀 -##内 -##円 -##冇 -##冈 -##冉 -##冊 -##册 -##再 -##冏 -##冒 -##冕 -##冗 -##写 -##军 -##农 -##冠 -##冢 -##冤 -##冥 -##冨 -##冪 -##冬 -##冯 -##冰 -##冲 -##决 -##况 -##冶 -##冷 -##冻 -##冼 -##冽 -##冾 -##净 -##凄 -##准 -##凇 -##凈 -##凉 -##凋 -##凌 -##凍 -##减 -##凑 -##凛 -##凜 -##凝 -##几 -##凡 -##凤 -##処 -##凪 -##凭 -##凯 -##凰 -##凱 -##凳 -##凶 -##凸 -##凹 -##出 -##击 -##函 -##凿 -##刀 -##刁 -##刃 -##分 -##切 -##刈 -##刊 -##刍 -##刎 -##刑 -##划 -##列 -##刘 -##则 -##刚 -##创 -##初 -##删 -##判 -##別 -##刨 -##利 -##刪 -##别 -##刮 -##到 -##制 -##刷 -##券 -##刹 -##刺 -##刻 -##刽 -##剁 -##剂 -##剃 -##則 -##剉 -##削 -##剋 -##剌 -##前 -##剎 -##剐 -##剑 -##剔 -##剖 -##剛 -##剜 -##剝 -##剣 -##剤 -##剥 -##剧 -##剩 -##剪 -##副 -##割 -##創 -##剷 -##剽 -##剿 -##劃 -##劇 -##劈 -##劉 -##劊 -##劍 -##劏 -##劑 -##力 -##劝 -##办 -##功 -##加 -##务 -##劣 -##动 -##助 -##努 -##劫 -##劭 -##励 -##劲 -##劳 -##労 -##劵 -##効 -##劾 -##势 -##勁 -##勃 -##勇 -##勉 -##勋 -##勐 -##勒 -##動 -##勖 -##勘 -##務 -##勛 
-##勝 -##勞 -##募 -##勢 -##勤 -##勧 -##勳 -##勵 -##勸 -##勺 -##勻 -##勾 -##勿 -##匀 -##包 -##匆 -##匈 -##匍 -##匐 -##匕 -##化 -##北 -##匙 -##匝 -##匠 -##匡 -##匣 -##匪 -##匮 -##匯 -##匱 -##匹 -##区 -##医 -##匾 -##匿 -##區 -##十 -##千 -##卅 -##升 -##午 -##卉 -##半 -##卍 -##华 -##协 -##卑 -##卒 -##卓 -##協 -##单 -##卖 -##南 -##単 -##博 -##卜 -##卞 -##卟 -##占 -##卡 -##卢 -##卤 -##卦 -##卧 -##卫 -##卮 -##卯 -##印 -##危 -##即 -##却 -##卵 -##卷 -##卸 -##卻 -##卿 -##厂 -##厄 -##厅 -##历 -##厉 -##压 -##厌 -##厕 -##厘 -##厚 -##厝 -##原 -##厢 -##厥 -##厦 -##厨 -##厩 -##厭 -##厮 -##厲 -##厳 -##去 -##县 -##叁 -##参 -##參 -##又 -##叉 -##及 -##友 -##双 -##反 -##収 -##发 -##叔 -##取 -##受 -##变 -##叙 -##叛 -##叟 -##叠 -##叡 -##叢 -##口 -##古 -##句 -##另 -##叨 -##叩 -##只 -##叫 -##召 -##叭 -##叮 -##可 -##台 -##叱 -##史 -##右 -##叵 -##叶 -##号 -##司 -##叹 -##叻 -##叼 -##叽 -##吁 -##吃 -##各 -##吆 -##合 -##吉 -##吊 -##吋 -##同 -##名 -##后 -##吏 -##吐 -##向 -##吒 -##吓 -##吕 -##吖 -##吗 -##君 -##吝 -##吞 -##吟 -##吠 -##吡 -##否 -##吧 -##吨 -##吩 -##含 -##听 -##吭 -##吮 -##启 -##吱 -##吳 -##吴 -##吵 -##吶 -##吸 -##吹 -##吻 -##吼 -##吽 -##吾 -##呀 -##呂 -##呃 -##呆 -##呈 -##告 -##呋 -##呎 -##呐 -##呓 -##呕 -##呗 -##员 -##呛 -##呜 -##呢 -##呤 -##呦 -##周 -##呱 -##呲 -##味 -##呵 -##呷 -##呸 -##呻 -##呼 -##命 -##咀 -##咁 -##咂 -##咄 -##咆 -##咋 -##和 -##咎 -##咏 -##咐 -##咒 -##咔 -##咕 -##咖 -##咗 -##咘 -##咙 -##咚 -##咛 -##咣 -##咤 -##咦 -##咧 -##咨 -##咩 -##咪 -##咫 -##咬 -##咭 -##咯 -##咱 -##咲 -##咳 -##咸 -##咻 -##咽 -##咿 -##哀 -##品 -##哂 -##哄 -##哆 -##哇 -##哈 -##哉 -##哋 -##哌 -##响 -##哎 -##哏 -##哐 -##哑 -##哒 -##哔 -##哗 -##哟 -##員 -##哥 -##哦 -##哧 -##哨 -##哩 -##哪 -##哭 -##哮 -##哲 -##哺 -##哼 -##哽 -##唁 -##唄 -##唆 -##唇 -##唉 -##唏 -##唐 -##唑 -##唔 -##唠 -##唤 -##唧 -##唬 -##售 -##唯 -##唰 -##唱 -##唳 -##唷 -##唸 -##唾 -##啃 -##啄 -##商 -##啉 -##啊 -##問 -##啓 -##啕 -##啖 -##啜 -##啞 -##啟 -##啡 -##啤 -##啥 -##啦 -##啧 -##啪 -##啫 -##啬 -##啮 -##啰 -##啱 -##啲 -##啵 -##啶 -##啷 -##啸 -##啻 -##啼 -##啾 -##喀 -##喂 -##喃 -##善 -##喆 -##喇 -##喉 -##喊 -##喋 -##喎 -##喏 -##喔 -##喘 -##喙 -##喚 -##喜 -##喝 -##喟 -##喧 -##喪 -##喫 -##喬 -##單 -##喰 -##喱 -##喲 -##喳 -##喵 -##営 -##喷 -##喹 -##喺 -##喻 -##喽 -##嗅 -##嗆 -##嗇 -##嗎 -##嗑 -##嗒 -##嗓 -##嗔 -##嗖 -##嗚 -##嗜 -##嗝 -##嗟 -##嗡 -##嗣 -##嗤 -##嗦 -##嗨 -##嗪 -##嗬 -##嗯 -##嗰 -##嗲 -##嗳 -##嗶 -##嗷 -##嗽 
-##嘀 -##嘅 -##嘆 -##嘈 -##嘉 -##嘌 -##嘍 -##嘎 -##嘔 -##嘖 -##嘗 -##嘘 -##嘚 -##嘛 -##嘜 -##嘞 -##嘟 -##嘢 -##嘣 -##嘤 -##嘧 -##嘩 -##嘭 -##嘮 -##嘯 -##嘰 -##嘱 -##嘲 -##嘴 -##嘶 -##嘸 -##嘹 -##嘻 -##嘿 -##噁 -##噌 -##噎 -##噓 -##噔 -##噗 -##噙 -##噜 -##噠 -##噢 -##噤 -##器 -##噩 -##噪 -##噬 -##噱 -##噴 -##噶 -##噸 -##噹 -##噻 -##噼 -##嚀 -##嚇 -##嚎 -##嚏 -##嚐 -##嚓 -##嚕 -##嚟 -##嚣 -##嚥 -##嚨 -##嚮 -##嚴 -##嚷 -##嚼 -##囂 -##囉 -##囊 -##囍 -##囑 -##囔 -##囗 -##囚 -##四 -##囝 -##回 -##囟 -##因 -##囡 -##团 -##団 -##囤 -##囧 -##囪 -##囫 -##园 -##困 -##囱 -##囲 -##図 -##围 -##囹 -##固 -##国 -##图 -##囿 -##圃 -##圄 -##圆 -##圈 -##國 -##圍 -##圏 -##園 -##圓 -##圖 -##團 -##圜 -##土 -##圣 -##圧 -##在 -##圩 -##圭 -##地 -##圳 -##场 -##圻 -##圾 -##址 -##坂 -##均 -##坊 -##坍 -##坎 -##坏 -##坐 -##坑 -##块 -##坚 -##坛 -##坝 -##坞 -##坟 -##坠 -##坡 -##坤 -##坦 -##坨 -##坪 -##坯 -##坳 -##坵 -##坷 -##垂 -##垃 -##垄 -##型 -##垒 -##垚 -##垛 -##垠 -##垢 -##垣 -##垦 -##垩 -##垫 -##垭 -##垮 -##垵 -##埂 -##埃 -##埋 -##城 -##埔 -##埕 -##埗 -##域 -##埠 -##埤 -##埵 -##執 -##埸 -##培 -##基 -##埼 -##堀 -##堂 -##堃 -##堅 -##堆 -##堇 -##堑 -##堕 -##堙 -##堡 -##堤 -##堪 -##堯 -##堰 -##報 -##場 -##堵 -##堺 -##堿 -##塊 -##塌 -##塑 -##塔 -##塗 -##塘 -##塚 -##塞 -##塢 -##塩 -##填 -##塬 -##塭 -##塵 -##塾 -##墀 -##境 -##墅 -##墉 -##墊 -##墒 -##墓 -##増 -##墘 -##墙 -##墜 -##增 -##墟 -##墨 -##墩 -##墮 -##墳 -##墻 -##墾 -##壁 -##壅 -##壆 -##壇 -##壊 -##壑 -##壓 -##壕 -##壘 -##壞 -##壟 -##壢 -##壤 -##壩 -##士 -##壬 -##壮 -##壯 -##声 -##売 -##壳 -##壶 -##壹 -##壺 -##壽 -##处 -##备 -##変 -##复 -##夏 -##夔 -##夕 -##外 -##夙 -##多 -##夜 -##够 -##夠 -##夢 -##夥 -##大 -##天 -##太 -##夫 -##夭 -##央 -##夯 -##失 -##头 -##夷 -##夸 -##夹 -##夺 -##夾 -##奂 -##奄 -##奇 -##奈 -##奉 -##奋 -##奎 -##奏 -##奐 -##契 -##奔 -##奕 -##奖 -##套 -##奘 -##奚 -##奠 -##奢 -##奥 -##奧 -##奪 -##奬 -##奮 -##女 -##奴 -##奶 -##奸 -##她 -##好 -##如 -##妃 -##妄 -##妆 -##妇 -##妈 -##妊 -##妍 -##妒 -##妓 -##妖 -##妘 -##妙 -##妝 -##妞 -##妣 -##妤 -##妥 -##妨 -##妩 -##妪 -##妮 -##妲 -##妳 -##妹 -##妻 -##妾 -##姆 -##姉 -##姊 -##始 -##姍 -##姐 -##姑 -##姒 -##姓 -##委 -##姗 -##姚 -##姜 -##姝 -##姣 -##姥 -##姦 -##姨 -##姪 -##姫 -##姬 -##姹 -##姻 -##姿 -##威 -##娃 -##娄 -##娅 -##娆 -##娇 -##娉 -##娑 -##娓 -##娘 -##娛 -##娜 -##娟 -##娠 -##娣 -##娥 -##娩 -##娱 -##娲 -##娴 -##娶 -##娼 -##婀 -##婁 -##婆 -##婉 -##婊 -##婕 -##婚 -##婢 -##婦 
-##婧 -##婪 -##婭 -##婴 -##婵 -##婶 -##婷 -##婺 -##婿 -##媒 -##媚 -##媛 -##媞 -##媧 -##媲 -##媳 -##媽 -##媾 -##嫁 -##嫂 -##嫉 -##嫌 -##嫑 -##嫔 -##嫖 -##嫘 -##嫚 -##嫡 -##嫣 -##嫦 -##嫩 -##嫲 -##嫵 -##嫻 -##嬅 -##嬉 -##嬌 -##嬗 -##嬛 -##嬢 -##嬤 -##嬪 -##嬰 -##嬴 -##嬷 -##嬸 -##嬿 -##孀 -##孃 -##子 -##孑 -##孔 -##孕 -##孖 -##字 -##存 -##孙 -##孚 -##孛 -##孜 -##孝 -##孟 -##孢 -##季 -##孤 -##学 -##孩 -##孪 -##孫 -##孬 -##孰 -##孱 -##孳 -##孵 -##學 -##孺 -##孽 -##孿 -##宁 -##它 -##宅 -##宇 -##守 -##安 -##宋 -##完 -##宏 -##宓 -##宕 -##宗 -##官 -##宙 -##定 -##宛 -##宜 -##宝 -##实 -##実 -##宠 -##审 -##客 -##宣 -##室 -##宥 -##宦 -##宪 -##宫 -##宮 -##宰 -##害 -##宴 -##宵 -##家 -##宸 -##容 -##宽 -##宾 -##宿 -##寂 -##寄 -##寅 -##密 -##寇 -##富 -##寐 -##寒 -##寓 -##寛 -##寝 -##寞 -##察 -##寡 -##寢 -##寥 -##實 -##寧 -##寨 -##審 -##寫 -##寬 -##寮 -##寰 -##寵 -##寶 -##寸 -##对 -##寺 -##寻 -##导 -##対 -##寿 -##封 -##専 -##射 -##将 -##將 -##專 -##尉 -##尊 -##尋 -##對 -##導 -##小 -##少 -##尔 -##尕 -##尖 -##尘 -##尚 -##尝 -##尤 -##尧 -##尬 -##就 -##尴 -##尷 -##尸 -##尹 -##尺 -##尻 -##尼 -##尽 -##尾 -##尿 -##局 -##屁 -##层 -##屄 -##居 -##屆 -##屈 -##屉 -##届 -##屋 -##屌 -##屍 -##屎 -##屏 -##屐 -##屑 -##展 -##屜 -##属 -##屠 -##屡 -##屢 -##層 -##履 -##屬 -##屯 -##山 -##屹 -##屿 -##岀 -##岁 -##岂 -##岌 -##岐 -##岑 -##岔 -##岖 -##岗 -##岘 -##岙 -##岚 -##岛 -##岡 -##岩 -##岫 -##岬 -##岭 -##岱 -##岳 -##岷 -##岸 -##峇 -##峋 -##峒 -##峙 -##峡 -##峤 -##峥 -##峦 -##峨 -##峪 -##峭 -##峯 -##峰 -##峴 -##島 -##峻 -##峽 -##崁 -##崂 -##崆 -##崇 -##崎 -##崑 -##崔 -##崖 -##崗 -##崙 -##崛 -##崧 -##崩 -##崭 -##崴 -##崽 -##嵇 -##嵊 -##嵋 -##嵌 -##嵐 -##嵘 -##嵩 -##嵬 -##嵯 -##嶂 -##嶄 -##嶇 -##嶋 -##嶙 -##嶺 -##嶼 -##嶽 -##巅 -##巍 -##巒 -##巔 -##巖 -##川 -##州 -##巡 -##巢 -##工 -##左 -##巧 -##巨 -##巩 -##巫 -##差 -##己 -##已 -##巳 -##巴 -##巷 -##巻 -##巽 -##巾 -##巿 -##币 -##市 -##布 -##帅 -##帆 -##师 -##希 -##帐 -##帑 -##帕 -##帖 -##帘 -##帚 -##帛 -##帜 -##帝 -##帥 -##带 -##帧 -##師 -##席 -##帮 -##帯 -##帰 -##帳 -##帶 -##帷 -##常 -##帼 -##帽 -##幀 -##幂 -##幄 -##幅 -##幌 -##幔 -##幕 -##幟 -##幡 -##幢 -##幣 -##幫 -##干 -##平 -##年 -##并 -##幸 -##幹 -##幺 -##幻 -##幼 -##幽 -##幾 -##广 -##庁 -##広 -##庄 -##庆 -##庇 -##床 -##序 -##庐 -##库 -##应 -##底 -##庖 -##店 -##庙 -##庚 -##府 -##庞 -##废 -##庠 -##度 -##座 -##庫 -##庭 -##庵 -##庶 -##康 -##庸 -##庹 -##庾 -##廁 -##廂 -##廃 -##廈 -##廉 -##廊 -##廓 
-##廖 -##廚 -##廝 -##廟 -##廠 -##廢 -##廣 -##廬 -##廳 -##延 -##廷 -##建 -##廿 -##开 -##弁 -##异 -##弃 -##弄 -##弈 -##弊 -##弋 -##式 -##弑 -##弒 -##弓 -##弔 -##引 -##弗 -##弘 -##弛 -##弟 -##张 -##弥 -##弦 -##弧 -##弩 -##弭 -##弯 -##弱 -##張 -##強 -##弹 -##强 -##弼 -##弾 -##彅 -##彆 -##彈 -##彌 -##彎 -##归 -##当 -##录 -##彗 -##彙 -##彝 -##形 -##彤 -##彥 -##彦 -##彧 -##彩 -##彪 -##彫 -##彬 -##彭 -##彰 -##影 -##彷 -##役 -##彻 -##彼 -##彿 -##往 -##征 -##径 -##待 -##徇 -##很 -##徉 -##徊 -##律 -##後 -##徐 -##徑 -##徒 -##従 -##徕 -##得 -##徘 -##徙 -##徜 -##從 -##徠 -##御 -##徨 -##復 -##循 -##徬 -##微 -##徳 -##徴 -##徵 -##德 -##徹 -##徼 -##徽 -##心 -##必 -##忆 -##忌 -##忍 -##忏 -##忐 -##忑 -##忒 -##忖 -##志 -##忘 -##忙 -##応 -##忠 -##忡 -##忤 -##忧 -##忪 -##快 -##忱 -##念 -##忻 -##忽 -##忿 -##怀 -##态 -##怂 -##怅 -##怆 -##怎 -##怏 -##怒 -##怔 -##怕 -##怖 -##怙 -##怜 -##思 -##怠 -##怡 -##急 -##怦 -##性 -##怨 -##怪 -##怯 -##怵 -##总 -##怼 -##恁 -##恃 -##恆 -##恋 -##恍 -##恐 -##恒 -##恕 -##恙 -##恚 -##恢 -##恣 -##恤 -##恥 -##恨 -##恩 -##恪 -##恫 -##恬 -##恭 -##息 -##恰 -##恳 -##恵 -##恶 -##恸 -##恺 -##恻 -##恼 -##恿 -##悄 -##悅 -##悉 -##悌 -##悍 -##悔 -##悖 -##悚 -##悟 -##悠 -##患 -##悦 -##您 -##悩 -##悪 -##悬 -##悯 -##悱 -##悲 -##悴 -##悵 -##悶 -##悸 -##悻 -##悼 -##悽 -##情 -##惆 -##惇 -##惊 -##惋 -##惑 -##惕 -##惘 -##惚 -##惜 -##惟 -##惠 -##惡 -##惦 -##惧 -##惨 -##惩 -##惫 -##惬 -##惭 -##惮 -##惯 -##惰 -##惱 -##想 -##惴 -##惶 -##惹 -##惺 -##愁 -##愆 -##愈 -##愉 -##愍 -##意 -##愕 -##愚 -##愛 -##愜 -##感 -##愣 -##愤 -##愧 -##愫 -##愷 -##愿 -##慄 -##慈 -##態 -##慌 -##慎 -##慑 -##慕 -##慘 -##慚 -##慟 -##慢 -##慣 -##慧 -##慨 -##慫 -##慮 -##慰 -##慳 -##慵 -##慶 -##慷 -##慾 -##憂 -##憊 -##憋 -##憎 -##憐 -##憑 -##憔 -##憚 -##憤 -##憧 -##憨 -##憩 -##憫 -##憬 -##憲 -##憶 -##憾 -##懂 -##懇 -##懈 -##應 -##懊 -##懋 -##懑 -##懒 -##懦 -##懲 -##懵 -##懶 -##懷 -##懸 -##懺 -##懼 -##懾 -##懿 -##戀 -##戈 -##戊 -##戌 -##戍 -##戎 -##戏 -##成 -##我 -##戒 -##戕 -##或 -##战 -##戚 -##戛 -##戟 -##戡 -##戦 -##截 -##戬 -##戮 -##戰 -##戲 -##戳 -##戴 -##戶 -##户 -##戸 -##戻 -##戾 -##房 -##所 -##扁 -##扇 -##扈 -##扉 -##手 -##才 -##扎 -##扑 -##扒 -##打 -##扔 -##払 -##托 -##扛 -##扣 -##扦 -##执 -##扩 -##扪 -##扫 -##扬 -##扭 -##扮 -##扯 -##扰 -##扱 -##扳 -##扶 -##批 -##扼 -##找 -##承 -##技 -##抄 -##抉 -##把 -##抑 -##抒 -##抓 -##投 -##抖 -##抗 -##折 -##抚 -##抛 -##抜 -##択 -##抟 -##抠 -##抡 -##抢 -##护 
-##报 -##抨 -##披 -##抬 -##抱 -##抵 -##抹 -##押 -##抽 -##抿 -##拂 -##拄 -##担 -##拆 -##拇 -##拈 -##拉 -##拋 -##拌 -##拍 -##拎 -##拐 -##拒 -##拓 -##拔 -##拖 -##拗 -##拘 -##拙 -##拚 -##招 -##拜 -##拟 -##拡 -##拢 -##拣 -##拥 -##拦 -##拧 -##拨 -##择 -##括 -##拭 -##拮 -##拯 -##拱 -##拳 -##拴 -##拷 -##拼 -##拽 -##拾 -##拿 -##持 -##挂 -##指 -##挈 -##按 -##挎 -##挑 -##挖 -##挙 -##挚 -##挛 -##挝 -##挞 -##挟 -##挠 -##挡 -##挣 -##挤 -##挥 -##挨 -##挪 -##挫 -##振 -##挲 -##挹 -##挺 -##挽 -##挾 -##捂 -##捅 -##捆 -##捉 -##捋 -##捌 -##捍 -##捎 -##捏 -##捐 -##捕 -##捞 -##损 -##捡 -##换 -##捣 -##捧 -##捨 -##捩 -##据 -##捱 -##捲 -##捶 -##捷 -##捺 -##捻 -##掀 -##掂 -##掃 -##掇 -##授 -##掉 -##掌 -##掏 -##掐 -##排 -##掖 -##掘 -##掙 -##掛 -##掠 -##採 -##探 -##掣 -##接 -##控 -##推 -##掩 -##措 -##掬 -##掰 -##掲 -##掳 -##掴 -##掷 -##掸 -##掺 -##揀 -##揃 -##揄 -##揆 -##揉 -##揍 -##描 -##提 -##插 -##揖 -##揚 -##換 -##握 -##揣 -##揩 -##揪 -##揭 -##揮 -##援 -##揶 -##揸 -##揹 -##揽 -##搀 -##搁 -##搂 -##搅 -##損 -##搏 -##搐 -##搓 -##搔 -##搖 -##搗 -##搜 -##搞 -##搡 -##搪 -##搬 -##搭 -##搵 -##搶 -##携 -##搽 -##摀 -##摁 -##摄 -##摆 -##摇 -##摈 -##摊 -##摒 -##摔 -##摘 -##摞 -##摟 -##摧 -##摩 -##摯 -##摳 -##摸 -##摹 -##摺 -##摻 -##撂 -##撃 -##撅 -##撇 -##撈 -##撐 -##撑 -##撒 -##撓 -##撕 -##撚 -##撞 -##撤 -##撥 -##撩 -##撫 -##撬 -##播 -##撮 -##撰 -##撲 -##撵 -##撷 -##撸 -##撻 -##撼 -##撿 -##擀 -##擁 -##擂 -##擄 -##擅 -##擇 -##擊 -##擋 -##操 -##擎 -##擒 -##擔 -##擘 -##據 -##擞 -##擠 -##擡 -##擢 -##擦 -##擬 -##擰 -##擱 -##擲 -##擴 -##擷 -##擺 -##擼 -##擾 -##攀 -##攏 -##攒 -##攔 -##攘 -##攙 -##攜 -##攝 -##攞 -##攢 -##攣 -##攤 -##攥 -##攪 -##攫 -##攬 -##支 -##收 -##攸 -##改 -##攻 -##放 -##政 -##故 -##效 -##敌 -##敍 -##敎 -##敏 -##救 -##敕 -##敖 -##敗 -##敘 -##教 -##敛 -##敝 -##敞 -##敢 -##散 -##敦 -##敬 -##数 -##敲 -##整 -##敵 -##敷 -##數 -##斂 -##斃 -##文 -##斋 -##斌 -##斎 -##斐 -##斑 -##斓 -##斗 -##料 -##斛 -##斜 -##斟 -##斡 -##斤 -##斥 -##斧 -##斩 -##斫 -##斬 -##断 -##斯 -##新 -##斷 -##方 -##於 -##施 -##旁 -##旃 -##旅 -##旋 -##旌 -##旎 -##族 -##旖 -##旗 -##无 -##既 -##日 -##旦 -##旧 -##旨 -##早 -##旬 -##旭 -##旮 -##旱 -##时 -##旷 -##旺 -##旻 -##昀 -##昂 -##昆 -##昇 -##昉 -##昊 -##昌 -##明 -##昏 -##易 -##昔 -##昕 -##昙 -##星 -##映 -##春 -##昧 -##昨 -##昭 -##是 -##昱 -##昴 -##昵 -##昶 -##昼 -##显 -##晁 -##時 -##晃 -##晉 -##晋 -##晌 -##晏 -##晒 -##晓 -##晔 -##晕 -##晖 -##晗 -##晚 -##晝 -##晞 -##晟 
-##晤 -##晦 -##晨 -##晩 -##普 -##景 -##晰 -##晴 -##晶 -##晷 -##智 -##晾 -##暂 -##暄 -##暇 -##暈 -##暉 -##暌 -##暐 -##暑 -##暖 -##暗 -##暝 -##暢 -##暧 -##暨 -##暫 -##暮 -##暱 -##暴 -##暸 -##暹 -##曄 -##曆 -##曇 -##曉 -##曖 -##曙 -##曜 -##曝 -##曠 -##曦 -##曬 -##曰 -##曲 -##曳 -##更 -##書 -##曹 -##曼 -##曾 -##替 -##最 -##會 -##月 -##有 -##朋 -##服 -##朐 -##朔 -##朕 -##朗 -##望 -##朝 -##期 -##朦 -##朧 -##木 -##未 -##末 -##本 -##札 -##朮 -##术 -##朱 -##朴 -##朵 -##机 -##朽 -##杀 -##杂 -##权 -##杆 -##杈 -##杉 -##李 -##杏 -##材 -##村 -##杓 -##杖 -##杜 -##杞 -##束 -##杠 -##条 -##来 -##杨 -##杭 -##杯 -##杰 -##東 -##杳 -##杵 -##杷 -##杼 -##松 -##板 -##极 -##构 -##枇 -##枉 -##枋 -##析 -##枕 -##林 -##枚 -##果 -##枝 -##枢 -##枣 -##枪 -##枫 -##枭 -##枯 -##枰 -##枱 -##枳 -##架 -##枷 -##枸 -##柄 -##柏 -##某 -##柑 -##柒 -##染 -##柔 -##柘 -##柚 -##柜 -##柞 -##柠 -##柢 -##查 -##柩 -##柬 -##柯 -##柱 -##柳 -##柴 -##柵 -##査 -##柿 -##栀 -##栃 -##栄 -##栅 -##标 -##栈 -##栉 -##栋 -##栎 -##栏 -##树 -##栓 -##栖 -##栗 -##校 -##栩 -##株 -##样 -##核 -##根 -##格 -##栽 -##栾 -##桀 -##桁 -##桂 -##桃 -##桅 -##框 -##案 -##桉 -##桌 -##桎 -##桐 -##桑 -##桓 -##桔 -##桜 -##桠 -##桡 -##桢 -##档 -##桥 -##桦 -##桧 -##桨 -##桩 -##桶 -##桿 -##梁 -##梅 -##梆 -##梏 -##梓 -##梗 -##條 -##梟 -##梢 -##梦 -##梧 -##梨 -##梭 -##梯 -##械 -##梳 -##梵 -##梶 -##检 -##棂 -##棄 -##棉 -##棋 -##棍 -##棒 -##棕 -##棗 -##棘 -##棚 -##棟 -##棠 -##棣 -##棧 -##森 -##棱 -##棲 -##棵 -##棹 -##棺 -##椁 -##椅 -##椋 -##植 -##椎 -##椒 -##検 -##椪 -##椭 -##椰 -##椹 -##椽 -##椿 -##楂 -##楊 -##楓 -##楔 -##楚 -##楝 -##楞 -##楠 -##楣 -##楨 -##楫 -##業 -##楮 -##極 -##楷 -##楸 -##楹 -##楼 -##楽 -##概 -##榄 -##榆 -##榈 -##榉 -##榔 -##榕 -##榖 -##榛 -##榜 -##榨 -##榫 -##榭 -##榮 -##榱 -##榴 -##榷 -##榻 -##槁 -##槃 -##構 -##槌 -##槍 -##槎 -##槐 -##槓 -##様 -##槛 -##槟 -##槤 -##槭 -##槲 -##槳 -##槻 -##槽 -##槿 -##樁 -##樂 -##樊 -##樑 -##樓 -##標 -##樞 -##樟 -##模 -##樣 -##権 -##横 -##樫 -##樯 -##樱 -##樵 -##樸 -##樹 -##樺 -##樽 -##樾 -##橄 -##橇 -##橋 -##橐 -##橘 -##橙 -##機 -##橡 -##橢 -##橫 -##橱 -##橹 -##橼 -##檀 -##檄 -##檎 -##檐 -##檔 -##檗 -##檜 -##檢 -##檬 -##檯 -##檳 -##檸 -##檻 -##櫃 -##櫚 -##櫛 -##櫥 -##櫸 -##櫻 -##欄 -##權 -##欒 -##欖 -##欠 -##次 -##欢 -##欣 -##欧 -##欲 -##欸 -##欺 -##欽 -##款 -##歆 -##歇 -##歉 -##歌 -##歎 -##歐 -##歓 -##歙 -##歛 -##歡 -##止 -##正 -##此 -##步 -##武 -##歧 -##歩 -##歪 -##歯 -##歲 -##歳 -##歴 -##歷 
-##歸 -##歹 -##死 -##歼 -##殁 -##殃 -##殆 -##殇 -##殉 -##殊 -##残 -##殒 -##殓 -##殖 -##殘 -##殞 -##殡 -##殤 -##殭 -##殯 -##殲 -##殴 -##段 -##殷 -##殺 -##殼 -##殿 -##毀 -##毁 -##毂 -##毅 -##毆 -##毋 -##母 -##毎 -##每 -##毒 -##毓 -##比 -##毕 -##毗 -##毘 -##毙 -##毛 -##毡 -##毫 -##毯 -##毽 -##氈 -##氏 -##氐 -##民 -##氓 -##气 -##氖 -##気 -##氙 -##氛 -##氟 -##氡 -##氢 -##氣 -##氤 -##氦 -##氧 -##氨 -##氪 -##氫 -##氮 -##氯 -##氰 -##氲 -##水 -##氷 -##永 -##氹 -##氾 -##汀 -##汁 -##求 -##汆 -##汇 -##汉 -##汎 -##汐 -##汕 -##汗 -##汙 -##汛 -##汝 -##汞 -##江 -##池 -##污 -##汤 -##汨 -##汩 -##汪 -##汰 -##汲 -##汴 -##汶 -##汹 -##決 -##汽 -##汾 -##沁 -##沂 -##沃 -##沅 -##沈 -##沉 -##沌 -##沏 -##沐 -##沒 -##沓 -##沖 -##沙 -##沛 -##沟 -##没 -##沢 -##沣 -##沥 -##沦 -##沧 -##沪 -##沫 -##沭 -##沮 -##沱 -##河 -##沸 -##油 -##治 -##沼 -##沽 -##沾 -##沿 -##況 -##泄 -##泉 -##泊 -##泌 -##泓 -##法 -##泗 -##泛 -##泞 -##泠 -##泡 -##波 -##泣 -##泥 -##注 -##泪 -##泫 -##泮 -##泯 -##泰 -##泱 -##泳 -##泵 -##泷 -##泸 -##泻 -##泼 -##泽 -##泾 -##洁 -##洄 -##洋 -##洒 -##洗 -##洙 -##洛 -##洞 -##津 -##洩 -##洪 -##洮 -##洱 -##洲 -##洵 -##洶 -##洸 -##洹 -##活 -##洼 -##洽 -##派 -##流 -##浃 -##浄 -##浅 -##浆 -##浇 -##浊 -##测 -##济 -##浏 -##浑 -##浒 -##浓 -##浔 -##浙 -##浚 -##浜 -##浣 -##浦 -##浩 -##浪 -##浬 -##浮 -##浯 -##浴 -##海 -##浸 -##涂 -##涅 -##涇 -##消 -##涉 -##涌 -##涎 -##涓 -##涔 -##涕 -##涙 -##涛 -##涝 -##涞 -##涟 -##涠 -##涡 -##涣 -##涤 -##润 -##涧 -##涨 -##涩 -##涪 -##涮 -##涯 -##液 -##涵 -##涸 -##涼 -##涿 -##淀 -##淄 -##淅 -##淆 -##淇 -##淋 -##淌 -##淑 -##淒 -##淖 -##淘 -##淙 -##淚 -##淞 -##淡 -##淤 -##淦 -##淨 -##淩 -##淪 -##淫 -##淬 -##淮 -##深 -##淳 -##淵 -##混 -##淹 -##淺 -##添 -##淼 -##清 -##済 -##渉 -##渊 -##渋 -##渍 -##渎 -##渐 -##渔 -##渗 -##渙 -##渚 -##減 -##渝 -##渠 -##渡 -##渣 -##渤 -##渥 -##渦 -##温 -##測 -##渭 -##港 -##渲 -##渴 -##游 -##渺 -##渾 -##湃 -##湄 -##湊 -##湍 -##湖 -##湘 -##湛 -##湟 -##湧 -##湫 -##湮 -##湯 -##湳 -##湾 -##湿 -##満 -##溃 -##溅 -##溉 -##溏 -##源 -##準 -##溜 -##溝 -##溟 -##溢 -##溥 -##溧 -##溪 -##溫 -##溯 -##溱 -##溴 -##溶 -##溺 -##溼 -##滁 -##滂 -##滄 -##滅 -##滇 -##滋 -##滌 -##滑 -##滓 -##滔 -##滕 -##滙 -##滚 -##滝 -##滞 -##滟 -##满 -##滢 -##滤 -##滥 -##滦 -##滨 -##滩 -##滬 -##滯 -##滲 -##滴 -##滷 -##滸 -##滾 -##滿 -##漁 -##漂 -##漆 -##漉 -##漏 -##漓 -##演 -##漕 -##漠 -##漢 -##漣 -##漩 -##漪 -##漫 -##漬 -##漯 -##漱 -##漲 -##漳 -##漸 -##漾 -##漿 -##潆 
-##潇 -##潋 -##潍 -##潑 -##潔 -##潘 -##潛 -##潜 -##潞 -##潟 -##潢 -##潤 -##潦 -##潧 -##潭 -##潮 -##潰 -##潴 -##潸 -##潺 -##潼 -##澀 -##澄 -##澆 -##澈 -##澍 -##澎 -##澗 -##澜 -##澡 -##澤 -##澧 -##澱 -##澳 -##澹 -##激 -##濁 -##濂 -##濃 -##濑 -##濒 -##濕 -##濘 -##濛 -##濟 -##濠 -##濡 -##濤 -##濫 -##濬 -##濮 -##濯 -##濱 -##濺 -##濾 -##瀅 -##瀆 -##瀉 -##瀋 -##瀏 -##瀑 -##瀕 -##瀘 -##瀚 -##瀛 -##瀝 -##瀞 -##瀟 -##瀧 -##瀨 -##瀬 -##瀰 -##瀾 -##灌 -##灏 -##灑 -##灘 -##灝 -##灞 -##灣 -##火 -##灬 -##灭 -##灯 -##灰 -##灵 -##灶 -##灸 -##灼 -##災 -##灾 -##灿 -##炀 -##炁 -##炅 -##炉 -##炊 -##炎 -##炒 -##炔 -##炕 -##炖 -##炙 -##炜 -##炫 -##炬 -##炭 -##炮 -##炯 -##炳 -##炷 -##炸 -##点 -##為 -##炼 -##炽 -##烁 -##烂 -##烃 -##烈 -##烊 -##烏 -##烘 -##烙 -##烛 -##烟 -##烤 -##烦 -##烧 -##烨 -##烩 -##烫 -##烬 -##热 -##烯 -##烷 -##烹 -##烽 -##焉 -##焊 -##焕 -##焖 -##焗 -##焘 -##焙 -##焚 -##焜 -##無 -##焦 -##焯 -##焰 -##焱 -##然 -##焼 -##煅 -##煉 -##煊 -##煌 -##煎 -##煒 -##煖 -##煙 -##煜 -##煞 -##煤 -##煥 -##煦 -##照 -##煨 -##煩 -##煮 -##煲 -##煸 -##煽 -##熄 -##熊 -##熏 -##熒 -##熔 -##熙 -##熟 -##熠 -##熨 -##熬 -##熱 -##熵 -##熹 -##熾 -##燁 -##燃 -##燄 -##燈 -##燉 -##燊 -##燎 -##燒 -##燔 -##燕 -##燙 -##燜 -##營 -##燥 -##燦 -##燧 -##燭 -##燮 -##燴 -##燻 -##燼 -##燿 -##爆 -##爍 -##爐 -##爛 -##爪 -##爬 -##爭 -##爰 -##爱 -##爲 -##爵 -##父 -##爷 -##爸 -##爹 -##爺 -##爻 -##爽 -##爾 -##牆 -##片 -##版 -##牌 -##牍 -##牒 -##牙 -##牛 -##牝 -##牟 -##牠 -##牡 -##牢 -##牦 -##牧 -##物 -##牯 -##牲 -##牴 -##牵 -##特 -##牺 -##牽 -##犀 -##犁 -##犄 -##犊 -##犍 -##犒 -##犢 -##犧 -##犬 -##犯 -##状 -##犷 -##犸 -##犹 -##狀 -##狂 -##狄 -##狈 -##狎 -##狐 -##狒 -##狗 -##狙 -##狞 -##狠 -##狡 -##狩 -##独 -##狭 -##狮 -##狰 -##狱 -##狸 -##狹 -##狼 -##狽 -##猎 -##猕 -##猖 -##猗 -##猙 -##猛 -##猜 -##猝 -##猥 -##猩 -##猪 -##猫 -##猬 -##献 -##猴 -##猶 -##猷 -##猾 -##猿 -##獄 -##獅 -##獎 -##獐 -##獒 -##獗 -##獠 -##獣 -##獨 -##獭 -##獰 -##獲 -##獵 -##獷 -##獸 -##獺 -##獻 -##獼 -##獾 -##玄 -##率 -##玉 -##王 -##玑 -##玖 -##玛 -##玟 -##玠 -##玥 -##玩 -##玫 -##玮 -##环 -##现 -##玲 -##玳 -##玷 -##玺 -##玻 -##珀 -##珂 -##珅 -##珈 -##珉 -##珊 -##珍 -##珏 -##珐 -##珑 -##珙 -##珞 -##珠 -##珣 -##珥 -##珩 -##珪 -##班 -##珮 -##珲 -##珺 -##現 -##球 -##琅 -##理 -##琇 -##琉 -##琊 -##琍 -##琏 -##琐 -##琛 -##琢 -##琥 -##琦 -##琨 -##琪 -##琬 -##琮 -##琰 -##琲 -##琳 -##琴 -##琵 -##琶 -##琺 -##琼 -##瑀 -##瑁 -##瑄 -##瑋 -##瑕 -##瑗 -##瑙 
-##瑚 -##瑛 -##瑜 -##瑞 -##瑟 -##瑠 -##瑣 -##瑤 -##瑩 -##瑪 -##瑯 -##瑰 -##瑶 -##瑾 -##璀 -##璁 -##璃 -##璇 -##璉 -##璋 -##璎 -##璐 -##璜 -##璞 -##璟 -##璧 -##璨 -##環 -##璽 -##璿 -##瓊 -##瓏 -##瓒 -##瓜 -##瓢 -##瓣 -##瓤 -##瓦 -##瓮 -##瓯 -##瓴 -##瓶 -##瓷 -##甄 -##甌 -##甕 -##甘 -##甙 -##甚 -##甜 -##生 -##產 -##産 -##甥 -##甦 -##用 -##甩 -##甫 -##甬 -##甭 -##甯 -##田 -##由 -##甲 -##申 -##电 -##男 -##甸 -##町 -##画 -##甾 -##畀 -##畅 -##界 -##畏 -##畑 -##畔 -##留 -##畜 -##畝 -##畢 -##略 -##畦 -##番 -##畫 -##異 -##畲 -##畳 -##畴 -##當 -##畸 -##畹 -##畿 -##疆 -##疇 -##疊 -##疏 -##疑 -##疔 -##疖 -##疗 -##疙 -##疚 -##疝 -##疟 -##疡 -##疣 -##疤 -##疥 -##疫 -##疮 -##疯 -##疱 -##疲 -##疳 -##疵 -##疸 -##疹 -##疼 -##疽 -##疾 -##痂 -##病 -##症 -##痈 -##痉 -##痊 -##痍 -##痒 -##痔 -##痕 -##痘 -##痙 -##痛 -##痞 -##痠 -##痢 -##痣 -##痤 -##痧 -##痨 -##痪 -##痫 -##痰 -##痱 -##痴 -##痹 -##痺 -##痼 -##痿 -##瘀 -##瘁 -##瘋 -##瘍 -##瘓 -##瘘 -##瘙 -##瘟 -##瘠 -##瘡 -##瘢 -##瘤 -##瘦 -##瘧 -##瘩 -##瘪 -##瘫 -##瘴 -##瘸 -##瘾 -##療 -##癇 -##癌 -##癒 -##癖 -##癜 -##癞 -##癡 -##癢 -##癣 -##癥 -##癫 -##癬 -##癮 -##癱 -##癲 -##癸 -##発 -##登 -##發 -##白 -##百 -##皂 -##的 -##皆 -##皇 -##皈 -##皋 -##皎 -##皑 -##皓 -##皖 -##皙 -##皚 -##皮 -##皰 -##皱 -##皴 -##皺 -##皿 -##盂 -##盃 -##盅 -##盆 -##盈 -##益 -##盎 -##盏 -##盐 -##监 -##盒 -##盔 -##盖 -##盗 -##盘 -##盛 -##盜 -##盞 -##盟 -##盡 -##監 -##盤 -##盥 -##盧 -##盪 -##目 -##盯 -##盱 -##盲 -##直 -##相 -##盹 -##盼 -##盾 -##省 -##眈 -##眉 -##看 -##県 -##眙 -##眞 -##真 -##眠 -##眦 -##眨 -##眩 -##眯 -##眶 -##眷 -##眸 -##眺 -##眼 -##眾 -##着 -##睁 -##睇 -##睏 -##睐 -##睑 -##睛 -##睜 -##睞 -##睡 -##睢 -##督 -##睥 -##睦 -##睨 -##睪 -##睫 -##睬 -##睹 -##睽 -##睾 -##睿 -##瞄 -##瞅 -##瞇 -##瞋 -##瞌 -##瞎 -##瞑 -##瞒 -##瞓 -##瞞 -##瞟 -##瞠 -##瞥 -##瞧 -##瞩 -##瞪 -##瞬 -##瞭 -##瞰 -##瞳 -##瞻 -##瞼 -##瞿 -##矇 -##矍 -##矗 -##矚 -##矛 -##矜 -##矢 -##矣 -##知 -##矩 -##矫 -##短 -##矮 -##矯 -##石 -##矶 -##矽 -##矾 -##矿 -##码 -##砂 -##砌 -##砍 -##砒 -##研 -##砖 -##砗 -##砚 -##砝 -##砣 -##砥 -##砧 -##砭 -##砰 -##砲 -##破 -##砷 -##砸 -##砺 -##砼 -##砾 -##础 -##硅 -##硐 -##硒 -##硕 -##硝 -##硫 -##硬 -##确 -##硯 -##硼 -##碁 -##碇 -##碉 -##碌 -##碍 -##碎 -##碑 -##碓 -##碗 -##碘 -##碚 -##碛 -##碟 -##碣 -##碧 -##碩 -##碰 -##碱 -##碳 -##碴 -##確 -##碼 -##碾 -##磁 -##磅 -##磊 -##磋 -##磐 -##磕 -##磚 -##磡 -##磨 -##磬 -##磯 -##磲 -##磷 -##磺 -##礁 -##礎 -##礙 
-##礡 -##礦 -##礪 -##礫 -##礴 -##示 -##礼 -##社 -##祀 -##祁 -##祂 -##祇 -##祈 -##祉 -##祎 -##祐 -##祕 -##祖 -##祗 -##祚 -##祛 -##祜 -##祝 -##神 -##祟 -##祠 -##祢 -##祥 -##票 -##祭 -##祯 -##祷 -##祸 -##祺 -##祿 -##禀 -##禁 -##禄 -##禅 -##禍 -##禎 -##福 -##禛 -##禦 -##禧 -##禪 -##禮 -##禱 -##禹 -##禺 -##离 -##禽 -##禾 -##禿 -##秀 -##私 -##秃 -##秆 -##秉 -##秋 -##种 -##科 -##秒 -##秘 -##租 -##秣 -##秤 -##秦 -##秧 -##秩 -##秭 -##积 -##称 -##秸 -##移 -##秽 -##稀 -##稅 -##程 -##稍 -##税 -##稔 -##稗 -##稚 -##稜 -##稞 -##稟 -##稠 -##稣 -##種 -##稱 -##稲 -##稳 -##稷 -##稹 -##稻 -##稼 -##稽 -##稿 -##穀 -##穂 -##穆 -##穌 -##積 -##穎 -##穗 -##穢 -##穩 -##穫 -##穴 -##究 -##穷 -##穹 -##空 -##穿 -##突 -##窃 -##窄 -##窈 -##窍 -##窑 -##窒 -##窓 -##窕 -##窖 -##窗 -##窘 -##窜 -##窝 -##窟 -##窠 -##窥 -##窦 -##窨 -##窩 -##窪 -##窮 -##窯 -##窺 -##窿 -##竄 -##竅 -##竇 -##竊 -##立 -##竖 -##站 -##竜 -##竞 -##竟 -##章 -##竣 -##童 -##竭 -##端 -##競 -##竹 -##竺 -##竽 -##竿 -##笃 -##笆 -##笈 -##笋 -##笏 -##笑 -##笔 -##笙 -##笛 -##笞 -##笠 -##符 -##笨 -##第 -##笹 -##笺 -##笼 -##筆 -##等 -##筊 -##筋 -##筍 -##筏 -##筐 -##筑 -##筒 -##答 -##策 -##筛 -##筝 -##筠 -##筱 -##筲 -##筵 -##筷 -##筹 -##签 -##简 -##箇 -##箋 -##箍 -##箏 -##箐 -##箔 -##箕 -##算 -##箝 -##管 -##箩 -##箫 -##箭 -##箱 -##箴 -##箸 -##節 -##篁 -##範 -##篆 -##篇 -##築 -##篑 -##篓 -##篙 -##篝 -##篠 -##篡 -##篤 -##篩 -##篪 -##篮 -##篱 -##篷 -##簇 -##簌 -##簍 -##簡 -##簦 -##簧 -##簪 -##簫 -##簷 -##簸 -##簽 -##簾 -##簿 -##籁 -##籃 -##籌 -##籍 -##籐 -##籟 -##籠 -##籤 -##籬 -##籮 -##籲 -##米 -##类 -##籼 -##籽 -##粄 -##粉 -##粑 -##粒 -##粕 -##粗 -##粘 -##粟 -##粤 -##粥 -##粧 -##粪 -##粮 -##粱 -##粲 -##粳 -##粵 -##粹 -##粼 -##粽 -##精 -##粿 -##糅 -##糊 -##糍 -##糕 -##糖 -##糗 -##糙 -##糜 -##糞 -##糟 -##糠 -##糧 -##糬 -##糯 -##糰 -##糸 -##系 -##糾 -##紀 -##紂 -##約 -##紅 -##紉 -##紊 -##紋 -##納 -##紐 -##紓 -##純 -##紗 -##紘 -##紙 -##級 -##紛 -##紜 -##素 -##紡 -##索 -##紧 -##紫 -##紮 -##累 -##細 -##紳 -##紹 -##紺 -##終 -##絃 -##組 -##絆 -##経 -##結 -##絕 -##絞 -##絡 -##絢 -##給 -##絨 -##絮 -##統 -##絲 -##絳 -##絵 -##絶 -##絹 -##綁 -##綏 -##綑 -##經 -##継 -##続 -##綜 -##綠 -##綢 -##綦 -##綫 -##綬 -##維 -##綱 -##網 -##綴 -##綵 -##綸 -##綺 -##綻 -##綽 -##綾 -##綿 -##緊 -##緋 -##総 -##緑 -##緒 -##緘 -##線 -##緝 -##緞 -##締 -##緣 -##編 -##緩 -##緬 -##緯 -##練 -##緹 -##緻 -##縁 -##縄 -##縈 -##縛 -##縝 -##縣 -##縫 -##縮 -##縱 -##縴 -##縷 -##總 
-##績 -##繁 -##繃 -##繆 -##繇 -##繋 -##織 -##繕 -##繚 -##繞 -##繡 -##繩 -##繪 -##繫 -##繭 -##繳 -##繹 -##繼 -##繽 -##纂 -##續 -##纍 -##纏 -##纓 -##纔 -##纖 -##纜 -##纠 -##红 -##纣 -##纤 -##约 -##级 -##纨 -##纪 -##纫 -##纬 -##纭 -##纯 -##纰 -##纱 -##纲 -##纳 -##纵 -##纶 -##纷 -##纸 -##纹 -##纺 -##纽 -##纾 -##线 -##绀 -##练 -##组 -##绅 -##细 -##织 -##终 -##绊 -##绍 -##绎 -##经 -##绑 -##绒 -##结 -##绔 -##绕 -##绘 -##给 -##绚 -##绛 -##络 -##绝 -##绞 -##统 -##绡 -##绢 -##绣 -##绥 -##绦 -##继 -##绩 -##绪 -##绫 -##续 -##绮 -##绯 -##绰 -##绳 -##维 -##绵 -##绶 -##绷 -##绸 -##绻 -##综 -##绽 -##绾 -##绿 -##缀 -##缄 -##缅 -##缆 -##缇 -##缈 -##缉 -##缎 -##缓 -##缔 -##缕 -##编 -##缘 -##缙 -##缚 -##缜 -##缝 -##缠 -##缢 -##缤 -##缥 -##缨 -##缩 -##缪 -##缭 -##缮 -##缰 -##缱 -##缴 -##缸 -##缺 -##缽 -##罂 -##罄 -##罌 -##罐 -##网 -##罔 -##罕 -##罗 -##罚 -##罡 -##罢 -##罩 -##罪 -##置 -##罰 -##署 -##罵 -##罷 -##罹 -##羁 -##羅 -##羈 -##羊 -##羌 -##美 -##羔 -##羚 -##羞 -##羟 -##羡 -##羣 -##群 -##羥 -##羧 -##羨 -##義 -##羯 -##羲 -##羸 -##羹 -##羽 -##羿 -##翁 -##翅 -##翊 -##翌 -##翎 -##習 -##翔 -##翘 -##翟 -##翠 -##翡 -##翦 -##翩 -##翰 -##翱 -##翳 -##翹 -##翻 -##翼 -##耀 -##老 -##考 -##耄 -##者 -##耆 -##耋 -##而 -##耍 -##耐 -##耒 -##耕 -##耗 -##耘 -##耙 -##耦 -##耨 -##耳 -##耶 -##耷 -##耸 -##耻 -##耽 -##耿 -##聂 -##聆 -##聊 -##聋 -##职 -##聒 -##联 -##聖 -##聘 -##聚 -##聞 -##聪 -##聯 -##聰 -##聲 -##聳 -##聴 -##聶 -##職 -##聽 -##聾 -##聿 -##肃 -##肄 -##肅 -##肆 -##肇 -##肉 -##肋 -##肌 -##肏 -##肓 -##肖 -##肘 -##肚 -##肛 -##肝 -##肠 -##股 -##肢 -##肤 -##肥 -##肩 -##肪 -##肮 -##肯 -##肱 -##育 -##肴 -##肺 -##肽 -##肾 -##肿 -##胀 -##胁 -##胃 -##胄 -##胆 -##背 -##胍 -##胎 -##胖 -##胚 -##胛 -##胜 -##胝 -##胞 -##胡 -##胤 -##胥 -##胧 -##胫 -##胭 -##胯 -##胰 -##胱 -##胳 -##胴 -##胶 -##胸 -##胺 -##能 -##脂 -##脅 -##脆 -##脇 -##脈 -##脉 -##脊 -##脍 -##脏 -##脐 -##脑 -##脓 -##脖 -##脘 -##脚 -##脛 -##脣 -##脩 -##脫 -##脯 -##脱 -##脲 -##脳 -##脸 -##脹 -##脾 -##腆 -##腈 -##腊 -##腋 -##腌 -##腎 -##腐 -##腑 -##腓 -##腔 -##腕 -##腥 -##腦 -##腩 -##腫 -##腭 -##腮 -##腰 -##腱 -##腳 -##腴 -##腸 -##腹 -##腺 -##腻 -##腼 -##腾 -##腿 -##膀 -##膈 -##膊 -##膏 -##膑 -##膘 -##膚 -##膛 -##膜 -##膝 -##膠 -##膦 -##膨 -##膩 -##膳 -##膺 -##膻 -##膽 -##膾 -##膿 -##臀 -##臂 -##臃 -##臆 -##臉 -##臊 -##臍 -##臓 -##臘 -##臟 -##臣 -##臥 -##臧 -##臨 -##自 -##臬 -##臭 -##至 -##致 -##臺 -##臻 -##臼 -##臾 -##舀 -##舂 -##舅 -##舆 
-##與 -##興 -##舉 -##舊 -##舌 -##舍 -##舎 -##舐 -##舒 -##舔 -##舖 -##舗 -##舛 -##舜 -##舞 -##舟 -##航 -##舫 -##般 -##舰 -##舱 -##舵 -##舶 -##舷 -##舸 -##船 -##舺 -##舾 -##艇 -##艋 -##艘 -##艙 -##艦 -##艮 -##良 -##艰 -##艱 -##色 -##艳 -##艷 -##艹 -##艺 -##艾 -##节 -##芃 -##芈 -##芊 -##芋 -##芍 -##芎 -##芒 -##芙 -##芜 -##芝 -##芡 -##芥 -##芦 -##芩 -##芪 -##芫 -##芬 -##芭 -##芮 -##芯 -##花 -##芳 -##芷 -##芸 -##芹 -##芻 -##芽 -##芾 -##苁 -##苄 -##苇 -##苋 -##苍 -##苏 -##苑 -##苒 -##苓 -##苔 -##苕 -##苗 -##苛 -##苜 -##苞 -##苟 -##苡 -##苣 -##若 -##苦 -##苫 -##苯 -##英 -##苷 -##苹 -##苻 -##茁 -##茂 -##范 -##茄 -##茅 -##茉 -##茎 -##茏 -##茗 -##茜 -##茧 -##茨 -##茫 -##茬 -##茭 -##茯 -##茱 -##茲 -##茴 -##茵 -##茶 -##茸 -##茹 -##茼 -##荀 -##荃 -##荆 -##草 -##荊 -##荏 -##荐 -##荒 -##荔 -##荖 -##荘 -##荚 -##荞 -##荟 -##荠 -##荡 -##荣 -##荤 -##荥 -##荧 -##荨 -##荪 -##荫 -##药 -##荳 -##荷 -##荸 -##荻 -##荼 -##荽 -##莅 -##莆 -##莉 -##莊 -##莎 -##莒 -##莓 -##莖 -##莘 -##莞 -##莠 -##莢 -##莧 -##莪 -##莫 -##莱 -##莲 -##莴 -##获 -##莹 -##莺 -##莽 -##莿 -##菀 -##菁 -##菅 -##菇 -##菈 -##菊 -##菌 -##菏 -##菓 -##菖 -##菘 -##菜 -##菟 -##菠 -##菡 -##菩 -##華 -##菱 -##菲 -##菸 -##菽 -##萁 -##萃 -##萄 -##萊 -##萋 -##萌 -##萍 -##萎 -##萘 -##萝 -##萤 -##营 -##萦 -##萧 -##萨 -##萩 -##萬 -##萱 -##萵 -##萸 -##萼 -##落 -##葆 -##葉 -##著 -##葚 -##葛 -##葡 -##董 -##葦 -##葩 -##葫 -##葬 -##葭 -##葯 -##葱 -##葳 -##葵 -##葷 -##葺 -##蒂 -##蒋 -##蒐 -##蒔 -##蒙 -##蒜 -##蒞 -##蒟 -##蒡 -##蒨 -##蒲 -##蒸 -##蒹 -##蒻 -##蒼 -##蒿 -##蓁 -##蓄 -##蓆 -##蓉 -##蓋 -##蓑 -##蓓 -##蓖 -##蓝 -##蓟 -##蓦 -##蓬 -##蓮 -##蓼 -##蓿 -##蔑 -##蔓 -##蔔 -##蔗 -##蔘 -##蔚 -##蔡 -##蔣 -##蔥 -##蔫 -##蔬 -##蔭 -##蔵 -##蔷 -##蔺 -##蔻 -##蔼 -##蔽 -##蕁 -##蕃 -##蕈 -##蕉 -##蕊 -##蕎 -##蕙 -##蕤 -##蕨 -##蕩 -##蕪 -##蕭 -##蕲 -##蕴 -##蕻 -##蕾 -##薄 -##薅 -##薇 -##薈 -##薊 -##薏 -##薑 -##薔 -##薙 -##薛 -##薦 -##薨 -##薩 -##薪 -##薬 -##薯 -##薰 -##薹 -##藉 -##藍 -##藏 -##藐 -##藓 -##藕 -##藜 -##藝 -##藤 -##藥 -##藩 -##藹 -##藻 -##藿 -##蘆 -##蘇 -##蘊 -##蘋 -##蘑 -##蘚 -##蘭 -##蘸 -##蘼 -##蘿 -##虎 -##虏 -##虐 -##虑 -##虔 -##處 -##虚 -##虛 -##虜 -##虞 -##號 -##虢 -##虧 -##虫 -##虬 -##虱 -##虹 -##虻 -##虽 -##虾 -##蚀 -##蚁 -##蚂 -##蚊 -##蚌 -##蚓 -##蚕 -##蚜 -##蚝 -##蚣 -##蚤 -##蚩 -##蚪 -##蚯 -##蚱 -##蚵 -##蛀 -##蛆 -##蛇 -##蛊 -##蛋 -##蛎 -##蛐 -##蛔 -##蛙 -##蛛 -##蛟 -##蛤 -##蛭 -##蛮 -##蛰 -##蛳 -##蛹 -##蛻 -##蛾 -##蜀 -##蜂 
-##蜃 -##蜆 -##蜇 -##蜈 -##蜊 -##蜍 -##蜒 -##蜓 -##蜕 -##蜗 -##蜘 -##蜚 -##蜜 -##蜡 -##蜢 -##蜥 -##蜱 -##蜴 -##蜷 -##蜻 -##蜿 -##蝇 -##蝈 -##蝉 -##蝌 -##蝎 -##蝕 -##蝗 -##蝙 -##蝟 -##蝠 -##蝦 -##蝨 -##蝴 -##蝶 -##蝸 -##蝼 -##螂 -##螃 -##融 -##螞 -##螢 -##螨 -##螯 -##螳 -##螺 -##蟀 -##蟄 -##蟆 -##蟋 -##蟎 -##蟑 -##蟒 -##蟠 -##蟬 -##蟲 -##蟹 -##蟻 -##蟾 -##蠅 -##蠍 -##蠔 -##蠕 -##蠛 -##蠟 -##蠡 -##蠢 -##蠣 -##蠱 -##蠶 -##蠹 -##蠻 -##血 -##衄 -##衅 -##衆 -##行 -##衍 -##術 -##衔 -##街 -##衙 -##衛 -##衝 -##衞 -##衡 -##衢 -##衣 -##补 -##表 -##衩 -##衫 -##衬 -##衮 -##衰 -##衲 -##衷 -##衹 -##衾 -##衿 -##袁 -##袂 -##袄 -##袅 -##袈 -##袋 -##袍 -##袒 -##袖 -##袜 -##袞 -##袤 -##袪 -##被 -##袭 -##袱 -##裁 -##裂 -##装 -##裆 -##裊 -##裏 -##裔 -##裕 -##裘 -##裙 -##補 -##裝 -##裟 -##裡 -##裤 -##裨 -##裱 -##裳 -##裴 -##裸 -##裹 -##製 -##裾 -##褂 -##複 -##褐 -##褒 -##褓 -##褔 -##褚 -##褥 -##褪 -##褫 -##褲 -##褶 -##褻 -##襁 -##襄 -##襟 -##襠 -##襪 -##襬 -##襯 -##襲 -##西 -##要 -##覃 -##覆 -##覇 -##見 -##規 -##覓 -##視 -##覚 -##覦 -##覧 -##親 -##覬 -##観 -##覷 -##覺 -##覽 -##觀 -##见 -##观 -##规 -##觅 -##视 -##览 -##觉 -##觊 -##觎 -##觐 -##觑 -##角 -##觞 -##解 -##觥 -##触 -##觸 -##言 -##訂 -##計 -##訊 -##討 -##訓 -##訕 -##訖 -##託 -##記 -##訛 -##訝 -##訟 -##訣 -##訥 -##訪 -##設 -##許 -##訳 -##訴 -##訶 -##診 -##註 -##証 -##詆 -##詐 -##詔 -##評 -##詛 -##詞 -##詠 -##詡 -##詢 -##詣 -##試 -##詩 -##詫 -##詬 -##詭 -##詮 -##詰 -##話 -##該 -##詳 -##詹 -##詼 -##誅 -##誇 -##誉 -##誌 -##認 -##誓 -##誕 -##誘 -##語 -##誠 -##誡 -##誣 -##誤 -##誥 -##誦 -##誨 -##說 -##説 -##読 -##誰 -##課 -##誹 -##誼 -##調 -##諄 -##談 -##請 -##諏 -##諒 -##論 -##諗 -##諜 -##諡 -##諦 -##諧 -##諫 -##諭 -##諮 -##諱 -##諳 -##諷 -##諸 -##諺 -##諾 -##謀 -##謁 -##謂 -##謄 -##謊 -##謎 -##謐 -##謔 -##謗 -##謙 -##講 -##謝 -##謠 -##謨 -##謬 -##謹 -##謾 -##譁 -##證 -##譎 -##譏 -##識 -##譙 -##譚 -##譜 -##警 -##譬 -##譯 -##議 -##譲 -##譴 -##護 -##譽 -##讀 -##變 -##讓 -##讚 -##讞 -##计 -##订 -##认 -##讥 -##讧 -##讨 -##让 -##讪 -##讫 -##训 -##议 -##讯 -##记 -##讲 -##讳 -##讴 -##讶 -##讷 -##许 -##讹 -##论 -##讼 -##讽 -##设 -##访 -##诀 -##证 -##诃 -##评 -##诅 -##识 -##诈 -##诉 -##诊 -##诋 -##词 -##诏 -##译 -##试 -##诗 -##诘 -##诙 -##诚 -##诛 -##话 -##诞 -##诟 -##诠 -##诡 -##询 -##诣 -##诤 -##该 -##详 -##诧 -##诩 -##诫 -##诬 -##语 -##误 -##诰 -##诱 -##诲 -##说 -##诵 -##诶 -##请 -##诸 -##诺 -##读 -##诽 -##课 -##诿 -##谀 -##谁 -##调 
-##谄 -##谅 -##谆 -##谈 -##谊 -##谋 -##谌 -##谍 -##谎 -##谏 -##谐 -##谑 -##谒 -##谓 -##谔 -##谕 -##谗 -##谘 -##谙 -##谚 -##谛 -##谜 -##谟 -##谢 -##谣 -##谤 -##谥 -##谦 -##谧 -##谨 -##谩 -##谪 -##谬 -##谭 -##谯 -##谱 -##谲 -##谴 -##谶 -##谷 -##豁 -##豆 -##豇 -##豈 -##豉 -##豊 -##豌 -##豎 -##豐 -##豔 -##豚 -##象 -##豢 -##豪 -##豫 -##豬 -##豹 -##豺 -##貂 -##貅 -##貌 -##貓 -##貔 -##貘 -##貝 -##貞 -##負 -##財 -##貢 -##貧 -##貨 -##販 -##貪 -##貫 -##責 -##貯 -##貰 -##貳 -##貴 -##貶 -##買 -##貸 -##費 -##貼 -##貽 -##貿 -##賀 -##賁 -##賂 -##賃 -##賄 -##資 -##賈 -##賊 -##賑 -##賓 -##賜 -##賞 -##賠 -##賡 -##賢 -##賣 -##賤 -##賦 -##質 -##賬 -##賭 -##賴 -##賺 -##購 -##賽 -##贅 -##贈 -##贊 -##贍 -##贏 -##贓 -##贖 -##贛 -##贝 -##贞 -##负 -##贡 -##财 -##责 -##贤 -##败 -##账 -##货 -##质 -##贩 -##贪 -##贫 -##贬 -##购 -##贮 -##贯 -##贰 -##贱 -##贲 -##贴 -##贵 -##贷 -##贸 -##费 -##贺 -##贻 -##贼 -##贾 -##贿 -##赁 -##赂 -##赃 -##资 -##赅 -##赈 -##赊 -##赋 -##赌 -##赎 -##赏 -##赐 -##赓 -##赔 -##赖 -##赘 -##赚 -##赛 -##赝 -##赞 -##赠 -##赡 -##赢 -##赣 -##赤 -##赦 -##赧 -##赫 -##赭 -##走 -##赳 -##赴 -##赵 -##赶 -##起 -##趁 -##超 -##越 -##趋 -##趕 -##趙 -##趟 -##趣 -##趨 -##足 -##趴 -##趵 -##趸 -##趺 -##趾 -##跃 -##跄 -##跆 -##跋 -##跌 -##跎 -##跑 -##跖 -##跚 -##跛 -##距 -##跟 -##跡 -##跤 -##跨 -##跩 -##跪 -##路 -##跳 -##践 -##跷 -##跹 -##跺 -##跻 -##踉 -##踊 -##踌 -##踏 -##踐 -##踝 -##踞 -##踟 -##踢 -##踩 -##踪 -##踮 -##踱 -##踴 -##踵 -##踹 -##蹂 -##蹄 -##蹇 -##蹈 -##蹉 -##蹊 -##蹋 -##蹑 -##蹒 -##蹙 -##蹟 -##蹣 -##蹤 -##蹦 -##蹩 -##蹬 -##蹭 -##蹲 -##蹴 -##蹶 -##蹺 -##蹼 -##蹿 -##躁 -##躇 -##躉 -##躊 -##躋 -##躍 -##躏 -##躪 -##身 -##躬 -##躯 -##躲 -##躺 -##軀 -##車 -##軋 -##軌 -##軍 -##軒 -##軟 -##転 -##軸 -##軼 -##軽 -##軾 -##較 -##載 -##輒 -##輓 -##輔 -##輕 -##輛 -##輝 -##輟 -##輩 -##輪 -##輯 -##輸 -##輻 -##輾 -##輿 -##轄 -##轅 -##轆 -##轉 -##轍 -##轎 -##轟 -##车 -##轧 -##轨 -##轩 -##转 -##轭 -##轮 -##软 -##轰 -##轲 -##轴 -##轶 -##轻 -##轼 -##载 -##轿 -##较 -##辄 -##辅 -##辆 -##辇 -##辈 -##辉 -##辊 -##辍 -##辐 -##辑 -##输 -##辕 -##辖 -##辗 -##辘 -##辙 -##辛 -##辜 -##辞 -##辟 -##辣 -##辦 -##辨 -##辩 -##辫 -##辭 -##辮 -##辯 -##辰 -##辱 -##農 -##边 -##辺 -##辻 -##込 -##辽 -##达 -##迁 -##迂 -##迄 -##迅 -##过 -##迈 -##迎 -##运 -##近 -##返 -##还 -##这 -##进 -##远 -##违 -##连 -##迟 -##迢 -##迤 -##迥 -##迦 -##迩 -##迪 -##迫 -##迭 -##述 -##迴 -##迷 -##迸 -##迹 -##迺 -##追 -##退 -##送 -##适 
-##逃 -##逅 -##逆 -##选 -##逊 -##逍 -##透 -##逐 -##递 -##途 -##逕 -##逗 -##這 -##通 -##逛 -##逝 -##逞 -##速 -##造 -##逢 -##連 -##逮 -##週 -##進 -##逵 -##逶 -##逸 -##逻 -##逼 -##逾 -##遁 -##遂 -##遅 -##遇 -##遊 -##運 -##遍 -##過 -##遏 -##遐 -##遑 -##遒 -##道 -##達 -##違 -##遗 -##遙 -##遛 -##遜 -##遞 -##遠 -##遢 -##遣 -##遥 -##遨 -##適 -##遭 -##遮 -##遲 -##遴 -##遵 -##遶 -##遷 -##選 -##遺 -##遼 -##遽 -##避 -##邀 -##邁 -##邂 -##邃 -##還 -##邇 -##邈 -##邊 -##邋 -##邏 -##邑 -##邓 -##邕 -##邛 -##邝 -##邢 -##那 -##邦 -##邨 -##邪 -##邬 -##邮 -##邯 -##邰 -##邱 -##邳 -##邵 -##邸 -##邹 -##邺 -##邻 -##郁 -##郅 -##郊 -##郎 -##郑 -##郜 -##郝 -##郡 -##郢 -##郤 -##郦 -##郧 -##部 -##郫 -##郭 -##郴 -##郵 -##郷 -##郸 -##都 -##鄂 -##鄉 -##鄒 -##鄔 -##鄙 -##鄞 -##鄢 -##鄧 -##鄭 -##鄰 -##鄱 -##鄲 -##鄺 -##酉 -##酊 -##酋 -##酌 -##配 -##酐 -##酒 -##酗 -##酚 -##酝 -##酢 -##酣 -##酥 -##酩 -##酪 -##酬 -##酮 -##酯 -##酰 -##酱 -##酵 -##酶 -##酷 -##酸 -##酿 -##醃 -##醇 -##醉 -##醋 -##醍 -##醐 -##醒 -##醚 -##醛 -##醜 -##醞 -##醣 -##醪 -##醫 -##醬 -##醮 -##醯 -##醴 -##醺 -##釀 -##釁 -##采 -##釉 -##释 -##釋 -##里 -##重 -##野 -##量 -##釐 -##金 -##釗 -##釘 -##釜 -##針 -##釣 -##釦 -##釧 -##釵 -##鈀 -##鈉 -##鈍 -##鈎 -##鈔 -##鈕 -##鈞 -##鈣 -##鈦 -##鈪 -##鈴 -##鈺 -##鈾 -##鉀 -##鉄 -##鉅 -##鉉 -##鉑 -##鉗 -##鉚 -##鉛 -##鉤 -##鉴 -##鉻 -##銀 -##銃 -##銅 -##銑 -##銓 -##銖 -##銘 -##銜 -##銬 -##銭 -##銮 -##銳 -##銷 -##銹 -##鋁 -##鋅 -##鋒 -##鋤 -##鋪 -##鋰 -##鋸 -##鋼 -##錄 -##錐 -##錘 -##錚 -##錠 -##錢 -##錦 -##錨 -##錫 -##錮 -##錯 -##録 -##錳 -##錶 -##鍊 -##鍋 -##鍍 -##鍛 -##鍥 -##鍰 -##鍵 -##鍺 -##鍾 -##鎂 -##鎊 -##鎌 -##鎏 -##鎔 -##鎖 -##鎗 -##鎚 -##鎧 -##鎬 -##鎮 -##鎳 -##鏈 -##鏖 -##鏗 -##鏘 -##鏞 -##鏟 -##鏡 -##鏢 -##鏤 -##鏽 -##鐘 -##鐮 -##鐲 -##鐳 -##鐵 -##鐸 -##鐺 -##鑄 -##鑊 -##鑑 -##鑒 -##鑣 -##鑫 -##鑰 -##鑲 -##鑼 -##鑽 -##鑾 -##鑿 -##针 -##钉 -##钊 -##钎 -##钏 -##钒 -##钓 -##钗 -##钙 -##钛 -##钜 -##钝 -##钞 -##钟 -##钠 -##钡 -##钢 -##钣 -##钤 -##钥 -##钦 -##钧 -##钨 -##钩 -##钮 -##钯 -##钰 -##钱 -##钳 -##钴 -##钵 -##钺 -##钻 -##钼 -##钾 -##钿 -##铀 -##铁 -##铂 -##铃 -##铄 -##铅 -##铆 -##铉 -##铎 -##铐 -##铛 -##铜 -##铝 -##铠 -##铡 -##铢 -##铣 -##铤 -##铨 -##铩 -##铬 -##铭 -##铮 -##铰 -##铲 -##铵 -##银 -##铸 -##铺 -##链 -##铿 -##销 -##锁 -##锂 -##锄 -##锅 -##锆 -##锈 -##锉 -##锋 -##锌 -##锏 -##锐 -##锑 -##错 -##锚 -##锟 -##锡 -##锢 -##锣 -##锤 -##锥 -##锦 -##锭 -##键 -##锯 -##锰 -##锲 
-##锵 -##锹 -##锺 -##锻 -##镀 -##镁 -##镂 -##镇 -##镉 -##镌 -##镍 -##镐 -##镑 -##镕 -##镖 -##镗 -##镛 -##镜 -##镣 -##镭 -##镯 -##镰 -##镳 -##镶 -##長 -##长 -##門 -##閃 -##閉 -##開 -##閎 -##閏 -##閑 -##閒 -##間 -##閔 -##閘 -##閡 -##関 -##閣 -##閥 -##閨 -##閩 -##閱 -##閲 -##閹 -##閻 -##閾 -##闆 -##闇 -##闊 -##闌 -##闍 -##闔 -##闕 -##闖 -##闘 -##關 -##闡 -##闢 -##门 -##闪 -##闫 -##闭 -##问 -##闯 -##闰 -##闲 -##间 -##闵 -##闷 -##闸 -##闹 -##闺 -##闻 -##闽 -##闾 -##阀 -##阁 -##阂 -##阅 -##阆 -##阇 -##阈 -##阉 -##阎 -##阐 -##阑 -##阔 -##阕 -##阖 -##阙 -##阚 -##阜 -##队 -##阡 -##阪 -##阮 -##阱 -##防 -##阳 -##阴 -##阵 -##阶 -##阻 -##阿 -##陀 -##陂 -##附 -##际 -##陆 -##陇 -##陈 -##陋 -##陌 -##降 -##限 -##陕 -##陛 -##陝 -##陞 -##陟 -##陡 -##院 -##陣 -##除 -##陨 -##险 -##陪 -##陰 -##陲 -##陳 -##陵 -##陶 -##陷 -##陸 -##険 -##陽 -##隅 -##隆 -##隈 -##隊 -##隋 -##隍 -##階 -##随 -##隐 -##隔 -##隕 -##隘 -##隙 -##際 -##障 -##隠 -##隣 -##隧 -##隨 -##險 -##隱 -##隴 -##隶 -##隸 -##隻 -##隼 -##隽 -##难 -##雀 -##雁 -##雄 -##雅 -##集 -##雇 -##雉 -##雋 -##雌 -##雍 -##雎 -##雏 -##雑 -##雒 -##雕 -##雖 -##雙 -##雛 -##雜 -##雞 -##離 -##難 -##雨 -##雪 -##雯 -##雰 -##雲 -##雳 -##零 -##雷 -##雹 -##電 -##雾 -##需 -##霁 -##霄 -##霆 -##震 -##霈 -##霉 -##霊 -##霍 -##霎 -##霏 -##霑 -##霓 -##霖 -##霜 -##霞 -##霧 -##霭 -##霰 -##露 -##霸 -##霹 -##霽 -##霾 -##靂 -##靄 -##靈 -##青 -##靓 -##靖 -##静 -##靚 -##靛 -##靜 -##非 -##靠 -##靡 -##面 -##靥 -##靦 -##革 -##靳 -##靴 -##靶 -##靼 -##鞅 -##鞋 -##鞍 -##鞏 -##鞑 -##鞘 -##鞠 -##鞣 -##鞦 -##鞭 -##韆 -##韋 -##韌 -##韓 -##韜 -##韦 -##韧 -##韩 -##韬 -##韭 -##音 -##韵 -##韶 -##韻 -##響 -##頁 -##頂 -##頃 -##項 -##順 -##須 -##頌 -##預 -##頑 -##頒 -##頓 -##頗 -##領 -##頜 -##頡 -##頤 -##頫 -##頭 -##頰 -##頷 -##頸 -##頹 -##頻 -##頼 -##顆 -##題 -##額 -##顎 -##顏 -##顔 -##願 -##顛 -##類 -##顧 -##顫 -##顯 -##顱 -##顴 -##页 -##顶 -##顷 -##项 -##顺 -##须 -##顼 -##顽 -##顾 -##顿 -##颁 -##颂 -##预 -##颅 -##领 -##颇 -##颈 -##颉 -##颊 -##颌 -##颍 -##颐 -##频 -##颓 -##颔 -##颖 -##颗 -##题 -##颚 -##颛 -##颜 -##额 -##颞 -##颠 -##颡 -##颢 -##颤 -##颦 -##颧 -##風 -##颯 -##颱 -##颳 -##颶 -##颼 -##飄 -##飆 -##风 -##飒 -##飓 -##飕 -##飘 -##飙 -##飚 -##飛 -##飞 -##食 -##飢 -##飨 -##飩 -##飪 -##飯 -##飲 -##飼 -##飽 -##飾 -##餃 -##餅 -##餉 -##養 -##餌 -##餐 -##餒 -##餓 -##餘 -##餚 -##餛 -##餞 -##餡 -##館 -##餮 -##餵 -##餾 -##饅 -##饈 -##饋 -##饌 -##饍 -##饑 -##饒 -##饕 -##饗 -##饞 
-##饥 -##饨 -##饪 -##饬 -##饭 -##饮 -##饯 -##饰 -##饱 -##饲 -##饴 -##饵 -##饶 -##饷 -##饺 -##饼 -##饽 -##饿 -##馀 -##馁 -##馄 -##馅 -##馆 -##馈 -##馋 -##馍 -##馏 -##馒 -##馔 -##首 -##馗 -##香 -##馥 -##馨 -##馬 -##馭 -##馮 -##馳 -##馴 -##駁 -##駄 -##駅 -##駆 -##駐 -##駒 -##駕 -##駛 -##駝 -##駭 -##駱 -##駿 -##騁 -##騎 -##騏 -##験 -##騙 -##騨 -##騰 -##騷 -##驀 -##驅 -##驊 -##驍 -##驒 -##驕 -##驗 -##驚 -##驛 -##驟 -##驢 -##驥 -##马 -##驭 -##驮 -##驯 -##驰 -##驱 -##驳 -##驴 -##驶 -##驷 -##驸 -##驹 -##驻 -##驼 -##驾 -##驿 -##骁 -##骂 -##骄 -##骅 -##骆 -##骇 -##骈 -##骊 -##骋 -##验 -##骏 -##骐 -##骑 -##骗 -##骚 -##骛 -##骜 -##骞 -##骠 -##骡 -##骤 -##骥 -##骧 -##骨 -##骯 -##骰 -##骶 -##骷 -##骸 -##骼 -##髂 -##髅 -##髋 -##髏 -##髒 -##髓 -##體 -##髖 -##高 -##髦 -##髪 -##髮 -##髯 -##髻 -##鬃 -##鬆 -##鬍 -##鬓 -##鬚 -##鬟 -##鬢 -##鬣 -##鬥 -##鬧 -##鬱 -##鬼 -##魁 -##魂 -##魄 -##魅 -##魇 -##魍 -##魏 -##魔 -##魘 -##魚 -##魯 -##魷 -##鮑 -##鮨 -##鮪 -##鮭 -##鮮 -##鯉 -##鯊 -##鯖 -##鯛 -##鯨 -##鯰 -##鯽 -##鰍 -##鰓 -##鰭 -##鰲 -##鰻 -##鰾 -##鱈 -##鱉 -##鱔 -##鱗 -##鱷 -##鱸 -##鱼 -##鱿 -##鲁 -##鲈 -##鲍 -##鲑 -##鲛 -##鲜 -##鲟 -##鲢 -##鲤 -##鲨 -##鲫 -##鲱 -##鲲 -##鲶 -##鲷 -##鲸 -##鳃 -##鳄 -##鳅 -##鳌 -##鳍 -##鳕 -##鳖 -##鳗 -##鳝 -##鳞 -##鳥 -##鳩 -##鳳 -##鳴 -##鳶 -##鴉 -##鴕 -##鴛 -##鴦 -##鴨 -##鴻 -##鴿 -##鵑 -##鵜 -##鵝 -##鵡 -##鵬 -##鵰 -##鵲 -##鶘 -##鶩 -##鶯 -##鶴 -##鷗 -##鷲 -##鷹 -##鷺 -##鸚 -##鸞 -##鸟 -##鸠 -##鸡 -##鸢 -##鸣 -##鸥 -##鸦 -##鸨 -##鸪 -##鸭 -##鸯 -##鸳 -##鸵 -##鸽 -##鸾 -##鸿 -##鹂 -##鹃 -##鹄 -##鹅 -##鹈 -##鹉 -##鹊 -##鹌 -##鹏 -##鹑 -##鹕 -##鹘 -##鹜 -##鹞 -##鹤 -##鹦 -##鹧 -##鹫 -##鹭 -##鹰 -##鹳 -##鹵 -##鹹 -##鹼 -##鹽 -##鹿 -##麂 -##麋 -##麒 -##麓 -##麗 -##麝 -##麟 -##麥 -##麦 -##麩 -##麴 -##麵 -##麸 -##麺 -##麻 -##麼 -##麽 -##麾 -##黃 -##黄 -##黍 -##黎 -##黏 -##黑 -##黒 -##黔 -##默 -##黛 -##黜 -##黝 -##點 -##黠 -##黨 -##黯 -##黴 -##鼋 -##鼎 -##鼐 -##鼓 -##鼠 -##鼬 -##鼹 -##鼻 -##鼾 -##齁 -##齊 -##齋 -##齐 -##齒 -##齡 -##齢 -##齣 -##齦 -##齿 -##龄 -##龅 -##龈 -##龊 -##龋 -##龌 -##龍 -##龐 -##龔 -##龕 -##龙 -##龚 -##龛 -##龜 -##龟 -##︰ -##︱ -##︶ -##︿ -##﹁ -##﹂ -##﹍ -##﹏ -##﹐ -##﹑ -##﹒ -##﹔ -##﹕ -##﹖ -##﹗ -##﹙ -##﹚ -##﹝ -##﹞ -##﹡ -##﹣ -##! -##" -### -##$ -##% -##& -##' -##( -##) -##* -##, -##- -##. -##/ -##: -##; -##< -##? 
-##@ -##[ -##\ -##] -##^ -##_ -##` -##f -##h -##j -##u -##w -##z -##{ -##} -##。 -##「 -##」 -##、 -##・ -##ッ -##ー -##イ -##ク -##シ -##ス -##ト -##ノ -##フ -##ラ -##ル -##ン -##゙ -##゚ -## ̄ -##¥ -##👍 -##🔥 -##😂 -##😎 -[unused0] -[unused100] -[unused101] -[unused102] -[unused103] -[unused104] -[unused105] -[unused106] -[unused107] -[unused108] -[unused109] -[unused110] -[unused111] -[unused112] -[unused113] -[unused114] -[unused115] -[unused116] -[unused117] -[unused118] -[unused119] -[unused120] -[unused121] -[unused122] -[unused123] -[unused124] -[unused125] -[unused126] -[unused127] -[unused128] -[unused129] -[unused130] -[unused131] -[unused132] -[unused133] -[unused134] -[unused135] -[unused136] -[unused137] -[unused138] -[unused139] -[unused140] -[unused141] -[unused142] -[unused143] -[unused144] -[unused145] -[unused146] -[unused147] -[unused148] -[unused149] -[unused150] -[unused151] -[unused152] -[unused153] -[unused154] -[unused155] -[unused156] -[unused157] -[unused158] -[unused159] -[unused160] -[unused161] -[unused162] -[unused163] -[unused164] -[unused165] -[unused166] -[unused167] -[unused168] -[unused169] -[unused170] -[unused171] -[unused172] -[unused173] -[unused174] -[unused175] -[unused176] -[unused177] -[unused178] -[unused179] -[unused180] -[unused181] -[unused182] -[unused183] -[unused184] -[unused185] -[unused186] -[unused187] -[unused188] -[unused189] -[unused190] -[unused191] -[unused192] -[unused193] -[unused194] -[unused195] -[unused196] -[unused197] -[unused198] -[unused199] -[unused200] -[unused201] -[unused202] -[unused203] -[unused204] -[unused205] -[unused206] -[unused207] -[unused208] -[unused209] -[unused210] -[unused211] -[unused212] -[unused213] -[unused214] -[unused215] -[unused216] -[unused217] -[unused218] -[unused219] -[unused220] -[unused221] -[unused222] -[unused223] -[unused224] -[unused225] -[unused226] -[unused227] -[unused228] -[unused229] -[unused230] -[unused231] -[unused232] -[unused233] -[unused234] -[unused235] -[unused236] 
-[unused237] -[unused238] -[unused239] -[unused240] -[unused241] -[unused242] -[unused243] -[unused244] -[unused245] -[unused246] -[unused247] -[unused248] -[unused249] -[unused250] -[unused251] -[unused252] -[unused253] -[unused254] -[unused255] -[unused256] -[unused257] -[unused258] -[unused259] -[unused260] -[unused261] -[unused262] -[unused263] -[unused264] -[unused265] -[unused266] -[unused267] -[unused268] -[unused269] -[unused270] -[unused271] -[unused272] -[unused273] -[unused274] -[unused275] -[unused276] -[unused277] -[unused278] -[unused279] -[unused280] -[unused281] -[unused282] -[unused283] -[unused284] -[unused285] -[unused286] -[unused287] -[unused288] -[unused289] -[unused290] -[unused291] -[unused292] -[unused293] -[unused294] -[unused295] -[unused296] -[unused297] -[unused298] -[unused299] -[unused300] -[unused301] -[unused302] -[unused303] -[unused304] -[unused305] -[unused306] -[unused307] -[unused308] -[unused309] -[unused310] -[unused311] -[unused312] -[unused313] -[unused314] -[unused315] -[unused316] -[unused317] -[unused318] -[unused319] -[unused320] -[unused321] -[unused322] -[unused323] -[unused324] -[unused325] -[unused326] -[unused327] -[unused328] -[unused329] -[unused330] -[unused331] -[unused332] -[unused333] -[unused334] -[unused335] -[unused336] -[unused337] -[unused338] -[unused339] -[unused340] -[unused341] -[unused342] -[unused343] -[unused344] -[unused345] -[unused346] -[unused347] -[unused348] -[unused349] -[unused350] -[unused351] -[unused352] -[unused353] -[unused354] -[unused355] -[unused356] -[unused357] -[unused358] -[unused359] -[unused360] -[unused361] -[unused362] -[unused363] -[unused364] -[unused365] -[unused366] -[unused367] -[unused368] -[unused369] -[unused370] -[unused371] -[unused372] -[unused373] -[unused374] -[unused375] -[unused376] -[unused377] -[unused378] -[unused379] -[unused380] -[unused381] -[unused382] -[unused383] -[unused384] -[unused385] -[unused386] -[unused387] -[unused388] -[unused389] 
-[unused390] -[unused391] -[unused392] -[unused393] -[unused394] -[unused395] -[unused396] -[unused397] -[unused398] -[unused399] -[unused400] -[unused401] -[unused402] -[unused403] -[unused404] -[unused405] -[unused406] -[unused407] -[unused408] -[unused409] -[unused410] -[unused411] -[unused412] -[unused413] -[unused414] -[unused415] -[unused416] -[unused417] -[unused418] -[unused419] -[unused420] -[unused421] -[unused422] -[unused423] -[unused424] -[unused425] -[unused426] -[unused427] -[unused428] -[unused429] -[unused430] -[unused431] -[unused432] -[unused433] -[unused434] -[unused435] -[unused436] -[unused437] -[unused438] -[unused439] -[unused440] -[unused441] -[unused442] -[unused443] -[unused444] -[unused445] -[unused446] -[unused447] -[unused448] -[unused449] -[unused450] -[unused451] -[unused452] -[unused453] -[unused454] -[unused455] -[unused456] -[unused457] -[unused458] -[unused459] -[unused460] -[unused461] -[unused462] -[unused463] -[unused464] -[unused465] -[unused466] -[unused467] -[unused468] -[unused469] -[unused470] -[unused471] -[unused472] -[unused473] -[unused474] -[unused475] -[unused476] -[unused477] -[unused478] -[unused479] -[unused480] -[unused481] -[unused482] -[unused483] -[unused484] -[unused485] -[unused486] -[unused487] -[unused488] -[unused489] -[unused490] -[unused491] -[unused492] -[unused493] -[unused494] -[unused495] -[unused496] -[unused497] -[unused498] -[unused499] -[unused500] -[unused501] -[unused502] -[unused503] -[unused504] -[unused505] -[unused506] -[unused507] -[unused508] -[unused509] -[unused510] -[unused511] -[unused512] -[unused513] -[unused514] -[unused515] -[unused516] -[unused517] -[unused518] -[unused519] -[unused520] -[unused521] -[unused522] -[unused523] -[unused524] -[unused525] -[unused526] -[unused527] -[unused528] -[unused529] -[unused530] -[unused531] -[unused532] -[unused533] -[unused534] -[unused535] -[unused536] -[unused537] -[unused538] -[unused539] -[unused540] -[unused541] -[unused542] 
-[unused543] -[unused544] -[unused545] -[unused546] -[unused547] -[unused548] -[unused549] -[unused550] -[unused551] -[unused552] -[unused553] -[unused554] -[unused555] -[unused556] -[unused557] -[unused558] -[unused559] -[unused560] -[unused561] -[unused562] -[unused563] -[unused564] -[unused565] -[unused566] -[unused567] -[unused568] -[unused569] -[unused570] -[unused571] -[unused572] -[unused573] -[unused574] -[unused575] -[unused576] -[unused577] -[unused578] -[unused579] -[unused580] -[unused581] -[unused582] -[unused583] -[unused584] -[unused585] -[unused586] -[unused587] -[unused588] -[unused589] -[unused590] -[unused591] -[unused592] -[unused593] -[unused594] -[unused595] -[unused596] -[unused597] -[unused598] -[unused599] -[unused600] -[unused601] -[unused602] -[unused603] -[unused604] -[unused605] -[unused606] -[unused607] -[unused608] -[unused609] -[unused610] -[unused611] -[unused612] -[unused613] -[unused614] -[unused615] -[unused616] -[unused617] -[unused618] -[unused619] -[unused620] -[unused621] -[unused622] -[unused623] -[unused624] -[unused625] -[unused626] -[unused627] -[unused628] -[unused629] -[unused630] -[unused631] -[unused632] -[unused633] -[unused634] -[unused635] -[unused636] -[unused637] -[unused638] -[unused639] -[unused640] -[unused641] -[unused642] -[unused643] -[unused644] -[unused645] -[unused646] -[unused647] -[unused648] -[unused649] -[unused650] -[unused651] -[unused652] -[unused653] -[unused654] -[unused655] -[unused656] -[unused657] -[unused658] -[unused659] -[unused660] -[unused661] -[unused662] -[unused663] -[unused664] -[unused665] -[unused666] -[unused667] -[unused668] -[unused669] -[unused670] -[unused671] -[unused672] -[unused673] -[unused674] -[unused675] -[unused676] -[unused677] -[unused678] -[unused679] -[unused680] -[unused681] -[unused682] -[unused683] -[unused684] -[unused685] -[unused686] -[unused687] -[unused688] -[unused689] -[unused690] -[unused691] -[unused692] -[unused693] -[unused694] -[unused695] 
-[unused696] -[unused697] -[unused698] -[unused699] -[unused700] -[unused701] -[unused702] -[unused703] -[unused704] -[unused705] -[unused706] -[unused707] -[unused708] -[unused709] -[unused710] -[unused711] -[unused712] -[unused713] -[unused714] -[unused715] -[unused716] -[unused717] -[unused718] -[unused719] -[unused720] -[unused721] -[unused722] -[unused723] -[unused724] -[unused725] -[unused726] -[unused727] -[unused728] -[unused729] -[unused730] -[unused731] -[unused732] -[unused733] -[unused734] -[unused735] -[unused736] -[unused737] -[unused738] -[unused739] -[unused740] -[unused741] -[unused742] -[unused743] -[unused744] -[unused745] -[unused746] -[unused747] -[unused748] -[unused749] -[unused750] -[unused751] -[unused752] -[unused753] -[unused754] -[unused755] -[unused756] -[unused757] -[unused758] -[unused759] -[unused760] -[unused761] -[unused762] -[unused763] -[unused764] -[unused765] -[unused766] -[unused767] -[unused768] -[unused769] -[unused770] -[unused771] -[unused772] -[unused773] -[unused774] -[unused775] -[unused776] -[unused777] -[unused778] -[unused779] -[unused780] -[unused781] -[unused782] -[unused783] -[unused784] -[unused785] -[unused786] -[unused787] -[unused788] -[unused789] -[unused790] -[unused791] -[unused792] -[unused793] -[unused794] -[unused795] -[unused796] -[unused797] -[unused798] -[unused799] -[unused800] -[unused801] -[unused802] -[unused803] -[unused804] -[unused805] -[unused806] -[unused807] -[unused808] -[unused809] -[unused810] -[unused811] -[unused812] -[unused813] -[unused814] -[unused815] -[unused816] -[unused817] -[unused818] -[unused819] -[unused820] -[unused821] -[unused822] -[unused823] -[unused824] -[unused825] -[unused826] -[unused827] -[unused828] -[unused829] -[unused830] -[unused831] -[unused832] -[unused833] -[unused834] -[unused835] -[unused836] -[unused837] -[unused838] -[unused839] -[unused840] -[unused841] -[unused842] -[unused843] -[unused844] -[unused845] -[unused846] -[unused847] -[unused848] 
-[unused849] -[unused850] -[unused851] -[unused852] -[unused853] -[unused854] -[unused855] -[unused856] -[unused857] -[unused858] -[unused859] -[unused860] -[unused861] -[unused862] -[unused863] -[unused864] -[unused865] -[unused866] -[unused867] -[unused868] -[unused869] -[unused870] -[unused871] -[unused872] -[unused873] -[unused874] -[unused875] -[unused876] -[unused877] -[unused878] -[unused879] -[unused880] -[unused881] -[unused882] -[unused883] -[unused884] -[unused885] -[unused886] -[unused887] -[unused888] -[unused889] -[unused890] -[unused891] -[unused892] -[unused893] -[unused894] -[unused895] -[unused896] -[unused897] -[unused898] -[unused899] -[unused900] -[unused901] -[unused902] -[unused903] -[unused904] -[unused905] -[unused906] -[unused907] -[unused908] -[unused909] -[unused910] -[unused911] -[unused912] -[unused913] -[unused914] -[unused915] -[unused916] -[unused917] -[unused918] -[unused919] -[unused920] -[unused921] -[unused922] -[unused923] -[unused924] -[unused925] -[unused926] -[unused927] -[unused928] -[unused929] -[unused930] -[unused931] -[unused932] -[unused933] -[unused934] -[unused935] -[unused936] -[unused937] -[unused938] -[unused939] -[unused940] -[unused941] -[unused942] -[unused943] -[unused944] -[unused945] -[unused946] -[unused947] -[unused948] -[unused949] -[unused950] -[unused951] -[unused952] -[unused953] -[unused954] -[unused955] -[unused956] -[unused957] -[unused958] -[unused959] -[unused960] -[unused961] -[unused962] -[unused963] -[unused964] -[unused965] -[unused966] -[unused967] -[unused968] -[unused969] -[unused970] -[unused971] -[unused972] -[unused973] -[unused974] -[unused975] -[unused976] -[unused977] -[unused978] -[unused979] -[unused980] -[unused981] -[unused982] -[unused983] -[unused984] -[unused985] -[unused986] -[unused987] -[unused988] -[unused989] -[unused990] -[unused991] -[unused992] -[unused993] -` -¡ -¢ -¦ -¨ -ª -¬ -´ -¶ -½ -¾ -¿ -ð -þ -ħ -ı -ł -œ -ƒ -ɐ -ɑ -ɒ -ɕ -ɛ -ɣ -ɨ -ɪ -ɫ -ɬ -ɯ -ɲ -ɴ -ɹ -ɾ -ʀ -ʁ -ʂ -ʃ 
-ʉ -ʊ -ʋ -ʌ -ʎ -ʐ -ʑ -ʒ -ʔ -ʲ -ʳ -ʷ -ʸ -ʻ -ʼ -ʾ -ʿ -ˡ -ˣ -ˤ -ζ -ξ -щ -ъ -э -ю -ђ -є -ј -љ -њ -ћ -ӏ -ա -բ -գ -դ -ե -թ -ի -լ -կ -հ -մ -յ -ն -ո -պ -ս -վ -տ -ր -ւ -ք -־ -א -ב -ג -ד -ה -ו -ז -ח -ט -י -ך -כ -ל -ם -מ -ן -נ -ס -ע -ף -פ -ץ -צ -ק -ר -ש -ת -، -ء -ث -ج -ح -خ -ذ -ز -ش -ص -ض -ط -ظ -غ -ـ -ف -ق -ك -ى -ٹ -پ -چ -ک -گ -ں -ھ -ہ -ی -ے -अ -आ -उ -ए -क -ख -ग -च -ज -ट -ड -ण -त -थ -द -ध -न -प -ब -भ -म -य -र -ल -व -श -ष -स -ह -ा -ि -ी -ो -। -॥ -ং -অ -আ -ই -উ -এ -ও -ক -খ -গ -চ -ছ -জ -ট -ড -ণ -ত -থ -দ -ধ -ন -প -ব -ভ -ম -য -র -ল -শ -ষ -স -হ -া -ি -ী -ে -க -ச -ட -த -ந -ன -ப -ம -ய -ர -ல -ள -வ -ா -ி -ு -ே -ை -ನ -ರ -ಾ -ක -ය -ර -ල -ව -ා -ต -ท -พ -ล -ว -ส -། -ག -ང -ད -ན -པ -བ -མ -འ -ར -ལ -ས -မ -ა -ბ -გ -დ -ე -ვ -თ -ი -კ -ლ -მ -ნ -ო -რ -ს -ტ -უ -ᄊ -ᴬ -ᴮ -ᴰ -ᴵ -ᴺ -ᵀ -ᵇ -ᵈ -ᵖ -ᵗ -ᵢ -ᵣ -ᵤ -ᵥ -ᶜ -ᶠ -‐ -‑ -‒ -– -— -― -‘ -’ -‚ -“ -” -‡ -… -⁰ -⁴ -⁵ -⁶ -⁷ -⁸ -⁹ -⁻ -₀ -₅ -₆ -₇ -₈ -₉ -₊ -₍ -₎ -ₐ -ₑ -ₒ -ₓ -ₕ -ₖ -ₗ -ₘ -ₙ -ₚ -ₛ -ₜ -₤ -₩ -₱ -₹ -ℓ -ℝ -⅓ -⅔ -↦ -⇄ -⇌ -∂ -∅ -∆ -∇ -∈ -∗ -∘ -∧ -∨ -∪ -⊂ -⊆ -⊕ -⊗ -☉ -♭ -♯ -⟨ -⟩ -ⱼ -⺩ -⺼ -⽥ -亻 -宀 -彳 -忄 -扌 -氵 -疒 -糹 -訁 -辶 -阝 -龸 -fi -fl -had -were -which -him -their -been -would -then -them -could -during -through -between -while -later -around -did -such -being -used -against -many -both -these -known -until -even -didn -because -born -since -still -became -any -including -took -same -each -called -much -however -four -another -found -won -going -away -hand -several -following -released -played -began -district -those -held -own -early -league -government -came -based -thought -looked -along -went -few -father -former -located -got -though -every -century -without -within -building -large -named -started -once -should -built -british -death -moved -door -need -president -wasn -although -due -major -died -third -knew -asked -turned -wanted -together -received -son -served -different -behind -himself -felt -members -football -near -having -saw -mother -army -front -late -hands -put -division -across -told -often -ever -french -six -include -tell -among -species 
-really -according -half -original -gave -making -enough -opened -must -included -given -german -woman -community -might -million -court -short -round -seen -always -become -sure -almost -director -council -career -things -using -couldn -better -students -married -nothing -worked -others -record -anything -continued -give -military -established -returned -does -written -thing -feet -far -already -championship -western -department -role -various -production -television -produced -working -region -present -period -looking -least -total -england -wife -per -brother -soon -political -taken -created -further -able -reached -joined -upon -done -important -either -appeared -position -ground -lead -election -arms -police -instead -words -moment -someone -announced -less -wrote -past -followed -founded -finally -india -taking -records -considered -northern -toward -european -outside -described -track -playing -heard -professional -australia -miles -yet -trying -blood -southern -maybe -everything -mouth -race -recorded -above -daughter -points -middle -move -tried -elected -closed -ten -minister -chief -person -similar -brought -rest -formed -floor -doing -killed -training -needed -turn -finished -railway -rather -sent -example -ran -term -coming -currently -forces -despite -areas -fact -dead -originally -germany -probably -developed -pulled -stood -signed -songs -child -eventually -met -average -teams -minutes -current -kind -decided -usually -eastern -seemed -episode -bed -added -indian -route -available -throughout -addition -appointed -eight -construction -mean -remained -schools -sometimes -events -possible -australian -forward -debut -seat -performance -committee -features -character -herself -lot -russian -range -hours -sold -quickly -directed -guitar -performed -players -smile -myself -placed -province -towards -wouldn -leading -whole -designed -census -europe -attack -japanese -getting -alone -lower -wide -hospital -believe -changed -sister -gone -hadn -ship 
-studies -academy -shot -below -involved -kept -largest -especially -beginning -movement -section -female -professor -lord -longer -walked -actually -civil -families -thus -aircraft -completed -includes -captain -fight -vocals -featured -fourth -officer -hear -means -medical -groups -lips -competition -entire -lived -leaving -federal -tournament -passed -independent -kingdom -spent -fine -doesn -reported -fall -raised -itself -replaced -leader -theatre -whose -parents -spanish -canadian -degree -writing -awarded -higher -coast -provided -senior -organization -stopped -onto -countries -parts -conference -interest -saying -allowed -earlier -matter -winning -try -happened -moving -los -breath -nearly -mid -certain -italian -african -standing -fell -artist -shows -deal -mine -industry -everyone -republic -provide -student -primary -owned -older -heavy -1st -makes -attention -anyone -africa -stated -length -ended -fingers -command -staff -foreign -opening -governor -okay -medal -kill -introduced -chest -hell -feeling -success -meet -reason -meeting -novel -trade -buildings -guy -goal -native -husband -previously -entered -producer -operations -takes -covered -forced -roman -complete -successful -texas -cold -traditional -films -clear -approximately -nine -prince -question -tracks -ireland -regional -personal -operation -economic -holding -twenty -additional -hour -regular -historic -places -whom -shook -km² -secretary -prior -scored -units -ask -property -ready -immediately -month -listed -contract -themselves -lines -navy -writer -meant -runs -practice -championships -singer -commission -required -starting -generally -giving -attended -couple -stand -catholic -caught -executive -thinking -chair -quite -shoulder -hope -decision -plays -defeated -municipality -whether -offered -slowly -pain -direction -mission -mostly -noted -individual -managed -lives -plant -helped -except -studied -computer -figure -relationship -issue -significant -loss -smiled -gun -highest -male 
-bring -goals -mexico -problem -distance -commercial -completely -location -annual -famous -neck -caused -italy -understand -greek -highway -wrong -comes -appearance -issues -musical -companies -castle -income -assembly -bass -initially -parliament -artists -experience -particular -walk -foot -engineering -talking -dropped -boys -stars -remember -carried -train -stadium -angeles -evidence -becoming -assistant -soviet -upper -youth -reach -actor -numerous -nodded -arrived -minute -believed -complex -victory -associated -temple -chance -perhaps -bishop -launched -particularly -retired -subject -prize -contains -yeah -theory -empire -suddenly -waiting -trust -recording -terms -champion -religious -zealand -names -2nd -ancient -corner -represented -legal -justice -cause -watched -brothers -material -changes -simply -response -answer -historical -stories -straight -feature -increased -administration -virginia -activities -cultural -overall -winner -programs -basketball -legs -guard -cast -doctor -flight -results -remains -cost -effect -winter -larger -islands -problems -chairman -grew -commander -isn -failed -selected -hurt -fort -regiment -majority -plans -shown -pretty -irish -characters -directly -scene -likely -operated -allow -matches -looks -houses -fellow -marriage -rules -florida -expected -nearby -congress -peace -recent -wait -subsequently -variety -serving -agreed -poor -attempt -wood -democratic -rural -mile -appears -township -soldiers -##ized -pennsylvania -closer -fighting -claimed -score -physical -filled -genus -specific -sitting -mom -therefore -supported -status -fear -cases -meaning -wales -minor -spain -vice -parish -separate -horse -fifth -remaining -branch -presented -stared -uses -forms -baseball -exactly -choice -discovered -composed -truth -russia -dad -ring -referred -numbers -greater -metres -slightly -direct -increase -responsible -crew -rule -trees -troops -broke -goes -individuals -hundred -weight -creek -sleep -defense -provides -ordered 
-jewish -safe -judge -whatever -corps -realized -growing -cities -gaze -lies -spread -letter -showed -situation -mayor -transport -watching -workers -extended -expression -normal -chart -multiple -border -mrs -walls -piano -heat -cannot -earned -products -drama -era -authority -seasons -join -grade -difficult -territory -mainly -stations -squadron -stepped -iron -19th -serve -appear -speak -broken -charge -knowledge -kilometres -removed -ships -campus -pushed -britain -leaves -recently -boston -latter -acquired -poland -quality -officers -presence -planned -nations -mass -broadcast -influence -wild -emperor -electric -headed -ability -promoted -yellow -ministry -throat -smaller -politician -latin -spoke -cars -males -lack -acting -seeing -consists -estate -pressure -newspaper -olympics -conditions -beat -elements -walking -vote -needs -carolina -featuring -levels -francisco -purpose -females -dutch -duke -ahead -gas -safety -serious -turning -highly -lieutenant -firm -amount -mixed -proposed -perfect -agreement -affairs -3rd -seconds -contemporary -paid -prison -label -administrative -intended -constructed -academic -teacher -races -formerly -nation -issued -shut -drums -housing -seems -graduated -mentioned -picked -recognized -shortly -protection -picture -notable -elections -1980s -loved -percent -racing -elizabeth -volume -hockey -beside -settled -competed -replied -drew -actress -marine -scotland -steel -glanced -farm -risk -tonight -positive -singles -effects -gray -screen -residents -sides -none -secondary -literature -polish -destroyed -flying -founder -households -lay -reserve -industrial -younger -approach -appearances -ones -finish -powerful -fully -growth -honor -jersey -projects -revealed -infantry -pair -equipment -visit -evening -grant -effort -treatment -buried -republican -primarily -bottom -owner -1970s -israel -gives -remain -spot -produce -champions -accepted -ways -##ally -losing -split -capacity -basis -trial -questions -20th -guess -officially 
-memorial -naval -initial -##ization -whispered -median -engineer -sydney -columbia -strength -tears -senate -asian -draw -warm -supposed -transferred -leaned -candidate -escape -mountains -potential -activity -seem -traffic -murder -slow -orchestra -haven -agency -taught -website -comedy -unable -storm -planning -albums -rugby -environment -scientific -grabbed -protect -boat -typically -damage -principal -divided -dedicated -ohio -pick -fought -driver -empty -shoulders -sort -thank -berlin -prominent -account -freedom -necessary -efforts -headquarters -follows -alongside -suggested -operating -steps -technical -begin -easily -teeth -speaking -settlement -scale -renamed -enemy -semi -joint -compared -scottish -leadership -analysis -offers -georgia -pieces -captured -animal -deputy -organized -combined -method -challenge -1960s -huge -wants -battalion -sons -rise -crime -types -facilities -telling -platform -sit -1990s -tells -assigned -pull -commonly -alive -letters -concept -conducted -wearing -happen -bought -becomes -holy -gets -defeat -languages -purchased -occurred -titled -declared -applied -sciences -concert -sounds -jazz -brain -painting -fleet -tax -michigan -animals -leaders -episodes -birth -clubs -palace -critical -refused -fair -leg -laughed -returning -surrounding -participated -formation -lifted -pointed -connected -rome -medicine -laid -powers -tall -shared -focused -knowing -yards -entrance -falls -calling -sources -chosen -beneath -resources -yard -nominated -silence -defined -gained -thirty -bodies -adopted -christmas -widely -register -apart -iran -premier -serves -unknown -parties -generation -continues -fields -brigade -quiet -teaching -clothes -impact -weapons -partner -flat -theater -relations -plants -suffered -begins -seats -armed -models -worth -laws -communities -classes -background -knows -thanks -quarter -reaching -humans -carry -killing -format -setting -architecture -disease -railroad -possibly -arthur -thoughts -doors -density 
-crowd -illinois -stomach -tone -unique -reports -anyway -liberal -vehicle -thick -dry -drug -faced -largely -facility -theme -holds -creation -strange -colonel -revolution -politics -turns -silent -rail -relief -independence -combat -shape -determined -sales -learned -4th -finger -providing -heritage -fiction -situated -designated -allowing -hosted -sight -interview -estimated -reduced -toronto -footballer -keeping -guys -damn -claim -motion -sixth -stayed -rear -receive -handed -twelve -dress -audience -granted -brazil -spirit -##ated -noticed -olympic -representative -tight -trouble -reviews -drink -vampire -missing -roles -ranked -newly -household -finals -critics -phase -massachusetts -pilot -unlike -philadelphia -bright -guns -crown -organizations -roof -respectively -clearly -tongue -marked -circle -bronze -expanded -sexual -supply -yourself -inspired -labour -reference -draft -connection -reasons -driving -jesus -cells -entry -neither -trail -claims -atlantic -orders -labor -nose -afraid -identified -intelligence -calls -cancer -attacked -passing -positions -imperial -grey -swedish -avoid -extra -uncle -covers -allows -surprise -materials -fame -hunter -citizens -figures -environmental -confirmed -shit -titles -performing -difference -acts -attacks -existing -votes -opportunity -nor -entirely -trains -opposite -pakistan -develop -resulted -representatives -actions -reality -pressed -barely -conversation -faculty -northwest -ends -documentary -nuclear -stock -sets -eat -alternative -resulting -creating -surprised -cemetery -drop -finding -cricket -streets -tradition -ride -ear -explained -composer -injury -apartment -municipal -educational -occupied -netherlands -clean -billion -constitution -learn -maximum -classical -lose -opposition -ontario -hills -rolled -ending -drawn -permanent -lewis -sites -chamber -scoring -height -lyrics -staring -officials -snow -oldest -qualified -interior -apparently -succeeded -thousand -dinner -lights -existence -heavily 
-greatest -conservative -send -bowl -catch -duty -speech -authorities -princess -performances -versions -shall -graduate -pictures -effective -remembered -poetry -desk -crossed -starring -starts -passenger -sharp -acres -ass -weather -falling -rank -fund -supporting -adult -heads -southeast -lane -condition -transfer -prevent -regions -earl -federation -relatively -answered -besides -obtained -portion -reaction -liked -peak -counter -religion -chain -rare -convention -aid -lie -vehicles -perform -squad -wonder -lying -crazy -sword -attempted -centuries -weren -philosophy -interested -sweden -wolf -frequently -abandoned -literary -alliance -task -entitled -threw -promotion -tiny -soccer -visited -achieved -defence -internal -persian -methods -arrested -otherwise -programming -villages -elementary -districts -rooms -criminal -conflict -worry -trained -attempts -waited -signal -truck -subsequent -programme -communist -faith -sector -carrying -laugh -controlled -korean -showing -origin -fuel -evil -brief -identity -darkness -pool -missed -publication -wings -invited -briefly -standards -kissed -ideas -climate -causing -walter -worse -albert -winners -desire -aged -northeast -dangerous -gate -doubt -wooden -poet -rising -funding -communications -communication -violence -copies -prepared -investigation -skills -pulling -containing -ultimately -offices -singing -understanding -tomorrow -christ -ward -pope -stands -5th -flow -studios -aired -commissioned -contained -exist -americans -wrestling -approved -kid -employed -respect -suit -asking -increasing -frame -angry -selling -1950s -thin -finds -temperature -statement -ali -explain -inhabitants -towns -extensive -narrow -flowers -promise -somewhere -closely -bureau -cape -weekly -presidential -legislative -launch -founding -artillery -strike -un -institutions -roll -writers -landing -chose -anymore -attorney -billboard -receiving -agricultural -breaking -sought -dave -admitted -lands -mexican -##bury -specifically -hole 
-moscow -roads -accident -proved -struck -guards -stuff -slid -expansion -melbourne -opposed -sub -southwest -architect -failure -plane -tank -listen -regarding -wet -introduction -metropolitan -fighter -inch -grown -gene -anger -fixed -khan -domestic -worldwide -chapel -mill -functions -examples -developing -turkey -hits -pocket -antonio -papers -grow -unless -circuit -18th -concerned -attached -journalist -selection -journey -converted -provincial -painted -hearing -aren -bands -negative -aside -wondered -knight -lap -noise -billy -shooting -bedroom -priest -resistance -motor -homes -sounded -giant -scenes -equal -comic -patients -hidden -solid -actual -bringing -afternoon -touched -funds -consisted -marie -canal -treaty -turkish -recognition -residence -cathedral -broad -knees -incident -shaped -fired -norwegian -handle -cheek -contest -represent -representing -birds -advantage -emergency -wrapped -drawing -notice -broadcasting -somehow -bachelor -seventh -collected -registered -establishment -assumed -chemical -personnel -retirement -portuguese -wore -tied -device -threat -progress -advance -##ised -banks -hired -manchester -nfl -teachers -structures -forever -tennis -helping -saturday -applications -junction -incorporated -neighborhood -dressed -ceremony -influenced -hers -stairs -decades -inner -kansas -hung -hoped -gain -scheduled -downtown -engaged -austria -clock -norway -certainly -pale -victor -employees -plate -putting -surrounded -##ists -finishing -blues -tropical -minnesota -consider -philippines -accept -retrieved -concern -anderson -properties -institution -gordon -successfully -vietnam -backing -outstanding -muslim -crossing -folk -producing -usual -demand -occurs -observed -lawyer -educated -pleasure -budget -items -quietly -colorado -philip -typical -##worth -derived -survived -asks -mental -jake -jews -distinguished -sri -extremely -athletic -loud -thousands -worried -transportation -horses -weapon -arena -importance -users -objects 
-contributed -douglas -aware -senator -johnny -sisters -engines -flag -investment -samuel -shock -capable -clark -row -wheel -refers -familiar -biggest -wins -hate -maintained -drove -hamilton -expressed -injured -underground -churches -wars -tunnel -passes -stupid -agriculture -softly -cabinet -regarded -joining -indiana -dates -spend -behavior -woods -protein -gently -chase -morgan -mention -burning -wake -combination -occur -mirror -leads -indeed -impossible -paintings -covering -soldier -locations -attendance -sell -historian -wisconsin -invasion -argued -painter -diego -changing -egypt -experienced -inches -missouri -grounds -spoken -switzerland -reform -rolling -forget -massive -resigned -burned -tennessee -locked -values -improved -wounded -universe -sick -dating -facing -purchase -##pur -moments -merged -anniversary -coal -brick -understood -causes -dynasty -queensland -establish -stores -crisis -promote -hoping -cards -referee -extension -raise -arizona -improve -colonial -formal -charged -palm -hide -rescue -faces -feelings -candidates -juan -6th -courses -weekend -luke -cash -fallen -delivered -affected -installed -carefully -tries -hollywood -costs -lincoln -responsibility -shore -proper -normally -maryland -assistance -constant -offering -friendly -waters -persons -realize -contain -trophy -partnership -factor -musicians -bound -oregon -indicated -houston -medium -consisting -somewhat -cycle -beer -moore -frederick -gotten -worst -weak -approached -arranged -chin -loan -bond -fifteen -pattern -disappeared -translated -##zed -lip -arab -capture -interests -insurance -shifted -cave -prix -warning -sections -courts -coat -plot -smell -golf -favorite -maintain -knife -voted -degrees -finance -quebec -opinion -translation -manner -ruled -operate -productions -choose -musician -confused -tired -separated -stream -techniques -committed -attend -ranking -kings -throw -passengers -measure -horror -mining -sand -danger -salt -calm -decade -dam -require -runner 
-rush -associate -greece -rivers -consecutive -matthew -##ski -sighed -sq -documents -closing -tie -accused -islamic -distributed -directors -organisation -7th -breathing -mad -lit -arrival -concrete -taste -composition -shaking -faster -amateur -adjacent -stating -twin -flew -publications -obviously -ridge -storage -carl -pages -concluded -desert -driven -universities -ages -terminal -sequence -borough -constituency -cousin -economics -dreams -margaret -notably -reduce -montreal -17th -ears -saved -vocal -riding -roughly -threatened -meters -meanwhile -landed -compete -repeated -grass -czech -regularly -charges -sudden -appeal -solution -describes -classification -glad -parking -belt -physics -rachel -hungarian -participate -expedition -damaged -gift -childhood -fifty -mathematics -jumped -letting -defensive -mph -testing -hundreds -shoot -owners -matters -smoke -israeli -kentucky -dancing -mounted -grandfather -designs -profit -argentina -truly -lawrence -cole -begun -detroit -willing -branches -smiling -decide -miami -enjoyed -recordings -##dale -poverty -ethnic -arabic -accompanied -fishing -determine -residential -acid -returns -starred -strategy -forty -businesses -equivalent -commonwealth -distinct -ill -seriously -##ped -harris -replace -rio -imagine -formula -ensure -additionally -scheme -conservation -occasionally -purposes -feels -favor -1930s -contrast -hanging -hunt -movies -instruments -victims -danish -christopher -busy -demon -sugar -earliest -colony -studying -duties -belgium -slipped -carter -visible -stages -iraq -commune -forming -continuing -talked -counties -legend -bathroom -option -tail -clay -daughters -afterwards -severe -jaw -visitors -devices -aviation -entering -subjects -temporary -swimming -forth -smooth -bush -operates -rocks -movements -signs -eddie -voices -honorary -memories -dallas -measures -racial -promised -harvard -16th -parliamentary -indicate -benefit -flesh -dublin -louisiana -patient -sleeping -membership -coastal 
-medieval -wanting -element -scholars -rice -limit -survive -makeup -rating -definitely -collaboration -obvious -baron -birthday -linked -soil -diocese -ncaa -offensive -shouldn -waist -plain -ross -organ -resolution -manufacturing -adding -relative -kennedy -whilst -moth -gardens -crash -heading -partners -credited -carlos -moves -cable -marshall -depending -bottle -represents -rejected -responded -existed -denmark -##ating -treated -graham -routes -talent -commissioner -drugs -secure -tests -reign -restored -photography -contributions -oklahoma -designer -disc -grin -seattle -robin -paused -atlanta -unusual -praised -las -laughing -satellite -hungary -visiting -interesting -factors -deck -poems -norman -##water -stuck -speaker -rifle -premiered -comics -actors -reputation -eliminated -8th -ceiling -prisoners -leather -austin -mississippi -rapidly -admiral -parallel -charlotte -guilty -tools -gender -divisions -fruit -laboratory -nelson -marry -rapid -aunt -tribe -requirements -aspects -suicide -amongst -adams -bone -ukraine -kick -sees -edinburgh -clothing -column -rough -gods -hunting -broadway -gathered -concerns -spending -ty -12th -snapped -requires -solar -bones -cavalry -iowa -drinking -waste -franklin -charity -thompson -stewart -tip -landscape -enjoy -singh -poem -listening -eighth -fred -differences -adapted -bomb -ukrainian -surgery -corporate -masters -anywhere -waves -odd -portugal -orleans -dick -debate -kent -eating -puerto -cleared -expect -cinema -guitarist -blocks -electrical -agree -involving -depth -dying -panel -struggle -peninsula -adults -novels -emerged -vienna -debuted -shoes -tamil -songwriter -meets -prove -beating -instance -heaven -scared -sending -marks -artistic -passage -superior -significantly -retained -##izing -technique -cheeks -warren -maintenance -destroy -extreme -allied -appearing -fill -advice -alabama -qualifying -policies -cleveland -hat -battery -authors -10th -soundtrack -acted -dated -lb -glance -equipped -coalition 
-funny -outer -ambassador -roy -possibility -couples -campbell -loose -ethan -supplies -gonna -monster -shake -agents -frequency -springs -dogs -practices -gang -plastic -easier -suggests -gulf -blade -exposed -colors -industries -markets -nervous -electoral -charts -legislation -ownership -##idae -appointment -shield -assault -socialist -abbey -monument -license -throne -employment -replacement -charter -suffering -accounts -oak -connecticut -strongly -wright -colour -13th -context -welsh -networks -voiced -gabriel -forehead -manage -schedule -totally -remix -forests -occupation -print -nicholas -brazilian -strategic -vampires -engineers -roots -seek -correct -instrumental -und -alfred -backed -stanley -robinson -traveled -wayne -austrian -achieve -exit -rates -strip -whereas -sing -deeply -adventure -bobby -jamie -careful -components -cap -useful -personality -knee -pushing -hosts -protest -ottoman -symphony -boundary -processes -considering -considerable -tons -cooper -trading -conduct -illegal -revolutionary -definition -harder -jacob -circumstances -destruction -popularity -grip -classified -liverpool -baltimore -flows -seeking -honour -approval -mechanical -till -happening -statue -critic -increasingly -immediate -describe -commerce -stare -indonesia -meat -rounds -boats -baker -orthodox -depression -formally -worn -naked -muttered -sentence -11th -document -criticism -wished -vessel -spiritual -bent -virgin -minimum -murray -lunch -danny -printed -compilation -keyboards -blow -belonged -raising -cutting -pittsburgh -9th -shadows -hated -indigenous -jon -15th -barry -scholar -oliver -stick -susan -meetings -attracted -spell -romantic -ye -demanded -customers -logan -revival -keys -modified -commanded -jeans -upset -phil -detective -hiding -resident -##bly -experiences -diamond -defeating -coverage -lucas -external -parks -franchise -helen -bible -successor -percussion -celebrated -lift -clan -romania -##ied -mills -nobody -achievement -shrugged -fault -rhythm 
-initiative -breakfast -carbon -lasted -violent -wound -killer -gradually -filmed -°c -processing -remove -criticized -guests -sang -chemistry -legislature -##bridge -uniform -escaped -integrated -proposal -purple -denied -liquid -influential -morris -nights -stones -intense -experimental -twisted -pace -nazi -mitchell -ny -blind -reporter -newspapers -14th -centers -burn -basin -forgotten -surviving -filed -collections -monastery -losses -manual -couch -description -appropriate -merely -missions -sebastian -restoration -replacing -triple -elder -julia -warriors -benjamin -julian -convinced -stronger -amazing -declined -versus -merchant -happens -output -finland -bare -barbara -absence -ignored -dawn -injuries -producers -luis -##ities -kw -admit -expensive -electricity -exception -symbol -ladies -shower -sheriff -characteristics -##je -aimed -button -ratio -effectively -summit -angle -jury -bears -foster -vessels -pants -executed -evans -dozen -advertising -kicked -patrol -competitions -lifetime -principles -athletics -birmingham -sponsored -rob -nomination -acoustic -creature -longest -credits -harbor -dust -josh -territories -milk -infrastructure -completion -thailand -indians -leon -archbishop -assist -pitch -blake -arrangement -girlfriend -serbian -operational -hence -sad -scent -fur -sessions -refer -rarely -exists -1892 -scientists -dirty -penalty -burst -portrait -seed -pole -limits -rival -stable -grave -constitutional -alcohol -arrest -flower -mystery -devil -architectural -relationships -greatly -habitat -##istic -larry -progressive -remote -cotton -preserved -reaches -cited -vast -scholarship -decisions -teach -editions -knocked -eve -searching -partly -participation -animated -fate -excellent -alternate -saints -youngest -climbed -suggest -discussion -staying -choir -lakes -jacket -revenue -nevertheless -peaked -instrument -wondering -annually -managing -neil -1891 -signing -terry -apply -clinical -brooklyn -aim -catherine -fuck -farmers -figured 
-ninth -pride -hugh -ordinary -involvement -comfortable -shouted -encouraged -representation -sharing -panic -exact -cargo -competing -fat -cried -1920s -occasions -cabin -borders -utah -marcus -##isation -badly -muscles -victorian -transition -warner -bet -permission -slave -terrible -similarly -shares -seth -uefa -possession -medals -benefits -colleges -lowered -perfectly -transit -##kar -publisher -##ened -harrison -deaths -elevation -asleep -machines -sigh -ash -hardly -argument -occasion -parent -decline -contribution -concentration -opportunities -hispanic -guardian -extent -emotions -hips -mason -volumes -bloody -controversy -diameter -steady -mistake -phoenix -identify -violin -departure -richmond -spin -funeral -enemies -1864 -literally -connor -random -sergeant -grab -confusion -1865 -transmission -informed -leaning -sacred -suspended -thinks -gates -portland -luck -agencies -yours -hull -expert -muscle -layer -practical -sculpture -jerusalem -latest -lloyd -statistics -deeper -recommended -warrior -arkansas -mess -supports -greg -eagle -recovered -rated -concerts -rushed -stops -eggs -premiere -keith -delhi -turner -pit -affair -belief -paint -##zing -victim -withdrew -bonus -styles -fled -glasgow -technologies -funded -adaptation -portrayed -cooperation -supporters -judges -bernard -hallway -ralph -graduating -controversial -distant -continental -spider -bite -recognize -intention -mixing -egyptian -bow -tourism -suppose -claiming -dominated -participants -nurse -partially -tape -psychology -essential -touring -duo -voting -civilian -emotional -channels -apparent -hebrew -1887 -tommy -carrier -intersection -beast -hudson -bench -discuss -costa -##ered -detailed -behalf -drivers -unfortunately -obtain -rocky -##dae -siege -friendship -1861 -hang -governments -collins -respond -wildlife -preferred -operator -laura -pregnant -videos -dennis -suspected -boots -instantly -weird -automatic -businessman -alleged -placing -throwing -mood -1862 -perry -venue 
-jet -remainder -passion -biological -boyfriend -1863 -dirt -buffalo -ron -segment -abuse -genre -thrown -stroke -colored -stress -exercise -displayed -struggled -abroad -dramatic -wonderful -thereafter -madrid -component -widespread -##sed -tale -citizen -todd -vancouver -overseas -forcing -crying -descent -discussed -substantial -ranks -regime -provinces -drum -zane -tribes -proof -researchers -volunteer -manor -silk -milan -donated -allies -venture -principle -delivery -enterprise -bars -traditionally -witch -reminded -copper -pete -inter -colin -grinned -elsewhere -competitive -frequent -scream -tension -texts -submarine -finnish -defending -defend -pat -detail -affiliated -stuart -themes -periods -tool -belgian -ruling -crimes -answers -folded -licensed -demolished -hans -lucy -1881 -lion -traded -photographs -writes -craig -trials -generated -beth -noble -debt -percentage -yorkshire -erected -viewed -grades -confidence -ceased -islam -telephone -retail -chile -m² -roberts -sixteen -commented -hampshire -innocent -dual -pounds -checked -regulations -afghanistan -sung -rico -liberty -assets -bigger -options -angels -relegated -tribute -wells -attending -leaf -romanian -monthly -patterns -gmina -madison -hurricane -rev -##ians -bristol -elite -valuable -disaster -democracy -awareness -germans -freyja -loop -absolutely -paying -populations -maine -sole -prayer -spencer -releases -doorway -bull -lover -midnight -conclusion -thirteen -mediterranean -nhl -proud -sample -##hill -drummer -guinea -murphy -climb -instant -attributed -horn -ain -railways -autumn -ferry -opponent -traveling -secured -corridor -stretched -tales -sheet -trinity -cattle -helps -indicates -manhattan -murdered -fitted -gentle -grandmother -mines -shocked -vegas -produces -caribbean -belong -continuous -desperate -drunk -historically -trio -waved -raf -dealing -nathan -murmured -interrupted -residing -scientist -pioneer -harold -aaron -delta -attempting -minority -believes -chorus -tend -lots 
-eyed -indoor -load -shots -updated -jail -concerning -connecting -wealth -slaves -arrive -rangers -sufficient -rebuilt -##wick -cardinal -flood -muhammad -whenever -relation -runners -moral -repair -viewers -arriving -revenge -punk -assisted -bath -fairly -breathe -lists -innings -illustrated -whisper -nearest -voters -clinton -ties -ultimate -screamed -beijing -lions -andre -fictional -gathering -comfort -radar -suitable -dismissed -hms -ban -pine -wrist -atmosphere -voivodeship -bid -timber -##ned -giants -cameron -recovery -uss -identical -categories -switched -serbia -laughter -noah -ensemble -therapy -peoples -touching -##off -locally -pearl -platforms -everywhere -ballet -tables -lanka -herbert -outdoor -toured -derek -1883 -spaces -contested -swept -1878 -exclusive -slight -connections -winds -prisoner -collective -bangladesh -tube -publicly -wealthy -isolated -insisted -fortune -ticket -spotted -reportedly -animation -enforcement -tanks -decides -wider -lowest -owen -nod -hitting -gregory -furthermore -magazines -fighters -solutions -pointing -requested -peru -reed -chancellor -knights -mask -worker -eldest -flames -reduction -volunteers -reporting -wire -advisory -endemic -origins -settlers -pursue -knock -consumer -1876 -eu -compound -creatures -mansion -sentenced -ivan -deployed -guitars -frowned -involves -mechanism -kilometers -perspective -shops -terminus -duncan -alien -fist -bridges -##pers -heroes -derby -swallowed -patent -sara -illness -characterized -adventures -slide -hawaii -jurisdiction -organised -adelaide -walks -biology -rogers -swing -tightly -boundaries -prepare -implementation -stolen -certified -colombia -edwards -garage -recalled -rage -harm -nigeria -breast -furniture -pupils -settle -cuba -balls -alaska -21st -linear -thrust -celebration -latino -genetic -terror -##ening -lightning -fee -witness -lodge -establishing -skull -earning -hood -rebellion -sporting -warned -missile -devoted -activist -porch -worship -fourteen -package 
-decorated -##shire -housed -chess -sailed -doctors -oscar -joan -treat -garcia -harbour -jeremy -traditions -dominant -jacques -##gon -relocated -1879 -amendment -sized -companion -simultaneously -volleyball -spun -acre -increases -stopping -loves -belongs -affect -drafted -tossed -scout -battles -1875 -filming -shoved -munich -tenure -vertical -romance -argue -craft -ranging -opens -honest -tyler -yesterday -muslims -reveal -snake -immigrants -radical -screaming -speakers -firing -saving -belonging -ease -lighting -prefecture -blame -farmer -hungry -grows -rubbed -beam -sur -subsidiary -armenian -dropping -conventional -qualify -spots -sweat -festivals -immigration -physician -discover -exposure -sandy -explanation -isaac -implemented -##fish -hart -initiated -stakes -presents -heights -householder -pleased -tourist -regardless -slip -closest -surely -sultan -brings -riley -preparation -aboard -slammed -baptist -experiment -ongoing -interstate -organic -playoffs -1877 -hindu -tours -tier -plenty -arrangements -talks -trapped -excited -sank -athens -1872 -denver -welfare -suburb -athletes -trick -diverse -belly -exclusively -yelled -conversion -1874 -internationally -computers -conductor -abilities -sensitive -dispute -measured -globe -rocket -prices -amsterdam -flights -tigers -municipalities -emotion -references -explains -airlines -manufactured -archaeological -1873 -interpretation -devon -##ites -settlements -kissing -absolute -improvement -impressed -barcelona -sullivan -jefferson -towers -jesse -julie -grandson -gauge -regard -rings -interviews -trace -raymond -thumb -departments -burns -serial -bulgarian -scores -demonstrated -1866 -kyle -alberta -underneath -romanized -relieved -acquisition -phrase -cliff -reveals -cuts -merger -custom -nee -gilbert -graduation -assessment -difficulty -demands -swung -democrat -commons -1940s -grove -completing -focuses -sum -substitute -bearing -stretch -reception -reflected -essentially -destination -pairs -##ched 
-survival -resource -##bach -promoting -doubles -messages -tear -##fully -parade -florence -harvey -incumbent -partial -pedro -frozen -procedure -olivia -controls -shelter -personally -temperatures -brisbane -tested -sits -marble -comprehensive -oxygen -leonard -##kov -inaugural -iranian -referring -quarters -attitude -mainstream -lined -mars -dakota -norfolk -unsuccessful -explosion -helicopter -congressional -##sing -inspector -bitch -seal -departed -divine -coaching -examination -punishment -manufacturer -sink -columns -unincorporated -signals -nevada -squeezed -dylan -dining -martial -manuel -eighteen -elevator -brushed -plates -ministers -congregation -slept -specialized -taxes -restricted -negotiations -likes -statistical -arnold -inspiration -execution -bold -intermediate -significance -margin -ruler -wheels -gothic -intellectual -dependent -listened -eligible -buses -widow -syria -earn -cincinnati -collapsed -recipient -secrets -accessible -philippine -maritime -goddess -clerk -surrender -breaks -playoff -ideal -beetle -aspect -soap -regulation -strings -expand -anglo -shorter -crosses -retreat -tough -coins -wallace -directions -pressing -shipping -locomotives -comparison -topics -nephew -distinction -honors -travelled -sierra -ibn -fortress -recognised -carved -1869 -clients -intent -coaches -describing -bread -##ington -beaten -northwestern -merit -collapse -challenges -historians -objective -submitted -virus -attacking -drake -assume -diseases -stem -leeds -farming -glasses -visits -nowhere -fellowship -relevant -carries -restaurants -experiments -constantly -bases -targets -shah -tenth -opponents -verse -territorial -writings -corruption -instruction -inherited -reverse -emphasis -employee -arch -keeps -rabbi -watson -payment -uh -nancy -##tre -venice -fastest -sexy -banned -adrian -properly -ruth -touchdown -dollar -boards -metre -circles -edges -favour -travels -liberation -scattered -firmly -holland -permitted -diesel -kenya -den -originated -demons 
-resumed -dragged -rider -servant -blinked -extend -torn -##sey -input -meal -everybody -cylinder -kinds -camps -bullet -logic -croatian -evolved -healthy -fool -wise -preserve -pradesh -respective -artificial -gross -corresponding -convicted -cage -caroline -dialogue -##dor -narrative -stranger -mario -christianity -failing -trent -commanding -buddhist -1848 -maurice -focusing -yale -bike -altitude -mouse -revised -##sley -veteran -pulls -theology -crashed -campaigns -legion -##ability -drag -excellence -customer -cancelled -intensity -excuse -liga -participating -contributing -printing -##burn -variable -curious -legacy -renaissance -symptoms -binding -vocalist -dancer -grammar -gospel -democrats -enters -diplomatic -hitler -clouds -mathematical -quit -defended -oriented -##heim -fundamental -hardware -impressive -equally -convince -confederate -guilt -chuck -sliding -magnetic -narrowed -petersburg -bulgaria -otto -phd -skill -hopes -pitcher -reservoir -hearts -automatically -expecting -mysterious -bennett -extensively -imagined -seeds -monitor -fix -##ative -journalism -struggling -signature -ranch -encounter -photographer -observation -protests -influences -calendar -cruz -croatia -locomotive -hughes -naturally -shakespeare -basement -hook -uncredited -faded -theories -approaches -dare -phillips -filling -fury -obama -efficient -arc -deliver -breeding -inducted -leagues -efficiency -axis -montana -eagles -##ked -supplied -instructions -karen -picking -indicating -trap -anchor -practically -christians -tomb -vary -occasional -electronics -lords -readers -newcastle -faint -innovation -collect -situations -engagement -claude -mixture -##feld -peer -tissue -lean -°f -floors -architects -reducing -rope -1859 -ottawa -##har -samples -banking -declaration -proteins -resignation -francois -saudi -advocate -exhibited -armor -twins -divorce -##ras -abraham -reviewed -temporarily -matrix -physically -pulse -curled -difficulties -bengal -usage -##ban -riders -certificate 
-holes -warsaw -distinctive -mutual -1857 -customs -circular -eugene -removal -loaded -mere -vulnerable -depicted -generations -dame -heir -enormous -lightly -climbing -pitched -lessons -pilots -nepal -preparing -brad -louise -renowned -liam -##ably -shaw -brilliant -bills -##nik -fucking -mainland -pleasant -seized -veterans -jerked -fail -brush -radiation -stored -warmth -southeastern -nate -sin -raced -berkeley -joke -athlete -designation -trunk -roland -qualification -heels -artwork -receives -judicial -reserves -##bed -woke -installation -abu -floating -fake -lesser -excitement -interface -concentrated -addressed -characteristic -amanda -saxophone -monk -releasing -egg -dies -interaction -defender -outbreak -glory -loving -sequel -consciousness -awake -ski -enrolled -handling -rookie -brow -somebody -biography -warfare -amounts -contracts -presentation -fabric -dissolved -challenged -meter -psychological -elevated -rally -accurate -##tha -hospitals -undergraduate -specialist -venezuela -exhibit -shed -nursing -protestant -fluid -structural -footage -jared -consistent -prey -##ska -succession -reflect -exile -lebanon -wiped -suspect -shanghai -resting -integration -preservation -marvel -variant -pirates -sheep -rounded -capita -sailing -colonies -manuscript -deemed -variations -clarke -functional -emerging -boxing -relaxed -curse -azerbaijan -heavyweight -nickname -editorial -rang -grid -tightened -earthquake -flashed -miguel -rushing -##ches -improvements -boxes -brooks -consumption -molecular -felix -societies -repeatedly -variation -aids -civic -graphics -professionals -realm -autonomous -receiver -delayed -workshop -militia -chairs -canyon -harsh -extending -lovely -happiness -##jan -stake -eyebrows -embassy -wellington -hannah -corners -bishops -swear -cloth -contents -namely -commenced -1854 -stanford -nashville -courage -graphic -commitment -garrison -hamlet -clearing -rebels -attraction -literacy -cooking -ruins -temples -jenny -humanity -celebrate 
-hasn -freight -sixty -rebel -bastard -newton -deer -##ges -##ching -smiles -delaware -singers -approaching -assists -flame -boulevard -barrel -planted -pursuit -consequences -shallow -invitation -rode -depot -ernest -kane -rod -concepts -preston -topic -chambers -striking -blast -arrives -descendants -montgomery -ranges -worlds -chaos -praise -fewer -1855 -sanctuary -mud -programmes -maintaining -harper -bore -handsome -closure -tournaments -nebraska -linda -facade -puts -satisfied -argentine -dale -cork -dome -panama -##yl -1858 -tasks -experts -##ates -feeding -equation -engage -bryan -um -quartet -disbanded -sheffield -blocked -gasped -delay -kisses -connects -##non -sts -poured -creator -publishers -guided -ellis -extinct -hug -gaining -##ord -complicated -poll -clenched -investigate -thereby -quantum -spine -cdp -humor -kills -administered -semifinals -encountered -ignore -commentary -##maker -bother -roosevelt -plains -halfway -flowing -cultures -crack -imprisoned -neighboring -airline -gather -wolves -marathon -transformed -cruise -organisations -punch -exhibitions -numbered -alarm -ratings -daddy -silently -##stein -queens -colours -impression -guidance -tactical -##rat -marshal -della -arrow -rested -feared -tender -owns -bitter -advisor -escort -##ides -spare -farms -grants -dragons -encourage -colleagues -cameras -sucked -pile -spirits -prague -statements -suspension -landmark -fence -torture -recreation -bags -permanently -survivors -pond -spy -predecessor -bombing -coup -protecting -transformation -glow -##lands -dug -priests -andrea -feat -barn -jumping -##ologist -casualties -stern -auckland -pipe -serie -revealing -trevor -mercy -spectrum -consist -governing -collaborated -possessed -epic -comprises -blew -shane -lopez -honored -magical -sacrifice -judgment -perceived -hammer -baronet -tune -das -missionary -sheets -neutral -oral -threatening -attractive -shade -aims -seminary -estates -1856 -michel -wounds -refugees -manufacturers -mercury 
-syndrome -porter -##iya -##din -hamburg -identification -upstairs -purse -widened -pause -cared -breathed -affiliate -santiago -prevented -celtic -fisher -recruited -byzantine -reconstruction -farther -diet -sake -spite -sensation -blank -separation -##hon -vladimir -armies -anime -accommodate -orbit -cult -sofia -##ify -founders -sustained -disorder -honours -northeastern -mia -crops -violet -threats -blanket -fires -canton -followers -southwestern -prototype -voyage -assignment -altered -moderate -protocol -pistol -questioned -brass -lifting -1852 -math -authored -doug -dimensional -dynamic -1851 -pronounced -grateful -quest -uncomfortable -boom -presidency -stevens -relating -politicians -barrier -quinn -diana -mosque -tribal -palmer -portions -sometime -chester -treasure -bend -millions -reforms -registration -consequently -monitoring -ate -preliminary -brandon -invented -eaten -exterior -intervention -ports -documented -displays -lecture -sally -favourite -vermont -invisible -isle -breed -journalists -relay -speaks -backward -explore -midfielder -actively -stefan -procedures -cannon -blond -kenneth -centered -servants -chains -libraries -malcolm -essex -henri -slavery -##hal -facts -fairy -coached -cassie -cats -washed -cop -announcement -2000s -vinyl -activated -marco -frontier -growled -curriculum -##das -loyal -accomplished -leslie -ritual -kenny -vii -napoleon -hollow -hybrid -jungle -stationed -friedrich -counted -##ulated -platinum -theatrical -seated -col -rubber -glen -diversity -healing -extends -provisions -administrator -columbus -tributary -assured -##uous -prestigious -examined -lectures -grammy -ronald -associations -bailey -allan -essays -flute -believing -consultant -proceedings -travelling -1853 -kerala -yugoslavia -buddy -methodist -burial -centres -batman -discontinued -dock -stockholm -lungs -severely -citing -manga -steal -mumbai -iraqi -robot -celebrity -bride -broadcasts -abolished -pot -joel -overhead -franz -packed -reconnaissance 
-johann -acknowledged -introduce -handled -doctorate -developments -drinks -alley -palestine -##aki -proceeded -recover -bradley -grain -patch -afford -infection -nationalist -legendary -interchange -virtually -gen -gravity -exploration -amber -vital -wishes -powell -doctrine -elbow -screenplay -##bird -contribute -indonesian -creates -enzyme -kylie -discipline -drops -manila -hunger -layers -suffer -fever -bits -monica -keyboard -manages -##hood -searched -appeals -##bad -testament -grande -reid -##war -beliefs -congo -requiring -casey -1849 -regret -streak -rape -depends -syrian -sprint -pound -tourists -upcoming -pub -tense -##els -practiced -nationwide -guild -motorcycle -liz -##zar -chiefs -desired -elena -precious -absorbed -relatives -booth -pianist -##mal -citizenship -exhausted -wilhelm -##ceae -##hed -noting -quarterback -urge -hectares -##gue -holly -blonde -davies -parked -sustainable -stepping -twentieth -airfield -nest -chip -##nell -shaft -paulo -requirement -paradise -tobacco -trans -renewed -vietnamese -suggesting -catching -holmes -enjoying -trips -colt -holder -butterfly -nerve -reformed -cherry -bowling -trailer -carriage -goodbye -appreciate -toy -joshua -interactive -enabled -involve -##kan -collar -determination -bunch -recall -shorts -superintendent -episcopal -frustration -giovanni -nineteenth -laser -privately -array -circulation -##ovic -armstrong -deals -painful -permit -discrimination -aires -retiring -cottage -horizon -ellen -jamaica -ripped -fernando -chapters -patron -lecturer -behaviour -genes -georgian -export -solomon -rivals -seventeen -rodriguez -princeton -independently -sox -1847 -arguing -entity -casting -hank -criteria -oakland -geographic -milwaukee -reflection -expanding -conquest -dubbed -halt -brave -brunswick -arched -curtis -divorced -predominantly -somerset -streams -ugly -zoo -horrible -curved -buenos -fierce -dictionary -vector -theological -unions -handful -stability -punjab -segments -altar -ignoring -gesture 
-monsters -pastor -thighs -unexpected -operators -abruptly -coin -compiled -associates -improving -migration -compact -collegiate -quarterfinals -roster -restore -assembled -hurry -oval -##cies -1846 -flags -martha -victories -sharply -##rated -argues -deadly -drawings -symbols -performer -griffin -restrictions -editing -andrews -journals -arabia -compositions -dee -pierce -removing -hindi -casino -runway -civilians -minds -##zation -refuge -rent -retain -potentially -conferences -suburban -conducting -descended -massacre -ammunition -terrain -fork -souls -counts -chelsea -durham -drives -cab -perth -realizing -palestinian -finn -simpson -##dal -betty -moreover -particles -cardinals -tent -evaluation -extraordinary -inscription -wednesday -chloe -maintains -panels -ashley -trucks -##nation -cluster -sunlight -strikes -zhang -dialect -tucked -collecting -##mas -##sville -quoted -evan -franco -aria -buying -cleaning -closet -provision -apollo -clinic -rat -necessarily -##ising -venues -flipped -cent -spreading -trustees -checking -authorized -disappointed -##ado -notion -duration -trumpet -hesitated -topped -brussels -rolls -theoretical -hint -define -aggressive -repeat -wash -peaceful -optical -width -allegedly -mcdonald -strict -##illa -investors -jam -witnesses -sounding -miranda -michelle -hugo -harmony -valid -lynn -glared -nina -headquartered -diving -boarding -gibson -albanian -marsh -routine -dealt -enhanced -intelligent -substance -targeted -enlisted -discovers -spinning -observations -pissed -smoking -capitol -varied -costume -seemingly -indies -compensation -surgeon -thursday -arsenal -westminster -suburbs -rid -anglican -##ridge -knots -foods -alumni -lighter -fraser -whoever -portal -scandal -gavin -advised -instructor -flooding -terrorist -teenage -interim -senses -duck -teen -thesis -abby -eager -overcome -newport -glenn -rises -shame -prompted -priority -forgot -bomber -nicolas -protective -cartoon -katherine -breeze -lonely -trusted -henderson 
-richardson -relax -palms -remarkable -legends -cricketer -essay -ordained -edmund -rifles -trigger -##uri -##away -sail -alert -1830 -audiences -penn -sussex -siblings -pursued -indianapolis -resist -rosa -consequence -succeed -avoided -1845 -##ulation -inland -##tie -##nna -counsel -profession -chronicle -hurried -##una -eyebrow -eventual -bleeding -innovative -cure -committees -accounting -scope -hardy -heather -tenor -gut -herald -codes -tore -scales -wagon -luxury -tin -prefer -fountain -triangle -bonds -darling -convoy -dried -traced -beings -troy -accidentally -slam -findings -smelled -joey -lawyers -outcome -steep -bosnia -configuration -shifting -toll -brook -performers -lobby -philosophical -construct -shrine -aggregate -cox -phenomenon -savage -insane -solely -reynolds -nationally -holdings -consideration -enable -edgar -fights -relegation -chances -atomic -hub -conjunction -awkward -reactions -currency -finale -kumar -underwent -steering -elaborate -gifts -comprising -melissa -veins -reasonable -sunshine -solve -trails -inhabited -elimination -ethics -huh -ana -molly -consent -apartments -layout -marines -hunters -bulk -##oma -hometown -##wall -##mont -cracked -reads -neighbouring -withdrawn -admission -wingspan -damned -anthology -lancashire -brands -batting -forgive -cuban -awful -##lyn -dimensions -imagination -dante -tracking -desperately -goalkeeper -##yne -groaned -workshops -confident -burton -gerald -milton -circus -uncertain -slope -copenhagen -sophia -fog -philosopher -portraits -accent -cycling -varying -gripped -larvae -garrett -specified -scotia -mature -luther -kurt -rap -##kes -aerial -ferdinand -heated -transported -##shan -safely -nonetheless -##orn -##gal -motors -demanding -##sburg -startled -##brook -ally -generate -caps -ghana -stained -mentions -beds -afterward -##bling -utility -##iro -richards -1837 -conspiracy -conscious -shining -footsteps -observer -cyprus -urged -loyalty -developer -probability -olive -upgraded -gym -miracle 
-insects -graves -1844 -ourselves -hydrogen -katie -tickets -poets -planes -prevention -witnessed -dense -jin -randy -tang -warehouse -monroe -archived -elderly -investigations -alec -granite -mineral -conflicts -controlling -aboriginal -mechanics -stan -stark -rhode -skirt -est -bombs -respected -##horn -imposed -limestone -deny -nominee -memphis -grabbing -disabled -amusement -frankfurt -corn -referendum -varies -slowed -disk -firms -unconscious -incredible -clue -sue -##zhou -twist -##cio -joins -idaho -chad -developers -computing -destroyer -mortal -tucker -kingston -choices -carson -whitney -geneva -pretend -dimension -staged -plateau -maya -##une -freestyle -rovers -##ids -tristan -classroom -prospect -##hus -honestly -diploma -lied -thermal -auxiliary -feast -unlikely -iata -morocco -pounding -treasury -lithuania -considerably -1841 -dish -1812 -geological -matching -stumbled -destroying -marched -brien -advances -nicole -settling -measuring -directing -##mie -tuesday -bassist -capabilities -stunned -fraud -torpedo -##phone -anton -wisdom -surveillance -ruined -##ulate -lawsuit -healthcare -theorem -halls -trend -aka -horizontal -dozens -acquire -lasting -swim -hawk -gorgeous -fees -vicinity -decrease -adoption -tactics -##ography -pakistani -##ole -draws -##hall -willie -burke -heath -algorithm -integral -powder -elliott -brigadier -jackie -tate -varieties -darker -##cho -lately -cigarette -specimens -adds -##ensis -##inger -exploded -finalist -murders -wilderness -arguments -nicknamed -acceptance -onwards -manufacture -robertson -jets -tampa -enterprises -loudly -composers -nominations -1838 -malta -inquiry -automobile -hosting -viii -rays -tilted -grief -museums -strategies -furious -euro -equality -cohen -poison -surrey -wireless -governed -ridiculous -moses -##esh -vanished -barnes -attract -morrison -istanbul -##iness -absent -rotation -petition -janet -##logical -satisfaction -custody -deliberately -observatory -comedian -surfaces -pinyin -novelist 
-strictly -canterbury -oslo -monks -embrace -jealous -photograph -continent -dorothy -marina -excess -holden -allegations -explaining -stack -avoiding -lance -storyline -majesty -poorly -spike -bradford -raven -travis -classics -proven -voltage -pillow -fists -butt -1842 -interpreted -1839 -gage -telegraph -lens -promising -expelled -casual -collector -zones -silly -nintendo -##kh -downstairs -chef -suspicious -afl -flies -vacant -uganda -pregnancy -condemned -lutheran -estimates -cheap -decree -saxon -proximity -stripped -idiot -deposits -contrary -presenter -magnus -glacier -offense -edwin -##ori -upright -##long -bolt -##ois -toss -geographical -##izes -environments -delicate -marking -abstract -xavier -nails -windsor -plantation -occurring -equity -saskatchewan -fears -drifted -sequences -vegetation -revolt -##stic -1843 -sooner -fusion -opposing -nato -skating -1836 -secretly -ruin -lease -flora -anxiety -##ological -##mia -bout -taxi -emmy -frost -rainbow -compounds -foundations -rainfall -assassination -nightmare -dominican -achievements -deserve -orlando -intact -armenia -##nte -calgary -valentine -marion -proclaimed -theodore -bells -courtyard -thigh -gonzalez -console -troop -minimal -everyday -supporter -terrorism -buck -openly -presbyterian -activists -carpet -##iers -rubbing -uprising -cute -conceived -legally -##cht -millennium -cello -velocity -rescued -cardiff -1835 -rex -concentrate -senators -beard -rendered -glowing -battalions -scouts -competitors -sculptor -catalogue -arctic -ion -raja -bicycle -glancing -lawn -##woman -gentleman -lighthouse -publish -predicted -calculated -variants -##gne -strain -winston -deceased -touchdowns -brady -caleb -sinking -echoed -crush -hon -blessed -protagonist -hayes -endangered -magnitude -editors -##tine -estimate -responsibilities -##mel -backup -laying -consumed -sealed -zurich -lovers -frustrated -##eau -ahmed -kicking -treasurer -1832 -biblical -refuse -terrified -pump -agrees -genuine -imprisonment 
-refuses -plymouth -lou -##nen -tara -trembling -antarctic -ton -learns -##tas -crap -crucial -faction -atop -##borough -wrap -lancaster -odds -hopkins -erik -lyon -##eon -bros -snap -locality -empress -crowned -cal -acclaimed -chuckled -clara -sends -mild -towel -wishing -assuming -interviewed -##bal -interactions -eden -cups -helena -indie -beck -##fire -batteries -filipino -wizard -parted -traces -##born -rows -idol -albany -delegates -##ees -##sar -discussions -notre -instructed -belgrade -highways -suggestion -lauren -possess -orientation -alexandria -abdul -beats -salary -reunion -ludwig -alright -wagner -intimate -pockets -slovenia -hugged -brighton -merchants -cruel -stole -trek -slopes -repairs -enrollment -politically -underlying -promotional -counting -boeing -isabella -naming -keen -bacteria -listing -separately -belfast -ussr -lithuanian -anybody -ribs -sphere -martinez -cock -embarrassed -proposals -fragments -nationals -##wski -premises -fin -alpine -matched -freely -bounded -jace -sleeve -pier -populated -evident -##like -frances -flooded -##dle -frightened -pour -trainer -framed -visitor -challenging -pig -wickets -##fold -infected -##pes -arose -reward -ecuador -oblast -vale -shuttle -##usa -bach -rankings -forbidden -cornwall -accordance -salem -consumers -bruno -fantastic -toes -machinery -resolved -julius -remembering -propaganda -iceland -bombardment -tide -contacts -wives -##rah -concerto -macdonald -albania -implement -daisy -tapped -sudan -helmet -mistress -crop -sunk -finest -##craft -hostile -boxer -fr -paths -adjusted -habit -ballot -supervision -soprano -bullets -wicked -sunset -regiments -disappear -lamp -performs -##gia -rabbit -digging -incidents -entries -##cion -dishes -introducing -##ati -##fied -freshman -slot -jill -tackles -baroque -backs -##iest -lone -sponsor -destiny -altogether -convert -##aro -consensus -shapes -demonstration -basically -feminist -auction -artifacts -##bing -strongest -halifax -allmusic -mighty -smallest 
-precise -alexandra -viola -##los -##ille -manuscripts -##illo -dancers -ari -managers -monuments -blades -barracks -springfield -maiden -consolidated -electron -berry -airing -wheat -nobel -inclusion -blair -payments -geography -bee -eleanor -react -##hurst -afc -manitoba -lineup -fitness -recreational -investments -airborne -disappointment -##dis -edmonton -viewing -renovation -infant -bankruptcy -roses -aftermath -pavilion -carpenter -withdrawal -ladder -discussing -popped -reliable -agreements -rochester -##abad -curves -bombers -rao -reverend -decreased -choosing -stiff -consulting -naples -crawford -tracy -ribbon -cops -crushed -deciding -unified -teenager -accepting -flagship -poles -sanchez -inspection -revived -skilled -induced -exchanged -flee -locals -tragedy -swallow -hanna -demonstrate -##ela -salvador -flown -contestants -civilization -##ines -wanna -rhodes -fletcher -hector -knocking -considers -nash -mechanisms -sensed -mentally -walt -unclear -##eus -renovated -madame -crews -governmental -undertaken -monkey -##ben -##ato -fatal -armored -copa -caves -governance -grasp -perception -certification -froze -damp -tugged -wyoming -##rg -##ero -newman -nerves -curiosity -graph -##ami -withdraw -tunnels -dull -meredith -moss -exhibits -neighbors -communicate -accuracy -explored -raiders -republicans -secular -kat -superman -penny -criticised -freed -conviction -ham -likewise -delegation -gotta -doll -promises -technological -myth -nationality -resolve -convent -sharon -dig -sip -coordinator -entrepreneur -fold -##dine -capability -councillor -synonym -blown -swan -cursed -1815 -jonas -haired -sofa -canvas -keeper -rivalry -##hart -rapper -speedway -swords -postal -maxwell -estonia -potter -recurring -errors -##oni -cognitive -1834 -claws -nadu -roberto -bce -wrestler -ellie -infinite -ink -##tia -presumably -finite -staircase -noel -patricia -nacional -chill -eternal -tu -preventing -prussia -fossil -limbs -##logist -ernst -frog -perez -rene -prussian 
-##ios -molecules -regulatory -answering -opinions -sworn -lengths -supposedly -hypothesis -upward -habitats -seating -ancestors -drank -yield -synthesis -researcher -modest -##var -mothers -peered -voluntary -homeland -acclaim -##igan -static -valve -luxembourg -alto -carroll -receptor -norton -ambulance -##tian -johnston -catholics -depicting -jointly -elephant -gloria -mentor -badge -ahmad -distinguish -remarked -councils -precisely -allison -advancing -detection -crowded -cooperative -ankle -mercedes -dagger -surrendered -pollution -commit -subway -jeffrey -lesson -sculptures -provider -##fication -membrane -timothy -rectangular -fiscal -heating -teammate -basket -particle -anonymous -deployment -missiles -courthouse -proportion -shoe -sec -complaints -forbes -blacks -abandon -remind -sizes -overwhelming -autobiography -natalie -##awa -risks -contestant -countryside -babies -scorer -invaded -enclosed -proceed -hurling -disorders -##cu -reflecting -continuously -cruiser -graduates -freeway -investigated -ore -deserved -maid -blocking -phillip -jorge -shakes -dove -mann -variables -lacked -burden -accompanying -que -consistently -organizing -provisional -complained -endless -tubes -juice -georges -krishna -mick -thriller -laps -arcade -sage -snail -shannon -laurence -seoul -vacation -presenting -hire -churchill -surprisingly -prohibited -savannah -technically -##oli -##lessly -testimony -suited -speeds -toys -romans -flowering -measurement -talented -kay -settings -charleston -expectations -shattered -achieving -triumph -ceremonies -portsmouth -lanes -mandatory -loser -stretching -cologne -realizes -seventy -cornell -careers -webb -##ulating -americas -budapest -ava -suspicion -yo -conrad -sterling -jessie -rector -##az -1831 -transform -organize -loans -christine -volcanic -warrant -slender -summers -subfamily -newer -danced -dynamics -rhine -proceeds -heinrich -gastropod -commands -sings -facilitate -easter -positioned -responses -expense -fruits -yanked 
-imported -25th -velvet -vic -primitive -tribune -baldwin -neighbourhood -donna -rip -hay -##uro -1814 -espn -welcomed -##aria -qualifier -glare -highland -timing -##cted -shells -eased -geometry -louder -exciting -slovakia -##iz -savings -prairie -marching -rafael -tonnes -##lled -curtain -preceding -shy -heal -greene -worthy -##pot -detachment -bury -sherman -##eck -reinforced -seeks -bottles -contracted -duchess -outfit -walsh -mickey -geoffrey -archer -squeeze -dawson -eliminate -invention -##enberg -neal -##eth -stance -dealer -coral -maple -retire -simplified -1833 -hid -watts -backwards -jules -##oke -genesis -frames -rebounds -burma -woodland -moist -santos -whispers -drained -subspecies -streaming -ulster -burnt -correspondence -maternal -gerard -denis -stealing -genius -duchy -##oria -inaugurated -momentum -suits -placement -sovereign -clause -thames -##hara -confederation -reservation -sketch -yankees -lets -rotten -charm -hal -verses -commercially -dot -salon -citation -adopt -winnipeg -mist -allocated -cairo -jenkins -interference -objectives -##wind -1820 -portfolio -armoured -sectors -initiatives -integrity -exercises -robe -tap -gazed -##tones -distracted -rulers -favorable -jerome -tended -cart -factories -##eri -diplomat -valued -gravel -charitable -calvin -exploring -shepherd -terrace -pupil -##ural -reflects -##rch -governors -shelf -depths -##nberg -trailed -crest -tackle -##nian -hatred -##kai -clare -makers -ethiopia -longtime -detected -embedded -lacking -slapped -rely -thomson -anticipation -morton -successive -agnes -screenwriter -straightened -philippe -playwright -haunted -licence -iris -intentions -sutton -logical -correctly -##weight -branded -licked -tipped -silva -ricky -narrator -requests -##ents -greeted -supernatural -cow -##wald -lung -refusing -employer -strait -gaelic -liner -##piece -zoe -sabha -##mba -driveway -harvest -prints -bates -reluctantly -threshold -algebra -ira -wherever -coupled -assumption -picks -designers -raids 
-gentlemen -roller -blowing -leipzig -locks -screw -dressing -strand -##lings -scar -dwarf -depicts -##nu -nods -differ -boris -##eur -yuan -flip -##gie -mob -invested -questioning -applying -shout -##sel -gameplay -blamed -illustrations -bothered -weakness -rehabilitation -##zes -envelope -rumors -miners -leicester -subtle -kerry -ferguson -premiership -bengali -prof -catches -remnants -dana -##rily -shouting -presidents -baltic -ought -ghosts -dances -sailors -shirley -fancy -dominic -##bie -madonna -##rick -bark -buttons -gymnasium -ashes -liver -toby -oath -providence -doyle -evangelical -nixon -cement -carnegie -embarked -hatch -surroundings -guarantee -needing -pirate -essence -filter -crane -hammond -projected -immune -percy -twelfth -regent -doctoral -damon -mikhail -##ichi -critically -elect -realised -abortion -acute -screening -mythology -steadily -frown -nottingham -kirk -wa -minneapolis -##rra -module -algeria -nautical -encounters -surprising -statues -availability -shirts -pie -alma -brows -munster -mack -soup -crater -tornado -sanskrit -cedar -explosive -bordered -dixon -planets -stamp -exam -happily -##bble -carriers -kidnapped -accommodation -emigrated -##met -knockout -correspondent -violation -profits -peaks -lang -specimen -agenda -ancestry -pottery -spelling -equations -obtaining -ki -linking -1825 -debris -asylum -buddhism -##ants -gazette -dental -eligibility -fathers -averaged -zimbabwe -francesco -coloured -hissed -translator -lynch -mandate -humanities -mackenzie -uniforms -##iana -asset -fitting -samantha -genera -rim -beloved -shark -riot -entities -expressions -indo -carmen -slipping -owing -abbot -neighbor -sidney -rats -recommendations -encouraging -squadrons -anticipated -commanders -conquered -donations -diagnosed -divide -##iva -guessed -decoration -vernon -auditorium -revelation -conversations -##kers -##power -herzegovina -dash -alike -protested -lateral -herman -accredited -##gent -freeman -mel -fiji -crow -crimson -##rine 
-livestock -##pped -humanitarian -bored -oz -whip -##lene -##ali -legitimate -alter -grinning -spelled -anxious -oriental -wesley -##nin -##hole -carnival -controller -detect -##ssa -bowed -educator -kosovo -macedonia -##sin -occupy -mastering -stephanie -janeiro -para -unaware -nurses -noon -hopefully -ranger -combine -sociology -polar -rica -##eer -neill -##sman -holocaust -doubled -lust -1828 -decent -cooling -unveiled -1829 -nsw -homer -chapman -meyer -dive -mae -reagan -expertise -##gled -darwin -brooke -sided -prosecution -investigating -comprised -petroleum -genres -reluctant -differently -trilogy -johns -vegetables -corpse -highlighted -lounge -pension -unsuccessfully -elegant -aided -ivory -beatles -amelia -cain -dubai -immigrant -babe -underwater -combining -mumbled -atlas -horns -accessed -ballad -physicians -homeless -gestured -rpm -freak -louisville -corporations -patriots -prizes -rational -warn -modes -decorative -overnight -din -troubled -phantom -monarch -sheer -##dorf -generals -guidelines -organs -addresses -enhance -curling -parishes -cord -##kie -caesar -deutsche -bavaria -coleman -cyclone -##eria -bacon -petty -##yama -##old -hampton -diagnosis -1824 -throws -complexity -rita -disputed -pablo -marketed -trafficking -##ulus -examine -plague -formats -vault -faithful -##bourne -webster -highlights -##ient -phones -vacuum -sandwich -modeling -##gated -bolivia -clergy -qualities -isabel -##nas -##ars -wears -screams -reunited -annoyed -bra -##ancy -##rate -differential -transmitter -tattoo -container -poker -##och -excessive -resides -cowboys -##tum -augustus -trash -providers -statute -retreated -balcony -reversed -void -storey -preceded -masses -leap -laughs -neighborhoods -wards -schemes -falcon -santo -battlefield -ronnie -lesbian -venus -##dian -beg -sandstone -daylight -punched -gwen -analog -stroked -wwe -acceptable -measurements -toxic -##kel -adequate -surgical -economist -parameters -varsity -##sberg -quantity -##chy -##rton -countess 
-generating -precision -diamonds -expressway -##ı -1821 -uruguay -talents -galleries -expenses -scanned -colleague -outlets -ryder -lucien -##ila -paramount -syracuse -dim -fangs -gown -sweep -##sie -missionaries -websites -sentences -adviser -val -trademark -spells -##plane -patience -starter -slim -##borg -toe -incredibly -shoots -elliot -nobility -##wyn -cowboy -endorsed -gardner -tendency -persuaded -organisms -emissions -kazakhstan -amused -boring -chips -themed -##hand -constantinople -chasing -systematic -guatemala -borrowed -erin -carey -##hard -highlands -struggles -1810 -##ifying -##ced -exceptions -develops -enlarged -kindergarten -castro -##rina -leigh -zombie -juvenile -##most -consul -sailor -hyde -clarence -intensive -pinned -nasty -useless -jung -clayton -stuffed -exceptional -ix -apostolic -transactions -exempt -swinging -cove -religions -shields -dairy -bypass -pursuing -joyce -bombay -chassis -southampton -chat -interact -redesignated -##pen -nascar -pray -salmon -rigid -regained -malaysian -grim -publicity -constituted -capturing -toilet -delegate -purely -tray -drift -loosely -striker -weakened -trinidad -mitch -itv -defines -transmitted -scarlet -nodding -fitzgerald -narrowly -tooth -standings -virtue -##wara -##cting -chateau -gloves -lid -hurting -conservatory -##pel -sinclair -reopened -sympathy -nigerian -strode -advocated -optional -chronic -discharge -suck -compatible -laurel -stella -fails -wage -dodge -informal -sorts -levi -buddha -villagers -chronicles -heavier -summoned -gateway -eleventh -jewelry -translations -accordingly -seas -##ency -fiber -pyramid -cubic -dragging -##ista -caring -##ops -contacted -lunar -lisbon -patted -1826 -sacramento -theft -madagascar -subtropical -disputes -holidays -piper -willow -mare -cane -newfoundland -benny -companions -dong -raj -observe -roar -charming -plaque -tibetan -fossils -enacted -manning -bubble -tanzania -##eda -##hir -funk -swamp -deputies -cloak -ufc -scenario -par -scratch -metals 
-anthem -guru -engaging -specially -##boat -dialects -nineteen -cecil -duet -disability -unofficial -##lies -defunct -moonlight -drainage -surname -puzzle -switching -conservatives -mammals -knox -broadcaster -sidewalk -cope -##ried -benson -princes -peterson -##sal -bedford -sharks -eli -wreck -alberto -gasp -archaeology -lgbt -teaches -securities -madness -compromise -waving -coordination -davidson -visions -leased -possibilities -eighty -fernandez -enthusiasm -assassin -sponsorship -reviewer -kingdoms -estonian -laboratories -##fy -##nal -applies -verb -celebrations -##zzo -rowing -lightweight -sadness -submit -balanced -dude -explicitly -metric -magnificent -mound -brett -mohammad -mistakes -irregular -sanders -betrayed -shipped -surge -##enburg -reporters -termed -georg -pity -verbal -bulls -abbreviated -enabling -appealed -sicily -sting -heel -sweetheart -bart -spacecraft -brutal -monarchy -aberdeen -cameo -diane -survivor -clyde -##aries -complaint -##makers -clarinet -delicious -chilean -karnataka -coordinates -1818 -panties -##rst -pretending -dramatically -kiev -tends -distances -catalog -launching -instances -telecommunications -portable -lindsay -vatican -##eim -angles -aliens -marker -stint -screens -bolton -##rne -judy -wool -benedict -plasma -europa -imaging -filmmaker -swiftly -contributor -opted -stamps -apologize -financing -butter -gideon -sophisticated -alignment -avery -chemicals -yearly -speculation -prominence -professionally -immortal -institutional -inception -wrists -identifying -tribunal -derives -gains -papal -preference -linguistic -vince -operative -brewery -##ont -unemployment -boyd -##ured -##outs -albeit -prophet -1813 -##rad -quarterly -asteroid -cleaned -radius -temper -##llen -telugu -jerk -viscount -##ote -glimpse -##aya -yacht -hawaiian -baden -laptop -readily -##gu -monetary -offshore -scots -watches -##yang -##arian -upgrade -needle -lea -encyclopedia -flank -fingertips -delight -teachings -confirm -roth -beaches -midway 
-winters -##iah -teasing -daytime -beverly -gambling -##backs -regulated -clement -hermann -tricks -knot -##shing -##uring -##vre -detached -ecological -owed -specialty -byron -inventor -bats -stays -screened -unesco -midland -trim -affection -##ander -jess -thoroughly -feedback -chennai -strained -heartbeat -wrapping -overtime -pleaded -##sworth -leisure -oclc -##tate -##ele -feathers -angelo -thirds -nuts -surveys -clever -gill -commentator -##dos -darren -rides -gibraltar -dissolution -dedication -shin -meals -saddle -elvis -reds -chaired -taller -appreciation -functioning -niece -favored -advocacy -robbie -criminals -suffolk -yugoslav -passport -constable -congressman -hastings -##rov -consecrated -sparks -ecclesiastical -confined -##ovich -muller -floyd -nora -1822 -paved -1827 -cumberland -ned -saga -spiral -appreciated -collaborative -treating -similarities -feminine -finishes -##ib -jade -import -##hot -champagne -mice -securing -celebrities -helsinki -attributes -##gos -cousins -phases -ache -lucia -gandhi -submission -vicar -spear -shine -tasmania -biting -detention -constitute -tighter -seasonal -##gus -terrestrial -matthews -effectiveness -parody -philharmonic -##onic -1816 -strangers -encoded -consortium -guaranteed -regards -shifts -tortured -collision -supervisor -inform -broader -insight -theaters -armour -emeritus -blink -incorporates -mapping -handball -flexible -##nta -substantially -generous -thief -carr -loses -1793 -prose -ucla -romeo -generic -metallic -realization -damages -commissioners -zach -default -helicopters -lengthy -stems -partnered -spectators -rogue -indication -penalties -teresa -1801 -sen -##tric -dalton -##wich -irving -photographic -##vey -deaf -peters -excluded -unsure -##vable -patterson -crawled -##zio -resided -whipped -latvia -slower -ecole -pipes -employers -maharashtra -comparable -textile -pageant -##gel -alphabet -binary -irrigation -chartered -choked -antoine -offs -waking -supplement -quantities -demolition -regain 
-locate -urdu -folks -scary -andreas -whites -##ava -classrooms -mw -aesthetic -publishes -valleys -guides -cubs -johannes -bryant -conventions -affecting -##itt -drain -awesome -isolation -prosecutor -ambitious -apology -captive -downs -atmospheric -lorenzo -aisle -beef -foul -##onia -kidding -composite -disturbed -illusion -natives -##ffer -rockets -riverside -wartime -painters -adolf -melted -uncertainty -simulation -hawks -progressed -meantime -builder -spray -breach -unhappy -regina -russians -determining -tram -1806 -##quin -aging -1823 -garion -rented -mister -diaz -terminated -clip -1817 -depend -nervously -disco -owe -defenders -shiva -notorious -disbelief -shiny -worcester -##gation -##yr -trailing -undertook -islander -belarus -limitations -watershed -fuller -overlooking -utilized -raphael -1819 -synthetic -breakdown -klein -##nate -moaned -memoir -lamb -practicing -##erly -cellular -arrows -exotic -witches -charted -rey -hut -hierarchy -subdivision -freshwater -giuseppe -aloud -reyes -qatar -marty -sideways -utterly -sexually -jude -prayers -mccarthy -softball -blend -damien -##gging -##metric -wholly -erupted -lebanese -negro -revenues -tasted -comparative -teamed -transaction -labeled -maori -sovereignty -parkway -trauma -gran -malay -advancement -descendant -buzz -salvation -inventory -symbolic -##making -antarctica -mps -##bro -mohammed -myanmar -holt -submarines -tones -##lman -locker -patriarch -bangkok -emerson -remarks -predators -kin -afghan -confession -norwich -rental -emerge -advantages -##zel -rca -##hold -shortened -storms -aidan -##matic -autonomy -compliance -##quet -dudley -##osis -1803 -motto -documentation -summary -professors -spectacular -christina -archdiocese -flashing -innocence -remake -##dell -psychic -reef -scare -employ -sticks -meg -gus -leans -accompany -bergen -tomas -doom -wages -pools -##bes -breasts -scholarly -alison -outline -brittany -breakthrough -willis -realistic -##cut -##boro -competitor -##stan -pike -picnic 
-designing -commercials -washing -villain -skiing -costumes -auburn -halted -executives -logistics -cycles -vowel -applicable -barrett -exclaimed -eurovision -eternity -ramon -##umi -modifications -sweeping -disgust -torch -aviv -ensuring -rude -dusty -sonic -donovan -outskirts -cu -pathway -##band -##gun -disciplines -acids -cadet -paired -sketches -##sive -marriages -folding -peers -slovak -implies -admired -##beck -1880s -leopold -instinct -attained -weston -megan -horace -##ination -dorsal -ingredients -evolutionary -complications -deity -lethal -brushing -levy -deserted -institutes -posthumously -delivering -telescope -coronation -motivated -rapids -luc -flicked -pays -volcano -tanner -weighed -##nica -crowds -frankie -gifted -addressing -granddaughter -winding -##rna -constantine -gomez -##front -landscapes -rudolf -anthropology -slate -werewolf -astronomy -circa -rouge -dreaming -sack -knelt -drowned -naomi -prolific -tracked -freezing -herb -agony -randall -twisting -wendy -deposit -touches -vein -wheeler -##bbled -batted -retaining -tire -presently -compare -specification -daemon -nigel -##grave -merry -recommendation -czechoslovakia -sandra -roma -##sts -lambert -inheritance -sheikh -winchester -cries -examining -##yle -comeback -cuisine -nave -##iv -retrieve -tomatoes -barker -polished -defining -irene -lantern -personalities -begging -tract -swore -1809 -##gic -omaha -brotherhood -haiti -##ots -exeter -##ete -##zia -steele -dumb -pearson -surveyed -elisabeth -trends -fritz -bugs -fraction -calmly -viking -##birds -tug -inserted -unusually -##ield -confronted -distress -crashing -brent -turks -resign -##olo -cambodia -gabe -sauce -##kal -evelyn -extant -clusters -quarry -teenagers -luna -##lers -##ister -affiliation -drill -##ashi -panthers -scenic -libya -anita -strengthen -inscriptions -##cated -lace -sued -judith -riots -##uted -mint -##eta -preparations -midst -dub -challenger -##vich -mock -displaced -wicket -breaths -enables -schmidt -analyst 
-##lum -highlight -automotive -axe -josef -newark -sufficiently -resembles -50th -##pal -flushed -mum -traits -##ante -commodore -incomplete -warming -titular -ceremonial -ethical -celebrating -eighteenth -cao -lima -medalist -mobility -strips -snakes -miniature -zagreb -barton -escapes -umbrella -automated -doubted -differs -cooled -georgetown -dresden -cooked -fade -wyatt -jacobs -carlton -abundant -stereo -madras -inning -spur -malayalam -begged -osaka -groan -escaping -charging -dose -##aj -bud -papa -communists -advocates -edged -tri -resemble -peaking -necklace -fried -montenegro -saxony -goose -glances -stuttgart -curator -recruit -grocery -sympathetic -##tting -##fort -lotus -randolph -ancestor -##rand -succeeding -jupiter -1798 -macedonian -##heads -hiking -1808 -handing -fischer -##itive -garbage -##pies -prone -singular -papua -inclined -attractions -italia -pouring -motioned -grandma -garnered -jacksonville -corp -ego -ringing -aluminum -##hausen -ordering -##foot -drawer -traders -synagogue -##kawa -resistant -wandering -fragile -fiona -teased -hardcore -soaked -jubilee -decisive -exposition -mercer -poster -valencia -hale -kuwait -1811 -##ises -##wr -##eed -tavern -gamma -johan -##uer -airways -amino -gil -vocational -domains -torres -generator -folklore -outcomes -##keeper -canberra -shooter -fl -beams -confrontation -##gram -aligned -forestry -pipeline -jax -motorway -conception -decay -coffin -##cott -stalin -1805 -escorted -minded -##nam -sitcom -purchasing -twilight -veronica -additions -passive -tensions -straw -frequencies -1804 -refugee -cultivation -##iate -christie -clary -bulletin -crept -disposal -##rich -##zong -processor -crescent -##rol -emphasized -whale -nazis -aurora -dwelling -hauled -sponsors -toledo -ideology -theatres -tessa -cerambycidae -saves -turtle -cone -suspects -kara -rusty -yelling -greeks -mozart -shades -cocked -participant -shire -spit -freeze -necessity -##cos -inmates -nielsen -councillors -loaned -uncommon -omar 
-peasants -botanical -offspring -daniels -formations -jokes -1794 -pioneers -sigma -licensing -##sus -wheelchair -polite -1807 -liquor -pratt -trustee -##uta -forewings -balloon -kilometre -camping -explicit -casually -shawn -foolish -teammates -nm -hassan -carrie -judged -satisfy -vanessa -knives -selective -flowed -##lice -stressed -eliza -mathematician -cease -cultivated -##roy -commissions -browns -##ania -destroyers -sheridan -meadow -##rius -minerals -##cial -downstream -clash -gram -memoirs -ventures -baha -seymour -archie -midlands -edith -fare -flynn -invite -canceled -tiles -stabbed -boulder -incorporate -amended -camden -facial -mollusk -unreleased -descriptions -grabs -raises -ramp -shiver -##rose -coined -pioneering -tunes -qing -warwick -tops -melanie -giles -##rous -wandered -##inal -annexed -30th -unnamed -##ished -organizational -airplane -normandy -stoke -whistle -blessing -violations -chased -holders -shotgun -##ctic -reactor -##vik -tires -tearing -shores -fortified -mascot -constituencies -columnist -productive -tibet -##rta -lineage -hooked -tapes -judging -cody -##gger -hansen -kashmir -triggered -##eva -solved -cliffs -##tree -resisted -anatomy -protesters -transparent -implied -##iga -injection -mattress -excluding -##mbo -defenses -helpless -devotion -##elli -growl -liberals -weber -phenomena -atoms -plug -##iff -mortality -apprentice -howe -convincing -swimmer -barber -leone -promptly -sodium -def -nowadays -arise -##oning -gloucester -corrected -dignity -norm -erie -##ders -elders -evacuated -compression -##yar -hartford -backpack -reasoning -accepts -24th -wipe -millimetres -marcel -##oda -dodgers -albion -1790 -overwhelmed -aerospace -oaks -1795 -showcase -acknowledge -recovering -nolan -ashe -hurts -geology -fashioned -disappearance -farewell -swollen -shrug -marquis -wimbledon -rue -1792 -commemorate -reduces -experiencing -inevitable -calcutta -##court -murderer -sticking -fisheries -imagery -bloom -##inus -gustav -hesitation 
-memorable -viral -beans -accidents -tunisia -antenna -spilled -consort -treatments -aye -perimeter -##gard -donation -hostage -migrated -banker -addiction -apex -lil -trout -##ously -conscience -##nova -rams -sands -genome -passionate -troubles -##lets -amid -##ibility -##ret -higgins -exceed -vikings -##vie -payne -##zan -muscular -defendant -sucking -##wal -ibrahim -fuselage -claudia -vfl -europeans -snails -interval -##garh -preparatory -statewide -tasked -lacrosse -viktor -##lation -angola -##hra -flint -implications -employs -teens -patrons -stall -weekends -barriers -scrambled -nucleus -tehran -jenna -parsons -lifelong -robots -displacement -##bles -precipitation -knuckles -clutched -1802 -marrying -ecology -marx -accusations -declare -scars -kolkata -mat -meadows -bermuda -skeleton -finalists -vintage -crawl -coordinate -affects -subjected -orchestral -mistaken -mirrors -dipped -relied -arches -candle -##nick -incorporating -wildly -fond -basilica -owl -fringe -rituals -whispering -stirred -feud -tertiary -slick -goat -honorable -whereby -ricardo -stripes -parachute -adjoining -submerged -synthesizer -##gren -intend -positively -ninety -phi -beaver -partition -fellows -alexis -prohibition -carlisle -bizarre -fraternity -doubts -icy -aquatic -sneak -sonny -combines -airports -crude -supervised -spatial -merge -alfonso -##bic -corrupt -scan -undergo -##ams -disabilities -colombian -comparing -dolphins -perkins -reprinted -unanimous -bounced -hairs -underworld -midwest -semester -bucket -paperback -miniseries -coventry -demise -##leigh -demonstrations -sensor -rotating -yan -##hler -arrange -soils -##idge -hyderabad -labs -brakes -grandchildren -##nde -negotiated -rover -ferrari -continuation -directorate -augusta -stevenson -counterpart -gore -##rda -nursery -rican -ave -collectively -broadly -pastoral -repertoire -asserted -discovering -nordic -styled -fiba -cunningham -harley -middlesex -survives -tumor -tempo -zack -aiming -lok -urgent -##nto -devils 
-contractor -turin -##wl -bliss -repaired -simmons -moan -astronomical -negotiate -lyric -1890s -lara -bred -clad -angus -pbs -engineered -posed -hernandez -possessions -elbows -psychiatric -strokes -confluence -electorate -lifts -campuses -lava -alps -##ution -##date -physicist -woody -##ographic -##itis -juliet -reformation -sparhawk -complement -suppressed -jewel -##½ -floated -##kas -continuity -sadly -##ische -inability -melting -scanning -paula -flour -judaism -safer -vague -solving -curb -##stown -financially -gable -bees -expired -miserable -cassidy -dominion -1789 -cupped -robbery -facto -amos -warden -resume -tallest -marvin -pounded -declaring -gasoline -##aux -darkened -sophomore -##mere -erection -gossip -televised -risen -dial -##eu -pillars -passages -profound -arabian -ashton -silicon -nail -##lated -##hardt -fleming -firearms -ducked -circuits -blows -waterloo -titans -fireplace -cheshire -financed -activation -algorithms -constituent -catcher -cherokee -partnerships -sexuality -platoon -tragic -vivian -guarded -whiskey -meditation -poetic -##nga -porto -listeners -dominance -kendra -mona -chandler -factions -22nd -salisbury -attitudes -derivative -##ido -##haus -intake -paced -javier -illustrator -barrels -bias -cockpit -burnett -dreamed -ensuing -receptors -someday -hawkins -mattered -##lal -slavic -1799 -jesuit -cameroon -wasted -wax -lowering -victorious -freaking -outright -hancock -librarian -sensing -bald -calcium -myers -tablet -announcing -barack -shipyard -pharmaceutical -greenwich -flush -medley -patches -wolfgang -speeches -acquiring -exams -nikolai -hayden -kannada -reilly -waitress -abdomen -devastated -capped -pseudonym -pharmacy -fulfill -paraguay -1796 -clicked -##trom -archipelago -syndicated -##hman -lumber -orgasm -rejection -clifford -lorraine -advent -mafia -rodney -brock -##used -##elia -cassette -chamberlain -despair -mongolia -sensors -developmental -upstream -##alis -spanning -trombone -basque -seeded -interred -renewable 
-rhys -leapt -revision -molecule -##ages -chord -vicious -nord -shivered -23rd -arlington -debts -corpus -sunrise -bays -blackburn -centimetres -##uded -shuddered -strangely -gripping -cartoons -isabelle -orbital -##ppa -seals -proving -refusal -strengthened -bust -assisting -baghdad -batsman -portrayal -mara -pushes -spears -og -##cock -reside -nathaniel -brennan -1776 -confirmation -caucus -##worthy -markings -yemen -nobles -ku -lazy -viewer -catalan -encompasses -sawyer -##fall -sparked -substances -patents -braves -arranger -evacuation -sergio -persuade -dover -tolerance -penguin -cum -jockey -insufficient -townships -occupying -declining -plural -processed -projection -puppet -flanders -introduces -liability -##yon -gymnastics -antwerp -hobart -candles -jeep -wes -observers -chaplain -bundle -glorious -##hine -hazel -flung -sol -excavations -dumped -stares -bangalore -triangular -icelandic -intervals -expressing -turbine -##vers -songwriting -crafts -##igo -jasmine -ditch -rite -entertaining -comply -sorrow -wrestlers -basel -emirates -marian -rivera -helpful -##some -caution -downward -networking -##atory -##tered -darted -genocide -emergence -replies -specializing -spokesman -convenient -unlocked -fading -augustine -concentrations -resemblance -elijah -investigator -andhra -##uda -promotes -##rrell -fleeing -simone -announcer -lydia -weaver -residency -modification -##fest -stretches -alternatively -nat -lowe -lacks -##ented -pam -tile -concealed -inferior -abdullah -residences -tissues -vengeance -##ided -moisture -peculiar -groove -bologna -jennings -ninja -oversaw -zombies -pumping -batch -livingston -emerald -installations -1797 -peel -nitrogen -rama -##fying -schooling -strands -responding -werner -lime -casa -accurately -targeting -##rod -underway -##uru -hemisphere -lester -##yard -occupies -griffith -angrily -reorganized -##owing -courtney -deposited -estadio -##ifies -dunn -exiled -##ying -checks -##combe -successes -unexpectedly -blu -assessed 
-##flower -observing -sacked -spiders -kn -nodes -prosperity -audrey -divisional -broncos -tangled -adjust -feeds -erosion -paolo -surf -directory -snatched -humid -admiralty -screwed -reddish -##nese -modules -trench -lamps -bind -leah -bucks -competes -##nz -transcription -isles -violently -clutching -pga -cyclist -inflation -flats -ragged -unnecessary -##hian -stubborn -coordinated -harriet -baba -disqualified -insect -wolfe -##fies -reinforcements -rocked -duel -winked -embraced -bricks -##raj -hiatus -defeats -pending -brightly -jealousy -##xton -##uki -lena -colorful -##dley -stein -kidney -##shu -underwear -wanderers -##haw -##icus -guardians -m³ -roared -habits -##wise -permits -uranium -punished -disguise -bundesliga -elise -dundee -erotic -partisan -collectors -float -individually -rendering -behavioral -bucharest -ser -hare -valerie -corporal -nutrition -proportional -immense -##kis -pavement -##zie -##eld -sutherland -crouched -1775 -suzuki -trades -endurance -operas -crosby -prayed -priory -rory -socially -gujarat -walton -cube -pasha -privilege -lennon -floods -thorne -waterfall -nipple -scouting -approve -##lov -minorities -voter -dwight -extensions -assure -ballroom -slap -dripping -privileges -rejoined -confessed -demonstrating -patriotic -yell -investor -##uth -pagan -slumped -squares -confront -bert -embarrassment -aston -urging -sweater -starr -yuri -brains -williamson -commuter -mortar -structured -selfish -exports -##jon -cds -##him -unfinished -##rre -mortgage -destinations -##nagar -canoe -solitary -buchanan -delays -magistrate -fk -##pling -motivation -##lier -##vier -recruiting -assess -##mouth -malik -antique -1791 -pius -rahman -reich -tub -zhou -smashed -airs -galway -xii -conditioning -honduras -discharged -dexter -##pf -lionel -debates -lemon -volunteered -dioxide -procession -devi -sic -tremendous -advertisements -colts -transferring -verdict -hanover -decommissioned -utter -relate -pac -racism -beacon -limp -similarity -terra 
-occurrence -ant -becky -capt -updates -armament -richie -pal -##graph -halloween -mayo -##ssen -##bone -cara -serena -fcc -dolls -obligations -##dling -violated -lafayette -jakarta -exploitation -infamous -iconic -##lah -##park -moody -reginald -dread -spill -crystals -olivier -modeled -bluff -equilibrium -separating -notices -ordnance -extinction -onset -cosmic -attachment -sammy -expose -privy -anchored -##bil -abbott -admits -bending -baritone -emmanuel -policeman -vaughan -winged -climax -dresses -denny -polytechnic -mohamed -burmese -authentic -nikki -genetics -grandparents -homestead -gaza -postponed -metacritic -una -##sby -unstable -dissertation -##cian -curls -obscure -uncovered -bronx -praying -disappearing -##hoe -prehistoric -coke -turret -mutations -nonprofit -pits -monaco -##usion -prominently -dispatched -podium -##mir -uci -##uation -fortifications -birthplace -kendall -##lby -##oll -preacher -rack -goodman -persistent -##ott -countless -jaime -recorder -lexington -persecution -jumps -renewal -wagons -crushing -##holder -decorations -##lake -abundance -wrath -laundry -£1 -garde -jeanne -beetles -peasant -splitting -caste -sergei -##rer -##ema -scripts -##ively -rub -satellites -##vor -inscribed -verlag -scrapped -gale -packages -chick -potato -slogan -kathleen -arabs -##culture -counterparts -reminiscent -choral -##tead -rand -retains -bushes -dane -accomplish -courtesy -closes -##oth -slaughter -hague -krakow -lawson -tailed -elias -ginger -##ttes -canopy -betrayal -rebuilding -turf -##hof -frowning -allegiance -brigades -kicks -rebuild -polls -alias -nationalism -rowan -audition -bowie -fortunately -recognizes -harp -dillon -horrified -##oro -renault -ropes -presumed -rewarded -infrared -wiping -accelerated -illustration -presses -practitioners -badminton -##iard -detained -##tera -recognizing -relates -misery -##sies -##tly -reproduction -piercing -potatoes -thornton -esther -manners -hbo -##aan -ours -bullshit -ernie -perennial -sensitivity 
-illuminated -rupert -##iss -rfc -nassau -##dock -staggered -socialism -##haven -appointments -nonsense -prestige -sharma -haul -solidarity -##rata -igor -pedestrian -##uit -baxter -tenants -wires -medication -unlimited -guiding -impacts -diabetes -##rama -sasha -pas -clive -extraction -continually -constraints -##bilities -sonata -hunted -sixteenth -chu -planting -quote -mayer -pretended -spat -ceramic -##cci -curtains -pigs -pitching -##dad -latvian -sore -dayton -##sted -patrols -slice -playground -##nted -shone -stool -apparatus -inadequate -mates -treason -##ija -desires -##liga -##croft -somalia -laurent -mir -grape -obliged -chevrolet -thirteenth -stunning -enthusiastic -##ede -accounted -concludes -currents -basil -##kovic -drought -##rica -mai -##aire -shove -posting -##shed -pilgrimage -humorous -packing -fry -pencil -wines -smells -marilyn -aching -newest -clung -bon -neighbours -sanctioned -##pie -mug -##stock -drowning -hydraulic -##vil -hiring -reminder -lilly -investigators -##ncies -sour -##eous -compulsory -packet -##rion -##graphic -##elle -cannes -##inate -depressed -##rit -heroic -importantly -theresa -##tled -conway -saturn -marginal -rae -##xia -corresponds -royce -pact -jasper -explosives -packaging -aluminium -##ttered -denotes -rhythmic -spans -assignments -hereditary -outlined -originating -sundays -lad -reissued -greeting -beatrice -##dic -pillar -marcos -plots -handbook -alcoholic -judiciary -avant -slides -extract -masculine -blur -##eum -homage -trembled -owens -hymn -trey -signaling -socks -accumulated -reacted -attic -theo -lining -angie -distraction -primera -talbot -creativity -billed -##hey -deacon -eduardo -identifies -proposition -dizzy -gunner -hogan -##yam -##pping -##hol -ja -##chan -jensen -reconstructed -##berger -clearance -darius -##nier -abe -harlem -plea -dei -circled -emotionally -notation -fascist -neville -exceeded -upwards -viable -ducks -workforce -racer -limiting -shri -##lson -possesses -kerr -moths -devastating 
-laden -disturbing -locking -gal -fearing -accreditation -flavor -aide -1870s -mountainous -##baum -melt -##ures -texture -servers -soda -herd -##nium -erect -puzzled -hum -peggy -examinations -gould -testified -geoff -ren -devised -sacks -##law -denial -posters -grunted -cesar -tutor -gerry -offerings -byrne -falcons -combinations -incoming -pardon -rocking -26th -avengers -flared -mankind -seller -uttar -loch -nadia -stroking -exposing -fertile -ancestral -instituted -##has -noises -prophecy -taxation -eminent -vivid -pol -##bol -dart -indirect -multimedia -notebook -upside -displaying -adrenaline -referenced -geometric -##iving -progression -##ddy -blunt -announce -##far -implementing -##lav -aggression -liaison -cooler -cares -headache -plantations -gorge -dots -impulse -thickness -ashamed -averaging -kathy -obligation -precursor -fowler -symmetry -thee -hears -##rai -undergoing -butcher -bowler -##lip -cigarettes -subscription -goodness -##ically -browne -##hos -kyoto -donor -##erty -damaging -friction -drifting -expeditions -hardened -prostitution -fauna -blankets -claw -tossing -snarled -butterflies -recruits -investigative -coated -healed -communal -hai -xiii -academics -boone -psychologist -restless -lahore -stephens -brendan -foreigners -printer -ached -explode -27th -deed -scratched -dared -##pole -cardiac -1780 -okinawa -proto -commando -compelled -oddly -electrons -replica -thanksgiving -##rist -sheila -deliberate -stafford -tidal -representations -hercules -ou -##path -##iated -kidnapping -lenses -##tling -deficit -samoa -mouths -consuming -computational -maze -granting -smirk -razor -fixture -ideals -inviting -aiden -nominal -issuing -julio -pitt -ramsey -docks -##oss -exhaust -##owed -bavarian -draped -anterior -mating -ethiopian -explores -noticing -##nton -discarded -convenience -hoffman -endowment -beasts -cartridge -mormon -paternal -probe -sleeves -interfere -lump -deadline -jenks -bulldogs -scrap -alternating -justified -reproductive -nam 
-seize -descending -secretariat -kirby -grouped -smash -panther -sedan -tapping -lola -cheer -germanic -unfortunate -##eter -unrelated -##fan -subordinate -##sdale -suzanne -advertisement -##ility -horsepower -##lda -cautiously -discourse -luigi -##mans -##fields -noun -prevalent -mao -schneider -everett -surround -governorate -kira -##avia -westward -##take -misty -rails -sustainability -unused -##rating -packs -toast -unwilling -regulate -thy -suffrage -nile -awe -assam -definitions -travelers -affordable -##rb -conferred -sells -undefeated -beneficial -torso -basal -repeating -remixes -bahrain -cables -fang -##itated -excavated -numbering -statutory -deluxe -##lian -forested -ramirez -derbyshire -zeus -slamming -transfers -astronomer -banana -lottery -berg -histories -bamboo -##uchi -resurrection -posterior -bowls -vaguely -##thi -thou -preserving -tensed -offence -##inas -meyrick -callum -ridden -watt -langdon -tying -lowland -snorted -daring -truman -##hale -##girl -aura -overly -filing -weighing -goa -infections -philanthropist -saunders -eponymous -##owski -latitude -perspectives -reviewing -mets -commandant -radial -##kha -flashlight -reliability -koch -vowels -amazed -ada -elaine -supper -##encies -predator -debated -soviets -cola -##boards -##nah -compartment -crooked -arbitrary -fourteenth -havana -majors -steelers -clips -profitable -ambush -exited -packers -##tile -nude -cracks -fungi -limb -trousers -josie -shelby -tens -frederic -##ος -definite -smoothly -constellation -insult -baton -discs -lingering -##nco -conclusions -lent -staging -becker -grandpa -shaky -##tron -einstein -obstacles -adverse -economically -##moto -mccartney -thor -dismissal -motions -readings -nostrils -treatise -##pace -squeezing -evidently -prolonged -1783 -venezuelan -je -marguerite -beirut -takeover -shareholders -##vent -denise -digit -airplay -norse -##bbling -imaginary -pills -hubert -blaze -vacated -eliminating -vine -mansfield -retrospective -barrow -borne -clutch -bail 
-forensic -weaving -##nett -##witz -desktop -citadel -promotions -worrying -dorset -subdivided -##iating -manned -expeditionary -pickup -synod -chuckle -barney -##rz -##ffin -functionality -karachi -litigation -meanings -lick -anders -##ffed -execute -curl -oppose -ankles -typhoon -##ache -linguistics -compassion -pressures -grazing -perfection -##iting -immunity -monopoly -muddy -backgrounds -namibia -francesca -monitors -attracting -stunt -tuition -##ии -vegetable -##mates -##quent -mgm -jen -complexes -forts -cellar -bites -seventeenth -royals -flemish -failures -mast -charities -##cular -peruvian -capitals -macmillan -ipswich -outward -frigate -postgraduate -folds -employing -##ouse -concurrently -fiery -##tai -contingent -nightmares -monumental -nicaragua -##kowski -lizard -mal -fielding -gig -reject -harding -##ipe -coastline -##cin -beethoven -humphrey -innovations -##tam -norris -doris -solicitor -obey -niagara -shelves -bourbon -nightclub -specifications -hilton -##ndo -centennial -dispersed -worm -neglected -briggs -kuala -uneasy -##nstein -##bound -##aking -##burgh -awaiting -pronunciation -##bbed -##quest -eh -optimal -zhu -raped -greens -presided -brenda -worries -venetian -marxist -turnout -##lius -refined -braced -sins -grasped -sunderland -nickel -speculated -lowell -cyrillic -communism -fundraising -resembling -colonists -mutant -freddie -usc -##mos -gratitude -##run -mural -##lous -chemist -reminds -28th -steals -tess -pietro -##ingen -promoter -ri -microphone -honoured -rai -sant -##qui -feather -##nson -burlington -kurdish -terrorists -deborah -sickness -##wed -hazard -irritated -desperation -veil -clarity -##rik -jewels -xv -##gged -##ows -##cup -berkshire -unfair -mysteries -orchid -winced -exhaustion -renovations -stranded -obe -infinity -##nies -adapt -redevelopment -thanked -registry -olga -domingo -noir -tudor -ole -commenting -behaviors -##ais -crisp -pauline -probable -stirling -wigan -paralympics -panting -surpassed -##rew -luca -barred 
-famed -##sters -cassandra -waiter -carolyn -exported -##orted -andres -destructive -deeds -jonah -castles -vacancy -##glass -1788 -orchard -yep -famine -belarusian -sprang -##forth -skinny -##mis -administrators -rotterdam -zambia -zhao -boiler -discoveries -##ride -##physics -lucius -disappointing -outreach -spoon -##frame -qualifications -unanimously -enjoys -regency -##iidae -stade -realism -veterinary -rodgers -dump -alain -chestnut -castile -censorship -rumble -gibbs -communion -reggae -inactivated -logs -loads -##houses -homosexual -##iano -ale -informs -##cas -phrases -plaster -linebacker -ambrose -kaiser -fascinated -limerick -recruitment -forge -mastered -##nding -leinster -rooted -threaten -##strom -borneo -##hes -suggestions -scholarships -propeller -documentaries -patronage -coats -constructing -invest -neurons -comet -entirety -shouts -identities -annoying -unchanged -wary -##antly -##ogy -neat -oversight -##kos -phillies -replay -constance -##kka -incarnation -humble -skies -minus -##acy -smithsonian -guerrilla -jar -cadets -##plate -surplus -audit -##aru -cracking -joanna -louisa -pacing -##lights -intentionally -##iri -diner -nwa -imprint -australians -tong -unprecedented -bunker -naive -specialists -ark -nichols -railing -leaked -pedal -##uka -shrub -longing -roofs -captains -neural -tuned -##ntal -##jet -emission -medina -frantic -codex -definitive -sid -abolition -intensified -stocks -enrique -sustain -genoa -oxide -##written -clues -cha -##gers -tributaries -fragment -venom -##ente -##sca -muffled -vain -sire -laos -##ingly -##hana -hastily -snapping -surfaced -sentiment -motive -##oft -contests -approximate -mesa -luckily -dinosaur -exchanges -propelled -accord -bourne -relieve -tow -masks -offended -##ues -cynthia -##mmer -rains -bartender -zinc -reviewers -lois -##sai -legged -arrogant -rafe -comprise -handicap -blockade -inlet -lagoon -copied -drilling -shelley -petals -##inian -mandarin -obsolete -##inated -onward -arguably -productivity 
-praising -seldom -busch -discusses -raleigh -shortage -ranged -stanton -encouragement -firstly -conceded -overs -temporal -##uke -cbe -##bos -woo -certainty -pumps -##pton -stalked -##uli -lizzie -periodic -thieves -weaker -gases -shoving -chooses -wc -##chemical -prompting -weights -##kill -robust -flanked -sticky -tuberculosis -##eb -##eal -christchurch -resembled -wallet -reese -inappropriate -pictured -distract -fixing -fiddle -giggled -burger -heirs -hairy -mechanic -torque -obsessed -chiefly -cheng -logging -extracted -meaningful -numb -##vsky -gloucestershire -reminding -unite -##lit -breeds -diminished -clown -glove -1860s -archibald -focal -freelance -sliced -depiction -##yk -organism -switches -sights -stray -crawling -##ril -lever -leningrad -interpretations -loops -anytime -reel -alicia -delighted -##ech -inhaled -xiv -suitcase -bernie -vega -licenses -northampton -exclusion -induction -monasteries -racecourse -homosexuality -##sfield -##rky -dimitri -michele -alternatives -ions -commentators -genuinely -objected -pork -hospitality -fencing -stephan -warships -peripheral -wit -drunken -wrinkled -quentin -spends -departing -chung -numerical -spokesperson -johannesburg -caliber -killers -##udge -assumes -neatly -demographic -abigail -bloc -mounting -##lain -bentley -slightest -xu -recipients -##jk -merlin -##writer -seniors -prisons -blinking -hindwings -flickered -kappa -##hel -80s -strengthening -appealing -brewing -gypsy -mali -lashes -hulk -unpleasant -harassment -bio -treaties -predict -instrumentation -pulp -troupe -boiling -mantle -##ffe -##vn -dividing -handles -verbs -##onal -coconut -senegal -thorough -gum -momentarily -##sto -cocaine -panicked -destined -##turing -teatro -denying -weary -captained -mans -##hawks -wakefield -bollywood -thankfully -cyril -amendments -##bahn -consultation -stud -reflections -kindness -1787 -internally -##ovo -tex -mosaic -distribute -paddy -seeming -##hic -piers -##mura -popularly -winger -kang -sentinel -mccoy 
-##anza -covenant -##bag -verge -fireworks -suppress -thrilled -dominate -##jar -swansea -reconciliation -stiffened -cue -dorian -##uf -damascus -amor -ida -foremost -##aga -porsche -unseen -dir -##had -##azi -stony -lexi -melodies -##nko -angular -integer -podcast -ants -inherent -jaws -justify -persona -##olved -josephine -##nr -##ressed -customary -flashes -gala -cyrus -glaring -backyard -ariel -physiology -greenland -stir -avon -atletico -finch -methodology -ked -mas -catholicism -townsend -branding -quincy -fits -containers -1777 -ashore -aragon -forearm -poisoning -adopting -conquer -grinding -amnesty -keller -finances -evaluate -forged -lankan -instincts -##uto -guam -bosnian -photographed -workplace -desirable -protector -allocation -intently -encourages -willy -##sten -bodyguard -electro -brighter -bihar -##chev -lasts -opener -amphibious -sal -verde -arte -##cope -captivity -vocabulary -yields -##tted -agreeing -desmond -pioneered -##chus -strap -campaigned -railroads -##ович -emblem -##dre -stormed -##ulous -marijuana -northumberland -##nath -bowen -landmarks -beaumont -##qua -danube -##bler -attorneys -th -flyers -critique -villains -cass -mutation -acc -##0s -colombo -mckay -motif -sampling -concluding -syndicate -##rell -neon -stables -warnings -clint -mourning -wilkinson -##tated -merrill -leopard -evenings -exhaled -emil -sonia -ezra -discrete -stove -farrell -fifteenth -prescribed -superhero -##rier -worms -helm -wren -##duction -expo -##rator -hq -unfamiliar -antony -prevents -acceleration -fiercely -mari -painfully -calculations -cheaper -ign -clifton -irvine -davenport -mozambique -pierced -##evich -wonders -##wig -##cate -##iling -crusade -ware -enzymes -reasonably -mls -##coe -mater -ambition -bunny -eliot -kernel -##fin -asphalt -headmaster -torah -aden -lush -pins -waived -##yas -joao -substrate -enforce -##grad -##ules -alvarez -selections -epidemic -tempted -bremen -translates -ensured -waterfront -29th -forrest -manny -malone -kramer 
-reigning -simpler -absorption -engraved -##ffy -evaluated -1778 -haze -comforting -crossover -##abe -thorn -##rift -##imo -suppression -fatigue -cutter -wurttemberg -##orf -enforced -hovering -proprietary -samurai -syllable -ascent -lacey -tick -lars -tractor -merchandise -rep -bouncing -defendants -##yre -huntington -##oko -standardized -##hor -##hima -assassinated -predecessors -rainy -liar -assurance -lyrical -##uga -secondly -flattened -parameter -undercover -##mity -bordeaux -punish -ridges -markers -exodus -inactive -hesitate -debbie -nyc -pledge -savoy -nagar -offset -organist -##tium -hesse -marin -converting -##iver -diagram -propulsion -validity -reverted -supportive -ministries -clans -responds -proclamation -##inae -ein -pleading -patriot -birch -islanders -strauss -hates -##dh -brandenburg -concession -1900s -killings -textbook -antiquity -cinematography -wharf -embarrassing -setup -creed -farmland -inequality -centred -signatures -fallon -##ingham -##uts -ceylon -gazing -directive -laurie -##tern -globally -##uated -##dent -allah -excavation -threads -##cross -frantically -icc -utilize -determines -respiratory -thoughtful -receptions -##dicate -merging -chandra -seine -builders -builds -diagnostic -dev -visibility -goddamn -analyses -dhaka -proves -chancel -concurrent -curiously -canadians -pumped -restoring -1850s -turtles -jaguar -sinister -spinal -declan -vows -1784 -glowed -capitalism -swirling -universidad -##lder -##oat -soloist -##genic -##oor -coincidence -beginnings -nissan -dip -resorts -caucasus -combustion -infectious -##eno -pigeon -serpent -##itating -conclude -masked -salad -jew -##gr -surreal -toni -##wc -harmonica -##gins -##etic -##coat -fishermen -intending -bravery -##wave -klaus -titan -wembley -taiwanese -ransom -40th -incorrect -hussein -eyelids -cooke -dramas -utilities -##etta -##print -eisenhower -principally -granada -lana -##rak -openings -concord -##bl -bethany -connie -morality -sega -##mons -##nard -earnings -##kara 
-##cine -communes -##rel -coma -composing -softened -severed -grapes -nguyen -analyzed -warlord -hubbard -heavenly -behave -slovenian -##hit -##ony -hailed -filmmakers -trance -caldwell -skye -unrest -coward -likelihood -##aging -bern -taliban -honolulu -propose -browser -imagining -cobra -contributes -dukes -instinctively -conan -violinist -##ores -accessories -gradual -##amp -quotes -sioux -##dating -undertake -intercepted -sparkling -compressed -fungus -tombs -haley -imposing -rests -degradation -lincolnshire -retailers -wetlands -tulsa -distributor -dungeon -nun -greenhouse -convey -atlantis -aft -exits -oman -dresser -lyons -##sti -joking -eddy -judgement -omitted -digits -##game -juniors -##rae -cents -stricken -une -##ngo -wizards -weir -breton -nan -technician -fibers -liking -royalty -persia -terribly -magician -##rable -##unt -vance -cafeteria -booker -camille -warmer -##static -consume -cavern -gaps -compass -contemporaries -foyer -soothing -graveyard -maj -plunged -blush -##wear -cascade -demonstrates -ordinance -##nov -boyle -##lana -rockefeller -shaken -banjo -izzy -##ense -breathless -vines -##eman -alterations -chromosome -dwellings -feudal -mole -catalonia -relics -tenant -mandated -##fm -fridge -hats -honesty -patented -raul -heap -cruisers -accusing -enlightenment -infants -wherein -chatham -contractors -affinity -hc -osborne -piston -traps -maturity -##rana -lagos -##zal -peering -##nay -attendant -dealers -protocols -subset -prospects -biographical -##cre -artery -##zers -insignia -nuns -endured -##eration -recommend -schwartz -serbs -berger -cromwell -crossroads -enduring -clasped -grounded -##bine -marseille -twitched -abel -choke -catalyst -moldova -italians -##tist -disastrous -wee -##oured -##nti -wwf -nope -##piration -##asa -expresses -thumbs -##nza -coca -1781 -cheating -##ption -skipped -sensory -heidelberg -spies -satan -dangers -semifinal -bohemia -whitish -confusing -shipbuilding -relies -surgeons -landings -ravi -baku -moor -suffix 
-alejandro -##yana -litre -upheld -##unk -rajasthan -##rek -coaster -insists -posture -scenarios -etienne -favoured -appoint -transgender -elephants -poked -greenwood -defences -fulfilled -militant -somali -1758 -chalk -potent -##ucci -migrants -wink -assistants -nos -restriction -activism -niger -##ario -colon -shaun -##sat -daphne -##erated -swam -congregations -reprise -considerations -magnet -playable -xvi -overthrow -tobias -knob -chavez -coding -##mers -propped -katrina -orient -newcomer -##suke -temperate -##pool -farmhouse -interrogation -committing -##vert -forthcoming -strawberry -joaquin -macau -ponds -shocking -siberia -##cellular -chant -contributors -##nant -##ologists -sped -absorb -hail -1782 -spared -##hore -barbados -karate -opus -originates -saul -##xie -evergreen -leaped -##rock -correlation -exaggerated -weekday -unification -bump -tracing -brig -afb -pathways -utilizing -disturbance -kneeling -##stad -##guchi -100th -pune -##thy -decreasing -manipulation -miriam -academia -ecosystem -occupational -rbi -##lem -rift -rotary -stacked -incorporation -awakening -generators -guerrero -racist -##omy -cyber -derivatives -culminated -allie -annals -panzer -sainte -pops -zu -austro -##vate -algerian -politely -nicholson -mornings -educate -tastes -thrill -dartmouth -##gating -##jee -regan -differing -concentrating -choreography -divinity -pledged -alexandre -routing -gregor -madeline -##idal -apocalypse -##hora -gunfire -culminating -elves -fined -liang -lam -programmed -tar -guessing -transparency -gabrielle -##gna -cancellation -flexibility -##lining -accession -shea -stronghold -nets -specializes -##rgan -abused -hasan -sgt -exceeding -admiration -supermarket -photographers -specialised -tilt -resonance -hmm -perfume -sami -threatens -garland -botany -guarding -boiled -greet -puppy -russo -supplier -wilmington -vibrant -vijay -##bius -paralympic -grumbled -paige -faa -licking -margins -hurricanes -##gong -fest -grenade -ripping -##uz -counseling 
-weigh -##sian -needles -wiltshire -edison -costly -##not -fulton -tramway -redesigned -staffordshire -gasping -watkins -sleepy -candidacy -monkeys -timeline -throbbing -##bid -##sos -berth -uzbekistan -vanderbilt -bothering -overturned -ballots -gem -##iger -sunglasses -subscribers -hooker -compelling -ang -exceptionally -saloon -stab -##rdi -carla -terrifying -##vision -coil -##oids -satisfying -vendors -31st -mackay -deities -overlooked -ambient -bahamas -felipe -olympia -whirled -botanist -advertised -tugging -disciples -morales -unionist -rites -foley -morse -motives -creepy -##₀ -soo -##sz -bargain -highness -frightening -turnpike -tory -reorganization -depict -biographer -unopposed -manifesto -##gles -institut -emile -accidental -kapoor -##dam -kilkenny -cortex -lively -romanesque -jain -shan -cannons -##ske -petrol -echoing -amalgamated -disappears -cautious -proposes -sanctions -trenton -flotilla -aus -contempt -tor -canary -cote -theirs -##hun -conceptual -deleted -fascinating -paso -blazing -elf -honourable -hutchinson -##eiro -##outh -##zin -surveyor -amidst -wooded -reissue -intro -##ono -cobb -shelters -newsletter -hanson -brace -encoding -confiscated -dem -caravan -marino -scroll -melodic -cows -imam -##adi -##aneous -northward -searches -biodiversity -cora -roaring -##bers -connell -theologian -halo -compose -pathetic -unmarried -dynamo -az -calculation -toulouse -deserves -humour -nr -forgiveness -tam -undergone -martyr -pamela -myths -whore -counselor -hicks -heavens -battleship -electromagnetic -stellar -establishments -presley -hopped -##chin -temptation -90s -wills -##yuan -nhs -##nya -seminars -##yev -adaptations -gong -asher -lex -indicator -sikh -tobago -cites -goin -##yte -satirical -##gies -characterised -correspond -bubbles -lure -participates -##vid -eruption -skate -therapeutic -1785 -canals -wholesale -defaulted -sac -petit -##zzled -virgil -leak -ravens -portraying -##yx -ghetto -creators -dams -portray -vicente -##rington -fae 
-namesake -bounty -##arium -joachim -##ota -##iser -aforementioned -axle -snout -depended -dismantled -reuben -##ibly -gallagher -##lau -earnest -##ieu -##iary -inflicted -objections -##llar -asa -gritted -##athy -jericho -##sea -##was -flick -underside -ceramics -undead -substituted -eastward -undoubtedly -wheeled -chimney -##iche -guinness -siding -traitor -baptiste -disguised -inauguration -tipperary -choreographer -perched -warmed -stationary -##ntes -bacterial -##aurus -flores -phosphate -attacker -invaders -alvin -intersects -indirectly -immigrated -businessmen -cornelius -valves -narrated -pill -sober -nationale -monastic -applicants -scenery -##jack -motifs -constitutes -##osh -jurisdictions -tuning -irritation -woven -##uddin -fertility -gao -##erie -antagonist -impatient -glacial -hides -boarded -denominations -interception -##jas -nicola -algebraic -marquess -bahn -parole -buyers -bait -turbines -paperwork -bestowed -natasha -renee -oceans -purchases -vaccine -##tock -fixtures -playhouse -integrate -jai -oswald -intellectuals -booked -nests -mortimer -##isi -obsession -sept -##gler -##sum -scrutiny -simultaneous -squinted -##shin -collects -oven -shankar -penned -remarkably -slips -luggage -spectral -1786 -collaborations -louie -consolidation -##ailed -##ivating -hoover -blackpool -harness -ignition -vest -tails -belmont -mongol -skinner -##nae -visually -mage -derry -##tism -##unce -stevie -transitional -##rdy -redskins -drying -prep -prospective -annoyance -oversee -##loaded -fills -##books -announces -scowled -respects -prasad -mystic -tucson -##vale -revue -springer -bankrupt -1772 -aristotle -habsburg -##geny -dal -natal -nut -pod -chewing -darts -moroccan -walkover -rosario -lenin -punjabi -##ße -grossed -scattering -wired -invasive -hui -polynomial -corridors -wakes -gina -portrays -##cratic -arid -retreating -erich -irwin -sniper -##dha -linen -lindsey -maneuver -butch -shutting -socio -bounce -commemorative -postseason -jeremiah -pines -mystical 
-beads -abbas -furnace -bidding -consulted -assaulted -empirical -rubble -enclosure -sob -weakly -cancel -polly -yielded -##emann -curly -prediction -battered -70s -vhs -jacqueline -render -sails -barked -detailing -grayson -riga -sloane -raging -##yah -herbs -bravo -##athlon -alloy -giggle -imminent -suffers -assumptions -waltz -##itate -accomplishments -##ited -bathing -remixed -deception -##emia -deepest -##eis -balkan -frogs -##rong -slab -##pate -philosophers -peterborough -grains -imports -dickinson -rwanda -##atics -1774 -dirk -tablets -##rove -clone -##rice -caretaker -hostilities -mclean -##gre -regimental -treasures -norms -impose -tsar -tango -diplomacy -variously -complain -recognise -arrests -1779 -celestial -pulitzer -##dus -libretto -##moor -adele -splash -expectation -lds -confronts -##izer -spontaneous -harmful -wedge -entrepreneurs -buyer -bilingual -translate -rugged -conner -circulated -uae -eaton -##gra -##zzle -lingered -lockheed -vishnu -reelection -alonso -##oom -joints -yankee -headline -cooperate -heinz -laureate -invading -##sford -echoes -scandinavian -##dham -hugging -vitamin -salute -micah -hind -trader -##sper -radioactive -##ndra -militants -poisoned -ratified -remark -campeonato -deprived -wander -prop -##dong -##tani -##eye -chiang -darcy -##oping -mandolin -spice -statesman -babylon -walled -forgetting -afro -##cap -giorgio -buffer -##polis -planetary -##gis -overlap -terminals -kinda -centenary -##bir -arising -manipulate -elm -ke -1770 -##tad -chrysler -mapped -moose -pomeranian -quad -macarthur -assemblies -shoreline -recalls -stratford -##rted -noticeable -##evic -imp -##rita -##sque -accustomed -supplying -tents -disgusted -sipped -filters -khz -reno -selecting -luftwaffe -mcmahon -tyne -masterpiece -carriages -collided -dunes -exercised -flare -remembers -muzzle -heck -##rson -burgess -lunged -middleton -boycott -bilateral -##sity -hazardous -lumpur -multiplayer -spotlight -jackets -goldman -liege -porcelain -rag -waterford 
-attracts -hopeful -battling -ottomans -kensington -baked -hymns -cheyenne -lattice -levine -borrow -polymer -clashes -michaels -monitored -commitments -denounced -##von -cavity -##oney -hobby -akin -##holders -futures -intricate -cornish -patty -##oned -illegally -dolphin -##lag -barlow -yellowish -maddie -apologized -luton -plagued -##puram -##rds -sway -fanny -łodz -##rino -psi -suspicions -hanged -##eding -initiate -charlton -##por -nak -competent -analytical -annex -wardrobe -reservations -sect -fairfax -hedge -piled -buckingham -uneven -bauer -simplicity -snyder -interpret -accountability -donors -moderately -byrd -continents -##cite -disciple -jamaican -nominees -##uss -mongolian -diver -attackers -eagerly -ideological -pillows -miracles -apartheid -revolver -sulfur -clinics -moran -##enko -ile -katy -rhetoric -##icated -chronology -recycling -##hrer -elongated -mughal -pascal -profiles -vibration -databases -domination -##fare -matthias -digest -rehearsal -polling -weiss -initiation -reeves -clinging -flourished -impress -##hoff -buckley -symposium -rhythms -weed -emphasize -transforming -##taking -##yman -accountant -analyze -flicker -foil -priesthood -voluntarily -decreases -##hya -slater -sv -charting -mcgill -##lde -moreno -besieged -zur -robes -##phic -admitting -deported -turmoil -peyton -earthquakes -##ares -nationalists -beau -clair -brethren -interrupt -welch -curated -galerie -requesting -##ested -impending -steward -viper -##vina -complaining -beautifully -brandy -foam -nl -1660 -alessandro -punches -laced -explanations -##lim -attribute -clit -reggie -discomfort -##cards -smoothed -whales -##cene -adler -countered -duffy -disciplinary -widening -recipe -reliance -conducts -goats -gradient -preaching -##shaw -matilda -quasi -striped -meridian -cannabis -cordoba -certificates -##agh -##tering -graffiti -hangs -pilgrims -repeats -##ych -revive -urine -etat -##hawk -fueled -belts -fuzzy -susceptible -mauritius -salle -sincere -beers -hooks -##cki 
-arbitration -entrusted -advise -sniffed -seminar -junk -donnell -processors -principality -strapped -celia -mendoza -everton -fortunes -prejudice -starving -reassigned -steamer -##lund -tuck -evenly -foreman -##ffen -dans -envisioned -slit -baseman -liberia -rosemary -##weed -electrified -periodically -potassium -stride -contexts -sperm -slade -mariners -influx -bianca -subcommittee -##rane -spilling -icao -estuary -##nock -delivers -##ulata -isa -mira -bohemian -dessert -##sbury -welcoming -proudly -slowing -##chs -musee -ascension -russ -##vian -waits -##psy -africans -exploit -##morphic -eccentric -crab -peck -entrances -formidable -marketplace -groom -bolted -metabolism -patton -robbins -courier -payload -endure -##ifier -andes -refrigerator -ornate -##uca -ruthless -illegitimate -masonry -strasbourg -bikes -apples -quintet -willingly -niche -bakery -corpses -energetic -##cliffe -##sser -##ards -centimeters -centro -fuscous -cretaceous -rancho -##yde -andrei -telecom -tottenham -oasis -ordination -vulnerability -presiding -corey -penguins -sims -##pis -malawi -piss -correction -##cked -##ffle -##ryn -countdown -detectives -psychiatrist -psychedelic -dinosaurs -blouse -choi -vowed -randomly -##pol -49ers -scrub -blanche -bruins -dusseldorf -##using -unwanted -##ums -dominique -elevations -headlights -om -laguna -##oga -1750 -famously -ignorance -shrewsbury -breuning -che -confederacy -greco -overhaul -##screen -paz -skirts -disagreement -cruelty -jagged -phoebe -shifter -hovered -viruses -##wes -##lined -landlord -squirrel -dashed -ornamental -gag -wally -grange -literal -spurs -undisclosed -proceeding -billie -orphan -spanned -humidity -indy -weighted -presentations -explosions -lucian -##tary -vaughn -hindus -##anga -##hell -psycho -daytona -protects -efficiently -rematch -sly -tandem -##oya -rebranded -impaired -hee -metropolis -peach -godfrey -diaspora -ethnicity -prosperous -gleaming -dar -grossing -playback -##rden -stripe -pistols -##tain -births 
-labelled -##cating -rudy -alba -##onne -aquarium -hostility -##tase -shudder -sumatra -hardest -lakers -consonant -creeping -demos -homicide -capsule -zeke -liberties -expulsion -pueblo -##comb -trait -transporting -##ddin -##neck -##yna -depart -gregg -mold -ledge -hangar -oldham -playboy -termination -analysts -gmbh -romero -##itic -insist -cradle -filthy -brightness -slash -shootout -deposed -bordering -##truct -microwave -tumbled -sheltered -cathy -werewolves -messy -andersen -convex -clapped -clinched -satire -wasting -edo -rufus -##jak -mont -##etti -poznan -##keeping -restructuring -transverse -##rland -azerbaijani -slovene -gestures -roommate -choking -shear -##quist -vanguard -oblivious -##hiro -disagreed -baptism -##lich -coliseum -##aceae -salvage -societe -cory -locke -relocation -relying -versailles -ahl -swelling -##elo -cheerful -##edes -gin -sarajevo -obstacle -diverted -##nac -messed -thoroughbred -fluttered -utrecht -chewed -acquaintance -assassins -dispatch -mirza -##wart -salzburg -swell -yen -##gee -idle -ligue -samson -##nds -##igh -playful -spawned -##cise -tease -##case -burgundy -stirring -skeptical -interceptions -marathi -##dies -bedrooms -aroused -pinch -##lik -preferences -tattoos -buster -digitally -projecting -rust -##ital -kitten -priorities -addison -pseudo -##guard -dusk -icons -sermon -##psis -##iba -##lift -ju -truce -rink -##dah -##wy -defects -psychiatry -offences -calculate -glucose -##iful -##rized -##unda -francaise -##hari -richest -warwickshire -carly -1763 -purity -redemption -lending -##cious -muse -bruises -cerebral -aero -carving -preface -terminology -invade -monty -anarchist -blurred -##iled -rossi -treats -guts -shu -foothills -ballads -undertaking -premise -cecilia -affiliates -blasted -conditional -wilder -minors -drone -rudolph -buffy -swallowing -horton -attested -rutherford -howell -primetime -livery -penal -##bis -minimize -hydro -wrecked -wrought -palazzo -##gling -cans -vernacular -friedman -nobleman -shale 
-walnut -danielle -##ection -##tley -sears -##kumar -chords -lend -flipping -streamed -por -dracula -gallons -sacrifices -gamble -orphanage -##iman -mckenzie -##gible -boxers -daly -##balls -##ان -##ific -##rative -##iq -exploited -slated -##uity -circling -hillary -pinched -goldberg -provost -campaigning -piles -ironically -jong -mohan -successors -usaf -##tem -##ught -autobiographical -haute -preserves -##ending -acquitted -comparisons -hydroelectric -gangs -cypriot -torpedoes -rushes -derive -bumps -instability -fiat -pets -##mbe -silas -dye -reckless -settler -##itation -heats -##writing -canonical -maltese -fins -mushroom -stacy -aspen -avid -##kur -##loading -vickers -gaston -hillside -statutes -wilde -gail -kung -sabine -comfortably -motorcycles -##rgo -pneumonia -fetch -##sonic -axel -faintly -parallels -##oop -mclaren -spouse -compton -interdisciplinary -miner -##eni -clamped -##chal -##llah -separates -versa -##mler -scarborough -labrador -##lity -##osing -rutgers -hurdles -como -burt -divers -wichita -cade -coincided -bruised -mla -vineyard -##ili -##brush -notch -mentioning -jase -hearted -kits -doe -##acle -pomerania -##ady -ronan -seizure -pavel -problematic -##zaki -domenico -##ulin -catering -penelope -dependence -parental -emilio -ministerial -atkinson -##bolic -clarkson -chargers -colby -grill -peeked -arises -summon -##aged -fools -##grapher -faculties -qaeda -##vial -garner -refurbished -##hwa -geelong -disasters -nudged -bs -shareholder -lori -algae -reinstated -rot -##ades -##nous -invites -stainless -inclusive -##itude -diocesan -til -##icz -denomination -##xa -benton -floral -registers -##erman -##kell -absurd -brunei -guangzhou -hitter -retaliation -##uled -##eve -blanc -nh -consistency -contamination -##eres -dire -palermo -broadcasters -diaries -inspire -vols -brewer -tightening -mixtape -hormone -##tok -stokes -##color -##dly -##ssi -##ometer -##lington -sanitation -##tility -intercontinental -##adt -¹⁄₂ -cylinders -economies -favourable 
-unison -croix -gertrude -odyssey -vanity -dangling -##logists -upgrades -dice -middleweight -practitioner -henrik -parlor -orion -angered -lac -blurted -##rri -sensual -intends -swings -angled -##phs -husky -attain -peerage -precinct -textiles -cheltenham -shuffled -dai -confess -tasting -bhutan -##riation -tyrone -segregation -abrupt -ruiz -##rish -smirked -blackwell -confidential -browning -amounted -vase -scarce -fabulous -raided -staple -guyana -unemployed -glider -shay -##tow -carmine -troll -intervene -squash -superstar -cylindrical -len -roadway -researched -handy -##rium -##jana -lao -declares -##rring -##tadt -##elin -##kova -willem -shrubs -napoleonic -realms -skater -volkswagen -##ł -tad -hara -archaeologist -awkwardly -eerie -##kind -wiley -##heimer -titus -organizers -cfl -crusaders -lama -vent -enraged -thankful -occupants -maximilian -##gaard -possessing -textbooks -##oran -collaborator -quaker -##ulo -avalanche -mono -silky -straits -isaiah -mustang -surged -resolutions -potomac -descend -kilograms -plato -strains -saturdays -##olin -bernstein -##ype -holstein -ponytail -belize -conversely -heroine -perpetual -##ylus -charcoal -piedmont -glee -negotiating -backdrop -prologue -##jah -pasadena -climbs -ramos -sunni -##holm -##tner -##tri -anand -deficiency -hertfordshire -stout -##avi -aperture -orioles -##irs -doncaster -intrigued -bombed -coating -otis -##mat -cocktail -##jit -##eto -amir -arousal -sar -##proof -dixie -pots -whereabouts -##fted -drains -bullying -cottages -scripture -coherent -fore -poe -appetite -##uration -sampled -##ators -derrick -rotor -jays -peacock -installment -##rro -advisors -##coming -rodeo -scotch -##mot -##fen -##vant -ensued -rodrigo -dictatorship -martyrs -twenties -towed -incidence -marta -rainforest -sai -scaled -##cles -oceanic -qualifiers -symphonic -mcbride -dislike -generalized -aubrey -colonization -##iation -##lion -##ssing -disliked -lublin -salesman -##ulates -spherical -whatsoever -sweating -avalon 
-contention -punt -severity -alderman -atari -##dina -##grant -##rop -scarf -seville -vertices -annexation -fairfield -fascination -inspiring -launches -palatinate -regretted -##rca -feral -##iom -elk -nap -olsen -reddy -yong -##leader -##iae -garment -transports -feng -gracie -outrage -viceroy -insides -##esis -breakup -grady -organizer -softer -grimaced -murals -galicia -arranging -vectors -##rsten -##sb -##cens -sloan -##eka -bitten -ara -fender -nausea -bumped -kris -banquet -comrades -detector -persisted -##llan -adjustment -endowed -cinemas -sellers -##uman -peek -epa -kindly -neglect -simpsons -talon -mausoleum -runaway -hangul -lookout -##cic -coughed -acquainted -chloride -quicker -accordion -neolithic -##qa -artemis -coefficient -lenny -pandora -tx -##xed -ecstasy -litter -segunda -chairperson -gemma -hiss -rumor -vow -nasal -antioch -compensate -patiently -transformers -##eded -judo -morrow -penis -posthumous -bandits -husbands -denote -flaming -##any -##phones -langley -yorker -1760 -walters -##kle -gubernatorial -fatty -leroy -outlaw -##nine -unpublished -poole -jakob -##ᵢ -##ₙ -crete -distorted -superiority -##dhi -intercept -crust -mig -claus -crashes -stallion -frontal -armistice -##estinal -elton -aj -encompassing -camel -commemorated -malaria -woodward -calf -cigar -penetrate -##oso -willard -##rno -##uche -illustrate -amusing -convergence -noteworthy -##lma -##rva -journeys -realise -manfred -##sable -##vocation -hearings -fiance -##posed -educators -provoked -adjusting -##cturing -modular -stockton -paterson -vlad -rejects -electors -selena -maureen -##tres -##rce -swirled -##num -proportions -nanny -pawn -naturalist -parma -apostles -awoke -ethel -wen -##bey -monsoon -overview -##inating -mccain -rendition -risky -adorned -##ih -equestrian -germain -nj -conspicuous -confirming -##yoshi -shivering -##imeter -milestone -rumours -flinched -bounds -smacked -token -##bei -lectured -automobiles -##shore -impacted -##iable -nouns -nero -##leaf -ismail 
-prostitute -trams -bridget -sud -stimulus -impressions -reins -revolves -##gned -giro -honeymoon -##swell -criterion -##sms -##uil -libyan -prefers -##osition -preview -sucks -accusation -bursts -metaphor -diffusion -tolerate -faye -betting -cinematographer -liturgical -specials -bitterly -humboldt -##ckle -flux -rattled -##itzer -archaeologists -odor -authorised -marshes -discretion -##ов -alarmed -archaic -inverse -##leton -explorers -##pine -drummond -tsunami -woodlands -##minate -##tland -booklet -insanity -owning -insert -crafted -calculus -receivers -stung -##eca -##nched -prevailing -travellers -eyeing -lila -graphs -##borne -julien -##won -morale -adaptive -therapist -erica -cw -libertarian -bowman -pitches -vita -##ional -crook -##entation -caledonia -mutiny -##sible -1840s -automation -flock -##pia -ironic -pathology -##imus -remarried -joker -withstand -energies -##att -shropshire -hostages -madeleine -tentatively -conflicting -mateo -recipes -euros -mercenaries -nico -##ndon -albuquerque -augmented -mythical -bel -freud -##child -cough -##lica -freddy -lillian -genetically -nuremberg -calder -bonn -outdoors -paste -suns -urgency -vin -restraint -tyson -##cera -##selle -barrage -bethlehem -kahn -##par -mounts -nippon -barony -happier -ryu -makeshift -sheldon -blushed -castillo -barking -listener -taped -bethel -fluent -headlines -pornography -rum -disclosure -sighing -mace -doubling -gunther -manly -##plex -interventions -physiological -forwards -emerges -##tooth -##gny -compliment -rib -recession -visibly -barge -faults -connector -exquisite -prefect -##rlin -patio -##cured -elevators -italics -pena -wasp -satin -botswana -graceful -respectable -##jima -##rter -##oic -franciscan -generates -##dl -alfredo -disgusting -##olate -##iously -sherwood -warns -cod -promo -cheryl -sino -##escu -twitch -##zhi -brownish -thom -ortiz -##dron -densely -##beat -carmel -reinforce -##bana -anastasia -downhill -vertex -contaminated -remembrance -harmonic -homework 
-fiancee -gears -olds -angelica -ramsay -quiz -colliery -sevens -##cape -autism -##hil -walkway -##boats -ruben -abnormal -ounce -khmer -##bbe -zachary -bedside -morphology -punching -##olar -sparrow -convinces -hewitt -queer -remastered -rods -mabel -solemn -notified -lyricist -symmetric -##xide -encore -passports -wildcats -##uni -baja -##pac -mildly -##ease -bleed -commodity -mounds -glossy -orchestras -##omo -damian -prelude -ambitions -##vet -awhile -remotely -##aud -asserts -imply -##iques -distinctly -modelling -remedy -##dded -windshield -dani -xiao -##endra -audible -powerplant -invalid -elemental -acquisitions -##hala -immaculate -libby -plata -smuggling -ventilation -denoted -minh -##morphism -differed -dion -kelley -lore -mocking -sabbath -spikes -hygiene -drown -runoff -stylized -tally -liberated -aux -interpreter -righteous -aba -siren -reaper -pearce -millie -##cier -##yra -gaius -##iso -captures -##ttering -dorm -claudio -##sic -benches -knighted -blackness -##ored -discount -fumble -oxidation -routed -novak -perpendicular -spoiled -fracture -splits -pads -topology -##cats -axes -fortunate -offenders -protestants -esteem -broadband -convened -frankly -hound -prototypes -isil -facilitated -keel -##sher -sahara -awaited -bubba -orb -prosecutors -hem -##xing -relaxing -remnant -romney -sorted -slalom -stefano -ulrich -##active -exemption -folder -pauses -foliage -hitchcock -epithet -criticisms -##aca -ballistic -brody -hinduism -chaotic -youths -equals -##pala -pts -thicker -analogous -capitalist -improvised -overseeing -sinatra -ascended -beverage -straightforward -##kon -curran -bois -induce -surveying -emperors -sax -unpopular -cartoonist -fused -##mble -unto -##yuki -localities -##cko -##ln -darlington -slain -academie -lobbying -sediment -puzzles -##grass -defiance -dickens -manifest -tongues -alumnus -arbor -coincide -appalachian -mustafa -examiner -cabaret -traumatic -yves -bracelet -draining -heroin -magnum -baths -odessa -consonants 
-mitsubishi -##gua -kellan -vaudeville -joked -straps -probation -##ław -ceded -interfaces -##pas -##zawa -blinding -viet -rothschild -museo -huddersfield -tactic -##storm -brackets -dazed -incorrectly -##vu -reg -glazed -fearful -manifold -benefited -irony -stumbling -##rte -willingness -balkans -mei -wraps -##aba -injected -##lea -gu -syed -harmless -##hammer -bray -takeoff -poppy -timor -cardboard -astronaut -purdue -weeping -southbound -cursing -stalls -diagonal -##neer -lamar -bryce -comte -weekdays -harrington -##uba -negatively -##see -lays -grouping -##cken -##henko -affirmed -halle -modernist -##lai -hodges -smelling -aristocratic -baptized -dismiss -justification -oilers -coupling -qin -snack -healer -##qing -gardener -layla -battled -formulated -stephenson -gravitational -##gill -1768 -granny -coordinating -suites -##ioned -monarchs -##cote -##hips -blended -barrister -deposition -fia -mina -policemen -paranoid -##pressed -churchyard -covert -crumpled -creep -abandoning -tr -transmit -conceal -barr -understands -readiness -spire -##cology -##enia -startling -unlock -vida -bowled -slots -##nat -##islav -spaced -trusting -admire -rig -slack -casualty -classmates -##odes -##rar -##rked -amherst -furnished -evolve -foundry -menace -mead -##lein -flu -wesleyan -##kled -monterey -webber -##vos -wil -##mith -##на -bartholomew -justices -restrained -##cke -amenities -mediated -sewage -trenches -mainz -##thus -1800s -##cula -##inski -caine -bonding -converts -spheres -superseded -marianne -crypt -sweaty -ensign -historia -##br -spruce -##ask -forks -thoughtfully -yukon -pamphlet -ames -##uter -karma -##yya -bryn -negotiation -sighs -incapable -##mbre -##ntial -actresses -taft -##mill -luce -prevailed -##amine -1773 -motionless -envoy -testify -investing -sculpted -instructors -provence -kali -cullen -horseback -##while -goodwin -##jos -gaa -norte -##ldon -modify -wavelength -abd -skinned -sprinter -forecast -scheduling -marries -squared -tentative -##chman -boer 
-##isch -bolts -swap -fisherman -assyrian -impatiently -guthrie -martins -murdoch -tanya -nicely -dolly -lacy -med -syn -decks -fashionable -millionaire -surfing -heaved -tammy -consulate -attendees -routinely -fuse -saxophonist -backseat -malaya -##lord -scowl -tau -##ishly -sighted -steaming -##rks -##holes -##hong -ching -##wife -bless -conserved -jurassic -stacey -zion -chunk -rigorous -blaine -peabody -slayer -dismay -brewers -nz -##jer -det -##glia -glover -postwar -penetration -sylvester -imitation -vertically -airlift -heiress -knoxville -viva -##uin -macon -##rim -##fighter -##gonal -janice -##orescence -##wari -marius -belongings -leicestershire -blanco -inverted -preseason -sanity -sobbing -##due -##elt -##dled -collingwood -regeneration -flickering -shortest -##mount -##osi -feminism -##lat -sherlock -cabinets -fumbled -northbound -precedent -snaps -##mme -researching -##akes -guillaume -insights -manipulated -vapor -neighbour -gangster -frey -stalking -scarcely -callie -barnett -tendencies -doomed -assessing -slung -panchayat -ambiguous -bartlett -##etto -distributing -violating -wolverhampton -##hetic -swami -histoire -##urus -liable -pounder -groin -hussain -larsen -popping -surprises -##atter -vie -curt -##station -mute -relocate -musicals -authorization -richter -##sef -immortality -tna -bombings -deteriorated -yiddish -##acious -robbed -colchester -ao -verified -balancing -apostle -swayed -recognizable -oxfordshire -retention -nottinghamshire -contender -judd -invitational -shrimp -uhf -##icient -cleaner -longitudinal -tanker -##mur -acronym -broker -koppen -sundance -suppliers -##gil -clipped -fuels -petite -##anne -landslide -helene -diversion -populous -landowners -auspices -melville -quantitative -##xes -ferries -nicky -##llus -doo -haunting -roche -carver -downed -unavailable -##pathy -approximation -hiroshima -##hue -garfield -valle -comparatively -keyboardist -traveler -##eit -congestion -calculating -subsidiaries -##bate -serb 
-modernization -fairies -deepened -ville -averages -##lore -inflammatory -tonga -##itch -co₂ -squads -##hea -gigantic -serum -enjoyment -retailer -verona -35th -cis -##phobic -magna -technicians -##vati -arithmetic -##sport -levin -##dation -amtrak -chow -sienna -##eyer -backstage -entrepreneurship -##otic -learnt -tao -##udy -worcestershire -formulation -baggage -hesitant -bali -sabotage -##kari -barren -enhancing -murmur -pl -freshly -putnam -syntax -aces -medicines -resentment -bandwidth -##sier -grins -chili -guido -##sei -framing -implying -gareth -lissa -genevieve -pertaining -admissions -geo -thorpe -proliferation -sato -bela -analyzing -parting -##gor -awakened -##isman -huddled -secrecy -##kling -hush -gentry -dungeons -##ego -coasts -##utz -sacrificed -##chule -landowner -mutually -prevalence -programmer -adolescent -disrupted -seaside -gee -trusts -vamp -georgie -##nesian -##iol -schedules -sindh -##market -etched -hm -sparse -bey -beaux -scratching -gliding -unidentified -collaborating -gems -jesuits -oro -accumulation -shaping -mbe -anal -##xin -enthusiasts -newscast -##egan -janata -dewey -parkinson -ankara -biennial -towering -inconsistent -##chet -thriving -terminate -cabins -furiously -eats -advocating -donkey -marley -muster -phyllis -leiden -##user -grassland -glittering -iucn -loneliness -memorandum -armenians -##ddle -popularized -rhodesia -60s -lame -##illon -sans -bikini -header -orbits -##finger -##ulator -sharif -spines -biotechnology -strolled -naughty -yates -##wire -fremantle -milo -##mour -abducted -removes -##atin -humming -##chrome -##ester -hume -pivotal -##rates -armand -grams -believers -elector -rte -apron -bis -scraped -##yria -endorsement -initials -##llation -dotted -hints -buzzing -emigration -nearer -indicators -##ulu -coarse -neutron -protectorate -##uze -directional -exploits -pains -loire -1830s -proponents -guggenheim -rabbits -ritchie -hectare -inputs -hutton -##raz -verify -##ako -boilers -longitude -##lev -skeletal 
-yer -emilia -citrus -compromised -##gau -prescription -paragraph -eduard -cadillac -attire -categorized -kenyan -weddings -charley -##bourg -entertain -monmouth -##lles -nutrients -davey -mesh -incentive -practised -ecosystems -kemp -subdued -overheard -##rya -bodily -maxim -##nius -apprenticeship -ursula -##fight -lodged -rug -silesian -unconstitutional -patel -inspected -coyote -unbeaten -##hak -34th -disruption -convict -parcel -##nham -collier -implicated -mallory -##iac -susannah -winkler -##rber -shia -phelps -sediments -graphical -robotic -##sner -adulthood -mart -smoked -##isto -kathryn -clarified -##aran -divides -convictions -oppression -pausing -burying -##mt -federico -mathias -eileen -##tana -kite -hunched -##acies -##atz -disadvantage -liza -kinetic -greedy -paradox -yokohama -dowager -trunks -ventured -##gement -gupta -vilnius -olaf -##thest -crimean -hopper -##ej -progressively -arturo -mouthed -arrondissement -##fusion -rubin -simulcast -oceania -##orum -##stra -##rred -busiest -intensely -navigator -cary -##vine -##hini -##bies -fife -rowe -rowland -posing -insurgents -shafts -lawsuits -activate -conor -inward -culturally -garlic -##eering -eclectic -##hui -##kee -##nl -furrowed -vargas -meteorological -rendezvous -##aus -culinary -commencement -##dition -quota -##notes -mommy -salaries -overlapping -mule -##iology -##mology -sums -wentworth -##isk -##zione -mainline -subgroup -##illy -hack -plaintiff -verdi -bulb -differentiation -engagements -multinational -supplemented -bertrand -caller -regis -##naire -##sler -##arts -##imated -blossom -propagation -kilometer -viaduct -vineyards -##uate -beckett -optimization -golfer -songwriters -seminal -semitic -thud -volatile -evolving -ridley -##wley -trivial -distributions -scandinavia -jiang -wrestled -insistence -emphasizes -napkin -##ods -adjunct -rhyme -##ricted -##eti -hopeless -surrounds -tremble -32nd -smoky -##ntly -oils -medicinal -padded -steer -wilkes -concessions -hue -uniquely -blinded 
-landon -##lane -hendrix -commemorating -dex -specify -chicks -##ggio -intercity -morley -##torm -highlighting -##oting -pang -oblique -stalled -##liner -flirting -newborn -1769 -bishopric -shaved -currie -dharma -spartan -##ooped -favorites -smug -novella -sirens -abusive -creations -espana -##lage -paradigm -semiconductor -sheen -##rdo -##yen -##zak -nrl -renew -##pose -##tur -adjutant -marches -norma -##enity -ineffective -weimar -grunt -##gat -lordship -plotting -expenditure -infringement -lbs -refrain -mimi -mistakenly -postmaster -1771 -##bara -ras -motorsports -tito -subjective -##zza -bully -stew -##kaya -prescott -##raphic -##zam -bids -styling -paranormal -reeve -sneaking -exploding -katz -akbar -migrant -syllables -indefinitely -##ogical -destroys -replaces -applause -##phine -pest -##fide -articulated -bertie -##cars -##ptic -courtroom -crowley -aesthetics -cummings -tehsil -hormones -titanic -dangerously -##ibe -stadion -jaenelle -auguste -ciudad -##chu -mysore -partisans -lucan -philipp -##aly -debating -henley -interiors -##rano -##tious -homecoming -beyonce -usher -henrietta -prepares -weeds -ely -plucked -##pire -##dable -luxurious -##aq -artifact -password -pasture -juno -maddy -minsk -##dder -##ologies -##rone -assessments -martian -royalist -1765 -examines -##mani -nino -parry -scooped -relativity -##eli -##uting -##cao -congregational -noisy -traverse -##agawa -strikeouts -nickelodeon -obituary -transylvania -binds -depictions -polk -trolley -##yed -##lard -breeders -##under -dryly -hokkaido -1762 -strengths -stacks -bonaparte -neared -prostitutes -stamped -anaheim -gutierrez -sinai -##zzling -bram -fresno -madhya -proton -##lena -##llum -##phon -reelected -wanda -##anus -##lb -ample -distinguishing -##yler -grasping -sermons -tomato -bland -stimulation -avenues -##eux -spreads -scarlett -fern -pentagon -assert -baird -chesapeake -calmed -distortion -fatalities -##olis -correctional -pricing -##astic -##gina -prom -dammit -ying -collaborate 
-##chia -welterweight -33rd -pointer -substitution -bonded -umpire -communicating -multitude -paddle -##obe -federally -intimacy -##insky -betray -ssr -##lett -##lves -##therapy -airbus -##tery -functioned -ud -bearer -biomedical -##hire -##nca -condom -brink -ik -##nical -macy -flap -gma -experimented -jelly -lavender -##icles -##ulia -munro -##mian -##tial -rye -##rle -60th -gigs -hottest -rotated -predictions -fuji -bu -##erence -##omi -barangay -##fulness -##sas -clocks -##rwood -##liness -cereal -roe -wight -decker -uttered -babu -onion -forcibly -##df -petra -sarcasm -hartley -peeled -storytelling -##xley -##ysis -##ffa -fibre -kiel -auditor -fig -harald -greenville -##berries -geographically -nell -quartz -##athic -cemeteries -crossings -nah -holloway -reptiles -chun -sichuan -snowy -corrections -##ivo -zheng -ambassadors -blacksmith -fielded -fluids -hardcover -turnover -medications -melvin -academies -##erton -roach -absorbing -spaniards -colton -##founded -outsider -espionage -kelsey -edible -##ulf -dora -establishes -##sham -##tries -contracting -##tania -cinematic -costello -nesting -##uron -connolly -duff -##nology -mma -##mata -fergus -sexes -optics -spectator -woodstock -banning -##hee -##fle -differentiate -outfielder -refinery -gerhard -horde -lair -drastically -##udi -landfall -##cheng -motorsport -odi -##achi -predominant -quay -skins -##ental -edna -harshly -complementary -murdering -##aves -wreckage -ono -outstretched -lennox -munitions -galen -reconcile -scalp -bicycles -gillespie -questionable -rosenberg -guillermo -jarvis -kabul -opium -yd -##twined -abuses -decca -outpost -##cino -sensible -neutrality -ponce -anchorage -atkins -turrets -inadvertently -disagree -libre -vodka -reassuring -weighs -##yal -glide -jumper -ceilings -repertory -outs -stain -##bial -envy -##ucible -smashing -heightened -policing -hyun -mixes -lai -prima -##ples -celeste -##bina -lucrative -intervened -kc -manually -##rned -stature -staffed -bun -bastards -nairobi 
-priced -##auer -thatcher -##kia -tripped -comune -##ogan -##pled -brasil -incentives -emanuel -hereford -musica -##kim -benedictine -biennale -##lani -eureka -gardiner -rb -knocks -sha -##ael -##elled -##onate -efficacy -ventura -masonic -sanford -maize -leverage -##feit -capacities -santana -##aur -novelty -vanilla -##cter -##tour -benin -##oir -neptune -drafting -tallinn -##cable -humiliation -##boarding -schleswig -fabian -bernardo -liturgy -spectacle -sweeney -pont -routledge -cosmos -ut -hilt -sleek -universally -##eville -##gawa -typed -##dry -favors -allegheny -glaciers -##rly -recalling -aziz -parasite -requiem -auf -##berto -##llin -illumination -##breaker -##issa -festivities -bows -govern -vibe -vp -sprawled -larson -pilgrim -bwf -leaping -##rts -##ssel -alexei -greyhound -hoarse -##dler -##oration -seneca -##cule -gaping -##ulously -##pura -cinnamon -##gens -##rricular -craven -fantasies -houghton -engined -reigned -dictator -supervising -##oris -bogota -commentaries -unnatural -fingernails -spirituality -tighten -canadiens -protesting -intentional -cheers -sparta -##ytic -##iere -##zine -widen -belgarath -controllers -dodd -iaaf -navarre -##ication -defect -squire -steiner -whisky -##mins -inevitably -tome -##gold -chew -##lid -elastic -##aby -streaked -alliances -jailed -regal -##ined -##phy -czechoslovak -narration -absently -##uld -bluegrass -guangdong -quran -criticizing -hose -hari -##liest -##owa -skier -streaks -deploy -##lom -raft -bose -dialed -huff -##eira -haifa -simplest -bursting -endings -sultanate -##titled -franks -whitman -ensures -sven -##ggs -collaborators -forster -organising -banished -napier -injustice -teller -layered -thump -##otti -roc -battleships -evidenced -fugitive -sadie -robotics -##roud -equatorial -geologist -##iza -yielding -##bron -##sr -internationale -mecca -##diment -skyline -toad -uploaded -reflective -undrafted -lal -leafs -bayern -##dai -lakshmi -shortlisted -##stick -##wicz -camouflage -donate -christi -lau 
-##acio -disclosed -nemesis -1761 -assemble -straining -northamptonshire -tal -##asi -bernardino -premature -heidi -42nd -coefficients -galactic -reproduce -buzzed -sensations -zionist -monsieur -myrtle -archery -strangled -musically -viewpoint -antiquities -bei -trailers -seahawks -cured -pee -preferring -tasmanian -lange -sul -##working -colder -overland -lucivar -massey -gatherings -haitian -##smith -disapproval -flaws -##cco -##enbach -1766 -npr -##icular -boroughs -creole -forums -techno -1755 -dent -abdominal -streetcar -##eson -##stream -procurement -gemini -predictable -##tya -acheron -christoph -feeder -fronts -vendor -bernhard -jammu -tumors -slang -##uber -goaltender -twists -curving -manson -vuelta -mer -peanut -confessions -pouch -unpredictable -allowance -theodor -vascular -##factory -bala -authenticity -metabolic -coughing -nanjing -##cea -pembroke -##bard -splendid -36th -hourly -##ahu -elmer -handel -##ivate -awarding -thrusting -experimentation -##hesion -caressed -entertained -steak -##rangle -biologist -orphans -baroness -oyster -stepfather -##dridge -mirage -reefs -speeding -barons -1764 -inhabit -preached -repealed -##tral -honoring -boogie -captives -administer -johanna -##imate -gel -suspiciously -1767 -sobs -##dington -backbone -hayward -garry -##folding -##nesia -maxi -##oof -##ppe -ellison -galileo -##stand -crimea -frenzy -amour -bumper -matrices -natalia -baking -garth -palestinians -##grove -smack -conveyed -ensembles -gardening -##manship -##rup -##stituting -1640 -harvesting -topography -shifters -dormitory -##carriage -##lston -ist -skulls -##stadt -dolores -jewellery -sarawak -##wai -##zier -fences -christy -confinement -tumbling -credibility -fir -stench -##bria -##plication -##nged -##sam -virtues -##belt -marjorie -pba -##eem -##made -celebrates -schooner -agitated -barley -fulfilling -anthropologist -restrict -novi -regulating -##nent -padres -##rani -##hesive -loyola -tabitha -milky -olson -proprietor -crambidae -guarantees 
-intercollegiate -ljubljana -hilda -##sko -ignorant -hooded -sardinia -##lidae -##vation -frontman -privileged -witchcraft -jammed -laude -poking -##than -bracket -amazement -yunnan -##erus -maharaja -linnaeus -commissioning -milano -peacefully -##logies -akira -rani -regulator -grasses -##rance -luzon -crows -compiler -gretchen -seaman -edouard -buccaneers -ellington -hamlets -whig -socialists -##anto -directorial -easton -mythological -##kr -##vary -rhineland -semantic -taut -dune -inventions -succeeds -##iter -replication -branched -##pired -prosecuted -kangaroo -penetrated -##avian -middlesbrough -doses -bleak -madam -predatory -relentless -##vili -reluctance -##vir -hailey -crore -silvery -1759 -monstrous -swimmers -transmissions -hawthorn -informing -##eral -toilets -caracas -crouch -##sett -cartel -hadley -##aling -alexia -yvonne -##biology -cinderella -eton -superb -blizzard -stabbing -industrialist -maximus -##orus -groves -maud -clade -oversized -comedic -##bella -rosen -nomadic -fulham -montane -beverages -galaxies -redundant -swarm -##rot -##folia -##llis -buckinghamshire -fen -bearings -bahadur -##rom -gilles -phased -dynamite -faber -benoit -##ount -fractured -tailored -anya -spices -westwood -cairns -auditions -inflammation -steamed -##rocity -##acion -##urne -skyla -thereof -watford -torment -archdeacon -transforms -demeanor -fucked -serge -##sor -mckenna -minas -entertainer -##icide -caress -originate -residue -##sty -1740 -##ilised -##org -beech -##wana -subsidies -##ghton -emptied -gladstone -firefighters -voodoo -het -nightingale -tamara -edmond -ingredient -weaknesses -silhouette -compatibility -withdrawing -hampson -##mona -anguish -giggling -bookstore -southernmost -tilting -##vance -bai -economical -briefcase -dreadful -hinted -projections -shattering -totaling -##rogate -analogue -indicted -periodical -fullback -##dman -haynes -##tenberg -##ffs -##ishment -1745 -thirst -stumble -penang -vigorous -##ddling -##kor -##lium -octave -##ove 
-##enstein -##inen -##ones -siberian -##uti -cbn -repeal -swaying -##vington -khalid -tanaka -unicorn -otago -plastered -lobe -riddle -##rella -perch -##ishing -croydon -filtered -graeme -tripoli -##ossa -crocodile -##chers -sufi -mined -##tung -inferno -lsu -##phi -swelled -utilizes -£2 -cale -periodicals -styx -hike -informally -coop -lund -##tidae -ala -hen -qui -transformations -disposed -sheath -chickens -##cade -fitzroy -silesia -unacceptable -odisha -1650 -sabrina -spokane -ratios -athena -massage -shen -dilemma -##drum -##riz -##hul -corona -doubtful -niall -##pha -##bino -fines -cite -acknowledging -bangor -ballard -bathurst -##resh -huron -mustered -alzheimer -garments -kinase -tyre -warship -flashback -pulmonary -braun -cheat -kamal -cyclists -constructions -grenades -ndp -traveller -excuses -stomped -signalling -trimmed -futsal -mosques -relevance -##wine -wta -##vah -hoc -##riding -optimistic -##´s -deco -interacting -rejecting -moniker -waterways -##ieri -##oku -mayors -gdansk -outnumbered -pearls -##ended -##hampton -fairs -totals -dominating -notions -stairway -compiling -pursed -commodities -grease -yeast -##jong -carthage -griffiths -residual -amc -contraction -laird -sapphire -##marine -##ivated -amalgamation -dissolve -inclination -lyle -packaged -altitudes -suez -canons -graded -lurched -narrowing -boasts -guise -enrico -##ovsky -rower -scarred -bree -cub -iberian -protagonists -bargaining -proposing -trainers -voyages -fishes -##aea -##ivist -##verance -encryption -artworks -kazan -sabre -cleopatra -hepburn -rotting -supremacy -mecklenburg -##brate -burrows -hazards -outgoing -flair -organizes -##ctions -scorpion -##usions -boo -chevalier -dunedin -slapping -ineligible -pensions -##omic -manufactures -emails -bismarck -weakening -blackish -ding -mcgee -quo -##rling -northernmost -manpower -greed -sampson -clicking -##ange -##horpe -##inations -##roving -torre -##eptive -##moral -symbolism -38th -asshole -meritorious -outfits -splashed 
-biographies -sprung -astros -##tale -filly -raoul -nw -tokugawa -linden -clubhouse -##apa -tracts -romano -##pio -putin -chained -dickson -gunshot -moe -gunn -rashid -##tails -zipper -##bas -##nea -contrasted -##ply -##udes -plum -pharaoh -##pile -aw -comedies -ingrid -sandwiches -subdivisions -mariana -kamen -hz -delaney -veto -herring -##words -possessive -outlines -##roup -siemens -stairwell -gallantry -messiah -palais -yells -zeppelin -bolivar -##cede -smackdown -mckinley -##mora -##yt -muted -geologic -finely -unitary -avatar -hamas -maynard -rees -bog -contrasting -##rut -liv -chico -disposition -##erate -becca -dmitry -yeshiva -narratives -##lva -##ulton -mercenary -sharpe -tempered -navigate -stealth -amassed -keynes -##lini -untouched -##rrie -havoc -lithium -##fighting -abyss -graf -southward -wolverine -balloons -implements -ngos -transitions -##icum -ambushed -concacaf -dormant -economists -##dim -costing -csi -rana -universite -boulders -verity -##llon -collin -mellon -misses -cypress -fluorescent -lifeless -spence -##ulla -crewe -shepard -pak -revelations -jolly -gibbons -paw -##dro -##quel -freeing -shack -fries -palatine -##hiko -accompaniment -cruising -recycled -##aver -erwin -sorting -synthesizers -dyke -realities -strides -enslaved -wetland -##ghan -competence -gunpowder -grassy -maroon -reactors -objection -##oms -carlson -gearbox -macintosh -radios -shelton -##sho -clergyman -prakash -mongols -trophies -oricon -stimuli -twenty20 -cantonese -cortes -mirrored -##saurus -bhp -cristina -melancholy -##lating -enjoyable -nuevo -##wny -downfall -schumacher -##ind -banging -lausanne -rumbled -paramilitary -reflex -ax -amplitude -migratory -##gall -##ups -midi -barnard -lastly -sherry -##nall -keystone -##kra -carleton -slippery -coloring -foe -socket -otter -##rgos -mats -##tose -consultants -bafta -bison -topping -primal -abandonment -transplant -atoll -hideous -mort -pained -reproduced -tae -howling -##turn -unlawful -billionaire -hotter -poised 
-lansing -##chang -dinamo -retro -messing -domesday -##mina -blitz -timed -##athing -##kley -ascending -gesturing -##izations -signaled -tis -chinatown -mermaid -savanna -jameson -##aint -catalina -##pet -##hers -cochrane -cy -chatting -##kus -alerted -computation -mused -noelle -majestic -mohawk -campo -octagonal -##sant -##hend -aspiring -##mart -comprehend -iona -paralyzed -shimmering -swindon -rhone -##eley -reputed -configurations -pitchfork -agitation -francais -gillian -lipstick -##ilo -outsiders -pontifical -resisting -bitterness -sewer -rockies -##edd -##ucher -misleading -1756 -exiting -galloway -##nging -risked -##heart -commemoration -schultz -##rka -integrating -##rsa -poses -shrieked -##weiler -guineas -gladys -jerking -owls -goldsmith -nightly -penetrating -##unced -lia -ignited -betsy -##aring -##thorpe -follower -vigorously -##rave -coded -kiran -knit -zoology -tbilisi -##bered -repository -govt -deciduous -dino -growling -##bba -enhancement -unleashed -chanting -pussy -biochemistry -##eric -kettle -repression -toxicity -nrhp -##arth -##kko -##bush -ernesto -commended -outspoken -mca -parchment -kristen -##aton -bisexual -raked -glamour -navajo -conditioned -showcased -##hma -spacious -youthful -##esa -usl -appliances -junta -brest -layne -conglomerate -enchanted -chao -loosened -picasso -circulating -inspect -montevideo -##centric -##kti -piazza -spurred -##aith -bari -freedoms -poultry -stamford -lieu -indigo -sarcastic -bahia -stump -attach -dvds -frankenstein -lille -approx -scriptures -pollen -##script -nmi -overseen -##ivism -tides -proponent -newmarket -inherit -milling -##erland -centralized -##rou -distributors -credentials -drawers -abbreviation -##lco -downing -uncomfortably -ripe -##oes -erase -franchises -populace -##bery -##khar -decomposition -pleas -##tet -daryl -sabah -##wide -fearless -genie -lesions -annette -##ogist -oboe -appendix -nair -dripped -petitioned -maclean -mosquito -parrot -hampered -1648 -operatic -reservoirs 
-##tham -irrelevant -jolt -summarized -##fp -medallion -##taff -clawed -harlow -narrower -goddard -marcia -bodied -fremont -suarez -altering -tempest -mussolini -porn -##isms -sweetly -oversees -walkers -solitude -grimly -shrines -ich -supervisors -hostess -dietrich -legitimacy -brushes -expressive -##yp -dissipated -##rse -localized -systemic -##nikov -gettysburg -##uaries -dialogues -muttering -housekeeper -sicilian -discouraged -##frey -beamed -kaladin -halftime -kidnap -##amo -##llet -1754 -synonymous -depleted -instituto -insulin -reprised -##opsis -clashed -##ctric -interrupting -radcliffe -insisting -medici -1715 -ejected -playfully -turbulent -starvation -##rini -shipment -rebellious -petersen -verification -merits -##rified -cakes -##charged -1757 -milford -shortages -spying -fidelity -##aker -emitted -storylines -harvested -seismic -##iform -cheung -kilda -theoretically -barbie -lynx -##rgy -##tius -goblin -mata -poisonous -##nburg -reactive -residues -obedience -##евич -conjecture -##rac -hating -sixties -kicker -moaning -motown -##bha -emancipation -neoclassical -##hering -consoles -ebert -professorship -##tures -sustaining -assaults -obeyed -affluent -incurred -tornadoes -##eber -##zow -emphasizing -highlanders -cheated -helmets -##ctus -internship -terence -bony -executions -legislators -berries -peninsular -tinged -##aco -1689 -amplifier -corvette -ribbons -lavish -pennant -##lander -worthless -##chfield -##forms -mariano -pyrenees -expenditures -##icides -chesterfield -mandir -tailor -39th -sergey -nestled -willed -aristocracy -devotees -goodnight -raaf -rumored -weaponry -remy -appropriations -harcourt -burr -riaa -##lence -limitation -unnoticed -guo -soaking -swamps -##tica -collapsing -tatiana -descriptive -brigham -psalm -##chment -maddox -##lization -patti -caliph -##aja -akron -injuring -serra -##ganj -basins -##sari -astonished -launcher -##church -hilary -wilkins -sewing -##sf -stinging -##fia -##ncia -underwood -startup -compilations 
-vibrations -embankment -jurist -bard -juventus -groundwater -kern -palaces -helium -boca -cramped -marissa -soto -##worm -jae -princely -##ggy -faso -bazaar -warmly -##voking -pairing -##lite -##grate -##nets -wien -freaked -ulysses -rebirth -##alia -mummy -guzman -jimenez -stilled -##nitz -trajectory -tha -woken -archival -professions -##pts -##pta -hilly -shadowy -shrink -##bolt -norwood -glued -migrate -stereotypes -devoid -##pheus -evacuate -horrors -infancy -gotham -knowles -optic -downloaded -sachs -kingsley -parramatta -darryl -mor -##onale -shady -commence -confesses -kan -##meter -##placed -marlborough -roundabout -regents -frigates -##imating -gothenburg -revoked -carvings -clockwise -convertible -intruder -##sche -banged -##ogo -vicky -bourgeois -##mony -dupont -footing -##gum -##real -buckle -yun -penthouse -sane -serviced -stakeholders -neumann -##eers -comb -##gam -catchment -pinning -rallies -typing -##elles -forefront -freiburg -sweetie -giacomo -widowed -goodwill -worshipped -aspirations -midday -##vat -fishery -##trick -bournemouth -turk -hearth -ethanol -guadalajara -murmurs -sl -##uge -afforded -scripted -##hta -wah -##jn -coroner -translucent -memorials -puck -progresses -clumsy -##race -candace -recounted -##slin -##uve -filtering -##mac -howl -strata -heron -leveled -##ays -dubious -##oja -##wheel -citations -exhibiting -##laya -##mics -turkic -##lberg -injunction -##ennial -antibodies -organise -##rigues -cardiovascular -cushion -inverness -##zquez -dia -cocoa -sibling -##tman -##roid -expanse -feasible -tunisian -algiers -##relli -rus -dso -westphalia -bro -tacoma -downloads -##ours -konrad -duran -##hdi -continuum -jett -compares -legislator -secession -##nable -##gues -##zuka -translating -reacher -##gley -##ła -aleppo -##agi -orchards -trapping -linguist -versatile -drumming -postage -calhoun -superiors -##mx -barefoot -leary -##cis -ignacio -alfa -kaplan -##rogen -bratislava -mori -##vot -disturb -haas -cartridges -gilmore -radiated 
-salford -tunic -hades -##ulsive -archeological -delilah -magistrates -auditioned -brewster -charters -empowerment -blogs -cappella -dynasties -iroquois -whipping -##krishna -raceway -truths -myra -weaken -judah -mcgregor -##horse -mic -refueling -37th -burnley -bosses -markus -premio -query -##gga -dunbar -##economic -darkest -lyndon -sealing -commendation -reappeared -##mun -addicted -ezio -slaughtered -satisfactory -shuffle -##eves -##thic -##uj -fortification -warrington -##otto -resurrected -fargo -mane -##utable -##lei -foreword -ox -##aris -##vern -abrams -hua -##mento -sakura -##alo -sentimental -##skaya -midfield -##eses -sturdy -scrolls -macleod -##kyu -entropy -##lance -mitochondrial -cicero -excelled -thinner -convoys -perceive -##oslav -##urable -systematically -grind -burkina -##tagram -ops -##aman -guantanamo -##cloth -##tite -forcefully -wavy -##jou -pointless -##linger -##tze -layton -portico -superficial -clerical -outlaws -##hism -burials -muir -##inn -creditors -hauling -rattle -##leg -calais -monde -archers -reclaimed -dwell -wexford -hellenic -falsely -remorse -##tek -dough -furnishings -##uttered -gabon -neurological -novice -##igraphy -contemplated -pulpit -nightstand -saratoga -##istan -documenting -pulsing -taluk -##firmed -busted -marital -##rien -disagreements -wasps -##yes -hodge -mcdonnell -mimic -fran -pendant -dhabi -musa -##nington -congratulations -argent -darrell -concussion -losers -regrets -thessaloniki -reversal -donaldson -hardwood -thence -achilles -ritter -##eran -demonic -jurgen -prophets -goethe -eki -classmate -##cking -yank -irrational -##inging -perished -seductive -qur -sourced -##crat -##typic -mustard -ravine -barre -horizontally -characterization -phylogenetic -boise -##dit -##runner -##tower -brutally -intercourse -seduce -##bbing -fay -ferris -ogden -amar -nik -unarmed -##inator -evaluating -kyrgyzstan -sweetness -##lford -##oki -mccormick -meiji -notoriety -stimulate -disrupt -figuring -instructional -mcgrath 
-##zoo -groundbreaking -##lto -flinch -khorasan -agrarian -bengals -mixer -radiating -##sov -ingram -pitchers -nad -tariff -##cript -tata -##codes -##emi -##ungen -appellate -lehigh -##bled -##giri -brawl -duct -texans -##ciation -##ropolis -skipper -speculative -vomit -doctrines -stresses -davy -graders -whitehead -jozef -timely -cumulative -haryana -paints -appropriately -boon -cactus -##ales -##pid -dow -legions -##pit -perceptions -1730 -picturesque -##yse -periphery -rune -wr -##aha -celtics -sentencing -whoa -##erin -confirms -variance -moines -mathews -spade -rave -fronted -blending -alleging -reared -##paper -grassroots -eroded -##physical -directs -ordeal -##sław -accelerate -hacker -rooftop -##inia -lev -buys -cebu -devote -##lce -specialising -##ulsion -choreographed -repetition -warehouses -##ryl -paisley -tuscany -analogy -sorcerer -hash -huts -shards -descends -exclude -nix -chaplin -ito -vane -##drich -causeway -misconduct -limo -orchestrated -glands -jana -##kot -u2 -##sons -branching -contrasts -scoop -longed -##virus -chattanooga -syrup -cornerstone -##tized -##mind -##iaceae -careless -precedence -frescoes -##uet -chilled -consult -modelled -snatch -peat -##thermal -caucasian -humane -relaxation -spins -temperance -##lbert -occupations -lambda -hybrids -moons -##oese -rolf -societal -yerevan -ness -##ssler -befriended -mechanized -nominate -trough -boasted -cues -seater -##hom -bends -##tangle -conductors -emptiness -eurasian -adriatic -tian -##cie -anxiously -lark -propellers -chichester -jock -##holding -credible -recounts -tori -loyalist -abduction -##hoot -##redo -nepali -##mite -ventral -tempting -##ango -##crats -steered -##wice -javelin -dipping -laborers -prentice -looming -titanium -badges -emir -tensor -##ntation -egyptians -rash -denies -hawthorne -lombard -showers -wehrmacht -dietary -trojan -##reus -welles -executing -horseshoe -lifeboat -##lak -elsa -infirmary -nearing -roberta -boyer -mutter -trillion -joanne -##fine -##oked -sinks 
-vortex -uruguayan -clasp -sirius -##block -accelerator -prohibit -sunken -byu -chronological -diplomats -ochreous -symmetrical -1644 -maia -##tology -salts -reigns -atrocities -##ия -hess -bared -issn -##vyn -cater -saturated -##cycle -##isse -sable -voyager -dyer -yusuf -##inge -fountains -wolff -##nni -engraving -rollins -atheist -ominous -##ault -herr -chariot -martina -strung -##fell -##farlane -horrific -sahib -gazes -saetan -erased -ptolemy -##olic -flushing -lauderdale -analytic -##ices -navarro -beak -gorilla -herrera -broom -guadalupe -raiding -sykes -bsc -deliveries -1720 -invasions -carmichael -tajikistan -thematic -ecumenical -sentiments -onstage -##rians -##brand -##sume -catastrophic -flanks -molten -##arns -waller -aimee -terminating -##icing -alternately -##oche -nehru -printers -outraged -##eving -empires -template -banners -repetitive -za -##oise -vegetarian -##tell -guiana -opt -cavendish -lucknow -synthesized -##hani -##mada -finalized -##ctable -fictitious -mayoral -unreliable -##enham -embracing -peppers -rbis -##chio -##neo -inhibition -slashed -togo -orderly -embroidered -salty -barron -benito -totaled -##dak -pubs -simulated -caden -devin -tolkien -momma -welding -sesame -##ept -gottingen -hardness -shaman -temeraire -adequately -pediatric -assertion -radicals -composure -cadence -seafood -beaufort -lazarus -mani -warily -cunning -kurdistan -cantata -##kir -ares -##clusive -nape -townland -geared -insulted -flutter -boating -violate -draper -dumping -malmo -##hh -##romatic -firearm -alta -bono -obscured -##clave -exceeds -panorama -unbelievable -##train -preschool -##essed -disconnected -installing -rescuing -secretaries -accessibility -##castle -##ifice -##film -bouts -slug -waterway -mindanao -##buro -##ratic -halves -calming -liter -maternity -adorable -bragg -electrification -mcc -##dote -roxy -schizophrenia -munoz -kaye -whaling -mil -tingling -tolerant -##ago -unconventional -volcanoes -##finder -deportivo -##llie -robson -kaufman 
-neuroscience -wai -deportation -masovian -scraping -converse -##bh -hacking -bulge -##oun -administratively -yao -mammoth -booster -claremont -hooper -nomenclature -pursuits -mclaughlin -melinda -##sul -catfish -barclay -substrates -taxa -zee -kimberly -packets -padma -##ality -borrowing -ostensibly -solvent -##bri -##genesis -##mist -lukas -shreveport -veracruz -##lou -##wives -cheney -anatolia -hobbs -##zyn -cyclic -radiant -alistair -greenish -siena -dat -independents -##bation -conform -pieter -hyper -applicant -bradshaw -spores -telangana -vinci -inexpensive -nuclei -jang -nme -spd -cradled -receptionist -pow -##rika -fascism -##ifer -experimenting -##ading -##iec -##region -jocelyn -maris -stair -nocturnal -toro -constabulary -elgin -##kker -msc -##giving -##schen -##rase -doherty -doping -sarcastically -batter -maneuvers -##cano -##apple -##gai -##git -intrinsic -##nst -##stor -1753 -showtime -cafes -gasps -lviv -ushered -##thed -fours -restart -astonishment -transmitting -flyer -shrugs -##sau -intriguing -cones -dictated -mushrooms -medial -##kovsky -##elman -escorting -gaped -godfather -##door -##sell -djs -recaptured -timetable -vila -1710 -aerodrome -mortals -scientology -##orne -angelina -mag -convection -unpaid -insertion -intermittent -lego -##nated -endeavor -kota -pereira -##lz -bwv -glamorgan -insults -agatha -fey -##cend -fleetwood -mahogany -protruding -steamship -zeta -##arty -mcguire -suspense -##sphere -advising -urges -##wala -hurriedly -meteor -gilded -inline -arroyo -stalker -##oge -excitedly -revered -##cure -earle -introductory -##break -##ilde -mutants -puff -pulses -reinforcement -##haling -curses -lizards -stalk -correlated -##fixed -fallout -macquarie -##unas -bearded -denton -heaving -##ocation -winery -assign -dortmund -##lkirk -everest -invariant -charismatic -susie -##elling -bled -lesley -telegram -sumner -bk -##ogen -wilcox -needy -colbert -duval -##iferous -##mbled -allotted -attends -imperative -##hita -replacements -hawker 
-##inda -insurgency -##zee -##eke -casts -##yla -ives -transitioned -##pack -##powering -authoritative -baylor -flex -cringed -plaintiffs -woodrow -##skie -drastic -ape -aroma -unfolded -commotion -preoccupied -theta -routines -lasers -privatization -wand -domino -ek -clenching -nsa -strategically -showered -bile -handkerchief -pere -storing -christophe -insulting -nakamura -romani -asiatic -magdalena -palma -cruises -stripping -konstantin -soaring -##berman -colloquially -forerunner -havilland -incarcerated -parasites -sincerity -##utus -disks -plank -saigon -##ining -corbin -homo -ornaments -powerhouse -##tlement -chong -fastened -feasibility -idf -morphological -usable -##nish -##zuki -aqueduct -jaguars -keepers -##flies -aleksandr -faust -assigns -ewing -bacterium -hurled -tricky -hungarians -integers -wallis -yamaha -##isha -hushed -oblivion -aviator -evangelist -friars -##eller -monograph -ode -##nary -airplanes -labourers -charms -##nee -1661 -hagen -tnt -rudder -fiesta -transcript -dorothea -ska -inhibitor -maccabi -retorted -raining -encompassed -clauses -menacing -1642 -lineman -##gist -vamps -##dick -gloom -##rera -dealings -easing -seekers -##nut -##pment -helens -unmanned -##anu -##isson -basics -##amy -##ckman -adjustments -1688 -brutality -horne -##zell -##mable -aggregator -##thal -rhino -##drick -##vira -counters -##rting -mn -montenegrin -packard -##unciation -##♭ -##kki -reclaim -scholastic -thugs -pulsed -##icia -syriac -quan -saddam -banda -kobe -blaming -buddies -dissent -##lusion -##usia -corbett -jaya -delle -erratic -lexie -##hesis -amiga -hermes -##pressing -##leen -chapels -gospels -jamal -##uating -compute -revolving -warp -##sso -##thes -armory -##eras -##gol -antrim -loki -##kow -##asian -##good -##zano -braid -handwriting -subdistrict -funky -pantheon -##iculate -concurrency -estimation -improper -juliana -##his -newcomers -johnstone -staten -communicated -##oco -##alle -sausage -stormy -##stered -##tters -superfamily -##grade -acidic 
-collateral -tabloid -##oped -##rza -bladder -austen -##ellant -mcgraw -##hay -hannibal -mein -aquino -lucifer -wo -badger -boar -cher -christensen -greenberg -interruption -##kken -jem -mocked -bottoms -cambridgeshire -##lide -sprawling -##bbly -eastwood -ghent -synth -##buck -advisers -##bah -nominally -hapoel -qu -daggers -estranged -fabricated -towels -vinnie -wcw -misunderstanding -anglia -nothin -unmistakable -##dust -##lova -chilly -marquette -truss -##edge -##erine -reece -##lty -##chemist -##connected -41st -bash -raion -waterfalls -##ump -##main -labyrinth -queue -theorist -##istle -bharatiya -flexed -soundtracks -rooney -leftist -patrolling -wharton -plainly -alleviate -eastman -schuster -topographic -engages -immensely -unbearable -fairchild -1620 -dona -lurking -parisian -oliveira -ia -indictment -hahn -bangladeshi -##aster -##uming -##ential -antonia -expects -indoors -kildare -harlan -##logue -##ogenic -##sities -forgiven -##wat -childish -tavi -##mide -##orra -plausible -grimm -successively -scooted -##bola -##rith -spartans -emery -flatly -epilogue -##wark -flourish -##iny -##tracted -##overs -##oshi -bestseller -distressed -receipt -spitting -hermit -topological -##cot -drilled -subunit -francs -##layer -eel -##fk -##itas -octopus -footprint -petitions -##say -##foil -interfering -leaking -palo -##metry -thistle -valiant -##pic -narayan -mcpherson -##fast -gonzales -##enne -dustin -novgorod -solos -##zman -doin -##patient -##meyer -soluble -ashland -cuffs -carole -pendleton -whistling -vassal -##river -deviation -revisited -constituents -rallied -rotate -loomed -##eil -##nting -amateurs -augsburg -auschwitz -crowns -skeletons -##cona -bonnet -dummy -globalization -simeon -sleeper -mandal -differentiated -##crow -##mare -milne -bundled -exasperated -talmud -owes -segregated -##feng -##uary -dentist -piracy -props -##rang -devlin -##torium -malicious -paws -##laid -dependency -##ergy -##fers -##enna -pistons -rourke -jed -grammatical -tres -maha 
-wig -ghostly -jayne -##achal -##creen -##ilis -##lins -designate -##with -arrogance -cambodian -clones -showdown -throttle -twain -##ception -lobes -metz -nagoya -braking -##furt -roaming -##minster -amin -crippled -##llary -indifferent -hoffmann -idols -intimidating -1751 -influenza -memo -onions -1748 -bandage -consciously -##landa -##rage -clandestine -observes -swiped -tangle -##ener -##jected -##trum -##bill -##lta -hugs -congresses -josiah -spirited -##dek -humanist -managerial -filmmaking -inmate -rhymes -debuting -grimsby -ur -##laze -duplicate -vigor -republished -bolshevik -refurbishment -antibiotics -martini -methane -newscasts -royale -horizons -levant -iain -visas -##ischen -paler -##around -manifestation -snuck -alf -chop -futile -pedestal -rehab -##kat -bmg -kerman -res -fairbanks -jarrett -abstraction -saharan -##zek -1746 -procedural -clearer -kincaid -sash -luciano -##ffey -crunch -helmut -##vara -revolutionaries -##tute -creamy -leach -##mmon -1747 -permitting -nes -plight -wendell -##lese -contra -clancy -ipa -mach -staples -autopsy -disturbances -nueva -karin -pontiac -##uding -proxy -venerable -haunt -leto -bergman -expands -##helm -wal -##pipe -canning -celine -cords -obesity -##enary -intrusion -planner -##phate -reasoned -sequencing -harrow -##chon -##dora -marred -mcintyre -repay -tarzan -darting -harrisburg -margarita -repulsed -##lding -belinda -hamburger -novo -compliant -runways -bingham -registrar -skyscraper -cuthbert -improvisation -livelihood -##corp -##elial -admiring -##dened -sporadic -believer -casablanca -popcorn -asha -shovel -##bek -##dice -coiled -tangible -##dez -casper -elsie -resin -tenderness -rectory -##ivision -avail -sonar -##mori -boutique -##dier -guerre -bathed -upbringing -vaulted -sandals -blessings -##naut -##utnant -1680 -foxes -pia -corrosion -hesitantly -confederates -crystalline -footprints -shapiro -tirana -valentin -drones -45th -microscope -shipments -texted -inquisition -wry -guernsey -unauthorized 
-resigning -ripple -schubert -stu -reassure -felony -##ardo -brittle -koreans -##havan -##ives -dun -implicit -tyres -##aldi -##lth -magnolia -##ehan -##puri -##poulos -aggressively -fei -gr -familiarity -##poo -indicative -##trust -fundamentally -jimmie -overrun -anchors -moans -##opus -britannia -armagh -purposely -seizing -##vao -bewildered -mundane -avoidance -cosmopolitan -geometridae -quartermaster -caf -chatter -engulfed -gleam -purge -##icate -juliette -jurisprudence -guerra -revisions -##bn -casimir -brew -##jm -1749 -clapton -cloudy -conde -hermitage -simulations -torches -vincenzo -matteo -##rill -hidalgo -booming -westbound -accomplishment -tentacles -unaffected -##sius -annabelle -flopped -sloping -##litz -dreamer -interceptor -vu -##loh -consecration -copying -messaging -breaker -climates -hospitalized -1752 -torino -afternoons -winfield -witnessing -##teacher -breakers -choirs -sawmill -coldly -##ege -sipping -haste -uninhabited -conical -bibliography -pamphlets -severn -edict -##oca -deux -illnesses -grips -rehearsals -sis -thinkers -tame -##keepers -1690 -acacia -reformer -##osed -##rys -shuffling -##iring -##shima -eastbound -ionic -rhea -flees -littered -##oum -rocker -vomiting -groaning -champ -overwhelmingly -civilizations -paces -sloop -adoptive -##tish -skaters -##vres -aiding -nikola -shriek -##ignon -pharmaceuticals -tuna -calvert -gustavo -stocked -yearbook -##urai -##mana -computed -subsp -riff -hanoi -kelvin -hamid -moors -pastures -summons -jihad -nectar -##ctors -bayou -untitled -pleasing -vastly -republics -intellect -##ulio -##tou -crumbling -stylistic -##ی -consolation -frequented -h₂o -walden -widows -##iens -##ignment -chunks -improves -grit -recited -##dev -snarl -sociological -##arte -##gul -inquired -##held -bruise -clube -consultancy -homogeneous -hornets -multiplication -pasta -prick -savior -##grin -##kou -##phile -yoon -##gara -grimes -vanishing -cheering -reacting -bn -distillery -##quisite -##vity -coe -dockyard -massif 
-##jord -escorts -voss -##valent -byte -chopped -hawke -illusions -workings -floats -##koto -##vac -kv -annapolis -madden -##onus -alvaro -noctuidae -##cum -##scopic -avenge -steamboat -forte -illustrates -erika -##trip -dew -nationalities -bran -manifested -thirsty -diversified -muscled -reborn -##standing -arson -##lessness -##dran -##logram -##boys -##kushima -##vious -willoughby -##phobia -alsace -dashboard -yuki -##chai -granville -myspace -publicized -tricked -##gang -adjective -##ater -relic -reorganisation -enthusiastically -indications -saxe -##lassified -consolidate -iec -padua -helplessly -ramps -renaming -regulars -pedestrians -accents -convicts -inaccurate -lowers -mana -##pati -barrie -bjp -outta -someplace -berwick -flanking -invoked -marrow -sparsely -excerpts -clothed -rei -##ginal -wept -##straße -##vish -##ptive -membranes -aquitaine -creeks -cutler -sheppard -implementations -##dur -fragrance -budge -concordia -magnesium -marcelo -##antes -gladly -vibrating -##rral -##ggles -montrose -##omba -lew -seamus -1630 -cocky -##ament -##uen -bjorn -##rrick -fielder -fluttering -##lase -methyl -kimberley -mcdowell -reductions -barbed -##jic -##tonic -aeronautical -condensed -distracting -##promising -huffed -##cala -##sle -claudius -invincible -missy -pious -balthazar -##lang -butte -combo -orson -##dication -myriad -1707 -silenced -##fed -##rh -netball -yourselves -##oza -clarify -heller -peg -durban -etudes -offender -roast -blackmail -curvature -##woods -vile -illicit -suriname -##linson -overture -1685 -bubbling -gymnast -tucking -##mming -##ouin -maldives -##bala -gurney -##dda -##eased -##oides -backside -pinto -jars -racehorse -tending -##rdial -baronetcy -wiener -duly -##rke -barbarian -cupping -flawed -##thesis -bertha -pleistocene -puddle -swearing -##nob -##tically -fleeting -prostate -amulet -educating -##mined -##tler -75th -jens -respondents -cavaliers -papacy -raju -##iente -##ulum -##tip -funnel -disneyland -##lley -sociologist -##iam 
-faulkner -louvre -menon -##dson -##ower -afterlife -mannheim -peptide -referees -comedians -meaningless -##anger -##laise -fabrics -hurley -renal -sleeps -##bour -##icle -breakout -kristin -roadside -animator -clover -disdain -unsafe -redesign -##urity -firth -barnsley -portage -reset -narrows -commandos -expansive -speechless -tubular -essendon -eyelashes -smashwords -##yad -##bang -##claim -craved -sprinted -chet -somme -astor -wrocław -orton -bane -##erving -##uing -mischief -##amps -##sund -scaling -terre -##xious -impairment -offenses -undermine -moi -soy -contiguous -arcadia -inuit -seam -##tops -macbeth -rebelled -##icative -##iot -elaborated -frs -uniformed -##dberg -powerless -priscilla -stimulated -qc -arboretum -frustrating -trieste -bullock -##nified -enriched -glistening -intern -##adia -locus -nouvelle -ollie -ike -lash -starboard -tapestry -headlined -hove -rigged -##vite -pollock -##yme -thrive -clustered -cas -roi -gleamed -olympiad -##lino -pressured -regimes -##hosis -##lick -ripley -##ophone -kickoff -gallon -rockwell -##arable -crusader -glue -revolutions -scrambling -1714 -grover -##jure -englishman -aztec -contemplating -coven -preach -triumphant -tufts -##esian -rotational -##phus -falkland -##brates -strewn -clarissa -rejoin -environmentally -glint -banded -drenched -moat -albanians -johor -rr -maestro -malley -nouveau -shaded -taxonomy -adhere -bunk -airfields -##ritan -1741 -encompass -remington -tran -##erative -amelie -mazda -friar -morals -passions -##zai -breadth -vis -##hae -argus -burnham -caressing -insider -rudd -##imov -##rso -italianate -murderous -textual -wainwright -armada -bam -weave -timer -##taken -##nh -fra -##crest -ardent -salazar -taps -tunis -##ntino -allegro -gland -philanthropic -##chester -implication -##optera -esq -judas -noticeably -wynn -##dara -inched -indexed -crises -villiers -bandit -royalties -patterned -cupboard -interspersed -accessory -isla -kendrick -entourage -stitches -##esthesia -headwaters -##ior 
-interlude -distraught -draught -1727 -##basket -biased -sy -transient -triad -subgenus -adapting -kidd -shortstop -##umatic -dimly -spiked -mcleod -reprint -nellie -pretoria -windmill -##cek -singled -##mps -reunite -##orous -bankers -outlying -##omp -##ports -##tream -apologies -cosmetics -patsy -##deh -##ocks -##yson -bender -nantes -serene -##nad -lucha -mmm -##cius -##gli -cmll -coinage -nestor -juarez -##rook -smeared -sprayed -twitching -sterile -irina -embodied -juveniles -enveloped -miscellaneous -cancers -dq -gulped -luisa -crested -swat -donegal -ref -##anov -##acker -hearst -mercantile -##lika -doorbell -vicki -##alla -##som -bilbao -psychologists -stryker -sw -horsemen -turkmenistan -wits -##national -anson -mathew -screenings -##umb -rihanna -##agne -##nessy -aisles -##iani -##osphere -hines -kenton -saskatoon -tasha -truncated -##champ -##itan -mildred -advises -fredrik -interpreting -inhibitors -##athi -spectroscopy -##hab -##kong -karim -panda -##oia -##nail -conqueror -kgb -leukemia -##dity -arrivals -cheered -pisa -phosphorus -shielded -##riated -mammal -unitarian -urgently -chopin -sanitary -##mission -spicy -drugged -hinges -##tort -tipping -trier -impoverished -westchester -##caster -epoch -nonstop -##gman -##khov -aromatic -centrally -cerro -##tively -##vio -billions -modulation -sedimentary -facilitating -outrageous -goldstein -##eak -##kt -ld -maitland -penultimate -pollard -##dance -fleets -spaceship -vertebrae -##nig -alcoholism -als -recital -##bham -##omics -##bm -trois -##tropical -commemorates -##meric -marge -##raction -1643 -cosmetic -ravaged -##ige -catastrophe -eng -##shida -albrecht -arterial -bellamy -decor -harmon -##rde -bulbs -synchronized -vito -easiest -shetland -shielding -wnba -##glers -##ssar -##riam -brianna -cumbria -##aceous -##rard -cores -thayer -##nsk -brood -hilltop -luminous -carts -keynote -larkin -logos -##cta -##mund -##quay -lilith -tinted -wrestle -mobilization -##uses -sequential -siam -bloomfield 
-takahashi -##ieving -presenters -ringo -blazed -witty -##oven -##ignant -devastation -haydn -harmed -newt -therese -##peed -gershwin -molina -rabbis -sudanese -innate -restarted -##sack -##fus -slices -wb -##shah -enroll -hypothetical -hysterical -1743 -fabio -indefinite -warped -exchanging -unsuitable -##sboro -gallo -1603 -bret -cobalt -homemade -##hunter -operatives -##dhar -terraces -durable -latch -pens -whorls -##ctuated -##eaux -billing -ligament -succumbed -##gly -regulators -spawn -##brick -##stead -filmfare -rochelle -##nzo -1725 -circumstance -saber -supplements -##nsky -##tson -crowe -wellesley -carrot -##9th -##movable -primate -drury -sincerely -topical -##mad -##rao -callahan -kyiv -smarter -tits -undo -##yeh -announcements -anthologies -barrio -nebula -##islaus -##shaft -##tyn -bodyguards -assassinate -barns -emmett -scully -##yd -##eland -##tino -##itarian -demoted -gorman -lashed -prized -adventist -writ -##gui -alla -invertebrates -##ausen -1641 -amman -1742 -align -healy -redistribution -##gf -##rize -insulation -##drop -adherents -hezbollah -vitro -ferns -yanking -registering -uppsala -cheerleading -confines -mischievous -tully -##ross -49th -docked -roam -stipulated -pumpkin -##bry -prompt -##ezer -blindly -shuddering -craftsmen -frail -scented -katharine -scramble -shaggy -sponge -helix -zaragoza -43rd -backlash -fontaine -seizures -posse -cowan -nonfiction -telenovela -wwii -hammered -undone -##gpur -encircled -irs -##ivation -artefacts -oneself -searing -smallpox -##belle -##osaurus -shandong -breached -upland -blushing -rankin -infinitely -psyche -tolerated -docking -evicted -##col -unmarked -##lving -gnome -lettering -litres -musique -##oint -benevolent -##jal -blackened -##anna -mccall -racers -tingle -##ocene -##orestation -introductions -radically -##hiff -##باد -1610 -1739 -munchen -plead -##nka -condo -scissors -##sight -##tens -apprehension -##cey -##yin -hallmark -watering -formulas -sequels -##llas -aggravated -bae -commencing 
-##building -enfield -prohibits -marne -vedic -civilized -euclidean -jagger -beforehand -blasts -dumont -##arney -##nem -conversions -hierarchical -rios -simulator -##dya -##lellan -hedges -oleg -thrusts -shadowed -darby -maximize -1744 -gregorian -##nded -##routed -sham -unspecified -##hog -emory -factual -##smo -fooled -##rger -ortega -wellness -marlon -##oton -##urance -casket -keating -ley -enclave -##ayan -char -influencing -jia -##chenko -ammonia -erebidae -incompatible -violins -cornered -##arat -grooves -astronauts -columbian -rampant -fabrication -kyushu -mahmud -vanish -##dern -mesopotamia -##lete -##rgen -caspian -kenji -pitted -##vered -grimace -roanoke -tchaikovsky -twinned -##analysis -##awan -xinjiang -arias -clemson -kazakh -sizable -1662 -##khand -##vard -plunge -tatum -vittorio -##nden -cholera -##dana -bracing -indifference -projectile -superliga -##chee -realises -upgrading -porte -retribution -##vies -nk -stil -##resses -ama -bureaucracy -blackberry -bosch -testosterone -collapses -greer -##pathic -ioc -fifties -malls -##erved -bao -baskets -adolescents -siegfried -##osity -##tosis -mantra -detecting -existent -fledgling -##cchi -dissatisfied -gan -telecommunication -mingled -sobbed -controversies -outdated -taxis -##raus -fright -slams -##lham -##fect -##tten -detectors -fetal -tanned -##uw -fray -goth -olympian -skipping -mandates -scratches -sheng -unspoken -hyundai -tracey -hotspur -restrictive -##buch -americana -mundo -##bari -burroughs -diva -vulcan -##6th -distinctions -thumping -##ngen -mikey -sheds -fide -rescues -springsteen -vested -valuation -##ece -##ely -pinnacle -rake -sylvie -##edo -almond -quivering -##irus -alteration -faltered -##wad -51st -hydra -ticked -##kato -recommends -##dicated -antigua -arjun -stagecoach -wilfred -trickle -pronouns -##pon -aryan -nighttime -##anian -gall -pea -stitch -##hei -leung -milos -##dini -eritrea -starved -snowfall -kant -parasitic -cot -discus -hana -strikers -appleton -kitchens -##erina 
-##partisan -##itha -##vius -disclose -metis -##channel -1701 -##vera -fitch -1735 -blooded -##tila -decimal -##tang -##bai -cyclones -eun -bottled -peas -pensacola -basha -bolivian -crabs -boil -lanterns -partridge -roofed -1645 -necks -##phila -opined -patting -##kla -##lland -chuckles -volta -whereupon -##nche -devout -euroleague -suicidal -##dee -inherently -involuntary -knitting -nasser -##hide -puppets -colourful -courageous -southend -stills -miraculous -hodgson -richer -rochdale -ethernet -greta -uniting -prism -umm -##haya -##itical -##utation -deterioration -pointe -prowess -##ropriation -lids -scranton -billings -subcontinent -##koff -##scope -brute -kellogg -psalms -degraded -##vez -stanisław -##ructured -ferreira -pun -astonishing -gunnar -##yat -arya -prc -gottfried -##tight -excursion -##ographer -dina -##quil -##nare -huffington -illustrious -wilbur -verandah -##zard -naacp -##odle -constructive -fjord -kade -##naud -generosity -thrilling -baseline -cayman -frankish -plastics -accommodations -zoological -##fting -cedric -qb -motorized -##dome -##otted -squealed -tackled -canucks -budgets -situ -asthma -dail -gabled -grasslands -whimpered -writhing -judgments -minnie -##carbon -bananas -grille -domes -monique -odin -maguire -markham -tierney -##estra -##chua -libel -poke -speedy -atrium -laval -notwithstanding -##edly -fai -kala -##sur -robb -##sma -listings -luz -supplementary -tianjin -##acing -enzo -jd -ric -scanner -croats -transcribed -arden -##hair -##raphy -##lver -seventies -staggering -alam -horticultural -hs -regression -timbers -blasting -##ounded -montagu -manipulating -##cit -catalytic -1550 -troopers -##meo -condemnation -fitzpatrick -##oire -##roved -inexperienced -1670 -castes -##lative -outing -dubois -flicking -quarrel -ste -learners -1625 -whistled -##class -classify -tariffs -temperament -folly -liszt -##yles -immersed -jordanian -ceasefire -apparel -extras -maru -fished -##bio -harta -stockport -assortment -craftsman -paralysis 
-transmitters -##cola -blindness -##wk -fatally -proficiency -solemnly -##orno -repairing -amore -groceries -ultraviolet -##chase -schoolhouse -##tua -resurgence -nailed -##otype -ruse -saliva -diagrams -##tructing -albans -rann -thirties -antennas -hilarious -cougars -paddington -stats -##eger -breakaway -reza -authorship -prohibiting -scoffed -##etz -##ttle -conscription -defected -trondheim -##fires -ivanov -keenan -##adan -##ciful -##fb -##slow -locating -##ials -##tford -cadiz -basalt -blankly -interned -rags -rattling -##tick -carpathian -reassured -bum -guildford -iss -staunch -##onga -astronomers -sera -sofie -emergencies -susquehanna -##heard -duc -mastery -vh1 -williamsburg -bayer -buckled -craving -##khan -##rdes -bloomington -##write -alton -barbecue -##bians -justine -##hri -##ndt -delightful -smartphone -newtown -photon -retrieval -peugeot -hissing -##monium -##orough -flavors -lighted -relaunched -tainted -##games -##lysis -anarchy -microscopic -hopping -adept -evade -evie -##beau -inhibit -sinn -adjustable -hurst -intuition -wilton -44th -lawful -lowlands -stockings -thierry -##dalen -##hila -##nai -fates -prank -maison -lobbied -provocative -1724 -utopia -##qual -carbonate -gujarati -purcell -##rford -curtiss -##mei -overgrown -arenas -mediation -swallows -##rnik -respectful -turnbull -##hedron -##hope -alyssa -ozone -##ʻi -ami -gestapo -johansson -snooker -canteen -cuff -declines -empathy -stigma -##ags -##raine -taxpayers -volga -##wright -##copic -lifespan -overcame -tattooed -enactment -giggles -##ador -##camp -barrington -bribe -obligatory -orbiting -peng -##enas -elusive -sucker -##vating -cong -hardship -empowered -anticipating -estrada -cryptic -greasy -detainees -planck -sudbury -plaid -dod -kayla -##ears -##vb -##zd -mortally -##hein -cognition -radha -liechtenstein -meade -richly -argyle -harpsichord -liberalism -trumpets -lauded -tyrant -salsa -tiled -lear -promoters -reused -slicing -trident -##chuk -##gami -##lka -cantor -checkpoint 
-##points -gaul -leger -mammalian -##tov -##aar -##schaft -doha -frenchman -nirvana -##vino -delgado -headlining -##eron -##iography -jug -tko -1649 -naga -intersections -benfica -nawab -##suka -ashford -gulp -##deck -##vill -##rug -brentford -frazier -pleasures -dunne -potsdam -shenzhen -dentistry -##tec -flanagan -##dorff -##hear -chorale -dinah -prem -quezon -##rogated -relinquished -sutra -terri -##pani -flaps -##rissa -poly -##rnet -homme -aback -##eki -linger -womb -##kson -##lewood -doorstep -orthodoxy -threaded -westfield -##rval -dioceses -fridays -subsided -##gata -loyalists -##biotic -##ettes -letterman -lunatic -prelate -tenderly -invariably -souza -thug -winslow -##otide -furlongs -gogh -jeopardy -##runa -pegasus -##umble -humiliated -standalone -tagged -##roller -freshmen -klan -##bright -attaining -initiating -transatlantic -logged -viz -##uance -1723 -combatants -intervening -stephane -chieftain -despised -grazed -cdc -galveston -godzilla -macro -simulate -##planes -parades -##esses -##ductive -##unes -equator -overdose -##cans -##hosh -##lifting -joshi -epstein -sonora -treacherous -aquatics -manchu -responsive -##sation -supervisory -##christ -##llins -##ibar -##balance -##uso -kimball -karlsruhe -mab -##emy -ignores -phonetic -spaghetti -almighty -danzig -rumbling -tombstone -designations -lured -outset -##felt -supermarkets -grupo -kei -kraft -susanna -##blood -comprehension -genealogy -##aghan -##verted -redding -##ythe -1722 -bowing -##pore -##roi -lest -sharpened -fulbright -valkyrie -sikhs -##unds -swans -bouquet -merritt -##tage -##venting -commuted -redhead -clerks -leasing -cesare -dea -hazy -##vances -fledged -greenfield -servicemen -##gical -armando -blackout -sagged -downloadable -intra -potion -pods -##4th -##mism -attendants -gambia -stale -##ntine -plump -asteroids -rediscovered -buds -flea -hive -##neas -1737 -classifications -debuts -##eles -olympus -scala -##eurs -##gno -##mute -hummed -sigismund -visuals -wiggled -await 
-pilasters -clench -sulfate -##ances -bellevue -enigma -trainee -snort -##sw -clouded -denim -##rank -churning -hartman -lodges -riches -sima -##missible -accountable -socrates -regulates -mueller -1702 -avoids -solids -himalayas -nutrient -pup -##jevic -squat -fades -nec -##lates -##pina -##rona -##ου -privateer -tequila -##gative -##mpton -hornet -immortals -##dou -asturias -cleansing -dario -##rries -##anta -etymology -servicing -zhejiang -##venor -##nx -horned -erasmus -rayon -relocating -£10 -##bags -escalated -promenade -stubble -2010s -artisans -axial -liquids -mora -sho -yoo -##tsky -bundles -oldies -##nally -notification -bastion -##ths -sparkle -##lved -1728 -leash -pathogen -highs -##hmi -immature -gonzaga -ignatius -mansions -monterrey -sweets -bryson -##loe -polled -regatta -brightest -pei -rosy -squid -hatfield -payroll -addict -meath -cornerback -heaviest -lodging -##mage -capcom -rippled -##sily -barnet -mayhem -ymca -snuggled -rousseau -##cute -blanchard -fragmented -leighton -chromosomes -risking -##strel -##utter -corinne -coyotes -cynical -hiroshi -yeomanry -##ractive -ebook -grading -mandela -plume -agustin -magdalene -##rkin -bea -femme -trafford -##coll -##lun -##tance -52nd -fourier -upton -##mental -camilla -gust -iihf -islamabad -longevity -##kala -feldman -netting -##rization -endeavour -foraging -mfa -orr -##open -greyish -contradiction -graz -##ruff -handicapped -marlene -tweed -oaxaca -spp -campos -miocene -pri -configured -cooks -pluto -cozy -pornographic -##entes -70th -fairness -glided -jonny -lynne -rounding -sired -##emon -##nist -remade -uncover -##mack -complied -lei -newsweek -##jured -##parts -##enting -##pg -finer -guerrillas -athenian -deng -disused -stepmother -accuse -gingerly -seduction -confronting -##going -gora -nostalgia -sabres -virginity -wrenched -##minated -syndication -wielding -eyre -##gnon -##igny -behaved -taxpayer -sweeps -##growth -childless -gallant -##ywood -amplified -geraldine -scrape -##ffi -babylonian 
-fresco -##rdan -##kney -##position -1718 -restricting -tack -fukuoka -osborn -selector -partnering -##dlow -kia -tak -whitley -gables -##mania -mri -softness -immersion -##bots -##evsky -1713 -chilling -insignificant -pcs -##uis -elites -lina -purported -supplemental -teaming -##americana -##dding -##inton -proficient -rouen -##nage -##rret -niccolo -selects -##bread -fluffy -1621 -gruff -knotted -mukherjee -polgara -thrash -nicholls -secluded -smoothing -thru -corsica -loaf -whitaker -inquiries -##rrier -##kam -indochina -marlins -myles -peking -##tea -extracts -pastry -superhuman -connacht -vogel -##ditional -##het -##udged -##lash -gloss -quarries -refit -teaser -##alic -##gaon -20s -materialized -sling -camped -pickering -tung -tracker -pursuant -##cide -cranes -##cini -##typical -##viere -anhalt -overboard -workout -chores -fares -orphaned -stains -##logie -fenton -surpassing -joyah -triggers -##itte -grandmaster -##lass -##lists -clapping -fraudulent -ledger -nagasaki -##cor -##nosis -##tsa -eucalyptus -tun -##icio -##rney -##tara -dax -heroism -ina -wrexham -onboard -unsigned -##dates -moshe -galley -winnie -droplets -exiles -praises -watered -noodles -##aia -fein -leland -multicultural -stink -bingo -comets -erskine -modernized -canned -constraint -domestically -chemotherapy -featherweight -stifled -##mum -darkly -irresistible -refreshing -hasty -isolate -##oys -kitchener -planners -##wehr -cages -yarn -implant -toulon -elects -childbirth -yue -##lind -rightful -sportsman -junctions -remodeled -specifies -##rgh -##oons -complimented -##urgent -lister -ot -##logic -bequeathed -cheekbones -fontana -gabby -##dial -amadeus -corrugated -maverick -resented -triangles -##hered -##usly -nazareth -tyrol -1675 -assent -poorer -sectional -aegean -##cous -nylon -ghanaian -##egorical -##weig -cushions -forbid -fusiliers -obstruction -somerville -##scia -dime -earrings -elliptical -leyte -oder -polymers -timmy -midtown -piloted -settles -continual -externally -mayfield 
-##uh -enrichment -henson -keane -persians -1733 -benji -braden -pep -##efe -contenders -pepsi -valet -##isches -##asse -##earing -goofy -stroll -##amen -authoritarian -occurrences -adversary -ahmedabad -tangent -toppled -dorchester -1672 -modernism -marxism -islamist -charlemagne -exponential -racks -brunette -pic -skirmish -##bund -##lad -##powered -##yst -hoisted -messina -shatter -##ctum -jedi -vantage -##music -##neil -clemens -mahmoud -corrupted -authentication -lowry -nils -##washed -omnibus -wounding -jillian -##itors -##opped -serialized -narcotics -handheld -##arm -##plicity -intersecting -stimulating -##onis -crate -fellowships -hemingway -casinos -climatic -fordham -copeland -drip -beatty -leaflets -robber -brothel -madeira -##hedral -sphinx -ultrasound -##vana -valor -forbade -leonid -villas -##aldo -duane -marquez -##cytes -disadvantaged -forearms -kawasaki -reacts -consular -lax -uncles -uphold -##hopper -concepcion -dorsey -lass -##izan -arching -passageway -1708 -researches -tia -internationals -##graphs -##opers -distinguishes -javanese -divert -##uven -plotted -##listic -##rwin -##erik -##tify -affirmative -signifies -validation -##bson -kari -felicity -georgina -zulu -##eros -##rained -##rath -overcoming -argyll -##rbin -1734 -chiba -ratification -windy -earls -parapet -##marks -hunan -pristine -astrid -punta -##gart -brodie -##kota -##oder -malaga -minerva -rouse -##phonic -bellowed -pagoda -portals -reclamation -##gur -##odies -##⁄₄ -parentheses -quoting -allergic -palette -showcases -benefactor -heartland -nonlinear -##tness -bladed -cheerfully -scans -##ety -1666 -girlfriends -pedersen -hiram -sous -##liche -##nator -1683 -##nery -##orio -##umen -bobo -primaries -smiley -##cb -unearthed -uniformly -fis -metadata -1635 -ind -##oted -recoil -##titles -##tura -##ια -hilbert -jamestown -mcmillan -tulane -seychelles -##frid -antics -coli -fated -stucco -##grants -1654 -bulky -accolades -arrays -caledonian -carnage -optimism -puebla -##tative 
-##cave -enforcing -rotherham -dunlop -aeronautics -chimed -incline -zoning -archduke -hellenistic -##oses -##sions -candi -thong -##ople -magnate -rustic -##rsk -projective -slant -##offs -danes -hollis -vocalists -##ammed -congenital -contend -gesellschaft -##ocating -##pressive -douglass -quieter -##kshi -howled -salim -spontaneously -townsville -buena -southport -##bold -kato -1638 -faerie -stiffly -##vus -##rled -flawless -realising -taboo -##7th -straightening -jena -##hid -cartwright -berber -bertram -soloists -noses -coping -fission -hardin -inca -##cen -1717 -mobilized -vhf -##raf -biscuits -curate -##anial -gaunt -neighbourhoods -1540 -##abas -blanca -bypassed -sockets -behold -coincidentally -##bane -nara -shave -splinter -terrific -##arion -##erian -commonplace -juris -redwood -waistband -boxed -caitlin -fingerprints -jennie -naturalized -##ired -balfour -craters -jody -bungalow -hugely -quilt -glitter -pigeons -undertaker -bulging -constrained -##sil -##akh -assimilation -reworked -##person -persuasion -##pants -felicia -##cliff -##ulent -1732 -explodes -##dun -##inium -##zic -lyman -vulture -hog -overlook -begs -northwards -ow -spoil -##urer -fatima -favorably -accumulate -sargent -sorority -corresponded -dispersal -kochi -toned -##imi -##lita -internacional -newfound -##agger -##lynn -##rigue -booths -peanuts -##eborg -medicare -muriel -nur -##uram -crates -millennia -pajamas -worsened -##breakers -jimi -vanuatu -yawned -##udeau -carousel -##hony -hurdle -##ccus -##mounted -##pod -rv -##eche -airship -ambiguity -compulsion -recapture -##claiming -arthritis -##osomal -1667 -asserting -ngc -sniffing -dade -discontent -glendale -ported -##amina -defamation -rammed -##scent -fling -livingstone -##fleet -875 -apocalyptic -comrade -##lowe -cessna -eine -persecuted -subsistence -demi -hoop -reliefs -coptic -progressing -stemmed -perpetrators -1665 -priestess -##nio -dobson -ebony -rooster -itf -tortricidae -##bbon -##jian -cleanup -##jean -##øy -1721 
-eighties -taxonomic -holiness -##hearted -##spar -antilles -showcasing -stabilized -##nb -gia -mascara -michelangelo -dawned -##uria -##vinsky -extinguished -fitz -grotesque -£100 -##fera -##loid -##mous -barges -neue -throbbed -cipher -johnnie -##mpt -outburst -##swick -spearheaded -administrations -heartbreak -pixels -pleasantly -##enay -lombardy -plush -##nsed -bobbie -##hly -reapers -tremor -xiang -minogue -substantive -hitch -barak -##wyl -kwan -##encia -910 -obscene -elegance -indus -surfer -bribery -conserve -##hyllum -##masters -horatio -##fat -apes -rebound -psychotic -##pour -iteration -##mium -##vani -botanic -horribly -antiques -dispose -paxton -##hli -##wg -timeless -1704 -disregard -engraver -hounds -##bau -##version -looted -uno -facilitates -groans -masjid -rutland -antibody -disqualification -decatur -footballers -quake -slacks -48th -rein -scribe -stabilize -commits -exemplary -tho -##hort -##chison -pantry -traversed -##hiti -disrepair -identifiable -vibrated -baccalaureate -csa -interviewing -##iensis -##raße -greaves -wealthiest -classed -jogged -£5 -##atal -illuminating -knicks -respecting -##uno -scrubbed -##iji -##dles -kruger -moods -growls -raider -silvia -chefs -kam -cree -percival -##terol -gunter -counterattack -defiant -henan -ze -##rasia -##riety -equivalence -submissions -##fra -##thor -bautista -mechanically -##heater -cornice -herbal -templar -##mering -outputs -ruining -ligand -renumbered -extravagant -mika -blockbuster -eta -insurrection -##ilia -darkening -ferocious -pianos -strife -kinship -##aer -melee -##anor -##iste -##oue -decidedly -weep -##jad -##missive -##ppel -puget -unease -##gnant -1629 -hammering -kassel -wessex -##lga -bromwich -egan -paranoia -utilization -##atable -##idad -contradictory -provoke -##ols -##ouring -##tangled -knesset -##very -##lette -plumbing -##sden -greensboro -occult -sniff -zev -beaming -gamer -haggard -mahal -##olt -##pins -mendes -utmost -briefing -gunnery -##gut -##pher -##zh -##rok -1679 
-khalifa -sonya -##boot -principals -urbana -wiring -##liffe -##minating -##rrado -dahl -nyu -skepticism -townspeople -ithaca -lobster -somethin -##fur -##arina -##−1 -freighter -zimmerman -biceps -contractual -##herton -amend -hurrying -subconscious -##anal -meng -clermont -spawning -##eia -##lub -dignitaries -impetus -snacks -spotting -twigs -##bilis -##cz -##ouk -libertadores -nic -skylar -##aina -gustave -asean -##anum -dieter -legislatures -flirt -bromley -trolls -umar -##bbies -##tyle -blah -parc -bridgeport -crank -negligence -##nction -46th -constantin -molded -bandages -seriousness -00pm -siegel -carpets -compartments -upbeat -statehood -##dner -##edging -marko -platt -##hane -paving -##iy -1738 -abbess -impatience -limousine -nbl -lucille -mojo -nightfall -robbers -##nais -karel -brisk -calves -replicate -ascribed -telescopes -##olf -intimidated -ballast -specialization -aerodynamic -caliphate -visionary -##arded -epsilon -##aday -##onte -aggregation -auditory -boosted -reunification -kathmandu -loco -robyn -acknowledges -appointing -humanoid -newell -redeveloped -restraints -##tained -barbarians -chopper -1609 -italiana -##lez -##lho -investigates -wrestlemania -##anies -##bib -##falls -creaked -dragoons -gravely -minions -stupidity -volley -##harat -##week -musik -##eries -##uously -fungal -massimo -semantics -malvern -##ahl -##pee -discourage -embryo -imperialism -1910s -profoundly -##ddled -jiangsu -sparkled -stat -##holz -sweatshirt -tobin -##iction -sneered -##cheon -##oit -brit -causal -smyth -##neuve -diffuse -perrin -silvio -##ipes -##recht -detonated -iqbal -selma -##nism -##zumi -roasted -##riders -tay -##ados -##mament -##mut -##rud -completes -nipples -flavour -hirsch -##laus -calderon -sneakers -moravian -##ksha -1622 -##imeters -bodo -##isance -##pre -##ronia -anatomical -excerpt -##lke -dh -kunst -##tablished -##scoe -biomass -panted -unharmed -gael -housemates -montpellier -coa -rodents -tonic -hickory -singleton -##taro -1719 -aldo 
-breaststroke -dempsey -och -rocco -##cuit -merton -dissemination -midsummer -serials -##idi -haji -polynomials -enoch -prematurely -shutter -taunton -£3 -##grating -##inates -archangel -harassed -##asco -archway -dazzling -##ecin -1736 -sumo -wat -##kovich -1086 -honneur -##ently -##nostic -##ttal -##idon -1605 -1716 -rents -##gnan -hires -##ikh -##dant -howie -##rons -handler -retracted -shocks -1632 -arun -duluth -kepler -trumpeter -##lary -peeking -seasoned -trooper -##mara -laszlo -##iciencies -##rti -heterosexual -##inatory -indira -jogging -##inga -##lism -beit -dissatisfaction -malice -##ately -nedra -peeling -##rgeon -47th -stadiums -vertigo -##ains -iced -restroom -##plify -##tub -illustrating -pear -##chner -##sibility -inorganic -rappers -receipts -watery -##kura -lucinda -##oulos -reintroduced -##8th -##tched -gracefully -saxons -nutritional -wastewater -rained -favourites -bedrock -fisted -hallways -likeness -upscale -##lateral -1580 -blinds -prequel -##pps -##tama -deter -humiliating -restraining -tn -vents -1659 -laundering -recess -rosary -tractors -coulter -federer -##ifiers -##plin -persistence -##quitable -geschichte -pendulum -quakers -##beam -bassett -pictorial -koln -##sitor -drills -reciprocal -shooters -##cton -##tees -converge -pip -dmitri -donnelly -yamamoto -aqua -azores -demographics -hypnotic -spitfire -suspend -wryly -roderick -##rran -sebastien -##asurable -mavericks -##fles -himalayan -prodigy -##iance -transvaal -demonstrators -handcuffs -dodged -mcnamara -sublime -1726 -crazed -##efined -##till -ivo -pondered -reconciled -shrill -sava -##duk -bal -heresy -jaipur -goran -##nished -lux -shelly -whitehall -##hre -israelis -peacekeeping -##wled -1703 -demetrius -ousted -##arians -##zos -beale -anwar -backstroke -raged -shrinking -cremated -##yck -benign -towing -wadi -darmstadt -landfill -parana -soothe -colleen -sidewalks -mayfair -tumble -hepatitis -ferrer -superstructure -##gingly -##urse -##wee -anthropological -translators 
-##mies -closeness -hooves -##pw -mondays -##roll -##vita -landscaping -##urized -purification -sock -thorns -thwarted -jalan -tiberius -##taka -saline -##rito -confidently -khyber -sculptors -##ij -brahms -hammersmith -inspectors -battista -fivb -fragmentation -hackney -##uls -arresting -exercising -antoinette -bedfordshire -##zily -dyed -##hema -1656 -racetrack -variability -##tique -1655 -austrians -deteriorating -madman -theorists -aix -lehman -weathered -1731 -decreed -eruptions -1729 -flaw -quinlan -sorbonne -flutes -nunez -1711 -adored -downwards -fable -rasped -1712 -moritz -mouthful -renegade -shivers -stunts -dysfunction -restrain -translit -pancakes -##avio -##cision -##tray -vial -##lden -bain -##maid -##oxide -chihuahua -malacca -vimes -##rba -##rnier -1664 -donnie -plaques -##ually -bangs -floppy -huntsville -loretta -nikolay -##otte -eater -handgun -ubiquitous -##hett -eras -zodiac -1634 -##omorphic -1820s -##zog -cochran -##bula -##lithic -warring -##rada -dalai -excused -blazers -mcconnell -reeling -este -##abi -geese -hoax -taxon -##bla -guitarists -condemning -hunts -inversion -moffat -taekwondo -##lvis -1624 -stammered -##rest -##rzy -sousa -fundraiser -marylebone -navigable -uptown -cabbage -daniela -salman -shitty -whimper -##kian -##utive -programmers -protections -##rmi -##rued -forceful -##enes -fuss -##tao -##wash -brat -oppressive -reykjavik -spartak -ticking -##inkles -##kiewicz -adolph -horst -maui -protege -straighten -cpc -landau -concourse -clements -resultant -##ando -imaginative -joo -reactivated -##rem -##ffled -##uising -consultative -##guide -flop -kaitlyn -mergers -parenting -somber -##vron -supervise -vidhan -##imum -courtship -exemplified -harmonies -medallist -refining -##rrow -##ка -amara -##hum -goalscorer -sited -overshadowed -rohan -displeasure -secretive -multiplied -osman -##orth -engravings -padre -##kali -##veda -miniatures -mis -##yala -clap -pali -rook -##cana -1692 -57th -antennae -astro -oskar -1628 -bulldog 
-crotch -hackett -yucatan -##sure -amplifiers -brno -ferrara -migrating -##gree -thanking -turing -##eza -mccann -ting -andersson -onslaught -gaines -ganga -incense -standardization -##mation -sentai -scuba -stuffing -turquoise -waivers -alloys -##vitt -regaining -vaults -##clops -##gizing -digger -furry -memorabilia -probing -##iad -payton -rec -deutschland -filippo -opaque -seamen -zenith -afrikaans -##filtration -disciplined -inspirational -##merie -banco -confuse -grafton -tod -##dgets -championed -simi -anomaly -biplane -##ceptive -electrode -##para -1697 -cleavage -crossbow -swirl -informant -##lars -##osta -afi -bonfire -spec -##oux -lakeside -slump -##culus -##lais -##qvist -##rrigan -1016 -facades -borg -inwardly -cervical -pointedly -stabilization -##odon -chests -1699 -hacked -ctv -orthogonal -suzy -##lastic -gaulle -jacobite -rearview -##erted -ashby -##drik -##igate -##mise -##zbek -affectionately -canine -disperse -latham -##istles -##ivar -spielberg -##orin -##idium -ezekiel -cid -##sg -durga -middletown -##cina -customized -frontiers -harden -##etano -##zzy -1604 -bolsheviks -coloration -yoko -##bedo -briefs -slabs -debra -liquidation -plumage -##oin -blossoms -dementia -subsidy -1611 -proctor -relational -jerseys -parochial -ter -##ici -esa -peshawar -cavalier -loren -idiots -shamrock -1646 -dutton -malabar -mustache -##endez -##ocytes -referencing -terminates -marche -yarmouth -##sop -acton -mated -seton -subtly -baptised -beige -extremes -jolted -kristina -telecast -##actic -safeguard -waldo -##baldi -##bular -endeavors -sloppy -subterranean -##ensburg -##itung -delicately -pigment -tq -##scu -1626 -collisions -coveted -herds -##personal -##meister -##nberger -chopra -##ricting -abnormalities -defective -galician -lucie -##dilly -alligator -likened -##genase -burundi -clears -complexion -derelict -deafening -diablo -fingered -champaign -dogg -enlist -isotope -labeling -mrna -##erre -brilliance -marvelous -##ayo -1652 -crawley -ether -footed 
-dwellers -deserts -hamish -rubs -warlock -skimmed -##lizer -buick -embark -heraldic -irregularities -##ajan -kiara -##kulam -##ieg -antigen -kowalski -##lge -oakley -visitation -##mbit -vt -##suit -1570 -murderers -##miento -##rites -chimneys -##sling -condemn -custer -exchequer -havre -##ghi -fluctuations -##rations -dfb -hendricks -vaccines -##tarian -nietzsche -biking -juicy -##duced -brooding -scrolling -selangor -##ragan -annum -boomed -seminole -sugarcane -##dna -departmental -dismissing -innsbruck -arteries -ashok -batavia -daze -kun -overtook -##rga -##tlan -beheaded -gaddafi -holm -electronically -faulty -galilee -fractures -kobayashi -##lized -gunmen -magma -aramaic -mala -eastenders -inference -messengers -bf -##qu -bathrooms -##vere -1658 -flashbacks -ideally -misunderstood -##jali -##weather -mendez -##grounds -uncanny -##iii -1709 -friendships -##nbc -sacrament -accommodated -reiterated -logistical -pebbles -thumped -##escence -administering -decrees -drafts -##flight -##cased -##tula -futuristic -picket -intimidation -winthrop -##fahan -interfered -afar -francoise -morally -uta -cochin -croft -dwarfs -##bruck -##dents -##nami -biker -##hner -##meral -##isen -##ometric -##pres -##ан -brightened -meek -parcels -securely -gunners -##jhl -##zko -agile -hysteria -##lten -##rcus -bukit -champs -chevy -cuckoo -leith -sadler -theologians -welded -##section -1663 -plurality -xander -##rooms -##formed -shredded -temps -intimately -pau -tormented -##lok -##stellar -1618 -charred -essen -##mmel -alarms -spraying -ascot -blooms -twinkle -##abia -##apes -internment -obsidian -##chaft -snoop -##dav -##ooping -malibu -##tension -quiver -##itia -hays -mcintosh -travers -walsall -##ffie -1623 -beverley -schwarz -plunging -structurally -rosenthal -vikram -##tsk -ghz -##onda -##tiv -chalmers -groningen -pew -reckon -unicef -##rvis -55th -##gni -1651 -sulawesi -avila -cai -metaphysical -screwing -turbulence -##mberg -augusto -samba -56th -baffled -momentary -toxin 
-##urian -##wani -aachen -condoms -dali -steppe -##oed -##year -adolescence -dauphin -electrically -inaccessible -microscopy -nikita -##ega -atv -##enter -##oles -##oteric -accountants -punishments -wrongly -bribes -adventurous -clinch -flinders -southland -##hem -##kata -gough -##ciency -lads -soared -##ה -undergoes -deformation -outlawed -rubbish -##arus -##mussen -##nidae -##rzburg -arcs -##ingdon -##tituted -1695 -wheelbase -wheeling -bombardier -campground -zebra -##lices -##oj -##bain -lullaby -##ecure -donetsk -wylie -grenada -##arding -##ης -squinting -eireann -opposes -##andra -maximal -runes -##broken -##cuting -##iface -##ror -##rosis -additive -britney -adultery -triggering -##drome -detrimental -aarhus -containment -jc -swapped -vichy -##ioms -madly -##oric -##rag -brant -##ckey -1560 -1612 -broughton -rustling -##stems -##uder -asbestos -mentoring -##nivorous -finley -leaps -##isan -apical -pry -slits -substitutes -##dict -intuitive -fantasia -insistent -unreasonable -##igen -##vna -domed -hannover -margot -ponder -##zziness -impromptu -jian -rampage -stemming -##eft -andrey -gerais -whichever -amnesia -appropriated -anzac -clicks -modifying -ultimatum -cambrian -maids -verve -yellowstone -##mbs -conservatoire -##scribe -adherence -dinners -spectra -imperfect -mysteriously -sidekick -tatar -tuba -##aks -##ifolia -distrust -##athan -##zle -ronin -zac -##pse -celaena -instrumentalist -scents -skopje -##mbling -comical -compensated -vidal -condor -intersect -jingle -wavelengths -##urrent -mcqueen -##izzly -carp -weasel -militias -postdoctoral -eugen -gunslinger -##ɛ -faux -hospice -##for -appalled -derivation -dwarves -##elis -dilapidated -##folk -astoria -philology -##lwyn -##otho -##saka -inducing -philanthropy -##bf -##itative -geek -markedly -##yce -bessie -indices -##flict -frowns -resolving -weightlifting -tugs -cleric -contentious -1653 -mania -rms -##miya -##reate -##ruck -##tucket -bien -eels -marek -##ayton -##cence -discreet -unofficially 
-##ife -leaks -##bber -1705 -dung -compressor -hillsborough -pandit -shillings -distal -##skin -##tat -nosed -##nir -mangrove -undeveloped -##idia -textures -##inho -##rise -irritating -nay -amazingly -bancroft -apologetic -compassionate -kata -symphonies -##lovic -airspace -##lch -gifford -precautions -fulfillment -sevilla -vulgar -martinique -##urities -looting -piccolo -tidy -##dermott -quadrant -armchair -incomes -mathematicians -stampede -nilsson -##inking -##scan -foo -quarterfinal -##ostal -shang -shouldered -squirrels -##owe -vinegar -##bner -##rchy -##systems -delaying -##trics -ars -dwyer -rhapsody -sponsoring -##gration -bipolar -cinder -starters -##olio -##urst -signage -##nty -aground -figurative -mons -acquaintances -duets -erroneously -soyuz -elliptic -recreated -##cultural -##quette -##ssed -##tma -##zcz -moderator -scares -##itaire -##stones -##udence -juniper -sighting -##just -##nsen -britten -calabria -ry -bop -cramer -forsyth -stillness -airmen -gathers -unfit -##umber -##upt -taunting -seeker -streamlined -##bution -holster -schumann -tread -vox -##gano -##onzo -strive -dil -reforming -covent -newbury -predicting -##orro -decorate -tre -##puted -andover -asahi -dept -dunkirk -gills -##tori -buren -huskies -##stis -##stov -abstracts -bets -loosen -##opa -1682 -yearning -##glio -##sir -berman -effortlessly -enamel -napoli -persist -##peration -##uez -attache -elisa -invitations -##kic -accelerating -reindeer -boardwalk -clutches -nelly -polka -##kei -adamant -huey -lough -unbroken -adventurer -embroidery -inspecting -stanza -##ducted -naia -taluka -##pone -##roids -chases -deprivation -florian -##ppet -earthly -##lib -##ssee -colossal -foreigner -vet -freaks -patrice -rosewood -triassic -upstate -##pkins -dominates -ata -chants -ks -vo -##bley -##raya -##rmed -agra -infiltrate -##ailing -##ilation -##tzer -##uppe -##werk -binoculars -enthusiast -fujian -squeak -##avs -abolitionist -almeida -boredom -hampstead -marsden -rations -##ands -inflated 
-bonuses -rosalie -patna -##rco -detachments -penitentiary -54th -flourishing -woolf -##dion -##etched -papyrus -##lster -##nsor -##toy -bobbed -dismounted -endelle -inhuman -motorola -wince -wreath -##ticus -hideout -inspections -sanjay -disgrace -infused -pudding -stalks -##urbed -arsenic -leases -##hyl -##rrard -collarbone -##waite -##wil -dowry -##bant -##edance -genealogical -nitrate -salamanca -scandals -thyroid -necessitated -##` -##¡ -##¢ -##¦ -##¨ -##ª -##¬ -##´ -##¶ -##¾ -##¿ -##ð -##þ -##ħ -##œ -##ƒ -##ɐ -##ɑ -##ɒ -##ɕ -##ɣ -##ɨ -##ɪ -##ɫ -##ɬ -##ɯ -##ɲ -##ɴ -##ɹ -##ɾ -##ʀ -##ʁ -##ʂ -##ʃ -##ʉ -##ʊ -##ʋ -##ʌ -##ʎ -##ʐ -##ʑ -##ʒ -##ʔ -##ʲ -##ʳ -##ʷ -##ʸ -##ʻ -##ʼ -##ʾ -##ʿ -##ˡ -##ˣ -##ˤ -##ζ -##ξ -##щ -##ъ -##э -##ю -##ђ -##є -##ј -##љ -##њ -##ћ -##ӏ -##ա -##բ -##գ -##դ -##ե -##թ -##ի -##լ -##կ -##հ -##մ -##յ -##ն -##ո -##պ -##ս -##վ -##տ -##ր -##ւ -##ք -##־ -##א -##ב -##ג -##ד -##ו -##ז -##ח -##ט -##י -##ך -##כ -##ל -##ם -##מ -##ן -##נ -##ס -##ע -##ף -##פ -##ץ -##צ -##ק -##ר -##ש -##ת -##، -##ء -##ث -##ج -##ح -##خ -##ذ -##ز -##ش -##ص -##ض -##ط -##ظ -##غ -##ـ -##ف -##ق -##ك -##ى -##ٹ -##پ -##چ -##ک -##گ -##ں -##ھ -##ہ -##ے -##अ -##आ -##उ -##ए -##क -##ख -##ग -##च -##ज -##ट -##ड -##ण -##त -##थ -##द -##ध -##न -##प -##ब -##भ -##म -##य -##र -##ल -##व -##श -##ष -##स -##ह -##ा -##ि -##ी -##ो -##। -##॥ -##ং -##অ -##আ -##ই -##উ -##এ -##ও -##ক -##খ -##গ -##চ -##ছ -##জ -##ট -##ড -##ণ -##ত -##থ -##দ -##ধ -##ন -##প -##ব -##ভ -##ম -##য -##র -##ল -##শ -##ষ -##স -##হ -##া -##ি -##ী -##ে -##க -##ச -##ட -##த -##ந -##ன -##ப -##ம -##ய -##ர -##ல -##ள -##வ -##ா -##ி -##ு -##ே -##ை -##ನ -##ರ -##ಾ -##ක -##ය -##ර -##ල -##ව -##ා -##ต -##ท -##พ -##ล -##ว -##ส -##། -##ག -##ང -##ད -##ན -##པ -##བ -##མ -##འ -##ར -##ལ -##ས -##မ -##ა -##ბ -##გ -##დ -##ე -##ვ -##თ -##ი -##კ -##ლ -##მ -##ნ -##ო -##რ -##ს -##ტ -##უ -##ᄊ -##ᴬ -##ᴮ -##ᴰ -##ᴵ -##ᴺ -##ᵀ -##ᵇ -##ᵈ -##ᵖ -##ᵗ -##ᵣ -##ᵤ -##ᵥ -##ᶜ -##ᶠ -##‐ -##‑ -##‒ -##– -##— -##― -##‘ -##’ -##‚ -##“ -##” -##‡ -##… -##⁰ -##⁴ -##⁵ -##⁶ -##⁷ -##⁸ 
-##⁹ -##⁻ -##₅ -##₆ -##₇ -##₈ -##₉ -##₊ -##₍ -##₎ -##ₐ -##ₑ -##ₒ -##ₓ -##ₕ -##ₖ -##ₗ -##ₘ -##ₚ -##ₛ -##ₜ -##₤ -##₩ -##₱ -##₹ -##ℓ -##ℝ -##⅓ -##⅔ -##↦ -##⇄ -##⇌ -##∂ -##∅ -##∆ -##∇ -##∈ -##∗ -##∘ -##∧ -##∨ -##∪ -##⊂ -##⊆ -##⊕ -##⊗ -##☉ -##♯ -##⟨ -##⟩ -##ⱼ -##⺩ -##⺼ -##⽥ -##亻 -##宀 -##彳 -##忄 -##扌 -##氵 -##疒 -##糹 -##訁 -##辶 -##阝 -##龸 -##fi -##fl diff --git a/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer/vocab_org.txt b/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer/vocab_org.txt deleted file mode 100644 index ca4f978..0000000 --- a/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer/vocab_org.txt +++ /dev/null @@ -1,21128 +0,0 @@ -[PAD] -[unused1] -[unused2] -[unused3] -[unused4] -[unused5] -[unused6] -[unused7] -[unused8] -[unused9] -[unused10] -[unused11] -[unused12] -[unused13] -[unused14] -[unused15] -[unused16] -[unused17] -[unused18] -[unused19] -[unused20] -[unused21] -[unused22] -[unused23] -[unused24] -[unused25] -[unused26] -[unused27] -[unused28] -[unused29] -[unused30] -[unused31] -[unused32] -[unused33] -[unused34] -[unused35] -[unused36] -[unused37] -[unused38] -[unused39] -[unused40] -[unused41] -[unused42] -[unused43] -[unused44] -[unused45] -[unused46] -[unused47] -[unused48] -[unused49] -[unused50] -[unused51] -[unused52] -[unused53] -[unused54] -[unused55] -[unused56] -[unused57] -[unused58] -[unused59] -[unused60] -[unused61] -[unused62] -[unused63] -[unused64] -[unused65] -[unused66] -[unused67] -[unused68] -[unused69] -[unused70] -[unused71] -[unused72] -[unused73] -[unused74] -[unused75] -[unused76] -[unused77] -[unused78] -[unused79] -[unused80] -[unused81] -[unused82] -[unused83] -[unused84] -[unused85] -[unused86] -[unused87] -[unused88] -[unused89] -[unused90] -[unused91] -[unused92] -[unused93] -[unused94] -[unused95] -[unused96] -[unused97] -[unused98] -[unused99] -[UNK] -[CLS] -[SEP] -[MASK] - - -! -" -# -$ -% -& -' -( -) -* -+ -, -- -. -/ -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -: -; -< -= -> -? 
-@ -[ -\ -] -^ -_ -a -b -c -d -e -f -g -h -i -j -k -l -m -n -o -p -q -r -s -t -u -v -w -x -y -z -{ -| -} -~ -£ -¤ -¥ -§ -© -« -® -° -± -² -³ -µ -· -¹ -º -» -¼ -× -ß -æ -÷ -ø -đ -ŋ -ɔ -ə -ɡ -ʰ -ˇ -ˈ -ˊ -ˋ -ˍ -ː -˙ -˚ -ˢ -α -β -γ -δ -ε -η -θ -ι -κ -λ -μ -ν -ο -π -ρ -ς -σ -τ -υ -φ -χ -ψ -ω -а -б -в -г -д -е -ж -з -и -к -л -м -н -о -п -р -с -т -у -ф -х -ц -ч -ш -ы -ь -я -і -ا -ب -ة -ت -د -ر -س -ع -ل -م -ن -ه -و -ي -۩ -ก -ง -น -ม -ย -ร -อ -า -เ -๑ -་ -ღ -ᄀ -ᄁ -ᄂ -ᄃ -ᄅ -ᄆ -ᄇ -ᄈ -ᄉ -ᄋ -ᄌ -ᄎ -ᄏ -ᄐ -ᄑ -ᄒ -ᅡ -ᅢ -ᅣ -ᅥ -ᅦ -ᅧ -ᅨ -ᅩ -ᅪ -ᅬ -ᅭ -ᅮ -ᅯ -ᅲ -ᅳ -ᅴ -ᅵ -ᆨ -ᆫ -ᆯ -ᆷ -ᆸ -ᆺ -ᆻ -ᆼ -ᗜ -ᵃ -ᵉ -ᵍ -ᵏ -ᵐ -ᵒ -ᵘ -‖ -„ -† -• -‥ -‧ -
 -‰ -′ -″ -‹ -› -※ -‿ -⁄ -ⁱ -⁺ -ⁿ -₁ -₂ -₃ -₄ -€ -℃ -№ -™ -ⅰ -ⅱ -ⅲ -ⅳ -ⅴ -← -↑ -→ -↓ -↔ -↗ -↘ -⇒ -∀ -− -∕ -∙ -√ -∞ -∟ -∠ -∣ -∥ -∩ -∮ -∶ -∼ -∽ -≈ -≒ -≡ -≤ -≥ -≦ -≧ -≪ -≫ -⊙ -⋅ -⋈ -⋯ -⌒ -① -② -③ -④ -⑤ -⑥ -⑦ -⑧ -⑨ -⑩ -⑴ -⑵ -⑶ -⑷ -⑸ -⒈ -⒉ -⒊ -⒋ -ⓒ -ⓔ -ⓘ -─ -━ -│ -┃ -┅ -┆ -┊ -┌ -└ -├ -┣ -═ -║ -╚ -╞ -╠ -╭ -╮ -╯ -╰ -╱ -╳ -▂ -▃ -▅ -▇ -█ -▉ -▋ -▌ -▍ -▎ -■ -□ -▪ -▫ -▬ -▲ -△ -▶ -► -▼ -▽ -◆ -◇ -○ -◎ -● -◕ -◠ -◢ -◤ -☀ -★ -☆ -☕ -☞ -☺ -☼ -♀ -♂ -♠ -♡ -♣ -♥ -♦ -♪ -♫ -♬ -✈ -✔ -✕ -✖ -✦ -✨ -✪ -✰ -✿ -❀ -❤ -➜ -➤ -⦿ -、 -。 -〃 -々 -〇 -〈 -〉 -《 -》 -「 -」 -『 -』 -【 -】 -〓 -〔 -〕 -〖 -〗 -〜 -〝 -〞 -ぁ -あ -ぃ -い -う -ぇ -え -お -か -き -く -け -こ -さ -し -す -せ -そ -た -ち -っ -つ -て -と -な -に -ぬ -ね -の -は -ひ -ふ -へ -ほ -ま -み -む -め -も -ゃ -や -ゅ -ゆ -ょ -よ -ら -り -る -れ -ろ -わ -を -ん -゜ -ゝ -ァ -ア -ィ -イ -ゥ -ウ -ェ -エ -ォ -オ -カ -キ -ク -ケ -コ -サ -シ -ス -セ -ソ -タ -チ -ッ -ツ -テ -ト -ナ -ニ -ヌ -ネ -ノ -ハ -ヒ -フ -ヘ -ホ -マ -ミ -ム -メ -モ -ャ -ヤ -ュ -ユ -ョ -ヨ -ラ -リ -ル -レ -ロ -ワ -ヲ -ン -ヶ -・ -ー -ヽ -ㄅ -ㄆ -ㄇ -ㄉ -ㄋ -ㄌ -ㄍ -ㄎ -ㄏ -ㄒ -ㄚ -ㄛ -ㄞ -ㄟ -ㄢ -ㄤ -ㄥ -ㄧ -ㄨ -ㆍ -㈦ -㊣ -㎡ -㗎 -一 -丁 -七 -万 -丈 -三 -上 -下 -不 -与 -丐 -丑 -专 -且 -丕 -世 -丘 -丙 -业 -丛 -东 -丝 -丞 -丟 -両 -丢 -两 -严 -並 -丧 -丨 -个 -丫 -中 -丰 -串 -临 -丶 -丸 -丹 -为 -主 -丼 -丽 -举 -丿 -乂 -乃 -久 -么 -义 -之 -乌 -乍 -乎 -乏 -乐 -乒 -乓 -乔 -乖 -乗 -乘 -乙 -乜 -九 -乞 -也 -习 -乡 -书 -乩 -买 -乱 -乳 -乾 -亀 -亂 -了 -予 -争 -事 -二 -于 -亏 -云 -互 -五 -井 -亘 -亙 -亚 -些 -亜 -亞 -亟 -亡 -亢 -交 -亥 -亦 -产 -亨 -亩 -享 -京 -亭 -亮 -亲 -亳 -亵 -人 -亿 -什 -仁 -仃 -仄 -仅 -仆 -仇 -今 -介 -仍 -从 -仏 -仑 -仓 -仔 -仕 -他 -仗 -付 -仙 -仝 -仞 -仟 -代 -令 -以 -仨 -仪 -们 -仮 -仰 -仲 -件 -价 -任 -份 -仿 -企 -伉 -伊 -伍 -伎 -伏 -伐 -休 -伕 -众 -优 -伙 -会 -伝 -伞 -伟 -传 -伢 -伤 -伦 -伪 -伫 -伯 -估 -伴 -伶 -伸 -伺 -似 -伽 -佃 -但 -佇 -佈 -位 -低 -住 -佐 -佑 -体 -佔 -何 -佗 -佘 -余 -佚 -佛 -作 -佝 -佞 -佟 -你 -佢 -佣 -佤 -佥 -佩 -佬 -佯 -佰 -佳 -併 -佶 -佻 -佼 -使 -侃 -侄 -來 -侈 -例 -侍 -侏 -侑 -侖 -侗 -供 -依 -侠 -価 -侣 -侥 -侦 -侧 -侨 -侬 -侮 -侯 -侵 -侶 -侷 -便 -係 -促 -俄 -俊 -俎 -俏 -俐 -俑 -俗 -俘 -俚 -保 -俞 -俟 -俠 -信 -俨 -俩 -俪 -俬 -俭 -修 -俯 -俱 -俳 -俸 -俺 -俾 -倆 -倉 -個 -倌 -倍 -倏 -們 -倒 -倔 -倖 -倘 -候 -倚 -倜 -借 -倡 -値 -倦 -倩 -倪 -倫 -倬 -倭 -倶 -债 -值 -倾 -偃 -假 -偈 -偉 -偌 -偎 -偏 -偕 -做 -停 -健 -側 -偵 -偶 -偷 -偻 -偽 -偿 -傀 -傅 -傍 -傑 -傘 -備 -傚 -傢 -傣 -傥 -储 -傩 -催 -傭 -傲 -傳 -債 -傷 -傻 -傾 -僅 -働 -像 -僑 
-僕 -僖 -僚 -僥 -僧 -僭 -僮 -僱 -僵 -價 -僻 -儀 -儂 -億 -儆 -儉 -儋 -儒 -儕 -儘 -償 -儡 -優 -儲 -儷 -儼 -儿 -兀 -允 -元 -兄 -充 -兆 -兇 -先 -光 -克 -兌 -免 -児 -兑 -兒 -兔 -兖 -党 -兜 -兢 -入 -內 -全 -兩 -八 -公 -六 -兮 -兰 -共 -兲 -关 -兴 -兵 -其 -具 -典 -兹 -养 -兼 -兽 -冀 -内 -円 -冇 -冈 -冉 -冊 -册 -再 -冏 -冒 -冕 -冗 -写 -军 -农 -冠 -冢 -冤 -冥 -冨 -冪 -冬 -冯 -冰 -冲 -决 -况 -冶 -冷 -冻 -冼 -冽 -冾 -净 -凄 -准 -凇 -凈 -凉 -凋 -凌 -凍 -减 -凑 -凛 -凜 -凝 -几 -凡 -凤 -処 -凪 -凭 -凯 -凰 -凱 -凳 -凶 -凸 -凹 -出 -击 -函 -凿 -刀 -刁 -刃 -分 -切 -刈 -刊 -刍 -刎 -刑 -划 -列 -刘 -则 -刚 -创 -初 -删 -判 -別 -刨 -利 -刪 -别 -刮 -到 -制 -刷 -券 -刹 -刺 -刻 -刽 -剁 -剂 -剃 -則 -剉 -削 -剋 -剌 -前 -剎 -剐 -剑 -剔 -剖 -剛 -剜 -剝 -剣 -剤 -剥 -剧 -剩 -剪 -副 -割 -創 -剷 -剽 -剿 -劃 -劇 -劈 -劉 -劊 -劍 -劏 -劑 -力 -劝 -办 -功 -加 -务 -劣 -动 -助 -努 -劫 -劭 -励 -劲 -劳 -労 -劵 -効 -劾 -势 -勁 -勃 -勇 -勉 -勋 -勐 -勒 -動 -勖 -勘 -務 -勛 -勝 -勞 -募 -勢 -勤 -勧 -勳 -勵 -勸 -勺 -勻 -勾 -勿 -匀 -包 -匆 -匈 -匍 -匐 -匕 -化 -北 -匙 -匝 -匠 -匡 -匣 -匪 -匮 -匯 -匱 -匹 -区 -医 -匾 -匿 -區 -十 -千 -卅 -升 -午 -卉 -半 -卍 -华 -协 -卑 -卒 -卓 -協 -单 -卖 -南 -単 -博 -卜 -卞 -卟 -占 -卡 -卢 -卤 -卦 -卧 -卫 -卮 -卯 -印 -危 -即 -却 -卵 -卷 -卸 -卻 -卿 -厂 -厄 -厅 -历 -厉 -压 -厌 -厕 -厘 -厚 -厝 -原 -厢 -厥 -厦 -厨 -厩 -厭 -厮 -厲 -厳 -去 -县 -叁 -参 -參 -又 -叉 -及 -友 -双 -反 -収 -发 -叔 -取 -受 -变 -叙 -叛 -叟 -叠 -叡 -叢 -口 -古 -句 -另 -叨 -叩 -只 -叫 -召 -叭 -叮 -可 -台 -叱 -史 -右 -叵 -叶 -号 -司 -叹 -叻 -叼 -叽 -吁 -吃 -各 -吆 -合 -吉 -吊 -吋 -同 -名 -后 -吏 -吐 -向 -吒 -吓 -吕 -吖 -吗 -君 -吝 -吞 -吟 -吠 -吡 -否 -吧 -吨 -吩 -含 -听 -吭 -吮 -启 -吱 -吳 -吴 -吵 -吶 -吸 -吹 -吻 -吼 -吽 -吾 -呀 -呂 -呃 -呆 -呈 -告 -呋 -呎 -呐 -呓 -呕 -呗 -员 -呛 -呜 -呢 -呤 -呦 -周 -呱 -呲 -味 -呵 -呷 -呸 -呻 -呼 -命 -咀 -咁 -咂 -咄 -咆 -咋 -和 -咎 -咏 -咐 -咒 -咔 -咕 -咖 -咗 -咘 -咙 -咚 -咛 -咣 -咤 -咦 -咧 -咨 -咩 -咪 -咫 -咬 -咭 -咯 -咱 -咲 -咳 -咸 -咻 -咽 -咿 -哀 -品 -哂 -哄 -哆 -哇 -哈 -哉 -哋 -哌 -响 -哎 -哏 -哐 -哑 -哒 -哔 -哗 -哟 -員 -哥 -哦 -哧 -哨 -哩 -哪 -哭 -哮 -哲 -哺 -哼 -哽 -唁 -唄 -唆 -唇 -唉 -唏 -唐 -唑 -唔 -唠 -唤 -唧 -唬 -售 -唯 -唰 -唱 -唳 -唷 -唸 -唾 -啃 -啄 -商 -啉 -啊 -問 -啓 -啕 -啖 -啜 -啞 -啟 -啡 -啤 -啥 -啦 -啧 -啪 -啫 -啬 -啮 -啰 -啱 -啲 -啵 -啶 -啷 -啸 -啻 -啼 -啾 -喀 -喂 -喃 -善 -喆 -喇 -喉 -喊 -喋 -喎 -喏 -喔 -喘 -喙 -喚 -喜 -喝 -喟 -喧 -喪 -喫 -喬 -單 -喰 -喱 -喲 -喳 -喵 -営 -喷 -喹 -喺 -喻 -喽 -嗅 -嗆 -嗇 -嗎 -嗑 -嗒 -嗓 -嗔 -嗖 -嗚 -嗜 -嗝 -嗟 -嗡 -嗣 -嗤 -嗦 -嗨 -嗪 -嗬 -嗯 -嗰 -嗲 -嗳 -嗶 -嗷 -嗽 -嘀 -嘅 -嘆 -嘈 -嘉 -嘌 -嘍 -嘎 -嘔 -嘖 -嘗 -嘘 -嘚 -嘛 -嘜 -嘞 -嘟 -嘢 -嘣 -嘤 -嘧 -嘩 -嘭 -嘮 -嘯 -嘰 -嘱 -嘲 -嘴 -嘶 -嘸 
-嘹 -嘻 -嘿 -噁 -噌 -噎 -噓 -噔 -噗 -噙 -噜 -噠 -噢 -噤 -器 -噩 -噪 -噬 -噱 -噴 -噶 -噸 -噹 -噻 -噼 -嚀 -嚇 -嚎 -嚏 -嚐 -嚓 -嚕 -嚟 -嚣 -嚥 -嚨 -嚮 -嚴 -嚷 -嚼 -囂 -囉 -囊 -囍 -囑 -囔 -囗 -囚 -四 -囝 -回 -囟 -因 -囡 -团 -団 -囤 -囧 -囪 -囫 -园 -困 -囱 -囲 -図 -围 -囹 -固 -国 -图 -囿 -圃 -圄 -圆 -圈 -國 -圍 -圏 -園 -圓 -圖 -團 -圜 -土 -圣 -圧 -在 -圩 -圭 -地 -圳 -场 -圻 -圾 -址 -坂 -均 -坊 -坍 -坎 -坏 -坐 -坑 -块 -坚 -坛 -坝 -坞 -坟 -坠 -坡 -坤 -坦 -坨 -坪 -坯 -坳 -坵 -坷 -垂 -垃 -垄 -型 -垒 -垚 -垛 -垠 -垢 -垣 -垦 -垩 -垫 -垭 -垮 -垵 -埂 -埃 -埋 -城 -埔 -埕 -埗 -域 -埠 -埤 -埵 -執 -埸 -培 -基 -埼 -堀 -堂 -堃 -堅 -堆 -堇 -堑 -堕 -堙 -堡 -堤 -堪 -堯 -堰 -報 -場 -堵 -堺 -堿 -塊 -塌 -塑 -塔 -塗 -塘 -塚 -塞 -塢 -塩 -填 -塬 -塭 -塵 -塾 -墀 -境 -墅 -墉 -墊 -墒 -墓 -増 -墘 -墙 -墜 -增 -墟 -墨 -墩 -墮 -墳 -墻 -墾 -壁 -壅 -壆 -壇 -壊 -壑 -壓 -壕 -壘 -壞 -壟 -壢 -壤 -壩 -士 -壬 -壮 -壯 -声 -売 -壳 -壶 -壹 -壺 -壽 -处 -备 -変 -复 -夏 -夔 -夕 -外 -夙 -多 -夜 -够 -夠 -夢 -夥 -大 -天 -太 -夫 -夭 -央 -夯 -失 -头 -夷 -夸 -夹 -夺 -夾 -奂 -奄 -奇 -奈 -奉 -奋 -奎 -奏 -奐 -契 -奔 -奕 -奖 -套 -奘 -奚 -奠 -奢 -奥 -奧 -奪 -奬 -奮 -女 -奴 -奶 -奸 -她 -好 -如 -妃 -妄 -妆 -妇 -妈 -妊 -妍 -妒 -妓 -妖 -妘 -妙 -妝 -妞 -妣 -妤 -妥 -妨 -妩 -妪 -妮 -妲 -妳 -妹 -妻 -妾 -姆 -姉 -姊 -始 -姍 -姐 -姑 -姒 -姓 -委 -姗 -姚 -姜 -姝 -姣 -姥 -姦 -姨 -姪 -姫 -姬 -姹 -姻 -姿 -威 -娃 -娄 -娅 -娆 -娇 -娉 -娑 -娓 -娘 -娛 -娜 -娟 -娠 -娣 -娥 -娩 -娱 -娲 -娴 -娶 -娼 -婀 -婁 -婆 -婉 -婊 -婕 -婚 -婢 -婦 -婧 -婪 -婭 -婴 -婵 -婶 -婷 -婺 -婿 -媒 -媚 -媛 -媞 -媧 -媲 -媳 -媽 -媾 -嫁 -嫂 -嫉 -嫌 -嫑 -嫔 -嫖 -嫘 -嫚 -嫡 -嫣 -嫦 -嫩 -嫲 -嫵 -嫻 -嬅 -嬉 -嬌 -嬗 -嬛 -嬢 -嬤 -嬪 -嬰 -嬴 -嬷 -嬸 -嬿 -孀 -孃 -子 -孑 -孔 -孕 -孖 -字 -存 -孙 -孚 -孛 -孜 -孝 -孟 -孢 -季 -孤 -学 -孩 -孪 -孫 -孬 -孰 -孱 -孳 -孵 -學 -孺 -孽 -孿 -宁 -它 -宅 -宇 -守 -安 -宋 -完 -宏 -宓 -宕 -宗 -官 -宙 -定 -宛 -宜 -宝 -实 -実 -宠 -审 -客 -宣 -室 -宥 -宦 -宪 -宫 -宮 -宰 -害 -宴 -宵 -家 -宸 -容 -宽 -宾 -宿 -寂 -寄 -寅 -密 -寇 -富 -寐 -寒 -寓 -寛 -寝 -寞 -察 -寡 -寢 -寥 -實 -寧 -寨 -審 -寫 -寬 -寮 -寰 -寵 -寶 -寸 -对 -寺 -寻 -导 -対 -寿 -封 -専 -射 -将 -將 -專 -尉 -尊 -尋 -對 -導 -小 -少 -尔 -尕 -尖 -尘 -尚 -尝 -尤 -尧 -尬 -就 -尴 -尷 -尸 -尹 -尺 -尻 -尼 -尽 -尾 -尿 -局 -屁 -层 -屄 -居 -屆 -屈 -屉 -届 -屋 -屌 -屍 -屎 -屏 -屐 -屑 -展 -屜 -属 -屠 -屡 -屢 -層 -履 -屬 -屯 -山 -屹 -屿 -岀 -岁 -岂 -岌 -岐 -岑 -岔 -岖 -岗 -岘 -岙 -岚 -岛 -岡 -岩 -岫 -岬 -岭 -岱 -岳 -岷 -岸 -峇 -峋 -峒 -峙 -峡 -峤 -峥 -峦 -峨 -峪 -峭 -峯 -峰 -峴 -島 -峻 -峽 -崁 -崂 -崆 -崇 -崎 -崑 -崔 -崖 -崗 -崙 -崛 -崧 -崩 -崭 -崴 -崽 -嵇 -嵊 -嵋 -嵌 -嵐 -嵘 -嵩 -嵬 -嵯 -嶂 -嶄 -嶇 -嶋 -嶙 -嶺 -嶼 -嶽 -巅 -巍 -巒 -巔 -巖 -川 -州 -巡 -巢 -工 -左 -巧 
-巨 -巩 -巫 -差 -己 -已 -巳 -巴 -巷 -巻 -巽 -巾 -巿 -币 -市 -布 -帅 -帆 -师 -希 -帐 -帑 -帕 -帖 -帘 -帚 -帛 -帜 -帝 -帥 -带 -帧 -師 -席 -帮 -帯 -帰 -帳 -帶 -帷 -常 -帼 -帽 -幀 -幂 -幄 -幅 -幌 -幔 -幕 -幟 -幡 -幢 -幣 -幫 -干 -平 -年 -并 -幸 -幹 -幺 -幻 -幼 -幽 -幾 -广 -庁 -広 -庄 -庆 -庇 -床 -序 -庐 -库 -应 -底 -庖 -店 -庙 -庚 -府 -庞 -废 -庠 -度 -座 -庫 -庭 -庵 -庶 -康 -庸 -庹 -庾 -廁 -廂 -廃 -廈 -廉 -廊 -廓 -廖 -廚 -廝 -廟 -廠 -廢 -廣 -廬 -廳 -延 -廷 -建 -廿 -开 -弁 -异 -弃 -弄 -弈 -弊 -弋 -式 -弑 -弒 -弓 -弔 -引 -弗 -弘 -弛 -弟 -张 -弥 -弦 -弧 -弩 -弭 -弯 -弱 -張 -強 -弹 -强 -弼 -弾 -彅 -彆 -彈 -彌 -彎 -归 -当 -录 -彗 -彙 -彝 -形 -彤 -彥 -彦 -彧 -彩 -彪 -彫 -彬 -彭 -彰 -影 -彷 -役 -彻 -彼 -彿 -往 -征 -径 -待 -徇 -很 -徉 -徊 -律 -後 -徐 -徑 -徒 -従 -徕 -得 -徘 -徙 -徜 -從 -徠 -御 -徨 -復 -循 -徬 -微 -徳 -徴 -徵 -德 -徹 -徼 -徽 -心 -必 -忆 -忌 -忍 -忏 -忐 -忑 -忒 -忖 -志 -忘 -忙 -応 -忠 -忡 -忤 -忧 -忪 -快 -忱 -念 -忻 -忽 -忿 -怀 -态 -怂 -怅 -怆 -怎 -怏 -怒 -怔 -怕 -怖 -怙 -怜 -思 -怠 -怡 -急 -怦 -性 -怨 -怪 -怯 -怵 -总 -怼 -恁 -恃 -恆 -恋 -恍 -恐 -恒 -恕 -恙 -恚 -恢 -恣 -恤 -恥 -恨 -恩 -恪 -恫 -恬 -恭 -息 -恰 -恳 -恵 -恶 -恸 -恺 -恻 -恼 -恿 -悄 -悅 -悉 -悌 -悍 -悔 -悖 -悚 -悟 -悠 -患 -悦 -您 -悩 -悪 -悬 -悯 -悱 -悲 -悴 -悵 -悶 -悸 -悻 -悼 -悽 -情 -惆 -惇 -惊 -惋 -惑 -惕 -惘 -惚 -惜 -惟 -惠 -惡 -惦 -惧 -惨 -惩 -惫 -惬 -惭 -惮 -惯 -惰 -惱 -想 -惴 -惶 -惹 -惺 -愁 -愆 -愈 -愉 -愍 -意 -愕 -愚 -愛 -愜 -感 -愣 -愤 -愧 -愫 -愷 -愿 -慄 -慈 -態 -慌 -慎 -慑 -慕 -慘 -慚 -慟 -慢 -慣 -慧 -慨 -慫 -慮 -慰 -慳 -慵 -慶 -慷 -慾 -憂 -憊 -憋 -憎 -憐 -憑 -憔 -憚 -憤 -憧 -憨 -憩 -憫 -憬 -憲 -憶 -憾 -懂 -懇 -懈 -應 -懊 -懋 -懑 -懒 -懦 -懲 -懵 -懶 -懷 -懸 -懺 -懼 -懾 -懿 -戀 -戈 -戊 -戌 -戍 -戎 -戏 -成 -我 -戒 -戕 -或 -战 -戚 -戛 -戟 -戡 -戦 -截 -戬 -戮 -戰 -戲 -戳 -戴 -戶 -户 -戸 -戻 -戾 -房 -所 -扁 -扇 -扈 -扉 -手 -才 -扎 -扑 -扒 -打 -扔 -払 -托 -扛 -扣 -扦 -执 -扩 -扪 -扫 -扬 -扭 -扮 -扯 -扰 -扱 -扳 -扶 -批 -扼 -找 -承 -技 -抄 -抉 -把 -抑 -抒 -抓 -投 -抖 -抗 -折 -抚 -抛 -抜 -択 -抟 -抠 -抡 -抢 -护 -报 -抨 -披 -抬 -抱 -抵 -抹 -押 -抽 -抿 -拂 -拄 -担 -拆 -拇 -拈 -拉 -拋 -拌 -拍 -拎 -拐 -拒 -拓 -拔 -拖 -拗 -拘 -拙 -拚 -招 -拜 -拟 -拡 -拢 -拣 -拥 -拦 -拧 -拨 -择 -括 -拭 -拮 -拯 -拱 -拳 -拴 -拷 -拼 -拽 -拾 -拿 -持 -挂 -指 -挈 -按 -挎 -挑 -挖 -挙 -挚 -挛 -挝 -挞 -挟 -挠 -挡 -挣 -挤 -挥 -挨 -挪 -挫 -振 -挲 -挹 -挺 -挽 -挾 -捂 -捅 -捆 -捉 -捋 -捌 -捍 -捎 -捏 -捐 -捕 -捞 -损 -捡 -换 -捣 -捧 -捨 -捩 -据 -捱 -捲 -捶 -捷 -捺 -捻 -掀 -掂 -掃 -掇 -授 -掉 -掌 -掏 -掐 -排 -掖 -掘 -掙 -掛 -掠 -採 -探 -掣 -接 -控 -推 -掩 -措 -掬 -掰 -掲 -掳 -掴 -掷 -掸 -掺 -揀 -揃 -揄 -揆 -揉 -揍 -描 -提 -插 -揖 -揚 -換 -握 -揣 -揩 -揪 -揭 -揮 -援 -揶 -揸 -揹 -揽 -搀 -搁 
-搂 -搅 -損 -搏 -搐 -搓 -搔 -搖 -搗 -搜 -搞 -搡 -搪 -搬 -搭 -搵 -搶 -携 -搽 -摀 -摁 -摄 -摆 -摇 -摈 -摊 -摒 -摔 -摘 -摞 -摟 -摧 -摩 -摯 -摳 -摸 -摹 -摺 -摻 -撂 -撃 -撅 -撇 -撈 -撐 -撑 -撒 -撓 -撕 -撚 -撞 -撤 -撥 -撩 -撫 -撬 -播 -撮 -撰 -撲 -撵 -撷 -撸 -撻 -撼 -撿 -擀 -擁 -擂 -擄 -擅 -擇 -擊 -擋 -操 -擎 -擒 -擔 -擘 -據 -擞 -擠 -擡 -擢 -擦 -擬 -擰 -擱 -擲 -擴 -擷 -擺 -擼 -擾 -攀 -攏 -攒 -攔 -攘 -攙 -攜 -攝 -攞 -攢 -攣 -攤 -攥 -攪 -攫 -攬 -支 -收 -攸 -改 -攻 -放 -政 -故 -效 -敌 -敍 -敎 -敏 -救 -敕 -敖 -敗 -敘 -教 -敛 -敝 -敞 -敢 -散 -敦 -敬 -数 -敲 -整 -敵 -敷 -數 -斂 -斃 -文 -斋 -斌 -斎 -斐 -斑 -斓 -斗 -料 -斛 -斜 -斟 -斡 -斤 -斥 -斧 -斩 -斫 -斬 -断 -斯 -新 -斷 -方 -於 -施 -旁 -旃 -旅 -旋 -旌 -旎 -族 -旖 -旗 -无 -既 -日 -旦 -旧 -旨 -早 -旬 -旭 -旮 -旱 -时 -旷 -旺 -旻 -昀 -昂 -昆 -昇 -昉 -昊 -昌 -明 -昏 -易 -昔 -昕 -昙 -星 -映 -春 -昧 -昨 -昭 -是 -昱 -昴 -昵 -昶 -昼 -显 -晁 -時 -晃 -晉 -晋 -晌 -晏 -晒 -晓 -晔 -晕 -晖 -晗 -晚 -晝 -晞 -晟 -晤 -晦 -晨 -晩 -普 -景 -晰 -晴 -晶 -晷 -智 -晾 -暂 -暄 -暇 -暈 -暉 -暌 -暐 -暑 -暖 -暗 -暝 -暢 -暧 -暨 -暫 -暮 -暱 -暴 -暸 -暹 -曄 -曆 -曇 -曉 -曖 -曙 -曜 -曝 -曠 -曦 -曬 -曰 -曲 -曳 -更 -書 -曹 -曼 -曾 -替 -最 -會 -月 -有 -朋 -服 -朐 -朔 -朕 -朗 -望 -朝 -期 -朦 -朧 -木 -未 -末 -本 -札 -朮 -术 -朱 -朴 -朵 -机 -朽 -杀 -杂 -权 -杆 -杈 -杉 -李 -杏 -材 -村 -杓 -杖 -杜 -杞 -束 -杠 -条 -来 -杨 -杭 -杯 -杰 -東 -杳 -杵 -杷 -杼 -松 -板 -极 -构 -枇 -枉 -枋 -析 -枕 -林 -枚 -果 -枝 -枢 -枣 -枪 -枫 -枭 -枯 -枰 -枱 -枳 -架 -枷 -枸 -柄 -柏 -某 -柑 -柒 -染 -柔 -柘 -柚 -柜 -柞 -柠 -柢 -查 -柩 -柬 -柯 -柱 -柳 -柴 -柵 -査 -柿 -栀 -栃 -栄 -栅 -标 -栈 -栉 -栋 -栎 -栏 -树 -栓 -栖 -栗 -校 -栩 -株 -样 -核 -根 -格 -栽 -栾 -桀 -桁 -桂 -桃 -桅 -框 -案 -桉 -桌 -桎 -桐 -桑 -桓 -桔 -桜 -桠 -桡 -桢 -档 -桥 -桦 -桧 -桨 -桩 -桶 -桿 -梁 -梅 -梆 -梏 -梓 -梗 -條 -梟 -梢 -梦 -梧 -梨 -梭 -梯 -械 -梳 -梵 -梶 -检 -棂 -棄 -棉 -棋 -棍 -棒 -棕 -棗 -棘 -棚 -棟 -棠 -棣 -棧 -森 -棱 -棲 -棵 -棹 -棺 -椁 -椅 -椋 -植 -椎 -椒 -検 -椪 -椭 -椰 -椹 -椽 -椿 -楂 -楊 -楓 -楔 -楚 -楝 -楞 -楠 -楣 -楨 -楫 -業 -楮 -極 -楷 -楸 -楹 -楼 -楽 -概 -榄 -榆 -榈 -榉 -榔 -榕 -榖 -榛 -榜 -榨 -榫 -榭 -榮 -榱 -榴 -榷 -榻 -槁 -槃 -構 -槌 -槍 -槎 -槐 -槓 -様 -槛 -槟 -槤 -槭 -槲 -槳 -槻 -槽 -槿 -樁 -樂 -樊 -樑 -樓 -標 -樞 -樟 -模 -樣 -権 -横 -樫 -樯 -樱 -樵 -樸 -樹 -樺 -樽 -樾 -橄 -橇 -橋 -橐 -橘 -橙 -機 -橡 -橢 -橫 -橱 -橹 -橼 -檀 -檄 -檎 -檐 -檔 -檗 -檜 -檢 -檬 -檯 -檳 -檸 -檻 -櫃 -櫚 -櫛 -櫥 -櫸 -櫻 -欄 -權 -欒 -欖 -欠 -次 -欢 -欣 -欧 -欲 -欸 -欺 -欽 -款 -歆 -歇 -歉 -歌 -歎 -歐 -歓 -歙 -歛 -歡 -止 -正 -此 -步 -武 -歧 -歩 -歪 -歯 -歲 -歳 -歴 -歷 -歸 -歹 -死 -歼 -殁 -殃 -殆 -殇 -殉 -殊 -残 -殒 -殓 -殖 -殘 -殞 -殡 -殤 -殭 -殯 -殲 -殴 -段 -殷 -殺 -殼 -殿 -毀 -毁 
-毂 -毅 -毆 -毋 -母 -毎 -每 -毒 -毓 -比 -毕 -毗 -毘 -毙 -毛 -毡 -毫 -毯 -毽 -氈 -氏 -氐 -民 -氓 -气 -氖 -気 -氙 -氛 -氟 -氡 -氢 -氣 -氤 -氦 -氧 -氨 -氪 -氫 -氮 -氯 -氰 -氲 -水 -氷 -永 -氹 -氾 -汀 -汁 -求 -汆 -汇 -汉 -汎 -汐 -汕 -汗 -汙 -汛 -汝 -汞 -江 -池 -污 -汤 -汨 -汩 -汪 -汰 -汲 -汴 -汶 -汹 -決 -汽 -汾 -沁 -沂 -沃 -沅 -沈 -沉 -沌 -沏 -沐 -沒 -沓 -沖 -沙 -沛 -沟 -没 -沢 -沣 -沥 -沦 -沧 -沪 -沫 -沭 -沮 -沱 -河 -沸 -油 -治 -沼 -沽 -沾 -沿 -況 -泄 -泉 -泊 -泌 -泓 -法 -泗 -泛 -泞 -泠 -泡 -波 -泣 -泥 -注 -泪 -泫 -泮 -泯 -泰 -泱 -泳 -泵 -泷 -泸 -泻 -泼 -泽 -泾 -洁 -洄 -洋 -洒 -洗 -洙 -洛 -洞 -津 -洩 -洪 -洮 -洱 -洲 -洵 -洶 -洸 -洹 -活 -洼 -洽 -派 -流 -浃 -浄 -浅 -浆 -浇 -浊 -测 -济 -浏 -浑 -浒 -浓 -浔 -浙 -浚 -浜 -浣 -浦 -浩 -浪 -浬 -浮 -浯 -浴 -海 -浸 -涂 -涅 -涇 -消 -涉 -涌 -涎 -涓 -涔 -涕 -涙 -涛 -涝 -涞 -涟 -涠 -涡 -涣 -涤 -润 -涧 -涨 -涩 -涪 -涮 -涯 -液 -涵 -涸 -涼 -涿 -淀 -淄 -淅 -淆 -淇 -淋 -淌 -淑 -淒 -淖 -淘 -淙 -淚 -淞 -淡 -淤 -淦 -淨 -淩 -淪 -淫 -淬 -淮 -深 -淳 -淵 -混 -淹 -淺 -添 -淼 -清 -済 -渉 -渊 -渋 -渍 -渎 -渐 -渔 -渗 -渙 -渚 -減 -渝 -渠 -渡 -渣 -渤 -渥 -渦 -温 -測 -渭 -港 -渲 -渴 -游 -渺 -渾 -湃 -湄 -湊 -湍 -湖 -湘 -湛 -湟 -湧 -湫 -湮 -湯 -湳 -湾 -湿 -満 -溃 -溅 -溉 -溏 -源 -準 -溜 -溝 -溟 -溢 -溥 -溧 -溪 -溫 -溯 -溱 -溴 -溶 -溺 -溼 -滁 -滂 -滄 -滅 -滇 -滋 -滌 -滑 -滓 -滔 -滕 -滙 -滚 -滝 -滞 -滟 -满 -滢 -滤 -滥 -滦 -滨 -滩 -滬 -滯 -滲 -滴 -滷 -滸 -滾 -滿 -漁 -漂 -漆 -漉 -漏 -漓 -演 -漕 -漠 -漢 -漣 -漩 -漪 -漫 -漬 -漯 -漱 -漲 -漳 -漸 -漾 -漿 -潆 -潇 -潋 -潍 -潑 -潔 -潘 -潛 -潜 -潞 -潟 -潢 -潤 -潦 -潧 -潭 -潮 -潰 -潴 -潸 -潺 -潼 -澀 -澄 -澆 -澈 -澍 -澎 -澗 -澜 -澡 -澤 -澧 -澱 -澳 -澹 -激 -濁 -濂 -濃 -濑 -濒 -濕 -濘 -濛 -濟 -濠 -濡 -濤 -濫 -濬 -濮 -濯 -濱 -濺 -濾 -瀅 -瀆 -瀉 -瀋 -瀏 -瀑 -瀕 -瀘 -瀚 -瀛 -瀝 -瀞 -瀟 -瀧 -瀨 -瀬 -瀰 -瀾 -灌 -灏 -灑 -灘 -灝 -灞 -灣 -火 -灬 -灭 -灯 -灰 -灵 -灶 -灸 -灼 -災 -灾 -灿 -炀 -炁 -炅 -炉 -炊 -炎 -炒 -炔 -炕 -炖 -炙 -炜 -炫 -炬 -炭 -炮 -炯 -炳 -炷 -炸 -点 -為 -炼 -炽 -烁 -烂 -烃 -烈 -烊 -烏 -烘 -烙 -烛 -烟 -烤 -烦 -烧 -烨 -烩 -烫 -烬 -热 -烯 -烷 -烹 -烽 -焉 -焊 -焕 -焖 -焗 -焘 -焙 -焚 -焜 -無 -焦 -焯 -焰 -焱 -然 -焼 -煅 -煉 -煊 -煌 -煎 -煒 -煖 -煙 -煜 -煞 -煤 -煥 -煦 -照 -煨 -煩 -煮 -煲 -煸 -煽 -熄 -熊 -熏 -熒 -熔 -熙 -熟 -熠 -熨 -熬 -熱 -熵 -熹 -熾 -燁 -燃 -燄 -燈 -燉 -燊 -燎 -燒 -燔 -燕 -燙 -燜 -營 -燥 -燦 -燧 -燭 -燮 -燴 -燻 -燼 -燿 -爆 -爍 -爐 -爛 -爪 -爬 -爭 -爰 -爱 -爲 -爵 -父 -爷 -爸 -爹 -爺 -爻 -爽 -爾 -牆 -片 -版 -牌 -牍 -牒 -牙 -牛 -牝 -牟 -牠 -牡 -牢 -牦 -牧 -物 -牯 -牲 -牴 -牵 -特 -牺 -牽 -犀 -犁 -犄 -犊 -犍 -犒 -犢 -犧 -犬 -犯 -状 -犷 -犸 -犹 -狀 -狂 -狄 -狈 -狎 -狐 -狒 -狗 -狙 -狞 -狠 -狡 -狩 -独 -狭 -狮 -狰 -狱 -狸 -狹 -狼 -狽 -猎 -猕 -猖 -猗 -猙 -猛 -猜 
-猝 -猥 -猩 -猪 -猫 -猬 -献 -猴 -猶 -猷 -猾 -猿 -獄 -獅 -獎 -獐 -獒 -獗 -獠 -獣 -獨 -獭 -獰 -獲 -獵 -獷 -獸 -獺 -獻 -獼 -獾 -玄 -率 -玉 -王 -玑 -玖 -玛 -玟 -玠 -玥 -玩 -玫 -玮 -环 -现 -玲 -玳 -玷 -玺 -玻 -珀 -珂 -珅 -珈 -珉 -珊 -珍 -珏 -珐 -珑 -珙 -珞 -珠 -珣 -珥 -珩 -珪 -班 -珮 -珲 -珺 -現 -球 -琅 -理 -琇 -琉 -琊 -琍 -琏 -琐 -琛 -琢 -琥 -琦 -琨 -琪 -琬 -琮 -琰 -琲 -琳 -琴 -琵 -琶 -琺 -琼 -瑀 -瑁 -瑄 -瑋 -瑕 -瑗 -瑙 -瑚 -瑛 -瑜 -瑞 -瑟 -瑠 -瑣 -瑤 -瑩 -瑪 -瑯 -瑰 -瑶 -瑾 -璀 -璁 -璃 -璇 -璉 -璋 -璎 -璐 -璜 -璞 -璟 -璧 -璨 -環 -璽 -璿 -瓊 -瓏 -瓒 -瓜 -瓢 -瓣 -瓤 -瓦 -瓮 -瓯 -瓴 -瓶 -瓷 -甄 -甌 -甕 -甘 -甙 -甚 -甜 -生 -產 -産 -甥 -甦 -用 -甩 -甫 -甬 -甭 -甯 -田 -由 -甲 -申 -电 -男 -甸 -町 -画 -甾 -畀 -畅 -界 -畏 -畑 -畔 -留 -畜 -畝 -畢 -略 -畦 -番 -畫 -異 -畲 -畳 -畴 -當 -畸 -畹 -畿 -疆 -疇 -疊 -疏 -疑 -疔 -疖 -疗 -疙 -疚 -疝 -疟 -疡 -疣 -疤 -疥 -疫 -疮 -疯 -疱 -疲 -疳 -疵 -疸 -疹 -疼 -疽 -疾 -痂 -病 -症 -痈 -痉 -痊 -痍 -痒 -痔 -痕 -痘 -痙 -痛 -痞 -痠 -痢 -痣 -痤 -痧 -痨 -痪 -痫 -痰 -痱 -痴 -痹 -痺 -痼 -痿 -瘀 -瘁 -瘋 -瘍 -瘓 -瘘 -瘙 -瘟 -瘠 -瘡 -瘢 -瘤 -瘦 -瘧 -瘩 -瘪 -瘫 -瘴 -瘸 -瘾 -療 -癇 -癌 -癒 -癖 -癜 -癞 -癡 -癢 -癣 -癥 -癫 -癬 -癮 -癱 -癲 -癸 -発 -登 -發 -白 -百 -皂 -的 -皆 -皇 -皈 -皋 -皎 -皑 -皓 -皖 -皙 -皚 -皮 -皰 -皱 -皴 -皺 -皿 -盂 -盃 -盅 -盆 -盈 -益 -盎 -盏 -盐 -监 -盒 -盔 -盖 -盗 -盘 -盛 -盜 -盞 -盟 -盡 -監 -盤 -盥 -盧 -盪 -目 -盯 -盱 -盲 -直 -相 -盹 -盼 -盾 -省 -眈 -眉 -看 -県 -眙 -眞 -真 -眠 -眦 -眨 -眩 -眯 -眶 -眷 -眸 -眺 -眼 -眾 -着 -睁 -睇 -睏 -睐 -睑 -睛 -睜 -睞 -睡 -睢 -督 -睥 -睦 -睨 -睪 -睫 -睬 -睹 -睽 -睾 -睿 -瞄 -瞅 -瞇 -瞋 -瞌 -瞎 -瞑 -瞒 -瞓 -瞞 -瞟 -瞠 -瞥 -瞧 -瞩 -瞪 -瞬 -瞭 -瞰 -瞳 -瞻 -瞼 -瞿 -矇 -矍 -矗 -矚 -矛 -矜 -矢 -矣 -知 -矩 -矫 -短 -矮 -矯 -石 -矶 -矽 -矾 -矿 -码 -砂 -砌 -砍 -砒 -研 -砖 -砗 -砚 -砝 -砣 -砥 -砧 -砭 -砰 -砲 -破 -砷 -砸 -砺 -砼 -砾 -础 -硅 -硐 -硒 -硕 -硝 -硫 -硬 -确 -硯 -硼 -碁 -碇 -碉 -碌 -碍 -碎 -碑 -碓 -碗 -碘 -碚 -碛 -碟 -碣 -碧 -碩 -碰 -碱 -碳 -碴 -確 -碼 -碾 -磁 -磅 -磊 -磋 -磐 -磕 -磚 -磡 -磨 -磬 -磯 -磲 -磷 -磺 -礁 -礎 -礙 -礡 -礦 -礪 -礫 -礴 -示 -礼 -社 -祀 -祁 -祂 -祇 -祈 -祉 -祎 -祐 -祕 -祖 -祗 -祚 -祛 -祜 -祝 -神 -祟 -祠 -祢 -祥 -票 -祭 -祯 -祷 -祸 -祺 -祿 -禀 -禁 -禄 -禅 -禍 -禎 -福 -禛 -禦 -禧 -禪 -禮 -禱 -禹 -禺 -离 -禽 -禾 -禿 -秀 -私 -秃 -秆 -秉 -秋 -种 -科 -秒 -秘 -租 -秣 -秤 -秦 -秧 -秩 -秭 -积 -称 -秸 -移 -秽 -稀 -稅 -程 -稍 -税 -稔 -稗 -稚 -稜 -稞 -稟 -稠 -稣 -種 -稱 -稲 -稳 -稷 -稹 -稻 -稼 -稽 -稿 -穀 -穂 -穆 -穌 -積 -穎 -穗 -穢 -穩 -穫 -穴 -究 -穷 -穹 -空 -穿 -突 -窃 -窄 -窈 -窍 -窑 -窒 -窓 -窕 -窖 -窗 -窘 -窜 -窝 -窟 -窠 -窥 -窦 -窨 -窩 -窪 -窮 -窯 -窺 -窿 -竄 -竅 -竇 -竊 -立 -竖 -站 -竜 -竞 -竟 -章 -竣 -童 -竭 -端 -競 -竹 -竺 -竽 -竿 -笃 
-笆 -笈 -笋 -笏 -笑 -笔 -笙 -笛 -笞 -笠 -符 -笨 -第 -笹 -笺 -笼 -筆 -等 -筊 -筋 -筍 -筏 -筐 -筑 -筒 -答 -策 -筛 -筝 -筠 -筱 -筲 -筵 -筷 -筹 -签 -简 -箇 -箋 -箍 -箏 -箐 -箔 -箕 -算 -箝 -管 -箩 -箫 -箭 -箱 -箴 -箸 -節 -篁 -範 -篆 -篇 -築 -篑 -篓 -篙 -篝 -篠 -篡 -篤 -篩 -篪 -篮 -篱 -篷 -簇 -簌 -簍 -簡 -簦 -簧 -簪 -簫 -簷 -簸 -簽 -簾 -簿 -籁 -籃 -籌 -籍 -籐 -籟 -籠 -籤 -籬 -籮 -籲 -米 -类 -籼 -籽 -粄 -粉 -粑 -粒 -粕 -粗 -粘 -粟 -粤 -粥 -粧 -粪 -粮 -粱 -粲 -粳 -粵 -粹 -粼 -粽 -精 -粿 -糅 -糊 -糍 -糕 -糖 -糗 -糙 -糜 -糞 -糟 -糠 -糧 -糬 -糯 -糰 -糸 -系 -糾 -紀 -紂 -約 -紅 -紉 -紊 -紋 -納 -紐 -紓 -純 -紗 -紘 -紙 -級 -紛 -紜 -素 -紡 -索 -紧 -紫 -紮 -累 -細 -紳 -紹 -紺 -終 -絃 -組 -絆 -経 -結 -絕 -絞 -絡 -絢 -給 -絨 -絮 -統 -絲 -絳 -絵 -絶 -絹 -綁 -綏 -綑 -經 -継 -続 -綜 -綠 -綢 -綦 -綫 -綬 -維 -綱 -網 -綴 -綵 -綸 -綺 -綻 -綽 -綾 -綿 -緊 -緋 -総 -緑 -緒 -緘 -線 -緝 -緞 -締 -緣 -編 -緩 -緬 -緯 -練 -緹 -緻 -縁 -縄 -縈 -縛 -縝 -縣 -縫 -縮 -縱 -縴 -縷 -總 -績 -繁 -繃 -繆 -繇 -繋 -織 -繕 -繚 -繞 -繡 -繩 -繪 -繫 -繭 -繳 -繹 -繼 -繽 -纂 -續 -纍 -纏 -纓 -纔 -纖 -纜 -纠 -红 -纣 -纤 -约 -级 -纨 -纪 -纫 -纬 -纭 -纯 -纰 -纱 -纲 -纳 -纵 -纶 -纷 -纸 -纹 -纺 -纽 -纾 -线 -绀 -练 -组 -绅 -细 -织 -终 -绊 -绍 -绎 -经 -绑 -绒 -结 -绔 -绕 -绘 -给 -绚 -绛 -络 -绝 -绞 -统 -绡 -绢 -绣 -绥 -绦 -继 -绩 -绪 -绫 -续 -绮 -绯 -绰 -绳 -维 -绵 -绶 -绷 -绸 -绻 -综 -绽 -绾 -绿 -缀 -缄 -缅 -缆 -缇 -缈 -缉 -缎 -缓 -缔 -缕 -编 -缘 -缙 -缚 -缜 -缝 -缠 -缢 -缤 -缥 -缨 -缩 -缪 -缭 -缮 -缰 -缱 -缴 -缸 -缺 -缽 -罂 -罄 -罌 -罐 -网 -罔 -罕 -罗 -罚 -罡 -罢 -罩 -罪 -置 -罰 -署 -罵 -罷 -罹 -羁 -羅 -羈 -羊 -羌 -美 -羔 -羚 -羞 -羟 -羡 -羣 -群 -羥 -羧 -羨 -義 -羯 -羲 -羸 -羹 -羽 -羿 -翁 -翅 -翊 -翌 -翎 -習 -翔 -翘 -翟 -翠 -翡 -翦 -翩 -翰 -翱 -翳 -翹 -翻 -翼 -耀 -老 -考 -耄 -者 -耆 -耋 -而 -耍 -耐 -耒 -耕 -耗 -耘 -耙 -耦 -耨 -耳 -耶 -耷 -耸 -耻 -耽 -耿 -聂 -聆 -聊 -聋 -职 -聒 -联 -聖 -聘 -聚 -聞 -聪 -聯 -聰 -聲 -聳 -聴 -聶 -職 -聽 -聾 -聿 -肃 -肄 -肅 -肆 -肇 -肉 -肋 -肌 -肏 -肓 -肖 -肘 -肚 -肛 -肝 -肠 -股 -肢 -肤 -肥 -肩 -肪 -肮 -肯 -肱 -育 -肴 -肺 -肽 -肾 -肿 -胀 -胁 -胃 -胄 -胆 -背 -胍 -胎 -胖 -胚 -胛 -胜 -胝 -胞 -胡 -胤 -胥 -胧 -胫 -胭 -胯 -胰 -胱 -胳 -胴 -胶 -胸 -胺 -能 -脂 -脅 -脆 -脇 -脈 -脉 -脊 -脍 -脏 -脐 -脑 -脓 -脖 -脘 -脚 -脛 -脣 -脩 -脫 -脯 -脱 -脲 -脳 -脸 -脹 -脾 -腆 -腈 -腊 -腋 -腌 -腎 -腐 -腑 -腓 -腔 -腕 -腥 -腦 -腩 -腫 -腭 -腮 -腰 -腱 -腳 -腴 -腸 -腹 -腺 -腻 -腼 -腾 -腿 -膀 -膈 -膊 -膏 -膑 -膘 -膚 -膛 -膜 -膝 -膠 -膦 -膨 -膩 -膳 -膺 -膻 -膽 -膾 -膿 -臀 -臂 -臃 -臆 -臉 -臊 -臍 -臓 -臘 -臟 -臣 -臥 -臧 -臨 -自 -臬 -臭 -至 -致 -臺 -臻 -臼 -臾 -舀 -舂 -舅 -舆 -與 -興 -舉 -舊 -舌 -舍 -舎 -舐 -舒 -舔 -舖 -舗 -舛 -舜 -舞 -舟 -航 -舫 -般 -舰 -舱 -舵 -舶 -舷 -舸 -船 -舺 
-舾 -艇 -艋 -艘 -艙 -艦 -艮 -良 -艰 -艱 -色 -艳 -艷 -艹 -艺 -艾 -节 -芃 -芈 -芊 -芋 -芍 -芎 -芒 -芙 -芜 -芝 -芡 -芥 -芦 -芩 -芪 -芫 -芬 -芭 -芮 -芯 -花 -芳 -芷 -芸 -芹 -芻 -芽 -芾 -苁 -苄 -苇 -苋 -苍 -苏 -苑 -苒 -苓 -苔 -苕 -苗 -苛 -苜 -苞 -苟 -苡 -苣 -若 -苦 -苫 -苯 -英 -苷 -苹 -苻 -茁 -茂 -范 -茄 -茅 -茉 -茎 -茏 -茗 -茜 -茧 -茨 -茫 -茬 -茭 -茯 -茱 -茲 -茴 -茵 -茶 -茸 -茹 -茼 -荀 -荃 -荆 -草 -荊 -荏 -荐 -荒 -荔 -荖 -荘 -荚 -荞 -荟 -荠 -荡 -荣 -荤 -荥 -荧 -荨 -荪 -荫 -药 -荳 -荷 -荸 -荻 -荼 -荽 -莅 -莆 -莉 -莊 -莎 -莒 -莓 -莖 -莘 -莞 -莠 -莢 -莧 -莪 -莫 -莱 -莲 -莴 -获 -莹 -莺 -莽 -莿 -菀 -菁 -菅 -菇 -菈 -菊 -菌 -菏 -菓 -菖 -菘 -菜 -菟 -菠 -菡 -菩 -華 -菱 -菲 -菸 -菽 -萁 -萃 -萄 -萊 -萋 -萌 -萍 -萎 -萘 -萝 -萤 -营 -萦 -萧 -萨 -萩 -萬 -萱 -萵 -萸 -萼 -落 -葆 -葉 -著 -葚 -葛 -葡 -董 -葦 -葩 -葫 -葬 -葭 -葯 -葱 -葳 -葵 -葷 -葺 -蒂 -蒋 -蒐 -蒔 -蒙 -蒜 -蒞 -蒟 -蒡 -蒨 -蒲 -蒸 -蒹 -蒻 -蒼 -蒿 -蓁 -蓄 -蓆 -蓉 -蓋 -蓑 -蓓 -蓖 -蓝 -蓟 -蓦 -蓬 -蓮 -蓼 -蓿 -蔑 -蔓 -蔔 -蔗 -蔘 -蔚 -蔡 -蔣 -蔥 -蔫 -蔬 -蔭 -蔵 -蔷 -蔺 -蔻 -蔼 -蔽 -蕁 -蕃 -蕈 -蕉 -蕊 -蕎 -蕙 -蕤 -蕨 -蕩 -蕪 -蕭 -蕲 -蕴 -蕻 -蕾 -薄 -薅 -薇 -薈 -薊 -薏 -薑 -薔 -薙 -薛 -薦 -薨 -薩 -薪 -薬 -薯 -薰 -薹 -藉 -藍 -藏 -藐 -藓 -藕 -藜 -藝 -藤 -藥 -藩 -藹 -藻 -藿 -蘆 -蘇 -蘊 -蘋 -蘑 -蘚 -蘭 -蘸 -蘼 -蘿 -虎 -虏 -虐 -虑 -虔 -處 -虚 -虛 -虜 -虞 -號 -虢 -虧 -虫 -虬 -虱 -虹 -虻 -虽 -虾 -蚀 -蚁 -蚂 -蚊 -蚌 -蚓 -蚕 -蚜 -蚝 -蚣 -蚤 -蚩 -蚪 -蚯 -蚱 -蚵 -蛀 -蛆 -蛇 -蛊 -蛋 -蛎 -蛐 -蛔 -蛙 -蛛 -蛟 -蛤 -蛭 -蛮 -蛰 -蛳 -蛹 -蛻 -蛾 -蜀 -蜂 -蜃 -蜆 -蜇 -蜈 -蜊 -蜍 -蜒 -蜓 -蜕 -蜗 -蜘 -蜚 -蜜 -蜡 -蜢 -蜥 -蜱 -蜴 -蜷 -蜻 -蜿 -蝇 -蝈 -蝉 -蝌 -蝎 -蝕 -蝗 -蝙 -蝟 -蝠 -蝦 -蝨 -蝴 -蝶 -蝸 -蝼 -螂 -螃 -融 -螞 -螢 -螨 -螯 -螳 -螺 -蟀 -蟄 -蟆 -蟋 -蟎 -蟑 -蟒 -蟠 -蟬 -蟲 -蟹 -蟻 -蟾 -蠅 -蠍 -蠔 -蠕 -蠛 -蠟 -蠡 -蠢 -蠣 -蠱 -蠶 -蠹 -蠻 -血 -衄 -衅 -衆 -行 -衍 -術 -衔 -街 -衙 -衛 -衝 -衞 -衡 -衢 -衣 -补 -表 -衩 -衫 -衬 -衮 -衰 -衲 -衷 -衹 -衾 -衿 -袁 -袂 -袄 -袅 -袈 -袋 -袍 -袒 -袖 -袜 -袞 -袤 -袪 -被 -袭 -袱 -裁 -裂 -装 -裆 -裊 -裏 -裔 -裕 -裘 -裙 -補 -裝 -裟 -裡 -裤 -裨 -裱 -裳 -裴 -裸 -裹 -製 -裾 -褂 -複 -褐 -褒 -褓 -褔 -褚 -褥 -褪 -褫 -褲 -褶 -褻 -襁 -襄 -襟 -襠 -襪 -襬 -襯 -襲 -西 -要 -覃 -覆 -覇 -見 -規 -覓 -視 -覚 -覦 -覧 -親 -覬 -観 -覷 -覺 -覽 -觀 -见 -观 -规 -觅 -视 -览 -觉 -觊 -觎 -觐 -觑 -角 -觞 -解 -觥 -触 -觸 -言 -訂 -計 -訊 -討 -訓 -訕 -訖 -託 -記 -訛 -訝 -訟 -訣 -訥 -訪 -設 -許 -訳 -訴 -訶 -診 -註 -証 -詆 -詐 -詔 -評 -詛 -詞 -詠 -詡 -詢 -詣 -試 -詩 -詫 -詬 -詭 -詮 -詰 -話 -該 -詳 -詹 -詼 -誅 -誇 -誉 -誌 -認 -誓 -誕 -誘 -語 -誠 -誡 -誣 -誤 -誥 -誦 -誨 -說 -説 -読 -誰 -課 -誹 -誼 -調 -諄 -談 -請 -諏 -諒 -論 -諗 -諜 -諡 -諦 -諧 -諫 -諭 -諮 -諱 -諳 -諷 -諸 -諺 -諾 -謀 -謁 -謂 -謄 -謊 -謎 -謐 
-謔 -謗 -謙 -講 -謝 -謠 -謨 -謬 -謹 -謾 -譁 -證 -譎 -譏 -識 -譙 -譚 -譜 -警 -譬 -譯 -議 -譲 -譴 -護 -譽 -讀 -變 -讓 -讚 -讞 -计 -订 -认 -讥 -讧 -讨 -让 -讪 -讫 -训 -议 -讯 -记 -讲 -讳 -讴 -讶 -讷 -许 -讹 -论 -讼 -讽 -设 -访 -诀 -证 -诃 -评 -诅 -识 -诈 -诉 -诊 -诋 -词 -诏 -译 -试 -诗 -诘 -诙 -诚 -诛 -话 -诞 -诟 -诠 -诡 -询 -诣 -诤 -该 -详 -诧 -诩 -诫 -诬 -语 -误 -诰 -诱 -诲 -说 -诵 -诶 -请 -诸 -诺 -读 -诽 -课 -诿 -谀 -谁 -调 -谄 -谅 -谆 -谈 -谊 -谋 -谌 -谍 -谎 -谏 -谐 -谑 -谒 -谓 -谔 -谕 -谗 -谘 -谙 -谚 -谛 -谜 -谟 -谢 -谣 -谤 -谥 -谦 -谧 -谨 -谩 -谪 -谬 -谭 -谯 -谱 -谲 -谴 -谶 -谷 -豁 -豆 -豇 -豈 -豉 -豊 -豌 -豎 -豐 -豔 -豚 -象 -豢 -豪 -豫 -豬 -豹 -豺 -貂 -貅 -貌 -貓 -貔 -貘 -貝 -貞 -負 -財 -貢 -貧 -貨 -販 -貪 -貫 -責 -貯 -貰 -貳 -貴 -貶 -買 -貸 -費 -貼 -貽 -貿 -賀 -賁 -賂 -賃 -賄 -資 -賈 -賊 -賑 -賓 -賜 -賞 -賠 -賡 -賢 -賣 -賤 -賦 -質 -賬 -賭 -賴 -賺 -購 -賽 -贅 -贈 -贊 -贍 -贏 -贓 -贖 -贛 -贝 -贞 -负 -贡 -财 -责 -贤 -败 -账 -货 -质 -贩 -贪 -贫 -贬 -购 -贮 -贯 -贰 -贱 -贲 -贴 -贵 -贷 -贸 -费 -贺 -贻 -贼 -贾 -贿 -赁 -赂 -赃 -资 -赅 -赈 -赊 -赋 -赌 -赎 -赏 -赐 -赓 -赔 -赖 -赘 -赚 -赛 -赝 -赞 -赠 -赡 -赢 -赣 -赤 -赦 -赧 -赫 -赭 -走 -赳 -赴 -赵 -赶 -起 -趁 -超 -越 -趋 -趕 -趙 -趟 -趣 -趨 -足 -趴 -趵 -趸 -趺 -趾 -跃 -跄 -跆 -跋 -跌 -跎 -跑 -跖 -跚 -跛 -距 -跟 -跡 -跤 -跨 -跩 -跪 -路 -跳 -践 -跷 -跹 -跺 -跻 -踉 -踊 -踌 -踏 -踐 -踝 -踞 -踟 -踢 -踩 -踪 -踮 -踱 -踴 -踵 -踹 -蹂 -蹄 -蹇 -蹈 -蹉 -蹊 -蹋 -蹑 -蹒 -蹙 -蹟 -蹣 -蹤 -蹦 -蹩 -蹬 -蹭 -蹲 -蹴 -蹶 -蹺 -蹼 -蹿 -躁 -躇 -躉 -躊 -躋 -躍 -躏 -躪 -身 -躬 -躯 -躲 -躺 -軀 -車 -軋 -軌 -軍 -軒 -軟 -転 -軸 -軼 -軽 -軾 -較 -載 -輒 -輓 -輔 -輕 -輛 -輝 -輟 -輩 -輪 -輯 -輸 -輻 -輾 -輿 -轄 -轅 -轆 -轉 -轍 -轎 -轟 -车 -轧 -轨 -轩 -转 -轭 -轮 -软 -轰 -轲 -轴 -轶 -轻 -轼 -载 -轿 -较 -辄 -辅 -辆 -辇 -辈 -辉 -辊 -辍 -辐 -辑 -输 -辕 -辖 -辗 -辘 -辙 -辛 -辜 -辞 -辟 -辣 -辦 -辨 -辩 -辫 -辭 -辮 -辯 -辰 -辱 -農 -边 -辺 -辻 -込 -辽 -达 -迁 -迂 -迄 -迅 -过 -迈 -迎 -运 -近 -返 -还 -这 -进 -远 -违 -连 -迟 -迢 -迤 -迥 -迦 -迩 -迪 -迫 -迭 -述 -迴 -迷 -迸 -迹 -迺 -追 -退 -送 -适 -逃 -逅 -逆 -选 -逊 -逍 -透 -逐 -递 -途 -逕 -逗 -這 -通 -逛 -逝 -逞 -速 -造 -逢 -連 -逮 -週 -進 -逵 -逶 -逸 -逻 -逼 -逾 -遁 -遂 -遅 -遇 -遊 -運 -遍 -過 -遏 -遐 -遑 -遒 -道 -達 -違 -遗 -遙 -遛 -遜 -遞 -遠 -遢 -遣 -遥 -遨 -適 -遭 -遮 -遲 -遴 -遵 -遶 -遷 -選 -遺 -遼 -遽 -避 -邀 -邁 -邂 -邃 -還 -邇 -邈 -邊 -邋 -邏 -邑 -邓 -邕 -邛 -邝 -邢 -那 -邦 -邨 -邪 -邬 -邮 -邯 -邰 -邱 -邳 -邵 -邸 -邹 -邺 -邻 -郁 -郅 -郊 -郎 -郑 -郜 -郝 -郡 -郢 -郤 -郦 -郧 -部 -郫 -郭 -郴 -郵 -郷 -郸 -都 -鄂 -鄉 -鄒 -鄔 -鄙 -鄞 -鄢 -鄧 -鄭 -鄰 -鄱 -鄲 -鄺 -酉 -酊 -酋 -酌 -配 -酐 -酒 -酗 -酚 -酝 -酢 -酣 -酥 -酩 -酪 -酬 -酮 -酯 -酰 -酱 -酵 -酶 -酷 -酸 -酿 -醃 -醇 
-醉 -醋 -醍 -醐 -醒 -醚 -醛 -醜 -醞 -醣 -醪 -醫 -醬 -醮 -醯 -醴 -醺 -釀 -釁 -采 -釉 -释 -釋 -里 -重 -野 -量 -釐 -金 -釗 -釘 -釜 -針 -釣 -釦 -釧 -釵 -鈀 -鈉 -鈍 -鈎 -鈔 -鈕 -鈞 -鈣 -鈦 -鈪 -鈴 -鈺 -鈾 -鉀 -鉄 -鉅 -鉉 -鉑 -鉗 -鉚 -鉛 -鉤 -鉴 -鉻 -銀 -銃 -銅 -銑 -銓 -銖 -銘 -銜 -銬 -銭 -銮 -銳 -銷 -銹 -鋁 -鋅 -鋒 -鋤 -鋪 -鋰 -鋸 -鋼 -錄 -錐 -錘 -錚 -錠 -錢 -錦 -錨 -錫 -錮 -錯 -録 -錳 -錶 -鍊 -鍋 -鍍 -鍛 -鍥 -鍰 -鍵 -鍺 -鍾 -鎂 -鎊 -鎌 -鎏 -鎔 -鎖 -鎗 -鎚 -鎧 -鎬 -鎮 -鎳 -鏈 -鏖 -鏗 -鏘 -鏞 -鏟 -鏡 -鏢 -鏤 -鏽 -鐘 -鐮 -鐲 -鐳 -鐵 -鐸 -鐺 -鑄 -鑊 -鑑 -鑒 -鑣 -鑫 -鑰 -鑲 -鑼 -鑽 -鑾 -鑿 -针 -钉 -钊 -钎 -钏 -钒 -钓 -钗 -钙 -钛 -钜 -钝 -钞 -钟 -钠 -钡 -钢 -钣 -钤 -钥 -钦 -钧 -钨 -钩 -钮 -钯 -钰 -钱 -钳 -钴 -钵 -钺 -钻 -钼 -钾 -钿 -铀 -铁 -铂 -铃 -铄 -铅 -铆 -铉 -铎 -铐 -铛 -铜 -铝 -铠 -铡 -铢 -铣 -铤 -铨 -铩 -铬 -铭 -铮 -铰 -铲 -铵 -银 -铸 -铺 -链 -铿 -销 -锁 -锂 -锄 -锅 -锆 -锈 -锉 -锋 -锌 -锏 -锐 -锑 -错 -锚 -锟 -锡 -锢 -锣 -锤 -锥 -锦 -锭 -键 -锯 -锰 -锲 -锵 -锹 -锺 -锻 -镀 -镁 -镂 -镇 -镉 -镌 -镍 -镐 -镑 -镕 -镖 -镗 -镛 -镜 -镣 -镭 -镯 -镰 -镳 -镶 -長 -长 -門 -閃 -閉 -開 -閎 -閏 -閑 -閒 -間 -閔 -閘 -閡 -関 -閣 -閥 -閨 -閩 -閱 -閲 -閹 -閻 -閾 -闆 -闇 -闊 -闌 -闍 -闔 -闕 -闖 -闘 -關 -闡 -闢 -门 -闪 -闫 -闭 -问 -闯 -闰 -闲 -间 -闵 -闷 -闸 -闹 -闺 -闻 -闽 -闾 -阀 -阁 -阂 -阅 -阆 -阇 -阈 -阉 -阎 -阐 -阑 -阔 -阕 -阖 -阙 -阚 -阜 -队 -阡 -阪 -阮 -阱 -防 -阳 -阴 -阵 -阶 -阻 -阿 -陀 -陂 -附 -际 -陆 -陇 -陈 -陋 -陌 -降 -限 -陕 -陛 -陝 -陞 -陟 -陡 -院 -陣 -除 -陨 -险 -陪 -陰 -陲 -陳 -陵 -陶 -陷 -陸 -険 -陽 -隅 -隆 -隈 -隊 -隋 -隍 -階 -随 -隐 -隔 -隕 -隘 -隙 -際 -障 -隠 -隣 -隧 -隨 -險 -隱 -隴 -隶 -隸 -隻 -隼 -隽 -难 -雀 -雁 -雄 -雅 -集 -雇 -雉 -雋 -雌 -雍 -雎 -雏 -雑 -雒 -雕 -雖 -雙 -雛 -雜 -雞 -離 -難 -雨 -雪 -雯 -雰 -雲 -雳 -零 -雷 -雹 -電 -雾 -需 -霁 -霄 -霆 -震 -霈 -霉 -霊 -霍 -霎 -霏 -霑 -霓 -霖 -霜 -霞 -霧 -霭 -霰 -露 -霸 -霹 -霽 -霾 -靂 -靄 -靈 -青 -靓 -靖 -静 -靚 -靛 -靜 -非 -靠 -靡 -面 -靥 -靦 -革 -靳 -靴 -靶 -靼 -鞅 -鞋 -鞍 -鞏 -鞑 -鞘 -鞠 -鞣 -鞦 -鞭 -韆 -韋 -韌 -韓 -韜 -韦 -韧 -韩 -韬 -韭 -音 -韵 -韶 -韻 -響 -頁 -頂 -頃 -項 -順 -須 -頌 -預 -頑 -頒 -頓 -頗 -領 -頜 -頡 -頤 -頫 -頭 -頰 -頷 -頸 -頹 -頻 -頼 -顆 -題 -額 -顎 -顏 -顔 -願 -顛 -類 -顧 -顫 -顯 -顱 -顴 -页 -顶 -顷 -项 -顺 -须 -顼 -顽 -顾 -顿 -颁 -颂 -预 -颅 -领 -颇 -颈 -颉 -颊 -颌 -颍 -颐 -频 -颓 -颔 -颖 -颗 -题 -颚 -颛 -颜 -额 -颞 -颠 -颡 -颢 -颤 -颦 -颧 -風 -颯 -颱 -颳 -颶 -颼 -飄 -飆 -风 -飒 -飓 -飕 -飘 -飙 -飚 -飛 -飞 -食 -飢 -飨 -飩 -飪 -飯 -飲 -飼 -飽 -飾 -餃 -餅 -餉 -養 -餌 -餐 -餒 -餓 -餘 -餚 -餛 -餞 -餡 -館 -餮 -餵 -餾 -饅 -饈 -饋 -饌 -饍 -饑 -饒 -饕 -饗 -饞 -饥 -饨 -饪 -饬 -饭 -饮 -饯 -饰 -饱 -饲 -饴 -饵 -饶 -饷 -饺 -饼 -饽 -饿 -馀 -馁 -馄 -馅 -馆 -馈 -馋 
-馍 -馏 -馒 -馔 -首 -馗 -香 -馥 -馨 -馬 -馭 -馮 -馳 -馴 -駁 -駄 -駅 -駆 -駐 -駒 -駕 -駛 -駝 -駭 -駱 -駿 -騁 -騎 -騏 -験 -騙 -騨 -騰 -騷 -驀 -驅 -驊 -驍 -驒 -驕 -驗 -驚 -驛 -驟 -驢 -驥 -马 -驭 -驮 -驯 -驰 -驱 -驳 -驴 -驶 -驷 -驸 -驹 -驻 -驼 -驾 -驿 -骁 -骂 -骄 -骅 -骆 -骇 -骈 -骊 -骋 -验 -骏 -骐 -骑 -骗 -骚 -骛 -骜 -骞 -骠 -骡 -骤 -骥 -骧 -骨 -骯 -骰 -骶 -骷 -骸 -骼 -髂 -髅 -髋 -髏 -髒 -髓 -體 -髖 -高 -髦 -髪 -髮 -髯 -髻 -鬃 -鬆 -鬍 -鬓 -鬚 -鬟 -鬢 -鬣 -鬥 -鬧 -鬱 -鬼 -魁 -魂 -魄 -魅 -魇 -魍 -魏 -魔 -魘 -魚 -魯 -魷 -鮑 -鮨 -鮪 -鮭 -鮮 -鯉 -鯊 -鯖 -鯛 -鯨 -鯰 -鯽 -鰍 -鰓 -鰭 -鰲 -鰻 -鰾 -鱈 -鱉 -鱔 -鱗 -鱷 -鱸 -鱼 -鱿 -鲁 -鲈 -鲍 -鲑 -鲛 -鲜 -鲟 -鲢 -鲤 -鲨 -鲫 -鲱 -鲲 -鲶 -鲷 -鲸 -鳃 -鳄 -鳅 -鳌 -鳍 -鳕 -鳖 -鳗 -鳝 -鳞 -鳥 -鳩 -鳳 -鳴 -鳶 -鴉 -鴕 -鴛 -鴦 -鴨 -鴻 -鴿 -鵑 -鵜 -鵝 -鵡 -鵬 -鵰 -鵲 -鶘 -鶩 -鶯 -鶴 -鷗 -鷲 -鷹 -鷺 -鸚 -鸞 -鸟 -鸠 -鸡 -鸢 -鸣 -鸥 -鸦 -鸨 -鸪 -鸭 -鸯 -鸳 -鸵 -鸽 -鸾 -鸿 -鹂 -鹃 -鹄 -鹅 -鹈 -鹉 -鹊 -鹌 -鹏 -鹑 -鹕 -鹘 -鹜 -鹞 -鹤 -鹦 -鹧 -鹫 -鹭 -鹰 -鹳 -鹵 -鹹 -鹼 -鹽 -鹿 -麂 -麋 -麒 -麓 -麗 -麝 -麟 -麥 -麦 -麩 -麴 -麵 -麸 -麺 -麻 -麼 -麽 -麾 -黃 -黄 -黍 -黎 -黏 -黑 -黒 -黔 -默 -黛 -黜 -黝 -點 -黠 -黨 -黯 -黴 -鼋 -鼎 -鼐 -鼓 -鼠 -鼬 -鼹 -鼻 -鼾 -齁 -齊 -齋 -齐 -齒 -齡 -齢 -齣 -齦 -齿 -龄 -龅 -龈 -龊 -龋 -龌 -龍 -龐 -龔 -龕 -龙 -龚 -龛 -龜 -龟 -︰ -︱ -︶ -︿ -﹁ -﹂ -﹍ -﹏ -﹐ -﹑ -﹒ -﹔ -﹕ -﹖ -﹗ -﹙ -﹚ -﹝ -﹞ -﹡ -﹣ -! -" -# -$ -% -& -' -( -) -* -+ -, -- -. -/ -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -: -; -< -= -> -? -@ -[ -\ -] -^ -_ -` -a -b -c -d -e -f -g -h -i -j -k -l -m -n -o -p -q -r -s -t -u -v -w -x -y -z -{ -| -} -~ -。 -「 -」 -、 -・ -ッ -ー -イ -ク -シ -ス -ト -ノ -フ -ラ -ル -ン -゙ -゚ - ̄ -¥ -👍 -🔥 -😂 -😎 -... 
-yam -10 -2017 -12 -11 -2016 -20 -30 -15 -06 -lofter -##s -2015 -by -16 -14 -18 -13 -24 -17 -2014 -21 -##0 -22 -19 -25 -23 -com -100 -00 -05 -2013 -##a -03 -09 -08 -28 -##2 -50 -01 -04 -##1 -27 -02 -2012 -##3 -26 -##e -07 -##8 -##5 -##6 -##4 -##9 -##7 -29 -2011 -40 -##t -2010 -##o -##d -##i -2009 -##n -app -www -the -##m -31 -##c -##l -##y -##r -##g -2008 -60 -http -200 -qq -##p -80 -##f -google -pixnet -90 -cookies -tripadvisor -500 -##er -##k -35 -##h -facebook -2007 -2000 -70 -##b -of -##x -##u -45 -300 -iphone -32 -1000 -2006 -48 -ip -36 -in -38 -3d -##w -##ing -55 -ctrip -##on -##v -33 -##の -to -34 -400 -id -2005 -it -37 -windows -llc -top -99 -42 -39 -000 -led -at -##an -41 -51 -52 -46 -49 -43 -53 -44 -##z -android -58 -and -59 -2004 -56 -vr -##か -5000 -2003 -47 -blogthis -twitter -54 -##le -150 -ok -2018 -57 -75 -cn -no -ios -##in -##mm -##00 -800 -on -te -3000 -65 -2001 -360 -95 -ig -lv -120 -##ng -##を -##us -##に -pc -てす -── -600 -##te -85 -2002 -88 -##ed -html -ncc -wifi -email -64 -blog -is -##10 -##て -mail -online -##al -dvd -##ic -studio -##は -##℃ -##ia -##と -line -vip -72 -##q -98 -##ce -##en -for -##is -##ra -##es -##j -usb -net -cp -1999 -asia -4g -##cm -diy -new -3c -##お -ta -66 -language -vs -apple -tw -86 -web -##ne -ipad -62 -you -##re -101 -68 -##tion -ps -de -bt -pony -atm -##2017 -1998 -67 -##ch -ceo -##or -go -##na -av -pro -cafe -96 -pinterest -97 -63 -pixstyleme3c -##ta -more -said -##2016 -1997 -mp3 -700 -##ll -nba -jun -##20 -92 -tv -1995 -pm -61 -76 -nbsp -250 -##ie -linux -##ma -cd -110 -hd -##17 -78 -##ion -77 -6000 -am -##th -##st -94 -##se -##et -69 -180 -gdp -my -105 -81 -abc -89 -flash -79 -one -93 -1990 -1996 -##ck -gps -##も -##ly -web885 -106 -2020 -91 -##ge -4000 -1500 -xd -boss -isbn -1994 -org -##ry -me -love -##11 -0fork -73 -##12 -3g -##ter -##ar -71 -82 -##la -hotel -130 -1970 -pk -83 -87 -140 -ie -##os -##30 -##el -74 -##50 -seo -cpu -##ml -p2p -84 -may -##る -sun -tue -internet -cc -posted -youtube -##at -##ン -##man -ii 
-##ル -##15 -abs -nt -pdf -yahoo -ago -1980 -##it -news -mac -104 -##てす -##me -##り -java -1992 -spa -##de -##nt -hk -all -plus -la -1993 -##mb -##16 -##ve -west -##da -160 -air -##い -##ps -から -##to -1989 -logo -htc -php -https -fi -momo -##son -sat -##ke -##80 -ebd -suv -wi -day -apk -##88 -##um -mv -galaxy -wiki -or -brake -##ス -1200 -する -this -1991 -mon -##こ -❤2017 -po -##ない -javascript -life -home -june -##ss -system -900 -##ー -##0 -pp -1988 -world -fb -4k -br -##as -ic -ai -leonardo -safari -##60 -live -free -xx -wed -win7 -kiehl -##co -lg -o2o -##go -us -235 -1949 -mm -しい -vfm -kanye -##90 -##2015 -##id -jr -##ey -123 -rss -##sa -##ro -##am -##no -thu -fri -350 -##sh -##ki -103 -comments -name -##のて -##pe -##ine -max -1987 -8000 -uber -##mi -##ton -wordpress -office -1986 -1985 -##ment -107 -bd -win10 -##ld -##li -gmail -bb -dior -##rs -##ri -##rd -##ます -up -cad -##® -dr -して -read -##21 -をお -##io -##99 -url -1984 -pvc -paypal -show -policy -##40 -##ty -##18 -with -##★ -##01 -txt -102 -##ba -dna -from -post -mini -ar -taiwan -john -##ga -privacy -agoda -##13 -##ny -word -##24 -##22 -##by -##ur -##hz -1982 -##ang -265 -cookie -netscape -108 -##ka -##~ -##ad -house -share -note -ibm -code -hello -nike -sim -survey -##016 -1979 -1950 -wikia -##32 -##017 -5g -cbc -##tor -##kg -1983 -##rt -##14 -campaign -store -2500 -os -##ct -##ts -##° -170 -api -##ns -365 -excel -##な -##ao -##ら -##し -~~ -##nd -university -163 -には -518 -##70 -##ya -##il -##25 -pierre -ipo -0020 -897 -##23 -hotels -##ian -のお -125 -years -6606 -##ers -##26 -high -##day -time -##ay -bug -##line -##く -##す -##be -xp -talk2yam -yamservice -10000 -coco -##dy -sony -##ies -1978 -microsoft -david -people -##ha -1960 -instagram -intel -その -##ot -iso -1981 -##va -115 -##mo -##land -xxx -man -co -ltxsw -##ation -baby -220 -##pa -##ol -1945 -7000 -tag -450 -##ue -msn -##31 -oppo -##ト -##ca -control -##om -st -chrome -##ure -##ん -be -##き -lol -##19 -した -##bo -240 -lady -##100 -##way -##から -4600 -##ko -##do -##un 
-4s -corporation -168 -##ni -herme -##28 -cp -978 -##up -##06 -ui -##ds -ppt -admin -three -します -bbc -re -128 -##48 -ca -##015 -##35 -hp -##ee -tpp -##た -##ive -×× -root -##cc -##ました -##ble -##ity -adobe -park -114 -et -oled -city -##ex -##ler -##ap -china -##book -20000 -view -##ice -global -##km -your -hong -##mg -out -##ms -ng -ebay -##29 -menu -ubuntu -##cy -rom -##view -open -ktv -do -server -##lo -if -english -##ね -##5 -##oo -1600 -##02 -step1 -kong -club -135 -july -inc -1976 -mr -hi -##net -touch -##ls -##ii -michael -lcd -##05 -##33 -phone -james -step2 -1300 -ios9 -##box -dc -##2 -##ley -samsung -111 -280 -pokemon -css -##ent -##les -いいえ -##1 -s8 -atom -play -bmw -##said -sa -etf -ctrl -♥yoyo♥ -##55 -2025 -##2014 -##66 -adidas -amazon -1958 -##ber -##ner -visa -##77 -##der -1800 -connectivity -##hi -firefox -109 -118 -hr -so -style -mark -pop -ol -skip -1975 -as -##27 -##ir -##61 -190 -mba -##う -##ai -le -##ver -1900 -cafe2017 -lte -super -113 -129 -##ron -amd -like -##☆ -are -##ster -we -##sk -paul -data -international -##ft -longchamp -ssd -good -##ート -##ti -reply -##my -↓↓↓ -apr -star -##ker -source -136 -js -112 -get -force -photo -##one -126 -##2013 -##ow -link -bbs -1972 -goods -##lin -python -119 -##ip -game -##ics -##ません -blue -##● -520 -##45 -page -itunes -##03 -1955 -260 -1968 -gt -gif -618 -##ff -##47 -group -くたさい -about -bar -ganji -##nce -music -lee -not -1977 -1971 -1973 -##per -an -faq -comment -##って -days -##ock -116 -##bs -1974 -1969 -v1 -player -1956 -xbox -sql -fm -f1 -139 -##ah -210 -##lv -##mp -##000 -melody -1957 -##3 -550 -17life -199 -1966 -xml -market -##au -##71 -999 -##04 -what -gl -##95 -##age -tips -##68 -book -##ting -mysql -can -1959 -230 -##ung -wonderland -watch -10℃ -##ction -9000 -mar -mobile -1946 -1962 -article -##db -part -▲top -party -って -1967 -1964 -1948 -##07 -##ore -##op -この -dj -##78 -##38 -010 -main -225 -1965 -##ong -art -320 -ad -134 -020 -##73 -117 -pm2 -japan -228 -##08 -ts -1963 -##ica -der -sm -##36 -2019 
-##wa -ct -##7 -##や -##64 -1937 -homemesh -search -##85 -##れは -##tv -##di -macbook -##9 -##くたさい -service -##♥ -type -った -750 -##ier -##si -##75 -##います -##ok -best -##ット -goris -lock -##った -cf -3m -big -##ut -ftp -carol -##vi -10 -1961 -happy -sd -##ac -122 -anti -pe -cnn -iii -1920 -138 -##ラ -1940 -esp -jan -tags -##98 -##51 -august -vol -##86 -154 -##™ -##fs -##れ -##sion -design -ac -##ム -press -jordan -ppp -that -key -check -##6 -##tt -##㎡ -1080p -##lt -power -##42 -1952 -##bc -vivi -##ック -he -133 -121 -jpg -##rry -201 -175 -3500 -1947 -nb -##ted -##rn -しています -1954 -usd -##t00 -master -##ンク -001 -model -##58 -al -##09 -1953 -##34 -ram -goo -ても -##ui -127 -1930 -red -##ary -rpg -item -##pm -##41 -270 -##za -project -##2012 -hot -td -blogabstract -##ger -##62 -650 -##44 -gr2 -##します -##m -black -electronic -nfc -year -asus -また -html5 -cindy -##hd -m3 -132 -esc -##od -booking -##53 -fed -tvb -##81 -##ina -mit -165 -##いる -chan -192 -distribution -next -になる -peter -bios -steam -cm -1941 -にも -pk10 -##ix -##65 -##91 -dec -nasa -##ana -icecat -00z -b1 -will -##46 -li -se -##ji -##み -##ard -oct -##ain -jp -##ze -##bi -cio -##56 -smart -h5 -##39 -##port -curve -vpn -##nm -##dia -utc -##あり -12345678910 -##52 -rmvb -chanel -a4 -miss -##and -##im -media -who -##63 -she -girl -5s -124 -vera -##して -class -vivo -king -##フ -##ei -national -ab -1951 -5cm -888 -145 -ipod -ap -1100 -5mm -211 -ms -2756 -##69 -mp4 -msci -##po -##89 -131 -mg -index -380 -##bit -##out -##zz -##97 -##67 -158 -apec -##8 -photoshop -opec -¥799 -ては -##96 -##tes -##ast -2g -○○ -##ール -¥2899 -##ling -##よ -##ory -1938 -##ical -kitty -content -##43 -step3 -##cn -win8 -155 -vc -1400 -iphone7 -robert -##した -tcl -137 -beauty -##87 -en -dollars -##ys -##oc -step -pay -yy -a1 -##2011 -##lly -##ks -##♪ -1939 -188 -download -1944 -sep -exe -ph -います -school -gb -center -pr -street -##board -uv -##37 -##lan -winrar -##que -##ua -##com -1942 -1936 -480 -gpu -##4 -ettoday -fu -tom -##54 -##ren -##via -149 -##72 -b2b -144 
-##79 -##tch -rose -arm -mb -##49 -##ial -##nn -nvidia -step4 -mvp -00㎡ -york -156 -##イ -how -cpi -591 -2765 -gov -kg -joe -##xx -mandy -pa -##ser -copyright -fashion -1935 -don -##け -ecu -##ist -##art -erp -wap -have -##lm -talk -##ek -##ning -##if -ch -##ite -video -1943 -cs -san -iot -look -##84 -##2010 -##ku -october -##ux -trump -##hs -##ide -box -141 -first -##ins -april -##ight -##83 -185 -angel -protected -aa -151 -162 -x1 -m2 -##fe -##× -##ho -size -143 -min -ofo -fun -gomaji -ex -hdmi -food -dns -march -chris -kevin -##のか -##lla -##pp -##ec -ag -ems -6s -720p -##rm -##ham -off -##92 -asp -team -fandom -ed -299 -▌♥ -##ell -info -されています -##82 -sina -4066 -161 -##able -##ctor -330 -399 -315 -dll -rights -ltd -idc -jul -3kg -1927 -142 -ma -surface -##76 -##ク -~~~ -304 -mall -eps -146 -green -##59 -map -space -donald -v2 -sodu -##light -1931 -148 -1700 -まて -310 -reserved -htm -##han -##57 -2d -178 -mod -##ise -##tions -152 -ti -##shi -doc -1933 -icp -055 -wang -##ram -shopping -aug -##pi -##well -now -wam -b2 -からお -##hu -236 -1928 -##gb -266 -f2 -##93 -153 -mix -##ef -##uan -bwl -##plus -##res -core -##ess -tea -5℃ -hktvmall -nhk -##ate -list -##ese -301 -feb -4m -inn -ての -nov -159 -12345 -daniel -##ci -pass -##bet -##nk -coffee -202 -ssl -airbnb -##ute -fbi -woshipm -skype -ea -cg -sp -##fc -##www -yes -edge -alt -007 -##94 -fpga -##ght -##gs -iso9001 -さい -##ile -##wood -##uo -image -lin -icon -american -##em -1932 -set -says -##king -##tive -blogger -##74 -なと -256 -147 -##ox -##zy -##red -##ium -##lf -nokia -claire -##リ -##ding -november -lohas -##500 -##tic -##マ -##cs -##ある -##che -##ire -##gy -##ult -db -january -win -##カ -166 -road -ptt -##ま -##つ -198 -##fa -##mer -anna -pchome -はい -udn -ef -420 -##time -##tte -2030 -##ア -g20 -white -かかります -1929 -308 -garden -eleven -di -##おります -chen -309b -777 -172 -young -cosplay -ちてない -4500 -bat -##123 -##tra -##ては -kindle -npc -steve -etc -##ern -##| -call -xperia -ces -travel -sk -s7 -##ous -1934 -##int -みいたたけます -183 
-edu -file -cho -qr -##car -##our -186 -##ant -##d -eric -1914 -rends -##jo -##する -mastercard -##2000 -kb -##min -290 -##ino -vista -##ris -##ud -jack -2400 -##set -169 -pos -1912 -##her -##ou -taipei -しく -205 -beta -##ませんか -232 -##fi -express -255 -body -##ill -aphojoy -user -december -meiki -##ick -tweet -richard -##av -##ᆫ -iphone6 -##dd -ちてすか -views -##mark -321 -pd -##00 -times -##▲ -level -##ash -10g -point -5l -##ome -208 -koreanmall -##ak -george -q2 -206 -wma -tcp -##200 -スタッフ -full -mlb -##lle -##watch -tm -run -179 -911 -smith -business -##und -1919 -color -##tal -222 -171 -##less -moon -4399 -##rl -update -pcb -shop -499 -157 -little -なし -end -##mhz -van -dsp -easy -660 -##house -##key -history -##o -oh -##001 -##hy -##web -oem -let -was -##2009 -##gg -review -##wan -182 -##°c -203 -uc -title -##val -united -233 -2021 -##ons -doi -trivago -overdope -sbs -##ance -##ち -grand -special -573032185 -imf -216 -wx17house -##so -##ーム -audi -##he -london -william -##rp -##ake -science -beach -cfa -amp -ps4 -880 -##800 -##link -##hp -crm -ferragamo -bell -make -##eng -195 -under -zh -photos -2300 -##style -##ント -via -176 -da -##gi -company -i7 -##ray -thomas -370 -ufo -i5 -##max -plc -ben -back -research -8g -173 -mike -##pc -##ッフ -september -189 -##ace -vps -february -167 -pantos -wp -lisa -1921 -★★ -jquery -night -long -offer -##berg -##news -1911 -##いて -ray -fks -wto -せます -over -164 -340 -##all -##rus -1924 -##888 -##works -blogtitle -loftpermalink -##→ -187 -martin -test -ling -km -##め -15000 -fda -v3 -##ja -##ロ -wedding -かある -outlet -family -##ea -をこ -##top -story -##ness -salvatore -##lu -204 -swift -215 -room -している -oracle -##ul -1925 -sam -b2c -week -pi -rock -##のは -##a -##けと -##ean -##300 -##gle -cctv -after -chinese -##back -powered -x2 -##tan -1918 -##nes -##イン -canon -only -181 -##zi -##las -say -##oe -184 -##sd -221 -##bot -##world -##zo -sky -made -top100 -just -1926 -pmi -802 -234 -gap -##vr -177 -les -174 -▲topoct -ball -vogue -vi -ing -ofweek -cos 
-##list -##ort -▲topmay -##なら -##lon -として -last -##tc -##of -##bus -##gen -real -eva -##コ -a3 -nas -##lie -##ria -##coin -##bt -▲topapr -his -212 -cat -nata -vive -health -⋯⋯ -drive -sir -▲topmar -du -cup -##カー -##ook -##よう -##sy -alex -msg -tour -しました -3ce -##word -193 -ebooks -r8 -block -318 -##より -2200 -nice -pvp -207 -months -1905 -rewards -##ther -1917 -0800 -##xi -##チ -##sc -micro -850 -gg -blogfp -op -1922 -daily -m1 -264 -true -##bb -ml -##tar -##のお -##ky -anthony -196 -253 -##yo -state -218 -##ara -##aa -##rc -##tz -##ston -より -gear -##eo -##ade -ge -see -1923 -##win -##ura -ss -heart -##den -##ita -down -##sm -el -png -2100 -610 -rakuten -whatsapp -bay -dream -add -##use -680 -311 -pad -gucci -mpv -##ode -##fo -island -▲topjun -##▼ -223 -jason -214 -chicago -##❤ -しの -##hone -io -##れる -##ことか -sogo -be2 -##ology -990 -cloud -vcd -##con -2~3 -##ford -##joy -##kb -##こさいます -##rade -but -##ach -docker -##ful -rfid -ul -##ase -hit -ford -##star -580 -##○ -11 -a2 -sdk -reading -edited -##are -cmos -##mc -238 -siri -light -##ella -##ため -bloomberg -##read -pizza -##ison -jimmy -##vm -college -node -journal -ba -18k -##play -245 -##cer -20 -magic -##yu -191 -jump -288 -tt -##ings -asr -##lia -3200 -step5 -network -##cd -mc -いします -1234 -pixstyleme -273 -##600 -2800 -money -★★★★★ -1280 -12 -430 -bl -みの -act -##tus -tokyo -##rial -##life -emba -##ae -saas -tcs -##rk -##wang -summer -##sp -ko -##ving -390 -premium -##その -netflix -##ヒ -uk -mt -##lton -right -frank -two -209 -える -##ple -##cal -021 -##んな -##sen -##ville -hold -nexus -dd -##ius -てお -##mah -##なく -tila -zero -820 -ce -##tin -resort -##ws -charles -old -p10 -5d -report -##360 -##ru -##には -bus -vans -lt -##est -pv -##レ -links -rebecca -##ツ -##dm -azure -##365 -きな -limited -bit -4gb -##mon -1910 -moto -##eam -213 -1913 -var -eos -なとの -226 -blogspot -された -699 -e3 -dos -dm -fc -##ments -##ik -##kw -boy -##bin -##ata -960 -er -##せ -219 -##vin -##tu -##ula -194 -##∥ -station -##ろ -##ature -835 -files -zara -hdr 
-top10 -nature -950 -magazine -s6 -marriott -##シ -avira -case -##っと -tab -##ran -tony -##home -oculus -im -##ral -jean -saint -cry -307 -rosie -##force -##ini -ice -##bert -のある -##nder -##mber -pet -2600 -##◆ -plurk -▲topdec -##sis -00kg -▲topnov -720 -##ence -tim -##ω -##nc -##ても -##name -log -ips -great -ikea -malaysia -unix -##イト -3600 -##ncy -##nie -12000 -akb48 -##ye -##oid -404 -##chi -##いた -oa -xuehai -##1000 -##orm -##rf -275 -さん -##ware -##リー -980 -ho -##pro -text -##era -560 -bob -227 -##ub -##2008 -8891 -scp -avi -##zen -2022 -mi -wu -museum -qvod -apache -lake -jcb -▲topaug -★★★ -ni -##hr -hill -302 -ne -weibo -490 -ruby -##ーシ -##ヶ -##row -4d -▲topjul -iv -##ish -github -306 -mate -312 -##スト -##lot -##ane -andrew -のハイト -##tina -t1 -rf -ed2k -##vel -##900 -way -final -りの -ns -5a -705 -197 -##メ -sweet -bytes -##ene -▲topjan -231 -##cker -##2007 -##px -100g -topapp -229 -helpapp -rs -low -14k -g4g -care -630 -ldquo -あり -##fork -leave -rm -edition -##gan -##zon -##qq -▲topsep -##google -##ism -gold -224 -explorer -##zer -toyota -category -select -visual -##labels -restaurant -##md -posts -s1 -##ico -もっと -angelababy -123456 -217 -sports -s3 -mbc -1915 -してくたさい -shell -x86 -candy -##new -kbs -face -xl -470 -##here -4a -swissinfo -v8 -▲topfeb -dram -##ual -##vice -3a -##wer -sport -q1 -ios10 -public -int -card -##c -ep -au -rt -##れた -1080 -bill -##mll -kim -30 -460 -wan -##uk -##ミ -x3 -298 -0t -scott -##ming -239 -e5 -##3d -h7n9 -worldcat -brown -##あります -##vo -##led -##580 -##ax -249 -410 -##ert -paris -##~6 -polo -925 -##lr -599 -##ナ -capital -##hing -bank -cv -1g -##chat -##s -##たい -adc -##ule -2m -##e -digital -hotmail -268 -##pad -870 -bbq -quot -##ring -before -wali -##まて -mcu -2k -2b -という -costco -316 -north -333 -switch -##city -##p -philips -##mann -management -panasonic -##cl -##vd -##ping -##rge -alice -##lk -##ましょう -css3 -##ney -vision -alpha -##ular -##400 -##tter -lz -にお -##ありません -mode -gre -1916 -pci -##tm -237 -1~2 -##yan -##そ -について -##let -##キ 
-work -war -coach -ah -mary -##ᅵ -huang -##pt -a8 -pt -follow -##berry -1895 -##ew -a5 -ghost -##ション -##wn -##og -south -##code -girls -##rid -action -villa -git -r11 -table -games -##cket -error -##anonymoussaid -##ag -here -##ame -##gc -qa -##■ -##lis -gmp -##gin -vmalife -##cher -yu -wedding -##tis -demo -dragon -530 -soho -social -bye -##rant -river -orz -acer -325 -##↑ -##ース -##ats -261 -del -##ven -440 -ups -##ように -##ター -305 -value -macd -yougou -##dn -661 -##ano -ll -##urt -##rent -continue -script -##wen -##ect -paper -263 -319 -shift -##chel -##フト -##cat -258 -x5 -fox -243 -##さん -car -aaa -##blog -loading -##yn -##tp -kuso -799 -si -sns -イカせるテンマ -ヒンクテンマ3 -rmb -vdc -forest -central -prime -help -ultra -##rmb -##ような -241 -square -688 -##しい -のないフロクに -##field -##reen -##ors -##ju -c1 -start -510 -##air -##map -cdn -##wo -cba -stephen -m8 -100km -##get -opera -##base -##ood -vsa -com™ -##aw -##ail -251 -なのて -count -t2 -##ᅡ -##een -2700 -hop -##gp -vsc -tree -##eg -##ose -816 -285 -##ories -##shop -alphago -v4 -1909 -simon -##ᆼ -fluke62max -zip -スホンサー -##sta -louis -cr -bas -##~10 -bc -##yer -hadoop -##ube -##wi -1906 -0755 -hola -##low -place -centre -5v -d3 -##fer -252 -##750 -##media -281 -540 -0l -exchange -262 -series -##ハー -##san -eb -##bank -##k -q3 -##nge -##mail -take -##lp -259 -1888 -client -east -cache -event -vincent -##ールを -きを -##nse -sui -855 -adchoice -##и -##stry -##なたの -246 -##zone -ga -apps -sea -##ab -248 -cisco -##タ -##rner -kymco -##care -dha -##pu -##yi -minkoff -royal -p1 -への -annie -269 -collection -kpi -playstation -257 -になります -866 -bh -##bar -queen -505 -radio -1904 -andy -armani -##xy -manager -iherb -##ery -##share -spring -raid -johnson -1908 -##ob -volvo -hall -##ball -v6 -our -taylor -##hk -bi -242 -##cp -kate -bo -water -technology -##rie -サイトは -277 -##ona -##sl -hpv -303 -gtx -hip -rdquo -jayz -stone -##lex -##rum -namespace -##やり -620 -##ale -##atic -des -##erson -##ql -##ves -##type -enter -##この -##てきます -d2 -##168 -##mix 
-##bian -との -a9 -jj -ky -##lc -access -movie -##hc -リストに -tower -##ration -##mit -ます -##nch -ua -tel -prefix -##o2 -1907 -##point -1901 -ott -~10 -##http -##ury -baidu -##ink -member -##logy -bigbang -nownews -##js -##shot -##tb -##こと -247 -eba -##tics -##lus -ける -v5 -spark -##ama -there -##ions -god -##lls -##down -hiv -##ress -burberry -day2 -##kv -◆◆ -jeff -related -film -edit -joseph -283 -##ark -cx -32gb -order -g9 -30000 -##ans -##tty -s5 -##bee -かあります -thread -xr -buy -sh -005 -land -spotify -mx -##ari -276 -##verse -×email -sf -why -##ことて -244 -7headlines -nego -sunny -dom -exo -401 -666 -positioning -fit -rgb -##tton -278 -kiss -alexa -adam -lp -みリストを -##g -mp -##ties -##llow -amy -##du -np -002 -institute -271 -##rth -##lar -2345 -590 -##des -sidebar -15 -imax -site -##cky -##kit -##ime -##009 -season -323 -##fun -##ンター -##ひ -gogoro -a7 -pu -lily -fire -twd600 -##ッセーシを -いて -##vis -30ml -##cture -##をお -information -##オ -close -friday -##くれる -yi -nick -てすか -##tta -##tel -6500 -##lock -cbd -economy -254 -かお -267 -tinker -double -375 -8gb -voice -##app -oops -channel -today -985 -##right -raw -xyz -##+ -jim -edm -##cent -7500 -supreme -814 -ds -##its -##asia -dropbox -##てすか -##tti -books -272 -100ml -##tle -##ller -##ken -##more -##boy -sex -309 -##dom -t3 -##ider -##なります -##unch -1903 -810 -feel -5500 -##かった -##put -により -s2 -mo -##gh -men -ka -amoled -div -##tr -##n1 -port -howard -##tags -ken -dnf -##nus -adsense -##а -ide -##へ -buff -thunder -##town -##ique -has -##body -auto -pin -##erry -tee -てした -295 -number -##the -##013 -object -psp -cool -udnbkk -16gb -##mic -miui -##tro -most -r2 -##alk -##nity -1880 -±0 -##いました -428 -s4 -law -version -##oa -n1 -sgs -docomo -##tf -##ack -henry -fc2 -##ded -##sco -##014 -##rite -286 -0mm -linkedin -##ada -##now -wii -##ndy -ucbug -##◎ -sputniknews -legalminer -##ika -##xp -2gb -##bu -q10 -oo -b6 -come -##rman -cheese -ming -maker -##gm -nikon -##fig -ppi -kelly -##ります -jchere -てきます -ted -md -003 -fgo -tech -##tto 
-dan -soc -##gl -##len -hair -earth -640 -521 -img -##pper -##a1 -##てきる -##ロク -acca -##ition -##ference -suite -##ig -outlook -##mond -##cation -398 -##pr -279 -101vip -358 -##999 -282 -64gb -3800 -345 -airport -##over -284 -##おり -jones -##ith -lab -##su -##いるのて -co2 -town -piece -##llo -no1 -vmware -24h -##qi -focus -reader -##admin -##ora -tb -false -##log -1898 -know -lan -838 -##ces -f4 -##ume -motel -stop -##oper -na -flickr -netcomponents -##af -##─ -pose -williams -local -##ound -##cg -##site -##iko -いお -274 -5m -gsm -con -##ath -1902 -friends -##hip -cell -317 -##rey -780 -cream -##cks -012 -##dp -facebooktwitterpinterestgoogle -sso -324 -shtml -song -swiss -##mw -##キンク -lumia -xdd -string -tiffany -522 -marc -られた -insee -russell -sc -dell -##ations -ok -camera -289 -##vs -##flow -##late -classic -287 -##nter -stay -g1 -mtv -512 -##ever -##lab -##nger -qe -sata -ryan -d1 -50ml -cms -##cing -su -292 -3300 -editor -296 -##nap -security -sunday -association -##ens -##700 -##bra -acg -##かり -sofascore -とは -mkv -##ign -jonathan -gary -build -labels -##oto -tesla -moba -qi -gohappy -general -ajax -1024 -##かる -サイト -society -##test -##urs -wps -fedora -##ich -mozilla -328 -##480 -##dr -usa -urn -##lina -##r -grace -##die -##try -##ader -1250 -##なり -elle -570 -##chen -##ᆯ -price -##ten -uhz -##ough -eq -##hen -states -push -session -balance -wow -506 -##cus -##py -when -##ward -##ep -34e -wong -library -prada -##サイト -##cle -running -##ree -313 -ck -date -q4 -##ctive -##ool -##> -mk -##ira -##163 -388 -die -secret -rq -dota -buffet -は1ヶ -e6 -##ez -pan -368 -ha -##card -##cha -2a -##さ -alan -day3 -eye -f3 -##end -france -keep -adi -rna -tvbs -##ala -solo -nova -##え -##tail -##ょう -support -##ries -##なる -##ved -base -copy -iis -fps -##ways -hero -hgih -profile -fish -mu -ssh -entertainment -chang -##wd -click -cake -##ond -pre -##tom -kic -pixel -##ov -##fl -product -6a -##pd -dear -##gate -es -yumi -audio -##² -##sky -echo -bin -where -##ture -329 -##ape -find -sap 
-isis -##なと -nand -##101 -##load -##ream -band -a6 -525 -never -##post -festival -50cm -##we -555 -guide -314 -zenfone -##ike -335 -gd -forum -jessica -strong -alexander -##ould -software -allen -##ious -program -360° -else -lohasthree -##gar -することかてきます -please -##れます -rc -##ggle -##ric -bim -50000 -##own -eclipse -355 -brian -3ds -##side -061 -361 -##other -##ける -##tech -##ator -485 -engine -##ged -##t -plaza -##fit -cia -ngo -westbrook -shi -tbs -50mm -##みませんか -sci -291 -reuters -##ily -contextlink -##hn -af -##cil -bridge -very -##cel -1890 -cambridge -##ize -15g -##aid -##data -790 -frm -##head -award -butler -##sun -meta -##mar -america -ps3 -puma -pmid -##すか -lc -670 -kitchen -##lic -オーフン5 -きなしソフトサーヒス -そして -day1 -future -★★★★ -##text -##page -##rris -pm1 -##ket -fans -##っています -1001 -christian -bot -kids -trackback -##hai -c3 -display -##hl -n2 -1896 -idea -さんも -##sent -airmail -##ug -##men -pwm -けます -028 -##lution -369 -852 -awards -schemas -354 -asics -wikipedia -font -##tional -##vy -c2 -293 -##れている -##dget -##ein -っている -contact -pepper -スキル -339 -##~5 -294 -##uel -##ument -730 -##hang -みてす -q5 -##sue -rain -##ndi -wei -swatch -##cept -わせ -331 -popular -##ste -##tag -p2 -501 -trc -1899 -##west -##live -justin -honda -ping -messenger -##rap -v9 -543 -##とは -unity -appqq -はすへて -025 -leo -##tone -##テ -##ass -uniqlo -##010 -502 -her -jane -memory -moneydj -##tical -human -12306 -していると -##m2 -coc -miacare -##mn -tmt -##core -vim -kk -##may -fan -target -use -too -338 -435 -2050 -867 -737 -fast -##2c -services -##ope -omega -energy -##わ -pinkoi -1a -##なから -##rain -jackson -##ement -##シャンルの -374 -366 -そんな -p9 -rd -##ᆨ -1111 -##tier -##vic -zone -##│ -385 -690 -dl -isofix -cpa -m4 -322 -kimi -めて -davis -##lay -lulu -##uck -050 -weeks -qs -##hop -920 -##n -ae -##ear -~5 -eia -405 -##fly -korea -jpeg -boost -##ship -small -##リア -1860 -eur -297 -425 -valley -##iel -simple -##ude -rn -k2 -##ena -されます -non -patrick -しているから -##ナー -feed -5757 -30g -process -well -qqmei 
-##thing -they -aws -lu -pink -##ters -##kin -または -board -##vertisement -wine -##ien -unicode -##dge -r1 -359 -##tant -いを -##twitter -##3c -cool1 -される -##れて -##l -isp -##012 -standard -45㎡2 -402 -##150 -matt -##fu -326 -##iner -googlemsn -pixnetfacebookyahoo -##ラン -x7 -886 -##uce -メーカー -sao -##ev -##きました -##file -9678 -403 -xddd -shirt -6l -##rio -##hat -3mm -givenchy -ya -bang -##lio -monday -crystal -ロクイン -##abc -336 -head -890 -ubuntuforumwikilinuxpastechat -##vc -##~20 -##rity -cnc -7866 -ipv6 -null -1897 -##ost -yang -imsean -tiger -##fet -##ンス -352 -##= -dji -327 -ji -maria -##come -##んて -foundation -3100 -##beth -##なった -1m -601 -active -##aft -##don -3p -sr -349 -emma -##khz -living -415 -353 -1889 -341 -709 -457 -sas -x6 -##face -pptv -x4 -##mate -han -sophie -##jing -337 -fifa -##mand -other -sale -inwedding -##gn -てきちゃいます -##mmy -##pmlast -bad -nana -nbc -してみてくたさいね -なとはお -##wu -##かあります -##あ -note7 -single -##340 -せからこ -してくたさい♪この -しにはとんとんワークケートを -するとあなたにもっとマッチした -ならワークケートへ -もみつかっちゃうかも -ワークケートの -##bel -window -##dio -##ht -union -age -382 -14 -##ivity -##y -コメント -domain -neo -##isa -##lter -5k -f5 -steven -##cts -powerpoint -tft -self -g2 -ft -##テル -zol -##act -mwc -381 -343 -もう -nbapop -408 -てある -eds -ace -##room -previous -author -tomtom -il -##ets -hu -financial -☆☆☆ -っています -bp -5t -chi -1gb -##hg -fairmont -cross -008 -gay -h2 -function -##けて -356 -also -1b -625 -##ータ -##raph -1894 -3~5 -##ils -i3 -334 -avenue -##host -による -##bon -##tsu -message -navigation -50g -fintech -h6 -##ことを -8cm -##ject -##vas -##firm -credit -##wf -xxxx -form -##nor -##space -huawei -plan -json -sbl -##dc -machine -921 -392 -wish -##120 -##sol -windows7 -edward -##ために -development -washington -##nsis -lo -818 -##sio -##ym -##bor -planet -##~8 -##wt -ieee -gpa -##めて -camp -ann -gm -##tw -##oka -connect -##rss -##work -##atus -wall -chicken -soul -2mm -##times -fa -##ather -##cord -009 -##eep -hitachi -gui -harry -##pan -e1 -disney -##press -##ーション -wind -386 -frigidaire -##tl 
-liu -hsu -332 -basic -von -ev -いた -てきる -スホンサーサイト -learning -##ull -expedia -archives -change -##wei -santa -cut -ins -6gb -turbo -brand -cf1 -508 -004 -return -747 -##rip -h1 -##nis -##をこ -128gb -##にお -3t -application -しており -emc -rx -##oon -384 -quick -412 -15058 -wilson -wing -chapter -##bug -beyond -##cms -##dar -##oh -zoom -e2 -trip -sb -##nba -rcep -342 -aspx -ci -080 -gc -gnu -める -##count -advanced -dance -dv -##url -##ging -367 -8591 -am09 -shadow -battle -346 -##i -##cia -##という -emily -##のてす -##tation -host -ff -techorz -sars -##mini -##mporary -##ering -nc -4200 -798 -##next -cma -##mbps -##gas -##ift -##dot -##ィ -455 -##~17 -amana -##りの -426 -##ros -ir -00㎡1 -##eet -##ible -##↓ -710 -ˋ▽ˊ -##aka -dcs -iq -##v -l1 -##lor -maggie -##011 -##iu -588 -##~1 -830 -##gt -1tb -articles -create -##burg -##iki -database -fantasy -##rex -##cam -dlc -dean -##you -hard -path -gaming -victoria -maps -cb -##lee -##itor -overchicstoretvhome -systems -##xt -416 -p3 -sarah -760 -##nan -407 -486 -x9 -install -second -626 -##ann -##ph -##rcle -##nic -860 -##nar -ec -##とう -768 -metro -chocolate -##rian -~4 -##table -##しています -skin -##sn -395 -mountain -##0mm -inparadise -6m -7x24 -ib -4800 -##jia -eeworld -creative -g5 -g3 -357 -parker -ecfa -village -からの -18000 -sylvia -サーヒス -hbl -##ques -##onsored -##x2 -##きます -##v4 -##tein -ie6 -383 -##stack -389 -ver -##ads -##baby -sound -bbe -##110 -##lone -##uid -ads -022 -gundam -351 -thinkpad -006 -scrum -match -##ave -mems -##470 -##oy -##なりました -##talk -glass -lamigo -span -##eme -job -##a5 -jay -wade -kde -498 -##lace -ocean -tvg -##covery -##r3 -##ners -##rea -junior -think -##aine -cover -##ision -##sia -↓↓ -##bow -msi -413 -458 -406 -##love -711 -801 -soft -z2 -##pl -456 -1840 -mobil -mind -##uy -427 -nginx -##oi -めた -##rr -6221 -##mple -##sson -##ーシてす -371 -##nts -91tv -comhd -crv3000 -##uard -1868 -397 -deep -lost -field -gallery -##bia -rate -spf -redis -traction -930 -icloud -011 -なら -fe -jose -372 -##tory -into -sohu -fx -899 
-379 -kicstart2 -##hia -すく -##~3 -##sit -ra -24 -##walk -##xure -500g -##pact -pacific -xa -natural -carlo -##250 -##walker -1850 -##can -cto -gigi -516 -##サー -pen -##hoo -ob -matlab -##b -##yy -13913459 -##iti -mango -##bbs -sense -c5 -oxford -##ニア -walker -jennifer -##ola -course -##bre -701 -##pus -##rder -lucky -075 -##ぁ -ivy -なお -##nia -sotheby -side -##ugh -joy -##orage -##ush -##bat -##dt -364 -r9 -##2d -##gio -511 -country -wear -##lax -##~7 -##moon -393 -seven -study -411 -348 -lonzo -8k -##ェ -evolution -##イフ -##kk -gs -kd -##レス -arduino -344 -b12 -##lux -arpg -##rdon -cook -##x5 -dark -five -##als -##ida -とても -sign -362 -##ちの -something -20mm -##nda -387 -##posted -fresh -tf -1870 -422 -cam -##mine -##skip -##form -##ssion -education -394 -##tee -dyson -stage -##jie -want -##night -epson -pack -あります -##ppy -テリヘル -##█ -wd -##eh -##rence -left -##lvin -golden -mhz -discovery -##trix -##n2 -loft -##uch -##dra -##sse -speed -~1 -1mdb -sorry -welcome -##urn -wave -gaga -##lmer -teddy -##160 -トラックハック -せよ -611 -##f2016 -378 -rp -##sha -rar -##あなたに -##きた -840 -holiday -##ュー -373 -074 -##vg -##nos -##rail -gartner -gi -6p -##dium -kit -488 -b3 -eco -##ろう -20g -sean -##stone -autocad -nu -##np -f16 -write -029 -m5 -##ias -images -atp -##dk -fsm -504 -1350 -ve -52kb -##xxx -##のに -##cake -414 -unit -lim -ru -1v -##ification -published -angela -16g -analytics -ak -##q -##nel -gmt -##icon -again -##₂ -##bby -ios11 -445 -かこさいます -waze -いてす -##ハ -9985 -##ust -##ティー -framework -##007 -iptv -delete -52sykb -cl -wwdc -027 -30cm -##fw -##ての -1389 -##xon -brandt -##ses -##dragon -tc -vetements -anne -monte -modern -official -##へて -##ere -##nne -##oud -もちろん -50 -etnews -##a2 -##graphy -421 -863 -##ちゃん -444 -##rtex -##てお -l2 -##gma -mount -ccd -たと -archive -morning -tan -ddos -e7 -##ホ -day4 -##ウ -gis -453 -its -495 -factory -bruce -pg -##ito -ってくたさい -guest -cdma -##lling -536 -n3 -しかし -3~4 -mega -eyes -ro -13 -women -dac -church -##jun -singapore -##facebook -6991 -starbucks 
-##tos -##stin -##shine -zen -##mu -tina -20℃ -1893 -##たけて -503 -465 -request -##gence -qt -##っ -1886 -347 -363 -q7 -##zzi -diary -##tore -409 -##ead -468 -cst -##osa -canada -agent -va -##jiang -##ちは -##ーク -##lam -sg -##nix -##sday -##よって -g6 -##master -bing -##zl -charlie -16 -8mm -nb40 -##ーン -thai -##ルフ -ln284ct -##itz -##2f -bonnie -##food -##lent -originals -##stro -##lts -418 -∟∣ -##bscribe -children -ntd -yesstyle -##かも -hmv -##tment -d5 -2cm -arts -sms -##pn -##я -##いい -topios9 -539 -lifestyle -virtual -##ague -xz -##deo -muji -024 -unt -##nnis -##ᅩ -faq1 -1884 -396 -##ette -fly -64㎡ -はしめまして -441 -curry -##pop -のこ -release -##← -##◆◆ -##cast -073 -ありな -500ml -##ews -5c -##stle -ios7 -##ima -787 -dog -lenovo -##r4 -roger -013 -cbs -vornado -100m -417 -##desk -##クok -##ald -1867 -9595 -2900 -##van -oil -##x -some -break -common -##jy -##lines -g7 -twice -419 -ella -nano -belle -にこ -##mes -##self -##note -jb -##ことかてきます -benz -##との -##ova -451 -save -##wing -##ますのて -kai -りは -##hua -##rect -rainer -##unge -448 -##0m -adsl -##かな -guestname -##uma -##kins -##zu -tokichoi -##price -county -##med -##mus -rmk -391 -address -vm -えて -openload -##group -##hin -##iginal -amg -urban -##oz -jobs -emi -##public -beautiful -##sch -album -##dden -##bell -jerry -works -hostel -miller -##drive -##rmin -##10 -376 -boot -828 -##370 -##fx -##cm~ -1885 -##nome -##ctionary -##oman -##lish -##cr -##hm -433 -##how -432 -francis -xi -c919 -b5 -evernote -##uc -vga -##3000 -coupe -##urg -##cca -##uality -019 -6g -れる -multi -##また -##ett -em -hey -##ani -##tax -##rma -inside -than -740 -leonnhurt -##jin -ict -れた -bird -notes -200mm -くの -##dical -##lli -result -442 -iu -ee -438 -smap -gopro -##last -yin -pure -998 -32g -けた -5kg -##dan -##rame -mama -##oot -bean -marketing -##hur -2l -bella -sync -xuite -##ground -515 -discuz -##getrelax -##ince -##bay -##5s -cj -##イス -gmat -apt -##pass -jing -##rix -c4 -rich -##とても -niusnews -##ello -bag -770 -##eting -##mobile -18 -culture -015 -##のてすか 
-377 -1020 -area -##ience -616 -details -gp -universal -silver -dit -はお -private -ddd -u11 -kanshu -##ified -fung -##nny -dx -##520 -tai -475 -023 -##fr -##lean -3s -##pin -429 -##rin -25000 -ly -rick -##bility -usb3 -banner -##baru -##gion -metal -dt -vdf -1871 -karl -qualcomm -bear -1010 -oldid -ian -jo -##tors -population -##ernel -1882 -mmorpg -##mv -##bike -603 -##© -ww -friend -##ager -exhibition -##del -##pods -fpx -structure -##free -##tings -kl -##rley -##copyright -##mma -california -3400 -orange -yoga -4l -canmake -honey -##anda -##コメント -595 -nikkie -##ルハイト -dhl -publishing -##mall -##gnet -20cm -513 -##クセス -##┅ -e88 -970 -##dog -fishbase -##! -##" -### -##$ -##% -##& -##' -##( -##) -##* -##+ -##, -##- -##. -##/ -##: -##; -##< -##= -##> -##? -##@ -##[ -##\ -##] -##^ -##_ -##{ -##| -##} -##~ -##£ -##¤ -##¥ -##§ -##« -##± -##³ -##µ -##· -##¹ -##º -##» -##¼ -##ß -##æ -##÷ -##ø -##đ -##ŋ -##ɔ -##ə -##ɡ -##ʰ -##ˇ -##ˈ -##ˊ -##ˋ -##ˍ -##ː -##˙ -##˚ -##ˢ -##α -##β -##γ -##δ -##ε -##η -##θ -##ι -##κ -##λ -##μ -##ν -##ο -##π -##ρ -##ς -##σ -##τ -##υ -##φ -##χ -##ψ -##б -##в -##г -##д -##е -##ж -##з -##к -##л -##м -##н -##о -##п -##р -##с -##т -##у -##ф -##х -##ц -##ч -##ш -##ы -##ь -##і -##ا -##ب -##ة -##ت -##د -##ر -##س -##ع -##ل -##م -##ن -##ه -##و -##ي -##۩ -##ก -##ง -##น -##ม -##ย -##ร -##อ -##า -##เ -##๑ -##་ -##ღ -##ᄀ -##ᄁ -##ᄂ -##ᄃ -##ᄅ -##ᄆ -##ᄇ -##ᄈ -##ᄉ -##ᄋ -##ᄌ -##ᄎ -##ᄏ -##ᄐ -##ᄑ -##ᄒ -##ᅢ -##ᅣ -##ᅥ -##ᅦ -##ᅧ -##ᅨ -##ᅪ -##ᅬ -##ᅭ -##ᅮ -##ᅯ -##ᅲ -##ᅳ -##ᅴ -##ᆷ -##ᆸ -##ᆺ -##ᆻ -##ᗜ -##ᵃ -##ᵉ -##ᵍ -##ᵏ -##ᵐ -##ᵒ -##ᵘ -##‖ -##„ -##† -##• -##‥ -##‧ -##
 -##‰ -##′ -##″ -##‹ -##› -##※ -##‿ -##⁄ -##ⁱ -##⁺ -##ⁿ -##₁ -##₃ -##₄ -##€ -##№ -##ⅰ -##ⅱ -##ⅲ -##ⅳ -##ⅴ -##↔ -##↗ -##↘ -##⇒ -##∀ -##− -##∕ -##∙ -##√ -##∞ -##∟ -##∠ -##∣ -##∩ -##∮ -##∶ -##∼ -##∽ -##≈ -##≒ -##≡ -##≤ -##≥ -##≦ -##≧ -##≪ -##≫ -##⊙ -##⋅ -##⋈ -##⋯ -##⌒ -##① -##② -##③ -##④ -##⑤ -##⑥ -##⑦ -##⑧ -##⑨ -##⑩ -##⑴ -##⑵ -##⑶ -##⑷ -##⑸ -##⒈ -##⒉ -##⒊ -##⒋ -##ⓒ -##ⓔ -##ⓘ -##━ -##┃ -##┆ -##┊ -##┌ -##└ -##├ -##┣ -##═ -##║ -##╚ -##╞ -##╠ -##╭ -##╮ -##╯ -##╰ -##╱ -##╳ -##▂ -##▃ -##▅ -##▇ -##▉ -##▋ -##▌ -##▍ -##▎ -##□ -##▪ -##▫ -##▬ -##△ -##▶ -##► -##▽ -##◇ -##◕ -##◠ -##◢ -##◤ -##☀ -##☕ -##☞ -##☺ -##☼ -##♀ -##♂ -##♠ -##♡ -##♣ -##♦ -##♫ -##♬ -##✈ -##✔ -##✕ -##✖ -##✦ -##✨ -##✪ -##✰ -##✿ -##❀ -##➜ -##➤ -##⦿ -##、 -##。 -##〃 -##々 -##〇 -##〈 -##〉 -##《 -##》 -##「 -##」 -##『 -##』 -##【 -##】 -##〓 -##〔 -##〕 -##〖 -##〗 -##〜 -##〝 -##〞 -##ぃ -##ぇ -##ぬ -##ふ -##ほ -##む -##ゃ -##ゅ -##ゆ -##ょ -##゜ -##ゝ -##ァ -##ゥ -##エ -##ォ -##ケ -##サ -##セ -##ソ -##ッ -##ニ -##ヌ -##ネ -##ノ -##ヘ -##モ -##ャ -##ヤ -##ュ -##ユ -##ョ -##ヨ -##ワ -##ヲ -##・ -##ヽ -##ㄅ -##ㄆ -##ㄇ -##ㄉ -##ㄋ -##ㄌ -##ㄍ -##ㄎ -##ㄏ -##ㄒ -##ㄚ -##ㄛ -##ㄞ -##ㄟ -##ㄢ -##ㄤ -##ㄥ -##ㄧ -##ㄨ -##ㆍ -##㈦ -##㊣ -##㗎 -##一 -##丁 -##七 -##万 -##丈 -##三 -##上 -##下 -##不 -##与 -##丐 -##丑 -##专 -##且 -##丕 -##世 -##丘 -##丙 -##业 -##丛 -##东 -##丝 -##丞 -##丟 -##両 -##丢 -##两 -##严 -##並 -##丧 -##丨 -##个 -##丫 -##中 -##丰 -##串 -##临 -##丶 -##丸 -##丹 -##为 -##主 -##丼 -##丽 -##举 -##丿 -##乂 -##乃 -##久 -##么 -##义 -##之 -##乌 -##乍 -##乎 -##乏 -##乐 -##乒 -##乓 -##乔 -##乖 -##乗 -##乘 -##乙 -##乜 -##九 -##乞 -##也 -##习 -##乡 -##书 -##乩 -##买 -##乱 -##乳 -##乾 -##亀 -##亂 -##了 -##予 -##争 -##事 -##二 -##于 -##亏 -##云 -##互 -##五 -##井 -##亘 -##亙 -##亚 -##些 -##亜 -##亞 -##亟 -##亡 -##亢 -##交 -##亥 -##亦 -##产 -##亨 -##亩 -##享 -##京 -##亭 -##亮 -##亲 -##亳 -##亵 -##人 -##亿 -##什 -##仁 -##仃 -##仄 -##仅 -##仆 -##仇 -##今 -##介 -##仍 -##从 -##仏 -##仑 -##仓 -##仔 -##仕 -##他 -##仗 -##付 -##仙 -##仝 -##仞 -##仟 -##代 -##令 -##以 -##仨 -##仪 -##们 -##仮 -##仰 -##仲 -##件 -##价 -##任 -##份 -##仿 -##企 -##伉 -##伊 -##伍 -##伎 -##伏 -##伐 -##休 -##伕 -##众 -##优 -##伙 -##会 -##伝 -##伞 -##伟 -##传 -##伢 -##伤 -##伦 -##伪 -##伫 -##伯 -##估 
-##伴 -##伶 -##伸 -##伺 -##似 -##伽 -##佃 -##但 -##佇 -##佈 -##位 -##低 -##住 -##佐 -##佑 -##体 -##佔 -##何 -##佗 -##佘 -##余 -##佚 -##佛 -##作 -##佝 -##佞 -##佟 -##你 -##佢 -##佣 -##佤 -##佥 -##佩 -##佬 -##佯 -##佰 -##佳 -##併 -##佶 -##佻 -##佼 -##使 -##侃 -##侄 -##來 -##侈 -##例 -##侍 -##侏 -##侑 -##侖 -##侗 -##供 -##依 -##侠 -##価 -##侣 -##侥 -##侦 -##侧 -##侨 -##侬 -##侮 -##侯 -##侵 -##侶 -##侷 -##便 -##係 -##促 -##俄 -##俊 -##俎 -##俏 -##俐 -##俑 -##俗 -##俘 -##俚 -##保 -##俞 -##俟 -##俠 -##信 -##俨 -##俩 -##俪 -##俬 -##俭 -##修 -##俯 -##俱 -##俳 -##俸 -##俺 -##俾 -##倆 -##倉 -##個 -##倌 -##倍 -##倏 -##們 -##倒 -##倔 -##倖 -##倘 -##候 -##倚 -##倜 -##借 -##倡 -##値 -##倦 -##倩 -##倪 -##倫 -##倬 -##倭 -##倶 -##债 -##值 -##倾 -##偃 -##假 -##偈 -##偉 -##偌 -##偎 -##偏 -##偕 -##做 -##停 -##健 -##側 -##偵 -##偶 -##偷 -##偻 -##偽 -##偿 -##傀 -##傅 -##傍 -##傑 -##傘 -##備 -##傚 -##傢 -##傣 -##傥 -##储 -##傩 -##催 -##傭 -##傲 -##傳 -##債 -##傷 -##傻 -##傾 -##僅 -##働 -##像 -##僑 -##僕 -##僖 -##僚 -##僥 -##僧 -##僭 -##僮 -##僱 -##僵 -##價 -##僻 -##儀 -##儂 -##億 -##儆 -##儉 -##儋 -##儒 -##儕 -##儘 -##償 -##儡 -##優 -##儲 -##儷 -##儼 -##儿 -##兀 -##允 -##元 -##兄 -##充 -##兆 -##兇 -##先 -##光 -##克 -##兌 -##免 -##児 -##兑 -##兒 -##兔 -##兖 -##党 -##兜 -##兢 -##入 -##內 -##全 -##兩 -##八 -##公 -##六 -##兮 -##兰 -##共 -##兲 -##关 -##兴 -##兵 -##其 -##具 -##典 -##兹 -##养 -##兼 -##兽 -##冀 -##内 -##円 -##冇 -##冈 -##冉 -##冊 -##册 -##再 -##冏 -##冒 -##冕 -##冗 -##写 -##军 -##农 -##冠 -##冢 -##冤 -##冥 -##冨 -##冪 -##冬 -##冯 -##冰 -##冲 -##决 -##况 -##冶 -##冷 -##冻 -##冼 -##冽 -##冾 -##净 -##凄 -##准 -##凇 -##凈 -##凉 -##凋 -##凌 -##凍 -##减 -##凑 -##凛 -##凜 -##凝 -##几 -##凡 -##凤 -##処 -##凪 -##凭 -##凯 -##凰 -##凱 -##凳 -##凶 -##凸 -##凹 -##出 -##击 -##函 -##凿 -##刀 -##刁 -##刃 -##分 -##切 -##刈 -##刊 -##刍 -##刎 -##刑 -##划 -##列 -##刘 -##则 -##刚 -##创 -##初 -##删 -##判 -##別 -##刨 -##利 -##刪 -##别 -##刮 -##到 -##制 -##刷 -##券 -##刹 -##刺 -##刻 -##刽 -##剁 -##剂 -##剃 -##則 -##剉 -##削 -##剋 -##剌 -##前 -##剎 -##剐 -##剑 -##剔 -##剖 -##剛 -##剜 -##剝 -##剣 -##剤 -##剥 -##剧 -##剩 -##剪 -##副 -##割 -##創 -##剷 -##剽 -##剿 -##劃 -##劇 -##劈 -##劉 -##劊 -##劍 -##劏 -##劑 -##力 -##劝 -##办 -##功 -##加 -##务 -##劣 -##动 -##助 -##努 -##劫 -##劭 -##励 -##劲 -##劳 -##労 -##劵 -##効 -##劾 -##势 -##勁 -##勃 -##勇 -##勉 -##勋 -##勐 -##勒 -##動 -##勖 -##勘 -##務 -##勛 
-##勝 -##勞 -##募 -##勢 -##勤 -##勧 -##勳 -##勵 -##勸 -##勺 -##勻 -##勾 -##勿 -##匀 -##包 -##匆 -##匈 -##匍 -##匐 -##匕 -##化 -##北 -##匙 -##匝 -##匠 -##匡 -##匣 -##匪 -##匮 -##匯 -##匱 -##匹 -##区 -##医 -##匾 -##匿 -##區 -##十 -##千 -##卅 -##升 -##午 -##卉 -##半 -##卍 -##华 -##协 -##卑 -##卒 -##卓 -##協 -##单 -##卖 -##南 -##単 -##博 -##卜 -##卞 -##卟 -##占 -##卡 -##卢 -##卤 -##卦 -##卧 -##卫 -##卮 -##卯 -##印 -##危 -##即 -##却 -##卵 -##卷 -##卸 -##卻 -##卿 -##厂 -##厄 -##厅 -##历 -##厉 -##压 -##厌 -##厕 -##厘 -##厚 -##厝 -##原 -##厢 -##厥 -##厦 -##厨 -##厩 -##厭 -##厮 -##厲 -##厳 -##去 -##县 -##叁 -##参 -##參 -##又 -##叉 -##及 -##友 -##双 -##反 -##収 -##发 -##叔 -##取 -##受 -##变 -##叙 -##叛 -##叟 -##叠 -##叡 -##叢 -##口 -##古 -##句 -##另 -##叨 -##叩 -##只 -##叫 -##召 -##叭 -##叮 -##可 -##台 -##叱 -##史 -##右 -##叵 -##叶 -##号 -##司 -##叹 -##叻 -##叼 -##叽 -##吁 -##吃 -##各 -##吆 -##合 -##吉 -##吊 -##吋 -##同 -##名 -##后 -##吏 -##吐 -##向 -##吒 -##吓 -##吕 -##吖 -##吗 -##君 -##吝 -##吞 -##吟 -##吠 -##吡 -##否 -##吧 -##吨 -##吩 -##含 -##听 -##吭 -##吮 -##启 -##吱 -##吳 -##吴 -##吵 -##吶 -##吸 -##吹 -##吻 -##吼 -##吽 -##吾 -##呀 -##呂 -##呃 -##呆 -##呈 -##告 -##呋 -##呎 -##呐 -##呓 -##呕 -##呗 -##员 -##呛 -##呜 -##呢 -##呤 -##呦 -##周 -##呱 -##呲 -##味 -##呵 -##呷 -##呸 -##呻 -##呼 -##命 -##咀 -##咁 -##咂 -##咄 -##咆 -##咋 -##和 -##咎 -##咏 -##咐 -##咒 -##咔 -##咕 -##咖 -##咗 -##咘 -##咙 -##咚 -##咛 -##咣 -##咤 -##咦 -##咧 -##咨 -##咩 -##咪 -##咫 -##咬 -##咭 -##咯 -##咱 -##咲 -##咳 -##咸 -##咻 -##咽 -##咿 -##哀 -##品 -##哂 -##哄 -##哆 -##哇 -##哈 -##哉 -##哋 -##哌 -##响 -##哎 -##哏 -##哐 -##哑 -##哒 -##哔 -##哗 -##哟 -##員 -##哥 -##哦 -##哧 -##哨 -##哩 -##哪 -##哭 -##哮 -##哲 -##哺 -##哼 -##哽 -##唁 -##唄 -##唆 -##唇 -##唉 -##唏 -##唐 -##唑 -##唔 -##唠 -##唤 -##唧 -##唬 -##售 -##唯 -##唰 -##唱 -##唳 -##唷 -##唸 -##唾 -##啃 -##啄 -##商 -##啉 -##啊 -##問 -##啓 -##啕 -##啖 -##啜 -##啞 -##啟 -##啡 -##啤 -##啥 -##啦 -##啧 -##啪 -##啫 -##啬 -##啮 -##啰 -##啱 -##啲 -##啵 -##啶 -##啷 -##啸 -##啻 -##啼 -##啾 -##喀 -##喂 -##喃 -##善 -##喆 -##喇 -##喉 -##喊 -##喋 -##喎 -##喏 -##喔 -##喘 -##喙 -##喚 -##喜 -##喝 -##喟 -##喧 -##喪 -##喫 -##喬 -##單 -##喰 -##喱 -##喲 -##喳 -##喵 -##営 -##喷 -##喹 -##喺 -##喻 -##喽 -##嗅 -##嗆 -##嗇 -##嗎 -##嗑 -##嗒 -##嗓 -##嗔 -##嗖 -##嗚 -##嗜 -##嗝 -##嗟 -##嗡 -##嗣 -##嗤 -##嗦 -##嗨 -##嗪 -##嗬 -##嗯 -##嗰 -##嗲 -##嗳 -##嗶 -##嗷 -##嗽 
-##嘀 -##嘅 -##嘆 -##嘈 -##嘉 -##嘌 -##嘍 -##嘎 -##嘔 -##嘖 -##嘗 -##嘘 -##嘚 -##嘛 -##嘜 -##嘞 -##嘟 -##嘢 -##嘣 -##嘤 -##嘧 -##嘩 -##嘭 -##嘮 -##嘯 -##嘰 -##嘱 -##嘲 -##嘴 -##嘶 -##嘸 -##嘹 -##嘻 -##嘿 -##噁 -##噌 -##噎 -##噓 -##噔 -##噗 -##噙 -##噜 -##噠 -##噢 -##噤 -##器 -##噩 -##噪 -##噬 -##噱 -##噴 -##噶 -##噸 -##噹 -##噻 -##噼 -##嚀 -##嚇 -##嚎 -##嚏 -##嚐 -##嚓 -##嚕 -##嚟 -##嚣 -##嚥 -##嚨 -##嚮 -##嚴 -##嚷 -##嚼 -##囂 -##囉 -##囊 -##囍 -##囑 -##囔 -##囗 -##囚 -##四 -##囝 -##回 -##囟 -##因 -##囡 -##团 -##団 -##囤 -##囧 -##囪 -##囫 -##园 -##困 -##囱 -##囲 -##図 -##围 -##囹 -##固 -##国 -##图 -##囿 -##圃 -##圄 -##圆 -##圈 -##國 -##圍 -##圏 -##園 -##圓 -##圖 -##團 -##圜 -##土 -##圣 -##圧 -##在 -##圩 -##圭 -##地 -##圳 -##场 -##圻 -##圾 -##址 -##坂 -##均 -##坊 -##坍 -##坎 -##坏 -##坐 -##坑 -##块 -##坚 -##坛 -##坝 -##坞 -##坟 -##坠 -##坡 -##坤 -##坦 -##坨 -##坪 -##坯 -##坳 -##坵 -##坷 -##垂 -##垃 -##垄 -##型 -##垒 -##垚 -##垛 -##垠 -##垢 -##垣 -##垦 -##垩 -##垫 -##垭 -##垮 -##垵 -##埂 -##埃 -##埋 -##城 -##埔 -##埕 -##埗 -##域 -##埠 -##埤 -##埵 -##執 -##埸 -##培 -##基 -##埼 -##堀 -##堂 -##堃 -##堅 -##堆 -##堇 -##堑 -##堕 -##堙 -##堡 -##堤 -##堪 -##堯 -##堰 -##報 -##場 -##堵 -##堺 -##堿 -##塊 -##塌 -##塑 -##塔 -##塗 -##塘 -##塚 -##塞 -##塢 -##塩 -##填 -##塬 -##塭 -##塵 -##塾 -##墀 -##境 -##墅 -##墉 -##墊 -##墒 -##墓 -##増 -##墘 -##墙 -##墜 -##增 -##墟 -##墨 -##墩 -##墮 -##墳 -##墻 -##墾 -##壁 -##壅 -##壆 -##壇 -##壊 -##壑 -##壓 -##壕 -##壘 -##壞 -##壟 -##壢 -##壤 -##壩 -##士 -##壬 -##壮 -##壯 -##声 -##売 -##壳 -##壶 -##壹 -##壺 -##壽 -##处 -##备 -##変 -##复 -##夏 -##夔 -##夕 -##外 -##夙 -##多 -##夜 -##够 -##夠 -##夢 -##夥 -##大 -##天 -##太 -##夫 -##夭 -##央 -##夯 -##失 -##头 -##夷 -##夸 -##夹 -##夺 -##夾 -##奂 -##奄 -##奇 -##奈 -##奉 -##奋 -##奎 -##奏 -##奐 -##契 -##奔 -##奕 -##奖 -##套 -##奘 -##奚 -##奠 -##奢 -##奥 -##奧 -##奪 -##奬 -##奮 -##女 -##奴 -##奶 -##奸 -##她 -##好 -##如 -##妃 -##妄 -##妆 -##妇 -##妈 -##妊 -##妍 -##妒 -##妓 -##妖 -##妘 -##妙 -##妝 -##妞 -##妣 -##妤 -##妥 -##妨 -##妩 -##妪 -##妮 -##妲 -##妳 -##妹 -##妻 -##妾 -##姆 -##姉 -##姊 -##始 -##姍 -##姐 -##姑 -##姒 -##姓 -##委 -##姗 -##姚 -##姜 -##姝 -##姣 -##姥 -##姦 -##姨 -##姪 -##姫 -##姬 -##姹 -##姻 -##姿 -##威 -##娃 -##娄 -##娅 -##娆 -##娇 -##娉 -##娑 -##娓 -##娘 -##娛 -##娜 -##娟 -##娠 -##娣 -##娥 -##娩 -##娱 -##娲 -##娴 -##娶 -##娼 -##婀 -##婁 -##婆 -##婉 -##婊 -##婕 -##婚 -##婢 -##婦 
-##婧 -##婪 -##婭 -##婴 -##婵 -##婶 -##婷 -##婺 -##婿 -##媒 -##媚 -##媛 -##媞 -##媧 -##媲 -##媳 -##媽 -##媾 -##嫁 -##嫂 -##嫉 -##嫌 -##嫑 -##嫔 -##嫖 -##嫘 -##嫚 -##嫡 -##嫣 -##嫦 -##嫩 -##嫲 -##嫵 -##嫻 -##嬅 -##嬉 -##嬌 -##嬗 -##嬛 -##嬢 -##嬤 -##嬪 -##嬰 -##嬴 -##嬷 -##嬸 -##嬿 -##孀 -##孃 -##子 -##孑 -##孔 -##孕 -##孖 -##字 -##存 -##孙 -##孚 -##孛 -##孜 -##孝 -##孟 -##孢 -##季 -##孤 -##学 -##孩 -##孪 -##孫 -##孬 -##孰 -##孱 -##孳 -##孵 -##學 -##孺 -##孽 -##孿 -##宁 -##它 -##宅 -##宇 -##守 -##安 -##宋 -##完 -##宏 -##宓 -##宕 -##宗 -##官 -##宙 -##定 -##宛 -##宜 -##宝 -##实 -##実 -##宠 -##审 -##客 -##宣 -##室 -##宥 -##宦 -##宪 -##宫 -##宮 -##宰 -##害 -##宴 -##宵 -##家 -##宸 -##容 -##宽 -##宾 -##宿 -##寂 -##寄 -##寅 -##密 -##寇 -##富 -##寐 -##寒 -##寓 -##寛 -##寝 -##寞 -##察 -##寡 -##寢 -##寥 -##實 -##寧 -##寨 -##審 -##寫 -##寬 -##寮 -##寰 -##寵 -##寶 -##寸 -##对 -##寺 -##寻 -##导 -##対 -##寿 -##封 -##専 -##射 -##将 -##將 -##專 -##尉 -##尊 -##尋 -##對 -##導 -##小 -##少 -##尔 -##尕 -##尖 -##尘 -##尚 -##尝 -##尤 -##尧 -##尬 -##就 -##尴 -##尷 -##尸 -##尹 -##尺 -##尻 -##尼 -##尽 -##尾 -##尿 -##局 -##屁 -##层 -##屄 -##居 -##屆 -##屈 -##屉 -##届 -##屋 -##屌 -##屍 -##屎 -##屏 -##屐 -##屑 -##展 -##屜 -##属 -##屠 -##屡 -##屢 -##層 -##履 -##屬 -##屯 -##山 -##屹 -##屿 -##岀 -##岁 -##岂 -##岌 -##岐 -##岑 -##岔 -##岖 -##岗 -##岘 -##岙 -##岚 -##岛 -##岡 -##岩 -##岫 -##岬 -##岭 -##岱 -##岳 -##岷 -##岸 -##峇 -##峋 -##峒 -##峙 -##峡 -##峤 -##峥 -##峦 -##峨 -##峪 -##峭 -##峯 -##峰 -##峴 -##島 -##峻 -##峽 -##崁 -##崂 -##崆 -##崇 -##崎 -##崑 -##崔 -##崖 -##崗 -##崙 -##崛 -##崧 -##崩 -##崭 -##崴 -##崽 -##嵇 -##嵊 -##嵋 -##嵌 -##嵐 -##嵘 -##嵩 -##嵬 -##嵯 -##嶂 -##嶄 -##嶇 -##嶋 -##嶙 -##嶺 -##嶼 -##嶽 -##巅 -##巍 -##巒 -##巔 -##巖 -##川 -##州 -##巡 -##巢 -##工 -##左 -##巧 -##巨 -##巩 -##巫 -##差 -##己 -##已 -##巳 -##巴 -##巷 -##巻 -##巽 -##巾 -##巿 -##币 -##市 -##布 -##帅 -##帆 -##师 -##希 -##帐 -##帑 -##帕 -##帖 -##帘 -##帚 -##帛 -##帜 -##帝 -##帥 -##带 -##帧 -##師 -##席 -##帮 -##帯 -##帰 -##帳 -##帶 -##帷 -##常 -##帼 -##帽 -##幀 -##幂 -##幄 -##幅 -##幌 -##幔 -##幕 -##幟 -##幡 -##幢 -##幣 -##幫 -##干 -##平 -##年 -##并 -##幸 -##幹 -##幺 -##幻 -##幼 -##幽 -##幾 -##广 -##庁 -##広 -##庄 -##庆 -##庇 -##床 -##序 -##庐 -##库 -##应 -##底 -##庖 -##店 -##庙 -##庚 -##府 -##庞 -##废 -##庠 -##度 -##座 -##庫 -##庭 -##庵 -##庶 -##康 -##庸 -##庹 -##庾 -##廁 -##廂 -##廃 -##廈 -##廉 -##廊 -##廓 
-##廖 -##廚 -##廝 -##廟 -##廠 -##廢 -##廣 -##廬 -##廳 -##延 -##廷 -##建 -##廿 -##开 -##弁 -##异 -##弃 -##弄 -##弈 -##弊 -##弋 -##式 -##弑 -##弒 -##弓 -##弔 -##引 -##弗 -##弘 -##弛 -##弟 -##张 -##弥 -##弦 -##弧 -##弩 -##弭 -##弯 -##弱 -##張 -##強 -##弹 -##强 -##弼 -##弾 -##彅 -##彆 -##彈 -##彌 -##彎 -##归 -##当 -##录 -##彗 -##彙 -##彝 -##形 -##彤 -##彥 -##彦 -##彧 -##彩 -##彪 -##彫 -##彬 -##彭 -##彰 -##影 -##彷 -##役 -##彻 -##彼 -##彿 -##往 -##征 -##径 -##待 -##徇 -##很 -##徉 -##徊 -##律 -##後 -##徐 -##徑 -##徒 -##従 -##徕 -##得 -##徘 -##徙 -##徜 -##從 -##徠 -##御 -##徨 -##復 -##循 -##徬 -##微 -##徳 -##徴 -##徵 -##德 -##徹 -##徼 -##徽 -##心 -##必 -##忆 -##忌 -##忍 -##忏 -##忐 -##忑 -##忒 -##忖 -##志 -##忘 -##忙 -##応 -##忠 -##忡 -##忤 -##忧 -##忪 -##快 -##忱 -##念 -##忻 -##忽 -##忿 -##怀 -##态 -##怂 -##怅 -##怆 -##怎 -##怏 -##怒 -##怔 -##怕 -##怖 -##怙 -##怜 -##思 -##怠 -##怡 -##急 -##怦 -##性 -##怨 -##怪 -##怯 -##怵 -##总 -##怼 -##恁 -##恃 -##恆 -##恋 -##恍 -##恐 -##恒 -##恕 -##恙 -##恚 -##恢 -##恣 -##恤 -##恥 -##恨 -##恩 -##恪 -##恫 -##恬 -##恭 -##息 -##恰 -##恳 -##恵 -##恶 -##恸 -##恺 -##恻 -##恼 -##恿 -##悄 -##悅 -##悉 -##悌 -##悍 -##悔 -##悖 -##悚 -##悟 -##悠 -##患 -##悦 -##您 -##悩 -##悪 -##悬 -##悯 -##悱 -##悲 -##悴 -##悵 -##悶 -##悸 -##悻 -##悼 -##悽 -##情 -##惆 -##惇 -##惊 -##惋 -##惑 -##惕 -##惘 -##惚 -##惜 -##惟 -##惠 -##惡 -##惦 -##惧 -##惨 -##惩 -##惫 -##惬 -##惭 -##惮 -##惯 -##惰 -##惱 -##想 -##惴 -##惶 -##惹 -##惺 -##愁 -##愆 -##愈 -##愉 -##愍 -##意 -##愕 -##愚 -##愛 -##愜 -##感 -##愣 -##愤 -##愧 -##愫 -##愷 -##愿 -##慄 -##慈 -##態 -##慌 -##慎 -##慑 -##慕 -##慘 -##慚 -##慟 -##慢 -##慣 -##慧 -##慨 -##慫 -##慮 -##慰 -##慳 -##慵 -##慶 -##慷 -##慾 -##憂 -##憊 -##憋 -##憎 -##憐 -##憑 -##憔 -##憚 -##憤 -##憧 -##憨 -##憩 -##憫 -##憬 -##憲 -##憶 -##憾 -##懂 -##懇 -##懈 -##應 -##懊 -##懋 -##懑 -##懒 -##懦 -##懲 -##懵 -##懶 -##懷 -##懸 -##懺 -##懼 -##懾 -##懿 -##戀 -##戈 -##戊 -##戌 -##戍 -##戎 -##戏 -##成 -##我 -##戒 -##戕 -##或 -##战 -##戚 -##戛 -##戟 -##戡 -##戦 -##截 -##戬 -##戮 -##戰 -##戲 -##戳 -##戴 -##戶 -##户 -##戸 -##戻 -##戾 -##房 -##所 -##扁 -##扇 -##扈 -##扉 -##手 -##才 -##扎 -##扑 -##扒 -##打 -##扔 -##払 -##托 -##扛 -##扣 -##扦 -##执 -##扩 -##扪 -##扫 -##扬 -##扭 -##扮 -##扯 -##扰 -##扱 -##扳 -##扶 -##批 -##扼 -##找 -##承 -##技 -##抄 -##抉 -##把 -##抑 -##抒 -##抓 -##投 -##抖 -##抗 -##折 -##抚 -##抛 -##抜 -##択 -##抟 -##抠 -##抡 -##抢 -##护 
-##报 -##抨 -##披 -##抬 -##抱 -##抵 -##抹 -##押 -##抽 -##抿 -##拂 -##拄 -##担 -##拆 -##拇 -##拈 -##拉 -##拋 -##拌 -##拍 -##拎 -##拐 -##拒 -##拓 -##拔 -##拖 -##拗 -##拘 -##拙 -##拚 -##招 -##拜 -##拟 -##拡 -##拢 -##拣 -##拥 -##拦 -##拧 -##拨 -##择 -##括 -##拭 -##拮 -##拯 -##拱 -##拳 -##拴 -##拷 -##拼 -##拽 -##拾 -##拿 -##持 -##挂 -##指 -##挈 -##按 -##挎 -##挑 -##挖 -##挙 -##挚 -##挛 -##挝 -##挞 -##挟 -##挠 -##挡 -##挣 -##挤 -##挥 -##挨 -##挪 -##挫 -##振 -##挲 -##挹 -##挺 -##挽 -##挾 -##捂 -##捅 -##捆 -##捉 -##捋 -##捌 -##捍 -##捎 -##捏 -##捐 -##捕 -##捞 -##损 -##捡 -##换 -##捣 -##捧 -##捨 -##捩 -##据 -##捱 -##捲 -##捶 -##捷 -##捺 -##捻 -##掀 -##掂 -##掃 -##掇 -##授 -##掉 -##掌 -##掏 -##掐 -##排 -##掖 -##掘 -##掙 -##掛 -##掠 -##採 -##探 -##掣 -##接 -##控 -##推 -##掩 -##措 -##掬 -##掰 -##掲 -##掳 -##掴 -##掷 -##掸 -##掺 -##揀 -##揃 -##揄 -##揆 -##揉 -##揍 -##描 -##提 -##插 -##揖 -##揚 -##換 -##握 -##揣 -##揩 -##揪 -##揭 -##揮 -##援 -##揶 -##揸 -##揹 -##揽 -##搀 -##搁 -##搂 -##搅 -##損 -##搏 -##搐 -##搓 -##搔 -##搖 -##搗 -##搜 -##搞 -##搡 -##搪 -##搬 -##搭 -##搵 -##搶 -##携 -##搽 -##摀 -##摁 -##摄 -##摆 -##摇 -##摈 -##摊 -##摒 -##摔 -##摘 -##摞 -##摟 -##摧 -##摩 -##摯 -##摳 -##摸 -##摹 -##摺 -##摻 -##撂 -##撃 -##撅 -##撇 -##撈 -##撐 -##撑 -##撒 -##撓 -##撕 -##撚 -##撞 -##撤 -##撥 -##撩 -##撫 -##撬 -##播 -##撮 -##撰 -##撲 -##撵 -##撷 -##撸 -##撻 -##撼 -##撿 -##擀 -##擁 -##擂 -##擄 -##擅 -##擇 -##擊 -##擋 -##操 -##擎 -##擒 -##擔 -##擘 -##據 -##擞 -##擠 -##擡 -##擢 -##擦 -##擬 -##擰 -##擱 -##擲 -##擴 -##擷 -##擺 -##擼 -##擾 -##攀 -##攏 -##攒 -##攔 -##攘 -##攙 -##攜 -##攝 -##攞 -##攢 -##攣 -##攤 -##攥 -##攪 -##攫 -##攬 -##支 -##收 -##攸 -##改 -##攻 -##放 -##政 -##故 -##效 -##敌 -##敍 -##敎 -##敏 -##救 -##敕 -##敖 -##敗 -##敘 -##教 -##敛 -##敝 -##敞 -##敢 -##散 -##敦 -##敬 -##数 -##敲 -##整 -##敵 -##敷 -##數 -##斂 -##斃 -##文 -##斋 -##斌 -##斎 -##斐 -##斑 -##斓 -##斗 -##料 -##斛 -##斜 -##斟 -##斡 -##斤 -##斥 -##斧 -##斩 -##斫 -##斬 -##断 -##斯 -##新 -##斷 -##方 -##於 -##施 -##旁 -##旃 -##旅 -##旋 -##旌 -##旎 -##族 -##旖 -##旗 -##无 -##既 -##日 -##旦 -##旧 -##旨 -##早 -##旬 -##旭 -##旮 -##旱 -##时 -##旷 -##旺 -##旻 -##昀 -##昂 -##昆 -##昇 -##昉 -##昊 -##昌 -##明 -##昏 -##易 -##昔 -##昕 -##昙 -##星 -##映 -##春 -##昧 -##昨 -##昭 -##是 -##昱 -##昴 -##昵 -##昶 -##昼 -##显 -##晁 -##時 -##晃 -##晉 -##晋 -##晌 -##晏 -##晒 -##晓 -##晔 -##晕 -##晖 -##晗 -##晚 -##晝 -##晞 -##晟 
-##晤 -##晦 -##晨 -##晩 -##普 -##景 -##晰 -##晴 -##晶 -##晷 -##智 -##晾 -##暂 -##暄 -##暇 -##暈 -##暉 -##暌 -##暐 -##暑 -##暖 -##暗 -##暝 -##暢 -##暧 -##暨 -##暫 -##暮 -##暱 -##暴 -##暸 -##暹 -##曄 -##曆 -##曇 -##曉 -##曖 -##曙 -##曜 -##曝 -##曠 -##曦 -##曬 -##曰 -##曲 -##曳 -##更 -##書 -##曹 -##曼 -##曾 -##替 -##最 -##會 -##月 -##有 -##朋 -##服 -##朐 -##朔 -##朕 -##朗 -##望 -##朝 -##期 -##朦 -##朧 -##木 -##未 -##末 -##本 -##札 -##朮 -##术 -##朱 -##朴 -##朵 -##机 -##朽 -##杀 -##杂 -##权 -##杆 -##杈 -##杉 -##李 -##杏 -##材 -##村 -##杓 -##杖 -##杜 -##杞 -##束 -##杠 -##条 -##来 -##杨 -##杭 -##杯 -##杰 -##東 -##杳 -##杵 -##杷 -##杼 -##松 -##板 -##极 -##构 -##枇 -##枉 -##枋 -##析 -##枕 -##林 -##枚 -##果 -##枝 -##枢 -##枣 -##枪 -##枫 -##枭 -##枯 -##枰 -##枱 -##枳 -##架 -##枷 -##枸 -##柄 -##柏 -##某 -##柑 -##柒 -##染 -##柔 -##柘 -##柚 -##柜 -##柞 -##柠 -##柢 -##查 -##柩 -##柬 -##柯 -##柱 -##柳 -##柴 -##柵 -##査 -##柿 -##栀 -##栃 -##栄 -##栅 -##标 -##栈 -##栉 -##栋 -##栎 -##栏 -##树 -##栓 -##栖 -##栗 -##校 -##栩 -##株 -##样 -##核 -##根 -##格 -##栽 -##栾 -##桀 -##桁 -##桂 -##桃 -##桅 -##框 -##案 -##桉 -##桌 -##桎 -##桐 -##桑 -##桓 -##桔 -##桜 -##桠 -##桡 -##桢 -##档 -##桥 -##桦 -##桧 -##桨 -##桩 -##桶 -##桿 -##梁 -##梅 -##梆 -##梏 -##梓 -##梗 -##條 -##梟 -##梢 -##梦 -##梧 -##梨 -##梭 -##梯 -##械 -##梳 -##梵 -##梶 -##检 -##棂 -##棄 -##棉 -##棋 -##棍 -##棒 -##棕 -##棗 -##棘 -##棚 -##棟 -##棠 -##棣 -##棧 -##森 -##棱 -##棲 -##棵 -##棹 -##棺 -##椁 -##椅 -##椋 -##植 -##椎 -##椒 -##検 -##椪 -##椭 -##椰 -##椹 -##椽 -##椿 -##楂 -##楊 -##楓 -##楔 -##楚 -##楝 -##楞 -##楠 -##楣 -##楨 -##楫 -##業 -##楮 -##極 -##楷 -##楸 -##楹 -##楼 -##楽 -##概 -##榄 -##榆 -##榈 -##榉 -##榔 -##榕 -##榖 -##榛 -##榜 -##榨 -##榫 -##榭 -##榮 -##榱 -##榴 -##榷 -##榻 -##槁 -##槃 -##構 -##槌 -##槍 -##槎 -##槐 -##槓 -##様 -##槛 -##槟 -##槤 -##槭 -##槲 -##槳 -##槻 -##槽 -##槿 -##樁 -##樂 -##樊 -##樑 -##樓 -##標 -##樞 -##樟 -##模 -##樣 -##権 -##横 -##樫 -##樯 -##樱 -##樵 -##樸 -##樹 -##樺 -##樽 -##樾 -##橄 -##橇 -##橋 -##橐 -##橘 -##橙 -##機 -##橡 -##橢 -##橫 -##橱 -##橹 -##橼 -##檀 -##檄 -##檎 -##檐 -##檔 -##檗 -##檜 -##檢 -##檬 -##檯 -##檳 -##檸 -##檻 -##櫃 -##櫚 -##櫛 -##櫥 -##櫸 -##櫻 -##欄 -##權 -##欒 -##欖 -##欠 -##次 -##欢 -##欣 -##欧 -##欲 -##欸 -##欺 -##欽 -##款 -##歆 -##歇 -##歉 -##歌 -##歎 -##歐 -##歓 -##歙 -##歛 -##歡 -##止 -##正 -##此 -##步 -##武 -##歧 -##歩 -##歪 -##歯 -##歲 -##歳 -##歴 -##歷 
-##歸 -##歹 -##死 -##歼 -##殁 -##殃 -##殆 -##殇 -##殉 -##殊 -##残 -##殒 -##殓 -##殖 -##殘 -##殞 -##殡 -##殤 -##殭 -##殯 -##殲 -##殴 -##段 -##殷 -##殺 -##殼 -##殿 -##毀 -##毁 -##毂 -##毅 -##毆 -##毋 -##母 -##毎 -##每 -##毒 -##毓 -##比 -##毕 -##毗 -##毘 -##毙 -##毛 -##毡 -##毫 -##毯 -##毽 -##氈 -##氏 -##氐 -##民 -##氓 -##气 -##氖 -##気 -##氙 -##氛 -##氟 -##氡 -##氢 -##氣 -##氤 -##氦 -##氧 -##氨 -##氪 -##氫 -##氮 -##氯 -##氰 -##氲 -##水 -##氷 -##永 -##氹 -##氾 -##汀 -##汁 -##求 -##汆 -##汇 -##汉 -##汎 -##汐 -##汕 -##汗 -##汙 -##汛 -##汝 -##汞 -##江 -##池 -##污 -##汤 -##汨 -##汩 -##汪 -##汰 -##汲 -##汴 -##汶 -##汹 -##決 -##汽 -##汾 -##沁 -##沂 -##沃 -##沅 -##沈 -##沉 -##沌 -##沏 -##沐 -##沒 -##沓 -##沖 -##沙 -##沛 -##沟 -##没 -##沢 -##沣 -##沥 -##沦 -##沧 -##沪 -##沫 -##沭 -##沮 -##沱 -##河 -##沸 -##油 -##治 -##沼 -##沽 -##沾 -##沿 -##況 -##泄 -##泉 -##泊 -##泌 -##泓 -##法 -##泗 -##泛 -##泞 -##泠 -##泡 -##波 -##泣 -##泥 -##注 -##泪 -##泫 -##泮 -##泯 -##泰 -##泱 -##泳 -##泵 -##泷 -##泸 -##泻 -##泼 -##泽 -##泾 -##洁 -##洄 -##洋 -##洒 -##洗 -##洙 -##洛 -##洞 -##津 -##洩 -##洪 -##洮 -##洱 -##洲 -##洵 -##洶 -##洸 -##洹 -##活 -##洼 -##洽 -##派 -##流 -##浃 -##浄 -##浅 -##浆 -##浇 -##浊 -##测 -##济 -##浏 -##浑 -##浒 -##浓 -##浔 -##浙 -##浚 -##浜 -##浣 -##浦 -##浩 -##浪 -##浬 -##浮 -##浯 -##浴 -##海 -##浸 -##涂 -##涅 -##涇 -##消 -##涉 -##涌 -##涎 -##涓 -##涔 -##涕 -##涙 -##涛 -##涝 -##涞 -##涟 -##涠 -##涡 -##涣 -##涤 -##润 -##涧 -##涨 -##涩 -##涪 -##涮 -##涯 -##液 -##涵 -##涸 -##涼 -##涿 -##淀 -##淄 -##淅 -##淆 -##淇 -##淋 -##淌 -##淑 -##淒 -##淖 -##淘 -##淙 -##淚 -##淞 -##淡 -##淤 -##淦 -##淨 -##淩 -##淪 -##淫 -##淬 -##淮 -##深 -##淳 -##淵 -##混 -##淹 -##淺 -##添 -##淼 -##清 -##済 -##渉 -##渊 -##渋 -##渍 -##渎 -##渐 -##渔 -##渗 -##渙 -##渚 -##減 -##渝 -##渠 -##渡 -##渣 -##渤 -##渥 -##渦 -##温 -##測 -##渭 -##港 -##渲 -##渴 -##游 -##渺 -##渾 -##湃 -##湄 -##湊 -##湍 -##湖 -##湘 -##湛 -##湟 -##湧 -##湫 -##湮 -##湯 -##湳 -##湾 -##湿 -##満 -##溃 -##溅 -##溉 -##溏 -##源 -##準 -##溜 -##溝 -##溟 -##溢 -##溥 -##溧 -##溪 -##溫 -##溯 -##溱 -##溴 -##溶 -##溺 -##溼 -##滁 -##滂 -##滄 -##滅 -##滇 -##滋 -##滌 -##滑 -##滓 -##滔 -##滕 -##滙 -##滚 -##滝 -##滞 -##滟 -##满 -##滢 -##滤 -##滥 -##滦 -##滨 -##滩 -##滬 -##滯 -##滲 -##滴 -##滷 -##滸 -##滾 -##滿 -##漁 -##漂 -##漆 -##漉 -##漏 -##漓 -##演 -##漕 -##漠 -##漢 -##漣 -##漩 -##漪 -##漫 -##漬 -##漯 -##漱 -##漲 -##漳 -##漸 -##漾 -##漿 -##潆 
-##潇 -##潋 -##潍 -##潑 -##潔 -##潘 -##潛 -##潜 -##潞 -##潟 -##潢 -##潤 -##潦 -##潧 -##潭 -##潮 -##潰 -##潴 -##潸 -##潺 -##潼 -##澀 -##澄 -##澆 -##澈 -##澍 -##澎 -##澗 -##澜 -##澡 -##澤 -##澧 -##澱 -##澳 -##澹 -##激 -##濁 -##濂 -##濃 -##濑 -##濒 -##濕 -##濘 -##濛 -##濟 -##濠 -##濡 -##濤 -##濫 -##濬 -##濮 -##濯 -##濱 -##濺 -##濾 -##瀅 -##瀆 -##瀉 -##瀋 -##瀏 -##瀑 -##瀕 -##瀘 -##瀚 -##瀛 -##瀝 -##瀞 -##瀟 -##瀧 -##瀨 -##瀬 -##瀰 -##瀾 -##灌 -##灏 -##灑 -##灘 -##灝 -##灞 -##灣 -##火 -##灬 -##灭 -##灯 -##灰 -##灵 -##灶 -##灸 -##灼 -##災 -##灾 -##灿 -##炀 -##炁 -##炅 -##炉 -##炊 -##炎 -##炒 -##炔 -##炕 -##炖 -##炙 -##炜 -##炫 -##炬 -##炭 -##炮 -##炯 -##炳 -##炷 -##炸 -##点 -##為 -##炼 -##炽 -##烁 -##烂 -##烃 -##烈 -##烊 -##烏 -##烘 -##烙 -##烛 -##烟 -##烤 -##烦 -##烧 -##烨 -##烩 -##烫 -##烬 -##热 -##烯 -##烷 -##烹 -##烽 -##焉 -##焊 -##焕 -##焖 -##焗 -##焘 -##焙 -##焚 -##焜 -##無 -##焦 -##焯 -##焰 -##焱 -##然 -##焼 -##煅 -##煉 -##煊 -##煌 -##煎 -##煒 -##煖 -##煙 -##煜 -##煞 -##煤 -##煥 -##煦 -##照 -##煨 -##煩 -##煮 -##煲 -##煸 -##煽 -##熄 -##熊 -##熏 -##熒 -##熔 -##熙 -##熟 -##熠 -##熨 -##熬 -##熱 -##熵 -##熹 -##熾 -##燁 -##燃 -##燄 -##燈 -##燉 -##燊 -##燎 -##燒 -##燔 -##燕 -##燙 -##燜 -##營 -##燥 -##燦 -##燧 -##燭 -##燮 -##燴 -##燻 -##燼 -##燿 -##爆 -##爍 -##爐 -##爛 -##爪 -##爬 -##爭 -##爰 -##爱 -##爲 -##爵 -##父 -##爷 -##爸 -##爹 -##爺 -##爻 -##爽 -##爾 -##牆 -##片 -##版 -##牌 -##牍 -##牒 -##牙 -##牛 -##牝 -##牟 -##牠 -##牡 -##牢 -##牦 -##牧 -##物 -##牯 -##牲 -##牴 -##牵 -##特 -##牺 -##牽 -##犀 -##犁 -##犄 -##犊 -##犍 -##犒 -##犢 -##犧 -##犬 -##犯 -##状 -##犷 -##犸 -##犹 -##狀 -##狂 -##狄 -##狈 -##狎 -##狐 -##狒 -##狗 -##狙 -##狞 -##狠 -##狡 -##狩 -##独 -##狭 -##狮 -##狰 -##狱 -##狸 -##狹 -##狼 -##狽 -##猎 -##猕 -##猖 -##猗 -##猙 -##猛 -##猜 -##猝 -##猥 -##猩 -##猪 -##猫 -##猬 -##献 -##猴 -##猶 -##猷 -##猾 -##猿 -##獄 -##獅 -##獎 -##獐 -##獒 -##獗 -##獠 -##獣 -##獨 -##獭 -##獰 -##獲 -##獵 -##獷 -##獸 -##獺 -##獻 -##獼 -##獾 -##玄 -##率 -##玉 -##王 -##玑 -##玖 -##玛 -##玟 -##玠 -##玥 -##玩 -##玫 -##玮 -##环 -##现 -##玲 -##玳 -##玷 -##玺 -##玻 -##珀 -##珂 -##珅 -##珈 -##珉 -##珊 -##珍 -##珏 -##珐 -##珑 -##珙 -##珞 -##珠 -##珣 -##珥 -##珩 -##珪 -##班 -##珮 -##珲 -##珺 -##現 -##球 -##琅 -##理 -##琇 -##琉 -##琊 -##琍 -##琏 -##琐 -##琛 -##琢 -##琥 -##琦 -##琨 -##琪 -##琬 -##琮 -##琰 -##琲 -##琳 -##琴 -##琵 -##琶 -##琺 -##琼 -##瑀 -##瑁 -##瑄 -##瑋 -##瑕 -##瑗 -##瑙 
-##瑚 -##瑛 -##瑜 -##瑞 -##瑟 -##瑠 -##瑣 -##瑤 -##瑩 -##瑪 -##瑯 -##瑰 -##瑶 -##瑾 -##璀 -##璁 -##璃 -##璇 -##璉 -##璋 -##璎 -##璐 -##璜 -##璞 -##璟 -##璧 -##璨 -##環 -##璽 -##璿 -##瓊 -##瓏 -##瓒 -##瓜 -##瓢 -##瓣 -##瓤 -##瓦 -##瓮 -##瓯 -##瓴 -##瓶 -##瓷 -##甄 -##甌 -##甕 -##甘 -##甙 -##甚 -##甜 -##生 -##產 -##産 -##甥 -##甦 -##用 -##甩 -##甫 -##甬 -##甭 -##甯 -##田 -##由 -##甲 -##申 -##电 -##男 -##甸 -##町 -##画 -##甾 -##畀 -##畅 -##界 -##畏 -##畑 -##畔 -##留 -##畜 -##畝 -##畢 -##略 -##畦 -##番 -##畫 -##異 -##畲 -##畳 -##畴 -##當 -##畸 -##畹 -##畿 -##疆 -##疇 -##疊 -##疏 -##疑 -##疔 -##疖 -##疗 -##疙 -##疚 -##疝 -##疟 -##疡 -##疣 -##疤 -##疥 -##疫 -##疮 -##疯 -##疱 -##疲 -##疳 -##疵 -##疸 -##疹 -##疼 -##疽 -##疾 -##痂 -##病 -##症 -##痈 -##痉 -##痊 -##痍 -##痒 -##痔 -##痕 -##痘 -##痙 -##痛 -##痞 -##痠 -##痢 -##痣 -##痤 -##痧 -##痨 -##痪 -##痫 -##痰 -##痱 -##痴 -##痹 -##痺 -##痼 -##痿 -##瘀 -##瘁 -##瘋 -##瘍 -##瘓 -##瘘 -##瘙 -##瘟 -##瘠 -##瘡 -##瘢 -##瘤 -##瘦 -##瘧 -##瘩 -##瘪 -##瘫 -##瘴 -##瘸 -##瘾 -##療 -##癇 -##癌 -##癒 -##癖 -##癜 -##癞 -##癡 -##癢 -##癣 -##癥 -##癫 -##癬 -##癮 -##癱 -##癲 -##癸 -##発 -##登 -##發 -##白 -##百 -##皂 -##的 -##皆 -##皇 -##皈 -##皋 -##皎 -##皑 -##皓 -##皖 -##皙 -##皚 -##皮 -##皰 -##皱 -##皴 -##皺 -##皿 -##盂 -##盃 -##盅 -##盆 -##盈 -##益 -##盎 -##盏 -##盐 -##监 -##盒 -##盔 -##盖 -##盗 -##盘 -##盛 -##盜 -##盞 -##盟 -##盡 -##監 -##盤 -##盥 -##盧 -##盪 -##目 -##盯 -##盱 -##盲 -##直 -##相 -##盹 -##盼 -##盾 -##省 -##眈 -##眉 -##看 -##県 -##眙 -##眞 -##真 -##眠 -##眦 -##眨 -##眩 -##眯 -##眶 -##眷 -##眸 -##眺 -##眼 -##眾 -##着 -##睁 -##睇 -##睏 -##睐 -##睑 -##睛 -##睜 -##睞 -##睡 -##睢 -##督 -##睥 -##睦 -##睨 -##睪 -##睫 -##睬 -##睹 -##睽 -##睾 -##睿 -##瞄 -##瞅 -##瞇 -##瞋 -##瞌 -##瞎 -##瞑 -##瞒 -##瞓 -##瞞 -##瞟 -##瞠 -##瞥 -##瞧 -##瞩 -##瞪 -##瞬 -##瞭 -##瞰 -##瞳 -##瞻 -##瞼 -##瞿 -##矇 -##矍 -##矗 -##矚 -##矛 -##矜 -##矢 -##矣 -##知 -##矩 -##矫 -##短 -##矮 -##矯 -##石 -##矶 -##矽 -##矾 -##矿 -##码 -##砂 -##砌 -##砍 -##砒 -##研 -##砖 -##砗 -##砚 -##砝 -##砣 -##砥 -##砧 -##砭 -##砰 -##砲 -##破 -##砷 -##砸 -##砺 -##砼 -##砾 -##础 -##硅 -##硐 -##硒 -##硕 -##硝 -##硫 -##硬 -##确 -##硯 -##硼 -##碁 -##碇 -##碉 -##碌 -##碍 -##碎 -##碑 -##碓 -##碗 -##碘 -##碚 -##碛 -##碟 -##碣 -##碧 -##碩 -##碰 -##碱 -##碳 -##碴 -##確 -##碼 -##碾 -##磁 -##磅 -##磊 -##磋 -##磐 -##磕 -##磚 -##磡 -##磨 -##磬 -##磯 -##磲 -##磷 -##磺 -##礁 -##礎 -##礙 
-##礡 -##礦 -##礪 -##礫 -##礴 -##示 -##礼 -##社 -##祀 -##祁 -##祂 -##祇 -##祈 -##祉 -##祎 -##祐 -##祕 -##祖 -##祗 -##祚 -##祛 -##祜 -##祝 -##神 -##祟 -##祠 -##祢 -##祥 -##票 -##祭 -##祯 -##祷 -##祸 -##祺 -##祿 -##禀 -##禁 -##禄 -##禅 -##禍 -##禎 -##福 -##禛 -##禦 -##禧 -##禪 -##禮 -##禱 -##禹 -##禺 -##离 -##禽 -##禾 -##禿 -##秀 -##私 -##秃 -##秆 -##秉 -##秋 -##种 -##科 -##秒 -##秘 -##租 -##秣 -##秤 -##秦 -##秧 -##秩 -##秭 -##积 -##称 -##秸 -##移 -##秽 -##稀 -##稅 -##程 -##稍 -##税 -##稔 -##稗 -##稚 -##稜 -##稞 -##稟 -##稠 -##稣 -##種 -##稱 -##稲 -##稳 -##稷 -##稹 -##稻 -##稼 -##稽 -##稿 -##穀 -##穂 -##穆 -##穌 -##積 -##穎 -##穗 -##穢 -##穩 -##穫 -##穴 -##究 -##穷 -##穹 -##空 -##穿 -##突 -##窃 -##窄 -##窈 -##窍 -##窑 -##窒 -##窓 -##窕 -##窖 -##窗 -##窘 -##窜 -##窝 -##窟 -##窠 -##窥 -##窦 -##窨 -##窩 -##窪 -##窮 -##窯 -##窺 -##窿 -##竄 -##竅 -##竇 -##竊 -##立 -##竖 -##站 -##竜 -##竞 -##竟 -##章 -##竣 -##童 -##竭 -##端 -##競 -##竹 -##竺 -##竽 -##竿 -##笃 -##笆 -##笈 -##笋 -##笏 -##笑 -##笔 -##笙 -##笛 -##笞 -##笠 -##符 -##笨 -##第 -##笹 -##笺 -##笼 -##筆 -##等 -##筊 -##筋 -##筍 -##筏 -##筐 -##筑 -##筒 -##答 -##策 -##筛 -##筝 -##筠 -##筱 -##筲 -##筵 -##筷 -##筹 -##签 -##简 -##箇 -##箋 -##箍 -##箏 -##箐 -##箔 -##箕 -##算 -##箝 -##管 -##箩 -##箫 -##箭 -##箱 -##箴 -##箸 -##節 -##篁 -##範 -##篆 -##篇 -##築 -##篑 -##篓 -##篙 -##篝 -##篠 -##篡 -##篤 -##篩 -##篪 -##篮 -##篱 -##篷 -##簇 -##簌 -##簍 -##簡 -##簦 -##簧 -##簪 -##簫 -##簷 -##簸 -##簽 -##簾 -##簿 -##籁 -##籃 -##籌 -##籍 -##籐 -##籟 -##籠 -##籤 -##籬 -##籮 -##籲 -##米 -##类 -##籼 -##籽 -##粄 -##粉 -##粑 -##粒 -##粕 -##粗 -##粘 -##粟 -##粤 -##粥 -##粧 -##粪 -##粮 -##粱 -##粲 -##粳 -##粵 -##粹 -##粼 -##粽 -##精 -##粿 -##糅 -##糊 -##糍 -##糕 -##糖 -##糗 -##糙 -##糜 -##糞 -##糟 -##糠 -##糧 -##糬 -##糯 -##糰 -##糸 -##系 -##糾 -##紀 -##紂 -##約 -##紅 -##紉 -##紊 -##紋 -##納 -##紐 -##紓 -##純 -##紗 -##紘 -##紙 -##級 -##紛 -##紜 -##素 -##紡 -##索 -##紧 -##紫 -##紮 -##累 -##細 -##紳 -##紹 -##紺 -##終 -##絃 -##組 -##絆 -##経 -##結 -##絕 -##絞 -##絡 -##絢 -##給 -##絨 -##絮 -##統 -##絲 -##絳 -##絵 -##絶 -##絹 -##綁 -##綏 -##綑 -##經 -##継 -##続 -##綜 -##綠 -##綢 -##綦 -##綫 -##綬 -##維 -##綱 -##網 -##綴 -##綵 -##綸 -##綺 -##綻 -##綽 -##綾 -##綿 -##緊 -##緋 -##総 -##緑 -##緒 -##緘 -##線 -##緝 -##緞 -##締 -##緣 -##編 -##緩 -##緬 -##緯 -##練 -##緹 -##緻 -##縁 -##縄 -##縈 -##縛 -##縝 -##縣 -##縫 -##縮 -##縱 -##縴 -##縷 -##總 
-##績 -##繁 -##繃 -##繆 -##繇 -##繋 -##織 -##繕 -##繚 -##繞 -##繡 -##繩 -##繪 -##繫 -##繭 -##繳 -##繹 -##繼 -##繽 -##纂 -##續 -##纍 -##纏 -##纓 -##纔 -##纖 -##纜 -##纠 -##红 -##纣 -##纤 -##约 -##级 -##纨 -##纪 -##纫 -##纬 -##纭 -##纯 -##纰 -##纱 -##纲 -##纳 -##纵 -##纶 -##纷 -##纸 -##纹 -##纺 -##纽 -##纾 -##线 -##绀 -##练 -##组 -##绅 -##细 -##织 -##终 -##绊 -##绍 -##绎 -##经 -##绑 -##绒 -##结 -##绔 -##绕 -##绘 -##给 -##绚 -##绛 -##络 -##绝 -##绞 -##统 -##绡 -##绢 -##绣 -##绥 -##绦 -##继 -##绩 -##绪 -##绫 -##续 -##绮 -##绯 -##绰 -##绳 -##维 -##绵 -##绶 -##绷 -##绸 -##绻 -##综 -##绽 -##绾 -##绿 -##缀 -##缄 -##缅 -##缆 -##缇 -##缈 -##缉 -##缎 -##缓 -##缔 -##缕 -##编 -##缘 -##缙 -##缚 -##缜 -##缝 -##缠 -##缢 -##缤 -##缥 -##缨 -##缩 -##缪 -##缭 -##缮 -##缰 -##缱 -##缴 -##缸 -##缺 -##缽 -##罂 -##罄 -##罌 -##罐 -##网 -##罔 -##罕 -##罗 -##罚 -##罡 -##罢 -##罩 -##罪 -##置 -##罰 -##署 -##罵 -##罷 -##罹 -##羁 -##羅 -##羈 -##羊 -##羌 -##美 -##羔 -##羚 -##羞 -##羟 -##羡 -##羣 -##群 -##羥 -##羧 -##羨 -##義 -##羯 -##羲 -##羸 -##羹 -##羽 -##羿 -##翁 -##翅 -##翊 -##翌 -##翎 -##習 -##翔 -##翘 -##翟 -##翠 -##翡 -##翦 -##翩 -##翰 -##翱 -##翳 -##翹 -##翻 -##翼 -##耀 -##老 -##考 -##耄 -##者 -##耆 -##耋 -##而 -##耍 -##耐 -##耒 -##耕 -##耗 -##耘 -##耙 -##耦 -##耨 -##耳 -##耶 -##耷 -##耸 -##耻 -##耽 -##耿 -##聂 -##聆 -##聊 -##聋 -##职 -##聒 -##联 -##聖 -##聘 -##聚 -##聞 -##聪 -##聯 -##聰 -##聲 -##聳 -##聴 -##聶 -##職 -##聽 -##聾 -##聿 -##肃 -##肄 -##肅 -##肆 -##肇 -##肉 -##肋 -##肌 -##肏 -##肓 -##肖 -##肘 -##肚 -##肛 -##肝 -##肠 -##股 -##肢 -##肤 -##肥 -##肩 -##肪 -##肮 -##肯 -##肱 -##育 -##肴 -##肺 -##肽 -##肾 -##肿 -##胀 -##胁 -##胃 -##胄 -##胆 -##背 -##胍 -##胎 -##胖 -##胚 -##胛 -##胜 -##胝 -##胞 -##胡 -##胤 -##胥 -##胧 -##胫 -##胭 -##胯 -##胰 -##胱 -##胳 -##胴 -##胶 -##胸 -##胺 -##能 -##脂 -##脅 -##脆 -##脇 -##脈 -##脉 -##脊 -##脍 -##脏 -##脐 -##脑 -##脓 -##脖 -##脘 -##脚 -##脛 -##脣 -##脩 -##脫 -##脯 -##脱 -##脲 -##脳 -##脸 -##脹 -##脾 -##腆 -##腈 -##腊 -##腋 -##腌 -##腎 -##腐 -##腑 -##腓 -##腔 -##腕 -##腥 -##腦 -##腩 -##腫 -##腭 -##腮 -##腰 -##腱 -##腳 -##腴 -##腸 -##腹 -##腺 -##腻 -##腼 -##腾 -##腿 -##膀 -##膈 -##膊 -##膏 -##膑 -##膘 -##膚 -##膛 -##膜 -##膝 -##膠 -##膦 -##膨 -##膩 -##膳 -##膺 -##膻 -##膽 -##膾 -##膿 -##臀 -##臂 -##臃 -##臆 -##臉 -##臊 -##臍 -##臓 -##臘 -##臟 -##臣 -##臥 -##臧 -##臨 -##自 -##臬 -##臭 -##至 -##致 -##臺 -##臻 -##臼 -##臾 -##舀 -##舂 -##舅 -##舆 
-##與 -##興 -##舉 -##舊 -##舌 -##舍 -##舎 -##舐 -##舒 -##舔 -##舖 -##舗 -##舛 -##舜 -##舞 -##舟 -##航 -##舫 -##般 -##舰 -##舱 -##舵 -##舶 -##舷 -##舸 -##船 -##舺 -##舾 -##艇 -##艋 -##艘 -##艙 -##艦 -##艮 -##良 -##艰 -##艱 -##色 -##艳 -##艷 -##艹 -##艺 -##艾 -##节 -##芃 -##芈 -##芊 -##芋 -##芍 -##芎 -##芒 -##芙 -##芜 -##芝 -##芡 -##芥 -##芦 -##芩 -##芪 -##芫 -##芬 -##芭 -##芮 -##芯 -##花 -##芳 -##芷 -##芸 -##芹 -##芻 -##芽 -##芾 -##苁 -##苄 -##苇 -##苋 -##苍 -##苏 -##苑 -##苒 -##苓 -##苔 -##苕 -##苗 -##苛 -##苜 -##苞 -##苟 -##苡 -##苣 -##若 -##苦 -##苫 -##苯 -##英 -##苷 -##苹 -##苻 -##茁 -##茂 -##范 -##茄 -##茅 -##茉 -##茎 -##茏 -##茗 -##茜 -##茧 -##茨 -##茫 -##茬 -##茭 -##茯 -##茱 -##茲 -##茴 -##茵 -##茶 -##茸 -##茹 -##茼 -##荀 -##荃 -##荆 -##草 -##荊 -##荏 -##荐 -##荒 -##荔 -##荖 -##荘 -##荚 -##荞 -##荟 -##荠 -##荡 -##荣 -##荤 -##荥 -##荧 -##荨 -##荪 -##荫 -##药 -##荳 -##荷 -##荸 -##荻 -##荼 -##荽 -##莅 -##莆 -##莉 -##莊 -##莎 -##莒 -##莓 -##莖 -##莘 -##莞 -##莠 -##莢 -##莧 -##莪 -##莫 -##莱 -##莲 -##莴 -##获 -##莹 -##莺 -##莽 -##莿 -##菀 -##菁 -##菅 -##菇 -##菈 -##菊 -##菌 -##菏 -##菓 -##菖 -##菘 -##菜 -##菟 -##菠 -##菡 -##菩 -##華 -##菱 -##菲 -##菸 -##菽 -##萁 -##萃 -##萄 -##萊 -##萋 -##萌 -##萍 -##萎 -##萘 -##萝 -##萤 -##营 -##萦 -##萧 -##萨 -##萩 -##萬 -##萱 -##萵 -##萸 -##萼 -##落 -##葆 -##葉 -##著 -##葚 -##葛 -##葡 -##董 -##葦 -##葩 -##葫 -##葬 -##葭 -##葯 -##葱 -##葳 -##葵 -##葷 -##葺 -##蒂 -##蒋 -##蒐 -##蒔 -##蒙 -##蒜 -##蒞 -##蒟 -##蒡 -##蒨 -##蒲 -##蒸 -##蒹 -##蒻 -##蒼 -##蒿 -##蓁 -##蓄 -##蓆 -##蓉 -##蓋 -##蓑 -##蓓 -##蓖 -##蓝 -##蓟 -##蓦 -##蓬 -##蓮 -##蓼 -##蓿 -##蔑 -##蔓 -##蔔 -##蔗 -##蔘 -##蔚 -##蔡 -##蔣 -##蔥 -##蔫 -##蔬 -##蔭 -##蔵 -##蔷 -##蔺 -##蔻 -##蔼 -##蔽 -##蕁 -##蕃 -##蕈 -##蕉 -##蕊 -##蕎 -##蕙 -##蕤 -##蕨 -##蕩 -##蕪 -##蕭 -##蕲 -##蕴 -##蕻 -##蕾 -##薄 -##薅 -##薇 -##薈 -##薊 -##薏 -##薑 -##薔 -##薙 -##薛 -##薦 -##薨 -##薩 -##薪 -##薬 -##薯 -##薰 -##薹 -##藉 -##藍 -##藏 -##藐 -##藓 -##藕 -##藜 -##藝 -##藤 -##藥 -##藩 -##藹 -##藻 -##藿 -##蘆 -##蘇 -##蘊 -##蘋 -##蘑 -##蘚 -##蘭 -##蘸 -##蘼 -##蘿 -##虎 -##虏 -##虐 -##虑 -##虔 -##處 -##虚 -##虛 -##虜 -##虞 -##號 -##虢 -##虧 -##虫 -##虬 -##虱 -##虹 -##虻 -##虽 -##虾 -##蚀 -##蚁 -##蚂 -##蚊 -##蚌 -##蚓 -##蚕 -##蚜 -##蚝 -##蚣 -##蚤 -##蚩 -##蚪 -##蚯 -##蚱 -##蚵 -##蛀 -##蛆 -##蛇 -##蛊 -##蛋 -##蛎 -##蛐 -##蛔 -##蛙 -##蛛 -##蛟 -##蛤 -##蛭 -##蛮 -##蛰 -##蛳 -##蛹 -##蛻 -##蛾 -##蜀 -##蜂 
-##蜃 -##蜆 -##蜇 -##蜈 -##蜊 -##蜍 -##蜒 -##蜓 -##蜕 -##蜗 -##蜘 -##蜚 -##蜜 -##蜡 -##蜢 -##蜥 -##蜱 -##蜴 -##蜷 -##蜻 -##蜿 -##蝇 -##蝈 -##蝉 -##蝌 -##蝎 -##蝕 -##蝗 -##蝙 -##蝟 -##蝠 -##蝦 -##蝨 -##蝴 -##蝶 -##蝸 -##蝼 -##螂 -##螃 -##融 -##螞 -##螢 -##螨 -##螯 -##螳 -##螺 -##蟀 -##蟄 -##蟆 -##蟋 -##蟎 -##蟑 -##蟒 -##蟠 -##蟬 -##蟲 -##蟹 -##蟻 -##蟾 -##蠅 -##蠍 -##蠔 -##蠕 -##蠛 -##蠟 -##蠡 -##蠢 -##蠣 -##蠱 -##蠶 -##蠹 -##蠻 -##血 -##衄 -##衅 -##衆 -##行 -##衍 -##術 -##衔 -##街 -##衙 -##衛 -##衝 -##衞 -##衡 -##衢 -##衣 -##补 -##表 -##衩 -##衫 -##衬 -##衮 -##衰 -##衲 -##衷 -##衹 -##衾 -##衿 -##袁 -##袂 -##袄 -##袅 -##袈 -##袋 -##袍 -##袒 -##袖 -##袜 -##袞 -##袤 -##袪 -##被 -##袭 -##袱 -##裁 -##裂 -##装 -##裆 -##裊 -##裏 -##裔 -##裕 -##裘 -##裙 -##補 -##裝 -##裟 -##裡 -##裤 -##裨 -##裱 -##裳 -##裴 -##裸 -##裹 -##製 -##裾 -##褂 -##複 -##褐 -##褒 -##褓 -##褔 -##褚 -##褥 -##褪 -##褫 -##褲 -##褶 -##褻 -##襁 -##襄 -##襟 -##襠 -##襪 -##襬 -##襯 -##襲 -##西 -##要 -##覃 -##覆 -##覇 -##見 -##規 -##覓 -##視 -##覚 -##覦 -##覧 -##親 -##覬 -##観 -##覷 -##覺 -##覽 -##觀 -##见 -##观 -##规 -##觅 -##视 -##览 -##觉 -##觊 -##觎 -##觐 -##觑 -##角 -##觞 -##解 -##觥 -##触 -##觸 -##言 -##訂 -##計 -##訊 -##討 -##訓 -##訕 -##訖 -##託 -##記 -##訛 -##訝 -##訟 -##訣 -##訥 -##訪 -##設 -##許 -##訳 -##訴 -##訶 -##診 -##註 -##証 -##詆 -##詐 -##詔 -##評 -##詛 -##詞 -##詠 -##詡 -##詢 -##詣 -##試 -##詩 -##詫 -##詬 -##詭 -##詮 -##詰 -##話 -##該 -##詳 -##詹 -##詼 -##誅 -##誇 -##誉 -##誌 -##認 -##誓 -##誕 -##誘 -##語 -##誠 -##誡 -##誣 -##誤 -##誥 -##誦 -##誨 -##說 -##説 -##読 -##誰 -##課 -##誹 -##誼 -##調 -##諄 -##談 -##請 -##諏 -##諒 -##論 -##諗 -##諜 -##諡 -##諦 -##諧 -##諫 -##諭 -##諮 -##諱 -##諳 -##諷 -##諸 -##諺 -##諾 -##謀 -##謁 -##謂 -##謄 -##謊 -##謎 -##謐 -##謔 -##謗 -##謙 -##講 -##謝 -##謠 -##謨 -##謬 -##謹 -##謾 -##譁 -##證 -##譎 -##譏 -##識 -##譙 -##譚 -##譜 -##警 -##譬 -##譯 -##議 -##譲 -##譴 -##護 -##譽 -##讀 -##變 -##讓 -##讚 -##讞 -##计 -##订 -##认 -##讥 -##讧 -##讨 -##让 -##讪 -##讫 -##训 -##议 -##讯 -##记 -##讲 -##讳 -##讴 -##讶 -##讷 -##许 -##讹 -##论 -##讼 -##讽 -##设 -##访 -##诀 -##证 -##诃 -##评 -##诅 -##识 -##诈 -##诉 -##诊 -##诋 -##词 -##诏 -##译 -##试 -##诗 -##诘 -##诙 -##诚 -##诛 -##话 -##诞 -##诟 -##诠 -##诡 -##询 -##诣 -##诤 -##该 -##详 -##诧 -##诩 -##诫 -##诬 -##语 -##误 -##诰 -##诱 -##诲 -##说 -##诵 -##诶 -##请 -##诸 -##诺 -##读 -##诽 -##课 -##诿 -##谀 -##谁 -##调 
-##谄 -##谅 -##谆 -##谈 -##谊 -##谋 -##谌 -##谍 -##谎 -##谏 -##谐 -##谑 -##谒 -##谓 -##谔 -##谕 -##谗 -##谘 -##谙 -##谚 -##谛 -##谜 -##谟 -##谢 -##谣 -##谤 -##谥 -##谦 -##谧 -##谨 -##谩 -##谪 -##谬 -##谭 -##谯 -##谱 -##谲 -##谴 -##谶 -##谷 -##豁 -##豆 -##豇 -##豈 -##豉 -##豊 -##豌 -##豎 -##豐 -##豔 -##豚 -##象 -##豢 -##豪 -##豫 -##豬 -##豹 -##豺 -##貂 -##貅 -##貌 -##貓 -##貔 -##貘 -##貝 -##貞 -##負 -##財 -##貢 -##貧 -##貨 -##販 -##貪 -##貫 -##責 -##貯 -##貰 -##貳 -##貴 -##貶 -##買 -##貸 -##費 -##貼 -##貽 -##貿 -##賀 -##賁 -##賂 -##賃 -##賄 -##資 -##賈 -##賊 -##賑 -##賓 -##賜 -##賞 -##賠 -##賡 -##賢 -##賣 -##賤 -##賦 -##質 -##賬 -##賭 -##賴 -##賺 -##購 -##賽 -##贅 -##贈 -##贊 -##贍 -##贏 -##贓 -##贖 -##贛 -##贝 -##贞 -##负 -##贡 -##财 -##责 -##贤 -##败 -##账 -##货 -##质 -##贩 -##贪 -##贫 -##贬 -##购 -##贮 -##贯 -##贰 -##贱 -##贲 -##贴 -##贵 -##贷 -##贸 -##费 -##贺 -##贻 -##贼 -##贾 -##贿 -##赁 -##赂 -##赃 -##资 -##赅 -##赈 -##赊 -##赋 -##赌 -##赎 -##赏 -##赐 -##赓 -##赔 -##赖 -##赘 -##赚 -##赛 -##赝 -##赞 -##赠 -##赡 -##赢 -##赣 -##赤 -##赦 -##赧 -##赫 -##赭 -##走 -##赳 -##赴 -##赵 -##赶 -##起 -##趁 -##超 -##越 -##趋 -##趕 -##趙 -##趟 -##趣 -##趨 -##足 -##趴 -##趵 -##趸 -##趺 -##趾 -##跃 -##跄 -##跆 -##跋 -##跌 -##跎 -##跑 -##跖 -##跚 -##跛 -##距 -##跟 -##跡 -##跤 -##跨 -##跩 -##跪 -##路 -##跳 -##践 -##跷 -##跹 -##跺 -##跻 -##踉 -##踊 -##踌 -##踏 -##踐 -##踝 -##踞 -##踟 -##踢 -##踩 -##踪 -##踮 -##踱 -##踴 -##踵 -##踹 -##蹂 -##蹄 -##蹇 -##蹈 -##蹉 -##蹊 -##蹋 -##蹑 -##蹒 -##蹙 -##蹟 -##蹣 -##蹤 -##蹦 -##蹩 -##蹬 -##蹭 -##蹲 -##蹴 -##蹶 -##蹺 -##蹼 -##蹿 -##躁 -##躇 -##躉 -##躊 -##躋 -##躍 -##躏 -##躪 -##身 -##躬 -##躯 -##躲 -##躺 -##軀 -##車 -##軋 -##軌 -##軍 -##軒 -##軟 -##転 -##軸 -##軼 -##軽 -##軾 -##較 -##載 -##輒 -##輓 -##輔 -##輕 -##輛 -##輝 -##輟 -##輩 -##輪 -##輯 -##輸 -##輻 -##輾 -##輿 -##轄 -##轅 -##轆 -##轉 -##轍 -##轎 -##轟 -##车 -##轧 -##轨 -##轩 -##转 -##轭 -##轮 -##软 -##轰 -##轲 -##轴 -##轶 -##轻 -##轼 -##载 -##轿 -##较 -##辄 -##辅 -##辆 -##辇 -##辈 -##辉 -##辊 -##辍 -##辐 -##辑 -##输 -##辕 -##辖 -##辗 -##辘 -##辙 -##辛 -##辜 -##辞 -##辟 -##辣 -##辦 -##辨 -##辩 -##辫 -##辭 -##辮 -##辯 -##辰 -##辱 -##農 -##边 -##辺 -##辻 -##込 -##辽 -##达 -##迁 -##迂 -##迄 -##迅 -##过 -##迈 -##迎 -##运 -##近 -##返 -##还 -##这 -##进 -##远 -##违 -##连 -##迟 -##迢 -##迤 -##迥 -##迦 -##迩 -##迪 -##迫 -##迭 -##述 -##迴 -##迷 -##迸 -##迹 -##迺 -##追 -##退 -##送 -##适 
-##逃 -##逅 -##逆 -##选 -##逊 -##逍 -##透 -##逐 -##递 -##途 -##逕 -##逗 -##這 -##通 -##逛 -##逝 -##逞 -##速 -##造 -##逢 -##連 -##逮 -##週 -##進 -##逵 -##逶 -##逸 -##逻 -##逼 -##逾 -##遁 -##遂 -##遅 -##遇 -##遊 -##運 -##遍 -##過 -##遏 -##遐 -##遑 -##遒 -##道 -##達 -##違 -##遗 -##遙 -##遛 -##遜 -##遞 -##遠 -##遢 -##遣 -##遥 -##遨 -##適 -##遭 -##遮 -##遲 -##遴 -##遵 -##遶 -##遷 -##選 -##遺 -##遼 -##遽 -##避 -##邀 -##邁 -##邂 -##邃 -##還 -##邇 -##邈 -##邊 -##邋 -##邏 -##邑 -##邓 -##邕 -##邛 -##邝 -##邢 -##那 -##邦 -##邨 -##邪 -##邬 -##邮 -##邯 -##邰 -##邱 -##邳 -##邵 -##邸 -##邹 -##邺 -##邻 -##郁 -##郅 -##郊 -##郎 -##郑 -##郜 -##郝 -##郡 -##郢 -##郤 -##郦 -##郧 -##部 -##郫 -##郭 -##郴 -##郵 -##郷 -##郸 -##都 -##鄂 -##鄉 -##鄒 -##鄔 -##鄙 -##鄞 -##鄢 -##鄧 -##鄭 -##鄰 -##鄱 -##鄲 -##鄺 -##酉 -##酊 -##酋 -##酌 -##配 -##酐 -##酒 -##酗 -##酚 -##酝 -##酢 -##酣 -##酥 -##酩 -##酪 -##酬 -##酮 -##酯 -##酰 -##酱 -##酵 -##酶 -##酷 -##酸 -##酿 -##醃 -##醇 -##醉 -##醋 -##醍 -##醐 -##醒 -##醚 -##醛 -##醜 -##醞 -##醣 -##醪 -##醫 -##醬 -##醮 -##醯 -##醴 -##醺 -##釀 -##釁 -##采 -##釉 -##释 -##釋 -##里 -##重 -##野 -##量 -##釐 -##金 -##釗 -##釘 -##釜 -##針 -##釣 -##釦 -##釧 -##釵 -##鈀 -##鈉 -##鈍 -##鈎 -##鈔 -##鈕 -##鈞 -##鈣 -##鈦 -##鈪 -##鈴 -##鈺 -##鈾 -##鉀 -##鉄 -##鉅 -##鉉 -##鉑 -##鉗 -##鉚 -##鉛 -##鉤 -##鉴 -##鉻 -##銀 -##銃 -##銅 -##銑 -##銓 -##銖 -##銘 -##銜 -##銬 -##銭 -##銮 -##銳 -##銷 -##銹 -##鋁 -##鋅 -##鋒 -##鋤 -##鋪 -##鋰 -##鋸 -##鋼 -##錄 -##錐 -##錘 -##錚 -##錠 -##錢 -##錦 -##錨 -##錫 -##錮 -##錯 -##録 -##錳 -##錶 -##鍊 -##鍋 -##鍍 -##鍛 -##鍥 -##鍰 -##鍵 -##鍺 -##鍾 -##鎂 -##鎊 -##鎌 -##鎏 -##鎔 -##鎖 -##鎗 -##鎚 -##鎧 -##鎬 -##鎮 -##鎳 -##鏈 -##鏖 -##鏗 -##鏘 -##鏞 -##鏟 -##鏡 -##鏢 -##鏤 -##鏽 -##鐘 -##鐮 -##鐲 -##鐳 -##鐵 -##鐸 -##鐺 -##鑄 -##鑊 -##鑑 -##鑒 -##鑣 -##鑫 -##鑰 -##鑲 -##鑼 -##鑽 -##鑾 -##鑿 -##针 -##钉 -##钊 -##钎 -##钏 -##钒 -##钓 -##钗 -##钙 -##钛 -##钜 -##钝 -##钞 -##钟 -##钠 -##钡 -##钢 -##钣 -##钤 -##钥 -##钦 -##钧 -##钨 -##钩 -##钮 -##钯 -##钰 -##钱 -##钳 -##钴 -##钵 -##钺 -##钻 -##钼 -##钾 -##钿 -##铀 -##铁 -##铂 -##铃 -##铄 -##铅 -##铆 -##铉 -##铎 -##铐 -##铛 -##铜 -##铝 -##铠 -##铡 -##铢 -##铣 -##铤 -##铨 -##铩 -##铬 -##铭 -##铮 -##铰 -##铲 -##铵 -##银 -##铸 -##铺 -##链 -##铿 -##销 -##锁 -##锂 -##锄 -##锅 -##锆 -##锈 -##锉 -##锋 -##锌 -##锏 -##锐 -##锑 -##错 -##锚 -##锟 -##锡 -##锢 -##锣 -##锤 -##锥 -##锦 -##锭 -##键 -##锯 -##锰 -##锲 
-##锵 -##锹 -##锺 -##锻 -##镀 -##镁 -##镂 -##镇 -##镉 -##镌 -##镍 -##镐 -##镑 -##镕 -##镖 -##镗 -##镛 -##镜 -##镣 -##镭 -##镯 -##镰 -##镳 -##镶 -##長 -##长 -##門 -##閃 -##閉 -##開 -##閎 -##閏 -##閑 -##閒 -##間 -##閔 -##閘 -##閡 -##関 -##閣 -##閥 -##閨 -##閩 -##閱 -##閲 -##閹 -##閻 -##閾 -##闆 -##闇 -##闊 -##闌 -##闍 -##闔 -##闕 -##闖 -##闘 -##關 -##闡 -##闢 -##门 -##闪 -##闫 -##闭 -##问 -##闯 -##闰 -##闲 -##间 -##闵 -##闷 -##闸 -##闹 -##闺 -##闻 -##闽 -##闾 -##阀 -##阁 -##阂 -##阅 -##阆 -##阇 -##阈 -##阉 -##阎 -##阐 -##阑 -##阔 -##阕 -##阖 -##阙 -##阚 -##阜 -##队 -##阡 -##阪 -##阮 -##阱 -##防 -##阳 -##阴 -##阵 -##阶 -##阻 -##阿 -##陀 -##陂 -##附 -##际 -##陆 -##陇 -##陈 -##陋 -##陌 -##降 -##限 -##陕 -##陛 -##陝 -##陞 -##陟 -##陡 -##院 -##陣 -##除 -##陨 -##险 -##陪 -##陰 -##陲 -##陳 -##陵 -##陶 -##陷 -##陸 -##険 -##陽 -##隅 -##隆 -##隈 -##隊 -##隋 -##隍 -##階 -##随 -##隐 -##隔 -##隕 -##隘 -##隙 -##際 -##障 -##隠 -##隣 -##隧 -##隨 -##險 -##隱 -##隴 -##隶 -##隸 -##隻 -##隼 -##隽 -##难 -##雀 -##雁 -##雄 -##雅 -##集 -##雇 -##雉 -##雋 -##雌 -##雍 -##雎 -##雏 -##雑 -##雒 -##雕 -##雖 -##雙 -##雛 -##雜 -##雞 -##離 -##難 -##雨 -##雪 -##雯 -##雰 -##雲 -##雳 -##零 -##雷 -##雹 -##電 -##雾 -##需 -##霁 -##霄 -##霆 -##震 -##霈 -##霉 -##霊 -##霍 -##霎 -##霏 -##霑 -##霓 -##霖 -##霜 -##霞 -##霧 -##霭 -##霰 -##露 -##霸 -##霹 -##霽 -##霾 -##靂 -##靄 -##靈 -##青 -##靓 -##靖 -##静 -##靚 -##靛 -##靜 -##非 -##靠 -##靡 -##面 -##靥 -##靦 -##革 -##靳 -##靴 -##靶 -##靼 -##鞅 -##鞋 -##鞍 -##鞏 -##鞑 -##鞘 -##鞠 -##鞣 -##鞦 -##鞭 -##韆 -##韋 -##韌 -##韓 -##韜 -##韦 -##韧 -##韩 -##韬 -##韭 -##音 -##韵 -##韶 -##韻 -##響 -##頁 -##頂 -##頃 -##項 -##順 -##須 -##頌 -##預 -##頑 -##頒 -##頓 -##頗 -##領 -##頜 -##頡 -##頤 -##頫 -##頭 -##頰 -##頷 -##頸 -##頹 -##頻 -##頼 -##顆 -##題 -##額 -##顎 -##顏 -##顔 -##願 -##顛 -##類 -##顧 -##顫 -##顯 -##顱 -##顴 -##页 -##顶 -##顷 -##项 -##顺 -##须 -##顼 -##顽 -##顾 -##顿 -##颁 -##颂 -##预 -##颅 -##领 -##颇 -##颈 -##颉 -##颊 -##颌 -##颍 -##颐 -##频 -##颓 -##颔 -##颖 -##颗 -##题 -##颚 -##颛 -##颜 -##额 -##颞 -##颠 -##颡 -##颢 -##颤 -##颦 -##颧 -##風 -##颯 -##颱 -##颳 -##颶 -##颼 -##飄 -##飆 -##风 -##飒 -##飓 -##飕 -##飘 -##飙 -##飚 -##飛 -##飞 -##食 -##飢 -##飨 -##飩 -##飪 -##飯 -##飲 -##飼 -##飽 -##飾 -##餃 -##餅 -##餉 -##養 -##餌 -##餐 -##餒 -##餓 -##餘 -##餚 -##餛 -##餞 -##餡 -##館 -##餮 -##餵 -##餾 -##饅 -##饈 -##饋 -##饌 -##饍 -##饑 -##饒 -##饕 -##饗 -##饞 
-##饥 -##饨 -##饪 -##饬 -##饭 -##饮 -##饯 -##饰 -##饱 -##饲 -##饴 -##饵 -##饶 -##饷 -##饺 -##饼 -##饽 -##饿 -##馀 -##馁 -##馄 -##馅 -##馆 -##馈 -##馋 -##馍 -##馏 -##馒 -##馔 -##首 -##馗 -##香 -##馥 -##馨 -##馬 -##馭 -##馮 -##馳 -##馴 -##駁 -##駄 -##駅 -##駆 -##駐 -##駒 -##駕 -##駛 -##駝 -##駭 -##駱 -##駿 -##騁 -##騎 -##騏 -##験 -##騙 -##騨 -##騰 -##騷 -##驀 -##驅 -##驊 -##驍 -##驒 -##驕 -##驗 -##驚 -##驛 -##驟 -##驢 -##驥 -##马 -##驭 -##驮 -##驯 -##驰 -##驱 -##驳 -##驴 -##驶 -##驷 -##驸 -##驹 -##驻 -##驼 -##驾 -##驿 -##骁 -##骂 -##骄 -##骅 -##骆 -##骇 -##骈 -##骊 -##骋 -##验 -##骏 -##骐 -##骑 -##骗 -##骚 -##骛 -##骜 -##骞 -##骠 -##骡 -##骤 -##骥 -##骧 -##骨 -##骯 -##骰 -##骶 -##骷 -##骸 -##骼 -##髂 -##髅 -##髋 -##髏 -##髒 -##髓 -##體 -##髖 -##高 -##髦 -##髪 -##髮 -##髯 -##髻 -##鬃 -##鬆 -##鬍 -##鬓 -##鬚 -##鬟 -##鬢 -##鬣 -##鬥 -##鬧 -##鬱 -##鬼 -##魁 -##魂 -##魄 -##魅 -##魇 -##魍 -##魏 -##魔 -##魘 -##魚 -##魯 -##魷 -##鮑 -##鮨 -##鮪 -##鮭 -##鮮 -##鯉 -##鯊 -##鯖 -##鯛 -##鯨 -##鯰 -##鯽 -##鰍 -##鰓 -##鰭 -##鰲 -##鰻 -##鰾 -##鱈 -##鱉 -##鱔 -##鱗 -##鱷 -##鱸 -##鱼 -##鱿 -##鲁 -##鲈 -##鲍 -##鲑 -##鲛 -##鲜 -##鲟 -##鲢 -##鲤 -##鲨 -##鲫 -##鲱 -##鲲 -##鲶 -##鲷 -##鲸 -##鳃 -##鳄 -##鳅 -##鳌 -##鳍 -##鳕 -##鳖 -##鳗 -##鳝 -##鳞 -##鳥 -##鳩 -##鳳 -##鳴 -##鳶 -##鴉 -##鴕 -##鴛 -##鴦 -##鴨 -##鴻 -##鴿 -##鵑 -##鵜 -##鵝 -##鵡 -##鵬 -##鵰 -##鵲 -##鶘 -##鶩 -##鶯 -##鶴 -##鷗 -##鷲 -##鷹 -##鷺 -##鸚 -##鸞 -##鸟 -##鸠 -##鸡 -##鸢 -##鸣 -##鸥 -##鸦 -##鸨 -##鸪 -##鸭 -##鸯 -##鸳 -##鸵 -##鸽 -##鸾 -##鸿 -##鹂 -##鹃 -##鹄 -##鹅 -##鹈 -##鹉 -##鹊 -##鹌 -##鹏 -##鹑 -##鹕 -##鹘 -##鹜 -##鹞 -##鹤 -##鹦 -##鹧 -##鹫 -##鹭 -##鹰 -##鹳 -##鹵 -##鹹 -##鹼 -##鹽 -##鹿 -##麂 -##麋 -##麒 -##麓 -##麗 -##麝 -##麟 -##麥 -##麦 -##麩 -##麴 -##麵 -##麸 -##麺 -##麻 -##麼 -##麽 -##麾 -##黃 -##黄 -##黍 -##黎 -##黏 -##黑 -##黒 -##黔 -##默 -##黛 -##黜 -##黝 -##點 -##黠 -##黨 -##黯 -##黴 -##鼋 -##鼎 -##鼐 -##鼓 -##鼠 -##鼬 -##鼹 -##鼻 -##鼾 -##齁 -##齊 -##齋 -##齐 -##齒 -##齡 -##齢 -##齣 -##齦 -##齿 -##龄 -##龅 -##龈 -##龊 -##龋 -##龌 -##龍 -##龐 -##龔 -##龕 -##龙 -##龚 -##龛 -##龜 -##龟 -##︰ -##︱ -##︶ -##︿ -##﹁ -##﹂ -##﹍ -##﹏ -##﹐ -##﹑ -##﹒ -##﹔ -##﹕ -##﹖ -##﹗ -##﹙ -##﹚ -##﹝ -##﹞ -##﹡ -##﹣ -##! -##" -### -##$ -##% -##& -##' -##( -##) -##* -##, -##- -##. -##/ -##: -##; -##< -##? 
-##@ -##[ -##\ -##] -##^ -##_ -##` -##f -##h -##j -##u -##w -##z -##{ -##} -##。 -##「 -##」 -##、 -##・ -##ッ -##ー -##イ -##ク -##シ -##ス -##ト -##ノ -##フ -##ラ -##ル -##ン -##゙ -##゚ -## ̄ -##¥ -##👍 -##🔥 -##😂 -##😎 diff --git a/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer_t5/config.json b/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer_t5/config.json deleted file mode 100644 index a9f8cc7..0000000 --- a/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer_t5/config.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "_name_or_path": "/home/patrick/t5/mt5-xl", - "architectures": [ - "MT5ForConditionalGeneration" - ], - "d_ff": 5120, - "d_kv": 64, - "d_model": 2048, - "decoder_start_token_id": 0, - "dropout_rate": 0.1, - "eos_token_id": 1, - "feed_forward_proj": "gated-gelu", - "initializer_factor": 1.0, - "is_encoder_decoder": true, - "layer_norm_epsilon": 1e-06, - "model_type": "mt5", - "num_decoder_layers": 24, - "num_heads": 32, - "num_layers": 24, - "output_past": true, - "pad_token_id": 0, - "relative_attention_num_buckets": 32, - "tie_word_embeddings": false, - "tokenizer_class": "T5Tokenizer", - "transformers_version": "4.10.0.dev0", - "use_cache": true, - "vocab_size": 250112 -} diff --git a/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer_t5/special_tokens_map.json b/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer_t5/special_tokens_map.json deleted file mode 100644 index 6dc4d43..0000000 --- a/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer_t5/special_tokens_map.json +++ /dev/null @@ -1 +0,0 @@ -{"eos_token": "", "unk_token": "", "pad_token": ""} \ No newline at end of file diff --git a/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer_t5/spiece.model b/diffsynth/tokenizer_configs/hunyuan_dit/tokenizer_t5/spiece.model deleted file mode 100644 index 26a2a78808e998e45d2e0c184265897e6695b914..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 4309802 zcmZ6U2Xs`$_s8uDV#TZez(SFxv?L@Hu>=x|R0Tvu#Z9s#OOoA?>?VMy2{rWIy95ZK 
zg%U`iC;=5qNK{M$mJLk=Y}ot%GkZ6W|9Lql=X}3&@66n}GjHC!X=m-5a{LAKkuvFyMyTfDn5?s68Oe9?-!a5N;2M zum^Os2XwLrbhZaX+5@`T1G?J-qU-_D_JAI?fDpSsLhSwsvHK&$?vD_=KSJ#O2(kMk z#O{v}yFWtg{s^)ABgF2H5W7D@?EVO``y<5ek5IcmLhb$twfiH~?vGHrKSJ&P2(|ko z)b5W^yFWth{s^`EBh>DXP`f`u?fwY0`y((2($Ym z%+)2)Fwq-0qJEyFViA{)n*qBf{>F2)jQb?EZ+b z`y;~cj|jUzBJBQ%u=^vz?vDt&KO*e@h_L%3!tRfbc7JrV`=g`XA06%f=xFyxN4q~d z+Wpbd?vIXke{{6_qods)9qs<;X!l1)yFWVG{n63xk4|=fbh7)SlieSk?EdIv_eUqY zKRVg{(aG+QPIiBEviqZx-5;Io{^(@)M<=^KI@$fv$?lKNc7JrX`=hhnAD!*~=xp~# zXS+W-+x^km?vKuPe{{C{qqE%~o$dbUZ1+cJyFWVH{n6R(k4U>eBJKW&wEH8{?vF^j zKO*h^h_w47((aE)yFViB{)n{uBhv1VNV`8G?f!_g`yo4ay4n5F&F+uxc7JrY`=h(vAKmT# z=x+B%ce_8j+x^kq?vL(ve{{F|qr2T7-R=J9ZudubyFa?y{n6d-k0`r8qU`>Nvil>- z?vE(DKcejZh_d@5%I=RSyFa4r{)n>sBg*cND7!zR?EZ+d`yoEBTR9$`5Gok>0C}YMPg~!rTd(f*q(XYj`!YMR{Y%Zcp%bnxj5ys0D3|u-B=k&{&Z%RjP9fOROV9g&IGG>+zx47 z1T#fRl^4DNV&|jMTdD&nR@R1|sC7!D%zg=Cbkk(#2`fBZ{(kRdZHjR&`Sj{90{OOW z+g74_fHki)r(`~DG0zBTwF;)ibGYMV=xL31I&-`{dKs)$F;2c|`eh*3@_cEz0*Hl; zW%9qETkGD4Z_4*0nKtmNfTW3}_Vou)OX-vc(o~vA{`#6gaH5=Od#cu^@}07*Ba9?t zWYrrWRgabZXEZoonw$rznWKD8nYHqCt=1r!*ZxevG>2RM*#uOzQ0@J$oRelpx3d1!)dGw+y=en_8vba%*1*73)cpOg9kyP&swYg<5@A znv^xZSj&=}vMZRV8O>>FONgJWk1(S-5!~`1x6;-Q{;g!6!PS^ z{T_&Q{`EQXecfLI2Bk~WXF%wtI_0nfs9A@}d$TQ)AW;V(>Qtv!79FwJNT+Q13P#yR zO1~F=t<`cR$%|uQnp#r{n`VhBvgEqOF!HQeBror<1Tjvz@1EZRCU$ApPBKP=wTPkZ zLpsUUiEyo`6gsc9-$K=VELBGa}lJCB=;vL#7ulxcdTenj-)&C;^PtK5f zkqT+ck97yCF^STBB81$>NvpYvI>?*LRk!?rEGe~g`$)?Z5VbGPCwrg#vo@Iwncm$Z z(Q!LGHx*pdw7~3I{?&@JE-ZEtNyA@p@Zyw6M|alR(Wlc6`mqHH*2`pAq@`z zX*#E~wn>i~u4xAkx|ipq#S2h1Cea}seK4Yh%dcO7$d8t^xdx~X z375U+EfOW?>-4Adyk%6+#&w5C!VsSbI4sOlXm5zjavCO2u_ zM4mXN!M-%)7yFUdFGgVa1nlH6VA-`ZsROH>0$Z9!7ygGfs@K>GB8U<#d4$wq}$ zo223Az+jWv&GQ)hmldU#eAw{6+MH+_>z{*RK_@x$5=gU&CAq?hDu|HI(-d`+;WI&M zByIlWHCMfD^7bW3y?Q_u#Eg;Y|Falf;I&O)YRni1UCO=|aCKQ<85L=%MmgoTUNDSl zFJC&XaNchB`C#OP?_V6F@X%HnJ{hP6r=#@MLMuw;0?FEFv9S)>ldGzyGv}VpoFl%s zprpsEWO$WI%wUZ~E?-cIGg)#PUmZ{?T`MPh0yQTeV}-IlDxso&eKDw%D^rF8)dwN% 
zW&S7?Fjg8eT4D7x`FskH0!B;70u7`sx((FS&7kG2Leq+FrFoZg>x_*`;};(dRIV(@$3VL27dvW8=3jTWX0;8vS*( ziDbCw>W|(CS6}v!Q~!ftrrRaw+5m%z`=o7G*zI-e(8|h+%XX9%NkxA`G?NqtP07z$ zG|7x_T^1eV@yUk^prp&o;}3u|nd(V0v^PwP)?CiMY>BG2OYbyTtrNU5 zJOdi+M-Nnt7w3F9WhmVx_ph=B>YPae^%nZwZdZ)~4l@nQDF<5sHMIzFwbF22V#C`gWTdgP9ZN4&Z49HBNeXM@WTFNj3rI*dQ;@^M2*>fKtH)^J{S`m$qIKS_23aP-1zkynU1f0>S-c13e zvV(G$0Li15+};>Ou})>Kkba@6;7pYHj{!AnN4(S@Xfa-Cn}@(O*(5$l9W@l8Kvwp~(`x9cX3QSt{RE5kuwsKLTnN^sA2<7HsRg89vSN!!hR4%87FC zUmEHC66yc92Aj{yl4f;k6R66S*`XFoa>>{ZFm2}tWN5e&y1$N6r=z9Ez^J&Z#VFh% z!_<+c+ke+k9NAMEM_PgeMrwb&V#P}E$kqf)$hb>Zrdq;ummErm8K+Jtk+rjfIBTQS znQLi*K={MW8z8D>P{ARs+Gy~l zl}qH=_8QFlwND68GmG^}_w1r;L^b2Z4chyieA@1J=@1u8{jb)EIQdg;G`l!9kD9_1{@aA>q>edsuDWOXT{W zEt<{{`2A~cu1(OFEZ^J#Q{7}*Z;4pKM2}2r03(A(Z^3@WcmcSDytCw3WgU^!H7K?Yvq%AOQZ9AoK7lcg8_sf?C;twx1 z@ote7qkOGgIApQPRq{%O#ZnUFy-#53-N&Wu93)syQhDe{i^kv+=Vhq2SlaNxW0U0U z?}TXUsM=VyQ<~JN7qEsQU;R53(m8%!AE>Fu&=s6$0M$q)8S&p|@v{A8J7j%pjmf9o z{UMfUL@MKivEgualG`T>qEwMOc;*#_XUP;Y`YC*gSeEjy89s6 zR|2JpG?iv=gR}(kE_vgy#k@YL{|StInd|IRrI3$&mnrW@8?W?T`*@@*2|S9QyGmzOX#(sKY*Ti6S-Hywg)sXqBDQ`LBP z#PU#i#;b(h>%>G$V!kV%oTL(mOD@j_YFZ=Y$TXdAOwzsCEu= zN|zE?pvTxPuOCpQidpjed%$3;=Y9F@QI*6x$0|3`&)im}ntVOH zcS>QC)-v_9!bAD;@+F|A>~$o{o?n#ub8=08U-c(cjf?TgXa8Pd9@$*))>_$EpR}qE z!)u2p$vvX5Y@2xR1*$nI9va=+W>%cOPWh+>j5aq+!a}WZn$V~SSkNXO>Q$Yfnn+bH zufEn#z{x7sC1HIn&C*S2Qu16-;*kNvtVn#tq{?Q)iNqtC=>TdCb(U!fmfnd?C+Eo> zF1Y6H_IRbYS9Qw^8QMIV1yOI57mIJAr5VSp?QK(_WZ+7cnyEm_HA&e|K226h@bK?S^W;8BPf+w%9XHdEuzVqLbvp2Nt81 zXMY64q+{ddl`^2_b}I8t@s>krX6dDC5_1SCUeGH=hWqjK~zKe!yiZKqk)I(!2v%0*O zC15V~^FA;VeoprE2U!V(%R2-AKM#{O&%o7%EkjyK#1JT%GomVzIxj*rGqQdx!(yX+ zvcWJ6uAU=XURB6v*09My&8>P84rsgxj&sb*`_-l3VAEj0dEH7Cnd!=&Z(1TZoyr?m zXdKQSvks~C`rAvrXXI+!Q<+O-$$OU6pHt*tpDlwwB7`zjPh;XPzFbMgct+q8KY?I$ z#SFRTxI*48)6Ocyp~V+~*vqVzocj?RbhS&){H~I7i`UDr{}g|_aD_Z_&F!^Xp&ew( zjW9A`F64`w6jo1_59(ALnhvd&0FZVVlkerifdIKK@TXhyH z{X^mEfH)@(a!b1os(rR-x7^hc9IT}BfWP*X@bZKR#H2OrbgGrLMUs@5E;OTeE9f1fZvt*WmCc@%5vinXAPIXE1 
z`+-^wBTDzA%2yAlh!Mk%Wn>P*gd24i+JS=Ml19?SH|ral%;;i33xkVbs0iBt`~!8#Th3L7ds zoF!8`0X5009cYd2VsYlq4#?oHaFW2W`I7WFB-nt-KHE@J-V*t;2av*;W$1~$!NCe4 z7H{Gkhgu*b9at8%rxlxJyHEMYb=ev672(QY*m15Rhokdb>dnE9V$2Z8vE6}ZW=uoObVRYlbk z=|I2wNNJ$QoGgt$Q_ApVJZa}vYD|W``7*p3LXlX7TyYYp)~2}*FO}jiH5#d$8ox#E zsDTC_4v+l#HB24FfPWX`wb|cT@d6_H+v7Sy5SFizJFW+6 zs;&_-@+O#?FrsuHUC7OF3dsj@ffU?s3E45?k+}LQ#J=a`z&#L6hw<=V55m;Zr8%h* z+6amZ-7M+tYicQmbe29XEf(vM7p}0%<+9~rE0K`)?Io<8CgS7;d~YX9QEL7^b-r<>tje@~fFzAYUxCltM#fz*?1DoVGw#Yyg|;D`pjL z_C|=Q^gS{!2pMT#-cG1mSH6?3z+0qRr5k0*TR`&gZ=51iJ_trWD7SwY49>}ymkg%Y zeL<283Z`>_zHy4-S}ISBWC!D1*tJHse2hfhlR*DcaNbgk_sCD*z%UZO47#AObe%l# zEl_LeSY}{C;g_GSSc!b@{|VE~16|m#Ywyx>7cwfWf4vg7+2AqZ4v1#%=EFbs?x11; zif`R($x@hFd7?2?OAy{(UT*`#up_(VSUXG4;Ts{dxtxlUR=LVMkiX5&`>#?X0edA(sm3i zSoSPBl!@cu7|JT_^4Ao4(q+^ND^8?auFX?{W2D?>psD2CT~<}-sm;D45=d?n?qd=QRz9B!|?(iE(XFwh%J zmlG{CA~RSQb zNY#88nbs7q_saQ25FBF`+UG4*LB%3=!JRThZ6D+uEhQ^duQbms-PeQFP@<*DuelcI z-J2~@n^f2It$%J&e0D{yEGkm$-#uP>lv;7(9Q4UqRq)`0w@YT8gyBOb%pdq%Aze}5 zGe9*dimAEP-&;{exa9a>u;6=xaTlMC&Hf`q%`D%^zL0yax;v0)HNLtANU5FFbG@6u zs#f_XQ$yjmsEoER>33SPWDj+F`3h%oJW1Za8@XET7$cJ!!LGDNdgF(hT5_6Kd<&S` z--9<_S&;g-ZS!xRDf`MXYA5i_oM>H+fi?4i3 zljOaw8q2?dUN+K_bn{7QPnEb>Ccd)|IM7+Fb;-B=AZl6+{(b0a)vKAdP`(}n)~Bdq zHa*)9&%yOsFnt2k8N**7gp_Cy56KV1l(-!%gx&d~rR&crG%tkiul&D|g{C=9j3GqR zr-R!eU1BZnN@N!Ix;QxQII>p4M*ztvu&n)~A!@RlPIPp<6^&_Or~GWVy6Y*kpBhFY zo{`efqp)m?e3AkriIHjERHsv#`k+)u>8=bplA)p$Dvo*a*GFq4cHota18Tx2Cd=g4 zU}}B!0X}>33@0PLsmEmkP5s9EWbi~Vmb-oQEy;#x9KNV`$et;1O))e=Zp*fqYpgs! 
z6GkFQ3`E<_2a|SMqTH|wh~;OuE%|!?T#+?aoPqu1%k>tk&XN|n77OhpKW>7NMw7Pk z_jZLx4#;=Af#hg-%U2gv;7m=EA>S!ZqpkmR8BD1UQ$c?zY}rEU{|h8)x!G+p zzh zIJvS1=8}PwCAM^JteootQEMGZY4ZHzs>Z}jaukqUmv>d#h z5=N?4<$S451Zu8TTjhJV3iu{}Iz{zsN_NPCQI=BgBx#-s(}evSX3OIlP_;BAiG7|; zv*1|Da^%0$fX1@f6Xn%e;M$isv%%Bl#s#WqmdjQGO*2}Q^`w-pf~ZLy+RM^4FmgMc zwefV;Qn~3()hyk@dh)$FmL^LY5qU7pfp@)m3(m|}x8wS;}F*i43;`~*z&z=TSTA;C7{ zkt3hNf(_EaYU`KBHChbQnvE(o+VSBh#;eQ{K`X*?S#Ot06I=bT%-vW^^q!%3IS?XcMspI7K?I7?*=f8d|I|N1mUw&nS7ev z2R3dlUh9@cO&}D~3=+sLF$}7y@#(s-BTS1{wTp>?o4UZUodw^62C9e&Tc=EZ6t2#S zW3yAy6DnrUFk1?|k{AgmT}R+U?8_c7tSQBShkGloDLyFA7_1H%pmXXeL8*Uzru^A2 zsF^)eG6#Uw&Y@1}|FlYsA-RKsS|#h{iJ@RMvV1Y0Za2IH$NtyaiX#pfd<57R&g{hG zBtkUF!~{8SSinge-7?w@(ZVIOy*I=IB^ehzTM+WOGS)|^DHMa_=u9xRba?JN`86f$ zVx@kT3LSLv?jUwEpZ{AB$1u)%ev0axDV{7Z7_8>8%9fAA&j+PuHR+$JNQ32C%`>!X zy2dnze2RkDzbi}LHCXK)#Nf?4Q`P*rMe_PAFy*7(XG`d82nAxRuycXxc-bwN62$G> zhsoHLgqwE4d?jCs53EwPn&Q3UU8{IxO1vz26Rb|;c*L??impH=41S>WfR(EkfmTbDm? zxqmaW;J*`sLEcmk-O!JJXk2RGX=(SD#)WuzzT$3d5Jp-+2R!`v*ao=rl z+<2xiQwH1tCP`OK$rQJYuMeZPW8E_D9);BtB(9-CI<^~H0I@g06F){)J{;6emVckn zaIbSTUnY?d^>{Kn@E>_f)yBFcv5zHj@{A?)g{p&lvy1U*i&dF1x@`gdQ zdL1#QCA|F#0a~PrET%{sk5Fy%eJ2}S5R#3f?{&zbkt*UdkdsSZD;`T<3+0gEf#z8x zG8#)O`DsRWR` z>0BR~Whsv|OOwY9S1WDjWaE0UDSvK`f729M zztvK$p2VzC!+cdW=P`O1jMonBkd}LZY8Ep!&b=@SKwmUl&K5#~O@i0qrXr2x9U~R{ zfp^utg@#*JP`2Id4`Yk{7jF|9G%lB~fzF5Ensn8h(#tSSErG6Ra48haFKx_}BZsYs zDIUCh`v-7iMG0?~KMc`SnKI88&xdeqZ}qCgeH7Ho$J#T71g&Mh=A|-=`Zus4{9~mK zUBN0k4JG$vUZ^9^f`c!FnLPamG1&1VXy zO6k8){DRw?N!(Qp1C}s?OS~GW*+)goAJ@P%%Q(I)UcKI;$?Rx}yhWq24a9M$6|MYD z`TZ^xuuq}h-BuJA%h{R#gOY9aWCki9H-+Fi_u=I->mjgKjn~EZ+?$5u9QF_$ZlQ5} zNvyrrc^IM=`EwS_Q(-DIlV9>~M@yE*j-gk&sw~~ZC_eN_2zmL~Je?P%dibS?Jsu1D zLCMNFRxEZcFmPNTRq-0bmzpF8Mk@B_ z=8G>8Z2X5QdN=1l zvCV8*yb#1Otm-_tN#mc`Bt5sN%GvD6GJ1z%vn%D#-C!*y^9`LOvH;F_;uZO5k7{|D zsUKYeRh7qhDJ#^ZlY0U4>`UZ+c$8+WS3qV>QyYXV>uZtR9F-89T4ydkU zwVBEApHz=o^Of@3ZxAdkebX&5e}T0S4j$34zoBYX_XKJF?-e04l)VfkU$!9sdDVTj z>7UG^8*yC^C&`q!5pwV*Flwbm@@X9)$z>#%SM$+ZRO#H3?Q+%aifc+1%4&nvhJLh+ 
z?qX3l!@4K$hLT)HirjdQM)ESUAABE}_USKN>@O_v7iG(}4_NVztY(S1k;XIat}#%D zF6pi~r$^>Jq^kbh4gO7;(!4EHOC8xyT0A@0q)C@oo5%+pAe6Eud#k+F z*@{!PK>iCdGb`+r(T`fPL7bI+LSy+0X39t1!KB^R94a3e4bgHt8OpVON@YprK81>B zHIlgzK!#d+m$H{gqvxPF;+2_$DZ0{vCuo})}`4*r?}EQ97}H%M-V2r;MC(A z25WvTLtBa03Dr`kBumT)75X>im?i7i;w|Aohdi1H!)yj$iSlo%C92vYQKK~;c`_j^ zJ;y;Yj!Dt$G8Kk|$;Sq2R^?=u^$?!8K=yv0qe7s=80k5?M)MaO7y;2Wt#elj5>LFXkGds({uS!8)iC9M6+`oXn$q0}1!#+or6Ic%UBI>Icu zG<#M>?6#Y1An9J5o+J4N2dj4n3%omqsv3*9EuT}hRK8P-UI7Om=iCB+?m_7sLx`3( zI!&H+s;)nOy!>u3o<3gq+4$qTrOgPXw6y2hfbobMsu?mcoh&=kRLmll>7qBLL$S@D zlP!0T2AewSER)6pgU;E>+WKzkmPv@_KhP;HCR(hQQ+~O^%I8YzWGgNkG0aNtqHIF$ zta~#ix#P|J)=HN&nN1j(RnL{!IY48>o?>}?o(8iyzT$Pk9vds8Hdt!09Q`$ntkM`T&B+C83yq>{`f;1a;WT9P z?N%IWcEC;*u;O%fHxL7AiZ}Y?oFSyeSBwlO1Z!IHjC*2=f_f#JVrAL^2*$kFPPQAU zYKa`Re4rE>aQ=1)eIKm-2NgsAQECEI*O43}t`DH3!q@WB<3Oya&Ss}syU!rO`Yob< zo1TP|4#z9lq{)_o6=w)gKONpKf1ia}t-gS1!dos7!f5h!x&9&ub4s@|9b&L?^=>nz zT<3v+DQAmzot>R8>*_+$tH~~qO*bnxrCxojVh2Z=p1KW;xiKCoG7tmCvZIFc!FR#5 zoZNwc-=Do(WAS|;?-{JNvU>Q&1D2|*bR+X@%^MLAC^!WzHAMY0i0=<+3dN`-=2+V{ z2b1{|Pe`7DT3#*=aK&?2tra08;xfx;9Ug(G_r|(Lx}{!gjamApeBS}6wxsyv@d!(? 
z{2-s}>pH@X4@#!Upe`DT9UaXdwIWq+l+O**)GMao$LG6hq*(g-y4@|!53}TR!?2iB zS~org)N+kvxiC6NSq%Mp^HK@vZ%NaSE|u*ApcE#s7VzajNU$la=Noju)5_yq$-X2S za+_!2>I+u4%~xuu3DGhRW>`NIh7s)lTVNo?`*z`OY4V(6rlrMUFcA{HrMvhx8V18u z-f;1(vR(f&RND-@B;p}j&t8xD>UhVc%5(z$ZIk5gkrvHx#>m8Eja#t<=Pq@tY#_5H zT<;K{3dNX<8@9+}>59EfJ>?m!p7Qc-ej-CfF1ob_V=R%Ix6k)iXbhj$2ghEKVp-Iy z8qusb@5)qct}2^8(NgQn@zc@;K@mgIwo4V8@BY!NRn4E5>(4tNRcn=+8^KyZBV0+zawON{W-uZJd2qZNaw=zbRWB zrG${IKX;ip-_e+Co*Xap--Bpj*p|R$14mWSEMc|z2&^^J4|5Wa1w~vuanHwKHLagZ zUO282wpGelgH__;_*0ci(wO>)I0?q$vaRGhG)MxoW>0*fT4wupRuI=rn=cCtrYR2} zE{(qesx9d>rm2Q%F5Sk+!gH#}{(-kH04b=GU0bUzT5#a1?Ku=!>S2L=%$&Bk^YGaIRr1);MC>zb> z;oG353TXem?*vn@$|Z8~E=$Wx)o~)DHjN|uoU+?c&7f?ZoNovtiAyUBW!QaSl3;6P z!$v@TNVz?P%TJm%R@vd1QrZ-#3G`y-`XLp>q)MA+3Y&$=KL)DKAg`=z4l{PLJTRvv zm@4y{{j%@0f@tylIa?&GjTMVolYZ?i*2^RB8Ack1N?OYkAz*D`jL`CBKnFN!1~w-R ziGb*s!b}z%n?%!?XTmg}NWLALcZ8~SgHvT_CrjwhpCqptN@CTM`MN#USqWPV5+uEg zrI^eJ^yFhueI&k~wf#)-GFjJ+kbsSC#noN0mkwfDU&ZF;l{Wpsns@npS@VoaYG%xo z&4Vln&19cpRK}4_()4+Sjs&Uq0x(cgwvXixQ^I$6o-`j0QJ-+wE=z8F1&%Fdjk=-J zk}*%zQJ#+1h-@>6mFE&5!N-6D(^-a^y4t%;atziqSwBjX9WIUMX)v{${R4earzoK2J5hJV3qY zgS84-&Pc++&n{N!rB&+e3Zon`F6q0>67+P+UxsN(0taSp zSOr&8YNoA`t5;j%lUZyo9kc;Xkyt^0r#^!rnsZmJh{LE^NDQrzPpmFn9SbDEXP|HtQS#Ui5Umt1yUND=qtro`HIobjI}rdinVT<(DRQe zHWNQH9tUgNWnb&r*>k1Q6NHcoH?OsoRy`q_B^|N6V<@SN;;cwsZ?G0K#UM4b%eA2AC_@9+WZbKW&gEK->Iqqf8qD zQ%|rns8IfS7OrNc(1RR#9!i>=_}~8$5DU$W>lZI;aK!@oc(_JjaB7w!A9X-AJ(rm~ z@Wp5>)1mGgp{nGZ!Nw6ufRc8_WH}Io9DmuAXeGd$Y^wOvG?IT?j%lEWy-=*E$(|(L z)4}Rt^X{kREY8pnp23>Ttu>k3rP_p`-G`b=I&V68nKuBEe8Vjh^Hd0_Po zyPm!B%zQZZaOQ*S^9;ezR36SI3&AA8CWkDabX*L>BD1lrVu`E`(uA6l?TnjzOEq3g zPAdPhOf_+^xh&1GQj>wQGxJBof(4_8EO`^E#ssDS7Ohidrk0!I~nCXTGI2XUZr0G+s^dR=4=~L)7_^W2M0%7>@zt zYd*ASooh@0$(XBscD<+A!4ayCN5PoF$UUACfApc!z_IyEX?9FCoy;xHGdNhg>t)Pw zRlKxfpV|HS_GeJt8xW{07Sw5!Gb&YyYVhxwB=ILK8qR~i_#_mQ{JDj4qz0^y?T9g{ zyluZW0eCNwng6rEV14B=Ug%OyJ<4tf6dGtI)rSrdrH@%6{INSAt6YzIzVaH~ZV8TjqF4XQO7 zl`1uNSgdz%St_t#4`VK-D3G!Ds@&`H^0J-Q07fO0u5rnlhKl_g7E0R(z@{zdGd4f> 
zpbF9%e*f24aUxH7Xfv>8oHjU6{=M~B4tt}x(Uh1@g>0%XE-!LPdEN4xS#!gI? zIc+UgHdk&6fl<-i()C#fpeCEh))RNQMg6%`WoZNy``t`7HRuG^vU1uxi))Y?bXI9i z$!w|D1+10K135)j7*0$x3;AbPFnLriU_3rYW zF9yw0GFZ(L?cLN%auJuZnC=KRnvxlOAn!Vd>82G_()cm;%rB@HIdZKKxJkSef zT)&%{^E>*fnuCpWdxn6?BqdSOpHs+{ZT|)#cK3f?LzyyLX&@zHDafBYUaor)ifjBu zS^mQHewJ099Zm@Oo2@AqUICjN&Bo(991t~>^*V>#7z5V_Dj{$_=42cJL2DOEqZZfBy5Oa^N@W?PsvPK9gjWH?jJ#R)x3NH8aJ)mg3=t_hbflmf#jfjRn3 z8%)c9Qh46obw;a%`gn>_!?-LfT7Q@PHr^5@GAH)g1gKhXhR9C2V(`y%C6HP^ zzV`Tb{`W|y_XtoAvgM@6FyprT*|O~eF!_3toC)%QAqdXp@0NlO!P-)u4Qw0uzyu(4 z@>XeC4psw`9n96W_!y46+lR|I15x{W0k?#G5)2NJ@t*^Ojd`&&`~pUzqcdDVd8Ki@sBV zD^`9q5UXNoS@pjMlQ~K*>1qg>$2mthd`VvEW;ls48Tt8TAlb0&d^&5jeD*7pB;t7E zbiB4ntp)x)b7j%>P(6m|r0JYAQB5Keyn4Xgbd4$7=hRZF0}$>Ct~KE7WHB2x|QYWb9Ubb3B z8>S|wFwbAFGgM0y<06N549B8)hA}rjYRQ-ZWQwY*M)l|A`SaGv?cJbwrnHD6K4A#) z%;R6$T?K4apDyits-R}-dfC|zY|Pg~e*K~|x%^b|-IH*2-hv%9tTe%|$(44wC zWUpZqiQ`IZWtdB&mTe8(ZTR0vDArbIO0Og!2A$crOb#1N0vR4(Qk=|B);OjdyC+HU z6>9c!><>~ia_1BB8n>lv=3J7mP#>FmS9>&*n(13*UK&`0&}w6w^fW0n z9E;32Ejj~?p`jh6d^C{q;G8^pZwv(Q2PaWu4F$rlB1e3#^maV5yjdthp}gUc>E6sS*% zK#HlcVoh&H2W$H;NiJ0wW+YBa=>hUuB3!px6Lk7tsU#U_mUz6Vj zaT->aoYQc#)#`z7z?uyc&Qfs^YJ9&?-u*TRbLI2zfNC3a`E%ry;p84ii_N&zlfzp^MoeuK!fk%ttz}ZW>4=Hn990eho~E zQx1vh>zqEV`GuV#NvzmmI@L zuV#9I%(`2nun{j_RyBkKEzVo$&*O(G9w0U_Uz*_LZKIz{WM%LwK zFV3bW5G_EOgNs4?G_`oVhr3(~3@4cjJ7>ttt-zW?0`n%HJz`NW_5EsV)iifN-`qy= zw+q+EkWj^r?@BgPBW<6A1)p9|D&J8zMG`;;9okEi zZa~f2OFP?cC?+O5X!V`CLySK+Y?EFFt6mLH&C^jTO63b<%br@d5# zIW8&ct+CHdoqldbvCMl)Y2c0(ZtCi*n&$XbzkXm6c{yC8kjP~%|M&PGeMVHifQS`woIo`AA7vfnT)@fS_-7cQ0e3?*xG z9sbcc#g~?5%H{+xUT4og*Ef&UIOebr1BanWs%I`{_&JC@bbftO2-gPQZ;Y%qEKpTD z<@QlvZ8O}GxK)-Ij_mA|V(H~od~U@Sxj7xIZZbR68yTtzmLHUtuL#YKol#>|H?X@u zQ$8N2$@rKOZS)#AnB76n(WJhvs(2vH$9CX1pp=cBG5Kt~stpp}1PMR=`cd$YJf7xKIX(qbOQNJ@&2_bRT-V0>#42T-S=bT6WHwUieGc$8< z8A?huQzyxQ1z=JN%>N%WD}r4YS;}cnpQJCgXsTzdykRK$c*n49EOCjFsywz`9b5(> z)vvcsmDH7B^<30noDQ5=~oYgSu=&h6#iwJ^1lIqyO_Wwqw#(yv#nHrE`?SP#`?c;zKZRj$S~eL=fTV6}vc3%r{(A~ll8ewsJ7Ldh?1 
z5nSH_2v#2BCeJ;V+6Wq3`Cf~r`C{ayA}EHr=rQjs25a(NxJo_9%yHjT`PMK~8uQCu zM@oW(!=aM8A3|1x`^mB(Oo*5A0~(6Q9(o&SvPh1X*@wVdI({|8Edxs7_~Pu$g=c5a zkhcvrZpxl6OO9Bo?$R}5WWC|)HeU*dRo}J5X%1JM?0FxK8QfH{@tDH$Ig(rs)FPEF zll%%8h2U#-l05#gYNdN*{3k%NJ-ov$VV{D@W}w-}=aXT_Ra8?lUq%`nthhb9<-DPq zc37y~@|h*f@&U`;=rX}^CB z)$}>sn)j6@r1zIcPHDVvrp%T4rxkN2h1_sPBbnLUG`Z{RAfpc5a`iXLXfW*~q`^fP zCRMI7hmF34Xx1zy$iDBD@&KASs_DO~YQ-XMPx$dS2##*sM5g}^R12Tx=FUH1TDSZF zgID_g1vkZH8GGm7U@~JCWQJV*4}=sLjXh+brqHF8Jb%?gwJEU0(<7U%g{zbe5h*vq z)J6}(3&%~+d+Oe9*6jZOpNugAjm0DXw_$1(t9cPJr>-UKjoxq@jaQ~M2P+nqz)eCc>V+zgO6m}$jUVFFVn(S=(%wP)6P*lrB!1C{H!DBB!&^oNp)@URtwI9L*H-%UW`0 z3oscbnZ@#cEj5n$Hf1`=^DtCB(>qO;wT9I`i;UeqG+bZI-IAE3k+`-5kcB^IwTx=7 zxCM(@dkxlzRoki0-^1ZrtZsA_*%46f&&yW_?u_1LLQwGM@AU7;lD|7z3d|ClGi|H7 zSSi$Gu9eo01@$-VG-jWC97;8CEPelzKuj9L8!9@|iWJQdBcq!|-T2{0L)8j){qO-I zKSx;#DMWAC0~&B(mb}mltR> zuz~8qz|5mO^|Y$`H%&LQ)u#<5%bJ-pW!E6Z{#}!$%3!rM@Xnkfe+(u>4JgZzFNeUW zQ#vG05)QkfY`k}njhuv#Eyqe+f%_INyhH%`oX(sk+g}E2(MI_g+=jgZ$J|k@4?2Ls zZ1Zx=zOT#}jcB&|mKq#SU!g z6LQVU&0xz#kzAi;srK{8#fh56h!N6n z5|D!N!pQhyW5vV*an^60laexwVfk!bVF82|mob;I}Wcc;L-YVYtRfae7i1 zhtF9~2-$J5DXCw#0v2?vCqrH|RBajUl)Z*&nR;MKjG2PCypk|u_*jl70kQ1TsyPz5 z8jLRj$1N8af)1Bbx!t8wA&5S@?)C05Uzfa6=KG-=GIOVqIR_m zYbDq1gHo)EGcu*S!Rp??T*;FCmTEflUKR9W<|2s=?d%al*bHH-2^ke z{=DJj8aNcRPd2_~MP<_C#dl%Y5aSswJ>LW4&F0NzuYsiR3T$ROaugQq05;4v%cCvc zCnQ*#j46NpKx6iH%fBCKO#Iiez0^6TdThEr%a!GAgA}2gG3JYgQV-SUj);jt61YO) zSgm9(KgD}5XwPD9oAhx4WHd6@c!^H^r0Vv`A>RIWIl z%o;J`aH-&)$9cyp8L_8$l!YgWX~C(;c+L9y!W zjvaEd!J3fU$(HA>zo{y|O=FM6Z-!Fvv;^7myT&r7Qm^?7jB)WUz7v*T-7Jt?>6>G? zcoM9IjAu~!-t|!QSkO?gXnXW(Vo}{K8Gp#Ymm(N&W3^RjAAf zd?atZ$I|!joghaIC5zLU)1_GhjqA^w6kK?_>j9Oz%(eLjV_DfYwjsm>NnjW9t_M}? zOyMlKqY)Taa&XrTb)ufut1YN@q8>6^oN=nbI{}V^KPD5Ufr|i>9;8_Q(}#E*k1* zC~5LDlB=#rx}mtOGZM0RkNfa%=mOT~k>$u0h9Tk1p%fcTe6D?-|G1@8I#s@lglSo- zwy+h)+g%koR=UPOnnaR$TKtC4(0Mm7$Ov4nGcSrD(jR$vk~|y@*6KJrWxLrR_-s!? 
zG~r|iQyjZ{!?jAHhdd)k4JGo|J$0Fr7_HYyEM5w1*S>& zWqm%*P)ZWd9EX9La))sFJSgbYOpcCHL2BSA)8cfY zi0AEKx)sH2obekHtWneW(H2QvbVW5MPkJtfVnN{Zr0G(P zXSR0lG+5&?*}!VWYAaqhEZ?yPYFcqe>9Q8c28qBy*-gg_0{bEQuOpDE^%qU_7cTG@ zO_dkc6GHu@dB-qT9T#LcA~8wgay2#D=toG0e19vya&PI!L=R4Zx|xn0a|g zJ~shsu7mSe@#RqROdBgd8W^nJ9X$2jKZTPGS7M||+Hp(B9*ZPdS7mX2A9;&hTMfrT zGXt3{Jq*)K!dMmh#!xb$*WY>qh(EsHze@fI;<0>Z-hNVJm_r^+Q_L{bR0GR8KN}o) z0@*W=#s1UcFI4E}j`nq@G@f~v?d0p+P<2%KN_p^%Dw;!cZ4AbSmxf8+Sq(N5UnyTJ z_UC1bd;<=?*>_iM=OGHaKnN~im+(}nb5TiL4c8ZrGlY!H4Qf7FZ&)xUKeonZ%*byE zQD>%`ezwMNQVQ&{-}Svl#^}l%S^p!9vYN9V(LaN+vQ3C&8A$Ni?0os`7qB)JFB>tq z)awrdf{l)|YE%A%YHn`xv$X|=6ZbV{fa?DRR+mI3%g=wS#Kje?zx)Fx5kC_OtQ!CG zpQYH%C4XGiJRqb(@>yc+b}dv(nBt4!#VjA(LI`=(Ox-07ZUvhPUc@|{2D1(-pWX&m zvskCxA*b(xtJU54=x=d1R4wc2lH2Zq;UNy9rb_YuteDB>D!5Gzte9>-Fh0KzO5QHc zKX1BUaZT|K8S#K(v$Zq+K`V+`?JR8s)!~%&qXS=>Fp@F%>OIpGOcA=ald2%(0-zQT z5vs*DcRli}FM~~p8XeJ=-KQ4oJwhHgjP%S;RXorfOnN=L$rJ-M%3vk~AGTPuS2{hS zQM?SrD_eurU4dxr)&X#Hv{ZrE}P&FsRY@Xckn8kTGvgO1TUOgEe z(H*(^ifg7a`IH)BLV`BacVCEts*!y?@=CM{PiJj5zdW97Xt1s1a_IbB!zqJ*d$zpQ zOI6K7o|_}@_OVnsZta!JhSxqFd@Rd#eF?yEf#s`X{UD}N{*S5i0I#ap+VHhq?0s3T zdc9%;D^&z5O~fljP_XMsAqgZWF%7|%&>{5Rq=pazgdRdy0(KEpq^Q>uA%Ft*UjO$y zYqRq|K8EjG)7F%|&+M5sYnW3%-VsdgQ}XPBPC(bTb%bqCE)J69n6??V=^;;-#`bN} z!(mVd)^p!|1RCf%mzw`2PQ`j;SIy&9_LxVG`}$pC-#zYuDBn5N3ZM2sRRw1sh@JQ=6KhBP=-zb~O~iCT7CP47Vp8vw*>P z$g>8|ctBzw`!hQX;6v$zT$tyOmED(LzkIkiL2UUz`>=ONyK{2_z3$-oKo0cXM=4l z1dVr>?%l|*&R$W%lI2eSrtuK9bnyb~DD0VHLmS~@0#C;|;rLp8K(%~@2hrCjddkD& zrrRgN-X=NR=Gb*okkD_2o-IllI;FQh2$E?6=DkM)O8=*`_jkh-Pl(4ddBOO2YM5;X zhI-FUgL*%-Ot6&!bI(&d#3eb>r64xuv34{55v8e%#xIc;X z`5wBjaPGcBov4J8uWr`uyZ}tY#ofBqDf56{$$jijVFI{PENUMGL~C=_bul5{FmmR7 zbGb*1+q1m71me}nOR-f;VT>cj=hO;^492>vfuv_*ET@jiYoK10oCu$toz{h%)4cO7 zChlW5Cljv_>Gn;nB;HUtHfTL@f=!*|?0l`5zAur#S~56Dg?*|7D8rGmw_gdfj303+ zwEdExQ{3QiqP_g8$K5s0j<50%dNYF`hpR4b|(B33SfqIlk1#;VIt0j()r zwtvlN``2DV>Dhe+qcF$KE*t<>QEOcFH6WL>6pO_(yDO1*U;jGWVl zxk83DIfR6~IF-UDp9uQm9p|zyfMjwus1kgt+{saD-S#T@_Os^4Ok-yClV|f z_xsj^V#U+#wdp_HsZ3O{$9) 
z+7&;6QSi>&t@+O`u9_dQZNGqhpe1BR$+E|<9$2$vjlK1|r@?aJ4?&bwJ8~;;)gKs^u|oB`aF8oj()?(=bUhMp?;hGey6Z+bDqw2bLC~{K!scQ?gD@t? zxhkwkoP48LOf3`ksJdkSzJ?xDJ!GLhBCJLpv#Zqd8$rB$ELZfIGomRHFCQDphz+>K zlj@6kLx8hc%C2aJ9KG@93FkVj_EbfNMr>vQs;|d|%-J`{;o%!+Uvm9u!p{FR%}~c`W@h zMZ1Twdf_tnl1HE#vO*GRN=wKIBXh1MV&94TL&z~fE_=(cf8=`lL^;;Vv8{Pw`WtVw z)1HMU^|KdVB-ERQALM~} zBj{D=@oB?`oaY|f4@$l{S=2qHe;Ago`wGYHD=fEBqU5!&u#~~GuW*oEKOl_% zASYG34uqm_Jse=S9t>tIy*JWsE^;vjqITwRFvZ1(cCv^!J*O&q*&1>0nFOBU8%IEC z%!cvf0M%k1sEw3hTcQIV+W2wzDUA#Ok%{?81p)%EMnC zch*{~NnzCPX*e)`dotV~?LNs7E&-~Vf`B5iVy`N;;nQIh6VVaeq?aL-ubU@66n4$J z@WPwv(e&+R<7Pv=hWU)n>7uS&L??n%=D=x~s(v<4&<6t35uLDqQxb}ZV3+!Rac$_P zxUayCB_3K+Jk@S31A8fr8r)>Ji;|FQK5W-QFiKKRqk-h0^7d?=sB~yYELeAs)KJlP3=}ez2e)Z*!~Kb=Pqw#)@e1IqLX#t^eYh$ z#P!EU9K?q_NBhA?#DmuZpH`e=Dp?mwTIS)CgbM@KhiQ7r|6OzgoC4VLoM~rnhEVAv zMYC+LuxlM<7kI`N59&pGOnk$GxDW|%JNEvYVNCrJjd&}hNu0$$`8}vBy_;o0OWhs@ z$129#LF%59aE*S00U9levqH3%JkKDqZD3x6io3`7- zs@F}jdV9cBBe0BmR@9^Nn>@p|?)9kYd~Mmsj&&E-d7pVQULd?M?IBT0+rJi@zUnYY zb_DfAX+Vr=$J#R@-UGdo<5v<)m&Bh37VY>{+ZZI5Ed%4E+vkLnvy8!Z?+3ebSz9?K zanpe?w^;F@7@G?%4tj2U<#IDDhiEb-@X^C*yfeRmd7tIrlCRtUqa5rx_e(-4Dg~3m zW{1JSP$?d$tbdS%!g0OOGs0x*-$%n3zAI+=lDK*;<>Tnoui#RBc#$>v8cfgNBkUjF z08t`7WnO}xSf{jPS4RJ}-+3Y(MH?Dmd5!M$y=U;jTzgiKd~3=p>}O$bJ|<`y5I-QG zx!F)!Ww-qZ@zsp@CETm9PwFh0Yq@6ts}lG91P|H-BZ;qnhSFdh#bQAHB}Sl}Yq4P6 z{ud8K*Ig6sD>3v)&U@Ad{R*ajxIjGVcc30gdC5o;IrhlKe;qR=A=QzoOKs+#VJHh9 zS|$5L67s=MQvJVy-gS>~^8L7a@nm_p;g~J#A(-s`bUe(9h+j*96N?j&colhUr{J>< zjz$=*jlh-wP|o)x2MXrfTf#xd45G2^ zLJx~ou=0Fy7?#5xh;{!LoKiW=vi=f}_T{QYR&;3?Ex&42nw@wV9K|u0n_%aNP;_vU z`n#8VT%N)ttpMlz2afh>`1UImW>m#joHlS3&VZ{Cc#Ai_#rj_ZQzf{`13%2STnnX? 
z%zPfAE9*hf46pu21Qo4fnGBzo)`wDnE|6V*ohRWPGt=^~cfxca)wYU|h~`b_lkw*p zJkil9#8Q50w+4zhksKo4@6*wCvN_EljMf*gAlnE@8B{lPuvGNJA z@)eq0ywKdE#@A2Z74e#J*u{pn2r1|4?9+h8S1Xs=g|~&F8LT8$h`RC^_@~)9cZZyl zgjZYVR&amNJ7(s4%X}=E_gS->Y;tRus#`MMzG~xG%NvdE0i)G{6{R(UhTHS)f@oz+ ztWA5chW$Doo8tRCAf5N-C&KXn-Z8)4AEaP+@{SIUYX)JK)zKrx?ai<3>}1C@QZ()g z@rH<%PPe-s3~46T{GW)DdHhnCwhuvqXJRl*;d8{jPMxwbfPkq^OE=k7-NBw|4%4Ma z9u6b!EMS2hMxGo7i`YOB?_fDz;7G>wN02aPn>4j%PlV}M09)}?7&RxEnNE^D`7{zQ zMu%gIB2YBY55hQM3SkS48>ka&;|(jdp(#+WN8B4g`&19sYAkc6fA)m>TfA%hjO|Ii z5TMcK3cD^HNVf6!@r4->)wp7X^~>})9Qzw2=vr0r;~dTQB=Jj6uF3HvRb?4=S#B6F zR=UdG326K=UvEwGJQ=PKvMou3md?scW+kOXUr)$qlnx7hBZ_()cKG&rAR6T|_5a`n zaIhp4dg1u(MTA~AuDavu$4?6ocw1l*x7O+n@Ep>xwth6g)kB8bg#(dODULYFc%IEJ}{%Bk$6?xvE`Ly*%EkO8CXqC z8V9A6stW5k5$KJ;62xdr6Guth##ljsk4)nxq{(5t?cfwWhEFVB&;DdA&U_ zH^971rx%0uKt)*=|(q%5Sl)E`@mW ze7U^8?Ogyp)}ZrrV0FTxF!k<5B)D!doCfTjW={zEXEbhy077Sl>uzt z+{yyDV}R9J=MmnYX3wt=vx=4R_Iw4((^OR^lcI!IVJI3~zSIT_(;$3(rdY*hh=$=U z)@&=#b>R%F*_%*rn;edFFl1kS%M-FcvC+6;h0mY=J2C!J2NQ z{UqXgbdV#BzxTrZ^?h#}%lsG?Eb-46S3P5O^^5`bqlBd2I=gDIb^Zi`qPthwOM>1W znxUQfKj;Ogo^@n$@zL=Em>qDz#L;1RtNzTB?-|CYROLPh?VLkrURL8c6NmH*g*~qf zEO1{9s4Q?9bf+EgP+YvM;!S?hK~GpSe64j5MjwvcWeR##?!42sAM!}~dVv&&{6Nts-+A1c;S=rG@4>;7$H8lkfBYp0 zHBCscfBobTTkkwU4EUb3K|-j-GQkFWUBuPuAtwjt{~Yo?1Nqh7{|j6X=CH+fSlHvn z&j4)v6;3;H;EpZAZDPcXa(&6+-@}-)3AP{p^spmLQ|A8#rjg>S7?t+O-(l2k#W=sY zvyX?OxE5$r%}zL$5ijY@B0(48-=)q;9wuWRyF`Sr_+{{%2Cd^^@teWUIT=Eu^F4K) zAPw=_>@@q~6tL^XFZEJ=R;}xv>Z%=ibV*cJ#~-WMF#aTiT_mgTz;0o>vezIHrD3?DB12S7-<6nEX&FV zE=2AMaIqhk``L4ns8;wIoFwe(#=Yw;x)d&j%F8q-Z})GHdf?@G_J=Ua@v@p^2d{>B zu1xjh1@^LQkoa#*PI5vd1>20q646DydYN4d^me^B&C+UF>oi*?hCXR5`0lL-rjY2C z{u%a2eMr!j*=)Y9755s)mJYUuu7kP`v64zHf4y;or^YO-$lkgULbgZNOtyP&3Zv?f zF$1(tBP6a+<0jUpmTjAEott_#Jg;2oa*Jn^hD~l>GqCqTD$9lJAV@$NdAwhK6DCJ_ zH%qoIEnrjx8<;%1`8J3v-j@~7R-&ZREim10_n`P>af1CQ>Q&B&aO>8&cX`wte3H}) zZ~}9)^(|d~c>Zwvp%vJx!d;=yw}#Q^Rb2hj2I!e3M3`T6ZyR!ceWDf>M-O>wdO;X{ zcP-!$#PUBUr&C*<9A5n%DF)MeO(?w?)kAzY&{+eX@qyubE2LL^Pv=L_e2b2ZH 
zhu73vtC9qL4$U5J6C#fF-7z-N6Q=VvVM+YoNgl)lW;Y~*z2h6+VC_;oB6{i<;R#XI zuXv)(NDU))%CLjAtZ`#|AuSB6Dzq`ZV6+&f3CXr4Aj}Wu+i~fHc!lDN;RD6v`A@Z* zG935H%(p&K&!JHRZqzv{;)y!yV%oDa;jZz07#(JNG96NBB}@p89eB6BBIa$|gU#=| zb39`8kQKH+-?8oxZ&(2K?0HA7vNCaW>Qhx_Cl`ic+)R*Z%fvlPU8B*hzf(?K#8$J` zIv@%22Qu$R~H1tuBIleYOv>#=}Dv&9+7(U|wJ}InAm@ zLTTCfwq~zU5b}v%d(G%*KE^qH$3|ept{dxVV=GH_aooOuGEn4?>bwaasu5X+Q~ya& zlsz!1!tR{{W}2v<43;8>$6Yv9P4x(J5_2%X@&mMXn#*hEZLk$Hz`WYx$2v4iUNzGL zBOIP+F+0Ev?APXiy;>c3C*NA)lmfLn@Zo%qSu=X1eYU`HtaOqcU*^#c%^&o|oH16i z5Xv*kA;1}nfjkL#Yl{EO7ukoBsPX3wx2u;p*5wZ03VX?^%;}de^&p*cYp~35HX9dX z0<4qpWy@WjiJ{F=VehLQBkXH2@5#>T*m(3^0Vng=%B5=Yf369Gb28I2?NMqr@T+hzDi&L}^2b()O?x)RJS?Bq>Q|25*~M7~GbN`MYYIQ+ckl`t;WQg*kf)SEfp`UrdR zJ@POn9kChC$V|z`HsNa#FC&AWg`a~koG>NYDOH|HFV-4=*b>I8*QCCk_L}Dr_l^FS zh}WMD?;Bqa+4hNc!5bbCyS|8x7xCJ%v8zR| z8$_vod_iU{Gs#u&BA^~|!}Rw=JguyUvbI58rM4MXvIFKVz=Fa8+bB-tn)1m9Ca$q9 zqIy(UabNxSB6QB4HJ0(d%em?z$Hs|x9@uLp+u1we)TO%rR8CY^_n&QdN#g5_OoKU* z%}2*Ol94sn)92XbA351ox@)REwHtzlQ9cT|>s-v$|9sFGE8PpF`?4`jKI3DsXN$oE zb1iP;o+laEX6lQ1!6%+JFs|$TKWNY!xpvWK9?0gm9PzD)LA@3%N^v;0+9M~%2lYw& zVP2HmQZb+W;t(=aso^BcC0~NQ|5(G(bmn16T<=62hK?2Wuh#Y)jAX-}Ucc=yLe(@y z{z>i-BR}@{t&^~8laZX7ZM%b*yy#lCvwBAfr6-zRZBxDiQcQLKiPima+5U}FzHoHQ z^dV7NBchF^x4(5#x5Z<1?e*^<6edSVjlKtaVM)n!VEGRr-y;jufBy*gcI%X5+r>z! 
ztC{{5_GU@S)lp1Bo}BETX>pJES^2Gx@mdj)lf5hFYBypeXZY)DMRv@_22h;?7Mp{{im z@vxrQ$M+LeeoL&y1s=lAK`xmr6ZJQro5VD`2!Xe!&a7}8GglJN>u_1Ay(UU^4{jW3 z&tC%ej_24;k=bQ%Jv_3ryY_O3w*@D;Zun2g?r&)`#Yo5kc2r)~e-U>z^a|)}4_|@6 z)xb@_GFDZZ)g`qrb)`vZ_^n~ z5O{ycTH~IkQ2$zFxThsD+x+~NP{dCxt3m1Z?`DX|u@}Z3{cZ*G9QivsxELrO5Whgw`=H6KHuWwKWFr%EAS=Ba zN`J;GXT~b$a?`L~*UBX{m0WnSq`X}cFO{``CU#uwkf&5tWZT9zaO!ZVq|mNx3l20| zAJ<1`_uAisR2nT^n3e3pqI5}Oqxx622ar;=Hhd2O+fj@p28-;4^Mh19}n6v9q}~OAHBHuoQ<|a64y-b@-B&l zG**n;dr|5cTQJ-@C4u$atYcwoyohI{+g7j|ye!$H;+)xz?*T^T^hmEfT>$@}xECTz zh@XOBRL+o5t)+N$xLnZNi{dm~az5KwKpzYL&$aVBNxez1yvwu+>2S2Uqj7`AjT<&- zbdAlI#7mU_{S~6*xOu+y%K*~2iOlVj?N2c(_<6w`yD<|S)Jg7kH(I?c57mBXV_`3o zHHU~@{S2HEc8uD-mIk>wpz4N-3j0a}$g?d6IgMI1ALk`4 z^J){VVD`OA9ObGDreMJb_6kM0;P#z-C>83|)zW)AtQu#JJqz?w`98wS;`6nMIC^;f zi%_Z_cW7me#CZ9{KZ5jxO?k;<$CgftE!}8aMJfGoMS-1M5a5}M?1MruD#uMd?jPU* ze0STY!aj|Om5+@tZWP*Q0}-OPcCcCNI55~L%@nO#VSR@{smAUSyL4z6lAN1pch}NL zLLYlV6xG?2=XJWUHn1Jj_LfCq(!@xX9S}u9E*r_Udxr-kmsON0BD4<<@L7xMV~54P zQFHRrZP5s*KX+ZyY-KHL*3?cLNf?^)op+TW4WmmX7L5f557gB73dNFfNIcKjO77(t z@9A1JunvMASN>*cvBC*R&=f~LtL*H_VZv5~-6KjVTEi}x8V0c!Zxg3E zOznKPoelOXFm-Q*}K+oa|dtQ{NDJ;^=D|UQj?s|J>9vG!# zB_*u+%OL-KNz|hy8%0}{K)uAO85W)INn@-Xtrn)qxb5_9L5(%JL(u7$HVa(9IKb|T zeVKF1JYjXw%T^lTo($m)!oh>I6s!1O#9i%-JRFcL^kg-|rr8?-mTA>{!k#Qu%T>=V zhP#&SGi=ooPgOl+nr#vGUcqO3a$>H%7KCC*Yd0-f#$EGba~v3ONR<7ZR;AKHvtA64eV=iO2&w7ja{@BLWX_$ zsoNp!ZPO{iQrE$}TD{n!{Vt$dB4J+e-}Qu2c+V`mYeN{ZFH1#rHik5U##f6HvC&O7 zIDpSTZ=+v9iVob;G11z*3ULKn$MrJHAeR(WPDdb7Ebu@&aE?0|v&d!p#IX9!0GE`AMyULD(Z z$9$MG9&dPHS}w*&MQ?c^1GkkNPG!9fry+Bgy*>2~*fY(*1I>Bw20@%0zC+l9VijZU zjBU{PQ-#fF{4x4Q67o5S!=r1qgQ*8!bKTw#({*5~cSw|2hvqD_zdvwXGi`>Qy3?a! 
zfT}rkc0f^5&v3tw2qnL&@z!@Ykcw8#rV--Al}g1dd)8jUyg%4D9AK~2a?Dn7eEYFy zb9Ey-_Y)u~v8a^)vu2{G6)Rq7SA7nq)370@xw`CwQQrIGcigIbNm1d*fJ(dg0GRr| zzrw}~BCc6G$tnWzdj+;%@K{y(ioN|yxVJ*GHWS9RQW1gFxdKtQt~ZL z?AGr+B>v>B=X@5_RZhrb($rbp8z!p5w&}KC5;QoxW`J$@*%LDY^7-Vd|EtrOjxZEh zKT-1Pl$e_-i?<4KuVmAvwp)xo!DWUYFdOh20y1XN#V-0CjJ{f4dT+R8iV~E@QJVEZ zkjkWM{}iRIc8$Y6;JZH|u6%9 zi>TMUEwewo>ppa%N8kfz0Gm6bMb#B$Gwtq^9QWc=`GS9fy_&n2aMbaTWPToA7IyU> zjHKjP!pU&D;f_0P?kOHk&h+0Crt$};tgG2D!JarZOvT>GKzr>pk6N>InKe2a>`KNe zR%-6uPZA##O?Vv-`+pHQ0*29J&N-f3&&@1h8jts4nr#&!WqeLQ-pbF1y0WR+xPtkY zg}hZH%Du7T{!GbLYMQ+-0qNtb_*3nU3!TzW4Kcg_TQtZ{7Dn4Hf_p2q>}6kG9Hd>o zG`6D9eifx*SQqbq36NIKW#%+n81c?x`}I;F6_1r~ij^+C3td$OEqpIS0g&EvG(QPXnjC z;%lZ;B=GV%Kq}u$A4)>j)g$KG?bn07ms|7Y^Q5TiP-+j}0Q7>`6n`HCbdg(=-r^*y8f?1-z2CcMX0zIUMneSZcios* zWHC`!eD@lbw+7h9jXVM);qbU+_CVv>2+R1mTy9$>p=L*zm3yTdKN8N_QPyUyoCQ4ibh7ivQ~^T@p_>e{e^k7>O<(Hx0aEkBad$GY7Xa>cM0 z?+E#QnW<5$e-|9(@P=xi3j3h#7O@ZR4r6W^!6fqMmSHG~03==ovbMvB3P>rb}W!9;);~F+82McR3$P4(T4>;kDqC{&eLJFRF zdsmod@7T_|cL8b;U|G$pzuto$tGZ}~{Sx3KEN5NW-J>xSQqwZ5sVJEpSTVsm3wtGT zLs9o(kEpFIHvYSaqR;0A({HeA9`#f;g9h67Pk21NWg0cGR!>5`y4d9>+qi(|B(rAD zJL>$Wf}{ic+ci&nQrVbu^z5;S2QiViQxd=$?n`WXBG3m=+(>_!HBa)0)k8|{4q@+$ zdJXDZ-DGI+ZMv>7wsu}@?Na-94}@N^dJV0a7&YNS;~YCV6+->f`BHjV*rRpqV~x{b z!HD1kaIL5ZURBQ~_3~s9E-HE`9qctu*G%!esE27QzFijBGtqqQ2~p4Fo`|K3sqI&- zwuu4Or|cWq$Wbsek&WA}Ii3Tysj@J;G#5(!Sm&?qKPK2(*^`IRHMl1&J;R#yhSPY> z8*&?57_@r@_EbNKlXq?&X2ce{0Ldn{e1bK65$t;da?B;8Y3nT+;WYzG?ZyJI=h_uBqT!;Z*xwj=D*9;bTe;eqLk4}v5q7@7ZBUkqy z%i%hkAc+^-jg{w1hd^l*RxoE+(ohH)$&Q_PHWMpo|+<6$}*dWJ(BU4IEhj#-Fq^WvX0CjV56pjY4rG)%#v8idRroiXT&|rRxQfl+|gg({;cS3!1{mt z41|n%R%v)34$Opk#kS92T0CwxoEBy|XN#Z*Wh66MeV`al(V7`OEKEVf!RysA8!TQw&=n#S+mZ<3+D&c$|J3Z|Bv z3m7MexF_OH%fMb^44#uLMVzL>2OHnLzbuEjhB)Mjt)6N>t?;yIjQ5=@!DP98lAXQE z#ro}kIl#KF<*^FnWQR?@9^ln5D#NvLa!Oh$O8Oqj*0nN7&*XKkuwwa??wYmG;Q3v- zqk!gLVb4hBFR|t~*~E38IF&cwr^4jPx5aVmfwUD9zl~N`#M?27GyRSKk1|_7eho4`Kb+r1>dTQeYTTUjW%znpXjI+u>Uf=>=tq%se?|MS6haYG&--Gzb 
z=n>%&Ef;rnV#~{Im8f@1LLP_Tv;+V2`~NS>BD?lODAlYUT40TLhCvT?ZE5$3(oJoW ztx(Xv)DyMy$_(r4j}ZB=+r1JWYka!y3X>;L`B|b~sjai@S22$+qXlj-e`OC6^lExZ?v8NgOhD%7Qi&NX(3vEhm5Ni%>hKqZv{+AAEUJdn%aT0u?7=e5n zV>xR_MT4gi^GeL3pV}9u=A)iP52)8JOZVbU7f0Wk@}YM3{@Soi`s%nE58J`G)rdfH=0p^WYLYSvQRrOuK-dT3#8js5gB)QjO8c%a?;ZJ3k3B^ho0 zU-mN+w9`GmtlIuAN)yt7OscQ`)ze0GIoq1wAQ~2BmA3p(u#aEv@^CYt5B~C`6x5`V z9TfADSXoI+q0f)I;aJ7f(|BvuJswVex@5b(a8T=tF><@`=Ltvx``h@Thlou)(Gzf; zn!O|Jt=L+NSUVbhf2yid($HM5r#`G|2c^bs$veh%jSI?-Z zo-xi2Nk|#n=h(6{9p?445oZC(FAbxW6VGwXGMmjl*Kve8e}5j>*VE)N5SK!_O{D64 zkHN;2oqvJjnwcZ5-GyK@$xLT)ZpuXv3dKP3w#!_O+y7Gg=HC!+{-{d6<0f9(I;ZBTqYaRdbybla-w@~uu*zin8+l0M=5|=n zlj*I33DhaKgmH8ET$(D1vUykq=#0zzfo!%Bs zMqB%3TJHcK#Su!eus7(Vc~&8YaywVXUF@E9PnZdI0gdfzap^AC^@FXv9h5xNcnz2Q z7p!MMD;HREDY35^Z{SL}>QjNZy~bMJSsR60?)-=)=&U zXXPJbm0kWQ66(~G+2$jUc}m?QZ6gD+yObZ-Z$!{TKjSI(yI9cWV=>0r^*G!M>BECk z`~;K=ZW(94JmrvgXp|! zVh`5x>`ZR5+?+%>b=@(HmAsT>Ps=Mx#}Zx-C{;f)VVK>~Bg_gXd26h_xR(@LImjLt z^_K43+Rp9?BVPtt#5!a+x$EP8u;uLU@QGrm22JSjzF zFb$v6H>CHmZnmP9#!AZU`sWDsUfEN~=*WK_?(b|_a$!V;&5~`AluB@Mcn*6`KZpi{ zTW*9l*<&w;lHKL;VRqF^o&+sA%7vsTNviwLbqh`FB?*fs7R*8`1?qd6FFi;Jv-AgMN;sGrh+8RMJ z%-z!8GKPZVV;19{WwFYs_MRmEz(2?}%31c(Fa+K{@mnafi#+)OX1S%pv`VZb{%!i3 zC`BeE=kU3-WCV=7lX05FESHb|Ws<~)*G%g%%0p=#j`wP(WSk`EzJ0PS7o-7l^x3sa zgj5Mk$}b%4SiXVo5GH~O@5GL2jDyab#(G3t5Va8`-F&n{E14TuuCU+LT6qmJ^*#F9qc#ZIq8?^?D;Iswy z7#sq#JW=wE%^PAjEp{BmyykOZBMTRdrxKX`f9q`X%423Y(8IE z2_*|zMq9ZEg*9nt?+bbz;|^Cgul0Cwv-$e#ApZN-IXjajvka?QkBHK1MlHotJeX{_ zK76zw+4oLn&17L9zxG<2wGp{z&vYkh^3d*|&4n9;k(q%Woos+l&& zy6-^ft=AQ!oKxTT3~C0>vm1oHs*xyG9}R0cbG&udSKR9wTRlL_IzN7Z#Ou0k1t0#N zR~Ao2u`qqy@Fv^zk&9~v4z)XWg$cSd<=pKdM+VHYg?qrRSM|_&R`M~F^v#;tmx9z$ z7luYY0aN)1ca+;S5dsb`T4~q*5A4ZvdGD?If0U-#SW8ndTDs*k&jpLSs68O!gO#~J z&*WaY_Er$StBmt_pH_P*Iyu&CpC?T2#kC0QL{O0 ztM~#+!^Dp&U-D%b7AvW+KSVuYC$5A#;xXf%2tE{{xLiH}ZaC_gF=*|C{nxfla-zFw zsgTS|%)lMkZC^ntkppVW>}e6zqI9|ToY#nYCFNw)CVvg5nCjuP?DB8HL6ek}YbW4y zNl2^*3RB`n-$BV(pN*C!qCQ?yX%*ksrYs)Hc|fcCgGY@QQcr|xasZtH_?nojz>0UW 
zUHoIn@m@CHzWz1j@nxrGzjD4Hf`U&y>0r(jq%zhu~$|lOm&jt z7NybSV4h_P)+zSAI2!5WE57jE{di9rt1PqYPH>#bO#9>$9VamEn+U6yeBSnesOQPe zLzof-!PKHFua9lD6z2u@Q7z5oikU}G@_ZASN)-x|FK*FNI2w89KcPGrRTJ!_Ivzrn zDIAsgwGss>OMmyHdCo`gEoC8G%)}C;dc&?|}*MGhJCG3yMvk^P~ zJWqw@-RaVML|v78(>SUBRc&zmoT?~kGkeS9YafHfhx=T6S(xJRC5kh95r0MZX2q6! zv4WUi6il@fF7Q-YOz9_I=r~rq$X*rpL>P`?c0A-F1k^ddj|~gp2UBc}plis7Q^ek_ z<$Op_vFk1-T%BK9B9ovGW-XT3rvhrJ?P-66gx>b&1E`t3n<>IeFk}xQZy7Yn`tF(<*Tz=Pn|_8=I4HTLNB9)PLC0z2i#FkSq&buou4ka!oe(0InpFjP%s3Qk*J(Ev*M z@}Wf+2_zV=n`9*zKVWvD zq+a~C<Dbk)mE&`;IpMb{IMK%C^mSgz*x2InBKj>Uy-&dCkO@aP>p+Y#hiQp*rSipRF zk>$a(^oZ#r! zs+3W+(iR7}YIeHyz1QUjrkC1cVQ+@qWM0a{+rxbb_zhfj?n9vbd4kU8OMU@{z>R@rNO<5*`pM`9*&G5s!Lg>Udi# z?C;Isi_|W96p`ngn979=(;tIVMV%F4b#KlSP;Z>>sVu$QWlx6jv1VnpSR75ht{jCC zhNq>U8ZzIz>?sfWqJOE~EX=5^8fYB^(Ko(@cJyfo`o?_XApv-J@qt`JvX=Gv+R zk3pv0yrZYB73Csg4iK0r7EXz+(`R+pGbnvJm2_tO@)x}!}r@DK_AR%7*c0z3)S+|JaSqB zX54c{d^0gtULGr77XR2>W(|8HR}ZdFw?_l4HI%8sTyvs*7cKDc8(74u-fW4zX7}dV zE-`Nb-IMcWZD7Y#tJ^D#+N6o~PltJzbE-eN2Sa+BL_u}ac=+Cn!cnCczc(KWQ}qLr zCLJ2Gv}WW$`y~^JkAU3xvpPNtffpXl%d{<`o?HB@_OfT-$_=lKV}*V8$jt?u)?>F~ zPfnOT8!P4`xlkH2H{UkqIn2tip9Lu+B8SMiR?!frFw;rZbhY@sfBs*wsgDwK%a4PT64V3&OD0y1!&hAzUMBI0nw2 zig+8hiO9HhY(E5EiFUy;yqH8JV6AhR{VL*p5L-IL{@EW&K}odl0%3wM6lMw5&KclA zS_CW;_R)%?rr3fJy1sngK!oJmGbcF%mvo-EdPtF+1%4$;SR|W8%eI3d!AM>?(w!pk z*IErl?k(0M&wdiC?GV=Z&mLAQDJj-Gf3PH|8!Mh=p9}jCk=Y{uFR`yBQ?atJSymK8 zE}dtW4F`L*AL`o1UJ)f9{EOKgBf#GFyH~JU)MBKEF%`9`!k&ze9a}UijLH-%$6gg@ z2tJr;Lq`Jx?Mo~5s`zkBm^Q-EiSx!o)zWMk?GPqi6>e)M0KEhK?!KoaaCPGD{}Lv_ zsnUaeuqS?hav0b{laPz2z{&LB$RZm$9qjGF;^wON3NZpY@nvM*!rO+}vtnMG&OEW# z%?N2a2P*2%grbQSi1iM!$0Rt`AroTr3he_)C@#uz;g4rIJ~)1rT{H)bj?DDd+e#6y z03KkX_Kmo79J;{P&2_w`FvFJ41AF%Gub{~F_NOG?78#id99>yjf&hi%D=QUt{d`Z& zwJG*ismJBpC4QGF_Av-(Q{G%V%78%=4x}P07s9>XjksxSv#1xmeRX`V{If+!P_btH zC_8sCn0&BnTf5Z7IoOL`u+qg%Z?!Iht|q7A>CQ)juv>4jv4TO~o8q6J(N&=&KE<4G z7gr#0b#xkfPA$#Bil$3t7@EMzu#Ib>s1_@kXeX@)dvAWY%uW@Hk9F*gOoV7h`Yp8w z0{lgP{Ba9Yo2HHJjSWCoTPN$Y?ZS-+g26G)Hj0shcArMP;#fBht_-j|jjnh#%;H`) 
zD!4FI0?(q+4R*z57!_j}=qS}gqTUS{h|jVUUx!n|!J%vH4Pm0j%8O-*z55LhIy`#1 zt$GvediG^`Eoyt-^0@lVEtzC3-wxtpNPXTrU~g_-LRrk?9+E^AZfL^-ST)Kr-wor% z&ogkdf+T9a>@5qe+jfZeNY#QA%NHdKZx{<~^M?>JY@2SkedN(%OP6YfbFV1T($ZUB?%aYe~4PfBn;4SA%Nw&!cF&h9XFDnGUMp}4=qGqdrQnAX?E>q5UKQ z`g69r()xS`QRhrAvWu%7SC!^iYhiDi9z2m{qTV?i6~vVLz`iiN6`$uniZUSh>SBU= z@qQSY$~9Y~0P8UCN5Y97Kw=CD0*g)cXcUXK8lVmmc--yp>iS;F=jl zcEw@jKDayQ*h$jM7c%8c*jWFGN!vC@gQ^lLC}$$M;#9Tw5xE*%^zt+bQBbsB$F zO|r)W8mri#(-%L8qG&R|SS!Bsn2eGwBRJFd1JrBHx*{&m?b07Z$=(I=+qgPOLYHq{ zX3GSSHDesRLG0BfEio&&$2JWUV$>d9iu zm)Kdqg@G-zvKXa%CGcLX*R;N!{)flK@vs&B?f8qA=h${(Z*C6BM!6i*!(uC!Sfk@^ zimO#MJKJs#@Zqw7woTZx?nsY)Eb4l3%P#Zm@y8?Z-#SJEw|sEoXP;!GiE84PZJ8&8 zS#)FdWt=Dt5G%u<)=3@|Ke0d2R*1TS8GZ0q`GIo{;{@ z<0pOY$sX~*1a>4&@t_`6mFYD7=c2SRmmpxre=3ANda#?_bDG1QFWZO!vR1fR@Ysmt zpv>o|!zqT?#$fB&XBxt6e zg0l_mUB-TkT_8%Gn>V#T1ig@?Y;K|Z=LK*INv)a_we}Z6f>&L7Qr?!CdG>-tI+jIo5Lemqu7}UcV6jDByvq3 z5#t(Iz!t3cEf-Y+AN+oc5Fx zp7U=9dtKRLZzx7c7PGMH@9tt{7h+8n-Z5i zQ(AjUmbYwu8^;I7FSUO6fN6Qw-{du`5;yw@ynTA<^4rC=oP%OX*5F>kDK?r+JqES+ zoDPqgVk3oJh3cZx>LF9DGzi7ts{QMJkEv6n(*k^m8E+hWs%~jX$!zj1Q7Bc$9it8! 
zt?TFstA~`^O4(Y zpb`h{^F>jqk5<3;i_ki~qjr7-h&l%)}Dv zp<&~jEG@+;Znou5DNB?*I!5dzL7I-`s&)Lg$mXRYpsW~yS2)5hh=NtX`1maq zmy4>ur|@pLItxM*P=)8}T0=2!uOw|y4}J!&CK*y_qmSXWc)!R-?(O&tr%&7Fgqhsi z*|Nk4^aWNN!az}(OSldPA-iT!HeNCEJR<90T=8IwM7?l)FQ!{kZ#acVS!)?BOeULW z+jGwXQJ;0sh<(`y;$t!^!WV$O+82RqricC7=b%)NW!%E3&3--%Y{`7K&I=)}THn_m z5v9BX)7M(zi(m@jx|Z2CQiOn-H7o3AVHCl=$@4FH9C>$O;SJ)-x0OlIQE__b$c%x9 zi{gi~Vg*Q01rw#zTsu?TIRekkj!Y+Egv${an!dh~Z3o7d026%Ojh zB@X8dfCdjiNtwRZ4i7})EyA0L8B3QzL26tYH68+XEwi>1X4@EXbke?cs(mU(-tvD> z_xvC#6$h8ufT3YR>|7Vxjl(=D1F8s5{?kM$lsC5<@6VKg_Tx}JgI&Z+Wqvrps%v?M z*4s}mBAkNsyRv9_kg$>#zHEdC^x}8rCSjE0mt2;C(*w%5{qL3$_Lr#Fx?Q$@DHBF| zbiy0m)WAlFs))+LHenPvP-pZ-VWl*t;gm`LgyCJSd9)|3`#9=*aY0_IMD48&1m?W>4L!@;X1S1nxf)U4z@6D<$$Mozv0$w$hMy*ZGCL;@%^B z`o*1kBrZULHuOllh>a4XxX6~lwnEq&p&JwJtIIs7dT4)JB&=4SY2OHXtORUJ&R^)U zgxKfN0~N2B3CzI>%v^?TMl<+w)0~u<{jGc*wKmfr5=MfQvW=A zNep!pnMNHDMyIrAt>tnc1+(APkDsxQqQpA1X^K4~Ost*j?DQ2Li<;|D-ES+Q7oU2r zFOBJM=&)c^MHsUy#*1gI_B^U4+eK@DUI9$-a_wnx9^4)HezNaHC^ws_(x+>|s1#dS zEJwLH>pVzPpWy+Hq~}WD?XWf7O?rVfo zAg64zZSU(&b_}GRUEc7N#KidqHjm z=8bIrJ03z#9K5olqQP*OW*fJ`eC*!OJxQ!jB)x}(g0@V;r;4z*R&-0DeX`wys{2o} zF9V#}6C1xB$SDkC%!7hdIQ6xqugzwn^u_z0L;MisFzfUI)E`XNI63RzR|1kYZDjpF z1X6G=r#6=Cbn)f|7X1hqjE3ZBA6vQyPLpvQKWeK2f`7R6ws|ihWUfVjHVv2B^&cbf z%5C4oRA!yMBT3NxbWiLPsOK$PU#t2Q?rpQJ#NH7@A>Bv2^?zW`N{7KYHL+b1&x*75 zRxcJ({MK+`qNh#A&B%+NgS`~ZGVDij>iiBD+3gEcVGVBY*FY(si%*zHREtr5b-^^- zCrqu=bqVfk2O+-3sP7WI1L6PC+8pvISIC~f=nDvC+|t+@e+fiU&cN%(I9-%nqil4Q z1^BMJ?y@(8DFOSch@~9$ND-ccB4Jmb!?y#h$ye~8DRi85tc{bzngoAl>}BVE?XeHcEjl=KxLyAZlvf19Ggdy*uXl}a5Et6#3JT0hmdA}s)^n!0{98{? 
zg2VkL+vp#m=zW8#{lrfm7QdAwYDEDh>VpjZuSL<^uU3&?wVQwT#LNL^T5AznX8*d8 zHe8r0_2TiO3(x!o>f;vkAXc@~ehU(g#xTE+NHC1_199>na56fyp~Q~+6HG=JeIzpT z=_ZP{5!u|I@E62$)%^n4=1V}r77cE*JO2iIrX6*%Ed3uh$1|!~I_!&$Q*54SFt*vC zVEyZy<2-_%bH0TBBkJkdipjQj#L2t5XsMlcyhn}iD`(mQQ6KgXW!byyWQ3b_0a%3(vDX9< z@9SU2Ws+cTS8hh{Wt+u`me&e}ze+{5aor$+0AS40aMBUNY0z(9RTheN!=asV7QN=`7+sAsl4R z(a+s0?CHiHTD3OO3Y&mk5sr<1A?7->oxn?NN>dL`;Br;2WWELFS;^c0o4Jn7kOZGE zy7FiRcMv8?=DMU~Wb&SU3e$VtC$eM z{3uJDI?1(K#ogfAI_)X6yILY~MQ9K%(c^ouL+daH?QwC|6CV8BEz@*`t&Sn~o}{is z&zwvf-v;XH;#DcRZJ0$bZOtAQ=P`KfX8TGoX!7I)yZ#;zd6WBC+X2xuwqUwV5_U~v zG9oXt^!A?az_5av)$^_Se}l;76RdZD({g&*`@$EUdJexf+vj4pigo&qNlZN}E&F~# z(2yksY$!h$b2a2F_v%hi%BxvAiUqR+Lq^$4lAulX(4}^2XUEiq+eSSm1ym24S3PW; zJtYZQ;WJr}<`hwHiHCCScQK!ex8kS{tMnM_G<^smc_-5}dxZTx zpmfhLdi>%?J^Jo7*g{s=Gmk;_(rRk&3VK}JqiUV~$;XkPdp_^qp-+0mOgx7^1ym#G znyvzyD@v?PcHUoq8qCS^Qah%eh6 zX;3e=*o<+!svxOx8cEF`o; z{Ao`R@pgziiN7)%PAMr&RNu+*qztjRkHd&{&kZGbRGh;~=Rrw`AFC|1Q}P^Fj~HT; zg@Wp{J0eMf%!OX z8^w`#PO4?~_0);HwoiX9OwB$zce9PI6=dXThjz>J9=oS3*G*XI#Y|HFsC*wL#-A?xu(r* z-Eg2Q*O7rOrqb7~u|G$IX<|#uS=57)KX&lLtx$w`k%WkSI~we7d=^YNJIqh>&0{?> zYv@z#l5rlH8yxI*VONab*Jaiw2&kFpW6LH%7;d<_;}?fH!zznOt+#V36q>N17kEhOtct+jRY=M|(q%EPQbD*vz zPfQk5|63&?tsYL>BSNR+<)%k5(2KlZ_T|UVgQII24m=(b_Ljv(9=m(V5~$(iveIo5 z8k8XNlC=K7lEgkqPz66W>-j`Ku@s7)xv80ap*$|;rlhp}23lW!L2$4kImKo$W&2jx z>(VuuPo)JRJu-2nJy90Y1g27L7ecAmjxpr&vWTa=j}?`SMNYK`&ctZFs8g7q&w{sgs}9}m@m0{E z2K?YhtX_qu?7{uFNj5!T#7@e#bz*dKN{XGg8i-a0rWaU^u#cPVB{sPd77XWgJ4WIl z?F~s(gAr5hsIY3l9EXSGqcu*o5sRh6AELxOTrt3AtoI;_ZbPqZ6{W7s00!I58=b_x zM!h5ApJ^3c_0V%Q!MqBFfe`*;yiJ(sVe0ayI69o|{QFBlk}4 zgIBlNqEwr+bd9Tk-dqWs-AuLV5_sXcY4{TVTAUhaYBg#L*t2Pf%gvWXJ#AM`aOc`% zTf^{t-kWt_gL+o{YL2#(-hiX3_S)@D5%Q{9o6f!F_R5=3iit4a+V++Q(NNVzL+k@l zZ~ypi++^$WHWK;-#+@k-CLIUmEl<{ctQy8Gr`qaHF41t|o#j(k?rbIa}iA2-Lp$O+ZQGA$Wd1BU)&F+wcS1GS z@By&bjUD(U_Vhux=NVf**mjDNrwom3<{=1Kr1r5BzXbZD*yI+gFXodVc2Ilh!hqWk zBccp7K$`&9tSzyG04HL@zDwB4(jkdqM?A^FMT_kDqhN|^eyfcTM1{0mHgC@U%Hz~b 
z+h}cs(IIo|G`4C|0!H)fEwnyjR4Y+Sv;1Oj{~GQ^<+3#1_gm<(cSrnSeVy+-Gm`II zW6j0Xl>M;mUn3HH46ebEm|hI8N#^<8$HqjR@1fq>I7nphkNUyGWro5Pol|~aQx zH4IAZ+sCf_6Y6@UMraFrvau+aV| zp;zI)Jk&b>9}VK4zxxWt>?;_3ga2dM$02oP<@%AVmK2lQi>^HHXGArXtGQ&?q zqMmMMr=J9*Ui`4?GPZL?y+ls<4ziu%L0Z`mPByC(CXjQ4a#1SBnXm;`EusNfGS7}4 z!(}CQ&&i&Kj!xhe^O-2c{~uN70iRWs{C#!pZEa)k6&00UtVAVpo*-Bs82et&l^&+~sjpNR9DIc3hd=icX( zIWsMq*sW(ldVQJzSZ850ciWS~5GfumzTD16M(K4$%k3r6RLfc7NShV{oQJlwo6d0w zR)To$67b>^Fn+Od=YqXso=Uc2q#P-<_4g6Nte`}raS-aZ+h+0_?&!Yo~{oNO^6Gv?Ly4`E8I9kt3v zUJ6aqiL>`CKIGmHA-hw$P`_6&sE_@Khb{y7hbD=uS&CBqnOHOftED?H_oPgblI@}^ z0O9B%%(!hArkp(sag*_(0Plrg;2`cs;gvAF`y02s+rAZ~&{nPO;;Uj3xv+d+5XJEy zZjG*n)`MTk8X#Z5OK8u}&1u&F>C#Twm~dg|=_xa@!?Yp+3g9x+s04>}02~dI# z#F&q_xDH57>74$ji}u2`63!D0KR4L}8Cz#x4+BjS^Y|D&D?rA(2&4OkXwTRWJCr}( z0HkVsdsK<^XTE2p&Ak!i<1HzzFTuXFr%@v&1=UZSX7}6#?cKSfddG5mTo{=Qxp#=B z61iPsSygtkL*B6CuU6Y_w*b*CSza%mzZF2O8EHC-A1Fj2{c2X&mXMaOvg>by@9oRU z)(obj7?cqqfTr~nkXjQa+297yu2!x>W88Z?I8Y_HzWRtD`BGW)+Y18WuTNOtEtt#!?e8C(ZsNOi71}mf8h( zIi11)?jYJrNoRf5O^{dZ>yjm*jn9qu!11Qz7>b)fQ^fG+#C5}G8pZNQ7tLg2Hzs_t zz%FVGa+PsRx5WM?Ol9@Mm(@?Gwl#uu#@=bnrWQ4grB#R@`&^iM_pM>Oa}|5m^P0oZ zv)R7d)`zsNF#J=r7E)U{#$<88$>d%7ym^fg>KX_qVm+NPR&+{vSv=iUq_FvJcmoymT^qboOXu+PvmLZZf5u7}& zXzz5*h8+7;v}@bTCwc*glm|CVv5Ov#N%?(U%x?X{j&LY{cQIb@jtiit;;P__;K@!7 z^)60ZW`FApq@W`s$Jv}mpuM$^4Rdzs-+7kG=s*8)T2e{)O) zY|G0PgZpZ6!oiJ^b8xPE-E*;A?d3Y7w=lISyltgu*E2tRkZpTDmK2ZTo9*+zfN6T| z28BMF9Rbm^z1!HkA{ha=#!g79S;EnLf-_K!*nupQ|B>j4@4M9wh@=g;XUMi&lL201 zV_aU}CrFvCn^{6%NPp#O=M)AmT4&mNF-gwy{?OD4`!ZzwnJFMh;Q|cY0xOqwvE+`47(IpB6w|I-Y=GMfcES9 zq=Wx|D+VeXB`!s^KEu0Y1hGi6zLk(%(hE1JRMz@!6xYrL4<5a@#}JK zCW!KGyxn?-WYb%0n@G|mW~AX+_;Vru#xR>~-O!%Mf1u}&@E=Y@y zj2vt4W<%3)yGQUz7~9_kSvbDwlIwIqW`QM&CJ}|VuoneT2shQQi45QNX&77HkPkjF z3`^uW{S9$Qj1Tg6M23kmH?|oA!4%5JpwccF1VE)|`D~ji+7oi0Gud8?<4hb(TD344 zaavCt<}jloR9`#T1iEx*-ef zg}C(4^s@TWB{n(E3GAL`*dlTK1A?n>%rger*06~BiIeP+mk|3%?v|EF)ZJ`6>AoYT 
zx36aelcK3FgZ)KF*e!*z%)900=QUwZy?t;UV-S)@KnU<0gg4Qi`=53i6uUaeDjLVVu%qrjNIQAssE7Z(oYm%P?*}f3|-VFg#||9po*% zsoJI9WDQ1p0rJl9@)&515zX&!8S8?>3+LFYqP^eqa*{E?$*|8u@wTEo>oX23bw~^qr)v$1ZTKXY2NuW` zST5c13JluJwM#1!tyxca=bKqi(qvfvjP{mt38TzX#PN=gu3c_tOaXcy4oGGZuvOT_ zX>{pVaoCs}%;%KEijyf1%lr$(p?)s;+Bc%TtYAa0a18Wn%NzcrJ;Yv4dgqE`%N%l$33xe0K3SUi8)(bCzPLjEX@E3RO)Z0Q)S}sBw^vOJ%Ale(!E4Y-fO$;wPkMlY!1Nt*-*WQ-IQJVsL zZi$z3yrRq=To%iAv~sh}jKlWq6Kz=@lP0qEI3h@)+Dy!{ZYx3F2T6+TeMbe@+ndDU z3Uj7s#o=SY)cB57kgjhU>$P0#QV9cn>xLB94H<2XR|7RHbUiX%fO-lN<<_wzMD7)5 zbC@tEneTR^D$k{$#&*#f0JYW3wEIL-+TK}g(>^WWdhu=1mFIKU!g0+9up?lNh0!Ir zgHUddhp+$xVkYKStaB;DS0@_{s)3Z9P)ne-%>rne8hBdYw82yETgQ56xt+QZ$n&aS z_o@|%_A1%>USQS2-XD>IJbcLP3lkrjHqZVU7iJc)sct`QA~j9Gd;>Rt`hB{5vzINu zK39nL21eK(W>hDN!_Q50(PXz-mR)G7n6AQ}>W_z5t}w0U7c-y7`Sg~U7!4=d@i@kC zIK}?36*;X*;;d=@>#?-$WI6NFXC0N#n zK(8E}DIUl*aKO3eT_5W>8EO{g;$?84^(nP#K{ADksS3M}1&v4IuE=owT43TK4 z^R>PEq5a#HE;m7V`@HcJPuaSu^%ohA$_3a2OcM6525-?TpMoeyf!tsC4BGY1f2+ba z#9>dCQZIe(DWhvP*?nJlN~Uv)WH(;WtH6jN$>x0NNxv$@r@rX0`d4@09E*ha|?&acmxdj95?Jzlh zT(($-A1+4UgTkTdGW_VS^alpm=!he14U4#O*K##gbV{q#9} zO&Il4lM@u)sQHnYI7?yqUKU1wjwNq93K_iJCBe2rg?)C+^XgR~CQ$itpzDuW#d;zC ztzqJ^CRo{=^|fb+dJN2BL06kSBTN;(1)t~={C97$((mB-;FRB+#5AI+*h&9_8J<@M zb1|(Kc^Q|A?+-t^Xu5Uy9_WKMCsMOK&$9mwgSN1Q855FkF5tw%xw}g2JCUyPzA^?z z6}$X-|Aps;_QBe2eF!lS)TY4wKOl8g>c>tyxMA5>Yi7yXdc61=+cisVK}aWalD{yd zYZhc;%k63EimEMBbf|jy3j0owCx+LKJUja*fNS;15~A_2i)C+%=RL(zSvwgg{S1d1 zwPo{rNY@ru*qOh=r_m3m*##oKlE#f2+U(zfRKhBWFt8^z4gltADR$M#4mh2hVs8mh zaQ(v7Rwvp^h_J5yIZVf#X@gyNN=z0d+D%VEvK^l~$;OFBhgR6LgsIqVDcL*8-u**7 zQyH$_1^tn6&5`5BPsAYSjt$oNR8K17Dh~BeISm+&6=K|!R#-1_NPAZ^%NFT+K8E?- z06|(>P_rb@ssvE%@Z8n5L$ucyVSHuSr>Dc9)Y`&{wS|?ng@hnK!;>fR@K>J+O|knH zf1-`?Kb!@m!tBBJ$Fp6+yWNl9fhPrBIX6kP+s=U#F22{&UzO}K4zaG{`WwT=lYTL}&f3n2dTol8-{shNh%fibcnDYh#(0LVC>vwyB*v+y-6d!j_F~fk>Lop?5kvGZ~iy!`D-8 zzN>fv^R9qL&C%*9mV6~Nk2yo%k}IzQc)^@&7PFh!SsZF@+0-V8Bv(K7#pZ}67n_^u zcIwrD@KZmB&E(C(|m(``p@Kt`3@i|q%Ip6vL%66<=S7lA=~u6=xyC*yAUf}0^} 
z&G8ZA?7>?-i(D%%8EG}Qxrier%WMFB;)BdLV+|{@>+T4X>rM{wPy{I{2|MESP{fc{ zw5DlyhAG!B*HxR=4S{IM4|SPHGUcc5iF2kDNUfoZ)T7*{r87 zU`qY?z0j_EcMQyC2vX3IkrV8D(OwY8g*5ZNSkkQQG&>;ZVuF5-wxRcfsi(GZRq%D& zWMdwHLt)yA zI)S|>qNOwKH$fhK%VyT6vy-iw*E_)u1>(rYciTmSF*DSlmVVi=! z_Kcu++~5c|*yjm*aZDo>HRH~|z(MP1b&1UqjS}hFBJ353Wo6Sl&7KiPh#4QTb_If5 z3v8TdS15r4nfC;}iXEE*mkT?>oZR0SVv7^J7@}PB(~=0FJ2+gxM&Z99;yd|Ro?V~h z34;$$X94PK#Fdd%qP@OWtT>1D1$q;AO($mFX5nybDClaL%}#+6&T>6!uE!vZq{62~ zOnQgfC;>0#!zC1RERKVI`aKOf8r094Wr_VfV{#IE7k?8#fZH`&Ale^q|A<|l9z$(% ztVM>W%f`vcI?>*Qp7iDyp(wg&mK_a6(TYXUimf&&lavgRhiK9F41z^G!Pt4;AEqtEnp0m z#Pw7I21g!YH`a5iD6?EFcw?D@G^G(Ig%KgbcJUHJ;Y%fgeM;@wwHhU4SM z?@0&O9-o;4SEG>v3q;eNuQsn#zit)uq4a16ODu6A+wn`-kpFCIOxQ8kehX0^K|S)V z*)(L{PaU(dK<+HeC}-;PYFxBy5}%Ll!swhEVbyrr41ijTKhSe##mc~{X(iE~VwnA) zG@CiwGs+Ob>O#P+zxDRetFe@M5zMjY2vh3uc@ynJ(P*-#&>ouu>61|%GI3N+@Llo3 zS8Sc;$-H8lt-rY556J^;gbDuD)_M&JCq+=(s19!`Opg-oCwF*l)QVbtu7|Ro%8tB?|NV4?{To~M6j@Tom zv9!_3@%FNyI<#ty4HK=mM82E97Vy!LC>!f!>#)$pS{Yhb0q>h&GnZo!`$arggLs#A z(qf>uE~#coqLm1{xF0*>oepm!VhZ4@c*t2jqy z6ik!WS9lgG!bhx)S{2KpX~_N|woM%L?2IG7KU6~d!)9|PcaWuvLEWue*r}^MT{4T^ zE7m}xheEcqgL}qWppSueJ?uOoln>%-r-uO3u87SP@P~4I#8}%G%GkCpw^{+pz-*3L zOY3zYlsaBgY(I*oJoa^_+V#~gh?Xw5hedm}-P&54^&sy%!nV&ZxrRnWEShJ0>gwcv>YqJUH-7dd|_|0uE4kfWa zgc~klc*TMdZwBeX0O&bqSzY$B5syBCvFf}!9#C&T@Tqv!rQ4PWq zV~O3@3j0wUY8y~sows>TwkyWi6WamkC}#t_h|#xP#@r&`ro9btwRqqIZHq9CPG(v& z;ytJ9N6)h_Li+ft+|ByC7Z3!|yncr#QqZu;JE8sgw(o2Y?gDwy>_x7$OLv1&FR8Cp zh$LaYe9_y}AAqQ{Zpczw_z|=};%MnoyJnAz>etP&_M#Cf6cif+MIq$qL7RNDLP*bV z8^@GPkjgR!*b$NRqE-)fRKQ;tw#%?+Yx}WFAD0u_ZUX)r+b;C=pSu?p%HbQ^CWSPC zRqg(L@b!hL7!s|R7Ofz1rZ}#8I^X#p_XDXmt7dhYb^X*!$x9z(V?=wu;6y*)4he_5 z8h9TGepP=F5A}CUv$H>oCy#Jl__EKzD8xl=n-tP%d6~9Bv=_@`uC^n>>aNOa-XQy4 z(1%N}LHIDsw41+(aSm-+ePmjhJtPjDqpf3p3Rl{2F}z8=*_{g@Rb#eq>m}k5vSi`J z^qMb0VHNhW}%?Dp-TV@4KRp`)#3eGCUXz=MUUig`-f;(uQ5O8j~|XFT|yA| zB4K|)I$)Q0BqrT8lNYnmQ7}D`o+10XV*pADMth-63L#2l^57plj#Tebf;}ga4%PQ( z*Cr-2O}_SGIOR&SRstxO%*DGx(PT?}#A!?_I?Ec<#Re^rC$1>0WE5FhVX9rP+7^@1|b7 
zcJG8R*i+bId;b%Y9$QPap8tADG@UruT;~>6HA^^i9wDICkMs81EHS+3<6~x9vmb!2 z^sZNQieul096VTHdQw+3)*k+mG(KvA;HX#p1g3&KeZt=qpr>d9OXUM0LYI~eZTSCU z83`lC&!wH~9O8|(I;i(mD6WtxseOYZ(i6$#|az@dQhXol;xZ1tw6qo4dh57DZ1t~z^ z;kkcsdOM+8>qJvPRHq$Vt>{!2v6pAxilznqWX?J1v@o55K3CY7(_O~or`S@@a2XFE zpI|Y=1vOk}Rone%#zeXt{!$3}eON+tycTD9N<{-`EjsM)bwlOQ`>L~DdTic8%Mpz( zN!zAn*qP@5{1-v?qgrP^A{KJ2YFFDd0Teh`z0uOnh4#Mfh)u@@=K*=fn9}gBbQj{W zpm-j)rIIaC3{PD%J`1Pao|-i=Tlzc0)Oe(EOVUfv_w4)d0TNgiN5lvR_F8$j=cWi8 z9~J4^czyUnuvZ|<;q4d2ASWdD)WxyXgXQ3%N|@A~wjZ82!rl!bwJl}y>q|keEx5Hl z-To~O4cGPC3oe27(fr9M8!3cD@n6PX3QhL~E1}haUD8FD#S(L$X@(Gqdp+9A=8N_Q zV2PPzHK8mE^-jAS8J&KhaQ59cS-`c?oS1{G&#s7N#pEx+jtcwlQD~jK$}YbW9#z$j zns2v?_Ns_Joot^9)B3>odx>3s6_Bx=JkTB%iG-+M`iwmpLLOsVW;`h&4E+D)S&OS< z#qL_kDM^a3zD`s53}Dq_m0|?FmTk4yK%ba&%AuI%a6PZ}1ksfvZKh~fk{_Chwq4jC zsP24sxXzR2bIN$y^)ZpIGPI7vUf7tW3R0`0>wG3UthKT{m|NrC?S@$P98Tw23ewrk z0m^J$3@{Gfk0-2IGQcjpF_tf%#m6N##b7rK%vT7y{`k92vVY$UW|$;m3dtR$TR`Ce zmfNi&`$imZA9hVP_0||{&yM#TL6l;1bCvyeTTGV6N^tt^KpKdJk8KxCIyO^NZQ>mO zs`N-xx8CWwf?5Au3t%{mRtAUCk0`;3g&`Yc6*}d#h`1U!cw;`Zp%+oVZsEbrv#m=A zv$KL_;K_G|DMwGW*Y1Hv4?>LLC$o_Y>V|M++t`b!D_UANWQcWX0%WWW%CJo$y#t8a zYVDeNCWR31D;ix{21nQPS*)}aalChFYLcDTJcb9>g(yiX`9R74!8WIJ+g#-y{roL5p7uiigi;``c zIOq^~1u3_4JH`?ai&MXc2gISyyxc?^-3gkO_A6lV`DkZ=*1{`Fg3IarTV%b&bWQRi z1G4Q`VRGNaVfcfO#ANA_?OSYGml)QqLA$l9r_JP)ae+8K8f5CM7sU38N5l4P;p}gj_2>aab+)6X*dKcWPH2gXmAS$Q@iMvDp7}Gt ze-areH1y<0U5X)6#Igk_HaL)+VIK>6N&KLu+r7QOv^RsB@b8MI>kBY|NVZEJ15wAo zY=X!5QA6vG`{vn#W(XuqykId@>gX)3+)Kl#3$30&zr}niWeXzRRHa-EL z!fQ8<;YaC7fcFN@F!Iv0zkQE*u|6aituPsC)?f(pk$`uo&M3Ll-S%ncL@TF7D_2KX z7ul2IcmZswV@>!Bm=@&c4YL1*bbZlGyZ+f&x;zew?4CF#?kMLt&mpHljK=}|M%M{a zOmy8OyX*OoUR!0!q8Y(>xn3YL{Au7$%hGYW*0NGeR|gBH0;>@Y-<9A{$FBMd99ozs z=SY7QpqjcNEA6{bkdf9e-R@3sLA3N$`&6_)_?+O5WM-llfe+S*Eft^$T(a92AzeSb z*cvCncTF;NFg83H?3(Ng?ta@_;?S0MZERs*NIlNVHR{wWQh{nc=e_;XLeYx3cpKWm9;B?}Z5l@>#_T^iLtABMZ_CY-vBzyBau+iP%cv6MF{16xzmQgf+;|073o) zsa&2JeirSuu^Qob&)R2tVv6sXdcX)uf4g9l?yoKIy!Fc$S-~KuWq&kKG#Uom 
zm!oWwpnoSHBLGh*44vG+BS;0c8&+mnzrhahRUffj0q^EKR^}NTH(dB4Ec!CKVMuht z^5}+m!L;w{BA`CO zr9n`wg=3E*GsNjMCL>>q_Q#i($r+9PR}31sf7qw5*tJ6) zu%9~D+6YiYdZeGN6is!SA{3R%pGMIlD;fsVhfXjDC-jFnG>DIw++-{i^wvMz&Nd2B zQzWrpntdCl%+ASAV^RBE2pMzzwlB5khLhB_j3jX~Wm#g7mz6(}V+6e3X!(RFKd2Hj$lACCuw>C2o{8#oG* zj>y5U>l)Gi$f8x_ZC?m$Hq|Vy*<>9@d!}8l+F4^D(U{+)D!XrNELUPpl@$t7Hm2(% zMTTuD(0MCE`F@?vD3`qR94zcF`+{&|%k9J0Ox&!L^7&YcbgiXwtlqJtf-fC33_X z8ym{%hR%u6$GAwpeTt_e*on3W?BWuzkCk4*I`%m+sE)8q?~0_!y3(_5Dl}CSI8as% z_e}%RtiS_zx&T?Y=Jb_FvOMt2jn-^BG(|JZT*iMZt+^O9pS2e=l@XyhDLb(sP-XZG zPtPdEM+sIr>t@2B+*VC(#%xG$9&gutyYE#O?H^}}BB`>rc!a$#8Z8S31tBCZm=nvD zB|DTyg-MD{sg=$3q;mLBy28E?JTa1(zj2>;o+sF~`h%628zs*7yx1DqDAArcSg@|N zp$lC4^~|x>q0}?djs$!JqzF=JbkS0q5YoBuUymoNojb+;E$C$im#K;WurO9mZ&rQl zLKqB@iMC0Q-VFo&mD#n6yq?_1U^^_Dp5qG3&c%>!wQ+n-@$u1X!rSkI%iz;hXo(vY z8xn^)qAeCg=`38{mW84q!r)fhC+LsjNx5Tsw%n7}%ulpoq8S!x+RWW25Dv+i{Mwwd zG=}@?vPIQ0uxro_^YiV3;S<0SZuwTbWO*z>3QiP{3i=C->l+T>BX-sbSk#v%W1-F= zU=~|t->h^=DmF3Jqyj)|>WZe=-$hHsJhrC(vC0Jp*H5%NE1gc~>P#omt~voiBKDE6 zt9oo4cAcv|>9(Pnwk4$NORKDV75o#^X0Wxt+6z&f9ICt|n&Q%HRu8oC0{q6Xr#%$A`oQ;^ z?Gw}6kv5q9jl;qWnq4K#0`Qk(*_&Wc9C1GF_RY`~$NXWGO%y;oHf{!I+f*TvWkk4J zz6F}TUqjn_6Lzqyw>*w$)A^;(bw*NGnTejYIcrw7XCkPX_ckb z#5j9a?wOBS#HejvK#o3Y=Wcg#u$4SSKCet1^+HB+0>{om^g?vS8awwbXm2N#Gs_$) zhHKWFXEXn8pfpQoXm1n>c}f;6C&~m|nAM4Br~;j%810G`1I{_tLQS%?m}*s!ACf zHT(b(O5trFE!7?n^1k55UiW0@gdA?~*AAUjJ9NH%6i=htNOSXt^C$6jt|86QK^)2Z4-X}lNU-IvOzeU6}a zU$>sNOo)^j@?f)YUtGlP(*2%KJ;ej*CP>OO{EAqi06l?UQ2XH%r;{;~Yxt=r>aLB^ zYJn3Yf@lq6Y}sdUTos~)*t>$1lE`Sv``qb6!{*v*(cxIsO_WMbJ#POJo&Q1?o4*)+D-{wi3B#FGHYXLJ845_MjT_eI?PrG zll;bKUv5YstetW3zGZCUFF)Yoonr}DxYafZ`Ys@o)ii>-X&PK!2RHmsaoUw({}Y3j z%XjaC2fc)Y>xc7a?+PO7%Lej4>Y>pa3xVl&%OMA8Nm$}c3_;BXz9o+x_7eDc?arce zo**S0TK<~ZSJ3_p0>=iktdTfKYjMN!PtoDa!!-dLdnAU5MxJI*9R;H|+cR87+*4(f zLk2aaN4P+XKR8lop*J*mWoGh`?kScJu+F{hIU%0PBW-MkNNSEQT5BgCclM6g>|T+s z1V0p;tm)Ta&rhh&wnE;a7>Lg&#*{v#e+kp@+niBg$G#yA1=kggu#3Ni_Lm47nZOtE zR%;}tSAqMzT$?V;3YKQH8O+&nLS&F^D3<*bwvP;4=-TW50-^}v9yvJMDddAPDE*=)$u>j 
zu7Ny5lnIiSEl!WTb=7}8Z2|U^w}~cgUD0cGMRVRqlORn3)qvNm*VODJvf2D<`kTx2 z!1!r@UC>80+d1hhi!S}$6UaXfXP}jWs1RWbuH8vZ16oEJkBjEHFzw{zqxs1oJwAmU zTqeLk&FyPzM4~8~%w})-%u|4B&ql7%h$ahD@ga7`sVqzS++Mit?EZuEj z9B0d_WKNj&__PXpN3?5`!ffZeP!z2wwHwd$BIU{;NwgQq<1e!eVOM}@;~Xr7#*5FhYoa3_EQELfT z9Ft*MlWg731*1q{A%jdXp*PEg(UDCFs}k)^Y}~4$?HA+;D;l;HUjXp-x;y$=iMC0+Fn!}jHuFNDioznV z@*-&OKN+a=G23^sO9#}HTcb;yK31{B+KKj*@-xrlD>FD@Pp=(gI|M1VZpa3^<5Fn< zWCV_mY*d_qHBL^R-FCUBu4mu#nk%3g$OO;)n@I15-pQD`b-EH9s*Vl9R6&&BYH3CR z4zN?K$yJ_rP=SpW>3Inl#EMARmEblY->rdHJ460OapAPaCWsO1^ab{+Fiptqk6$6t z-d_JK+nLwE;qeh{mV@!8Lay2IHM6Z)kgO4G3Z{l4Uwqn5K`*IIW`y%+4nsY0w5r6~ zUkmLI3?p*(8X1sJibr8Jgkjqw+UsePL#*=afZ;L^>tR`x<%{L*?2F6KJlijZH;etw zGQ0nJFav~H?0{F;T91bC z{ZY`_NvtK`5yzG7tJ4obGS-fsWj}`^UJE((eCaMkbX=$QHcq5JGqzgsQHY_=MR$A7 z=;AS!Et;e)TiDinoGeJS8I2&lsdPkk09kXIe%`Yjrhz?&$asWwU& z-42h(9j<7~3ih){T0#>SeLZ!Q-PY7ae0SP0C&+IG^oPII{@&bW2RCm$Jf)07QJ~j= zHT44fTMJJccuzbaKqGSc*_xJ+)O>7NwGF!u+FOx6FwM@mALy+J%pTU-qYuP5I}3Lf z?Hp@OAB<(eWwiBp$RTql8H|q*bS3ENjh6l=F!i?SWiN@OF1aG%kZhVDO{v|mKF2;0 z@QR~rS4P*(u-R?lpk*5SkzcoSTHZ%~2x#*}0-MW; zVud{~kr&e=!}xLplxP zf*(YOU%ga}5N+5~u`&{H=-B6Jptl02Ao^7O4nMVAZ5$L^cd0MvsAd+L!k`+ zA$I+Fo@y@sQDD8jAD3qXJAA6nPB)(m+7&xeRg>wJX%3a0=qt>k1edW z7NW^oTUb(CxYQmO^qR2@Otw!#F`5%~*1i!8x^hKe)>LM{i^F3ekV(YOOp4`ZuXGSM z4#BQh;}#9ARb1RJJ0ZcQ#BtwTZH@_hzx3s)at*s@a*U(zC-d1i#Gy#;a%kH6vmnDR z14H{P`y~_}pBFg6J*lq?u~A|Lc)bv>d?0*_$IAwc6Kdq==E&dMHOe@t;$ zS6HOYt&9-T8^mF>TtU~D*kRo9v!U?>32Yu8h{LwrW2sLiy}u)!``B?Il;DGp2kA6` z_j}K53^eZT=SjEE&)>#2lMoN@*yK_>Ki#wSOU8V3kASONvp8a-GJrlXdImP|i!(jr zk-|~-couYcY17@rtzn0Fh@G1apMGiZqWwiA^&FZ$*A|BKKt6u=3qIO zrTyW%*8Q^CMDLj!!$h3JJEAb^p*O+RttKxfekz#>$cMtH%Y*ex7PInUcn3Y!#rhY3 zd>HQ^^66L%7RzmjcxZT_aK1e~0NT4>W&$h-E*uDlE*&IC>(v6TobI{Vc`txz=iWv3 zph(i~-@+EsF9M#fqc+Co4gym}ZSh!J7t%*wonu=>dpa(ykFXuW-uKb+Y1VMCXT!0V z6^kZmWX1_N zFgfTGLR?ggVE<{7Aib8I%Dt!(0ZPf>{OW+{uxAz!Bjy`ne~fP~wx5Q1wxpT`S(ZLL zCd$mQt4D&+ji04exD+Y3JH_yl*vO370^x9cug$V4#WC^TDSM|`?@?f{=(gMJB_ULj 
zr|8L|(UtAp4fbg$cz`|UC&wUQ6!0VUxk&Gs3`TE;oi-MR*QVP}spElO`pyxyT!<2L zvJ1f}RyGywoS`DkK)<2JX~5&4EQlz1r9(BE77RY)aiY59nW?>fO`X8f_5H^JT&>FUaiRxWdX6pLCS83Sc>wd^&AJSQy#b4!8rCpQ4L+I#_&`+9nb zUA52!HJfl0^O=Cpd4u(15d8C?m}<)O;dX*PG;N7pvIxGngDuT|LUd?SGB<$;3qCT$ z)ZaG^_o*KW`zwPP1Dh0w#Gu&2vsPH6#n5P{D@K+e;N8`v@!d8=kPc|y#-@t&Zf>tz zl^OO^n6_60x36jTMHvag0Wlj#OcyMP;a&51PFD$g(d;K#qM(v#bFk2^8?FbCHLstSLQBH$2q}q|1wJoInlMN!uZ{7 zSHx;;%Vg}IaoB|=>KQ9z(k3{%>Me+>={PG+6HVvJ#RIm`YwUe7=unoH!)!rCEMxcN z0(-I&NUQ4X1#C4 z#ceqlV7j(PoN$cb=CsvXFD+l*6s@CB_>>C<*cs~_2ERok?a68&CF4@Ak^OZ&fKJS$ zmKmabgtp;kU#^|J!G*QOGwfW^l*vLz`-guDAvRW%F`mr|Azjc8Bec>J8OLt69vjI> zb-OA)m}Q3qydSYgoV3}?PvGF~JJI15d=Url^8Kds#r9j^d(Z410cWEXiQ}!)(fI3e zn8>vA)U8Ov-mhkriaD;GC7wUc{Ui5}wvob8sT$uJ7rqXl2aZoKvKvHu!kZi1c$0M% z_8!T^9cZ@g5+i($+Va|D$Hbwmy{~Wr_s_VvcFss#hTEUsM9j;}6}fzot~b{YZM~ou zmEcy4uZu%b`En!Iw+4V(jWJ7_E!x#$D!72)6$Qo>i;q^yxYPqD)W{N|V+CmSjHmoPmba(eC)_Da(nwNpqq~=2 z`0Z|Ob3!5R8ghYeH6c@cZuh^Fn?NXslwDHBdn%7VzT@KHt1Fe(>rV) zueXzT0_YC??#jv5Xlt^|IlL@+$;6gRwa3IkxkH8H53Vn^qA&qLL8H~HqSZ5OnK;xo zfPJyOVdChb*+iPR(|5b*s|`zS^9NoICCfWO^ABUNCpTE`6-2G1g3rw)Ars*O!c_pIl@sLP&SDWsE=n1yTm$n+xp?k>0}Y45lMr z0@W5iFvmg~gL}K67CzMz88wCbCU*J(FrzAePzE*wmkCh_XKk`XJ+BUk;xswWZWM6E z^JT-69>BQW zllhc=8G`IYM!+fGdOi|%$LQ`>K@w);#m1U`2cYg~*>HPHw5!##W})pAq|^HMw|bFe z;kWBmyXjvJ?!D)pd+j9w=~Om5TDHl?3Hr!nLrX^yzr{Jaz(xe^v%dE%sMCeF;iCTl z{o}!AAEzSh6EqjkyPi#?WP4B;Eiss~JkdNe;;UlQGFT|=%B(D#`2!F|KP_S{Q6=EY zu!Ry`vBa+V(eoxp24feTD&#+*E)Ooota84OiXCIET&bu#dHXP1DjK~qvaTdPmV zxES(XNNbIK)=#8E4UDCd>~jHEF%=&UUH<3gMfzpg-jL2?xV8G(^RUXr^MC-AVz_PX zeu1Xaw+Mngi($}7n8L9<7;29RFt2A>%KmKi~c&mOlV)X>(jI>}|-w&*#e_RJ6?{BkjGov>VgV?*#ql zwsy|+AP~r8Tl%|~fC~?{LT$64zY}{-h3-g>)PF#O4`93QP5g2O#) zb23bSpn+{bndP72xiprT&JPeg@uuP4$lCoO#*lL#z7t~52O14;i}vS?BaqL8yve~O z;cHKg;l9~?P`V0x7vy6=aYQhjH*MyCWi6Z5^G}1P!q+abH%@mto44@`(XJLpT9oyz z7?gEr#*iaZC)=54IEJ0@zw;%avs9$P%#PCLh?QNm~NFvtwNsW1Dxy z4__vQSQ=@~&x3=SOiFUCn}GKoe!|MQd+fr%n&9U1W5U3epqH>OM%&|(Q&HY7|D&h> 
zU^2c&A_86)J8@cHfu8HY@~Y}~~$>7#tb+g$>r%Ke)xS)@0Pxy%$RzBGV1#JbhS>p&cAFSVWr+90m0oUWE_KFL zvBc3ui{#JdZgEgCqOb)cLck^V_15QVBvhgZb?ZcjPt1=UD>9OeHltLgs~-f({}qS*&o+SeLDap6 z{VYT^34^u|v0nu|S6gIz82|q_hV-*@$OjKV98lw6U8Adw>FoEq>(YbOqbyB) zSEor+HjxCWv2Mt6+bo(EMhJJu4Q`>J!fDYBYoZ&AZFy5Tw3&14Wb4t)Gs>R(1JSe~ zCBY7f^oN*|os@4qn|rb(-jijby$5^uupL4u5?xU(&t7}OgbJGZX*}Vs752YSas)@) zl`Xuu0nD{qwsblXlfrvLTKizHiKZHCENI3TLf&3RoL$}uh^m89?Lm>QY9jY)>^)&m zsi^V^t%3fV!?g(Jz%Pg)g-d5@JQNF3(vH=3^}UdEOwP8+i8eui9&OdCl~s!-3%f+) z@B#8;2ziT`*;>#0kgC74vIa%$Q6UP!Tx^Q15+Gx6COO957Ia-wf{4i7?}tGp5oY~) zqLmTDM%y0(?7~Ldk0A&a)JYGJ%$wU+!FEPJ2==GZGuLK51foKjAED`2f?gq4dl`+l z{|QDgPS#mV(Nx&KfXVRvAs~>}1Z }4>SViVc{D3aDTXnDJ>Y7g|^`@qeeY;}GU zk5cQ`ueHt{pk1HlO>L!+hE%GZ`>-cEv~0f36CLzRIrs7GA0cE~Jqe?`OFMeXx}oDO z(#cbDcU(_vL7>CD}Rav91p0a-UQfn?6U6{d^*;WC{-#=7dO6>D)4i(q+<6+jiJJ6p4 z%^0@tHB7J5#B+Tg&Boypk;cWLj9|yHnj7ap%FoZODYMIZ0MuQJ$J#rheN;ubIbz=k z>#M=NKAAg|*|46T`cUyy+aQ{!+NQld_h(2dz&=A!l!^ph19lAwieT403Wq9p4&6D* z{v86G8ZEG~y*w+=vMs%|SI||^OxXS-9KI}pXR`*qVW4k1i=`f-z3PJhzVr56;~)uO%p9tHXKq@e4T z8DS-vD@+F@WY|qlL3)d|&ZEU$#bH(lSi$={Y=4nGE0%hxX!khg$H{`UjyqVeQ4VTsT}! z76(s%qdoX6JbsCZ;#6C>p|)^YZDFsTExz7+VioLefX@87W9Jh(>Qlr`xh{6K3p}-Hj1WB3e`=+ErMYe>Q`@- zT^5NY&>`V%ad>>lGW(05hF^w_5lN}FV`f=qf@cccOqSTof`|@ooNk*$qbOdyvhCdy z>B?TyDkC}3(+y-sv0F5Xg>I$KOLB;wnRv{&O^_0qh*`2|IwGM!!Ip}I!bOn0vu)-C zv_mpHwA{PeTJ?ogAInpEk^NQByJlY*+mnlg(Jna!72XNc1`lbo-JgPtdQhQxE9)(! 
z<`#2w5z>d0#Yl8`CKve0}(WZ*@pb|k?+l|2lR42A`B=MTI z&%xvYL`~e+u$l~LFTHD8-(<_qjA6F;vg}J?hOJEBStnIn%Pbhw*S(Fc5=l7+3g=nJ zY^Q5C&bRfV^;sCnR_FaWfD=|$xKm-UV`vrlwK^gZMd9M|tp1SRsLYzRc8#EFmFs1$ z-C0*L!lvmAfOvenhauZ+&r8Aps~fVhZs^dup~d#PI6+5_wnL)5dHCx|vr}_jrn^CT zqP@=JV@hpS9Ok8_*~fz3WjkisCqgKsMGVWB3D!T)3;Jr&82fv^)6qrCZPoy2+P-fU zug$*%sDZAlV##;@Ku?#3726}Cz4Cs1lRpmO;VEnCH?Ok(FCcZb`^jJLO@lmjv~-m2 znw>Wo=xa-&G6mm+E5-60EI+5&&NwEvHZ}z>BBxXGxYu^lOI}+21iTL~v;4);J=@qbLL|hzI@=2V2Jr648<=e$#9{5|iMB5ekI%2L{|Yj1J~V2~q1hYiS1ce( z(&42mt!1GXg0l)aX;~CPG|wB9WlIFThqx#fvDQVgDl;Q5SZ_i9KI>>UVv$hJKCPTg zza$uTa5;Acei8N$8|MMo@!K6k;HW{eqdsjYfKJWg2d!ANH}2Tti8eJ9)sCsOQqf-O zj=8)o3Bw%oL(RIH&>zAYLAs(>XcrHUl~h2lwHgu2&Gf3o1`AVZ{A+EfX8 zCr8)LjIJAQw~mA3)#6%%sl?a`Cvr^HByzr>zZzJOu}F$onOI&(KD#L?#03iTpj+=w z9jq=6kB^yWcTOZdt-Pg4Bik(6pHI7N<{R@TfxUX}(Xa%0^c7EBJ7%pj;?sj6YVcTgJ$+p2}mO)LXqnQ&{*#lAD3!AF8>jWf})FybeeO`RG=%-Nfw#o zgemxFRiXVP+7-&rPGZG!*EG*7qip8ef1K`Vqbn*ceFn6i0;VBzziDz_Y>gPuVzISN01p=3bQ^-Jf3CGZZ=<}G}R1af~}1~%rGa|J0YZ> z73=o@K<)qjD5*axH>l6IvuD9VL(GTrZI%FK>nWuc*xz1_!7hBg%LKhs*o-4S^&BuA z5g5}}?;dWy&y6Qo%_idu^E^KtGlTt_H$q71>2%wa`ADgmmBLKhD?s1K;<~{Cr=#VI zt(j<_3VgC8*t1$`m%IkgM^{_C39-;!B@Vf9$8FW3!!2IDh0DeKMoj8r#yH8ITjEEzIWpZaGkKu{w3@?2zu%02-ZC;FFGyqj5JPREHbRs zVi+1StFT=c&ExBvXc;0;ykEnkYpd*-I8;et)td9#1!d0oU?uScIoz;_PAyR$VU^NQws8VH~%aani4eAu2>08pYwZ}X%|!g zyr!BZ8FsrMGFCOi?UPUz_sBFiB^eqewn~^X2Ql^98q#{`^`iYf?3bLBm2H1m z?Fo}P&A6t@>AJ$%mM7W^WZ5*%R>m=t@(SB3?6)-|S((Whc>a}D?a6CkqcXcHoIP8n zpoY*4evoSg=-)l_EpaWR_fk9EW*KBf>s;6)%Ra4kVSKCZ3vpbTN3;3OOJ5JBbqBZ3 zw}GO)lWL2LZJl5^4c305q5%GJgC~zxPqX(#hfgz=q48-PV=a(omu^Ik#$?=RLj}CI z`r?TU!zeMR>@Jx>-9;=?m-~SLU&OaV`LGSa$pTE5br4WaJz78T&) z9I1YMDzpW{2Wf(x{WdJGmQUqG>m0{@b2&--i!kjTNaW4}D-=@km1T0CT`7o8I+rth z2cU7e#NHQ4vOG6QYxr)Mi{OwLnPdy1{P8i%Y_90AAKZv#(R-fwz|h&9&weV})A4!+v+t&JE|hH;zq6-`;h77C0i7Ta`jy!AS2 zv{U~9_R`w1jrx!v?IU!;t2QwN1zAY`8&8)jz}_K`Cgc6G#b`!Swk-vWLxrhFJHBa+D0YM!0{5ge~p7pVH$wR>Q=MaQnSAFRTI z_844|-XN~GXU_=yH-KpBjx9UZSwuiXk}E=KqFp0qlB@0fk7L=I++`OEk+OEoWNRea 
zJ3L|gQhPEE|4an>)Az>8$>K`By(sKjb`7pprtE`3zkgC}t3=W-fnUUFGU@wN9Lm=A zFtf`G_XDZ+(3&F45bf2brxn&@N=z3f_CdiN7eQM}wlLbqLl-dDuLAri?S>tX2qTxSDB z(xSn-NARKm{ZfGOYw{tdb2(;w^DwlxV}Kk@oc9&j2cEoE@V2xP$CdBN)>_v%R{GUr~eR3%(*W+Qub$YT*WL!cCm5sB*eRt^OVWrc=bCb z?bvK>e~cBv0+Dd$_PjV=2$3dOl=c1zMmTRTou)0Th2yM`m?&~|&2p<1?Qi|Q1%Zih zC5w=&{|DD=;>3u+1;XU08?x0Fil&$O8JJ?3KRe(>^G-Iq#af9Z z1HVI+cG<50`h*+JvK#9tNV3~+w@#sipGU0p?EK%5Fc|U8vu%}KE#zZ2AB(-a1*z%q zk`2}`Oh~>w-ff{!b4=y!`rl)iebpRmBaAY^)vr8(@R2QG$+5~>oYXwlf!SHfgRIlZ zFv9-P5e9Cf8C)~PMeUyLY^z9r`8p5A#szfZrNUmL$SVFXhtu7ywp$!;OBaq>>IG?w z{BWQC2k5KMHrm2~j$KS7T%FGICrOI#oS`RM%hLef(7}uk>n!Yg^yp=SPmf94>CyZy z9Q0;7v(%@~0HOZ2iT0LASEd~={iSCDQ6{=*u{|%^2UC8{TJGg75QE0m4P}c`f3{2< zS0XQ&pRt|7lyhwQBSHD-PH98yQ zpTN+OTb>n&M~1+2!4O*+Lb|F6X6xsigH(#I9fFH~XjeRs@U*Gc|9sC%+IAe7oppi3 zo#S^F;adDfL8`8qPdr_F;6fMj*^$S`Ru=&&_bBJtmtE|1gbT!9T;j}95xYj4S$a~blyWZ*yWc( zdV`XJsnhwF!Jq<*Z4Du{am~{%zQ$7~vM18vI%pq9xRt`_BaZj2A(G)vVM;wbtIA%x9-6lx zEqVJUdr!byki<>x9fDymGK%{`j~5UpPh883(;c$pU+D} zh(%xm^UXp*FCKf|B>PR6mPNRY_10~%oK0FbwCitoQEqZy>nPf@5ygG9-FOGs`?q%J zJg((i8*wP8zG8|sYzXZSBLV03vjlzAvKZh{tIb`WHoB_Nz7x$;%zSr7y4`oT3yyQc z|I~Y2KtPBzTM+_At!_sm) zxv|rE#B&@V8Vx?!_~8tDpot3(ZZ5G4o4R24SXPm53Q$M1YGrf{hUatj%=e1v-NM&3 z+3s!TxsOj_883&h zcFefi(It4L@T4g?t+Iz(0%%cm-BcST+WWj0r#+{%0=j0q<}!UR6L#_A{Akn*`tysf z9gfw$UC6_Fypgxt}!NXkHAw*$Q8I_Qkvk`fbrp9wlQsMtPr0=H(?}Kg+{= z@c}q!%M>anJ1bB36JHj~3&aLA+Y%oHqkTF*(JMuVUAU-3kIEi`(GOVc>43C||?av~;(!Tlz4iNT7w|#!LjSS&ei-*;3 zA;eo-q^Q)G&HadIR|(hL{9xM8;&@s41zfnkx?K$GEZ!CiqjFM2;jC+fc&_E%jx2`0?H(x(uMY14`BF3C&q3tqq(tXn1qWi>qpvAL4W&okqvKGVxSYQjV=Ap z&wOoDQqv_(90qI;*Y#C*Om#QBMx(h+E)UUDNn9f_jbWR zS!|~ZAQ*)0?8~AH3cuCMz_*AwUiBH5Z z%j!ioN!UBky-ziHGR8>DWFT9i7%0g?V;v#J9bjIQZ1V-YDje6@p8rSHd4OkKU4OjR zZQXj*wr=adDgp{P>R$l?Q9uf~>Zn;HAt6aKK&T^w5vBsNH-r@+Bq0f5s{yO6MR0<( z_6;E*0&3N^YHR!de1GTizO9esIp1^Fx#PXRJI*~P0S9oz`-{g24JSf02s#I9_Cj&V zjmc<|?GPY?{f*NPPMcsCJ|54nLx;B3AwX`d7xQ7hN08opeU`l}lFAR|72uyllVPSV z%8z@(1Cp@Gb>frIUUQanW9(dE5~Z^7`B@^YhkRXZ7AEfxr(3sQLwcpO<2w1NI84Um 
z(^&xqiy6-(LbHnUtU}Po0Nv{kwU&!Vv7c<4Y1Q?bmNs@U z8RQ+48X4tFNe!_pYh$a5?W8n^TA6U$&@32QTmB>>?GC#>J*45}ps^m65+I{0@=38) zdf0u$@lv`oDQ7wZahn!p*>4pXhO{eazhINa^Hxn|Lv1y}#6P&T!oEmkR=`3d&3a_S zBV%Zx(8`2q+XK@#99TEaUJ(q_4Z79Kw%K3BA-7~|G&9rb17)M^Rna6wEwKirs`~~z z0rXZfa5F6zW z2~z1iJ>f6N;7Q!|g>a}LhE{Uz$3>npcW5Sl)S>B<=oXuY9F{7Erh+ZI(H|6j)*7b;eVlB zE9{LmfVO*7@Yt7-jpGV!9B=q7Osc$Z=4g9FG;;F#xvjN-#sMA3({}-46|h%*Q?!d% zfM%FuuaFka+(>SI)|2in-dk*c5+JFrx_|tf2jE$7ymbhmFf%>LdOc4F!!{37@wt{K z#CX}Y#;QfCOE#fX7wFcBldQHFKBY$c)YBw~!EPG|jbw##np-45 zKt9$bKNPL$e3i8u4;eN(5cSJ%TQv?CquJqi_O;;`*n_xbwmZApfyp z@NC=8P29<@7LOQ7Jb2HvH9}tJ~*BSH`(H3LQe ztpVy}iu0uIGd&^qkQ2EGxnx!%e?8KQ?C%(1a zjSvMU6-6t)W4{=_Ht2>G*eqeHj#7ob1uFyO3toyQ#d}K# zr6pp^X4|?7Xr$2&)tUrIMg!MjF>q%H<2J3v9$xBUG}*oxFW5QDfd1Qi7}fiHs}=*v zqVW5{A31Na`6^mjOo9%Cc zG(WfU+u}M<7&9E7=dj#t5{s;kPMdoeE!*AoK(gX@Xg_Fx zc2$>w`I&iLQio=8eQg(tjm&#XI61pWfNJw|lM?q(QsvkX^R)d@b>7zFJhM&9i{{$1;zZrRq%g*23X-`tZTEA>awuTR-dVGAqxIY78Gz9eMupd^#i1zqXMW&i z&x{G6^24n*UC_tn&N>v#R@=r90Uu`#d`JHhAi4d9$-3Js@%XXI(Ym@W5XUC6Is@p=f5%dObCStQy! 
zN0(TUUB@5(4jvMxMYbaD67qZ_zuAp8R2-L-1&}?Lh!9y1t+G0CC@r^XVUGR$bpWMB zY8s+Lr5YE+8kcCl_MMo--aU>_*L`nz>>Z1WEK{`CFe8b>hf{wK_QozKEW&^a4yMEk zjZnr-XrDL1R2c_RgG74GaDSJVjSmU?w|La7sCm=m6ubV9AS6q|qrhXLJ)@y%+b7yl zVXxznvJ%VK84~8{IOnT?7#6$`KmIMkC=*SMOGJ7qoqXI7AWo}!jbhD$YL=n&@7i6S zst^9wL?Trxx4VO}TLMxaV+$umG+Co6{=>H+LpN~LiG76=-+@D6yu{Pv+tUMVE}pBx zE)NhHvzSok2$Jiend_`B5yYvbZGG2+WG3Rg_n;Ax8DgZ>2q4$+6n#JYz8`|hIbgZ) z15a~s<2E~K542ahClyNL=X$rG`sRI1o`LjK2BwMD3xH~ zKLbUFQDTs|)75q$kgSg$h{2^!%{F}{={b9ee$UcSJUOl=$z7-aY!#zwH{W{?QkPKx$ z4Dqa|M}1Qe?|Z`M3o`Z&a|h*AX#)m@}dA@^6l2MnP)$DNDKGo zceO783$gli#}|Z3=!*L2Wg=d_#c=`gJT=VD`7&hJNS{~RrGhFum-f}#s@K;Z-dq-K zjduvrfX$oM+sA><=PqQ{-<^MC@hDp$+8Z)2t;nhq@E&@3i=Zk~LkGVa!uaV_qFcXR zknw))9d=Y?_{FYg*Zl%lx~KmGp39z=PF6#Ni9X<7o0kZ>uLuM1mwZDIrF^kqR;m47 z0HKFssW#HIYYlX)a<-lR9dY~@*P)Fq z7D5JI-#_xtkag8;-ADiYUqG6YlUQ~esF;+9;q5XcH`6vIVEbFzS*sw~;^l>(V)6GN z|1Cs|gG&2Y917~%(>@jH?ZBP&aQi$2?ZW2kzX_sHhC?#S_NXZTpwqFqJZnNh@;ER; z49nEh4SOy?*)He){=6T0D2pSx8NEW#3zCCcTPBQ*+b3qCK)x;@cQ1O;@$tA!Jh}Ev z0%OD}&E6NLM>6#Uu-6Ik$mso#v!{d!MEknhR-Y6PjJ&&kB21^Wlr6BnCqt{tGwsw< zAZa2$(bG2JadD^{bA^0`9}N&oeli<%!l{H(Mr1<#Vgd4bFvmU->6OgOW&7IZG%&@r zzBtm}5A-$HTxZ*U1mC+amv7+5g5>bdTKh5)lEZHy<#a-PfZzyYjgTknL$dTUfL>bf zB(5-iC+r>8ljF)lt2q+}`Mx>Q4xbf|o6mr^8_ov%ptxUGwK!TlCmw_MN{j8E!qky3 zZi)TiTnCYRoOcAMm6Xy?Ixn6oQc8PG7!jibo-}*@e20vof#~*c6C~x~>Y294;z5NR z8oe**!pr zLY`}^uF)n5QlQ4}TG76Qj=a#&5!*xBtR1D??(P-}3Y1dDjjVcefckcbrC(-&-FX$U zsP67L*8gXa^hTCc#c~B)P%U*>D846-OZMJGOS&55P0NKjenx~n^tz6A@iidimMsJ> zM(+&}EgoHmj1I6cgEzV9_O+l7Wc(CzJU;tc7*sPGJ0tT&Bj25O+IEp%WIw$_IsH11 zcA$2hrL>8MMW50yggsB*o6xP#UGI^x%Vx7hyR2-ez7ay!BJJ?e$^5xP1~=bf%MOlS1BZ_sT<|JuQX{#bGdd{vo>GW}fqZgi;8q)p9g`o*=0K%UKl7v!})I)HsU3 z#94^I$j{?-Xo`IsGJr9-X`OYsi}=)uON8x?;yvn=6t+@se_|Vxv_E zQgyV~3UdpG=VA0l9O_zxA(?0Hh9*b00IRGf04`!!ISIIIxaq(gcNZ`v49>9LBE1!Q zWahKnoF#_OglO_`8<%gt?+S}T4lk&--rb>MvL}Lq^iP?>qBwW8q4=I+HpMqVXUmQ8A2{$PEi^k%n!ssdc2vH z^o+;M7;c{kc}(6?aQoTeUKj|T%oJQInl|97YETl>Vty|$O-py+PsjS+=TQ$WnPmL} zePk3HJJA|jvN^rciUhrg4BjPj@fYG=*QTxAe?NgBvg=Ha- 
zZ3OT4`Vr`@dQTU-^Oqpii}z&9R@*iGf#kS-fjuz5Q#!+iWh&`6k@gOia65UHhfg*~nSxBkGZgXRTOn&(e#p1-Mi z{zyCh5rVwb?PD`-l%SW2`7#{2J_;t6*3Ap8TC|7VmxR^v%YOx?nn}1kE3kVW1BJ$A zIrBR~AKdv#dC@1R^W*Ut-B`0$1&9@Z>|Uw%2f;9;BFik(pMY`ffPNXO=dzJ=#gm=~ zCS+yacSeBB4bxb9?G2ETHh{-q*Ztaah%TYl#+Gf?Ec&UK^m?pvTI9H~*q(pN^VImP z;%r>dhlL#7A!+|4IR1@_Z*jjA&tu7KIG?GLlHt%)vV7e=&^yYLY`SQAC5wX)YX||& zV;9>8qUljAcMVU03|~Q+_~0VjHi)PESO5-5g{E;(Nm{MP9rhnFG^nDBDmyU^c+9+q zUuNj7ge=;es~{hYojTY9Lf*=Kcyf_x&xwI_ylt$qVgavIv^Gn#3kSvHXUUkqT`LAs z#p=h{E22p|MAvbz3((=*OX>kV|EwS#mJgUh{Q22S3H-q#zHdu~z1Gs1`f%f)Ua~)n zNB){jzY|RZHO;c28S$)GWG}KhVf`9r*|@=wYKn%9dhPxPK|*%Sw?B&Xst-b|P{^f8 zPitCkBeK9=SswW6wcr`qaJ+E_pd?sm7l{$IGDbMcZJ?m%z=d79y&r<*a0V^3e+g1t z2KRZn_Sg`RXaCk1>FV1Qaj4qi$uC;J9B6Xpq)>Xuj|kF|k?o=^0WXGE6=_x=OulTU z(IlA`QgS+=GuMv;WbpRp9mNlFiOY{BYK&RYGsAKAKpspABTmP5S6hW3g|XioWfOh_ zpt`DxLww?UjRj&8Nzhj46J2d%2f1%$iQyXNq1y`m6IDFsRC z7y=fcl-^xzj7ajqE;F^6BjnRW_tebj@aQwK)J*7P4=#dMChXVkTLF*jm&p0{<}g?; zX7q4xy>)yVP7tF8)9&fQ)LrfyP-yvgBAC-s9JT(I;Gk?PWp+82KV~DdLSYEfJYkgm zLp0f-B-OgG#-4Zvs3yd482Tm-WHZ-pf6f!}Ce*HY9@=GLtyy4?CU8q-jxs9TxO5UD@Tnl6d^+)#{GYI4+dYycS{qkur{P2FTN`i05To*S=jFt1OGh zVX0CXU2Z=;GMC78!*~q=U{<(k!Y`XN>UNW|Q>gg&FpD zaXkJo&S0;c0Hj4bckXQe6z%=hL+>a0PV}gdJ_>R%RW745_NkbN z%TK+|a$f?txbN26D?%!dGck^pPoD<#^3rk!=MJ}X#R%V6S;3jYt4Qp*;UC2=QxLhL z;yB7G1-u_pdA-$UI*{tc8kgH9(FDmkC|~#&XLvG>Ni*#iGvmqnvoG%=Nbzi1)m(GrDnq4~=%&(_g1}Zv` zF6PZQTCtg9D*k)w6ami-%-+uZ@CMh6-;RM1Br9%1uY1l5S+B}e-u>%P#V zXg69_t>g2@#i77%O?7m3v5;4^Y3op%6(IJ#Wh3rqi{hcOclf9v{iOGB(?ruGnLMbl z(Tn5p3ez%dfgtf(Uzl!d6G7~B?2w>W@L|1y=(5C9>S~PN|C55sRfaFC1Zbt~WV@mQ zlA6XUSHxD8#41be!DE~aBV&z=qyjrn9HQTOt!)xXHaixfPw~ePlb>$16dYq$F7@D@ ztC^@DT?X(L*s(CvmI{(|i1gQ*mjg5~t1IpL6;3yAT4%o$t&vx?rygDOdP^09Mr9jD zFMKZKmF&sMlbyQ~jP%DYp4fki<3ICg<{J||C>SHIr`ubK-fxXC#UB{A)HWo-24T;_ z9;)=P&Zq|zt%7C{{*P_m7BNZOJa(BCuZCt>@YV#*Ti5fi#!AKVvij#D^VBt9TCc-R zc5fA=53ANKD-SN6Z%-#UXlCPt#LC3+mvZGHl?Ox|p}gem>t^2s)8Ews1rLmYLy zCFn`90J_FL5ccAdQgGXuZGQ_14!*SDK;0%gyP61m^06@WKiibc#iy$<%reAA3wR+t 
zIVIRHr~$L0g0~~F6{U7GL^v|z#lz!XvU6*Q;8MI>^IpC65q2qLQVu1S$Hbuu7!9-O zA(RCdHfave+iYLf#^WEJP>jxybz29=CCK9)@*JV-c7^2&GumWa>8(i&**y&~=qr3691%$vx58`XKFUsLgh8XA5R_%t z2~dT?rqwng1Qco8P$uBDJuOm@2bI(Vn14&+NNy0#h27 z{|lKbiv@|A!eQbz(NvDl;!1l}z$Na%*^>|rL8=wes{gpnFg#lnoU&|L0(VVH9&TR? zlW(kYs@ax!T(-$`tW21&=J7T5_*SP6&!1^O+y+hgoU*-Sr@jn8PL^^t+@C)oNLcim z{n}SNQ6>+G=ZYo(n>1;5NC2UKgO(zi3a^3?nyD!zTff4+VuUfo$;ICj;hY+=D*1a8 z;a;ikTAQD^-QzO}$wl(d1j8V%8pWJ%J;kAL{cP=30jec0KY!Q(?K!39Qhc1WBWH zX?Y@O`-C+6=^qL50wOi^MRw`VI0yfk3`hG=9DYq1+4iTmAj7nbhOn+1P*?g+Tq4|` zWd9QB`SHb?zH5^`{WctLH(i*f+Z-{FG%JbM%c}*Lc=xQ~Iu@-*`sj>zV0lwwMr*5` zFO2jlycx-a8wUl!w0u@FZl*l-t*a?+bN;)5svoaq)=NdSX#S=+1@ zaEZAsk=cn|;&|%~;`Zc>_Z;#beB_=Lc9WnN6)9^<8yEBpX5QKYfA#2`7UW0tbV zlk@B7n)@^=OCIzPb(ktT>=Se)qO*n7iRftH+DUtff`|{N*(8xnL6M3QI>U6`1e+qJ z_XFzI+4h?cJ^q`;HZ737Gp*B~;L=#=giJ@(dz>JRUXX#FQLTVCOZ3Sow^ng{bU${b zo%0dMOW;~L-F__Wm6cndZ0jh725xngEey2w9+&(XK84Dk1iy*N0n!dxC{z9?0sElu zd-lhkYjmU}D=K%35w;N8XG&eL%0`Oo!gSM zZO+fi#IEtX37&i)MvIhh61+Y<`5Z2vsTc1fEnP57o=*QD;9Y>Hts@-Pek=~f$p80# z(Lu(W+gU~n(Az%?4cBa&Dh6>sT{qGeiguBBT~V*^sT~aAMK~1thiLL*^S+K7)3*D8 z-kgzRm7QAPp7>vC7v9+qfo)=Wd)h@8({|1dTUGX<2(lbYshXbDk)(Nd07WS6u z-rFwxdptIZM@iORn9%GuX7Tu_Ob9V#-as!}v{ttf|A6T~xjx(y=G*vhVK6c5tl~#r z6aC0Hi{%yOeWV=`MAS^)Tb=kFG+l<*2vjZwh{rdX&xhS0=(1y*3gzDr%s^<`nq=!k zQe0N&*fsz3JP(W?YiIt;>DJM6?B+l>t;@F)zlTp*sri}d)=$7Q=WJn=Jts`r(Tj(2 zD-Cf`d@Ho6|MrBOY>cyg0p#n7_nH3?LS;2^bN-)?-p*pN{MW&1K-yIAibH)fFyhhW z2c3^KMTgF9?HxgvqA-nWTyY$j22bIDW4v`1B4KvZW*aHm+c_CK4>>k51h$k`+T=ht zPZ(#H{*Z`Nglptfv=nahm?lli@ty^oflZ5S zf?$|~cqS8VWY;I6^d-t(L8PO**`jcT-una?48f$|*efEb#o_9?_UjX&qgF<>WQ_gg zBp^MKku)gDrk)INxwv&pvVFp&%jYcOlvAL+Q6pDLn9~=BoLCpjym`H#HxF9fDAykm z!+S>7)(bPUY~-o1kf~Q+n=6ubPRh<@(mVAu5P4DG!wtO15OhJL4`;U|@UFRhe$V(3 z;Ru$Cb?}ve&O(dkl|XmmdSUA6@jQ@lh24CHr-?PJvi+iw$^f?x&Dph{&Qtx!o*|$7P_~-~{Z$2QfDR z^||<`AjN0u9{*xb$Sj1_5qmJe=CKQ#$JSf2pbs_l()jF@i$T&M%zk!>(>i;-OthCD zTQS{6grLG=rolZMTLRO85uQTCH7JP2ib zy{5ZH&&eTKLi>ZTON1gFhMH`5A_h+n*)a&aDEF~p&avg!!tloIbhDk%CY~V{yXx!$Vb2gV 
zFS^p`ElwB{9HzW0h&1^O+Vif5CM!-=P)@x@$Rpm2&+Xcu$6*00$(@44#p{39us6_ z%)-*d^Kpm`T3N5QA!3nD3JaC#qG`P=uCy5B=q zAVeI<;pFMJIDPF6*V^1bH*Lgq`P=p!5fpC2%r*#CJA@US%Cnp=$nYcQ{D1W0p-4 zNyMRCXW!ie+G{(gsWIPLgpajVtt@Mt)YCaFB@etC-%?7)uS3JCZ+m6;zkfaAG=PA)HiOf~n??O}*ETzTtNr*ax zXEZ;*FP_Ulu2qr+eMZu@CpzbPL%cAYZ?k_2d98MB#MQzU%ez0GMFAK0WeJ$gM`4_x zE>r4eMM}rBVs+~@|4tF#rxsVDxvMt}COIrkY|%aqpD*``gaoJ^*~WdgKD+6dlRhX5wi2HPD1VilupPoPK8ytnlA1TCfYmM+@MDa=O! z|5ITru#Us2j;EpV53h?0X9@I#4k zwqP=Rf%75pTmYOaTGoTWFf~?;iZ-$(RxYN;k%^H-g5Dlobk=sqL-FA5ENA`;rlY%4 zrBKAp#Gd7&9NFx9Q;e{;NZ9C;s?HS;$8*(|xTB!P2JSXbe*~Ilj8#=d21r-h z&&8n-Ub4-y@d0ociC1m`(rFQP@}q&qwdQ?+ZYg`gN<>pXBw|r(Kll}p>={)pr4=ou z8|@Wwh`?%j`JP3#TToMLgMBKJ><2XQ>iaQI$MlERn>=<*5l3PzM!{^608#s*JbuFC z9xy`o)4K|Iqu|$ zk)>c=A^uVvM8LZJ3$|9kvr1~J$+h#IisM~#u|;yLFj;TsXymYHDtRBf>Lf@XwmPI| z8}NRDBU33{KHQv49GXb>Uih;8QZP(O>s)yUr+1!XuYq=BipQp@`f)m4Bs*%{>8aj5{>1OIwFe7i#kA%`+5z86iY@@~Qu+IEmf@%1O z8Vp3jEbjG+MSH&NgK!lq?43OTT{f)KXTwmJ$*(u>{u-=O9Fp%?lxFu0f#&zvT(YgX zcv^FDmHZ%%6O#zIPqR6qX{gA4;>r+!{SrGbhX8emUr*g1;NG&xUDA_+$2LhAB4Myz zk{fcZt+&fWyOcwc2Gd%Xi$NC@WHzninR!4o4yM5R=Mkn(SUyG%ST+iJXR;%c4VzcR z@!aS%mecKjrmwZL@}a3tQw2_WZxuPOC2ttBE z{9>L7L7(6!ZL(-@ag+>r++u4tDH-AG1Fz8FZr*PK_2g#gbB z|41cvsxTrH^76S`pj(S4+M}Y86mgQdk!=?AA23Q|#}$FdksB@i`UrS=+1ppKnH?Yo zNil4oaX2Z!*vg6QsV#S?r{&mis~s5z;3s)!d+YzS^SQ)cVo3sIA#d~i%AXG~x>TvN zuZ9zf%!4@VIB$gK(CuD3;kS_f3zba;JPcu#;Td=cr@b_pRomsyI)i%UL@)en#PG(% z_X#7~UKYoz9;=*VEdd^wHtUnpT08DJLTL$>$+90WgeX_GxxqFDI*%L8m}qZi4%%|< zug}NxWMN=e76WNOREaa~0Ri$jI=&j?VbEcyVc&?ZJt_{;=;;Ex87qlLh^?3#Td~c4 zEspY%DXcF7VBvYQ^(ghEe0w!W?iUO*SPfmL7x8^Ul-8=Tt)o_0$1=~0pIE)!EufFW zY%|9^*XK2Q&FdPI2f>H!y=| zsRcGt5lESn!nxBK;{idPx0VU9&Zl`%_7jDX>yi1RY@uipWhWJ6SgnAM?bIR}EBi`} z(1tQFiX~<{I02sjHoBx(pNXC%atVgZYI{=*mn#R8s4S^J5r+gQd!=OAH883PknB=qzL{nU9 zZc7( zltAaOPyJrBXM;-|du*!1=xtbu^?S)vC8eg?uSFwyewv(6Va@V&F-RJ{z!+~|1{gi# zsI;r65lZiHw_9t!34jAXUO)->V0##gZ~gcHu&$c!nbB}7m~U^I0Yu!$Cx~du;T(1| z24;KB1co-{&LRWf<>GkVlVvI=X_hC>Xj)=p1Ks*UxlIy{NV{e*y=)NhiVtS}zd6Kf 
zDVu0#%qA!(Iq-wjvp)V-+g$Mx>XRyzH${7Lc~if5jzjGY*DSN%g0vMkucP=iY_Ovt zoXKoJ)^NLfuID884%vauIxH@o@;>zqw>TCnqN^jz=GY6^IK3T@IZ?Fp2z39JS*LzK0=lYsJde zg{9VzNSn&C7Vq{harsQUGSE$ni>$wBdJ={FG)oq6@nTqv{$9|_i&d#iG;YmGmV>HV;iBHUyl4&ia6STYjy*N~jZpzBsu^`hfSsG`cvcXJQD+VRA8OgL; zmjP%vUNE9%T4+haRP505Q8raH5ewOT*vHEOh=^L2c6HNM0DXYPs>a5u)#TI8 zr*A3aHK1tkls)UX#iXR+eRJXy#SHht9H0;gUGyy7# zy=MKtWu+|_2T8lIr@C=9bU3pNTX?=fJ;f!1cO)**}CR zb`X7+%PnoX-Ler5ReL(kibQ(UUuD|!LP0`|t1S4M}GG z`2ByylFj{1cE_f8PLa#6ZI&huh3=ke*@?g%TiNIp+7fYmgk&Zs*$zSK7OPug@5O0s za&Ct2W&4tz55srjdB4gU&~Q6_3k)g|xldjxrS|87wC$^4TWEX9X=fYxFXF;WQ zi+GfX<3>Auo74L@PPVH8&5?uUh^8byldCyeTxb6hgM6dYqa3^FWlxvR%JyQ>1Ys#x ztN*(Nz4LZfpkLh_qJA=Fo%MailkqslMh7~L1<5;sZr{GG-SjGP7zvTFt49R9vAW>& z>2Ha!*>-soVYG7Zd#p~RYW>2C`?t=qR|KgcFTdypiT_@`cTz<^(E1p-VPm^9+5JbfVh{U5y! z3mIC<7TT48=2fS45$)qHw!TacLe6{x4pJR^S#LiVgK|*zD754L;9%z(UInfc@BxsN zstxTMVt56T3OR{t7N*pE_M8{J3GE|jK<=QzVX~1kQ9N(YET-g{f+W{Osau1i!jv4X zyUzY2z<)2DI@>uyq{Q%tepfdOdchdfO|$;Oq~v)Ds!Sm+`ePe*ZnGvq7vuqSO3<&t zOGbzyryu3@_L-mtG8*R=R*xjtu>6v)A5o$mVAfKZMJ7Ii!2B1Z#g5r0w#!i8*68w3oiC4yP$M3sYKi zagAHYYc5`I`C@uw4q$jZ8(>Ces;&n&B(lho9e@aH9zS;55nEeg=e`{;XK)hJXxjw7 zKZ*XFChScY**2->Ir+QdG49_QZ701083q&CR&BKl#i0@zXiT*g?H$Nt+2VIW{%KH5v~wrHC5f*7MM{1i)_7U;vZgDCwJ;!3DTsIAx_TX*;Spg-(#c{rRDQ_X1O5pM!%~$I{0e{ z5vbphndNAV5`kg4cY<{gd8{QkJ-<(wHY^;@dimi*;E*DI%!MH^JD2VC76IzTykD(7 zmi^)Y4CKTXrRMn_f)vuVoAn7KFLxe4#=XZbI_U8w6h1N933@AY)F8{goyDOM@K*mPPGuxpO zJ0vbuXq{W#I*;?mLqIpoD;E&ixp`7au{XqKEO9l!>${JIynk6gaMEz~r!c%C_$kqY z#GAwkQBX)uvuw3GAE_BN-Xb)Jxf>{Y!%iBC@-_uMN?A%K&k6hR!a%CcPekX1J>I&6$v2Co{&7d3X{n;2c9}>QI<{6z7cCWm z@>t^-`yvr1nX{^Y2vX+*GaC3`JFCuLc&b<<&;FK2H`R+QQ%t18>T{YsBjCk!;To>} zm!1X1@D_JS(<-|~9K?V4;fL)`(Jp>Lk-HNf8-hQre$n1eBu>i9R^_IAMFJY^iMCcJ zl2Qws*5ufi0qm*cqkY@gUP4;t)AlRTR6#~X<~KCYpKULSLl)0IYp;qV3)Y&I_Vd31 zG-zd>-JTF4h{fe5OBL{b9i;DLrm)v>U`CRCF6cu;H^jVCc=zvcsAKDz8TO-pKzkii zIMI<_?E_*VLf7v0s7T7;ptPN}{RZIq_To@CRgkdA_riJsFGP0rvBmU(IOH<0r|nN9 z%;6UMnr{iANPapz4~^DXTgCEU*gcszq_MNVgW+Wr;2XM3(6f(K;6-zVHG~Ks%)pMr 
za@!{8ZGb{(re*&#j)^hLPWu;_Di@#$wpuiT>sraS3V1<>msi;7-vguByfQi$;BSeY zEhY())w*Q@UJ`GGJK1t!>e+3e)rq8!7)|NP*6ZIOAHrNjb0>-4CwoIo`q%{K2hJdYaaT z1v-oS?J=S$q@a+?H9HP~uvp<`VRpHYH^Xo?2<-*ER1}VMMR2<~Uh1xe%oRDp$cRF* z4G&?*y!$Rl#AR7J%j*9NPv#7IQvx#~r`d0QNM8O4j;?&x@Cp7}JR}GnfL4vSGmnRZ z^pWL`}0B{d-8CS-_|9y%R#1j;9;#x)Xuq zJxFWbTLsAb;7c_&>?CNfcF)}GK~^q&Y#?Hre1pCCUz}y5boCk&hxDygi*3%y(8o@j zCdI0>ALXyTE527*CrljFR@zCYz(ZcDfE^9&YC2>z$q3}Brvm*a(kD|Eqo#`yL|Hal zm#0U?@lGtr;-h?bEIr`Ij=R$biQH-=v z6n3LH{Mfe7VBJ(8;KQ1a%x!H^^oHSWQNUs0z>9!kJVaUQr_C>RrTEn6K=lm!Akar= zj(Nq^Fw!IqDX=}FsX%k_0vU0?|6+$UcC3oCIYIy4b_=gQaxV24(dR@P z)~%wY_`Z-vty#SqueU_;0=N`N!}7uT;xM?_SdF*;69{98mby<+ZPB>IGD5tC*&9jw|_iop>X0Ja_i@Put_d-e9q+*_I~glJ%lm z*;bTb;1rn^??f?Zp?kYnt4KuIF$b@mp9=*2E-6NDhg5klo!N>|&B?L0|V z5x0QP3lPxyLWO-F=;m?r>?iF#QnV$xSiq~;qsV?4f(kM+^K6p<>6HwcgppB$Uvk5js46pRPSTeGI-t2j@%wfXmdP_X?VY$wnDNOR{zD*_!7KlMPSt!Pz z(-E5XVoIt<7obFtphHt??CwrZ?<=mfE~06r0ZGB7h$biDqu5jh2t&G zj+L2Z%WWQi&uX69d@bPd2kS~3UHm&dI5&rd?N)&>3iY6*_bN0$|HhQ(+UmP;deX(5~6Nsg@+&s(lwMAM=n_DnZV6SLgqO=h-(QkTMQ)sM5KYXR*E7Zokiy;o?^ha2^j(Hv+6Eel6&| z65Y4(q+1-XPh`e_PDsExgAAT87WC%g0d&7RSIPpv(W0rA#^ZVZw>TNPUiQCMue;)6zSB9DdfnJ{F~fc&1{q*| z-wqG(c-OYEjDa2x&7VeQ>P8zahPPSQB#weQJqV`CyyC?Tht&!Cu=6JsESqn82p%%z z>#DXu02y|^WXC@YNs+PYYWs<3A0}OK957vwBsoo6hS>H;JglXpk-J6vlb}n$?y{q; ze-uoPd#i0vAm3}WpZp50GG8&z9v1D*#TCD{_dj_Ij?a;gJ!W?bd0$A;oz0x>6wBpF zT)MA#o|OuEYxawNDSe-a$In8)%VrBB`?FR*I(tZ0|COE*jXwTB#BKie8ayNJ!>SJ@^(A{LBDVV-+k$R)zD zE;IPMAvSZg7WN+`5@Q?B&VS1DEn*mU747+AgWMhzL|#lap&i5dRi*6+v3G1OvMZ7Z z_6}zYpIVesSYZ9cJN8SflN%J{LmmZa}aK}IIy2W`=PR^2(~cRJq^CMgPbW1wZSR3=1{^k#6L*u=q{K%T7@K{@f!1k<1ys%LtuD+ep)@v(geMZsQxUs z>vO=A-n6*T@LAjQm=b=Nk*X&qe?+a0o z)}icS4h#Co;K52Q&9L55e*@3OP08n>V4>Yu0Egl;QM3EKXfnV`xU4jFD+GFJ$($hD zJYi%(nT7Au`b6k}Oh)``!eqn)JliK)V>UA1K{c!F_99r`3I(}YHgzb_^XB3;*M2$- z>?WC*>QJ&rsV_2g!m?$n%~B|NS+LsLyz$Kl1F4GtJd3%ISokSO8aMl$J~W@i?A z0!5JCcW+_;dFN%@WhEfmclTs=%iROmF=spPsRX=%nVq}#Mn?_&puQk3=4Wz=7Z&>=(sXiJ}hF()>@+=b<8fxvdyAhtS%2a 
zqQSF<*(oClK*H^-aD8%30Q%b5^`i*!9_J)ylobh+1>ThTrmPhr-L9=~&#{w71N_&) zP~-3oS7)W-5kcy8wom|3_BU48$zz~h%C7uQ?g=nfT@kApZ~X*`d8BrY4JwEB#_k{a z*-9A;!)x(isy!=2RN0{ZK(uF?!z^^uIH1plK@|~SP+p~w=A}|MI(K9YrWad zoak{$+FV|1mk4_MGXEA@#{|}~k&RDamIm$YRbd(wdnhPmO#-Oy_=)IsjIP~aPl=|5 zl5iTBZMVJPNjUk=w21e@C!<0dS3F=}_UFLh@^D3Tie*zg z6B;O&Z}(k-2>fYdv3($#5!<7S?Gx#e+=~NRAzy)DK8^>-!|mT<1<{vqop-}jFb&5# z7frX90BX36FWF$ga(V5K~ zx8+!g5S8ItJI%HP(6pG@`fUOK2^@Z5mi;RvLw}f6*vjbyhPgB-4otWD88E&6kr$G? zXM(9jOKFKcAeu_NvBJiRbfGXKDo^EDCKu0tnz`K2=U7chn~Gt+mqXgVY4)l}#Ld<> ze&FQC6$~Us}n-_$gco~(gi(xZYy}n{=OK9)8_%ZZ;mHqloSlLtLB2d zRk`z&J@XoIs7322tVzyu`tVCHSxhup?4D+$=R5n8ZF6j9pl{<*ah+(Q>{@A8Er8UZ znT(nIn*@-xse(N^%D*7VTFVDzv^9sA9I36h))0np{rW#(zgXx|fuGh?Te%?l_sX`V zBFR6}NV;@U$Z`EhjK*Ry)gjv1Yq(+k?Gm6j2AeH*PxHm_&ScR@%c@5j#q@?K;88R` z_`(V}VR%au4`&lM^n}A0~F0>UZJTjqq z_?i7eP@}L6-JX^4pspMlT`WiqwB)u&1gJ3P;G4@<=%(xoaeOZ5f}bq;Ld>d!MT__E zVjDyvImI-Mvfl~$ucZT5)7Py6l37b>O-tG4meLJ&vpAkv8uog4wPVZ0@-(r^nRZCf zGraW{>%H0oSsa$|EnX5}OUcxhk`=a5kV<7R*}g2AS`67fHQ&xz1Mp75Y6)*(gb9p1 zQ$891=Tf;gIe>@cm8N4A(R>fW%Keys@HyfJ0jn$Qp|wC4@Wa{m(`paI)Y5uZMz;uh z!8&cV$Al>um&^5*U*mvjd9D=*cuYJFRa-$V*t6|?n{5zcFybydGB7Yl8q=?e>4NZ4 zMW5Fp-tN#sv)>B(c;nrDL6*HIhL2soCZkz=d?b#fna)z@lsX5w>0CoE3V>7WwN_hC zh>P+d<^qLiDM1@=ff0zi?>tFlfdV*DpV3uHgtP%5(V*13V z{T4s70$U^=`NS$W#8wsCmH>GIbZAk%UC>A<5*%2#+446)ho3E8tWd-gBMeVzM{vh< z_C{E$CjQIqHi0mnI@yCl-X^-7dTWe5D-K2DxkN@LHY6e>$w_merEK!t(vt9LeClS8 z%?)bflH+ZUgZ67Bv zW<9g*mTg`ZmufFr9|4d2klX||hCr>5?XLm|i2|z}KKv^YH!mNf(Ux?}5)*t7DEKkpT)iaSTeST^0 z3R0U~M$;Ef@yHx*;DXIg-tLi`CzLmrudyzI)F7#8c~d~g;o4+&yb)1t={<<@Hl9RK{pq8soS4`!x_8Sd?(Edcj+dJFLBYw*KiiTp$~M3;oC zeg~J9px_SN0sd8xR_fHLQ(G&19YBq^ps=q4-8{a^{w|u~au1KA)9kD_K;AID)3^pp zvr~Q#$MY;q%C+tZ$llzx2=Ymb)m22TTahp})8P*UQDAK8YTG5+f3(Sw_tU?L;kn{I z*ap5ChYvC4R|_I|eiEOx&joyZMD~_$iAdkDGO021a?r|KGCmviA;G8fC)n%ndE!<)g?uTR7S=YFT|vM19qL!X z74En_9#bQu!(M1QCtp^Tb!_sU7z~MF(Q~2G{^U`0fO67DP7hs!@n*T0J~H(&%CzqjsWNlW zs+so}BG3ujx7t>b6jYR7WN(T_hRFHCID1Es8ltRnpnUzoRcr0c7#uJCerY3g5GL{d 
zjTJUsv=_={Gz0#l5ZJn**0P%kBbFW#R1ZB*WK2}#tG6igpD=Z=TQl}nNLcd zAVnNpu+(OX_OIS;9jrNow3NPJUxg4}3a1aZ?}dDBO4H{Ke^z!v3q0>GtnKMF@1^39 z8{cZ4hTJYhlRVnR9u=u(sGJ+C+-mzmJOt`|t0UyWcBOq9(sb!(Mf*tu8>_fh+zPFp z)=Ak+fn&!DYa(TRJPB42~U;@DX6wJbp`838(?*@@`bLH;nGYqt{*~;!#_w*Ws3Swph?- zqCPx9&SbRg6N~JIrVYmigaFy$hu&^F4DH!*P3BwoJH;crw??vdKm{rhCAt@>qW8;> z5QUK<+ZqKL#6LO;!)u9B81BYwg*cSbv?||f0-efN=dRD3-%?g$Pl)#4s*WxFblpA( zF=BOF?Qf#h5e4{0ee4T>*K^2@RhAQAWaq!yCJ1`9vA_`R2hR8_EJno6I%^O~M!7>c zj{Q);M`8OL?ZYoYWSg8s-{sh)UjY&PQ0WpY3-l-T6Ks)aau_UM(x-gwX=rj;KTh8H8&^4HvSFZKSIt_umk*ePu@BX7jB}FM#?lSj1Gq& z*_N&UhevB#mTyg>iH1u{9VedojYH%qWE!|pkkZI~y*(bnbVbgBDfbwxi4ONC3wqDR zYRBOG!&Zw!WezT%V(UU=^37m_b=kKB1;IyQt?FT68aQu|wTP5JwbdNF+DYFzBw;`H za<2(`=Wwbpf**n{{wFLtVF<<}a&4^;v*58Q5WioG?1F!JCLDv;a6z6FU~}0t&5?P6 zVX?tGeojmIrq~W~k-nvLnf+Nb&5$8Qfa|{pxa>PO@=B|lFy&)7 zhBa0o4lywciIR(eh8?vpu&h(!ky(Lq4`40gs557BW&C9#IC^wC!!A9QU>B-4I#_nK zFwu8z;n~;Q0_xu4S_ti-Z&<#GtUAd zKug(FOBPKK^PrSas|CpigC8imVGAu_+9xf0$7DMcqJs~Q=XCpQqI*YTa=L)s&%^|+TA2&GO{(Pc`*l=NLPCIqyDEMQ9NSgE%B+TRq#} z7w{rK#ai>4i-7#x`r`lT$MKNtTr|>u;;Fb7D!0_10+8(R$Z~sDbeOGa?P$2x+5~Ya zgx4{{ZKZ$@y&+-Ya!@R_aC{@jHaX=InCkh+QkcCTAk8r#qsTg5N~qUFx{th7Efz-EEc3m!)1P1z5O9Xh*hrD0{iU{fi8&DR(WW6bv%a= zJi9$XkQ#i_u+AP4?H$xJjr#2co)RzsD*Biksx?;|X|9E^7cUBsg3M$r;Pb;`qJ- zA4=n~^`)l_rPeAwVhq90-DinZdp2qrWRL%xQ2I$0rtL`qDv*(xo~w4ILGm52 ze7yA)^q<3neNhDUh$FCxJ)?t%$OAY2m4v)kBb`^~;9C3n%^v;W@@h-D1)A1+eGXPZ z%LP2w2QuukjvmCr;C%a604&c7?f6cFP_2Wrv1d|gm4b9{WQ=z;RbOj&cZQ`lV@*6r zG(|KuV8-v=5ODN`)po+I1bB1ADyww7uvZ+CwUm_G%-f)u2;N&_Kf4{$zXIegBSfR@ z;XB|VD;|Dm`N2Y-RRP*3TuVJK4y9suu)uEn1pt9**i;&J=AA&FJ_^!!*7B-w_{yy3 zb6354jJ+)$q8yzt!A9H#O}|DiEcGQk{%#mv9Q{0s1La?If#X^72w}2yPvD(PI2-%Y%2qOj{@%4x%E{c+d^-gt;Y6MtyJY9#6WrgmuKz!fq7n zy<*vx!5@m5L@RYo8bgehUPVIIFRajr(gtNQ~Q?ady)q9>h383Fr|amtXF#`IWSA@JNuT z*rG??am7L2<7>9qxMXTBZs!Y8YDRvVUbfZ>A|PJVqi4DY1EwgQ8rruXC7f`iq7jnmzl>vz!pDHL_@IF{Fp$~Vwp}C*_C(}9;+9?@8Z57vi=}%4U|grz&jrX> zw&{C|)+lD)%(4a{byjtqzLph_dmsXJ!z`W<$&RhxU_b5c5woyqLwi3aNLhNMXQ=|- 
zAp8hqGA>UXAKH%_tE~~BQu?9b2IzPQV|FRElOYT-_4N46zFr2`B5}J$fOPD% zWxe#P0M$Eu$L~M zZ)=|hctyMMuG`)argQstvZ`kwy<#2vYhbSsBg~Nc2x~;{Nk+@Vw6UCN4~pZJ zPviMtzLkmL4V5Fc*;9WBMpUexq}yo%l)7V*T_BPQVXT+U+V_72qN*7?N7~DxLn}Pi z+tv$G0z+yc|5w`^$?(Teuj%zX;S|8lZQMQ+2>pv98XwngDRB5g> zDohG5$j;{-cfC+Zv@+6Pt55TARF=7XZ4jaao=jnHNWdd=ZlJ5VGc({I`-wSYZL?@^ zj(CR5Pyb5{Zw`c?V_o}$y>dHha_zGKv0W!qNY7>xN=Z$dN8k=fz>~7>Pq8J!G~TYc zRu|$iDHYizF^@MWwqsUKe2AT%4fX|OXKXIU`Ql$FmS>e+%%ysF;jn!QBa{zyiMYs? zsIHLvgX(0Wct$E_{6lbLDsQ=6l0$6Tkli$sQjL%|OL_)Ij(FB1j@Pt1{wh1?0=;EY zn8VYpZ5|AliJ#_X`>n7_$KP{9rTslXUbM4@v2O%vWL>6q&UbnM*O0ZMy_6hwKDQP) zWPglunUxFDo;#OdBJ}J59u}>NhTDaLl>Z=>*YKff7J~WlC6-p&n}RMc#(z0qcrVFd zi+9J(cIOMkP`_7{*t4QFnt9@xVb2M9V{i_HDHJOX;bi`9olOuztaQ$ZZK(h?;NEu( zGgKnL@H+FPAcbV%h3=9{i}?x`b+G}_(!4D0sgKUuW@AOG zeQOpaYB~BZ0wP0Bj0JO=fMj5*;vVhSf}v~Xa}m2#n6i-mj$5r#2$7ocBfVKPxiD>S zvHu8YPJGOI6+?Qr^hLZeg5Drn0J5B^7stQ1@r(gQZX4{Fm_BhvGnt-HojUOUm6q4!k&5KOe+zh-=6AY`$c+tVn`p=Y5OP{b=M+mQ$jfT zHg3q*9ZHd4SWxTf9tU8!BB;0arSQFXJ7;kcTBNIxyTzoKE}d+bNE#+n2X`L}kmrfH zL-io~q+k%CYD1!Gn%!RJu`zxevzUNaW!I`*n=DV*YnrH=YyS|`;6(Izd27@IRqv0s zGGW>{+7u1NRP#77NE_@#EES|x2681*ghDe28L=pwVVMGCTZGE|A<yQ38IiETl4mlsITW)vuY8L%|%dv8F?kE^^j6B&?Z_|cRf6MG%@xAqA&e@(4 zj#>cisu|X2G!Oxr=1+}mj7+g~aS$(;kLxF*{RbrFU6#2r2l?j^qd&XXPGdZ~lmUf} zRdG8hNP{xp^LD8BSdf>UBPFC9;g*iB*Xx+m#(8{xUG46+?+76q=&}C&?LI+|t}|zj z`yUdAmX+y98!12&@E&avUs_a8#X$06>>sQRfn>$ni>=k~L(=z09-Lwy1i(jQxt%rM zlQk|MU^4@)W7CNCpUe{lwkd=h8M)o2O(2DTZ)M6&CU;*Gi?VZQwR0vy`)`B-YNeva zZ*T;9(D13Abi)wwY(a^eTWt4=_Lk}vEt@8Ln66wd4s`R?$6cn$3$ z5-FQTZnlJIm6d^m@aLxk!sJ?mb_h@8(6byg!&5$zVLL^tNwNy;^FSx67utn0oiD@S z_lx$H>{VEhtv-HTEJUG8r=h&{y8wf6gY##3DE*4>dHcB_Wi*dlWcLR;kC)G{hiKda z>Z-3v&}G$OtfpIj3ir>3OLb6jon-F{cnkFAEcsh=JmX{7$h&MVv=4;4I@q3oit}~d zbL~9ByaMSN=)1 zg#96#bu<62NA8FTDTZLfg@K)ym=PYQ=H6=eL-$gfxL&N29xO$&x zuV3R(+YwMVHrlv}T}YU>(M;>G2*9xD+10W{YG%bUX=H+^NDLwtXYUwhO(EjLy{!FW z_+BA)WLW>P9B~l1i}W1x?FB(~;e>IvNVI?Suz$-cjP>4QOW=9^^K=hUBB}v__B?98YiWPI9pBuLb>Tal%;E)q<{rH`}}7(v-ToTRJXW 
z&!jAc>BT=nZ;Ti8{?B+ZB~LH*=ZWPKB`_VkqSQ`V2FJ74mk7gQ!=wTuF3pZz()^2OU2cC4WSiUU)+)H_kaf&BU!%gno>Cjo5=@$VhH+tW}Q_B zTEp-6EzxQt?b|m6B?fkD_ym{P{Aw89eeqakzHJglEL5vy!yR!tfL?;-aog1 z6?$(Wb?=5Hkufl=g4NkSSHtx3`rx^YE8Zv8cpl6+tZVEQL62~9+T3IHt88I10&YWW zt)O}iS1NxLO>2;r@8hC@*0}~o7zw)6)O(QO_Kdh88irmoY>60Z`-wWPvt@!_CPS0D z*p?)xe{uA>|v0wj)6`97juGM-j2XATX4JGZ6VJkc~~Olmx~K*+y(iPgheZFbrb@x6;;*kk(k z+E(}|CMzciQkXUyo#`BeMZo^W4_}pvu}bMq}(5F}(HD{3P)caeTtO ztDW^)@2N4^x7ua}fELpN`ek!Ndks>=2)P50x*r^LIe|0D6j7?3W4pVB0er9gk~-sTL=day2TqM}7WJl*Q=u%t_+B z7+1kp?QG4lCwGAl2)bC&q6fEw;!wjLJ#1-+I}pb|MLG7dAVU>vlmqO-*8$!K?ns~s z_a|+|baDH#+gK&&nai9LH-PUZBRtAuse<1T!Kb7r@z=rUP(JF0iQ6)ha(FpPIQ+Vo zZREjdV%ZjHlz09;d>Tr29wv+SG;&&L(}i7 zfb)ZMzDt8fI=QXthf@?lTd+2(;;3|~Acgaj!{urSBT*KHW%mboaQO$NoZTH0WR7Fq z%4SyS?Vlkv=6n~}>F;~aI~UR2ar>oUSmo$gwl11?QbHtM|Fh`H5cHa1Ws*hDJ~5Do z7aK7<`hkN34Z6(!(81ffTplSvEs!)(J<1LXdN!Cfq)m?{!{sk}$w!3KA!yRj#=C{Q z3%bTRq~KKbV>pz0^QWjY#|0Q3vbFl6XfG$uBz;WK%VCGkIe|vwYcpTa54JIOU_3Dcg{-T zXtvP)Buqo-DdFrx(5gYzN*&pJBuGO8sQX3`O zTk`m@4K^+W>AdtG$)LP~G<&?+Q|ZQ{Su`ETb4DI~op}UA6^9J6=S7ko`+-@U`WyzAC-xU={k@3QXFd0i@x|uv^SrQHvStRLv8!+GK(K0G|UH^S$j+tra)OM zw7&|F%}wp@u#1j+#FSjEdAU&+rhQWjbjf`pgi?E6EvMTlCkXXwaxf#~HukDmWY$=j zVgD2DrJ`SgPR2YQ?yW}fK&l@{Pal0ak*&9^ZLrL5%dJmYysXFZ4BOFRkdC}e#jHWSN z>_7ciN7U)gyKJ@)QTZN>!M?`71B?&m+CR@qzxGh{yc4zKtU-`oY{m}8h`%|lOQ{;s zUS7wXVyhSQhKynF28%!TPKbSE-XiPqcY?ikn47rODumS&%P?++w*kC(iG#q1i1+n^ ze}E+7T3uP#U_o!o0R?=d&~Z$1IOfprx>6hjJybs3&iJR(cpS2`MSI3w2imzp`cPvm zR3zgLD~iKxSct_L4a(2sgeYE*{;_rb!+&{9jALMPPk@>pT;tye--d@qS@NHoPVvTsb zY_7HWug7I>F0fJo!kWq#>z(>6K~LI6uU8j`;9Xmyx47#Aq@5o|fwxJJR=ck4_4ecc zIh~0)^czIeI6NjnI%7+#ALuHZ%mo z?vIL3l};=hX{n;gV*g~u$YT3SFf26O?tqR{dY=ZD9;GAjk_Jp7doFj#u%p}GlMy6GU=+SZ8WP3x9QG_ls z-=g=0kRYRf+-98tO*`JLHOK!15C=6v-FBUFrib;<-~nB`0N4dBu|)zdH@ge_jUd^l z73W|Y@CzZAZRY|WrXBiLE5_1EhTV7;n7Oq$zaZuJvzY$dis3(_J383yLOy&WA3J51 zCk`?`nZYBpxdIYCz1X^)?KE1!cKWxS#zMV~5KRJBhqbm#Kx+a`%WF6T)m7BTV!Dj_ z@L`eKQ|z719{Kch9Asv&>OCWXoD>d-CkRluJlxF+bPk(}E4~At;g+3m*NG&`AU@o$ 
z2Rf5w%_--?_a+;NAM0BJj85W5*yn;S4=c+Wd*D2elnq0Tv(Sh$@1A~Fb0SixKdS5^c)eIXwzZUI9 z#o7IpCZTME?hzzo9FgZ^soFCpAHGxU308{{>^=oQae9NqkR5g*AuUZ#D}Qu|DMC6+r~G~*=kP-B2Ro^nw5qicDlM=KJWVkdE4kNiy8JMag>$3N^`R+NJ(6J z?pT0Q^Aut7(vj%G5H)287pE9yXbpgh-cd=*6H();BfN_EvMjQ*FcE53) zj%e@ufyW_9UzW@p14LE7o^SYBKO~eNf4CK#!WGhaVrgWqKp~GV8;f z9^<>eN-Q@|uGP1;M{;njAsgLaJbg)P4jtO!ry>bu6=s+qzpLWNKL9N z_Ls$w>>H*<6Z0yo7l$rxN7-+SCMUj0_&pNvLi1U%j{S*4&YrjK9%bLV9Oy%-2Nybp z`Sx>hJU$xzRw9TXjY|vljyf6GxGCGty@Ejc_dpe&GCXE5*-R138>Mm6j&U|y7}4od zwAipi-T6uw%oIKPSWIO2$mlkRSMNH?vbo~>2PhB+Vh2&m0j;~&{$q$ zvjk{~rjhgH)im#CK-F&CG0rJ9myPin5y*)r4JM&cG*Mgp!BfjVX;~&MMG&1as zvPYr)@eUX+?}18keE!ZPu41V+R2XRnvj*87=tRY0`@CFE07jVe>@Bfea+Wq) z!2Bc0NvzwTsin`)TeBTdz%;#9m_ETk#Q?kMK7gdFsnN^GE&(3hyrp^KNPF~t52XV- zKi1P`1$cPFs^;zGRv%zuS%Vy%{8y0P(zhY%;H(E=1mR-1!A-YS;!v6%sItxZV}Q}j zzsN>(^iVwoL+kEyL7Ju+xlZc@P4i;E7)9hWh3JbRL+t!OcIj*nh*bHDStse~R<;NS{Gd3qgNJwXBo;}^QH9|*!j`mA&sL4B3_OeL6kU@P~&b8{~ zU+e1-<*qh%eK(?n3RTycJqV=2S}1aEv)y`$;o$|GTKmpJaQtJevyh@#e`fAG0yD9(JhHpBNNU~P#H4|akC|zYi1uG8dg&}H&V3jL zVni<&M%rzHh#}jYbL=@GFO{oKE%wKVLtZhy*>8$wY~ri5*8U{mzlSUt{;qli?A7bY z)OSpfw(Egb%%hNG(Vvy&=c2uU{=A!GUhL2v4g#QC&2?BGA>zuJ^#`K;m#nK-{A^{| z-90?~vu)dKZ=mz>4taA=_!6ytSDig5Kqjf2=?)a_1+sY5ZO2S;T+qJ!NEaqy9tT?2 zK1QfkQnK5JA_CLR17dkiG1@i4)(cZsk)DiQ&BHPbG60^r5%T!0~hYlKy&i4&~5XfI1A5GX${6o;fSJ+`b*1~XHsif268 znn3RqIMw@>I7rXtMW@jZ3u?)yo5yHN#eQl}h@;-AnQg}eeJsX{d5}|aIt=;QGaijwxrJ=dej8w- zZjl@(lt1gavUI3q^IUB+#qi#Gcde}!@-k4twl4*djRWs;`=3DQp4HWf)phpBbFHyc zInv4h1<>1@4>`YOVb3hiz1NEW6D&QrtKf|k;rv=HK-_EQ7Rwm?G^eW&oWP&hq zG?mP^O@U4?9hZqls6<@D}J)X!Sz=JL`>|+>}Dz)O;kK z=E0JQO(k2JN`^L-Y_#KnNm&C5avN6_#c-{2QcUkLe1q`bu}usHeVx>r>Da=ls^Z>G z^L5w2AtxsNp`12kh!Z-VN9~0c7sH>G$j&|I{?W6NG0BvODq9+NWfPI+&`E9t~ZQekwim@a-i zBNG$-E5tyWCt~)cNH0u3GdA;=6tu?fm&?M$(#23aFXKJKECETsc_=2S6Prib+v3oe z_=aK-{5M3-O36*N%LfodIgj?SP9j|fEM;(F*iQ@weg#uK@eh^GJ5)O3 zQ0X%38HeM7Wu@qAc}NWZ6=Fvu$4)B(Q;Dt}t*c0HtjJ#GCYvn|*(a)($Y4cvhX8sUoTG!6{rwz9m3I^k;HBYap~2uxpw;vD-p%JR8n(I7vDt 
z25Gr{m}V;n0bEw@X$x$#u-8`Zu=XW`3$VLv9}0W+YHGe~&Ej}t7evpu;`YCg$MJ@e zlS6Cl2QLzX?!_nAS?pNe$G#aIBXi# z&@^hRl?##^TGf{Qk_YJwt7e=1_+_B?>-(rdqFTPuvc+R?^P|Luw3exCD1a7acITJ=;`JV8xhyw%lpP(6Gn&k;$}`H;_F_M z-*~E^#%UD~dFlkb?Yryl<=d~e;{Ng(^nGCpjdly8IXt!uaa7%Sn$?O9^(&3e=ZD%G z;;CyVak2FG0McSb7!wpgq#a{zOo>NIkr}ut0xknBJlbw71ye#1PY)j#?O`0-47CDb za*wn*$_2ddm}0p;|0smAI>4=-fPW_Lyu-d0@Bt1bxuH?S#o;Rr&a{$VCAu~z~l!xT0HqXa1}D<(H$i-f$&T?+C^ygnJb-OX+7 z)=^%H7VDNO;FV+qRa;D$Iu!S}KZy)i9h>CfnD=Lq*BkqrLXcTI&RL^dGk7S*;o>c0 zz}|@MZf<8+jCIIXU?OMiPYRMUfx(Xs<62`r%&@vk(A!gq9oXQYetmRR0k80X2f50f}fXWOL{2}iExapiV}XbO~VOD4^G1YMdt z+F4H_&nTY3mhr|(t(bfM_O?}+*#6q26t47GH_>_U-(HeIw+8^JEHVOIp_&r69!}nYYbWiS{R=ksd;A)IP4Q z`ZtBqLTvu??Vll07RO4#pc(EN0!I(H5#xcpcbBV1VtyFBvqA|zgO)-63^(iXg zNMfrPw6YuMYbu|N?gg~HG!MSF&A~N%DjJ08=mU#ww@Ad$xj)Cu_snmNxpP}yfyIOg z=94AAzEc8Z0*+(TdYPctGHTbkc0?SXIr;>nxVJ8VMPVOowFx3=vc@X(1vUz}^eF%H z81t5eFer=6`*ZyGxF9`#bPhJjLJ(RB@;`j~BF_hNh&a_<44~3(`U@vs7{ildbbIkV ze;&cH%I+=(g7oc*$BLE=IDgq504G%I>{EeIC9dwc!uV|k9R1W#4!CD2G$QV4*gJ0T zIO{0r?aDD@uDvPDFL6+ojB9sW2BNl2n@fxBK>?SBH80cEU6nA%R>qU=6-~C>HcEZ| zTU9`^<&uQ0V7~n*U>atKtR+1ztg*~?uyTY{q1r5mL)8wJ+E9_m(58L+HXZDskhkdz zoCuDr4)HeGJdr+63~1aW>sgq_TLBM=vtnE%4h^7j6`j=};Q7Q-i|w-z#*#I|Zd*wh zHPI#Fi=xRv9w2PGfER(vGY{8LsGBFAw{KUv9o@#NRnAFNOu!j~etIKU!=Vb9SxuYA z*y}4DvGg+;%n8p2JEdL%a5}V zFA*d3T#Vb+gTh|FIEo*`YFmSIv8iKIgsB#aUnA_j0CG9nxAWI}!sexGkCar}C4!1r z$@{pg*8#k)(Pp^bt{2CP>6o8o8G>rLh8mkFnv}VXo6_w40QN0?Z>R&8^&t#X^4!Q% z4diiPyaAeyzq`HtbiK0&a{#zQv?uA2fy)e+^K3QO-U79U#uPxAJpnvysG9C!2RY8$?qbEI=376`NXfWwo`; zo=swOE{iN%*uP*Md)Xg_TqG?N*7w;*MUgW^!yK(+PR zzXA9l&Bgg{pKB}B&&Xs9f(-XQ|5)|Hck+$8)NdXE?G4Dq>p(`>ii z3dNz-ct4h60|gK!^4T@pW(v|yk9V|fBE6kj&T$yE8*GR8G;1MU-|KZ~Z*xAoWH%tm zcmW5;JQf#&4EuGo%YFwL)`Y{kxRc?ic1a%l`R&~1Y_^BR)5m&sJ@%}kyPJF_7Y_yKlYmDr*oT z?NL5|TSfZ-(V(?|3ww*DV}~Zgdj1iHj~G8wxIZWlq;G@97@U9-Wag+SDII3s#Fwnlc z7f9JW9x5uvJW@Mh#5_3_{UOm|IJdZY)s@Uj@xAhWiu^2fvsm7&jZ^GLZ-cyk?QUj+ zB}gDo+xW5F`3{Ksqz&P2B~w8CUboHO5G~2@=g8h?UkJi~l=XbqgP2d&aA&qzkSwAt 
z`X?cb#!SgCwjujGD*NDSKHitT=a5h-TDJ|bd0E-vt<&tDWN3d}*1jxAzNB1WuZFP0 z+ZP=vU1LW=7`E!MjJ}_Uh<#%IQ0pQZv70BX`K)}aJuKLwzAHr$KE%T^4? zlb2tJrbvf3W;|7=;f8M%s`avv$r&!yT3wE))-zOxe%fc=H|h^5b!d1Lls#|yy+tk z$AM?eItIXl*7 z!#)@Ge#%E#|J;vTVM=kP^%e}pv5vt5!MUHnp}08S#c@y{5Y68GK}R%ACWjDX6oMc` zOUq$@@CRYS4wcNXo(ZSZvC~v4I{bEPBP-es+J)O^;(N*2j~8b!FCP;pG%$7+>_igF z-Mu~E)Eb^%fNaNwsX7m?XGL4I>ppFbz%#f~iyZ>1C)bu)wrHxwSmIk#Aml|oiL-3S z-ijSlnSiV+86l6yZMHXL)iipt{qQh}>DyRR4vf8du7wL z!_T4Xt#4H{e=2;y)ar73`b3KSoo(&Nu z*EDWiHi-@u^Ru5V;`j(|fAekjNr;TvjP?1lH8PLgYtdSKGRcYDkFB)n;%GcB=Cdb% zov(@Er58%k_j_M~DV<-LwG~aB*y+dZb^&i7o+M(+Vq}OJ%gDA#qCJ;ARcIpY4Jo-o z%d!sz{6ojObQ{YpeYG$7D_klPxgwov*9OS=XhNOocY=s2OQL_hsNeV0zOug4%O#U#_blp*%MXchpSitUX5KzdiPyXG)EMGP9FdHAsA)f=rW zKsK)2D%+SAxK6HRZ1Oimj($I!9ZrfC&x>rMnB*WWE{;Gx6Qs7h^NHD&{{VQ&H1~E6>aHdC%l*C8!@Y^2v{^}^nayw>8w_oW#0Rm-CpR#K;&0}G)K zZ{2R+5>3-Y_h}{eBSDg5vx>z(TjzlxZsXcCd*C|+hM_u@9ZL4Op3jbL#kNnhw`t>e z>wF%F(GbgG1@H?2iimtUlvt%;s6%xPzF4hB9Da^H2HO#lYVY|A?CWI21Dm3C#u?`m zO13-{OyPAGfbK{~?Z3s|>Q6(2VqH5<6!4}fOc@ZvLXsF>a#*+wy8sR)_sy`;f$Ugl z>qG_#YgVHJZLf<%!5wbD4ZE}71$b3=&9t6Ep;6}VpKSAlscocHG2DJ9NXk@QyY32M z`lgItWv5=~Q6r}qHFlez|EdNuxKK#6&SLqH!&tTqAg#1l#iMwhX|S9x6(WCb>ql9c zfY0`c4W*H$@De-kdoaB=C(7p?t0}ct#8D%UUTxb&dn2L$%g4dK55wz&KZINxki=M6 zEwb6d6ps=ao4NtQVF?blxh47(p`iOL|bjtM7N^ZDu7GVME8!=vVd*wEcC znjA1hTWh5PRI}fswpnCY;K_jCW{z9<5Q~d&ky?ATU3HB^x}|5zK-(s$aot$O0C=tq z$Oiy+471TO94yZNIa0#1l>cDq&3CCJaf!BdKT=clb4xxCvw)SeWFB)o#EML+5zL7L{|*ok&5 zL{7*D9WMq-?c(b^a%4YojQvDVa-(f-_Xz|&4xN{6+sm0PW3SY@D}+2bOtA8h!-*La zkJXK}Hw9_2d=}L)J1Ind+yJk!%dYpJ&e7wtM}>T1OG%9v*Qu*LR>LA=3C?}tQt(>{b!^j`!+pGTf|qTV_02Dm-~8MZ}s zcM>;l;Zo!QVb6qH44ZT#ko*%BYwcsv5>c8aBkYo!fXKp;rCMgJ-6w|PZWwL%i}qrQ zQc(3R4q-=UZL(z{A>%9_Db>}9K}&JaS!COiDOp?>+6^~*F~lz7{-m;Gt#0v&CuCV*X3f}-JF>-5AZzIKY`7#UmVSXx%2vRDw^VvR)PXP{lvba}!H-gQiTGsQ>7y+irfejxRf^c2;;2p~VUbBmpRk0(N)#u91?Qtwxy;}Aj`mvRhd>xFz6v90Cm zRG4}l-8$*$=DBt>z=0f2@uB-`NUIkCT*lGI>HtN7&XyU9=#Dt zHTW2Ft+q=DiH5LAS@QGNeA2M$Wk-@YI`@6%e!{&qaq7Z+^6dva@u9LQ_LOJ}Jw9>f 
z@zGOlm!Q`NXVW?MWr%vbvckG`B#ds%XBV33R|Nya@~pW!Vb?xR3{StWg74c4oxncS z_blX>RBFE#2g&hrV>?8vTOYPh0{h6SW!9y$v->=0D?}n56M+l~90(y-UT^Poc{ z2y;-t<>=v2%GhqWEVyudyMMIX=to7mqwrb!uKDz#J4=_?;EGF`r?@ zAqKBZs_mZwv~G0Gu$sl|Fnlk;@`81S#?`}v;;8Y;HdwQ0>VOdW+}Pdq5YSt`NCuK- z2s2&?W&?cQ!ywNp^3u#4Cl2D#tTDSrfP9bDuCu2^d%nFepqC}+`S5nIJVewR$eqcE zk1`DVJVHblKwmV=6(*lu>vcM3;{_=~PoU?9pv-~-S;1)YC_&zP3|KUiKNG`6kb{%s z0VbB|C4+q}NKQFenEZ!mq|<#0R-cmTFx_uYcK2+W*KBLvKG!k>q(%A;DzLeNG{A!~ zj%}(0ynUj#O6#mC#MXrzMzSyJ;RT_`!(wQU5CZa6D3#syhk}|OyW2B8A^mHxZym>$ zTlcNt=(1c~B3#$brimme_EDRM54RbDJ~!sY7&v^l&v*=$%My31ycdY$?a?tk-L4N2 zWKXYjxlYHPO(t=h=~ga|_r{*d%z_Qc80n2$WFp{@IG3JdGzTkcz2Dl3MqbPJ>v%GL z(78_SMSR+eedl_M3wW1dNR?&XWHIQJoqZl+^B5st3zP*<5F5GCD@nJ$PkPcF^aTtcZ zpR}(s*b?^Xvh!$d*OFtgOXtlo({oo7-p|Ju~N zqwHGIl&Wh^_R2+RKmfE^uf;kK(^hO38o;%PnqQk&~hlY+G^3^S8*5K5cUgu z#%%qWx<3=ce}H^caYc!w-_C-EAh<;?5E(j(g;qY#;9$5QX=tkN6$~zJv@gU&qGlgv z&pJCVMi}i8Ir$vNUn>%akrJJqmPGdNme?y|`WLn6MVl^!@JGilw}ql9t8q&abF}Rj zq>_1hG=F2Zhe>FDF{1*E78KiTWq^#|M8zcAEl3fp_h|Ni3aPP%a-(}qP6)y^TAZh9 zAZosAvOOZwYu+bP@_Aeg@7~0^)rtC2TPuz?F7_XAvMH<~yQIdh%WaLlXDto0)mDq+ zvG>g3#;GBRIZjz@7vvF+Sh)o}xA?Ar&j3yH7dI`Ktmisd?Yc*N&!msG{N02Rc)(!G z5g9c%zx>Fi->;LAG20in_k_bFhkiuVU7PF&`JRrpVp79!k-Z=enRb5Az7Xl%oj?R!hy(+<)-RXYLF@(a&dL^p|Y_yNwiDH^A=5cTdgjHrxpyhH$~GNiPZ~a<28LC(962BgB1voQ9d8#*`ojd zU$LqhU1(J)jLu0N%qruc06F@*2RkX~waF=FjsLSjVDcQm8sVg9{gjs4p8|U@%i_O^ zCWpvQ*;ae$MPTR?S_o%1lf|JtJhj)`o&cf?ZyW46c7NJ_D3VC=49tO?IRrqKpO?-! 
zvSor@CFnzu@hn%Sc5jHGuOOOP?+GGiV`a>Cz2u4WV;RMRSTYBg*f14Ogm&@EgoXhU zsUXn*!^HFav0ERLrHDqckU*bHew2FzX=s)m?5d9jl!>+tmdjru49WQrrrU1?yuVWm zi&>)_5`#wT*4dgxdfRerz?t!{J*am-%ND6IP+nnUM0*3WE|l?}cV6{~JIC0&B0Z3w zAoti-QQ}GacI|091C6~?`(r75YJlR!Vst*T%793z8%q1508(~ODH_OyAs4FU9+~Rq zzFvcqE{BP_8l6kd6IAV^rzUHKs8OH3)(`?n_E-+D+dNfdLt?hQFQ^|+P{XgXi-*DS z24GUkwW|ZpFThqCw&$)B_D07pncXBvL07$EMIwDEAGuYk$1d{*%|X!v^|1=2^*&!* zWs^lCD{B&7^8H!JOT=Gzu1y{8xJPkzq)_l?z;606-#!u6INDrjO`>TeM(Vc60M{`w z5Ko$F++w#K0i-0nim+735n>FncEz0^U(5+sD3%v~U>yfow~zGnwALUldd3Kbk%&)Z zwkGIIC!-{)X4+?hh;yQHlKo^cfCeRc6$43)N_u?5n-<}Zgb}Zt#+w+37 zWADB;I*>QqU|)$;Rckrq8Pl5JVY>A#K^OeGb{(#_cZ5|pW)MCxx)gtQEG!@AU88Ht zC1Ox!G-<81JH|Pru6>w}emveG7dTa1;dBu6)UkY=qS*stAWUNY6nj=QRY@zrD$zd$ zLL)`15%vYQPk`s*#nN$P`jRk3pBP(E=!*n8=C@Q!kQ;m+KZx* z>-dz46Z1-JRtP#&KFz*0nIOu+!4ijo0;Fjwn`$dW`&9Hu3h(tfxvUGpC(A1AwLpKi zWSCtz#j}q*8*H<5L6;;~2UYpPl4Mw|Ee`b2)idm@sh*7bcVyXrK+u!r6rhnha2nXV zrh7pe18u4p`nh+qe~3it%oL{mE2jgfT>>9hSf{upz@Vm-6ClTgM|jh}L(p@@NH{CD zzlq`VSa4~7&J2&TXA7pOOZN`9&SFq-_pbJmNXkMU*&wY6P+iDR@8X$+QqwnP71>Rq z5iyp-1K9Qfa1Wbl;{-xQ8SWk!r8u+z+f}E%R5o@gcEMbaO(6+x zwrfi)CXUD8grLfn2_ptujZ!Y9UK6BNh1yk}I?uxnl`c3`R%&Mm1~2V?P1!OB|41Q- zkcthLbkWhw-*OQ6syLy@Nx0;lEbQZ1qV zqeB9$zF>(?hECvIznmE(N!tg=4{ zQjtvx~(ebJoKtwoib;*|Dcxv=|Z*3b0IpNns&k z6~uQgwSEE=iMEvtq3;t66DNls`o7rsCGfOatJyoX^(%E(MQcPBC8;U7ICObVyeKcm za*bsON}2*rqQ?qQm;Q}aHeIw=TR&U7e<>JAdpvAIL@JAA({-e|Fu>31$2QO1WbX)) zBiaTrdtq5?E)<_@Wy0iw&gD9hDt}43x-PhG*M<3RKg=G%wMEfmH@)V@O?QZ z1Te;D+A0C69$B{H>;}cHiS|y2-Mny}B}7w_u6Xy(wB1#nQ9d_>2SgLJYXg(p8v9g` zKIqFie5R!?_rT~@bGtPPhObMSy<81a2Q_ZyXkP%?cGpl(v zUQFtdSCGmROPduS&hTXD@Z*p{!Hxx-ZQr??xCpy*Tgb>9{Ki!|wo-tKq`Xkf zDbg)9Ac`Z4=rCi1Wr^d(WuR)fM$lUdM-hCNej^5wXLB)nC>ggXR&1+l2|_d;U412z zTv(7`qR=j_bI58Jw>H*B&`ZTJAL_)GA&!<(5tk*z_sa2-otv*q*1@2qBFn_>1w1sb zfE~I$E(TTaz_;nRXm2L@a2!_;L|Bd(v8*&ph=@lRAi&?(v2 z1>ew`MTd4bT0;`Xd1d|KMnZ##+c9)sWuz09;mHd)Qlo#Cu~F51Y0^Q%cf|%4yp?@_~&}+A{>bGI!uRjT9sY`5Y=6DW5}Mg$RkNxpM4v(Qk-A#kh{P%S97eW`@_T)q3F%frEHj*>L^; 
z_-~2hW2uLx2m6&6j9}E=YD7{t(}bQ}{6?@v`k8vB_Ki3cc5?bs`}{ShQ_&PVwGlcr zSmV-MyH%Jrl9nTHbA?D%gcHJ*$(XxGU{vL(u+LxWUp|w}ft5R8(%${ztO3^xxWHVv zbHW=j$c{C?_1x)MYw0;vG}3E-#UkKug4FAd>+QcGRxuaysjtH(?}3Gko)-kjDmEm= ziUOUeZm?HHlhy9wyH+9O20^u}oSKYDRFv5p;`s04$pSkhL{U%lw$pwGN%s9U`L`GF zzU+usk5`lMI;_2H3Q?oq%f|v_nANmtRgrxoM25V&i%9Q;y~EyK!|yZY_a1oT)mA2w z)}z8$XJLyxX3s|3FD8|e5v9}L@CeOwtGG(~tg`Ir+%a~RnBljIn=-po*o%cfSO7zjcl9}|8_nDX8BncHy$;ZR$WC71H<3(N`<=Px^yeSfO)%Kwv zGHQc++gq(c**i*Yji9GxzbX4?Z;C@?R>jui4<0#kbuh+K1u0eb0qkV~+7O*K#`V?^ zlcC?P{hju!KN9HKHqD>jG=GEDh=WMclZ_G9*aGV+bQE&+EyFCb{Xc?b6SI}>jnky=sCrnCe!Tlt}#Pmv{Zl!e>^oHkY zH}lJLVz}67UvsBt4dPJCw8q8UjW-5_Q7t;5%^nZqvrLXYCD@>bWHy!5?97+_v2ZM}qbkFmOKsoy)yMu_9}=z$9>u3_RfMm&Dp z?{2oG$pn5WX|IW+9II9&s2RGg15ZXUmpee>--wqqzDqRZqbps z+y#CBR1?+PPXqZ-hP@zC1xJq56BTISh(k@euHI407I>sE%|$tVa%`>;?UTveaN&p0 z^f}KTc!en7ADzVN5s{CaQakS>Ps<00PP#*g_{D6k?J)u3HVOOud+!mc$3D@L^UJ;tt+1!afmAneT7pdt-6%o!v^m^}8XGAYRWt4P~7|2+_ zyP*_&IRw))efc!z*nBa(K-N7nYPMM%q-&Z#qG^7EeG($@!%8%)(9i4QL!P9?U_yo6 zC=N*yyglN^{82$~h(1L!2tQQ}YWUfjrPk}P$K|?3mo2{(Bq*yW&nAne;dd@!r8pt8$|T?%*gM?8TKG21H`ra;-dLOH~~b=0HKiv2m^>=@vICDDeXub{-CezfR# z`;{18<^c8|Hc41NAZ1B&d&$6zs2ygtG16xlKuYOKGYOPhmRrhJPZTaYYr+0!2i@%RZ>+x36)cqhipJytW+ZWN@w7)lM1=HSo&3QDquvM4Amx&|! 
zR@ccI^mT%WUyxH&AT5-;gw>0YIkIjc6yeeum*-kZNDMx{W`j)-q-f3vY_n)DNoHO- z6nra$Hji6tABpy6;0O;NX6#3Q{MXhLJ$SUrOYB?3@wUX*1$JqKX(*g4O}8t*22fbj zn05AuXm2!>4D2aEFPr(ScJC;gnT(Lmc4=iutNY?t|3+kfc#rh4p(4G!zOr85_3y1X zI&-NLrpU$(h4wqq>NMSxy%7RFoiV|hMYn7O3wWrV$ByY6F+FE4Y8Kl?|M1kYJQl*4 z0!Y7iB$M4Rs}l4Kn&vNTn!l}S{w%y1!t|cx!^9QY3^CNrbLZJ#L@NUsHr`}kgrK6v zh4%e_c{EmhcCly|BKo=(35UTT=h~&VDa7t@TU-0RXfIJ8V@^E&4TFa2*U#FCROM^< zz~>4?l8m#rTj;SP?4)SVzLyl$a_o-(z|mOau>zV8@c+P zuJ{JzvhSLQi=J9x@9~pk$Jz-&|6RNv-9SeNgg5;cu4aOI@?3?g|H^xBYGvcr4{xcNnyqizEiB6zjYcs zpUa-=Zi9r#kKKS~$hrV&&`xaLzI!^M)Q}Ok!0r$rDo#u5ag3cE!itJ{F45}@!h(gy z0RzyKc1`lZV$)j9<8US%=+CE5v44stU*_*zyZKB&_^oj#%!1?ralCDIjLpDS*(+jD zH1kyjCKZMYlI@`~jL?WCt8Cr-jQv&!C?8?-MU&?6=9z~|$Ju*=-ZVL~8~;fNW(Hrj zf1-6f%VSgV+}HryA?Tmit~7lk_r*Uv8x}(87?M|g)2)jbl+1zdbQ>q&*$=>mgRKpL zyVmU5;;WRIE;1E=1%+^)hWZ`w3DCdYcuNE#OVX zS$GGx~ZQvmjl*4vu`6p)TBhrfwtE^gTn)+8#H*l8ESRKsYsELDc{ z#UMpt5iVy$lU0#6NskC1P)=dvq9V%*(LP@`%gRN2TcQGp%A+j~apST6{LZ%tQ8T{U zvhTb*ByFC+TqpX5bFPV2zMQ+yNNlKy7Q(X=8|qmL%S*?G1&IwyfL48x=fiK9M~gP- zdq9nlO{KP3v<6SZa@kbbDd-Y~L*I`=%K+9%Pst?g6D)Z3~JU^b!#z!a36Oec>#<4y0CIHJd;gCJp`hYy zjQNZ9>8Zu=%f=9VymFqM`s3DMCZobZ@ga7aSU*1J`+LSSFVwOv!u49z)Vk)lPJ!yv zhK<%=v^UqjB{m_1@CwfsiAGY?8>EPz5L6Fs!r)2wOC6*aYdtRD&Dd04%Px{_KG{R^Y4)ot0AUo!x6Eq3v(JmC2|z!8J=&ck z1}RVx#${3nB{y}_1gjN;+D9wy$#(9QK*WnKcJUcG&u$e*oq=~gdr*M3&`EF_Co+9Q zAeoJ~Q~~-on?+IFJ`5-$(r0?=Do;qGU|_A%F1;G;ee@s-bJcv$xi_; zeMj^R>~&$pP2*E@JkZDLYwQcrBsOGy)%3YS;eEb4{!*P1My}TS4lzTkb1s1vJZG z3ges`JYat%U)D7O-b!qC((TS0!T;Yhyq?udjZGJy#&2Aa&pr1>A+n1%F16#LH7ayL zzta8^ATNPr=JnB=Jd|GS&O5(;f{2?s$cBmZlF_wh*(Z#=o$(NBKfW2@GvPhg+owW& z$94Fmi4%Rt@pf2DA0IS5$6%-20wY|HY+$C963$jTPh9UIJ=?-s%EjU^Bk_BPPN+B) z%SuM_7ebaV4${P9x>OSIqWY%D&h1<=s7B))`!GbzZd{aOhXrVtSUwi`+d}&fxHl)c zmkE-jV{f}DgixJ&F3R3*M~Dx!;0i9qj)+HM`bgK<|BtQrfbY7x{{P$Bp|o0Sd+Od> zivn(|ZILbW3y4~)HseF`A=7*kmK_<4umnMtpnwDj0YVa1*w%onm7!w6IzAzUAy}<# z)eiOle7(;lpFVz%zYoddyv|+cj?aDHaqc;%bOO2nIDO%@?-gQrtr!wx`LVA`aSGCh z$rs4q#UYQ#4(dkBya`B+a;2|vLcq%;xSpB4-hOhk$7ds}_2Lde@1Mws#Jj@OZ75gn 
ze-llI9NIeLXnDh7Kf(WZNUtR#0xi3xE&ZOQwDEdjx|G+OJe+z>i00_p)y}#Vl8jqx z=U8Xa$b59t#>3CfxAG8lXxk)vN3>TN|0VReoq3xlEW-C(mS_*vZPAMH_DbNilr3v1 zo7+;h$_|J4dS?8gXf@6_Eb*decDrYFqG68Zi4Kmcuui{zbL8E9qBP6rglIC(xg-^> zrDA1E#rT%;#Vr+ETFN)t3ko8027OQLOSKC6kpSnAc`|V_%03ENwN{O>bM7F5kM`F$ z+fRjPkET_ai4pB%I*UoIC_kF#zI7*Dex$CShgr&G?Hb*_v=kNOY+zdcM^l2!2Sm9v?D`??*$fV`6S>tEyvFNDX|za{OHEoF#)gB@%3YD&Rsx8 z2UmfSnT~zJ#7)Qdj&-`*19y~VMB3i{114|g&Gq&>VIO_C(cx~D>(}L}IK2zF7Ch@` zo(E|PIb>v!bX}nG*6CPa(}loz?(9Q7a$J`5|+)r-7dcepdUkHqg^AKfPEYHt-!}9 zzy~Z2L` zGXc+L5L&0-=>=q-h?X#T*k#AYi;uNNV!Z{Cl6z@1-%V8Rc3(93*k}MjJK9l z_#kZXbDeN62^d}&PiEoMLx|jH)rmBG-vIf@OG99^;K^YitNtTxLP(w!!_1|v4e{xy z0bGH-BuH~bnn2rjA=Yk7 zu3aoOrH?~&UZ@t^xB3&zsA#U7@3i;)PzF#Z1xcFqjgj+_dYL@9LrjlTgn^IW1c+6J zmhy@AdqJ;qa_!jU+I2Ql9P;OTXx9&D3&keuY`*xH_KBdlJMTf5p{I2MeEBaui=^JaEwkGMeMXHoD(sN0i+Hrk zA+}kUKIGBqprLq;xlGVIp))(He9SNO2{Dpu7wLELvS1jzvie_Uhs2=?v9erbheK@C zzPLiSZ~V%$kCwMvtY?6f){`ApA3;jf(w8$QsfiC23)xzqTw{fzLs4~&JRq>C;`~3~ zg2<&5MeY2b*lNx3_GfVrj7L}mE@2+YNptDW&MfwVgtbtx+h5e$g_wf_N1+i<~z7S=V_i09)X!ObE*iNqj>%YOTl$1k(05X=D~-Gi># zj#M6=E5}zm#Y32;#V<~`K|=vvp^i6pvPZMp^6!h$o|sJ)gZyc>LD;Mlq9=}SU1^pL z?Tr&zW}aiq#35t!Wb*AZ0TN0ZHepxC+ahCMs|>y70&zU!?nSm*h>Y3W*m=XC5h;2{ zzuo5N08b8Av;k{{Ntj!Z#nR?=Aw_POEraNra)IR2Jh{SN3iR=1)9jo)kH+1WJjiwk zkXrRY)&~WB0*hR3vl5so78%`o>#epl6+D7BM!Kv%E1zKcuz!(VQ~*h>F?F@tel8G+ zGgDW33qlyP{u8!3gsCa{HSHCogx0Beh89hB_YAbt3Oxs6-NPcHT?%%_uJNx0{dtH{w&GoXy9i;-YIFU@ocg%jZm22-u{>XH9IzTihZ{T+B3!~4GL^s z#h}oAOYLEiG*gV^zeUz>g+jM$t<;zKmpWBEaeM7-{!YjhwmVMF3;7JHaEaLf3G&|Gw1~ej4UOy;=B%BjhzP-sl67(x% z6lE~8jjIjzIcA;d)G4ZBQsI6%6g5H+IqqY0(8 zIoDrrRRToq;TEvI`8W{Sim1l9qP?+Ly*+EQg&D)chg#>~K$69-O$>!^{?^l!@GSLP zq6yfu_KmrAxq#Pv$CO+K=2ao)k)>1Z7ovTT7Rc05!X5}|*^)Cp1t^>|$~c4Cwxyyc zt7~MW|M?JuS`~BCX#W&O^kmg|>+(Bj#K5^lmJJc`w&^{}ekp`dc`;5nI3c&V7^G{d zc+y6tq8`|8-GA>PI~S$dy`sr6@@Tb9fIL_jqh!d2X@?lzR8fSFgsEI~mQbk z82jb-=m}v;?Raws`*vAd*uAktiJdEqz?a?G5q;E~guJ1&jbj7XQyew&!aY^?lz=AD zK2|FdDT*;ru93V`_?mW?Mz^KxvC$b^rAJE}EA;W#uk_@F>9@*>i8Ogli|y1vckI~7 
z&J^uUH{d=SEabJsfpyFt4KauF0yi_z$z?07NHp>lt9j?85^-RP)n#D@Tz*01v}K=Q z_}ruH%GJ_3JDy6y^svRws~`?i;G%MLfjuweQe>)tw}pL_X`fSI$q=1^apQINkr3s9 z`mkX6mmspm^SNdEcL2M#@yq+}7!TR|Bn!N20^kmGjP(?tiJSPHi6l3)DQd8i{D`2J z%>=;0p> zD0}n~M2_~iT7^Qwpr}GgyWA$8jJtB#k!??4p@0ZT#uMfzbn4@Vo3Z+8aM==n7$0A#liY(u{XpF29hjKFchQBA8Cf1jMWB&G&J?Qx2$xfgADPV$+ znSzC|O zGpiDlp(mqXMfT(qo&`NB?K#jji*n_$5vJVAG*slot1dv`#2`^>X8y*zL)9O~kzCXPU=f}B+ zxm1|ejdKk@LbT`G7lpb{1bqM(^B|g)*9;Hmh~jB}iWdp`8Y?XmgM%z;v&Ev$`)m24 zR|t^xa5igy5bcx2-~o2yQy?!WosZ6VL6@4NlXN!z3&f#lrbCp{nM6MmgNzHYXJG#h z@uPPJkvq*fGYO`au5X=bD}^aw?>Gz*%(hnrNzDb?GQ1IcTskT{#@WiJAujmdf^-`? zi(n+>G+dwVgwSzlOW-Gy8} zl)Uoog1K!ttALf>RAKT@RyRa1qsr}Banw%ba%*TG3L=7@Cp6OTH zC0><05!8;r=x!INvIOq@(pL48{0v!fHkg8DRSHX~FO_ zD=99>V+*ugEE?o!-3GhpIggFJ{Wur!XK#o>^gHgj!A^)q zwn$N>+)i2UL9O-JdkHiOX!ae^p_glGlk6U@NO3S;Et8!;6-Q!ij8y!_Szj^8o2`IO z$7Tt7zw$}IEn~jDB34+wpS&iu^J-v519xV$WTeZ*LEyu+>ntvsT)1J@&AYtY{CEW{ z%Hg;xgG<$G1?eUkEV27T7#*q~{Dcs;XVIRC_ND;YHmyq7H&;T_4~5L?1!VF)VV}BC zyXD;PHZka8_S~4;%oHMNOW9(ZESmCgOq^y*LpmIjtl%67Kb>ND2X~e_eO(Ik9%_u; zwTf_Wl|%`b?)XM25{t~+-_*f2idLh_t;RZTZdTNKU^=#^wg*}lf_p`KJ{)Lsu2yU# zR>Sh5*%(*Z)51&^FVC}A0{K8^yS5H4;{SO%TIsh5(3F^HU(T9vhoE=+AU&Q<*v=Fu zxqdzROxC-eINnlu@tmRpJ0S+*7Zz{{)d1}sOrPsbG);M*m=wagaVFOk_pAYWAzW+Y zrO~p*Ap%przPk$q$x!;T+eBa1?tFgz(otN^lefiOTSvC#PP%3NgW^yl&iqE&scSti z666%b?7{#KZQ67gyMC7ohH<^LTC>-E;!qu}3b{C`wBl3-L-0B^BETbSHXqr%z&flW zILFzez-J1;+Amd^5Ky@l!UyjSpXe7uhDRaMQn4O*CutVg# zVm0D;FLu$-$i{5+{A4uxNzwk{VAnp1`|BJg<$2;VoO%qhDv?OCe;NKTYXm$isoL=s z-vomuaA!TXQ4mRwPMKg^LlCVq2!+Wv15Bd|VWy1`kCuw`O=|6NL9)+Fd(6g&R$E}{08`gOWS6X|O0M8V8?Vd6(P9Im zpt_7)t!4bb#YEWLIJ3oR+W}`Fl&73c>o{PSFVS$hOL3CS_|E2#*a;0|T( zdP5vrEsQgxglH8?SGqfHa-lO;pG$QB8ET?cisZ#02iwB*u_(uX-KaFR9E z$(ki{f_EVGX7B9)Q*GXHY{HkegMLkm3(NvQje42gF5nF^n9D27G>hT++@pt2m8l3) z-jCV$UL*oSCRdHOi$$xgBFDxyQqa?K=Q+jhc?s+-@29+#}frGuDlUz#zINtAsd9YgM3P7Kf9LliHoxpaDSX{}u>j!SmDyxKk% zL^6!_rC41wCWm zd!1m)#8gFdX@fl#Akzbi$5t(f2u<4%c-I>MuM=}YMbz`3h(`n#|1taL|3uJfnr5TV 
z#UuY+Pe-1J&Uq8)(#Ry>l5*=Uj&~1F7Gw6HFcmE>;LO_&3Xz;clUY2oJN&=NXJK;O z3if$~=!tc;4G)ku&SY~vT@dlocg)SS)gg>8yEJm{c#AL>B`+;nrZW7z1Q`E6r+dda%DE&`ig^4 zm{H5L`$Fs-*}=%N7D4a6tYLEW_(6)%pJis6oqoWxmzHtaQtUH$X55jbnF-K#m)-dg z7nA1+@q;_OaHA~_(eh)g_UZ(vCy(#t9qmm)Z-}Pae3^`E|F)-W-|<%KC>m)oF0pSpmpn$a*Me<6)-r6rIG(bzE=F2}qp`%; z#DLYiKpM+!i(V^4AOq(F`Y%O-UYmQc;>0Ee2X`S#WaoLCy(s7fCSn;y+~>U876Bs` zPuthtYs-TMV71AK)eps^P>Lzcu=9kx6}m`K)b0?2#vUf?gZBy`Qd+!dM_G>5g)r_s zH`t4*lw7WIvOM5@k4(k`FpoW6kc>E?!>z4=ONv!VwBYT^qzB{VXN^q}@ODCjG~b>P zMk)?j7Dc8Xx2GcDi35*_&29Nbsw!J-zc_T*kxk3&zoPlsbZl=I9CUJchFu}j3pr4U zt@$x2%)M8kZB60q7&;X13wzV_D8gN=J=5ZO<)qshkyH*-6cr9azen3JdtSkzE9aEi zaY34#_GJ)c9|CD;ZCGpL1AS!nSR8CS|M0>^*6uL0%gS+b%mxUPA?Jh@OdPufeQaRy ztDddGIO}wzEy>VaKKv7f5%HC|s5E^2D1cK1)NZ@w#-t2<&nV24cN81J_~B#r=q@3b zS~k3YB}fL@&6`(SUMj3>T9!Q(Ag4)_FnO?2(8p9F!#)$D;iB6Ny>0&5F&KQocGb3B z^PL}9ELxWHHgg(3$Q-dKp3afwiW!35{6o`ZtL)4VV9;(Y<+$kz^c@{(}OICpIq_ zqqo@QVtO5~yUzX~hfjR5IF!^px!P()``D##pR;;lPs|$vTPx`G%j1iaZR=C<@7D+DkT7{2 z-TtioLo}^4O81zz{LNF!%bM)jDQ+V&Pb~5sGxV34nU(J zapNcO=;g@q$7bH3$Q5&jm`oj=+S^Kz$T4!{NP8>LnN78}?C+v_qGJIwN+R127yOfO1m97XyQ9oH2}xFrG4P&ul%TiYzyg$y#tMg1 z>XWK`$=Vw1{@9L?^2oxqN0%+O1A;z8v4(xxr=TF%0ZX(_L2saVreQE7L5=E>-P9ZK0gVI*b&wvOZE!-8IJx1sj+&qE-J z8?5E87E}>S8>6GWCDvaYHGG8|IGv+?-gp1ysVJf^i<0{V$r7XGOquJ1d`O}riyzY> zdqphu_E?sj{}u46;tyf6PEPH#f5Y=}mWjUx{K1Ohncyvuj!D>qVrh8knc7eR`nyNZ z(N-Af7(eTG{^OA%%dsB|sIO$Am^Y9Ab%@kCsL`DpU}UB}W;Y6webLS*bFE&$e{`}K z&TF9e#i79COXgU+FQB#3>u%?W^oUx&ak+&sa~5|eZA=84Zq@qAKk^JM<6uwmkR z3l8QBk+4l6KWdxF>q9&Xojk-jr7-lp5(6hAVN zA3fM*oZRuXyQ7MYz`jzZRSHq%IAdXoXeyb8<`*ycjtY7~d&jaXedg=nFwP_Apxi0# z7SG$IH?!n1!BFkS%HVYLM2Hid!JPIDkJDzwO&YAvodVOx4E{+n;*7<8V)-g-~+oj(EYQRieXcI+y=ddD58)27!8-{D!OVOAY>e03LtzvsTxiHJHJH+tj zliwyAoWdk1wMT`c-z#r`w&>VnL&%~l_cHPP`-<)K?|2l(0jo@$wpzJ3)G=$AtfYMX zG?16cmyr|lHGv_^#Q31wU`=AU5FPKh{SLdk9hjP>aYH*vwD;Wr4rN{vB(umM!5V(- z$Heds=-t_V@?B5aTsqdK2fCFl&J59B0$Zd~3?6vAcuuyR`aNiVmpQVdbgO_D$GEje zLs+pMoaPI7MS4Uwu&Y93UCVKVvsBPK8@mB|;rBsEJ}hl?rd@fum&3t9mX!!l4j#b^ 
zcv@5ypq>z5t?BMF2=zf6i;P#WpLkF_&;7u7t%_{EI6g>tO~jjQeViM_q&CIUJ6|i{ z5)W&t;;VQxmC1?qPn~F(Zx?)@CWe=oCQnet(#<>e+W%h(N?e23Is{Yf%7Jd{|Mkx^USNrUKb`VL$Vk1(osP~ zZ7!{EE}a&A+j%B$Fujer8fuUpgiRCA)7{HA0B3GD9i?Ve`K&V}XDvGR`D^TK3QjFq))Yk!_+lw32}(nM1pmQEV2 zP{7-lWp=~<`nOlxIPt>h8plOJy)an~;aaG`nuNUdSqqw-52PY{%It2DbYM&QIz5i8 zn4$NPqr~(A*kEH@2%7{?isuC+^hJ7G7>WKoi?j8=33yW#7wGh}>jfT~xno|j)e2~) zYnYjA*oF@oTP+T4!4;!?J4kKlWwBhu4DNMb6QmUn&st?ay3pz5rYY86G#K2nvDvabJ8<$X5e-zE-F` z#c_d}X60MI6zq6g2m6(v*ElU*m&;9J(BS+qx(%`)TnzFO`UhKIUB#loqVIV>0jk$r zInB0-_Lk)|Bgg(J>;n_c(~Y~f*v~I%<8TDk$i>5jqD2Xkakdk?G#Bnvy)o4Z>7B=NL~f$ zyyUb2SAaB7bVwmS>lb7lud?c8PdiWd8&&kKYmYk3VbFWjv$R4d6KYM=Bn( z5*O`P5>Ml0A=gAR>~TTL=EsNzd8Lqsn(W1{ZZ2JAe-MXeYpGad*IWhdMGlS^ChQ(z zvSKYhfl5CvsPQ|0o*lW`qZSk*uU@VQdk1&UjAv5KZ(ajOV*_PyYbQY4^iQ|jMXJEE zI(t&Ir`okprao+T;GilJ>F1BLrfWTE&wEydJ$z-CI%8lCN{=MeVT5cAto7OH7H?A1-#=1pp;|P!n8oLu13yK zwg^%i?3HsK^ybOHs?{=3d;X0C(lys#ZCyo@=VcGy{WE(|KogBTusj_ZC+2UNEfrq_ ziWg|bc6kSn#T21UBAdGCy_#JoK6O8`X{p^M+PjS-eVk#uDMt7;En{GPn!?>RT84Ba zTpd^wjqt|=JvxJl)sV2-l>>n05Ebq2HC*+y2vURn8(8w*(8@{)7e_!LijrNg%53GJ+*0|^v zSYA~*Ngrul#0XuDp8hi-Zx&|UbJ>=W%38&#mEX&;C|R&PJe>gDk}0NxVO$Cc@LyjWzETvuvM zqV;N7n;LX4_CNseQ~IDYwd*ADKo5pfb`X%Zv8D?FigCWTONkb zLGip5M~uQ}!FPWOrs(T9Yr9djhZW0XR9Og1Zdzoo1zKJ@KNjtci^mEnXy14j9BRxd zqa55kV?D(133$(9w1YT_-aDK9;=^})L`fKDfZ7f*DF0yTH2YjMwTWRpEei`zKXXW2 z*>*T2NX#OROb?6Zci80~>+o|(7lyKDvoijTIQq5>vQm-A#pM}RBv1-(6oX8TmQS#~ zfzIQw?8aYsE(6$HprOG^v4c3oEM~jEN3>V3I;!4fong>YJqt!eYWUX*BRQgW`pGRe zI|Ku9iM_>LpOWd&tY@Y25$t-Q#)A7RwRW`{w=_Hg)TS z5%0)^dG>~Ah8MP*682>X7|p50x4L+wQ5+L=60M=KvGM5kX;u`14mWJJBck~wNl!jz z1G|E}r0leu9D7RG>z2nSV7DM`@pkl97j4=DVt5De$Xkc#U!_v?;OwhwH_zbswE4#; zRoQ)lNdCc^HTH;TepYT0Ei#fi#m0(Bb<$X8+HwJMPcB{N56f{y@9B4rE3xK4XQ1%< zfoPXIk&S8CbGm!H)~c!YYtaa;%~EtDct8xQJfKJyk%xq+BF6zzN%?yZAOZ}~4&qY* z&z6TinU>xYjQGuy=Gth{L}Wu&ZC?nGNu+|Z(7x8IE!_aDvfDUeM2L=>u=$5kf&YoX zvhS#{w_rf9jBK(zEcLB0Lx_$1U#u_`TcU$W2X;h)BFu_tRRC{Ao`uV`{-K{4^bMEfelrLPKkDwk{D8BDlDkxQlT3y?6mw89<`O-tSIkUc0e 
z`rS5!-|f~A*ezk(L{b1bo)}Ya+XM6)1N&6a2VXxPKNRuYHPbG>4<-$r7OiTo5~BB* zjOU}-tS`<_#6qa1WsK!Q0SbsLW=yqxC*jgnvJ(CEAs)40=a!gVD?rrnLU39LOB=Rh ztaZHKBl4oQ#(pc{ox>$`rhWYZFqM7yr#4Dt__;Jpk9>42wdoH!Gr74mxoKWF!l5)3sk7Mpkc66k9Ie^iaZ>lABHNnzw;{8h1N;^TF7_F140kC|tuJp?~$_cB@OpNN~cB2IEF?5Q$$^9nWA|1ZilQ$1Aq3zXDOofoV2g zq&k|Tt;wQ2SKhIYx3wYc@S;ll$HRnaqHB~_Y^>5;^N8nhR5FrCJU zUcz<@P%@S&xyC#5(YC0pR%@)gFl`(>9F;AWO2kkxX@QoCc`X&=S}Impr8r30RGn*o z674_E2Rqxb5RzO|iEn#CT#mG2u5G&t|F0DZDKsN|nLVTs|LG*Fo(0fQF>;WkKt8K}zRsKcCGj(?I@J_f=nXq&QAorI(|!DFSNXwQHh}K)!%j=6Qdj-{ek+;KEAu=1yVcRb<+9Kw#th-;3LUYqlKYdt$ z`bAoRkrTW!ap>|Mc!WzO%)=1UkrX`f>>TTr>Dls8U5&oOBZ6xAwY-=XjV!L5zEX%0 zLBr(9&#rA2gJI9Ed==YHnIZa73>HGYF_*>8FaiBaqSddB6!f;EtYGBU#*0gN$Hq># zB_TDJ^vigZPXM{IHsxy`we!m%e8&{7-;)8L@G#dt79f{l%xiZKb-J~x-WG~R9*+En z+a3YrQR&q5m>?}Q_yId3%R?|vDxJ`~0z5KzbL-e8)Ou%JzH@V-V$L(PYGa9+TGrBy<71PC!3I z{jk@t;y4;YQA2c4ask+!M7EMf@*0NTZV-o_y@3*mexJnGF+`qeBHw@C}EO~9n zMH}Wuv8ZQj{a7m#jXYURON#6r0UxsH!7j3o1LiBj)!it&Hm5C|Z`U@vIu{(ODx1s= zcC9!xedO(8j`axoxaln$Mn3D`8_Qwn2Q88 zP_fI+TVNrIPp(}M?co>LtKv`_R41?!pYOrsCPR^|sm>E8jBGyGX|_TP|4nMPelG=w z@WuaD0im>0dtSU=B-)d9Ey8cOT^R!Nc^1@Fw6|LhCw4YX*xNL^%xeF2XJ&r^7Z_5TTOGYV4&z7ot~m=p?_Y!Y+Bt z<0N7+yCKkrHqEqOiY6<5!d#r#(*e>z!?}%rO_0V*%O4iAQ;PsDSl6`tXnAvySfs|{ zmVE1e8enVLlGaTdY)FDoZ=lTREarYO$RVhva(-e3VtG5UE-0{ZDU91o{F)Ve9-Mfr zjgCiN7K0+Wbisw0{Z;U!`&!Ny%UO5V?H$Lt*u5ofIcKNk4z*li7e6f*gP3th$*G?!56+H;IYsK(xz=(<6l!ASjANmQ>D+lLHwRF+p zL3 zVFxTS@Ud8|a5N#sA*LvMRXnOZ8eM=rq7jHE`q5(R%u!&<3zxW+g2;zXQNHaJU~cN! 
z+rBm$(n}gDl~}g3UB%LGx(Zur(Hp;Jal*%*{R9&a2R{7UY{wO%9^;A5BKwb!*RhZ@ z<(ZEI5%1WX?RNWboX%#pZWQg&7zTFHZyg@mw5)YT8Kwz5X7hweHYCtl@u4;?MB}G& zV)k0w8(?H2W1^i9^#0%zN+o{RRz?C1x?cx~Yf_BK+Ry;8e15obwLO-KoLx|2*ZiKe z-uE1H&tw*!CQjS0rq~V$Q?q=oG4Cpcre*Qxm#{YlJZWD(O#HywmBFF1DDp`w>P|so z>(s}d5b$gZWkheWuy+P8TX>qCZAZjXXPkUUVLvE`Lxvn^ufxX1If7iS>q!|SUG}GR zIdrYql*81w#`*{#Zh{$gxM+S=yfWdU0@E|Eh@lC+Sey0lR)Acj$U{VgNt#?zYiFgx zGSf=3>{el~Y=YxRuIIXlL+s{f$Jj{GNN`gJbb==c=(kqIBTHKyPyyG^F?&_Wvq{#} z@_@uz#37p^jPU(Kv21|9RS58#rfp zNL&iY48O%umK_qku;_)wHhyed37t9jx8CF0FkV*5Y@9GH#+ROpBu)ft#GptXWKFg| zrefzeEiJM?2_t(UwxQaMho-C8Zas(P;5!ACQN0eS_?ZtEBQ&;}@1aUC8D|zSVN?i^ zm`jIk80&dakQU4@kgMwVg!l#b?PBo>kQCUuda^w(nhbb)9mNfnyBuYN)TdGi>y($K**D($GJ|)x0NBBXHuv{JTQ&$&}6!hs*et;rix1^ zk_YDvYz!BPK^wA!TWaqJ&}?}1&B2t!CxR4MoLz$FvM+_az`fl1S@?HsBCDKP_caE1|5Tq<^jouDHTv2RiKAbbdW9mkdmiVI} zF*9&peg0F>Ub?nDTyu3uaqf3J5t_-Vk+Wm6p<_1%sMg|%TCt!H$V7p(w_;WymS-HjR=If=nEiWmIk&0& zU#mOzUy13FxO>KIp|ID9gPo`Ns0^A7#|J~Vkr_M-9G+qfjpt&btzHcM3UN(dV1E%p z&dB{b#^c-I{AAS@J8KT1j3o4gu&F8Fy)cND7$3q=$BEg`<`U+`4v3B;dWb>fXwq!7 z;*(I`TX=IJh}`bnUW%|Q#i1m=LfLk~(*UpLAT2wu6;@_i`fRit1u3j^5wENBl3x{8b>bk)m7RSZNMaeB6t07axq5@s-#~cDA*`=Yo#ntTPg5C?P zKC%*)5yCm6#(IiP5cJumM}IpegkX_L>n{ad66vd3!$J>4wPutx3Xrd?eMeuC8y3NE zIU_rYn4g0p+H>=tL`FyRj9||L6KH4RDGVOMO7~a--SjM?f&|7i< zo&jQsY`Z=rVL7%OA?Dc4sU$<_o_ht6yk8f4QzW9GX_U!U+_BdrompWCp;R}y>>>j$2SOfuo_rm36gv{~7|JQ#anVSr2W!!O!M+Ts`Bg|=6t@A(iQ?7mrfpnrVX}{Y ztGx4a2GaKKX5$qWnnE^K#zvowN#gq6#}8asggq-Bvtx8SgmbEB)q7P9%<$d8EPJ%f z(ne{j_(&<8Evb%I+NCStFp#heS8O)~Fapb|c3%Jti5WH}0AAC~vP}ZCU4kFP8zF!q zw%K_r2?%>L47p0{!mbd{YnjJNJ`W=eFc3I0u8ngL%xcfIEHSBipI%lX(tEdK`%YFb z7=8=erSoOGLj)P0;ahrG5NX=q+`&E(eRjJuAN_y+vwwt0P3v>)oK>CxSHO0YXzvqV z6k$eGn8I^knakm+eJtpOO98BYqMcdm5$LoJXI4A(+Tn&0y}b(+PKxHzdCjF;rTTM! 
z;6!G`BV#i)CpnwTlAD$!H`Q6c)ota-Y+HVHQ)Z$voLj&oFX=*fzjH!rcP13(!r&)y6GQ(c?v zyp5h=WGD#xO@iTP5NTxCgW`CX$|i>8pG=^KC);hZppVR~$nM)WH?_s@&IZut2&;i* z!q!c;tPm?2yr?qr)VI+ZLn2-v73VYACxt!N+_+Ba`fi5dGfi{F^ymhAhK@&9itp_) znk$B`TRdk5m<*xniy{s!+3A)m?F-SgmJYFFcH34E?V2d&D4<-xYnd10z6s(VpvIxapE2G?5`>;QUAcg@GDe=LQEad&FJ6?UofO7fUAY(gr=PCfO0 zUKm+1NpHJ$w&fOQVPJn2K54fLd+3~?&5vwlt+rEN^km0M7usc_>6f9StdmHFBRVT= z^rd}Z{lxM%)zNl;K_upkrKG4N+a408<+3ApT2xsrjhA5}Pv>4XP$aeM$b-KzqDjoj$2v}y*4dMRq4|;x=}>z` z3}hLMQf7|*RY+ru+h3ls{`M7M_?Yv>VxY;m{v>fjKWT3m8J1*T|3Z8cOH#g{m%Iuj zXNu}I$i@m0$Z6bjtgf&8qeHepW3bWCH9+|-#RynsfGuUUEoGDJfFLz$#+F~FKS8Ut zE2vYQRt+}nHCWVrLXwDRVo`M57h9516 zu=tk$)m}*2DxYsfj%d#Yl?fddB?2dM9Eba$t@eyKR4!|nEW_5PQZ!dA*L3(^isL&y z>-4%8ws*eWFVYK$Mp&7R6h{M2DqNdba=j}C@*JtHx39n9bP?Kx=Zp5F$qi$Y8|K?( zAvA|)i`_-jKJwbMzs`CIk}I_@7|LW-6zu_0rn8K^(PiO?k3rwiO0Q z>Y?$GHa`{r!6rfx?aLIkfTZD$SY~*`{ zpVf8Zk%A&VoQF|UK`K3b=V}`(n!f2ez@~;+2ew4_zfTK#v7F2m+W-9-?6r*U3hh^7 zgkdm^#nD2$e7|$#vNpzoAYnP;XmIAq8rb6@968=zegIuy+bE6~l7AXB zda^G%?|siLk;Apn^#T-=$(nPeXs;jchw zxy0^E5=ny(gBW-n7xGN-3#{p8p*TL+^vWXgN?RiyC4W#i_rx=$R-a15N=@b>w~K?A z@+r4dbhx+T8cYL4g@SVb^aWof|+qvRW5_8cc)}=8)@;$V5 zs+Eg&QGKAmrT7vkrQ~9W(OGM0>k+?05%u9f0(}zV-VW-zwj? 
z)_&Xq!!tfnQTE~VN*f{$#g59%vHQ5rEP%2?lfDUN*Bx*e-Yx+jzyMps4ZJo zulmhah~s_FlV7Ipbz+1qtQN++7q?Sd;a$}3Y^DRY-P;&6p=~0yvj!WbaMd&;JD=~M zEf7XNjBwQ2aREjE>HO{aHv1yPCqEfjwI3b!_~|?sv>yw2)zeEdu?Atk5+{g+P9nE0 z<#yf?c*xE89rrBp9uzjan?Asp_NY``w5Sa)?-R%sqyv~ELtYh~IpR^qC zbaM5$WbNumd4IL-Ohw&Mw{wBLCG15>W%5NCIw1j;uhPS;e1aO-Dr8q7K&IXI^ zqQAFc-r5!0CBo#ne--XhCmtxbYsK&uNY>1-eyR9aN=jIkFw(V@EwUQXh|N|iX78lZ zrLjG~;va<3a=WVRI+4^>gSnRt65>u7)RoCOx1caO7>S8}NxO4l>+U9+PNj3C(>GtV z&r}SRT(QQ!`A=xC6-UT%>n=>^uqZ2MWK`4QtHeS$dFQorKK1a(a)T^ktg{Qn@`;%g zwffw0;c|<((JxqU?l%b07M!E3Cd;!v1Ck|uh6QE;7oj&gDArrp$44e#ym&5enZ#0Z zUOk~C^o`FQQpEw>aJ(5{0^9#o;WI(1H!L>XuKJh946bJW6Cehhq}u+#zX_$wn1*DJ zKhtW&2%n-SnO`}T91;WnYL5074Y`{&di z5Z}igE(6ibdNh>{o5(zi2_rk^@$BVPSY)8*{eTM!b=#G&qrx;IkLnXxjsP%Tqxbae zXX~&R0mp}5F)Qf52r}zZo2PUN%hzG59Td}RrB~zjy)QfspYbyK@GL zKgl%vlQ0RR{oGc&{%b#ra^=8}7H%)(wHp!V1V@mzC`2pO9uXM|VW9YXX~OIbB~hmlRPZ8&bVcT$P^axi@1*NKMQ$*Qq-kLb{BIxDXgc46|^ ze4PCaAi^{)!JzIf0^WwXu`GVxUBuw`+_$U!QKSp<=5~8W$Q!pyc9B*|?N5P2zQ<~2 zx7K4>L69!w*;n-5c(%6D`C?KSeLB@D1w4zvX%v=e2gPww6HV(!+uwxAGPz<(vWDZ1 zZ?**w;9G2Sg~=^a`dcXAx#5Y06J*;fj{h_{kS^pAzTNaKSTY+C{Z{zCp~J;Du9w(U zt$1`Sr@;0Jg>fJGtol?8&uBy}mc!EvyXV`myb)Rc*!_Y?vTG&R#bX6rl4LFBd#mi! 
z@4)e2YV`R+5e^N)Wqicq3Za$4m4n3DW_Bt}#L40wvdC@}qAz95poeIRZJoF6$b|8> zP%xAvW97B>e2Bnglec4r-Fg}kyg}Kv&$HWvUF?oG+-yAry{75xoj79?PvuQ}(JqPsLyj-e8x1j~JeP4^~fo1LUh9>w^ym z*t%+|OpaxxA}80bwATc~xMyxHvfZih`>4$&rxT8BOz|vCIttMenL9VxfDnSN6<>px z5I>g@BkW_5G$q@N8VBAQOy3zg4*f*oUj0D@tjyY-0gj#=uKRyI7GJL*uUVXiIxgrb zae)%E&xFG$SUM9MP*(qaPY}7@eA17jn&0#De>Vug8`Q&!1o5B~rj?S~RIUmk(bKatwNvtoD~T+{L9Yi)NGGsio1JPzpbos$%C{d0xX7#wD(pgGO5^0j){FlCbKl4` z+2O~8X(DZ$8F}%MQy=Sbwr7@$_YCa_=LvfY4q&}kq8CD|#B=dJs2gLi33`1G&n-V% zU1=Ae0|)UUeZ)Hh*gKDt(!K&DWiPvI|02s2MAT!mCRufemm9}J`imjp(B=(xIHaNL z4?b0H_nqsREg`aBOxu#%va!tmC=P{SyU7lT)@STq`;SOOInuCA zs-dTy?+~d6Vjm=3kcbaGY+t(o(k013KiAF|R+~i67cs5@)bD#N`h5Kaylr{Niv4hl zh1mX^v+085dtj?og%C>Yirt%40qSgw#paiSWZN}1I%a2F=-J8unw}xoSYL4vB3ZRe z_MIy)a)_I@(%qBoxd3&FxV+Bl0^D7(dkmqyE+#S!lB~y+iD-TeR0f434#;MR5!#)D zE?bnsJQ|qon1YW_n{#5;TKjLv>%PHV?8-|>ipZU1bL46P|Cym}%8%q1VtB@ASf^S4 z5Idg#%3K>PK(5h3W{izWB`%I!e3Xmf?aCKYZ_4MTIL(#o*lF4pal8Po=j&|ArC@$9 znu78;lq||*L3C713WAy5RAa{kLz;P{$-b;DxChIGB0*Z}$WxnbexRkr+bB9D<2kjx zC#*rov@1XI_#M6+PWaL<+rL3KW72`(6VO*7UKP!Zv_!r*OucA;v8nr5$n(axL5Y1X zjKqGOVc)m{7%luJyR+n$vS{I#FFvB`_+XJ}IwD_B0tZ|P(2yT!vqgH=J!RSKqN~6z ziY&6!<6}%5L}7JKK7B%XQYVjC6x?ppLpW<6CYDV?-pBFWES3YGh=CBd-FAchN3>7x znRs*43g$Ogd+_iq-e-xXTAaL9Vln4^LGP@{>RR;FE-7Z{bzNXJ*zjxMQ2Ucc^cFW) z@N#61%@^OhwUBl80YT)ryTecIN7q7or3=$`F0$Rio@sFj+GY9ng*e1+shDm}*ExL) zZXy3J+B21BsecE^A*`C>((66uk#Y6*Yth7HQMZwo7mo@0B;1$RJ&}p0r^KUAdg>d$ zNFbaF>Pf6@Ppq+>AxgB4y6z`Lp+O@1H>+gwr2HwHC??&*=p8V?mI`@IusXE-?QM2u zdpOi2xoN!pxQ%|=Dn!#!zW-(SyFv`4;czX}-Vr!Cn%`J%=iUIOl%cUW%d1{OsvENd zibC-K`E=29wpP%G088<_0^2A?7*l#BUT00>P}YtW9PUiN5#VJpD2lC7IP6^`{X3p1 zRwuQK&V@7wKM|ecY;Iq>|-Xq zKF`j-35Y0#ym-GL&=?c7n?x(xRITsrs{k>I7GwN<<7Prbg}Ak}Tw(9!eXDGZ5P2Rh zU1INw_S#&-8Sllnc*HC;&I;^mAp)aCOpWyr^ju}>+1?OF_?Gg9$hOo%`>QzKcj$<6 zIP}X~+mc`(${YTB3m&ZipxZM4M~gUt+5{NiSPzO{r+Oxw1bBdJ+1`&{Xe=<3FUPWf~nj7x0yD z6_*Cg+quy^-0x7ko3 zvcfw|uDvXPFs<8at>SKI?@vzRM%yRCL~N>IUGHJ*!qiE2xHB~M 
z=-0|A^^YtKpP~I`VGZke}Vlj#KNM~eEV2{;#a53|FdpAsw92411EAnhgKsoucl{i<3JoHX$foN}`gx(1(OyLZ*#`D5t z+g!0w->mz)wMCB}5k4Rsz8b9Mv0j&J4~a+ihbK(2@<3zZ%+Bl%-#fAMV7n!tg62(& z;`VAl{G`!2IW9!Ybk4NvdO%V>J5h{q+4X{86ooV zYyU~5%!n6d*eN}U>O%qd7$=@yZa)^s2Vl3zvV9LRyh-mt#VJ>idKI8jkQeBqr4{y! zXwRv;o{ZrGT^u@Q#}qC}F6`BoO9D-Z=TeZ~w%vkc+PT-g_PS`QJ*cmpelMiokVeNa zb&(C9>S!TdWiu2?BJN%61<|xBO1^aGCn4nEtc7+;Z$c2ChLhvh^94yXocoSzMTg%d z`#T#V>{XEMM>chjgxDom;YbAf=(>4!X`i;3{=(g6h~up!Z$z_F5t8elli8hx;?Nq> z>avXj7q>enWxG`dud1WRvh~SzJVEG7gfI;-hviw37$i?LEzh#00xmdr6yxmM{lKiP z_`OHl)>@o;>nQ2;|KYD+8`*9hE0ebK#HXTdD->HK4vpTe$ZAAVlPIU{wyHnS%SzA1 z6GD@)H&OyqkUSN*XaF3V^zh1UHdD0A-GwtAj(EiIQ*=Uj;6OB|H`)M`VZlFiRLmLMaj|;g>(I>Rdo)CxVCuUb_ zLVQ(_Qe(M!aeGU^MPl;XXkUfEo#m+OpErm=K1*`z(DYnxe(dGB$aS5#NOEZ7Sj!Pj zQ*{|+&xrJ;Z#V1#`WyMH2gCI2SkA~oQ!Zge_tv=5uc>B!W34b zdu!2Nvw|YF>-qMHI9{eqOHh9t_xwXls{0q7XJc&iD?t>0xu4a?7Av{hc%fc;<`9n; z`Io7(GC|KBmvOl)D5r`;*|?&#AKVY^6~XLUj*SzhkVvf&7l^4~T{!6s@^)cwX00Ze6vaT?$WzpI+a&07G|#!?k%sq!zl29^IpwmrXiwj5 zcoD0>6=JwN9LJ>DUqj^9X%p%e9#y{?5gm zn$8Vi*H#P*Hwqxa@y5w^;lt2~zzEkU-)e)!aG3|6{D~|SFWN0P4;!u$a1a5TuY3bE>VAXyT=fLQ(NP0T;cvfRl`w!d{>Z{zRvE^TqRq z8H&-Ige^`bk`po8FHG*4blN44LR0XO%~KDrd%~^|^ln3EG?&D`5GR_brN)^pMA8I? 
zH~%8qWl6-?U;GcwL0J3#uL-AFS%<5Aq>9j2tdJzHX^Eu?lU+WuQiW(28?S)egRDqJ z=GAyOpoGDbE))orVO6+}$ZJ7Ysj^<|JwLqg%XK#uQ`{Z~UOZhxI zAeLvEm0_O>g;86NH=fOQSvnl@&51YR>r{X;lX$CSM>1DX(sPD(ZHA|eeDq9eDW71K zffK3HH#C>ev}xjy9L0+5?0)75l67K7{b*Y%5L!leTQ3T$*-=N$wy#1;Dl66PA7*+= zRJUvFo*2LzqL{nRazS$CElHYX4RwI#Ha-FZxLGfG|1O}aRBx1*n@h5Q{z2M1hFxK~ zo+MKJ$Hb?K&6A$6jiM0=@4|WXwvaa!7s1$OurspZC`-BQonluDdW7Kn?I+@R4|CkY zA;qu6KvwJ|q*;alMRmHNgZ(!ZvrF_K>dLrh)@EeVx{2e}?ucEjp@Lq4J;CmtHcmDy zM8g{MEM8kI5v0VuQ#b@@NM(_Mm6vw~y$hr}WXD3>cpg840mBGHx-3R&#~f&i-aVej z+ZYWFkj@#v5@dFO+Ewsl+a&0@a_rY&p9_awc=U_qr=e%~TkYmtLTKNei?jjG7NQUi zS{YL|KEPtx+|IX&g5H$8;m#awd&Kb8mXarLP!EWM#CP0rhkcyqnMJ;w8tjt*k@!R% zI=cCU(kLA}bhy&$1i}v@+kRBw=w3E#`F5KSU($hn>}8RmvDPw&zL~Ri)sryH;hAA1Ars3T_J}a$pfNl`q~{Z@6js;=;;3ON zrfFx<|1lu)W)_uTOYSits*=tpTY%zN0 z14ScZansg3doh4G)(-6R0Mcc)@TQWsO!G_FXm=O(xr>{)^fZ1kzZc8PN#v9il0#*R zvq$O)8|)9__&`Zd!x6Jp4~OB~_+pkL`Fb}!%9_RXDnt*a{wnO>m2j1qYXe5KWrfio z>iwJ;)auy0YO53NvgmOqmYUN?!r>RG=|Fb$@Cb@)ut~O6d@pL(%-yB-XJMDHORntF z-ZBaX*(R4Pv$$xF%%_p-A7K_#xX4X?NuNkX8Ys={#*jfeb_f0w=nPK2XN>m5&8657 z-4gtFcP&+{NoU%J;v)pV+AeHM{~MqV_egbiX%r zdG^qz3CHF)+J)lNCb5w*yF#=#_n`ikD@6U!rzjB_$$)aGz7FHWK!)b>snP4VF+2uu zF5O^D#iv@4UUfB2EjNCqKO_9@($FiZcMX3mf}R$Pve|J z4RfWVm9V*rfaq8Yw%SzW$FQTo@#fp&5Sg1OdD#7Fxx*vN%38`+TZN!%G(m%z1=n0L zy!_^8pKM+*)-I~>6bOu=Yx`KxBVf_Lhz-?e;&``3*SL(F=K=M8sYJeQj>VI~ZIXp^K3@cLqVQY>%y!CbP=6^yhXs{Eja zwJnd#p>4f_yd86xHA<}aL>NAgNFltaB+@9HAuck>V04veGEdeu$m75YL9Zdx2n!y2 zAw*!yAcG2j4Up>LqQlOZM0^_kzHWAnNKdLuMkeP@;&|P%^=@G9|HRO%!i{!N9C~y2 zc;1BlM*vaW3y@_`c1TG>82OV05xTWvsy!KkTFT2?%EsBj0GSlWVR_(!Da6!II#SWO zRES)6t;S8qM#~VS#JHTZGzgGqK}N2T&}hsh@tMpwI_@E9}0%<V46)1iG0V-5wP3 zf)Z(XNk1a&zbw3x$B;E{KY9vYwD#aV+A=f%TgoQe9b%FV+SKSG-6_QAeRYeC6se&= z_x?^ajfkjOO-mgTbRX_&N<(^V@zQf;LVi}1Tv`*HBtH>yN;X#7-$G7%=Wsw&YCoAt zd~YxYC;j_~7+#Vdj{Z)N?ut&~vEIJLYQ$l<@`@&AF9>+83v$>2{YeX6<;w0v}*=_vjcZ~fRdK2Xx@m+TI8+q16*lS+MzJ8z}RXJY1 z&|V0%{HOnXPFu1_BPm~)WZ1@^fGgw!f|?8ExR=*0>!P=xdI4(hD@ct~jW+mPX!?mc 
zU@N8}+^YU0G0Dn@o6QpRrrNcb>&~sVO`L1mT^i{cPiwAN5*^HOL4o)7nb9>n7wlM! z#$2RghT#~;`q(*7d)9|%)g7C?88azBhS}}c+UP)z%(iNgB;r^TEs!kR6B1!WDuE54 zcLV(Ys5%e$tgGw)*IBD}_te%}TSr^8iVXKGLqGwcf}pjwCL|;e8BfeYs3Qq`$ri#6 z1PGB4LN==!(Ap}37Da3Ii5P~Ub+7ts|L@QDT=MiUzVbSsbJw}!d!IYbJtxsTP9y2< z5SuZ{G*@bQi%HE;;`dV~Bb)6pF9SuFxu~2Xrhir(GDMF?v9|(`;ew0K`F7b-0G|_5-8I(k5~4u2_+`Tc zJ#|4kzel=_dN@QO0e(lVL=c%S?0liUDcYCHIqCRaW-uK}#?E9@fB!OK(2hNFY>mhh zQ;+2TQVeoBvX-~5%b}^AzR9(vx@g}f4sFfPACD?i>`7rHMT`NxdOM5~RLI2{yWgi2 zfm-%qIbz>f;rS68vr>9e-A5csJ3elT)1DXO!oCVYZ6%BB=M^69NLhvT73~sdaPo|n zj~L!|gYkhN6oybragmh>Q`akQu_+=c?24{ktWGqN;s&a1>io88_0}Q|lIF)~q31%> z82)cQ5AYZrxXwg)<_o-C#h0=IK#2gRYJuHCM+OIAS#fiRV1HwhDV z8U9jU6YW*b<1*%bL9g0qoIc|aSqx1n4V;x;PzmsiQc>fvw}rjf{VkaN-f3TnLw1QR z6BD(YZ1`$Vy=w+P8*c~@hU1IF^OoDkAuKhai3J!%7rtnfd z!)gQET^{*hd{5BJjGR~7*VnCK*lek2FAHpjUbJt3+Qbj9w(d2ZNGp9nQM+qQM<`L z64Ub@g3$q%NS}uEow{6RUDp%pV`D%JPui1(l{XuPxuR*cf^5`{ZGJN5tysdh4}=j) z=F?u>WQ#U9-uqg*%K z_ltvI5nRK*BRfP%G(DbZYLNcy-^4lb-7;5CuK_clVh`AtBI%|>k5$=kH$&47I0nwO zVgUjYHA`&52^yWu*Fq$Y((CQ802yVEvXg5eeYZ17vl5HGX<`vN>^7?si3A0lKFccS zPGPSGr#PvcQM{DoWX2=oQ+>7&hf#A!N)9UWMMB>GY0-7*I5ChG4_8?>RlsYwcNxpG zN9w>npTu(0n53}`_7Cw$fQlkM1nL3E*1F94i=-FaEQbvibkP!38+E%kA<4;?bKM-9 zBMyx-f=6KYG&p^*tj?y0CaXQ`Ww^>77Ytd|HOX4{iV%U3H;5(NYQZqrnoAN*tI-XT zRiky{dv&t3a<5O~T#gRy>PEu7G4hzI3T&Ymp5UHzE=~tFfhjrBu*5yY+BR{haF5^F zvm%i`jW-j26^+FD()Vt*?#&*AKUSM0nnGCdvIV3+9~UEhdwO5yCZpaF*DJ{}k$ks( zyTvmZ!BD$Mv=4DzdR@NNAxlZ=w^j=xNp|aYdn*L-fMF9)7`kukNH`W{)Opgn?0jK@{qSkerG4=V zyHqr}Bx;t+H$^`|`mI0xEs|<57I4K7-VdGj49swYBk%86LtzPoWfi+tA>NoWDk77U zak0EAdsj1h9}q^whcElng*HvVhjL^;hgBT4T_Ua?OjSgOoF_~HH;Ao9=4fUkiZwHA zXUGdbV-5Cx0J$kSnU?hz;(EK>fkFJyf;8D%3#M76Xdl9ys>t|r>9ZdBKz(%5wnPvK z@o|`L+XKjw!NuW!^^lyDQE8ShKr3Mmwos&JnW&zhsBX4JNe&AH{k%RSjt{(4DKOjL z#9)NSGDe{g_2Az?F7ObHo6ZwdN6N4Dwh$I=#p>TQ*;Mpb&E1{{+tr+XN7Zm)<)t#*3lP=V2gFR#EPlbQ>(BG11VHXk2Qs0AE?m zkNa|4Cm3p6Ss_QgPl`j{FRf#&-MI(gBH|lxt=*NxticNH-XzY5aR|^VOyXi)bMFyQ z$#s?Tr1z#EeRq7#Qu|bND7Ik{&wuRn7eeH!9SL5SoEsp=H&Z#wze7$RPglUO33mR$5{|7frtpO|hFr 
zB6j)+W{KtiUY+>b6ZWP61!H@aQ=X3l;sGfKl;`auO!bQ%DcZ;YSw(2sFe*Tu*YFFv zR?utA!(ra<*{%~2a8$xm(jhMr!GF6jMt}|mmy5c%zE^xod36#QZ?+skM8(Hhx{Vc} zI!7uN*r%es0DL}b@%nj)nkx0gufOE!UR%h_^KSuW13hx@I3g|iyEFMMm1X@HPP|iW zo|p_SdCiE*-zEk{=5j{UX+N}&4xSXp>{?-oS{gZOxKj{`5_LOV!^|co6AYEx-9}-5 zxfG3T9d2M?$Sz4voa43s6Gm+E6#s&?sCj`9{3gTFR$hK~;!tt4LPqW_A$c>#{Bd*7bJcZR-Z*e@MJdUUCPr_lm zO_(o8t7r0XHTN}W1ma|$YqkJ#;+fgDN5E^buMD01^ZwC+IRxEew>yXnuN83n28dIzN~tC zPb?a-P~NZlv^m(bg{Ad+0S5f=fwo&DqvAoPkRfkD(^Xht^egR|Vo)Z_Cw|VYQjiQW zJ;XUd2y34|^Jv9%drmNn#tDqZgfK<)8)E+o$#C3+yTfEWOdeF*Hx3ey^4oVVz}gJ7 zkIFk(Ev^ytfr9bP$UTfW-ith&{r=k^ioBICoL!CKaq>j`gnnjb*%jh=hQqkU>KS07 za)EAr2L;G|NaLQZwoEYkUExQC!vvnk^1D)FM^B`xUXysD#37BaPmjUAQkd6TkDKl5 z?O*~?=gP7x1k_^oahsd@+89$AqQPdzf?@z|R z{_;!g%|l)S$$PM4KJPG)F~E8PTOGU+EVB#5qD$FPZPt0OEfWKw-l?xSURGnr1U2=I zu?vns(i7dfb+PWENk+zeBRsnuA%@1|#`5U0ZN6;~$E%K3MWMBXI7oc3bnC&=b=Z^v zlJh`aX8c#cbH+MYzAb(aj4=5R*lQw5rn4F@%f9t5PnIHeoG}7kNU_`iq}yz9=ripN zQ482^ivmMeVq7pc+X^wrFIbIQ6(F6Q&dG6ohz=Z_FU8n{V$kaNkD<>#5kjusMfRQd zJs)D~f!2e9)SWXqdrP#0z(q{--sxj8sB6Cb>Yn_81AKqv+gSo+c*T`gA~I}ES~P!Y zO~7w?iOp1qx6VL50((!GDo}LW)Rp#ML2re~)FtNgKJ4JIu+k1{kT{5!h4Su}WN3z7 zz2w>+asKDWq@GLr+dqOA3gx8Ha)f;ZyE(CJD-sLg``&2}i}Y`0NDi07Rxbtx9xQ9H z|BCjk@bAZ~ppU^c$7r3G{^%12T$P{@EqQjVw4bTa~UIg}z(6@;g-VC{| zEw(usKMzBowq2M~IFp-e`vg30c6z2A2w@m8iQ8G9dc>Gq$$V3Qh-^G`EZA4@MB6Tq zX~JRRAY!6sgFPD}W2tWfmiqn?BI|mwY#Jsai}A&U<$2NfW(KEEwbsN>6-q7 z>>ZJeV0;Ag*yh?VL1E3!#r%tNqbs>)%M)L%wHUS3rv%7pRO<@+*}tK^8w$tpsg4x( z7I~+p zzIcq~a&d_g7E7@|CsU?Bh%!r|Jui-D&GDbLCn0Bb_N)IR)W-@cDNLs0#8BI8*kY4J zhtV!`PLuhGcv(EE-zujlqyFO{Erui2WddH%V9wiq_FspG7OXp3Im@~QD5o^2Ff0-D z#^M!tr7aV7Ir7uFQMXDl=$D@T?NBlcrY{HcB*=$>adWu0FXVrHtWOk#J}@E+C#2^peu@(?=EBk52K# z+Ow_Km7EVJ7}aDkrwOcWq5W?Qc5ZKj|LIST(1HY=oNFx-{P9Zi8XhSY^J zK)oZRCU^+vb61{9YL{eWd<30%jTlseWBf_>TY(b`2gWRiH+IHp@O-R{;1@D-iHl1c zamfeoceB~>SKFgvkWU;pAT~q52k5TJwp)l{k-uw!{WYX>Y1uiyD25N$e5?!P7u$z{ zqb;}IuP?VJPbUs7@XF&9Rb{&cDO=B*`MsH7r+(cTWQu+FuJu+WaI$1}p7F@tBEe1N}2YKqJ2_HwyT- zaaXR+s?)?ElY`?HU_%Dlg-tXqvu}LY-v*(hW 
zA?&@Fsb5ZVbhVH9;`6^pDh(7i_`elRhKZVuiOm!3Zb2l?Y}>ll{wUz<%=D2X<-3O; zoe#yN6ObHDm6N{@@|yO>6qS81iBWSZvWtbu9<`2fds?hVfa*}*rbW;TxD73~80OD7 z6;d3pQ)Ixl&dSB1Wiv)vLL}c1N8~Jbd{cMf58!!&vNI-!TrpgzTy6twov`P)cZIep z&haGd2KYYu7*UW@pB1WFD+OtW$WcU-fM>^(BwiAPxK!dew>;@T^rXP7(do8F&|_z1 zv$_9Vn5-fjWbvg95v>8Ut|n2nz_JCsB?o8gSg}qF7A5T)S00+&c=(A;Z#PXl zvVFU~A-?LmW@WSw)o#?z`mtvw$0$5bJX_EuW;CJ36@sIq_8qn;KwA3F9s{j5z!C19 z=7=CdzP-KNPCpmgrNS03ztu%zAa&&VX}vuwNL{(AjoZfp(lyeZ+p0uUV|48nnHsw8 zCmv5aQ1(Co`8@QtV*);Od-A%IM_??J&pr<(rRb!cozIU2y@w(_rV_hK9LkLsFn9J# z2EVi}^5%MHl2gn}1Qvx~i9@BXySj_r|5IrH{n*pO%cbovjJI{-Q6f#!2PKGq3z9#p zDjc4k4+sbF40>JBag3rzpubQE(PWj{`U{W+*Bn)QXW1VGDe=hEGHmKV>jVKe$Q!pr zXRWom@c5_rC%(809?E~~XE3O##v178866xq>0sF^>nRw1h_^+zXjQGMlodWr z;h)P_h^R=FWo)3?bGi+uOb0HnGH6ePj+|6?; zB1l&I*Y00$g%^8PbTMio_J4w2Q68-EBU$nb7&Jovbek)ZbcIrzJ|N(=M3IkIwyapr z{v|w@gXh=F?03Rm>%A4M`|zZ=Z=wBHTyJRI+i;#yL1LOHlewR^>&eh8HrPqCK- zy(~JMi#Xcu+D;w0=!k;5nTQiwlZX0tvtoEA-Mjq41`E=dDOhHFBpH-akY$f2^TNl1 zbaSu2gveC&KpiHi<_q{n$ZhxsOaAYPPmaB@YkR?^&|WwXyRz-q!W7OYAr0{Blfm3S z6xmI})L6>e`flu&5JB4aamb|m2-hOw5 z=lL2Q-DNb|Hh1ypXd2qnqP_f)yJy=`LAroXD$R}yxG?=wv|0Kpgy!mFxTGtg-XV#W zvP8=UnG5PICSOi(O!SIGs3WEGY^P{%1C|E4_OUR7jk5_ypnz?1~p=#p|$@d-GNAyG1~4H=~5KYaJEU9m^vcl zFm<0G0=G@6v+rN&K@5W1>4k2Bh;V4(rm)N|6^9z24mHch2f%S^sZA2-=04?796#ILwI)`1DcUr~a>t>-EjV zuQ|KV?_3K9Au!%&T}68yI+(Vc5EkPTeNS{4db-}-$ad4dejQAgAyM;$oh?XuR8rXb zupAX8?_*m^Fx3N1>!9#aVzWa;x*Da&bbC|S#Y%rr>wP=#dN>TB;dj^#A}It_qeT67 zdqB|F1JN~8GbUKvKNk=@r&b zG@*&66_Le)W%kXR;83_W&+Hl(33`?T<3*S_XOs4sc%Ee>&`SHs&7LSS7c1w3|ZLqP#10yl_5&*HQ)n*)zr`^7DuoYqOyF0w^}zO*>Lu=3q`kFtFcVjEa>Swb-n5m>($ePk>yy;IvW?@!IBvVOVA4zB=;`eI@?sy{-N~Y#ZnxN zxaUJ`_Ezeh3wsg6znh_P>37~G2GvH*pEKkKg^+}zhH;kmP=JZbO)}2-uppywbcQVw zsiDE+Q#o*|u{9y?h*+B5e7q@0L2@D+P5Flc10~iGksj`sAq&bHiPvP5M0WHh4OKZ< zGQs{Wy5pA=ohB5c&c4+PB?Z%0Gl~d57l*Ng8d3vxg#ZXoNZr0^_LLyGvBTP zgIqhgj|a-E)#-sgykwiDi6-KZqP#r(LFd~eA+W7^l;_hAwpu~ za{I>ZMDPMf#tNj^zf2rzCpTxZ&Dy&^7;(p@XOFaz0t|{iL#$e4m}Gg3CR;!#k$fgD 
zEk&=5$+sU4@aTKi+D}9xRi{fXvx`Kl@3!%ySz=d&SVMZ*jUp+G1HN3lH$>Be2L{pfPT zoix}PoM~-lwr;jxia|HTc2C53qzAK5&$d&B!tkk%%>yn7Q*5DFNZ7h9o4d|0g?s|xthx|u zmHBqlT^+gL*`1NRNDNBh!9A9+?c!lTy7$gLHe00EDIet=YY--L&c7&XcQSM|_saIN za4^kP)0C*$=`X`|{#LWYURS6OzeMc>R3q%X;czIIJI)kz|E>|F;_X;9z9|GTT=ZUl zZh*L8lB(4wf{39VWr2O;ZV;{FjvA=X55@34JGQ0CRtS=BAxnr}_dwHQXtTuZhTj2P zB6+6Z+P1eiYP<%z!tN5H_UO#T;+*u)PR2*~C6&|U0rz@#X>odIWB~dsZIpm_)euhB z3hZ$)ysHMpI05EYX}MSkcCd7jo$-5UWX{LTb^2)QBp9}R8?hXI!oJy!;zu7xRdgEB zZ1;tPG|Uq;%_KoDH!GgWF-z%v&ftDu_Y7-nju_rL1K93^Wm-I?dCsrB+W`G8cD5F#bN0Dx~nyc^r4d@MX=q%UTguo z;AaEO#QQo4|DFszRy*0wyr0OFayv)kH;WEUv1U_rz{cNgHb8u@DFrY=SBZg~yXG?4 z?+iH8DEnXNS)2i$r0sf;D`?W20OptoqdbfQy6%tWex1d=O z=yz(@+i1}Yl^FLESY!zyhwwDDMzpsmC+T<#vD(0CE2#)3AwEmS37^L=M|i4^cY4-U zoKQhFdWCjFh~f{#_-l8GgK()uSvF3zH~0{})9jH34r_;<`=&A@)>@W$o+UlY5X=!n z8L;*EM6_psSD^WJPC8iMiadg?7w}4Pub!tZatOvHRkMvA*%8dHq6ym^&x=D7VoCa- zND4`?&)3bYy&@Ln9A7xuejI~Fs=XKw5vk(WZn6!5J~nr=ojVFX z9b1UjZn|AA5B5JcDgZia2E3waZl&6CGM~4lNo_;t?qb zTbE4DD2nHCJSGmA^UNy64hw`Ib}kZmg^jl@j-QT*uOhDLVn>r%$jCCEfn|AUKKISH zh^D#&;`X3O?|M13O0i#M!|*R4pUG{wVALENtvJwyu{?WTG|homdzAYx%IQdSZ`b?m zRzbvNbf#ODfY-KXJPi+f!X1lOG-kP-Se}f-soXeQE9_FGV$dtswu^xn7uKuqb??`OT{cYTO^Y0Y2MTF!gTz9oD&%QB;BgcvU`&ZH9cl9#UQijV0fE7B8W6c zW-qXXf$lx5x9tve%Ifu9paUVWmm8KRJt$Fc8~_9Ov5EVrews3EPGX zcpLS^|Ic#)w$ChUpSr>h2>N#(9>7Vt`|O80(iHDnY~K|0?cS~$J%LH(X6J%roE#3i zZU~SIsCtZU+#*QZWnq(aiD<7x$_Nfks)ZRX2`$Z{v*_0XhdSVpFO{Rp_r!6jd*9K+ zzWJ~xAwr^YoPI~{7l%x-AB3e-E{Bsb_Rfo5CN%yJN7hM7?R{~=fm`JUtn)}c@{C7d z`nQjgP3{Q0Qw&7cN@|8^^`qv%I-4&@!=nF}ZpQ_@T6{e-?bJs*ur8CWi!hNpaVe^OLY>o|-B1iKTV4n|G_ToyQ$jq%Xj?W{7J?24`sWszpqXhm zmBR5#^uXOKuVcg@x183N0=;(_9xx4<%TCAc6+W@NVUm$%Pm_2^lZ{ob)@1xqn54DO zgpnZH>ut7E%Yf8Po@wnY0hhcumNLSAn8aLF#;sQpU)<@!&Q>W*Ifv@D+N+|ypS3D_ zX&lh&QJh}b`h>kF260nTQq0D81g1uo`#OoiM>sOWwg+<0QagJBTx2`2mJ6VP0;K2b zv4=%_=Gie>5O_KS9-2AJ4v1#%z&JIIZU#>TX^2*DOjK9t#oqnmkkQc<8!RQz?UO2O zX-CY;$RSsaT|3FsYOLaaY70V$gG7-Uc!~f&714Rvw8*q;m9G3Zh_9hGk)hFS&xJIY 
zlq$4O1*mAoD7$E~=fzW-YP(JVp>pG+Y?x>&fQB-kV~&tdseN>H$VVNqh@4o{X!#2E zrjUQ}Oe+-w5exO6t6hK&O&h~oH7+MknF99j2A_9&H^(vG17dp5CF!8M;%6u$O@q2fI(fKyqv5 zin~lO92GTkW}`!kTALxRm%^LZ)(v^KK5!EH<~e6vV4K82*!EeAZI5V|LADa?v}s_P z2HTNSupYU|9uPxgK^oO0ueF2XxLEy|q%)=iY5A3-w2P6#?V3#xcA4`yr8s3)M{uON-aCm0>M&xEFj@U;)egH`H1)gW;Ty07F+GIz zU{|_Pv}f9bt->>c(a`1+zD#BXb>+XxJ`&$Y8wx3`1->^M1~T{TW5Y$#x@{#B+e+lx z!+tLgLveQnM?HTOK#t?{sw_pcXA_yovn?Se`>j>>tbkXz>vh+5vA+ttY$+o~*nb4o zi&gc}YPH5rnFGf^kt`HNWF06&EaVz8z+Mok_$}p$mioljDfWgqUOYBNS!#YJMi_s= zaUw^R^XGa(K7Ty{;2>m=IMe~BPu67~G*TrR`5~KX_X)bt2Uem0nIh~mp?VX2w)r7N zWTd&w76wT6dHJKQMG%Sm-e$W+(oIF9(sRbxOUZa6a9n)T{EmX?f)tAjd+Q8MDY7R8 zz1&`C$?P9#J^-nF z*en5W9gebfH(_mJA#B&~UF@)EuT3OCbSrjDOuhv_6~ZFkvF7o|r&ikei{QJoqw}SM zmMI3(Vr5zvy>kS~SdZoY8{+B?Eb`a)y~PC5+Z>r+B$8zCaYtrd15B(dMgPjK7W5{I zPIH^=x)4FuM7duo7F1)`RoI*mMEmwFVNVFWf%?U&1l;62#&x-NE0 zfT)!v)|FT+Kv{R&v(q{)?}%KGnP>M2hObiFets3$LS>4p(W(oq$pZSe*EENhPiw{T zE=)Acj^0#Fwa>+LDKPMAg)2PO!O|z>Y`Sc!Jt7X{r?o!DuhWx4)VL_6*q#>6h|yy* z>AcXf?};S|^bYV}0%Uz`ZsqZ%_z&kOiMke^jF$N0rN zES`@Ztzhuk_lY+QldB=>KBWKpgh zLRyAcbh~cV3+x#o&p#EtmV<%_kn}?Pc8I_kOBG}NrnN+%g0D{EXmhXtiSdV;*to-n z3VMA9vqH6EVK1B?@kp1uB&3ME+11(OCo<6K+9G>jFpT4-1@ifaF7@YP>T_Gq<=^B3 zRd!(&jL^T8OLQxnBaVO0TxxK)RT`r30&8173M!elQ5kBsq27@ zj@#qbFOb9UvfqmI%Aj|XZ6k%r;NZA%RuZE1?Pn83Moq90^4^onjnp;C2!rwMYkh?E>w!7c4C^OE?NZsa{7y7N z9jH3cXvHDmc->^1F50_iFinD=Qf9-)#Pr@6&4PjD(Gqd!cE0V%o(C^ntHkmO zg?tu>ej6sp_=UZ@9xfsz)=joy8=yVYwka#xrqtL!#36^)iV@8JZ9*pwEqIN`>ty>> zJerS2>VYE9jT?dL@M}BUMA06UMcZ7w2}oY7=7$So9h90XI{8hp(s(NvMoz;p%X$PuMMVe?osW}1pv1_)jg5-6mX_NH`^dL@a zvqbyH!Y#$f7z@&e#qy%i_~BS4M9;=&!~xM^$VXR9XKeP!v9{(_Z7t3Ab79KK&Wzi| zqFqMnoXYn6+7NnpJeCfVp}`V$hB(v!opG$X+H-QFQ{)$YCn511V>x;9rj8x z7@H(9dqbFvF-&haZ}l=GV+a-2OHdVFP-pjuj@oy7qMBjBR0J3$uTqb~^ zvIvjD)jtF&H*y)wW7Y+B%TpmjZMBRq)C-a=zqQzDZv}XX!)~?TClOkySM2hL7`(d1 z4hYbuZIj2_8M~of>>kkD4=^eKE|0`zrG~(Kvjj_v~hDElVkgw!~e2<_@z$N}#dq>!diD%k<&v+7h0{bkAYtNKy%ssMTkLH6pm?au=D>y48#EQv7^c%_c9NQ$uJy=dK1%dnk^8+ 
zOO??`+YusYI}jaIy(|Vz8%sz3J|RG>9BNu;pCnVJ=`H!^!ak#)IHR}pXW{vP(_wsp zpo^5KS%mZ2M9n6v7t1 z1BVo*qmlD{F%-Ry-koTF7t}bKHpRa4ya#EQf?})qA8Tqa23dY(H zG030hxt(VM#e(D)d8rvI5PDpOs3QX&3quakPiiUsv0MzVz7A$s_?^73BTr^0J6n)` z?{T;NGLW|ov#ui5p4oaLb-R#%VIyMM#k=Na*$A;zo5p#Gh9;e4?F$@ajXXS8S*I6? zNw5*hJWJ z(UhZWWZM=364)ZU&nLpQ{UEAM{hN&?floBqyE#^G2S+dL+jnz%kKUsq<;t46WjbM#{jteeqLO9 zk&7)3Fy8?9gfD%CF|sB!T09@ZJOxOza$yp1dS7ify$R@;O1gKpTroTcHm{uGaX$5k zc=~$S?9LMHxykak9S~OWve^A*GI2C%j6jbi;*q$BC*=R9aJrYVQi^$`TM|G%CTe@< z5bGrr1#;Rk(OLqOjYlqg5&;so8z-kfYa?m`~XJm!|{N82+%?6x*=f;%oi?qX-}t-xxNC)9yjzk|?7oTq8lmI$dO zXY;6gjR1KfU`Fdo+ac)f#5Hb<4S3sArl;b<*Cq(jt9#a1lSt%>vHE^dv?oeS$xpSn zguRv+TV9N2!MoywfyrXN(04UEm>@*9rCP>uV}ItMSD4eDJE{OiQ%HSzNgt?kA8Uj0{eBKd&VEM$)df? zE}gsDB0>L#aR{q-wf3JRi^Dq9dkd}K5l?hbScc6{mRCFwEoUE}=8Gf_H-!vj9*6PPbbG!WUJ=7q!-wisKS# zvq4qrtnYj9^sBoqw{@Gf2PBtM8FtCP2=j7pPvIfJWx})-OPXqIU5pMe@{F*+p}t2X z+!2B_?QN{LOW!A<4?gtqSR!{8!(~H>J;o_xcX9ZDUYluUBB|2#T^{ailR{GMlJ{X& z*gg=0VvaxFWM_N;9lpsjE~c`C2}>DOWRC>8y>X2#7VQOPMf@oB5{F^$8$W)7>TQfzvCGXcBZzS{?aTTw`yDR=er8Oo)^^ zeC6ow8Np}-XqKY7&Gt%&fu%87e|a|~r(1^>+mJ6kK63c&)&!d^7C9y=*K5xmL7zAn zCvM)Q#I}V<{lA^sMdCvJze z)BmyO%ZIyIVJAjdjm*CMEo6y!+5EzOAm{=_YoC+;4@^Zd9Gq*X2r&OM#_Ez3e1m^FlKqNOMkD)=zxJCb`wUIf z7YDGrjNiUg0ew@oC;XO=7l$!UtM?gbPYby$dGgwHK$vFY2cwpOyV(@?kcxXvUd*xc}RsmgbzJzs{t^&K9nDUHlLs=*BRCygF zNAFvQg-n=fwffp7h)5_mS$halyP);|fgp9kA4{Q~eX4`(*zPShOu$R#vZdMX4S|Kb zE7I)&0q?(bHjobnsQYS;wWbJCo`y_iGUhuq<@TnaW}$(0B!sZH&}!xbK`)Pi9=DrL z^Gp&oTkI~;6c?G@U2jtb$z(`>TOra@_18^O_tQOQWPYQ>z7Rx`4Bnrf{dH&`Pcpj6 zw{*TZbV=gG%_089xk9un#n)<~GKU5EjY8ycyk@)ICEAM{p3cL!G-1zEmS>pVvqEg$ za!e8Jvv^)Q&UafkTV*oA?buI9A7eF14yF-w#rL#0UPundd-j|#wQt|BM*d-LIs+J% zgDawgD0hbWsQ7$D^lYSLhvwEyD-{#rk4>CoJ41xFaeVZ1qxH-ZJM9~ud7`G8+Y#$0 z=w)+9R%FG(jE`MUphH~U5q>qWa<_>@^~dxQ}sTepgK`Pm-x zaMfdWrDz)cg^8TImqxm|J;YMkPfQA?xb6~%vI|=`@dH(^pg{eEF2BQB^3r0l>JR?pSeEC#~fHKd2VDB3GCM2DlA8o&N6cnGW~n7KAk zh}gSk#O;33-fwNo7Pc*GMCsKE#bk)1u<(dT#2d|}G>V8;ChWgS(S4!T?wf6LUZDisG4tU1ij=O 
zO)Im$3#Jiz-eXUSBw4g1e>Q+T*^NjDgptEc!V0>^hJ4Rsql|_X0|DB)y{gnM`#yB| zJ?9CZHd3_C-QxOJoTy!bWpbM;j!S~!up(O^jG$EP*t{BBE9mu%CDz-4WU9<(%k%Bu!r}d;oIatt$o?8HbNEGW zuy6m6P+A;a!-dvez#Ey-wt*49$(|8U@=0ZQhn1WI^hO+<$rG>wnh3}7v=G$$d(Er7qhwa9O`}oq_1Ur z9EX}|_IEKf6gQ*X@?Qb(2@H6QwBqx?e9Y}jst&j8JhFD;(Ur69RWZqth8xG+_=leY z>B-|$>zxi?Oi^xDq2()(xJRaJLZil333^Rb)@{j3R%cwSmIT?|4pBzE2=y3lf zjX1=Y9w?iTF^b@Y@0$H|jr}R4=aW3XsK!pZz;owptPI~HBLqD=&arqEWecc<3hr`eF_WWHd-(_i)g%JPhgI?o+;mlUYKE& zI3!F|H%X`dX+at`rl)Hkg)l16T2W}9g)lUEn$0dEq6@=$`3~zUOhO*(EV17Rg#Knr z!VheX4GIb3siV^6KJ8Iq#?`A6WJ`i`$;o1PgYcVxXK{N@9C~jgZg1muM2Hev*Yf4A zxfl?}1Im5d1YH^wc;a^TFFY=$IMc1K0PT9XZiV#|O}a#Ng{FjJK}ulViL%^NAz~5R z(=P&jeEu@)^-E$gz=jRAF(MHnGTJ^xK$*F-sp*1b#>EJC-V20$j?*t5M`}24w}G zAXYfWo(rj%%2x1H^MD|&kl0vf{}*U%vsm9w@YQh@r~%FvpbD4`;V^8CkT8Wi(MVr zw2j;7+e_OdCN)L_n-TwFh!U;0H%E3{CfOk|eQ+_?&$f4kspO!M_MS+ZB%YJsx>}|N zCR_%CCObT@zJ0?iTP8^TisINpZVO;1LGp^_@S}TPR}UjEIbyt0kfQf&lovJ@$D8dpVyOp9C)w?y zITfbJ=;1VNw4HO=qGTpT>9To@q-)`E_KC{w3NT!f(Pp)0oop2wXdJVR$xoz2ayFpAk03*3$5Rka89g&=0|HFRyAdoBEENbqH=|$3c5HbqTw~s9!^H+!Bn?ZkoJk^7238y zV+{o(VDQ7|)+qZc>|oEisw3}y%y)MRdfmHrlVil(Bx7(qiYLC0@t?xf=+L&Ahw7Hu<<|fib$tieO(H{G>!#Qpf>b3P z+Zp$Z_Dp16(@KS1)=_a8C|I40fT^%-+bZmN-N~`R%Yx)}Y|A#g@7j*MF!aQBRoL^& zYHi@d*e{09xshcZX0mgxgB9i+joWE>>9YI9bfLOjcYSwj5vI(j_sTHv3S2Gco-x!N0N6NDKohs$@gZ)|wGd6vB)j<-r%$t-(QklZ+;EVP5kFuYLPdm%BO zBI8Uy+2`MwOgwqMT_}jehfp~jCYt7nOl7fIuVBk=4~eNCz}78jhPMg1uuPlF?86X> zFFwvkuehlrlxw7X>my9&*L3-X8ArI;wZ$P6;SiDx5>A?LY`Hx zSTX9cZ;C;EWfJuJJ)xO=qO%T8l!6&u`$T*nE(y-|hV^n7-NBXF_yB3_MBO5rAc!Q~ z0v6cf5Y|@8V}}ykD9HGTrUbMFbaZc9#H8|f_P19=BFsIUt-cZ1{Rf)$hV4QZ@XWc; zE);_@ut^oSZUQ7KQ6SAZvSjlh>(}QyNoSEg`j?Y^JO4sRWkJuT6lGUk%qxcdtT6I72eQu zyOv^|`g9bRg*%K?L6?T#yY-eWOa-$MWqhF9O3LhoknF$)dAzWHCX=CWnPDgQ^<;^f zHFl0@dN%r@!8Q7-$UNUddrW+qDA7{E<2lC=^wTK;HXsC zM_})G5&KIlnRUcKjV{GL4>2MW2Fo?jkR8)J-?xHR+B9(K~|SAZCY7? 
z2+|WH47CADLw+)jnFYxx7XuK*w|m0FOO_vRRk@Y}UTm@4c>t1iUk_)?~L zjs?g`+qONmc71=(ht)RML6#%P2utC6w=V?f-uOr*ymbI5h_2(1?ZVy^m@L4Ko9!1z z^LkCkz37<(VNpMJMOcQl8wGtt7pBYR;DcgNUM!1)`qd#awI9l1)Gc>-TA6=9rw7-nQ{8SjJraq*$JvuO951zlGyMWTi79{y~`RX%T#Ae@} zC4?m0jJ404jbZe|tC!hBV!9+6`YisIhG^06?+Mq@e+^7VU<&&zdEgmg&x`3*m17m_ z_qz^!Pdd7Ww+Z{O9Z-~NqXYD3hqfshS{xs1o$nv_M2LzDFxYQGZcfpq71j;^i~hwS6LxBjPS zB#y4`F;*PX^%`Iwgjns97utuSz22R=UVVvu5yB3ysI%C8gi#GHDtK-t;3JqboEVyM ztllSw7+QGPX@3X>ZhfhoN#`sn;qLni_$H18s?SkYxjFDmv_6;x&`|1 z+G$oG+G9pf0(RP5alG~W~Xd(JhR$5h}5rCh%G%HL2&f2^P^eE{52^RNGXvwwy6S2JZ_UM6zyBU z-qB0*=mFhcaY-F1rf;)fj{qXpb(h%vA_-|-$;-_Ys}}UCu?In`;{6a#bKl92&}C_a zQ*LVoozqnyL>?jcb=}4AHXg)c`RWjvk9eJ37s5tyk4i?H1C&x23KClcLqBtRyKky> zOZU9qZYnu4dA;=!^s+Gf!`r(;F+#WCVqw!%D@{h>`F+6%n<@rP&W4qNIZX)Bd8UzK zj|sS>BhVB0k06;vk35#^82#LlaP-mPxs_$g&KG7_wpgJ=?0Gg~SSSMy4d2?piEp6W zmd&(`K%<6Z#iA)G*wUFTh!mLX&a&47;C*_tU6bh{d&>9BwtED8eDHziT7fWCiJnf` zn?hlqk76Ca;8=mR|A^~NG%`D-Ak)4R>tNjaAnO5))4@QN-1zRryC&HMf{34Jdcy7z z?Vptnv$20>^TqPnl}Se?N_N__;&~ZEBGaCqg*=X|;{ItAfz*xL_yW63fXq{AgW;n6 zY-R6!ZbP->-pOLDR{RrvSe@8ZY9ECx_!*dHS7#BdrmJ2qXPXZSl4JYq?e=J(6Ag{F zS~SJ-+O^PL3TYUx#VL3!3h|^E`)M+$7vo_;wr3a5h+Bo|@HuJ;U75r^W%c5P5O);& z^3)vS(gb@~QVTS^FiYndy;Z*2ri=D*l~q)1a|AtK7B%U1W^M;YnI9)D!c=Ks%7a!K z=tIkKB`4aOnduX)y(MC(Y2{UFmjDxl7PKh1ZP8zQOH9r8J?-K+WHg#LMmN&scAGd1 zA6jMaI-4EBg)sYs=FKW`TNrEnM@3;(o+t>2p$W&bIPWj&5G9&W9yCYFsBNHB{D3m^UZK36e zB;m5x!$spE=mo!2@_=%R zvN-O#TL@v8fl&)!ZS$}gE)16+?4O0bTHJBi)+D?WO9-zCO0t%X10#1W^?>oy5eyVk}o zPp!8`p$>s#_N*}FMfTfS5oFsN$r#*fEVM(yUZnyaa$f%@Pk=bOcf+#EE^&y!HR)Q` zweKY(VEJw{&Xs@p5E1+{*9(*A0-DF8cC+{-40`wd1rdduF@LPDZ%l}Q^B%p3EfYL( zN8`u1boo~9Xx1x)4$jo|`*$A(&`FV{lO6jOTB#VcKyDFs3v9EH7Zd%EZpH|nhmXx| zt;n$p9sy~3t6yo?i}twT;AWUO$b7IgS~g@0dUFXgE01n_*5z#CrIZ$ zN;sJxU9i#46YX)+;*MPNy}kmnt1+uRfwImiquqlRxGbef0WA+ z=2daLls?fSFMARUq@$Fy-J7gf&|8DO9#;_mAD)O=yN>YOG1O+dFm14BJ-do60yI?X zmK^(gG9vCW*4Y7Jy5M)i?Y=3HE>&b)ah??@7J}>4rA|{I!mLe^3Fvr{=L$g1a0pIJs zce!R8>n)CV055dy)&L8~>cr_=_LnCO6`ot 
z9PD1s1G~osXz(FLwm~F?o>+BEv@POLD4Qu>S=i+>Jt2pnvuscR1ND^P`dJ>rb$Y7Z z8~`Vk)mAJ(rfntj+Df9|y#?ZUGjLTB=l92si{%}YrFVDt&jzbH6V}=R(a3;y)=CbN zE}H{nR;3?$-)_f+s25wWH2X+^n0vOqFn!M!TR9g-l`h9(rR22rXM z=3~l<;}9q|D$7QTK^;c&8V7|Qq0rYAtdqCdX^(qm$MClGW6>d0&ss|Dx8jgmCf3l> z3~?{-+TC7F#?>n@wqkFJLp|G;J>Isg$vzH{Cq%lcJY@l)RGYSxInl;hiH3D{hL}{l zvusw}B0#k|b-%3fg~(|vc1lE3!}MH!8k<8vCgMFUnvQyP2j=Z~qGex*;WgMd z;l)i%@&gycaUt?#ef%zAkD=>MCYAZ(APOtenYg)GB}fg9u32w8MALJFMoT~84@*2E zr=;^=S#Ey}kSYwsvA`Tbgv^r8`!WGCY2UuthAoA530_=`y3}G}KH$6VuoWU*7@d7n zffqvH;NJF9GF`Fm!2Tml0ld*t@b*f}=VfEQeJT!(*?W-vW)&paYB2Y()k0)@Y|D777R_gqsN0w{ zkn^ROo>%k|zSO!^!l7kQAUX8-BpV^<^&Xs$Ypg}We!8-6f^J5%K(!U(dN!ENozAu2 z&sM_;za%&=XIPcl>1*Kma39G9(zOBlGc?N-^pdiR*vKCX!H3t*Ia;#ZJ{F`ZiI!^X zvbG~CzlgK!Dq+I9^K^NgXbM5UQ^)x9+8<(sT$phqzSuL^XRE?w;n(UdI}&1hlJE(UceC`!%YF#6p9-`={K$==SW_AC$M z_U*f(Ny>QU;TR{1Ys6s0@R6>wzXnj8&L^=~fJ~#e#O3y}AoaVguU-6vr(u?zWRD6U z@F?CUOcYHL+KlrQTN7Y(t2W7gu--$XJ^w`ev7onLI_AvttY-+1jm+osvXTJ#U6eQU zg@R$U;|W2V%7{g;9A7X?hA*eiv)2^KM|EnZy)NV>aD^SDWh`YHC}wLYbIKj__h$k zh05aU!SXkQ5qNm!7@H~DW!kfZD}z!%x^DvC+7A1U zptnK$^aZ-hyei4*i#JJ@2E)WbGL4;aqUlt$^l+~iz-W07d_S4)(1O*6maMmr#G$|X zjkUshNSBcp3S(`GuxeCSpQx+RWyWH0$hc5WD;paCp?f2RwJ=#85#O`PkMpqh)JCxE zOzFTtTI+6L=WMYN_}Gd`7SZ%!UVeNGF4yDsm;$|1v$(>`vPLl|8Sf{#)-2$S*AEr7 zQ=5R^Jo{@n8mhOSisKPR6y;fGL5j*|NBX-E#2hT`zG^}8#V~)NwS;(wAFHvgfj(F= z&)Py728Q0R^=T%JS1?nW2r0r|KF`)B?Ac~B1Lt7r+=FG)50-AS#p0-(BG%6P1zf!2 zYiHZ}EkOT0!fsc7T)tbc7LWK?>&v#q0mKS8k9=RiOS@gF2`To|tsR_<)@^9`*ivzP zP8xixeI-O*oWvB^1=|3$#lg~AyGXPbJVYv;(sq6@gttwpwC>wI?)BW`XNx8i43DnC zFd4unM$RyzWx!Igs1utC`8;b8^bX99$r{^}Aw02Wk&N2x6^vvWQ(*rTU?jDb&2&TA zZ6)K|N}8?n4)|US&zCV{oozQISyD-rz26>Ukrn<3i|qCQn7`-Rx&XL{iP=^GmnbJ4 zJq-KS&i}{Nd%$;HUH|{K+M%tjRnOPjYU{4Hir}_RHp&zbwYJqyk`Kuz+YAu8G8kbA zvX?+)Bm_x92#_#sgVt(A94KIGKQSyBR%_MP4*$>B`&{zr|0h1+zRq3eo_jy{eaE@y zcj@!L`2$j$rN&%AcaT?!G0P5s|M^m z3aJ0e=GcsoiFp@AD|=bc*FWx7lJ(;{yW+P{84Ips+eOI7E>A{h+3mtq#0%+|Jrn@` zClfYPKpl$1M)aW<1c-j*WHvev|E{x~z4a*Y`mRPKk#|Ma!l{g9m3j`&Gwt8z; 
zn6~R)O{!rV?2NX~va(`~=bMEoi_c`Yt~c4Og2-*eFdL^}=%1BL)MfUpD3n~nqU}&B zUw;gLe{CmGKDq~@;+U6*GT3)TBl&xCCR#s*y_$i1jXx3e@xd8aQKp^tTxWg`{<2(qK-cWXr0v}2%j#f zQ-B9oZj(j8Hw39SkyC2#r&6YK`T0*lWJ<}!oC$O2`~FK#q3okAjmMYFu+l$z;d)fX zL(VaRaQ=8K_c2=#;&j#f%6QutAerUxTUY5|2)Kh$%qN zXlNE#=`Ii~Cz~p4w89L5J=1K7g0xe+7L4yreZ^DdGH8}4>>}u$ImT)PX;`G*JfppG zo~;#y3gB&2sXZs;Lp`2R8g{F%iKv0cr!svvVahu+Z~UQ_7Q6ISAiSbW@r2zhz~@2J z9Dmj`4ZR@}Ob#uuw-qF3WSU~T{auiHdDX;Iw@-y!B)knOu{E!Osq*k_zKXAVMkYS# zSq%$N(;U<3BZ6=csqtkBP~EUBt5PrsS!$=#g#G6?c%i}!9VM;9kxfnf6K|n6@1|AqCHT8V2W8v_7)~Za>b;e2+=Q4=)jEV zU6tJ@3N;>la=z^j;X_Z&uyfvoP9r%xlz;Hh!&udMVCq!bLQQBh)JKyP;w6@2E!!fXCJJy`qx>~vD zG`~cjxM>333(3Y=_MzZO3ou+uZ6HUqTDA1O#CYjEN{ibnVV9e}Z~0^Gf_d;oU)X0>?X|cxu3`xO zXg^dRTBBmQZR_nJ5xgCFI4R0V#Se*N*UzS>2zk8D%CIFOcs(cv;8a`~sc@{2Z<_=t z4bM+gc>jLU+dyw?kJuRbp_?E|unXkg+|UU@UV45W#f5Lzin-APV@$v*10_ZtmbTbC z0WyNRU3a}*@F!xaoJT{&c2@uxXJ2DK6%Z#papRgXP0;&P3SHq9^L9HdI`Z&mGb{?u z{WAnaIu^}W>cxl+)pV}i=C-wJDl=uG4FkT!bF^pt%p=*+S@ z)EfBWc2A96h6#49fODz?-dyV`0x~$ba;24>Oxb{v(rN+!d9%q_o65(#Zd5pb^seW- z9)qHT6owxh6UA(vfHRAsqXKIcrkq|3=eQk7#WK&N*-tu%^&aWYE4@R4&K`#ESUsE& z!As-Fs@^_NCBT&t&%I7R;05r5oQ#3vvjjbTK_SKny9y)tA$WBPDd~J{=h`B{uuzH| zfJe)x4@89l99XQ2hE>0V5NI3j8OPbL1&Bp-W3aVbVl^Tlz4odW+oZ5JWi%ci4hZTq zvT?0!f6I6EX1nSTL=94TI6)PYZ@{C01Y5&-fGVRE3q+#M*EpoHuN?+>i@1^=V-E=< z&yrGkN-hXU1GCuLgvi*o4ci@!M*#4XtX(SWFkg8O$j;x@2VR-R@%ntFnpUj-?8y+Y7`w&MyFg`1SF)& zCAKw$H0Z5yd&kET#Mn!f-lFCo}(>8k@xB_6Jem z??~mggV@yHpGu$y?CG2whXk=)`7*cs_d83+0D0VI3R8Ob3?`uv?xkc7jbtu)u1U+y2eLFYwikPe)FHMYwa=ZC&owYGTMH^Ji4|;A_{y(sJ z(tC?>(<`EWBTIKJw=N$6e6UNuHi38V+eC7y?0%Z5EM3^8`T!rrDT3bY{SA9pv1525 zB#5qSC)@N?g5vn&T=v_?qIe~8_pXk+?C+h07GrLztDt6w&5P{O5XD7@tU}}oIymy97!xEU?gY^#uX2dg# zxH)(*K+O5bb5&9hA?PvsfKR<@ot9#p;M9M3RB~OF4ON&?I+CN|5?d|=6Sz_4(_otg z!^EgVbr$T*|IKFqgzmf#h~YnasIUgX+Fi|d=Vt)#Qj9A|+iZj=l*(i`i)l3>D4FN+ zc&aeOA>ZzKX|_DT_r}jWxEbe(0dgz+-U3YIhMYwScA^~u)D!ti$G z@GlZLmzlZ_zD*c5B1e%GHbRh=XSQubjo>jM=XzuUNB2dxK@@L1AAmA8@`wbbyXQkO 
z$rVdwoAsDr7*R1kR(<~s4n_oYWVxBesAeV|vdg?^6b~1BTcR_JT}n%>R8(tj`Ttkxp)!sf*{4B05LW3ct+eh@wJP@n&bffU~!I-tHOL z83gOwz?SCi5WcQ!7rXtG?ml?6gOk>Ji}e!;F;M?c_yqqdqyWmXv5atZh~#zO%k6Ao zmYwm1PD-+IPI6tjT`7t)9obb|h4Bl!Rz!qCS^az4jY44z>$0OY+Lbg}mgvqqox9Fv z3&T+K@+-rZ3ervtUFF)=5SMJKMpN0|5=63*IV2W|_JIgNc6;n9D*17@o9rJ7yNCy|6gcNJpnq7qtdqI1i|I=KwNJNwZgqq zM4xT>aWt|8+HA8#^EyXyptn811KSoKoK-Rk*DkD2?R50IBHn612?tdTJk%Jf&qTAy7QkJV6=4Sop*qY ztm9bz|EnPK>Y0^@kF*bk!XOClTkGwzuR`=ee1f0!!r9he6la`w!7+O+g-4>GVXvn! z+aDf^pH6x>;)CDHLGlfPG-NRQ5Br6HmmVotl-cBvI;#Lx)qI;PNKdd&!u~A>|GyW( zk5u>_RAPlq27E9fY`a2I+J=h=ezXTf&=+SL=1~2t>i#nzg(hMcz#TzEL^z7&v&=~- zg>%I6GlS1{o+xzM!5!o6*CCA2Dyvgi#p^^z)>d2X%&$T6-X96`Jp}odre)iM3cAoZ zFv+o!kdn)R#6bJuqz1QwJ6DYxq;_h~ifVWLw zi8X@WpuW17IwXu_ll3^h;)p78fAzI*K=hRk7B{`z&d3xvd9KA-Ok1)L4ELd$KG zaM%e(E?%PUc|K(6C2I&T1i+@e(%uw+v7?+qA6HmXsjJc2>5~w3q_NUIQ<&*LxoVdG z#KWk)_FK?>B)H25yGs;rMj^LVy;2E=6-MUhzI8SUd;-0nog6kaMDQkLCPtQ6H(@U` zdbPd7dW%Af^H`|b&jhI8Xmi=IWwkaTl^d4{`IZc6`(Wk54u;6cDFW9?pN72emd)mW zJN+Clid{_$-=A{>q+>AOVdo1nZWx~IrZq&ZSR{?(jZ(fvJ7TN|`dO}6W914v-$ikm z%C^r$@gKT$KXFsRFS+_$h%TTk-Ye`1P(NT)V!m!3%Pr;-u0zfhq~$mWFR_aQ$V*_S zuUAM2Z&GOfogj=PYbV${3RA65Nl^k;bs;^LDN30i&;CEJCvp@w#m*CS-Z34FuVi~X zP_!=8kz}166@@AeFIr?1&m-)8mX^S3!)wA3pM1FP__hOX>FVr$0q^e71x0wXn=OL3 zzo>14?GN#9t%&Rl)Y&H?KGI6S%HH{&T%KE4GjA01iscW)u@{f;X1-t_DuEMv4jyq& zFT3tL5WVP zgk5q>mU_HBRutsaQ9j?chS+5Dis(+S(YA>~g^}5LO@)6F!TIYu+%CQV;3XGA_W4blF&VDR6N&D4zVZ2Ln zCW(Gh{5&mdw&z2($mi6#7ZHuL_q6bZ!|8~9PXr3ZgBL6ccni6q$D4q#7q)9QPtH(O z76o}6TEZJ=g?-@W;LUGOh$_)0N?|W3k&Cs}GcI;qT3pJMRbXccQ%pWz?6Vd2N}{)B zt#+v>@Wi?$*KQY}mdH@S3>zWnQwZN579W~I8bSt4^JdsS0UyD|(MjruA(l1qYU_U~ zu|Ymn(J@(uD3JuTvJp6$ImUQf9%2h+U1z%hyy$TZKk`F@tR$}bl?}Pf3&EFciM$8zktF7}^P_GHr}`#eD1A+mbA z@d}bt^)OVGeyp(nE(hIj-G$UGleX#5sz-o)DrBSn!2pl8mgxapR)92TEq%}!pbngH zfA=Rrc*?>;U-b68=MNzG=#ehCJsNQD(xQR_%!$N=H9m4sII0kkOdIQyjoUHR2&7MX zVWaO#4-Ob$V-Xei@P|aC9A4#2hR1&k((+^D%dFrkkK(EO;mtKx zCrBSeALH$sLXL_;;)84F+nGP~SR{ce`C6vTABy6AI-nq|mSUpOgfW_8n*`veeaXW1 zC9~U?ENx#p#ap=PzIW5tJ|o#<3sD 
z3GW>#Qgi~!;GQ88rA8+k<87v(%bn+@nYKSf)08o>vG&&#?7`0dZ-Qz<8WZei0)d12 zIyr*=*43R9tl8v@DY5U1LI&B!v0eg*tZj3#%~6;Epe-vSqR##(0*vIw3!}s0_K-Li zRgMos_y)8qzjzJl=2qrn5mfw~Hr7BG}Z7-hJRZS435Vrju3%y#5sh=|}sU z$X>e54>>!^t0Vj4%X#f-n?*$ae7v~(u+IdksQ}|-*I!H6`=VzNF9(JRyMXts#90(C zH&PVplx?9ebtR3D>!VGTa$!DYg`FcJQs29gQ_u?qs4%%=4G(xQEeWK;Ze64Dt5})zT=~3x=1i+ZkP@R?u~q>Iu)c|24?B!~A#;}-Z?qBDc|oJ%*)}(X zk5skViV!|HtN7DFBF7fX25D&FW^HUgJ%_E?A{waF}Zv6=xO%_sCV0_Jr@Ruu_Sr1 zTPBFa4y~`X7gHHW@lxzB0Y*E|4R-cTUXN7qMY11>02f$Ri&@VA@c))?y#&-Nn!%FR$a2Y9r-wo66xX{`?$20|)Ei-9SYCFoMzi|s4hB+QRx*n_q` zO^0B_-+{luG2*@pK;Cc9tTbv-1Elg+=tw01S;)7bp*}*zN{XkeHuU9yn zfyg0zeWcdbEU(L7xYctZ=l05(_6yP8>U92AM7+nV$2 z=#L2_KO#2SX9DVn=IwUoZG@wCZ{*O$z8@g3(|MLO2m`r6?*O; zIWv}}g|Ui3N`rdXrMDBLLRQ-4Rw)oVMIE9?9FuIes2Z8ldFDXFYC|4=q1E;;0T_dm zUg#j7e+Lkr6Kn$$3R6wLo>rqEsqkH;rD~m^%O%>4Pq+1=gr5UXi|rq&6#F$_qA7AG zDSRC8-Me21zBAieifofWG&$ja7WL72wo???;F{U~5Rw+lyWVjDA7QL#bs@Xn{w*5J z;rI(Xa`x4`fIh?IqY#7U5L5Y0B09(IWi#8$)>scwoa0QLxjrmRDSOwwU5?ghZYr8( z$ucWV;SmW|%D)o!j~B0xO5%^BxFwQT%uQ+J?_!xXi--_&vTc8=h+R!s27L5xqM~MR z40Cc-CuLJaS!%x(CE|)-dbzzIh}a@;$ZrOaEd{{;goN~CMr^cQ-kpR&PP%JtW;xzX zG;eWnY-HSWQfVTuZ=9XmV$rC8yCrNz3xsZ#ukjt$qeo{E!}Ln;m%=_#v3cf-bgl^G z-#3w&yivgO?~Z(i*tS%D&TLEUl%Ad+lkYLKesM)4La0)k*b4iKD2$uYiJc45&4iE# z&6IoHQ2}aS&za@3soZFi=m_zEC}D!xz&iDmUYG36SFYC9-XRV33+DKX7?r zLqn42asD#~`zc&3-9}u-PbWvljyxEem4D1;PiUAdSnr^ZR*H z`6jWu1^9KvxxX41BJP(p>n8=gaaVS``6_D@hSgsEF$n1e#ENj>eG;HXQD*1dLriGl zQ=DA2VjdET8nI$=a%Gi0dJ^*KSU1tu2~zvPs(IF`Fr6#ky~VagD2RVYSRegcw4ewl z6*vy3qj_+feXbaQnE9f;{jG9k+`Gys)A;x~9_h859y=H-|^u0`M^$U5zmc z!!a&;v<&@sfb`cw)@Wz;_t@y%rOJLH7+8|d8RJAJ_#C*-R*af9eZAG4%+%PBY^<=~ z3Nkr#R8H)u*w|6A0wXkb!2pQ9LFNmp5hqJl&32vWOfG!7h z%OBtz*|6xywpF%JMEb5511Xm~h7W{k61HL3JlKaJKbCs!oPk7zdpb^vBaL!7{@O(4 z+Th))`0yiap2I6h8zzc+)bGX~67hImZc zb8AYhOu&mNmiLo?3&Y-#`t3*RtL>uuI-~Pq9B^GOOpDRtm0?bv-vt6(VV{_t%15aj zbYHstSC|5I9>@Odj-j0erj@ev?j_7GbKnTOYnZ2G8(PWX+C2dtdaC-smW}p^APEQb zv|Cxp?Jf_b5!;q8o&An-`$=L`>qV1cPV`Ib5j~rUh5s 
zWv?q3rW$Sjx7Y`wFi1E>X2tTA5g`3Y<}Al(rDq#7$a*SBwvNiD_4BJ-W6z2LYuGfw zUC;xbt9|MgYgRZ+D&^Qf++-c1z${0*(!sg!K~Ip}QkmSmCb?yjO%;Vo)42m`QCI`H zaR)zo>@I1oyhB8IOe-zHy8p{UslmU&-Vq_3{zs1?m~|%FzeT0m0Vp}bW5Gi}pUpU) z(Cby6go;MRN9vZ?5`|r88F6O1Wx_#H&6-*4L`a>B=I6J5LM+Y4)I+(QBS1|I#r!r5 zUzihX{}h z?S#FmIF~i9j;pz9eDvo~-~#I?R;@7AMV6E&+OvY>Y+KJR^&RqA zMlxg_f@hD5V?;S80}AA;@HP=BH=7&pehMoeKYx~Wi2##z>t#}JZGe0Q<*5BFL6-)) zy!!Nyemf8gw zM8T1)%wZ|1ry$w(H|(pju>xLd^ts<`zY)dj#ko?IC51x+aKX)jyTv{gE%@tXz(Uj4 zbpzWLRupmKR9oV?MR{ZFya1EUP54r@?+JQoLr*SU?h(zqUV5FZXKY`{#jj=<*7m`Z_z z2j8+A0)bzz4h>rE77_fq)xAffoe?d!vO-F9hZ>@8UoL`|jR&zY_KYxt?7D7UZQK9E zN&Ehytj>CK=tyj+iGa*tzZ+*ZT?NT|VCyoQ7NU}Mi{*`Er(l>Y_1)6b!`-5J)u_g1 z+sDFMdFE%Ji+gUOGsjyqBXKRZM@4hV6t%5?ZmXqaO^k}8A+cUD)74+gCZ1Lt+1_eH6sG(%J)#>baB>=EdATiG zV5W)cttgIXmRe;B<2iP*HHISOeG1E%B!;i(ySmxpIY1vli9Ajgxln8oi4ma>1iv#} zzw)8jCZZ1{as<2K-ojb*q8G`6f_*qB?{7jXi}PC{RO?;n9R=9 zh3ForQKM}BmLNrzB+!2eQPHXHc>BMC&YWpHFv6Cm7kqH(69-!=?1li_r)-ct{P}_| z7=B=-d@M4Y8b`QrdE4ivMC3q6K+xlh{!0h-ZkxHX)lv;MW?0dP|m zvnz^7uOIpQ$t>i%JpiS4+GMjQZ5GRL&H1UT_hz!OB9kET7}dg+;+P~2re>s z(L7mUFPF(9gDG^1C}d1epw%-+Km!N|){MJ`5Qj-JuAT(^!;@@Tm~3gn6_!`RrBY?CA_OXFw1bt)msdR_EDd1ulSXykSmpk0Ei6ax1fr6CN3%hd631h6U2;@zS8dG2o2)Iyb zBV5+cP%k4e5RK-K=Ewr>U)EgxGP zk`#NbICBX3c-mEDGeSa{pGXAQ$mmIDbsm7&za91T;8f}c|aKj#RzU>i^%yQiN zJ<_n&eyT9VkhTCr1LIOjODGt#qC~?7H+KHm@QYPWAU#`0__#y_ALEhpphY%U6xvOa z7v|eKA==IR*z4@y0@TJ^mC^WqId77u)NiN6z9;0YBpcUSS3&=I6l>mmEESzyT57)# zbO~h?#S2Spu?YWjMx)c|a$6p9(IbmE)B7mEBRdux;Q7tDlgZ;fj{*l*nmKxFdZQAlX^@PvX5mFIAtnf24%nAcDI5q9CUwMtXkM*m(RqrI>5*z zdA>a_sIiB=Z~JqI<2htU#T5ISp!Z)fgO47)Vw#tgY^h7O%(hdW0Ma6c&T39ZYwThX zV5uilWn5uOW6QjV3@f3pDmP`oxx ze--r3Mx~MYiFIm7F|vqnN8HYvK??7@VOa%8e2@q(X0Ea@`H{kT`7Cf6gkg*gX|4TM zfa-Lz@M@KPAjr=kJywKii~&!AymF>#%q` znVuef2`P);FG#D0+-ol==yw!*pXPhK?(M0v=wmO54tv=4Y_QX30qC}a;}_Z)3X^wm zcRNo(38^Z&sLd6WbXT#X38CMqX1Q8v~9Dasf5EcDSUm7 zm%@&QceDaDgjNW{QIK zt~_=ahf|cy+&I4(`|4b;_`v4bI7_s%1EiV^cJSX%K_)8uksu;OZ`lo7-w_BT$A}7( zaFqz*CyB-<_Zr>ic^)Oh00F(Xe2G01QZk$2t&{nzQ3Rijc?HrQ 
z@?G-dORlw3=M&}i@VS;5^KXbk;-07<#;w1Q3nyBH{SO}U+yaaQE1r5|&sAHg!Y=#% zJIk#)K*l7zoOYW~5+Pj7Q98PK(=xwyhG7a<9ddpQDJju{Nu~iB= z&z$pd*J!Vc;-jv;a*g}nvo}Mg;u3Cr?IR(LaT%Q`w=))c85ld1^*x^EeK$~$?*MM7 z`1Rc^3T->mfMGs`>9L%69#%FOk+D>A-l{*tmDD+lNa7zK*{$VcDZ2)@i|Rl6KDn5F z<3wPPNU*{&T_$JVLj}D#Oal!p`(8}tVU|LlqR3triGC<$Ymm~gb3tpuUcl^s|mx+dkbb9tY2sk3zC*G*`gy-s}R9EEP8Lc z-d+|3hB+$Xxa}`OUKwv%a_rQn!Ia0y>elsE8!AXuIrRO53VTa0y}`0W%!xUxZEeVu zlbc;_>9%7~#9SrY*2_;|lUP4k{Gz=FvfTak43*c+rvPX_+ZpA{s@@?(ZYD9!@r` zv{ef8b&5Y`YZatMdacy{c`^-Wf!D4kBI3NBBQU!qKv>51ggqkYor76;_8X6jKshC) zIR(}r;PM$5XBW^GGRvp+ihA1rfKfEqc#!pGg$UH5cj*fi_L~c)49q?ib8(T+C-BsIBGfG#8#EW( zEa(lv%v6!h3eik`ytuP%LJG zuK+{WF*_j)lSi9sJxu=S>oZrcd{1vA7VgNOY}#m-2Z+MMoVS+S?Wx#IBsoNoPuNLY zu#N0BZK#O!Z*d|sKQhhD=|-z)F1y`mYq4_LvJOh1fxFW6S!G`qNryFrkyJ6SrHhX14`|y>k0ZG!kYA%Lh6d< z7G0QpSI~LF+$V-Bgq z&sFo1v%J{)Hg{HnPw=jGf53yX+P5??QXdtjdx}`rJeEp3M($YOPvIdk4wAmQ!P9oT z@p2oWFwAnx+_qYt8;U~Yb6aCJHiVNr@!b&;YG%JhUff?6!JEx@r__!K^YO*_+;|?% zti!tgmp4N4QPFQy%y24wvO6QU(=p3TQDnuNi?WDT8B*%fWKo?%Z_e%2D7?A zA8wJU_supVB%skf@Sk!@3kiJfm0UH+9WU7rMD*_N#i4q!brS(T5@T7+^b(-V2gW?N zXmlc8f=@(ITyEJq?f+OgS$NzDS#FiwIFO*Wi=c zUJO}_dBAQjg}9E2){e?5ds&bgJIbH1y$ZXuxT3(!pRkK(SRp<|?e?u+Zrl2d1RKx$ z1o_bp?`0zu^xg^%#L}}ZPefQ|XOn3Q1iWy(*WPS3Au@Vb)@>V+jAvO+6Uuy;i9>N&Z<0p1QU z5-dbrCT=eaQ5uH=^H9Kl;91X}tX~&RBI~R~6cTh)PUxsuZchr*vV5+I>;(bDBxM0U zrxR_T2(bR%mYH_LZwVu#A%kqPf{@2xxNEn<6u@oH8hcB?xs7J|YTFy4cTS6C+Yteu z5HGpJ{viY>$LCGZA<`Fq*O|A6>!_bEiuamcw8pKc2-KU+HO>Tu$(>wLEfXGdLX<9( zn-oSug^ApR2FmBc8gP1*dtsXcU8mb+0;I(Bc9G0)PY|Y1x@Tl=sZ9!Pe(+s9Dx~sO)yOu~)z1OdWec}kpAc@_nrBZd zOu715v5soBgCZc6SD$3J_qPBzfV|13JntnQT-JPWMwN94k`RMPCH6sxLqaEJZ?&)g zo;YgjUdVFWJJkQ(k8U^7AtCiuqTi6esEfB^|SG{iIPNu}`K85xT#~T66V!i4@KfVrF%3zZAt~5SbXY(jNM$%L8__o5y#g26b7sxNAQVxOXk3!0;es!CD?`6+* zuzsFB62d#zrCEl;h-&8~8?7J%Bf4GJt1q2L{6<7?Ct8#F2(dyG@B2P6p1`jb4t+1p z5Y+VSL($;z;Pz_!D3zXPa)oy4F5zh)u}Uja~Li zDpu>73Bpu!;;CnBdMb_&#!WUO#9jG=OYL=qy~>e^v9>QnWihjUsxae*wcUCi3A!VczR&KSa=>8mb*}f3X@Epow_K+ZL9Fv>NTyp-a 
zon^(i1N&cGtd~^Rr{a6*9^ejP=e}Fln{A9Bg++>8HdzRTL|s%y#{l|wgUu8f$t0T= z+8l)$%&3BHV zgpV(GNj5LQ16#`vtz2ur7j)jEmF7D8mnhB(6KJJMOj`DcmHBJO7 z7+PxQ?(m^r44p6Bj_Jy%br4sNS+(xPf^| zkU@fj+ah~Kz6G~$VM=>$Q9d^%r@aG$JB-e4u`dgFaTuM$u2Pm|hj?B}#H?6=s*@YG z+ZX;qn3jx0-G$T3uLhW`tCR7VD^J4u_1e8n6ZCeXv{Av;`AYkbXvp@tr|ervf-q6o zw!YY|5TF6Cl)o}jM`d=42riIl65C|AiQ-Z$q}z`O(jrFJu%Y&~b`VVF?wn;mRG5T} z<++%2|KYnp?=-HoS&3k|NiG9=iw-Me(rl!HL0U2~R3aP=^Qw;W3DTSSO`uR)G_P*5 zMWRp(8aUGZSuW^}jb1NsUu(^xd9k}^%Wg@?l5ClyCFt)3eXI^o=v@DTj?VNW5`3-8 zgriQ!501h}Yix{z`Z=kDyu(P?7Y=l093}hg7YXa97AeEG3%THu^|P$&!Op~J<22Z# z!s>?@x87v}-VD^H$MFLxvz?;BXQbt+hk#!^k|N)4=61OI9F{o37aa2$n`$QubyUx1ktfet34YMM8@jow^#B==CEg>PfFss z*;);Va&jp9PqRQIS~psDMJEb`U*7m=@4e1yLLR^P5@0UwBmmXymy zT9+eGobwF1p%SE*j?byKj#Sie6y`q(@Zj{uL#^BGy7x%wqCzR9!fq3Wx3_1$J=+Ef zP|T6)3`!GG=JL=SyM_+;2Z>RGa`qIR?U+O zoQFh#lSn;if^8J^vSN8@zCrl@acGR9=tWu;`}1o=pp^EqMeSwlP`3Nu2~o+WIWm@T+WSNWRs8YMk{sidQpx7mafae(7w7%Fk>JAh zneQo?mC92xQ|9uEG8y;Pv$*E*TQ* zehv%2tL>r>J^sjsafjwEut9=8e7P2xY`+$!*2A^+Hbr6YlF{rr3sAm%LL?t0nljn# zu1ryq^5|A;FNor8+B4Zs`x^*;cn2cW7Iz71a6V+c6m<83?Mo)NFWKI{WPaor5KAI0 zHd?VTvXfKLD?ajYHY@pr!jzX>RiXZF5v1*XWTvZ4AdF#oifpjI{ST$NPOldKAqwRd zpvrpY-wAu;8DpC)TR02}-Suvwr?-f0&Y}wy=Tc8AD8T&H%2eik6Bvu{3e&Patgb&M z=zK)0YmB6d;-bvpDZI{_-W16z?;R^fJ5Sif)?=uh^ND9*!k)?@Knwcn6Gie2!wT6@ z)Cq@aU6w8$gKTT-lB>&Yjwnd-V11Q`sVKUw?)MKOXo)tG`LCpT?S_ALmcn6T zaY4*RisG$c*Wx-zPm4yDq5_T=w+b-F$#Jjl&!hcn*Uum#?CuyCQIN!_DP-Hv1&}kB z(rY;{lHsq(BDqL-cV??oa9IEQZC`+qo#%;mAQgL@1E2x_A}=lB44)S{WkQq`&9mip zSkO6-&WYokIGyruXknC;>XqB6A}CJ_`oUZ68v*hZ?MPLlT`cHA#n0B!M&5LW0{B46 zVPuPyh8%QLVZkVSTo7phyBD!l=P3r}a8hZ8V(+F#kd8UFdbYiuit3N2xeh_(HeiUI z{vU#H78xMhV&4@Eqe=R)`fYa&NvNzC4VnT$AH+j>o}nX~1tQS^S;N)od5dibc@9<0 zI5>H!ec`_(@KJr`kL)W#aCYSny4ZMysh{J(8W!mIa7!w>SY~zZOkwObueAbUiE9E{4kf9`L9+Za zHiZYq*F>hM0#8oSt3q6Q0eWkb1-zkn^v*zatR_&>c{Yz?IY-s6q|)Fp z2p5L-YA7PDZB3E=-&eh^v>02z>jbEaopdf&uJ;Oh@rkw?y25@L5^#bKtF}(if5_o+ zp8ZZFc;EdLYx=hYT+aM9=h)lAtbUGf#24VoV>OG8S3GmPrp!(~9lDQey#c@~!Fans 
zG?)J1F?Oktq)@?PXq9yl^nu8WWVR*`ia_sVv6!8d%K2J@EetUOyAQHWA!cWJ2C814 z33>CP%Ub+_oB_qho@~1Y<9g?cN|W~N;5Owp0cVv}9%@-E-g=1U&Frb;?PJ0)OOFrb z3GWAjFdII>jds=7Ao%it3u=yVb;u<<53Crg6&Eh|TFa6xOOh?iqem@`)}kn{npw1v zPj{xhDjK6{S3L_4E^1leR@mM^r2fcD{S>a}?fkEk(wQ2-f% zU2~{uf!(ce5IIU3EF-6fr1+h^$_@*-AQ48neJJcak3#eRvTp#xW_>YID`!C?lM*+G z?gFlObu#Y09u|Qr(PXiS3I`3nEs>&4Vrhj` z+wCp^rik0`us#Y>dRL5w4OZAo$Us3Uf^dSu@nY5~PucT|hoF5yq_%d?w;=ei?myV7 zgvg$3UMl^A8bNA|=c6(DTLJ%Zafy*&?{5DT$;G$#>By_YhO?m{F};|O<;AVSq&Rsf z-)Or;frH|#gb$fyNW$n~Nh+fVHRpICQ2G?`-r-Bp2mWihj9y`Xnf zdKM=nHRkDkr@|yvFM=nCp@3?hx-QquD6 z9B59m#UfMaW2Jgbvm|5~)cXP3tgtttL`F#dD~u41H!VIsr^>E5zcYG>Ceoh^d*8^9 z$&|Gg4=Dz+K$Ur^Cj-WAb&>EJ!fukI5J6?FQu0@AsYXymFpW0&EL-^3T3VSys z*$qiL@g~FtYKCshmu%I^%@5(wlEeQz@uBbUUzSf=<^tlOpCX83UGoPd~^@ztx zQD!^sLa(8S*vSfek(|WK)NGw7v<9y$Om+e=T|7!w3SSX|9VW^~)Xm-xDWk9D=OHBz zq%%wG2jBCQjDenQPg`bySV=#!s@%p4hS^oev1pVuM2E5~Y@VXLyLWBprOT#}Gg&uB zh6oM|I=gB3J3~S8wC_WK!&06E<=fc-QSa_?cB_z&#Jo5MHTJLww2iNLi)9GV8hI4K zq+ZrZSl=kSkxvImNn_Y&)C+pWyk*w?Y=bCaVCt3B31JPEC5!A!7loo%PE4-YXtxK* z7qx=U9wSIfT<2Jo!gPF(!8S=j-#}xvSBgxVBQ<$klJG^^t=RBm=Jd^aUkrAJIMYx^ zXN#haTg$oCYXY8p-$d4|7hM8IOftrXc5*-idE}XEeS|zM7n1YrAz?3?n-VR+GetQm zNIkn+C<09=K!>GHVd{f#OkqO*`)!XlY|%xh z6)Db}i`rVu77N2}Q8M3HUEC|h^3n5k>*XFu>f4DW1w zR`59)DGK$s)s@=g3NtFNyw2tZ@*Q{F;bGdB$a`+R%}Z8C`y}ees8$mx_n-Na9jKB$-^f(ymgNY7#{`xmv(nFYJt* zn6p@hQG13IWFDA6N8tfs$tk}L`z@*DEM=r>ddiPHn$v`48zSKS98DFi(V^TbQT>~o z$1|mLKK<{DMC;nCYTGAqlY2D~E@bI*U6mZMo+XMBR4$T(*Bbr;ZC`R zBw<-EXF8Y{wces>Mk$h(Vjm$dM9LEgBrXb^^Fj|_@HSP@*()w&&)ab_dI9==v+bi) zbhOqvuZ!n~g#m05E)yg-4q)4Axxc>h`i3?PXir z%gW?JexB%lo|D$`!U; z6uJ;MbiB5WEUlc?wKLJ~jjuH@Q=Tu1H{1gE_4X&)N=tc@8o!PdMxqF0=Gj%UIUC92Iq9BZ+L#<3fuX>kMW>$u*n0RI% zvRlZ9WnaB2_I2i~qQYK2x9A1e5k>$Vi%ycc+=Osw(=rQW)@)SXG$%uCX+b+$*K9cj$n@(Vm z#@-T*da>p)&n~#h^P|x%fA`-LbZK*1$DOwoib7re23V1T&LdX_+1m_XGOHOxh z22(P=_10KIz>69d=MFr>o)E>8^HoMGL)gpH(N2M_4f*lzUTpgXsQ3@_?Ce`QGw#Mp zy5PAG7Pf;#Lqm7L~G!zeRxFIaD*>$`p3) zic9$(FAE8yZ`*qTV6dmwPWv$_y=Bog{sbE+in?+$XB9t5MGs>m!8#{BMbU1cYJ<%d 
zg~ko9YfUE-M0}>V`bU~TM#`2J$dg;8!QxND%{e)wjAkXhYY%4 z;0m_O?Idw2-y19B(C>B;X!Jnmm=g)-&LxFwLbbDDO z+Rj=}n^24(z8)wz$&8#lZ8W=sG%m?~QK2H_E@hDFf?<5uCrN3M4GXYA|$^+MdU%w0JmF;>88_8XTL+9w`sL5_mr5MRE zm<( zf25|uVhVeUvSjmjnlRjQN#TkxABy0^aPScOMo&ks!v8HH&zvXy=^qI**oO?ZA*rYl zdO{IldMh$Vjfb#W`&cQEdS2Vw68luZ*(_qaGfG#=XZM2U9h_0Hb1AM^dy9e;IAfW^ zHGH|CSHEvNmyb`Ra4{Y?Y+p!@a0hYq_pYFyBPDglwkX*;QR?ysMaQ_uNe?%?0M%7Q zWG#7Ru%diiX%NeqP$($^U^D&c2UBQTNgJnC-Tj{(0tg$P+J&f7mMJn?Z;Vy-5^Yb`T#KX+?a~S zVRphE2+^^;Le4ns=YsHac+LXb5u%XGtvBCd7x(jmMi%gZc(g4M^m-mh^hwxpVc3nf z9g&j7uzQ?Beq6H_kBwQjC_X&9bBs74m0&E&?98zfMWOlu98xS&m{BsU4|aQq;q+jLOEzstuAgX+3sM#;dsuG3jLjIE8j@$mSgA)qaxR;LE%Em9 zDK=ZtFv=-kHcDJpC+rQ4js!TNw2KEqgWJNwI2LsY`;ftW5e^L2h~OfX&LJzBZ6W!4 zT(p0E5V7!ncv1bqNz?2>LC>9meP_ITi$G&ITrRhfso0WO5u2}T1{3R2>@Us!yM%qx zN|%vFc#De^4sPY0ii7j$T~@Z}@GPacWePis98Kv3-(5pG3*t0ytWKeaiUcPmObnJI z;36Nye7#c8`Cx(0OF3cY;P$dDktFM-MY2mouO-=3FZaSfyswjjcx9yUR#5`Cx^2a- zq7DqLQgrI$dbyeYpBUi59cvD>&anA{F14OGrp>oyA~-*z65KyOBg}ZoDwWQDix5pe zGH#*$R$*^QU-U9g8R}678@Jn!6!uBC_dvTP#N==;G(h37P!)$VTUurhit3fNZAH1D zHiaYYr3PVbcCntV$qk1c6YXOW;pM>g2_EJ*`RYvjkI3Gl?y_BKU4}t$;c>N(%B4*g zMIzCi&1BmXAoD>Q`d#k@m|VRM)28;XRQCNSI@TADm_tRHYp#UNxE~UYD#cFDiwZmAyO&0C>M{Zf zZOg&)aiWCL(@g|MRv}wUD;01HAE>eI;G(pZtCiMWo2^nYlqwCpSqgi%^*}Fjo*dD(sjj-Wm65efgO%vc2|lyXgVXo51M-OZ~n9%7rfajmHJmOB>3Q&9#xAuv+W= zpl8vu8`Q@J1sECn!)UA^{NuT?!LkM5GLF%;ija;zL2uforC?ulKb8pUL*38bRuFDE z(I_oKB42q3O!E$|syMV{v0W?Zjmgc%%hb)n)R8|XVSN;K`O4Ais%NafC_Z+^a+Vge z*&dMDw(2(Qs)rrnah%)RK|(Gl2K0IxA?!6I8|&nvsacfh zNBB(iBc%NkZLf&*M6V+IAeDpL?n)MFABh6f2evLe%qxoz0J`B!fy*AO~!hOJt}b785hzzz65dzS*a%&$V+!3OeCy1ws|UVa0zNgB^FaQ zdgss+Gc6~C+gq1gmBQ+r)zgx-t#pyi6$MsNc#@saT0!S+fZiK@?dM>a$&>oP4+K0C z&7xN8B}^?Gb6Y=!5z?Xg4LII7yl~vX8S`v_h|U-4-te_k1d78+_#*pL0657iv5Aim z10Tod%{VrFoXrl0)%Vq-}1<|}w1qDTED6nQ& zx+rv0>1caOL1#vvxjg$s1l2Kn2PTdQ!#;98=>%qu4HD6Z!tfYIBueaMQOJ_PvUHci zk{oKZoGag$26QfE5ykEjR#Rv4>}{w3X`^A?YTE=UrUX6K-z$9bm%!F0a%*7srh5h+ zJng{Lz{i4Kc-LEge4|~O;nA?mA+f~;J(>~LXjQ`giz`HRuq1(tyF;Q;r|bcLJ?`l< 
z87y4>U}P8!uLIgv;7K_|Qg&f3JyRYci*r!4nlC^poYvUW3cGmtb*^V)U6Z2lTTJKq z-!V~WIP3n!oL<~H5=bBT861RuBAk?aZL(paR9znvkxFty6P zg#|*Vut;XQ`A8J%qOgp%^)@^K^lxw97$#}?GLB4`ol9wIFh&D+|Y2_eDfVueUd7^8|}u}pg@ zRZ3B846DH7AsW{Dbhn3cI?F5=Ig;BnMp#@lM4`Ew=e_#_B*W3BMmsJ@@wufsg#Jv3 zZsM>n!^YIWMMQ#6pA|9SSGcaWEBps`xkaR8fPM?+3( z9mHDZhk3;7`&KRC{YZ#bN1q5b4}3*$6p11GhXoh|SD0#cPUAptpa3jLWSpps6ZC#S zm9oKFgqc1c>1taPq=-^1o3w@Sp=A?oS4c8qh`p|$=N{Fzf%iJXu)XVGyp{A+g9ci*1%YBjBtJ!xrWLKUblUkA|{X z%In6n#kv)dE_8n_3WfuO!*8lQ`c+lfi>Wl+){L_L#a=c;(G>vMX`r4oKx71vfnp`9 z9YX^o)evGw3Tg`NcC#&47`escJUx6NL~ssjpM*TRzhw9EsUTzV;UCy{N(efig*;HV zuELCe4#N1=T_+^z>e6Z&e_M}K`p6&rAYqu}VP0v;SW6EO=DJ>eqvZ=CVh$bT<)q%~ zMS$CM-f27^BDtN$E)Uz@7es)qdq&zDAvkQX{Z~O6gA_?Fp3Rs-?lR_|>C5Gc8Qmj|H6dXbH<{L5hOzUK)Fw{-XF~9j)3qh*&NX?UVBq zyhZhV9O&I1efWk8BS`fF{n21EMDRxVQRAB}_N-_WJ2YO{wmFS?7Z=ka(XKt);QXs& z0Ny#h$HeWn0Nbk?+YwY@fO|Lbe!ap*36gVk9RGN~32|tT&0&oV6bJ2Z4Y3ytc?FHWW^{dTAlW#u_ScvuD|96`<7rRNeVIL zcC#pygb%PTHc;S-Grmu~K~qt$zRLN1xA6PsnUFshvf)Y>2~H_pn)zLXjX&fsx@gzlH$MEP*!e+Q2tv9Y+J#ibzcd7tgfJ-w<}TI9$!K$Ame! 
zifk7K3xQIl|CQ)2Q@)y8__K+F>sLi(c)c{iURMwv>4nX9K^Xx457#_-XmXj|E=XKn zK|#Xq5D<^@4XVc~BSLIx9tyRK1K9s0G0zK7Oyarf9Ojyv%7O5W5iBcEn5a>*m6C5& zA*$OAx7v1vqrP-kvbA<*1r%>&4xWmwYYGpFqgi!Bz-Xdo$^<}$Foh%4zRcb01Sv3j zNH$J4<@-cQO-$@FCffy-&}byT7(56G`1zk^uDxs2RuVOc?ipqNnCr1J+bkN*N{eUN z`=JoVJjOc&qK4FkCH5zxz!>`$Gy<$;oWta%1^NL$D;QRm9C@=UwJXMZ5wv6nT_yn zXSY^Cp&v&kSZ|FMfTf~Dn*CB?9~HehI({aklSNzmT}T(hXxr(NI@2XrRomAE>6M;Y zvC`t=Qn`-#rbv`}VjixALh9s-rBbo(ElBOJt>>Ln9|3RYNcKb-c7MotcxA0U93mrC zGStXyQi#mwZCuUCgbTLVZiRi!>A-_CgLM1xWG^{7x6zaQpNl}_m@HLRrl5<3w_6#^ zxjCZ17?#vZY=(e$B6q_jHdELc#kdk*jHC#(uqP{k|0<012Mn}prx0`z_RlJ{TLd+d z(7@)WFx3VILJ>BPFyn_C0ukWj#GLu;Mvm1?wdtb3N34i<$x8$%Z_jqj6jj@sAvK+w zY^~IF+(T160hG3JJU$69*;p;Lt)qh844y9)*!v+`mp44@K64t;RGw_EkIql)?c9@; zRg0rHo{iRB6xhj1j4iO<0<O1RXTR8Wr{?VZL2eFQYxAsQsl^OrbsY&Xm*2`S|SJw{IX_Qtw8t!avHIzGP!BC zZ4N1lOKHzh0hcKAdy{=AOl2Gh+c`4``&WC|12$7g&8gdfwsNwrRvNOu6Or1|+Bdh_ z(NuaK$4z*C-Oa4aY{EJs*5vB zl}bM#FJ72A&K8RjzLL?Y8jEq2qJ8J|Z$_3L)*aXyEIwfKA+k{RW~$UYJ#JW11ZJB| zCYIP)3qjtSY+tt6MZ$CnKg|BzP7Dx)<<9b_qb*#${XCTcb2T}(Dx`>PCbKVUwm*mx z47uryLCp0zg8XK$;ZE?jC4}jiWcB=L-D$A9M1ieLne>T9F9M zgu)mv*armEqJ|1tjQO{qjLB4O3~v^4sx$#LT04CyQO+iJ$=BHsVOlWaPP6ymnApa!SOlHA`?(ddV;ZNG9E(Qx|MDEqO3q@<+BAGf$r=xO~{WTYw)68F8+ zeygC@6B}280OKD;P@}k9#VPTT04WUj-}b(s2BLXYvUzi|d5V1^3KjQBv&)tfbXf)r zr({I7jktvwE^?G-L9)3e*<5N3qEIO(N9{^$Y=;Q`edrgwFP*x=b5XHg-HP)tTn&XW z#quDswgS%gz*s@SI6F@SEhu{Y(am1HjSp$`U5~E%UKPQA0Iy8sY~FyEsEKw=R3CI) zA?Dk0Vd}yCV~w4@(!s$w2YJF4h$a#Sh$h=Zf^;;7Q8Mt2Aw)A_xKExmM{H_HPTj0g zwnC6{kps!MwE|x0FufoBF{C9oUqEbL6HQI>49U&xXhD7pL+=@8vlaFW@W==vJKCos ziDTqg2vpcQC7>QGA*0tQK)38$wQuK3IM(A-bWEhsUWrqGC`?`W59U$BPF)4W*e@G8 z`D}IrLd{6eeNz#ngT(|7E8i6iU*T;N+;*qk6w>fR+>92~U_tm!woJ2~sqFXA;9m*) zyp&XOhHQ&nz8V^X$`NrA*P`33mk3^EI}Lu_-*P@N zvy!!S(my(D4MbQty0yxBC`{V^18uB=bP`fIJgN&Ybe-jkBjNvPc6K!s?<0)f#qA1V z|4m)i!|o8G%pTpZvq1`bH*}P*i%u6yZGwpK-7#TF$Ao&D71C!QyjG_$C5=iv#v*WB z$XkT6G*@}t)ZDoinolI@kaGzT6NTcE%@xV!N~;V|=7bRm_)5*m^ej%&ti4wh((~TW 
z8=0jG($=xD$FQ&@;6F8Y#2B|{MGD=zEE@8ib~NP4TToM}rj;a3XkL6^*h#hrIh2@vZq@Em3MnQ!^9Bt`UT>G?@h$8{(K=BakGZA4f}2 zlXZ#W<9N~rj2#KN;0b3WcKP~~l_(AGfhhtTHr}gqMW9@k2iYoPk$6HB@2(fiaRa|o zm^paw06T31L8ejrjyyX@VY)&Wx;8|B#zb-?J9&8Ty%Bo+X)lXNqljij)@5;2AvEo)<}zM;7SI>=i-h4LMt67d{n%d52vp zd`*5HQe)hx`qephMLm%;3uW1D_9Fq$IxaRIpL&Z$pcTUsmQ>Kk3$tNucGYH&I=I-X z6x86hlqQ?;gH>uRqQKJrsru;H4;vgZT?V2*@P8?|yFSl02AJG3A=yx8zZUc<7)j=| z*zQ#Ndl`wp2zswz!aG~4itmOLvf_b!{yzoj&zpK&XP+wUZOY;WI>XM}0)ZAw1KzF{ z@XChs8|5|cW>H`rUI!qTbazOdDeH1Cq~Zp1%sv!gv|-w6?+JQYNg0sYW#>1ZEK4V> zm>@Otg{`&{0eDPqs<71xle427k6r$)66OE<*svM=k0N{+hDg}r$6LX^sPr3PV}&TR zt9hzDsW1(RE~A#%Bthzk1Wb{KDz@1n@zIH!ZBaqb5@;-3h@>6kc6$bUjkDIDur zx!%6erp7on%esaj7*dozk`Y6sG zUQn0`_wd8kYa2mtDZ-!mc6~D#o?;Tl&J>_=1zOYcEl&_0xOUUk+cGN<;e;0GCN8~Y zw8f2z|085Q)I9f4Gwz03Na>>{qu=z)5K-VquFcjfOr_YgjoYRG+Bfl9_*(%uz-ZvH z$_hJUyJy1SvVBuwGO;uk*f0SvAF+;Aui_jjB*iPR{Z?V0ZO11+ZNEz;-nTi@!}-Dv z68QLD*3-TtM9F!XS#iq{@Q%l$4oWvqh~Pa|l+Cf}^N^Z5RV;9}5=qbYA8FGSq|!q> zwjEyCYU>4Q?~!dawq0Rl>e4$*7eY4F|_o8&M>+*@8jI zbi1~84D-zWPq73@Zt=H@b#@E*US`<&J3Y6*xmk{&ca*eLm>4ypXg1H9Vzmm>HiUx{ zT#;Fuuz!}A4|zornX{Kgq<;^c|rv z6(x#ZnQY#suTt|y``WWaF(xDKweKrP5mKd873fqFvcB?%dKtN_Z_U z$+Y5BN-j66LnK=!88rIhuf5b*B*|bu6m-V-ER~s3EvvjO}EfgU)pOG=Pf2WE#R@eN_){SQECkvIIDn5p{i^5=aZCZ4ALCc}$8AsdN zY@~>^n7!RbzU_GdMz)*E?3qwVq*1v|owYb5pxqhmjduBNvTJfIn_@pzn4(}xLf%sX zB;^BKV{R4n;Tw^K=Eh%yX>`~NbLo;4g?@>w_Opc8@j%dVNwQjY)*erRWYsJQCfFzu zywG9D(mRHQNFi>eZBaB37OumF#77}JkVOH(flpKMeQ?Ec&T}O91DKr0>QIK!*jq&P zI`%K-zIm)LZ0_5Fg|u1xr1pv6!`CmXq>wYqt-panz4>TWpi?$N&VyLE}k zM3pd|ef75vkWScULjz!M9B;9?vI|toma} zgN@M5Y(Xx2o;3a=d2FoRAp~=BqLHPrZxMLCD~xlqGE*e)3YH%}Yx6`1GT^d?$oUvS z$z9eF65>0nkpowm4gZ}NoQimu>anMU|qOP(6jTP6EEaF=(&)B zx9h1~gnf`I02S?g$66F*v#Bn*8Bc6`DZ_hRUBK_}EK$6YPw_TZWLAfXCUGEf5wHksqq$no_G4g+?c2VSaOn zW&m;$kCvdz|5Av~isk0me+0ZV*-)~x_ksE0-ZR*)P!KL6|8JGnQ&2yZQJBY8*mEJ- zS!7wl-fWqEV1;-ym4dhO9J_cwDc}h+-H1O1z?;fM`;kDnNR@PujO~_MhNxZxuTi+- z6!tFZi{i?LRJ0s%TdT1DAh_*kGVvohBAT`(@^K(_je5KJB}ieGVn|2>#IO!G@W|_L 
zv(7nR{38@+B04V93H*POohiEiHj41{UTAlT;Jqr(83=f%i4rE$;$}-1grV`A>5COk zc^{3I1tjhG49!*y&EQMTVJ_XSI{*%g8rNgE@=Ou#LUX6kN<^R<9$Au2>ur)?;G=pH zzAKzU>-WvHrHZ6AkvdIjw4}F4qvmtbeMHnsOfXEfvtRLYnTs7$r(2%@4^N+uwNQIi z5I!T9@o;g$t6p*>gR!NvY-49Rf9sv@CCigoLEG z^Mg|^yqO)w1 zO%LHd{rlJ)g{c-@_Vz0KU5II~Ew+~xCi~t7L~gqTe3|4bB9gX0^$ke=Lm865-rdT2^-i{a1p9jpmDv~F)(ZUoL-Q>EE!ZWs7MEYYGUycTaok5N{GT!@zKY14B z>SQERQRYJ~?MoukBb0krzFj5ct&B;+q+7^BEZ!RM_ab{g{Y9j!5WLN>UkUin_4&m` z_M#vy7<9XJgqW_SD-Shqx5I)yW7&9M!SA#`d(QAcbG0D+CabqakHjnN?vNmQt}C@a z1bAd=`jrUK12_{{MJdzC}igO zlAbLJGm7P7sdX3dAK!3Hg2>%yp-B3jH`ZE7NSmF7<=bZk=#}uM|AAmwkXx`QAamN^ z>4X;K|FtS=VT^7**F=GtcAFA7g9q2K(oVG7MS-0IO}zFzBR~~79L+fgE)b-#MF}q0 zY@-mrLMB3Hem1Wlk#cGV$hYLh&^ z?Xr=AFvBukWYYqO{C3U@0A-3|TPXlvyVs{7j`qBe4^csk)#?S|ljo?&R0122PEqM; zJx}Tt6w$S#sf;=8P2=oe!Y95n1ql=(yC5(}km5*Iw?@(=OZjWlMAr~)nQp5UKCyfz zxGoxFYeeD)*JqG5DoFcShNS9xAV9isJX^hg48-`L39aMapa9kW(hXMkOkHgvVlwyz%Zl*k-la-XmtH@8Vvk#YBp@iawv1JiYs zdRCYs57uE$@)_$8q<3+`z|Z{m$31cWSj>QOf3R1WhIQ3VwtWhdk*;PJZXJT&yKHIm z*lhNA_ap^z0@Zb$C}fd@6@mZ{1V{yCGR!Zv}sa zNYln+mZL~vaw9{?>%rqfnncCBm)gpZ46_kdtFU(^KhO=iT4x= zQ?vYil!tIveXEE4dxP|gj6<>Kw53YnjE_N;xWv{4!Y~XOCHV1Jy9mx18*dqWD6yQs zL!*JCJ9nT*G?W96s9*AOQ|NtA1$$cB2hb}LAjO; zi81YMT?$jqv8D;fnkL#&LE4j!(R#b>Umi!UK8S8k4iI(AI2z+C1=XS)EU(!bA@59e z%_|eEMih#|d|}KE1i<~=I_nhh`7tDxNxz@{Z%>M)wFj@LKzuD6qwbvY^;L6Y_2<9+zRKe0nl@c030&RyJM~xa7x>uqOn( z=kCdt342`Th(yQUaAP;ys<88oWiWOfdqa8&D(b$AJL_X2d4>IWU^w?PApBtjC|ZfD z?7N~+V9}m6Hbr3)k74EFQ+xu_-vu!wxI-+zW^9RAgCNXBk~dA!o$|IPXKAYz9HbV zb7UXu6!M1XM4qq@MFH$oX22gH=|`eVsd83icwzLSC@#ueDvr;uB}zD?!UQ(^j* zpPri+wA)0X?EEx7udM=-&PdtD-^rVqE?nOY>1bBHoq8(i)aY8^2K%BQosZNFFK&5u zeuz%iH_N#Gr6C%QbK+P8zxhOTb$zmWwe1#!1MbY?_MAYtAjI%hbXeSB*M8CKb1Un1 zn;=h_$xVwR!$J0bf}n30PndF#%z!Qson`mH7F(zwJ;Y|$ z{;V)_C9?9-zP5cUQ~z(G(cI*kGFdnKT+n}r!LXzK{7asKCT8I5>5eZ0>5wAM&L z5Z0DQPmBn=VC|~ZmWilepNJd?8|=@bz{lGY8tnZL#u$j%|3Fv$l3#4Fp9^~Z7<`ll zoRx{>(?2slHs4+n4$qnmEsprHHSAGS^uV-&v z&>!9+0`VO)>N}=xv^GKCP*F=HowwOO5K*;bzEzis^>))&p?C+3#P5M@dEX)u)$U$c 
zyl0Co7osEcu=%(~VX8>uW6b7qt)N#CxlG^^j+3c~-hC`T*V)ylgK3`5S@xg+_0vUq zgknQBM-(dajqO)|Na2d zxx%zR3HVPZy6{C7s?tvXx|g+Qa?HM^FfEr^D|vXDZ`X>VwqRMA)=alSAqk8j1E0xj z;1p4)uXpd>-K^9iGBZE&rT=vV#QTeUlBb1hVGZ!5J5U26OE@_D+oxxokA-Y zVtwzMZxa>t^&2 zga((~qRMdV06{Od80YPFuQ07EOz_Nde@Kgr&=`&y8G_8O!z*j)v|wr}Q$%lBar`mv zHAaU_^t0~Po)UB(3KAS=A{D{&q4}B-X=b^z$qPjFl16fEQDm2h;7yFwMkd$|qA+Ln zO|c9GVYDP4J2SHdycOJz*4Xlp?zVgEu!0QU?HpAfvyX&mJ?~|@R($XRk7QT|47GG2 z2J`U7HMTF{>>K*%vHA}*Uqrkkbv?> z@D6aPaU82z#Ff&n}xg}4l8(T5vDHgIO@dfDB^-vETcaO; z7wF>=eO?#ZouW{t?iN`brKs_+NSa-ZGpr+o||(lvSOmOflG*40F`LV{zr zu$KjCPsz6TG%(eA_JZP{zub{?}ouqvxz}U#$Po8})$1nP?} z=-EbnV@<0_AWVwR=gBgx;TZTO;g1;bgh>_vK$c0n%%Uu%k9^Jb1Xj`?d6J*4P z-fnFnCX)P~XRin{wuc+nSkVs%pWHp@imKc`6%CF?OI>j24?S@)Z;20uFq+j?s<3Co z224qG0In3x*^OZxGiLQ7Xj02PL#rJS3=_Xuwm+mq({Db<#y-~&M^kgSIPDw4od~9FP?$Wi z9Aqu)1t^^z7dsdGTreDV8f3Ganb~OPck`06$ClU>g~=^vB~~p!Zl-e@w_?wRIHuZG zdr^Q6i7t}lWb6$Qyw}q3*cU0M{8cn~e7m;BzVIW$VeU)YrqzBViWlFbXHVp6Syv;XbIor~@>@K8iRPR}vUt1fBT?W+n=TYr8||Ng@ZwC#&AvDZw-coBVdcs@ zZpFf0Mq#`l#>soDDC${WgzYZ^8uj`LEKb?mg7gzRVe5UZ=S)^@iVTZvvI0>^oD<)( zlaJ_Qf+Wsp-)>VvVn%2P#|jRjvqj;zG4L*XO+n|W4_*uGla zh~ALM%f(d76@@Yo1eA%jH38D6cQeEG3Q`{vrhUD=8X`xvZ+*jEsH5*wO?GZth6rJ)Z>FJ+(M!CCNuCR|~9C_V>==HW%L<%U$!$*0hB}4ko@+J1R!c1{4 z0eO|>IeChG_h*p8^pjanY=Vm7Z5e^Yu3g#(40i>5TyqQAqQmvxE$IDicyY4jhC&Y3vWF-+QApKVY1u9lq$|3nA`hZ)_8Q5w2rnWn^szuAhqy?=N$5MDlxY=cnb@Ok1v{fe14m)6{O&-1Y+v# zsk8_;Zm^F-B!^gx_zxf@?8I|R>=z-tdv1mmDIDfdb(6M*RiePp@%fA7ChYx?IJu$5 z{+UWFF*_#9jkl868#7MY=!=BaN9bK;Syd{wuzgL2C50Jhnu>1)K$?9pA6ve;szN_Rc_>J4WRu~=6H??noJX_&Z%Ekm3-Q~`;Uk1nqEm=F; zW(!hZJV%#I6++B6_81ZXmn8fPA~_3@;ns2XCs8z4ni}jIcX;;Pm@HUaCd6Fi{^kk= zy=NjDdyfm#GukI2Y+n%|Ea$aX+Y15iFV)qm4H!gXCd!!h>OzYNz+qPV8s1=b3sKJA z9g#zmQw9T_yU5eWGP_cg@RP|e-m_Ro_!MbK`?|*x>G@VD5`}7P$J$aMXO=ex>_ent z53F%StM+ZMzbOL7+Bas8v%d#`glL|9bx4R=%dp)n;N@`^&9r-jolALtZSfv~c`5>k{@J zV(#%CY+oM=1ztGnAdG|*V37#$5^eVC?O8!DKSzE1VTi^tB!ZvzsURc84tKMib+>2b zc1;>t3j`U^zW3NE!#r7{7+IE^1-$uOg+jPp1gexA4txCEka*7uZkXo;!11ubHVF82 
zVI+e5Ovmyi9m`uga7D0PQSe!?r*8D_S=sg*VfqjWJ1)}R6QWAqPTEj0UtoiVL-6U# zCIsu{c!d#3J<-T#N_K-E7r{F#vWZ-5i$fBI0)xN2ES@`pBs9N3YV+R~puKkuwTlBi zORm2!6`*-MZ5*EZwDlD9<`0V_8DbBIcuGCKYKA3J@mEG3uxF&=hhfgAMvyvRFXKqP zE5yAxQx+WUupqsFXgv1RVs=cJnbNgtxqbT{!oIhNEE{2TX0hdn2yYm*lm*>cg5E7W zHe>lP!`6#LFSt=rNlot#d5%u0Ju@!hdxr7a}{U>Cn zpSk0(YessN2WpT~Sr>f_uM^2TeqRGxIy>x!6a|@;xyYIJ6NQ$&J9Cn)2;sx^&Gs9G z8FxPX904x87es#`tAyir+*t~fE5QfvA%(q`NIbE~%0g@*ug)73)|{NgoN2Tzg5ELd zXm3jD^S?+#^9yj0UTD7$1&l-9=*;`bL7VY-&|(7x=p(&mzCMw0D;7b$T3a7!m2R`K zqQE#3eLQf^4e|QX%YWHITO|q%9ooKKw$OK_60if3y7ZnD1q)2b1=w?ew{9PeJ%B($?_j0zQ^f7(%;31pf~5G%fp*>6Rs$mZuoTW~@}m7l`b9 zhWFG5PU3@;SzxXehHuuBQkIv;1C$UxOW_*9aF&*VGRZuCCYm!bFjk0`!Z#m;K!tS1 z{WvWXqCovz8kgBhL9c&cJg2}`343lyl$2Oo$jz-P`?_`^A4pzd(rOHWOpbCbBTBwTalqc-k#`dGJ}E17R@<~ zu0f^kz>GV~D1~1Gpi~|GB3|b6NL&Qw1+S{i0~5 z%nx?B)}VS_C(~vMa0o!zN6BbUhVIno3c&u|cjKDFOCx(4%Hbaeh~$Hu#r^DIVqrBB zp^_!2z9M+%_uvHBD(Fq?r?c-z!gNMaJdG=+zYFneYp;*lkY5lE1ARj2ZK7zNo2iA4 zv#=&sNdj@XFFpc-<2bK}c1HlL5Ib#%0O_z_w=re~Axiq_HckNkaM!(!{q_@r{LG?D z^p$!soGA)C-Np8IR!B_SkImR%e-LzbZo@C6y%eH5rY@5p$l+9U7ADs1L*WyXVjPRj zry)Hj3eFWaGIlZ^ja5W@?-PNkhp{dmo?qL@GO$V{TEfjUAJaF5VE$NH&GDsA+qcqy zVHuR_RHNOHq9oTMA6IR&MB%)ajLgH!`QlpkuUx~4PBS@{Sc}5`?8Iq~rGoFQeBbCc zr`cmk*)eOb9SP+};y-1Q>$y0cybJ<21;uuwfd6)ep=YTT?-r4~&$vvAS&lHBg6zCY zfX)=b&wpI|MfT}v$b3;LHd{KW8wKE?U!OkK7{W3g+^VqF$f^yIRks^^CWRE9?`ME@)quV+Rz5*CJG53hmpO9+Qrf?;j`}xLS_g@f(D_ zrlR(Z7&Q8s2>K05`^WkU`BcSgYYZjbMX3Tf^<~>OVNIlT9ALjH;H4Ggd_2dl$b#T~ zn2|rmt`ekhc;xnGM2O`6P>!$?0q$85&*F}>T9Ec+%Ma+meb z6k+T*-K6zOcr{ER6bmbam=B$$3s0ybuCTQtQu>jpldVSK@Jbl0dqro%W_wmKtfvnQ zw>K1o1rDAG`z(Ohr{e$g>e0l|M!w#1Pj;=KcUiK2tJJ@Gr6|!MYo4Wv5>3$xpP$;Y zrHkm3$VGCI`ppViBcVOy_Dbz>QRu=)OE3d#PlbZA;*a%{&t4>LaS9}&cTjdLjnl%P zL|`$A47aR_L@c7$w=U6rGO$UE+qv0bAFbmHH(F0YjR2aX}c*c&^`zLJC|pGgNj-u4llE z!PAIJ_ZNf#Y*p3TtpOmTxy9}f2tS8NFOi*Pt4$WwdF1vJ!{fqqO61{f7ulwZpcVF{ z(@%*JRoPRb)4@!h;X~}oJP>`5gDc!Jg}q;gAlj3#$s&ZI)(P-w{DOF9776Zi9uV?I zVn7w)a9nbUL>5^>lJkzqHcJ$07?VHVmMP49>zlB>3Wi@!NxmHpsnQbT?41x5okPp) 
zy%44M*Ru=ArCB-_JEd0%P`#Xj=tIwzWk?{=xPsWY=oWICNYp+ba|4o?bDcl4mEari?>%xzf>zmh`%vY1Jd^rB*tdu1a^ zxY+6~3~wD1=Gn&zQw^;eoj=Y_EjgKVcO4(xF9~~e_ioqP8G`ifZ5Rqy5Qbi>dE;rj zC4g*P<=G$s*0|_77|}%Dg%{Zf5$PQsvG-tOTZm%iACgT|hM+Szyckn@ACa&!lkZ10?(g3(_)Hp9S`)0DSA3F4L9@`7p`Q{r*W#A$|nkM9w0iB(@fM?S55| zcD%iGn{_EnscGmr*uMnyb026wevBZMGYyN;W&eqwKSANv3+X%r)+xB3$W+6lDt(Y8 zL|L(7W^!35fDy!Pg*_i2jw$4T;D_T#>eCd(c$B1W5rL|2e!>QZxX7eN6P6OPQi*vv z;-aHk1R5Y)3ELH>Qe*~J<1)k}k{e4cGlWaH_<2lW?;xxg>F%d8#6EHLCAbJ77G`*W zXdp{1KjWp!IJi9@NsOJ}R-~1Z^Iw`u%tG71;?3li>W9lrs{FlTs zo|&VOj^@ef5n=s2^a0T;`QJsL8Q4#>8=fExhwn6$AFpVY-L^iW_;lbcEoM&$Q!_(2 z(G~=N=x5CK3;0ACiD=(ZK@BgWN@M=d!=!ZC4cb4%s({^ZE{3sn`?Md$*A3VPkOu(b9lQ#(7Wu?UN%<9r+N@EwFM&iCo-0S zz9&K>tjlXfrOG=s`*#RX<$Z}*K0Df@f;8&7>$=&Q6Fe%CB&o8y1%00~E+2b5@tjfC zCK}Ue$o+PEDM6oV8Sw=6RHutTHQl>Oc3Qw&_SVA5wk-wmCv2|;h(L@S>>U*Jmh?k% z8&9S7v0U74vc(|-i{b)I87&DJm~H!7 zU#hdUDV)7~bCEqK9PJV|MMrS#VC&><*OWtNb(2Hj=4nSaRd!8%#_kf4rt2l0Q`ka# zN(8v$S129oI>B&Oj?Y6BU(9g-RZ%{y!w1_<6TPhHzGs&` zA{dSwI2PobooAV%`7D#+SuSbDibCH#Yx0_-;R=t znLff4C-3d@Vz9xci9&m#&TF+90n&N*CPv$?kUux}8250w_PY?>)jFXIxz1yPJ~1AU zx{URme6qfMYdNu&2*ckXo_R}bd@442NSP(L`#j`HHr8R0 z)2^N7Ri^HZO9F*D`lRzfT4ax>C}`PW4ab&>Lf5BpD_9-ExttLmnGT)t%j11=jAaQi z^^h2g+nf-!XDR|D_In}Pm8@AP%fpw?@bZT9&A(b<%D}_}FNW6$@#}*dw0V~^^>ts; zW+kFiz_ID`j?LnwUC`$}E24J)b3~yt=(pvpn(OS0nNFaj0g>*|L=nQFPyS@M1k<69 zo6OaUVZKO8FA^A;ZLf$Fez=YLVeu&PwrKRiD2`ztD+~jXbE%E8XViNZ6k6ZDF(cn@ z6$p!#Y(qR0Fdy}7-YD#GVd>Hxc@t~iB|2TM!axwEGUX<$@HItf{lw5bmm& zVf708)RF{$nstVJNV`lz#Q2i{k;AXTtIv6}NlpjC-j0{%Mgw-WNc2Ef{#ad**nohk z7!?AH>k3Dk#ieW$qb=h~%Mg)1?1S-H`@H}YqlkOXxVQxhk@~N_Dezkl%*BskEPKzKctW!$E`bTfgola$Z&gJ zq!B2E4aU3fM7r8$Nd;~Zq!yewW!vus7}Dd*8jme&eP_!`d;A%X&X*j(Bmo~dx%JDl z7et^J6T6pj8Pp}@JQl>p+j}7^vavC{elBs|KUh4@v1P)fjg=H*x6;-NQcwm;a0%NY z6k11Sndf>jB+x|6wJYb5fZpvWUu6SBxNG}#yIW!BAe;C9@q)B1@(wW}00e#VtVO`t z(UqUQBOF$6ZGyC6Xt49@*0`sHP#>uZ&FmWRso8VQYzcSXG0ubQPD_UNE}sW(x599_MaTw zNV}njC{&V*$<3JEF68ac#BtgiDL9z-gQ1H&R-T^Nmk$$+ek5o^`87_ZJtCr3y7~OG 
ziW|LV5#Td&%k)~vLX!?JFMWH$3id1Zo`_Vcds`OM>lXvnXD6s`UJp3!crR{K!f=1^O-8LdxiaH&J$cTF)msGi8jh}Ld-@AolH;^M|<-= zkzgb}n;YLL0boaBqb(Hh?n2CRv@I3(4t;%RB-3=kO3%AzH6!-}fpCUF$%=RF1Q(4z z3`zD++ds$d7Vy5x7*}Lt1nH~8OLy91A>HnU>_0XNc+K2H7u%~T+y|9C`-iY5Ni2gc z&bgH!=L@ky31R+Hl(4gs!Ps)d^Xf(OGSgzC^X&_(z_go=oMJZ$coD4B$IDA?gedeB zulrlkPu8aNA(7xdJI>OL(w`tq@Mz7eNQMxCNmLr0?56!0!LnKQE_B}Xq zekOvC^lyWXlA6LJY%9qh#}WSE&-!=Ri7lwsnF&+tWF# z;-O*<1g{Y99_cnHg^{QpV^0c`pLdmJE_Qw=NdD-RsD_`<%OO5Zu1O2+pdcJ^pf0g9 zt35Bj)&Bj5*#JS>f-bKOR@kQu8XYoaP-DYI^FBhSMk+P-v`BDQ9JvOH+j3!wmx#5! zC_s5#OSgNNi2Ks=AG3}Saj0dpePgYcpaoIq`n!e0T-~BwE?>S8qJw%Ha4QW+}4HTG^Q^S)IS*J_W{5bXoO{E*2#y(s@uRIkL{r`vi_ zd@RO{lVOf$Mewn>Gln~{udF+X*~wKohO5Yy%|tu%zoPM)Yi9|=7U~6g78BqHaz7sH z6x7E`8guLdWVg*e5RuM1)Vj#NTuazlJHE2w-DjracEu@ruV`sJzNpGB6@~O~S5LN~ zAvSuw;oDnl`J(tzjrIW2PV5ToXvoqvyUcE0PXd}&n830PYGe-z^J~uG2{Y4P5%Qjp z1be>yO9X1$y*b;yy5VFb3ieFt?~6cWvcAc>DePS~n1@K~DGVc#(9I6ELHeSP6wxQv z$e@EfBb5(zr4pNy!i?c|+bc|89$0^%#azcY1+Wg`qFx! zcMCW3i)@E5xnsMx=G#sIt>c^R52m$WUmLliGI4|!xP|Htg_ zqIqkw^D|5EiUOt@>Ya=g2ef2pa$ga>C@e?o7B`Wiyu_LPCCsUb;&YzMN6s#5Meve~ zWwH2IAxpBRIa$*xZ;{qW3VH+^1YGP}t{|QTquY#D7MZPVjJGn&%DBqgZl`S}n#ScN z;-js%00YSFB*QsCh^j8R`jQ@(+;oX86!wM=kj$4%UM&*M`I9ZhxK)cldE7otwH*Sq zFuLoB{7(P0>16uC*m%~^5h9TO<#kNegn&0Lmd2rRp|JN5Yb3@}Ba_-Qi>UZNhwQniUFookMeMW=NKc z4U~$I3D;rhD%k6y=$9)~vQy}b>slbt{`B-j`xZp51$`(I z<1=``ULZm!5(PoqE(}}jUkmK~?VdWCva+Y}eG&ML-80gP6(l34tx++afes7%Z;Y?K zU93P;oh*Gmgb7^p$fUiKZ;=gZOblEakRQjeT{?gk*`0(mYwqIfAcogsO zNXxZC2#uDxf-*Zt6sp!`9dGT=3VM^$V&i#k4bcq35JaZF)kaM$#MDUuD)svIeQ!HB)Z)8VlndJwVtjE4mgRK>mxk!mcrxYvOY+Xpl za;pc5=ToH=B0%;#K{~bHAnQ;Nc5;w|`B-89)UcI|E+X@GLh_akl?$Pt3Q=h9p|;6~ z7uDID0n(Fc?K|wOUlB`(1p(riT`dC1kF9IOM1b8JAjy05y4JD<;r7HTRAVzk0=|_MvtCQ&paY-5f+}`X9ma{0o90`ZD9Ttm&z=g2 zU&Ht-N-nDfy$7+Rjp68DgY{uW$~oF!Z9&rcOcA|X$y6$hV^MtEi+F^~67*8WOCfBw zFujgG5l$p7qd z9`ws#7j?fN%)UPPV5v>;5j0hZj6cUiE%Y30zaGw zFR>>>98+xo*T#kRp$K%l+#TfD&A$QB2T7^s)Hw8NhRUsAUq$;*ZB(>tM^!~6Sj?iJGKJet{+ z&qJXTI?5MIVBi`-?}R~=W!;25!`MXhCVqn`^ud@|JJ)>zbawxHY;Y>&zRKu+=CCOA 
zc2S8OdL0v@A|lK7t+XqC=kWNlt;e~)?n^0ZG74xol+!i`o$rRebQ{U$-4!KlRZCsUciU6FEUCQ z31nnm5{+`ZrmnZ1zbEW0@Pt`m3E?ncno?72wI~#~yOl?~odSGZbPL7DMYjc*UuSdn zydr2e_mA8{+E4xf^kPR!3-5Me-@Hb4Ag1sHY7dI25t32K7Mmx?km#sG`OQ`%0v$c7 zy(x#+#}JK5{G3Sq<-L%dX*e8dsLurH>D?IY*y|-ldSy$o;P_J!sOiO0Za*g4^Z>~@ zj43`dK>PqnPI0!NcLCODbNS(}5ye~c;)EAxTD7n`uxggQpfJA=T>UjguCN>Jloug+ zpWiDvvKR*6&k@ZFP1bI}%ewtc6fYD*uM4bPm_iFt^{!HwK1b7Mv;AFwZoq&stNzq| zp6pQTbX%Y>O&;9Wb}Fc43Ed{$Wmvli^te7qa*Bs*yjk|Kh?F}f&Zg_k{Qz~p9Nacp z?*OA$vU2;epfk)(wmvD7MA6TySUas}guIby?Dp~4oNenwQ=Ls4Q%Y067LjV;s{vt| z_XX8=IQrWCl7|mt0Q*gaHQ4zmeB0*%p|;OY`K*_TgKf;w;d4)b@*?+PyR3&`=-8(1 z^4EQvC~(0(M^4dox4y%2Q_1A245{P%P!5ZD9lbnG5WaTLDzV2y;*N6sP(=3uOGNR( z;%;h#o%+X3i&g*#u}+G{ARRQ=GuJ%uoDSq%J`Kmo!u{@FF(;*k5?Ga zvTxbGMV2K%6T7Mw9G+Qbxgjx!1-$+695Gh}Z*xID^0RqXD+;w2aZk5jVdoWH`Kzoe zm71elP7d!vqA+g;4zaHvAn1&waR;1j=ZZktQa@NTBUx2z7mLEkjE^PQ5Z;{1kXa&+ zjBycyvWgy9r3;1yp8cpAhobnI^)EzWBhQ`|g@$Pt#G&U;0g|9Nzr;>^g;=WYEUoM; zD@UHg{#O*uor$llX5Vy&AbjteZgmR!$C57-NBRt_7YROxMZ$A03NZ%gc#pFq0+d5V zF-%^47*ZA?2_CbrzDgve9&EL5E2t&N4Y>2Y5yAVCNBSDOOPD$+Xm69v6r$IU*S6V$ z6XD86`$S=eoK*O%D$zS%k$v~IlSRo{s$G%7oYE53KZWIW!A1ledF$im^g&@-3~#cw zFEdTKBGIglsq3ZmF) zVehRw`P|a z3FqdX2R$Rcbqnps0U&)l%6=l?3k;7D9O2VStXwo7b-keKa;8copTM_8<|Te50xiK2 zk0n!)c<<#B{Ju9lGJkZ!9#EK^k&TtP$Pi=%0w17y`gn#-38_1#SKC5`{Wr(-)@{{u zqJ%Y}oOAfg!t?=`JuPpLrZNQGl2$s+Tt^USi7y;fCuhTo=ik*IH4i0m&P-4m*XMx8b_3c{ZeveE8wO zuf?7d1s>j=H`BiHCok=A#fGjnq^Q>0j{`++suyt>v zJtIg5OFGitOcj88+_-)DFCJNv&ynVT1jr~)5NH#17o@HDz_TSGT`Y^E>pui&6qiKN z(e~P;VJi*c&;pz11F6w945v&mKu7Wfjb^JYk<79m`Qw$6iY+MRp+*STR{}@I-Q6HTO@mzXgb8G!&-n)VIjt?Z`xpl%Mba zh~i_HnSWedo()OI$=hi$uO_PLHL$SK;+ zkb*;-ZmRzxsHLR6GRKY!Q1@M7xX5JXy-@@&CYHl-eTT3Ha>aIj zG4_(67?#5F2LbS+TyE!g5#xOsZNSRx+oF(F*7JCb`jMay4_>}><2uD@6c^nhU9SYp zg0Ybivb%(c!Ub@?JuJX5XYmPJr?6KPN%`)wA0LL|gBBl!_@E6GfxNG+ePf<27a(t} zWR&evSPMs7gZv#ixVSC}H> zSbfIc>*#9rP35nvjhi5d1*0LUOc zV_$fi81K1g&%rgZeN8m|NE&Jpvb1Xi=@ePaD2&V$-5^T1^V1!`W_~e^_OhrKo_=0* z?$Cu9w|P4Gu*`Es{SJ|EfIjmAyD$JY*bUZKK(%2WVA}+n6kyk+nzzeq?6r^-p&XpE 
z9wR9`j7PEDp&*U#nzhn?p)e&#=87e-Fr+%T#fla5P9G&Z7mI{Xd<%Ir(!F4#XfzMU z3hf)y^(kx;#k-H^HnKe{0+kINc9-oA$(j4f#;wRC*>6OldMG`v>t)aXSAyy#@__V< z<0li0W(^uENLLi^sm!;90yJsZefCmFwy!a|8SfMfyvK0WeA~NVe(~H)=%9gzQF~M* zGGErio>b6h?$G=k_NTT<6xc$k2=PDLEa;6I#A6!<8%0nL)@`>#3j1op3o>f& zT}LNOHJ$w)DJcI|1owII;3@9wqBwtja=31~L|ElbUv588m?jn`P`~IcK;g`X@@PxY zX19o>E}O83e{FZjs%O)86lN)jw3}uG!^)@r6(Y<>^1*TlakdDsgK3sX>UN3^7sZ#L z6N+0a?O_q&F^!#E9&&%NfElX6ah9k&VR%k9EV8#1hG$*H*!SKCcz2Fu=ZRAPGotVd zV{5@SZ-tQerr(mh8KPwx7Pn;|5Ka4J4~vibC_(SuJMyz|xtC|-MDk^R?{-WIwIFlb zvUj^J3_0NtZxeRG-$+KQlFjRr&G;q5X2BG@Pee7oxi->@V}^-BJ3337EK^}0mn(bR zc$F0j!z#a0{UCX87J-Id+&N*EO&6d7T|O|YwrOTrqlgrDbk{b!^h3hTu~!>ypn{Nf zSzfO&-2tEbC)$fb6qzae^5=d8pz)pM3$fFB5^{2hjUR8{RIK+Lo*Z*Z>}Mi)=fv}5 z${b-?k*M>ghit8a{)^2|$EUnLEboa%ZF?*B&asRB4hZAEd{M{pwvOeC?LHAHVBp>M zpn`D2y~5T5>upMa@cq`}H8v-}$f&?f`&1C-bR`*?DfrqyAgHUG>Nyg#owADpg#wDk z<)YSSKRJ=6VQaEsXR@K%?iYnRqs!A>_Mo6Q8UF`-_*SOMaL2XQ6>=QiwMbX<=X^|# zu)uESHhTyEwOT?{AK8614CHv>=tqDmG+NJ7Y=I~~Dn+@rD#XL;YmK^*w!!}-9>$>D z#k?FIV6ti}XH-iFdig^o+BhqP_aUBYUBZ;kGKn(E2O%?MWtF7ad7lu;pyak!V#PTC z7@uzA1(-H(Ev~f3L%0{l=Nc7uX6{8hd{=-+)|N{DASvjbHaL-2@|YbFf!cHOaY1BX z`xl5=!w(0kAZ$#P*$+i>_V49k(q2ko%*n;f4%Gj=vnPKkacSoonqVDint@RT>&O* z%aXN~_L?9~8eWoy)-Uo=H+%*Gj;_U@+f54lJU%JZYS|(>3w)ht*&8X0xeByTgz5Br z$vb!d+yR%9#kO6*J2#7UWEc%prUJt%^&j;tuWE<44myRt_w`zRzx)>b3TpR8@N&qeX(B{$V#4H)kecE%U_ zN9-4mMwH=l0Z+lE5``H)Gxo5E@b%CoH$8NT?*V&PL0@)w>$=-Eg}rGx?NwZ@?GnLRIyjY; zDue47B zg`w6R1qaQuPA83jd)S%G)bhG_mlcajZNvH9Ax`|UAU&Tb=9b`$uX!9R3l<&hGC}>q zHf=rW04Q<;G}UelDXDD?7nQdOQcAJ>N2Lq+6y?PQ$9ycHkBY_wzjKg{QIMwOb8_Uq z(Pjwy4~L8H;{05j9a2#ED4nD)JHvB;v$5M_*9g-Da9^(DdJ1|0y2|4Rep@O-j~lP= zZi9rw`cWtkRje>JTtr6m@brb2sW2rXJS8QvM+0QV(3@$spjUu{v-Jmd*r%c}%E9f- zG`sUmF946Rd+={KUC?tRYbuiKCOE|hFxmnfRto>{py(r$mysZ|eH03!ZAIuMf8i{W z1qG~4^OJSclbcqD4_jTS^;WDGDs$*IK$v0pQ4bUkmjuAIPKK=!aE4HZ%w<8Y4bk1Z z_q^UV3eX-tA-t?}N@x?oOGY~vVGZGE`M#Rv`>lXnv8dPCo|X$s86mZ4!i@D@18t^) z^lP$qp)@&`2*Tf0kN={Z)d*0ezp6HcD0Boy+lA*41v^~k$;uDbZUX^J&M2Dk!R;Xe 
zgV!-(j@_g%rLr96*&PAwTfT3Z%?;Tkjzu5i$27#=p(nWEUnd{T@#T~OvOGb$D2eIM zs-1F`Fjf>QIa0a7o>163gxj_fd*eiEhSz1c{w#{bOjfeS{r5}YzZIc|=HjT=zIrZ* zTHh{j(+2SjLFy<@q}k6x6!)dO_O_mcUDI0*V;3%pbq;dSj<)$q9p+e5Q*v8{EerW0 zUAGywDimGF%HOE)3HI3$esVJDCY^jQ`-U@xM6z^DuLv+Hml0+56G1ZPXNS0UJA9|<}Wkx<+OJimBN{B#DPiE@D;TpVcQwD7P%P=4dG zIx@S9{tQR&-HHi~sXe$8D^(H2*DoC&WH86xQY!u6`5LmwwAuH}OXm?M({2@@j1zXM zs;pELIs{%`s<)*=VH3hrg-*}5Q8WfVf!y~&g(;oXIqhpG@C)Dc$c+3<8?SKaG07*^ z2zz1UORzJl&C(vxXlpJGHSK@T2Y8G6C-5Xc&ay@E^06$OKidS0>`>=^+TfjfrTdqKQPFMf*%XJIhv5%imh zLZN$?psl~y9ueg8|KcPr%(rnk-)f6P3RR1QUzH%^bZ;V~ILo#Qc~_%QUumtvVcZ&6 z>t$5pH==o^xr`Oxwm*sj>zR4^HuHZ8!$8-J`A6pMuqHt-kcSSOtq9YLI3$iv8g?L+ zAYGDt#{+J!MOJ}FjQcM1^0G0uO8$f(g38rySG&~O$6G^4H;clDdgkOqjxW>4Uq4y465%CWEkI=Z*M3_b;+iscK&w= z(+S7s&pfuF`S`-8t(PdocI}*G(-ii(GPE$kF?qTOUdIEdNzV!pI~>~o^ti~Avq3@H zfb-uiBEX=0NinyDPJX-@W}R6(L}X+cM&z5bbeV9`cOm(}NPCIb)LTTMx43MG+3FC@ zG~qE&(j--)z*W2u3C2vj@_RtI#+pweQ{$Q@0`bZE70Jz5MiHbm+@x){)dD`SqxAUl zUJ4iFr;oR@FDBlZNmkV+tE&H-G8pZVk}-CX($KJIc4A|U`MJez7m-RazFnBew^af1 zB9D4}8OmE$b41^H8XF^@K1(@L;c>TZ_UJ!v*1^*xlGGNZa_GZpZ%UE_*|S zaCmB&7M-6elg%w$P1*gI66KTLA0+tJ6p6-&@FW0g6be5RKDv&5yk%N3W`~r3mL;2) zW9KQl>{^WD7KpGE?vYQq4Co^$g#uY)IzuGNjpep)L`cNagni_ZA57Mj>A*il#L%Lf(#-`)W#mPN-+-MyN zd+o!~a`;e?ZbLa{1IqzUBEIkO+|KJ)bD1D5EhK8YMF)=6B+niejoRF@!;!W*mI#@;W-sV0 zn{HzT!%vQvkwV^{>P2D}?OP$KbgjwkWy|@y#o81V#%|lh$XxI<_KJwkf1m#Ly(>XJ zjnGtz+2z6^cBE|+qCXE$+iA}!Oveo$C&eoJhoE!9NhLD(dgE0mNrU4!;9%_U6^R1& z%*nPag}vu{cDv3>1!*hFxzXuqsm&3Eu_!8Tuj1h)(^iWRmi|cWi&OBYqIsJ-ra#>= zZKXulZ~uXp#VIY>(5$msOcW|RymcG$Di#-nkGB@I+Bk)2Vh&PgW9)GuMvSvrsg()9 zQzT}BGW-&&J3-ktA=%WHY+7rFPf#R^^8dIx55TOebN}1ntk@oE$H96Edz7Y~Y(B3Yu){g(@ z`+H9EdjE=ezR%guIluFs@eI-|w7&)lb0AauOZ@VPLZO-Zy}0Ik0P5CN>UR`&cE_YQ zc9Sq&W&^hlYo$BwP7!DUWu?}w!cd_-B8qx4t?cyP^;RfI(TL5Bw3#6;RWUhLG2W`u znK9Y2fSdi7Me$W(a7;Q0$3^gAixpci@y&6J|Y%^p+XZPHTyv6Vcj4|IlTtik3u@S)VHe@3poe4p2pBa8guX) zZ)=4qN2xpYvkiipfmrT(OJQ1oMBIKZ_^a$a5okF=FbnKI?*Mqgxh2?oYa{FhW%3An zcZk-r)<_Pl_X(4ipSuJ06M^ud=z(~y?xwqnNOdUpuZ}+8ViA07ipC_Q2mF~fP_Dc5 
zn>3d5hP;_XQbxM?$9R3MeM|9r#5({<>bjBN=;)j|(|)8#pWQt3>iaLYuAGa;pvkUu+8nZXz*IVwVQM<|S!g zYKf6t?=S(n*S2E^`*{eDKyT(33VTnut}V*7#UWQtlDD^20y0z~H^N*mK9{Q96Ft@F z!qI*i$|*)h`h}1UX}E3nj(~R`MdwWWFhufFnPvYJP>+`KojhyTw*%6CPVm$1b^&I? z|2gLO64Cn{8JwuFh(ZyO3)Hz*q6DxIi5~7!*QAp(?0U1^EQ$}H^e)jQwrY`lyoT^? z?hh$+YZhIE*Vy|aQ799vOBee_dl2lyytaK`VRfK_xw+UL5%kWYIaF>7L(&dy@Y^1e zp5D9gbo~tbb%1iD#j)}I0I8xguNZ$6q()>bv3T)N2d{<5LEMB)6NF9Ci}`$kaHPN( z0tYIlzcq{Me3JKJ=JfmNGCJ|ieCwc%z3c4K`w4s1*geNu z$>kyt$=S@hE9@=p8;^W>JS>V&0xmjiYJic-&J>#`=!NCt9X4k3Lp+lNNqbwE&YeT< zcS3S{gr_ZbiKjw5gj#-wKlkI->Qgmar8;z_C{#b91m)i%>l7eUaujzpvjk!Esj_+B zKUFr@7KZ4^B-XQ56X4N(n2Z^3^&vHxWkSM!B@EAzdY@>&76?7(vL6Zes~_;fNglb( zdPtb2Z~)PIoE;$1k}@A72p3~xM%oAIoJUt6`SZn}5JyE=U|DJF0$@{x*F6Dpom)1? z_9`6inObgdWVYs@=)QhSl7UrCrU#x6ITK8t28DgT4VL@qMqwYb_bSWnZ|P{gblLTt zT9adGzt}3%m|Kr|_O`GODjObt!hKHKdb` zQkY&B>TYIv0NJuBvN?cwG0$pOb|D#!J-cb)=>u!*n}Sp>QJ+ZV=i4F#wqS#g?O8pK z0{g9l=oFtJo5KSHeC6j?c`D*Nn{AkA6f7}LWNIf1dc*r+q=yxyS`_vVEOs)DGKce6 z845TMDYiiG?H6?g@QDxTY?mtNJnf2(=}m&pBYBw^ZI7f0eb9?5v=Jh(gfk%AKNN)Q zGR3r`*5(Pqvvzm2w}l~`$*+2K3%z3-JFKvGJTm>o!d9m};IBpZ(ZMJw`|~+HmJ8mP%Pii*1*HS5Z{T zt^YA$O6n8OPmWI7wLKl4-F~2H9a295GS1`iuQ*ZATRbjSaHuNJ-Vg=e;^@J?)Qd2K zFrp-n3!-lbQ832GOW8D;pj*g^ULhtNo5n6gGJMtcO~p|NJ3wn6vNe{iXe_I?4uZ7( z&3SLmv91Dv-<8X>#f~V-$q(DgkQ667W%jCocau{_wf#+)*06xcui&r;TT5cAi&auv zFN!!iW8H;kXWG^PWkgo9SFoix7MWkB^Gm0m3V$UEKccc9!#V6Aw}t;LlIC5K0B_9TZoO?-fbOWoei|T7d9l7`YXr6N*4rs(FhsncvCKY!CTlXwZYGdr6NK62jJgmfS>IZV3aWS8;AE)u?LTps_6{flL$HkCs zaG*4nO+t5}v8>j{h~i&<0T+6ORw@F`EWthd^pKqDE=;U_Z|6jNNtD|zy#WfUJCQ=r z_Q--ARwS`zw>47fS;x(arP6t|0b>CFDGHrNOL&r9)!zZPlsR^dfOn)9x7c=18cU$s zdJ8k0(bi$F4G)mpGwFn_7IZ#E*NF{Pwo@dyIH;E$Q_#!iellTg27u|`*|jsQhr<3_ z#47>IYNW34WJr;P%uR*D6f|U@?NLzN+^`{%)Y@%FMe$kInb(@H3wZwrF2kZNCgjJE3@f<)GH2Lv)B4Aq>m1ATr@a-IEeD1k}!@WT(=O+$&J@aFZ~th2&=n|$uG{Q8C8zI+kGhr#AmF91)9M0u!A z71~?@T6Csnb@Q`xZHpiu&BzjYk+Hk9ul2a(nXu5Ftrbt5BtRe@Qw?rJXx=JOPJQAHgDELcvDC_GL9O=IROf3 z*S?MYTVVzvhy8gnp*!)1AkgXU<-?wfQuMmYopC!+d^Qy2BKH2H 
zG#)vIU$~zM(|yE>k~U5tEGSs>;$c?=7#Q87@Y1lyMYim8I$@7QA!omf9w8y^!0nAx zPcIJe%$x&yJ-#f!#u*jT?;0fN!-%^h&Rw=#6k5UE$`V^8Ku76BYIlv6b;Q#5wG=UR zd*ihI_DXG5lOaC>Lv!ry0OE&M;V9`*Z&+&cfz;+&>n=z|G9_jY27rb*w*LgY z?>WhQruirlyp_?1RczxzdW1-e?4STWrj^6+v~et*UA}lr>}x;u_(-yKhy64_lK0@k z>v=&9ukM3!lyty;{Ik~T#*E9wiLP}Kg}Kiy2K%92LM#F&cG)BanKw<#x7i$p=}@wW zHXRBPsjYQ(%VR`%^P_h)WG+Ra#q5bLRM%KxfV8M9=b5rJe1FbX`-dQPq;^lYe=4kx zFrhut)jtP0qq5n&B&?4JBt28V($^b%5Yd`;M!0<;dBX)MBAqP#7F ze*n29*gk6#rYi1eH=!r~uK=6o%sx{y%Wi+7HFZ`zUSQpXDUS1!Y??2y$3-AJQs)J% zm@5MG`vTi2NW&wmr7sJFPcP0VeNI?>+BMNWP?$N+wH*>b924p=PkT!YqVEs}_q2P3 zJbRuF$M*|6-_eYN?>$7IygRU|*IQwl#b7qBTxE|4(hin@ot6mcnmB;XQrHW{hZNS- zg`JydgyUJ){v0T$%4eP`AFmgrzlq{y%e!l@44}7TFhB6T)`>tnisNJLsKQ?UXjvfa znCWq&3v;Y6gqy0D+nNw==5=Me!jzw#mqc~fb_PgSE-Yo&BIv6@r@Z)B?pRotznkS* zQWXbN73=K#f}X}Na&$-6Go+!MVwSRj0Y&$nJvqO8G^Kni_jt~4{;cQdw&5o&IP@hefrzQ zIUpa0ws+rqmz4_BKIZ&V)PKtbqc1uiYxeV6A{TH|vcl#Ecybr>vdT6JdQq4%Ew%<> zIM*rL4k<``xZ=mnXaYbd-=DX_zM`)~EZu#KQkO@6&NS0zR8-3BfIA}9B+ zhAd2rPV6_{6Qm35ZMex6pbO{LPPQ9zTg%Et5+MgKLl8nUrF2y*l?{75P30S|;K5(Y zW?7=r|H$uCp5+9nD@U5SSddoXA}IR#;x(aCBnAX2Y7P=rA%V0pFaww1dE>261V$OF zSSrp1m|cQZ>X>~ZNG~4XANz-Z4;VKGQ-_snqtl z_PC%%6l>(RF95DO4q8J12n)vS>XEIvdU1EuQ_#m(X7SkkJ|c>@;jPIWuO3aOKo(6V zWgZtrj8BZnvXuff>*xl3QCuGXArKg)$Q!Yp62S}PH4gs@`CvNr(>ou-y%Ayhkg8lG ze>}4UJ!vGbCdX+rc^}TXW;iG0z?lrts#Py+{XH zpLF!_7~a3;35#JHVA#9q$lN5mz(EB>GO9TU#^u;hA?H#`_wX~UOcXki&)H^T2#>^% z#xjM&>p?wJZ6Exv63u&6n$68seH!1^zTNHiSK;WX4!IRBX#A-;dJn#K6j8J>D{mZD zr2>c)qwW`=NvS$q&@JSJ%pMIC7>_Ax$@2{tg-&o)!AzSby zw=ER(l{k_D&DBuXgfxh#V)?=TnobkhlE5&PS86v4!kL12mfffD^_N~1 zZG7Py8(+O*=&E1g3d>Leue4=e$MJsz(La3_4BiNf?P-zd$#9gU6ILe#S6Fw^H4*U7 zfFr+s5+K&0FwbE>ENYD{Dk!v^H0*=oOhS;!%cYWrYL|@_!3Ua~u%*0El!_88AnV;1 zpaN))r&EuKXCMw>(?wy09Tv08iV4y#UA3Ztba{Y$0vX6k-Y7`Ihq~J%3epI+ z-#g^_&=!l}12u>rOgN#C{wDb1AUX2_VN;`EZ?FS za?;2lk?qCXMeuPMg0M^nK^odO!4+ju5|fub(+T?Y>0(pTuqSeY%LO%e^Cf2Z8zEXf zB&WpQP*^j6-7>$@v`fZ#TDIH8rS>NwDo$-)mD*fwKN<^!(|7*Bx+@uSB`s!40+3f@J1D^XjP5D=59O|{pBy&-6A 
zX~nL6iUiDt=4msUSJa=GHuZGvjI;aqTX)g5PdpfXx_VS%$7oN8Oh2<4 z_s+7B0?f|Q4f}Z!elnnfJcMQJX(1+D@2)meL3+oXQ!P7=<$}y66dfbsrSZ~uToKYl z#)P)w`H3_veq^X-pDLyDcSQ1@qeD_`r-f;Wey?#26@V1!FM1k4eQaGPqg8QhfNXe{ zuv{$>Bp)(92kn<3?)VPO=9b$#f^buIkWcPr@A!v6;P|wZrLe+23>lb^v4UK?`4?nh zXi}9+tdqjNzbZ(|oprP#3>4MpLf2lMEH3E1$k1u-m@v#hx>Ft1W8NHlQ$(84DT@oq zOnyNwghKd{p2y3?XUd58pV#Qgd>>auKNpR9d|z+N1OvD9)A3hfeQTRg3z?OwsIyDU zq4?cWB#9l_tcC}di%yqHh7Y%n3j1%6j0m&wv92P82~{Dhv^GQ(7)tYZTS)+shlCmd z+JvCP4qoCv6!f{;HSz=TQHaML)l?(|ZW`~&o3?E@jrLtfK^oEnw+O!t;gN@siBgzG zy*ioSiSi@cxQox4;7LnRaC=f=XZUa~JYN^2b?gL+?0h;-rsF>j*+#GrxOXDyym2fL zT%Y_{1Tu2sSZkRAw54g{L7S~GGrQZPR;M6!$rWCCr9B_A99nvKpS>X9BOvp=c0`yW z3ej6StFR9s-y|-(*;RZXnll!k-3c@gE}7Jts^x~*+K5Jti2I-dVGjvXXkJ6LJ)yAo zU~nRDbj$`!_OwW|kEep^f?D?@e+`jkm-#$9*xRD3Z__r~-xT(pNU9FGqcz%6T{*=w zGUr(fq~jf#9=Cp?dxa>+GfK~h(7Nrbcvc4I(g#ZW2{0!iqWHE8js_iX7?=l(@{KP?}8yFl$zsqQUnICE&iC#h46*y)%I_NnZ4LrirZ(V1H_stloz?e7ECW6 zTHkGKNJ1NREju?r>>gnG&6W$&1Kxo+Ku3r=Wy-loMkq2x^CF7%)G#ZJb+2U0(pW#3_IKfMj&;MjdfLBqrdNYZVa&<| zXkqhn%WRXvbY7xb=)Y_YQTc3H-d7lI@?I^w1^ey8kQOc!BTe|#Ss`BLV6`ycE`qOz zqhf`JSD{8YMkMdBTf(w3QN;QLBAN39e6+L2@mw$H^9N)4NM(I?HUy^W(5`lwf;75u z>H_<^!p@VvvO(51Bzv&LW~HO}>M#Hja9$GQWw7r#VQOry+JAQc2HPv>Gm^36XsdgM z*Fu^zdncaRJH<{2daFhtWSei7%<(eto`M}*yH1dnox+~VXoZ;yd9iUkmK_Q)sme)q zL}A)2WxTQdhuhV29m7nHQp0V&u-AfPFs9{l5j0AR=U9cpFbzML3H!Bx=Ic%txykmr zpm%{o9}a}n&3}mKrJ=CPPUq@*5GXAV4=3#u_Q*^uXvFLu5qubA+`n^(=NvQs*bHo@ z*QDd+IQ@A+T2msm&|eDp2;lcejs#Gg#M*xZeB5x(7Pr?GsWGizYfYh? 
zmecJN`|&fxlDZ%fA9-kgp_K_!Z6p-d0wRaGHHz^z6lLZlaC-N=DD)`O#9d&A1ii;u z?8dp&4DoQkW80hU?R5OG$l-XC2(;`%?HW5D(w=C6FWcSwTTVbRG!SyEKq$;U-9yLj-!GqdA`}%c3n6$=P=o60yq#X+WlaTWbV-?-$u% z-Yd^K?CyUrI^}As%g^7l_MQlIE3z`erA)Pb6q5AIezCG~FS_^t5P^2(BKq)eg}pCF z>rtt{esL@2Ikv>Q3VWN{wY$>>reQ%z!hR~K$uZbU74+W;-YpjD$Ji!|<~(N)rC+6V zqAoFc?_nNPi$)DurHMmm#C&E6&>J*Bt{AQmrm@&GvkoELw0y0NQrKq$H<+AY5QY%x zQ`e;!!#&Dna>$h0!>)5FDZ(V+qB6^JM9|PxY>q57NEkI$vDNZK_ihixy6`+fCRRo) zF*a#?LS*!=Ki}#EeIoT5i6UB&T@Zz~A?3#rfdy-(eRElBqR6_$RLd2`s~CYdpc(y z-aS4Esks?T*aIull~a%2maDCoh_vx^Z5d|Z&Xv!%Ux?zBCmJ@|mNY!Gb*t@2!_@js zcG6R-Ga&O^p|%PaWEzzo>BF3g$_+ zACy1CQzF9ShX&gH`NmV_vm)D66YW+JnLxn+U((u$K#55VIP_82KLNdLy*dqh z$&ZUhfp`4ao={MOrgzgq0osW+n~Y3O5TqFVg4u!)MVp5s&$wLB+v!)0GVb?MNW5;+44xE zv(DOxLR&{3T7Gz`{anZkcr<|!YH1p?jbM`~4AUYB*A@153&epizBMmdODs%?WmEU# z0(7W(;d36QT1jl_i3pFw zU&7x=#__sPZcm1sxQIXoM1YnSm1Ig!`0s+$!DM-Ls(o>tM`EF-4Er?W1vM>lc}!X% z07n|J5b&JB-v6w4CZFufAtkQo$Jv_#RG~{8My*LuV-%CS?yJ^&G16YTNnszaJ|*0Y zJ}gW|6gW=r-EGeWNZY#OitFMH#5xDjsYqtVti32|5(eS=h46*RYwYJCe7c@9vci-p z>*GiEShisJYLV2~y;stA^F`DuzM(d{A6@TkwJSDyUJfNY@lSTOAft*~xmibQU!BCh zuuDk6X+jh9$pC3_Cv>AWqa+75Owyz{BzG#**k);wm6Ou%U3JBo=9|Yp4}x# z9L}wh_J{yww7csr%T*XomT=LRYg2`sA4gUrq*!h>pQ0ef0HrQXd7GyU*4lbe{6{X9 zk3DXDz_1beeMl$mSGQN*l+MZLvi6Nd_K_%>fO=GUa8qmLMLJ>>+H_HfW=*fdPZ+=_ z2p=B%fM-Qu7@8(cJ+okyZ3&6l`9R7BK zeObXUlIv>uptuybheY%x6yfYdCSnLU6oW?9=%j3X3x(}kHYg)9sbUl-r-Kq0t--{@Q$Dxi5* zc`%Z`K;)G`g*`1gV~lM$s}AAj8O!ZV2&cAI+69Hf;>hE!b=nH1SJZl_vdDfW=pAPb zDr4cPvbmyp<1v6%=(m#%Az$N+ZL&H0vY<~Q-A3@1{)#A6G(aX%xCj43Iz_4)FX{)J zLe(h0%D&6@wvmC>#}TbeC>$)3{~y1ZHd--c;v^>%*|mc3h3kc+{WZiPiKd;`Odv;}0ZLM8h>5vWc z1|DI)FX-GV${Cd?DCVWCjcD{YdeH45;EnH7f;G5-!rBw|v26x zW^O~Ss`9crbm-8=?iHXZXtYk{mEk@?pGn{<9D8Mm0uOn*v7+0O-Oa*ys-5Ms_&??H9kCJB1)xkZrc zwskgJH0nSeE@7AM1TZer9gi-eE*HVujD)phQ|}B(sG74r_XtNspa3pOitS`NHkOyk zq4RAKyvoQ~q0R(fu4ye)hM!N=*dS5pThr_rHcVm4ds}XW9uZm*X zjgssln;f#CHzEb#nIYR-2e>(WK>!AqA_$moABL1Co{NUX_7KT*h@4Vrc&`+J8Z-21 zd@Fz~q|U6TfLO*3VMCpjhdAtaAhzBTCw1dhwph?vM04c+^{^=ZV~B+;Bv@W=$)UIU 
zb@r|x)#AQA-_D0Pn9#W-%RXOAob#wiw~OBrRzvV?UNhcq7bGjIT++T5;$)!l%_)`; zWU9!;_^1%Yp;c1%DM5V*7-7~P)&3|#SY{)mHxg*5gGN<)zx;j3yqMNQ@Shi+>pa=UlXcwZ->MjL!?rl)cJRJuw~IvolZcQc6{hj|3BV!&CIxAG z^s-k&R3a;uZQt1I>9Bg8ur30w`2Ok?-ktXFfLJHfcD&8-tZM}!aY+{ULu&=RI}Bso zo)?CDav#QEyeCLw8duiZ|0wL;%D~<{4$by?B(Acpe+Z{)@VTWh2_ubkS=VR^1X|wi z?t3x4mbaf2UMpLZTq_cWzax0oP>=@5BH{7pY^NX%xUg%5?Fvy8(~oV*^MWu3pN0AM zx`3C63DjKsj{~h3pFWH2abenoahI(pYEK-nH6r+Y*MhQpue~M;RiUkHrxm7b9l7iS z0h)Ab!lqLbcG`#OoEVFd5Ax3)^wfC`YcPHjY_Q+m3hIVTI{(p8WB>64GJsRxcgr1Ou}+tU(6C+P(nAC*$E| z)>p`@>0E*+*9c+y<(_t<3!_R~l+H4En5`9}>Id1Go=}(`)1}@>PV{d8hJ&`gbD3+_ zP3SPu8AHuuPDQ({J`iGuS7EP+Q(BipUTD*E<<>uh8#Wi(q!7LXpYHP%rZ1_A?Ws*u zWt!uP!%m@k4AmeqZC@9K2C?4mM)Ccw05LH%g)Kt_og0Hn(5#4kn!?TdB+L~3SXtoa z=mft4Jq4KF-Md;M6xBSv?ChHPHeC=l7GvDz8{N})M{;y z(uASVsb`hFE{ZQ|K_<7@hP~LD3IW44yh%bNtLs?3AnvnQMWuOYHDIHq$c_uc2wjb!0TW^=_xLh) zCZX@iF{Nm^qbGU^yWryVvPRtWHU%9Gzr^__J^jiIWYYNzv{e z$CXH|xBU%qaIX}Xih~pmGiy(|?u#B4g*x7-w!)CG>EJFaQ8=(!(|baQJG*6(O;K1I zo(-E@WVRMY*AlyJTS$gKW!tVW3xs~fqvq&gWv7UtT&<2Lh5g5|b38FZk7?Kc77{Fj zN%%{COb8b7EQ)Hm{aldFj6<@oRAD-Es{GkV=3$3ZG{hCi-7U0fO5lZ$j&q7wz%u7Ejn$40P+ z`RG&el7#+e8W*v3cT7#v3bUdk6y~3{uh`6sr$p1-K5zG$g!5$%c9e7B&AS|u~dKvOX?Qxi?4cYWTK|l z?iKWv0ihKRtoMoFo2M4}P=2sP^d_;QO|^%G8K%*k9byX88GM@**gOHBfU(?S>1Nf6 zf@~M993U%nGHuOiqFH3Bktl5C95=}2L{BS6#=1Dr0pnw(r9%|aP>w#sQLiKV^m!** zVZRrJ(L`#U;ke?M7tPb&elE-8{SR{?BGSr|B97>z1-zB0R#v^W$qtL+Q%Rfo1RhJ? z6$#G7#}*)zN3KD~AvhCsK(ptBVM61qX?9RyKC_!|x8n+i4XVzrW!x`+^EFSJh3xX6 z5bi&8kZo0%hV!b7INi0cgS?+PJY!rZNF-tv2;V&>L~VKT9Lo<0PuH!Iebw` z(6SZs5yb;o7T05GLNa3nzt!_XcI};!W;iA2BOYl6W65lry(eNA@ySfezlS`GK_`q; z^*+&BNq77;7vlB}0?jLJ*lrUPRzJ#?^2cTihDB*TK4dr84pHECbd8UUCm;S7ei!m! z?7yJEz9>lF@jDLZhYERPu!e%RSsL@4I@%_M%#oDyWSc5Thf=jOB2z}P{b##E8amL6 z1J|DfX)n)j_P+`Z=wBn>**>4agahCiw#nO?P@-;LyM6#;f zY1;*0g9HQdRUbjd1(9QXg<-{IzvBH!!Tt!#ExNOoNdr5m80Q!QwlN-RKS@)h;DOW)eO0P{sa2#C#!4{7XxF~! 
z#n6OuyHx=GG_6@+4=7AWX!TA`=?@9Q-WE3?kyPBHA^q8H8*GNcH0^i0?UJ`V>zM@= zP4mm`iU9eI-iXDDu7cjyL$mUdqpd&$XFkHLsN;t83|dY@ja^8?EF5NjAV?ce*X^{w zE9^hlh}2;}vp^bE-*_7;oXI;>HOhV@KsUI;mnFuR1)a$wlbDN2Iz{tf62D}>Q#6Hl zXxG**{ykxD)<|t9+6&X}U=2FodIkdR9Ly6++#VD`v!sN%wnPAabnj_v71XDJ#=uKU z@!tldlJl$U?5pn(radyvgd{Xa|F)v&BgjmkqyDoHkLXu(*|Vq0C)?wqP*|#Rp)42_ z2zo0cv+`roxMW;*iTxsthq2z+RAJf}XX@I)bR-9+d3M`>dts8Ek(sb@FOfh{HAj{X4uyyU6>6d@|(Y z!=}*kMNs!5J#k^i%hO2-L+EpKcy+$zL%k zRZ(eg2dL8&SE3&TNV|&oz525tEs=Ux!_IvBSXisz_GOVZ^<{Dsa`U?oX+k8)&$fA; z-69GNMu3nrhd>xDe$8_Q!%d!E+S8WV?P*0Z8ds8n-mj%3wNU# zRmWw`hoVsjhCrqxaMk7%FwELG_cR@aoh4ng$+ku51V@*!1+Pvg$cdNmAs-V#ecHS+ zQe3XI_eEheAL(iTR?vI&D0|8gr-5EumlzJ+bA^3K_??&V@)S|Le(cDx2CWr=p5D>^ z4%?xymlor#f4?BS%SNr+E^lg$>x{7VU4oP|Ty|{ZXutLnfy8WH7h1l6*E<>qmc{ut zD`c0iVV?Jvh15kc?i}uJCbfn(h}4w{Q?_pCnGCg7B?28DkYoEouw<0|PQkG9SI@@z zG7i$@g86f2pnK)R5}6X;j~^puZS?gj4>WL_rlnSiuXKzvm%SQZK1R9Xj7sN&cpNiy2A zg1)2Vp_*Tum>m^KY7VHvA_ApzQ!O9Fv6iCbp{+8tYqI@Slop;H-eGsX=SAybkfNV# zfqN*Y%xVM~8@*wTvp)#=;HD~frK%=dlPGX#a?Cr{Gcc%Yncmk`YI& z(7q)CsnMFTYZazT87S29Y5q(w6un`E+`kML#T)ui3=47Ngcjs&%aL&XK$P*icI!M zmx|;=h)#vHt0voZqES!u+&JOa2WT?|`h}|XvT^N$Yll89zsJ3~<=H_SD+c80^y{p4LcGVvV zd&#}^GTTp>d_1JjvjGAvx6FD+VoBZ2e|TP-#6C_5@8+ zC+p9_EjCodFk+D-BdZX_s~>_6kG&{N$Jm6=;@>h$i6D;Etdk+PNB`vTydBCRcOdVd_nwZ==vX7_&UD~dN$ewgeRpGpz&-md?v=ZVhak)%sIQRujSjB}D4 z>pP?gy^|vmROu`NgLP;XvWW^aSdC?S8_O0&MvCgJkBH1)OuZKraGBAag{q%uG&MHj z(4Pq5E5-B2I8nUfPTbAr+Y%Ar-TQN<*`ZKI(_!!Yhfg~`kh z^%A>TfPxvUJyZphf`6mhIO0x zrT>NG9HZY^RwC>Zb8Nyk2!#^(3KF(Y1fSo%^0V+G6e*>@EUJGE(Z%&X-idZ0zjs1( zc!plGeWs6v6+JOmkseEyHkLa?y?@cSC$b{-hYebE9lJV!}(;a zAT>1a#QlfDKG=N`T*!%!v7I4$3_IBu(mC@EZLt%A-f{d&XWID?EfH|mp#S)oXj(Ly zmoaN6;FWe|*?vHf21ZNZG)dk(BZ?Q6EJoJ$rH~<2F$Kfjc0>@Sj$u^d_EA7NY2auxmmAEtbO8 z;Mb3oniS}E=i28!0Xdh3CyKdAFS3gRDbmg2MUngPn?>>;GHP`R#cra(l59OmcNg$> z2D=kIL~$mi);()|)37(c#My!jcE9d6T0zQ*B;v*kcysu`M@z76wn+4>2)U{~3iGw{ zQB`qIR>A-G*l`h^+ZfBZ)7}!MKrEmZ+Q$Ok@?1=su<hLPnJE}b#6rA1E$rMt zl*nEXq=`6t%Q2LhPrRA0_ig%IiAgbCSL-XQEXV=WM^McMNH|fa2 
z{YTMg-)PP;_Qi|3MHR4bkk#001OpE-8HP3ATS9{9;tm!DNP`gezq@IxUls*jw}4NGI* z=Mt7B3}g8*#rA>3OP&fTwic+ZYIK!asII1Q>9rDw8+jw}hNs`TTmE3#qAX9<^<@+b$-awjW-@wCO6KJxASy z$T9IOshJKCkrEXX{UYWKS=ES9u(ut_^Pl<)M0gd^l@yr zwjz2TdX(^zVqHa`ilQ8D8?$Ukx)4M}^6Z$f4?$5u!=}j3{qIDh-58V3w`Kt^5I5Hx zQQu1w29_k4s2_>oY-rcMoz43~Yl*tzj3iNhEt*!lp7Aykk9gY*l$cE7{QGM@|s-DtXf;}!e z{pf;dBZ^@{8tm0aCm-c)JiyfE6{#)dc3jY_%Fv6Gy(WTB=r^U+Uu9=SVMVy%0s9~w ziDJ8M<-U8Vm)TIJFbjBh*J;`PY)4=G$ zalWk%Q0pFAyDjjIxxd?kF!VnZ3y<06ZIih;@?UhB=f{7F-p0QrNbiRyix9BBR)_{Q zZQp4RgoKfF;BHF@(mtj=+whR<@cI~fLtBKL2}c*}&cpsCiZr5Y7fYZ|9+hxMi>Gho z`xe+mUn17Kl*4m^T`f$l7nbbuFg?#1Zr6$I#Uc<^#3FfPIwK|)WB9cs1-v+MfMv7V z1_qd_*}^X}ZYn)Cllh0-*$zQ^nv|5HBOgh8+^i|K=9bLpNMYao-YG zMq(E@^ybL4RNNj@JQbXk86Jh<$=fwnrXcB}GczJnRxW}wiOWN|0-F$W#*0d=R$=Ef zp6v_llCOgOn{%nZxP4nB*uq|sOWoUqC|?cc-x=0cgirzpO=~CY^<`n^3Jb4^bc!*u zc{54a>(DI|s$%x6NPNpK9SlQcv15yfa3EE=LdK3%*uFaX`$UF z;Ok#>6;+4Xf*~P630_m}@pJ;b!PoM$QER!PiB*xIT`DXWK|RG-#JfN&vNM zdxyO%sL#G~lMS@igq?fYLqbgW!9(pa1*sK>gxMSrRtP%#BjuqPb|6iW8~_*8heYug zoJbOfJ82YO*tTd=)4#o*fb3y*vA3#NV*@nw$9%qR;+AQ ztHo^T!y-~w!xj`4G6ZPE`Hkf^PGN667asPMV7O(I7(TMHk?0r?YTU-pu{Ku`?~^z_ z7VpC-&)GoHe3(l4HMXx^-HHck3wX7#b2)lpjN4pMXuxPx6Mm^MEyLS-roALUYu;GI zZfdE$9OCe*+15IJ%QK~FcUY0aUSu&_zuY1_DGJ4)kb#Yj0Ioc@#b~P=PZyR z?0KW3_C8!aLZMbU34C?7oeYpeXEnc0X9a0&PSIHV`n81pTa<&Bh5O#Ct+XGC$Y7nU zlC}+t$xsnmn8fBpc2;JJMjhIqaWm8q;K_QneO2~yD1s?e8kr_IDS|h;UB`Rc+h_h0 zO!v6n&bEsMTE2^MNxNRydsK*K8d{b2i{i8C?P}{5;#;0LDr`o&gd?@gnFGQUkMB{W z{{?(XWMMM*cz{|4*{r`S=#@pLzzgm2>pb)DaU7}d7x1?AU_rC-!X%IEm@mYjxy=m8 zv$>eHg90#@Q>0yXy(fhUI01%jcBLqxF#Q;OQ&`O`=8O4>0L^T!-g{>0cpH&UJG`M5 z>F%@_c$6ph)zB$%Yhpr;P26){{5&($xuzAD=*Dm^*5fv1Ak1Xv%EqwJm= z2;O|@byOSqJ;Gcz0UkhSMSm0CE+fNQQZ6YMro=p{SVO&sm!TgO0PWs080?14alchN0?{X_(x$nDy7uz`YNnmh`PO2<8v zleEJDMo;ex?BxI%FKG&V^(J!C{%mAb9#Ggnt)u1EMaX+oq)WaX!mv#@Wg|cB=Brv(v`iSD=&E=unplJ||is|JhG^c(g=wC%qG81P)3R-|z^7l;`0Tdvoo=!%p%f{Nu>>3z^wIJgHh%Np7mYSF&6{qQ z-TtX~*vUD-z9d>;@j?Akb3zcSciNW~LpOC#&!c%4L7K#&EM|{{xYW8WTx44!#O27h 
z^{5cX!d1m>a7loCP({ctj}_$eerS-*4>3hZGVrGQq983jyb3AL?|R*NY_*Xyx>PXC zfgk70lA!;;_g z$cAmjhnCxTAs>y{Xr4Zf2)FPR-qi6mM>LANxqbVa+uK#&Ck4GaRBvq*^zUJCiG;vL zir}RkUc`q}Z5u-w4-T*@1>tpm!=^kt96%1@9Ck#&OJ_CZdgz=8 zux3oMFfVIN0>#B{C}p+7IfCiBLzw}6(NDU^|C(+ z!s$bm{p=Hkshy>Jp>8eh^KCpbNxffHn0hexA<69<1?dBC)m?2yOB}LLcu{*kK;{H* zn)dh)Na}6SJw6XUnQftXH;XWIm1PMLXhkm9cQX~HWG+M}+lm0JQb z%R!bHapof;&`1QVk?FH{L%dEDyczNaD*^+_A?PdZ2vTK1;TXGIVfC$YheVg379>?a zq|y}hwxblX7rDrpqWEkXEZgm}V)M5&jYTXvecRXCla6jMp$_7&(TkCS&-iuPP5ej{ zA4YDBa_tFWnt;Qk1ZzuC$dKBwGJ5l0<`fdZ)OPLfwy_~S@UFYsI;1u%DdyXvkeN6{ zJ9NvkUlcl<+Es6#?LZjjYq8_NJ=2~D1d<~=2U{b8*s*<=YBKtvSrf8}EeT zjn6{>)NV^-q&nl)K8=xZYiAD#`)`I0=!rP@_af1vVk9M}D9p!3jAXBcXdj%^>8a8_ z5`n(y;Ao!+grYKY#>5IsEpZnlnkz3@)l+SSpr=6AGM;;Qja7@}J&2EG*O+gA5ygAJ zYeNDjk6?zp&x7_&1*t9@*4gy}K2y0Chc}Oj5EhFXnkW&M$3^pfQ0s=w@CI!I#IHQ>!;ElRh;)@ zR7s3W9AU3#BwsC??U*RkgFZ-XWU*xn(ji%AWiQ2^SOjlp@fcf@&d;WTD?QsPinFtS zNpY5>shdQCSv}FVzlR{hSb{00hZOd<4919t?GkoYp^~_RL(C5A)Uh>9sSHiME==p- z62}tzgCNBr`DEu6reX2yY_=Vj-`kq8k7RkS5~ic>a)av%yEUZ1wqZ#;k!L-Hy<aty7rN@ntzCW-kkcUmiKj-H0!Fbb{@I=o+qJ_Mw74L}+)6 zv@hHbrtawJ81XT-Uf&Q2#zwYesvFDpSZ1IwMx@}$*K$OmNjP$n5&Ri~G#Am_O*UJ= ztLT)6Oyky&R_AEGm0yNvf*1;4_w+8lFqq(}J{B>$Yx{_^-}piAdAfA!ON# z0FpU~tDZLaSz98`hut}JVs@0sXUwSnPW|&)6DjF{m}lUu+P)xs#Rxa%#pO# z7dk;?8oj;Cu2Rs)qqI;Kntmbz4P}btCNYmM$h<7Z&{&4WgjyyY9UmVO(|qiw+7!XC zqDFIsM$S!V;Vn1YmZz}#ceS&H>DVsF+hCw^pC~l=?8K_(9m}n6SBDHsd#*pn32InM&_J;IbYu=H zUeiKb`KtNB5@OpwAF z%ct5c3NtZJOt*U!^bz8yqg7sO{a$-0BS5lC+6%(8NK3608@#h3U!Y zaqM^o2~aEsA@RvER>%hf5z#6hjn|4o(M?n8B?9_OLH`De8+I3>-jQ!FiUwOp<=EQ_ zQcW>-Tfg4R!|&~>vu`U*DZ#C7H$f`JTnl$^j|%z39ytciWD7e7Gmn4SLY}yE&IPioHVgG zD_z%*r2LMw2<~sti}Xz*lKCjN6K!fp zK>En#$O2m#67a2a^s?Ux()esSUi(9cOUa_hDmx>no=Uo1;a1O@v(4V`gKbXcL z5SJzCFhRb7NBi5Y3Npln4K=n{VX8niXp6li5avr{J6T%wcF_Q+KF##@cpL?`K#7)u z4X4JBw_8P_vPev0h8+_OQ!JOCn?DB}ADPINm}!@R#8WGF!{XLMK%=lW5-EL9P;8T} zmBsYd`UVPo!hG6h?D%DiLJyj&R@;B2V|7o&PwtvPX_`Ch%)~XP$`{*OQJ5r1#j$+X 
z2E^O-Zk&w?d0RQatg+vI3a>;A;G#jqi_x{wPP-D9nh|Bgu}AB6v@*#Vz-`Pl&=0jKFT-ScS=(DU&MG1$@U->np-Rqbr#Cp%fAFP zg*NiSFy2ar0V$ZS6y+fjG*OsnULzLat73~Fy^9y-#PXzXQ7;0;vp=Yi5-B&7dqw*6 z232}0zZFqaGZ+!Xw~{FSYlHRnYz|A9b`7b-Jj}T>ZhsWTJKCube9=Q-+CEImkaq~s zxD=PTnBu)h&^djy9#!={HaR4qyljM%HwnTe8S%)sSB2;cKRE0TxB}_*a41eH78r&U zgo`O&?v^VYj2lEI|DzpCRkS~UNpUnOOKL|=0zQPD@{{@FMoQJ9-6IgGi3bZEfUE32 z5xk|1E6W;J&9Q_i-p@!YG*_6)Ga9zpGYV6|7;Ph87NE~u&*{eC(nlTA1=%L}hG1Yp z^|9$3wQd%Lx}!%qEq31%!E5S??;mRu62POPNaA)#noyt{HY*WM|J9uAY8;& z6L(ZYe(I6Yv)yt_2>Pf7NpJKkMRT5TNK5URkChCo3i%@Sglc<1&|8(8;A{VP8uP5# z!9Mph;+bc(vU`w9{<280sA=s4GlfIH=S7>nYen$_GaA+u+Av}GKmk}wW+v?vLBpmU z+OGpZH$UIr6QF}V(Dcc(&pigB_hcN7q|>#6)Oum+3LB`3#5Plz1Dt%p*$7bu|j;+!hd-CTH z;ZAhHiR)x+Th0?HaHNWlVXysG6n!nakgl}{9tYA`zI3*VKM|r^XKI$&g9>|(N5#h3 z!vUtY%t&osV|fAM2bDkeRDiEeWM?woW(iWx>H35AvchmU6ImHM8X``W@3rfmAfj~& zmfB;&G^K02h#$3bA$k}sc;#Fr%Yf@|MWifxILST{@CJ_M_MlS?=*4kiz1VsQt1pX} zTb9DgcaV#KPtpmI-a#8HgUBGUA@v-jm02Js@^dBaRaEKh3Z?L(J7aLv?iHr!rt%fG zJ{{Lrw}*R$o#TyDm&k8XLr8t@*`;W#eO${VmuGnUj^uNs(Nvxxw+VGi9-peFBZt7 zdymi8;m&%A?v=4_WLu`N`X9M8vfm3j_aYUijrN`>bm8zSeltHwN8@*AB%-GO7RAfU zWpiX3hlAme^y>IA+i6uI__EW79RhX+=vL2ch;*@E!+I-HI9#dfB6X)9LbX@1r6FCC zopFuAl*~Tdt-v1_!N)4I1XX6v3x5`g-e`g2o#$TxMw6<}Zp$IoJBRACOpbIBMM@j1 zb$`PiSn8ZREhcikNOn|Yr0>fkAhHr@nSajyLt94Mb`iWW1&KUt%nQSV=0%lk@pS6hbn%L2q_v zF-W+??J5CZbcQAKa*M_wJDYE}ha_*x;c+Jcel)uEwcZM9Igw_# zez~@b;2q7!$l@+R7|IO;AJZNo8j-+*`UeVA$ZZ|$GsOgbYG7A`!{8T0@CG7!7qhI8 zfLhqsa>G4V1h3^me)6UX!fzP>i?oNVUKC#>J7r;Yp5OO|kVki>TGrA<^h5sQ;*ypk zFcCis8-n%)QD|U~gqec0J3oxe)FsV6B$S9Eyl>0}Mg2_vDHLUGe1LWiDC8B5JVuGbRM+E1Y zu4Qau$Q2nAL-`tG1!eZGh)ft3*ep9G0N3bAKb@-M_JJsL`H{i)xiJLcaAMrJmzEaW zj|F{Ja6W6Xu=}{EG(tZ@(d&Dj2)>Lr)zzLoI9IO(6Gilv4Jbj`ZJsdP%tNfCQempW zXy`Qdsc#9kta-TZq}_E`%d=CW({6c*w~NODyu5f3c3#^G6FDZCgKnMxg*R>AZg~n* zcqBi($JT}@oGsWcg}py~XTzBvy2ok=8Q@57-Ynh}qa;6-c7@32qkG7*PiSBc}C;HIR@;f3DLrR~d%fi*kwWUzt0l)XCs#>-nEok2n zgQyDA!a;GI@Qmpte=0UABxjM_&JtNFNXxXTI9b70Qz3%SnIo$ad!1>27lk3`W`whG 
zBwF^IUH2qJhJ>5aefDDk&(pYae&ebM)}N#KWR=EMLjZ-@88~SY@M+DR7nT}@X*YfD&U*PTLDgG%AmzR)KL2!U zf&rWbuNCxfkA??w_azI0U27>(d^Nl{EiHl3u1ttyxKY;Bxp0 zY1qk5^^!7Z&e4JxgYgw%eGAw$=RWqH0P$aw%a8bwmRLPJ)Y(IVw0T4c2d)f(&_*;9 zxVn8(6fc`I#xC0slAqklFT&0M@((R8vOfq=D_0A$z4)OZvk6mN4B9u#LE_93IP+Zr zuLWtV6&Pz8CJKqMr^UB?*?5rmk*@%GL$>~35{(oHcJSjgSctmbJP^H*JuB#p?8JsT z-?oUr9QXm&ldBaDrwf-+}}M?LiTI-EmX2`8GPFVHS+w z?lNh+(gf}xY3J)AL^e7r+?{dsE4vk`Q~Q00B6DQ=38PKbk>oOR3X0N*$s})`{V71? zDKD(tY8Ou=7Umt=l3iq94xn-GjAGjnz~MC<#Tx`DMD{jWPLBsjiS_6%T@ZAp^5cs3 zB-_A0hwO~P60=F3oxS7MBh}VPkV=R2;$dfyfO89x#!OzGW1{fE4KA>a3c~DmciwHk zQkY@l9hF1#8$!-O78NE&w%s-v5+yZG*b(h#S6f?Am|>*_hj*|6wJySRLk0)fXblXI zM#;QPamXJ%hK|SKradhR)xW!N9^OqoT!Js?`3m!M61fFB&{#GRfsV#9HjJXv#F3bC zTW5QPeCia*{G2t4Km%LWJw*CNp@QaxHFjEI8pjxR;VkfBfaf=CmH6)$rx5Er9xhR4 zT-DpxL?icmdsf)@6sBs_PT6bTD-=F0tYg>%AvSjme$lN|h?-ES(*xSvkQm2ZJe3#Q zCSf0uRK-GUL)ngy;k}x&_jXRSlK~D&Wan^m@;5<-b665V&CgD4tr=OEOuHxG{1UvQ zz=KZ0zQnfpXRfeuBGUMV)pE%3tPth#yvfWHP+QqD@pb+tK$ujFZ?iuLdTGtm7oMp& z*t~eX-8>DVcU}VPcE2!1TzGEZg=d#ob~>6zft~i^r!eB7_O38oEo9gHgXx|%a^GCm z3Y#kD=_zNjVl|!{^bw{uOtF=sc-5U_{2~&En6hX;+`~J+<=-*91DRZ0k7sabd)t1{pLGLN2f$cUT z9eropn{9?*SQ($;T%*P3fb9{LD%gSTv3&s`GdIPKhwKu~<7(sgf-C`@AF{h=c^=GP zkG1;*;3p5ReDKc*k?QrGwlD+{j$fo8<2U3Gn_-2$m6>t|cUIUt(W!4&`yk}%-^X5= z?YT1W0d(aY&xLpZ`?@^4S(sd$9;!Gu+$u;f*ptq%2LxzC4=i*j=-)d($}n-Y2&_@j z$xcQ@_SxEyMgm{D_^~PxyoDoQT8hyWVOoEd&G)ZEVkSFch&%)zO(%X(_TA42(~?pw zQQKGN0^m(SQZg&|2~keFJMXfD!nA)#PQz*}P(;v&J~ljo*rXLH0p-Yihm95pBU{eU z%nG^rmR6%@zpMpH(SNgjcAnRBe$FJkqI$Rq+b}&87SCMJWJrkWaQ9udTw$+2RlP8^ zv)opM*b%InO(FJF`TXcjcZdC56gtN986vJ7p8pnV)(cSU`HeHr zkFT&h<^y3u|--2tS@QL zg2*a4B3_b0x8IA-3>f;T{X;?8Qe47g+P?*8w%pMa*=H7b)Em`Ef8HTLmLWr}qk^

733rhj-vSr^ism};jKr0ASN-b6DO0^neDw2SONj}wJ_h?roc=eQ7sGI0hW zz5crZo1dNAylAOCy2y*l($2L+z?0@T?6a|g-sv$(?r)wHhJytS)wWgP@J+AbrH*@j zo=U1h0jI|Ak_7fUg0vGuX5;OY0IlM_k(KESi$Pvh??K({azXe>Bl$I!yMVoHOZVBg zL~r5ynml_>`2R6=9)ML=XCJTq9Ict7JTjU;yWw3ppsM?{0qTtr(wRTvf!UQf0j3tU9cUOg>NVHjwwNTVdXQ;>9c z-$=GU3DBhou1BPynq%vIbVNS{?+C!u&~Q$CgzNM7h56y(nl13PxT?Ld%03p+HLz=5 zll|A{p@B%_qhqkpMekpY9mLIcjfV#|aWHJKTLfvQ%--zZM0~N2C;L#w{3#DPi5K$p zvQIzYE!(BDn^cf+(4c`Sdqw%MmRSd7)rNp@rf)CbQ zOAekoeP`o8HLqzRda(g;uf6|KleF#Daf{z3R0-w zw-n2V&XXQ?Hg4#cIo}otY68?W1!e)3S+ywYxcW-l;A8nrWayUR-Qu%zms`Y#{;D86 zCdgpa`vUHxYAv-dgxm}snaAyiom>gQb*;{l2v}r_Lhg)2uBE5==$zJy+=*P4q*$3K z?px}&YnlDcr{?EBV!y2-4lX;_Ot&i)b_FBxC;!eZqL7n~%xc!RzY4mcVWhI{C1E-( zFjTY1U%Pb1N@4Gv!5zoHMS(qRbWXD~R|BZ<(AKFoSYcP8Os(KwXsjr%hSA})e7jGW zu0Z33As8Qv#bLGJD%c4I4bs-dET$gqLf?9qpJXbbd-6ni=QS09=Q0zL1_R8we)QX3 zL%_8%BalRktcBnP1#8rktvH5LLkJ116F%qkpRu`#S^v|WO@ksFqXUQBXFp%pT@KsQ zT)R-1iVjpt8=N2Xv{(X+T4&orVV6H1gC1La`VhuEQe`dS3lU&DzO^#ZE?eJSIxf9d z+ttEk#3h{MfAWspES0wRN#L-t8l{Ti6wy zlZnX>doh*(i&_|V5_ZeakQ5tR*PR@3eO~;kMbLz5Yp%4d0#a;-@&xj-71kyQ8+?~d!85>Du|jTFA-U)(1-5VgcLOPDD{?0xJ6C{G zs45@1lzRnP(14@4<_n>V9JVYG-89YNMps?%azEW3@$hijo*;}H}jgbgHW19#&IU06bTePh2 z*&$JMK8C+4&P~JP)lFT~m)m)pq0n~;x&u95fHikSe;cNtrjvwJ3++ikSmHfNDoz{m zI0=D%#f1Ad{xVL2pj2Wo%d9j0@{mqwij==n>=J31~HKn1>J{r$0*Aba`SVz z)J^vYQ}_vY^eyd$3vHh-LB2MSpF5?AG@9UZ_%%LVz>RXWacgt~hTS&U86v_#ejZwv z8TOEe(V9){OzaV#KfQHDx|Iny1~Bc#R#cdV94W1_cNKP@gI;UGb~u)J4`z?;s87%> zu+wWRIbe-P!WMf#fTG%pEA0t|X|>!~<7G#PO0&F^s2RfW8_fS2ds@(M@Gzt*+upWQ zGztr)=h@I^mz^FZw#ZUq7#YntD-d>F-2Wb1;1SCn@|-`}=E~_@Hp$`@rds{%9Kg1e zpz8`Gax!eEuxow@vEZ=1;nP!Z*XA{LFqWQskC2_co%DQLL2-zFv7@_0BxYR(enT*2 zz_Dk5C=|+8O4c4{+hCuhtzd3j0e1eHY^0}9jdY&tCR=8VGFq#3wml|_>mnxRPGq7R zZMtYQhkV1>*e>>&I%=nOwCvDp_{vx@(VCKI4bF3X7TL5a*oQDv}e|-rujKYF?U8_qF#Vv(AajdgjM4=JL zDC)XvfFOOFD4mRr0hG@S?W1Wmlhgy|?;9pZ8*kSz~LZmPTA1c@YBrO!yyN8xbL-VvsoI<^;d zG|aUQLE6U&dy@U?Zve+4Ry<4j)PExisXMpM>zFa$F7^o|>+K$<`ia80>x52MmstN3 zNp#Cr6|F4AqbEt|5;z#l3Lr!urqZ9;J}oiK0qq?oi9&r)J}g{k(*@nElJyRr6k|C8 
z+w<5K6@~KJ3ZAwXz870RUv8}4_61-uH%J)SFJ=dJp(wNoe-_Jm z6u(N)?+)~8Ud?^N)jmm1ICmmT{Rm%1w6-N$yGG{69uS537pSt!vQo>w1-4W~O*&** zxi7Mpd?H2%u|0V*L_fU$%OrxI_Coez)$K)v_P8kUlO0Y*lEO+nJk-4UP}3rtB}h}R z>4gQrasfBMy8MmUN}pKvTK8|WRX%afL~N>8o&YI4&pa>S9}T1gfagEXHv2_&ofF7f z?zB^0fx-xM%vx(dRalc1Yi@xM#+g1cr*)lOr?B5K6ss^sxxvRDuG(hX6sA>jV#cy` z`W_(P=1q^n zi>>!Rx+_E2C}K&%bo(D~u`~s{d#Z?@DiDR1O+W)=iNbEm>6c1IPx=2M{DgMrk{mEK zel3b?&zL+em43S)OnW+NmmYqq-p&_v)ue@x)=sheM4_6&1BTgRh23zD;pG)=e-Y@v z#Mbp`w#9>8^N~^bM!P9goF^iHz0MKgkM|j_;grHLzjrP}oh3~DoMTWkWf!r~mq@4dzM+S`=i}M& zIg?RsKAydKC1$!02{MK$=(KezOgDv)w@tI39(0HeAkFxNkQ;rBI!iF{N>Lmq2b!5a zslt>bhZyS=hGp4jwO0jP7fCiD(cX#W92d$P5H~IEkkxS>B!4avtm%abzdRQTk{m-_nYLMgI=f1j z+YTSTDeeY)MPb*ATp97>X6L@+N(nYTJFSl(84qu&vRf7QLtVxp<3V9E>jzC*Fje+{ zqQDiuutoZ~5%kFLiD>-Za!%qqEGJe>d>(H(#lmzjmbQ>vT`NR|Bf0B;!{?3GRC^QB z9X_^g$7VbET~hi@Km(JhPYZ8hhCj;5;#qwCfOo4(1Jx7b+X_9*_>s)9Nx=z|ARHlF6}EBEZMd!l&$A zg~`aZ9d=IaBSF@iNJgMCl`9GNlizpAJDRuHj}<046{qAy>{o)W+i{jxrE%)((eThZ^x*u&n=JyUE|80b2OJJ7> zUZ~}-skwddA;oC|=pqJ-wwvsSQ7EvkwS)!n3_&-&f|Kh;yHXUoC9-?F%~crA zbX`HI@}By>NVE+pzkEEP0w`?X9Mx!J1iyDh5I7r=z>_5@M!$w7GmmWCdSvzFqbp}y zmWV2SDVq+I+qnZO5sAh*5jb`s%S51tL(Qd!H%_vxC*qrf&He&A^#d2r$I!I5!G;OK zd$!@%wBO1nI>Q*^+};WNmq^^1b*n`bxZ*B$6O**T z_K4sHJU7I<+`ojWwi6dLL)!`SGkfh-mZzYbCdW3F;oY=5*RA}=HVX$m-D00A?0(^J z*vsz-dy{}hEtrf5o`cUg)Lqn_!=M%0Y)veC99Dy# z^)OJgsUmi=kfnhmBDmtCRV(z8{E5#IctKig|I>_*P)bn(O3^Gf1UkZCnG%q8QW1?a*-V=c)i%Za3?b4be-q|WGu zDqDRbTv=>iD$G=32$9A}&*Nt=?l2_H&B?)Cn zK+4YYf+IU;TY{jg3B81Jd&H+jz&q0l1>6c9D0%EeO$bA8qSDJfd8cdf$>Swe^;ZGk ziCAahaC_>@0 zOVISsv&RJ7M-gm9SFvYUAR1l6^>Z07`j}R=H6l@F3c{g?Xb7>?1hU}soo_okD5BJa zb+*C8G;h3`<5zV-c9Zn-xHuam=w?wwt_WDYTSaml^5dmV%&$J|rbOk+YK5b?QWE(! 
zxK0UNhbJJeV|NPEq}J{D1sNyc(hjAC%Y9r5!)WU~;IgLF-WFgCdR*Jbe*KZl6zCD` zu-<~MQ_=Y4r>&2VXYwRr0eGAsS&x@3uw_1cquZw~{@(Pd;R?7+H2)LD$E8~ccWs1sRAQJnOpGQeiD`Rf`$beEFrc(`rtCi+ z^~qprWOjIBB#?Of#1o^n4Z+VW{}$R6|01vJ%bvaPo|h_2;}AndETCA3zQP<(wk;D- zHDe#?JY~mBW#GgWWAam0b3%#{kMbf^%EbFO$)6xV%So9Ecuf^aY0HSHEp`V>st zusDzW`GZ0(JEl6NgZGdql+H76lRYUwS?xtz^x|3~2!DDn)#S8UKAx@^J7kFc+e5zZ zV;h#+k3S0B!;2umui;r*dI~6SK3s~6g8%VD4dT|ToTZt%C!XcN7s<&hDd*(4` z-%9Zs>-TMUZl32N{B}Pm3b~W8oStcOgs2?@`3SNLFdX;aXNMGYzj}AIHt|D{63WMZ zpr~rF-ej;jGTqMo4hr29oC9*~VnGUL;6@I%D}_kjwV>j|IopplG}+CfP~E@~KP&Yik9%kMk}2 zN`vEkm#E@3JwB8WvXg#rM*ww&&nJGCAVnXUUS-$&D7jv*+KN+8qTCP!4^}uJ_mp5A zT-9DY$r43z!-~i9Ldz0{AL&Wh6ai}PS~EQ@TO67VrG&PZZ%#9J-`Q|Hcm z>{SKb>X9E#iJ(QJ&)+Kk>g_R+-HgzgSHCBnB2kHK!)L!9p(QCyzPcy^xLT`UvH4N4NmC~;}bKl zbW^t7KJxL|dhyS(uLRxG0UHp6Q?NNAJJ~<~u`33mZ2X_&VLw{Wh+HN}V-L09c-V)- z$rJ1`g&k`+x8;j37XjAtGZQCRrGOj9h~5@zePogpn6?VQH#2w~=a}a_j5d{H$KC$s zlj7z*)AkBbI~zCqMq$e2`BXA6cGBst7~My4We`A;_Uhfk{#Q}1bK8n#ti~{_e1f4} z^{uPzT%Q>py5`NcUV^lSj+E$Bjg9stMC*%WmjGWG**=LDfKWU?V~c$e!5IZ_3?35C z`>wq%NO#0*zx<|v>yA+&Uc>(QzhHQ6o3_A4DeQV9B`?n&jKO0K8!cXt885m097_|T zP>e(5Sh|2~Bu8c$;+QB3#b$7PZ&sLM56@Y~>#-g8Ipa|%w(oqpw!$fP=^3ub^e|m| zxd3b0U8Agzr>BG-e%R7IeRq@1Q4og4&>4d1(TI$jZ(smxxI+u-2G z=9oyXnG={TU;1LM>DeQ$mtFZ2BIVOmTaFFEMl$B*%lavnh6X0w(S4a_w~Iny>6pl8 z2Hh>_jhr>;X+#SB{vCRZPs*?i<*{PEhtjG*D&sjp#+{=8_Y<6+UJ!vU%t8dKOJRDu ze>h{J_4+BmnY+=Q6`O7C|FI3EJ1py?Sh97NZm~fM!`z{!fHDg08ixdCdH*pMg(2k(Z;>7QEkA<1iFD9Upu={P0`7 z2xLs=p-7I64hp;Z60MzPUkbugBDaS>JIjS5NjY|>!mc+*=Z2H`m3vz8hu5U zE=Uhaf;Gq1_(*sNJ)CE+d#KfgUy82;T`f6Cb@93U)Ep(c5uKKq{A<^XLV2M;)pL*# zl^w2{)v>MF3O&UBkEHeA^H56!Pez9XnO>MB;=T)m8#vSY^)I2pdA|YuY_P&K38U?) 
z_JDvJ+5L0vsF0s_@|5t6u$w(`z53b@&H=iKi)0Psfq=qwqEXtug4gS8qzAZE$+XH7 zAQd_M9RluGIXnDtUW6av<083k@Qd4sbD_Yc45U7!uw$ji4L8MEUJUmShw^NeFsxxf zPJY_H^^hKKTei|}`IU>MCJCGj5_F8yMcT2fwtGc$6~u?|fA@gT!l|#G=o~?~3-0!c zqBZkvi6}GydGZ}R)?qNnDnz39cDLyC`;IRG9!3rwZeIwxvxI+S!%&NTbE1@udI7{= zcCr2RJXaO@u$qbC2^$)t@Houdo{bfS8;~H&B-yNDLrrE}&&-V_)+Gox7aK*<+BVVt zuAn|fi4*CT;(}j8U?u6=T;8>`vTN?7WAz1HxW2kXbk`k8X_@0~pic)=G`^9uYJderj68|7^L4FhiOB!)aV(SBOCW z<=}=2$96p{H88A7XWFf^H%YOtG(Hd9m%I zm!_ZmhIkmFZVa_!bw*m~LqvojZq(#)837UzTrXVSv*#7|SPWyZE@V##Q%f#p3AhOG zXf#StVXGHXSIg%O!Zr9~bp(y8^k=P21o|#{_Xhhc7MayrvwN9+?IUy0;z;@}3Eg)$ z2C4Sx9v&>ve{Hs)W0G6f9u8A%3i^r{obT#n7qOJ9(`Al8WF(Yo|EI`cylRjNz-g+3 z9c42*=Wn)h4w5nSy>M691-+b<-tpD#wquUeYXxOkw z^&7KRmLswz!uLH4%@fhJZ!oHmANVvB)={~`8-}~&_pUtcOPRgFn;lO-*NaS6?S?46 zpNH+mOLea}Qjo5ZimB8~i-nmncudQ*7X{o0Ih;GyY>q#PIrh1zYFpL(=;|pP-0kKI zTp_T7Vm5Zsh26A`>vIz_gY_c70}>vrCAQT=ok96!`&%qKFMkR4fiPd=!%22jLD<<} zwSTSs)6)kIvdb=n?so(j5Yf59t`v>_k2Wp9aVlXqx$h2HypS9JK)o(33_H>4W%3I@ z&qLV1oY$>#L4T#eQvklRMn8U5WCYOZuwY!MhVd)-Rhz?RuoqwwlJsIdSQ6O z>xgYq*!7MiGP7-m2!5}EEkkTdY+f$~qyN1ay88nCeFC4QiAYIt$bp=vFzw(bCIeBm zUkkcQa{@I)VJhO5qyRIU8|$JAGVqZ9%l=83*C3y|B#Axx{4)T!&TLZ#>M6$h?dV^biBQ(>X2PD+rf- zfI3gs*c1_{<*fxw*Gd6Rwx$))rY-Upu-~VLCp=DZtN)1zZmi#$W&ag|Wo;8F{rpRT zz7JV+aD7dOKPe(r?%f=GCG|E%1Ty7>lk?E6?-YaszR7C)kHCr1!O90NvlyMH?K_|7 z?LxM2Kfa7auIt}ifk%-Gh2d#TAX9#o5PdT^bAsKhFjZlATc6G!1t~q)o^nltky%!Z zEjG|6qm^-euCeqOoXBtDbU_-x!qE_@7OoWmjcU4Gw%eZ-hOM!K>}dtvAn`rm@JzPh=ZNSz8an{* zra(>RK%9lo{jZ)7Tp#FNu0sU3V`A&uTEP?izhY$!XW9Jem85XY1!h|o*+rtb)0KZ! 
z=bCk!!6KVLB-_FT|DU zM2|-2>3d%&uDlMEIDS zf?}UHg4WeD3R7juINPV7e=2p)=D}4W>*IdK(EI4ZV1yC_nwb`NH6+)nQ5+p=1Ysvy zzuLAcOof3>(!iL*b3UZD=`y5=48ZRM>F^QU zVDpkHFP|oBVp;G*l3@)oi~;68wl{|N&SlTo zCQJv$V^;*nt^n78s4!v<$xgi%3jA;eOG>f_gy_Q2V=Y5Lk_JB(CD<0Q38K(XYyjkq za+x4Q#S4`r5}K^i$Ma3~L>BWULHaQsPrL6a?7C)vyK;`O!hYaOI8cWVVYwLZ^jQv1 zUuS1sM-~{v&;S=?{e>tDsnhN-0$DahbXphJhs9x>2X{!n_(=iBp0l?zRRjveJs#6_ zzYy~P-cRg6J{CzWUb>)d>5jIg3+*#e+|M`HlssfHWZ#J9W=krUb-j83-3SKr^{yDs z()<3<7~VS@<6EPJ-5L^DO`Ri$++#$f%=Y49iF7_7s3m8pJ))o+FsWnlDDtEzYL^}} zb?PjNB|%_!$G(|%KonO;Mr&1e3ftlg`!1F&T3?IH4ZA6hWOT?7`SQ5ln!6E=*wvd2knF-}ua(MVP|s<)XNtIZ{w!eLZBhX#aVmpj#3H50?#rqp11O zrcD@)vnLcw3nbIbkEs=T%-obCM_VhbUPRt2=*12Zm=Ag*WP3l0+@)UF2LcsNl>eiyI<|ZE(0=*}5!~?O|2e_}H(3;TA3q_-{;Dw5r)N%Je!d~(dPQbV zxk65}s88L|QfNmMc6IY^iQB#F!7#>0mSICZ*!L6?u#XDB6~C0zS@ECoahX!bTqEGE zHq}1WZLvfTx&Kj=`)UHiX2Gk(SvNq0M~q|gf)Xd>cw|o|rxly*@1l{dy>J=|ona+H8tBbU z=%3mW51Ao3;c>P`&@q-I)AI6o*6Nd_X6{~$t%1HbxdPli#6}6yeW6rNzxN8bfy>Gc z=^c9tA6iuZ12&D2e6spvt=*B+g>GHCg9TPvb#`5h?0TB9&6|6pO2 zoqIE3eHj~TgWIqg`#(_}-;(}iO>4AD(Sj+xQ-(ZW@sO5wHB7R93cB(oQ*8bI(2XU; zXLrZ&>kIK#HAa}y$UcBAbDp4^k;!UlqUFaZ67|PE(?p-;wP)a{$<_(dQ}{ZH*&Wlhr})Y8c}Fkl=qt#6sCq?4rv&XA-BJZ=tg6-d?masd}1QgjAl5I z>AAzA(+vk!1upc?=m(_S_QI#FkHXZ7PaQ5?6Zv`T>nTjAXmxoMyDRsI;;O;w#RmJ6 zFvT5en%Yr1*^*)zu$QmL_7qWIHc)iM&scCd5%Iau_L;^tWwwWbb&hg-Qjj_Zb`3yJlX*Q!1o}s3Lp$KX!4?L(*ASNtogeF~u*)`-m%j`f;|WgGfaNJk z1j^a7Y|kQ#2)N-3j%>}2;(nFr$n@4V7_LtDshoI1bn|K}_NCIEfw*{2u{9#_)#=T) zkG(ELcO02g(>Z;nedZ%cJ#wi1{7xeIl!o79=P2m9?Jj=5t`>CM1eO`Osu0CB2agRj ze4z-MCDS;CZH%R7Pbb4kZK5z@*b>Ao^`YjeN6VY+xQOsZ^#exQ8Fzu)L>ME3u|q;!hU~L#ztc!8^~3n zx#DF1k&pK+pX2ZxtVk+M(HueIIW4^{2qS5&>r?GN0w;z)BRSW;_KCUZ#P6LY40VZ< z(WA}f-Xv+EhINQHe1r1m8d z);iG`tfBYV&xR9pEQK;qL$-57fd8y8cZM1EJ3*3;7-;zlY7OQ=0(Gtt4kdEUg3Pp# zn#nJU-8vHB27_Zqr9Cd}`@g;DBLWp9 z$aC8f0armNg7xix#ZqH31RIhk-R(-_=UEQ#uJ*8_bn~Gd<#xRwJ)26_F+MzCOg~$v zFqP_!B~NNGZ6dgOM#+`M6{B1N=3HD~9S7n>fa@e|qi6>&NDp1d_tL`#3i;a;O!aDS z%@_w%(q!k!)=ibt&@~-fcOvd2yE<*6(BXd?U>_>zhR!QV=i2W_L!%#99M{<}4{|0> 
zNVf+Be04Gr5DEpdTEEhg}XTP!bkq;kVI@?aZ$EQJ` z^EV2Uo8oV{#(pQ{1|1EftTY=a0w2DA0i0jbHF<`O7tysdJB;F+jCZkT9qUR;Lz*Db zh6%ZWyH^5&rNXodGbsu9?_MtGIwjCKD6|)R_VLVSrKY68|Kq8p983CV~$ zNUr@}SCF$fCb6RnJcSlXR)pRERiaS$5Onu>Mi?PTll7{H?bujye()w8SY>IVxK6+{ z3`f%Oo)B0c*9-Y$u{^qElt6Z=BHA$B8bqN|*hoh;aI1$MQ)kL2*=|855i)0d zYCf|#<}Hu~ZjUIAh5YcYmQ;IF1ctSvuEoAl*p)0Z?caVc_tVJ9Y_yC1zn9t6s=$`NA3b z5t#&g!zYlJO1uPnl#{1Z{f9!-!g~i^w(JuR;b$DL5MO&3yxh#OlOG^49pT>|>+RPf z(909^m_L0yz)B4UECk$#F({8S$UVZeEGh3{OZ4H1^jDJ4Gy#+M_LPt4*RP*7E9?r` zJDbabXN7(DRc#Fx5!TxWU8y%A=`}=Qx4c3>?{*TmeBnxUm5ftajS}5zkx;9(OC< zi@IJ)R5izrNGNVQelb|2BUe5Jkb!>vvEcGaIrM!xO`cVLXiY{#Qq_0qPK7$Qx~n;?uagbN^6yU<}+YnV4D5lAy-?pdQ+hD&}gTM z;(8FZ>P$OBnB1tI<>D>y5J0$ zg&iZqI0k(z=nt3pkYX#jA-Mnl)F*?`!XxrGg8ndr12+i?UH=zXsFc`-DeU?yK9ct0 zLTv7yh((a^Ngs}`tFYM$GZzNmXK#3N@4naDA%$UnS0R)6MSsQ@jZ<9pp? zeT3jXuxwsve-@;)5d$phW4HvIv+pVE6!aq~>D~K5K{tl% zAQNy)>$2*E#}mwKpA}=OVOtVQySI$%pZA2_L?6S!nfv;1H-TpWb}2WC03!#BD6PnD z7o?ZQgi<(4goJ2YdqJs{Dh!LraL%`tKB6ZsI~x_I1a2EA<3jvz9@5W)xh;QJ(ER}K zuS58GXJBXfoFu4Jla4LB47*r}-oLxQU9KQC=1$16KPXI%ScS!Dgn;X?Xho%M7Gzk0 zSKe9jrv0o>Jr1w&_PhY?ma07GgcAEzG6c$v*3Yv`6m~OV_z)W<vLRTvJCaIIlq+hAY##P?)yj2xd~ zzfB@I_wNK<$!J1^?LuK%H)8nhcB8^>3&Z;qmu+t~ z+c2M9_eQMe8G^3wZcoI3OSWZ-!ivJ@uG53`HF&odk*2=55EVo#_Xt@dMEEZo*ImTm z%-nDq)?-D{*rEKVv%Remf#z};9cSBpEYmANFPL^xsw+U^zgs5h1O1^Wv{AMqm^nWZ zq`W|W38xkIV-frZl^qJ(S;CrUIqddsjYaAUIee*2#i zbW`h|kv2CL$q~U8dKj%QmxkPFX{3cOwS@bLu$?D@>rFo1ZTz79P87%KI=5NzFx4ZMRP)wHMTSUlU}Y+KLz2M<)tcidOMlJLh2+ zH4)cd7c1X)g)f92$jp&_)mO z$;nN)z3x+9gB0z@3QLr81Hzn}_B6A}-HIXP^|-bCLpD)#D=#9fZbOCqJP)kETcRk; zH3WZBaOn$Bhg9Qje6E!U($sW*tmY}~+A}Vb$yV-@23EAHt--@Uj=a(K3sP%hS|&@~ zk8@nHd0F@rPPFp{sj^Ss>#wz|d>m&esr~m6bkzj*X#2-79!&b!AYls53+1u^jSzD4 z4xs`$E~v3FqIs2r3id(B`Dv#b+%M$D;#l7By~o~74~SA@eIgbY2)f|)}u>S+FC*PJ@rcnC5F={*an|Mihf)qd>}}3r7mk7vE&?y(<8{- z9P#lSGZEDi@B`Ek+(UdPieIY&leRz2bxDqwtvNou^!W4&8z2fia3~C z(W7mYhkl)3E=YSit5?`6pIOUjIE~kau+@m<=W1h9v~j2H5ycG)q8`2NSPZ}UG_o8Y z3#-88P4=Ta!s?(&%i0SU*y(~qjb>BROJV&)q1zCMDQP125z+NvC?lWO&ctq#)_Kn6 
z(u5f3ONGsQva&2B7>I@jdeK{h->XSh=u<_jYxF4HCaB*tOhyMgwhQtd1C$a7x}Y7s z$VQ6j8aFJIF~Oz_Gsm!%z$t#F5Y5TKP6q;0m4ft*lR(t&=M_OzG?6 zXS!C9KJJNesJDESnz(U`{o)Z9g_&vVtuXbXPlNdj0g6DW6h%@SF6gFNzxYHf$xj!- zF+E&AP>Y1ALoVOf_;3~v$6I~)@;+DC3qE`k`tL6*td6Q$ADlMl+fOGz38pC{Du-?x z>@1O3tlw_3^A&Uq#P!yl-S0$zLB7LHysZ!QNe2$Mu#cgMIjPx}B}nK-jC=E_)QBTh?TR zN!vL~5zS2s3{M1>f+vgQW{y+>c@Ul<3T!3puCv9lLbyA_5;4k9FZp;(soARvJHB|+ z#IUolX1yHw+1KC8t&bIjDjV^bt1nu;)$8?-5rsSwnLxinh<;CvP~2kzbQsNyi@V;6 z1*r`!C(IltTeYx1pEWLGi8*1B6FU_?DLKQ59B*IuB}J>6qN|JSAA;1E`dkuAaVNKr!v$eKl91ZE!_tNH9Z&W*z?mYPxTp|H~Cy~*Y`FnAdUaQT!0i8KOr<$DyK#EOU1!#zTPh~ELYHRgY`j7 zi3-Dw4jcNBwHgsQEQ(qAK zMA>TjVKFJ`hC7GZ)e6GEp>6Z<$7_G`Ft7yPY~_Nk#;i=-yrkNDK0(LRGdc>k+sTt$ zcIM~=-cL{QkZa+o>>%R=UAGJmW0QQIFzm+lK-BVipB9OO!xDxN)Ji!J|&Fh?vFi`EE*QX`jn7g{)D8Ru{9_~mRUC0&5EE>NYm^O0&ej3E=Si9 zSqgjHC&0M$40}U>fs)0xcNBK)r?#%ivL8=&aX0qtVP`7r;_i}E+0TXP$An-?+65jm z7xUN_UE*^NlMyI|nO-R}PH1C=7>Y}7u(=AlA_wSUeN7Cf<*~sHu$xrz+_YxcekGvJ zx7GwZl=lT;eH@dt%SSOTyh7QjQ(Q`5*YwSHMhtdLsj~|N{Vkx@`@qWqpX3#>WL)N3 zFG0%hH?*HkQ`qsE6ygPVl`x&mRY9ILdw>&+4Ewu)`w+QP*(4EU)~;JhUHatKIQ@K-ba@dH93$vBPIuANn8YvAP!BogN`!ZmpsNrOUp%ckimu1K7v*v%$%B()Qo6Pwc$$ME zrDeo2^Bi7`c-d3F^tNf0ZPUuE%tPMgWY1(xEdB9y8;`G=Va+~4a9nP&GiEqmQ38{% z_?rZ^!b)1e?icd&J~KND@5L^0;ND`Zz3&sZR;6**6%}G&(;~Z9Sf_w%WKzWsWnx z*ODg76NT~70UouKVnJrvH9fDlFBJBBGt}Sfie&ug2T!>K*rvAA6{fUkb7{1>G1^>c zKM{q>^TzR`bB+LKl<&!&)uV29fyhiNwsnPpiu%1GuzFnA*H$X%_)Tf8%(kW&9v0$) z?$lW>J}Z=roGRXDgx!RFIK{3L5$~vXoHU<J3d4Vnt_<}&SRn$7wsru4AS0(bq~Mr8ig2yj zSMvHo+%<#YW&(nr7niy4I41Q~3e#)-Ls<8IK!8l#2c)KOkrH6{6{4EPs*s`tuFV66 zTB(qWX8X+|_LK;2i1|b@nq_sObQ@S@GtDomv#9Z`VrXU8^0{40X4)=6W|No9*YD2P zBKe=13in%V>9e81F1IABY@L8>BUdP4d(}tc!(xM-`E+;W0Nt^WgqR2@`4bkYP5>mjUG74;g-&B?@gwd2wO7^_=4v2yV=|#t?*Ie2^V(-r4!| z`p!8mHcCVo9x`IIJ#iv^MUyVjrV6rEk}Q~X!kF|e(TYa7=RM$4c9d+8SnIcf)QSMx zI{WEd2VN~fQMz1!J_s}#Hrcy^u1y$3$wiAh&5nrXdiu_AN~T>n&*dK%=I69bz(pc< zn_=sPVfR|Boi-@!3PY`(vanpSHI{zwD#rJ}!qneZx}a^^T(kK=7){Q@&d<3X@X#E# z9s+Kj56YH|%O6B=zxUDlsewd6m8_wR72VHR8Ix(m6vQ}RQlhkQvpsk?kDthV0aq*{ 
zqVah)PXrh`yl4{s5^bSiuX?Rn}Qo$U08oE0vsgrvX+*Ie`lAL%M}}=F!jlUtd$8c0ohOn zCNzTEoC_Aa%+YO=q8JUiNKhS8KNn4w6GmuSEKYR){ngBsVXU5_P+E|3dbGZY#Ma-J z#;w_0RH@Si-O|MBDOn(Yg=aO2p%LSe8Oq5;)ws?R=*En^bS@$f`=UZ92LVqiCxiPb z?4Oy*t5dRFB$7@DcUh8xw3BbW%5nw7S(9{t0f>t^A)DU?hVIdT+TKM z(zf=ZsmM#W7cR41qEOtclTnlWt4|GQ$$5$PiBApmw{mAKAvLVANXVOY5x1>pi$rZh zMh&vw3RBoy@-TX{fE(DoD|PQ7>?Y{oP+k^;6idK&Apg-MfL9$E(b`+>A+-O4F_u2 za}O1SiMFNBv@PA*wsexs6oqciNnyWRE2*tBVFxJv}u5J{5l{Re~`bBN_8Tar)Ci=PvZT_=KumFwoW z3+;f1ToRSCOFF&6Wgm@ji(FivB}@@1+l?HQ5s)!uS5Q$aieQ0>mT{yl!IcqCixyE@t?3nNLo?hEv&*;nthv z^a6wdSW6TV`$!Y)Q<$~#^4|8YPc$)o*J8vNRue(b1oi|g+6yPO7gn`n!T&nZ#XG;^ z_bRNR!h=RdwY3Sty6$s=LxS#w;;SJtTIk4!Ek}Ub#*Vc@Pe&zYt<4pn&=JG?S*60R zLuAF;UJ%y2YOD#I@zvOWPEeXxMw_=rx0TxGqR_syFm}={yvCJwP2V0iO<`E%Z8k)2 zzbohp54P8bgf)Z1d<{RV>5k;-KhrJ~b|cq69AR&lY|qA$#NF7tkDa-eBru+X5#~!2 z_A|AnUhZe+i{c81kK`s~vhr4n#5xmPrDIetBQnw2L~}zPZLE!MD2g^V+7H%2q^pv~ z*~tp}?+6jeu$?V}YXx$?d3LR^o5k8%UyK6*zDEdn`dzTlXsa*?JPth+V7fECt~a zhp{K5bFsx;X}5~*7T!_etUUavrP>(LD3|Fv8Ud$=Vo4-l9C%jTDH@D@Fn5hTr?Bt8 z&D*?HijQJM{ql3wg;@M)wXR^au-K#68G(d|?ubtfR6HulE=G)zZ z?n@dzz*Y$PWI>3SHBh&V4E)JHkW~jArH(_&qU&*OfhcaaG8l*z&@W7 z10&cAwEw9m-t~Rp?VfCQn`o}@?-(-3h70n6?2}pVhXmkh2v5xu6sGLpcUC3`PY_^I zcE;na!k2WkU~1>|HP$Z3#}FtItv=AmgXup-qB9Yt#xK-oLax&iBRN@_893Do+nEhe zVIYDzVCy45hCBF}F>5U3*bh_$5}3dxqB-`HIs2a8=yD@My1>r#fYAuqB?7c1lUKio zd_<~5aA*36!;7ZaDupRc+u@?=Rx3#L@rcu8*mfcJ6~=3keNz}tTi0{pchUv`#qV01 zZvU$=5f|Qc;pKLQ0NwCfot$38A4Fa0U-@H;X{0Nr9NDTp~!P4ZPE4_?SHv zd*<(1Wfg*a!~(7G6HT%T=P#86$?- zI-e4o26z+|pmqwm92XEjY67}CQd=t~*m=UPpeS;hl{4)3qPQ;%vvE`872O7j<~~zx zp@KWW0?QE8tJK-?N$e_xS9l->}M?OhhuU_`C!md;FY_EGp8@$bx zC&S*BAV5oVLgQLj<2BxJFfR^c(nw(dkyv*(P$)d@%AjcSI7+zZQ@b(Pa-hReBQIYMd15HaucX)?s9v` z7Z9JB%J=cO5Mwx=P1a%ZM!uA8_@kzizkfk^tp1 z8&>noHpxB{!EYoQ*JFhxFi>A%=RE5Q=~`NbwMDx~kXE)eZ?Zfe#Vxj0_ew#>z}^)B z<)BaS;=+U%7pB^o&vhqgD_GiAP;cjnqDG~$W?U`cdN(~Y(}sGO9!X?~?QTIvi-!S@ zG-02SGXOV=dr`oTG7pCl`@1JF#(Hd8XrGGU zS`o#2_l9ZqjVO+<(W3`i|L1{jvLnIUV0Q`=orvN2-IMG|!S0JTy}USStu0ol7)tJ# 
z(tK$9BF-~5Nfg?II3E}7B|`9qFb@v(cn_Z{f?MbGJdd!59`Uz{>gwN9zGt1iBKdIo0L?W9(RFdoU;cbX9C`VG9@&hz0T#WP%( zJXx*P@OYbITxFxe378qO;hxf7IG zv4;g1AwDx5rC$-GvNSHc|KZc|o)fmWJlMSu8T2oFstMe+4c<)-*P;XK`FQV(;gn1+ zu)@NQqd?fN$Vx<^%AJ<>64Io_)5qgXTowdMAS$61_`*~{7meT-W)y z!;71ates-p1YKb~L*ObVWFLv-YGk765dN9Zvu^>g(JuabcY;9N3iIHe5_sENjU5~t zDvG`^sq2psfW22|aD%kfo)mOrpN0OH&GgaHnu=&my_E?v3~7jNMQoptdRhnPwbE_; zP87x!3)x}BJCN9ask^*;B?6EnOqRe&EFNMDZJsDFa34CjoKv<5xj3BjepVboRE6ovsK^Zb)xGnOL>Zs)PMQ3N;oVXpPicu&PSIy5~; z>uZ&=j1x7UVZT%a+>je(FS|;Rt~yx6J@qXD)RNM=!3HVpr>Jw4V?#x9UB`D2v8)&# zhCG>d2~!q77`gV_Jpk%qQ>S;*KYJK$s)=r$VT%Mw%8>4~l>$`7uYF+OmS6nmP9)zj zGul{Z&wDrq>mz6pb_l{}*W3m6UxmA;e$Yz$^{ddRo1gmPy({c$L6UbgV6?zT(fs}# z+;RlnXUc||_m8JeWJMtzxuG&aHyv>oCxMc8Pb83vVUzt^(0xULlRr5`(S{>!FLdTn zD1qa-K?Rr>&r{tzdgw!6Yu=+X|Yjx9E-mHtt!hwl|AHkv%XfJyu~VZ!fNs z{o-5?nSHC?D6kq2$Ar`5Xysc$=C7v$5$RS*-h`8}L~a8_q}agJ;cOcx z$S~91ZDg8GFJ+13*iV#L+FGCEz|KR}C1GoO(N39D?i7W((7eE8 zIxn{$`6PiiBc2Cp?bLs`!Utn<$A02L!bBwVuNH7KK9Yd}0Q8=0h-gf=@nLD_%okGe z>-6@B>%wXgJo{X}*0e-x%I(=0F_`;oEfVcl`$_303NJ1a9JxajS~;gCFnAa#$IY_#BLNt)Aq)mxNdiev9m25i)n(AglL-Y94bPUh^CI9ko;6gzhR!2~FJ9kZnH_h3T1-)w)sceSazJH@&PhysFJ( zOM9`Xuypx_7unT59CvMB8?7*-z}Buf_(;-4a07RsTIP^2ob!Ze?hAO0i)owZGtmLu z;&H&;6-&S^Wu{$ufDH7IHmpQH?jfg993RhAF{erIfvJ-=L(;QJWx;%=8ZJ`k3Ipz*qrl4awxZ2(zOi60f!D-ea z2oq!3J-=`wN)xfc_6oXR70HvP@%#LmNQ@1ddD-?qZ@H4_+-;oouN9=E_QEChsKTz& z95ltO#V6Gl#9DDckYa*Mg^zq(u;VSX{|LIKNhbyaShv0ng~r@B#8MP={gRm=-KIhj zXbsXyO+Q4?8MWbDpz%Jd`{vGd4w#)?faBJ*FV78-%rlQib6&5Fet7T3acKe@LsCDseik zn@+^2%8n|QI#Z-Kn`6g3)DDKD%+>F?k~^1FTeiZkkNG)-zsE&@cbP-6DFT#psJY^3 z!6Iw%8IP`9Vz2qAj#)LG6j09&M&e5D(t3Je4s)Yu^#V3a?@o%CSEfp z32Rh&kY`lOJj9Ze++8gYq#L4j)2vBhx*-=wYVjEo34Y#F=v)k5Bp^X{L=>kkz)R3J z+_1?3{bc@llU1B%A{MIcYXMr$f+=rl$d5)LP*cb3m3D!`u#KMKLJs-5sl7}jH>Hs4 z?KY5U_lr(B!GbPhp9@9sa~i3dh61bbl#W@e+KbqEF0*GnC3vOe2wY{yd@-~{cXyB2 zmm;_|>5db!UT3#KqpL6}rSUL9X3JeEmg-|#D^hfmSs>{8_+c4~UMmcf*uTJe1e@(W zBAl2%=!Iap9LY4MP#~Z7(+`N{SBdX;5<5S%xL6^2ipZjq9^5-52*C>c?p_+f-MWLv 
zi#B#GT4U=4sV|s-yl&bHBG9?{XtKPbFvF12){HfHpD|D`sbe6gRo=%^0?l1 z!y%WwXOBL1m%^^+xKNpFcl$V&M5|U9#u1*}WNTvyhohlm2RzQnk042(Wd}W$E;=q* z?69!cW0B=;4(x$8i?pYXD9)=J#lB`XKlOHghvO$Xn?hOHelP4;9~EZHA;_ShVbA36 z>m#*C_#IUQ3L@6 zNsPI>ds!}9?z)8)V%b73g3^0iU;$wZuq-{vLf)7t#S&2yb5~gEl2?t!n&kia{?07d z_j&%}v(J3b^fU9DJ9FmDIjgKr*g2Y`C(Q|aH%-ZkQ&pCID2fU%$>x#smk)!SvEinC zhYb)7j$}2hDYS$D)pfe3pDj?B8Kk4@DSrrm7m@N3aXnRhN61@%B{}pXjGRboJa}lUIQmepT)l7RNoupl4zjvt!VcXWQA@VP_)sBehEPhDZ z(Ptg=awr%R9C||Z15rFh{}|?@uM&o161iQgFzm%Ba{}|LywxqTTScQwu`Eo5-w`sS zZ8B6hp52A{R9M=|xZ$W_sndhLCq#zHnIq&*a-0xMOI6R%i_?_>(xmnsJJ<_pD0?0M z6!a0!L6Lw*5rvk0+>2w5ApLkohz7Cm;BA=zWow0C-D?um zhm>O>6D_!BHzu&N2XWH(b~>dqBRe5N)RP9357wC+3E;WXCjB2=NA+y z{;is*YxFk-i5n@Kc1J^;UTZYu*)1oD^PjpHB9k1P3FnJMi*c}REecZ-9q-3&N2?%h z;8cm+>$HD(q_({^W9{35{%J&-6$tJ`@kXNmm$YnQD&totIHa^CB2nV*8t(sWrBK)r z*J~AJ|FhkiMD>FA$tuJ)dqor<^8>5j2^RgZ)#?;z|QzzAiX%cb+TQfFbs$l zVvOo(0evX-)sc!zwT%3bA%t9>}cGGWl$XODd>#9|U)vVG%SlJZFo z?rLW%=)L8tXO;~XrdY1Ma7Yf}NfFxgh)(&C4zB;KOtCa_SbQXgwFSI0Ioyan6XLMP znzT0s=-E3nSKASVefT1E4n$t(zX!>=g@Xb8jI|fV8-~G)61z*70$L_6uzM7yBE3GM zZ}$b*GH%}Cnb;%$Le8eOMYc(pHYT&tNp2C)JVE|&RADg{ufb9nFSmb-!oUucXvhk0vmA zs`cc|e|kxHj4ZaZ1e|NB4NL5b5XU3)gxxFPgPa#Dz4~KMgIV#o-7KJuLG_B%st!gL7rdViz5 zKb?TpW3~NFSR-4$(Z2HuVLGcf;V9QwfRuhe8pz$#Z~)8KNkK9`&clTBKPAjJ*(ocD zjny9rITQ0riV*VtL)_jGT4G;FTNeX&B1qptxW`$%&$1Sw|3VWkEMJ8GQA_A3h8&!vS^mhW3 zx+cGw5oAeCZI~0y@Hwds>!S(3D7A5neW1j&3>Wu9?99)+AbwV45aKRDDvER#@>wPO ziU4zlG_B9g!Rb7R<__#(nIYqwVgPNm#KfuUMMZLuYJ;y#>EXz?@hG?*BIK0x5H&mF0GTvSL+YBN9j>MCd zqimKa8Xmpw-eSKG2~r!DMJ!t66be8q2~)_sLUEiG_>lf8#E{{h#NG?x!d;{7^nahK zCKH)+9B0*{XhyCqvr85B5gi(5M1L(z4LD*ijoH+YvfZ`qY=OcINk-GweEWj{Ozzyv zQh}bU9-j3dPqcUPo}G5S0NshMr^;=uAWW7P7@Ov7+u4@5d{2$N6A~XSt2|gf&aV0| ziJduJ84)(KH;dve#jww2yDgo?>a)7EWjKgA=!WA=)mFqO8Ef14I=AduR5Ppm;Bv9b*Nk zK8YQ~H40Nm5klEKdsB$T+8t&`wwaqn^7i%B?cz~k1|iyWU@F<(O(%G>=0LsmKAi-V zezb12#TC}#xq59RDn?*jWeY{5rPv6_ZCY>J1Zf&xS!4F~FMAwTMq3&y?QB7oA?b*w 
zUkB|p6P-Gm$CY{5nUt!Ur=NfeLm>>*P!#4b3h=m?C+QqZ|@uNL09Aw5-~_mr3XBB6IeJLIZRfKp(Xq~a?S;6H;r z%|~w~-xi2O$9Xt6&Xx(#qZkH{K2(^-MB=aY*ctiKS0VUsY*$&k=kUyyLPW9;lN9VS z0kR)n*kE@m>}|z7Sc&yaV;=3Ybfqz-*9vTbFjIh*b{%GmglH&yKz9&Rd1%pyMBxXq z^Lr#jlek-cyf7P|eg74S=0fU%nogEocm{+prL`nnCQM5?F%{X30`vvRP6Gl9T%__T zb`L4gwJ5Oi0B~iKWwip%w|GIKNRQqlk}s5O>9b7&IZl#hx; zcO)D^{uAjmSdoZuv=j235jIN5`O}HV|1ldc0;R$*9U|w3)S1#iuodaZzTNJ(Jps02 z1iF5My(Y-A*=LabO~LRBgH~ZNj&1C)XmlMxOe7pzV*e1u$09$DF^bPa3hM48BR8e? zlQTUJ6lo*!%J~XWe7bQ3Co^>{%mYLe$7KFYrICrGVWbL3C}o)dqc?^xkWk zXpPw;BEXaa6a(`uONar>#!z8{!eN=y%lb0=ohVcm>9RD~17|sGSvLu@1QruiFZJz~ z={3mGK%ozu)z~grohaIKpzxp|Rb?5`HJcCPeblfQQ5H;D2c%FSf4-7SLW*85aEn&*lV#wgyjl1*E#m5W9bP#GA8+pK*7 z(hautmJ(EB`KbRJ!jB_ry6BtGeTH%a1dntz1*kv7xb2Lixx-X_^>G6(*Cj~o{zcZOwnjd0&`Vm=^W@@jzN6#yAXSL<9vHX zVZLxSE!*tOvmNk!Fwf2va1Os(_gaIs2-DvF#gW26uWxx;uAob7Q2^*6&awspzO_iL zZ}XnUJ=?5FB=TlAZOd=kX1hZ20n$-_S0H@BGS^YB?J6_rif?-!_|t)3)EfjDGe2Rm1ZadX^#rhrW5tkEfF#nm#}Rw z3F+v}z5I5Ug>+9maiz^y*he&;-L$G`r2Rz{zPG$YYMDCgVNbSiDgNjpOtMGrv_eJs#Os{Qv2g+7%1L*P69nlZmwGZI z_^izm0oL{IcK-ldC_pg}4YY#_db>NbtK&BAT~Wk2c}H1o9|`(b7C8g3JHC60fZ`pO z#WO`+AxBX-jmeFi}t9 zI|K?0=WCnDe7Y-5$;C$=UAf?AH#Zyhh~PExP-Id_izfF7TO+{d z9FF7(#Iv?mH2?L|`xU%qImyR2mV+#3kCOPH-h=Gn4MDFluVf_0#J`K+qowUA>(9q& z3f^JFj~^Oo{7Z~pC%F~2`F4-!URS&J_{1J4>~;0ynR7)NCRuNuOT!`rlXKFU`*JL| zzo(-QmOW$hzE4^3DVE9Ox4i;h@Pk-IJ|YM|?|AZhI~k&wmNQ;|&i*L~12GoP#P}#A zyspEocJq0jN&ZmpQkdDgXR7s7kb1HUSsn)qcs;55DfY(@7i}NEb-u?H#}Mvk*|~z` z%WGPmYnKZ+BZ>+*H+K(dqaE@_iwXKLWfsM9o0eqRQ|TNTO%3cQLJs-Y<_7ZF0GrG3 z?pb5o(gh%sXFBj4FQnicJ?@Pib~r$OMQSld|1Uua!t0}RV|39Yg-iQ1q-&nAwt4(U z`%i#KG^>%zefI*g!Mzyv53}ujA$paK@#PG*1Gfm%TPy&#i`a~?6dw-o$n>o(l~e3UNP1usS_9|)fTZE( zO1}kkX#aTMpxH%=(@01~Yp4M8TW*(GZq{09h>d48J#Ui*=wD7uS0RgqXle$NvqoXQ zE}kl4E7}{hO*9|Xeam^nwM&>;bN?Vas-RDlK`c4{OsC9?6_?n*g=q!aMs?Be-1mQY zD!ru8KNj@uSo6Xu%?lea;$t_9NOR;V9rxIPY#jUdl-VPlnP!;bIaJ=5FR%c4mOYxzfC&oq^_h@@?>WDi ztCcr};S#fdk-a70^Sl6q55;y!1Yd`TaEg`vunz+XAu3B!bnkH5C8VO+d&lm51|iTl 
z18iB>aCp`f>mbMkq!&Gg+N}XKtt#dRuZNI#XwPE4{O#696lvPdWkW+oe+kbFu>w(;REz+EDps;}%%VaO!}~E=lG20a9aa=F(;~bkmf^>^LKJ$y zy&R6MZFYc>c)!l71)Zz%v2A}9=EF-28DR$%hJW|pbAJ!}R3O~nNf!bQYgEN9x-?Cd zQ!V11z>x3eiymE;-}<9pwIxk%nj$4{R2__qu^SXjS{W| zPy5L!TBK#d%VwuhaGrcEeNzc&TJyr0cALWVDLRLj+rt4;LmqG6K5mPx-H24nzH}Lh zxrIRD5NTMTt-;c3-x1k4-|m*%JJ@%HsSM*Zal1^wCr_$+u6%3ci$e7yOB41%2zN!g zaQWq)jqA6B-5Efp9zXOFAjhsbMV6n`SE*iH&NtGf)a4+6?Z=n%JS ze(J>>YTR!96=o@XWwuo-NDEU{nmPn6jZpC+iOdDRW4S-(oxW3Xp$(*zUrf z$9W=ni|%6sc#R;bxssv#orFT+5_MEqPCEY(Ha#&b6oGFdItEv}OGJAsovMGa{9*r7 z1n*dkP z66pmzBpM9m_LARxbch3HM&y(fBrGH38Bmxg=#+(GA8N1T(`lpxLAc++nR{2`H`C3SH ztPn#Ow<#>H@SqEZ_7L=53@MD$QHG$8XmDz13^C4=Aw4bIy{N>-reQaPPs;^iQ#R)* zZeEl2f(SlqdndB8_)+~lOVwBufR1c`wq*f72kjtI zJ59w?$wfjG!7n|kj;Q@d1aAR*);gVc>=Gr4CS(3EJt@o<4m?GYbG^SU2nB$06L@F)Kv+z!XQwx){i)JYtLMr>+c;4I7e=v4$;kOc(I_yoJ7m*^ zU_>{pfFv^Q><&OCh+j8F@`QFGc^kEOGGzvfLLM4{Pc!>fIsw_Y2)RJMBh? zXvdA(v)0ZG@X*!?hi5If^95lGTG$86C+p$;)uLzyY{taL1h(O~gd}toy&*kz%@Kv+ z%1QA1@Gk*+f!7;OIN!esq=jg`?!+asx0pyIKt+b>JzB_H&f}d8_6K3gij2bCakB#+ zEzGpL1(>ZUQ7lJ+qVX>kBrg=vJ9jU??h&NyT%0f`ZL%VLt9Va-esZ|qZAVnPC@8053jF2JPKn`0m7Px(OirP z(wDx{G@dHJkQ}X9VAIm+@{%KMu3(rx zD*|(kdOLNu{}J*|;5II9FA4j^?21~#eV z*hYI^5k9%N!OiD~YImB_N&2>NJ0yz#o}Ci2+UFul#kMXw%x`5e8NCM=GIv0Rw={%B zm2auo28#rj7^)F5drZhHOmg#tPOiZBS3!rl7aBcWf9tzgI<|uNh6-F$4If5EJRrJMXl& z6;{a_u%&jxoj`q}aV*{y3iv$gd5=9V?5X=Jz7nVrn9BCR;ISBNo^f(NfBv5G(^znUlymX zTy%6R(*<bY{P$vXi3&#n{JM_@(|R z3;0MRc`_lv{nxuda^~hqtW|91h~hPLmj1?d!Ze7NnVDRKCat>&`U0grQIV>eC1VtQ zM1*OFXNZ>a|0CQ*^_k)A|lu8!}{OX4)S^ zco@I-Zz=3dA1Z^u$3onZ9oy{#h3RAS_&OQw{#5Y)w@g;qmwI|iyxAXKjzfw7S-clw z)aD66cp+`Ze48hvSyVZfhtdZt8f{TXt})%c#$FYq^DR~5?Trw{V+oN-| zevussX|s8Pf6hIgZ}*&pU9YgSzehj2Uns~xaR0;<7`04 zg{B}XIS>Yw+NW{*y55kjGcVD8CI*zbRbS{W+bQ)d8b2AsQ1|7-z6MdXZU64oVAl zzW_Zf9x=kQ6!uyek6c?6;@%lI*Q!G%Ed9)}-wF7~nas=Lp$NSE&b?mhzNP!7+QkBt zT7<`<&LMn5`wsSCNR-P0Z;uN2WYM00$I?kr=vK01cp?LA>Hy9<~0`Rw+rx96btUP;X42~t%Cva|jQ z^Cc10yl_JE!gHDyTDLK^Zi8H;+%6(Bf}O-V`-K1t&;t)zo`Ru4 
zku)h82-5DM6*V?i1p3r6d8R!R!d<#_v26-d$XgBeR|Uf`ua*r0`#=;eBf5FjV@Ni( zc6uL(^hc+n0~@%s9%EOEM1N>&Ho7c74-os;+7I2=22rHnG~J#Eu#n9GM~hX0UMmv& z=z=>?Qp-_div+@#^z|w!ADgqhD$cR%MdD*+Q^gMV4k0lVi!3~*xU&t`uSl&QYnue& zjw~-lE8*`&fQ{^M7T8_^#;XhFWu-uV$pc;%U#OfXT_H$j4#XAw1eRGZ5#ZR%OKhlu zFg3N|8OsgXqRWk)_Ozg$ZD4kQYdyKwULENbO}EvGqV|Mtj-D6rFO)Uc?Pc}o2aVp~ zdSVuD2Kos4;Nl*RAFu~Rpa2;cv|kIr2pI&7*{ed%O`ZbCeDeuWd^+|+fx+(X@0o{5 z0y#r~z6|PXFDj_9mPGLbUbpl&MFaskQS5 z0GS*dMLDdX_`TgO5J@v{?J~PsVNy3&&eRWcXF*yUy)|O*QDuEZp=$@0zFA}ALe35y zI#{*B{uOm|Yc}?VNEAj}<&LCP(7Vq0nL~hm^+70YyybqZ65D4NiRP&!vPCcSQc>tQ z%SD@umW_6|h|Y6442oNp2+l^T&$btYshss|HEIek3DRza>-csPpvSwH#ZQHMG8A8X z$P;0;ZH8Sb;2pw(L*%9UT9LeMI1i8ujt*%Gx>p#WwFg9@wOSTxN4ska%M{TY$1X5# zxL3q|(PlNPMFew7n=HOVOG(=KFMl73~?5QF1(Vg>JMlZKjg5GzG8zFqM z*Fu8SN^X2s*!zOcuc8FrM{$_)dC3(4M8Tr(55XI z<`MAfyP#QCXvanHvU^Cc?B<8Ta3-7g;DZ%bxvMJlN;VOop2^@0B1e#JV>6b+bYwR1 zNs+=-EaXI1B}`#isNbGQ$7P^SZ6}2(oVROnendY{N27W%)v%WveamzLCCM{|=)?Z` zEc4$LfVm^HZJ>faHsK~YR}_jRUtfN%pAK;Mj>K->V$T+&L4CUQwlja>;S)0^Sr>&_ zIYuTj5kPkp(vUP(L~b^g+IUgCW3dwKS#A<$uAf+4eRA}4*Fv!0hHO;*<|>2~b#^$N zjR(>l?UQth{o`aZ*iQc?1&PJ=vm~>>Mo@S22(~4s(Q{$e`Ym>wB49u=uQYDm0$@8@ zWxp2irtMpa_;8-EvjmTBT1wwZQ?LM*C$%O~sP*M#FHhqe`!qmAZq*Fze*_s=enzFQ zr2Fv;9ywKLD0%RVv?|6%~3hGyRj=is7n@=lAa3su^xdV$mN(|#GqXRZd zfE@P?vY85gLEiz}Xb53ei%Nsoa_AOoogjT_hoOc#g()a=*seLYH-NknZr9%ua1IT_ zG#In^eNo`Z?k&0szvaOB^jq66SiSNZT=<+d)Q zIa;~BrD~DYhBVx(J;Pc)iQn!ib}Xc!mToAYe?R6;7?><%O=u_N1A&=RJ*OIBy+xy0 z;g!h!f|i5U7ce_)_Tso+6%M~bh^8aebo)a(Q|8cQ!d?=l;|RU@KHd{@#`NJt z%PWL`i2_59Oq^w(g}iJ4aqy69U&=U@8u{lAq}q3gLJPGQfR?3 zA47BH;>dm;uU;hquwkNmXNDId5-ANO!5qw(+O#y33$+!3UU1*Y)ohM6iWEeG@(jO) z9f#WRg6Li_TY|W~A4-bU|9NndNwg1j63I`7pt`$;+f;=ag2;ajPOIg&dah_*TBdw@ zyeRAq>BmbG86Max5+$apYxuRW*8~{_h92L}tYPDLW&e?gVbkN(F;S!MqcrmrY{>*dXQ_T6U=f#lrl85?Pr3zGe{#@9C5 zl>$mG>x(c+_iUD>Z`&Ml$`0}RiFQ1|!{f_jr6?<#f}HF7 zHbr~Up`v&fJK?o#n_w6@bxUS0e@yn`-Mr|zjy@7 
z28BZ288)U`o=1uTpOR?7&Q+L7b933~y`D}QIr_q+x+q?>yt~*DVQ-SpwmLRRd?hT9YuR>%bKiO_EL00&OF!rio_^L5hqaWa1fozp0k}GY>d=VosS+*!%GOu&9Y@9GnDj8N{%R*{u zX|8;h)y=9!fmM4p@QXc(%2o%0CJ$f7L^~h?6-FZDvG%4QUjR>2q{`zyhfah{k&ud~ zf_7o9XQHAEZhS8baNoS>TK^6~AA7mx8#mb^iWZ`BtqjrWvECk1GVck3?-Cm;0%8!bC8!1;@g&75Y71SvkNumokHEg_Nd@)}6685@~^OBcuar-!pu^WnFVAAuqy%k(!_lQDM*)93) z^@D)`$8evD)b=3}7_{cfWzjDt6834Ddt(A0$6}EgtH>!&WTv-96lTl4g|=HkDy6OW z>2}OM5P?dXw@$=KdLe)oM7F`WC+{xo8;j^le`;#eD4CUbQgliQ+pQv-83-J|R;xF* zNCclS2bM;DVSb;c@XMtUdNX9=c(a39y~92hK@&aFN9HG}!7eO<1T%MU!#SnGUS?jh z7<(^HagMdWjT@ORO5r%VSnig_?9tZR;B*N=J2*oWMv^;*TduQF>3AuDFu|sY0((Ya zEYNBN!WV=(V2M0|)rSPTCZl)oo&d8mvW&OXK2E3CTFI099J`>{lOR(-|H3X8q<(*& z*(*fyTnw*zHZVXMl0#dH0%Ytmkm*hhS+Qx%7q&o1C#}YXTCC+7lDC0N?A1UglX&bB zJU)eYLT>1tAGVW1u(`NosJ$0rWXQAFJ_#{8@_eVnQ>8X7^e~KR+SatvexwNJJJzIe zg`39rppt5j3H!*&$uL6i5u#|>U%`*tdRre7MBZfO^vr%Q3XCZ!%+R%|P&fc-kI24? zQ$%wpBzjS@+Wt3WL7u`T_UA%BJnLMhQJadmw2yr2c-n9xy+Qp^$78kmfhgXEcDJDc zeW@@U$w37Ckix7)O!s7FLJHV?8$(2d0c`CPmMahq;|L^>$qg`oQ zYSv093r1D;en=3R!CCE)${3i`y74llxNe4hzm#0zLyTHYgp}pSY(5X<`wVb$F2yqIGVQ*S$!&)09NWXPSv1nBDLOe=`1lpXU zar{y`z06S@3hA+hlv+7fo(De^MI(|NYTp~-g~QSf7!BwcAZ+Nud!MHS>FVx^SaGR6 zC*(^Wf=pb&uV~9$fm~yW)eCyVUmJxRzWu_m0KMYX_O?JY4J8&k8FEt3m6(M4Af1z| zY?nQWe32%_5^X_XD>TfL9Zyml~}$Ni{KS? zLHfaKr_G}9>w#85VM!)$Xkzw9erar{V+!6Nmgg2XXK=+TF`J)77^*V+6KhY$8=ZEuK^T#9>&w?bS|4w@Qo3wYC0 ztDjG;DYI*zg2LyWe#kU?T(TZ7Yh3!qju%(gZ2x(G&DjHLRH`{jF(}ca? 
z$eh_y>m`cXo4`ci0|LH$*B77j0B z4Ou3@@HCf=;c;nmS%ZwgKQE%Uh&zFL+bkS@3*;10HoZ}^5tUYP97+te&jKpMy-Ajx zS56#tpjkM=ZV(`$ZmMuTCj|SBJiE~D4&kGdYwQVy+azur^rGhhHZJ5Xj-k3|&j>lY zk;LdnWSJ;*Hm|8#e%_k}ebtsqJpximVvFo+6{j*jEN#Y8YbOdBkpoQOhpmqwUDg}# z9IPl02pLk1iz89=D0@^Cx>TY^cljYXJnj`kVen~Tn2b)qdK}ViwzVSo#MxV>lNeVw zzYQhiO6T*%01zl;*sw}c!;=ozcd$l+N$lUhN6s8p|HQQ{r!W6u#9HX19 z0yOFMseft2pUv5$J?>V?s4f;@0v)W_ic5fCw*CeYDXEYjj*be`=(KsY+XDsW@xl@< zE$c6e219Ro%WZ@p6NUso`A&?x6UDqOIjPGus0_Uo0I&u{W?wIa%VM{u}_G?QP0;Q zmOLbf`zP6ak$v{{L10#B3(|yw+@>X%J`({xV2V<21ojB}N7teK?Kjz>H0B;-l^qYs z8JxUi+)kh1<+BAx5t)7Xw?v?$NG4)-UWip&b_ z{C@8f{y&DLcXIY*)R>8{DbDMQjtEl5iD+~oQukYJhm^#7^f=u=5n!~*YOsF^!j0~F z0{p{C0G6Iek9(0jIkG!MBtH{DuSZG+$$-Q55~~Ptsj4Y@+f^Cz_ik#)#;4HG$ZX$4 z5!nH?f5hg779B8qY70E3m5tLq8)!J{+|>i^igc z=~i}KeT94q_UA4Hk=j$C2x=QLH0hAyBfp8W73YJ1&B)odCZs*Qcx=nqF;*)`+M#h- z{Y$5Ph&M5I!851wX2gcYzKC&G$z`+R!v;;}*3tQ#X^)Et7ukNyL>=eOHn@#p| z08L9UDRSl%Pl%Ow?1kC)1nJ}ee0uZhQ;2NEywS3bA&UMXmgyRzuw0U~dV&9QJr`*k zGJM+=6Kzj$Q-gz=vgWeQc1T2@B3(EVeP^n}mbEL6%~@)f3sPynB>DhX3D6!?9@utY z7ouL?e?1t;vKm1u zLTZ38?)M?#uDSyI>a)c72h$6&Qo=42fl`iE)LVCj!v~0=Idq+Jt$&C=SiaE1o|!W? z^^8#@EjqGdO3OHe(1LX0l#CM-63_fjjp%*1-$`iS#fmgAL6w@v4 zwnh}1uxA~wr>uT3NDBYS20TdbX+4-w6gMn*qT_z+NR((7*PFHA|9Be{!t zB!GQ0_ARkl0$*5acI+Qz^F>1gD{T5V&Ocr|<3vs4iP88jQWQ1sTf}ciqwNr+=2#X! 
z*NP?2-zSQHf{{{i`Sg%M(v__?L6C-UqT=;Xh(qP5ny+I3L z2M@M46;uK8+# zj{QrA08_QKsk*TaPi?aiwM$pZdWZbScqSTGnD+hQIm=bhJAHp__y{W&hAl3hXsz66 zC4oZUB1t8;KBCZ+)>+$ZtHSyb)uDnOKVqpd_ zGVZ_$lf$bG7Y!z%8a={R3DD2z4MmatS{CkvL zJ&S0*SuW)%`*tDUL`2eS-8!NSWkW@$8hjja)nMZUX+ml9*5x);z$a*QZ&6{-i9(gf zmQ1(l3d6{O37Z$lM=K`T0fp&ksjP!>X1RVg*c;wyh$M?x*AaK&egHa&pyX9_rB?u%BT<6rFF>=f85!rt zpeR7ST0mw#N6=Y!uzVUf#`bo|oE)ylGy>iU6jsaaEAzm-R|zw(VSekvCA z8*j3)>Eu%VvvI;S66=esy0rln$K~U5e?VBgpgV61(Sb;|!q@bieJDzpC1uYxm#vlE zsNw|>eL=*cxcf4%5KV)(79#~!0zQoj&>>qc7*_rfvURgo1X`5fcO=_VLf+j>yn5MZ z!W6~(QVdWp1W;5WmdS3%ej-R*juZ*I33$e#D1O_og{en7yhtmmDr81{JMO?1I~n5N z7=!&;t%UE10N3yjWT!16%qMp5gVs(#v0=;H)Yi#(%eJ^Ew4!q&HzS7lSddomTqkCw zZK&Mq4gOjU`QWF?&IPP8^%oU_V_FvaMKxJlAd)^m_8+tmWltj7Dg#RSGk_Jl$qf zH8&kAvqkpX5X2*3TLcc8NDLZ$z zdqN~1TNQ`7K7u}%Gm|pl-#5f_Oyo)gqgOVjT%yza2KK!Yw(a7=bN>OA4 z9#>-W2IUtx9E0yGxJ=9F3|A=>qZyOQWB2&;0F zBVxHcKra)fC~2xhGNH~LdYK3`(Dmyc45(DtwO~!vNz5{7|g?_G&Mbg@H|Fmi=5fRL;GHgjl+jwQ|v&A^l5a)n`f|L{B+4 zzP8GyiU2FG@7U3nDeO(2i!0@P5cJ9frh3fc0$~;>Zl&A4R0H_BXb-e<=*FQ`PG* zAeO4Gw)59P@%jc}GtYh?%&z9dlsP{6(i`eK6%__b`FGlbqEK`Jt595F235~s3awH| zGZU96h|m8j$PdJxC3Z$lh(XDI{bV~wkeD7B)+xlGpgabtYF|OBi_GX&NK2zAB!Cyq z6RK=xfU)6N5GbI9uRGTVoWhzKOc(r?j?LeJq74MzlZEW1^v}N zFBN2Q4>cYMyY+6t1}m1nMAGj%%Mgs1ir3rKI>yWrCG(eSC)Fw( zjtLbyw-bLrG#ZDN2S4=Z*Mpo#TpShKmBN}C?Cv@$?A^ErQ-(s~ceOlup>eg^W~CEg z9(uMAMU9M%tj6tk!W5NiSZD7ltZ7v{Lvo9=*E`gY0~V473zG8i+*vkEVITfPQ7%@= zb42hTbo8pSN?776Px&)ZyhUBYMexr>Vpwm#(RzmT^ewU(QEq<}h0bG& zsfHV*=8)azANSC_Z3}r)D>g+Zk;Qh}M)JUt+@UeMM&U34SFY4!srI61RnB9W9un{l zN6vwt64rL`S;jj|DMa&6i%f~dxS1HOu$RbHfgaU98)A=cU24Az;S4Szjw?*tB_Bp? 
z{^N#I89B-8Db*%XRXNI5`n=+%!K_eHu&XsR-=<>b#L~*5-8p+ zk|s&xsOT+dy*(W$n&RA3=HPZDO_37lx?0;O3f08wTABD zuX8i$^_?_Kvx^jF#=ZQkU8W$#@RVM!e{L6q3Gqm~GBe~!HBPii3I_(r^jU??4O!z{ zAn^-XlTIMN_O$8!kbrLF@dNlDLCOa18)x0OfMAweRpQm|KoMvOl8oIucvLBz^gj_nq~3m?$m{wG9JRadNfV&Pl_3Xe8l+w7>ICSC@Ix&NjU zN6t*`(XCz_{T@ z^2<)&28oq7t!^@79KLDo$D-5zL@uVL?J^-v3HeXf+bw%o6o#*L*<#C7IIPV2C6YaW z0@3IZlDn7{3BcQ-JX_|#T5KgE(EZdpZpU`mq>%XKY5Z8s6rkje=qYbh*asPE`f>#CrC5}c>nn&w!vV`e91Ev>+8*RD>Fur|T>-C_V@uqw6V~^cJ%aV6UdZQH5f8>CuDSeokSKI0AKWzxGuQOyq`p4#gjugQ zj-5rP?Zo4eo&d8wBW4{gw=v-$UD^&=3HTWV&BbLqV?;+7hS?09<0^+#t z&RgZnc7fY=MNB1mZ@XMH_!+tXF0!8qhI{Ym5w-50 z(QVh8oqHAM#a-E<<5f0Hm{tww*58U0rtQ&jtVY0ZXL`kk$MC(AC@8iwc0={r@5$#T zq?l3urf6C@vWsEHWkNnyD4J#HG;xwN9_L zJjHs=Ow%?`8zV#wJ4OkF+8kg%A-YeVH`JVXr0| z3Fs<8MzCc}t-Y?WGoKS*G6VgvOaBKF6{H#`1QQR}i9-IuMD|b=(z=8M@kl76Z|hg0 zFb}wJpJT%Xe8#Itj4Ys4FkM7;X*k8j#74w3{@NCiX-bma`woS@l|7mo?BYK<#9W+K z;%fy-d9-efbq-O7pQ%1Nd8!o#sOgG)W3ix5p8i^5tUkoUAKtUuUO^g*?UH=^dx%5T zYBS1K-+F;K7PUcr>>LGY&R<4F@Aq#ObVlXHSsW40{Y)gL4bLkO3HB7C+xW6d#4{7| z-Ahq%dOWLD`FGjNP!9H4AHT8dgH1PYQ__ z8#Y?|KN0rcvg1cc&{2d_7sV%5H=bBo)yj48`o;DOMbV{}+9`);HQ1Aa`g-;3cZvX` zez0=J!HOlQ!#RaEVRE)G&cXG&fA$oxF*?^ww4aGW0emG(EGD3j8e3dNmMui)!u&*2 zHOpX*FjYtDE9cE^KrpHVZtsDCX4Ry(K@^~MC*A;tN>Fx!l$}5>WvB3Nfe(p+=WFCg?}j$ExWD# z&6cAuUBs|y!X^vQ2W>W3A|~205xn+(GR^%`8u!qf?~C_%ZnQub+D`@a+2Z$F?!YG7 z-J(#@EjL|m-4qTT!3(P0FB}%f$Q%xmCtRO6oeoOgjRItUC1X-YwtO(;^DO3kEG{B7 zaXQV7--jT@@U^C=}PS ztllnA*jqMSFLkaErsghuT!_IQ3Xq1Jm@w4_3i{MSagBTAj1Yfh>X;+b>nvB$>te4i zwaZdbsH^?W*V}ItZd>??l{(obD+?)()-ADx3P<1euF;vcR>&E{7!=9B#BPxotJYa- z|Ic3JZP97=;psaM*Kf7!_5oo)DZYp81_AG4``fTS+)tQF=u`gVmLnwI;xXnig}r;l z{MhDLg$Vu~V6~-8W>{^!XkHu_ZUr_yjXR-sY&l$g=MW)7T=O zxp4G)OpscGjHAd(MWFTg=*qJ3>C8O1!JK=Dd(B*YEauxeqG@L}r}6m4wc1t%{e(#;Dyr=U#gczO0-x>HS;!k!AcKxQ zgki=0g?yA#?16OjQ0zh4AYqzwuwv=K%2@|1SKAa(=n1Okja<-exAma}rsb79t9(b$ z8yH!+(4E6U(HJy6-ifZ&H`~8N@_wTDIUFIt>3=zuu`7a@$b`vRqIoI=V_f{3iQ?_Y zL?*5(gc-ZPjIpcJQ9~1KM(z-1Jc>*4m#gqaXPqBDzRK~f6X(0?RqM>g6BEZ-zjWsO 
zri~>wR2aTs6SD{*=>G`9M1C2oifx||Jc!JA%ZT?yuS1}!rusbVrmz+n zMSdL@S%oO%#)VWx`GyuW(qtw76}R)`-m%m3FW-;CF&~b|?@S$2=J$x8owH?W-G8 zYbMLL?&)uM9+Gs8u1v2L!N(`8GWUyeYM0+SW)gCV)(KT7=S*%LRcDVW)+a}#S0#PJ zV$o=K7ahYZ1YiK1oxo4b1VJBd>>kgymxW;n4JgEPuzrg%F3TlW$c91!Y6+GK+1N^R zvw{+m#Ocm*@eH51z$Z`RopigI>wNZ^8+QL42uqA~%rk1DiTLbNM4 ziMa~Szl%a_L9zhb>z@e0{G%II+NTQZbHQ?3q``gDTM*iQOk9kHRI5Z(jaS`5`=`S2 zfKKAZ(k}Tc(0OogtYnB?D@=(*mtdoDxaEuBOm(pqYEh-4`7(*ZZ&s3D&n2N8E;89s zCvC4V)!_7p$K&?0AT4-jdWE$p>}>XJ3_sd>di1{`3p|;}%>6{Ltt(6{J!Pq|FaHgy zCb*ui%ZcAtMeq)=pvLXnApspI<<}tFE*HT&T!8SnouFDWW2N01!iTn&TTEd-_3i_$ zAmotTVE-&D3NRbr=#)2HkQs%eC0zXpcJk0`o6?#}nt9 ziQ-xMXAUi~D}*%|tD<#pbjSdU^e_T5vSZSeu1EwPkK1ffXwczx6YWKXX=!28nvwQ~ z0M&BPe6TwqwkJeeWtuq$$lXARqyxkE} zMmNl3t*4;(p{smS4-V0?D~g2dfFO)URfH4i+aZ!3;c?4O1c<}kV+)qrCm}6XX`0sC zh0WxoHwf_7*(CyAOW#6V_4E~{+r@0r?XFTPxEhxgyfc{A700o)hp2 zF(|sk)(JD@*SGy=mmfTp;vr48Thq9xfHmbVVV^ZTTPq%EokXAksk%z*qOh-EBVv!s zp!R*Dc$2$5p2@BcnSP~c;zzMeIZqIxeXq~>%QgpIfbJ3aEN3fPTcxfxUqNcZx4Ugo z_zSGqz$3_)(zL#tzw3&%L-OSvfC!tQL~?9#)hQurpu zekuyRrBM%IRb7bm+OhHWz@815*hmX{maxyae$0-!f-q-jGB?i_rQ@*GY_)>v4{&x> z>$F;QGI)<|zgLv^k_~ynT~VEP1fn;U=f%VA$HHQKEU(xu6rkRdQ=Uhi#;y%%k$Ym4 zx!4{S4)eOIF|xKj+lGZiv^`Tlu1^RvJCN&HZ8|OoPT6n2SOBiL*vVGgA4G(m=qE;Z zgEe+Q6huc9soOif_0>=R_3y;pO|M1Q-JH3_Uw2{L>%+3&S?s0~eBft_nPc-U|eq&eI`5`@i zG>~x!P&u4pDlie+2ym-6mxjf5ZAbw@Wor%G6#su^czpCk#LPmCNHlMV&?R-$;O(s%}j2-qB7 z5>iE1$U9`*;(AeF_Q^@rcAvs@iv6P=InJ{oBETZig zvx3x4XQ`+J;rojr^0f;4n}VVBZVhCUr9|^>ZPXTKz)s9q+ee~PBbLkYz48wSor>6C zTqgj>B~N7{C{u>~O+zpIt|$JvqqIick9;%#NfMLSUth)hsN`-d9Q;lj}s z>urm|G(j#T?1PX$n(|mM{$Jv>$kgwQ?6{Su>Nna&B5EufHeuY)ZWE-ef$VL%hV;#4 zGhO$-xnjCKEF#}A?|XJ{vtNbWG^sNxrsaZ+!QRPxM@6!t(IPl&Q}wHHm}C)!`u0!e z7ks;I4GG%JeAe4eQRus=u04f}7*sc>vZmLw9%xlSsw9}1A1 z*Aub~-d>Q-aFJ}chbRMa(*Ouv62uzJad+fQy5+qq6LTUrcjt9EW$kem?MfaES@vcwuG$Bix=C=3j4qz z&M&do(n%BSE57niqG>prC9O1#cAE$^yc=@I+ZCp5=+aNMJ_2b018j^eXM_#TfRJ?G zf_)2Zl7Mqau9T%tWrsvlMKiHWBroLiK7bH@N+ffyzyOb}5|NIiR!&T=cd zj<){@(w_Vn$LMoEbfCxn_9FpW{mzUT_G5)9Ljo3_LFEfN12}2n!%P@n>Sr&*o)BVe 
ze)CJ4s-TZyQE40vk98tY9jBj!H40FJhUr)cAAWAF{ZnBIc<4d<_P+>vP52$fuk>$1y#oV!c%YED8mmq>4EC4&2%Xn9yhdWw4vP7iPmMM)^q41?={ZJFT zb~Sflde~RH7FVdWeWNW;Q+eDV zRFWxUHvb9*u&3Bz{|!LSUF?F7C=D(jSjUH6X9EPi@AvbQWP^mAnV92X{1fQ}?Qh0@ zM4>QE+f&5_48&-H@7NNu8blv8Rr#I^h+d%zh*ad`hj` zWItAzf^i8{ZNoql4nV3#Ws5&B;}~T(gq874{|Uc;+Pg3tV9C4-X0BU zaYbKl^=%oaF>>1lSmVC%SOJ}4iI(j%MR~t4H-z?tQTHC7kHtf%?#Jy} z5&Zi^Jhqi@zrps2MtAW{*iyC1UKRY}q8WsphyE{p*68EwN43r$Wf9$rpKA$Zt9)r^9 zie%NLMA$dhdDtt|!KE~v1U;gpJ)Oqfb#$~v!Zb&=MBJw8tD?ZdAqlB&UHlmchVUu$ zVxQhY&|7kH`lu62<{#hGcw%9-jS`(qBtjwqf0JD1i@(C6@d(G!FR)A3y}-JF#!V1LESHL&GrQG6=#v?tHT3e)bB zv*+6kg~cUl(`|-Jcm4NNV#b9hEW$MWwJlf-=r6zr&bN*G4MtFNwgqF4=!{fw_$z*^_}tcl9fUyC7sWKRvBy+t^ALrf2m|qaD#}#0QnY zS<;i|OUnht?#A-SXe@_1QTU0bSQ#He_hY?u@z`5#-m}Fn|B`1%J4SXat`MYXoC(Bi zSOA#iDYnT0aQ~8LH3Htl&Pl`}#JZ2-_$On<&=q%Rso#?aewH zD4>z7T^YHmtG0(lIkj&-v2s%DlDZS?H}Lq`ei?Fd%*DEY0Feok+yJnES!@LY3`M8= zJ6TB@c^9xck$ej8o)fnXg0uxiD-5LmStu&4oxQDadbc@h z{)x?_khTOKydP4g)~vBl71kKkV8w9}r(={j>@yLmnomHMp!g8%oUcL(qbqUA4~40_ zAcpMFE)t?)@f@t96GTc%EK8PALQkUcs<&#=vcS_9n7 zOS%171TPPV!Ew9lOt4RnP6#>s3BvaFx3;&53e#OKda&rSL5M*QBBoiRx$w02MD)`3 zj=@aehr*Pm9Rmx_4PSSd*u4Z3vesVE7lr8fyCKpLyj4`1$f`Md@2I_#>~;|to$e3X z-3o>kOIrE$62){Ckv1MJTW|L$>_z&mY@`?Wpr|xsPYuiQPJ1F`xR}o-8!N^z+`*~q!Mz_b~JolIP z+NVOE<_YtfCr-99LCQm`I&P~4m>o!sn=4mKcKp|nI#vJws5%e$s;je)cWAA)wXODP zwc0vbS6zUDVzremf?xqf9Bpp~NyxY{HwlDVOA_`FQNorPLI`_gu&n{BRU-}rSG{Qr zLr~k=rPlg>fB)x%+xMf!^Lx&I&iS8v#xoX3PWjhgCxLU1K5U_ta@0{Is`qqM^nz3> z0u0}~afp?La1LV23l#QF79ax59bv+1MboD>(Do|m-5G=s&VdlKx6uwN=sip5LG}${ zcqgm8MfR4EPik2*;g;!rQD{KdYp{a%4Z?7tt#WN!<>PIYD<$J~wTN^ZD;}5}4PbP9 zRHof4;DtrbOls}dDN6RJ=9;`h%Mc|D^;WcIrVG>XBU2j=Zz;DHLFY%GtTKdzUkU_W zm1z~XS440=BGjJ_BQU^YUp>={VPk=PI=fzwHs)~4Z#M~0CRE6fOFk9Ef|WLBO196$#TjB+Yn&;_PIwV&$TUrK5>S|^O!e? 
z64>eAJXuV%Y&Ic0ZK6@k*s-JNfqh?)u^fE2byZO70=5SRDD3&eZsxsFf45@gTv60NVvv2L zAT2}aN6H+he#^@bi;>HW`2O(git_SD>6+fvmkn*SzyCN@h`Q15Bs%{GpBW0 zEk<2ir>(I>$V8(sj-bngvXF_}Af99<3NeRe{baZL$4F zAbd3yudHS}{^hfvFlLusc9or~u*WiEo6t|bGQ?)Z<2FFys7btHjkCf4N0;r}nHXzh z1!-Zv4!z?;9GtoP&O!FHpmV#hgvb5YL-gU<+m1c4+TIHA$n1K#g?~rTPjr!uq=}J~ z@iNOf8#;aJf4kKvNTczoVef?SH8d2VMS>N$O?j2%%6jXoE zd|o-(ZWp8j3AP$GNgzztnl+rmY_cf+LE%Yc0%jA}*%r}!AQG9y!!QDYY>6ERd0Q9H zYhB!ktr2Sz#rrQC2=-TDYDNO+#1rG~9|6Lx*2Qb>Tq8D`i#UL2(&+aRg{E zVk)6JVm}Z;>ugRM7b7D(qIEi8p|va#0&9>~h& zo$)=QX+<8tIi|4BgJDH-sd%0U+D=Cb!zgTSv!8_Yz1fC!SC}r#iErG7b|hW^N47+Q zKKnXIa&3f$J3oMDpFcHX(r=y)a~Y8NT&9UsUB zBxaX~$mZ3#c9p_D4HG%M!`vcF#l6vz&bKllDsEr2q z&7KU|bzu^-eW~=!aXDCiUKDsv(_IQ6(+;N+mGX4UZP4FEp}~jNMD}Uc*f+lK1ac)} z+3a5Xh!9#WS=D~RI=vuO!zI4; zTahmURWXqyexDFzxo=%OsdaHf>*AR#5>9kp$(oGpcz%#7Cy_0~B$u6^Dj1&tal1nV znle_;beRJ5pzPWCwpd~32bwA+{A8>ag@vMDZ+k;Qnv+?S8{|)iT(d3Q`q{d|R3fnCr*V> zW{dqZm9sxW57;fah~#wX&^qoV>+D=X3i=a*hYG@tw(=+1$|pvKA?Dji5xvViC?Pb9 zV#@2HF|Z~1d1&?iMTl0j?ZH}X0NkV|?6Uw4Y&cMBU%8lM&dMHqOvC8?okej(fMkbzlZas~8dl;V8=q-v1Qxr z)E|4g{CJvOsc?8h&@C3qiT;AXDY2X4Xesi%Jo@X`aJnc|pPR!?Uj%$A4~%n%ZZ(Qc z-4aBKQQU~yI?)*l+&lLlWb1|KEi09->JJENdas{=B71WE0xoK-O+@cDr|fe3Pr$K~ zQSlNMrL!*a5@B|a{&s6jdZsuYlXxy;&mZmUGMiaWgG6YnagsNoOMT>4_$R2HB zn^+o`1_Gt`#-1p@7b2*3e)^|w^tvoFSq{&h(%8f_*wr;Jr;vjdPyk^kdp9< zY;y!DMHSRjI_Sri|%pvv~1>$ z^BPg%@~D!0Y_Z%gL^BW1nRH~$QkxZEJZ+RLU{(pjl$NTNm9|`fs*6iX@Fh7a*V;w! 
zd*b~|RVQkhctmD{&)bs6QV$Qeg$Sc7Epw!G>Z6gHiCX)QDAY`!_wCBG&qC?!|KgE= z;^!hzH`fF?cGcwoI8qufvFj8jf*-3ATO>fa$-4STML_C7c z>&GE)B9Uc3`iVzTXDR*%ZWr`VntL!QUf(MUeR)GJm?j8#$+5gxQNB$Rfo9}!q+AhF z-dV!u{d)m;i@YJNJ(kLsfy;sd`zWNvz%~ZCuOuza!)W9xc4!v}(w!_x598qDL19hx zdVZz0MCa5kmJl%<*)Rgjl8*MS{YKPaXBmlw$js>u>|``rxng`Qdtj8ICItu5oL2-< z-hd0Oi^u7lAzN#6S{Lu|O@)0D(sL~<9m&rGolURrM!9~IWnAUuN4xKdme&DmFrzrm zN(EtJksR7q3V7xi=MQ8(ejid(eKDH2Zv-fnN5q`}D(LHR1}CC|Xq$Kb)m|cZ+xd2- zfG^H1OS#bFLBuAA#sKUsm;cX+LUf<4wthux1btQNDQkgk!p_ZLUO;*yw_F32RKC$Ido2ali|IQ9mw>C+{@N=?>!P~;% zy(tc6y%5V=U3Qs3X+Z=b;Q1(7ow4|9A^GY0^YA-y4`+F zIGmLtAF9%{V2yesWI~Uu$l3(N8=1W;w-W*KwmX?;pU(uD^8DgS6=0lwv6B;M<}is2 z+36yXAOGc3@cn+dAbdEsV50q0VR5Nuy&KrD`$P#p@YuG?l1=dkLILa$YPnS^5u|_n zCT1dEJNaZ0I=Lomc-ai`8IgX$haofKnKI4s*8sU9lFs!%1pR|8i=O4b&=~@aEq0CD z(*=DlA{#LKV4ayLo}(X@ri$z$5%l@VYZB%`HpmL-Td`$^t-@dYgGZOLVcF0h6FlfSM8 zogw=qWMMQS7a`y5LKYf^;iiHz>n`N=MK(i9thXr4qvtnSKLue0(|a+dS8fk6!+&8J zA%;SGqTgC5=*=jMk7YBoUIfiKeE!D2f=^p@OHc<#iF^ z%F*%#cHwn|DPIyH{5aLyl_EF`2Q)W~b=)By5nXHRa+^nSwl+PKmV#Pm;GRoi+J&8& zV!Ky>?j5gNjp&j+6yjQ*XqjbCgt$n3W0Tc}xY6=Vx;n(sd3^5Lo)C9nEf2(h3UN4I z(k)1Ph>Il2PP?A^;50VHr%fxN!KoH%Dw?*9S``h)&(~AUm_rrU{0%MwUD< ztz!#Bqjr8}x018LmW4bBOR=Qd?**AA{qL{?3i@Cm6x2A2*Qm5 zY$Zj{8+9RGBIqmu{}pr=j7B5WJ{NWtM3Z`&ee(t{5H;)hFkygT_#M{rE^9{o$Lg6L z5^DCMy#Amd4eWBmHC7R#bpMk+)+Py3Ne%`^zVG057}TS}w>ZCq&YgaW9e zOz)3>74}Jt8!j}e&$?M`6vInuCjNd`L&p zJ+aU>D?oNxIfyMhE$D~$+u1Q-A;w-1$;Z1irzn>rV3vKqyO&s&ws)>wAvcZ3 zT@>%{iHS2TJ*2*-E4tt10x(mmdLwPBkh8ipUX*65Lu?F}!v*$|5amc#k^TBnLHcy; z@p8NPCc;|mHm^-?nIwr@A!}6g8cP>&h8D_* zw*bAc_vcrk=Vo^Yiq37?J>eQy6tZxk#a*tg7la8xWIx}whycTeX6LY7-W8&mDFaHd zMs7z!v35RePlf8lEZRt&76m)qECO8zJoTYIJ5s^rL&9NHm7yDvJG9 z9bn||ZoJhBQrO_ThS{MI#c!52Reu&_AoI}ourq${=~~OD+dUziT-9ic6{c@26N}mJ z*V%tXpgOib8@MAn=hl#K#v^uF2;)c91}p3hR!NyH6!y6@GDn_1 zo)N)W&(>xZqP?$*LKA!IP2~dtxccn;G)}Z%>;a-Qc1&Fh((6MF)@mLUk= zvgg?&3JgwUrrV!VvE0z}g6I^uB{?$=>F4%3nVl;)UJva`Q6do{>`VK-05+wWWmVQ+ zQOr?J*{0EP?6#dJ$;GUW8}4_kLB7 
zPl65XwnKH1kijNol}??Ik2=R)EP$l&9hn6s_KdJ|l>L#!Ar6T`W2G0yko{ATI#B?xuz#ge z%gNG)srHE|)RsG%71hq`-wS4q=%e9LG#L9;ZgkO?M&yNt>1sg1irb*LEx6PQ8n}DWd(`aCtETdkiow0x=wbdVqtLevaAxjTfiHV zHHz7u6N06-01u*0CXf) z*_UtkYH+d2FHo6XB&?5W!kdrUj|J!m1@%d@-a>Tba07Zv3e%BfV`Z{omDL2ua82d9 z_@JP-{SNsAd0*JSw_%Z-74T&d&wG3?-b@kjM)RiRLldc zK8P$qmYREq*+c~?GnSK;ZL0-nd2!_GbwkJ`%Srsc?G*GnM#}!&5n(zumaXZj0}1mb z@Y{rT0SX0dY|a(Un;*;Nm)ouufvVWhCG1uKswyd!^_F6xu=SMHoqy z0uyzgM$E1$U8-#<*nrt z?ClUP;j!}zcR+_r%{vSD*Uk}CvZ=T;Q`ncB%l3);Rj@My`KS-n;Y#S z(L)zyf{Rx`yI!=83#>?aK*C}DMGYfRGLqe7sR(qCMvmtES}CaN_bwb|^98*6JBzr5 zTO}OMFdB|Mq1YYvx?-Y%A7$@`BG?`^*k=ONr<)?0Rc|*9hTuOCXAdspv2;OZ;;%Z}dIh}!$O~m!gD|Z_N##byo!w|pE6V##wu}-+_XUv{wOAn(r2KiMw3k9@S)90EQP_Lm zvjoG$ZNjvPa}^hq{}J-(_a@$24%OIMzwq*?oeno}p})hjM5A>@sP#Rhu$R~AhMPKD zwJ;1}=cH}t##A(C)++mBh)!;%X_=$6$LJ;kJmW$o!-fYSb5Ax;z-M3rr^2TV1%}T`XQgR&u_(^EJ7anP%(YG; z!3s3?FoH2kh|i+Kaqsfvwo3Fk?P1YWP~ze0MMt(Sx7C7lpZyqSx;F^1&hUlF z1;Q4-%9*xDM6a}E5pqOtg)H*NTwtFB6wlS$UhFQ?I$LxYMAD)#P~=`DQm?czqQI6T zb0^5-;=_Vrb&>e^4&GxhZy_oS9~Q%ILG$VY`_um<(+d+T0KEFUsPHOYmatDkmPj`M z=?44q-9*y2Y&MTM_6;G780aF9`)28E$Y9F%ME2el=j4sFi-nn8v3)CY>{#Hz}$>2iESJ0PZq)7|-2KkEOA#E2nQzZ)fL>$a{?KdH7zuRr1f)t;IppU)!KgkeZ zd{>a#j+Z}S$3wcv25}ATVJc`|hLgK~0^ZQ%`ua#; zdWjW@D7GL7ldy+`!aUje%6zu0>=KJB&Y0oVXe-t z%ev9dz6S#A>UEc0s9>lf@;4ynbMU`Hbo$G~ja{p7v}Z0)ZkrM5;;&3LZIWxPL5lL0 z6yY?zO3?dMi1dF3Tl`Y{Ohj)U_DvAW9|497BXJ{Q7Yc-R04G4mfDK4d%9?i~tZ%uZ zcmbtl#idp%Or^+I$kD^Z0MWXXBK^7mwZt=y>jOM7cjEDB)%Lm|tIS9Y87Htf#gF0J zB84XCRdT%jHRQoIc@^q^=iKY{Mc?W+yHF6;9G(v9LR^K17Lr3$kU>9ztP5^^On zgDa7Rf)s+rZ;mY%phL-xTOyg;&DJ0a^|fzaZQpsob4I_)8|`{QiiI!tb++FMQ4Y39 zGi{}Sw|XR}Hd_~Rx-UK35YoQ9g0XEBU?$^k4oPKBooD_M0t~+QF1txV+KO#_$(Z#P z^ocEH1WgT1ZM#i$T2jD;FDir~6YUzr#(S2a4-&?8N^yDcM4%LBre`B$%`*C;C{%g- z?e>O(bhwz;kh6Z}sDRS95LH{x1 zLXwM;O(7nY+=-krpA;0=^|5fRP;Y+@30gKt3(QUgD8XSS_g?~Joy|bA{l~xd+PHG! 
z)M`Hy43ko?4cjb36uOjPF61ffBO|ZB_8VdUh3;3>+|ay)CtZ;kN%YPWwkf2~#JhDC zKR?aFbPfHQ!y9MW$AaE9P8ql*>>oQx(35wL-6sOgIsSO{iMfxyw|Tl{h(dE1Zkry$ zU2p7U3l*j*_?fLlcu~)fD@39;4*FZUU|%mt%Mh7HnM8m=Ph`hOCv0y>d1Cy|_s4Iv zzY0>8UaeSSwZNX27Sczr05=QyU?w7Ydx;y~E~2xJTYB!2Y`Q44_2|}hc1pTuiEf~3 zqmin$%S3br4CPHS+lGihjzY{T6e;Y?&Xfs&IiUn*rYsidj)fQ7kfF7FioK^W9n5Xs zm2ZC$po7JkB{*e2H3LN3*lR?-#vZrR1BKH1a&dw_ZGk9WA5xb&HdYw+ks>!X+GYvT zv^=a7RVnNr?~wksB$Y82+aR_^IQ)VHL*ncbCfU=X(?0C3mDqCvv?#fCeza4rvR6f+ zQ1lnFi|wEgwKH^cusYWkAdSHbvz?vkvD_AHW!8RIP+6O+5h=V?h?W+Y>0I z*48Cg^QOAd{wRv~y-y^Ob5I1IJjt2~sb}`LLrEBYN^YBOopVU$%_xY$vKxfG987_- zuH7sGQ=*JzE^faR($rhV=;aG|lM{u!&=y-<6m=QVMD`D(1?fTi%m&+~uov8y!AsbG z|0j1&U&QJy<;S-!*05d7X5--2Ox6{K^x%i>0KEYgKPPz1{guk7=A=&C~DYK2JN zlmmMXtmKhwvnbv%Ojo7bbEy>QFyXe>-V_B19hueHFe5UqgJqpf!MqZJcJHN%=*8+} z-_Iq3cOgHz^te<6uY;C6Zasv3c4x$kt#5$Gc8otdWwT`o!i`akZBdAl6bHgQ>jf!U z3uW|Mx;Ml}l5?xg*?7d8od>IJ2oTqSDX=IgqYoxQX)y@~)*%~W|EK~llqmYHD(^T07g0!S~Y-Y?l z3pjuBVufrke;TsqvdU)bn#}GF+1snfW5wErhwPEYMX5kI5^)^j2}s^r9u7J1q%QHh zIze9pqOV%}(uhVQxGyiVHGcID*;H|?Z~IXFzC-!Ywgl}!c>vpytTkC7l6yrOZiT_L6GT)p!+)g`g85rKctXD1+=(+ z9dcuhJZ_@|ya!ovuJ(A9x0Rw%RdMq!s|ksb*Q>HM0kAzvw`~GGy4d%#gMvCd$i!0Z z|8rZ?swnRo%E27>3r=G0!B$$CFeM>oiVna1ThNC&xv^fZ5Y6)9N)Om z9#S~mj%~zp9om>$nK>3uQ;f5W6R>Wi>aAWhXU3g4XWfy4?=PNbZwoT>W9c{yw-1FF zz3ftcF+LOU&T?&pNTQuqc#_gX=IV;=Y*A=d^JavV6sAj&x5ow~RxTC6`!KYaL*ca{ z0W*%*2}ZNC2;Q{Lc!az`kiw2F9B+LUrX?fgH9S+m3mPCdT_c5kH#Aa06p>cIViBFA zL-3=PXRnIjANAmVcIJa1uWKlpnr8`m=ePkc=Lz6EQAmzzEZ^S`gv5ywjzAX+gs&dq z2A(r?SyExwiB6S!A4iZ{uT=d+prQGAFPD7$U}5!W*P}Kp#L@eqabEq_h0NH~T**%c zR;WYb<4bGpd4*|ASu7i6wf93(vgV8}-TS!xR|F=~kU{pX4O zVZ0veV%A%TZXld9mK)q*0ZO-w8|eE}dFfWIr3*8g5fk5Gd8tT5GD>ZeaNrx6b#caH z-IkCgmW7w$y#eHxSDW+^myf6H&|JO#aR+52YLHG;m1;4&eJL9pB~6X;Cjxk6UEtrZFamN3Ty3? 
zc<=%Nel%&2+)c=D*e;Ri2S2Nt;kyHDD_`GMQDILBhT+uPiR5&4SW8I6Luxro^XG#8 z@gPCZSu!F}C%;*5?h9>iVOmtk)gI#T#hfC?iNvSSx@5U6 zQJB_^$js)!+@21Q_9wTlv1bLn;$j|Ki)??$n$JG)M`c8Y`H{neYC@RAI(_2p5mO){ z_z3nx>g20q9nz(h2iDp*1v@@cCb1;Qx{F4Ug)-L21f3&HOAqguW&a6rG%dGeRE~Y; zH^h0{GBTwu+F1mclBo;aZUW-E*8T~0Uw~{ps}P)jK+u~$Jf4lTtT1IATB8-^&>CwI z!H1nE0=*Hu7D~|g@MCgJP`ecD)cud`Qg+$)#fOOU-yce=K+FTPIP9LjDY{pRyN8%v zA{-bXDOIU4^b?I{M~nD$>o4eiMBCI<>S%5c9Fdz z3Js8FkSX={rvTw6(%<%BfHIxO7xV7`vx?#+sHlADVe&e!=~9zjBTR3bSC?}CeTxu{ zkKSid=5MrLi{fLZyY&r|f+>#z(P;xOkc}A79WBUsFsI};XSzKU;@MDnm)S}I8d=o5juCuYh^3Gt&W9lacW+E)667C;w15!~)Hsi-!nD!oAc(5)yP!@J?d<~2s9Nf(mZ=90Nykz?9%{vC@-;_%Sq-V z$I(L1Kzi=N43g+nmaLm2gAX$W8HpoP*V-|Ky*K(Hvr{U-8ib_>m)L0np&zjz-jw@XGmHiJ@1wf_wKf7?TT*~p;^KGRZl^9!D(EjkL-S=edQMWS1NjFoN+j70?DJ7oh6YF zR8?ead7aD@Ua2UmERY+CTLM5$X_oa7@Ya^3l@)U|DG^13EqR_AtWs2@lC&(4g(j;B zNm%ardUz{aB?4@h52-qZnL0%F8)fw&sazdIOVQ0g122J3W;T9kQ?I51LTJ?y?LG87a$ky6WE5v1*tTX5wQma z^x-$Jx1}N6UNzM=g*;S$V)+DX6omWS635t?*pneC_YT$?k|wJsCaWh~TZlS5y}~|F zI4rf{v0#qNu=~XVR8d$89QGTSg*YXhACE9-wr|~$jwXZ5D|Pz z^uZf~?G<*G_2tt3?G(J12dp+hhC;6B<95nq5NpxE0d|FgR2`|g??%MZhKWMKrI|eS zjViFmLJF9{DqhL2%5qVt3PbEo+|WN8AUn8SjNSeKNq>Axy}csn?MxK%3;m8TEy2sf z&DYwwQ@p4=j7af#dZVB-qE7)9Z4i7)Q4%G*8e(}v6b7vuf(olccIqmNkFgd(4XzY2 zUJ(dC+za@MbXjvyG&+7@lf9=PRUuHA!I#gk(V0`Bz;g<_jl*P-^$@{ZGZ5PeTpRWk z#hH8u76S$dQXiHqOKrJ;I>yPVn^gN9+(brPmB?k_X#=K>U8$k*OWVHy>Qq)xVbLR6P-*V|Zysp$4z z_LJ!Zy)(S9VYJJ-i{cEzsbyv^LSA-_1L&Y zCtYU6M~b1B#o7F(>gVX&Gd$bjmGySB!v0evrA@ihzDFdlzQ0Ne1(P_Fq$wG!#*h;RX{QN!&)_ddZ>tl9^{-_+&u^QNKrP1_OJihq<4Hw1R}U^b zSZ!xM0;X?hFWWa2_PFdAjt$Ndrr^k18Jc1%?FS)s5zd=0R@mpl@o8J_QbD?}eVkrL zZI%dL8oyt-^9y+|zZhp{GegOds-Z;Y2a7_NvdWrQr`a%}aNf|?*sY|m5Y=mJt*o_X z!7yLTWD4(15xmBhomgG|b4ZW*1-ftFd-P;0rB zPX;!&gp_nea_4#05>h5>r`X>V)~0#_f*)L%@+0xZ#~^vv2i<0;1;mXQ#(Ro|ys7f* z60=7G0b$x@2O3^0xA`JCU+?HK)V2!Js%yG*v5!MkZgaJ)@ICZ6QGA|#M_7e|v^Q6d zP_Ld1pbn<9geP~~O(KN(fGH;?x!ywihh*#-rKVJ8*&=w&gLyp}72@f3=j(3bOL;jJ zKPq#yL%R83WSQ?(Mbge;g9q7Bh2e>w*mabm37>|%@+8g2eM%K6Xm5s&F<%iNYaw?* 
z4=U_mBNkHgc)fc##I}{Mw($znLT&!oBTf-?FDE?;3o)Nd;T)WIg)(H}H&M8EVX3_o zD0D@J84j$pPef4@btK!w^J0bFFb5LN=zQIcHavunEUdO)DNLmZib&ZxOVB?P8c}J# z4bhZciZ$2Gf-oPZ&{2A~PXySMnKOos$|pi6&yC}GUE`dC4jw$9|Db+$wy@9t zr&gK?g~=CrO@-kNc9W>o%^cNz2%gNWw@5zy2EfR^g3jk$sZfp(rqmqlsiDp(L~q;5 zr?r)@Xe(c0_lp7t6D+-B6oyks3K!UK1t|B8)%rfIDnM-`ISVZfFgR)39w4JRh^wz1 z0pfIBm&yMU^fujzGf4YPm^xA2vaif1tmPh|c6a9b?LeU{x0!Eei4yuG7mVk2MK3q+x@(tP5?n`E8on5x-rV0jm@A=_JcE%;reB&l*EAsvg2O)FSBh zvQBNcHevWPy#KJ<>;nPncxj40Dt>xC5kZ`03TN4!3qU?BTCaW)AiEKrz=j8S_>ooZ zlN;>50J(x$C3|&({Q62%p{Ufq&V?aKTSdK$CO#pk+Bxz>CDsK3P49!L4JNh-+I2l* zPbujA;0is*jtbL#wi)>7i2l}S%kiP;I)+!y?|46Z%svs3jzl6vi|n&hS*(8x?F$P% zPf|YS)+D!0k`(D_B9iCC(kfk)pB^A>Yn{5@EI>}^dtRJjKM_<{CejryZS5)%*hr8( zxDhO?~KHJN$Ade84*`(E|GgS-urkqo={QuY@4}XZj5v2} zoj$`>gapa$DW%Ck6cjij>T8mYmERX}sB)A|M!`Up1`>_0;4qKpn# z+vkGt>c#1ND)>f5r0x8ro<1kRso1&;`IzB*+J*@Fs6+-hn!=m@aM9sJZ0~N{ps;f( z5y#*4R$;ALkxp)dkWU*rU(ecEZ%v|UcxrbiYbPaZC)%r`sPB@U-fn%Cc@cYO#1aMe zpb(LV=j=LKyUu~Rq|F#PmzXJ{*ZktRmTeewL`^i6m$MT4 zlys{T$(z4_9_OEJ!p`?-t(a`Di^94$Fwr!G(BUPtaTg+2EjTr-rF z*tbRTX7uKPmfP(N>n0i{Uf$LEE2sggp36g`l?nQfH1^RjWyw#$I4kc+RJT^wYx6?( zF(tNCK?b5XgSA%Su&Vs82+9W0X%lK%?Twp`EEsQp5XJla(s;H`r>p?`sP|>w-z*4+ zd6m)niG?%k8B#zUa0&tdtV8;P>0YZ?U~hHB<}dwHvLG5&%VpWS6ys(2!wH}uEK+#4GWa$K{oO{ zG(2QSf{wlJqEwENB?Y-0YBq@C{cauCXpMp}bMNDLE?s#tss~&7?*!N@rvy7)cA=mT z>TrZo66|{IXQI)4wnbEyE<{`T?zdYahCqS16^%-gzA$-jZH(H^>&fUD6oMIdlH;4kS4y|Row$}Cu(q?qIX7WZe$NnG! zZSZB78@)H$w|n2-6Nf2ZiO%{MBKjEW(qK&XnQ4Vgxe5l5i$I1Z(M*? 
zz79t_8}0O3qG-*L`bQ5hUSypGsR@gNe3ZWo=q|kpT8HN?Ri;jZ3awZ@U$C=5ZRRu-^;&B8cuq8V1em zMUi|KjpXr>-AEk-AH)5#I9ZLe?}_52aHlfex}{PuPbGgnP?Z07NRUKhQ7Rv=nz=SQ zg|qNOU<-xeRRRxWxwbaMrsw5w&6;W3gy~0YY~21E;#e_IrHt7(*APdeHRjQ-v%d)8 zvz8Z4j>n=Gp-bchIhHDt94QshQIho@aVD0Nmy}9HZ6`E)}eBN zr$)RXMmO|P2AVAu`~tg8*{Pdn>r6ah+8v?jV>=dGeh3fh-@_hOm`3nm{ch7btS{PD zQJ668wVU4E_GtV1c}I55vX4ZhU%CyqzlS_9DDuiS!@j$YXnGdyc2?MRf=bOH*M2VG zTqum^me>O!vhTpb!)=CurrerUHcw&qcNP8iBQp~25p??*of2%F(r6XnE8yOKj%^cx zUNxZU)S9pQrP=FvUy99Jtj;`aqG)1=4zoZ?`1Y( zTO>ZPRaEMOvzI@$#-0}RMvIl423``yzd7DQ5qEw)VWJD`3?S|AZ@+Ab0_Pnu(uUGlLE(LbF`I&H8*zTMbC^a@XrNLZnC6yhyf&g7Xw5PMPMDW)4#O|YAEldGv&CAlQy8wl2?MJezm!Pw|Wfr^kJvJ^Rkil&psoFz= z)T91CLISORTQ^#_dP<;0+@&o$ut8V3_A8O7LzW|9%*X&EsgH7dC_wUe@7&pz3hGmn zjDha|*NFgcj$(=UUm?sBy?3epTdPy1Uc>b7{6UD4=K?Ms%} z-3r6pEEJjTHv(R0Uu1^Z#*7n%-qD_kv}LuGr;>0TQ+IH#Efs~k+^ESbdEaKG3qe$l~sHd7?Hqk&qr#-0d}UzMp` zuq_qji{Kc`ony5S-PkuPEzN2JU^*4sQvtBI$g$@Hyn}cQgq8MzD4LK`a`kQ0KSk02 zHqNqBw|O*ekodlA0h}WeRiQ7^X!i(smo)$E*TUia%Vf~wW32d^{GfI3}fr| z?d=YCVJl6F}>{$UH)ZRH6_NHLi-C|2aCS+IH2O$j#Y;rSm z*$&Ub#F7lm1A>9YdL?n6#Yp#bB-?~s#spg_=p|>w(@Uf|o3NyaKK=;jA#G;uAs7AX z*=LBoCkRV$$%rF?zojCFAX@TCfLw(%vYYtgPSQH-b4&7y_pK?iFN@?~DguWX59j^w zBGKqES2}Yr;Cg9*RG5yoLe`ow8o&(Zf*VQ<1f9j}UPh$z%hDvSFC2xr_c0yXd^ zS8Uk=@Em)II9z^6$XPf{Dm(nJ+EUTH2JVwE*e&cG$i)l$JAz(eq==bgAB*C1B)Z1r z5m#ob>~qoS3|sQGD8cFn;yZi1ER@fn2t8KO~F0^w0EGnRG~Q7T=y+x&DH|<2M0M$OofU`h5U#4mDeRrr zgBDg0%0*!~BfTuNv#@|T&!&nP&CPYXH~piaw@vn66JzP|DNmm)De`(T#ZDK+nUWhh znf$g0jAVXu6ThpM3w3-@eju~1sY3QI=cjU~^%aFaoxnZS10nxNw9m%c>p~13@{}3& zu>cLkBVHq&_`x4M*VT9g>>0v`CpXy;g}u9xSE_~fpeWA5QaCzVP%}bKKpze{ues*u zRu^(6x7Gz4IO{~Ad%R24%O8TR4@nq}$OpwPdqETm=Mf1T4)$_@DCW*!o6*(*>t9;P z9C}yK2X}uJQn^2T2CVOD;}SPKAU9vPi|C0GvLQ4)B$7xX>z&I$&kDr0@-=PcQ#sM& zJ=D&A*4x4wEDxYp2RJkxOJRA|DZrk5p8Pfs7liA_mMyjs3Wr-dc4a58F1FjR6a%x; zWv4qmln`8%jS+-9dC$)0gXfc<9O97(rw&^j;$c>zAjanO4Jud}cI@&A=2F;@=r4K_-2 zpMm0uzG-_{G|C^UuYvPKxiB*(vIdI9xY@QLP^eKVYyA8)h!XBRmT6=C1dqssi 
zsI1_?N5~nIqDS%yuGQ~<_q|3YDJYAlaEqfHlqZ#kAeiZ|LBAt!Q7M=N*PEJ00OiFsn1a)Vw}kAyS@ zSv(46*$0B&wl3FU#pAESunY5ZtL>{V0O(~~WqDiW3cEAFNNTOZ?iJLRx9QMwZpk;; zFGZlG?9XkA!p<_jO57dU@{qf&vf=Qe$+k+6mUDH^Q^*=2jS`mHZC6N%9y3ld>^VU% zitj=OvHvLwKgzF7urIa{q|UsOG1gUKntNpHHrX8<5TZ^r)!V%aQ$8zTv5gMtm^#?~ zwkknqD%yHk32P9+dBTlio@1)}#ckG}%G&jsu6D|cUSnIuI>{WI7T}@n@TAr*67+%x z=U3fF$omxr+XdjuUH$DL1sToaxVAFWgnTtfR%0Ml#zSnLh+Y&DZMC*m*qKqXZ)aI? zF4|O^MWcaz(NRr{^T_^+2-GK!8g|A@9?Q55iOZ$Hc_PsKW8)Sa#R|-&g7EI>ikbF+ z!rp|o%13o@sX(khq(}Qta_*&qzBgg#h?!FyIEdsN*uP36WHUvf&?CF*Y@Wh&0WV>g zH_5T(!qmTi8UHM`+5lUptZ1D)!|DW`+b{(M$$AlBQWi&vg#A4geK&Vcnf8sBiB?|= zxgWeD01Pc>TW104iA0Ogqz* z(Y&A0Yka*^$OLE5*S6=BfUc&=>K3wNp9mA3tlOq-)S0gUVK1A5lnB5e5lO?mls&{> z2AFKvh`lcxCFtWnAP?(J)hUeCiURvXNPc+E{9}`5*=K_EDtfZr#Z~-|UxmPI8!_0r zDCnH!RMf=tg9y|t>-pF@$Pjcx{@j=~Vj_GQ77!!`Io=u-#~@(TD>KJ-q)N(gBoY5ta+ng3fv--YlRGO(tjU&jC@%oxSaZ zkc%w_4BX^e6YU~}!|Z>8U(n5#-;r2$h0Co{ zklOP(7-NDam7L2qZpG{oQ8Z{eW9m4!P=s(@yj5P~QP0?Bi$bzstTADABG6^NLf)5R zwoO<}m*dEN9ROi3`#^x^cE09DJNuyLqW3aaW|s&PH;6;RRSMIU$oltYxi-086z0#M zQPw>q%))$oEQFKQJ8e`*^2Ru;RM4jwDnz`!%o8E-CbBzeXB~n_Fa4=ca;IkqH)UtYlH(t#6&ndv!Pn112)lj#R#vN!yaFg(dBVR<+w z6}i8bIr9Awi6OYS-74Tr*H4T#RMKJ$N$@Ecw=w~rorCxc$_0Jq^voR13Q-cd-&~qX zHJsPUwSulz&Ot$sO1kAn5hSVwBS;}Rx6(S`?h=GQ?c?k0dv6f-&U2Z3y`3ja{XDbH z;dTEKL6RnLvU;1s^b?VZ)ix}EeamR{O9EjCw!gfA2jCAyp_s_pDAf;23T+pWSrz&$aOSzvub=HVEJvMK?pYAdg5tC+5h`-N{oi9Wn6 z_L_r)ytE9RNrwx2Y3MWZ);dB2Z);j?-x4Y;6~$Rzl2&351<0kt?w97-!c_X==GA#v zLt7Fk!MX6p-L_m5xHJ~&$v=c}b~Co!750)xW{$EhZv**R^OcjapJ~^LM2b9A>DWKr zBS>G+9pO$XCggp2Wi?OwV?yHAsgFx_vMxkpfd?DES+-V~@xiuxiR}=eq}b@pvFH*A zITw0Gq8$fB5F0DD$cS^rHv8V6pwJSeN+k(&b%3lM^3HIJAO(cgaE%QQ@y#poG?r!e z33^j8K-p;J9objw?$8zp(we+D6VR3lvE+7~zh<>nOtZBj()!%IoRRjb01R#Awe<~! 
zsXCs{gN3BM|0)74%|PiM>!5!Z^vyz6nM|VR+u(N~`55!Iqo3kmib5Up=1Sw~UDW`{(qVuRQ_EfWlPgR*A6J92Wj!d8jyKOm9C`3k9L)QRXV zkfqVN6KrjYa=dE#@wH91Q4~7ay-QbnMPb^9Jb+vh{zVXWrtvz+v!s0*k{(+#+fGS( z(UJK3GYfNa>}*lI`P>NRMt;C^t-q)gpKMra4~205K|O3-2w%e^)HH>C*s_awN1qwu zZX#}$!cq#{Sl8i38{1KwwGmxY6kU$!o0IakU2eNYqzjRar-^o9E6|6fhom!jm+m8y zQplXeMw=+;m1E?#&~^wr&p0B=b^ZIIXd0mN_CZK3%iKj=bY1yp;%Ea$oQb@7ak^)D z?YPVQ}!akldwQPW8ZCusG-_A-!4#?j9F5LxI(~x+F80Is|z+DB-po%4lWb$9`V&bYWswl z4`k*6>P;c|5m^H(N4fmm!w_i5;C|LgLE@wj%zt~ViwIuSoyZ3{?&#tMI%+p51x?G& z!L0f{0<@W1R{L!TcSZbSV<@I5lbsAJ_$FcMVHZa>)`8t3s9Zfm;JkaLy(S8*L%>4& zvHByP;P~`$?@z2hzI2;??I;v)S}$EyT`WvPn8q1)j{ptn)Tz5QC`?IA>G$U~*|QwQOmdo+dnW9`b`6!x0Y8$)07t&m?wDYgRt z36PfMBDMZ`NG=up=tT0R<0s3{$>8_?3Sn`(b9Wn|uzx4PO1Aw{q>~f2UhkeZCM44j z5OqpA=^>G5b&m{`D%fa0EKIj)#T~t@N{BkT-FS_yP?+u>p806|j7hdSMD3f!17?WI zEZ`?4%Pu_ODUlP|fL3-t!DxsVX_UE)iYU&iPTg)?e7Y!R5^piSltFY<9#SJF#4UGY+VG&*rjV%yH{c7{gB8M-IZgU#wyoXbo~) z;=V;hO+Y-aqcYztNJ}a3PNV|3K7A#Wg25}SofO$;!b}s1q#(dk-?G6z7YQ!)A3E4h z`;ail0d5o`HCp{ja>)3)$lew%64I@+u&QNqgFwU}L9bSZ>98jIuqX^?!6;U~iU6Xk zkVjH!sf0Ob+eLvb{N&U`Pliv6022ldw=evKAahZU665w2A*!W&ov-U`-xBmzp*)}G z*!i1Vf>Sg5r4rBz4)k;EcLH?t@UoTmcnEjC{zhA^uos%#y3HB{oz0Qmcl36q*ftTp zgUOn0tdzDhm1{IcEA|UgIO;6+b}BKB^h!7ny_cdS8yfVecgkN$Pg7a3rbKh2Yaq}y zbkjJ*j1j>*#~Ejll?l@qUN+|0bOHa^`{p2|&dU3okL0rcrnPb^FIR9t5|Na^;9@2s#=?|x)9YH z9qmmD>-*5h!I5dBJuiwcAp`R4+P{0&Xn-HHTLqn&eRJ~k41b#_G~uQjEi0sj57%AO z*~SZsuPbqss4yJpDIW@Z1-y{`v!s?{&xNF2*joNGgmVxVx$qxe#9_2Of2uH@khMmP zc8n0DnTXHu`(7&4v4Zl#kLcjvLf+A31$Np$y_}+wZ0oLY$2psyY_8jw$R(os2L69; zVUS4?z2i=Rwjva2-Twtm6F}0+x-Yp|1fPc4vAFdTq_ZuvT9(-m0iO%9Hf2LYW}U@3 zfwLZk%-ZJih&I0?K02dw(r66vhi7j%JaenPAV^P@H&lPlzahcsoY9%qSz#6d+>@2R zIE9sB(~DDEwp$mG;Z|&Ky`D;h7_&P|Y@mYP^gK3xX#s|kvjU9Vn^xL`sl+{z2`Urx zapVF>iYUJkg_dM+MKe)hxKP@>F5RXGI2Xpsh2`VIw4l#mYf{iT&w3SIuX7(d&UUA= zu*XErTG&?z#8Hu%dQ}9Po~v76WS9axQCa@}{MGg+L2q0?ewX+nPKZL|`rqBdK2X@( zqYGF&IOA?rC$ScV&;KCqJRM^OB%%oc-L?PLkvAp8!GOHD)f*Wq= zX1f)pz(`~XL1jDZ69}}nrMhJa-nefPBySmNm1k_h+m^Wr6`f2(4G9$b8!P@{U 
zC#*vHeSux41U|U^WV&cT3J*Z*)Yc2bq>MPSQm+PZXfrlx4xa?PAO1@~L%4%~cE7yE zzWf;k?@wB0tORwGuZZF`v7JRtQm7}6)V<3(wswFH>R%unO> zW4|CR=+wQlwJGfENyM}}w||D%_D$9H<1DGQ{Tbi#thS zT9~YO}v2~elrH&$Cq$lIaG`>6drB;d|e$FDDcp?^o`5lmZd-x7{K2@I8=rRGVE|(NEqGY-kgfXgf-8PcZoo& zhE$Y&B%05f%=EbZOVB4+RvvQQ9}AyUMv_Sh>1IgN=)x}%LnqoQr`r`F{QkV1C&MwG z<*P*3bf}opR$iz3g*~Fs;H(($ug?U4P-dki1-xbyx-#q|VQ<)2ytw`F%MN)q+InEQ zT_Z?27$;a^KNFxSGF0|!g;6VsbW>PH>LW*g%OkH&z-Sqnt28jVFf+kZ7e-n(ia=d* z8&$i+q4SB2Q1edtjO$3k+H?m$QM*Ewxm>!oDok4NhH{cgl;V@%Ra&-5GMS&P`aAC4(tK1`2h^WtN zge_5!((*Y}*fIg1uV|<+Y;7WhZPl{y0Q!+=-h=lSj^}37PWg(LAv;!fqku;zrGq`+ zDRie#NkYDha+JWQLcgAO+73ZyT+bLXr*8?q zz|e-}TzbjrVQxs1z-1p|h+_kk+&jAalLRS}&q{_7n(Pq~XwB7~udt^Sro7g1i!fKy zI<8830k4GuTBlFuY1<(!%+84+gxvM3UK~R|5Wm)W)=vb6Ym9bEIRY$Rg?T7~pw`Vt zm?x6=U?jghyF>Dp6)kh^DFK+#led3me=nrva(x>&c;}t&(bP3S25Bx4_M)2S23>%1}&p9496dLL4pZk3jcw!7v>bFKk`B+gehTWbOJS&cNPIWhvhK zxSbGWH1hfAKM7F?#Oo*hvNMPZQwdv+1zE^otQXB$&>K;s4Jnu?%eMwW>JP3C_zi4M zCFnDBnEgS}XJ0e7YG%(Ye?Ad0-muiFAa8s52E zciW?|7u!2V+|$B7jj4vs!XHI|_iW7`vxBMBctX#$W5V92LN36L2gvzW?@u3wtl5~; zw;z7}WYW$zcC~ClR2X5u>-THF1@|JgZ!1--w*vQgi2ptoa!b$)L9{0Y>FrQfL{0%h zKJwhMMb?Gh5Xrlhz;3A>7pBX{9$Ri-{03nf9r@}lu`>nfAX0T1c2)psZ1wDB0WwLy z?3NHwj7>XREI^+HNBMm!+BKSnZot>T%-V}w- zz=~hQ?LDEeN=hREWfHsQOlV#K?|wThSC}SaN-5h)1k{~PdD7E}AdT*FeOH?oqK;JW zZlAv0k|7(OCNW*tZr&(Ji&*%xY^VU`v@cv?_oOn8lqQgk50SatW-ktr?)njfW7Q&1OjWqgTva% z9kDP2JUSs)ROo-$(LkXMhqfRYw86gqZO`MXK)F^R?2`idrm?6i&k%)4!EBbf03rB- zcsE}41Q-jRO7^hauD5qXEdICS)*iqct2xem?<~*v?0k-0=LvWv52ELVn81ai&=VRq z5X)QLQwj1AI^`$3PsktBesi6G&jIcx`92~7Jx7Znt9f0_o=@cuV$dIp626RWhc?>- zXM^d-wVjc~d02qjW6kSj?{-3fS@=bzmoElL+j7RH+3SKd{_xzDhZn6yN!eOOp*alb z?uMkAev#hlm6IRsrPlK<$k1Ia8JE4x`;MTE7wgTy|hkT>y8xiqjL~6&196I;{)HZnJNE`TF z!t8op-E5f(dO>^+X;vf*&$&vLy`tX=!qv9&rETT2r1U*4P&8NZs>Cj2yD0jaw^!N@ zh5fge4}#Akx+!OqDLOR{=w)B~9zn0MFkV_@XAAp6pWIp-IYq6F1j6;#eyG^+V~&oJ zp0uKy_#cVxGqZE&o9r4v#+o%QKbByX8z};fy8il3mZh-I>vV*%`8CcB$>{_xxY)^* zi9m4@f#G^>y`Z;|CzO~q2{RP@;&D~5GbB!K-XcpnrO2){*DeM(1OmLueKS2bPqVRuT=?od6<38vkk&D 
zrDa*mEc>GX{b*f0!A?Dwuz&Adbg%*JAmH;QZSNY?n!cqJG#tz42-4dHf|QhMnGC5J7(-t>w3qAh)ONFade!lQb=c| zo;FiKTEjX`18ufoV76=vS(OMdlhb&ntreh@f!uoL+18M?FlX=l+yZ+tz<5S;Eoom2 zke*;x)}gaLS-m8(P_xkvgz}J%;nwdb=Xq1eF_LS^p9R=fxmfnG1`9eX5w(2uwMOnB zb3~)Hkx?eS2~82f8K#pRTi_=`DM;Csv)%YxDnaC{YPDT>K3SM*5Aw>@J%o{Tw3`%m zP85{j@7D&30Mnj&%mypy)yOadnp_znHAg1&*O2%aErL&P+!EhldxbU3dnfFjWX}ru zxJA}WxG}xp2aq%^>Kde~(PcY-JsIuutKF@9LN zG@c@ocOoD0t2~<~f^$*!Ye!3_dzNSnF_%eY_D2D4Hx8$A?FC`lJ(N!`+fM%>$R`)( z1!SiM>wk8?s1&e&Hbd}h0dMJvscVlf+hD6wl)^ZMeq**>6tCp=*x0f5d`Q7B2o8I6 z1N@B(yh!BGtE{^K9pezTo1K=YlNv`^VZZt|CZ316Q9t1zag9Q#eLGEX)F@ zjtqXl(~)0&QY1GNv4+jIK+v7BSS^lG%hkt9M0cwQLE;cMt{3BFaRHEBK06gJYY3_k z{}yDnqyZCrR(Bvg`JJmF(#@R6l>PPgh)4RGq}uaBt`fc;Qj~v36xU$)9>lY)NHn-( z!LH*ah4!f+{e`iVVBfn2zjN67W~~xq#~g zI~CSDe*vadfl^#+dw$?sN|yTY0S?dEmXEa)oNCyZ0& z4pDsj5J$_f|HKIxY2#FR>Gh8kh~dSzaf^K=1@EhC_F^#x_xN` zyP;%sQbl5v3A1h@Lat=2dM8+^Fh36o>(tJd#U$ixYD8o;j~X`Co>Q0#SgEpQ_m&_- zGkn`6~yxKeogM{sL{C=~k9 z`jz>%U4RPP^J{q)uxcNNfr zmKaYOy!Uf>^hxy`U_}CMN#GPMH_M7epp(HX=6Y)qWFTKEL37$#(T`GZ}3^5|Q#4)BPKA?BX~a0*@Ap1h70U9pT8G&H{Ms+9UhQ@(L+a#ozttkOze+^;9OiO z3N^MD6xm3HgX!hT2gBQfY#1Z&wq^xg%{^~GTz98WNvribaa_<z+ZGjoOAEx=N8+dF&HZkr$y3xMN;JSMNPkO+R@ zabS(Q%~DY)GJ;IF-qH5>6v3Ne@JRW#Pr-l;l}gveeY(q#R}bC;FBgUO_UhHgu8YTx zN2VRopqoW;jSl>hu8rfYTy7y7gc(7$rEK=Zau2C4GlVxHLC@u?!spnQzy$af`%HA# zlL>fw?C5AYV#$7(%DM;+I82{2+X7W71V;%?+Aytn;Y z$bYZuxc1`x2IfMd&+_Io>@Ag+D@cdo1=VPITY-=7Tt2&FJ9~&Aweva4wfR1-WBra3 zRoiW~&zl|M!JqGnZ4rU`+sb6dXPeJDXu`b{Y`XySb+ljw{ss_TusuF{S{S+6GjDd$ z%zE%ME3lu4LdEPw>2|dMqlNx;bMC=AOp}yHJoxK?Ki!p|C6R;A;7(b`&@D2RCqQyIm=0 zaNE+hRW`zx1WzM@H?)TZwXCr3{`LZj&ho{v9|Q)q*VufYgC8ymJ9fmw!_~p7$A=yQ z>d5}FpsRaon3u+H40Ls4F}u*N_8?halkO2=q4m3Rw+!6WPY>^Bcv;= z-cy(aSZgvf(kP0~dpnRIC=D1CrAU8Ri|B3wvO`l4q(3PFoyU<-g8fH;*_sIdJ(Oo( z#uE=l#}hS*TZpGksR`VjY>*Ibn!soOUWMI3A~}qT3U&!5`NX5q;=oRo5Jl6(scg58 z3Zhrlt-%-=AlZX-u9Ygvt4B8l;(IbI*eHLYR;D=Dd|VyD_8-AKa!27P!o0dh6p9xogc%qq=<-MP_C8ja4TcP{%0~Zfd^}zXay*+}ud)@s z6lB&bP|sQEA-N(jNOua-LgebB_L6`b4CFF7Q5}pEqSNJ-@LdrY9dhSPv407<{`bbe 
z-;~>c4A!ag)t03&jZHy`IR_u$bA_oVS2wT~0_xaK7ICe;=Akwv1eM+uZn6 zf-MlT&jhGD2`zvG`?ruAjf0qHwy)pbjmPQRbWt1+iNO5$nJ|r$FYuo$?Am>8?>=^e zpxYgXJ;t`It3=4|6V?69r$+K56&m$L^uD>D6)NniACbu=^BG}RI4|;Zi9IZBDnf=L1BMx^yxG4%4>fVp!YdQ4DeVlM0GIR^mh57 zhfK?marfG1f|`iB4R-#o2vd(<^JoXoenjy5UoE?zuC~wmMC4CI_b9=3i9q?mW78gc zQIM7oliS``1t|TlAw%p3cMx{#Vfr*1C`7J6&92^31>JI*z*RokvP7VmNH(vrPYO^> z|C{^Tn?56^{I;`g9TjvF!p90f@WHX}eNkP*#n{8B#T@%gG&c=A>qPC+L10%Uj%awr zyH*63APx6E_A6mGPnr7{TehGZbXjoZ>hV-OLn5DNn&o#j(O#uf1^?o3|_xq8Qo z?b3g+OMIk2JX3pIkgbwmEbnB)^E!JI{I5~(dRU9pFsv$KV$@Evq8iZ&;F0um6K4uzf@Tt}+ zMAuV7iP^HlBex_h20Ng%ozKj5F>#vz685bfPsqDJssSeifx$Bc-dvQSdLH1 z?=Qig@Day%t+lxd)1TnREuD#_B2Y!!7K9vZtC0H&=zJts<*+hDbRW;5NUdeVz&Ybi zNQ^?q>YW``tL#UDQh6%krK#r7s^IFh$8Hjt$~Z+&N4P#@LqwpTr#CNhVTw4`QruBo zYIph)1Bd0CZIqy!=(NcGQk&?DVq721%YhMezX+a(Ajh-2x#>z(W4Wa�={?%op? z#djbC?~qM9BVOK=glRnCZio|5rsKZ0O$2Q(nRy|*@h$+3rTWulGpxTL{m4YtXQ&U$ z{rlYtyH+Cwm2CG2Gq|C2ZjP23j~y*tgIwXSbfHx)ih_&aYl?AQKvEATw*5CwIlZdk z^vVMJ?(pt3GAqL~*%hKN+%$63R808URU%Nm%<^Yif)I_%fO(3rq|N?H0eZCFYimWJ zTgP@**>;5~j3cXd`dL;bg7d%7UbL-C?0a>4(OP>^QD50hH?o(0djvE_EFEL3TuXi| zNK>NW1bklqv@0?JiP22ESkUFyL5WXt$a;$AIx_}lZp3aC!Hvm*Jt*Dnv^-ymOxJN| zDHf#l@6)1EA07bvw8>`--n20mT4T?N!qnoj40e_WAspM==K}6l_vKoEu zNa${26LV?I&BCrR(KZLkHRxU~(##tsTH@Vp8wkk1!;)yK;vBbuE5=#FQ5-GP1M zua6;v8=Db1T)8e14pv<L0A3DvpH?-wuX52334CtqOjTw6I5;H zlJ&Akl*reM)pAOR&4y*392U}X-3HN~@MhM4@n3cCRthtHsoJ&b0x)pMac<8EU8 zLAqNN=KCU2LoWVcY`lP5?r4J220s2_(J1qHdBM>FjKli$32ilD%k^;?329MI>eB@^ z`PI+ZdLJHvhtB`{Jk$6I*_S>KrY0D8>pPA-?$?wB8x@|g2yRH;u6?`4MhVkEoZcqb zMgfg?nv|`!dz6zCMzg6!h}vb<<8KPP$(%4PoWp7Em?#t@KREXDdjS4x%;RdnKjLj3 zg`7gWMscqG;qsb!hp;+Tv&te0GZx2oVdP-3&50+)40nNT@zKoxeF%ru3A(O55Xt7X z@b_^7I}aDZKZ(E}Y$QjJE>h2sIP`N^R)*=)Z`6ly-~Jem!UCtyrM$H`N!ujuolr-9h_zw*;a8e z&XMb)2@u_a2;_}q!|}SW&}3)>TM5=fG@94GZl#WO69mO<9Bz3EGJKtdl{P1y6H6mJ zMsOE<*5_o*q_6v~Aj5!Ee6sy)qAQiSto_vvx-KTl#(Hl>Q_I_RcBg`5!@yCR{Z`-- ze41_Gqa}giBA$jM_#^M;QmCA*1gs*;z2+ucAjCBB<}e)%jgUR<3!5Ujqh|%^8EF&I 
z->UYBfzBGuJ3cWErXRMCJvdx*WS(I^FX(v>*tcgX>`KXqq*_lw*OSqCd~bUR(|pVn zCfH~JH~E>^+($Y5J|CSd>9pSnFra;(tFcEFcEt`EKFG2K-Dv{X)xkS_@qLiAXUc1- zW^cI&RKRyX_Fg?Ui|EYMou_JA%kgEaj)Csmtm_pmd3^LKqoK)Gudv(sxo*8ELL zNsj{C98W)4FH*l3*5vO+R8TJlpNODwD1!g+vj+iG2In};)$K|_*A~Wco(+xTgZkJG z_sLoLLx$gJNrJ4>z`#Oaxi#Vw#LCKbWP8!W_M&QzWOmfUzy`-gJ1yvHn<^2!A3Ox6 zWeE{}0appoGH(5QUoNtnJPc(ghTtqdB=HSl6k9EdD-7#JiMC$YEx>~%)+WTb z!x@kPUoO;kL?ou=Q67Qbj%S|IR+?>}__$N+V2A#ibTp)+b;|Ez?;~k<*~8rph)!>-M-g8H8XNenon?Il-7p3Al6PA_ zQE2|@x~-Pvb7R3kZ**as=?NTc0^5)Fnh5^1P&HFlI9OqaMWhaTG6AjmPC@ru<5#(i z2bWU&!Y3KQ@o(%fyYLZ`_$@{@&1?C$%PtnxHO|={wrfOjyx`WX<#`K{|G{aF%UXn6>ay3GQV$6};k;(&8~_zI@`NlhPyXZ6W(V zVU~(4w&d9Ne&dkVc%EG-~2ANjp0db>B9V6>Ek=+ES3kvmwdd;CAXC$QuCH4 z(87^;Wl6N-BDiV7LrZFA7=IXc-)~+0IT1cCi2|-0d~)?dpDv1)8@3hPVcO=47Z%z!4^WvcxBm(RTyp%BqE+39Jqjt@eJ26B<_X`!O!LnLsXDlp zVeNdG^%uqcRPU6A{9<7SU|KkuiWCeM`b1zZhYotc)+nqcuFvAG`z7Z+lDtcJK$z2X#HM$NztxZ2bUlfa8Q^pz^JiyMzX7el`x7A@;x5LDr;hu z8*QSX8!av?7`Eh{D=eCu5q(2)6!|5Cn6xv?J)B4W* zd^@7B+h`wo#4ZVeXnt%*QSg?!(e4n%70zu4TV|6)aLv+k53F!c>&mvd#;Md;VLaOy zG#bKIDFRc)jUm%s7oZ!uhRTdyz3iO4A)*_otO&>7m@utO<_FlWFtZW#FKS19R1B{1 zmkLwxz5+z~zcm@)hkwJtnFlx6Peq}BNz?NX2ks}NA8X^`odH8~fRE-Uhe+NXLRvD? 
zYp$~ig04mT*R!AH_-N*968go91ZiN$oL#m|VGUg6GPotN$|_swDeXl|oN06`5rvt* zXQH(zNJaSLodUmUpP&Y(ZD*E!AwV1X?r1}mnBjlY02XF{5pO0433; z)3q~gv!EM*jD+NDj&i4bg7*9p1jy_IpE*+kPoE07lDVOz*pE}Y3meRW94P3bC#G^i z`IWF=k#aW7>#GeCjm88DWkUoQkzW0JS&72#k`icK^DMtouW-jjcMEDzW(s=4XNMtp z&IAtn>MCOO#rj)UYNjYE8|dV1q->*e9;S;eOt=GaINqivhz zpYwh}w{0A*I9&Udd`wl?BcjsgNG>vvH9iNHeh_7~!yd-Un$WhepGGmt3uI(63)=F*1clrG2IP!TEU?g-;$NDFhh((}l z?epvL!fmGo8O@W=&aiva2vhMiG1Y(VK`0ppYKj1vkX^3EE6of+8pY{6Jv=4N*7#^C zDx!9(;p@kAFqsI%aLL@&?vc@1TmL%;1y6Ya-uokQKB{3AE`AO~)bSq6_(^UPA zC@#a)2!@s4^(maINbRCu#8aeT%+@YTCkJE17A^bO(${OXJ4AHFV1fnt5n+bmSWBr* zQxgO>sIhkj{S{*jo;>zA^VFZG>ZwkZUpkG zC5}R|37ME^NVik*1cBMgJ@zkOLRJLEbB2x0paj-$tgO;$TgaTh^ygQ&D7V&8xW{qw z<4Q~W`KqT|fdF&CfI5qCQClSvbqB8Pi*380`@uhn<) z^3!%kCTU!!Cdxl$rd=b7HkA@=ZE?!8UYY8oXV6icRjQYKU23wxO zTJ^OEua@|Tch{g1?ISqEPqmK(T%`wh99qZOA)sjQ4QIibvWTysp&YF+D;3mG=rOEX zzzszj>geJOeeB~=olCJV1YFte`75=L-I3)=gDD~dXhQ{QZWb5a$qG~L1b$RGmM+9{ z!rvi$@u*D`c2y-sa`B0mZHq;6rN`>ZVs(3Li%*0dD(%7R7_fGc7$1H{ILEM4f^L;Y z64}MZJlb7`%Vepd&UmMds=mD-3LTn~o59E7B_Y?Lk&=!)Da;DP*TD?^voGTX z;3z}}MsJRG7umV3{P>=yZKNoS0y-`=Hd#O`Th7b!WsV9vZmID+T%hF@CQlTav9A~b zSNoliznn+_T3Y3GHd9oV!|B=?w!()yYq5x^F!hC_uwmZxAVHBsEU9ffw{ffRCA?R^`}BroR_7rFPCzHM zS&&W)i$>WN-u00PYt3ep?hs@G2c_CcA0vOBY_X>V-G-8i$~2mZaRT=t7`WfcB?bLB zT2LmX*&lm|-?F`uvJupH*T1^OcKSG)G$=dCo)@G^=mzj?|E7<8h0i>f*KbXA#Y(q} z^KTa<>v*K;O|fJ}c!xHP@M(#Z+~ebM_yBv;p9I~y>4~XP`%su+?AWo)E|^A`HJ1QG zAw5t#zep4}@ClhyVdk<9FB6R%=@?uYqA(r81Ye~CT7n$X^|`m-DprG0`0UZ5t+vF& z_iC1%+C0}b3zA$C4;%<UNT3nxn$sVHLNmzFvqxWB;bE)=QAz~T1Q`%?Gxu#xo^BOBT9SJ->i0gH z%(B66e-Wm~yjP^!Px2hdkkrZ50<1Zi!hmMDtbX(UtaY{({yNc*WG zDcf@6Nn$N)14D%cwk}?h)cLCCTd63tfqPt0%%;1gsor5?m>1e_Cp`i96Ne*w0)zwHd zc)m}gn*-h2F6h6`fiZtLa_yqI)jbqeuwDANE0%Vjnz7O@6Qtc(&O&wn0U?)|vj;vJ zgc*Q?&$1hC5};E$6CG@LwZuO0sWaM|rpW5TuqRwud|&vir3tzBkVC~B6lRbQCriOS z=68ytt4H>5TbpC8f>hI1ilFNr0jfC=_<+Du`^TO@i&OJ-Fu40kQn0MVrxXimjy)s_ z1)wn3V!sh^ok+_LPqAEKH!6cNc=+c5(w>Z`86{Jxo8uT0V-bg$uTwQBe<+T|%4ZH| 
z^(H~;%EcI(wRw;p;o0GU0Arb%kc!^l_nrc|#UE>Ej5W@-%SEAQ7{|ot6;phN2o!bS z{!*j`JV=zakOu^)Q;aQG1(||cK4>@1_EE7G)JE#MA)}LnNW(TC3mnHMd#@m4AuVti zopyGo|4diYvD~~&^zX&uCIrap*TYyHS!s43bGahI`bh*_Y z@#%O`%&>@$J6*W@bX|!R3A%B3eLJp+%WRD(ewAU%-?qeYR%HJks}%N@S@eE@Om6U+ zCbiyH`>QX{De>x_-lM?P(B)&a)Ow2IYM7GC2jB@|`oSK&-DV0n=k7YMZbGjCZ&w^# zor2LODYRE94I=@Mj1xaUn;pJ{OzAkiCE$Lk!&$L^^DxjzEwhgW`B>haVgIL~>-%(c z@&maONyEMb1uM~jvmEo?bjR4m5_D@COI*p|XrMV)Qvf=#R7^V+#Z-9@Cy z+u}Fkco6%RT_sH0nPUWsxm4dG0u$F+_pD8hCk}i7&h&BpkaXUsu-kRtZm>Uj1P_xh z%3p*S!N6VkD+1b!cEG7)6$S$CuZgH%rEVI5#pa*PfI>G;&scM6-cq|*km3{46Y2P5 zA(D4_T&lKwQD`@_ET<_uY?DtyM*{1pnYK$5N=)HMJ~d*`3DT$t#xm`)nJ!LmLL4lv z7xYU&5=0LRyQ#qUdl>PRG*Re$;Z7EKq$^Y;nnn-%|4}Oz(qK0)@f_ntOqH>+ zg|V`&HbBsgPL4SB>0iN&twrK^Q2OKq+Z@NqQ?cdREUe-xx5c*R+lzv3aHTwo_sa9; zKw)Y|NBF%A5^&MMw>sa(_~!vIO1Oj1E}%(?#&l?9!%^ z&|7ei=2$dSaf}h?3OtN#@FjIDoF`+CjUKX^Mx=%8lpupMX!HmhJ(nwhRRzLZd9u=$lM;1*z7By3X!UAqFk zw=Hojkv!YuF+A$Po$kv%`{{LybjWxoo;;(iKH1(ArV|qpY_*G@0niDwu<&*lm@>Oe zB<-cTm>^SY*LeaX9D0nOh$V^O%|}VqkeA8XftVwKQri_z&h?ky(~p<93J3N?s`dr} zhKR!o4<93iT-OekQPqP!ma0;-Eh@wS<#9MFQJ66aUT!e^D5x7_*~YHpDuz$W?9!#s z=^SFEiFU03`7w2B*DLI5O4f6251$aeS+?CI;2Bf_JAuPiy-gIADi3cAZpeu~2i(RA zOYw0u^ghj)AY+CD+^potv`l+k1h*B4ZO)f?R-A&ws8m80$ICcXvBUNW`mtEUUGfEC zMhZJ@Xdb*Bk40l11BJrW$l680A68zw2=1#Ch?z^gKw7`YM0d;7TTr!2mO;{g{%uD0CGjKc?TUr=UJdMXQe%Rbz1IXc3M;L}WBO^4Hs~3Ny@yt6+uQFW~x`9l|+d zvL%Z`aVOVr=~&%rs|B@>>7rg2JRQ~9MiKp2^s9DMj>qY*HOfZs8-;-cZolQEcm3dX zd2(3%0Fh`#LUJ3IKmqF1Dn-J0lZPE;%R5##ScOlTg0lqMCg3MWHlgNN^;al+_3mPS z_zDHaClmNjQRx3B=;&YM1(OG&jv|SD0MVCS+q7=3!jRbhxdfVx9^EuweW#lCf8M1`bhDFNw-4@HV?9b-MdU*&oc8}p1V#_5)9~=4Hd>fI1i}@oE!#(velV`9Y=?00DMO3G z-VkCSGEiywQen3$q@+n7zP<{IfAy4#FdlL?1|r$(El*L@f*-EV4W%~M!)bYZJ#qLX z$k3#>HAQW=fPQTije$mHp|$yF1`Amq0sXAzl-WB9yOqEbe6pPo_G>;NNqUQR&T3y; z_58s6P_dmWihc#UhhONU>8_kZvTT3qqo;+r^^(+Emrbo zTuFDlJj3RTO55S~OtB>bfZhO_qG=VXtq3YXW5Z> z;$WO>>}zX@ca2MopaE%TiQvi`g2Xl7gs+R@hI;}!;&y&KMKaIWNjY}8C^U(&NlBP$ zR}1QQ!wY|3AJs9lxpU22yH(JCLhAJDF8+)?C@TF!PJXv#dXUk!F=F`wG@oh4O34Bt 
zF-X#Fk%H8b!@jXxVdg)u)WQ7mRJK+m*9*knDq-ZV_j&IhVND8BT`IRd``l-y?YPj9 z#T@(TI^yZhiM=b;=IaDWj+bO)GJPBj!&WSQ{Z5cQn=fIrSnN_Fv?qL;K)JZW)(g_q z)2o)*|M~De$VClU@5+5F%tky(fI7Ws+lnr@Wxh-nY!!4n2N5*bO*On-bvF&F5)RRhv}Yt89%RJrpy~o)ch2@L8-4?m91uz*x2y zRRp`mBD=H{ivKO7T7i%oYo3QrHRanXQ7HaoQENv_q16iN!^T}iHrDVLgt-f!CWajZ zZnMV(wVO9o28?a~&W|ADOx84?h`D8YB&+v$JP}6puoRVIPl&{#dTW`@kEhQ<{GXdiu_*rY+fWs2s0|ud zYkNg!#IO*F;JgQchQcucihHNhIuumD%c_Dl2FgSK7Ll=`O?Zs6b2b859H*9-+4%~) zZRfslHdj;xpoWV^aW_3|lNEI1%m*+R!_-wib7#pa`?ta@5uQSjC`h*}%D}FkX=3FG zGOGz`so`urN)Hi%9!$w)Kb$P!ijLK+it)U-Af9~`&tkPcLHoKD?dzJ_*DbfVMPaxP zmU3|4s~d;4`#iitp%`HQ^Lgkg-<0W?r0ubZJaqP0YrR$YFy1KaRfXNu@dSX5VAPI@ zY@-?DZRwn2_HxXoOYlNAS(POPyK?ZT(k|`1Uo6p95 zU!Fg{^e_;f+hyM>Cn<}8Dg%Zwe{1X#5#6{aI8&ULiQ+07j~z3+R+t*NrY^R=9vrSh z$8&^$o8VskdfUBmI0@!YSkNup(@kqmuiJ#**3+{VSi0y;L3{o@EB9Gsa(_;-{a(<0 z3YY@U4A1$r58eE*wfS(*9)0X3g_$lcnt@L-?uZ|V~ z0t3a_yw;lHv17F5*&ZJYBdpr~D4=g*cq)9uH-+5d8zgIJoLm1-q^@ZR>|=c<3hjQc zX1SeL>0*QZ5Lu`-_FYljR3svQ|ARQZlk@jAf>ifXNgf}oU-;Yst6SbFY=BSBfp5mk zb8VbY5G<4?ITg3fp0v(6rn5P}iAHuH3 zShUR6SN{W%=&>`)t1IG%qR``x9eeGOt%T_T<~&&zAtBeCUIY8~v`2kxa>Bmdc%T;Y zZ!qZ7yLUT%M}jZ?*S8VNNa5CzGohU)=!!y*Hp@l`(=uo31$INNC5Yta!hb4~;+e)^ zvc@t!M%X4rz9D7{vl24!6k)X6c;$ zeb#V@7p_ABt{Hql#oxSVI~2O2Cjj{W4+v5#qIp~G5dqgPnb@bWu zM0zg2q=y7(5OZPkd_*`JNwBr?a?*0)A~pNS{iQ6>7X{q!DaudgiE0N=m(P%#teT7ds+{TRM4NL)fP&6yhX5JhS z4z%j-96%08f|LmFHaS-(iWU)Q*Rj?`_J+bbMKr7m4j=q2u)mAw7EoYfq{KSnlnM9{ zVG*5Q>(T_W0!R!L+7CrxFtBfcAEOI}x)v$cg|7ColhIrc*=Ql!bXV>ar4cfP264DWxy7AokDFUiQmCu!?h?@M7b%L+{HZ4g1LLoRCS1z2xl90V$X z%dJx+dM+zr7t|5v48VhkTo<+kUJsxk+=1Yd|T>MC(Ci=MjxlE3DU98dB`UvR#s}S z3A&abnS#64IOffw*1pj|JVTs|eCj}jSqzvllb88rHd+K4*)en5$>sCyVL=vFVAjIs z3AvK*3nP=T%O_?ClJX|og^fhhqiN!L-YCGL2|bo235&o8%xyjazu1T+3Fwz7PoroE z2r{xqc3Q5FK?UjbnhiGHLmXR3S>;JV8j*w;4N9q2?xShMB*cws1ZhM#iZI6W0`Auo z{8;ksa6EN#daAVx1}FIy0Uxy{R<^_LXmUg`t`9BMCxl=2P!VY^)eIRl)P@T(FbJ-) z{frS}rFK7t#Wrt3KG1$Ex)zXs11q<5Q5gIXC-oeKU6JXzDYi&3C{nKs&vZrRFMBtC znQaktLy!6x+iF)dhKHty?N5RfJ8{qudtG5F&DdAFe|^rr+H7l&Cmsq9<-%qXQ19t= 
zb51XBu!}uJPeVJ>ims^8R!^GYh z!V|cl?yFMzK$NIh?jW7^$9q2I>D7z$xjH3Cb%Bmnt6KN99Z<9kd=_kDV)Z{+6a76_b1o!iUoZT>(=4XHTjS#qElE5@JJXU~-y7Wt1lk8*I-j zOmkq8wSFe{p4Jm;zjLX>o{^x#5UDnL`( z^LGUX3+ipRC~k5B*^bFF8~CbE#^l5*X2R95m?$(MkP@l4_XJ%N`0`}idAq^xTP0C| z;QIPQ(dbSZhkIX~M)D-P7zu2(-M3~Mn5R!>#v0lEk3Fx+`+8rJx zhNmN^bzeO5D`<$?HeqHA5ynib5@3FipDdRIr7T}sr--gE*buQN9$#9~4Q0W! zvcjfyC2z#6+dSSSpA^9lFP%V8=9nmMk;{i}WHcmDa?4J!pg85pPz}&W133^`PJbU=nWquM=E?$KM?c{LNJ>*sXo8&rbH5% z)v0!eDE@aSyC01VQiClNl_nn96WGLFD#+aO<=ZZoGFv^S1=Fy0X@`7rm^5>6BKyrh zxZIM+sjRV|3DS(CMJtXLZ8=(0Z~aBlDQ!0*8>@9;vNDnAAuqaVwpDakTwZh=JivwM=&`Tx=KZcbOUcSb4ei5hQ)!8$q{2EASK&uvWz+TO^7;qhbvg2dv@Q zo zu_&&-5xmBK63>DHt|ZPr6@?ZALjV8usqt?x16JslK8}~pTs!+k61vrw8DbL%Fpa~A z6c@1@6srTm<~^~^YuxeFo)FOul0-X_!qaSKoP2U53xTEkt+Ts^kOsGXbq13EBq-q$@;WI9{HK zX{%QZqzU`$fJPtBieltVu5hLy7W(^pi$eD!^u|3 zjj6woYnl@ew0lHx3k2?6rc^~Ii9~_1h823c$@3-X5Q$4$A$l4~3?tSEWR*2-vPQnR6 z*be$E{P?idWZyhMy#MYg^KM~5ov*zR0{XVWMS9AgU{-)ZjXz?h#ua- z0jAm(`ci^>60SvN%AIhVPeZ$h@*_JW=-LdYWD^22yX+gUKx0_;?_uE%70?)0ueHev zQ+}{TRauH4^FvXi@3m|nse&UpHb;;`riBn3Ug1*)il5dj9QYAEU574xm1AcJwVVC~U@S%F!#S(fODq(R$JFJrJW=7>mJ+t+Wjg$f5Og%$}v zZf*&*o~vz{=F)|oT9%Nhhy)aX#W;u8ls%5^!@$* zYhXs<@?ZS&H$BXRXu|s?%j{Bx-HJ^}MxB}aSkE|VaNF*@w%uvgMSg{Efms8rw+2lfS{_L?9=gb$}Bh?~k3!gt?*L`&YT!K~Z}0jdgAbSo^| z$Kj4G)fNlTf&Ha9XuXvQxoX9g<%GLM6m9SI1>yad#Q zKc%M>rELJ=7oAv;-#+6_NNzo(COm5277TuzPj}l`v>%B`tEXndXt+XvmZ4h3b@WCd zy5EU#<82DNiGi;fNyjWk)VsBT=ww3XS@g05+3ai&;V9OuYvVUw`y8|)@r2-=IH(8vi%_NIbvU|ud_ zPbfmV=Tp&K`N?_N_HRMP@zneU_RS-NU0Hz+Wxbsz3hhmcP}TJwOo?y}>nT7nj?ZO% z1(_VgVR$-yP{kaa@8PCg1k)EGQZ2=@g_V2``q|X9~Mbg0Ez-aQ_?6o*BUinSCh&eLJ+6 zOYOJc0=O2x*SP5P8g#Qnp}6sb#@GOb=^ejmo+bk1(%#7dDRa6FR6IrP-^m-y-2$#| zIs+>G_w1QNnEw%S8XQPlESeMpd=mz4Qn!i~bhJlk+&IwhOx%$;PB%vVAB5We0`rvi}LXhOoTxXy+)ZTwVxP*#&6Lt=%T5E+pHyu5t=r zsb_>ASwaM{#j!RtoG4vW0jg6EcYcIll$|8vj%omY1 zI)h{UxaV4hNUj^X91yHV*iV-B=z4oW6vikil9gwD-T_c7=J`72%<9;?^;pYX>nozm zGa@sZ#XV+-D73C)5k8m{rc!u}I|Ap2wf2rkOp|-QfZz7H2-JaaX``L}cYsz$phaTm zdzci?PLg1QV9#rWh*EaQvkLAw6um6aU+xN%8p307* 
zI+0@k7L{pE8*KkokSb$aiY2kPCExnTps3ra3bz&5M*d373R$aYw>2yVe3bsn%!R@fd9 zHIGZ?*q;@4V>m?Y{{zCb7n>QSm;`&vC(UKwJ+IwG#VRUd6`a)sT{~GFT&9M_38N!2 z!jmfkO_SLndqRLwy8kXKRggN4wN%P={JS2~p^=dk`}VsoHdeV=mhLa=f}7W)TWgnl zc%T}OMLTS$ApOIFQLS_akRqOKcZx{8Y{Rv7kANTI^}0pD>d5d39?eDBBUgad9DFty zv05L;HrHso1vn7VG-e#h$*KEyF+xw3a_8ZcAo&%g)?+0mh?cj zr)U85DmKri`wY|al9O$Y0Id&{43PBmaSZcp+bBRw`t<5&|MB5m)Hwg=^Bmi|_C&>Q zyQ0I@NewzCv26?#g*gp;ST^c+kF@2jBD!_T8IiArFh5)P(rxvY9gm$Bh9#9NOuKl` zl_1CCf;8o!-VfVch3W7#86w!=L5?^^fApCSu4S{BV;}oW>8NI%*-0iQbpI^7Q9+94 zXqk*u%3XrAOUDrIYL@Jyjh|Y2u-z2tM@58D@r?dFSP#(QbphE z?408SX)Z$8cD}+|9~;*nE!bh#ddO&{=Ox?qf-1fRS2#ljD4w{x#@PrTH8B?z(ccKT zb;P%1YGk^FL~-NEJuA}|2>X@JS*2l5@HSfjK-kMK>TQHQaWf>D=%LTs(`_i`NNi4en+_ZI_U?qJU9+xQ?fxYTNH~Fa)>V zcAK5_Io{1*_1reI_gwzTnA6O*ON8h%Di%0W#}kIa0+}+yp5J1BQYtqCQqMZ0QGlZ=CX8Wx$6CXUS7Yev)Q;^b_>meQ;xsus@4`UU3 zWm(`^L3;8EY-qk(HMXxSULw*3_NpMGjHNl0O;S(0dc=jb-wC*SrgOc0LeRA-ny_!Z zEfVG@HR*0!=d&e+QIULIz~2CMvhWt7?IY1C?r70ASwQIVzJr=d9tD#GwGevuwp1U* z^}F=VjW$QnP1V8mT!R~|QWPDJWcLXZbL{0!BGH<(Nw!--28&&OyB!vAofv~w`}aO@ z@YwEEC#tIK7lQ7a!8=EdM3Zf>XjI2Wh>08Q>(~%cXj^;!ESsb-zvRIEC9Vr`kL%w0 zTP)9KWF~nHlgiIuM4|G|>ZdzbY_gXH{THfP>TG;0-}6Z%?TlRDnIF2UIx1FL4~6MR z;28EE0sSVqWQS~m5N!->W>(4>LxxX`ue}_b<)hzUw(``L28=UUttf8t0{x~sVTz-z zSN64LA=;Ui%}$+d#|0(Nur6Pg*|koWx3AmPzHT-m<@S+c8I5DR%j~?52-C!{49#6B zKt$*ARvX|Wuo#tN;{{yhOzARvFpjV7-TxQ%8)2%)b4us(?e<$irt-o2EkQvhI{0?g zF!!zYlt@~zZJW@{TPVap94)}2>guC~OKg`Y?i7jh1}2dgux$I2sBRgIgo*fyAnk_F znrWX3@Dt(Z5xAAG8$X7iTD@s=UK8&SNgw#S6|uVdK=E&d7&l`?cjfl#+sA$%hofO} z9ToIqta>SS#=>O$BJf^e|4}?O>nO#qV5Xh(4+w6l%Y$F@LOU-`ky;2{xy)0J7L^8% zPdn{8Q5g9CeQvga3e&SJoxVp1Fz{^^c>m?)>sev9*TAGlX&0dp+bE)23X{+u5TXU` z`D@+EwpL#VOC|8LzTc++vl!FZ;{WbbNccj}2bX+884L>CQ}N@k_K*yN62mr8(DgB; ztx2MGb9@5bbGgl~@$kgP){Z48lnK(i8}Xs`p%3@(HNbxGPw4cNwNYp{3b;aXot(u7 zBUKc}mReD>=YI8s2($`&9PM*41LET)cOfYzn=b<8;ms_^mI!DqH>|d0J|iW`_F|P! 
zm#+7wY9DvBuoi<~_M)I`2H(ncZ?3jO@yru>V*j6qfo#JT`@yHAXY4ak@r>Hfg#4#~ zXG~dHsk6&Oq{v`BU_5%KT`mf3o4S9orTZ-H`AdWE5<*8UmMfzBy#=zJ^MvU+x=a#B zX^Thq8o=B00bx3h@m3zJ1SovO*t_lQ|8o&(av*Sp2a#lWQey;^YQhM+M`1V66QrtR zNg~iG_&1!Oqe5;-cx#T8!|@h{=E$%GZ(`4S7;D(3m#DIM`ZRb=oE)DM#dYVv8g3@Dfs&}^Uha#PT;`BGumWBSP9;(~v}fp)+CP1Y_Zk*UsO~>q85){3yZ;k(?ZNjU0wQPq8v+hT zU;&AX!2OZYXMeQw6v5aztJ{|g`8n!h_}1H%qB8Gyjj}!p()%tQ@MeVN?N$+4*$6bV z84A!hr_^H)_&6@6>+BI9he)}+|AqyrJCdX0@mwMO0ws4-7^^I|XMKDKNtuvc_#YR~ zpdqdq$x;*s6YA{j|3ak4ck6-a z#~#Snk6ka|mRBa0QHlh8?xru%eChOe946<73{KWzJx-{Qg{%K*bPD|S=y+L z*2_agJLQ`|_|=xIlMLYmiWzySNFpx^kD}^bW=Ll|pzZCk)2X3lhB*Wkgq=)Mh79HpNw&wsCegU#zoIw#+X71ppvq77Se4+$~H)T66XnEfZ}6X@AI%a#dI zDhaOTR#zs-xSXnMwRVLWFcdcv?3jRCVjOWPD@SM5_eIs3f}_qEt;YT*f@TJ3K~y`= zJPYWSi%t#=d+aPx+;1;xCO~bHe*3YH!174Q#`=7*%}v1z{3IIdN8qkulB<&>oB$9x+p68(k~Cr1lweRi4?*c}b9ABUQn_((K!axo1q0}73lsduE-aK4}@^~|E{;dPWt1-=Fw%;|Lb2T)FM*#i-fV@b z?$_7YVxN~w(X%Kzye#O3Z!(@}bM3Ejf;{46TkmhW$^sF$QXA%>mOXy7Qauc~KjpSi z&=nVj^BlD$B4{y4&IZfn_=OdPwp{bDZBme>kP^Y(pfw3GhM8^kQTu}cgB{zt#lG_` z!mhg+Z95$#s#m`O_A?))?>74MLj>Kg2G;D3fIT~0G&dWl6k(jzMvCGZi5wjV$9qMf zX84a$i+YgEQ@%YVz{fJ^JeKCRV-x9v`~EFn6FT$$V#P8W1G7sX*E%Vs#N$m`=Ev&binRj!g;*eXF< z9pV|o$^_U_;4Sl_!gf(>fAYzhDV~h&4MEz}dtfg+tgsssu{_}5{=+9m&Pxol&*FLc zG}zb9BRzf4uBxw_oh<@|9bdM|zN0Wbh;3ddLvTM8qLL^NtngqMMl zeIYj|C|&op)4~kbTZ<7sz2SU-D{au=A$FgKvCZ3J<@4=94|(tl!8QvCx)YO3yd!+W zD{MfdI9)s~O(|Smthv3GA?$k1?@!vhQBfGM{aZ6_mBO@>r&;u*w+d;AebuV}n?A|d z2;WLOB}8ix{Lydf13|4Q%qDOu*39xU?W9vy$e)q5xNtF!N_Xc zo^2TdZfa22oC-f1?>VAT{X4|DvZEn*aS4v%KPFX(_Rb0(tx-p- zwXAqzyz^k5%2viH!AtFITO|rz8IHv-B+R!6Qg_GGg;uSwYc&>h7TL=_?%2+`_K^=y z7&XZLsW81kz%JL${VssjkQ9+C=i7xSy`!wWW8HikB1l(z^y=Az!`?ufB7%;Dy5*rq z=sb7Lw!ip%@Go)wY2W)E$(ZWk=mJCI&|JGzBtOUr*^eTSH&GPoI=yVp>B1Td`vfw+ zvZ+W)+>eTajSBbHgOQTTK8_qbU8j6JR1(z7S0h z9D>x@NzlB0!-!mMFn>5 z4+y)dMENz{E)XVRl);xi+5rLzg5cbKh#aRKV>4fjN1`7oi3bsgxZ?FO{=>nxjPp zM+?inKh07R>E16n5Iw7~Yex=Vt=hI`Ta`$zYH6P8X2b`sT2%Mry)&>f{IM^kqiVMG zH6p1MWA$^8a2P2_R}t#uS!1-2|J@>=@Ku|CQxrv)IaA`s*n>k$F;LMW;Mz4PDJeIE 
z5bHsaTnom9@z81=B2eq72<*B{`^e|NS$u>`FC;Elc2d(EBt%L+si$y1c9)=Ac5)ca z1uk0@8nSOa?-@}6#^*6!T%S>xa-wOnvGI2wb81Poy{9l`c5d3peOPKjo; zZA!Ny(HI2mrB!ovo@sMMpc&~QOm>E?M9{5&Tpe*vkK+j#!?j()z7;yJW5nJLiALv9 z?#Z?f1l&;d#sbc{KL%2tEQ|4Wz0`V$z~JCp(0-+`|M|*#+zuNeikr3xXj`W9quUry z6cdx`87mXTPvrg*WcOM`aE%VQVu#{5+`eO@y)Vq~mgx&_kmBOUnpz#*btX7;?chQD zL#3rNZB1EOcJWWT3l46|KNqI)ovl@NtHN|NDcn{MQ*5pv`5uh0*)5Fci>9)}tQU3- zka|1pXoNBOi0l`gE%3-j`#?d8lZm1EjrN6yxlRJ~M?WQ&ie=EK7iO7@3mMw$)a5MZHv#Xa@z{=ncmSi=Bw$EEy9z|03eu za3g@oo6Ao`a8sOwV1HjhI+2Ha3I#Q}1qF7$!c>oE?49OQDb8XV%LW2(Sb? zHZ*iBnscbs4%S_8OeqhR3SvYFi>z5Z##U#UVR4IeuZfxXWDvAr9JN> z(G9Y1{>@KBnm4q`VbAaP8^pMX*JF>aYqzbxmosb*hEI(TWMr?El z|0W9sg5Swv+o-U&6ck*R1Fn;tc(>B_R>ivMNP+9~ClBQ-kFWayLDvvYdT6QVhV8FD z3GT{dR_Tn3NkYA85k}{m0@Rzu`RZp1yI~0S>;${UC+)Axa1Q~u(Ni=|uY)yxtEjF* zHpj`?lJp)b8cjU5bAIQtnRcfjb@b}j*Pc|EQDE8X1-{M}h~O&COo-xC)|UF*jN4FN ze=6b$s5TW1sdq$Cc{dNV58{dMgvI@tAd5SR*Fv0Y+r>Wz`|qL{)`4cQT`MXZZXjB@ zDR7Ck!)_4K{VsI>wi|`rI8P13Ll9)tjuddWyDJ_ySv;;d9F(474++w~@U*Z!?4vL& zf|!~W2zG5vY)a0xaIMV|-Hk#tlqAbY3q;}MfaZhzxvgY3<*zLinN~*jmtwFpYTJZ; z2gTM2w1w>tqVgNPcci_ppzDTyIUji#ctb{HMv$dE4jrwq3`An?Mc%x`iTA9VbRUy0*w??gNXaL}8|0rW5c7oo96- zxSCLgm~MaYd71b=J$l<)K0R|< zs~NE(K{vE4I(|bo&nIZxg7ALC76`g#WQN;nZJCdzUi^I7dO>w_=hoQH-GK?6VtY}zK zbHXS}MC@T7JD839k$CPyD``C%v^GODdOJ<8zr_N~ptz|o&xNB}WeY`eJsu-D`*d6B zbG-cY%P?foMV+f#iLSaKId{$(7g9&F0!xta7Wcv`+>r4q$Y&YC);(x zZX>)C4j6nDn=6v5TZj4;Gwo?#K)*gcZJm$B&ZRSWX_cb5<%uf4RSEL@KutVWQ-_nn z;O~4pE;=0*s`4~nmY1J?2n?*e0VXl z4!791u5`&6+Sg~>_k}1meF`dlKNN5S6I{4!?Ltv#04`i@un)hxY=zwwPl-^&qBmM> zfhhc(ADwDz6{JF*2s@S(+m?7*@gVu?)Ws=nyJ0;cnkfp+$QB2)p(}q``N@@)w!_Ee zaDBH<0UDHpFxr_{5q2AuJmJAI4xWqce9@T*I*fWF|If6GMe;vmt;m7GZc2y3Y5Zjz z4jMev?(^_iYw>Z8RjGnn%L%ydoh#s1X5jEf4mQ__NUPqfFSWHkeEH!8R<5wC7D0=2 zs}^QG=w};9LVPX)BOcqiCbn}2@40sF)!kX{4<|(I9$|`Z$0Ygv3cLBi5Hx~W_8UNq+qzA0T$MKeq^%EWt8bzT0r&s37AnOi6HKVCKuynJ^c8kKI z5^FqUZ~5d>=-?!C!!<-QAUGPzw4MT5Ue%m*w%H&--6r=2uOQkDq+eGivTHg5OJUAj z-?{(ozKUD~?p)2gktTJ02(~pt!j`>fAV3H!gh0ZsCbYG+2r6o|)f*uUfwo$$ 
zqpkhFKj)lq`+L3qzBHcCbN2Y2d(Ly7A>>DMxEz;`OqC#SBu}y2D?C120S-1+AdR=z9p--CRdkQ?+~?rb;O1$9ELj{ zAzBd`#rw=^0l)v@8&~c+Y_CWf@x}^w{-11z1BJf2m-KRbQxs~K z1{~@uZ>JJq*CijH@9(x3QjM!w-cON`_@S>B!`L(JJ^}K}I|v*8gMu_LWB=Y5o*4oX zgOdB9ke{BsXeO_o_GeKTY<%0Jms4VPqc4o+}i@ zeiUOEyL~R?9@h8XzP*RrzXbJr1(Q==Hu3el*8VMG`!~u{@3}XDHH4BbI$wak9I0qH zQohPV4WD{GU*;(;RsyE2ZDNyMsj#1$`>~LV@I}b2hiIlaO^}iiaE)450jljEMN9*| z)P5l?FGQ#}T9zQ4;asa}C_nV9jTMQix+8E?t}sQ(uY@fY@IAoFWXyI5`%%S2S`LLj zCkowQ6o;S?dRUN#Y7PqJ<+n9eLZXO8^?@)|b?$nbef?&iSBC4@(k>MAJc!nc`A#!w zZgB5>ouZf=z2`;khaul6uH1B}yPzAos$}(UxkGjok|%hAi`ugxIZdR|woZ@^j)-AO zs4?U{Qa;H!tAECtQmNBA-fp`E!|te+jmfI%b~qG)0_{S3CjeNf8FnTBjCtqVM*(EV z5Ffbd7GG^Hd|!(&Rs4u#b2tW#r$1^ql8{wI@aKtD@6QovcJq z2Ml>~M&QpYk1V&np(GjsFNH_!mqeiE{sl$(_!kJJ99yvWc=b*X{bIzKF9K;DNz5P~ z-MiGT3t>sI-L5c|J6)U2HcZfG%hT1nT$rxLqWhn*k3(8okk+M#eJ1FKZ7`=)&b8O4 zd5RPJSu0m5w*(4VlJz?zAKOI~eLe^P^ir77I$V9Yiuax^Hc$jw6Tzq*d%)oT%3Y1+ z!)luq5>Q_6o`Y?UAU}X3?zIL5{m75V(&xBS1iu*j@>7)$L%+~o716g@rhc|ewAVyo z6cNy8qxn3bo=&s*KdIDrV`hXqPlrxt^Ct=&tH)9N;n}@f!|L#<7&Ts8F%zjQwBy*8 z^MHj*Sw5HI!Vl(6T~WWTaN` zX-G!|-Yya#&xv`@*mWU-7sg#SR)G280c#7JD<10EFV=+AZIi2PT}WLR!`j+50Y9R+ zRz((dp9p?MX@*ztvljvdUO*u_*c^6V4JBlh;4a`_sS=n&VlVGX33U91j&_BD{`2D@ zd9Mu?_NS!7jk0{<-b9Cs>W8NnukMee5;?ZHY(7No(Ns~{%}wbR74}zCo$fjANH|FH zYo&^z2gx;a-OKoLoA}>(*A>`&VOluwVcV!6!#EC|m^~qUba$EkT4Bbzxrrau--d{x zef!u~y7`Dhb5LgZx`4j#bZ4mNB0&aIV&1H99OJbU^im`dpAI50Ai2EOalEjO0S}Gx zZp4Z1MKYKK#L@LC71tC73ZgQKnyn4 z1>M2E_S5wnJ@nBTaqbC&QqkF*BNhtMrH-9$vnqw@L}5`*+*S&NcZlHVXpvJnt+Ta? 
zqU)01OSgR?S2PE1?EwLbb21hoe0o=deW0*!R3SXo&jgtpI2h09bZwXPfI!uebZ!?N z1$^Rk6rH-IV1L+oIf4wQdm3qSf7e)fNF!65NYYFd^dC}hT&qN_K?FYv@Q@qrcf!;M zb7{4`5;BV!&C}C4w-cu&tH;NUI1sYSMDfKHOMl*ah`^e?n*)S`zOlXV0rYst%mx-| zUW@%VTNVsM?q3j= z#@KkK-6#V0E&2ji7n!a|0*ey%W08HO#m(DdXf0W;NK`{g_YOJ1ibSEBJXuWtxd3gt znaABU+Z<4q(?ZxPgjAOEt+ej&HuwMSOcKU;><=MnEL(Q;4hi|q9br<`%3+7FuiC4k zhwGl?>E+E~kG&c4jx3U&@IMJLO?YHzDNES90Uq5sQ3kHg{}HinJtA5@g}r2)2=vkM zOdFjh&?J%QL}nB=n@ty@e+QSdk*pM8S_;uWio@AkCro9mr(lq;`jMRaa;IMDeRK?H0XNjue5e;l`+=)d*-S8k68TZ=H~Dtu($CE&Z)y-PzvHNu+DXOuo=H5Ax0j3Vf_ z^&QF8xCN~IoYe?3 z@?)^;^IQnGPFZHpE3AR{HZtm1m38)45ozyKB5(o#{Doo>hdYxVfRgszO+M^2rcxTe8+J>F0|`w3KD@n)^*bzg&6Q za~oQ}kU|bid16=-AdTn)t9N;SpT8g0{opff5)8XD?@u~;@uPPjBqC4uThi=9L4C)C zg~%>_BIH{gkz(QJAr@Ih?q25)AXe*RJ4XjC!}~;_&y;m9vQZBD+(URIvN4MG_+I=~ z+PIKkV^*?%agIGD0)uk0db0gSVMe!4JQsmh0lLmvi-~ZD$Tvjtn}ah}Imb?l;@6x! zEa4;}V%H3W=6^Yn$H3E!Opw9L64|$8SRCsKeT1nDyUr7=SRnKQ3py2o^wK@4NYQetCSCB;}8xUxH z$};+NyGul>Lplo=4Xm7dLngkXa-JKa0b(o!aWRA#Kljk83^%ot}DAv7(BS{cjx8Mw`7z1!G!MbHPCW#soEph)u;u4V^? 
zSav<(TAm2u?p-_E$q+uidZV3Dm=3uwZ6N|B9R@-0y*M}#rnbX$KOei)mZnHbl+CS9 z3KPLes>*&Y;HPUyJO}TtHa$dR6${q^0=`gQ%rk7YFtyUVQP?>}GfxBx?v&QqK1n5Z zhAsaY;*y&x?W==HM|Y(P4I5s_ujr9T09+m-F_3_ARtF)9Y@IsWZVUvhB-Xm+J;6af;jY*G;-;rKfZcwL z6)6R4V#rTzOel(yosLbJr}#FXsa#=|O5nbg3io>9EacA-kzUHQDf-6uf&AF&#*6m$ z9isR-$+T$#zQD9DX|_U81#-+vkyZVa3npK;h&+5Y$OuCGlVi|(W_eyBmXZDK7?3voedZEvpp`}6vkvE?selfL5NQCO3!iICJTn2 z0KL+CD=>3JrBRqZTx)X$D7`Djs~3da#~1EwTUBq{LOLY2;`2IuAs72VVc)?7I^_1TuphzRNNsT<|I+~mmc89K zMW9Pg}`ep(is%ZvU7(4-JsVc*P|F(YgdRu0$w90zPZ!p2r{6fi}?PR1~3w3 zPpc7Nc8WQS#H?0G1?jT9O^fHe;ZXdD9bSF7-fjvQ?|d*OBX*B4B^StB)}sP}DcNpq z;|b9iuAUkARFSM#Tm&`Ed9<}j0VY>Zw`WrxB{ztyn6I7L4Lt#~!vX)F6o{ey>^t=@GwY1(G=4FU&{(a=j1wVZ9sn z!fzB4x+9;3GTZfA(KJF*apDmk-?D+iyh^YL(S%P$ajL}bNQ2kXVAvP2llF#V8(eya zmxw}*xHdadHrH+x3?~LO&^Q|45igYqf$nrhaHc5Xqp5Nao?H1{byPnpaK+gT3% zo}OR#-nI?4AVop%6jfVWD2ng$D40G=Q&5(4pBMCNePq0d&*N1Q0z1#!%0uT&FI3hR za&ipJD~Z`jLDm)aXz=-UT8LFLa$F0xryulD@~50*KMRP%5%;p62ZRI<%*07TzDqf= zbXiWEC5mruA?9c{2>R9e#|fO2wg~I+P_eYNVy1hTns1FFQaQ$tWA;)gV3f2*5_ULM zz@P|kO=pC|Z#}x^T6<`{G$iGXeW6`2!k3hrj+k+00mjC?AxLMWs|d91wP&0Mf-QpD z2aw)q4Pg|C?JptOndy_xR4lcR1ZmxY<=n?V74RL-h@#hq(BV%#CAp^FaY^UdMWRp* zXOngK9s5B$T!KE+BD-0T)%@yCo1mZ;It*dBLN-O1np(=($)|^GtlE1Rhh>6(b8;33 z=d@O=v=t(1t;;qn8Wd}6r6_(qAePF#v0enGnPPIr*lR+5c6uTG^^TyQ9h~lFnLXsA z`6`;QmU~5j?`_?Ex_*iMFu*sq@Z3M%?h^DvAzN1boQ??js(;_1mZLC(l%e&PCqRF2 znuw^Sl?eJ$T!Y35`MInlEAIl;;C`kWDX7S$sx&YJNQh^1n-!6{Xdm*vxi1VoU zQGn9_I;6j^ZMXi+Mq-?#1 zfdjKmdbj>uz%J7(BxJqyNB4G=AYDlyQec>@OFqZHo;CSecd9Ema(dRW+Bg=kuAjESEnpcZ^GDHL&-Rd?w zrB8kuCfG<3=~J@0QbH2>0aDQ*R3m=To<*9xP7a%^)=Uu@e15pFHX@+;?$O2GQP@xL z!6i^)KZ2zKgxdl&2e_}LWw)NBlki9%uAv&k12>#Cn!qKdLymysnheY%ve6Z@^40~Oe z!O7#~ee)A%Gq^T#4Mn_CM7l97n#hQ%+9qZg=s=p`KU<*fFKS2wgvnsqMXPt{9F;jWa?>=--Slol1hby4ZN#Ai@>Ht z2Nz2Af_wf6ku+mc%G)igt`Cod;+FtVC>fS6Oe38`fHI2<(kR4{V77)hHkvi)s<#L- z)Q*)Bgqg*<{k7=&#JY5|!wUPRJ6DEwB19gaH`D&2FhAt?J!Bs%=z9cf&qqOLMI+t?EU~xR82iLXP-p{>Azn2IogVwSSXTqF*yMd z#}l5j6^qECmZ^mwsnc2~t+u&>l$j_R1>2w7RMggm6z 
zxWQ~g**0I$|2+0ANWiSgu?CUcCp^{O3aL+TU!YUcUj!MgqC7USOEUpX4nkfi(%vXU zx$f4u&F&VY+L1-E3<Hht1`vB?I7;g#nXPUe=prCLP6Bnu}n+Z2I4rjvYcyWu01Z2ALId1JO*Y8YhFw1`PY6Pa`=wOJcc!h zL}ywnYTahK*nS)GIA#ru=m3-JmLzNT+Dn40A`AzJxxLg5g#u)hs%eL8AGV3Y5FeYm z**;X5HnPDY@t}>?z7X>@Cf6)-+tOnDnkY!=~>u6TdX=j>?O9%nChXQ7PK4^h|SnWrJw_aGP`jg zB-b-63b`i&OjhqpR!_G9**+;;(}dLuu=)m}`3Rpei^H7#N;E2$nEHXe_IiN&A%X{T zLeTdSBT_NDFvq7xeRIv9xlEB^t;^QQWF~8bXi5>|qqR~Y zUpkNOF`FVxzs9ij+I#^9+_}kh=9XU;g@tfp{&f3O2=~0Vue}?xF`XE)%Ow$;$m#8J zDSB!<&ZrwsO`dbAey;Ub6kVmY?=>#52L)+Memtur-NpojV;632s{+E;HmBEWAvFaP z3AKC0e%mXGW>&D=o(~BqnWz0C6vTfcir*avI}x4>@*vRod|Ytcq_A(ufXFDjMNq}d zPJpv!ac4*%OVa$T-WBq;&0cK-L-Fm%tT&NCZmfp*+VP7NGe@ zDkeDHrhPVB6pC%zv9)dK1gi=O7@ee8s1t0ZDE`wLlVvsk6|dKkIo6t@AfSTH3_B`{ zZ)tN=wtXib$Y{20TxgvXCW;3LaiSg#5D^BXeyvDQgIhN@Syu;l$tp$hs}8SJ`Bo?F zr>eJ3YSs|4&|Z~@VTYGQp}kyRN9S3akZ%jDKYK3)hj5SiK#->C?fM@Ivzy$F=+u)w zllGwX)9kze56(C!3w8e+Aj01BcmVGx=)ZY!zY39f8YGf$^$nf6b+E^T*}~n^C61*f zV`b@~7}Pi#QISsw(v<#vhug#uh4Aw#dJ#(AgR-BA8$hheFJhN&VRqK z-z{F=!pf8CucP^yVe>(HI9-XDO!z4HGH$KF##SoZOQrG=eqg7~5{+uPMZ z*sgKc{T241qA0~(vKy^W0!XV_PlqNrsr7N9FnR7dZlkjr&}csw@v8GK(`Uj;l}b{h z479gH6g=%pdsl!~a#FGnLqtJyO~S4!^aYd@A-r*; z0I8B2HrOo+Yca{tV*@uB8PpslqHi|lmbijju_!?-ZYDp$PR@@i%Xk!C5>n&-^7M+i z?7Oy36h?vmj4H{orU0=ejLw7Y6QnapoR!!=1$<}T+Gam4@(Ar7__Do)XxovpwJ7jP zpr&lP6^O{nIJnuZ|2k?WvI6%V#?Y>{8n#q9zEB_IwQI76TcI z99*Y#P?)CVmFT_yaUuU9MzgsIVdODKB=VkETYh}W6dNb#FV=8Eo(XKFic~&$I!zMB z-y~A|a6XF-D{D$o^x}gHqPIk$A-M=-o>Z6t$`J?m69E>RTN{Wv!y{*fRBKZHt*F4a zcCw1Fh!j&f2}LZpJRAbrO*x;{|Y!9~GbIY+n^a>)6>*s}?7j>M^Wgb2R9@;Ygc3WrhI>h4MP zY$zi|rMlAQDfiUL85C!K!?5PEUw6eJgFVrgpXclKAj2a+RT`vOtm&6JN z>zLh!kUd#9Ke?ssEM&f9P-=H54ROnu{Z%0F;dExxeQdWA5c-#5w5r9NlaT`?5BGA{;VWjQ|4@8=37~B3LwnjnAsThEqUPlM8rvY~f5Eg9@F7jKO`PAU3a!SXV(m%1+O( z(guq{%XtswXMLy;1JE{Oy*(VVBBL%5vRHt@Ja~~H9nkf_779>Eb5$aqZA~F1?akyE z)DE3})A@EdWGsw~!4&E*$Vl@}Y#)DoWu5gDq?|F4{V0s~6QT;33^98mfP(1$y;dba zGkXr}X?sGRK1F#9#y%mxQMeagYZgHZgw2oT@M3_+wv@|D&S61zLri+^KCl7#@<|+4 
z{vjf*D9&lF%CL^V1Zi!{5qc*9KfLH>7JE$6qTCov!~u#0C9-AS{4pVFMyrz_a?26) zLoan&v=|nJ1UO}sa^Vs|%I2{H^@kj*2^q9gOFyX8UKE92uK{=4+Y0(^>>deLqlCa_ zXHP>7=X7uKQ@_Pc_Fd5#uV_}bbyxVx^S<@}lk+`XW3MTi?y~jA?1ECC@67Z%yHa7g z0}BsBwboHE`~b4g!V<9zk;vLF21j6S$a=JCwJlTFcU_9phy!gDg~C`O{kaeC2~Zwy z>i1eQl^#zf*desj?Fq#4$Ms0r*#h0qL5^J`3U$K{M;K3lc|anX8$y2}za)kuQxLU# zMeviz?_Ja$3CXiip(?bk0W~*v!kfrGA>YtZyh6U5g2mi|Ula6$kj}L^8yhF@isWl6 zh)F>Cobt1^!O2)_|0B#?BruwBk;1e(O;Z2g6rd{Ske9qPrn;R`FKpu(o=JPv2MlG#l2m6P@zKKH$ z;{5y}WpUm!&?rA6%9nGQ07dcpDT_2+LfnDvIG?&JfI@z9ZLomv{0Ke&4ijeRbfw9& z>{Kk`jq_}}FvZfT){1JIFQ{HLO-?p7Iy(XzZE+}}C>!3Qy&BNqB20!`YkLM<`AMuz zpS7J5k%kwP;Bn+{0)9P}G;f!#s(mVw?`&Uw_TmTy+Qs92o_KD0-2PWUBh#(B6^DqC zaTr+30>F?=p4AG_vqHV0*}Fm->m^g#CentGGFLKarv=U)klMM`{|HGi-X6D41pKfy zSEDcYd5A51H*@@8bmM5SbFY|VGZpqd z$CLz)bcC-t?>jVFY#*Ao*Y6yN*-UkepnCh}B+~dD=bBsXs(?oGi-hoKKvB221_`;_ z$6WUbEY~McDPm|JoX42VR4x$BH)B{n)}H<)?D~$go_f1|^4TnUdzJ#8^%YGkZP{dd zOku_#w|RS}*!A}FK;S5Yv1}AeN<~mh#8~1**a;MtiRf$PIXd4eg#Gt87MoQsEG)2v zDH5Wdyy@pzONxZv3GDw8`-4a{x2<;8(LJ;6B|)0jty6dV(iFmeO&1pxXm4{Ae|y#% zc7y8tLU1wps!}uj$)@G*K)B6GYQX{L;xxxyoaWl4N)uX*XBE3%7?usD*0ziL!VOB6 z*axJN;ZrSUPYP=UBuTtCcvdV@vZy{mW979`s8s zOVsc}LBGPid7C9f_cP963;uXLypra4;#7g6gR1W0Dt9ksdv-?D(Q%s}oE>i@3 zrHL5lu6-#yJkHkfo-hLvi^3!NtAN`X*b{i0#XQy``%FY?%qYpT^JfyKa3t&}9GYg` z1^xDQaF!dH275Fl!Hdkv2~(|1kn+*0tFmVW{FrIqDzKW6xMe;TW#jgIfXTXT8j;_G zya<3JB9zMOY<=4&A~3)*!{Bx@>pu%h7(r3!L4!>ak>(E_+|!mRtg+d`N4v|`2+}sW zJ@2kbc|p-+x=3IlbKy(NmDKU}rUuy4-+lYs}f zwQu3nR?tTuT*r@Q$AI( zpL!rrj+B=lDJz#p=7&XbUljLZZ2MWJ2)?QVi+F3Cm?}iSNeI(diK1?9+AN#zZwRtd z^xBLinN)ITqhNdxDG#9FATv4Hl+JnYjH<*potRS;>Zd&TKT6;TFjiQ6d!4GQKd| zRto6zS+h5}rkY1KdtMY8)46+BI~XwjY5PYl% zT1$t8X=>59B8x8YNqZsi2P2?T&TJJgbUG)e#6ApJQTKSp9$iF)eph5rZbGtd zjrhTiNO^n~%+%9LLW>#xiR@#QAqO2uHjMZ74Q-}~epWEJDl{0O-!ZbE6 zZJ{#(|Kj$@<@i}Rw;k^G#NvOJq7r2Pe(0*p+P?GS<5M@3-4k(Jd`;~ey*<) zq_%@AFljY3#9>8iA1w9xf_~a?tB$JmA`$3rUNqfm6=wB4Fw~w4*=SN8_q`Vbd~ZwL zsWlty(xoJ%dHk@kAzmlsd)r5{0bPYz*RHX$Nzr7zMe^0M#UVC1LIiqn>Y3`(i{{uY 
zL0Zu|d5Tqs>>Sf1&mgw*Z$+X845VNbF2`OGrhd+1Y=CbF`5huzJxNZHu*vy$ND@W< zB5HpXqB^>By1v0a5@dw%j8$Z}Ec0==>6H}15J8%P?7!WwF!kic^c*un$oCVC%eXxy zOsB%v+h`L))}F%#+7gBROK${W1k8 n+m5&so!0dqoJ3eG+Tj&6x+`M)r_$`<>!w zH6tOTs;>&tvJ7_T_d^sN8o)WQ|8k-jax6t6+oCIqJuMP#PeZ%Bqs#d_-`iZ6L^ zb9I(=N#Td%+~~UrGb({|KbiMy++)~}Mb@v2goh;Rk;Ex7UNUBgPW8F+e2;*LpkGCq zaC0!{B!Zv$)@3VY*J{~#`$R;#8q=twodj_LI3qT!G(P` zMid4T6Pt;m^cZ5Hwm~G{r=hapx=C1z&%Kpz7xHZyR#?P=*AZwlDq`bl!0#2$q{YUK zvy%$@>h47Yn8o&Ch)wI!$1k(CS38Cefqu-WE>jC2O0;d!<}SWS>Fh4#Wa+bmN1=w!n(Vr%(rCf~>)vsuTw-dHFg?kloi~PNCcm}V>&1b#Spy4rLpZ!fyez^}}6-Df05g0!GHnDAa z=XgI@3-K)XZLfaGIp!xsWx>$4Uc>G8LX>o1vrY^AG#wIwq2e^Y3z=To^!-u|By|>R z>voXk%ji=--e^t)&sVFnO3M$stlwQLqhyWJY7 zWYUl_;L^|ODr)#hYUkr|$9juGT}}iZJqbYyZYyiBClscePM%-|N0oFD_%2^7x19EL zd*+@EpQvcEgy_D4WA$6xrp~uQsT^qpqug1%&)QL3yih4UiZ z?Eew+BJf;f{|o?82z&<%Fc?EPaDI7xD70pggk&!elr)7HA9^B`K&hS<8toEAk*hN{ z{H|A+cD+=IkGq=#C_=f>o)GYpIxvB0ADrbBili@e&1$dZWxo)`H~qjY+#Xg6hfh@Y zF>rwbi)g8+^cSI!Jb3TVr842gWxTyEjFu)Ay$VXO+RXEPynySIy{QQ1*BP{xTg#7A zIoa?E?7JI$!S3OCr*#w5TCJ@+`{Qf(v=f`ky+P+>(^`8pWMt*ZaDRy)qrhznlZOI+ zOLHt?-GOI`MoYZd7Y{z`Lyi)0kkd92t8v-B**(B*wjZCR@Fa)jptaUh6vlw%j@d{X zDCqeIxFguX+HXLRN>X-@D~771?9L1N2FnU%k<(e!oMlCUf@_|WYZjiEJIS_(Or$~8 zj-9MEmFaLD4}AOVb5ZCcMtdW6!6x4tjPhk;*8FloKaHFsbM0zjR!DMFlijZ{`8roF8j)6 z!v1^A!15>BkzW@@V_nTg%gERjBFJ)s7g-d$#j)z`+EiW?gLJCwBMQ4@6o-O2_J9!e z3`%5L&VQxlKniapUlclaU@si-jRJw6!*xvub$;JuykeVTC{i**9Ed_D#9|Y+S3ut} zRsxVvJs{}25)@h933<6xHrLu2A-}LBK_4kVfd0ZdXxgzFtfPXohgC4aek9<_>WTGj z&Wm@6LS?6FYtD?HW0`_}Ou3p&vtJ0)%pqCgTCNC*nU%ELFy|#0wm9tvD>40QTT>;V z2*p`N*fVn7i57YFH)=S~zAQSU*!8xX?6xgFEX}do74|DSA06#@fbx#b+2LtHsyw=D zRom{ll1rby*~&xq7JQFw3*olXYHL*3_f~GBcw5>b3WYoI;RUu^P?HeLhsAPf{n_O1 zjoMyw4>Dj$pByYpV%l2dg{?DLT1 zz$SF`;Ez~`t-c7#yqmMn1A;z>Gw8I%R)hq2(B92{vpGO_kKAj!1?evgq%3O@@Jr#~ zN+gaB2x}W`e8#Q%HbdRKHWNFl_lLDuB)MV0n>h(fzy8^0#s-$oR(k5W6_t{a5( zk;|U`00HV`jZNk{@NkGLfWczvAuiFfC6DhiN03sOpL+&bVMv)SW&Q~QVYn+f*F7st zTTV~fVADfF1g^4eP5?Y_71}ZZKJQLadXSAGne`H&BJSm7FHNu!g1#MTJ-T+WheKLk3})D}kQT?| 
zG7VUhO6#0o?iLQRcI@5SZe7#LY@cEn(V+u-+UpAYb(K} z$~8#cGpFZ=*pXQ+3++k)-@1FH4tawx<@WF2(|RbZNzj>Stqp63+8*axc{{}ZQsW$} zZ-=s0UTQl6ERd9Bj=dNl$Ix27{+9)r>HH_HRYBT{Qg4QRZHKS%^n&VBlNZ=^f+TyZ z)P^X?RAjl+;ZKF=6Nd7=!Q6*M@Ly&&8mp{@$3&s=bb1HwI3t3-pTi`ImJ>=~@#ZvF zBdwVi(sRm#HESj9P;Mx8*aSh!Z-e(n7e~ZMRvE5FR+Ruk{N#5}3BY-HDK2 z%_CTI&j#phuursq3L^8K+*E@d(c~ul%udwHLkLf^3wM!#QIBNsuG~X_vW`7lb7J3O z>m^8Ix^?MncZVpc)-rGRhNz=UW}**FvX=t+=sx5NLpTA)>zKmrTY#*_vhQ&+wf9A*CwUn>xm~my zK-1f>d33SD%y@oIBvJ_P`AXriM0g*<;7x}V#eW<&SQLLSEAf{6>@ku2$K}LQY^n%U zh`{3ndp}h~Z<&e9u@6(}6ZrFv+D9U2+h6Jq``#t%$09Q1ZF?uSRWGvt2vX+AaV5ES z)1DBwV=JzXJ@oD7kh+s6&|5&~@Q2ULLPgG_5pWj?ro@|!$` z+A+eg**;ga-#A9eQ9|iHFoU^&W}(ec*vIvkH{u3izg7|$pNra#RD!%H0+25X`}H+g zEM`P*{}7Vo$H&B^5%7*M<@e~`(LPd`hTxcUt$ixMKryMDMOkce-1pCrg5Pm81Aj#d zIt|NV0>APz1%2yAOX+*IFeTsI^=6x&io=vtq16e~@=RRKz+-1rMDUa5y-^+9%2LoL zx0~bBCC?Gdc(U5~W$i9RuMjH7wgAHEG3zN3Gm;cLX@0W4&Jv=~xQu8qvTZg-kP7{F z%W1D75a?b;JO^(3vm(&k2iYz6Dy-tX;NR*Ab}YocJ9nx5&-29kj{fl(7R=X#m3jxt z5lH&rsxnY092@*2vC1$&6q+*>5r0Pg9zj2I16VQzmJorV$2&)k6$TKIC#heilEN! 
zMAJ@iyP2l2ZW^*m$nOJorLYGJ;{|EXeYRAweu+E3C;Q}rd}YXfykVLhR@hgbA(1#{ z?p=`>g#u1K_KASLfLbJ_H`|x@18GMw$1?a!LY(k&BhvfO7ZkNSMe{qyfmQAqWuPb& z!5!^5C(ogRG(HUz2sxnuZL5rIQH3cLVo1Aq;LO2t* zf{%yr7(Rjwh5hjKEh=cQW@aHL}Hk&0UJvw!$IUn9~fQ|$=> zdQptDU#7x=K`%$rtDF@FjJj9Z7Df5Nb1aFCxI_JoXnw%>VTjtXP}C62f_(Y+XXAPm z@L*xWe%VA3{=ZMqe_{Gc3fRR)i$ufyiwd{7?2tx33H&6>o5xckweL`R^&n=VRfwP`mstBQ4DM5Dv`c_-sSXCr!&W~mxC{{d8wSk z#ScRn`h0qrIvyt#!@; zIGg-QnD!;t%$G3E+wDo$?6UVml#cPQFSkzw=~v4ZF7x)EfG|zT=k@pj;{2e(E8)6r z<)YBYL*rSOiv;}qL`yhz)C)6?^beV4_3_Yn+b$wQbCy_VpNd4oZ|>O5&UuM24ZXQz zXN>9FmjqeE@`qB)$0GYe1)}+CoHkvdFe{|*{kB>`zu2_oF^^rz>*;P$X*R616Wb@l zim`*D&`Ctear^gah}Vv%Tj+c{_aO0pY}1k2h}pd&(8O-=1BZri4Eum174}PTOvz{~ z4Ec~z6&DomAR_p-vI?U1LdeUNzz(MJ4r{KqzlNxj)27(J6=qoX@8wzY3x|9i;lOQz z{-@BnEFa%my2Gv(kui9fhdu@Us`iqOPIGmi4HBJEXqW!kY+FTP6bcK*A@Z?bNFTFa zlQ2bKuZTe3kPKXjZpxnp{dPhEIUz5#q-fzc4%6Aq>#%)QXvakLE%cJuVjq0%Wr%(x zGEl*=3}HXSy(78#amyEh>ZA?N>9-`6UOadkCro1#`O2$8Bl-Pp#Q#H!4hWsu&HoP)3BF z6v?-W!wdZD3K7B&ihfSqPl_!Om9~=e)Yej4A?Pc)p;Pw`c34>Z6w0girvUoJ*wa1` z&}UIsE>UUBRb2WS6m44))86&+Y;Di>T@f`FvT9Lo-2|ly(8$v-exH!F*@lMWog&kh zf{bM>il5OwWmb)Bg#im7{~sp%@@s&h%-0O zm$6DDn!@QsGJds!ep5yV7AYEQ6vcPmag=@~OnpbnRvjtxZc-d8`ngb0UId3zW9*{< zbK=<%oEU!Xb#l>{<7*e#)e2K3*O8s<3^xlhnwUP$wL1bRj`J(|pn$JhS8yv1k^6VI zjJIC~5XndEvs}P8`|xJ5FKmS|C6FpbXZ(s;=DJ{mMZfY{7_vi3T-&rG@)&Lh21FV=P|7Zzj;GKG@sWn#FC#Bq>1vN zU&b1U z$%ZEVAbuVavv;F0X6L-^3&LPWj-4k!0}m}=(Mv(Tg9!dmlx(P#+)Kkg>lm`c^02kj zU%)r4zr^sD3b%hq2$A5dZnH@AqjmC1dr@IVgwmxsaWX($WlA3GoA3CPojcuPmn!Th ztvAd_coR2@LSAh5VT0Z}3i@yD<;55S(#iGCkSW>lZ0PQ=R4RT2D&zytibP}ZGNj^M zB0#AOUeAIIn-NMlQnv0$IjW$w_F_nlL4jQRUXs*|P<#|#S9C(WSp>4XXOK)ACP-g% znm5^k5FXxhm^CWwdxJnfpO<}-qV$TebPDZjt-fpyW~^~^IPoFh z6u&+=m}KC?CT{hjF<3b|gS;%j)U{69+B#*jwF&y8&xpSEHz9ZaohmIE9YU7d^+$Ym zR?=kaAwYviM7iR@HoQ-m;@}P^w@$Dz0WuXi9Er+8>XvDEghha8cECiBmeeWH3xpXi3z1ydq;>W^K}O3Bgu%G5T^^bKF zflq>$|5|i^<+rnYd-fHtSHb!FkziPrayE@BPgQcwZW}GS@7j+#TXD#Aq}*|;rU?3R z=$#cEW%Grpmy>#h?f~{#$So5Q3H!qFv&n~`Rc1E`Go~0+j@l3bHE|WD!?AG2@A6QQ 
zsJpmjF+aUoA$P2>r9y_ibI)QKh|IG*VFsXWMuSx-%ra~(-PKw;x3zS#?#-J;qy|}z z<@a0P=DQ;K#V%%*4rTVu6VT)e0iKro+~o{m9(%&2itwYFj$$r4OCtDb9mgiOOwjK; zJ^5WoXFgYn#8PN2D-9-JYDEb((gJGSV;e;C`-bOaxZbylO7EMu${*dk0p-Mz+5`cr z#9a~J^}J95DdpOHy`XRM>GBPymd&)5kRVyRT}lXl3Atr$n+K3nzF<7UjInzKw9?8} z`!EgFGoCEkKCZYRtD)s1wjbiU>>zoHp+=|Mw=Y>!ZaCvB+0RKxXmWV~4Z#UByHm&+l$(BdI=4mZ(6 z2E*QeGovm!}m4%!g$}Cua$=aK@ zdhrbVT`F~+jsypVnUNFwYVEMXRLVBRukf{h^W_}e?T%H$1bww=-cGjs6qXFH9Zg|O z-salhglY4^s)Nhz^7nn_kqA`i7ydiK{u73ki$QeTC<>!m5OtoNGwdx9Xjod;PS&b0 z4RhB(`2Ume{rcTPH4%Y42AA zsiApup&eG3)}o!4Yi|np*5wu@^sx7*5Y2UHrG4e^L~93p$SzcnB2G_&^`tP(;wK)h z;|v=W;?nRTG)ZA*p4M^uMyL78tW=bo`}-_FBl;R<+FDP?#Rz<7Lm` z^>!veryaA${wYYSb0mrPxd4NNT{s>Wzx<)km~0#`$;ht+$eMfzn@|>@45@G{>(LHz zS%HVrHv&9T-gKm5mAx(K7yR&;)at(Wk*^8oYMcV^6rzcEw2>L^z5!Abk^(kFP+LUZ zzGPjEm^33qp}4cZ+K+@B$L7|xZJcj8f^-V25HVZao_%w9a#Njln1iCwO=RceW9*!N z_{811cfZBX7x42I9UDQ;+Ab1>`5DbebeY1`>)(D*U-^y*l!WdySLQndD&!0rv0NcK z1q*#AlBw$i{o==w;4<5s!kK#H*%^}4=|22czbHr-ke|jLM*xK})B+sv1s%lVynP%Z zkz^^bD?TQhZ~wcc3;5UW5Jl@|1wXCMA`X1f2SoJekRd3UV2(4-BBD{_m?(#}d;vdU z&iAv85rN(y7&jhknqxy^E~~{hFT~OGcj0g^5Tp(GhdNTWSi15r2Z~&DZQ3iI&B;I^ z3+LCFe7&EF53T|si^XRq)Gc!lg^npr;a)izoq{Dbktg@#B{J^o#>S|IZiXVc$&L_|OkiJ-& z0adB=I!nUs{*5S`{=63Gh-QHH1p6IU5h#wVXvl@umfzNy@E|<1gVX`B(n`egw$Z}%m zdRgoFD5S#(gZ)Qge>51A$Zy`dzuYeUCnQEd9`+KkarSLtifWn03HFBqELi*pZbVCL z+kqW?XnjR&KR(QG$A@QheCQWSVoFI}W-(y~L5esL%TC1(D&nTrBFq@z$duny9a3Hq zg_>Vj$j{#O|MDO!CU02+j4x{Bh`@1X9C0=lDgGP*8cDs$>iNzfe4QuyDH({RmK+Fi|vKP3-auADmsob7~l3^L}6eMAH_!iDow(^MAklbE-(By1p0@7Jx|}? 
z3JAXui1=M5#1uwK*g_R%KHg|Tgd2laA*$!#K6anNLBrJP?@1EVouKROVa0_-QnN$) zQDvg|7RZ5Co|TIbdM@L0XoA(*MiKoST4?n!!3#(P`?7UK{VQ_wm?>~EUKXand!pQQjtNj1)3<*)vJry5PiIX*+6DhPo2FyO+wC$zM)%gVu6DD+ zl$BeOz+rqUoje~c7Njh89sENcs^tGown0QcilZ>Loo$UGFpncyr+9)C3d0V^aNA`2 zohTYB*^KzJ0IeLF;B8nX9_K_6X@ryIMX$8lHi^OzdRc!|#)W+^V!Y!f>@^W+*b56s$*|X3 zf;2j(h(>?q3jp6Im{-eQUt?bv#qaQXL=`teTsvG`U91>C9Yf$#T_eb9Vq0y0l)EmJ zmXt=Y42@XFK%q;`yV*x#c2_9H*$JLzPY5ys@jRKZ937C3+nKgh$ko8lgjW*Y6>|>FAC-26j2*-u%&V@o(qIipeQ9449A*7g|yb;^u z!mOfV^x`uW_QP3Jkc-)0N6`2+H>F!yC@5JwFS#D;=>a;v+9vzb7s*RfobWBRuL}6( z>kOqMo4&-Z6495=<2MSw*91a74k~b6SYl}+_-nK*c*`%lv!XCebh^O#cRF9za6zi# zXKuSiLLB{p$6*B_4rz^SdrCkHNd9|jm?nEp6o#3%v|Mar6mzh6F%;mGfM(mLf_{8Z z&cv|r|A2j~v0A`G(X}G@)$MN568YX`*Nf`2@f*LxqQZXv!7LyyhobgmswC&Gz!D-b z^d~mfS-HZj4LE81cufvT4{Sh@a2HlgdVh&%x{Sr9m<u7-tTL4q2Ne7F#;i&d>dMxyaC6Anx~IWjQJ6AO z-iOa~G$iYXw|V>imwll}%4eNeGsSue`Z3CvTAOEd_7XMhlI~#w4$1wZF}y5t*%q(0 zyigKzUCNE@sQ`JU-OMZ0iU3cPP8B0{zo1`d$@;mNnMF(7UKi0%MIMi?c1BpA7Q8KX zqpjRhzWfzPi~%*u^y!6yoG0GeY8^w&nT2ysKeNRK3;Go)A$Pd*Tlu@&vPD;!+c_9| zoyM^ug|-A^pv!HMh)h-Mq^8y>)8z1JyC_sGo(ej8y9F8SG163iyFIm(Iaah*%(tW{ z8lKrSbry&8_e9WV58I!eVlTYFXE}2JU1Z;(8n~xl{VFi5QJpvC>wd3YC@OX3vySaj z0h%kJ0$G?xw4loF`9Jw&mrbt<4~gpc6fZhz8KSTfkXc1)@fZIUuh*$cD-H3i$9sm_ ztblUk>AaZyT8LJ_1Bl!EAqtKFr-%RfnvW{VlWnE|(&K!|mkY3fyRq|iP?#~{{jI{i z#P$%uPk8nizUaQfv@RdRjo5ezSsxhG(=rsM?u;>Pzz$I6sTCM=3lRvJ<1;nn=+Wak zYY5@tefwC8!hZec#F3=OOoRPiH1{nc=r;w?Vf%BS(07)LJV~|&imo4ejGAiy7R4_s zNi*>a*`(iPyW~9ROp1F~-P~G=^Y$jYR74UVsaSTTV(XFe#v>JL3D^~)(;43SWA+mP z)&+v9Fj8S&Y(-&jtKBIwLo*OrZ|MT`5~k0wak)OMD}RB-L=L~${&dNju`!}ju@j39 zI_OfTnJKzo565=QlP3P#qR^Ca`YHR10Q18G?^z&FEFCIA)>n92`94jh%vd{(y(Wy?jS9>I6d}9M# zq<8=*#Z=g@1gIjXd3Vcl-dRqET%Eh$W*;i-K1yCAYVFdm1NC)#uQ=P-&j*O4d6qNV zwaKQHHe3|?z&v9aoUwgG1PVbLTI{@oe`!Br*|x| zTfRY<&hZXCI%2(qC^Z{jP_RHU1SvH)qBBs$3Wfb#42{C@P1uBxf_C+ekz%7TooQY- z3Z?O=eu;i95>0mpQ!cgkLLiXGotEuO7x)wqoFN74Ap*@{!&8PzLEpKAc9U5t4A*z1 zZ58&-IyhUql${ZU9^Q&(?|(u#3a{>p3!yVC=_o+hl>%Yk((kIjqVy4!x-$!!w>K}h 
zp&`c~_o7u%EWlvV(1AnkmjU5Bi)XH9Lq3fJO#0VDKJMiic1)msw?-?BpV4i0DiGbS zu3b~2u@_(DYj+#=cn?F*B>lJBz8MlwO*VEOt`ww09BK;fW`U4ej%OxV&k*fwHVaTb>;AGDaB?kcLUWW*Sr69ic>S&_tp+Lu|&kBP)i;CSVg zsmpF|$mz^_POt@nK_q5ZX>#YXA$6GcWVr}4m7FRMNFxDdlxSd?F6V?z|fabTS-64Fl?Pqp>?oX^oUwq8X4St4zKywzS2 zD4w@Z9_+D87s|jgd~u1L^G%sIqMauKgMm$KX*8ZMNV~^jF#4j9mfB&U@-cod zM0W0SoqbOnM-PbkIej^H%9@|-K1(y=` zlT*SE;V%PpEYLYNK0p?tWMY>IGFbU?QePM1dWxGDw_gcTXzR{u`<=oxR`xd5Ps7ZW zy%Q2U+u2j>bbA6xc_QMu*1lk5@gGb-q$6XFFM7U+G%i_Nm0Ul;t_qL=x}j57yG77< zRE~jdl(1@Dv&LPSYHUi#!0`m8n7txM9SnU|bfnuxb;A6c!80vOK`Pvd$h1N$3GA zwmi1esiIJQTIV#Ir7(4(VuQ&!+bPIszd4VC+&%%{*+KA1?5J?NcoAKmD zg<`m&X1{<|{_ponc26<~zih7G~_v2+{XL z3l1$|s&2XLY#oJ>u`!QnBSI7y3|1rnWr18I;Iqg5flzDvecDw0v!c;aof}P#20<=;bvO~+PTZntU z>iL!Sv_SjLfTom{iNHAZiH?fM)8Ze5DK(BP*=dE{T3sI8^nE6ZI+e+hci0s^XaFq%oZt3Ra#nd0|-AXB!-gKt*dQGL3Uzm7Sv+nL>RHluJpN+P4nIR(FD6z6dlQQ z*>Zvkc9TfHaferN%)VEc#!*2D;txwxa0HS&>jkNRA>)(yl~6cnY|)7Z9g&^Z8nVP= z84-Sn-W6mh_^pjuTgZ<;T%yhh`MOcsm}UPG_G{~*2<8s#i{FEw722@Uu2Pt)o7afX zm?=b0Xr|0{P@Mf-B>zjH!P>N(ub96!OLTg4a>6tT>dzIV5=`RpTMw6NwFoqj$K4rr z=T!iJEimb9vqbULKN!o&ixnkoLn_h18fCZbqEIm}XOjE5 z==(tG!TuQ>cL6P;Efd}2*ItW@5>%;iQ?{kjSwqFIo%4=W6f!r@Yo1}NLuNYPQ1P5Za79gP(Ja zFUlE$N5;dxF9OvfUSt0oBFTuZB!>2HP7!h=x%?#GCPMgGl7DDf|_Z0}+@-i6QI-ED!qoVp%kEIZVa-K+) zf;^pkK8y~fL^*8Q%#fZB*z;i)1p>UEl}Ohvu*D+K+GO=YI~XDpJjh~HJlDQ@?b+&v zMJ1SfjR^Dst7Caw=p8V+-JUL=AAjS zyz|bSIde|v(PVA2gr5;b`vs|7E3B&7)U58l%~s?_uoJE@J<%+jdt+` zq@unT$u9F{0_5FQ)gIjwt+s1Kfe|T5j#d~R^#U<=N(5<7ZX9%MV0BQIiq6D@S(QhV zI#$9eak+44OU}f2(O*p^xUgT^MfR?+>O69&{oqH0LnUP(nmK&5C=@5g>`059K`;~BuuP;6H=rIOQHTkUrq%~ej!E7^Y>M%Ups+f^5m zIn3uGdD4lQ-6=vS4eeFjvIxU`?|yx4vcj+$S(;sBOC3i3RBLQ!>dBGBQ^C;lc`T1g>~aw_1tnq=ElWU;OUjG!hP}4M(?_BZq#@Si z=@II#u~q>}C-ZXj?f&bJ13BwKG-bE^#52?lDE2r@El(7xt6gz{RSA&+BB8Aag?0&s z+Ly>H-xI>r9{ar3k%~ifiGal)J@(-I?wLF7kEv)@3w74%(Htn_3eg-iPSPCu}S#q`5ha%BC*TwVm6R6GG@Jk@T1P3Qe(O1Y6 zq_avoHWpi!$KiYP-G{0JB#yf@STa^z;8si8{ukM+qEOrP^vU*`!eJ03_SYaQd_x31 zt2^VL>lHwOJ}2K2AZg^$Oe?_eh!7gGJagi{rR*?9iUgAx+30HI+Z~=Fy1TI+k 
z5-Id+MkzKcCJV#NIFtxyC`{RhX8vKRt?`IVG>bPYOtxWLZ`(a8&hxtz4*8AacAGur zN%3kimo?kdJx?}C`esK3sROnQgI>Y=J@IHu5`TK{5YMn3cheXaGiQiE$+*eDKC_)E z2!}b^@qbHz`bQId3+L=J0*U-NPRGeILf!K{L%u4~ByYFDBKY@bHQ$(EC3m=}p%(eF zZg8`3=rle(bI@JM5CulyEDyJN69i!-jr*@wlS-}A@PkcO>y*eN!@Oi|gB|qeWx9o1 z`N`BH9v5lVnf zB6(Bby8z+v)1F1-g6UNO)_9Rj5l_6>6REN{ofx_5;2L;z2eL~@$5Kh9NVyG-kI5k}128xZNj!Xs7r0?S2K}ljqgoT1t}|CZWnB# z5czO%YuSM%cArBSxt;yUR6#nvWBa~U5~L^W!Bm2qc{f)Hl35W`Pq}@mKPWJciM{^z zWnp5Av4n{aJ)tm)Vwj|2+5at3LM3F^<+~{;h1`Lu^sJVba%wqP6siSx;|kK*)L!RL zpf1ig)*ck3gS!{(w#6Pr+ANHD?G_BI7I}igrZ5+w5WysY;}U(Z;>N zz9PW;$@MF&P~pJr%gw|s74DI&g^QOj* zCblB`iU{Ei&)Q$rv2}93T_6%AB)2>eN!2hCt9zZ=OAJ&LWA?r~x#It@KuZKbje^_=}|i6hUaw- z8Gsp_b-d&-6B|hEi9RueB0?iEhZfjr!XJCb5qQ`YqEV%Z_#Cs5sRxQV<+V~_y;}*{ zsea56cWb#F)ha~wizjnAF>X%?hQ=Bfn;>25{hol~p#A?df?+`9;Ax>_BOV@7&jfva zdq)(wDdk+w{_PoX2s0Z_?NZ`2n(L#be4AY%0*Ue7oN1#xR!d|_vI2ov8I!O(6$`WR z{Pa?6m2VcL?T_zXY=2XjI%Y7A;PQpMt|cAD)m?tcXBPXn0PbQ@`c$ zyojxA9b2rw2oxL7&ndHE0+iFSs>J3g%!6H3o7ghga!;3+U@yEzAavB>hQqD)nx~cX z9rT5gf-r$RUv63}?6k{5zRxV3WTz`kfj{QYz9c|Kc*KkDDvlARP$mU#H`!Q^X1Ek^ z;n8mM=wlCU>27bb8bP{i5N&ht z)Fm3F>YjeL!oJ7~o>kO37yS%sm@pzgYxqUmVuM7a5H>xTHdKH!Is~qMn)6 zRW;W}3i25n+}nPsAkCSBSq6-~*ivCvsf;#b(XwtEj=|ckR&f-72pV7O0)cUs6Vqw_ z-FDbBqwz6#l3~YEIKKk}(0>!A^<`Tqk%>WVl+3PxL``(PxCO6V`nAhMgx#Q;V^;}M zZniEM&-b{aduvYA&$6_i6GuHM^6)I{D-^~uM|SMu2Z#`c-Gy&ftgsScb%gF*PEJK) z)Hz|l5)M~Y(0St=iJR7u#BiP>;D@F@r6Jl`>tSQnxq#c4`NXU)Pft_eEw~D6vH*SiIm8`eF zibAdUV3gSV0_uJVnz#YP0)K~{eOj5G>R>bOFU2eEp-C90;%wmB7~td zE>>XA2&(>gY}~OB@r;hLxegS zMaPyoJ{mqR0>x&sU@f+Dg#uGAH@vdPE)))JBv;(M1<6TzIl~oJPHoXEGS^0lK*OVd zHQVz1K|b(1FoM3cCvMS!8e1t43K?FC@OYOnIngv_@-ATQPQV|{)JQ+xMxTf3$%ltIA* zqQGN39o3;CdtQ)X&c}(X&Cv@YdDFiIe++w}_$LtN0@l`rk!6@M_yg9UB1PhRRRjf%U8l) zalmKz0lR2KD23_`=#^&I35MmvxIErolxk`DB2lSufBcBw?+?5)w*r})RSEKO7(K#j zJ%*zsb^fBY9{dl&mJOz(yll zH0BCfb>YNto6aZgDiLAs_?``Rt->@CYc94pqlIX}Wb-CGI$7K!F;kseY*j*`Upb7x z&3DY^^rUaA;xHJ?kfaCv`c}d+Q^eqokzY%QNF(%mvtps$o607$fVKC1!r_f&cI4CcYY|v-L=pzs-_!MP 
zY@^szMTF{<#AEnBEwWW2X=q9r1eFbLpG9{7n?(;T8|j4dCRn>@aKT<6VK4MN*g8Ae z+7f9YZjSDJDhe3q#Pc%bNK;Z1zpXfVau+8$Y zuDd+0Fhw8Pji~%Nfl!Uxxke;Ar00!-L>uVBD&K};`=JQx@CO>~3We!n+Buys))-G4 ztTi8c$nF(^%n!Ca|Cp^7(8sfNPO^1-(nxOQer> zy?you2$asCu^%cNdU-4>z;L2znr4x+_Mhn-V5Vw=FW3HldYwx#PLS7mkfn&@!V2W_YeN1w`YBHO@=1ZjdS9WnM3 zfVp02X}u7M53#`>`+Nh7!0QFPEigs^WsE3ctVk7bLIF4Dt5T_woWeJvrz;9o$U)#o zProGyGei3IxA#0MgWf}j=&~^(CH*y$1>p^XULEWUwas!RjGIIc#d3Xm_rZ2FrZPmM z9vwT;?auLhBBvp{ti&NUh;>zbgCGnfo2T0rg{d)bjLZ?VTf3))g|ti4>=lQaq=`M} zMv`mJsBcTw?+sAzP+LW$kjvY!Ms^6#RWfoV6D`wXcDo1^**C4fWhxA((r2<70eWA? zMldqDh-su=B<9RpZnXmnhR&aa?D@FEWE*a1R@tY-UKOE8XkbyDI0|9~>6NG4Bw! z7yQxVDA@c-VP@I%cs9OLPag-Owh}AZSptCvW^IM?U7RalU;&mF#%!gWbD! z+TAG%{=L$XpJj_ezL5gZG|z?hmRAMYL0@F^oXFC21r58J|9X2=MCy3r;YV$?!s`1- zml#pwdJ(82>l-=A*d<8sc2?9xyMgw|GQ&=LUGy;gf>C!qXQz*crs21N*XV2kS45GM zhpHZ?*6G{-RmEzaan}&?t!9^t$T&bKg0(Ek)$Nxe(T=G1vnH4;6bj`YZD9=W0rr4s z@XNc%Ho{i<18j0wnl^Z3V*e^R2Yp(QQX`LhGJ2-j>N$U+G#5)v_L3)IN#eF@+39>J zA{84nXh<*nC-u8gq>)}oekUZdt5>x1tg&rZ*tw%kgxP0GI>k#lyY!mZAw2 z&9*6-js4YAqcw<#^nXII@>1 zy(mEWug-m?njLIs>cPB%{d+jHIW9^lC~e@S{jKk<;ps?#JV+o!4nyVJ1`CG{h!q!e zHZfcTvX30);r7}g>pgAsCOV8{YQ>i0PnS#rBv0H8{M$)+?yn z@!A$|N8~Y;&9RPXBp7YCMt=;KR$R0Z2rQy#nQupg;fM)bYrrl+`eMMqK6dnDNoC03 zBacHUZ>QZB9!uo!Tf&jm7aT@61b5h11z{9X?0oy0$8lv`3+GD(Ltp4Pk-KSSow5EkCry3Se@hTDvIVRZENhA`uyqfVaox`b3rGZzB>DZoh9H%m4{dD+gsJ;YzH zn+oR$c)8p77&co!Q9@G`6=dhwb;6;yMkZt~{RR{ghoT%Dhr{6(1tUz>s1o@AyNo}*w zi%zas8!bW0^b&{3oy(=8G(?arqfvx-$Hw=>%dx{=o7Mw$#Td5_g4FgJGz)TUlSk4I znC-PTkHmHY2YrtVg(k;9biN%BhS%iAS$0fesunFq9?=#hqQE6ty}P8^{-y+BJvt#= zhMa&tia*s=IVaM&pKU++1<82oOx&pr*?4< zahNFKD@b`Ls4)!h7EMgBfMZqtpxx__!SS^^Ny@dEqJ)OH5~1%}e}WCML>oKoSrPR0 zWv`^yo{#A(B88Hko5@YKy$r0c+yxDOI;U=qEP<`IZ#jjcy64Y2wsJ|JP^n{;b8Ng~ z;f(o&H;i#`iwHzs%eLrN50~SGEW;D>S=Kh9!5$GIFr0SrKzmX!jM{N!nfdWDJ1R;T z2#2fjR(sknL(X~R_F08_7(s@v&wWFX;l)h}L`y$#lwcp7Z$mvP{7=Ly*PVhr1DE}y z<%kf9VYP;JT48#>95)bF?GJNjMYHd0!NBBDITjZ6#avam?~-w{U1biR)bhr((lTisC`XktnF6|!X9V*ylG8BA?xz|7^e{S 
z?-b?Oqw9|CoNu4V2$@|ycAR}xVaj=VJ0r5ruJx$nTe$SDFc}Xk$m9!hlP8Sj;`6ZF z$_1$+;_3-88hW-r$cd*tpm5+JGJCMX9u_53FSAIdU0eKdM!~^t&o|gBDSV4Wj^D@( z*%LpxfEJA~%`CSgI_;735&Mx6!61Vj=M8;@7_h9JCRkj6Y7OXfaUZmUJ{b>j#U+yq z?9&2)nPN_Bi|n%^keI?oA&5THVJ=5BD35(pFnldtI&T+WrjnIsU}CohqiiO}?Zk48NmV@(16%&-DOYZ^ts(}_`}7)UuL@E#CIcOZw%bP@-#u@c9PpksAtZlgLu8!ohk{|4j*;oO zYdo5E7>y^u@g6-&VmyY{#8h%!gtjTdr@DCoIskK16fXa9mB!|Y0)vwZGIfx!#36oS zuu))Z1>sjWRgTPL5n1a5ol-~B=sqXF(zue?gbDUr&*7a-tz4M5LxK!}OsqE-Sf@~! z$r<|SN%JE8-5&@R(d;9^P-iYD)!8Sqz@Y^v&;*|pq-QTl>t*LCO#PCz%dkW)dmCd# z2_q*1Rf@@Wwa_ls*fWA*U}eX1%j8Sq zw<1vhn><~LZnS4b2<##?DYF-a;jbSe+HRK2rg1_jvvqfg5~_C+ z`OH0naCdDfez+!k+$#;y-e{3vVCUvUQHh))FLBa1Ot9o8A|c#zCq?UkMx7bj<06GA zu(U_YXTKAXf`czR`@O?s3-`QLQE9IV(rwwXbgnV~Qz$g+m8E!PvJX79oY(A~YG)@- z#SbsY;QGjUB9OU0M|>8}7Yq%7rXDY?eLwXCSJsf|UMfOpM>a&eY?S9fzhHXHMtcsC z9vt%fX1hrgH^qg!ZC1f;;X z1`TCycnXH5*1dmLN=HPI=8^u)@J*34pCuA9mzm-=}8_h&k~&?qBBds zSeuK#SL+fX6meo(OZSeIcFdDd#4wBo<=8(&pwZD@v9IM3_F=w-*XR;q-s8!54$H7B zJhr1%*Jt83TsVBl*xp4Z3+UhLMAc|3=1q?Ygz~dvCGswOUy8zxNuT+LLgI5je>_UUliD5LeAE12JJCnnu#0J#ps=WAs!jvA%W`{pmKwl5c$j9ub zo(yAuHd5ix(ARJ#bw>)0<%~a5kn+Oyt};sqlPN=qTN48G`*U;Ibl^hwz+U@}NR&gH zUU#(}5(;&W4q~e8c~M~S#QIu$NnuJZ%45~eB|uLci6t@m)@JXD;+0*?6fmFwOyTKT zHVzR8dE=8qEAyYFC}_oQW+gFP6ph@6A{#F&>=z<%mK43wZO_fb?REoKvfAt}MFi<4 zOYJ7q!#{RlYH`JCy)5w*lO_%fk;XRIMA50u$(kms@bHP|9agPy7_CEb7gAymi9l6{ z=0<92iv$Dj2y*AYxWj5Z0qr)4RnjKUfcHt3?s5I%Pjo-naBSLgU5;rLC49stqg=kBUm1gQp=TrB9sA;4NcD8^|FSU!K8^z6bfrz9Z zhk8uDT`B}`s0mdf6CWk$18du%d3Li$?`z7_ivOq}xznim_Le~C%S8S}#C2yEh1mTy z`4jCtfsm8zY}q%3sYsCJp=)A;MBxM;bww$1Yzr$uyHjMc9am7QzRR}^C(t}s@)^ys zxColqn`X;`P-I8Q9ubj>V8{C1MO;)8qz$oVW3MX=*ArmGzEcbcd~r}vXIUS^t8p%n zEzIaa^K~nhez>e=36aPeU8#=x5?drlb|cX+vL*g_RuMjH`6SdhhS_Wm*sVdB+Gs_< zLF8ilKm;-)!8k-bRsxKgrAh7}cU0K9qL6?Uw=654?+LO>OPK!Se5U$&0`42FW#nBa zs1jJ9upVf!TSN#oOpo!EyeEY@*)F$f{(N~BCxY2lD@b*C9kXnw1FW_c*@predzFu( z_{YM;&MbujZ{5>3b>npFT*2_pU~stp_&~#i_zn7aAfAoo9sRTUWn|DT3Vpqqj)

mk5kU@y`l^;L4-ED{CD zA`ly(_XH^jM)2P$>#Bbjg$C%@USelW4vCNMo-Nk}Ulydsx)h^-`?|;T^z{f9e;^2t zg$X`rmpG8k3%go?ndpYwZIptcl{&Wbh0PF#A@bNaugI1-j3h6ZE)(Qkp>NO(X9bTF zIIH(GXyeQ{u*E(#g*2fXxz32oLe?D#(NxXKBl4d-!;|1|os3w9kjos=wCclpqW0t1 zP~4l`)+|e0HHr#TFnUwCr^PzHNHohk9K}W9!17gzGV4$j)y~Le|MI*54AZ)D+t$h? z-()YP@{?mb9WK7%4`t&`#lBd^LuA-lv8=OVrt~LVH%p=xTc9 z1~f3xfDyqP`60Y4WCdp$30lVkQ>L23~xOd(r9 zZK~QX@-#1Nd$`{EI3$l@7=OJ^FjPs4G8AxT*e^WEiMq`v>ZGVs|UB299vdhv;h z>GqC8SUkDD+J5pYVndU1pSu_xj$WdWo-V-rg5Bbf^qo`JS-K!?1{^=kW;)uxKrVJ# zg=poB74Ih&PZ_3^|r@h*0TY@*WU$0 zBaCCsVE-15x?-0E;Z^M7+w6z;L8R5L9(lF>SYeeW6QEmkoRa30uF8!r*B>BC$dol- zv)z@#jP*h*6%L&j$CnXZGs((Dqm1q?kA*PpqzYnX<9Itm_16 zG~HWZQ|6fr9Gqs)C>)q5#Kfe%APnPORU0D_f3^Kl6zZDXRu@eo`n6>0vEp1r+;0j~ zcqV%0_PP53q3|(Wbo_!~=;7z8vB^EthKa)Dqn&1?fZlF*rV{BSchE)lD^ElfisL!O zl63r9q|hS63UW&surZq@8YMqB?YVjOz5u-P)=QBu=mvi%BJ4;qf&}stzXpc)o;xLM zbv`FTG%{Eoqa3=*&JqRwIE&aRGvds^7l;(jPvyA(~Tg0 zv2OCQ+d+@{(>oFV_y>tjXMrQ{5<}1Q5Yiq`}DFq6b=nWOCzphHyHB_n6_(~4sslFBBJxSBEir) zm`7%*91*mCsBej!gV!a)9qsi>7_}O^Vj8Z&Hj1Kw(%2G7<{OhumA1>X*V6ITh4z9& zttVgEX0$e76#(uzgN)o9*mp%BHoCwyN&rq};x-C1TzG}i>%PNdbK?H_^ zOXte%ovUVauAF9H5j_-#FU8}tz==FxTVtX@1h>T=fw?Du)LvPF((*sA*YOKM&C&&;) zcRps9c-+aFxw;i`x5puEDX>fj(J^+GK&WVRHLb~VMF}$pE<3Rp%{A(^qJ_#|!!6D& zsfVtOR-M)&ie4?@^z5?r&O2|Q8alqaOa?l{5$aI_Fk7pzjHd$dG-I#{Q zRK}LQ_JD{CQI^j*gJvU^ZI6gVF@+s9`Sz%Q<_MYk3_k6Vq0Y2=(HTowY|o1Auuy2E zWaE50E=Wno7q)b7o`0;h^;q?Tfk@3GNvkd%|5>S=<2@`|gVom$MP*BI+ckE{%u@;C zQ=)Z})@gl2rR40$-9|s5(1|h~KB>UQisFNbgZG5pEdov0-TJ6CD(r1gUoVS)C{Vs8 zA{EBZc^~_mKQ2d4%+K}qcM&KdI^CMX0hfKE8cOIR*@U#u2vhqbGhS&pGSmJ~FfhP* zBFp6;i}0N@&gK2lPMD31?v+c^b895X9-exj&)`epVT1@2DUCgSW*TjrC}Ct?i2x#! 
zJ{OBdLnL%$Y|DiLQ(P0vw#}(05N6AMXOp#w24}@(C?w>uc0AUTMRe`(eIe@c{BkKE z*pFrbL({T{#9qD)6omwIp}b#L3DRy8-EB++-R+|)BY(61z|_N#6C3}ASR~jB+)Jsq<8xdNV8!IlMd`IbJaO3z2gyC)+X= z4prw{vzpKCTv2?$M$S(00d5bANKuDdU#vP@Z)+XW$GS+_AxMUAH!iTF{&-|Np3mH3 zY5trs_f#n*1?X6n*cU{JnAK4Fy2p1;UEewNQB0-T4@D9C9D~?}0-^C3C)GAwSYJ7* zt5tFXb*|lzN`lwBd@B`(;mrM2rTcf=EI~Nz+pC`~RG6YmF%^?<^`20Q-JIVw3zD@C zTy^S$viY+jQ96#M*umu5Nyjw1HplkKdBjr`QmuJ3#uo(10aG(;aYMC~CjGvr=&IVw ze$s{uh8FIex;#>;S!_3X8gk7oNZ8GSpmFJ9Q>Mn!h7Us7xslCm-o z&nmTdM53mM1lJ=Ac~3BW&m#{qv%`|y{_g?kyl=9;9a-T1u?Vo9)3H8gBLsScS&MqC z?O}FruU)UGzvf5?+kmV09~L3V>+nDUBCS;|?32ym5< zCD=+&oEwj2aj5t}>RB#S*g}V0l}o!S=i4HOk;%$NTO|m~T~lXuO^t4-H;4jPG>$bY zOfmawQ0e=?pA46)?IVAZV^-u&>fbLd2&wZrwqk$7fjryU4+Ns=fvb;pyHKdyC}5$( zV=Fevw@``0=+JeY-75$S>2Ve_l>(u`^I{Wmt|07_o6Htt%W8$K6EVDNENt@mem08I zGxmc=hwUPT7A?q3=kRQ&2vIpK(%Mt8lVX|d)DMXe3g&3B*xnEhRd}bd$=>#dnNbq< zK`JdC!?7duP)J*nkf7WCPmuP-G`Rg-VZ9L<>~RvZmo`)s*g$umTag0cqaBUS3gki& zEij(b(SlKfkxCR;>73eNV+Cnyw(U%Zw|REV>@9X@Dm$H3N9y@iqN)F7L}y7?Wvw-e z5~_;EL7qMBDHw{A;``?D0gfo7&_S2_{h(670ixwcIC*&2A$q-2**X6^mESl{PwjgT zlR9(;l7txhW&5E>p~r?ww>~evy%zz! 
z8Z%gr$qHq&-7G4@^D1^x&9+NKnBhRC0BOzRf)WNv)na|JZDz7{S#&{&TWrxzK1x7( zGRfcw^RUtf9Trxba^HbR;;5Vdsu-9q+1FHL{}c$kj6!V1)dY`?;kMB}x0u*4)TJhD zUl#Uv3#K@vN+rNDNy7R#4lY>A?3cofNauZ6d zR1o$iaXrUYI@n)>LG*P3WcJ)tGTMt4UqUZ?dO^dEqfm zgW;#7P#9e4T%cjQbL?a3m%-0l!l5DGtXO1UT@qkf5Gb54NcWGAW!bF?hc|Tu26eL0 zaE(ae;aF*o-Qf?jIMSW3e8Et7cV+wWSsUzEo&etE&$3dG%IC;u?-BtRFO8&>8-%D9 zw}PaNWm`Qq(Yb48p*=1HTgk>9_JqQrlxV&6m^~>fNTj2ujCl0-o^-_ctL-I) zshbpRnO#nJ6xNaKw53E*@Q}X!tW;r=MO%nXHdQc;mBNB@oXk$L4W2r)W0Uu0wpES`5W@&D2z`u0z=gu;QD{Mf#EHaUeyah=IFr!c1r>DDGJ zW|~(;2F~W$0jI$2+l}*4WU$vofr&w^ZU3H%jbudI?CeKF4y4a4z~3=0G({nO{~`VD zn+nSgVS5l0Mh#hImy68n9o?GB#`rYT>uAwvYcfjis!Z-&g!>E|DSdxs37nEg#&Gk zf^ZOBxQYA{v4q>=$x-Ljy@00#LjxDZ_t)5K9?c0#JEDf8f)vHYDdv{3MRDa* z7AC<)qI%N4wThT9g^Y`{Am-ckU6J5RZrr%Q_fx@8kytwN*mN70dL}bm!nZ1s=qp~% z2ENI7*YPyboF9BB&Gj@_;=7q8R*;ULA3Web|3ab+KnsDIihEf*%c0vRgxpavAS3w%<#L|uSnKgt}f^;U<+vS4( z1=r1A30KF67KQ}=J#;T1LzGY$DuG*VqA+=nnrQbc2v-=N?`X7{sf3av+6G}NaCkG{ z*aiX5w~6lGX?sM0nMhxLmHkUlHOyo8YoB~9L>8Cqn_h@JI6+!VM&n}{iH@!L_H~iG z4KVU$-x1aa$SpiIa^h{dp3TwLc_;SHw0i_;Cl;r5yijenMg$Gh%6bgeaLde|6a_ZY zOLO>GKP?mrDVBA-mpwKcmDmz{Rfrm}P?p&3eZlZe=BuMiqOuk@*ZwU!J^1?Ikcu}i)iQ?% zw?;1H8+&4_dFkfZA%Bt^qcQu7Kqx!8V;cH)_Vsm8;Pl#|H`uoorXd)xQ>Iv7AzBWT zTeuBo7QI&lc!&%W%@TlzX#2IqmI{(hys+5*;OUB`3HT>Z7uo*LwD$yk5!2k3a+35f z(J2ghzpTK|w9{%s{z<$NU|jrkhbXeY-B@Ag3sT%Y=!z}y@CeMtE>qZt99r<$@>uB) zqCU)~)tUrD?_Mcs)P7;g)u}zw&MNCpCCG`F!>}+V9c^1;XRQz6@mCMCa}=hnP(w%9 z@lBy%7%P#FIaO?QX%hQ^$a1?~tBhhkd z;ticA7^-z>8Vl1M_JAlp@O<~IqYPW^+2rEkxX7MLrIHDd=9=Wz8Oi2Wd*);LRpDxA zhaC__uSuOIQ3Pu*ivYK>w#7Q`Umky50h&6J(|wT2BGO{sY9ERMOVU4(B^fr*Qb({e z>Y(c_)wZ-UY)kD-5ko)VXCu%03x^@km8-b)Xoon74_l8;-1Q>SNZ-4(AI45^5`t%j zD<6IQgx)0reO27iY6}z&>lUPN1uVqbDl8Y#hX;4NIh*V7M+Rd|>!8B$K%=*CgK0MM z+;>Hy1Gy`aZl`SkP&)man``F^(Lj_w9E+PjO2Hct-~USRduM!;Shp*?NBtgMY3Mnd ztxy`cDTX^-S@YBbp2>+hP2J1eZLJ{n$wY>_-lHOE$xeGh5MD6y#QOMvP-smCd8xfB z9ERg5!#bQWoD?}oD~6)yC38cprvE1@bxe;TG&-yPR8GZ(Y#V+gtj^Q9BS-i+8mzI4 zM5OAxs?7|Qp@P)olmSZ{k;(-ZL~+X$)?0=ghqr9875F1GPqMa#1-H!K#NvFw!3POw2Fsuss4MTX3v{$2!n 
zCNGBSkp01*KpY|SZkt(4b$SB0ykV^UPe^Z#-j0>Y)@@NU*Vs9mpsVD%R-7Z5IXv0C z`Q&4}ZIB>^kO%tIqk7V6K+j2ol87B25scklk zKq=i@R@jRQ>urt}Is7)*dmi1V-vIlw!Xc~cJJts1QleUE^iW!_EhJ&=OX^?6op>Cj zqL445FQB$WU#mL%g=p|GGS2K+M2+Gikhx?C>_u}0X_^V8h&9Uu;Bnuh1y-Rj-B^N* z9q(U`a`R>9{b`}VU@>B%cLZT3GLOS~-~zc$`cOoAr*q}%&Xs#QS1z@0G(e$kF{8P? zqp{R}C`{3p@Ml*DP+Z6MGTBrTqJU&=D>w9QtRQ*Ow#Z0o4!2vTQS%et)(KJ-dSwH9mvtg&`q;T7a=X}^+_}_tcrKKbHlV+?Ids?B#|2&N zs~Ho4j;wL8MXj*!HJ;)z*(fVBHd>T0mLmDb&BD}|Lxe1wB0y93GlAAht?3Stq3 zq*&`%Yp;6JV^ddKr@}PMI2@X3esl=Cy!7_PrVtCf9z!tS5~K{&0qlH*RomJn$=a2X zI@b#8Eh5EWVrIS#^oQSWt2oIq%asm~&uZ(gUT4LEdfD6_P_^ABidtAQjv1`x9uk3> zolm0l4Am?$bWkI*=0;or>B2Y*pY0-1i}H?|Lc|gK9K!ZU#`~Gg#KLwzUWK1397ft0 zWQxNCHA*&MW5cfXgx!zKvMCDtqJz~{^qTN;+8oit+|A`E9D?LZ{cWD8WJ|(!=IRFp zX&e~9A53m(bhU7~h&0Y5y&3BSsD89BlSZ!9iUOCpY!RCjhOI&(3T%%MnPVeUS1a}l zhGB4F-ys*-YmWC<73^QWZ>GH~90qrEJF>#g*m{aWQF2iJL#LqY8_7gh*uYepBTPkb6^uiJvsyy7 zD40f92WHrXqL3}VPr0rtpoVk{n#*NH>qZe7B%P}^*xerPp1#X66ej1A80U{RK`6}I zSY^z_kyG3XL<<97(y9_cib9`#o82c6hHK~4hnZ|;Rd0@nbVH;;#NmW(79rGTQe4ZY z?cmTX*&Q`~-G5|Ki?n@;|8+eH=7NlJ>WWwh$ zkCW&IwcH58vMk=@TdN{9-37g@M*B@Fr?I8k)+HF&OxLS+JcYSqyVm|D?5xx+PS)+V z&$ga=;&urN&l0A*>=>4a>>Nj#m^Uc1^BhsH@mJapgs3z_em6aRp~LREyWU#5#$tjr z=QZPRwQPk0SFv>DLlGu!7$?6ED2^IowLHi$mWaRr2tKj)Rc0h6+BVT>pz>IDuI&)u z>2CBU|ERFnw_|gzoz->xMaB zAz#9|=%j%oMSIR4K|}fxJ19W?5sr6O?6#LZE>{O|NrBML#T>BgNe>j}!N@KF7NuD} zz2%8gpThS<3Ezo>wM-|Z;-mg|QB{yES1z;!6{KJtK_@8OKHa=Sobk{#H}N_wyL z77QuiW8>6|;SvwEXln@Vym-gOgP(Im8cBU)(l ztK}gvUuoz;1j*ravRi~u`AFuo!~P-)O^tkMr+p;AZP|l0s+~T5`gF4j{PZp&s6b>y zV6T1GA@ZVmh}?c67>bptBD+pF%s<1JuyPA@@;X*TxJOT$$?_(l&;+c2v8}+3&zNWw zf})j-kmU)&na-)~Q3wAggZ0k|P>bO5F}Y)zU9mf)jhC>C86!Y19Bp52vlJ#z-U}Rt zSdCyP7C+y0k*Sb%qJ=ieNW^ncFMrA(I@(%seEBx}QacG?Bt3?v>Qw@W6Z`$tDitJe zjLGSMah+gj1+E{juomHvZ(hf`eCF}rhyr7y#tpM)6!xW})NOI%$ZANQh^p*};%KS~ z*p22$*AW6-?0~!@q(;)s{8jch!B7awjTk($GxtEDkbyYUy-MLw`?2waT`x#Qki6}+ zQJ$`(1LISvbX=aaY{8Hz4myx4Tb?M?4M|vWE=R-p{($cJaMU)iN;wAVuncbimd+VkK?X97wJ6eP5%>xO7QaReQtVy;_#+#_Dz!TpUI0bUG-#i&S!v`&Q3Qt_;E 
z+bigA`!e1Gtr*(_HH_=-*{hGu+1tH#y>%#Q=yood@3P+r>-wN>vuusjG1y~9(xgv6 zPE;thAbldApD&3(smE4S9Btofqf_x2e92;#DMDDG;op)g|1m4^G}sf*?`UVQ)gXcf zOUG^`JKKe*er6e~rq=`lYr{tjA8N-uk}GK?_LhU9(scF-0<>d~nTyT#y(a=CsEtYO zrLQPjz_4=;)|T;E(@#X7j%6SolM-B%-(pvZ9;(XiCeChd6M?SioHfnvQ8@HkG~z3) zRFv>uav~HgqTMH2Xze88R$04fwEu}ni*W%%8_lzyOr_b@lHAs)YgXS7k+K3FjDJrz5G+~)vnr)kCRGin2m(5=B z>}17(`(;)3FA=Co_aY2H?+a@tO^EGdWKQevz_X3d(H7FvcNn+J_o>BsMgM(wKT zSB7kLF{{BJ>E5#P*y473MbLX+vw@V`_S&nWhAvDS(tofeh2bYJ##zGWpE?!Ev3=am zNnx%R7Tec^sc=Clf<10S$L(BE^oq4^?9ob?X+uRM&yoar{#qams~pt0itIrVsA#fo zk7RAT1bH8InS|T7JMf7d9lQR{lhGcWvfEdF6OsX=?EyQcHK=Oa#iG-M9aBf!B?{9! z$V)KK(oZP#PJD_CaoR{x;OpS(Lyy=5fiP2XuwI1g=RkrpwwAKqHqo=o%cZ_nBt%Vh zD+3oy!LDbCCz*_Nl3Tz+@G=;O7xNV+FWIEPBF+nfw1O;oAer1^Z-_wgoLyjIG7QtF zA@Sy3^kch4L7qO^`f&G_?KVvi#&NZlZ?ha+o5;$xhXq1+X2}KQvJ}3mVBacxQ8+XL zhWWAUWv`18_1LMqElVw#dJI=X=$Sp!Qy`M~O9i41LpWM*w+WJEkK$6D-RlXW`y#uo zMUZAl6vnJuVd{-3Y-#Rqv#Q@h@Oe|Jh)t~3R*9w=O`Aun6$(|lp_C8A3&QHP`Fw0Y za4;#xxAe4U!*iLD_~I-fial1h!TNf5$e@1qQ-xul<&%`gZxjsKqXM?X_6djaIaC?h z0TJjKmY_^C%&RSS#_vw0!9FglvagE}YB7$TbfKgoKNgL0n1?HwhX*=5yha)rcBLS- zcxf@dX>%MDOkkP0NFaQWkn!wcmJWv{wW7nAd8ljr{5Lu z_nkeIR>J7!f93a3DR&|pWc!|j1IzJlc(DM(Eq#jhS1{xp8FOi}m?$*niJEnmuP_zk z)qA_O)n*EYIeyr1+v*9qn`f^n>@{I+Ncme4Bv;r;&w!0z0`1#>2!jg-(D0Oyi+@IaLHI#c;{DdH#3?r?N@mzQa2Z3yCz9AIa`oMa1w^Sf) zu&8=7nyaEKr7kZnMG&vTxhJ+a_NgA zl7_iYH)Osh$oROX$S(94Rwue{Gfa>s!zEn8MmfN(PmbLr5O_d3$92zNh@j4xS`$s! 
zkJz0~p-GbUtvctq%PCz`D{&9gg-kdhN*E#7;Td4-ghRGiu41OH7eUQdvDl8M5+7WF z*4zo<(8B$D^|kLG3Q30y9AZE7aAH!a-KuaH^Cw$YyuEd`-6=|VsZkXuiL`w4MPsaB z=|YPCLS+5!vbiUh&$lu`+F`PUMGFL?VaE<$cUWwVD8ABcd?49~9DE+X_JZgzpT#}= zBMOIJP1bITG>u#BxQLJjI0E zg2{Hq;ZPQL-Q`ZLuORJ{6K%0>5(>?HeJQ(+bYb}4x12jDS)Q~2Ezu&ICj?(oTP%{7 zuQg7f^0+%d()F|mv=WyY?RN?@DZH@T4thF<3Mvdb@DqVTWoV;gL)Ue|vp6pkYqLEN}U!o8Sv!Y=Erm{6y2lbE0Sr|>nU zXh#hY4x^Vn6mmpPzAqDvLdzL?cPLEfpy3t6+ETF~^`@F!7GgE=E00&FGyNVG{MgHY zZ7k~)O|wA(4w$SvmBTIBagJ1ud33RSG}#Fu$~(Du!O5ydJN+eK7>;s}f?VJmqJ#=f zV8v=T2zv$gL{(_FTT^K!b!^VCJYg!-FD=dPRhR%y`ywy z?0kDph)g=RXW3U@4w<}DyXl>Ey8{fZ!m^Z0*B2-rCMW9V28X^EJ0{;NRm(mp%wB4z zjraUcuCI9K!D%){ke){Lv%>Coz{YC5P4k3U7DMmfW(r1I!v^0Ph7BUS<2psbE?Y46p9;L zP|D|;h2(w_$Pt5iynprwhqTT(oKv3^45N)>qL}^bW4NGvvVHm0@N}}aO&TJXJLKt{ zqDgkEV5r+jPNU;C!Q*vG#jKwYq;<<;nMf?=IO4Po;oMxIo^QsAV5!wsix?(s)HCyC z8M0XY~N(^ULjz4&ermp7xp`rJ>e|aX?gkQ2&AUv|_x&;*-(CepZCQ9S$ps z^Y?Acu-}VD*`?Z-9~N*$kl?E_l5iq;dDW9~KwoCZ1mHh2j{n^BB70BRI}*{kc0vPQ zg9IO)D`(lI9zMQ#v5it#%_&XFHrpWxi)>7{Ad5lwT7)phweH7+TSOtJWLu@3{(5-& z4@=R1SZrSu4DE=$KfIr(@TeG;?8kX>hRneJeXP_WVry*G+iXFa@q3r{MYnB<5S;L8 zMv@X+DuOztkULecdfMW0xs5$46w1A>fXVc)!hz|`L}4z6ke_htY$Nz#Y&+uZ zjIfnLu<*(UnClzp8m+p zNIl^HAC_{36YUU&@DUMdHd)`Z*8~E$!(?>zPr|-uN1IvNCA@C~--H@!d3@m` zC#t5}a8cko!L9l63Tv=p$w|U!D@i4Ht2)y~3B^p*5ymQiysL7zH7g81haWq<(RK^a z!|5n?6x-`UG}Y+zi5e$w2&y2>2J&Y3#V!avb2e6uGRy5JB9VsaN;a@tY_JGqKRJ%U z%R2<%<7%!Zs=9H<8n`&BwjyDwg~eIBUtui(8fzl6QgtXC$*3r5M@@N=s=ylB}8u_l3c_8qxOME^SMH6>D$LbwAARBCtK}1B7_cSmxUum z>^F)Os)5)t*RFO8?i0N zws&ob^%gMWd5aZ0k#FD*>NA_xZnya&I!BU%udwZ&l@74i&9kQjqh95G ze%a$_kvA9bwl@R=Q}pU;J0=`5(?w`LweO`;1R^c>--@~<-3N&af}o25XR`>#Hd7sAn}mDZAtSYrGrY*)mvCPwoMMo zOS!9+6zE|!5*EJil&-2Bku<2<{v!&!ai@YSfM>l0qQzlBQ}%ZS_3=DqhY#)8zMdv> zbX94W3z7kXfkc7bB}Cn+1}ADvd-Fta9=1uh(q0pV24$>9Ry6+fu@n!n9p%gP_Niwy z2XnCUX;G*nn$+{`3j(~&L)fN$$-~@Jus#Zhp1MjV6t5SCGd3tl6~_xPCygnxUnr>V z?%14X=?-E=d?E`4s6Xc56SU?eht%fw0(4vjDfpcSciRkw)ikHf!nRb}dQZ^3=s|nh zpXIBet8a${DKpw{bO~r;=MHx?vA^fBoR62;Uj@Qjg>zAy{iGw5dEXy_6^wT0W!vfR 
zgj8LXxZIl?8EjZ;SBOX@4^Fka6%2~=95?a8M7Mh_Uu5{eCjLe%5zxHYw8W+>Ox4wM zNI=#&j2wE@S*@U|>psqEleLYuSw#BQEhksbwZo#2JBBu6_M!l_?AVRBM(L_YgwR1b znyJn@8B)hG;&D4$0A9M+RogiVhlC}Q*u0!4Oi!juP-fTqW2n&~;}w80gq7&_L=xDY zmhHsIaE)#jV~ECbdWbrhZg%nBY|jgqHVJz~fcoH~_C)2va~@m%y74mn_PUtrG* zgo@{t;%fVdN3yAJwpSdW?vrhQ6`*Kt`6J!?NQlys>+luYY@hrSkcwamBU#^^+&Is^ z?38qdc^r3hF}qk49zhVfiSc=jLu$&Z#wrErjWJh^x7i*ZIJmDZ^YDH5^|E$C>1w8(FCdmBkXxip^7lPDHU)*Y zAA2+ppIox(Gi8E+I>Ws2Yn1Brmq!VRW`zY4w7%G4}5fJQv&J z!cn)O_4v9#^jfcqBqjQ_zxosTnWd3ovorooA`QoAojS(R)uV=v7-_dVg3%Y+EnSFa z%#Ekp;~q0qfA*9R^(Ebr=}7pR?FFYqny*|sonzhpbm#0E`@6!?3rPF8pF#Vdh%`Bu z<+$a*PUh0TK%nMbRqa}jR|vvremtwF)Rqgu47*#a@$lPwTJ2x{5Q{KL!Tu*0)+OZ$ zER-V+{M27VwzA!hHNtNRlF@)ZX?B^1$MAmNq%bXcxPjGgwg6>kurA5Bgh!zYwaBLW zGo4k{Hcw$+q|3%2j$#o8JR)jXm~oD{C{h4hDw+nYjCXFZr&CFyj|4hH_MB*xnT20d z?&u3?(JLPC4%q83UGRgZUa+amZ&t<{@pC|W(b7x^6?pM z4>``l0GTcxt`??Ruy@la+biU*Dx}Q8UAag(Z#P9Y+KE(N!(!-L{7INE!jaW>+TRJ% zZM}vJvL7f+mPG|pLBByrO)DdX+bq`;p4hU}iWR1Y$JRg6RWY4~mX&%e8V**WaNztJ z)cS0ir-sw$jAW6m7lnc_$BOWxK9!zp!?|`y7?$2{thPURN}j!Pj2&@=`OZYTeeQkI z!JuxJ(6eVcjMTg5+t&qoH+V9+o_C`*xqiJ}=tT9qu6to-a;c?>NO|4W%WR0k-#z0z zJ*D*xzuS^q>UxY1SKA0BfOP~B91UG31Pdh3z*f_3f~>~E^_9-4s7#=CzQ|&pOrksF z>XQYXHMG9ulO9)@B8FDGmQ}Mo;?L-|9)j?df{YDT4m)j|K$s^`zSFJVCq(JFRl*8V zmh9d9qkZxpAvJ?618=!(JH99iY@^-S##{3vLGm6UjiGS@@G%q*tMVUmw?9o4+5gJ? 
z$XZc&4}(?Z5+;Kps~3$CPO72lbcc0_KoNx$$|mK_R4#|t@}}&yW1b67u+5_b5p93* zpFBiQyj!`=zN&C&+Hs|Y_8mcZEXYNI*U#fHFqw|U)$0XAb5J7=M?OxGBULuLxk^L7 zrN`9(zZQxn&ibCq)jRBA5ve&(l;-gbI4sOm5sY4SRpP93wH8*~{O|PK5|L8C+d{CK z-1e9p_WarNp=N1A1`f23gjHDN#5l5qU1#U~3nKh*b@QficBv3KjEzl*@$MHoB!`Zf zahv0iFB2OjTO}BV4;qj~!KUl8sYga(1EAl(NuwDjk|S_3_6reczoAlzy4OKY9NG5+ z0->RQaN+kp4(~x9b(vyA+eYS!*IJEJg5q>%MYFvn3WZ@1h5KPF_&*XMd@T?}$ViNx z`2i%#*J+}q2~gybs^}K#5J5H2#|%5!Wg>?6;guQ4`ez7}r)2k%8FDIK?b%>d44=f- z>M1nyGf=m2o#=6qG|{lj%(pI~kR{^!a{J1EPT_+K$ZwQz7{v0P9kZK62wxl=mS}Tb zY`2IOn(K~!9AXMmud-4Wx48mgd5xg4ARoQhBKCfhMP^v{LpP>O;b@9j9cfi@A%)u) z3++D9HLv94U@M?Th(h+)#PV6st`Q79p4`4Gs5IMeL<~KnEg!d1djA&+_2Z6gbUk~J zaA0)|Z}v|GH73wfp@ph!w8yiYUxdcgjSf+2Y(Ysi*P-k)@V&`*sI3Xl)(BGXS1Zw! z*d!1d7Cr4OJCedr$m{InpQ z-KXyW>!omb54bE5=`0NviRZg2=XS4Mrn%%&f1YZ|{_Qw_Ci;9gN&DIE5s9|Wm00b5 z0UB!9h>nMEu$hi%+0iAo)t?1j8QZ@~>I0991Ur}X8Df7?IE-&j5|L{BPXsbfm-+UO zJZ0ZLgX~8i5#f!fgOMs5A_}dBc6VGlabtzacSu?vyIo?h)P+bt{y+s)(WtW zx$$b-tYGNXgNqp(TZPFl+(ThAX*)#;opB}K8;#Q!MWPbv9lPv^KS0~z0<7u4ZtD_- zmhO$o=7IM0PmJuD3!*8ArJSXS7GAL7ydswg%I&u%j(es~gyVGVGbeC2onyO2 zqWf8V(d_>SC|fYwb5QoCCkrxs?m-L!x%RZitX%A0w^FqjyTcI z5h5yEeew+fN_%I0r1A3;hsTy|;0tWG2!?K+q`mPpf0V<7B3s}58MZ$fBr8awQ?o?LxZi$Rra}0K@{IQel!8<+B>bkjZ>Hc(BIp} zL18uDs7#T<_d-kkND>#(LZKO0=RtUSk5akJPS!5;LBmj$6^TyG$Ha={_;Gle9I9wrSh#Jdr~4b5Aw8t%_lt?RJ6at_M>kO)6&Kuv@!S zF*HK5Z7L=wZGs>SFyT+L0s*}$1z9ZI?h^{_lhd*G|50@&@KILR-`8sY)>^IGY1OW7 zty)16cI(ox2?|IFini5ul1#{uWnv~146QW@TL2M|ecu9vu!WF>5FoS$v{s9tQgCUV z2niryZEbC}t$n}0=iV^Bt>klm=kCjM=iYPAQee*{@rXF)hvy{mun>kUmJ8E7o^nv7 z5BQv0MRG&hUO8F+;koz?5ov*B#9O!7TY|J8G#<+wpLvkd6|3$){Xb%8jy&%a*e`|1 z#-%|Hik=o^0hL5cZLY$dqtFE7E*OYSw%Vu3K=h+VVfP6Q;w=I554O=qpTZo)ZiOkG z*D7S)twoS;|Gr0TuY#-q&LQx&WbE|(iyTFp21goQ*F+&VlmR?)UnZ!%Q&t!-BJR8? 
zt_wJFwf0rTx|S5m&<&PtQhk1|ItyVSKPu=(KAMl1XQq#)_^f>kY^k8W9hm4V6z<#x z;F4^&waLsQ5D&Kv$rQn(=Q{hXD8WbuqM`2yYTt;Kw63ytAqszR_|qgH!;!q~ZjTF7lN42OLz6AYRLdHNO%QOyfTkBhY;kK5 ziO!t6T-X^;p#?HMc%a#OUrZX>m&J;UC`@&j9e!yM`Y02GHHo+h;NuG*Nl$XrsZ|tJ zBL=?Q_WCmVaEASZ4`T()4lC>iGyEjy#4I}|3S*Pq+K7m;{c7><%GXhofIaVtq=^ zvFlI^d{mHL^z7Bu@_e{;YPv-gc8z7%MfJx@MNyNsEDG-UGVNm(BGL>-Fc#0W+2_)1 z2-5Jk+%}0w_3ZuAY=_T)-NG{qCfPwjC*KlibxoGsi`1dE+WU&4Woc|||M0m+;cqFg z$o?m&rbzU#lbQF8OCV}QX3jn_cZJ>HVWOrYQG-)54~w~cAf<4xpav)=W#b?D$TRaQ zPtROr0|lu$rRS~o3x&04V5+WL!MWI`_}J9czP7<Eeg8zp37@!#9mJ3&Xg9}5ubo-dPVSre^dnjo5^ukr*Qj^Xl|Y|B6(S` zp1XdkIB%&+=6oNp$F65_uTv`#Htt+o2j+KL%Fc#xdk6?ZAs8cP( z)bPP3woLLqdA5t_e*aIEm3PcvbYkKv{@PyAsieKK_IUGT+b8I{hDS(F(e|!Skf>dU ze4U*Ybe~wPb;DR0I=uKQNHjXWA6C!h0#qI-BW*y)T<#ek^C@J{*w%Xx%i&39j{w!n z`2i=E!#?ity2GpOsE@-3MBDUA>+o?Y@@Dg?kE4(8*OfWQ0N>K+*(8pRv`T0z&4;xMnb)xz%c6EivAHu?12CMx*Entl4Ba0XBBQTwxx4%mM7Y~W&YsTC!2j0j^pz&0fbl9~=%tIvS` z6W@xxCdk*(xjmjc*IwQ1QH9+ocGPWe zpRvGVqA&`0@CjL5fW7yTfwnT4QZ8jVST%`47XWpfJ6;r|*b&GHA5@su=`9{dctUQ| zL8g^GCO_Xk5RDXV^?Z8kN)9Y+HO3=R)jL*&#vqzI334VNsSCIbPbM3QKm*=UM4<6#vUBGRSxlG~ zjpf)rPGMU9)KkchP8ZOM*ja(;J_l*QfhO8C19`|Lif2qI2`|XFU@Zmgq=4eu z1OIlug_VNZxv*@*q-~C%>rPrkN|q%exb!?cM{Tt*rKk2xwJ&{xFpVe(t~2)tv5p_g zvmYr)d-6ie;r#+Zk2eJ$;(8m}87(GnOu+pTE0u_?BB@O^nAvTxy*_znYc)!r z0g4u^8~DpsD0%-Bys)1=*v92QkYh@ zu4}E0*zH2pmLEYMX}tg|5VvNM4_L%a@SsSp0qC6D89_SLR<a*9+1+Y#rp<%>s;cC)3>S5J9_|3>;!}ZLI`u2Ko{b)sqs{ zTO8CZ;?^vf$?52Jx% z8Jt%NX-!~4m1lJrWLrg{*`YkN1ZsWaXl5qQxUE9&cZ6q!DFKVEF?grp^}`PMqG(t= zj}uYI{w@k%RUe)iF1wzvn@f2qxYEO5wx6>*1l^Rx@k-izHFC1d%Gc{RSwE0*yrnwzrT*0pFZErX?C1 zZIUPycIvqmHeX>zqyWE)w$VqlPK{ZUkKm1ev%Mogm4$IMS)z7KP({{OC2H$r(lO;m zD6WqKu?Jy~3)6&R$ywUeWNdJK`Hir?4Lz;@L4d-J@0!-WWcji3t@fTMwD0a4eqbN^ zSlO{`#)#z<`?n~xgw(!$>@J0A3f{t5t3MKQ zyFvyge8<>*BDmqpg(=MMZJ{Xi1*u9zT2KZQ)>ol3gzUQ{zlv%_shgZkGwod=zH-CgDZJjeadGagGS~gg&CSe-5Lyh+YTSqZzPUU6n6a%7O;Kl(=u)kM916M z?-5ViVhl^WYfPK|w(KPrI<{z7|!Cn!hdETSK(q{W(G7~49EDXDs z+Ud{H7;|(7Lw4Es$w{y8=+VutRhZ%6)*G=w0@Q%LpcqVqd_meh5Q#?y8LJi|FyzRL 
z#K+l2A5AZY@ye8E+eL5#218#$=LdX}U?XD+tz87F?u|3ce<)1V2R7kR;48N{@Y;gc z7uoj(R1fU)0!#N%a59Q4%Y&oa-r8#U0<@PFr1a`-g@TNuEI)7tm2T%ekthi_JkRg4 zW}i6={ab4lpegxzZ0!nbXe*kIRjjxEx4Nj-^5J%$!bI)gUCLv>Jt*krGC!;pS}ua? zG;Y&%NSQ7QKhJ5|_PT=Z17Ue{3DNO2AtKGB)mU%U?Zetc@{d2|dh*jDgm=wjqSMBn z$YK3OVH$!jI2qFV!fh@pCTma%>r>!>TxeZ|NciRyM6|Y9KS8&YIB`npvDU`=B=FjF zEX?!pj}s3!9d5Cif)s}hEw1(ph3H_weTN4>&zAY*ZO^Q2tK4iWJZ#^(`($N}ec=bB zXIdX*(bxq7u5y^0oG`gxT;~(8)UiJ*;6AAVoJj57B<6Xx$OZ^An9t9(K?>5ZQx$8D z^Ku$MH}ILU5Dx(MkWY}PtHSWRRer9h@$CJr!s-aWj1nE)WN(T>Q~5m$+ZS(l(M(JG z)EVtn8|)jRF#2gj(yg1qR2~@bud?ofuG!g0^jbe*n)%Xf8`_zY(;Wh~PYbeOf}N}O z&~lqBf@^oMg2MKk&k@SQR6~Ky5v1Cm_I=EbDD3yJD6dIHp==i6pL~iQeR|qw3Nu+) zJ7oy2>;iJX?2?9=4994uT`MAOZ>`GAVa49!Asrox@u)6>RF~SdhmBS^c&lC~ETsIJ}s%N1E6sn=z)ZTsVmrCG<2nWw-==-vX*7-(TLNF%I z`SKpcc5=tJbb13_A*TA9ilW^2x7D6mxzrMZY6Wk`c41e5t0a!oGW&|K`?h#SLB6NR z9ubN3@R*t{%g1YTkT5Y09>PApc>kh^<$I8!N9SJ(Ff!x!&$c%drV-Mc;YaSchr!)@ zr?m^ZW<1c^SZJRLyImrJ+sFdD?hXiUw9YJ@cxLB9yIvGlO#1zHw}P(GPsI6IX4wxB z$rVr}_cS(21nLi1V3jsbkWRfigLCG5A9rXbzkSUP64$jmemIu;gz#T+o$V8H44%)4 z?PWn0i?j3;{9R-hcY~-UfWsfDe)D&7qTY)*+hvMnNKQ=LZQoVc_37YqOliH{BMLQV zwAPHZu^!;QF2hO%XntE|W!toQHrvOIAJ5Om0Re3{8T>}uUwz6%lNO(SEa(P;-In|2 zCnC6^4%8vPngT{PLM%kNnLXSLJ8`ewE;7krFmX&qKVFyyl%VU2zFdWm);AzV{d!?q zJ{DKx)|iYOfO8W2ld#vwMdpM-mMVtaDSk1P7>1y&Y)QaqX|{_zg@T6^X-lGRM`HU7oM_n+Q5ZXR`BlNI$|e!0d&tm%_8Xtv%Q03|*zZL-_aj6Xb)`jO z{+Q^l+N>Pj>jWtkkD7s%a=WU#E46(`we?omrA_J8!}`|l+Zbqx@TZ$s@zKvt! 
z`EmQUpj#3$prRA&O(N0e7>CDDam-SMU8nlx$&b`1pMX(Ce?M-I3TwdDZ3(2?s%@Ol z^6C^stMyl)oJPg9GekXbt|Ty1X?(alFpTkhv@_E`4WE{gYaQqMlL zF4&W{+P$K?c}`8a(<%gAcLLdxFj5thMRQF?S`Zs(b41Xv)GrE3S!8Q{DQA|?lJ>`o zf^@il|50|thexM9YJc|OMDu)W_gUH}ZSUA#ZU5JcES>GcnBtV+fquK0=nM}W;9?so zz??EFT;~~?276Q_*YEp^*ncvF)ixOw3>c9)qR>P38kxXfE$Av@!Xz9N*aol-K3P9x z>d|?>s5i-|H!F&Kvc9HJ)zY+5ML@salMQ3z;6tk_5Kd%!Tph3F)@bMiG2P{KDzsf1bGKICu#T43!D5O0l3QMZJWtPqJvGfz(kIYU~i^7mT3WFraHVJ8Q zNMgR)>ID53re9?~F5CaPBDk`{)w<+|u7ZxT8Z(7m4utv9G~`+GdePj%&xnjKj-uDZ zVL?RK?)28(S=L?HO;Ou~^)h!kFj>?g7(MzWw6JI_nSg~XfrV)zFoLmSzP=d(^o1Rq zJJ2a1ElV`(Wsiiv<%!w`J0m)MJUO+wW9e4=)Wgg{`M)&C!WRevxmV) zQD`rC*s)-3$F#L}*26^eTKGeD+1=*~?$!+}FQvk?ASWEMxeC*J1iiWRwFoijf#JCs ziJG;EnihLUM0$`P&B6!;{PN=>xb+yr8T9)}C=19R2~v6*43;n6L)di*TgF*-y|DkJ zWVLaxeP0w(r}XJ%A%$rR@)VpNGK8p>E*1nV?lKYFDjC5O2{M)iR-U8x7)?}C73&m9`nO~iT z>_*XCudp%952&y=fQ|Jco|9!mrA#l)fu!awil<+>1tGWqKM)Ge@trn)8_E;))xj@c zwGCFRzC>>czsZJ*!U)}+($gMOSk0}eWr{3YkX{XyQSA}|*HR6|(KWCrH;d-SdpXbd zQ*FP`!js?@dq;rc-o!iy4<{G=$iau%JPDrd zd362TG6VKwK{|{Cg0umIn8+hbj?CeLQ*FYd$er_aqy_rvMw5kYDRWx-X= zNdvgG7S4~@d_mVHnq0wbFZYzfXe`pYzI6>xWumy^xfJHwI$`G$B!i)!-@i8&TZyOOIy59eP~^H@^5z#Qq0gLYruefecMw)bCsdM1FZo zzg7fB0hS+%0NsVC5JO+fv8vkF!-R7T@qi$0<~6OgiY-6gW7;z|lwDvs!tV4L%FD#M z8lIa(3RcP{ClzZ8MA7u%WZu>b(0(0Bq@!+IM4&lY@qH`ew%g~X0-kcTrymi;uk=Zh zvpm>ne-%wrAy?Gh`?~^)BVzvjScs_CH^9+dCg6sAw3tUHlQ=!jW`5QuhSi^MR}OHA z)3|BD;IXTP)sdQ(L`_3tW0`%+r=W48WZ=7-Fir1yy95q;d4zdy#<%+NXt7NA7RoEJjl%S&>rZ<;W;+GkSWu)i{L5^YXztg0 z3cn=N`5yi&l3ScQ;aoV-!P6_(o>?~0E*GRktlV$1D+QR2K*qD4Z||ETuyp#5>Tg|q zxNYh@8}1X*cU{dQ_J{~n0YcY zi|%u!kRVaJAo#_vwJSt)pB&y@;Qf@^4I=4tzygHz7N8a!j+ro{#owMoK$bY-Y+vMu&0s9w_6)-23u#d$W{pG>WLKVnKBBq;~y1WWxN zK7CL8LtHhOyfiT?Uy9cXxT-iy=W(XJUKBSmJgJr1_k{gh`CPsnF&E10hsjj15SuMs zmyJ|oGzW>G?zEMLUC~&p;*_$SOLE#Glbm- zkW`v)^d;hs&dTG;17p54;spsA|S_Cz_X_|jW&GkuW zc3wy(FSd%n*3?!pv#o4vTiIrB_x7;pes|GxJ^n=Pm}pXKlU-<(=z|NXE`VQrfC$D9 zKV$4;R|~lrJzcx>3p=5^;{g^7CG4xEAaz~-aT+F7?-RFD)I+v(wAwoQ>4yW7YhN7(ruM9y){O;rn@73G3>DkcLT)e*?vVWjD-(t4 
zkMCOBv23lC3sP8Sl+Bl)`^i2#R+@Ohnt0Mw_1?iKAOhO zyw1nbnNggXcKGbTBH8_x%>28e(IVtKyfv7B;-(`V2l2Yx{!@uqime-vl~u0p!(A;0 z7am+?KN6tBG8b25{e>8s8}3Z8pDOIW^(|5|QW)*mi_t)(XNGLzV3jMI+tZ zd}{t2+wW74LI1=Kbb@^oVs^-b9LZoF@wo!;my_+%5fq}`zf%{KbGLm>MBT;Y{cB31 zwlT1lR;vg3Zz_tu-pi9?cb}KxZ=1Fj-gn#d?e;^T;K2cDcAvs-B}s(WGKA?+*FLB| zWC^&j)Z@KGBG@-8L{+Eu%sH`rmn{}lOYqzv_ttXTkk~On-i0ZmPme21ulR95BvOD)h%FsH6h|p>#pGXal_K0%2cuPA-DYd{rS$57=Qo9^ zprdlu2@IB>6l8_+^H9O=w%1OJKrOrj?6D7h#*XGP`U#CCPDWD zAm7-L;Gprgi0&8}D1l4~jGxZ1zxa~IS z$~H-;vr%-${tS%wW*_d`t-G}-%rJ07SZ5y!(5u9TvP5;Y^%w=D%W{s0%B_$h#qX|k z(De_|_9iP)GzG=JVA3G=qbGHleezIG{XS_{Xk$9 zM*c-`D>=>H5>b05*PNLBO~}=a4`M|9d=?5!Y+3AdW$b?EYC(?)s4s%|NUZ z-Pea*QLVejv(T;*azzDFFBNu8k`gHUFS8p&ab1Zm5 zrrRP>sJA=%k=<;Gkk%(ER_pc#u78(0qMuK_q*Pn2FNVfSPK{rI1ERQ9TNuR(*K5Lb zK+IXV27mMrVP{=t-xGA5ePoE8_b7<*Pw7JoF7#luERf&rL2)TFGg^RV1R_1K12DQE zirZNNSyCRV>TJ2s)3$R>#Hs|?yO4a|ffdee{7`m?u8A@`}bMPPgiQPj47co5IuH_2}QsjIA$e^@2{VGj{W zo+**(;XVc3OH?74%&SVBJtd;Qg|zT{8r_q|AYNBqV zy`!*ynvtwL4>Pa;`Nikg{)WfS6*CrhhioG^isHsTA3NXHTbLg3Fj@H)ep`~!50=IY z>_K5#9SKZZ|5V5g!iXZoaWaFm-P67Zi6nFUotG?v&3TM16xP@l$MbE4z_~589KORg zdtNj@wBm;A7row5)v)q6Cor_HB}>JkTHMYGyWz|!w7&^aM~3u(zw|hOoUc9u2XUK& z3^iZ9l%OWtARo{Cwoc-QS&_LTHM1>66M%yDm zg&m8UPfpllFA6d_{qSDF*Yi@9!QI?cZ2v zDZe0}o2bJzNT1aO5@@NSy0cdy?4>N!sUH-H3c~vuseE?^{xAYkU^2;t^ZxBYJo5Z5w^K58?`);r87-?G4f0_@I-7GfiQ}=ghJuIWhl2 z2*}uafs;LUod|By67@U-udr_=DTPI?P4LdI7lo$cr-uFYRv~(HV%KCFsW2UGZN#Mp z25yl!iizY`iq8GA>KxF>a%%B38|Mov(3Ra53%R*QL=0mJE0Wm<@IDdfN+0rB7&fG+ z-w>j()YRT~(uW6d&$~JUx?|nR0~-g|OkMx36WOm)E`-)IiL)h&6-{E)<;Pf&FawgP z-5xw~ZMUhSxHfW`B}=_e!b0I{Q-{L-E21#7;d~f8N0QOr#FpKkgxq;syL(*b_pOl^ZRI+GGz7H*g(UE1@Enx%X00{)So)vQ zy4qe7bn|j>uD%9)M-+P1jT79>_BSEwMGK}N&JW0cgc*P*;=HNcl@0I*rrN2A+KNQ& z3L7UP9ZFPBPE=RgQ-Yl~{;&eu;%;WaEe*S{RW@4*+@2JyUU;+hB2h_xxReP=#+Q_4 z1fI6v6v+*S%x>G&W586&ekPt_iXdatnW%@%Z9MZh=|e`367v@I|MSd%Ovg!2K+FaFk-!0--B>u+ryrssvjY*eQ_ z#L&p`=8g8Tpz8-dkBaRRAB{xj>57H+r5slk-pwc59Rj3HgO?>cGpQc$o6)*ChFC#{ 
z$5fmHzuTq@GYEZq^|WVvT+EqOvt`M&XzUeRl`vC|u^7w%Zxf=2h)~Du1p$gFhGX`+ z!aB`2&67RHoip^?_>PEf5V3zF?Sqq|P+GJ=p2I&CVo0$>#S*uF3%akQB$CNh`O6Uq zRM4?$rQNHr8;^9DboQ9AmJ-ZEbU?-kQgBH$6Ss{KA-|71QwEJzAet*ZLz-DN!}SrO?)ZWLEvU2*}mq{endRGJ;Ke~Uo%81gByE299{w!$bfyPX(mbY7u~?Ha|CJJ6Bd$<^i#5!}`9 za4o0F23)kR(rpoEr7^`(GOUJOHc7xQ$VM($GV8y?o)wk;bnn&2W-H9TlgqDu$mR>V zMXra0aHjmgF7(NZq8YYcVMfY1Sha0}S~nPBmYJyS!v2ekNZ+sCrw=>=Y_~sn7^pNw z?e9K2vwv_g_ND(LOk-kl5MbXa1Zhi_?OZ%Q1zyd++ez#cFPELvyF_#Y*k8+NurSTw ztCEoLOe+w99w#=|*spvfm2y!+4`X>UceGS*oWh#on$5wx{{nka6vmXxa=HCpfU?qt zr`xMOB9sB&`)vVg9S3g=?qtl~5kUnx0nN2#cK#DkTtl(i6|-*$lY+MJlwXTwlJAP5 zwYF!LbxFp;tz%=hfub-#;v3~#nMc@;Vih$DF*R+KQ)RJczlRikb^;m@f(&LLZoELu zJ*2nKh)9dOrlfXF?S_i~SrMq6vxxmmVSOPh56rU*id;%c+rNs(9=k*YYJ)H|jB9j3 z_o>Uk2^P~D?F!LcXGe$`d|eX5E}vlE^hE^QP>p@p!?75<=EwBmWa|AZsA04)&DmeE zZ;?d=v@v+aSGLYy`Ro|S(re^z1?ie(4G|I!+aElEM<1~o_S!)aC=i!|l?R_}9OW$& z#SIuNI$J5I`KZ9X$f{!%Gmcfvu{}NwcGBYpop0R)$clY$P6~a5Tx%jR_=;I}uaA%A0p{;VpImp}&OMAo? zD3*SGuxN#?_bFk~A^>j9f)pGdzkl++Ir&ulu_)*C9>he!8$7q9di!GB71O?~!oI4o zYfuRNHe@ZnE{YzRr6$2fB}H*=PCM;eig2rFG)AKAyTVK?jZ0^PMDQ|6kXT)j^%8;h zwpT2&zCJgOQ#i0?*&tym4lHUfWaZf?QRq9m>N-#S%qPeQV}PVURz)5Yg`teVPKI;f zlLt@XyKJXV9z0L*PQmsdikqg)ND=cGvOoDmG#PFDIlLGI6m4yi>UY+|S6f2KXm@WE zp*~iaRt2WREA3N3HwXw8ud?$>TyzWr`Us1EQIP0-IZR?*qzKbw{rDk2)X82BJa;Tko}Aiehb#X{{=<4IX4g z&}nTDaQ#WtHzXP=tyvV;q*1a1-{O-;TDL{*B>@IQ=4N7c#AkLFy2Q2$yWmOUnJ~sc z*7qB6E^xU>bRjsx$wI@eBGBeuShVf#!^d|%V|OV`Bm2vnT#=7>Z^1MxRhUW(#^i?L zwqA%4!0W=X^4YdQ&~+fwnJ{ed30hZWv~JI{-GWrtr+Y8k=fiE!RM{H}Q(Z{XRfh#! 
zDR>G(DBrFxg+eu%c*#U>Vb8-~nyF6{YV zxoG&PlX(NWUC_;~&bXLS$`QqVRr_1y&G!irXoYN4{%%oJ#zv<3LXdort)gl{ey)Dp z-)enK7T0U`hl7H2n`4^`(=b|_gXhuzDvE~jzB9gc)fhYfDKOE&dAH2&67-8siU&I# z#Wk!TjCiNruOyUE66Podduy04VSKryrHbTsDIUC#bwL`pw`kf~#Oq!iXby0z$Li`L>!T>T zbZo*bJs3XhAp@}Q8Tj2E^3b~;RlpVln^K79YBiHZ2k+xJ}e>l z{|lnKxy^ggioPK1a}j7eFHzS&4)00V7o<_!#YG3lOZQAm-se9~7L6`mTCiczSk{CPEVf%rw zn`>lo*t6~uf&S#_hvVl0jAh5vg|6R@D?KxJy$qE&A zJ%cUL*VYQt2)N*UXfOMeeY&UGUwsUhZ7$9iR*;TLkS6B(WLJ8qovUh#-6TkUPA-%{Ac^zC8;gxu%EuI%icJRq9aBakAQj8I{J zEVxi|IJU6j*=r8hTDF4B!r&qI4Yn!)#-n4}yc5-XZMz^9jljH8*!KF!F?stY!G28$ zx*0j$IPuKHW!5H&n+2rtyW5E*#==F&{wd5(d!X^a7JLotKs$Sfoj1|dQ4qrCfqg-U z){K>;!z z`Vz^K!}NZWK(6uqU{UfdM@1oCE>go5_wk)-ICE{8C^RM;*M#d7M$e6@Ni?nw#IbON zq#uq%V|}7=vwlIEm4-H9fEkriYY}8@9?UA_`iCU3Fx8=Kz^CwmkXkJ1E3eF9U!Ckq z&;sDG79ZjDnk|iXnW^w?uXGLLWr+Ino9ukjQEfR(D+n`HC zgY8J>LM@2fjXm!v$0p1^Hi4&+iT0V#L^-3PQRGann&Mls4t0cbyF-wAFt@%*hLn!( zwC*A?6umHE)}wOhCpce~Yo=<`$HfSiyG@(^W-4Lqz4_IcAr<0)KW#_F7ttz#8a zElWh#^H@B)$g+Kg^t4e{;xjm-zMjXiT0};UK1eIbyuRRrqq-8a=5 z6?V^v!w|ye93vi|9rj61w$#}Vr@G|Kj6JBZTQP;`rm!MxtVmj|Tk1Obr8BHdMC#v%%RP;cv(wA0N?}(vh9TtJs#X-Yl+y7Vztbn~*uKhMRG6;jhT-hK zE#Q8x;1kz!Ol+{fil$W{>Ft_C(_C%Xsn0<4t^y<`7@dnp1lUSL zxc0ymkC2<6YCiWepAvJ``SzR#BSIW>7yE1(QK|o}5TbKqaEP*6VTwpptwhGyw)v=h zG9B?@R5k4U87|q;YJ7T5w0i{AOYsl}3%K#&Xf_w+;#od53u7$b$ZDTDc&gs$Q_C8G zY!Ym>CQ;}U3iABmedZI#i?ZVO#b=44r3EzS8ilDdUl-c00;~+UF@Ys!M<8YU9;~D( z(oN5>q3M?D;h`CNO|+pNqDUb@&0;}i+}&dHlNnQb-D#^lOsrchak=$^^s{Zkg0=~3 zQS`RWqPXQ$ga!ek_SrhNHMehBZ0`!vZp7Ah*(Uk|FJg0j>x|=U-cIs)dvHEm(C&?lSm==hqK6cl?YnhD7Zzfn-J}h z7g?-24G?r4K2zI-FH9RM3f1)N)z^mmSf^wtPScM?3AS6f^Lp}T)3slS=&I~LKFotg zc9JkQEWUS%2()2;i{7-Wgj}U!=+xKR?~;@#tO_I$j)>y6MO>kEw~vHrH>Raeu3uvp z&IWRRp_hren!uJxZD7)AuU)4o*P3A=?qK#U5y;xU8xL$VY`CD`+N(JANN%Rg9{*f{ zvMPh}`L@6h&lHO*$~EQSEY!tIg;`C}eKW#VECUA_F=V=rj z2m``;wL;ODy^gYSo9)9ru>8Kjhh;0hN@3ccyKh0v)(U7n)Nslyx90_EE~f!rzYhon z>^dCWilgK<7m1#H;`3$j9LHJy{5h@|gft{^9Y6#n0_!MyJ8hV5b??L4p6*t9+SwPm zqQO2qK03xmD(nU*k{`Ih)T(>TmrhSpx^%Pgf}U5zVYj|MIQ1?M)QXndYQ-{L^2`(C 
zFuzBb!E{!+q`DVSG-vq1=9_$531~}JX}*2xGslWcGshzX6kx`xwUKAWFVA%)(~R_7 zPOH}nQx3KSXRuW~Cg^^cxKyB*z@GkuXxc6uQv>y?L(@@G9VfDuP%e!AasjtQh77Z5 zLKMn-w=6%*5affSPI>Jp4xT?}i|Ecl_+rJ!XUJChY+btb#3GJu71V=n(~`gn0OAdJ z&Zx6`(cM~RJIJyZlO^4UFW=V9A$!Z06vd<|1t0H(lk* zZ-WGM{6n}TIPEpqk417{BSsjpHakuP`bUTIqhoE7pj+zle~zHav!XB@CcUM$(dtyu&H(7k*IhDB=ZKO)jU8F}D2KJG+AgDp_l57rJ2m6)PVQnKZ$Dr}oY zapT)RE#0;YYD2;UXP|ri)ddi!E3vE0u2I;nytHw4qmWxwQbp&;Z?}r%3QM`Yk984r zi*cm0dVu}Lo+46UAGBtB`SgJs)46t!hsP>vxU1QNg05>=*eb9INt{;1&&Dia*S%LK zVuWa`ElW~RY>V+A{j#S7dMXY(%I^>|z1eg>GGYJLtxK zXgJ2n{c<0lXk41uF~`0wNN?^={hkd}SUX43B!Vbzh(=?+LDq;ee1_B>y)33M6{qy= z+t(@u7+g5DFp*aXX_4*VhoQ_GJjAhyB;^kaGBW*9l_}!X@V+oLOCpR1Jf`)GPr+xC zmdEe-)r(vTa4rXvf>Qf|C~kBg(DUmMVb}2ikwVMxkY6R8Yo7F}jjbMFIJwS)*;}QA2#nIC3Ys6 zL0;~4SigNKY20U!XxxE1l=TyZrbe=}?S3CFis#xx3cK3Iv^ExD*ThDP@-RwhQw<+?IDIRmk{vjG2 z!x$8bAOfAMt;8-^2K4=mM$zlITm+iR7Axs|>=@YhL~`v3?OP!;i~)sOVFm@ia|4vZ zFQIj~VMbJ3-p1_bqElI5(|x+d1j#mn2Mn$*x5TK(7!w|2E#1elq$5ko9hVa77xAE5Q&2(F=L z7NWE{*TOz;=J?VA8z-O-wNP958X?AttBRge4*8Uv@5b9}0(6LH+Q4q~QDIGzID&z( zJv%89l@DQUBAgVp4?F?>D;~mLea1c(K}U`Ht$||L_TU`0-M+TMdEpYdoN2kaVa^O0k4JQ}kOA4yB;gncaN)-J=s8{@ZDLU4;8*JLQ* z!c6Ten%ih_?iyGp+hTW%sFBZ&pcXV$h{nFZed6ggtE^g3T)b-k-GkQM}897`N*k~V) zqST!0^U8ZX%eq*QS$}Q&p$V@o(LejGNLq5pvWqcnCq!Ty*}vFsSnW!cALQE=hCIB~ z?oiOpE@~u2cCRp{qBy;80Jh!=HZi_7uq_sKYY^j2- zsw^~NkQAvEg~lD-DEDHVY@0lRDsuD0cxn;B^^m(#g`E+?=ZI>uX5KyqsCQ&8}6LB_q-2e7i}=H7l?t(O`o`af@+8SYkmx6T!8j zK+NAFUjW-OTi+5PicZw8wG|4}K01(&6QMT+nUGjvYg17yzjbY%y(5w~B5=u7NQp`lWR)a~3|A?NR|PVLAMp z=bQ>!PZ6p4%)+WO6}1k!hK-SU%Lql&k{BKmeylLdd*8G6h|kqlxkT2trz3n3wr4zMsbKPSsl#M|HR7x&Rx3g9c_HDAuT~5v)Y%phY1_$)RVUU~ z+g?F3AtAB;?PuVSzvv0h6a#XsFLh?9+1oL%{v4#%C((F$L$PkS#2M%REA4Hc_i4hopKcp|LcP-$WwgIkbja?%GE!0mx>L53GNcMinsoW+= zX^45s3-4r}|9>D7tc8S5tnjx`@{oP$#Fi~UOHnLyox$6$4fBAHFwcG>Kr8ZL5X<_;8z6QSDA*s4rkR1Hb7zh zQpAHLcA*f-xoICO-(vqKNFU#uS8Wgba38ee#wr{%lrz=@eAbowd>v)Gj_+Dy69lyr zNCRx1fNNyi@ntAKBOn=SLn zLnsn+$!ip(*m04u){;!$Y5sJf9rEdE1m|e`mxn>$w%8XplAV^wxu$(bfcjAF#?cm^ 
zAg;IUW)WTQOG8iE_XHWoAH&2$AKK>#%&wv)fUed;dqhOag z3G1)pvN?hAiJi7ZQOp=Z%vcJGS);Jlh>kPx!0a;-+?Sjl&lrm?s(oVe*4&YmSz{3K0)+)Bg1l=S^9H$_{ zW!55|sFx!NXJR*VkhaonSwTfln_(A6yaa71%Q}_ZnsA%pnrQRXB zYfBL-TBiivDlUlT#_hb#E}9QqcB{T9$Pgiw*bp3tz9hnTzkDq#QNPDrs)GkQUx8f1 zUE)g!KKIFXwTJEV8#~r+w%Y|ee|{MDy)^tT(Oes{CH|T&%!=z+U2czlF0OSQ3e&>w zOBft3;Eq=+g6mHT&y2tH@KpKq_UZGiEE(I6?Q5Bb{MxVP7iEH46Mqr{9^xs9^;2-yEvK~=ofP@}#MED{%@Kw6bxY}KU)k!)-&Z?c0YI4{6r$nUD+801woDZIJG^zfZS!GF9@%zCNVEl3Jp24tlz{E>m%p?d6(nmQ z0a0f+c}Uj#<72Fcpc~uKID@oDd~_yOm${?$9C-4VdE6;TyiWm`RynROuPqt1e@h^(+=&^JUlKHMe~a_#kv+d zm`otupRH5um?#Vp+{q#i0i|}|b_g_3?tsV03U9GRM50lH`1!(~A?M+GPZ6VN-5Nf4 zM{!#|+*Q;cict$aqh!=RCKNdFxVawaH+y;643iFLcRFRk^( z^g__^w+b`e==#E(dMR1P`}JkAME|xZ!9J-S((!5Sgov*0@RM9+?4AY)nnR2l->a}| zhZt~(-e!qHa%_>vA7{OVMPOi>Jb3P5nQS|-(-I;wHabGv zM?Ukb&nni=Yjkm}g^Bh>0oSlx%-2NhG7;Ro1j^kl*h{i2MWp&eQ(*y3vhNGB9}FCA zJrty(tO7C({)21S1bcXl3DrEQI`k2yW8rL`8}B(+$b!ITH>VLqSsJXL=x$mbgp)SR zm%%J$g|VXcV`0C0h)1Fa?SSSsgp546KDf%{g!!1y8O_dy;}^DH2-9;qB7;p0mgNZ) z%`KcCnru%9JQJ7m1xyx!2IEO)r#&O!MkBFqDb|c}nIy4}xuUzJcH}v(V%4@TNokuf zNA}Toh(f1=L;DQdE9g4$rZeoe)AotR>g|nU#9^O2l3l8O_(LDnBekdfO<`ACc2qBN z_T?rh>X&m|(O?5bprmnJl!hzJ?CzUyqZFhK$D7JdHtev+Jf!y_BnR?61J#=r8Pc;?efpD&_MDj5Xs-z} ztgTx}e#C=JcYR8g#8z;zFBT&u8awt+?6FKC|CixVfj{#$g zc){lbjpXp~sfQ&Jl`gV>3+k(N8e-cy(tPGqa06b5CF(1hNx>xbmukD=5Ti0jA%61dio@6%`Hg4rU`2Q z!2q9vt}{|Ku=|9?xzLiI9C*@z3xz+8N*k#tI>pf%`KZT*SVfsR94E&LFrTMZHP{q| zU5O9CXtx!@ZdnWtXV~gwG_nslwpZ9qe=fg#A$v&#%Iem$tFS;(=#X9U=JDDXkoF9 z71St(c*Xgpk8{$8HbGG3ck;Pxq9}Z6==^9;+zFGQ(&q_|5_Pu7Ly8|7kJ#@7gH<(- zX?Wd7cI;YwV%7vZ<{`{cB-QNv-;$PDLnT#mFw5-wBG5K|;fm}50UATWiTWx;vMgH^ z)_HoaRVe6IWxhN%OcG{XgJX6hr#N{^s}hm&xuXTvKCD^WHj2Us73Dn;F0~JQJY^04%>jOBxg(xmA$^mt`VT{ zMDyH4bAx@u$3?QjA^VAd8xq*vv+Z$VH-6eB5MBCZlEMqqT=>~jM4>z$7?Jz3r5?UH z@$F~$5nnCnzGV~wk?vjZ<7w;wxc-|2U5)+ZGJLBrO&t^Eg3;(R>t&m}W4*oNV|g;& zV*ly{>)%>v7w>cV_{wwb%L1;_oNyj*HkXUw`XTe>2=(<6g;t-gEju%Rw)OVevk`2u z=^h}h5w_U^{-hIl_knAVgssgN-F-EIWZh0!#A`(3J8PS<*0w541;y~iS;a4UNVi9@ 
zY~J!I_iZe;4uzS_5JnM0cG@RQ)XkGz;C}@D46K0RR%W;DhZ0OCzhYelG?_IE#4+mI z37aY;S@&~6KMpXRA~x2?w=Y@QvAN1B1ZnT7`KxVxW8-~#yd&DP3ezMUig8SyE9Aa2IU~V1 zf-Mt?R&f-TPSI{b|M^SC{&^oAxE{wegCO0-V$4+g%*Vaouu0e43tn_3=}i{C!9{|8 zj*w?yapa0ZTbS)(k6Mus)1Y<2HKNXDh@fMI7}K0w1IO(FkyOF3h$lA77cxMeH0?Pd zS4pC7srVCfM4Gjb5c~xd4^4Z(V0C3Nz~1-`IF~7gvgM zd)Y_d(XE$tC`>7s1LdjdBOjw*#q(YwMk}Ur!-*x+>_R~j7UmV~TVfXpxyDLpBi}9; zf%@b6V6N~WCzM@mS9iwb@;H98fEzMwy1^E)TSS4W639mImVl6iY!3yDirJQ?Shr^S zVU$Jo_J)c?yBL-f3>~ElGUS0D8szqW`nl-ad)O}oX(+rfz47K-mI(A?2){;oRwzWX zBeL67BES#PqfvWSK}N7|?`}3rVJZytgy-5GK?>k$78dO*K9bgiIpzI9(676_QjoBJ zi{jSwfw>17?7ZKf!)da&bxRT#MzPP?MVQ(!Wv%;bH$fV52POu3DeMLvhTDhQB3+r*e54T~8^F@U{+XJiqj{Cl?%#Qf%DlRg{-Vt<5WEg53c1Bq3 z>9mn=ABw_xKloW&e{>UK_>*8L?qnPI>MJfWWo5HqSW!2M!ss2XdAr{y<>RurR`c`zXF=CwE^AfR;iH)h*_QrH(CceS+js{z z$UqO2AL4rtoXd)dFE|v!t}8hRx0WWO4{nBKJ4u+`g&Q0?L=jcqAd*&+oTNC_jzx~h zgao@2=3!RZE+vD%8~lEtkMZU-nB^PnMUm-NEXrg50RcDEI`ik*(ay5hS0~mZr?@_` zeyjacL{=&DH!Q<0d({X=12dW;c2?k&u5gFxupxxZNx<7 zeKlK9%d=Zgo2ancP$e26$EeGFYTQY*P2X&*eF_QFM#8NtsC0u!ZYRJ_L>4cq&-*;I z`jLl+*lU8puP1H=b_K8Go9tbm=FGf_l1Vus=(y)WnnmtMwyli7Ci_^iZqf_mC{Ue$ z5X|5ehw%ZWjqn-~7}`-%#e2d7zJN*gvw6|E&Cd zT=N>y%tmjtt8Y}8X)6lzKx4gy$e1U^)(1Q&&g0i*sDNu`KOJ1th3N%$8s!*1Cg?x8 z$QbUet9|C75uWH81>D-_xPx)DnC(iEa$v*AY1}I!xhk+vmS=}OCVgNl>P=xcB`J4w zw@Y3>m;5JTl%MYsc7ue`A)S#&iQ*bH5M33nbe^CyuTH)e5<#1e#61usf|r9w2oW_w zTXsmOC?@Edg{E7Ijq`ch%H}6^Y@Zd8VH|@m=a}vDB}BQiN83Z7j!(@Mp!EXt0|V!E6;HCSkYwOlQ_l zJUg%6di%IcS)Lgr;MP>IZ^~?5rcc1{S(A+sU>w+0`o%M4Jq?!7R1xVDqWwr=%o1{Q z$SEga7|-zu@)1+R$deGQ3B@v__NIWFjNn`|(b`3EvzInxr2SXW)yU6FRvg(YmZ~q} zz>)*A?SBG(!)*v8dw68$LUqOAb43T^&}iQm#m!J?Jp1Zx!hXY(XJURhoPT0YxeIKV zVj2D5Zo&Q4o)LkDXW@yH?|qH+&Z6m($DkH(Tr@QRp<%lu^6v2*7~e4NmEjYJX7JEzScg<=Xt!33gUATE|P-Li^Bz3~eQU6`*G9$FgPoQ^+l<`}ry9 zgozEfcB!_{eC(SObV{)Qc^Ek2joQUWT^*QJFS08H+z6z@=PkBteJn?lkliRibAQ$4 zSJuUcM~zIg2Nb6IGt;WhVx(r6&x7V3E|h&7hVzh0vxj_Kq$mSUz6VfI8oAsufH1ULOa8DrDIL=XQq7aF+NLMIyR2 z6&+s?+Bd_hJtfefL>su)_9fHsh%YYY%c9V@QUt{9H37F)GEh_tpq3z8`kOu~@UK*5 
z{}puKDl8n?r>tea-D6*R>s%w=uiJBGK7lfkZR z-`aAle9^Ir>DEO=+LNeSA}6v#1!*fRhN(78z%7uHJk)(26n2lrD6>uolqc)#5uc@N z*RI#wG69N~-fUrCmLBN$xL`n2r-)9fz-jAr2CWhTZ)?Zag9+&DG}J`<+2F5Ob4 zuKIO5FjovN`)q+AYUX8M_4bpOUSjnpX5o0_{6Nl6by>o?;!_Vt~^#D z=oTYykZ?q|CX)}3Myx@QdW+&bpzZN-!5i0fdsooQwETb1S^S0~kZbl1@w5xgZ{A}D z3ejlVfw4V%*hhwnrCF3KKM|%q?K5jS7H+i*{{(a`;NHviaEx6k zl526dzFqAG4->VkFqUWC1pV<-x)=zT?Uh4PMDQO_6xH_bk!nBjx$@CXz#vtnpqrfs zaA#)Ig=udIN3j1#*L%QMU7hj&wY9dj!}-*(r@j9Ofhs4&a}v4spb*9Lygc7x0={*VBl&TgBh1%zqP+e@ zMTL7;-D~qxiO>zowiPKnNL~D?aQJv7mnj1Rax4G5=)QB_SPJvrDjNOaqnHZs^0Gex zX{$S8>G`{t2*f*i`fWBy&~HLV=Tj(h#vuo*ic<_0)<%dxe~xZAy1{-FvJ=~-b8nk3 z=;v6zJPJA3Zw@KogzmzI)C&P7H&;tb|CfS#Y^*(ytlgQct#vZvwX$jVMktHjUS~m` za_zgx3$065O_LJ;AW{4ZO14znNWp6_y;}R3gjHF$7sL+j{*YGkj4zd0ekjZtXTuTp zbbC@1dX~ju&z=q-k6&|}AF`rnvxIk&MS@^6U?4G@MIfl+qUZ;Rp=1`OLg>muxbB1TCH2BAVs6Nsp#R81o?1z2UB z^v`mII1pkx-?5rtX3cd*53rfgU~$DVOhaYj3SE+rl{Xw@HWmx{s@yBYG^W9JiWrWj zk_MDm{R~(y&7y}F_W6vYGbqGQ$R2woRVasxq!2aoX0!msg1-pTaw&D&KLi+-DE>h{ z2obE7e7o{ZpGo??T);XB(nA~}fVvAYX70hCU&HD4h$xhUb5DCxVcP2iM5a2?5Uly{ zwB;d9p5ze}*2h+M;6&M4=W4Fnc8SQOvAs!5QqtA? 
z`L%pLBn=GZLjm$JU_jG8c7mK?sQ!bZ2)fpv67=ISTB2-DWb_LmP10HL zv};pY?q_Dd8sO=fTTbq2u-gRvo)S;w#f$9@5dw=++|d8IFI#rVD@Q{sSJ`+$KVfbw zY_vt9FqAxZ#O$a5MLRctnwoJj&hgcX2T|JKQPvnzJ2TG< zV3OIMP@LoAueKurzO-EDW@`(%vkKv~+esmoyK_rh7!!Z&9g!I25eP-Rt1!KY!kD)Y z1pHVH#>WGO)I^{mSYGGy@lnXv8$;J$c#9Z1j9QHnUM5H}xiO54T_M2N`|vQkPC*t~ zcwcC7#$v7y@ehu%2NVn&H2&oIac{IoM5B2{`T3TwaQI#8xD-0mc2*ctrQO`aHiqz< zh+Aw^SWS)>q3ZggkS`x&h+>sS?0wOgoPyTfhxT$N|FoW6Oq)49SyDv$+zUmcH$%94x88PIVR}ilj2_TuEGisZ&dXMO>r&Z@=M zvybCbh>pj~v7mm>s_UJS+vX(x(1(dgs}qxpP-b{c$XA3lx*0ZI*ni(Be4>*+GbG0t zS4pug7NWgp7o*RrFb#2x?9J8~qI&h|Z7+sQt$TSPvp$!j@3<=I!5J`*nJB7)pAcniljD56l#SV&aKpPL*mY8?M(>bQ%f3cj>3K)(iRuR zJkqK_>i7!Tc&bF=1u!WcwhbNP%{l8p)Gr?I&_@J)i$>ybi!JDj zfAtA+^`&K6njjM*yOs7(028B_195A4coaWrabtJsFO5_?)n zm#~QRh7FbqfKbP4rS}6 zu3un#O$6E&%fZRI!hY<~c;omPvky}wno8HA_SD~ej&rq}ZI8mfDO^-b?FC_4db}1> z0N7IrFj>1ztcB~}BbFgVQzMZxo|idG7lrhED{=*~T9C$!W^_MN7#2lv%3U@K%^vCZ zl&!F<{_b<7M@x9Rwrd0fdq@vhyOA7hv~P*3&vth?mcbH3Y+pLX-D%MbveEue~TLTZ@j2~ zRbhX|beD`e{%TWobiYiKb?YUIo2Hads22{4cB~=u)-nLRi+IgxOzltF}hvUcu zY(@$CZp(Ohw;QeTLJ12`?t|4)DfaiNkV`*iScUwnAO-g6+TA|>FTynO0cVKKt_~1! 
z*dm^>uM_m2R@Xi~dfIIv`lab;Yqi)2LBEf%bMv0~!NFETM5YP!HSMU7eg=p=G ziWzvqI8m|97Kq{pD4U0zUkFlNtaVdsJ=c#@spwJY1zrBW&+kO~B+q9zi$Z=*MGeR& z4i%(h{Yd;|N8*{4Z8MwXDe}jnXq8EF=sEN>N<$LIfJ6j$nk^TF_72CXSKQVM`S}_h z3<UTP#Sk zdf|+CYlwm^3)jr*1u0BSG`A|A7l9SPPg1u1JtU@yC!1#5M}m}dc5$g){vRK{6>ZZi z6!xQneL*fTSBgL(*pi6ZcLit+Lo^goh<<{8NV)m#vqw_6pDcYnp2FD?EFEME`(?`R znhtxTz=}nqA?*j&Sy?JQqJztAPKd)vwyg@`9-VvGc7YIMI?6dqgl}gqfbo*p;yZpbTazm=89xgW5i$H55 znI*PeVJ$HUBw`C^uGNRwo_)GnbI8h2<;jYX{VF7f0IMLIEA>fHXy&kigYLC|hUgRJ z3(suCUDQVF@*&aT>E(jK_p0rZe9Tl7Io-`-VgM7Ptf&$JTI@YCp_?&71iqaD9Gh`6 z;f2W#h{QtAjOgHUFl5g!#5C7&0j;*?<>xBr*gGNa*e0yTT>BAme#nraDzMJNG-9+Q zgYFSvV3=ncrZ81HUM^1ttc-g_;){5`)b3Z%ub~WFU_2y9W6}!_ZY{Qn0m)29ft3i+ z86?#rD5~b!(;+X@qvz-H0NL~Eq>(B}0|!N6gCdvn(-1uXDP<1-t%8)s1dNVyU9sPV zcnDoPb+t38_`#e>qIOOM|1l;vug9N--TblP9YcV*;}Yu|a4r_M^-KPUu!e8SuJfzw ztRg_kMDp%4UC?i1*?b;4$5x8Ou$`G(XL}uN7vL#zhdcvEK=W zhRM=U3G1 z?>c^`k_e0n zE|mDd-c4meMF0*UL;hFM=q1A8d3Mpqee!|u(o3wfkRL?uwmSXx7lrzZQ5GDiFkNb` z`1!)tX*MQ6ox(YEKO{&?UfVAtS1*D?b{@)z4|RuobmnijL^ z3e!3^pqMQUAXArsH3C#{e21RwI6OCszz6j5d^@Hf&Fnk6uboiXcVZaYCj3-){e-8a z3=8%Y#Sim98Fw2bO#VTm2iXXP>E@}WHSLSa>^?yn1dlsznIYY|#anES!i+Q`rC2vw z5R%1<;S%i^@WtPq8=YhaL$>tRYCEhj#YZxc>d3P5siebsPO`s+q&yPXr5BUb_arlx zzzmXoRunSB{gqho7X*Dh{iK7?BV=%LR|}=Wb&n`~U$D!TYD(F!$B5*Y(edin_rJ05 zjl~uV8G~F?!*-h@3N@Wxyyxud=d4VB5O{o|ah-U*41?#G_@ zp9PpUnOtq%%rD^ILhLXEH0<93zKfB3m^?Z5$xlKFvIYN_!3my4KCO74gEzeOyo8Mq zg{JatjmH4W*Z{Fsh`SB@Fu-wK&)A_ax`f2O?Z+yQZL`k8%zPTU^0z6>j8f3>p*A9S?$-tR0*4| zl0(ntT9%M+Hsx=#ogp?>h|f!F6rzw*+qc*o3SanSHeqq%Gne|3#zo}w_M1Y!q{0Ym zcc?IZcrwC)_Hh9U>H>$kOyRId#Kv5eY}jYBLxOV~)>*y6z9iVKJ*+|4&(mmR4}T`; zrl~l@@LNTpK3}+4JJEeU%;J z6Ug?;7#0R&MG0?`h*r20tiKDkSVPF0n9R?;y(vW1c-Y9bQvsmLyVO4SIb!GnJ1$IT z`?4Ti;6^D4j@@>>2y{93;MPpJCcHN!V9h!1rlpI(&(y>HEmy(RgOTIw>Si*>ib5&m z8YL5TPlrsyC);ub{TSZRt&cq$;HkxXPA}PD2U8h`>a%=bm}5%&fvqQ-X14DwZQro; z^qxhh)~vEGf1aqotZ?Rgbf&j&iOLxAqJ^ya5Fsi{Zf=&*nK1#5h!;$RXAl+iKOK1S z;QfUYf}_!Y*|pM-`8cKVa||PZBWFn{!*Tf!*wO&W&U=|16=YI_^(lT4&xt~VFkQ>R 
z{B0qYUm?51Engt)ceG52vh!HhOC+Ml^OSUZh;~BF+|n!KHb5jT8Lid|bOdaWD3m^i z^%1e0kUeK?wmqdVU7zsF6(u+!+!eAiBS)9I1N(kaSR=fM6xvS%I9SH53e$~9Z%YTJ-J|v#Zjo}{(`&#<)s>AuI(-lytOZU#UFBFih-I&~sw~PjB4~g5KTXS~F0sHvX zJ~5YRyI5hy6pd3hi!Te&tqJ34<~IfW&~U$z#hmK`g--@K?-M8*4-kcV$8mMt9>U!) zi_xgCUlr*kg?32LkAio|$$|DI(WvW0X<1kMr+}|ZzM)Z@__p~Hl2vPYFSgqPtO!=llK!`rE*p2zn8h(INXJypVb@AD;~?_j3P)Q=S=I=Nw{ zOjiuD7QY zrUoqRNxP;Zz~OO3sV3S|LE1YJ(a<6kByB~AKicA?i0T8xP);N>Zod#@(j3Q}C#_|O z?Aqy22D3gff+IWo{8xOHjAYxI=j_{pzI9SQv71sD3lux8Q%HV%qq`?}7i1kF_80KtwNZQi04SvewGluJ-=?s`DOEMUdR~3ME|mmsD>@)EBD&2 z5Y@(y-!DQ`RuM*taA+E$bW!+|!Ynh68XQh#km+QI?Spz@dkCN2KHENaEh+u_#||hS z%(CnYB55a<5B8n`V32O9Jr+O$?Lf;AU=n!zlHKslHcJG*+8)T|oUIqZ`J&M?4eH)f zTP+w|6S%iGPPlT4YOg55k9I$K$~i5}*kwfGb|%E(&;~|$fqnX`#8I(6NcT}bErQRS zCvO`YgsJq6vNz_u-eTJXDNFBbjS6ci=uLvxYv-i+brI=sKBidguOan7co6S}aND}| zcJtRr+`$f>z+a(M;EgP?y)b4;)~7Uf*-Z6 zBG5y^6KsG0Z8)|MrMImX7xW#=Ze2gw3WYUXEtNJ^VZU98OUPC*KV*Js3f#$3YY_A` zNoJ(bo)5Wuz}5e~!eKmQL2iGtuF~EQ1+ddJBTn!^NdH!So&87Qz{8hCL`2-nlN)OZ z*hSwU%C93{4DHbr>4WIklL|9f_)s~&WS>c8S9J-WnH%i{fWR zp}RUSaAdhdkj~^D+=cAVkPw-y+91;zxq^OVM>-LSNh0{kkdPSM*Qr(_8Z$dsW)XfA z3Xrf4W4j?hR-s<>n*@D%_Z0CwZHGkgYl}}(=0biWitqW+6|BC$3e(AMX}#^DZ~1WH zq=;RuFtx^|@BaeV9Vc!68(QzGK8)9Q6 zMRrbM1}8tl+q8Y-+aN{`<}4Ov?M6Y0!7`@Zt}qGjiD1RdA|YYo$s!rvoG<8G2XkPb zEfMxdIc`7R>(YLEO;q1Z2@-J<{|*E{*6gl207dP)qIGQButiu-Jp(BUzrFxY4*Y~4 zh~z6AE-O1jgtZ97{5T-~$0$)~FMcd&V1`h~&R^|pvuw7fMD(A_n$WXR4M--0dp_d}`V;X5z7Ji!6YEFGMogxz;9v?|PS>x2D-yVc#&_ka;G$ z^m{%(OCe!j7SQVI(%bG(IN09j0gJuKpY+*~CE~>K*`;8dPLyw#@>oULi3&U%hVowB z`084FB!EJh=*<+M6S&Npa=6i!3Nq#sQ3iFptgn7NQ=ZVQp=^SSrm$HU7w5C+fg9~g>O*Y*cv-70;>iWXK2y5HF#1aKf41n zGVGm@g-vWY@4a#RhX{1_f&~gqS@#ts1SkP1IkDS*7$7Cw!z=xC!7%Q6 zo~yQ%A;FVTI9xH?nM!bUncEy{Ljt+LqQEn$1TU4lhs{fGq)hsyUk#X)E%p@=Xiw`F zdfiiiPO{Z>7>FJq5{*c1T;}9I;roR|v=Y1RNU$a>B2xI-mJ6O)7g+vQrYH@9F3z}C z;cuBO6p^ux;SbRbbQ4h1W|`?MFhhSeH~l@ z6o_oDXv}~_0Ca-k2WPe|2>Axd)N6q~8{m<7Ziv?fNR2}har>E|Z_$uMqKLD(2w{il zacdX*TS$L!Y9SsT?CMTF{ozI@HCk>a0&@^`I?!JWh|&i=U^fXdSjo*x@Pcez1iy3X 
z*OSed%U$mri!5;trMII!muyVG2A@s@4zEGsv zr`u-(;Bd9uz9!(u3&j%nzFkD{t1dqmk%R*4D+<3~@qDpd5vePT@f|%Vs%J*xDUEHL zXMQXs{6u8BhIcIdDdOBWRUAI;GC2$`5QPrH?$}{f0&p*ejkg8`8HDJ#xP2JHY52AN z^exc+CpRE6(Z&dpJzJ{cPYLLYZDPn9-3+X-IU+KHC34Ya3x&cFX1!zO?6ykL!mvs{ zR*rV-@k?AAa^lUEhx&s;ek6`eDY16uw6ebzFl%a51Zmvsg!MUM(l1S6>f{8##7UfdScvV@tD6-nOn04` zy_N2De7+Tn$bcMerUy?6Q1xq5ow?A8ka>K3f>kNZut-Iw&^8LOVp2>es~5>H1vER@ zZuW-U{rmQ}0}9hHC)(u}z#jsIZX|2gCU=$EA4SpUQPr%|ncJV(1}Hrn~GX0$j?8(@|(hM}cD3i>6WgAmNb zHc=QPj>FDZkbNkE|CV_#&$5qo12eF=W7_4Oit6nVktoGm!Ivm)PM|ocnk~qMS&=BT z{_Q2J-m2eja|Nje7e<)(;C!_)#J8)q(P%EqYVS({o?cSj{!F9& zR#4lj&NghR`bD+#A~N=w2+VvvjW8n&7dq2!67ZV|EKRgBqt;0zEq1WX)ZHiS-gNvE zdwt6E(F2O2E@l|NoZcj}<%<~hdRgt9iuIr$iN<=QNRBkm2~nwTt&FTR<9CY0SDLKZ z;oRO-c_V!Gis-)Y$T>&sn6O{BxI;y*;L`38=*@_}!v|WL0M*c*0VAx35X0P7x<*Q6 zqXm6Yg+)j)&I>uUd)dyAmV2LssoxM}yw2>{Zzn^#W0iPd`HO(>Tnt`(pzDM}2v?&FD8BHnU1W8Bl?arE>6EzD z3((7f!}{9s5blb)*{GhL-m6b{dnkna;2G|b5YC46<-)!$*4ZrXgAv727uJul7Kw~Y zfe7r9X+N~d3ThzsZah7In#~O{uPwGUA(y<*>L6v$g`~{kMD)Ej3DV$n>^a&`1sK3& zZB?>%vpctKwwFSR%xoO9U($;dfz`pfVoQ;wr6#1*KBX8x{l$^V*>CBcT3Zv5#g9A^8#CoT}E_qsSqWo6_OBg-P9P2F-#kFtW(!PDZ-4-CFkK>yg zBN)eB+Ukq?W zG)vOi-wQCgqgD%c_54G-|(uF3Ex zY@{F!ZQYwU&YlwR&B}~%sCC#c;{?9yCfn4I`K6Wada*D-icVHfx5`xZ9E!K2!i;{n z?0Hev4J+*%f<6fh5xuU} zS&?XdEJsF=+5p?z(!!hS`btDR!21C%G{ShihqJFzTy3;?!n*RzwE zDE`yLEM2sKQ`MJ6(zaXPe4_k-)0Eh1-wQ;>A=$Llo#bn6u!w%04TxYr!A6MSTN=&h zm-#Va|AFcILcT0JP@J_7?vYDuQlLZ&FxtuYoM$DX_+s-B-j3KR5qzh*_UdFUDL8n@ z7<*MvzlAWEcpP^e`R+rv~WkQf{-Y zso1`VQ`QL52Q270?GkTLZ?o+c-M23@Qk0b~@$h4!`HK2x!JqhpFiQ{%{&9OV0Gd>1 z{}rGKC-*Epy>g0O`~x5zp#5wTUl$@$&!>pnbrSRo0YOw}P+~-iR1!gh6{hG!kysnf zqK6b#_3$|(lq7Ce;|iM}vSG7DKFN8=wO@$jn>-xPPj*h&PYv&ayN;CEKSW{B5A7|o z_Z6mPyjfPGRr66Qv2^m`a)0$s642ub+WfBLI`d>`t3;q)9%>`DPCz}81*KZ64{%Tc zdk}Zcke7}XW7Q+yelG%3bY}B*`+EqtH?FtK20#xJAInX*uLx7DWC3`4zbh3z3QY_f zn8Juq<=W#Rxzm`gu?2$uV?wZ+y~lo=%8(vsr)(3}Zcy{m0jFfVAyhyFM!j`cUmc;uOc=YgUku5Nm0(%?8m6H)rvwrISEv2 ztwD(XIJLcvJa+t21lokro;~(PNQ*POqH*>|A=-qh*_nO&uoZl!a;m)<(jQ)Oc(t7u 
zUGw<9Cr?vl$;Oq*#&Y|&=)S2k^u;!M_1#dy;h=(j8#`mxLo`3! z*pcNvKSl(NQjYxn6bSi3+GcI$JZ?{m!VpE$Gi<)XOilZ`UF{3-87S!2PnLc!&xL5m z)7xR68st;IRkz?=HFk6bDcU`cW4NN$t`&u%Gm z+5{LQeI|GuYZs&x-cp=0+E<2x-EiJ-6BTx!7J|VGb;NF*B6;@dqSIS(jMg-_ttqv+ zqEM(bwfNHZ1xQOX_@?%U^uWoyW#CokP)Lxht4-EVvzJoo3$i(d3DT4F80+D4!#v<8 zbAo+AfYugrW!CZFGT{yti_P9IdU9u(vs|^sI)|(oaNO;70k#epKs+Jq3BaBZiB_Mj z*xbHr_63-%UMo&oiDFs)&o8lA3i@pTFAGxLS{5Sv=G!&}{bC7XMiDzAn&v_V0bdtj zpQB0QuIPgl>jY{Wm z$&c~O6y>%3ETc_UDGKdC$1B6$7NEQG8qG%hGxOy3U+76#IM;58zw-3{u*Qt1qeSIA4T?rpzmo8cI0hpz{Q$Z zs+=xNyAQ2Dw85SiU>MMb;=KL~A-`c@YZsd!uZzHn9mr6B?p_~R3~Lkb>Unm(2!4=; zN4TEl+6|)ks$bv9v%>d<$=<$Wy>$)Q>1jM&SEt)UcImuw-{b4OPZS!{*SYt5Oo%ly zWV{_$kS1ecqs~4E`6y@zcCV@Sl95C+9WnhrF9mh6UmmcWXrB(%L=`0=2F7CK(MJmBdQ@OLR6Q<82CfjZW{pfn%P4&PH5hrW3HzmXH+};O%tY{Gl`mDq&cc+=G`ziJyY&`izXZ~FmB#}#t6^X& znO<_o3i@Ib+|Z+zA%dIVDan>Sn%`nkXvK-LW}6&h+e$HxxkG+aYeb=1=j48|ebkX% zygqnkbM0HBF4W+JB0a%3e5js-yD34(zBxU)qsDETy8d8A-tJ0e=iW;45h5_QbEm-O>wlyd=tRbxX+103ig#_u6^1PXYmk_f}Zm z_ln|McC^7sOLQ<+QBJb^6ia2Vm19=riB#s_9yqoh&R3Qw93Y)2=4O7`SHT!A64B4{ z@#lhK?7URgB+Bs{%59}6e(sWc_{`QP_sq~tG>NY6%|H6A{X&4{!~K&D_*bby5Ezl~ z*0Z88F?tE%aCt638J_1s^s5gL%cQoi-(p`^n3SjYJacCL2J0)xy7p%#v@$nYOe9tb zB$Rn!#Xc3XXBVa0`jFl6n`>-SfD9vc!0g2U@#-e44X<=0Zjq#wy)CHK=BxtX_{hFA z1`6FukMhc3LxiYUU!Tixp9r-3%z>@;Xb2DG0FoENnb^WEQaC)Z@#4wTnZ*7uq+|0H zL*caThHu*vAy!hqY+I@z?cf@&pA;<0E*FKdK3E3#uQC9%ALF)1fZ>8=f>xQmCKx6Q zQ=GfYlC_94Z?q3Xo)Jax9xoc}OFO#yXr)~x(9tQqQS}J1k_)w70yKp-y4RAmwpbMZ z!AP>sHii78@B$OR7s0QpyD{;L$?!ZoBO0xAckEjG`h!4LV{+qm$5X?pe3jiKB55cn zA76N#1bxl?^y(jeE6-5o~|y0ftFi_FGjabi-)jXOcic?VD^md0{{umRQWa3ZnNSH!z=vTldfDAt#J zxE3+|+K_d)9Nx7E_-gnu#mJQPei#akP1s671?25nP4;ia`Wnt{YkGUdV!PxKD14tV0;a+aXoMZ`Es+>Q zB=~fix=E0Q{oq);Q$gSQ>^Q#?>B0=}goBH$NMV|K9m4UpEdchG)pkNa1q`=;DfmBv zL~PIV> zvSyWS5K%ioZWLzgMj_t;4wi^32-Ev9yjSGNGGZK-5)X*H#@hYsJr&htRWj8xRV72ywXe@lX z0YjLd(U($**lHU2t*y7$Lpkazj*-p?GR?`2OWZe!gDp7R_}JoajeX`ZVrl0?_uFL( z`Wf$sbPmcYSEne+s;$YYy*5r1-_0&*XzrE^(;#*_&c8?#O$`J-WVx_gYfDo(GI6b8 
z)xvCB&J)&FXYYEYgm$)z?rSHXT(vf)a^53W-rf`*C+|Z?0_J_nc^`URnE7j6pPn8w zhy3sbPII%JQwk38M=RmIiRtd$SN~09KdQwsRD$gjk3(R_aHyrz_T_>;J{$8698FV% z@sY_TId+W*{&tX3bHY%q$e|5Qe3+89$P&%}6pS2XiGU(9pldUPSWrFT6z@^kkB2i= zwcGZGwDAmS`@bpV8^xiXXP9?Hpt6wz@4x%r`-k@(fmO1PKLL%(5CPyv?Q=qO>z6A~ zUPaGTkj3Y8DZeJ*CpI%$jNb=gUp-tVB$I4u$n8ud9k62|cO)b7q7(xbcmR$&T) zKcOSd>j6rr^>~Z@TafRwFI*>-YNSv1v2;kjPbJ!eJ}pcV3YVQwsgvECN;3Ff#2Fq) z;eG|Ug0{(ckyHWiZHWvxH;^c zZ;C+2a(GhvP+=;8`+!4a>y-gygfj4nlCVBPMAFG>&U+IA#K56kCCw4kih)(Nq$8@7 zm*Hkx79hT+q%5>kkj^_sJEAj)&>j%UFCl&=5%fM3(j03zwgDf;O~>}zQIY&QNLFu5 zZY{I(qR=2lF`cW^XETBFSs-rfRCiu-+7^6UO?IUs=}I1k31W7O5IJ!yfxj2KEkGWM zN;1aVQvtGv@vT@}fG3;wpIJZ0-VE^2URX3yYZqi9xH^urzXb7qM2s;i4aWZSF_lA~B7NU=R`>0w(HW)Ncq zY;vsuYtc|hlU*B zJkRpeQEiWO#EJ)>7!-KerhzXy%Ocu@vVkt;=_ zXE3X_+v<>&Jzw0pt%8g+43#~vY2nd#u`9=uAY4zRa=audj~WYsQ}4B`I2YQ{6y5P2w!0Hcvz<+4zaL$) z&*zX`Q-Q&yh+QV6L6>MuL4T?p>Ah@omgy^^U;m>pWNnX!Jp9^@gCjgg zkm=}(5A%hoq~nTmGwpZ64BE+-s#Eo4r}tOc??s_@coIwPTmTs4oNn(3&@cRjy;U>C z{v}AU1+A6#VMvEQ;(EIz*GHULzsNqNFr{S`U_bBE0*pjHPafELDYUCZ(2yo%zy3=? 
zl+P!%hEKekpboWlE8LA~MY3+U^9c+qeXfmAY&hOGOm&Z96_zU^O=@r2VFe2N*=}32 z$SJ0kwym9Q%S315PLyqS{#B-(C|l&ho2*_@G&VyHb$%{D_0B_U3rwYdJQ{y zd$5Z|XFkXWuelL6KFK4J<+YN&HAfzmBHwlGJ8PRN(m!3lhiW;_dJ$&64^5nhn zEVJ;4kgp<%`o@k*dt8)`d)P{+&>=oO+$t9)KYv;V=|IJ{us%mR($hza~gOCzi+n?e#*; zwznxI%?-GJjz|FokwfcY0H=ilWImGBlqE=AMe@fO6YzcJ-T}YNCWh3>rZpHYwN(Ko zTB{1-hp!Q&_6b-4f{hR&&#zqk7K)!Wf|M+6NP9tm%^s6wIQfgU?#4MnNX`Xh7E9q@ zf|LcD9{&^<$32q1bi!^IqNwcFmW16Iz^hAs*KF$rSW!|8;L&rZRf<4G_h#nDC&r@E zVBa6oJRbhpuSB3K*sZ8O{6>h8e{=DC`>(=`6}orjP@F%aFpVf|-G~vl%bx`K?{r8W z2H2%SWZx2zvR;~nbmVsgXj-y*Wpe9W>nTXnPE@RN7BXhq9f6W)-H7mcE{x>IMe)nt ziHJ{Q$?UNrQTe7$R4l`4?}>8EYwX5|ul+ORr?E#$kCYw3HLoZg3%=Str*$lND`oQwL-*iT>2$i)wX6WTC=Xum2RV2tc|KN3KckXH*3U+KDJnqf1PZj#wNYJU%59|s-a%b?+J<_fUk*JD_F?h8g_h<|P zNzJW8Z|O#n>B+zWqb)6jyJJ_okHRX8mQAwTg_s?FoN)EU8M3bk^b-Sv&jO$ZE9e(Pf36*Nld!LA0Ln#C+a&@m87F>XL&zMRFf1=8 z)Ak6mj0aBQw>&XAT2L||R@^_+o)38<<2k**AmD4@V3==jrZA>7i|mXr!+HAIS$0n0 z@Kd>iw~uM^2K!eLX%Z&B(Tl^yK<8oz{?zJTKiPHf5{Z7cm9BEXe9iWRD85>(M?@?x zOf`{1Q*67!jKQl*`2k;RHG;m+6L>>P*e^t2d>_b@a@B7FIyg6m(Y05Fw7*v`VGl@F zS0$?t*n1*|uhjWF!JVFyBEeJRM4}i!*Z@4P2+)Ypqeok=!Zd=Fh%rPf3{e@a&)GzU zecKbcNO_kF`(-><_M7r-jwqCRXg{Z>g#t8}_Z#Y1D#YlxyNn(T>aAWRei|d0GC$ffM!vVOoA_M~gkAupj^QST20Dm^~JvyY}vG zYZPWUky+!{fcM-@B2ZOfQGu+Z{jLM@JA^&Z-wSFnXW?JQ{tzOQn>HmkRolM=eb3!H z_!dv0F!?iEHP`+dax;o!hT9iPd`;S7*8qp}Y|XaTLy$3ieGAe9 zSpphEXEAY6K$JRgpgk+($7Z~&3$97wV!b)8750skWf6vBTPj7jZfGhu3R6{XHW%by z3ow+oAn|=FgtOq8e|oZ~_r$8_l_A`<`;B&02#+2-(7vItugZy~Y_c0f(P&Glawj~Q zy8;2D+1M6B;`4qHs2m%M5gR4I@a2`Xw%}!AOo-z!HqV|AU<~N>_j_4fh-T`s=c$lv z64FUlE8stu{CMVsn4J`XMkQ2&xYT4EJQOlLkf%R%>dtCvi$t)RraOiszFO##ARh#S@(dm2~ z739$hdsZ%2B&+~rIJYx1Ly{8}`)sbleo4p!6i>JtMWHnraU|IPm5M&Ph~1#hKKo-| zeoyo*2Z!+4m5c2Wg?*QK->kPsg(+t6fYD=Yihv(P9mOTkvkL)Tnm4!2QsZmwC)zhU^S$TK6t?K0uU#B;WZDL0T|Dx(A(q;%g#le{@mq z3a}Up{D;B(-WgI@sR1tjNkU=f7`5}Rxvr&=TXFp5jkR%hnUPYi| z$2Y$=4F=mU1+~Vdd|D|_GQSNu^gxU@>=gT(2>b{nH#Q|V?r<*})$WyXqkZ;iqUcCO 
z46-i?_;oABKkS6v_HEI8`w|!-w|hbc7IC^|2+%iPWl^bJ^FP_W6>gl>issh1Wu(A~IYjTUt)d#K@^269Ds6wePlq5Kk?GxUv=t`UW3ez^Q(Z z{UqcV$B`zH%@E8?<#?E{cyS7L?UZIK0`$BQs}f}Vux5dQ-iTET(+Rw{@ddUDG5o0B zBdrv2!r7E}!50PnQb`oH?k=#`MbPJ3)#R8&8{7|hwOvsLz2kJv{hV_(N8mj&B6EwP zd~fo3H$SxBZWo2&NLFo2ZkS<%1!>NG{KRqJ6XFAJ-*mw8Q(1Av#V@%1OcXH_uV zHi$3scWJVz8D^QaE0)%^l}H6u)ps;f>e& zg%s{)gR*P0J(5Z>9M+sYAq>m&``s)DjYs2Z(BwWQ2M>SRxy+?I)*IY$dF}F-_bTUL}c?x+7kl4XuTq3 z3o@eoQ1VtDW_;DH<=Pmw}aQK|$a%ng0>bFwGJQPRZ z&i*3;m1bal8;ea}o#r8%$lD9&+8{w{)td+>CUo0H@O6wsy2^5egQ@B=>6!CuR*u~Y zxpOX79JB6z0i1JS_ZlZR=_?h*fE-sqE0!9Au1!hFGe`zj4fl z3sXGK8W8%n`vhrAdrkS-rKRmH%WSkLei{-y17U0>RYGw*H_tMKY3yW{*F=R`sFFKl zRA%D@by6z%SQ#{~2+2Ej=hmb!m7^K)45qTy21wibVc=k$AVb!_2)0#a0J`tluLWr3 zxw$oxm6=jw-_fX;N>HhU+)wz+fLX4cykGfB)~yg74*U88VlQ{76>8@C%o^vfwx zgmu%0g(;nluO3D4NXTCd%XM5TX_kJZvxL3Fg~R)m`tX6nbqL=~CoCVN3B~)F%HztG zX+Y@1=$Bg>S~rhg7Ktn7@cZpHOtM4?1EuS0}g3 zmF2CgX8B6o(PFoKMUYC8P1Wuj52$m83%@@=*k1Vg)dmU4%!Uy9}j_SoEG>+QHOtEV`E7|mY={8TWh zPClW~J~bPnhG)~xQ)|jBO^_N3<8fZF2Zc!S?Ab-`4KOR}X6dGA6fLV7D%fj371Y96vfd7d#7>EHU0X%9y&{SXb-EV> z=I$Aen6hPfLJ{<`q=--JeF1-Q!dMYzEx4Am0bDl+GIh&+Cn|B+J-JyFnx9cP67}h9 zyHD61|G1T6fD--P;07p zCyI1M_+@wOIV3dJAKPhVqA^73ZcU}lNfmcDzon5xi9I6{wcq&Tn{9pwzddicEml}x zO5H~HfVRn&i9(;)i>KKJ0pHL*xAI%}tB{-nLIKio91UL-g{pZ&5})mrkUN$ekJuRj z7PEMVxGxQPnFPEu9o&^`- zDWW2-{`}~@n@Y$}4h#67B2eX_{pf>cKI4nw=cbZ-ZKF*HgeW>sNL>}!N)hN4Ujv-Z zDj}A!^T%0zqHNBI@|pHhNJ9DSH*G@9S#sMFJEgE+T*-!2_|mZN&4)tmL*jHXO@O4Q zdDOgBVV_iN3Fhb?QT*~6j267OjF}?&G3h^|zZHf2xyaKsgm7o90)KLWPv)MX%WX`6 zjO3`I40}{in^h#6_nypD-rbn_LmXFHtz>uC)y!((v=E_xUhAikB2* z*?vVJPsEQ4CSfrYbO70%dQO;FE3t!$^sVpSIn90%ptJK+W-ki*MJv9I{aTm-aj&#a z$nc7V7g7|*iX(QVFs-;}4c`6$Dw{(w* zK3L%fd7Yxe6O@|2+nHI}CMp%6mp8%cLV@soP*Jzv3;IE0OUGu}#fyA}Yy-1!@O7u4 zU-=_>OT`~f!tN5y*MjLLFVMKp28&9U+V|Gmuuur^A*g# zg&ldDn{183zI3E_YIJjlDf67D{tF$FZ&gBc+r6;Qw^~8J-1vYpqWL^uh!mD@)igM6 zcIjfDClf~cq1|@1AZ-%w)V?l2=^WKE6ZSnJ-$EGMT&mz5q=nq)7FE5qd$08nqO1FMfoC;1D$O1QW&Ptc-t!+RzCbG*_3FqpNptA%bdd;O;hmW2GM(mBh! 
zwk8mCB0Msa`K%OyQH|!7(DVi&n$tdYTl=bowokC*M_{4X{?p^rYayAFQ7*ObEG3z5 z^;lR-c9U?(RyQwMx7q!CRoO7n>G&EzW3@P0i$e{NWKZVacmUt5@z5SR2CLzoI@f|_o%+3><L&Av z`H5`;kcTAs1okhQIJmt(wQ@`|gYh}70TZMP3o%$fSeQ>!=Fdy0Zbc4W4_uVDB9 zq>rV`(Hs~hS3VnZubN?3E6nIb$6-v5M~JTo`=34cF-?_>jeSp4x{HG=oP=U*r+X-( zThBDhSC|GQtCvc!usEdUo70_kzs(i_KHs_O&2LX#e7<4T`86BPZJHrfn@y$XE9$LD z3EYuiqeHcFKO|0ht0MH_IG^NvU*!lG{6a*kbIZ8JUJ+zbjU7A$Imdqr`N=+9b$G2^ zvyS+vPzos~s%x1^?ZOHnhtSKH7pi1f(jXjN%Av^GHu>kF?2+Zya zE24J!>I-Ry!c@3Qkf=Ubh`TdHonBmjdg~g?5~QmS6l4}zjsRUvHm!4vfGy&E&I}pc z5BMf~CS>4+FK$f&zRAaHokH%Cc;@sN5MGRj> z>lQ>HKD7n{J?-W>hMx}53Fz#y&kOqGXqa;1`9_E@G#{VPx;L)53D{lr_8=9NWQZx>g~o5MHSs)wf`W%vs33{{L}6Y&{DBnzwU{8)bnDs8#w*Oor$;k6i4=sGBo0doLnf3Z50)kD z$AW%GOvib&$IW0sC)wE{4QEY z*p1C9{|aI^h(g9(iI?{jV085B3a4hEAURJ}%(6#AI(MgwhIELiQ@uSE(xr9pWh+9| zx!rYdFI#R~1pP`()=b0YgVl!w4D(0~p1drm`r}zxR1F~gmn#bGH34cLG-Q;WP?+(R zj-w~#vL-Mncma-28eJyNg805FltcT(U@7|)q+&HRZmx)A8eMb!FYuyD{t+MvP zkA0K%5<#Y7rRH6a(1FDH(Z00NieNB{-Ss~z@9(Ds`INNZ!eWyQ z%NCo_Q3fJW5nCzb7t0tP*sM~RhQ)M-yXZN9<}Qvc1pAzjK1|$BVyQ#UC!6dF5vdWj z=`4iozbxpBL+2JNi^4Rqtzw6>`n%qq2pPm$hgDfDNPXPTvF{z?k{j1?q_^#Y{wqcZ zn{#%(2>#x3Z+NY>L!wcGJPbB$moV?2L~*Sa`{cb;AsGb`&Y16uKodEY$ezDlQVC{i zyW{ZW9)-2$8ugQAV+DP0P>fn?Ss|L`l5=RTUio`q;jZwojiP_IxPBagkpT2q$!jzO#mFCHnqu zC_zSgI0Q_$e~Ca1zg&@*iFdS5Q~_ClEHHjH^^{|sMDlHS%*|c)s3^3(C=*`Fj{=x1 z8MRpgei%k@B(cT9^ra|2Vb3Y7MX|Bbv69wm;u=Ks6NktU$M6Fo6$6*7t#=NhYV4$l zzSsiEPn;E|*gp6;cvoTOkPLT^@`&!>9cc+d#D7+#FCG(n(R}+*1V1kNOgIwqYQuTl z(fx;)6Dt^xn=1SG29gE_!a8>dZsri&OH@A^=ciQ3eo}8y=$Be7xo7?^v3??QVRgiM zXVWcMS+V|#(w7MzI%30x7|Zk+uU8KRkjOz?`f&leLJ^}|x7ZVcbcHXIV+jv6Po?6= zweGf`1c*ffWTNbsg1$rJq7o5?TPPBvI3aduZ6VCJ6T*yQx3sSIQONC>zvXu8#!wle zRa94P!$r_Slyyx!iV4!{@iBg2%LV+m#;*!W>in`S5{#58&9M4-YhX=zrcFcr4%pL3#YtGyo}Tf-*2>3n_*u@s#X=WuS<2r+yd+X|wn zktetT-6fhYJdOlPmh}~Z!gJvD^;1|&MvlcE4-scqO}A`?{SX{pcX)yQSXdvkyK}mA zUK*0;aC9kC*jIujK)%*#BG76!hG{IYC8>DW*xWBRiQqrAf&Hyoh#@&WujcIZ1*i8d zuwRD!C-<$BoyuQ_r}F7_XV zsTY|h#`&X=H`%mF2g+-!i460=%^+&u5y7u~FQ}7Y-xZ00aSUs|gj{9lLI; 
zPn3D6oE6bYhywW$$4LoqWp{|cP!7b~>j;JY5`i&OY7Yq0nDn9&{lYyi$RM!7%OoHs zhPsUxk%@Ls(<^YF;wiLhVzykv6^Al-3rucaWy=KpnBgmzWBu|F-&VRDEgf4UNOMlt zO|{Ap#c;+a#;rL-o!m3m-V0IA-xeMnwvmg5MsS(UMf@5;-w$UkubZ$R=Nyc?$(3xb zNHqII*=h`qGoiLTP^b+PO7iaZsqLhpBg63>n`@T|(XuYRy4zI>Qy*!E$1y#Xg4r0h zzP}?SHiX6~tl`;sKqiYf9E)m} zcf!Ahw7iXG#0pF7JzYS$^Og}2+CW3hV6 zk8(c|!)J=C1PnFSc^a!r+_abG@zgU;i5P}6wd>oPDr}M1iC71x*+v)dal=LlP&WZTOuN>BR7ifw5<|i2oX+{e@d}b)``RjWyt># z7t$TVv_)r+SC`q(1*txY4Wic-rd{~hDYQQcP#bOOi*S0L-V*FX5h=`>zTi4^WewP$ zKQ4F~D7I@vrJNFpjo&EX7f%{qaqkL{mo*tay<51<2DSp*v|zSCVhy-_JR;IPv4HN zhmW+^j*3D#mZ(cG&f_ zc4+J9@LQ#XjkPKuONI!DtF2y=OA<28024v+^&MthW` z2l%XJIE1k^3)a?TPl>XWP4V#Pp?BJBh4p)tDm|(Un{9z8R9Fy`Al!CGbYRS2+v7Pq zW-hQ76s9SO4XY$U@=L)mzWA)lEwwWqPtE}d5uO#)Zw|(1#E9Bw{}zd%$ZcJbW1p)H z;N|J;`rQS>T8|z+5%RKrqDA%XN3+5P3;M`D!!oV2RHt+bP0q3sQD_r~+hCt|S|>rO z$hxVl6@kvu$o|>ZB*bvGtzKupR#??FmC3K*Awk;u^3+3n?JbW(BnB3(eWZ@K&=gJ% z@W1V1Q78!0n)XSBS;?GdCh(+g-w^iSvGmgG_-$Y5B}5N082p}Y*NGC=ds z15s&Ih+p7<%V zs_bsT@R7>bvouNsr6PraL=j28D;^bvfz0Q7zsNJ7hIb%h*_Yc2k;2x2W>CcHE45W3 zGAgl&*mz*8Iwl-S<(h|w%RmV|D~yk~zj_8bT9m=%;$2}Dh4es*?8aRoyQG?{m&k=j zZ&4VfH!HbX-0ab0ZmZnbR=M786NS>@?CPf8&!H?oB8E9aFic7&#<^pbh&M?x?&>_tIZFWV!$qw_dtr@XwZmc2aue<;KtAyiTigP_4~*bO24 zPKOQ*D9Bmz5MM?ylKE;v&t;I((e0RgB0nI&LSSlArN5Esh2X*+S5glPQO78q$g`2E zs1SkP-iVFCSqjtJ$az4fJt^49KnTuhkmi|dJ46p_Sl%SeSNEQz(kxn@%@<1gREgyHzP1sx4~Skbx4T6ZFi%CcMku6G7HlDx;MWt&iwP zCwYKR?IDsS*j~Q2y?jA?`4&qNg?4s6K=l=7Az}9IJ|Xp)=(7+}n30He$D0nn zxLhToedBlsnC``?U-@j2KNn_Hz;Y4!vY=kpbr;BMt%6}Sr14`9 zIP3_KT8YeM*)S!7)rpX6d8AZ~#TPbm z6hk}E#Kr-W!GmW6m*wJQWEVYGw*v2ObvVVVSY zGsF6N&WsZLU)ey<3A?Etccp_p=j1%BPGewlxMMmT&p>_SA>n9b*`KxSB{YdRw8&IY zM3e1Nf$&QihMV|8d(sJk!OQn@g$RsFr|*oVvIXBPVpts5-$3+JIDFg&=!qt7?});3 z@0QZbez>17J?2M058d_K1w-PK2g<*z&ct$aTQw72l)vHTSo9p48 zy}R0c51*Sg!xk${r?~u<+HnCo#jOCj6%=^>>nZyqDE-j`L{e8~j>L$+AVkw}wz~{2 z)K!9^YRtHGwX20W{|uFN=1xgX8zCx{VUu*7WeHFoOd`zqmwFWPS&g>OqjF+04*Y@; z-8j9o*?#XaFD@&E!TY{vq7kdD%X1{7MG;HB$+`+s{CE^X3N1xQKLOtsIDk5ZNk^$p 
z^E=E7fYJkZPWK4X*p{t4jXx&9_L11LHL<5whD`PEqClq>4A-(k7j~vX>QoX1&qn`l zwvD2P(do}xwXI$>?qxD#dimE?Vut&gwi-j)z}%OptAPL$kflPWhS&GPpPm1DW1zC--OnHui4k3B zTa}Q$6-OelhlE1kaAhaAm+f9?4_LZ?Q<#=_G}WKk_l&*ki4#o~(N$!Mz2^yzm+j%N zy)Q`FxxC00+ZS6v^y`N9viWw608vQwBX$*`uod&Qim}(&ZAzdguAr=?2l&~#%UH1@ z=y7gKvz01LgBYD|J$l+gFBCzY^kQ3{jBIHrvX!2%FBbkbC>*+ZZtaw_t9RKePU$pn zu`Nj{uWm1&Ew%B4($Gp+Y+GTn@m^+s7KsL&TDC{;t`Q7V-A_-HUvDLvwx(j(pu55( zg-5U$OEdiiHSO}Jiy=+RaDwhhQAhJd@Xw(huI7seH(NwX?8>IML}5xiT924ujqUWP zj_T?5vPYq*imy#OEJ#uMtjhT020g-Deh^}~pCN?}BLhz0mqiN$n$j!9z9$%TUgX+K zg3mgoV(hrwZt|p&U4gZBYcl5uZcVo*p-fpnE*J*9h|L?l@@i51m?1%h-H}R7h1H54 zhA;z@G1e%|I5EY=EZI!^g9t1m9AFjN+XA%r?YdpEw0FrbLS}6NNZ#3Jg{d~NXK|u= zE+&-&C3KpT6TUasI=zUt>CfNzgWUDL*iqTQ~Z1H(p7&S4&r zp8`9Y%&ykje}n^LFR1v)K?o0#yXVP~!6h#cOY3vfTDGx5t`Oum!4aOeUoFIHb5k5U zc#ip=NPeS^ED`fgcY|o*ht6&!v6ni%1B*(RTGp0WhQc&Br)5vJ6?nv%icL02VL$9& zI6>8}x8@AhX!57PwawaCsVF`FLV_k`&SDA3gwIEcf=uoVKKt>V|k zY>Nn?bdDU6w~SiP5Sby~Ef9tcyPz#XTKc=n6ZLE4<>HVh<|97eekVXrBFkzuc3P0K zIU)s6LjXFm-94VZYg7J4EoW83sQQ@DvB(MNuhw zGKU}gwSfQ9FnT8mD})t;PRj;+%aab5#PM+fb)`I#Qmwc1$-L19w<)@e?~E*Q?y=9m zL~i3l5_4-~9pIM@ z^UMMiDYgmEZxaQ%K0ok)J)s~Kw{GUu&cns5nI{#d;MV;pi!B!jOZHgJu_oIf%qqYf z13$yJgeaU}R>8q-cE)q!m28Xs$>Sn3GCS;|Uz3y%?y+k9;W2Oq-#N*Q{6G;}NwrPk zmSsJh5WWAgU3TN_X)oer`BS!1VJ3n)2gD}8+S({W7?vSA z8b2o-b)qi1B-Yv6$t*>Am<_eFA~3AEa>Dr+f$;HdtE`ITsMg#2BGSu3Y>0m4Hz9R# z7QRORPk`pK_e$*WZigqDH+EnjAj=_lNEw+ec1T-`TQ=KdL7FZXqS( z-3vBJ%S5DcDDh<3MuE^d45K0R=Q#okH_=!xJ?=UY!|!uA5?RrHQSS+(VUiKGH12%+ zM5F;aL?HdR(|+Droc0ZDqVsRLy((g;bEITG63L?6FamS@PZ1fQUfc?6bvf_ zt2^fG=~%PI4hV+2hvnr@u;)Gc#;$#^QEFd598%Mi;TYZcuAt9RQ46*MlDL>(g$)#D z4AgR-gzFF3M5jbAm)JhsY`LN^oGnc-?G&ala5Od_-DI-_`OTmJ8XX@I`CsArDUtIj z3&;N8iJY}pe&rcq5$cKaWsmCCy|2BdFybST+C6q8Xm3@b zu_1zy80tSgFSdiGGd`ETLVDVs*i?aucDqiHT24=Cv^zbDp*y%g&&GD96pyLQe(Deh zuK1j{4A0mXk>qTJS%2}C8tiZh=-iE6Q(3x4Rj|T#i5QMABVxJ56WH;12rRG$(P_=; z`l>V2=hv%qs2Tr6eAxojYq@WZSdd zi)E+HvcGx>rxw+PFxA7V7vueJiVCgdJ1J+OdRuC?i(U<_Jhi#Jy`sWCB^c)I@cbiF 
z?9;-usblt2_63E*j(__k>np@!;=(22>m}R&KjPRADUJUq80p&mHeM9gjSR8l!gr26 zkxY^b?}hvL8c{+MI4s~(TUdJu+&`-lpjYAr!@1b%aV@i2rrUD@VN1Y)G|w^-LF-{7 z4)Gf8*Z&hAaV<`ZKqdF-kEvn)el zTAf}HFSbVn!bszR63rxGFFbh0#Hzo|7gdX1)`R#JK9gu{w9O*YH$JpQ^kJ(Y#kMSL zL3z#NBGH~*_O;iE3yXG)GXuUWQuvl(ADYkP!R>Z)l7?*saS7_}N1`!g;^_@gm>L-h z8EVTHr0ocG!p@J|5@A{Z^A00t0@^ra4VnA#P6hm~yis@MwVoP5te9;SpyWubt=w7! z!>Alwi;&8BVH%N-=Jb0CGZr-B?B+SB&YgR@-aZg9tUL6W;4jU60|Es`W(t?vc0uBK ztqhLh_J|O=Usx>1V7aOGrf9Uke~kCeV*(6Wa0UXCZ&e_19+m^NN=$Ap(7#eNSR`G8 zc4eTAJ$m)APaGvob$k+b>+EmCL{tK}d+-3u#~kJSYZ@-txpEfEN#*gvnN(CUR%Q~mx#{Z4%! zf9=Uz8hMC&S%41oN$GB<6~6MaFS9@4)LWvteu;^mV(cA7`^{ieLnN<*I}T=fw`TT3 z#fIfSjE^8MIhVW%MdJ}Mw7H&s#R>XRWyf0QT80SB7d9x))vmN0LE2J)Qe~cp`@lUZ z_I%-Mg9_&q&qqP&6N_!8!>CC!ZLKH26Q@@hcu}-hJ^pOP(zC1f+8+c%OS4+G=uzi* zlES$2&SWP<2~#&>u$>grD^LC6#I9M1`t_a1xw&>)QFK8%Hni-0LH%YsZ%mkHvroJQ zk$&a0th4S4Q#n5u*Ui3Fl(33hHp_3tZ6dG;wb8mb-#C$2JICf6TV(eL=x2jFR8Cj- zC-aZaD`st_i$L33rngkyX&FMHw|5omM{9+m(43By>#fMsMn_L85u)VS_>36Op^rM0 z^mZ|Z2?Rq)k!AT}Tj;s5Z_UbE?8%vxZa1dbZjYRRbWf4(_soeMnCFbl8z1-N=axKs z4u`D&aM-qbr7k7c9t%xDW#jbr-S#6v24EB($?Q%6ZTfoE7mrWUPWOoze%s^YrTOvJ zxw%&CxzaIxWRD3jEaQvi6=SmyU5)N%v+Q|?GDTRu-(L2NCGrGv!s8~jY)R)v^p3}I zFv+v8{f=TZSCJe$GuB{Ni^P0JFV$j5E)>CccfE3>JV>&Ls#)4o$gYE&EaE$O7R%~;hocO8E%;aSeCn0BIZyIuTyvV`59ds=o0yV+$Tg{@$O6z6*j zhf-xGilgo$qNrMg+^ehY2|=398{u4z!!=eT0>h5_cayy-5Js5p*77^qWAAwZ++;8- zYyWn5s%poXb*t@Te;`L_lf0=*Ed1jlQ6ryE8B6)1U}$2*6#SNOXrivkb+fIDr;yo( z7@EXo!mQp2Xj#^J_{`dAw%60K`o5Rax3|42%=qHcjbHfhguL8_W+^Y+@y81klyW)K z+XzuKBkK;LwKY;G{4DWx$`oaZz*xM+jowN;7Gd!nx=C262=t5UhunU<9T23x#HQ_$ zh)%V=m`pGV7wfNj0Y_0#g=4YD{^IcxKZlq4kwhqf^7<8EXDDW$b4sFdr5nP!xs$T7 zJiKSyh)&9m;6%07(nSf~7*c}Lvt@`tH*&<+T`Iua`m4({8_cL&*;nvKehR7;$w(ta zV66&=?v2SSv3CW-#GojvQ+hLZQJlk?m@k2sDG8*Pc$6sjx8lUMfcB2dNc@xn~G=@1MHXL#nrIuG3^3Vk1q z$-L}h-hMLdry@~gMrJlD?GJhh^rsjDQQ3lw0k4FWRvfHDj^v=d?2&1De#*2zI_#J+jlXun zAx%nbT4jF|q)F@o&)TK!p^7d&yV_?I4%3{)IrRoX(!pyhu-gUvCse;xtc-D@=wsK& zJ+>xVl+*1ep6Bp3L{Z`na$+!Oo)8E_jk7E6rmY^^GNom!{Xsxu*s_k-ig$(R_vxKW 
z>;r{q6lzp0>+|g56Tom#ie$xky^Or>UMKpuFS|;8lx%zb|J=4YJy%D*L*`gdCG*Em z+1%vD$`KW_2Ak|hilUbt=(*hemc6~&VDmpTa&*(=x zh@5bMXNH-)igj(Xs3uWOk+%YZVT3BrT6-jqamhl3pFcFh5>tiDX zLJgx)G`u@m2)k^R{Zu%tK|P@4TB-=#`6fSN6BVS)0u1d;QJBRZ`Adjcen*^waHTwv zA?Nq5Qxet7BU@j~?c!79q9y#45J(W9xR$;7)?Hz$#ULI^0Y|3VAQ2eK=m;a>H{BO% zM5OI7=AU|Fk?j) zgD-GuCT<*H;&vBR=h?<}(an+{Ob9$2Dokx9lVh#ZZLXtCM3xF{g;3bw@wHlg zWT~wc#ZRs^^54X{V6UwgHPkUa8; z$j)Q$1czF@oN96fLod&*o{y1tTk4c(r$l>pxvdk0QDVv8GsM<=0VfXZI5mHo)e460 z!~4s&7LWGN_Zto))s0&FP*A_8=x~x~*lZV_gTnjJg(t!w=-6bRQ3Surz|VudhsV;h zwKL8yMfq1z^ia+f7``5Yp$Ts+SoIkjS5GU#By_xiu{>p$iYIMtrcK-pIJ7=HY!X* zISIvhj@aW-qtMnr=uu%R?X1JL$}RA??L)!Pr4n3;+C}HR&{bPPSYs&b;h2@(YM)jt z)eLE=7mxL84(adM(fuq%kn|W1<_IYrsve?*^%mz2S6EueUCxGz8tO!6vDngu8HS@W zB$X{duk!LUEl1(7W)d5g!_~ARPg*=NKhI_fFqp-~@~!ivqu3-Q6K$a>;4 zP9@h?`=uaN#CRcpRpHQN7!HhT=qnmg7&*=}h1RI((3Lxid8K_tm`JoDr`R2T22kmdGQI$NY=j^~ zhda>t1pBd&e(k#}6T4?68g@v&V|SUAh#vM29NS`R=TQ;D8jU}~qq%+*OFc6~kl3&( zVm&|29&9UDJ{DV_AgIH z6%(+H6+s%$Ho%YQzaIHo!Y^pdac1O z`YS{xH_@=lKB2HW+r0Ty^LFbh7^bT*iwD`B!dfYCP1%;w7qDAJa?W4!!&DVnWZh*C zdwH=OCIYvs4w2`~%Zl3!K^9A5!_3J3_H^4M3SB$6u61{&Z4v5n*>@A0wnofEn4z1a z^LUl);cp7AemP9QXRMZ&hu!>q(J3NunX+T*@bAao7!u)X## zVS3(H`3x`7cJtqWjFeW)rrM~|CH4~$DWJHpc!K3RU@UiAu|Ozogv@Pw!NOcwff*{`;`C<$G0D*P}b>n{*Xvv8Bdb(6tY%!q_dE&J%f>g*F^}w zw!<6s@?ifKMTbXN^=!~}I=M1;^58O%fUYNaTQeOF{9t35a4nTijeJ%Y@0 z4#qhO?L{FUE!dP9_Aig-jt_6@-$Im=(zlm=sIVH^uw2HRzw}Nhr$3j*JnJq*B#gUh zZ%((H98%{UycS0=%n?c(h{7wH2DevKwO8V^<{5iL6#Wu*Ra#|d7&Fb&6b|hh*3y7k z=6NE}e4a;<&|V>=W!%L3?K*~bmy@- zZ*92%&rPX0TTx|Kz8ezLi&1>Qx(d?9ZfF#C_o(*DW$l&Q>=wb$%#p}+6j;8e%|fTN zRAHW9w4f2_+KDKAFvs4xfi3gGP!yDLyUoeMhDKMKEl%Kvh5rRu_Vpr!@jSZzXnnw% z(^z(Ejs4b>%eK6J?)H`lbeaZ~rcbm#CUd7rrShyW{pyDu^M82K66D`o8ti>R3f7}c z+&=Lhh#w(>OO^H+0WAlyNY>l`IqaCe2Lq(mLoizR60+fn*VFT2ZobOy5{ORs>#*M; zWoWp&_j(puIh^;%0!#BOWArMOEkJdrcFj47H@rfJD*df(R^v&tW0MQ5S%CiZ#QDxX zPj_xy{|3L&jPLqedFAC5Ej_gvM zy;?BLc}c8*asRFee)mIond3w=%Dm!&Kdu;l)MB^h!*4Z~G9(N;TZ*_+d{*VYQ@XHmT-x&yDV9iDX( 
zE5iOM!15{LD@$|i-;Oi!Ho~KUeIOiqHnz9`J>O6KGY~Lf6@;d~ECMYllpDTp2!!p2 z9k~JF*&94PZIfD3+Y zJ`|sNW1%f~7@e0IZH1twD279d=LLc;41>zSLKC{nj)@$ql<{5r$iKjB`m}E>LP^qN z_^e1&!pUwEAO7zP>bKs#hxPWT=|hOivw)5DcRc-bAh1 z6Zh)b!`c-NEg7bv{>Z;C;4#?eu`eVs4qEU(AxyJq>9{+G+tq?Jt1r6W13ikLfG$)+ zJ!ep0E3xsy(G=l(=rMsHw1&uZ9ztu8ZqCzQM0DxhYfBxTKCrK2)>?Z`kQJs^>O%X2 zN79mPmR|%_d=dBSHc!cW1MKTRCsXp~XqUZDF<~`jpeUXb2=ob!KcvI$8;aNF1&6fA zQiSN?n1N&LArJS&{a?PqWJ=4+(s5?0U>Ie|*rGM}lqc@dt*<@ji6ibowLR~!Wl!r| zzIp%fl)ZV6_`eTAvG`oXgPDzYsD)cH-tru_*5tOuTB_4M4oHOP7psB+<(d8G zB7`<&=E(Sny_BRR#GIXNuZY61m0+64-WLdyJBcCv+<$|jN#S58qdSXjtWy~Aw#wC9_AMrg3YNB&3`9&7fu4^YI%1$z3uqm*uE^&3r(Tfd z(or+d_IXstd{hn>*lz^G5O5ofBc%cZUbHZ7xhz(6W<=29!@*aceJDg}#}_|iSAR&D zirDesSlBp0qVnNpKCG}>7-^%kPfzj4wl$kpg`jjMu$FyY?d^I~Q6??`}3)kS0#zO>Ms|a1_gs^}b&ulcwQSD+9Jb9^(Kdt6sUYLU*`~sl2!vi|#}Op8)gpv;;e#S(8$2^ksn5<@WG@L) z>j?Dg-&8oXDYDUvZ);IPX?dxsSb=WyMLE+#jS zAtHoz_3LRC6Vhd4ZFJC(}enC)~y>$ z{kwvp+YA-g8ctVTlC+4e(*y^8uxI18rnU5-!$|$9!U_Z#Ut9;F@Hkm0%woPc^to2$ z$$1(^Im(_D)P-04eaR12+bfZnBYr zfejvbAslWb$u`!@=z9YXM5O|uO+4pH-tRF{*i2rZ!ojPSrK|ssbeL?nqL`Rdbx&Kn zm-5>7*S6VT1?XoXdN@V)kIrOU=Gwm$rhK{6(yx1yU3^KP@H37SxwhNqMWNeo@w$$) z%_fdYUlob2AR@b!_mQs&vNFjNvF)cPsvE4Eh@t!JW@*+-I4tH#1$K)NtAphS+nIjd zn@rv<<)$8%B}``$4GV0J!p@{V!06-tk-1$eDvdxpC(eem+6&0$X<6?OzYq>f1VaTi z_?c<4W1>;$jabS&uP}4P*>AIbAP|0f*h%9D@}Z~1bfpdY1d&t}iT_QvI|S)SY`la4 z?(@hqWi@A>UTB#P;}arFuoDF-cO(Zt=_*^62yC7Y_O#gwGC+kf4(P4h>?vW*h1B6| z^<1%8l+f?ANL4F3!Bu$?Kk8wP3eul$-Fw^56{adwiss6sf_?SUK*&cB6yriXFsjk$p9ZM-9JwjD1U(#dpEJ_yOxB5;dP} z+;XyRrrjs#J75s$X|{QyQ9~BOi(3^A4S!|lkr{T%v$I&owCu67f~?)Pvel8=h@A4- zKRy4+rz-8EmysYWjr`(lxJ{oHA@qDWKl8zY)YZ37PrFm$P#1d6^>&ZPU32~QmX^$R zeA+_GNG3hB;ZTEBcy!BxUp$2b=}L#E>(`%X#%jDE!@wm`esi?*ZxD%gWwkVva>FdL zY7u-!=SNAJL}9UwzRM0O7+N;5WkZe~5~e^#LBc%Jl#EEBU-^iDA5jXHIUj5UJU;#@ z5EGc$f)My81W4IdhI^YGcDW#B^BM>@0yE2=R_<+Vya2^W88dFVLZOX?2&*79|F9^O zg@N%6$cW4qq*vM##8b1{WR8f**lew$_=K<2P5}4%1xoLDn!XyKS$+w70dK*?Z2@G03t=oals$#Fw&Df?P4x@gp;J`DWSG+NGa`%#>$M 
zYF$x8G$;)I5XAbwDyRw8M^yUW*NMPNWG0`akVf9*zavr@)1%wO?y;U;$bh_J*1G*P znSfX230%7#6onq7j!(0x3ey8Dh)%JYo-}%7jvj$GibC`H@?RHg@x*NVS@wEoVmC^% z+{v>+XC3yX1f9!QR1iIa_j%T`~`U>T8dg43Xq)4EwM34q6BFrk6df_iO8t&b#1VT0<<1> z#`*HSR^ssF)6-8hH`o(`>VW$=#pRi2=AkduRtcz?RhuF=ozoIk6}DEy(6Bcvxm?u= zQv=sMyiD6=p9eBN7}~NFh1s_5xW{f&kj`bMXY;MC6e3T=f2_6Ho~LDRnr%>+)?fel z^*^-D9x?L1e)fXGq24k2vHwyytSvtDVtD^b6z#3J&*d!a}eQ4dGE%wnb0K$O750FhfR>xH$Qfd4Ve%I`Sh4yVxLbDF9 zhm~==r(gxhq&D>o7KIV%*tgflcsQlk_4Z?h!_FP?eNa-J(3y1q6nj)*ess};)0WqE zMW62~ds1ZDh{IpF00N97l@0Ab!j^g*Mz@Nr$^qNe65A&b1}o}Gwml~b-HSL3JM_SD zw9bx*sCn7AIk9o4{k9XzkYt0M7Ysj*{7k7RU;af;E>`7i`;H(@8(loU#BLQ}!V9yD zIn(v`lr%KwV2us%lrdQ_9qMtIT*F+YjqtcEWP)s>K&UboQ?&6w2pxf?xCJA7m7cw0 z#)gwmZ?|=VVcH^b*I6jvS+j^t!KsQT?T;SrSW#{7C>%C{#EvHuJJv_9lI!jBUxH3g zU|~+QuR7?{hx5=k1z6JG?`^jz7-pVFzT$wXB(b&91}ZLm+M@ZZeU0CxmlG?_F0ztj z=E6Kg8fOb@1;tR$yNtwdiGaH6Yj>F4vOSmm>CX;3Hf%mMbGm)>%OP>1wjxox*sc%^YbQH14!`Js z2<#U;q<-B~AefpO!zjCT6M?q!n>k=l2!y4U*t%8L&MQQrHE*K8$p`!?K_+Q(>oR-S zbLPvK%O}4=gbypM!}f<5lr^zWO}Y_+^_8+ zPmtBJPA?e~93s``1jCHC06Jy$^Pan%j!t{wHmM0h%cY0o0sZ}MTyWP;$wg~zn-P#x# z=-g#HMATVlzXYv#HgB?Bo^1ls1w0OXDCmb0Znvn8b7|LghJoO z$_U63Vd{vS=IpT=LFQm!L0Vqx8cCgq5XSD-=s#-_rSn9CCt#UdjwDHH?5~PqWYXhB z{Sb>m-iM|2;MXA17@i&A;t9}7NhM5Fn5qx2J+k2NT6xmwp{2u6rFi)*B-2`3=DkK%#D%eNnNYz+Abg8ZAr< z_|!|_XqF&FaogcIv8Xe$cD?dGD@eZPFkPC+E@Fp0*92GsU44%Efp z08>RQjqh{1T_#A*th~u3)bbpVH$uz#*DWs+bfzi7?l4o$Pr*d<)th-ls_&4)!;89QrX!kOut6PCfV}}Q%PhUbXS0( zMJWHK*-=k=YUNt{y@z}CNU=XC9MTQf%kpty|Glx<9ImksJaxR47o;mgg+1`i^C^WH zftF3#2r*qPM8gYWu&#$WK;$yh?iC0#A6@0k+RB>jNfBu+KjQv*lWkKH=43u>+dTh~ z)mVVp<8hpArgL*RMomLI;R=uAE7QU2 z^*22Zz9JkYfzbCeo0gqy+-c)IwRT;E6mkR^EI84T@!(=%CP+S=;QUXsa!*gUFtuZA z1w(dUb`7_p%p5AH{@wPcUq*NHlP5`N}sTXeG-3 z;#C3+<>{S^Pd&NCx(jlq!9>T;JlO=jZ_adpVnLS87hvv?O~aZ(t49?e`DecoU`4pr>_GGI_)AZqC4}a3 zhhP|_;!SB%>2>0@LkV0zvZMmFITTJV_wzvF|I6 zE-^?gTlmQfcK%RwMX^Ci3f`F}wlrFnQ&5x7l*D$P6?q{vx*sw=WrB)#V7Mh#l4G9~qHPS;YW$Mi?eNU>>8BP|*@J?V zl)+WgG6X2;==L`&j?KaLVt!}d)pD+3g&t1}3uDDxOQwlH8+G`ct1u-Ez;cu=6<`F~ 
zDmO&@j&=5-C^RG3l@;m&E@6h!P2^^Ml*rO6jNI;C%VmLA=^S)0P_K2eC%4>~P`2d2 zwlAzg5#noJh?c2~b!+I01x2pw1!xPn8xpLSKJ;BQ`_g}{Cg}FBIIAluMdYmxrDR~j!ZJnv5>xGc%etXzs+g7i&NeWYM>s)SHWdd3R zIy^43=N+OkD+kVpFJzlY@EV0o61@-e3Q&JN!CNtgekRc#*DTZJWXMC9EP8@3c6C?6u#Hmrlr1tgD!hXakS%mw_b0*RhV`ZVjVQs+Jx8NxsLbTxregjAwZygzRe*KaN-aFaK<;knI+a=Y?Ih_qm2CVSnj0zUZrS>_dXwZU{uqW^+49BuNQcH9Bhd9v;A0!(`>D-}+~wOv58raQjNu2Yyig)N&3?ArpN zFNpA<9@o`VcI<4lG=(W6H;(@oOLxRT_Rp}Vg=h#U9k1j&Juru(zVd@m@)#Wt>;@rP#N8yG+?-5 z=~(YR-R-BHDRI`qG92$v=VMN}=?;-A75sVDnVEVoTv0wV!K2NqmQ)=`-NawVEv1;)3tu= zh-mc6RWJJ3KRr+MShC6fxFF82(Q*Xo z!B~{Z(35f$!*zI?&Gn=R@6~g&UFcAfIdJ@H1sU5(t=kIh=N@-@-5j|y`L$r^DA&16 zUis~hB6V(081$~<`Or>#wqS?T&IyL5r}V|m#=nK>`2e0+(lR+(U)C)UIHB&wC)zH% zO$6q23`XDjD;&m-1vVv8J(?n#Zvn_#@^jsrq$H}AB({`Wt0-X#QhIl{Ho?${(mdVI zx?dkQ`hoLffWjfb;k>Bq&6IVI?y8K15#}zilyL=MLTSY!ZZQH z|2yI4%oYr-9mOHfmI^zAQW_cXw@+XDT2CUy@kUTAQVrT2@mTtc92(L(6IHtb!eK)1;o0a;hlw3{ z+bpyD1T|D^)>*oTyRqqHD;(A=E)i0tI`^38)77(Egh*9cJt*(kC`=CAPrMEy;Fs}mO8u7 zVbp^~mM6%dO1_9iz29ayL8~13kJ&a;1X_zD%`@wEotinrR(TN!uyFmct%6!P&+vKN z?r|NB6(_3~ShHYg`{C8F!hYk?u=b?O^Oj)P90qf_ff;?q6QC}mGvK>|VdV|E%f8eX z6uL4NX7M)!!=X1)tDDL}aF<;vDp`1jV9y>XME@eCmnpP%vItZeDJ@}`;DBxQB;g4M zX@T=1ghq24Y_QMY1ZFBTCl}kd6sEk~oOpJcWbVH$LTJ+5leex6FHHVz>>RC z1ll`pa6cQaFx@@4KYFbZ^36p&t=agtwx4?b=$#aKDtpWc`o*bRHbaEa;?X(~HhO{# zRzOQF{M^??;2YU#bB0emuE@A|ERjitKPI!6!g{hkw~#vGhGKbSSE3QEO8KKjx6*D? 
zR2V+Fgqaw(VWQ9hOaoK@6^(Y-36V_P=A*mqx1JmyetC9M zfOel-GXLC^m1pNou~VL&mW-CwSo^C8zSL^;=)zX^o@maBlsa$ZIviDxeRNK8&Y&J&j&j$ZY2&fWxKr8!GsdT&L0=hirq54Ws6OBl$w{u z3HBKw{gRPg3iJa3>h9NE*lM?`+ljC|nI zvszIV4P1Jgr`V{2GP(Sm7uK?8qP?hasF&Z#EZ%wl?6GZo*4kGF5E~X|0lQiRY0_lf zrEXB1es9>B;iEp!ek4LzCu1>$U;~B2=nO$d6=f^2+edhjhnKT6?y)<)l*lPbJ!kuS zMeyECv35DM0=VjPu1-X__c-Z=SOk@zY)&>fO zWj`XWJ8+f=)N||cvXz!AK&x774z59C5W7C>JUgzB$~j$B2}U+%WU0|bWc9T2D^Hi5 z#~zLoVnJHR=(0P#=aE4z&^|FR)WQPbCo!I*%vVIw4}EKs>}G#okb%|tPR1Ez@Yik= zF>G-|ctpTMRIUvYEo?B@yvL$tc*h!1V6j+wK=+?R4$g)8Zbbn-^cvS%P62 z^YHyzWD7;0%dt`o_(n&VNTtWyg+g<2Px-9#qfN`Cu@!lb?}?wItaZDY2fS`J=GxKThRmq$go*lYJD& zSe)Fry<_$=J+=-Ig&q{e&~CFjp-}$qa0hL-rw=xa-mYo17Eu^GNe~>}Wp8_SME1Ep z{mCO)57NVb-ys7#6lU%Rf)vh)JF>y>u_0gv`lfE(?CT0s>>b5ifeWpRVEBked<~Sx zk2Y|Q&h;$Er!BCbDa_Ep0osRggQu<1%Rn-70aw>Wf??jzRyCcURb~yMFt%vI#_YGA zn#QHzZ!UsiA{a+_xhE9w=L|SEvi`Lg27&!4nQv@Ho?SVVWOOPg4^b(;21A6S@kD0V z#tBg)+hL|n7ob7W>@SoO+)U4m6&$%vw2i_v;^;21!r_hAI3Xt1qpUXmSp>!jX?%X8 z_MSud;nOff@x?nrbuC-rCVxX9^e8Ka=V2Qx0^|L5U6l<@My8=&8n?Sd2%|Ls78<|u z2SvGXkO^L9rN0}|>0u!j;7TkB4C}P-?v^wC_ZMF|ZV_;s{d3fKe zTkKQAfT8mUOY^XCr3iGsWoNG4pfG8f`nUOZboHoyV+UAIg+q6EibX6x+iv#MSXDu1 z$p$4ekEXQ?1!;QgHZJ3*ZVB zE|H%aPIAWoRKxQ2^11C5Po3Vny}hE$J}o+5nc*d73Tns3*j>%sAU|t;MWjU>Pz!CO zK={N)5=gjVfnPdP)Gz=ky;E$JAagGZJ=*nm*jiDT9t@tM!@apPzF|RP_Y}RMHi#02 zw=hcwq{AYF633Pl*c*b3cJFR|?YP3xZvat+x0C56Gq#tGAdWs_`2)T0_VRh{6?57v zX4~bWQ{3UTho>Iq;a89!S$^KZy(Ouo_JA-&mSX4Hek!2W%LfIbc)RRDQD_~XE9B%@ zq|b>!cVv_=ZZ8N?I40)xW%`ZhXEorZQmWR6Me$SHN;Vg1^z664i%O*j_qQ&wkB$V; z!92ErZzvoFBbJIyS?ehR6Rq2qw%#L8T0fCAQ*NkmhqjWjUPza&-EEx0G?C4RqtjBM z@M~l>l=5p_B??1OjQ$Ag(zXbPN@eaRW=BN`jV>(C$B0-Y5%5P*>1!lRw9~$RCs5y@ zviWGJwUupet5{?=h)Bm$`u6Q%w+V!DMc^Mx3H_%)=y+t^WRHE2r1a;z zf`s!AMuiNhQeZnzAY_P)qs0P+h753Vi@r*sK38&uT@*QBdB*M%k>{u5@J^KJQPJCBlT8-nhj`~eo30=;4vW4KuZ39Kwml+IZF|L( z_VV@ZVwa$>+d2=zaL2H`V$){zz-pM681)N+qPk%GacyDaA)NRa-Rrc z)bHmu{4>EY>IKD^$e7~MGMTnvhb9-2FflTL#(gH;(`hGS>e!IDV}1JM+ADp>ujcdS%98ktVjFZ_XPdRC%1lkxwTQS 
z-7LEQ#IQTKy(ls}IKoRhwD`~pD-{U!w7JJ})Lx*2jYKPJ3CDuueFR+c80= zAsx=nA3Z91^PXY}hvF^r3q0j1d-UjKDPuz+Y)N7f4;2iZOU+D$MKW51(1?4-*jOPZ z@a0*SreGNU?{)8Sz2ym0{lRTnmalMVSqhp_HAy%K`KA*NPgl)2wW!(73o`L6+F8h3 zN!sf2yF>oSz}r^)oFFL}2)sVS*DtoOh!h4j(TEw8YU?iwKSpVZ@jnz3SX6h57zQ>o zcS2qPB1WnBZWfiMqnTG$sv4g=Jqdl$P(Fbwj9@zztw7nn3htd|I3 zs9}q2v7y5LvyH4HB1SaYGfD1+_Zd|2M4$>bb;(gYA;|DZfN)D$qAJq0n<-)_I43qC z)8+}&4h$h73cAKifa$vn_QSXDxsW&;(J31#jP>revgWq3rz0y6O>Je1+sd{@pPuDN zQnZ!Nwk##1VkXaxjxj>pL=4l%rA2R#uZj|Wkcs+viF$14;(x8i-WHu@mCBRX-xUr6 z!-osT(>o%B_QMv=v(McN*6veQqn4B}x2r|bs`%fX<2xdT(eK9+zs+IChL!xaA%Yr& zNF4DlA*x8rgX2ic(>)e*T__-|7Gf2|(m3MQ34~(bZkS`g71W4JXcXDRnndGr`?H8F zW$}FIH^w3C6Zb(1ohU$3*{)3Dex*DycN3=9ViO!{LW1$9BB%`MeKc#Iv1!RXg~h3< zwp*B{z(=dX+{TL@ofVIx-=A*n!VGFlW48TS;V`JkLsH+T?uS61cx#?!Ul*VtsmS7= zZn$7bJTNB9y>X8}zkH7DjZgG=G<2Wj2RO$zdVFELG?%mfmmdhlwar@FHglt0D;T;C zYbeLMc(e`}$bsD=NK5$O?zVv*2WPAC2rtnNQ}HCvo$Mip=POs9t*)_D!O+f(O!-il zECMSd+AN}(C=n&B%YL%&h<0wOHF+_}f9pK>qQmyeG70GZQ84T##krFB#za|(y)P#oS@gHh#@*s1lk}wp4c6~UocFEUbN-m9FupRX>=Tot;uk|R*4YW zg<14m+bm2gSk&-yYlJAWACju5yqs|;MMd5#-xmzKX0*$*;_P!j4LSJoG~pLC-61dI zEBV?Yg~ouOQ{aZC!0k9U#!3Z|eo>I9!9AS@H)fczr`xG}A7Pg^JoM*R)s8w!RMaaL$qKCc)0n6E3CrHt}z{ z-PV~)x7#Te_hgC2)zOCkR3s=>YuTO*rs!H0)?~r3B}9vTxlQu~CmPGnR94xlWP)?6 z7hrPKsyzWq2qW@#K#)1-#;-3)v%Mh#^MPJi#EgC06Og`JS6NzlSAC?Tt#RQnGll5CI-6LWgj%=a;6TmAeZ?joK;m3eI z_+l&cY`rlqR^s74y}H;G&sCU(8j@{x)Z+%pVDAy45$JDc+CG7>{DKK5P4f{E{dcgJ z4?;OZ@Rn#kM~zz(jZKl4ll68ySpfELqvymuc2Y#D7@r9*{=WirEsIO-$5IK0_G1nQ z*W|*)ou5~4S1L@V;?!8aea9mPj_qfu9uY5SS!*={p#&UH6x*vFnGxp-_J#n173ufe zn?e*mY-m4w%VU_aQ6=^lq0sBLvUP1`$UE<}i_;)d|65y*ZnZ7~J_O=j>&f?rqEX%$ zPAWGm%rB?3bz4sBLYp8MTGd}~aOp|R8ZWUYgjpXr+=fg%04J>YSD`leW(!i)f{yKJ@yv@54&Ly;}>#2oDO=DtagKJjB>5mz~iO_a#xPN6WAWA)Ll^Q^I= zJXtXQd1qqI6ft{IC`=Pxs_Tw59ouCec#6p-<5|l+CxjH#oR4a6WV>mAXiUF9=GUWV zyx#5*k#hreccy&9h93XJ%#FJDv>v(QNxnI(cYQ zZcN@??Mgw-u|C5{Zr&_RFKAaG22ik>XCp)kdy{M!b0{9?nPA;;@{Zf1$rNnq(ua;H znmkk=ZHgzS%q%IVEfl5+2iIpxqjh&OIzNwdW|J^^;SAuEM1aoYey(Mo9d|?w>+b_0 
z%3=bL&sT6;jnITv%;BRN;3HX+0DwfO`?Rl#z@0;muGLS z9&YD6bz;Lx_c?Y|=7j=>=H}AP?~CB;p=IBA8zoHh5?iYhTMt;NAS0BU)lxUv9`i_q z2UpV42!__hQ2dM8Q=VF)0h4Tr5Ya^kD~fYsFm~5?_H&ix9M-K`kf|uhY;z=PnjyM2%c{+Zs{P!l ztx%LOgaaSJ0_$0Vj-*#Tijj13Qy z13Z)d+|zeVeYRt2jlCe~i-k8#!h}-GrAk}SOQY;Ncn^bR;d4geTQ!$O4X)}_9!MvTsttm+um0617?lBR2M3FYmBtPMPINp z#Mb-bNQRhy%k4F#2@O2IXv*1jEA8@JDD;Q*&F991_*F#585jwOTrINhB8FXbEXG;& z(sYYRp`4&wW%rB1_@-d%=s|@;%kuNEZ7@riI${NrVDv5(($7%yzD*z$#1#g$8LJV6 zmGI`YW97(>?y-HIq^)up{zW7%wqF$14(E{luhiM|4l|I#ZCzn+35FjMUN+%*Vtufj zqr+Lz;g2y95}nZ_zr;xXCNe+R!xi>V1!-qdsjR@=k_V!lTuctwZ31poJvbJXITEHc z#IVeR{Z2hS@#s{(zrB1#d&NTQuY6iA>oR#3%@tyh?i@YT9#fc3=qm&#zDf|0w{uIY za6@};RpYs)dOUcao!V#z6~!1~R*WAl4ioH8qA*6B4P$U6O6(mGC|Gam7v&QU)0LN& zo;Qil{-;G^LMGy|G&f#ipA`!bQZnv1J=cpvmQC@O|h=RREG)EVBxi= zr@n{df!!uR7c(PAfP<2Gar2XD>7F;RTd|;0noKZ&3!ptNOw)V!>19tUOe-d27e8W4 zJ)#5&$Sn#}$D|V35qMcB^l(CKe1Vwk5sA^ntdNxVjwuC`I`S^-@M6xC&$6o@hD@r1 zQwocaI{%>{)#b@e<1m5mUv^z&C3<6`X?kQ@6W@hcjOOpM$hQMO2?BF$gc36%NULBB zAsw+pC!nDQlMETxu_AP?Q4~xC>1D5!6iZbYEf*WN(@^|0c@Z1bf%z)#_% zD70oWAECb_V-a(d@v(?P>`3%~-u1F+Yi{geyS5-yH9C?-w;KhiidZZ@z@7e)2%+V} zB$G8fnE*2YT)rka0R}bZHMmA>5g{~=n|m|L<6Aw6{LMs;dh0}Bn%>-xyjF(*olR8l zN>o?a89~39XrFB6{{3Ik7@aI^(`4G!g#o4M$7B74=v&&soi;#W$}KF04{*CcX!(Tf zl5`s)Og-P@O568(%MheY))$|iTp_P@xo$&8%B|2d;KvSju1yuBq7H;I_j*)eCfWk& z)+!jfjLu}DVT&CUh0584W9cyCe(jmhEn8_HDGJlQ8A$YlDZ2OGp?0HE1UROp&VJw!UO>m5O*SxDV1E>1bL~zMd_JY8 z5#f-196pYI;@SHlqr@0idg^pGDcCo*DVdw=dcN%w4#UOM}kmk0&6zMO0?1trG8V=+-W`yG032<`#pwv~eQ%a$pTd z%VDZ$%>U>yV{L|qPnIpV{R&efwddL)fiS#nnJw7ilvI`p*i2~3XjHKfVglSTw z5m6gSuhhz!%Vg0hm~S7CYs6Fxy{Ze-hCAY~6&~guN#i`dB2L(GNX35|XXA9*+>sc%R;~*zQo6 zMh(8-Mk=T&Vy7>)Q9=~SQ`}-ynjR7iMUKftPaxf+8QcDNxX2Pz4`iOM&_VPruritP z4q3B&#WO|@%`5CLg6i1L8R4{OAA0V?iw>``8%i!ziyR)m2x00$f3V4J^T^xd^8J@( zBZcWe8a$O^8!bdH`yd=$;o-Aer`t@0Rm5T};%^Y3m-4u{eva*OC^4mSYA+43d|7w% z%zC>ZGsEVATP5a|*LoW$BE=QNFtnE@5Ec`>d=}GW5!Ag476BKCLX8+{M-I+52-3

(ZCDZLKg{dUsA=L2ba7+Z6(7NK_I_nS!g$&DNP`W<~ z4jY6Vd=+H!4Y^%3Etl#i^&uN1sMG2HW9vKsv#!ql-`c8eZEYP-?Y8cTCWIAjtxQFR z6mZnm+rAkjA=4}jR-3`zf^1|NR)CNM0s#WF4Oq1m0i~k1+Bb$U1Us#^RqOxr{hbG1 z@4f%l==nZpJ!ibeiQI>`}BG6|g2>4vDFdVQojoA$X@XPWzkt5KEeeN8G%ANJp4}+|Egj8g zFwOdS(&OVc*)J4!t<8pd>^?G_?kRSnqO^?29ObsiD&%xqC!%Vd#EEB}Z5E_pZYB86lP2!)<+A3#hyFf;>_Gq zo8s{!WfF9z00R@9zOhXzfWkO#19ot!R%fqy zNhBQAR*UH0drrvb>WS40872{EOh0z{6?SDMD7w#Jw;nkRKXaL(Yl0gTtEN_Owc9;B zVg$ay6^36P%g`sohHjw<(rpSybA2cq^+n-yUaTk$+m03S9gXoF3vG%hbj_>tc#v8s zK-ZtzTJ_G{CALhE8c%FlX)6?lQ=Eiw@4ZQgX&0%T*GJ;Nb=Ks>1Iu)5W`W@VYuiPn z$fLXFoUEH>tq$puvXV6WK#)?~=8fgz@8T*@_(=>xt-in>79mW95gaNf3&Jbk_!3(s zKr6YEmj+(-l;}vz!__idr>HPMC52;icpcj01)W^K;AG`8+wAbb@`FzuSZ=!n8Q^@J zd8V^Xd(JUT@;S=1-wM+Y@r{-CN+K?)`)%C=)~R~ylG969omw+VYtfp@(>2ZZs*=Jk zD|oeCT@COBPcAQH1iXuAe&i1h*(44o0g@!ez>yxIq^>qnIJD@UnUmjcsI($c=$d1D zDm&{Z+j!3a%gItjUmy(2#h6?DwI?O+UJkO)CEy^ofX@rUB~KK&_JV`h+e@d}ivkQz zPC+(9en=?%EcE8xVkbm^1F7+&F#fJXY8ixF)HyZ8h9Aus8Fs!xnA9{+AK4y)(NyRB z+lW6yW_Wx%r`5UK-q}5hWqRG`f(aSAo*O2jMq$Hz%TXAn`{)B&Bfzd2_2tOypET(* zZKkN9%MQ#!E~mv_5hcu~eXYF9dV-FsIUQANJF3>$A4Q>tRQ#8n@bIx^jrMm>Nz+bk zt(30!2cl3v_o{R3%i{vTksu={Kp81XU9FqKp~BRz*l2SY$MnmpY8xRK)}gW@o}p>t zk3@Cst^eIl#kip>+|UWPatC z@8d*0SOhrKlfB#s6$`_m^r~{QY=R&&shk_mn5_~DKZQuqik-7<_6&6Kg98WJLC?S% z$qUCTLX?i0VL{uPn7t|-)`?hI1_BzVM4&6_GxeKRP(lRfC6hm+?S73D_FkANTMjU@!bO$L5KO02W3`FHOsCme^u1wsY2Y+v3HNr>L;Byx4Y$0Bc#9 zc{qxBK`8Wa=4keMFA9elR?d0nupm89T*m$t@RpWKQCNwxhbXqh>+}vf6nfhRJ~flHOjX zisC=4ybL^jK9xvv`svnF4GV0sC=|>$UkX2~1Yx#dEIug}_7??to#pJrHRUUwfu0%O zzn}eH5Kf|-4({4f&|YA15#c15oWVh`{nK;BMyD0#jka%2ApyNfA@}yRoq&KUxiZxw5E1jrKj(X zjM{jGLyK^Y#odt}t8KDp3y=5+q*h~rx>ZDqj@LEmY`#a3!NEL1mR&Fv5Ll99NV`Fp z)@W;zX(>XX(}xz)1iRe{T3h*nY`5==5C$sptBc*oM4pF`)-1O)5$KYxy?R=;!r{2k zzO*WmT%X*&bZ7gr309*h*g|#U7MtUF+Nws_a)m<&-#5~>2+^3d%#yYioZ<+=e{Aen zWP*`-_Gm_96xTc-3kLRB44V-Fzib+m&?JuRs6bpT0_-Jo(Rs5+(meU}*=^%Rpte{B z$|%J)QIO)!C>=g!Gd;nHNp;89?XVSsVF}AA;GIe$G>1hCi*BS!&>+)A-<}SUTF11l zweKiQB~nyFrSfLMs1g~olQ-^LM4@YB1J05Jd@8SEH;YUTd${j;5-d_=*iZ)u%4gbr 
z0+il4v-)Vu3@Z_Y8{|fxlH_Rg3{>tJig@L=M;$~Ll23c?6KkG1*0{sYz~gIYoY>9s z=!MbSk~E#Vv3)9++MWgCAQXo7CEysmy+1EV|Kk}xm#wM5p1>g{hL(i}?f6HB)bJ@@e?)9mvz&PEMp-t`xxu(r)8rr2FVKJFM6!AFM( zVQO%~8O^t%%=33tZ|taf#@0JzSfnr6C>VYzJor9hTZQ5K1b5tjOvLfjv)YaeJLmHG zr(=%2Eh3|MWHai87tREPW)x#U@>0PtZ=%n3BS-x2i^i7dp#k<&1!>AC?zKy8fl%N) zUb{$UmX;^7V{ZZBP&OW0M56{S3CFN8d)JFlzuj$@%nFeR0Bq;Y>qbG9AH5$X3sLc# ztF5P}BNy+Nwq7vQ%<-z8YlG)K0Y>TbxYjuA@YP!U&@Hsr1SwZ8v2yGqkIY6ufjz=G zPZCLgum>uzD;z}nqCEw~PBG7FmgZ4uvT-#>AdJEwS&3UH95~9Th=f8Hi4qjN(O(HG zy*6IfqKGg9hO-wewl)z$r@M2~LVGKbNH&U!?IRJWAdoj+J9)?yhbY zq!~Q7jqUZZE(ejj z{hm;0Kc?U|TYq6{J%Zf?WJT^3WP?k7IsIWH9f_00M5ZO&wM^v3rO^sRpwNP>EL6-Y zh3G_VU1S!N7v`0_ZSX45X>ZoP^|nIcP%#f;G5f7B^9zgmF?-no$J$Burhr&tdEzeZ zEzc;iY5wfuxkQJa=Ub$^;7dhOKX3=4O(qA=t|CzeS0dUv-|3KMm9s4M7lf~Zy_;;5 z!tjw@hE1a!Psa<%WIn1Df*hI27O$;~R_cammS6-9S4WBsBUH>qP*x5{_I09#Ab}kC0Sc`~(C$7NoAo5!g=p4iZByN6Kkg=qq zt8TL+jta+=Fk{~of}y;eGW+!WkWIYezx6YMIsv*0E4|)JBvQ9;Uum}~9EJ%wucfFR z{UDKF0&jf;16zZ+Qz)?kBIpdH1D@(`~F5;)b^$7o^7Cz3;Lo6sAD7 zno@*B+g}8RR)#ihya27t#kzQw&G59e1qn=S3@;MFw;$^5iS%f-t@IMOs?M}m1?V=c zv+~esM;%6&8FTEUAe@|@*J__yK$v0JU+*eK{jkeuC71fJwkyko7w zn!-B{k+VU6!afoV<@UwGhy6#`=c}F@s?lWm;zDTjNk{eSj_NfWmFxyl=mCUTaoFx) z4AUSR2?-;Vc!zH_{vI^trZde z5VzwCTw=RCLrQWl`?ZIUjoV}U6sFt*((`V%!w#uNensCDq_0sk=ViV`ekuOuDX^xl ztIQ7^%IU+aYwYGlWC-0LHIhtBVOkH-n44T%U>W*uAsQO}h&Lb}agPY0CyNTp$J&pC zY1&vUDU4Pa?l6il@lwvE%^ySw zb&cSz-Z~QamFC}9+HqlZdQ)VA<IaTzzY|j&79u0F3T<#4s9(e%)oEpzE-<3 zk!F}25nV4FScnW*jJ2CYp?ST!cC%X)rhM6r$h59PVTjw-6;b32s*qk5=Ak9ho{-U9p$&lyW%xVLczbU;-PFS29mE8fwFpX4RwP@k`V_& zlys9vzB6UJB`F-ng{-S1VYr^6(JyFUmD&hTJRE~fMIJ7XjkU)W_Sq4uK+a2;0g(*| z*4S!6$~e&a@vT$ z^AT>z6x7cS4S9W<>|+r^wX(j;%-C($E`=7lc2q3A%AFp*&eBG6WrAsN=G@x=IG%C`vuq3qI}qB2|RnYoXf#-wbqzj|hx7++IwcP%45 zW0ibOFB{}x?N&+@W;XIt#F=HT5H(`*t#kY;`;DO3+rkOykU-#`!+#??%a=u=%yJF@ zogQ13md2qDwFmo)C}D+;H*7@8Bi_(pLzjmvr>~&1@=5fC6~s^@ zJ@nR+r|e6Dp+??5Gb|l|oBnk6ML?(@{i!JdwLFgD>g? 
zVfcJ*v1E}pIKnW!%pG~YTQJpeSYf8uElE9ef!xzNMWFr2fa%@v&w`$%Kdg5ih8xtV8#-;3Zs%q1*XRrZZqD0F`By9e5}3WwN8VWQq{_t+tr zIQf~vbbmev^$aT&3Pq2=r9+X`IDw!0c9c|h*>=x+dcrLGmBOLq0-n9FW&eUGbSZ>zK=4G8I6os)o z$vCffDCthIB}g;MC0uHM@koZcUl~7T!N4cC6!Um(_}CM0FIU5i{?uv`c#oDLJ#OcV z5C$ndo7c8dZVBvi5#hM3bZ?E_DnL_CR!y`Ng+uw80!Y55iUK=D#ci#lte;T$!Eo?c zV)qE^ZC<8}YUHUpGFvC5;U6iAR<=)SXrDCI$^@yUSC1YxMqxfbG-`xv8U042z<2NL ze8%s2&n_7i?3f&MNR|PdwEy8SUN@H;YWt@kXXK9R7H;7>svA3MuyQ-ezO*LL$#r-m zK#74ShJcuiyJ&sUg3`;k?+Z9}o>$ZCu*jM0#o7eMnh+Who;NZLx6}UWkfQEJ0pk4xOyVT>?X?uf zr|yTttVTgi!Ipy)IDyR+gflEkG@Uso@{618sZLIOva@liEp)iIrWhwUc1TdOG4i_@ zZ<=rKdx9fd&|GY^|2X7GJe9BM1?xypS0wkk!7fvnMyAV$b}x^}DJiqt6o#>rODc~w zR@*>9?Jkx^HiWobvLAU0+Bm#0-D(6WNrp6Sr6*@XE#xeV12=0DiCX*KHPQ|#tltB& zAm}yXL@t47l>NQa)8X5f@>k!bS>zQgFD6BD`%T2vbauFfq#YbFz z5t7e!B%yFDmLK2vl&x~e13Wh|woWi?ETYrHS}fL{vp(d=)RTUR4RVOUS{ApJ!v*0b zGC5f!Ky}AvG##(qaqQ_mHbE5iEmjX_DoiWUh^r{H1)lN9=GS&xtw-gtuAx;g1dsHU zw3Rn`Bp;Hb?zYYI!A`cd3Q+vsMqFnq%qkFh-a)4(y0|$aBJ&h6)5y5yo1UQzLAOrN z5Z#5!Y$~=c!u=hj>rKZL3&T`>KFvHRt z^NQKlMM$jn=w(SBv$qNn0{ejwHPZaSY~A|_eqyS~Zt71xgN}mPHri1Ul4K=T;fU+% zic*^<#83WBE{f|zxd$g6e3C1SYC+${Y-6{J=p)o|qUzT)h9CG8&k?rvHr3(r8RL#E z!8wp1Ef~Y0VUb611CHRgt#l~qD&!{`1YzRpuGXp`nDg7}vfa@OS^%bH}-)k!^RY5xQ(BkMj z-A9n>Bl{gc@{IByC#k_jmLm#7MAzT@sLdAgJqeaew^hk<>3UBxGP5|p#5M?pv3iI@ z`0IkY``^KieMVTZSYc;cry}U9Av{)}@Dg~F%`LHi358jbr?c{ZgefudaXiV+Z2-bi z=A&#M?ff$cn=~bMnNXN=n5<;aYFCO93M=;q}&x%Bf zS65T|O8bFeC<1%DQpvef6b3mR*VZ=BQ>$X`tOp6wpE59)YePH|-iKjq`5{5NkGpE= zD_cxBG+xdH3h?ujD-yrPD{s3J0|rS+SE310BU<2~L?Sgg_M{i2a*GR#Y@VkGRwCjp zD{bW`QmCp3Qx&sR?&8n`w>3&aJ1OL<4!V&YEr) zHWA4vADh3@zUkqCLwegaXXsNJLzuzl(_F)5Pihn6Eky~av9OdUnS6WD(|1ehY2_Xj zU5?GQ$qw5qC$v}A+g!nL|4vaYy0VhMzxI)cIc!vHm}#+sJow$@xg&E)&Gs9IECD(s zz9|^G6T^L{W-hc7qJ*C%7UkX*qIt)gtL-12l|H%)&4CXD89=OimY}9-7i|X9XF1O; z&at}$V8dl?dfFfn!mnjGSE2S(;n3m9Hz(U;4&(J(kj=Llf>itcA3tn!6=s2XB+V8o z7z#;F#-znkVYts6Q?Pd-{a9eDMPX>!DCjy5dr5Z069sQtTC)Bi3Ux;|@wV9;4#yVq zvn{Yc3uuk@!qmYkaEo;pQ0rM|3+?-!k79B$T8f1$ 
z5kfshn2xgtJa17BMwQYXjOBx7MTwNXC73ut7>1)g^fsI8X;Egww73A=oSwhHe&Z4C zI~OvWJQMYw-dSs%f^=rQX%ok4ZPZVD&fZul`r_6Q8@Zvwmhmhr5`{X+K9-K1AxJg7 zd-t`C3WsM{ectrMB^eOg>4lI*Z%FJWpA|*(rCw`2_NC9?76>HCYpcb0!B<3}n$rs- zQ@}R}hS_>#13U5wHYky!Z>$(`nx8s>ZwOxxHbP}0(688K<7ohUu)=}!!K8mtFciuC z6<)9HxG4N`$3AK&5^39)J!5|r4EwA|qhZhCEp|f-G+N#RvEi-?lk?#lte1k+;;sw( z3NrS?Vlv7!P>5;4Yjn)+7YGbs@`JCFJ?MGqh3?qh$ag4bCfHP%DHy&u=vI|hup?S4 zQm_GzEzT8(xA3>=@CweJ&)9Co!CwZ-052&_y_kh8vO|e-uwW*Gi1vy|Fdc7rM#@>o z1;cL;qx-VV&gSMrPlIS22G#8&!O$h)*<@_KU9deA1OG*3Y4#;SMj6RH>_~EJ_B9d0 zOdrBW)9x3h2Rdq&`a${$Q9>KhH$z;n)Fz7r?|J%o&JuvplT|IprYyB*9kKz}x324s zkhFa7oMIa!KvI~IA+QGp8HkZchF926g|Tcv$5l36ke(S8+3=bnM9=6_hWGQ$wpfHv`2AX1S9t>X7?WAdHN+Yb=p4z| zNt$SrZ4rg~N-DUd&$4DOp|m0kLjXDUYhhR~uE@263R8JbJ|Zplx@V;92QXK2N-+FF zv~|iXvp;(R&UCHF27MqHc*~D*C;p)@CCkmh1+9d`__Ld~ZwfM=hj$-dW!(kzMMN#A zVNIl!hzkS}Lv4&j4P!Y;6pB0Ay37_UOmTWYMErG`MlLOAm=cg(8Bf?gX>t2x zv>625!7EP$x=~>`!)G+EII$sn)~h35X=|)qM25a!4$4*h^4ZlM7YRNdK+Ny7hxb;M zT9;jcKBgkidMZr&9?C}Q{4N2Q#X~V~iZ<9GA_fvVcv4W+vYFquFzn}Tj)>H8Y}*QZ zN@30YwpAQs1eg%)JEyRL+iFdo5+5ekqHyT55m<>&>+^Os%VAU zY9~Yry9zDV*xhSHxGw{Z8tSdz|29b2!Z8z?Sy#kMEk8M)LU9!xNo5y@C?+OpCg&WP7%T+ zyH6Lbr#v~b)1cErVVd1>#~t>5A~#nP*em^51U?N&M}BrsNGfS8Q~)m&_YC)txJj5f%Vq%x+ zl=;a5#>cnR#<%RT3Ma<5JQG>r<4R?^&G3x$%n+{re&;aqps?0n5`?K-)}IcKYM+K% zvGw*J!O%?zP))G&ewDy$|9h@oFB~0D*;Z6DizYH#*VzpshmOKGJ^R0YBKY=4id@@R zxb;x@!$f&3s=MvS!Ze?Y@10m4suHB{N=nPiY?g!Gqj>|W7htBIe0nzjoR~h@8bzV2 z?m_#yz_tld(9vCcWOwke!-9%3u72!w!O$!$dX)qm`0W$ZHvBD5A8o-l+UI{wdg3EN z5FN`e5FwgNd;z~Kz(BI8lcDV3z3*!x`cJ8Tyu4vvBTD#wM-SlKMBFM8Tz1sV(TDR6 zK?XvC#?w8DABu$U=Q(5q@8Q5!?3AJ%pIJhhh(mY-VuSGXj@T=A=JMJ!74I# zg?4UR0Ns0Wt^E}Ns_Q>=gk7#MoKSuRH@H^_h7KBmmaJt9haC+<6IhAnd47y%k?=E8 zSQUblc6#Rqo2D?0=VDUcFEZ0@fheKC-g#`-mw4vW%a_QY&sITL&drO_qBf6oD?QJ9 zBvxZE#{Z@e?J1VAyuS#9Vo|l;Z0GF-(-(Zz@-jFfnK^-WaiZYD;C2y0L$i?>c}UQQ zscCzB^AxL0KYJ9>spWyp%v>&N ze=i8nS#qlO>3sm^0eUDr1eNN%eThiKXJ_V@*);-zB|MsJLh|QYkM5>X`=J0`#c7Wh z&Ipk>S|#hXL_VzeFwKJ6cBy5H#L&bZ&9u>;{nk5fx#bpnTuAHe&dT_XrSToxBKPPU 
zty=W(gFpq2&E*Uc^j&PO3JXnq%O+bOV(9(-3vqh3Oqf<5-Tm~*<*oLX!_Mt{I=3yg zcLhVcFb9;IIo942#jjFZBZ(=rwgZ(?Ik0kitgsuOB^3?gjis3_!USgcog#&bOA66+ zTJK31<$)X)kh-vci4p@RvCL*F>>S82 z42B*=Q3vkWa1y`m+XTZXj2hjxHP7~n5V+tiY`eYS>EXB^cXqD|hF@tK>Pr~l;GxUQ z;O8G*7}&Ybg*tLEMaUbU3Bq2m8*-x%jGvk`&c5&A&Zin}kizr@VvBohhyYw*?g2Hk zavSFfyCmOiixmz(h>n_h9X0DZYBqP&Otu!$sYWIr3v9QLmaO24vBma_;DV^Kwua@4$O)w&#-Bn)*fdlwfGo0nG7Oizgk#gUwD4-_kS5S{2qbL`}@z z^N18g$39e8pPjRJPe|FE^8&=cOQhE`Ay_n#9>cd3ML~z3LCo!P0lI`m2;+-(wIF5D zL-8$5@#d+R@3k964DU&8OU~HyM*q&bDT3icTR@J?mRQV7(x^ zPzCR~d3Iw0=N58v{6GS8?r*YD!jvv^!n~|hJ3Kb6>iEu$wn#8k7JbAUtkn}_qKKB^gv1KCWXUvymx4S z>nBK4q@FHgpD%h+1n2D~kE32~ne7ANGsBA)=w*=-{&fCr=e|f(SS;AX(Cc3mAygCJ zFxM^;^da6lHE8wPC7wp_m`F9H3I@*ek$}p<_OtaBjlM~^^?pDUO(Yzg*v|yQ4?f;7 zK3I$3->yjj3n1)Nh!EHrirvd!2!<{{uu|9F_M#{hi;CSWJ0uXscYyShU-8_d z%2;qS?RAIrOBROk-%q4QRXxQn+#ib4x4nrVcQQ(_n^{$tu*1#O_7=s3(o6MBGRU(=av{4cCP*}z zJ&}jTERUuUNw=h0o*?b#y=Ah^7x3%Ow*6AHFa?-( zJn_AdC}mio)Qjw8QK+P}tu@#F=m2L*Xk))b44+wUJ|Zu!>|$;fDGZrRGd<)mvPjTi zQw1q%RE(o{tAIM07vuvTk(1A-`H+BCSBw(L&pcXmm4O)j8oJvbm4Hf4Y_98^JJZ?) 
zX+!UPJ}(;Tw>@66HV5b1mk)$^_f*`|E){{Y@c$RW`pQ(4@Z(8R3{`iwR(7tM*EwTC z=gcPiz9;XBpQAezrUg7R$Lyy9n&cB=*t3yiu?BN zXPeK))(72xd)_m%SH^Ud0DNgbTx{X21K`CpwHAZP{^jXXVlDlkAWv^bIyQ><=EUC>~=+J>08% zS39XNz02>YZy!75Xmptla_-HA2h>>3Z3aq~Y=VM@>?j|~;zdnXB@gERGx z#Z;bY!_VYxUTP01OtA+RM|W$*o;_WDw95qo%ew8_c-lTpdihdVZ4=Ay{iTJ*9d{}JHilSq{_R8p~bz5XTW1}4r9lp?&JhoxCz2U|5 zt|&&4`-Df*?ZfbQ_fJ9ET-vs=+&&WUkIm`{I8lwSZVg0A<7nT^<>_Z%Aq|~(eAPVr zyuyqD&ljjVa1-?f5#X5i(4rI}az^Iqmsqi&|48KTdqQNnh^^vktMh_%K4W{a$99Uq zvX;inCXLto|C=IF@rXeK?YP2ljX4tRcMF6cW4n>t=^w7Q~0LOGrKJQkS3zG8|JPkzRrdNp&s!B?|t()B_9DWfJhGS1Bt)ng0 zLv;N#>z2gpnss;km=}d0x$NuivbiGARrseuG(=*tWxcLyx`FNg%IQR(Pi*?tlD=?{L$-cpdl_%YZCPuBg?523iQAWNvx^j_n{Q3(ajShxfEu`O=MYP0T`d9|&FH>!eRSuto*TUtcC8{p zk$kZ5V=GLNo%Q3bmzN@Y3Nln}eMQhDk(1IU8|G!n#c>EbDCMye!KRpn<8WNj6sDz^9DJm*m?wk3gZG68VI`PQZU+4XdMj z`esWKQPZV%S@gACX5E~EdphhGbyQc`gQ8GB2L5ueewrs3_|4UaZk;gG38@h#b+adq zH!b5P-hM3zfA1`)v^NwE69w4?<2_k?)6{rypZl)dRnc* zp)JFaySGikzIM7pDQilkIWUj4bie1{J15=#k%&t#WCLff2d4NHBW=AFovZ<(L`H@% zEQFCVLtJo6uSCOg=suBFxYCRp5bj6Lx z8bNg{LJuqgyKSS#M|CvY?;LU!GL_HB0YQeVckg>gjIqCX=4h%-;llQ>PS83OE9K(i z?;?a*#nC$6h)&*rL!r|!GJ;Wm6qbZxpb(i8*7*Xv!4t3@V`MWd)iGrj;x5Yu3bTlS zF=N0Mr__dtMBB?obA+g{p9zLaG0vzjQkE!ZKE(M&NVjBKc_NiAh*)38Bq->J@M>;j zMS(f$%FE}q1EVL&?r-tKmR@-ESLyw^EfqsJZeFF;9{Xi{yfNJf+S!M1Mk+Hcq(!NW#QsL0m zeGyue`g64>q4GY(Wmz`QlVoD~0tHwG%~p#8+v~9?fXM~{ zLL~c~(q1z+vY4^NUJykMZmNzq)!IQ%K%JPRm5lr=qR>BFR`0_Rm=MMHNa=2W^R)48 zbK+ZD^jdz?o1rjwcHc#iAw?f)A^%@s14Qs^!4-NaV9!%+cY7`*skDV0=CE_g+Rh#8 zE#C`)qhW#1F1$sUX~*MMtKE7gGCj5z zXZJQhn9hZ{LD)1LE=akY>Fh_IlqhVHjJC&xDW|M}Q|~O#hgl2E^x0Dm0ogQZu3-43 zjO9aAY`+tMHsZ~tZ5dC`FFD51HLpoo_PQ`-k1fPY(FuVtKO@)kbL=0Yz*Z7gQU9rM z*sYxT(r~Vu80em>c6=P~tLMf^8QNA_Hr9Fxp2>*24IcG4+tk|+Jeiz$BW*ZZ(96LS zK>Z#o5e4QVZH;9DG)*oBbpdM!MPRV^?T{ec5h3d2!WggU_6JW*ANDWE<+JmK2(;ww zT{ZT;!l5R19NN=g+zy4B3JVbByHr5mAXzwC!5a7X&lcLvqKBcAho>~_B?5Is9_JhF zcEP~(>6PP8ty*CDqEPDb?HFYA#B30?pPT29=I3&Qv`$bvKGtDQYuey#5s60X?KN^e zu!l+dyom51xkNo|*b5@iYRb9odV5VMu#kpl5>A4D6Gi<{yPjjK{YwyzdUDRb>@C8S 
zR^|AL$%>#1myA0G!S` zlx|Oo5`M-w*C`DcCWbOA7AY>&i5>W}Fp}O{Zfg=bv)eXeYfG3uX|F-DYj$v&Y`aAa zjn2p!H435e{i0|ppdVap?789#=Y%%BC)kf`?KdMk;u{e0`zwg`jXty+K0k) z2Yy7@J$>mYC=6LiPF8{3E?g)Q+#Z^P!0tBdBFMx*5(YOrJiDigKmn}TT<1I{q+g%* zgfhd~Q8mpbcoDo8bH=lUUId?oAThmG1fQ9xfM7ni-gCq-oX2AEE5R^H@NUbsf=5R? zyxI1#AUq+U!@U@LnM*nYfpKb%elORH5cV!T*sgkdS>M+_Een{#>=nWV-C4S-@qw&fgl4Uzrujd`}tId4F@iLegR>_U??Si2RX~lcj+dyFkk9&PYQXlq=$9RDrr!b?w z_i4WP^8{!qRdh>9viWCnVKXP+)(H8c(6luYl0pHr(sqj;x^!SZs=jzP^u{_{~(=dmi`dY_?x>>_38`QFzYbt-j}pkeC~kLQ56Uv{vV|SU-;|8OveuWq~up z8@ZxGF5uFWA@|`)hi6+KfiPt_`(ObHnW=%I(V=Cd3#~w5IwgkFmr|Q5!~%kZLF7=3 z$?~K~p*#AF>}!hz1FuD8h1~gW6oJA{E^M(a3e(7p0-kJs;}OSq?y+MEYxW?>hQh!{ zf}ywKO>^U$C)pR?_5{-;2WVe($i{OqXTVDYsg&~>D!T`(>}HQoD`P2$S(0E__i$5? zmx(d8UY_FUt|gciw!VTi69?j~llKY1a~6iUCV0%g^}29c;W0gXbhXV2N3|Zp^)6k1 zNCd5p)!RC1Rz-W09d=4Y+RV{?Nngn8J))F$zIPr>YB$;G~h z-7E@y6D=_-tc##O%(~mzWt_UTwZZmtU&X?CE(gmY3d5gt0vWElK%n-Hs<|E2*u&jq z!;}P$^>g9kk*iB?>;;0{7^zT8VBIUgAA=bjRQ#&P|TXNv@AN~{R^ z1Rk>Mh2aGSd23c!13aG*+G1M;>2A&fC4!jtRk>=lpUt&3Z_Bc4six^4694Z*-} ze9MfuWVEK++aiV;99>s!V!L|Y>5vdHz-ULMqq}`cMEDtgS0B4r;V@0nrf2>7h6r$^ zT}!52B}BW>vt41=d(^Gm6ZWzn3x!pqgkQ3yds6)U=2?LN&6E{c4gfVC*I)K)XA4mM z(Ut|az|(QM<4a&m93ES+sdLt9`;EgBtEb92$-9Cuvv)n;jH}-Pgr5f1JT@YTcuj&5 zol&=0S5ZRahUH+E#eN_H{T5$S8DG6RzGfm%D%M{__(|!})9&(c*Iubsps@QDs-GCI zUmMM?V0&*@??%4q z*vi@!PRQg=3Z2pdOY-EewKA69cW`$e?<4~qaP}&>Qd^W*a>z zTaHru)SpR8W4oegeUXPx&X{S}C>$8i$|-qgixmZAGHe zRd|ao&5<>X5+@WEVaYU|E#L+xu!7G=V&O^KECOH7`05Su)oUY-`8Cp*?-V`y**tB3 zQW*Ae!c=De5eOZbPLcMpu>Ue9MF+_OUbBBp%dcPYJ@D#JT5r5tu%&i4=q@QAb}SB2y%K_Om5I zfkg>(H%l7OgSC?m>gS;u&wa;W2wxVTLf4&fE8U0<=+T z9kR*tg};O0zZb$@Ur{&=?-0J?_H|)B+U?;BFS$-VYRuQks;6s8;ad6rnWcLU%jQ(>tB z`VH_A;0w^t;r>>ndpFoKXJRGkHQ&~GPE04}*;bFEdj@ltx69)Y^+0w^0A5ZopWjHN z8_|1+bqdlFj3e<0EVN^eDJDZp>UsYN@!*0A4$EH=hQZ^_vyQEqdKMznGaaiQUsWs( z%a$m|6`mBw(bz$>WI=u&dZ$A=V{5ccjEEFR@dG%2^-vlZy{2mqEAZ^BJ!_F}nIOn` zl$51u`_FP-=M?h3-MsPCrdE5_DUlw=T6{#=VNt@sU@;seA%r7Dg2$I8yfoeHy#VqH zc@zGY00Z17&#qPwMpAmE*mVlSJxM{KsVcLo?QK6Lx!0p?co8N92XA=Yi 
zFIWYo!kM1l@WTX{-p$Kr%UNfKjE4##Dhn#pWv#$%na6u*plDeM(IVE}4!xNsGT6=2U7E2YR zrQ9Fn^Y~*w5cWmCZoI7EuCKC(MWl(CaH>1ff_0K|5g1G4M+?W;(~j6kabT$xf-$Y- zhZfsz!O;B}XGMNhICOtBvd`G@1V#6w{3!n-ii*$Nx1!AcAw)MMv28r>Um-kt-+H@6 zVJf1-2B8i0JwcefB^fENq~vb)6OZT1yaqwpbV0i0mLGS0*s2nFQ@Y}PSTHczC$ls= zU>LwBBBzBliiKsYW}H}Csl{}sDD)C1nn>1XMMw2ydsak#o3h_>WX$IyQI9 z@mUk>9*0N)vRkuE!7xelF`XMY)1vSjy2vSnyXJwrHtV9%g8p+2DJF4hVb2>IR_yl|Q{;W4VuYdIe z;z^ud#(H^!0FBBVC4&X!)=e0miptPJwBABtz>`yNvzX_^!Xh>z1)|Qv@Z}O|;^vCt zQhK{36%~DVBVUVFsyNCW9?L7rwy~a30-g5o_Qek&Y0J5#C6Y644F3*v_`x{>ug141747WY zwC>oloz`EFzCVq_&6r2QDoQR^B}l7*cvRJe++0t zu_ev!9d9J066M;%jd{}eqmkmX5&PT%;?Sqh?CW1;jC z1=c&NQ9a#k4+>I|JkDaVS|~IGp}C&+*x6Y2H8xF1?Lcu#S`F2<%+sg0tw5p$qcy^T zG0rrS_0>*rtHnj9BU8Kgu#*bI06PJ7&M86q6>a}2`;(``=a9YY=}yj@+F8B9F8r8u zFdDWt%us&q9 z;pI*@-Jkx{y-{0Aa5aG$aUro#vs%BzYXzx}9e5eSikbFb9?!HLjw6zubZeH8gEZgqU6g-R<%zX}zH0e>%jtGXy6tA7B zH^#aztd0^ns5%vp#(AZa6 ztVSs8D(@}kN(AM|St5nsuO8acZH)+=h7i_7<~u0TCv6T-v#-u^1-g>J|1HkY&PB(F|g8y9?P*yJ_8}_ zFC)j`NO<%1kuNG;lRkD|bsY6$L3%3QQg71~h6#?&(`=!qqYcSD zyV(*!`ldWm4nE|lghMgHU_$f)D!FTQCCwX#Y$^O(6(!9rDQbrL*m2g#*JGvK;+( z0&`zkXs3m#ihR7;edk<||FTxk*ComgqQGX6j+3bZw63(Q0Fhi9kx0#h+bq246o?Ww zLeaG5u^9{Gnn99ooabc~N{3*pm&ITW;dN=3!x8Yoa@{5vX7WIZA@OP1CsJr;T3H#Z z)3YM*6ML|qy`mu9(X~giy`iv%K=Lh%?4+Q7k)*l5+hxt(6P<>kF0t6oJul>mM1rt4 zCg@W=wOd!aN0?4xjpj`tO9)1g+LHIz2AhybjuquHn=Bl9F)0;4TgkRmlxPbPJw+)~ zlaqA2CizT0IISXufgZsEo2CbdwnSM6mXmV3{X-N@oVHb2_D=yCOD`fqWFHH{T68xi z<<{=!hs4-qFR6=3QUI*J%u!CDAM#*IE?NHZd&r} zXCnAil=*_~mgAXe>%hKN*@fj+gwH{{C@_6r@Ba3o!eN&Bj?%9Be26~U+G@8e z3{&Zam=NeE5W0I9htGUZS1>Bi9`~sD+F9|nE%s}N*~|c4?>L0-iCk(QaLE1BbdC!j z3o@pAt8#g5J?9G`pLr?CNi2fj7DdA<>vp>K>h(X8^z4>u-xCGS5c#j+S32Mm2~v~0 z^+>TFiW2(s@XE;SN+W7#u}`E+>6X;3mt}mClAMf%j-P*$(yb@_JRu5|vB#3Fy^Vr& zQI}*sI`yASjfIcy*5nkq@RZQ&`m+?BxAi!`*0y^QC^P5S3j*QS-oA8RfE)?4vWH&rqCI0Dhz)tXv^^v@V+438_Uh(3;%&?%g$s4&PJh%wO z$3l_9gv3aBiIob66(Ld#!s?SP5-m)mA*ib?7G%idTh~Os9oN_&*tQj%X@28b zj_#a-El&IN7tfZ;w2 z^*yrg{P-Okk`~M-mU*kgXM!vE>38 
z5xOHnp<5q7j_Hqr>Q?jsHP0ICZBfDs#jX`A_3wHPNsI9=aPgOjho9rmRM~eG4*ay& zY|!5M3Q>X~9X4?sA|<8Y%D(R=MNn_&Q=9afpW>yQp4wsqJv;b8%N=MsBDG!3=JkPnenix9fL zFu%BM75jpS1e2&z@e*hgJqu$urlg1s`WzAHN{(@yTowz_6FqM3ZuN$vlXTg)M5H(j z8nyBRx=N6$Q*rr`qHq|n+`??GtnT*$5Iu^-kfwK3Pqj=DnWuYeN(<$4Ax}6AU3|+1 zDOUVK6dIRPQGwz7agJha1#)eQ5bVM5Aju zY8E2Ptg)ZxnfSg8*IpG-KZAMv5H{0)pS=hI!_Lr-D7ViGktg1=AfY4h z6%px$$W~;`Mmm&GBlj8i3x?lOQVL%S+~1^&q?xdGNqlW9{bISIFet+ZjeVB)XV>KaK}zo0J%vZ>7eA3Ixoc`__wIJU zOO}LqrX3Ory5XGc#d}6DFYtRsFb~JX@^bAR0oqD;3_|DkT|s9jxobDfCEMpNh7_hf z$FWMgKse%G2aPXzELB8W)Za;@9@=|EZ@XHU^4Wtlvn+n!GZ*0z7sX|dGlq!{u0CPY+kU%Vd$dWYVa=lOL*(qmJdv4B#0$20 zGFXpq9gob9{aO^-+CI6aebRJ$&LIi<@zU{DB0U?uI!x&PMU>DPdI+@l6Dg3{)eGaL zmxdB0;DNB})q-b#kI5;x%bw^VuvEk_Gh&%p zHr_L^Z!5ML9+xZsW?0lsBrU;>$5O$tirtjbD=9TOsasF0``^T#J-hy2Vk)nPQA&mg z@88y>T%qx~F$5$c92&xGl+(NIo}w?Z$yn94-wA6HPMl~jDohiyP&vbh=}|!jl}iq( zij~^A2H_Koc9oFAB_fiF0YUzWRNoSTQgkDTMIt_SXS#P}W_or3GEp&4fzXZ&fR8$x zfvd<8%MlDbm*gTao@skU302@QiuwGU=jeQT(XnY&_JSZkm4}Ad%bqz1<4Udls3-}h zyQie~ND9QzPeZUQZ+};!fX86?(moKTSK{^Ct;<(K0s6vh+3dH{cRP_*Ys|W*bVp}= zh=^ed6tfe~6ATmUj7iTjkwV)JWB2lKvrYV;1*IgX^yqF|y`WcT{eIi4YwSOQaFf4p zlYQp05YEJ;%H;~f^*ua0>$d4?VJ0b8Plc8u;N4ip({!2*@Mud+A5o*V4`M01VJJq8KVRAdiy|W7-qI^J0)_!`h4ftAaQ#Z zX{c=OsNNH8ho`&E(B6s-T;DB)h(W@PROBV3D!#hO9uOr=q@v6*mg5PSY#iaHv#1q& z0tBl3}ZaL)%#cgWOkLA`KIu#GVzVFS>WZ9`|bw5HZNHe+h(dVS8m` zd*#md%DM8@dj8j;GX=VFRe7Dlp#}_@hGSidXmIVeV`73-kfF_Zp+M+e$!u=fBfEOV zBKp$7vv{G6^%C&1%jVRo1*!1l_B}Q`krrDre2M3Z5axG$iwy1Af=^H+7Q5S)Bq)7l zj-%LChystJ+G_Li&_WOl9ap47%x{IoGuN+n!qZ9!f@9&;-yjZddL$)Vs={ud#88 zhL_^DI@}Zsgs#G6Co<>1^2`U8AA0h@a{Fx}zC5QGt&dkl2-S1bEHBCbEegE-5W~^m z48ezn+Ib36%h;UJF-sMoS<-5cyhS2w5U;DYVWJ1tl2UH#YWc$2Ph8V0=^C3Z6skJV zc&L%pV`UdF(GwV+d%L01{^YQuW|IaWpKn32 zUG=R{cWG=iN}tyV1(RW>5;GE79a zHu08?@s`F&FX*W#u*mLH6uceXG4)u@c>Ad!v!J-}5gYAM9|qTVPozXerwcNmT#2J`Pan+_!Ou61I=gUk*(n;86>$&pD-WNZ`;F3AXz4Py~{6RAc{z zTg7_2QIt@~5Y9e!dm_sao~z64P7&ba*g|EoA2~#jy79Y_shYDG$JjhuCCq8;)deUhESE_x{?O*IUn+uTaMQsR 
zJ7?$5T^~-0F!SwM<<;WLHfNMQ(ZPdfU%FaEQ*+*?uqsz&j6!%7PW^2 zeM`&Eq-(eCRv=R7&4C5wTI)7?p2&P!u{8-YD{e*lX0zvqo#tS)!`|@J?C`5GCVEoFP8@-H=w>!8F98t*ehV3i5=Nt}XKRF6`}#g$hZLl5 z_4dw>_B&SqsS73fQjSx0t1x|yb%aby79g#ZMEQsf_PCr39x8@7cm!i)He4WZT#~~x z_yeAi@fbqdRw!Nyx9`(3fzIrxEAT*Hk*;2$T zCyPQ?4U1JoO`GZXuC`Xp-r#LT82(dns=7hpuqKjMj-3|TbDo~sathM(?C-*GhKRX@ zFF)|?Ie4@DNa0X(Q?K`miz>_vq zAap)k2&TooB7}0eN|2VvNKX=79TsqupvV&t7;G>u`Z&(hURc#u4M`aHn3vJAuQVo5}uWs zr{#-4o4a*Su_qFl`{JN*t04UuE8-GlhXDP7m&`GvvG~yHX|vl_>5}z1kEUMUkW1|M z!f@tBNUHJGo=8z3WzkPv6H4bO6SH##;G(TH%`R3Lo_V^~J^e1bR0P^|WbJF?czO7` zVEB&SRaRPp^>Kc?*NMnlk@bjmQ&61}>ATBb;{Y!JQSUMqvyp=Qg!b=zX`0M%qF-F; zd0<}~;r%T(`%EcPYvA&;!PbZpW(41aTH7oPTe_}<%WZ;`$_D~n3d~bYwqJURvf`XV zuKJ(z0`|_qf;D^`5N1l{M~)+27oyqd4d_IkX@3^sj6pDNMfJcVFF95Au4gQcWub}o zPa*m)C8?MF%VTmf^t1ZHwZy<7*P%R_mixYqB9Kl{Lt?9_TkM1&KN8lg zg?JWSz$nSS#bwt)rgDzRs15K+ks<;WNacnnJ=EcPh@vj!IUrSG-_q?kAn&TpHb68g z>Zq#isG4r~2(s}E8xHK~M&dz{&zd2M3VqBL3SD*^=6N~Oq4{7rf^eg)A_vc1mL~#z zJXSXJ3kAa5?vsh*14KQttlYDp+sz@6_n_&XB_}hh&}KS_+;24rQ2kjsSlcZMO*_4^ z(%$uOeEkOdhr%>Trxv+JxjsNlhs=3(sa+sQu5PKR_8pIsI5omv-2~xx-==-L?H-TQ z1t$g~9u{Oq=j5mH*7Ud#h4ex>ty1Ce<4I0U>0wJeB^Ech_E_$ST@CYeg^(sdLv3VO zXS&si0<-))@4~nA_A?2ZTB3{QT6<0un9611+aZBa*Wj{rjOPo(RdHr621WiV6y_pb z#>)1o8z4~lLxpLv^s#o4LzW)>)~*$#WEP(ZcAbN~931%F;0a;soh4hWGy(T5FSIg2 zD#bJ&R>mK5l#6rvbps1|dM6;>@A=D=_SJ!d7N`{1C_77A+yU^LEZ z1!w@@2E==xO(ZRq%ktMfX-+wl?WBWp+(p~JJzw8qb?7}ed69_k*aHGo|I)Bj__*9Dn8+s0UXY=S8MaHkQLB3`rAiYk*i zwRO=;EBX#Rfwx3rqL}qSWOS|7d09-p$h3K*H6|$W)l2wh*=|u_Ilc{ddOPh^!O+8~ z8sY-&awhm%D6wlK^5efL_ng7iBFr4?4{ zd3qzdT&}Qwb&(^-$2`ZIQ|;#p!bkM%fWHizDMBbcOTwG)35Stx+nQmY?E<8Rdq&(l z)Xo(k4l@ZzquS>LVL0Bh+`i%A5%&$XZVFR*X<2Dmk@XXz1dLPV*bq;4dg?|iQJB)` zcG-xU?T}6#PPRD-h>2Tk5)5pP&{pcOa2WDP`$(Hg`?u$b%s8yIj}v(~s&2F|-a;Pu zO-}7;-&Po|isWCryFmDE6crZHXLg%MM|YtACBV;}4Zv>RnZi*^S*8^W(LSD__Ij~>V^TY9PO6NDj&P1|b@kaw)Ie+vZeBGbf8 z_NiM#+P3u^`Y#ip+@iv=eY<&#$`aNzDoC@kGjTWr!H23%7H0I>e%3~x=fxs~Zshe- z7cJDjQbbrOh_S(0D-dc&3Tpy0X16G`3yDC!h_>J1v1O}HY+7k29OBRM)D(n>1nCkM 
zm9_>;N&>-K22ziADNHf%%-nFgX^B1T5YbXSi99L@4`s!eHy$q#b_GbHmE@$M0=3c8 zW4c>A=@%XD--ztdHv7;alW9o*q4tqr=zF$id3HfEIQ$wV{F!4tM1ajqZf5X2Ba~Q| zC4#rxW{C(#W4XPtdI2AkEsNq?cH3rAXbv?FLdNa4pGdHFLwp@iMWRrr`kv=bsw zC#K7hO}?WmD6oh(jF=4&raVq@bv$U?C8*r9*+t2{z<-GV2PJ5_>N>2#GDQg+HNIA| z$b)lYQs#$%15^~;~RIOCy6XnVE;rTZA(N997Tp%>V@GbUb|V+ zCyfsA4!C!IiT&CkyT6rK-#91;d(rBN5l3uwh(eA0zRD}?UqWF72C*rvu+OAG2;4_k z#M}>ag}&X+6P+p3xq6~~(ZeYS=G~;QxREioc>$6vrB_cIp{Q?v;j8S6kk6v8C2b^A zD9vmGci2xnC)#7WI?ZwT+9YYjTah4)AFG^!;of5y{fsEo)VXK;$rU?oz7m9P8KjTy z5@GN59js+CdMfe0UwJ{1R2jZ&x7$miFoQ59JlkFqP`hwEbxdJ8gFW+n`tND`K!mWt z7|iG*fN39j8ZN-3E#6}1bR!LH4Mh-uPwDxBK4;}|Nqy{Z}+$3ZMC-jwsttG$R2hw1!W0n zwXLlea!D>3H)dgItw~s6iOAk7KtKo~tgzb#Y^^N@tqQf)8!-V2_WS*IX#2I=-}80O zJKX;NJ~W=!`|S5QpL5T654z3qC_gv`I_%l8Iwd;-M;x|Qt(2G17C}m5uajfXIv74| zfrSe~}>IB~%f2VGZ`IoSk9n~Ur0bG>6ZR1{2*5rsZ1y7kNK29MIU%@&^V zq8faiwoly^1xw^C2ZhXU2}YKL+kYD>Li|nU_gI=@cCScum~#xrwEMk)j@m~rEZ%Qv zhib)|MYd8f_TcVRa8SB0o8VrA+gb!;g=4vc#mTM+l&&*inO*eM8ZVZVKMTh16+fcr z1ma4x&*cKxZx&S8Xc3qjL3-C!XyuJ-jOgS==A+Z5c0wc#9kS;|HeQH%&yz>W<_N@g zm9lq9*jy19dnCqmYT=%e8T1P$2Co@h4>ZWa?4L!9eJ)Jp=%wN>P6^vul!Bz1 zds+la8`^)Eol-dVIEc*bwvYFNLetJn4>~X71xY*}hk8>Krr6dEt?TU(f!OAOY1zqG zB|`iib&y{-OXSbJ=pX`Y2Y6?VVqPLgSbZ{GQu?2$FiD zY!uxf;3IaZCUbC6<{+nY5yQu#wWq76FG#|t?gAXD!*9l>c$FIZc&j4xMIR2o*zB|I zc9E#}(wuddns(R&4o~k785RlBjC@@yuIDtw4}r z;OV7NJ3V_y1nTV9qr3e?VcLwmevv)wpa?xpdsHBf=fp&XV~LZOIre=)8h3Hs z9?MhM4>c0e#;zIL$0ASBQwBX0j-Q3DHL*}50;M61D6@?%*?26NmQS`{cxH)+z3yq* zaUb8sRnIrc8rhtXKjj|6Fd(?Bd@Qr?p!E|a4j{6t(%PMF6Gfv!y4sDcUbSZeN@X1# zk+sNO~I=Z`gn*mZ?unni&%XPdUv~0Ks%9e2cb*waEYq>h%YND4)8Eoms?`Dh~l4) z?1ggAu+w^q$RLbJ6qQfpU|3$F$KqQ>)wezEOm&$2Lqv$&HX zi&%J>wn1x!VY-f9PkKrOir8fQN+=HB@MN()El8`mG+tmG0n|`fYt)#K^Iqb0|t*BXa6~X6cmbP?o|7_ zJ({EoJSl3b7+&j4x@S$``Wo9QO6=RHiL3(w)0x7St#szJ7gWlKe{kUCl&u@N;r)m( z9}H9P-;7jaM0l=!R#85_vSi1au*15EsMR$-$TZtGM9`-iEM|=niVZ0!=e~Jb7Pr>I zl-0sfcSol~Hrz<#^FWSF0srxqV0*nhE}ht_v}E(Miz|#cE`Vi7d?xHEC14a8Ho2{M zNsy)Yhu!U6g9rwO?g*EM*k5#t`nqXLX47W-gebANv6C^TYFB2Hu&3QG^F5;QdntjB 
z=e9n#H~fDer;kr5BFw|4V8)QgniY1Hh;V}mh{#A+p~#JidhzcjOm$MD4L5kVIe`}4 z%}-gWjTAvY!a6uhp=qT}@HA(plS%gi=zdVM69L-ai=NvqHAH zxsyF+UluVg4wvexE*_X>*NH+A)YYR$7whVaGHZR2o91nY^3NEfp8)E z;D~DpYj)^LMfVT~Y`Q0b&w5F<`NFj4hVON~&lU*KRLmm7w#7p9M@n%Cc_B+ERkDe?ZPw*e=k^bwJ#0@!os}Nc>Ait40brA zuMeh|WNyYricS_C!#6irxuDl5qfgEDkW=){ZK{%QnpL9EuY3%**jI+d4DyhsqhpN? z6NM7tKQ3wPt}HCW^wHgdo!^39M2futl8?vU4@FKDg+8JJz{BI?+4!Lh8vBe!QQ{QG zDo2Gi2{S_4cJrTnUhcQ!PHd~<5)Q%BM|BnVTM=nm$CAeL)obj$LxjyXpvr$yka|(M z!3yTzJd$>`?5nWXJF`~L>6o_CuDU&zL~L)w5TcVG-y?P#nmy#{_A2@Ag6jfpdYpI$x4l z6XhGC#KGnI&OUrcEK>q!;R;l$DW4FLG^gaYk9an)-JR@rnfcx00I ziFpDpKoX1`a4`cD*kLSE4D1hn3h?dq^b9=3FBUPDRv`@z?81lw^SJyKT;~bGDRWh3 z{|ehKN^B5!>SgwVFe9j$&&o2#Ue6{tU5|v}J4cWJcI9)an`<8zf+slgSz*HjSlJV$ zGI1ZI%x)Jc7LV+8Z~K9#KT*@N+wK?eDa7p(@+X{daz%{8F>s`%gea21VJ{;0Dz--? z->3$sli|AaRZ*!Zzk+KL%x_;Y63h=n{|WXf1>@+1^WQWrsLZ5mMAtEE+tMI!jZ?8k zoQh?()mb-1#b%F`sk}bIah{G$ChZ%7%&&9X7uoF!dvmxoix&qrT2usC!euP>Jj1=* zESoHH93T{33y?dk@}j!%8ZlF0tvrY^^9eLlR92eMEma*)NlV$w<&+X z#(RoOHQS|KIMrcrtbNpK1>yAAyp%O5Ojk~CXBs{w5c}RXl^>u*_L3-Z%m(ZI?O&ce zt@p)GkB;SBSlMK^dRRIy{S}Tq%uh|U`voaAZo>5BKF1Se_N@~;Hw*etGm^^j7s6UZ zg==FgWxoC;q1cdAA%2QKc4x#%?5;}3_VdEb4dyh)29ddz-@=65B693e(7>#*5u(uf zo;|wRc!lBI>FwM)V#b0?$|>0llFntWJSK`b)W{u1okw#=9{yc#$2__eV;uIJ03Z4b zOZ0}zb5WZUu)|)KryI8`#*iX5NLOF{l)hgS8o{}k-A$66tbHn*=ys_5o_<*rm{HJD zmz!%ng!~)dHj8fy1qbUbV&qV%i~tSrLS8+J)$Q7|)s2@an#lWCWcKZ};Yvf_Cd$^p zNP#$Pug!hb#tFix%%K_fLxt%V2Eq{t<%FLnf^WVHk`Hq6Rbo}5>C=~&-YwV@&ERNT zH!+R2SW$7s8HmwKZU7>Q+IY4<2$#<)1uZE|Cvm7d7C>B%A1)vf!NZH~h9f;m!Ju7aNw0Y0`L*xf$A zvAtp0xg!hg*Pf$&>H+Ien1=95oT*=He-nholGD~t)ZaZC!SbCa_t@JWP5G61+yve@ zj^vv7!JQKho@|IHG_+sOzLro}48Xt%mwDA5)v*c-FbcD{Mu%*f<}JZ9)E0YI^wrl&5!rMEH2VUYDg9j zz+|R<&vRn~3E{SUAv)22%s4Aim`-%8n$tdij+F|=2BLf2vy092w8J?e{K?a1w$w;& z^fPypmiA#-5NF&DV8(2_Re%ZxkL_>66_%uzWPrE_VB^>nmhcgZphs9?;)^K};+N^W zNt>;p_^LhrBevXO@Gm&SRtQq|=sWJPbsojrN0QxSvqOe&5d^ zK^ps4vzWG_nExH(W+?QW zQfn>8r%Z{JibAJo+vxry?Qj;h%uL(Q1u2P(3VYV0&Q>owTQk)%g7HJmNy!^6$GyLb 
z2IpC$7Fd^i0KUy~WwxDINVMLf$NtD@C5j4P6D4whd+{9mrZ9s&k!_G1!C8R_l&e=` z+b=+0F|Elu{F`jv(>qv4Ph~N7SCaOMuyb!w5F7b>HU&EF1@?}6V{P2A)`!cED@1@L zL9cYJeZb+R1A8tmTos{MjZ8?wKB!0-)V_MQ@q5`hN71U`IrU19?8h}gFE52tF^@Uc zSI|#{5=4_KRG>vRJa^=|O*TSl==JGM2r}I(z^{4R)HUHHbw&7<*bx3Mx1?gp%GIwN z9a@oa9MvIQQ3n{A5KNlPvmYx#T!Gs4rDXSwoPJu8czI7L=8&;#z*Y9XJJ}ZXc(Rmra|#C%U%_X4H_AZFZ@Lm zI-7@tvi&ogJGW(bfxYXyF%lChDZ561*}*fpe!&v<1CMPvj1kfz0S(@IK9#jrD#-Zs z!>ri`g?%>=^*trP^|+|9DUwcR_i;`Xn$ofrXIrlc#6e-M?zT^U4@~VPL8*qPpld`R zHFAp0_E`bnUdq%^6d%7<)HoV)>3XBX3(J~18u!>h!C1o!bfWVf`7NmU_ zYw9m8S#W9NVOuOp{IFPv+}a#3z$>x^jn)OhW?}l?v3I(?;o;1I8hcaWIQHCxl%#@3 zq>p?bVr*kBl3L#sjE%}oaT)zxVe%kzo@v0jMYsX~k>?p*60VAV>`)Q|)$?tMpk{2% z{Gj^DIec0;=JI!+E%(HU!bHoiw5=6n_6$$y>U)n+tX%?y_LMLJn#?9WZ2Ln?U3h5W zFT}m}ycbBCmfaj>9Wn&k1^h`6E@n2b$DgyL4Z7S1g$_%Snk&Hj1gUaLVp7Wv_P#rW z;}1@0N?WH3o}ThAt>0t+`2*thOL}N(=HT+o!RpMxCIa>`(P32E)a5}Qg5BIYyHZ5z zIx~~|)SY&XAOq8_UthaUVWv+R4!5nF0F?$=F3F1x7J*hVPKjbRtD`(;j~+d3oWgV| z2pfePjzFO-r0G(Y@M!T2Lz26ytymOVfyCD?G+oO)LD)S%s@?N+QE1o2HLGlKXZS#^ ztyGw865muEUsXM4_WdHoImFFWc-3ELFNsJ6ZL>DEA?*2zL$W-IWuLPSF-}y*sqE?> z#>!+T=30fdb_CnC-w>kHGUd*$!oK4vGfhiuvcgnBuP|wB4|-I`@;Np|VVD;L+IDhj zUG0%Qc?X=KFs$JhbgOL?@WoeOsVphgi{{G!GUE*R&$EMmoRglQ3v|YjsIggxnT;X_q!BQ8tMQn9&{BG||5EDO!vH6u8l0GN`)eI_;%g|2< z(ad0nrp7+yk?De#^`&;LkVccexpYl$7KRbU=m*+2Jd)vTUBV~~7N(@&66i<{aLi0O~T5P!R_=%{IfvubL zGi>#u#41atBugriQ@94+?q$jR)9i)TD5%F_RF$^O3wE0RO^j)`Cw}$Nq4wG9ZNDI_ zLT0`&Wd}T~zDl(3cg_g+D7@nYZJyJLWi&2SEwb|p!>0;4$m=ma7NT_)=^!CEfFrfy zDJ@$|ZIuY*E5|y(uM~#gw;$0GZ*!jTiW;G5L4 z&u$Whu}CAl&O_48f^iNENf#j1af`=usle9RCS~LGg_lPHjgniupD2<>p*g14ZF^W4 zF0=JM%99sY8?b(s7g&-$K994+e9xl+q~Es5qt8yAEn_;r6ojwkrPxffS3PoE647eB z+h_BRO%^+*EoJ#>`)CdsVeB(IxmD^T5Wfcm`tq%>FbVtM&bq(CFn|UOK|E}bAZ_k^ zdBcrZt=%aiONqq!OUwA#h2!57jg}Q5$(3u9gg8L_0i_fLDG*(?=`8M#397(dtk6w5 z)3Qh(bYgq`u2&B{8qbgRKO)A#L8!4TJ;nav$%33@{8??b&n07l?JJL5SU%lPonI6& z_HZ;hz}8im9%5t0dMQjx>EZJ;thZ;^+&s7&B?9_zxf4ZnZMywL1e#KqTWHf1rfezLV+?Gb 
zU|gc{@E}S53D)TOY2WRMd{iP&c%E~M_P1}sD6_{8)WxH$-M?>QtdI;N5QZ=098(C$)EDWy;n_?`gtag)dymT0oEa$hP)P{&e--=Hx zZ`m=4o5E=#P!)BLNtSXMirH9OA|jpS7jYdwqVU@D&yLFZP)b$s*J#FE!&&nP0cibvEG763BRh-&PV?mU=7qG!Aa4Wh_e1z!olETNor`d;6u_!SEEuPOhY(I)+$OikIpwAr4V(6u#kEh6NYmjT`Zwbar(C9j1 ziiC9SJ0eqEcW#}&?>WM}Tw_Io@v9oHWpI{9hy8M`ZOTR$rWunh!jT^+b6{-bq4OysWB>*DqCe^Kjf|}>t=i1W(G&zsu zpw%OgW8Y=Jb%20lxxFMn2@*|Uhw-vQX49@SQ|-J%sj(cYv$q^-l_KvIlVS~BapLtJ zg_%r};CR03`Dyk!kJK6Ys~(Bfj=8Ao9kU)zKx1DD#AzEXf`7B?POd+>*6tQ1vb8@3 zb1YYw?xZU!Ev2x!y>CTk|7xogj1w7yVeGsf6(N@P0B*RLvRgcNSFWtLE6k_GogrEi zx(Q?BcT&Xo12N(rJ1^uLEtxuxk{fS{O0hxx8#TOnc11oE8q%?P)A^a(>^*|9&6p4< zvyTch9{9d2vrz)El?Zt820u*%dh}Y&0b8PQ{M>JA*{65$l3ZKi*@yQZGtAZqP!;c! z`<|<_O@g#{04_ptwkYHWeZK-s$rVvF#b%iiOYLNu2g_uMnwt`gJ? zms@NcZSUnO7dZ^O+B0#N!$IVWLXm&?SuL;|gxS%B&wf^>VPiN4Bkx@sqaruQr({^Z zpVCrcX>qy?t*ZHgk)cJXc<`3Nx4uwBTG3XusI7|kXnq(E>M{C+=gl;);V5HI3bK*8 z{(Aenf^dxYGB(@%BB0M)2no(tgo*VTA*$tv4#!V+v!Jhb(Gc)n>*q=6MM-(uzLtf( zZtiX07St@5yWIwPxMRUq8|ukAYBrv)nr_8{Gz5Wkj*8ep$hY#&6tg#7s9qSMW~WwR zt;P1R5=h%~&(X{tZX{)7E7+^6533A+t8K2*M3$G9^Xs@iixEUj*aqSFDYYJaX6LC| z+b>Gw{5Vd7MfS1?aj12B`d7~p#3({L-WCOpb?w=s*Z1v1MVFIzy{VghF$;V2LYB#4 zsmz+B?3)geU0BF{@F+p*4li7~?MR3~(Ix3A92d%k_}Skx@|XnTm}1@7A+;i_I<#C12^@HOivKLH&Mox3 zOyhp55uFD2#Rk|kh2hS`mcx1Y*2%TGBKY9#=YHs@?GYvZ0AgJiWs)g&(o3l1#`!(P zM8~1*hAGJp2~zlAL|v~`nC4({WYR<%=?TLR&ng=$80Q4{11M2+5=Kk8F2xzC#EZJP zt=U#8?9E!iH-#5yQ7Ak&B|(UxJXygnd&{DQ?!aJ3HZdic%avTLt1!3@A_G z;O+&FMck~wUKXG^ypZcCdqI$94?ra5j~>NJwvwIk`$~zT7cGYo=ly^Hon;9dI?!$w zimXo+M%U#dMe-laC_l#=wB0W%wPm(#l*POfF9qA#jMh(tXsDFlai+S<7Kz}uGSiqA z{D|>bH5DnR5EH-NZvA+mJgSs&*!oYLn4s}L^`c=2N)1nW(OtUslPFPNJ0n7D@t|ZW zDpAFDbr~f3$FF6LeN_NHoIloRKTw#yvRg-=tyqYh?A00&7C*q|rPLEotjw_o6^{Kz zM82qH3(v-=28v3*VOJXM=W0QkU#KT_KFddisevz>DX~U~x~HJ`a!O%3#z(|5FA2;S zL=l$`ojf3!8awStD4P?^2DF3!R}{^}R01K_kC(?HGW+%fsp5k+SQL87U1`d0^Jub< zE$2v6WOsOU0p|E^R5rTo$>jwsJd3lG{FXX?M(t;!h*@laS`?;v!PbW?ru{(#D#M_1 z($0G<`@ZQM77p9HDq^e@n(Vy-@pI+pm8(hipFKp2vv*KQIW9kL6#!QGn-Wb9Bho>!R*~1;`2PemR_|&S|V186E 
z_V%_Q;KwDmc@pXTaLGANkh1!s!v8&mwa(rtNaXOKbM{%G$n?B)3G?>`CrFG0qw{<1 z>mo#^@(xvEgR(f8!wuOmVaj7x-F27c3ek@-Sk+prFck;Cx>IeLAT8#=UuG*DpkGsN zdp*-R49fpr;aJ^JwitFvIP&h~9$EW1*<{yNLW>N#tlW>4TTy*Z)iS4KPVbwdsR@#T zUuM;UG~xN`=N^xMI2zs$gEZ4Uu2AoMO9WyozAM$JjlwJit(de_!TfG*^7Oxx>43ir zXji=EvEZL%Roj}Q)_n>@DhNidk6BNL)OQ=AL$?V=zKt4SLxp06_*cxg`-CYl_%_<@ z?v1AYTO!#!T#?14#h3!w)S0P?_wuE|nRB&m7m*Q3=OwK{VOqqoXBrd!r-BTA*is?I zXRV$#g*A>B6sCO~`>_Y~now+CQ6)#&8-El_YFo3iZS5SpNias^u>!puVSdJgvp{_J zVJmqaf33i?!Gm%A$5@qM95Qsr59uY*R%DC1TbGq*gyRfFs+6bEm%WTk)3!|0Q9CEd zn0D*VbJ6Rb8O`4!eYEfRF_H13m6#&m;#H!=D#PW;^wWFndeJC?O>d=j6Ns)U`FQnH zB#DV|R*Z4FmRc5%g+XGh89mvd%EWN-v$f>13#qXp!fmu>izX#)oFD@+5KqX33j5D! zva}e(HCE)&75tEwDy-3$ZpguK)SYd!M0CN+rf9GyOvOXCLNSp=Bk{vkU@N_>mVHxf zRcBck;tAqJa#g%mL5Pl%#$uknQfK)D z-kYt95S)q^Bb?lOdo-#~T#DV{i=>}CS60HaWGAJ0cBUBvpBpS+ z6x!++;6eB*5;cT4Jh3XJfN3)G&PLy+3KNN0y8Tuwz_#|}8k?_RoP`4s#!qosvcR*# z&`O@?Y^|_nqE41ITr|nzMZJiWhsBUGJ177LSesF)v%`Y)IXrGjIN_uSGzOD;7;7%I zR!`r$PalgtBM39wW>2>^h2b8C_1S%#6A~|QPc9|;Ci|yn4Ytva*!v!i$wwzJI&A}l zD5zhz9yUZ_wc3qOt(D)t+eM_+IO0s%{Q|MoN6=DEdFBgC57`3>N3J|j&fXnCv1*T= z#E8#OSY^AJu(jA^u!SPha%_F&m)bfZYQhX1-;)3gK_eIo_dJXT0tK+H;88>O+dKHmna{6?7C+Nq4|BQwk0_K&5y8fjJb>EYM1(6B zmaek56=vl7ea)_@B1of1o1ZLgsk1MM5Wllv?zWF`Y)?ydf!!;pPVd=<_nge0Cfy7r zvuQ?*8WbUmBW1E>Dds5b9!(oP9qWJ`ZapQ~d9#CLdR-8=2zP2BDt2mY8iR#p_L36E zcHwafA(?8hSj(y3z9bNPF*-4Yy~#)sXmzFu75iHIzMw`WS;l^Si%=~5(&E}n>!#V` zo}7Ehy(bUZZciY4!odKgo%94~0Hu&HIwMHs{gDK-*972f&z^nlO@(7CyY{*j4Ns3d zKVy}Bs3vAgO2+FG0XsGB#8XQxR}ru$Sl!+$ch!qTp^?{K`!#FwSUf=lQSOxO7bSiem}%Gj z?*uz3nwoz^#<%UX7tj@9_TMW^S0)C#WTn>ONtv-ddv?7HlMj@qc>aIGV=qjfhfg{C zmlrf@r{gz8iXSpRF$I)Jr-2da*)eqfjHl;@6qc9yCsW?;TgK7=J^tN>YlWn&E^C4ZD9A9Kl3TXwAHUrNs{H?LqM5MO1S)1h0<Q0Es+n5!6iCRv$;ue&LDpIm^7MFm2?FT}AmX8TiU01wobXPdC_~b?|gudZPv8Fqnn~(m2%%zfay0Fy7 z3elLe)l1J-&%xrm6^cSpJ-eZc>S@{gaK$l4h&qEQo;miYXQc^Tp-9ent0=K%tQMth zm#{{kUT6WeeO^La)pGfDIN<5gCCN>32sC~aIBw< zXeJ!C7Lll*-=0V9)ol79(jmSmOy6|;W@Z1o!yNrymzd3oS@~^FT2BGV1L?@Z!%o!w 
zMG1JheN9ocJ$U}(!EmTBJYhzR8)x?lG5Vm1SP4v6p$IIU@qH-Myj}s6_=`lRG3^_h z&eqJf2L%0d3wt^)ALffj$zu`tLu11;FsF2KSt&?GGUuQxKU?htdWR1zzLqCFJIa>G zUR)G{Gg#@avCqr}MEUn-!fB|%pd z7K0h3bt39lc@=9))W!-Hi0z0XVr@f`+$ulex#O)Q`~HJZ&$X9DgdgX(Olz-w%sw;^ zNcSYJ9xk}95`p++5gsGUY=j`Q>B7dH_8o=8+Ojyf8fmtqDEdl@*nrz)p;&)z38u_u z2*=(@G8<3ewn-#ahouy@*)wc+HcyEz0-niYWD$2;#xpR^qcA^JWbb~25@_Gqs(ELt zYV1E9p4+~?edS8KQji9XM*x3wj`0AnQ_Gi!By0UedeSAI89qXas4+{oiqU6$^$A1Nx%TU1-px%$qgiAF8qFkfv8 z90uRpYi*$*EgH`2bD2FWq?xquP*C99fWKNh?K!j`hw!?Bd)u>u^!3uN>Gltg zYMZsYZBDIy;!%%UQ)>ehj-xIuVH+;Y5VK{R!v+B-pCZ85@CLn5>UODY7FGc)UpXF- zlrDL$%@C&M3IyF&D-2J9ssVP5vD>z&ZR)1BsT=Iqo~&bOy*;lmwbFJr>?|{Hcw}EJ zpL}isk?4lk`< zc4_%y+u|^2ww2lAf^ioY#LbTc&F=%&lBH)Fw*_mw>I+Z$S!($bS4*R z(N8@_JnWco{Biq=!W<>;9An)TjPsRcnC*oP7bOk_tV>u*n11U$+8z*~6zPDMDolS7 z0GC7i8QFX%SKwObQIDqn?wEI3mW6jQI;$N93%=8BjUbh9Ha-3!SH55uHk2pDz-f04acbb8Sb+e;r*m|hgN zY}U=uR~+)-vJQi#Hwx0OTe|kR#qJh>#b_(m1&zg|2wFtfur=dQ5cUwwtoLP#pn{2P zR4cp){1zo_ssLY54lhL7MTcj-nIcjy7mO<8kgXJfHo>|S3LD!z)8&J{oEaYzQ9o66 z2cpZ$;J{&yb%;*YB`rtnf4v;8-!j|Q+S`JREIY!SQnrMB7J(UiT{t%!m6iNEMPXXw zlY|Xb{&<;EzFRaJ(^j=Y&cN;w6i=(x2Pa|ptdw+xJ>W- zW*YeLFrK)`){Cz7F6@=-h2qzTD(e&}td=VhG7-ZjRa5TxbIyruS$ zC4ktj0oovbT$sp_!|oVtHwb7U#=D8cwHq^AX4;J+`s&`Y569vAifp84@$(MCOigx| zD0F~zBC7IKh`=Dwh!d-9i4aS}jo+}19)X}ehr>D{IN293Xh#%|Z5x5(9y=xskAgB^ zo&C(CIe4V7Q*P&kY5J4|`^^6nhz#N!qs@b%JK#uk;JM0!ApHus0NXw?1$CslqQf5PiXlMJ@6&-sH*&TxZS@SL6Ge?BjOgA2} zi_i9lo=b1r41A44woo-}t5yiY_DOOK(;yJ1Uw^(NJ0c8|ZbriWwCBTmnTG2_D~O^k z6VOFMfa0Tqq`YfffBTfebSl9)z0!IKX`!D|tGlBlrnOp*tdJO%S2yn3v-aS9Ck}&JVT~q9H z3dWr}qKX_>Q+A_BvBSkl-Ei2qMbU(=I%Fdirp?fKj2I;Z$M`7?cWmJ}p5w_c?5MHF z6sDDDt7o?#Txx3^1{t_|`=y|`e;A3&UwgLk*pX~kIJRLd+ArL~y(LPVk(f`$FT_o& zAkdJU@)PTmHb{ti_+T#8F1HFn@*`Ine!Q02Vo}JiOa9Xhcs^O>IXX7&IJbDNZS;70 zp!@zts}q5)o!%Dg9_|gAPJ5k5HDi3iOInMU#L}oowPzg$6I!MAuGO((-oddO#WC_f zM4;ec#SfJm+474-SlkaIIiFX8I8C^bPS`+UxO8e3uQ7uKNZ7S!KO5Q^v8HvO{XhT? 
z@I+Y39sUD?aG+z;k#Gq!%}Pb_GX_dH5;%;$JsWJg7sUEqX>%QLY|FI<2izAvYCjdA z3VHfvNBY$@K-%@(m@)Ps?8Q%IeD+2P$=eQWL?O}h79lEcV+CTx6BCsjEx+%XWvWG< zGMg>eDIKeKy?Su_g{enuvM5wIUN%hFXDtzq6RLk9?#Dd?1CKs-^9$?jzeUk7?%S1p z0sizlh%_plTaJfc0g5=c{NVZZ^%2sAF*sIE^6r zkdv64wnu~*ckENxV+zN%4L}m1K``mksRa5_1x z4dT_JQ2M!j3+);YKlISI?Mn*BntJx@5@8&LOw)nP?kel*MZuTWdL2itr)SC3VS68Q zBwpIUf&J|v4_{hxD26Zp?&%(nZH3MMx8nGkIN3ZViq?2ZOkiAoUie#WEuzEe*43?x z?3sVdhOGi*=bjS<=1s(+QNmsnVytK-8#%=JcDoIAv|TSkWK-Mhb#1k)?G{n=)pqG) zJ+iUkNK$Wii=z7WB8Y+C>YX7_V-rQE;HRgCpPECK7J+tBwM>Q93i?NafdZQ&9P15t zIx}ppD6rvXtX1^27d(60)OuD$ds#59{bLbtMDW$xvL*bnx$)xeX8WTk@R_HQl)d2~ zkt!;*FKr}-#ze_)>nTVV^RW5T*Q1IPl7_Znf>APZa8Vgftn)E;oEyn5E`cpf9c1-W z#nO?Z4DrLrN2{ySj)_1&7{Fn7+N+MnbDNWAmxQSJ+J|nk&(;y9wOA2L+vfygDaGaG zIpub}2=FHumEVesI_n_{dHdltae%^_GUY}5ynI)P-i>Fk3}Y60Bn=of)RqgylCet4 zlk+wwFp2epJKdT@pdq)(M)_L;)Y2Vadmr8uvv%ngJpT2z>qVd<7$u2>n+4;T>h{ih zc(it1$OiOs$i7xA?CYsnwAiL4?d!t-cEus*Re?so1{ScPv2k7iy3os+>J@@SVu*uX z?=OXDH7ojdd(~6Y<#buObvVq_t&;WT|JWRpqHtJX9~FQf;lt3)?l!Mn+e0&$LpJ0rZNvAdmq zX12YenE3S|XD{2ic1a{!5)5);k$Z#vLlm0I$sDmldsC2!aBgL-y`?bKa9+=`>$k*o z!OOukyG1Z|Nw#yiYVPI<+UC@^&6#KSIix=&<+yuvh)uy)kF2r+L8|H5t(O%mtga%> zib-N2h5^@>!EDJ)Vf~0n1HttW>=6;;?7o9bIYdrGpcj0dM_Z5CU%e12ADftn7f(+g zr5rE{^|q%EHqz$VHTAJl(&GcJk9|QnuEw&;e`2j{{(VtIO(98hHSz`BAOif6dWiMN z7J-^)zV#E149e^~f)JM7B?`lFdK02LGW$BiDx5@t;dYG+|AiuH0OlOFB??nPewsU; z7d;|19>EO{kL4HnimfDyKT~1+xgO@$aI~m=W=K(>8pD}|JMbTh29r+iIC=Q#stBp+ zmBkA!Us2RlkVHMIOn`{4OtmQ=>*UfK#%=Cb8aAbM1uZJ-21E zJ*P1JV2j45Zf68(!~}Fl%j{1=6ogFd0sF`{0JTiQV_^Yafc{HZ9l!)Wy3}*5y9jC$ zCbfGgOtBnRmSg+i+n(0nc#3SS2=Q~`C;Ry0Xq%7o>|L;;d$*^j_c&2&!R4R`l*dJf zG_4;Hq+y)Hc3Y*#wbgQGy~AdCUS`38@LD|A5pK5&kve%%v^Yt^PZ%N{e6oM=f--gMIi1=of^bK6)L0FQ z>F&MhpD^L=-V3xCfefuX~s- z+kb42Y5I2WYoAn@Vkzf%W1-z77#m50Do`R!S{Kpc7a!KfMY7G-PsBJ$$L8Y={u{#d zM34G<3};>r6G;=OS05XtFxB7-ykpf`yGKxr*~8CGy#OOvtYyuP2~k@(`RcMtf;Fc+ zdoYpGXfJs72juSH6#-gOUcf5NskXx-3(^SPUl3s2I0f$F=V!OQ?WyT+u5LX4mBnZe z723OY#Ok~J+duXm5$VbJ@e}M?g=u=eq$auuL@ta^viOV>78kLZSP{fU%8nZbG>n*^1 
z{7#+?SCC3OnvU6no(n#HCz;|p*(yb$8BmZ`TPQ@0h~zceS_d+nZd*KEN%=&+qca|b zbYz*mZ)eO@o>w-<+tq4v4O(=+3S~aw|*aOgs3p1ZE8cfdceP&mvKw?TdY7~ac)09 zBnPZq(9h&jmwhalE}SjNpSg;PA9fCo)^t3a?**M(d(4(840m!8{B$fAh?A)G$g^AR zR~}iB;x+GCkCdY*#9!M<71&0WMN%rrL4%?d>+GlUIi-Pn&Jtb||HpcLU zlH5as`IZzGz?5yrM*IiSD}cquInWp+~G7>9eEls)TN z&n@0|ZtpZZ>rjg-v+lnIN^HCwHE~sFkStJlA=s*d0*g6s59X}wA z-1625(!V?m6>U=356qaH(PE8#?|M-qPZ$bLAJ*VSu>K8`#E zzb}go)2p3LO@sYh;n=QdNr(Ra+EdF8 z6`I6CR0Kc9p(!0;EP8yJS=Hvzr+4aY**XMcG}2|e>`jmU-Byf7O}DoN=@il@)9qt> z0C5}=lge4AKP>{WeD?F~IssbQv-b@)NMYJAHX39sbch@&CLHY{LEWQBXr?I`WD9aL zHD1E4J$uH3(|fEk=b|25k?B0E5erYteH=(ta)qJ>jIEqciu@UP4FR zs*bgX?6-oh7}@F913SqaHoEO4M$OSR67OJ9t$vG*5IuggIL^rIoNYx;X`8w|h#lZF zSd_>T9=tH(ST6$O3inGNusuR?EbhRR=Tm|{se2MSy9m-M?y0$`9wZc- zF*=PhqSRl9XER)!Ki7UBNUQSjEjU48v6?MBZ^x{M<)ZkavTwSsRXN#JcnPl_J=R{k z)gJN^E-hbV8$5gt<$;|F)09M+e&m`Q<&;PqGa%e@c3#}+0$F!x+Gw(U+^58m**Fg8 zM&OVLFhQG3egO2`@}fww<1MRTlf!T=*=(-}irEJn?7tO;*+{(R*&70pCGqCqbc0>J zKNiLHD3_#mv!H(|YnjEvgkx*CXlIK!T7<}zaC3)H#7eu<3t>Lwl~1yJJ(F}d*{4pm z$s*9AUR{y5^vIC50;de~M4-w0PjNf(9VoJVSkCn@`jDA_rSDXOFPx@hzi zedMeN@*5&jLl732YyT9C>>4S#vA2X_myQgQD7P;(L!hc)ld4KC7j6{AtKvsPav#=5 zH2TmN-(vSEO!x5(f%xzJLVUJuRn@@)#cp>8JI(2EA#zL5i0* z+!@TPCR?hQ*wld%6kg?c3~%fbC0HX&8!l~_YtJj}Q{NRXw%UgeTrL_7h+XoB71Xg0 z*yj|Z5#xos)V}HnF-3~)ArvPN?#8)W8J;B|rdc$_5+X!~$W$xBnWdI5nwEw7`b_=N zOnrl;o$}h^WtWy!;j`AtI@8F}crbcYAqrjSfmZAkg?%$CDJT4uNv(&!R;?&FffrO% z?Rmivj=g0`%IB4lhsOp_1cSSE-!jwI1>|pzi!s>4YB0u(mqrv0TT-ClLqye8= z7go;(%ZQ@y7t_;7qs`^F?0p9zF^kv^XfyCZhd~Kytz9ih@v<>jV#|c6I0sRKbqdEZ z#8hC?8a*ZEW;uU7Ata_r?TY8;X8VOGah;!#sI-GCGY9wD1czZ*)?J452S+%TtJv7Svb63enY)Wbi!3O`If1Pg zrdGb~V|-HwSvWRjlcdVWry>p}i4jpkPzh-<+`wW|N4AM=i&E1fUCEHSNzj)9S$)=0 za5P)ssoH0kN6hf|L|)*8;@}7ON=Gc?2}>v7O^5x{o5D1X{tUzmq2#PTc?1$HffU}v zkJ)ELi4)>G{2pE>sEM54a-`6@3Q<+t+IqWHVf}6-xyZjq02u>(N9Lh5GXm-GXw~n2 zMKH^`*ad}s=IuGT|Kwo|Gkp0d zw8)te!Bvo5FA5zlXj#v@hd}4R^WhBl$Wv?3sv6~>M26TZ1$=ocL3-967J-rx6w2k0 z#iy}F6xvdZ;KZ*yoO!&#&Ui|V&IW!6j#!&0aIdu~JSzXULlkW?yK3wof_}nB&hMF( 
zY#FNTeaE17HXt!M$8HnBCzYNCn_2slqA|{ri9!HUhz4c0Z?xSW!}B_~7cKvm7a!i5 zl6Krvw;ZjsClro+;b|JTmLAu$Z%=ztVV`V=PtBug9iq@39u~3fXMYuh!zBrx!@ly< z2v~VGSPucloV~eJ&?F(!Q>54nWEFVRV6_XhSOW(1$`SJI?WLf)Q)X2Hv2l3r$g$N~ zd~(6bgSOFe%faLG671`bdge(fM6jO_fJ2jz3e2@%3(;~6tw;vw51x~ex#WA{c+g%P?|*jxUG&*E`eHH)iS=+dl+plYGb8n*x;8K4*jd(<880 zRAgUiiK(91#jVHn0-;%WCdH#yqunY>?CBK5_9ojv5i}O{hi$yVbSkq=$_bY1QT_Np z9#fdgTUWHMx5ouO_kmC8+LUl)PSJ^Ye%_ zFTafqRv+_|8=iPByXr zd_gn{g~ub2Ou0@lj$3h}g60kN_@FvF&xQ-ahi`FJZX+DDu7Q8W0<>C#mEBY~T9xSZ z?`-vgvo%f)PGBFjM15ye)2O^6Z zhX_C;<7EF7qxQF>m9!ioN@4rH z!zK#o7kG1hkOIc#Jf=fMjJ`p`5w+9k8NX2BuB*u&@iLISD7D7~sH$I|epaV2t&q44 zmJhZF!k=*c7!E%>odPGvL>;p~Whr`NN66-HqQs#bEK90-&wNWXT8-Lvj(z+Wu~_D0 zkn&C1HKGuGb5E3vZxM*2bsM&D3JQ4dPE0Jb?}*OWoSGHiV(iDBi(lWPRxJRRaACkT z%Of6@NIjUadVx5N|JYP@Y3eNdKT+Z^%kC*F=Px19fR-H?YWSjnJ~tj@%B`1BY!2s* z+(~%Z;hh;9$f9e+cwc3X6?h3clUT8U#swjxrkYGs9UkDVM#Q)rj=<-t&IZ#&g7?Q8 z@32~hxk!z6Gm*kQ#QCho=87DfIsohDN!uub>XjB~vuziQo#QDmnX0g6&%uxO@^}j+ zLY!;7Zel^p6X-JwPlYdg86B&tc=SIvtJYrkB*kerT>Z8PFz5j()_mZ}%LSe}ih=ek zg^8r=2*gD_yp`?32(#sjNDrRdgoDqGR^(|qJ@+i*a?2)rnzq_yZF4u; z!-Dj3Vk%iOp1nQBa76K&vT8OuyH3saE&Xc6!~qM=l^VjavBB!R4Cd~o8FT;GxXPXv zCGz&Wd1*TK(O<=^L9Zd+z00DqeOz>^5889Vbiq}kXxZe&_A?5zy$+&^`U&7W6L0Nv zieMsf5ae#*3touoXW?JG+y;0dJ@NE6N?~|dk--1P-2$;;gwq5-jX!Pj(p5~5a3+);~n%Q<}v*mbH z|3YrUi)@NR%#^V3+H^s6Vsmp?hj;41W2T5+!`g85cf{6;Mw=v3^5R@>t9N@rysb&L z;weF@yzYjZ>~9MH*9Wd*zaepGez=%eGHj(Yfqo?3dy)i^G2FwH+lPe7JaE7u`%i`G z3O7S|oU~DbjAR#CO}gLXn23{zo8eG4o{l z4ZhpP2~i&ojb)jMZ_9ox5>>#H;EF~!ueLzM$m%>U!+3_DU{8q#2ZC?8mDZk3!u>L) z7???a@+9p?Hnbm}Z`ZcQ91Kn^r-17O;Tcuq&dK8t(%+4n%LfEwM~k>ak|VkzEAuqt zqzy93b~vOf`S?$55Tv*~-7_8%h&(~~BHvoFI8n;y`M;jGb=|3Dt?TR!hqxWmG4CCx zF6VAt-#XRaD@@T?MZgt+K&NDOV-Unz6|~8B+O?hz6MoIsTY%Zp4;lVDJfe`wBY<)3 z$EKgH=1IuY<)E*fRyh9HjKEkI53n0VVT4Ar#Nqrx0-C$Bi86KbWvqHnmQt81DM2vt zkSNpM+3I!LvQ=*hvX}GiJUM9os-1!Pd@>8*s*7g4N@51NG7<9-E@f!79}n~0}`cq=)HInW`rd58}?X<7tp(R zUz@HlJRx=>cF1Q5@+Gi^s>ruTJd$Ec(EeQ@2z$;}*LPH{K3l!bTQV4g(+*Y4A#9NK>Ojd4ETNdw?Jk@Fd+}K7snqKriR_7s95Vn 
zWVda&FgdZhsH=lJ1!JcMaF;M4ZTEPB^HtUD&9#;gWbXXCK+XhDa&c$X#r3N!CtC)4 z(?*t$$(|rLIT;aOdr*+3PbgqvnB#Gb`}KGx+%AYI=b$0ED)>S}zf!kJlQxI02Te;Le@B-mzYmlFC z{g~@*cdq{0Rf>z{j0%qmUlxIKxKxw4!1aQ0LDVKb6}78Ggd<}y_fdojRjG9mC4Q@& zHiv>2!-1ZXp0b~imiN#srLA^lTkT%^fhaV!<=_;{@$eK56w?%@X~DD8UJM${5+PQC zL^k?@PtTCrn#~qHemhcN+%(hHdsd_b*Kr=%W_z9V^1$MPRHL2PAF&CB!$6fxqz6u=H%r%(K-qFd1X-KTVW`#%{L{ zDHz*`oFCKc2puNSjhHNaJQsoJzML15omCDe_Lo3 z4tYhBv-F<`(%^F&8tvx_Q!8hD77hNxy^%dFBE2Y(`Ixr_7!20tvPyf;??Ch^QC=oP zv-W9W8i=OAVk|o@wmu@nImEdsxWlF|hwM(#DU+#@Ye@mMuW5&Df6nQI!Qini@T3U9 zPP8ooaTM9w72AGcnuC-Yk_Jx+srt>Fh&2zN7J>c*^|8kAe6i177LmFUDyg;e0HU!79j$QW$q?)U+I^mqlO@->KNO;ymugpCYFcFv3c~lXy1H2@Kq+^P z9A=voj-{lqRbrckBg2qLPp?b zbF;Qu(jf(Q@7mpR1sVCLAK?(UpZD3l_JF65nU?)Xa0tR;d=sT@iO03gLZ57_Z4!)) z305v=*sn#=$Ab~FGVAcndaNJcfEW$b7|uES{8y&ib7L^0j)+e!8j}X zm!vp6C+t>{7znXL7BHA-`Bot!EqrnAi;r2ggVqCRrp*g~zAoSV1=Owvv{^MK;$7 z>^xAaVEtYu0?eaHV@BIqvSa()KHD!kWj>IeXeSho4318F?O9P6jR|=z zyX}Q+bl#+70ag{?5`}5~V1@lF8`-)@Rx0fiZA8-iE?tBAS|7Vo1m|A@?UL(+V|DCS z8=u)|Lqvg32-swX!c>N~cFpOn_KsJ8I#kH)`Yv5pN)`_*V>9jBilXR2!|t_F3dcd2kSgH- zF-3$pyE1j#<5RiSdXDnSv@P~v7sWyp64S8l(bPI&{oM~p;dmTU>wfD zsiGgZQZZNig;*NFDyeqv=&Xx({y&YN$AG?Rz}4e}%m*3ZHMLsUg#}I2_RzV-w!K z)E>({cVvN_&R-`;rz%?37g=`!Mltx@!8&4iF6bkoBs1{;d#E;Z@JQz1mh9#I*A*Mv zHAK>oqlIZU?Yg*Ro&8Xd;wNYmT9%FL-ld=YND!8d!}i8@Plu7ql zakP_DuH7OSry>$u(s%AE3aMo|)Vg`<@PN~3y#(XL#>r}Wg7x;qmE4%wFpp(l99~px zjK}K3u(<+}VdJE>xk{Ku2C=>)wo#BZI3MYgLfaw~o7K`#RMxV={+!J|LUs?{cP?gs zwR!g0>c{LFLH~V2!(I2o_GQuFbKAkA*2R<4?E(FVTUSA5_wjYs$79%6Nrr7$Hen7O ztQ4fxJOmVRDzd4LDQ6tlBDO{tZk1K)-d%tSM~xY5TNRG1O5nP%#CCeh?{&Sei=7gP z%*f*{&^m{?sau1opjp=K0;!lLWs}o5a=O(KvLrk#4-txGUaHxCY4x5+p|wHk^iW$B zZ?^|+w32Co*fcZPcHiv3L=SuR_?Cg^Yg;7><0-3c+Z3itna%TcWAZDH>d~{Oo%X0; z&TF;(Nigmv1`W0M{SgFjC-Ky2pAjHxWQwO5`-)I(ukIH)^z{`ba(66RUS+wmpn0c= z)SN2iN|kH4Y%a`qA7zW15QP!r=3q7hu-~S55~)66^myMvTObNOK~>n+c{p=mfz>HY z*L*d4nU+XpX^G|tfD^AN9>B>qjJ!H~@MX1iZuO)QBT`)SKGSH z)=y!d>q~bq-R+yAFi^>|@q9T0gy0be=mUsg-0tvqxQFkA)O~z4YsP~fcW%*I9jSgK 
zNXczg%fbh9z^XjK*{L(MCtu(R@DGGmKndz?D?LHm)aA&**akuE4;yCw|92g~QB>@w z-c~t0r|frL9wQ>x$vo)&#mk$-$%>Wp1AmGwIzOxF+@ZrVO8!An=p9)qDvRxUK^W5m zJ@`Hf#~OYYlxBFrx>q!wF!bi3E%E>3>OA19uI~P?ooZ|C`qtW3Yu#2wAWW_GVS`Kw zxUJqKm*kRhLv9ibZ8ZZ(*n7`F2oP2X0kVL$fws2KgE&yI>&B3Pf}^d~`nUT0-=E(( z;re=b0k8A+a!_M0gvsrWeU2@QTG09xv(2vB#-zxu^+yBF$KJw zkaZJwO-SmNWZeasEc~MHOOJ5ajfvn&;4B)bC4Pp2aafdI3w;^6$M(cHpFS(74D5bkz)t%V zFH8?4X#OPVn#=D+Ek8YPdx{hJm5|@3qR>9>)_d)%?*Zt4AiS{MekAAyut)zM{p}`U zrlfc8{&q(qZn(S*_4HUDx#o9>Pm6${4Ns&Ut@rq+eOw=`F6>Ig;WjtV{wB<*a9R^P zYnz=Ffd=;I-@l)o6QF_om`t5I)xPj|APtl|GM&l3DeUHN7*G7y`e+7haERmFjl#6Q zfKBhwL>x~tsrHmGV_q*`+xhv+5#&?MPQeF`6?sI_Ba`j8Px@x@ z>a!cR*q;PhgV|H;UkXwsK6C9q3bU1vM{zCy@v zGUYNAs|RwsR#b)vIV>J?dEM)mNEW<=9SU^i28c@4{V8^w!fqxJ!^8Ktu=`Dt>FrEy zvljM=5P=NDCUShDE_pb_#Wz`@h%}H+yi{q2Jd#J89D7;Fujpt2Ozpq<5-`ZJ6Bglq z?6j2HSN`crKxGH>Cf^WZ6`wr9t^CNzEhi7zw?$&$v-q__p6-W&`o)k*PJZu~dsDGD zi|#fBPEwM1hwtyR(mHGoSb>K;UFbP~wjiT04pS@`3AN?IjL?GxMOLXWZAWW05(pyK z_{0>|Ly}vy!mhgDS#>Yp`aeaZ@$A60eEju@3tjs@B$g}9DYhE~+<^4xe-ExLZ%PpI z3-pQK;*-N$4O?FUuOB1jG#pBB=9Q&^`xWCB_5iF_5BIr_^7w8M0ebPX+wZnag=tXh zb~q-?DZfHFBDqaKCXC_LM}3CAu>C6(rn#-V(0>kqOB$R_n{1trMn-vwt@of{5)xt? 
zd?7sk+Gd6QE19?kyrf|Z>Y|l?dWhqTbLdIm>>-mn z4VKF7g8qxFt&8t1kJnbm_m)_9(HTa@UIskpp<6jrB);islWmkQ1hs-~uxXwUWc=^u zeKSR2%|2#5EXPNjUANhyJ`WYAz)Si?0*=N@`g4L5-s@f~_c8Ih)m(FJzK0zP=gC0h z!7eywR!7T0YZ7FAO%3Jq1INzV;^R*q!oTt!JMLk;9?^rH_O2i;?bEBLy{~Yvb>fth zoL7DTrsX-U`=;7$0&Z#^WeJ4^-O^_Yuo1y8$#T&s>s<38+u@Vr{Wld-+u#LZSR#aDZ=C*fl3JG{x@qQ4(*S zrZ7eFV12)B6ks?CQm4Vf=gwRsf}39yDW*m;k?K%q_lQn`frYer)+DICUw#QN?^9~O z_X(tf#mV6%LDqwN)C``7-u8Kz(BQrGfc5;hD-_CT)^n1e+bFX$9ijwo&3yK}BEekn z>Ej31!pF9Wf@(x{RiJcTZc$Mff+0gD*s}_|4Le@5Gtm00v*}$W1RtGM3A){85P|~v zRxN`2Hj*WrbV8UaI_EF$s915PVV1q&Q?PyG+&ja9_b1;Vp> z%#QiEvr7+l>{@8A__+Re_U~<{1zgo=C*Z43n5wyLBeu+M%SS#lk2CAB%aio~h<7uB zgQ#Pr;nZn4cE5^-n9N)3aDiziiDYJ_hUo4mL z#qB;lqmx;bV+RB&Fi_pVDCJf=<|!m#%R!RP{ve7QW15u}Mi=f+BGD6mp=wT-Q*XNn zv@%_q9RKpM6p@}6#hus(BDjHIsZO^4=Mw~n!X5C;zxOc-nB$Z{0Qp)WeW-|9-Jmf2 z!&bCjzlR7?&+(Kq<6nBzjtheA+iR z&amSOtHkA$_^JR69g%xnk!Z6{VUr57EKS(+v}^YV0?m8kwabD)mCf_1a|&cKJI|I02RlpzhSw$`eqDOs zYqdUtMT>;kZ-m?;8pVrwn;>n%uuIe~`xM~D;ljpc7aCXE4@F@ScgOC*^*)wYX^k;5 z3wMd)1~~^cVSCc2z=dZ979CLM3i%Ygki*^yS&pz<1-UTdGo=?&EE+B1=VKoC;Zk2n zd&!aZQZ}<1n=J}$%tLw4775TzmL1bl>!UEmzsUi&^B}i~!+6=F+@}=u;OGY4%6)32 zTzA?@A8}!Ni=9>2DfaM2b@8b!V69?H&i&K1U8U@E>^T|3Y;G zo7;+iZ9z9S*lCG4Oc6MH2Yo}aAjybhn=X+yQ+~h#OqZ33%|X6G8b8knvfKvb!-YtJ(|`*sk=$&}clfe4&V$zN?8*-cPzG8H9Si5% zQ-Zz^x*0UuWS>9_c-?e+PLL)h4|&8&eAL^cBn zbhokA&ig#@ZuZ;TKF|4@894K}2;&Vk@rDL_SFsF;JDzjw{Zs_EBF1SYUvjCdpHp9> zea?gKJ?Qb51!#00vR&U&*!3`8Qz?(C*ZHJ@GRaMeXxCBpw5G60>7+S--E4N<6F1c-xBEu4Ig6fClUmka)o{UbFTCkSFj;mFF@(q zQ@LQ>EXd##Mi2#5n7&G1MKolUVPgqZ_2U~Vihwo*j!ZK*Gpv#oF zXD26cdYIuX5vIEHw0pnL-Cnk+y{rt068p6%3)GX*^%sQo{bcNEsdY7T0DuOs2M_y4ABgT!ihyv}SYx6W5Z6#Dz@6VIm7+xiLHE zQ}T3#QRXj_P4l1&`a$^*`?4r}C2VA}y9y`Kt`v#J;`J0SneZ&H7J;^4^-X8i>wP>+ zULV$S>+9pWDeJXku%Q3F&x@kXi^X}LMV}fDEFz9T6C*jWL4T%L*ExQiITiJ}hx={>Pb>^%|O2oA>V&xeApRRfT`%)*k( zx4s1FGn#}la}%hfW0Zsv_@KY96fA&VclWj*Dy;ft8#Vwedc52qz|d1st^-+;VaQD2 z^a2E{o)e}iDG{Cw^KG^uCoJB5xZ+5Dbv(B(o0G_uBhTOgrVa*eP@b(;tQ!=#XoqCr 
z1?!!azU1`i2rL8HZv{07I`PR^>B}Oxg94{ceuDFGBJ?*A=}ATuwbKp{_%+J2_kDhz z=;bM)#y<80PNfx0euZ7~W%AM}4jjzuJwk3>(-Xd^un6=N7GriKYMFxmYm_YbG#`yP zE>1G7qzjf|{%@&G_mDeE#i=@*E$C)UFJv*;cXLIecD~N^44dcC@NfuQv0H>(T?3@2 zu}9doEtePanC%lGu{BBZjD50ERBAcC)9Wuc+i8)QAAZlcQTdmhL)IxWiy;jg+wUn% z;oZ9TMwpw+#HS*-+I#fx-P^u*8JL04Z!AmvIzjTbZ78$b6n06oQAV>K!mgp1>OxfF z-b8}#z3_*ZD$IN(Pv9h!=E39;;$cNTB_A35gfy!ZX8(ydt-z#6Al^_Z+ag;;r!^hR z*LLpQWYs%LuHPuC>cWAx9`1!=_C^B4- zg7aG|aSH9@UYw!#Zc7&oX1Kq^_50c{Meuuj9mk147=a({XGP}|Yg=F3wti*sqPfzZ z6OoczcOiFlz!x=Gehb8qJs|>X4pDVE9I53n_ji%Bp-C78HPTh~p(u)mah=!#|t%! zi{sT#bQrr#Ng!#4RW(?}fo-*cigF9Dqp{)Kk%d;~DP5l6YOGuox)<0f9EByEd|T-g z1?(VvsBW-UQT!*yfyw?R%n+SwYMmlcdf*e8eF?MUR!EY1zm&t_wUo&RtWpS(&0w;0iQiKF_vp@3DCYkk|j7NfAuOT z`c7q1@-_i2p4sc{rwY5GGvsL`U07`kbZqrMO0}6H(j0u8%QZk$PzxwLiQW_?N{Ec( z5V>5~4Nl-9zT8pB&Qj^ut+!Pv0o9*vSz`b6MTMrd?#r=Hgh+UFZ}!o>Xk30B=w_K| zi*JRYU2ESJ(G@R4?tC|Qh(fLhaP`a$tDB(v!UJtCXTx`+qcdnlL!QC30Su&=J?wL$ zYd9Yz+$ROq_u3o>T#Tb z6&!Y)%_2}Q3^u=-tV$#|AU*HuZaaJm#s^-Rd{Wy!pMuxGH1?84K|f2aO)+a0_RKuF z>N&^?Xz^*BE^d(>^F?s?l0I3RApHp5k>|-I%LP$_Wf;7C;f;{TuXlVd+A@ee`ky`* zkSmb5FR8e+apC`7=t%XWG$HZF(R?3vkOqNdqlw1IRHJy#|4=b z8vgoznd zeHD&&FZl%gddmCMdtFc$$2oT8H>psg-CP!L+7WN6K54~Bu(OwgD1#v=N!<)uwpPT`hJk|a#qxil2x&ReTg=rYEZwtAc!3*d$9y@Q>kJ_!m%nS9T;l8pUg)dMf z+7>u-&9T0M3^g}1arTzm*hH3b91&Bo9RIjT)OoZl|7e*#EktkHOJ@ZpVyZ3bDP5eg zJ(lMSp-kMz^Y4iLQY7ltGsONDTOjDRkMRs(I4=#+;Kd^H)ZxQHz80?dxS>cjDx*%!lSnrp0G zbeeX&9?iW&cHOsKjd($Jf9-ZrE?xry%LqKL_Y~Frj&PXf_LVFGqs>dtR^(9R1Yo{M zZZBYh^a+;QLeZ!$BZ5CobeIHbVNY~~_9{$8LvjjH|IHDv5Ki**qB{2aI0W&Mr69tQ z@CA{XmJF=shU{e_N)3#DwAcrN{wRg7WOtO}?((tdEGE`jk^QfLrm|#upf^6*9+5A8ilNf{Ya2$2=4q+=pHYE zcC@}p_JqRj1ITS{;?+3SekPKel5yB7;rbM|r$lpKX!seiXzl3)1t-^07Gl0At{HEX z?!4QIgx&O~7KGCBY?=tJUFSDe*t3E(qoZcAZC99KK$JCRyL`HM^=^JPtyz$chYC1& z*>Ruio}}J(!YAbFjw3|-lc4`ykyHa)mv#6gdR#=R-~&N_Q0CGVh)l$H??nL7{-Y?G z14rJzbTwgGihehGGX6-79{ZKZWrn1-gHupPCheN2zLdRc}q zKo@>I&E*KXkwV??h((1Z1=z4AIJnmaPUJbhFR@v^ln8!qRw~RO<5^$ef*XreP 
z^SY=sJaDnG(BAYpsJGTW5TJQk!8P|s-vzlDg^Pg%tql={9VgHqm+eY40Bxd(6xcCy z9zu^6@-TkjfJ`G72+|Qy#Zac%ZW$@kR+3P+9621|; z_8M0cgFJe`1nZfAZ!g$oeLQ5-kP^>Rg7gK32{si{h3HGXxddZDvKO>S6xN=!(NG!A zv+W|#GMp=i?XU;DjMmw21XL7eGSclOpOIFK3D)_`BDlF4k%o*{r!WI@VO_(8t;O~q zK{t%3E905ZzWzNZ?u#0P%^PvHz9ABMD0E;5zhEA=6)$cp-eI>2vV_tLa_w$~879s| z9D4$m(*o=66AT_O#`-ASwYOsib88@xy~m#PX_At9T9(4vc66PVxu*R#TO@js+a;qV z>{`^b$6ay`&bnBEzu4=*4Nq<-@llO6)saA*NDQXVm)?--74TlDpyvg ze(aOWxw9-y*II8;+#-lKZNaMtH**X7Wc(swq7Y`Q{ZSMKw&qABVxR#1my?|h8|-Nh;c;n? zIVk9cT~6tN6; zhpPqL?tnow6z!@e<;Uz!Q5o1M4CuQRriGmL(9VXf*;fR{i*pbs|ET>$nD(6C+3;4~ zAsZ#=mi3eZ9Hpm4Y?4nA%A1T~H=FJYV3i|Cz1J4|=-@z8Zq{mYAxvar{lgP&HTK0E4MR99)0$JEd zdB=4w7ffEnCasSk)rF6iTZWI)g|oEE3I%DX{EG)0l@)j8MZ)TZxz-@a&@pUcS7z)@AxM@A?UrpYc5n8(iIt>Kbc4#xW2M~`1p9ufq2a} z`*#<_NdJM=_O#b1B8pox@tSpZk02w^t5=fUmq?t+)SxRg zP85A!vI>NL0-hW&>zOFwu>$VJg0810CGt`2qmS2Nf)6Q751GRj#QH9H2)C6JfIaY| zi)yAUU48@U z-SD1oSbBc_D!VE{Nk?ai-_f6lLM1Eod zghQAtQPhW|{#LB8`{`&eTN9{P?64goGHzYk`E&fcYK!RXRjkV{cHDo9;mW98FA$72cd8lokf5t8HmxYdQiR+fTw%WybmgJEB_Xw$qR^W`!^hbg zpO~}nHhfOi3R2ba)p!)pUVKypq>b)*+>R;e8ahUNjaP)}ZEhCFs6PwPsd&Tcc*6>N z$HO;EmT^C`_XS-W(5shv>*sF1SXgoa_E)|rOn$mF2*t)56Va(rF8%|BsVl3sGTTyo zT6Uv-EY}SR%}Ezw^d|XDAx^6 z2mD1*SZG5dd5Ddi5>o#HaV`6+pg)3b4R!_Gs*9%OH*bJlbqirvD;}%EcC|2Z`S9^Z zD(nUWt3PlL>~SC4pQTc$Fx6&;_0;`yA*w}BK+?_Y1hq(4p4em?e6D0($u|0QNEFf| z+vL-sdlIp&J`OkQZODAxy{wXi&Y+`lv^vYwIPyQDSf-&Ww#6Znb3QD3+$TvMWd-aY?a$s z0k?tc0GL<6ty$LilSOx3;8>NJAI-IgMPfA0FIs6&D(q$u`A!Z?7|b+@n&2TrHpvSqzGKdX!n`qYH7!Y1;)TGZHPb6AHWEEpO@!n;)vR)73s%7`LN2cD;`oJs{bBq_C?ehY|(~dhVLj ztrySPhWi}xrc!%UVFo17g529yywp-fajSJa7Mb(V6HE7r+5^vqT398bP*HqaC7fzo zDCowjt!znPe`ujC^JzNQ)OXC@XqAF)_3--D(>4p!+2s5vul$FF_$7JjHv64|>S}g| z{Y7CXlyZC}n^zex)wm;pX>vv!i*SB;j-;>cLHc4%Cv18=GfN+ zi7V90ZZ99#xpQl0eVs*oTyzqLfdT>gl8>hpXPBNLdqzYWj|>oO%Pf1&V?HdTKyibV zz!HPDCBk$rg6L+Utrk+f-egyTOdrn;Z6>XTQfLO`?(_+1s0H6Gfp(`JuEF%$+{sAqm1o z5qnI~&9qw}P6L@oFR41`D7NdC6wE&N6i%uIqSN+CNTkhB*wq+GXJ0F_^`bCl`ke~y 
zs2fF~3DlX?t&cT%*j~1tzxHbn&u!b)QM$@r5Tu9b#HEE{c)cX-+7Rf3s%4n&wlDQ? z=>k(-q`N|xN=PInmmdhag(zP%Y&(0cw@>l_@?(hmj2Cn*N7DkaL3=_Jw}t9iHH1|3 zRFSj==uPoi0as}7thnveR$G{$1s{Yg9j_J5&C){=bSZWTGhQPmjIerzDG=4>x%LME z8r^ZIrDMxJJ1I#0Y#E3sU}`#IXGPNDj-=D%zYD3LW9#iBg^qZ_jMEy<_VQ0Yp;!%3w>Fix0 z*NedZU5y14*KgKQ#BT0=F~x=Hdu90Hc2Ou#me*qTP$FI~PLLsgTohV5cKiT)(uXm( zY$*zB)gRzR6JbL^Ix$(^JO3)c0ts|zk*n0tLx)e06`GoD?+dtz9fZ^gR~q|XA6EdP z5_NXF0F|bsq*!-_X&~DFp>+E%A%>dW0_2&}as!jwnXQEBEkqeKL)qpTaRqGyh!Y@ts?ueV#keGs|YNOQ(Gj| z7I02Zi$uev4UuPm5@IlObqhEnK&xbl117))LAv|)$_l%pzl&ldM)9h1ry$wb)w8Ux z0O{UXw$%R1hdY`U*ifG*V2T{E5k3!U6(LI&a07N??@N35(R;+_dyqTcz8p(^l?GS}h+Dl8sLptvv zgEV3M2>a5#7fU!vOhW{O2{Q^<)vlFmh-L?os#6EoI0^Fv#FmZ!9f!?p(2pH+!Mec}|I+-7?O z-512#hV$Ma0;4w+QFSDO>@{Bky?$%|&huNV>dt`LDDNuvAePXygU zYFocnSDDeG&?#=P1vW;23fTF>(_lPhc#K2#2uAiSTUfu3lIf7e%Y7VLi+v)h7?c4I zrqU+@W4FIhLOdZyq#fAlk+-$IN<%x&mDikW-gK^cw*5gAwgP-J@hkg^kh@2;tuLpY z5>%GarB(JsT#I<1L??(FsjLS=0V0fn67g(6rb0V02}s_3;I;3(lm75*sJNXCA2ZWgoH$ zKG)ebn`H;$DGzCKG*)Ptf>eM=XpR*MXg0Q$+I)o>aMBGF$8f2S>(;-AtyGvn!@W%e z1=X1C5rI+yHzt1+(AS43XLW7hP-nHB5RtZb%s9}od%eBxAzIp*9F{--U#>tZ8-;P~ zJo~N)ZWfM~V9Qm|H74Ly-6%|bJUMT+n-gi{bt^AItllDXW@(C5YxKHUCufnhM;wtQ zG6J_6=*s#6-d;XSCa<3Hh2sw@!!`Zra-6Be`Il`(!^@-o!+dN6X(tj!;)n2%aq>0iGN{xLWNX68{uMiTHmkx%&bOanZJb$mX&v^=xQTgtL`3g}eF_zJ~15c_# zl=xx=pK6mm;UOg+oyAYEpxY2mOy@+)9&P!exk_V&VP0TM5_#Iz&#*EJ|Yvp}EoQs5J|_RTQs1z;|LTqA)^z;g!7XvvPLWX=era@gMn!eW)N! zr>(>Jo%qU-i-iR5V$Jp~Q8cvp2;{mUJNn-fi985L>c-ha(Dwz&y=)sR0)xX9k2~>9 zA=;Jj@wQwPT{h~mJ+V44ma-bvQmH|#P!v5JDO0+eJt)ZHAX6p4L5}q*lLY0g@X9_H~1goQx`VH-+7_4Cm3`e&XZeJGaQ0asL4oGVB{Wuaw2NE7vv$FJ5|UvNY2V_kuQUTpc8{xBZ4OAAkIh*$r;HO zPoY#U6Y{oDD+)!X2Btcigj|uwDiL}(oWQZxhD`gluv=843ZAi-6Y!07Rdz;@aV^Nn zEU^B=04&7^hS@{~U8$o&*(sJO>=rae^<)J!DvH}-__;Yc<5-Kmn8=jLZ?~NmW}Z5? 
zHQHYiaX5%8us4LMm>)@0%Z39Ox%1l^-=1A8mxtBs%ser$$}XpN^19hgM7O?iErn!X zk_cKD5hn4c0|TW_8va9(4HTdSt-E*$u!$a}vAgHlWRF-A$zyDq5MSfz8A$d| z57^c+9eG$}O4g74D+<%Lcx{99GENG**&HgTXc7B|C=39GOJ?dIW}o`pctW#up^{jo{tRcvXen@00QLz}uRodN>SsCR{MH z1ZiqdtjJ7P*i8kJ)Lg%3i@-qSM2eW=#Xee_U%;_gC4!stT)wbcL5k{&W$Zd1g|K%O zkAbiIsHEP#?5{owc{}-6{^3Yh2s_BU6Z`B(9^Tcxzs$-E6!aE0g7-$T@&hH(z@m~3 z^U0ZxM@Emb5rT~FplBW@HlznUUljK>9bd-Qu|Qap-n`zHDBSfTfGtv-3hoVjKkao< zbS-cQx6-PVl5w9J;!&kpKnpY<1qwUtqh2h*f%~5Y+=xtu(cdOWbK1(52aj4C?VKp? z2aT&O&Kk62?en89W{VwNVwVZh#$lr;*ewdX_Kyxd6S2E}QXLlHE%xzH&l!7o!0|AK zOI{e0nS!)*a5$F>))bFollA)Z3!$KGJh<`mzSy?-%#0=cF{>A3XP}i5OyX^lmhSgy z&TiY+v3s>0^iUQZQS)r{S-PCObFesUzZFr#mBj_iUhpU-l8%`D--K9k90$-$XLI<_ z6KF$-8O*m&JVCe0z>UadqshV$W7T&rGCEfaYSrsKXL~S*fz;Me(U~z$uWRio4^aN^ zYl{UK(P_L?9mCVX3SruZBTEEHpBHj_Yg+i|h6wMae-O!ivhli__`ccp7g3m?0i#CQ zS%uw{55+{nKLn{av$ZD2?il0Z;s^G`53aJiJrpksQRcpalpD+)R$y1#Kv8tMuRjpq zkC};svYj?kL`pn);MEd)LVy7~wTq2R0zOlGA)2&;Oeyi@iNa6GxW}weLE6?fw~&Jm=v+i$t$Hk<3?^do#=k1Y|@ zD0w^e2W^EY7cVjOoP!V#4LEVz6~kg^D>)MUbnR*@-q%*T%<4Toc=6*ynHLOsiR~Ai z0f`))$I=Gb36ybUSKez)5f$B~%j)Vq6syWWRm*irARFono~%Z@qr zw2#Vz$M$oDH7X6W0{0TDY__Mc5C(GlheI;oXNQuG)1%dbZYP1im1}PZ(|I0|`7ytc zNIZ~hMgVD2A2dbYQ53^~AIuEIemp9^Ez{ZI}`6(#BWcpYVKMV_)#WPbBYCKlS)7NqjFsTr-BR*{`lET-T z#>zS+id&ci7?^(vQgAO0`(GMQn9nKL-xdYl$5-3qBGMLWOM#n>BIJ{zP#YHz+z<%p z1KSyVVAE}e2o#t+V7yf*tO=2_@=R+Lq^S1Noq+^#qaEu?(6mNkf4>*xQ@r7R`)V>l zH~OIzIOyLJrk-v+dP*a{mnDfnEApbS^9FeU|8%R7M;QbQj>k&~| zCTX&;ZO433Nxgd7^9s8zr9HnY&;Be-53t)Fvo-jbXJ9r~RJRAcw1#CV-Nj$} ze2DhKNGla$#9v&g5B$YdRw06Bv*s|*Ted{d?flp@!E}DUBawxX@6oRtd*(5vq04DJ zgG&GGcfzbN5@27Dwo;az6j3(gRwE^jt^c4kl3L)!*gjMwg9Mwa%)UGkKv(l|6wWv? 
zzITe^8iBS9;vx~d&nKb|{Y3I3Im{;txFj;~vDcDCVu;ZD=G-<>h)SdmXSo85?daCc z_Ddh`*fG~C6lSwjxl%dVM_VN}w?=d~t=;)!TZJi_bmJeeokH&ON#`dV2E`sx7)Fk$ zvw4XAt02{-V_ou#4-uvvFoZ*PmjLDPGtQ~Hn-HHypIhv41>My7=j=T;Nirm~Jj zv+a>Y>;znZKki{}Rw#sS##4f>f&tiyNBcU}CX2?p!e~HXhXI$Z)i%}V<0%aTcvdXP zc(yi(YhB@UIIW<-3|oWE^+YYt`epI`{J+<>`+S+0#mcq4UHNKvd*yK$iibsX{TmfZ zK~VgAk2r$&ZhOSKiNK(ZmmptX0XJxq_-yzt9uUQ?t$0;Ka3eu?yT%?A{WDX&nak26 zqG|oWy5K;Q?IA8R^U{l~oZ~bE%e#PVu^Z}_obd)_UI|7|*`A=V+E3+7m#vTe@1wmCc6W-qfxMe&E0!?FUyM^|bgQE533gE{s~ z0fyTd#I<6&|zCH#H0j*1svlV+RJ83?0C26TnTiub2e=PCHxkVode^O zO*|V*$>5MIWw7Lbr8q_k?uUM3f9D}ecGKa}yevqy;VcXh+ZiENhlVaNFlvAI@z`?Z zr1`1OOrzhbt+4x^BDudEaw`np`DJjr)CMZXEwelXXW0x!i=qWynOSIS1UMN74&$At zbaemWh_ABsK4;*`1sgBZ?AM~WWinE1jpM>}h(`3rqTsuUXe=g0>?2_&d%~armi+X^ zqytBcwQ51vD*WB9!kgRfM1oN$o$nF!yP!9)y3JZdboCd?XKkx6J;{hLQO5)rzhjkB zjs1fVbr(i6?5x7BG9(4$+4(I|{4TyW5JKN)?}$ccPFKLutg}l(T`*8c{e~c=XYd?{ zPJs}mKf@_un8NHNueP`#pABMFSp`$k2_K<28Vn&X#2w~YVd`MRlBL|Og8o{flM42r z+OqPX$Oi%!e7Q^E{t z@XN(DR!l`}7txKyXt-5T-q-D1B4espYHthE?l9sS9IoH@8y|E6o>bEdtQ(MJ=&rl>Ek{Uk&~^jEwodDjBiq}e%7HdAK?EL zJ2)r0>)ded3Lgpk8&V0wD}HUWeIlw~NBFqcsk*{0ONHnLH%sQfLUv<<@^-~e1iUOs z6q;~XQa=kRtiE73Bj`(tC$R7bWkmQAri$QxC){xsi;W^_uH{s<-c}2`=JKnHn=oPe zk9{qe9^X&97&}UO`Mrg8pwekehR^2OD3Qo;zy4UVkm~~Cl&O(vHqpoHfQ=NRkgE=- zPtz=2nAR}Gp{WsjPLN$XU>M;JWhZYPfi?roqdiva6VtsR*ri^XfCKQLdPdL>Y18z0 zQ?pFXofXlw=V9!;@w@PjD6R#Qu)1yU3A^7E91F0;e;@))3XYry?L$EZyrX=NT^V*^ zS*O2UVGR^NpjZhPB2%E5j{JmAn1t7{!9F~G>=+y3lf}0nI<(e?3ThKCDQz#sD;TfVr>vqwbagDJp{Ya|A{ox9PaqOlWU z9dnWu3An|O7Quc#N26ywNe*3tf*NeKNc8sx%rkED;XZwO+D?VtxXD(w)g^E~jvnox zFeAX^q#=ab=%e$w*yR-E+c{xZd0~X#OOD?E5XqkwRDbP@D>!4nFRD9jNIg$pH<0~Y zWgm-9`KWzitIEDQ3CIv-h2;z5S|K+?De_cYWcP^Tnu{fS?0E_^5;zD#$IxfSNM$I) z9u(4-T!s@MnC~#BFuGA`xuUy5lS2`#BG|K{s123aH(Y451l?XDnb$)$R|Li`tsr9c zKKv%u2LGtA>*+9<3*J`ZpM7>l3~gP+w*D@H8^R)ViMyo(#og2AeOSMp!N!>6$z)>z zXX@uY!nzK~ZuyVGOi3(XN@jhOmR|GYvC!@j(5lku-zEw%K$2{j%$GSy*nI~N;Nv`O zSw1h;S2AsCBKG0HO?{~dZg5bW4aKtUr358VGn;L%cuF9ua~R(r_Np&NCqw)$uCh}i 
z^t$3MIwL31)kosh2Lfd-!VXpka8Kaq9z${juo|RX3>0rq^BDGl89>(7q^O*I2j`u~ zL~_$VSkKSw$-bn7r=z%Xn2`D(GB#pku}sd&@`NgYBcg zD}418UuQ-FRTfP=D{{YG5+TCWv!K;`LpT)tdkHg8pV$oYPSh-mI#>qp4E?wLFw}MRh0j}aM1$u_W@s0 z7~k{=z3=R8GX&lKr{_R4N3C2WW;~7CeMp)b%S7?w{}7EPVb%+ASAOts&4Qq+3fTdzrw{{-Oo%#>yWAcXbdx(M zOMb^JGm&8kK1>|G;fn9moM3lz92x{hbq4j{RFN-~6I~tLsb2`XJxb<^wB{wZP?{Z;cAqQVV(XQHkvy^R#A<60pwT(B5XsM93DR2%=Xv8VK9UBG zD@d_-1UXPoOtbeDbiYxFCveocJ{8@s3T-LZWP^jnk2hD`cgAtQCJF_nVpH925>OfY zPR~RvY^0!T3b%&G(d8#*|2IzV60rL}6&q;6g~44zCx1N0~r- z1T#J5f&X^``{NCpbhy~!b5QDVl=zFRMg*Ev$X(rjE#TTPSnAoY2>V@~H-P!<>aY9s z+@?6#2r##uv(X-o5~e$mEG)($$0z8DI>{yFy+{CQM9OJKxB<=NMvTwh3vOJ`h4li3!7W)_kxUCBr_vs{}FII zUK$rP4&QcZt}hJ%HMC~FD@fG{+}Vu^^L33#x4V3*Ge;`VF5PB*1Svn@hzVPUPr&*h@>Df9%xsZdN8E>qMeN zBt1Dc4HVK6)p?{!)@uj(EZEOyx;cj}u%C&<(BQWD!it6Vb6*0U9}z@ZAmm3GGh9}i zz$Z7ub7@TAQ3#URQDH5a`Lpbd!n7rlA!G07g?v}4W};c*D5QO8$QXNHv2GVY2_F{f z?RgN`OHNjyT(b$5W^=&f=_xXerOOle&3G~aC(*j667T`6n`H{RH5Q9@*)J#%F_?x^ zySc?9@oP&xDX_OHXAe89Dv_1v#7TIf=MWL3YnT&t)GA+mLlYS zBy~eTV#poM(?oaY4!tn)Zdzu~iN=yXd!)slSC|pY)O+Cn_=sC?z189h)2&cW!1~Lv zP7zqAZN+QaiVw9FOE}VgTmX?qq{AmgLQCi~ry&_ampSx$%Tf$u7JT5lZLWufY*g?u zmwLz~4wyLDRtsv!$8e_(C-Cal_!PXe)|@VPkXFNFT4_6doK7TVR}_|hY9WAL zbnD(rvIhOkL~!F2ubPptB{)=c#w05`#YQMhV`DjHH-7LshBGp#wSf?D?rd2g95z!x((ly$6_-|(KvY4G5fQS zR*}ROmRN^}%+q+Rv%KSBaH6fZcRlRfKj++@WoAV#FNtGpaW@INh9F#{uk==*AYQ#v zzd=bJ>gI@ny#9hT>*!v1q4pCW8RH@aH{cOLzRJ@(T#$8?3!h}Q$Zlttz`-bH>qKxL zqnJskT(sCrqLJavHJj{}M26%7ya3u+5ttT!7Gn0MKyYwnbY-yWko?QoSEgMoG?bqm zjqx*JUlol)JJ+tTZz>$D4g97K6riO~CD}Sp&%mFhZS??CA41u3=@|;sNex`K1+bKLa70OWKcxyKbE6`Z3P4KBBp+Hr&v_;%XZH67CV1 zDrw-ua0%`ebdNKE%*7VYzJcB=LdGG*QA@0~TIQ!MRhX`fZ!NK3D(q@R8X~O#%SJXw zG>VpS^Vj#-JP)a<=iN!R!b3Tk;%u=;kj_ntW@Xv|4|pVimF^SrOS~AVr%Qi9Lf1kr zDbjM#(>$e{T9B`@%1Yr7o=1+La0ibkc?HxDXj4%%a7bU1gyy@p(HM>(4ZAwv~dKN6uSo6{e!VfZq1@ zvgLN(Cl16G=Ge!A^a+zQA^SwYb$GO{6yJRo%m?(yV|J5*?(-gqhSntesR$}r%%)X( z9~>+SMP=ySXSe|UK|V(wZ66k-O_Okxn(5Q=3KFyBK3z%?ashd^*{AE6(PDKze4%ug 
z)hkSGfz^mzk`HL|6qX-?@meiMMWIT5_~p3t1rLL}&Ov)ikRgj^MJMx6Cq#EnY+~bX zfnoQZ=UfF82gh0>(8CfGL@Z>SY=S6s5DQ=-OBK+R$Y315Be}xVo{w%JH`)1uEDy?y z6{JiKXXe{FpMr&7jacvwL58Fs1~{7)*6&|aX}oDyys2Eih?*+`1G=m1kk6>6@HzAB zgrLTvYIPw0Q)lOVQIUel_Ms0;lK*qXq@;};}C`Rg9jsL3+C{vBc0NbjfBZWf6t zpOR{Q6r@M(WeYXE!#w1;S;-6aSP$9Tmtz~DK#<;~ps-M)FoTO7lm=O2wRxTpyak9; zxyWZe+q~e-8eHZGGB+{Q=d-Ls$ge`FoV=UB*xJ};|0~R3X{rz!;MnA>YH8o7voDtr zORM8`4e`2KyG)Qd<5%Q}UGC%FTvc_ZX{KE#=ql=t4yfJf(_-wn+#V8e3m$(Q2u75O zphhy9Ikr)VYL3oB^cNpM9tICBrFP0gMBC*_=L@AIcHFn%QGK5?MB84r&#qDw<-EQ< zI9*-up`JG}HG8X|Yay&X&Pl?I@`Tn|)=Ob}mJ;Ib-rs|0;n5X#j{s$kd1%Do5d-W2 zpXtJyMi(YiVTd2IWJPEp$##q6NbM1y6F){BbC%hk1>G3vWhG>BA0L;P&U8EH3t%P! zlZ|!urYJOUJSKz=$sNJ?5vw*lNF}Jfv3%Sn=9y;LwYI15~2Kt&+{32_3dfR3d3pt z@&EZ_tv=B$xBSSyJDo%f53PM+rtCc0HKOp760hD6JRQ_>M7HmXNagvFFfKRkrmi%# zNSnZ5mCT9t^J#QpMtysdpc~g*6e=g#bP-%t{9x)WakeN-6z2xSlRQYv3s1Es0qP6% zuNxio$E*ekcOp3YdnIAnOo$e?R$@)%-@=S)UT(I1eugVJBF^e%0yJb4yV$fWIQ7Fs za199*nJOHGyk{EfY=loBExChtfOiQw0#uHzA561Q!bVUrc62Xr86GX(f}9*AUT z7uhPG>=(bd!PYCxn6{TywwGbwbqkLcRwW{R42)H5w!MO`Rk_hLxDnRu6EHVDk*5ER zAocdf_T^E9-RyIYhW#A8+Wbyb*HXT%dixBX!1KJl;tM>xwYg*VHf!_A18d*xN!QFI zd(fb2Sx)IDNP~jMRwRAOt-C0!WxnEyQ~O{hZMEJ$4;~=7jM`uiX>$KQ-EEXF0?6G( zuJ*==60mep5~)jgU*h@x5k+a?$u-NWc=Ietoj;l=a9pUsGJF~e#D$Q~>O6$jCel2_ z?QXB2`{H<{jp>@v;Ijpu;b#VZFlO2z5t+MG+1&e`fLmcO{P&|Md&o|R#&}@KS$e21 z39^m?1~J!IdshTEV<(oP!+h9oE{CEnY%PyhZwo%34c1dct@1n+mt5IqTb_t?`^A;IpX&j@ri(<) z(?XmE)+CC_3X79uTSZ~L%Jxabb|vCD>YnQUjxSmdobL*1 zu^(I&#}+qUAwCg>(Mdz#9zR&KK(1|QHqNsfggtLwzt7k);&*v~s0=s{+=yriP(#Op z>*MkRooCB5v2T-a4f3;%ZRxiGk&O6V%9|1?|{4mU=F_4(HM^t zE!bv}Pr4SXP%@gueTFw|IS>tH#jILXn$@}mgAlI@xRXVoVc|>%B1415Al?yX6YJ)D zDhIx=3A#;ubnA9j$!HOn$u}!kx-i9}P+4T-6ah#n;`*~zz|GVUc}nMcS1pnT_&`Ja z;940we^nG$)u`|@)+R{F@%oi^#z&pnh8T63z2za3uJ8SAL8?L>Jl|{%fNw#{1PlU# zyL`DxBD!Xi1I~FDfj2qv9irApDX1;!u3k1;VHy%|n9YldjT5B!@ddbpdDH`ZD~7Dx zgQ-04S%uHlRl;sdxLzNNyxG8gEU>nPa9 zcDpdq=hn>OuiY)^R&T6;I|+}0H0wT5Ig3nu(Edw7#$eEx!J}+|0Ch7PGhunv*hmp5 z4RIPB@}KbW0SBavcf2eSXgV%_;lAbwG1QS~LYAj66tp|`E 
zUnigig9@-5u{YW_QK&W-;T*L1jtH{I(lASs1)qYQ^H(CddB+ntit*2j;Km_Sh6`R4 zrvB51;0<&LxKRk+XQbA0*g8dZ+wI${R?2C_hd$G^spuSiDxiU`-J?U#x90(=kw=Z$ z97{^-KvA#4d`{9p{;CLpl-YtsUmw= zM0y>(WNqbn^D$4LJVY4S8}d9M1^u_&2-+-DMWDv#SH<#a^;$tj;oRa?)~vAhPbVCN z`3XBB5^ccd0{-lN?Gxt~WZG*AyJa!3AXU<$r$up#Ed$1jg*$R+_8BxN2Hq$v?p z3XiVQlVz4DS~1nj0v=Exfg$K}dse{h$oPlgs*qy~d_2B{vTUCK9Y;)yU+?`w zbPdhiQg&eERz;u%?62rZ+W+{}L$e~x{L4bLH8PC>eocT*$b)RV4-dhh^*EDBX>gHE$c7lNpPu}B^=+OGEyDFhfFcBdfCKea_-yLiE9$ zc-;Z(ohW58ChUhMV2DkBgdh!-I=zJj+R0k@t|@#D;K zP6W5)IAY=d#;#a&krHTcm)O@tar25|7(caWV~XTDcA}b7;CF*DB&EX!7yx$cBw~KJW>JmxN)-2{`?!{g_V<8+1BSEg6DcD~NNTHz~`0DZfKe#k!VF4K(N$ zBnTt97f-SsB5?8kbq$5`NS7Dyj+RKW0aB45zCT9d?751U%voa$&kOGMtU98oR^Z zi^(zIf+^2BYxD(jHNhKpwjC9w(_^{A+c5!}#%4a0we+f>n{m7*gwXDBB)2Yk9WOez z%5GXlB05K&aYOA6A@>cWhrNE#T?tws_Kb$3^$?Ak0xPWBY^Wgoi1W2eFErMlU)O?} zIV4ivp50*SilTvmgY%frCbeMr9OZ)aeGJbJhZUypNVd%4h4V~Z4j#Qr5rX|Zs+eRH`>O|KA$$b+Y6qNv4H>>!nPjUcNk-bh&+^tTbS z33Yaz=zcLEAgb%!X1mc7gA=ser!sxkPefOJzk5)AOctgCdNw;fo9FMS2(+iO_@Lid z0m)5zA&h|^2r}N7nTy)(9`x+qvxoKe3EwPfKp@5L6Ljs*Z`~iZhkV*o_1x=20&X2} zSgD5>(rm>(e|#&_(ABop=f@guNRHWSgk94EBgiFoOcchly<}^kTYxn&Tz-hCf#=zx zX@Br?!Q(}(#YNZrESDIiePt!kEz1dmZHN$&0n2QcjTB^~d1io(@iAQ2^m#ld=-M3^ z65toaW{IW|kw4MZK7DId@Q}GpkV)fQ(E!T_X>6aEv+-X0y#S4v5M__CcAs@}I1LVo zy(j3Zg5jPToyrotWEC{Vlw)!vhhyBO!YqOKfo=9hg(Fyh7dh^vwZ%!1#9g# zLHD!?dmk$jZwQwc5UplowRr0R~{ zhpa|ndPx+1cG0gn;R$>mVk{KcX%XCFfO!_SbHW<%eWx4wm3c>y!rtIU{jS1YS2Hf1 zu*p~=5@xw+H8Bh{0zkX%#{x7{GmnLbk`2~h6v_1^s!h;nr@ESzc*Lb+fs$8lLmbo-02n>lXEQeOSHD2&*N&4>YCz6QYf@YcNu$+K$&-TDfS zD46%L?~6wHlsI^x{lI770IDbLYds9!J$Bo59+Eb>D9!E`bX`U#_OSKv7%Q5Hy=Pl* zVT$HX!M*ptJSxae;jlPFh?Y%4y^>4DQ-ZE#Cl0){hc&Rzo)(SSY%5;hRy@0{_+VRc zlg;*}3ACn!zEcskt5N&=N znVIE2ZdjDTJm}*(8u(6j+slFs5=jT4Di*a)AFrDa?2jwf5%0R6&&+hekj&SI>{`Y8 znW+p$u+DxY8Y7aLfn1R#`TWPX9IrgS%7zFse9?@_EP{|t^wCs5wFpHyL0ZFbXi$pn z5g(6{!6FWjse<(X_?{PvEt1GPxB%mxf^K%1TDel4FN$j&qbw1&gSIG&#zh72vO`r0I&Ct5nDbLo_hNt3iqXCSRIXlct-K;(wfi577H1QGmx|Fc(meZKGc 
zQWExm?S1xHdpLVsgGU!7bGSvZrv-V9cEFKd*dY-ps%%NWpfKG3=#8ugs0}*~J9Z^U)+Lx45V5l!4iv40}*8)Ta6odL7lCAUgY5YYWl|h8O6_+_s1k zswzK@229hr2iB%=|NNvsfapx*d6u=CC@`SCS&99{lk3Khi?H_uL(itNn1mt%skyXd zh&pfixtEPf;X@C zW@U@zdHBT4r8dcvu`HfRWiN1;jWQpHAj=%a^2)fsvE>f4BI6>qQIN_;$S2o+FAypx zSKI59u$!Q2Zoz6@ioa6gUX6i@RCAr3^rw@!S+)-as0>uW(C1>Y^^pjw4D3iG?Yk>L z>Q(#K+I0%+6UE{2j)|!j6bl;cCJ`w-n=X06$Pf(6WLE!$Nj9KA5J}%kEfUM6>z~u zB$3)x#+1J%5T4*3g4F@{K)pl@Ombm_%L~gFg+g#y#ua3lP~dP-CBk9lX*?7k7FLnQ zM^`d}PZ6eV_=r;&9g^jZc`j7DT2Z0SJ9g-1YXoV0rQBQ>ZO@k3W1d}q*&|~W*eV=Y zj#f_VIox?#v@ra^a&SbeP0eA^$^XQ>i6<7!u-61b$=G3I-p<-QL3t^ip3_9ZkawR% zc@Das*u?jT+U1d}?aQl3MU`iC?`q#xSVNLzHNGdn>yA&JuGzk`U})QHloWF9)--`Z zJ3DE2i2y^#s+RD;@x9Y*o+wl`@QK+<2RdZx~ zWtkvvN?@v22U{EbaVibtt1XW3w!t#(501*I!V)_o1i#&}mHV8+8vb;f7tSJ9Tbqcm zRgplz>&t5aq1`#}(wVn?OQg_iqaz%NHQXs0?DQ?+{kRIh-ye&vfSYw`W!WN8K-?2C zj^qo4PH`{#>*ZD~0&jgtM^*C#{1w}@R^)aNxGco)-ls(W=p&?yMk358ZxX z?q0V^FXZa}BEhf`4adYN*6p@$h)UbBm_`$TU9+yYt`?DEm&G5f)Mwkz zF|H$Xw7bt1R^xZ-Vha_fK)LZT7~E!pUg-p0i~JJP+-ecn%7iUB?I25NyKIlhlp?YZ z*ldpp`r-UIshfwwCIeRZ0YBlpyoQZdUEt${1p#NK|j@L~1{9phQ&& ze@jib+O{di`y40!4C{OBkD~cEyKOucW9?L$Lg&{F`CFp!&SO*=5}Uf zo&8G``UxkXs3m=2Lpsa4^{I7Z?GjPckdd8xHd-{NT_Ga5rs_7P)=jnV3Wf@jOt6H5 zdV7jH*vT-L{6vtq#Cs)?+#4aJ4;!x{awIFqB7sPfoP?|+JftKvKgT@FqjQ{Ji9myN z>Dtw%c_bBCjE{=A%@HBgM1O2I6{FlGN~9^l7qKhMgLIBXBJ&cq=}h>`J)dYvudjHO z2$YMFq_rVodjvzdve}Vqzw?AYLer2drvY#HBSo#7IGVNNg0OjNJ;<|oV#N~;_A>$MQBGxZ6b>a|IN5u6ffb6v3poEqP~kzLPzRAY z0E~$&w*M0`6f#flJb&wn$U*vO%#RF6r$i*@wx!G3mQ86}+F~DzqMp5KpH9BNyxF0A z!>zP$2!=Od(S^}48^a^ldm4Jhrw2$uI(P12wul!pAXhWE4;3e#R?a>l$)0KQ^~!?$iq+6%&Auz|6?EPGq*sA%NHSCA|l zZSM*4_MY6{u*DT3w*vXtIIljW%lS6ZGh-4AJ? 
zNxV+h{EI|_(^C&~@T4$rVy(bM&u=|uP~R-8Rv4D3qfR|CY^9*OpnSCM^mGZV#=ogB zbr*GJO#ZC34@4o);?^|`<{x_6u06Wj7aBrZ4K1x(X=D4EC}i8SoUYhGfLA6_{mDk% zNRXoBCz4VOw;y?0UDH`_fzWMJYbK=Dtg-%{JkrZWEBN;RO0j;Mtmh8*6onkXXb~L~ zrt)BiF^OA|F`fWIVhp27l_x+)m}@4s_Txo>!`8j6D>%QMmQFuPBEk_SI@PdI-eGDL z%^aV3wXII4?5DegMq$d`dq|(YwoL#&_>!=V$wBjW5tPsR4VWsn-w9GyPCU3cEIY8m zo)?9xfo(=;9AWwc%=}o_u&9hHAKY??M8-H;*ulNb z`Aq>vx@g<&Yl5m7*<#QY<_^2UQ=Hf~=|ok%b$6KB%(3EB%NOK((-x}8ek~L_bru#8 zM%o4uC~k6e(zYv13(<>WwqGFdo(reQ);}o#PItjj12PAdYeUoNvvRl{ z$V=nGSnFEL_vewL(XcR5P({IKp-pmt?SZY<>tPC#`f`~a2vdHGWqFblizlC$JIS&I!!gOZRRe;9 zmhZ_}S+L}?GQq%b{%Ayq3T?V5p7-Kt`&VttM594D9vriO1Bc0VBFJi0boS2{lS>7b z&ehhK&QE>_>9tLws3hyArs@{kcF#(Fvr6)rRDQZW#M4mSI%HVFp|%I)S1d@Ypt@#B z4`-p@G}Du%nj2GFG21H`I%NDIHcv-A>8ZwM`+N&=K0-8%=jzaYAxcPH-nys6N`!ey z7ff}PDjc1}2YrQA?MwKTwpLLzP%N7h)b#>X$@LjG>R$)jAOhvokdH5gErQ{#9FJ(G zZ4!kp$o3|ZtVKS)S){;7F*@Ax%hi-`8ik=CVx zs_>Rt&VM6`5G?6zLANYKd39i0wA^|qZ5Xyrjh}m>w)#}vB>RQucU0~-n=DU|cNM71 zlnI1ZDB&W3qyBPFjYW8!o&29*7#BFYr%NIj>?sgA#LR^4awz+aIBD7E545h6qpV0y z{E$ezu`ma{WF48S!#V<^foYp=391a}$H@Tu#Mow1apN2ZQ51q3qgYX=kEC zdfrYXIj1{3v1IkB`er*MsHR-X?pxvtjV#HIh^VGScN5t=p>u;uDOLTNDB%;44fIlb zO9Xg}gcujv-vnvH+}zRj**)!13-Om?HwlK}1cCKLB>Ho+XcPta#OR}Y`sl-I3=mdE zBFTwG2t@`~4lQPlw98_mz>3t+DrGl$oD=95k$j6ybOL4DshyWi76E3FlxapdrB*OB z6zj6BwoI6J#PX0=NN6v=#wldUHM#^Zwu(XlzyJLk?1;i-#|eL&vxL(QNj$7|mAxZK z*=V&QyLvAmyb52zC3cN4g_HDU+`ccQF-a<~VOXjS_m$RDQREUE!%5FQ0=zKJ8`0@0 z@wh&fT!iftpgMxE&kOtPpa_%(TcB+-xDD;N$?rX0^+=V;To!g6AHZnvr+i@nk_=eP)ew>&~0-?qsB_nDp>B}3~a(RdpMR~ zX|E|vDKSh@W^V{kN;nu;nQx~(T@Dl0Wsea>H4c&SM#KodCd_k(7GclyyAB3DRa;^= z3W!lDEm^iuU^EBY#2kW%5Z+P9zGjBtIRRFUY`f7Sm=w-7Y^P$WNmtZlJ@&Xi&P7U` z{b{{Lp&HMyKOFAi?wK9z=N@j|Rc*BADcC(dEnRI|&4;gVx^xq1DrAY<{fGq_LO`s`$!vJLSRFg}9) z*S($r0y+3sL|I1|R!`1YWHS|Z4&1tVgDn>^@Q+?FCQ%;sCr)f#XImA9J(f(Rah4?8 zJg!ssj`p;|u$Dww6CIC(g4EE>9WydI+e;n^S3_ASyzG&Ku+*8b&$p5^d<86688w*r zzakoZcIw#8?o$|+h={hrmJlJ-Qc*Im!p5iZm=aDl9u*E5j^N5_1dA_QBO3N6+oj-| zBLjIWA~-kI(#-jwH7N}*?bxBKZB>|0rLYA3?$-q<+wm3iPpuq(4zi~|UR`VNcr1nM 
ziOHb9dM;h?$b8;@9&Ep!z=i=D>j(GQm7+3!@c`bBB4a|q-%`C@r6@YGqBtbkGS+&C z5(-;d#P@xhFiZ~UGsJ$9j_a2wE4F)uVY+z#{Cpelk^8rdPL^?9HO(I^=A?a&hjUwZ z**s6jEAtWOvDLz~9QgLT?I}lnVk7zRUlG!G*0>_FVE*OE`H!|rW?4K9*9uX-M zf%g}-Zm=E-hsujpWU~jgTRl1A3cAU7*s?^RC8L3&!3qWS9dpgb@s>yS$Cx^|2-SjQ zeQJDzjq~tGPJCy2xN}B#+n_KR6rt9?E+BoLvX-;e9#qMTK|F%(Qz2)$=6xGhQHc<)iCgL00wEIksE0PzOV~o{d?X2rznb@>=_c zhcj>LWFIRWK5#iSnOfI2xxp^^Jw&n}BXzT{33!T{rNJb&{m6-_U1KA&C(Sagey@mu z5w3rBQNleI7Y)WaFq1pDJVA91U8!IIdc8g9IjJ|hL5uBH)t*eAe$ljBXj4Rq_87>B zx1?HTr&^Xr$Mj1>6nRF*pRoA0)8;6#nk2JUlA9P=S;RhSwn*X289daM2(>Tlr>2^hMLuKp*`uO|=DwT8+bl?)88`gEws@2{ z-7wvDdK4Gt_OilJui)1GFP?5lULL2D_IHQK?c#F5E_y1w;AoRZfYjdd} zUsy?gA-)w$?Aspw=DsDT@fUbw`@`rfa-e&wAg{%xSJGw*P`(nDVdeIyQ0P1HVhI_@ zit=_5$z5jh3%N<#Asnh%dJ1!!?M0_xAB$~l1*?P8qVSF!?4sKlfzY{Gc_8)ofyc_# zx7pL@Vh15_gw0(MsJ+geI@#3cgE{zN^Q zJHUaU9rc8f@cj~N6C_vJqWZ|gXLszj3!Vw|j`WHDRhY~QN;!J|frA5kQIP2?;Pa-; z#cST|C))KFUln<53f5rfME71ea0+mJ^oh-}K}yNH56t4gPhlDsTZ^npM+?yem=wh} z%8TRd*CJ3g?1Iro*FB0&@~4kgueJFKhx$mZ+9a!kOVSjCrQT|oYuiL2PY$3NIoJg6 zbb@RFFs<*lLn55JeU@fVi|%6(GW(M!)ha$#!us(w5#T5CXtS4{|5&@|50Iz}ZpD!2 zi7Z^%6{6{#vPZ<$+pZLW4sv2fWNNmLAkF*JJ8YoGM4ok4+dU2=Yms&KGr{nw>-cC4 z2D4^}6vhV$l4R>tso3iLIZis(9NlCa1;xOl5ew++dVE_=z4U{h6!aN!pT<-QycLD? 
z7euFNGBP{bn+ntX*_F~3@aLI3sYg5GkRE+6TkC&1%;QRv&+6X}+oo)iN2!kmVTQy7 zkOTZLfk4U`f4E8K1*PJIuiWPw}9 zj7firB`5deQ-pZ4Zmp?^EjCw#_Ht__fS|m#s4GSH4lN~T_Sm+{vtkd7j7FUh3>31R zTd#0fDilg!eTOiui|7=0Cbr)p=1?Y}J^!jhil5n|tNlqZG->Ywj09mkG$Mrx8k%Gs zX|E}b3vX~kdSqp&bWuYxTs?Ece96H;z@&=&_o6B&0!*gX%(QL_hi{{AWWd=9LehE?az%}7A`06og$WxX>m$*gu_# zXuT&qxp&o@yXV_s!O*|`=hku)+&2f(DOg%(8E3yHMDNon0U?@`z-M5!C?;%9b(^5m=`ex zBH#0EA<8FnR_rKma(JR?t_%@o3WiJ*OsSkGiooa9F20PDleH2`7%V!O7Dp=dzZ8Od zY(pk2R{%abvp}d+n73&u#WX+0qXyhLz<#5!w>_$=eCb;}_Q8tL_LRcp_)0zB+lvAW zuJ_z$uPf*yh1_T$!$ZH(^AFo6iVHc8KsblXs(weHz)HuA8>~!W)i5?n>>+{BAs*Dt z#czb+^H|Ls*{7Q5@K6g!=?(UT*HeSe6)JYdswc zoC+LpAaC1gy_RaQlL6~lB;x4^XAQZnijseeVPaAo@U2F>k zy~C|wl-1s9tSuLnqMfeUWP3c^0k?$D`@@l%_bhu^ki3f%@dJx6`*ch=bgBK*(dV&u zM1Yf?I!F8B^8oR^YH=hSS#Q^g!fU!BW^%p44B`U@5AQo{*xf_gGQGGwsP28*I$u4eAB>J`#O#?;|-e})lujxBBnj~-fP%LK?gQo6%T zhM<3ANu>8|w+OUQBr;?B{L!9cIKMw2Kqq~9ja~8r!7xTds%B__Unvrt!L<&bF*kIz zC}hPYDU$!!IKq29b9013`wT*%$g(|_&kGUOQqL$BO8wsMWCKB3XU4S1C$mQ*(`ss z7z01J-xE@UZm)4`QkzBbK=PHvbgq!pikg9{*x+| zeIy!vBULxkE_{(N4CN=WN$~}NkXUmQ7OLy*M$tm!XDs+N?v;PbTC};4b07D_R?b(wfG+UK(V0<9?Fim)pD+>Mz=&j#x%Iu%?o8h?li!zU8oal1nxye2!+ ztmM@EF3&_C_p0@2HLYKY9$tnvU5Q?N` zJtPWUlQS!>T*eAf=**ikY^JA8HLglEO-BL8W;q3Q5pMgmcX~z?Ee^&m4rY$sddv~g z`9QK_`7KKtH9ito6xFC(Xe|(M=cvK{1g}n@q>ejuRO;b4Z)qAAwv#dR7 z-M&=a6k8(*0~~KO+eU#0$dIoeNT@F+1F%7x$%LM5<{TKuI zPI7I4#~+zZfp^$)L249(_jsc6#GE@l8rfjj#sdkeM|w|H_)!7Z~t2!x8j(oWo- z_tbDQ6cxIpofLuI6s@}Ixv{Vh{Q3RUS@(P-K=&z+XWM6958>kvuL-*)_MMS3Ey9=$S2d~YrehP*@%$S3vOUo987oFI#*AfbA{E61+OoFpK z^7!P5wp8JB+tR(9v`g@Fi>M((6yZua;cga1J%VLEXB|(a6X=1%}r>#AqtDuOAw~ZIJfMhutwDaxjPytM4RQwF4`bZ zc6|D5`-#G^b$aE}Q@$LtG4hdU&k1=WauP*tMU>Fb3lZh|vl37VsN_$ye>sp(Ap1lB-Z+|%)8h>o5V{!u>8%zn0~3u>;F+;wj+$UF)x0VehBO z!pdkxp6Ab>Sw4vacl&Mnk*tCk<`U8vv(05TL0Df%zCODZvaDuBLh&x{oBQ&gznSkms3mZe?$y*z?u_D5n*z9vt{~m442;MxeQL0 zjbeY!vC=K+2V%19I$RhQPS#JhQ3}IpmoD79{hvRY6)zi!Mi{fiYEgm`jBYD%p&JR5 zHHpl3^z8IVtaYF56oE2IPo~@+_h?3R9qEp>Peh37cz$#*R%hQj21Wf_?x7JcgfqfA3Gkwl7)Q7^NC-aoyb4(hCN0;aV%(V{f`%cGs 
zGE@x>Aa4^}t+QyMV?}ZsMZ)B+Rrj+sRwGEaIX-i*P4g(G!>O3iUn~ft6s0IRI^w-i zBp9V>`rK)IJbh~YMqTSYE*RbP(t5`7Qy(I zWLJh|EYa0=JN6x)HO1~14E+IVBlfrx(uCU+CApP0Nd#JsDI{r21mID|Lu1w=MEMF> z+aK{{*&N^7Tb?ZR{WBhQa2po4-xmliDRpwj&`(5x1(uwc(P2aX!;>M~zBK5q3iI|X zS;y`v5ST5Cvz4)-BIuh~$Y?7cp^f&F|4t)CzHuT@;Fqh|!fvyPg7nVFz3ZCDe&WM6 zUBtjtUpfNY(z$60s~#PyM-+8huB)VKwJO^GHP|Ah49(W%7TYYu+wgKkL)#Zm1xTBg zCGcA)2*1GGr)ozh4$vMV(Xi;s9azDs>Mg>Q@kl*3kauyhlO+PYMVl|&%(YlNol6#n zIJ&)lP$XKS9|AVx{QjKDz$!mYJH=v@WiL;ciJ-^H8@s=)l&PT!!uJS zp+R7WJ>`k{Q|+SDWDdGtNc6Y(Xl^y? z1>iY4EpDX$h_Mw0;z_^^H`M26<&IBH`ph0@;bAllWquX9~e*5fV(cn2>mM*S#fOh8?>nsr3 zY)H25;)Fy0>yxa^mpOnXB8BdI_b6K?q!MGv5s%Xm3`2Pq8(GA`*psdBG;cL6KRa)h zU3WG-Lak&Vy>x>h#qHLyi`|-z8j7ULP>&kHaFM03zFd_u>=ANB3d|NiRm(C%nC3d# z{PM)3TaYDPDgvxZyE8ZM8cD5@OjxuB@sI_Yy7LI6bLE@-~KQ?j% zUE>ykQ0bh2vG>PXl}HqgI@9HFj@{?abJM_5(ta*TNipz6zbz7?*R+p0tE@&8_#H5? zr)~B}aTkdncH1QwY7ryZ3EStfa zShVk0)g+rC*gjXy&{3k?*fiJ@(L>Gg86(qKZmlAP?u!3`Cxu|;rI{~oe`)bckJ@pM z#-1_9>F)?>tgmhk$AZUdCfmO}2^t;DRG)r_Bos8W2Z|ylK;6l@=_X{oBg|Iqr*~VX zr$b({*}4f(A(SVNFMPBhtf zWPUQ&28qBcvapU_QEqn$(r$MTyxV@NaQh~jMYIGB_gaCdbf9Pv8;rEiwsH|EXhmcO zw!;2Tn1bdMU|4On0I#5HVB+6e1XYnN*Q4W*20Q2}aHNRfs=e%x)vCtwHbHnQ<#^$w z!eoq?_iB63(~)t%o_E=We-9Z03#71$v9gOq3Wg73xw<5XE*u$6ZdU|V2}AhBy;F!> z56(KcD1cB!Su(e0_b4{BVrGv{-R)PxDuAwL(5_)m`x__m>if`!F5y77S|o1`8KkJS znW9jRQ_~-@B?|LKEPHjdWe%8MG1)9YZRtqjFuvCPp))%c*%6O&JGZY1@`gwX2H(7k z?a!i+D@OcE>~&9#p+Oc__W6H=d||O;$L@BeL+XS*n%yQCxIHvW6OauOg;Musu@HZ# z!X6MNZw9kT_G^C{;W4)6Hp|nN$NMGAFfFsfqxGFI^woJZuf}E^cF2Uofc5llJ0L`d zQq$ARkU4vsf_55RUj8pRs5 zY137bT_G}+)R%8(G#XXRvbTt&zKEfkBlXtLVKf&M+b}^&M2$x#SCBzP>0d!lAb7>=cMGY7>-MJVup8#jMq6Yy z$8+qEC~DU&(Jmi1#dg?pKpmZF=+*WFK`d-CGxJ9xg{mox6=Lg97>0{Rb2|Cm_W>}R z+J;k%d8uv9Rw4>jo)<^1Sz(`(xigDy%TmoNt;N%HM<8m4=b%d2jI*l-dt8*zvqmQJ zIRtz~1m1|=Xu>}A&j2{)onfC92)y0Ik9|Rym!BTL-M*zTm2qm_+~bq5f921~xuo4F z5b8N{@ww5uib6&1--6+O>*LSr1I(_pEQiJMVnlN6KEc2scF|ccM}&}_Ou#WAKcFNq zCc$9cc~l6io;FOC0va|Crih|;pNXpQDgk)qeYf0j!#Rjm+|3Bm)_c-Onsl!Kj2@_M 
z-89mkPCs}Dqx37DvMh-&oOd0tq|dcaf52n#936IUWs>-u2y{jr_0q8=3%SR>AR=tG zja?G?x!!4C6$Q>%r0r!VajhT~JceVk8x^Jm3@#=1V+TEC;p29H=uFdCIffbIkkSpw z&9!<#)rAa1x1@Gf*=|uNQ?E)X?wu9#UZV>FJ`<*cN9-d{g*6xY`hSE%mGvCl-_HNg zGxa@ny?t5~3MiBB4SU`B-j_sFaTYLU@3yPjpIWmx97I}IPrzaJM9TX^LCQEXj>Je@ z05%J_$dB11A^$FLs%6vKp9})A%hS=llB0@jk4IApnb-+5jx+Pqo-~i+d9tn3Q#4QQ|Pt(>@g8w`0UcvXP1n%_XWcur#EJa zE1A*5_1KsGO++XO*Y)y}Vc!x-9fnN`m$=^+l*T|~p$=+82YX8K;&33`ZgkdAvA#>zt*NQ+b(^^CG?7QvJjk?t4 zTC8I`0TW!z{(EeI2=Gbz7q(z*VYl7o6dg@#Im1>{6kgb`Pfr`^;kL;wHeF%fg%B^& zE8B#aprgB4ZY_1@wAWMT0loS1eOjVa>k*@ z$LZ8mzkTVG@GzP;xI42S3X*;dmzHBn<2Yf>5(S16SgpQa;ZW^aawn4zrpBU02YYQR z5&>3@PhE9Fhn)hK~IY9^N}phUloLv>;yyIg`WcGi)ciQWIOj3TkH!Wg{&Aw6ZSJ< z8j!URSM@oLvJ+*w<+e_U*9;jrz?u~fJf7XN_^mB1wnG$+xH@%17GhyqWa@E6QQQ;L zCLV8?VG#`7bx5)1>!%d&gfiJ|IPL@t@YhB5+s=qUF?4%k9|(k7x1x9fLgTZ_Ti4~= zWfwpWENK9IvD!?O(7wzckqF=QB9Rda&P(l90r*n?+=c~oLCROg&B#ViH=+{Rz|8{i zhgq#y1zvZpSp;ACG(4jIDzCwFtwr>}0t=*EM8J26qN48{dum&g{a%n)$Pe!`3WusM z$!FgAqd(lP9k#ea*!xl4!t{;^Mg1}aUmfBgQ5)<=N0<@f>1r$-iP zI%jh0FEZ?AbnjvJDjf3Vc3>&@17k%A{pU`uRxo8Lf*gowOv2PHHriMw;OpOh|M{Tb^flY4up*-CUG+oQG9t{vJMwy_;T9xAAgpGV|5`QlKJk5aLOiCFMh(9}F8q!!+t9V{qw10zb zRG8Y%iR+5@NzaHE^+93wbG$m9xpS&!F28q5DrUBtsGa-{Z{TEEq<6ostcSTNSU zr!a-ZK_c2G0CX+u*siZo>S)CI{$w#qC)E+7RCHWNw#jWHw5631|HM8 z(@qPvf5KcnyyMYY*0B)$z#ko<_5Hs+4nt|Fwd?E~pXE`C-l2OYCtm_oak7lVd+R7f zt_ie!xcRW|!lALbK*w~zurwh*!5Z3rCPJtYZd}WfIKH$;MB@b|6&#VT6@c$k&70ob zfe%eHC&4@-MB1aQ-b!w%AcQv2*4* zNWE&T{n%rVRd1Bw&PAUiF$^7^b9jbbEI>~25tw{n>GoyeP=d(%rOwm;M>OTWc-iUs z*w#rWiHyNY?zE?9GAOfVpNqN3-}R#4_1gL=R&ShwX4(*eb0dqa4QSy}EUJ3r{8{!Z zg=u{5$tf&5mLjVcDX^u(Pc8B73DKyh5{zW$+S5W|z+gcqpQR^7AkB`FsYlS^8Oz|;u%Sm7n zcdzy*k*GjztV7TK*z!}qIWKk7Lc;b90v=k6XwQQv5(VY8w_h}!MkwWC!k410)5#sW}P zG}fn@ssp5|SkCX@h;f{aRbqJ4fe4oMDe!eg`M`bFAzn} zZA)jjE#20(bZpzQsdj^iE;>3pIz3dE3#a+L6&GIA4G(kwEfhh=`fjyV-yMo z?&VS%9pa=Yu!Ey;1Tsbmg@%weI_K2%zu!7BvWHuXHDMWG9VepL&~-K5@jM$`u=2zt zAxbq8XjUn6QL1TiNb{|JVWS-sjAom2y4TD$Qo^=Z 
z+8aLZMgclOO1-q(@>?F(LP)L=urnirXZ5eOAQUztE}`My(9>(_+coOSjG4n4vE+)aR*KrUNNHAs-6 zUw!j!_JBu4kFB;Gk7}(NXk{KwZQE$$6{b8aFJ;0}=dw#hqSCkzF1Fdlqlq#&BSPb_tb})xd+-~ zA9`xO1lG(;xybrhl<=+GEB)9pMb9~aa z(_@$09scmaB?sppTxrwOc?}&lz*Y!Sb+=|_*hY`KGsdxIi9Ido{YH-6H%6PnHTDP5 zDLvd^d>q?(&v_m_hiBPK3iF*FU$p0R?fBz$Ti)C;(_VHWPaw8>rg4>AT7Km#qztn$ zj+Id^N8E0geNWU-oX9oFX1h@oM(OLjSuX{BE?&l36CE@w5G~Zz9g+yG7G~IpbMzUr zDMIbj=w#WNi?p6>k?6dJTl~`-7D$u%8BwT+SP6doJe<+3n|-7(waI{oH7e_LNl3Q8 zVYvNJVP1vyIz!7HLV-CZPn2spH2Q^Tv=qlO8!X$C7vhg=z{Gktr>Oxvw6 z+#H@oy&V!D$5?c4X8#fl)rbn+Nc)dRvy+>LPUhz?C7P`r(vo8j?uaDEHY2jxMPxE* zn>wit`P?5nR6959v7bA- zL`-dkLvl~xWd5LFd()0jZLSRmZFWR-N{LQk!k!bLPWw}*7hXo#mw)@Bm7EL|eN$8l zo?BLVU@!Z^n}jKnJUS!R*g=pCPVe4jwgBy99_@bR;NdHcvhr#Zcpvpgh3wLCOQ)frEFVT)wb-Bwxx^OmhNGhV6BRR zHKOk1Zsd7~ESF?W^L!(g-k8H#=Q{h65NWZ~DY3ju1f%|5cdQCQy$yDmC=}*sQ)F0X zi+$anK;UTUOH4L`R3l@3!fy7*d2?jyW}kKS)CV^rCcVW*IV3?I_Q9&t=}R%b%UWxm zC}fwHhYZX*PyNC^bU`)-7`_~O7v7)`Zn9=izkd;`p)Ux~Mn3b~JHnwMFr1cOVE-0@ zQ75%?VQSap)Xqj3H9G&R;g#`n?p!YspgxZ7WCb(9ZV(LJy;3%t??_`#yO#w_^57oH z1}oaT&ir!|#@(JLD^~WtcCW(|^BPVr9B01}q{>-$KJmg|>v}!M#8WYoJMjbe>dWL#nYZQv2g?l}+(vlA@r`)(KL%_vqODj{+K48s?`O z>f|isRi_-Qnt7~xvIe`9KQAQ)W{lH)-!wdL>}>;^`fkM^B(4*1kg*iC7iEuoMf2{STKX6Yg? 
zWJVn%5~Uzhw;z%v3jMphLMoOCp>W!vDV*6u8ceb@B1OPz`;Fphz_yx4WN-8TJUiVa zv#mowdY&lM83H-wwnZQmF{>vQS6hTbOJ!!TcN#U|%Upjqe_glKyW5Q&1 zbPFCVpAw)pPtL~Ji$~x%I$_s-jR@~Wjqr&A0d4#PQPnfoazM`^nqYYKFcj$?vIj&U z?@FE7WecbhDoYTP8sSmcj^=9K_VVqlQ8wK@&$pIdwkFgXuF|}iY zZ4x~+6+Ie7Qgt!g~@ z?T4K7>BUZ(<#Gn_w6pxL}f}^V>xUy|d3y z8zdCIXeanLm5NRNWfgAb%)G zhdZ?3P_13@P0wVLZe{GJf}u&HTk$;@LfOjQc*2uKE4|rPEeLa2mGaj++SWOyHhLCt zNm4JYmUh=<8|ego{6Leyq- zpEA{Q1u0=+Q4DKr#a1QEr|7QK6572__eUZhMboXtA3>6KG^b1Uj7Oi@*dm9FFFT}; z9%Nnnrl6{*c~zueTyLLvg6L?y&d$4%1oX*6YoqatIe~+uY!Ts7y0(YMTd`oMRjH>Z zY^@01eoOgsHA3%Ti}twa)EL*sId)VabS+L<3+zwAVVD^pJFH1AFqYPikP#BZjiy+Wh`hFN zRAhDIpdgisFU?2=ot95mu{a6&3Lm3c#{8EpXU?}&|e6~|P zUiO`GZH^EeGWW-9jVC7a!&P#!sHONJ*~mlxZE)u)J`j z4M@YT^xI($QDiJfc;p_3FHd3)T5msf$d+XnRvmsONNIx@bj-#(=6(`;=oL1z{poQp z*KpFez+tL+J6?UQR*<^s(mB)CDNI+@&}o|lLLp^w8~ca2Px8dwFo(8JVQQtck|9+a z{zLwBd5m4xYo4-8MmIbET2jJb>jt#sS~u90!qiOf1P8u&j~1jws2R@q*q~!-%u}>Y z!M5yH>yb_oW7t9F%5p@4RrX9P?biaKz4<1~0;WN8Mvp)UZHi|`{)4f(y;_h0Cr4J= z0nY^sugDX4L~7$QdtPC3%)$;QT-wW?FgM92G6L|g4}%JAhgi9kVZ1k#RK2NnCl}!- z{lSA95AL;70#q3@vo!dVToLF`#>o|(U6lKWBtDyvfzoKXay&1 zms4{3XdjzPh5ZeDjkjNUxocTjqfSaU?m(@pY)zw9O0B8t+h^6 zeKWexpnp^7ogz{HW#}eZs{>ZT9qo`n;2=B3`T29g;d|=)khCi7Pok*)w&+u_e+a4v zcFVL2t~>W|X(EElCXz_ zcqLMwx*4wVXCExk?Ba2!)~-CYp7U8jHyEm0*vR~1v^;)9G2tbBV%Ua^*^5q*M3p>l zPO(g23PC!etJ-Xbr%N0XW5KctPn~0R?Sk0tkHS6szLG`+l{QOEAE)+(r@=D1mJSYNn+^thbEB7b4##_e` zc#UBA{E$wFR7K}F>4lw~lIWzQ-j;|2Llv#Nv1}(0nz$^%eDatuix;j6+G=J+9?m2E zi>Lgdcp>ZHKMKG?4>Uaf?BNrOcH7r)fF8yf9grXty~VB)jlx9|cg@yIFcb&fBDQOkizQaG@EXfLjp{tI{PbW5gHDxNZC%XHxs zfxy0w;{Y_9aar44xp}0_nIB))5 z$b#>20Iu#BO*)F=ZN-F>Z}+7MJ&SZF|4R{QA+&$CU^RL(=1<0Ynn)`KsSB&|G>l1` zahbOkN#RGF#H$JsCWl2^-2PZF5YS>9MWD!eQeJElV3~DfAs-1g*0>Y!8&TV-sL=Hh zM3I4n!=g}Fa*r`Yd?*;|wimJ*cJcSYl!~-c?ZCN)2-L=*WeDZ5>0|_~u`ChEq^)K{ zTg^f%6O8J2)v>C*$Ev5;D9Mpp(x(WT0m)Zup6QAEoF4>lySdZ;fQ1X15Nfrp-}8j-MR!A zK21&yQrl6(7;Ak!4Q;VW)3s+H!JJb2g-~ejJ8@86VKbcI-hsN=3V;0MQNHrr;0 zsZ}em0B4UoJiTE2srp^^j3B(?o{aV2AB4zUr(n@IbJ$a7tI^{2mS8CVz(jrlL(ktu 
z2{nfH`%1gurgJF57@Vve8IM)kO^!z-3o2r0_;(kE18xEtmG2Y^^@;HC z=)y`)%tnY7-kQ;)Yi8GOHYQEcE#HwQo8`HTs6^`6W(!fv%$nLE*Lox>EXdBU65?w= zftKzrg(NsXMbd{6OJQwxZUprIq&Sn<0d@&&7v5)}g*7N~obib{a*k>&h)d zG~OSHA8xj;g1rCu=7;S*g~JyX6c#x~wHzmE6T%pUY&$!tauGuta?p~rD$gaW*AS~! znD%6o$1--YP~Z~V#0Yk*6M^?r$ewsWUGMOvnUMhBE>F%nUpKZ5_MFGdpbYoT&kKe! zO0pkT-WDaW!n&@|{w7R;I(6z{w`UNhyCjQ}F}q6uPB`10g40Pfi#(R22@C~0!0Z_IJnp7Ugs`6y2;2pG`hfyB-8*%*!-}P1_SeWu z`zwxO9I(giO`%YVK@uVOyD-o795le*RhUwgb5LcU%{-Tov#DM7RpD@xgmts_vm`vS zS6LTD1#SwsU_{w;fKxCMq&@4J1{)-bkICV(b+?tIpNm`vRoH~~=cFfr+~o?Jntl$U zYz*ilq!rOZ%MNU9u@y?x}m>)OUv;T$4TmfB%|Nar8P zCP?J)bVzm_RhM?zIm@R=Y;7f02>7xFdwXM`~@oeOZv|J2dyuBi2vA&wjRW z=&N}M9YniR^uQEetl7wU9!Ov zuQtBA)&A(uYYo9!lgvUL_VoRcIFb>9zY2%eI=g%v=VA7dD3l|T##w2f>l6|ktD1eR z8c%i8>{3xkP%fA9<#vrAZ)D?CVi^K-HuOI^)4R=|<)eFh!MHQi#@g+oXsnZm5bh** zai3;GMI=Ks$pnYB!(BD0opbD$4#{LNRg^8r>qg<1%90*8T2|`N=n@Qtk$apCRsT)N z!b>0p-sq2@(IOM`je_*}T+FyWp>Sx8qibHBYfpPp1iWMj{#ilKT~h2*1I0fdsaKrx z2W0PR(h@trGY?Q1$x?PY_E{kneNAm75-F2T_EiyieUh7#n-r#Gk?>QdKp4Q-#?i39 z6@gEtvMgEJx+=JST_IZFh+Q&{-EEhrqOw@(vtU3;;BnDH$IR+|yB!vUX&L*M{mH)x z^IFtV6ZWx3>Uo*p_(YgDU{_)!NAw*3p5FzEf4UNKxhRyB6@y)+Ft5y#6ySFR;HR=sJ|fr)3BztSmdnuOyv3gzIN(m}?@8gVw|x90 zJWl>aEBrCy`XsV#oS;f2OA~WE-Pzi4w#3uLD=?Iju-^%4<-MD;maT{%RM}CHsQPFH zJC)P_vm(GDMrO)&dgW%+`E zKkSImCiaj&T$nsCE@upa{OJ^h7}Ve_=&(~x?%I8P<|J$~*q=p-K4mHQzbT|jky05; zF*_{+@02}~RXgmTg4B119;gKWTOcsNm8_PnpR}iL+?;BfVCQuUsrwBX*2gY#kQ~8X z`j-XB76DQ9`tJ!+YV}8HseWG=HX_G5`;Zd(kqB^E9LHy+^%J7R_GpE~hhq8zA(sG+ANUoju>0nO@Q5>D8ZJjQ2>jy>QJw2}N zZbuZRf!*VfQ%hEFmJo#1w#nV z+XMlXD_8)YE~uGdb4&ZKWxCB2(Z>hr=A%R%N&7`~a=rEHtAAk2J)`J!s>PZFL&3_D zh-9~TGzA;XvBQ2r^<%l&LZW@H9XUtQB|>V=W_wN)Uvh8^RgA6sHj(H-6|s?aTw$te z5ZVJQ7;&=Xk#FuBXP><#MDn^E#742VErM!5#{a)6Kq-kGDwB@oQg!^kh|~nc;e0Z; zI*jFxWURHH3i7rN8J&Yo5AU)*grQ z*}A&g9&^a%f_1q)BdB>`lN=&44^-PBk@y6%E9DUG6`|0+IEI;oD&*^;Xb|Yq#okEA z<|fcmt*|qqgmYF&4Df+Vis55XDKGNEEWhkCKRB0Zq~*TPJ}(MI!h?OCecb_9xufkm z0Ul)tDB;kxpTkt+N)(yxenIu2wNu+Ia_+SWPCQoqDEIHjs&`2eVwH&OpZ`4Fo>q_& 
zs-01;6Y@1Z!*t``l>6#iWB*YUwZq;BlN0>zvCsbyQfRqS%%*?a;r>N72^LYJ$nN4D;k*vV}|l=y|ssJa#iz{9cXO~-0hN^YQLVPv(b$$sVOux*@Y z^91PW0~y;(#w2Z{u)21`zDPD{uWb_r{#d8!P-T~(bgog)lx`+Unr;j+GFo3{Pbh(w z`4as^#f;w?`o(dPLo;Saexx~ptO!&xKiG0`6QT|$l?ZOi(FA6}VgC`PXK?>&m)}a5 zq=$Atw~l?|_XX+FEXif`tigJS5I(p87GT5}gO!4{h!LKuJhW4IkX+PEzc z&3Qv>QP-Ql7m2qYT`84cdrC08ZbYoS#4fxoL`UnHD*K#Z7+HJCij`d=0!16yx~bSM z69^9v!kU|1=MOV(BN=MF1j83MoNF$QU$Z;`WzFIw=4XObb7fxAVjjkrn3Z|5wz0Ks zV>jDm!SEV%%gby^dvwi81mSIyKbcD}eoWzLWM8YnJ@`p_Tts+gAyaGz1VU!z33z_i zBkwCnR@xZ>DhH`CsRu-mkDze0blU&!k;v zA%eVtE0cWc(<9m1Hu3tTAeo$6S!LrD=5zROjm!=mY^KPe_r94pW<(rN2EzkX{%GuyFY?+h(UTz#KhG@kJs~skp7+E@zVvtR+y4+odoyQHGp> z?Gp%m_LQigOr<;_8qCPO23KY4?1(?j=FFZ~7{;Yq*xY0<2vQRm^u+l}iRKgbv#*O9 zS|qOwVZ4*V@GDDlQ|F%AHN`IM2?d6R3>;!#RahOodWz29tLDoQ*f&Ik|KV{wd)aq9 z`Kk46Ius6-A1(@X9A^p7q+KDoKDB1IjTSL{t(detTisw&Lbo)h^iK-7EqGp-*3dpWZm$VZ ziSjYgF#~DQcSWEq@+0=1hbtZ`u#XjPA5SQT#`Ft%Lko=Elfa02ej<9P*jMTiN!el-_JtDK zOTHb-art7GiAG^NvxmM-;lL=zBJ}e+MWFPNoj- zG-@-mNB0cd?s>?cOxYOb)P(I3iSlz!P{{q!lb)Q}yuuC&z|YB+Dtk%cP=Z4{pIb-o zI2RPHq#L$GYcuR+gDB&G|2rl6Z+{k@!sX*|uE@>`X^$Y0 zuy7f$hwHJ34C7_Vc&YtEX{Z27i0uP=Ul2ABnl`&5M4ei<=5*a`yHb$6*$VRET`NSw z-u-*q4W95=6}RNG>@Gp_&zI2FPX*|nxNT?W6Sq9!P!(tAZ#=tnrd5bSz8v(?1%Kn| zDQe$h78SFF;jfh8mhL>?Au?<-Cir`Q7Qx1NuKh_!J%J6MQw&;oncpQb=;>^^lTQIx17HpFgFID80MW3-F;r3f%t7UN5?8A9YcCKp^J_uLKs zz)0pTPO;+J)@ zh2f^WAo-wuXCRp8-k7|>G886GPC#USxueJ3-1TPrnZk5~=$vZ2&H|2ZXMlP@WXi}1 zqD~Q)*a%NY)oU_Gks)$tR4Fn=hkQ*}tMjKiWOUaA-`&oL5V*=MU{bhn5SVeVnM30dphS5Y6y7R?e$!mIIDvORQFamv`o7#wk=TF%WGWg7Fa|0oFPIM%k~NS`Eq+4W@U@8KP7P` zizd0H{WeV+PvZ;OUtbd`{JJ#Mq#Cx1t^EfPskXbKBBy1iM4-hPMYveC4~1xA>jwB? 
zSBξoPKMBjBr@&%+G2xa&eR@(zovy8!u)iD$+T@6WPFLi*H(T}~XX%JN0=^X`GW z2iw8`Pc*cgF2i-3pl{>gDEt2!VSjErxGkPPk#7?N%&pbGsAfFy-qRIm~^?ZcBKeaK>qZ?dx5l;8PZ~36bKg~M5DDK@|b$LsQ!x_?8|>e z(DwqmwOncJaZ!A|N5y#J%M@mzbXJwGp9=Oc~Xc8abjhS5BnlU$HrNO zqNr3>qy$|H0dg;jKA|6P+bs<9OyYdMFLMGRn-C1a6}#(@7K*n3NUSvAk`T|=arv{L>-56OwSF`WCkND zo70sQh(xvBoo$zuhSY@!)o)PPm&INzJ4-K!LRp*+GVPO)v;fmaSByEAG&6^5baB#V!g{#v3>vfFb(>LEpxn-MjNNbgKw{fDf(3VDOLAI6DR_OgvaK6&yqM&* z^ss33;wU!r;_TwqnuMi9q6vt)tmHs){(V47kjZ5%-wP4xxc}95U4U4W((?b2pl@An z@wk}X(wVxdR&KCv74#Fu2@C8%c6cVt@!57qNQhvnM3zSei1^b2PG64%c&e`a#G(dE z1ej`QNHt=1QIJB%azl?J%2Ofi`y74W2@zW+0z)~W0A1v@0*rL0*~X2wUKG0a?Cff* zRhaI8ohxGs`ax_AU%#>7ji>E30rA4;T8DzZt)peD>0@Df#+~yT?Y#SebQ`Oh2cpjk zQ3Ygxi|mpRg{8o8_17N$5~4a4fJn0_ysm>^$}MMu3hVRBy)x zebw2v8Cd(fC={F8G|#>?may;sLDW5ACW8N=YoEQoeP$yX2X>W+{x?!aM-Z#C?jrgA z{vP%!Zce9wO^^pK!hRwWbxcZ(WKc*R!yFqwNaF=*O(DN<0Z|G1ayJr!`>ZsiW-#%R z`e`^~$o)1g#A4n_W|F4|2oLS-l4(}i8M|{Tc9d;(fF~L&Pi$XhzZMKv{->GXdQxZ? zjs6%ue4w2T$>o%77EdofC5l;|)w&6Xz%d&tOsg^T(6MlhJt{~ua`6%tvByIs;s>}b zw9F8Rs~c=H=7z|_^O<`MHbKyj%9uD7Dhg~yNYJrtS;x*5wmzL8Rnwf>Txy#|>0H=x zYzDQ7;FsW7-SUp46JRxpL(5wtFalXJu>5ua#1z@P0u-OB+7LW4{rMcF*-;wp;s;4X z<1mlNo^*{6QvhdEJ6^APVEHR@wql=*c{OdbcU;Yli_xG?LOdA;-ag z74{=>c-rB4JigDfe}*)}IX>oc9{9H~-OcOfMDAA~3zGdEY$;bg-RENu5=#td>QtKWP@KYfF zT|EW;=nk2r*ZUudK=%*j$1B(F4XK@BrPcPhAT7(sVw>d%xWVM57B+CBRfN?1fK>7u z_K_f2wKk!wV4rye6zd1EBPU4za^A8zm`RLfTa zdOV7)F*&kL3Wpb0*$SPVs;hDqL#wPxQFKU798U~e!*^snRQ}^%6877_IO}LINf7e1 zsI)G@Wc{ncelqHQioU=O`-f=sQ*vUw2pOO+JPM>46Jh*D>~bMKerL))>!~maB|pJ- z(kCR$LB%Q#YM0;iWKdq%|Prn7THV@RBNQ@m_>61!!MuI?%+>xFkLMwSq!)i zX{a^fiv))czF05lM{u~5HyJ@kBDs71*`j#v63CZ&!_TSaDXHcrcWuBol45D@@HkE| zS+YlksUM>72ojG$UzWQUXy{cu0PeBXS`1O+`|#cs9Q@*9s7YJWOuf zzANPC07o(L#zNYcryty9HwjSb0X+s-e}#SLyIp98-7E@KaQ?7&*kD25!xMEmzAOC! 
z{yK1yWW&P;NQV~iA~ZHYB#@?JF=L!ycp#FI2WP86qPrZs@W`9atq_4ER>!_TNi{DD z6X;JNL;6+PdLIhX?AG=C23_#8bG3u%TW@y>6WOhA-@Z0VKm%PY)y0`Yw0anh3g(1x z5sHut751|my-I%8v#miSx*(Cq3@xxm!Eje@UfOAiVv{V-H!rd06iZ*j_CU!9uKr6R zggvSDnN;m2CoZuaMGE_^qNveu9!ewjvJl&)vpe8jJj}K~i0J!!T#O&-lfqQHSC4-7 zUy*ZhymL*q9|}|T!;864%M!>m8!l3(ommv=<+DP(F6RdiPU{H8lU*K+%={2;-&b$R z5I))Ptd)fDWR~KS3ey#5=PZ}>@mfJ&yRlKuhW`lBobg)@SK3!Hh^G75Vl(aQ0(6W{ z22P3bK@t4N2tPDhu`s<6Lu9PL7NlcQsZ84X5PR*-*IxUiH3%_JJ7%vwF=u~A)kbR* z#V>V~&*pH3+QC_Nr=1qv&wW|gN1;aqUo87$($3HHS;20PvGc>P|C}gPB(JrJ_%8|3 z2;5)cZ&`pJr{3SUECn@OSY+MMBCT<*p-~a(A0{UPlr|xJ%S$DDg@CWg9Wi8IY_142 zVoWp>wi0u|wunOAu-_2N(hXJEXV=c#$Pq!Rk|(CuzXce&RKsHXIHY@{jGoiEUEOa#*Q?%Bh-Dy-R@g9~YF;|Iu;ky87Q1u3Fu_ul<&Q24BU^;R3KusQ`B zPEor*Bs;q8^?G|mfE-b?!)>%>3i=h4-P%-?#fjpHkbIaNlS~xwzc^Cwnc!8Sm#4qN zm+?e?YiyNB{x=urv@m3#trpF9Vxkx`x3*3MKb$<;%(hq47@M~}?1V6tJF~9t%<5(K zo*?Zzum;)Cu2BGOD$c?lxAhVtN=IYsD?qj=&SqgT&STxsg5Qa$T+iA|ciO{Br%|Yz zZkZu``^e#z6~evx^|YLj7rsRp*0{xlsnW5sW&DhLNRwdXaj0d~^Fq8+<*u`W0N>rw z_-^woi-+8pVhE9E+ah817ikG1=Hni!-5s{k7AuO%$*<W@WUMS%DyV6}47bGDJo4 zvZDMDJt@eVkqmPjr4)YMV^$f`I3t=XY_lN6qwA6{p^e%QO?!F$w*w)I)P|bWhAJoc z&o5{AhBO%WiP>pkEdZQdI7ysKvLR4K6tQQ+3%F9yPf=%~g^6{wNZjlXPe+}^y`D7+U$Z9M(#9J^TwDC`hk8d*d2mJGarLffP%`$eUy?ee*Qv{>r>=dIVIosxo!hG$|f8DHBh&l|s?>;-Du+KER zs5ry^lEyj3d4(Q3Gf@<>haU(T;NnNZJ`S05@7vwJm_sIX67?Pti`k`u*IxY1PLJOl z-xoWMIkrv-W9xLSeNAboL-)RY?7H;l2Uf-H2ZDa}4%IULD6&+-q4)dFeSBNv0D%DL|ZIVQB(juPWj-qm@t)>KV)pO z3;Cgbh;f-E==%#<-F*ieq&NNYbXzZanENnXCDB}O8^X5-S07w;aJ}si^!@a1UFq31 z+pSF$UsGKsI1Io_6HSX`FUw1|z5XTw^~2amQ4|wg{}!h6GT}k~XuJ=fn6uYzQCLIP zz^foOu>#~2Ar+!~1jz-#6CMNe?5DzEXXA&Do6IAkP?>D*h&kL}uncY4;VUNkUGcnp znf*@&k$B=O}CJXN|2y%ok*gU5(c6;ni5vXP)1NHM_J0?it1f~b< z>$w2B9zG7gB;OGtWyi9$rz=+35JB2KawtO_!tYeBvxveBZTrgE?JHZ_S5`RdlFzoU zY_@SBS+oSJatc!r>NfK%Pe6TD%?)*i6$Yqg(yK)Qc1**pZxwH-<}Of*5P+pWoK$xxk3PVku4L2!f|s3L7i8-mC{Ih=AX%yB z<@s-@cdDi?RkPo&6UEO!_W~YI7FZY2$OoAgyh>QF@KF|m@5&`^*`iR5wiQu(DkOFm zvPx}?ce~o({fkBBNWf9Fm^ptz{M`2__vPA!!gSaurz3WS 
z5d8^_2ET1D_JD~!SPc}bX>Me=lr{$u#GAo7I0qcC+yU`d;e?UW#e z=Ek{vf1&VPF1WnOu}g(XD>Dl-H(L)uO-)^+O44Z$J&Vg9hpAp_7Yg}`^OZ-!Q?~cE zio(1-w2LPP{X*d6LL@SmBe@+jtu>urQrtC%i9Wq&Dqa}uKOxIbP3d)Z;S+pMf$u#t z&xfg5K@@%Zixi=@XJ@krh6?$gIGWx9NG_q^6IhRlV`Bb(d1o3+<)brPy7z)JBLxY_DF|*~k#? z*R!|fDID4_X{OE2yUz?O6aC!NnuLLo6|mW9s^g|Mx;MIwqWWo@iQcPi6825is}L_5 zA~3Ldt(yw%WdUl^y?1vzp)kFywR8VyJ1wZet3rhydkxF%@`O)-vPKyez)a9j$@_DQ zx*NN+%wfFWgozXFUwXF@1HP1trJugTLONxu_Hwu#x>l;%uY>=;t zKxgISF;ku+-wxll&t2UmRMx9J^`k7R$$o4ZWB}oE@rP9D&WTz zRW1yC3RB!Dj%*Kx?>HG&a8N7=QT@C1v82M}(e2?rk6Kx0I>cM)*J*+@Ppp+}n-LJM zB(aaMG@yKBMxwS>h|0rz$6~9_bDY;Ui0EsS8_&tItpVr9Fd3FP*>(%lKV1+@z$(uF z21r|b!b3SGNW~;#ZT}9R^88nji@lrA75m6Y5^bf=2M~)MSYuZSP!rhpPWykJTwr`V zO*~P$!l}q?uU5x9-KLOgc%fymI0m<{^%Y49G9NUX3$ zB2q}(c2pVm3D6j~_rlz;BO!+q>q<{F&at-y{dyl2!HeztX^ehF!ed4O*$iYM{~2;) zOfq7!So-Y}KKEU7YFf)F%sF=vg%2j60heVx((#F6Y=x+c9W2AK_Oa3Xyl6EdluK2xzhZRq}e8>65v z?Jkz4jTNS|oLWT8B7)SF2M&G)GKHus1FoBG!tzB3^J2Ar)va665@(Vc$r-&Kxama_1?Qlh}I?Z z3+*=w`!Q}`xyo^5mbS0lgF`Sottj7%8G3g=BTNB~Z?(<-B1m&|pt1{|1o(d75hEXS z-P~Whritw<*EpWfRO>FHpLN6I+M|1lKt65L;DinmP#N6VaTXj2u%mR9u1JLeGD>&A zgnlYO=hkhjtrVobqtL^W_|67l>XfS6=3Gv}(VSyjMWpQ$kn8-d!d))DM)SLthx9sU z3htc8WPTF2hkMeQ@Kv7y{p}NlDP|b*&MM~O{{~ZK{rEL4H4~(($@Dkn)<-C;4Y^_U zFZ;E2bNGli-Q65~M+*Ac3~PdCQBR?sefwNzk0_QB2jb2y&SCJe@G(jv7-F%X3Ho)0 z5j5SF>ui!}eBNHxXeA-}Fc_1J{*2D3(jE4c!hSG#okpU0i3ltW-C*9B;Uqkt7l{TT zwazoyAt6R9ADKV{)$i zx?LwgKE3+%v~CLXsS_E*=vK4c5n|it?y!dxrn6f&$q=+n2vPmJUuTse${j_Q*cL$w z;3|Z-RRJ>f`l5(g*vSx`6T>0MsSr)&MoJ(A4P;TMQASR&U0&+Tq)*CUud#lDOe@Y{ zMYs^wwidM?hcx6+ghjUzg1%p2%_MM`83^hNbO{#Q!y?ch6M3ZNZP2oWeUH(DGNoNB z3Joa0leoPmK)X-vU(kVYV!I&K8_<1#eGsB}=;cMqzE$Q^G8H6$-X%aP))Q%n8wAx6 zb&bwX_6EB}6f{+M_uQ!SE@NH;=Vnky$aff#mz3mVA*bx<1ld1M2r?~V<0ajn43Qju zcx|$&g47+hZIi7Kpt5I|SJ|%=reiQ%15cixtJg&E?QY$Ob`IWPL?L4Yd07V+OaW;a zG2UpG3TUaxgK4dOTTp$7ArY;_9>RQyf4+L2^(04dDafGPEn*- zz*hpjpN+awh~hf9x6M|VHNi$FPvR|ynrv%`KD%qH)hSHYJRS0j&c*cCfk4+`GXukd 
zuZiHQy(zVEgZ(K$;s=c!VehBm9awAoY&nVjYCf{fS#DfspBIe`_#(+6l2agzg^*2sz=b5_?35yy=%Ym_2HijM44+X( zAy!w~1SumnuGn5t*tg#M`of@pN;KcPL+iLFpAx2+_VPu}Iib^@J%1`h<_V8HGwd1x zzJN0*bCK3HUJP+wWTq)>>3CFdW zwoI6c504^!D|zU(Aw8p@_rq5N{ZPAKn`sEloDj_ysQH3;?8)$v({sdDx*(Zm@Z*b` zXRdu|Is`iF#mdYVEA7idK2PbSkVCnBRkTpABF=Vw)A-Ob90B(eW(;^z^^8)nhx&_1 zC%@6cp18vX2~sR+@O8@X>!@@lIeDC=?+xiW_@LfyqeJ=$apWi^VHgYXPUEK0$^(QO zk=j{lm4b9VJ(DcXV;@P_j*tTDY+M#>ub{6093L*2ts-#7yyX^qJ$y~o_EEvz?D{7~ zV$2ac!YZ?X?-V>VN@V1P6s!DZ-Yf>fZm0MlR-1pEk$(Qp3}VaANKLo#h+_%Z{% zFM6t0keVHvGBt3HcBDUd3jM9ZGq=Za>^Vj5DI zmXSi@T?tz$g71jq`<9<7on|{k3D;8067y@#o^MA)rMWuTa#?v>(2p3b7ASdk=`2W8 zOD|w{wSX@tA03ZkyG{h!T+q6eljqGsp$wjM17mfpsFZQ?sZB5@?D2FSgW$^LT5gD! zASLD%l7hZl?=9x-Wp;?BZNtzRnj4~Hle2N6p0sr#`oxm;XmQzgLEmsr_iDg@L}3QN zU$6^j`@Dysq%E_VmkAShYU@t>y28Hgt@FGLZ`Y7Kk%w3>gV-xzs*)ei!Y61N=SJYx zKPnsq4rRy#el~_Ynp~jTlIhg>k@3a0T$rB4c4V%t5TKLDIs@}5>(XflYayNzrk^7P zt+fUA=X5MyCz2lfejkN2j+|u_=GaFe%a><3@s~YG{=`w z=qyp+AjARcP=#9^IwDBc<%c3u>5L*Yux}LzS=KB`ZJy&PG{Z5v7TDcN;O`0UhthkZ zn_^j_`!T*xw!0Sy`_>N}G0;{7c=#D6>QdV#=x5yo1W=0Xpa_)f%<3Z1H^UBzqF)?m zmXlu`X{Wy>B6aQ3qkC8TFl3@fIT=;9i|6`WovOgH*PgXoMDVSl$p|FbZK9AtQf!2K z1Q>$Wjra$&`-GS|W9}Yq_oqKhHBFa?wFgDi#28AVWq!Z**@TNNTq;67#-(Dqr%wynwdn`@kQ6Cd09GEjiqp3+` z3nGPZ@t+WZq7SV+w8nlRKv5@mK5a8YxI5-J<}2*`8ilPo`g*M>^bnS`b+)M!^u>!U z3~#Ey>P7Rtnh7mw&wq*lS32$ql#SKd0{f?8DLcu2`mw^~o|yyVq`*EABwgD)euNyL zF4%iS?o{6CFMP^JBW_S)pBLcMqJn7KDohy&lKNQr^kY-7)3(>TiJ~_;og6- zYYpKl8>m<+&G`f0lQi@p5h#1$Xj~#b9AaOdg`=qLn1g>*1m7{}y0k4UvK&$TL>L>v zD#xVo35q%k^K3nP< zm(KV3dDHHxol{axn{ASa{&({BJ8YVeZ(d$(Lc$gbQ+F;>@};*|F!*kF4?=E&Vt$ts z^UI3!y9);dr^?K~^X+hF7W?&xZK(jM)Hp1E{8f-1PE|Ljs`sXDzMQ}z=L=8!l+=B2QL%><(5vSF`*J!e z`b4qy6{K!&On9 zi(8k$BO8u1*q=qC%D7hJY<0mi9!-eqnAuH;{?Y(7ZM4Y&N=N7LN?R=WDP>$;P~;_K zn!ThLznG9Eic;_2hfkfIinaE-Ae}Z&PsK+CXahfnyAXB2`mqS~%;yUX%_zI6|x}%O2r2g;B z*knO>Cpm=Nobhu?hr3ISnl2Thj?=5 zbqpgZPlyn%LAluYwdwSU-0`+fkWo1{rN)^m-GrNE0#+OHktx#c^JM?KSrm03@2I%>@SdbnBGLn| zY?I=L0Mm5vz%lj@h5dvX#2mJN3HvSHIcSBWy-KzTKTfC1jG!6+<;6aaZusfwt#JBh 
zK^K4SmNTuN=oH%Z;U16L0|Gn{w3n}UbBc#bCKqNgm)h8nZbH1R#eOQl@SIstab^?G zfP(&{?T*Eg25z<|M5P7TRJPLabuJe!6}EGzXU+;oM4~9xWLT;rmB~us_SipAXn*0h4z?& zevZfx4nLN4RwNpA#*eE_S2%1AdhVH_=bo7&`YGYuBE@W0IvaFF5G`Sss1}Vn492}Z z2did5>IYY*-ku9_6S_6Q+llmNL&Cv+q>+{I&jY z@YzSuSBj(lE|@tti$aO{?1DBp9j!*Pp6?3L2)8EekpTFuj+_IB_WiEXB8u_lI`7>! zM+7Q{0b|J&*t+oPP2Kz4WLpJjbo;wj_K(W@mRTx2Es~@a3so zCU;qjC{%7taS`VGuu&z}O?h0&SRBz;|OjPP)B$!jyTe3`=+$ zY;XAVih5I{Q2)5ZJB2rMl?!E1Lg~^E97aSSuF35UPu`yCr;t11=0I+O2|q@N{2D+HD~|@kFt>=Km8Qtj5ci&*6>w!9^TfhEyVQc-`8GO*v~T# zJME>VGD-5Lh+2|Zw6M1Y{O{IiI9IJ(vx}pX?XRNx!MHP;VgCv6^t5>=_tn~$RuZe( z$Z0ZemkRk!E>*QfjxR0?@$ku|o*3fYEx24a-yxmfM_|<1142v_Y#n1NJZbsDRBxQT zFHI8g4dB9pdFd#dP??ByI~Rn`7o!T1G+j_KZr+L^K3gXW9d!H1G4@M^weQF>gBK_5 zw2ARt_>f7N(t~xgy&wwZ<0v^{F9jek4|YmGecr97eW z!e{(iY}OCP*H%NMU`nEQdI-6$nCdi}cUgZ?{1~HRC<#i*?+y^v7dtMVWN{A^fri5* zEU+;Geg;G*W!g_amBm7i^zP-rQ`I5(lz&Jm{MLxWXhew8I3tFo#~fj;NK}XTyx9^W zP(n!-P6o5>zXR-8w(``PQkyAAWf0~{SVaKXCoMP!?z?;NK-;1iKlxZu8E|e}Me#Ex zRlC#)`Ze0~BKp=8=N8$Ug8tZp@{i+SdK#UGQEt>bjw{8v-gei}ok;X&2>@h(NiTo^}db8Zdby8}d@-8kcE=s7kyjY80 zWD`VmLxKgbxdIe;V#X}ns4xX|EUY;_W3Fux^yQC`pv*R56#r0kbY?d&2Db=9Qs-Ri z$f#6{()jUbCt@2sC;~koH`_@&EJO(h);u8D^UJm8bd2LU@p$~fm)XBUVub(n zd~)Gh|7~6jD|rd~k{}h&!J$;Y5I!+yxeW^8)Q)PqLt$SVzJa^{U?7B3`ZOyLfi|(c z7>)*Zc4^(=M#jBZ)FD~YY_pZd_XF&_q}2%faUX+N&~t)*+>y=P5A$-Ly(k)$OV!S@ z_d~e7yw3imu*OVUbFh+^+ZWeC;oSV{F8jKIeAF>Jg#E-p$L0pb_;xUlxIOn0foMDm zAuRr=5CeLjq)|%*eC@I(AOIogXMi&tk4wg=wTkE`I3lezNay}W6kkFzjwpcrK4g>6 z5tA+RPlB8d+=h?H{}#9H@3y~&@A!3FOsCe_-@;c-P3h`Qk) zvk5)Rn*^yhLeR@>xB#s`Q$N?TLPU<1=C}azqd8b95%6PiS7c(YJtgd0B0cE3<({IS zPiI0x_epzNi72*hyL+~KCP27hQomW5&H^=Ond{gP63Cv$j@7m?z}(iR$uOTb3HtFK zgC|&cypyd~G%DZPlx4dW7LWVcz$DZ%mnWwqAs^=281{?T1?ie1gq84h9U@<^MQUq? 
zeIOWE2AE|yJX^b~=BE84B$PP+tWvvtJqi7Ary5q;9RX^)Mf~*vLCQ$R#7uTbx6slB z&YT$I$Zz-7_ENm+D53Aij;c-ku_>a^q!SgZZI;6Hx_hpN@%>Clo!T@rRlP_|%{8Kg zJD*q_k|lJsZ(jVhAhaNr0Xf5U=OKF`WJzbdxt%w4hiONuew*B^98@&(aRRg5x%&L$ zrwUj=SIE2UCm|7e43+UP`|Jk3QYO0hFSpAB#Q5j&-OKE%05vl>gzPU7xhL!hTvy3(w|;q^Qz!FSSL2VH+3Q9UqNlRvXf2BFtW7 zI|QjDopD1?YZUV9xkq0aUUmf58t)`e+ktc}w*9h8?5GIDCQIO2z9~RyDXb6bYyD3XbFc@WqW#7;U%irnya&=biAB{w+mnU~2ZKY8veZLDneS zt$!NEQ>9402iQ2UO%=YtLLz2CtXz=JI=ym^Em4^IQIgIKvYuUUTSfE@PM`vrw1)7- zi3MA1r^56NqMyBA73&XX z`Bg?(eH8PQmply1+AQ_v`L66oIPCU?j?)EddhWvs+(#NszkT9!=z87|IR{Q}lsVD52U}AqIli z>6n{MyFwIf>S{v=4pdUA02An3z`yy#}OL(u| zU2^N~t2Ge)?1>~WEoe81z#!xnquJ6!z~{niq2qbpE|Q=B&c^H%yC)>#{?LS<;59rl zJQ6aAwzcHi#PksqQM=SWzXeF8kW<)Xmk7|@ z_Od3sOks+5j2QN>M!P~3Ke^;M%(@Aa(`4N%CfMNc0qw?QIi_rFUYhc5ecjpBi)@`J zzCNgmBeGg(RU%P=BX*3E@6yG+n` z)o|WbaOCk-QT#OE<(W(1^=U#6w*Y#IKvB2zR@6&jU(|gQqfzUZ#*soU^|uQ9Ed-qj zGy=IR-YFU-aA3@27s#}SMW9X`dnH)=GeIqqhL!F-&c9_=7}B(tKf}uQkUHEqc(Bb6 zQAlpdQ(FkJ})+4RkBX+%rWY>#rr?0}kO9~SuuoP^B zD4~&hC}|d^z6d6dXESaQq&($Ofupy(JO8c6RC&WYpR839eZPekgA zoA?HPlO7ZF-90AC!i))1OiFS$$VVa@A3o4km!sH}@PP#F^vt_RnBKXm_f57;VFt&Y z+OT6|TSXwZW919nbIvSl5JfeNKf&Q*e~3T6XZ`VJ{3q4g%c4*{cU#$IM+3xOCKmSW zbb#nvNGtM#088*P#oYL~GqHDIalv-7qGo-2_pyr=cKZzvSN8P~lN*_64=GHYQ5@CJ zMv)bY@TmiC9Zx*7M59(kOi|YVlJE^3TZp{Uc&iNY5XNCEvRaVRaxvo-v7JIb!{1eM zcwCI@(qE=&lIG-Pd78Z;nx?jFx3zHZ|F;O7;=P&zCjzRXBPhpD7v=MG-`5}wdVanC z{@@m|(iz(;MWEL7>xsp?Jfv2z;$bO$N0EN^$h>Sal5N+A^sjJG;@75^ATv=mKXqna z@6OECcC+YAmSbjziPQmZwvXicR7@<%Kk10!DQU?DISL zf_8HLl%$#7-@heN7~y?Txwd&4rSJ?Mw_!>^$6@5|?2-jm^{MnS-QK_eo)(StiPpwE z`+G=#aG{Ln*uMmQB@qsY*cThXzDcrs5wUNI;Me#NtoU%TjAfxNVmFFTxmZ!FMc992 zfbNHMhCL?8e7+wcipN7#d_p3(b)V%5hU3E1o+Cvar-`ZwReGq&HF8$?ioWfMy{1m6 zb2m@G^@=6`3?wODQJA?B8OJNHy&*)0wKm~_rXY#kksME>17i6Tqjp{+3D{}0SG}?f zt8=ByQAeaTr`F))e7=27Y2bexoobgTNJ&G746~jJlj`R0qFDAL>n+HHKtuA(oT@Xk zpS3|D!N4LmxlFrP(08WvIPoqg0=TB%-YwWA?EK zH0>Sc&1aejQwmOz&@B}3-^L2Y$-b1MP;rdBFjKLXs}%G(4zS^i-y-BEQFaNB4ugfs z_*BbY8xg`?P)r}AF!`o<2$H>=Dk~6$d@}Rms0&OKVrUMnWN_rXx1OOX4H=wWG}q>a 
zaPPi7Y_q~)rWVE8=Gk5mXu^>y#Cr}2_$E9MEyBW!y(NmDciM*7v=Nc9chb2GK?y3) zPKiJd)1@O($ojV+O?$9)FZSGXt;^1H`3@->7ssSse^F?H3?@Wvln|dg!#YbmWFp;p zuSFC?r?k}#w+w~Ds&>NcJiZl(7TUvPDYhp?pf?=f0I7jy>BmnOkMk-n`^*gvqUSO%l=NmqTpZlJPt{CG4;; zHHzo8?#PyS%n=dPIJq_7Z*Pb6bX8sq;qP`~zfEUh(VIzU|0fcS?%AuWeWsbP-$~lb zH@TPBWfBXzT6AhSaM-XR_8kE=psL_ONX01yWwjw8$)|32 zYiyW^R90I%Y`rmpzI!nj!{PU*B2cyB@dcI<(z5%ACl(dtz5&x~>Kik0*jA5O9!ZQ8 zDuQB@JlMdi7xG!&1^=PMHi+OS9lseXIWAR;LYLgn>swK7v3#j-6^VKwD|lk{3~LJT z?CdRP)@-wv1nG1`-&3I6 zxC`W$=I9{rI$BRG;@MSJP=@Lbq6 zB*5a{6ke4F3;J>#7j6yC>)`}iA|xABoXF%D|8qgs_{d^=MnQ_yYq~3AnFw?x2kZpq z6%gUI?IO`pm~Tb&Yj601I_Elnk*|s1Rz2&zGKJayot5@S5$R5)!a`|BNOP==Un}pZ zc7y#zM5-}5$FAN((03TJmL=9#n9sc_D!W;bIfk$=`YZxr(IQqS$&LRB$q~yg&QI80 zK|gp{Z$UXX72=PTmr35~te`I~1KzluzZXnt8O8hpC+!MBN;|X=A7NL9IA_>qyM05D zLGRPOuU)Nhc!!$m&ILSsShw)O$!Aw}JiX6u5@gJr`_f9Lt!D=Y0AA!VwjB~`P_t`_vu5o!1tj!oE%lj>_ivZI^cnD$1CMFgp^ zdw!ErvMmn@V0+3`z?J|zYS$g#yV~jmrQ`s+nC2s(?e1_dh)kbFu%#8VmxbsO^e^+Q zJ!Ig-ysPY(AQSKBH`?Dr%!!@bJEpBa2RnAvpDwSq{|m9K4p{E?&j9JgZ3xz1*+Mo< zqsgfMa`LpE0W;^(L5teG!hW1mO>?A}G+7inB(-~KYWMck?#k5eXH&cDtxR-!__a;Q z6)mypg8s)=#`4iui`j0`7_=dn`2D5Ae*WSLd5^UTv-=}@$-eIl#q4^$nM==sbV8_G z>_y@5D=vO79cZtL<`+xP?qYF&VZSHHL^%Gh5_Xpe>R8OCXIqvK^OaV%mv3>CY_*k& zNbPtJu~`a-6!Me^vK|+OokPjpM|E~eB-+-t zITJy>4}};XC-G75{Bh$hW{Z9Cmp-db2MyKsBT*PJdSPHjmW@cm!6Qf8_>lfs`7-AX zezjGLLOG-#G0bX1JjlJ~Y`2U5*Ovf=3uz17z;B2`mmVu$Z#O7RX}B`cCAx>8@5>43 z86|B{NZYGhZyTbp-yUV=m7jTgIHcE!fHgPM9v6Wz)KeoPG+iz{ZCpr26|!6Rwr;c{ z5vX%4TYSM{A?k2qQDsMMl`RxxeT~s`Vr_^+G88Ky=O8&f#8TWIVvm(0emvFc1*v^8 z>cg2_;;mT(T6%hUv%MI?r>5?(qv5-bvZe54?84`K1<18Gy0Tpa{csN&Hu6r7$)z`D z5xOdln)4&Zx#3u5-`;JOZIqrU~B)(!}c-Dy7N)d={t%Wnz z5&#M#o9u{yU%7YdWG4S<=e0ufJ%AaEU3R{(pYYjO2IqX+O%z|Hv%42qzW_%e&JW}K z5kbF^%W;DY@l`s5ON*=|q|`};yY7^9KErUAJU>9kme^%=f?COS`<-ozrJaEdI&<$+ zEZtR}v?K2Cvn2HpCXR*4X9y9|_VK8F;d_&$anDx12WHw*!>b zawLIn6(nhX1`fEM4RMZjgNw1wxCob{HwAo;qcIIvHx&{yse1mfmtAzg7jvSb8r4Mm zLV$3F*k$Z$!7$}8gk?8`XdcAdwrALF>FC?#K6bdUFAP0gCi-|0m{~j$t(NdF?EL&d 
zI=I6r{OyOqQ4q4pieYq&`B)}Q2cDVLU~?4q+sFeWY=saVcCvA9$Kocd3JH7M*vD!V z)}c&>UXhS+KF@cHNcX#;-XV$X--+UOM1Dx6MAl$mdma*v9E|ZPemw2#!t|{K7wxJ5 z5U`BcHw6BFr_Bx4Lv*SoYa%H2+%HJ|yCFK6tuQrcpIOsB=c)Et`z$}io|rk`5+Rns z$SJ|?QI8xlVp$pAS)m$k)H9ur`1QHVNz{AwTUn zyQ6<)?}krf2bS>@XXpRQ=S0D0w(YfV2!>xUy$`wPRl8bLIwFSjgN+vO>(O6LDdxVA z0K2(Z+ZLeJ?^IS>uEMkeGf2r84#=%Mq#O^jX##Y3rmkrG+|CSN9^X}AjUi!Htgy&h z1ZWh@cf_LK5+YOfT!};OvSZ=9zI}SwWiJpJj;7KXhR4k^c2qxqM;9aZ^-ZPn4uINM zE^#Nhh3%`Rx365>zN)N!;DZOGXaVs{A9f(>}%Ac zyL&1+nx-I#fs~y6A!LD}J(y`W;6-0PS+H!`lZ5Fz?$FHJp9qC1xQVOwS{oQBsk&9+ z^>nN#OqsL0SKA{BQ$OTV7rjCAoul+&Cr42^J(OJPTqc4ktu92akTbgJRWQbbA}8p+_Y{IrnY<+z3;Tj?$``|KIf$vRm= zIZFlT&F_BqyS7PT-&c3E?y$O$4u|%=ZBK|hzPF-d?qX{ZWLUc)lHIdACUKq*1kJ-@ zc0l`e_&|m-`*yXC0J%=dli;7z$&*q1y4WWos8ed{-2+oSH!(Z^C5XP0Z;!(uzebqK z@``6&LinbBH`xyr_Un>gEBKQ)i$F)naBk9W4@ui+uCsd;_HDc$mi|~l-?IqP(QTN6 z45=R&INBap*jJz+l3!rsg{dX2Of_sxHMHnaK3_z#Ol>Md$vmYK3fmU_Dae%!UK1MPywXw|pAm|q#3R`lW_pT^hA*lf`r|_C_B*BTO z|8F2u2Y0Kfwt<40JrUOPAOULd+JfKje68yDdp&`~ynj~k(0!-kX(TERIPb;;pRnsZ zyl&cfA!?V}xxl7`loa0Oozj&yFU0le+s{@hOs2A0#(1s^kUK#mr=@Lzz84TrhHJG$ z1Zs$iL(G~4d^hv76R`u~V|R#~X3q;!h2H&p*dG)oqi#KVXyWy@w?&|~+0H3^C&Iof z2arF7#I2hWh~+T0m;RQ-pHjqgOG2*`c8y|u6(&a#d3K#J<#5~5MX=d^D1x>c8M;S& zq1sdYC(N@V>!uXCBkS0LyJfiUXl0k$ZRxCe*Ii%_3e!`km(D-AqSUeiY@fAW?+oJt zWG>Wa)xB^kkR&Z8p8OsrGjC;)^2mBr#gFCM0$)* ziYD4|A@L+6{i+iK|MClUMr1$p#^sfu^KaK3ghF%i*%WKtk+A;4)Z(4RQ|(6z(4Rc&@6@{U}XVP+k1=S zwm5v8K>jGpmW7XbwchRCHrI$irI3WR%^|6TNifkaM7HG92cIVAzw9CQzNGPbg&@t9 z5xiN=(z3cPBq05R(Hsw{G(uA>w*iWzO@+vUIadS210gpNYnygpZ?2698J?O^(=l_p z?D;;B&hXA0R+%7o828h)Q7i<>o8b@-rb_shqEii3SPZu*Y0BBDRcAS0O$)h6^RaG< zJry7iiPC+m4)E;q)n}Q3I|Lbz!n^}BtwX^7sygnFI7IvG{qV&Y>Ia? 
zmkH2ysb=X_*-e7uQ23*4*@WStTr$rNVwYV-jQeEjKTEU{#Vewm~rzhhScb z?GvD$IoOhI4H1Ly#%S6Z0V>UfjfaBwg=pQ`6?4zdSz(_9NXHE14Dy**i1l?Iq@nvS zVQr8yej=M^T}7bC5ktm~v9SUa*|&cm%U76u;Sg@byil>A-_bDE-nusr#76uv4z&FF>USax)fnhoS8lSwaJrD$q~ zAvazat`1+~5HM-q4ghl|+wIPfu9rKK7lkN%*7dMo2+&ojnpsw=F!QFk(?>1P#|F%emt4J(9yIn+I-3&SDdQg}} zH*limr_-Yu6mMHq6d7mpMf9a|z0_E55``8OyB%R~I-W-jZB!{ydla6&NbX8dj; zjfBx3I~fx3n}C$9eJDsfPt>im&mJM{OB5f6#FvZW>pUjPDYLsUsq#d7KK~#^N>EbFwbU%a84d9HifCFbE;VHA)VL1JG;&j#nDO(X2i$a0U^3l`hW-b zzdVgfH`?Klgli!i@~a^;hGzu*_IetkoMzYlK4jNgh3Smz1L8o7q)S&JRs{T}ES`?# z(Z-QfGla2_=(#hb=P{b6=m&&oe#h(?77O9Cm5Xe$!h95gZ_80?ieNZe$RPYPshwq3 z8Pc3xJynd2{et1A-#L~2%xe&s4qb1sOBAG9K`l}WO;?IU$v5@C$*xhD(uPK2xmZp7 zVTii*`Wvioh{}r=;^4(YKYW~C3vEP*=K5Lr+F~0OzRZortWaTwfSDSzp9@Hv32V#2 zj*fRNw9~=Wa+dXU_yFZ_PN@O>@L1_=+Z}+jrS`u9RQCAp-5neD*slcDG%1PF!JHmH z5XtuhXPiXDzW(|-LIw}5S$3}o%!ovC0%yq*A!>@L$t;^IKv%o-8|+RT)|OdCClML- z=5@}==u%rDqVJ2>w;XM;s*p{$0l4ekC_pV`j*9#4^McxIs+Y?!&8AX&RTR}*$6^T? z&bN+`h-o*GCp}I|=e^-`NjY^gyHJpZA*~d1mgg=Kfk7NOdZb;dFsvsW)O0l)AN zJB*>l{Vh?b(2-5fOw8zzIy=t8XEL2y-rAec!V;x(rMt80e&+7N@DUaO>g@LdREEd7 z89Y&Lv3EqEQ;Vc0ZyiEm>93{__FAu_o|3BD;uW6lPEp*n)NVsD%~mZE-9H*87+$NS z299YF5&f@NNhH~}y$FerS4E>e9J$Ue*=lcxtoV$37;=OW`=dxykdp>i!9wIXM9?5O zK4)s<9D6rV=k$iRZvSy$81DG=N zViV(HsM@6>&@MKQr%?_2h9G4YkC%n7>p~QU$PCIOdKzO#OZ*)J&O^*H3XwNE&j%Zt0V9OPz4kxy*)4BDKpl_)zSL~A=Aps2= zbvLGO1!>f9o*Z=^`cJ?_-z9D5k3({f-55E|Di-jE*7lWC0#9~jla+-;4`Of8$_0Gx z1<38$EMY1$vM9z`4r#Z==>$CWB<#76fX3uGY4#UH2$NSV?mhO$G{tLk+euNV7kYK* zn5J_Wh}(e)_Sv_6?7`g#Q(hE+(?K%KIj5i4=Any$MTLbf)zel};GUA^bLl z87`;kvd(e@eG7-P$gMCW?bWZhRVqx~-9?Jm;*EAZP{tK;kA$t2YbVp+kAbaVr-aEK z6?icn{wzqzkx3EzC`8dW7z48Z2r75!mbzz?j{`+BA(EfZwsr9zNps!B-%8cixCgTu z8Mu)E8|F?vWvTrVC8JP2d;l$)YXp6p*nzU`d&2ZDW+PBB3?LuLHXAFzT;jPPS!_QQ zA{%F|XR|#l=y_MVr^?W`JKwe{%pyBhw&vKB z@?%q+r~7&lDF(M#9kna1IY26pHj4d9kSgUwuUUc!F_wYV{6CGHQx9y9Ht6hsY;B;uRx9znkVc$Vf(3QY=r|1Dc|8sluUyB(jBK5b53-dLO`pgMo|LZ7kx{OMrUG)w$ 
zH=;p%UaRYR5$OvV4?qA_i05G%ge1szBv#mPG(kQ@U`Muh+6T@|4ZW?pBqoH($O0ATif=VV8_sX(Ywo4$}UKcG?%4tphG5r}D`tnQGE(+}& zjwt5@_9H=tZCsQ~z`G%is`MDp%gze=e(%z&e^>jru&=|IWJJ8^D~|ch_@GyhK6bMp z34*L|;l_9yTHF7p6wPt9)gfjTNA_e9`^p zo*vR8t+n|UQB)WqX;L)VFGL|WGO^gEhY!Q0aH*i`0W%wZTKJTMTmdeLd4As&D3s&8 z7{_h5D6CQ_!{JkCpFAKE`S%@w0-nOu@}4Mf0B;GPE9)8F33of>MziqE185Ut@=nMX zFj_VzdI?jt82&G_xHZGoVck7S=3Q_Wk%xU2~oWx))v z76GPKguUKz&t42U&_R8$b#h2h1(mPu)Dy)|)f#(MbULk=h5MSqnjcvB_NLsiXzzF zT^F~nC?R=aCjs4j4^^kymCo78Ji96+$d1S=tz8`;9+lhX;ZsqNet3BXW4wza$2}sD zK`ze{li`0Q?Wci4RSwVK*s~T>v<~xyS$B{)yO|=ol32|7p$Jd7) z4{^!X>H@p+6yMQ)=F?8yQSJL8`2OkH^G17AQ0-{io!W_~w`~}Hw{ape{l$p zp;96#v#Fx_16HbPi93g3@4Uqpgl`$X`{eX-zc6ibTG&nY>rS}&$i5@{?KMHB30GJ2 zi3RBH@lk%I?YwtEe$oum-RA;fk|yFz&CdxiN#vmsKaEqS*q24|lO3bc%>JuIU?5Mg z*>B$tk+Lev9NJ{x7eQ0IxjNO{ipLv5>5=tEnrylVR2ObOeAJLWRkfd!jbxLziQ>DD6U%O$O%E^Oe6b^> zd~Z?3yW8uul6HoSZt4yvu2sO#V2noE5kcCR#XfOTVc!9R@E(IxuRw4Yr8@Sp4@ICy z6Bs7b-Sqr3;Lr+A*8H?yBnsKTu#9C}WTOQ6;J#SF!g4F&*#j|!+VL#gWFIM8}PuSP(u9(~n+FwG7 zK}g&sd2F?_q|n^frid91B!6q{`y$fo!(#lfcM%}}jzt@+ufqOjF&LIs0pb910CrnQ zpIw|^Y_|*0dS_m&(e4g$iTvVR8zw;SB63#&D|3a76oCr5)w?aVZO%E0v{apyUdvS) zCUksqoXdP%$TzV_I?GRl9OxYHuhpIrh126b57{OKsSAho%GVa!0YQrA$I{!fdL{j_ z6Ej$9ucnhUVv2dO9TkD*_u;bpH-*X0%`+S`hp(M-)iW|McENijka-wg$*_H*7>KGj2Z&$zcQTuwDYxB31vC^-U)nRt)=M zjxe<es-(Enl=cNGY{?vQJHPi>~V#e6M1@m8XrJOJ~5L7eB)5=+-#GD z$;6!-m)g$*q&}W{&6}PPt_RSG% z8co4EQ38`lI)+mI*kdgrn-fp1@0h>DUI>uI#R9Xp1Zj+X(AwJqeruMpFpAn-1(2vc zEjpu4#obxKt(!A}hy@B9j-b7-gOmN4l=>kn7H54xKp5QQuJWi5y>Y5~s|+oU7YwWE zX`W=cLWTrbKsdg8x@{Kp4Mt=s$F>M-?#bYY6jyY+-6kRd~V)NF%0q2$l1 zZJ1z~BpaH&^pTAg(KlyU6l+Lhgy|xhla*mfA#yJ%fe|xVfO>WB+t+@fF!joaWTGQy zPX#zQQh+NI+a;)7Nt>EfaQ2Ge=dETVJU>lFxVED6(|54m@vHVQrKceZbUdkQeB(o(g9bS^5RxC!pTS6+>0V6h>JU&kFf5ml}8+wd81rl zx?AY7Euf;}O>&#uX55gQv`v4GCg~ou&^_qhE9p+tC8kxtpQS7*pm0m*q6L5aR76qy zzdz?Z>Gj`NDd+Q?J?{CQ^PFe!x#F<-cTuRmZO0B>erW&5<)Z77zL7QlPyB^63?_!} z=Gbilv54mN`rTP?Q5YSXCm#C!p5V=c<4*0LcXDpMB}HLkO4C&~TwyvW^A6HD4a=?0 
zvoO_YAs}FEQ$?Z5lr-noI^f-^!qy2;*r-&IZC03?dms+O?s!tv*Jf?X&AbzOUVNBfe(v?#+{j9no>#epBZ*RFGT zV&a4ot0ta@7{Gn=AU0;alnTUb_)6<{o~Ymp~h<8B+jCt@65c{_tM_e0N9 z%r0re1>#D>z|s2QR^$Y1K1g0?kENZUSD=EY7L?g25#riAvKwyHP98{Pe*2)+D2D!J zw>)Cw6^?^fT+IreNI&%+$Um(*C+Y)KW1kl_E`JTRZjLo3xm2i?jL zD>&$&{~+`opA=yJaSXE+&d>7>-`ct9-0}u{(_t|FQfpoRMq(}b;M4?S9X;(SB0FqL z-KLh>J&5$Q)U6j2ps(n3kEH;HYV{XufMTkz2M9<0)acVi*AQ z&gbAW;0eKNF8T^^eKDst~5`lKM|)7KiTUcuJi3NqT( zC;}^~k6xDT*Pc;>g6;m_IXts*{;3@c?T8?4`^dZ_i|y|@3Xawgp|DT>oiwykwih`MeO=I(I{=-v1%mY9#-FvZ zWeUd@pi}?@%r=NZ(Gsb)t)4T9B_EVCc6rVd*r(j1aIAHJ)VrHKsf$fL>ySD-cfQ#I z)RAG6WT1E}8^_|>V}DdCD&v%!vf~2G3(f#}%>O?jdci9zCph-J4@ICtiJ(g!EFSDmSJE;f5MN8UQ5n_#4 zGOV%-J^<7DZo_pBzEdd1;-$69`U+EYaI-SUGJ+JH&-udk3ZSYTjw1*7Eg%l3uaft~XuH_;(V9ud<&g(~$JlJ?0D z&zIR>7P^ zG!K99GTd%oR#3lXPMjPgciO+boN%VpseL<{ZuvK<{E#8{)H-oD+TEg(HweQuK0Dre zi$MEm@kpd1Djg0^2fu!k1mjTqFJrnWS|&lIf^Xtf+bt5^kZ&TSCbdSU*pnj0tpYLH z5y@iq)t5zMp`cTUC-~PL6_sHtIwBe-Zx86W;Vj`JLt2+XaTOQ#?!q(_R>MrL_dNwE zOA@sSdr&BLb_h<$5uz3UAR#JcCP%Z!;5x%5h`{I5JZjjArk)cegIQ-mkIjta*E3fCfOx#&V6 zA1DtgCjNqO^1a?N!gPBKZ{Idqz|T56q+h}vHqc`C`eUDmWvWCp2+!rxoY>{?+~bY% z2=hMwHR-*d)w%=G$9d1V+ zqb-3QPtD_JK1M8Kkiv&v(!L=?+n${gR`d;mbdH1DLF?x!6M51!F7T*lAK}zjF2KUo zCgSFpW_o-zFF&?efHC05qhDR4E%#V5Y_T;0RLXo0NLSkihvhJ%4)eM2ytp5exu@;+ zln4w-t0#}$_Sug|3I)7UuT=IGQQ}t4{Y45{2%KQo&0eQEeJRKc{WZI#)G-e*gApmE0Ha}CxqyPlw2DQ*q>UV{4{U2TP`3rE`mN> zHnE^00-b27!BWm1dFkom6gna9JjauJMPXhM0EW|;v?5`ukl7Pf-e`{|LtX+Cic@nK zk%Ocy64ursyT!GkT?cKgh%_2WF|- zdX08aG)4kOSIV9e@ZYpN1)*3VieC}LsprPeyB+rv7+vpq>XzEQZsWi{L25dN!umS+ z$KMvDRkBu9CZopPJ)TxoW3TqR9*=xTiift84fA*uY0o^q!X6f6^EtG~#w!?q8r<^I zwm_KQ0s_UhG#6Ja(^ad4<31O-e(2()N7-smg%U4rUek71n2z(vvf6&*S$LPK!p{^^ z3wpCUA}V8y2WNJq=Y+z-Se)HD!CC}qL5D6~?SB=H11mjC>08=IJ{dEU74?X(3dQzw zCERK4a=07j{cMOZLv?Hqi((zm{tt>k^LqBDhD<;zSL`<_)K*$2>iX ztVx?M5E>4z$rcIG(lp{$`xT}WSgVjSS%5_K$Gh+o9tFdwn9Vyt8W1kXuPKiDf*de5 znzq;}Q52nr7tOy5#9wLn>|^{pSe*;>*y~>M8~IPA(|1uGR^zqrGoOM)3vTbw#qLvB z(Xw_f?X>NYq~9hcby8NM1PC#PD-g|C96t8ke4@Wb%reUCxXn~DS{r^a4YKGl-%DoU 
z6{IqJDpR(+HIcIiaZ2kH66MP))JVJd(-g#LotU&puh;Dyl3*}q3I_`^1jjZqUwHyd z;JtV_FF!P|z#jJ4bOz%Us=Ecao7qf_=v6b$V1cN z=Qat@rVN(nDod?^qAF9dN?*;uwA5TlW><^;R@`?(K{{JPHfZGbQ z&&t2`LJ}}5-BE>Zr!Z5{rz#(@#jZm9CfnR;V-%#B8AL~r@wP>t3FE`I!o&T04YZXW zKC|?o?NpeW(JESMy9DS;0pi8v-|cbS(ktx)kJEDJ1a`|sBn{m}60DxL1ZyV>P3&~r zZPr2I*hk*uaa_XTy<=+*NsR>05}icxlcapk;KgR4b@pO9+|t?Z@o<;h+S*Ws=`{jP zeD^AaXm-1)1y0Oks!3hfyY861u&_4bwknX{5}-)8^k8G?@MoZwSox&2K<*3jrN_D|0{ zD3i{!k6aw{mg7l`C;Mw1((&$CuD?!@Mkfnp+4_cDB(^l{7Kd0BT8k5vTOD$?oC6!= zPC;6DxE4%r$L5_64)7Hck}H7u#gY z9uubF|CMrXA8)6f)J)B96w(SbXk*?Gt-!05w1@qN2?2sVi((eIl@fhkI z!sEw39Tufa@R4cPf0l%_f!H2Dut7qxjVZiSBH83ehxXV=Ldc3PBou=UrCqG0mGRCD<@QFgE&r zyqnoHVU{{Wk*Pwi#2)kXvfT0PY+K=w_@T{>_Jp9$QaH(#e9T&j5&cd?dU|4Vt-a>y zIrQ&Gf$(pFG&wyI+pQOU9uP+X+3O9wa}E*4eJ|EGH&5oqT+C zOYKzqrYQ7O%xXT)Jp@VIzu!Hb>;Pjpsq#$IgjJJ5H{twZG>Q#1FZ= zc-;7fSfv;r+>^28q1&ys6PYmynlCG*`BEoJoS^~oi857~Aw;M$VT(OE)1f;fvX{$6 zVTqt)R-CaXJh2p_?RkM%5~8Ge_JXh$Ngd~d{r0BkOplO2&Hv^SA6@qBqxQZq)gr=` zv5#B=U>u@UY&fe=v#UfT1;4s@P}b?))2zEE=9;pAYsvt@_^ZLi za~a+VJwCYYJYWww%&u7^)zI=>at_;!=^_y*dSJg^wo+k6q>B4I-s^>EH0pfS_A3V% zUzulz1pJ37fo2Q>t+eMvj14J5rE<8vAVU09g=6E&uzr@>D_#^_mX!TLfWhD$p|W{J z3D>B%JvyT|&OZsn6_OZ*#K{F;ipeF2imPl42Y*@=HWOCwcrNB#yqTY^_8Cve!hrcH z7Xg=uLZgFwD0E>q@&0C)ibx;fy_eW!0zrNlA-M%XP7d>!8?ueNk#n;fJZUC@9p_sF zV!Q55BT{&mN5UM&ey0NtC@JeF5I-#X#vWUVerHCsI8@xK;jsT)1V$kk^F>-_ugw!B zR?uIDX_g7IMCebqVFPTP!*HiMV4DQBUS-B2G6E0SPEV6vg-hQC+a>6gCMvlS9(6+4 zge9W*7ZKvZWOvjB`lAMg1)*p7lzmL-{KB2W_IHuTrzC@9*XQChRDNK)~s=$^p#UUN6t}3(Hp7P|@{kB|TCW+I~Y}+mnwOh2R>UlHV z9u^iNC+S{R17yGlqaYjr(LOO-Xuy)F`QedeMdM}-zSN04u>daF`iZ~ z$b9fS)+^%%`-Nxk+q{FPmyAuwWlwU`nJP$IPORQ#k9pL&aa-TozR%_h#;F~UmUdW^ z2n+*kR?b4d71HWzoSWSBarl%hL@>j)D1W2|<>tG{bl>+YMuH zr{#GDZhjg2sQ?`)!c$?PO%;k$gEXx~xf*SOX!Myws|1zT*%HqW?|7Ci^ZeX1XI0uJ zp*S%kns-*&Q^MK`c5Db2DgLdor$tm>HcphF)G0x_*|mLF`5dA49E4HM7xHe8gji|OzeL3ghz6>NZ28PIN4Z}V@pih^CD3lzsm3sGe;8HFM2wO>hS7c$JhU5CuuLi zKOOuL`?E+s=sKf)=t(Fl!zxH3CGp9xkc3&zAmUSD7YfBeg@?j7+Ab1>ZVboQ!Dl@! 
zy5)T3zbZthj_rZ9{4IevJzU1&N8Ka>Q;By)Bnhp|6LYAnuv!7iJiT+FO;K3&r!ufP zj(M)~DlQ2nc2Y2wioGNb1@C%19cbU7qh0=0;(ciFz_@*CcH45hLPTn25r_|Vr6A3G zSQe!_cpUlr!kOqMc-=);0x|y)^%8K(xuy=l2w753l zz~Pk2G3$gcP1>r2!!XKdyVd53h~H3-SC4p-)038*n9*Q!1*x%9=XSPNVcL}*$)=HX8NX_|)g68#@4gJ^^;5ZVQ%IXRj)|ZK*Uafg~lHDr6RqloT*3;?Ncj+wsX~A8==3|Yv)#Hy(!uQ5TdtNZk zM@jRV3VTsFK61#%hdf#QzScc`98+AJ=EED|VQ1`LqA+HdmS;2Ue)akI?79uv^$pff z6h@JkX>COKC%2&@(buXH?xJ%AT902lv-Oh#3%HdL{IX&xJufY_nv+8MO?K>Je^EHL z8b^h*>|Y*tV$VLi_?nn2AKyU<`;t&h`TOxNu8f#@fnkMQlbNE`z@T?d6C3QyBE?>E z4X@y-<}0F*=hT9|CzkE7%LQpjhc+GT3WXV7W@30!HoM!35bF~w*X|Ie4~Lgmaq8+O zMCXgDimGg&K>UGod4m0rvW!U7bYkCb@ltC&v*f<2tWHR~MC+Tml*%6UBp5B`XtP`> zwmlqYYpq!nttP!8z!|YuMW6{~{7&t(fRE!M4vl|y%qOxm&A0B0UyJ1hcO(bpi=>Sx zl&9-fz7(+9i9nNs(W!mbOOWEPyW!feTVDZ2sx4EN@^nG)eXXTED%-d?hAAR&v$xnt zPuV?<263TH6pU?Tm)&lYgxN%YRcBK@B`>?!>aj%*hwv)KjdP`-zjyO1er{IyEHV!E zOHX)e>ZVg0r`oRtX=YhJ93a~JJhcL-Y^E69IA;$Ej zRS5eI_b66e;RqWeL`l3mV4n~1`#DY^8V?YeVw*1lO9Bgut|a>_r;r5uRG8nk--tp5 z$SlvdHyl(+!~2W?L({QyCwog_8i{v9PEBLZW6C)?6ZM`8z8*^na%PtZFsDIYbAp*5 z!z%qRc&14-PVK9n9vP#g-RkKnEV~K+je^wMxqUn9sWA1H$Wq?=j1BaB&(u6K*&gsn z@(q@<<_{bv%a}e`K2wD$=-gC(E1vPl<45M%;{r5LMjY5=1B^3JjB&4s`EkNU=AI%i zP*$%T@yFGdr2(gQtCt})X`aguJESeVb=#8;&rV%;ZrhFsDKQA*?R;i3nk~;O1=C!T zrm^;WArk1U{_I#SSNpR_v?_zEJS z*_Ce)DK;DSnoQ?(^-|ijy-nt0yV(6AF#jm4R9S&Q{Mo!%FO|v{4+eimyKKezd|j09 zHrp!#V|a4$*i$=e?P-Unc8)u>XSV%8Fb-*@R14qqXolz9l4WMs6V1;?GrnzncH3rb z0okP@(!o*rnZTL1%S6yLNn?GQwexu93?tqUI(j@!mC=ja1Y>7Wl+Z3PSQN$>Npzn5 zhYQiuvIOVC5&_o6>8bNP?#~Lp<>W8*EUHcx>k@ryd0nF zHYJy&B!&GQY!n6(1-`hEJL^W9rWDS;+Oe?nq3%gZR z*lrKMF=3`Xr!e(bAd$=#WiJcI4W&om!@VK`#a2oTywYBEcy`9Pv#a;nYYq{(mkrUZ z!)K<;Ii~$dkP&O%CB?4~gd%^odopZ^GV1WJ992t7$6;<*U;GAX87WwaYhXew<5i%o zNO2Jq7d7v&4#G@83iAjy*ug84S;|EMRGS{9vqWhwAH3WGTO#bMco&PI)_x^Q98no_ z3-<`#u}sCfON<)sx~Js z>>h<_#>sAdUNvs84~8*0VcI9YMG5hTFoq9r zrQIk3l?S`uv#hrum3QLGJyhY?+hN$=wI2#IIu#Yo8!h8NKGbYXYfSU@Q8wK{&g!C9s19rEf zC@WcjEb~ACB06^MWaB*CnfvZe&mSc|tw}Jh-tWXewqK;ULeWH)iMVr4IXQP8ntX3I 
z?zO*oDd{R?et3&<7`|;Y?Bh360>=TI>n2cI2w&>Yh!%&kPxC&kQ47<)&Ye2h9SXC& zN`rO9Dvvq0ZGqKzxD!0p=?cfD^3j&5Pn#hUg~Ed2&{AlRI%dUWWD4v*@K9Z7W+|SE z^o*9VmxbaMlaq!b*Q=s4#MxatBnX!kWLWV5GtO?g1wgaAb3$sPFcF;j@a7}n`&E3V zgs;Uu)HZhjGXfeRpyU{ZW{E;Ao zQ^fNI%2h&i^wgZ~HqsNmxC8T34ffxHab4$Op&WDOBEUF$=>Pj;(?yO8DR9x}*nCmW zvmVejfj7IzQypKjH5k-eVD+NVE;`b4P`?QEJFD5~{&X9ZfQ}U=IRb7K(D^)kal^k0 zt&RYbRjVuJ0zZFJLBI-Bk z_8&H={q{T0#n+nsitX+9BE;jx;Ho^HT=82}Xg9TkO8-zaK!xRIA%9Q^npJwm3ij!E1QB07c?tqAcKUzWfS!GoTF`7Gr$fSpk<1wuEZ zS;OX;5{(Yu!g+O6E*|5%a~b)q_IR!kWS99x?iL}|l1U*o`;2gCEUfu8YV0ebXbw_X z_O!Qzs9;!+KK98Dg#F2$>ohjh5GS{bL?uNhu55i3X3V*8G_iueCm1V3n{2p!Uzp11 zfo@kEMt&elEK*lQj7aIbJ3>@U_V5T3PLsig-f~Wt^KG7@G^^vspBTH(mbJnO+fQs+ zX!`_d%ZWuh>~{*w+9TII;snwA%#Zw}wMns#q)!k$k*2tmBEw zdWfR!rW}QhdPNs@xz6rh7= zr;UAk&Vsjg&a}TeCEDnc0qT$6MjHARp1fDvrv&N9NDS})m%?#H7a)#?(RC3RoJ5VP3I7sejjUxUCYK{>`&dlE)_)Kjx< zy&%oNd>T(%oT7j2@xdy^Hhae7NsgaWCjMEE9|U`tOXi!ocz6jgbwAdH0vW)LD6)K3 zVOo!orc%3EATGZ2Xlx`oy)(PV0On+*8*KEVw}>_6y`u}r3eygL2+6hexjN9rO|f0ZBQ%= zDC~CED_4j1Pf_Seul%ZnZYdw{8jB2;e_^h! 
zvQLQ;+ti(V0%ngyU}BL%To+WRHrOp9(HWi>c~9o&j9i)Z7cq|Ly}ZGEUy#+AIb53< zWeFk8Lzmm^X9{br$qb<$(gF<_F^+W_radEwchsZr#=>8%a?dVSn-&a^uvTCt2mU%9};C6@{rCDM9%CN71 zff#)%FA+IzPQ!RiK&j)aqR^0JX%ZfC)`jxZLQ zXLosYcq~H5CKo*<*4UW+A`7NB;ZWZ z8gfa*TSB+8+;)1BQnXCj?B5fllA`e0d|yab{I=;iBHWpfY3JrtV}DnaZ!m0i5mVLa z`mv`rOt*h1nkhp=8_BOb0CBj2HN}bcX;EV92c&ghyF>(LqC>}acBMz6k=($&YO8%i zgjiR;v`jh+Q+R2LedOB${_AUcem&n4u_lIyN^?%FsX4iMg*_$MdL0bMVMzy$wWmd= zyXf6;EPur_6-GYZ+k#hJbQ#kX?gF@l)LIh53hNq;_P*kE?hY3PZV+$~_;-d~eJ2rd z*vpeC`??^_<&7+5Jp`z6L={g6)<=kO7{HVUpHV*)fpO^Ev7;qC@&TqYgQkNeMTj4A z4z-19^3O$L)grpMNs=qawjm-gPQ*A~M^_wu?Ek}yk1nwpil==V+*fOCc`o_O6OV1M zEyDhkBZ&bFm2wk8-PLv}hMshh@9U=pXnVW`Fdo0lm4idngq;*_{gqUtFg5VeZ$pV4 zC`hqKGXh@}DXxUF=2bATE){{sW5Bq1KMTFRV`c%p>ngijn8p*moJZxpf(!uBl6sYu z$!#`BL|<_69{EU)5CY8FtCKcI zn0jyH=(r%4Ivz|SB~~|FkGPwZg0|#UWvZ<~fF7~eN)>vmAgyLhdf#Wi@v`*NxO%GW zC_g1iEDPh6uzWl_qEFkOdBpzW*?CLjG5NjL?Ca)b*H6!`Yp{QaLe;2bCG4Z$0mS`| zA3`~bu}S-aXe1cUgQ;CAK)2eWInz_&xXOAa6WrnhftJXPoBL9{}5(GAj~Rjtwp}Fc6_q%_w%-X+L3lj^zaY$b2J@nS6rd#o5lZ^A z!tpo3_Jp0qzk1F@0m^^;fG+4x0y-5eqa3u)3dR+2bU7b{t#+L#RGbQCReR;)@0WMC zLK`YdtbyMdgfbE0w|Zc%{l5^E7V<$%*$M~vSt_x$0&0Qm6K;x7KO_!?#kNV2^rgA6 z!0J6AJSnt`tywS*GQtTdJK9t37(Uj6qR@rs>$$HyB%sw8t=G?T zC?A42YJV!187s4T6A-HP_qqN}$2sk0~#)pF4OC_Tj!T815A0+?HK4 zSyvWWkK*FeVE216jDt%idVwGv$RO^Ov@Jr6Ubbo5`EYk+_6~Zc;zZFXF3i6YjH{Cm zj(#~J#EC)pL+`0CI|U{!7f{(nd{-38E-2v0>jlIM1}o*-UPLnPrzY&1yP@wmV zlPD4R{?V+U|FJD6rhiINj6gpOT)pMtH=EYj1-)aLSXGu?V7pS7IiZMQqwN|Y8h{S^ zCiwH$IwZ0!n_?^ElPJB5Zz(p8Q>H2}lR~lZW|8O(6IEtC1n6u~13(*lj@>H?jq2ZT zpncC{DO;8tc$Irll(-%APqJZF3)7f$Q&zn_uE916#$VVFc)n?!a(0MDf%#*qY@fo^ zU0PLDU}ppVV9U1Qkdat-oVN4X9C&*@J?1>lRephq5bR;h2!iN zq+z};C@-^_qR|xHMltNaeY(vNCC(FWMLO7`9OmgD6)`PKrz&wPVw)6A-6N`yt85Y= z?wPScqT`Q({6fRG2-6{)j2k)TT+uJ4!d)t+zU?N#SRwX2%Ir3emQiairgsUdh0gj} zXLpN2*H7A2=s+K;`2bIWRE24DU4LlK*23;s>6jDZqzI+nu)>8dg@FapUtoe4eqBM7nN4B7`gCk)PG%MRVzqI#qjy1`3fZgVa z>HP3?2}06$<_KI=#b6h~c`mKis}9)1qQ-&NI|HUH5;itR3%Ad;oNgv~ag0?7%2G+2 
zCIX9~JUzztD;&qNzxXA;%oQNE9kLj>TJiKniIMhKh4sVdDHFqDLUb!xhEB}C1sEQj zJvP3$%|6y2$Z~C|-5#vQJSG{DkBdl#)L>3Nmk7jJ$Js}A`y}fv3Z-Mulx`=5V%vD^ zDYVhTaT(mup}mb4WXpWvfX(n2W?ODXXA7#4&FgvVUFk?iw}h<`Vrn=MAe79-Y_kaZ z8Ejh^9FHut2G4-+GT17Igw)3p4#BUsCf>3&2z1xmCk7BrqcX`L(ECL}dL4Y;U>l96 zviWw4h!j!GUC}xSF#2uUb+BOyvr_c|5x0w9=mR3r_Vnl!`|S6H;vDy`DvJi70xb?p zDK~d}SZU~y)(`pwDXSA9ZdbV52~Tv(Y@Ud8Q=Tm_VN^TSo)jexXCLlYPYY7*NKuWcU@lw(DNyyTEWMn1=ce|vgjE?qT0zehx&%X}Jk!^>SF7)!3K!tTbp9PWdaE!!f@ z`svZFzwJ?&&SkgOAWkP`!L2p+gl8xbR~d%q?}S-C$f``h^5|as>~~`>-5gmHyoIu3 zd{IPNK^OaT?&%~*E%%^DUFlKThJ~^)6d=3K9~$iE9)k*M6&#QnK_(g79h?tm3sEmW z>uLI#*oPu8>(qJBK0PGHhWhY}X%~w?zY8ibWz15%TsStUf*1GzwM-?Jakb*q?=^FD zUJ17}YbT;l(z?d%`bm*OdlDJxk@ZtNUFzDIHz0-i;h{Z@O0gbH(pD#0WZB~=L2oF_ z*&Z`C&dbd<%yF^B$GoJ|Q&25cm|oHkEDv(1-s#C(Y9Bi}jc3bM_Jk;E?VhpO#(Az) z`@D$sr=+aiA~Nek)p)TX zd6~M&iLGJ}v)EScHWBH4FEM1Q*nGN+5La(;3c=YLVV@Lrf4ieUo!m1O2UrX0pEvaB!VBCqrOYGw8j(RH;J$?mx6g974!m4b9XtY9p6{gIx#~tDe zK|*TF9P+J~ZT4M)RLTQs^rCa1HU0n}m0Ej7kjd}NEA%@aK3m&pA1KVw;(Z+p@*lqs z6nmE8<+Yi!iYTmuo-*dg?#V>jY#3oyDy&$PxQH+ehRs_z@Rg#`Ql!#L zY?J_v>z>L;DSEbG)Xfu*k@J_#AWAj{yL+}!vD9CL_uDdi(X*c3{rH(hE9`_v(n|zZ z-u1{66Bf&I-1~xbdO&j%Dr5<}heQ%gi*a>2p`|29|`Ma6~EH=&<;Hxc51y|SKn z!G6Lt`Q#%jT59Imy$<=j^3raD9Wn+zIZLNJbAB4xS8k^nVOCuybn<_ZOA2qm)(FN` zSDiS#6i(d^QRr`4;>y-26sP^?kLq^sefHV!5fyt>p62cNYr@P3#;Z!KjR5&x9?wN%q74w#EZ0wU2Is(5%BqI& zQ(I%-_58`Q!`m%ifJWz!Ngf_&4|~+n2}dW`;`2F!ydREy*NH-d+u}F%mkP&$M^Lf2 z#14rNd3@T(qg_Z8`bDO-h+gintU&BaQS++Oh+`p`(u0Zi#qY<|{g64e%LM4#iQP+1 zG}c-lhurz~zTQ`mmW!K{u>PJ=#?UM!K(Rr>c@4a@dJ$s1)HBLn5vJt8Cs=213&y!Z z$Ua$;mTjPah)DT|H;OOR?guePNlAJXdroITBJuimDkr0xPwalCH5t~qNQvLpw=724{MH~Pt3Y_2+LmYcOtQm@rCB8@bUp1QA%+2unrvCm)Y_{eP&q}HOOh7J=i?$V5|G|rFg?P! 
zs&V4U)z!`@K^(dVU?|&%!mNQI_YJjAKN#bJ@+sHJ4)3R)1Z4 zJ`~kEunyA$6YUc}jAi0RUFMoDcgQ}znIY*c7~9B0#vC3p7F(Vt3CpM6iUnyhb{i9x z7SQ~`_jmP&heh%YUM7N&T$*8{y$n1;o>)ECek#bQoo-lRH44*fxZopGK>^VTsQ%zk zm1FB-Pkmp%VYXdiIu!UL2u#hiogRC-Zi4;BW3!TnJSck|&x^tka#h%i7~uHiTY^8aNegyVM@wTmMgSx3&qk) z@ZQYzE|&pUL5q2fxJwj@&hRbyPA)l0`+WHdMWC_VZ1hnr$>kQ8jN^XV6O<)K@U~{v zf^jhpON>t06k(>OipJRt0cvV{TL+uv`QTRQ32&ammYVvOnuWH)A&b6!$F8>7VKDI2 zWKRpmN=8Ud^LN5=i9{wfa&pgz7ETx&vU{e7vjx9F{2WfVHx`(kS67;J} zR^VLVlTM(Avby((UDQf|SBNL`@%CRL#3rPQcnZ8Uhk5?1vg?KEH}=0vtZgf3Vgd#T z#2M37wZ7I8qA)CP9;~xMg&A*}QambcKlQ}fZEJ%_z&e}e34)!Mbv8$kdXjmZ?RI-= zGShPS0M;Z5i=kVBbG4oDys+vEF^v9qK@HKCh1vQE=dHQv(d{D-5ycQayO5)T9w)C5 zfi^z1sLZZWm>%F}^X!z_XQwyVU7j2siPY{I>~2p&OL`5LMQ`h#qhM|SWWzk`DN2|j zdxL^YJuMUeMb3~(xf&zFsk9z@c&u@Ht{A^n%7-w5=n|4%~Va4$= z@>3@~anyLTPyFP3;*lvBPG1rxaf-LA1DityZeAFDPs_HZ<7JJhlqA+O-N(U%Rf@bza~}v(hw@^o(uy zXx>(ks&>Gb?6GGA;?98~O4|2(5&RU(ifJ`-VsCjuJUou&nPIwpG(p0+@AECk_HwB# zMh7ZW)KZJfv?=y6B?6vktUIx2rClIMhfb`TV_#7?);uJc#}Vol5$FTYm_x0n0~kR` z>+7JndDj?wKp+l8_X-RKR@y_NQ0mFW_4csBaf#f=#a%O%2;r}acjC3SC09^qWPtWKJh^zX+(O6^ zelFXQ3Hd-28Y4*_J;Z-1FJgW^iIV2!cCj#@gqE5|TWY7Z)W9>WwW~y=Lu~Vy0JrM| zTkoBXIMUAU*sMp{?|8D8=JM&BZT$t~x*Ep0%pMT-ZzD`EbrHiSo`?Fow!OttxjaZF zS6P`b^HGvW6xz=P7~?nQt+D9}`%)BtqS4lg66Z3X?|YdYZ~`vQIfHOYJmmRLE_vLZ zQP>-!JMjMOjyd+Oh!h%(%CJw&wvXgP(c0NJ_5^$8#{_9Xw}IccFDT5W61aMNjA08$ zr|gUDDn;pcSSI6g*9x(T^r$LDWGYq73cXGwYL_P{UNiO{+0B#kts+tvESf3U_`mZ! 
zd+H`({ccvzuwN@oO&NK>dD62UUot<)F=5Rtm-U`1 zZYcqi{N;(9!Cp{g+!2RW@m8%L!0TQf561~R?I7?i-V@Mztee?@*EYtJ-`?@9sv2-DTvP!jevVeRa0v4}58AW>Ch z%85G%^YfcZ$EXD}dAK_i^jW|=*^%vbhbS&4obu{qtX7++#Y;YpbY6E`Bp zQSF;3$+JP8=;*S*d%^S){FMU*_gOIH3Wak7KOZQL!@+B!wip26vYbY7Wl_w3od zW3x?jvrRj)O%v_6A~HN9(TpgwcRYuljB1rfq5-O2>dFAH3X~2 z1~oLU)syTR5q)W3FYZ}{A4H)MCwJ^UHG5rzG?%x`ZG0)Y$?)G+RLS<*JxUq7i-dBq z4Hc%cPMy10zQW8;@UlG4i)0_TpU>WGK{Z4^-*NZ5P(EK)cm}kB=2(jWoh*b$im0V$ zm%u!C%ZrD{qUF=at`&e|c4U&>t)Rs3#%?`cbKv+`%(!j}T6;J@+HC_A8#@YXS^pI_Yuo}5$F$QJBhPK7#OmGQp?mQ-y%o z?A}$)ORcA{I)&?$N*m;efgd=+h6#}oA0iX%0RhG%yXK(!hq}cyk1mny$1IQ5rvuM4 z@MhMCLOZb2x&SxGbL;~Ve6=?>R@;R|V5Wa0LMfkDm_xVPhsXR zxLZQ?F5IS;Ix#H&=pu2YZFC|QmRf37w$yBusinOl#>V$X@bI?|Q+bI|h)KU7$S7iM ztjLa^2gr|~6Nm#cK#rn5ojPC6{cr>>6J&g{n`YQG3Tsl~$m-qQzUBma)~6yhe7M~& zLfm?^P>!wUOBeP0^A$tyNZJd1xnvNXEEbVApP07y)ZBSiDHxk_bQw(RYGE4v!rm7r z*e?WH?>YJ$u&5prEiCsrVXg0GUDzTKY4IBqH`wCV1hNp$*3l^L?J^OmHM?OuhOTX+ z!{d|IqaSNeIi#V32Mx6s1lg5=L(mtHO!@1cLe`A9LjFmRR`yM^^Z(a~ST>u|qima3 zIy|v|s~jd=?GW<-(p2ao82dZ;Pz}Pq9{tROXU4OS_Y{mj{=5`?{z6N50)&fX7&#@# zkn{|80`v6gvWG>ZHN(>!i$)7j0cYW0TWPGYem?S8CD&E9QxwKR3|pABuXqA_i#2j- zbNta0Kq-^wx#F1qxhy5?j9b{ zv%5Xu;kIqM*iRIuv3%02v3X)e!f{2V@FD~IAG5ojhVB+9&7;EMhdveC9g=QXDGDRT zJyKp!Ry#zieL6SqV-Drc9zmVgJ-fyeB|yYj-xh`SbZmz#V+RvR?}$XdurQjoKM61b z`_Zz(mspr7@Puuy$=%2*&w92foa%^ynbyiMY8#+=b6) zFT6&OipPlc*T&<@(>yd++TDV&B#beRvRU#pdxW-sm&3HohygkRb=x;V0y=(a=TbzK8lAX*lq=UcX>T=*v%3Z3ioxMWm38;%U_f||J>(FcViBj`5xLaZ z`Kqvm!YZ2IVcHsmXp!u_TT?FS-~<9m`-Efr<>xMKZ+c$16YPNRc=Akgbd_xOUQiy( z%4^=2F%yW5M7SwomkKlPZ%k;in-q@aa^+`p>MQ~(@hpk#mgtN(C`3g^KU?tlQ)A^y z!w4OpwEy^&IacB2ovB%GOB7~Jux5ux3v4XT6`ua&iZ!R#*4S=A+B2$oRZ$76@St#< zEjV-y2<8O}6=2XsTD4Co0V8vGHDC2t1p*goZQvqdqPbLs?`!JUtS2qY-+NUdsW)YPnu()ZL35C&-JI)}QfD{azwgr}_C}Y1f8}sEwO*7Vp2EA~F6{FPQ}|#k<70T`3xdp5^ZF9_ zZ}P2Umx&Y`SDdIWW7ECH^MtqZ4fYK|iW>?W^d`^G#J~ZO_N{gEXiUM`0EH>5rEYVW z&Zk=w;B8CRl2U)T0_8U>C@PR~0(1>|fm{RFcyN*(# z{8A+Zx?6#tp*{-J*n;@n-`^o=hj7yvCP-FS4d`k=5`nVIdC}44`V-+ewD_0G?wn|8 
zQR1PYAbDsh#(`2cMl>psFZ-94+s_0U0S-a%Ajb>E<{?*%9>C082^Gnpu!$g%mk{2M z7u(~4`a$BQPvef-$7>?RmSET59L9{^5T*5niB!70?X(tA<>0rL7d&O&0rP~@y8Sg* zKoBUZi4^7r@!cx>bQSqDH*?`X@J?=bI)R}YnU+!I?})&#o~hYvJrpJ@N2F2qT>-iu z+?ww@4@1q3mhhzfpvPLN07G-Arr^*lo9PI{vS@ZL5Q+oUyiZ)HZAlZzLg3Lft=7PedcaSJF5JL$z)Wd^%_Ob^(Tn#JgCkoT6z|^Y82~kP} z+Jq!;rImUt-NIG?uQ?+{h%yU7pr4ly)WoAt5rokc%kYuBc!bsEGN(&MV&{F12naCOwRH9A`U%z~NHdPkIXc_^>?zZdEU*teCAIY80q0+WaplRnJ>r!g z9R-Dc_v|^uE>zeLK2l-gVSl}SPQ;icS;ld@(ry=p>UfdPSZ@c-``En(2xx$GNUA%s z&!&q&Cl9Yg8rS9v#fk6KuB~l%DC6Y(fDbr)xh_aq9~O*DXk<#p%U=?KmbPnmz5So3 z4Z>cv_BW4ud&Xw_^k|atvoA~*z`LomYlJC>_G~?}4`&QFi{QPK`w*^@+pMFA6keVf zVLcS4nY`(a$NGDJhv5P9G5emN+kcX18$VPSBhq*OpmLHme&5=ro7ue~M`jlLHN$Uk|w zOWXGLH-)JfaochBiRze7%QA^Yb?nMrDpKsii%r-_+x_Cf@c&)*RmISC-cwg`9lpZz z2H%;BgZIoEbH$;LULd>u?L=Zg%lV2)yoWD5GVvnPh9dE(f8d#pE<(0a!U)@JjAv?P zZPkkbw8hi$c!}qU@O;!LQtM#@CO87FtI=z+CehFXXH<_MAxZ^MoRLP52@0v*$&n zr6lZuVDu{v`EK$;W2YP*uUjQqlgs~`#H{XoEO}R2M+oQ=kNcYjN|1?&M*io7Me76yfgEgq6y0jeq zgZ3g(e7OvF-6bF!7}(Q5j80b1IRWqq@8hXB^o_6~4o2c?+%f`j!wr1m)saGp3Axdm zsA$HibNj9~U17!v!((RfZBZF1{HtOkJ!2pF8JL+s<_@mH zH9{0Of;;sM3dfo;qb4QEyF{Uab4wmQH*dV%BN!)wCqW%%4%#43Qkp`eqRjFIX)F8O zY%6mxM)E)P|B-pSu0t^Ww&iFQDS9uJz@4JeUMa=t+_p!URt-&-V?bCxnqP{-3T~-+ zJQ!TU&E^4X7Lkh6IOnwIJrC2?3oR=d+sIMy15$>j=zF4QLygik2cDg0pZYl@h9(mP zmM#-=nL|{W>8&&xcJup}u>4huZT*ZyD+tT1gNS~P zaXXQ{7@Z5Enw{vH>57qQ+a=)3eqCJnw$F>gj}`{7^lb5jC~*bek3nc@z%+{#e;dr> z66^d6Fta!UH*$Q2?-Hb!tPecX_YjJSk8XW_fh9e$h7OkjQv_)d{xZ@wUx3BZzDs9& zT;ceAE@jii>v}HZJ?SF*rC=-xppd_@M|?J!nu*d&o1w44Z&awPKIz(Dp|AsYgjGfy3X=1mggp ztIVc0!{ZsbWO0?v7mU9q>`zEoaC44=J4(1swoMc|RLyr3quN5W<0LPGPby63`k=sE ziH~c+IGTBhykx{gbG4Dc!wZV0uGYu>ddrHUU0$Y(;XUD)>7czWB11G-zN`N%5ZjN` z26xB5il7-1qui_}mXywan)mBeD{l+*;#j4TTvQ0A#KTnyyI)uvMJ)!Jk&W164~oJhQRX(8#jzhd0U>EjN@L)sKm-N`p@a$~@$0+@ z#_3*Zb5_|*Pf$^nkvZ%Y4pXIalVuwPsepTY(7m)Las}|*IUTKqCq2i}{o%drw}M(( zZcb>GoYTFN%Y|_n{Q}2MF^yCmi9H+jm8w6r454dy(FC_gaC7aa-i( zsvB>(_u9`zr;iT@!^!rjAag(;?;Bxrgy_K!5@lm}A*--v5vYu4T&f|yVn;-wExo$+ 
z=xb*L&R9Nz!^(q^xXFIDU(BwK&Fu5G)9KrR~ik%wt3m@ zQ?lD?DNEOFc9@}Oe z1gL^;3`k*dDU;^-8i*|P2iw_T#WM;h#n9;og;+|`#?4@+c9bv-GjazeHc~XLcJxA2 zYzER&`o#WJiM0q=<@9%kP4?2rjTBj#Efx+xD&_=VQ~apbdmc2(dH=%B(k4&CO)npv zU9lM&MT-5u7d^*UJ+AfsEBN~CO#fPdWK6Nag0 zA#kwQRc@|g)soelZLOCgnI=S<8U*8Swj@!GV_c7h--xK9{XsAeZnC7Tc~|j&Qt}cR zUQ6xuToLGcC9K7ZI5TnjiB)UuZNa#m4n^9uGQ-6MZ{ri7(s~TOvf~P9M=Q{otFI7s zwi^6dXcZnW+eRt-tq{FAv3P==QJ8TZRBY!wcw~cJFcB(+mrCO1LIJIYz^&qx_Bjz4 z)>C_zp5C(D%%PS7+^j1F*5EZ`46FCF&`(Z z5QPTd>9)|S1Zd-PtL+yG()4n6<0+m7i{Ha-sYi5d+ts#v1fz{KkVljt8T69E3{&?C zNff*+L^qC4-6#{BZwk_gfoaSSVl+Bq!zM)vt3DjJ8f~;FWZ~o|`(4un{roEfdE;1f zvqXzOQ`C<92em~+hESXg#I4raK`$tok;5+g<$n|;CW3gefkqx`9~|thyre`lFnA); zgSWljN|ELb!lnzX#VIW{v%=rCcERLWHO*rSW#9U=2)@!~iEqE8`NG^v80cb8POVr; zMEmFx5!B_qh}i76%LJK#kvvd-RbeLS`Sn~4uJLr#(trxije>DXQTwF*cC#q4KkYh+ zm)l7MruBv{)?Gm+riyoxO6x1+r(w)JubwEo$e0ovD7pr~nP$7}0nbNk`vlT6seZF~$z#x5W-AhOY1!#SWl7_lzj|$)(c; zcH&F^0Fn1+4h>Jf3s?u<}mDa&wwqXq#)7C|hiVI~~XQ0RN{X2jle=F?PqhIr^fmWhe#%fqa^SX?U6pFva z`!QLOu}xl5iIn4i>1p%Q;+8z?X%l(8t?TFclBXTnyr$5y4zQt{wi5#M>)hHJJ*EFi zkUG<-coo|%(_^H}#8;GA2SFVn8u`NNz}H}%Jq5$kq2tZgU69^kjbfAa6`-uA7VvAa zez}z2UKn0*cMp7!6K7%AoxGgjJ?T;4v(BHJR!>`EDXZC$XJbc_w~uGIaHh!|eK+?7D6CjYlXW{^T$yI?-+trsapGb2M!C|3yx+9*%=S z0QQ>)gy~tni~`yB9HFy>%->QW7E^fOoErA{mDxSqZ$zg81h976ZqG8JN(!NSh4}3S z(-l}XS#D2w;yj6IyyVeh2yt2CAaPuj*m%_C)Ap9I{}varl;q{{J<(`m3BHOF_SqRR zNAoHP0+re4h2v`Ki*24l``-X!`y+;lZ&}g@=aJeJ?iHIzl460apc#9T{ z4a!Ho!zzUJ(?ot^q{38%m9C1E6qJ7}0#%K{dWZenQy$tl#x9>p1WhhNva8yz5F+1^ z#n{Fhhi$wci9nqwnl|r33qg=NPVZiBg$h&TkyS?)Sd~C*%ut+fMod9Lr>nAgy-o21 zBj8adt21`MbIZWH9TK1x7?)@2_S<2>FgNpp4B&EmQ50IqLhF3HwFvQ>kRv8}bz^n> zpGdL(0WvZeux~GtTfi?R2IY3zS00T;Q&4u-qjJ7@gDBJurCUjueMsf zgTg~~m9|@evN-59K0BXn_PJcnKCrC+D9BQ1)4r3PQJ6vE)E(XtzBW7N<#jthEoSlc zBE)vY1Et+4N?f2vm+;-W&9g9pTj9p6weBKNeg$HF_b5!G5iV=OFwyrss!Q9>Hc8=F zaxf+|)#^owCG&1O*>;exT1vLuFUh?FRl)g7ClBVo1{ zdx{{milp`d8{;Wpgur^Pw4V!8{;Q85*0NcE4Y6O@a2|=>Fx+-eacCA7nEw%=n}Ksz 
zZ`aKQ#_gH=MJCygFLI!fgo`bLMb=J9^gCLCD#}*aVVy)E{;l1OXIJjAZi2DV@=WksSjh0cZe+be>o@}IMZI{mj)2DDzft7C8h(LCpmsX(u z(@BulRaBK7sSj^&VfpM)G8RQ(`>c_t$frb!mB`12J?~hC$#Gq3F9=h>+1mN% z7S6VJJX+G3ayavE;W&VU*{(3QZtdqoiql@ogRmY$th;E83C%zy z(0{B!fzb*@iS5ERL((RCh8I_aL)H>OZ3Q^$`He?W_yahgeo>Gs!`E)Gs}>Ms#NdO~ z+BE{PM7~*-*25$7aei(03ut%Sh7LYn4!CXVt?S1kQ%#q4ovcD(mfHVg>)hkBF6;lV zmSy_2{95nqq^6Z=3UWSJR^yC-5JOGPZ{6E&*u9P2^S(E5D~pZu`3!O#gE7W9jd2>| zWDQEIFHDLAwYn#4Kw#NHt<0~#=j*zzar?(_usyEV`#K*#*WrC=UAwS#?cUbPY0^ww zA)*?zdaHYnUu}~`p?i}~-gcG{C1_cVvVeubYkUl~tF}YPHyuygQGK#^ilhY)<~QBS z*sCFWY$BH-7X&Ck%X8X;y33>ab~&WNZ6tBjF z)rwv_h26{ zEQsah+j7yUI;;jv^Iai!8(K)ur(>hCrPwYD`{6qgQw!Iv_V8l)ytC!=>|=uJD29>a zV-nlI9U@Zn7~1{qO93D~wa|JAP|U(Y7BfrOSVD1>5{N z0Sz26@ZmA`x)3?ihi(Io6mOMX6p_&cvtJGz%xEYRL4&pSNUC;es`jwmxCRPY4_>26qux6Qe>fXG{vC(}GG9wOBjn>Zn&Pil6K5&d8>VK*7;_ z+oij02=R+cv&-Nv*&QGyZ$3EQo)`4pGz^hf`)%4g86WE*^+ZdF8zZwN$9hp=9C5|j z^8IJaYh=Ra=CwZ4B1FN_P%1>3bX~=UtkJ$Cg3o7wZVo#M(>wVxFFsU2BX@nvT8u0_ z9wKv<<^@isi9m&-vNdK4(%#ZGv)`T*!4KPHQ4}sMs}V&doLnVAoyiNt!rvHzr5{*c z3@LGtnIoxpYY~AZ!arFvln&T6m5?ZCGRCrcqX4YIP7@Vc#_Rt-TJ3L97IOe@e)kZ{w{c*-^)eKTzgHBkFY!aqDiSkpHiFXWHv^p zC9O;p;F4&5mSpgPI^Nz8K`o1VCUJJ{x4(%(d5~R`{WmQl{}74BE@<9}O`j@&uXJVv zJE>nt;|LcDAH*m`NZRf4Cq16l(6z`Cm$Wa5L?go;rwf!P1!-hh%F*8Ens--fJDU9J zaa)0W^>{&d>g%MtFTPXtRwfS$GJ!8fF`tm7!VGt(fqw-er_y%6K}70@(iN^w$MI;g zHw2?@j;DezTD_%2R1u`a3>8hgxhr=3J11Yt%OD!UUad_~^TVC}nKbk4dZh7yi~Dzov5G(nXQoO*jN{xc>(mjZ>Qv3CDvV)w6@fwB zyIVKwA-5k%W8=j#N!CXcij>;DHMM7keN&LSQ$i%-3yP%})CPoD&K?0~Aop9jC`uEi zw<&V#vm>E*s}^DqwN5I>XreP2>7;h|i|$A4plCEFD*}tdPepUXft#|w2hhANneTxg z8{^no*xeflNq%pvthEQ__9?l?f;!Hx8KU`mU4Cu`I(lq~D5Q<5%W?K_h<M`n+_WbjgLPu%u_r<-E`_328UW3@-DZZMGz>aiP;|0T1Yg9Ge2%%*!Zc*tszb=8 z*c<8a(ZsQZ+$V@Y6*N_^tHw=nTPJl4uiFEK7n!CTc=Me$o=vA?+OcoR$5eWH^u zPd?nu7Tbsr)0vBLyev<2P$ZhXPdfP`qEI%@v%0-`T9Be#;+Q;1ZeI=9yUT2D8V?uu zC1g+*qCpdXN@=AGKeG+Y@d9ekH8DWZ^36p4iWA^K?4 zy~gvcDR>RxBldiVNOqE6WT?F+iiT|!FB9uuuC=xhgw3PVz8AH(M4>qBl4PLfni?O2 
zG{$2~VZ3z~0e^T*#*gQ^J3#zjpT5W*NyEbESQ{(oDL1RJ!n5wu?u?&q*m{PC_B2w>HHlHcCvUK!;VyqElcYhrD z_7Du!`cc~>ppje7E1zsnye@(t{Db2x$bSuZWRX6#N&9H6&m@`8hWY^kii~8aK6rx! zHIOkx81cxbR4irlBjxM6ZH$!NCG9(+khznJ+Rse*o(R;SB!SlMcshKRw>0%5Q8YDV zof9U^iV*yGuLj`^O(qA0o`SxahM_~vz9~!_ApEr5z7@c$4X^IDVFLb36Uk0Q+5B*?9s-mU zmXz5@0V;-iViI~XAgqY;^%y5a>tpuIYr3(CqEHT5tV6SZSqMNQjYl6HA}~`#@%i;b zt4uUf@{G+Ejn5;2D1G#w|XoQ>&{B+ukB*Rv_yYu?K|chx1GJT$r-({LC3PKomc&hQQL7 zaO_yaMD&AXR1}7=0Nu8z+@2DoDv9O;7LnUetJJ>9seOy>X;D-;?q&24JSRjxzh^<) z&Pg6pp5|>>hO>F{^qoAWG?_06Xgi!vQeAj*l^qp@>WV?ePNcnuGo*R@bQH={fEnfA z%Ga-`rIEzZo$zF*M4~nC@6t=wT6)_>5&WQG8AkA1m|4_e&FQ(;N%ibeCqKpVw)`lxPfuE2g$uE0);PB2tb=GKScX6x-M7KG2(R#VPy`JZIMoOHZl9eH&F4He7PC_!3eyEv!lb~Qet!2#lVx&7S1npuw9 zZP*JUx|+is<7+N$mG#$aNhK-q_hj zFN}ciTD|NrG_mF+TBr}~cTtY35U3BGi{-x_dx}5?Jpc9jAYsRVK~|mg(CG~-?3lFsbRWN zuW$U$uyZ1*ud7z`W?~luNEvzNOfn z2%t=l+7SVb^T{tQVR{|5JP~MKIL6pQC58OpVBYA8jQd`jAS%UTcfIcA^|n?}BMgl| zo2I8~8mulP!uX;Gn*%~81~Ng%gqS0009Hlet#kcahzb_TE}#g*J(#b`#qi()BB>E>I{s&5cDUJ1q)nCZo)d9J?H#GxE9G-U{%Q z&9Bbka^wAU^hj2+1QD{UcKPU+cTT;`gN6m@n6}p2T>)ZUk5Ox*1*sr%VwMPAIydjC zu$ll{*Dh(T9X{ffF zcH>1SWDb2|pmh>ZbJXp3np~^x{@|Sz$&1^Q0=~=pJu=#!5~Q#f4x#$J!(F${mdi~$ zlUoK?ais|H*NNn3!wA{u{;jZ|ncNI9r0jK3eA=T1j?$~ys zut9fh6GUXbQ%*fJ?XgKA#`uz$RRm8zJkSF?za#X44q;FcNb~&Rd2I`Kxr_QKwl5^n zwr08gN^Y8eeDmHsjJ8NI<#CayYukaQ3$@#L{JwUaf^$l_S(I|otjo~g0f^tE!DWpwc`r=i70+~>HUCB5~6bP1eT17Y`Y*G z89zEVW@iK_c53$myCk^;jGvQA| zEM`qSpC^@{3sXt16HvL-CPbd8+O4V0)9tdLpJFhfY*<9b8yE_`56YMt8h~?f#>lr}yRG?70;%Kf!`?5R2)E{v# zYO^g@6xxKFxR~V!P*}`$LsEchT&S9JuCd8>2x=24{o2Ubp-O;j9=eRzStmh~dSl6)OY3In3-y2~ zewyUSnv^{x0(0|H`}Rme{)B#YY46s#Yx+CW5rd zvxwA)L?!c3TTy76g?zR}rRWo|x{yGk?C4a?e>MwJiuQ$#_J?%XQ7B0&ws%DEqfEPQ zPK}4`-yuFzqzmT8uR2H&&1DIA$x0n}9CnR}6rs4en(hBbg?QJIo;+tMP+Uq_oVSn3 z!w>3-;pQ1 zNJ1!-iP<>;U-~R~ZLD3GVx^karkZw2bo{DAo{}taD)o5J_-LSze591CfC9T&6rW0F zE~(rvtX1Yfxf4U*B*mhAMI_&XJT}Aw1Ug^;RlUV-oa-KCH@RaK+IX?cQR|4+!3#xz zBNd*;XrF{k@H8t4kRrqrPus)**+?&Zd9lqDq%EEpGSoK7O|Fp~ILtQ-__={VoL(K9~Y;4Y>Y^L!_6l~28Gch!J(lj 
zB*py}ibbtNFf@IH#m`|<@|?~5U1J+U3}#y`++)WD{cIZ^jTM!}#@T;FqGnG>N6`&O z0JPq-n4Rk)H>tzm70G9H&J2F3hJE&<;O^bCr)`j%3OPl$wVbIO!B4tTs0nlTPkR)| zAA$zKXWU!`yK$MNM5WmZxZ_XQ>q35A9h8W1PL=wDcRPYaIe6IB&l5zqN|RC8BqmJ9 z8Vwi!Li>iis2VpnZtuuB>_L$<6x}-SE+!V+7!ftKBV6>^cLR!{!y(_kFGN+`P7UD~ z`+*2bR42^?EDwh*OC&#zBH8daByFrHutsqN*e1=sp3~FDiR^o4qByC4BCI(OLlMbR z0df*+B_Ho;wo(K}39_QR!B~BOSdLuw@>)9}=+i3ViQN8In6zSg96296`^5_}h>)<0 zA+CF6FSi?hN?7`|0B+z<$W2k1J1B8=##6;W+g)TbFCcBk)j&a?8{A}7Hb|HzLyCx^ zk4pO%O28#Ejr*Y_-hM3X2d~?fa8z@PWu@q3f>uN6-D5rLyqGaQ#-Q=W4tOFsG z?v!|T{jDGs&5LuJ_s0NEZ$qKgEkE!2Gqlt-G9*6Kg3*f(R%hNeXY% zR_p7cP&_B|!vgwEVRFi6Ey|FaX5@-Z0+UY0lMh(G5Tp+ibhO-*lTPn)Uk{54`l+Z} zSGRk`+<>U$n%X?usjxg~GeQ!*diJ)J=_E3dnmQ8Tgrlo+(82h#5SI+lU8c79Z9053 zJKJ6s^vlk~TqOLt!G1M_fTj0r#bghCi2lxK6rz+0K`j$Uw^*CpT3=a<3hhlHl@;lC zu~comLZJlAW7cF(A;zeZWU|!W3GsQ0=yJBky0W0ZavK4W)@LdnOvAG2jH;iRQ{n7ru{Dke7~UH+)jo> zXes#l*(042f%^39)Xgr0h~4!He9h1Cpg3{1w{g2xh|GzHxdpC}Zx?~SOf+x9wlU8@ z!jv}xHx$<7c?#+$3YBzMa++M67j3ABZpa+8N9AUmcjy^I9w1vJie4;9gggPCXMS^a zX|WZGKwT~_-PgXl(k6z$+*C}lnF4-n>7#1vL)fuM6;r&;g1)Vg+hnH_nC+WHr`Sov zzhUiz+wymTLQPQ)Rr~S+`*WHidsI88+Pk9o{u>-G)EhY;@b^R{3#SYl`?^vVi6%w6 z<`>Xu$Sm$I(fTVyhG!=~cXsmRvy*4qZK6;sXW^sV9tcotD=JhRq($@+Wsil(PG1Cx z*IBkGvim2CRenK5V0OK`opDT1c?v)yo|&R6Jvj3xir~xMVfwAY<_DtFp@AliJ<{60 zR74d_dtfeMLrT0K>DqL(H0vkh}bVC+Pi0uuv@hCIcO#UhmMo>Tx2 zM-Loq*SrAq1#=>a)Ng}bC!+uQxKHP_)3r84H1eHL5Rck-17I{q>`4J%2qdD8=we0` zpT}cd??(lFa%6e^Ue{CuMWoQ}TC)edr*&hx!v=-`<6}8F+@C)rs3CQ% zGT#;mkkuP(8<)%Nk7@AV@L#sJ4sAZavDP-r%g-MkW+qA5Aiuk8Ye)3eyKR4Pk3}=l zF}Z!a55XkDD*>{1)Zny<&HYNy2N)?Y$lCKWUv zVmCZgC@g<)>CzOE5UsH9icZ$LjzZ>WiXasnJSM|7%dNhm9eF^SY{x^`)b0jJ?41jc zK|CUsXCMBRPe}?3nx@$&1${3aJ6Jl=?iKdm>3+=z9na@CLl8`uOW58r(or&tSZ$vW zrflwU>xlgz?JWgt78RySA2u%>>O+!V8aZamIuCqs~)XkELw zb?w2{wW}>VP-rNgm)L4qGDJx=axc8cwhQ{U8IA;cyr9tbiA2^Hwlv!QbbQCIG0l#K z_-KZ>IDNjI5~OTo@KBEZD;=C6Q_GI|!~csvbIOS=W6#_v&$&@V%6Yc@2oeF3mj1LT z__s_B&8{|jkL8L1nCdsuEnYXfeNwb!|q+92);O&rkTO9=d>v5wiv=oZwGHw 
zyRp0`(f-{*VTX+fJBLEM?KlyrUC-X#>~^_*UB>W??;u{idi1e7ulUgbyl)XDm$$Q34sq zow3Sp5mZbmVK~gw>rzUs*Bi8OYGLdxIvF(YDzl+-tHtZ<+~yVq zPkfvzQuWpLu)Jujad0`=m=LEIS9w`-`-T|EIVo4rjR{GKNCKcZQ0R$Nb)}4IEfIy* zV2d`EJ6~H9l1MewrFNCuIzehegb^`u-fR+qB4TOEei7W!30%RqTnU&J&1a>(IZY^v zPe6U;qayevq-&oCy4zF2)S^IoLn8s8{S8h30pvwBS?-VL}C1^5@;jv zrd@s_q94F0G%VoJ04X8SXpX{h=-xtEcRC)=mne?Zk+)ey>NAA%z^{Y5b@l=~E;prP zxJB)R00}v5boF*IKylFUQ1tfzb)UYa+^&1c$7Imd+b0F+Y4^OtfpPiSis`z%x=-{l z-=rO62E(hjXuhqVE{T)vXc4F=Y8zPT_$RGDW745Upv@y<1tL&`fxLw!f_uQg5jII~ zjmgQZ5zB0bAkCi>Nn{<}l4bKl(7W%v-xkR2=c6nJp*ML|i0o|?+Qtx>CVvzSbM|8J z785A96lVoBp5f$2V|T}X%-#{*uY@pzYUgQJpS(f}*JgIDNECFGvm$D@2r>y#zX+>g zPa!{QuvZkb2ZclDGc&x>vu_7ts%E=8ZedCX>$M`%ob-~Oz8dXu5vZjz2{fOfWB(%x zIa95S;sQ$ul5;nHI@_iYynWKVbNgqUpSRieiQ+rRJ!sfr5q#mwq#yV7U+i6@dYUl7$KO_YiRa2f?@GrW9Gt8+oPv@NYnFvESluC28Li@}a0n?S7{g{(LtYne+d%*O5Pq9Ph`jbM`2oH%+Y!~AJRFg z3)h=|>jM@Q<-q-(u-k8m zFw~t#3_jau0_qnR=i={$sCl9k2EdCUG57gI-RyIoLT*LJcI2S~{MJ*1_Vt*6_=?>t zjJMOUKGW{&fXGjwL$8;h9~2y{*yns*1TvGUHp>)X71y52HLFx(H1bV2WaaXrz-ZD) z7TOdc-=*wZxAJ_84Q3wl4v0?XExXaYaKMfV(x%T$OxUrIKyy`Yyv!~Nh1Qo^hL8Nt zC+HR0J4t`pn6pod?!)mhj@!M$VeU6<=Y7$-h(ZNW_gQM)1jzryloN9}Q#>R{q3MZX z<7~JPpU!)~Vk3fw6bvQpF(Dddl&sR~BIpSbd@jkz1a{w+BZ|L*L~+FxrNU2(Onz=1 zDz~YERqFd+CHL8d`l^|H1NB1EIf@y*oftUtSNUi{gjS7>)Rb5IiE)5&HzlE0d+HDIJiMN~}2q z)Gp3Wgn$zmtnbQAqjzv*z?XsEsxv;NPCdHXEppSM=ok=_$xuOG33NJLZdh%@MWF+R zJLUIJhN$fo4fa%UkL3kFPi}gG2W?p`+-LbBkgha7aDJQ+!rqOUi1OgRP+MX3AsAz% zTlcQkAV>pn`X8sa$>)Q&Fz`e=^r(#C_FF+;*i^%Ol)>5`MWL{4i)f|R5FF*(jo2gT z`id`7C--9m1H8O$_2sI18zo5dwoR|GN96YX0CRDkWeL-5ox5UXCr^Mpjvvu=okIjY zT8=T8qvbY5zBD)$;(?DfR+5D4Mbxluf|cZuZ58C}T^v1D8O_hfB!e}E5Pqe$V}fMJ zj2>^V3XoxZqFydjAtV_ji%RXHpl^fLS?f^QYHy0-3!h(#wt-t;1(SLEyxO+?M?9pS zeR}q_FUm_P>%z^x5(3J!D7Wu-3lcC=m}|r3raXCk|Lh3?pMf^%RuSSI-*9~S@eTHz zAVUG^2DUJ)COZRx62nl*5ovci0az7az7nPwu>zih?S|Jt)JTF))f>1ju}_K;n0CeO zgl!kBm#lQpFB|Q4`Lb9YueCemBx`bN-t^*%eCsO$b$1*B+bk|fHCk$07Fbe1`N)nf zr-Zq-L=ke4m%{NabPM;t zX%{I<^XI2JQ79P7^Vd4zfx{yB42Ph?!G4*>(&=w6rE$KV)Ep)U@+;!A(JF7hL5A6S 
zqGSqg`U51t5rDggmT{+y3j3kx8V7Y1sk)j}UA@f@6vvviO59l+Me(H>%)!OB2$LIE zE?VsN3(~}+IQ2FKciZ7bb|eI&P>*Gdw&nmYY?|42c)7hMNPEFgit4~W0>X|SK@y!9 zKP^&NO7`w{Jl{L)iz51ZcI(sAdJ6gxFIMVe>m`COY0KV|J8eh^@2*f*+9*LPd*yxC zo)v|Tf)zYbYUM)uUdU?ccE_c##Ab`=(|kg^@dn|rOKxa{ZA$DmX!3YLbjk)ta;m1$ zUKI47Ob%jG-9GJ*y%z$}CYLbdduxh7%23=wsJ|0$yM0MSGRz6w8@+{1 z|Lsd93Z}FE22WQ{?wgX@w}7;4K!|vH3tzXLHdc@pM>w<8#tHaZp)D%c3WX*2OOld4 z-idDBEB6RCTfU@Cu`jP(aC!AQTP%w2iAOQ8Rwc+77&tuB>f~l7J)LL!0(s2n$L&`k zhHR#FTt4EXg7-Db2z?;y_N~nA}=pT_5W=h!E1Er;ml`uib5TaRm8cSw66q6 z@`DFHZ1)NJG0Va(W_IfnLcrm@1q%TKLWnF$dzVah8HS(ft=bC1m-@e=$1R2K|3+IS-Fd%eB z@|yoMA=+^?&*#bFTy_$_43VkH!(7U~Do7!Urlh|XVyhbHBk3#X+qPv1D{@lUuhLwY=0|ge z7Fw}rv=d6W5$gJJNFcRuO=|CIs}f`orw&Z9t#VUV$1I4LUV}X^iZ5J$+BNA36dr+L z4q^6-kQlEDJeJ#ULjn{oCz>rzoHs@Bj3*1f0MwP&G) zMX+gx)A&$s9}$s!Nx6HMKK4;TnuM3{68p4(pEE4-nCi0ILTD$dy~@5SNF6gr4YJO` zi-fxM>}I_}To_1GwacZRWSA%cm21qcA~%o-xfdhsOp#aI*k zvq~_sX-9>8#WSU&{nx@&Am4E%T1$vdF%b3G0++_gKuI;uaxA^|_D503a6GCdbL^s! zZx#-DB`|I>`~M`O&xJ>tJ@%$>P^r30=MQaObY!!Sob!p1;hN;?7PV-;ui@ia03XlR5M>-v!?s<3 zGV)}$7iPj=2-5NGdu#1<@S*{7V$ayyf??1sa!1R5r70+IY2KEw8{2#s%m!YnsTafM zCq&U0Kpwl$ZW5vu3*%A%gi7RlM4)1@Kva1^4ls9l9(n5}Z{IAuh~(QhLVSYzTk#{vlHf?rT@%84xgo&Oou%;v(ykMP!zWqdeHHFq zVYjG$Frk43@khzj?-MaJNtN!~?UE=|7pXD0soxZ$HJ=`zWglwCtq0>E`^#ne@^H7l z69aaQcC!MIn53sr;mg_VGl4<}z>gyBly`~ZM=PZgtEkg<^U^fyEFa%ea7e}Neqpk{ zxb8XcrqyXP$6sn)f@j;L750GKS{Py|wEk8)mD~tiT#to-PYfJwkIU^B6!hg5WOHe(JJKr-1VQ7uL>>YU0g}%ZpvoL&lf%uwI?=T*f%!% zPgv05=EP;q#FWa>RC7)EL-EH*z7 z_TPPE*h*j=vFt$7C7e^-TyH-ViJm#$#I4T$;|;c21iEc#<}j<5n;hC^tv$a0D{O+~ z09Srq8KdJ50n?!H7a?1m#w{yP?(o>JSmciKuQwE+V_@qQR&1U2cTvgZ+@aNI6}5K- zsbegPNwlwB0BAHv^!hSDh_YoeFuo%vCQxxRkk<}4kKP!8LGU$#yuYVbys^5z&R&^|1u9k>?2w^zVK7>%VNdn%2`MY*4g2@~BNRd!p6AfwDJ2D==SaCrzp3ax8V zA1~-j&{E?9tP24yFWV!f$_^6XYz0<4F;8D_8$y6{M(SCUTIm> zveI4<2xblLQwaL zv(cUsqx^4$%&5JLBVHXwkPm!5=PuT7H-i#q8UK`)0YX>?|;ZC6@R? 
z5y|PytXJyn=@9zp@_c(nZdU%*NhlSX>J%-p1A+;EtK-SvZyObWVj;i{A5@)?>teJj zO}7I9(w|ZfbyCo;E8O_=j96^fy#a}gF_xESHv|xo7LFSO$dZw~&joON+wn&GngF#K zf@tl7a{DBCsYX<{z=n!MiIRxkj<+l!nnns4X*sScp9%ha1~E1NLm}Uz1K8_Wxv;)B zx(Q-aW%ETKH#B?NO1Y`tAikk&gZ2d2pC)9D{Jo&q^7mI@N7F6j$W(fRyzY5VyjJL( z;%-Yir|j*J@`y2y+BHrz39l8%*bDfQpq2)4m3Kv;@5jXw1xfoqA<8vI zn_jf?!2bRhPf3dX5Wf6EyFnD6Cs+Cr`*<23tK0Ri%=Q_hSYBuHsm zo@?1*j|!+6bT_e0_QxCr?}8F!N3-Qk3uI>Jq7$MFr9SjZk%+zqPN7 zC#g*~O+>22x27b|HVFAvdt=e04?=jatwz37s4&)CD}nFL!jy^xOL8W}@(b*R5TDCk zv0$VGDP+dO14i0K0Sf7uw9$YFL(Cj|FGP^>8s^-`-Xsk9yKA6Y`?Mg1lNykueMU%) zAn7=s^(R^X;E$j`TbZ1I#_}3oY|jdjcWw-`B~t{rRr&5P+bE}6egByXSkw!W);A=k;iL#z+Jj`zA3gp`=iIZpL>ywPb zDryxWbldbM*@|$G#)}jbJd(8e!LxnNbo_0lAgyzB6SDpL1pMbJFr%`yV5i~eel(!v z-<}Ui@Oi3!d8z$cP~G7a+VIz24T&~y=ZWfXA+(lbI_JZG^MxIl$cgL7^F?9bLh3x0 zJV&9tjHVa2*tbJ?5SBYiY?Lr%Y2SDVMnDfKe{&PO$Ce*b5= z{WL==W-KO5|Ly}9#-e%2vZ!4zOv%OZ%GT+V0iNE?PO98)5hOZ&DlLfz?eik|hH(R9 zo!ur11uN!du+X}M_-rar3+JH^-a2`aSX7e=aQF3hTw(e6;FrN|kobVOZ)izKiZ1Oo`y{{zgb-nzE3b?tMGUty(G3wsIr zPV%MFQg6;mF;I}!)Dh7h7N9=k#meoGA7n!r1*7CiZ()G+Y{db`06W7T7twz~l-F9= z{thT#F-gF8EnhzmMnvFbTOk~dR8sj??w*|KXqsz{{8W%q=Hyxd|J=<-)~kilbO`C8 zXB^h?h&2gQ!oZ^aD?yrR%)sIHirkboUYyCvH^~7iXP~_i!V@{37;AqCkUbzOj_qwh zKiQ5iJW-CJZ0+E-+ehAl&OrEGd9cj-SrI5|ab$eb?iHZToPs&KU&z<^QMljjL19WL zvx_r#*7PM$rH#z>jvSWbwlTOjuPKNcjP<}C@9r;?#2+NclVyQUv9s`Wx_0gh=nx=?@=Q&?Umpj z_E?5p^|sIXY*agh8iyS%4hZC&P0# zN08QXa&Pc;EVAd);k(?|-BtxS?pVdq6 zR*BP5p0p;BeD{w{U>oD8Fm0cb6(^X(cfdrLs$On?5S^-xZ{Dm;~9gs z;~n#M5&RIkG=IwF*{ki2K+&_!-lo*Pm3C)HgN-p(dToecSg^wddA&U%BISLcM<07K z1ZZ8mqP23STXd$_4?-ebx#`I+U(n}UhztmaZ%66ahO+xamsx|`gQB7Bh$TY$-}v7B zHZdI+#_7`RJbN}Iez6?eOuKAKfM}(Qp&_wBkp3u}h+1f*@LB?l_SrQzo zcvW~SP0_U|`;e>uNtkfq>J-Ad?-DHIc7uGVc}C_KyIF3Umpj_+?vZ1(2edUJHngv=v8(+D*Yllzq@v`C^X$~6kI3%?V= z@9gjG-J@^c9=*F*N;IDr)}*5ocyntLiRvd}<5&f+`ZtK`w@zA^R&)C`5vdW~$W4;n zE6DWWd1R4w5(wWIDJPNYEm?=>X4JaKk0KEdQ%+n@4iv#xUW|`08fJ>(lfJu07ke_q z2=cLwmMwy>gyW&-?c38RF?ZN}yQ1kd`^TAj2`f#f+2z^JHciki3f1l-9)z~7DjgA# 
zllqg3ZM`T|UY6`+h626hGp$-gh6`p^(0-7xp9)j^__3;({X#&qTpv|7qn@D7-#st2 zyTV%J59B4L3}LNH0lLL}w*4bO_!p5rwGY4Ra~eQTNO#-kg=sDKATp(O(n0&GD9nH^ zY_~cEUm6#Kw@9Yiw?cd^Ix>W^) zq%Zs5jY7VcN;$7>6ZFgO0C9P57xtrSh&H7~rVZ&hT;3Jiu@Jy*;l%)Z zCJ@}^uSUEgij_@3?OE_e( z8rg~!c1e_QQeP0>p#CNr<>Z^lj&husIIeonM?^*I`F)FIdG1kB)XTL8EgpPn!@&s@ zCPb}F6pDDZd>zBmCJXv<6c_N&v?+KOW4SszX|;lWc}!JLO;w{4;fQS!kqoloX5AOU zw=FKeu&d5~CP=iA%vwcqvCuE1WD%Mq5bVY!OLA_l-nKu z=VNl^5Ib5Kvu?tEcyZm0iuYk6kPl`>>g~G%=|$tr^fT<7MWeNgkCx|^SiTUIZrfhj zzU+`aEl4xE)pj;d4s+~>B2k#puoO(~@Ya`H_IjsUT}S}EQfS$*6N2>C@Hn#e?wawH zK%s8=an8jNdrcJIvM{SWZI^|$b~wYzr1_V00Cblm?d^bvAeK_hoV@CNBKT_Q)4`*| zZu_!mRD^quZ1!;X3zDWp2oh3I-!=FrO0wX>^N?~Di4&GW725y>@Z)TF6cZo`8yuoM z7E6lb^GQL9-CQNL$}nyRd~AafD6sDbOhr|5%MqpsdJ*Awt4$Rl^u~IphpWM!6OAp1 zJB4{wiCdh+IqoI4O1@MIMkdXywSvBgMQjb}?P^iND6HR}+BpmRgmzFwpAVcA+(oc5 zy&#%;z4;*5^B08}B<>#Kk^p@UuheFa@yk)R`d;vlOS!={S7o|FP_G`yHQ$hi!!t6i zYk=;#VY@vPVEC$y6r{vb8=@WPej6Pqq=q;JE1G@3BMR!C*fW<1`q9A(6ZhCxS~`CB z%(h7(0!a)jDY3bNROaZ^V+Uc?{F;+eqkn|1Z z+bsbloA9Tdfa=^`a!(Ox33d>{>cYqnJeC+|`5`!8 z2+hqs78ikb87JQ2*#e9t?n1Ynm~M*$L~umUk9!0u!sVsa-c3k7Tr>Um%Zqv-se{nu zfe??T@7ANYofq_d70=;ga3Q4Ay<0E4`5K?&oe%u*+57Ia+XZQ0VrE5A+P%5JlQi#L z$Fm3eio!5(3!AHpbE0`pE$9P*CetIuk!<)(5*7mxS}i>(WoS*@?2Zc8Sw?XVcR z&`ANnx4Ft5k}qvTLK7nB+O|j11cc(*LY0a@gHDVbI~Wdzi$wBM<>V^&%Dz?g@(Tmpr;dfg&WJ|yNK_`u#qn()_Lz*Z-&bs%gngQkJQTqcSZ7hf>!_G64xoQ` zhV>AY+MTUvJUe-leOHk37d2PMxGycVhzNcg;@`4~$zIX2Mfd3!#j|CUJ1?XJC!?+f zCx?_&AU^Izwm}4S?&y)WQEtXKb}6cDbMWZJ=xYofFRwx!?(+hajYCC*4NOZ2cYf6} zdp&ry&fKfB!+!+*fPR!09{a+zU|+fziu^If(koEHcw{B%Eehqi_#8@fgRcbSJkxoe zDD*%CJIG0k3u$lExHQ$c(eWHFv1da9{n070S#IC&(%+JAzY>8G$nfk=SRv}|ccLi8 zEHrysO32TvVdyu?<@6a^xqS`$NztyuKEr-|^$P9kkNCh7 zIktXCZn7GWy7hd!R*0fFL&S9wRO~8B`21|27VN6@5s?zLtal5M?${m-F;g`)?oE2B zeNPlEPLT;rU;)aMj50%6Vi$+tbh+1i<0|JkP3El$-WLW19MCI1d z9ucMrP9Y|ifkV(9UAtRR@Iu14d1u5X2~q@}qj&>Z9T3)=ic0LHkYa|cNZkIkM3f_@yWZzZ^uu?lN|2^WY*OMrh-3_)$UX+kXyBGnr zf?*^}zKYen+L}c4E8Qp_jbcSb`jj?{>Q~Y6(Q$b+e4(|3)SRyG{q~k1l}g6d7*}5p 
zq8)RN?Xr)^O}6a|n%Ww7cu2Nh5r@~yVFMM$_XAQ8)%IkFBV+6sZ^;v+hKXq2c&iZ5 z^3XITbzrWPU^t48jq<64%?ZKzMqqyEs*S=h!90{NRoiX;jfa>cMs{K?*V?}INhxiW7vlm68p86=!-jm7Vfc;8D z+9JPHf@O9oKsbJ+GvfUKvHV^DciN3N_`oP*Sb^fWj|=)*@`T;fdJEIWtYP-`5OjDf znH7)NP(j~cj<0VyUxdsMgmxGbEwRT0eQLRpaivzBj>4gYyBszrj^@L#t1hx#3ghP< zul-S!-H1THj4eT4?}UJcZuK6w_vH?Dx&1~&3fT+wL$8MDzgeMsqTj5r*F~T;Fy|+Y z{{IY6=UKKqSO1syZJp)JxOYIx-+UNuLivVIviq=0d8&O|R5q~gu>(bQdTf9^I3z{f zXhFU`B1qp!&r{4|LX4+JFd@j@0?S#UD8AnPc@nXwg?+uV%djVE2SspWc!e9rbrRNm zM?@Nfl?c6+-tTb&Fi#p^|p@1M;BM9K_I(#%8@)1PxM-vD|uTEAk7G zC{}9IPVc5tB-C+_vCm#r0Pju$n6Ghr6`brOD5L7C`^uAj9|0CA>S2cde-U^ zLQ_a4L05Y~n6_-*j#SBj5EY}$@C=U>(s)FfvKBWdVv#8#ZE)%^D&4l~k;(EzqMam` zC~K!yECOkE!Saqx5C|JcxR|tww4aDZ@n!aIhTJ4bfyTfIk{>JmAZe`BIbn}*fkOFe zWXG}#Y?H7bQLw#fM2SG2Jl7_yMSv#2s`to3`(5yAZh{Z))c|s&W&Ldd|0&?vhO?~Q z`3WdtRvh7@t_}YI8!ak{AFbq*_k@6&rLn@PdP5=6CVNUm%HvpyVSTj<5$G`9DlW}h zVsizlaod)1TNS)y#J;B9_5^sTX6v8oC)o)>@?zVv)&42q3&;)_V?XwvKye&p8_rJN zY`5Ozb8?myw|GeWEUn#l$j>*^kdb<7>?Z=g`OQ_ZlM84BHq~JtR9ZYIrNeWKXKq)B z;M@J~9$oLT*&#p<3Xmn)YjZ>(!8;$gv!^W*pq3XmAF!HqOxDMUZ5O6N3Yr@ckgz5} zx+vjPz#a^q+#bzBkoS-v1-n#ND`jm=**}HgjFMND+P{LoW|{_Pg?;$HiJ|tFW)cbE zUn@e#*}<)5n8o1c;A5iu3cXYgBj;UdJWMQ8!-ai|v-6X}`iLl+PrAyqJRz!>k2;bv zx&4H9?+7cU4r;Q9)RVKL%nNQ3^uwFYRzV5dxIG~RrRS()uLzQ3>!dl3Yi)`BO%y8Q zwNEm*-x0wV`t^#fUG3VN!5ZIrFQ90+g0zNlMOUv8`=kF!sO;6z4k&jj9vZxuz8Y2OmZR)GK(me)k2%`i`g5$pey9H`LU+5XZ z|J=cK=MPlctN>Fr`^3n*Ccsl0+=r~P!*|=<)V5077a-pU#;m<4NEenxbe0sLVtiX= zp~~J0o)~$bZXZK6sA`2+6;s7-6(l0cmberUpvPWc)?l6GrmY@^-#ltvLo7CBOJA#yvb^zJn!kfgjdFqO;_iDnZQA*_?z1pQzg+q{_zk9Ptgl8u$i zqdO_XjbrX*~HFx5}(ZuG}Gn<@%r zfUiYtvd^U>pqxEwRl>B++4AXD7b1^kzvr(-w}?V!gW%4HShEnNixg=BID15K`*}!U z#L$QBm%)uXY&$78O^u>~y;8=4y!r zj|)+@5gB9b({fXdXd<_HFY4a}5$khy>6W^x^0&OhMqRBx*4l`WLK1WJBZGS)59Awz z`;pNZRwK7>!u%4N)L}}<3bKl+S8Ll9pkuF(Qco{BVS7ZT;6)Ox|4m466xa3_<))}y z3S6$(YHx;c+&SgiCqC=_SiY)w47fv(G!WK}+cyNrm<=;$365!*B2fRHUHe!}ZePmb z5w4^Y!ek;1z-KC~Bm_-upKptT7mfH`_F>xv{ix#w4`Tp-5P?j&M1pT!Km$fx2Z%H* 
zwQD{HglptT8u3frz?b@wt_pUv<_UziLYj1l-9<)WTEQ5~RSinH2Xp#lN;rL`D)SlaQ9)AVf*AZx`eJK5j=uAi1HVhuV8`vjs|3 zH#&x%jb8JkUH5tDG&!2HxGas>CxrRB74meWhxtOZNPJ2((&>61rh?>FyDn^5>j63P z^b89Em;IlWFBPR%`o**Djx_Aqr;FViAgd)}Io4IsuVP$%%icvsI{fPuP4*o@byBb~ zCYfyeo`@6x0lUo>4-sO86Zo3gl;Afais|^N0>0yh;+?QnBKZ7NS#(#g7KMsUWOKVk zZa?-O#oUqY6;_|kXBHo_LxN<>>2HhuQNSlXHd+9?)U{vmfsy`gpS;WN6{O%!$tN6? zYjh)je+cuZ`qh`Gth0WCBnZP#fqh4Sjx39z_SPePTlTPgFi*GueI$f_bt#Hj9upw@ zSm8u!U7Bq%5vbWPbkx9r#zts{DAcT^xXioBX<5kC(T3rB%IC5+|0zj)s zXM0|N+Vdtm*cGa!kzj-sKykfgnkn-y(G-e+c@(q1_$?+A(7uu)N zl%hCV&I+xYD5R)0fZuxSE`o3WQ*%#kv~LLep_EJ%*`tEA33@2DxaZKlHZFv~-p6@H z%hQ6Sh@mgkC%(MKCWt^UFc`UKix=d`$ZM&HG)6%jKHB9W1wyozm)nXo?9!u){Y=nT zgOv;|p7yH{Ae(A(r_v&*I_&Y6JoZwE;bbb7h-vDY+peTB1ipUmp*|}L^*YzcOVcL1 zQ&3~PuF9Ek+5s!Nbrw;Dj3IO~AVi>$Pq3*ODX4n^-9&aAI;Dzjl|7ygIR=fSPY6=} zu?crAjvUUDApm?G`{3;OA3@4b7teCn;?D$sRve7}*o=?>IU_P1=Q(4xDD-o_)3mof z1jm0&EX#f^NU7NwY_?ZJ$VfpEAJsnvub6oJFAGqvB#gYp;^wuF-wuT~L45_{7IurE z9}0|>B9EyuON?%ycKT-l*1$%)OF>EN?Bw#Z6`Rghthf6@S`zxKZ?Lb20F-eAiYD3D zJt2x#v+iH9?+166KHV)Qx4)a{7fTf9=Ab^KR5V7NWgRUW{Tm|D zj*teU2`g$1X~FeRLw)2uy2X71}NeAwO) zfdXGxJJwP*mb{E3AY4*^@(PHL^3X2k(sT($55T8)+n`f1@bvQM{TqEI_dw4Ci<5Yp!@UYf>d;dqFW+BC^M^B=bNMWJw~Yh`Etl^u4~9S|rSVsX01xHiBGljpRrK+lMv zKfpWLE(x_`UexXrUAq>Y-MCD04DW}mQ#!cxYSvde@!v}87E(RE+i4^15rV;-=D5^q z(5EVe)B{DQK3Ff|w&@8WU*DE0E)N*nSaws_xSBIXfT<-P{AQx7XzMV=;qAu1Q=Z;=X1e`51dyt7>=N47?j!(pLLQtAaVH^P15T`nBlHx(7l+Gr>0llt z3LTP#)nN3KJQU#Ry=1W8h6z%b9GSBj9ss7`BQ{QeYP2_QwWT4_s|#LP2D{;g0MAx% z#ZY5E74&&(+M{auq$qv}XGM-y+HZx`uCZs}Q46ul3L~(@+I3&TkMe0^^S*S0ps!I* zObj}o7s01~dCJZ|?b>5sN(YG}c9yVjqzQwgEDkm>5V+42Q_vPG6G4?}m|;Jb+kMop zSRz?(^8%4ljNx1CjtjO!MD4YL7LJW6S+A@ zC+zK1zo0Yr#xMIY?G<&>`g^M&H#^<$wR`2Hk@EBgGE~6Vniu@FY&IRmw`n|!EAn9> zgcI$6RbaI}E(%S@8o|DZM1CLwQN-b;JEdoY$$`(p0h=vAVYz87VI(aJP+LH%vPRH< zbEN=m_MX-m+pI2xq2Arl?6gf#3&atI+Dst`xbOo*vhS&C`3eSN&>!#?jc=@G6bP2`?|0`$kOi%`!|A!?hc;=bY6%_ z&|aOpSyIp^{b&rGaK*Mj6fNz$YizCDf&W)a*{+UzH$DM1@}tR4%w(o)!}82Ns}qR{ z%_RyO$gKf&#mGSyYY6fe;WKFsurPg=ixVzV3$Oy1P84z+L+y 
zUU7pkJ&z)Qn0-F&$wD8sTZL&Ct%>yEoq}{d+S<-eUf?u*&af|wNCk7yCiATjfW)1! zAd;4iHYg;*)4Sxu#s^54fe#P1q5v^dae3u#dsdL*96Jox&J+R4PEe^mu^GaQA;jcS z$brI+CFuZVEE%>k#4e6B*ISK%Utu}2OC5WY9TSZTxvkZ9`;8ziOa?<9w?70F&y~iL zKMIkVmRBUQ5|$Fdmj=nq61)1V;4qA~BcWPpH>D}CeyPQ75rxLd7&gj2E4QBksAA(R zbEgRYlgF2255vJJp|Gx^(+1sp_v~(c1Slc{yew|t5^~+H7iuTEi;Ywm0XKRt0`i!{ z1RosI;O;%wzAr$nod6U<4*@o>b39{_08h+BgmkAx1LX8o%PVSJkW$Ad#=U;iXGHML z@Mr|h57R??4 zffguEq8{nPcLRJo4@S2&%yAzTg)(6@c0bn!Q|)6S5FU=2xP2x7OlFkW@Bm~)#zqLJ zH%1J$k#aMZodzXzC!v-5h&?SL&8zV%ZQWv~aT|+jn-=0e3J1wpAlg3O6e zHwpXtkBFdlrN}-NB2dgM)XsDgq$+Hi!vikO-aCY8pYh!Jr5i1y1Fmjro3Yns1&CPx zEZT3jAX&Jb*HWbFZGH&AtU161_HsdACLW)2Y-8|loiVd@?t0rKNDbLI?DF>XtWG4_ z`XO{lY>}Jf!-1x8sqGPk7KFVB9U;x@?B^kfT7K_zEgK)8up;h3ZDWhVFv6oK%PzH- zglIK20#EA>QbHK!qhmQ!osu{CU#?&e{WaXmA0v^{`L@MAA&OSA$yH}3Pd{6+LFbD5 zMI`-E+AvwB5D83(#%!Y8)K4PXyyT0yyDE6MPo?^DlRib^7+Ht4m3zy*=_4ZE@eeJsS?qs*z>!I7PFS7ds#8}7Vmc8~ZL0Z$< zL*2)1SieAEdPw>3PEQC+O|S~UM8kfi0{XILa{RMFAz@y#q)>K5NQa7zkC(8C94&%R z=j`NFXD9CnAAx9ymz|}5_7g!`vIxo7xK#vCgp->$CvCEz8WuB9vw|18z|aV0D+GN* zVuLFJH{5nnsM-h~XR@{V+$jP{$BtFO-1Blk^hsWU{WBn>$~c34`aYjeCQ^d8$gL5e zclLcS)!8>hQJXnV7T9~96oIA~muRkm&(X4k{pTZDl0^paRwYDZ+UnS5KM5(&IQp~} z*mM!Jr0k#U6bEkPz1UW#L$N}_v9vR!!RuYrUKU`JcWkM(%Sn$rCg@7Lwv&fR4DP`X zakC&5a1$m6E<7_)ZoB>#ve%l)Hq)C=$0Ce7H-Aq4bd{Ua-c}_hOZVHYqSJu7`NMv< z5H-q0ipE9;z?6*JGXj){F~*{7KN6(ZGDc#Sw1GY?r_fbL0hvn2y;U$d=k-&*^LAUP&W96zR&3;QwVOj0FnS%}a-UQ)!9@DV`@ z5h)!BhYQbvuZI-W(o0a}`qvNuJQBT`F?%;80D)JOE3joPr^)s9zI&|JgXK<8{%|fvU8Vm?)YjmLJ-m!9Yy2oO5u)=;7WI}=}<4YNnUhp ziA?m=3D66dmrl97;h-H9)acj47LQqI$gH$qh{&iJ%2(iINBD}Zm)AUJ?ExlA;Xa2i zJV4!@u3BP$7bK@-F4OM%E&%Gy8JQOQiV*pppS9!sp*s6sfKKpxm*oVQD2a_{f|UyT z-o~PnG%m~%g(CIt(%t6DO)m6PjjalRBU0Sfhft5A@_s{bU#Q(_N9FeEF`e@;F?l9L zLrr7c&I$M~p!ZNxu5Z0)8r04nIZs9JhZx8Ao|tJL?&^y`Qu+1?0m?R(?k$(w&lI#v z$854N$D?3&Xa^bL;_J0tP%SorQ^(#Am?G$gqS1aT0t?N9ciYe9q@(EX%g@!q zASJ2Izl%uD7aNb*+j5hz+pR6J&vgSbWSaLP6#-upQ{;;x5xRBOe*3E2zD4|=&%P-N zCDF5o4HEF9l&iEP>TpExpAct?@eudDRi}>F7}1sM?vp#%IY$J^wL`j*b}I@=A^VQP 
zmJ{Wc5P_^y4cmf_yZNH18Y@@X^C38TjTq=CSw9wnQ*jxQ;uZ15js&EzV0jADv~P++ zJDr+iAL)+M_a)7Egd}#`^`cR=QPcNU<;seRa>*kRufeiNliusB3cM7cl8mQti* zTO?YTSUO1_cA8(ACrfQr@M@jJ?rTfyq_wS+X0ceeme01e^70LcvV3%uG>YICR311E zrM>fct}3>liQvag*#s=QIZSZW)AL1d36bI4_sXdq9?xov-hP zENVU8_J+uerp$hJREYAqtwO?H6r@1obx-wj@FQgFq-s0Y5tI)g_p;ab^fBA_l+*O~ zaltTOXr73DQUuzkh$H9ya#Q?7T&lVIhnUW;Xx$YE^Xyz^%YfiZuKhSgJtXL-sn{9V z`^81+7+A=5SV9DHdJsc0<#JPj{5T#J0{Uo3AsKJF94{7&kB}j4uA4R)jG9CK(QdudpABsZNGWrj+sd9%kn#B=C(Nsq? zQgg5S<+eu9H%-^xvLopTlqRzPE(v>H-o7*#MAJRvZ$f-`_}OKr1%2)FIpSHXu!h|P zZ2>L>-yEzybN^y0m&vlW#r@125_wRi76_O0O6R===qKkT}Kq{EbC<_r{~&KT*& zXs-Yh@Tq(4DLH-V;m5$n+tVVD0S({C0rMwhd+hL=VxxdP`B3J79`i>)yFICi+uoYbqPBZ zOlw(Z5ve}+DO&d%tcwU#KUF;+wKN_w*vM>bA~L?19osiT2uO+MjW$Hkf1=Wy7<bFW^6;Zp$OJ5Vl>!FixxY!WL`|qL3Tg*P@bQX{EEKbfEqixPpQ8q$ng@ z#^sHj2{9-PTuIUqXMYfh_K0&$|Hly9b2KQ6v!ZD5&V);AioMwpV0)w2RAK)Rg$^%3 z&T6y0D-?zY3@~h8MIiS`3778g3;5Q7hkcx0^FG@b65T2L3gg)L-78FT&D#p2=Gu%A1TbGBWyEVE~u|oNN;W_%u2_0_f<+x)xs{cMm>Q zUVbi3Xx@{s>B7{O=IP%1K3kafJ~JINk(<)qgILt}1t?=9ysMuUq;`m}qw$T6#6de7 zLbT4F*E(~*{Z-IshnlS!)ko|LAp-ZaNePgB`2z_ope^o7uc2Q`9geVUe zE$GywN`nJ|{70emJZ|4f$3H%ewQrm-tzMXe8kU%V@7{P3M@wvD|0wvsvV#xgjCAB= zE(h#Q!oIb8^yp)o1LS5|XT+T$YI75koreV|nHDh)E%WVk@I|3&v7Hg17RVg2p8iqD z&yzRiZ@qk=+S*0&8F7DOe-)$^-E-eTz6)ZJ==1d}u_U$3RC+)ZVmZSOQ|u=JioJB- zRGTH(F-7MEmbdvLQV`50GxL@TG1Zertf>Ewt1|(ty1MfJbYGlKb=uQy+K#nawN=Wl z=v2!phzJEl+iL4e@*s~aFD7}xpw=wx$R<0o$`-;F!WP0-8)((3h&yU+UyKB-ICizm z)b{`R-h0C9{H><ATF|sGFtJ0{{mB6=iC|g zRfVZ79^FyRyF!F;k>~i(+FpZoivxNYz0xzaKlG^G;Nk9l_+}^^$F`%cL05y3qI8Wz ze7UH#aU#-E?9o^Z> z(QqWE+7|@kdO_;K{xb)MA)odQ!PsM&zE75L3b(5~&DkAC&K%xhqXZfBe!ctIXoYDU zQY9OZQf;t1MW8MgS=xT&8MDC&xUF-@V2vI;(6$T4fjK!J?)GkBUEd|crEgFRJ}2d` z>d9O}SULrEQka!kU4#~mWre751RQ1SaDXu61pB=JT}3w@Uyt?|K`l)j>io^aeK;$G zFv(6IY~@=Sp_Wa(v~9Z7h8Ng-$`YH4`T9zG{|#XOWNE&d?SnbWn>8!WHgB;{h(eyh zm$g}dw$)t$iAyE;~v~UO$$C_=2*Un)DW7%%jzO4^!yy|cUzGF zFZ_AQ$z`b|j||7BR8&r~a*^pyDJG$rgZl*;tVF7cUGPU9**Rmu>6!%*(uCKS%&EL^bXzN8JSW5Iz>Xk?v0b9YW?j(-!_9lWoS<=W 
z&<+Yx$E&+uZMLTb;%n!NQyDQ`^VcI$T=T*Xdrff+1oeMswEb2nb`H;wh+Nt~M4>BZ z7Ou2QzfCyS*`r^Np4L-1ZY1Kx-(!75pw_!Czp|V46`*IGO;`(k$SNI%pY=-HEEvne zfEDT*9*xKv5Bs)7Fcyv24ou+YT>9~C`-<&3Prz?mtoK&?lL&F684Q~yqeFsS)E^=Z zDwNIq|8#I-*@>k#RDcR}4wopDA!r*00*4gb)$b6B&8w;|E4D(-f*^5Ic5jW%7hN-pO6!4{L7RNJtq_sX*CTs{Z53j~c#}%f~dons1obZH53s2;L{emEc zz)mf)FAo63QO1Qaq5$`a5a&M#4;{67QAp4IC1JBXT8?fv9La8&ZnHfaqitzhEudw; zw#JSr9M|Yd;4}?bD2z}IJq2_~vG06xdor9so3q)g#_$0wIxkxD9yvE4l zWjQ-trF|wxWtFwMl=fFejYGtYqqA?9d0CZDEQdQdScox2;4H-#5C7<%L0-7ypp6zK zj!plvlzmr_c}^mK&j-lv^l09%8n7f@D9ETvNapi5T zm9|QNWBlo+d7UjSoipa0UfFP_^{8B~9#oXp94;-(?U+y;>LM;iur%#)k*M(Wj0N_b zN7GfkY0hMUzbq0RMBZ$szNq$At_ay2LJiQKrbX8 zz9<-fY{N5r$1d^cx0f%2%Vb~moc(ZGILf0sXC62`Z-ISZkg4T1$Mbc5P@712l0FQG5tx;#woS!$v4DoR9LvLYqYyo1Yd~nuMtjPFWU|a|6JVS>*R8iZyCMo; z9Ns0soanKE^9p|>iI%q491PBEbo8_`rHcz2=B(JuO;#fs6_zDvUa9RCq>lE3%j~4W zG;=cEr0qjP0Ca-wSlU31_6ZTxQbdbjQ(x(@bNhOI(E=nnKLp(Yj#ziaQy6nwVb^+M zijoPB;esS)Vdhth@0RcJL6vpA-Q$q>GK74V2*w>V_+8_PY`MC(RrEMp$R||VE@8&N zw^}IzwOE@IwXvY0f~(~{JMDNb=LmT?ep5I$=-3=I$?UT?L7=liscV6KPLNTWoUF1Q z3hU6=%$nb?zM^rgDF_kXt3pg3Sg}$RMxbBwbOG6;gNPx$#1@^(ABw%HA@7=rSo%R`_*qkEt z$FR`j1coPC^}9?2>OMVh_M5ef>^q)ahp35mmq)hG#C7CWyW2Adac(V#Ng_})Y`I!n z;Gk_Yllh1MgEc@V#T90X#uvA3(9tGfnwTk<56^ub%iJJ&dca;0rpYX^!-2t;6@g0F zZgCNnOFbFhli@8S{tB8H&)MW?G84%^PkVN8skhIGq9XNzCXe4q5oiJR!+|anqOI9& z+ij-8REDd`wYE|~v+VhJBVsjS8s}L>FlZ9m#@zF(4*7|!WK=o@Rh>LRVvfYlh(L=_ zes0^&87SX++zds25$aJ!n2*d~m!AEO5LjsalzoK)bHaMP`J0>0z8ej`U*F9JUf#=1*IIAbeCjK83Pm@c(l!c;AJN91d5D|XncT)Ce1!H;rF%DqwdHR<%zq?QE4=x@* zFq}m3aizUJS{YyrP|M%|sVMz|#YO0n$h<>DhOj~$U34}lS%Fi6dQV+;Q==tCVKBJ! 
zU+uwWP4RA|;0(KM;YAaL%22qJ{qsTY*S4j)uxemz4?AF=`smx1mxnj@Y0y z))tFxvM4YVx74vxZsSz<X zAhu$&6po)r-_Q@+Q=XmP4nZm?>u?}89N02`C&(HVlPWle#UErO#u2$SQ6)CVdrC$F zxh<62`)>urFBtp6ewY6^Y#$fhYhM^l;_kPviAKg@14rI!{RG&pf_jDx6{41`7}gxG z8|-FL=*rdC^tO?nk*fidFTk+kWKMQCf8g-!p_bEAH(IendRS47RS-drw}@S{D^3pl z(+aEeL^w^9rsP~fnt~DuJf%lG^7O{TXI4zHy@D(%3hbXR6CFoHh$rUocD(O}UD)Y( zOw>3`IJ=Wg;5Jd{Ey{`O>^A~58iA5&?B<^w8B^_SoEVz~(=yM8%19A_fK z1Zg3SxcxRmm?$*q_VRT8@w&t$s}+I4hS!i^p2A}y_rH~*QT^m9GzV4-z}|=!jPX;6 zt;ZK@tx;t5D|p5_nK&8$Ky(e5Q9Vk z!NaV*DFU-M0jAwGqX@?V3)M)Dq&ZSak%xZkwMs$fv-|6_`xn}G1Zg_2!x{UbK>P)a zNl@C|!sHvv+?Od#Bg08`qYh59MWK;s$g|tc6{6g;`=*|0-E0d5V^Pn;2(Tr>)YG%q zHQj8z1Bu1j(XNcs8{as(F2cCT_4vs55@X1`K4q0m^~;3Tq4w2}*qcsaqVUeLmn*_= zyjplMUnpLmYth4~p z;O<(gP0>=W%G-cOYp^FfUs)K)Q@dW7VUIe%{mUYIPJqVr4Qlf*II2qK^LX_ep}6FG z^|{J^C&(a&t!;Z$(CNT6^xs6Mfzk})_m{N4i@@NVo3iq)15?h;+F+NDh7u=sR2r*B z*9ucI28*^}JMntK_&vKRIZ5hZHbf)}#THwB3Q;z@C0E24y+_^dMIcN0F!GWU1=To7 z?@#HdohI>fSUZ7E(Q)jU@O@V8CEdgY2|q+OS(qWHC`#b}>!Gf4ms2i^9fEP2EX!bY zQ#N52dXir^&3|j&R9ht&e?DUT%jp<$sM|$l_Pbrv$9|%4>^{cqE9^Nh;7r5p)2;LD zoM7aEai!YZWv)cGCi{ot7zYUgP7rhDvM~_Y62iv_;chrK2olbc6yY}&6&K}b^yzWU z*h>^GCF$h1*m#E=TIV9Ck`dJBL`#J|eFuN7UPMMij;Ts)qYw)RJsCcb0TzRvq>Vv>PuzDs8-lF2{^(W*NbgVE)Diz%WbYO%{tY3s=+qrG6W&aMjlmP6pe-j z&edUiMUbV2Pb1y}e<>6{aR0|OMTcjS78cJmZwN9*of|MT;su};hg$nNq1fayegXE5 zF#V?bsknUDW$%jMlM6d7Tt0D>z_fqh4r2HvC>hD82W5ayiWuiFRSGxPz9a(e23&U_nNjVNFh`4%#!rM52|*xh^ljo$ENyK@voNt3Bf5=Xboz zT1AQTa$9K%TW=>sh-*_)k{peWM4GP6-?Njs^kcBT#G|4JjMLcLM%z0IQ#CTmEA4{s z0lF;3@IH)S;-1JL@A`|Hm(q|KB_F$7Vc+)-KC{HnWoY&=AvN|*#j5DdCl^~!0fvL$ zA3w>Pg&4c|5@;g?Stfct(so8CCwhtsxy()3d_l@8IJOjf`T|S`v&WNkH7ZmbjdzL| z$3KmfzO{Oe9{sNEYmW<1UpOx9uu~p4S*MKOIv6@^&|v#rSHgxP*@gr5o?sm6;Th{i0TN69Q5cLYm;-`*QS6j+unOcxZ}TY~ibG$Nya@F>jPx9u*r zzdNL3-$k^)@13NiW0;KJY}X4=$|;CSA_&Zf-7ihz4ST zXQtp0gU8xVPh5g_30B%Y5*8KadF=IE+JW%Jek<6uyG{>Yf97Fw*WUAF?0VDKBLDp^ zlF_TaJ$u__3e!{$4>#I*GG#J%>kpceQM zqzomT$A($1S(Xi1OYz&v% z071qu$y24YWdn|_>3%K79(zX$ql!Zr9ybRnJt&28E!+(u+Ht(mexRV_HrzR8RFY~5 
zV$EBv+^eBWrI<^z9|_ayQj~*jngC@MAr(+=GdxPJRBe@jW_+vk|81?OWP9gVx>LwM zNx_31+i$JTPWEF@hs+0E{+Sm-<+t`9Xa6n8G-da%v}ZjE1;BbV&(G%Ij8|Iij}Ehq zJF{DHnD;|6#vclIfz+`%Df_ZWaS4`W3h`MjOna$n46FBhf^;)4iM+!Pa%nNfx<6um znH{KV*B#Km_3P|MO2noiQ8Rfxwi#X`d`&(-w!q`5U_dHyyv~-35E~_3c-c&OM5H)} z8JV)Ew2h*4Z326PRJCluC4OzdsR^*$#89SNt6R_pMzx3<;VZxyD` zMa2mDw5_w>3)4w9%O3WwKx_`jh|0DtMfTylAyIZPc`Z};pAjJrQqNv}?J_}{n{C`1 z{cGA4qOc%j7`D{D;rZz@)6V15wIWbq8KPF!&p}2mBxsn+gN=rt%XhpmtvyZ+zsn^ragEPJtS$KcSZ<#c)W{%$@gEAu z9WLA0nB6)j+t?%vGl3rabFEqvGbv_gZgJ;1?jw2~d5WlvPHsifmdL}WsP81kg(GBah0*nqWCXq&s06? z%A))ZYY;thfhw4S{9qf(nnb0t)IF#K+fhf{3Q7y@=R&dL!A8a$d)nixQc|6JM<_N0 zOX>C}hrzjFo&D7zrbA~U&wYI|7B(h1fg`aPW;Z&81-H zhXiTviM5ifg8qGm%h7A{Beu|S5 zc&5JHqml*17|9aQ)a%_Ij{!$*yi*uaSw&6eTa74PqC?0I{~z<57|^ucN)VS9U;IaH zpD>lfyS?5H3(&2gmwU(_bC{3kDZWRq3sV2^Qaqg}3pl&IZvqq=3tx;=&j*C!mXW+S zp9k-MBG5XR1|Q0$7{+#7Y9A3Hwx%dEK4bsp1-7j#$3W*rLX6+YTL#;IDooRkKQbxB zL|>81I2ewTbr<&YM$_J){khq0@H|LNr!gwt-_u0V!_xurPpMFr<8E9@>o<_i0M{*s>bB7*s`BiSZYPo~>FBE}y9jzjT;m$3W1AaRa) z<=!Bu?q!hWJ17vRt57<9$AlT4&c%<~f9K+mqugUBgyU-9BUDgiCvyqpF3nyQrcc55 z;e31_w%0AP*F~fo3{g6-+WyBOkM4*0R(~>ezD$-J>*rz-LO0h2f#F4Vi72s0gI)3C zzeJ*y15nJrIadP8jhK(`B50>!m)UD~?vKbA)^f!pjT1TU?Xzcfb8Y$!OoLtV<2suY_YC#^e{m zm!#PDO^ik1Q!5j#Rj6lT#4o|H-IjraKPI1DD!Fhwj#_mFPdVkH7} z4;v5Cj67^rp7`vJb!T=gwFd>MCM8Kjo9dCj-n;$n6%W}Fk1WFb59Zq(v2k#q_?suC z4S2D$_XPD@lkB0qbbh7?0waTDwq#qb5Tu^|JRVorH-%U>WPWK`a6mUi6gB!$ZvA6& z(PhV%w{4#oDgKRZ+7@i9?3B`0g%{hlPIt3eLezQs@WRfe8*P~vgXQ;Z^Ey<0?SLo1 zp<(j)5&Mb5mutB?)Y{(#X+wss-#%O%%c6-Rsw?bcf*MCTUOm7#eoX{Aga|N`*IS5d z@%L`F>vI_=Rg~FHf^lh{SPw68sIYfkZt+&zFj3+;0gfII)CT_RC`P_>X-Kx-9+ zny`&jX-^7hJ#Yq{V4WUSo<?F$_b+*DFFBf}Gn1lQ8+B z(Du$QiLS+kcUlbb zeY_MH#}uY9%CmNXC=@dC)?s#$!m)yJn%OT2lNn2uMVKQW;7NP&a~`B{{9Oi{G#Co9 zA)?V>7Q&!p33fK`@cdk}Q<)Mg7j&*BPRqpA;x0act=edtR*J zLBB>6T6(s5oh?(C?zXS44W6BFH+@ic7T5n%6*7&c2kjaBC&NI^_OxQD0=8z`_JqA8 z$aG5J)7m`_hwEjWAkI4E23&zs!~b|vK48o3L+Mz|nMHfgZk}cz6^t!KR*ucyE)<2O z!7Cv9)8`#kpbUZ;cTXK$k!+WJ)BhBOX7uW%&!<4wxWd=j!y>+0G^R!F;gCMIqr!2V 
zdC*6h-7Y1`mxo&YbB+e?q{vL_v5;sq6^ zYC9uLBf}ANwVibsC)C~+q|L#E*Id2+1_}eBy<@EX!IQ(ZlZp7ND`H`%n(^2=%|KWGvW+e9XRKV5xEac47?0st(B*Ve$4M$B(2~$@Zm~GaZ#0ICRZ2P&Hw%2YgTy2l zNn^c)l4HA$ufbh_6^TI0k;OquI>~-E#nWeWo?0Y8!{vYuhrzbSBlQ4hEgqRHpO`Fx zZTqCKeiwA6DICr*>vmd1EobaTChcv{k2##Qy)Q!)3jquF^2QS&z2gw5m(o`IfZ`!k zCng`;RZL?)ECT&1M8L!@5r|{T?gUfaQ->31Zu_&BTSd+LUD?a2HW^Tv&|Ca zpMKhhIFKw5EgmDV@{mT@UovIuMUCS=5aSh<)-1wTKmJ9y=8}#-5BJ_{eX^UN=DEfjR zc=tQ)D$jTF5FGKtc8y@{8l15dkAydLC6IaZ88*~&m%zrWuzVq!Fj+2LCU{C3d3#~X z9`cmPZonGdD-=g)c(QsD16dJi^r}n78STdT_H-`c*!bdKY$ruiHQJb-6JTL??l>C5 zGl=qwAZT=E_d)x!hzySO9N%m34HwbhJ6Kla6R-ZzGfV78iit}|8=}n=jsuC~Y3Vqx5GBt1 zxYR_Xnzo3*M2t(7;)3x|u7L7%UfPZe(`Q_Bl-o}QVreCbi4rop=w2vZUk0(}&kM&D z(q6wIYKmHK5h;TWIbnkZ7`G8^wKh^=sz#88*PkB<#Vv$6mf`MJ9cQf2a|}WaDU|o>LtGT5+Y}|B1#(;y_&u{kpj&7gvGXKt zt}yA#5K65qwWWfLRomva&9+m39>JLB4O07qHH*ZYUWa44 z5Z*NGlTHbWN$d#w?K4hEmf|uC70hzGR1|uHvt_v8-xQ*4xcAA^27a9|ZG-n9+wb2N z^gS;c{A{+No}_cx_R|kPWIq(7yaMg`R_uuT8C%DLLfp_sOt5;-g?cwFu^ED08)rd= zhH*=`#vGLeDp!q1I8f{mmGvVv&!BrJ<7j(DlsA@^Mke2n<^L>ILm! 
zWM32N`pJf^=_XMq3}#Q#h6ylplV#jgVajQrv8;W@UK{68lh{PRtFY$08HRyu4Ij30 zk>W356uq{o!VE$C?AcglX`i!>e{G?NjP7lf)}SEWJlWtI-C+@EC+ZE{o{n~*>SMd` z6DC9(i;J-+@v;E>Lhx+KTTk>yvcX;zU2W%m>0O0sd*|GSvr9JFh4)9u7z{zE*#zSn zd1dFRreMSJGon#Fk~h9LBukG;g znR#>Uh$qX+1lVSKRxs`>I#lmp&H>wA6){dp7N!Mf*mu2zb9)}LzbPEsdTIx(;9WfZ ze&YdX^sWMpU>J6&Mb$+KPmy|`A3Dc7EWHupOjNen77W2OF8nuUKrhoYnBwEMI`ab)J16i)OD+L%b z=6w*4%sm{!k7_^}AkQJkZt(~w<`R$Q3|%IusX0&^q`jC8QRoIHz@+*c{pZ$-$VwcK zPDIkCc^Rh<9DSo@rOgx6&nCzQ?s0w)Yk`WvmS_Ad`o2wgn2Loem_WkH4~$pM7N=~CQ)TrCo-VAQbw)=Oa; zJYiY7vi z%cci2>~pM#h}1gZuE%b3o4O% z5nvpV)+xp4N|UEMduXN|R5*5c4BsubiN{2t(K6Jh8=gHS0(EfooMz8EIKCBWnwJD9 zw}O`w4s5>=jAc!DVmS)e_Ipv(r{)dW-I(QA%3u3HZOnDHVTKsU|LPE17ZO?coM5;J zV2iLR=*Dsj2rfB#DvHurqTyA9&En?gS`{#3|EukT!L}Pl`sd&2|D?}@~_#Az) z##Rc_omcn1+GKkKe7@I6K-KnpUf3tF`hF$EV8}B@sZ7$n>ABmsa7}$jfReL2R-!y* z-Rojv*3EvIq_l5|Ks(CH6O+<5z!46_xqJ*0itVNuN)R#HE@v>8h@t_-GFRIA3DK~AFtcv<7?c<|PmU5|@^IlL6KHz- zLr!|J)>tM-^jRVHyh`IJE1IEp0g z@fVLgx$C(t_AgUnAz0R5Z5IeI)D&{=K!bfnP@Tg2+ve%nO>68@QD}p9IPO~56BL1i zP0>AegJ(uO`T%F^@q*3>+0z)tg=hc$HnFRS@ZQ2ke!mEe_v=_Fn4>Uz0EQ2xv$~xJ zzWE}>H9Qm{UcP|~MPY66W?RWOv7#_lX2(Q}KOUA1_dD%}Kg(s+fu5{SiNauyPoYj_ z?Oh1Hy9)c-RN~?=p$?m8-GpgZY303kqr!3Qx!^E{g-+meD(&IDc8>^g=ti;m-Ji>V zy))L%auJv;jsZ=!O(0e^N~%%;6FHFUl{G1nHtNl~*met1E}x{SFCU38<{Xhm1@o7= zl7=G-Vjr7!zT}GPHq538Q!?j?q+KOIFZg83m-{M9h(O^y7SH9GFy)D>+BQzGbQef+ zpL;-nvFKbo&!#FIn*iS_V{3(D&quskx-c#3(;FA`3bSg+g6=#Pu`@PMq_`e} z#t&lV%{E%Zu%3A@m^ ze6hXi8MFKA#4r9IL7EfUd(xw_3uZu~wPy}2vWpasYe|Qtgk7B@pk~P{NPiJ%RaJ@; zL#+U<3bq{<*;I$w#{FV?PV0(Y%$Z@c9Tki}mIQ3IY zLY#o}2dh^^izCgw3agRA`pRL&$Tbs=*zZN8*t^1%Fw3LFepCyTV zQz#5nibBb6wA6GiIbhYf1o%HL#Z}sLQDRq1@X1}QuT-GXwrf15>+G$o1Zq+KEq3pueeSUcQh-9?M5FP%3LGfOGERwTyx)rVf$!eh=f`?eEU zOCz{JV<Rn7hAuetOHoM0I9GqXt$hvX*sS#V2bhjjvRXx`dA#Wn~s5fyk;-{#>&+fLi1F#WGYVir9fAxb#Dj@@9t0KIEp zy{>)rk@nS7?1(6|tFUb=CgCgXs4#WV`TCPP8Phh%w7{zb1=hVZ#tM%&3pV? 
z0#wR!A3DH3|4>YaxIGdv0<^Ptx87Exa2$q25lrbt!WssD%f{*UdRrl)S{bZPh$VeU zlvp}ecz^5&2|-5x=boCH(Q{pm#ic-@53o=r3iFH-0K-MJ#hw?WImMZLd&Q&B_(vMR zUK7;FC2&=0?H+j(7jqavZwRtfI080{>1<~_o~r`46+0(L*LX$_UYg$%jz8w_pbHu8 znazo1&{jTM+>@^mL2VP)bfJA+ke+Av&(H3k!e8qp3L{={Y*&$8Bfw~duMz&p_!=#; z+eB2mqS$(~-Elq@Nf1IJ)qET8<&^3IQ7yo!Ht^#%29tX5=7Pz>$%(m< zymdBTVH%2fp{}_Nf;1~po{#Tb0eT)ds!aG#JZ-wF?Jy4`Z#v|WQ{G`eHJ8+M;`E`# z9kuJv?wf^?TKf+X{b%||3!U<~px1~f-u%E?C?!4?Qm4Gd*C;@bTiMTwuQ z;TTLS$2Q$oPejw&S3in2kL}M<&`@238K_pLFiRb^YjFH%ADB-;@z-0Pm{@85D$G=# zT%*qyqb8Nbt3{+SI2Ne4>vB0J$%$IP`e4YiDPm~whGpE;2IbO?E#wIC1L63Czw{a_ z5Mo?;`hfXl#SUrwM8u!(^|UZ-n9>J?Xd|9nlc)&T0%5gLuV*zfxsJ$BW3GfTw+*n} z4zUXJ#=(8|GeOtllQ=n+sn%roEDu7=%Y!N6o%Uy?p@ICac|!cF5Mz!dL~gzq54KM) zfYNmr!g4VeA<8&vCc1tr#i>GZ2fivy*Z67hNDlLl zzO>M8a2WlB*o~f+s}Sd`!9pBohF9A#1+~f&Jont^nFb&R!eL>iAj_&-_iL!5CysnY zh>H~asXQCohaZN*LOZu-rd_CT_^}luUU8!kZD$_F@SrXg?66B+T zTn?A2(rQJdypbbD+LT`^& zgYe|XdG0As$qG-}^PZBuw~(XL%MRODqrA4zUiG9s`Nz%*PzuVboPPi2DF?uR&BuA> zLLy_g#-%XZU26X-3Jt<9^m6;M01f7MS&#{$YW5A07*x7lk;L$~-6R5y=6o(5?=89b zQLxHNZJY@4cXe_JtDr(yzY^J5+!EACgHiMuxpX6BlxUGKL)p1%r8O!XD;*^ZL$*@{ z20wUl-w-6lwpnYggmGzv+kWCD41&G+gu;IML|jX^Tst8eEzjGx*Syj$TSUC}D4e)nuCNAe2YT}XP%JOKdV?%cTyXCPCku$tXkkpAn z>=a%rK>rF-#hF5^^9=UftQpRoB^(|Ch1}VO9a5osNC~K(Jpra@_~OnJiRr}dYaKEV z`(D@x>u-gK)Y7d_-)n58KwJTN#c9m$ZWe(;wGJm2*;Y>sr?1)$2{0B#DHtsFb0H?G zGBerEdNSyW(;i{Lg~FfN1249Ao^g|GPdSF$Y#Tp&Wn0RvI?<-oC*N9(hKq?|mN9p)W@*xtd#a1-7YW)y$;!ynowmY$D0we!`*5o;i2 z+(B$vQv#~N&lPX6pB189#(BPdPJq4pmo;{=ry4u~i%534kn~I4S?-3cJi!EcBZPCV zR%~nz_ViIM5T-f9Sg)fLW`JZEAs+ni5QPT07vvfCeGzETB*evwZHf>L8j|58JX?UN zOtQm;BLzQ)hdsa61#FM~_NWNVZC*M!m)atz8YT03sJ&*V?e+v{KbN*0w!K~etW50g zVRfRdD@Q$+_m=3;`jjZKM`cMS<27OS#mIyS4*nK$#I;|0Q96N0&dvTVLR|T`a)*E4 zve*{ZCp-y+>g@ti=sj}Va6FLc`-BrnQVKh<-2PnzhC@O)3A;v!vFy{cm*so7Cweo5 z3j03LJU!dI*Qz{UQE?_O&mIuc4;l02VjRN!+$2)`UGURJ&mYU(yK==0L-vzjbfMUMe(pT#+gwqoigDoTUY4{+MTp<<35m%CiQv>ED#noU3&bfe!c(#Rhew{Ae{z9coQp(J z8#_P3KKjw`XwRHB=d@G 
zrV!IUKAEs%3TrBZJezKTc3LEcp`-R-N9~~q$y)>yuf)zNl5Q2{^J{tA0YCkN_7{QJ z#{oDvw|{#i7Q^(-LpbVFf=mHy#3)$*MJF(Pcu7T#=dv8(>-xufh(OHHzc<#4L1*zi9v|08|h2w7u3(@?bIaKvNpL~sU60h^* zr{_FuqdjU2)+#v!-zFG47rq@y$2e%yfLfVw98Y*u<9R1rB1-&G1nD%)lFhcGIpXNR z#uhj3MG?aiX~yi9y(AbLid*x@^?OA$CWI?$Q?iZw>wkd6R67e{m z+dCIGorkFjaLy*$bdNo@s~ir8EfA#G$!+V{owf_np|k7jtjQB*H}NX9!=4nRira?` z8D{_O=?37j>Q#j)K{A8|)+watJH0&l@N$~cGx)n+LYeN~{}70~UVom5)Apg&u^5DK zc!&P1kpE!I5j+39$EGG1U~%}%LJSpWn4?cmwJRLzNX#ARTe-YFdvdQ2SgE(6KZb29 zMbpOuo>gvAm{z>B7PVtN)|877SIa9qWiRj5MRuQP>_?uz2+w3S9v(7g zn9cL>#FA>8?^zgRgf(oZAVY-lpkvdmNhqFl@u`Jw_GH_AK7FaAil1|MsxjD~cvUdo zY_(c-apn8J&wlC2SYXv$^Ew3MkQVXShM5=hKklMIVI+;VYo=N?u8?W1njf#2r|-{ay|yPd-j2TjVbg95^>W2SSIx%B^blACg`RSG! z_OW%bDTp2v+ob|b0NuWIoLwzMMm7y?vV8?9|8&hvyH;TaAy@_3pqKCpkL}sxo3_!z z>9#rcvWHnI_L}GF5|&$Oe-MR%D3NNzhaZiFhOHBx=ra4LD72>}DHjY^dpxZ>w|3>Z z)eG!fqR^yK@M(Uaa9prg^zMG8P4K)D70Xzar|j0Fw=M7(F6e9>jY8VoC5$1?@gBaq z&DJC`%QqM{S_f|pQ;haA5#vv0RH70d-{T_0N>8rir1X?97lWJcu#*bLbrPNlVbzP` zGSzyykc+`1_NJ&&7mFL%VHURxP~)_VuW^n2PO<7Cm(usI&*j%STH>9%?31FB;(LQ{ zwa+RXn=@1vD!(B7PhUX<6q~G%6SK_?*r?7n@5F#2Wg;^_ zysxKhy#VW)Pb_jf7(K*`h)DGJyOXU=LF#&O3RhoDzoKTh(_Rx%YmfZ}@u*)3Qrv+4 z1Bcr=k7GKAj~rot@wh6UF6YJ;!oL+F)E*6aY zT{)-kV6y;q8T*zOl9w)p*F02+aT+{kunqSDh9@%P;a}xDq$St%z4kmr?^T|`lZvI$ zCl;SrZPOeSq!^HS4kQa{N4*g7)!6BtaX7I8o5vIFqZ=tC&P)W!QBr6JkDsJk#MI z(tN;bL}XD0&mcSbYtu!c?)KT++h;Ad8G^Cy+soyvDG;Nn@y+B_3$i{Z%%#b#9Sy-8T7 zm%slDvyvic8C z!{|zx_Jf-#MblZoQ5)z*fPHLiTcz?zbyMrA}E= zz}~bYwl<`=1EQU$XFP1TDTbNkIc<87L^@y@k!aMpDJ|z#EV4{hA-mG4SB zAqw+`+%j@ZrS_~ajqcXFyPZ>*&Y^|H*7>fGx*&Tudjdn|-?zjB_+Wttccl;ssBK)b zFqO~n^c0B}@eVCnrbfF~1pg7q*tB&XW{9&{4TE-fi_%&hW3sBl;I%8eRuJB z94s0mm)#Uzo`-t!!J~)SEegjURb?{26j3WU4uzhc&8&UaLaP$gk!i!;ASkmloIP5C zU8bEjN3q(BYSslu)Ls4t9~M3SY*4ynEEjvJbXTv^YXtQ{!XTDb={8|MF6vcS+BM;C ztL^uk($_z=*A6=DsGX&ycSw-m28zxV_Lx8%Q`pc!5cV077@h07UuDlK%nXf6>Rsqp zLNrlJ3>Lv$`;7=%_u^5s+Fu0eT6R-Qcv*<=3r+S9(JAru#f|5~r*|H-_ccPN^x